Merge branch 'master' into alt-tx-unfurls
Commit 0b7aa8a83c
@@ -7,11 +7,14 @@ const config: Config.InitialOptions = {
  automock: false,
  collectCoverage: true,
  collectCoverageFrom: ["./src/**/**.ts"],
  coverageProvider: "v8",
  coverageProvider: "babel",
  coverageThreshold: {
    global: {
      lines: 1
    }
  },
  setupFiles: [
    "./testSetup.ts",
  ],
}

export default config;
backend/src/__tests__/api/difficulty-adjustment.test.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
import { calcDifficultyAdjustment, DifficultyAdjustment } from '../../api/difficulty-adjustment';

describe('Mempool Difficulty Adjustment', () => {
  test('should calculate Difficulty Adjustments properly', () => {
    const dt = (dtString) => {
      return Math.floor(new Date(dtString).getTime() / 1000);
    };

    const vectors = [
      [ // Vector 1
        [ // Inputs
          dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
          dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
          750134, // Current block height
          0.6280047707459726, // Previous retarget % (Passed through)
          'mainnet', // Network (if testnet, next value is non-zero)
          0, // If not testnet, not used
        ],
        { // Expected Result
          progressPercent: 9.027777777777777,
          difficultyChange: 12.562233927411782,
          estimatedRetargetDate: 1661895424692,
          remainingBlocks: 1834,
          remainingTime: 977591692,
          previousRetarget: 0.6280047707459726,
          nextRetargetHeight: 751968,
          timeAvg: 533038,
          timeOffset: 0,
        },
      ],
      [ // Vector 2 (testnet)
        [ // Inputs
          dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
          dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
          750134, // Current block height
          0.6280047707459726, // Previous retarget % (Passed through)
          'testnet', // Network
          dt('2022-08-19T13:52:46.000Z'), // Latest block timestamp in seconds
        ],
        { // Expected Result is same other than timeOffset
          progressPercent: 9.027777777777777,
          difficultyChange: 12.562233927411782,
          estimatedRetargetDate: 1661895424692,
          remainingBlocks: 1834,
          remainingTime: 977591692,
          previousRetarget: 0.6280047707459726,
          nextRetargetHeight: 751968,
          timeAvg: 533038,
          timeOffset: -667000, // 11 min 7 seconds since last block (testnet only)
          // If we add time avg to abs(timeOffset) it makes exactly 1200000 ms, or 20 minutes
        },
      ],
    ] as [[number, number, number, number, string, number], DifficultyAdjustment][];

    for (const vector of vectors) {
      const result = calcDifficultyAdjustment(...vector[0]);
      // previousRetarget is passed through untouched
      expect(result.previousRetarget).toStrictEqual(vector[0][3]);
      expect(result).toStrictEqual(vector[1]);
    }
  });
});
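For reference, the expected values in Vector 1 can be re-derived by hand from the constants used by calcDifficultyAdjustment. The snippet below is an illustrative check only, not part of this commit; it assumes the same relative import path as the test file above and hard-codes the two timestamps as Unix seconds.

import { calcDifficultyAdjustment } from '../../api/difficulty-adjustment';

// 750134 % 2016 = 182 blocks into the epoch, so progressPercent = 182 / 2016 * 100 ≈ 9.0278
// and remainingBlocks = 2016 - 182 = 1834. The two timestamps are 97013 seconds apart,
// so timeAvg = Math.floor(97013 / 182 * 1000) = 533038 ms and
// difficultyChange = (600 / 533.038... - 1) * 100 ≈ 12.56 %.
// remainingTime = 1834 * 533038 = 977591692 ms; adding nowSeconds * 1000
// gives estimatedRetargetDate = 1661895424692.
const result = calcDifficultyAdjustment(
  1660820820, // 2022-08-18T11:07:00Z
  1660917833, // 2022-08-19T14:03:53Z
  750134,
  0.6280047707459726,
  'mainnet',
  0,
);
console.log(result.remainingBlocks, result.timeAvg, result.estimatedRetargetDate);
// 1834 533038 1661895424692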
@@ -136,5 +136,4 @@ describe('Mempool Backend Config', () => {
      expect(config.EXTERNAL_DATA_SERVER).toStrictEqual(fixture.EXTERNAL_DATA_SERVER);
    });
  });

});
@@ -4,7 +4,7 @@ import logger from '../logger';
import { Common } from './common';

class DatabaseMigration {
  private static currentVersion = 39;
  private static currentVersion = 40;
  private queryTimeout = 120000;
  private statisticsAddedIndexed = false;
  private uniqueLogs: string[] = [];
@@ -342,6 +342,12 @@ class DatabaseMigration {
      await this.$executeQuery('ALTER TABLE `nodes` ADD alias_search TEXT NULL DEFAULT NULL AFTER `alias`');
      await this.$executeQuery('ALTER TABLE nodes ADD FULLTEXT(alias_search)');
    }

    if (databaseSchemaVersion < 40 && isBitcoin === true) {
      await this.$executeQuery('ALTER TABLE `nodes` ADD capacity bigint(20) unsigned DEFAULT NULL');
      await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
      await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`);');
    }
  }

  /**
@@ -2,6 +2,84 @@ import config from '../config';
import { IDifficultyAdjustment } from '../mempool.interfaces';
import blocks from './blocks';

export interface DifficultyAdjustment {
  progressPercent: number;       // Percent: 0 to 100
  difficultyChange: number;      // Percent: -75 to 300
  estimatedRetargetDate: number; // Unix time in ms
  remainingBlocks: number;       // Block count
  remainingTime: number;         // Duration of time in ms
  previousRetarget: number;      // Percent: -75 to 300
  nextRetargetHeight: number;    // Block Height
  timeAvg: number;               // Duration of time in ms
  timeOffset: number;            // (Testnet) Time since last block (cap @ 20min) in ms
}

export function calcDifficultyAdjustment(
  DATime: number,
  nowSeconds: number,
  blockHeight: number,
  previousRetarget: number,
  network: string,
  latestBlockTimestamp: number,
): DifficultyAdjustment {
  const ESTIMATE_LAG_BLOCKS = 146; // For first 7.2% of epoch, don't estimate.
  const EPOCH_BLOCK_LENGTH = 2016; // Bitcoin mainnet
  const BLOCK_SECONDS_TARGET = 600; // Bitcoin mainnet
  const TESTNET_MAX_BLOCK_SECONDS = 1200; // Bitcoin testnet

  const diffSeconds = nowSeconds - DATime;
  const blocksInEpoch = (blockHeight >= 0) ? blockHeight % EPOCH_BLOCK_LENGTH : 0;
  const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
  const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
  const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;

  let difficultyChange = 0;
  let timeAvgSecs = BLOCK_SECONDS_TARGET;
  // Only calculate the estimate once we have 7.2% of blocks in current epoch
  if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
    timeAvgSecs = diffSeconds / blocksInEpoch;
    difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
    // Max increase is x4 (+300%)
    if (difficultyChange > 300) {
      difficultyChange = 300;
    }
    // Max decrease is /4 (-75%)
    if (difficultyChange < -75) {
      difficultyChange = -75;
    }
  }

  // Testnet difficulty is set to 1 after 20 minutes of no blocks,
  // therefore the time between blocks will always be below 20 minutes (1200s).
  let timeOffset = 0;
  if (network === 'testnet') {
    if (timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
      timeAvgSecs = TESTNET_MAX_BLOCK_SECONDS;
    }

    const secondsSinceLastBlock = nowSeconds - latestBlockTimestamp;
    if (secondsSinceLastBlock + timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
      timeOffset = -Math.min(secondsSinceLastBlock, TESTNET_MAX_BLOCK_SECONDS) * 1000;
    }
  }

  const timeAvg = Math.floor(timeAvgSecs * 1000);
  const remainingTime = remainingBlocks * timeAvg;
  const estimatedRetargetDate = remainingTime + nowSeconds * 1000;

  return {
    progressPercent,
    difficultyChange,
    estimatedRetargetDate,
    remainingBlocks,
    remainingTime,
    previousRetarget,
    nextRetargetHeight,
    timeAvg,
    timeOffset,
  };
}

class DifficultyAdjustmentApi {
  constructor() { }

@@ -11,56 +89,12 @@ class DifficultyAdjustmentApi {
    const blockHeight = blocks.getCurrentBlockHeight();
    const blocksCache = blocks.getBlocks();
    const latestBlock = blocksCache[blocksCache.length - 1];
    const nowSeconds = Math.floor(new Date().getTime() / 1000);

    const now = new Date().getTime() / 1000;
    const diff = now - DATime;
    const blocksInEpoch = blockHeight % 2016;
    const progressPercent = (blocksInEpoch >= 0) ? blocksInEpoch / 2016 * 100 : 100;
    const remainingBlocks = 2016 - blocksInEpoch;
    const nextRetargetHeight = blockHeight + remainingBlocks;

    let difficultyChange = 0;
    if (remainingBlocks < 1870) {
      if (blocksInEpoch > 0) {
        difficultyChange = (600 / (diff / blocksInEpoch) - 1) * 100;
      }
      if (difficultyChange > 300) {
        difficultyChange = 300;
      }
      if (difficultyChange < -75) {
        difficultyChange = -75;
      }
    }

    let timeAvgMins = blocksInEpoch && blocksInEpoch > 146 ? diff / blocksInEpoch / 60 : 10;

    // Testnet difficulty is set to 1 after 20 minutes of no blocks,
    // therefore the time between blocks will always be below 20 minutes (1200s).
    let timeOffset = 0;
    if (config.MEMPOOL.NETWORK === 'testnet') {
      if (timeAvgMins > 20) {
        timeAvgMins = 20;
      }
      if (now - latestBlock.timestamp + timeAvgMins * 60 > 1200) {
        timeOffset = -Math.min(now - latestBlock.timestamp, 1200) * 1000;
      }
    }

    const timeAvg = timeAvgMins * 60 * 1000 ;
    const remainingTime = (remainingBlocks * timeAvg) + (now * 1000);
    const estimatedRetargetDate = remainingTime + now;

    return {
      progressPercent,
      difficultyChange,
      estimatedRetargetDate,
      remainingBlocks,
      remainingTime,
      previousRetarget,
      nextRetargetHeight,
      timeAvg,
      timeOffset,
    };
    return calcDifficultyAdjustment(
      DATime, nowSeconds, blockHeight, previousRetarget,
      config.MEMPOOL.NETWORK, latestBlock.timestamp
    );
  }
}

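The clamp on difficultyChange (at most +300%, at least -75%) can also be exercised directly. The check below is a supplementary sketch, not part of this commit; the input values are invented, and the import path assumes a caller sitting next to difficulty-adjustment.ts.

import { calcDifficultyAdjustment } from './difficulty-adjustment';

// 1000 blocks mined in 100000 seconds averages 100 s per block, so the raw estimate
// (600 / 100 - 1) * 100 = 500 % is clamped to the +300 % maximum.
const capped = calcDifficultyAdjustment(0, 100000, 750952, 0, 'mainnet', 0);
console.log(capped.difficultyChange); // 300
console.log(capped.remainingBlocks);  // 2016 - (750952 % 2016) = 1016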
@@ -288,21 +288,36 @@ class ChannelsApi {

      const channels: any[] = []
      for (const row of allChannels) {
        const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
        channels.push({
          status: row.status,
          closing_reason: row.closing_reason,
          capacity: row.capacity ?? 0,
          short_id: row.short_id,
          id: row.id,
          fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
          node: {
            alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
            public_key: row.public_key,
            channels: activeChannelsStats.active_channel_count ?? 0,
            capacity: activeChannelsStats.capacity ?? 0,
          }
        });
        let channel;
        if (index >= 0) {
          const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
          channel = {
            status: row.status,
            closing_reason: row.closing_reason,
            capacity: row.capacity ?? 0,
            short_id: row.short_id,
            id: row.id,
            fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
            node: {
              alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
              public_key: row.public_key,
              channels: activeChannelsStats.active_channel_count ?? 0,
              capacity: activeChannelsStats.capacity ?? 0,
            }
          };
        } else if (index === -1) {
          channel = {
            capacity: row.capacity ?? 0,
            short_id: row.short_id,
            id: row.id,
            node: {
              alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
              public_key: row.public_key,
            }
          };
        }

        channels.push(channel);
      }

      return channels;
@@ -47,8 +47,17 @@ class ChannelsRoutes {
        res.status(400).send('Missing parameter: public_key');
        return;
      }

      const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
      const status: string = typeof req.query.status === 'string' ? req.query.status : '';

      if (index < -1) {
        res.status(400).send('Invalid index');
      }
      if (['open', 'active', 'closed'].includes(status) === false) {
        res.status(400).send('Invalid status');
      }

      const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
      const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
      res.header('Pragma', 'public');
@@ -61,7 +70,7 @@ class ChannelsRoutes {
    }
  }

  private async $getChannelsByTransactionIds(req: Request, res: Response) {
  private async $getChannelsByTransactionIds(req: Request, res: Response): Promise<void> {
    try {
      if (!Array.isArray(req.query.txId)) {
        res.status(400).send('Not an array');
@@ -74,27 +83,26 @@ class ChannelsRoutes {
        }
      }
      const channels = await channelsApi.$getChannelsByTransactionId(txIds);
      const inputs: any[] = [];
      const outputs: any[] = [];
      const result: any[] = [];
      for (const txid of txIds) {
        const foundChannelInputs = channels.find((channel) => channel.closing_transaction_id === txid);
        if (foundChannelInputs) {
          inputs.push(foundChannelInputs);
        } else {
          inputs.push(null);
        const inputs: any = {};
        const outputs: any = {};
        // Assuming that we only have one lightning close input in each transaction. This may not be true in the future
        const foundChannelsFromInput = channels.find((channel) => channel.closing_transaction_id === txid);
        if (foundChannelsFromInput) {
          inputs[0] = foundChannelsFromInput;
        }
        const foundChannelOutputs = channels.find((channel) => channel.transaction_id === txid);
        if (foundChannelOutputs) {
          outputs.push(foundChannelOutputs);
        } else {
          outputs.push(null);
        const foundChannelsFromOutputs = channels.filter((channel) => channel.transaction_id === txid);
        for (const output of foundChannelsFromOutputs) {
          outputs[output.transaction_vout] = output;
        }
        result.push({
          inputs,
          outputs,
        });
      }

      res.json({
        inputs: inputs,
        outputs: outputs,
      });
      res.json(result);
    } catch (e) {
      res.status(500).send(e instanceof Error ? e.message : e);
    }
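With this change, /api/v1/lightning/channels/txids returns one entry per requested txid instead of two flat arrays, and each entry keys outputs by transaction_vout. The shape below is an illustrative sketch derived from the handler above; the field values are invented.

// GET /api/v1/lightning/channels/txids/?txId[]=<txid1>&txId[]=<txid2>
// Response is aligned with the order of the txId[] query parameters:
// [
//   {
//     inputs: { 0: { /* channel whose closing_transaction_id is txid1 */ } },
//     outputs: { 1: { /* channel funded by output 1 of txid1 (transaction_vout = 1) */ } },
//   },
//   { inputs: {}, outputs: {} },  // txid2 matches no known channel
// ]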
@@ -115,17 +115,13 @@ class NodesApi {

  public async $getTopCapacityNodes(full: boolean): Promise<ITopNodesPerCapacity[]> {
    try {
      let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
      const latestDate = rows[0].maxAdded;

      let rows: any;
      let query: string;
      if (full === false) {
        query = `
          SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
            node_stats.capacity
          FROM node_stats
          JOIN nodes ON nodes.public_key = node_stats.public_key
          WHERE added = FROM_UNIXTIME(${latestDate})
            nodes.capacity
          FROM nodes
          ORDER BY capacity DESC
          LIMIT 100
        `;
@@ -133,16 +129,14 @@ class NodesApi {
        [rows] = await DB.query(query);
      } else {
        query = `
          SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
            CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
            CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
          SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
            CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
            CAST(COALESCE(nodes.channels, 0) as INT) as channels,
            UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
            geo_names_city.names as city, geo_names_country.names as country
          FROM node_stats
          RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
          FROM nodes
          LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
          LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
          WHERE added = FROM_UNIXTIME(${latestDate})
          ORDER BY capacity DESC
          LIMIT 100
        `;
@@ -163,17 +157,13 @@ class NodesApi {

  public async $getTopChannelsNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
    try {
      let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
      const latestDate = rows[0].maxAdded;

      let rows: any;
      let query: string;
      if (full === false) {
        query = `
          SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
            node_stats.channels
          FROM node_stats
          JOIN nodes ON nodes.public_key = node_stats.public_key
          WHERE added = FROM_UNIXTIME(${latestDate})
            nodes.channels
          FROM nodes
          ORDER BY channels DESC
          LIMIT 100;
        `;
@@ -181,16 +171,14 @@ class NodesApi {
        [rows] = await DB.query(query);
      } else {
        query = `
          SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
            CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
            CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
          SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
            CAST(COALESCE(nodes.channels, 0) as INT) as channels,
            CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
            UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
            geo_names_city.names as city, geo_names_country.names as country
          FROM node_stats
          RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
          FROM nodes
          LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
          LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
          WHERE added = FROM_UNIXTIME(${latestDate})
          ORDER BY channels DESC
          LIMIT 100
        `;
@@ -260,8 +248,8 @@ class NodesApi {
  public async $searchNodeByPublicKeyOrAlias(search: string) {
    try {
      const publicKeySearch = search.replace('%', '') + '%';
      const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
      const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR MATCH nodes.alias_search AGAINST (? IN BOOLEAN MODE) GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`;
      const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
      const query = `SELECT public_key, alias, capacity, channels FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
      const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
      return rows;
    } catch (e) {
@@ -276,7 +264,7 @@ class NodesApi {

      // List all channels and the two linked ISP
      query = `
        SELECT short_id, capacity,
        SELECT short_id, channels.capacity,
          channels.node1_public_key AS node1PublicKey, isp1.names AS isp1, isp1.id as isp1ID,
          channels.node2_public_key AS node2PublicKey, isp2.names AS isp2, isp2.id as isp2ID
        FROM channels
@@ -391,17 +379,11 @@ class NodesApi {
  public async $getNodesPerCountry(countryId: string) {
    try {
      const query = `
        SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
        SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
          nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
          geo_names_city.names as city, geo_names_country.names as country,
          geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
        FROM node_stats
        JOIN (
          SELECT public_key, MAX(added) as last_added
          FROM node_stats
          GROUP BY public_key
        ) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
        RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
        FROM nodes
        LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
        LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
        LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
@@ -426,17 +408,11 @@ class NodesApi {
  public async $getNodesPerISP(ISPId: string) {
    try {
      const query = `
        SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
        SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
          nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
          geo_names_city.names as city, geo_names_country.names as country,
          geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
        FROM node_stats
        JOIN (
          SELECT public_key, MAX(added) as last_added
          FROM node_stats
          GROUP BY public_key
        ) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
        RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
        FROM nodes
        LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
        LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
        LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
@@ -464,7 +440,6 @@ class NodesApi {
        FROM nodes
        JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
        JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
        JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
        GROUP BY country_id
        ORDER BY COUNT(DISTINCT nodes.public_key) DESC
      `;
@@ -555,7 +530,7 @@ class NodesApi {
  }

  private aliasToSearchText(str: string): string {
    return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z ]/g, '');
    return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '');
  }
}

@@ -7,6 +7,7 @@ import { ILightningApi } from '../../../api/lightning/lightning-api.interface';
import { isIP } from 'net';
import { Common } from '../../../api/common';
import channelsApi from '../../../api/explorer/channels.api';
import nodesApi from '../../../api/explorer/nodes.api';

const fsPromises = promises;

@@ -32,7 +33,26 @@ class LightningStatsImporter {
    let clearnetTorNodes = 0;
    let unannouncedNodes = 0;

    const [nodesInDbRaw]: any[] = await DB.query(`SELECT public_key FROM nodes`);
    const nodesInDb = {};
    for (const node of nodesInDbRaw) {
      nodesInDb[node.public_key] = node;
    }

    for (const node of networkGraph.nodes) {
      // If we don't know about this node, insert it in db
      if (isHistorical === true && !nodesInDb[node.pub_key]) {
        await nodesApi.$saveNode({
          last_update: node.last_update,
          pub_key: node.pub_key,
          alias: node.alias,
          addresses: node.addresses,
          color: node.color,
          features: node.features,
        });
        nodesInDb[node.pub_key] = node;
      }

      let hasOnion = false;
      let hasClearnet = false;
      let isUnnanounced = true;
@@ -69,7 +89,7 @@ class LightningStatsImporter {
    const baseFees: number[] = [];
    const alreadyCountedChannels = {};

    const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id, created FROM channels`);
    const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id FROM channels`);
    const channelsInDb = {};
    for (const channel of channelsInDbRaw) {
      channelsInDb[channel.short_id] = channel;
@@ -84,29 +104,19 @@ class LightningStatsImporter {
        continue;
      }

      // Channel is already in db, check if we need to update 'created' field
      if (isHistorical === true) {
        //@ts-ignore
        if (channelsInDb[short_id] && channel.timestamp < channel.created) {
          await DB.query(`
            UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.short_id = ?`,
            //@ts-ignore
            [channel.timestamp, short_id]
          );
        } else if (!channelsInDb[short_id]) {
          await channelsApi.$saveChannel({
            channel_id: short_id,
            chan_point: `${tx.txid}:${short_id.split('x')[2]}`,
            //@ts-ignore
            last_update: channel.timestamp,
            node1_pub: channel.node1_pub,
            node2_pub: channel.node2_pub,
            capacity: (tx.value * 100000000).toString(),
            node1_policy: null,
            node2_policy: null,
          }, 0);
          channelsInDb[channel.channel_id] = channel;
        }
      // If we don't know about this channel, insert it in db
      if (isHistorical === true && !channelsInDb[short_id]) {
        await channelsApi.$saveChannel({
          channel_id: short_id,
          chan_point: `${tx.txid}:${short_id.split('x')[2]}`,
          last_update: channel.last_update,
          node1_pub: channel.node1_pub,
          node2_pub: channel.node2_pub,
          capacity: (tx.value * 100000000).toString(),
          node1_policy: null,
          node2_policy: null,
        }, 0);
        channelsInDb[channel.channel_id] = channel;
      }

      if (!nodeStats[channel.node1_pub]) {
@@ -269,6 +279,17 @@ class LightningStatsImporter {
        nodeStats[public_key].capacity,
        nodeStats[public_key].channels,
      ]);

      if (!isHistorical) {
        await DB.query(
          `UPDATE nodes SET capacity = ?, channels = ? WHERE public_key = ?`,
          [
            nodeStats[public_key].capacity,
            nodeStats[public_key].channels,
            public_key,
          ]
        );
      }
    }

    return {
@@ -281,6 +302,7 @@ class LightningStatsImporter {
   * Import topology files LN historical data into the database
   */
  async $importHistoricalLightningStats(): Promise<void> {
    logger.debug('Run the historical importer');
    try {
      let fileList: string[] = [];
      try {
@@ -294,7 +316,7 @@ class LightningStatsImporter {
      fileList.sort().reverse();

      const [rows]: any[] = await DB.query(`
        SELECT UNIX_TIMESTAMP(added) AS added, node_count
        SELECT UNIX_TIMESTAMP(added) AS added
        FROM lightning_stats
        ORDER BY added DESC
      `);
@@ -391,12 +413,16 @@ class LightningStatsImporter {
          });
        }

        let rgb = node.rgb_color ?? '#000000';
        if (rgb.indexOf('#') === -1) {
          rgb = `#${rgb}`;
        }
        newGraph.nodes.push({
          last_update: node.timestamp ?? 0,
          pub_key: node.id ?? null,
          alias: node.alias ?? null,
          alias: node.alias ?? node.id.slice(0, 20),
          addresses: addresses,
          color: node.rgb_color ?? null,
          color: rgb,
          features: {},
        });
      }

backend/testSetup.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
jest.mock('./mempool-config.json', () => ({}), { virtual: true });
jest.mock('./src/logger.ts', () => ({}), { virtual: true });
jest.mock('./src/api/rbf-cache.ts', () => ({}), { virtual: true });
jest.mock('./src/api/mempool.ts', () => ({}), { virtual: true });
jest.mock('./src/api/memory-cache.ts', () => ({}), { virtual: true });
@@ -10,7 +10,7 @@
    <ng-template #blocksPlural let-i i18n="shared.blocks">{{ i }} <span class="shared-block">blocks</span></ng-template>
    <ng-template #blocksSingular let-i i18n="shared.block">{{ i }} <span class="shared-block">block</span></ng-template>
  </div>
  <div class="symbol"><app-time-until [time]="epochData.remainingTime" [fastRender]="true"></app-time-until></div>
  <div class="symbol"><app-time-until [time]="epochData.estimatedRetargetDate" [fastRender]="true"></app-time-until></div>
</div>
<div class="item">
  <h5 class="card-title" i18n="difficulty-box.estimate">Estimate</h5>
@@ -11,7 +11,7 @@ interface EpochProgress {
  newDifficultyHeight: number;
  colorAdjustments: string;
  colorPreviousAdjustments: string;
  remainingTime: number;
  estimatedRetargetDate: number;
  previousRetarget: number;
  blocksUntilHalving: number;
  timeUntilHalving: number;
@@ -74,7 +74,7 @@ export class DifficultyComponent implements OnInit {
      colorAdjustments,
      colorPreviousAdjustments,
      newDifficultyHeight: da.nextRetargetHeight,
      remainingTime: da.remainingTime,
      estimatedRetargetDate: da.estimatedRetargetDate,
      previousRetarget: da.previousRetarget,
      blocksUntilHalving,
      timeUntilHalving,
@@ -1,7 +1,7 @@
<form [formGroup]="searchForm" (submit)="searchForm.valid && search()" novalidate>
  <div class="d-flex">
    <div class="search-box-container mr-2">
      <input (focus)="focus$.next($any($event).target.value)" (click)="click$.next($any($event).target.value)" formControlName="searchText" type="text" class="form-control" i18n-placeholder="search-form.searchbar-placeholder" placeholder="TXID, block height, hash or address">
      <input (focus)="focus$.next($any($event).target.value)" (click)="click$.next($any($event).target.value)" formControlName="searchText" type="text" class="form-control" i18n-placeholder="search-form.searchbar-placeholder" placeholder="Search the full Bitcoin ecosystem">

      <app-search-results #searchResults [results]="typeAhead$ | async" [searchTerm]="searchForm.get('searchText').value" (selectedResult)="selectedResult($event)"></app-search-results>

@@ -20,7 +20,7 @@
  <div class="col">
    <table class="table table-borderless smaller-text table-sm table-tx-vin">
      <tbody>
        <ng-template ngFor let-vin [ngForOf]="tx['@vinLimit'] ? ((tx.vin.length > rowLimit) ? tx.vin.slice(0, rowLimit - 2) : tx.vin.slice(0, rowLimit)) : tx.vin" [ngForTrackBy]="trackByIndexFn">
        <ng-template ngFor let-vin let-vindex="index" [ngForOf]="tx['@vinLimit'] ? ((tx.vin.length > rowLimit) ? tx.vin.slice(0, rowLimit - 2) : tx.vin.slice(0, rowLimit)) : tx.vin" [ngForTrackBy]="trackByIndexFn">
          <tr [ngClass]="{
            'assetBox': assetsMinimal && vin.prevout && assetsMinimal[vin.prevout.asset] && !vin.is_coinbase && vin.prevout.scriptpubkey_address && tx._unblinded,
            'highlight': vin.prevout?.scriptpubkey_address === this.address && this.address !== ''
@@ -77,7 +77,7 @@
            {{ vin.prevout.scriptpubkey_type?.toUpperCase() }}
          </ng-template>
          <div>
            <app-address-labels [vin]="vin" [channel]="channels && channels.inputs[i] || null"></app-address-labels>
            <app-address-labels [vin]="vin" [channel]="tx._channels && tx._channels.inputs[vindex] || null"></app-address-labels>
          </div>
        </ng-template>
      </ng-container>
@@ -172,7 +172,7 @@
          </span>
        </a>
        <div>
          <app-address-labels [vout]="vout" [channel]="channels && channels.outputs[i] && channels.outputs[i].transaction_vout === vindex ? channels.outputs[i] : null"></app-address-labels>
          <app-address-labels [vout]="vout" [channel]="tx._channels && tx._channels.outputs[vindex] ? tx._channels.outputs[vindex] : null"></app-address-labels>
        </div>
        <ng-template #scriptpubkey_type>
          <ng-template [ngIf]="vout.pegout" [ngIfElse]="defaultscriptpubkey_type">
@@ -212,15 +212,15 @@
          </ng-template>
        </td>
        <td class="arrow-td">
          <span *ngIf="!outspends[i] || vout.scriptpubkey_type === 'op_return' || vout.scriptpubkey_type === 'fee' ; else outspend" class="grey">
          <span *ngIf="!tx._outspends || vout.scriptpubkey_type === 'op_return' || vout.scriptpubkey_type === 'fee' ; else outspend" class="grey">
            <fa-icon [icon]="['fas', 'arrow-alt-circle-right']" [fixedWidth]="true"></fa-icon>
          </span>
          <ng-template #outspend>
            <span *ngIf="!outspends[i][vindex] || !outspends[i][vindex].spent; else spent" class="green">
            <span *ngIf="!tx._outspends[vindex] || !tx._outspends[vindex].spent; else spent" class="green">
              <fa-icon [icon]="['fas', 'arrow-alt-circle-right']" [fixedWidth]="true"></fa-icon>
            </span>
            <ng-template #spent>
              <a *ngIf="outspends[i][vindex].txid else outputNoTxId" [routerLink]="['/tx/' | relativeUrl, outspends[i][vindex].txid]" class="red">
              <a *ngIf="tx._outspends[vindex].txid else outputNoTxId" [routerLink]="['/tx/' | relativeUrl, tx._outspends[vindex].txid]" class="red">
                <fa-icon [icon]="['fas', 'arrow-alt-circle-right']" [fixedWidth]="true"></fa-icon>
              </a>
              <ng-template #outputNoTxId>
@@ -27,7 +27,6 @@ export class TransactionsListComponent implements OnInit, OnChanges {
  @Input() outputIndex: number;
  @Input() address: string = '';
  @Input() rowLimit = 12;
  @Input() channels: { inputs: any[], outputs: any[] };

  @Output() loadMore = new EventEmitter();

@@ -36,8 +35,8 @@ export class TransactionsListComponent implements OnInit, OnChanges {
  refreshOutspends$: ReplaySubject<string[]> = new ReplaySubject();
  refreshChannels$: ReplaySubject<string[]> = new ReplaySubject();
  showDetails$ = new BehaviorSubject<boolean>(false);
  outspends: Outspend[][] = [];
  assetsMinimal: any;
  transactionsLength: number = 0;

  constructor(
    public stateService: StateService,
@@ -47,7 +46,7 @@ export class TransactionsListComponent implements OnInit, OnChanges {
    private ref: ChangeDetectorRef,
  ) { }

  ngOnInit() {
  ngOnInit(): void {
    this.latestBlock$ = this.stateService.blocks$.pipe(map(([block]) => block));
    this.stateService.networkChanged$.subscribe((network) => this.network = network);

@@ -62,14 +61,17 @@ export class TransactionsListComponent implements OnInit, OnChanges {
      .pipe(
        switchMap((txIds) => this.apiService.getOutspendsBatched$(txIds)),
        tap((outspends: Outspend[][]) => {
          this.outspends = this.outspends.concat(outspends);
          const transactions = this.transactions.filter((tx) => !tx._outspends);
          outspends.forEach((outspend, i) => {
            transactions[i]._outspends = outspend;
          });
        }),
      ),
      this.stateService.utxoSpent$
        .pipe(
          tap((utxoSpent) => {
            for (const i in utxoSpent) {
              this.outspends[0][i] = {
              this.transactions[0]._outspends[i] = {
                spent: true,
                txid: utxoSpent[i].txid,
                vin: utxoSpent[i].vin,
@@ -81,21 +83,23 @@ export class TransactionsListComponent implements OnInit, OnChanges {
        .pipe(
          filter(() => this.stateService.env.LIGHTNING),
          switchMap((txIds) => this.apiService.getChannelByTxIds$(txIds)),
          map((channels) => {
            this.channels = channels;
          tap((channels) => {
            const transactions = this.transactions.filter((tx) => !tx._channels);
            channels.forEach((channel, i) => {
              transactions[i]._channels = channel;
            });
          }),
        )
        ,
    ).subscribe(() => this.ref.markForCheck());
  }

  ngOnChanges() {
  ngOnChanges(): void {
    if (!this.transactions || !this.transactions.length) {
      return;
    }
    if (this.paginated) {
      this.outspends = [];
    }

    this.transactionsLength = this.transactions.length;
    if (this.outputIndex) {
      setTimeout(() => {
        const assetBoxElements = document.getElementsByClassName('assetBox');
@@ -105,10 +109,10 @@ export class TransactionsListComponent implements OnInit, OnChanges {
      }, 10);
    }

    this.transactions.forEach((tx, i) => {
    this.transactions.forEach((tx) => {
      tx['@voutLimit'] = true;
      tx['@vinLimit'] = true;
      if (this.outspends[i]) {
      if (tx['addressValue'] !== undefined) {
        return;
      }

@@ -126,14 +130,19 @@ export class TransactionsListComponent implements OnInit, OnChanges {
        tx['addressValue'] = addressIn - addressOut;
      }
    });
    const txIds = this.transactions.map((tx) => tx.txid);
    this.refreshOutspends$.next(txIds);
    if (!this.channels) {
      this.refreshChannels$.next(txIds);
    const txIds = this.transactions.filter((tx) => !tx._outspends).map((tx) => tx.txid);
    if (txIds.length) {
      this.refreshOutspends$.next(txIds);
    }
    if (this.stateService.env.LIGHTNING) {
      const txIds = this.transactions.filter((tx) => !tx._channels).map((tx) => tx.txid);
      if (txIds.length) {
        this.refreshChannels$.next(txIds);
      }
    }
  }

  onScroll() {
  onScroll(): void {
    const scrollHeight = document.body.scrollHeight;
    const scrollTop = document.documentElement.scrollTop;
    if (scrollHeight > 0){
@@ -148,11 +157,11 @@ export class TransactionsListComponent implements OnInit, OnChanges {
    return tx.vout.some((v: any) => v.value === undefined);
  }

  getTotalTxOutput(tx: Transaction) {
  getTotalTxOutput(tx: Transaction): number {
    return tx.vout.map((v: Vout) => v.value || 0).reduce((a: number, b: number) => a + b);
  }

  switchCurrency() {
  switchCurrency(): void {
    if (this.network === 'liquid' || this.network === 'liquidtestnet') {
      return;
    }
@@ -164,7 +173,7 @@ export class TransactionsListComponent implements OnInit, OnChanges {
    return tx.txid + tx.status.confirmed;
  }

  trackByIndexFn(index: number) {
  trackByIndexFn(index: number): number {
    return index;
  }

@@ -177,7 +186,7 @@ export class TransactionsListComponent implements OnInit, OnChanges {
    return Math.pow(base, exponent);
  }

  toggleDetails() {
  toggleDetails(): void {
    if (this.showDetails$.value === true) {
      this.showDetails$.next(false);
    } else {
@@ -185,7 +194,7 @@ export class TransactionsListComponent implements OnInit, OnChanges {
    }
  }

  loadMoreInputs(tx: Transaction) {
  loadMoreInputs(tx: Transaction): void {
    tx['@vinLimit'] = false;

    this.electrsApiService.getTransaction$(tx.txid)
@@ -196,7 +205,7 @@ export class TransactionsListComponent implements OnInit, OnChanges {
    });
  }

  ngOnDestroy() {
  ngOnDestroy(): void {
    this.outspendsSubscription.unsubscribe();
  }
}
@@ -114,11 +114,14 @@ export const restApiDocsData = [
          curl: [],
          response: `{
  progressPercent: 44.397234501112074,
  difficultyChange: 0.9845932018381687,
  estimatedRetargetDate: 1627762478.9111245,
  difficultyChange: 98.45932018381687,
  estimatedRetargetDate: 1627762478,
  remainingBlocks: 1121,
  remainingTime: 665977.6261244365,
  previousRetarget: -4.807005268478962
  remainingTime: 665977,
  previousRetarget: -4.807005268478962,
  nextRetargetHeight: 741888,
  timeAvg: 302328,
  timeOffset: 0
}`
        },
        codeSampleTestnet: {
@@ -127,11 +130,14 @@ export const restApiDocsData = [
          curl: [],
          response: `{
  progressPercent: 44.397234501112074,
  difficultyChange: 0.9845932018381687,
  estimatedRetargetDate: 1627762478.9111245,
  difficultyChange: 98.45932018381687,
  estimatedRetargetDate: 1627762478,
  remainingBlocks: 1121,
  remainingTime: 665977.6261244365,
  previousRetarget: -4.807005268478962
  remainingTime: 665977,
  previousRetarget: -4.807005268478962,
  nextRetargetHeight: 741888,
  timeAvg: 302328,
  timeOffset: 0
}`
        },
        codeSampleSignet: {
@@ -140,11 +146,14 @@ export const restApiDocsData = [
          curl: [],
          response: `{
  progressPercent: 44.397234501112074,
  difficultyChange: 0.9845932018381687,
  estimatedRetargetDate: 1627762478.9111245,
  difficultyChange: 98.45932018381687,
  estimatedRetargetDate: 1627762478,
  remainingBlocks: 1121,
  remainingTime: 665977.6261244365,
  previousRetarget: -4.807005268478962
  remainingTime: 665977,
  previousRetarget: -4.807005268478962,
  nextRetargetHeight: 741888,
  timeAvg: 302328,
  timeOffset: 0
}`
        },
        codeSampleLiquid: {
@@ -153,11 +162,14 @@ export const restApiDocsData = [
          curl: [],
          response: `{
  progressPercent: 44.397234501112074,
  difficultyChange: 0.9845932018381687,
  estimatedRetargetDate: 1627762478.9111245,
  difficultyChange: 98.45932018381687,
  estimatedRetargetDate: 1627762478,
  remainingBlocks: 1121,
  remainingTime: 665977.6261244365,
  previousRetarget: -4.807005268478962
  remainingTime: 665977,
  previousRetarget: -4.807005268478962,
  nextRetargetHeight: 741888,
  timeAvg: 302328,
  timeOffset: 0
}`
        }
      }
@@ -1,3 +1,5 @@
import { IChannel } from './node-api.interface';

export interface Transaction {
  txid: string;
  version: number;
@@ -19,6 +21,13 @@ export interface Transaction {
  deleteAfter?: number;
  _unblinded?: any;
  _deduced?: boolean;
  _outspends?: Outspend[];
  _channels?: TransactionChannels;
}

export interface TransactionChannels {
  inputs: { [vin: number]: IChannel };
  outputs: { [vout: number]: IChannel };
}

interface Ancestor {
@@ -189,3 +189,35 @@ export interface IOldestNodes {
  city?: any,
  country?: any,
}

export interface IChannel {
  id: number;
  short_id: string;
  capacity: number;
  transaction_id: string;
  transaction_vout: number;
  closing_transaction_id: string;
  closing_reason: string;
  updated_at: string;
  created: string;
  status: number;
  node_left: Node,
  node_right: Node,
}

export interface INode {
  alias: string;
  public_key: string;
  channels: number;
  capacity: number;
  base_fee_mtokens: number;
  cltv_delta: number;
  fee_rate: number;
  is_disabled: boolean;
  max_htlc_mtokens: number;
  min_htlc_mtokens: number;
  updated_at: string;
  longitude: number;
  latitude: number;
}
@@ -65,13 +65,13 @@
<ng-container *ngIf="transactions$ | async as transactions">
  <ng-template [ngIf]="transactions[0]">
    <h3>Opening transaction</h3>
    <app-transactions-list [transactions]="[transactions[0]]" [showConfirmations]="true" [rowLimit]="5" [channels]="{ inputs: [], outputs: [channel] }"></app-transactions-list>
    <app-transactions-list [transactions]="[transactions[0]]" [showConfirmations]="true" [rowLimit]="5"></app-transactions-list>
  </ng-template>
  <ng-template [ngIf]="transactions[1]">
    <div class="closing-header">
      <h3 style="margin: 0;">Closing transaction</h3> <app-closing-type [type]="channel.closing_reason"></app-closing-type>
    </div>
    <app-transactions-list [transactions]="[transactions[1]]" [showConfirmations]="true" [rowLimit]="5" [channels]="{ inputs: [channel], outputs: [] }"></app-transactions-list>
    <app-transactions-list [transactions]="[transactions[1]]" [showConfirmations]="true" [rowLimit]="5"></app-transactions-list>
  </ng-template>
</ng-container>

@@ -2,6 +2,7 @@ import { ChangeDetectionStrategy, Component, OnInit } from '@angular/core';
import { ActivatedRoute, ParamMap } from '@angular/router';
import { forkJoin, Observable, of, share, zip } from 'rxjs';
import { catchError, map, shareReplay, switchMap, tap } from 'rxjs/operators';
import { IChannel } from 'src/app/interfaces/node-api.interface';
import { ApiService } from 'src/app/services/api.service';
import { ElectrsApiService } from 'src/app/services/electrs-api.service';
import { SeoService } from 'src/app/services/seo.service';
@@ -62,10 +63,15 @@ export class ChannelComponent implements OnInit {
    );

    this.transactions$ = this.channel$.pipe(
      switchMap((data) => {
      switchMap((channel: IChannel) => {
        return zip([
          data.transaction_id ? this.electrsApiService.getTransaction$(data.transaction_id) : of(null),
          data.closing_transaction_id ? this.electrsApiService.getTransaction$(data.closing_transaction_id) : of(null),
          channel.transaction_id ? this.electrsApiService.getTransaction$(channel.transaction_id) : of(null),
          channel.closing_transaction_id ? this.electrsApiService.getTransaction$(channel.closing_transaction_id).pipe(
            map((tx) => {
              tx._channels = { inputs: {0: channel}, outputs: {}};
              return tx;
            })
          ) : of(null),
        ]);
      }),
    );

@@ -119,7 +119,7 @@
  </div>

  <div *ngIf="!error">
    <div class="row">
    <div class="row" *ngIf="node.as_number">
      <div class="col-sm">
        <app-nodes-channels-map [style]="'nodepage'" [publicKey]="node.public_key" [hasLocation]="!!node.as_number"></app-nodes-channels-map>
      </div>
@@ -127,6 +127,9 @@
        <app-node-statistics-chart [publicKey]="node.public_key"></app-node-statistics-chart>
      </div>
    </div>
    <div *ngIf="!node.as_number">
      <app-node-statistics-chart [publicKey]="node.public_key"></app-node-statistics-chart>
    </div>

    <h2 i18n="lightning.active-channels-map">Active channels map</h2>
    <app-node-channels style="display:block;margin-bottom: 40px" [publicKey]="node.public_key"></app-node-channels>
@@ -242,12 +242,12 @@ export class ApiService {
    return this.httpClient.get<any>(this.apiBaseUrl + this.apiBasePath + `/api/v1/enterprise/info/` + name);
  }

  getChannelByTxIds$(txIds: string[]): Observable<{ inputs: any[], outputs: any[] }> {
  getChannelByTxIds$(txIds: string[]): Observable<any[]> {
    let params = new HttpParams();
    txIds.forEach((txId: string) => {
      params = params.append('txId[]', txId);
    });
    return this.httpClient.get<{ inputs: any[], outputs: any[] }>(this.apiBaseUrl + this.apiBasePath + '/api/v1/lightning/channels/txids/', { params });
    return this.httpClient.get<any[]>(this.apiBaseUrl + this.apiBasePath + '/api/v1/lightning/channels/txids/', { params });
  }

  lightningSearch$(searchText: string): Observable<any[]> {
@@ -976,15 +976,28 @@ osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-kill-all stop
osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-start-all start
osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-restart-all restart

echo "[*] Installing syslog configuration"
osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/syslog.d
osSudo "${ROOT_USER}" install -c -m 755 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool-logger" /usr/local/bin/mempool-logger
osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/syslog.conf" /usr/local/etc/syslog.d/mempool.conf

echo "[*] Installing newsyslog configuration"
osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/newsyslog.conf.d
osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-backend.conf" /usr/local/etc/syslog.d/newsyslog-mempool-backend.conf
osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-nginx.conf" /usr/local/etc/syslog.d/newsyslog-mempool-nginx.conf
case $OS in
  FreeBSD)
    echo "[*] Installing syslog configuration"
    osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/syslog.d
    osSudo "${ROOT_USER}" install -c -m 755 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool-logger" /usr/local/bin/mempool-logger
    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/syslog.conf" /usr/local/etc/syslog.d/mempool.conf

    echo "[*] Installing newsyslog configuration"
    osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/newsyslog.conf.d
    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-backend.conf" /usr/local/etc/newsyslog.conf.d/newsyslog-mempool-backend.conf
    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-nginx.conf" /usr/local/etc/newsyslog.conf.d/newsyslog-mempool-nginx.conf

    echo "[*] Creating log files"
    osSudo "${ROOT_USER}" newsyslog -C
    ;;
  Debian)
    echo "[*] Installing syslog configuration"
    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/linux/rsyslog.conf" /etc/rsyslog.d/10-mempool.conf
    osSudo "${ROOT_USER}" sed -i.orig -e 's/^\*\.\*;auth,authpriv\.none/*\.*;auth,authpriv\.none,local7\.none/' /etc/rsyslog.d/50-default.conf
    ;;
esac

echo "[*] Installing Mempool crontab"
osSudo "${ROOT_USER}" crontab -u "${MEMPOOL_USER}" "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool.crontab"
production/linux/rsyslog.conf (new file, 2 lines)
@@ -0,0 +1,2 @@
local7.info /var/log/mempool
local7.* /var/log/mempool.debug
production/mempool-config.mainnet-lightning.json (new file, 49 lines)
@@ -0,0 +1,49 @@
{
  "MEMPOOL": {
    "NETWORK": "mainnet",
    "BACKEND": "esplora",
    "HTTP_PORT": 8993,
    "INDEXING_BLOCKS_AMOUNT": 0,
    "API_URL_PREFIX": "/api/v1/"
  },
  "SYSLOG": {
    "MIN_PRIORITY": "debug"
  },
  "CORE_RPC": {
    "PORT": 8332,
    "USERNAME": "__BITCOIN_RPC_USER__",
    "PASSWORD": "__BITCOIN_RPC_PASS__"
  },
  "ESPLORA": {
    "REST_API_URL": "http://127.0.0.1:4000"
  },
  "LIGHTNING": {
    "ENABLED": true,
    "BACKEND": "cln",
    "GRAPH_REFRESH_INTERVAL": 60,
    "TOPOLOGY_FOLDER": "/cln/topology/output"
  },
  "LND": {
    "REST_API_URL": "https://127.0.0.1:8888",
    "TLS_CERT_PATH": "/lnd/.lnd/tls.cert",
    "MACAROON_PATH": "/lnd/.lnd/data/chain/bitcoin/mainnet/readonly.macaroon"
  },
  "CLIGHTNING": {
    "SOCKET": "/cln/.lightning/bitcoin/lightning-rpc"
  },
  "MAXMIND": {
    "ENABLED": true,
    "GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoIP2-City.mmdb"
  },
  "STATISTICS": {
    "ENABLED": false
  },
  "DATABASE": {
    "ENABLED": true,
    "HOST": "127.0.0.1",
    "PORT": 3306,
    "DATABASE": "mempool_mainnet_lightning",
    "USERNAME": "mempool_mainnet_lightning",
    "PASSWORD": "mempool_mainnet_lightning"
  }
}
production/mempool-config.signet-lightning.json (new file, 44 lines)
@@ -0,0 +1,44 @@
{
  "MEMPOOL": {
    "NETWORK": "signet",
    "BACKEND": "esplora",
    "HTTP_PORT": 8991,
    "INDEXING_BLOCKS_AMOUNT": 0,
    "API_URL_PREFIX": "/api/v1/"
  },
  "SYSLOG": {
    "MIN_PRIORITY": "debug"
  },
  "CORE_RPC": {
    "PORT": 38332,
    "USERNAME": "__BITCOIN_RPC_USER__",
    "PASSWORD": "__BITCOIN_RPC_PASS__"
  },
  "ESPLORA": {
    "REST_API_URL": "http://127.0.0.1:4003"
  },
  "LIGHTNING": {
    "ENABLED": true,
    "BACKEND": "cln",
    "GRAPH_REFRESH_INTERVAL": 60,
    "TOPOLOGY_FOLDER": ""
  },
  "CLIGHTNING": {
    "SOCKET": "/cln/.lightning/signet/lightning-rpc"
  },
  "MAXMIND": {
    "ENABLED": true,
    "GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoIP2-City.mmdb"
  },
  "STATISTICS": {
    "ENABLED": false
  },
  "DATABASE": {
    "ENABLED": true,
    "HOST": "127.0.0.1",
    "PORT": 3306,
    "USERNAME": "mempool_signet_lightning",
    "PASSWORD": "mempool_signet_lightning",
    "DATABASE": "mempool_signet_lightning"
  }
}
production/mempool-config.testnet-lightning.json (new file, 44 lines)
@@ -0,0 +1,44 @@
{
  "MEMPOOL": {
    "NETWORK": "testnet",
    "BACKEND": "esplora",
    "HTTP_PORT": 8992,
    "INDEXING_BLOCKS_AMOUNT": 0,
    "API_URL_PREFIX": "/api/v1/"
  },
  "SYSLOG": {
    "MIN_PRIORITY": "debug"
  },
  "CORE_RPC": {
    "PORT": 18332,
    "USERNAME": "__BITCOIN_RPC_USER__",
    "PASSWORD": "__BITCOIN_RPC_PASS__"
  },
  "ESPLORA": {
    "REST_API_URL": "http://127.0.0.1:4002"
  },
  "LIGHTNING": {
    "ENABLED": true,
    "BACKEND": "cln",
    "GRAPH_REFRESH_INTERVAL": 60,
    "TOPOLOGY_FOLDER": ""
  },
  "CLIGHTNING": {
    "SOCKET": "/cln/.lightning/testnet/lightning-rpc"
  },
  "MAXMIND": {
    "ENABLED": true,
    "GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoIP2-City.mmdb"
  },
  "STATISTICS": {
    "ENABLED": false
  },
  "DATABASE": {
    "ENABLED": true,
    "HOST": "127.0.0.1",
    "PORT": 3306,
    "USERNAME": "mempool_testnet_lightning",
    "PASSWORD": "mempool_testnet_lightning",
    "DATABASE": "mempool_testnet_lightning"
  }
}
@@ -70,30 +70,6 @@ location /api/v1/translators {
	proxy_hide_header content-security-policy;
	proxy_hide_header x-frame-options;
}
location /api/v1/enterprise/images {
	proxy_pass $mempoolSpaceServices;
	proxy_cache services;
	proxy_cache_background_update on;
	proxy_cache_use_stale updating;
	proxy_cache_valid 200 10m;
	expires 10m;
	proxy_hide_header onion-location;
	proxy_hide_header strict-transport-security;
	proxy_hide_header content-security-policy;
	proxy_hide_header x-frame-options;
}
location /api/v1/enterprise {
	proxy_pass $mempoolSpaceServices;
	proxy_cache services;
	proxy_cache_background_update on;
	proxy_cache_use_stale updating;
	proxy_cache_valid 200 5m;
	expires 5m;
	proxy_hide_header onion-location;
	proxy_hide_header strict-transport-security;
	proxy_hide_header content-security-policy;
	proxy_hide_header x-frame-options;
}
location /api/v1/assets {
	proxy_pass $mempoolSpaceServices;
	proxy_cache services;
@@ -4,7 +4,7 @@ location /testnet/api/v1/lightning {
	try_files /dev/null @mempool-testnet-api-v1-lightning;
}
location @mempool-testnet-api-v1-lightning {
	proxy_pass $mempoolSignetLightning;
	proxy_pass $mempoolTestnetLightning;

	proxy_set_header Host $host;
	proxy_set_header X-Real-IP $remote_addr;
@@ -22,6 +22,11 @@ http {
	include mempool/production/nginx/http-proxy-cache.conf;
	include mempool/production/nginx/http-language.conf;

	# match preview/unfurl bot user-agents
	map $http_user_agent $unfurlbot {
		default 0;
	}

	# mempool configuration
	include mempool/production/nginx/upstream-mempool.conf;

@@ -42,6 +47,7 @@ http {

	# for services from mempool.space like contributors on about page
	set $mempoolSpaceServices "https://mempool.space";
	set $mempoolSpaceUnfurler "http://127.0.0.1:8001";

	# for mempool daemons, see upstream-mempool.conf
	set $mempoolMainnet "http://mempool-bitcoin-mainnet";
@@ -77,6 +83,7 @@ http {

	# for services from mempool.space like contributors on about page
	set $mempoolSpaceServices "https://mempool.space";
	set $mempoolSpaceUnfurler "http://127.0.0.1:8001";

	# for mempool daemons, see upstream-mempool.conf
	set $mempoolBisq "http://mempool-bitcoin-bisq";
@@ -105,6 +112,7 @@ http {

	# for services from mempool.space like contributors on about page
	set $mempoolSpaceServices "https://mempool.space";
	set $mempoolSpaceUnfurler "http://127.0.0.1:8001";

	# for mempool daemons, see upstream-mempool.conf
	set $mempoolMainnet "http://mempool-liquid-mainnet";
@@ -59,7 +59,7 @@ location = / {
# cache /resources/** for 1 week since they don't change often
location ~ ^/[a-z][a-z]/resources/(.*) {
	try_files $uri /en-US/resources/$1 =404;
	expires 1w;
	expires 1w;
}
# cache /<lang>/main.f40e91d908a068a2.js forever since they never change
location ~ ^/([a-z][a-z])/(.+\..+\.(js|css)) {
@@ -69,11 +69,14 @@ location ~ ^/([a-z][a-z])/(.+\..+\.(js|css)) {
# cache everything else for 5 minutes
location ~ ^/([a-z][a-z])$ {
	try_files $uri /$1/index.html /en-US/index.html =404;
	expires 5m;
	expires 5m;
}
location ~ ^/([a-z][a-z])/ {
	if ($unfurlbot) {
		proxy_pass $mempoolSpaceUnfurler;
	}
	try_files $uri /$1/index.html /en-US/index.html =404;
	expires 5m;
	expires 5m;
}

# cache /resources/** for 1 week since they don't change often
@@ -86,9 +89,24 @@ location ~* ^/.+\..+\.(js|css) {
	try_files /$lang/$uri /en-US/$uri =404;
	expires 1y;
}

# unfurl preview
location /preview {
	try_files /$lang/$uri $uri /en-US/$uri /en-US/index.html =404;
	expires 10m;
}
# unfurl renderer
location ^~ /render {
	proxy_pass $mempoolSpaceUnfurler;
	expires 10m;
}

# catch-all for all URLs i.e. /address/foo /tx/foo /block/000
# cache 5 minutes since they change frequently
location / {
	if ($unfurlbot) {
		proxy_pass $mempoolSpaceUnfurler;
	}
	try_files /$lang/$uri $uri /en-US/$uri /en-US/index.html =404;
	expires 5m;
}