diff --git a/backend/src/api/database-migration.ts b/backend/src/api/database-migration.ts
index 1d6f10a2c..1dc0b9704 100644
--- a/backend/src/api/database-migration.ts
+++ b/backend/src/api/database-migration.ts
@@ -4,7 +4,7 @@ import logger from '../logger';
 import { Common } from './common';
 
 class DatabaseMigration {
-  private static currentVersion = 39;
+  private static currentVersion = 40;
   private queryTimeout = 120000;
   private statisticsAddedIndexed = false;
   private uniqueLogs: string[] = [];
@@ -342,6 +342,12 @@ class DatabaseMigration {
       await this.$executeQuery('ALTER TABLE `nodes` ADD alias_search TEXT NULL DEFAULT NULL AFTER `alias`');
       await this.$executeQuery('ALTER TABLE nodes ADD FULLTEXT(alias_search)');
     }
+
+    if (databaseSchemaVersion < 40 && isBitcoin === true) {
+      await this.$executeQuery('ALTER TABLE `nodes` ADD capacity bigint(20) unsigned DEFAULT NULL');
+      await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
+      await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`);');
+    }
   }
 
   /**
diff --git a/backend/src/api/explorer/channels.api.ts b/backend/src/api/explorer/channels.api.ts
index a0a617e43..b5eac7499 100644
--- a/backend/src/api/explorer/channels.api.ts
+++ b/backend/src/api/explorer/channels.api.ts
@@ -288,21 +288,36 @@ class ChannelsApi {
       const channels: any[] = []
       for (const row of allChannels) {
-        const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
-        channels.push({
-          status: row.status,
-          closing_reason: row.closing_reason,
-          capacity: row.capacity ?? 0,
-          short_id: row.short_id,
-          id: row.id,
-          fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
-          node: {
-            alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
-            public_key: row.public_key,
-            channels: activeChannelsStats.active_channel_count ?? 0,
-            capacity: activeChannelsStats.capacity ?? 0,
-          }
-        });
+        let channel;
+        if (index >= 0) {
+          const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
+          channel = {
+            status: row.status,
+            closing_reason: row.closing_reason,
+            capacity: row.capacity ?? 0,
+            short_id: row.short_id,
+            id: row.id,
+            fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
+            node: {
+              alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
+              public_key: row.public_key,
+              channels: activeChannelsStats.active_channel_count ?? 0,
+              capacity: activeChannelsStats.capacity ?? 0,
+            }
+          };
+        } else if (index === -1) {
+          channel = {
+            capacity: row.capacity ?? 0,
+            short_id: row.short_id,
+            id: row.id,
+            node: {
+              alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
+              public_key: row.public_key,
+            }
+          };
+        }
+
+        channels.push(channel);
       }
 
       return channels;
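For reference, $getChannelsForNode above now builds one of two row shapes depending on the new index argument: a detailed row for index >= 0 (the paginated list, which still calls $getActiveChannelsStats per node) and a slimmer row for index === -1 (apparently the "fetch everything" path that skips the per-node stats lookup). A minimal TypeScript sketch of the two shapes; the interface names are illustrative and not part of the patch:

interface ChannelListNode {
  alias: string;             // falls back to the first 20 characters of the public key
  public_key: string;
  channels?: number;         // only filled in for the index >= 0 variant
  capacity?: number;         // idem, taken from $getActiveChannelsStats
}

interface ChannelListRow {
  capacity: number;
  short_id: string;
  id: string;
  node: ChannelListNode;
  status?: number | string;  // index >= 0 variant only; typed loosely, it comes straight from the DB row
  closing_reason?: string;   // index >= 0 variant only
  fee_rate?: number;         // index >= 0 variant only
}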
diff --git a/backend/src/api/explorer/channels.routes.ts b/backend/src/api/explorer/channels.routes.ts
index 0fa91db92..eda3a6168 100644
--- a/backend/src/api/explorer/channels.routes.ts
+++ b/backend/src/api/explorer/channels.routes.ts
@@ -47,8 +47,17 @@ class ChannelsRoutes {
         res.status(400).send('Missing parameter: public_key');
         return;
       }
+      const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
       const status: string = typeof req.query.status === 'string' ? req.query.status : '';
+
+      if (index < -1) {
+        res.status(400).send('Invalid index');
+      }
+      if (['open', 'active', 'closed'].includes(status) === false) {
+        res.status(400).send('Invalid status');
+      }
+
       const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
       const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
       res.header('Pragma', 'public');
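One detail worth flagging in the route above: the two new validation branches send a 400 but do not return, so the handler falls through to the database queries anyway. A minimal sketch of the same checks with early returns — a hypothetical tightening, not what the patch itself does:

const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
const status: string = typeof req.query.status === 'string' ? req.query.status : '';

if (index < -1) {
  res.status(400).send('Invalid index');
  return;                                           // stop before hitting the database
}
if (['open', 'active', 'closed'].includes(status) === false) {
  res.status(400).send('Invalid status');
  return;
}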
diff --git a/backend/src/api/explorer/nodes.api.ts b/backend/src/api/explorer/nodes.api.ts
index 379df4213..c49ed9ac5 100644
--- a/backend/src/api/explorer/nodes.api.ts
+++ b/backend/src/api/explorer/nodes.api.ts
@@ -115,17 +115,13 @@ class NodesApi {
   public async $getTopCapacityNodes(full: boolean): Promise {
     try {
-      let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
-      const latestDate = rows[0].maxAdded;
-
+      let rows: any;
       let query: string;
       if (full === false) {
         query = `
           SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
-            node_stats.capacity
-          FROM node_stats
-          JOIN nodes ON nodes.public_key = node_stats.public_key
-          WHERE added = FROM_UNIXTIME(${latestDate})
+            nodes.capacity
+          FROM nodes
           ORDER BY capacity DESC
           LIMIT 100
         `;
@@ -133,16 +129,14 @@ class NodesApi {
         [rows] = await DB.query(query);
       } else {
         query = `
-          SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
-            CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
-            CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
+          SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
+            CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
+            CAST(COALESCE(nodes.channels, 0) as INT) as channels,
             UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
             geo_names_city.names as city, geo_names_country.names as country
-          FROM node_stats
-          RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
+          FROM nodes
           LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
           LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
-          WHERE added = FROM_UNIXTIME(${latestDate})
           ORDER BY capacity DESC
           LIMIT 100
         `;
@@ -163,17 +157,13 @@ class NodesApi {
   public async $getTopChannelsNodes(full: boolean): Promise {
     try {
-      let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
-      const latestDate = rows[0].maxAdded;
-
+      let rows: any;
       let query: string;
       if (full === false) {
         query = `
           SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
-            node_stats.channels
-          FROM node_stats
-          JOIN nodes ON nodes.public_key = node_stats.public_key
-          WHERE added = FROM_UNIXTIME(${latestDate})
+            nodes.channels
+          FROM nodes
           ORDER BY channels DESC
           LIMIT 100;
         `;
@@ -181,16 +171,14 @@ class NodesApi {
         [rows] = await DB.query(query);
       } else {
         query = `
-          SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
-            CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
-            CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
+          SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
+            CAST(COALESCE(nodes.channels, 0) as INT) as channels,
+            CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
             UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
             geo_names_city.names as city, geo_names_country.names as country
-          FROM node_stats
-          RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
+          FROM nodes
           LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
           LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
-          WHERE added = FROM_UNIXTIME(${latestDate})
           ORDER BY channels DESC
           LIMIT 100
         `;
@@ -260,8 +248,8 @@ class NodesApi {
   public async $searchNodeByPublicKeyOrAlias(search: string) {
     try {
       const publicKeySearch = search.replace('%', '') + '%';
-      const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
-      const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR MATCH nodes.alias_search AGAINST (? IN BOOLEAN MODE) GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`;
+      const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
+      const query = `SELECT public_key, alias, capacity, channels FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
       const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
       return rows;
     } catch (e) {
@@ -276,7 +264,7 @@
       // List all channels and the two linked ISP
       query = `
-        SELECT short_id, capacity,
+        SELECT short_id, channels.capacity,
          channels.node1_public_key AS node1PublicKey, isp1.names AS isp1, isp1.id as isp1ID,
          channels.node2_public_key AS node2PublicKey, isp2.names AS isp2, isp2.id as isp2ID
        FROM channels
@@ -391,17 +379,11 @@ class NodesApi {
   public async $getNodesPerCountry(countryId: string) {
     try {
       const query = `
-        SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
+        SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
          nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
          geo_names_city.names as city, geo_names_country.names as country,
          geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
-        FROM node_stats
-        JOIN (
-          SELECT public_key, MAX(added) as last_added
-          FROM node_stats
-          GROUP BY public_key
-        ) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
-        RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
+        FROM nodes
         LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
        LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
        LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
@@ -426,17 +408,10 @@ class NodesApi {
   public async $getNodesPerISP(ISPId: string) {
     try {
       const query = `
-        SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
+        SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
          nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
          geo_names_city.names as city, geo_names_country.names as country,
          geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
-        FROM node_stats
-        JOIN (
-          SELECT public_key, MAX(added) as last_added
-          FROM node_stats
-          GROUP BY public_key
-        ) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
-        RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
         LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
        LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
        LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
@@ -464,7 +439,6 @@ class NodesApi {
         FROM nodes
         JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
         JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
-        JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
         GROUP BY country_id
         ORDER BY COUNT(DISTINCT nodes.public_key) DESC
       `;
@@ -555,7 +529,7 @@ class NodesApi {
   }
 
   private aliasToSearchText(str: string): string {
-    return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z ]/g, '');
+    return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '');
   }
 }
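The widened character class above (in both the alias search and aliasToSearchText) keeps digits, so numeric parts of an alias now survive sanitization. A worked example of the boolean-mode search string that ends up in MATCH ... AGAINST — the input value is made up:

const search = 'ACINQ-node_01';
const aliasSearch = search
  .replace(/[-_.]/g, ' ')            // 'ACINQ node 01'
  .replace(/[^a-zA-Z0-9 ]/g, '')     // digits are kept; with the old [^a-zA-Z ] class the '01' token was lost
  .split(' ')
  .map((word) => '+' + word + '*')
  .join(' ');
// aliasSearch === '+ACINQ* +node* +01*'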
diff --git a/backend/src/tasks/lightning/sync-tasks/stats-importer.ts b/backend/src/tasks/lightning/sync-tasks/stats-importer.ts
index e05ba4ab3..7ac1c5885 100644
--- a/backend/src/tasks/lightning/sync-tasks/stats-importer.ts
+++ b/backend/src/tasks/lightning/sync-tasks/stats-importer.ts
@@ -7,6 +7,7 @@ import { ILightningApi } from '../../../api/lightning/lightning-api.interface';
 import { isIP } from 'net';
 import { Common } from '../../../api/common';
 import channelsApi from '../../../api/explorer/channels.api';
+import nodesApi from '../../../api/explorer/nodes.api';
 
 const fsPromises = promises;
 
@@ -32,7 +33,26 @@ class LightningStatsImporter {
     let clearnetTorNodes = 0;
     let unannouncedNodes = 0;
 
+    const [nodesInDbRaw]: any[] = await DB.query(`SELECT public_key FROM nodes`);
+    const nodesInDb = {};
+    for (const node of nodesInDbRaw) {
+      nodesInDb[node.public_key] = node;
+    }
+
     for (const node of networkGraph.nodes) {
+      // If we don't know about this node, insert it in db
+      if (isHistorical === true && !nodesInDb[node.pub_key]) {
+        await nodesApi.$saveNode({
+          last_update: node.last_update,
+          pub_key: node.pub_key,
+          alias: node.alias,
+          addresses: node.addresses,
+          color: node.color,
+          features: node.features,
+        });
+        nodesInDb[node.pub_key] = node;
+      }
+
       let hasOnion = false;
       let hasClearnet = false;
       let isUnnanounced = true;
@@ -69,7 +89,7 @@ class LightningStatsImporter {
     const baseFees: number[] = [];
     const alreadyCountedChannels = {};
 
-    const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id, created FROM channels`);
+    const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id FROM channels`);
     const channelsInDb = {};
     for (const channel of channelsInDbRaw) {
       channelsInDb[channel.short_id] = channel;
     }
@@ -84,29 +104,19 @@ class LightningStatsImporter {
         continue;
       }
 
-      // Channel is already in db, check if we need to update 'created' field
-      if (isHistorical === true) {
-        //@ts-ignore
-        if (channelsInDb[short_id] && channel.timestamp < channel.created) {
-          await DB.query(`
-            UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.short_id = ?`,
-            //@ts-ignore
-            [channel.timestamp, short_id]
-          );
-        } else if (!channelsInDb[short_id]) {
-          await channelsApi.$saveChannel({ ... }, 0);
-          channelsInDb[channel.channel_id] = channel;
-        }
+      // If we don't know about this channel, insert it in db
+      if (isHistorical === true && !channelsInDb[short_id]) {
+        await channelsApi.$saveChannel({
+          channel_id: short_id,
+          chan_point: `${tx.txid}:${short_id.split('x')[2]}`,
+          last_update: channel.last_update,
+          node1_pub: channel.node1_pub,
+          node2_pub: channel.node2_pub,
+          capacity: (tx.value * 100000000).toString(),
+          node1_policy: null,
+          node2_policy: null,
+        }, 0);
+        channelsInDb[channel.channel_id] = channel;
       }
 
       if (!nodeStats[channel.node1_pub]) {
@@ -269,6 +279,17 @@ class LightningStatsImporter {
         nodeStats[public_key].capacity,
         nodeStats[public_key].channels,
       ]);
+
+      if (!isHistorical) {
+        await DB.query(
+          `UPDATE nodes SET capacity = ?, channels = ? WHERE public_key = ?`,
+          [
+            nodeStats[public_key].capacity,
+            nodeStats[public_key].channels,
+            public_key,
+          ]
+        );
+      }
     }
 
     return {
@@ -281,6 +302,7 @@ class LightningStatsImporter {
    * Import topology files LN historical data into the database
    */
   async $importHistoricalLightningStats(): Promise {
+    logger.debug('Run the historical importer');
     try {
       let fileList: string[] = [];
       try {
@@ -294,7 +316,7 @@ class LightningStatsImporter {
       fileList.sort().reverse();
 
       const [rows]: any[] = await DB.query(`
-        SELECT UNIX_TIMESTAMP(added) AS added, node_count
+        SELECT UNIX_TIMESTAMP(added) AS added
         FROM lightning_stats
         ORDER BY added DESC
       `);
@@ -391,12 +413,16 @@ class LightningStatsImporter {
         });
       }
 
+      let rgb = node.rgb_color ?? '#000000';
+      if (rgb.indexOf('#') === -1) {
+        rgb = `#${rgb}`;
+      }
       newGraph.nodes.push({
         last_update: node.timestamp ?? 0,
         pub_key: node.id ?? null,
-        alias: node.alias ?? null,
+        alias: node.alias ?? node.id.slice(0, 20),
         addresses: addresses,
-        color: node.rgb_color ?? null,
+        color: rgb,
         features: {},
       });
     }
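The importer above also normalizes node colors and aliases before pushing entries onto newGraph.nodes. A small self-contained sketch of that normalization — the function name and sample values are invented for illustration:

function normalizeImportedNode(node: { id: string; alias?: string; rgb_color?: string; timestamp?: number }) {
  let rgb = node.rgb_color ?? '#000000';
  if (rgb.indexOf('#') === -1) {
    rgb = `#${rgb}`;                           // topology dumps may store colors without the leading '#'
  }
  return {
    last_update: node.timestamp ?? 0,
    pub_key: node.id ?? null,
    alias: node.alias ?? node.id.slice(0, 20), // fall back to a truncated public key instead of null
    color: rgb,
  };
}

// normalizeImportedNode({ id: '03aab2e3...', rgb_color: '3399ff' }).color === '#3399ff'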
diff --git a/frontend/src/app/components/search-form/search-form.component.html b/frontend/src/app/components/search-form/search-form.component.html
index 417414b58..378e38940 100644
--- a/frontend/src/app/components/search-form/search-form.component.html
+++ b/frontend/src/app/components/search-form/search-form.component.html
@@ -1,7 +1,7 @@
-
+
diff --git a/frontend/src/app/components/transactions-list/transactions-list.component.ts b/frontend/src/app/components/transactions-list/transactions-list.component.ts
index 69b65a8a4..2a317e738 100644
--- a/frontend/src/app/components/transactions-list/transactions-list.component.ts
+++ b/frontend/src/app/components/transactions-list/transactions-list.component.ts
@@ -105,10 +105,10 @@ export class TransactionsListComponent implements OnInit, OnChanges {
         }, 10);
       }
 
-      this.transactions.forEach((tx, i) => {
+      this.transactions.forEach((tx) => {
         tx['@voutLimit'] = true;
         tx['@vinLimit'] = true;
-        if (this.outspends[i]) {
+        if (tx['addressValue'] !== undefined) {
           return;
         }
diff --git a/frontend/src/app/lightning/node/node.component.html b/frontend/src/app/lightning/node/node.component.html
index 12ca8eaa1..423b29afb 100644
--- a/frontend/src/app/lightning/node/node.component.html
+++ b/frontend/src/app/lightning/node/node.component.html
@@ -119,20 +119,30 @@
-
+
- +
+
+ +

Active channels map

-

Channels ({{ channelsListStatus === 'open' ? node.opened_channel_count : node.closed_channel_count }})

+

+            Open channels
+            ({{ node.opened_channel_count }})

+

+            Closed channels
+            ({{ node.closed_channel_count }})
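The template fragments above sketch an open/closed tab switch for the channel list, driven by channelsListStatus and the per-status counts on the node object. A rough sketch of the component-side state this implies — the method name is a guess, not taken from the patch:

export class NodeComponent {
  channelsListStatus: 'open' | 'closed' = 'open';   // which tab is currently shown

  selectChannelsListStatus(status: 'open' | 'closed'): void {
    this.channelsListStatus = status;               // counts come from node.opened_channel_count / node.closed_channel_count
  }
}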

; @@ -32,7 +33,7 @@ export class NodesChannelsMap implements OnInit { channelColor = '#466d9d'; channelCurve = 0; nodeSize = 4; - isLoading = true; + isLoading = false; chartInstance = undefined; chartOptions: EChartsOption = {}; @@ -73,6 +74,11 @@ export class NodesChannelsMap implements OnInit { this.channelsObservable = this.activatedRoute.paramMap .pipe( switchMap((params: ParamMap) => { + this.isLoading = true; + if (this.style === 'channelpage' && this.channel.length === 0 || !this.hasLocation) { + this.isLoading = false; + } + return zip( this.assetsService.getWorldMapJson$, this.style !== 'channelpage' ? this.apiService.getChannelsGeo$(params.get('public_key') ?? undefined, this.style) : [''], diff --git a/frontend/src/app/lightning/nodes-networks-chart/nodes-networks-chart.component.ts b/frontend/src/app/lightning/nodes-networks-chart/nodes-networks-chart.component.ts index ecbf92f39..22f46e8e7 100644 --- a/frontend/src/app/lightning/nodes-networks-chart/nodes-networks-chart.component.ts +++ b/frontend/src/app/lightning/nodes-networks-chart/nodes-networks-chart.component.ts @@ -121,7 +121,7 @@ export class NodesNetworksChartComponent implements OnInit { left: 'center', top: 'center', }; - } else if (data.tor_nodes.length > 0) { + } else if (this.widget && data.tor_nodes.length > 0) { title = { textStyle: { color: 'grey', diff --git a/frontend/src/app/lightning/statistics-chart/lightning-statistics-chart.component.ts b/frontend/src/app/lightning/statistics-chart/lightning-statistics-chart.component.ts index bd210b09a..6fb8fd1e2 100644 --- a/frontend/src/app/lightning/statistics-chart/lightning-statistics-chart.component.ts +++ b/frontend/src/app/lightning/statistics-chart/lightning-statistics-chart.component.ts @@ -113,7 +113,7 @@ export class LightningStatisticsChartComponent implements OnInit { left: 'center', top: 'center' }; - } else if (data.channel_count.length > 0) { + } else if (this.widget && data.channel_count.length > 0) { title = { textStyle: { color: 'grey', diff --git a/production/install b/production/install index 1187ee813..6d50e8bf7 100755 --- a/production/install +++ b/production/install @@ -385,7 +385,7 @@ DEBIAN_UNFURL_PKG+=(libxdamage-dev libxrandr-dev libgbm-dev libpango1.0-dev liba # packages needed for mempool ecosystem FREEBSD_PKG=() -FREEBSD_PKG+=(zsh sudo git screen curl wget calc neovim) +FREEBSD_PKG+=(zsh sudo git git-lfs screen curl wget calc neovim) FREEBSD_PKG+=(openssh-portable py39-pip rust llvm90 jq base64 libzmq4) FREEBSD_PKG+=(boost-libs autoconf automake gmake gcc libevent libtool pkgconf) FREEBSD_PKG+=(nginx rsync py39-certbot-nginx mariadb105-server keybase) @@ -976,15 +976,28 @@ osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-kill-all stop osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-start-all start osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-restart-all restart -echo "[*] Installing syslog configuration" -osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/syslog.d -osSudo "${ROOT_USER}" install -c -m 755 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool-logger" /usr/local/bin/mempool-logger -osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/syslog.conf" /usr/local/etc/syslog.d/mempool.conf -echo "[*] Installing newsyslog configuration" -osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/newsyslog.conf.d -osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-backend.conf" 
diff --git a/production/install b/production/install
index 1187ee813..6d50e8bf7 100755
--- a/production/install
+++ b/production/install
@@ -385,7 +385,7 @@ DEBIAN_UNFURL_PKG+=(libxdamage-dev libxrandr-dev libgbm-dev libpango1.0-dev liba
 
 # packages needed for mempool ecosystem
 FREEBSD_PKG=()
-FREEBSD_PKG+=(zsh sudo git screen curl wget calc neovim)
+FREEBSD_PKG+=(zsh sudo git git-lfs screen curl wget calc neovim)
 FREEBSD_PKG+=(openssh-portable py39-pip rust llvm90 jq base64 libzmq4)
 FREEBSD_PKG+=(boost-libs autoconf automake gmake gcc libevent libtool pkgconf)
 FREEBSD_PKG+=(nginx rsync py39-certbot-nginx mariadb105-server keybase)
@@ -976,15 +976,28 @@
 osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-kill-all stop
 osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-start-all start
 osSudo "${MEMPOOL_USER}" ln -s mempool/production/mempool-restart-all restart
 
-echo "[*] Installing syslog configuration"
-osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/syslog.d
-osSudo "${ROOT_USER}" install -c -m 755 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool-logger" /usr/local/bin/mempool-logger
-osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/syslog.conf" /usr/local/etc/syslog.d/mempool.conf
-echo "[*] Installing newsyslog configuration"
-osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/newsyslog.conf.d
-osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-backend.conf" /usr/local/etc/syslog.d/newsyslog-mempool-backend.conf
-osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-nginx.conf" /usr/local/etc/syslog.d/newsyslog-mempool-nginx.conf
+case $OS in
+  FreeBSD)
+    echo "[*] Installing syslog configuration"
+    osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/syslog.d
+    osSudo "${ROOT_USER}" install -c -m 755 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool-logger" /usr/local/bin/mempool-logger
+    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/syslog.conf" /usr/local/etc/syslog.d/mempool.conf
+
+    echo "[*] Installing newsyslog configuration"
+    osSudo "${ROOT_USER}" mkdir -p /usr/local/etc/newsyslog.conf.d
+    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-backend.conf" /usr/local/etc/newsyslog.conf.d/newsyslog-mempool-backend.conf
+    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/newsyslog-mempool-nginx.conf" /usr/local/etc/newsyslog.conf.d/newsyslog-mempool-nginx.conf
+
+    echo "[*] Creating log files"
+    osSudo "${ROOT_USER}" newsyslog -C
+    ;;
+  Debian)
+    echo "[*] Installing syslog configuration"
+    osSudo "${ROOT_USER}" install -c -m 644 "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/linux/rsyslog.conf" /etc/rsyslog.d/10-mempool.conf
+    osSudo "${ROOT_USER}" sed -i.orig -e 's/^\*\.\*;auth,authpriv\.none/*\.*;auth,authpriv\.none,local7\.none/' /etc/rsyslog.d/50-default.conf
+    ;;
+esac
 
 echo "[*] Installing Mempool crontab"
 osSudo "${ROOT_USER}" crontab -u "${MEMPOOL_USER}" "${MEMPOOL_HOME}/${MEMPOOL_REPO_NAME}/production/mempool.crontab"
@@ -1267,7 +1280,7 @@ case $OS in
     osGroupCreate "${CLN_GROUP}"
     osUserCreate "${CLN_USER}" "${CLN_HOME}" "${CLN_GROUP}"
     osSudo "${ROOT_USER}" chsh -s `which zsh` "${CLN_USER}"
-    osSudo "${CLN_USER}" touch "${CLN_HOME}/.zshrc"
+    echo "export PATH=$PATH:$HOME/.local/bin" >> "${CLN_HOME}/.zshrc"
     osSudo "${ROOT_USER}" chown -R "${CLN_USER}:${CLN_GROUP}" "${CLN_HOME}"
 
     echo "[*] Installing Core Lightning package"
diff --git a/production/linux/rsyslog.conf b/production/linux/rsyslog.conf
new file mode 100644
index 000000000..92ce6e591
--- /dev/null
+++ b/production/linux/rsyslog.conf
@@ -0,0 +1,2 @@
+local7.info /var/log/mempool
+local7.* /var/log/mempool.debug
diff --git a/production/syslog.conf b/production/syslog.conf
index 1a36100dd..7a55c57c7 100644
--- a/production/syslog.conf
+++ b/production/syslog.conf
@@ -1,4 +1,4 @@
-local7.>=notice |/usr/local/bin/sudo -u mempool /usr/local/bin/mempool-logger mempool.ops alerts
+local7.>=err |/usr/local/bin/sudo -u mempool /usr/local/bin/mempool-logger mempool.ops alerts
 local7.>=info |/usr/local/bin/sudo -u mempool /usr/local/bin/mempool-logger mempool.ops node100
 local7.>=info /var/log/mempool
 local7.* /var/log/mempool.debug