Compare commits


1250 Commits

Author SHA1 Message Date
softsimon
870e895144 Correcting docker disk cache config variable 2023-03-20 16:12:56 +09:00
softsimon
517cf613c1 Removing SIGTERM. Cache write block interval configuration. 2023-03-20 15:46:05 +09:00
softsimon
d54bcc898b Fix missing temp cache in disk cache 2023-03-20 15:44:55 +09:00
wiz
704e1741ed Merge pull request #3449 from mempool/nymkappa/fix-tests 2023-03-20 14:22:01 +09:00
nymkappa
ad5ce6dba4 Fix MaxMind tests 2023-03-20 14:02:31 +09:00
wiz
1718ddd4c3 Merge pull request #3444 from mempool/nymkappa/missing-docker
Update docker configs
2023-03-19 18:56:06 +09:00
wiz
32c2db2153 Fix backend docker config path for GeoIP data 2023-03-19 18:42:38 +09:00
wiz
0abb9cbb7c Fix boolean configuration option in docker backend config 2023-03-19 17:49:37 +09:00
wiz
e2e71c7a46 Add Maxmind GeoIP Lite download to Docker build 2023-03-19 17:49:08 +09:00
softsimon
5839ed428e Merge pull request #3378 from mempool/nymkappa/responsive
Fix block list component responsiveness
2023-03-19 16:48:47 +09:00
nymkappa
af6d115dbb Add missing MAXMIND in docker configs - Remove duplicated __MEMPOOL_INDEXING_BLOCKS_AMOUNT__ 2023-03-19 15:39:17 +09:00
nymkappa
194968d16f Fix block list component responsiveness 2023-03-19 15:23:00 +09:00
softsimon
30686bd322 Updating Korean translation 2023-03-19 12:32:07 +09:00
softsimon
175c645777 Crediting Korean translator 2023-03-19 12:05:13 +09:00
wiz
587a259843 Merge pull request #3440 from mempool/ops/bump-elements-v22.1
ops: Bump elementsd to v22.1
2023-03-18 20:06:53 +09:00
softsimon
64749ca726 Pull from transifex 2023-03-18 19:39:07 +09:00
wiz
8f2493dadb ops: Use old elements RPC port 7040 2023-03-18 18:48:37 +09:00
softsimon
7d8ea075d9 Merge pull request #3405 from mempool/nymkappa/pool-health
Show block health in pool block list
2023-03-18 18:48:10 +09:00
wiz
328327e5dc Merge pull request #3104 from mempool/mononaut/liquid-tooltip-fees
Fix missing fees in liquid block tooltips
2023-03-18 18:33:17 +09:00
wiz
fd1816d451 Merge pull request #3402 from mempool/hunicus/promo-subtitles
Add subtitles for promo video
2023-03-18 18:27:35 +09:00
hunicus
72f25b873c Add zh subtitles for promo video 2023-03-18 05:17:23 -04:00
hunicus
994656953c Fix promo video subtitles for sv 2023-03-18 05:15:48 -04:00
Mononaut
ed46232b83 Fix missing fees in liquid block tooltips 2023-03-18 18:11:10 +09:00
wiz
dca18a1c66 Merge branch 'master' into hunicus/promo-subtitles 2023-03-18 17:59:03 +09:00
wiz
c291ee1789 Merge pull request #3407 from mempool/hunicus/mempool-size-faq
Add faqs on mempool size and memory usage
2023-03-18 17:58:56 +09:00
hunicus
caf15351f7 Merge branch 'master' into hunicus/promo-subtitles 2023-03-18 17:58:29 +09:00
hunicus
6be9f23790 Add more languages for promo captions 2023-03-18 04:57:20 -04:00
nymkappa
adc51f6217 Update i18n 2023-03-18 16:40:30 +09:00
softsimon
ec8a46ede6 Pulling from transifex 2023-03-18 15:33:57 +09:00
softsimon
b78fdf5a23 Merge pull request #3353 from mempool/mononaut/mempool-block-animations
Improve mempool block animations
2023-03-18 12:46:20 +09:00
wiz
7c2493f3fa ops: Bump elementsd to v22.1 2023-03-17 22:07:48 +09:00
hunicus
b0a0ad11b4 Show mempool.space memory usage faq on official 2023-03-17 21:25:28 +09:00
hunicus
377f71eb52 Integrate feedback to memory usage faqs 2023-03-17 21:25:28 +09:00
hunicus
eefe343973 Add faqs on mempool size and memory usage 2023-03-17 21:25:28 +09:00
softsimon
41a6674fad Merge pull request #3379 from mempool/nymkappa/testnet-signet-price-zero
Don't fetch prices on signet/testnet, always show 0
2023-03-17 16:40:09 +09:00
softsimon
effba92729 Merge pull request #3381 from mempool/mononaut/network-special-blocks
Limit special blocks by network, add future halvings
2023-03-17 16:39:54 +09:00
wiz
c9f4bdda17 Merge pull request #3408 from knorrium/gha_docker_digest
Docker digest GHA
2023-03-17 14:49:19 +09:00
softsimon
ef54385068 Merge pull request #3413 from mempool/simon/pull-from-transifex-2023-03-17
Pull from transifex
2023-03-17 14:02:08 +09:00
softsimon
b8f77c4be4 Pull from transifex 2023-03-17 14:01:36 +09:00
Mononaut
b5c2073414 Fix getSimilarity error on empty mempool 2023-03-16 22:16:40 +09:00
Mononaut
25aacb5046 Calculate similarity score with audit disabled 2023-03-16 22:16:40 +09:00
Mononaut
c24724dcdf animate mempool blocks conditional on mined block similarity 2023-03-16 22:16:40 +09:00
Mononaut
64ab14f995 mempool block entry animation 2023-03-16 22:16:40 +09:00
nymkappa
4a64c0dfa5 Fix skeleton 2023-03-16 16:35:59 +09:00
nymkappa
0ebe0a5dc9 Add new stats in mining pool page 2023-03-16 16:13:11 +09:00
hunicus
c683a52a01 Show captions for non-english locales 2023-03-16 02:29:33 -04:00
Felipe Knorr Kuhn
5fbdd0bb2a Remove push trigger 2023-03-15 22:48:28 -07:00
Felipe Knorr Kuhn
599881366b Add GHA for Docker digest 2023-03-15 22:43:41 -07:00
hunicus
4c294b010d Make subtitles default to current locale 2023-03-15 23:06:10 -04:00
hunicus
418c32e334 Make git ignore mp4 and vtt files in /resources 2023-03-15 23:06:10 -04:00
hunicus
12b605e5cc Fetch subtitles files from github
For en, sv, and ja.
2023-03-15 23:06:10 -04:00
softsimon
870a7e51b1 Merge pull request #3373 from mempool/mononaut/fix-testnet-signet-features
Network-specific activation heights for transaction feature badges
2023-03-15 18:35:40 +09:00
hunicus
cdfde05452 Make new folder for promo video assets
Video, cover image, and subtitle files.
2023-03-15 02:50:17 -04:00
softsimon
ce24b8bb0a Merge pull request #3331 from mempool/mononaut/disk-cache-network-version
Add network versioning to disk cache
2023-03-15 14:32:28 +09:00
softsimon
1b2810ec0e Merge branch 'master' into mononaut/disk-cache-network-version 2023-03-14 21:05:16 +09:00
softsimon
c8e84ec056 Merge pull request #3329 from mempool/mutiny-integration
Add mutiny as community integration
2023-03-14 19:59:43 +09:00
Mononaut
7bf8fea9f2 Limit special blocks by network, add future halvings 2023-03-14 16:58:02 +09:00
softsimon
3458c5af71 Merge branch 'master' into mononaut/fix-testnet-signet-features 2023-03-14 16:50:06 +09:00
softsimon
3e46dabf7b Merge pull request #3345 from mempool/mononaut/hide-empty-features
Hide features row if tx has no features
2023-03-14 16:48:56 +09:00
nymkappa
a5dd141934 Don't fetch prices on signet/testnet, always show 0 2023-03-14 15:39:15 +09:00
softsimon
881af309ab Pull from transifex 2023-03-14 14:54:49 +09:00
softsimon
374ff50a62 Merge pull request #3376 from mempool/i18n/enable-danish
i18n: Enable Danish
2023-03-14 14:53:00 +09:00
wiz
2b9e63dbc5 i18n: Enable Danish 2023-03-14 14:31:20 +09:00
Mononaut
9f453deceb Use network-specific feature activation dates 2023-03-14 13:02:50 +09:00
softsimon
0b88d94573 Fix channels i18n string on world map
fixes #3336
2023-03-13 18:42:43 +09:00
wiz
536114853c Merge pull request #3351 from mempool/nymkappa/disable-pool-updater-no-mempool
Disable pool update when running lightning only
2023-03-13 18:16:29 +09:00
wiz
4d281277d6 Merge branch 'master' into nymkappa/disable-pool-updater-no-mempool 2023-03-13 18:02:03 +09:00
wiz
c97a722a3b Merge pull request #3350 from mempool/simon/video-height-fix
Correct video height on mobile
2023-03-13 18:01:17 +09:00
softsimon
adacb42d1a Merge branch 'master' into simon/video-height-fix 2023-03-13 17:41:45 +09:00
softsimon
b6427d6f67 Merge branch 'master' into nymkappa/disable-pool-updater-no-mempool 2023-03-13 17:41:04 +09:00
softsimon
433acb7b1d Merge pull request #3352 from mempool/simon/i18n-string-error-fix
Fixes i18n string error
2023-03-13 17:40:52 +09:00
softsimon
d015ee7824 Fixes i18n string error 2023-03-13 17:40:17 +09:00
nymkappa
ecfb980e75 Disable pool update when running lightning only 2023-03-13 17:24:23 +09:00
softsimon
cfdbd30956 Correct video height on mobile
#3342
2023-03-13 17:18:09 +09:00
softsimon
7acfec2406 Merge pull request #3349 from mempool/simon/update-missing-i18n-strings
Fixing more missing i18n keys
2023-03-13 17:08:50 +09:00
softsimon
070ee10fb0 Fixing more missing i18n keys
fixes #3339
fixes #3337
2023-03-13 17:08:27 +09:00
softsimon
446b0de8f3 Merge pull request #3347 from mempool/simon/ninja-i18n-pull
Ninja i18n pull
2023-03-13 13:04:13 +09:00
softsimon
99b7fc8814 Ninja i18n pull 2023-03-13 13:03:57 +09:00
softsimon
defb88a474 Merge pull request #3346 from mempool/simon/i18n-extract-2023-03-13
i18n extract
2023-03-13 12:58:48 +09:00
softsimon
7392535182 i18n extract 2023-03-13 12:58:35 +09:00
Mononaut
130ae8c3a5 Hide features row if tx has no features 2023-03-13 12:48:01 +09:00
softsimon
f4f8b2b271 Merge pull request #3344 from mempool/simon/pull-from-transifex-2023-03-13
Pull from transifex
2023-03-13 12:36:13 +09:00
softsimon
477d09412b Pull from transifex 2023-03-13 12:36:00 +09:00
softsimon
87bc7917d8 Merge pull request #3341 from mempool/mononaut/difficulty-skeleton-width
Fix difficulty skeleton width on firefox
2023-03-13 11:23:17 +09:00
Mononaut
9030d95207 Fix difficulty skeleton width on firefox 2023-03-13 10:28:26 +09:00
softsimon
5e4131b474 Merge pull request #3330 from mempool/mononaut/reduce-disk-cache-frequency
Save cache to disk every 6 blocks
2023-03-12 21:06:19 +09:00
Mononaut
3bf96dafde Add network versioning to disk cache 2023-03-12 19:20:29 +09:00
softsimon
2f4dba895c Merge pull request #3327 from mempool/nymkappa/bugfix/show-total-fee-last-mempool-block
Show cumulative fee on last mempool block
2023-03-12 19:17:57 +09:00
hunicus
a5e281706f Make community integration rows symmetric and full 2023-03-12 06:17:50 -04:00
nymkappa
eba0e1c25a Show cumulative fee on last mempool block 2023-03-12 19:13:39 +09:00
hunicus
97d82042c0 Add mutiny to community integrations 2023-03-12 06:03:23 -04:00
Mononaut
8bd05987e5 Save cache to disk every 6 blocks 2023-03-12 19:03:19 +09:00
wiz
7e676dbaf0 Merge pull request #3325 from mempool/mononaut/fix-liquid-asset-tooltips
Fix units in flow diagram tooltips for liquid assets
2023-03-12 18:31:49 +09:00
softsimon
760f3193d9 Merge pull request #3324 from mempool/hunicus/add-integrations-032023
Add 4 community integrations to about page
2023-03-12 18:15:02 +09:00
wiz
72663b30df Merge branch 'master' into mononaut/fix-liquid-asset-tooltips 2023-03-12 18:14:25 +09:00
hunicus
8995283a58 Remove trailing slash 2023-03-12 05:12:25 -04:00
Mononaut
a8ac6aedf7 Fix units in flow diagram tooltips for liquid assets 2023-03-12 18:07:31 +09:00
wiz
9613247283 Merge branch 'master' into hunicus/add-integrations-032023 2023-03-12 18:03:49 +09:00
hunicus
ab96a17e80 Make non-rounded icons smaller 2023-03-12 04:59:37 -04:00
hunicus
3b080ee5fb Add galoy and boltz to community integrations 2023-03-12 04:52:15 -04:00
hunicus
24a8cca758 Add bitcoin-s and edge to community integrations 2023-03-12 04:51:59 -04:00
softsimon
dbad3af8ba Merge pull request #3322 from mempool/mononaut/fix-new-block-animations
Fix new block animations
2023-03-12 17:42:33 +09:00
softsimon
cdddf3a8b2 Merge pull request #3319 from mempool/mononaut/more-fee-ratings
Show tx fee ratings for older blocks
2023-03-12 17:10:47 +09:00
Mononaut
b675bd8d55 Fix transaction confirmed arrow animation 2023-03-12 17:00:36 +09:00
wiz
cb04d67d07 Merge branch 'master' into mononaut/more-fee-ratings 2023-03-12 16:56:18 +09:00
wiz
b1c6c5cbb6 Merge pull request #3248 from mempool/hunicus/add-yt-pt
Add youtube and peertube links to about page
2023-03-12 16:54:59 +09:00
wiz
7f8f72e9d6 Merge pull request #3306 from mempool/simon/fix-use-same-fee-span-calc
Display same fee span on blocks
2023-03-12 16:52:55 +09:00
wiz
a1ea77ee50 Merge pull request #3311 from mempool/simon/lightning-stats-truncation
Lightning dashboard overflow titles fixes
2023-03-12 16:51:38 +09:00
wiz
19e2d687d0 Use correct BitcoinTV logo 2023-03-12 16:48:23 +09:00
Mononaut
4be8016eb1 Fix repeated new block animation on page navigation 2023-03-12 16:42:58 +09:00
wiz
627c913d5e Merge branch 'master' into simon/fix-use-same-fee-span-calc 2023-03-12 16:17:51 +09:00
wiz
3c5165603b Merge branch 'master' into simon/lightning-stats-truncation 2023-03-12 16:17:22 +09:00
hunicus
513277cef7 Replace peertube logo with bitcointv logo 2023-03-12 16:09:06 +09:00
hunicus
72a9b71901 Add youtube and peertube links to about page 2023-03-12 16:09:06 +09:00
Mononaut
60bef0eeb6 show tx fee ratings for older blocks 2023-03-12 15:54:39 +09:00
softsimon
eeb97bdb81 Merge pull request #3318 from mempool/simon/pull-transifex-2023-03-12
Pull from transifex
2023-03-12 15:45:44 +09:00
softsimon
8407c07b88 Pull from transifex 2023-03-12 15:44:49 +09:00
softsimon
951cf2daf9 Merge pull request #3314 from mempool/nymkappa/fix-node-distance-overflow
Fix some responsive issues on the node component
2023-03-12 14:37:27 +09:00
nymkappa
31cfbf6625 Fix some responsive issues on the node component 2023-03-12 10:16:49 +09:00
wiz
5dc52250e6 Merge pull request #3312 from mempool/mononaut/difficulty-widget-redesign
Fix epoch length in new difficulty widget
2023-03-11 19:49:07 +09:00
wiz
6b544ac179 Merge branch 'master' into mononaut/difficulty-widget-redesign 2023-03-11 19:38:15 +09:00
wiz
2ad9bf57f7 Merge pull request #3288 from mempool/nymkappa/esplora-warning
Log a warning if there are a lot of 404s from the esplora tx API while updating the nodejs backend mempool
2023-03-11 19:36:51 +09:00
Mononaut
e0e97f0d5e Fix epoch length in difficulty widget 2023-03-11 19:32:59 +09:00
softsimon
c72024b4e3 Lightning dashboard overflow titles fixes
fixes #3127
2023-03-11 19:31:49 +09:00
wiz
093469c164 Merge pull request #3303 from mempool/mononaut/batch-address-outspend-lookup
batch address outspend lookups into <50 txids per request
2023-03-11 19:27:50 +09:00
wiz
24d9977919 Merge branch 'master' into nymkappa/esplora-warning 2023-03-11 18:36:22 +09:00
softsimon
7b2ea9c4c8 Merge pull request #3310 from mempool/simon/next-block-lower-case-css-fix
next block lower case css fix
2023-03-11 18:35:08 +09:00
softsimon
6b4650f3cd next block lower case css fix 2023-03-11 18:34:51 +09:00
softsimon
e971846b7e Merge pull request #3309 from mempool/simon/fix-i18n-duplicate-warning
Fixes i18n duplicate warning
2023-03-11 18:33:08 +09:00
softsimon
23ea5d582b Fixes i18n duplicate warning 2023-03-11 18:32:47 +09:00
softsimon
6196860387 Merge pull request #3296 from mempool/nymkappa/order-isp
Sort ASNs numerically - add a few more top-10 ISPs in warm cache
2023-03-11 18:22:39 +09:00
softsimon
368f858ff2 Merge pull request #3258 from mempool/mononaut/difficulty-widget-redesign
Redesign difficulty adjustment dashboard widget
2023-03-11 18:11:34 +09:00
softsimon
ef0cc9d2db Changing interval to block time 2023-03-11 18:04:06 +09:00
Mononaut
39051e94e3 Redesign difficulty adjustment dashboard widget 2023-03-11 17:53:18 +09:00
softsimon
e1f0bb9901 Merge pull request #3308 from mempool/simon/pull-from-transifex-2023-03-11
Pull from transifex 2023-03-11
2023-03-11 17:26:50 +09:00
softsimon
097eed0baa Pull from transifex 2023-03-11 2023-03-11 17:26:33 +09:00
softsimon
25c7c84705 Merge pull request #3297 from mempool/nymkappa/duplicate-block
Fixes duplicate block in latest block component
2023-03-11 16:09:40 +09:00
wiz
7e873e6637 Merge branch 'master' into nymkappa/order-isp 2023-03-11 15:50:31 +09:00
wiz
36e1777b96 Merge branch 'master' into mononaut/batch-address-outspend-lookup 2023-03-11 15:16:59 +09:00
wiz
35a05a420d Merge branch 'master' into nymkappa/duplicate-block 2023-03-11 15:15:56 +09:00
softsimon
1be5c6ec53 Merge pull request #3307 from mempool/nymkappa/update-regtest-examples
Updated regtest example
2023-03-11 11:37:15 +09:00
softsimon
e4aa3c2091 Merge pull request #3304 from mempool/simon/fix-confirmation-arrow-bug
Fixes arrow position on confirmed blocks
2023-03-11 11:31:30 +09:00
nymkappa
4263977d99 Updated regtest example 2023-03-11 10:52:15 +09:00
softsimon
5bd9be7ab1 Display same fee span on blocks
fixes #3282
2023-03-11 10:14:35 +09:00
softsimon
74c95bfdf5 Merge pull request #3300 from mempool/simon/fix-transaction-expression-changed-error
Fixes changed after checked error in transaction page
2023-03-11 10:01:58 +09:00
softsimon
a1c0a58b9d Merge pull request #3305 from mempool/simon/remove-search-bar-border
Remove search bar focus border
2023-03-11 09:56:27 +09:00
softsimon
d3d67627f3 Remove search bar focus border 2023-03-10 20:19:37 +09:00
softsimon
4a0d9cb66f Fixes arrow position on confirmed blocks
fixes #3294
2023-03-10 18:35:26 +09:00
Mononaut
a2d673f9ed batch address outspend lookups into <50 txids per request 2023-03-10 00:27:22 -06:00
softsimon
8dccaee0b0 Merge pull request #3299 from mempool/mononaut/persist-cache-on-exit
Save cache to disk on SIGTERM/SIGINT
2023-03-10 14:27:00 +09:00
softsimon
ccc413a800 Merge pull request #3301 from mempool/mononaut/flow-diagram-alignment
pixel-perfect flow diagrams (again)
2023-03-10 13:38:56 +09:00
softsimon
4d7e23064c Merge pull request #3298 from mempool/mononaut/missing-fee-rating
fix missing cpfp fee ratings on mobile
2023-03-10 13:28:14 +09:00
Mononaut
a22a62836e pixel-perfect flow diagrams 2023-03-09 22:20:54 -06:00
softsimon
dd01371b61 Fixes changed after checked error in transaction page 2023-03-10 12:37:55 +09:00
Mononaut
46d89ac837 prevent disk cache file write corruption 2023-03-09 20:19:22 -06:00
Mononaut
796566e7ae Save cache to disk on SIGTERM/SIGINT 2023-03-09 19:47:54 -06:00
Mononaut
3f234431fb fix missing fee rating on mobile 2023-03-09 19:31:53 -06:00
nymkappa
778e2f9b64 Fixes #3217 2023-03-10 10:26:30 +09:00
nymkappa
6327ce7c89 Sort ASNs numerically - add a few more top-10 ISPs in warm cache 2023-03-10 09:21:44 +09:00
softsimon
cec8445223 Merge pull request #3293 from mempool/nymkappa/search-autofocus
Autofocus search input when we load the app for the first time
2023-03-09 19:04:03 +09:00
nymkappa
548a6ea664 Autofocus search input when we load the app for the first time 2023-03-09 18:53:29 +09:00
softsimon
9cef9b67c1 Merge pull request #3291 from mempool/revert-3290-nymkappa/bump-axios
Revert "Bump axios from 0.27.2 -> 1.3.4"
2023-03-09 17:46:19 +09:00
softsimon
42228dc70f Revert "Bump axios from 0.27.2 -> 1.3.4" 2023-03-09 17:46:09 +09:00
nymkappa
63dd9fd09e Log a warning if there are a lot of 404s from the esplora tx API 2023-03-09 17:45:08 +09:00
softsimon
cd3c1ed82e Merge pull request #3290 from mempool/nymkappa/bump-axios
Bump axios from 0.27.2 -> 1.3.4
2023-03-09 17:35:35 +09:00
nymkappa
304089b3d0 Bump axios from 0.27.2 -> 1.3.4 2023-03-09 17:27:19 +09:00
softsimon
c60903565e Merge pull request #3286 from mempool/simon/some-missing-ln-i18n-strings
Fixing Channels and Capacity i18n strings
2023-03-09 15:30:23 +09:00
softsimon
37e94249df Fixing Channels and Capacity i18n strings 2023-03-09 15:30:04 +09:00
softsimon
d2337ae4e8 Merge pull request #3285 from mempool/wiz/add-old-special-block-events
Add old special block events to app constants
2023-03-09 15:14:11 +09:00
softsimon
7557c47502 Merge pull request #3264 from mempool/mononaut/fix-mining-dashboard-updates
Fix stale mining dashboard data
2023-03-09 14:42:28 +09:00
wiz
a8214bcbbd Add old special block events to app constants 2023-03-09 14:30:55 +09:00
softsimon
fcf51e2af8 Merge pull request #3284 from mempool/simon/pull-transifex-2023-03-09
Pull from transifex
2023-03-09 13:19:40 +09:00
softsimon
c657e622eb Merge pull request #3277 from mempool/i18n/enable-danish-disable-catalan
i18n: Enable Danish, disable Catalan
2023-03-09 13:08:02 +09:00
softsimon
526e46b8e4 Pull from transifex 2023-03-09 11:49:26 +09:00
wiz
ead7a13ff0 i18n: Enable Danish, disable Catalan 2023-03-08 21:14:46 +09:00
Mononaut
c3c0696844 Update hashrate estimate when new blocks arrive 2023-03-08 02:37:19 -06:00
Mononaut
2907054a01 Update pool ranking block count when new blocks arrive 2023-03-08 02:33:16 -06:00
wiz
64408bfd16 Merge pull request #3241 from mempool/simon/fiat-space-fix
Remove fiat plus space
2023-03-08 17:02:19 +09:00
wiz
ca690bf123 Merge branch 'master' into simon/fiat-space-fix 2023-03-08 16:47:36 +09:00
softsimon
0243428fe9 Merge pull request #3262 from mempool/simon/pull-from-transifex-2023-03-08
Pull from transifex 8/3
2023-03-08 16:27:12 +09:00
softsimon
901d32d8f7 Pull from transifex 8/3 2023-03-08 16:26:56 +09:00
wiz
8adacd4a0e ops: Add missing unfurl route in nginx/server-common.conf 2023-03-08 16:14:43 +09:00
wiz
b8a3c15ed2 Merge pull request #3259 from mempool/mononaut/fix-cpfp-memory-bug
Fix memory-intensive getCPFPUnindexedBlocks mysql query
2023-03-08 15:10:31 +09:00
wiz
3e31e68a19 Merge branch 'master' into mononaut/fix-cpfp-memory-bug 2023-03-08 14:31:46 +09:00
wiz
626b395ab7 Merge pull request #3227 from mempool/simon/add-4y
Adding 4 year button to mempool graph
2023-03-08 14:31:39 +09:00
Mononaut
5eae84bb75 Fix memory-intensive getCPFPUnindexedBlocks mysql query 2023-03-07 21:01:54 -06:00
softsimon
c5a01135b3 Merge pull request #3249 from knorrium/fix_typo_in_bulk_config
Fix typo in bulk config variable
2023-03-08 11:05:58 +09:00
Felipe Knorr Kuhn
1a4f3b105e Fix typo in bulk config variable 2023-03-07 12:29:00 -08:00
wiz
ae1e2dcb50 Merge branch 'master' into simon/add-4y 2023-03-07 19:00:27 +09:00
softsimon
da25577ea7 Merge pull request #3244 from mempool/mononaut/fix-ln-rtl
Fix miscellaneous RTL layout bugs
2023-03-07 17:00:11 +09:00
Mononaut
5937e959c3 Fix miscellaneous RTL layout bugs 2023-03-06 20:25:27 -06:00
softsimon
6360913e84 Merge pull request #3243 from mempool/simon/pull-from-transifex-2023-03-07
Pull from transifex
2023-03-07 11:11:21 +09:00
softsimon
fb71136dae Pull from transifex 2023-03-07 11:11:02 +09:00
softsimon
3739e5be0d Merge pull request #3238 from mempool/mononaut/fix-404s
Fix unnecessary cpfp/rbf 404 responses
2023-03-06 18:40:32 +09:00
softsimon
ebfd0b9ddd Merge pull request #3225 from mempool/hunicus/responsive-disclaimer
Make faq disclaimer responsive
2023-03-06 16:50:03 +09:00
softsimon
355acfd338 Merge pull request #3239 from mempool/mononaut/fix-cached-rbf-error
don't cache tx data for rbf replacements
2023-03-06 16:45:36 +09:00
softsimon
5a5ebe8435 Remove fiat plus space
fixes #3240
2023-03-06 16:16:52 +09:00
Mononaut
43b2fe2f9a don't cache tx data for rbf replacements 2023-03-06 00:19:12 -06:00
Mononaut
182cb16695 Fix unnecessary cpfp 404 responses 2023-03-06 00:02:21 -06:00
softsimon
5a09e3099c Merge pull request #3237 from mempool/mononaut/unify-time-components
unify time rendering components
2023-03-06 12:24:00 +09:00
Mononaut
7f78fefb21 revert time localization strings 2023-03-05 21:09:22 -06:00
Mononaut
ac932c641c unify time rendering components 2023-03-05 19:26:32 -06:00
softsimon
4f297f0a7a Merge pull request #3234 from mempool/nymkappa/bugfix/ln-world-map-full-height
Show ln channel world map using 100% height
2023-03-05 19:20:28 +09:00
nymkappa
c28d1c4610 Show ln channel world map using 100% height 2023-03-05 17:43:59 +09:00
softsimon
1ad4ff0683 Merge pull request #3235 from mempool/simon/extract-i18n-clearnet
Update clearnet i18n string
2023-03-05 17:37:32 +09:00
softsimon
c50f5d45e1 Update clearnet i18n string 2023-03-05 17:37:15 +09:00
wiz
226a4e9bde Fix two more strings for "Clearnet Only" 2023-03-05 17:34:30 +09:00
wiz
154e65d470 Fix string for "Clearnet Only" 2023-03-05 17:30:32 +09:00
softsimon
a895d21179 Merge pull request #3231 from mempool/simon/i18n-fixes-extract
I18n extract. Some minor fixes.
2023-03-05 17:08:52 +09:00
softsimon
d5ea2aec25 I18n extract. Some minor fixes. 2023-03-05 17:08:35 +09:00
wiz
07271f56d7 Merge pull request #3230 from mempool/mononaut/heap-monitor
Monitor heap memory usage
2023-03-05 15:47:06 +09:00
wiz
f37946118c Change heap size warning to 80% utilization 2023-03-05 15:45:28 +09:00
softsimon
fdbcef29e5 Merge pull request #3212 from mempool/nymkappa/bugfix/initial-pool-download
Fix initial pool update when db is empty
2023-03-05 15:44:14 +09:00
wiz
9eeaf76369 Merge branch 'master' into mononaut/heap-monitor 2023-03-05 15:35:00 +09:00
wiz
a9a2ff0347 Merge pull request #3215 from mempool/nymkappa/bugfix/price
Handle missing price (show 0)
2023-03-05 15:34:31 +09:00
softsimon
aae61bcb45 Merge pull request #3125 from mempool/nymkappa/feature/update-mining-indexer-log
Update some mining indexer log
2023-03-05 15:19:50 +09:00
Mononaut
43bed7cf56 Monitor heap memory usage 2023-03-04 23:13:55 -06:00
wiz
fca813147d Merge branch 'master' into nymkappa/bugfix/price 2023-03-05 14:13:51 +09:00
nymkappa
62ef1d4439 Fix log typo 2023-03-05 08:27:31 +09:00
nymkappa
ff7c85180d Fix initial pool update when db is empty 2023-03-05 08:27:30 +09:00
nymkappa
001be82f5a Move some notice into info 2023-03-05 08:23:05 +09:00
nymkappa
32a260473a Update some mining indexing logs 2023-03-05 08:23:04 +09:00
nymkappa
2e74d7fa4a Remove mining db stats - replaced by runtime state variable 2023-03-05 08:23:04 +09:00
softsimon
4e39c27c75 Adding 4 year button to mempool graph
fixes #3218
2023-03-04 18:48:16 +09:00
softsimon
aef26097ec Merge pull request #3223 from mempool/mononaut/fix-cpfp-table-widths
Fix overlapping columns in cpfp table on small screens
2023-03-04 17:56:35 +09:00
hunicus
7b24b124c2 Use svg component for warning svg 2023-03-04 17:40:39 +09:00
hunicus
82b0844928 Make faq disclaimer more responsive 2023-03-04 17:40:39 +09:00
softsimon
2ed4e5bb6e Merge pull request #3222 from mempool/simon/incoming-transactions-progress-color
Correct incoming transaction progress colors
2023-03-04 16:55:44 +09:00
softsimon
fdcf4ce501 Merge pull request #3220 from mempool/mononaut/fiat-decimals
drop decimal places from large fiat values
2023-03-04 16:55:05 +09:00
wiz
0223846f91 Merge pull request #3122 from mempool/nymkappa/bugfix/cleanup-mining-states
Remove mining db stats - replaced by runtime state variable
2023-03-04 16:54:10 +09:00
wiz
1192d4fbd4 Merge branch 'master' into nymkappa/bugfix/cleanup-mining-states 2023-03-04 16:46:44 +09:00
wiz
027603acf7 Merge pull request #2995 from mempool/simon/bisq-search-bar-fix
Bisq markets search bar fix
2023-03-04 16:22:04 +09:00
wiz
152b8e8a7d Merge branch 'master' into simon/bisq-search-bar-fix 2023-03-04 16:06:46 +09:00
softsimon
375219242c Merge pull request #3224 from mempool/ops/fix-bisq-address-prefix-api
ops: Add missing /api/address-prefix nginx route for bisq
2023-03-04 16:06:23 +09:00
wiz
5fb4eac4b7 ops: Add missing /api/address-prefix nginx route for bisq 2023-03-04 15:53:49 +09:00
softsimon
f8624020e8 Bisq markets search bar fix
fixes #2986
2023-03-04 15:35:08 +09:00
Mononaut
269dcb2b16 Fix overlapping columns in cpfp table on small screens 2023-03-04 00:22:50 -06:00
softsimon
ec2e7a46eb Correct incoming transaction progress colors
fixes #3216
2023-03-04 15:19:56 +09:00
Mononaut
a37bcfec65 drop decimal places from large fiat values 2023-03-04 00:10:47 -06:00
softsimon
0c8636c803 Merge pull request #3219 from mempool/mononaut/momentum-scrolling
blockchain momentum scrolling
2023-03-04 15:09:14 +09:00
Mononaut
059139689d Fix blockchain scrolling on high refresh-rate devices 2023-03-03 23:39:01 -06:00
Mononaut
4e68498979 blockchain momentum scrolling 2023-03-03 22:45:11 -06:00
wiz
9024e21868 Merge pull request #3202 from mempool/nymkappa/use-core-again-because-esplora-is-lol
Use core to fetch blocks because esplora/electrs still return integer difficulty
2023-03-04 13:34:15 +09:00
nymkappa
e0c3c732d1 Fix incorrect db schema version in db migration script 2023-03-04 10:55:27 +09:00
nymkappa
622929831e Merge branch 'master' into nymkappa/bugfix/cleanup-mining-states 2023-03-04 10:54:02 +09:00
nymkappa
d483362a9b Handle missing price (show 0) 2023-03-04 10:51:13 +09:00
softsimon
32466f4d46 Merge pull request #3072 from mempool/mononaut/optimize-mempool-block-7
Optimize mempool block 7 data
2023-03-04 10:46:18 +09:00
softsimon
b21fd0d37d Merge branch 'master' into mononaut/optimize-mempool-block-7 2023-03-04 10:34:46 +09:00
softsimon
a2688f1ca5 Merge pull request #3213 from mempool/mononaut/btc-tooltip-amount
Always show BTC amount in block tooltips
2023-03-04 10:23:52 +09:00
softsimon
51d50292e6 Merge pull request #3203 from knorrium/db_migration_owners
Define owners for the DB migration file
2023-03-03 21:04:16 +09:00
softsimon
f90a23bde6 Merge pull request #3211 from mempool/mononaut/fix-ios-block-scrolling
Fix blockchain scrolling on iOS devices
2023-03-03 18:30:05 +09:00
Mononaut
6a66d06208 Always show BTC amount in block tooltips 2023-03-03 02:55:48 -06:00
Mononaut
27cf23f9b0 Fix blockchain scrolling on iOS devices 2023-03-03 02:43:44 -06:00
wiz
d16bda0630 Merge branch 'master' into nymkappa/use-core-again-because-esplora-is-lol 2023-03-03 17:39:55 +09:00
softsimon
fa77161379 Merge pull request #3209 from mempool/nymkappa/wipe-cache-always
Wipe nodejs backend cache for any mining pool change - Update pools before loading disk cache
2023-03-03 17:37:58 +09:00
nymkappa
d76d14253a Update mining pools before loading the disk cache since we may need to wipe that cache 2023-03-03 17:29:46 +09:00
nymkappa
ad7d3d97de Wipe nodejs backend cache for any mining pool change 2023-03-03 16:58:40 +09:00
softsimon
3e27f684d1 Merge pull request #3206 from knorrium/update_cypress_v12_7_0
Update Cypress to v12.7.0
2023-03-03 16:58:18 +09:00
Felipe Knorr Kuhn
2cadb75163 Merge branch 'master' into update_cypress_v12_7_0 2023-03-02 23:51:12 -08:00
Felipe Knorr Kuhn
0e338a2c64 Fix linting on more specs 2023-03-02 23:33:12 -08:00
softsimon
ab88a51e01 Merge pull request #3204 from mempool/simon/vscode-default-relative-imports
Default relative paths in VsCode
2023-03-03 16:29:22 +09:00
softsimon
cffb41b54d Merge pull request #3208 from mempool/nymkappa/fix-paths
Fix import paths
2023-03-03 16:29:07 +09:00
nymkappa
fdd66a685a Fix import paths 2023-03-03 16:28:05 +09:00
Felipe Knorr Kuhn
98e9a91058 Merge branch 'master' into simon/vscode-default-relative-imports 2023-03-02 23:26:37 -08:00
wiz
5a8270a12d Merge branch 'master' into nymkappa/use-core-again-because-esplora-is-lol 2023-03-03 16:25:12 +09:00
Felipe Knorr Kuhn
5d976eff75 Fix linting on bisq spec 2023-03-02 23:17:48 -08:00
Felipe Knorr Kuhn
d87fd95523 Bump Cypress to v12.7.0 2023-03-02 23:17:26 -08:00
Felipe Knorr Kuhn
545b5be7d4 Remove the deprecated plugins file 2023-03-02 23:17:08 -08:00
softsimon
1ac8e5004f Merge pull request #3148 from mempool/nymkappa/bugfix/wipe-cache-reindexing
When we re-index blocks due to mining pools change, wipe the nodejs backend cache
2023-03-03 15:34:54 +09:00
Felipe Knorr Kuhn
dd7c56eb53 Define owners for the DB migration file 2023-03-02 22:01:50 -08:00
softsimon
c18dc44f4d Default relative paths in VsCode 2023-03-03 14:58:25 +09:00
softsimon
0f077afef2 Merge pull request #3198 from mempool/simon/about-video-fixes
About page video improvements
2023-03-03 14:48:38 +09:00
nymkappa
7ea2d3b808 Use core to fetch blocks because esplora/electrs still return integer difficulty 2023-03-03 13:59:17 +09:00
softsimon
0b77000aab Merge pull request #3199 from mempool/simon/pull-nl-translations-transifex
Pull transifex (nl)
2023-03-03 11:35:34 +09:00
softsimon
bc94560636 Pull transifex (nl) 2023-03-03 11:33:57 +09:00
softsimon
c404895b1b Merge branch 'master' into nymkappa/bugfix/wipe-cache-reindexing 2023-03-03 11:09:31 +09:00
softsimon
0846f346ee About page video improvements 2023-03-03 10:58:02 +09:00
wiz
0382f11052 Merge pull request #3196 from mempool/hunicus/electrum-responsive
Only show electrum tab on desktop
2023-03-02 23:46:34 +09:00
hunicus
89d9c1d78d Only show electrum tab on desktop 2023-03-02 23:38:47 +09:00
wiz
decc96d293 Merge pull request #3195 from mempool/hunicus/about-video
Add promo video to about page
2023-03-02 23:21:34 +09:00
hunicus
4f689885e6 Remove margin from about video 2023-03-02 23:02:35 +09:00
wiz
210843916e Add mempool promo video (via GitHub) in README 2023-03-02 22:45:51 +09:00
wiz
bc2d8dd7c3 Add mempool promo video (via YouTube) in README 2023-03-02 22:42:56 +09:00
hunicus
9b871dbfcc Make about video responsive 2023-03-02 22:41:04 +09:00
wiz
b1e7bbcc3e Merge branch 'master' into hunicus/about-video 2023-03-02 22:17:41 +09:00
softsimon
488693804c Merge pull request #3175 from mempool/hunicus/about-anchor
Add anchor links for about page sections
2023-03-02 22:01:09 +09:00
hunicus
a54684ad74 Add promo video to about page 2023-03-02 07:51:30 -05:00
softsimon
7b01286ed2 Run the go-to-anchor whenever data is loaded 2023-03-02 21:50:09 +09:00
softsimon
b4b08a8531 Merge pull request #3174 from mempool/hunicus/api-limit-note
Link API rate limit note to /enterprise
2023-03-02 21:33:28 +09:00
softsimon
96ddf98159 Merge pull request #3172 from mempool/hunicus/network-labels
Specify networks in lightning network graph labels
2023-03-02 21:30:06 +09:00
wiz
ec0d8b7c48 ops: Remove fork repos from upgrade script 2023-03-02 19:30:02 +09:00
softsimon
dfcbaabeda Merge pull request #3157 from hunicus/bulk-block-api
Add api documentation for blocks-bulk
2023-03-02 17:20:55 +09:00
softsimon
29498e136b Merge pull request #3185 from mempool/nymkappa/historical-frontend-price-flag
Add frontend config flag to toggle historical price fetching
2023-03-02 15:32:57 +09:00
softsimon
13db2626b0 Merge pull request #3181 from mempool/nymkappa/run-forensics-last
Run ln forensics last
2023-03-02 14:21:25 +09:00
softsimon
3d01ecd1a6 Merge pull request #3191 from mempool/nymkappa/remove-trigger-review-request
Don't run CI on "review_requested" event
2023-03-02 13:56:11 +09:00
softsimon
b14d6bc850 Merge pull request #3190 from mempool/nymkappa/cleanup-get-blocks
Remove useless code
2023-03-02 13:55:46 +09:00
Felipe Knorr Kuhn
743f15e08e Merge branch 'master' into nymkappa/remove-trigger-review-request 2023-03-01 18:09:18 -08:00
nymkappa
5129116fe4 Don't run CI on "review_requested" event 2023-03-02 10:45:14 +09:00
wiz
746e1bb3da Merge pull request #3189 from mempool/mononaut/cpfp-db-disabled
Don't try to fetch cpfp when database is disabled
2023-03-02 10:34:45 +09:00
nymkappa
be4bd691ee Remove useless code 2023-03-02 10:08:40 +09:00
Mononaut
2309a769cd Don't try to fetch cpfp if database disabled 2023-03-01 11:36:26 -06:00
wiz
4efabe18b1 Merge pull request #3166 from mempool/nymkappa/unify-blocks-apis 2023-03-01 19:56:02 +09:00
wiz
5e484d3eca Merge branch 'master' into nymkappa/unify-blocks-apis 2023-03-01 19:20:33 +09:00
wiz
9d290562cd Merge pull request #3186 from mempool/nymkappa/ignore-nega-usd-price
Ignore negative USD prices
2023-03-01 19:20:12 +09:00
softsimon
5fe8f42489 Merge pull request #3187 from mempool/simon/pull-from-transifex-03-01
Pull from transifex 1/3
2023-03-01 14:15:08 +04:00
softsimon
3265b32a56 Pull from transifex 1/3 2023-03-01 19:14:54 +09:00
wiz
2c55182cf0 Merge branch 'master' into nymkappa/ignore-nega-usd-price 2023-03-01 19:11:52 +09:00
nymkappa
9043d23a03 Ignore negative USD prices 2023-03-01 19:11:03 +09:00
wiz
e2ef8721d7 Merge branch 'master' into nymkappa/unify-blocks-apis 2023-03-01 18:19:45 +09:00
wiz
c27e165434 Merge pull request #3184 from mempool/nymkappa/bugfix/db-migration
Only run migration 57 if bitcoin
2023-03-01 18:18:54 +09:00
wiz
88913b1a89 Merge branch 'master' into nymkappa/bugfix/db-migration 2023-03-01 17:38:02 +09:00
wiz
7acfa6e13f Merge pull request #3160 from knorrium/run_tests_on_merge
Run Cypress tests on master after merging
2023-03-01 17:34:27 +09:00
nymkappa
9c5a9f2eba Only run migration 57 if bitcoin 2023-03-01 17:33:37 +09:00
nymkappa
d5342a4e9a Add frontend config flag to toggle historical price fetching 2023-03-01 17:26:53 +09:00
wiz
6c271ab6ee Merge branch 'master' into nymkappa/unify-blocks-apis 2023-03-01 17:05:57 +09:00
nymkappa
87d678e268 Run ln forensics last 2023-03-01 16:52:24 +09:00
nymkappa
8aebcf3e57 Remove mining db stats - replaced by runtime state variable 2023-03-01 16:42:26 +09:00
Felipe Knorr Kuhn
40c250502e Merge branch 'master' into run_tests_on_merge 2023-02-28 21:45:24 -08:00
softsimon
d673365a0e Merge pull request #3116 from mempool/nymkappa/feature/align-dashboards
Align dashboards
2023-03-01 09:34:21 +04:00
softsimon
43e0c7e0d8 Merge pull request #3179 from mempool/mononaut/center-scroll
Center-align blockchain when resetting scroll position
2023-03-01 09:16:57 +04:00
softsimon
7875829370 Merge pull request #3178 from mempool/mononaut/clear-block-cache
Reset scrolling blockchain cache when network changes
2023-03-01 09:12:37 +04:00
nymkappa
a67656389e Fix chain divergence detection upon new block (use the new interface) 2023-03-01 13:50:15 +09:00
Felipe Knorr Kuhn
1d5e4aa410 Merge branch 'master' into run_tests_on_merge 2023-02-28 20:09:30 -08:00
Mononaut
af2e3cb42a Center-align blockchain after resetting scroll 2023-02-28 21:36:16 -06:00
Mononaut
f09a2aab24 Reset scrolling blockchain cache when network changes 2023-02-28 21:30:20 -06:00
softsimon
0929d53c56 Merge branch 'master' into nymkappa/feature/align-dashboards 2023-03-01 06:31:25 +04:00
softsimon
ec7f0d1143 Merge pull request #3040 from mempool/nymkappa/bugfix/ignore-too-low-lightning-timestamps
[LND] Nullify zeroed timestamps
2023-03-01 06:26:49 +04:00
softsimon
a5acb81def Merge branch 'master' into nymkappa/bugfix/ignore-too-low-lightning-timestamps 2023-03-01 06:20:56 +04:00
softsimon
792cb12755 Merge pull request #3044 from mempool/nymkappa/bugfix/node-sockets-lnd
Fix node socket parsing with LND
2023-03-01 06:14:13 +04:00
softsimon
8630ae0682 Merge branch 'master' into nymkappa/bugfix/node-sockets-lnd 2023-03-01 06:08:44 +04:00
nymkappa
7316d7d7e8 Merge branch 'master' into nymkappa/bugfix/ignore-too-low-lightning-timestamps 2023-03-01 11:08:43 +09:00
hunicus
7e093d912b Remove special availability note mechanism 2023-02-28 05:02:17 -05:00
hunicus
54f7e59978 Correct number of blocks returned for bulk-blocks 2023-02-28 05:02:10 -05:00
hunicus
852513500a Add example responses for blocks-bulk 2023-02-28 05:02:01 -05:00
hunicus
f057b07021 Add note for special availability
Indicating that the API endpoint is only available for enterprise sponsors.
2023-02-28 05:01:53 -05:00
hunicus
73b90fcd25 Add skeleton for blocks-bulk endpoint 2023-02-28 05:01:43 -05:00
hunicus
4bdad54bb2 Link api rate limit note to /enterprise 2023-02-28 04:54:31 -05:00
hunicus
174758bdd9 Specify networks in lightning network graph labels 2023-02-28 00:20:30 -05:00
wiz
b585a90abd Merge pull request #3171 from mempool/nymkappa/relative-path
Use relative path to import price service
2023-02-28 11:06:06 +09:00
nymkappa
7d8875eb73 Use relative path to import price service 2023-02-28 10:59:39 +09:00
softsimon
143344cb7e Merge pull request #3170 from mempool/simon/transifex-pull-2-27-02
Transifex pull (2)
2023-02-27 23:09:31 +04:00
softsimon
f471a85d1d Transifex pull (2) 2023-02-27 23:09:11 +04:00
nymkappa
76ae9d4ccb Wipe the disk cache since we have a new block structure 2023-02-27 19:06:46 +09:00
nymkappa
01d699e454 Add missing match rate to the block returned from the database 2023-02-27 18:39:02 +09:00
nymkappa
0aff276a5c Enforce BlockExtended use for block indexing - Unify /api/v1/block(s) API(s) response format 2023-02-27 18:00:00 +09:00
nymkappa
5792dee553 Use bitcoinApiFactory when we don't need verbose blocks or confirmation number 2023-02-27 11:46:37 +09:00
wiz
416ba77394 Merge pull request #3158 from mempool/nymkappa/bugfix/truncate-coinbase
Truncate `coinbase_signature` and `coinbase_signature_ascii` before insertion if needed
2023-02-27 10:09:21 +09:00
Felipe Knorr Kuhn
4cc0f9b5c7 Reorder triggers 2023-02-26 09:16:33 -08:00
Felipe Knorr Kuhn
0ce4c5fa4d Run Cypress tests on master after merging 2023-02-26 08:23:44 -08:00
softsimon
321499da88 Merge pull request #3130 from mempool/nymkappa/bugfix/lightning-channel-404
Improve error handling on channel component
2023-02-26 15:02:27 +04:00
nymkappa
5fba448dca Truncate coinbase data if it's too long 2023-02-26 18:24:08 +09:00
softsimon
32a0f4f6e0 Merge pull request #3102 from mempool/mononaut/block-health-calculation
Ignore coinbase transaction in block health calculation
2023-02-26 13:08:31 +04:00
wiz
86a0488bba Merge branch 'master' into mononaut/block-health-calculation 2023-02-26 17:39:50 +09:00
wiz
75faa2b45a Merge pull request #3150 from mempool/ops/update-warm-cache-new-pool-slugs
ops: Update nginx-cache-warmer for new pool slugs API
2023-02-26 16:20:51 +09:00
wiz
91dfd06015 Merge pull request #3154 from mempool/nymkappa/feature/new-reindex-command
Replace `--reindex=xxx,xxx` command line with `--reindex-blocks`
2023-02-26 15:55:49 +09:00
wiz
0b92b692c4 Merge pull request #3153 from mempool/simon/enable-advanced-gbt-mempool-production-config
Enable GBT mempool in all production configs
2023-02-26 15:54:54 +09:00
wiz
f1863596d8 Merge pull request #3149 from mempool/nymkappa/bugfix/update-missing-pools-config
Update missing "pools.json" -> "pools-v2.json"
2023-02-26 15:54:35 +09:00
wiz
a7f17f4299 Merge pull request #3155 from mempool/nymkappa/bugfix/fix-wrong-config-sample
AUTOMATIC_BLOCK_REINDEXING is `false` by default
2023-02-26 15:43:53 +09:00
nymkappa
955e216037 AUTOMATIC_BLOCK_REINDEXING is false by default 2023-02-26 15:41:55 +09:00
nymkappa
d938448fe9 Replace --reindex=xxx,xxx command line with --reindex-blocks 2023-02-26 15:28:50 +09:00
softsimon
bc3400ce75 Enable GBT mempool in all production configs 2023-02-26 10:13:25 +04:00
softsimon
c731eeba87 Merge pull request #3152 from mempool/simon/transifex-pull-26-02
Transifex pull 26/2
2023-02-26 09:46:38 +04:00
softsimon
51bc749415 Transifex pull 26/2 2023-02-26 09:46:21 +04:00
wiz
200da478d2 Merge pull request #3077 from mempool/nymkappa/feature/fix-mining-pool-auto-update
Log warning when pools are out of date
2023-02-26 14:29:49 +09:00
nymkappa
d0d2303513 Document --update-pools - Added some logs 2023-02-26 14:19:10 +09:00
wiz
25186ae6fa Merge pull request #3151 from knorrium/remove_node19_from_ci
Remove node 19 from the CI test matrix
2023-02-26 14:13:39 +09:00
Felipe Knorr Kuhn
7519eaf5d8 Remove node 19 from the CI test matrix 2023-02-25 21:06:34 -08:00
wiz
7ad207766b ops: Update nginx-cache-warmer for new pool slugs API 2023-02-26 13:54:45 +09:00
nymkappa
57fb305452 Update missing "pools.json" -> "pools-v2.json" 2023-02-26 13:54:43 +09:00
wiz
5981e52534 Merge branch 'master' into nymkappa/feature/align-dashboards 2023-02-26 13:07:35 +09:00
nymkappa
9a4a5ad94e Silence ENOENT exception when we wipe the nodejs backend cache 2023-02-26 11:37:57 +09:00
nymkappa
32733a3023 When we re-index blocks due to mining pools change, wipe the nodejs backend cache 2023-02-26 11:30:12 +09:00
softsimon
b6c7c02a2d Merge pull request #3143 from mempool/simon/missing-ln-i18n-strings
Fixing some i18n strings.
2023-02-25 17:14:30 +04:00
softsimon
f2e7dd51af Fixing some i18n strings. 2023-02-25 17:14:11 +04:00
softsimon
a721761d49 Merge pull request #3138 from mempool/simon/fix-duplicate-i18n-string-11
Fix for duplicate i18n strings
2023-02-25 14:28:32 +04:00
softsimon
6b49096fa9 Fix for duplicate i18n strings 2023-02-25 14:28:04 +04:00
softsimon
303f123c17 Merge pull request #3137 from mempool/simon/extract-i18n-feb-25
Extracting i18n with new faq disclaimer
2023-02-25 14:24:11 +04:00
softsimon
0b9c64483c Extracting i18n with new faq disclaimer 2023-02-25 14:23:54 +04:00
softsimon
a4b9397286 Merge pull request #3134 from hunicus/i18n-faq-disclaimer
Add i18n tag for big faq disclaimer
2023-02-25 14:22:39 +04:00
softsimon
c1faefe74e Merge pull request #3133 from mempool/simon/localizing-search-box-strings
Localizing search box strings
2023-02-25 14:19:31 +04:00
hunicus
ba1cc05979 Add i18n tag for big faq disclaimer 2023-02-25 05:04:28 -05:00
softsimon
d3b681dca2 Localizing search box strings, channel tags
fixes #3124
fixes #3128
fixes #3083
2023-02-25 13:50:41 +04:00
hunicus
b7425dc339 Add anchor links for about page sections 2023-02-25 04:37:30 -05:00
wiz
619c4fdfb1 Merge pull request #3132 from hunicus/copyright-2023 2023-02-25 18:36:13 +09:00
hunicus
333aef5e94 Update legal notices for 2023 2023-02-25 04:34:09 -05:00
nymkappa
6d1e6a92ad [LND] Nullify zeroed timestamps 2023-02-25 18:30:29 +09:00
wiz
b7e6b6da13 Merge pull request #2869 from mempool/nymkappa/feature/rewrite-pool-parser
Rewrite mining pools parser
2023-02-25 17:50:19 +09:00
nymkappa
9395a5031e Log the whole exception in pool parser 2023-02-25 17:12:50 +09:00
nymkappa
2363a397f1 Remove duplicated db transaction 2023-02-25 17:05:58 +09:00
nymkappa
3d38064dbb Increase db schema version to 56 2023-02-25 16:48:11 +09:00
nymkappa
ad9e42db26 Use regexes instead of tags 2023-02-25 16:32:03 +09:00
nymkappa
c2f5cb9529 Update pool parser to work with no database 2023-02-25 16:32:03 +09:00
nymkappa
6cd42cfc73 Update missing POOLS_JSON_URL config 2023-02-25 16:32:03 +09:00
nymkappa
117aa1375d Disable mining pools update if AUTOMATIC_BLOCK_REINDEXING is not set - Re-index unknown blocks when a new pool is added 2023-02-25 16:32:03 +09:00
nymkappa
d87fb04a92 Point to the new mining pool files pools-v2.json 2023-02-25 16:32:02 +09:00
nymkappa
d3fdef256c Rewrite mining pools parser - Re-index blocks table 2023-02-25 16:31:47 +09:00
wiz
e8ffd4335f Merge pull request #3070 from mempool/nymkappa/feature/new-blocks-api
Index coinstatsindex - Add bulk block query api
2023-02-25 16:12:07 +09:00
nymkappa
210f939e65 Add missing truncate blocks table 2023-02-25 14:22:17 +09:00
nymkappa
8d9568016e Remove duplicated entry in backend/src/__fixtures__/mempool-config.template.json 2023-02-25 14:22:17 +09:00
nymkappa
5d7c9f9315 Add config.MEMPOOL.MAX_BLOCKS_BULK_QUERY parameter (defaults to 0, API disabled) 2023-02-25 14:22:17 +09:00
nymkappa
ad4cbd60d5 Do not download orphaned block if headers-only 2023-02-25 14:22:17 +09:00
nymkappa
822362c105 Increase cache schema version 2023-02-25 14:22:16 +09:00
nymkappa
6c3a273e75 Enabled coinstatsindex=1 2023-02-25 14:22:16 +09:00
nymkappa
ed8cf89fee Format percentiles in a more verbose way 2023-02-25 14:22:16 +09:00
nymkappa
e19db4ae35 Add missing coinbase_signature_ascii 2023-02-25 14:22:16 +09:00
nymkappa
aa1114926c previousblockhash -> previous_block_hash 2023-02-25 14:22:15 +09:00
nymkappa
0bf4d52183 Return zeroed out fee_amt_percentiles if there is no transaction 2023-02-25 14:22:15 +09:00
nymkappa
a0488dba76 Cleanup block before sending response in /blocks-bulk API
Remove block_time
Index summaries on the fly
2023-02-25 14:22:15 +09:00
nymkappa
086ee68b52 Remove block_time from indexed fields 2023-02-25 14:22:15 +09:00
nymkappa
75a99568bf Index coinbase signature in ascii 2023-02-25 14:22:14 +09:00
nymkappa
b2eaa7efb1 Fix fee percentiles indexing 2023-02-25 14:22:14 +09:00
nymkappa
eceedf0bdf Don't compute fee percentile / median fee when indexing is disabled because we need summaries 2023-02-25 14:22:14 +09:00
nymkappa
6965c8f41b Fix median time indexing 2023-02-25 14:22:14 +09:00
nymkappa
e2fe39f241 Wrap orphaned blocks updater into try/catch 2023-02-25 14:22:13 +09:00
nymkappa
281899f551 List orphaned blocks in the new blocks-bulk API 2023-02-25 14:22:13 +09:00
nymkappa
458f24c9f2 Compute median fee and fee percentiles in sats 2023-02-25 14:22:13 +09:00
nymkappa
8f716a1d8c Fix median timestamp field - Fix response format when block is indexed on the fly 2023-02-25 14:22:13 +09:00
nymkappa
8612dd2d73 Remove unnecessary data from the blocks-bulk API 2023-02-25 14:22:12 +09:00
nymkappa
73f76474dd Implemented coinstatsindex indexing 2023-02-25 14:22:12 +09:00
nymkappa
c44896f53e Get block data set in bulk (non-indexed) 2023-02-25 14:22:11 +09:00
nymkappa
9a246c68de Center wrapping error message on mobile 2023-02-25 13:43:48 +09:00
nymkappa
8df2476266 Improve error handling on channel component 2023-02-25 13:38:09 +09:00
nymkappa
80e0ef8970 Improve responsiveness on single column layout 2023-02-25 13:20:49 +09:00
wiz
58eb6ccc8e Merge pull request #3126 from mempool/ops/enable-cpfp-indexing
ops: Enable CPFP indexing for mainnet
2023-02-25 12:14:49 +09:00
wiz
b50e973573 ops: Enable CPFP indexing for mainnet 2023-02-25 12:14:07 +09:00
softsimon
56789532ed Merge pull request #3025 from mempool/mononaut/offline-loading-blocks
Handling for network interruptions in scrollable blockchain
2023-02-24 17:32:27 +04:00
nymkappa
92862939da Make sure we don't show more than 6 rows in channel ranking widget 2023-02-24 20:25:28 +09:00
nymkappa
98e709b739 Remove monospace from fiat amount 2023-02-24 17:49:06 +09:00
nymkappa
7cee6df369 Remove console.log 2023-02-24 17:47:33 +09:00
wiz
ab6219a828 Merge branch 'master' into nymkappa/feature/align-dashboards 2023-02-24 17:10:57 +09:00
wiz
7d3c7c3f45 Merge pull request #3119 from mempool/nymkappa/bugfix/historical-price-bitcoin-only
Only query historical price if we're running mempool BASE_MODULE
2023-02-24 17:10:22 +09:00
wiz
20404cf6a0 Merge branch 'master' into nymkappa/bugfix/historical-price-bitcoin-only 2023-02-24 16:29:42 +09:00
softsimon
ddd8109187 Merge pull request #3109 from mempool/nymkappa/bugfix/qr-code-border
Add border to qr code because it's a best practice
2023-02-24 10:57:06 +04:00
nymkappa
a26dc977ba Hide new columns when screen width is too small 2023-02-24 14:20:16 +09:00
wiz
4921ea1a2b Merge pull request #3118 from mempool/wiz/fix-db-migration-54-liquid
Fix DB migration 54 breaking liquid
2023-02-24 13:56:56 +09:00
nymkappa
b4d0e20d75 Only query historical price if we're running mempool BASE_MODULE 2023-02-24 12:12:50 +09:00
nymkappa
4d7c69dd73 Fix DB migration 54 breaking liquid 2023-02-24 10:41:17 +09:00
nymkappa
6f68c1666f Add border input in the qr code component 2023-02-24 09:55:48 +09:00
Mononaut
c65674479a fix gaps in loading blockchain 2023-02-23 14:31:01 -06:00
Mononaut
ee265be55e Show skeleton loader for all blocks while offline 2023-02-23 13:54:06 -06:00
Mononaut
8eca1e5f7e Handle network interruptions in scrollable blockchain 2023-02-23 13:52:08 -06:00
wiz
da3446f522 Merge pull request #3084 from knorrium/update_ci_node
Update node v16, LTS and current for CI
2023-02-23 22:31:33 +09:00
wiz
676f6ff2ef Merge pull request #3117 from hunicus/translator-imgs
Fit translator avatars neatly on 2 lines
2023-02-23 22:15:47 +09:00
wiz
de93a0c53e Merge branch 'master' into translator-imgs 2023-02-23 21:53:37 +09:00
wiz
d74429e359 Merge pull request #3111 from mempool/nymkappa/bugfix/optimize-price-frontend
Redo/Fix completely failed PR #3092 + add PR #3105
2023-02-23 21:49:56 +09:00
hunicus
bb5fd4b1b1 Fit translator avatars neatly on 2 lines 2023-02-23 07:39:57 -05:00
wiz
d2ea9215f0 Merge branch 'master' into nymkappa/bugfix/optimize-price-frontend 2023-02-23 21:32:58 +09:00
wiz
7cc0622702 Merge pull request #3115 from mempool/nymkappa/bugfix/fix-ln-label-y
Fix lightning widgets layout
2023-02-23 21:32:34 +09:00
wiz
b147898823 Merge branch 'master' into nymkappa/bugfix/fix-ln-label-y 2023-02-23 20:44:54 +09:00
softsimon
a7fb62ce4a Merge pull request #3007 from mempool/mononaut/mobile-chain-jumping
Fix blockchain scroll jumping on resize on mobile
2023-02-23 15:09:10 +04:00
softsimon
0147af1325 Merge branch 'master' into mononaut/mobile-chain-jumping 2023-02-23 14:55:41 +04:00
softsimon
aa9ebc277d Merge pull request #3113 from mempool/nymkappa/bugfix/usd-charts-only
Show only USD in block fees/rewards charts
2023-02-23 14:50:32 +04:00
wiz
4e55f5323e Merge branch 'master' into nymkappa/feature/align-dashboards 2023-02-23 19:18:09 +09:00
wiz
6cd946c8bb Merge pull request #3101 from mempool/mononaut/flow-diagram-alignment
pixel-perfect flow diagrams
2023-02-23 19:17:55 +09:00
nymkappa
7e913e4d34 Show geolocation in node channels ranking widget 2023-02-23 18:57:55 +09:00
nymkappa
f9fe096669 Unsubscribe priceSubscription onDestroy 2023-02-23 18:43:32 +09:00
nymkappa
58f886b337 If we don't have a price for "single price" query then return empty price 2023-02-23 18:40:13 +09:00
nymkappa
50c3f83484 Fix design for node channels ranking 2023-02-23 18:36:13 +09:00
nymkappa
51ac04f207 Fix design for node liquidity ranking 2023-02-23 18:19:47 +09:00
wiz
aa561773ee Merge branch 'master' into mononaut/flow-diagram-alignment 2023-02-23 16:12:01 +09:00
nymkappa
ffe02c2509 Fix lightning chart widget layout 2023-02-23 16:01:56 +09:00
wiz
7929500dd2 Merge branch 'master' into mononaut/block-health-calculation 2023-02-23 15:27:04 +09:00
wiz
1b92099004 Merge pull request #3068 from mempool/mononaut/raise-memory-limits
Raise production memory limits
2023-02-23 15:26:53 +09:00
nymkappa
ee54e782f8 Only display reward and block fee charts in USD due to missing historical data 2023-02-23 15:13:30 +09:00
nymkappa
2d03332333 Add missing db schema incrementation 2023-02-23 14:52:29 +09:00
nymkappa
3f95d094a3 Merge branch 'master' into nymkappa/bugfix/optimize-price-frontend 2023-02-23 14:51:46 +09:00
nymkappa
5cfd715d4a Add database migration to re-index prices with negative values support 2023-02-23 14:50:20 +09:00
wiz
d487c78ae1 Merge pull request #3100 from mempool/simon/alias-search-syntax-error
Fixes a syntax error with certain keywords
2023-02-23 13:45:31 +09:00
wiz
10bfb51215 Add simon's comment to $searchNodeByPublicKeyOrAlias() 2023-02-23 13:42:54 +09:00
nymkappa
5b5de95828 Show historical price on block overview graph 2023-02-23 13:34:48 +09:00
wiz
03be3ab561 Merge branch 'master' into simon/alias-search-syntax-error 2023-02-23 13:20:35 +09:00
wiz
0851dad0b2 Merge pull request #3103 from mempool/mononaut/db-stats-overflow
increase size of mempool_byte_weight db column
2023-02-23 13:20:22 +09:00
nymkappa
5749820999 Optimize price API response size - reduce the number of queries to that API 2023-02-23 13:13:20 +09:00
wiz
ba2aa0456c Merge branch 'master' into mononaut/block-health-calculation 2023-02-23 13:11:00 +09:00
wiz
0ef259d408 Merge branch 'master' into mononaut/db-stats-overflow 2023-02-23 13:09:38 +09:00
wiz
22ad06a94c Merge pull request #3106 from mempool/nymkappa/bugfix/unknown-pool-yellow
Unknown pool color #FDD835
2023-02-23 12:55:28 +09:00
wiz
ea74a737ff Merge branch 'master' into nymkappa/bugfix/unknown-pool-yellow 2023-02-23 12:25:33 +09:00
nymkappa
f6c7839524 Show historical price on transaction bowtie chart 2023-02-23 10:46:18 +09:00
nymkappa
62e1fa03c1 Cache price API for 5 minutes 2023-02-23 10:04:31 +09:00
nymkappa
f44eacd5d5 Redo/Fix completely failed PR #3092 + add PR #3105 2023-02-23 09:50:34 +09:00
softsimon
6e7ed29caa Merge pull request #3110 from mempool/nymkappa/bugfix/missing-price-unconfirmed-tx
Fix 'NaN' price for unconfirmed transaction since we have no block timestamp
2023-02-22 15:48:05 +04:00
nymkappa
2246a6f3ce Fix 'NaN' price for unconfirmed transaction since we have no block timestamp 2023-02-22 20:42:32 +09:00
wiz
4b2083e756 Merge branch 'master' into simon/alias-search-syntax-error 2023-02-22 15:43:50 +09:00
wiz
4203972a49 Merge branch 'master' into mononaut/raise-memory-limits 2023-02-22 15:29:17 +09:00
softsimon
b6792784e8 Adding regex comments. 2023-02-22 10:19:32 +04:00
nymkappa
437350aaff Unknown pool color #FDD835 2023-02-22 14:06:08 +09:00
Mononaut
32b38e6cd1 increase size of mempool_byte_weight db column 2023-02-21 22:07:12 -06:00
Mononaut
2ff930ef3e Ignore coinbase tx in block health calculation 2023-02-21 22:01:30 -06:00
Mononaut
e8d54f254b pixel-perfect flow diagrams 2023-02-21 21:59:31 -06:00
wiz
00cbb5d0d8 Merge pull request #3076 from mempool/nymkappa/bugfix/channel-geo
Fix map not being updated when switching between channel pages
2023-02-22 10:42:38 +09:00
wiz
ef8a1c7393 Merge branch 'master' into nymkappa/bugfix/channel-geo 2023-02-22 10:32:52 +09:00
wiz
c1a3b5f045 Merge branch 'master' into simon/alias-search-syntax-error 2023-02-22 10:13:44 +09:00
wiz
e3603d9ce9 Merge pull request #3099 from mempool/nymkappa/bugfix/chart-all
Fix timespan UX in mining charts
2023-02-22 10:12:55 +09:00
wiz
680f7d39cf Merge branch 'master' into nymkappa/bugfix/chart-all 2023-02-22 09:58:33 +09:00
wiz
70113d9c91 Merge pull request #3092 from mempool/nymkappa/feature/historical-price
Use historical price in older blocks / transactions
2023-02-22 09:58:25 +09:00
wiz
70d96583fb Merge branch 'master' into nymkappa/feature/historical-price 2023-02-22 08:23:36 +09:00
wiz
6beab1f8c1 Merge pull request #3098 from mempool/nymkappa/bugfix/pool-hashrate-history-colors
Use mempool chart color palette on hashrate history
2023-02-22 08:23:11 +09:00
wiz
2bfe192324 Merge branch 'master' into nymkappa/bugfix/pool-hashrate-history-colors 2023-02-22 08:05:48 +09:00
softsimon
8f51e20b2e Fixes a syntax error with certain keywords 2023-02-21 17:14:51 +04:00
nymkappa
05594675c0 Disable timespan controls while isLoading 2023-02-21 18:48:09 +09:00
nymkappa
cf1bf9f0c5 Add missing formControl name 2023-02-21 18:47:27 +09:00
wiz
5bcd54b808 Merge pull request #3097 from mempool/ops/smooth-out-warm-cache-requests
ops: Add 250ms delay between warm cache requests
2023-02-21 18:42:28 +09:00
wiz
952e540d65 ops: Add 250ms delay between warm cache requests 2023-02-21 18:41:12 +09:00
wiz
0e5a4df978 Merge pull request #3096 from mempool/nymkappa/bugfix/liquid-scroll
Fix liquid infinite scrolling
2023-02-21 18:23:36 +09:00
nymkappa
b2162130d9 Keep block alignment if fee/fee-range is missing 2023-02-21 18:09:41 +09:00
nymkappa
de117e30f2 Use mempool chart color palette on hashrate history 2023-02-21 17:51:46 +09:00
nymkappa
c949fee49f Fix liquid infinite scrolling 2023-02-21 17:25:39 +09:00
nymkappa
3c94755a69 Use historical price for older blocks and transactions 2023-02-21 12:37:27 +09:00
wiz
994b31527b Merge pull request #3091 from mempool/nymkappa/feature/avg-block-health-pool
Show average block health in pool ranking
2023-02-20 18:33:33 +09:00
nymkappa
14be0fc547 Hide pool share on mobile in pool ranking 2023-02-20 18:14:12 +09:00
nymkappa
0dc2a598c3 Show avg block health in pool ranking pie chart 2023-02-20 18:14:12 +09:00

nymkappa
e3e7271c9d Add avg mining pool block match rate in pools stats API 2023-02-20 18:14:12 +09:00
wiz
e2585da7aa Merge pull request #3081 from mempool/simon/remove-included-in-block
Remove included in block
2023-02-20 17:45:54 +09:00
wiz
df578f64ee Merge branch 'master' into simon/remove-included-in-block 2023-02-20 17:28:05 +09:00
wiz
ac22a10e06 Merge pull request #3090 from mempool/ops/fix-typo-in-mainnet-lightning-backend-config
ops: Fix typo in mainnet lightning backend config
2023-02-20 16:10:24 +09:00
wiz
85251fcd5c ops: Fix typo in mainnet lightning backend config 2023-02-20 16:09:40 +09:00
wiz
f3ef1e7a53 Merge pull request #3089 from mempool/ops/fix-build-script-credentials-typo
ops: Fix another typo in build script credentials sed
2023-02-20 15:59:08 +09:00
wiz
ca15887e4b ops: Fix another typo in build script credentials sed 2023-02-20 15:58:07 +09:00
Felipe Knorr Kuhn
0e0acff00f Merge branch 'master' into mononaut/mobile-chain-jumping 2023-02-19 14:00:10 -08:00
Felipe Knorr Kuhn
89319d9117 Merge branch 'master' into nymkappa/bugfix/node-sockets-lnd 2023-02-19 13:56:48 -08:00
Felipe Knorr Kuhn
c260b4f0f2 Merge branch 'master' into mononaut/raise-memory-limits 2023-02-19 13:56:28 -08:00
Felipe Knorr Kuhn
6a285064af Merge branch 'master' into mononaut/optimize-mempool-block-7 2023-02-19 13:56:18 -08:00
Felipe Knorr Kuhn
250f07732f Merge branch 'master' into nymkappa/bugfix/channel-geo 2023-02-19 13:56:11 -08:00
Felipe Knorr Kuhn
e0f149550a Update node v16, LTS and current for CI 2023-02-19 13:50:12 -08:00
softsimon
ee6bdeec66 Merge pull request #3080 from mempool/nymkappa/bugfix/invalid-db-use
Fix database usage when database is disabled
2023-02-19 21:08:40 +07:00
softsimon
209d5a57c7 Merge pull request #3079 from mempool/nymkappa/bugfix/layout-fix
Only add clearfix on block list widget
2023-02-19 20:41:15 +07:00
nymkappa
66d52c9773 Update blocks-list.component.html 2023-02-19 20:43:51 +09:00
nymkappa
c1fc60e61e Wrap duplicate ngIf into a ng-template 2023-02-19 19:30:55 +09:00
softsimon
44865d18ae Remove included in block 2023-02-19 17:24:43 +07:00
nymkappa
761edbce9a Fix database usage when database is disabled 2023-02-19 19:08:29 +09:00
softsimon
f6800b848a Merge pull request #3048 from knorrium/update_mainnet_tests
Update mainnet tests
2023-02-19 16:44:40 +07:00
wiz
5a5e81c731 Merge pull request #3078 from mempool/nymkappa/bugfix/pegapool-color
Remove hardcoded mining pool colors as they're not relevant
2023-02-19 18:27:54 +09:00
wiz
9c02d6a276 Merge branch 'master' into update_mainnet_tests 2023-02-19 18:26:11 +09:00
nymkappa
6229708dfd Fixes #3034 2023-02-19 17:36:09 +09:00
nymkappa
e2eccf6e85 Remove hardcoded mining pool colors as they're not relevant 2023-02-19 17:14:49 +09:00
wiz
b2621e7c27 Merge pull request #3064 from mempool/nymkappa/bugfix/remove-unused-config-setting
Remove `config.MEMPOOL.PRICE_FEED_UPDATE_INTERVAL`
2023-02-19 16:28:39 +09:00
Felipe Knorr Kuhn
3d1d29bd3b Merge branch 'master' into nymkappa/bugfix/remove-unused-config-setting 2023-02-18 18:43:04 -08:00
wiz
bc4d21577d Merge pull request #3041 from mempool/nymkappa/update-readme
Add `--reindex` doc to backend README
2023-02-19 10:23:59 +09:00
Felipe Knorr Kuhn
95b029ee7b Merge branch 'master' into update_mainnet_tests 2023-02-18 15:36:20 -08:00
softsimon
330aba0985 Merge pull request #3066 from antonilol/sizeperweight-graph
Add size per weight graph and ts type for `getHistoricalBlockSizesAndWeights`
2023-02-18 23:57:06 +07:00
softsimon
83c248e4b6 Merge pull request #3022 from mempool/mononaut/esplora-keepalive
Reuse HTTP connections for esplora backend requests
2023-02-18 21:34:28 +07:00
Mononaut
7f54e30a26 Reuse HTTP connections to esplora backend 2023-02-18 21:34:09 +07:00
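Connection reuse like this typically means attaching a keep-alive agent to the HTTP client. A minimal sketch assuming an axios client and a local esplora instance (the base URL and function name are placeholders):

```ts
import * as http from 'http';
import axios from 'axios';

// Keep-alive agent: reuse TCP connections instead of opening one per request.
const agent = new http.Agent({ keepAlive: true });
const esplora = axios.create({
  baseURL: 'http://127.0.0.1:3000', // placeholder esplora endpoint
  httpAgent: agent,
});

async function getBlockTxids(hash: string): Promise<string[]> {
  const { data } = await esplora.get<string[]>(`/block/${hash}/txids`);
  return data;
}
```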
softsimon
65875c3ced Merge pull request #3009 from mempool/update-cpfp-faq-mined
Update cpfp faq for stored relationships
2023-02-18 20:44:15 +07:00
softsimon
83e80b90d7 Merge branch 'master' into update_mainnet_tests 2023-02-18 19:45:50 +07:00
softsimon
9b6537eb4e Merge pull request #3063 from mempool/mononaut/liquid-address-table
fix liquid address table overflow
2023-02-18 19:41:43 +07:00
nymkappa
224613e5ee Fix channel map not being updated (racing condition) 2023-02-18 21:20:22 +09:00
wiz
177fba2178 Merge branch 'master' into mononaut/liquid-address-table 2023-02-18 20:34:59 +09:00
softsimon
83fc60d6ee Merge pull request #2770 from mempool/nymkappa/bugfix/price-update-invalid-response
Make sure exchange API response format is valid before using it
2023-02-18 17:05:48 +07:00
softsimon
71b373463b Merge branch 'master' into nymkappa/bugfix/price-update-invalid-response 2023-02-18 16:59:11 +07:00
softsimon
3f0bcbe64c Merge pull request #3043 from mempool/nymkappa/bugfix/blocks-api
Fixes blocks api (missing fee range)
2023-02-18 15:26:08 +07:00
wiz
17c16ba4d5 Merge pull request #3075 from mempool/wiz/fix-mempool-build-script-lightning-variables
ops: Fix mempool build script lightning variables
2023-02-18 15:26:29 +09:00
wiz
e7a19dfe2f ops: Fix mempool build script lightning variables 2023-02-18 15:23:02 +09:00
wiz
508f8119d3 Merge pull request #3074 from mempool/ops/fix-lightning-mysql-credentials
ops: Fix lightning mysql credentials usage
2023-02-18 12:39:02 +09:00
wiz
87cda325a5 ops: Fix lightning mysql credentials usage 2023-02-18 12:37:06 +09:00
wiz
8e2ff1242c Merge pull request #3073 from mempool/simon/transifex-migration
Migrated transifex and fetched latest translations
2023-02-18 12:25:36 +09:00
softsimon
2993a286b8 Migrated transifex and fetched latest translations 2023-02-18 09:39:32 +07:00
Mononaut
59f08247ef Reduce data sent to mempool block 7 subscription 2023-02-17 19:11:12 -06:00
Mononaut
77686821ba Raise production memory limits
Increases the production bitcoin.conf maxmempool setting to 2GB,
and raises the `npm run start-production` Node.js memory limit to 8GB
2023-02-17 10:12:33 -06:00
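For reference, a change like this usually comes down to two one-line settings. The snippet below just mirrors the values named in the commit body and is illustrative, not the actual diff:

```
# bitcoin.conf: raise the mempool size cap to 2GB
maxmempool=2000

# start-production: raise the Node.js heap limit to 8GB (entry point is a placeholder)
node --max-old-space-size=8192 dist/index.js
```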
Antoni Spaanderman
04d8926ef5 fix missing quote 2023-02-17 15:47:32 +01:00
Antoni Spaanderman
565e572b88 Update frontend/src/app/components/block-sizes-weights-graph/block-sizes-weights-graph.component.ts
change size per weight graph color

Co-authored-by: nymkappa <9780671+nymkappa@users.noreply.github.com>
2023-02-17 15:45:15 +01:00
Antoni Spaanderman
a897aebbc1 add size per weight graph and ts type for getHistoricalBlockSizesAndWeights 2023-02-16 15:39:09 +01:00
nymkappa
ce012b7d55 Remove config.MEMPOOL.PRICE_FEED_UPDATE_INTERVAL 2023-02-16 09:44:52 +09:00
wiz
f040c85fe9 Merge branch 'master' into mononaut/liquid-address-table 2023-02-16 05:20:41 +09:00
wiz
ae9f98f26c Merge pull request #3059 from hunicus/shrink-og-avatar
Shrink og avatars to make group more square
2023-02-16 05:04:37 +09:00
wiz
40decd1556 Merge branch 'master' into shrink-og-avatar 2023-02-16 04:56:04 +09:00
Mononaut
cc5873f995 fix liquid address table overflow 2023-02-15 09:26:12 -06:00
wiz
2845a226f2 Merge pull request #3062 from mempool/nymkappa/bugfix/bisq-no-db-price
Fix database usage when database is disabled
2023-02-15 18:21:26 +09:00
wiz
1eb68962d8 Merge branch 'master' into nymkappa/bugfix/bisq-no-db-price 2023-02-15 18:09:20 +09:00
nymkappa
408b0a23ce Disable fiat display on pools blocks and ln channel component 2023-02-15 18:06:22 +09:00
nymkappa
9734052477 Fix database usage when database is disabled 2023-02-15 17:45:29 +09:00
nymkappa
5e10b75b87 Fix lightning dashboard btc/fiat UI issue 2023-02-15 17:33:30 +09:00
nymkappa
3beb85dd4a Add footer to all dashboards 2023-02-15 16:41:09 +09:00
hunicus
774f7630ce Remove colon from block overview table 2023-02-15 16:28:29 +09:00
hunicus
215e92d33e Switch audit faq conditions to env.audit
From OFFICIAL_MEMPOOL_SPACE.
2023-02-15 16:28:29 +09:00
Mononaut
c923a4bc22 simplify audit availability logic 2023-02-15 16:28:29 +09:00
Mononaut
2363631326 Add audit / block health config feature flag 2023-02-15 16:28:29 +09:00
nymkappa
28bd813fb8 Show correct currency label in 'Latest transactions' widget 2023-02-15 16:22:06 +09:00
nymkappa
6c0dc34dd6 Run database migration before running any business logic 2023-02-15 16:13:10 +09:00
nymkappa
32aa7aaff1 Remove bisq price fetch and replace it with our in house price index 2023-02-15 16:05:14 +09:00
hunicus
6f650e936d Shrink og avatars to make group more square 2023-02-15 01:49:11 -05:00
wiz
b3a5d1a8fc Merge pull request #3058 from hunicus/remove-colon
Remove colon from block overview table
2023-02-15 15:39:05 +09:00
wiz
1b51193cdf Merge pull request #3039 from mempool/mononaut/audit-feature-flag
Add audit / block health config feature flag
2023-02-15 15:36:54 +09:00
hunicus
27a9622875 Switch audit faq conditions to env.audit
From OFFICIAL_MEMPOOL_SPACE.
2023-02-15 01:27:43 -05:00
nymkappa
fddbf51084 Only show supported currencies - Tweak UI 2023-02-15 15:01:07 +09:00
hunicus
9f1a6ad31d Remove colon from block overview table 2023-02-15 00:56:02 -05:00
wiz
fc216819d1 Merge branch 'master' into update_mainnet_tests 2023-02-15 12:15:21 +09:00
wiz
e009c78c3e Merge branch 'master' into mononaut/fiat-selector 2023-02-15 12:10:07 +09:00
Mononaut
b4c30cad5c simplify audit availability logic 2023-02-14 12:32:30 -06:00
Mononaut
8f2255a7a2 Add audit / block health config feature flag 2023-02-14 12:32:30 -06:00
nymkappa
56dad33fce Always return fully extended block in blocks API even if indexing is disabled 2023-02-14 22:14:28 +09:00
softsimon
14a794d9cc Merge pull request #3055 from mempool/simon/correcting-i18n-strings-health
Updating a few i18n strings
2023-02-14 17:25:12 +07:00
softsimon
014113bb33 Updating a few i18n strings 2023-02-14 17:24:48 +07:00
softsimon
bba9cb4a58 Merge pull request #3054 from mempool/simon/extracting-i18n-14-02
Extracting i18n
2023-02-14 17:20:27 +07:00
softsimon
763b59ce9e Extracting i18n 2023-02-14 17:20:01 +07:00
wiz
49754745d8 Merge pull request #3053 from hunicus/info-icon-audit
Add info icon on audit linking to audit faq
2023-02-14 18:42:10 +09:00
wiz
746b44d973 Merge branch 'master' into info-icon-audit 2023-02-14 18:33:55 +09:00
hunicus
b0b7546332 Make audit info icon look better 2023-02-14 04:32:15 -05:00
wiz
bc22098679 Merge branch 'master' into update_mainnet_tests 2023-02-14 17:46:05 +09:00
wiz
2b60ba0f38 Merge pull request #3049 from knorrium/update_staging_hosts
Update staging hosts to fra
2023-02-14 17:42:14 +09:00
wiz
b2329df81a Merge pull request #3047 from hunicus/projected-to-expected
Change 'projected' to 'expected' for block audit label
2023-02-14 17:41:50 +09:00
hunicus
491f2c6280 Add info icon on audit linking to audit faq 2023-02-14 03:40:00 -05:00
wiz
cbbb36b030 Merge branch 'master' into projected-to-expected 2023-02-14 17:34:18 +09:00
softsimon
52824abf00 Merge pull request #3045 from hunicus/health-audit-faq
Add block audit faq
2023-02-14 15:15:11 +07:00
hunicus
7aca045080 Fix error in logic for official_mempool_space 2023-02-14 03:01:37 -05:00
hunicus
33d7a0af60 Edit i18n attributes 2023-02-14 02:07:44 -05:00
softsimon
0e4bebb870 Merge pull request #3016 from Arooba-git/fix-subscription-memory-leak
Fix subscription memory leak
2023-02-14 13:37:00 +07:00
Felipe Knorr Kuhn
b817ee9e5c Update staging hosts to fra 2023-02-13 18:22:53 -08:00
hunicus
d8ebc5a92c Only show audit and health faqs on official 2023-02-13 20:22:51 -05:00
Felipe Knorr Kuhn
635fadd13f Update mainnet tests: increase blocks to 22, update locators and skip a test 2023-02-13 16:58:25 -08:00
Felipe Knorr Kuhn
771825f224 Add offset to blockchain blocks classes and locators 2023-02-13 16:57:39 -08:00
hunicus
38ce8b8dc1 Add note about audit/health availability
Not available on non-official instances.
2023-02-13 19:39:46 -05:00
hunicus
1c7698eb36 Remove repeated line 2023-02-13 18:45:29 -05:00
hunicus
610597b687 Change 'projected' to 'expected' for block audit 2023-02-13 18:34:09 -05:00
hunicus
198f85af2d Add block audit faq 2023-02-13 06:31:25 -05:00
softsimon
2b5442783e Merge branch 'master' into nymkappa/bugfix/blocks-api 2023-02-13 16:37:13 +07:00
softsimon
0148a5f489 Merge pull request #3038 from mempool/mononaut/reset-block-scroll
Fix firefox blockchain scroll reset bug
2023-02-13 16:35:09 +07:00
nymkappa
1fc6e13bf8 Fix node socket parsing with LND 2023-02-13 18:01:15 +09:00
nymkappa
cf720c4bef Fix missing fee range in blocks api when querying non indexed blocks 2023-02-13 15:50:22 +09:00
nymkappa
0176afa394 Add --reindex doc to backend README 2023-02-13 14:58:35 +09:00
Mononaut
c85d8cd29d Fix firefox blockchain scroll reset bug 2023-02-12 21:42:33 -06:00
softsimon
cf0897e27c Merge pull request #3028 from AlexLloyd0/patch-1
Add contributor agreement for AlexLloyd0
2023-02-11 20:36:17 +07:00
wiz
fd16f5c4c3 Merge pull request #3030 from mempool/mononaut/hotfix-gbt-mempool-clone
safer mempool cloning for different GBT algorithms
2023-02-11 11:31:48 +09:00
Mononaut
8f3b8276c5 safer mempool cloning for different GBT algorithms 2023-02-10 07:43:39 -06:00
Alex Lloyd
133044ab8e Create AlexLloyd0.txt
Contributing in https://github.com/mempool/mempool.js/pull/79
2023-02-09 16:27:36 +01:00
Aroooba
d66921938c Unsubscribe subscription in component destructor to avoid memory leak 2023-02-07 05:14:07 +09:00
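The memory-leak fix above is the standard Angular pattern of releasing RxJS subscriptions in `ngOnDestroy`. A minimal self-contained example (the component and its stream are made up for illustration):

```ts
import { Component, OnDestroy, OnInit } from '@angular/core';
import { Subscription, interval } from 'rxjs';

@Component({ selector: 'app-ticker', template: '{{ tick }}' })
export class TickerComponent implements OnInit, OnDestroy {
  tick = 0;
  private sub?: Subscription;

  ngOnInit(): void {
    this.sub = interval(1000).subscribe((n) => (this.tick = n));
  }

  ngOnDestroy(): void {
    this.sub?.unsubscribe(); // without this, the stream outlives the component
  }
}
```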
Aroooba
a67a074eaf Add contributor license agreement 2023-02-07 05:14:00 +09:00
softsimon
241850beca Merge pull request #3010 from mempool/mononaut/cpfp-error-handling
More robust error checking & handling in CPFP repositories
2023-02-04 09:18:10 +07:00
softsimon
8d2856dc84 Merge pull request #2999 from mempool/mononaut/next-block-drift
fix block visualization getting out of sync
2023-02-04 07:39:38 +07:00
wiz
74c5e4af90 Merge branch 'master' into mononaut/next-block-drift 2023-02-04 00:03:16 +09:00
Mononaut
900e66aef7 More robust error checking & handling in CPFP repositories 2023-02-02 17:37:32 -06:00
hunicus
b269e99cfb Update cpfp faq for stored relationships 2023-02-02 14:23:08 -05:00
Mononaut
9f15e9a1d7 Fix blockchain scroll jumping on resize on mobile 2023-02-01 11:52:00 -06:00
Mononaut
368041e7d4 Use selected currency in app-amount component 2023-02-01 10:25:01 -06:00
Mononaut
d06dcdccb4 Display fiat mining graphs in selected currency 2023-02-01 10:25:00 -06:00
Mononaut
c2ff6a996a Multi-currency fiat formatting pipes & components 2023-02-01 10:25:00 -06:00
Mononaut
02655d757e Add currency preference to dashboard 2023-02-01 10:24:57 -06:00
Mononaut
aa8a3e60c2 expose other currencies in charts APIs 2023-02-01 10:23:55 -06:00
wiz
69f5d483ee Merge pull request #2993 from mempool/simon/credit-nepalese-translator
Credit nepalese translator
2023-02-01 16:51:57 +09:00
wiz
c45bc0a9eb Merge pull request #3001 from mempool/hunicus/block-health-faq
Add block health faq
2023-02-01 15:34:46 +09:00
hunicus
3f0cbcab63 Add 'removed' so meaning of r is more clear 2023-01-31 10:50:16 -05:00
hunicus
e3c1343a24 Remove merge conflict cruft 2023-01-31 10:30:16 -05:00
hunicus
7ce67f7ca5 Merge branch 'master' into hunicus/block-health-faq 2023-02-01 00:19:28 +09:00
wiz
b2251d99c9 Merge branch 'master' into mononaut/next-block-drift 2023-01-31 20:18:35 +09:00
wiz
e1c0f0a529 Merge pull request #2908 from mempool/simon/remove-beta
Removing Lightning Beta tag
2023-01-31 20:18:18 +09:00
softsimon
b1addfb45a Moving beta tag to Block Audit 2023-01-31 15:51:15 +07:00
softsimon
b867e9f00f Removing Lightning Beta tag 2023-01-31 15:37:39 +07:00
hunicus
ff3af3a159 Add link to faq next to block health metric 2023-01-31 02:24:08 -05:00
hunicus
573fe3515a Add block health faq 2023-01-31 02:24:08 -05:00
Mononaut
e8c3273541 fix drift in next block viz with mixed template algos 2023-01-30 16:26:37 -06:00
wiz
0293ed2b41 Merge pull request #2996 from mempool/simon/block-health-string-update
Block health string update
2023-01-30 19:17:37 +09:00
softsimon
e858408c4c Block health string update 2023-01-29 23:54:28 +04:00
softsimon
bc1735a5d0 Merge pull request #2992 from mempool/simon/audit-status-tooltip-badges
More prominent audit status badges
2023-01-29 20:27:41 +04:00
softsimon
f9026b8f35 Credit nepalese translator 2023-01-29 16:23:35 +04:00
softsimon
35d873e7a6 More prominent audit status badges 2023-01-29 13:09:11 +04:00
softsimon
150f16ea92 Merge pull request #2987 from mempool/mononaut/fix-liquid-scroll-blocks
Fix blockchain gaps when KEEP_BLOCKS_AMOUNT > INITIAL_BLOCKS_AMOUNT
2023-01-28 17:11:45 +04:00
wiz
922dc5bdbf Merge branch 'master' into mononaut/fix-liquid-scroll-blocks 2023-01-28 21:42:06 +09:00
softsimon
74eb5ea11b Merge pull request #2988 from mempool/mononaut/fix-liquid-block-error
Fix navigation-breaking JS error on liquid block page
2023-01-28 16:19:59 +04:00
Mononaut
05e23b058c Fix navigation-breaking js error on liquid block page 2023-01-27 20:01:31 -06:00
Mononaut
92352e1453 Fix blockchain gaps when KEEP_BLOCKS_AMOUNT > INITIAL_BLOCKS_AMOUNT 2023-01-27 19:52:57 -06:00
softsimon
22cd20bef2 Merge pull request #2973 from mempool/mononaut/back-to-tip
Add button to scroll back to tip of blockchain
2023-01-27 16:25:15 +04:00
softsimon
bb7b0d4595 Merge pull request #2978 from mempool/mononaut/audit-highlighting
Only show block audit highlighting when audit is enabled
2023-01-27 16:20:11 +04:00
softsimon
b82f76f9e2 Merge branch 'master' into mononaut/audit-highlighting 2023-01-27 16:11:03 +04:00
wiz
93bbf61180 Merge pull request #2981 from mempool/simon/audit-available-block-height-fix
Fix for disabling block audit below block height
2023-01-27 19:43:11 +09:00
softsimon
2c2003af5a Fix for disabling block audit below block height 2023-01-27 14:18:50 +04:00
softsimon
dd3d047aa8 Merge pull request #2899 from mempool/mononaut/audit-toggle
toggle to enable/disable block audits
2023-01-27 12:37:33 +04:00
Mononaut
d8737ef6e1 Only show audit-related tooltip info when audit enabled 2023-01-26 19:14:40 -06:00
Mononaut
3c3814910a Only show block audit highlighting when audit enabled 2023-01-26 18:58:41 -06:00
wiz
687451e5ee Merge pull request #2974 from mempool/simon/node-chart-legend-error-fix
Fix node chart legend position error
2023-01-27 04:43:19 +09:00
softsimon
1cf3e1814b Fix node chart legend position error 2023-01-26 23:18:12 +04:00
wiz
f6be04dafd Merge pull request #2951 from mempool/mononaut/fix-mobile-bottom-nav
Fix page elements obscured by bottom navbar on mobile
2023-01-27 04:06:18 +09:00
wiz
2e7a701ca7 Merge branch 'master' into mononaut/fix-mobile-bottom-nav 2023-01-27 03:49:07 +09:00
Mononaut
da51557960 Add button to scroll back to tip of blockchain 2023-01-26 11:55:26 -06:00
Mononaut
714700d8f1 change audit toggle to bootstrap button 2023-01-26 11:10:55 -06:00
Mononaut
d3b59bc459 Remove extra space after block details when audit disabled 2023-01-26 10:50:00 -06:00
Mononaut
2ed49cf944 add toggle to enable/disable block audits 2023-01-26 10:42:38 -06:00
softsimon
94add379d0 Merge pull request #2958 from mempool/mononaut/ln-mobile-css-fixes
Assorted mobile/tablet CSS fixes
2023-01-26 19:54:44 +04:00
softsimon
931abb7d59 Merge pull request #2833 from mempool/hunicus/doc-nav-fix
Fix routing for top-nav doc button
2023-01-26 17:46:08 +04:00
softsimon
f1188efb40 Merge pull request #2857 from Piterden/patch-1
Added syntax highlight
2023-01-26 17:38:38 +04:00
softsimon
5e0ad1da5c Merge pull request #2808 from mempool/hex-rendering
Fix hexadecimal conversion to show leading zeros
2023-01-26 17:36:53 +04:00
softsimon
5dc2d0ba98 Merge pull request #2802 from mempool/nymkappa/bugfix/tx-fetcher-crash
Fix crash when channel short id is not valid
2023-01-26 17:13:56 +04:00
softsimon
4407b42aab Merge branch 'master' into nymkappa/bugfix/tx-fetcher-crash 2023-01-26 17:02:35 +04:00
softsimon
34b90e77c2 Merge pull request #2972 from mempool/simon/adding-nepalese-i18n
i18n: adding nepalese
2023-01-26 15:54:27 +04:00
softsimon
c191bbe5be i18n: adding nepalese 2023-01-26 15:54:07 +04:00
softsimon
6ed4e24f86 Merge pull request #2950 from mempool/mononaut/fix-cpfp-table-drops
Correctly drop legacy cpfp db tables
2023-01-26 13:39:35 +04:00
softsimon
ee0ec8421f Merge pull request #2969 from mempool/simon/transifex-extract-0126
Extracting i18n strings
2023-01-26 03:31:46 +04:00
softsimon
6f3303315b Extracting i18n strings 2023-01-26 03:31:25 +04:00
softsimon
59eb271782 Merge pull request #2824 from mempool/mononaut/more-rbf-info
cache, serve & display more comprehensive RBF info
2023-01-25 17:29:21 +04:00
softsimon
e4fcac93f2 Using truncate component for replaced tx link 2023-01-25 17:24:00 +04:00
softsimon
42b3144a22 Merge pull request #2964 from mempool/mononaut/fit-witnesses
better frontend handling for very large witnesses
2023-01-25 15:30:00 +04:00
softsimon
15c590a63f Merge pull request #2966 from mempool/simon/fixing-absolute-path-import
Removing absolute path import
2023-01-25 01:14:22 +04:00
softsimon
d8e4d7054b Removing absolute path import 2023-01-25 01:13:58 +04:00
softsimon
78e4788ab3 Merge pull request #2960 from mempool/mononaut/fix-mempool-gradients
Fix mempool block gradients
2023-01-24 00:28:55 +04:00
Mononaut
a8b162387b better frontend handling for very large witnesses 2023-01-23 11:37:22 -06:00
softsimon
b401284eb3 Merge pull request #2959 from mempool/mononaut/subtler-marginal-audit-txs
reduce prominence of marginal fee rate audit txs
2023-01-21 17:47:16 +04:00
Mononaut
fc0af50ab5 Fix bugged mempool block gradients 2023-01-19 11:09:03 -06:00
Mononaut
ec6c96c997 make marginal fee audit txs less prominent 2023-01-18 17:10:57 -06:00
Mononaut
402d9496c3 fix misaligned channel id 2023-01-18 16:44:37 -06:00
Mononaut
c3ae46795b fix cramped/overflowing node ranking pages 2023-01-18 16:37:12 -06:00
Mononaut
d89b313db8 fix overflowing node ranking widgets 2023-01-18 16:35:14 -06:00
Mononaut
e842aa814e fix overlapping legend on block fee rates chart 2023-01-18 15:54:56 -06:00
Mononaut
c5f9682b5b fix overflowing tables on ln nodes-per-x chart pages 2023-01-18 15:48:01 -06:00
Mononaut
7da308c1e1 fix RBF detection 2023-01-17 19:25:00 -06:00
Mononaut
d778530620 keep cached RBF info for 24 hours after tx leaves the mempool 2023-01-17 19:24:57 -06:00
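A hedged sketch of that retention rule: stamp an RBF cache entry when its transaction leaves the mempool, then sweep entries older than 24 hours (all names here are hypothetical):

```ts
const RETENTION_MS = 24 * 60 * 60 * 1000; // 24 hours

interface RbfEntry { replacedBy: string; evictedAt?: number }

const rbfCache = new Map<string, RbfEntry>();

// Called when a tx leaves the mempool: start its retention clock.
function markEvicted(txid: string): void {
  const entry = rbfCache.get(txid);
  if (entry) { entry.evictedAt = Date.now(); }
}

// Called periodically: drop entries whose retention window has passed.
function sweepExpired(): void {
  const now = Date.now();
  for (const [txid, entry] of rbfCache) {
    if (entry.evictedAt !== undefined && now - entry.evictedAt > RETENTION_MS) {
      rbfCache.delete(txid);
    }
  }
}
```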
Mononaut
0481f57304 cache, serve & display more comprehensive RBF info 2023-01-17 16:09:16 -06:00
softsimon
47d2a6d5c7 Merge pull request #2957 from mempool/mononaut/improve-truncated-links
Support inner links in truncated string component
2023-01-17 03:22:30 +04:00
softsimon
8e954d59db Use OnPush change detection 2023-01-17 03:22:18 +04:00
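With `OnPush` change detection, a component re-renders only when its input references change rather than on every global change-detection pass. A minimal example (the component itself is illustrative):

```ts
import { ChangeDetectionStrategy, Component, Input } from '@angular/core';

@Component({
  selector: 'app-truncated-link',
  template: '<a [href]="url">{{ url }}</a>',
  changeDetection: ChangeDetectionStrategy.OnPush, // re-render only on new @Input references
})
export class TruncatedLinkComponent {
  @Input() url = '';
}
```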
softsimon
92b0b23765 Merge pull request #2956 from mempool/mononaut/fix-missing-clipboard
Fix missing clipboard buttons
2023-01-17 02:59:19 +04:00
Mononaut
73f2d54a26 support inner links in truncated string component 2023-01-16 16:47:05 -06:00
Mononaut
dfd1de67b2 Fix hidden clipboard toast on transactions page 2023-01-16 16:46:28 -06:00
softsimon
c46464b57b Merge pull request #2955 from mempool/mononaut/fix-missing-cpfp-button
Fix cpfp observable error
2023-01-17 01:45:37 +04:00
Mononaut
ce7e2e8801 fix missing clipboard icons 2023-01-16 15:29:17 -06:00
Mononaut
f81e11e313 Fix cpfp observable error 2023-01-16 12:04:24 -06:00
Mononaut
05d7ca2cd3 Fix lightning page elements obscured by bottom nav 2023-01-16 10:25:48 -06:00
softsimon
9b61e2bcb0 Merge pull request #2914 from mempool/mononaut/string-truncation
dynamic CSS text truncation component
2023-01-16 17:47:40 +04:00
softsimon
0202fb68a0 Merge pull request #2917 from mempool/nymkappa/feature/update-mining-stats
Show `Avg fees per block` instead of `Reward Per Tx`
2023-01-16 17:44:19 +04:00
wiz
b870e7fcaa Merge branch 'master' into mononaut/fix-mobile-bottom-nav 2023-01-16 15:31:38 +09:00
wiz
649dba940d Merge pull request #2943 from mempool/simon/node-as-overflow-fix
Node AS table overflow fix
2023-01-16 15:31:13 +09:00
softsimon
73dd34af6f Merge pull request #2953 from mempool/simon/pulled-from-transifex-0115
Pull from transifex 01-15
2023-01-15 23:11:50 +04:00
softsimon
2284772f6a Pull from transifex 01-15 2023-01-15 23:11:01 +04:00
Mononaut
c69f2f2bc2 tweak latest transactions txid truncation point 2023-01-13 17:03:02 -06:00
Mononaut
eead4d0af8 Correctly drop legacy cpfp db tables 2023-01-13 16:34:04 -06:00
Mononaut
fbd8e0588c Fix page elements obscured by bottom nav bar on mobile 2023-01-13 11:52:36 -06:00
softsimon
43e7328f6f Merge pull request #2887 from mempool/simon/backend-dependencies-29-12
Updating backend dependencies to please dependabot
2023-01-13 13:17:24 +04:00
softsimon
9fa19e7b26 Updating patch releases 2023-01-13 13:16:50 +04:00
softsimon
f685a23ead Merge pull request #2886 from mempool/simon/frontend-packages-29-12
Update frontend packages to please dependabot
2023-01-13 13:13:37 +04:00
softsimon
a2d241c687 Updating packages to please dependabot 2023-01-13 13:12:46 +04:00
softsimon
5bf00b565e Merge pull request #2946 from knorrium/tweak_dependabot_again
Dependabot: Monitor only production dependencies and add frontend Docker dir
2023-01-13 13:07:29 +04:00
Felipe Knorr Kuhn
8c58fbfacc Merge branch 'master' into tweak_dependabot_again 2023-01-12 23:45:01 -08:00
wiz
fd912c5284 Merge pull request #2947 from knorrium/fix_missing_audit_config
Fix missing audit Docker config
2023-01-13 16:43:27 +09:00
Felipe Knorr Kuhn
fd53d7ec30 Fix missing audit Docker config 2023-01-12 23:39:09 -08:00
Felipe Knorr Kuhn
04f00ca521 Merge branch 'master' into simon/backend-dependencies-29-12 2023-01-12 22:27:10 -08:00
Felipe Knorr Kuhn
2b9975ded9 Merge branch 'master' into simon/frontend-packages-29-12 2023-01-12 22:27:00 -08:00
Felipe Knorr Kuhn
a61106377c Merge branch 'master' into hunicus/doc-nav-fix 2023-01-12 22:26:20 -08:00
Felipe Knorr Kuhn
15ea4d3288 Dependabot: Monitor only production dependencies and add frontend Docker dir 2023-01-12 22:15:18 -08:00
softsimon
2187d96c25 Node AS table overflow fix 2023-01-12 20:48:16 +04:00
wiz
d7767a053a Merge branch 'master' into mononaut/string-truncation 2023-01-12 22:47:03 +09:00
wiz
646999eb56 Merge pull request #2922 from knorrium/fix_package_lock
Remove Cypress from the toplevel package-lock.json deps
2023-01-12 22:46:42 +09:00
wiz
fd105c9c99 Merge pull request #2885 from mempool/mononaut/empty-block-info
Add FAQ link to empty blocks
2023-01-12 22:46:12 +09:00
wiz
ab3186a99d Merge branch 'master' into mononaut/empty-block-info 2023-01-12 22:36:50 +09:00
softsimon
c15d5920d8 Merge pull request #2852 from mempool/nymkappa/feature/pools-json-frontend-cleanup
Remove pools.json download from the frontend
2023-01-12 17:36:39 +04:00
softsimon
500f94227a Merge pull request #2884 from mempool/mononaut/scrollable-blockchain
Infinitely scrolling blockchain
2023-01-12 17:35:49 +04:00
wiz
b2b8911030 Merge branch 'master' into mononaut/scrollable-blockchain 2023-01-12 22:01:45 +09:00
wiz
fc56c371ec Merge pull request #2865 from mempool/mononaut/document-backend-config
Add missing vars to docker config & readme
2023-01-12 22:00:17 +09:00
wiz
3b8802d39e Merge branch 'master' into mononaut/document-backend-config 2023-01-12 21:32:28 +09:00
wiz
01a46344b9 Merge pull request #2939 from mempool/mononaut/cpfp-optimizations
CPFP Optimizations
2023-01-12 21:28:55 +09:00
Felipe Knorr Kuhn
7f6d6b86a4 Merge branch 'master' into nymkappa/feature/pools-json-frontend-cleanup 2023-01-11 21:38:33 -08:00
Mononaut
2548d2a5e9 fix frontend js error on unconfirmed non-cpfp transactions 2023-01-11 08:44:11 -06:00
Mononaut
bd30f2eb12 migrate cpfp data from old to new schemas 2023-01-11 08:44:11 -06:00
Mononaut
f0d3bb87c6 handle gaps in indexed CPFP data 2023-01-11 08:44:11 -06:00
Mononaut
8de3fd0988 batch db inserts for cpfp data 2023-01-11 08:44:11 -06:00
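Batching DB inserts generally means collapsing many single-row queries into chunked multi-value `INSERT` statements. A sketch under that assumption, with the table, columns, and `db.query` signature all placeholders:

```ts
interface CpfpRow { root: string; txs: string; feeRate: number }

// Insert rows in chunks of up to `chunkSize` per query instead of one query per row.
async function insertCpfpBatch(
  db: { query: (sql: string, params: unknown[]) => Promise<void> },
  rows: CpfpRow[],
  chunkSize = 1000,
): Promise<void> {
  for (let i = 0; i < rows.length; i += chunkSize) {
    const chunk = rows.slice(i, i + chunkSize);
    const placeholders = chunk.map(() => '(?, ?, ?)').join(', ');
    const params = chunk.flatMap((r) => [r.root, r.txs, r.feeRate]);
    await db.query(`INSERT INTO cpfp_clusters (root, txs, fee_rate) VALUES ${placeholders}`, params);
  }
}
```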
Mononaut
fcd047f302 remove redundant fields from CPFP interfaces 2023-01-11 08:44:11 -06:00
Mononaut
01c96f80f9 add cpfp progress marker to avoid reindexing early blocks 2023-01-11 08:44:10 -06:00
Mononaut
b50936f001 compact schemas for cpfp tables 2023-01-11 08:44:10 -06:00
Mononaut
7793eaecbc fix cpfp indexing rate calculation 2023-01-11 08:44:10 -06:00
Mononaut
ee95d033ac remove slow cpfp indexing path 2023-01-11 08:44:10 -06:00
Mononaut
0d921cf7a6 don't rely on blocks table for cpfp indexing progress 2023-01-11 08:44:10 -06:00
wiz
b2dbb09ddb Merge pull request #2941 from mempool/simon/dark-mode-inputs
Dark mode inputs
2023-01-11 19:37:53 +09:00
softsimon
2e5d4a6df9 Dark mode inputs 2023-01-11 13:37:49 +04:00
softsimon
b4bac7ea09 Merge pull request #2940 from knorrium/augment_backend_info
Expose whether Lightning is enabled on the backend
2023-01-11 13:31:52 +04:00
Felipe Knorr Kuhn
5379ec0f30 Expose whether Lightning is enabled on the backend 2023-01-10 21:54:34 -08:00
softsimon
5f87d6c4f1 Merge pull request #2923 from mempool/nymkappa/bugfix/ignore-pools-logo-failure
Ignore pool logo download failure as it's not a critical error
2023-01-11 00:13:08 +04:00
softsimon
87fd6dc256 Merge pull request #2937 from mempool/simon/ln-alias-fallback
Fallback alias name to pubkey
2023-01-10 23:42:34 +04:00
softsimon
8ebe04baa7 Fallback alias name to pubkey
fixes #2935
2023-01-10 21:27:26 +04:00
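The fallback itself is essentially a one-liner. A hypothetical version, shortening the pubkey for display when no alias is set:

```ts
// Hypothetical one-liner behind the fix: fall back to a shortened pubkey
// when a lightning node has no alias set.
function displayName(alias: string | null, pubkey: string): string {
  return alias && alias.length > 0 ? alias : `${pubkey.slice(0, 20)}...`;
}
```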
wiz
549d61b41e Merge pull request #2933 from mempool/nymkappa/feature/add-more-mempool-nodes
Add new mempool lightning nodes and format the array so it's human readable
2023-01-11 02:03:36 +09:00
nymkappa
fcd34eb876 Add new mempool lightning nodes and format the array so it's human readable 2023-01-10 16:13:16 +01:00
Mononaut
649d14011e Remove cpfp_indexing=true from mainnet prod config 2023-01-09 10:36:08 -06:00
Mononaut
f5922b7b71 Swap TRANSACTION_INDEXING for CPFP_INDEXING in docker config 2023-01-09 10:36:08 -06:00
Mononaut
2e45dab4b7 Rename TRANSACTION_INDEXING to CPFP_INDEXING and add to mainnet prod config 2023-01-09 10:36:08 -06:00
Mononaut
7f903b0331 Add missing vars to docker config & readme 2023-01-09 10:36:08 -06:00
softsimon
0d7c52817e Merge pull request #2916 from mempool/nymkappa/bugfix/esplora-coinbase-tx
Fetch coinbase tx with core when esplora fails to do so
2023-01-08 16:53:44 +04:00
nymkappa
73c55c450c Ignore pool logo download failure as it's not a critical error 2023-01-08 11:43:18 +01:00
nymkappa
71d1c3de04 Update bitcoin-api-factory so we can use core only if needed 2023-01-08 11:24:23 +01:00
wiz
e3a82dae83 Merge pull request #2921 from mempool/ops/add-mempool-reset-script
ops: fix wildcards in mempool-reset-all script
2023-01-08 14:22:16 +09:00
Felipe Knorr Kuhn
333593c166 Remove Cypress from the toplevel package-lock.json deps 2023-01-07 20:51:55 -08:00
wiz
5a8d1795a6 ops: fix wildcards in mempool-reset-all script 2023-01-08 13:11:59 +09:00
wiz
0fdb5099e1 Merge pull request #2920 from mempool/ops/add-mempool-reset-script
ops: add mempool-reset-all script with ./reset symlink
2023-01-08 13:09:47 +09:00
wiz
31d732172c Merge pull request #2919 from mempool/ops/add-tk7-nodes-to-bitcoin.conf
ops: add tk7 nodes to prod bitcoin.conf
2023-01-08 13:06:17 +09:00
wiz
33d6892aa4 ops: add mempool-reset-all script with ./reset symlink 2023-01-08 13:05:50 +09:00
wiz
472d00a067 ops: add tk7 nodes to prod bitcoin.conf 2023-01-08 13:01:05 +09:00
wiz
98fbd524fc Merge pull request #2918 from mempool/ops/fix-mysql-credentials-path
ops: fix mysql_credentials path in install/build scripts
2023-01-08 12:59:55 +09:00
wiz
d987669b1e ops: fix mysql_credentials path in install/build scripts 2023-01-08 12:56:27 +09:00
nymkappa
8a3bcd3b28 Show Avg fees per block instead of Reward Per Tx 2023-01-07 20:55:59 +01:00
nymkappa
9f4107319f Fetch coinbase tx with core when esplora fails to do so 2023-01-07 11:17:30 +01:00
Mononaut
6d95cfadac Add FAQ link to empty blocks 2023-01-06 10:14:18 -06:00
Mononaut
bf941b0227 load block/tx pages at correct blockchain scroll position 2023-01-06 10:13:53 -06:00
Mononaut
32bf30872d improve block scrolling & new block animation 2023-01-06 10:13:53 -06:00
Mononaut
7be3ed416e create separate service for short term tx & block caching 2023-01-06 10:13:53 -06:00
Mononaut
befafaa60c add paginated virtual scrolling to blockchain blocks bar 2023-01-06 10:13:53 -06:00
wiz
5905eebaa6 Merge pull request #2910 from knorrium/add_missing_config_overrides
Add the missing block audit variables to the Docker script
2023-01-07 00:47:26 +09:00
wiz
f99aa8f1f0 Merge pull request #2909 from knorrium/fix_docker_gha_again
Fix Docker GHA again. Update deps and increase swap size
2023-01-07 00:46:56 +09:00
Felipe Knorr Kuhn
3e99605870 Add the missing block audit variables to the Docker script 2023-01-05 16:36:38 -08:00
Felipe Knorr Kuhn
05c4440680 Fix Docker GHA again. Update deps and increase swap size 2023-01-05 15:39:33 -08:00
softsimon
238a2e75b1 Merge pull request #2907 from mempool/wiz/enable-lightning-on-prod-frontend
Set `LIGHTNING: true` in prod frontend config
2023-01-06 01:58:09 +04:00
Mononaut
6448ad0ac7 proper truncation in RTL locales 2023-01-05 11:16:14 -06:00
Mononaut
42a39c1f7c migrate old text truncation implementations to new component 2023-01-05 11:01:46 -06:00
Mononaut
44147f5976 Add text truncation component 2023-01-05 11:00:08 -06:00
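The real component does its truncation dynamically with CSS and layout measurement; as a rough string-based illustration of the middle-ellipsis idea it implements for long ids:

```ts
// Rough illustration only: the actual component lets the cut point track the
// available width instead of a fixed character budget.
function truncateMiddle(value: string, maxChars: number): string {
  if (value.length <= maxChars) { return value; }
  const half = Math.floor((maxChars - 1) / 2);
  return `${value.slice(0, half)}…${value.slice(value.length - half)}`;
}
```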
wiz
667fc4ea18 Set LIGHTNING: true in prod frontend config 2023-01-05 23:35:53 +09:00
softsimon
05a8154db0 Merge pull request #2851 from mempool/nymkappa/bugfix/error-500-blocks-list
Fix error 500 when querying /blocks using `"INDEXING_BLOCKS_AMOUNT": 0`
2023-01-05 01:16:09 +04:00
softsimon
68642aeb5f Merge pull request #2339 from mempool/nymkappa/bugfix/cleanup-logs
Logs cleanup
2023-01-05 01:11:00 +04:00
softsimon
dcc8b81ca6 Merge branch 'master' into nymkappa/bugfix/cleanup-logs 2023-01-05 00:46:21 +04:00
wiz
7e42d2d792 Merge branch 'master' into simon/frontend-packages-29-12 2023-01-04 23:12:53 +09:00
softsimon
2ca12a72f8 Updating dependencies to please dependabot 2022-12-30 16:28:48 +04:00
softsimon
26a92cda45 Merge pull request #2876 from mempool/simon/add-lithuanian
Adding Lithuanian
2022-12-30 15:36:35 +04:00
softsimon
2f7e74a2a6 Update frontend packages to please dependabot 2022-12-29 15:03:52 +04:00
softsimon
a51b4e88d8 Merge pull request #2883 from mempool/nymkappa/bugfix/empty-topology-folder
Don't try to import LN historical stats if no topology folder is set
2022-12-29 12:02:21 +04:00
nymkappa
a975936d3c Don't try to import LN historical stats if no topology folder is set 2022-12-28 12:13:41 +01:00
softsimon
fbbd86d8e0 Updating nginx to support lithuanian 2022-12-26 22:45:25 +04:00
softsimon
8ccfa5b038 Adding Lithuanian 2022-12-26 22:39:42 +04:00
wiz
04006b8c98 Merge pull request #2875 from mempool/simon/go-to-block-optimization
Go to block optimization
2022-12-26 22:51:03 +09:00
wiz
3317f5e6db Merge branch 'master' into simon/go-to-block-optimization 2022-12-26 22:36:12 +09:00
wiz
79e1beb2ca Merge pull request #2874 from mempool/simon/updating-vn-translator
Updating VN translator
2022-12-26 22:35:14 +09:00
wiz
811d0c824f Merge pull request #2867 from mempool/mononaut/block-page-layouts
refactor block page html
2022-12-26 22:31:57 +09:00
wiz
f3adab7d26 Merge branch 'master' into mononaut/block-page-layouts 2022-12-26 22:09:04 +09:00
softsimon
d730366ea7 Go to block optimization 2022-12-26 16:30:10 +04:00
softsimon
7e60526887 Updating VN translator 2022-12-26 16:15:30 +04:00
wiz
ab69c03d8d Merge pull request #2714 from Emzy/ops/cln-bitcoin-link
Add symlink to bitcoin config for user cln in prod install
2022-12-26 21:07:38 +09:00
wiz
efa352821a Merge pull request #2713 from Emzy/ops/cln-crontab
Add command line options to cln on FreeBSD in prod install
2022-12-26 21:07:21 +09:00
wiz
1dcf6ab599 Merge branch 'master' into mononaut/block-page-layouts 2022-12-26 21:00:19 +09:00
wiz
869d7df844 Merge pull request #2859 from mempool/mononaut/block-health-ux
display block health as badge & add loaders
2022-12-26 20:51:47 +09:00
softsimon
597536efaf Changing Health to text-right in column 2022-12-26 15:31:07 +04:00
wiz
1f5754943a Merge branch 'master' into mononaut/block-health-ux 2022-12-26 20:29:35 +09:00
wiz
e98d03431c Merge pull request #2809 from mempool/mononaut/preview-miner-tag-style
fix unstable miner tag styles in block preview
2022-12-26 20:29:32 +09:00
wiz
39d92f57fa Merge branch 'master' into mononaut/preview-miner-tag-style 2022-12-26 20:17:15 +09:00
wiz
313df79e33 Merge pull request #2804 from mempool/mononaut/zero-value-tx-diagrams
Handle zero-value flow diagram edge case
2022-12-26 20:16:56 +09:00
wiz
13ceb368e2 Merge branch 'master' into mononaut/zero-value-tx-diagrams 2022-12-26 20:06:27 +09:00
wiz
4bcb4e37e8 Merge branch 'master' into nymkappa/bugfix/price-update-invalid-response 2022-12-26 13:13:30 +09:00
wiz
72e6e36cbb Merge pull request #2769 from mempool/simon/pull-from-transifex
Pull from transifex
2022-12-26 13:13:12 +09:00
wiz
729f2aff3e Merge pull request #2771 from mempool/simon/fix-javascript-errors-block-nav
Fix for javascript errors when navigating blocks
2022-12-26 13:12:16 +09:00
wiz
1348e2318d Merge branch 'master' into mononaut/zero-value-tx-diagrams 2022-12-26 12:24:09 +09:00
wiz
731443f670 Merge pull request #2810 from mempool/mononaut/cpfp-indexer-fixes
Fix & reenable cpfp indexer optimized path
2022-12-26 12:21:54 +09:00
wiz
fc2edbf1d1 Merge pull request #2872 from mempool/simon/indexing-spelling-fix 2022-12-26 07:30:16 +09:00
softsimon
8ac514733a Fix for spelling error in indexing status 2022-12-25 23:17:04 +04:00
wiz
e986aaf1d9 Merge branch 'master' into mononaut/cpfp-indexer-fixes 2022-12-25 22:41:39 +09:00
softsimon
606e6df834 Merge pull request #2793 from mempool/mononaut/gbt-thread-optimization
Refactor advanced GBT implementation to minimize inter-thread data transfer
2022-12-23 00:02:56 +04:00
Mononaut
743f2a1cd4 refactor block page html 2022-12-22 07:49:12 -06:00
wiz
5e633344c5 Merge branch 'master' into mononaut/gbt-thread-optimization 2022-12-22 09:20:15 +09:00
Mononaut
c4d5ea971e display block health as badge 2022-12-19 19:02:50 -06:00
wiz
04fec6c894 Merge branch 'master' into mononaut/cpfp-indexer-fixes 2022-12-19 08:23:30 +09:00
Denis Efremov
f4b70fa886 Added license agreement 2022-12-17 22:08:04 +03:00
Denis Efremov
1b9056298a Added syntax highlight 2022-12-17 21:59:39 +03:00
wiz
343a48818b Merge pull request #2656 from knorrium/update_gha
Update Cypress GHA to use newer checkout and setup-node actions
2022-12-17 13:15:26 +09:00
Felipe Knorr Kuhn
028a26f574 Merge branch 'master' into update_gha 2022-12-16 18:46:59 -08:00
wiz
dcd0a53fba Merge pull request #2853 from knorrium/update_cypress_deps 2022-12-17 09:56:51 +09:00
Felipe Knorr Kuhn
ea5ec7bc32 Update Cypress dependencies 2022-12-16 16:32:58 -08:00
nymkappa
908b92af71 Remove pools.json download from the frontend 2022-12-16 18:01:04 +01:00
nymkappa
db4bf52596 Fix error 500 when querying /blocks using "INDEXING_BLOCKS_AMOUNT": 0 2022-12-16 17:43:37 +01:00
wiz
1513c61cd5 Merge branch 'master' into mononaut/cpfp-indexer-fixes 2022-12-13 10:37:35 +09:00
hunicus
713ac64636 Fix routing for top-nav doc button 2022-12-12 01:42:01 -05:00
wiz
f18ea6a7a3 Merge pull request #2825 from knorrium/fix_dependabot_ignore 2022-12-10 14:55:59 +09:00
Felipe Knorr Kuhn
35ee58befb Merge branch 'master' into mononaut/preview-miner-tag-style 2022-12-09 21:03:08 -08:00
Felipe Knorr Kuhn
79f79b0e3b Fix the dependabot ignore settings - RTFM 2022-12-09 20:58:52 -08:00
Felipe Knorr Kuhn
250df0d56c Merge branch 'master' into update_gha 2022-12-07 19:06:48 -08:00
wiz
a210a3faf2 Merge pull request #2801 from knorrium/use_fra_for_bisq 2022-12-08 11:57:12 +09:00
Felipe Knorr Kuhn
b8edcbadf4 Merge branch 'master' into use_fra_for_bisq 2022-12-07 18:12:52 -08:00
Mononaut
fb137e6247 Fix & reenable cpfp indexer optimized path 2022-12-07 18:58:03 -06:00
Mononaut
1a4f699c95 fix unstable miner tag styles in block preview 2022-12-07 15:17:10 -06:00
Mononaut
56b6f79f97 improve thread error handling 2022-12-07 14:52:02 -06:00
Mononaut
4d0637768d Refactor advanced gbt to minimize inter-thread comms 2022-12-07 14:52:01 -06:00
softsimon
07987ff4b6 Merge pull request #2790 from mempool/nymkappa/bugfix/sql-query
Handle ISP with no nodes
2022-12-07 17:40:36 +04:00
softsimon
e7e0a64ca2 Merge pull request #2773 from knorrium/update_dependabot
Update ignore rules and frequency of dependabot updates
2022-12-07 17:23:27 +04:00
softsimon
484c503f6d Merge pull request #2779 from mempool/mononaut/fix-block-summaries-repo
Fix block summaries repo upsert race condition
2022-12-07 17:23:00 +04:00
softsimon
c59ab2a129 Merge pull request #2778 from mempool/mononaut/advanced-gbt-fixes
Fix bugs related to advanced GBT transaction selection
2022-12-07 16:13:16 +04:00
softsimon
8e668be703 Merge pull request #2777 from mempool/mononaut/ln-scan-throttle-config
Make forensics node backend call rate limiting configurable
2022-12-07 15:24:42 +04:00
hunicus
c62059c0f4 Fix hexadecimal conversion to show leading zeros.
Fixes #2805.
2022-12-07 00:06:06 -05:00
Mononaut
28c21b3770 Handle zero-value tx flow diagram edge case 2022-12-06 17:45:04 -06:00
nymkappa
3a7dffbe09 Fix crash when channel short id is not valid 2022-12-06 10:51:01 +01:00
Felipe Knorr Kuhn
7a7172bb64 Change Bisq staging host to fra so tests can pass 2022-12-05 22:00:31 -08:00
Felipe Knorr Kuhn
5658e053d0 Update Cypress GHA to v5 2022-12-05 20:22:53 -08:00
Felipe Knorr Kuhn
d17ccbc5ae Merge branch 'master' into update_gha 2022-12-05 20:15:14 -08:00
nymkappa
685433fe4c Handle ISP with no nodes 2022-12-05 08:11:46 +01:00
Mononaut
79f6ae3b6f fix post-block advanced gbt call 2022-12-03 12:11:21 +09:00
Mononaut
e54e896e56 fix skipped descendant updates on tx inclusion 2022-12-03 12:10:54 +09:00
Mononaut
3126a559a0 Make forensics backend call rate limiting configurable 2022-12-03 11:17:53 +09:00
Mononaut
132e848fdc Fix block summaries repo upsert race condition 2022-12-03 10:49:10 +09:00
softsimon
ade3c09b2a Pull from transifex 2022-12-02 15:40:22 +09:00
Felipe Knorr Kuhn
0d92779971 Update ignore rules and frequency of dependabot updates 2022-12-01 08:14:10 -08:00
nymkappa
efb48271f9 Cleanup logs 2022-12-01 15:52:06 +01:00
softsimon
3572ba837d Fix for javascript errors when navigating blocks 2022-12-01 23:12:02 +09:00
wiz
5ff5275b36 Merge pull request #2768 from mempool/simon/block-component-loader 2022-12-01 21:38:09 +09:00
softsimon
534f2e2781 Set auditDataMissing as soon as we know the block height 2022-12-01 20:23:45 +09:00
nymkappa
a671bfc226 Make sure exchange API response format is valid before using it 2022-12-01 12:05:23 +01:00
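Validating an exchange API response before use typically looks like a narrow type guard. A sketch with an assumed payload shape (the real schema may differ):

```ts
// Illustrative validation of an exchange API payload before it is trusted;
// the expected { USD, EUR } shape is an assumption, not the project's schema.
interface ExchangeQuote { USD: number; EUR: number }

function parseQuote(raw: unknown): ExchangeQuote | null {
  if (typeof raw !== 'object' || raw === null) { return null; }
  const obj = raw as Record<string, unknown>;
  if (typeof obj['USD'] !== 'number' || !Number.isFinite(obj['USD'])) { return null; }
  if (typeof obj['EUR'] !== 'number' || !Number.isFinite(obj['EUR'])) { return null; }
  return { USD: obj['USD'] as number, EUR: obj['EUR'] as number };
}
```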
softsimon
2cd98c7c04 Add separate config for mainnet, testnet and signet. 2022-12-01 19:48:57 +09:00
softsimon
75459729ad Block page audit fallback 2022-12-01 19:17:59 +09:00
wiz
2b411aad0a Merge pull request #2730 from mempool/nymkappa/bugfix/node-map
Don't select nodes which do not have country info
2022-12-01 17:13:12 +09:00
softsimon
229dd7718a Merge pull request #2764 from mempool/simon/update-backend-packages
Update backend npm modules
2022-12-01 17:09:49 +09:00
wiz
13b52c427c Merge branch 'master' into nymkappa/bugfix/node-map 2022-12-01 16:55:22 +09:00
wiz
fc778e1e25 Merge branch 'master' into simon/update-backend-packages 2022-12-01 16:33:31 +09:00
wiz
f6813f1d1c Merge pull request #2766 from mempool/simon/update-few-more-frontend-deps
Update a few more frontend deps
2022-12-01 16:31:52 +09:00
softsimon
1db11d1d67 Downgrading axios to latest 0.27.x release 2022-12-01 16:15:19 +09:00
wiz
12b130cfdc Merge branch 'master' into simon/update-few-more-frontend-deps 2022-12-01 16:12:15 +09:00
wiz
175bcf7467 Merge branch 'master' into simon/update-backend-packages 2022-12-01 16:01:42 +09:00
wiz
0b54035e80 Merge pull request #2763 from mempool/mononaut/fix-tx-unfurler
Fix broken transaction preview unfurler page
2022-12-01 16:01:08 +09:00
wiz
92807dbdde Merge branch 'master' into mononaut/fix-tx-unfurler 2022-12-01 15:38:10 +09:00
wiz
059d5a94a9 Merge pull request #2712 from Emzy/ops/mysql-fix
Mysql user creation fix in prod install
2022-12-01 15:37:53 +09:00
wiz
501ca1832b Merge pull request #2765 from mempool/simon/synchronous-schema-update
Run schema update synchronously
2022-12-01 15:37:38 +09:00
softsimon
ddc7de0d4a Update a few more frontend deps 2022-12-01 15:36:19 +09:00
softsimon
59f1b031c8 Run schema update synchronously 2022-12-01 14:41:37 +09:00
softsimon
3d45054e38 Update backend npm modules 2022-12-01 14:24:41 +09:00
Mononaut
38c890626b fix CPFP handling in transaction preview 2022-12-01 12:41:04 +09:00
Mononaut
c7d61a3be4 only retry fetch CPFP for unconfirmed txs 2022-12-01 11:34:11 +09:00
wiz
0b37a02435 Merge pull request #2731 from mempool/mononaut/fix-db-migration-versions
save db schema version after each successful migration
2022-11-30 23:19:45 +09:00
Mononaut
03a3320e45 block audit truncation in separate db migrations. bump timeout to 1 hour. 2022-11-30 23:03:51 +09:00
wiz
6d075842f4 Merge pull request #2751 from hunicus/about-widths
Revert community integration icon size increase
2022-11-30 22:58:23 +09:00
Mononaut
ead60aaa21 save db schema version after each successful migration 2022-11-30 22:58:01 +09:00
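The point of this fix is that the schema version is persisted after each migration step rather than once at the end, so an interrupted run resumes where it stopped. A minimal sketch with hypothetical types:

```ts
type Migration = { version: number; run: () => Promise<void> };

// Apply pending migrations in order, persisting the schema version per step.
async function migrate(
  current: number,
  migrations: Migration[],
  saveVersion: (v: number) => Promise<void>,
): Promise<void> {
  const pending = migrations
    .filter((mig) => mig.version > current)
    .sort((a, b) => a.version - b.version);
  for (const mig of pending) {
    await mig.run();
    await saveVersion(mig.version); // recorded per step, not once at the end
  }
}
```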
wiz
0fd672a741 Merge branch 'master' into about-widths 2022-11-30 22:50:50 +09:00
wiz
6741a2b226 Merge pull request #2753 from mempool/simon/remove-console-log
Remove annoying frontend console log
2022-11-30 22:50:23 +09:00
wiz
100c1b292a Merge pull request #2750 from mempool/ops/increase-nginx-max-concurrent-streams
[ops] Increase nginx max concurrent streams
2022-11-30 22:49:48 +09:00
wiz
7e5c0a4c46 Merge pull request #2749 from mempool/simon/mempool-gbt-config
Mempool GBT config
2022-11-30 22:39:00 +09:00
softsimon
8117b9799c Adding production config to enable gbt audit. 2022-11-30 22:14:41 +09:00
softsimon
afc5c6786b Remove annoying frontend console log 2022-11-30 22:07:47 +09:00
wiz
c7cca500fa Merge branch 'master' into simon/mempool-gbt-config 2022-11-30 21:45:25 +09:00
wiz
5f1a71cc9b Merge pull request #2683 from mononaut/ln-forensics-output-attribution
Lightning linked channel forensics
2022-11-30 20:58:25 +09:00
hunicus
734c953714 Revert community integration icon size increase
From #2700. Narrow column instead.
2022-11-30 04:36:55 -05:00
Mononaut
ba10df69b7 improve precision of output attribution for mutual closes 2022-11-30 18:24:00 +09:00
Mononaut
ded11892f5 merge forensics columns into main channels table 2022-11-30 18:24:00 +09:00
Mononaut
609f68eb24 move linked channel scan into forensics task, add backend throttling 2022-11-30 18:24:00 +09:00
Mononaut
5e1f54e862 hide closing balances if channel still open 2022-11-30 18:24:00 +09:00
Mononaut
dc7d5bc94d handle batched channel opens. infer funding balances in both directions. 2022-11-30 18:24:00 +09:00
Mononaut
35ae672177 break long-running forensics tasks 2022-11-30 18:24:00 +09:00
Mononaut
8f0830f6d1 detect channels opened from change outputs 2022-11-30 18:24:00 +09:00
Mononaut
0c96a11150 display channel close forensics results 2022-11-30 18:23:59 +09:00
Mononaut
cf89ded14d detect links between channel close and open txs 2022-11-30 18:23:59 +09:00
wiz
a9e766046f [ops] Increase nginx max concurrent streams 2022-11-30 18:10:47 +09:00
softsimon
030889250f Mempool GBT config 2022-11-30 17:56:53 +09:00
wiz
50993d3b95 Merge pull request #2748 from mempool/simon/upgrade-frontend-packages
Upgrading some more frontend packages
2022-11-30 15:44:47 +09:00
Felipe Knorr Kuhn
33775f32e2 Merge branch 'master' into update_gha 2022-11-29 20:06:44 -08:00
softsimon
95e8789ba9 Upgrading some more frontend packages 2022-11-30 12:56:07 +09:00
wiz
194e4b4c80 Merge pull request #2737 from mempool/mononaut/index-cpfp-info
show CPFP info for mined transactions
2022-11-30 00:05:49 +09:00
wiz
272b6d2437 Disable optimization in CPFP indexing when block summaries indexing is enabled 2022-11-29 23:47:43 +09:00
wiz
89293b4358 Merge branch 'master' into mononaut/index-cpfp-info 2022-11-29 16:41:12 +09:00
wiz
c682a8e3ff Merge pull request #2743 from mempool/nymkappa/bugfix/block-timestamp
Disable ON UPDATE for blocks.blockTimestamp field
2022-11-29 16:39:53 +09:00
wiz
cc30da0b4d Merge branch 'master' into nymkappa/bugfix/block-timestamp 2022-11-29 14:25:45 +09:00
Mononaut
6d6dd09d11 get blocks from esplora for cpfp indexer 2022-11-29 11:42:08 +09:00
Mononaut
f2ad184d1f optimize cpfp indexing 2022-11-29 11:42:08 +09:00
Mononaut
ab5308e1c8 adjust database migration compatibility 2022-11-29 11:42:08 +09:00
Mononaut
205d832d31 return more complete cpfp data for mempool transactions 2022-11-29 11:42:08 +09:00
Mononaut
3e7270d1c5 show cpfp badges on non-mempool tx previews 2022-11-29 11:42:07 +09:00
Mononaut
fa515402bf display indexed cpfp info on non-mempool txs 2022-11-29 11:42:07 +09:00
Mononaut
9b6a012476 calculate & index cpfp packages 2022-11-29 11:42:07 +09:00
wiz
3406758fd2 Merge pull request #2742 from mempool/simon/angular-15
Upgrade to Angular 14
2022-11-28 21:41:58 +09:00
wiz
cc93674591 Merge branch 'master' into simon/angular-15 2022-11-28 19:02:14 +09:00
wiz
c9fc77490f Merge pull request #2744 from mempool/mononaut/liquid-no-extras
don't use block.extras on liquid
2022-11-28 19:01:42 +09:00
Mononaut
ddb4fbac5c don't use block.extras on liquid 2022-11-28 18:37:13 +09:00
nymkappa
3eb4ea9048 Disable ON UPDATE for blocks.blockTimestamp field 2022-11-28 17:33:07 +09:00
softsimon
6d99d0a9ce Removing SSR 2022-11-28 16:53:18 +09:00
softsimon
d43a9cc5ea ngBootstrap UX fix 2022-11-28 16:53:18 +09:00
softsimon
a3a2adac02 Upgrading more libs 2022-11-28 16:53:18 +09:00
softsimon
c8aea18c5e Refactored ngb components 2022-11-28 16:53:17 +09:00
softsimon
c2f45f9bc1 Upgrade to Angular 14 2022-11-28 16:53:17 +09:00
wiz
208946a8bf Merge pull request #2740 from mempool/mononaut/update-proxy-staging
update proxy.conf.staging target
2022-11-28 15:09:22 +09:00
Mononaut
0e8e5dc3a9 update proxy.conf.staging target 2022-11-28 15:01:30 +09:00
wiz
f1122384dd Merge pull request #2739 from mempool/mononaut/fix-liquid-tests
Fix broken tests on liquid
2022-11-28 14:57:45 +09:00
Mononaut
2290f98011 only query blocks_audits on bitcoin networks 2022-11-28 14:26:28 +09:00
wiz
b0e3022ddb Merge pull request #2726 from mempool/mononaut/fix-loading-block-title
Fix incorrect "Genesis" heading while loading the block page
2022-11-27 01:59:40 +09:00
wiz
acd633530f Merge branch 'master' into mononaut/fix-loading-block-title 2022-11-27 01:12:19 +09:00
wiz
f73dc59f49 Merge pull request #2734 from mempool/mononaut/block-page-fixes
Fix bugs on the new block page
2022-11-25 19:57:14 +09:00
Mononaut
e627122239 move block audit endpoint from mining to bitcoin routes 2022-11-25 19:32:50 +09:00
Mononaut
201b32bdcd better fallbacks for missing block summaries data 2022-11-25 10:16:58 +09:00
Mononaut
6ec9c2f816 fix 'unavailable' msg on block page on mobile 2022-11-25 10:16:06 +09:00
Mononaut
de04914851 optimize block audit scores db query 2022-11-24 17:11:45 +09:00
Mononaut
5fc3b8b70c merge block-audit and block pages 2022-11-24 17:10:53 +09:00
Mononaut
276470474d save 'fresh' transactions in block audit repository 2022-11-24 17:10:53 +09:00
nymkappa
1461cb1b17 Don't select nodes which do not have country info 2022-11-24 16:56:13 +09:00
softsimon
c43e4bb71b Merge pull request #2722 from mempool/mononaut/ln-penalty-scan-optimization
Optimize force close penalty scans
2022-11-24 16:06:39 +09:00
wiz
92538b1a48 Merge branch 'master' into mononaut/ln-penalty-scan-optimization 2022-11-24 15:55:33 +09:00
wiz
fa519a0d8f Merge pull request #2728 from mempool/simon/relative-import-fix
Change imports to relative paths
2022-11-24 14:47:29 +09:00
softsimon
da10b36524 Change imports to relative paths 2022-11-24 12:19:19 +09:00
Mononaut
c2b6316c8b Fix "Genesis" header while block page is loading 2022-11-23 19:45:13 +09:00
Mononaut
6ada839282 reduce forensics throttle delay from 100ms to 20ms 2022-11-23 19:32:14 +09:00
softsimon
7de068368c Update backend/src/tasks/lightning/forensics.service.ts 2022-11-23 19:24:41 +09:00
wiz
0d797ff7fd Merge branch 'master' into mononaut/ln-penalty-scan-optimization 2022-11-23 19:14:20 +09:00
wiz
fe8cdb5867 Merge pull request #2514 from mempool/junderw/psbt-complete-inputs
Feature: Add endpoint for PSBT nonWitnessUtxo inclusion
2022-11-23 19:13:20 +09:00
softsimon
74dbd6cee1 Add support for application/base64 content type 2022-11-23 18:43:37 +09:00
softsimon
0b7182715f Merge pull request #2723 from mempool/ops/fix-nginx-redirects-liquid
Fix nginx redirects for /liquid etc.
2022-11-23 14:11:37 +09:00
wiz
e08902b85b Fix nginx redirects for /liquid etc. 2022-11-23 14:09:54 +09:00
Mononaut
7d3ec63335 move long-running forensics scans to separate service, throttle backend calls 2022-11-23 10:38:24 +09:00
softsimon
584f443f56 Adding new getTransactionHex api 2022-11-22 21:45:05 +09:00
softsimon
4f3296566a Make api available on all backends 2022-11-22 19:08:09 +09:00
wiz
1309a63430 Merge branch 'master' into junderw/psbt-complete-inputs 2022-11-22 18:57:36 +09:00
wiz
ca33a629cf Merge pull request #2721 from mononaut/fix-squashed-flow-diagram
fix squashed tx flow diagram
2022-11-22 18:57:22 +09:00
wiz
311774103e Merge branch 'master' into fix-squashed-flow-diagram 2022-11-22 18:39:48 +09:00
wiz
e72cdb42e8 Merge pull request #2679 from mononaut/limit-transaction-list-rows
"show more" instead of "show all"  button for transaction inputs/outputs
2022-11-22 18:39:42 +09:00
wiz
6f807b7a2c Merge branch 'master' into limit-transaction-list-rows 2022-11-22 18:24:01 +09:00
wiz
7f83b4be28 Merge pull request #2711 from mempool/nymkappa/bugfix/404-ftx-not-found
Remove FTX from the price feeds
2022-11-22 18:23:49 +09:00
wiz
802d38c363 Merge branch 'master' into nymkappa/bugfix/404-ftx-not-found 2022-11-22 18:18:14 +09:00
wiz
38311e191b Merge pull request #2686 from hunicus/add-advanced-and-2q
Add new faqs (timestamps and fee ranges)
2022-11-22 18:18:06 +09:00
wiz
a1c5769d0d Merge branch 'master' into add-advanced-and-2q 2022-11-22 18:08:50 +09:00
Mononaut
01a727a344 fix stray space, automatically show more outputs if <5 remaining 2022-11-22 18:08:47 +09:00
Mononaut
6cd1f9e870 Fix load more inputs for non-esplora backends 2022-11-22 18:08:01 +09:00
Mononaut
d107286344 Load 1000 more inputs/outputs per click. Fix label i18n. 2022-11-22 18:08:01 +09:00
Mononaut
330ab9682b "show more" instead of "show all" txos in lists 2022-11-22 18:08:01 +09:00
wiz
2b94849881 Merge branch 'master' into junderw/psbt-complete-inputs 2022-11-22 17:59:50 +09:00
wiz
37bf67aa38 Merge pull request #2654 from mempool/nymkappa/bugfix/node-count
Only show active nodes in ISP page
2022-11-22 17:42:24 +09:00
wiz
28d5ec34b3 Merge branch 'master' into nymkappa/bugfix/node-count 2022-11-22 17:25:23 +09:00
wiz
eeea6cd9c8 Merge pull request #2708 from mempool/simon/support-maxmind-lite
Support Maxmind Lite
2022-11-22 17:25:09 +09:00
Mononaut
7bafeefa95 fix squashed tx flow diagram 2022-11-22 17:07:03 +09:00
wiz
dc86f41e03 Merge branch 'master' into simon/support-maxmind-lite 2022-11-22 17:02:19 +09:00
wiz
2f7aacaf3b Merge pull request #2716 from mononaut/rtl-flow-diagram
Reverse the direction of the flow diagram for RTL locales
2022-11-22 17:02:09 +09:00
wiz
446d76980a Merge branch 'master' into rtl-flow-diagram 2022-11-22 16:44:29 +09:00
wiz
92dbba64e6 Merge pull request #2706 from mononaut/fix-tx-preview-alignment
Fix tx preview alignment
2022-11-22 16:44:10 +09:00
Mononaut
43bb3aa50b align elements of tx preview 2022-11-22 16:32:09 +09:00
Mononaut
5198cc51dc ellipsis for long op_return messages in tx preview 2022-11-22 16:32:09 +09:00
wiz
56e00d7ea9 Merge pull request #2705 from mononaut/flow-diagram-zero-value
better representation of zero-value outputs in flow diagram
2022-11-22 16:27:43 +09:00
softsimon
5e72ecfdc9 Support Maxmind Lite
fixes #2553
2022-11-22 16:13:27 +09:00
Mononaut
6c1457e257 Reverse tx flow diagram for RTL locales 2022-11-22 16:00:19 +09:00
Mononaut
7e01a22265 fix rendering of many zero value outputs 2022-11-22 15:56:55 +09:00
Mononaut
cb7e25d646 disconnect zero value outputs from flow diagram 2022-11-22 15:56:55 +09:00
wiz
2653e7bf39 Merge pull request #2700 from hunicus/add-nunchuk
Add nunchuk to community integrations
2022-11-22 15:23:20 +09:00
wiz
d8d8a52445 Merge branch 'master' into add-nunchuk 2022-11-22 15:16:27 +09:00
wiz
3e50941351 Merge pull request #2698 from mononaut/tx-selection-threading
enable new tx selection algorithm in own thread with config setting
2022-11-22 15:16:11 +09:00
Mononaut
b9a761fb88 add ADVANCED_TRANSACTION_SELECTION default to config test 2022-11-22 15:10:24 +09:00
Mononaut
b1d490972b refactor async mempool/block update callbacks 2022-11-22 14:43:58 +09:00
Mononaut
786d73625a guard new tx selection algo behind config setting 2022-11-22 14:43:58 +09:00
Mononaut
08ad6a0da3 move new tx selection algorithm into thread worker 2022-11-22 14:43:55 +09:00
wiz
38cb45e026 Merge pull request #2664 from mononaut/block-audit-db-migration
db migration to clear obsolete block audit data
2022-11-22 14:40:33 +09:00
Mononaut
24dba5a2ef Bump db migration query timeout to 900s 2022-11-22 14:25:57 +09:00
Mononaut
a32f960c4a db migration to clear obsolete audit data 2022-11-22 14:07:29 +09:00
wiz
9345b1609f Merge pull request #2649 from mononaut/flow-diagram-spent-connectors
Extend flow diagram to differentiate spent and unspent TXOs
2022-11-22 13:52:22 +09:00
wiz
4abd77fe31 Merge branch 'master' into flow-diagram-spent-connectors 2022-11-22 13:42:33 +09:00
wiz
a9760326f2 Merge pull request #2694 from mononaut/ln-channel-distance
calculate & show avg channel distance on node page
2022-11-22 12:14:04 +09:00
Mononaut
ed184824d4 calculate & show avg channel distance on node page 2022-11-22 11:59:15 +09:00
nymkappa
9d5717f30d Make sure we handle all ISP IDs in the queried list 2022-11-22 11:58:16 +09:00
wiz
547b60fce7 Merge pull request #2689 from mononaut/fix-block-viz-resize
resize block visualization instantly on window zoom and resize
2022-11-22 11:51:51 +09:00
wiz
b7bf2ec666 Merge branch 'master' into fix-block-viz-resize 2022-11-22 11:40:53 +09:00
wiz
9b5d8fdad6 Merge pull request #2687 from hunicus/add-big-disclaimer
Add general-purpose disclaimer to top of faq
2022-11-22 11:40:47 +09:00
wiz
782d4b391b Merge branch 'master' into add-big-disclaimer 2022-11-22 11:29:38 +09:00
wiz
19e778c4b5 Merge pull request #2684 from hunicus/fix-python-tab
Only show python example tab on ws tab
2022-11-22 11:29:29 +09:00
wiz
4bc5de306a Merge branch 'master' into fix-python-tab 2022-11-22 11:14:45 +09:00
wiz
47c61842f5 Merge pull request #2681 from mempool/nymkappa/feature/rename-mining-pool
Add support for renaming a mining pool without changing regex or address
2022-11-22 11:10:52 +09:00
nymkappa
672001af72 Update mining pool color 2022-11-22 11:03:28 +09:00
wiz
5da8f2b6dc Merge branch 'master' into nymkappa/feature/rename-mining-pool 2022-11-22 10:54:20 +09:00
Mononaut
9df0e602d3 longer input/output connectors on flow diagram & new nav logic 2022-11-22 10:40:41 +09:00
wiz
8a367fc6fd Merge pull request #2675 from knorrium/update_docker_action
Update Docker GHA dependencies
2022-11-22 10:38:59 +09:00
wiz
a33562a47a Merge pull request #2678 from mononaut/fix-tx-navigation-bug
fix error when navigating to huge transactions
2022-11-22 10:38:42 +09:00
wiz
fc7024351e Merge branch 'master' into fix-tx-navigation-bug 2022-11-22 10:28:54 +09:00
wiz
d3d4f93f85 Merge branch 'master' into update_docker_action 2022-11-22 10:26:47 +09:00
Mononaut
14ec427f5e Mouse events for flow diagram endcaps & connectors 2022-11-22 10:11:55 +09:00
Mononaut
2c1f38aa9d Fix clash w/ liquid unblinding and vin/vout syntax 2022-11-22 10:11:54 +09:00
Mononaut
eb2abefabc Add shapes to flow diagram to indicate spent txos 2022-11-22 10:11:54 +09:00
wiz
90912af62d Merge pull request #2671 from mononaut/fix-block-summary-vsize
Fix rounded vsize in block summaries
2022-11-21 21:29:13 +09:00
wiz
adcc1ba4f0 Merge branch 'master' into fix-block-summary-vsize 2022-11-21 21:03:45 +09:00
wiz
a0b6719105 Merge pull request #2670 from mononaut/expose-node-tlv-data
Show node tlv data & liquidity ads
2022-11-21 21:03:29 +09:00
Mononaut
c2ab0bc715 Parse & display liquidity ads on node page 2022-11-21 20:27:05 +09:00
Mononaut
010e9f2bb1 Display extension TLV data on node page 2022-11-21 20:27:05 +09:00
Mononaut
373e02a5b0 Store & expose node extension TLV data in backend 2022-11-21 20:27:03 +09:00
wiz
d36b239dbe Merge pull request #2667 from mononaut/scan-for-penalty-txs
Rescan unresolved LN channel force closes
2022-11-21 20:19:05 +09:00
wiz
eb03fc18ad Merge branch 'master' into scan-for-penalty-txs 2022-11-21 19:19:22 +09:00
wiz
a7c511fc1c Merge pull request #2663 from mononaut/block-audit-tweaks
Block audit tweaks
2022-11-21 19:17:54 +09:00
Mononaut
5b6f713ef3 Fetch missing block audit scores 2022-11-21 18:45:34 +09:00
Mononaut
1b3bc0ef4e Handle block height or hash in audit page 2022-11-21 18:43:52 +09:00
Mononaut
2022d3f6d5 Block audit UX adjustments 2022-11-21 18:43:52 +09:00
Mononaut
695d81a3f6 Fix block audit skeleton loaders 2022-11-21 18:43:52 +09:00
Mononaut
29f7c89c53 Tweak block audit algo to reduce false positives 2022-11-21 18:43:52 +09:00
wiz
7232c4755d Merge branch 'master' into nymkappa/bugfix/node-count 2022-11-21 18:19:53 +09:00
wiz
88fa6bffb5 Merge pull request #2617 from knorrium/frontend_runtime_config
Initial frontend runtime config support
2022-11-21 18:04:03 +09:00
wiz
235ac204b4 Merge branch 'master' into frontend_runtime_config 2022-11-21 17:46:02 +09:00
wiz
e051758ca7 Merge pull request #2564 from mempool/junderw/search-blocktime
[Feature] Search for block by timestamp
2022-11-21 17:45:02 +09:00
wiz
be3acf8694 Merge branch 'master' into junderw/psbt-complete-inputs 2022-11-21 17:34:26 +09:00
wiz
2020cd74e9 Merge pull request #2378 from mempool/simon/disable-mempool-config
Disable mempool config
2022-11-21 17:31:09 +09:00
softsimon
67cbbda04b Set mempool enabled to false in production. 2022-11-21 17:26:56 +09:00
wiz
5957b71774 Merge branch 'master' into simon/disable-mempool-config 2022-11-21 17:23:34 +09:00
wiz
b0198de7e8 Merge pull request #2337 from mempool/simon/updated-mempool-debug-output
Updated mempool debug log
2022-11-21 17:21:52 +09:00
wiz
8cc252642b Merge branch 'master' into simon/updated-mempool-debug-output 2022-11-21 17:12:07 +09:00
wiz
5e5daca600 Merge pull request #2547 from mempool/simon/search-bar-click-outside
Click to close search dropdown
2022-11-21 16:58:17 +09:00
wiz
cfb4fdb7a4 Merge branch 'master' into simon/search-bar-click-outside 2022-11-21 16:47:21 +09:00
Stephan Oeste
5d95eb475e Add symlink to bitcoin config for user cln in prod install 2022-11-20 14:55:00 +01:00
Stephan Oeste
c57542c8ae Add command line options to cln on FreeBSD in prod install 2022-11-20 14:39:17 +01:00
Stephan Oeste
dbc2d752bc Mysql user creation fix in prod install 2022-11-20 12:48:55 +01:00
nymkappa
7c7273b696 Remove FTX from the price feeds 2022-11-20 19:23:51 +09:00
softsimon
34500f7d47 Merge pull request #2703 from mempool/simon/local-esplora-proxy
Proxy config for running esplora locally
2022-11-20 15:09:30 +09:00
softsimon
f18226bd01 Proxy config for running esplora locally 2022-11-20 12:35:32 +09:00
Mononaut
c1e741a025 Rescan unresolved LN channel force closes 2022-11-19 17:30:56 +09:00
hunicus
2a6ac4a5da Adjust image sizes to avoid dangling image 2022-11-16 23:53:01 -05:00
hunicus
34d5a2f9c0 Add nunchuk to community integrations 2022-11-16 23:39:58 -05:00
Mononaut
3654178c83 resize block visualization instantly on zoom 2022-11-14 12:11:21 -06:00
hunicus
5df54b6b3e Add general-purpose disclaimer to top of faq 2022-11-14 00:57:39 -05:00
hunicus
8bd3e14652 Add faq: why block fee ranges don't match tx fees 2022-11-13 22:38:36 -05:00
hunicus
ddcd387848 Add faq: why timestamps don't always increase 2022-11-13 22:36:46 -05:00
hunicus
ef27aca6e4 Update faq: what is full mempool 2022-11-13 22:34:15 -05:00
hunicus
997e8a4624 Create "advanced" + "self-hosted" faq categories
And re-arrange questions: move all old "advanced"
questions to "self-hosted", and move some "basic"
questions to "advanced".
2022-11-13 22:30:05 -05:00
hunicus
d65f267122 Only show python example tab on ws tab 2022-11-11 18:12:35 -05:00
nymkappa
d32d97fbaf Add support for renaming a mining pool without changing regex or addresses 2022-11-09 06:43:46 +01:00
Mononaut
65bfe8163c fix error when navigating to huge transactions 2022-11-07 20:05:33 -06:00
Felipe Knorr Kuhn
b069196c27 Merge branch 'master' into junderw/psbt-complete-inputs 2022-11-07 07:21:24 -08:00
Felipe Knorr Kuhn
38255a5452 Merge branch 'master' into simon/search-bar-click-outside 2022-11-07 07:21:19 -08:00
Felipe Knorr Kuhn
48e2df3f7a Merge branch 'master' into junderw/search-blocktime 2022-11-07 07:21:14 -08:00
Felipe Knorr Kuhn
4fc355a05d Merge branch 'master' into frontend_runtime_config 2022-11-07 07:11:45 -08:00
Felipe Knorr Kuhn
7c6349f2ba Merge branch 'master' into update_docker_action 2022-11-07 07:11:26 -08:00
Felipe Knorr Kuhn
899d6558ec Merge branch 'master' into fix-block-summary-vsize 2022-11-07 07:11:22 -08:00
Felipe Knorr Kuhn
dd5a1847d0 Merge branch 'master' into update_gha 2022-11-07 07:11:00 -08:00
softsimon
02820b0e68 Merge pull request #2674 from knorrium/update_staging_hosts
Update staging hosts for testing
2022-11-07 18:37:11 +04:00
wiz
4bb6a3800c Merge pull request #2676 from mempool/ops/fix-nvidia-package-name
[ops] Fix nvidia-driver package name
2022-11-07 15:45:25 +09:00
wiz
b6d4e6b993 [ops] Fix nvidia-driver package name 2022-11-07 15:44:25 +09:00
Felipe Knorr Kuhn
de46f7c10e Update Docker GHA dependencies 2022-11-06 21:06:16 -08:00
Felipe Knorr Kuhn
69a36e17a8 Update staging hosts for testing 2022-11-06 20:30:38 -08:00
Felipe Knorr Kuhn
06eeaf68e8 Merge branch 'master' into frontend_runtime_config 2022-11-06 18:18:11 -08:00
softsimon
f789334d47 Merge pull request #2673 from mempool/simon/use-relative-paths-import
Use relative import paths in the frontend
2022-11-07 04:30:42 +04:00
softsimon
387a51d87e Use relative import paths in the frontend 2022-11-07 04:28:23 +04:00
wiz
64426fa9c9 Merge pull request #2646 from Emzy/ops/zero-base-fee
Configure zero base fee as default for Core Lightning
2022-11-06 20:36:45 +09:00
Mononaut
9c6799e193 Fix rounded vsize in block summaries 2022-11-04 10:37:14 -06:00
wiz
8d6a0f867b Merge branch 'master' into frontend_runtime_config 2022-10-31 15:08:18 +09:00
wiz
057456504c Merge pull request #2662 from mononaut/block-audit-feature
Block audit feature
2022-10-31 15:07:35 +09:00
wiz
45273f9309 Merge branch 'master' into block-audit-feature 2022-10-31 14:46:05 +09:00
wiz
2cbb7231a7 Merge pull request #2621 from mononaut/projected-block-templates
WIP: new transaction selection algorithm & scoring for block audits
2022-10-31 14:44:55 +09:00
wiz
bee573fdb8 Merge branch 'master' into projected-block-templates 2022-10-31 13:04:25 +09:00
wiz
12bd89dade Merge pull request #2659 from hunicus/add-electrum-docs
Add electrum rpc doc tab for official instance
2022-10-31 13:04:02 +09:00
wiz
e24fd8e275 Merge branch 'master' into add-electrum-docs 2022-10-31 11:35:42 +09:00
wiz
8c4a8f3a71 Merge pull request #2652 from mononaut/fix-unfurler-stray-slashes
tolerate trailing slash in unfurler requests
2022-10-31 11:15:57 +09:00
hunicus
38ec5ef957 Position docs footer on bottom
For short docs pages (like electrum rpc).
2022-10-30 13:08:26 -04:00
hunicus
dbb6f267f4 Add electrum rpc port numbers and update note 2022-10-30 12:39:20 -04:00
wiz
23a4ab461e Merge branch 'master' into fix-unfurler-stray-slashes 2022-10-30 02:05:17 +09:00
Mononaut
b657eb4e7d Add match rate to blocks list page 2022-10-28 19:02:36 -06:00
Mononaut
f3eb403c17 Add match rate to block page 2022-10-28 18:49:29 -06:00
Mononaut
b6343ddc2d Clean up block audit page & tweak color scheme 2022-10-28 18:49:28 -06:00
Mononaut
d86f045150 differentiate censored/missing txs in block audit 2022-10-28 18:49:28 -06:00
Mononaut
e2e50ac6bf Fix block audit mobile toggle buttons 2022-10-28 18:48:47 -06:00
Mononaut
6d28259515 disable block audits unless indexing is enabled 2022-10-28 15:16:03 -06:00
Mononaut
968d7b827b Optimize makeBlockTemplates 2022-10-27 10:25:16 -06:00
Mononaut
832ccdac46 improve audit analysis and scoring 2022-10-27 10:25:15 -06:00
Mononaut
39afa4cda1 Fix errors in block audit tx selection algorithm 2022-10-27 10:25:15 -06:00
Mononaut
702ff2796a New projected block transaction selection algo 2022-10-27 10:25:15 -06:00
hunicus
cb576ce601 Add electrum rpc doc tab for official instance 2022-10-26 12:33:13 -04:00
softsimon
e14fff45d6 Merge pull request #2655 from mononaut/fix-tv-ltr
Fix mirrored blocks in TV view in LTR time mode
2022-10-23 23:05:33 +04:00
Felipe Knorr Kuhn
a28544d046 Update Cypress GHA to use newer checkout and setup-node actions 2022-10-22 13:20:16 -07:00
Felipe Knorr Kuhn
847aa1ba13 Merge branch 'master' into frontend_runtime_config 2022-10-22 10:28:35 -07:00
Mononaut
58371bbd7d Fix mirrored blocks in TV view in LTR time mode 2022-10-22 16:16:32 +00:00
softsimon
f3faf99c15 Merge pull request #2651 from mononaut/fix-close-channel-id
Fix lightning channel close classification logic
2022-10-21 23:58:00 +04:00
softsimon
a5c4f8e2f3 Adding migration to force rescan of closed channels 2022-10-21 23:42:37 +04:00
nymkappa
27c39ef557 Only show active nodes in ISP page 2022-10-21 20:09:20 +02:00
softsimon
9e0a91efd2 Updating Docker README 2022-10-21 21:08:48 +04:00
softsimon
601a559784 Adding MEMPOOL.ENABLED config to Docker 2022-10-21 21:08:48 +04:00
softsimon
0e0ac363cf Updating unit test 2022-10-21 21:08:48 +04:00
softsimon
b31642e554 Disable mempool config
fixes #2090
2022-10-21 21:08:48 +04:00
softsimon
5f87cc6d37 Merge pull request #2650 from mononaut/fix-negative-taproot-savings
Fix negative potential taproot savings handling
2022-10-21 18:40:47 +04:00
softsimon
b89d526379 Update frontend/src/app/components/tx-features/tx-features.component.html 2022-10-21 18:37:31 +04:00
softsimon
67429d83b5 Merge pull request #2645 from mempool/simon/transifex-pull
Pulled from transifex
2022-10-21 18:22:18 +04:00
softsimon
5c6060780b Merge pull request #2627 from mononaut/fix-coinbase-flow-nav
Fix flow diagram navigation for coinbases & peg-ins
2022-10-21 16:56:04 +04:00
Mononaut
06a89bc1a7 tolerate extra '/'s in unfurler requests 2022-10-20 21:13:13 +00:00
Mononaut
022785a555 Fix ln close classification logic 2022-10-20 17:59:36 +00:00
softsimon
69baf97445 Pulled from transifex 2022-10-20 14:42:12 +04:00
Mononaut
04fa08085d Fix negative potential taproot savings tooltip 2022-10-19 21:26:35 +00:00
softsimon
9bb897307f Update README.md 2022-10-18 03:00:17 +04:00
Stephan Oeste
f3c947685a Configure zero base fee as default for Core Lightning 2022-10-17 18:53:46 +02:00
softsimon
dffe9fa4e6 Merge pull request #2587 from mempool/simon/network-match-ending-fix
Handle network url ending matching better
2022-10-17 12:55:29 +04:00
Mononaut
20bef70390 Fix flow diagram navigation for coinbases & pegins 2022-10-16 22:44:48 +00:00
Mononaut
ae9439a991 vin/vout selection syntax via url fragments 2022-10-16 22:42:38 +00:00
Felipe Knorr Kuhn
9964f1ab14 Stop using the cache busting config loader 2022-10-16 15:19:44 -07:00
Felipe Knorr Kuhn
f27abb1421 Change ownership of /var/www/mempool to the Docker user 2022-10-16 14:41:39 -07:00
wiz
ee6766e34c Merge pull request #2599 from mempool/nymkappa/bugfix/log-counter
Increment log counter in `Building partial channels` log
2022-10-17 04:35:09 +09:00
wiz
76764936f9 Merge branch 'master' into nymkappa/bugfix/log-counter 2022-10-17 04:25:27 +09:00
wiz
596c7afecb Merge pull request #2639 from mempool/simon/instant-search-results
Handle instant block, txid and address search
2022-10-17 04:22:10 +09:00
wiz
ffad5e2a30 Merge branch 'master' into simon/instant-search-results 2022-10-17 03:48:35 +09:00
wiz
8da476c48c Merge pull request #2626 from mononaut/save-flow-preference
Save flow diagram display preference to localStorage
2022-10-17 03:47:07 +09:00
Felipe Knorr Kuhn
5bfc8a9d58 Use a single command to find the config path 2022-10-16 08:40:22 -07:00
Felipe Knorr Kuhn
670f85b1f5 Copy the sample config before building the frontend 2022-10-16 08:39:44 -07:00
softsimon
82a4212b72 Click to close search dropdown 2022-10-16 12:54:29 +04:00
Felipe Knorr Kuhn
cfa8a9a7d6 Update the Docker frontend startup script to read and replace runtime config values 2022-10-15 19:46:30 -07:00
Felipe Knorr Kuhn
b77fe0dca2 Change template keys in generate-config script 2022-10-15 19:45:15 -07:00
Felipe Knorr Kuhn
81d35d9401 Update nginx cache settings for the frontend config files 2022-10-15 19:44:34 -07:00
wiz
2742acf6ee Merge branch 'master' into save-flow-preference 2022-10-16 08:11:03 +09:00
softsimon
8a2b144e29 Altering bc1 regex matching to correctly require 39 characters 2022-10-16 02:46:04 +04:00
softsimon
3e66e4d6db Handle instant block, txid and address search
fixes #2619
2022-10-16 02:46:04 +04:00
wiz
61e8892204 Merge pull request #2629 from mononaut/fix-preview-flow-highlight-bug
Fix transaction preview flow diagram highlight bug
2022-10-16 07:38:49 +09:00
wiz
543c4feaf9 Merge branch 'master' into fix-preview-flow-highlight-bug 2022-10-16 07:29:50 +09:00
wiz
992ea6da3c Merge pull request #2631 from mononaut/subnet-navigation
Maintain routing when switching network
2022-10-16 07:29:39 +09:00
wiz
f3cfc7f80b Merge branch 'master' into subnet-navigation 2022-10-16 07:13:24 +09:00
wiz
4c170b08f4 Merge pull request #2642 from mononaut/ln-fee-distribution
Add incoming/outgoing fee histogram to node page
2022-10-16 07:12:45 +09:00
wiz
d3b3c7df21 Merge branch 'master' into subnet-navigation 2022-10-16 06:55:04 +09:00
Mononaut
893aa03622 Add fee histogram chart to the node page 2022-10-15 01:19:45 +00:00
Mononaut
f4df51dd21 API method for node fee histogram data 2022-10-15 00:57:34 +00:00
wiz
3e41e512ad Merge branch 'master' into frontend_runtime_config 2022-10-15 04:28:56 +09:00
softsimon
7bdde13b40 Merge pull request #2625 from mononaut/fix-conf-badge-alignment
Fix tx confirmation badge alignment regression
2022-10-13 18:31:22 +04:00
softsimon
7ec0e3ac86 Merge pull request #2636 from mempool/ln-i18n-take-5
More Lightning i18n fixes (4)
2022-10-13 18:13:21 +04:00
softsimon
02340d57dd More Lightning i18n fixes (4) 2022-10-13 18:12:29 +04:00
softsimon
1e6ea0b5f5 Merge pull request #2635 from mempool/simon/ln-i18n-take-4
Lightning pie chart i18n tooltip fix
2022-10-13 18:10:17 +04:00
softsimon
5d9bcce5cd Lightning pie chart i18n tooltip fix 2022-10-13 18:09:56 +04:00
softsimon
39dd8ebe07 Merge pull request #2634 from mempool/simon/lightning-i18n-3rd-fix
More Lightning i18n fixes (2)
2022-10-13 17:51:51 +04:00
softsimon
e5ec152002 More Lightning i18n fixes (2) 2022-10-13 17:51:28 +04:00
softsimon
e77f48abd4 Merge pull request #2633 from mempool/simon/more-i18n-fixes
More Lightning i18n fixes
2022-10-13 17:40:34 +04:00
softsimon
3b692d05bc More Lightning i18n fixes 2022-10-13 17:40:13 +04:00
softsimon
6895eb0b05 Merge pull request #2632 from mempool/simon/i18n-corrections
Correcting i18n strings related to Lightning explorer
2022-10-13 17:15:40 +04:00
softsimon
61333b2286 Correcting i18n strings related to Lightning explorer 2022-10-13 17:15:17 +04:00
softsimon
1240a3f115 Merge pull request #2614 from mempool/simon/lightning-i18n
Correcting all Lightning explorer i18n and extract
2022-10-13 14:42:22 +04:00
Mononaut
f70ff9b402 Maintain page when switching networks 2022-10-12 23:04:47 +00:00
Mononaut
5cdb0c5ce9 Fix tx preview flow diagram highlight bug 2022-10-11 23:06:46 +00:00
Mononaut
3971814710 Save flow diagram preference to localStorage 2022-10-11 17:02:35 +00:00
Mononaut
c5d4e86e0e Fix tx confirmation badge alignment regression 2022-10-11 16:17:17 +00:00
Felipe Knorr Kuhn
ad7e7795f9 Update index files to read the new config file 2022-10-08 14:58:09 -07:00
Felipe Knorr Kuhn
71e00f66c9 Update config generator to output the template and new config file 2022-10-08 14:51:32 -07:00
Felipe Knorr Kuhn
5d21a61840 Serve the frontend config from resources, stop bundling the generated file 2022-10-08 13:48:29 -07:00
Felipe Knorr Kuhn
8ef88e9f39 Ignore the new config files 2022-10-08 13:47:33 -07:00
wiz
ddb1e97ce0 Merge pull request #2612 from mempool/simon/show-ln-capacity-on-mobile
Display LN capacity on mobile
2022-10-09 02:47:05 +09:00
wiz
b638719e72 Merge branch 'master' into simon/show-ln-capacity-on-mobile 2022-10-09 02:38:26 +09:00
wiz
4fee471992 Merge pull request #2615 from mempool/simon/lightning-inverting-avg-toggles
Inverting med/avg toggle
2022-10-09 02:38:21 +09:00
wiz
5365f61121 Merge branch 'master' into simon/lightning-inverting-avg-toggles 2022-10-09 02:30:45 +09:00
wiz
4924c521a4 Merge pull request #2616 from mononaut/detailed-unfurler-logs
More detailed unfurler error logs
2022-10-09 02:30:38 +09:00
wiz
43d56a2121 Merge branch 'master' into detailed-unfurler-logs 2022-10-09 02:23:06 +09:00
wiz
bede502f2d Merge pull request #2601 from mononaut/fix-tx-marker
Fix transaction marker boundary condition
2022-10-09 02:23:01 +09:00
wiz
6005bbea49 Merge branch 'master' into fix-tx-marker 2022-10-09 02:05:55 +09:00
wiz
3653e75810 Merge pull request #2603 from mononaut/rtl-language-time-default
Reverse time by default for RTL languages
2022-10-09 02:05:49 +09:00
wiz
66c99e2f3b Merge branch 'master' into rtl-language-time-default 2022-10-09 01:56:32 +09:00
wiz
9876805bc3 Merge pull request #2606 from mononaut/flow-toggle-labels
Shorten transaction diagram toggle labels
2022-10-09 01:56:24 +09:00
wiz
d08e5e293c Merge branch 'master' into flow-toggle-labels 2022-10-09 01:46:53 +09:00
wiz
6635238934 Merge pull request #2607 from mononaut/fix-conf-badge-css
Fix transaction confirmation badge layout bug
2022-10-09 01:46:47 +09:00
wiz
001f7a4fd7 Merge branch 'master' into fix-conf-badge-css 2022-10-09 01:37:25 +09:00
wiz
3ba4fd454e Merge pull request #2611 from mempool/simon/time-toggle-ux-size
Updating time toggle size
2022-10-09 01:37:18 +09:00
wiz
52b2ee4f35 Merge branch 'master' into simon/time-toggle-ux-size 2022-10-09 01:29:29 +09:00
wiz
bfac856eb2 Merge pull request #2605 from mononaut/flow-diagram-interactivity
Transaction diagram interactivity & navigation
2022-10-09 01:28:39 +09:00
wiz
42dec95738 Merge branch 'master' into flow-diagram-interactivity 2022-10-09 01:04:00 +09:00
wiz
6eacbf80d8 Merge pull request #2602 from mononaut/fix-reverse-time-scrollability
Enable block scrolling in LTR time mode
2022-10-09 01:02:58 +09:00
Mononaut
be7e2c2c80 Add requested url to unfurler error logs 2022-10-07 16:50:26 +00:00
softsimon
e428565d50 Inverting med/avg toggle 2022-10-07 01:20:32 +04:00
softsimon
50cc424679 Correcting all Lightning explorer i18n and extract
fixes #2533
2022-10-07 00:54:33 +04:00
softsimon
d288da1e18 Display LN capacity on mobile
fixes #2610
2022-10-06 21:12:23 +04:00
softsimon
0c1993e264 Updating time toggle size 2022-10-06 20:33:54 +04:00
Mononaut
75fd036ec2 Highlight url input/output in tx diagram & list 2022-10-04 23:49:26 +00:00
Mononaut
626a1a2977 Navigate to vin/vout page on diagram click 2022-10-04 23:36:36 +00:00
Mononaut
d1cedbb981 Fix tx confirmation badge layout bug 2022-10-04 22:09:32 +00:00
Mononaut
0df796f873 Shorten tx diagram toggle labels 2022-10-04 21:14:54 +00:00
Mononaut
c10ace8fb5 Scroll to input/output when clicked in tx diagram 2022-10-04 21:04:52 +00:00
Mononaut
5d3ee50bca Reverse time by default for RTL languages 2022-10-03 22:07:59 +00:00
Mononaut
be2b72eea7 Enable block scrolling in ltr time mode 2022-10-03 21:44:55 +00:00
Mononaut
1af38456f3 Fix tx marker boundary condition 2022-10-03 16:57:15 +00:00
nymkappa
0a4c1c24af Fixes #2592 2022-09-30 19:10:11 +02:00
wiz
54c44565fb Merge pull request #2596 from mempool/simon/flip-icon-change
Updating flip icon
2022-09-30 19:09:10 +09:00
softsimon
b86d8bd836 Updating flip icon 2022-09-30 13:57:34 +04:00
wiz
5610afde36 Merge pull request #2589 from mononaut/node-group-preview
Add preview for lightning group pages
2022-09-30 18:51:58 +09:00
wiz
d88e12fc6e Merge pull request #2590 from mononaut/pool-preview-logo
Move logo to LHS on mining pool preview
2022-09-30 18:49:07 +09:00
wiz
95156eebd1 Merge pull request #2591 from mononaut/collapse-flow-param
Toggle option for transaction flow diagram with query param
2022-09-30 18:44:16 +09:00
wiz
8f0a9a9dd2 Merge pull request #2595 from mempool/simon/add-class-js-error-fix
Fix for add class js error
2022-09-30 18:42:23 +09:00
wiz
42189ec1a5 Merge pull request #2593 from mempool/simon/revert-btn-info-color
Revert "New button light color"
2022-09-30 18:33:13 +09:00
softsimon
f2889fc05c Fix for add class js error 2022-09-30 11:14:07 +04:00
wiz
6e235924d8 Merge pull request #2594 from mononaut/turn-back-time
Add a time turner toggle
2022-09-30 15:47:46 +09:00
Mononaut
15caef10d6 Fix liquid block divider position 2022-09-30 02:01:59 +00:00
Mononaut
21db64b2a5 Animate mempool block viz reversal 2022-09-30 00:54:30 +00:00
Mononaut
d07bf30737 Reversible blockchain components 2022-09-30 00:54:29 +00:00
Mononaut
135fbfc4f3 Reversible mempool block visualization 2022-09-30 00:54:29 +00:00
Mononaut
03c6a7c54f Reversible block arrows & key navigation 2022-09-30 00:54:20 +00:00
softsimon
9d3d3ed5f8 Revert "New button light color"
This reverts commit c79c1d9958.

# Conflicts:
#	frontend/src/styles.scss
2022-09-30 04:02:52 +04:00
Mononaut
619a6bd34d Toggle option for tx flow diagram w/ query param 2022-09-29 15:41:14 +00:00
Mononaut
d9c967b529 Move logo to LHS on mining pool preview 2022-09-29 15:36:38 +00:00
Mononaut
0e716165e5 Add preview for lightning group pages 2022-09-29 15:29:59 +00:00
softsimon
4154d3081d Handle network url ending matching better 2022-09-29 13:45:03 +04:00
junderw
5d1c5b51dd Fix: Add hash and reverse search order 2022-09-19 16:44:53 +09:00
junderw
19467de809 Backend: Add block height from timestamp endpoint 2022-09-18 22:30:09 +09:00
junderw
bd4cf980bd Spelling fix 2022-09-10 16:09:43 +09:00
junderw
9b1fc1e000 Fix response codes for various error states 2022-09-10 16:03:31 +09:00
Felipe Knorr Kuhn
90db8c15f2 Merge branch 'master' into junderw/psbt-complete-inputs 2022-09-09 21:23:14 -07:00
junderw
f062132636 Feature: Add endpoint for PSBT nonWitnessUtxo inclusion 2022-09-05 23:13:45 +09:00
softsimon
9c09c00fab Updated mempool debug log 2022-08-28 15:56:57 +02:00
463 changed files with 147711 additions and 40958 deletions

1 .github/CODEOWNERS vendored Normal file
View File

@@ -0,0 +1 @@
backend/src/api/database-migration.ts @wiz @softsimon

View File

@@ -1,20 +1,47 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: weekly
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
- package-ecosystem: npm
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
allow:
- dependency-type: "production"
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
allow:
- dependency-type: "production"
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
- package-ecosystem: docker
directory: "/docker/frontend"
schedule:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: weekly
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]

View File

@@ -9,7 +9,7 @@ jobs:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16.16.0", "18.5.0"]
node: ["16.16.0", "18.14.1"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"
@@ -55,7 +55,7 @@ jobs:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
strategy:
matrix:
node: ["16.15.0", "18.5.0"]
node: ["16.16.0", "18.14.1"]
flavor: ["dev", "prod"]
fail-fast: false
runs-on: "ubuntu-latest"

View File

@@ -1,8 +1,11 @@
name: Cypress Tests
on:
push:
branches: [master]
pull_request:
types: [ opened, review_requested, synchronize ]
types: [opened, synchronize]
jobs:
cypress:
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
@@ -24,36 +27,36 @@ jobs:
- module: "bisq"
spec: |
cypress/e2e/bisq/bisq.spec.ts
name: E2E tests for ${{ matrix.module }}
steps:
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v3
with:
path: ${{ matrix.module }}
- name: Setup node
uses: actions/setup-node@v2
uses: actions/setup-node@v3
with:
node-version: 16.15.0
cache: 'npm'
cache: "npm"
cache-dependency-path: ${{ matrix.module }}/frontend/package-lock.json
- name: Chrome browser tests (${{ matrix.module }})
uses: cypress-io/github-action@v4
uses: cypress-io/github-action@v5
with:
tag: ${{ github.event_name }}
working-directory: ${{ matrix.module }}/frontend
build: npm run config:defaults:${{ matrix.module }}
start: npm run start:local-staging
wait-on: 'http://localhost:4200'
wait-on: "http://localhost:4200"
wait-on-timeout: 120
record: true
parallel: true
spec: ${{ matrix.spec }}
group: Tests on Chrome (${{ matrix.module }})
browser: "chrome"
ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
ci-build-id: "${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}"
env:
COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}

26 .github/workflows/get_image_digest.yml vendored Normal file
View File

@@ -0,0 +1,26 @@
name: 'Print images digest'
on:
workflow_dispatch:
inputs:
version:
description: 'Image Version'
required: false
default: 'latest'
type: string
jobs:
print-images-sha:
runs-on: 'ubuntu-latest'
name: Print digest for images
steps:
- name: Checkout
uses: actions/checkout@v3
with:
path: digest
- name: Run script
working-directory: digest
run: |
sh ./docker/scripts/get_image_digest.sh $VERSION
env:
VERSION: ${{ github.event.inputs.version }}

View File

@@ -31,7 +31,7 @@ jobs:
run: |
sudo swapoff /mnt/swapfile
sudo rm -v /mnt/swapfile
sudo fallocate -l 10G /mnt/swapfile
sudo fallocate -l 13G /mnt/swapfile
sudo chmod 600 /mnt/swapfile
sudo mkswap /mnt/swapfile
sudo swapon /mnt/swapfile
@@ -68,24 +68,24 @@ jobs:
run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
- name: Checkout project
uses: actions/checkout@629c2de402a417ea7690ca6ce3f33229e27606a5 # v2
uses: actions/checkout@v3
- name: Init repo for Dockerization
run: docker/init.sh "$TAG"
- name: Set up QEMU
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # v1
uses: docker/setup-qemu-action@v2
id: qemu
- name: Setup Docker buildx action
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # v1
uses: docker/setup-buildx-action@v2
id: buildx
- name: Available platforms
run: echo ${{ steps.buildx.outputs.platforms }}
- name: Cache Docker layers
uses: actions/cache@661fd3eb7f2f20d8c7c84bc2b0509efd7a826628 # v2
uses: actions/cache@v3
id: cache
with:
path: /tmp/.buildx-cache

2 .gitignore vendored
View File

@@ -3,3 +3,5 @@ data
docker-compose.yml
backend/mempool-config.json
*.swp
frontend/src/resources/config.template.js
frontend/src/resources/config.js

View File

@@ -1,4 +1,5 @@
{
"editor.tabSize": 2,
"typescript.preferences.importModuleSpecifier": "relative",
"typescript.tsdk": "./backend/node_modules/typescript/lib"
}

View File

@@ -1,5 +1,5 @@
The Mempool Open Source Project
Copyright (c) 2019-2022 The Mempool Open Source Project Developers
Copyright (c) 2019-2023 The Mempool Open Source Project Developers
This program is free software; you can redistribute it and/or modify it under
the terms of (at your option) either:

View File

@@ -1,5 +1,7 @@
# The Mempool Open Source Project™ [![mempool](https://img.shields.io/endpoint?url=https://dashboard.cypress.io/badge/simple/ry4br7/master&style=flat-square)](https://dashboard.cypress.io/projects/ry4br7/runs)
https://user-images.githubusercontent.com/232186/222445818-234aa6c9-c233-4c52-b3f0-e32b8232893b.mp4
Mempool is the fully-featured mempool visualizer, explorer, and API service running at [mempool.space](https://mempool.space/).
It is an open-source project developed and operated for the benefit of the Bitcoin community, with a focus on the emerging transaction fee market that is evolving Bitcoin into a multi-layer ecosystem.

View File

@@ -160,7 +160,7 @@ npm install -g ts-node nodemon
Then, run the watcher:
```
nodemon src/index.ts --ignore cache/ --ignore pools.json
nodemon src/index.ts --ignore cache/
```
`nodemon` should be in npm's global binary folder. If needed, you can determine where that is with `npm -g bin`.
@@ -171,50 +171,84 @@ Helpful link: https://gist.github.com/System-Glitch/cb4e87bf1ae3fec9925725bb3ebe
Run bitcoind on regtest:
```
bitcoind -regtest -rpcport=8332
bitcoind -regtest
```
Create a new wallet, if needed:
```
bitcoin-cli -regtest -rpcport=8332 createwallet test
bitcoin-cli -regtest createwallet test
```
Load the wallet (this command may take a while if you have a lot of UTXOs):
```
bitcoin-cli -regtest -rpcport=8332 loadwallet test
bitcoin-cli -regtest loadwallet test
```
Get a new address:
```
address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress)
address=$(bitcoin-cli -regtest getnewaddress)
```
Mine blocks to the previously generated address. You need at least 101 blocks before you can spend. This will take some time to execute (~1 min):
```
bitcoin-cli -regtest -rpcport=8332 generatetoaddress 101 $address
bitcoin-cli -regtest generatetoaddress 101 $address
```
Send 0.1 BTC at 5 sat/vB to another address:
```
./src/bitcoin-cli -named -regtest -rpcport=8332 sendtoaddress address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress) amount=0.1 fee_rate=5
bitcoin-cli -named -regtest sendtoaddress address=$(bitcoin-cli -regtest getnewaddress) amount=0.1 fee_rate=5
```
See more examples of `sendtoaddress`:
```
./src/bitcoin-cli sendtoaddress # will print the help
bitcoin-cli sendtoaddress # will print the help
```
Mini script to generate transactions with random TX fee rates (between 1 and 100 sat/vB). It's slow, so don't expect to use it to test mempool spam unless you let it run for a long time, or maybe with multiple regtest nodes connected to each other.
Mini script to generate random network activity (a random TX count with random TX fee rates). It's slow, so don't expect to use it to test mempool spam unless you let it run for a long time, or maybe with multiple regtest nodes connected to each other.
```
#!/bin/bash
address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress)
address=$(bitcoin-cli -regtest getnewaddress)
bitcoin-cli -regtest generatetoaddress 101 $address
for i in {1..1000000}
do
./src/bitcoin-cli -regtest -rpcport=8332 -named sendtoaddress address=$address amount=0.01 fee_rate=$(jot -r 1 1 100)
for y in $(seq 1 "$(jot -r 1 1 1000)")
do
bitcoin-cli -regtest -named sendtoaddress address=$address amount=0.01 fee_rate=$(jot -r 1 1 100)
done
bitcoin-cli -regtest generatetoaddress 1 $address
sleep 5
done
```
Generate blocks at a regular interval (every 10 seconds in this example):
```
watch -n 10 "./src/bitcoin-cli -regtest -rpcport=8332 generatetoaddress 1 $address"
watch -n 10 "bitcoin-cli -regtest generatetoaddress 1 $address"
```
### Mining pools update
By default, mining pools are not updated automatically (`config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING` is set to `false`).
To manually update your mining pools, use the `--update-pools` command line flag when you run the nodejs backend, for example `npm run start --update-pools`. This triggers the mining pools update and automatically re-indexes the appropriate blocks.
You can enable automatic mining pool updates by setting `config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING` to `true` in your `mempool-config.json`, as shown in the sketch below.
When a `coinbase tag` or `coinbase address` change is detected, all blocks tagged to the `unknown` mining pool (starting from height 130635) are deleted from the `blocks` table. Additionally, all blocks tagged to the updated pool are also deleted from the `blocks` table. Of course, those blocks will be automatically reindexed.
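As a reference, a minimal sketch of the relevant `mempool-config.json` fragment (all other required keys omitted for brevity):
```
{
  "MEMPOOL": {
    "AUTOMATIC_BLOCK_REINDEXING": true
  }
}
```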
### Re-index tables
You can manually force the nodejs backend to drop all data from a specified set of tables for future re-indexing. This is mostly useful for the mining dashboard and the lightning explorer.
Use the `--reindex` flag to specify a comma-separated list of tables to be truncated at startup. Note that a 5 second delay is observed before the tables are truncated, giving you a chance to cancel (CTRL+C) in case of misuse.
Usage:
```
npm run start --reindex=blocks,hashrates
```
Example output:
```
Feb 13 14:55:27 [63246] WARN: <lightning> Indexed data for "hashrates" tables will be erased in 5 seconds (using '--reindex')
Feb 13 14:55:32 [63246] NOTICE: <lightning> Table hashrates has been truncated
```
Reference: https://github.com/mempool/mempool/pull/1269

View File

@@ -2,6 +2,7 @@
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"ENABLED": true,
"HTTP_PORT": 8999,
"SPAWN_CLUSTER_PROCS": 0,
"API_URL_PREFIX": "/api/v1/",
@@ -14,7 +15,6 @@
"MEMPOOL_BLOCKS_AMOUNT": 8,
"INDEXING_BLOCKS_AMOUNT": 11000,
"BLOCKS_SUMMARIES_INDEXING": false,
"PRICE_FEED_UPDATE_INTERVAL": 600,
"USE_SECOND_NODE_FOR_MINFEE": false,
"EXTERNAL_ASSETS": [],
"EXTERNAL_MAX_RETRY": 1,
@@ -22,8 +22,13 @@
"USER_AGENT": "mempool",
"STDOUT_LOG_MIN_PRIORITY": "debug",
"AUTOMATIC_BLOCK_REINDEXING": false,
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master"
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master",
"AUDIT": false,
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"CPFP_INDEXING": false,
"DISK_CACHE_BLOCK_INTERVAL": 6
},
"CORE_RPC": {
"HOST": "127.0.0.1",
@@ -80,7 +85,9 @@
"BACKEND": "lnd",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": 20
},
"LND": {
"TLS_CERT_PATH": "tls.cert",

3805 backend/package-lock.json generated

File diff suppressed because it is too large

View File

@@ -27,42 +27,42 @@
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps",
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=4096 dist/index.js",
"start-production": "node --max-old-space-size=16384 dist/index.js",
"test": "./node_modules/.bin/jest --coverage",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
},
"dependencies": {
"@babel/core": "^7.18.6",
"@babel/core": "^7.20.12",
"@mempool/electrum-client": "^1.1.7",
"@types/node": "^16.11.41",
"@types/node": "^16.18.11",
"axios": "~0.27.2",
"bitcoinjs-lib": "6.0.2",
"crypto-js": "^4.0.0",
"express": "^4.18.0",
"maxmind": "^4.3.6",
"mysql2": "2.3.3",
"node-worker-threads-pool": "^1.5.1",
"bitcoinjs-lib": "~6.1.0",
"crypto-js": "~4.1.1",
"express": "~4.18.2",
"maxmind": "~4.3.8",
"mysql2": "~2.3.3",
"node-worker-threads-pool": "~1.5.1",
"socks-proxy-agent": "~7.0.0",
"typescript": "~4.7.4",
"ws": "~8.8.0"
"ws": "~8.11.0"
},
"devDependencies": {
"@babel/core": "^7.18.6",
"@babel/core": "^7.20.7",
"@babel/code-frame": "^7.18.6",
"@types/compression": "^1.7.2",
"@types/crypto-js": "^4.1.1",
"@types/express": "^4.17.13",
"@types/jest": "^28.1.4",
"@types/ws": "~8.5.3",
"@typescript-eslint/eslint-plugin": "^5.30.5",
"@typescript-eslint/parser": "^5.30.5",
"eslint": "^8.19.0",
"@types/express": "^4.17.15",
"@types/jest": "^29.2.5",
"@types/ws": "~8.5.4",
"@typescript-eslint/eslint-plugin": "^5.48.1",
"@typescript-eslint/parser": "^5.48.1",
"eslint": "^8.31.0",
"eslint-config-prettier": "^8.5.0",
"jest": "^28.1.2",
"prettier": "^2.7.1",
"ts-jest": "^28.0.5",
"ts-node": "^10.8.2"
"jest": "^29.3.1",
"prettier": "^2.8.2",
"ts-jest": "^29.0.3",
"ts-node": "^10.9.1"
}
}

View File

@@ -1,12 +1,13 @@
{
"MEMPOOL": {
"ENABLED": true,
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"BLOCKS_SUMMARIES_INDEXING": true,
"HTTP_PORT": 1,
"SPAWN_CLUSTER_PROCS": 2,
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
"AUTOMATIC_BLOCK_REINDEXING": true,
"AUTOMATIC_BLOCK_REINDEXING": false,
"POLL_RATE_MS": 3,
"CACHE_DIR": "__MEMPOOL_CACHE_DIR__",
"CLEAR_PROTECTION_MINUTES": 4,
@@ -14,7 +15,6 @@
"BLOCK_WEIGHT_UNITS": 6,
"INITIAL_BLOCKS_AMOUNT": 7,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"PRICE_FEED_UPDATE_INTERVAL": 9,
"USE_SECOND_NODE_FOR_MINFEE": 10,
"EXTERNAL_ASSETS": 11,
"EXTERNAL_MAX_RETRY": 12,
@@ -23,7 +23,13 @@
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": 14,
"POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__",
"POOLS_JSON_URL": "__POOLS_JSON_URL__"
"POOLS_JSON_URL": "__POOLS_JSON_URL__",
"AUDIT": "__MEMPOOL_AUDIT__",
"ADVANCED_GBT_AUDIT": "__MEMPOOL_ADVANCED_GBT_AUDIT__",
"ADVANCED_GBT_MEMPOOL": "__MEMPOOL_ADVANCED_GBT_MEMPOOL__",
"CPFP_INDEXING": "__MEMPOOL_CPFP_INDEXING__",
"MAX_BLOCKS_BULK_QUERY": "__MEMPOOL_MAX_BLOCKS_BULK_QUERY__",
"DISK_CACHE_BLOCK_INTERVAL": "__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__"
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
@@ -95,7 +101,9 @@
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": "__FORENSICS_RATE_LIMIT__"
},
"LND": {
"TLS_CERT_PATH": "",

View File

@@ -23,9 +23,11 @@ describe('Mempool Difficulty Adjustment', () => {
remainingBlocks: 1834,
remainingTime: 977591692,
previousRetarget: 0.6280047707459726,
previousTime: 1660820820,
nextRetargetHeight: 751968,
timeAvg: 533038,
timeOffset: 0,
expectedBlocks: 161.68833333333333,
},
],
[ // Vector 2 (testnet)
@@ -43,11 +45,13 @@ describe('Mempool Difficulty Adjustment', () => {
estimatedRetargetDate: 1661895424692,
remainingBlocks: 1834,
remainingTime: 977591692,
previousTime: 1660820820,
previousRetarget: 0.6280047707459726,
nextRetargetHeight: 751968,
timeAvg: 533038,
timeOffset: -667000, // 11 min 7 seconds since last block (testnet only)
// If we add time avg to abs(timeOffset) it makes exactly 1200000 ms, or 20 minutes
expectedBlocks: 161.68833333333333,
},
],
] as [[number, number, number, number, string, number], DifficultyAdjustment][];

View File

@@ -13,6 +13,7 @@ describe('Mempool Backend Config', () => {
const config = jest.requireActual('../config').default;
expect(config.MEMPOOL).toStrictEqual({
ENABLED: true,
NETWORK: 'mainnet',
BACKEND: 'none',
BLOCKS_SUMMARIES_INDEXING: false,
@@ -28,7 +29,6 @@ describe('Mempool Backend Config', () => {
INITIAL_BLOCKS_AMOUNT: 8,
MEMPOOL_BLOCKS_AMOUNT: 8,
INDEXING_BLOCKS_AMOUNT: 11000,
PRICE_FEED_UPDATE_INTERVAL: 600,
USE_SECOND_NODE_FOR_MINFEE: false,
EXTERNAL_ASSETS: [],
EXTERNAL_MAX_RETRY: 1,
@@ -36,7 +36,13 @@ describe('Mempool Backend Config', () => {
USER_AGENT: 'mempool',
STDOUT_LOG_MIN_PRIORITY: 'debug',
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json'
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
AUDIT: false,
ADVANCED_GBT_AUDIT: false,
ADVANCED_GBT_MEMPOOL: false,
CPFP_INDEXING: false,
MAX_BLOCKS_BULK_QUERY: 0,
DISK_CACHE_BLOCK_INTERVAL: 6,
});
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
@@ -101,6 +107,13 @@ describe('Mempool Backend Config', () => {
BISQ_URL: 'https://bisq.markets/api',
BISQ_ONION: 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
});
expect(config.MAXMIND).toStrictEqual({
ENABLED: false,
GEOLITE2_CITY: '/usr/local/share/GeoIP/GeoLite2-City.mmdb',
GEOLITE2_ASN: '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
GEOIP2_ISP: '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
});
});
});

143 backend/src/api/audit.ts Normal file
View File

@@ -0,0 +1,143 @@
import config from '../config';
import { TransactionExtended, MempoolBlockWithTransactions } from '../mempool.interfaces';
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
class Audit {
auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
: { censored: string[], added: string[], fresh: string[], score: number, similarity: number } {
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
return { censored: [], added: [], fresh: [], score: 0, similarity: 1 };
}
const matches: string[] = []; // present in both mined block and template
const added: string[] = []; // present in mined block, not in template
const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
const isCensored = {}; // missing, without excuse
const isDisplaced = {};
let displacedWeight = 0;
let matchedWeight = 0;
let projectedWeight = 0;
const inBlock = {};
const inTemplate = {};
const now = Math.round((Date.now() / 1000));
for (const tx of transactions) {
inBlock[tx.txid] = tx;
}
// coinbase is always expected
if (transactions[0]) {
inTemplate[transactions[0].txid] = true;
}
// look for transactions that were expected in the template, but missing from the mined block
for (const txid of projectedBlocks[0].transactionIds) {
if (!inBlock[txid]) {
// tx is recent, may have reached the miner too late for inclusion
if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
fresh.push(txid);
} else {
isCensored[txid] = true;
}
displacedWeight += mempool[txid].weight;
} else {
matchedWeight += mempool[txid].weight;
}
projectedWeight += mempool[txid].weight;
inTemplate[txid] = true;
}
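// the projected template reserves (presumably) 4000 WU for the coinbase;
// credit the unused part of that reservation to the displaced-weight budget,
// and count the actual coinbase weight as both matched and projected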
displacedWeight += (4000 - transactions[0].weight);
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
// these displaced transactions should occupy the first N weight units of the next projected block
let displacedWeightRemaining = displacedWeight;
let index = 0;
let lastFeeRate = Infinity;
let failures = 0;
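// a next-block tx counts as 'displaced' if it fits within the freed weight
// budget (with ~4000 WU of slack) or pays at least the lowest fee rate
// accepted so far; give up after 500 consecutive non-matching txs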
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
const txid = projectedBlocks[1].transactionIds[index];
const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
if (fits || feeMatches) {
isDisplaced[txid] = true;
if (fits) {
lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
}
if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
displacedWeightRemaining -= mempool[txid].weight;
}
failures = 0;
} else {
failures++;
}
index++;
}
// mark unexpected transactions in the mined block as 'added'
let overflowWeight = 0;
let totalWeight = 0;
for (const tx of transactions) {
if (inTemplate[tx.txid]) {
matches.push(tx.txid);
} else {
if (!isDisplaced[tx.txid]) {
added.push(tx.txid);
}
let blockIndex = -1;
let index = -1;
projectedBlocks.forEach((block, bi) => {
const i = block.transactionIds.indexOf(tx.txid);
if (i >= 0) {
blockIndex = bi;
index = i;
}
});
overflowWeight += tx.weight;
}
totalWeight += tx.weight;
}
// transactions missing from near the end of our template are probably not being censored
let overflowWeightRemaining = overflowWeight - (config.MEMPOOL.BLOCK_WEIGHT_UNITS - totalWeight);
let maxOverflowRate = 0;
let rateThreshold = 0;
index = projectedBlocks[0].transactionIds.length - 1;
while (index >= 0) {
const txid = projectedBlocks[0].transactionIds[index];
if (overflowWeightRemaining > 0) {
if (isCensored[txid]) {
delete isCensored[txid];
}
if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) {
maxOverflowRate = mempool[txid].effectiveFeePerVsize;
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
}
} else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
if (isCensored[txid]) {
delete isCensored[txid];
}
}
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
index--;
}
const numCensored = Object.keys(isCensored).length;
const numMatches = matches.length - 1; // adjust for coinbase tx
const score = numMatches > 0 ? (numMatches / (numMatches + numCensored)) : 0;
const similarity = projectedWeight ? matchedWeight / projectedWeight : 1;
return {
censored: Object.keys(isCensored),
added,
fresh,
score,
similarity,
};
}
}
export default new Audit();
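A minimal usage sketch for the class above (hedged: `blockTxs`, `projectedBlocks`, and `mempoolSnapshot` are hypothetical placeholders for the data the block handler would supply; they are not defined in this diff):

import audit from './audit';

// blockTxs: TransactionExtended[] for the mined block (coinbase first)
// projectedBlocks: MempoolBlockWithTransactions[] block templates
// mempoolSnapshot: { [txId: string]: TransactionExtended }
const { censored, added, fresh, score, similarity } =
  audit.auditBlock(blockTxs, projectedBlocks, mempoolSnapshot);

// score = non-coinbase matches / (matches + censored)
// similarity = matched weight / projected weight
console.log(`match rate: ${(score * 100).toFixed(2)}%`);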

View File

@@ -2,6 +2,7 @@ import fs from 'fs';
import path from 'path';
import os from 'os';
import { IBackendInfo } from '../mempool.interfaces';
import config from '../config';
class BackendInfo {
private backendInfo: IBackendInfo;
@@ -22,7 +23,8 @@ class BackendInfo {
this.backendInfo = {
hostname: os.hostname(),
version: versionInfo.version,
gitCommit: versionInfo.gitCommit
gitCommit: versionInfo.gitCommit,
lightning: config.LIGHTNING.ENABLED
};
}

View File

@@ -3,13 +3,14 @@ import { IEsploraApi } from './esplora-api.interface';
export interface AbstractBitcoinApi {
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean, lazyPrevouts?: boolean): Promise<IEsploraApi.Transaction>;
$getTransactionHex(txId: string): Promise<string>;
$getBlockHeightTip(): Promise<number>;
$getBlockHashTip(): Promise<string>;
$getTxIdsForBlock(hash: string): Promise<string[]>;
$getBlockHash(height: number): Promise<string>;
$getBlockHeader(hash: string): Promise<string>;
$getBlock(hash: string): Promise<IEsploraApi.Block>;
$getRawBlock(hash: string): Promise<string>;
$getRawBlock(hash: string): Promise<Buffer>;
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];

View File

@@ -17,4 +17,6 @@ function bitcoinApiFactory(): AbstractBitcoinApi {
}
}
export const bitcoinCoreApi = new BitcoinApi(bitcoinClient);
export default bitcoinApiFactory();

View File

@@ -172,4 +172,35 @@ export namespace IBitcoinApi {
}
}
export interface BlockStats {
"avgfee": number;
"avgfeerate": number;
"avgtxsize": number;
"blockhash": string;
"feerate_percentiles": [number, number, number, number, number];
"height": number;
"ins": number;
"maxfee": number;
"maxfeerate": number;
"maxtxsize": number;
"medianfee": number;
"mediantime": number;
"mediantxsize": number;
"minfee": number;
"minfeerate": number;
"mintxsize": number;
"outs": number;
"subsidy": number;
"swtotal_size": number;
"swtotal_weight": number;
"swtxs": number;
"time": number;
"total_out": number;
"total_size": number;
"total_weight": number;
"totalfee": number;
"txs": number;
"utxo_increase": number;
"utxo_size_inc": number;
}
}

View File

@@ -28,6 +28,7 @@ class BitcoinApi implements AbstractBitcoinApi {
size: block.size,
weight: block.weight,
previousblockhash: block.previousblockhash,
mediantime: block.mediantime,
};
}
@@ -57,6 +58,11 @@ class BitcoinApi implements AbstractBitcoinApi {
});
}
$getTransactionHex(txId: string): Promise<string> {
return this.$getRawTransaction(txId, true)
.then((tx) => tx.hex || '');
}
$getBlockHeightTip(): Promise<number> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
@@ -76,7 +82,7 @@ class BitcoinApi implements AbstractBitcoinApi {
.then((rpcBlock: IBitcoinApi.Block) => rpcBlock.tx);
}
$getRawBlock(hash: string): Promise<string> {
$getRawBlock(hash: string): Promise<Buffer> {
return this.bitcoindClient.getBlock(hash, 0)
.then((raw: string) => Buffer.from(raw, "hex"));
}

View File

@@ -1,11 +1,12 @@
import { Application, Request, Response } from 'express';
import axios from 'axios';
import * as bitcoinjs from 'bitcoinjs-lib';
import config from '../../config';
import websocketHandler from '../websocket-handler';
import mempool from '../mempool';
import feeApi from '../fee-api';
import mempoolBlocks from '../mempool-blocks';
import bitcoinApi from './bitcoin-api-factory';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin-api-factory';
import { Common } from '../common';
import backendInfo from '../backend-info';
import transactionUtils from '../transaction-utils';
@@ -16,19 +17,23 @@ import logger from '../../logger';
import blocks from '../blocks';
import bitcoinClient from './bitcoin-client';
import difficultyAdjustment from '../difficulty-adjustment';
import transactionRepository from '../../repositories/TransactionRepository';
import rbfCache from '../rbf-cache';
class BitcoinRoutes {
public initRoutes(app: Application) {
app
.get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
.get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.getCpfpInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.$getCpfpInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', this.getMempoolBlocks)
.get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', this.getBackendInfo)
.get(config.MEMPOOL.API_URL_PREFIX + 'init-data', this.getInitData)
.get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', this.validateAddress)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/replaces', this.getRbfHistory)
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/cached', this.getCachedTx)
.post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', this.$postTransactionForm)
.get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
try {
@@ -87,7 +92,11 @@ class BitcoinRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks', this.getBlocks.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', this.getBlocks.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions);
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
.post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from', this.getBlocksByBulk.bind(this))
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks-bulk/:from/:to', this.getBlocksByBulk.bind(this))
;
if (config.MEMPOOL.BACKEND !== 'esplora') {
@@ -185,29 +194,43 @@ class BitcoinRoutes {
}
}
private getCpfpInfo(req: Request, res: Response) {
private async $getCpfpInfo(req: Request, res: Response) {
if (!/^[a-fA-F0-9]{64}$/.test(req.params.txId)) {
res.status(501).send(`Invalid transaction ID.`);
return;
}
const tx = mempool.getMempool()[req.params.txId];
if (!tx) {
res.status(404).send(`Transaction doesn't exist in the mempool.`);
if (tx) {
if (tx?.cpfpChecked) {
res.json({
ancestors: tx.ancestors,
bestDescendant: tx.bestDescendant || null,
descendants: tx.descendants || null,
effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
});
return;
}
const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
res.json(cpfpInfo);
return;
} else {
let cpfpInfo;
if (config.DATABASE.ENABLED) {
cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
}
if (cpfpInfo) {
res.json(cpfpInfo);
return;
} else {
res.json({
ancestors: []
});
return;
}
}
if (tx.cpfpChecked) {
res.json({
ancestors: tx.ancestors,
bestDescendant: tx.bestDescendant || null,
});
return;
}
const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
res.json(cpfpInfo);
}
private getBackendInfo(req: Request, res: Response) {
@@ -241,6 +264,74 @@ class BitcoinRoutes {
}
}
/**
* Takes the PSBT as text/plain body, parses it, and adds the full
* parent transaction to each input that doesn't already have it.
* This is used for BTCPayServer / Trezor users which need access to
* the full parent transaction even with segwit inputs.
* It will respond with a text/plain PSBT in the same format (hex|base64).
*/
private async postPsbtCompletion(req: Request, res: Response): Promise<void> {
res.setHeader('content-type', 'text/plain');
const notFoundError = `Couldn't get transaction hex for parent of input`;
try {
let psbt: bitcoinjs.Psbt;
let format: 'hex' | 'base64';
let isModified = false;
try {
psbt = bitcoinjs.Psbt.fromBase64(req.body);
format = 'base64';
} catch (e1) {
try {
psbt = bitcoinjs.Psbt.fromHex(req.body);
format = 'hex';
} catch (e2) {
throw new Error(`Unable to parse PSBT`);
}
}
for (const [index, input] of psbt.data.inputs.entries()) {
if (!input.nonWitnessUtxo) {
// Buffer.from ensures it won't be modified in place by reverse()
const txid = Buffer.from(psbt.txInputs[index].hash)
.reverse()
.toString('hex');
let transactionHex: string;
// If missing transaction, return 404 status error
try {
transactionHex = await bitcoinApi.$getTransactionHex(txid);
if (!transactionHex) {
throw new Error('');
}
} catch (err) {
throw new Error(`${notFoundError} #${index} @ ${txid}`);
}
psbt.updateInput(index, {
nonWitnessUtxo: Buffer.from(transactionHex, 'hex'),
});
if (!isModified) {
isModified = true;
}
}
}
if (isModified) {
res.send(format === 'hex' ? psbt.toHex() : psbt.toBase64());
} else {
// Not modified
// 422 Unprocessable Entity
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422
res.status(422).send(`Psbt had no missing nonWitnessUtxos.`);
}
} catch (e: any) {
if (e instanceof Error && new RegExp(notFoundError).test(e.message)) {
res.status(404).send(e.message);
} else {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
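
A hedged usage sketch for this endpoint; the host is a placeholder, and the /api/v1/ prefix is an assumption standing in for config.MEMPOOL.API_URL_PREFIX:

// Assumes a Node 18+ runtime with global fetch; host and PSBT are placeholders.
async function completePsbt(psbt: string): Promise<string> {
  const res = await fetch('https://mempool.example/api/v1/psbt/addparents', {
    method: 'POST',
    headers: { 'content-type': 'text/plain' },
    body: psbt, // base64 or hex; the response comes back in the same format
  });
  if (res.status === 422) {
    return psbt; // no nonWitnessUtxo was missing, keep the original
  }
  if (!res.ok) {
    throw new Error(`PSBT completion failed: ${res.status} ${await res.text()}`);
  }
  return res.text(); // PSBT with the parent tx attached to each input
}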
private async getTransactionStatus(req: Request, res: Response) {
try {
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
@@ -254,6 +345,16 @@ class BitcoinRoutes {
}
}
private async getStrippedBlockTransactions(req: Request, res: Response) {
try {
const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getBlock(req: Request, res: Response) {
try {
const block = await blocks.$getBlock(req.params.hash);
@@ -286,9 +387,9 @@ class BitcoinRoutes {
}
}
private async getStrippedBlockTransactions(req: Request, res: Response) {
private async getBlockAuditSummary(req: Request, res: Response) {
try {
const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
const transactions = await blocks.$getBlockAuditSummary(req.params.hash);
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
res.json(transactions);
} catch (e) {
@@ -310,10 +411,46 @@ class BitcoinRoutes {
}
}
private async getBlocksByBulk(req: Request, res: Response) {
try {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) { // Liquid, Bisq - Not implemented
return res.status(404).send(`This API is only available for Bitcoin networks`);
}
if (config.MEMPOOL.MAX_BLOCKS_BULK_QUERY <= 0) {
return res.status(404).send(`This API is disabled. Set config.MEMPOOL.MAX_BLOCKS_BULK_QUERY to a positive number to enable it.`);
}
if (!Common.indexingEnabled()) {
return res.status(404).send(`Indexing is required for this API`);
}
const from = parseInt(req.params.from, 10);
if (!req.params.from || from < 0) {
return res.status(400).send(`Parameter 'from' must be a block height (integer)`);
}
const to = req.params.to === undefined ? await bitcoinApi.$getBlockHeightTip() : parseInt(req.params.to, 10);
if (to < 0) {
return res.status(400).send(`Parameter 'to' must be a block height (integer)`);
}
if (from > to) {
return res.status(400).send(`Parameter 'to' must be a higher block height than 'from'`);
}
if ((to - from + 1) > config.MEMPOOL.MAX_BLOCKS_BULK_QUERY) {
return res.status(400).send(`You can only query ${config.MEMPOOL.MAX_BLOCKS_BULK_QUERY} blocks at once.`);
}
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await blocks.$getBlocksBetweenHeight(from, to));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
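
A sketch of how a client might call the bulk endpoint, with a placeholder host and the /api/v1/ prefix assumed from config.MEMPOOL.API_URL_PREFIX; requesting a span wider than MAX_BLOCKS_BULK_QUERY yields HTTP 400:

// Fetch 15 blocks of indexed data in one round trip (Node 18+ fetch assumed).
const res = await fetch('https://mempool.example/api/v1/blocks-bulk/730000/730014');
const blocks: unknown[] = await res.json(); // cleaned block objects, cached for 60s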
private async getLegacyBlocks(req: Request, res: Response) {
try {
const returnBlocks: IEsploraApi.Block[] = [];
const fromHeight = parseInt(req.params.height, 10) || blocks.getCurrentBlockHeight();
const tip = blocks.getCurrentBlockHeight();
const fromHeight = Math.min(parseInt(req.params.height, 10) || tip, tip);
// Check if the block height exists in the local cache to skip the hash lookup
const blockByHeight = blocks.getBlocks().find((b) => b.height === fromHeight);
@@ -331,7 +468,7 @@ class BitcoinRoutes {
returnBlocks.push(localBlock);
nextHash = localBlock.previousblockhash;
} else {
const block = await bitcoinApi.$getBlock(nextHash);
const block = await bitcoinCoreApi.$getBlock(nextHash);
returnBlocks.push(block);
nextHash = block.previousblockhash;
}
@@ -499,6 +636,28 @@ class BitcoinRoutes {
}
}
private async getRbfHistory(req: Request, res: Response) {
try {
const result = rbfCache.getReplaces(req.params.txId);
res.json(result || []);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getCachedTx(req: Request, res: Response) {
try {
const result = rbfCache.getTx(req.params.txId);
if (result) {
res.json(result);
} else {
res.status(204).send();
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async getTransactionOutspends(req: Request, res: Response) {
try {
const result = await bitcoinApi.$getOutspends(req.params.txId);


@@ -88,6 +88,7 @@ export namespace IEsploraApi {
size: number;
weight: number;
previousblockhash: string;
mediantime: number;
}
export interface Address {


@@ -1,8 +1,13 @@
import config from '../../config';
import axios, { AxiosRequestConfig } from 'axios';
import http from 'http';
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
import { IEsploraApi } from './esplora-api.interface';
const axiosConnection = axios.create({
httpAgent: new http.Agent({ keepAlive: true })
});
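
Sharing one axios instance backed by a keep-alive agent lets the backend reuse TCP sockets across its frequent Esplora polls instead of reconnecting on every request. If REST_API_URL were served over TLS (an assumption, not something this commit handles), the same pattern would extend to an https agent:

import axios from 'axios';
import http from 'http';
import https from 'https';

// One shared instance so sockets are pooled and reused across requests.
const pooled = axios.create({
  httpAgent: new http.Agent({ keepAlive: true }),
  httpsAgent: new https.Agent({ keepAlive: true }),
});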
class ElectrsApi implements AbstractBitcoinApi {
axiosConfig: AxiosRequestConfig = {
timeout: 10000,
@@ -11,48 +16,53 @@ class ElectrsApi implements AbstractBitcoinApi {
constructor() { }
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]> {
return axios.get<IEsploraApi.Transaction['txid'][]>(config.ESPLORA.REST_API_URL + '/mempool/txids', this.axiosConfig)
return axiosConnection.get<IEsploraApi.Transaction['txid'][]>(config.ESPLORA.REST_API_URL + '/mempool/txids', this.axiosConfig)
.then((response) => response.data);
}
$getRawTransaction(txId: string): Promise<IEsploraApi.Transaction> {
return axios.get<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId, this.axiosConfig)
return axiosConnection.get<IEsploraApi.Transaction>(config.ESPLORA.REST_API_URL + '/tx/' + txId, this.axiosConfig)
.then((response) => response.data);
}
$getTransactionHex(txId: string): Promise<string> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex', this.axiosConfig)
.then((response) => response.data);
}
$getBlockHeightTip(): Promise<number> {
return axios.get<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height', this.axiosConfig)
return axiosConnection.get<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height', this.axiosConfig)
.then((response) => response.data);
}
$getBlockHashTip(): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash', this.axiosConfig)
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash', this.axiosConfig)
.then((response) => response.data);
}
$getTxIdsForBlock(hash: string): Promise<string[]> {
return axios.get<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids', this.axiosConfig)
return axiosConnection.get<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids', this.axiosConfig)
.then((response) => response.data);
}
$getBlockHash(height: number): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height, this.axiosConfig)
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block-height/' + height, this.axiosConfig)
.then((response) => response.data);
}
$getBlockHeader(hash: string): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/header', this.axiosConfig)
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/header', this.axiosConfig)
.then((response) => response.data);
}
$getBlock(hash: string): Promise<IEsploraApi.Block> {
return axios.get<IEsploraApi.Block>(config.ESPLORA.REST_API_URL + '/block/' + hash, this.axiosConfig)
return axiosConnection.get<IEsploraApi.Block>(config.ESPLORA.REST_API_URL + '/block/' + hash, this.axiosConfig)
.then((response) => response.data);
}
$getRawBlock(hash: string): Promise<string> {
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", this.axiosConfig)
.then((response) => response.data);
$getRawBlock(hash: string): Promise<Buffer> {
return axiosConnection.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", { ...this.axiosConfig, responseType: 'arraybuffer' })
.then((response) => { return Buffer.from(response.data); });
}
$getAddress(address: string): Promise<IEsploraApi.Address> {
@@ -72,12 +82,12 @@ class ElectrsApi implements AbstractBitcoinApi {
}
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
return axios.get<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout, this.axiosConfig)
return axiosConnection.get<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout, this.axiosConfig)
.then((response) => response.data);
}
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
return axios.get<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends', this.axiosConfig)
return axiosConnection.get<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends', this.axiosConfig)
.then((response) => response.data);
}


@@ -1,8 +1,8 @@
import config from '../config';
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
import logger from '../logger';
import memPool from './mempool';
import { BlockExtended, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
import { BlockExtended, BlockExtension, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
import { Common } from './common';
import diskCache from './disk-cache';
import transactionUtils from './transaction-utils';
@@ -13,17 +13,18 @@ import poolsRepository from '../repositories/PoolsRepository';
import blocksRepository from '../repositories/BlocksRepository';
import loadingIndicators from './loading-indicators';
import BitcoinApi from './bitcoin/bitcoin-api';
import { prepareBlock } from '../utils/blocks-utils';
import BlocksRepository from '../repositories/BlocksRepository';
import HashratesRepository from '../repositories/HashratesRepository';
import indexer from '../indexer';
import fiatConversion from './fiat-conversion';
import poolsParser from './pools-parser';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import cpfpRepository from '../repositories/CpfpRepository';
import mining from './mining/mining';
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
import PricesRepository from '../repositories/PricesRepository';
import priceUpdater from '../tasks/price-updater';
import chainTips from './chain-tips';
class Blocks {
private blocks: BlockExtended[] = [];
@@ -33,6 +34,7 @@ class Blocks {
private lastDifficultyAdjustmentTime = 0;
private previousDifficultyRetarget = 0;
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = [];
constructor() { }
@@ -56,6 +58,10 @@ class Blocks {
this.newBlockCallbacks.push(fn);
}
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) {
this.newAsyncBlockCallbacks.push(fn);
}
/**
* Return the list of transactions for a block
* @param blockHash
@@ -92,12 +98,23 @@ class Blocks {
transactions.push(tx);
transactionsFetched++;
} catch (e) {
if (i === 0) {
const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
logger.err(msg);
throw new Error(msg);
} else {
logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
try {
if (config.MEMPOOL.BACKEND === 'esplora') {
// Try again with core
const tx = await transactionUtils.$getTransactionExtended(txIds[i], false, false, true);
transactions.push(tx);
transactionsFetched++;
} else {
throw e;
}
} catch (e) {
if (i === 0) {
const msg = `Cannot fetch coinbase tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e);
logger.err(msg);
throw new Error(msg);
} else {
logger.err(`Cannot fetch tx ${txIds[i]}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
}
}
@@ -125,11 +142,14 @@ class Blocks {
* @param block
* @returns BlockSummary
*/
private summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary {
const stripped = block.tx.map((tx) => {
public summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary {
if (Common.isLiquid()) {
block = this.convertLiquidFees(block);
}
const stripped = block.tx.map((tx: IBitcoinApi.VerboseTransaction) => {
return {
txid: tx.txid,
vsize: tx.vsize,
vsize: tx.weight / 4,
fee: tx.fee ? Math.round(tx.fee * 100000000) : 0,
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0) * 100000000)
};
@@ -141,6 +161,13 @@ class Blocks {
};
}
private convertLiquidFees(block: IBitcoinApi.VerboseBlock): IBitcoinApi.VerboseBlock {
block.tx.forEach(tx => {
tx.fee = Object.values(tx.fee || {}).reduce((total, output) => total + output, 0);
});
return block;
}
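
Two details above are worth spelling out: vsize is recomputed as weight / 4 rather than taken from the RPC, and on Liquid the verbose fee field is an object keyed by asset ID that convertLiquidFees collapses into a single number. A small illustration with made-up values:

// weight / 4 is the virtual size used for fee rates:
// an 800-weight-unit transaction has a vsize of 200 vB.
const vsize = 800 / 4; // 200

// A Liquid verbose fee can look like { '<asset-id>': 0.000005 };
// summing its values yields the total fee for the transaction.
const liquidFee: { [assetId: string]: number } = { '6f0279e9...': 0.000005 };
const totalFee = Object.values(liquidFee).reduce((total, fee) => total + fee, 0);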
/**
* Return a block with additional data (reward, coinbase, fees...)
* @param block
@@ -148,33 +175,81 @@ class Blocks {
* @returns BlockExtended
*/
private async $getBlockExtended(block: IEsploraApi.Block, transactions: TransactionExtended[]): Promise<BlockExtended> {
const blockExtended: BlockExtended = Object.assign({ extras: {} }, block);
blockExtended.extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
blockExtended.extras.coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
blockExtended.extras.coinbaseRaw = blockExtended.extras.coinbaseTx.vin[0].scriptsig;
blockExtended.extras.usd = fiatConversion.getConversionRates().USD;
const coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
const blk: Partial<BlockExtended> = Object.assign({}, block);
const extras: Partial<BlockExtension> = {};
extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
extras.coinbaseRaw = coinbaseTx.vin[0].scriptsig;
extras.orphans = chainTips.getOrphanedBlocksAtHeight(blk.height);
if (block.height === 0) {
blockExtended.extras.medianFee = 0; // 50th percentiles
blockExtended.extras.feeRange = [0, 0, 0, 0, 0, 0, 0];
blockExtended.extras.totalFees = 0;
blockExtended.extras.avgFee = 0;
blockExtended.extras.avgFeeRate = 0;
extras.medianFee = 0; // 50th percentiles
extras.feeRange = [0, 0, 0, 0, 0, 0, 0];
extras.totalFees = 0;
extras.avgFee = 0;
extras.avgFeeRate = 0;
extras.utxoSetChange = 0;
extras.avgTxSize = 0;
extras.totalInputs = 0;
extras.totalOutputs = 1;
extras.totalOutputAmt = 0;
extras.segwitTotalTxs = 0;
extras.segwitTotalSize = 0;
extras.segwitTotalWeight = 0;
} else {
const stats = await bitcoinClient.getBlockStats(block.id, [
'feerate_percentiles', 'minfeerate', 'maxfeerate', 'totalfee', 'avgfee', 'avgfeerate'
]);
blockExtended.extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles
blockExtended.extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
blockExtended.extras.totalFees = stats.totalfee;
blockExtended.extras.avgFee = stats.avgfee;
blockExtended.extras.avgFeeRate = stats.avgfeerate;
const stats: IBitcoinApi.BlockStats = await bitcoinClient.getBlockStats(block.id);
extras.medianFee = stats.feerate_percentiles[2]; // 50th percentiles
extras.feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
extras.totalFees = stats.totalfee;
extras.avgFee = stats.avgfee;
extras.avgFeeRate = stats.avgfeerate;
extras.utxoSetChange = stats.utxo_increase;
extras.avgTxSize = Math.round(stats.total_size / stats.txs * 100) * 0.01;
extras.totalInputs = stats.ins;
extras.totalOutputs = stats.outs;
extras.totalOutputAmt = stats.total_out;
extras.segwitTotalTxs = stats.swtxs;
extras.segwitTotalSize = stats.swtotal_size;
extras.segwitTotalWeight = stats.swtotal_weight;
}
if (Common.blocksSummariesIndexingEnabled()) {
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(block.id);
if (extras.feePercentiles !== null) {
extras.medianFeeAmt = extras.feePercentiles[3];
}
}
extras.virtualSize = block.weight / 4.0;
if (coinbaseTx?.vout.length > 0) {
extras.coinbaseAddress = coinbaseTx.vout[0].scriptpubkey_address ?? null;
extras.coinbaseSignature = coinbaseTx.vout[0].scriptpubkey_asm ?? null;
extras.coinbaseSignatureAscii = transactionUtils.hex2ascii(coinbaseTx.vin[0].scriptsig) ?? null;
} else {
extras.coinbaseAddress = null;
extras.coinbaseSignature = null;
extras.coinbaseSignatureAscii = null;
}
const header = await bitcoinClient.getBlockHeader(block.id, false);
extras.header = header;
const coinStatsIndex = indexer.isCoreIndexReady('coinstatsindex');
if (coinStatsIndex !== null && coinStatsIndex.best_block_height >= block.height) {
const txoutset = await bitcoinClient.getTxoutSetinfo('none', block.height);
extras.utxoSetSize = txoutset.txouts;
extras.totalInputAmt = Math.round(txoutset.block_info.prevout_spent * 100000000);
} else {
extras.utxoSetSize = null;
extras.totalInputAmt = null;
}
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
let pool: PoolTag;
if (blockExtended.extras?.coinbaseTx !== undefined) {
pool = await this.$findBlockMiner(blockExtended.extras?.coinbaseTx);
if (coinbaseTx !== undefined) {
pool = await this.$findBlockMiner(coinbaseTx);
} else {
if (config.DATABASE.ENABLED === true) {
pool = await poolsRepository.$getUnknownPool();
@@ -184,19 +259,27 @@ class Blocks {
}
if (!pool) { // We should never have this situation in practice
logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. ` +
logger.warn(`Cannot assign pool to block ${blk.height} and 'unknown' pool does not exist. ` +
`Check your "pools" table entries`);
return blockExtended;
} else {
extras.pool = {
id: pool.uniqueId,
name: pool.name,
slug: pool.slug,
};
}
blockExtended.extras.pool = {
id: pool.id,
name: pool.name,
slug: pool.slug,
};
extras.matchRate = null;
if (config.MEMPOOL.AUDIT) {
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
if (auditScore != null) {
extras.matchRate = auditScore.matchRate;
}
}
}
return blockExtended;
blk.extras = <BlockExtension>extras;
return <BlockExtended>blk;
}
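
The feeRange built above flattens Core's feerate_percentiles (10th/25th/50th/75th/90th) between the minimum and maximum rates into a fixed 7-element array. A sketch with hypothetical getblockstats output:

// Hypothetical stats; real getblockstats values vary per block.
const stats = {
  minfeerate: 1,
  feerate_percentiles: [2, 4, 8, 15, 30], // 10th, 25th, 50th, 75th, 90th
  maxfeerate: 120,
};
const feeRange = [stats.minfeerate, stats.feerate_percentiles, stats.maxfeerate].flat();
// [1, 2, 4, 8, 15, 30, 120]; medianFee is feerate_percentiles[2] === 8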
/**
@@ -222,15 +305,18 @@ class Blocks {
} else {
pools = poolsParser.miningPools;
}
for (let i = 0; i < pools.length; ++i) {
if (address !== undefined) {
const addresses: string[] = JSON.parse(pools[i].addresses);
const addresses: string[] = typeof pools[i].addresses === 'string' ?
JSON.parse(pools[i].addresses) : pools[i].addresses;
if (addresses.indexOf(address) !== -1) {
return pools[i];
}
}
const regexes: string[] = JSON.parse(pools[i].regexes);
const regexes: string[] = typeof pools[i].regexes === 'string' ?
JSON.parse(pools[i].regexes) : pools[i].regexes;
for (let y = 0; y < regexes.length; ++y) {
const regex = new RegExp(regexes[y], 'i');
const match = asciiScriptSig.match(regex);
@@ -250,7 +336,7 @@ class Blocks {
/**
* [INDEXING] Index all blocks summaries for the block txs visualization
*/
public async $generateBlocksSummariesDatabase() {
public async $generateBlocksSummariesDatabase(): Promise<void> {
if (Common.blocksSummariesIndexingEnabled() === false) {
return;
}
@@ -283,7 +369,7 @@ class Blocks {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
}
@@ -296,12 +382,60 @@ class Blocks {
newlyIndexed++;
}
if (newlyIndexed > 0) {
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
} else {
logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
}
} catch (e) {
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
throw e;
}
}
/**
* [INDEXING] Index transaction CPFP data for all blocks
*/
public async $generateCPFPDatabase(): Promise<void> {
if (Common.cpfpIndexingEnabled() === false) {
return;
}
try {
// Get the block heights that still need CPFP indexing
const unindexedBlockHeights = await blocksRepository.$getCPFPUnindexedBlocks();
logger.info(`Indexing cpfp data for ${unindexedBlockHeights.length} blocks`);
if (!unindexedBlockHeights?.length) {
return;
}
// Logging
let count = 0;
let countThisRun = 0;
let timer = new Date().getTime() / 1000;
const startedAt = new Date().getTime() / 1000;
for (const height of unindexedBlockHeights) {
// Logging
const hash = await bitcoinApi.$getBlockHash(height);
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
if (elapsedSeconds > 5) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = (countThisRun / elapsedSeconds);
const progress = Math.round(count / unindexedBlockHeights.length * 10000) / 100;
logger.debug(`Indexing cpfp clusters for #${height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${count}/${unindexedBlockHeights.length} (${progress}%) | elapsed: ${runningFor} seconds`);
timer = new Date().getTime() / 1000;
countThisRun = 0;
}
await this.$indexCPFP(hash, height); // Calculate and save CPFP data for transactions in this block
// Logging
count++;
countThisRun++;
}
logger.notice(`CPFP indexing completed: indexed ${count} blocks`);
} catch (e) {
logger.err(`CPFP indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
@@ -321,7 +455,7 @@ class Blocks {
const lastBlockToIndex = Math.max(0, currentBlockHeight - indexingBlockAmount + 1);
logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`);
logger.debug(`Indexing blocks from #${currentBlockHeight} to #${lastBlockToIndex}`, logger.tags.mining);
loadingIndicators.setProgress('block-indexing', 0);
const chunkSize = 10000;
@@ -341,7 +475,7 @@ class Blocks {
continue;
}
logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`);
logger.info(`Indexing ${missingBlockHeights.length} blocks from #${currentBlockHeight} to #${endBlock}`, logger.tags.mining);
for (const blockHeight of missingBlockHeights) {
if (blockHeight < lastBlockToIndex) {
@@ -349,18 +483,18 @@ class Blocks {
}
++indexedThisRun;
++totalIndexed;
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`);
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('block-indexing', progress, false);
}
const blockHash = await bitcoinApi.$getBlockHash(blockHeight);
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
@@ -371,13 +505,13 @@ class Blocks {
currentBlockHeight -= chunkSize;
}
if (newlyIndexed > 0) {
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`);
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
} else {
logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`);
logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`, logger.tags.mining);
}
loadingIndicators.setProgress('block-indexing', 100);
} catch (e) {
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
loadingIndicators.setProgress('block-indexing', 100);
throw e;
}
@@ -408,13 +542,13 @@ class Blocks {
if (blockchainInfo.blocks === blockchainInfo.headers) {
const heightDiff = blockHeightTip % 2016;
const blockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff);
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
this.lastDifficultyAdjustmentTime = block.timestamp;
this.currentDifficulty = block.difficulty;
if (blockHeightTip >= 2016) {
const previousPeriodBlockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff - 2016);
const previousPeriodBlock = await bitcoinClient.getBlock(previousPeriodBlockHash)
const previousPeriodBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(previousPeriodBlockHash);
this.previousDifficultyRetarget = (block.difficulty - previousPeriodBlock.difficulty) / previousPeriodBlock.difficulty * 100;
logger.debug(`Initial difficulty adjustment data set.`);
}
@@ -429,6 +563,7 @@ class Blocks {
} else {
this.currentBlockHeight++;
logger.debug(`New block found (#${this.currentBlockHeight})!`);
await chainTips.updateOrphanedBlocks();
}
const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight);
@@ -439,18 +574,25 @@ class Blocks {
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
// start async callbacks
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
if (Common.indexingEnabled()) {
if (!fastForwarded) {
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock['hash']) {
logger.warn(`Chain divergence detected at block ${lastBlock['height']}, re-indexing most recent data`);
if (lastBlock !== null && blockExtended.previousblockhash !== lastBlock.id) {
logger.warn(`Chain divergence detected at block ${lastBlock.height}, re-indexing most recent data`);
// We assume there won't be a reorg more than 10 blocks deep
await BlocksRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
await BlocksRepository.$deleteBlocksFrom(lastBlock.height - 10);
await HashratesRepository.$deleteLastEntries();
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock.height - 10);
await cpfpRepository.$deleteClustersFrom(lastBlock.height - 10);
for (let i = 10; i >= 0; --i) {
const newBlock = await this.$indexBlock(lastBlock['height'] - i);
const newBlock = await this.$indexBlock(lastBlock.height - i);
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
if (config.MEMPOOL.CPFP_INDEXING) {
await this.$indexCPFP(newBlock.id, lastBlock.height - i);
}
}
await mining.$indexDifficultyAdjustments();
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
@@ -466,7 +608,7 @@ class Blocks {
priceId: lastestPriceId,
}]);
} else {
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`);
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`, logger.tags.mining);
setTimeout(() => {
indexer.runSingleTask('blocksPrices');
}, 10000);
@@ -476,6 +618,9 @@ class Blocks {
if (Common.blocksSummariesIndexingEnabled() === true) {
await this.$getStrippedBlockTransactions(blockExtended.id, true);
}
if (config.MEMPOOL.CPFP_INDEXING) {
this.$indexCPFP(blockExtended.id, this.currentBlockHeight);
}
}
}
@@ -506,9 +651,12 @@ class Blocks {
if (this.newBlockCallbacks.length) {
this.newBlockCallbacks.forEach((cb) => cb(blockExtended, txIds, transactions));
}
if (!memPool.hasPriority()) {
if (!memPool.hasPriority() && (block.height % config.MEMPOOL.DISK_CACHE_BLOCK_INTERVAL === 0)) {
diskCache.$saveCacheToDisk();
}
// wait for pending async callbacks to finish
await Promise.all(callbackPromises);
}
}
@@ -516,23 +664,27 @@ class Blocks {
* Index a block if it's missing from the database. Returns the block after indexing
*/
public async $indexBlock(height: number): Promise<BlockExtended> {
const dbBlock = await blocksRepository.$getBlockByHeight(height);
if (dbBlock != null) {
return prepareBlock(dbBlock);
if (Common.indexingEnabled()) {
const dbBlock = await blocksRepository.$getBlockByHeight(height);
if (dbBlock !== null) {
return dbBlock;
}
}
const blockHash = await bitcoinApi.$getBlockHash(height);
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(blockHash);
const transactions = await this.$getTransactionsExtended(blockHash, block.height, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
await blocksRepository.$saveBlockInDatabase(blockExtended);
if (Common.indexingEnabled()) {
await blocksRepository.$saveBlockInDatabase(blockExtended);
}
return prepareBlock(blockExtended);
return blockExtended;
}
/**
* Index a block by hash if it's missing from the database. Returns the block after indexing
* Get one block by its hash
*/
public async $getBlock(hash: string): Promise<BlockExtended | IEsploraApi.Block> {
// Check the memory cache
@@ -541,31 +693,14 @@ class Blocks {
return blockByHash;
}
// Block has already been indexed
if (Common.indexingEnabled()) {
const dbBlock = await blocksRepository.$getBlockByHash(hash);
if (dbBlock != null) {
return prepareBlock(dbBlock);
}
}
// Not a Bitcoin network, return the block as-is
// Not a Bitcoin network, return the block as-is from the bitcoin backend
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return await bitcoinApi.$getBlock(hash);
return await bitcoinCoreApi.$getBlock(hash);
}
let block = await bitcoinClient.getBlock(hash);
block = prepareBlock(block);
// Bitcoin network, add our custom data on top
const transactions = await this.$getTransactionsExtended(hash, block.height, true);
const blockExtended = await this.$getBlockExtended(block, transactions);
if (Common.indexingEnabled()) {
delete(blockExtended['coinbaseTx']);
await blocksRepository.$saveBlockInDatabase(blockExtended);
}
return blockExtended;
const block: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(hash);
return await this.$indexBlock(block.height);
}
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache = false,
@@ -574,7 +709,7 @@ class Blocks {
if (skipMemoryCache === false) {
// Check the memory cache
const cachedSummary = this.getBlockSummaries().find((b) => b.id === hash);
if (cachedSummary) {
if (cachedSummary?.transactions?.length) {
return cachedSummary.transactions;
}
}
@@ -582,7 +717,7 @@ class Blocks {
// Check if it's indexed in db
if (skipDBLookup === false && Common.blocksSummariesIndexingEnabled() === true) {
const indexedSummary = await BlocksSummariesRepository.$getByBlockId(hash);
if (indexedSummary !== undefined) {
if (indexedSummary !== undefined && indexedSummary?.transactions?.length) {
return indexedSummary.transactions;
}
}
@@ -599,35 +734,39 @@ class Blocks {
return summary.transactions;
}
/**
* Get a list of blocks (default limit: 15)
*
* Internally this function uses two methods to get the blocks, and
* the method is automatically selected:
* - Using previous block hash links
* - Using block height
*
* @param fromHeight
* @param limit
* @returns
*/
public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> {
let currentHeight = fromHeight !== undefined ? fromHeight : await blocksRepository.$mostRecentBlockHeight();
let currentHeight = fromHeight !== undefined ? fromHeight : this.currentBlockHeight;
if (currentHeight > this.currentBlockHeight) {
limit -= currentHeight - this.currentBlockHeight;
currentHeight = this.currentBlockHeight;
}
const returnBlocks: BlockExtended[] = [];
if (currentHeight < 0) {
return returnBlocks;
}
// Check if the block height exists in the local cache to skip the hash lookup
const blockByHeight = this.getBlocks().find((b) => b.height === currentHeight);
let startFromHash: string | null = null;
if (blockByHeight) {
startFromHash = blockByHeight.id;
} else if (!Common.indexingEnabled()) {
startFromHash = await bitcoinApi.$getBlockHash(currentHeight);
}
let nextHash = startFromHash;
for (let i = 0; i < limit && currentHeight >= 0; i++) {
let block = this.getBlocks().find((b) => b.height === currentHeight);
if (block) {
// Using the memory cache (find by height)
returnBlocks.push(block);
} else if (Common.indexingEnabled()) {
} else {
// Using indexing (find by height, index on the fly, save in database)
block = await this.$indexBlock(currentHeight);
returnBlocks.push(block);
} else if (nextHash != null) {
block = prepareBlock(await bitcoinClient.getBlock(nextHash));
nextHash = block.previousblockhash;
returnBlocks.push(block);
}
currentHeight--;
}
@@ -635,6 +774,130 @@ class Blocks {
return returnBlocks;
}
/**
* Used for bulk block data query
*
* @param fromHeight
* @param toHeight
*/
public async $getBlocksBetweenHeight(fromHeight: number, toHeight: number): Promise<any> {
if (!Common.indexingEnabled()) {
return [];
}
const blocks: any[] = [];
while (fromHeight <= toHeight) {
let block: BlockExtended | null = await blocksRepository.$getBlockByHeight(fromHeight);
if (!block) {
await this.$indexBlock(fromHeight);
block = await blocksRepository.$getBlockByHeight(fromHeight);
if (!block) {
continue;
}
}
// Clean up fields before sending the response
const cleanBlock: any = {
height: block.height ?? null,
hash: block.id ?? null,
timestamp: block.timestamp ?? null,
median_timestamp: block.mediantime ?? null,
previous_block_hash: block.previousblockhash ?? null,
difficulty: block.difficulty ?? null,
header: block.extras.header ?? null,
version: block.version ?? null,
bits: block.bits ?? null,
nonce: block.nonce ?? null,
size: block.size ?? null,
weight: block.weight ?? null,
tx_count: block.tx_count ?? null,
merkle_root: block.merkle_root ?? null,
reward: block.extras.reward ?? null,
total_fee_amt: block.extras.totalFees ?? null,
avg_fee_amt: block.extras.avgFee ?? null,
median_fee_amt: block.extras.medianFeeAmt ?? null,
fee_amt_percentiles: block.extras.feePercentiles ?? null,
avg_fee_rate: block.extras.avgFeeRate ?? null,
median_fee_rate: block.extras.medianFee ?? null,
fee_rate_percentiles: block.extras.feeRange ?? null,
total_inputs: block.extras.totalInputs ?? null,
total_input_amt: block.extras.totalInputAmt ?? null,
total_outputs: block.extras.totalOutputs ?? null,
total_output_amt: block.extras.totalOutputAmt ?? null,
segwit_total_txs: block.extras.segwitTotalTxs ?? null,
segwit_total_size: block.extras.segwitTotalSize ?? null,
segwit_total_weight: block.extras.segwitTotalWeight ?? null,
avg_tx_size: block.extras.avgTxSize ?? null,
utxoset_change: block.extras.utxoSetChange ?? null,
utxoset_size: block.extras.utxoSetSize ?? null,
coinbase_raw: block.extras.coinbaseRaw ?? null,
coinbase_address: block.extras.coinbaseAddress ?? null,
coinbase_signature: block.extras.coinbaseSignature ?? null,
coinbase_signature_ascii: block.extras.coinbaseSignatureAscii ?? null,
pool_slug: block.extras.pool.slug ?? null,
pool_id: block.extras.pool.id ?? null,
};
if (Common.blocksSummariesIndexingEnabled() && cleanBlock.fee_amt_percentiles === null) {
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
if (cleanBlock.fee_amt_percentiles === null) {
const block = await bitcoinClient.getBlock(cleanBlock.hash, 2);
const summary = this.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
cleanBlock.fee_amt_percentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(cleanBlock.hash);
}
if (cleanBlock.fee_amt_percentiles !== null) {
cleanBlock.median_fee_amt = cleanBlock.fee_amt_percentiles[3];
}
}
cleanBlock.fee_amt_percentiles = {
'min': cleanBlock.fee_amt_percentiles[0],
'perc_10': cleanBlock.fee_amt_percentiles[1],
'perc_25': cleanBlock.fee_amt_percentiles[2],
'perc_50': cleanBlock.fee_amt_percentiles[3],
'perc_75': cleanBlock.fee_amt_percentiles[4],
'perc_90': cleanBlock.fee_amt_percentiles[5],
'max': cleanBlock.fee_amt_percentiles[6],
};
cleanBlock.fee_rate_percentiles = {
'min': cleanBlock.fee_rate_percentiles[0],
'perc_10': cleanBlock.fee_rate_percentiles[1],
'perc_25': cleanBlock.fee_rate_percentiles[2],
'perc_50': cleanBlock.fee_rate_percentiles[3],
'perc_75': cleanBlock.fee_rate_percentiles[4],
'perc_90': cleanBlock.fee_rate_percentiles[5],
'max': cleanBlock.fee_rate_percentiles[6],
};
// A re-org can happen after indexing, so always fetch the
// latest state from Core
cleanBlock.orphans = chainTips.getOrphanedBlocksAtHeight(cleanBlock.height);
blocks.push(cleanBlock);
fromHeight++;
}
return blocks;
}
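
The named percentile objects make the bulk response self-describing. For example, a raw fee_amt_percentiles array of [0, 1, 2, 5, 10, 20, 500] (hypothetical values) maps to:

// { min: 0, perc_10: 1, perc_25: 2, perc_50: 5, perc_75: 10, perc_90: 20, max: 500 }
// and median_fee_amt is taken from index 3, i.e. perc_50 === 5.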
public async $getBlockAuditSummary(hash: string): Promise<any> {
let summary;
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
summary = await BlocksAuditsRepository.$getBlockAudit(hash);
}
// fallback to non-audited transaction summary
if (!summary?.transactions?.length) {
const strippedTransactions = await this.$getStrippedBlockTransactions(hash);
summary = {
transactions: strippedTransactions
};
}
return summary;
}
public getLastDifficultyAdjustmentTime(): number {
return this.lastDifficultyAdjustmentTime;
}
@@ -646,6 +909,50 @@ class Blocks {
public getCurrentBlockHeight(): number {
return this.currentBlockHeight;
}
public async $indexCPFP(hash: string, height: number): Promise<void> {
const block = await bitcoinClient.getBlock(hash, 2);
const transactions = block.tx.map(tx => {
tx.vsize = tx.weight / 4;
tx.fee *= 100_000_000;
return tx;
});
const clusters: any[] = [];
let cluster: TransactionStripped[] = [];
let ancestors: { [txid: string]: boolean } = {};
for (let i = transactions.length - 1; i >= 0; i--) {
const tx = transactions[i];
if (!ancestors[tx.txid]) {
let totalFee = 0;
let totalVSize = 0;
cluster.forEach(tx => {
totalFee += tx?.fee || 0;
totalVSize += tx.vsize;
});
const effectiveFeePerVsize = totalFee / totalVSize;
if (cluster.length > 1) {
clusters.push({
root: cluster[0].txid,
height,
txs: cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: tx.fee || 0 }; }),
effectiveFeePerVsize,
});
}
cluster = [];
ancestors = {};
}
cluster.push(tx);
tx.vin.forEach(vin => {
ancestors[vin.txid] = true;
});
}
const result = await cpfpRepository.$batchSaveClusters(clusters);
if (!result) {
await cpfpRepository.$insertProgressMarker(height);
}
}
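
The cluster walk depends on consensus ordering: a block must list ancestors before descendants, so iterating from the last transaction upward lets each tx mark its inputs as ancestors and absorb them as the loop reaches them; hitting a non-ancestor closes the cluster. The coinbase at index 0 can never be anyone's ancestor, so it flushes the final cluster. A toy re-implementation on made-up data (not the repository's types), with an explicit flush since there is no coinbase:

type ToyTx = { txid: string; fee: number; vsize: number; inputs: string[] };
const toyTxs: ToyTx[] = [
  { txid: 'A', fee: 100, vsize: 100, inputs: [] },     // low-fee parent
  { txid: 'B', fee: 1000, vsize: 100, inputs: ['A'] }, // fee-bumping child
  { txid: 'C', fee: 300, vsize: 150, inputs: [] },     // unrelated tx
];
const toyClusters: ToyTx[][] = [];
let toyCluster: ToyTx[] = [];
let seenAncestors: Record<string, boolean> = {};
for (let i = toyTxs.length - 1; i >= 0; i--) {
  const tx = toyTxs[i];
  if (!seenAncestors[tx.txid]) { // tx does not extend the open cluster
    if (toyCluster.length > 1) { toyClusters.push(toyCluster); }
    toyCluster = [];
    seenAncestors = {};
  }
  toyCluster.push(tx);
  tx.inputs.forEach((txid) => { seenAncestors[txid] = true; });
}
if (toyCluster.length > 1) { toyClusters.push(toyCluster); } // final flush
// toyClusters === [[B, A]]: single-tx runs like C are dropped.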
}
export default new Blocks();


@@ -0,0 +1,61 @@
import logger from '../logger';
import bitcoinClient from './bitcoin/bitcoin-client';
export interface ChainTip {
height: number;
hash: string;
branchlen: number;
status: 'invalid' | 'active' | 'valid-fork' | 'valid-headers' | 'headers-only';
};
export interface OrphanedBlock {
height: number;
hash: string;
status: 'valid-fork' | 'valid-headers' | 'headers-only';
}
class ChainTips {
private chainTips: ChainTip[] = [];
private orphanedBlocks: OrphanedBlock[] = [];
public async updateOrphanedBlocks(): Promise<void> {
try {
this.chainTips = await bitcoinClient.getChainTips();
this.orphanedBlocks = [];
for (const chain of this.chainTips) {
if (chain.status === 'valid-fork' || chain.status === 'valid-headers') {
let block = await bitcoinClient.getBlock(chain.hash);
while (block && block.confirmations === -1) {
this.orphanedBlocks.push({
height: block.height,
hash: block.hash,
status: chain.status
});
block = await bitcoinClient.getBlock(block.previousblockhash);
}
}
}
logger.debug(`Updated orphaned blocks cache. Found ${this.orphanedBlocks.length} orphaned blocks`);
} catch (e) {
logger.err(`Cannot fetch orphaned blocks. Reason: ${e instanceof Error ? e.message : e}`);
}
}
public getOrphanedBlocksAtHeight(height: number | undefined): OrphanedBlock[] {
if (height === undefined) {
return [];
}
const orphans: OrphanedBlock[] = [];
for (const block of this.orphanedBlocks) {
if (block.height === height) {
orphans.push(block);
}
}
return orphans;
}
}
export default new ChainTips();
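
A usage sketch, assuming the module path used in this diff; the cache is refreshed whenever a new block arrives and queried per height afterwards:

import chainTips from './chain-tips';

// Refresh the orphan cache from Core, then look up a specific height.
await chainTips.updateOrphanedBlocks();
const orphans = chainTips.getOrphanedBlocksAtHeight(780000); // [] when none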


@@ -1,4 +1,4 @@
import { CpfpInfo, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import { CpfpInfo, MempoolBlockWithTransactions, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
@@ -35,24 +35,31 @@ export class Common {
}
static getFeesInRange(transactions: TransactionExtended[], rangeLength: number) {
const arr = [transactions[transactions.length - 1].effectiveFeePerVsize];
const filtered: TransactionExtended[] = [];
let lastValidRate = Infinity;
// filter out anomalous fee rates to ensure monotonic range
for (const tx of transactions) {
if (tx.effectiveFeePerVsize <= lastValidRate) {
filtered.push(tx);
lastValidRate = tx.effectiveFeePerVsize;
}
}
const arr = [filtered[filtered.length - 1].effectiveFeePerVsize];
const chunk = 1 / (rangeLength - 1);
let itemsToAdd = rangeLength - 2;
while (itemsToAdd > 0) {
arr.push(transactions[Math.floor(transactions.length * chunk * itemsToAdd)].effectiveFeePerVsize);
arr.push(filtered[Math.floor(filtered.length * chunk * itemsToAdd)].effectiveFeePerVsize);
itemsToAdd--;
}
arr.push(transactions[0].effectiveFeePerVsize);
arr.push(filtered[0].effectiveFeePerVsize);
return arr;
}
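
Transactions reach getFeesInRange sorted by descending effective fee rate, but CPFP re-pricing can leave local bumps; the filter keeps only rates that never rise. A small standalone illustration:

// 45 breaks the descending order, so it is dropped.
const rates = [50, 40, 45, 30, 30, 10];
let lastValid = Infinity;
const monotonic = rates.filter((r) => (r <= lastValid ? ((lastValid = r), true) : false));
// monotonic === [50, 40, 30, 30, 10]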
static findRbfTransactions(added: TransactionExtended[], deleted: TransactionExtended[]): { [txid: string]: TransactionExtended } {
const matches: { [txid: string]: TransactionExtended } = {};
deleted
// The replaced tx must have at least one input with nSequence < maxint-1 (that's the opt-in RBF signal)
.filter((tx) => tx.vin.some((vin) => vin.sequence < 0xfffffffe))
.forEach((deletedTx) => {
const foundMatches = added.find((addedTx) => {
// The new tx must pay at least as much fee, in absolute terms, as the replaced tx.
@@ -61,7 +68,7 @@ export class Common {
&& addedTx.feePerVsize > deletedTx.feePerVsize
// Spends one or more of the same inputs
&& deletedTx.vin.some((deletedVin) =>
addedTx.vin.some((vin) => vin.txid === deletedVin.txid));
addedTx.vin.some((vin) => vin.txid === deletedVin.txid && vin.vout === deletedVin.vout));
});
if (foundMatches) {
matches[deletedTx.txid] = foundMatches;
@@ -157,6 +164,30 @@ export class Common {
return parents;
}
// calculates the ratio of matched transactions to projected transactions by weight
static getSimilarity(projectedBlock: MempoolBlockWithTransactions, transactions: TransactionExtended[]): number {
let matchedWeight = 0;
let projectedWeight = 0;
const inBlock = {};
for (const tx of transactions) {
inBlock[tx.txid] = tx;
}
// count the template transactions, by weight, that made it into the mined block
for (const tx of projectedBlock.transactions) {
if (inBlock[tx.txid]) {
matchedWeight += tx.vsize * 4;
}
projectedWeight += tx.vsize * 4;
}
projectedWeight += transactions[0].weight;
matchedWeight += transactions[0].weight;
return projectedWeight ? matchedWeight / projectedWeight : 1;
}
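
A hand-worked example of the similarity score, with hypothetical weights: a template of three 400-weight transactions plus an 800-weight coinbase, of which two template transactions were mined:

const matchedWeight = 400 + 400 + 800;              // two matches + coinbase
const projectedWeight = 3 * 400 + 800;              // full template + coinbase
const similarity = matchedWeight / projectedWeight; // 1600 / 2000 = 0.8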
static getSqlInterval(interval: string | null): string | null {
switch (interval) {
case '24h': return '1 DAY';
@@ -168,6 +199,7 @@ export class Common {
case '1y': return '1 YEAR';
case '2y': return '2 YEAR';
case '3y': return '3 YEAR';
case '4y': return '4 YEAR';
default: return null;
}
}
@@ -187,6 +219,13 @@ export class Common {
);
}
static cpfpIndexingEnabled(): boolean {
return (
Common.indexingEnabled() &&
config.MEMPOOL.CPFP_INDEXING === true
);
}
static setDateMidnight(date: Date): void {
date.setUTCHours(0);
date.setUTCMinutes(0);
@@ -223,14 +262,21 @@ export class Common {
].join('x');
}
static utcDateToMysql(date?: number): string {
static utcDateToMysql(date?: number | null): string | null {
if (date === null) {
return null;
}
const d = new Date((date || 0) * 1000);
return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
}
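
Example outputs, assuming the server clock runs on UTC (toTimeString() formats in local time, so the UTC date from toISOString() and the time-of-day only agree under that assumption):

// utcDateToMysql(1679280000) -> '2023-03-20 02:40:00'
// utcDateToMysql(null)       -> null
// utcDateToMysql()           -> '1970-01-01 00:00:00'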
static findSocketNetwork(addr: string): {network: string | null, url: string} {
let network: string | null = null;
let url = addr.split('://')[1];
let url: string = addr;
if (config.LIGHTNING.BACKEND === 'cln') {
url = addr.split('://')[1];
}
if (!url) {
return {
@@ -247,7 +293,7 @@ export class Common {
}
} else if (addr.indexOf('i2p') !== -1) {
network = 'i2p';
} else if (addr.indexOf('ipv4') !== -1) {
} else if (addr.indexOf('ipv4') !== -1 || (config.LIGHTNING.BACKEND === 'lnd' && isIP(url.split(':')[0]) === 4)) {
const ipv = isIP(url.split(':')[0]);
if (ipv === 4) {
network = 'ipv4';
@@ -257,7 +303,7 @@ export class Common {
url: addr,
};
}
} else if (addr.indexOf('ipv6') !== -1) {
} else if (addr.indexOf('ipv6') !== -1 || (config.LIGHTNING.BACKEND === 'lnd' && url.indexOf(']:') !== -1)) {
url = url.split('[')[1].split(']')[0];
const ipv = isIP(url);
if (ipv === 6) {


@@ -2,10 +2,13 @@ import config from '../config';
import DB from '../database';
import logger from '../logger';
import { Common } from './common';
import blocksRepository from '../repositories/BlocksRepository';
import cpfpRepository from '../repositories/CpfpRepository';
import { RowDataPacket } from 'mysql2';
class DatabaseMigration {
private static currentVersion = 40;
private queryTimeout = 120000;
private static currentVersion = 59;
private queryTimeout = 3600_000;
private statisticsAddedIndexed = false;
private uniqueLogs: string[] = [];
@@ -59,8 +62,8 @@ class DatabaseMigration {
if (databaseSchemaVersion <= 2) {
// Disable some spam logs when they're not relevant
this.uniqueLogs.push(this.blocksTruncatedMessage);
this.uniqueLogs.push(this.hashratesTruncatedMessage);
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
}
logger.debug('MIGRATIONS: Current state.schema_version ' + databaseSchemaVersion);
@@ -83,7 +86,7 @@ class DatabaseMigration {
try {
await this.$migrateTableSchemaFromVersion(databaseSchemaVersion);
if (databaseSchemaVersion === 0) {
logger.notice(`MIGRATIONS: OK. Database schema has been properly initialized to version ${DatabaseMigration.currentVersion} (latest version)`);
} else {
logger.notice(`MIGRATIONS: OK. Database schema has been migrated from version ${databaseSchemaVersion} to ${DatabaseMigration.currentVersion} (latest version)`);
}
@@ -107,18 +110,22 @@ class DatabaseMigration {
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
await this.updateToSchemaVersion(2);
}
if (databaseSchemaVersion < 3) {
await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
await this.updateToSchemaVersion(3);
}
if (databaseSchemaVersion < 4) {
await this.$executeQuery('DROP table IF EXISTS blocks;');
await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
await this.updateToSchemaVersion(4);
}
if (databaseSchemaVersion < 5 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(5);
}
if (databaseSchemaVersion < 6 && isBitcoin === true) {
@@ -141,11 +148,13 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
await this.updateToSchemaVersion(6);
}
if (databaseSchemaVersion < 7 && isBitcoin === true) {
await this.$executeQuery('DROP table IF EXISTS hashrates;');
await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
await this.updateToSchemaVersion(7);
}
if (databaseSchemaVersion < 8 && isBitcoin === true) {
@@ -155,6 +164,7 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
await this.updateToSchemaVersion(8);
}
if (databaseSchemaVersion < 9 && isBitcoin === true) {
@@ -162,10 +172,12 @@ class DatabaseMigration {
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
await this.updateToSchemaVersion(9);
}
if (databaseSchemaVersion < 10 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
await this.updateToSchemaVersion(10);
}
if (databaseSchemaVersion < 11 && isBitcoin === true) {
@@ -178,11 +190,13 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(11);
}
if (databaseSchemaVersion < 12 && isBitcoin === true) {
// No need to re-index because the new data type can contain larger values
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(12);
}
if (databaseSchemaVersion < 13 && isBitcoin === true) {
@@ -190,6 +204,7 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(13);
}
if (databaseSchemaVersion < 14 && isBitcoin === true) {
@@ -197,37 +212,45 @@ class DatabaseMigration {
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(14);
}
if (databaseSchemaVersion < 16 && isBitcoin === true) {
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
await this.updateToSchemaVersion(16);
}
if (databaseSchemaVersion < 17 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
await this.updateToSchemaVersion(17);
}
if (databaseSchemaVersion < 18 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `hash` (`hash`);');
await this.updateToSchemaVersion(18);
}
if (databaseSchemaVersion < 19) {
await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
await this.updateToSchemaVersion(19);
}
if (databaseSchemaVersion < 20 && isBitcoin === true) {
await this.$executeQuery(this.getCreateBlocksSummariesTableQuery(), await this.$checkIfTableExists('blocks_summaries'));
await this.updateToSchemaVersion(20);
}
if (databaseSchemaVersion < 21) {
await this.$executeQuery('DROP TABLE IF EXISTS `rates`');
await this.$executeQuery(this.getCreatePricesTableQuery(), await this.$checkIfTableExists('prices'));
await this.updateToSchemaVersion(21);
}
if (databaseSchemaVersion < 22 && isBitcoin === true) {
await this.$executeQuery('DROP TABLE IF EXISTS `difficulty_adjustments`');
await this.$executeQuery(this.getCreateDifficultyAdjustmentsTableQuery(), await this.$checkIfTableExists('difficulty_adjustments'));
await this.updateToSchemaVersion(22);
}
if (databaseSchemaVersion < 23) {
@@ -240,11 +263,13 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `prices` ADD `CHF` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `AUD` float DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `prices` ADD `JPY` float DEFAULT "0"');
await this.updateToSchemaVersion(23);
}
if (databaseSchemaVersion < 24 && isBitcoin == true) {
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_audits`');
await this.$executeQuery(this.getCreateBlocksAuditsTableQuery(), await this.$checkIfTableExists('blocks_audits'));
await this.updateToSchemaVersion(24);
}
if (databaseSchemaVersion < 25 && isBitcoin === true) {
@@ -252,6 +277,7 @@ class DatabaseMigration {
await this.$executeQuery(this.getCreateNodesQuery(), await this.$checkIfTableExists('nodes'));
await this.$executeQuery(this.getCreateChannelsQuery(), await this.$checkIfTableExists('channels'));
await this.$executeQuery(this.getCreateNodesStatsQuery(), await this.$checkIfTableExists('node_stats'));
await this.updateToSchemaVersion(25);
}
if (databaseSchemaVersion < 26 && isBitcoin === true) {
@@ -262,6 +288,7 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD tor_nodes int(11) NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_nodes int(11) NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD unannounced_nodes int(11) NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(26);
}
if (databaseSchemaVersion < 27 && isBitcoin === true) {
@@ -271,8 +298,9 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(27);
}
if (databaseSchemaVersion < 28 && isBitcoin === true) {
if (config.LIGHTNING.ENABLED) {
this.uniqueLog(logger.notice, `'lightning_stats' and 'node_stats' tables have been truncated.`);
@@ -280,6 +308,7 @@ class DatabaseMigration {
await this.$executeQuery(`TRUNCATE lightning_stats`);
await this.$executeQuery(`TRUNCATE node_stats`);
await this.$executeQuery(`ALTER TABLE lightning_stats MODIFY added DATE`);
await this.updateToSchemaVersion(28);
}
if (databaseSchemaVersion < 29 && isBitcoin === true) {
@@ -291,41 +320,50 @@ class DatabaseMigration {
await this.$executeQuery('ALTER TABLE `nodes` ADD subdivision_id int(11) unsigned NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD longitude double NULL DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD latitude double NULL DEFAULT NULL');
await this.updateToSchemaVersion(29);
}
if (databaseSchemaVersion < 30 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization") NOT NULL');
await this.updateToSchemaVersion(30);
}
if (databaseSchemaVersion < 31 && isBitcoin == true) { // Link blocks to prices
await this.$executeQuery('ALTER TABLE `prices` ADD `id` int NULL AUTO_INCREMENT UNIQUE');
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_prices`');
await this.$executeQuery(this.getCreateBlocksPricesTableQuery(), await this.$checkIfTableExists('blocks_prices'));
await this.updateToSchemaVersion(31);
}
if (databaseSchemaVersion < 32 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `blocks_summaries` ADD `template` JSON DEFAULT "[]"');
await this.updateToSchemaVersion(32);
}
if (databaseSchemaVersion < 33 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization", "country_iso_code") NOT NULL');
await this.updateToSchemaVersion(33);
}
if (databaseSchemaVersion < 34 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"');
await this.updateToSchemaVersion(34);
}
if (databaseSchemaVersion < 35 && isBitcoin == true) {
await this.$executeQuery('DELETE from `lightning_stats` WHERE added > "2021-09-19"');
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD CONSTRAINT added_unique UNIQUE (added);');
await this.updateToSchemaVersion(35);
}
if (databaseSchemaVersion < 36 && isBitcoin == true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD status TINYINT NOT NULL DEFAULT "1"');
await this.updateToSchemaVersion(36);
}
if (databaseSchemaVersion < 37 && isBitcoin == true) {
await this.$executeQuery(this.getCreateLNNodesSocketsTableQuery(), await this.$checkIfTableExists('nodes_sockets'));
await this.updateToSchemaVersion(37);
}
if (databaseSchemaVersion < 38 && isBitcoin == true) {
@@ -336,17 +374,146 @@ class DatabaseMigration {
await this.$executeQuery(`TRUNCATE node_stats`);
await this.$executeQuery('ALTER TABLE `lightning_stats` CHANGE `added` `added` timestamp NULL');
await this.$executeQuery('ALTER TABLE `node_stats` CHANGE `added` `added` timestamp NULL');
await this.updateToSchemaVersion(38);
}
if (databaseSchemaVersion < 39 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD alias_search TEXT NULL DEFAULT NULL AFTER `alias`');
await this.$executeQuery('ALTER TABLE nodes ADD FULLTEXT(alias_search)');
await this.updateToSchemaVersion(39);
}
if (databaseSchemaVersion < 40 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `nodes` ADD capacity bigint(20) unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`);');
await this.updateToSchemaVersion(40);
}
if (databaseSchemaVersion < 41 && isBitcoin === true) {
await this.$executeQuery('UPDATE channels SET closing_reason = NULL WHERE closing_reason = 1');
await this.updateToSchemaVersion(41);
}
if (databaseSchemaVersion < 42 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `channels` ADD closing_resolved tinyint(1) DEFAULT 0');
await this.updateToSchemaVersion(42);
}
if (databaseSchemaVersion < 43 && isBitcoin === true) {
await this.$executeQuery(this.getCreateLNNodeRecordsTableQuery(), await this.$checkIfTableExists('nodes_records'));
await this.updateToSchemaVersion(43);
}
if (databaseSchemaVersion < 44 && isBitcoin === true) {
await this.$executeQuery('UPDATE blocks_summaries SET template = NULL');
await this.updateToSchemaVersion(44);
}
if (databaseSchemaVersion < 45 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD fresh_txs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(45);
}
if (databaseSchemaVersion < 46) {
await this.$executeQuery(`ALTER TABLE blocks MODIFY blockTimestamp timestamp NOT NULL DEFAULT 0`);
await this.updateToSchemaVersion(46);
}
if (databaseSchemaVersion < 47) {
await this.$executeQuery('ALTER TABLE `blocks` ADD cpfp_indexed tinyint(1) DEFAULT 0');
await this.$executeQuery(this.getCreateCPFPTableQuery(), await this.$checkIfTableExists('cpfp_clusters'));
await this.$executeQuery(this.getCreateTransactionsTableQuery(), await this.$checkIfTableExists('transactions'));
await this.updateToSchemaVersion(47);
}
if (databaseSchemaVersion < 48 && isBitcoin === true) {
await this.$executeQuery('ALTER TABLE `channels` ADD source_checked tinyint(1) DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD closing_fee bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node1_funding_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node2_funding_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node1_closing_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD node2_closing_balance bigint(20) unsigned DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD funding_ratio float unsigned DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `channels` ADD closed_by varchar(66) DEFAULT NULL');
await this.$executeQuery('ALTER TABLE `channels` ADD single_funded tinyint(1) DEFAULT 0');
await this.$executeQuery('ALTER TABLE `channels` ADD outputs JSON DEFAULT "[]"');
await this.updateToSchemaVersion(48);
}
if (databaseSchemaVersion < 49 && isBitcoin === true) {
await this.$executeQuery('TRUNCATE TABLE `blocks_audits`');
await this.updateToSchemaVersion(49);
}
if (databaseSchemaVersion < 50) {
await this.$executeQuery('ALTER TABLE `blocks` DROP COLUMN `cpfp_indexed`');
await this.updateToSchemaVersion(50);
}
if (databaseSchemaVersion < 51) {
await this.$executeQuery('ALTER TABLE `cpfp_clusters` ADD INDEX `height` (`height`)');
await this.updateToSchemaVersion(51);
}
if (databaseSchemaVersion < 52) {
await this.$executeQuery(this.getCreateCompactCPFPTableQuery(), await this.$checkIfTableExists('compact_cpfp_clusters'));
await this.$executeQuery(this.getCreateCompactTransactionsTableQuery(), await this.$checkIfTableExists('compact_transactions'));
try {
await this.$convertCompactCpfpTables();
await this.$executeQuery('DROP TABLE IF EXISTS `transactions`');
await this.$executeQuery('DROP TABLE IF EXISTS `cpfp_clusters`');
await this.updateToSchemaVersion(52);
} catch (e) {
logger.warn('' + (e instanceof Error ? e.message : e));
}
}
if (databaseSchemaVersion < 53) {
await this.$executeQuery('ALTER TABLE statistics MODIFY mempool_byte_weight bigint(20) UNSIGNED NOT NULL');
await this.updateToSchemaVersion(53);
}
if (databaseSchemaVersion < 54) {
this.uniqueLog(logger.notice, `'prices' table has been truncated`);
await this.$executeQuery(`TRUNCATE prices`);
if (isBitcoin === true) {
this.uniqueLog(logger.notice, `'blocks_prices' table has been truncated`);
await this.$executeQuery(`TRUNCATE blocks_prices`);
}
await this.updateToSchemaVersion(54);
}
if (databaseSchemaVersion < 55) {
await this.$executeQuery(this.getAdditionalBlocksDataQuery());
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
await this.updateToSchemaVersion(55);
}
if (databaseSchemaVersion < 56) {
await this.$executeQuery('ALTER TABLE pools ADD unique_id int NOT NULL DEFAULT -1');
await this.$executeQuery('TRUNCATE TABLE `blocks`');
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
await this.$executeQuery('DELETE FROM `pools`');
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
this.uniqueLog(logger.notice, '`pools` table has been truncated');
await this.updateToSchemaVersion(56);
}
if (databaseSchemaVersion < 57 && isBitcoin === true) {
await this.$executeQuery(`ALTER TABLE nodes MODIFY updated_at datetime NULL`);
await this.updateToSchemaVersion(57);
}
if (databaseSchemaVersion < 58) {
// We only run some migration queries for this version
await this.updateToSchemaVersion(58);
}
if (databaseSchemaVersion < 59 && (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'testnet')) {
// https://github.com/mempool/mempool/issues/3360
await this.$executeQuery(`TRUNCATE prices`);
}
}
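Every block in the chain above follows the same guard-and-bump pattern: run only while the stored schema version is below the target, apply idempotent DDL, and persist the new version last. A minimal sketch of that pattern, with a hypothetical table and version number:

// A minimal sketch of the guard-and-bump migration pattern used above.
// `example_table` and version 60 are hypothetical; $executeQuery and
// updateToSchemaVersion stand in for the real class members.
async function $executeQuery(query: string): Promise<void> { /* run against MySQL */ }
async function updateToSchemaVersion(version: number): Promise<void> {
  await $executeQuery(`UPDATE state SET number = ${version} WHERE name = 'schema_version';`);
}

async function migrate(databaseSchemaVersion: number): Promise<void> {
  if (databaseSchemaVersion < 60) {
    // Idempotent DDL first, version bump last: if the process dies in
    // between, this block simply re-runs on the next start.
    await $executeQuery('CREATE TABLE IF NOT EXISTS `example_table` (id int)');
    await updateToSchemaVersion(60);
  }
}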
@@ -471,10 +638,15 @@ class DatabaseMigration {
queries.push(`INSERT INTO state(name, number, string) VALUES ('last_hashrates_indexing', 0, NULL)`);
}
if (version < 9 && isBitcoin === true) {
queries.push(`INSERT INTO state(name, number, string) VALUES ('last_weekly_hashrates_indexing', 0, NULL)`);
}
if (version < 58) {
queries.push(`DELETE FROM state WHERE name = 'last_hashrates_indexing'`);
queries.push(`DELETE FROM state WHERE name = 'last_weekly_hashrates_indexing'`);
}
return queries;
}
@@ -485,6 +657,10 @@ class DatabaseMigration {
return `UPDATE state SET number = ${DatabaseMigration.currentVersion} WHERE name = 'schema_version';`;
}
private async updateToSchemaVersion(version: number): Promise<void> {
await this.$executeQuery(`UPDATE state SET number = ${version} WHERE name = 'schema_version';`);
}
/**
* Print current database version
*/
@@ -617,6 +793,28 @@ class DatabaseMigration {
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getAdditionalBlocksDataQuery(): string {
return `ALTER TABLE blocks
ADD median_timestamp timestamp NOT NULL,
ADD coinbase_address varchar(100) NULL,
ADD coinbase_signature varchar(500) NULL,
ADD coinbase_signature_ascii varchar(500) NULL,
ADD avg_tx_size double unsigned NOT NULL,
ADD total_inputs int unsigned NOT NULL,
ADD total_outputs int unsigned NOT NULL,
ADD total_output_amt bigint unsigned NOT NULL,
ADD fee_percentiles longtext NULL,
ADD median_fee_amt int unsigned NULL,
ADD segwit_total_txs int unsigned NOT NULL,
ADD segwit_total_size int unsigned NOT NULL,
ADD segwit_total_weight int unsigned NOT NULL,
ADD header varchar(160) NOT NULL,
ADD utxoset_change int NOT NULL,
ADD utxoset_size int unsigned NULL,
ADD total_input_amt bigint unsigned NULL
`;
}
private getCreateDailyStatsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS hashrates (
hashrate_timestamp timestamp NOT NULL,
@@ -783,24 +981,109 @@ class DatabaseMigration {
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
public async $truncateIndexedData(tables: string[]) {
const allowedTables = ['blocks', 'hashrates', 'prices'];
private getCreateLNNodeRecordsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS nodes_records (
public_key varchar(66) NOT NULL,
type int(10) unsigned NOT NULL,
payload blob NOT NULL,
UNIQUE KEY public_key_type (public_key, type),
INDEX (public_key),
FOREIGN KEY (public_key)
REFERENCES nodes (public_key)
ON DELETE CASCADE
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateCPFPTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS cpfp_clusters (
root varchar(65) NOT NULL,
height int(10) NOT NULL,
txs JSON DEFAULT NULL,
fee_rate double unsigned NOT NULL,
PRIMARY KEY (root)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateTransactionsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS transactions (
txid varchar(65) NOT NULL,
cluster varchar(65) DEFAULT NULL,
PRIMARY KEY (txid),
FOREIGN KEY (cluster) REFERENCES cpfp_clusters (root) ON DELETE SET NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateCompactCPFPTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS compact_cpfp_clusters (
root binary(32) NOT NULL,
height int(10) NOT NULL,
txs BLOB DEFAULT NULL,
fee_rate float unsigned,
PRIMARY KEY (root),
INDEX (height)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
private getCreateCompactTransactionsTableQuery(): string {
return `CREATE TABLE IF NOT EXISTS compact_transactions (
txid binary(32) NOT NULL,
cluster binary(32) DEFAULT NULL,
PRIMARY KEY (txid)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
}
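The compact_* variants swap the varchar(65) hex txids and cluster roots for BINARY(32), roughly halving key size and index footprint; the application then converts at the boundary. A sketch of that conversion (the helper names are illustrative, not the repository's API):

// Hex txid <-> BINARY(32) column conversion implied by the compact_* tables.
// Helper names are illustrative.
function txidToBinary(txid: string): Buffer {
  return Buffer.from(txid, 'hex'); // 64 hex chars -> 32 bytes
}

function binaryToTxid(root: Buffer): string {
  return root.toString('hex');
}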
public async $blocksReindexingTruncate(): Promise<void> {
logger.warn(`Truncating pools, blocks and hashrates for re-indexing (using '--reindex-blocks'). You can cancel this command within 5 seconds`);
await Common.sleep$(5000);
await this.$executeQuery(`TRUNCATE blocks`);
await this.$executeQuery(`TRUNCATE hashrates`);
await this.$executeQuery(`TRUNCATE difficulty_adjustments`);
await this.$executeQuery('DELETE FROM `pools`');
await this.$executeQuery('ALTER TABLE pools AUTO_INCREMENT = 1');
await this.$executeQuery(`UPDATE state SET string = NULL WHERE name = 'pools_json_sha'`);
}
private async $convertCompactCpfpTables(): Promise<void> {
try {
for (const table of tables) {
if (!allowedTables.includes(table)) {
logger.debug(`Table ${table} cannot be re-indexed (not allowed)`);
continue;
const batchSize = 250;
const maxHeight = await blocksRepository.$mostRecentBlockHeight() || 0;
const [minHeightRows]: any = await DB.query(`SELECT MIN(height) AS minHeight from cpfp_clusters`);
const minHeight = (minHeightRows.length && minHeightRows[0].minHeight != null) ? minHeightRows[0].minHeight : maxHeight;
let height = maxHeight;
// Logging
let timer = new Date().getTime() / 1000;
const startedAt = new Date().getTime() / 1000;
while (height > minHeight) {
const [rows] = await DB.query(
`
SELECT * from cpfp_clusters
WHERE height <= ? AND height > ?
ORDER BY height
`,
[height, height - batchSize]
) as RowDataPacket[][];
if (rows?.length) {
await cpfpRepository.$batchSaveClusters(rows.map(row => {
return {
root: row.root,
height: row.height,
txs: JSON.parse(row.txs),
effectiveFeePerVsize: row.fee_rate,
};
}));
}
await this.$executeQuery(`TRUNCATE ${table}`, true);
if (table === 'hashrates') {
await this.$executeQuery('UPDATE state set number = 0 where name = "last_hashrates_indexing"', true);
}
logger.notice(`Table ${table} has been truncated`);
const elapsed = new Date().getTime() / 1000 - timer;
const runningFor = new Date().getTime() / 1000 - startedAt;
logger.debug(`Migrated cpfp data from block ${height} to ${height - batchSize} in ${elapsed.toFixed(2)} seconds | total elapsed: ${runningFor.toFixed(2)} seconds`);
timer = new Date().getTime() / 1000;
height -= batchSize;
}
} catch (e) {
logger.warn(`Unable to erase indexed data`);
logger.warn(`Failed to migrate cpfp transaction data`);
}
}
}
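The conversion walks the old cpfp_clusters rows in fixed 250-block windows from the chain tip down to the oldest indexed height, so each batch stays bounded no matter how large the table grew. The windowing logic reduces to:

// Descending height windows as used by the conversion loop above
// (generator form; values in the example call are illustrative).
function* heightWindows(maxHeight: number, minHeight: number, batchSize = 250): Generator<[number, number]> {
  let height = maxHeight;
  while (height > minHeight) {
    yield [height, height - batchSize]; // maps to: WHERE height <= ? AND height > ?
    height -= batchSize;
  }
}

// heightWindows(780000, 779500) yields [780000, 779750], then [779750, 779500]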


@@ -9,9 +9,11 @@ export interface DifficultyAdjustment {
remainingBlocks: number; // Block count
remainingTime: number; // Duration of time in ms
previousRetarget: number; // Percent: -75 to 300
previousTime: number; // Unix time in ms
nextRetargetHeight: number; // Block Height
timeAvg: number; // Duration of time in ms
timeOffset: number; // (Testnet) Time since last block (cap @ 20min) in ms
expectedBlocks: number; // Block count
}
export function calcDifficultyAdjustment(
@@ -32,12 +34,12 @@ export function calcDifficultyAdjustment(
const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;
const expectedBlocks = diffSeconds / BLOCK_SECONDS_TARGET;
let difficultyChange = 0;
let timeAvgSecs = BLOCK_SECONDS_TARGET;
let timeAvgSecs = diffSeconds / blocksInEpoch;
// Only calculate the estimate once we have 7.2% of blocks in current epoch
if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
timeAvgSecs = diffSeconds / blocksInEpoch;
difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
// Max increase is x4 (+300%)
if (difficultyChange > 300) {
@@ -74,9 +76,11 @@ export function calcDifficultyAdjustment(
remainingBlocks,
remainingTime,
previousRetarget,
previousTime: DATime,
nextRetargetHeight,
timeAvg,
timeOffset,
expectedBlocks,
};
}
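As a worked check of the estimator: 1008 blocks into an epoch after 544,320 seconds gives timeAvgSecs = 544320 / 1008 = 540, so difficultyChange = (600 / 540 - 1) * 100 ≈ +11.1%, while expectedBlocks = 544320 / 600 = 907.2 against the 1008 actually found (blocks arrived faster than target, so difficulty rises). The arithmetic, runnable as-is:

// Worked example of the estimate above; the epoch numbers are illustrative.
const BLOCK_SECONDS_TARGET = 600;
const blocksInEpoch = 1008;
const diffSeconds = 544320; // seconds since the last retarget
const timeAvgSecs = diffSeconds / blocksInEpoch; // 540
const difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100; // ~11.11
const expectedBlocks = diffSeconds / BLOCK_SECONDS_TARGET; // 907.2
console.log({ timeAvgSecs, difficultyChange, expectedBlocks });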


@@ -9,25 +9,35 @@ import { TransactionExtended } from '../mempool.interfaces';
import { Common } from './common';
class DiskCache {
private cacheSchemaVersion = 1;
private cacheSchemaVersion = 3;
private static TMP_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/tmp-cache.json';
private static TMP_FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/tmp-cache{number}.json';
private static FILE_NAME = config.MEMPOOL.CACHE_DIR + '/cache.json';
private static FILE_NAMES = config.MEMPOOL.CACHE_DIR + '/cache{number}.json';
private static CHUNK_FILES = 25;
private isWritingCache = false;
constructor() { }
constructor() {
if (!cluster.isPrimary) {
return;
}
process.on('SIGINT', (e) => {
this.$saveCacheToDisk(true);
process.exit(0);
});
}
async $saveCacheToDisk(): Promise<void> {
async $saveCacheToDisk(sync: boolean = false): Promise<void> {
if (!cluster.isPrimary) {
return;
}
if (this.isWritingCache) {
logger.debug('Saving cache already in progress. Skipping.')
logger.debug('Saving cache already in progress. Skipping.');
return;
}
try {
logger.debug('Writing mempool and blocks data to disk cache (async)...');
logger.debug(`Writing mempool and blocks data to disk cache (${ sync ? 'sync' : 'async' })...`);
this.isWritingCache = true;
const mempool = memPool.getMempool();
@@ -40,19 +50,48 @@ class DiskCache {
const chunkSize = Math.floor(mempoolArray.length / DiskCache.CHUNK_FILES);
await fsPromises.writeFile(DiskCache.FILE_NAME, JSON.stringify({
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await fsPromises.writeFile(DiskCache.FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
if (sync) {
fs.writeFileSync(DiskCache.TMP_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.writeFileSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
}
fs.renameSync(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
} else {
await fsPromises.writeFile(DiskCache.TMP_FILE_NAME, JSON.stringify({
network: config.MEMPOOL.NETWORK,
cacheSchemaVersion: this.cacheSchemaVersion,
blocks: blocks.getBlocks(),
blockSummaries: blocks.getBlockSummaries(),
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await fsPromises.writeFile(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
mempool: {},
mempoolArray: mempoolArray.splice(0, chunkSize),
}), { flag: 'w' });
}
await fsPromises.rename(DiskCache.TMP_FILE_NAME, DiskCache.FILE_NAME);
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
await fsPromises.rename(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
}
}
logger.debug('Mempool and blocks data saved to disk cache');
this.isWritingCache = false;
} catch (e) {
@@ -61,14 +100,29 @@ class DiskCache {
}
}
wipeCache() {
fs.unlinkSync(DiskCache.FILE_NAME);
wipeCache(): void {
logger.notice(`Wiping nodejs backend cache/cache*.json files`);
try {
fs.unlinkSync(DiskCache.FILE_NAME);
} catch (e: any) {
if (e?.code !== 'ENOENT') {
logger.err(`Cannot wipe cache file ${DiskCache.FILE_NAME}. Exception ${JSON.stringify(e)}`);
}
}
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
fs.unlinkSync(DiskCache.FILE_NAMES.replace('{number}', i.toString()));
const filename = DiskCache.FILE_NAMES.replace('{number}', i.toString());
try {
fs.unlinkSync(filename);
} catch (e: any) {
if (e?.code !== 'ENOENT') {
logger.err(`Cannot wipe cache file ${filename}. Exception ${JSON.stringify(e)}`);
}
}
}
}
loadMempoolCache() {
loadMempoolCache(): void {
if (!fs.existsSync(DiskCache.FILE_NAME)) {
return;
}
@@ -82,6 +136,10 @@ class DiskCache {
logger.notice('Disk cache contains an outdated schema version. Clearing it and skipping the cache loading.');
return this.wipeCache();
}
if (data.network && data.network !== config.MEMPOOL.NETWORK) {
logger.notice('Disk cache contains data from a different network. Clearing it and skipping the cache loading.');
return this.wipeCache();
}
if (data.mempoolArray) {
for (const tx of data.mempoolArray) {

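Both the sync and async paths above write to tmp-cache*.json first and only then rename over cache*.json. rename() within a filesystem is atomic, so a crash mid-write leaves the previous cache readable instead of a truncated JSON file. The same idea in isolation:

// Atomic-replace write: a crash during writeFileSync leaves `target`
// untouched. Paths are illustrative.
import * as fs from 'fs';

function writeFileAtomic(target: string, data: string): void {
  const tmp = target + '.tmp';
  fs.writeFileSync(tmp, data, { flag: 'w' });
  fs.renameSync(tmp, target); // atomic within the same filesystem
}

writeFileAtomic('./cache.json', JSON.stringify({ ok: true }));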

@@ -117,6 +117,32 @@ class ChannelsApi {
}
}
public async $getUnresolvedClosedChannels(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getUnresolvedClosedChannels error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsWithoutSourceChecked(): Promise<any[]> {
try {
const query = `
SELECT channels.*
FROM channels
WHERE channels.source_checked != 1
`;
const [rows]: any = await DB.query(query);
return rows;
} catch (e) {
logger.err('$getChannelsWithoutSourceChecked error: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getChannelsWithoutCreatedDate(): Promise<any[]> {
try {
const query = `SELECT * FROM channels WHERE created IS NULL`;
@@ -246,6 +272,108 @@ class ChannelsApi {
}
}
public async $getChannelByClosingId(transactionId: string): Promise<any> {
try {
const query = `
SELECT
channels.*
FROM channels
WHERE channels.closing_transaction_id = ?
`;
const [rows]: any = await DB.query(query, [transactionId]);
if (rows.length > 0) {
rows[0].outputs = JSON.parse(rows[0].outputs);
return rows[0];
}
} catch (e) {
logger.err('$getChannelByClosingId error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $getChannelsByOpeningId(transactionId: string): Promise<any> {
try {
const query = `
SELECT
channels.*
FROM channels
WHERE channels.transaction_id = ?
`;
const [rows]: any = await DB.query(query, [transactionId]);
if (rows.length > 0) {
return rows.map(row => {
row.outputs = JSON.parse(row.outputs);
return row;
});
}
} catch (e) {
logger.err('$getChannelsByOpeningId error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $updateClosingInfo(channelInfo: { id: string, node1_closing_balance: number, node2_closing_balance: number, closed_by: string | null, closing_fee: number, outputs: ILightningApi.ForensicOutput[]}): Promise<void> {
try {
const query = `
UPDATE channels SET
node1_closing_balance = ?,
node2_closing_balance = ?,
closed_by = ?,
closing_fee = ?,
outputs = ?
WHERE channels.id = ?
`;
await DB.query<ResultSetHeader>(query, [
channelInfo.node1_closing_balance || 0,
channelInfo.node2_closing_balance || 0,
channelInfo.closed_by,
channelInfo.closing_fee || 0,
JSON.stringify(channelInfo.outputs),
channelInfo.id,
]);
} catch (e) {
logger.err('$updateClosingInfo error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $updateOpeningInfo(channelInfo: { id: string, node1_funding_balance: number, node2_funding_balance: number, funding_ratio: number, single_funded: boolean | void }): Promise<void> {
try {
const query = `
UPDATE channels SET
node1_funding_balance = ?,
node2_funding_balance = ?,
funding_ratio = ?,
single_funded = ?
WHERE channels.id = ?
`;
await DB.query<ResultSetHeader>(query, [
channelInfo.node1_funding_balance || 0,
channelInfo.node2_funding_balance || 0,
channelInfo.funding_ratio,
channelInfo.single_funded ? 1 : 0,
channelInfo.id,
]);
} catch (e) {
logger.err('$updateOpeningInfo error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $markChannelSourceChecked(id: string): Promise<void> {
try {
const query = `
UPDATE channels
SET source_checked = 1
WHERE id = ?
`;
await DB.query<ResultSetHeader>(query, [id]);
} catch (e) {
logger.err('$markChannelSourceChecked error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
public async $getChannelsForNode(public_key: string, index: number, length: number, status: string): Promise<any[]> {
try {
let channelStatusFilter;
@@ -374,11 +502,15 @@ class ChannelsApi {
'transaction_id': channel.transaction_id,
'transaction_vout': channel.transaction_vout,
'closing_transaction_id': channel.closing_transaction_id,
'closing_fee': channel.closing_fee,
'closing_reason': channel.closing_reason,
'closing_date': channel.closing_date,
'updated_at': channel.updated_at,
'created': channel.created,
'status': channel.status,
'funding_ratio': channel.funding_ratio,
'closed_by': channel.closed_by,
'single_funded': !!channel.single_funded,
'node_left': {
'alias': channel.alias_left,
'public_key': channel.node1_public_key,
@@ -393,6 +525,9 @@ class ChannelsApi {
'updated_at': channel.node1_updated_at,
'longitude': channel.node1_longitude,
'latitude': channel.node1_latitude,
'funding_balance': channel.node1_funding_balance,
'closing_balance': channel.node1_closing_balance,
'initiated_close': channel.closed_by === channel.node1_public_key ? true : undefined,
},
'node_right': {
'alias': channel.alias_right,
@@ -408,6 +543,9 @@ class ChannelsApi {
'updated_at': channel.node2_updated_at,
'longitude': channel.node2_longitude,
'latitude': channel.node2_latitude,
'funding_balance': channel.node2_funding_balance,
'closing_balance': channel.node2_closing_balance,
'initiated_close': channel.closed_by === channel.node2_public_key ? true : undefined,
},
};
}
@@ -421,6 +559,17 @@ class ChannelsApi {
const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};
// https://github.com/mempool/mempool/issues/3006
if ((channel.last_update ?? 0) < 1514736061) { // January 1st 2018
channel.last_update = null;
}
if ((policy1.last_update ?? 0) < 1514736061) { // January 1st 2018
policy1.last_update = null;
}
if ((policy2.last_update ?? 0) < 1514736061) { // January 1st 2018
policy2.last_update = null;
}
const query = `INSERT INTO channels
(
id,
@@ -532,9 +681,7 @@ class ChannelsApi {
AND status != 2
`);
if ((result[0].changedRows ?? 0) > 0) {
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
} else {
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`, logger.tags.ln);
}
} catch (e) {
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));


@@ -14,8 +14,8 @@ class NodesApi {
nodes.longitude, nodes.latitude,
geo_names_country.names as country, geo_names_iso.names as isoCode
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
WHERE status = 1 AND nodes.as_number IS NOT NULL
ORDER BY capacity
`;
@@ -105,6 +105,18 @@ class NodesApi {
node.closed_channel_count = rows[0].closed_channel_count;
}
// Custom records
query = `
SELECT type, payload
FROM nodes_records
WHERE public_key = ?
`;
[rows] = await DB.query(query, [public_key]);
node.custom_records = {};
for (const record of rows) {
node.custom_records[record.type] = Buffer.from(record.payload, 'binary').toString('hex');
}
return node;
} catch (e) {
logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
@@ -129,6 +141,56 @@ class NodesApi {
}
}
public async $getFeeHistogram(node_public_key: string): Promise<unknown> {
try {
const inQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node2_fee_rate WHEN node2_public_key = ? THEN node1_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [inRows]: any[] = await DB.query(inQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
const outQuery = `
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
END as bucket,
count(short_id) as count,
sum(capacity) as capacity
FROM (
SELECT CASE WHEN node1_public_key = ? THEN node1_fee_rate WHEN node2_public_key = ? THEN node2_fee_rate END as fee_rate,
short_id as short_id,
capacity as capacity
FROM channels
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
) as fee_rate_table
GROUP BY bucket;
`;
const [outRows]: any[] = await DB.query(outQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
return {
incoming: inRows.length > 0 ? inRows : [],
outgoing: outRows.length > 0 ? outRows : [],
};
} catch (e) {
logger.err(`Cannot get node fee distribution for ${node_public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
throw e;
}
}
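The CASE expression buckets channel fee rates logarithmically: unit steps up to 10, tens up to 100, hundreds up to 1000, thousands beyond. A TypeScript mirror, handy for sanity-checking the SQL:

// Mirror of the SQL CASE bucketing above (fee_rate in ppm).
function feeRateBucket(feeRate: number): number {
  if (feeRate <= 10) { return Math.ceil(feeRate); }
  if (feeRate <= 100) { return Math.ceil(feeRate / 10) * 10; }
  if (feeRate <= 1000) { return Math.ceil(feeRate / 100) * 100; }
  return Math.ceil(feeRate / 1000) * 1000;
}

// feeRateBucket(7) === 7, feeRateBucket(42) === 50,
// feeRateBucket(850) === 900, feeRateBucket(2500) === 3000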
public async $getAllNodes(): Promise<any> {
try {
const query = `SELECT * FROM nodes`;
@@ -166,7 +228,7 @@ class NodesApi {
nodes.capacity
FROM nodes
ORDER BY capacity DESC
LIMIT 100
LIMIT 6
`;
[rows] = await DB.query(query);
@@ -207,14 +269,26 @@ class NodesApi {
let query: string;
if (full === false) {
query = `
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
nodes.channels
SELECT
nodes.public_key as publicKey,
IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
nodes.channels,
geo_names_city.names as city, geo_names_country.names as country,
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
FROM nodes
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
ORDER BY channels DESC
LIMIT 100;
LIMIT 6;
`;
[rows] = await DB.query(query);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
}
} else {
query = `
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
@@ -300,7 +374,13 @@ class NodesApi {
public async $searchNodeByPublicKeyOrAlias(search: string) {
try {
const publicKeySearch = search.replace('%', '') + '%';
const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
const aliasSearch = search
.replace(/[-_.]/g, ' ') // Replace all -_. characters with a space, e.g. "ln.nicehash" becomes "ln nicehash".
.replace(/[^a-zA-Z0-9 ]/g, '') // Remove all special characters and keep just A to Z, 0 to 9.
.split(' ')
.filter(key => key.length)
.map((search) => '+' + search + '*').join(' ');
// %keyword% is a wildcard search that can't use an index, so it gets slower as the node database grows. keyword% can be indexed, but then searching "Nicehash" wouldn't match ln.nicehash.com. So we use a fulltext index on the words "ln, nicehash, com", and nicehash* finds it instantly.
const query = `SELECT public_key, alias, capacity, channels, status FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
return rows;
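Traced on a concrete input, the preprocessing turns a free-form query into a BOOLEAN MODE prefix search:

// Trace of the alias preprocessing above.
const search = 'ln.nicehash';
const aliasSearch = search
  .replace(/[-_.]/g, ' ')
  .replace(/[^a-zA-Z0-9 ]/g, '')
  .split(' ')
  .filter(key => key.length)
  .map((key) => '+' + key + '*')
  .join(' ');
console.log(aliasSearch); // "+ln* +nicehash*" -- matches the ln.nicehash.com node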
@@ -337,24 +417,24 @@ class NodesApi {
if (!ispList[isp1]) {
ispList[isp1] = {
id: channel.isp1ID.toString(),
ids: [channel.isp1ID],
capacity: 0,
channels: 0,
nodes: {},
};
} else if (ispList[isp1].id.indexOf(channel.isp1ID) === -1) {
ispList[isp1].id += ',' + channel.isp1ID.toString();
} else if (ispList[isp1].ids.includes(channel.isp1ID) === false) {
ispList[isp1].ids.push(channel.isp1ID);
}
if (!ispList[isp2]) {
ispList[isp2] = {
id: channel.isp2ID.toString(),
ids: [channel.isp2ID],
capacity: 0,
channels: 0,
nodes: {},
};
} else if (ispList[isp2].id.indexOf(channel.isp2ID) === -1) {
ispList[isp2].id += ',' + channel.isp2ID.toString();
} else if (ispList[isp2].ids.includes(channel.isp2ID) === false) {
ispList[isp2].ids.push(channel.isp2ID);
}
ispList[isp1].capacity += channel.capacity;
@@ -364,11 +444,11 @@ class NodesApi {
ispList[isp2].channels += 1;
ispList[isp2].nodes[channel.node2PublicKey] = true;
}
const ispRanking: any[] = [];
for (const isp of Object.keys(ispList)) {
ispRanking.push([
ispList[isp].id,
ispList[isp].ids.sort((a, b) => a - b).join(','),
isp,
ispList[isp].capacity,
ispList[isp].channels,
@@ -462,7 +542,41 @@ class NodesApi {
public async $getNodesPerISP(ISPId: string) {
try {
const query = `
let query = `
SELECT channels.node1_public_key AS node1PublicKey, isp1.id as isp1ID,
channels.node2_public_key AS node2PublicKey, isp2.id as isp2ID
FROM channels
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
JOIN geo_names isp1 ON isp1.id = node1.as_number
JOIN geo_names isp2 ON isp2.id = node2.as_number
WHERE channels.status = 1 AND (node1.as_number IN (?) OR node2.as_number IN (?))
ORDER BY short_id DESC
`;
const ISPIds = ISPId.split(',');
const [rows]: any = await DB.query(query, [ISPIds, ISPIds]);
if (!rows || rows.length === 0) {
return [];
}
const nodes = {};
const intISPIds: number[] = [];
for (const ispId of ISPIds) {
intISPIds.push(parseInt(ispId, 10));
}
for (const channel of rows) {
if (intISPIds.includes(channel.isp1ID)) {
nodes[channel.node1PublicKey] = true;
}
if (intISPIds.includes(channel.isp2ID)) {
nodes[channel.node2PublicKey] = true;
}
}
query = `
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
geo_names_city.names as city, geo_names_country.names as country,
@@ -473,17 +587,18 @@ class NodesApi {
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
WHERE nodes.as_number IN (?)
WHERE nodes.public_key IN (?)
ORDER BY capacity DESC
`;
const [rows]: any = await DB.query(query, [ISPId.split(',')]);
for (let i = 0; i < rows.length; ++i) {
rows[i].country = JSON.parse(rows[i].country);
rows[i].city = JSON.parse(rows[i].city);
rows[i].subdivision = JSON.parse(rows[i].subdivision);
const [rows2]: any = await DB.query(query, [Object.keys(nodes)]);
for (let i = 0; i < rows2.length; ++i) {
rows2[i].country = JSON.parse(rows2[i].country);
rows2[i].city = JSON.parse(rows2[i].city);
rows2[i].subdivision = JSON.parse(rows2[i].subdivision);
}
return rows;
return rows2;
} catch (e) {
logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
@@ -527,6 +642,11 @@ class NodesApi {
*/
public async $saveNode(node: ILightningApi.Node): Promise<void> {
try {
// https://github.com/mempool/mempool/issues/3006
if ((node.last_update ?? 0) < 1514736061) { // January 1st 2018
node.last_update = null;
}
const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
const query = `INSERT INTO nodes(
public_key,
@@ -588,9 +708,7 @@ class NodesApi {
)
`);
if ((result[0].changedRows ?? 0) > 0) {
logger.info(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
} else {
logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`, logger.tags.ln);
}
} catch (e) {
logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));


@@ -20,6 +20,7 @@ class NodesRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/connectivity', this.$getTopNodesByChannels)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/age', this.$getOldestNodes)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/fees/histogram', this.$getFeeHistogram)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode)
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/group/:name', this.$getNodeGroup)
;
@@ -40,13 +41,70 @@ class NodesRoutes {
let nodes: any[] = [];
switch (config.MEMPOOL.NETWORK) {
case 'testnet':
nodesList = ['032c7c7819276c4f706a04df1a0f1e10a5495994a7be4c1d3d28ca766e5a2b957b', '025a7e38c2834dd843591a4d23d5f09cdeb77ddca85f673c2d944a14220ff14cf7', '0395e2731a1673ef21d7a16a727c4fc4d4c35a861c428ce2c819c53d2b81c8bd55', '032ab2028c0b614c6d87824e2373529652fd7e4221b4c70cc4da7c7005c49afcf0', '029001b22fe70b48bee12d014df91982eb85ff1bd404ec772d5c83c4ee3e88d2c3', '0212e2848d79f928411da5f2ff0a8c95ec6ccb5a09d2031b6f71e91309dcde63af', '03e871a2229523d34f76e6311ff197cfe7f26c2fbec13554b93a46f4e710c47dab', '032202ec98d976b0e928bd1d91924e8bd3eab07231fc39feb3737b010071073df8', '02fa7c5a948d03d563a9f36940c2205a814e594d17c0042ced242c71a857d72605', '039c14fdec2d958e3d14cebf657451bbd9e039196615785e82c917f274e3fb2205', '033589bbcb233ffc416cefd5437c7f37e9d7cb7942d405e39e72c4c846d9b37f18', '029293110441c6e2eacb57e1255bf6ef05c41a6a676fe474922d33c19f98a7d584'];
nodesList = [
'032c7c7819276c4f706a04df1a0f1e10a5495994a7be4c1d3d28ca766e5a2b957b',
'025a7e38c2834dd843591a4d23d5f09cdeb77ddca85f673c2d944a14220ff14cf7',
'0395e2731a1673ef21d7a16a727c4fc4d4c35a861c428ce2c819c53d2b81c8bd55',
'032ab2028c0b614c6d87824e2373529652fd7e4221b4c70cc4da7c7005c49afcf0',
'029001b22fe70b48bee12d014df91982eb85ff1bd404ec772d5c83c4ee3e88d2c3',
'0212e2848d79f928411da5f2ff0a8c95ec6ccb5a09d2031b6f71e91309dcde63af',
'03e871a2229523d34f76e6311ff197cfe7f26c2fbec13554b93a46f4e710c47dab',
'032202ec98d976b0e928bd1d91924e8bd3eab07231fc39feb3737b010071073df8',
'02fa7c5a948d03d563a9f36940c2205a814e594d17c0042ced242c71a857d72605',
'039c14fdec2d958e3d14cebf657451bbd9e039196615785e82c917f274e3fb2205',
'033589bbcb233ffc416cefd5437c7f37e9d7cb7942d405e39e72c4c846d9b37f18',
'029293110441c6e2eacb57e1255bf6ef05c41a6a676fe474922d33c19f98a7d584',
'0235ad0b56ed8c42c4354444c24e971c05e769ec0b5fb0ccea42880095dc02ea2c',
'029700819a37afea630f80e6cc461f3fd3c4ace2598a21cfbbe64d1c78d0ee69a5',
'02c2d8b2dbf87c7894af2f1d321290e2fe6db5446cd35323987cee98f06e2e0075',
'030b0ca1ea7b1075716d2a555630e6fd47ef11bc7391fe68963ec06cf370a5e382',
'031adb9eb2d66693f85fa31a4adca0319ba68219f3ad5f9a2ef9b34a6b40755fa1',
'02ccd07faa47eda810ecf5591ccf5ca50f6c1034d0d175052898d32a00b9bae24f',
];
break;
case 'signet':
nodesList = ['03ddab321b760433cbf561b615ef62ac7d318630c5f51d523aaf5395b90b751956', '033d92c7bfd213ef1b34c90e985fb5dc77f9ec2409d391492484e57a44c4aca1de', '02ad010dda54253c1eb9efe38b0760657a3b43ecad62198c359c051c9d99d45781', '025196512905b8a3f1597428b867bec63ec9a95e5089eb7dc7e63e2d2691669029', '027c625aa1fbe3768db68ebcb05b53b6dc0ce68b7b54b8900d326d167363e684fe', '03f1629af3101fcc56b7aac2667016be84e3defbf3d0c8719f836c9b41c9a57a43', '02dfb81e2f7a3c4c9e8a51b70ef82b4a24549cc2fab1f5b2fd636501774a918991', '02d01ccf832944c68f10d39006093769c5b8bda886d561b128534e313d729fdb34', '02499ed23027d4698a6904ff4ec1b6085a61f10b9a6937f90438f9947e38e8ea86', '038310e3a786340f2bd7770704c7ccfe560fd163d9a1c99d67894597419d12cbf7', '03e5e9d879b72c7d67ecd483bae023bd33e695bb32b981a4021260f7b9d62bc761', '028d16e1a0ace4c0c0a421536d8d32ce484dfe6e2f726b7b0e7c30f12a195f8cc7'];
nodesList = [
'03ddab321b760433cbf561b615ef62ac7d318630c5f51d523aaf5395b90b751956',
'033d92c7bfd213ef1b34c90e985fb5dc77f9ec2409d391492484e57a44c4aca1de',
'02ad010dda54253c1eb9efe38b0760657a3b43ecad62198c359c051c9d99d45781',
'025196512905b8a3f1597428b867bec63ec9a95e5089eb7dc7e63e2d2691669029',
'027c625aa1fbe3768db68ebcb05b53b6dc0ce68b7b54b8900d326d167363e684fe',
'03f1629af3101fcc56b7aac2667016be84e3defbf3d0c8719f836c9b41c9a57a43',
'02dfb81e2f7a3c4c9e8a51b70ef82b4a24549cc2fab1f5b2fd636501774a918991',
'02d01ccf832944c68f10d39006093769c5b8bda886d561b128534e313d729fdb34',
'02499ed23027d4698a6904ff4ec1b6085a61f10b9a6937f90438f9947e38e8ea86',
'038310e3a786340f2bd7770704c7ccfe560fd163d9a1c99d67894597419d12cbf7',
'03e5e9d879b72c7d67ecd483bae023bd33e695bb32b981a4021260f7b9d62bc761',
'028d16e1a0ace4c0c0a421536d8d32ce484dfe6e2f726b7b0e7c30f12a195f8cc7',
'02ff690d06c187ab994bf83c5a2114fe5bf50112c2c817af0f788f736be9fa2070',
'02a9f570c51a2526a5ee85802e88f9281bed771eb66a0c8a7d898430dd5d0eae45',
'038c3de773255d3bd7a50e31e58d423baac5c90826a74d75e64b74c95475de1097',
'0242c7f7d315095f37ad1421ae0a2fc967d4cbe65b61b079c5395a769436959853',
'02a909e70eb03742f12666ebb1f56ac42a5fbaab0c0e8b5b1df4aa9f10f8a09240',
'03a26efa12489803c07f3ac2f1dba63812e38f0f6e866ce3ebb34df7de1f458cd2',
];
break;
default:
nodesList = ['03fbc17549ec667bccf397ababbcb4cdc0e3394345e4773079ab2774612ec9be61', '03da9a8623241ccf95f19cd645c6cecd4019ac91570e976eb0a128bebbc4d8a437', '03ca5340cf85cb2e7cf076e489f785410838de174e40be62723e8a60972ad75144', '0238bd27f02d67d6c51e269692bc8c9a32357a00e7777cba7f4f1f18a2a700b108', '03f983dcabed6baa1eab5b56c8b2e8fdc846ab3fd931155377897335e85a9fa57c', '03e399589533581e48796e29a825839a010036a61b20744fda929d6709fcbffcc5', '021f5288b5f72c42cd0d8801086af7ce09a816d8ee9a4c47a4b436399b26cb601a', '032b01b7585f781420cd4148841a82831ba37fa952342052cec16750852d4f2dd9', '02848036488d4b8fb1f1c4064261ec36151f43b085f0b51bd239ade3ddfc940c34', '02b6b1640fe029e304c216951af9fbefdb23b0bdc9baaf327540d31b6107841fdf', '03694289827203a5b3156d753071ddd5bf92e371f5a462943f9555eef6d2d6606c', '0283d850db7c3e8ea7cc9c4abc7afaab12bbdf72b677dcba1d608350d2537d7d43'];
nodesList = [
'03fbc17549ec667bccf397ababbcb4cdc0e3394345e4773079ab2774612ec9be61',
'03da9a8623241ccf95f19cd645c6cecd4019ac91570e976eb0a128bebbc4d8a437',
'03ca5340cf85cb2e7cf076e489f785410838de174e40be62723e8a60972ad75144',
'0238bd27f02d67d6c51e269692bc8c9a32357a00e7777cba7f4f1f18a2a700b108',
'03f983dcabed6baa1eab5b56c8b2e8fdc846ab3fd931155377897335e85a9fa57c',
'03e399589533581e48796e29a825839a010036a61b20744fda929d6709fcbffcc5',
'021f5288b5f72c42cd0d8801086af7ce09a816d8ee9a4c47a4b436399b26cb601a',
'032b01b7585f781420cd4148841a82831ba37fa952342052cec16750852d4f2dd9',
'02848036488d4b8fb1f1c4064261ec36151f43b085f0b51bd239ade3ddfc940c34',
'02b6b1640fe029e304c216951af9fbefdb23b0bdc9baaf327540d31b6107841fdf',
'03694289827203a5b3156d753071ddd5bf92e371f5a462943f9555eef6d2d6606c',
'0283d850db7c3e8ea7cc9c4abc7afaab12bbdf72b677dcba1d608350d2537d7d43',
'02521287789f851268a39c9eccc9d6180d2c614315b583c9e6ae0addbd6d79df06',
'0258c2a7b7f8af2585b4411b1ec945f70988f30412bb1df179de941f14d0b1bc3e',
'03c3389ff1a896f84d921ed01a19fc99c6724ce8dc4b960cd3b7b2362b62cd60d7',
'038d118996b3eaa15dcd317b32a539c9ecfdd7698f204acf8a087336af655a9192',
'02a928903d93d78877dacc3642b696128a3636e9566dd42d2d132325b2c8891c09',
'0328cd17f3a9d3d90b532ade0d1a67e05eb8a51835b3dce0a2e38eac04b5a62a57',
];
}
for (let pubKey of nodesList) {
@@ -95,6 +153,22 @@ class NodesRoutes {
}
}
private async $getFeeHistogram(req: Request, res: Response) {
try {
const node = await nodesApi.$getFeeHistogram(req.params.public_key);
if (!node) {
res.status(404).send('Node not found');
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(node);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
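With the route registered above and the 60-second Expires header, clients can poll the histogram cheaply. A hedged client sketch, assuming the default '/api/v1/' API_URL_PREFIX and a mempool.space-style host:

// Hedged client sketch; the host and '/api/v1/' prefix are assumptions.
async function fetchFeeHistogram(publicKey: string): Promise<unknown> {
  const res = await fetch(`https://mempool.space/api/v1/lightning/nodes/${publicKey}/fees/histogram`);
  if (!res.ok) {
    throw new Error(`HTTP ${res.status}`);
  }
  return res.json();
}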
private async $getNodesRanking(req: Request, res: Response): Promise<void> {
try {
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(false);


@@ -1,123 +0,0 @@
import logger from '../logger';
import * as http from 'http';
import * as https from 'https';
import axios, { AxiosResponse } from 'axios';
import { IConversionRates } from '../mempool.interfaces';
import config from '../config';
import backendInfo from './backend-info';
import { SocksProxyAgent } from 'socks-proxy-agent';
class FiatConversion {
private debasingFiatCurrencies = ['AED', 'AUD', 'BDT', 'BHD', 'BMD', 'BRL', 'CAD', 'CHF', 'CLP',
'CNY', 'CZK', 'DKK', 'EUR', 'GBP', 'HKD', 'HUF', 'IDR', 'ILS', 'INR', 'JPY', 'KRW', 'KWD',
'LKR', 'MMK', 'MXN', 'MYR', 'NGN', 'NOK', 'NZD', 'PHP', 'PKR', 'PLN', 'RUB', 'SAR', 'SEK',
'SGD', 'THB', 'TRY', 'TWD', 'UAH', 'USD', 'VND', 'ZAR'];
private conversionRates: IConversionRates = {};
private ratesChangedCallback: ((rates: IConversionRates) => void) | undefined;
public ratesInitialized = false; // If true, it means rates are ready for use
constructor() {
for (const fiat of this.debasingFiatCurrencies) {
this.conversionRates[fiat] = 0;
}
}
public setProgressChangedCallback(fn: (rates: IConversionRates) => void) {
this.ratesChangedCallback = fn;
}
public startService() {
const fiatConversionUrl = (config.SOCKS5PROXY.ENABLED === true) && (config.SOCKS5PROXY.USE_ONION === true) ? config.PRICE_DATA_SERVER.TOR_URL : config.PRICE_DATA_SERVER.CLEARNET_URL;
logger.info('Starting currency rates service');
if (config.SOCKS5PROXY.ENABLED) {
logger.info(`Currency rates service will be queried over the Tor network using ${fiatConversionUrl}`);
} else {
logger.info(`Currency rates service will be queried over clearnet using ${config.PRICE_DATA_SERVER.CLEARNET_URL}`);
}
setInterval(this.updateCurrency.bind(this), 1000 * config.MEMPOOL.PRICE_FEED_UPDATE_INTERVAL);
this.updateCurrency();
}
public getConversionRates() {
return this.conversionRates;
}
private async updateCurrency(): Promise<void> {
type axiosOptions = {
headers: {
'User-Agent': string
};
timeout: number;
httpAgent?: http.Agent;
httpsAgent?: https.Agent;
}
const setDelay = (secs: number = 1): Promise<void> => new Promise(resolve => setTimeout(() => resolve(), secs * 1000));
const fiatConversionUrl = (config.SOCKS5PROXY.ENABLED === true) && (config.SOCKS5PROXY.USE_ONION === true) ? config.PRICE_DATA_SERVER.TOR_URL : config.PRICE_DATA_SERVER.CLEARNET_URL;
const isHTTP = (new URL(fiatConversionUrl).protocol.split(':')[0] === 'http') ? true : false;
const axiosOptions: axiosOptions = {
headers: {
'User-Agent': (config.MEMPOOL.USER_AGENT === 'mempool') ? `mempool/v${backendInfo.getBackendInfo().version}` : `${config.MEMPOOL.USER_AGENT}`
},
timeout: config.SOCKS5PROXY.ENABLED ? 30000 : 10000
};
let retry = 0;
while(retry < config.MEMPOOL.EXTERNAL_MAX_RETRY) {
try {
if (config.SOCKS5PROXY.ENABLED) {
let socksOptions: any = {
agentOptions: {
keepAlive: true,
},
hostname: config.SOCKS5PROXY.HOST,
port: config.SOCKS5PROXY.PORT
};
if (config.SOCKS5PROXY.USERNAME && config.SOCKS5PROXY.PASSWORD) {
socksOptions.username = config.SOCKS5PROXY.USERNAME;
socksOptions.password = config.SOCKS5PROXY.PASSWORD;
} else {
// Retry with different tor circuits https://stackoverflow.com/a/64960234
socksOptions.username = `circuit${retry}`;
}
// Handle proxy agent for onion addresses
if (isHTTP) {
axiosOptions.httpAgent = new SocksProxyAgent(socksOptions);
} else {
axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
}
}
logger.debug('Querying currency rates service...');
const response: AxiosResponse = await axios.get(`${fiatConversionUrl}`, axiosOptions);
if (response.statusText === 'error' || !response.data) {
throw new Error(`Could not fetch data from ${fiatConversionUrl}, Error: ${response.status}`);
}
for (const rate of response.data.data) {
if (this.debasingFiatCurrencies.includes(rate.currencyCode) && rate.provider === 'Bisq-Aggregate') {
this.conversionRates[rate.currencyCode] = Math.round(100 * rate.price) / 100;
}
}
this.ratesInitialized = true;
logger.debug(`USD Conversion Rate: ${this.conversionRates.USD}`);
if (this.ratesChangedCallback) {
this.ratesChangedCallback(this.conversionRates);
}
break;
} catch (e) {
logger.err('Error updating fiat conversion rates: ' + (e instanceof Error ? e.message : e));
await setDelay(config.MEMPOOL.EXTERNAL_RETRY_INTERVAL);
retry++;
}
}
}
}
export default new FiatConversion();


@@ -141,13 +141,13 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
// main data directory provided, default to using the bitcoin mainnet subdirectory
// to be removed in v0.2.0
else if (fExists(rpcPath, 'bitcoin', 'lightning-rpc')) {
logger.warn(`[CLightningClient] ${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`)
logger.warn(`[CLightningClient] specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`)
logger.warn(`${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`, logger.tags.ln)
logger.warn(`specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`, logger.tags.ln)
rpcPath = path.join(rpcPath, 'bitcoin', 'lightning-rpc')
}
}
logger.debug(`[CLightningClient] Connecting to ${rpcPath}`);
logger.debug(`Connecting to ${rpcPath}`, logger.tags.ln);
super();
this.rpcPath = rpcPath;
@@ -172,19 +172,19 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
this.clientConnectionPromise = new Promise<void>(resolve => {
_self.client.on('connect', () => {
logger.info(`[CLightningClient] Lightning client connected`);
logger.info(`CLightning client connected`, logger.tags.ln);
_self.reconnectWait = 1;
resolve();
});
_self.client.on('end', () => {
logger.err('[CLightningClient] Lightning client connection closed, reconnecting');
logger.err(`CLightning client connection closed, reconnecting`, logger.tags.ln);
_self.increaseWaitTime();
_self.reconnect();
});
_self.client.on('error', error => {
logger.err(`[CLightningClient] Lightning client connection error: ${error}`);
logger.err(`CLightning client connection error: ${error}`, logger.tags.ln);
_self.increaseWaitTime();
_self.reconnect();
});
@@ -196,7 +196,6 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
return;
}
const data = JSON.parse(line);
// logger.debug(`[CLightningClient] #${data.id} <-- ${JSON.stringify(data.error || data.result)}`);
_self.emit('res:' + data.id, data);
});
}
@@ -217,7 +216,7 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
}
this.reconnectTimeout = setTimeout(() => {
logger.debug('[CLightningClient] Trying to reconnect...');
logger.debug(`Trying to reconnect...`, logger.tags.ln);
_self.client.connect(_self.rpcPath);
_self.reconnectTimeout = null;
@@ -235,7 +234,6 @@ export default class CLightningClient extends EventEmitter implements AbstractLi
id: '' + callInt
};
// logger.debug(`[CLightningClient] #${callInt} --> ${method} ${args}`);
// Wait for the client to connect
return this.clientConnectionPromise


@@ -2,11 +2,21 @@ import { ILightningApi } from '../lightning-api.interface';
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
import logger from '../../../logger';
import { Common } from '../../common';
import config from '../../../config';
/**
* Convert a clightning "listnode" entry to a lnd node entry
*/
export function convertNode(clNode: any): ILightningApi.Node {
let custom_records: { [type: number]: string } | undefined = undefined;
if (clNode.option_will_fund) {
try {
custom_records = { '1': Buffer.from(clNode.option_will_fund.compact_lease || '', 'hex').toString('base64') };
} catch (e) {
logger.err(`Cannot decode option_will_fund compact_lease for ${clNode.nodeid}. Reason: ` + (e instanceof Error ? e.message : e));
custom_records = undefined;
}
}
return {
alias: clNode.alias ?? '',
color: `#${clNode.color ?? ''}`,
@@ -23,6 +33,7 @@ export function convertNode(clNode: any): ILightningApi.Node {
};
}) ?? [],
last_update: clNode?.last_timestamp ?? 0,
custom_records
};
}
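CLightning exposes compact_lease as hex; the converter stores it base64-encoded under record type 1, and the nodes API above serves record payloads back out as hex. The encodings round-trip losslessly:

// Round-trip of the custom-record encodings (illustrative payload).
const compactLeaseHex = '02aabbcc';
const stored = Buffer.from(compactLeaseHex, 'hex').toString('base64'); // 'Aqq7zA=='
const restored = Buffer.from(stored, 'base64').toString('hex');        // '02aabbcc'
console.log(stored, restored);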
@@ -30,7 +41,7 @@ export function convertNode(clNode: any): ILightningApi.Node {
* Convert clightning "listchannels" response to lnd "describegraph.edges" format
*/
export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
logger.info('Converting clightning nodes and channels to lnd graph format');
logger.debug(`Converting clightning nodes and channels to lnd graph format`, logger.tags.ln);
let loggerTimer = new Date().getTime() / 1000;
let channelProcessed = 0;
@@ -44,16 +55,17 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
clChannelsDict[clChannel.short_channel_id] = clChannel;
clChannelsDictCount[clChannel.short_channel_id] = 1;
} else {
consolidatedChannelList.push(
await buildFullChannel(clChannel, clChannelsDict[clChannel.short_channel_id])
);
delete clChannelsDict[clChannel.short_channel_id];
clChannelsDictCount[clChannel.short_channel_id]++;
const fullChannel = await buildFullChannel(clChannel, clChannelsDict[clChannel.short_channel_id]);
if (fullChannel !== null) {
consolidatedChannelList.push(fullChannel);
delete clChannelsDict[clChannel.short_channel_id];
clChannelsDictCount[clChannel.short_channel_id]++;
}
}
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`, logger.tags.ln);
loggerTimer = new Date().getTime() / 1000;
}
@@ -63,13 +75,18 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
channelProcessed = 0;
const keys = Object.keys(clChannelsDict);
for (const short_channel_id of keys) {
consolidatedChannelList.push(await buildIncompleteChannel(clChannelsDict[short_channel_id]));
const incompleteChannel = await buildIncompleteChannel(clChannelsDict[short_channel_id]);
if (incompleteChannel !== null) {
consolidatedChannelList.push(incompleteChannel);
}
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
loggerTimer = new Date().getTime() / 1000;
}
channelProcessed++;
}
return consolidatedChannelList;
@@ -79,10 +96,13 @@ export async function convertAndmergeBidirectionalChannels(clChannels: any[]): P
* Convert two clightning "getchannels" entries into a full a lnd "describegraph.edges" format
* In this case, clightning knows the channel policy for both nodes
*/
async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILightningApi.Channel> {
async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILightningApi.Channel | null> {
const lastUpdate = Math.max(clChannelA.last_update ?? 0, clChannelB.last_update ?? 0);
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannelA.short_channel_id);
if (!tx) {
return null;
}
const parts = clChannelA.short_channel_id.split('x');
const outputIdx = parts[2];
@@ -102,8 +122,11 @@ async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILigh
* Convert one clightning "getchannels" entry into a full lnd "describegraph.edges" format
* In this case, clightning knows the channel policy of only one node
*/
async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Channel> {
async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Channel | null> {
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannel.short_channel_id);
if (!tx) {
return null;
}
const parts = clChannel.short_channel_id.split('x');
const outputIdx = parts[2];
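For reference, a c-lightning short_channel_id encodes the funding output as BLOCKxTXINDEXxOUTPUT, which is why parts[2] above yields the output index. A minimal decoding sketch (the helper name is illustrative, not part of this diff):

function parseShortChannelId(scid: string): { blockHeight: number, txIndex: number, outputIdx: number } {
  // e.g. '699999x1234x1' -> funding tx is tx #1234 in block 699999, output 1
  const [blockHeight, txIndex, outputIdx] = scid.split('x').map(Number);
  return { blockHeight, txIndex, outputIdx };
}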

View File

@@ -21,7 +21,7 @@ export namespace ILightningApi {
export interface Channel {
channel_id: string;
chan_point: string;
last_update: number;
last_update: number | null;
node1_pub: string;
node2_pub: string;
capacity: string;
@@ -36,11 +36,11 @@ export namespace ILightningApi {
fee_rate_milli_msat: string;
disabled: boolean;
max_htlc_msat: string;
last_update: number;
last_update: number | null;
}
export interface Node {
last_update: number;
last_update: number | null;
pub_key: string;
alias: string;
addresses: {
@@ -49,6 +49,7 @@ export namespace ILightningApi {
}[];
color: string;
features: { [key: number]: Feature };
custom_records?: { [type: number]: string };
}
export interface Info {
@@ -82,4 +83,10 @@ export namespace ILightningApi {
is_required: boolean;
is_known: boolean;
}
export interface ForensicOutput {
node?: 1 | 2;
type: number;
value: number;
}
}
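Since last_update is now number | null on Channel, its routing policies, and Node, consumers are expected to coalesce missing timestamps, as the clightning converter above does with ?? 0. A hypothetical consumer sketch:

function lastUpdateOrZero(node: ILightningApi.Node): number {
  // treat a null last_update as 'never updated'
  return node.last_update ?? 0;
}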

View File

@@ -1,11 +1,14 @@
import logger from '../logger';
import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
import { MempoolBlock, TransactionExtended, ThreadTransaction, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta, Ancestor } from '../mempool.interfaces';
import { Common } from './common';
import config from '../config';
import { Worker } from 'worker_threads';
import path from 'path';
class MempoolBlocks {
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
private txSelectionWorker: Worker | null = null;
constructor() {}
@@ -30,7 +33,7 @@ class MempoolBlocks {
return this.mempoolBlockDeltas;
}
public updateMempoolBlocks(memPool: { [txid: string]: TransactionExtended }): void {
public updateMempoolBlocks(memPool: { [txid: string]: TransactionExtended }, saveResults: boolean = false): MempoolBlockWithTransactions[] {
const latestMempool = memPool;
const memPoolArray: TransactionExtended[] = [];
for (const i in latestMempool) {
@@ -71,15 +74,19 @@ class MempoolBlocks {
const time = end - start;
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
const { blocks, deltas } = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
if (saveResults) {
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
this.mempoolBlocks = blocks;
this.mempoolBlockDeltas = deltas;
}
return blocks;
}
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]):
{ blocks: MempoolBlockWithTransactions[], deltas: MempoolBlockDelta[] } {
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] {
const mempoolBlocks: MempoolBlockWithTransactions[] = [];
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
let blockWeight = 0;
let blockSize = 0;
let transactions: TransactionExtended[] = [];
@@ -90,16 +97,21 @@ class MempoolBlocks {
blockSize += tx.size;
transactions.push(tx);
} else {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, mempoolBlocks.length));
blockWeight = tx.weight;
blockSize = tx.size;
transactions = [tx];
}
});
if (transactions.length) {
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, mempoolBlocks.length));
}
// Calculate change from previous block states
return mempoolBlocks;
}
private calculateMempoolDeltas(prevBlocks: MempoolBlockWithTransactions[], mempoolBlocks: MempoolBlockWithTransactions[]): MempoolBlockDelta[] {
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
let added: TransactionStripped[] = [];
let removed: string[] = [];
@@ -132,14 +144,166 @@ class MempoolBlocks {
removed
});
}
return {
blocks: mempoolBlocks,
deltas: mempoolBlockDeltas
};
return mempoolBlockDeltas;
}
private dataToMempoolBlocks(transactions: TransactionExtended[],
blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, saveResults: boolean = false): Promise<MempoolBlockWithTransactions[]> {
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const strippedMempool: { [txid: string]: ThreadTransaction } = {};
Object.values(newMempool).forEach(entry => {
strippedMempool[entry.txid] = {
txid: entry.txid,
fee: entry.fee,
weight: entry.weight,
feePerVsize: entry.fee / (entry.weight / 4),
effectiveFeePerVsize: entry.fee / (entry.weight / 4),
vin: entry.vin.map(v => v.txid),
};
});
// (re)initialize tx selection worker thread
if (!this.txSelectionWorker) {
this.txSelectionWorker = new Worker(path.resolve(__dirname, './tx-selection-worker.js'));
// if the thread throws an unexpected error, or exits for any other reason,
// reset worker state so that it will be re-initialized on the next run
this.txSelectionWorker.once('error', () => {
this.txSelectionWorker = null;
});
this.txSelectionWorker.once('exit', () => {
this.txSelectionWorker = null;
});
}
// run the block construction algorithm in a separate thread, and wait for a result
let threadErrorListener;
try {
const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
threadErrorListener = reject;
this.txSelectionWorker?.once('message', (result): void => {
resolve(result);
});
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'set', mempool: strippedMempool });
const { blocks, clusters } = await workerResultPromise;
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
return this.processBlockTemplates(newMempool, blocks, clusters, saveResults);
} catch (e) {
logger.err('makeBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
}
return this.mempoolBlocks;
}
public async updateBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, added: TransactionExtended[], removed: string[], saveResults: boolean = false): Promise<void> {
if (!this.txSelectionWorker) {
// need to reset the worker
this.makeBlockTemplates(newMempool, saveResults);
return;
}
// prepare a stripped down version of the mempool with only the minimum necessary data
// to reduce the overhead of passing this data to the worker thread
const addedStripped: ThreadTransaction[] = added.map(entry => {
return {
txid: entry.txid,
fee: entry.fee,
weight: entry.weight,
feePerVsize: entry.fee / (entry.weight / 4),
effectiveFeePerVsize: entry.fee / (entry.weight / 4),
vin: entry.vin.map(v => v.txid),
};
});
// run the block construction algorithm in a separate thread, and wait for a result
let threadErrorListener;
try {
const workerResultPromise = new Promise<{ blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } }>((resolve, reject) => {
threadErrorListener = reject;
this.txSelectionWorker?.once('message', (result): void => {
resolve(result);
});
this.txSelectionWorker?.once('error', reject);
});
this.txSelectionWorker.postMessage({ type: 'update', added: addedStripped, removed });
const { blocks, clusters } = await workerResultPromise;
// clean up thread error listener
this.txSelectionWorker?.removeListener('error', threadErrorListener);
this.processBlockTemplates(newMempool, blocks, clusters, saveResults);
} catch (e) {
logger.err('updateBlockTemplates failed. ' + (e instanceof Error ? e.message : e));
}
}
private processBlockTemplates(mempool, blocks, clusters, saveResults): MempoolBlockWithTransactions[] {
// update this thread's mempool with the results
blocks.forEach(block => {
block.forEach(tx => {
if (tx.txid in mempool) {
if (tx.effectiveFeePerVsize != null) {
mempool[tx.txid].effectiveFeePerVsize = tx.effectiveFeePerVsize;
}
if (tx.cpfpRoot && tx.cpfpRoot in clusters) {
const ancestors: Ancestor[] = [];
const descendants: Ancestor[] = [];
const cluster = clusters[tx.cpfpRoot];
let matched = false;
cluster.forEach(txid => {
if (txid === tx.txid) {
matched = true;
} else {
const relative = {
txid: txid,
fee: mempool[txid].fee,
weight: mempool[txid].weight,
};
if (matched) {
descendants.push(relative);
} else {
ancestors.push(relative);
}
}
});
mempool[tx.txid].ancestors = ancestors;
mempool[tx.txid].descendants = descendants;
mempool[tx.txid].bestDescendant = null;
}
mempool[tx.txid].cpfpChecked = tx.cpfpChecked;
}
});
});
// unpack the condensed blocks into proper mempool blocks
const mempoolBlocks = blocks.map((transactions, blockIndex) => {
return this.dataToMempoolBlocks(transactions.map(tx => {
return mempool[tx.txid] || null;
}).filter(tx => !!tx), blockIndex);
});
if (saveResults) {
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, mempoolBlocks);
this.mempoolBlocks = mempoolBlocks;
this.mempoolBlockDeltas = deltas;
}
return mempoolBlocks;
}
private dataToMempoolBlocks(transactions: TransactionExtended[], blocksIndex: number): MempoolBlockWithTransactions {
let totalSize = 0;
let totalWeight = 0;
const fitTransactions: TransactionExtended[] = [];
transactions.forEach(tx => {
totalSize += tx.size;
totalWeight += tx.weight;
if ((totalWeight + tx.weight) <= config.MEMPOOL.BLOCK_WEIGHT_UNITS * 1.2) {
fitTransactions.push(tx);
}
});
let rangeLength = 4;
if (blocksIndex === 0) {
rangeLength = 8;
@@ -150,14 +314,14 @@ class MempoolBlocks {
rangeLength = 8;
}
return {
blockSize: blockSize,
blockVSize: blockWeight / 4,
blockSize: totalSize,
blockVSize: totalWeight / 4,
nTx: transactions.length,
totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
feeRange: Common.getFeesInRange(transactions, rangeLength),
transactionIds: transactions.map((tx) => tx.txid),
transactions: transactions.map((tx) => Common.stripTransaction(tx)),
transactions: fitTransactions.map((tx) => Common.stripTransaction(tx)),
};
}
}
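The fee-rate fields above divide weight by 4 to convert weight units into virtual bytes. A worked example with hypothetical values: a 1,000 WU transaction paying 500 sats rates at 2 sat/vB, and a CPFP package's effectiveFeePerVsize applies the same formula to the package's total fee and weight.

const fee = 500;      // sats (hypothetical)
const weight = 1000;  // weight units
const feePerVsize = fee / (weight / 4); // 500 / 250 = 2 sat/vB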

View File

@@ -20,6 +20,8 @@ class Mempool {
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => void) | undefined;
private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
deletedTransactions: TransactionExtended[]) => Promise<void>) | undefined;
private txPerSecondArray: number[] = [];
private txPerSecond: number = 0;
@@ -29,6 +31,11 @@ class Mempool {
private mempoolProtection = 0;
private latestTransactions: any[] = [];
private ESPLORA_MISSING_TX_WARNING_THRESHOLD = 100;
private SAMPLE_TIME = 10000; // In ms
private timer = new Date().getTime();
private missingTxCount = 0;
constructor() {
setInterval(this.updateTxPerSecond.bind(this), 1000);
setInterval(this.deleteExpiredTransactions.bind(this), 20000);
@@ -63,6 +70,11 @@ class Mempool {
this.mempoolChangedCallback = fn;
}
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => Promise<void>) {
this.asyncMempoolChangedCallback = fn;
}
public getMempool(): { [txid: string]: TransactionExtended } {
return this.mempoolCache;
}
@@ -72,6 +84,9 @@ class Mempool {
if (this.mempoolChangedCallback) {
this.mempoolChangedCallback(this.mempoolCache, [], []);
}
if (this.asyncMempoolChangedCallback) {
this.asyncMempoolChangedCallback(this.mempoolCache, [], []);
}
}
public async $updateMemPoolInfo() {
@@ -103,12 +118,11 @@ class Mempool {
return txTimes;
}
public async $updateMempool() {
logger.debug('Updating mempool');
public async $updateMempool(): Promise<void> {
logger.debug(`Updating mempool...`);
const start = new Date().getTime();
let hasChange: boolean = false;
const currentMempoolSize = Object.keys(this.mempoolCache).length;
let txCount = 0;
const transactions = await bitcoinApi.$getRawMempool();
const diff = transactions.length - currentMempoolSize;
const newTransactions: TransactionExtended[] = [];
@@ -119,12 +133,21 @@ class Mempool {
loadingIndicators.setProgress('mempool', Object.keys(this.mempoolCache).length / transactions.length * 100);
}
// https://github.com/mempool/mempool/issues/3283
const logEsplora404 = (missingTxCount, threshold, time) => {
const log = `In the past ${time / 1000} seconds, esplora tx API replied ${missingTxCount} times with a 404 error code while updating nodejs backend mempool`;
if (missingTxCount >= threshold) {
logger.warn(log);
} else if (missingTxCount > 0) {
logger.debug(log);
}
};
for (const txid of transactions) {
if (!this.mempoolCache[txid]) {
try {
const transaction = await transactionUtils.$getTransactionExtended(txid);
this.mempoolCache[txid] = transaction;
txCount++;
if (this.inSync) {
this.txPerSecondArray.push(new Date().getTime());
this.vBytesPerSecondArray.push({
@@ -133,14 +156,12 @@ class Mempool {
});
}
hasChange = true;
if (diff > 0) {
logger.debug('Fetched transaction ' + txCount + ' / ' + diff);
} else {
logger.debug('Fetched transaction ' + txCount);
}
newTransactions.push(transaction);
} catch (e) {
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
} catch (e: any) {
if (config.MEMPOOL.BACKEND === 'esplora' && e.response?.status === 404) {
this.missingTxCount++;
}
logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
}
}
@@ -149,6 +170,14 @@ class Mempool {
}
}
// Reset esplora 404 counter and log a warning if needed
const elapsedTime = new Date().getTime() - this.timer;
if (elapsedTime > this.SAMPLE_TIME) {
logEsplora404(this.missingTxCount, this.ESPLORA_MISSING_TX_WARNING_THRESHOLD, elapsedTime);
this.timer = new Date().getTime();
this.missingTxCount = 0;
}
// Prevent mempool from clear on bitcoind restart by delaying the deletion
if (this.mempoolProtection === 0
&& currentMempoolSize > 20000
@@ -194,18 +223,20 @@ class Mempool {
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
if (this.asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
await this.asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
}
const end = new Date().getTime();
const time = end - start;
logger.debug(`New mempool size: ${Object.keys(this.mempoolCache).length} Change: ${diff}`);
logger.debug('Mempool updated in ' + time / 1000 + ' seconds');
logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
}
public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) {
for (const rbfTransaction in rbfTransactions) {
if (this.mempoolCache[rbfTransaction]) {
// Store replaced transactions
rbfCache.add(rbfTransaction, rbfTransactions[rbfTransaction].txid);
rbfCache.add(this.mempoolCache[rbfTransaction], rbfTransactions[rbfTransaction].txid);
// Erase the replaced transactions from the local mempool
delete this.mempoolCache[rbfTransaction];
}
@@ -231,6 +262,7 @@ class Mempool {
const lazyDeleteAt = this.mempoolCache[tx].deleteAfter;
if (lazyDeleteAt && lazyDeleteAt < now) {
delete this.mempoolCache[tx];
rbfCache.evict(tx);
}
}
}
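The esplora 404 handling above is a simple sampling window: failures are counted per update loop, and once SAMPLE_TIME has elapsed the count is flushed as a warn (at or above the threshold) or a debug (below it). A standalone sketch of the same pattern, with illustrative names:

class SampledErrorCounter {
  private count = 0;
  private windowStart = Date.now();

  constructor(private windowMs: number, private threshold: number) {}

  hit(): void {
    this.count++;
  }

  maybeFlush(log: (level: 'warn' | 'debug', msg: string) => void): void {
    const elapsed = Date.now() - this.windowStart;
    if (elapsed <= this.windowMs) {
      return;
    }
    if (this.count > 0) {
      const level = this.count >= this.threshold ? 'warn' : 'debug';
      log(level, `${this.count} esplora 404s in the past ${elapsed / 1000} seconds`);
    }
    this.count = 0;
    this.windowStart = Date.now();
  }
}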

View File

@@ -7,6 +7,7 @@ import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjust
import HashratesRepository from '../../repositories/HashratesRepository';
import bitcoinClient from '../bitcoin/bitcoin-client';
import mining from "./mining";
import PricesRepository from '../../repositories/PricesRepository';
class MiningRoutes {
public initRoutes(app: Application) {
@@ -26,10 +27,32 @@ class MiningRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores/:height', this.$getBlockAuditScores)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/timestamp/:timestamp', this.$getHeightFromTimestamp)
.get(config.MEMPOOL.API_URL_PREFIX + 'historical-price', this.$getHistoricalPrice)
;
}
private async $getHistoricalPrice(req: Request, res: Response): Promise<void> {
try {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
if (req.query.timestamp) {
res.status(200).send(await PricesRepository.$getNearestHistoricalPrice(
parseInt(<string>req.query.timestamp ?? 0, 10)
));
} else {
res.status(200).send(await PricesRepository.$getHistoricalPrices());
}
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getPool(req: Request, res: Response): Promise<void> {
try {
const stats = await mining.$getPoolStat(req.params.slug);
@@ -238,6 +261,12 @@ class MiningRoutes {
public async $getBlockAudit(req: Request, res: Response) {
try {
const audit = await BlocksAuditsRepository.$getBlockAudit(req.params.hash);
if (!audit) {
res.status(204).send(`This block has not been audited.`);
return;
}
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
@@ -246,6 +275,55 @@ class MiningRoutes {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getHeightFromTimestamp(req: Request, res: Response) {
try {
const timestamp = parseInt(req.params.timestamp, 10);
// This will prevent people from entering milliseconds etc.
// Block timestamps are allowed to be up to 2 hours off, so 24 hours
// will never put the maximum value before the most recent block
const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24;
// Reject non-integer input and values too large to be Unix seconds
if (!/^[1-9][0-9]*$/.test(req.params.timestamp) || timestamp > nowPlus1day) {
throw new Error(`Invalid timestamp, value must be Unix seconds`);
}
const result = await BlocksRepository.$getBlockHeightFromTimestamp(
timestamp,
);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
res.json(result);
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
private async $getBlockAuditScores(req: Request, res: Response) {
try {
let height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
if (height == null) {
height = await BlocksRepository.$mostRecentBlockHeight();
}
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
res.json(await BlocksAuditsRepository.$getBlockAuditScores(height, height - 15));
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
public async $getBlockAuditScore(req: Request, res: Response) {
try {
const audit = await BlocksAuditsRepository.$getBlockAuditScore(req.params.hash);
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
res.json(audit || 'null');
} catch (e) {
res.status(500).send(e instanceof Error ? e.message : e);
}
}
}
export default new MiningRoutes();
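A usage sketch for the new historical-price route, assuming the default API prefix (/api/v1/); the exact response shape comes from PricesRepository and is not shown in this diff:

// nearest recorded price at a given Unix timestamp (seconds)
const res = await fetch('https://mempool.space/api/v1/historical-price?timestamp=1500000000');
const nearest = await res.json();

// omit the query parameter to fetch the full price history
const all = await (await fetch('https://mempool.space/api/v1/historical-price')).json();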

View File

@@ -11,12 +11,14 @@ import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjust
import config from '../../config';
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
import PricesRepository from '../../repositories/PricesRepository';
import { bitcoinCoreApi } from '../bitcoin/bitcoin-api-factory';
import { IEsploraApi } from '../bitcoin/esplora-api.interface';
import database from '../../database';
class Mining {
blocksPriceIndexingRunning = false;
constructor() {
}
private blocksPriceIndexingRunning = false;
public lastHashrateIndexingDate: number | null = null;
public lastWeeklyHashrateIndexingDate: number | null = null;
/**
* Get historical block predictions match rate
@@ -100,6 +102,7 @@ class Mining {
rank: rank++,
emptyBlocks: emptyBlocksCount.length > 0 ? emptyBlocksCount[0]['count'] : 0,
slug: poolInfo.slug,
avgMatchRate: poolInfo.avgMatchRate !== null ? Math.round(100 * poolInfo.avgMatchRate) / 100 : null,
};
poolsStats.push(poolStat);
});
@@ -115,7 +118,7 @@ class Mining {
poolsStatistics['lastEstimatedHashrate'] = await bitcoinClient.getNetworkHashPs(totalBlock24h);
} catch (e) {
poolsStatistics['lastEstimatedHashrate'] = 0;
logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate', logger.tags.mining);
}
return poolsStatistics;
@@ -139,11 +142,14 @@ class Mining {
const blockCount1w: number = await BlocksRepository.$blockCount(pool.id, '1w');
const totalBlock1w: number = await BlocksRepository.$blockCount(null, '1w');
const avgHealth = await BlocksRepository.$getAvgBlockHealthPerPoolId(pool.id);
const totalReward = await BlocksRepository.$getTotalRewardForPoolId(pool.id);
let currentEstimatedHashrate = 0;
try {
currentEstimatedHashrate = await bitcoinClient.getNetworkHashPs(totalBlock24h);
} catch (e) {
logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate');
logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate', logger.tags.mining);
}
return {
@@ -160,6 +166,8 @@ class Mining {
},
estimatedHashrate: currentEstimatedHashrate * (blockCount24h / totalBlock24h),
reportedHashrate: null,
avgBlockHealth: avgHealth,
totalReward: totalReward,
};
}
@@ -171,25 +179,26 @@ class Mining {
}
/**
* [INDEXING] Generate weekly mining pool hashrate history
* Generate weekly mining pool hashrate history
*/
public async $generatePoolHashrateHistory(): Promise<void> {
const now = new Date();
const lastestRunDate = await HashratesRepository.$getLatestRun('last_weekly_hashrates_indexing');
// Run only if:
// * lastestRunDate is set to 0 (node backend restart, reorg)
// * this.lastWeeklyHashrateIndexingDate is set to null (node backend restart, reorg)
// * we started a new week (around Monday midnight)
const runIndexing = lastestRunDate === 0 || now.getUTCDay() === 1 && lastestRunDate !== now.getUTCDate();
const runIndexing = this.lastWeeklyHashrateIndexingDate === null ||
now.getUTCDay() === 1 && this.lastWeeklyHashrateIndexingDate !== now.getUTCDate();
if (!runIndexing) {
logger.debug(`Pool hashrate history indexing is up to date, nothing to do`, logger.tags.mining);
return;
}
try {
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.time * 1000;
const genesisBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.timestamp * 1000;
const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
const hashrates: any[] = [];
@@ -205,7 +214,7 @@ class Mining {
const startedAt = new Date().getTime() / 1000;
let timer = new Date().getTime() / 1000;
logger.debug(`Indexing weekly mining pool hashrate`);
logger.debug(`Indexing weekly mining pool hashrate`, logger.tags.mining);
loadingIndicators.setProgress('weekly-hashrate-indexing', 0);
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
@@ -242,7 +251,7 @@ class Mining {
});
}
newlyIndexed += hashrates.length;
newlyIndexed += hashrates.length / Math.max(1, pools.length);
await HashratesRepository.$saveHashrates(hashrates);
hashrates.length = 0;
}
@@ -253,7 +262,7 @@ class Mining {
const weeksPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
const progress = Math.round(totalIndexed / totalWeekIndexed * 10000) / 100;
const formattedDate = new Date(fromTimestamp).toUTCString();
logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('weekly-hashrate-indexing', progress, false);
@@ -263,36 +272,36 @@ class Mining {
++indexedThisRun;
++totalIndexed;
}
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', new Date().getUTCDate());
this.lastWeeklyHashrateIndexingDate = new Date().getUTCDate();
if (newlyIndexed > 0) {
logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
logger.info(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed} weeks`, logger.tags.mining);
} else {
logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed} weeks`, logger.tags.mining);
}
loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
} catch (e) {
loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
throw e;
}
}
/**
* [INDEXING] Generate daily hashrate data
* Generate daily hashrate data
*/
public async $generateNetworkHashrateHistory(): Promise<void> {
// We only run this once a day around midnight
const latestRunDate = await HashratesRepository.$getLatestRun('last_hashrates_indexing');
const now = new Date().getUTCDate();
if (now === latestRunDate) {
const today = new Date().getUTCDate();
if (today === this.lastHashrateIndexingDate) {
logger.debug(`Network hashrate history indexing is up to date, nothing to do`, logger.tags.mining);
return;
}
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
try {
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.time * 1000;
const genesisBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(await bitcoinClient.getBlockHash(0));
const genesisTimestamp = genesisBlock.timestamp * 1000;
const indexedTimestamp = (await HashratesRepository.$getRawNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
const lastMidnight = this.getDateMidnight(new Date());
let toTimestamp = Math.round(lastMidnight.getTime());
@@ -305,7 +314,7 @@ class Mining {
const startedAt = new Date().getTime() / 1000;
let timer = new Date().getTime() / 1000;
logger.debug(`Indexing daily network hashrate`);
logger.debug(`Indexing daily network hashrate`, logger.tags.mining);
loadingIndicators.setProgress('daily-hashrate-indexing', 0);
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
@@ -343,7 +352,7 @@ class Mining {
const daysPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
const progress = Math.round(totalIndexed / totalDayIndexed * 10000) / 100;
const formattedDate = new Date(fromTimestamp).toUTCString();
logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`, logger.tags.mining);
timer = new Date().getTime() / 1000;
indexedThisRun = 0;
loadingIndicators.setProgress('daily-hashrate-indexing', progress);
@@ -368,16 +377,16 @@ class Mining {
newlyIndexed += hashrates.length;
await HashratesRepository.$saveHashrates(hashrates);
await HashratesRepository.$setLatestRun('last_hashrates_indexing', new Date().getUTCDate());
this.lastHashrateIndexingDate = new Date().getUTCDate();
if (newlyIndexed > 0) {
logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
logger.info(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
} else {
logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`, logger.tags.mining);
}
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
} catch (e) {
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Daily network hashrate indexing failed. Trying again later. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
throw e;
}
}
@@ -393,13 +402,13 @@ class Mining {
}
const blocks: any = await BlocksRepository.$getBlocksDifficulty();
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
const genesisBlock: IEsploraApi.Block = await bitcoinCoreApi.$getBlock(await bitcoinClient.getBlockHash(0));
let currentDifficulty = genesisBlock.difficulty;
let totalIndexed = 0;
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && indexedHeights[0] !== true) {
await DifficultyAdjustmentsRepository.$saveAdjustments({
time: genesisBlock.time,
time: genesisBlock.timestamp,
height: 0,
difficulty: currentDifficulty,
adjustment: 0.0,
@@ -443,22 +452,22 @@ class Mining {
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
if (elapsedSeconds > 5) {
const progress = Math.round(totalBlockChecked / blocks.length * 100);
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`);
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`, logger.tags.mining);
timer = new Date().getTime() / 1000;
}
}
if (totalIndexed > 0) {
logger.notice(`Indexed ${totalIndexed} difficulty adjustments`);
logger.info(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
} else {
logger.debug(`Indexed ${totalIndexed} difficulty adjustments`);
logger.debug(`Indexed ${totalIndexed} difficulty adjustments`, logger.tags.mining);
}
}
/**
* Create a link between blocks and the latest price at when they were mined
*/
public async $indexBlockPrices() {
public async $indexBlockPrices(): Promise<void> {
if (this.blocksPriceIndexingRunning === true) {
return;
}
@@ -496,7 +505,7 @@ class Mining {
if (blocksWithoutPrices.length > 200000) {
logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
}
logger.debug(logStr);
logger.debug(logStr, logger.tags.mining);
await BlocksRepository.$saveBlockPrices(blocksPrices);
blocksPrices.length = 0;
}
@@ -508,7 +517,7 @@ class Mining {
if (blocksWithoutPrices.length > 200000) {
logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
}
logger.debug(logStr);
logger.debug(logStr, logger.tags.mining);
await BlocksRepository.$saveBlockPrices(blocksPrices);
}
} catch (e) {
@@ -519,6 +528,41 @@ class Mining {
this.blocksPriceIndexingRunning = false;
}
/**
* Index core coinstatsindex
*/
public async $indexCoinStatsIndex(): Promise<void> {
let timer = new Date().getTime() / 1000;
let totalIndexed = 0;
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
let currentBlockHeight = blockchainInfo.blocks;
while (currentBlockHeight > 0) {
const indexedBlocks = await BlocksRepository.$getBlocksMissingCoinStatsIndex(
currentBlockHeight, currentBlockHeight - 10000);
for (const block of indexedBlocks) {
const txoutset = await bitcoinClient.getTxoutSetinfo('none', block.height);
await BlocksRepository.$updateCoinStatsIndexData(block.hash, txoutset.txouts,
Math.round(txoutset.block_info.prevout_spent * 100000000));
++totalIndexed;
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
if (elapsedSeconds > 5) {
logger.info(`Indexing coinstatsindex data for block #${block.height}. Indexed ${totalIndexed} blocks.`, logger.tags.mining);
timer = new Date().getTime() / 1000;
}
}
currentBlockHeight -= 10000;
}
if (totalIndexed) {
logger.info(`Indexing missing coinstatsindex data completed`, logger.tags.mining);
}
}
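$indexCoinStatsIndex walks the chain backwards in 10,000-block windows and backfills UTXO-set statistics for blocks missing them. The underlying Bitcoin Core RPC is gettxoutsetinfo with hash_type 'none' (no UTXO-set hash is computed); querying it at arbitrary historical heights is what requires the node to run with -coinstatsindex. A call sketch mirroring the loop body above:

// assumes a node started with -coinstatsindex=1
const txoutset = await bitcoinClient.getTxoutSetinfo('none', 700000);
// txoutset.txouts                    -> unspent output count at that height
// txoutset.block_info.prevout_spent -> BTC value of prevouts spent in that block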
private getDateMidnight(date: Date): Date {
date.setUTCHours(0);
date.setUTCMinutes(0);
@@ -530,6 +574,7 @@ class Mining {
private getTimeRange(interval: string | null, scale = 1): number {
switch (interval) {
case '4y': return 43200 * scale; // 12h
case '3y': return 43200 * scale; // 12h
case '2y': return 28800 * scale; // 8h
case '1y': return 28800 * scale; // 8h

View File

@@ -1,251 +1,161 @@
import DB from '../database';
import logger from '../logger';
import config from '../config';
import BlocksRepository from '../repositories/BlocksRepository';
interface Pool {
name: string;
link: string;
regexes: string[];
addresses: string[];
slug: string;
}
import PoolsRepository from '../repositories/PoolsRepository';
import { PoolTag } from '../mempool.interfaces';
import diskCache from './disk-cache';
class PoolsParser {
miningPools: any[] = [];
unknownPool: any = {
'name': "Unknown",
'link': "https://learnmeabitcoin.com/technical/coinbase-transaction",
'regexes': "[]",
'addresses': "[]",
'id': 0,
'name': 'Unknown',
'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction',
'regexes': '[]',
'addresses': '[]',
'slug': 'unknown'
};
slugWarnFlag = false;
private uniqueLogs: string[] = [];
private uniqueLog(loggerFunction: any, msg: string): void {
if (this.uniqueLogs.includes(msg)) {
return;
}
this.uniqueLogs.push(msg);
loggerFunction(msg);
}
public setMiningPools(pools): void {
for (const pool of pools) {
pool.regexes = pool.tags;
pool.slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
delete(pool.tags);
}
this.miningPools = pools;
}
/**
* Parse the pools.json file, consolidate the data and dump it into the database
* Populate our db with updated mining pool definition
* @param pools
*/
public async migratePoolsJson(poolsJson: object) {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
return;
}
public async migratePoolsJson(): Promise<void> {
// We also need to wipe the backend cache to make sure we don't serve blocks with
// the wrong mining pool (usually happens with unknown blocks)
diskCache.wipeCache();
// First we save every entry without paying attention to pool duplication
const poolsDuplicated: Pool[] = [];
await this.$insertUnknownPool();
const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
for (let i = 0; i < coinbaseTags.length; ++i) {
poolsDuplicated.push({
'name': (<Pool>coinbaseTags[i][1]).name,
'link': (<Pool>coinbaseTags[i][1]).link,
'regexes': [coinbaseTags[i][0]],
'addresses': [],
'slug': ''
});
}
const addressesTags = Object.entries(poolsJson['payout_addresses']);
for (let i = 0; i < addressesTags.length; ++i) {
poolsDuplicated.push({
'name': (<Pool>addressesTags[i][1]).name,
'link': (<Pool>addressesTags[i][1]).link,
'regexes': [],
'addresses': [addressesTags[i][0]],
'slug': ''
});
}
// Then, we find unique mining pool names
const poolNames: string[] = [];
for (let i = 0; i < poolsDuplicated.length; ++i) {
if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
poolNames.push(poolsDuplicated[i].name);
for (const pool of this.miningPools) {
if (!pool.id) {
logger.info(`Mining pool ${pool.name} has no unique 'id' defined. Skipping.`);
continue;
}
}
logger.debug(`Found ${poolNames.length} unique mining pools`);
// Get existing pools from the db
let existingPools;
try {
if (config.DATABASE.ENABLED === true) {
[existingPools] = await DB.query({ sql: 'SELECT * FROM pools;', timeout: 120000 });
const poolDB = await PoolsRepository.$getPoolByUniqueId(pool.id, false);
if (!poolDB) {
// New mining pool
const slug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.debug(`Inserting new mining pool ${pool.name}`);
await PoolsRepository.$insertNewMiningPool(pool, slug);
await this.$deleteUnknownBlocks();
} else {
existingPools = [];
}
} catch (e) {
logger.err('Cannot get existing pools from the database, skipping pools.json import');
return;
}
this.miningPools = [];
// Finally, we generate the final consolidated pools data
const finalPoolDataAdd: Pool[] = [];
const finalPoolDataUpdate: Pool[] = [];
for (let i = 0; i < poolNames.length; ++i) {
let allAddresses: string[] = [];
let allRegexes: string[] = [];
const match = poolsDuplicated.filter((pool: Pool) => pool.name === poolNames[i]);
for (let y = 0; y < match.length; ++y) {
allAddresses = allAddresses.concat(match[y].addresses);
allRegexes = allRegexes.concat(match[y].regexes);
}
const finalPoolName = poolNames[i].replace(`'`, `''`); // To support single quote in names when doing db queries
let slug: string | undefined;
try {
slug = poolsJson['slugs'][poolNames[i]];
} catch (e) {
if (this.slugWarnFlag === false) {
logger.warn(`pools.json does not seem to contain the 'slugs' object`);
this.slugWarnFlag = true;
if (poolDB.name !== pool.name) {
// Pool has been renamed
const newSlug = pool.name.replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.warn(`Renaming ${poolDB.name} mining pool to ${pool.name}. Slug has been updated. Maybe you want to make a redirection from 'https://mempool.space/mining/pool/${poolDB.slug}' to 'https://mempool.space/mining/pool/${newSlug}'`);
await PoolsRepository.$renameMiningPool(poolDB.id, newSlug, pool.name);
}
if (poolDB.link !== pool.link) {
// Pool link has changed
logger.debug(`Updating link for ${pool.name} mining pool`);
await PoolsRepository.$updateMiningPoolLink(poolDB.id, pool.link);
}
if (JSON.stringify(pool.addresses) !== poolDB.addresses ||
JSON.stringify(pool.regexes) !== poolDB.regexes) {
// Pool addresses changed or coinbase tags changed
logger.notice(`Updating addresses and/or coinbase tags for ${pool.name} mining pool. If 'AUTOMATIC_BLOCK_REINDEXING' is enabled, we will re-index its blocks and 'unknown' blocks`);
await PoolsRepository.$updateMiningPoolTags(poolDB.id, pool.addresses, pool.regexes);
await this.$deleteBlocksForPool(poolDB);
}
}
if (slug === undefined) {
// Only keep alphanumerical
slug = poolNames[i].replace(/[^a-z0-9]/gi, '').toLowerCase();
logger.warn(`No slug found for '${poolNames[i]}', generating it => '${slug}'`);
}
const poolObj = {
'name': finalPoolName,
'link': match[0].link,
'regexes': allRegexes,
'addresses': allAddresses,
'slug': slug
};
const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
if (existingPool !== undefined) {
// Check if any data was actually updated
const equals = (a, b) =>
a.length === b.length &&
a.every((v, i) => v === b[i]);
if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
finalPoolDataUpdate.push(poolObj);
}
} else {
logger.debug(`Add '${finalPoolName}' mining pool`);
finalPoolDataAdd.push(poolObj);
}
this.miningPools.push({
'name': finalPoolName,
'link': match[0].link,
'regexes': JSON.stringify(allRegexes),
'addresses': JSON.stringify(allAddresses),
'slug': slug
});
}
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools.json import completed (no database)');
return;
}
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) {
logger.debug(`Update pools table now`);
// Add new mining pools into the database
let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
for (let i = 0; i < finalPoolDataAdd.length; ++i) {
queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
'${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
}
queryAdd = queryAdd.slice(0, -1) + ';';
// Updated existing mining pools in the database
const updateQueries: string[] = [];
for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
updateQueries.push(`
UPDATE pools
SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
slug='${finalPoolDataUpdate[i].slug}'
WHERE name='${finalPoolDataUpdate[i].name}'
;`);
}
try {
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
if (finalPoolDataAdd.length > 0) {
await DB.query({ sql: queryAdd, timeout: 120000 });
}
for (const query of updateQueries) {
await DB.query({ sql: query, timeout: 120000 });
}
await this.insertUnknownPool();
logger.info('Mining pools.json import completed');
} catch (e) {
logger.err(`Cannot import pools in the database`);
throw e;
}
}
try {
await this.insertUnknownPool();
} catch (e) {
logger.err(`Cannot insert unknown pool in the database`);
throw e;
}
logger.info('Mining pools-v2.json import completed');
}
/**
* Manually add the 'unknown pool'
*/
private async insertUnknownPool() {
public async $insertUnknownPool(): Promise<void> {
if (!config.DATABASE.ENABLED) {
return;
}
try {
const [rows]: any[] = await DB.query({ sql: 'SELECT name from pools where name="Unknown"', timeout: 120000 });
if (rows.length === 0) {
await DB.query({
sql: `INSERT INTO pools(name, link, regexes, addresses, slug)
VALUES("Unknown", "https://learnmeabitcoin.com/technical/coinbase-transaction", "[]", "[]", "unknown");
sql: `INSERT INTO pools(name, link, regexes, addresses, slug, unique_id)
VALUES("${this.unknownPool.name}", "${this.unknownPool.link}", "[]", "[]", "${this.unknownPool.slug}", 0);
`});
} else {
await DB.query(`UPDATE pools
SET name='Unknown', link='https://learnmeabitcoin.com/technical/coinbase-transaction',
SET name='${this.unknownPool.name}', link='${this.unknownPool.link}',
regexes='[]', addresses='[]',
slug='unknown'
WHERE name='Unknown'
slug='${this.unknownPool.slug}',
unique_id=0
WHERE slug='${this.unknownPool.slug}'
`);
}
} catch (e) {
logger.err('Unable to insert "Unknown" mining pool');
logger.err(`Unable to insert or update "Unknown" mining pool. Reason: ${e instanceof Error ? e.message : e}`);
}
}
/**
* Delete blocks which needs to be reindexed
* Delete indexed blocks for an updated mining pool
*
* @param pool
*/
private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
private async $deleteBlocksForPool(pool: PoolTag): Promise<void> {
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
return;
}
const blockCount = await BlocksRepository.$blockCount(null, null);
if (blockCount === 0) {
return;
}
for (const updatedPool of finalPoolDataUpdate) {
const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
if (pool.length > 0) {
logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
}
}
// Ignore early days of Bitcoin as there were no mining pools yet
logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing');
// Get oldest blocks mined by the pool and assume pools-v2.json updates only concern most recent years
// Ignore early days of Bitcoin as there were no mining pools yet
const [oldestPoolBlock]: any[] = await DB.query(`
SELECT height
FROM blocks
WHERE pool_id = ?
ORDER BY height
LIMIT 1`,
[pool.id]
);
const oldestBlockHeight = oldestPoolBlock.length > 0 ? oldestPoolBlock[0].height : 130635;
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height ${oldestBlockHeight} for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= ${oldestBlockHeight}`,
[unknownPool[0].id]
);
logger.notice(`Deleting blocks from ${pool.name} mining pool for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ?`,
[pool.id]
);
}
logger.notice('Truncating hashrates for future re-indexing');
await DB.query(`DELETE FROM hashrates`);
private async $deleteUnknownBlocks(): Promise<void> {
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
this.uniqueLog(logger.notice, `Deleting blocks with unknown mining pool from height 130635 for re-indexing`);
await DB.query(`
DELETE FROM blocks
WHERE pool_id = ? AND height >= 130635`,
[unknownPool[0].id]
);
}
}
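Slugs are now derived deterministically from the pool name by stripping non-alphanumeric characters and lowercasing, rather than being read from a 'slugs' object in pools.json. For example (pool names purely illustrative):

const slugify = (name: string): string => name.replace(/[^a-z0-9]/gi, '').toLowerCase();

slugify('Foundry USA'); // 'foundryusa'
slugify('F2Pool');      // 'f2pool'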

View File

@@ -1,31 +1,62 @@
export interface CachedRbf {
txid: string;
expires: Date;
}
import { TransactionExtended } from "../mempool.interfaces";
class RbfCache {
private cache: { [txid: string]: CachedRbf; } = {};
private replacedBy: { [txid: string]: string; } = {};
private replaces: { [txid: string]: string[] } = {};
private txs: { [txid: string]: TransactionExtended } = {};
private expiring: { [txid: string]: Date } = {};
constructor() {
setInterval(this.cleanup.bind(this), 1000 * 60 * 60);
}
public add(replacedTxId: string, newTxId: string): void {
this.cache[replacedTxId] = {
expires: new Date(Date.now() + 1000 * 604800), // 1 week
txid: newTxId,
};
public add(replacedTx: TransactionExtended, newTxId: string): void {
this.replacedBy[replacedTx.txid] = newTxId;
this.txs[replacedTx.txid] = replacedTx;
if (!this.replaces[newTxId]) {
this.replaces[newTxId] = [];
}
this.replaces[newTxId].push(replacedTx.txid);
}
public get(txId: string): CachedRbf | undefined {
return this.cache[txId];
public getReplacedBy(txId: string): string | undefined {
return this.replacedBy[txId];
}
public getReplaces(txId: string): string[] | undefined {
return this.replaces[txId];
}
public getTx(txId: string): TransactionExtended | undefined {
return this.txs[txId];
}
// flag a transaction as removed from the mempool
public evict(txid): void {
this.expiring[txid] = new Date(Date.now() + 1000 * 86400); // 24 hours
}
private cleanup(): void {
const currentDate = new Date();
for (const c in this.cache) {
if (this.cache[c].expires < currentDate) {
delete this.cache[c];
for (const txid in this.expiring) {
if (this.expiring[txid] < currentDate) {
delete this.expiring[txid];
this.remove(txid);
}
}
}
// remove a transaction & all previous versions from the cache
private remove(txid): void {
// don't remove a transaction while a newer version remains in the mempool
if (this.replaces[txid] && !this.replacedBy[txid]) {
const replaces = this.replaces[txid];
delete this.replaces[txid];
for (const tx of replaces) {
// recursively remove prior versions from the cache
delete this.replacedBy[tx];
delete this.txs[tx];
this.remove(tx);
}
}
}
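The reworked RbfCache keeps whole replacement chains (replacedBy / replaces / full transactions) instead of a single txid-to-txid map, so a consumer can walk forward to the latest version of a transaction. A hypothetical traversal sketch:

function latestReplacement(txid: string): string {
  // follow replacedBy links until we reach a tx that was never itself replaced
  let current = txid;
  let next = rbfCache.getReplacedBy(current);
  while (next) {
    current = next;
    next = rbfCache.getReplacedBy(current);
  }
  return current;
}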

View File

@@ -375,6 +375,17 @@ class StatisticsApi {
}
}
public async $list4Y(): Promise<OptimizedStatistic[]> {
try {
const query = this.getQueryForDays(43200, '4 YEAR'); // 12h interval
const [rows] = await DB.query({ sql: query, timeout: this.queryTimeout });
return this.mapStatisticToOptimizedStatistic(rows as Statistic[]);
} catch (e) {
logger.err('$list4Y() error: ' + (e instanceof Error ? e.message : e));
return [];
}
}
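At a 12-hour sampling interval, the new 4-year window returns roughly 4 * 365 * 2 = 2,920 rows, in line with the other long-range queries (43200 is the interval in seconds):

const intervalSeconds = 43200;                    // 12h
const rows = (4 * 365 * 86400) / intervalSeconds; // 2920 data points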
private mapStatisticToOptimizedStatistic(statistic: Statistic[]): OptimizedStatistic[] {
return statistic.map((s) => {
return {

View File

@@ -14,10 +14,11 @@ class StatisticsRoutes {
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1y', this.$getStatisticsByTime.bind(this, '1y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', this.$getStatisticsByTime.bind(this, '2y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', this.$getStatisticsByTime.bind(this, '3y'))
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/4y', this.$getStatisticsByTime.bind(this, '4y'))
;
}
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y', req: Request, res: Response) {
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y' | '4y', req: Request, res: Response) {
res.header('Pragma', 'public');
res.header('Cache-control', 'public');
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
@@ -54,6 +55,9 @@ class StatisticsRoutes {
case '3y':
result = await statisticsApi.$list3Y();
break;
case '4y':
result = await statisticsApi.$list4Y();
break;
default:
result = await statisticsApi.$list2H();
}

View File

@@ -1,8 +1,7 @@
import bitcoinApi from './bitcoin/bitcoin-api-factory';
import { TransactionExtended, TransactionMinerInfo } from '../mempool.interfaces';
import { IEsploraApi } from './bitcoin/esplora-api.interface';
import config from '../config';
import { Common } from './common';
import bitcoinApi, { bitcoinCoreApi } from './bitcoin/bitcoin-api-factory';
class TransactionUtils {
constructor() { }
@@ -15,14 +14,26 @@ class TransactionUtils {
vout: tx.vout
.map((vout) => ({
scriptpubkey_address: vout.scriptpubkey_address,
scriptpubkey_asm: vout.scriptpubkey_asm,
value: vout.value
}))
.filter((vout) => vout.value)
};
}
public async $getTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false): Promise<TransactionExtended> {
const transaction: IEsploraApi.Transaction = await bitcoinApi.$getRawTransaction(txId, false, addPrevouts, lazyPrevouts);
/**
* @param txId
* @param addPrevouts
* @param lazyPrevouts
* @param forceCore - See https://github.com/mempool/mempool/issues/2904
*/
public async $getTransactionExtended(txId: string, addPrevouts = false, lazyPrevouts = false, forceCore = false): Promise<TransactionExtended> {
let transaction: IEsploraApi.Transaction;
if (forceCore === true) {
transaction = await bitcoinCoreApi.$getRawTransaction(txId, true);
} else {
transaction = await bitcoinApi.$getRawTransaction(txId, false, addPrevouts, lazyPrevouts);
}
return this.extendTransaction(transaction);
}
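A call sketch for the new forceCore escape hatch, which skips esplora and fetches the transaction (verbose) straight from Bitcoin Core, per the issue referenced above; the call site is hypothetical:

// fall back to Core when esplora cannot serve this transaction
const tx = await transactionUtils.$getTransactionExtended(txid, false, false, true);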

View File

@@ -0,0 +1,294 @@
import config from '../config';
import logger from '../logger';
import { ThreadTransaction, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
import { PairingHeap } from '../utils/pairing-heap';
import { Common } from './common';
import { parentPort } from 'worker_threads';
let mempool: { [txid: string]: ThreadTransaction } = {};
if (parentPort) {
parentPort.on('message', (params) => {
if (params.type === 'set') {
mempool = params.mempool;
} else if (params.type === 'update') {
params.added.forEach(tx => {
mempool[tx.txid] = tx;
});
params.removed.forEach(txid => {
delete mempool[txid];
});
}
const { blocks, clusters } = makeBlockTemplates(mempool);
// return the result to main thread.
if (parentPort) {
parentPort.postMessage({ blocks, clusters });
}
});
}
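The worker accepts two message types from the main thread: a full 'set' carrying the stripped-down mempool, and an incremental 'update' with added/removed deltas (sent by makeBlockTemplates and updateBlockTemplates respectively); either way it replies with { blocks, clusters }. A discriminated-union sketch of the protocol (the type name is illustrative):

type TxSelectionMessage =
  | { type: 'set'; mempool: { [txid: string]: ThreadTransaction } }
  | { type: 'update'; added: ThreadTransaction[]; removed: string[] };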
/*
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
*/
function makeBlockTemplates(mempool: { [txid: string]: ThreadTransaction })
: { blocks: ThreadTransaction[][], clusters: { [root: string]: string[] } } {
const start = Date.now();
const auditPool: { [txid: string]: AuditTransaction } = {};
const mempoolArray: AuditTransaction[] = [];
const restOfArray: ThreadTransaction[] = [];
const cpfpClusters: { [root: string]: string[] } = {};
// grab the top feerate txs up to maxWeight
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
// initializing everything up front helps V8 optimize property access later
auditPool[tx.txid] = {
txid: tx.txid,
fee: tx.fee,
weight: tx.weight,
feePerVsize: tx.feePerVsize,
effectiveFeePerVsize: tx.feePerVsize,
vin: tx.vin,
relativesSet: false,
ancestorMap: new Map<string, AuditTransaction>(),
children: new Set<AuditTransaction>(),
ancestorFee: 0,
ancestorWeight: 0,
score: 0,
used: false,
modified: false,
modifiedNode: null,
};
mempoolArray.push(auditPool[tx.txid]);
});
// Build relatives graph & calculate ancestor scores
for (const tx of mempoolArray) {
if (!tx.relativesSet) {
setRelatives(tx, auditPool);
}
}
// Sort by descending ancestor score
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));
// Build blocks by greedily choosing the highest feerate package
// (i.e. the package rooted in the transaction with the best ancestor score)
const blocks: ThreadTransaction[][] = [];
let blockWeight = 4000;
let blockSize = 0;
let transactions: AuditTransaction[] = [];
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
let overflow: AuditTransaction[] = [];
let failures = 0;
let top = 0;
while ((top < mempoolArray.length || !modified.isEmpty())) {
// skip invalid transactions
while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
top++;
}
// Select best next package
let nextTx;
const nextPoolTx = mempoolArray[top];
const nextModifiedTx = modified.peek();
if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
nextTx = nextPoolTx;
top++;
} else {
modified.pop();
if (nextModifiedTx) {
nextTx = nextModifiedTx;
nextTx.modifiedNode = undefined;
}
}
if (nextTx && !nextTx?.used) {
// Check if the package fits into this block
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
let isCluster = false;
if (sortedTxSet.length > 1) {
cpfpClusters[nextTx.txid] = sortedTxSet.map(tx => tx.txid);
isCluster = true;
}
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
const used: AuditTransaction[] = [];
while (sortedTxSet.length) {
const ancestor = sortedTxSet.pop();
const mempoolTx = mempool[ancestor.txid];
ancestor.used = true;
ancestor.usedBy = nextTx.txid;
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
if (isCluster) {
mempoolTx.cpfpRoot = nextTx.txid;
}
mempoolTx.cpfpChecked = true;
transactions.push(ancestor);
blockSize += ancestor.size;
blockWeight += ancestor.weight;
used.push(ancestor);
}
// remove these as valid package ancestors for any descendants remaining in the mempool
if (used.length) {
used.forEach(tx => {
updateDescendants(tx, auditPool, modified);
});
}
failures = 0;
} else {
// hold this package in an overflow list while we check for smaller options
overflow.push(nextTx);
failures++;
}
}
// this block is full
const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
const queueEmpty = top >= mempoolArray.length && modified.isEmpty();
if ((exceededPackageTries || queueEmpty) && blocks.length < 7) {
// construct this block
if (transactions.length) {
blocks.push(transactions.map(t => mempool[t.txid]));
}
// reset for the next block
transactions = [];
blockSize = 0;
blockWeight = 4000;
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
for (const overflowTx of overflow.reverse()) {
if (overflowTx.modified) {
overflowTx.modifiedNode = modified.add(overflowTx);
} else {
top--;
mempoolArray[top] = overflowTx;
}
}
overflow = [];
}
}
// pack any leftover transactions into the last block
for (const tx of overflow) {
if (!tx || tx.used) {
continue;
}
blockWeight += tx.weight;
const mempoolTx = mempool[tx.txid];
// update original copy of this tx with effective fee rate & relatives data
mempoolTx.effectiveFeePerVsize = tx.score;
if (tx.ancestorMap.size > 0) {
cpfpClusters[tx.txid] = Array.from(tx.ancestorMap?.values()).map(a => a.txid);
mempoolTx.cpfpRoot = tx.txid;
}
mempoolTx.cpfpChecked = true;
transactions.push(tx);
tx.used = true;
}
const blockTransactions = transactions.map(t => mempool[t.txid]);
restOfArray.forEach(tx => {
blockWeight += tx.weight;
tx.effectiveFeePerVsize = tx.feePerVsize;
tx.cpfpChecked = false;
blockTransactions.push(tx);
});
if (blockTransactions.length) {
blocks.push(blockTransactions);
}
transactions = [];
const end = Date.now();
const time = end - start;
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
return { blocks, clusters: cpfpClusters };
}
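The selection loop above always pulls the package with the best ancestor score, i.e. the aggregate fee of a transaction plus its unconfirmed ancestors divided by their aggregate vsize (weight / 4). A standalone sketch of that scoring, using hypothetical names (MiniTx, packageScore) that are not part of this codebase:

interface MiniTx {
  txid: string;
  fee: number;      // sats
  weight: number;   // weight units
  parents: string[];
}

// Sum fee and weight over tx plus all in-mempool ancestors, then return sats/vB.
function packageScore(tx: MiniTx, pool: Map<string, MiniTx>): number {
  const seen = new Set<string>([tx.txid]);
  const stack = [...tx.parents];
  let fee = tx.fee;
  let weight = tx.weight;
  while (stack.length) {
    const id = stack.pop() as string;
    const parent = pool.get(id);
    if (parent && !seen.has(id)) {
      seen.add(id);
      fee += parent.fee;
      weight += parent.weight;
      stack.push(...parent.parents);
    }
  }
  return fee / (weight / 4); // same formula as `score` above
}

// A 1 sat/vB parent bumped by a 20 sat/vB child scores 10.5 sat/vB as a package:
const pool = new Map<string, MiniTx>([
  ['a', { txid: 'a', fee: 100, weight: 400, parents: [] }],
  ['b', { txid: 'b', fee: 2000, weight: 400, parents: ['a'] }],
]);
console.log(packageScore(pool.get('b') as MiniTx, pool)); // 10.5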
// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
tx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
): void {
for (const parent of tx.vin) {
const parentTx = mempool[parent];
if (parentTx && !tx.ancestorMap?.has(parent)) {
tx.ancestorMap.set(parent, parentTx);
parentTx.children.add(tx);
// visit each node only once
if (!parentTx.relativesSet) {
setRelatives(parentTx, mempool);
}
parentTx.ancestorMap.forEach((ancestor) => {
tx.ancestorMap.set(ancestor.txid, ancestor);
});
}
}
tx.ancestorFee = tx.fee || 0;
tx.ancestorWeight = tx.weight || 0;
tx.ancestorMap.forEach((ancestor) => {
tx.ancestorFee += ancestor.fee;
tx.ancestorWeight += ancestor.weight;
});
tx.score = tx.ancestorFee / ((tx.ancestorWeight / 4) || 1);
tx.relativesSet = true;
}
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
// avoids recursion to limit call stack depth
function updateDescendants(
rootTx: AuditTransaction,
mempool: { [txid: string]: AuditTransaction },
modified: PairingHeap<AuditTransaction>,
): void {
const descendantSet: Set<AuditTransaction> = new Set();
// stack of nodes left to visit
const descendants: AuditTransaction[] = [];
let descendantTx;
let tmpScore;
rootTx.children.forEach(childTx => {
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
while (descendants.length) {
descendantTx = descendants.pop();
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
// remove tx as ancestor
descendantTx.ancestorMap.delete(rootTx.txid);
descendantTx.ancestorFee -= rootTx.fee;
descendantTx.ancestorWeight -= rootTx.weight;
tmpScore = descendantTx.score;
descendantTx.score = descendantTx.ancestorFee / (descendantTx.ancestorWeight / 4);
if (!descendantTx.modifiedNode) {
descendantTx.modified = true;
descendantTx.modifiedNode = modified.add(descendantTx);
} else {
// rebalance modified heap if score has changed
if (descendantTx.score < tmpScore) {
modified.decreasePriority(descendantTx.modifiedNode);
} else if (descendantTx.score > tmpScore) {
modified.increasePriority(descendantTx.modifiedNode);
}
}
// add this node's children to the stack
descendantTx.children.forEach(childTx => {
// visit each node only once
if (!descendantSet.has(childTx)) {
descendants.push(childTx);
descendantSet.add(childTx);
}
});
}
}
}
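updateDescendants relies on a re-prioritizable heap: each entry keeps a handle (modifiedNode) so its position can be adjusted in place when its ancestor score changes. A minimal sketch of the heap surface these calls assume; the actual PairingHeap in utils/pairing-heap may differ in detail:

// Sketch only; names mirror the calls made above, not the real implementation.
interface HeapNode<T> {
  element: T; // opaque handle contents are an assumption
}
interface ReprioritizableMaxHeap<T> {
  isEmpty(): boolean;
  add(element: T): HeapNode<T>;              // returns a handle for later adjustment
  peek(): T | null;                          // best-scoring element, if any
  pop(): T | null;                           // remove and return the best element
  decreasePriority(node: HeapNode<T>): void; // score decreased: sift away from the top
  increasePriority(node: HeapNode<T>): void; // score increased: sift toward the top
}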

View File

@@ -1,14 +1,13 @@
import logger from '../logger';
import * as WebSocket from 'ws';
import {
BlockExtended, TransactionExtended, WebsocketResponse, MempoolBlock, MempoolBlockDelta,
OptimizedStatistic, ILoadingIndicators, IConversionRates
BlockExtended, TransactionExtended, WebsocketResponse,
OptimizedStatistic, ILoadingIndicators
} from '../mempool.interfaces';
import blocks from './blocks';
import memPool from './mempool';
import backendInfo from './backend-info';
import mempoolBlocks from './mempool-blocks';
import fiatConversion from './fiat-conversion';
import { Common } from './common';
import loadingIndicators from './loading-indicators';
import config from '../config';
@@ -18,6 +17,10 @@ import difficultyAdjustment from './difficulty-adjustment';
import feeApi from './fee-api';
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
import Audit from './audit';
import { deepClone } from '../utils/clone';
import priceUpdater from '../tasks/price-updater';
import { ApiPrice } from '../repositories/PricesRepository';
class WebsocketHandler {
private wss: WebSocket.Server | undefined;
@@ -57,10 +60,10 @@ class WebsocketHandler {
client['track-tx'] = parsedMessage['track-tx'];
// Client is telling the transaction wasn't found
if (parsedMessage['watch-mempool']) {
const rbfCacheTx = rbfCache.get(client['track-tx']);
if (rbfCacheTx) {
const rbfCacheTxid = rbfCache.getReplacedBy(client['track-tx']);
if (rbfCacheTxid) {
response['txReplaced'] = {
txid: rbfCacheTx.txid,
txid: rbfCacheTxid,
};
client['track-tx'] = null;
} else {
@@ -191,7 +194,7 @@ class WebsocketHandler {
});
}
handleNewConversionRates(conversionRates: IConversionRates) {
handleNewConversionRates(conversionRates: ApiPrice) {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
@@ -212,7 +215,7 @@ class WebsocketHandler {
'mempoolInfo': memPool.getMempoolInfo(),
'vBytesPerSecond': memPool.getVBytesPerSecond(),
'blocks': _blocks,
'conversions': fiatConversion.getConversionRates(),
'conversions': priceUpdater.getLatestPrices(),
'mempool-blocks': mempoolBlocks.getMempoolBlocks(),
'transactions': memPool.getLatestTransactions(),
'backendInfo': backendInfo.getBackendInfo(),
@@ -243,13 +246,18 @@ class WebsocketHandler {
});
}
handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) {
async handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
mempoolBlocks.updateMempoolBlocks(newMempool);
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.updateBlockTemplates(newMempool, newTransactions, deletedTransactions.map(tx => tx.txid), true);
} else {
mempoolBlocks.updateMempoolBlocks(newMempool, true);
}
const mBlocks = mempoolBlocks.getMempoolBlocks();
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
const mempoolInfo = memPool.getMempoolInfo();
@@ -404,54 +412,39 @@ class WebsocketHandler {
}
});
}
handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) {
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> {
if (!this.wss) {
throw new Error('WebSocket.Server is not set');
}
let mBlocks: undefined | MempoolBlock[];
let mBlockDeltas: undefined | MempoolBlockDelta[];
let matchRate = 0;
const _memPool = memPool.getMempool();
const _mempoolBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
if (_mempoolBlocks[0]) {
const matches: string[] = [];
const added: string[] = [];
const missing: string[] = [];
for (const txId of txIds) {
if (_mempoolBlocks[0].transactionIds.indexOf(txId) > -1) {
matches.push(txId);
} else {
added.push(txId);
}
delete _memPool[txId];
if (config.MEMPOOL.AUDIT) {
let projectedBlocks;
// template calculation functions have mempool side effects, so calculate audits using
// a cloned copy of the mempool if we're running a different algorithm for mempool updates
const auditMempool = (config.MEMPOOL.ADVANCED_GBT_AUDIT === config.MEMPOOL.ADVANCED_GBT_MEMPOOL) ? _memPool : deepClone(_memPool);
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
projectedBlocks = await mempoolBlocks.makeBlockTemplates(auditMempool, false);
} else {
projectedBlocks = mempoolBlocks.updateMempoolBlocks(auditMempool, false);
}
for (const txId of _mempoolBlocks[0].transactionIds) {
if (matches.includes(txId) || added.includes(txId)) {
continue;
}
missing.push(txId);
}
if (Common.indexingEnabled() && memPool.isInSync()) {
const { censored, added, fresh, score, similarity } = Audit.auditBlock(transactions, projectedBlocks, auditMempool);
const matchRate = Math.round(score * 100 * 100) / 100;
matchRate = Math.round((Math.max(0, matches.length - missing.length - added.length) / txIds.length * 100) * 100) / 100;
mempoolBlocks.updateMempoolBlocks(_memPool);
mBlocks = mempoolBlocks.getMempoolBlocks();
mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
if (Common.indexingEnabled()) {
const stripped = _mempoolBlocks[0].transactions.map((tx) => {
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
return {
txid: tx.txid,
vsize: tx.vsize,
fee: tx.fee ? Math.round(tx.fee) : 0,
value: tx.value,
};
});
BlocksSummariesRepository.$saveSummary({
}) : [];
BlocksSummariesRepository.$saveTemplate({
height: block.height,
template: {
id: block.id,
@@ -464,16 +457,39 @@ class WebsocketHandler {
height: block.height,
hash: block.id,
addedTxs: added,
missingTxs: missing,
missingTxs: censored,
freshTxs: fresh,
matchRate: matchRate,
});
if (block.extras) {
block.extras.matchRate = matchRate;
block.extras.similarity = similarity;
}
}
} else if (block.extras) {
const mBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
if (mBlocks?.length && mBlocks[0].transactions) {
block.extras.similarity = Common.getSimilarity(mBlocks[0], transactions);
}
}
if (block.extras) {
block.extras.matchRate = matchRate;
const removed: string[] = [];
// Update mempool to remove transactions included in the new block
for (const txId of txIds) {
delete _memPool[txId];
removed.push(txId);
rbfCache.evict(txId);
}
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
await mempoolBlocks.updateBlockTemplates(_memPool, [], removed, true);
} else {
mempoolBlocks.updateMempoolBlocks(_memPool, true);
}
const mBlocks = mempoolBlocks.getMempoolBlocks();
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
const da = difficultyAdjustment.getDifficultyAdjustment();
const fees = feeApi.getRecommendedFee();
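In the new audit path above, Audit.auditBlock returns a 0-1 score that is converted to a two-decimal percentage via Math.round(score * 100 * 100) / 100. A quick worked example:

const score = 0.98765; // raw block-health score from the audit
const matchRate = Math.round(score * 100 * 100) / 100;
console.log(matchRate); // 98.77, stored and broadcast as a percentage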

View File

@@ -4,6 +4,7 @@ const configFromFile = require(
interface IConfig {
MEMPOOL: {
ENABLED: boolean;
NETWORK: 'mainnet' | 'testnet' | 'signet' | 'liquid' | 'liquidtestnet';
BACKEND: 'esplora' | 'electrum' | 'none';
HTTP_PORT: number;
@@ -18,7 +19,6 @@ interface IConfig {
MEMPOOL_BLOCKS_AMOUNT: number;
INDEXING_BLOCKS_AMOUNT: number;
BLOCKS_SUMMARIES_INDEXING: boolean;
PRICE_FEED_UPDATE_INTERVAL: number;
USE_SECOND_NODE_FOR_MINFEE: boolean;
EXTERNAL_ASSETS: string[];
EXTERNAL_MAX_RETRY: number;
@@ -28,6 +28,12 @@ interface IConfig {
AUTOMATIC_BLOCK_REINDEXING: boolean;
POOLS_JSON_URL: string,
POOLS_JSON_TREE_URL: string,
AUDIT: boolean;
ADVANCED_GBT_AUDIT: boolean;
ADVANCED_GBT_MEMPOOL: boolean;
CPFP_INDEXING: boolean;
MAX_BLOCKS_BULK_QUERY: number;
DISK_CACHE_BLOCK_INTERVAL: number;
};
ESPLORA: {
REST_API_URL: string;
@@ -39,6 +45,8 @@ interface IConfig {
STATS_REFRESH_INTERVAL: number;
GRAPH_REFRESH_INTERVAL: number;
LOGGER_UPDATE_INTERVAL: number;
FORENSICS_INTERVAL: number;
FORENSICS_RATE_LIMIT: number;
};
LND: {
TLS_CERT_PATH: string;
@@ -119,6 +127,7 @@ interface IConfig {
const defaults: IConfig = {
'MEMPOOL': {
'ENABLED': true,
'NETWORK': 'mainnet',
'BACKEND': 'none',
'HTTP_PORT': 8999,
@@ -133,7 +142,6 @@ const defaults: IConfig = {
'MEMPOOL_BLOCKS_AMOUNT': 8,
'INDEXING_BLOCKS_AMOUNT': 11000, // 0 = disable indexing, -1 = index all blocks
'BLOCKS_SUMMARIES_INDEXING': false,
'PRICE_FEED_UPDATE_INTERVAL': 600,
'USE_SECOND_NODE_FOR_MINFEE': false,
'EXTERNAL_ASSETS': [],
'EXTERNAL_MAX_RETRY': 1,
@@ -141,8 +149,14 @@ const defaults: IConfig = {
'USER_AGENT': 'mempool',
'STDOUT_LOG_MIN_PRIORITY': 'debug',
'AUTOMATIC_BLOCK_REINDEXING': false,
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json',
'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
'AUDIT': false,
'ADVANCED_GBT_AUDIT': false,
'ADVANCED_GBT_MEMPOOL': false,
'CPFP_INDEXING': false,
'MAX_BLOCKS_BULK_QUERY': 0,
'DISK_CACHE_BLOCK_INTERVAL': 6,
},
'ESPLORA': {
'REST_API_URL': 'http://127.0.0.1:3000',
@@ -195,6 +209,8 @@ const defaults: IConfig = {
'STATS_REFRESH_INTERVAL': 600,
'GRAPH_REFRESH_INTERVAL': 600,
'LOGGER_UPDATE_INTERVAL': 30,
'FORENSICS_INTERVAL': 43200,
'FORENSICS_RATE_LIMIT': 20,
},
'LND': {
'TLS_CERT_PATH': '',
@@ -224,11 +240,11 @@ const defaults: IConfig = {
'BISQ_URL': 'https://bisq.markets/api',
'BISQ_ONION': 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
},
"MAXMIND": {
'MAXMIND': {
'ENABLED': false,
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
'GEOLITE2_CITY': '/usr/local/share/GeoIP/GeoLite2-City.mmdb',
'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
},
};
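For reference, a hypothetical override (shown as a TypeScript literal rather than the mempool-config.json file it would normally live in) that enables the new options added in this diff; omitted keys fall back to the defaults above:

// Hypothetical example: key names match the defaults above, values are illustrative.
const exampleOverrides = {
  MEMPOOL: {
    AUDIT: true,                  // enable block audits
    ADVANCED_GBT_AUDIT: true,     // use the new template builder for audits
    ADVANCED_GBT_MEMPOOL: false,  // keep the legacy algorithm for live updates
    CPFP_INDEXING: true,          // index CPFP clusters for historical blocks
    DISK_CACHE_BLOCK_INTERVAL: 6, // write the disk cache every 6 blocks
  },
};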

View File

@@ -24,7 +24,8 @@ import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } fr
private checkDBFlag() {
if (config.DATABASE.ENABLED === false) {
logger.err('Trying to use DB feature but config.DATABASE.ENABLED is set to false, please open an issue');
const stack = new Error().stack;
logger.err(`Trying to use DB feature but config.DATABASE.ENABLED is set to false, please open an issue.\nStack trace: ${stack}`);
}
}
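The new version captures a stack trace without throwing: constructing an Error records the current call chain in its stack property, which points at whoever tried to use the DB while it was disabled. A minimal sketch of the same trick:

// Sketch: Error objects capture the call stack at construction time.
function reportCallSite(): void {
  const stack = new Error().stack;
  console.log(`called from:\n${stack ?? 'no stack available'}`);
}
reportCallSite();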

View File

@@ -1,4 +1,4 @@
import express from "express";
import express from 'express';
import { Application, Request, Response, NextFunction } from 'express';
import * as http from 'http';
import * as WebSocket from 'ws';
@@ -10,7 +10,6 @@ import memPool from './api/mempool';
import diskCache from './api/disk-cache';
import statistics from './api/statistics/statistics';
import websocketHandler from './api/websocket-handler';
import fiatConversion from './api/fiat-conversion';
import bisq from './api/bisq/bisq';
import bisqMarkets from './api/bisq/markets';
import logger from './logger';
@@ -34,7 +33,13 @@ import miningRoutes from './api/mining/mining-routes';
import bisqRoutes from './api/bisq/bisq.routes';
import liquidRoutes from './api/liquid/liquid.routes';
import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
import fundingTxFetcher from "./tasks/lightning/sync-tasks/funding-tx-fetcher";
import fundingTxFetcher from './tasks/lightning/sync-tasks/funding-tx-fetcher';
import forensicsService from './tasks/lightning/forensics.service';
import priceUpdater from './tasks/price-updater';
import chainTips from './api/chain-tips';
import { AxiosError } from 'axios';
import v8 from 'v8';
import { formatBytes, getBytesUnit } from './utils/format';
class Server {
private wss: WebSocket.Server | undefined;
@@ -42,6 +47,11 @@ class Server {
private app: Application;
private currentBackendRetryInterval = 5;
private maxHeapSize: number = 0;
private heapLogInterval: number = 60;
private warnedHeapCritical: boolean = false;
private lastHeapLogTime: number | null = null;
constructor() {
this.app = express();
@@ -74,42 +84,43 @@ class Server {
}
}
async startServer(worker = false) {
async startServer(worker = false): Promise<void> {
logger.notice(`Starting Mempool Server${worker ? ' (worker)' : ''}... (${backendInfo.getShortCommitHash()})`);
if (config.DATABASE.ENABLED) {
await DB.checkDbConnection();
try {
if (process.env.npm_config_reindex_blocks === 'true') { // Re-index requests
await databaseMigration.$blocksReindexingTruncate();
}
await databaseMigration.$initializeOrMigrateDatabase();
} catch (e) {
throw new Error(e instanceof Error ? e.message : 'Error');
}
}
this.app
.use((req: Request, res: Response, next: NextFunction) => {
res.setHeader('Access-Control-Allow-Origin', '*');
next();
})
.use(express.urlencoded({ extended: true }))
.use(express.text())
.use(express.text({ type: ['text/plain', 'application/base64'] }))
;
if (config.DATABASE.ENABLED) {
await priceUpdater.$initializeLatestPriceWithDb();
}
this.server = http.createServer(this.app);
this.wss = new WebSocket.Server({ server: this.server });
this.setUpWebsocketHandling();
await poolsUpdater.updatePoolsJson(); // Needs to be done before loading the disk cache because we sometimes wipe it
await syncAssets.syncAssets$();
diskCache.loadMempoolCache();
if (config.DATABASE.ENABLED) {
await DB.checkDbConnection();
try {
if (process.env.npm_config_reindex !== undefined) { // Re-index requests
const tables = process.env.npm_config_reindex.split(',');
logger.warn(`Indexed data for "${process.env.npm_config_reindex}" tables will be erased in 5 seconds (using '--reindex')`);
await Common.sleep$(5000);
await databaseMigration.$truncateIndexedData(tables);
}
await databaseMigration.$initializeOrMigrateDatabase();
if (Common.indexingEnabled()) {
await indexer.$resetHashratesIndexingState();
}
} catch (e) {
throw new Error(e instanceof Error ? e.message : 'Error');
}
if (config.MEMPOOL.ENABLED) {
diskCache.loadMempoolCache();
}
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && cluster.isPrimary) {
@@ -124,10 +135,16 @@ class Server {
}
}
fiatConversion.startService();
priceUpdater.$run();
await chainTips.updateOrphanedBlocks();
this.setUpHttpApiRoutes();
this.runMainUpdateLoop();
if (config.MEMPOOL.ENABLED) {
this.runMainUpdateLoop();
}
setInterval(() => { this.healthCheck(); }, 2500);
if (config.BISQ.ENABLED) {
bisq.startBisqService();
@@ -149,7 +166,7 @@ class Server {
});
}
async runMainUpdateLoop() {
async runMainUpdateLoop(): Promise<void> {
try {
try {
await memPool.$updateMemPoolInfo();
@@ -161,33 +178,42 @@ class Server {
logger.debug(msg);
}
}
await poolsUpdater.updatePoolsJson();
await blocks.$updateBlocks();
await memPool.$updateMempool();
indexer.$run();
setTimeout(this.runMainUpdateLoop.bind(this), config.MEMPOOL.POLL_RATE_MS);
this.currentBackendRetryInterval = 5;
} catch (e) {
const loggerMsg = `runMainLoop error: ${(e instanceof Error ? e.message : e)}. Retrying in ${this.currentBackendRetryInterval} sec.`;
} catch (e: any) {
let loggerMsg = `Exception in runMainUpdateLoop(). Retrying in ${this.currentBackendRetryInterval} sec.`;
loggerMsg += ` Reason: ${(e instanceof Error ? e.message : e)}.`;
if (e?.stack) {
loggerMsg += ` Stack trace: ${e.stack}`;
}
// On the first exception, only `logger.debug` it and retry after 5 seconds
// From the second exception onwards, `logger.warn` it and increase the retry delay
// Maximum retry delay is 60 seconds
if (this.currentBackendRetryInterval > 5) {
logger.warn(loggerMsg);
mempool.setOutOfSync();
} else {
logger.debug(loggerMsg);
}
logger.debug(JSON.stringify(e));
if (e instanceof AxiosError) {
logger.debug(`AxiosError: ${e?.message}`);
}
setTimeout(this.runMainUpdateLoop.bind(this), 1000 * this.currentBackendRetryInterval);
this.currentBackendRetryInterval *= 2;
this.currentBackendRetryInterval = Math.min(this.currentBackendRetryInterval, 60);
}
}
async $runLightningBackend() {
async $runLightningBackend(): Promise<void> {
try {
await fundingTxFetcher.$init();
await networkSyncService.$startService();
await lightningStatsUpdater.$startService();
await forensicsService.$startService();
} catch(e) {
logger.err(`Nodejs lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
await Common.sleep$(1000 * 60);
@@ -195,7 +221,7 @@ class Server {
};
}
setUpWebsocketHandling() {
setUpWebsocketHandling(): void {
if (this.wss) {
websocketHandler.setWebsocketServer(this.wss);
}
@@ -209,19 +235,21 @@ class Server {
});
}
websocketHandler.setupConnectionHandling();
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
blocks.setNewBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
memPool.setMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
fiatConversion.setProgressChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler));
if (config.MEMPOOL.ENABLED) {
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
blocks.setNewAsyncBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
}
priceUpdater.setRatesChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler));
loadingIndicators.setProgressChangedCallback(websocketHandler.handleLoadingChanged.bind(websocketHandler));
}
setUpHttpApiRoutes() {
setUpHttpApiRoutes(): void {
bitcoinRoutes.initRoutes(this.app);
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED) {
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && config.MEMPOOL.ENABLED) {
statisticsRoutes.initRoutes(this.app);
}
if (Common.indexingEnabled()) {
if (Common.indexingEnabled() && config.MEMPOOL.ENABLED) {
miningRoutes.initRoutes(this.app);
}
if (config.BISQ.ENABLED) {
@@ -236,6 +264,26 @@ class Server {
channelsRoutes.initRoutes(this.app);
}
}
healthCheck(): void {
const now = Date.now();
const stats = v8.getHeapStatistics();
this.maxHeapSize = Math.max(stats.used_heap_size, this.maxHeapSize);
const warnThreshold = 0.8 * stats.heap_size_limit;
const byteUnits = getBytesUnit(Math.max(this.maxHeapSize, stats.heap_size_limit));
if (!this.warnedHeapCritical && this.maxHeapSize > warnThreshold) {
this.warnedHeapCritical = true;
logger.warn(`Used ${(this.maxHeapSize / stats.heap_size_limit * 100).toFixed(2)}% of heap limit (${formatBytes(this.maxHeapSize, byteUnits, true)} / ${formatBytes(stats.heap_size_limit, byteUnits)})!`);
}
if (this.lastHeapLogTime === null || (now - this.lastHeapLogTime) > (this.heapLogInterval * 1000)) {
logger.debug(`Memory usage: ${formatBytes(this.maxHeapSize, byteUnits)} / ${formatBytes(stats.heap_size_limit, byteUnits)}`);
this.warnedHeapCritical = false;
this.maxHeapSize = 0;
this.lastHeapLogTime = now;
}
}
}
const server = new Server();
((): Server => new Server())();
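healthCheck above tracks a high-water mark of used heap between debug logs and warns, at most once per interval, when it crosses 80% of the V8 limit. A standalone sketch of the same idea, assuming only Node's built-in v8 module:

import v8 from 'v8';

let maxHeapSeen = 0;
setInterval(() => {
  const stats = v8.getHeapStatistics();
  maxHeapSeen = Math.max(maxHeapSeen, stats.used_heap_size);
  if (maxHeapSeen > 0.8 * stats.heap_size_limit) {
    // report the high-water mark as a percentage of the configured heap limit
    console.warn(`heap high-water mark: ${(maxHeapSeen / stats.heap_size_limit * 100).toFixed(2)}% of limit`);
  }
}, 2500);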

View File

@@ -3,23 +3,71 @@ import blocks from './api/blocks';
import mempool from './api/mempool';
import mining from './api/mining/mining';
import logger from './logger';
import HashratesRepository from './repositories/HashratesRepository';
import bitcoinClient from './api/bitcoin/bitcoin-client';
import priceUpdater from './tasks/price-updater';
import PricesRepository from './repositories/PricesRepository';
export interface CoreIndex {
name: string;
synced: boolean;
best_block_height: number;
}
class Indexer {
runIndexer = true;
indexerRunning = false;
tasksRunning: string[] = [];
coreIndexes: CoreIndex[] = [];
public reindex() {
/**
* Check which Bitcoin Core indexes are available for indexing
*/
public async checkAvailableCoreIndexes(): Promise<void> {
const updatedCoreIndexes: CoreIndex[] = [];
const indexes: any = await bitcoinClient.getIndexInfo();
for (const indexName in indexes) {
const newState = {
name: indexName,
synced: indexes[indexName].synced,
best_block_height: indexes[indexName].best_block_height,
};
logger.info(`Core index '${indexName}' is ${indexes[indexName].synced ? 'synced' : 'not synced'}. Best block height is ${indexes[indexName].best_block_height}`);
updatedCoreIndexes.push(newState);
if (indexName === 'coinstatsindex' && newState.synced === true) {
const previousState = this.isCoreIndexReady('coinstatsindex');
// if (!previousState || previousState.synced === false) {
this.runSingleTask('coinStatsIndex');
// }
}
}
this.coreIndexes = updatedCoreIndexes;
}
/**
* Return the named core index if it exists and is synced, or null if not
*
* @param name
* @returns
*/
public isCoreIndexReady(name: string): CoreIndex | null {
for (const index of this.coreIndexes) {
if (index.name === name && index.synced === true) {
return index;
}
}
return null;
}
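checkAvailableCoreIndexes above consumes Bitcoin Core's getindexinfo RPC. A sketch of the response shape it assumes, typed here for clarity (field names follow Core's documented RPC):

// Per Bitcoin Core's `getindexinfo`: one entry per enabled index.
interface GetIndexInfoResult {
  [indexName: string]: {        // e.g. 'txindex', 'coinstatsindex'
    synced: boolean;            // whether the index is fully caught up
    best_block_height: number;  // height the index has reached
  };
}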
public reindex(): void {
if (Common.indexingEnabled()) {
this.runIndexer = true;
}
}
public async runSingleTask(task: 'blocksPrices') {
public async runSingleTask(task: 'blocksPrices' | 'coinStatsIndex'): Promise<void> {
if (!Common.indexingEnabled()) {
return;
}
@@ -28,20 +76,27 @@ class Indexer {
this.tasksRunning.push(task);
const latestPriceId = await PricesRepository.$getLatestPriceId();
if (priceUpdater.historyInserted === false || latestPriceId === null) {
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`)
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`, logger.tags.mining);
setTimeout(() => {
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask != task)
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
this.runSingleTask('blocksPrices');
}, 10000);
} else {
logger.debug(`Blocks prices indexer will run now`)
logger.debug(`Blocks prices indexer will run now`, logger.tags.mining);
await mining.$indexBlockPrices();
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask != task)
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
}
}
if (task === 'coinStatsIndex' && !this.tasksRunning.includes(task)) {
this.tasksRunning.push(task);
logger.debug(`Indexing coinStatsIndex now`);
await mining.$indexCoinStatsIndex();
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
}
}
public async $run() {
public async $run(): Promise<void> {
if (!Common.indexingEnabled() || this.runIndexer === false ||
this.indexerRunning === true || mempool.hasPriority()
) {
@@ -59,13 +114,15 @@ class Indexer {
logger.debug(`Running mining indexer`);
await this.checkAvailableCoreIndexes();
try {
await priceUpdater.$run();
const chainValid = await blocks.$generateBlockDatabase();
if (chainValid === false) {
// Chain of block hash was invalid, so we need to reindex. Stop here and continue at the next iteration
logger.warn(`The chain of block hash is invalid, re-indexing invalid data in 10 seconds.`);
logger.warn(`The chain of block hash is invalid, re-indexing invalid data in 10 seconds.`, logger.tags.mining);
setTimeout(() => this.reindex(), 10000);
this.indexerRunning = false;
return;
@@ -73,10 +130,10 @@ class Indexer {
this.runSingleTask('blocksPrices');
await mining.$indexDifficultyAdjustments();
await this.$resetHashratesIndexingState(); // TODO - Remove this as it's not efficient
await mining.$generateNetworkHashrateHistory();
await mining.$generatePoolHashrateHistory();
await blocks.$generateBlocksSummariesDatabase();
await blocks.$generateCPFPDatabase();
} catch (e) {
this.indexerRunning = false;
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
@@ -91,16 +148,6 @@ class Indexer {
logger.debug(`Indexing completed. Next run planned at ${new Date(new Date().getTime() + runEvery).toUTCString()}`);
setTimeout(() => this.reindex(), runEvery);
}
async $resetHashratesIndexingState() {
try {
await HashratesRepository.$setLatestRun('last_hashrates_indexing', 0);
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', 0);
} catch (e) {
logger.err(`Cannot reset hashrate indexing timestamps. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new Indexer();

View File

@@ -32,22 +32,27 @@ class Logger {
local7: 23
};
public tags = {
mining: 'Mining',
ln: 'Lightning',
};
// @ts-ignore
public emerg: ((msg: string) => void);
public emerg: ((msg: string, tag?: string) => void);
// @ts-ignore
public alert: ((msg: string) => void);
public alert: ((msg: string, tag?: string) => void);
// @ts-ignore
public crit: ((msg: string) => void);
public crit: ((msg: string, tag?: string) => void);
// @ts-ignore
public err: ((msg: string) => void);
public err: ((msg: string, tag?: string) => void);
// @ts-ignore
public warn: ((msg: string) => void);
public warn: ((msg: string, tag?: string) => void);
// @ts-ignore
public notice: ((msg: string) => void);
public notice: ((msg: string, tag?: string) => void);
// @ts-ignore
public info: ((msg: string) => void);
public info: ((msg: string, tag?: string) => void);
// @ts-ignore
public debug: ((msg: string) => void);
public debug: ((msg: string, tag?: string) => void);
private name = 'mempool';
private client: dgram.Socket;
@@ -66,8 +71,8 @@ class Logger {
private addprio(prio): void {
this[prio] = (function(_this) {
return function(msg) {
return _this.msg(prio, msg);
return function(msg, tag?: string) {
return _this.msg(prio, msg, tag);
};
})(this);
}
@@ -85,7 +90,7 @@ class Logger {
return '';
}
private msg(priority, msg) {
private msg(priority, msg, tag?: string) {
let consolemsg, prionum, syslogmsg;
if (typeof msg === 'string' && msg.length > 0) {
while (msg[msg.length - 1].charCodeAt(0) === 10) {
@@ -94,10 +99,10 @@ class Logger {
}
const network = this.network ? ' <' + this.network + '>' : '';
prionum = Logger.priorities[priority] || Logger.priorities.info;
consolemsg = `${this.ts()} [${process.pid}] ${priority.toUpperCase()}:${network} ${msg}`;
consolemsg = `${this.ts()} [${process.pid}] ${priority.toUpperCase()}:${network} ${tag ? '[' + tag + '] ' : ''}${msg}`;
if (config.SYSLOG.ENABLED && Logger.priorities[priority] <= Logger.priorities[config.SYSLOG.MIN_PRIORITY]) {
syslogmsg = `<${(Logger.facilities[config.SYSLOG.FACILITY] * 8 + prionum)}> ${this.name}[${process.pid}]: ${priority.toUpperCase()}${network} ${msg}`;
syslogmsg = `<${(Logger.facilities[config.SYSLOG.FACILITY] * 8 + prionum)}> ${this.name}[${process.pid}]: ${priority.toUpperCase()}${network} ${tag ? '[' + tag + '] ' : ''}${msg}`;
this.syslog(syslogmsg);
}
if (Logger.priorities[priority] > Logger.priorities[config.MEMPOOL.STDOUT_LOG_MIN_PRIORITY]) {
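With the optional tag parameter threaded through above, callers can label log lines by subsystem. A usage sketch; the output format follows the consolemsg template above, with the exact timestamp and pid varying per process:

logger.warn('hashrate indexing fell behind', logger.tags.mining);
// e.g. "2023-03-20T12:00:00.000Z [1234] WARN: [Mining] hashrate indexing fell behind"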

View File

@@ -1,7 +1,10 @@
import { IEsploraApi } from './api/bitcoin/esplora-api.interface';
import { OrphanedBlock } from './api/chain-tips';
import { HeapNode } from './utils/pairing-heap';
export interface PoolTag {
id: number; // mysql row id
id: number;
uniqueId: number;
name: string;
link: string;
regexes: string; // JSON array
@@ -15,6 +18,7 @@ export interface PoolInfo {
link: string;
blockCount: number;
slug: string;
avgMatchRate: number | null;
}
export interface PoolStats extends PoolInfo {
@@ -27,10 +31,16 @@ export interface BlockAudit {
height: number,
hash: string,
missingTxs: string[],
freshTxs: string[],
addedTxs: string[],
matchRate: number,
}
export interface AuditScore {
hash: string,
matchRate?: number,
}
export interface MempoolBlock {
blockSize: number;
blockVSize: number;
@@ -56,6 +66,7 @@ interface VinStrippedToScriptsig {
interface VoutStrippedToScriptPubkey {
scriptpubkey_address: string | undefined;
scriptpubkey_asm: string | undefined;
value: number;
}
@@ -65,17 +76,57 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
firstSeen?: number;
effectiveFeePerVsize: number;
ancestors?: Ancestor[];
descendants?: Ancestor[];
bestDescendant?: BestDescendant | null;
cpfpChecked?: boolean;
deleteAfter?: number;
}
interface Ancestor {
export interface AuditTransaction {
txid: string;
fee: number;
weight: number;
feePerVsize: number;
effectiveFeePerVsize: number;
vin: string[];
relativesSet: boolean;
ancestorMap: Map<string, AuditTransaction>;
children: Set<AuditTransaction>;
ancestorFee: number;
ancestorWeight: number;
score: number;
used: boolean;
modified: boolean;
modifiedNode: HeapNode<AuditTransaction>;
}
export interface ThreadTransaction {
txid: string;
fee: number;
weight: number;
feePerVsize: number;
effectiveFeePerVsize?: number;
vin: string[];
cpfpRoot?: string;
cpfpChecked?: boolean;
}
export interface Ancestor {
txid: string;
weight: number;
fee: number;
}
export interface TransactionSet {
fee: number;
weight: number;
score: number;
children?: Set<string>;
available?: boolean;
modified?: boolean;
modifiedNode?: HeapNode<string>;
}
interface BestDescendant {
txid: string;
weight: number;
@@ -84,7 +135,9 @@ interface BestDescendant {
export interface CpfpInfo {
ancestors: Ancestor[];
bestDescendant: BestDescendant | null;
bestDescendant?: BestDescendant | null;
descendants?: Ancestor[];
effectiveFeePerVsize?: number;
}
export interface TransactionStripped {
@@ -95,23 +148,45 @@ export interface TransactionStripped {
}
export interface BlockExtension {
totalFees?: number;
medianFee?: number;
feeRange?: number[];
reward?: number;
coinbaseTx?: TransactionMinerInfo;
matchRate?: number;
pool?: {
id: number;
totalFees: number;
medianFee: number; // median fee rate
feeRange: number[]; // fee rate percentiles
reward: number;
matchRate: number | null;
similarity?: number;
pool: {
id: number; // Note: this is the pool's `unique_id`, not to be confused with the auto-increment `id`
name: string;
slug: string;
};
avgFee?: number;
avgFeeRate?: number;
coinbaseRaw?: string;
usd?: number | null;
avgFee: number;
avgFeeRate: number;
coinbaseRaw: string;
orphans: OrphanedBlock[] | null;
coinbaseAddress: string | null;
coinbaseSignature: string | null;
coinbaseSignatureAscii: string | null;
virtualSize: number;
avgTxSize: number;
totalInputs: number;
totalOutputs: number;
totalOutputAmt: number;
medianFeeAmt: number | null; // median fee in sats
feePercentiles: number[] | null, // fee percentiles in sats
segwitTotalTxs: number;
segwitTotalSize: number;
segwitTotalWeight: number;
header: string;
utxoSetChange: number;
// Requires coinstatsindex, will be set to NULL otherwise
utxoSetSize: number | null;
totalInputAmt: number | null;
}
/**
* Note: Everything that is added in here will be automatically returned through
* /api/v1/block and /api/v1/blocks APIs
*/
export interface BlockExtended extends IEsploraApi.Block {
extras: BlockExtension;
}
@@ -219,12 +294,12 @@ interface RequiredParams {
}
export interface ILoadingIndicators { [name: string]: number; }
export interface IConversionRates { [currency: string]: number; }
export interface IBackendInfo {
hostname: string;
gitCommit: string;
version: string;
lightning: boolean;
}
export interface IDifficultyAdjustment {
@@ -288,4 +363,4 @@ export interface IOldestNodes {
updatedAt?: number,
city?: any,
country?: any,
}
}

View File

@@ -1,13 +1,14 @@
import blocks from '../api/blocks';
import DB from '../database';
import logger from '../logger';
import { BlockAudit } from '../mempool.interfaces';
import { BlockAudit, AuditScore } from '../mempool.interfaces';
class BlocksAuditRepositories {
public async $saveAudit(audit: BlockAudit): Promise<void> {
try {
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, match_rate)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), audit.matchRate]);
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, match_rate)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), audit.matchRate]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
@@ -51,24 +52,58 @@ class BlocksAuditRepositories {
const [rows]: any[] = await DB.query(
`SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
blocks.weight, blocks.tx_count,
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, match_rate as matchRate
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, fresh_txs as freshTxs, match_rate as matchRate
FROM blocks_audits
JOIN blocks ON blocks.hash = blocks_audits.hash
JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
WHERE blocks_audits.hash = "${hash}"
`);
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
rows[0].transactions = JSON.parse(rows[0].transactions);
rows[0].template = JSON.parse(rows[0].template);
if (rows.length) {
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
rows[0].transactions = JSON.parse(rows[0].transactions);
rows[0].template = JSON.parse(rows[0].template);
if (rows[0].transactions.length) {
return rows[0];
}
}
return null;
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getBlockAuditScore(hash: string): Promise<AuditScore> {
try {
const [rows]: any[] = await DB.query(
`SELECT hash, match_rate as matchRate
FROM blocks_audits
WHERE blocks_audits.hash = "${hash}"
`);
return rows[0];
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getBlockAuditScores(maxHeight: number, minHeight: number): Promise<AuditScore[]> {
try {
const [rows]: any[] = await DB.query(
`SELECT hash, match_rate as matchRate
FROM blocks_audits
WHERE blocks_audits.height BETWEEN ? AND ?
`, [minHeight, maxHeight]);
return rows;
} catch (e: any) {
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
}
export default new BlocksAuditRepositories();

View File

@@ -1,34 +1,98 @@
import { BlockExtended, BlockPrice } from '../mempool.interfaces';
import { BlockExtended, BlockExtension, BlockPrice } from '../mempool.interfaces';
import DB from '../database';
import logger from '../logger';
import { Common } from '../api/common';
import { prepareBlock } from '../utils/blocks-utils';
import PoolsRepository from './PoolsRepository';
import HashratesRepository from './HashratesRepository';
import { escape } from 'mysql2';
import BlocksSummariesRepository from './BlocksSummariesRepository';
import DifficultyAdjustmentsRepository from './DifficultyAdjustmentsRepository';
import bitcoinClient from '../api/bitcoin/bitcoin-client';
import config from '../config';
import chainTips from '../api/chain-tips';
import blocks from '../api/blocks';
import BlocksAuditsRepository from './BlocksAuditsRepository';
const BLOCK_DB_FIELDS = `
blocks.hash AS id,
blocks.height,
blocks.version,
UNIX_TIMESTAMP(blocks.blockTimestamp) AS timestamp,
blocks.bits,
blocks.nonce,
blocks.difficulty,
blocks.merkle_root,
blocks.tx_count,
blocks.size,
blocks.weight,
blocks.previous_block_hash AS previousblockhash,
UNIX_TIMESTAMP(blocks.median_timestamp) AS mediantime,
blocks.fees AS totalFees,
blocks.median_fee AS medianFee,
blocks.fee_span AS feeRange,
blocks.reward,
pools.unique_id AS poolId,
pools.name AS poolName,
pools.slug AS poolSlug,
blocks.avg_fee AS avgFee,
blocks.avg_fee_rate AS avgFeeRate,
blocks.coinbase_raw AS coinbaseRaw,
blocks.coinbase_address AS coinbaseAddress,
blocks.coinbase_signature AS coinbaseSignature,
blocks.coinbase_signature_ascii AS coinbaseSignatureAscii,
blocks.avg_tx_size AS avgTxSize,
blocks.total_inputs AS totalInputs,
blocks.total_outputs AS totalOutputs,
blocks.total_output_amt AS totalOutputAmt,
blocks.median_fee_amt AS medianFeeAmt,
blocks.fee_percentiles AS feePercentiles,
blocks.segwit_total_txs AS segwitTotalTxs,
blocks.segwit_total_size AS segwitTotalSize,
blocks.segwit_total_weight AS segwitTotalWeight,
blocks.header,
blocks.utxoset_change AS utxoSetChange,
blocks.utxoset_size AS utxoSetSize,
blocks.total_input_amt AS totalInputAmt
`;
class BlocksRepository {
/**
* Save indexed block data in the database
*/
public async $saveBlockInDatabase(block: BlockExtended) {
const truncatedCoinbaseSignature = block?.extras?.coinbaseSignature?.substring(0, 500);
const truncatedCoinbaseSignatureAscii = block?.extras?.coinbaseSignatureAscii?.substring(0, 500);
try {
const query = `INSERT INTO blocks(
height, hash, blockTimestamp, size,
weight, tx_count, coinbase_raw, difficulty,
pool_id, fees, fee_span, median_fee,
reward, version, bits, nonce,
merkle_root, previous_block_hash, avg_fee, avg_fee_rate
height, hash, blockTimestamp, size,
weight, tx_count, coinbase_raw, difficulty,
pool_id, fees, fee_span, median_fee,
reward, version, bits, nonce,
merkle_root, previous_block_hash, avg_fee, avg_fee_rate,
median_timestamp, header, coinbase_address,
coinbase_signature, utxoset_size, utxoset_change, avg_tx_size,
total_inputs, total_outputs, total_input_amt, total_output_amt,
fee_percentiles, segwit_total_txs, segwit_total_size, segwit_total_weight,
median_fee_amt, coinbase_signature_ascii
) VALUE (
?, ?, FROM_UNIXTIME(?), ?,
?, ?, ?, ?,
?, ?, ?, ?,
?, ?, ?, ?,
?, ?, ?, ?
?, ?, ?, ?,
FROM_UNIXTIME(?), ?, ?,
?, ?, ?, ?,
?, ?, ?, ?,
?, ?, ?, ?,
?, ?
)`;
const poolDbId = await PoolsRepository.$getPoolByUniqueId(block.extras.pool.id);
if (!poolDbId) {
throw Error(`Could not find a mining pool with the unique_id = ${block.extras.pool.id}. This error should never be printed.`);
}
const params: any[] = [
block.height,
block.id,
@@ -38,7 +102,7 @@ class BlocksRepository {
block.tx_count,
block.extras.coinbaseRaw,
block.difficulty,
block.extras.pool?.id, // Should always be set to something
poolDbId.id,
block.extras.totalFees,
JSON.stringify(block.extras.feeRange),
block.extras.medianFee,
@@ -50,19 +114,63 @@ class BlocksRepository {
block.previousblockhash,
block.extras.avgFee,
block.extras.avgFeeRate,
block.mediantime,
block.extras.header,
block.extras.coinbaseAddress,
truncatedCoinbaseSignature,
block.extras.utxoSetSize,
block.extras.utxoSetChange,
block.extras.avgTxSize,
block.extras.totalInputs,
block.extras.totalOutputs,
block.extras.totalInputAmt,
block.extras.totalOutputAmt,
block.extras.feePercentiles ? JSON.stringify(block.extras.feePercentiles) : null,
block.extras.segwitTotalTxs,
block.extras.segwitTotalSize,
block.extras.segwitTotalWeight,
block.extras.medianFeeAmt,
truncatedCoinbaseSignatureAscii,
];
await DB.query(query, params);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`$saveBlockInDatabase() - Block ${block.height} has already been indexed, ignoring`);
logger.debug(`$saveBlockInDatabase() - Block ${block.height} has already been indexed, ignoring`, logger.tags.mining);
} else {
logger.err('Cannot save indexed block into db. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot save indexed block into db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
}
/**
* Save newly indexed data from core coinstatsindex
*
* @param blockHash
* @param utxoSetSize
* @param totalInputAmt
*/
public async $updateCoinStatsIndexData(blockHash: string, utxoSetSize: number,
totalInputAmt: number
) : Promise<void> {
try {
const query = `
UPDATE blocks
SET utxoset_size = ?, total_input_amt = ?
WHERE hash = ?
`;
const params: any[] = [
utxoSetSize,
totalInputAmt,
blockHash
];
await DB.query(query, params);
} catch (e: any) {
logger.err('Cannot update indexed block coinstatsindex. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get all block height that have not been indexed between [startHeight, endHeight]
*/
@@ -222,6 +330,55 @@ class BlocksRepository {
}
}
/**
* Get average block health for all blocks for a single pool
*/
public async $getAvgBlockHealthPerPoolId(poolId: number): Promise<number> {
const params: any[] = [];
const query = `
SELECT AVG(blocks_audits.match_rate) AS avg_match_rate
FROM blocks
JOIN blocks_audits ON blocks.height = blocks_audits.height
WHERE blocks.pool_id = ?
`;
params.push(poolId);
try {
const [rows] = await DB.query(query, params);
if (!rows[0] || !rows[0].avg_match_rate) {
return 0;
}
return Math.round(rows[0].avg_match_rate * 100) / 100;
} catch (e) {
logger.err(`Cannot get average block health for pool id ${poolId}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get the total block reward earned by a single pool
*/
public async $getTotalRewardForPoolId(poolId: number): Promise<number> {
const params: any[] = [];
const query = `
SELECT sum(reward) as total_reward
FROM blocks
WHERE blocks.pool_id = ?
`;
params.push(poolId);
try {
const [rows] = await DB.query(query, params);
if (!rows[0] || !rows[0].total_reward) {
return 0;
}
return rows[0].total_reward;
} catch (e) {
logger.err(`Cannot get total reward for pool id ${poolId}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Get the oldest indexed block
*/
@@ -248,34 +405,17 @@ class BlocksRepository {
/**
* Get blocks mined by a specific mining pool
*/
public async $getBlocksByPool(slug: string, startHeight?: number): Promise<object[]> {
public async $getBlocksByPool(slug: string, startHeight?: number): Promise<BlockExtended[]> {
const pool = await PoolsRepository.$getPool(slug);
if (!pool) {
throw new Error('This mining pool does not exist ' + escape(slug));
}
const params: any[] = [];
let query = ` SELECT
blocks.height,
hash as id,
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
size,
weight,
tx_count,
coinbase_raw,
difficulty,
fees,
fee_span,
median_fee,
reward,
version,
bits,
nonce,
merkle_root,
previous_block_hash as previousblockhash,
avg_fee,
avg_fee_rate
let query = `
SELECT ${BLOCK_DB_FIELDS}
FROM blocks
JOIN pools ON blocks.pool_id = pools.id
WHERE pool_id = ?`;
params.push(pool.id);
@@ -288,11 +428,11 @@ class BlocksRepository {
LIMIT 10`;
try {
const [rows] = await DB.query(query, params);
const [rows]: any[] = await DB.query(query, params);
const blocks: BlockExtended[] = [];
for (const block of <object[]>rows) {
blocks.push(prepareBlock(block));
for (const block of rows) {
blocks.push(await this.formatDbBlockIntoExtendedBlock(block));
}
return blocks;
@@ -305,46 +445,21 @@ class BlocksRepository {
/**
* Get one block by height
*/
public async $getBlockByHeight(height: number): Promise<object | null> {
public async $getBlockByHeight(height: number): Promise<BlockExtended | null> {
try {
const [rows]: any[] = await DB.query(`SELECT
blocks.height,
hash,
hash as id,
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
size,
weight,
tx_count,
coinbase_raw,
difficulty,
pools.id as pool_id,
pools.name as pool_name,
pools.link as pool_link,
pools.slug as pool_slug,
pools.addresses as pool_addresses,
pools.regexes as pool_regexes,
fees,
fee_span,
median_fee,
reward,
version,
bits,
nonce,
merkle_root,
previous_block_hash as previousblockhash,
avg_fee,
avg_fee_rate
const [rows]: any[] = await DB.query(`
SELECT ${BLOCK_DB_FIELDS}
FROM blocks
JOIN pools ON blocks.pool_id = pools.id
WHERE blocks.height = ${height}
`);
WHERE blocks.height = ?`,
[height]
);
if (rows.length <= 0) {
return null;
}
rows[0].fee_span = JSON.parse(rows[0].fee_span);
return rows[0];
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
} catch (e) {
logger.err(`Cannot get indexed block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
@@ -357,10 +472,7 @@ class BlocksRepository {
public async $getBlockByHash(hash: string): Promise<object | null> {
try {
const query = `
SELECT *, blocks.height, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp, hash as id,
pools.id as pool_id, pools.name as pool_name, pools.link as pool_link, pools.slug as pool_slug,
pools.addresses as pool_addresses, pools.regexes as pool_regexes,
previous_block_hash as previousblockhash
SELECT ${BLOCK_DB_FIELDS}
FROM blocks
JOIN pools ON blocks.pool_id = pools.id
WHERE hash = ?;
@@ -370,9 +482,8 @@ class BlocksRepository {
if (rows.length <= 0) {
return null;
}
rows[0].fee_span = JSON.parse(rows[0].fee_span);
return rows[0];
return await this.formatDbBlockIntoExtendedBlock(rows[0]);
} catch (e) {
logger.err(`Cannot get indexed block ${hash}. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
@@ -392,6 +503,36 @@ class BlocksRepository {
}
}
/**
* Get the most recent block at or before a given timestamp
* @param timestamp number unix time in seconds
* @returns The height, hash and timestamp of that block (its timestamp may differ from the given one)
*/
public async $getBlockHeightFromTimestamp(
timestamp: number,
): Promise<{ height: number; hash: string; timestamp: number }> {
try {
// Get the most recent block at or before the given timestamp
const query = `SELECT height, hash, blockTimestamp as timestamp FROM blocks
WHERE blockTimestamp <= FROM_UNIXTIME(?)
ORDER BY blockTimestamp DESC
LIMIT 1`;
const params = [timestamp];
const [rows]: any[][] = await DB.query(query, params);
if (rows.length === 0) {
throw new Error(`No block was found before timestamp ${timestamp}`);
}
return rows[0];
} catch (e) {
logger.err(
'Cannot get block height from timestamp from the db. Reason: ' +
(e instanceof Error ? e.message : e),
);
throw e;
}
}
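A usage sketch for the lookup above, inside an async context; the import name for the repository instance is assumed:

// Resolve a unix timestamp (seconds) to the closest indexed block at or before it.
const block = await blocksRepository.$getBlockHeightFromTimestamp(1679200000);
console.log(block.height, block.hash, block.timestamp);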
/**
* Return blocks height
*/
@@ -433,8 +574,15 @@ class BlocksRepository {
public async $validateChain(): Promise<boolean> {
try {
const start = new Date().getTime();
const [blocks]: any[] = await DB.query(`SELECT height, hash, previous_block_hash,
UNIX_TIMESTAMP(blockTimestamp) as timestamp FROM blocks ORDER BY height`);
const [blocks]: any[] = await DB.query(`
SELECT
height,
hash,
previous_block_hash,
UNIX_TIMESTAMP(blockTimestamp) AS timestamp
FROM blocks
ORDER BY height
`);
let partialMsg = false;
let idx = 1;
@@ -632,6 +780,39 @@ class BlocksRepository {
}
}
/**
* Get a list of blocks that have not had CPFP data indexed
*/
public async $getCPFPUnindexedBlocks(): Promise<any[]> {
try {
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
const currentBlockHeight = blockchainInfo.blocks;
let indexingBlockAmount = Math.min(config.MEMPOOL.INDEXING_BLOCKS_AMOUNT, currentBlockHeight);
if (indexingBlockAmount <= -1) {
indexingBlockAmount = currentBlockHeight + 1;
}
const minHeight = Math.max(0, currentBlockHeight - indexingBlockAmount + 1);
const [rows]: any[] = await DB.query(`
SELECT height
FROM compact_cpfp_clusters
WHERE height <= ? AND height >= ?
GROUP BY height
ORDER BY height DESC;
`, [currentBlockHeight, minHeight]);
const indexedHeights = {};
rows.forEach((row) => { indexedHeights[row.height] = true; });
const allHeights: number[] = Array.from(Array(currentBlockHeight - minHeight + 1).keys(), n => n + minHeight).reverse();
const unindexedHeights = allHeights.filter(x => !indexedHeights[x]);
return unindexedHeights;
} catch (e) {
logger.err('Cannot fetch CPFP unindexed blocks. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Return the oldest block from a consecutive chain of block from the most recent one
*/
@@ -676,7 +857,7 @@ class BlocksRepository {
try {
let query = `INSERT INTO blocks_prices(height, price_id) VALUES`;
for (const price of blockPrices) {
query += ` (${price.height}, ${price.priceId}),`
query += ` (${price.height}, ${price.priceId}),`;
}
query = query.slice(0, -1);
await DB.query(query);
@@ -689,6 +870,132 @@ class BlocksRepository {
}
}
}
/**
* Get all indexed blocks with missing coinstatsindex data
*/
public async $getBlocksMissingCoinStatsIndex(maxHeight: number, minHeight: number): Promise<any> {
try {
const [blocks] = await DB.query(`
SELECT height, hash
FROM blocks
WHERE height >= ${minHeight} AND height <= ${maxHeight} AND
(utxoset_size IS NULL OR total_input_amt IS NULL)
`);
return blocks;
} catch (e) {
logger.err(`Cannot get blocks with missing coinstatsindex. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Save indexed fee percentiles and median fee to avoid recomputing them later
*
* @param id
* @param feePercentiles
*/
public async $saveFeePercentilesForBlockId(id: string, feePercentiles: number[]): Promise<void> {
try {
await DB.query(`
UPDATE blocks SET fee_percentiles = ?, median_fee_amt = ?
WHERE hash = ?`,
[JSON.stringify(feePercentiles), feePercentiles[3], id]
);
} catch (e) {
logger.err(`Cannot update block fee_percentiles. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Convert a MySQL block row into a BlockExtended. Note that dbBlk
* must contain the fields selected by BLOCK_DB_FIELDS
*
* @param dbBlk
*/
private async formatDbBlockIntoExtendedBlock(dbBlk: any): Promise<BlockExtended> {
const blk: Partial<BlockExtended> = {};
const extras: Partial<BlockExtension> = {};
// IEsploraApi.Block
blk.id = dbBlk.id;
blk.height = dbBlk.height;
blk.version = dbBlk.version;
blk.timestamp = dbBlk.timestamp;
blk.bits = dbBlk.bits;
blk.nonce = dbBlk.nonce;
blk.difficulty = dbBlk.difficulty;
blk.merkle_root = dbBlk.merkle_root;
blk.tx_count = dbBlk.tx_count;
blk.size = dbBlk.size;
blk.weight = dbBlk.weight;
blk.previousblockhash = dbBlk.previousblockhash;
blk.mediantime = dbBlk.mediantime;
// BlockExtension
extras.totalFees = dbBlk.totalFees;
extras.medianFee = dbBlk.medianFee;
extras.feeRange = JSON.parse(dbBlk.feeRange);
extras.reward = dbBlk.reward;
extras.pool = {
id: dbBlk.poolId,
name: dbBlk.poolName,
slug: dbBlk.poolSlug,
};
extras.avgFee = dbBlk.avgFee;
extras.avgFeeRate = dbBlk.avgFeeRate;
extras.coinbaseRaw = dbBlk.coinbaseRaw;
extras.coinbaseAddress = dbBlk.coinbaseAddress;
extras.coinbaseSignature = dbBlk.coinbaseSignature;
extras.coinbaseSignatureAscii = dbBlk.coinbaseSignatureAscii;
extras.avgTxSize = dbBlk.avgTxSize;
extras.totalInputs = dbBlk.totalInputs;
extras.totalOutputs = dbBlk.totalOutputs;
extras.totalOutputAmt = dbBlk.totalOutputAmt;
extras.medianFeeAmt = dbBlk.medianFeeAmt;
extras.feePercentiles = JSON.parse(dbBlk.feePercentiles);
extras.segwitTotalTxs = dbBlk.segwitTotalTxs;
extras.segwitTotalSize = dbBlk.segwitTotalSize;
extras.segwitTotalWeight = dbBlk.segwitTotalWeight;
extras.header = dbBlk.header;
extras.utxoSetChange = dbBlk.utxoSetChange;
extras.utxoSetSize = dbBlk.utxoSetSize;
extras.totalInputAmt = dbBlk.totalInputAmt;
extras.virtualSize = dbBlk.weight / 4.0;
// Re-org can happen after indexing so we need to always get the
// latest state from core
extras.orphans = chainTips.getOrphanedBlocksAtHeight(dbBlk.height);
// Match rate is not part of the blocks table, but it is part of APIs so we must include it
extras.matchRate = null;
if (config.MEMPOOL.AUDIT) {
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(dbBlk.id);
if (auditScore != null) {
extras.matchRate = auditScore.matchRate;
}
}
// If we're missing block summary related field, check if we can populate them on the fly now
if (Common.blocksSummariesIndexingEnabled() &&
(extras.medianFeeAmt === null || extras.feePercentiles === null))
{
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
if (extras.feePercentiles === null) {
const block = await bitcoinClient.getBlock(dbBlk.id, 2);
const summary = blocks.summarizeBlock(block);
await BlocksSummariesRepository.$saveSummary({ height: block.height, mined: summary });
extras.feePercentiles = await BlocksSummariesRepository.$getFeePercentilesByBlockId(dbBlk.id);
}
if (extras.feePercentiles !== null) {
extras.medianFeeAmt = extras.feePercentiles[3];
}
}
blk.extras = <BlockExtension>extras;
return <BlockExtended>blk;
}
}
export default new BlocksRepository();

View File

@@ -17,19 +17,16 @@ class BlocksSummariesRepository {
return undefined;
}
public async $saveSummary(params: { height: number, mined?: BlockSummary, template?: BlockSummary}) {
const blockId = params.mined?.id ?? params.template?.id;
public async $saveSummary(params: { height: number, mined?: BlockSummary}) {
const blockId = params.mined?.id;
try {
const [dbSummary]: any[] = await DB.query(`SELECT * FROM blocks_summaries WHERE id = "${blockId}"`);
if (dbSummary.length === 0) { // First insertion
await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?, ?)`, [
params.height, blockId, JSON.stringify(params.mined?.transactions ?? []), JSON.stringify(params.template?.transactions ?? [])
]);
} else if (params.mined !== undefined) { // Update mined block summary
await DB.query(`UPDATE blocks_summaries SET transactions = ? WHERE id = "${params.mined.id}"`, [JSON.stringify(params.mined.transactions)]);
} else if (params.template !== undefined) { // Update template block summary
await DB.query(`UPDATE blocks_summaries SET template = ? WHERE id = "${params.template.id}"`, [JSON.stringify(params.template?.transactions)]);
}
const transactions = JSON.stringify(params.mined?.transactions || []);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
transactions = ?
`, [params.height, blockId, transactions, '[]', transactions]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
@@ -40,6 +37,26 @@ class BlocksSummariesRepository {
}
}
public async $saveTemplate(params: { height: number, template: BlockSummary}) {
const blockId = params.template?.id;
try {
const transactions = JSON.stringify(params.template?.transactions || []);
await DB.query(`
INSERT INTO blocks_summaries (height, id, transactions, template)
VALUE (?, ?, ?, ?)
ON DUPLICATE KEY UPDATE
template = ?
`, [params.height, blockId, '[]', transactions, transactions]);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save block template for ${blockId} because it has already been indexed, ignoring`);
} else {
logger.debug(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
throw e;
}
}
}
public async $getIndexedSummariesId(): Promise<string[]> {
try {
const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);
@@ -63,6 +80,48 @@ class BlocksSummariesRepository {
logger.err('Cannot delete indexed blocks summaries. Reason: ' + (e instanceof Error ? e.message : e));
}
}
/**
* Get the fee percentiles if the block has already been indexed, null otherwise
*
* @param id
*/
public async $getFeePercentilesByBlockId(id: string): Promise<number[] | null> {
try {
const [rows]: any[] = await DB.query(`
SELECT transactions
FROM blocks_summaries
WHERE id = ?`,
[id]
);
if (rows === null || rows.length === 0) {
return null;
}
const transactions = JSON.parse(rows[0].transactions);
if (transactions === null) {
return null;
}
transactions.shift(); // Ignore coinbase
transactions.sort((a: any, b: any) => a.fee - b.fee);
const fees = transactions.map((t: any) => t.fee);
return [
fees[0] ?? 0, // min
fees[Math.max(0, Math.floor(fees.length * 0.1) - 1)] ?? 0, // 10th
fees[Math.max(0, Math.floor(fees.length * 0.25) - 1)] ?? 0, // 25th
fees[Math.max(0, Math.floor(fees.length * 0.5) - 1)] ?? 0, // median
fees[Math.max(0, Math.floor(fees.length * 0.75) - 1)] ?? 0, // 75th
fees[Math.max(0, Math.floor(fees.length * 0.9) - 1)] ?? 0, // 90th
fees[fees.length - 1] ?? 0, // max
];
} catch (e) {
logger.err(`Cannot get block summaries transactions. Reason: ` + (e instanceof Error ? e.message : e));
return null;
}
}
}
export default new BlocksSummariesRepository();
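
Worked example of the percentile indexing in $getFeePercentilesByBlockId: for a
sorted fee list of length n, percentile p maps to index max(0, floor(n * p) - 1).
A small self-contained sketch:

// Percentile helper mirroring the index arithmetic used above
function percentile(sortedFees: number[], p: number): number {
  return sortedFees[Math.max(0, Math.floor(sortedFees.length * p) - 1)] ?? 0;
}

const fees = [10, 20, 30, 40];       // non-coinbase fees, sorted ascending
console.log(percentile(fees, 0.5));  // floor(4 * 0.5) - 1 = 1 -> 20 (median)
console.log(percentile(fees, 0.9));  // floor(4 * 0.9) - 1 = 2 -> 30 (90th)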

View File

@@ -0,0 +1,232 @@
import cluster, { Cluster } from 'cluster';
import { RowDataPacket } from 'mysql2';
import DB from '../database';
import logger from '../logger';
import { Ancestor } from '../mempool.interfaces';
import transactionRepository from '../repositories/TransactionRepository';
class CpfpRepository {
public async $saveCluster(clusterRoot: string, height: number, txs: Ancestor[], effectiveFeePerVsize: number): Promise<boolean> {
if (!txs[0]) {
return false;
}
// skip clusters of transactions with the same fees
const roundedEffectiveFee = Math.round(effectiveFeePerVsize * 100) / 100;
const equalFee = txs.reduce((acc, tx) => {
return (acc && Math.round(((tx.fee || 0) / (tx.weight / 4)) * 100) / 100 === roundedEffectiveFee);
}, true);
if (equalFee) {
return false;
}
try {
const packedTxs = Buffer.from(this.pack(txs));
await DB.query(
`
INSERT INTO compact_cpfp_clusters(root, height, txs, fee_rate)
VALUE (UNHEX(?), ?, ?, ?)
ON DUPLICATE KEY UPDATE
height = ?,
txs = ?,
fee_rate = ?
`,
[clusterRoot, height, packedTxs, effectiveFeePerVsize, height, packedTxs, effectiveFeePerVsize]
);
const maxChunk = 10;
let chunkIndex = 0;
while (chunkIndex < txs.length) {
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk).map(tx => {
return { txid: tx.txid, cluster: clusterRoot };
});
await transactionRepository.$batchSetCluster(chunk);
chunkIndex += maxChunk;
}
return true;
} catch (e: any) {
logger.err(`Cannot save cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $batchSaveClusters(clusters: { root: string, height: number, txs: any, effectiveFeePerVsize: number}[]): Promise<boolean> {
try {
const clusterValues: any[] = [];
const txs: any[] = [];
for (const cluster of clusters) {
if (cluster.txs?.length > 1) {
const roundedEffectiveFee = Math.round(cluster.effectiveFeePerVsize * 100) / 100;
const equalFee = cluster.txs.reduce((acc, tx) => {
return (acc && Math.round(((tx.fee || 0) / (tx.weight / 4)) * 100) / 100 === roundedEffectiveFee);
}, true);
if (!equalFee) {
clusterValues.push([
cluster.root,
cluster.height,
Buffer.from(this.pack(cluster.txs)),
cluster.effectiveFeePerVsize
]);
for (const tx of cluster.txs) {
txs.push({ txid: tx.txid, cluster: cluster.root });
}
}
}
}
if (!clusterValues.length) {
return false;
}
const maxChunk = 100;
let chunkIndex = 0;
// insert transactions in batches of up to 100 rows
while (chunkIndex < txs.length) {
const chunk = txs.slice(chunkIndex, chunkIndex + maxChunk);
await transactionRepository.$batchSetCluster(chunk);
chunkIndex += maxChunk;
}
chunkIndex = 0;
// insert clusters in batches of up to 100 rows
while (chunkIndex < clusterValues.length) {
const chunk = clusterValues.slice(chunkIndex, chunkIndex + maxChunk);
let query = `
INSERT IGNORE INTO compact_cpfp_clusters(root, height, txs, fee_rate)
VALUES
`;
query += chunk.map(() => {
return (' (UNHEX(?), ?, ?, ?)');
}) + ';';
const values = chunk.flat();
await DB.query(
query,
values
);
chunkIndex += maxChunk;
}
return true;
} catch (e: any) {
logger.err(`Cannot save cpfp clusters into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getCluster(clusterRoot: string): Promise<Cluster | void> {
const [clusterRows]: any = await DB.query(
`
SELECT *
FROM compact_cpfp_clusters
WHERE root = UNHEX(?)
`,
[clusterRoot]
);
const cluster = clusterRows[0];
if (cluster?.txs) {
cluster.txs = this.unpack(cluster.txs);
return cluster;
}
return;
}
public async $deleteClustersFrom(height: number): Promise<void> {
logger.info(`Delete newer cpfp clusters from height ${height} from the database`);
try {
const [rows] = await DB.query(
`
SELECT txs, height, root from compact_cpfp_clusters
WHERE height >= ?
`,
[height]
) as RowDataPacket[][];
if (rows?.length) {
for (const clusterToDelete of rows) {
const txs = this.unpack(clusterToDelete?.txs);
for (const tx of txs) {
await transactionRepository.$removeTransaction(tx.txid);
}
}
}
await DB.query(
`
DELETE from compact_cpfp_clusters
WHERE height >= ?
`,
[height]
);
} catch (e: any) {
logger.err(`Cannot delete cpfp clusters from db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
// insert a dummy row to mark that we've indexed as far as this block
public async $insertProgressMarker(height: number): Promise<void> {
try {
const [rows]: any = await DB.query(
`
SELECT root
FROM compact_cpfp_clusters
WHERE height = ?
`,
[height]
);
if (!rows?.length) {
const rootBuffer = Buffer.alloc(32);
rootBuffer.writeInt32LE(height);
await DB.query(
`
INSERT INTO compact_cpfp_clusters(root, height, fee_rate)
VALUE (?, ?, ?)
`,
[rootBuffer, height, 0]
);
}
} catch (e: any) {
logger.err(`Cannot insert cpfp progress marker. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public pack(txs: Ancestor[]): ArrayBuffer {
const buf = new ArrayBuffer(44 * txs.length);
const view = new DataView(buf);
txs.forEach((tx, i) => {
const offset = i * 44;
for (let x = 0; x < 32; x++) {
// store txid in little-endian
view.setUint8(offset + (31 - x), parseInt(tx.txid.slice(x * 2, (x * 2) + 2), 16));
}
view.setUint32(offset + 32, tx.weight);
view.setBigUint64(offset + 36, BigInt(Math.round(tx.fee)));
});
return buf;
}
public unpack(buf: Buffer): Ancestor[] {
if (!buf) {
return [];
}
try {
const arrayBuffer = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
const txs: Ancestor[] = [];
const view = new DataView(arrayBuffer);
for (let offset = 0; offset < arrayBuffer.byteLength; offset += 44) {
const txid = Array.from(new Uint8Array(arrayBuffer, offset, 32)).reverse().map(b => b.toString(16).padStart(2, '0')).join('');
const weight = view.getUint32(offset + 32);
const fee = Number(view.getBigUint64(offset + 36));
txs.push({
txid,
weight,
fee
});
}
return txs;
} catch (e) {
logger.warn(`Failed to unpack CPFP cluster. Reason: ` + (e instanceof Error ? e.message : e));
return [];
}
}
}
export default new CpfpRepository();
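
Round-trip sketch of the 44-byte packed record used by pack()/unpack(): bytes
0-31 hold the txid with byte order reversed, bytes 32-35 the weight as a
big-endian uint32 (DataView's default), and bytes 36-43 the fee as a big-endian
uint64. The import path and the minimal Ancestor shape are assumptions:

import CpfpRepository from './CpfpRepository';

const txs = [{ txid: 'ab'.repeat(32), weight: 400, fee: 1000 }]; // shape assumed to satisfy Ancestor
const packed = Buffer.from(CpfpRepository.pack(txs));            // 44 bytes per tx
const roundTripped = CpfpRepository.unpack(packed);
// roundTripped[0] -> { txid: 'abab...ab', weight: 400, fee: 1000 }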

View File

@@ -1,5 +1,4 @@
import { Common } from '../api/common';
import config from '../config';
import DB from '../database';
import logger from '../logger';
import { IndexedDifficultyAdjustment } from '../mempool.interfaces';
@@ -21,9 +20,9 @@ class DifficultyAdjustmentsRepository {
await DB.query(query, params);
} catch (e: any) {
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`);
logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`, logger.tags.mining);
} else {
logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
throw e;
}
}
@@ -55,7 +54,7 @@ class DifficultyAdjustmentsRepository {
const [rows] = await DB.query(query);
return rows as IndexedDifficultyAdjustment[];
} catch (e) {
logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -84,7 +83,7 @@ class DifficultyAdjustmentsRepository {
const [rows] = await DB.query(query);
return rows as IndexedDifficultyAdjustment[];
} catch (e) {
logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -94,27 +93,27 @@ class DifficultyAdjustmentsRepository {
const [rows]: any[] = await DB.query(`SELECT height FROM difficulty_adjustments`);
return rows.map(block => block.height);
} catch (e: any) {
logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
throw e;
}
}
public async $deleteAdjustementsFromHeight(height: number): Promise<void> {
try {
logger.info(`Delete newer difficulty adjustments from height ${height} from the database`);
logger.info(`Delete newer difficulty adjustments from height ${height} from the database`, logger.tags.mining);
await DB.query(`DELETE FROM difficulty_adjustments WHERE height >= ?`, [height]);
} catch (e: any) {
logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
throw e;
}
}
public async $deleteLastAdjustment(): Promise<void> {
try {
logger.info(`Delete last difficulty adjustment from the database`);
logger.info(`Delete last difficulty adjustment from the database`, logger.tags.mining);
await DB.query(`DELETE FROM difficulty_adjustments ORDER BY time LIMIT 1`);
} catch (e: any) {
logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
throw e;
}
}
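
The only substantive change in this file is threading logger.tags.mining
through every log call. A minimal sketch of that convention, assuming
logger.tags exposes plain label strings that the logger prefixes onto each line:

import logger from '../logger';

logger.info(`Indexing difficulty adjustments`, logger.tags.mining);   // tagged info line
logger.err(`Cannot save difficulty adjustment`, logger.tags.mining);  // tagged error line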

View File

@@ -1,5 +1,6 @@
import { escape } from 'mysql2';
import { Common } from '../api/common';
import mining from '../api/mining/mining';
import DB from '../database';
import logger from '../logger';
import PoolsRepository from './PoolsRepository';
@@ -24,7 +25,7 @@ class HashratesRepository {
try {
await DB.query(query);
} catch (e: any) {
logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot save indexed hashrate into db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -50,7 +51,7 @@ class HashratesRepository {
const [rows]: any[] = await DB.query(query);
return rows;
} catch (e) {
logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -77,7 +78,7 @@ class HashratesRepository {
const [rows]: any[] = await DB.query(query);
return rows;
} catch (e) {
logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -92,7 +93,7 @@ class HashratesRepository {
const [rows]: any[] = await DB.query(query);
return rows.map(row => row.timestamp);
} catch (e) {
logger.err('Cannot retrieve indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot retrieve indexed weekly hashrate timestamps. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -127,7 +128,7 @@ class HashratesRepository {
const [rows]: any[] = await DB.query(query);
return rows;
} catch (e) {
logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot fetch weekly pools hashrate history. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -157,7 +158,7 @@ class HashratesRepository {
const [rows]: any[] = await DB.query(query, [pool.id]);
boundaries = rows[0];
} catch (e) {
logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot fetch hashrate start/end timestamps for this pool. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
}
// Get hashrates entries between boundaries
@@ -172,21 +173,7 @@ class HashratesRepository {
const [rows]: any[] = await DB.query(query, [boundaries.firstTimestamp, boundaries.lastTimestamp, pool.id]);
return rows;
} catch (e) {
logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Set latest run timestamp
*/
public async $setLatestRun(key: string, val: number) {
const query = `UPDATE state SET number = ? WHERE name = ?`;
try {
await DB.query(query, [val, key]);
} catch (e) {
logger.err(`Cannot set last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
logger.err('Cannot fetch pool hashrate history for this pool. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -205,7 +192,7 @@ class HashratesRepository {
}
return rows[0]['number'];
} catch (e) {
logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e));
logger.err(`Cannot retrieve last indexing run for ${key}. Reason: ` + (e instanceof Error ? e.message : e), logger.tags.mining);
throw e;
}
}
@@ -214,7 +201,7 @@ class HashratesRepository {
* Delete most recent data points for re-indexing
*/
public async $deleteLastEntries() {
logger.info(`Delete latest hashrates data points from the database`);
logger.info(`Delete latest hashrates data points from the database`, logger.tags.mining);
try {
const [rows]: any[] = await DB.query(`SELECT MAX(hashrate_timestamp) as timestamp FROM hashrates GROUP BY type`);
@@ -222,10 +209,10 @@ class HashratesRepository {
await DB.query(`DELETE FROM hashrates WHERE hashrate_timestamp = ?`, [row.timestamp]);
}
// Re-run the hashrate indexing to fill up missing data
await this.$setLatestRun('last_hashrates_indexing', 0);
await this.$setLatestRun('last_weekly_hashrates_indexing', 0);
mining.lastHashrateIndexingDate = null;
mining.lastWeeklyHashrateIndexingDate = null;
} catch (e) {
logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
}
}
@@ -238,10 +225,10 @@ class HashratesRepository {
try {
await DB.query(`DELETE FROM hashrates WHERE hashrate_timestamp >= FROM_UNIXTIME(?)`, [timestamp]);
// Re-run the hashrate indexing to fill up missing data
await this.$setLatestRun('last_hashrates_indexing', 0);
await this.$setLatestRun('last_weekly_hashrates_indexing', 0);
mining.lastHashrateIndexingDate = null;
mining.lastWeeklyHashrateIndexingDate = null;
} catch (e) {
logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot delete latest hashrates data points. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
}
}
}
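
Sketch of the re-index trigger this file switches to: rather than zeroing
`state` rows through the removed $setLatestRun helper, the in-memory markers on
the mining service are cleared, and the next indexing pass treats null as
"never ran". The consumer side is assumed here for illustration:

import mining from '../api/mining/mining';

mining.lastHashrateIndexingDate = null;        // forces the daily hashrate re-index
mining.lastWeeklyHashrateIndexingDate = null;  // forces the weekly hashrate re-index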

View File

@@ -0,0 +1,67 @@
import { ResultSetHeader, RowDataPacket } from 'mysql2';
import DB from '../database';
import logger from '../logger';
export interface NodeRecord {
publicKey: string; // node public key
type: number; // TLV extension record type
payload: string; // base64 record payload
}
class NodesRecordsRepository {
public async $saveRecord(record: NodeRecord): Promise<void> {
try {
const payloadBytes = Buffer.from(record.payload, 'base64');
await DB.query(`
INSERT INTO nodes_records(public_key, type, payload)
VALUE (?, ?, ?)
ON DUPLICATE KEY UPDATE
payload = ?
`, [record.publicKey, record.type, payloadBytes, payloadBytes]);
} catch (e: any) {
if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this
logger.err(`Cannot save node record (${[record.publicKey, record.type, record.payload]}) into db. Reason: ` + (e instanceof Error ? e.message : e));
// We don't throw, it's not a critical issue if we miss some node records
}
}
}
public async $getRecordTypes(publicKey: string): Promise<any> {
try {
const query = `
SELECT type FROM nodes_records
WHERE public_key = ?
`;
const [rows] = await DB.query<RowDataPacket[][]>(query, [publicKey]);
return rows.map(row => row['type']);
} catch (e) {
logger.err(`Cannot retrieve custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
return [];
}
}
public async $deleteUnusedRecords(publicKey: string, recordTypes: number[]): Promise<number> {
try {
let query;
if (recordTypes.length) {
query = `
DELETE FROM nodes_records
WHERE public_key = ?
AND type NOT IN (${recordTypes.map(type => `${type}`).join(',')})
`;
} else {
query = `
DELETE FROM nodes_records
WHERE public_key = ?
`;
}
const [result] = await DB.query<ResultSetHeader>(query, [publicKey]);
return result.affectedRows;
} catch (e) {
logger.err(`Cannot delete unused custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
return 0;
}
}
}
export default new NodesRecordsRepository();
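
Usage sketch for the new repository: payloads arrive base64-encoded (per the
NodeRecord interface) and are decoded to raw bytes before the upsert, so
re-saving the same (public_key, type) pair simply overwrites the payload
column. The key and type below are hypothetical:

import NodesRecordsRepository from './NodesRecordsRepository';

async function example(): Promise<void> {
  await NodesRecordsRepository.$saveRecord({
    publicKey: '02abc...',  // hypothetical node public key
    type: 34349334,         // hypothetical TLV record type
    payload: Buffer.from('hello').toString('base64'),
  });
  const types = await NodesRecordsRepository.$getRecordTypes('02abc...'); // e.g. [34349334]
}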

View File

@@ -1,4 +1,5 @@
import { Common } from '../api/common';
import poolsParser from '../api/pools-parser';
import config from '../config';
import DB from '../database';
import logger from '../logger';
@@ -9,7 +10,7 @@ class PoolsRepository {
* Get all pools tagging info
*/
public async $getPools(): Promise<PoolTag[]> {
const [rows] = await DB.query('SELECT id, name, addresses, regexes, slug FROM pools;');
const [rows] = await DB.query('SELECT id, unique_id as uniqueId, name, addresses, regexes, slug FROM pools');
return <PoolTag[]>rows;
}
@@ -17,7 +18,11 @@ class PoolsRepository {
* Get unknown pool tagging info
*/
public async $getUnknownPool(): Promise<PoolTag> {
const [rows] = await DB.query('SELECT id, name, slug FROM pools where name = "Unknown"');
let [rows]: any[] = await DB.query('SELECT id, unique_id as uniqueId, name, slug FROM pools where name = "Unknown"');
if (rows && rows.length === 0 && config.DATABASE.ENABLED) {
await poolsParser.$insertUnknownPool();
[rows] = await DB.query('SELECT id, unique_id as uniqueId, name, slug FROM pools where name = "Unknown"');
}
return <PoolTag>rows[0];
}
@@ -27,16 +32,25 @@ class PoolsRepository {
public async $getPoolsInfo(interval: string | null = null): Promise<PoolInfo[]> {
interval = Common.getSqlInterval(interval);
let query = `SELECT COUNT(height) as blockCount, pool_id as poolId, pools.name as name, pools.link as link, slug
let query = `
SELECT
COUNT(blocks.height) As blockCount,
pool_id AS poolId,
pools.name AS name,
pools.link AS link,
slug,
AVG(blocks_audits.match_rate) AS avgMatchRate
FROM blocks
JOIN pools on pools.id = pool_id`;
JOIN pools on pools.id = pool_id
LEFT JOIN blocks_audits ON blocks_audits.height = blocks.height
`;
if (interval) {
query += ` WHERE blocks.blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
}
query += ` GROUP BY pool_id
ORDER BY COUNT(height) DESC`;
ORDER BY COUNT(blocks.height) DESC`;
try {
const [rows] = await DB.query(query);
@@ -50,7 +64,7 @@ class PoolsRepository {
/**
* Get basic pool info and block count between two timestamp
*/
public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
public async $getPoolsInfoBetween(from: number, to: number): Promise<PoolInfo[]> {
const query = `SELECT COUNT(height) as blockCount, pools.id as poolId, pools.name as poolName
FROM pools
LEFT JOIN blocks on pools.id = blocks.pool_id AND blocks.blockTimestamp BETWEEN FROM_UNIXTIME(?) AND FROM_UNIXTIME(?)
@@ -66,9 +80,9 @@ class PoolsRepository {
}
/**
* Get mining pool statistics for one pool
* Get a mining pool info
*/
public async $getPool(slug: string): Promise<PoolTag | null> {
public async $getPool(slug: string, parse: boolean = true): Promise<PoolTag | null> {
const query = `
SELECT *
FROM pools
@@ -81,10 +95,12 @@ class PoolsRepository {
return null;
}
rows[0].regexes = JSON.parse(rows[0].regexes);
if (parse) {
rows[0].regexes = JSON.parse(rows[0].regexes);
}
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
rows[0].addresses = []; // pools.json only contains mainnet addresses
} else {
rows[0].addresses = []; // pools-v2.json only contains mainnet addresses
} else if (parse) {
rows[0].addresses = JSON.parse(rows[0].addresses);
}
@@ -94,6 +110,116 @@ class PoolsRepository {
throw e;
}
}
/**
* Get a mining pool info by its unique id
*/
public async $getPoolByUniqueId(id: number, parse: boolean = true): Promise<PoolTag | null> {
const query = `
SELECT *
FROM pools
WHERE pools.unique_id = ?`;
try {
const [rows]: any[] = await DB.query(query, [id]);
if (rows.length < 1) {
return null;
}
if (parse) {
rows[0].regexes = JSON.parse(rows[0].regexes);
}
if (['testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
rows[0].addresses = []; // pools.json only contains mainnet addresses
} else if (parse) {
rows[0].addresses = JSON.parse(rows[0].addresses);
}
return rows[0];
} catch (e) {
logger.err('Cannot get pool from db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
/**
* Insert a new mining pool in the database
*
* @param pool
*/
public async $insertNewMiningPool(pool: any, slug: string): Promise<void> {
try {
await DB.query(`
INSERT INTO pools
SET name = ?, link = ?, addresses = ?, regexes = ?, slug = ?, unique_id = ?`,
[pool.name, pool.link, JSON.stringify(pool.addresses), JSON.stringify(pool.regexes), slug, pool.id]
);
} catch (e: any) {
logger.err(`Cannot insert new mining pool into db. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Rename an existing mining pool
*
* @param dbId
* @param newSlug
* @param newName
*/
public async $renameMiningPool(dbId: number, newSlug: string, newName: string): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET slug = ?, name = ?
WHERE id = ?`,
[newSlug, newName, dbId]
);
} catch (e: any) {
logger.err(`Cannot rename mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Update an existing mining pool link
*
* @param dbId
* @param newLink
*/
public async $updateMiningPoolLink(dbId: number, newLink: string): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET link = ?
WHERE id = ?`,
[newLink, dbId]
);
} catch (e: any) {
logger.err(`Cannot update link for mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
/**
* Update an existing mining pool addresses or coinbase tags
*
* @param dbId
* @param addresses
* @param regexes
*/
public async $updateMiningPoolTags(dbId: number, addresses: string, regexes: string): Promise<void> {
try {
await DB.query(`
UPDATE pools
SET addresses = ?, regexes = ?
WHERE id = ?`,
[JSON.stringify(addresses), JSON.stringify(regexes), dbId]
);
} catch (e: any) {
logger.err(`Cannot update mining pool id ${dbId}. Reason: ` + (e instanceof Error ? e.message : e));
}
}
}
export default new PoolsRepository();
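
Sketch of the new `parse` flag on $getPool and $getPoolByUniqueId: by default
the JSON columns are decoded, while parse = false returns them as raw strings,
which is handy when the caller only wants to compare or re-serialize them. The
slug below is hypothetical:

import PoolsRepository from './PoolsRepository';

async function example(): Promise<void> {
  const parsed = await PoolsRepository.$getPool('example-pool');      // regexes/addresses decoded
  const raw = await PoolsRepository.$getPool('example-pool', false);  // raw JSON strings
}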

View File

@@ -1,50 +1,228 @@
import DB from '../database';
import logger from '../logger';
import { Prices } from '../tasks/price-updater';
import priceUpdater from '../tasks/price-updater';
export interface ApiPrice {
time?: number,
USD: number,
EUR: number,
GBP: number,
CAD: number,
CHF: number,
AUD: number,
JPY: number,
}
const ApiPriceFields = `
UNIX_TIMESTAMP(time) as time,
USD,
EUR,
GBP,
CAD,
CHF,
AUD,
JPY
`;
export interface ExchangeRates {
USDEUR: number,
USDGBP: number,
USDCAD: number,
USDCHF: number,
USDAUD: number,
USDJPY: number,
}
export interface Conversion {
prices: ApiPrice[],
exchangeRates: ExchangeRates;
}
export const MAX_PRICES = {
USD: 100000000,
EUR: 100000000,
GBP: 100000000,
CAD: 100000000,
CHF: 100000000,
AUD: 100000000,
JPY: 10000000000,
};
class PricesRepository {
public async $savePrices(time: number, prices: Prices): Promise<void> {
public async $savePrices(time: number, prices: ApiPrice): Promise<void> {
if (prices.USD === -1) {
// Some historical price entries have not USD prices, so we just ignore them to avoid future UX issues
// As of today there are only 4 (on 2013-09-05, 2013-09-19, 2013-09-12 and 2013-09-26) so that's fine
// Some historical price entries have no USD prices, so we just ignore them to avoid future UX issues
// As of today there are only 4 (on 2013-09-05, 2013-09-19, 2013-09-12 and 2013-09-26) so that's fine
return;
}
// Sanity check
for (const currency of Object.keys(prices)) {
if (prices[currency] < -1 || prices[currency] > MAX_PRICES[currency]) { // We use -1 to mark "missing data", so it's a valid entry
logger.info(`Ignore BTC${currency} price of ${prices[currency]}`);
prices[currency] = 0;
}
}
try {
await DB.query(`
INSERT INTO prices(time, USD, EUR, GBP, CAD, CHF, AUD, JPY)
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ? )`,
[time, prices.USD, prices.EUR, prices.GBP, prices.CAD, prices.CHF, prices.AUD, prices.JPY]
);
} catch (e: any) {
} catch (e) {
logger.err(`Cannot save exchange rate into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getOldestPriceTime(): Promise<number> {
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time LIMIT 1`);
const [oldestRow] = await DB.query(`
SELECT UNIX_TIMESTAMP(time) AS time
FROM prices
ORDER BY time
LIMIT 1
`);
return oldestRow[0] ? oldestRow[0].time : 0;
}
public async $getLatestPriceId(): Promise<number | null> {
const [oldestRow] = await DB.query(`SELECT id from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
const [oldestRow] = await DB.query(`
SELECT id
FROM prices
ORDER BY time DESC
LIMIT 1`
);
return oldestRow[0] ? oldestRow[0].id : null;
}
public async $getLatestPriceTime(): Promise<number> {
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
const [oldestRow] = await DB.query(`
SELECT UNIX_TIMESTAMP(time) AS time
FROM prices
ORDER BY time DESC
LIMIT 1`
);
return oldestRow[0] ? oldestRow[0].time : 0;
}
public async $getPricesTimes(): Promise<number[]> {
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time`);
const [times] = await DB.query(`
SELECT UNIX_TIMESTAMP(time) AS time
FROM prices
WHERE USD != -1
ORDER BY time
`);
if (!Array.isArray(times)) {
return [];
}
return times.map(time => time.time);
}
public async $getPricesTimesAndId(): Promise<number[]> {
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time, id, USD from prices ORDER BY time`);
return times;
public async $getPricesTimesAndId(): Promise<{time: number, id: number, USD: number}[]> {
const [times] = await DB.query(`
SELECT
UNIX_TIMESTAMP(time) AS time,
id,
USD
FROM prices
ORDER BY time
`);
return times as {time: number, id: number, USD: number}[];
}
public async $getLatestConversionRates(): Promise<ApiPrice> {
const [rates] = await DB.query(`
SELECT ${ApiPriceFields}
FROM prices
ORDER BY time DESC
LIMIT 1`
);
if (!Array.isArray(rates) || rates.length === 0) {
return priceUpdater.getEmptyPricesObj();
}
return rates[0] as ApiPrice;
}
public async $getNearestHistoricalPrice(timestamp: number | undefined): Promise<Conversion | null> {
try {
const [rates] = await DB.query(`
SELECT ${ApiPriceFields}
FROM prices
WHERE UNIX_TIMESTAMP(time) < ?
ORDER BY time DESC
LIMIT 1`,
[timestamp]
);
if (!Array.isArray(rates)) {
throw Error(`Cannot get single historical price from the database`);
}
// Compute fiat exchange rates
let latestPrice = rates[0] as ApiPrice;
if (latestPrice.USD === -1) {
latestPrice = priceUpdater.getEmptyPricesObj();
}
const computeFx = (usd: number, other: number): number =>
Math.round(Math.max(other, 0) / Math.max(usd, 1) * 100) / 100;
const exchangeRates: ExchangeRates = {
USDEUR: computeFx(latestPrice.USD, latestPrice.EUR),
USDGBP: computeFx(latestPrice.USD, latestPrice.GBP),
USDCAD: computeFx(latestPrice.USD, latestPrice.CAD),
USDCHF: computeFx(latestPrice.USD, latestPrice.CHF),
USDAUD: computeFx(latestPrice.USD, latestPrice.AUD),
USDJPY: computeFx(latestPrice.USD, latestPrice.JPY),
};
return {
prices: rates as ApiPrice[],
exchangeRates: exchangeRates
};
} catch (e) {
logger.err(`Cannot fetch single historical prices from the db. Reason ${e instanceof Error ? e.message : e}`);
return null;
}
}
public async $getHistoricalPrices(): Promise<Conversion | null> {
try {
const [rates] = await DB.query(`
SELECT ${ApiPriceFields}
FROM prices
ORDER BY time DESC
`);
if (!Array.isArray(rates)) {
throw Error(`Cannot get average historical price from the database`);
}
// Compute fiat exchange rates
let latestPrice = rates[0] as ApiPrice;
if (latestPrice.USD === -1) {
latestPrice = priceUpdater.getEmptyPricesObj();
}
const computeFx = (usd: number, other: number): number =>
Math.round(Math.max(other, 0) / Math.max(usd, 1) * 100) / 100;
const exchangeRates: ExchangeRates = {
USDEUR: computeFx(latestPrice.USD, latestPrice.EUR),
USDGBP: computeFx(latestPrice.USD, latestPrice.GBP),
USDCAD: computeFx(latestPrice.USD, latestPrice.CAD),
USDCHF: computeFx(latestPrice.USD, latestPrice.CHF),
USDAUD: computeFx(latestPrice.USD, latestPrice.AUD),
USDJPY: computeFx(latestPrice.USD, latestPrice.JPY),
};
return {
prices: rates as ApiPrice[],
exchangeRates: exchangeRates
};
} catch (e) {
logger.err(`Cannot fetch historical prices from the db. Reason ${e instanceof Error ? e.message : e}`);
return null;
}
}
}
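
Worked example of the computeFx rounding used for the exchange rates above,
with hypothetical prices of 27000 USD and 25000 EUR per BTC:

const computeFx = (usd: number, other: number): number =>
  Math.round(Math.max(other, 0) / Math.max(usd, 1) * 100) / 100;

console.log(computeFx(27000, 25000)); // round(0.9259... * 100) / 100 = 0.93 -> USDEUR
console.log(computeFx(27000, -1));    // a "missing" (-1) price clamps to 0 -> 0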

View File

@@ -0,0 +1,113 @@
import DB from '../database';
import logger from '../logger';
import { Ancestor, CpfpInfo } from '../mempool.interfaces';
import cpfpRepository from './CpfpRepository';
class TransactionRepository {
public async $setCluster(txid: string, clusterRoot: string): Promise<void> {
try {
await DB.query(
`
INSERT INTO compact_transactions
(
txid,
cluster
)
VALUE (UNHEX(?), UNHEX(?))
ON DUPLICATE KEY UPDATE
cluster = UNHEX(?)
;`,
[txid, clusterRoot, clusterRoot]
);
} catch (e: any) {
logger.err(`Cannot save transaction cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $batchSetCluster(txs): Promise<void> {
try {
let query = `
INSERT IGNORE INTO compact_transactions
(
txid,
cluster
)
VALUES
`;
query += txs.map(tx => {
return (' (UNHEX(?), UNHEX(?))');
}) + ';';
const values = txs.map(tx => [tx.txid, tx.cluster]).flat();
await DB.query(
query,
values
);
} catch (e: any) {
logger.err(`Cannot save cpfp transactions into db. Reason: ` + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $getCpfpInfo(txid: string): Promise<CpfpInfo | void> {
try {
const [txRows]: any = await DB.query(
`
SELECT HEX(txid) as id, HEX(cluster) as root
FROM compact_transactions
WHERE txid = UNHEX(?)
`,
[txid]
);
if (txRows.length && txRows[0].root != null) {
const txid = txRows[0].id.toLowerCase();
const clusterId = txRows[0].root.toLowerCase();
const cluster = await cpfpRepository.$getCluster(clusterId);
if (cluster) {
return this.convertCpfp(txid, cluster);
}
}
} catch (e) {
logger.err('Cannot get transaction cpfp info from db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
public async $removeTransaction(txid: string): Promise<void> {
try {
await DB.query(
`
DELETE FROM compact_transactions
WHERE txid = UNHEX(?)
`,
[txid]
);
} catch (e) {
logger.warn('Cannot delete transaction cpfp info from db. Reason: ' + (e instanceof Error ? e.message : e));
throw e;
}
}
private convertCpfp(txid, cluster): CpfpInfo {
const descendants: Ancestor[] = [];
const ancestors: Ancestor[] = [];
let matched = false;
for (const tx of (cluster?.txs || [])) {
if (tx.txid === txid) {
matched = true;
} else if (!matched) {
descendants.push(tx);
} else {
ancestors.push(tx);
}
}
return {
descendants,
ancestors,
};
}
}
export default new TransactionRepository();
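
Sketch of the ordering convertCpfp assumes: cluster.txs is stored
descendants-first, so entries before the target txid are its descendants and
entries after it are its ancestors. A self-contained version of the split:

function splitCluster<T extends { txid: string }>(txid: string, txs: T[]): { descendants: T[], ancestors: T[] } {
  const descendants: T[] = [];
  const ancestors: T[] = [];
  let matched = false;
  for (const tx of txs) {
    if (tx.txid === txid) {
      matched = true;        // the target itself is excluded from both lists
    } else if (!matched) {
      descendants.push(tx);  // before the match -> descendant
    } else {
      ancestors.push(tx);    // after the match -> ancestor
    }
  }
  return { descendants, ancestors };
}

console.log(splitCluster('b', [{ txid: 'c' }, { txid: 'b' }, { txid: 'a' }]));
// -> { descendants: [{ txid: 'c' }], ancestors: [{ txid: 'a' }] }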

View File

@@ -88,5 +88,7 @@ module.exports = {
verifyTxOutProof: 'verifytxoutproof', // bitcoind v0.11.0+
walletLock: 'walletlock',
walletPassphrase: 'walletpassphrase',
walletPassphraseChange: 'walletpassphrasechange'
}
walletPassphraseChange: 'walletpassphrasechange',
getTxoutSetinfo: 'gettxoutsetinfo',
getIndexInfo: 'getindexinfo',
};
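
A hedged usage sketch, assuming the usual rpc-client pattern for this module,
where each camelCase key becomes a method that sends its lowercase command:

// const utxoSet = await bitcoinClient.getTxoutSetinfo();  // issues `gettxoutsetinfo`
// const indexes = await bitcoinClient.getIndexInfo();     // issues `getindexinfo` (Core v0.21+)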

View File

@@ -0,0 +1,457 @@
import DB from '../../database';
import logger from '../../logger';
import channelsApi from '../../api/explorer/channels.api';
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
import config from '../../config';
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
import { Common } from '../../api/common';
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
const tempCacheSize = 10000;
class ForensicsService {
loggerTimer = 0;
closedChannelsScanBlock = 0;
txCache: { [txid: string]: IEsploraApi.Transaction } = {};
tempCached: string[] = [];
constructor() {}
public async $startService(): Promise<void> {
logger.info('Starting lightning network forensics service');
this.loggerTimer = new Date().getTime() / 1000;
await this.$runTasks();
}
private async $runTasks(): Promise<void> {
try {
logger.info(`Running forensics scans`);
if (config.MEMPOOL.BACKEND === 'esplora') {
await this.$runClosedChannelsForensics(false);
await this.$runOpenedChannelsForensics();
}
} catch (e) {
logger.err('ForensicsService.$runTasks() error: ' + (e instanceof Error ? e.message : e));
}
setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.FORENSICS_INTERVAL);
}
/*
1. Mutually closed
2. Forced closed
3. Forced closed with penalty
┌────────────────────────────────────┐       ┌────────────────────────────┐
│ outputs contain revocation script? ├──yes──► force close w/ penalty = 3 │
└──────────────┬─────────────────────┘       └────────────────────────────┘
               no
┌──────────────▼──────────────────────────┐
│ outputs contain other lightning script? ├──┐
└──────────────┬──────────────────────────┘  │
               no                           yes
┌──────────────▼─────────────┐               │
│ sequence starts with 0x80  │      ┌────────▼────────┐
│ and                        ├──────► force close = 2 │
│ locktime starts with 0x20? │      └─────────────────┘
└──────────────┬─────────────┘
               no
     ┌─────────▼────────┐
     │ mutual close = 1 │
     └──────────────────┘
*/
public async $runClosedChannelsForensics(onlyNewChannels: boolean = false): Promise<void> {
if (config.MEMPOOL.BACKEND !== 'esplora') {
return;
}
let progress = 0;
try {
logger.info(`Started running closed channel forensics...`);
let channels;
if (onlyNewChannels) {
channels = await channelsApi.$getClosedChannelsWithoutReason();
} else {
channels = await channelsApi.$getUnresolvedClosedChannels();
}
for (const channel of channels) {
let reason = 0;
let resolvedForceClose = false;
// Only Esplora backend can retrieve spent transaction outputs
const cached: string[] = [];
try {
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
continue;
}
const lightningScriptReasons: number[] = [];
for (const outspend of outspends) {
if (outspend.spent && outspend.txid) {
let spendingTx = await this.fetchTransaction(outspend.txid);
if (!spendingTx) {
continue;
}
cached.push(spendingTx.txid);
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
lightningScriptReasons.push(lightningScript);
}
}
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
if (filteredReasons.length) {
if (filteredReasons.some((r) => r === 2 || r === 4)) {
reason = 3;
} else {
reason = 2;
resolvedForceClose = true;
}
} else {
/*
We can detect a commitment transaction (force close) by reading Sequence and Locktime
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
*/
let closingTx = await this.fetchTransaction(channel.closing_transaction_id, true);
if (!closingTx) {
continue;
}
cached.push(closingTx.txid);
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
const locktimeHex: string = closingTx.locktime.toString(16);
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
reason = 2; // Here we can't be sure if it's a penalty or not
} else {
reason = 1;
}
}
if (reason) {
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
if (reason === 2 && resolvedForceClose) {
await DB.query(`UPDATE channels SET closing_resolved = ? WHERE id = ?`, [true, channel.id]);
}
if (reason !== 2 || resolvedForceClose) {
cached.forEach(txid => {
delete this.txCache[txid];
});
}
}
} catch (e) {
logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? e.message : e}`);
}
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating closed channel forensics ${progress}/${channels.length}`);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Closed channels forensics scan complete.`);
} catch (e) {
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
}
}
private findLightningScript(vin: IEsploraApi.Vin): number {
const topElement = vin.witness?.length > 2 ? vin.witness[vin.witness.length - 2] : null;
if (/^OP_IF OP_PUSHBYTES_33 \w{66} OP_ELSE OP_PUSH(NUM_\d+|BYTES_(1 \w{2}|2 \w{4})) OP_CSV OP_DROP OP_PUSHBYTES_33 \w{66} OP_ENDIF OP_CHECKSIG$/.test(vin.inner_witnessscript_asm)) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction-outputs
if (topElement === '01') {
// top element is '01' to get in the revocation path
// 'Revoked Lightning Force Close';
// Penalty force closed
return 2;
} else {
// top element is '', this is a delayed to_local output
// 'Lightning Force Close';
return 3;
}
} else if (
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_NOTIF OP_DROP OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm) ||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_IF OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_DROP OP_PUSHBYTES_3 \w{6} OP_CLTV OP_DROP OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm)
) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#offered-htlc-outputs
// https://github.com/lightning/bolts/blob/master/03-transactions.md#received-htlc-outputs
if (topElement?.length === 66) {
// top element is a public key
// 'Revoked Lightning HTLC'; Penalty force closed
return 4;
} else if (topElement) {
// top element is a preimage
// 'Lightning HTLC';
return 5;
} else {
// top element is '' to get in the expiry of the script
// 'Expired Lightning HTLC';
return 6;
}
} else if (/^OP_PUSHBYTES_33 \w{66} OP_CHECKSIG OP_IFDUP OP_NOTIF OP_PUSHNUM_16 OP_CSV OP_ENDIF$/.test(vin.inner_witnessscript_asm)) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#to_local_anchor-and-to_remote_anchor-output-option_anchors
if (topElement) {
// top element is a signature
// 'Lightning Anchor';
return 7;
} else {
// top element is '', it has been swept after 16 blocks
// 'Swept Lightning Anchor';
return 8;
}
}
return 1;
}
// If a channel open tx spends funds from another channel transaction,
// we can attribute that output to a specific counterparty
private async $runOpenedChannelsForensics(): Promise<void> {
const runTimer = Date.now();
let progress = 0;
try {
logger.info(`Started running open channel forensics...`);
const channels = await channelsApi.$getChannelsWithoutSourceChecked();
for (const openChannel of channels) {
let openTx = await this.fetchTransaction(openChannel.transaction_id, true);
if (!openTx) {
continue;
}
for (const input of openTx.vin) {
const closeChannel = await channelsApi.$getChannelByClosingId(input.txid);
if (closeChannel) {
// this input directly spends a channel close output
await this.$attributeChannelBalances(closeChannel, openChannel, input);
} else {
const prevOpenChannels = await channelsApi.$getChannelsByOpeningId(input.txid);
if (prevOpenChannels?.length) {
// this input spends a channel open change output
for (const prevOpenChannel of prevOpenChannels) {
await this.$attributeChannelBalances(prevOpenChannel, openChannel, input, null, null, true);
}
} else {
// check if this input spends any swept channel close outputs
await this.$attributeSweptChannelCloses(openChannel, input);
}
}
}
// calculate how much of the total input value is attributable to the channel open output
openChannel.funding_ratio = openTx.vout[openChannel.transaction_vout].value / ((openTx.vout.reduce((sum, v) => sum + v.value, 0) || 1) + openTx.fee);
// save changes to the opening channel, and mark it as checked
if (openTx?.vin?.length === 1) {
openChannel.single_funded = true;
}
if (openChannel.node1_funding_balance || openChannel.node2_funding_balance || openChannel.node1_closing_balance || openChannel.node2_closing_balance || openChannel.closed_by) {
await channelsApi.$updateOpeningInfo(openChannel);
}
await channelsApi.$markChannelSourceChecked(openChannel.id);
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating opened channel forensics ${progress}/${channels?.length}`);
this.loggerTimer = new Date().getTime() / 1000;
this.truncateTempCache();
}
if (Date.now() - runTimer > (config.LIGHTNING.FORENSICS_INTERVAL * 1000)) {
break;
}
}
logger.info(`Open channels forensics scan complete.`);
} catch (e) {
logger.err('$runOpenedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
} finally {
this.clearTempCache();
}
}
// Check if a channel open tx input spends the result of a swept channel close output
private async $attributeSweptChannelCloses(openChannel: ILightningApi.Channel, input: IEsploraApi.Vin): Promise<void> {
let sweepTx = await this.fetchTransaction(input.txid, true);
if (!sweepTx) {
logger.err(`couldn't find input transaction for channel forensics ${openChannel.channel_id} ${input.txid}`);
return;
}
const openContribution = sweepTx.vout[input.vout].value;
for (const sweepInput of sweepTx.vin) {
const lnScriptType = this.findLightningScript(sweepInput);
if (lnScriptType > 1) {
const closeChannel = await channelsApi.$getChannelByClosingId(sweepInput.txid);
if (closeChannel) {
const initiator = (lnScriptType === 2 || lnScriptType === 4) ? 'remote' : (lnScriptType === 3 ? 'local' : null);
await this.$attributeChannelBalances(closeChannel, openChannel, sweepInput, openContribution, initiator);
}
}
}
}
private async $attributeChannelBalances(
prevChannel, openChannel, input: IEsploraApi.Vin, openContribution: number | null = null,
initiator: 'remote' | 'local' | null = null, linkedOpenings: boolean = false
): Promise<void> {
// figure out which node controls the input/output
let openSide;
let prevLocal;
let prevRemote;
let matched = false;
let ambiguous = false; // if counterparties are the same in both channels, we can't tell them apart
if (openChannel.node1_public_key === prevChannel.node1_public_key) {
openSide = 1;
prevLocal = 1;
prevRemote = 2;
matched = true;
} else if (openChannel.node1_public_key === prevChannel.node2_public_key) {
openSide = 1;
prevLocal = 2;
prevRemote = 1;
matched = true;
}
if (openChannel.node2_public_key === prevChannel.node1_public_key) {
openSide = 2;
prevLocal = 1;
prevRemote = 2;
if (matched) {
ambiguous = true;
}
matched = true;
} else if (openChannel.node2_public_key === prevChannel.node2_public_key) {
openSide = 2;
prevLocal = 2;
prevRemote = 1;
if (matched) {
ambiguous = true;
}
matched = true;
}
if (matched && !ambiguous) {
// fetch closing channel transaction and perform forensics on the outputs
let prevChannelTx = await this.fetchTransaction(input.txid, true);
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(input.txid);
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + input.txid + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
}
if (!outspends || !prevChannelTx) {
return;
}
if (!linkedOpenings) {
if (!prevChannel.outputs || !prevChannel.outputs.length) {
prevChannel.outputs = prevChannelTx.vout.map(vout => {
return {
type: 0,
value: vout.value,
};
});
}
for (let i = 0; i < outspends?.length; i++) {
const outspend = outspends[i];
const output = prevChannel.outputs[i];
if (outspend.spent && outspend.txid) {
try {
const spendingTx = await this.fetchTransaction(outspend.txid, true);
if (spendingTx) {
output.type = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
}
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + outspend.txid}. Reason ${e instanceof Error ? e.message : e}`);
}
} else {
output.type = 0;
}
}
// attribute outputs to each counterparty, and sum up total known balances
prevChannel.outputs[input.vout].node = prevLocal;
const isPenalty = prevChannel.outputs.filter((out) => out.type === 2 || out.type === 4)?.length > 0;
const normalOutput = [1,3].includes(prevChannel.outputs[input.vout].type);
const mutualClose = ((prevChannel.status === 2 || prevChannel.status === 'closed') && prevChannel.closing_reason === 1);
let localClosingBalance = 0;
let remoteClosingBalance = 0;
for (const output of prevChannel.outputs) {
if (isPenalty) {
// penalty close, so local node takes everything
localClosingBalance += output.value;
} else if (output.node) {
// this output is deterministically linked to one of the counterparties
if (output.node === prevLocal) {
localClosingBalance += output.value;
} else {
remoteClosingBalance += output.value;
}
} else if (normalOutput && (output.type === 1 || output.type === 3 || (mutualClose && prevChannel.outputs.length === 2))) {
// local node had one main output, therefore remote node takes the other
remoteClosingBalance += output.value;
}
}
prevChannel[`node${prevLocal}_closing_balance`] = localClosingBalance;
prevChannel[`node${prevRemote}_closing_balance`] = remoteClosingBalance;
prevChannel.closing_fee = prevChannelTx.fee;
if (initiator && !linkedOpenings) {
const initiatorSide = initiator === 'remote' ? prevRemote : prevLocal;
prevChannel.closed_by = prevChannel[`node${initiatorSide}_public_key`];
}
// save changes to the closing channel
await channelsApi.$updateClosingInfo(prevChannel);
} else {
if (prevChannelTx.vin.length <= 1) {
prevChannel[`node${prevLocal}_funding_balance`] = prevChannel.capacity;
prevChannel.single_funded = true;
prevChannel.funding_ratio = 1;
// save changes to the closing channel
await channelsApi.$updateOpeningInfo(prevChannel);
}
}
openChannel[`node${openSide}_funding_balance`] = openChannel[`node${openSide}_funding_balance`] + (openContribution || prevChannelTx?.vout[input.vout]?.value || 0);
}
}
async fetchTransaction(txid: string, temp: boolean = false): Promise<IEsploraApi.Transaction | null> {
let tx = this.txCache[txid];
if (!tx) {
try {
tx = await bitcoinApi.$getRawTransaction(txid);
this.txCache[txid] = tx;
if (temp) {
this.tempCached.push(txid);
}
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + txid}. Reason ${e instanceof Error ? e.message : e}`);
return null;
}
}
return tx;
}
clearTempCache(): void {
for (const txid of this.tempCached) {
delete this.txCache[txid];
}
this.tempCached = [];
}
truncateTempCache(): void {
if (this.tempCached.length > tempCacheSize) {
const removed = this.tempCached.splice(0, this.tempCached.length - tempCacheSize);
for (const txid of removed) {
delete this.txCache[txid];
}
}
}
}
export default new ForensicsService();
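
A minimal sketch of the commitment-transaction check used in the closed-channel
forensics above (per BOLT #3, the upper byte of nSequence is 0x80 and the upper
byte of nLocktime is 0x20), so comparing leading hex digits is enough:

function looksLikeCommitmentTx(sequence: number, locktime: number): boolean {
  const sequenceHex = sequence.toString(16);
  const locktimeHex = locktime.toString(16);
  return sequenceHex.startsWith('80') && locktimeHex.startsWith('20');
}

console.log(looksLikeCommitmentTx(0x80000000 + 123, 0x20000000 + 456)); // true  -> force close
console.log(looksLikeCommitmentTx(0xffffffff, 500000));                 // false -> mutual close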

View File

@@ -13,6 +13,8 @@ import fundingTxFetcher from './sync-tasks/funding-tx-fetcher';
import NodesSocketsRepository from '../../repositories/NodesSocketsRepository';
import { Common } from '../../api/common';
import blocks from '../../api/blocks';
import NodeRecordsRepository from '../../repositories/NodeRecordsRepository';
import forensicsService from './forensics.service';
class NetworkSyncService {
loggerTimer = 0;
@@ -21,7 +23,7 @@ class NetworkSyncService {
constructor() {}
public async $startService(): Promise<void> {
logger.info('Starting lightning network sync service');
logger.info(`Starting lightning network sync service`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
@@ -29,12 +31,13 @@ class NetworkSyncService {
}
private async $runTasks(): Promise<void> {
const taskStartTime = Date.now();
try {
logger.info(`Updating nodes and channels`);
logger.debug(`Updating nodes and channels`, logger.tags.ln);
const networkGraph = await lightningApi.$getNetworkGraph();
if (networkGraph.nodes.length === 0 || networkGraph.edges.length === 0) {
logger.info(`LN Network graph is empty, retrying in 10 seconds`);
logger.info(`LN Network graph is empty, retrying in 10 seconds`, logger.tags.ln);
setTimeout(() => { this.$runTasks(); }, 10000);
return;
}
@@ -45,15 +48,17 @@ class NetworkSyncService {
await this.$lookUpCreationDateFromChain();
await this.$updateNodeFirstSeen();
await this.$scanForClosedChannels();
if (config.MEMPOOL.BACKEND === 'esplora') {
await this.$runClosedChannelsForensics();
// run forensics on new channels only
await forensicsService.$runClosedChannelsForensics(true);
}
} catch (e) {
logger.err('$runTasks() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$runTasks() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL);
setTimeout(() => { this.$runTasks(); }, Math.max(1, (1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL) - (Date.now() - taskStartTime)));
}
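// Scheduling note (illustrative sketch, not part of the class): the new delay
// subtracts the task's own runtime so graph refreshes keep a fixed cadence
// instead of drifting by the run duration, clamped to at least 1 ms:
//   Math.max(1, 1000 * GRAPH_REFRESH_INTERVAL - (Date.now() - taskStartTime))
// e.g. a 600 s interval with a 90 s run schedules the next run in 510 000 ms.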
/**
@@ -63,18 +68,19 @@ class NetworkSyncService {
let progress = 0;
let deletedSockets = 0;
let deletedRecords = 0;
const graphNodesPubkeys: string[] = [];
for (const node of nodes) {
const latestUpdated = await channelsApi.$getLatestChannelUpdateForNode(node.pub_key);
node.last_update = Math.max(node.last_update, latestUpdated);
node.last_update = Math.max(node.last_update ?? 0, latestUpdated);
await nodesApi.$saveNode(node);
graphNodesPubkeys.push(node.pub_key);
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating node ${progress}/${nodes.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating node ${progress}/${nodes.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
@@ -84,8 +90,23 @@ class NetworkSyncService {
addresses.push(socket.addr);
}
deletedSockets += await NodesSocketsRepository.$deleteUnusedSockets(node.pub_key, addresses);
const oldRecordTypes = await NodeRecordsRepository.$getRecordTypes(node.pub_key);
const customRecordTypes: number[] = [];
for (const [type, payload] of Object.entries(node.custom_records || {})) {
const numericalType = parseInt(type);
await NodeRecordsRepository.$saveRecord({
publicKey: node.pub_key,
type: numericalType,
payload,
});
customRecordTypes.push(numericalType);
}
if (oldRecordTypes.reduce((changed, type) => changed || customRecordTypes.indexOf(type) === -1, false)) {
deletedRecords += await NodeRecordsRepository.$deleteUnusedRecords(node.pub_key, customRecordTypes);
}
}
logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted`);
logger.debug(`${progress} nodes updated. ${deletedSockets} sockets deleted. ${deletedRecords} custom records deleted.`);
// If a node is not present in the graph, mark it as inactive
await nodesApi.$setNodesInactive(graphNodesPubkeys);
@@ -117,18 +138,18 @@ class NetworkSyncService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating channel ${progress}/${channels.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating channel ${progress}/${channels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`${progress} channels updated`);
logger.debug(`${progress} channels updated`, logger.tags.ln);
// If a channel is not present in the graph, mark it as inactive
await channelsApi.$setChannelsInactive(graphChannelsIds);
} catch (e) {
logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.ln);
}
}
@@ -163,44 +184,52 @@ class NetworkSyncService {
if (lowest < node.first_seen) {
const query = `UPDATE nodes SET first_seen = FROM_UNIXTIME(?) WHERE public_key = ?`;
const params = [lowest, node.public_key];
++updated;
await DB.query(query, params);
}
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating node first seen date ${progress}/${nodes.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating node first seen date ${progress}/${nodes.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
++updated;
}
}
logger.info(`Updated ${updated} node first seen dates`);
if (updated > 0) {
logger.debug(`Updated ${updated} node first seen dates`, logger.tags.ln);
}
} catch (e) {
logger.err('$updateNodeFirstSeen() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$updateNodeFirstSeen() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}
private async $lookUpCreationDateFromChain(): Promise<void> {
let progress = 0;
logger.info(`Running channel creation date lookup`);
logger.debug(`Running channel creation date lookup`, logger.tags.ln);
try {
const channels = await channelsApi.$getChannelsWithoutCreatedDate();
for (const channel of channels) {
const transaction = await fundingTxFetcher.$fetchChannelOpenTx(channel.short_id);
if (!transaction) {
continue;
}
await DB.query(`
UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.id = ?`,
[transaction.timestamp, channel.id]
);
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating channel creation date ${progress}/${channels.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating channel creation date ${progress}/${channels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Updated ${channels.length} channels' creation date`);
if (channels.length > 0) {
logger.debug(`Updated ${channels.length} channels' creation date`, logger.tags.ln);
}
} catch (e) {
logger.err('$lookUpCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$lookUpCreationDateFromChain() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}
@@ -209,7 +238,7 @@ class NetworkSyncService {
* mark that channel as inactive
*/
private async $deactivateChannelsWithoutActiveNodes(): Promise<void> {
logger.info(`Find channels whose nodes are offline`);
logger.debug(`Find channels whose nodes are offline`, logger.tags.ln);
try {
const result = await DB.query<ResultSetHeader>(`
@@ -232,12 +261,10 @@ class NetworkSyncService {
`);
if ((result[0].changedRows ?? 0) > 0) {
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
} else {
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`, logger.tags.ln);
}
} catch (e) {
logger.err('$deactivateChannelsWithoutActiveNodes() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$deactivateChannelsWithoutActiveNodes() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}
@@ -256,13 +283,13 @@ class NetworkSyncService {
} else {
log += ` for the first time`;
}
logger.info(log);
logger.info(log, logger.tags.ln);
const channels = await channelsApi.$getChannelsByStatus([0, 1]);
for (const channel of channels) {
const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
logger.debug('Marking channel: ' + channel.id + ' as closed.');
logger.debug(`Marking channel: ${channel.id} as closed.`, logger.tags.ln);
await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
[spendingTx.status.block_time, channel.id]);
if (spendingTx.txid && !channel.closing_transaction_id) {
@@ -272,160 +299,18 @@ class NetworkSyncService {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Checking if channel has been closed ${progress}/${channels.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.info(`Checking if channel has been closed ${progress}/${channels.length}`, logger.tags.ln);
this.loggerTimer = new Date().getTime() / 1000;
}
}
this.closedChannelsScanBlock = blocks.getCurrentBlockHeight();
logger.info(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`);
logger.debug(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`, logger.tags.ln);
} catch (e) {
logger.err('$scanForClosedChannels() error: ' + (e instanceof Error ? e.message : e));
logger.err(`$scanForClosedChannels() error: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}
/*
1. Mutually closed
2. Forced closed
3. Forced closed with penalty
*/
private async $runClosedChannelsForensics(): Promise<void> {
if (!config.ESPLORA.REST_API_URL) {
return;
}
let progress = 0;
try {
logger.info(`Started running closed channel forensics...`);
const channels = await channelsApi.$getClosedChannelsWithoutReason();
for (const channel of channels) {
let reason = 0;
// Only the Esplora backend can retrieve spent transaction outputs
try {
let outspends: IEsploraApi.Outspend[] | undefined;
try {
outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
continue;
}
const lightningScriptReasons: number[] = [];
for (const outspend of outspends) {
if (outspend.spent && outspend.txid) {
let spendingTx: IEsploraApi.Transaction | undefined;
try {
spendingTx = await bitcoinApi.$getRawTransaction(outspend.txid);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + outspend.txid}. Reason ${e instanceof Error ? e.message : e}`);
continue;
}
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
lightningScriptReasons.push(lightningScript);
}
}
if (lightningScriptReasons.length === outspends.length
&& lightningScriptReasons.filter((r) => r === 1).length === outspends.length) {
reason = 1;
} else {
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
if (filteredReasons.length) {
if (filteredReasons.some((r) => r === 2 || r === 4)) {
reason = 3;
} else {
reason = 2;
}
} else {
/*
We can detect a commitment transaction (force close) by reading Sequence and Locktime
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
*/
let closingTx: IEsploraApi.Transaction | undefined;
try {
closingTx = await bitcoinApi.$getRawTransaction(channel.closing_transaction_id);
} catch (e) {
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id}. Reason ${e instanceof Error ? e.message : e}`);
continue;
}
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
const locktimeHex: string = closingTx.locktime.toString(16);
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
reason = 2; // Here we can't be sure if it's a penalty or not
} else {
reason = 1;
}
}
}
if (reason) {
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
}
} catch (e) {
logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? e.message : e}`);
}
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating closed channel forensics ${progress}/${channels.length}`);
this.loggerTimer = new Date().getTime() / 1000;
}
}
logger.info(`Closed channels forensics scan complete.`);
} catch (e) {
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
}
}
private findLightningScript(vin: IEsploraApi.Vin): number {
const topElement = vin.witness[vin.witness.length - 2];
if (/^OP_IF OP_PUSHBYTES_33 \w{66} OP_ELSE OP_PUSH(NUM_\d+|BYTES_(1 \w{2}|2 \w{4})) OP_CSV OP_DROP OP_PUSHBYTES_33 \w{66} OP_ENDIF OP_CHECKSIG$/.test(vin.inner_witnessscript_asm)) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction-outputs
if (topElement === '01') {
// top element is '01' to take the revocation path
// 'Revoked Lightning Force Close';
// Penalty force closed
return 2;
} else {
// top element is '', this is a delayed to_local output
// 'Lightning Force Close';
return 3;
}
} else if (
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_NOTIF OP_DROP OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm) ||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_IF OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_DROP OP_PUSHBYTES_3 \w{6} OP_CLTV OP_DROP OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm)
) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#offered-htlc-outputs
// https://github.com/lightning/bolts/blob/master/03-transactions.md#received-htlc-outputs
if (topElement.length === 66) {
// top element is a public key
// 'Revoked Lightning HTLC'; Penalty force closed
return 4;
} else if (topElement) {
// top element is a preimage
// 'Lightning HTLC';
return 5;
} else {
// top element is '' to spend through the expiry path of the script
// 'Expired Lightning HTLC';
return 6;
}
} else if (/^OP_PUSHBYTES_33 \w{66} OP_CHECKSIG OP_IFDUP OP_NOTIF OP_PUSHNUM_16 OP_CSV OP_ENDIF$/.test(vin.inner_witnessscript_asm)) {
// https://github.com/lightning/bolts/blob/master/03-transactions.md#to_local_anchor-and-to_remote_anchor-output-option_anchors
if (topElement) {
// top element is a signature
// 'Lightning Anchor';
return 7;
} else {
// top element is '', it has been swept after 16 blocks
// 'Swept Lightning Anchor';
return 8;
}
}
return 1;
}
}
export default new NetworkSyncService();
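The numeric codes threaded through `$runClosedChannelsForensics` and `findLightningScript` above are easy to lose track of. Here is a minimal sketch that names them; the enum identifiers are hypothetical (the service itself compares bare integers), and the last function simply restates the BOLT #3 sequence/locktime heuristic exactly as the code above applies it:

```ts
// Hypothetical names for the closing_reason codes written to the channels table.
enum ChannelClosingReason {
  Mutual = 1,
  Force = 2,
  ForceWithPenalty = 3,
}

// Hypothetical names for the values returned by findLightningScript().
enum LightningScriptType {
  Other = 1,              // no known LN script template matched (treated as a mutual-close spend)
  RevokedForceClose = 2,  // to_local output swept through the revocation path
  ForceClose = 3,         // delayed to_local output
  RevokedHtlc = 4,        // HTLC swept through the revocation path
  Htlc = 5,               // HTLC claimed with a preimage
  ExpiredHtlc = 6,        // HTLC claimed after expiry
  Anchor = 7,             // anchor output spent with a signature
  SweptAnchor = 8,        // anchor output swept after 16 blocks
}

// BOLT #3 commitment transactions carry obscured commitment numbers in the
// upper bits: the sequence starts with 0x80 and the locktime with 0x20, which
// is what the hex-prefix check in $runClosedChannelsForensics looks for.
function looksLikeCommitmentTx(sequence: number, locktime: number): boolean {
  return sequence.toString(16).substring(0, 2) === '80'
    && locktime.toString(16).substring(0, 2) === '20';
}
```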

View File

@@ -6,7 +6,7 @@ import { Common } from '../../api/common';
class LightningStatsUpdater {
public async $startService(): Promise<void> {
logger.info('Starting Lightning Stats service');
logger.info(`Starting Lightning Stats service`, logger.tags.ln);
await this.$runTasks();
LightningStatsImporter.$run();
@@ -27,7 +27,7 @@ class LightningStatsUpdater {
const networkGraph = await lightningApi.$getNetworkGraph();
await LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
logger.info(`Updated latest network stats`);
logger.debug(`Updated latest network stats`, logger.tags.ln);
}
}

View File

@@ -21,10 +21,10 @@ class FundingTxFetcher {
try {
this.fundingTxCache = JSON.parse(await fsPromises.readFile(CACHE_FILE_NAME, 'utf-8'));
} catch (e) {
logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`);
logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`, logger.tags.ln);
this.fundingTxCache = {};
}
logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amounts from the disk cache`);
logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amounts from the disk cache`, logger.tags.ln);
}
}
@@ -44,33 +44,34 @@ class FundingTxFetcher {
++channelProcessed;
let elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
elapsedSeconds = Math.round((new Date().getTime() / 1000) - globalTimer);
logger.info(`Indexing channels funding tx ${channelProcessed + 1} of ${channelIds.length} ` +
`(${Math.floor(channelProcessed / channelIds.length * 10000) / 100}%) | ` +
`elapsed: ${elapsedSeconds} seconds`
`elapsed: ${elapsedSeconds} seconds`,
logger.tags.ln
);
loggerTimer = new Date().getTime() / 1000;
}
elapsedSeconds = Math.round((new Date().getTime() / 1000) - cacheTimer);
if (elapsedSeconds > 60) {
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`, logger.tags.ln);
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
cacheTimer = new Date().getTime() / 1000;
}
}
if (this.channelNewlyProcessed > 0) {
logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`);
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`, logger.tags.ln);
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`, logger.tags.ln);
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
}
this.running = false;
}
public async $fetchChannelOpenTx(channelId: string): Promise<{timestamp: number, txid: string, value: number}> {
public async $fetchChannelOpenTx(channelId: string): Promise<{timestamp: number, txid: string, value: number} | null> {
channelId = Common.channelIntegerIdToShortId(channelId);
if (this.fundingTxCache[channelId]) {
@@ -101,6 +102,11 @@ class FundingTxFetcher {
const rawTx = await bitcoinClient.getRawTransaction(txid);
const tx = await bitcoinClient.decodeRawTransaction(rawTx);
if (!tx || !tx.vout || tx.vout.length < parseInt(outputIdx, 10) + 1 || tx.vout[outputIdx].value === undefined) {
logger.err(`Cannot find blockchain funding tx for channel id ${channelId}. Possible reasons: bitcoin backend timeout, or the channel shortId is not valid`);
return null;
}
this.fundingTxCache[channelId] = {
timestamp: block.time,
txid: txid,

View File

@@ -6,6 +6,7 @@ import DB from '../../../database';
import logger from '../../../logger';
import { ResultSetHeader } from 'mysql2';
import * as IPCheck from '../../../utils/ipcheck.js';
import { Reader } from 'mmdb-lib';
export async function $lookupNodeLocation(): Promise<void> {
let loggerTimer = new Date().getTime() / 1000;
@@ -13,12 +14,15 @@ export async function $lookupNodeLocation(): Promise<void> {
let nodesUpdated = 0;
let geoNamesInserted = 0;
logger.info(`Running node location updater using Maxmind`);
logger.debug(`Running node location updater using Maxmind`, logger.tags.ln);
try {
const nodes = await nodesApi.$getAllNodes();
const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
const lookupAsn = await maxmind.open<AsnResponse>(config.MAXMIND.GEOLITE2_ASN);
const lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
let lookupIsp: Reader<IspResponse> | null = null;
try {
lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
} catch (e) { /* the GeoIP2 ISP database is optional; continue without it */ }
for (const node of nodes) {
const sockets: string[] = node.sockets.split(',');
@@ -29,7 +33,10 @@ export async function $lookupNodeLocation(): Promise<void> {
if (hasClearnet && ip !== '127.0.1.1' && ip !== '127.0.0.1') {
const city = lookupCity.get(ip);
const asn = lookupAsn.get(ip);
const isp = lookupIsp.get(ip);
let isp: IspResponse | null = null;
if (lookupIsp) {
isp = lookupIsp.get(ip);
}
let asOverwrite: any | undefined;
if (asn && (IPCheck.match(ip, '170.75.160.0/20') || IPCheck.match(ip, '172.81.176.0/21'))) {
@@ -145,8 +152,8 @@ export async function $lookupNodeLocation(): Promise<void> {
++progress;
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
if (elapsedSeconds > 10) {
logger.info(`Updating node location data ${progress}/${nodes.length}`);
if (elapsedSeconds > config.LIGHTNING.LOGGER_UPDATE_INTERVAL) {
logger.debug(`Updating node location data ${progress}/${nodes.length}`);
loggerTimer = new Date().getTime() / 1000;
}
}
@@ -154,9 +161,7 @@ export async function $lookupNodeLocation(): Promise<void> {
}
if (nodesUpdated > 0) {
logger.info(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
} else {
logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`, logger.tags.ln);
}
} catch (e) {
logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));

View File

@@ -8,7 +8,6 @@ import { isIP } from 'net';
import { Common } from '../../../api/common';
import channelsApi from '../../../api/explorer/channels.api';
import nodesApi from '../../../api/explorer/nodes.api';
import { ResultSetHeader } from 'mysql2';
const fsPromises = promises;
@@ -17,7 +16,7 @@ class LightningStatsImporter {
async $run(): Promise<void> {
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
logger.info('Caching funding txs for currently existing channels');
logger.info(`Caching funding txs for currently existing channels`, logger.tags.ln);
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
@@ -108,7 +107,7 @@ class LightningStatsImporter {
const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
if (!tx) {
logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date are unknown. Skipping channel.`);
logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date are unknown. Skipping channel.`, logger.tags.ln);
continue;
}
@@ -310,13 +309,18 @@ class LightningStatsImporter {
* Import topology files LN historical data into the database
*/
async $importHistoricalLightningStats(): Promise<void> {
if (!config.LIGHTNING.TOPOLOGY_FOLDER) {
logger.info(`Lightning topology folder is not set. Not importing historical LN stats`);
return;
}
logger.debug('Run the historical importer');
try {
let fileList: string[] = [];
try {
fileList = await fsPromises.readdir(this.topologiesFolder);
} catch (e) {
logger.err(`Unable to open topology folder at ${this.topologiesFolder}`);
logger.err(`Unable to open topology folder at ${this.topologiesFolder}`, logger.tags.ln);
throw e;
}
// Insert history from the most recent to the oldest
@@ -354,7 +358,7 @@ class LightningStatsImporter {
continue;
}
logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
logger.debug(`Reading ${this.topologiesFolder}/${filename}`, logger.tags.ln);
let fileContent = '';
try {
fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
@@ -363,7 +367,7 @@ class LightningStatsImporter {
totalProcessed++;
continue;
}
logger.err(`Unable to open ${this.topologiesFolder}/${filename}`);
logger.err(`Unable to open ${this.topologiesFolder}/${filename}`, logger.tags.ln);
totalProcessed++;
continue;
}
@@ -373,7 +377,7 @@ class LightningStatsImporter {
graph = JSON.parse(fileContent);
graph = await this.cleanupTopology(graph);
} catch (e) {
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`);
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
totalProcessed++;
continue;
}
@@ -385,20 +389,20 @@ class LightningStatsImporter {
}
if (!logStarted) {
logger.info(`Found a topology file that we did not import. Importing historical lightning stats now.`);
logger.info(`Found a topology file that we did not import. Importing historical lightning stats now.`, logger.tags.ln);
logStarted = true;
}
const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`, logger.tags.ln);
totalProcessed++;
if (processed > 10) {
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`, logger.tags.ln);
processed = 0;
} else {
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`, logger.tags.ln);
}
await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
const stat = await this.computeNetworkStats(timestamp, graph, true);
@@ -407,10 +411,10 @@ class LightningStatsImporter {
}
if (totalProcessed > 0) {
logger.info(`Lightning network stats historical import completed`);
logger.info(`Lightning network stats historical import completed`, logger.tags.ln);
}
} catch (e) {
logger.err(`Lightning network stats historical import failed. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Lightning network stats historical import failed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.ln);
}
}

View File

@@ -8,16 +8,18 @@ import { SocksProxyAgent } from 'socks-proxy-agent';
import * as https from 'https';
/**
* Maintain the most recent version of pools.json
* Maintain the most recent version of pools-v2.json
*/
class PoolsUpdater {
lastRun: number = 0;
currentSha: string | undefined = undefined;
currentSha: string | null = null;
poolsUrl: string = config.MEMPOOL.POOLS_JSON_URL;
treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;
public async updatePoolsJson(): Promise<void> {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false ||
config.MEMPOOL.ENABLED === false
) {
return;
}
@@ -31,15 +33,9 @@ class PoolsUpdater {
this.lastRun = now;
if (config.SOCKS5PROXY.ENABLED) {
logger.info(`Updating latest mining pools from ${this.poolsUrl} over the Tor network`);
} else {
logger.info(`Updating latest mining pools from ${this.poolsUrl} over clearnet`);
}
try {
const githubSha = await this.fetchPoolsSha(); // Fetch pools.json sha from github
if (githubSha === undefined) {
const githubSha = await this.fetchPoolsSha(); // Fetch pools-v2.json sha from github
if (githubSha === null) {
return;
}
@@ -47,32 +43,57 @@ class PoolsUpdater {
this.currentSha = await this.getShaFromDb();
}
logger.debug(`Pools.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
if (this.currentSha !== undefined && this.currentSha === githubSha) {
logger.debug(`pools-v2.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
if (this.currentSha !== null && this.currentSha === githubSha) {
return;
}
if (this.currentSha === undefined) {
logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`);
// See backend README for more details about the mining pools update process
if (this.currentSha !== null && // If we don't have any mining pool, download it at least once
config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING !== true && // Automatic pools update is disabled
!process.env.npm_config_update_pools // We're not manually updating mining pools
) {
logger.warn(`Updated mining pools data is available (${githubSha}) but AUTOMATIC_BLOCK_REINDEXING is disabled`);
logger.info(`You can update your mining pools using the --update-pools command flag. You may want to clear your nginx cache as well if applicable`);
return;
}
const network = config.SOCKS5PROXY.ENABLED ? 'tor' : 'clearnet';
if (this.currentSha === null) {
logger.info(`Downloading pools-v2.json for the first time from ${this.poolsUrl} over ${network}`, logger.tags.mining);
} else {
logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`);
logger.warn(`pools-v2.json is outdated, fetch latest from ${this.poolsUrl} over ${network}`, logger.tags.mining);
}
const poolsJson = await this.query(this.poolsUrl);
if (poolsJson === undefined) {
return;
}
await poolsParser.migratePoolsJson(poolsJson);
await this.updateDBSha(githubSha);
logger.notice('PoolsUpdater completed');
poolsParser.setMiningPools(poolsJson);
if (config.DATABASE.ENABLED === false) { // Don't run db operations
logger.info('Mining pools-v2.json import completed (no database)');
return;
}
try {
await DB.query('START TRANSACTION;');
await poolsParser.migratePoolsJson();
await this.updateDBSha(githubSha);
await DB.query('COMMIT;');
} catch (e) {
logger.err(`Could not migrate mining pools, rolling back. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
await DB.query('ROLLBACK;');
}
logger.info('PoolsUpdater completed');
} catch (e) {
this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
logger.err('PoolsUpdater failed. Will try again in 24h. Reason: ' + (e instanceof Error ? e.message : e));
logger.err(`PoolsUpdater failed. Will try again in 24h. Exception: ${JSON.stringify(e)}`, logger.tags.mining);
}
}
/**
* Fetch our latest pools.json sha from the db
* Save the latest pools-v2.json sha into the db
*/
private async updateDBSha(githubSha: string): Promise<void> {
this.currentSha = githubSha;
@@ -81,46 +102,46 @@ class PoolsUpdater {
await DB.query('DELETE FROM state where name="pools_json_sha"');
await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
} catch (e) {
logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e));
logger.err('Cannot save github pools-v2.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
}
}
}
/**
* Fetch our latest pools.json sha from the db
* Fetch our latest pools-v2.json sha from the db
*/
private async getShaFromDb(): Promise<string | undefined> {
private async getShaFromDb(): Promise<string | null> {
try {
const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
return (rows.length > 0 ? rows[0].string : undefined);
return (rows.length > 0 ? rows[0].string : null);
} catch (e) {
logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e));
return undefined;
logger.err('Cannot fetch pools-v2.json sha from db. Reason: ' + (e instanceof Error ? e.message : e), logger.tags.mining);
return null;
}
}
/**
* Fetch our latest pools.json sha from github
* Fetch our latest pools-v2.json sha from github
*/
private async fetchPoolsSha(): Promise<string | undefined> {
private async fetchPoolsSha(): Promise<string | null> {
const response = await this.query(this.treeUrl);
if (response !== undefined) {
for (const file of response['tree']) {
if (file['path'] === 'pools.json') {
if (file['path'] === 'pools-v2.json') {
return file['sha'];
}
}
}
logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`);
return undefined;
logger.err(`Cannot find "pools-v2.json" in git tree (${this.treeUrl})`, logger.tags.mining);
return null;
}
/**
* Http request wrapper
*/
private async query(path): Promise<object | undefined> {
private async query(path): Promise<any[] | undefined> {
type axiosOptions = {
headers: {
'User-Agent': string

View File

@@ -8,12 +8,13 @@ class BitfinexApi implements PriceFeed {
public url: string = 'https://api.bitfinex.com/v1/pubticker/BTC';
public urlHist: string = 'https://api-pub.bitfinex.com/v2/candles/trade:{GRANULARITY}:tBTC{CURRENCY}/hist';
constructor() {
}
public async $fetchPrice(currency): Promise<number> {
const response = await query(this.url + currency);
return response ? parseInt(response['last_price'], 10) : -1;
if (response && response['last_price']) {
return parseInt(response['last_price'], 10);
} else {
return -1;
}
}
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {

View File

@@ -13,7 +13,11 @@ class BitflyerApi implements PriceFeed {
public async $fetchPrice(currency): Promise<number> {
const response = await query(this.url + currency);
return response ? parseInt(response['ltp'], 10) : -1;
if (response && response['ltp']) {
return parseInt(response['ltp'], 10);
} else {
return -1;
}
}
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {

View File

@@ -13,7 +13,11 @@ class CoinbaseApi implements PriceFeed {
public async $fetchPrice(currency): Promise<number> {
const response = await query(this.url + currency);
return response ? parseInt(response['data']['amount'], 10) : -1;
if (response && response['data'] && response['data']['amount']) {
return parseInt(response['data']['amount'], 10);
} else {
return -1;
}
}
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {

View File

@@ -1,43 +0,0 @@
import { query } from '../../utils/axios-query';
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';
class FtxApi implements PriceFeed {
public name: string = 'FTX';
public currencies: string[] = ['USD', 'BRZ', 'EUR', 'JPY', 'AUD'];
public url: string = 'https://ftx.com/api/markets/BTC/';
public urlHist: string = 'https://ftx.com/api/markets/BTC/{CURRENCY}/candles?resolution={GRANULARITY}';
constructor() {
}
public async $fetchPrice(currency): Promise<number> {
const response = await query(this.url + currency);
return response ? parseInt(response['result']['last'], 10) : -1;
}
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
const priceHistory: PriceHistory = {};
for (const currency of currencies) {
if (this.currencies.includes(currency) === false) {
continue;
}
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '3600' : '86400').replace('{CURRENCY}', currency));
const pricesRaw = response ? response['result'] : [];
for (const price of pricesRaw as any[]) {
const time = Math.round(price['time'] / 1000);
if (priceHistory[time] === undefined) {
priceHistory[time] = priceUpdater.getEmptyPricesObj();
}
priceHistory[time][currency] = price['close'];
}
}
return priceHistory;
}
}
export default FtxApi;

View File

@@ -13,7 +13,11 @@ class GeminiApi implements PriceFeed {
public async $fetchPrice(currency): Promise<number> {
const response = await query(this.url + currency);
return response ? parseInt(response['last'], 10) : -1;
if (response && response['last']) {
return parseInt(response['last'], 10);
} else {
return -1;
}
}
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {

View File

@@ -23,7 +23,14 @@ class KrakenApi implements PriceFeed {
public async $fetchPrice(currency): Promise<number> {
const response = await query(this.url + currency);
return response ? parseInt(response['result'][this.getTicker(currency)]['c'][0], 10) : -1;
const ticker = this.getTicker(currency);
if (response && response['result'] && response['result'][ticker] &&
response['result'][ticker]['c'] && response['result'][ticker]['c'].length > 0
) {
return parseInt(response['result'][ticker]['c'][0], 10);
} else {
return -1;
}
}
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
@@ -91,7 +98,7 @@ class KrakenApi implements PriceFeed {
}
if (Object.keys(priceHistory).length > 0) {
logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`);
logger.info(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`, logger.tags.mining);
}
}
}
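Every feed's `$fetchPrice` in the hunks above was hardened the same way: verify each level of the response object before calling `parseInt`, and fall back to the `-1` sentinel otherwise. A sketch of that shared guard as a single hypothetical helper (the feeds inline their checks, and this version is slightly stricter in that it also rejects `NaN`):

```ts
// Walk a nested response path; return the -1 sentinel when any hop is missing.
function extractPrice(response: unknown, path: (string | number)[]): number {
  let value: any = response;
  for (const key of path) {
    if (value === null || value === undefined) {
      return -1;
    }
    value = value[key];
  }
  const price = typeof value === 'string' ? parseInt(value, 10) : Number(value);
  return Number.isFinite(price) && price > 0 ? price : -1;
}

// Usage, mirroring the feeds above (hypothetical -- each feed inlines this):
//   Kraken:   extractPrice(response, ['result', ticker, 'c', 0])
//   Coinbase: extractPrice(response, ['data', 'amount'])
//   Bitfinex: extractPrice(response, ['last_price'])
```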

View File

@@ -1,13 +1,11 @@
import * as fs from 'fs';
import path from "path";
import { Common } from '../api/common';
import path from 'path';
import config from '../config';
import logger from '../logger';
import PricesRepository from '../repositories/PricesRepository';
import PricesRepository, { ApiPrice, MAX_PRICES } from '../repositories/PricesRepository';
import BitfinexApi from './price-feeds/bitfinex-api';
import BitflyerApi from './price-feeds/bitflyer-api';
import CoinbaseApi from './price-feeds/coinbase-api';
import FtxApi from './price-feeds/ftx-api';
import GeminiApi from './price-feeds/gemini-api';
import KrakenApi from './price-feeds/kraken-api';
@@ -22,41 +20,36 @@ export interface PriceFeed {
}
export interface PriceHistory {
[timestamp: number]: Prices;
}
export interface Prices {
USD: number;
EUR: number;
GBP: number;
CAD: number;
CHF: number;
AUD: number;
JPY: number;
[timestamp: number]: ApiPrice;
}
class PriceUpdater {
public historyInserted = false;
lastRun = 0;
lastHistoricalRun = 0;
running = false;
feeds: PriceFeed[] = [];
currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
latestPrices: Prices;
private lastRun = 0;
private lastHistoricalRun = 0;
private running = false;
private feeds: PriceFeed[] = [];
private currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
private latestPrices: ApiPrice;
private ratesChangedCallback: ((rates: ApiPrice) => void) | undefined;
constructor() {
this.latestPrices = this.getEmptyPricesObj();
this.feeds.push(new BitflyerApi()); // Does not have historical endpoint
this.feeds.push(new FtxApi());
this.feeds.push(new KrakenApi());
this.feeds.push(new CoinbaseApi());
this.feeds.push(new BitfinexApi());
this.feeds.push(new GeminiApi());
}
public getEmptyPricesObj(): Prices {
public getLatestPrices(): ApiPrice {
return this.latestPrices;
}
public getEmptyPricesObj(): ApiPrice {
return {
time: 0,
USD: -1,
EUR: -1,
GBP: -1,
@@ -67,7 +60,24 @@ class PriceUpdater {
};
}
public setRatesChangedCallback(fn: (rates: ApiPrice) => void): void {
this.ratesChangedCallback = fn;
}
/**
* We execute this function before the websocket initialization since
* the websocket init is not done asynchronously
*/
public async $initializeLatestPriceWithDb(): Promise<void> {
this.latestPrices = await PricesRepository.$getLatestConversionRates();
}
public async $run(): Promise<void> {
if (config.MEMPOOL.NETWORK === 'signet' || config.MEMPOOL.NETWORK === 'testnet') {
// Coins have no value on testnet/signet, so we want to always show 0
return;
}
if (this.running === true) {
return;
}
@@ -79,13 +89,12 @@ class PriceUpdater {
}
try {
await this.$updatePrice();
if (this.historyInserted === false && config.DATABASE.ENABLED === true) {
await this.$insertHistoricalPrices();
} else {
await this.$updatePrice();
}
} catch (e) {
logger.err(`Cannot save BTC prices in db. Reason: ${e instanceof Error ? e.message : e}`);
} catch (e: any) {
logger.err(`Cannot save BTC prices in db. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
}
this.running = false;
@@ -115,22 +124,26 @@ class PriceUpdater {
if (feed.currencies.includes(currency)) {
try {
const price = await feed.$fetchPrice(currency);
if (price > 0) {
if (price > -1 && price < MAX_PRICES[currency]) {
prices.push(price);
}
logger.debug(`${feed.name} BTC/${currency} price: ${price}`);
logger.debug(`${feed.name} BTC/${currency} price: ${price}`, logger.tags.mining);
} catch (e) {
logger.debug(`Could not fetch BTC/${currency} price at ${feed.name}. Reason: ${(e instanceof Error ? e.message : e)}`);
logger.debug(`Could not fetch BTC/${currency} price at ${feed.name}. Reason: ${(e instanceof Error ? e.message : e)}`, logger.tags.mining);
}
}
}
if (prices.length === 1) {
logger.debug(`Only ${prices.length} feed available for BTC/${currency} price`);
logger.debug(`Only ${prices.length} feed available for BTC/${currency} price`, logger.tags.mining);
}
// Compute average price, non weighted
prices = prices.filter(price => price > 0);
this.latestPrices[currency] = Math.round((prices.reduce((partialSum, a) => partialSum + a, 0)) / prices.length);
if (prices.length === 0) {
this.latestPrices[currency] = -1;
} else {
this.latestPrices[currency] = Math.round((prices.reduce((partialSum, a) => partialSum + a, 0)) / prices.length);
}
}
logger.info(`Latest BTC fiat averaged price: ${JSON.stringify(this.latestPrices)}`);
@@ -147,7 +160,15 @@ class PriceUpdater {
}
}
if (this.ratesChangedCallback) {
this.ratesChangedCallback(this.latestPrices);
}
this.lastRun = new Date().getTime() / 1000;
if (this.latestPrices.USD === -1) {
this.latestPrices = await PricesRepository.$getLatestConversionRates();
}
}
/**
@@ -178,9 +199,9 @@ class PriceUpdater {
++insertedCount;
}
if (insertedCount > 0) {
logger.notice(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
logger.notice(`Inserted ${insertedCount} MtGox USD weekly price history into db`, logger.tags.mining);
} else {
logger.debug(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
logger.debug(`Inserted ${insertedCount} MtGox USD weekly price history into db`, logger.tags.mining);
}
// Insert Kraken weekly prices
@@ -201,7 +222,7 @@ class PriceUpdater {
private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
const existingPriceTimes = await PricesRepository.$getPricesTimes();
logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database, this may take a while`);
logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database`, logger.tags.mining);
const historicalPrices: PriceHistory[] = [];
@@ -210,13 +231,13 @@ class PriceUpdater {
try {
historicalPrices.push(await feed.$fetchRecentPrice(this.currencies, type));
} catch (e) {
logger.err(`Cannot fetch hourly historical price from ${feed.name}. Ignoring this feed. Reason: ${e instanceof Error ? e.message : e}`);
logger.err(`Cannot fetch hourly historical price from ${feed.name}. Ignoring this feed. Reason: ${e instanceof Error ? e.message : e}`, logger.tags.mining);
}
}
// Group them by timestamp and currency, for example
// grouped[123456789]['USD'] = [1, 2, 3, 4];
const grouped: Object = {};
const grouped = {};
for (const historicalEntry of historicalPrices) {
for (const time in historicalEntry) {
if (existingPriceTimes.includes(parseInt(time, 10))) {
@@ -231,8 +252,8 @@ class PriceUpdater {
for (const currency of this.currencies) {
const price = historicalEntry[time][currency];
if (price > 0) {
grouped[time][currency].push(parseInt(price, 10));
if (price > -1 && price < MAX_PRICES[currency]) {
grouped[time][currency].push(typeof price === 'string' ? parseInt(price, 10) : price);
}
}
}
@@ -241,7 +262,7 @@ class PriceUpdater {
// Average prices and insert everything into the db
let totalInserted = 0;
for (const time in grouped) {
const prices: Prices = this.getEmptyPricesObj();
const prices: ApiPrice = this.getEmptyPricesObj();
for (const currency in grouped[time]) {
if (grouped[time][currency].length === 0) {
continue;
@@ -255,9 +276,9 @@ class PriceUpdater {
}
if (totalInserted > 0) {
logger.notice(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
logger.notice(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`, logger.tags.mining);
} else {
logger.debug(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
logger.debug(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`, logger.tags.mining);
}
}
}

View File

@@ -1,33 +0,0 @@
import { BlockExtended } from '../mempool.interfaces';
export function prepareBlock(block: any): BlockExtended {
return <BlockExtended>{
id: block.id ?? block.hash, // hash for indexed block
timestamp: block.timestamp ?? block.time ?? block.blockTimestamp, // blockTimestamp for indexed block
height: block.height,
version: block.version,
bits: (typeof block.bits === 'string' ? parseInt(block.bits, 16): block.bits),
nonce: block.nonce,
difficulty: block.difficulty,
merkle_root: block.merkle_root ?? block.merkleroot,
tx_count: block.tx_count ?? block.nTx,
size: block.size,
weight: block.weight,
previousblockhash: block.previousblockhash,
extras: {
coinbaseRaw: block.coinbase_raw ?? block.extras?.coinbaseRaw,
medianFee: block.medianFee ?? block.median_fee ?? block.extras?.medianFee,
feeRange: block.feeRange ?? block.fee_span,
reward: block.reward ?? block?.extras?.reward,
totalFees: block.totalFees ?? block?.fees ?? block?.extras?.totalFees,
avgFee: block?.extras?.avgFee ?? block.avg_fee,
avgFeeRate: block?.avgFeeRate ?? block.avg_fee_rate,
pool: block?.extras?.pool ?? (block?.pool_id ? {
id: block.pool_id,
name: block.pool_name,
slug: block.pool_slug,
} : undefined),
usd: block?.extras?.usd ?? block.usd ?? null,
}
};
}

View File

@@ -0,0 +1,14 @@
// simple recursive deep clone for literal-type objects
// does not preserve Dates, Maps, Sets etc
// does not support recursive objects
// properties deeper than maxDepth will be shallow cloned
export function deepClone(obj: any, maxDepth: number = 50, depth: number = 0): any {
let cloned = obj;
if (depth < maxDepth && obj !== null && typeof obj === 'object') {
cloned = Array.isArray(obj) ? [] : {};
for (const key in obj) {
cloned[key] = deepClone(obj[key], maxDepth, depth + 1);
}
}
return cloned;
}
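A short usage sketch of the limitations called out in the comments above. Nested plain objects and arrays come back as independent copies, but a `Date` degrades to an empty plain object, which is the trade-off the comment warns about:

```ts
const src = { a: 1, nested: { b: [2, 3] }, when: new Date() };
const copy = deepClone(src);

copy.nested.b.push(4);
console.log(src.nested.b);              // [2, 3] -- the nested array was cloned

console.log(copy.when instanceof Date); // false -- Dates are not preserved
```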

View File

@@ -0,0 +1,29 @@
const byteUnits = ['B', 'kB', 'MB', 'GB', 'TB'];
export function getBytesUnit(bytes: number): string {
if (isNaN(bytes) || !isFinite(bytes)) {
return 'B';
}
let unitIndex = 0;
while (unitIndex < byteUnits.length - 1 && bytes > 1024) { // stop at the largest unit
unitIndex++;
bytes /= 1024;
}
return byteUnits[unitIndex];
}
export function formatBytes(bytes: number, toUnit: string, skipUnit = false): string {
if (isNaN(bytes) || !isFinite(bytes)) {
return `${bytes}`;
}
let unitIndex = 0;
while (unitIndex < byteUnits.length - 1 && (toUnit && byteUnits[unitIndex] !== toUnit || (!toUnit && bytes > 1024))) { // never step past the largest unit
unitIndex++;
bytes /= 1024;
}
return `${bytes.toFixed(2)}${skipUnit ? '' : ' ' + byteUnits[unitIndex]}`;
}
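A quick sketch of how the two helpers compose (an assumed use case; the utilities themselves are generic): pick one unit for a whole series with `getBytesUnit`, then format every sample against it so the values stay directly comparable.

```ts
const samples = [512, 2048, 3 * 1024 * 1024]; // bytes

// Pick a single unit based on the largest value in the series.
const unit = getBytesUnit(Math.max(...samples)); // 'MB'

for (const s of samples) {
  console.log(formatBytes(s, unit)); // '0.00 MB', '0.00 MB', '3.00 MB'
}

// Or let a value choose its own unit by passing an empty target unit.
console.log(formatBytes(1536, '')); // '1.50 kB'
```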

View File

@@ -0,0 +1,174 @@
export type HeapNode<T> = {
element: T
child?: HeapNode<T>
next?: HeapNode<T>
prev?: HeapNode<T>
} | null | undefined;
// minimal pairing heap priority queue implementation
export class PairingHeap<T> {
private root: HeapNode<T> = null;
private comparator: (a: T, b: T) => boolean;
// comparator function should return 'true' if a is higher priority than b
constructor(comparator: (a: T, b: T) => boolean) {
this.comparator = comparator;
}
isEmpty(): boolean {
return !this.root;
}
add(element: T): HeapNode<T> {
const node: HeapNode<T> = {
element
};
this.root = this.meld(this.root, node);
return node;
}
// returns the top priority element without modifying the queue
peek(): T | void {
return this.root?.element;
}
// removes and returns the top priority element
pop(): T | void {
let element;
if (this.root) {
const node = this.root;
element = node.element;
this.root = this.mergePairs(node.child);
}
return element;
}
deleteNode(node: HeapNode<T>): void {
if (!node) {
return;
}
if (node === this.root) {
this.root = this.mergePairs(node.child);
}
else {
if (node.prev) {
if (node.prev.child === node) {
node.prev.child = node.next;
}
else {
node.prev.next = node.next;
}
}
if (node.next) {
node.next.prev = node.prev;
}
this.root = this.meld(this.root, this.mergePairs(node.child));
}
node.child = null;
node.prev = null;
node.next = null;
}
// fix the heap after increasing the priority of a given node
increasePriority(node: HeapNode<T>): void {
// already the top priority element
if (!node || node === this.root) {
return;
}
// extract from siblings
if (node.prev) {
if (node.prev?.child === node) {
if (this.comparator(node.prev.element, node.element)) {
// already in a valid position
return;
}
node.prev.child = node.next;
}
else {
node.prev.next = node.next;
}
}
if (node.next) {
node.next.prev = node.prev;
}
this.root = this.meld(this.root, node);
}
decreasePriority(node: HeapNode<T>): void {
this.deleteNode(node);
this.root = this.meld(this.root, node);
}
meld(a: HeapNode<T>, b: HeapNode<T>): HeapNode<T> {
if (!a) {
return b;
}
if (!b || a === b) {
return a;
}
let parent: HeapNode<T> = b;
let child: HeapNode<T> = a;
if (this.comparator(a.element, b.element)) {
parent = a;
child = b;
}
child.next = parent.child;
if (parent.child) {
parent.child.prev = child;
}
child.prev = parent;
parent.child = child;
parent.next = null;
parent.prev = null;
return parent;
}
mergePairs(node: HeapNode<T>): HeapNode<T> {
if (!node) {
return null;
}
let current: HeapNode<T> = node;
let next: HeapNode<T>;
let nextCurrent: HeapNode<T>;
let pairs: HeapNode<T>;
let melded: HeapNode<T>;
while (current) {
next = current.next;
if (next) {
nextCurrent = next.next;
melded = this.meld(current, next);
if (melded) {
melded.prev = pairs;
}
pairs = melded;
}
else {
nextCurrent = null;
current.prev = pairs;
pairs = current;
break;
}
current = nextCurrent;
}
melded = null;
let prev: HeapNode<T>;
while (pairs) {
prev = pairs.prev;
melded = this.meld(melded, pairs);
pairs = prev;
}
return melded;
}
}
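A small usage sketch, assuming a fee-rate comparator (priority ordering is left entirely to the caller). `add` returns the internal heap node, which is what makes the in-place `increasePriority` update possible without a search:

```ts
type Tx = { txid: string; feeRate: number };

// The comparator returns true when `a` outranks `b`.
const heap = new PairingHeap<Tx>((a, b) => a.feeRate > b.feeRate);

heap.add({ txid: 'aa', feeRate: 5 });
const node = heap.add({ txid: 'bb', feeRate: 2 });
heap.add({ txid: 'cc', feeRate: 9 });

console.log(heap.peek()); // { txid: 'cc', feeRate: 9 }

// Bump a queued element and restore the heap property in place.
if (node) {
  node.element.feeRate = 20;
  heap.increasePriority(node);
}

console.log(heap.pop()); // { txid: 'bb', feeRate: 20 }
console.log(heap.pop()); // { txid: 'cc', feeRate: 9 }
```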

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
Signed: AlexLloyd0

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of January 25, 2022.
Signed: Arooba-git

View File

@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of December 17, 2022.
Signed: piterden

View File

@@ -17,7 +17,7 @@ _Note: address lookups require an Electrum Server and will not work with this co
The default Docker configuration assumes you have the following configuration in your `bitcoin.conf` file:
```
```ini
txindex=1
server=1
rpcuser=mempool
@@ -26,7 +26,7 @@ rpcpassword=mempool
If you want to use different credentials, specify them in the `docker-compose.yml` file:
```
```yaml
api:
environment:
MEMPOOL_BACKEND: "none"
@@ -54,7 +54,7 @@ First, configure `bitcoind` as specified above, and make sure your Electrum Serv
Then, set the following variables in `docker-compose.yml` so Mempool can connect to your Electrum Server:
```
```yaml
api:
environment:
MEMPOOL_BACKEND: "electrum"
@@ -85,10 +85,11 @@ Below we list all settings from `mempool-config.json` and the corresponding over
<br/>
`mempool-config.json`:
```
```json
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"ENABLED": true,
"HTTP_PORT": 8999,
"SPAWN_CLUSTER_PROCS": 0,
"API_URL_PREFIX": "/api/v1/",
@@ -99,17 +100,24 @@ Below we list all settings from `mempool-config.json` and the corresponding over
"BLOCK_WEIGHT_UNITS": 4000000,
"INITIAL_BLOCKS_AMOUNT": 8,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"PRICE_FEED_UPDATE_INTERVAL": 600,
"BLOCKS_SUMMARIES_INDEXING": false,
"USE_SECOND_NODE_FOR_MINFEE": false,
"EXTERNAL_ASSETS": ["https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json"],
"EXTERNAL_ASSETS": [],
"STDOUT_LOG_MIN_PRIORITY": "info",
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master"
"INDEXING_BLOCKS_AMOUNT": false,
"AUTOMATIC_BLOCK_REINDEXING": false,
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master",
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"CPFP_INDEXING": false,
"MAX_BLOCKS_BULK_QUERY": 0,
"DISK_CACHE_BLOCK_INTERVAL": 6
},
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
MEMPOOL_NETWORK: ""
@@ -124,20 +132,31 @@ Corresponding `docker-compose.yml` overrides:
MEMPOOL_BLOCK_WEIGHT_UNITS: ""
MEMPOOL_INITIAL_BLOCKS_AMOUNT: ""
MEMPOOL_MEMPOOL_BLOCKS_AMOUNT: ""
MEMPOOL_PRICE_FEED_UPDATE_INTERVAL: ""
MEMPOOL_BLOCKS_SUMMARIES_INDEXING: ""
MEMPOOL_USE_SECOND_NODE_FOR_MINFEE: ""
MEMPOOL_EXTERNAL_ASSETS: ""
MEMPOOL_STDOUT_LOG_MIN_PRIORITY: ""
MEMPOOL_INDEXING_BLOCKS_AMOUNT: ""
MEMPOOL_AUTOMATIC_BLOCK_REINDEXING: ""
MEMPOOL_POOLS_JSON_URL: ""
MEMPOOL_POOLS_JSON_TREE_URL: ""
MEMPOOL_ADVANCED_GBT_AUDIT: ""
MEMPOOL_ADVANCED_GBT_MEMPOOL: ""
MEMPOOL_CPFP_INDEXING: ""
MEMPOOL_MAX_BLOCKS_BULK_QUERY: ""
MEMPOOL_DISK_CACHE_BLOCK_INTERVAL: ""
...
```
`ADVANCED_GBT_AUDIT` and `ADVANCED_GBT_MEMPOOL` enable a more accurate (but slower) block prediction algorithm for the block audit feature and the projected mempool blocks, respectively.
`CPFP_INDEXING` enables indexing CPFP (Child Pays For Parent) information for the last `INDEXING_BLOCKS_AMOUNT` blocks.
<br/>
`mempool-config.json`:
```
"CORE_RPC": {
```json
"CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
"USERNAME": "mempool",
@@ -146,7 +165,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
CORE_RPC_HOST: ""
@@ -159,7 +178,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"ELECTRUM": {
"HOST": "127.0.0.1",
"PORT": 50002,
@@ -168,7 +187,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
ELECTRUM_HOST: ""
@@ -180,14 +199,14 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"ESPLORA": {
"REST_API_URL": "http://127.0.0.1:3000"
},
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
ESPLORA_REST_API_URL: ""
@@ -197,7 +216,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"SECOND_CORE_RPC": {
"HOST": "127.0.0.1",
"PORT": 8332,
@@ -207,7 +226,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
SECOND_CORE_RPC_HOST: ""
@@ -220,7 +239,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"DATABASE": {
"ENABLED": true,
"HOST": "127.0.0.1",
@@ -232,7 +251,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
DATABASE_ENABLED: ""
@@ -247,7 +266,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"SYSLOG": {
"ENABLED": true,
"HOST": "127.0.0.1",
@@ -258,7 +277,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
SYSLOG_ENABLED: ""
@@ -272,7 +291,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"STATISTICS": {
"ENABLED": true,
"TX_PER_SECOND_SAMPLE_PERIOD": 150
@@ -280,7 +299,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
STATISTICS_ENABLED: ""
@@ -291,7 +310,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"BISQ": {
"ENABLED": false,
"DATA_PATH": "/bisq/statsnode-data/btc_mainnet/db"
@@ -299,7 +318,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
BISQ_ENABLED: ""
@@ -310,7 +329,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"SOCKS5PROXY": {
"ENABLED": false,
"HOST": "127.0.0.1",
@@ -321,7 +340,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
SOCKS5PROXY_ENABLED: ""
@@ -335,7 +354,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"PRICE_DATA_SERVER": {
"TOR_URL": "http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices",
"CLEARNET_URL": "https://price.bisq.wiz.biz/getAllMarketPrices"
@@ -343,7 +362,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
PRICE_DATA_SERVER_TOR_URL: ""
@@ -354,7 +373,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"LIGHTNING": {
"ENABLED": false
"BACKEND": "lnd"
@@ -366,7 +385,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
LIGHTNING_ENABLED: false
@@ -381,7 +400,7 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"LND": {
"TLS_CERT_PATH": ""
"MACAROON_PATH": ""
@@ -390,7 +409,7 @@ Corresponding `docker-compose.yml` overrides:
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
LND_TLS_CERT_PATH: ""
@@ -402,16 +421,39 @@ Corresponding `docker-compose.yml` overrides:
<br/>
`mempool-config.json`:
```
```json
"CLIGHTNING": {
"SOCKET": ""
}
```
Corresponding `docker-compose.yml` overrides:
```
```yaml
api:
environment:
CLIGHTNING_SOCKET: ""
...
```
<br/>
`mempool-config.json`:
```json
"MAXMIND": {
"ENABLED": true,
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
}
```
Corresponding `docker-compose.yml` overrides:
```yaml
api:
environment:
MAXMIND_ENABLED: true
MAXMIND_GEOLITE2_CITY: "/backend/GeoIP/GeoLite2-City.mmdb"
MAXMIND_GEOLITE2_ASN: "/backend/GeoIP/GeoLite2-ASN.mmdb"
MAXMIND_GEOIP2_ISP: "/backend/GeoIP/GeoIP2-ISP.mmdb"
...
```

View File

@@ -17,6 +17,7 @@ WORKDIR /backend
RUN chown 1000:1000 ./
COPY --from=builder --chown=1000:1000 /build/package ./package/
COPY --from=builder --chown=1000:1000 /build/GeoIP ./GeoIP/
COPY --from=builder --chown=1000:1000 /build/mempool-config.json /build/start.sh /build/wait-for-it.sh ./
USER 1000

View File

@@ -2,6 +2,7 @@
"MEMPOOL": {
"NETWORK": "__MEMPOOL_NETWORK__",
"BACKEND": "__MEMPOOL_BACKEND__",
"ENABLED": __MEMPOOL_ENABLED__,
"HTTP_PORT": __MEMPOOL_HTTP_PORT__,
"SPAWN_CLUSTER_PROCS": __MEMPOOL_SPAWN_CLUSTER_PROCS__,
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
@@ -12,7 +13,6 @@
"BLOCK_WEIGHT_UNITS": __MEMPOOL_BLOCK_WEIGHT_UNITS__,
"INITIAL_BLOCKS_AMOUNT": __MEMPOOL_INITIAL_BLOCKS_AMOUNT__,
"MEMPOOL_BLOCKS_AMOUNT": __MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__,
"PRICE_FEED_UPDATE_INTERVAL": __MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__,
"USE_SECOND_NODE_FOR_MINFEE": __MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__,
"EXTERNAL_ASSETS": __MEMPOOL_EXTERNAL_ASSETS__,
"EXTERNAL_MAX_RETRY": __MEMPOOL_EXTERNAL_MAX_RETRY__,
@@ -21,7 +21,13 @@
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
"INDEXING_BLOCKS_AMOUNT": __MEMPOOL_INDEXING_BLOCKS_AMOUNT__,
"BLOCKS_SUMMARIES_INDEXING": __MEMPOOL_BLOCKS_SUMMARIES_INDEXING__,
"AUTOMATIC_BLOCK_REINDEXING": __MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__
"AUTOMATIC_BLOCK_REINDEXING": __MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__,
"AUDIT": __MEMPOOL_AUDIT__,
"ADVANCED_GBT_AUDIT": __MEMPOOL_ADVANCED_GBT_AUDIT__,
"ADVANCED_GBT_MEMPOOL": __MEMPOOL_ADVANCED_GBT_MEMPOOL__,
"CPFP_INDEXING": __MEMPOOL_CPFP_INDEXING__,
"MAX_BLOCKS_BULK_QUERY": __MEMPOOL_MAX_BLOCKS_BULK_QUERY__,
"DISK_CACHE_BLOCK_INTERVAL": __MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__
},
"CORE_RPC": {
"HOST": "__CORE_RPC_HOST__",
@@ -102,5 +108,11 @@
"LIQUID_ONION": "__EXTERNAL_DATA_SERVER_LIQUID_ONION__",
"BISQ_URL": "__EXTERNAL_DATA_SERVER_BISQ_URL__",
"BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__"
},
"MAXMIND": {
"ENABLED": __MAXMIND_ENABLED__,
"GEOLITE2_CITY": "__MAXMIND_GEOLITE2_CITY__",
"GEOLITE2_ASN": "__MAXMIND_GEOLITE2_ASN__",
"GEOIP2_ISP": "__MAXMIND_GEOIP2_ISP__"
}
}

View File

@@ -3,6 +3,7 @@
# MEMPOOL
__MEMPOOL_NETWORK__=${MEMPOOL_NETWORK:=mainnet}
__MEMPOOL_BACKEND__=${MEMPOOL_BACKEND:=electrum}
__MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=true}
__MEMPOOL_HTTP_PORT__=${BACKEND_HTTP_PORT:=8999}
__MEMPOOL_SPAWN_CLUSTER_PROCS__=${MEMPOOL_SPAWN_CLUSTER_PROCS:=0}
__MEMPOOL_API_URL_PREFIX__=${MEMPOOL_API_URL_PREFIX:=/api/v1/}
@@ -15,17 +16,21 @@ __MEMPOOL_INITIAL_BLOCKS_AMOUNT__=${MEMPOOL_INITIAL_BLOCKS_AMOUNT:=8}
__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__=${MEMPOOL_MEMPOOL_BLOCKS_AMOUNT:=8}
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=11000}
__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__=${MEMPOOL_BLOCKS_SUMMARIES_INDEXING:=false}
__MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__=${MEMPOOL_PRICE_FEED_UPDATE_INTERVAL:=600}
__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__=${MEMPOOL_USE_SECOND_NODE_FOR_MINFEE:=false}
__MEMPOOL_EXTERNAL_ASSETS__=${MEMPOOL_EXTERNAL_ASSETS:=[]}
__MEMPOOL_EXTERNAL_MAX_RETRY__=${MEMPOOL_EXTERNAL_MAX_RETRY:=1}
__MEMPOOL_EXTERNAL_RETRY_INTERVAL__=${MEMPOOL_EXTERNAL_RETRY_INTERVAL:=0}
__MEMPOOL_USER_AGENT__=${MEMPOOL_USER_AGENT:=mempool}
__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__=${MEMPOOL_STDOUT_LOG_MIN_PRIORITY:=info}
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=false}
__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__=${MEMPOOL_AUTOMATIC_BLOCK_REINDEXING:=false}
__MEMPOOL_POOLS_JSON_URL__=${MEMPOOL_POOLS_JSON_URL:=https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json}
__MEMPOOL_POOLS_JSON_URL__=${MEMPOOL_POOLS_JSON_URL:=https://raw.githubusercontent.com/mempool/mining-pools/master/pools-v2.json}
__MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=https://api.github.com/repos/mempool/mining-pools/git/trees/master}
__MEMPOOL_AUDIT__=${MEMPOOL_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_AUDIT__=${MEMPOOL_ADVANCED_GBT_AUDIT:=false}
__MEMPOOL_ADVANCED_GBT_MEMPOOL__=${MEMPOOL_ADVANCED_GBT_MEMPOOL:=false}
__MEMPOOL_CPFP_INDEXING__=${MEMPOOL_CPFP_INDEXING:=false}
__MEMPOOL_MAX_BLOCKS_BULK_QUERY__=${MEMPOOL_MAX_BLOCKS_BULK_QUERY:=0}
__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__=${MEMPOOL_DISK_CACHE_BLOCK_INTERVAL:=6}
# CORE_RPC
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
@@ -107,10 +112,18 @@ __LND_REST_API_URL__=${LND_REST_API_URL:="https://localhost:8080"}
# CLN
__CLIGHTNING_SOCKET__=${CLIGHTNING_SOCKET:=""}
# MAXMIND
__MAXMIND_ENABLED__=${MAXMIND_ENABLED:=true}
__MAXMIND_GEOLITE2_CITY__=${MAXMIND_GEOLITE2_CITY:="/backend/GeoIP/GeoLite2-City.mmdb"}
__MAXMIND_GEOLITE2_ASN__=${MAXMIND_GEOLITE2_ASN:="/backend/GeoIP/GeoLite2-ASN.mmdb"}
__MAXMIND_GEOIP2_ISP__=${MAXMIND_GEOIP2_ISP:=""}
mkdir -p "${__MEMPOOL_CACHE_DIR__}"
sed -i "s/__MEMPOOL_NETWORK__/${__MEMPOOL_NETWORK__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BACKEND__/${__MEMPOOL_BACKEND__}/g" mempool-config.json
sed -i "s/__MEMPOOL_ENABLED__/${__MEMPOOL_ENABLED__}/g" mempool-config.json
sed -i "s/__MEMPOOL_HTTP_PORT__/${__MEMPOOL_HTTP_PORT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_SPAWN_CLUSTER_PROCS__/${__MEMPOOL_SPAWN_CLUSTER_PROCS__}/g" mempool-config.json
sed -i "s!__MEMPOOL_API_URL_PREFIX__!${__MEMPOOL_API_URL_PREFIX__}!g" mempool-config.json
@@ -123,17 +136,21 @@ sed -i "s/__MEMPOOL_INITIAL_BLOCKS_AMOUNT__/${__MEMPOOL_INITIAL_BLOCKS_AMOUNT__}
sed -i "s/__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__/${__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__/${__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__}/g" mempool-config.json
sed -i "s/__MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__/${__MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__}/g" mempool-config.json
sed -i "s/__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__/${__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__}/g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_ASSETS__!${__MEMPOOL_EXTERNAL_ASSETS__}!g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_MAX_RETRY__!${__MEMPOOL_EXTERNAL_MAX_RETRY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_EXTERNAL_RETRY_INTERVAL__!${__MEMPOOL_EXTERNAL_RETRY_INTERVAL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_USER_AGENT__!${__MEMPOOL_USER_AGENT__}!g" mempool-config.json
sed -i "s/__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__/${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}/g" mempool-config.json
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
sed -i "s/__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__/${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}/g" mempool-config.json
sed -i "s!__MEMPOOL_POOLS_JSON_URL__!${__MEMPOOL_POOLS_JSON_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_AUDIT__!${__MEMPOOL_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_MEMPOOL__!${__MEMPOOL_ADVANCED_GBT_MEMPOOL__}!g" mempool-config.json
sed -i "s!__MEMPOOL_ADVANCED_GBT_AUDIT__!${__MEMPOOL_ADVANCED_GBT_AUDIT__}!g" mempool-config.json
sed -i "s!__MEMPOOL_CPFP_INDEXING__!${__MEMPOOL_CPFP_INDEXING__}!g" mempool-config.json
sed -i "s!__MEMPOOL_MAX_BLOCKS_BULK_QUERY__!${__MEMPOOL_MAX_BLOCKS_BULK_QUERY__}!g" mempool-config.json
sed -i "s!__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__!${__MEMPOOL_DISK_CACHE_BLOCK_INTERVAL__}!g" mempool-config.json
sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json
@@ -205,4 +222,11 @@ sed -i "s!__LND_REST_API_URL__!${__LND_REST_API_URL__}!g" mempool-config.json
# CLN
sed -i "s!__CLIGHTNING_SOCKET__!${__CLIGHTNING_SOCKET__}!g" mempool-config.json
# MAXMIND
sed -i "s!__MAXMIND_ENABLED__!${__MAXMIND_ENABLED__}!g" mempool-config.json
sed -i "s!__MAXMIND_GEOLITE2_CITY__!${__MAXMIND_GEOLITE2_CITY__}!g" mempool-config.json
sed -i "s!__MAXMIND_GEOLITE2_ASN__!${__MAXMIND_GEOLITE2_ASN__}!g" mempool-config.json
sed -i "s!__MAXMIND_GEOIP2_ISP__!${__MAXMIND_GEOIP2_ISP__}!g" mempool-config.json
node /backend/package/index.js
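The script is one pattern repeated: take an override from the environment, fall back to a default, then write the result into mempool-config.json before launching the backend. A minimal, self-contained sketch of the same pattern, using hypothetical names (DEMO_PORT, demo-config.json):

#!/bin/sh
# Default-then-substitute, in miniature.
__DEMO_PORT__=${DEMO_PORT:=8999}                         # env override or default
printf '{ "PORT": __DEMO_PORT__ }\n' > demo-config.json  # template with placeholder
sed -i "s/__DEMO_PORT__/${__DEMO_PORT__}/g" demo-config.json  # GNU sed; BSD sed wants -i ''
cat demo-config.json                                     # -> { "PORT": 8999 }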

View File

@@ -8,7 +8,9 @@ WORKDIR /build
COPY . .
RUN apt-get update
RUN apt-get install -y build-essential rsync
RUN cp mempool-frontend-config.sample.json mempool-frontend-config.json
RUN npm install --omit=dev --omit=optional
RUN npm run build
FROM nginx:1.17.8-alpine
@@ -28,7 +30,9 @@ RUN chown -R 1000:1000 /patch && chmod -R 755 /patch && \
chown -R 1000:1000 /var/cache/nginx && \
chown -R 1000:1000 /var/log/nginx && \
chown -R 1000:1000 /etc/nginx/nginx.conf && \
-chown -R 1000:1000 /etc/nginx/conf.d
chown -R 1000:1000 /etc/nginx/conf.d && \
chown -R 1000:1000 /var/www/mempool
RUN touch /var/run/nginx.pid && \
chown -R 1000:1000 /var/run/nginx.pid
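With /var/www/mempool chowned along with the nginx directories, the frontend image can run entirely unprivileged. A usage sketch; the image tag and published port are illustrative assumptions, since the listen port comes from the nginx config:

# Build the frontend image and run it as the non-root user it prepares.
docker build -t mempool-frontend:dev .
docker run --rm -u 1000:1000 -p 8080:8080 mempool-frontend:dev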

View File

@@ -10,4 +10,61 @@ cp /etc/nginx/nginx.conf /patch/nginx.conf
sed -i "s/__MEMPOOL_FRONTEND_HTTP_PORT__/${__MEMPOOL_FRONTEND_HTTP_PORT__}/g" /patch/nginx.conf
cat /patch/nginx.conf > /etc/nginx/nginx.conf
# Runtime overrides - read env vars defined in docker compose
__TESTNET_ENABLED__=${TESTNET_ENABLED:=false}
__SIGNET_ENABLED__=${SIGNET_ENABLED:=false}
__LIQUID_ENABLED__=${LIQUID_ENABLED:=false}
__LIQUID_TESTNET_ENABLED__=${LIQUID_TESTNET_ENABLED:=false}
__BISQ_ENABLED__=${BISQ_ENABLED:=false}
__BISQ_SEPARATE_BACKEND__=${BISQ_SEPARATE_BACKEND:=false}
__ITEMS_PER_PAGE__=${ITEMS_PER_PAGE:=10}
__KEEP_BLOCKS_AMOUNT__=${KEEP_BLOCKS_AMOUNT:=8}
__NGINX_PROTOCOL__=${NGINX_PROTOCOL:=http}
__NGINX_HOSTNAME__=${NGINX_HOSTNAME:=localhost}
__NGINX_PORT__=${NGINX_PORT:=8999}
__BLOCK_WEIGHT_UNITS__=${BLOCK_WEIGHT_UNITS:=4000000}
__MEMPOOL_BLOCKS_AMOUNT__=${MEMPOOL_BLOCKS_AMOUNT:=8}
__BASE_MODULE__=${BASE_MODULE:=mempool}
__MEMPOOL_WEBSITE_URL__=${MEMPOOL_WEBSITE_URL:=https://mempool.space}
__LIQUID_WEBSITE_URL__=${LIQUID_WEBSITE_URL:=https://liquid.network}
__BISQ_WEBSITE_URL__=${BISQ_WEBSITE_URL:=https://bisq.markets}
__MINING_DASHBOARD__=${MINING_DASHBOARD:=true}
__LIGHTNING__=${LIGHTNING:=false}
__AUDIT__=${AUDIT:=false}
__MAINNET_BLOCK_AUDIT_START_HEIGHT__=${MAINNET_BLOCK_AUDIT_START_HEIGHT:=0}
__TESTNET_BLOCK_AUDIT_START_HEIGHT__=${TESTNET_BLOCK_AUDIT_START_HEIGHT:=0}
__SIGNET_BLOCK_AUDIT_START_HEIGHT__=${SIGNET_BLOCK_AUDIT_START_HEIGHT:=0}
__HISTORICAL_PRICE__=${HISTORICAL_PRICE:=true}
# Export as environment variables to be used by envsubst
export __TESTNET_ENABLED__
export __SIGNET_ENABLED__
export __LIQUID_ENABLED__
export __LIQUID_TESTNET_ENABLED__
export __BISQ_ENABLED__
export __BISQ_SEPARATE_BACKEND__
export __ITEMS_PER_PAGE__
export __KEEP_BLOCKS_AMOUNT__
export __NGINX_PROTOCOL__
export __NGINX_HOSTNAME__
export __NGINX_PORT__
export __BLOCK_WEIGHT_UNITS__
export __MEMPOOL_BLOCKS_AMOUNT__
export __BASE_MODULE__
export __MEMPOOL_WEBSITE_URL__
export __LIQUID_WEBSITE_URL__
export __BISQ_WEBSITE_URL__
export __MINING_DASHBOARD__
export __LIGHTNING__
export __AUDIT__
export __MAINNET_BLOCK_AUDIT_START_HEIGHT__
export __TESTNET_BLOCK_AUDIT_START_HEIGHT__
export __SIGNET_BLOCK_AUDIT_START_HEIGHT__
export __HISTORICAL_PRICE__
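# envsubst only substitutes variables present in its process environment,
# which is why every value is exported above; a plain shell assignment
# would be invisible to the envsubst child process.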
folder=$(find /var/www/mempool -name "config.js" | xargs dirname)
echo "${folder}"
envsubst < "${folder}/config.template.js" > "${folder}/config.js"
exec "$@"

View File

@@ -3,6 +3,11 @@
#backend
cp ./docker/backend/* ./backend/
#geoip-data
mkdir -p ./backend/GeoIP/
wget -O ./backend/GeoIP/GeoLite2-City.mmdb https://raw.githubusercontent.com/mempool/geoip-data/master/GeoLite2-City.mmdb
wget -O ./backend/GeoIP/GeoLite2-ASN.mmdb https://raw.githubusercontent.com/mempool/geoip-data/master/GeoLite2-ASN.mmdb
#frontend
localhostIP="127.0.0.1"
cp ./docker/frontend/* ./frontend
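The two .mmdb files are fetched with plain wget and no integrity check, so it is worth sanity-checking them before the backend tries to open them. A hedged sketch; mmdblookup ships with libmaxminddb and may not be installed on the build host:

# Confirm the databases downloaded fully and actually parse as MMDB.
ls -lh ./backend/GeoIP/*.mmdb
mmdblookup --file ./backend/GeoIP/GeoLite2-ASN.mmdb --ip 8.8.8.8 \
  || echo "GeoLite2-ASN.mmdb is missing or unreadable"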

frontend/.gitignore vendored
View File

@@ -54,6 +54,8 @@ src/resources/assets-testnet.json
src/resources/assets-testnet.minimal.json
src/resources/pools.json
src/resources/mining-pools/*
src/resources/**/*.mp4
src/resources/**/*.vtt
# environment config
mempool-frontend-config.json

View File

@@ -1,7 +1,9 @@
[main]
host = https://www.transifex.com
-[mempool.frontend-src-locale-messages-xlf--master]
[o:mempool:p:mempool:r:frontend-src-locale-messages-xlf--master]
file_filter = frontend/src/locale/messages.<lang>.xlf
source_file = frontend/src/locale/messages.en-US.xlf
source_lang = en-US
type = XLIFF
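The bracketed resource slug moves to the o:<organization>:p:<project>:r:<resource> form used by the current Transifex client. Day-to-day usage is unchanged; this assumes a tx client configured with a valid API token:

# Pull updated translations for one locale, or for all of them.
tx pull -l ko
tx pull -a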

View File

@@ -111,9 +111,9 @@ https://www.transifex.com/mempool/mempool/dashboard/
* Spanish @maxhodler @bisqes
* Persian @techmix
* French @Bayernatoor
-* Korean @kcalvinalvinn
* Korean @kcalvinalvinn @sogoagain
* Italian @HodlBits
-* Hebrew @Sh0ham
* Hebrew @rapidlab309
* Georgian @wyd_idk
* Hungarian @btcdragonlord
* Dutch @m__btc
@@ -127,8 +127,9 @@ https://www.transifex.com/mempool/mempool/dashboard/
* Thai @Gusb3ll
* Turkish @stackmore
* Ukrainian @volbil
-* Vietnamese @bitcoin_vietnam
* Vietnamese @BitcoinvnNews
* Chinese @wdljt
* Russian @TonyCrusoe @Bitconan
* Romanian @mirceavesa
* Macedonian @SkechBoy
* Nepalese @kebinm

View File

@@ -38,6 +38,10 @@
"translation": "src/locale/messages.de.xlf",
"baseHref": "/de/"
},
"da": {
"translation": "src/locale/messages.da.xlf",
"baseHref": "/da/"
},
"es": {
"translation": "src/locale/messages.es.xlf",
"baseHref": "/es/"
@@ -137,6 +141,14 @@
"hi": {
"translation": "src/locale/messages.hi.xlf",
"baseHref": "/hi/"
},
"ne": {
"translation": "src/locale/messages.ne.xlf",
"baseHref": "/ne/"
},
"lt": {
"translation": "src/locale/messages.lt.xlf",
"baseHref": "/lt/"
}
}
},
@@ -152,15 +164,14 @@
"assets": [
"src/favicon.ico",
"src/resources",
"src/robots.txt"
"src/robots.txt",
"src/config.js",
"src/config.template.js"
],
"styles": [
"src/styles.scss",
"node_modules/@fortawesome/fontawesome-svg-core/styles.css"
],
"scripts": [
"generated-config.js"
],
"vendorChunk": true,
"extractLicenses": false,
"buildOptimizer": false,
@@ -222,6 +233,10 @@
"proxyConfig": "proxy.conf.local.js",
"verbose": true
},
"local-esplora": {
"proxyConfig": "proxy.conf.local-esplora.js",
"verbose": true
},
"mixed": {
"proxyConfig": "proxy.conf.mixed.js",
"verbose": true
@@ -265,57 +280,6 @@
}
}
},
"server": {
"builder": "@angular-devkit/build-angular:server",
"options": {
"outputPath": "dist/mempool/server",
"main": "server.ts",
"tsConfig": "tsconfig.server.json",
"sourceMap": true,
"optimization": false
},
"configurations": {
"production": {
"outputHashing": "media",
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
"with": "src/environments/environment.prod.ts"
}
],
"sourceMap": false,
"localize": true,
"optimization": true
}
},
"defaultConfiguration": ""
},
"serve-ssr": {
"builder": "@nguniversal/builders:ssr-dev-server",
"options": {
"browserTarget": "mempool:build",
"serverTarget": "mempool:server"
},
"configurations": {
"production": {
"browserTarget": "mempool:build:production",
"serverTarget": "mempool:server:production"
}
}
},
"prerender": {
"builder": "@nguniversal/builders:prerender",
"options": {
"browserTarget": "mempool:build:production",
"serverTarget": "mempool:server:production",
"routes": [
"/"
]
},
"configurations": {
"production": {}
}
},
"cypress-run": {
"builder": "@cypress/schematic:cypress",
"options": {
@@ -336,6 +300,5 @@
}
}
}
-},
-"defaultProject": "mempool"
}
}
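The new local-esplora entry plugs into ng serve exactly like the existing proxy configurations. A usage sketch from the frontend directory, assuming the Angular CLI is available through npx:

# Serve the app with API calls proxied per proxy.conf.local-esplora.js.
npx ng serve --configuration local-esplora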

View File

@@ -1,4 +1,4 @@
-import { defineConfig } from 'cypress'
import { defineConfig } from 'cypress';
export default defineConfig({
projectId: 'ry4br7',
@@ -12,12 +12,18 @@ export default defineConfig({
},
chromeWebSecurity: false,
e2e: {
-// We've imported your old cypress plugins here.
-// You may want to clean this up later by importing these.
-setupNodeEvents(on, config) {
-return require('./cypress/plugins/index.js')(on, config)
setupNodeEvents(on: any, config: any) {
const fs = require('fs');
const CONFIG_FILE = 'mempool-frontend-config.json';
if (fs.existsSync(CONFIG_FILE)) {
let contents = JSON.parse(fs.readFileSync(CONFIG_FILE, 'utf8'));
config.env.BASE_MODULE = contents.BASE_MODULE ? contents.BASE_MODULE : 'mempool';
} else {
config.env.BASE_MODULE = 'mempool';
}
return config;
},
baseUrl: 'http://localhost:4200',
specPattern: 'cypress/e2e/**/*.{js,jsx,ts,tsx}',
},
-})
});
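setupNodeEvents now reads BASE_MODULE from mempool-frontend-config.json when that file exists and falls back to mempool otherwise; specs can branch on it via Cypress.env('BASE_MODULE'). A usage sketch that flips the suite into Liquid mode:

# Write a minimal frontend config, then run the suite headlessly.
echo '{ "BASE_MODULE": "liquid" }' > mempool-frontend-config.json
npx cypress run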

Some files were not shown because too many files have changed in this diff.