Compare commits
ops/fix-to...v2.5.0-dev (1116 commits)
[Commit list omitted: 1116 commit SHA1s, from a210a3faf2 through 352f0817d9, rendered without author, date, or message columns.]
.github/FUNDING.yml (vendored, 12 changed lines)
@@ -1,12 +0,0 @@
# These are supported funding model platforms
github: ['mempool'] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
custom: ['https://mempool.space/sponsor'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
.github/dependabot.yml (vendored, 44 changed lines)
@@ -1,20 +1,28 @@
version: 2
updates:
- package-ecosystem: npm
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
open-pull-requests-limit: 10
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: weekly
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
- package-ecosystem: npm
directory: "/backend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- update-types: ["version-update:semver-major"]
- package-ecosystem: npm
directory: "/frontend"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- update-types: ["version-update:semver-major"]
- package-ecosystem: docker
directory: "/docker/backend"
schedule:
interval: daily
ignore:
- update-types: ["version-update:semver-major"]
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: daily
ignore:
- update-types: ["version-update:semver-major"]
.github/workflows/ci.yml (vendored, 6 changed lines)
@@ -42,8 +42,10 @@ jobs:
run: npm run lint
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend

# - name: Test
# run: npm run test
- name: Unit Tests
if: ${{ matrix.flavor == 'dev'}}
run: npm run test
working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend

- name: Build
run: npm run build
.github/workflows/on-tag.yml (vendored, 44 changed lines)
@@ -1,7 +1,7 @@
|
||||
name: Docker build on tag
|
||||
env:
|
||||
DOCKER_CLI_EXPERIMENTAL: enabled
|
||||
TAG_FMT: '^refs/tags/(((.?[0-9]+){3,4}))$'
|
||||
TAG_FMT: "^refs/tags/(((.?[0-9]+){3,4}))$"
|
||||
DOCKER_BUILDKIT: 0
|
||||
COMPOSE_DOCKER_CLI_BUILD: 0
|
||||
|
||||
@@ -21,16 +21,46 @@ jobs:
|
||||
service:
|
||||
- frontend
|
||||
- backend
|
||||
runs-on: ubuntu-18.04
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 120
|
||||
name: Build and push to DockerHub
|
||||
steps:
|
||||
# Workaround based on JonasAlfredsson/docker-on-tmpfs@v1.0.1
|
||||
- name: Replace the current swap file
|
||||
shell: bash
|
||||
run: |
|
||||
sudo swapoff /mnt/swapfile
|
||||
sudo rm -v /mnt/swapfile
|
||||
sudo fallocate -l 10G /mnt/swapfile
|
||||
sudo chmod 600 /mnt/swapfile
|
||||
sudo mkswap /mnt/swapfile
|
||||
sudo swapon /mnt/swapfile
|
||||
|
||||
- name: Show current memory and swap status
|
||||
shell: bash
|
||||
run: |
|
||||
sudo free -h
|
||||
echo
|
||||
sudo swapon --show
|
||||
|
||||
- name: Mount a tmpfs over /var/lib/docker
|
||||
shell: bash
|
||||
run: |
|
||||
if [ ! -d "/var/lib/docker" ]; then
|
||||
echo "Directory '/var/lib/docker' not found"
|
||||
exit 1
|
||||
fi
|
||||
sudo mount -t tmpfs -o size=10G tmpfs /var/lib/docker
|
||||
sudo systemctl restart docker
|
||||
sudo df -h | grep docker
|
||||
|
||||
- name: Set env variables
|
||||
run: echo "TAG=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV
|
||||
|
||||
- name: Show set environment variables
|
||||
run: |
|
||||
printf " TAG: %s\n" "$TAG"
|
||||
|
||||
|
||||
- name: Add SHORT_SHA env property with commit short sha
|
||||
run: echo "SHORT_SHA=`echo ${GITHUB_SHA} | cut -c1-8`" >> $GITHUB_ENV
|
||||
|
||||
@@ -38,24 +68,24 @@ jobs:
|
||||
run: echo "${{ secrets.DOCKER_PASSWORD }}" | docker login -u "${{ secrets.DOCKER_USERNAME }}" --password-stdin
|
||||
|
||||
- name: Checkout project
|
||||
uses: actions/checkout@629c2de402a417ea7690ca6ce3f33229e27606a5 # v2
|
||||
uses: actions/checkout@e2f20e631ae6d7dd3b768f56a5d2af784dd54791 # v2.5.0
|
||||
|
||||
- name: Init repo for Dockerization
|
||||
run: docker/init.sh "$TAG"
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@27d0a4f181a40b142cce983c5393082c365d1480 # v1
|
||||
uses: docker/setup-qemu-action@e81a89b1732b9c48d79cd809d8d81d79c4647a18 # v2.1.0
|
||||
id: qemu
|
||||
|
||||
- name: Setup Docker buildx action
|
||||
uses: docker/setup-buildx-action@94ab11c41e45d028884a99163086648e898eed25 # v1
|
||||
uses: docker/setup-buildx-action@8c0edbc76e98fa90f69d9a2c020dcb50019dc325 # v2.2.1
|
||||
id: buildx
|
||||
|
||||
- name: Available platforms
|
||||
run: echo ${{ steps.buildx.outputs.platforms }}
|
||||
|
||||
- name: Cache Docker layers
|
||||
uses: actions/cache@661fd3eb7f2f20d8c7c84bc2b0509efd7a826628 # v2
|
||||
uses: actions/cache@9b0c1fce7a93df8e3bb8926b0d6e9d89e92f20a7 # v3.0.11
|
||||
id: cache
|
||||
with:
|
||||
path: /tmp/.buildx-cache
|
||||
|
||||
.gitignore (vendored, 2 changed lines)
@@ -3,3 +3,5 @@ data
docker-compose.yml
backend/mempool-config.json
*.swp
frontend/src/resources/config.template.js
frontend/src/resources/config.js
@@ -31,6 +31,7 @@
"prefer-const": 1,
"prefer-rest-params": 1,
"quotes": [1, "single", { "allowTemplateLiterals": true }],
"semi": 1
"semi": 1,
"eqeqeq": 1
}
}
backend/.gitignore (vendored, 3 changed lines)
@@ -1,7 +1,8 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.

# production config and external assets

!mempool-config.template.json
!mempool-config.sample.json
mempool-config.json
pools.json
icons.json
@@ -110,6 +110,11 @@ Run the Mempool backend:

```
npm run start
```
You can also set env var `MEMPOOL_CONFIG_FILE` to specify a custom config file location:
```
MEMPOOL_CONFIG_FILE=/path/to/mempool-config.json npm run start
```

When it's running, you should see output like this:
backend/jest.config.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import type { Config } from "@jest/types"

const config: Config.InitialOptions = {
preset: "ts-jest",
testEnvironment: "node",
verbose: true,
automock: false,
collectCoverage: true,
collectCoverageFrom: ["./src/**/**.ts"],
coverageProvider: "babel",
coverageThreshold: {
global: {
lines: 1
}
},
setupFiles: [
"./testSetup.ts",
],
}
export default config;
@@ -2,6 +2,7 @@
"MEMPOOL": {
"NETWORK": "mainnet",
"BACKEND": "electrum",
"ENABLED": true,
"HTTP_PORT": 8999,
"SPAWN_CLUSTER_PROCS": 0,
"API_URL_PREFIX": "/api/v1/",
@@ -21,7 +22,12 @@
"EXTERNAL_RETRY_INTERVAL": 0,
"USER_AGENT": "mempool",
"STDOUT_LOG_MIN_PRIORITY": "debug",
"AUTOMATIC_BLOCK_REINDEXING": false
"AUTOMATIC_BLOCK_REINDEXING": false,
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master",
"ADVANCED_GBT_AUDIT": false,
"ADVANCED_GBT_MEMPOOL": false,
"TRANSACTION_INDEXING": false
},
"CORE_RPC": {
"HOST": "127.0.0.1",
@@ -75,12 +81,20 @@
},
"LIGHTNING": {
"ENABLED": false,
"BACKEND": "lnd"
"BACKEND": "lnd",
"STATS_REFRESH_INTERVAL": 600,
"GRAPH_REFRESH_INTERVAL": 600,
"LOGGER_UPDATE_INTERVAL": 30,
"FORENSICS_INTERVAL": 43200,
"FORENSICS_RATE_LIMIT": 20
},
"LND": {
"TLS_CERT_PATH": "tls.cert",
"MACAROON_PATH": "admin.macaroon",
"SOCKET": "localhost:10009"
"MACAROON_PATH": "readonly.macaroon",
"REST_API_URL": "https://localhost:8080"
},
"CLIGHTNING": {
"SOCKET": "lightning-rpc"
},
"SOCKS5PROXY": {
"ENABLED": false,
backend/package-lock.json (generated, 7353 changed lines)
File diff suppressed because it is too large.
@@ -16,44 +16,53 @@
|
||||
"mempool",
|
||||
"blockchain",
|
||||
"explorer",
|
||||
"liquid"
|
||||
"liquid",
|
||||
"lightning"
|
||||
],
|
||||
"main": "index.ts",
|
||||
"scripts": {
|
||||
"tsc": "./node_modules/typescript/bin/tsc",
|
||||
"build": "npm run tsc",
|
||||
"tsc": "./node_modules/typescript/bin/tsc -p tsconfig.build.json",
|
||||
"build": "npm run tsc && npm run create-resources",
|
||||
"create-resources": "cp ./src/tasks/price-feeds/mtgox-weekly.json ./dist/tasks && node dist/api/fetch-version.js",
|
||||
"package": "npm run build && rm -rf package && mv dist package && mv node_modules package && npm run package-rm-build-deps",
|
||||
"package-rm-build-deps": "(cd package/node_modules; rm -r typescript @typescript-eslint)",
|
||||
"start": "node --max-old-space-size=2048 dist/index.js",
|
||||
"start-production": "node --max-old-space-size=4096 dist/index.js",
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"test": "./node_modules/.bin/jest --coverage",
|
||||
"lint": "./node_modules/.bin/eslint . --ext .ts",
|
||||
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
|
||||
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
|
||||
},
|
||||
"dependencies": {
|
||||
"@babel/core": "^7.20.5",
|
||||
"@mempool/electrum-client": "^1.1.7",
|
||||
"@types/node": "^16.11.41",
|
||||
"axios": "~0.27.2",
|
||||
"bitcoinjs-lib": "6.0.1",
|
||||
"bolt07": "^1.8.1",
|
||||
"crypto-js": "^4.0.0",
|
||||
"express": "^4.18.0",
|
||||
"lightning": "^5.16.3",
|
||||
"maxmind": "^4.3.6",
|
||||
"mysql2": "2.3.3",
|
||||
"node-worker-threads-pool": "^1.5.1",
|
||||
"bitcoinjs-lib": "~6.0.2",
|
||||
"crypto-js": "~4.1.1",
|
||||
"express": "~4.18.2",
|
||||
"maxmind": "~4.3.8",
|
||||
"mysql2": "~2.3.3",
|
||||
"node-worker-threads-pool": "~1.5.1",
|
||||
"socks-proxy-agent": "~7.0.0",
|
||||
"typescript": "~4.7.4",
|
||||
"ws": "~8.8.0"
|
||||
"ws": "~8.11.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.20.5",
|
||||
"@babel/code-frame": "^7.18.6",
|
||||
"@types/compression": "^1.7.2",
|
||||
"@types/crypto-js": "^4.1.1",
|
||||
"@types/express": "^4.17.13",
|
||||
"@types/express": "^4.17.14",
|
||||
"@types/jest": "^29.2.3",
|
||||
"@types/ws": "~8.5.3",
|
||||
"@typescript-eslint/eslint-plugin": "^5.30.5",
|
||||
"@typescript-eslint/parser": "^5.30.5",
|
||||
"eslint": "^8.19.0",
|
||||
"@typescript-eslint/eslint-plugin": "^5.45.0",
|
||||
"@typescript-eslint/parser": "^5.45.0",
|
||||
"eslint": "^8.28.0",
|
||||
"eslint-config-prettier": "^8.5.0",
|
||||
"prettier": "^2.7.1"
|
||||
"jest": "^29.3.1",
|
||||
"prettier": "^2.8.0",
|
||||
"ts-jest": "^29.0.3",
|
||||
"ts-node": "^10.9.1"
|
||||
}
|
||||
}
|
||||
|
||||
backend/src/__fixtures__/mempool-config.template.json (new file, 115 lines)
@@ -0,0 +1,115 @@
|
||||
{
|
||||
"MEMPOOL": {
|
||||
"ENABLED": true,
|
||||
"NETWORK": "__MEMPOOL_NETWORK__",
|
||||
"BACKEND": "__MEMPOOL_BACKEND__",
|
||||
"ENABLED": true,
|
||||
"BLOCKS_SUMMARIES_INDEXING": true,
|
||||
"HTTP_PORT": 1,
|
||||
"SPAWN_CLUSTER_PROCS": 2,
|
||||
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
|
||||
"AUTOMATIC_BLOCK_REINDEXING": true,
|
||||
"POLL_RATE_MS": 3,
|
||||
"CACHE_DIR": "__MEMPOOL_CACHE_DIR__",
|
||||
"CLEAR_PROTECTION_MINUTES": 4,
|
||||
"RECOMMENDED_FEE_PERCENTILE": 5,
|
||||
"BLOCK_WEIGHT_UNITS": 6,
|
||||
"INITIAL_BLOCKS_AMOUNT": 7,
|
||||
"MEMPOOL_BLOCKS_AMOUNT": 8,
|
||||
"PRICE_FEED_UPDATE_INTERVAL": 9,
|
||||
"USE_SECOND_NODE_FOR_MINFEE": 10,
|
||||
"EXTERNAL_ASSETS": 11,
|
||||
"EXTERNAL_MAX_RETRY": 12,
|
||||
"EXTERNAL_RETRY_INTERVAL": 13,
|
||||
"USER_AGENT": "__MEMPOOL_USER_AGENT__",
|
||||
"STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
|
||||
"INDEXING_BLOCKS_AMOUNT": 14,
|
||||
"POOLS_JSON_TREE_URL": "__POOLS_JSON_TREE_URL__",
|
||||
"POOLS_JSON_URL": "__POOLS_JSON_URL__",
|
||||
"ADVANCED_GBT_AUDIT": "__ADVANCED_GBT_AUDIT__",
|
||||
"ADVANCED_GBT_MEMPOOL": "__ADVANCED_GBT_MEMPOOL__",
|
||||
"TRANSACTION_INDEXING": "__TRANSACTION_INDEXING__"
|
||||
},
|
||||
"CORE_RPC": {
|
||||
"HOST": "__CORE_RPC_HOST__",
|
||||
"PORT": 15,
|
||||
"USERNAME": "__CORE_RPC_USERNAME__",
|
||||
"PASSWORD": "__CORE_RPC_PASSWORD__"
|
||||
},
|
||||
"ELECTRUM": {
|
||||
"HOST": "__ELECTRUM_HOST__",
|
||||
"PORT": 16,
|
||||
"TLS_ENABLED": true
|
||||
},
|
||||
"ESPLORA": {
|
||||
"REST_API_URL": "__ESPLORA_REST_API_URL__"
|
||||
},
|
||||
"SECOND_CORE_RPC": {
|
||||
"HOST": "__SECOND_CORE_RPC_HOST__",
|
||||
"PORT": 17,
|
||||
"USERNAME": "__SECOND_CORE_RPC_USERNAME__",
|
||||
"PASSWORD": "__SECOND_CORE_RPC_PASSWORD__"
|
||||
},
|
||||
"DATABASE": {
|
||||
"ENABLED": false,
|
||||
"HOST": "__DATABASE_HOST__",
|
||||
"SOCKET": "__DATABASE_SOCKET__",
|
||||
"PORT": 18,
|
||||
"DATABASE": "__DATABASE_DATABASE__",
|
||||
"USERNAME": "__DATABASE_USERNAME__",
|
||||
"PASSWORD": "__DATABASE_PASSWORD__"
|
||||
},
|
||||
"SYSLOG": {
|
||||
"ENABLED": false,
|
||||
"HOST": "__SYSLOG_HOST__",
|
||||
"PORT": 19,
|
||||
"MIN_PRIORITY": "__SYSLOG_MIN_PRIORITY__",
|
||||
"FACILITY": "__SYSLOG_FACILITY__"
|
||||
},
|
||||
"STATISTICS": {
|
||||
"ENABLED": false,
|
||||
"TX_PER_SECOND_SAMPLE_PERIOD": 20
|
||||
},
|
||||
"BISQ": {
|
||||
"ENABLED": true,
|
||||
"DATA_PATH": "__BISQ_DATA_PATH__"
|
||||
},
|
||||
"SOCKS5PROXY": {
|
||||
"ENABLED": true,
|
||||
"USE_ONION": true,
|
||||
"HOST": "__SOCKS5PROXY_HOST__",
|
||||
"PORT": "__SOCKS5PROXY_PORT__",
|
||||
"USERNAME": "__SOCKS5PROXY_USERNAME__",
|
||||
"PASSWORD": "__SOCKS5PROXY_PASSWORD__"
|
||||
},
|
||||
"PRICE_DATA_SERVER": {
|
||||
"TOR_URL": "__PRICE_DATA_SERVER_TOR_URL__",
|
||||
"CLEARNET_URL": "__PRICE_DATA_SERVER_CLEARNET_URL__"
|
||||
},
|
||||
"EXTERNAL_DATA_SERVER": {
|
||||
"MEMPOOL_API": "__EXTERNAL_DATA_SERVER_MEMPOOL_API__",
|
||||
"MEMPOOL_ONION": "__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__",
|
||||
"LIQUID_API": "__EXTERNAL_DATA_SERVER_LIQUID_API__",
|
||||
"LIQUID_ONION": "__EXTERNAL_DATA_SERVER_LIQUID_ONION__",
|
||||
"BISQ_URL": "__EXTERNAL_DATA_SERVER_BISQ_URL__",
|
||||
"BISQ_ONION": "__EXTERNAL_DATA_SERVER_BISQ_ONION__"
|
||||
},
|
||||
"LIGHTNING": {
|
||||
"ENABLED": "__LIGHTNING_ENABLED__",
|
||||
"BACKEND": "__LIGHTNING_BACKEND__",
|
||||
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__",
|
||||
"STATS_REFRESH_INTERVAL": 600,
|
||||
"GRAPH_REFRESH_INTERVAL": 600,
|
||||
"LOGGER_UPDATE_INTERVAL": 30,
|
||||
"FORENSICS_INTERVAL": 43200,
|
||||
"FORENSICS_RATE_LIMIT": "__FORENSICS_RATE_LIMIT__"
|
||||
},
|
||||
"LND": {
|
||||
"TLS_CERT_PATH": "",
|
||||
"MACAROON_PATH": "",
|
||||
"REST_API_URL": "https://localhost:8080"
|
||||
},
|
||||
"CLIGHTNING": {
|
||||
"SOCKET": "__CLIGHTNING_SOCKET__"
|
||||
}
|
||||
}
|
||||
backend/src/__tests__/api/difficulty-adjustment.test.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
import { calcDifficultyAdjustment, DifficultyAdjustment } from '../../api/difficulty-adjustment';
|
||||
|
||||
describe('Mempool Difficulty Adjustment', () => {
|
||||
test('should calculate Difficulty Adjustments properly', () => {
|
||||
const dt = (dtString) => {
|
||||
return Math.floor(new Date(dtString).getTime() / 1000);
|
||||
};
|
||||
|
||||
const vectors = [
|
||||
[ // Vector 1
|
||||
[ // Inputs
|
||||
dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
|
||||
dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
|
||||
750134, // Current block height
|
||||
0.6280047707459726, // Previous retarget % (Passed through)
|
||||
'mainnet', // Network (if testnet, next value is non-zero)
|
||||
0, // If not testnet, not used
|
||||
],
|
||||
{ // Expected Result
|
||||
progressPercent: 9.027777777777777,
|
||||
difficultyChange: 12.562233927411782,
|
||||
estimatedRetargetDate: 1661895424692,
|
||||
remainingBlocks: 1834,
|
||||
remainingTime: 977591692,
|
||||
previousRetarget: 0.6280047707459726,
|
||||
nextRetargetHeight: 751968,
|
||||
timeAvg: 533038,
|
||||
timeOffset: 0,
|
||||
},
|
||||
],
|
||||
[ // Vector 2 (testnet)
|
||||
[ // Inputs
|
||||
dt('2022-08-18T11:07:00.000Z'), // Last DA time (in seconds)
|
||||
dt('2022-08-19T14:03:53.000Z'), // Current time (now) (in seconds)
|
||||
750134, // Current block height
|
||||
0.6280047707459726, // Previous retarget % (Passed through)
|
||||
'testnet', // Network
|
||||
dt('2022-08-19T13:52:46.000Z'), // Latest block timestamp in seconds
|
||||
],
|
||||
{ // Expected Result is same other than timeOffset
|
||||
progressPercent: 9.027777777777777,
|
||||
difficultyChange: 12.562233927411782,
|
||||
estimatedRetargetDate: 1661895424692,
|
||||
remainingBlocks: 1834,
|
||||
remainingTime: 977591692,
|
||||
previousRetarget: 0.6280047707459726,
|
||||
nextRetargetHeight: 751968,
|
||||
timeAvg: 533038,
|
||||
timeOffset: -667000, // 11 min 7 seconds since last block (testnet only)
|
||||
// If we add time avg to abs(timeOffset) it makes exactly 1200000 ms, or 20 minutes
|
||||
},
|
||||
],
|
||||
] as [[number, number, number, number, string, number], DifficultyAdjustment][];
|
||||
|
||||
for (const vector of vectors) {
|
||||
const result = calcDifficultyAdjustment(...vector[0]);
|
||||
// previousRetarget is passed through untouched
|
||||
expect(result.previousRetarget).toStrictEqual(vector[0][3]);
|
||||
expect(result).toStrictEqual(vector[1]);
|
||||
}
|
||||
});
|
||||
});
|
||||
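The expected results in Vector 1 are internally consistent with Bitcoin's 2016-block retarget epoch. A quick sanity check of those numbers (not part of the diff; only the 2016-block epoch length is assumed, the rest comes from the test vector above):

```ts
// Re-derive the Vector 1 expectations from its inputs.
const EPOCH_LENGTH = 2016;                                      // blocks per difficulty epoch (assumed)
const currentHeight = 750134;                                   // input from Vector 1
const blocksIntoEpoch = currentHeight % EPOCH_LENGTH;           // 750134 - 749952 = 182
const progressPercent = (blocksIntoEpoch / EPOCH_LENGTH) * 100; // 9.02777..., matches progressPercent
const remainingBlocks = EPOCH_LENGTH - blocksIntoEpoch;         // 1834, matches remainingBlocks
const nextRetargetHeight = currentHeight + remainingBlocks;     // 751968, matches nextRetargetHeight
console.log({ progressPercent, remainingBlocks, nextRetargetHeight });
```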
backend/src/__tests__/config.test.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
|
||||
import * as fs from 'fs';
|
||||
|
||||
describe('Mempool Backend Config', () => {
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks();
|
||||
jest.resetModules();
|
||||
});
|
||||
|
||||
test('should return defaults when no file is present', () => {
|
||||
jest.isolateModules(() => {
|
||||
jest.mock('../../mempool-config.json', () => ({}), { virtual: true });
|
||||
|
||||
const config = jest.requireActual('../config').default;
|
||||
|
||||
expect(config.MEMPOOL).toStrictEqual({
|
||||
ENABLED: true,
|
||||
NETWORK: 'mainnet',
|
||||
BACKEND: 'none',
|
||||
BLOCKS_SUMMARIES_INDEXING: false,
|
||||
HTTP_PORT: 8999,
|
||||
SPAWN_CLUSTER_PROCS: 0,
|
||||
API_URL_PREFIX: '/api/v1/',
|
||||
AUTOMATIC_BLOCK_REINDEXING: false,
|
||||
POLL_RATE_MS: 2000,
|
||||
CACHE_DIR: './cache',
|
||||
CLEAR_PROTECTION_MINUTES: 20,
|
||||
RECOMMENDED_FEE_PERCENTILE: 50,
|
||||
BLOCK_WEIGHT_UNITS: 4000000,
|
||||
INITIAL_BLOCKS_AMOUNT: 8,
|
||||
MEMPOOL_BLOCKS_AMOUNT: 8,
|
||||
INDEXING_BLOCKS_AMOUNT: 11000,
|
||||
PRICE_FEED_UPDATE_INTERVAL: 600,
|
||||
USE_SECOND_NODE_FOR_MINFEE: false,
|
||||
EXTERNAL_ASSETS: [],
|
||||
EXTERNAL_MAX_RETRY: 1,
|
||||
EXTERNAL_RETRY_INTERVAL: 0,
|
||||
USER_AGENT: 'mempool',
|
||||
STDOUT_LOG_MIN_PRIORITY: 'debug',
|
||||
POOLS_JSON_TREE_URL: 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
|
||||
POOLS_JSON_URL: 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
|
||||
ADVANCED_GBT_AUDIT: false,
|
||||
ADVANCED_GBT_MEMPOOL: false,
|
||||
TRANSACTION_INDEXING: false,
|
||||
});
|
||||
|
||||
expect(config.ELECTRUM).toStrictEqual({ HOST: '127.0.0.1', PORT: 3306, TLS_ENABLED: true });
|
||||
|
||||
expect(config.ESPLORA).toStrictEqual({ REST_API_URL: 'http://127.0.0.1:3000' });
|
||||
|
||||
expect(config.CORE_RPC).toStrictEqual({
|
||||
HOST: '127.0.0.1',
|
||||
PORT: 8332,
|
||||
USERNAME: 'mempool',
|
||||
PASSWORD: 'mempool'
|
||||
});
|
||||
|
||||
expect(config.SECOND_CORE_RPC).toStrictEqual({
|
||||
HOST: '127.0.0.1',
|
||||
PORT: 8332,
|
||||
USERNAME: 'mempool',
|
||||
PASSWORD: 'mempool'
|
||||
});
|
||||
|
||||
expect(config.DATABASE).toStrictEqual({
|
||||
ENABLED: true,
|
||||
HOST: '127.0.0.1',
|
||||
SOCKET: '',
|
||||
PORT: 3306,
|
||||
DATABASE: 'mempool',
|
||||
USERNAME: 'mempool',
|
||||
PASSWORD: 'mempool'
|
||||
});
|
||||
|
||||
expect(config.SYSLOG).toStrictEqual({
|
||||
ENABLED: true,
|
||||
HOST: '127.0.0.1',
|
||||
PORT: 514,
|
||||
MIN_PRIORITY: 'info',
|
||||
FACILITY: 'local7'
|
||||
});
|
||||
|
||||
expect(config.STATISTICS).toStrictEqual({ ENABLED: true, TX_PER_SECOND_SAMPLE_PERIOD: 150 });
|
||||
|
||||
expect(config.BISQ).toStrictEqual({ ENABLED: false, DATA_PATH: '/bisq/statsnode-data/btc_mainnet/db' });
|
||||
|
||||
expect(config.SOCKS5PROXY).toStrictEqual({
|
||||
ENABLED: false,
|
||||
USE_ONION: true,
|
||||
HOST: '127.0.0.1',
|
||||
PORT: 9050,
|
||||
USERNAME: '',
|
||||
PASSWORD: ''
|
||||
});
|
||||
|
||||
expect(config.PRICE_DATA_SERVER).toStrictEqual({
|
||||
TOR_URL: 'http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices',
|
||||
CLEARNET_URL: 'https://price.bisq.wiz.biz/getAllMarketPrices'
|
||||
});
|
||||
|
||||
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual({
|
||||
MEMPOOL_API: 'https://mempool.space/api/v1',
|
||||
MEMPOOL_ONION: 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
|
||||
LIQUID_API: 'https://liquid.network/api/v1',
|
||||
LIQUID_ONION: 'http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1',
|
||||
BISQ_URL: 'https://bisq.markets/api',
|
||||
BISQ_ONION: 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
test('should override the default values with the passed values', () => {
|
||||
jest.isolateModules(() => {
|
||||
const fixture = JSON.parse(fs.readFileSync(`${__dirname}/../__fixtures__/mempool-config.template.json`, 'utf8'));
|
||||
jest.mock('../../mempool-config.json', () => (fixture), { virtual: true });
|
||||
|
||||
const config = jest.requireActual('../config').default;
|
||||
|
||||
expect(config.MEMPOOL).toStrictEqual(fixture.MEMPOOL);
|
||||
|
||||
expect(config.ELECTRUM).toStrictEqual(fixture.ELECTRUM);
|
||||
|
||||
expect(config.ESPLORA).toStrictEqual(fixture.ESPLORA);
|
||||
|
||||
expect(config.CORE_RPC).toStrictEqual(fixture.CORE_RPC);
|
||||
|
||||
expect(config.SECOND_CORE_RPC).toStrictEqual(fixture.SECOND_CORE_RPC);
|
||||
|
||||
expect(config.DATABASE).toStrictEqual(fixture.DATABASE);
|
||||
|
||||
expect(config.SYSLOG).toStrictEqual(fixture.SYSLOG);
|
||||
|
||||
expect(config.STATISTICS).toStrictEqual(fixture.STATISTICS);
|
||||
|
||||
expect(config.BISQ).toStrictEqual(fixture.BISQ);
|
||||
|
||||
expect(config.SOCKS5PROXY).toStrictEqual(fixture.SOCKS5PROXY);
|
||||
|
||||
expect(config.PRICE_DATA_SERVER).toStrictEqual(fixture.PRICE_DATA_SERVER);
|
||||
|
||||
expect(config.EXTERNAL_DATA_SERVER).toStrictEqual(fixture.EXTERNAL_DATA_SERVER);
|
||||
});
|
||||
});
|
||||
});
|
||||
backend/src/api/audit.ts (new file, 138 lines)
@@ -0,0 +1,138 @@
|
||||
import config from '../config';
|
||||
import bitcoinApi from './bitcoin/bitcoin-api-factory';
|
||||
import { Common } from './common';
|
||||
import { TransactionExtended, MempoolBlockWithTransactions, AuditScore } from '../mempool.interfaces';
|
||||
import blocksRepository from '../repositories/BlocksRepository';
|
||||
import blocksAuditsRepository from '../repositories/BlocksAuditsRepository';
|
||||
import blocks from '../api/blocks';
|
||||
|
||||
const PROPAGATION_MARGIN = 180; // in seconds, time since a transaction is first seen after which it is assumed to have propagated to all miners
|
||||
|
||||
class Audit {
|
||||
auditBlock(transactions: TransactionExtended[], projectedBlocks: MempoolBlockWithTransactions[], mempool: { [txId: string]: TransactionExtended })
|
||||
: { censored: string[], added: string[], fresh: string[], score: number } {
|
||||
if (!projectedBlocks?.[0]?.transactionIds || !mempool) {
|
||||
return { censored: [], added: [], fresh: [], score: 0 };
|
||||
}
|
||||
|
||||
const matches: string[] = []; // present in both mined block and template
|
||||
const added: string[] = []; // present in mined block, not in template
|
||||
const fresh: string[] = []; // missing, but firstSeen within PROPAGATION_MARGIN
|
||||
const isCensored = {}; // missing, without excuse
|
||||
const isDisplaced = {};
|
||||
let displacedWeight = 0;
|
||||
|
||||
const inBlock = {};
|
||||
const inTemplate = {};
|
||||
|
||||
const now = Math.round((Date.now() / 1000));
|
||||
for (const tx of transactions) {
|
||||
inBlock[tx.txid] = tx;
|
||||
}
|
||||
// coinbase is always expected
|
||||
if (transactions[0]) {
|
||||
inTemplate[transactions[0].txid] = true;
|
||||
}
|
||||
// look for transactions that were expected in the template, but missing from the mined block
|
||||
for (const txid of projectedBlocks[0].transactionIds) {
|
||||
if (!inBlock[txid]) {
|
||||
// tx is recent, may have reached the miner too late for inclusion
|
||||
if (mempool[txid]?.firstSeen != null && (now - (mempool[txid]?.firstSeen || 0)) <= PROPAGATION_MARGIN) {
|
||||
fresh.push(txid);
|
||||
} else {
|
||||
isCensored[txid] = true;
|
||||
}
|
||||
displacedWeight += mempool[txid].weight;
|
||||
}
|
||||
inTemplate[txid] = true;
|
||||
}
|
||||
|
||||
displacedWeight += (4000 - transactions[0].weight);
|
||||
|
||||
// we can expect an honest miner to include 'displaced' transactions in place of recent arrivals and censored txs
|
||||
// these displaced transactions should occupy the first N weight units of the next projected block
|
||||
let displacedWeightRemaining = displacedWeight;
|
||||
let index = 0;
|
||||
let lastFeeRate = Infinity;
|
||||
let failures = 0;
|
||||
while (projectedBlocks[1] && index < projectedBlocks[1].transactionIds.length && failures < 500) {
|
||||
const txid = projectedBlocks[1].transactionIds[index];
|
||||
const fits = (mempool[txid].weight - displacedWeightRemaining) < 4000;
|
||||
const feeMatches = mempool[txid].effectiveFeePerVsize >= lastFeeRate;
|
||||
if (fits || feeMatches) {
|
||||
isDisplaced[txid] = true;
|
||||
if (fits) {
|
||||
lastFeeRate = Math.min(lastFeeRate, mempool[txid].effectiveFeePerVsize);
|
||||
}
|
||||
if (mempool[txid].firstSeen == null || (now - (mempool[txid]?.firstSeen || 0)) > PROPAGATION_MARGIN) {
|
||||
displacedWeightRemaining -= mempool[txid].weight;
|
||||
}
|
||||
failures = 0;
|
||||
} else {
|
||||
failures++;
|
||||
}
|
||||
index++;
|
||||
}
|
||||
|
||||
// mark unexpected transactions in the mined block as 'added'
|
||||
let overflowWeight = 0;
|
||||
let totalWeight = 0;
|
||||
for (const tx of transactions) {
|
||||
if (inTemplate[tx.txid]) {
|
||||
matches.push(tx.txid);
|
||||
} else {
|
||||
if (!isDisplaced[tx.txid]) {
|
||||
added.push(tx.txid);
|
||||
} else {
|
||||
}
|
||||
let blockIndex = -1;
|
||||
let index = -1;
|
||||
projectedBlocks.forEach((block, bi) => {
|
||||
const i = block.transactionIds.indexOf(tx.txid);
|
||||
if (i >= 0) {
|
||||
blockIndex = bi;
|
||||
index = i;
|
||||
}
|
||||
});
|
||||
overflowWeight += tx.weight;
|
||||
}
|
||||
totalWeight += tx.weight;
|
||||
}
|
||||
|
||||
// transactions missing from near the end of our template are probably not being censored
|
||||
let overflowWeightRemaining = overflowWeight - (config.MEMPOOL.BLOCK_WEIGHT_UNITS - totalWeight);
|
||||
let maxOverflowRate = 0;
|
||||
let rateThreshold = 0;
|
||||
index = projectedBlocks[0].transactionIds.length - 1;
|
||||
while (index >= 0) {
|
||||
const txid = projectedBlocks[0].transactionIds[index];
|
||||
if (overflowWeightRemaining > 0) {
|
||||
if (isCensored[txid]) {
|
||||
delete isCensored[txid];
|
||||
}
|
||||
if (mempool[txid].effectiveFeePerVsize > maxOverflowRate) {
|
||||
maxOverflowRate = mempool[txid].effectiveFeePerVsize;
|
||||
rateThreshold = (Math.ceil(maxOverflowRate * 100) / 100) + 0.005;
|
||||
}
|
||||
} else if (mempool[txid].effectiveFeePerVsize <= rateThreshold) { // tolerance of 0.01 sat/vb + rounding
|
||||
if (isCensored[txid]) {
|
||||
delete isCensored[txid];
|
||||
}
|
||||
}
|
||||
overflowWeightRemaining -= (mempool[txid]?.weight || 0);
|
||||
index--;
|
||||
}
|
||||
|
||||
const numCensored = Object.keys(isCensored).length;
|
||||
const score = matches.length > 0 ? (matches.length / (matches.length + numCensored)) : 0;
|
||||
|
||||
return {
|
||||
censored: Object.keys(isCensored),
|
||||
added,
|
||||
fresh,
|
||||
score
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default new Audit();
|
||||
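In `auditBlock()` above, the final score is the fraction of template transactions that appear in the mined block, after transactions first seen within `PROPAGATION_MARGIN` (180 seconds) are counted as "fresh" rather than censored. A minimal illustration of that closing formula, with made-up counts:

```ts
// score = matches / (matches + censored), as computed at the end of auditBlock().
function auditScore(matchCount: number, censoredCount: number): number {
  return matchCount > 0 ? matchCount / (matchCount + censoredCount) : 0;
}

// Hypothetical block: 2950 template transactions matched, 3 flagged as censored.
console.log(auditScore(2950, 3)); // 0.99898...
```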
@@ -1,60 +1,37 @@
|
||||
import * as fs from 'fs';
|
||||
import * as os from 'os';
|
||||
import logger from '../logger';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import os from 'os';
|
||||
import { IBackendInfo } from '../mempool.interfaces';
|
||||
const { spawnSync } = require('child_process');
|
||||
|
||||
class BackendInfo {
|
||||
private gitCommitHash = '';
|
||||
private hostname = '';
|
||||
private version = '';
|
||||
private backendInfo: IBackendInfo;
|
||||
|
||||
constructor() {
|
||||
this.setLatestCommitHash();
|
||||
this.setVersion();
|
||||
this.hostname = os.hostname();
|
||||
}
|
||||
|
||||
public getBackendInfo(): IBackendInfo {
|
||||
return {
|
||||
hostname: this.hostname,
|
||||
gitCommit: this.gitCommitHash,
|
||||
version: this.version,
|
||||
// This file is created by ./fetch-version.ts during building
|
||||
const versionFile = path.join(__dirname, 'version.json')
|
||||
var versionInfo;
|
||||
if (fs.existsSync(versionFile)) {
|
||||
versionInfo = JSON.parse(fs.readFileSync(versionFile).toString());
|
||||
} else {
|
||||
// Use dummy values if `versionFile` doesn't exist (e.g., during testing)
|
||||
versionInfo = {
|
||||
version: '?',
|
||||
gitCommit: '?'
|
||||
};
|
||||
}
|
||||
this.backendInfo = {
|
||||
hostname: os.hostname(),
|
||||
version: versionInfo.version,
|
||||
gitCommit: versionInfo.gitCommit
|
||||
};
|
||||
}
|
||||
|
||||
public getBackendInfo(): IBackendInfo {
|
||||
return this.backendInfo;
|
||||
}
|
||||
|
||||
public getShortCommitHash() {
|
||||
return this.gitCommitHash.slice(0, 7);
|
||||
}
|
||||
|
||||
private setLatestCommitHash(): void {
|
||||
//TODO: share this logic with `generate-config.js`
|
||||
if (process.env.DOCKER_COMMIT_HASH) {
|
||||
this.gitCommitHash = process.env.DOCKER_COMMIT_HASH;
|
||||
} else {
|
||||
try {
|
||||
const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
|
||||
if (!gitRevParse.error) {
|
||||
const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
|
||||
this.gitCommitHash = output ? output : '?';
|
||||
} else if (gitRevParse.error.code === 'ENOENT') {
|
||||
console.log('git not found, cannot parse git hash');
|
||||
this.gitCommitHash = '?';
|
||||
}
|
||||
} catch (e: any) {
|
||||
console.log('Could not load git commit info: ' + e.message);
|
||||
this.gitCommitHash = '?';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private setVersion(): void {
|
||||
try {
|
||||
const packageJson = fs.readFileSync('package.json').toString();
|
||||
this.version = JSON.parse(packageJson).version;
|
||||
} catch (e) {
|
||||
throw new Error(e instanceof Error ? e.message : 'Error');
|
||||
}
|
||||
return this.backendInfo.gitCommit.slice(0, 7);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,13 +3,14 @@ import { IEsploraApi } from './esplora-api.interface';
export interface AbstractBitcoinApi {
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean, lazyPrevouts?: boolean): Promise<IEsploraApi.Transaction>;
$getTransactionHex(txId: string): Promise<string>;
$getBlockHeightTip(): Promise<number>;
$getBlockHashTip(): Promise<string>;
$getTxIdsForBlock(hash: string): Promise<string[]>;
$getBlockHash(height: number): Promise<string>;
$getBlockHeader(hash: string): Promise<string>;
$getBlock(hash: string): Promise<IEsploraApi.Block>;
$getRawBlock(hash: string): Promise<string>;
$getRawBlock(hash: string): Promise<Buffer>;
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];
@@ -57,6 +57,11 @@ class BitcoinApi implements AbstractBitcoinApi {
});
}

$getTransactionHex(txId: string): Promise<string> {
return this.$getRawTransaction(txId, true)
.then((tx) => tx.hex || '');
}

$getBlockHeightTip(): Promise<number> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
@@ -76,7 +81,7 @@ class BitcoinApi implements AbstractBitcoinApi {
.then((rpcBlock: IBitcoinApi.Block) => rpcBlock.tx);
}

$getRawBlock(hash: string): Promise<string> {
$getRawBlock(hash: string): Promise<Buffer> {
return this.bitcoindClient.getBlock(hash, 0)
.then((raw: string) => Buffer.from(raw, "hex"));
}
@@ -1,5 +1,6 @@
import { Application, Request, Response } from 'express';
import axios from 'axios';
import * as bitcoinjs from 'bitcoinjs-lib';
import config from '../../config';
import websocketHandler from '../websocket-handler';
import mempool from '../mempool';

@@ -16,13 +17,14 @@ import logger from '../../logger';
import blocks from '../blocks';
import bitcoinClient from './bitcoin-client';
import difficultyAdjustment from '../difficulty-adjustment';
import transactionRepository from '../../repositories/TransactionRepository';

class BitcoinRoutes {
  public initRoutes(app: Application) {
    app
      .get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
      .get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
      .get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.getCpfpInfo)
      .get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.$getCpfpInfo)
      .get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
      .get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
      .get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', this.getMempoolBlocks)

@@ -87,7 +89,9 @@ class BitcoinRoutes {
      .get(config.MEMPOOL.API_URL_PREFIX + 'blocks', this.getBlocks.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', this.getBlocks.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
      .get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions);
      .get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions)
      .get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/audit-summary', this.getBlockAuditSummary)
      .post(config.MEMPOOL.API_URL_PREFIX + 'psbt/addparents', this.postPsbtCompletion)
      ;

    if (config.MEMPOOL.BACKEND !== 'esplora') {

@@ -185,29 +189,36 @@ class BitcoinRoutes {
    }
  }

  private getCpfpInfo(req: Request, res: Response) {
  private async $getCpfpInfo(req: Request, res: Response) {
    if (!/^[a-fA-F0-9]{64}$/.test(req.params.txId)) {
      res.status(501).send(`Invalid transaction ID.`);
      return;
    }

    const tx = mempool.getMempool()[req.params.txId];
    if (!tx) {
      res.status(404).send(`Transaction doesn't exist in the mempool.`);
    if (tx) {
      if (tx?.cpfpChecked) {
        res.json({
          ancestors: tx.ancestors,
          bestDescendant: tx.bestDescendant || null,
          descendants: tx.descendants || null,
          effectiveFeePerVsize: tx.effectiveFeePerVsize || null,
        });
        return;
      }

      const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());

      res.json(cpfpInfo);
      return;
    } else {
      const cpfpInfo = await transactionRepository.$getCpfpInfo(req.params.txId);
      if (cpfpInfo) {
        res.json(cpfpInfo);
        return;
      }
    }

    if (tx.cpfpChecked) {
      res.json({
        ancestors: tx.ancestors,
        bestDescendant: tx.bestDescendant || null,
      });
      return;
    }

    const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());

    res.json(cpfpInfo);
    res.status(404).send(`Transaction has no CPFP info available.`);
  }

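For context on the rewrite above: `effectiveFeePerVsize` describes the whole CPFP package (the transaction plus its unconfirmed ancestors), not the transaction alone. A minimal sketch of that calculation, using made-up field shapes that stand in for the real interfaces in `mempool.interfaces.ts`:

```typescript
// Hypothetical shapes for illustration only; the backend's real types live in mempool.interfaces.ts
interface PackageMember { txid: string; fee: number; weight: number; } // fee in sats, weight in WU

// Effective fee rate of a CPFP package: total fees divided by total vsize (weight / 4)
function effectiveFeePerVsize(tx: PackageMember, ancestors: PackageMember[]): number {
  const members = [tx, ...ancestors];
  const totalFee = members.reduce((sum, m) => sum + m.fee, 0);
  const totalVsize = members.reduce((sum, m) => sum + m.weight / 4, 0);
  return totalFee / totalVsize;
}

// A 110 sat/vB child paying for a 1 sat/vB parent of equal size averages out to ~55.5 sat/vB
console.log(effectiveFeePerVsize(
  { txid: 'child', fee: 15400, weight: 560 },  // 140 vB at 110 sat/vB
  [{ txid: 'parent', fee: 140, weight: 560 }], // 140 vB at 1 sat/vB
));
```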
  private getBackendInfo(req: Request, res: Response) {

@@ -241,6 +252,74 @@ class BitcoinRoutes {
    }
  }

  /**
   * Takes the PSBT as text/plain body, parses it, and adds the full
   * parent transaction to each input that doesn't already have it.
   * This is used for BTCPayServer / Trezor users who need access to
   * the full parent transaction even with segwit inputs.
   * It will respond with a text/plain PSBT in the same format (hex|base64).
   */
  private async postPsbtCompletion(req: Request, res: Response): Promise<void> {
    res.setHeader('content-type', 'text/plain');
    const notFoundError = `Couldn't get transaction hex for parent of input`;
    try {
      let psbt: bitcoinjs.Psbt;
      let format: 'hex' | 'base64';
      let isModified = false;
      try {
        psbt = bitcoinjs.Psbt.fromBase64(req.body);
        format = 'base64';
      } catch (e1) {
        try {
          psbt = bitcoinjs.Psbt.fromHex(req.body);
          format = 'hex';
        } catch (e2) {
          throw new Error(`Unable to parse PSBT`);
        }
      }
      for (const [index, input] of psbt.data.inputs.entries()) {
        if (!input.nonWitnessUtxo) {
          // Buffer.from ensures it won't be modified in place by reverse()
          const txid = Buffer.from(psbt.txInputs[index].hash)
            .reverse()
            .toString('hex');

          let transactionHex: string;
          // If missing transaction, return 404 status error
          try {
            transactionHex = await bitcoinApi.$getTransactionHex(txid);
            if (!transactionHex) {
              throw new Error('');
            }
          } catch (err) {
            throw new Error(`${notFoundError} #${index} @ ${txid}`);
          }

          psbt.updateInput(index, {
            nonWitnessUtxo: Buffer.from(transactionHex, 'hex'),
          });
          if (!isModified) {
            isModified = true;
          }
        }
      }
      if (isModified) {
        res.send(format === 'hex' ? psbt.toHex() : psbt.toBase64());
      } else {
        // Not modified
        // 422 Unprocessable Entity
        // https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/422
        res.status(422).send(`Psbt had no missing nonWitnessUtxos.`);
      }
    } catch (e: any) {
      if (e instanceof Error && new RegExp(notFoundError).test(e.message)) {
        res.status(404).send(e.message);
      } else {
        res.status(500).send(e instanceof Error ? e.message : e);
      }
    }
  }

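A hedged sketch of how a wallet integration might call the new `psbt/addparents` route (the base URL and prefix below are deployment-specific assumptions): the PSBT is posted as a text/plain body and comes back in the same encoding with `nonWitnessUtxo` filled in on inputs that were missing it. A 422 response means nothing was missing; a 404 means a parent transaction could not be found.

```typescript
import axios from 'axios';

// Assumed deployment URL; the real prefix comes from config.MEMPOOL.API_URL_PREFIX
const API_BASE = 'https://mempool.example/api/v1';

async function addParentTransactions(psbtBase64: string): Promise<string> {
  // The handler accepts hex or base64 and responds in the same format it was given
  const response = await axios.post<string>(`${API_BASE}/psbt/addparents`, psbtBase64, {
    headers: { 'Content-Type': 'text/plain' },
  });
  return response.data; // PSBT with nonWitnessUtxo added to previously incomplete inputs
}
```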
  private async getTransactionStatus(req: Request, res: Response) {
    try {
      const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);

@@ -254,6 +333,16 @@ class BitcoinRoutes {
    }
  }

  private async getStrippedBlockTransactions(req: Request, res: Response) {
    try {
      const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
      res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
      res.json(transactions);
    } catch (e) {
      res.status(500).send(e instanceof Error ? e.message : e);
    }
  }

  private async getBlock(req: Request, res: Response) {
    try {
      const block = await blocks.$getBlock(req.params.hash);

@@ -286,9 +375,9 @@ class BitcoinRoutes {
    }
  }

  private async getStrippedBlockTransactions(req: Request, res: Response) {
  private async getBlockAuditSummary(req: Request, res: Response) {
    try {
      const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
      const transactions = await blocks.$getBlockAuditSummary(req.params.hash);
      res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
      res.json(transactions);
    } catch (e) {

@@ -510,7 +599,12 @@ class BitcoinRoutes {

  private getDifficultyChange(req: Request, res: Response) {
    try {
      res.json(difficultyAdjustment.getDifficultyAdjustment());
      const da = difficultyAdjustment.getDifficultyAdjustment();
      if (da) {
        res.json(da);
      } else {
        res.status(503).send(`Service Temporarily Unavailable`);
      }
    } catch (e) {
      res.status(500).send(e instanceof Error ? e.message : e);
    }

@@ -20,6 +20,11 @@ class ElectrsApi implements AbstractBitcoinApi {
      .then((response) => response.data);
  }

  $getTransactionHex(txId: string): Promise<string> {
    return axios.get<string>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/hex', this.axiosConfig)
      .then((response) => response.data);
  }

  $getBlockHeightTip(): Promise<number> {
    return axios.get<number>(config.ESPLORA.REST_API_URL + '/blocks/tip/height', this.axiosConfig)
      .then((response) => response.data);

@@ -50,9 +55,9 @@ class ElectrsApi implements AbstractBitcoinApi {
      .then((response) => response.data);
  }

  $getRawBlock(hash: string): Promise<string> {
    return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", this.axiosConfig)
      .then((response) => response.data);
  $getRawBlock(hash: string): Promise<Buffer> {
    return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", { ...this.axiosConfig, responseType: 'arraybuffer' })
      .then((response) => { return Buffer.from(response.data); });
  }

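Both backends now resolve `$getRawBlock` to a `Buffer` (the Esplora variant fetches `/block/:hash/raw` with `responseType: 'arraybuffer'`), so callers can hand the result straight to bitcoinjs-lib. A minimal sketch of that consumption, mirroring what `$indexCPFP` does later in this changeset; the import path for `bitcoinApi` is an assumption:

```typescript
import { Block } from 'bitcoinjs-lib';
import bitcoinApi from './bitcoin-api-factory'; // path assumed

// Parse a raw block and list its txids, without any hex round-trip
async function listBlockTxids(hash: string): Promise<string[]> {
  const rawBlock: Buffer = await bitcoinApi.$getRawBlock(hash); // now a Buffer
  const block = Block.fromBuffer(rawBlock);
  return (block.transactions || []).map((tx) => tx.getId());
}
```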
  $getAddress(address: string): Promise<IEsploraApi.Address> {

@@ -20,8 +20,14 @@ import indexer from '../indexer';
|
||||
import fiatConversion from './fiat-conversion';
|
||||
import poolsParser from './pools-parser';
|
||||
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
|
||||
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
|
||||
import cpfpRepository from '../repositories/CpfpRepository';
|
||||
import transactionRepository from '../repositories/TransactionRepository';
|
||||
import mining from './mining/mining';
|
||||
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
|
||||
import PricesRepository from '../repositories/PricesRepository';
|
||||
import priceUpdater from '../tasks/price-updater';
|
||||
import { Block } from 'bitcoinjs-lib';
|
||||
|
||||
class Blocks {
|
||||
private blocks: BlockExtended[] = [];
|
||||
@@ -31,6 +37,7 @@ class Blocks {
|
||||
private lastDifficultyAdjustmentTime = 0;
|
||||
private previousDifficultyRetarget = 0;
|
||||
private newBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void)[] = [];
|
||||
private newAsyncBlockCallbacks: ((block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>)[] = [];
|
||||
|
||||
constructor() { }
|
||||
|
||||
@@ -54,6 +61,10 @@ class Blocks {
|
||||
this.newBlockCallbacks.push(fn);
|
||||
}
|
||||
|
||||
public setNewAsyncBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => Promise<void>) {
|
||||
this.newAsyncBlockCallbacks.push(fn);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the list of transaction for a block
|
||||
* @param blockHash
|
||||
@@ -127,7 +138,7 @@ class Blocks {
|
||||
const stripped = block.tx.map((tx) => {
|
||||
return {
|
||||
txid: tx.txid,
|
||||
vsize: tx.vsize,
|
||||
vsize: tx.weight / 4,
|
||||
fee: tx.fee ? Math.round(tx.fee * 100000000) : 0,
|
||||
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0) * 100000000)
|
||||
};
|
||||
@@ -184,14 +195,18 @@ class Blocks {
|
||||
if (!pool) { // We should never have this situation in practice
|
||||
logger.warn(`Cannot assign pool to block ${blockExtended.height} and 'unknown' pool does not exist. ` +
|
||||
`Check your "pools" table entries`);
|
||||
return blockExtended;
|
||||
} else {
|
||||
blockExtended.extras.pool = {
|
||||
id: pool.id,
|
||||
name: pool.name,
|
||||
slug: pool.slug,
|
||||
};
|
||||
}
|
||||
|
||||
blockExtended.extras.pool = {
|
||||
id: pool.id,
|
||||
name: pool.name,
|
||||
slug: pool.slug,
|
||||
};
|
||||
const auditScore = await BlocksAuditsRepository.$getBlockAuditScore(block.id);
|
||||
if (auditScore != null) {
|
||||
blockExtended.extras.matchRate = auditScore.matchRate;
|
||||
}
|
||||
}
|
||||
|
||||
return blockExtended;
|
||||
@@ -248,7 +263,7 @@ class Blocks {
|
||||
/**
|
||||
* [INDEXING] Index all blocks summaries for the block txs visualization
|
||||
*/
|
||||
public async $generateBlocksSummariesDatabase() {
|
||||
public async $generateBlocksSummariesDatabase(): Promise<void> {
|
||||
if (Common.blocksSummariesIndexingEnabled() === false) {
|
||||
return;
|
||||
}
|
||||
@@ -304,6 +319,57 @@ class Blocks {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* [INDEXING] Index transaction CPFP data for all blocks
|
||||
*/
|
||||
public async $generateCPFPDatabase(): Promise<void> {
|
||||
if (Common.cpfpIndexingEnabled() === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Get all indexed block hash
|
||||
const unindexedBlocks = await blocksRepository.$getCPFPUnindexedBlocks();
|
||||
|
||||
if (!unindexedBlocks?.length) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Logging
|
||||
let count = 0;
|
||||
let countThisRun = 0;
|
||||
let timer = new Date().getTime() / 1000;
|
||||
const startedAt = new Date().getTime() / 1000;
|
||||
|
||||
for (const block of unindexedBlocks) {
|
||||
// Logging
|
||||
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
|
||||
if (elapsedSeconds > 5) {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const blockPerSeconds = Math.max(1, countThisRun / elapsedSeconds);
|
||||
const progress = Math.round(count / unindexedBlocks.length * 10000) / 100;
|
||||
logger.debug(`Indexing cpfp clusters for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${count}/${unindexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
countThisRun = 0;
|
||||
}
|
||||
|
||||
await this.$indexCPFP(block.hash, block.height); // Calculate and save CPFP data for transactions in this block
|
||||
|
||||
// Logging
|
||||
count++;
|
||||
countThisRun++;
|
||||
}
|
||||
if (count > 0) {
|
||||
logger.notice(`CPFP indexing completed: indexed ${count} blocks`);
|
||||
} else {
|
||||
logger.debug(`CPFP indexing completed: indexed ${count} blocks`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`CPFP indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* [INDEXING] Index all blocks metadata for the mining dashboard
|
||||
*/
|
||||
@@ -347,7 +413,7 @@ class Blocks {
|
||||
}
|
||||
++indexedThisRun;
|
||||
++totalIndexed;
|
||||
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
|
||||
const elapsedSeconds = Math.max(1, new Date().getTime() / 1000 - timer);
|
||||
if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
|
||||
@@ -437,6 +503,9 @@ class Blocks {
|
||||
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
|
||||
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
|
||||
|
||||
// start async callbacks
|
||||
const callbackPromises = this.newAsyncBlockCallbacks.map((cb) => cb(blockExtended, txIds, transactions));
|
||||
|
||||
if (Common.indexingEnabled()) {
|
||||
if (!fastForwarded) {
|
||||
const lastBlock = await blocksRepository.$getBlockByHeight(blockExtended.height - 1);
|
||||
@@ -446,9 +515,13 @@ class Blocks {
|
||||
await BlocksRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
|
||||
await HashratesRepository.$deleteLastEntries();
|
||||
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
|
||||
await cpfpRepository.$deleteClustersFrom(lastBlock['height'] - 10);
|
||||
for (let i = 10; i >= 0; --i) {
|
||||
const newBlock = await this.$indexBlock(lastBlock['height'] - i);
|
||||
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
|
||||
if (config.MEMPOOL.TRANSACTION_INDEXING) {
|
||||
await this.$indexCPFP(newBlock.id, lastBlock['height'] - i);
|
||||
}
|
||||
}
|
||||
await mining.$indexDifficultyAdjustments();
|
||||
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
|
||||
@@ -457,10 +530,26 @@ class Blocks {
|
||||
}
|
||||
await blocksRepository.$saveBlockInDatabase(blockExtended);
|
||||
|
||||
const lastestPriceId = await PricesRepository.$getLatestPriceId();
|
||||
if (priceUpdater.historyInserted === true && lastestPriceId !== null) {
|
||||
await blocksRepository.$saveBlockPrices([{
|
||||
height: blockExtended.height,
|
||||
priceId: lastestPriceId,
|
||||
}]);
|
||||
} else {
|
||||
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasn't completed yet. Trying again in 10 seconds.`)
|
||||
setTimeout(() => {
|
||||
indexer.runSingleTask('blocksPrices');
|
||||
}, 10000);
|
||||
}
|
||||
|
||||
// Save blocks summary for visualization if it's enabled
|
||||
if (Common.blocksSummariesIndexingEnabled() === true) {
|
||||
await this.$getStrippedBlockTransactions(blockExtended.id, true);
|
||||
}
|
||||
if (config.MEMPOOL.TRANSACTION_INDEXING) {
|
||||
this.$indexCPFP(blockExtended.id, this.currentBlockHeight);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -494,6 +583,9 @@ class Blocks {
|
||||
if (!memPool.hasPriority()) {
|
||||
diskCache.$saveCacheToDisk();
|
||||
}
|
||||
|
||||
// wait for pending async callbacks to finish
|
||||
await Promise.all(callbackPromises);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -559,7 +651,7 @@ class Blocks {
|
||||
if (skipMemoryCache === false) {
|
||||
// Check the memory cache
|
||||
const cachedSummary = this.getBlockSummaries().find((b) => b.id === hash);
|
||||
if (cachedSummary) {
|
||||
if (cachedSummary?.transactions?.length) {
|
||||
return cachedSummary.transactions;
|
||||
}
|
||||
}
|
||||
@@ -567,7 +659,7 @@ class Blocks {
|
||||
// Check if it's indexed in db
|
||||
if (skipDBLookup === false && Common.blocksSummariesIndexingEnabled() === true) {
|
||||
const indexedSummary = await BlocksSummariesRepository.$getByBlockId(hash);
|
||||
if (indexedSummary !== undefined) {
|
||||
if (indexedSummary !== undefined && indexedSummary?.transactions?.length) {
|
||||
return indexedSummary.transactions;
|
||||
}
|
||||
}
|
||||
@@ -620,6 +712,22 @@ class Blocks {
|
||||
return returnBlocks;
|
||||
}
|
||||
|
||||
public async $getBlockAuditSummary(hash: string): Promise<any> {
|
||||
let summary;
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||
summary = await BlocksAuditsRepository.$getBlockAudit(hash);
|
||||
}
|
||||
|
||||
// fallback to non-audited transaction summary
|
||||
if (!summary?.transactions?.length) {
|
||||
const strippedTransactions = await this.$getStrippedBlockTransactions(hash);
|
||||
summary = {
|
||||
transactions: strippedTransactions
|
||||
};
|
||||
}
|
||||
return summary;
|
||||
}
|
||||
|
||||
public getLastDifficultyAdjustmentTime(): number {
|
||||
return this.lastDifficultyAdjustmentTime;
|
||||
}
|
||||
@@ -631,6 +739,62 @@ class Blocks {
|
||||
public getCurrentBlockHeight(): number {
|
||||
return this.currentBlockHeight;
|
||||
}
|
||||
|
||||
public async $indexCPFP(hash: string, height: number): Promise<void> {
|
||||
let transactions;
|
||||
if (false/*Common.blocksSummariesIndexingEnabled()*/) {
|
||||
transactions = await this.$getStrippedBlockTransactions(hash);
|
||||
const rawBlock = await bitcoinApi.$getRawBlock(hash);
|
||||
const block = Block.fromBuffer(rawBlock);
|
||||
const txMap = {};
|
||||
for (const tx of block.transactions || []) {
|
||||
txMap[tx.getId()] = tx;
|
||||
}
|
||||
for (const tx of transactions) {
|
||||
if (txMap[tx.txid]?.ins) {
|
||||
tx.vin = txMap[tx.txid].ins.map(vin => {
|
||||
return {
|
||||
txid: vin.hash
|
||||
};
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const block = await bitcoinClient.getBlock(hash, 2);
|
||||
transactions = block.tx.map(tx => {
|
||||
tx.vsize = tx.weight / 4;
|
||||
return tx;
|
||||
});
|
||||
}
|
||||
|
||||
let cluster: TransactionStripped[] = [];
|
||||
let ancestors: { [txid: string]: boolean } = {};
|
||||
for (let i = transactions.length - 1; i >= 0; i--) {
|
||||
const tx = transactions[i];
|
||||
if (!ancestors[tx.txid]) {
|
||||
let totalFee = 0;
|
||||
let totalVSize = 0;
|
||||
cluster.forEach(tx => {
|
||||
totalFee += tx?.fee || 0;
|
||||
totalVSize += tx.vsize;
|
||||
});
|
||||
const effectiveFeePerVsize = (totalFee * 100_000_000) / totalVSize;
|
||||
if (cluster.length > 1) {
|
||||
await cpfpRepository.$saveCluster(height, cluster.map(tx => { return { txid: tx.txid, weight: tx.vsize * 4, fee: (tx.fee || 0) * 100_000_000 }; }), effectiveFeePerVsize);
|
||||
for (const tx of cluster) {
|
||||
await transactionRepository.$setCluster(tx.txid, cluster[0].txid);
|
||||
}
|
||||
}
|
||||
cluster = [];
|
||||
ancestors = {};
|
||||
}
|
||||
cluster.push(tx);
|
||||
tx.vin.forEach(vin => {
|
||||
ancestors[vin.txid] = true;
|
||||
});
|
||||
}
|
||||
await blocksRepository.$setCPFPIndexed(hash);
|
||||
}
|
||||
}
|
||||
|
||||
export default new Blocks();
|
||||
|
||||
@@ -1,5 +1,7 @@
import { CpfpInfo, TransactionExtended, TransactionStripped } from '../mempool.interfaces';
import config from '../config';
import { NodeSocket } from '../repositories/NodesSocketsRepository';
import { isIP } from 'net';
export class Common {
  static nativeAssetId = config.MEMPOOL.NETWORK === 'liquidtestnet' ?
    '144c654344aa716d6f3abcc1ca90e5641e4e2a7f633bc09fe3baf64585819a49'

@@ -184,4 +186,124 @@ export class Common {
      config.MEMPOOL.BLOCKS_SUMMARIES_INDEXING === true
    );
  }

  static cpfpIndexingEnabled(): boolean {
    return (
      Common.indexingEnabled() &&
      config.MEMPOOL.TRANSACTION_INDEXING === true
    );
  }

  static setDateMidnight(date: Date): void {
    date.setUTCHours(0);
    date.setUTCMinutes(0);
    date.setUTCSeconds(0);
    date.setUTCMilliseconds(0);
  }

  static channelShortIdToIntegerId(channelId: string): string {
    if (channelId.indexOf('x') === -1) { // Already an integer id
      return channelId;
    }
    if (channelId.indexOf('/') !== -1) { // Topology import
      channelId = channelId.slice(0, -2);
    }
    const s = channelId.split('x').map(part => BigInt(part));
    return ((s[0] << 40n) | (s[1] << 16n) | s[2]).toString();
  }

  /** Decodes a channel id returned by lnd as uint64 to a short channel id */
  static channelIntegerIdToShortId(id: string): string {
    if (id.indexOf('/') !== -1) {
      id = id.slice(0, -2);
    }

    if (id.indexOf('x') !== -1) { // Already a short id
      return id;
    }

    const n = BigInt(id);
    return [
      n >> 40n, // nth block
      (n >> 16n) & 0xffffffn, // nth tx of the block
      n & 0xffffn // nth output of the tx
    ].join('x');
  }

  static utcDateToMysql(date?: number): string {
    const d = new Date((date || 0) * 1000);
    return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
  }

  static findSocketNetwork(addr: string): {network: string | null, url: string} {
    let network: string | null = null;
    let url = addr.split('://')[1];

    if (!url) {
      return {
        network: null,
        url: addr,
      };
    }

    if (addr.indexOf('onion') !== -1) {
      if (url.split('.')[0].length >= 56) {
        network = 'torv3';
      } else {
        network = 'torv2';
      }
    } else if (addr.indexOf('i2p') !== -1) {
      network = 'i2p';
    } else if (addr.indexOf('ipv4') !== -1) {
      const ipv = isIP(url.split(':')[0]);
      if (ipv === 4) {
        network = 'ipv4';
      } else {
        return {
          network: null,
          url: addr,
        };
      }
    } else if (addr.indexOf('ipv6') !== -1) {
      url = url.split('[')[1].split(']')[0];
      const ipv = isIP(url);
      if (ipv === 6) {
        const parts = addr.split(':');
        network = 'ipv6';
        url = `[${url}]:${parts[parts.length - 1]}`;
      } else {
        return {
          network: null,
          url: addr,
        };
      }
    } else {
      return {
        network: null,
        url: addr,
      };
    }

    return {
      network: network,
      url: url,
    };
  }

  static formatSocket(publicKey: string, socket: {network: string, addr: string}): NodeSocket {
    if (config.LIGHTNING.BACKEND === 'cln') {
      return {
        publicKey: publicKey,
        network: socket.network,
        addr: socket.addr,
      };
    } else /* if (config.LIGHTNING.BACKEND === 'lnd') */ {
      const formatted = this.findSocketNetwork(socket.addr);
      return {
        publicKey: publicKey,
        network: formatted.network,
        addr: formatted.url,
      };
    }
  }
}

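The two channel id helpers added above implement the usual short-channel-id packing: block height in the top bits, then the transaction's index within the block, then the output index (`block << 40 | txIndex << 16 | vout`). A small round-trip sketch with made-up values:

```typescript
import { Common } from './common';

// '700000x1234x1' = block 700000, 1234th transaction in that block, output 1 (example values)
const integerId = Common.channelShortIdToIntegerId('700000x1234x1');
const shortId = Common.channelIntegerIdToShortId(integerId);

console.log(integerId);                   // decimal string form of the packed 64-bit id
console.log(shortId === '700000x1234x1'); // true: the conversion round-trips
```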
@@ -4,8 +4,8 @@ import logger from '../logger';
import { Common } from './common';

class DatabaseMigration {
  private static currentVersion = 33;
  private queryTimeout = 120000;
  private static currentVersion = 49;
  private queryTimeout = 3600_000;
  private statisticsAddedIndexed = false;
  private uniqueLogs: string[] = [];

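Every schema bump below follows the same guard-and-bump shape, which is how `currentVersion` can jump from 33 to 49 in a single release while older databases catch up step by step. Schematically (the table and column here are placeholders, not one of the real migrations in this diff):

```typescript
// Placeholder step, not part of this changeset
if (databaseSchemaVersion < 50 && isBitcoin === true) {
  await this.$executeQuery('ALTER TABLE `example_table` ADD `example_column` int(11) DEFAULT 0');
  await this.updateToSchemaVersion(50);
}
```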
@@ -107,18 +107,22 @@ class DatabaseMigration {
|
||||
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
|
||||
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
|
||||
await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
|
||||
await this.updateToSchemaVersion(2);
|
||||
}
|
||||
if (databaseSchemaVersion < 3) {
|
||||
await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
|
||||
await this.updateToSchemaVersion(3);
|
||||
}
|
||||
if (databaseSchemaVersion < 4) {
|
||||
await this.$executeQuery('DROP table IF EXISTS blocks;');
|
||||
await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
|
||||
await this.updateToSchemaVersion(4);
|
||||
}
|
||||
if (databaseSchemaVersion < 5 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(5);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 6 && isBitcoin === true) {
|
||||
@@ -141,11 +145,13 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
|
||||
await this.updateToSchemaVersion(6);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 7 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP table IF EXISTS hashrates;');
|
||||
await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
|
||||
await this.updateToSchemaVersion(7);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 8 && isBitcoin === true) {
|
||||
@@ -155,6 +161,7 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
|
||||
await this.updateToSchemaVersion(8);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 9 && isBitcoin === true) {
|
||||
@@ -162,10 +169,12 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
|
||||
await this.updateToSchemaVersion(9);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 10 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
|
||||
await this.updateToSchemaVersion(10);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 11 && isBitcoin === true) {
|
||||
@@ -178,11 +187,13 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(11);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 12 && isBitcoin === true) {
|
||||
// No need to re-index because the new data type can contain larger values
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(12);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 13 && isBitcoin === true) {
|
||||
@@ -190,6 +201,7 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(13);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 14 && isBitcoin === true) {
|
||||
@@ -197,37 +209,45 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(14);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 16 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
|
||||
await this.updateToSchemaVersion(16);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 17 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
|
||||
await this.updateToSchemaVersion(17);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 18 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `hash` (`hash`);');
|
||||
await this.updateToSchemaVersion(18);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 19) {
|
||||
await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
|
||||
await this.updateToSchemaVersion(19);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 20 && isBitcoin === true) {
|
||||
await this.$executeQuery(this.getCreateBlocksSummariesTableQuery(), await this.$checkIfTableExists('blocks_summaries'));
|
||||
await this.updateToSchemaVersion(20);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 21) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `rates`');
|
||||
await this.$executeQuery(this.getCreatePricesTableQuery(), await this.$checkIfTableExists('prices'));
|
||||
await this.updateToSchemaVersion(21);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 22 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `difficulty_adjustments`');
|
||||
await this.$executeQuery(this.getCreateDifficultyAdjustmentsTableQuery(), await this.$checkIfTableExists('difficulty_adjustments'));
|
||||
await this.updateToSchemaVersion(22);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 23) {
|
||||
@@ -240,19 +260,21 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `CHF` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `AUD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `JPY` float DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(23);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 24 && isBitcoin == true) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_audits`');
|
||||
await this.$executeQuery(this.getCreateBlocksAuditsTableQuery(), await this.$checkIfTableExists('blocks_audits'));
|
||||
await this.updateToSchemaVersion(24);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 25 && isBitcoin === true) {
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('last_node_stats', 0, '1970-01-01');`);
|
||||
await this.$executeQuery(this.getCreateLightningStatisticsQuery(), await this.$checkIfTableExists('lightning_stats'));
|
||||
await this.$executeQuery(this.getCreateNodesQuery(), await this.$checkIfTableExists('nodes'));
|
||||
await this.$executeQuery(this.getCreateChannelsQuery(), await this.$checkIfTableExists('channels'));
|
||||
await this.$executeQuery(this.getCreateNodesStatsQuery(), await this.$checkIfTableExists('node_stats'));
|
||||
await this.updateToSchemaVersion(25);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 26 && isBitcoin === true) {
|
||||
@@ -263,6 +285,7 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD tor_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD unannounced_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(26);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 27 && isBitcoin === true) {
|
||||
@@ -272,6 +295,7 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(27);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 28 && isBitcoin === true) {
|
||||
@@ -281,6 +305,7 @@ class DatabaseMigration {
|
||||
await this.$executeQuery(`TRUNCATE lightning_stats`);
|
||||
await this.$executeQuery(`TRUNCATE node_stats`);
|
||||
await this.$executeQuery(`ALTER TABLE lightning_stats MODIFY added DATE`);
|
||||
await this.updateToSchemaVersion(28);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 29 && isBitcoin === true) {
|
||||
@@ -292,24 +317,130 @@ class DatabaseMigration {
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD subdivision_id int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD longitude double NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD latitude double NULL DEFAULT NULL');
|
||||
await this.updateToSchemaVersion(29);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 30 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization") NOT NULL');
|
||||
await this.updateToSchemaVersion(30);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 31 && isBitcoin == true) { // Link blocks to prices
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `id` int NULL AUTO_INCREMENT UNIQUE');
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_prices`');
|
||||
await this.$executeQuery(this.getCreateBlocksPricesTableQuery(), await this.$checkIfTableExists('blocks_prices'));
|
||||
await this.updateToSchemaVersion(31);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 32 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks_summaries` ADD `template` JSON DEFAULT "[]"');
|
||||
await this.updateToSchemaVersion(32);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 33 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization", "country_iso_code") NOT NULL');
|
||||
await this.updateToSchemaVersion(33);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 34 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
await this.updateToSchemaVersion(34);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 35 && isBitcoin == true) {
|
||||
await this.$executeQuery('DELETE from `lightning_stats` WHERE added > "2021-09-19"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD CONSTRAINT added_unique UNIQUE (added);');
|
||||
await this.updateToSchemaVersion(35);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 36 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD status TINYINT NOT NULL DEFAULT "1"');
|
||||
await this.updateToSchemaVersion(36);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 37 && isBitcoin == true) {
|
||||
await this.$executeQuery(this.getCreateLNNodesSocketsTableQuery(), await this.$checkIfTableExists('nodes_sockets'));
|
||||
await this.updateToSchemaVersion(37);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 38 && isBitcoin == true) {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.uniqueLog(logger.notice, `'lightning_stats' and 'node_stats' tables have been truncated.`);
|
||||
}
|
||||
await this.$executeQuery(`TRUNCATE lightning_stats`);
|
||||
await this.$executeQuery(`TRUNCATE node_stats`);
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` CHANGE `added` `added` timestamp NULL');
|
||||
await this.$executeQuery('ALTER TABLE `node_stats` CHANGE `added` `added` timestamp NULL');
|
||||
await this.updateToSchemaVersion(38);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 39 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD alias_search TEXT NULL DEFAULT NULL AFTER `alias`');
|
||||
await this.$executeQuery('ALTER TABLE nodes ADD FULLTEXT(alias_search)');
|
||||
await this.updateToSchemaVersion(39);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 40 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD capacity bigint(20) unsigned DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD channels int(11) unsigned DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD INDEX `capacity` (`capacity`);');
|
||||
await this.updateToSchemaVersion(40);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 41 && isBitcoin === true) {
|
||||
await this.$executeQuery('UPDATE channels SET closing_reason = NULL WHERE closing_reason = 1');
|
||||
await this.updateToSchemaVersion(41);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 42 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD closing_resolved tinyint(1) DEFAULT 0');
|
||||
await this.updateToSchemaVersion(42);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 43 && isBitcoin === true) {
|
||||
await this.$executeQuery(this.getCreateLNNodeRecordsTableQuery(), await this.$checkIfTableExists('nodes_records'));
|
||||
await this.updateToSchemaVersion(43);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 44 && isBitcoin === true) {
|
||||
await this.$executeQuery('UPDATE blocks_summaries SET template = NULL');
|
||||
await this.updateToSchemaVersion(44);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 45 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks_audits` ADD fresh_txs JSON DEFAULT "[]"');
|
||||
await this.updateToSchemaVersion(45);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 46) {
|
||||
await this.$executeQuery(`ALTER TABLE blocks MODIFY blockTimestamp timestamp NOT NULL DEFAULT 0`);
|
||||
await this.updateToSchemaVersion(46);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 47) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD cpfp_indexed tinyint(1) DEFAULT 0');
|
||||
await this.$executeQuery(this.getCreateCPFPTableQuery(), await this.$checkIfTableExists('cpfp_clusters'));
|
||||
await this.$executeQuery(this.getCreateTransactionsTableQuery(), await this.$checkIfTableExists('transactions'));
|
||||
await this.updateToSchemaVersion(47);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 48 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD source_checked tinyint(1) DEFAULT 0');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD closing_fee bigint(20) unsigned DEFAULT 0');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD node1_funding_balance bigint(20) unsigned DEFAULT 0');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD node2_funding_balance bigint(20) unsigned DEFAULT 0');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD node1_closing_balance bigint(20) unsigned DEFAULT 0');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD node2_closing_balance bigint(20) unsigned DEFAULT 0');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD funding_ratio float unsigned DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD closed_by varchar(66) DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD single_funded tinyint(1) DEFAULT 0');
|
||||
await this.$executeQuery('ALTER TABLE `channels` ADD outputs JSON DEFAULT "[]"');
|
||||
await this.updateToSchemaVersion(48);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 49 && isBitcoin === true) {
|
||||
await this.$executeQuery('TRUNCATE TABLE `blocks_audits`');
|
||||
await this.updateToSchemaVersion(49);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -448,6 +579,10 @@ class DatabaseMigration {
|
||||
return `UPDATE state SET number = ${DatabaseMigration.currentVersion} WHERE name = 'schema_version';`;
|
||||
}
|
||||
|
||||
private async updateToSchemaVersion(version): Promise<void> {
|
||||
await this.$executeQuery(`UPDATE state SET number = ${version} WHERE name = 'schema_version';`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Print current database version
|
||||
*/
|
||||
@@ -724,7 +859,7 @@ class DatabaseMigration {
|
||||
names text DEFAULT NULL,
|
||||
UNIQUE KEY id (id,type),
|
||||
KEY id_2 (id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateBlocksPricesTableQuery(): string {
|
||||
@@ -736,6 +871,48 @@ class DatabaseMigration {
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateLNNodesSocketsTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS nodes_sockets (
|
||||
public_key varchar(66) NOT NULL,
|
||||
socket varchar(100) NOT NULL,
|
||||
type enum('ipv4', 'ipv6', 'torv2', 'torv3', 'i2p', 'dns', 'websocket') NULL,
|
||||
UNIQUE KEY public_key_socket (public_key, socket),
|
||||
INDEX (public_key)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateLNNodeRecordsTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS nodes_records (
|
||||
public_key varchar(66) NOT NULL,
|
||||
type int(10) unsigned NOT NULL,
|
||||
payload blob NOT NULL,
|
||||
UNIQUE KEY public_key_type (public_key, type),
|
||||
INDEX (public_key),
|
||||
FOREIGN KEY (public_key)
|
||||
REFERENCES nodes (public_key)
|
||||
ON DELETE CASCADE
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateCPFPTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS cpfp_clusters (
|
||||
root varchar(65) NOT NULL,
|
||||
height int(10) NOT NULL,
|
||||
txs JSON DEFAULT NULL,
|
||||
fee_rate double unsigned NOT NULL,
|
||||
PRIMARY KEY (root)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateTransactionsTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS transactions (
|
||||
txid varchar(65) NOT NULL,
|
||||
cluster varchar(65) DEFAULT NULL,
|
||||
PRIMARY KEY (txid),
|
||||
FOREIGN KEY (cluster) REFERENCES cpfp_clusters (root) ON DELETE SET NULL
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
public async $truncateIndexedData(tables: string[]) {
|
||||
const allowedTables = ['blocks', 'hashrates', 'prices'];
|
||||
|
||||
|
||||
@@ -2,65 +2,100 @@ import config from '../config';
import { IDifficultyAdjustment } from '../mempool.interfaces';
import blocks from './blocks';

class DifficultyAdjustmentApi {
  constructor() { }
export interface DifficultyAdjustment {
  progressPercent: number; // Percent: 0 to 100
  difficultyChange: number; // Percent: -75 to 300
  estimatedRetargetDate: number; // Unix time in ms
  remainingBlocks: number; // Block count
  remainingTime: number; // Duration of time in ms
  previousRetarget: number; // Percent: -75 to 300
  nextRetargetHeight: number; // Block Height
  timeAvg: number; // Duration of time in ms
  timeOffset: number; // (Testnet) Time since last block (cap @ 20min) in ms
}

  public getDifficultyAdjustment(): IDifficultyAdjustment {
export function calcDifficultyAdjustment(
  DATime: number,
  nowSeconds: number,
  blockHeight: number,
  previousRetarget: number,
  network: string,
  latestBlockTimestamp: number,
): DifficultyAdjustment {
  const ESTIMATE_LAG_BLOCKS = 146; // For first 7.2% of epoch, don't estimate.
  const EPOCH_BLOCK_LENGTH = 2016; // Bitcoin mainnet
  const BLOCK_SECONDS_TARGET = 600; // Bitcoin mainnet
  const TESTNET_MAX_BLOCK_SECONDS = 1200; // Bitcoin testnet

  const diffSeconds = nowSeconds - DATime;
  const blocksInEpoch = (blockHeight >= 0) ? blockHeight % EPOCH_BLOCK_LENGTH : 0;
  const progressPercent = (blockHeight >= 0) ? blocksInEpoch / EPOCH_BLOCK_LENGTH * 100 : 100;
  const remainingBlocks = EPOCH_BLOCK_LENGTH - blocksInEpoch;
  const nextRetargetHeight = (blockHeight >= 0) ? blockHeight + remainingBlocks : 0;

  let difficultyChange = 0;
  let timeAvgSecs = BLOCK_SECONDS_TARGET;
  // Only calculate the estimate once we have 7.2% of blocks in current epoch
  if (blocksInEpoch >= ESTIMATE_LAG_BLOCKS) {
    timeAvgSecs = diffSeconds / blocksInEpoch;
    difficultyChange = (BLOCK_SECONDS_TARGET / timeAvgSecs - 1) * 100;
    // Max increase is x4 (+300%)
    if (difficultyChange > 300) {
      difficultyChange = 300;
    }
    // Max decrease is /4 (-75%)
    if (difficultyChange < -75) {
      difficultyChange = -75;
    }
  }

  // Testnet difficulty is set to 1 after 20 minutes of no blocks,
  // therefore the time between blocks will always be below 20 minutes (1200s).
  let timeOffset = 0;
  if (network === 'testnet') {
    if (timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
      timeAvgSecs = TESTNET_MAX_BLOCK_SECONDS;
    }

    const secondsSinceLastBlock = nowSeconds - latestBlockTimestamp;
    if (secondsSinceLastBlock + timeAvgSecs > TESTNET_MAX_BLOCK_SECONDS) {
      timeOffset = -Math.min(secondsSinceLastBlock, TESTNET_MAX_BLOCK_SECONDS) * 1000;
    }
  }

  const timeAvg = Math.floor(timeAvgSecs * 1000);
  const remainingTime = remainingBlocks * timeAvg;
  const estimatedRetargetDate = remainingTime + nowSeconds * 1000;

  return {
    progressPercent,
    difficultyChange,
    estimatedRetargetDate,
    remainingBlocks,
    remainingTime,
    previousRetarget,
    nextRetargetHeight,
    timeAvg,
    timeOffset,
  };
}

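Because `calcDifficultyAdjustment` is now a pure function of its arguments, it can be exercised directly, for example in a unit test. A sketch with made-up mainnet numbers: 1008 blocks into the epoch at an average of 540 seconds per block should estimate roughly a +11.1% adjustment.

```typescript
const nowSeconds = 1660000000;                   // arbitrary "current" unix time
const blocksInEpoch = 1008;                      // halfway through the 2016-block epoch
const DATime = nowSeconds - blocksInEpoch * 540; // blocks arriving every ~540s, faster than the 600s target

const da = calcDifficultyAdjustment(
  DATime, nowSeconds, 749952 + blocksInEpoch, 0.34, 'mainnet', nowSeconds - 540
);

console.log(da.progressPercent);    // 50
console.log(da.difficultyChange);   // ~11.11 = (600 / 540 - 1) * 100
console.log(da.remainingBlocks);    // 1008
console.log(da.nextRetargetHeight); // 751968
```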
class DifficultyAdjustmentApi {
|
||||
public getDifficultyAdjustment(): IDifficultyAdjustment | null {
|
||||
const DATime = blocks.getLastDifficultyAdjustmentTime();
|
||||
const previousRetarget = blocks.getPreviousDifficultyRetarget();
|
||||
const blockHeight = blocks.getCurrentBlockHeight();
|
||||
const blocksCache = blocks.getBlocks();
|
||||
const latestBlock = blocksCache[blocksCache.length - 1];
|
||||
|
||||
const now = new Date().getTime() / 1000;
|
||||
const diff = now - DATime;
|
||||
const blocksInEpoch = blockHeight % 2016;
|
||||
const progressPercent = (blocksInEpoch >= 0) ? blocksInEpoch / 2016 * 100 : 100;
|
||||
const remainingBlocks = 2016 - blocksInEpoch;
|
||||
const nextRetargetHeight = blockHeight + remainingBlocks;
|
||||
|
||||
let difficultyChange = 0;
|
||||
if (remainingBlocks < 1870) {
|
||||
if (blocksInEpoch > 0) {
|
||||
difficultyChange = (600 / (diff / blocksInEpoch) - 1) * 100;
|
||||
}
|
||||
if (difficultyChange > 300) {
|
||||
difficultyChange = 300;
|
||||
}
|
||||
if (difficultyChange < -75) {
|
||||
difficultyChange = -75;
|
||||
}
|
||||
if (!latestBlock) {
|
||||
return null;
|
||||
}
|
||||
const nowSeconds = Math.floor(new Date().getTime() / 1000);
|
||||
|
||||
let timeAvgMins = blocksInEpoch && blocksInEpoch > 146 ? diff / blocksInEpoch / 60 : 10;
|
||||
|
||||
// Testnet difficulty is set to 1 after 20 minutes of no blocks,
|
||||
// therefore the time between blocks will always be below 20 minutes (1200s).
|
||||
let timeOffset = 0;
|
||||
if (config.MEMPOOL.NETWORK === 'testnet') {
|
||||
if (timeAvgMins > 20) {
|
||||
timeAvgMins = 20;
|
||||
}
|
||||
if (now - latestBlock.timestamp + timeAvgMins * 60 > 1200) {
|
||||
timeOffset = -Math.min(now - latestBlock.timestamp, 1200) * 1000;
|
||||
}
|
||||
}
|
||||
|
||||
const timeAvg = timeAvgMins * 60 * 1000 ;
|
||||
const remainingTime = (remainingBlocks * timeAvg) + (now * 1000);
|
||||
const estimatedRetargetDate = remainingTime + now;
|
||||
|
||||
return {
|
||||
progressPercent,
|
||||
difficultyChange,
|
||||
estimatedRetargetDate,
|
||||
remainingBlocks,
|
||||
remainingTime,
|
||||
previousRetarget,
|
||||
nextRetargetHeight,
|
||||
timeAvg,
|
||||
timeOffset,
|
||||
};
|
||||
return calcDifficultyAdjustment(
|
||||
DATime, nowSeconds, blockHeight, previousRetarget,
|
||||
config.MEMPOOL.NETWORK, latestBlock.timestamp
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
import logger from '../../logger';
|
||||
import DB from '../../database';
|
||||
import nodesApi from './nodes.api';
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
import { ILightningApi } from '../lightning/lightning-api.interface';
|
||||
import { Common } from '../common';
|
||||
|
||||
class ChannelsApi {
|
||||
public async $getAllChannels(): Promise<any[]> {
|
||||
@@ -13,32 +17,61 @@ class ChannelsApi {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getAllChannelsGeo(publicKey?: string): Promise<any[]> {
|
||||
public async $getAllChannelsGeo(publicKey?: string, style?: string): Promise<any[]> {
|
||||
try {
|
||||
let select: string;
|
||||
if (style === 'widget') {
|
||||
select = `
|
||||
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
|
||||
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude
|
||||
`;
|
||||
} else {
|
||||
select = `
|
||||
nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias,
|
||||
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
|
||||
nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias,
|
||||
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude
|
||||
`;
|
||||
}
|
||||
|
||||
const params: string[] = [];
|
||||
let query = `SELECT nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias,
|
||||
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
|
||||
nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias,
|
||||
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude,
|
||||
channels.capacity
|
||||
FROM channels
|
||||
JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key
|
||||
JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key
|
||||
WHERE nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL
|
||||
AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL
|
||||
let query = `SELECT ${select}
|
||||
FROM channels
|
||||
JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key
|
||||
JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key
|
||||
WHERE channels.status = 1
|
||||
AND nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL
|
||||
AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL
|
||||
`;
|
||||
|
||||
if (publicKey !== undefined) {
|
||||
query += ' AND (nodes_1.public_key = ? OR nodes_2.public_key = ?)';
|
||||
params.push(publicKey);
|
||||
params.push(publicKey);
|
||||
} else {
|
||||
query += ` AND channels.capacity > 1000000
|
||||
GROUP BY nodes_1.public_key, nodes_2.public_key
|
||||
ORDER BY channels.capacity DESC
|
||||
LIMIT 10000
|
||||
`;
|
||||
}
|
||||
|
||||
const [rows]: any = await DB.query(query, params);
|
||||
return rows.map((row) => [
|
||||
row.node1_public_key, row.node1_alias, row.node1_longitude, row.node1_latitude,
|
||||
row.node2_public_key, row.node2_alias, row.node2_longitude, row.node2_latitude,
|
||||
row.capacity]);
|
||||
return rows.map((row) => {
|
||||
if (style === 'widget') {
|
||||
return [
|
||||
row.node1_longitude, row.node1_latitude,
|
||||
row.node2_longitude, row.node2_latitude,
|
||||
];
|
||||
} else {
|
||||
return [
|
||||
row.node1_public_key, row.node1_alias,
|
||||
row.node1_longitude, row.node1_latitude,
|
||||
row.node2_public_key, row.node2_alias,
|
||||
row.node2_longitude, row.node2_latitude,
|
||||
];
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
logger.err('$getAllChannelsGeo error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
@@ -48,7 +81,7 @@ class ChannelsApi {
|
||||
public async $searchChannelsById(search: string): Promise<any[]> {
|
||||
try {
|
||||
const searchStripped = search.replace('%', '') + '%';
|
||||
const query = `SELECT id, short_id, capacity FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
|
||||
const query = `SELECT id, short_id, capacity, status FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
@@ -57,9 +90,14 @@ class ChannelsApi {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsByStatus(status: number): Promise<any[]> {
|
||||
public async $getChannelsByStatus(status: number | number[]): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE status = ?`;
|
||||
let query: string;
|
||||
if (Array.isArray(status)) {
|
||||
query = `SELECT * FROM channels WHERE status IN (${status.join(',')})`;
|
||||
} else {
|
||||
query = `SELECT * FROM channels WHERE status = ?`;
|
||||
}
|
||||
const [rows]: any = await DB.query(query, [status]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
@@ -79,6 +117,32 @@ class ChannelsApi {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getUnresolvedClosedChannels(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason = 2 AND closing_resolved = 0 AND closing_transaction_id != ''`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getUnresolvedClosedChannels error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsWithoutSourceChecked(): Promise<any[]> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT channels.*
|
||||
FROM channels
|
||||
WHERE channels.source_checked != 1
|
||||
`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsWithoutSourceChecked error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsWithoutCreatedDate(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE created IS NULL`;
|
||||
@@ -92,7 +156,31 @@ class ChannelsApi {
|
||||
|
||||
public async $getChannel(id: string): Promise<any> {
|
||||
try {
|
||||
const query = `SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.*, ns1.channels AS channels_left, ns1.capacity AS capacity_left, ns2.channels AS channels_right, ns2.capacity AS capacity_right FROM channels LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key LEFT JOIN node_stats AS ns1 ON ns1.public_key = channels.node1_public_key LEFT JOIN node_stats AS ns2 ON ns2.public_key = channels.node2_public_key WHERE (ns1.id = (SELECT MAX(id) FROM node_stats WHERE public_key = channels.node1_public_key) AND ns2.id = (SELECT MAX(id) FROM node_stats WHERE public_key = channels.node2_public_key)) AND channels.id = ?`;
|
||||
const query = `
|
||||
SELECT n1.alias AS alias_left, n1.longitude as node1_longitude, n1.latitude as node1_latitude,
|
||||
n2.alias AS alias_right, n2.longitude as node2_longitude, n2.latitude as node2_latitude,
|
||||
channels.*,
|
||||
ns1.channels AS channels_left, ns1.capacity AS capacity_left, ns2.channels AS channels_right, ns2.capacity AS capacity_right
|
||||
FROM channels
|
||||
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
|
||||
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
|
||||
LEFT JOIN node_stats AS ns1 ON ns1.public_key = channels.node1_public_key
|
||||
LEFT JOIN node_stats AS ns2 ON ns2.public_key = channels.node2_public_key
|
||||
WHERE (
|
||||
ns1.id = (
|
||||
SELECT MAX(id)
|
||||
FROM node_stats
|
||||
WHERE public_key = channels.node1_public_key
|
||||
)
|
||||
AND ns2.id = (
|
||||
SELECT MAX(id)
|
||||
FROM node_stats
|
||||
WHERE public_key = channels.node2_public_key
|
||||
)
|
||||
)
|
||||
AND channels.id = ?
|
||||
`;
|
||||
|
||||
const [rows]: any = await DB.query(query, [id]);
|
||||
if (rows[0]) {
|
||||
return this.convertChannel(rows[0]);
|
||||
@@ -168,9 +256,14 @@ class ChannelsApi {
|
||||
|
||||
public async $getChannelsByTransactionId(transactionIds: string[]): Promise<any[]> {
|
||||
try {
|
||||
transactionIds = transactionIds.map((id) => '\'' + id + '\'');
|
||||
const query = `SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.* FROM channels LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key WHERE channels.transaction_id IN (${transactionIds.join(', ')}) OR channels.closing_transaction_id IN (${transactionIds.join(', ')})`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
const query = `
|
||||
SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.*
|
||||
FROM channels
|
||||
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
|
||||
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
|
||||
WHERE channels.transaction_id IN ? OR channels.closing_transaction_id IN ?
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [[transactionIds], [transactionIds]]);
|
||||
const channels = rows.map((row) => this.convertChannel(row));
|
||||
return channels;
|
||||
} catch (e) {
|
||||
@@ -179,17 +272,199 @@ class ChannelsApi {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelByClosingId(transactionId: string): Promise<any> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT
|
||||
channels.*
|
||||
FROM channels
|
||||
WHERE channels.closing_transaction_id = ?
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [transactionId]);
|
||||
if (rows.length > 0) {
|
||||
rows[0].outputs = JSON.parse(rows[0].outputs);
|
||||
return rows[0];
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$getChannelByClosingId error: ' + (e instanceof Error ? e.message : e));
|
||||
// don't throw - this data isn't essential
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsByOpeningId(transactionId: string): Promise<any> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT
|
||||
channels.*
|
||||
FROM channels
|
||||
WHERE channels.transaction_id = ?
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [transactionId]);
|
||||
if (rows.length > 0) {
|
||||
return rows.map(row => {
|
||||
row.outputs = JSON.parse(row.outputs);
|
||||
return row;
|
||||
});
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsByOpeningId error: ' + (e instanceof Error ? e.message : e));
|
||||
// don't throw - this data isn't essential
|
||||
}
|
||||
}
|
||||
|
||||
public async $updateClosingInfo(channelInfo: { id: string, node1_closing_balance: number, node2_closing_balance: number, closed_by: string | null, closing_fee: number, outputs: ILightningApi.ForensicOutput[]}): Promise<void> {
|
||||
try {
|
||||
const query = `
|
||||
UPDATE channels SET
|
||||
node1_closing_balance = ?,
|
||||
node2_closing_balance = ?,
|
||||
closed_by = ?,
|
||||
closing_fee = ?,
|
||||
outputs = ?
|
||||
WHERE channels.id = ?
|
||||
`;
|
||||
await DB.query<ResultSetHeader>(query, [
|
||||
channelInfo.node1_closing_balance || 0,
|
||||
channelInfo.node2_closing_balance || 0,
|
||||
channelInfo.closed_by,
|
||||
channelInfo.closing_fee || 0,
|
||||
JSON.stringify(channelInfo.outputs),
|
||||
channelInfo.id,
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$updateClosingInfo error: ' + (e instanceof Error ? e.message : e));
|
||||
// don't throw - this data isn't essential
|
||||
}
|
||||
}
|
||||
|
||||
public async $updateOpeningInfo(channelInfo: { id: string, node1_funding_balance: number, node2_funding_balance: number, funding_ratio: number, single_funded: boolean | void }): Promise<void> {
|
||||
try {
|
||||
const query = `
|
||||
UPDATE channels SET
|
||||
node1_funding_balance = ?,
|
||||
node2_funding_balance = ?,
|
||||
funding_ratio = ?,
|
||||
single_funded = ?
|
||||
WHERE channels.id = ?
|
||||
`;
|
||||
await DB.query<ResultSetHeader>(query, [
|
||||
channelInfo.node1_funding_balance || 0,
|
||||
channelInfo.node2_funding_balance || 0,
|
||||
channelInfo.funding_ratio,
|
||||
channelInfo.single_funded ? 1 : 0,
|
||||
channelInfo.id,
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$updateOpeningInfo error: ' + (e instanceof Error ? e.message : e));
|
||||
// don't throw - this data isn't essential
|
||||
}
|
||||
}
|
||||
|
||||
public async $markChannelSourceChecked(id: string): Promise<void> {
try {
const query = `
UPDATE channels
SET source_checked = 1
WHERE id = ?
`;
await DB.query<ResultSetHeader>(query, [id]);
} catch (e) {
logger.err('$markChannelSourceChecked error: ' + (e instanceof Error ? e.message : e));
// don't throw - this data isn't essential
}
}
|
||||
|
||||
public async $getChannelsForNode(public_key: string, index: number, length: number, status: string): Promise<any[]> {
|
||||
try {
|
||||
// Default active and inactive channels
|
||||
let statusQuery = '< 2';
|
||||
// Closed channels only
|
||||
if (status === 'closed') {
|
||||
statusQuery = '= 2';
|
||||
let channelStatusFilter;
|
||||
if (status === 'open') {
|
||||
channelStatusFilter = '< 2';
|
||||
} else if (status === 'active') {
|
||||
channelStatusFilter = '= 1';
|
||||
} else if (status === 'closed') {
|
||||
channelStatusFilter = '= 2';
|
||||
} else {
|
||||
throw new Error('getChannelsForNode: Invalid status requested');
|
||||
}
|
||||
const query = `SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.*, ns1.channels AS channels_left, ns1.capacity AS capacity_left, ns2.channels AS channels_right, ns2.capacity AS capacity_right FROM channels LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key LEFT JOIN node_stats AS ns1 ON ns1.public_key = channels.node1_public_key LEFT JOIN node_stats AS ns2 ON ns2.public_key = channels.node2_public_key WHERE (ns1.id = (SELECT MAX(id) FROM node_stats WHERE public_key = channels.node1_public_key) AND ns2.id = (SELECT MAX(id) FROM node_stats WHERE public_key = channels.node2_public_key)) AND (node1_public_key = ? OR node2_public_key = ?) AND status ${statusQuery} ORDER BY channels.capacity DESC LIMIT ?, ?`;
|
||||
const [rows]: any = await DB.query(query, [public_key, public_key, index, length]);
|
||||
const channels = rows.map((row) => this.convertChannel(row));
|
||||
|
||||
// Channels originating from node
|
||||
let query = `
|
||||
SELECT COALESCE(node2.alias, SUBSTRING(node2_public_key, 0, 20)) AS alias, COALESCE(node2.public_key, node2_public_key) AS public_key,
|
||||
channels.status, channels.node1_fee_rate,
|
||||
channels.capacity, channels.short_id, channels.id, channels.closing_reason,
|
||||
UNIX_TIMESTAMP(closing_date) as closing_date, UNIX_TIMESTAMP(channels.updated_at) as updated_at
|
||||
FROM channels
|
||||
LEFT JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key
|
||||
WHERE node1_public_key = ? AND channels.status ${channelStatusFilter}
|
||||
`;
|
||||
const [channelsFromNode]: any = await DB.query(query, [public_key]);
|
||||
|
||||
// Channels incoming to node
|
||||
query = `
|
||||
SELECT COALESCE(node1.alias, SUBSTRING(node1_public_key, 0, 20)) AS alias, COALESCE(node1.public_key, node1_public_key) AS public_key,
|
||||
channels.status, channels.node2_fee_rate,
|
||||
channels.capacity, channels.short_id, channels.id, channels.closing_reason,
|
||||
UNIX_TIMESTAMP(closing_date) as closing_date, UNIX_TIMESTAMP(channels.updated_at) as updated_at
|
||||
FROM channels
|
||||
LEFT JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key
|
||||
WHERE node2_public_key = ? AND channels.status ${channelStatusFilter}
|
||||
`;
|
||||
const [channelsToNode]: any = await DB.query(query, [public_key]);
|
||||
|
||||
let allChannels = channelsFromNode.concat(channelsToNode);
|
||||
allChannels.sort((a, b) => {
|
||||
if (status === 'closed') {
|
||||
if (!b.closing_date && !a.closing_date) {
|
||||
return (b.updated_at ?? 0) - (a.updated_at ?? 0);
|
||||
} else {
|
||||
return (b.closing_date ?? 0) - (a.closing_date ?? 0);
|
||||
}
|
||||
} else {
|
||||
return b.capacity - a.capacity;
|
||||
}
|
||||
});
|
||||
|
||||
if (index >= 0) {
|
||||
allChannels = allChannels.slice(index, index + length);
|
||||
} else if (index === -1) { // Node channels tree chart
|
||||
allChannels = allChannels.slice(0, 1000);
|
||||
}
|
||||
|
||||
const channels: any[] = []
|
||||
for (const row of allChannels) {
|
||||
let channel;
|
||||
if (index >= 0) {
|
||||
const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
|
||||
channel = {
|
||||
status: row.status,
|
||||
closing_reason: row.closing_reason,
|
||||
closing_date: row.closing_date,
|
||||
capacity: row.capacity ?? 0,
|
||||
short_id: row.short_id,
|
||||
id: row.id,
|
||||
fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
|
||||
node: {
|
||||
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
|
||||
public_key: row.public_key,
|
||||
channels: activeChannelsStats.active_channel_count ?? 0,
|
||||
capacity: activeChannelsStats.capacity ?? 0,
|
||||
}
|
||||
};
|
||||
} else if (index === -1) {
|
||||
channel = {
|
||||
capacity: row.capacity ?? 0,
|
||||
short_id: row.short_id,
|
||||
id: row.id,
|
||||
node: {
|
||||
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
|
||||
public_key: row.public_key,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
channels.push(channel);
|
||||
}
|
||||
|
||||
return channels;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsForNode error: ' + (e instanceof Error ? e.message : e));
|
||||
@@ -205,7 +480,12 @@ class ChannelsApi {
|
||||
if (status === 'closed') {
|
||||
statusQuery = '= 2';
|
||||
}
|
||||
const query = `SELECT COUNT(*) AS count FROM channels WHERE (node1_public_key = ? OR node2_public_key = ?) AND status ${statusQuery}`;
|
||||
const query = `
|
||||
SELECT COUNT(*) AS count
|
||||
FROM channels
|
||||
WHERE (node1_public_key = ? OR node2_public_key = ?)
|
||||
AND status ${statusQuery}
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [public_key, public_key]);
|
||||
return rows[0]['count'];
|
||||
} catch (e) {
|
||||
@@ -222,10 +502,15 @@ class ChannelsApi {
|
||||
'transaction_id': channel.transaction_id,
|
||||
'transaction_vout': channel.transaction_vout,
|
||||
'closing_transaction_id': channel.closing_transaction_id,
|
||||
'closing_fee': channel.closing_fee,
|
||||
'closing_reason': channel.closing_reason,
|
||||
'closing_date': channel.closing_date,
|
||||
'updated_at': channel.updated_at,
|
||||
'created': channel.created,
|
||||
'status': channel.status,
|
||||
'funding_ratio': channel.funding_ratio,
|
||||
'closed_by': channel.closed_by,
|
||||
'single_funded': !!channel.single_funded,
|
||||
'node_left': {
|
||||
'alias': channel.alias_left,
|
||||
'public_key': channel.node1_public_key,
|
||||
@@ -238,6 +523,11 @@ class ChannelsApi {
|
||||
'max_htlc_mtokens': channel.node1_max_htlc_mtokens,
|
||||
'min_htlc_mtokens': channel.node1_min_htlc_mtokens,
|
||||
'updated_at': channel.node1_updated_at,
|
||||
'longitude': channel.node1_longitude,
|
||||
'latitude': channel.node1_latitude,
|
||||
'funding_balance': channel.node1_funding_balance,
|
||||
'closing_balance': channel.node1_closing_balance,
|
||||
'initiated_close': channel.closed_by === channel.node1_public_key ? true : undefined,
|
||||
},
|
||||
'node_right': {
|
||||
'alias': channel.alias_right,
|
||||
@@ -251,9 +541,160 @@ class ChannelsApi {
|
||||
'max_htlc_mtokens': channel.node2_max_htlc_mtokens,
|
||||
'min_htlc_mtokens': channel.node2_min_htlc_mtokens,
|
||||
'updated_at': channel.node2_updated_at,
|
||||
'longitude': channel.node2_longitude,
|
||||
'latitude': channel.node2_latitude,
|
||||
'funding_balance': channel.node2_funding_balance,
|
||||
'closing_balance': channel.node2_closing_balance,
|
||||
'initiated_close': channel.closed_by === channel.node2_public_key ? true : undefined,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Save or update a channel present in the graph
|
||||
*/
|
||||
public async $saveChannel(channel: ILightningApi.Channel, status = 1): Promise<void> {
|
||||
const [ txid, vout ] = channel.chan_point.split(':');
|
||||
|
||||
const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
|
||||
const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};
|
||||
|
||||
const query = `INSERT INTO channels
|
||||
(
|
||||
id,
|
||||
short_id,
|
||||
capacity,
|
||||
transaction_id,
|
||||
transaction_vout,
|
||||
updated_at,
|
||||
status,
|
||||
node1_public_key,
|
||||
node1_base_fee_mtokens,
|
||||
node1_cltv_delta,
|
||||
node1_fee_rate,
|
||||
node1_is_disabled,
|
||||
node1_max_htlc_mtokens,
|
||||
node1_min_htlc_mtokens,
|
||||
node1_updated_at,
|
||||
node2_public_key,
|
||||
node2_base_fee_mtokens,
|
||||
node2_cltv_delta,
|
||||
node2_fee_rate,
|
||||
node2_is_disabled,
|
||||
node2_max_htlc_mtokens,
|
||||
node2_min_htlc_mtokens,
|
||||
node2_updated_at
|
||||
)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ${status}, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
capacity = ?,
|
||||
updated_at = ?,
|
||||
status = ${status},
|
||||
node1_public_key = ?,
|
||||
node1_base_fee_mtokens = ?,
|
||||
node1_cltv_delta = ?,
|
||||
node1_fee_rate = ?,
|
||||
node1_is_disabled = ?,
|
||||
node1_max_htlc_mtokens = ?,
|
||||
node1_min_htlc_mtokens = ?,
|
||||
node1_updated_at = ?,
|
||||
node2_public_key = ?,
|
||||
node2_base_fee_mtokens = ?,
|
||||
node2_cltv_delta = ?,
|
||||
node2_fee_rate = ?,
|
||||
node2_is_disabled = ?,
|
||||
node2_max_htlc_mtokens = ?,
|
||||
node2_min_htlc_mtokens = ?,
|
||||
node2_updated_at = ?
|
||||
;`;
|
||||
|
||||
await DB.query(query, [
|
||||
Common.channelShortIdToIntegerId(channel.channel_id),
|
||||
Common.channelIntegerIdToShortId(channel.channel_id),
|
||||
channel.capacity,
|
||||
txid,
|
||||
vout,
|
||||
Common.utcDateToMysql(channel.last_update),
|
||||
channel.node1_pub,
|
||||
policy1.fee_base_msat,
|
||||
policy1.time_lock_delta,
|
||||
policy1.fee_rate_milli_msat,
|
||||
policy1.disabled,
|
||||
policy1.max_htlc_msat,
|
||||
policy1.min_htlc,
|
||||
Common.utcDateToMysql(policy1.last_update),
|
||||
channel.node2_pub,
|
||||
policy2.fee_base_msat,
|
||||
policy2.time_lock_delta,
|
||||
policy2.fee_rate_milli_msat,
|
||||
policy2.disabled,
|
||||
policy2.max_htlc_msat,
|
||||
policy2.min_htlc,
|
||||
Common.utcDateToMysql(policy2.last_update),
|
||||
channel.capacity,
|
||||
Common.utcDateToMysql(channel.last_update),
|
||||
channel.node1_pub,
|
||||
policy1.fee_base_msat,
|
||||
policy1.time_lock_delta,
|
||||
policy1.fee_rate_milli_msat,
|
||||
policy1.disabled,
|
||||
policy1.max_htlc_msat,
|
||||
policy1.min_htlc,
|
||||
Common.utcDateToMysql(policy1.last_update),
|
||||
channel.node2_pub,
|
||||
policy2.fee_base_msat,
|
||||
policy2.time_lock_delta,
|
||||
policy2.fee_rate_milli_msat,
|
||||
policy2.disabled,
|
||||
policy2.max_htlc_msat,
|
||||
policy2.min_htlc,
|
||||
Common.utcDateToMysql(policy2.last_update)
|
||||
]);
|
||||
}
|
||||
|
||||
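Editor's note: `$saveChannel` above leans on `Common.channelShortIdToIntegerId` / `Common.channelIntegerIdToShortId` to translate between the two channel-id encodings. As a rough, illustrative sketch of the BOLT #7 layout such helpers work with (block height, transaction index and output index packed into one integer); the real helpers live in `Common` and may differ:

// Illustrative only: BOLT #7 packs a short channel id as
// block height (3 bytes) | tx index (3 bytes) | output index (2 bytes).
function shortIdToInteger(shortId: string): bigint {
  const [block, txIndex, voutIndex] = shortId.split('x').map((part) => BigInt(part));
  return (block << 40n) | (txIndex << 16n) | voutIndex;
}

function integerToShortId(id: bigint): string {
  return `${id >> 40n}x${(id >> 16n) & 0xffffffn}x${id & 0xffffn}`;
}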
/**
|
||||
* Set all channels not in `graphChannelsIds` as inactive (status = 0)
|
||||
*/
|
||||
public async $setChannelsInactive(graphChannelsIds: string[]): Promise<void> {
|
||||
if (graphChannelsIds.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await DB.query<ResultSetHeader>(`
|
||||
UPDATE channels
|
||||
SET status = 0
|
||||
WHERE id NOT IN (
|
||||
${graphChannelsIds.map(id => `"${id}"`).join(',')}
|
||||
)
|
||||
AND status != 2
|
||||
`);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
|
||||
} else {
|
||||
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
public async $getLatestChannelUpdateForNode(publicKey: string): Promise<number> {
try {
const query = `
SELECT MAX(UNIX_TIMESTAMP(updated_at)) as updated_at
FROM channels
WHERE node1_public_key = ?
`;
const [rows]: any[] = await DB.query(query, [publicKey]);
if (rows.length > 0) {
return rows[0].updated_at;
}
} catch (e) {
logger.err(`Can't getLatestChannelUpdateForNode for ${publicKey}. Reason ${e instanceof Error ? e.message : e}`);
}
return 0;
}
|
||||
}
|
||||
|
||||
export default new ChannelsApi();
|
||||
|
||||
@@ -32,6 +32,9 @@ class ChannelsRoutes {
|
||||
res.status(404).send('Channel not found');
|
||||
return;
|
||||
}
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(channel);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
@@ -44,11 +47,22 @@ class ChannelsRoutes {
|
||||
res.status(400).send('Missing parameter: public_key');
|
||||
return;
|
||||
}
|
||||
|
||||
const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
|
||||
const status: string = typeof req.query.status === 'string' ? req.query.status : '';
|
||||
const length = 25;
|
||||
const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, length, status);
|
||||
|
||||
if (index < -1) {
|
||||
res.status(400).send('Invalid index');
return;
|
||||
}
|
||||
if (['open', 'active', 'closed'].includes(status) === false) {
|
||||
res.status(400).send('Invalid status');
return;
|
||||
}
|
||||
|
||||
const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
|
||||
const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.header('X-Total-Count', channelsCount.toString());
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
@@ -56,7 +70,7 @@ class ChannelsRoutes {
|
||||
}
|
||||
}
|
||||
|
||||
private async $getChannelsByTransactionIds(req: Request, res: Response) {
|
||||
private async $getChannelsByTransactionIds(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
if (!Array.isArray(req.query.txId)) {
|
||||
res.status(400).send('Not an array');
|
||||
@@ -69,27 +83,26 @@ class ChannelsRoutes {
|
||||
}
|
||||
}
|
||||
const channels = await channelsApi.$getChannelsByTransactionId(txIds);
|
||||
const inputs: any[] = [];
|
||||
const outputs: any[] = [];
|
||||
const result: any[] = [];
|
||||
for (const txid of txIds) {
|
||||
const foundChannelInputs = channels.find((channel) => channel.closing_transaction_id === txid);
|
||||
if (foundChannelInputs) {
|
||||
inputs.push(foundChannelInputs);
|
||||
} else {
|
||||
inputs.push(null);
|
||||
const inputs: any = {};
|
||||
const outputs: any = {};
|
||||
// Assuming that we only have one lightning close input in each transaction. This may not be true in the future
|
||||
const foundChannelsFromInput = channels.find((channel) => channel.closing_transaction_id === txid);
|
||||
if (foundChannelsFromInput) {
|
||||
inputs[0] = foundChannelsFromInput;
|
||||
}
|
||||
const foundChannelOutputs = channels.find((channel) => channel.transaction_id === txid);
|
||||
if (foundChannelOutputs) {
|
||||
outputs.push(foundChannelOutputs);
|
||||
} else {
|
||||
outputs.push(null);
|
||||
const foundChannelsFromOutputs = channels.filter((channel) => channel.transaction_id === txid);
|
||||
for (const output of foundChannelsFromOutputs) {
|
||||
outputs[output.transaction_vout] = output;
|
||||
}
|
||||
result.push({
|
||||
inputs,
|
||||
outputs,
|
||||
});
|
||||
}
|
||||
|
||||
res.json({
|
||||
inputs: inputs,
|
||||
outputs: outputs,
|
||||
});
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
@@ -97,7 +110,11 @@ class ChannelsRoutes {
|
||||
|
||||
private async $getAllChannelsGeo(req: Request, res: Response) {
|
||||
try {
|
||||
const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey);
|
||||
const style: string = typeof req.query.style === 'string' ? req.query.style : '';
|
||||
const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey, style);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
|
||||
@@ -1,24 +1,62 @@
|
||||
import logger from '../../logger';
|
||||
import DB from '../../database';
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
import { ILightningApi } from '../lightning/lightning-api.interface';
|
||||
import { ITopNodesPerCapacity, ITopNodesPerChannels } from '../../mempool.interfaces';
|
||||
|
||||
class NodesApi {
|
||||
public async $getWorldNodes(): Promise<any> {
|
||||
try {
|
||||
let query = `
|
||||
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
|
||||
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
|
||||
nodes.longitude, nodes.latitude,
|
||||
geo_names_country.names as country, geo_names_iso.names as isoCode
|
||||
FROM nodes
|
||||
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
WHERE status = 1 AND nodes.as_number IS NOT NULL
|
||||
ORDER BY capacity
|
||||
`;
|
||||
|
||||
const [nodes]: any[] = await DB.query(query);
|
||||
|
||||
for (let i = 0; i < nodes.length; ++i) {
|
||||
nodes[i].country = JSON.parse(nodes[i].country);
|
||||
}
|
||||
|
||||
query = `
|
||||
SELECT MAX(nodes.capacity) as maxLiquidity, MAX(nodes.channels) as maxChannels
|
||||
FROM nodes
|
||||
WHERE status = 1 AND nodes.as_number IS NOT NULL
|
||||
`;
|
||||
|
||||
const [maximums]: any[] = await DB.query(query);
|
||||
|
||||
return {
|
||||
maxLiquidity: maximums[0].maxLiquidity,
|
||||
maxChannels: maximums[0].maxChannels,
|
||||
nodes: nodes.map(node => [
|
||||
node.longitude, node.latitude,
|
||||
node.publicKey, node.alias, node.capacity, node.channels,
|
||||
node.country, node.isoCode
|
||||
])
|
||||
};
|
||||
} catch (e) {
|
||||
logger.err(`Can't get world nodes list. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNode(public_key: string): Promise<any> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT nodes.*, geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
|
||||
geo_names_country.names as country, geo_names_subdivision.names as subdivision,
|
||||
(SELECT Count(*)
|
||||
FROM channels
|
||||
WHERE channels.status = 2 AND ( channels.node1_public_key = ? OR channels.node2_public_key = ? )) AS channel_closed_count,
|
||||
(SELECT Count(*)
|
||||
FROM channels
|
||||
WHERE channels.status = 1 AND ( channels.node1_public_key = ? OR channels.node2_public_key = ? )) AS channel_active_count,
|
||||
(SELECT Sum(capacity)
|
||||
FROM channels
|
||||
WHERE channels.status = 1 AND ( channels.node1_public_key = ? OR channels.node2_public_key = ? )) AS capacity,
|
||||
(SELECT Avg(capacity)
|
||||
FROM channels
|
||||
WHERE status = 1 AND ( node1_public_key = ? OR node2_public_key = ? )) AS channels_capacity_avg
|
||||
// General info
|
||||
let query = `
|
||||
SELECT public_key, alias, UNIX_TIMESTAMP(first_seen) AS first_seen,
|
||||
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
|
||||
as_number, city_id, country_id, subdivision_id, longitude, latitude,
|
||||
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
|
||||
geo_names_country.names as country, geo_names_subdivision.names as subdivision
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
|
||||
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
|
||||
@@ -27,17 +65,128 @@ class NodesApi {
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
WHERE public_key = ?
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [public_key, public_key, public_key, public_key, public_key, public_key, public_key, public_key, public_key]);
|
||||
if (rows.length > 0) {
|
||||
rows[0].as_organization = JSON.parse(rows[0].as_organization);
|
||||
rows[0].subdivision = JSON.parse(rows[0].subdivision);
|
||||
rows[0].city = JSON.parse(rows[0].city);
|
||||
rows[0].country = JSON.parse(rows[0].country);
|
||||
return rows[0];
|
||||
let [rows]: any[] = await DB.query(query, [public_key]);
|
||||
if (rows.length === 0) {
|
||||
throw new Error(`This node does not exist, or our node is not seeing it yet`);
|
||||
}
|
||||
return null;
|
||||
|
||||
const node = rows[0];
|
||||
node.as_organization = JSON.parse(node.as_organization);
|
||||
node.subdivision = JSON.parse(node.subdivision);
|
||||
node.city = JSON.parse(node.city);
|
||||
node.country = JSON.parse(node.country);
|
||||
|
||||
// Active channels and capacity
|
||||
const activeChannelsStats: any = await this.$getActiveChannelsStats(public_key);
|
||||
node.active_channel_count = activeChannelsStats.active_channel_count ?? 0;
|
||||
node.capacity = activeChannelsStats.capacity ?? 0;
|
||||
|
||||
// Opened channels count
|
||||
query = `
|
||||
SELECT count(short_id) as opened_channel_count
|
||||
FROM channels
|
||||
WHERE status != 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
[rows] = await DB.query(query, [public_key, public_key]);
|
||||
node.opened_channel_count = 0;
|
||||
if (rows.length > 0) {
|
||||
node.opened_channel_count = rows[0].opened_channel_count;
|
||||
}
|
||||
|
||||
// Closed channels count
|
||||
query = `
|
||||
SELECT count(short_id) as closed_channel_count
|
||||
FROM channels
|
||||
WHERE status = 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
[rows] = await DB.query(query, [public_key, public_key]);
|
||||
node.closed_channel_count = 0;
|
||||
if (rows.length > 0) {
|
||||
node.closed_channel_count = rows[0].closed_channel_count;
|
||||
}
|
||||
|
||||
// Custom records
|
||||
query = `
|
||||
SELECT type, payload
|
||||
FROM nodes_records
|
||||
WHERE public_key = ?
|
||||
`;
|
||||
[rows] = await DB.query(query, [public_key]);
|
||||
node.custom_records = {};
|
||||
for (const record of rows) {
|
||||
node.custom_records[record.type] = Buffer.from(record.payload, 'binary').toString('hex');
|
||||
}
|
||||
|
||||
return node;
|
||||
} catch (e) {
|
||||
logger.err('$getNode error: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getActiveChannelsStats(node_public_key: string): Promise<unknown> {
|
||||
const query = `
|
||||
SELECT count(short_id) as active_channel_count, sum(capacity) as capacity
|
||||
FROM channels
|
||||
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
const [rows]: any[] = await DB.query(query, [node_public_key, node_public_key]);
|
||||
if (rows.length > 0) {
|
||||
return {
|
||||
active_channel_count: rows[0].active_channel_count,
|
||||
capacity: rows[0].capacity
|
||||
};
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getFeeHistogram(node_public_key: string): Promise<unknown> {
|
||||
try {
|
||||
const inQuery = `
|
||||
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
|
||||
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
|
||||
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
|
||||
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
|
||||
END as bucket,
|
||||
count(short_id) as count,
|
||||
sum(capacity) as capacity
|
||||
FROM (
|
||||
SELECT CASE WHEN node1_public_key = ? THEN node2_fee_rate WHEN node2_public_key = ? THEN node1_fee_rate END as fee_rate,
|
||||
short_id as short_id,
|
||||
capacity as capacity
|
||||
FROM channels
|
||||
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
) as fee_rate_table
|
||||
GROUP BY bucket;
|
||||
`;
|
||||
const [inRows]: any[] = await DB.query(inQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
|
||||
|
||||
const outQuery = `
|
||||
SELECT CASE WHEN fee_rate <= 10.0 THEN CEIL(fee_rate)
|
||||
WHEN (fee_rate > 10.0 and fee_rate <= 100.0) THEN CEIL(fee_rate / 10.0) * 10.0
|
||||
WHEN (fee_rate > 100.0 and fee_rate <= 1000.0) THEN CEIL(fee_rate / 100.0) * 100.0
|
||||
WHEN fee_rate > 1000.0 THEN CEIL(fee_rate / 1000.0) * 1000.0
|
||||
END as bucket,
|
||||
count(short_id) as count,
|
||||
sum(capacity) as capacity
|
||||
FROM (
|
||||
SELECT CASE WHEN node1_public_key = ? THEN node1_fee_rate WHEN node2_public_key = ? THEN node2_fee_rate END as fee_rate,
|
||||
short_id as short_id,
|
||||
capacity as capacity
|
||||
FROM channels
|
||||
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
) as fee_rate_table
|
||||
GROUP BY bucket;
|
||||
`;
|
||||
const [outRows]: any[] = await DB.query(outQuery, [node_public_key, node_public_key, node_public_key, node_public_key]);
|
||||
|
||||
return {
|
||||
incoming: inRows.length > 0 ? inRows : [],
|
||||
outgoing: outRows.length > 0 ? outRows : [],
|
||||
};
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get node fee distribution for ${node_public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
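Editor's note: the CASE expression in `$getFeeHistogram` above buckets fee rates with a width that grows by magnitude (1 up to 10, then 10, 100 and 1000). The same bucketing, written out as a small illustrative TypeScript helper (not part of the changeset):

// Mirrors the SQL CASE above: ceil to the bucket width for the fee rate's magnitude.
function feeRateBucket(feeRate: number): number {
  if (feeRate <= 10) {
    return Math.ceil(feeRate);
  } else if (feeRate <= 100) {
    return Math.ceil(feeRate / 10) * 10;
  } else if (feeRate <= 1000) {
    return Math.ceil(feeRate / 100) * 100;
  }
  return Math.ceil(feeRate / 1000) * 1000;
}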
@@ -55,7 +204,12 @@ class NodesApi {
|
||||
|
||||
public async $getNodeStats(public_key: string): Promise<any> {
|
||||
try {
|
||||
const query = `SELECT UNIX_TIMESTAMP(added) AS added, capacity, channels FROM node_stats WHERE public_key = ? ORDER BY added DESC`;
|
||||
const query = `
|
||||
SELECT UNIX_TIMESTAMP(added) AS added, capacity, channels
|
||||
FROM node_stats
|
||||
WHERE public_key = ?
|
||||
ORDER BY added DESC
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [public_key]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
@@ -64,10 +218,44 @@ class NodesApi {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getTopCapacityNodes(): Promise<any> {
|
||||
public async $getTopCapacityNodes(full: boolean): Promise<ITopNodesPerCapacity[]> {
|
||||
try {
|
||||
const query = `SELECT nodes.*, node_stats.capacity, node_stats.channels FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key ORDER BY node_stats.added DESC, node_stats.capacity DESC LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
let rows: any;
|
||||
let query: string;
|
||||
if (full === false) {
|
||||
query = `
|
||||
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
nodes.capacity
|
||||
FROM nodes
|
||||
ORDER BY capacity DESC
|
||||
LIMIT 100
|
||||
`;
|
||||
|
||||
[rows] = await DB.query(query);
|
||||
} else {
|
||||
query = `
|
||||
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
|
||||
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
|
||||
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
|
||||
geo_names_city.names as city, geo_names_country.names as country,
|
||||
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
|
||||
ORDER BY capacity DESC
|
||||
LIMIT 100
|
||||
`;
|
||||
|
||||
[rows] = await DB.query(query);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].country = JSON.parse(rows[i].country);
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
}
|
||||
}
|
||||
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getTopCapacityNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
@@ -75,10 +263,95 @@ class NodesApi {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getTopChannelsNodes(): Promise<any> {
|
||||
public async $getTopChannelsNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
|
||||
try {
|
||||
const query = `SELECT nodes.*, node_stats.capacity, node_stats.channels FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key ORDER BY node_stats.added DESC, node_stats.channels DESC LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
let rows: any;
|
||||
let query: string;
|
||||
if (full === false) {
|
||||
query = `
|
||||
SELECT nodes.public_key as publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
nodes.channels
|
||||
FROM nodes
|
||||
ORDER BY channels DESC
|
||||
LIMIT 100;
|
||||
`;
|
||||
|
||||
[rows] = await DB.query(query);
|
||||
} else {
|
||||
query = `
|
||||
SELECT nodes.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
CAST(COALESCE(nodes.channels, 0) as INT) as channels,
|
||||
CAST(COALESCE(nodes.capacity, 0) as INT) as capacity,
|
||||
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
|
||||
geo_names_city.names as city, geo_names_country.names as country,
|
||||
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
|
||||
ORDER BY channels DESC
|
||||
LIMIT 100
|
||||
`;
|
||||
|
||||
[rows] = await DB.query(query);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].country = JSON.parse(rows[i].country);
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
}
|
||||
}
|
||||
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getTopChannelsNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getOldestNodes(full: boolean): Promise<ITopNodesPerChannels[]> {
|
||||
try {
|
||||
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
|
||||
const latestDate = rows[0].maxAdded;
|
||||
|
||||
let query: string;
|
||||
if (full === false) {
|
||||
query = `
|
||||
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias,
|
||||
node_stats.channels
|
||||
FROM node_stats
|
||||
JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
WHERE added = FROM_UNIXTIME(${latestDate})
|
||||
ORDER BY first_seen
|
||||
LIMIT 100;
|
||||
`;
|
||||
|
||||
[rows] = await DB.query(query);
|
||||
} else {
|
||||
query = `
|
||||
SELECT node_stats.public_key AS publicKey, IF(nodes.alias = '', SUBSTRING(node_stats.public_key, 1, 20), alias) as alias,
|
||||
CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
|
||||
CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity,
|
||||
UNIX_TIMESTAMP(nodes.first_seen) as firstSeen, UNIX_TIMESTAMP(nodes.updated_at) as updatedAt,
|
||||
geo_names_city.names as city, geo_names_country.names as country,
|
||||
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision
|
||||
FROM node_stats
|
||||
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
|
||||
WHERE added = FROM_UNIXTIME(${latestDate})
|
||||
ORDER BY first_seen
|
||||
LIMIT 100
|
||||
`;
|
||||
|
||||
[rows] = await DB.query(query);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].country = JSON.parse(rows[i].country);
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
}
|
||||
}
|
||||
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getOldestNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
@@ -88,9 +361,10 @@ class NodesApi {
|
||||
|
||||
public async $searchNodeByPublicKeyOrAlias(search: string) {
|
||||
try {
|
||||
const searchStripped = search.replace('%', '') + '%';
|
||||
const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR nodes.alias LIKE ? GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
|
||||
const publicKeySearch = search.replace('%', '') + '%';
|
||||
const aliasSearch = search.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '').split(' ').map((search) => '+' + search + '*').join(' ');
|
||||
const query = `SELECT public_key, alias, capacity, channels, status FROM nodes WHERE public_key LIKE ? OR MATCH alias_search AGAINST (? IN BOOLEAN MODE) ORDER BY capacity DESC LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [publicKeySearch, aliasSearch]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$searchNodeByPublicKeyOrAlias error: ' + (e instanceof Error ? e.message : e));
|
||||
@@ -98,64 +372,120 @@ class NodesApi {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodesISP(groupBy: string, showTor: boolean) {
|
||||
public async $getNodesISPRanking() {
|
||||
try {
|
||||
const orderBy = groupBy === 'capacity' ? `CAST(SUM(capacity) as INT)` : `COUNT(DISTINCT nodes.public_key)`;
|
||||
|
||||
// Clearnet
|
||||
let query = `SELECT GROUP_CONCAT(DISTINCT(nodes.as_number)) as ispId, geo_names.names as names,
|
||||
COUNT(DISTINCT nodes.public_key) as nodesCount, CAST(SUM(capacity) as INT) as capacity
|
||||
FROM nodes
|
||||
JOIN geo_names ON geo_names.id = nodes.as_number
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
GROUP BY geo_names.names
|
||||
ORDER BY ${orderBy} DESC
|
||||
`;
|
||||
const [nodesCountPerAS]: any = await DB.query(query);
|
||||
let query = '';
|
||||
|
||||
let total = 0;
|
||||
const nodesPerAs: any[] = [];
|
||||
// List all channels and the two linked ISP
|
||||
query = `
|
||||
SELECT short_id, channels.capacity,
|
||||
channels.node1_public_key AS node1PublicKey, isp1.names AS isp1, isp1.id as isp1ID,
|
||||
channels.node2_public_key AS node2PublicKey, isp2.names AS isp2, isp2.id as isp2ID
|
||||
FROM channels
|
||||
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
|
||||
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
|
||||
JOIN geo_names isp1 ON isp1.id = node1.as_number
|
||||
JOIN geo_names isp2 ON isp2.id = node2.as_number
|
||||
WHERE channels.status = 1
|
||||
ORDER BY short_id DESC
|
||||
`;
|
||||
const [channelsIsp]: any = await DB.query(query);
|
||||
|
||||
for (const asGroup of nodesCountPerAS) {
|
||||
if (groupBy === 'capacity') {
|
||||
total += asGroup.capacity;
|
||||
} else {
|
||||
total += asGroup.nodesCount;
|
||||
// Sum channels capacity and node count per ISP
|
||||
const ispList = {};
|
||||
for (const channel of channelsIsp) {
|
||||
const isp1 = JSON.parse(channel.isp1);
|
||||
const isp2 = JSON.parse(channel.isp2);
|
||||
|
||||
if (!ispList[isp1]) {
|
||||
ispList[isp1] = {
|
||||
id: channel.isp1ID.toString(),
|
||||
capacity: 0,
|
||||
channels: 0,
|
||||
nodes: {},
|
||||
};
|
||||
} else if (ispList[isp1].id.indexOf(channel.isp1ID) === -1) {
|
||||
ispList[isp1].id += ',' + channel.isp1ID.toString();
|
||||
}
|
||||
|
||||
if (!ispList[isp2]) {
|
||||
ispList[isp2] = {
|
||||
id: channel.isp2ID.toString(),
|
||||
capacity: 0,
|
||||
channels: 0,
|
||||
nodes: {},
|
||||
};
|
||||
} else if (ispList[isp2].id.indexOf(channel.isp2ID) === -1) {
|
||||
ispList[isp2].id += ',' + channel.isp2ID.toString();
|
||||
}
|
||||
|
||||
ispList[isp1].capacity += channel.capacity;
|
||||
ispList[isp1].channels += 1;
|
||||
ispList[isp1].nodes[channel.node1PublicKey] = true;
|
||||
ispList[isp2].capacity += channel.capacity;
|
||||
ispList[isp2].channels += 1;
|
||||
ispList[isp2].nodes[channel.node2PublicKey] = true;
|
||||
}
|
||||
|
||||
// Tor
|
||||
if (showTor) {
|
||||
query = `SELECT COUNT(DISTINCT nodes.public_key) as nodesCount, CAST(SUM(capacity) as INT) as capacity
|
||||
FROM nodes
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
ORDER BY ${orderBy} DESC
|
||||
`;
|
||||
const [nodesCountTor]: any = await DB.query(query);
|
||||
|
||||
total += groupBy === 'capacity' ? nodesCountTor[0].capacity : nodesCountTor[0].nodesCount;
|
||||
nodesPerAs.push({
|
||||
ispId: null,
|
||||
name: 'Tor',
|
||||
count: nodesCountTor[0].nodesCount,
|
||||
share: Math.floor((groupBy === 'capacity' ? nodesCountTor[0].capacity : nodesCountTor[0].nodesCount) / total * 10000) / 100,
|
||||
capacity: nodesCountTor[0].capacity,
|
||||
});
|
||||
const ispRanking: any[] = [];
|
||||
for (const isp of Object.keys(ispList)) {
|
||||
ispRanking.push([
|
||||
ispList[isp].id,
|
||||
isp,
|
||||
ispList[isp].capacity,
|
||||
ispList[isp].channels,
|
||||
Object.keys(ispList[isp].nodes).length,
|
||||
]);
|
||||
}
|
||||
|
||||
for (const as of nodesCountPerAS) {
|
||||
nodesPerAs.push({
|
||||
ispId: as.ispId,
|
||||
name: JSON.parse(as.names),
|
||||
count: as.nodesCount,
|
||||
share: Math.floor((groupBy === 'capacity' ? as.capacity : as.nodesCount) / total * 10000) / 100,
|
||||
capacity: as.capacity,
|
||||
});
|
||||
}
|
||||
// Total active channels capacity
|
||||
query = `SELECT SUM(capacity) AS capacity FROM channels WHERE status = 1`;
|
||||
const [totalCapacity]: any = await DB.query(query);
|
||||
|
||||
return nodesPerAs;
|
||||
// Get the total capacity of all channels which have at least one node on clearnet
|
||||
query = `
|
||||
SELECT SUM(capacity) as capacity
|
||||
FROM (
|
||||
SELECT capacity, GROUP_CONCAT(socket1.type, socket2.type) as networks
|
||||
FROM channels
|
||||
JOIN nodes_sockets socket1 ON node1_public_key = socket1.public_key
|
||||
JOIN nodes_sockets socket2 ON node2_public_key = socket2.public_key
|
||||
AND channels.status = 1
|
||||
GROUP BY short_id
|
||||
) channels_tmp
|
||||
WHERE channels_tmp.networks LIKE '%ipv%'
|
||||
`;
|
||||
const [clearnetCapacity]: any = await DB.query(query);
|
||||
|
||||
// Get the total capacity of all channels which have both nodes on Tor
|
||||
query = `
|
||||
SELECT SUM(capacity) as capacity
|
||||
FROM (
|
||||
SELECT capacity, GROUP_CONCAT(socket1.type, socket2.type) as networks
|
||||
FROM channels
|
||||
JOIN nodes_sockets socket1 ON node1_public_key = socket1.public_key
|
||||
JOIN nodes_sockets socket2 ON node2_public_key = socket2.public_key
|
||||
AND channels.status = 1
|
||||
GROUP BY short_id
|
||||
) channels_tmp
|
||||
WHERE channels_tmp.networks NOT LIKE '%ipv%' AND
|
||||
channels_tmp.networks NOT LIKE '%dns%' AND
|
||||
channels_tmp.networks NOT LIKE '%websocket%'
|
||||
`;
|
||||
const [torCapacity]: any = await DB.query(query);
|
||||
|
||||
const clearnetCapacityValue = parseInt(clearnetCapacity[0].capacity, 10);
|
||||
const torCapacityValue = parseInt(torCapacity[0].capacity, 10);
|
||||
const unknownCapacityValue = parseInt(totalCapacity[0].capacity) - clearnetCapacityValue - torCapacityValue;
|
||||
|
||||
return {
|
||||
clearnetCapacity: clearnetCapacityValue,
|
||||
torCapacity: torCapacityValue,
|
||||
unknownCapacity: unknownCapacityValue,
|
||||
ispRanking: ispRanking,
|
||||
};
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes grouped by AS. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
logger.err(`Cannot get LN ISP ranking. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
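Editor's note: each entry pushed into `ispRanking` above is a positional array. An illustrative sketch of how a consumer might name those positions, following the push order (ISP id(s), name, capacity, channel count, node count) — the type and field names here are assumptions, not part of the API:

// Illustrative only: label the positional fields of one ispRanking entry.
type IspRankingEntry = [string, string, number, number, number];

function describeIsp([ispIds, name, capacity, channels, nodeCount]: IspRankingEntry): string {
  return `${name} (AS ${ispIds}): ${nodeCount} nodes, ${channels} channels, ${capacity} sats`;
}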
@@ -163,25 +493,27 @@ class NodesApi {
|
||||
public async $getNodesPerCountry(countryId: string) {
|
||||
try {
|
||||
const query = `
|
||||
SELECT node_stats.public_key, node_stats.capacity, node_stats.channels, nodes.alias,
|
||||
UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city
|
||||
FROM node_stats
|
||||
JOIN (
|
||||
SELECT public_key, MAX(added) as last_added
|
||||
FROM node_stats
|
||||
GROUP BY public_key
|
||||
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
|
||||
JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
|
||||
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city, geo_names_country.names as country,
|
||||
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision,
|
||||
nodes.longitude, nodes.latitude, nodes.as_number, geo_names_isp.names as isp
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
|
||||
LEFT JOIN geo_names geo_names_isp on geo_names_isp.id = nodes.as_number AND geo_names_isp.type = 'as_organization'
|
||||
WHERE geo_names_country.id = ?
|
||||
ORDER BY capacity DESC
|
||||
`;
|
||||
|
||||
const [rows]: any = await DB.query(query, [countryId]);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].country = JSON.parse(rows[i].country);
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
rows[i].subdivision = JSON.parse(rows[i].subdivision);
|
||||
rows[i].isp = JSON.parse(rows[i].isp);
|
||||
}
|
||||
return rows;
|
||||
} catch (e) {
|
||||
@@ -192,29 +524,63 @@ class NodesApi {
|
||||
|
||||
public async $getNodesPerISP(ISPId: string) {
|
||||
try {
|
||||
const query = `
|
||||
SELECT node_stats.public_key, node_stats.capacity, node_stats.channels, nodes.alias,
|
||||
UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city, geo_names_country.names as country
|
||||
FROM node_stats
|
||||
JOIN (
|
||||
SELECT public_key, MAX(added) as last_added
|
||||
FROM node_stats
|
||||
GROUP BY public_key
|
||||
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
|
||||
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
let query = `
|
||||
SELECT channels.node1_public_key AS node1PublicKey, isp1.id as isp1ID,
|
||||
channels.node2_public_key AS node2PublicKey, isp2.id as isp2ID
|
||||
FROM channels
|
||||
JOIN nodes node1 ON node1.public_key = channels.node1_public_key
|
||||
JOIN nodes node2 ON node2.public_key = channels.node2_public_key
|
||||
JOIN geo_names isp1 ON isp1.id = node1.as_number
|
||||
JOIN geo_names isp2 ON isp2.id = node2.as_number
|
||||
WHERE channels.status = 1 AND (node1.as_number IN (?) OR node2.as_number IN (?))
|
||||
ORDER BY short_id DESC
|
||||
`;
|
||||
|
||||
const IPSIds = ISPId.split(',');
|
||||
const [rows]: any = await DB.query(query, [IPSIds, IPSIds]);
|
||||
if (!rows || rows.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const nodes = {};
|
||||
|
||||
const intISPIds: number[] = [];
|
||||
for (const ispId of IPSIds) {
|
||||
intISPIds.push(parseInt(ispId, 10));
|
||||
}
|
||||
|
||||
for (const channel of rows) {
|
||||
if (intISPIds.includes(channel.isp1ID)) {
|
||||
nodes[channel.node1PublicKey] = true;
|
||||
}
|
||||
if (intISPIds.includes(channel.isp2ID)) {
|
||||
nodes[channel.node2PublicKey] = true;
|
||||
}
|
||||
}
|
||||
|
||||
query = `
|
||||
SELECT nodes.public_key, CAST(COALESCE(nodes.capacity, 0) as INT) as capacity, CAST(COALESCE(nodes.channels, 0) as INT) as channels,
|
||||
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city, geo_names_country.names as country,
|
||||
geo_names_iso.names as iso_code, geo_names_subdivision.names as subdivision,
|
||||
nodes.longitude, nodes.latitude
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
WHERE nodes.as_number IN (?)
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = nodes.subdivision_id AND geo_names_subdivision.type = 'division'
|
||||
WHERE nodes.public_key IN (?)
|
||||
ORDER BY capacity DESC
|
||||
`;
|
||||
|
||||
const [rows]: any = await DB.query(query, [ISPId.split(',')]);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].country = JSON.parse(rows[i].country);
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
const [rows2]: any = await DB.query(query, [Object.keys(nodes)]);
|
||||
for (let i = 0; i < rows2.length; ++i) {
|
||||
rows2[i].country = JSON.parse(rows2[i].country);
|
||||
rows2[i].city = JSON.parse(rows2[i].city);
|
||||
rows2[i].subdivision = JSON.parse(rows2[i].subdivision);
|
||||
}
|
||||
return rows;
|
||||
return rows2;
|
||||
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
@@ -227,7 +593,6 @@ class NodesApi {
|
||||
FROM nodes
|
||||
JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
|
||||
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
GROUP BY country_id
|
||||
ORDER BY COUNT(DISTINCT nodes.public_key) DESC
|
||||
`;
|
||||
@@ -253,6 +618,85 @@ class NodesApi {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save or update a node present in the graph
|
||||
*/
|
||||
public async $saveNode(node: ILightningApi.Node): Promise<void> {
|
||||
try {
|
||||
const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
|
||||
const query = `INSERT INTO nodes(
|
||||
public_key,
|
||||
first_seen,
|
||||
updated_at,
|
||||
alias,
|
||||
alias_search,
|
||||
color,
|
||||
sockets,
|
||||
status
|
||||
)
|
||||
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, ?, 1)
|
||||
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, alias_search = ?, color = ?, sockets = ?, status = 1`;
|
||||
|
||||
await DB.query(query, [
|
||||
node.pub_key,
|
||||
node.last_update,
|
||||
node.alias,
|
||||
this.aliasToSearchText(node.alias),
|
||||
node.color,
|
||||
sockets,
|
||||
node.last_update,
|
||||
node.alias,
|
||||
this.aliasToSearchText(node.alias),
|
||||
node.color,
|
||||
sockets,
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update node sockets
|
||||
*/
|
||||
public async $updateNodeSockets(publicKey: string, sockets: {network: string; addr: string}[]): Promise<void> {
|
||||
const formattedSockets = (sockets.map(a => a.addr).join(',')) ?? '';
|
||||
try {
|
||||
await DB.query(`UPDATE nodes SET sockets = ? WHERE public_key = ?`, [formattedSockets, publicKey]);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot update node sockets for ${publicKey}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set all nodes not in `nodesPubkeys` as inactive (status = 0)
|
||||
*/
|
||||
public async $setNodesInactive(graphNodesPubkeys: string[]): Promise<void> {
|
||||
if (graphNodesPubkeys.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await DB.query<ResultSetHeader>(`
|
||||
UPDATE nodes
|
||||
SET status = 0
|
||||
WHERE public_key NOT IN (
|
||||
${graphNodesPubkeys.map(pubkey => `"${pubkey}"`).join(',')}
|
||||
)
|
||||
`);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
logger.info(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
|
||||
} else {
|
||||
logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private aliasToSearchText(str: string): string {
|
||||
return str.replace(/[-_.]/g, ' ').replace(/[^a-zA-Z0-9 ]/g, '');
|
||||
}
|
||||
}
|
||||
|
||||
export default new NodesApi();
|
||||
|
||||
@@ -2,20 +2,27 @@ import config from '../../config';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import nodesApi from './nodes.api';
|
||||
import DB from '../../database';
|
||||
import { INodesRanking } from '../../mempool.interfaces';
|
||||
|
||||
class NodesRoutes {
|
||||
constructor() { }
|
||||
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/world', this.$getWorldNodes)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/country/:country', this.$getNodesPerCountry)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/search/:search', this.$searchNode)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/top', this.$getTopNodes)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp-ranking', this.$getISPRanking)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp/:isp', this.$getNodesPerISP)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/countries', this.$getNodesCountries)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings', this.$getNodesRanking)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/liquidity', this.$getTopNodesByCapacity)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/connectivity', this.$getTopNodesByChannels)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/rankings/age', this.$getOldestNodes)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/fees/histogram', this.$getFeeHistogram)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/group/:name', this.$getNodeGroup)
|
||||
;
|
||||
}
|
||||
|
||||
@@ -28,6 +35,39 @@ class NodesRoutes {
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodeGroup(req: Request, res: Response) {
|
||||
try {
|
||||
let nodesList;
|
||||
let nodes: any[] = [];
|
||||
switch (config.MEMPOOL.NETWORK) {
|
||||
case 'testnet':
|
||||
nodesList = ['032c7c7819276c4f706a04df1a0f1e10a5495994a7be4c1d3d28ca766e5a2b957b', '025a7e38c2834dd843591a4d23d5f09cdeb77ddca85f673c2d944a14220ff14cf7', '0395e2731a1673ef21d7a16a727c4fc4d4c35a861c428ce2c819c53d2b81c8bd55', '032ab2028c0b614c6d87824e2373529652fd7e4221b4c70cc4da7c7005c49afcf0', '029001b22fe70b48bee12d014df91982eb85ff1bd404ec772d5c83c4ee3e88d2c3', '0212e2848d79f928411da5f2ff0a8c95ec6ccb5a09d2031b6f71e91309dcde63af', '03e871a2229523d34f76e6311ff197cfe7f26c2fbec13554b93a46f4e710c47dab', '032202ec98d976b0e928bd1d91924e8bd3eab07231fc39feb3737b010071073df8', '02fa7c5a948d03d563a9f36940c2205a814e594d17c0042ced242c71a857d72605', '039c14fdec2d958e3d14cebf657451bbd9e039196615785e82c917f274e3fb2205', '033589bbcb233ffc416cefd5437c7f37e9d7cb7942d405e39e72c4c846d9b37f18', '029293110441c6e2eacb57e1255bf6ef05c41a6a676fe474922d33c19f98a7d584'];
|
||||
break;
|
||||
case 'signet':
|
||||
nodesList = ['03ddab321b760433cbf561b615ef62ac7d318630c5f51d523aaf5395b90b751956', '033d92c7bfd213ef1b34c90e985fb5dc77f9ec2409d391492484e57a44c4aca1de', '02ad010dda54253c1eb9efe38b0760657a3b43ecad62198c359c051c9d99d45781', '025196512905b8a3f1597428b867bec63ec9a95e5089eb7dc7e63e2d2691669029', '027c625aa1fbe3768db68ebcb05b53b6dc0ce68b7b54b8900d326d167363e684fe', '03f1629af3101fcc56b7aac2667016be84e3defbf3d0c8719f836c9b41c9a57a43', '02dfb81e2f7a3c4c9e8a51b70ef82b4a24549cc2fab1f5b2fd636501774a918991', '02d01ccf832944c68f10d39006093769c5b8bda886d561b128534e313d729fdb34', '02499ed23027d4698a6904ff4ec1b6085a61f10b9a6937f90438f9947e38e8ea86', '038310e3a786340f2bd7770704c7ccfe560fd163d9a1c99d67894597419d12cbf7', '03e5e9d879b72c7d67ecd483bae023bd33e695bb32b981a4021260f7b9d62bc761', '028d16e1a0ace4c0c0a421536d8d32ce484dfe6e2f726b7b0e7c30f12a195f8cc7'];
|
||||
break;
|
||||
default:
|
||||
nodesList = ['03fbc17549ec667bccf397ababbcb4cdc0e3394345e4773079ab2774612ec9be61', '03da9a8623241ccf95f19cd645c6cecd4019ac91570e976eb0a128bebbc4d8a437', '03ca5340cf85cb2e7cf076e489f785410838de174e40be62723e8a60972ad75144', '0238bd27f02d67d6c51e269692bc8c9a32357a00e7777cba7f4f1f18a2a700b108', '03f983dcabed6baa1eab5b56c8b2e8fdc846ab3fd931155377897335e85a9fa57c', '03e399589533581e48796e29a825839a010036a61b20744fda929d6709fcbffcc5', '021f5288b5f72c42cd0d8801086af7ce09a816d8ee9a4c47a4b436399b26cb601a', '032b01b7585f781420cd4148841a82831ba37fa952342052cec16750852d4f2dd9', '02848036488d4b8fb1f1c4064261ec36151f43b085f0b51bd239ade3ddfc940c34', '02b6b1640fe029e304c216951af9fbefdb23b0bdc9baaf327540d31b6107841fdf', '03694289827203a5b3156d753071ddd5bf92e371f5a462943f9555eef6d2d6606c', '0283d850db7c3e8ea7cc9c4abc7afaab12bbdf72b677dcba1d608350d2537d7d43'];
|
||||
}
|
||||
|
||||
for (let pubKey of nodesList) {
|
||||
try {
|
||||
const node = await nodesApi.$getNode(pubKey);
|
||||
if (node) {
|
||||
nodes.push(node);
|
||||
}
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(nodes);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNode(req: Request, res: Response) {
|
||||
try {
|
||||
const node = await nodesApi.$getNode(req.params.public_key);
|
||||
@@ -35,6 +75,9 @@ class NodesRoutes {
|
||||
res.status(404).send('Node not found');
|
||||
return;
|
||||
}
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(node);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
@@ -44,17 +87,39 @@ class NodesRoutes {
|
||||
private async $getHistoricalNodeStats(req: Request, res: Response) {
|
||||
try {
|
||||
const statistics = await nodesApi.$getNodeStats(req.params.public_key);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(statistics);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getTopNodes(req: Request, res: Response) {
|
||||
private async $getFeeHistogram(req: Request, res: Response) {
|
||||
try {
|
||||
const topCapacityNodes = await nodesApi.$getTopCapacityNodes();
|
||||
const topChannelsNodes = await nodesApi.$getTopChannelsNodes();
|
||||
res.json({
|
||||
const node = await nodesApi.$getFeeHistogram(req.params.public_key);
|
||||
if (!node) {
|
||||
res.status(404).send('Node not found');
|
||||
return;
|
||||
}
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(node);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodesRanking(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(false);
|
||||
const topChannelsNodes = await nodesApi.$getTopChannelsNodes(false);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(<INodesRanking>{
|
||||
topByCapacity: topCapacityNodes,
|
||||
topByChannels: topChannelsNodes,
|
||||
});
|
||||
@@ -63,18 +128,45 @@ class NodesRoutes {
|
||||
}
|
||||
}
|
||||
|
||||
private async $getTopNodesByCapacity(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const topCapacityNodes = await nodesApi.$getTopCapacityNodes(true);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(topCapacityNodes);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getTopNodesByChannels(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const topCapacityNodes = await nodesApi.$getTopChannelsNodes(true);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(topCapacityNodes);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getOldestNodes(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const topCapacityNodes = await nodesApi.$getOldestNodes(true);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(topCapacityNodes);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getISPRanking(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const groupBy = req.query.groupBy as string;
|
||||
const showTor = (req.query.showTor as string) === 'true';
|
||||
|
||||
if (!['capacity', 'node-count'].includes(groupBy)) {
|
||||
res.status(400).send(`groupBy must be one of 'capacity' or 'node-count'`);
|
||||
return;
|
||||
}
|
||||
|
||||
const nodesPerAs = await nodesApi.$getNodesISP(groupBy, showTor);
|
||||
|
||||
const nodesPerAs = await nodesApi.$getNodesISPRanking();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
|
||||
@@ -84,6 +176,18 @@ class NodesRoutes {
|
||||
}
|
||||
}
|
||||
|
||||
private async $getWorldNodes(req: Request, res: Response) {
|
||||
try {
|
||||
const worldNodes = await nodesApi.$getWorldNodes();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
|
||||
res.json(worldNodes);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodesPerCountry(req: Request, res: Response) {
|
||||
try {
|
||||
const [country]: any[] = await DB.query(
|
||||
|
||||
@@ -6,7 +6,8 @@ class StatisticsApi {
|
||||
public async $getStatistics(interval: string | null = null): Promise<any> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT UNIX_TIMESTAMP(added) AS added, channel_count, total_capacity, tor_nodes, clearnet_nodes, unannounced_nodes
|
||||
let query = `SELECT UNIX_TIMESTAMP(added) AS added, channel_count, total_capacity,
|
||||
tor_nodes, clearnet_nodes, unannounced_nodes, clearnet_tor_nodes
|
||||
FROM lightning_stats`;
|
||||
|
||||
if (interval) {
|
||||
@@ -27,7 +28,7 @@ class StatisticsApi {
|
||||
public async $getLatestStatistics(): Promise<any> {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1`);
|
||||
const [rows2]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1 OFFSET 7`);
|
||||
const [rows2]: any = await DB.query(`SELECT * FROM lightning_stats WHERE DATE(added) = DATE(NOW() - INTERVAL 7 DAY)`);
|
||||
return {
|
||||
latest: rows[0],
|
||||
previous: rows2[0],
|
||||
|
||||
37
backend/src/api/fetch-version.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import fs from 'fs';
|
||||
import path from "path";
|
||||
const { spawnSync } = require('child_process');
|
||||
|
||||
function getVersion(): string {
|
||||
const packageJson = fs.readFileSync('package.json').toString();
|
||||
return JSON.parse(packageJson).version;
|
||||
}
|
||||
|
||||
function getGitCommit(): string {
|
||||
if (process.env.MEMPOOL_COMMIT_HASH) {
|
||||
return process.env.MEMPOOL_COMMIT_HASH;
|
||||
} else {
|
||||
const gitRevParse = spawnSync('git', ['rev-parse', '--short', 'HEAD']);
|
||||
if (!gitRevParse.error) {
|
||||
const output = gitRevParse.stdout.toString('utf-8').replace(/[\n\r\s]+$/, '');
|
||||
if (output) {
|
||||
return output;
|
||||
} else {
|
||||
console.log('Could not fetch git commit: No repo available');
|
||||
}
|
||||
} else if (gitRevParse.error.code === 'ENOENT') {
|
||||
console.log('Could not fetch git commit: Command `git` is unavailable');
|
||||
}
|
||||
}
|
||||
return '?';
|
||||
}
|
||||
|
||||
const versionInfo = {
|
||||
version: getVersion(),
|
||||
gitCommit: getGitCommit()
|
||||
}
|
||||
|
||||
fs.writeFileSync(
|
||||
path.join(__dirname, 'version.json'),
|
||||
JSON.stringify(versionInfo, null, 2) + "\n"
|
||||
);
|
||||
272
backend/src/api/lightning/clightning/clightning-client.ts
Normal file
@@ -0,0 +1,272 @@
|
||||
// Imported from https://github.com/shesek/lightning-client-js
|
||||
|
||||
'use strict';
|
||||
|
||||
const methods = [
|
||||
'addgossip',
|
||||
'autocleaninvoice',
|
||||
'check',
|
||||
'checkmessage',
|
||||
'close',
|
||||
'connect',
|
||||
'createinvoice',
|
||||
'createinvoicerequest',
|
||||
'createoffer',
|
||||
'createonion',
|
||||
'decode',
|
||||
'decodepay',
|
||||
'delexpiredinvoice',
|
||||
'delinvoice',
|
||||
'delpay',
|
||||
'dev-listaddrs',
|
||||
'dev-rescan-outputs',
|
||||
'disableoffer',
|
||||
'disconnect',
|
||||
'estimatefees',
|
||||
'feerates',
|
||||
'fetchinvoice',
|
||||
'fundchannel',
|
||||
'fundchannel_cancel',
|
||||
'fundchannel_complete',
|
||||
'fundchannel_start',
|
||||
'fundpsbt',
|
||||
'getchaininfo',
|
||||
'getinfo',
|
||||
'getlog',
|
||||
'getrawblockbyheight',
|
||||
'getroute',
|
||||
'getsharedsecret',
|
||||
'getutxout',
|
||||
'help',
|
||||
'invoice',
|
||||
'keysend',
|
||||
'legacypay',
|
||||
'listchannels',
|
||||
'listconfigs',
|
||||
'listforwards',
|
||||
'listfunds',
|
||||
'listinvoices',
|
||||
'listnodes',
|
||||
'listoffers',
|
||||
'listpays',
|
||||
'listpeers',
|
||||
'listsendpays',
|
||||
'listtransactions',
|
||||
'multifundchannel',
|
||||
'multiwithdraw',
|
||||
'newaddr',
|
||||
'notifications',
|
||||
'offer',
|
||||
'offerout',
|
||||
'openchannel_abort',
|
||||
'openchannel_bump',
|
||||
'openchannel_init',
|
||||
'openchannel_signed',
|
||||
'openchannel_update',
|
||||
'pay',
|
||||
'payersign',
|
||||
'paystatus',
|
||||
'ping',
|
||||
'plugin',
|
||||
'reserveinputs',
|
||||
'sendinvoice',
|
||||
'sendonion',
|
||||
'sendonionmessage',
|
||||
'sendpay',
|
||||
'sendpsbt',
|
||||
'sendrawtransaction',
|
||||
'setchannelfee',
|
||||
'signmessage',
|
||||
'signpsbt',
|
||||
'stop',
|
||||
'txdiscard',
|
||||
'txprepare',
|
||||
'txsend',
|
||||
'unreserveinputs',
|
||||
'utxopsbt',
|
||||
'waitanyinvoice',
|
||||
'waitblockheight',
|
||||
'waitinvoice',
|
||||
'waitsendpay',
|
||||
'withdraw'
|
||||
];
|
||||
|
||||
|
||||
import EventEmitter from 'events';
|
||||
import { existsSync, statSync } from 'fs';
|
||||
import { createConnection, Socket } from 'net';
|
||||
import { homedir } from 'os';
|
||||
import path from 'path';
|
||||
import { createInterface, Interface } from 'readline';
|
||||
import logger from '../../../logger';
|
||||
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import { convertAndmergeBidirectionalChannels, convertNode } from './clightning-convert';
|
||||
|
||||
class LightningError extends Error {
|
||||
type: string = 'lightning';
|
||||
message: string = 'lightning-client error';
|
||||
|
||||
constructor(error) {
|
||||
super();
|
||||
this.type = error.type;
|
||||
this.message = error.message;
|
||||
}
|
||||
}
|
||||
|
||||
const defaultRpcPath = path.join(homedir(), '.lightning')
|
||||
, fStat = (...p) => statSync(path.join(...p))
|
||||
, fExists = (...p) => existsSync(path.join(...p))
|
||||
|
||||
export default class CLightningClient extends EventEmitter implements AbstractLightningApi {
|
||||
private rpcPath: string;
|
||||
private reconnectWait: number;
|
||||
private reconnectTimeout;
|
||||
private reqcount: number;
|
||||
private client: Socket;
|
||||
private rl: Interface;
|
||||
private clientConnectionPromise: Promise<unknown>;
|
||||
|
||||
constructor(rpcPath = defaultRpcPath) {
|
||||
if (!path.isAbsolute(rpcPath)) {
|
||||
throw new Error('The rpcPath must be an absolute path');
|
||||
}
|
||||
|
||||
if (!fExists(rpcPath) || !fStat(rpcPath).isSocket()) {
|
||||
// network directory provided, use the lightning-rpc within it
|
||||
if (fExists(rpcPath, 'lightning-rpc')) {
|
||||
rpcPath = path.join(rpcPath, 'lightning-rpc');
|
||||
}
|
||||
|
||||
// main data directory provided, default to using the bitcoin mainnet subdirectory
|
||||
// to be removed in v0.2.0
|
||||
else if (fExists(rpcPath, 'bitcoin', 'lightning-rpc')) {
|
||||
logger.warn(`[CLightningClient] ${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`)
|
||||
logger.warn(`[CLightningClient] specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`)
|
||||
rpcPath = path.join(rpcPath, 'bitcoin', 'lightning-rpc')
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(`[CLightningClient] Connecting to ${rpcPath}`);
|
||||
|
||||
super();
|
||||
this.rpcPath = rpcPath;
|
||||
this.reconnectWait = 0.5;
|
||||
this.reconnectTimeout = null;
|
||||
this.reqcount = 0;
|
||||
|
||||
const _self = this;
|
||||
|
||||
this.client = createConnection(rpcPath).on(
|
||||
'error', () => {
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
}
|
||||
);
|
||||
this.rl = createInterface({ input: this.client }).on(
|
||||
'error', () => {
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
}
|
||||
);
|
||||
|
||||
this.clientConnectionPromise = new Promise<void>(resolve => {
|
||||
_self.client.on('connect', () => {
|
||||
logger.info(`[CLightningClient] Lightning client connected`);
|
||||
_self.reconnectWait = 1;
|
||||
resolve();
|
||||
});
|
||||
|
||||
_self.client.on('end', () => {
|
||||
logger.err('[CLightningClient] Lightning client connection closed, reconnecting');
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
});
|
||||
|
||||
_self.client.on('error', error => {
|
||||
logger.err(`[CLightningClient] Lightning client connection error: ${error}`);
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
});
|
||||
});
|
||||
|
||||
this.rl.on('line', line => {
|
||||
line = line.trim();
|
||||
if (!line) {
|
||||
return;
|
||||
}
|
||||
const data = JSON.parse(line);
|
||||
// logger.debug(`[CLightningClient] #${data.id} <-- ${JSON.stringify(data.error || data.result)}`);
|
||||
_self.emit('res:' + data.id, data);
|
||||
});
|
||||
}
|
||||
|
||||
increaseWaitTime(): void {
|
||||
if (this.reconnectWait >= 16) {
|
||||
this.reconnectWait = 16;
|
||||
} else {
|
||||
this.reconnectWait *= 2;
|
||||
}
|
||||
}
|
||||
|
||||
reconnect(): void {
|
||||
const _self = this;
|
||||
|
||||
if (this.reconnectTimeout) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.reconnectTimeout = setTimeout(() => {
|
||||
logger.debug('[CLightningClient] Trying to reconnect...');
|
||||
|
||||
_self.client.connect(_self.rpcPath);
|
||||
_self.reconnectTimeout = null;
|
||||
}, this.reconnectWait * 1000);
|
||||
}
|
||||
|
||||
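The two helpers above give the client a capped exponential backoff: starting from 0.5 the wait doubles on each failure (1, 2, 4, 8, 16 seconds) and then stays at 16, while `reconnect()` refuses to schedule a second timer while one is already pending. A standalone sketch of the progression:

```typescript
// Standalone sketch of the backoff used above (same start and cap as the diff).
function nextWait(currentSeconds: number): number {
  return currentSeconds >= 16 ? 16 : currentSeconds * 2;
}

let wait = 0.5;
const waits: number[] = [];
for (let i = 0; i < 7; i++) {
  wait = nextWait(wait);
  waits.push(wait);
}
// waits === [1, 2, 4, 8, 16, 16, 16]
```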
call(method, args = []): Promise<any> {
|
||||
const _self = this;
|
||||
|
||||
const callInt = ++this.reqcount;
|
||||
const sendObj = {
|
||||
jsonrpc: '2.0',
|
||||
method,
|
||||
params: args,
|
||||
id: '' + callInt
|
||||
};
|
||||
|
||||
// logger.debug(`[CLightningClient] #${callInt} --> ${method} ${args}`);
|
||||
|
||||
// Wait for the client to connect
|
||||
return this.clientConnectionPromise
|
||||
.then(() => new Promise((resolve, reject) => {
|
||||
// Wait for a response
|
||||
this.once('res:' + callInt, res => res.error == null
|
||||
? resolve(res.result)
|
||||
: reject(new LightningError(res.error))
|
||||
);
|
||||
|
||||
// Send the command
|
||||
_self.client.write(JSON.stringify(sendObj));
|
||||
}));
|
||||
}
|
||||
|
||||
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
|
||||
const listnodes: any[] = await this.call('listnodes');
|
||||
const listchannels: any[] = await this.call('listchannels');
|
||||
const channelsList = await convertAndmergeBidirectionalChannels(listchannels['channels']);
|
||||
|
||||
return {
|
||||
nodes: listnodes['nodes'].map(node => convertNode(node)),
|
||||
edges: channelsList,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const protify = s => s.replace(/-([a-z])/g, m => m[1].toUpperCase());
|
||||
|
||||
methods.forEach(k => {
|
||||
CLightningClient.prototype[protify(k)] = function (...args: any) {
|
||||
return this.call(k, args);
|
||||
};
|
||||
});
|
||||
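The `forEach` above attaches one camelCased method per name in `methods`, so a dashed RPC like `dev-listaddrs` becomes `devListaddrs()`, each of them simply forwarding to `call(name, args)`. A hedged usage sketch (the socket path is a placeholder, not a value from this changeset):

```typescript
// Sketch only: the socket path below is a placeholder.
async function clightningExample(): Promise<void> {
  const client = new CLightningClient('/home/user/.lightning/bitcoin/lightning-rpc');
  // Generated prototype methods are untyped, so calling through call()
  // keeps TypeScript happy; client.getinfo() would reach the same RPC.
  const info = await client.call('getinfo');
  const nodes = await client.call('listnodes');
  console.log(info, nodes['nodes']?.length);
}
```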
147
backend/src/api/lightning/clightning/clightning-convert.ts
Normal file
@@ -0,0 +1,147 @@
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
|
||||
import logger from '../../../logger';
|
||||
import { Common } from '../../common';
|
||||
|
||||
/**
|
||||
* Convert a clightning "listnode" entry to a lnd node entry
|
||||
*/
|
||||
export function convertNode(clNode: any): ILightningApi.Node {
|
||||
let custom_records: { [type: number]: string } | undefined = undefined;
|
||||
if (clNode.option_will_fund) {
|
||||
try {
|
||||
custom_records = { '1': Buffer.from(clNode.option_will_fund.compact_lease || '', 'hex').toString('base64') };
|
||||
} catch (e) {
|
||||
logger.err(`Cannot decode option_will_fund compact_lease for ${clNode.nodeid}). Reason: ` + (e instanceof Error ? e.message : e));
|
||||
custom_records = undefined;
|
||||
}
|
||||
}
|
||||
return {
|
||||
alias: clNode.alias ?? '',
|
||||
color: `#${clNode.color ?? ''}`,
|
||||
features: [], // TODO parse and return clNode.feature
|
||||
pub_key: clNode.nodeid,
|
||||
addresses: clNode.addresses?.map((addr) => {
|
||||
let address = addr.address;
|
||||
if (addr.type === 'ipv6') {
|
||||
address = `[${address}]`;
|
||||
}
|
||||
return {
|
||||
network: addr.type,
|
||||
addr: `${address}:${addr.port}`
|
||||
};
|
||||
}) ?? [],
|
||||
last_update: clNode?.last_timestamp ?? 0,
|
||||
custom_records
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert clightning "listchannels" response to lnd "describegraph.edges" format
|
||||
*/
|
||||
export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
|
||||
logger.info('Converting clightning nodes and channels to lnd graph format');
|
||||
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
let channelProcessed = 0;
|
||||
|
||||
const consolidatedChannelList: ILightningApi.Channel[] = [];
|
||||
const clChannelsDict = {};
|
||||
const clChannelsDictCount = {};
|
||||
|
||||
for (const clChannel of clChannels) {
|
||||
if (!clChannelsDict[clChannel.short_channel_id]) {
|
||||
clChannelsDict[clChannel.short_channel_id] = clChannel;
|
||||
clChannelsDictCount[clChannel.short_channel_id] = 1;
|
||||
} else {
|
||||
consolidatedChannelList.push(
|
||||
await buildFullChannel(clChannel, clChannelsDict[clChannel.short_channel_id])
|
||||
);
|
||||
delete clChannelsDict[clChannel.short_channel_id];
|
||||
clChannelsDictCount[clChannel.short_channel_id]++;
|
||||
}
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
++channelProcessed;
|
||||
}
|
||||
|
||||
channelProcessed = 0;
|
||||
const keys = Object.keys(clChannelsDict);
|
||||
for (const short_channel_id of keys) {
|
||||
consolidatedChannelList.push(await buildIncompleteChannel(clChannelsDict[short_channel_id]));
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
channelProcessed++;
|
||||
}
|
||||
|
||||
return consolidatedChannelList;
|
||||
}
|
||||
|
||||
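To restate the merge step above: clightning's `listchannels` returns one entry per direction, so a channel announced by both peers appears twice under the same `short_channel_id`. The first sighting is parked in `clChannelsDict`; the second sighting pairs with it via `buildFullChannel`, and anything left over at the end becomes an incomplete (one-sided) channel. A minimal sketch of that pairing logic, with the converters stubbed out:

```typescript
// Sketch of the pairing logic only; buildFull/buildIncomplete stand in
// for the real converters defined in this file.
function pairByShortChannelId<T extends { short_channel_id: string }>(
  entries: T[],
  buildFull: (a: T, b: T) => unknown,
  buildIncomplete: (a: T) => unknown
): unknown[] {
  const pending: { [id: string]: T } = {};
  const merged: unknown[] = [];
  for (const entry of entries) {
    const other = pending[entry.short_channel_id];
    if (other) {
      merged.push(buildFull(entry, other)); // both directions seen
      delete pending[entry.short_channel_id];
    } else {
      pending[entry.short_channel_id] = entry; // wait for the opposite direction
    }
  }
  for (const id of Object.keys(pending)) {
    merged.push(buildIncomplete(pending[id])); // only one direction gossiped
  }
  return merged;
}
```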
/**
|
||||
* Convert two clightning "listchannels" entries into a full lnd "describegraph.edges" format
|
||||
* In this case, clightning knows the channel policy for both nodes
|
||||
*/
|
||||
async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILightningApi.Channel> {
|
||||
const lastUpdate = Math.max(clChannelA.last_update ?? 0, clChannelB.last_update ?? 0);
|
||||
|
||||
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannelA.short_channel_id);
|
||||
const parts = clChannelA.short_channel_id.split('x');
|
||||
const outputIdx = parts[2];
|
||||
|
||||
return {
|
||||
channel_id: Common.channelShortIdToIntegerId(clChannelA.short_channel_id),
|
||||
capacity: clChannelA.satoshis,
|
||||
last_update: lastUpdate,
|
||||
node1_policy: convertPolicy(clChannelA),
|
||||
node2_policy: convertPolicy(clChannelB),
|
||||
chan_point: `${tx.txid}:${outputIdx}`,
|
||||
node1_pub: clChannelA.source,
|
||||
node2_pub: clChannelB.source,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert one clightning "listchannels" entry into a lnd "describegraph.edges" format
|
||||
* In this case, clightning knows the channel policy of only one node
|
||||
*/
|
||||
async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Channel> {
|
||||
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannel.short_channel_id);
|
||||
const parts = clChannel.short_channel_id.split('x');
|
||||
const outputIdx = parts[2];
|
||||
|
||||
return {
|
||||
channel_id: Common.channelShortIdToIntegerId(clChannel.short_channel_id),
|
||||
capacity: clChannel.satoshis,
|
||||
last_update: clChannel.last_update ?? 0,
|
||||
node1_policy: convertPolicy(clChannel),
|
||||
node2_policy: null,
|
||||
chan_point: `${tx.txid}:${outputIdx}`,
|
||||
node1_pub: clChannel.source,
|
||||
node2_pub: clChannel.destination,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a clightning "listnode" response to a lnd channel policy format
|
||||
*/
|
||||
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
|
||||
return {
|
||||
time_lock_delta: clChannel.delay,
|
||||
min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
|
||||
max_htlc_msat: clChannel.htlc_maximum_msat.slice(0, -4),
|
||||
fee_base_msat: clChannel.base_fee_millisatoshi,
|
||||
fee_rate_milli_msat: clChannel.fee_per_millionth,
|
||||
disabled: !clChannel.active,
|
||||
last_update: clChannel.last_update ?? 0,
|
||||
};
|
||||
}
|
||||
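The `.slice(0, -4)` calls above assume clightning reports HTLC limits as strings carrying an `msat` suffix (for example `"1000msat"`), so dropping the last four characters leaves the bare number the lnd-style policy fields expect. A small sketch of that conversion in isolation:

```typescript
// Assumes the clightning amount format "<number>msat", e.g. "1000msat".
function stripMsatSuffix(amount: string): string {
  return amount.endsWith('msat') ? amount.slice(0, -4) : amount;
}

stripMsatSuffix('1000msat');      // -> '1000'
stripMsatSuffix('990000000msat'); // -> '990000000'
```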
@@ -1,7 +1,5 @@
|
||||
import { ILightningApi } from './lightning-api.interface';
|
||||
|
||||
export interface AbstractLightningApi {
|
||||
$getNetworkInfo(): Promise<ILightningApi.NetworkInfo>;
|
||||
$getNetworkGraph(): Promise<ILightningApi.NetworkGraph>;
|
||||
$getInfo(): Promise<ILightningApi.Info>;
|
||||
}
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import config from '../../config';
|
||||
import CLightningClient from './clightning/clightning-client';
|
||||
import { AbstractLightningApi } from './lightning-api-abstract-factory';
|
||||
import LndApi from './lnd/lnd-api';
|
||||
|
||||
function lightningApiFactory(): AbstractLightningApi {
|
||||
switch (config.LIGHTNING.BACKEND) {
|
||||
switch (config.LIGHTNING.ENABLED === true && config.LIGHTNING.BACKEND) {
|
||||
case 'cln':
|
||||
return new CLightningClient(config.CLIGHTNING.SOCKET);
|
||||
case 'lnd':
|
||||
default:
|
||||
return new LndApi();
|
||||
|
||||
@@ -1,71 +1,92 @@
|
||||
export namespace ILightningApi {
|
||||
export interface NetworkInfo {
|
||||
average_channel_size: number;
|
||||
channel_count: number;
|
||||
max_channel_size: number;
|
||||
median_channel_size: number;
|
||||
min_channel_size: number;
|
||||
node_count: number;
|
||||
not_recently_updated_policy_count: number;
|
||||
total_capacity: number;
|
||||
graph_diameter: number;
|
||||
avg_out_degree: number;
|
||||
max_out_degree: number;
|
||||
num_nodes: number;
|
||||
num_channels: number;
|
||||
total_network_capacity: string;
|
||||
avg_channel_size: number;
|
||||
min_channel_size: string;
|
||||
max_channel_size: string;
|
||||
median_channel_size_sat: string;
|
||||
num_zombie_chans: string;
|
||||
}
|
||||
|
||||
export interface NetworkGraph {
|
||||
channels: Channel[];
|
||||
nodes: Node[];
|
||||
edges: Channel[];
|
||||
}
|
||||
|
||||
export interface Channel {
|
||||
id: string;
|
||||
capacity: number;
|
||||
policies: Policy[];
|
||||
transaction_id: string;
|
||||
transaction_vout: number;
|
||||
updated_at?: string;
|
||||
channel_id: string;
|
||||
chan_point: string;
|
||||
last_update: number;
|
||||
node1_pub: string;
|
||||
node2_pub: string;
|
||||
capacity: string;
|
||||
node1_policy: RoutingPolicy | null;
|
||||
node2_policy: RoutingPolicy | null;
|
||||
}
|
||||
|
||||
interface Policy {
|
||||
public_key: string;
|
||||
base_fee_mtokens?: string;
|
||||
cltv_delta?: number;
|
||||
fee_rate?: number;
|
||||
is_disabled?: boolean;
|
||||
max_htlc_mtokens?: string;
|
||||
min_htlc_mtokens?: string;
|
||||
updated_at?: string;
|
||||
export interface RoutingPolicy {
|
||||
time_lock_delta: number;
|
||||
min_htlc: string;
|
||||
fee_base_msat: string;
|
||||
fee_rate_milli_msat: string;
|
||||
disabled: boolean;
|
||||
max_htlc_msat: string;
|
||||
last_update: number;
|
||||
}
|
||||
|
||||
export interface Node {
|
||||
last_update: number;
|
||||
pub_key: string;
|
||||
alias: string;
|
||||
addresses: {
|
||||
network: string;
|
||||
addr: string;
|
||||
}[];
|
||||
color: string;
|
||||
features: Feature[];
|
||||
public_key: string;
|
||||
sockets: string[];
|
||||
updated_at?: string;
|
||||
features: { [key: number]: Feature };
|
||||
custom_records?: { [type: number]: string };
|
||||
}
|
||||
|
||||
export interface Info {
|
||||
chains: string[];
|
||||
color: string;
|
||||
active_channels_count: number;
|
||||
identity_pubkey: string;
|
||||
alias: string;
|
||||
current_block_hash: string;
|
||||
current_block_height: number;
|
||||
features: Feature[];
|
||||
is_synced_to_chain: boolean;
|
||||
is_synced_to_graph: boolean;
|
||||
latest_block_at: string;
|
||||
peers_count: number;
|
||||
pending_channels_count: number;
|
||||
public_key: string;
|
||||
uris: any[];
|
||||
num_pending_channels: number;
|
||||
num_active_channels: number;
|
||||
num_peers: number;
|
||||
block_height: number;
|
||||
block_hash: string;
|
||||
synced_to_chain: boolean;
|
||||
testnet: boolean;
|
||||
uris: string[];
|
||||
best_header_timestamp: string;
|
||||
version: string;
|
||||
num_inactive_channels: number;
|
||||
chains: {
|
||||
chain: string;
|
||||
network: string;
|
||||
}[];
|
||||
color: string;
|
||||
synced_to_graph: boolean;
|
||||
features: { [key: number]: Feature };
|
||||
commit_hash: string;
|
||||
/** Available on LND since v0.15.0-beta */
|
||||
require_htlc_interceptor?: boolean;
|
||||
}
|
||||
|
||||
|
||||
export interface Feature {
|
||||
bit: number;
|
||||
is_known: boolean;
|
||||
name: string;
|
||||
is_required: boolean;
|
||||
type?: string;
|
||||
is_known: boolean;
|
||||
}
|
||||
}
|
||||
|
||||
export interface ForensicOutput {
|
||||
node?: 1 | 2;
|
||||
type: number;
|
||||
value: number;
|
||||
}
|
||||
}
|
||||
@@ -1,44 +1,40 @@
|
||||
import axios, { AxiosRequestConfig } from 'axios';
|
||||
import { Agent } from 'https';
|
||||
import * as fs from 'fs';
|
||||
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import * as fs from 'fs';
|
||||
import { authenticatedLndGrpc, getWalletInfo, getNetworkGraph, getNetworkInfo } from 'lightning';
|
||||
import config from '../../../config';
|
||||
import logger from '../../../logger';
|
||||
|
||||
class LndApi implements AbstractLightningApi {
|
||||
private lnd: any;
|
||||
axiosConfig: AxiosRequestConfig = {};
|
||||
|
||||
constructor() {
|
||||
if (!config.LIGHTNING.ENABLED) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const tls = fs.readFileSync(config.LND.TLS_CERT_PATH).toString('base64');
|
||||
const macaroon = fs.readFileSync(config.LND.MACAROON_PATH).toString('base64');
|
||||
|
||||
const { lnd } = authenticatedLndGrpc({
|
||||
cert: tls,
|
||||
macaroon: macaroon,
|
||||
socket: config.LND.SOCKET,
|
||||
});
|
||||
|
||||
this.lnd = lnd;
|
||||
} catch (e) {
|
||||
logger.err('Could not initiate the LND service handler: ' + (e instanceof Error ? e.message : e));
|
||||
process.exit(1);
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.axiosConfig = {
|
||||
headers: {
|
||||
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex')
|
||||
},
|
||||
httpsAgent: new Agent({
|
||||
ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
|
||||
}),
|
||||
timeout: 10000
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async $getNetworkInfo(): Promise<ILightningApi.NetworkInfo> {
|
||||
return await getNetworkInfo({ lnd: this.lnd });
|
||||
return axios.get<ILightningApi.NetworkInfo>(config.LND.REST_API_URL + '/v1/graph/info', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
async $getInfo(): Promise<ILightningApi.Info> {
|
||||
// @ts-ignore
|
||||
return await getWalletInfo({ lnd: this.lnd });
|
||||
return axios.get<ILightningApi.Info>(config.LND.REST_API_URL + '/v1/getinfo', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
|
||||
return await getNetworkGraph({ lnd: this.lnd });
|
||||
return axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
}
|
||||
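In the LND diff above, the grpc-based `lightning` package calls are replaced with direct REST requests: the macaroon is sent hex-encoded in the `Grpc-Metadata-macaroon` header and the TLS certificate is pinned through a custom https `Agent`. A hedged sketch of one such call outside the class (paths and URL are placeholders, not values from this changeset):

```typescript
import axios from 'axios';
import { Agent } from 'https';
import * as fs from 'fs';

// Placeholder paths/URL; the real values come from the backend config.
const macaroonPath = '/path/to/admin.macaroon';
const tlsCertPath = '/path/to/tls.cert';
const restUrl = 'https://127.0.0.1:8080';

async function getInfo(): Promise<unknown> {
  const response = await axios.get(restUrl + '/v1/getinfo', {
    headers: { 'Grpc-Metadata-macaroon': fs.readFileSync(macaroonPath).toString('hex') },
    httpsAgent: new Agent({ ca: fs.readFileSync(tlsCertPath) }),
    timeout: 10000,
  });
  return response.data;
}
```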
|
||||
|
||||
@@ -2,10 +2,16 @@ import logger from '../logger';
|
||||
import { MempoolBlock, TransactionExtended, TransactionStripped, MempoolBlockWithTransactions, MempoolBlockDelta } from '../mempool.interfaces';
|
||||
import { Common } from './common';
|
||||
import config from '../config';
|
||||
import { StaticPool } from 'node-worker-threads-pool';
|
||||
import path from 'path';
|
||||
|
||||
class MempoolBlocks {
|
||||
private mempoolBlocks: MempoolBlockWithTransactions[] = [];
|
||||
private mempoolBlockDeltas: MempoolBlockDelta[] = [];
|
||||
private makeTemplatesPool = new StaticPool({
|
||||
size: 1,
|
||||
task: path.resolve(__dirname, './tx-selection-worker.js'),
|
||||
});
|
||||
|
||||
constructor() {}
|
||||
|
||||
@@ -71,15 +77,15 @@ class MempoolBlocks {
|
||||
const time = end - start;
|
||||
logger.debug('Mempool blocks calculated in ' + time / 1000 + ' seconds');
|
||||
|
||||
const { blocks, deltas } = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
|
||||
const blocks = this.calculateMempoolBlocks(memPoolArray, this.mempoolBlocks);
|
||||
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
|
||||
|
||||
this.mempoolBlocks = blocks;
|
||||
this.mempoolBlockDeltas = deltas;
|
||||
}
|
||||
|
||||
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]):
|
||||
{ blocks: MempoolBlockWithTransactions[], deltas: MempoolBlockDelta[] } {
|
||||
private calculateMempoolBlocks(transactionsSorted: TransactionExtended[], prevBlocks: MempoolBlockWithTransactions[]): MempoolBlockWithTransactions[] {
|
||||
const mempoolBlocks: MempoolBlockWithTransactions[] = [];
|
||||
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
|
||||
let blockWeight = 0;
|
||||
let blockSize = 0;
|
||||
let transactions: TransactionExtended[] = [];
|
||||
@@ -99,7 +105,12 @@ class MempoolBlocks {
|
||||
if (transactions.length) {
|
||||
mempoolBlocks.push(this.dataToMempoolBlocks(transactions, blockSize, blockWeight, mempoolBlocks.length));
|
||||
}
|
||||
// Calculate change from previous block states
|
||||
|
||||
return mempoolBlocks;
|
||||
}
|
||||
|
||||
private calculateMempoolDeltas(prevBlocks: MempoolBlockWithTransactions[], mempoolBlocks: MempoolBlockWithTransactions[]): MempoolBlockDelta[] {
|
||||
const mempoolBlockDeltas: MempoolBlockDelta[] = [];
|
||||
for (let i = 0; i < Math.max(mempoolBlocks.length, prevBlocks.length); i++) {
|
||||
let added: TransactionStripped[] = [];
|
||||
let removed: string[] = [];
|
||||
@@ -132,10 +143,26 @@ class MempoolBlocks {
|
||||
removed
|
||||
});
|
||||
}
|
||||
return {
|
||||
blocks: mempoolBlocks,
|
||||
deltas: mempoolBlockDeltas
|
||||
};
|
||||
return mempoolBlockDeltas;
|
||||
}
|
||||
|
||||
public async makeBlockTemplates(newMempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null = null, condenseRest = false): Promise<void> {
|
||||
const { mempool, blocks } = await this.makeTemplatesPool.exec({ mempool: newMempool, blockLimit, weightLimit, condenseRest });
|
||||
const deltas = this.calculateMempoolDeltas(this.mempoolBlocks, blocks);
|
||||
|
||||
// copy CPFP info across to main thread's mempool
|
||||
Object.keys(newMempool).forEach((txid) => {
|
||||
if (newMempool[txid] && mempool[txid]) {
|
||||
newMempool[txid].effectiveFeePerVsize = mempool[txid].effectiveFeePerVsize;
|
||||
newMempool[txid].ancestors = mempool[txid].ancestors;
|
||||
newMempool[txid].descendants = mempool[txid].descendants;
|
||||
newMempool[txid].bestDescendant = mempool[txid].bestDescendant;
|
||||
newMempool[txid].cpfpChecked = mempool[txid].cpfpChecked;
|
||||
}
|
||||
});
|
||||
|
||||
this.mempoolBlocks = blocks;
|
||||
this.mempoolBlockDeltas = deltas;
|
||||
}
|
||||
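`makeBlockTemplates` above hands the heavy transaction selection to a one-thread `StaticPool` running `tx-selection-worker.js`, then copies the computed CPFP fields back onto the live mempool objects, because the worker receives and returns structured-clone copies rather than the caller's objects. A minimal sketch of that pattern (the worker file name is a placeholder):

```typescript
import { StaticPool } from 'node-worker-threads-pool';
import path from 'path';

// Placeholder worker file; in this changeset the task is tx-selection-worker.js.
const pool = new StaticPool({
  size: 1,
  task: path.resolve(__dirname, './some-worker.js'),
});

async function runInWorker(mempool: { [txid: string]: unknown }): Promise<unknown> {
  // exec() structured-clones its argument into the worker and the result back,
  // so any per-transaction fields computed there must be copied onto the
  // caller's own objects afterwards, as the diff above does for CPFP data.
  return pool.exec({ mempool, blockLimit: 8, weightLimit: null, condenseRest: true });
}
```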
|
||||
private dataToMempoolBlocks(transactions: TransactionExtended[],
|
||||
|
||||
@@ -20,6 +20,8 @@ class Mempool {
|
||||
maxmempool: 300000000, mempoolminfee: 0.00001000, minrelaytxfee: 0.00001000 };
|
||||
private mempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
|
||||
deletedTransactions: TransactionExtended[]) => void) | undefined;
|
||||
private asyncMempoolChangedCallback: ((newMempool: {[txId: string]: TransactionExtended; }, newTransactions: TransactionExtended[],
|
||||
deletedTransactions: TransactionExtended[]) => void) | undefined;
|
||||
|
||||
private txPerSecondArray: number[] = [];
|
||||
private txPerSecond: number = 0;
|
||||
@@ -63,6 +65,11 @@ class Mempool {
|
||||
this.mempoolChangedCallback = fn;
|
||||
}
|
||||
|
||||
public setAsyncMempoolChangedCallback(fn: (newMempool: { [txId: string]: TransactionExtended; },
|
||||
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) => Promise<void>) {
|
||||
this.asyncMempoolChangedCallback = fn;
|
||||
}
|
||||
|
||||
public getMempool(): { [txid: string]: TransactionExtended } {
|
||||
return this.mempoolCache;
|
||||
}
|
||||
@@ -72,6 +79,9 @@ class Mempool {
|
||||
if (this.mempoolChangedCallback) {
|
||||
this.mempoolChangedCallback(this.mempoolCache, [], []);
|
||||
}
|
||||
if (this.asyncMempoolChangedCallback) {
|
||||
this.asyncMempoolChangedCallback(this.mempoolCache, [], []);
|
||||
}
|
||||
}
|
||||
|
||||
public async $updateMemPoolInfo() {
|
||||
@@ -103,12 +113,11 @@ class Mempool {
|
||||
return txTimes;
|
||||
}
|
||||
|
||||
public async $updateMempool() {
|
||||
logger.debug('Updating mempool');
|
||||
public async $updateMempool(): Promise<void> {
|
||||
logger.debug(`Updating mempool...`);
|
||||
const start = new Date().getTime();
|
||||
let hasChange: boolean = false;
|
||||
const currentMempoolSize = Object.keys(this.mempoolCache).length;
|
||||
let txCount = 0;
|
||||
const transactions = await bitcoinApi.$getRawMempool();
|
||||
const diff = transactions.length - currentMempoolSize;
|
||||
const newTransactions: TransactionExtended[] = [];
|
||||
@@ -124,7 +133,6 @@ class Mempool {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getTransactionExtended(txid);
|
||||
this.mempoolCache[txid] = transaction;
|
||||
txCount++;
|
||||
if (this.inSync) {
|
||||
this.txPerSecondArray.push(new Date().getTime());
|
||||
this.vBytesPerSecondArray.push({
|
||||
@@ -133,14 +141,9 @@ class Mempool {
|
||||
});
|
||||
}
|
||||
hasChange = true;
|
||||
if (diff > 0) {
|
||||
logger.debug('Fetched transaction ' + txCount + ' / ' + diff);
|
||||
} else {
|
||||
logger.debug('Fetched transaction ' + txCount);
|
||||
}
|
||||
newTransactions.push(transaction);
|
||||
} catch (e) {
|
||||
logger.debug('Error finding transaction in mempool: ' + (e instanceof Error ? e.message : e));
|
||||
logger.debug(`Error finding transaction '${txid}' in the mempool: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -194,11 +197,13 @@ class Mempool {
|
||||
if (this.mempoolChangedCallback && (hasChange || deletedTransactions.length)) {
|
||||
this.mempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
|
||||
}
|
||||
if (this.asyncMempoolChangedCallback && (hasChange || deletedTransactions.length)) {
|
||||
await this.asyncMempoolChangedCallback(this.mempoolCache, newTransactions, deletedTransactions);
|
||||
}
|
||||
|
||||
const end = new Date().getTime();
|
||||
const time = end - start;
|
||||
logger.debug(`New mempool size: ${Object.keys(this.mempoolCache).length} Change: ${diff}`);
|
||||
logger.debug('Mempool updated in ' + time / 1000 + ' seconds');
|
||||
logger.debug(`Mempool updated in ${time / 1000} seconds. New size: ${Object.keys(this.mempoolCache).length} (${diff > 0 ? '+' + diff : diff})`);
|
||||
}
|
||||
|
||||
public handleRbfTransactions(rbfTransactions: { [txid: string]: TransactionExtended; }) {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { Application, Request, Response } from 'express';
|
||||
import config from "../../config";
|
||||
import logger from '../../logger';
|
||||
import audits from '../audit';
|
||||
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
|
||||
import BlocksRepository from '../../repositories/BlocksRepository';
|
||||
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
|
||||
@@ -26,7 +27,11 @@ class MiningRoutes {
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores', this.$getBlockAuditScores)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/scores/:height', this.$getBlockAuditScores)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/score/:hash', this.$getBlockAuditScore)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/timestamp/:timestamp', this.$getHeightFromTimestamp)
|
||||
;
|
||||
}
|
||||
|
||||
@@ -238,6 +243,12 @@ class MiningRoutes {
|
||||
public async $getBlockAudit(req: Request, res: Response) {
|
||||
try {
|
||||
const audit = await BlocksAuditsRepository.$getBlockAudit(req.params.hash);
|
||||
|
||||
if (!audit) {
|
||||
res.status(404).send(`This block has not been audited.`);
|
||||
return;
|
||||
}
|
||||
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
|
||||
@@ -246,6 +257,55 @@ class MiningRoutes {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHeightFromTimestamp(req: Request, res: Response) {
|
||||
try {
|
||||
const timestamp = parseInt(req.params.timestamp, 10);
|
||||
// This will prevent people from entering milliseconds etc.
|
||||
// Block timestamps are allowed to be up to 2 hours off, so 24 hours
|
||||
// will never put the maximum value before the most recent block
|
||||
const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24;
|
||||
// Prevent non-integers that are not seconds
|
||||
if (!/^[1-9][0-9]*$/.test(req.params.timestamp) || timestamp > nowPlus1day) {
|
||||
throw new Error(`Invalid timestamp, value must be Unix seconds`);
|
||||
}
|
||||
const result = await BlocksRepository.$getBlockHeightFromTimestamp(
|
||||
timestamp,
|
||||
);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
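To restate the guard in `$getHeightFromTimestamp` above: the regex rejects anything that is not a plain positive integer, and the `now + 24h` ceiling filters out millisecond timestamps, since even with the 2-hour block timestamp tolerance a valid Unix-seconds value can never exceed it. A small sketch of the same check:

```typescript
// Same validation as above, extracted for illustration.
function isValidUnixSecondsTimestamp(raw: string): boolean {
  const nowPlus1day = Math.floor(Date.now() / 1000) + 60 * 60 * 24;
  return /^[1-9][0-9]*$/.test(raw) && parseInt(raw, 10) <= nowPlus1day;
}

isValidUnixSecondsTimestamp('1672531200');    // true  (Unix seconds)
isValidUnixSecondsTimestamp('1672531200000'); // false (milliseconds, exceeds now + 24h)
```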
private async $getBlockAuditScores(req: Request, res: Response) {
|
||||
try {
|
||||
let height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
|
||||
if (height == null) {
|
||||
height = await BlocksRepository.$mostRecentBlockHeight();
|
||||
}
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(await BlocksAuditsRepository.$getBlockAuditScores(height, height - 15));
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
public async $getBlockAuditScore(req: Request, res: Response) {
|
||||
try {
|
||||
const audit = await BlocksAuditsRepository.$getBlockAuditScore(req.params.hash);
|
||||
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
|
||||
res.json(audit || 'null');
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new MiningRoutes();
|
||||
|
||||
@@ -473,7 +473,7 @@ class Mining {
|
||||
|
||||
for (const block of blocksWithoutPrices) {
|
||||
// Quick optimisation, our mtgox feed only goes back to 2010-07-19 02:00:00, so skip the first 68951 blocks
|
||||
if (block.height < 68951) {
|
||||
if (['mainnet', 'testnet'].includes(config.MEMPOOL.NETWORK) && block.height < 68951) {
|
||||
blocksPrices.push({
|
||||
height: block.height,
|
||||
priceId: prices[0].id,
|
||||
@@ -492,11 +492,11 @@ class Mining {
|
||||
|
||||
if (blocksPrices.length >= 100000) {
|
||||
totalInserted += blocksPrices.length;
|
||||
let logStr = `Linking ${blocksPrices.length} blocks to their closest price`;
|
||||
if (blocksWithoutPrices.length > 200000) {
|
||||
logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`);
|
||||
} else {
|
||||
logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price`);
|
||||
logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
|
||||
}
|
||||
logger.debug(logStr);
|
||||
await BlocksRepository.$saveBlockPrices(blocksPrices);
|
||||
blocksPrices.length = 0;
|
||||
}
|
||||
@@ -504,11 +504,11 @@ class Mining {
|
||||
|
||||
if (blocksPrices.length > 0) {
|
||||
totalInserted += blocksPrices.length;
|
||||
let logStr = `Linking ${blocksPrices.length} blocks to their closest price`;
|
||||
if (blocksWithoutPrices.length > 200000) {
|
||||
logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`);
|
||||
} else {
|
||||
logger.debug(`Linking ${blocksPrices.length} newly indexed blocks to their closest price`);
|
||||
logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
|
||||
}
|
||||
logger.debug(logStr);
|
||||
await BlocksRepository.$saveBlockPrices(blocksPrices);
|
||||
}
|
||||
} catch (e) {
|
||||
|
||||
@@ -14,10 +14,10 @@ interface Pool {
|
||||
class PoolsParser {
|
||||
miningPools: any[] = [];
|
||||
unknownPool: any = {
|
||||
'name': "Unknown",
|
||||
'link': "https://learnmeabitcoin.com/technical/coinbase-transaction",
|
||||
'regexes': "[]",
|
||||
'addresses': "[]",
|
||||
'name': 'Unknown',
|
||||
'link': 'https://learnmeabitcoin.com/technical/coinbase-transaction',
|
||||
'regexes': '[]',
|
||||
'addresses': '[]',
|
||||
'slug': 'unknown'
|
||||
};
|
||||
slugWarnFlag = false;
|
||||
@@ -25,7 +25,7 @@ class PoolsParser {
|
||||
/**
|
||||
* Parse the pools.json file, consolidate the data and dump it into the database
|
||||
*/
|
||||
public async migratePoolsJson(poolsJson: object) {
|
||||
public async migratePoolsJson(poolsJson: object): Promise<void> {
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
|
||||
return;
|
||||
}
|
||||
@@ -81,6 +81,7 @@ class PoolsParser {
|
||||
// Finally, we generate the final consolidated pools data
|
||||
const finalPoolDataAdd: Pool[] = [];
|
||||
const finalPoolDataUpdate: Pool[] = [];
|
||||
const finalPoolDataRename: Pool[] = [];
|
||||
for (let i = 0; i < poolNames.length; ++i) {
|
||||
let allAddresses: string[] = [];
|
||||
let allRegexes: string[] = [];
|
||||
@@ -127,8 +128,26 @@ class PoolsParser {
|
||||
finalPoolDataUpdate.push(poolObj);
|
||||
}
|
||||
} else {
|
||||
logger.debug(`Add '${finalPoolName}' mining pool`);
|
||||
finalPoolDataAdd.push(poolObj);
|
||||
// Double check that if we're not just renaming a pool (same address same regex)
|
||||
const [poolToRename]: any[] = await DB.query(`
|
||||
SELECT * FROM pools
|
||||
WHERE addresses = ? OR regexes = ?`,
|
||||
[JSON.stringify(poolObj.addresses), JSON.stringify(poolObj.regexes)]
|
||||
);
|
||||
if (poolToRename && poolToRename.length > 0) {
|
||||
// We're actually renaming an existing pool
|
||||
finalPoolDataRename.push({
|
||||
'name': poolObj.name,
|
||||
'link': poolObj.link,
|
||||
'regexes': allRegexes,
|
||||
'addresses': allAddresses,
|
||||
'slug': slug
|
||||
});
|
||||
logger.debug(`Rename '${poolToRename[0].name}' mining pool to ${poolObj.name}`);
|
||||
} else {
|
||||
logger.debug(`Add '${finalPoolName}' mining pool`);
|
||||
finalPoolDataAdd.push(poolObj);
|
||||
}
|
||||
}
|
||||
|
||||
this.miningPools.push({
|
||||
@@ -145,7 +164,9 @@ class PoolsParser {
|
||||
return;
|
||||
}
|
||||
|
||||
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) {
|
||||
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0 ||
|
||||
finalPoolDataRename.length > 0
|
||||
) {
|
||||
logger.debug(`Update pools table now`);
|
||||
|
||||
// Add new mining pools into the database
|
||||
@@ -169,8 +190,22 @@ class PoolsParser {
|
||||
;`);
|
||||
}
|
||||
|
||||
// Rename mining pools
|
||||
const renameQueries: string[] = [];
|
||||
for (let i = 0; i < finalPoolDataRename.length; ++i) {
|
||||
renameQueries.push(`
|
||||
UPDATE pools
|
||||
SET name='${finalPoolDataRename[i].name}', link='${finalPoolDataRename[i].link}',
|
||||
slug='${finalPoolDataRename[i].slug}'
|
||||
WHERE regexes='${JSON.stringify(finalPoolDataRename[i].regexes)}'
|
||||
AND addresses='${JSON.stringify(finalPoolDataRename[i].addresses)}'
|
||||
;`);
|
||||
}
|
||||
|
||||
try {
|
||||
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
|
||||
if (finalPoolDataAdd.length > 0 || updateQueries.length > 0) {
|
||||
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
|
||||
}
|
||||
|
||||
if (finalPoolDataAdd.length > 0) {
|
||||
await DB.query({ sql: queryAdd, timeout: 120000 });
|
||||
@@ -178,6 +213,9 @@ class PoolsParser {
|
||||
for (const query of updateQueries) {
|
||||
await DB.query({ sql: query, timeout: 120000 });
|
||||
}
|
||||
for (const query of renameQueries) {
|
||||
await DB.query({ sql: query, timeout: 120000 });
|
||||
}
|
||||
await this.insertUnknownPool();
|
||||
logger.info('Mining pools.json import completed');
|
||||
} catch (e) {
|
||||
|
||||
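One hedged aside on the rename branch above: the `SELECT` that detects a renamed pool is parameterized, while the generated `UPDATE ... SET name=...` statements interpolate values into the SQL text. A sketch of the same rename update using placeholders, following the `DB.query(query, values)` convention seen elsewhere in this diff:

```typescript
// Sketch only: parameterized form of the rename UPDATE built above.
interface PoolRename {
  name: string;
  link: string;
  slug: string;
  regexes: string[];
  addresses: string[];
}

async function renamePool(pool: PoolRename): Promise<void> {
  await DB.query(
    `UPDATE pools SET name = ?, link = ?, slug = ?
     WHERE regexes = ? AND addresses = ?`,
    [pool.name, pool.link, pool.slug, JSON.stringify(pool.regexes), JSON.stringify(pool.addresses)]
  );
}
```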
338
backend/src/api/tx-selection-worker.ts
Normal file
@@ -0,0 +1,338 @@
|
||||
import config from '../config';
|
||||
import logger from '../logger';
|
||||
import { TransactionExtended, MempoolBlockWithTransactions, AuditTransaction } from '../mempool.interfaces';
|
||||
import { PairingHeap } from '../utils/pairing-heap';
|
||||
import { Common } from './common';
|
||||
import { parentPort } from 'worker_threads';
|
||||
|
||||
if (parentPort) {
|
||||
parentPort.on('message', (params: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit: number | null, condenseRest: boolean}) => {
|
||||
const { mempool, blocks } = makeBlockTemplates(params);
|
||||
|
||||
// return the result to main thread.
|
||||
if (parentPort) {
|
||||
parentPort.postMessage({ mempool, blocks });
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/*
|
||||
* Build projected mempool blocks using an approximation of the transaction selection algorithm from Bitcoin Core
|
||||
* (see BlockAssembler in https://github.com/bitcoin/bitcoin/blob/master/src/node/miner.cpp)
|
||||
*
|
||||
* blockLimit: number of blocks to build in total.
|
||||
* weightLimit: maximum weight of transactions to consider using the selection algorithm.
|
||||
* if weightLimit is significantly lower than the mempool size, results may start to diverge from getBlockTemplate
|
||||
* condenseRest: whether to ignore excess transactions or append them to the final block.
|
||||
*/
|
||||
function makeBlockTemplates({ mempool, blockLimit, weightLimit, condenseRest }: { mempool: { [txid: string]: TransactionExtended }, blockLimit: number, weightLimit?: number | null, condenseRest?: boolean | null })
|
||||
: { mempool: { [txid: string]: TransactionExtended }, blocks: MempoolBlockWithTransactions[] } {
|
||||
const start = Date.now();
|
||||
const auditPool: { [txid: string]: AuditTransaction } = {};
|
||||
const mempoolArray: AuditTransaction[] = [];
|
||||
const restOfArray: TransactionExtended[] = [];
|
||||
|
||||
let weight = 0;
|
||||
const maxWeight = weightLimit ? Math.max(4_000_000 * blockLimit, weightLimit) : Infinity;
|
||||
// grab the top feerate txs up to maxWeight
|
||||
Object.values(mempool).sort((a, b) => b.feePerVsize - a.feePerVsize).forEach(tx => {
|
||||
weight += tx.weight;
|
||||
if (weight >= maxWeight) {
|
||||
restOfArray.push(tx);
|
||||
return;
|
||||
}
|
||||
// initializing everything up front helps V8 optimize property access later
|
||||
auditPool[tx.txid] = {
|
||||
txid: tx.txid,
|
||||
fee: tx.fee,
|
||||
size: tx.size,
|
||||
weight: tx.weight,
|
||||
feePerVsize: tx.feePerVsize,
|
||||
vin: tx.vin,
|
||||
relativesSet: false,
|
||||
ancestorMap: new Map<string, AuditTransaction>(),
|
||||
children: new Set<AuditTransaction>(),
|
||||
ancestorFee: 0,
|
||||
ancestorWeight: 0,
|
||||
score: 0,
|
||||
used: false,
|
||||
modified: false,
|
||||
modifiedNode: null,
|
||||
};
|
||||
mempoolArray.push(auditPool[tx.txid]);
|
||||
});
|
||||
|
||||
// Build relatives graph & calculate ancestor scores
|
||||
for (const tx of mempoolArray) {
|
||||
if (!tx.relativesSet) {
|
||||
setRelatives(tx, auditPool);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by descending ancestor score
|
||||
mempoolArray.sort((a, b) => (b.score || 0) - (a.score || 0));
|
||||
|
||||
// Build blocks by greedily choosing the highest feerate package
|
||||
// (i.e. the package rooted in the transaction with the best ancestor score)
|
||||
const blocks: MempoolBlockWithTransactions[] = [];
|
||||
let blockWeight = 4000;
|
||||
let blockSize = 0;
|
||||
let transactions: AuditTransaction[] = [];
|
||||
const modified: PairingHeap<AuditTransaction> = new PairingHeap((a, b): boolean => (a.score || 0) > (b.score || 0));
|
||||
let overflow: AuditTransaction[] = [];
|
||||
let failures = 0;
|
||||
let top = 0;
|
||||
while ((top < mempoolArray.length || !modified.isEmpty()) && (condenseRest || blocks.length < blockLimit)) {
|
||||
// skip invalid transactions
|
||||
while (top < mempoolArray.length && (mempoolArray[top].used || mempoolArray[top].modified)) {
|
||||
top++;
|
||||
}
|
||||
|
||||
// Select best next package
|
||||
let nextTx;
|
||||
const nextPoolTx = mempoolArray[top];
|
||||
const nextModifiedTx = modified.peek();
|
||||
if (nextPoolTx && (!nextModifiedTx || (nextPoolTx.score || 0) > (nextModifiedTx.score || 0))) {
|
||||
nextTx = nextPoolTx;
|
||||
top++;
|
||||
} else {
|
||||
modified.pop();
|
||||
if (nextModifiedTx) {
|
||||
nextTx = nextModifiedTx;
|
||||
nextTx.modifiedNode = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
if (nextTx && !nextTx?.used) {
|
||||
// Check if the package fits into this block
|
||||
if (blockWeight + nextTx.ancestorWeight < config.MEMPOOL.BLOCK_WEIGHT_UNITS) {
|
||||
const ancestors: AuditTransaction[] = Array.from(nextTx.ancestorMap.values());
|
||||
const descendants: AuditTransaction[] = [];
|
||||
// sort ancestors by dependency graph (equivalent to sorting by ascending ancestor count)
|
||||
const sortedTxSet = [...ancestors.sort((a, b) => { return (a.ancestorMap.size || 0) - (b.ancestorMap.size || 0); }), nextTx];
|
||||
const effectiveFeeRate = nextTx.ancestorFee / (nextTx.ancestorWeight / 4);
|
||||
const used: AuditTransaction[] = [];
|
||||
while (sortedTxSet.length) {
|
||||
const ancestor = sortedTxSet.pop();
|
||||
const mempoolTx = mempool[ancestor.txid];
|
||||
ancestor.used = true;
|
||||
ancestor.usedBy = nextTx.txid;
|
||||
// update original copy of this tx with effective fee rate & relatives data
|
||||
mempoolTx.effectiveFeePerVsize = effectiveFeeRate;
|
||||
mempoolTx.ancestors = sortedTxSet.map((a) => {
|
||||
return {
|
||||
txid: a.txid,
|
||||
fee: a.fee,
|
||||
weight: a.weight,
|
||||
};
|
||||
}).reverse();
|
||||
mempoolTx.descendants = descendants.map((a) => {
|
||||
return {
|
||||
txid: a.txid,
|
||||
fee: a.fee,
|
||||
weight: a.weight,
|
||||
};
|
||||
});
|
||||
descendants.push(ancestor);
|
||||
mempoolTx.cpfpChecked = true;
|
||||
transactions.push(ancestor);
|
||||
blockSize += ancestor.size;
|
||||
blockWeight += ancestor.weight;
|
||||
used.push(ancestor);
|
||||
}
|
||||
|
||||
// remove these as valid package ancestors for any descendants remaining in the mempool
|
||||
if (used.length) {
|
||||
used.forEach(tx => {
|
||||
updateDescendants(tx, auditPool, modified);
|
||||
});
|
||||
}
|
||||
|
||||
failures = 0;
|
||||
} else {
|
||||
// hold this package in an overflow list while we check for smaller options
|
||||
overflow.push(nextTx);
|
||||
failures++;
|
||||
}
|
||||
}
|
||||
|
||||
// this block is full
|
||||
const exceededPackageTries = failures > 1000 && blockWeight > (config.MEMPOOL.BLOCK_WEIGHT_UNITS - 4000);
|
||||
const queueEmpty = top >= mempoolArray.length && modified.isEmpty();
|
||||
if ((exceededPackageTries || queueEmpty) && (!condenseRest || blocks.length < blockLimit - 1)) {
|
||||
// construct this block
|
||||
if (transactions.length) {
|
||||
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
|
||||
}
|
||||
// reset for the next block
|
||||
transactions = [];
|
||||
blockSize = 0;
|
||||
blockWeight = 4000;
|
||||
|
||||
// 'overflow' packages didn't fit in this block, but are valid candidates for the next
|
||||
for (const overflowTx of overflow.reverse()) {
|
||||
if (overflowTx.modified) {
|
||||
overflowTx.modifiedNode = modified.add(overflowTx);
|
||||
} else {
|
||||
top--;
|
||||
mempoolArray[top] = overflowTx;
|
||||
}
|
||||
}
|
||||
overflow = [];
|
||||
}
|
||||
}
|
||||
if (condenseRest) {
|
||||
// pack any leftover transactions into the last block
|
||||
for (const tx of overflow) {
|
||||
if (!tx || tx?.used) {
|
||||
continue;
|
||||
}
|
||||
blockWeight += tx.weight;
|
||||
blockSize += tx.size;
|
||||
const mempoolTx = mempool[tx.txid];
|
||||
// update original copy of this tx with effective fee rate & relatives data
|
||||
mempoolTx.effectiveFeePerVsize = tx.score;
|
||||
mempoolTx.ancestors = (Array.from(tx.ancestorMap?.values()) as AuditTransaction[]).map((a) => {
|
||||
return {
|
||||
txid: a.txid,
|
||||
fee: a.fee,
|
||||
weight: a.weight,
|
||||
};
|
||||
});
|
||||
mempoolTx.bestDescendant = null;
|
||||
mempoolTx.cpfpChecked = true;
|
||||
transactions.push(tx);
|
||||
tx.used = true;
|
||||
}
|
||||
const blockTransactions = transactions.map(t => mempool[t.txid]);
|
||||
restOfArray.forEach(tx => {
|
||||
blockWeight += tx.weight;
|
||||
blockSize += tx.size;
|
||||
tx.effectiveFeePerVsize = tx.feePerVsize;
|
||||
tx.cpfpChecked = false;
|
||||
tx.ancestors = [];
|
||||
tx.bestDescendant = null;
|
||||
blockTransactions.push(tx);
|
||||
});
|
||||
if (blockTransactions.length) {
|
||||
blocks.push(dataToMempoolBlocks(blockTransactions, blockSize, blockWeight, blocks.length));
|
||||
}
|
||||
transactions = [];
|
||||
} else if (transactions.length) {
|
||||
blocks.push(dataToMempoolBlocks(transactions.map(t => mempool[t.txid]), blockSize, blockWeight, blocks.length));
|
||||
}
|
||||
|
||||
const end = Date.now();
|
||||
const time = end - start;
|
||||
logger.debug('Mempool templates calculated in ' + time / 1000 + ' seconds');
|
||||
|
||||
return {
|
||||
mempool,
|
||||
blocks
|
||||
};
|
||||
}
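For orientation, a minimal sketch of how this selection routine might be driven and its output consumed, using the same arguments the websocket handler passes further down in this changeset (8 projected blocks, no extra weight cap, leftovers condensed into the last block). The surrounding call site and logging are assumptions for illustration, not part of this diff:

// Sketch only: caller context is assumed.
const { blocks } = makeBlockTemplates({
  mempool: memPool.getMempool(),
  blockLimit: 8,
  weightLimit: null,
  condenseRest: true,
});
blocks.forEach((block, i) => {
  logger.debug(`projected block ${i}: ${block.nTx} txs, ${block.blockVSize} vsize, median ${block.medianFee} sat/vB`);
});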
|
||||
|
||||
// traverse in-mempool ancestors
// recursion unavoidable, but should be limited to depth < 25 by mempool policy
function setRelatives(
  tx: AuditTransaction,
  mempool: { [txid: string]: AuditTransaction },
): void {
  for (const parent of tx.vin) {
    const parentTx = mempool[parent.txid];
    if (parentTx && !tx.ancestorMap?.has(parent.txid)) {
      tx.ancestorMap.set(parent.txid, parentTx);
      parentTx.children.add(tx);
      // visit each node only once
      if (!parentTx.relativesSet) {
        setRelatives(parentTx, mempool);
      }
      parentTx.ancestorMap.forEach((ancestor) => {
        tx.ancestorMap.set(ancestor.txid, ancestor);
      });
    }
  }
  tx.ancestorFee = tx.fee || 0;
  tx.ancestorWeight = tx.weight || 0;
  tx.ancestorMap.forEach((ancestor) => {
    tx.ancestorFee += ancestor.fee;
    tx.ancestorWeight += ancestor.weight;
  });
  tx.score = tx.ancestorFee / ((tx.ancestorWeight / 4) || 1);
  tx.relativesSet = true;
}
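The score assigned here is simply the package feerate in sat/vbyte: total in-mempool ancestor fee divided by total ancestor weight over four. A worked example with made-up numbers shows why a CPFP child lifts its low-fee parent up the sort order:

// Hypothetical parent/child pair, values chosen for illustration only.
const parent = { fee: 500, weight: 800 };   // 200 vbytes at 2.5 sat/vB on its own
const child = { fee: 4500, weight: 600 };   // 150 vbytes at 30 sat/vB on its own
const ancestorFee = parent.fee + child.fee;           // 5000 sats
const ancestorWeight = parent.weight + child.weight;  // 1400 WU = 350 vbytes
const score = ancestorFee / (ancestorWeight / 4);     // ~14.3 sat/vB for the package
// the child is ranked at ~14.3 sat/vB, and selecting it pulls the parent in with it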
|
||||
|
||||
// iterate over remaining descendants, removing the root as a valid ancestor & updating the ancestor score
|
||||
// avoids recursion to limit call stack depth
|
||||
function updateDescendants(
|
||||
rootTx: AuditTransaction,
|
||||
mempool: { [txid: string]: AuditTransaction },
|
||||
modified: PairingHeap<AuditTransaction>,
|
||||
): void {
|
||||
const descendantSet: Set<AuditTransaction> = new Set();
|
||||
// stack of nodes left to visit
|
||||
const descendants: AuditTransaction[] = [];
|
||||
let descendantTx;
|
||||
let tmpScore;
|
||||
rootTx.children.forEach(childTx => {
|
||||
if (!descendantSet.has(childTx)) {
|
||||
descendants.push(childTx);
|
||||
descendantSet.add(childTx);
|
||||
}
|
||||
});
|
||||
while (descendants.length) {
|
||||
descendantTx = descendants.pop();
|
||||
if (descendantTx && descendantTx.ancestorMap && descendantTx.ancestorMap.has(rootTx.txid)) {
|
||||
// remove tx as ancestor
|
||||
descendantTx.ancestorMap.delete(rootTx.txid);
|
||||
descendantTx.ancestorFee -= rootTx.fee;
|
||||
descendantTx.ancestorWeight -= rootTx.weight;
|
||||
tmpScore = descendantTx.score;
|
||||
descendantTx.score = descendantTx.ancestorFee / (descendantTx.ancestorWeight / 4);
|
||||
|
||||
if (!descendantTx.modifiedNode) {
|
||||
descendantTx.modified = true;
|
||||
descendantTx.modifiedNode = modified.add(descendantTx);
|
||||
} else {
|
||||
// rebalance modified heap if score has changed
|
||||
if (descendantTx.score < tmpScore) {
|
||||
modified.decreasePriority(descendantTx.modifiedNode);
|
||||
} else if (descendantTx.score > tmpScore) {
|
||||
modified.increasePriority(descendantTx.modifiedNode);
|
||||
}
|
||||
}
|
||||
|
||||
// add this node's children to the stack
|
||||
descendantTx.children.forEach(childTx => {
|
||||
// visit each node only once
|
||||
if (!descendantSet.has(childTx)) {
|
||||
descendants.push(childTx);
|
||||
descendantSet.add(childTx);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function dataToMempoolBlocks(transactions: TransactionExtended[],
  blockSize: number, blockWeight: number, blocksIndex: number): MempoolBlockWithTransactions {
  let rangeLength = 4;
  if (blocksIndex === 0) {
    rangeLength = 8;
  }
  // check the larger threshold first, otherwise the > 10000 branch can never be reached
  if (transactions.length > 10000) {
    rangeLength = 8;
  } else if (transactions.length > 4000) {
    rangeLength = 6;
  }
  return {
    blockSize: blockSize,
    blockVSize: blockWeight / 4,
    nTx: transactions.length,
    totalFees: transactions.reduce((acc, cur) => acc + cur.fee, 0),
    medianFee: Common.percentile(transactions.map((tx) => tx.effectiveFeePerVsize), config.MEMPOOL.RECOMMENDED_FEE_PERCENTILE),
    feeRange: Common.getFeesInRange(transactions, rangeLength),
    transactionIds: transactions.map((tx) => tx.txid),
    transactions: transactions.map((tx) => Common.stripTransaction(tx)),
  };
}
|
||||
@@ -18,6 +18,7 @@ import difficultyAdjustment from './difficulty-adjustment';
|
||||
import feeApi from './fee-api';
|
||||
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
|
||||
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
|
||||
import Audit from './audit';
|
||||
|
||||
class WebsocketHandler {
|
||||
private wss: WebSocket.Server | undefined;
|
||||
@@ -243,13 +244,18 @@ class WebsocketHandler {
|
||||
});
|
||||
}
|
||||
|
||||
handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
|
||||
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]) {
|
||||
async handleMempoolChange(newMempool: { [txid: string]: TransactionExtended },
|
||||
newTransactions: TransactionExtended[], deletedTransactions: TransactionExtended[]): Promise<void> {
|
||||
if (!this.wss) {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
mempoolBlocks.updateMempoolBlocks(newMempool);
|
||||
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
|
||||
await mempoolBlocks.makeBlockTemplates(newMempool, 8, null, true);
|
||||
} else {
|
||||
mempoolBlocks.updateMempoolBlocks(newMempool);
|
||||
}
|
||||
|
||||
const mBlocks = mempoolBlocks.getMempoolBlocks();
|
||||
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
|
||||
const mempoolInfo = memPool.getMempoolInfo();
|
||||
@@ -404,76 +410,71 @@ class WebsocketHandler {
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) {
|
||||
|
||||
async handleNewBlock(block: BlockExtended, txIds: string[], transactions: TransactionExtended[]): Promise<void> {
|
||||
if (!this.wss) {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
let mBlocks: undefined | MempoolBlock[];
|
||||
let mBlockDeltas: undefined | MempoolBlockDelta[];
|
||||
let matchRate = 0;
|
||||
const _memPool = memPool.getMempool();
|
||||
const _mempoolBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
|
||||
|
||||
if (_mempoolBlocks[0]) {
|
||||
const matches: string[] = [];
|
||||
const added: string[] = [];
|
||||
const missing: string[] = [];
|
||||
|
||||
for (const txId of txIds) {
|
||||
if (_mempoolBlocks[0].transactionIds.indexOf(txId) > -1) {
|
||||
matches.push(txId);
|
||||
} else {
|
||||
added.push(txId);
|
||||
}
|
||||
delete _memPool[txId];
|
||||
}
|
||||
|
||||
for (const txId of _mempoolBlocks[0].transactionIds) {
|
||||
if (matches.includes(txId) || added.includes(txId)) {
|
||||
continue;
|
||||
}
|
||||
missing.push(txId);
|
||||
}
|
||||
|
||||
matchRate = Math.round((Math.max(0, matches.length - missing.length - added.length) / txIds.length * 100) * 100) / 100;
|
||||
if (config.MEMPOOL.ADVANCED_GBT_AUDIT) {
|
||||
await mempoolBlocks.makeBlockTemplates(_memPool, 2);
|
||||
} else {
|
||||
mempoolBlocks.updateMempoolBlocks(_memPool);
|
||||
mBlocks = mempoolBlocks.getMempoolBlocks();
|
||||
mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
|
||||
}
|
||||
|
||||
if (Common.indexingEnabled()) {
|
||||
const stripped = _mempoolBlocks[0].transactions.map((tx) => {
|
||||
return {
|
||||
txid: tx.txid,
|
||||
vsize: tx.vsize,
|
||||
fee: tx.fee ? Math.round(tx.fee) : 0,
|
||||
value: tx.value,
|
||||
};
|
||||
});
|
||||
BlocksSummariesRepository.$saveSummary({
|
||||
height: block.height,
|
||||
template: {
|
||||
id: block.id,
|
||||
transactions: stripped
|
||||
}
|
||||
});
|
||||
if (Common.indexingEnabled() && memPool.isInSync()) {
|
||||
const projectedBlocks = mempoolBlocks.getMempoolBlocksWithTransactions();
|
||||
|
||||
BlocksAuditsRepository.$saveAudit({
|
||||
time: block.timestamp,
|
||||
height: block.height,
|
||||
hash: block.id,
|
||||
addedTxs: added,
|
||||
missingTxs: missing,
|
||||
matchRate: matchRate,
|
||||
});
|
||||
const { censored, added, fresh, score } = Audit.auditBlock(transactions, projectedBlocks, _memPool);
|
||||
const matchRate = Math.round(score * 100 * 100) / 100;
|
||||
|
||||
const stripped = projectedBlocks[0]?.transactions ? projectedBlocks[0].transactions.map((tx) => {
|
||||
return {
|
||||
txid: tx.txid,
|
||||
vsize: tx.vsize,
|
||||
fee: tx.fee ? Math.round(tx.fee) : 0,
|
||||
value: tx.value,
|
||||
};
|
||||
}) : [];
|
||||
|
||||
BlocksSummariesRepository.$saveTemplate({
|
||||
height: block.height,
|
||||
template: {
|
||||
id: block.id,
|
||||
transactions: stripped
|
||||
}
|
||||
});
|
||||
|
||||
BlocksAuditsRepository.$saveAudit({
|
||||
time: block.timestamp,
|
||||
height: block.height,
|
||||
hash: block.id,
|
||||
addedTxs: added,
|
||||
missingTxs: censored,
|
||||
freshTxs: fresh,
|
||||
matchRate: matchRate,
|
||||
});
|
||||
|
||||
if (block.extras) {
|
||||
block.extras.matchRate = matchRate;
|
||||
}
|
||||
}
|
||||
|
||||
if (block.extras) {
|
||||
block.extras.matchRate = matchRate;
|
||||
// Update mempool to remove transactions included in the new block
|
||||
for (const txId of txIds) {
|
||||
delete _memPool[txId];
|
||||
}
|
||||
|
||||
if (config.MEMPOOL.ADVANCED_GBT_MEMPOOL) {
|
||||
await mempoolBlocks.makeBlockTemplates(_memPool, 8, null, true);
|
||||
} else {
|
||||
mempoolBlocks.updateMempoolBlocks(_memPool);
|
||||
}
|
||||
const mBlocks = mempoolBlocks.getMempoolBlocks();
|
||||
const mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
|
||||
|
||||
const da = difficultyAdjustment.getDifficultyAdjustment();
|
||||
const fees = feeApi.getRecommendedFee();
|
||||
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
const configFile = require('../mempool-config.json');
|
||||
const configFromFile = require(
|
||||
process.env.MEMPOOL_CONFIG_FILE ? process.env.MEMPOOL_CONFIG_FILE : '../mempool-config.json'
|
||||
);
|
||||
|
||||
interface IConfig {
|
||||
MEMPOOL: {
|
||||
ENABLED: boolean;
|
||||
NETWORK: 'mainnet' | 'testnet' | 'signet' | 'liquid' | 'liquidtestnet';
|
||||
BACKEND: 'esplora' | 'electrum' | 'none';
|
||||
HTTP_PORT: number;
|
||||
@@ -24,6 +27,11 @@ interface IConfig {
|
||||
USER_AGENT: string;
|
||||
STDOUT_LOG_MIN_PRIORITY: 'emerg' | 'alert' | 'crit' | 'err' | 'warn' | 'notice' | 'info' | 'debug';
|
||||
AUTOMATIC_BLOCK_REINDEXING: boolean;
|
||||
POOLS_JSON_URL: string,
|
||||
POOLS_JSON_TREE_URL: string,
|
||||
ADVANCED_GBT_AUDIT: boolean;
|
||||
ADVANCED_GBT_MEMPOOL: boolean;
|
||||
TRANSACTION_INDEXING: boolean;
|
||||
};
|
||||
ESPLORA: {
|
||||
REST_API_URL: string;
|
||||
@@ -31,10 +39,19 @@ interface IConfig {
|
||||
LIGHTNING: {
|
||||
ENABLED: boolean;
|
||||
BACKEND: 'lnd' | 'cln' | 'ldk';
|
||||
TOPOLOGY_FOLDER: string;
|
||||
STATS_REFRESH_INTERVAL: number;
|
||||
GRAPH_REFRESH_INTERVAL: number;
|
||||
LOGGER_UPDATE_INTERVAL: number;
|
||||
FORENSICS_INTERVAL: number;
|
||||
FORENSICS_RATE_LIMIT: number;
|
||||
};
|
||||
LND: {
|
||||
TLS_CERT_PATH: string;
|
||||
MACAROON_PATH: string;
|
||||
REST_API_URL: string;
|
||||
};
|
||||
CLIGHTNING: {
|
||||
SOCKET: string;
|
||||
};
|
||||
ELECTRUM: {
|
||||
@@ -108,6 +125,7 @@ interface IConfig {
|
||||
|
||||
const defaults: IConfig = {
|
||||
'MEMPOOL': {
|
||||
'ENABLED': true,
|
||||
'NETWORK': 'mainnet',
|
||||
'BACKEND': 'none',
|
||||
'HTTP_PORT': 8999,
|
||||
@@ -130,6 +148,11 @@ const defaults: IConfig = {
|
||||
'USER_AGENT': 'mempool',
|
||||
'STDOUT_LOG_MIN_PRIORITY': 'debug',
|
||||
'AUTOMATIC_BLOCK_REINDEXING': false,
|
||||
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
|
||||
'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
|
||||
'ADVANCED_GBT_AUDIT': false,
|
||||
'ADVANCED_GBT_MEMPOOL': false,
|
||||
'TRANSACTION_INDEXING': false,
|
||||
},
|
||||
'ESPLORA': {
|
||||
'REST_API_URL': 'http://127.0.0.1:3000',
|
||||
@@ -177,12 +200,21 @@ const defaults: IConfig = {
|
||||
},
|
||||
'LIGHTNING': {
|
||||
'ENABLED': false,
|
||||
'BACKEND': 'lnd'
|
||||
'BACKEND': 'lnd',
|
||||
'TOPOLOGY_FOLDER': '',
|
||||
'STATS_REFRESH_INTERVAL': 600,
|
||||
'GRAPH_REFRESH_INTERVAL': 600,
|
||||
'LOGGER_UPDATE_INTERVAL': 30,
|
||||
'FORENSICS_INTERVAL': 43200,
|
||||
'FORENSICS_RATE_LIMIT': 20,
|
||||
},
|
||||
'LND': {
|
||||
'TLS_CERT_PATH': '',
|
||||
'MACAROON_PATH': '',
|
||||
'SOCKET': 'localhost:10009',
|
||||
'REST_API_URL': 'https://localhost:8080',
|
||||
},
|
||||
'CLIGHTNING': {
|
||||
'SOCKET': '',
|
||||
},
|
||||
'SOCKS5PROXY': {
|
||||
'ENABLED': false,
|
||||
@@ -204,11 +236,11 @@ const defaults: IConfig = {
|
||||
'BISQ_URL': 'https://bisq.markets/api',
|
||||
'BISQ_ONION': 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
|
||||
},
|
||||
"MAXMIND": {
|
||||
'MAXMIND': {
|
||||
'ENABLED': false,
|
||||
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
|
||||
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
|
||||
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
|
||||
'GEOLITE2_CITY': '/usr/local/share/GeoIP/GeoLite2-City.mmdb',
|
||||
'GEOLITE2_ASN': '/usr/local/share/GeoIP/GeoLite2-ASN.mmdb',
|
||||
'GEOIP2_ISP': '/usr/local/share/GeoIP/GeoIP2-ISP.mmdb'
|
||||
},
|
||||
};
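For reference, the new defaults above are overridden the usual way via mempool-config.json (or a file pointed to by the MEMPOOL_CONFIG_FILE environment variable, per the require() change earlier in this diff). A minimal illustrative override enabling the new GBT and Lightning forensics options might look like:

{
  "MEMPOOL": {
    "ADVANCED_GBT_AUDIT": true,
    "ADVANCED_GBT_MEMPOOL": true
  },
  "LIGHTNING": {
    "ENABLED": true,
    "BACKEND": "lnd",
    "FORENSICS_INTERVAL": 43200,
    "FORENSICS_RATE_LIMIT": 20
  }
}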
|
||||
|
||||
@@ -224,13 +256,14 @@ class Config implements IConfig {
|
||||
BISQ: IConfig['BISQ'];
|
||||
LIGHTNING: IConfig['LIGHTNING'];
|
||||
LND: IConfig['LND'];
|
||||
CLIGHTNING: IConfig['CLIGHTNING'];
|
||||
SOCKS5PROXY: IConfig['SOCKS5PROXY'];
|
||||
PRICE_DATA_SERVER: IConfig['PRICE_DATA_SERVER'];
|
||||
EXTERNAL_DATA_SERVER: IConfig['EXTERNAL_DATA_SERVER'];
|
||||
MAXMIND: IConfig['MAXMIND'];
|
||||
|
||||
constructor() {
|
||||
const configs = this.merge(configFile, defaults);
|
||||
const configs = this.merge(configFromFile, defaults);
|
||||
this.MEMPOOL = configs.MEMPOOL;
|
||||
this.ESPLORA = configs.ESPLORA;
|
||||
this.ELECTRUM = configs.ELECTRUM;
|
||||
@@ -242,6 +275,7 @@ class Config implements IConfig {
|
||||
this.BISQ = configs.BISQ;
|
||||
this.LIGHTNING = configs.LIGHTNING;
|
||||
this.LND = configs.LND;
|
||||
this.CLIGHTNING = configs.CLIGHTNING;
|
||||
this.SOCKS5PROXY = configs.SOCKS5PROXY;
|
||||
this.PRICE_DATA_SERVER = configs.PRICE_DATA_SERVER;
|
||||
this.EXTERNAL_DATA_SERVER = configs.EXTERNAL_DATA_SERVER;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import config from './config';
|
||||
import { createPool, Pool, PoolConnection } from 'mysql2/promise';
|
||||
import logger from './logger';
|
||||
import { PoolOptions } from 'mysql2/typings/mysql';
|
||||
import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } from 'mysql2/typings/mysql';
|
||||
|
||||
class DB {
|
||||
constructor() {
|
||||
@@ -28,7 +28,9 @@ import { PoolOptions } from 'mysql2/typings/mysql';
|
||||
}
|
||||
}
|
||||
|
||||
public async query(query, params?) {
|
||||
public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
|
||||
OkPacket[] | ResultSetHeader>(query, params?): Promise<[T, FieldPacket[]]>
|
||||
{
|
||||
this.checkDBFlag();
|
||||
const pool = await this.getPool();
|
||||
return pool.query(query, params);
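The added generic lets call sites type the returned rows instead of falling back to any. A small sketch (inside some async method; height and publicKey are placeholder variables), mirroring how the new repositories below use it:

import { ResultSetHeader, RowDataPacket } from 'mysql2';

// typed SELECT: rows is RowDataPacket[]
const [rows] = await DB.query<RowDataPacket[]>('SELECT height, hash FROM blocks WHERE height = ?', [height]);

// typed write: result.affectedRows is available without casting
const [result] = await DB.query<ResultSetHeader>('DELETE FROM nodes_sockets WHERE public_key = ?', [publicKey]);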
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import express from "express";
|
||||
import express from 'express';
|
||||
import { Application, Request, Response, NextFunction } from 'express';
|
||||
import * as http from 'http';
|
||||
import * as WebSocket from 'ws';
|
||||
@@ -28,12 +28,14 @@ import nodesRoutes from './api/explorer/nodes.routes';
|
||||
import channelsRoutes from './api/explorer/channels.routes';
|
||||
import generalLightningRoutes from './api/explorer/general.routes';
|
||||
import lightningStatsUpdater from './tasks/lightning/stats-updater.service';
|
||||
import nodeSyncService from './tasks/lightning/node-sync.service';
|
||||
import statisticsRoutes from "./api/statistics/statistics.routes";
|
||||
import miningRoutes from "./api/mining/mining-routes";
|
||||
import bisqRoutes from "./api/bisq/bisq.routes";
|
||||
import liquidRoutes from "./api/liquid/liquid.routes";
|
||||
import bitcoinRoutes from "./api/bitcoin/bitcoin.routes";
|
||||
import networkSyncService from './tasks/lightning/network-sync.service';
|
||||
import statisticsRoutes from './api/statistics/statistics.routes';
|
||||
import miningRoutes from './api/mining/mining-routes';
|
||||
import bisqRoutes from './api/bisq/bisq.routes';
|
||||
import liquidRoutes from './api/liquid/liquid.routes';
|
||||
import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
|
||||
import fundingTxFetcher from './tasks/lightning/sync-tasks/funding-tx-fetcher';
|
||||
import forensicsService from './tasks/lightning/forensics.service';
|
||||
|
||||
class Server {
|
||||
private wss: WebSocket.Server | undefined;
|
||||
@@ -73,7 +75,7 @@ class Server {
|
||||
}
|
||||
}
|
||||
|
||||
async startServer(worker = false) {
|
||||
async startServer(worker = false): Promise<void> {
|
||||
logger.notice(`Starting Mempool Server${worker ? ' (worker)' : ''}... (${backendInfo.getShortCommitHash()})`);
|
||||
|
||||
this.app
|
||||
@@ -82,7 +84,7 @@ class Server {
|
||||
next();
|
||||
})
|
||||
.use(express.urlencoded({ extended: true }))
|
||||
.use(express.text())
|
||||
.use(express.text({ type: ['text/plain', 'application/base64'] }))
|
||||
;
|
||||
|
||||
this.server = http.createServer(this.app);
|
||||
@@ -91,7 +93,9 @@ class Server {
|
||||
this.setUpWebsocketHandling();
|
||||
|
||||
await syncAssets.syncAssets$();
|
||||
diskCache.loadMempoolCache();
|
||||
if (config.MEMPOOL.ENABLED) {
|
||||
diskCache.loadMempoolCache();
|
||||
}
|
||||
|
||||
if (config.DATABASE.ENABLED) {
|
||||
await DB.checkDbConnection();
|
||||
@@ -126,7 +130,10 @@ class Server {
|
||||
fiatConversion.startService();
|
||||
|
||||
this.setUpHttpApiRoutes();
|
||||
this.runMainUpdateLoop();
|
||||
|
||||
if (config.MEMPOOL.ENABLED) {
|
||||
this.runMainUpdateLoop();
|
||||
}
|
||||
|
||||
if (config.BISQ.ENABLED) {
|
||||
bisq.startBisqService();
|
||||
@@ -136,8 +143,7 @@ class Server {
|
||||
}
|
||||
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
nodeSyncService.$startService()
|
||||
.then(() => lightningStatsUpdater.$startService());
|
||||
this.$runLightningBackend();
|
||||
}
|
||||
|
||||
this.server.listen(config.MEMPOOL.HTTP_PORT, () => {
|
||||
@@ -149,7 +155,7 @@ class Server {
|
||||
});
|
||||
}
|
||||
|
||||
async runMainUpdateLoop() {
|
||||
async runMainUpdateLoop(): Promise<void> {
|
||||
try {
|
||||
try {
|
||||
await memPool.$updateMemPoolInfo();
|
||||
@@ -183,7 +189,20 @@ class Server {
|
||||
}
|
||||
}
|
||||
|
||||
setUpWebsocketHandling() {
|
||||
async $runLightningBackend(): Promise<void> {
|
||||
try {
|
||||
await fundingTxFetcher.$init();
|
||||
await networkSyncService.$startService();
|
||||
await forensicsService.$startService();
|
||||
await lightningStatsUpdater.$startService();
|
||||
} catch(e) {
|
||||
logger.err(`Nodejs lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
await Common.sleep$(1000 * 60);
|
||||
this.$runLightningBackend();
|
||||
};
|
||||
}
|
||||
|
||||
setUpWebsocketHandling(): void {
|
||||
if (this.wss) {
|
||||
websocketHandler.setWebsocketServer(this.wss);
|
||||
}
|
||||
@@ -197,19 +216,21 @@ class Server {
|
||||
});
|
||||
}
|
||||
websocketHandler.setupConnectionHandling();
|
||||
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
|
||||
blocks.setNewBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
|
||||
memPool.setMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
|
||||
if (config.MEMPOOL.ENABLED) {
|
||||
statistics.setNewStatisticsEntryCallback(websocketHandler.handleNewStatistic.bind(websocketHandler));
|
||||
memPool.setAsyncMempoolChangedCallback(websocketHandler.handleMempoolChange.bind(websocketHandler));
|
||||
blocks.setNewAsyncBlockCallback(websocketHandler.handleNewBlock.bind(websocketHandler));
|
||||
}
|
||||
fiatConversion.setProgressChangedCallback(websocketHandler.handleNewConversionRates.bind(websocketHandler));
|
||||
loadingIndicators.setProgressChangedCallback(websocketHandler.handleLoadingChanged.bind(websocketHandler));
|
||||
}
|
||||
|
||||
setUpHttpApiRoutes() {
|
||||
|
||||
setUpHttpApiRoutes(): void {
|
||||
bitcoinRoutes.initRoutes(this.app);
|
||||
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED) {
|
||||
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && config.MEMPOOL.ENABLED) {
|
||||
statisticsRoutes.initRoutes(this.app);
|
||||
}
|
||||
if (Common.indexingEnabled()) {
|
||||
if (Common.indexingEnabled() && config.MEMPOOL.ENABLED) {
|
||||
miningRoutes.initRoutes(this.app);
|
||||
}
|
||||
if (config.BISQ.ENABLED) {
|
||||
@@ -226,4 +247,4 @@ class Server {
|
||||
}
|
||||
}
|
||||
|
||||
const server = new Server();
|
||||
((): Server => new Server())();
|
||||
|
||||
@@ -6,13 +6,12 @@ import logger from './logger';
|
||||
import HashratesRepository from './repositories/HashratesRepository';
|
||||
import bitcoinClient from './api/bitcoin/bitcoin-client';
|
||||
import priceUpdater from './tasks/price-updater';
|
||||
import PricesRepository from './repositories/PricesRepository';
|
||||
|
||||
class Indexer {
|
||||
runIndexer = true;
|
||||
indexerRunning = false;
|
||||
|
||||
constructor() {
|
||||
}
|
||||
tasksRunning: string[] = [];
|
||||
|
||||
public reindex() {
|
||||
if (Common.indexingEnabled()) {
|
||||
@@ -20,6 +19,28 @@ class Indexer {
|
||||
}
|
||||
}
|
||||
|
||||
  public async runSingleTask(task: 'blocksPrices'): Promise<void> {
    if (!Common.indexingEnabled()) {
      return;
    }

    if (task === 'blocksPrices' && !this.tasksRunning.includes(task)) {
      this.tasksRunning.push(task);
      const latestPriceId = await PricesRepository.$getLatestPriceId();
      if (priceUpdater.historyInserted === false || latestPriceId === null) {
        logger.debug(`Blocks prices indexer is waiting for the price updater to complete`);
        setTimeout(() => {
          this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
          this.runSingleTask('blocksPrices');
        }, 10000);
      } else {
        logger.debug(`Blocks prices indexer will run now`);
        await mining.$indexBlockPrices();
        this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
      }
    }
  }
|
||||
|
||||
public async $run() {
|
||||
if (!Common.indexingEnabled() || this.runIndexer === false ||
|
||||
this.indexerRunning === true || mempool.hasPriority()
|
||||
@@ -50,12 +71,13 @@ class Indexer {
|
||||
return;
|
||||
}
|
||||
|
||||
await mining.$indexBlockPrices();
|
||||
this.runSingleTask('blocksPrices');
|
||||
await mining.$indexDifficultyAdjustments();
|
||||
await this.$resetHashratesIndexingState(); // TODO - Remove this as it's not efficient
|
||||
await mining.$generateNetworkHashrateHistory();
|
||||
await mining.$generatePoolHashrateHistory();
|
||||
await blocks.$generateBlocksSummariesDatabase();
|
||||
await blocks.$generateCPFPDatabase();
|
||||
} catch (e) {
|
||||
this.indexerRunning = false;
|
||||
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
|
||||
@@ -74,7 +74,7 @@ class Logger {
|
||||
|
||||
private getNetwork(): string {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
return 'lightning';
|
||||
return config.MEMPOOL.NETWORK === 'mainnet' ? 'lightning' : `${config.MEMPOOL.NETWORK}-lightning`;
|
||||
}
|
||||
if (config.BISQ.ENABLED) {
|
||||
return 'bisq';
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { IEsploraApi } from './api/bitcoin/esplora-api.interface';
|
||||
import { HeapNode } from "./utils/pairing-heap";
|
||||
|
||||
export interface PoolTag {
|
||||
id: number; // mysql row id
|
||||
@@ -27,10 +28,16 @@ export interface BlockAudit {
|
||||
height: number,
|
||||
hash: string,
|
||||
missingTxs: string[],
|
||||
freshTxs: string[],
|
||||
addedTxs: string[],
|
||||
matchRate: number,
|
||||
}
|
||||
|
||||
export interface AuditScore {
|
||||
hash: string,
|
||||
matchRate?: number,
|
||||
}
|
||||
|
||||
export interface MempoolBlock {
|
||||
blockSize: number;
|
||||
blockVSize: number;
|
||||
@@ -65,17 +72,46 @@ export interface TransactionExtended extends IEsploraApi.Transaction {
|
||||
firstSeen?: number;
|
||||
effectiveFeePerVsize: number;
|
||||
ancestors?: Ancestor[];
|
||||
descendants?: Ancestor[];
|
||||
bestDescendant?: BestDescendant | null;
|
||||
cpfpChecked?: boolean;
|
||||
deleteAfter?: number;
|
||||
}
|
||||
|
||||
interface Ancestor {
|
||||
export interface AuditTransaction {
|
||||
txid: string;
|
||||
fee: number;
|
||||
size: number;
|
||||
weight: number;
|
||||
feePerVsize: number;
|
||||
vin: IEsploraApi.Vin[];
|
||||
relativesSet: boolean;
|
||||
ancestorMap: Map<string, AuditTransaction>;
|
||||
children: Set<AuditTransaction>;
|
||||
ancestorFee: number;
|
||||
ancestorWeight: number;
|
||||
score: number;
|
||||
used: boolean;
|
||||
modified: boolean;
|
||||
modifiedNode: HeapNode<AuditTransaction>;
|
||||
}
|
||||
|
||||
export interface Ancestor {
|
||||
txid: string;
|
||||
weight: number;
|
||||
fee: number;
|
||||
}
|
||||
|
||||
export interface TransactionSet {
|
||||
fee: number;
|
||||
weight: number;
|
||||
score: number;
|
||||
children?: Set<string>;
|
||||
available?: boolean;
|
||||
modified?: boolean;
|
||||
modifiedNode?: HeapNode<string>;
|
||||
}
|
||||
|
||||
interface BestDescendant {
|
||||
txid: string;
|
||||
weight: number;
|
||||
@@ -84,7 +120,9 @@ interface BestDescendant {
|
||||
|
||||
export interface CpfpInfo {
|
||||
ancestors: Ancestor[];
|
||||
bestDescendant: BestDescendant | null;
|
||||
bestDescendant?: BestDescendant | null;
|
||||
descendants?: Ancestor[];
|
||||
effectiveFeePerVsize?: number;
|
||||
}
|
||||
|
||||
export interface TransactionStripped {
|
||||
@@ -251,3 +289,41 @@ export interface RewardStats {
|
||||
totalFee: number;
|
||||
totalTx: number;
|
||||
}
|
||||
|
||||
export interface ITopNodesPerChannels {
|
||||
publicKey: string,
|
||||
alias: string,
|
||||
channels?: number,
|
||||
capacity: number,
|
||||
firstSeen?: number,
|
||||
updatedAt?: number,
|
||||
city?: any,
|
||||
country?: any,
|
||||
}
|
||||
|
||||
export interface ITopNodesPerCapacity {
|
||||
publicKey: string,
|
||||
alias: string,
|
||||
capacity: number,
|
||||
channels?: number,
|
||||
firstSeen?: number,
|
||||
updatedAt?: number,
|
||||
city?: any,
|
||||
country?: any,
|
||||
}
|
||||
|
||||
export interface INodesRanking {
|
||||
topByCapacity: ITopNodesPerCapacity[];
|
||||
topByChannels: ITopNodesPerChannels[];
|
||||
}
|
||||
|
||||
export interface IOldestNodes {
|
||||
publicKey: string,
|
||||
alias: string,
|
||||
firstSeen: number,
|
||||
channels?: number,
|
||||
capacity: number,
|
||||
updatedAt?: number,
|
||||
city?: any,
|
||||
country?: any,
|
||||
}
|
||||
@@ -1,14 +1,14 @@
|
||||
import transactionUtils from '../api/transaction-utils';
|
||||
import blocks from '../api/blocks';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { BlockAudit } from '../mempool.interfaces';
|
||||
import { BlockAudit, AuditScore } from '../mempool.interfaces';
|
||||
|
||||
class BlocksAuditRepositories {
|
||||
public async $saveAudit(audit: BlockAudit): Promise<void> {
|
||||
try {
|
||||
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, match_rate)
|
||||
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
|
||||
JSON.stringify(audit.addedTxs), audit.matchRate]);
|
||||
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, fresh_txs, match_rate)
|
||||
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
|
||||
JSON.stringify(audit.addedTxs), JSON.stringify(audit.freshTxs), audit.matchRate]);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
|
||||
@@ -52,24 +52,58 @@ class BlocksAuditRepositories {
|
||||
const [rows]: any[] = await DB.query(
|
||||
`SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
|
||||
blocks.weight, blocks.tx_count,
|
||||
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, match_rate as matchRate
|
||||
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, fresh_txs as freshTxs, match_rate as matchRate
|
||||
FROM blocks_audits
|
||||
JOIN blocks ON blocks.hash = blocks_audits.hash
|
||||
JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
|
||||
WHERE blocks_audits.hash = "${hash}"
|
||||
`);
|
||||
|
||||
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
|
||||
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
|
||||
rows[0].transactions = JSON.parse(rows[0].transactions);
|
||||
rows[0].template = JSON.parse(rows[0].template);
|
||||
|
||||
if (rows.length) {
|
||||
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
|
||||
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
|
||||
rows[0].freshTxs = JSON.parse(rows[0].freshTxs);
|
||||
rows[0].transactions = JSON.parse(rows[0].transactions);
|
||||
rows[0].template = JSON.parse(rows[0].template);
|
||||
|
||||
if (rows[0].transactions.length) {
|
||||
return rows[0];
|
||||
}
|
||||
}
|
||||
return null;
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getBlockAuditScore(hash: string): Promise<AuditScore> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(
|
||||
`SELECT hash, match_rate as matchRate
|
||||
FROM blocks_audits
|
||||
WHERE blocks_audits.hash = "${hash}"
|
||||
`);
|
||||
return rows[0];
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getBlockAuditScores(maxHeight: number, minHeight: number): Promise<AuditScore[]> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(
|
||||
`SELECT hash, match_rate as matchRate
|
||||
FROM blocks_audits
|
||||
WHERE blocks_audits.height BETWEEN ? AND ?
|
||||
`, [minHeight, maxHeight]);
|
||||
return rows;
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new BlocksAuditRepositories();
|
||||
|
||||
@@ -392,6 +392,36 @@ class BlocksRepository {
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Get the most recent block at or directly before a given timestamp
   * @param timestamp number unix time in seconds
   * @returns The height and timestamp of a block (timestamp might vary from the given timestamp)
   */
|
||||
public async $getBlockHeightFromTimestamp(
|
||||
timestamp: number,
|
||||
): Promise<{ height: number; hash: string; timestamp: number }> {
|
||||
try {
|
||||
// Get first block at or after the given timestamp
|
||||
const query = `SELECT height, hash, blockTimestamp as timestamp FROM blocks
|
||||
WHERE blockTimestamp <= FROM_UNIXTIME(?)
|
||||
ORDER BY blockTimestamp DESC
|
||||
LIMIT 1`;
|
||||
const params = [timestamp];
|
||||
const [rows]: any[][] = await DB.query(query, params);
|
||||
if (rows.length === 0) {
|
||||
throw new Error(`No block was found before timestamp ${timestamp}`);
|
||||
}
|
||||
|
||||
return rows[0];
|
||||
} catch (e) {
|
||||
logger.err(
|
||||
'Cannot get block height from timestamp from the db. Reason: ' +
|
||||
(e instanceof Error ? e.message : e),
|
||||
);
|
||||
throw e;
|
||||
}
|
||||
}
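A usage sketch (the timestamp value is illustrative):

// Returns the most recent indexed block at or before the given unix timestamp.
const { height, hash, timestamp } = await BlocksRepository.$getBlockHeightFromTimestamp(1654041600);
logger.debug(`closest block at or before that time: ${height} (${hash}), mined at ${timestamp}`);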
|
||||
|
||||
/**
|
||||
* Return blocks height
|
||||
*/
|
||||
@@ -632,6 +662,23 @@ class BlocksRepository {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of blocks that have not had CPFP data indexed
|
||||
*/
|
||||
public async $getCPFPUnindexedBlocks(): Promise<any[]> {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT height, hash FROM blocks WHERE cpfp_indexed = 0 ORDER BY height DESC`);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot fetch CPFP unindexed blocks. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $setCPFPIndexed(hash: string): Promise<void> {
|
||||
await DB.query(`UPDATE blocks SET cpfp_indexed = 1 WHERE hash = ?`, [hash]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the oldest block from a consecutive chain of block from the most recent one
|
||||
*/
|
||||
|
||||
@@ -17,19 +17,16 @@ class BlocksSummariesRepository {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
public async $saveSummary(params: { height: number, mined?: BlockSummary, template?: BlockSummary}) {
|
||||
const blockId = params.mined?.id ?? params.template?.id;
|
||||
public async $saveSummary(params: { height: number, mined?: BlockSummary}) {
|
||||
const blockId = params.mined?.id;
|
||||
try {
|
||||
const [dbSummary]: any[] = await DB.query(`SELECT * FROM blocks_summaries WHERE id = "${blockId}"`);
|
||||
if (dbSummary.length === 0) { // First insertion
|
||||
await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?, ?)`, [
|
||||
params.height, blockId, JSON.stringify(params.mined?.transactions ?? []), JSON.stringify(params.template?.transactions ?? [])
|
||||
]);
|
||||
} else if (params.mined !== undefined) { // Update mined block summary
|
||||
await DB.query(`UPDATE blocks_summaries SET transactions = ? WHERE id = "${params.mined.id}"`, [JSON.stringify(params.mined.transactions)]);
|
||||
} else if (params.template !== undefined) { // Update template block summary
|
||||
await DB.query(`UPDATE blocks_summaries SET template = ? WHERE id = "${params.template.id}"`, [JSON.stringify(params.template?.transactions)]);
|
||||
}
|
||||
const transactions = JSON.stringify(params.mined?.transactions || []);
|
||||
await DB.query(`
|
||||
INSERT INTO blocks_summaries (height, id, transactions, template)
|
||||
VALUE (?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
transactions = ?
|
||||
`, [params.height, blockId, transactions, '[]', transactions]);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
|
||||
@@ -40,6 +37,26 @@ class BlocksSummariesRepository {
|
||||
}
|
||||
}
|
||||
|
||||
public async $saveTemplate(params: { height: number, template: BlockSummary}) {
|
||||
const blockId = params.template?.id;
|
||||
try {
|
||||
const transactions = JSON.stringify(params.template?.transactions || []);
|
||||
await DB.query(`
|
||||
INSERT INTO blocks_summaries (height, id, transactions, template)
|
||||
VALUE (?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
template = ?
|
||||
`, [params.height, blockId, '[]', transactions, transactions]);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Cannot save block template for ${blockId} because it has already been indexed, ignoring`);
|
||||
} else {
|
||||
logger.debug(`Cannot save block template for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async $getIndexedSummariesId(): Promise<string[]> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);
|
||||
|
||||
43
backend/src/repositories/CpfpRepository.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { Ancestor } from '../mempool.interfaces';
|
||||
|
||||
class CpfpRepository {
|
||||
public async $saveCluster(height: number, txs: Ancestor[], effectiveFeePerVsize: number): Promise<void> {
|
||||
try {
|
||||
const txsJson = JSON.stringify(txs);
|
||||
await DB.query(
|
||||
`
|
||||
INSERT INTO cpfp_clusters(root, height, txs, fee_rate)
|
||||
VALUE (?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
height = ?,
|
||||
txs = ?,
|
||||
fee_rate = ?
|
||||
`,
|
||||
        [txs[0].txid, height, txsJson, effectiveFeePerVsize, height, txsJson, effectiveFeePerVsize] // one value per placeholder: 4 for the INSERT, 3 for the UPDATE
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot save cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $deleteClustersFrom(height: number): Promise<void> {
|
||||
logger.info(`Delete newer cpfp clusters from height ${height} from the database`);
|
||||
try {
|
||||
await DB.query(
|
||||
`
|
||||
DELETE from cpfp_clusters
|
||||
WHERE height >= ?
|
||||
`,
|
||||
[height]
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot delete cpfp clusters from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new CpfpRepository();
|
||||
67
backend/src/repositories/NodeRecordsRepository.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { ResultSetHeader, RowDataPacket } from 'mysql2';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
|
||||
export interface NodeRecord {
|
||||
publicKey: string; // node public key
|
||||
type: number; // TLV extension record type
|
||||
payload: string; // base64 record payload
|
||||
}
|
||||
|
||||
class NodesRecordsRepository {
|
||||
public async $saveRecord(record: NodeRecord): Promise<void> {
|
||||
try {
|
||||
const payloadBytes = Buffer.from(record.payload, 'base64');
|
||||
await DB.query(`
|
||||
INSERT INTO nodes_records(public_key, type, payload)
|
||||
VALUE (?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
payload = ?
|
||||
`, [record.publicKey, record.type, payloadBytes, payloadBytes]);
|
||||
} catch (e: any) {
|
||||
if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this
|
||||
logger.err(`Cannot save node record (${[record.publicKey, record.type, record.payload]}) into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
// We don't throw, not a critical issue if we miss some nodes records
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async $getRecordTypes(publicKey: string): Promise<any> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT type FROM nodes_records
|
||||
WHERE public_key = ?
|
||||
`;
|
||||
const [rows] = await DB.query<RowDataPacket[][]>(query, [publicKey]);
|
||||
return rows.map(row => row['type']);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot retrieve custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
public async $deleteUnusedRecords(publicKey: string, recordTypes: number[]): Promise<number> {
|
||||
try {
|
||||
let query;
|
||||
if (recordTypes.length) {
|
||||
query = `
|
||||
DELETE FROM nodes_records
|
||||
WHERE public_key = ?
|
||||
AND type NOT IN (${recordTypes.map(type => `${type}`).join(',')})
|
||||
`;
|
||||
} else {
|
||||
query = `
|
||||
DELETE FROM nodes_records
|
||||
WHERE public_key = ?
|
||||
`;
|
||||
}
|
||||
const [result] = await DB.query<ResultSetHeader>(query, [publicKey]);
|
||||
return result.affectedRows;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot delete unused custom records for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new NodesRecordsRepository();
|
||||
45
backend/src/repositories/NodesSocketsRepository.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
|
||||
export interface NodeSocket {
|
||||
publicKey: string;
|
||||
network: string | null;
|
||||
addr: string;
|
||||
}
|
||||
|
||||
class NodesSocketsRepository {
|
||||
public async $saveSocket(socket: NodeSocket): Promise<void> {
|
||||
try {
|
||||
await DB.query(`
|
||||
INSERT INTO nodes_sockets(public_key, socket, type)
|
||||
VALUE (?, ?, ?)
|
||||
`, [socket.publicKey, socket.addr, socket.network]);
|
||||
} catch (e: any) {
|
||||
if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this
|
||||
logger.err(`Cannot save node socket (${[socket.publicKey, socket.addr, socket.network]}) into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
// We don't throw, not a critical issue if we miss some nodes sockets
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async $deleteUnusedSockets(publicKey: string, addresses: string[]): Promise<number> {
|
||||
if (addresses.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
try {
|
||||
const query = `
|
||||
DELETE FROM nodes_sockets
|
||||
WHERE public_key = ?
|
||||
AND socket NOT IN (${addresses.map(id => `"${id}"`).join(',')})
|
||||
`;
|
||||
const [result] = await DB.query<ResultSetHeader>(query, [publicKey]);
|
||||
return result.affectedRows;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot delete unused sockets for ${publicKey} from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new NodesSocketsRepository();
|
||||
@@ -27,6 +27,11 @@ class PricesRepository {
|
||||
return oldestRow[0] ? oldestRow[0].time : 0;
|
||||
}
|
||||
|
||||
public async $getLatestPriceId(): Promise<number | null> {
|
||||
const [oldestRow] = await DB.query(`SELECT id from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
|
||||
return oldestRow[0] ? oldestRow[0].id : null;
|
||||
}
|
||||
|
||||
public async $getLatestPriceTime(): Promise<number> {
|
||||
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
|
||||
return oldestRow[0] ? oldestRow[0].time : 0;
|
||||
|
||||
77
backend/src/repositories/TransactionRepository.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { Ancestor, CpfpInfo } from '../mempool.interfaces';
|
||||
|
||||
interface CpfpSummary {
|
||||
txid: string;
|
||||
cluster: string;
|
||||
root: string;
|
||||
txs: Ancestor[];
|
||||
height: number;
|
||||
fee_rate: number;
|
||||
}
|
||||
|
||||
class TransactionRepository {
|
||||
public async $setCluster(txid: string, cluster: string): Promise<void> {
|
||||
try {
|
||||
await DB.query(
|
||||
`
|
||||
INSERT INTO transactions
|
||||
(
|
||||
txid,
|
||||
cluster
|
||||
)
|
||||
VALUE (?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
cluster = ?
|
||||
;`,
|
||||
[txid, cluster, cluster]
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot save transaction cpfp cluster into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getCpfpInfo(txid: string): Promise<CpfpInfo | void> {
|
||||
try {
|
||||
let query = `
|
||||
SELECT *
|
||||
FROM transactions
|
||||
LEFT JOIN cpfp_clusters AS cluster ON cluster.root = transactions.cluster
|
||||
WHERE transactions.txid = ?
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [txid]);
|
||||
if (rows.length) {
|
||||
rows[0].txs = JSON.parse(rows[0].txs) as Ancestor[];
|
||||
return this.convertCpfp(rows[0]);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Cannot get transaction cpfp info from db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
private convertCpfp(cpfp: CpfpSummary): CpfpInfo {
|
||||
const descendants: Ancestor[] = [];
|
||||
const ancestors: Ancestor[] = [];
|
||||
let matched = false;
|
||||
for (const tx of cpfp.txs) {
|
||||
if (tx.txid === cpfp.txid) {
|
||||
matched = true;
|
||||
} else if (!matched) {
|
||||
descendants.push(tx);
|
||||
} else {
|
||||
ancestors.push(tx);
|
||||
}
|
||||
}
|
||||
return {
|
||||
descendants,
|
||||
ancestors,
|
||||
effectiveFeePerVsize: cpfp.fee_rate
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default new TransactionRepository();
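Together with CpfpRepository above, the intended write path is to store each cluster once under its root txid and point every member transaction at that root. A minimal sketch with a hypothetical two-transaction cluster (which code performs these writes is not shown in this diff; blockHeight is assumed):

// Hypothetical CPFP cluster; the first entry is used as the cluster root.
const cluster: Ancestor[] = [
  { txid: 'aaaa...', fee: 500, weight: 800 },
  { txid: 'bbbb...', fee: 4500, weight: 600 },
];
const effectiveFeePerVsize = (500 + 4500) / ((800 + 600) / 4); // package feerate, ~14.3 sat/vB

await CpfpRepository.$saveCluster(blockHeight, cluster, effectiveFeePerVsize);
for (const tx of cluster) {
  await TransactionRepository.$setCluster(tx.txid, cluster[0].txid);
}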
|
||||
|
||||
457
backend/src/tasks/lightning/forensics.service.ts
Normal file
@@ -0,0 +1,457 @@
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import channelsApi from '../../api/explorer/channels.api';
|
||||
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
|
||||
import config from '../../config';
|
||||
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
|
||||
import { Common } from '../../api/common';
|
||||
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
|
||||
|
||||
const tempCacheSize = 10000;
|
||||
|
||||
class ForensicsService {
|
||||
loggerTimer = 0;
|
||||
closedChannelsScanBlock = 0;
|
||||
txCache: { [txid: string]: IEsploraApi.Transaction } = {};
|
||||
tempCached: string[] = [];
|
||||
|
||||
constructor() {}
|
||||
|
||||
public async $startService(): Promise<void> {
|
||||
logger.info('Starting lightning network forensics service');
|
||||
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
|
||||
await this.$runTasks();
|
||||
}
|
||||
|
||||
private async $runTasks(): Promise<void> {
|
||||
try {
|
||||
logger.info(`Running forensics scans`);
|
||||
|
||||
if (config.MEMPOOL.BACKEND === 'esplora') {
|
||||
await this.$runClosedChannelsForensics(false);
|
||||
await this.$runOpenedChannelsForensics();
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
logger.err('ForensicsService.$runTasks() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.FORENSICS_INTERVAL);
|
||||
}
|
||||
|
||||
/*
|
||||
1. Mutually closed
|
||||
2. Forced closed
|
||||
3. Forced closed with penalty
|
||||
|
||||
┌────────────────────────────────────┐ ┌────────────────────────────┐
|
||||
│ outputs contain revocation script? ├──yes──► force close w/ penalty = 3 │
|
||||
└──────────────┬─────────────────────┘ └────────────────────────────┘
|
||||
no
|
||||
┌──────────────▼──────────────────────────┐
|
||||
│ outputs contain other lightning script? ├──┐
|
||||
└──────────────┬──────────────────────────┘ │
|
||||
no yes
|
||||
┌──────────────▼─────────────┐ │
|
||||
│ sequence starts with 0x80 │ ┌────────▼────────┐
|
||||
│ and ├──────► force close = 2 │
|
||||
│ locktime starts with 0x20? │ └─────────────────┘
|
||||
└──────────────┬─────────────┘
|
||||
no
|
||||
┌─────────▼────────┐
|
||||
│ mutual close = 1 │
|
||||
└──────────────────┘
|
||||
*/
|
||||
|
||||
public async $runClosedChannelsForensics(onlyNewChannels: boolean = false): Promise<void> {
|
||||
if (config.MEMPOOL.BACKEND !== 'esplora') {
|
||||
return;
|
||||
}
|
||||
|
||||
let progress = 0;
|
||||
|
||||
try {
|
||||
logger.info(`Started running closed channel forensics...`);
|
||||
let channels;
|
||||
if (onlyNewChannels) {
|
||||
channels = await channelsApi.$getClosedChannelsWithoutReason();
|
||||
} else {
|
||||
channels = await channelsApi.$getUnresolvedClosedChannels();
|
||||
}
|
||||
|
||||
for (const channel of channels) {
|
||||
let reason = 0;
|
||||
let resolvedForceClose = false;
|
||||
// Only Esplora backend can retrieve spent transaction outputs
|
||||
const cached: string[] = [];
|
||||
try {
|
||||
let outspends: IEsploraApi.Outspend[] | undefined;
|
||||
try {
|
||||
outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
|
||||
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
|
||||
} catch (e) {
|
||||
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + channel.closing_transaction_id + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
|
||||
continue;
|
||||
}
|
||||
const lightningScriptReasons: number[] = [];
|
||||
for (const outspend of outspends) {
|
||||
if (outspend.spent && outspend.txid) {
|
||||
let spendingTx = await this.fetchTransaction(outspend.txid);
|
||||
if (!spendingTx) {
|
||||
continue;
|
||||
}
|
||||
cached.push(spendingTx.txid);
|
||||
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
|
||||
lightningScriptReasons.push(lightningScript);
|
||||
}
|
||||
}
|
||||
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
|
||||
if (filteredReasons.length) {
|
||||
if (filteredReasons.some((r) => r === 2 || r === 4)) {
|
||||
reason = 3;
|
||||
} else {
|
||||
reason = 2;
|
||||
resolvedForceClose = true;
|
||||
}
|
||||
} else {
|
||||
/*
|
||||
We can detect a commitment transaction (force close) by reading Sequence and Locktime
|
||||
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
|
||||
*/
|
||||
let closingTx = await this.fetchTransaction(channel.closing_transaction_id, true);
|
||||
if (!closingTx) {
|
||||
continue;
|
||||
}
|
||||
cached.push(closingTx.txid);
|
||||
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
|
||||
const locktimeHex: string = closingTx.locktime.toString(16);
|
||||
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
|
||||
reason = 2; // Here we can't be sure if it's a penalty or not
|
||||
} else {
|
||||
reason = 1;
|
||||
}
|
||||
}
|
||||
if (reason) {
|
||||
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
|
||||
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
|
||||
if (reason === 2 && resolvedForceClose) {
|
||||
await DB.query(`UPDATE channels SET closing_resolved = ? WHERE id = ?`, [true, channel.id]);
|
||||
}
|
||||
if (reason !== 2 || resolvedForceClose) {
|
||||
cached.forEach(txid => {
|
||||
delete this.txCache[txid];
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`$runClosedChannelsForensics() failed for channel ${channel.short_id}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating channel closed channel forensics ${progress}/${channels.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
logger.info(`Closed channels forensics scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
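Stripped of the caching and resolution bookkeeping above, the flowchart at the top of this file reduces to a three-way decision: a revocation script means penalty force close (3), any other lightning script or the BOLT-3 sequence/locktime pattern means force close (2), and everything else is a mutual close (1). A standalone restatement for clarity (illustrative only, not the production code):

// Illustrative only: inputs are precomputed booleans derived from the outspend scripts.
function classifyChannelClose(hasRevocationScript: boolean, hasOtherLightningScript: boolean, looksLikeCommitmentTx: boolean): number {
  if (hasRevocationScript) {
    return 3; // force close with penalty
  }
  if (hasOtherLightningScript || looksLikeCommitmentTx) {
    return 2; // force close (a penalty cannot be ruled out from the commitment tx alone)
  }
  return 1; // mutual close
}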
|
||||
|
||||
private findLightningScript(vin: IEsploraApi.Vin): number {
|
||||
const topElement = vin.witness?.length > 2 ? vin.witness[vin.witness.length - 2] : null;
|
||||
if (/^OP_IF OP_PUSHBYTES_33 \w{66} OP_ELSE OP_PUSH(NUM_\d+|BYTES_(1 \w{2}|2 \w{4})) OP_CSV OP_DROP OP_PUSHBYTES_33 \w{66} OP_ENDIF OP_CHECKSIG$/.test(vin.inner_witnessscript_asm)) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction-outputs
|
||||
if (topElement === '01') {
|
||||
// top element is '01' to get in the revocation path
|
||||
// 'Revoked Lightning Force Close';
|
||||
// Penalty force closed
|
||||
return 2;
|
||||
} else {
|
||||
// top element is '', this is a delayed to_local output
|
||||
// 'Lightning Force Close';
|
||||
return 3;
|
||||
}
|
||||
} else if (
|
||||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_NOTIF OP_DROP OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm) ||
|
||||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_IF OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_DROP OP_PUSHBYTES_3 \w{6} OP_CLTV OP_DROP OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm)
|
||||
) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#offered-htlc-outputs
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#received-htlc-outputs
|
||||
if (topElement?.length === 66) {
|
||||
// top element is a public key
|
||||
// 'Revoked Lightning HTLC'; Penalty force closed
|
||||
return 4;
|
||||
} else if (topElement) {
|
||||
// top element is a preimage
|
||||
// 'Lightning HTLC';
|
||||
return 5;
|
||||
} else {
|
||||
// top element is '', taking the expiry path of the script
|
||||
// 'Expired Lightning HTLC';
|
||||
return 6;
|
||||
}
|
||||
} else if (/^OP_PUSHBYTES_33 \w{66} OP_CHECKSIG OP_IFDUP OP_NOTIF OP_PUSHNUM_16 OP_CSV OP_ENDIF$/.test(vin.inner_witnessscript_asm)) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#to_local_anchor-and-to_remote_anchor-output-option_anchors
|
||||
if (topElement) {
|
||||
// top element is a signature
|
||||
// 'Lightning Anchor';
|
||||
return 7;
|
||||
} else {
|
||||
// top element is '', it has been swept after 16 blocks
|
||||
// 'Swept Lightning Anchor';
|
||||
return 8;
|
||||
}
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
// If a channel open tx spends funds from another channel transaction,
|
||||
// we can attribute that output to a specific counterparty
|
||||
private async $runOpenedChannelsForensics(): Promise<void> {
|
||||
const runTimer = Date.now();
|
||||
let progress = 0;
|
||||
|
||||
try {
|
||||
logger.info(`Started running open channel forensics...`);
|
||||
const channels = await channelsApi.$getChannelsWithoutSourceChecked();
|
||||
|
||||
for (const openChannel of channels) {
|
||||
let openTx = await this.fetchTransaction(openChannel.transaction_id, true);
|
||||
if (!openTx) {
|
||||
continue;
|
||||
}
|
||||
for (const input of openTx.vin) {
|
||||
const closeChannel = await channelsApi.$getChannelByClosingId(input.txid);
|
||||
if (closeChannel) {
|
||||
// this input directly spends a channel close output
|
||||
await this.$attributeChannelBalances(closeChannel, openChannel, input);
|
||||
} else {
|
||||
const prevOpenChannels = await channelsApi.$getChannelsByOpeningId(input.txid);
|
||||
if (prevOpenChannels?.length) {
|
||||
// this input spends a channel open change output
|
||||
for (const prevOpenChannel of prevOpenChannels) {
|
||||
await this.$attributeChannelBalances(prevOpenChannel, openChannel, input, null, null, true);
|
||||
}
|
||||
} else {
|
||||
// check if this input spends any swept channel close outputs
|
||||
await this.$attributeSweptChannelCloses(openChannel, input);
|
||||
}
|
||||
}
|
||||
}
|
||||
// calculate how much of the total input value is attributable to the channel open output
|
||||
openChannel.funding_ratio = openTx.vout[openChannel.transaction_vout].value / ((openTx.vout.reduce((sum, v) => sum + v.value, 0) || 1) + openTx.fee);
|
||||
// save changes to the opening channel, and mark it as checked
|
||||
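// the opening tx has a single input, so one party provided all of the channel funds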
if (openTx?.vin?.length === 1) {
|
||||
openChannel.single_funded = true;
|
||||
}
|
||||
if (openChannel.node1_funding_balance || openChannel.node2_funding_balance || openChannel.node1_closing_balance || openChannel.node2_closing_balance || openChannel.closed_by) {
|
||||
await channelsApi.$updateOpeningInfo(openChannel);
|
||||
}
|
||||
await channelsApi.$markChannelSourceChecked(openChannel.id);
|
||||
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating opened channel forensics ${progress}/${channels?.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
this.truncateTempCache();
|
||||
}
|
||||
if (Date.now() - runTimer > (config.LIGHTNING.FORENSICS_INTERVAL * 1000)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Open channels forensics scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$runOpenedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
|
||||
} finally {
|
||||
this.clearTempCache();
|
||||
}
|
||||
}
|
||||
|
||||
// Check if a channel open tx input spends the result of a swept channel close output
|
||||
private async $attributeSweptChannelCloses(openChannel: ILightningApi.Channel, input: IEsploraApi.Vin): Promise<void> {
|
||||
let sweepTx = await this.fetchTransaction(input.txid, true);
|
||||
if (!sweepTx) {
|
||||
logger.err(`couldn't find input transaction for channel forensics ${openChannel.channel_id} ${input.txid}`);
|
||||
return;
|
||||
}
|
||||
const openContribution = sweepTx.vout[input.vout].value;
|
||||
for (const sweepInput of sweepTx.vin) {
|
||||
const lnScriptType = this.findLightningScript(sweepInput);
|
||||
if (lnScriptType > 1) {
|
||||
const closeChannel = await channelsApi.$getChannelByClosingId(sweepInput.txid);
|
||||
if (closeChannel) {
|
||||
const initiator = (lnScriptType === 2 || lnScriptType === 4) ? 'remote' : (lnScriptType === 3 ? 'local' : null);
|
||||
await this.$attributeChannelBalances(closeChannel, openChannel, sweepInput, openContribution, initiator);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async $attributeChannelBalances(
|
||||
prevChannel, openChannel, input: IEsploraApi.Vin, openContribution: number | null = null,
|
||||
initiator: 'remote' | 'local' | null = null, linkedOpenings: boolean = false
|
||||
): Promise<void> {
|
||||
// figure out which node controls the input/output
|
||||
let openSide;
|
||||
let prevLocal;
|
||||
let prevRemote;
|
||||
let matched = false;
|
||||
let ambiguous = false; // if counterparties are the same in both channels, we can't tell them apart
|
||||
if (openChannel.node1_public_key === prevChannel.node1_public_key) {
|
||||
openSide = 1;
|
||||
prevLocal = 1;
|
||||
prevRemote = 2;
|
||||
matched = true;
|
||||
} else if (openChannel.node1_public_key === prevChannel.node2_public_key) {
|
||||
openSide = 1;
|
||||
prevLocal = 2;
|
||||
prevRemote = 1;
|
||||
matched = true;
|
||||
}
|
||||
if (openChannel.node2_public_key === prevChannel.node1_public_key) {
|
||||
openSide = 2;
|
||||
prevLocal = 1;
|
||||
prevRemote = 2;
|
||||
if (matched) {
|
||||
ambiguous = true;
|
||||
}
|
||||
matched = true;
|
||||
} else if (openChannel.node2_public_key === prevChannel.node2_public_key) {
|
||||
openSide = 2;
|
||||
prevLocal = 2;
|
||||
prevRemote = 1;
|
||||
if (matched) {
|
||||
ambiguous = true;
|
||||
}
|
||||
matched = true;
|
||||
}
|
||||
|
||||
if (matched && !ambiguous) {
|
||||
// fetch closing channel transaction and perform forensics on the outputs
|
||||
let prevChannelTx = await this.fetchTransaction(input.txid, true);
|
||||
let outspends: IEsploraApi.Outspend[] | undefined;
|
||||
try {
|
||||
outspends = await bitcoinApi.$getOutspends(input.txid);
|
||||
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
|
||||
} catch (e) {
|
||||
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + input.txid + '/outspends'}. Reason ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
if (!outspends || !prevChannelTx) {
|
||||
return;
|
||||
}
|
||||
if (!linkedOpenings) {
|
||||
if (!prevChannel.outputs || !prevChannel.outputs.length) {
|
||||
prevChannel.outputs = prevChannelTx.vout.map(vout => {
|
||||
return {
|
||||
type: 0,
|
||||
value: vout.value,
|
||||
};
|
||||
});
|
||||
}
|
||||
for (let i = 0; i < outspends?.length; i++) {
|
||||
const outspend = outspends[i];
|
||||
const output = prevChannel.outputs[i];
|
||||
if (outspend.spent && outspend.txid) {
|
||||
try {
|
||||
const spendingTx = await this.fetchTransaction(outspend.txid, true);
|
||||
if (spendingTx) {
|
||||
output.type = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + outspend.txid}. Reason ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
} else {
|
||||
output.type = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// attribute outputs to each counterparty, and sum up total known balances
|
||||
prevChannel.outputs[input.vout].node = prevLocal;
|
||||
const isPenalty = prevChannel.outputs.filter((out) => out.type === 2 || out.type === 4)?.length > 0;
|
||||
const normalOutput = [1,3].includes(prevChannel.outputs[input.vout].type);
|
||||
const mutualClose = ((prevChannel.status === 2 || prevChannel.status === 'closed') && prevChannel.closing_reason === 1);
|
||||
let localClosingBalance = 0;
|
||||
let remoteClosingBalance = 0;
|
||||
for (const output of prevChannel.outputs) {
|
||||
if (isPenalty) {
|
||||
// penalty close, so local node takes everything
|
||||
localClosingBalance += output.value;
|
||||
} else if (output.node) {
|
||||
// this output is deterministically linked to one of the counterparties
|
||||
if (output.node === prevLocal) {
|
||||
localClosingBalance += output.value;
|
||||
} else {
|
||||
remoteClosingBalance += output.value;
|
||||
}
|
||||
} else if (normalOutput && (output.type === 1 || output.type === 3 || (mutualClose && prevChannel.outputs.length === 2))) {
|
||||
// local node had one main output, therefore remote node takes the other
|
||||
remoteClosingBalance += output.value;
|
||||
}
|
||||
}
|
||||
prevChannel[`node${prevLocal}_closing_balance`] = localClosingBalance;
|
||||
prevChannel[`node${prevRemote}_closing_balance`] = remoteClosingBalance;
|
||||
prevChannel.closing_fee = prevChannelTx.fee;
|
||||
|
||||
if (initiator && !linkedOpenings) {
|
||||
const initiatorSide = initiator === 'remote' ? prevRemote : prevLocal;
|
||||
prevChannel.closed_by = prevChannel[`node${initiatorSide}_public_key`];
|
||||
}
|
||||
|
||||
// save changes to the closing channel
|
||||
await channelsApi.$updateClosingInfo(prevChannel);
|
||||
} else {
|
||||
if (prevChannelTx.vin.length <= 1) {
|
||||
prevChannel[`node${prevLocal}_funding_balance`] = prevChannel.capacity;
|
||||
prevChannel.single_funded = true;
|
||||
prevChannel.funding_ratio = 1;
|
||||
// save changes to the closing channel
|
||||
await channelsApi.$updateOpeningInfo(prevChannel);
|
||||
}
|
||||
}
|
||||
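// credit this input's contribution (the explicit openContribution, or the value of the spent output) to the matched side's funding balance on the new channel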
openChannel[`node${openSide}_funding_balance`] = openChannel[`node${openSide}_funding_balance`] + (openContribution || prevChannelTx?.vout[input.vout]?.value || 0);
|
||||
}
|
||||
}
|
||||
|
||||
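// Fetch a raw transaction via the configured backend, memoized in txCache; when `temp` is true the txid is also tracked in tempCached so it can later be evicted by truncateTempCache() / clearTempCache().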
async fetchTransaction(txid: string, temp: boolean = false): Promise<IEsploraApi.Transaction | null> {
|
||||
let tx = this.txCache[txid];
|
||||
if (!tx) {
|
||||
try {
|
||||
tx = await bitcoinApi.$getRawTransaction(txid);
|
||||
this.txCache[txid] = tx;
|
||||
if (temp) {
|
||||
this.tempCached.push(txid);
|
||||
}
|
||||
await Common.sleep$(config.LIGHTNING.FORENSICS_RATE_LIMIT);
|
||||
} catch (e) {
|
||||
logger.err(`Failed to call ${config.ESPLORA.REST_API_URL + '/tx/' + txid}. Reason ${e instanceof Error ? e.message : e}`);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
return tx;
|
||||
}
|
||||
|
||||
clearTempCache(): void {
|
||||
for (const txid of this.tempCached) {
|
||||
delete this.txCache[txid];
|
||||
}
|
||||
this.tempCached = [];
|
||||
}
|
||||
|
||||
truncateTempCache(): void {
|
||||
if (this.tempCached.length > tempCacheSize) {
|
||||
const removed = this.tempCached.splice(0, this.tempCached.length - tempCacheSize);
|
||||
for (const txid of removed) {
|
||||
delete this.txCache[txid];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new ForensicsService();
|
||||
310
backend/src/tasks/lightning/network-sync.service.ts
Normal file
@@ -0,0 +1,310 @@
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import channelsApi from '../../api/explorer/channels.api';
|
||||
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
|
||||
import config from '../../config';
|
||||
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
|
||||
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
|
||||
import { $lookupNodeLocation } from './sync-tasks/node-locations';
|
||||
import lightningApi from '../../api/lightning/lightning-api-factory';
|
||||
import nodesApi from '../../api/explorer/nodes.api';
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
import fundingTxFetcher from './sync-tasks/funding-tx-fetcher';
|
||||
import NodesSocketsRepository from '../../repositories/NodesSocketsRepository';
|
||||
import { Common } from '../../api/common';
|
||||
import blocks from '../../api/blocks';
|
||||
import NodeRecordsRepository from '../../repositories/NodeRecordsRepository';
|
||||
import forensicsService from './forensics.service';
|
||||
|
||||
class NetworkSyncService {
|
||||
loggerTimer = 0;
|
||||
closedChannelsScanBlock = 0;
|
||||
|
||||
constructor() {}
|
||||
|
||||
public async $startService(): Promise<void> {
|
||||
logger.info('Starting lightning network sync service');
|
||||
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
|
||||
await this.$runTasks();
|
||||
}
|
||||
|
||||
private async $runTasks(): Promise<void> {
|
||||
const taskStartTime = Date.now();
|
||||
try {
|
||||
logger.info(`Updating nodes and channels`);
|
||||
|
||||
const networkGraph = await lightningApi.$getNetworkGraph();
|
||||
if (networkGraph.nodes.length === 0 || networkGraph.edges.length === 0) {
|
||||
logger.info(`LN Network graph is empty, retrying in 10 seconds`);
|
||||
setTimeout(() => { this.$runTasks(); }, 10000);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.$updateNodesList(networkGraph.nodes);
|
||||
await this.$updateChannelsList(networkGraph.edges);
|
||||
await this.$deactivateChannelsWithoutActiveNodes();
|
||||
await this.$lookUpCreationDateFromChain();
|
||||
await this.$updateNodeFirstSeen();
|
||||
await this.$scanForClosedChannels();
|
||||
|
||||
if (config.MEMPOOL.BACKEND === 'esplora') {
|
||||
// run forensics on new channels only
|
||||
await forensicsService.$runClosedChannelsForensics(true);
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
logger.err('$runTasks() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
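// schedule the next run so that runs start roughly GRAPH_REFRESH_INTERVAL seconds apart, subtracting the time this run already took (with a 1 ms floor)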
setTimeout(() => { this.$runTasks(); }, Math.max(1, (1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL) - (Date.now() - taskStartTime)));
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the `nodes` table to reflect the current network graph state
|
||||
*/
|
||||
private async $updateNodesList(nodes: ILightningApi.Node[]): Promise<void> {
|
||||
let progress = 0;
|
||||
|
||||
let deletedSockets = 0;
|
||||
let deletedRecords = 0;
|
||||
const graphNodesPubkeys: string[] = [];
|
||||
for (const node of nodes) {
|
||||
const latestUpdated = await channelsApi.$getLatestChannelUpdateForNode(node.pub_key);
|
||||
node.last_update = Math.max(node.last_update, latestUpdated);
|
||||
|
||||
await nodesApi.$saveNode(node);
|
||||
graphNodesPubkeys.push(node.pub_key);
|
||||
++progress;
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating node ${progress}/${nodes.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
const addresses: string[] = [];
|
||||
for (const socket of node.addresses) {
|
||||
await NodesSocketsRepository.$saveSocket(Common.formatSocket(node.pub_key, socket));
|
||||
addresses.push(socket.addr);
|
||||
}
|
||||
deletedSockets += await NodesSocketsRepository.$deleteUnusedSockets(node.pub_key, addresses);
|
||||
|
||||
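// persist the node's custom records (keyed by numeric type) and prune any stored record types it no longer announces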
const oldRecordTypes = await NodeRecordsRepository.$getRecordTypes(node.pub_key);
|
||||
const customRecordTypes: number[] = [];
|
||||
for (const [type, payload] of Object.entries(node.custom_records || {})) {
|
||||
const numericalType = parseInt(type);
|
||||
await NodeRecordsRepository.$saveRecord({
|
||||
publicKey: node.pub_key,
|
||||
type: numericalType,
|
||||
payload,
|
||||
});
|
||||
customRecordTypes.push(numericalType);
|
||||
}
|
||||
if (oldRecordTypes.reduce((changed, type) => changed || customRecordTypes.indexOf(type) === -1, false)) {
|
||||
deletedRecords += await NodeRecordsRepository.$deleteUnusedRecords(node.pub_key, customRecordTypes);
|
||||
}
|
||||
}
|
||||
logger.info(`${progress} nodes updated. ${deletedSockets} sockets deleted. ${deletedRecords} custom records deleted.`);
|
||||
|
||||
// If a node is not present in the graph, mark it as inactive
|
||||
await nodesApi.$setNodesInactive(graphNodesPubkeys);
|
||||
|
||||
if (config.MAXMIND.ENABLED) {
|
||||
$lookupNodeLocation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the `channels` table to reflect the current network graph state
|
||||
*/
|
||||
private async $updateChannelsList(channels: ILightningApi.Channel[]): Promise<void> {
|
||||
try {
|
||||
const [closedChannelsRaw]: any[] = await DB.query(`SELECT id FROM channels WHERE status = 2`);
|
||||
const closedChannels = {};
|
||||
for (const closedChannel of closedChannelsRaw) {
|
||||
closedChannels[closedChannel.id] = true;
|
||||
}
|
||||
|
||||
let progress = 0;
|
||||
|
||||
const graphChannelsIds: string[] = [];
|
||||
for (const channel of channels) {
|
||||
if (!closedChannels[channel.channel_id]) {
|
||||
await channelsApi.$saveChannel(channel);
|
||||
}
|
||||
graphChannelsIds.push(channel.channel_id);
|
||||
++progress;
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating channel ${progress}/${channels.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`${progress} channels updated`);
|
||||
|
||||
// If a channel is not present in the graph, mark it as inactive
|
||||
await channelsApi.$setChannelsInactive(graphChannelsIds);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// This method looks up the creation date of the earliest channel of the node
|
||||
// and updates the node to that date in order to get the earliest first seen date
|
||||
private async $updateNodeFirstSeen(): Promise<void> {
|
||||
let progress = 0;
|
||||
let updated = 0;
|
||||
|
||||
try {
|
||||
const [nodes]: any[] = await DB.query(`
|
||||
SELECT nodes.public_key, UNIX_TIMESTAMP(nodes.first_seen) AS first_seen,
|
||||
(
|
||||
SELECT MIN(UNIX_TIMESTAMP(created))
|
||||
FROM channels
|
||||
WHERE channels.node1_public_key = nodes.public_key
|
||||
) AS created1,
|
||||
(
|
||||
SELECT MIN(UNIX_TIMESTAMP(created))
|
||||
FROM channels
|
||||
WHERE channels.node2_public_key = nodes.public_key
|
||||
) AS created2
|
||||
FROM nodes
|
||||
`);
|
||||
|
||||
for (const node of nodes) {
|
||||
const lowest = Math.min(
|
||||
node.created1 ?? Number.MAX_SAFE_INTEGER,
|
||||
node.created2 ?? Number.MAX_SAFE_INTEGER,
|
||||
node.first_seen ?? Number.MAX_SAFE_INTEGER
|
||||
);
|
||||
if (lowest < node.first_seen) {
|
||||
const query = `UPDATE nodes SET first_seen = FROM_UNIXTIME(?) WHERE public_key = ?`;
|
||||
const params = [lowest, node.public_key];
|
||||
await DB.query(query, params);
++updated;
|
||||
}
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating node first seen date ${progress}/${nodes.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
|
||||
}
|
||||
}
|
||||
logger.info(`Updated ${updated} node first seen dates`);
|
||||
} catch (e) {
|
||||
logger.err('$updateNodeFirstSeen() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $lookUpCreationDateFromChain(): Promise<void> {
|
||||
let progress = 0;
|
||||
|
||||
logger.info(`Running channel creation date lookup`);
|
||||
try {
|
||||
const channels = await channelsApi.$getChannelsWithoutCreatedDate();
|
||||
for (const channel of channels) {
|
||||
const transaction = await fundingTxFetcher.$fetchChannelOpenTx(channel.short_id);
|
||||
await DB.query(`
|
||||
UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.id = ?`,
|
||||
[transaction.timestamp, channel.id]
|
||||
);
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating channel creation date ${progress}/${channels.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
logger.info(`Updated ${channels.length} channels' creation date`);
|
||||
} catch (e) {
|
||||
logger.err('$lookUpCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If a channel does not have any active node linked to it, then also
|
||||
* mark that channel as inactive
|
||||
*/
|
||||
private async $deactivateChannelsWithoutActiveNodes(): Promise<void> {
|
||||
logger.info(`Finding channels whose nodes are offline`);
|
||||
|
||||
try {
|
||||
const result = await DB.query<ResultSetHeader>(`
|
||||
UPDATE channels
|
||||
SET status = 0
|
||||
WHERE channels.status = 1
|
||||
AND (
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM nodes
|
||||
WHERE nodes.public_key = channels.node1_public_key
|
||||
AND nodes.status = 1
|
||||
) = 0
|
||||
OR (
|
||||
SELECT COUNT(*)
|
||||
FROM nodes
|
||||
WHERE nodes.public_key = channels.node2_public_key
|
||||
AND nodes.status = 1
|
||||
) = 0)
|
||||
`);
|
||||
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
|
||||
} else {
|
||||
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$deactivateChannelsWithoutActiveNodes() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $scanForClosedChannels(): Promise<void> {
|
||||
if (this.closedChannelsScanBlock === blocks.getCurrentBlockHeight()) {
|
||||
logger.debug(`We've already scanned closed channels for this block, skipping.`);
|
||||
return;
|
||||
}
|
||||
|
||||
let progress = 0;
|
||||
|
||||
try {
|
||||
let log = `Starting closed channels scan`;
|
||||
if (this.closedChannelsScanBlock > 0) {
|
||||
log += `. Last scan was at block ${this.closedChannelsScanBlock}`;
|
||||
} else {
|
||||
log += ` for the first time`;
|
||||
}
|
||||
logger.info(log);
|
||||
|
||||
const channels = await channelsApi.$getChannelsByStatus([0, 1]);
|
||||
for (const channel of channels) {
|
||||
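// a channel is considered closed once its funding output (transaction_id:transaction_vout) has a confirmed spend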
const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
|
||||
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
|
||||
logger.debug('Marking channel: ' + channel.id + ' as closed.');
|
||||
await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
|
||||
[spendingTx.status.block_time, channel.id]);
|
||||
if (spendingTx.txid && !channel.closing_transaction_id) {
|
||||
await DB.query(`UPDATE channels SET closing_transaction_id = ? WHERE id = ?`, [spendingTx.txid, channel.id]);
|
||||
}
|
||||
}
|
||||
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Checking if channel has been closed ${progress}/${channels.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
this.closedChannelsScanBlock = blocks.getCurrentBlockHeight();
|
||||
logger.info(`Closed channels scan completed at block ${this.closedChannelsScanBlock}`);
|
||||
} catch (e) {
|
||||
logger.err('$scanForClosedChannels() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new NetworkSyncService();
|
||||
@@ -1,427 +0,0 @@
|
||||
import { chanNumber } from 'bolt07';
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import channelsApi from '../../api/explorer/channels.api';
|
||||
import bitcoinClient from '../../api/bitcoin/bitcoin-client';
|
||||
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
|
||||
import config from '../../config';
|
||||
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
|
||||
import lightningApi from '../../api/lightning/lightning-api-factory';
|
||||
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
|
||||
import { $lookupNodeLocation } from './sync-tasks/node-locations';
|
||||
|
||||
class NodeSyncService {
|
||||
constructor() {}
|
||||
|
||||
public async $startService() {
|
||||
logger.info('Starting node sync service');
|
||||
|
||||
await this.$runUpdater();
|
||||
|
||||
setInterval(async () => {
|
||||
await this.$runUpdater();
|
||||
}, 1000 * 60 * 60);
|
||||
}
|
||||
|
||||
private async $runUpdater() {
|
||||
try {
|
||||
logger.info(`Updating nodes and channels...`);
|
||||
|
||||
const networkGraph = await lightningApi.$getNetworkGraph();
|
||||
|
||||
for (const node of networkGraph.nodes) {
|
||||
await this.$saveNode(node);
|
||||
}
|
||||
logger.info(`Nodes updated.`);
|
||||
|
||||
if (config.MAXMIND.ENABLED) {
|
||||
await $lookupNodeLocation();
|
||||
}
|
||||
|
||||
const graphChannelsIds: string[] = [];
|
||||
for (const channel of networkGraph.channels) {
|
||||
await this.$saveChannel(channel);
|
||||
graphChannelsIds.push(channel.id);
|
||||
}
|
||||
await this.$setChannelsInactive(graphChannelsIds);
|
||||
|
||||
logger.info(`Channels updated.`);
|
||||
|
||||
await this.$findInactiveNodesAndChannels();
|
||||
await this.$lookUpCreationDateFromChain();
|
||||
await this.$updateNodeFirstSeen();
|
||||
await this.$scanForClosedChannels();
|
||||
if (config.MEMPOOL.BACKEND === 'esplora') {
|
||||
await this.$runClosedChannelsForensics();
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
logger.err('$updateNodes() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
// This method look up the creation date of the earliest channel of the node
|
||||
// and update the node to that date in order to get the earliest first seen date
|
||||
private async $updateNodeFirstSeen() {
|
||||
try {
|
||||
const [nodes]: any[] = await DB.query(`SELECT nodes.public_key, UNIX_TIMESTAMP(nodes.first_seen) AS first_seen, (SELECT UNIX_TIMESTAMP(created) FROM channels WHERE channels.node1_public_key = nodes.public_key ORDER BY created ASC LIMIT 1) AS created1, (SELECT UNIX_TIMESTAMP(created) FROM channels WHERE channels.node2_public_key = nodes.public_key ORDER BY created ASC LIMIT 1) AS created2 FROM nodes`);
|
||||
for (const node of nodes) {
|
||||
let lowest = 0;
|
||||
if (node.created1) {
|
||||
if (node.created2 && node.created2 < node.created1) {
|
||||
lowest = node.created2;
|
||||
} else {
|
||||
lowest = node.created1;
|
||||
}
|
||||
} else if (node.created2) {
|
||||
lowest = node.created2;
|
||||
}
|
||||
if (lowest && lowest < node.first_seen) {
|
||||
const query = `UPDATE nodes SET first_seen = FROM_UNIXTIME(?) WHERE public_key = ?`;
|
||||
const params = [lowest, node.public_key];
|
||||
await DB.query(query, params);
|
||||
}
|
||||
}
|
||||
logger.info(`Node first seen dates scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$updateNodeFirstSeen() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $lookUpCreationDateFromChain() {
|
||||
logger.info(`Running channel creation date lookup...`);
|
||||
try {
|
||||
const channels = await channelsApi.$getChannelsWithoutCreatedDate();
|
||||
for (const channel of channels) {
|
||||
const transaction = await bitcoinClient.getRawTransaction(channel.transaction_id, 1);
|
||||
await DB.query(`UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.id = ?`, [transaction.blocktime, channel.id]);
|
||||
}
|
||||
logger.info(`Channel creation dates scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$setCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
// Looking for channels whose nodes are inactive
|
||||
private async $findInactiveNodesAndChannels(): Promise<void> {
|
||||
logger.info(`Running inactive channels scan...`);
|
||||
|
||||
try {
|
||||
// @ts-ignore
|
||||
const [channels]: [ILightningApi.Channel[]] = await DB.query(`
|
||||
SELECT channels.id
|
||||
FROM channels
|
||||
WHERE channels.status = 1
|
||||
AND (
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM nodes
|
||||
WHERE nodes.public_key = channels.node1_public_key
|
||||
) = 0
|
||||
OR (
|
||||
SELECT COUNT(*)
|
||||
FROM nodes
|
||||
WHERE nodes.public_key = channels.node2_public_key
|
||||
) = 0)
|
||||
`);
|
||||
|
||||
for (const channel of channels) {
|
||||
await this.$updateChannelStatus(channel.id, 0);
|
||||
}
|
||||
logger.info(`Inactive channels scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$findInactiveNodesAndChannels() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $scanForClosedChannels(): Promise<void> {
|
||||
try {
|
||||
logger.info(`Starting closed channels scan...`);
|
||||
const channels = await channelsApi.$getChannelsByStatus(0);
|
||||
for (const channel of channels) {
|
||||
const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
|
||||
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
|
||||
logger.debug('Marking channel: ' + channel.id + ' as closed.');
|
||||
await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
|
||||
[spendingTx.status.block_time, channel.id]);
|
||||
if (spendingTx.txid && !channel.closing_transaction_id) {
|
||||
await DB.query(`UPDATE channels SET closing_transaction_id = ? WHERE id = ?`, [spendingTx.txid, channel.id]);
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.info(`Closed channels scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$scanForClosedChannels() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
1. Mutually closed
|
||||
2. Forced closed
|
||||
3. Forced closed with penalty
|
||||
*/
|
||||
|
||||
private async $runClosedChannelsForensics(): Promise<void> {
|
||||
if (!config.ESPLORA.REST_API_URL) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
logger.info(`Started running closed channel forensics...`);
|
||||
const channels = await channelsApi.$getClosedChannelsWithoutReason();
|
||||
for (const channel of channels) {
|
||||
let reason = 0;
|
||||
// Only Esplora backend can retrieve spent transaction outputs
|
||||
const outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
|
||||
const lightningScriptReasons: number[] = [];
|
||||
for (const outspend of outspends) {
|
||||
if (outspend.spent && outspend.txid) {
|
||||
const spendingTx = await bitcoinApi.$getRawTransaction(outspend.txid);
|
||||
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
|
||||
lightningScriptReasons.push(lightningScript);
|
||||
}
|
||||
}
|
||||
if (lightningScriptReasons.length === outspends.length
|
||||
&& lightningScriptReasons.filter((r) => r === 1).length === outspends.length) {
|
||||
reason = 1;
|
||||
} else {
|
||||
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
|
||||
if (filteredReasons.length) {
|
||||
if (filteredReasons.some((r) => r === 2 || r === 4)) {
|
||||
reason = 3;
|
||||
} else {
|
||||
reason = 2;
|
||||
}
|
||||
} else {
|
||||
/*
|
||||
We can detect a commitment transaction (force close) by reading Sequence and Locktime
|
||||
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
|
||||
*/
|
||||
const closingTx = await bitcoinApi.$getRawTransaction(channel.closing_transaction_id);
|
||||
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
|
||||
const locktimeHex: string = closingTx.locktime.toString(16);
|
||||
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
|
||||
reason = 2; // Here we can't be sure if it's a penalty or not
|
||||
} else {
|
||||
reason = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (reason) {
|
||||
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
|
||||
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
|
||||
}
|
||||
}
|
||||
logger.info(`Closed channels forensics scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private findLightningScript(vin: IEsploraApi.Vin): number {
|
||||
const topElement = vin.witness[vin.witness.length - 2];
|
||||
if (/^OP_IF OP_PUSHBYTES_33 \w{66} OP_ELSE OP_PUSH(NUM_\d+|BYTES_(1 \w{2}|2 \w{4})) OP_CSV OP_DROP OP_PUSHBYTES_33 \w{66} OP_ENDIF OP_CHECKSIG$/.test(vin.inner_witnessscript_asm)) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction-outputs
|
||||
if (topElement === '01') {
|
||||
// top element is '01' to get in the revocation path
|
||||
// 'Revoked Lightning Force Close';
|
||||
// Penalty force closed
|
||||
return 2;
|
||||
} else {
|
||||
// top element is '', this is a delayed to_local output
|
||||
// 'Lightning Force Close';
|
||||
return 3;
|
||||
}
|
||||
} else if (
|
||||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_NOTIF OP_DROP OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm) ||
|
||||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_IF OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_DROP OP_PUSHBYTES_3 \w{6} OP_CLTV OP_DROP OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm)
|
||||
) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#offered-htlc-outputs
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#received-htlc-outputs
|
||||
if (topElement.length === 66) {
|
||||
// top element is a public key
|
||||
// 'Revoked Lightning HTLC'; Penalty force closed
|
||||
return 4;
|
||||
} else if (topElement) {
|
||||
// top element is a preimage
|
||||
// 'Lightning HTLC';
|
||||
return 5;
|
||||
} else {
|
||||
// top element is '' to get in the expiry of the script
|
||||
// 'Expired Lightning HTLC';
|
||||
return 6;
|
||||
}
|
||||
} else if (/^OP_PUSHBYTES_33 \w{66} OP_CHECKSIG OP_IFDUP OP_NOTIF OP_PUSHNUM_16 OP_CSV OP_ENDIF$/.test(vin.inner_witnessscript_asm)) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#to_local_anchor-and-to_remote_anchor-output-option_anchors
|
||||
if (topElement) {
|
||||
// top element is a signature
|
||||
// 'Lightning Anchor';
|
||||
return 7;
|
||||
} else {
|
||||
// top element is '', it has been swept after 16 blocks
|
||||
// 'Swept Lightning Anchor';
|
||||
return 8;
|
||||
}
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
|
||||
private async $saveChannel(channel: ILightningApi.Channel): Promise<void> {
|
||||
const fromChannel = chanNumber({ channel: channel.id }).number;
|
||||
|
||||
try {
|
||||
const query = `INSERT INTO channels
|
||||
(
|
||||
id,
|
||||
short_id,
|
||||
capacity,
|
||||
transaction_id,
|
||||
transaction_vout,
|
||||
updated_at,
|
||||
status,
|
||||
node1_public_key,
|
||||
node1_base_fee_mtokens,
|
||||
node1_cltv_delta,
|
||||
node1_fee_rate,
|
||||
node1_is_disabled,
|
||||
node1_max_htlc_mtokens,
|
||||
node1_min_htlc_mtokens,
|
||||
node1_updated_at,
|
||||
node2_public_key,
|
||||
node2_base_fee_mtokens,
|
||||
node2_cltv_delta,
|
||||
node2_fee_rate,
|
||||
node2_is_disabled,
|
||||
node2_max_htlc_mtokens,
|
||||
node2_min_htlc_mtokens,
|
||||
node2_updated_at
|
||||
)
|
||||
VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
capacity = ?,
|
||||
updated_at = ?,
|
||||
status = 1,
|
||||
node1_public_key = ?,
|
||||
node1_base_fee_mtokens = ?,
|
||||
node1_cltv_delta = ?,
|
||||
node1_fee_rate = ?,
|
||||
node1_is_disabled = ?,
|
||||
node1_max_htlc_mtokens = ?,
|
||||
node1_min_htlc_mtokens = ?,
|
||||
node1_updated_at = ?,
|
||||
node2_public_key = ?,
|
||||
node2_base_fee_mtokens = ?,
|
||||
node2_cltv_delta = ?,
|
||||
node2_fee_rate = ?,
|
||||
node2_is_disabled = ?,
|
||||
node2_max_htlc_mtokens = ?,
|
||||
node2_min_htlc_mtokens = ?,
|
||||
node2_updated_at = ?
|
||||
;`;
|
||||
|
||||
await DB.query(query, [
|
||||
fromChannel,
|
||||
channel.id,
|
||||
channel.capacity,
|
||||
channel.transaction_id,
|
||||
channel.transaction_vout,
|
||||
channel.updated_at ? this.utcDateToMysql(channel.updated_at) : 0,
|
||||
channel.policies[0].public_key,
|
||||
channel.policies[0].base_fee_mtokens,
|
||||
channel.policies[0].cltv_delta,
|
||||
channel.policies[0].fee_rate,
|
||||
channel.policies[0].is_disabled,
|
||||
channel.policies[0].max_htlc_mtokens,
|
||||
channel.policies[0].min_htlc_mtokens,
|
||||
channel.policies[0].updated_at ? this.utcDateToMysql(channel.policies[0].updated_at) : 0,
|
||||
channel.policies[1].public_key,
|
||||
channel.policies[1].base_fee_mtokens,
|
||||
channel.policies[1].cltv_delta,
|
||||
channel.policies[1].fee_rate,
|
||||
channel.policies[1].is_disabled,
|
||||
channel.policies[1].max_htlc_mtokens,
|
||||
channel.policies[1].min_htlc_mtokens,
|
||||
channel.policies[1].updated_at ? this.utcDateToMysql(channel.policies[1].updated_at) : 0,
|
||||
channel.capacity,
|
||||
channel.updated_at ? this.utcDateToMysql(channel.updated_at) : 0,
|
||||
channel.policies[0].public_key,
|
||||
channel.policies[0].base_fee_mtokens,
|
||||
channel.policies[0].cltv_delta,
|
||||
channel.policies[0].fee_rate,
|
||||
channel.policies[0].is_disabled,
|
||||
channel.policies[0].max_htlc_mtokens,
|
||||
channel.policies[0].min_htlc_mtokens,
|
||||
channel.policies[0].updated_at ? this.utcDateToMysql(channel.policies[0].updated_at) : 0,
|
||||
channel.policies[1].public_key,
|
||||
channel.policies[1].base_fee_mtokens,
|
||||
channel.policies[1].cltv_delta,
|
||||
channel.policies[1].fee_rate,
|
||||
channel.policies[1].is_disabled,
|
||||
channel.policies[1].max_htlc_mtokens,
|
||||
channel.policies[1].min_htlc_mtokens,
|
||||
channel.policies[1].updated_at ? this.utcDateToMysql(channel.policies[1].updated_at) : 0,
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$saveChannel() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $updateChannelStatus(channelShortId: string, status: number): Promise<void> {
|
||||
try {
|
||||
await DB.query(`UPDATE channels SET status = ? WHERE id = ?`, [status, channelShortId]);
|
||||
} catch (e) {
|
||||
logger.err('$updateChannelStatus() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $setChannelsInactive(graphChannelsIds: string[]): Promise<void> {
|
||||
try {
|
||||
await DB.query(`
|
||||
UPDATE channels
|
||||
SET status = 0
|
||||
WHERE short_id NOT IN (
|
||||
${graphChannelsIds.map(id => `"${id}"`).join(',')}
|
||||
)
|
||||
AND status != 2
|
||||
`);
|
||||
} catch (e) {
|
||||
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $saveNode(node: ILightningApi.Node): Promise<void> {
|
||||
try {
|
||||
const updatedAt = node.updated_at ? this.utcDateToMysql(node.updated_at) : '0000-00-00 00:00:00';
|
||||
const sockets = node.sockets.join(',');
|
||||
const query = `INSERT INTO nodes(
|
||||
public_key,
|
||||
first_seen,
|
||||
updated_at,
|
||||
alias,
|
||||
color,
|
||||
sockets
|
||||
)
|
||||
VALUES (?, NOW(), ?, ?, ?, ?) ON DUPLICATE KEY UPDATE updated_at = ?, alias = ?, color = ?, sockets = ?;`;
|
||||
|
||||
await DB.query(query, [
|
||||
node.public_key,
|
||||
updatedAt,
|
||||
node.alias,
|
||||
node.color,
|
||||
sockets,
|
||||
updatedAt,
|
||||
node.alias,
|
||||
node.color,
|
||||
sockets,
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private utcDateToMysql(dateString: string): string {
|
||||
const d = new Date(Date.parse(dateString));
|
||||
return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
|
||||
}
|
||||
}
|
||||
|
||||
export default new NodeSyncService();
|
||||
@@ -1,350 +1,33 @@
|
||||
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import lightningApi from '../../api/lightning/lightning-api-factory';
|
||||
import channelsApi from '../../api/explorer/channels.api';
|
||||
import * as net from 'net';
|
||||
import LightningStatsImporter from './sync-tasks/stats-importer';
|
||||
import config from '../../config';
|
||||
import { Common } from '../../api/common';
|
||||
|
||||
class LightningStatsUpdater {
|
||||
hardCodedStartTime = '2018-01-12';
|
||||
|
||||
public async $startService() {
|
||||
public async $startService(): Promise<void> {
|
||||
logger.info('Starting Lightning Stats service');
|
||||
let isInSync = false;
|
||||
let error: any;
|
||||
try {
|
||||
error = null;
|
||||
isInSync = await this.$lightningIsSynced();
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
if (!isInSync) {
|
||||
if (error) {
|
||||
logger.warn('Was not able to fetch Lightning Node status: ' + (error instanceof Error ? error.message : error) + '. Retrying in 1 minute...');
|
||||
} else {
|
||||
logger.notice('The Lightning graph is not yet in sync. Retrying in 1 minute...');
|
||||
}
|
||||
setTimeout(() => this.$startService(), 60 * 1000);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.$populateHistoricalStatistics();
|
||||
await this.$populateHistoricalNodeStatistics();
|
||||
|
||||
setTimeout(() => {
|
||||
this.$runTasks();
|
||||
}, this.timeUntilMidnight());
|
||||
}
|
||||
|
||||
private timeUntilMidnight(): number {
|
||||
const date = new Date();
|
||||
this.setDateMidnight(date);
|
||||
date.setUTCHours(24);
|
||||
return date.getTime() - new Date().getTime();
|
||||
}
|
||||
|
||||
private setDateMidnight(date: Date): void {
|
||||
date.setUTCHours(0);
|
||||
date.setUTCMinutes(0);
|
||||
date.setUTCSeconds(0);
|
||||
date.setUTCMilliseconds(0);
|
||||
}
|
||||
|
||||
private async $lightningIsSynced(): Promise<boolean> {
|
||||
const nodeInfo = await lightningApi.$getInfo();
|
||||
return nodeInfo.is_synced_to_chain && nodeInfo.is_synced_to_graph;
|
||||
await this.$runTasks();
|
||||
LightningStatsImporter.$run();
|
||||
}
|
||||
|
||||
private async $runTasks(): Promise<void> {
|
||||
await this.$logLightningStatsDaily();
|
||||
await this.$logNodeStatsDaily();
|
||||
await this.$logStatsDaily();
|
||||
|
||||
setTimeout(() => {
|
||||
this.$runTasks();
|
||||
}, this.timeUntilMidnight());
|
||||
setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.STATS_REFRESH_INTERVAL);
|
||||
}
|
||||
|
||||
private async $logLightningStatsDaily() {
|
||||
try {
|
||||
logger.info(`Running lightning daily stats log...`);
|
||||
|
||||
const networkGraph = await lightningApi.$getNetworkGraph();
|
||||
let total_capacity = 0;
|
||||
for (const channel of networkGraph.channels) {
|
||||
if (channel.capacity) {
|
||||
total_capacity += channel.capacity;
|
||||
}
|
||||
}
|
||||
|
||||
let clearnetNodes = 0;
|
||||
let torNodes = 0;
|
||||
let unannouncedNodes = 0;
|
||||
for (const node of networkGraph.nodes) {
|
||||
let isUnnanounced = true;
|
||||
for (const socket of node.sockets) {
|
||||
const hasOnion = socket.indexOf('.onion') !== -1;
|
||||
if (hasOnion) {
|
||||
torNodes++;
|
||||
isUnnanounced = false;
|
||||
}
|
||||
const hasClearnet = [4, 6].includes(net.isIP(socket.split(':')[0]));
|
||||
if (hasClearnet) {
|
||||
clearnetNodes++;
|
||||
isUnnanounced = false;
|
||||
}
|
||||
}
|
||||
if (isUnnanounced) {
|
||||
unannouncedNodes++;
|
||||
}
|
||||
}
|
||||
|
||||
const channelStats = await channelsApi.$getChannelsStats();
|
||||
|
||||
const query = `INSERT INTO lightning_stats(
|
||||
added,
|
||||
channel_count,
|
||||
node_count,
|
||||
total_capacity,
|
||||
tor_nodes,
|
||||
clearnet_nodes,
|
||||
unannounced_nodes,
|
||||
avg_capacity,
|
||||
avg_fee_rate,
|
||||
avg_base_fee_mtokens,
|
||||
med_capacity,
|
||||
med_fee_rate,
|
||||
med_base_fee_mtokens
|
||||
)
|
||||
VALUES (NOW() - INTERVAL 1 DAY, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
|
||||
|
||||
await DB.query(query, [
|
||||
networkGraph.channels.length,
|
||||
networkGraph.nodes.length,
|
||||
total_capacity,
|
||||
torNodes,
|
||||
clearnetNodes,
|
||||
unannouncedNodes,
|
||||
channelStats.avgCapacity,
|
||||
channelStats.avgFeeRate,
|
||||
channelStats.avgBaseFee,
|
||||
channelStats.medianCapacity,
|
||||
channelStats.medianFeeRate,
|
||||
channelStats.medianBaseFee,
|
||||
]);
|
||||
logger.info(`Lightning daily stats done.`);
|
||||
} catch (e) {
|
||||
logger.err('$logLightningStatsDaily() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $logNodeStatsDaily() {
|
||||
try {
|
||||
logger.info(`Running daily node stats update...`);
|
||||
|
||||
const query = `
|
||||
SELECT nodes.public_key, c1.channels_count_left, c2.channels_count_right, c1.channels_capacity_left,
|
||||
c2.channels_capacity_right
|
||||
FROM nodes
|
||||
LEFT JOIN (
|
||||
SELECT node1_public_key, COUNT(id) AS channels_count_left, SUM(capacity) AS channels_capacity_left
|
||||
FROM channels
|
||||
WHERE channels.status = 1
|
||||
GROUP BY node1_public_key
|
||||
) c1 ON c1.node1_public_key = nodes.public_key
|
||||
LEFT JOIN (
|
||||
SELECT node2_public_key, COUNT(id) AS channels_count_right, SUM(capacity) AS channels_capacity_right
|
||||
FROM channels WHERE channels.status = 1 GROUP BY node2_public_key
|
||||
) c2 ON c2.node2_public_key = nodes.public_key
|
||||
`;
|
||||
|
||||
const [nodes]: any = await DB.query(query);
|
||||
|
||||
for (const node of nodes) {
|
||||
await DB.query(
|
||||
`INSERT INTO node_stats(public_key, added, capacity, channels) VALUES (?, NOW() - INTERVAL 1 DAY, ?, ?)`,
|
||||
[node.public_key, (parseInt(node.channels_capacity_left || 0, 10)) + (parseInt(node.channels_capacity_right || 0, 10)),
|
||||
node.channels_count_left + node.channels_count_right]);
|
||||
}
|
||||
logger.info('Daily node stats has updated.');
|
||||
} catch (e) {
|
||||
logger.err('$logNodeStatsDaily() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
// We only run this on first launch
|
||||
private async $populateHistoricalStatistics() {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT COUNT(*) FROM lightning_stats`);
|
||||
// Only run if table is empty
|
||||
if (rows[0]['COUNT(*)'] > 0) {
|
||||
return;
|
||||
}
|
||||
logger.info(`Running historical stats population...`);
|
||||
|
||||
const [channels]: any = await DB.query(`SELECT capacity, created, closing_date FROM channels ORDER BY created ASC`);
|
||||
const [nodes]: any = await DB.query(`SELECT first_seen, sockets FROM nodes ORDER BY first_seen ASC`);
|
||||
|
||||
const date: Date = new Date(this.hardCodedStartTime);
|
||||
const currentDate = new Date();
|
||||
this.setDateMidnight(currentDate);
|
||||
|
||||
while (date < currentDate) {
|
||||
let totalCapacity = 0;
|
||||
let channelsCount = 0;
|
||||
|
||||
for (const channel of channels) {
|
||||
if (new Date(channel.created) > date) {
|
||||
break;
|
||||
}
|
||||
if (channel.closing_date === null || new Date(channel.closing_date) > date) {
|
||||
totalCapacity += channel.capacity;
|
||||
channelsCount++;
|
||||
}
|
||||
}
|
||||
|
||||
let nodeCount = 0;
|
||||
let clearnetNodes = 0;
|
||||
let torNodes = 0;
|
||||
let unannouncedNodes = 0;
|
||||
|
||||
for (const node of nodes) {
|
||||
if (new Date(node.first_seen) > date) {
|
||||
break;
|
||||
}
|
||||
nodeCount++;
|
||||
|
||||
const sockets = node.sockets.split(',');
|
||||
let isUnnanounced = true;
|
||||
for (const socket of sockets) {
|
||||
const hasOnion = socket.indexOf('.onion') !== -1;
|
||||
if (hasOnion) {
|
||||
torNodes++;
|
||||
isUnnanounced = false;
|
||||
}
|
||||
const hasClearnet = [4, 6].includes(net.isIP(socket.substring(0, socket.lastIndexOf(':'))));
|
||||
if (hasClearnet) {
|
||||
clearnetNodes++;
|
||||
isUnnanounced = false;
|
||||
}
|
||||
}
|
||||
if (isUnnanounced) {
|
||||
unannouncedNodes++;
|
||||
}
|
||||
}
|
||||
|
||||
const query = `INSERT INTO lightning_stats(
|
||||
added,
|
||||
channel_count,
|
||||
node_count,
|
||||
total_capacity,
|
||||
tor_nodes,
|
||||
clearnet_nodes,
|
||||
unannounced_nodes,
|
||||
avg_capacity,
|
||||
avg_fee_rate,
|
||||
avg_base_fee_mtokens,
|
||||
med_capacity,
|
||||
med_fee_rate,
|
||||
med_base_fee_mtokens
|
||||
)
|
||||
VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`;
|
||||
|
||||
const rowTimestamp = date.getTime() / 1000; // Save timestamp for the row insertion down below
|
||||
|
||||
date.setUTCDate(date.getUTCDate() + 1);
|
||||
|
||||
// Last iteration, save channels stats
|
||||
const channelStats = (date >= currentDate ? await channelsApi.$getChannelsStats() : undefined);
|
||||
|
||||
await DB.query(query, [
|
||||
rowTimestamp,
|
||||
channelsCount,
|
||||
nodeCount,
|
||||
totalCapacity,
|
||||
torNodes,
|
||||
clearnetNodes,
|
||||
unannouncedNodes,
|
||||
channelStats?.avgCapacity ?? 0,
|
||||
channelStats?.avgFeeRate ?? 0,
|
||||
channelStats?.avgBaseFee ?? 0,
|
||||
channelStats?.medianCapacity ?? 0,
|
||||
channelStats?.medianFeeRate ?? 0,
|
||||
channelStats?.medianBaseFee ?? 0,
|
||||
]);
|
||||
}
|
||||
|
||||
logger.info('Historical stats populated.');
|
||||
} catch (e) {
|
||||
logger.err('$populateHistoricalData() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $populateHistoricalNodeStatistics() {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT COUNT(*) FROM node_stats`);
|
||||
// Only run if table is empty
|
||||
if (rows[0]['COUNT(*)'] > 0) {
|
||||
return;
|
||||
}
|
||||
logger.info(`Running historical node stats population...`);
|
||||
|
||||
const [nodes]: any = await DB.query(`SELECT public_key, first_seen, alias FROM nodes ORDER BY first_seen ASC`);
|
||||
|
||||
for (const node of nodes) {
|
||||
const [channels]: any = await DB.query(`SELECT capacity, created, closing_date FROM channels WHERE node1_public_key = ? OR node2_public_key = ? ORDER BY created ASC`, [node.public_key, node.public_key]);
|
||||
|
||||
const date: Date = new Date(this.hardCodedStartTime);
|
||||
const currentDate = new Date();
|
||||
this.setDateMidnight(currentDate);
|
||||
|
||||
let lastTotalCapacity = 0;
|
||||
let lastChannelsCount = 0;
|
||||
|
||||
while (date < currentDate) {
|
||||
let totalCapacity = 0;
|
||||
let channelsCount = 0;
|
||||
for (const channel of channels) {
|
||||
if (new Date(channel.created) > date) {
|
||||
break;
|
||||
}
|
||||
if (channel.closing_date !== null && new Date(channel.closing_date) < date) {
|
||||
date.setUTCDate(date.getUTCDate() + 1);
|
||||
continue;
|
||||
}
|
||||
totalCapacity += channel.capacity;
|
||||
channelsCount++;
|
||||
}
|
||||
|
||||
if (lastTotalCapacity === totalCapacity && lastChannelsCount === channelsCount) {
|
||||
date.setUTCDate(date.getUTCDate() + 1);
|
||||
continue;
|
||||
}
|
||||
|
||||
lastTotalCapacity = totalCapacity;
|
||||
lastChannelsCount = channelsCount;
|
||||
|
||||
const query = `INSERT INTO node_stats(
|
||||
public_key,
|
||||
added,
|
||||
capacity,
|
||||
channels
|
||||
)
|
||||
VALUES (?, FROM_UNIXTIME(?), ?, ?)`;
|
||||
|
||||
await DB.query(query, [
|
||||
node.public_key,
|
||||
date.getTime() / 1000,
|
||||
totalCapacity,
|
||||
channelsCount,
|
||||
]);
|
||||
date.setUTCDate(date.getUTCDate() + 1);
|
||||
}
|
||||
logger.debug('Updated node_stats for: ' + node.alias);
|
||||
}
|
||||
logger.info('Historical stats populated.');
|
||||
} catch (e) {
|
||||
logger.err('$populateHistoricalNodeData() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
/**
|
||||
* Update the latest entry for each node every config.LIGHTNING.STATS_REFRESH_INTERVAL seconds
|
||||
*/
|
||||
private async $logStatsDaily(): Promise<void> {
|
||||
const date = new Date();
|
||||
Common.setDateMidnight(date);
|
||||
const networkGraph = await lightningApi.$getNetworkGraph();
|
||||
await LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
|
||||
|
||||
logger.info(`Updated latest network stats`);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
116
backend/src/tasks/lightning/sync-tasks/funding-tx-fetcher.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import { existsSync, promises } from 'fs';
|
||||
import bitcoinClient from '../../../api/bitcoin/bitcoin-client';
|
||||
import { Common } from '../../../api/common';
|
||||
import config from '../../../config';
|
||||
import logger from '../../../logger';
|
||||
|
||||
const fsPromises = promises;
|
||||
|
||||
const BLOCKS_CACHE_MAX_SIZE = 100;
|
||||
const CACHE_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/ln-funding-txs-cache.json';
|
||||
|
||||
class FundingTxFetcher {
|
||||
private running = false;
|
||||
private blocksCache = {};
|
||||
private channelNewlyProcessed = 0;
|
||||
public fundingTxCache = {};
|
||||
|
||||
async $init(): Promise<void> {
|
||||
// Load funding tx disk cache
|
||||
if (Object.keys(this.fundingTxCache).length === 0 && existsSync(CACHE_FILE_NAME)) {
|
||||
try {
|
||||
this.fundingTxCache = JSON.parse(await fsPromises.readFile(CACHE_FILE_NAME, 'utf-8'));
|
||||
} catch (e) {
|
||||
logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`);
|
||||
this.fundingTxCache = {};
|
||||
}
|
||||
logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amounts from the disk cache`);
|
||||
}
|
||||
}
|
||||
|
||||
async $fetchChannelsFundingTxs(channelIds: string[]): Promise<void> {
|
||||
if (this.running) {
|
||||
return;
|
||||
}
|
||||
this.running = true;
|
||||
|
||||
const globalTimer = new Date().getTime() / 1000;
|
||||
let cacheTimer = new Date().getTime() / 1000;
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
let channelProcessed = 0;
|
||||
this.channelNewlyProcessed = 0;
|
||||
for (const channelId of channelIds) {
|
||||
await this.$fetchChannelOpenTx(channelId);
|
||||
++channelProcessed;
|
||||
|
||||
let elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
elapsedSeconds = Math.round((new Date().getTime() / 1000) - globalTimer);
|
||||
logger.info(`Indexing channels funding tx ${channelProcessed} of ${channelIds.length} ` +
|
||||
`(${Math.floor(channelProcessed / channelIds.length * 10000) / 100}%) | ` +
|
||||
`elapsed: ${elapsedSeconds} seconds`
|
||||
);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
elapsedSeconds = Math.round((new Date().getTime() / 1000) - cacheTimer);
|
||||
if (elapsedSeconds > 60) {
|
||||
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
|
||||
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
|
||||
cacheTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.channelNewlyProcessed > 0) {
|
||||
logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`);
|
||||
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
|
||||
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
|
||||
}
|
||||
|
||||
this.running = false;
|
||||
}
|
||||
|
||||
public async $fetchChannelOpenTx(channelId: string): Promise<{timestamp: number, txid: string, value: number}> {
|
||||
channelId = Common.channelIntegerIdToShortId(channelId);
|
||||
|
||||
if (this.fundingTxCache[channelId]) {
|
||||
return this.fundingTxCache[channelId];
|
||||
}
|
||||
|
||||
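// a short channel id has the form '<blockHeight>x<txIndex>x<outputIndex>', so the funding tx can be located by block height and transaction index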
const parts = channelId.split('x');
|
||||
const blockHeight = parts[0];
|
||||
const txIdx = parts[1];
|
||||
const outputIdx = parts[2];
|
||||
|
||||
let block = this.blocksCache[blockHeight];
|
||||
// Fetch it from core
|
||||
if (!block) {
|
||||
const blockHash = await bitcoinClient.getBlockHash(parseInt(blockHeight, 10));
|
||||
block = await bitcoinClient.getBlock(blockHash, 1);
|
||||
}
|
||||
this.blocksCache[block.height] = block;
|
||||
|
||||
const blocksCacheHashes = Object.keys(this.blocksCache).sort((a, b) => parseInt(a, 10) - parseInt(b, 10)); // block heights in ascending order (oldest first)
|
||||
if (blocksCacheHashes.length > BLOCKS_CACHE_MAX_SIZE) {
|
||||
for (let i = 0; i < 10; ++i) {
|
||||
delete this.blocksCache[blocksCacheHashes[i]];
|
||||
}
|
||||
}
|
||||
|
||||
const txid = block.tx[txIdx];
|
||||
const rawTx = await bitcoinClient.getRawTransaction(txid);
|
||||
const tx = await bitcoinClient.decodeRawTransaction(rawTx);
|
||||
|
||||
this.fundingTxCache[channelId] = {
|
||||
timestamp: block.time,
|
||||
txid: txid,
|
||||
value: tx.vout[outputIdx].value,
|
||||
};
|
||||
|
||||
++this.channelNewlyProcessed;
|
||||
|
||||
return this.fundingTxCache[channelId];
|
||||
}
|
||||
}
|
||||
|
||||
export default new FundingTxFetcher;
|
||||
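For context, `$fetchChannelOpenTx` above splits a lightning short channel id of the form `<blockHeight>x<txIndex>x<outputIndex>` into its three parts before hitting Bitcoin Core. A minimal sketch of that decomposition (the sample id below is hypothetical):

```
// Illustrative only: decompose a short channel id "<blockHeight>x<txIndex>x<outputIndex>",
// the same split used by $fetchChannelOpenTx above.
function parseShortChannelId(shortId: string): { blockHeight: number, txIndex: number, outputIndex: number } {
  const [blockHeight, txIndex, outputIndex] = shortId.split('x').map((part) => parseInt(part, 10));
  return { blockHeight, txIndex, outputIndex };
}

// Hypothetical channel: block 741876, transaction index 1837, output 0
console.log(parseShortChannelId('741876x1837x0'));
```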
@@ -4,38 +4,75 @@ import nodesApi from '../../../api/explorer/nodes.api';
|
||||
import config from '../../../config';
|
||||
import DB from '../../../database';
|
||||
import logger from '../../../logger';
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
import * as IPCheck from '../../../utils/ipcheck.js';
|
||||
import { Reader } from 'mmdb-lib';
|
||||
|
||||
export async function $lookupNodeLocation(): Promise<void> {
|
||||
logger.info(`Running node location updater using Maxmind...`);
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
let progress = 0;
|
||||
let nodesUpdated = 0;
|
||||
let geoNamesInserted = 0;
|
||||
|
||||
logger.info(`Running node location updater using Maxmind`);
|
||||
try {
|
||||
const nodes = await nodesApi.$getAllNodes();
|
||||
const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
|
||||
const lookupAsn = await maxmind.open<AsnResponse>(config.MAXMIND.GEOLITE2_ASN);
|
||||
const lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
|
||||
let lookupIsp: Reader<IspResponse> | null = null;
|
||||
try {
|
||||
lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
|
||||
} catch (e) { }
|
||||
|
||||
for (const node of nodes) {
|
||||
const sockets: string[] = node.sockets.split(',');
|
||||
for (const socket of sockets) {
|
||||
const ip = socket.substring(0, socket.lastIndexOf(':')).replace('[', '').replace(']', '');
|
||||
const hasClearnet = [4, 6].includes(net.isIP(ip));
|
||||
|
||||
if (hasClearnet && ip !== '127.0.1.1' && ip !== '127.0.0.1') {
|
||||
const city = lookupCity.get(ip);
|
||||
const asn = lookupAsn.get(ip);
|
||||
const isp = lookupIsp.get(ip);
|
||||
let isp: IspResponse | null = null;
|
||||
if (lookupIsp) {
|
||||
isp = lookupIsp.get(ip);
|
||||
}
|
||||
|
||||
let asOverwrite: any | undefined;
|
||||
if (asn && (IPCheck.match(ip, '170.75.160.0/20') || IPCheck.match(ip, '172.81.176.0/21'))) {
|
||||
asOverwrite = {
|
||||
asn: 394745,
|
||||
name: 'Lunanode',
|
||||
};
|
||||
}
|
||||
else if (asn && (IPCheck.match(ip, '50.7.0.0/16') || IPCheck.match(ip, '66.90.64.0/18'))) {
|
||||
asOverwrite = {
|
||||
asn: 30058,
|
||||
name: 'FDCservers.net',
|
||||
};
|
||||
}
|
||||
else if (asn && asn.autonomous_system_number === 174) {
|
||||
asOverwrite = {
|
||||
asn: 174,
|
||||
name: 'Cogent Communications',
|
||||
};
|
||||
}
|
||||
|
||||
if (city && (asn || isp)) {
|
||||
const query = `UPDATE nodes SET
|
||||
as_number = ?,
|
||||
city_id = ?,
|
||||
country_id = ?,
|
||||
subdivision_id = ?,
|
||||
longitude = ?,
|
||||
latitude = ?,
|
||||
accuracy_radius = ?
|
||||
WHERE public_key = ?`;
|
||||
const query = `
|
||||
UPDATE nodes SET
|
||||
as_number = ?,
|
||||
city_id = ?,
|
||||
country_id = ?,
|
||||
subdivision_id = ?,
|
||||
longitude = ?,
|
||||
latitude = ?,
|
||||
accuracy_radius = ?
|
||||
WHERE public_key = ?
|
||||
`;
|
||||
|
||||
const params = [
|
||||
isp?.autonomous_system_number ?? asn?.autonomous_system_number,
|
||||
asOverwrite?.asn ?? isp?.autonomous_system_number ?? asn?.autonomous_system_number,
|
||||
city.city?.geoname_id,
|
||||
city.country?.geoname_id,
|
||||
city.subdivisions ? city.subdivisions[0].geoname_id : null,
|
||||
@@ -44,54 +81,90 @@ export async function $lookupNodeLocation(): Promise<void> {
|
||||
city.location?.accuracy_radius,
|
||||
node.public_key
|
||||
];
|
||||
await DB.query(query, params);
|
||||
let result = await DB.query<ResultSetHeader>(query, params);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
++nodesUpdated;
|
||||
}
|
||||
|
||||
// Store Continent
|
||||
if (city.continent?.geoname_id) {
|
||||
await DB.query(
|
||||
// Store Continent
|
||||
if (city.continent?.geoname_id) {
|
||||
result = await DB.query<ResultSetHeader>(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'continent', ?)`,
|
||||
[city.continent?.geoname_id, JSON.stringify(city.continent?.names)]);
|
||||
}
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
++geoNamesInserted;
|
||||
}
|
||||
}
|
||||
|
||||
// Store Country
|
||||
if (city.country?.geoname_id) {
|
||||
await DB.query(
|
||||
// Store Country
|
||||
if (city.country?.geoname_id) {
|
||||
result = await DB.query<ResultSetHeader>(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country', ?)`,
|
||||
[city.country?.geoname_id, JSON.stringify(city.country?.names)]);
|
||||
}
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
++geoNamesInserted;
|
||||
}
|
||||
}
|
||||
|
||||
// Store Country ISO code
|
||||
if (city.country?.iso_code) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country_iso_code', ?)`,
|
||||
[city.country?.geoname_id, city.country?.iso_code]);
|
||||
result = await DB.query<ResultSetHeader>(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country_iso_code', ?)`,
|
||||
[city.country?.geoname_id, city.country?.iso_code]);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
++geoNamesInserted;
|
||||
}
|
||||
}
|
||||
|
||||
// Store Division
|
||||
if (city.subdivisions && city.subdivisions[0]) {
|
||||
await DB.query(
|
||||
result = await DB.query<ResultSetHeader>(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'division', ?)`,
|
||||
[city.subdivisions[0].geoname_id, JSON.stringify(city.subdivisions[0]?.names)]);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
++geoNamesInserted;
|
||||
}
|
||||
}
|
||||
|
||||
// Store City
|
||||
if (city.city?.geoname_id) {
|
||||
await DB.query(
|
||||
result = await DB.query<ResultSetHeader>(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'city', ?)`,
|
||||
[city.city?.geoname_id, JSON.stringify(city.city?.names)]);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
++geoNamesInserted;
|
||||
}
|
||||
}
|
||||
|
||||
// Store AS name
|
||||
if (isp?.autonomous_system_organization ?? asn?.autonomous_system_organization) {
|
||||
await DB.query(
|
||||
result = await DB.query<ResultSetHeader>(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'as_organization', ?)`,
|
||||
[isp?.autonomous_system_number ?? asn?.autonomous_system_number, JSON.stringify(isp?.isp ?? asn?.autonomous_system_organization)]);
|
||||
[
|
||||
asOverwrite?.asn ?? isp?.autonomous_system_number ?? asn?.autonomous_system_number,
|
||||
JSON.stringify(asOverwrite?.name ?? isp?.isp ?? asn?.autonomous_system_organization)
|
||||
]);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
++geoNamesInserted;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating node location data ${progress}/${nodes.length}`);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.info(`Node location data updated.`);
|
||||
|
||||
if (nodesUpdated > 0) {
|
||||
logger.info(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
|
||||
} else {
|
||||
logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
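For reference, a minimal sketch of the per-IP Maxmind lookups the updater performs, assuming the same `maxmind` package imported above; the database paths are placeholders for the `config.MAXMIND` values:

```
import maxmind, { CityResponse, AsnResponse } from 'maxmind';

// Minimal sketch: open the GeoLite2 databases once, then query each clearnet IP.
// Paths are placeholders; the backend reads them from config.MAXMIND.
async function lookupExample(ip: string): Promise<void> {
  const lookupCity = await maxmind.open<CityResponse>('/path/to/GeoLite2-City.mmdb');
  const lookupAsn = await maxmind.open<AsnResponse>('/path/to/GeoLite2-ASN.mmdb');

  const city = lookupCity.get(ip); // country / city / subdivisions / location
  const asn = lookupAsn.get(ip);   // autonomous_system_number / organization

  console.log(city?.country?.iso_code, asn?.autonomous_system_number);
}
```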
542
backend/src/tasks/lightning/sync-tasks/stats-importer.ts
Normal file
@@ -0,0 +1,542 @@
|
||||
import DB from '../../../database';
|
||||
import { promises } from 'fs';
|
||||
import logger from '../../../logger';
|
||||
import fundingTxFetcher from './funding-tx-fetcher';
|
||||
import config from '../../../config';
|
||||
import { ILightningApi } from '../../../api/lightning/lightning-api.interface';
|
||||
import { isIP } from 'net';
|
||||
import { Common } from '../../../api/common';
|
||||
import channelsApi from '../../../api/explorer/channels.api';
|
||||
import nodesApi from '../../../api/explorer/nodes.api';
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
|
||||
const fsPromises = promises;
|
||||
|
||||
class LightningStatsImporter {
|
||||
topologiesFolder = config.LIGHTNING.TOPOLOGY_FOLDER;
|
||||
|
||||
async $run(): Promise<void> {
|
||||
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
|
||||
logger.info('Caching funding txs for currently existing channels');
|
||||
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
|
||||
|
||||
if (config.MEMPOOL.NETWORK !== 'mainnet' || config.DATABASE.ENABLED === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.$importHistoricalLightningStats();
|
||||
await this.$cleanupIncorrectSnapshot();
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate LN network stats for one day
|
||||
*/
|
||||
public async computeNetworkStats(timestamp: number,
|
||||
networkGraph: ILightningApi.NetworkGraph, isHistorical: boolean = false): Promise<unknown> {
|
||||
// Node counts and network shares
|
||||
let clearnetNodes = 0;
|
||||
let torNodes = 0;
|
||||
let clearnetTorNodes = 0;
|
||||
let unannouncedNodes = 0;
|
||||
|
||||
const [nodesInDbRaw]: any[] = await DB.query(`SELECT public_key FROM nodes`);
|
||||
const nodesInDb = {};
|
||||
for (const node of nodesInDbRaw) {
|
||||
nodesInDb[node.public_key] = node;
|
||||
}
|
||||
|
||||
for (const node of networkGraph.nodes) {
|
||||
// If we don't know about this node, insert it in db
|
||||
if (isHistorical === true && !nodesInDb[node.pub_key]) {
|
||||
await nodesApi.$saveNode({
|
||||
last_update: node.last_update,
|
||||
pub_key: node.pub_key,
|
||||
alias: node.alias,
|
||||
addresses: node.addresses,
|
||||
color: node.color,
|
||||
features: node.features,
|
||||
});
|
||||
nodesInDb[node.pub_key] = node;
|
||||
} else {
|
||||
await nodesApi.$updateNodeSockets(node.pub_key, node.addresses);
|
||||
}
|
||||
|
||||
let hasOnion = false;
|
||||
let hasClearnet = false;
|
||||
let isUnannounced = true;
|
||||
|
||||
for (const socket of (node.addresses ?? [])) {
|
||||
if (!socket.network?.length && !socket.addr?.length) {
|
||||
continue;
|
||||
}
|
||||
hasOnion = hasOnion || ['torv2', 'torv3'].includes(socket.network) || socket.addr.indexOf('onion') !== -1 || socket.addr.indexOf('torv2') !== -1 || socket.addr.indexOf('torv3') !== -1;
|
||||
hasClearnet = hasClearnet || ['ipv4', 'ipv6'].includes(socket.network) || [4, 6].includes(isIP(socket.addr.split(':')[0])) || socket.addr.indexOf('ipv4') !== -1 || socket.addr.indexOf('ipv6') !== -1;
|
||||
}
|
||||
if (hasOnion && hasClearnet) {
|
||||
clearnetTorNodes++;
|
||||
isUnannounced = false;
|
||||
} else if (hasOnion) {
|
||||
torNodes++;
|
||||
isUnannounced = false;
|
||||
} else if (hasClearnet) {
|
||||
clearnetNodes++;
|
||||
isUnannounced = false;
|
||||
}
|
||||
if (isUnannounced) {
|
||||
unannouncedNodes++;
|
||||
}
|
||||
}
|
||||
|
||||
// Channels and node historical stats
|
||||
const nodeStats = {};
|
||||
let capacity = 0;
|
||||
let avgFeeRate = 0;
|
||||
let avgBaseFee = 0;
|
||||
const capacities: number[] = [];
|
||||
const feeRates: number[] = [];
|
||||
const baseFees: number[] = [];
|
||||
const alreadyCountedChannels = {};
|
||||
|
||||
const [channelsInDbRaw]: any[] = await DB.query(`SELECT short_id FROM channels`);
|
||||
const channelsInDb = {};
|
||||
for (const channel of channelsInDbRaw) {
|
||||
channelsInDb[channel.short_id] = channel;
|
||||
}
|
||||
|
||||
for (const channel of networkGraph.edges) {
|
||||
const short_id = Common.channelIntegerIdToShortId(channel.channel_id);
|
||||
|
||||
const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
|
||||
if (!tx) {
|
||||
logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date are unknown. Skipping channel.`);
|
||||
continue;
|
||||
}
|
||||
|
||||
// If we don't know about this channel, insert it in db
|
||||
if (isHistorical === true && !channelsInDb[short_id]) {
|
||||
await channelsApi.$saveChannel({
|
||||
channel_id: short_id,
|
||||
chan_point: `${tx.txid}:${short_id.split('x')[2]}`,
|
||||
last_update: channel.last_update,
|
||||
node1_pub: channel.node1_pub,
|
||||
node2_pub: channel.node2_pub,
|
||||
capacity: (tx.value * 100000000).toString(),
|
||||
node1_policy: null,
|
||||
node2_policy: null,
|
||||
}, 0);
|
||||
channelsInDb[channel.channel_id] = channel;
|
||||
}
|
||||
|
||||
if (!nodeStats[channel.node1_pub]) {
|
||||
nodeStats[channel.node1_pub] = {
|
||||
capacity: 0,
|
||||
channels: 0,
|
||||
};
|
||||
}
|
||||
if (!nodeStats[channel.node2_pub]) {
|
||||
nodeStats[channel.node2_pub] = {
|
||||
capacity: 0,
|
||||
channels: 0,
|
||||
};
|
||||
}
|
||||
|
||||
if (!alreadyCountedChannels[short_id]) {
|
||||
capacity += Math.round(tx.value * 100000000);
|
||||
capacities.push(Math.round(tx.value * 100000000));
|
||||
alreadyCountedChannels[short_id] = true;
|
||||
|
||||
nodeStats[channel.node1_pub].capacity += Math.round(tx.value * 100000000);
|
||||
nodeStats[channel.node1_pub].channels++;
|
||||
nodeStats[channel.node2_pub].capacity += Math.round(tx.value * 100000000);
|
||||
nodeStats[channel.node2_pub].channels++;
|
||||
}
|
||||
|
||||
if (isHistorical === false) { // Coming from the node
|
||||
for (const policy of [channel.node1_policy, channel.node2_policy]) {
|
||||
if (policy && parseInt(policy.fee_rate_milli_msat, 10) < 5000) {
|
||||
avgFeeRate += parseInt(policy.fee_rate_milli_msat, 10);
|
||||
feeRates.push(parseInt(policy.fee_rate_milli_msat, 10));
|
||||
}
|
||||
if (policy && parseInt(policy.fee_base_msat, 10) < 5000) {
|
||||
avgBaseFee += parseInt(policy.fee_base_msat, 10);
|
||||
baseFees.push(parseInt(policy.fee_base_msat, 10));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// @ts-ignore
|
||||
if (channel.node1_policy.fee_rate_milli_msat < 5000) {
|
||||
// @ts-ignore
|
||||
avgFeeRate += parseInt(channel.node1_policy.fee_rate_milli_msat, 10);
|
||||
// @ts-ignore
|
||||
feeRates.push(parseInt(channel.node1_policy.fee_rate_milli_msat, 10));
|
||||
}
|
||||
// @ts-ignore
|
||||
if (channel.node1_policy.fee_base_msat < 5000) {
|
||||
// @ts-ignore
|
||||
avgBaseFee += parseInt(channel.node1_policy.fee_base_msat, 10);
|
||||
// @ts-ignore
|
||||
baseFees.push(parseInt(channel.node1_policy.fee_base_msat, 10));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let medCapacity = 0;
|
||||
let medFeeRate = 0;
|
||||
let medBaseFee = 0;
|
||||
let avgCapacity = 0;
|
||||
|
||||
avgFeeRate /= Math.max(networkGraph.edges.length, 1);
|
||||
avgBaseFee /= Math.max(networkGraph.edges.length, 1);
|
||||
|
||||
if (capacities.length > 0) {
|
||||
medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
|
||||
avgCapacity = Math.round(capacity / Math.max(capacities.length, 1));
|
||||
}
|
||||
if (feeRates.length > 0) {
|
||||
medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
|
||||
}
|
||||
if (baseFees.length > 0) {
|
||||
medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
|
||||
}
|
||||
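As a quick sanity check on the aggregation above, here is a self-contained sketch of the same descending-sort median approximation and the average, using made-up channel capacities (values are illustrative only):

```
// Illustrative data only: same median approximation as above
// (sort descending, take the element at round(length / 2 - 1)).
const sampleCapacities = [100_000, 5_000_000, 250_000, 1_200_000, 750_000];
const sorted = [...sampleCapacities].sort((a, b) => b - a); // [5000000, 1200000, 750000, 250000, 100000]
const medCapacitySample = sorted[Math.round(sorted.length / 2 - 1)]; // index 2 -> 750000
const avgCapacitySample = Math.round(sampleCapacities.reduce((sum, c) => sum + c, 0) / sampleCapacities.length); // 1460000
console.log({ medCapacitySample, avgCapacitySample });
```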
|
||||
let query = `INSERT INTO lightning_stats(
|
||||
added,
|
||||
channel_count,
|
||||
node_count,
|
||||
total_capacity,
|
||||
tor_nodes,
|
||||
clearnet_nodes,
|
||||
unannounced_nodes,
|
||||
clearnet_tor_nodes,
|
||||
avg_capacity,
|
||||
avg_fee_rate,
|
||||
avg_base_fee_mtokens,
|
||||
med_capacity,
|
||||
med_fee_rate,
|
||||
med_base_fee_mtokens
|
||||
)
|
||||
VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
added = FROM_UNIXTIME(?),
|
||||
channel_count = ?,
|
||||
node_count = ?,
|
||||
total_capacity = ?,
|
||||
tor_nodes = ?,
|
||||
clearnet_nodes = ?,
|
||||
unannounced_nodes = ?,
|
||||
clearnet_tor_nodes = ?,
|
||||
avg_capacity = ?,
|
||||
avg_fee_rate = ?,
|
||||
avg_base_fee_mtokens = ?,
|
||||
med_capacity = ?,
|
||||
med_fee_rate = ?,
|
||||
med_base_fee_mtokens = ?
|
||||
`;
|
||||
|
||||
await DB.query(query, [
|
||||
timestamp,
|
||||
capacities.length,
|
||||
networkGraph.nodes.length,
|
||||
capacity,
|
||||
torNodes,
|
||||
clearnetNodes,
|
||||
unannouncedNodes,
|
||||
clearnetTorNodes,
|
||||
avgCapacity,
|
||||
avgFeeRate,
|
||||
avgBaseFee,
|
||||
medCapacity,
|
||||
medFeeRate,
|
||||
medBaseFee,
|
||||
timestamp,
|
||||
capacities.length,
|
||||
networkGraph.nodes.length,
|
||||
capacity,
|
||||
torNodes,
|
||||
clearnetNodes,
|
||||
unannouncedNodes,
|
||||
clearnetTorNodes,
|
||||
avgCapacity,
|
||||
avgFeeRate,
|
||||
avgBaseFee,
|
||||
medCapacity,
|
||||
medFeeRate,
|
||||
medBaseFee,
|
||||
]);
|
||||
|
||||
for (const public_key of Object.keys(nodeStats)) {
|
||||
query = `INSERT INTO node_stats(
|
||||
public_key,
|
||||
added,
|
||||
capacity,
|
||||
channels
|
||||
)
|
||||
VALUES (?, FROM_UNIXTIME(?), ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
added = FROM_UNIXTIME(?),
|
||||
capacity = ?,
|
||||
channels = ?
|
||||
`;
|
||||
|
||||
await DB.query(query, [
|
||||
public_key,
|
||||
timestamp,
|
||||
nodeStats[public_key].capacity,
|
||||
nodeStats[public_key].channels,
|
||||
timestamp,
|
||||
nodeStats[public_key].capacity,
|
||||
nodeStats[public_key].channels,
|
||||
]);
|
||||
|
||||
if (!isHistorical) {
|
||||
await DB.query(
|
||||
`UPDATE nodes SET capacity = ?, channels = ? WHERE public_key = ?`,
|
||||
[
|
||||
nodeStats[public_key].capacity,
|
||||
nodeStats[public_key].channels,
|
||||
public_key,
|
||||
]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
added: timestamp,
|
||||
node_count: networkGraph.nodes.length
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Import topology files LN historical data into the database
|
||||
*/
|
||||
async $importHistoricalLightningStats(): Promise<void> {
|
||||
logger.debug('Run the historical importer');
|
||||
try {
|
||||
let fileList: string[] = [];
|
||||
try {
|
||||
fileList = await fsPromises.readdir(this.topologiesFolder);
|
||||
} catch (e) {
|
||||
logger.err(`Unable to open topology folder at ${this.topologiesFolder}`);
|
||||
throw e;
|
||||
}
|
||||
// Insert history from the most recent to the oldest
|
||||
// This also puts the .json cached files first
|
||||
fileList.sort().reverse();
|
||||
|
||||
const [rows]: any[] = await DB.query(`
|
||||
SELECT UNIX_TIMESTAMP(added) AS added
|
||||
FROM lightning_stats
|
||||
ORDER BY added DESC
|
||||
`);
|
||||
const existingStatsTimestamps = {};
|
||||
for (const row of rows) {
|
||||
existingStatsTimestamps[row.added] = row;
|
||||
}
|
||||
|
||||
// For logging purposes
|
||||
let processed = 10;
|
||||
let totalProcessed = 0;
|
||||
let logStarted = false;
|
||||
|
||||
for (const filename of fileList) {
|
||||
processed++;
|
||||
|
||||
const timestamp = parseInt(filename.split('_')[1], 10);
|
||||
|
||||
// Stats exist already, don't calculate/insert them
|
||||
if (existingStatsTimestamps[timestamp] !== undefined) {
|
||||
totalProcessed++;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (filename.indexOf('topology_') === -1) {
|
||||
totalProcessed++;
|
||||
continue;
|
||||
}
|
||||
|
||||
logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
|
||||
let fileContent = '';
|
||||
try {
|
||||
fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
|
||||
} catch (e: any) {
|
||||
if (e.errno == -1) { // EISDIR - Ignore directories
|
||||
totalProcessed++;
|
||||
continue;
|
||||
}
|
||||
logger.err(`Unable to open ${this.topologiesFolder}/${filename}`);
|
||||
totalProcessed++;
|
||||
continue;
|
||||
}
|
||||
|
||||
let graph;
|
||||
try {
|
||||
graph = JSON.parse(fileContent);
|
||||
graph = await this.cleanupTopology(graph);
|
||||
} catch (e) {
|
||||
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
totalProcessed++;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (this.isIncorrectSnapshot(timestamp, graph)) {
|
||||
logger.debug(`Ignoring ${this.topologiesFolder}/${filename}, because we defined it as an incorrect snapshot`);
|
||||
++totalProcessed;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!logStarted) {
|
||||
logger.info(`Found a topology file that we did not import. Importing historical lightning stats now.`);
|
||||
logStarted = true;
|
||||
}
|
||||
|
||||
const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
|
||||
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);
|
||||
|
||||
totalProcessed++;
|
||||
|
||||
if (processed > 10) {
|
||||
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
|
||||
processed = 0;
|
||||
} else {
|
||||
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
|
||||
}
|
||||
await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
|
||||
const stat = await this.computeNetworkStats(timestamp, graph, true);
|
||||
|
||||
existingStatsTimestamps[timestamp] = stat;
|
||||
}
|
||||
|
||||
if (totalProcessed > 0) {
|
||||
logger.info(`Lightning network stats historical import completed`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`Lightning network stats historical import failed. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
}
|
||||
|
||||
cleanupTopology(graph): ILightningApi.NetworkGraph {
|
||||
const newGraph = {
|
||||
nodes: <ILightningApi.Node[]>[],
|
||||
edges: <ILightningApi.Channel[]>[],
|
||||
};
|
||||
|
||||
for (const node of graph.nodes) {
|
||||
const addressesParts = (node.addresses ?? '').split(',');
|
||||
const addresses: any[] = [];
|
||||
for (const address of addressesParts) {
|
||||
const formatted = Common.findSocketNetwork(address);
|
||||
addresses.push({
|
||||
network: formatted.network,
|
||||
addr: formatted.url
|
||||
});
|
||||
}
|
||||
|
||||
let rgb = node.rgb_color ?? '#000000';
|
||||
if (rgb.indexOf('#') === -1) {
|
||||
rgb = `#${rgb}`;
|
||||
}
|
||||
newGraph.nodes.push({
|
||||
last_update: node.timestamp ?? 0,
|
||||
pub_key: node.id ?? null,
|
||||
alias: node.alias ?? node.id.slice(0, 20),
|
||||
addresses: addresses,
|
||||
color: rgb,
|
||||
features: {},
|
||||
});
|
||||
}
|
||||
|
||||
for (const adjacency of graph.adjacency) {
|
||||
if (adjacency.length === 0) {
|
||||
continue;
|
||||
} else {
|
||||
for (const edge of adjacency) {
|
||||
newGraph.edges.push({
|
||||
channel_id: edge.scid,
|
||||
chan_point: '',
|
||||
last_update: edge.timestamp,
|
||||
node1_pub: edge.source ?? null,
|
||||
node2_pub: edge.destination ?? null,
|
||||
capacity: '0', // Will be fetched later
|
||||
node1_policy: {
|
||||
time_lock_delta: edge.cltv_expiry_delta,
|
||||
min_htlc: edge.htlc_minimim_msat,
|
||||
fee_base_msat: edge.fee_base_msat,
|
||||
fee_rate_milli_msat: edge.fee_proportional_millionths,
|
||||
max_htlc_msat: edge.htlc_maximum_msat,
|
||||
last_update: edge.timestamp,
|
||||
disabled: false,
|
||||
},
|
||||
node2_policy: null,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return newGraph;
|
||||
}
|
||||
|
||||
private isIncorrectSnapshot(timestamp, graph): boolean {
|
||||
if (timestamp >= 1549065600 /* 2019-02-02 */ && timestamp <= 1550620800 /* 2019-02-20 */ && graph.nodes.length < 2600) {
|
||||
return true;
|
||||
}
|
||||
if (timestamp >= 1552953600 /* 2019-03-19 */ && timestamp <= 1556323200 /* 2019-04-27 */ && graph.nodes.length < 4000) {
|
||||
return true;
|
||||
}
|
||||
if (timestamp >= 1557446400 /* 2019-05-10 */ && timestamp <= 1560470400 /* 2019-06-14 */ && graph.nodes.length < 4000) {
|
||||
return true;
|
||||
}
|
||||
if (timestamp >= 1561680000 /* 2019-06-28 */ && timestamp <= 1563148800 /* 2019-07-15 */ && graph.nodes.length < 4000) {
|
||||
return true;
|
||||
}
|
||||
if (timestamp >= 1571270400 /* 2019-10-17 */ && timestamp <= 1580601600 /* 2020-02-02 */ && graph.nodes.length < 4500) {
|
||||
return true;
|
||||
}
|
||||
if (timestamp >= 1591142400 /* 2020-06-03 */ && timestamp <= 1592006400 /* 2020-06-13 */ && graph.nodes.length < 5500) {
|
||||
return true;
|
||||
}
|
||||
if (timestamp >= 1632787200 /* 2021-09-28 */ && timestamp <= 1633564800 /* 2021-10-07 */ && graph.nodes.length < 13000) {
|
||||
return true;
|
||||
}
|
||||
if (timestamp >= 1634256000 /* 2021-10-15 */ && timestamp <= 1645401600 /* 2022-02-21 */ && graph.nodes.length < 17000) {
|
||||
return true;
|
||||
}
|
||||
if (timestamp >= 1654992000 /* 2022-06-12 */ && timestamp <= 1661472000 /* 2022-08-26 */ && graph.nodes.length < 14000) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private async $cleanupIncorrectSnapshot(): Promise<void> {
|
||||
// We do not run this one automatically because those stats are not supposed to be inserted in the first
|
||||
// place; the queries are kept here as a reminder that they must be run manually
|
||||
|
||||
// DELETE FROM lightning_stats
|
||||
// WHERE (
|
||||
// UNIX_TIMESTAMP(added) >= 1549065600 AND UNIX_TIMESTAMP(added) <= 1550620800 AND node_count < 2600 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1552953600 AND UNIX_TIMESTAMP(added) <= 1556323200 AND node_count < 4000 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1557446400 AND UNIX_TIMESTAMP(added) <= 1560470400 AND node_count < 4000 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1561680000 AND UNIX_TIMESTAMP(added) <= 1563148800 AND node_count < 4000 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1571270400 AND UNIX_TIMESTAMP(added) <= 1580601600 AND node_count < 4500 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1591142400 AND UNIX_TIMESTAMP(added) <= 1592006400 AND node_count < 5500 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1632787200 AND UNIX_TIMESTAMP(added) <= 1633564800 AND node_count < 13000 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1634256000 AND UNIX_TIMESTAMP(added) <= 1645401600 AND node_count < 17000 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1654992000 AND UNIX_TIMESTAMP(added) <= 1661472000 AND node_count < 14000
|
||||
// )
|
||||
|
||||
// DELETE FROM node_stats
|
||||
// WHERE (
|
||||
// UNIX_TIMESTAMP(added) >= 1549065600 AND UNIX_TIMESTAMP(added) <= 1550620800 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1552953600 AND UNIX_TIMESTAMP(added) <= 1556323200 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1557446400 AND UNIX_TIMESTAMP(added) <= 1560470400 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1561680000 AND UNIX_TIMESTAMP(added) <= 1563148800 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1571270400 AND UNIX_TIMESTAMP(added) <= 1580601600 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1591142400 AND UNIX_TIMESTAMP(added) <= 1592006400 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1632787200 AND UNIX_TIMESTAMP(added) <= 1633564800 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1634256000 AND UNIX_TIMESTAMP(added) <= 1645401600 OR
|
||||
// UNIX_TIMESTAMP(added) >= 1654992000 AND UNIX_TIMESTAMP(added) <= 1661472000
|
||||
// )
|
||||
}
|
||||
}
|
||||
|
||||
export default new LightningStatsImporter;
|
||||
@@ -12,14 +12,11 @@ import * as https from 'https';
|
||||
*/
|
||||
class PoolsUpdater {
|
||||
lastRun: number = 0;
|
||||
currentSha: any = undefined;
|
||||
poolsUrl: string = 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json';
|
||||
treeUrl: string = 'https://api.github.com/repos/mempool/mining-pools/git/trees/master';
|
||||
currentSha: string | undefined = undefined;
|
||||
poolsUrl: string = config.MEMPOOL.POOLS_JSON_URL;
|
||||
treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
public async updatePoolsJson() {
|
||||
public async updatePoolsJson(): Promise<void> {
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
|
||||
return;
|
||||
}
|
||||
@@ -77,7 +74,7 @@ class PoolsUpdater {
|
||||
/**
|
||||
* Fetch our latest pools.json sha from the db
|
||||
*/
|
||||
private async updateDBSha(githubSha: string) {
|
||||
private async updateDBSha(githubSha: string): Promise<void> {
|
||||
this.currentSha = githubSha;
|
||||
if (config.DATABASE.ENABLED === true) {
|
||||
try {
|
||||
|
||||
@@ -1,43 +0,0 @@
|
||||
import { query } from '../../utils/axios-query';
|
||||
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';
|
||||
|
||||
class FtxApi implements PriceFeed {
|
||||
public name: string = 'FTX';
|
||||
public currencies: string[] = ['USD', 'BRZ', 'EUR', 'JPY', 'AUD'];
|
||||
|
||||
public url: string = 'https://ftx.com/api/markets/BTC/';
|
||||
public urlHist: string = 'https://ftx.com/api/markets/BTC/{CURRENCY}/candles?resolution={GRANULARITY}';
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
public async $fetchPrice(currency): Promise<number> {
|
||||
const response = await query(this.url + currency);
|
||||
return response ? parseInt(response['result']['last'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
if (this.currencies.includes(currency) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '3600' : '86400').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response['result'] : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
const time = Math.round(price['time'] / 1000);
|
||||
if (priceHistory[time] === undefined) {
|
||||
priceHistory[time] = priceUpdater.getEmptyPricesObj();
|
||||
}
|
||||
priceHistory[time][currency] = price['close'];
|
||||
}
|
||||
}
|
||||
|
||||
return priceHistory;
|
||||
}
|
||||
}
|
||||
|
||||
export default FtxApi;
|
||||
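Since the FTX feed above is removed, here is a hedged sketch of what a replacement `PriceFeed` implementation could look like, assuming the interface has the same shape the deleted class relied on (`$fetchPrice` / `$fetchRecentPrice`); the endpoint URL and response fields are placeholders, not a real exchange API:

```
import { query } from '../../utils/axios-query';
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';

// Hypothetical feed for illustration only.
class ExamplePriceApi implements PriceFeed {
  public name = 'Example';
  public currencies: string[] = ['USD'];
  public url = 'https://example.com/api/btc-price/';
  public urlHist = 'https://example.com/api/btc-price/{CURRENCY}/history?granularity={GRANULARITY}';

  public async $fetchPrice(currency): Promise<number> {
    const response = await query(this.url + currency);
    return response ? parseInt(response['price'], 10) : -1; // 'price' is a placeholder field
  }

  public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
    const priceHistory: PriceHistory = {};
    // A real feed would fill priceHistory[time] = priceUpdater.getEmptyPricesObj() per timestamp.
    return priceHistory;
  }
}

export default ExamplePriceApi;
```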
@@ -1,11 +1,11 @@
|
||||
import * as fs from 'fs';
|
||||
import path from "path";
|
||||
import config from '../config';
|
||||
import logger from '../logger';
|
||||
import PricesRepository from '../repositories/PricesRepository';
|
||||
import BitfinexApi from './price-feeds/bitfinex-api';
|
||||
import BitflyerApi from './price-feeds/bitflyer-api';
|
||||
import CoinbaseApi from './price-feeds/coinbase-api';
|
||||
import FtxApi from './price-feeds/ftx-api';
|
||||
import GeminiApi from './price-feeds/gemini-api';
|
||||
import KrakenApi from './price-feeds/kraken-api';
|
||||
|
||||
@@ -34,10 +34,10 @@ export interface Prices {
|
||||
}
|
||||
|
||||
class PriceUpdater {
|
||||
historyInserted: boolean = false;
|
||||
lastRun: number = 0;
|
||||
lastHistoricalRun: number = 0;
|
||||
running: boolean = false;
|
||||
public historyInserted = false;
|
||||
lastRun = 0;
|
||||
lastHistoricalRun = 0;
|
||||
running = false;
|
||||
feeds: PriceFeed[] = [];
|
||||
currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
|
||||
latestPrices: Prices;
|
||||
@@ -46,7 +46,6 @@ class PriceUpdater {
|
||||
this.latestPrices = this.getEmptyPricesObj();
|
||||
|
||||
this.feeds.push(new BitflyerApi()); // Does not have historical endpoint
|
||||
this.feeds.push(new FtxApi());
|
||||
this.feeds.push(new KrakenApi());
|
||||
this.feeds.push(new CoinbaseApi());
|
||||
this.feeds.push(new BitfinexApi());
|
||||
@@ -158,7 +157,7 @@ class PriceUpdater {
|
||||
const existingPriceTimes = await PricesRepository.$getPricesTimes();
|
||||
|
||||
// Insert MtGox weekly prices
|
||||
const pricesJson: any[] = JSON.parse(fs.readFileSync('./src/tasks/price-feeds/mtgox-weekly.json').toString());
|
||||
const pricesJson: any[] = JSON.parse(fs.readFileSync(path.join(__dirname, 'mtgox-weekly.json')).toString());
|
||||
const prices = this.getEmptyPricesObj();
|
||||
let insertedCount: number = 0;
|
||||
for (const price of pricesJson) {
|
||||
|
||||
119
backend/src/utils/ipcheck.js
Normal file
@@ -0,0 +1,119 @@
|
||||
var net = require('net');
|
||||
|
||||
var IPCheck = module.exports = function(input) {
|
||||
var self = this;
|
||||
|
||||
if (!(self instanceof IPCheck)) {
|
||||
return new IPCheck(input);
|
||||
}
|
||||
|
||||
self.input = input;
|
||||
self.parse();
|
||||
};
|
||||
|
||||
IPCheck.prototype.parse = function() {
|
||||
var self = this;
|
||||
|
||||
if (!self.input || typeof self.input !== 'string') return self.valid = false;
|
||||
|
||||
var ip;
|
||||
|
||||
var pos = self.input.lastIndexOf('/');
|
||||
if (pos !== -1) {
|
||||
ip = self.input.substring(0, pos);
|
||||
self.mask = +self.input.substring(pos + 1);
|
||||
} else {
|
||||
ip = self.input;
|
||||
self.mask = null;
|
||||
}
|
||||
|
||||
self.ipv = net.isIP(ip);
|
||||
self.valid = !!self.ipv && !isNaN(self.mask);
|
||||
|
||||
if (!self.valid) return;
|
||||
|
||||
// default mask = 32 for ipv4 and 128 for ipv6
|
||||
if (self.mask === null) self.mask = self.ipv === 4 ? 32 : 128;
|
||||
|
||||
if (self.ipv === 4) {
|
||||
// difference between ipv4 and ipv6 masks
|
||||
self.mask += 96;
|
||||
}
|
||||
|
||||
if (self.mask < 0 || self.mask > 128) {
|
||||
self.valid = false;
|
||||
return;
|
||||
}
|
||||
|
||||
self.address = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ];
|
||||
|
||||
if(self.ipv === 4){
|
||||
self.parseIPv4(ip);
|
||||
}else{
|
||||
self.parseIPv6(ip);
|
||||
}
|
||||
};
|
||||
|
||||
IPCheck.prototype.parseIPv4 = function(ip) {
|
||||
var self = this;
|
||||
|
||||
// ipv4 addresses live under ::ffff:0:0
|
||||
self.address[10] = self.address[11] = 0xff;
|
||||
|
||||
var octets = ip.split('.');
|
||||
for (var i = 0; i < 4; i++) {
|
||||
self.address[i + 12] = parseInt(octets[i], 10);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
var V6_TRANSITIONAL = /:(\d+\.\d+\.\d+\.\d+)$/;
|
||||
|
||||
IPCheck.prototype.parseIPv6 = function(ip) {
|
||||
var self = this;
|
||||
|
||||
var transitionalMatch = V6_TRANSITIONAL.exec(ip);
|
||||
if(transitionalMatch){
|
||||
self.parseIPv4(transitionalMatch[1]);
|
||||
return;
|
||||
}
|
||||
|
||||
var bits = ip.split(':');
|
||||
if (bits.length < 8) {
|
||||
ip = ip.replace('::', Array(11 - bits.length).join(':'));
|
||||
bits = ip.split(':');
|
||||
}
|
||||
|
||||
var j = 0;
|
||||
for (var i = 0; i < bits.length; i += 1) {
|
||||
var x = bits[i] ? parseInt(bits[i], 16) : 0;
|
||||
self.address[j++] = x >> 8;
|
||||
self.address[j++] = x & 0xff;
|
||||
}
|
||||
};
|
||||
|
||||
IPCheck.prototype.match = function(cidr) {
|
||||
var self = this;
|
||||
|
||||
if (!(cidr instanceof IPCheck)) cidr = new IPCheck(cidr);
|
||||
if (!self.valid || !cidr.valid) return false;
|
||||
|
||||
var mask = cidr.mask;
|
||||
var i = 0;
|
||||
|
||||
while (mask >= 8) {
|
||||
if (self.address[i] !== cidr.address[i]) return false;
|
||||
|
||||
i++;
|
||||
mask -= 8;
|
||||
}
|
||||
|
||||
var shift = 8 - mask;
|
||||
return (self.address[i] >>> shift) === (cidr.address[i] >>> shift);
|
||||
};
|
||||
|
||||
|
||||
IPCheck.match = function(ip, cidr) {
|
||||
ip = ip instanceof IPCheck ? ip : new IPCheck(ip);
|
||||
return ip.match(cidr);
|
||||
};
|
||||
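A brief usage sketch of this helper, mirroring the CIDR checks `$lookupNodeLocation` performs for the AS overrides (the addresses below are examples, not real node IPs):

```
import * as IPCheck from './ipcheck.js'; // path relative to the importing file

// Example only: same call pattern as the Lunanode / FDCservers overrides above.
console.log(IPCheck.match('170.75.161.10', '170.75.160.0/20')); // true  (inside the /20)
console.log(IPCheck.match('50.8.1.1', '50.7.0.0/16'));          // false (outside the /16)
```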
174
backend/src/utils/pairing-heap.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
export type HeapNode<T> = {
|
||||
element: T
|
||||
child?: HeapNode<T>
|
||||
next?: HeapNode<T>
|
||||
prev?: HeapNode<T>
|
||||
} | null | undefined;
|
||||
|
||||
// minimal pairing heap priority queue implementation
|
||||
export class PairingHeap<T> {
|
||||
private root: HeapNode<T> = null;
|
||||
private comparator: (a: T, b: T) => boolean;
|
||||
|
||||
// comparator function should return 'true' if a is higher priority than b
|
||||
constructor(comparator: (a: T, b: T) => boolean) {
|
||||
this.comparator = comparator;
|
||||
}
|
||||
|
||||
isEmpty(): boolean {
|
||||
return !this.root;
|
||||
}
|
||||
|
||||
add(element: T): HeapNode<T> {
|
||||
const node: HeapNode<T> = {
|
||||
element
|
||||
};
|
||||
|
||||
this.root = this.meld(this.root, node);
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
// returns the top priority element without modifying the queue
|
||||
peek(): T | void {
|
||||
return this.root?.element;
|
||||
}
|
||||
|
||||
// removes and returns the top priority element
|
||||
pop(): T | void {
|
||||
let element;
|
||||
if (this.root) {
|
||||
const node = this.root;
|
||||
element = node.element;
|
||||
this.root = this.mergePairs(node.child);
|
||||
}
|
||||
return element;
|
||||
}
|
||||
|
||||
deleteNode(node: HeapNode<T>): void {
|
||||
if (!node) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (node === this.root) {
|
||||
this.root = this.mergePairs(node.child);
|
||||
}
|
||||
else {
|
||||
if (node.prev) {
|
||||
if (node.prev.child === node) {
|
||||
node.prev.child = node.next;
|
||||
}
|
||||
else {
|
||||
node.prev.next = node.next;
|
||||
}
|
||||
}
|
||||
if (node.next) {
|
||||
node.next.prev = node.prev;
|
||||
}
|
||||
this.root = this.meld(this.root, this.mergePairs(node.child));
|
||||
}
|
||||
|
||||
node.child = null;
|
||||
node.prev = null;
|
||||
node.next = null;
|
||||
}
|
||||
|
||||
// fix the heap after increasing the priority of a given node
|
||||
increasePriority(node: HeapNode<T>): void {
|
||||
// already the top priority element
|
||||
if (!node || node === this.root) {
|
||||
return;
|
||||
}
|
||||
// extract from siblings
|
||||
if (node.prev) {
|
||||
if (node.prev?.child === node) {
|
||||
if (this.comparator(node.prev.element, node.element)) {
|
||||
// already in a valid position
|
||||
return;
|
||||
}
|
||||
node.prev.child = node.next;
|
||||
}
|
||||
else {
|
||||
node.prev.next = node.next;
|
||||
}
|
||||
}
|
||||
if (node.next) {
|
||||
node.next.prev = node.prev;
|
||||
}
|
||||
|
||||
this.root = this.meld(this.root, node);
|
||||
}
|
||||
|
||||
decreasePriority(node: HeapNode<T>): void {
|
||||
this.deleteNode(node);
|
||||
this.root = this.meld(this.root, node);
|
||||
}
|
||||
|
||||
meld(a: HeapNode<T>, b: HeapNode<T>): HeapNode<T> {
|
||||
if (!a) {
|
||||
return b;
|
||||
}
|
||||
if (!b || a === b) {
|
||||
return a;
|
||||
}
|
||||
|
||||
let parent: HeapNode<T> = b;
|
||||
let child: HeapNode<T> = a;
|
||||
if (this.comparator(a.element, b.element)) {
|
||||
parent = a;
|
||||
child = b;
|
||||
}
|
||||
|
||||
child.next = parent.child;
|
||||
if (parent.child) {
|
||||
parent.child.prev = child;
|
||||
}
|
||||
child.prev = parent;
|
||||
parent.child = child;
|
||||
|
||||
parent.next = null;
|
||||
parent.prev = null;
|
||||
|
||||
return parent;
|
||||
}
|
||||
|
||||
mergePairs(node: HeapNode<T>): HeapNode<T> {
|
||||
if (!node) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let current: HeapNode<T> = node;
|
||||
let next: HeapNode<T>;
|
||||
let nextCurrent: HeapNode<T>;
|
||||
let pairs: HeapNode<T>;
|
||||
let melded: HeapNode<T>;
|
||||
while (current) {
|
||||
next = current.next;
|
||||
if (next) {
|
||||
nextCurrent = next.next;
|
||||
melded = this.meld(current, next);
|
||||
if (melded) {
|
||||
melded.prev = pairs;
|
||||
}
|
||||
pairs = melded;
|
||||
}
|
||||
else {
|
||||
nextCurrent = null;
|
||||
current.prev = pairs;
|
||||
pairs = current;
|
||||
break;
|
||||
}
|
||||
current = nextCurrent;
|
||||
}
|
||||
|
||||
melded = null;
|
||||
let prev: HeapNode<T>;
|
||||
while (pairs) {
|
||||
prev = pairs.prev;
|
||||
melded = this.meld(melded, pairs);
|
||||
pairs = prev;
|
||||
}
|
||||
|
||||
return melded;
|
||||
}
|
||||
}
|
||||
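A short usage sketch of the heap, relying only on the public API shown above; the element type and comparator are illustrative:

```
import { PairingHeap } from './pairing-heap';

// Min-heap over fee rates: the comparator returns true when `a` should come out before `b`.
const heap = new PairingHeap<{ txid: string, feeRate: number }>((a, b) => a.feeRate < b.feeRate);

heap.add({ txid: 'aaa', feeRate: 12 });
const cheapest = heap.add({ txid: 'bbb', feeRate: 1 });
heap.add({ txid: 'ccc', feeRate: 7 });

console.log(heap.peek());  // { txid: 'bbb', feeRate: 1 }
heap.deleteNode(cheapest); // remove a node by the handle returned from add()
console.log(heap.pop());   // { txid: 'ccc', feeRate: 7 }
```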
5
backend/testSetup.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
jest.mock('./mempool-config.json', () => ({}), { virtual: true });
|
||||
jest.mock('./src/logger.ts', () => ({}), { virtual: true });
|
||||
jest.mock('./src/api/rbf-cache.ts', () => ({}), { virtual: true });
|
||||
jest.mock('./src/api/mempool.ts', () => ({}), { virtual: true });
|
||||
jest.mock('./src/api/memory-cache.ts', () => ({}), { virtual: true });
|
||||
8
backend/tsconfig.build.json
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"extends": "./tsconfig",
|
||||
"exclude": ["**/*.test.*", "**/__mocks__/*", "**/__tests__/*"],
|
||||
"compilerOptions": {
|
||||
"types": ["node"]
|
||||
},
|
||||
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"types": ["node"],
|
||||
"module": "commonjs",
|
||||
"target": "esnext",
|
||||
"types": ["node", "jest"],
|
||||
"lib": ["es2019", "dom"],
|
||||
"strict": true,
|
||||
"noImplicitAny": false,
|
||||
@@ -13,7 +13,8 @@
|
||||
"node_modules/@types"
|
||||
],
|
||||
"allowSyntheticDefaultImports": true,
|
||||
"esModuleInterop": true
|
||||
"esModuleInterop": true,
|
||||
"allowJs": true,
|
||||
},
|
||||
"include": [
|
||||
"src/**/*.ts"
|
||||
@@ -21,4 +22,4 @@
|
||||
"exclude": [
|
||||
"dist/**"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
3
contributors/WesVleuten.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of September 1, 2022.
|
||||
|
||||
Signed: WesVleuten
|
||||
3
contributors/junderw.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of August 19, 2022.
|
||||
|
||||
Signed: junderw
|
||||
@@ -89,6 +89,7 @@ Below we list all settings from `mempool-config.json` and the corresponding over
|
||||
"MEMPOOL": {
|
||||
"NETWORK": "mainnet",
|
||||
"BACKEND": "electrum",
|
||||
"ENABLED": true,
|
||||
"HTTP_PORT": 8999,
|
||||
"SPAWN_CLUSTER_PROCS": 0,
|
||||
"API_URL_PREFIX": "/api/v1/",
|
||||
@@ -102,7 +103,9 @@ Below we list all settings from `mempool-config.json` and the corresponding over
|
||||
"PRICE_FEED_UPDATE_INTERVAL": 600,
|
||||
"USE_SECOND_NODE_FOR_MINFEE": false,
|
||||
"EXTERNAL_ASSETS": ["https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json"],
|
||||
"STDOUT_LOG_MIN_PRIORITY": "info"
|
||||
"STDOUT_LOG_MIN_PRIORITY": "info",
|
||||
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
|
||||
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master"
|
||||
},
|
||||
```
|
||||
|
||||
@@ -126,6 +129,8 @@ Corresponding `docker-compose.yml` overrides:
|
||||
MEMPOOL_USE_SECOND_NODE_FOR_MINFEE: ""
|
||||
MEMPOOL_EXTERNAL_ASSETS: ""
|
||||
MEMPOOL_STDOUT_LOG_MIN_PRIORITY: ""
|
||||
MEMPOOL_POOLS_JSON_URL: ""
|
||||
MEMPOOL_POOLS_JSON_TREE_URL: ""
|
||||
...
|
||||
```
|
||||
|
||||
@@ -346,3 +351,68 @@ Corresponding `docker-compose.yml` overrides:
|
||||
PRICE_DATA_SERVER_CLEARNET_URL: ""
|
||||
...
|
||||
```
|
||||
|
||||
<br/>
|
||||
|
||||
`mempool-config.json`:
|
||||
```
|
||||
"LIGHTNING": {
|
||||
"ENABLED": false
|
||||
"BACKEND": "lnd"
|
||||
"TOPOLOGY_FOLDER": ""
|
||||
"STATS_REFRESH_INTERVAL": 600
|
||||
"GRAPH_REFRESH_INTERVAL": 600
|
||||
"LOGGER_UPDATE_INTERVAL": 30
|
||||
}
|
||||
```
|
||||
|
||||
Corresponding `docker-compose.yml` overrides:
|
||||
```
|
||||
api:
|
||||
environment:
|
||||
LIGHTNING_ENABLED: false
|
||||
LIGHTNING_BACKEND: "lnd"
|
||||
LIGHTNING_TOPOLOGY_FOLDER: ""
|
||||
LIGHTNING_STATS_REFRESH_INTERVAL: 600
|
||||
LIGHTNING_GRAPH_REFRESH_INTERVAL: 600
|
||||
LIGHTNING_LOGGER_UPDATE_INTERVAL: 30
|
||||
...
|
||||
```
|
||||
|
||||
<br/>
|
||||
|
||||
`mempool-config.json`:
|
||||
```
|
||||
"LND": {
|
||||
"TLS_CERT_PATH": ""
|
||||
"MACAROON_PATH": ""
|
||||
"REST_API_URL": "https://localhost:8080"
|
||||
}
|
||||
```
|
||||
|
||||
Corresponding `docker-compose.yml` overrides:
|
||||
```
|
||||
api:
|
||||
environment:
|
||||
LND_TLS_CERT_PATH: ""
|
||||
LND_MACAROON_PATH: ""
|
||||
LND_REST_API_URL: "https://localhost:8080"
|
||||
...
|
||||
```
|
||||
|
||||
<br/>
|
||||
|
||||
`mempool-config.json`:
|
||||
```
|
||||
"CLIGHTNING": {
|
||||
"SOCKET": ""
|
||||
}
|
||||
```
|
||||
|
||||
Corresponding `docker-compose.yml` overrides:
|
||||
```
|
||||
api:
|
||||
environment:
|
||||
CLIGHTNING_SOCKET: ""
|
||||
...
|
||||
```
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
FROM node:16.16.0-buster-slim AS builder
|
||||
|
||||
ARG commitHash
|
||||
ENV DOCKER_COMMIT_HASH=${commitHash}
|
||||
ENV MEMPOOL_COMMIT_HASH=${commitHash}
|
||||
|
||||
WORKDIR /build
|
||||
COPY . .
|
||||
@@ -9,18 +9,15 @@ COPY . .
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y build-essential python3 pkg-config
|
||||
RUN npm install --omit=dev --omit=optional
|
||||
RUN npm run build
|
||||
RUN npm run package
|
||||
|
||||
FROM node:16.16.0-buster-slim
|
||||
|
||||
WORKDIR /backend
|
||||
|
||||
COPY --from=builder /build/ .
|
||||
|
||||
RUN chmod +x /backend/start.sh
|
||||
RUN chmod +x /backend/wait-for-it.sh
|
||||
|
||||
RUN chown -R 1000:1000 /backend && chmod -R 755 /backend
|
||||
RUN chown 1000:1000 ./
|
||||
COPY --from=builder --chown=1000:1000 /build/package ./package/
|
||||
COPY --from=builder --chown=1000:1000 /build/mempool-config.json /build/start.sh /build/wait-for-it.sh ./
|
||||
|
||||
USER 1000
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
"MEMPOOL": {
|
||||
"NETWORK": "__MEMPOOL_NETWORK__",
|
||||
"BACKEND": "__MEMPOOL_BACKEND__",
|
||||
"ENABLED": __MEMPOOL_ENABLED__,
|
||||
"HTTP_PORT": __MEMPOOL_HTTP_PORT__,
|
||||
"SPAWN_CLUSTER_PROCS": __MEMPOOL_SPAWN_CLUSTER_PROCS__,
|
||||
"API_URL_PREFIX": "__MEMPOOL_API_URL_PREFIX__",
|
||||
@@ -67,6 +68,22 @@
|
||||
"ENABLED": __BISQ_ENABLED__,
|
||||
"DATA_PATH": "__BISQ_DATA_PATH__"
|
||||
},
|
||||
"LIGHTNING": {
|
||||
"ENABLED": __LIGHTNING_ENABLED__,
|
||||
"BACKEND": "__LIGHTNING_BACKEND__",
|
||||
"STATS_REFRESH_INTERVAL": __LIGHTNING_STATS_REFRESH_INTERVAL__,
|
||||
"GRAPH_REFRESH_INTERVAL": __LIGHTNING_GRAPH_REFRESH_INTERVAL__,
|
||||
"LOGGER_UPDATE_INTERVAL": __LIGHTNING_LOGGER_UPDATE_INTERVAL__,
|
||||
"TOPOLOGY_FOLDER": "__LIGHTNING_TOPOLOGY_FOLDER__"
|
||||
},
|
||||
"LND": {
|
||||
"TLS_CERT_PATH": "__LND_TLS_CERT_PATH__",
|
||||
"MACAROON_PATH": "__LND_MACAROON_PATH__",
|
||||
"REST_API_URL": "__LND_REST_API_URL__"
|
||||
},
|
||||
"CLIGHTNING": {
|
||||
"SOCKET": "__CLIGHTNING_SOCKET__"
|
||||
},
|
||||
"SOCKS5PROXY": {
|
||||
"ENABLED": __SOCKS5PROXY_ENABLED__,
|
||||
"USE_ONION": __SOCKS5PROXY_USE_ONION__,
|
||||
|
||||
40
docker/backend/start.sh
Normal file → Executable file
@@ -3,6 +3,7 @@
|
||||
# MEMPOOL
|
||||
__MEMPOOL_NETWORK__=${MEMPOOL_NETWORK:=mainnet}
|
||||
__MEMPOOL_BACKEND__=${MEMPOOL_BACKEND:=electrum}
|
||||
__MEMPOOL_ENABLED__=${MEMPOOL_ENABLED:=true}
|
||||
__MEMPOOL_HTTP_PORT__=${BACKEND_HTTP_PORT:=8999}
|
||||
__MEMPOOL_SPAWN_CLUSTER_PROCS__=${MEMPOOL_SPAWN_CLUSTER_PROCS:=0}
|
||||
__MEMPOOL_API_URL_PREFIX__=${MEMPOOL_API_URL_PREFIX:=/api/v1/}
|
||||
@@ -24,6 +25,8 @@ __MEMPOOL_USER_AGENT__=${MEMPOOL_USER_AGENT:=mempool}
|
||||
__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__=${MEMPOOL_STDOUT_LOG_MIN_PRIORITY:=info}
|
||||
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=false}
|
||||
__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__=${MEMPOOL_AUTOMATIC_BLOCK_REINDEXING:=false}
|
||||
__MEMPOOL_POOLS_JSON_URL__=${MEMPOOL_POOLS_JSON_URL:=https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json}
|
||||
__MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=https://api.github.com/repos/mempool/mining-pools/git/trees/master}
|
||||
|
||||
# CORE_RPC
|
||||
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
|
||||
@@ -89,10 +92,27 @@ __EXTERNAL_DATA_SERVER_LIQUID_ONION__=${EXTERNAL_DATA_SERVER_LIQUID_ONION:=http:
|
||||
__EXTERNAL_DATA_SERVER_BISQ_URL__=${EXTERNAL_DATA_SERVER_BISQ_URL:=https://bisq.markets/api}
|
||||
__EXTERNAL_DATA_SERVER_BISQ_ONION__=${EXTERNAL_DATA_SERVER_BISQ_ONION:=http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api}
|
||||
|
||||
# LIGHTNING
|
||||
__LIGHTNING_ENABLED__=${LIGHTNING_ENABLED:=false}
|
||||
__LIGHTNING_BACKEND__=${LIGHTNING_BACKEND:="lnd"}
|
||||
__LIGHTNING_TOPOLOGY_FOLDER__=${LIGHTNING_TOPOLOGY_FOLDER:=""}
|
||||
__LIGHTNING_STATS_REFRESH_INTERVAL__=${LIGHTNING_STATS_REFRESH_INTERVAL:=600}
|
||||
__LIGHTNING_GRAPH_REFRESH_INTERVAL__=${LIGHTNING_GRAPH_REFRESH_INTERVAL:=600}
|
||||
__LIGHTNING_LOGGER_UPDATE_INTERVAL__=${LIGHTNING_LOGGER_UPDATE_INTERVAL:=30}
|
||||
|
||||
# LND
|
||||
__LND_TLS_CERT_PATH__=${LND_TLS_CERT_PATH:=""}
|
||||
__LND_MACAROON_PATH__=${LND_MACAROON_PATH:=""}
|
||||
__LND_REST_API_URL__=${LND_REST_API_URL:="https://localhost:8080"}
|
||||
|
||||
# CLN
|
||||
__CLIGHTNING_SOCKET__=${CLIGHTNING_SOCKET:=""}
|
||||
|
||||
mkdir -p "${__MEMPOOL_CACHE_DIR__}"
|
||||
|
||||
sed -i "s/__MEMPOOL_NETWORK__/${__MEMPOOL_NETWORK__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_BACKEND__/${__MEMPOOL_BACKEND__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_ENABLED__/${__MEMPOOL_ENABLED__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_HTTP_PORT__/${__MEMPOOL_HTTP_PORT__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_SPAWN_CLUSTER_PROCS__/${__MEMPOOL_SPAWN_CLUSTER_PROCS__}/g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_API_URL_PREFIX__!${__MEMPOOL_API_URL_PREFIX__}!g" mempool-config.json
|
||||
@@ -114,6 +134,8 @@ sed -i "s!__MEMPOOL_USER_AGENT__!${__MEMPOOL_USER_AGENT__}!g" mempool-config.jso
|
||||
sed -i "s/__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__/${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__/${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}/g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_POOLS_JSON_URL__!${__MEMPOOL_POOLS_JSON_URL__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_POOLS_JSON_TREE_URL__!${__MEMPOOL_POOLS_JSON_TREE_URL__}!g" mempool-config.json
|
||||
|
||||
sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
|
||||
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json
|
||||
@@ -169,4 +191,20 @@ sed -i "s!__EXTERNAL_DATA_SERVER_LIQUID_ONION__!${__EXTERNAL_DATA_SERVER_LIQUID_
|
||||
sed -i "s!__EXTERNAL_DATA_SERVER_BISQ_URL__!${__EXTERNAL_DATA_SERVER_BISQ_URL__}!g" mempool-config.json
|
||||
sed -i "s!__EXTERNAL_DATA_SERVER_BISQ_ONION__!${__EXTERNAL_DATA_SERVER_BISQ_ONION__}!g" mempool-config.json
|
||||
|
||||
node /backend/dist/index.js
|
||||
# LIGHTNING
|
||||
sed -i "s!__LIGHTNING_ENABLED__!${__LIGHTNING_ENABLED__}!g" mempool-config.json
|
||||
sed -i "s!__LIGHTNING_BACKEND__!${__LIGHTNING_BACKEND__}!g" mempool-config.json
|
||||
sed -i "s!__LIGHTNING_TOPOLOGY_FOLDER__!${__LIGHTNING_TOPOLOGY_FOLDER__}!g" mempool-config.json
|
||||
sed -i "s!__LIGHTNING_STATS_REFRESH_INTERVAL__!${__LIGHTNING_STATS_REFRESH_INTERVAL__}!g" mempool-config.json
|
||||
sed -i "s!__LIGHTNING_GRAPH_REFRESH_INTERVAL__!${__LIGHTNING_GRAPH_REFRESH_INTERVAL__}!g" mempool-config.json
|
||||
sed -i "s!__LIGHTNING_LOGGER_UPDATE_INTERVAL__!${__LIGHTNING_LOGGER_UPDATE_INTERVAL__}!g" mempool-config.json
|
||||
|
||||
# LND
|
||||
sed -i "s!__LND_TLS_CERT_PATH__!${__LND_TLS_CERT_PATH__}!g" mempool-config.json
|
||||
sed -i "s!__LND_MACAROON_PATH__!${__LND_MACAROON_PATH__}!g" mempool-config.json
|
||||
sed -i "s!__LND_REST_API_URL__!${__LND_REST_API_URL__}!g" mempool-config.json
|
||||
|
||||
# CLN
|
||||
sed -i "s!__CLIGHTNING_SOCKET__!${__CLIGHTNING_SOCKET__}!g" mempool-config.json
|
||||
|
||||
node /backend/package/index.js
|
||||
|
||||
0
docker/backend/wait-for-it.sh
Normal file → Executable file
@@ -8,7 +8,9 @@ WORKDIR /build
|
||||
COPY . .
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y build-essential rsync
|
||||
RUN cp mempool-frontend-config.sample.json mempool-frontend-config.json
|
||||
RUN npm install --omit=dev --omit=optional
|
||||
|
||||
RUN npm run build
|
||||
|
||||
FROM nginx:1.17.8-alpine
|
||||
@@ -28,7 +30,9 @@ RUN chown -R 1000:1000 /patch && chmod -R 755 /patch && \
|
||||
chown -R 1000:1000 /var/cache/nginx && \
|
||||
chown -R 1000:1000 /var/log/nginx && \
|
||||
chown -R 1000:1000 /etc/nginx/nginx.conf && \
|
||||
chown -R 1000:1000 /etc/nginx/conf.d
|
||||
chown -R 1000:1000 /etc/nginx/conf.d && \
|
||||
chown -R 1000:1000 /var/www/mempool
|
||||
|
||||
RUN touch /var/run/nginx.pid && \
|
||||
chown -R 1000:1000 /var/run/nginx.pid
|
||||
|
||||
|
||||
@@ -10,4 +10,51 @@ cp /etc/nginx/nginx.conf /patch/nginx.conf
|
||||
sed -i "s/__MEMPOOL_FRONTEND_HTTP_PORT__/${__MEMPOOL_FRONTEND_HTTP_PORT__}/g" /patch/nginx.conf
|
||||
cat /patch/nginx.conf > /etc/nginx/nginx.conf
|
||||
|
||||
# Runtime overrides - read env vars defined in docker compose
|
||||
|
||||
__TESTNET_ENABLED__=${TESTNET_ENABLED:=false}
|
||||
__SIGNET_ENABLED__=${SIGNET_ENABLED:=false}
|
||||
__LIQUID_ENABLED__=${LIQUID_ENABLED:=false}
|
||||
__LIQUID_TESTNET_ENABLED__=${LIQUID_TESTNET_ENABLED:=false}
|
||||
__BISQ_ENABLED__=${BISQ_ENABLED:=false}
|
||||
__BISQ_SEPARATE_BACKEND__=${BISQ_SEPARATE_BACKEND:=false}
|
||||
__ITEMS_PER_PAGE__=${ITEMS_PER_PAGE:=10}
|
||||
__KEEP_BLOCKS_AMOUNT__=${KEEP_BLOCKS_AMOUNT:=8}
|
||||
__NGINX_PROTOCOL__=${NGINX_PROTOCOL:=http}
|
||||
__NGINX_HOSTNAME__=${NGINX_HOSTNAME:=localhost}
|
||||
__NGINX_PORT__=${NGINX_PORT:=8999}
|
||||
__BLOCK_WEIGHT_UNITS__=${BLOCK_WEIGHT_UNITS:=4000000}
|
||||
__MEMPOOL_BLOCKS_AMOUNT__=${MEMPOOL_BLOCKS_AMOUNT:=8}
|
||||
__BASE_MODULE__=${BASE_MODULE:=mempool}
|
||||
__MEMPOOL_WEBSITE_URL__=${MEMPOOL_WEBSITE_URL:=https://mempool.space}
|
||||
__LIQUID_WEBSITE_URL__=${LIQUID_WEBSITE_URL:=https://liquid.network}
|
||||
__BISQ_WEBSITE_URL__=${BISQ_WEBSITE_URL:=https://bisq.markets}
|
||||
__MINING_DASHBOARD__=${MINING_DASHBOARD:=true}
|
||||
__LIGHTNING__=${LIGHTNING:=false}
|
||||
|
||||
# Export as environment variables to be used by envsubst
|
||||
export __TESTNET_ENABLED__
|
||||
export __SIGNET_ENABLED__
|
||||
export __LIQUID_ENABLED__
|
||||
export __LIQUID_TESTNET_ENABLED__
|
||||
export __BISQ_ENABLED__
|
||||
export __BISQ_SEPARATE_BACKEND__
|
||||
export __ITEMS_PER_PAGE__
|
||||
export __KEEP_BLOCKS_AMOUNT__
|
||||
export __NGINX_PROTOCOL__
|
||||
export __NGINX_HOSTNAME__
|
||||
export __NGINX_PORT__
|
||||
export __BLOCK_WEIGHT_UNITS__
|
||||
export __MEMPOOL_BLOCKS_AMOUNT__
|
||||
export __BASE_MODULE__
|
||||
export __MEMPOOL_WEBSITE_URL__
|
||||
export __LIQUID_WEBSITE_URL__
|
||||
export __BISQ_WEBSITE_URL__
|
||||
export __MINING_DASHBOARD__
|
||||
export __LIGHTNING__
|
||||
|
||||
folder=$(find /var/www/mempool -name "config.js" | xargs dirname)
|
||||
echo ${folder}
|
||||
envsubst < ${folder}/config.template.js > ${folder}/config.js
|
||||
|
||||
exec "$@"
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
#!/bin/sh
|
||||
|
||||
#backend
|
||||
gitMaster="\.\.\/\.git\/refs\/heads\/master"
|
||||
git ls-remote https://github.com/mempool/mempool.git "$1^{}" | awk '{ print $1}' > ./backend/master
|
||||
cp ./docker/backend/* ./backend/
|
||||
sed -i "s/${gitMaster}/master/g" ./backend/src/api/backend-info.ts
|
||||
|
||||
#frontend
|
||||
localhostIP="127.0.0.1"
|
||||
|
||||
@@ -32,6 +32,7 @@
|
||||
"prefer-const": 1,
|
||||
"prefer-rest-params": 1,
|
||||
"quotes": [1, "single", { "allowTemplateLiterals": true }],
|
||||
"semi": 1
|
||||
"semi": 1,
|
||||
"eqeqeq": 1
|
||||
}
|
||||
}
|
||||
|
||||
@@ -113,7 +113,7 @@ https://www.transifex.com/mempool/mempool/dashboard/
|
||||
* French @Bayernatoor
|
||||
* Korean @kcalvinalvinn
|
||||
* Italian @HodlBits
|
||||
* Hebrew @Sh0ham
|
||||
* Hebrew @rapidlab309
|
||||
* Georgian @wyd_idk
|
||||
* Hungarian @btcdragonlord
|
||||
* Dutch @m__btc
|
||||
|
||||
@@ -152,15 +152,14 @@
"assets": [
"src/favicon.ico",
"src/resources",
"src/robots.txt"
"src/robots.txt",
"src/config.js",
"src/config.template.js"
],
"styles": [
"src/styles.scss",
"node_modules/@fortawesome/fontawesome-svg-core/styles.css"
],
"scripts": [
"generated-config.js"
],
"vendorChunk": true,
"extractLicenses": false,
"buildOptimizer": false,
@@ -170,6 +169,10 @@
},
"configurations": {
"production": {
"assets": [
"src/favicon.ico",
"src/robots.txt"
],
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
@@ -218,6 +221,10 @@
"proxyConfig": "proxy.conf.local.js",
"verbose": true
},
"local-esplora": {
"proxyConfig": "proxy.conf.local-esplora.js",
"verbose": true
},
"mixed": {
"proxyConfig": "proxy.conf.mixed.js",
"verbose": true
@@ -261,57 +268,6 @@
}
}
},
"server": {
"builder": "@angular-devkit/build-angular:server",
"options": {
"outputPath": "dist/mempool/server",
"main": "server.ts",
"tsConfig": "tsconfig.server.json",
"sourceMap": true,
"optimization": false
},
"configurations": {
"production": {
"outputHashing": "media",
"fileReplacements": [
{
"replace": "src/environments/environment.ts",
"with": "src/environments/environment.prod.ts"
}
],
"sourceMap": false,
"localize": true,
"optimization": true
}
},
"defaultConfiguration": ""
},
"serve-ssr": {
"builder": "@nguniversal/builders:ssr-dev-server",
"options": {
"browserTarget": "mempool:build",
"serverTarget": "mempool:server"
},
"configurations": {
"production": {
"browserTarget": "mempool:build:production",
"serverTarget": "mempool:server:production"
}
}
},
"prerender": {
"builder": "@nguniversal/builders:prerender",
"options": {
"browserTarget": "mempool:build:production",
"serverTarget": "mempool:server:production",
"routes": [
"/"
]
},
"configurations": {
"production": {}
}
},
"cypress-run": {
"builder": "@cypress/schematic:cypress",
"options": {
@@ -332,6 +288,5 @@
}
}
}
},
"defaultProject": "mempool"
}
}
@@ -2,7 +2,8 @@ var fs = require('fs');
const { spawnSync } = require('child_process');

const CONFIG_FILE_NAME = 'mempool-frontend-config.json';
const GENERATED_CONFIG_FILE_NAME = 'generated-config.js';
const GENERATED_CONFIG_FILE_NAME = 'src/resources/config.js';
const GENERATED_TEMPLATE_CONFIG_FILE_NAME = 'src/resources/config.template.js';

let settings = [];
let configContent = {};
@@ -67,10 +68,17 @@ if (process.env.DOCKER_COMMIT_HASH) {

const newConfig = `(function (window) {
window.__env = window.__env || {};${settings.reduce((str, obj) => `${str}
window.__env.${obj.key} = ${ typeof obj.value === 'string' ? `'${obj.value}'` : obj.value };`, '')}
window.__env.${obj.key} = ${typeof obj.value === 'string' ? `'${obj.value}'` : obj.value};`, '')}
window.__env.GIT_COMMIT_HASH = '${gitCommitHash}';
window.__env.PACKAGE_JSON_VERSION = '${packetJsonVersion}';
}(global || this));`;
}(this));`;

const newConfigTemplate = `(function (window) {
window.__env = window.__env || {};${settings.reduce((str, obj) => `${str}
window.__env.${obj.key} = ${typeof obj.value === 'string' ? `'\${__${obj.key}__}'` : `\${__${obj.key}__}`};`, '')}
window.__env.GIT_COMMIT_HASH = '${gitCommitHash}';
window.__env.PACKAGE_JSON_VERSION = '${packetJsonVersion}';
}(this));`;

function readConfig(path) {
try {
@@ -89,6 +97,16 @@ function writeConfig(path, config) {
}
}

function writeConfigTemplate(path, config) {
try {
fs.writeFileSync(path, config, 'utf8');
} catch (e) {
throw new Error(e);
}
}

writeConfigTemplate(GENERATED_TEMPLATE_CONFIG_FILE_NAME, newConfigTemplate);

const currentConfig = readConfig(GENERATED_CONFIG_FILE_NAME);

if (currentConfig && currentConfig === newConfig) {
@@ -106,4 +124,4 @@ if (currentConfig && currentConfig === newConfig) {
console.log('NEW CONFIG: ', newConfig);
writeConfig(GENERATED_CONFIG_FILE_NAME, newConfig);
console.log(`${GENERATED_CONFIG_FILE_NAME} file updated`);
};
}
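In short, generate-config.js now writes two artifacts instead of one: src/resources/config.js with concrete values for local builds, and src/resources/config.template.js with envsubst placeholders for the Docker image. An illustrative sketch of the pair for one boolean and one string key (actual keys and values come from mempool-frontend-config.json; the hash and version markers below are placeholders, not real values):

// src/resources/config.js - literal values, loaded by the app as-is:
(function (window) {
  window.__env = window.__env || {};
  window.__env.LIGHTNING = false;
  window.__env.MEMPOOL_WEBSITE_URL = 'https://mempool.space';
  window.__env.GIT_COMMIT_HASH = '<commit hash>';
  window.__env.PACKAGE_JSON_VERSION = '<version>';
}(this));

// src/resources/config.template.js - placeholder form, rewritten at container
// start by the Docker entrypoint shown earlier in this diff:
(function (window) {
  window.__env = window.__env || {};
  window.__env.LIGHTNING = ${__LIGHTNING__};
  window.__env.MEMPOOL_WEBSITE_URL = '${__MEMPOOL_WEBSITE_URL__}';
  window.__env.GIT_COMMIT_HASH = '<commit hash>';
  window.__env.PACKAGE_JSON_VERSION = '<version>';
}(this));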
@@ -17,5 +17,8 @@
"LIQUID_WEBSITE_URL": "https://liquid.network",
"BISQ_WEBSITE_URL": "https://bisq.markets",
"MINING_DASHBOARD": true,
"MAINNET_BLOCK_AUDIT_START_HEIGHT": 0,
"TESTNET_BLOCK_AUDIT_START_HEIGHT": 0,
"SIGNET_BLOCK_AUDIT_START_HEIGHT": 0,
"LIGHTNING": false
}
14552 frontend/package-lock.json (generated)
File diff suppressed because it is too large
@@ -22,20 +22,21 @@
|
||||
"scripts": {
|
||||
"ng": "./node_modules/@angular/cli/bin/ng.js",
|
||||
"tsc": "./node_modules/typescript/bin/tsc",
|
||||
"i18n-extract-from-source": "./node_modules/@angular/cli/bin/ng extract-i18n --out-file ./src/locale/messages.xlf",
|
||||
"i18n-extract-from-source": "npm run ng -- extract-i18n --out-file ./src/locale/messages.xlf",
|
||||
"i18n-pull-from-transifex": "tx pull -a --parallel --minimum-perc 1 --force",
|
||||
"serve": "npm run generate-config && npm run ng -- serve -c local",
|
||||
"serve:stg": "npm run generate-config && npm run ng -- serve -c staging",
|
||||
"serve:local-prod": "npm run generate-config && npm run ng -- serve -c local-prod",
|
||||
"serve:local-staging": "npm run generate-config && npm run ng -- serve -c local-staging",
|
||||
"start": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local",
|
||||
"start:local-esplora": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-esplora",
|
||||
"start:stg": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c staging",
|
||||
"start:local-prod": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-prod",
|
||||
"start:local-staging": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c local-staging",
|
||||
"start:mixed": "npm run generate-config && npm run sync-assets-dev && npm run ng -- serve -c mixed",
|
||||
"build": "npm run generate-config && npm run ng -- build --configuration production --localize && npm run sync-assets && npm run build-mempool.js",
|
||||
"sync-assets": "node sync-assets.js && rsync -av ./dist/mempool/browser/en-US/resources ./dist/mempool/browser/resources",
|
||||
"sync-assets-dev": "node sync-assets.js dev",
|
||||
"sync-assets": "rsync -av ./src/resources ./dist/mempool/browser && node sync-assets.js 'dist/mempool/browser/resources/'",
|
||||
"sync-assets-dev": "node sync-assets.js 'src/resources/'",
|
||||
"generate-config": "node generate-config.js",
|
||||
"build-mempool.js": "npm run build-mempool-js && npm run build-mempool-liquid-js && npm run build-mempool-bisq-js",
|
||||
"build-mempool-js": "browserify -p tinyify ./node_modules/@mempool/mempool.js/lib/index.js --standalone mempoolJS > ./dist/mempool/browser/en-US/mempool.js",
|
||||
@@ -50,9 +51,6 @@
|
||||
"config:defaults:mempool": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true LIQUID_TESTNET_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 BASE_MODULE=mempool BLOCK_WEIGHT_UNITS=4000000 && npm run generate-config",
|
||||
"config:defaults:liquid": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true LIQUID_TESTNET_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 BASE_MODULE=liquid BLOCK_WEIGHT_UNITS=300000 && npm run generate-config",
|
||||
"config:defaults:bisq": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 BASE_MODULE=bisq BLOCK_WEIGHT_UNITS=4000000 && npm run generate-config",
|
||||
"dev:ssr": "npm run generate-config && npm run ng -- run mempool:serve-ssr",
|
||||
"serve:ssr": "node server.run.js",
|
||||
"build:ssr": "npm run build && npm run ng -- run mempool:server:production && npm run tsc -- server.run.ts",
|
||||
"prerender": "npm run ng -- run mempool:prerender",
|
||||
"cypress:open": "cypress open",
|
||||
"cypress:run": "cypress run",
|
||||
@@ -63,63 +61,59 @@
|
||||
"cypress:run:ci:staging": "node update-config.js TESTNET_ENABLED=true SIGNET_ENABLED=true LIQUID_ENABLED=true BISQ_ENABLED=true ITEMS_PER_PAGE=25 && npm run generate-config && start-server-and-test serve:local-staging 4200 cypress:run:record"
|
||||
},
|
||||
"dependencies": {
|
||||
"@angular-devkit/build-angular": "~13.3.7",
|
||||
"@angular/animations": "~13.3.10",
|
||||
"@angular/cli": "~13.3.7",
|
||||
"@angular/common": "~13.3.10",
|
||||
"@angular/compiler": "~13.3.10",
|
||||
"@angular/core": "~13.3.10",
|
||||
"@angular/forms": "~13.3.10",
|
||||
"@angular/localize": "~13.3.10",
|
||||
"@angular/platform-browser": "~13.3.10",
|
||||
"@angular/platform-browser-dynamic": "~13.3.10",
|
||||
"@angular/platform-server": "~13.3.10",
|
||||
"@angular/router": "~13.3.10",
|
||||
"@fortawesome/angular-fontawesome": "~0.10.2",
|
||||
"@fortawesome/fontawesome-common-types": "~6.1.1",
|
||||
"@fortawesome/fontawesome-svg-core": "~6.1.1",
|
||||
"@fortawesome/free-solid-svg-icons": "~6.1.1",
|
||||
"@angular-devkit/build-angular": "^14.2.10",
|
||||
"@angular/animations": "^14.2.12",
|
||||
"@angular/cli": "^14.2.10",
|
||||
"@angular/common": "^14.2.12",
|
||||
"@angular/compiler": "^14.2.12",
|
||||
"@angular/core": "^14.2.12",
|
||||
"@angular/forms": "^14.2.12",
|
||||
"@angular/localize": "^14.2.12",
|
||||
"@angular/platform-browser": "^14.2.12",
|
||||
"@angular/platform-browser-dynamic": "^14.2.12",
|
||||
"@angular/platform-server": "^14.2.12",
|
||||
"@angular/router": "^14.2.12",
|
||||
"@fortawesome/angular-fontawesome": "~0.11.1",
|
||||
"@fortawesome/fontawesome-common-types": "~6.2.1",
|
||||
"@fortawesome/fontawesome-svg-core": "~6.2.1",
|
||||
"@fortawesome/free-solid-svg-icons": "~6.2.1",
|
||||
"@mempool/mempool.js": "2.3.0",
|
||||
"@ng-bootstrap/ng-bootstrap": "^11.0.0",
|
||||
"@nguniversal/express-engine": "~13.1.1",
|
||||
"@types/qrcode": "~1.4.2",
|
||||
"bootstrap": "~4.5.0",
|
||||
"@ng-bootstrap/ng-bootstrap": "^13.1.1",
|
||||
"@types/qrcode": "~1.5.0",
|
||||
"bootstrap": "~4.6.1",
|
||||
"browserify": "^17.0.0",
|
||||
"clipboard": "^2.0.10",
|
||||
"clipboard": "^2.0.11",
|
||||
"domino": "^2.1.6",
|
||||
"echarts": "~5.3.2",
|
||||
"echarts": "~5.4.0",
|
||||
"echarts-gl": "^2.0.9",
|
||||
"express": "^4.17.1",
|
||||
"lightweight-charts": "~3.8.0",
|
||||
"ngx-echarts": "8.0.1",
|
||||
"ngx-infinite-scroll": "^10.0.1",
|
||||
"qrcode": "1.5.0",
|
||||
"rxjs": "~7.5.5",
|
||||
"tinyify": "^3.0.0",
|
||||
"ngx-echarts": "~14.0.0",
|
||||
"ngx-infinite-scroll": "^14.0.1",
|
||||
"qrcode": "1.5.1",
|
||||
"rxjs": "~7.5.7",
|
||||
"tinyify": "^3.1.0",
|
||||
"tlite": "^0.1.9",
|
||||
"tslib": "~2.4.0",
|
||||
"zone.js": "~0.11.5"
|
||||
"tslib": "~2.4.1",
|
||||
"zone.js": "~0.12.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular/compiler-cli": "~13.3.10",
|
||||
"@angular/language-service": "~13.3.10",
|
||||
"@nguniversal/builders": "~13.1.1",
|
||||
"@types/express": "^4.17.0",
|
||||
"@types/node": "^12.11.1",
|
||||
"@typescript-eslint/eslint-plugin": "^5.30.5",
|
||||
"@typescript-eslint/parser": "^5.30.5",
|
||||
"eslint": "^8.19.0",
|
||||
"@angular/compiler-cli": "^14.2.12",
|
||||
"@angular/language-service": "^14.2.12",
|
||||
"@types/node": "^18.11.9",
|
||||
"@typescript-eslint/eslint-plugin": "^5.45.0",
|
||||
"@typescript-eslint/parser": "^5.45.0",
|
||||
"eslint": "^8.28.0",
|
||||
"http-proxy-middleware": "~2.0.6",
|
||||
"prettier": "^2.7.1",
|
||||
"ts-node": "~10.8.1",
|
||||
"prettier": "^2.8.0",
|
||||
"ts-node": "~10.9.1",
|
||||
"typescript": "~4.6.4"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@cypress/schematic": "~2.0.0",
|
||||
"cypress": "^10.3.0",
|
||||
"cypress-fail-on-console-error": "~3.0.0",
|
||||
"@cypress/schematic": "~2.3.0",
|
||||
"cypress": "^11.2.0",
|
||||
"cypress-fail-on-console-error": "~4.0.2",
|
||||
"cypress-wait-until": "^1.7.2",
|
||||
"mock-socket": "~9.1.4",
|
||||
"mock-socket": "~9.1.5",
|
||||
"start-server-and-test": "~1.14.0"
|
||||
},
|
||||
"scarfSettings": {
|
||||
|
||||
137 frontend/proxy.conf.local-esplora.js Normal file
@@ -0,0 +1,137 @@
|
||||
const fs = require('fs');
|
||||
|
||||
const FRONTEND_CONFIG_FILE_NAME = 'mempool-frontend-config.json';
|
||||
|
||||
let configContent;
|
||||
|
||||
// Read frontend config
|
||||
try {
|
||||
const rawConfig = fs.readFileSync(FRONTEND_CONFIG_FILE_NAME);
|
||||
configContent = JSON.parse(rawConfig);
|
||||
console.log(`${FRONTEND_CONFIG_FILE_NAME} file found, using provided config`);
|
||||
} catch (e) {
|
||||
console.log(e);
|
||||
if (e.code !== 'ENOENT') {
|
||||
throw new Error(e);
|
||||
} else {
|
||||
console.log(`${FRONTEND_CONFIG_FILE_NAME} file not found, using default config`);
|
||||
}
|
||||
}
|
||||
|
||||
let PROXY_CONFIG = [];
|
||||
|
||||
if (configContent && configContent.BASE_MODULE === 'liquid') {
|
||||
PROXY_CONFIG.push(...[
|
||||
{
|
||||
context: ['/liquid/api/v1/**'],
|
||||
target: `http://127.0.0.1:8999`,
|
||||
secure: false,
|
||||
ws: true,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
pathRewrite: {
|
||||
"^/liquid": ""
|
||||
},
|
||||
},
|
||||
{
|
||||
context: ['/liquid/api/**'],
|
||||
target: `http://127.0.0.1:3000`,
|
||||
secure: false,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
pathRewrite: {
|
||||
"^/liquid/api/": ""
|
||||
},
|
||||
},
|
||||
{
|
||||
context: ['/liquidtestnet/api/v1/**'],
|
||||
target: `http://127.0.0.1:8999`,
|
||||
secure: false,
|
||||
ws: true,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
pathRewrite: {
|
||||
"^/liquidtestnet": ""
|
||||
},
|
||||
},
|
||||
{
|
||||
context: ['/liquidtestnet/api/**'],
|
||||
target: `http://127.0.0.1:3000`,
|
||||
secure: false,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
pathRewrite: {
|
||||
"^/liquidtestnet/api/": "/"
|
||||
},
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
|
||||
if (configContent && configContent.BASE_MODULE === 'bisq') {
|
||||
PROXY_CONFIG.push(...[
|
||||
{
|
||||
context: ['/bisq/api/v1/ws'],
|
||||
target: `http://127.0.0.1:8999`,
|
||||
secure: false,
|
||||
ws: true,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
pathRewrite: {
|
||||
"^/bisq": ""
|
||||
},
|
||||
},
|
||||
{
|
||||
context: ['/bisq/api/v1/**'],
|
||||
target: `http://127.0.0.1:8999`,
|
||||
secure: false,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
},
|
||||
{
|
||||
context: ['/bisq/api/**'],
|
||||
target: `http://127.0.0.1:8999`,
|
||||
secure: false,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
pathRewrite: {
|
||||
"^/bisq/api/": "/api/v1/bisq/"
|
||||
},
|
||||
}
|
||||
]);
|
||||
}
|
||||
|
||||
PROXY_CONFIG.push(...[
|
||||
{
|
||||
context: ['/testnet/api/v1/lightning/**'],
|
||||
target: `http://127.0.0.1:8999`,
|
||||
secure: false,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
pathRewrite: {
|
||||
"^/testnet": ""
|
||||
},
|
||||
},
|
||||
{
|
||||
context: ['/api/v1/**'],
|
||||
target: `http://127.0.0.1:8999`,
|
||||
secure: false,
|
||||
ws: true,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
},
|
||||
{
|
||||
context: ['/api/**'],
|
||||
target: `http://127.0.0.1:3000`,
|
||||
secure: false,
|
||||
changeOrigin: true,
|
||||
proxyTimeout: 30000,
|
||||
pathRewrite: {
|
||||
"^/api": ""
|
||||
},
|
||||
}
|
||||
]);
|
||||
|
||||
console.log(PROXY_CONFIG);
|
||||
|
||||
module.exports = PROXY_CONFIG;
|
||||
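The proxy.conf.local-esplora.js file above is selected by the new local-esplora serve configuration added to angular.json in this diff and started via the start:local-esplora script in package.json (ng serve -c local-esplora). Each entry follows the webpack-dev-server proxy schema used by the Angular CLI; a hedged recap of the fallback REST entry, with explanatory comments added (same values as above, comments are assumptions about intent):

// Forward plain REST calls that were not matched by the /api/v1 websocket
// entry above to a locally running esplora/electrs HTTP API on port 3000.
const exampleEntry = {
  context: ['/api/**'],
  target: 'http://127.0.0.1:3000',
  secure: false,
  changeOrigin: true,
  proxyTimeout: 30000,
  pathRewrite: {
    '^/api': ''   // strip the /api prefix before forwarding
  },
};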
@@ -3,9 +3,9 @@ const fs = require('fs');
let PROXY_CONFIG = require('./proxy.conf');

PROXY_CONFIG.forEach(entry => {
entry.target = entry.target.replace("mempool.space", "mempool.ninja");
entry.target = entry.target.replace("liquid.network", "liquid.place");
entry.target = entry.target.replace("bisq.markets", "bisq.ninja");
entry.target = entry.target.replace("mempool.space", "mempool-staging.tk7.mempool.space");
entry.target = entry.target.replace("liquid.network", "liquid-staging.tk7.mempool.space");
entry.target = entry.target.replace("bisq.markets", "bisq-staging.fra.mempool.space");
});

module.exports = PROXY_CONFIG;
@@ -1,96 +0,0 @@
|
||||
import 'zone.js/node';
|
||||
import './generated-config';
|
||||
|
||||
import * as domino from 'domino';
|
||||
import * as express from 'express';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
const {readFileSync, existsSync} = require('fs');
|
||||
const {createProxyMiddleware} = require('http-proxy-middleware');
|
||||
|
||||
const template = fs.readFileSync(path.join(process.cwd(), 'dist/mempool/browser/en-US/', 'index.html')).toString();
|
||||
const win = domino.createWindow(template);
|
||||
|
||||
// @ts-ignore
|
||||
win.__env = global.__env;
|
||||
|
||||
// @ts-ignore
|
||||
win.matchMedia = () => {
|
||||
return {
|
||||
matches: true
|
||||
};
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
win.setTimeout = (fn) => { fn(); };
|
||||
win.document.body.scrollTo = (() => {});
|
||||
// @ts-ignore
|
||||
global['window'] = win;
|
||||
global['document'] = win.document;
|
||||
// @ts-ignore
|
||||
global['history'] = { state: { } };
|
||||
|
||||
global['localStorage'] = {
|
||||
getItem: () => '',
|
||||
setItem: () => {},
|
||||
removeItem: () => {},
|
||||
clear: () => {},
|
||||
length: 0,
|
||||
key: () => '',
|
||||
};
|
||||
|
||||
/**
|
||||
* Return the list of supported and actually active locales
|
||||
*/
|
||||
function getActiveLocales() {
|
||||
const angularConfig = JSON.parse(readFileSync('angular.json', 'utf8'));
|
||||
|
||||
const supportedLocales = [
|
||||
angularConfig.projects.mempool.i18n.sourceLocale,
|
||||
...Object.keys(angularConfig.projects.mempool.i18n.locales),
|
||||
];
|
||||
|
||||
return supportedLocales.filter(locale => existsSync(`./dist/mempool/server/${locale}`));
|
||||
}
|
||||
|
||||
function app() {
|
||||
const server = express();
|
||||
|
||||
// proxy API to nginx
|
||||
server.get('/api/**', createProxyMiddleware({
|
||||
// @ts-ignore
|
||||
target: win.__env.NGINX_PROTOCOL + '://' + win.__env.NGINX_HOSTNAME + ':' + win.__env.NGINX_PORT,
|
||||
changeOrigin: true,
|
||||
}));
|
||||
|
||||
// map / and /en to en-US
|
||||
const defaultLocale = 'en-US';
|
||||
console.log(`serving default locale: ${defaultLocale}`);
|
||||
const appServerModule = require(`./dist/mempool/server/${defaultLocale}/main.js`);
|
||||
server.use('/', appServerModule.app(defaultLocale));
|
||||
server.use('/en', appServerModule.app(defaultLocale));
|
||||
|
||||
// map each locale to its localized main.js
|
||||
getActiveLocales().forEach(locale => {
|
||||
console.log('serving locale:', locale);
|
||||
const appServerModule = require(`./dist/mempool/server/${locale}/main.js`);
|
||||
|
||||
// map everything to itself
|
||||
server.use(`/${locale}`, appServerModule.app(locale));
|
||||
|
||||
});
|
||||
|
||||
return server;
|
||||
}
|
||||
|
||||
function run() {
|
||||
const port = process.env.PORT || 4000;
|
||||
|
||||
// Start up the Node server
|
||||
app().listen(port, () => {
|
||||
console.log(`Node Express server listening on port ${port}`);
|
||||
});
|
||||
}
|
||||
|
||||
run();
|
||||
@@ -1,160 +0,0 @@
|
||||
import 'zone.js/node';
|
||||
import './generated-config';
|
||||
|
||||
import { ngExpressEngine } from '@nguniversal/express-engine';
|
||||
import * as express from 'express';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as domino from 'domino';
|
||||
|
||||
import { join } from 'path';
|
||||
import { AppServerModule } from './src/main.server';
|
||||
import { APP_BASE_HREF } from '@angular/common';
|
||||
import { existsSync } from 'fs';
|
||||
|
||||
const template = fs.readFileSync(path.join(process.cwd(), 'dist/mempool/browser/en-US/', 'index.html')).toString();
|
||||
const win = domino.createWindow(template);
|
||||
|
||||
// @ts-ignore
|
||||
win.__env = global.__env;
|
||||
|
||||
// @ts-ignore
|
||||
win.matchMedia = () => {
|
||||
return {
|
||||
matches: true
|
||||
};
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
win.setTimeout = (fn) => { fn(); };
|
||||
win.document.body.scrollTo = (() => {});
|
||||
// @ts-ignore
|
||||
global['window'] = win;
|
||||
global['document'] = win.document;
|
||||
// @ts-ignore
|
||||
global['history'] = { state: { } };
|
||||
|
||||
global['localStorage'] = {
|
||||
getItem: () => '',
|
||||
setItem: () => {},
|
||||
removeItem: () => {},
|
||||
clear: () => {},
|
||||
length: 0,
|
||||
key: () => '',
|
||||
};
|
||||
|
||||
// The Express app is exported so that it can be used by serverless Functions.
|
||||
export function app(locale: string): express.Express {
|
||||
const server = express();
|
||||
const distFolder = join(process.cwd(), `dist/mempool/browser/${locale}`);
|
||||
const indexHtml = existsSync(join(distFolder, 'index.original.html')) ? 'index.original.html' : 'index';
|
||||
|
||||
// Our Universal express-engine (found @ https://github.com/angular/universal/tree/master/modules/express-engine)
|
||||
server.engine('html', ngExpressEngine({
|
||||
bootstrap: AppServerModule,
|
||||
}));
|
||||
|
||||
server.set('view engine', 'html');
|
||||
server.set('views', distFolder);
|
||||
|
||||
// only handle URLs that actually exist
|
||||
//server.get(locale, getLocalizedSSR(indexHtml));
|
||||
server.get('/', getLocalizedSSR(indexHtml));
|
||||
server.get('/tx/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/mempool-block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/address/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/blocks', getLocalizedSSR(indexHtml));
|
||||
server.get('/mining/pools', getLocalizedSSR(indexHtml));
|
||||
server.get('/mining/pool/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/graphs', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/tx/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/mempool-block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/address/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/asset/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/blocks', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/graphs', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/assets', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/api', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/tv', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/status', getLocalizedSSR(indexHtml));
|
||||
server.get('/liquid/about', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/tx/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/mempool-block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/address/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/blocks', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/mining/pools', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/graphs', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/api', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/tv', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/status', getLocalizedSSR(indexHtml));
|
||||
server.get('/testnet/about', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/tx/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/mempool-block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/address/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/blocks', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/mining/pools', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/graphs', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/api', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/tv', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/status', getLocalizedSSR(indexHtml));
|
||||
server.get('/signet/about', getLocalizedSSR(indexHtml));
|
||||
server.get('/bisq', getLocalizedSSR(indexHtml));
|
||||
server.get('/bisq/tx/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/bisq/blocks', getLocalizedSSR(indexHtml));
|
||||
server.get('/bisq/block/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/bisq/address/*', getLocalizedSSR(indexHtml));
|
||||
server.get('/bisq/stats', getLocalizedSSR(indexHtml));
|
||||
server.get('/bisq/about', getLocalizedSSR(indexHtml));
|
||||
server.get('/bisq/api', getLocalizedSSR(indexHtml));
|
||||
server.get('/about', getLocalizedSSR(indexHtml));
|
||||
server.get('/api', getLocalizedSSR(indexHtml));
|
||||
server.get('/tv', getLocalizedSSR(indexHtml));
|
||||
server.get('/status', getLocalizedSSR(indexHtml));
|
||||
server.get('/terms-of-service', getLocalizedSSR(indexHtml));
|
||||
|
||||
// fallback to static file handler so we send HTTP 404 to nginx
|
||||
server.get('/**', express.static(distFolder, { maxAge: '1y' }));
|
||||
|
||||
return server;
|
||||
}
|
||||
|
||||
function getLocalizedSSR(indexHtml) {
|
||||
return (req, res) => {
|
||||
res.render(indexHtml, {
|
||||
req,
|
||||
providers: [
|
||||
{ provide: APP_BASE_HREF, useValue: req.baseUrl }
|
||||
]
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// only used for development mode
|
||||
function run(): void {
|
||||
const port = process.env.PORT || 4000;
|
||||
|
||||
// Start up the Node server
|
||||
const server = app('en-US');
|
||||
server.listen(port, () => {
|
||||
console.log(`Node Express server listening on port ${port}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Webpack will replace 'require' with '__webpack_require__'
|
||||
// '__non_webpack_require__' is a proxy to Node 'require'
|
||||
// The below code is to ensure that the server is run only when not requiring the bundle.
|
||||
declare const __non_webpack_require__: NodeRequire;
|
||||
const mainModule = __non_webpack_require__.main;
|
||||
const moduleFilename = mainModule && mainModule.filename || '';
|
||||
if (moduleFilename === __filename || moduleFilename.includes('iisnode')) {
|
||||
run();
|
||||
}
|
||||
|
||||
export * from './src/main.server';
|
||||
@@ -1,21 +1,17 @@
|
||||
import { NgModule } from '@angular/core';
|
||||
import { Routes, RouterModule, PreloadAllModules } from '@angular/router';
|
||||
import { Routes, RouterModule } from '@angular/router';
|
||||
import { AppPreloadingStrategy } from './app.preloading-strategy'
|
||||
import { StartComponent } from './components/start/start.component';
|
||||
import { TransactionComponent } from './components/transaction/transaction.component';
|
||||
import { BlockComponent } from './components/block/block.component';
|
||||
import { BlockAuditComponent } from './components/block-audit/block-audit.component';
|
||||
import { BlockPreviewComponent } from './components/block/block-preview.component';
|
||||
import { AddressComponent } from './components/address/address.component';
|
||||
import { AddressPreviewComponent } from './components/address/address-preview.component';
|
||||
import { MasterPageComponent } from './components/master-page/master-page.component';
|
||||
import { MasterPagePreviewComponent } from './components/master-page-preview/master-page-preview.component';
|
||||
import { AboutComponent } from './components/about/about.component';
|
||||
import { StatusViewComponent } from './components/status-view/status-view.component';
|
||||
import { TermsOfServiceComponent } from './components/terms-of-service/terms-of-service.component';
|
||||
import { PrivacyPolicyComponent } from './components/privacy-policy/privacy-policy.component';
|
||||
import { TrademarkPolicyComponent } from './components/trademark-policy/trademark-policy.component';
|
||||
import { BisqMasterPageComponent } from './components/bisq-master-page/bisq-master-page.component';
|
||||
import { SponsorComponent } from './components/sponsor/sponsor.component';
|
||||
import { PushTransactionComponent } from './components/push-transaction/push-transaction.component';
|
||||
import { BlocksList } from './components/blocks-list/blocks-list.component';
|
||||
import { LiquidMasterPageComponent } from './components/liquid-master-page/liquid-master-page.component';
|
||||
@@ -25,6 +21,10 @@ import { AssetsComponent } from './components/assets/assets.component';
|
||||
import { AssetComponent } from './components/asset/asset.component';
|
||||
import { AssetsNavComponent } from './components/assets/assets-nav/assets-nav.component';
|
||||
|
||||
const browserWindow = window || {};
|
||||
// @ts-ignore
|
||||
const browserWindowEnv = browserWindow.__env || {};
|
||||
|
||||
let routes: Routes = [
|
||||
{
|
||||
path: 'testnet',
|
||||
@@ -32,7 +32,8 @@ let routes: Routes = [
|
||||
{
|
||||
path: '',
|
||||
pathMatch: 'full',
|
||||
loadChildren: () => import('./graphs/graphs.module').then(m => m.GraphsModule)
|
||||
loadChildren: () => import('./graphs/graphs.module').then(m => m.GraphsModule),
|
||||
data: { preload: true },
|
||||
},
|
||||
{
|
||||
path: '',
|
||||
@@ -72,12 +73,14 @@ let routes: Routes = [
|
||||
children: [],
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
ogImage: true,
|
||||
networkSpecific: true,
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
component: StartComponent,
|
||||
data: { networkSpecific: true },
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
@@ -88,6 +91,7 @@ let routes: Routes = [
|
||||
{
|
||||
path: 'block',
|
||||
component: StartComponent,
|
||||
data: { networkSpecific: true },
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
@@ -98,18 +102,10 @@ let routes: Routes = [
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'block-audit',
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockAuditComponent,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'docs',
|
||||
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
|
||||
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule),
|
||||
data: { preload: true },
|
||||
},
|
||||
{
|
||||
path: 'api',
|
||||
@@ -117,12 +113,14 @@ let routes: Routes = [
|
||||
},
|
||||
{
|
||||
path: 'lightning',
|
||||
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
|
||||
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule),
|
||||
data: { preload: browserWindowEnv && browserWindowEnv.LIGHTNING === true, networks: ['bitcoin'] },
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'status',
|
||||
data: { networks: ['bitcoin', 'liquid'] },
|
||||
component: StatusViewComponent
|
||||
},
|
||||
{
|
||||
@@ -181,11 +179,13 @@ let routes: Routes = [
|
||||
children: [],
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
ogImage: true,
|
||||
networkSpecific: true,
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
data: { networkSpecific: true },
|
||||
component: StartComponent,
|
||||
children: [
|
||||
{
|
||||
@@ -196,6 +196,7 @@ let routes: Routes = [
|
||||
},
|
||||
{
|
||||
path: 'block',
|
||||
data: { networkSpecific: true },
|
||||
component: StartComponent,
|
||||
children: [
|
||||
{
|
||||
@@ -207,15 +208,6 @@ let routes: Routes = [
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'block-audit',
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockAuditComponent,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'docs',
|
||||
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
|
||||
@@ -226,12 +218,14 @@ let routes: Routes = [
|
||||
},
|
||||
{
|
||||
path: 'lightning',
|
||||
data: { networks: ['bitcoin'] },
|
||||
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'status',
|
||||
data: { networks: ['bitcoin', 'liquid'] },
|
||||
component: StatusViewComponent
|
||||
},
|
||||
{
|
||||
@@ -287,11 +281,13 @@ let routes: Routes = [
|
||||
children: [],
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
ogImage: true,
|
||||
networkSpecific: true,
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
data: { networkSpecific: true },
|
||||
component: StartComponent,
|
||||
children: [
|
||||
{
|
||||
@@ -302,6 +298,7 @@ let routes: Routes = [
|
||||
},
|
||||
{
|
||||
path: 'block',
|
||||
data: { networkSpecific: true },
|
||||
component: StartComponent,
|
||||
children: [
|
||||
{
|
||||
@@ -313,15 +310,6 @@ let routes: Routes = [
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'block-audit',
|
||||
children: [
|
||||
{
|
||||
path: ':id',
|
||||
component: BlockAuditComponent
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'docs',
|
||||
loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule)
|
||||
@@ -332,51 +320,33 @@ let routes: Routes = [
|
||||
},
|
||||
{
|
||||
path: 'lightning',
|
||||
data: { networks: ['bitcoin'] },
|
||||
loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule)
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'preview',
|
||||
component: MasterPagePreviewComponent,
|
||||
children: [
|
||||
{
|
||||
path: 'block/:id',
|
||||
component: BlockPreviewComponent
|
||||
path: '',
|
||||
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
|
||||
},
|
||||
{
|
||||
path: 'testnet/block/:id',
|
||||
component: BlockPreviewComponent
|
||||
path: 'testnet',
|
||||
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
|
||||
},
|
||||
{
|
||||
path: 'signet/block/:id',
|
||||
component: BlockPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'testnet/address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'signet/address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
path: 'signet',
|
||||
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'status',
|
||||
data: { networks: ['bitcoin', 'liquid'] },
|
||||
component: StatusViewComponent
|
||||
},
|
||||
{
|
||||
path: 'sponsor',
|
||||
component: SponsorComponent,
|
||||
},
|
||||
{
|
||||
path: '',
|
||||
loadChildren: () => import('./graphs/graphs.module').then(m => m.GraphsModule)
|
||||
@@ -387,10 +357,6 @@ let routes: Routes = [
|
||||
},
|
||||
];
|
||||
|
||||
const browserWindow = window || {};
|
||||
// @ts-ignore
|
||||
const browserWindowEnv = browserWindow.__env || {};
|
||||
|
||||
if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'bisq') {
|
||||
routes = [{
|
||||
path: '',
|
||||
@@ -442,11 +408,13 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
children: [],
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
ogImage: true,
|
||||
networkSpecific: true,
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
data: { networkSpecific: true },
|
||||
component: StartComponent,
|
||||
children: [
|
||||
{
|
||||
@@ -457,6 +425,7 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
},
|
||||
{
|
||||
path: 'block',
|
||||
data: { networkSpecific: true },
|
||||
component: StartComponent,
|
||||
children: [
|
||||
{
|
||||
@@ -470,18 +439,22 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
},
|
||||
{
|
||||
path: 'assets',
|
||||
data: { networks: ['liquid'] },
|
||||
component: AssetsNavComponent,
|
||||
children: [
|
||||
{
|
||||
path: 'all',
|
||||
data: { networks: ['liquid'] },
|
||||
component: AssetsComponent,
|
||||
},
|
||||
{
|
||||
path: 'asset/:id',
|
||||
data: { networkSpecific: true },
|
||||
component: AssetComponent
|
||||
},
|
||||
{
|
||||
path: 'group/:id',
|
||||
data: { networkSpecific: true },
|
||||
component: AssetGroupComponent
|
||||
},
|
||||
{
|
||||
@@ -502,6 +475,7 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
},
|
||||
{
|
||||
path: 'status',
|
||||
data: { networks: ['bitcoin', 'liquid'] },
|
||||
component: StatusViewComponent
|
||||
},
|
||||
{
|
||||
@@ -552,11 +526,13 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
children: [],
|
||||
component: AddressComponent,
|
||||
data: {
|
||||
ogImage: true
|
||||
ogImage: true,
|
||||
networkSpecific: true,
|
||||
}
|
||||
},
|
||||
{
|
||||
path: 'tx',
|
||||
data: { networkSpecific: true },
|
||||
component: StartComponent,
|
||||
children: [
|
||||
{
|
||||
@@ -567,6 +543,7 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
},
|
||||
{
|
||||
path: 'block',
|
||||
data: { networkSpecific: true },
|
||||
component: StartComponent,
|
||||
children: [
|
||||
{
|
||||
@@ -580,22 +557,27 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
},
|
||||
{
|
||||
path: 'assets',
|
||||
data: { networks: ['liquid'] },
|
||||
component: AssetsNavComponent,
|
||||
children: [
|
||||
{
|
||||
path: 'featured',
|
||||
data: { networkSpecific: true },
|
||||
component: AssetsFeaturedComponent,
|
||||
},
|
||||
{
|
||||
path: 'all',
|
||||
data: { networks: ['liquid'] },
|
||||
component: AssetsComponent,
|
||||
},
|
||||
{
|
||||
path: 'asset/:id',
|
||||
data: { networkSpecific: true },
|
||||
component: AssetComponent
|
||||
},
|
||||
{
|
||||
path: 'group/:id',
|
||||
data: { networkSpecific: true },
|
||||
component: AssetGroupComponent
|
||||
},
|
||||
{
|
||||
@@ -616,36 +598,22 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
},
|
||||
{
|
||||
path: 'preview',
|
||||
component: MasterPagePreviewComponent,
|
||||
children: [
|
||||
{
|
||||
path: 'block/:id',
|
||||
component: BlockPreviewComponent
|
||||
path: '',
|
||||
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
|
||||
},
|
||||
{
|
||||
path: 'testnet/block/:id',
|
||||
component: BlockPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
},
|
||||
{
|
||||
path: 'testnet/address/:id',
|
||||
children: [],
|
||||
component: AddressPreviewComponent
|
||||
path: 'testnet',
|
||||
loadChildren: () => import('./previews.module').then(m => m.PreviewsModule)
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
path: 'status',
|
||||
data: { networks: ['bitcoin', 'liquid']},
|
||||
component: StatusViewComponent
|
||||
},
|
||||
{
|
||||
path: 'sponsor',
|
||||
component: SponsorComponent,
|
||||
},
|
||||
{
|
||||
path: '',
|
||||
loadChildren: () => import('./graphs/graphs.module').then(m => m.GraphsModule)
|
||||
@@ -659,10 +627,10 @@ if (browserWindowEnv && browserWindowEnv.BASE_MODULE === 'liquid') {
|
||||
|
||||
@NgModule({
|
||||
imports: [RouterModule.forRoot(routes, {
|
||||
initialNavigation: 'enabled',
|
||||
initialNavigation: 'enabledBlocking',
|
||||
scrollPositionRestoration: 'enabled',
|
||||
anchorScrolling: 'enabled',
|
||||
preloadingStrategy: PreloadAllModules
|
||||
preloadingStrategy: AppPreloadingStrategy
|
||||
})],
|
||||
})
|
||||
export class AppRoutingModule { }
|
||||
@@ -79,7 +79,7 @@ export const poolsColor = {
'binancepool': '#1E88E5',
'viabtc': '#039BE5',
'btccom': '#00897B',
'slushpool': '#00ACC1',
'braiinspool': '#00ACC1',
'sbicrypto': '#43A047',
'marapool': '#7CB342',
'luxor': '#C0CA33',
@@ -1,5 +1,5 @@
import { BrowserModule, BrowserTransferStateModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { ModuleWithProviders, NgModule } from '@angular/core';
import { HttpClientModule, HTTP_INTERCEPTORS } from '@angular/common/http';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { AppRoutingModule } from './app-routing.module';
@@ -18,6 +18,24 @@ import { LanguageService } from './services/language.service';
import { FiatShortenerPipe } from './shared/pipes/fiat-shortener.pipe';
import { ShortenStringPipe } from './shared/pipes/shorten-string-pipe/shorten-string.pipe';
import { CapAddressPipe } from './shared/pipes/cap-address-pipe/cap-address-pipe';
import { AppPreloadingStrategy } from './app.preloading-strategy';

const providers = [
ElectrsApiService,
StateService,
WebsocketService,
AudioService,
SeoService,
OpenGraphService,
StorageService,
EnterpriseService,
LanguageService,
ShortenStringPipe,
FiatShortenerPipe,
CapAddressPipe,
AppPreloadingStrategy,
{ provide: HTTP_INTERCEPTORS, useClass: HttpCacheInterceptor, multi: true }
];

@NgModule({
declarations: [
@@ -31,21 +49,17 @@ import { CapAddressPipe } from './shared/pipes/cap-address-pipe/cap-address-pipe
BrowserAnimationsModule,
SharedModule,
],
providers: [
ElectrsApiService,
StateService,
WebsocketService,
AudioService,
SeoService,
OpenGraphService,
StorageService,
EnterpriseService,
LanguageService,
ShortenStringPipe,
FiatShortenerPipe,
CapAddressPipe,
{ provide: HTTP_INTERCEPTORS, useClass: HttpCacheInterceptor, multi: true }
],
providers: providers,
bootstrap: [AppComponent]
})
export class AppModule { }

@NgModule({})
export class MempoolSharedModule{
static forRoot(): ModuleWithProviders<MempoolSharedModule> {
return {
ngModule: AppModule,
providers: providers
};
}
}
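The refactor above moves the provider list into a shared constant and exposes it through MempoolSharedModule.forRoot(). A hedged example of how a consuming module might pull the mempool services in through that entry point (the host module below is hypothetical, not part of this diff):

import { NgModule } from '@angular/core';
import { MempoolSharedModule } from './app.module';

// Hypothetical host module: importing MempoolSharedModule.forRoot() registers
// AppModule together with the shared providers (StateService, WebsocketService,
// the HTTP cache interceptor, ...) exactly once at the root injector.
@NgModule({
  imports: [
    MempoolSharedModule.forRoot(),
  ],
})
export class HostAppModule { }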
10 frontend/src/app/app.preloading-strategy.ts Normal file
@@ -0,0 +1,10 @@
import { PreloadingStrategy, Route } from '@angular/router';
import { Observable, timer, mergeMap, of } from 'rxjs';

export class AppPreloadingStrategy implements PreloadingStrategy {
preload(route: Route, load: Function): Observable<any> {
return route.data && route.data.preload
? timer(1500).pipe(mergeMap(() => load()))
: of(null);
}
}
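AppPreloadingStrategy preloads only the lazy routes that opt in with data: { preload: true }, and it delays the download by 1.5 seconds so it does not compete with the initial render. Both sides of the wiring appear in the app-routing.module.ts hunks earlier in this diff; a condensed, slightly simplified sketch:

import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { AppPreloadingStrategy } from './app.preloading-strategy';

const routes: Routes = [
  {
    path: 'docs',
    loadChildren: () => import('./docs/docs.module').then(m => m.DocsModule),
    data: { preload: true },  // fetched ~1.5s after bootstrap by the strategy
  },
  {
    path: 'lightning',
    loadChildren: () => import('./lightning/lightning.module').then(m => m.LightningModule),
    // no preload flag: only downloaded when the user navigates here
  },
];

@NgModule({
  imports: [RouterModule.forRoot(routes, { preloadingStrategy: AppPreloadingStrategy })],
  exports: [RouterModule],
})
export class ExampleRoutingModule { }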
@@ -1,11 +1,11 @@
import { Component, OnInit, OnDestroy } from '@angular/core';
import { SeoService } from 'src/app/services/seo.service';
import { SeoService } from '../../services/seo.service';
import { switchMap, filter, catchError } from 'rxjs/operators';
import { ParamMap, ActivatedRoute } from '@angular/router';
import { Subscription, of } from 'rxjs';
import { BisqTransaction } from '../bisq.interfaces';
import { BisqApiService } from '../bisq-api.service';
import { WebsocketService } from 'src/app/services/websocket.service';
import { WebsocketService } from '../../services/websocket.service';

@Component({
selector: 'app-bisq-address',

@@ -1,14 +1,14 @@
import { Component, OnInit, OnDestroy } from '@angular/core';
import { BisqBlock } from 'src/app/bisq/bisq.interfaces';
import { BisqBlock } from '../../bisq/bisq.interfaces';
import { Location } from '@angular/common';
import { BisqApiService } from '../bisq-api.service';
import { ActivatedRoute, ParamMap, Router } from '@angular/router';
import { Subscription, of } from 'rxjs';
import { switchMap, catchError } from 'rxjs/operators';
import { SeoService } from 'src/app/services/seo.service';
import { ElectrsApiService } from 'src/app/services/electrs-api.service';
import { SeoService } from '../../services/seo.service';
import { ElectrsApiService } from '../../services/electrs-api.service';
import { HttpErrorResponse } from '@angular/common/http';
import { WebsocketService } from 'src/app/services/websocket.service';
import { WebsocketService } from '../../services/websocket.service';

@Component({
selector: 'app-bisq-block',
Some files were not shown because too many files have changed in this diff.