Compare commits: v2.4.0-bet...wiz/202205 (851 commits)
Commit list (Author | SHA1 | Date): 851 rows, beginning with 04eb3f3c22 and ending with 4d274a3cec. Only the SHA1 column is present in this capture; the author, date, and commit-message cells are missing, so the individual rows are omitted here.
.github/workflows/ci.yml (new file, vendored, +94)
@@ -0,0 +1,94 @@
name: CI Pipeline for the Backend and Frontend

on:
  pull_request:
    types: [opened, review_requested, synchronize]

jobs:
  backend:
    if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
    strategy:
      matrix:
        node: ["16.16.0", "18.5.0"]
        flavor: ["dev", "prod"]
      fail-fast: false
    runs-on: "ubuntu-latest"

    name: Backend (${{ matrix.flavor }}) - node ${{ matrix.node }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          path: ${{ matrix.node }}/${{ matrix.flavor }}

      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node }}
          registry-url: "https://registry.npmjs.org"

      - name: Install
        if: ${{ matrix.flavor == 'dev'}}
        run: npm ci
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend

      - name: Install (Prod dependencies only)
        if: ${{ matrix.flavor == 'prod'}}
        run: npm ci --omit=dev --omit=optional
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend

      - name: Lint
        if: ${{ matrix.flavor == 'dev'}}
        run: npm run lint
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend

      # - name: Test
      #   run: npm run test

      - name: Build
        run: npm run build
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/backend

  frontend:
    if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
    strategy:
      matrix:
        node: ["16.15.0", "18.5.0"]
        flavor: ["dev", "prod"]
      fail-fast: false
    runs-on: "ubuntu-latest"

    name: Frontend (${{ matrix.flavor }}) - node ${{ matrix.node }}
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          path: ${{ matrix.node }}/${{ matrix.flavor }}

      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: ${{ matrix.node }}
          registry-url: "https://registry.npmjs.org"

      - name: Install (Prod dependencies only)
        run: npm ci --omit=dev --omit=optional
        if: ${{ matrix.flavor == 'prod'}}
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend

      - name: Install
        if: ${{ matrix.flavor == 'dev'}}
        run: npm ci
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend

      - name: Lint
        if: ${{ matrix.flavor == 'dev'}}
        run: npm run lint
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend

      # - name: Test
      #   run: npm run test

      - name: Build
        run: npm run build
        working-directory: ${{ matrix.node }}/${{ matrix.flavor }}/frontend
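For orientation, one cell of the backend matrix above boils down to a handful of npm commands. The sketch below is a rough local equivalent (it assumes a suitable Node version is already installed and skips the per-matrix checkout paths that CI uses):

```
# Rough local equivalent of one backend matrix cell (dev flavor).
cd backend
npm ci              # the prod flavor instead runs: npm ci --omit=dev --omit=optional
npm run lint        # lint runs only for the dev flavor in CI
npm run build
```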
.github/workflows/cypress.yml (vendored, 92 lines changed; removed and added lines appear interleaved below, in the order shown by the compare view)
@@ -1,88 +1,58 @@
name: Cypress Tests

on: [push, pull_request]
on:
pull_request:
types: [ opened, review_requested, synchronize ]
jobs:
cypress:
runs-on: ${{ matrix.os }}
if: "!contains(github.event.pull_request.labels.*.name, 'ops') && !contains(github.head_ref, 'ops/')"
runs-on: "ubuntu-latest"
strategy:
fail-fast: false
matrix:
containers: [1, 2, 3, 4, 5]
os: ["ubuntu-latest"]
browser: [chrome]
name: E2E tests on ${{ matrix.browser }} - ${{ matrix.os }}
module: ["mempool", "liquid", "bisq"]
include:
- module: "mempool"
spec: |
cypress/e2e/mainnet/*.spec.ts
cypress/e2e/signet/*.spec.ts
cypress/e2e/testnet/*.spec.ts
- module: "liquid"
spec: |
cypress/e2e/liquid/liquid.spec.ts
cypress/e2e/liquidtestnet/liquidtestnet.spec.ts
- module: "bisq"
spec: |
cypress/e2e/bisq/bisq.spec.ts

name: E2E tests for ${{ matrix.module }}
steps:
- name: Checkout
uses: actions/checkout@v2
with:
path: ${{ matrix.module }}

- name: Setup node
uses: actions/setup-node@v2
with:
node-version: 16.15.0
cache: 'npm'
cache-dependency-path: frontend/package-lock.json
- name: ${{ matrix.browser }} browser tests (Mempool)
uses: cypress-io/github-action@v4
with:
tag: ${{ github.event_name }}
working-directory: frontend
build: npm run config:defaults:mempool
start: npm run start:local-staging
wait-on: 'http://localhost:4200'
wait-on-timeout: 120
record: true
parallel: true
spec: |
cypress/e2e/mainnet/*.spec.ts
cypress/e2e/signet/*.spec.ts
cypress/e2e/testnet/*.spec.ts
group: Tests on ${{ matrix.browser }} (Mempool)
browser: ${{ matrix.browser }}
ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
env:
COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}
cache-dependency-path: ${{ matrix.module }}/frontend/package-lock.json

- name: ${{ matrix.browser }} browser tests (Liquid)
- name: Chrome browser tests (${{ matrix.module }})
uses: cypress-io/github-action@v4
if: always()
with:
tag: ${{ github.event_name }}
working-directory: frontend
build: npm run config:defaults:liquid
working-directory: ${{ matrix.module }}/frontend
build: npm run config:defaults:${{ matrix.module }}
start: npm run start:local-staging
wait-on: 'http://localhost:4200'
wait-on-timeout: 120
record: true
parallel: true
spec: |
cypress/e2e/liquid/liquid.spec.ts
cypress/e2e/liquidtestnet/liquidtestnet.spec.ts
group: Tests on ${{ matrix.browser }} (Liquid)
browser: ${{ matrix.browser }}
ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
env:
COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
CYPRESS_PROJECT_ID: ${{ secrets.CYPRESS_PROJECT_ID }}

- name: ${{ matrix.browser }} browser tests (Bisq)
uses: cypress-io/github-action@v4
if: always()
with:
tag: ${{ github.event_name }}
working-directory: frontend
build: npm run config:defaults:bisq
start: npm run start:local-staging
wait-on: 'http://localhost:4200'
wait-on-timeout: 120
record: true
parallel: true
spec: cypress/e2e/bisq/bisq.spec.ts
group: Tests on ${{ matrix.browser }} (Bisq)
browser: ${{ matrix.browser }}
spec: ${{ matrix.spec }}
group: Tests on Chrome (${{ matrix.module }})
browser: "chrome"
ci-build-id: '${{ github.sha }}-${{ github.workflow }}-${{ github.event_name }}'
env:
COMMIT_INFO_MESSAGE: ${{ github.event.pull_request.title }}
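The reworked workflow fans the same Cypress job out over three modules (mempool, liquid, bisq), each with its own spec list. As a hedged sketch, running one module's specs by hand with the same npm scripts the workflow calls, assuming the dev server is already reachable on port 4200, might look like this:

```
# Hypothetical local run of the bisq specs; assumes the app is served on http://localhost:4200.
cd frontend
npm run config:defaults:bisq
npx cypress run --browser chrome --spec "cypress/e2e/bisq/bisq.spec.ts"
```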
.gitignore (vendored, +1)
@@ -2,3 +2,4 @@ sitemap
data
docker-compose.yml
backend/mempool-config.json
*.swp
@@ -6,6 +6,8 @@ In order to clarify the intellectual property license granted with Contributions

When submitting a pull request for the first time, please create a file with a name like `/contributors/{github_username}.txt`, and in the content of that file indicate your agreement to the Contributor License Agreement terms below. An example of what that file should contain can be seen in wiz's agreement file. (This method of CLA "signing" is borrowed from Medium's open source project.)

Also, please GPG-sign all your commits (`git config commit.gpgsign true`).

# Contributor License Agreement

Last Updated: January 25, 2022
LICENSE (23 changed lines)
@@ -1,5 +1,5 @@
The Mempool Open Source Project
Copyright (c) 2019-2022 The Mempool Open Source Project Developers
The Mempool Open Source Project™
Copyright (c) 2019-2022 Mempool Space K.K. and other shadowy super-coders

This program is free software; you can redistribute it and/or modify it under
the terms of (at your option) either:
@@ -12,13 +12,18 @@ the terms of (at your option) either:
Foundation, either version 3 of the License or any later version approved by a
proxy statement published on <https://mempool.space/about>.

However, this copyright license does not include an implied right or license to
use our trademarks: The Mempool Open Source Project™, mempool.space™, the
mempool Logo™, the mempool.space Vertical Logo™, the mempool.space Horizontal
Logo™, the mempool Square Logo™, and the mempool Blocks logo™ are registered
trademarks or trademarks of Mempool Space K.K in Japan, the United States,
and/or other countries. See our full Trademark Policy and Guidelines for more
details, published on <https://mempool.space/trademark-policy>.
However, the above copyright licenses do not include an implied right or license
to use any trademarks, service marks, logos, or trade names of Mempool Space K.K.
or any other contributor to The Mempool Open Source Project.

The Mempool Open Source Project™, Mempool Accelerator™, mempool.space™,
the mempool Logo, the mempool Square logo, the mempool Blocks logo,
the mempool.space Vertical Logo, and the mempool.space Horizontal logo
are registered trademarks or trademarks of Mempool Space K.K in Japan,
the United States, and/or other countries.

See our full Trademark Policy and Guidelines for more details, published on
<https://mempool.space/trademark-policy>.

This program is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
@@ -29,5 +29,5 @@ Mempool can be conveniently installed on the following full-node distros:
Mempool can be installed in other ways too, but we only recommend doing so if you're a developer, have experience managing servers, or otherwise know what you're doing.

- See the [`docker/`](./docker/) directory for instructions on deploying Mempool with Docker.
- See the [`backend/`](./backend/) and [`frontend/`](./frontend/) directories for manual install instructions oriented for developers and small-scale deployments.
- See the [`production/`](./production/) directory for guidance on setting up a more serious Mempool instance designed for high performance at scale.
- See the [`backend/`](./backend/) and [`frontend/`](./frontend/) directories for manual install instructions oriented for developers.
- See the [`production/`](./production/) directory for guidance on setting up a more serious Mempool instance designed for high performance at scale.
backend/.editorconfig (new file, +17)
@@ -0,0 +1,17 @@
# Editor configuration, see https://editorconfig.org
root = true

[*]
charset = utf-8
indent_style = space
indent_size = 2
insert_final_newline = true
trim_trailing_whitespace = true

[*.ts]
quote_type = single

[*.md]
max_line_length = off
trim_trailing_whitespace = false
backend/.eslintignore (new file, +2)
@@ -0,0 +1,2 @@
node_modules
dist
backend/.eslintrc (new file, +36)
@@ -0,0 +1,36 @@
{
  "root": true,
  "parser": "@typescript-eslint/parser",
  "plugins": [
    "@typescript-eslint"
  ],
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/eslint-recommended",
    "plugin:@typescript-eslint/recommended",
    "prettier"
  ],
  "rules": {
    "@typescript-eslint/ban-ts-comment": 1,
    "@typescript-eslint/ban-types": 1,
    "@typescript-eslint/no-empty-function": 1,
    "@typescript-eslint/no-explicit-any": 1,
    "@typescript-eslint/no-inferrable-types": 0,
    "@typescript-eslint/no-namespace": 1,
    "@typescript-eslint/no-this-alias": 1,
    "@typescript-eslint/no-var-requires": 1,
    "@typescript-eslint/explicit-function-return-type": 1,
    "no-console": 1,
    "no-constant-condition": 1,
    "no-dupe-else-if": 1,
    "no-empty": 1,
    "no-prototype-builtins": 1,
    "no-self-assign": 1,
    "no-useless-catch": 1,
    "no-var": 1,
    "prefer-const": 1,
    "prefer-rest-params": 1,
    "quotes": [1, "single", { "allowTemplateLiterals": true }],
    "semi": 1
  }
}
backend/.gitignore (vendored, modified)
@@ -1,9 +1,9 @@
# See http://help.github.com/ignore-files/ for more about ignoring files.

# production config and external assets
*.json
!mempool-config.sample.json

mempool-config.json
pools.json
icons.json

# compiled output
backend/.prettierignore (new file, +2)
@@ -0,0 +1,2 @@
node_modules
package-lock.json
backend/.prettierrc (new file, +6)
@@ -0,0 +1,6 @@
{
  "endOfLine": "lf",
  "printWidth": 80,
  "tabWidth": 2,
  "trailingComma": "es5"
}
@@ -1,6 +1,6 @@
# Mempool Backend

These instructions are mostly intended for developers, but can be used as a basis for personal or small-scale production setups.
These instructions are mostly intended for developers.

If you choose to use these instructions for a production setup, be aware that you will still probably need to do additional configuration for your specific OS, environment, use-case, etc. We do our best here to provide a good starting point, but only proceed if you know what you're doing. Mempool does not provide support for custom setups.

@@ -77,13 +77,13 @@ Query OK, 0 rows affected (0.00 sec)

#### Build

_Make sure to use Node.js 16.15 and npm 7._
_Make sure to use Node.js 16.10 and npm 7._

Install dependencies with `npm` and build the backend:

```
cd backend
npm install # add --prod for production
npm install
npm run build
```

@@ -159,3 +159,57 @@ nodemon src/index.ts --ignore cache/ --ignore pools.json
```

`nodemon` should be in npm's global binary folder. If needed, you can determine where that is with `npm -g bin`.

### Useful Regtest Commands

Helpful link: https://gist.github.com/System-Glitch/cb4e87bf1ae3fec9925725bb3ebe223a

Run bitcoind on regtest:
```
bitcoind -regtest -rpcport=8332
```

Create a new wallet, if needed:
```
bitcoin-cli -regtest -rpcport=8332 createwallet test
```

Load the wallet (this command may take a while if you have a lot of UTXOs):
```
bitcoin-cli -regtest -rpcport=8332 loadwallet test
```

Get a new address:
```
address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress)
```

Mine blocks to the previously generated address. You need at least 101 blocks before you can spend. This will take some time to execute (~1 min):
```
bitcoin-cli -regtest -rpcport=8332 generatetoaddress 101 $address
```

Send 0.1 BTC at 5 sat/vB to another address:
```
./src/bitcoin-cli -named -regtest -rpcport=8332 sendtoaddress address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress) amount=0.1 fee_rate=5
```

See more examples of `sendtoaddress`:
```
./src/bitcoin-cli sendtoaddress # will print the help
```

Mini script to generate transactions with a random fee rate (between 1 and 100 sat/vB). It's slow, so don't expect to use this to test mempool spam unless you let it run for a long time, or maybe run it with multiple regtest nodes connected to each other.
```
#!/bin/bash
address=$(./src/bitcoin-cli -regtest -rpcport=8332 getnewaddress)
for i in {1..1000000}
do
./src/bitcoin-cli -regtest -rpcport=8332 -named sendtoaddress address=$address amount=0.01 fee_rate=$(jot -r 1 1 100)
done
```

Generate a block at a regular interval (every 10 seconds in this example):
```
watch -n 10 "./src/bitcoin-cli -regtest -rpcport=8332 generatetoaddress 1 $address"
```
@@ -13,13 +13,17 @@
"INITIAL_BLOCKS_AMOUNT": 8,
"MEMPOOL_BLOCKS_AMOUNT": 8,
"INDEXING_BLOCKS_AMOUNT": 11000,
"BLOCKS_SUMMARIES_INDEXING": false,
"PRICE_FEED_UPDATE_INTERVAL": 600,
"USE_SECOND_NODE_FOR_MINFEE": false,
"EXTERNAL_ASSETS": [],
"EXTERNAL_MAX_RETRY": 1,
"EXTERNAL_RETRY_INTERVAL": 0,
"USER_AGENT": "mempool",
"STDOUT_LOG_MIN_PRIORITY": "debug"
"STDOUT_LOG_MIN_PRIORITY": "debug",
"AUTOMATIC_BLOCK_REINDEXING": false,
"POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
"POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master"
},
"CORE_RPC": {
"HOST": "127.0.0.1",
@@ -61,10 +65,25 @@
"ENABLED": true,
"TX_PER_SECOND_SAMPLE_PERIOD": 150
},
"MAXMIND": {
"ENABLED": false,
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
},
"BISQ": {
"ENABLED": false,
"DATA_PATH": "/bisq/statsnode-data/btc_mainnet/db"
},
"LIGHTNING": {
"ENABLED": false,
"BACKEND": "lnd"
},
"LND": {
"TLS_CERT_PATH": "tls.cert",
"MACAROON_PATH": "readonly.macaroon",
"REST_API_URL": "https://localhost:8080"
},
"SOCKS5PROXY": {
"ENABLED": false,
"USE_ONION": true,
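The new POOLS_JSON_URL default points at the shared mining-pools repository. If you override it, a quick curl is enough to confirm the URL serves JSON (nothing mempool-specific is assumed here):

```
# Fetch the first few bytes of the pools definition to verify the URL is reachable.
curl -sL "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json" | head -c 200
```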
backend/package-lock.json (generated, 3016 lines changed): diff suppressed because it is too large.
@@ -1,6 +1,6 @@
{
"name": "mempool-backend",
"version": "2.4.0-dev",
"version": "2.5.0-dev",
"description": "Bitcoin mempool visualizer and blockchain explorer backend",
"license": "GNU Affero General Public License v3.0",
"homepage": "https://mempool.space",
@@ -16,33 +16,44 @@
"mempool",
"blockchain",
"explorer",
"liquid"
"liquid",
"lightning"
],
"main": "index.ts",
"scripts": {
"ng": "./node_modules/@angular/cli/bin/ng",
"tsc": "./node_modules/typescript/bin/tsc",
"build": "npm run tsc",
"start": "node --max-old-space-size=2048 dist/index.js",
"start-production": "node --max-old-space-size=4096 dist/index.js",
"test": "echo \"Error: no test specified\" && exit 1"
"test": "echo \"Error: no test specified\" && exit 1",
"lint": "./node_modules/.bin/eslint . --ext .ts",
"lint:fix": "./node_modules/.bin/eslint . --ext .ts --fix",
"prettier": "./node_modules/.bin/prettier --write \"src/**/*.{js,ts}\""
},
"dependencies": {
"@mempool/electrum-client": "^1.1.7",
"@types/node": "^16.11.41",
"axios": "~0.27.2",
"bitcoinjs-lib": "6.0.1",
"crypto-js": "^4.0.0",
"express": "^4.18.0",
"fast-xml-parser": "^4.0.9",
"maxmind": "^4.3.6",
"mysql2": "2.3.3",
"node-worker-threads-pool": "^1.5.1",
"socks-proxy-agent": "^6.2.0",
"typescript": "~4.7.2",
"ws": "~8.7.0"
"socks-proxy-agent": "~7.0.0",
"typescript": "~4.7.4",
"ws": "~8.8.0"
},
"devDependencies": {
"@types/compression": "^1.7.2",
"@types/ws": "~8.5.3",
"@types/crypto-js": "^4.1.1",
"@types/express": "^4.17.13",
"tslint": "^6.1.0"
"@types/ws": "~8.5.3",
"@typescript-eslint/eslint-plugin": "^5.30.5",
"@typescript-eslint/parser": "^5.30.5",
"eslint": "^8.19.0",
"eslint-config-prettier": "^8.5.0",
"prettier": "^2.7.1"
}
}
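With the move from tslint to eslint and prettier above, the backend gains dedicated scripts for linting and formatting. Typical usage from the backend directory, assuming dependencies are already installed:

```
npm run lint        # eslint over all .ts files
npm run lint:fix    # same, with --fix applied
npm run prettier    # rewrite src/**/*.{js,ts} with prettier
```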
backend/src/api/bisq/bisq.routes.ts (new file, +381)
@@ -0,0 +1,381 @@
import { Application, Request, Response } from 'express';
import config from '../../config';
import { RequiredSpec } from '../../mempool.interfaces';
import bisq from './bisq';
import { MarketsApiError } from './interfaces';
import marketsApi from './markets-api';

class BisqRoutes {
  public initRoutes(app: Application) {
    app
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/stats', this.getBisqStats)
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/tx/:txId', this.getBisqTransaction)
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/block/:hash', this.getBisqBlock)
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/tip/height', this.getBisqTip)
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/:index/:length', this.getBisqBlocks)
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/address/:address', this.getBisqAddress)
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/txs/:index/:length', this.getBisqTransactions)
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/currencies', this.getBisqMarketCurrencies.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/depth', this.getBisqMarketDepth.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/hloc', this.getBisqMarketHloc.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/markets', this.getBisqMarketMarkets.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/offers', this.getBisqMarketOffers.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/ticker', this.getBisqMarketTicker.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/trades', this.getBisqMarketTrades.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes', this.getBisqMarketVolumes.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes/7d', this.getBisqMarketVolumes7d.bind(this))
    ;
  }

  private getBisqStats(req: Request, res: Response) {
    const result = bisq.getStats();
    res.json(result);
  }

  private getBisqTip(req: Request, res: Response) {
    const result = bisq.getLatestBlockHeight();
    res.type('text/plain');
    res.send(result.toString());
  }

  private getBisqTransaction(req: Request, res: Response) {
    const result = bisq.getTransaction(req.params.txId);
    if (result) {
      res.json(result);
    } else {
      res.status(404).send('Bisq transaction not found');
    }
  }

  private getBisqTransactions(req: Request, res: Response) {
    const types: string[] = [];
    req.query.types = req.query.types || [];
    if (!Array.isArray(req.query.types)) {
      res.status(500).send('Types is not an array');
      return;
    }

    for (const _type in req.query.types) {
      if (typeof req.query.types[_type] === 'string') {
        types.push(req.query.types[_type].toString());
      }
    }

    const index = parseInt(req.params.index, 10) || 0;
    const length = parseInt(req.params.length, 10) > 100 ? 100 : parseInt(req.params.length, 10) || 25;
    const [transactions, count] = bisq.getTransactions(index, length, types);
    res.header('X-Total-Count', count.toString());
    res.json(transactions);
  }

  private getBisqBlock(req: Request, res: Response) {
    const result = bisq.getBlock(req.params.hash);
    if (result) {
      res.json(result);
    } else {
      res.status(404).send('Bisq block not found');
    }
  }

  private getBisqBlocks(req: Request, res: Response) {
    const index = parseInt(req.params.index, 10) || 0;
    const length = parseInt(req.params.length, 10) > 100 ? 100 : parseInt(req.params.length, 10) || 25;
    const [transactions, count] = bisq.getBlocks(index, length);
    res.header('X-Total-Count', count.toString());
    res.json(transactions);
  }

  private getBisqAddress(req: Request, res: Response) {
    const result = bisq.getAddress(req.params.address.substr(1));
    if (result) {
      res.json(result);
    } else {
      res.status(404).send('Bisq address not found');
    }
  }

  private getBisqMarketCurrencies(req: Request, res: Response) {
    const constraints: RequiredSpec = {
      'type': {
        required: false,
        types: ['crypto', 'fiat', 'all']
      },
    };

    const p = this.parseRequestParameters(req.query, constraints);
    if (p.error) {
      res.status(400).json(this.getBisqMarketErrorResponse(p.error));
      return;
    }

    const result = marketsApi.getCurrencies(p.type);
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketCurrencies error'));
    }
  }

  private getBisqMarketDepth(req: Request, res: Response) {
    const constraints: RequiredSpec = {
      'market': {
        required: true,
        types: ['@string']
      },
    };

    const p = this.parseRequestParameters(req.query, constraints);
    if (p.error) {
      res.status(400).json(this.getBisqMarketErrorResponse(p.error));
      return;
    }

    const result = marketsApi.getDepth(p.market);
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketDepth error'));
    }
  }

  private getBisqMarketMarkets(req: Request, res: Response) {
    const result = marketsApi.getMarkets();
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketMarkets error'));
    }
  }

  private getBisqMarketTrades(req: Request, res: Response) {
    const constraints: RequiredSpec = {
      'market': {
        required: true,
        types: ['@string']
      },
      'timestamp_from': {
        required: false,
        types: ['@number']
      },
      'timestamp_to': {
        required: false,
        types: ['@number']
      },
      'trade_id_to': {
        required: false,
        types: ['@string']
      },
      'trade_id_from': {
        required: false,
        types: ['@string']
      },
      'direction': {
        required: false,
        types: ['buy', 'sell']
      },
      'limit': {
        required: false,
        types: ['@number']
      },
      'sort': {
        required: false,
        types: ['asc', 'desc']
      }
    };

    const p = this.parseRequestParameters(req.query, constraints);
    if (p.error) {
      res.status(400).json(this.getBisqMarketErrorResponse(p.error));
      return;
    }

    const result = marketsApi.getTrades(p.market, p.timestamp_from,
      p.timestamp_to, p.trade_id_from, p.trade_id_to, p.direction, p.limit, p.sort);
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketTrades error'));
    }
  }

  private getBisqMarketOffers(req: Request, res: Response) {
    const constraints: RequiredSpec = {
      'market': {
        required: true,
        types: ['@string']
      },
      'direction': {
        required: false,
        types: ['buy', 'sell']
      },
    };

    const p = this.parseRequestParameters(req.query, constraints);
    if (p.error) {
      res.status(400).json(this.getBisqMarketErrorResponse(p.error));
      return;
    }

    const result = marketsApi.getOffers(p.market, p.direction);
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketOffers error'));
    }
  }

  private getBisqMarketVolumes(req: Request, res: Response) {
    const constraints: RequiredSpec = {
      'market': {
        required: false,
        types: ['@string']
      },
      'interval': {
        required: false,
        types: ['minute', 'half_hour', 'hour', 'half_day', 'day', 'week', 'month', 'year', 'auto']
      },
      'timestamp_from': {
        required: false,
        types: ['@number']
      },
      'timestamp_to': {
        required: false,
        types: ['@number']
      },
      'milliseconds': {
        required: false,
        types: ['@boolean']
      },
      'timestamp': {
        required: false,
        types: ['no', 'yes']
      },
    };

    const p = this.parseRequestParameters(req.query, constraints);
    if (p.error) {
      res.status(400).json(this.getBisqMarketErrorResponse(p.error));
      return;
    }

    const result = marketsApi.getVolumes(p.market, p.timestamp_from, p.timestamp_to, p.interval, p.milliseconds, p.timestamp);
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketVolumes error'));
    }
  }

  private getBisqMarketHloc(req: Request, res: Response) {
    const constraints: RequiredSpec = {
      'market': {
        required: true,
        types: ['@string']
      },
      'interval': {
        required: false,
        types: ['minute', 'half_hour', 'hour', 'half_day', 'day', 'week', 'month', 'year', 'auto']
      },
      'timestamp_from': {
        required: false,
        types: ['@number']
      },
      'timestamp_to': {
        required: false,
        types: ['@number']
      },
      'milliseconds': {
        required: false,
        types: ['@boolean']
      },
      'timestamp': {
        required: false,
        types: ['no', 'yes']
      },
    };

    const p = this.parseRequestParameters(req.query, constraints);
    if (p.error) {
      res.status(400).json(this.getBisqMarketErrorResponse(p.error));
      return;
    }

    const result = marketsApi.getHloc(p.market, p.interval, p.timestamp_from, p.timestamp_to, p.milliseconds, p.timestamp);
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketHloc error'));
    }
  }

  private getBisqMarketTicker(req: Request, res: Response) {
    const constraints: RequiredSpec = {
      'market': {
        required: false,
        types: ['@string']
      },
    };

    const p = this.parseRequestParameters(req.query, constraints);
    if (p.error) {
      res.status(400).json(this.getBisqMarketErrorResponse(p.error));
      return;
    }

    const result = marketsApi.getTicker(p.market);
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketTicker error'));
    }
  }

  private getBisqMarketVolumes7d(req: Request, res: Response) {
    const result = marketsApi.getVolumesByTime(604800);
    if (result) {
      res.json(result);
    } else {
      res.status(500).json(this.getBisqMarketErrorResponse('getBisqMarketVolumes7d error'));
    }
  }

  private parseRequestParameters(requestParams: object, params: RequiredSpec): { [name: string]: any; } {
    const final = {};
    for (const i in params) {
      if (params.hasOwnProperty(i)) {
        if (params[i].required && requestParams[i] === undefined) {
          return { error: i + ' parameter missing'};
        }
        if (typeof requestParams[i] === 'string') {
          const str = (requestParams[i] || '').toString().toLowerCase();
          if (params[i].types.indexOf('@number') > -1) {
            const number = parseInt((str).toString(), 10);
            final[i] = number;
          } else if (params[i].types.indexOf('@string') > -1) {
            final[i] = str;
          } else if (params[i].types.indexOf('@boolean') > -1) {
            final[i] = str === 'true' || str === 'yes';
          } else if (params[i].types.indexOf(str) > -1) {
            final[i] = str;
          } else {
            return { error: i + ' parameter invalid'};
          }
        } else if (typeof requestParams[i] === 'number') {
          final[i] = requestParams[i];
        }
      }
    }
    return final;
  }

  private getBisqMarketErrorResponse(message: string): MarketsApiError {
    return {
      'success': 0,
      'error': message
    };
  }

}

export default new BisqRoutes;
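Once these routes are registered, the Bisq market data is served over plain HTTP GET. A smoke test against a locally running backend might look like the following; the port, the `/api/v1/` prefix, and the `btc_usd` market name are assumptions for illustration and depend on your HTTP port, API_URL_PREFIX, and available markets:

```
# Hypothetical smoke test of one of the new Bisq market endpoints on a local backend.
curl -s "http://127.0.0.1:8999/api/v1/bisq/markets/ticker?market=btc_usd"
```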
@@ -4,15 +4,19 @@ export interface AbstractBitcoinApi {
$getRawMempool(): Promise<IEsploraApi.Transaction['txid'][]>;
$getRawTransaction(txId: string, skipConversion?: boolean, addPrevout?: boolean, lazyPrevouts?: boolean): Promise<IEsploraApi.Transaction>;
$getBlockHeightTip(): Promise<number>;
$getBlockHashTip(): Promise<string>;
$getTxIdsForBlock(hash: string): Promise<string[]>;
$getBlockHash(height: number): Promise<string>;
$getBlockHeader(hash: string): Promise<string>;
$getBlock(hash: string): Promise<IEsploraApi.Block>;
$getRawBlock(hash: string): Promise<string>;
$getAddress(address: string): Promise<IEsploraApi.Address>;
$getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]>;
$getAddressPrefix(prefix: string): string[];
$sendRawTransaction(rawTransaction: string): Promise<string>;
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend>;
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]>;
$getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]>;
}
export interface BitcoinRpcCredentials {
host: string;
@@ -73,6 +73,14 @@ export namespace IBitcoinApi {
time: number; // (numeric) Same as blocktime
}

export interface VerboseBlock extends Block {
tx: VerboseTransaction[]; // The transactions in the format of the getrawtransaction RPC. Different from verbosity = 1 "tx" result
}

export interface VerboseTransaction extends Transaction {
fee?: number; // (numeric) The transaction fee in BTC, omitted if block undo data is not available
}

export interface Vin {
txid?: string; // (string) The transaction id
vout?: number; // (string)
@@ -64,13 +64,21 @@ class BitcoinApi implements AbstractBitcoinApi {
});
}

$getBlockHashTip(): Promise<string> {
return this.bitcoindClient.getChainTips()
.then((result: IBitcoinApi.ChainTips[]) => {
return result.find(tip => tip.status === 'active')!.hash;
});
}

$getTxIdsForBlock(hash: string): Promise<string[]> {
return this.bitcoindClient.getBlock(hash, 1)
.then((rpcBlock: IBitcoinApi.Block) => rpcBlock.tx);
}

$getRawBlock(hash: string): Promise<string> {
return this.bitcoindClient.getBlock(hash, 0);
return this.bitcoindClient.getBlock(hash, 0)
.then((raw: string) => Buffer.from(raw, "hex"));
}

$getBlockHash(height: number): Promise<string> {
@@ -123,6 +131,16 @@ class BitcoinApi implements AbstractBitcoinApi {
return this.bitcoindClient.sendRawTransaction(rawTransaction);
}

async $getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
const txOut = await this.bitcoindClient.getTxOut(txId, vout, false);
return {
spent: txOut === null,
status: {
confirmed: true,
}
};
}

async $getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
const outSpends: IEsploraApi.Outspend[] = [];
const tx = await this.$getRawTransaction(txId, true, false);
@@ -141,6 +159,15 @@ class BitcoinApi implements AbstractBitcoinApi {
return outSpends;
}

async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {
const outspends: IEsploraApi.Outspend[][] = [];
for (const tx of txId) {
const outspend = await this.$getOutspends(tx);
outspends.push(outspend);
}
return outspends;
}

$getEstimatedHashrate(blockHeight: number): Promise<number> {
// 120 is the default block span in Core
return this.bitcoindClient.getNetworkHashPs(120, blockHeight);
@@ -179,7 +206,9 @@ class BitcoinApi implements AbstractBitcoinApi {
sequence: vin.sequence,
txid: vin.txid || '',
vout: vin.vout || 0,
witness: vin.txinwitness,
witness: vin.txinwitness || [],
inner_redeemscript_asm: '',
inner_witnessscript_asm: '',
};
});
backend/src/api/bitcoin/bitcoin.routes.ts (new file, +554; the diff is truncated in this capture)
@@ -0,0 +1,554 @@
import { Application, Request, Response } from 'express';
import axios from 'axios';
import config from '../../config';
import websocketHandler from '../websocket-handler';
import mempool from '../mempool';
import feeApi from '../fee-api';
import mempoolBlocks from '../mempool-blocks';
import bitcoinApi from './bitcoin-api-factory';
import { Common } from '../common';
import backendInfo from '../backend-info';
import transactionUtils from '../transaction-utils';
import { IEsploraApi } from './esplora-api.interface';
import loadingIndicators from '../loading-indicators';
import { TransactionExtended } from '../../mempool.interfaces';
import logger from '../../logger';
import blocks from '../blocks';
import bitcoinClient from './bitcoin-client';
import difficultyAdjustment from '../difficulty-adjustment';

class BitcoinRoutes {
  public initRoutes(app: Application) {
    app
      .get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', this.getTransactionTimes)
      .get(config.MEMPOOL.API_URL_PREFIX + 'outspends', this.$getBatchedOutspends)
      .get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', this.getCpfpInfo)
      .get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', this.getDifficultyChange)
      .get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', this.getRecommendedFees)
      .get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', this.getMempoolBlocks)
      .get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', this.getBackendInfo)
      .get(config.MEMPOOL.API_URL_PREFIX + 'init-data', this.getInitData)
      .get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', this.validateAddress)
      .post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', this.$postTransactionForm)
      .get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
        try {
          const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations`, { responseType: 'stream', timeout: 10000 });
          response.data.pipe(res);
        } catch (e) {
          res.status(500).end();
        }
      })
      .get(config.MEMPOOL.API_URL_PREFIX + 'donations/images/:id', async (req, res) => {
        try {
          const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations/images/${req.params.id}`, {
            responseType: 'stream', timeout: 10000
          });
          response.data.pipe(res);
        } catch (e) {
          res.status(500).end();
        }
      })
      .get(config.MEMPOOL.API_URL_PREFIX + 'contributors', async (req, res) => {
        try {
          const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors`, { responseType: 'stream', timeout: 10000 });
          response.data.pipe(res);
        } catch (e) {
          res.status(500).end();
        }
      })
      .get(config.MEMPOOL.API_URL_PREFIX + 'contributors/images/:id', async (req, res) => {
        try {
          const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors/images/${req.params.id}`, {
            responseType: 'stream', timeout: 10000
          });
          response.data.pipe(res);
        } catch (e) {
          res.status(500).end();
        }
      })
      .get(config.MEMPOOL.API_URL_PREFIX + 'translators', async (req, res) => {
        try {
          const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators`, { responseType: 'stream', timeout: 10000 });
          response.data.pipe(res);
        } catch (e) {
          res.status(500).end();
        }
      })
      .get(config.MEMPOOL.API_URL_PREFIX + 'translators/images/:id', async (req, res) => {
        try {
          const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators/images/${req.params.id}`, {
            responseType: 'stream', timeout: 10000
          });
          response.data.pipe(res);
        } catch (e) {
          res.status(500).end();
        }
      })
      .get(config.MEMPOOL.API_URL_PREFIX + 'blocks', this.getBlocks.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', this.getBlocks.bind(this))
      .get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', this.getBlock)
      .get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/summary', this.getStrippedBlockTransactions);
    ;

    if (config.MEMPOOL.BACKEND !== 'esplora') {
      app
        .get(config.MEMPOOL.API_URL_PREFIX + 'mempool', this.getMempool)
        .get(config.MEMPOOL.API_URL_PREFIX + 'mempool/txids', this.getMempoolTxIds)
        .get(config.MEMPOOL.API_URL_PREFIX + 'mempool/recent', this.getRecentMempoolTransactions)
        .get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId', this.getTransaction)
        .post(config.MEMPOOL.API_URL_PREFIX + 'tx', this.$postTransaction)
        .get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/hex', this.getRawTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', this.getTransactionStatus)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', this.getTransactionOutspends)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', this.getBlockHeader)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', this.getBlockTipHeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/hash', this.getBlockTipHash)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/raw', this.getRawBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txids', this.getTxIdsForBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs', this.getBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs/:index', this.getBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', this.getBlockHeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', this.getAddress)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', this.getAddressTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs/chain/:txId', this.getAddressTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', this.getAddressPrefix)
|
||||
;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private getInitData(req: Request, res: Response) {
|
||||
try {
|
||||
const result = websocketHandler.getInitData();
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private getRecommendedFees(req: Request, res: Response) {
|
||||
if (!mempool.isInSync()) {
|
||||
res.statusCode = 503;
|
||||
res.send('Service Unavailable');
|
||||
return;
|
||||
}
|
||||
const result = feeApi.getRecommendedFee();
|
||||
res.json(result);
|
||||
}
|
||||
|
||||
private getMempoolBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
const result = mempoolBlocks.getMempoolBlocks();
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private getTransactionTimes(req: Request, res: Response) {
|
||||
if (!Array.isArray(req.query.txId)) {
|
||||
res.status(500).send('Not an array');
|
||||
return;
|
||||
}
|
||||
const txIds: string[] = [];
|
||||
for (const _txId in req.query.txId) {
|
||||
if (typeof req.query.txId[_txId] === 'string') {
|
||||
txIds.push(req.query.txId[_txId].toString());
|
||||
}
|
||||
}
|
||||
|
||||
const times = mempool.getFirstSeenForTransactions(txIds);
|
||||
res.json(times);
|
||||
}
|
||||
|
||||
private async $getBatchedOutspends(req: Request, res: Response) {
|
||||
if (!Array.isArray(req.query.txId)) {
|
||||
res.status(500).send('Not an array');
|
||||
return;
|
||||
}
|
||||
if (req.query.txId.length > 50) {
|
||||
res.status(400).send('Too many txids requested');
|
||||
return;
|
||||
}
|
||||
const txIds: string[] = [];
|
||||
for (const _txId in req.query.txId) {
|
||||
if (typeof req.query.txId[_txId] === 'string') {
|
||||
txIds.push(req.query.txId[_txId].toString());
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const batchedOutspends = await bitcoinApi.$getBatchedOutspends(txIds);
|
||||
res.json(batchedOutspends);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private getCpfpInfo(req: Request, res: Response) {
|
||||
if (!/^[a-fA-F0-9]{64}$/.test(req.params.txId)) {
|
||||
res.status(501).send(`Invalid transaction ID.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const tx = mempool.getMempool()[req.params.txId];
|
||||
if (!tx) {
|
||||
res.status(404).send(`Transaction doesn't exist in the mempool.`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (tx.cpfpChecked) {
|
||||
res.json({
|
||||
ancestors: tx.ancestors,
|
||||
bestDescendant: tx.bestDescendant || null,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const cpfpInfo = Common.setRelativesAndGetCpfpInfo(tx, mempool.getMempool());
|
||||
|
||||
res.json(cpfpInfo);
|
||||
}
|
||||
|
||||
private getBackendInfo(req: Request, res: Response) {
|
||||
res.json(backendInfo.getBackendInfo());
|
||||
}
|
||||
|
||||
private async getTransaction(req: Request, res: Response) {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
|
||||
res.json(transaction);
|
||||
} catch (e) {
|
||||
let statusCode = 500;
|
||||
if (e instanceof Error && e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
|
||||
statusCode = 404;
|
||||
}
|
||||
res.status(statusCode).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getRawTransaction(req: Request, res: Response) {
|
||||
try {
|
||||
const transaction: IEsploraApi.Transaction = await bitcoinApi.$getRawTransaction(req.params.txId, true);
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
res.send(transaction.hex);
|
||||
} catch (e) {
|
||||
let statusCode = 500;
|
||||
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
|
||||
statusCode = 404;
|
||||
}
|
||||
res.status(statusCode).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getTransactionStatus(req: Request, res: Response) {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getTransactionExtended(req.params.txId, true);
|
||||
res.json(transaction.status);
|
||||
} catch (e) {
|
||||
let statusCode = 500;
|
||||
if (e instanceof Error && e.message && e.message.indexOf('No such mempool or blockchain transaction') > -1) {
|
||||
statusCode = 404;
|
||||
}
|
||||
res.status(statusCode).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlock(req: Request, res: Response) {
|
||||
try {
|
||||
const block = await blocks.$getBlock(req.params.hash);
|
||||
|
||||
const blockAge = new Date().getTime() / 1000 - block.timestamp;
|
||||
const day = 24 * 3600;
|
||||
let cacheDuration;
|
||||
if (blockAge > 365 * day) {
|
||||
cacheDuration = 30 * day;
|
||||
} else if (blockAge > 30 * day) {
|
||||
cacheDuration = 10 * day;
|
||||
} else {
|
||||
cacheDuration = 600
|
||||
}
|
||||
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * cacheDuration).toUTCString());
|
||||
res.json(block);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockHeader(req: Request, res: Response) {
|
||||
try {
|
||||
const blockHeader = await bitcoinApi.$getBlockHeader(req.params.hash);
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
res.send(blockHeader);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getStrippedBlockTransactions(req: Request, res: Response) {
|
||||
try {
|
||||
const transactions = await blocks.$getStrippedBlockTransactions(req.params.hash);
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24 * 30).toUTCString());
|
||||
res.json(transactions);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) { // Bitcoin
|
||||
const height = req.params.height === undefined ? undefined : parseInt(req.params.height, 10);
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(await blocks.$getBlocks(height, 15));
|
||||
} else { // Liquid, Bisq
|
||||
return await this.getLegacyBlocks(req, res);
|
||||
}
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getLegacyBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
const returnBlocks: IEsploraApi.Block[] = [];
|
||||
const fromHeight = parseInt(req.params.height, 10) || blocks.getCurrentBlockHeight();
|
||||
|
||||
// Check if block height exist in local cache to skip the hash lookup
|
||||
const blockByHeight = blocks.getBlocks().find((b) => b.height === fromHeight);
|
||||
let startFromHash: string | null = null;
|
||||
if (blockByHeight) {
|
||||
startFromHash = blockByHeight.id;
|
||||
} else {
|
||||
startFromHash = await bitcoinApi.$getBlockHash(fromHeight);
|
||||
}
|
||||
|
||||
let nextHash = startFromHash;
|
||||
for (let i = 0; i < 10 && nextHash; i++) {
|
||||
const localBlock = blocks.getBlocks().find((b) => b.id === nextHash);
|
||||
if (localBlock) {
|
||||
returnBlocks.push(localBlock);
|
||||
nextHash = localBlock.previousblockhash;
|
||||
} else {
|
||||
const block = await bitcoinApi.$getBlock(nextHash);
|
||||
returnBlocks.push(block);
|
||||
nextHash = block.previousblockhash;
|
||||
}
|
||||
}
|
||||
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(returnBlocks);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockTransactions(req: Request, res: Response) {
|
||||
try {
|
||||
loadingIndicators.setProgress('blocktxs-' + req.params.hash, 0);
|
||||
|
||||
const txIds = await bitcoinApi.$getTxIdsForBlock(req.params.hash);
|
||||
const transactions: TransactionExtended[] = [];
|
||||
const startingIndex = Math.max(0, parseInt(req.params.index || '0', 10));
|
||||
|
||||
const endIndex = Math.min(startingIndex + 10, txIds.length);
|
||||
for (let i = startingIndex; i < endIndex; i++) {
|
||||
try {
|
||||
const transaction = await transactionUtils.$getTransactionExtended(txIds[i], true, true);
|
||||
transactions.push(transaction);
|
||||
loadingIndicators.setProgress('blocktxs-' + req.params.hash, (i - startingIndex + 1) / (endIndex - startingIndex) * 100);
|
||||
} catch (e) {
|
||||
logger.debug('getBlockTransactions error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
res.json(transactions);
|
||||
} catch (e) {
|
||||
loadingIndicators.setProgress('blocktxs-' + req.params.hash, 100);
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockHeight(req: Request, res: Response) {
|
||||
try {
|
||||
const blockHash = await bitcoinApi.$getBlockHash(parseInt(req.params.height, 10));
|
||||
res.send(blockHash);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getAddress(req: Request, res: Response) {
|
||||
if (config.MEMPOOL.BACKEND === 'none') {
|
||||
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const addressData = await bitcoinApi.$getAddress(req.params.address);
|
||||
res.json(addressData);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
|
||||
return res.status(413).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getAddressTransactions(req: Request, res: Response) {
|
||||
if (config.MEMPOOL.BACKEND === 'none') {
|
||||
res.status(405).send('Address lookups cannot be used with bitcoind as backend.');
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const transactions = await bitcoinApi.$getAddressTransactions(req.params.address, req.params.txId);
|
||||
res.json(transactions);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message && (e.message.indexOf('too long') > 0 || e.message.indexOf('confirmed status') > 0)) {
|
||||
return res.status(413).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getAdressTxChain(req: Request, res: Response) {
|
||||
res.status(501).send('Not implemented');
|
||||
}
|
||||
|
||||
private async getAddressPrefix(req: Request, res: Response) {
|
||||
try {
|
||||
const blockHash = await bitcoinApi.$getAddressPrefix(req.params.prefix);
|
||||
res.send(blockHash);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getRecentMempoolTransactions(req: Request, res: Response) {
|
||||
const latestTransactions = Object.entries(mempool.getMempool())
|
||||
.sort((a, b) => (b[1].firstSeen || 0) - (a[1].firstSeen || 0))
|
||||
.slice(0, 10).map((tx) => Common.stripTransaction(tx[1]));
|
||||
|
||||
res.json(latestTransactions);
|
||||
}
|
||||
|
||||
private async getMempool(req: Request, res: Response) {
|
||||
const info = mempool.getMempoolInfo();
|
||||
res.json({
|
||||
count: info.size,
|
||||
vsize: info.bytes,
|
||||
total_fee: info.total_fee * 1e8,
|
||||
fee_histogram: []
|
||||
});
|
||||
}
|
||||
|
||||
private async getMempoolTxIds(req: Request, res: Response) {
|
||||
try {
|
||||
const rawMempool = await bitcoinApi.$getRawMempool();
|
||||
res.send(rawMempool);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockTipHeight(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getBlockHeightTip();
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getBlockTipHash(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getBlockHashTip();
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
res.send(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getRawBlock(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getRawBlock(req.params.hash);
|
||||
res.setHeader('content-type', 'application/octet-stream');
|
||||
res.send(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getTxIdsForBlock(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getTxIdsForBlock(req.params.hash);
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async validateAddress(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinClient.validateAddress(req.params.address);
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async getTransactionOutspends(req: Request, res: Response) {
|
||||
try {
|
||||
const result = await bitcoinApi.$getOutspends(req.params.txId);
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private getDifficultyChange(req: Request, res: Response) {
|
||||
try {
|
||||
res.json(difficultyAdjustment.getDifficultyAdjustment());
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $postTransaction(req: Request, res: Response) {
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
try {
|
||||
let rawTx;
|
||||
if (typeof req.body === 'object') {
|
||||
rawTx = Object.keys(req.body)[0];
|
||||
} else {
|
||||
rawTx = req.body;
|
||||
}
|
||||
const txIdResult = await bitcoinApi.$sendRawTransaction(rawTx);
|
||||
res.send(txIdResult);
|
||||
} catch (e: any) {
|
||||
res.status(400).send(e.message && e.code ? 'sendrawtransaction RPC error: ' + JSON.stringify({ code: e.code, message: e.message })
|
||||
: (e.message || 'Error'));
|
||||
}
|
||||
}
|
||||
|
||||
private async $postTransactionForm(req: Request, res: Response) {
|
||||
res.setHeader('content-type', 'text/plain');
|
||||
const matches = /tx=([a-z0-9]+)/.exec(req.body);
|
||||
let txHex = '';
|
||||
if (matches && matches[1]) {
|
||||
txHex = matches[1];
|
||||
}
|
||||
try {
|
||||
const txIdResult = await bitcoinClient.sendRawTransaction(txHex);
|
||||
res.send(txIdResult);
|
||||
} catch (e: any) {
|
||||
res.status(400).send(e.message && e.code ? 'sendrawtransaction RPC error: ' + JSON.stringify({ code: e.code, message: e.message })
|
||||
: (e.message || 'Error'));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default new BitcoinRoutes();
|
||||
@@ -1,12 +1,11 @@
|
||||
import config from '../../config';
|
||||
import Client from '@mempool/electrum-client';
|
||||
import { AbstractBitcoinApi } from './bitcoin-api-abstract-factory';
|
||||
import { IEsploraApi } from './esplora-api.interface';
|
||||
import { IElectrumApi } from './electrum-api.interface';
|
||||
import BitcoinApi from './bitcoin-api';
|
||||
import logger from '../../logger';
|
||||
import * as ElectrumClient from '@mempool/electrum-client';
|
||||
import * as sha256 from 'crypto-js/sha256';
|
||||
import * as hexEnc from 'crypto-js/enc-hex';
|
||||
import crypto from "crypto-js";
|
||||
import loadingIndicators from '../loading-indicators';
|
||||
import memoryCache from '../memory-cache';
|
||||
|
||||
@@ -26,7 +25,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
|
||||
onLog: (str) => { logger.debug(str); },
|
||||
};
|
||||
|
||||
this.electrumClient = new ElectrumClient(
|
||||
this.electrumClient = new Client(
|
||||
config.ELECTRUM.PORT,
|
||||
config.ELECTRUM.HOST,
|
||||
config.ELECTRUM.TLS_ENABLED ? 'tls' : 'tcp',
|
||||
@@ -35,7 +34,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
|
||||
);
|
||||
|
||||
this.electrumClient.initElectrum(electrumConfig, electrumPersistencePolicy)
|
||||
.then(() => {})
|
||||
.then(() => { })
|
||||
.catch((err) => {
|
||||
logger.err(`Error connecting to Electrum Server at ${config.ELECTRUM.HOST}:${config.ELECTRUM.PORT}`);
|
||||
});
|
||||
@@ -95,7 +94,7 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
|
||||
async $getAddressTransactions(address: string, lastSeenTxId: string): Promise<IEsploraApi.Transaction[]> {
|
||||
const addressInfo = await this.bitcoindClient.validateAddress(address);
|
||||
if (!addressInfo || !addressInfo.isvalid) {
|
||||
return [];
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -144,8 +143,8 @@ class BitcoindElectrsApi extends BitcoinApi implements AbstractBitcoinApi {
|
||||
}
|
||||
|
||||
private encodeScriptHash(scriptPubKey: string): string {
|
||||
const addrScripthash = hexEnc.stringify(sha256(hexEnc.parse(scriptPubKey)));
|
||||
return addrScripthash.match(/.{2}/g).reverse().join('');
|
||||
const addrScripthash = crypto.enc.Hex.stringify(crypto.SHA256(crypto.enc.Hex.parse(scriptPubKey)));
|
||||
return addrScripthash!.match(/.{2}/g)!.reverse().join('');
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@@ -25,10 +25,10 @@ export namespace IEsploraApi {
|
||||
is_coinbase: boolean;
|
||||
scriptsig: string;
|
||||
scriptsig_asm: string;
|
||||
inner_redeemscript_asm?: string;
|
||||
inner_witnessscript_asm?: string;
|
||||
inner_redeemscript_asm: string;
|
||||
inner_witnessscript_asm: string;
|
||||
sequence: any;
|
||||
witness?: string[];
|
||||
witness: string[];
|
||||
prevout: Vout | null;
|
||||
// Elements
|
||||
is_pegin?: boolean;
|
||||
|
||||
@@ -25,6 +25,11 @@ class ElectrsApi implements AbstractBitcoinApi {
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
$getBlockHashTip(): Promise<string> {
|
||||
return axios.get<string>(config.ESPLORA.REST_API_URL + '/blocks/tip/hash', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
$getTxIdsForBlock(hash: string): Promise<string[]> {
|
||||
return axios.get<string[]>(config.ESPLORA.REST_API_URL + '/block/' + hash + '/txids', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
@@ -45,6 +50,11 @@ class ElectrsApi implements AbstractBitcoinApi {
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
$getRawBlock(hash: string): Promise<string> {
|
||||
return axios.get<string>(config.ESPLORA.REST_API_URL + '/block/' + hash + "/raw", this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
$getAddress(address: string): Promise<IEsploraApi.Address> {
|
||||
throw new Error('Method getAddress not implemented.');
|
||||
}
|
||||
@@ -61,8 +71,23 @@ class ElectrsApi implements AbstractBitcoinApi {
|
||||
throw new Error('Method not implemented.');
|
||||
}
|
||||
|
||||
$getOutspends(): Promise<IEsploraApi.Outspend[]> {
|
||||
throw new Error('Method not implemented.');
|
||||
$getOutspend(txId: string, vout: number): Promise<IEsploraApi.Outspend> {
|
||||
return axios.get<IEsploraApi.Outspend>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspend/' + vout, this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
$getOutspends(txId: string): Promise<IEsploraApi.Outspend[]> {
|
||||
return axios.get<IEsploraApi.Outspend[]>(config.ESPLORA.REST_API_URL + '/tx/' + txId + '/outspends', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
async $getBatchedOutspends(txId: string[]): Promise<IEsploraApi.Outspend[][]> {
|
||||
const outspends: IEsploraApi.Outspend[][] = [];
|
||||
for (const tx of txId) {
|
||||
const outspend = await this.$getOutspends(tx);
|
||||
outspends.push(outspend);
|
||||
}
|
||||
return outspends;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,11 +2,12 @@ import config from '../config';
|
||||
import bitcoinApi from './bitcoin/bitcoin-api-factory';
|
||||
import logger from '../logger';
|
||||
import memPool from './mempool';
|
||||
import { BlockExtended, PoolTag, TransactionExtended, TransactionMinerInfo } from '../mempool.interfaces';
|
||||
import { BlockExtended, BlockSummary, PoolTag, TransactionExtended, TransactionStripped, TransactionMinerInfo } from '../mempool.interfaces';
|
||||
import { Common } from './common';
|
||||
import diskCache from './disk-cache';
|
||||
import transactionUtils from './transaction-utils';
|
||||
import bitcoinClient from './bitcoin/bitcoin-client';
|
||||
import { IBitcoinApi } from './bitcoin/bitcoin-api.interface';
|
||||
import { IEsploraApi } from './bitcoin/esplora-api.interface';
|
||||
import poolsRepository from '../repositories/PoolsRepository';
|
||||
import blocksRepository from '../repositories/BlocksRepository';
|
||||
@@ -17,11 +18,16 @@ import BlocksRepository from '../repositories/BlocksRepository';
|
||||
import HashratesRepository from '../repositories/HashratesRepository';
|
||||
import indexer from '../indexer';
|
||||
import fiatConversion from './fiat-conversion';
|
||||
import RatesRepository from '../repositories/RatesRepository';
|
||||
import poolsParser from './pools-parser';
|
||||
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
|
||||
import mining from './mining/mining';
|
||||
import DifficultyAdjustmentsRepository from '../repositories/DifficultyAdjustmentsRepository';
|
||||
import PricesRepository from '../repositories/PricesRepository';
|
||||
import priceUpdater from '../tasks/price-updater';
|
||||
|
||||
class Blocks {
|
||||
private blocks: BlockExtended[] = [];
|
||||
private blockSummaries: BlockSummary[] = [];
|
||||
private currentBlockHeight = 0;
|
||||
private currentDifficulty = 0;
|
||||
private lastDifficultyAdjustmentTime = 0;
|
||||
@@ -38,6 +44,14 @@ class Blocks {
|
||||
this.blocks = blocks;
|
||||
}
|
||||
|
||||
public getBlockSummaries(): BlockSummary[] {
|
||||
return this.blockSummaries;
|
||||
}
|
||||
|
||||
public setBlockSummaries(blockSummaries: BlockSummary[]) {
|
||||
this.blockSummaries = blockSummaries;
|
||||
}
|
||||
|
||||
public setNewBlockCallback(fn: (block: BlockExtended, txIds: string[], transactions: TransactionExtended[]) => void) {
|
||||
this.newBlockCallbacks.push(fn);
|
||||
}
|
||||
@@ -106,6 +120,27 @@ class Blocks {
|
||||
return transactions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a block summary (list of stripped transactions)
|
||||
* @param block
|
||||
* @returns BlockSummary
|
||||
*/
|
||||
private summarizeBlock(block: IBitcoinApi.VerboseBlock): BlockSummary {
|
||||
const stripped = block.tx.map((tx) => {
|
||||
return {
|
||||
txid: tx.txid,
|
||||
vsize: tx.vsize,
|
||||
fee: tx.fee ? Math.round(tx.fee * 100000000) : 0,
|
||||
value: Math.round(tx.vout.reduce((acc, vout) => acc + (vout.value ? vout.value : 0), 0) * 100000000)
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
id: block.hash,
|
||||
transactions: stripped
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a block with additional data (reward, coinbase, fees...)
|
||||
* @param block
|
||||
@@ -117,6 +152,7 @@ class Blocks {
|
||||
blockExtended.extras.reward = transactions[0].vout.reduce((acc, curr) => acc + curr.value, 0);
|
||||
blockExtended.extras.coinbaseTx = transactionUtils.stripCoinbaseTransaction(transactions[0]);
|
||||
blockExtended.extras.coinbaseRaw = blockExtended.extras.coinbaseTx.vin[0].scriptsig;
|
||||
blockExtended.extras.usd = fiatConversion.getConversionRates().USD;
|
||||
|
||||
if (block.height === 0) {
|
||||
blockExtended.extras.medianFee = 0; // 50th percentiles
|
||||
@@ -135,7 +171,7 @@ class Blocks {
|
||||
blockExtended.extras.avgFeeRate = stats.avgfeerate;
|
||||
}
|
||||
|
||||
if (['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK)) {
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK)) {
|
||||
let pool: PoolTag;
|
||||
if (blockExtended.extras?.coinbaseTx !== undefined) {
|
||||
pool = await this.$findBlockMiner(blockExtended.extras?.coinbaseTx);
|
||||
@@ -212,15 +248,70 @@ class Blocks {
|
||||
}
|
||||
|
||||
/**
|
||||
* [INDEXING] Index all blocks metadata for the mining dashboard
|
||||
* [INDEXING] Index all blocks summaries for the block txs visualization
|
||||
*/
|
||||
public async $generateBlockDatabase() {
|
||||
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
|
||||
if (blockchainInfo.blocks !== blockchainInfo.headers) { // Wait for node to sync
|
||||
public async $generateBlocksSummariesDatabase() {
|
||||
if (Common.blocksSummariesIndexingEnabled() === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Get all indexed block hash
|
||||
const indexedBlocks = await blocksRepository.$getIndexedBlocks();
|
||||
const indexedBlockSummariesHashesArray = await BlocksSummariesRepository.$getIndexedSummariesId();
|
||||
|
||||
const indexedBlockSummariesHashes = {}; // Use a map for faster seek during the indexing loop
|
||||
for (const hash of indexedBlockSummariesHashesArray) {
|
||||
indexedBlockSummariesHashes[hash] = true;
|
||||
}
|
||||
|
||||
// Logging
|
||||
let newlyIndexed = 0;
|
||||
let totalIndexed = indexedBlockSummariesHashesArray.length;
|
||||
let indexedThisRun = 0;
|
||||
let timer = new Date().getTime() / 1000;
|
||||
const startedAt = new Date().getTime() / 1000;
|
||||
|
||||
for (const block of indexedBlocks) {
|
||||
if (indexedBlockSummariesHashes[block.hash] === true) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Logging
|
||||
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
|
||||
if (elapsedSeconds > 5) {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
|
||||
const progress = Math.round(totalIndexed / indexedBlocks.length * 10000) / 100;
|
||||
logger.debug(`Indexing block summary for #${block.height} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexedBlocks.length} (${progress}%) | elapsed: ${runningFor} seconds`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
indexedThisRun = 0;
|
||||
}
|
||||
|
||||
await this.$getStrippedBlockTransactions(block.hash, true, true); // This will index the block summary
|
||||
|
||||
// Logging
|
||||
indexedThisRun++;
|
||||
totalIndexed++;
|
||||
newlyIndexed++;
|
||||
}
|
||||
if (newlyIndexed > 0) {
|
||||
logger.notice(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
} else {
|
||||
logger.debug(`Blocks summaries indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`Blocks summaries indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* [INDEXING] Index all blocks metadata for the mining dashboard
|
||||
*/
|
||||
public async $generateBlockDatabase(): Promise<boolean> {
|
||||
try {
|
||||
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
|
||||
let currentBlockHeight = blockchainInfo.blocks;
|
||||
|
||||
let indexingBlockAmount = Math.min(config.MEMPOOL.INDEXING_BLOCKS_AMOUNT, blockchainInfo.blocks);
|
||||
@@ -261,10 +352,9 @@ class Blocks {
|
||||
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
|
||||
if (elapsedSeconds > 5 || blockHeight === lastBlockToIndex) {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const blockPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
|
||||
const blockPerSeconds = Math.max(1, indexedThisRun / elapsedSeconds);
|
||||
const progress = Math.round(totalIndexed / indexingBlockAmount * 10000) / 100;
|
||||
const timeLeft = Math.round((indexingBlockAmount - totalIndexed) / blockPerSeconds);
|
||||
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);
|
||||
logger.debug(`Indexing block #${blockHeight} | ~${blockPerSeconds.toFixed(2)} blocks/sec | total: ${totalIndexed}/${indexingBlockAmount} (${progress}%) | elapsed: ${runningFor} seconds`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
indexedThisRun = 0;
|
||||
loadingIndicators.setProgress('block-indexing', progress, false);
|
||||
@@ -280,18 +370,19 @@ class Blocks {
|
||||
|
||||
currentBlockHeight -= chunkSize;
|
||||
}
|
||||
logger.info(`Indexed ${newlyIndexed} blocks`);
|
||||
if (newlyIndexed > 0) {
|
||||
logger.notice(`Block indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
} else {
|
||||
logger.debug(`Block indexing completed: indexed ${newlyIndexed} blocks`);
|
||||
}
|
||||
loadingIndicators.setProgress('block-indexing', 100);
|
||||
} catch (e) {
|
||||
logger.err('Block indexing failed. Trying again later. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Block indexing failed. Trying again in 10 seconds. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
loadingIndicators.setProgress('block-indexing', 100);
|
||||
return;
|
||||
throw e;
|
||||
}
|
||||
|
||||
const chainValid = await BlocksRepository.$validateChain();
|
||||
if (!chainValid) {
|
||||
indexer.reindex();
|
||||
}
|
||||
return await BlocksRepository.$validateChain();
|
||||
}
|
||||
|
||||
public async $updateBlocks() {
|
||||
@@ -323,7 +414,7 @@ class Blocks {
|
||||
|
||||
if (blockHeightTip >= 2016) {
|
||||
const previousPeriodBlockHash = await bitcoinApi.$getBlockHash(blockHeightTip - heightDiff - 2016);
|
||||
const previousPeriodBlock = await bitcoinApi.$getBlock(previousPeriodBlockHash);
|
||||
const previousPeriodBlock = await bitcoinClient.getBlock(previousPeriodBlockHash)
|
||||
this.previousDifficultyRetarget = (block.difficulty - previousPeriodBlock.difficulty) / previousPeriodBlock.difficulty * 100;
|
||||
logger.debug(`Initial difficulty adjustment data set.`);
|
||||
}
|
||||
@@ -341,10 +432,12 @@ class Blocks {
|
||||
}
|
||||
|
||||
const blockHash = await bitcoinApi.$getBlockHash(this.currentBlockHeight);
|
||||
const block = BitcoinApi.convertBlock(await bitcoinClient.getBlock(blockHash));
|
||||
const verboseBlock = await bitcoinClient.getBlock(blockHash, 2);
|
||||
const block = BitcoinApi.convertBlock(verboseBlock);
|
||||
const txIds: string[] = await bitcoinApi.$getTxIdsForBlock(blockHash);
|
||||
const transactions = await this.$getTransactionsExtended(blockHash, block.height, false);
|
||||
const blockExtended: BlockExtended = await this.$getBlockExtended(block, transactions);
|
||||
const blockSummary: BlockSummary = this.summarizeBlock(verboseBlock);
|
||||
|
||||
if (Common.indexingEnabled()) {
|
||||
if (!fastForwarded) {
|
||||
@@ -354,18 +447,48 @@ class Blocks {
|
||||
// We assume there won't be a reorg with more than 10 block depth
|
||||
await BlocksRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
|
||||
await HashratesRepository.$deleteLastEntries();
|
||||
await BlocksSummariesRepository.$deleteBlocksFrom(lastBlock['height'] - 10);
|
||||
for (let i = 10; i >= 0; --i) {
|
||||
await this.$indexBlock(lastBlock['height'] - i);
|
||||
const newBlock = await this.$indexBlock(lastBlock['height'] - i);
|
||||
await this.$getStrippedBlockTransactions(newBlock.id, true, true);
|
||||
}
|
||||
await mining.$indexDifficultyAdjustments();
|
||||
await DifficultyAdjustmentsRepository.$deleteLastAdjustment();
|
||||
logger.info(`Re-indexed 10 blocks and summaries. Also re-indexed the last difficulty adjustments. Will re-index latest hashrates in a few seconds.`);
|
||||
indexer.reindex();
|
||||
}
|
||||
await blocksRepository.$saveBlockInDatabase(blockExtended);
|
||||
|
||||
const lastestPriceId = await PricesRepository.$getLatestPriceId();
|
||||
if (priceUpdater.historyInserted === true && lastestPriceId !== null) {
|
||||
await blocksRepository.$saveBlockPrices([{
|
||||
height: blockExtended.height,
|
||||
priceId: lastestPriceId,
|
||||
}]);
|
||||
} else {
|
||||
logger.info(`Cannot save block price for ${blockExtended.height} because the price updater hasnt completed yet. Trying again in 10 seconds.`)
|
||||
setTimeout(() => {
|
||||
indexer.runSingleTask('blocksPrices');
|
||||
}, 10000);
|
||||
}
|
||||
|
||||
// Save blocks summary for visualization if it's enabled
|
||||
if (Common.blocksSummariesIndexingEnabled() === true) {
|
||||
await this.$getStrippedBlockTransactions(blockExtended.id, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (fiatConversion.ratesInitialized === true && config.DATABASE.ENABLED === true) {
|
||||
await RatesRepository.$saveRate(blockExtended.height, fiatConversion.getConversionRates());
|
||||
}
|
||||
|
||||
if (block.height % 2016 === 0) {
|
||||
if (Common.indexingEnabled()) {
|
||||
await DifficultyAdjustmentsRepository.$saveAdjustments({
|
||||
time: block.timestamp,
|
||||
height: block.height,
|
||||
difficulty: block.difficulty,
|
||||
adjustment: Math.round((block.difficulty / this.currentDifficulty) * 1000000) / 1000000, // Remove float point noise
|
||||
});
|
||||
}
|
||||
|
||||
this.previousDifficultyRetarget = (block.difficulty - this.currentDifficulty) / this.currentDifficulty * 100;
|
||||
this.lastDifficultyAdjustmentTime = block.timestamp;
|
||||
this.currentDifficulty = block.difficulty;
|
||||
@@ -375,6 +498,10 @@ class Blocks {
|
||||
if (this.blocks.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) {
|
||||
this.blocks = this.blocks.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4);
|
||||
}
|
||||
this.blockSummaries.push(blockSummary);
|
||||
if (this.blockSummaries.length > config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4) {
|
||||
this.blockSummaries = this.blockSummaries.slice(-config.MEMPOOL.INITIAL_BLOCKS_AMOUNT * 4);
|
||||
}
|
||||
|
||||
if (this.newBlockCallbacks.length) {
|
||||
this.newBlockCallbacks.forEach((cb) => cb(blockExtended, txIds, transactions));
|
||||
@@ -422,13 +549,14 @@ class Blocks {
|
||||
}
|
||||
}
|
||||
|
||||
const block = await bitcoinApi.$getBlock(hash);
|
||||
|
||||
// Not Bitcoin network, return the block as it
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
|
||||
return block;
|
||||
return await bitcoinApi.$getBlock(hash);
|
||||
}
|
||||
|
||||
let block = await bitcoinClient.getBlock(hash);
|
||||
block = prepareBlock(block);
|
||||
|
||||
// Bitcoin network, add our custom data on top
|
||||
const transactions = await this.$getTransactionsExtended(hash, block.height, true);
|
||||
const blockExtended = await this.$getBlockExtended(block, transactions);
|
||||
@@ -440,48 +568,71 @@ class Blocks {
|
||||
return blockExtended;
|
||||
}
|
||||
|
||||
public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> {
|
||||
try {
|
||||
let currentHeight = fromHeight !== undefined ? fromHeight : this.getCurrentBlockHeight();
|
||||
const returnBlocks: BlockExtended[] = [];
|
||||
|
||||
if (currentHeight < 0) {
|
||||
return returnBlocks;
|
||||
public async $getStrippedBlockTransactions(hash: string, skipMemoryCache = false,
|
||||
skipDBLookup = false): Promise<TransactionStripped[]>
|
||||
{
|
||||
if (skipMemoryCache === false) {
|
||||
// Check the memory cache
|
||||
const cachedSummary = this.getBlockSummaries().find((b) => b.id === hash);
|
||||
if (cachedSummary) {
|
||||
return cachedSummary.transactions;
|
||||
}
|
||||
|
||||
if (currentHeight === 0 && Common.indexingEnabled()) {
|
||||
currentHeight = await blocksRepository.$mostRecentBlockHeight();
|
||||
}
|
||||
|
||||
// Check if block height exist in local cache to skip the hash lookup
|
||||
const blockByHeight = this.getBlocks().find((b) => b.height === currentHeight);
|
||||
let startFromHash: string | null = null;
|
||||
if (blockByHeight) {
|
||||
startFromHash = blockByHeight.id;
|
||||
} else if (!Common.indexingEnabled()) {
|
||||
startFromHash = await bitcoinApi.$getBlockHash(currentHeight);
|
||||
}
|
||||
|
||||
let nextHash = startFromHash;
|
||||
for (let i = 0; i < limit && currentHeight >= 0; i++) {
|
||||
let block = this.getBlocks().find((b) => b.height === currentHeight);
|
||||
if (block) {
|
||||
returnBlocks.push(block);
|
||||
} else if (Common.indexingEnabled()) {
|
||||
block = await this.$indexBlock(currentHeight);
|
||||
returnBlocks.push(block);
|
||||
} else if (nextHash != null) {
|
||||
block = prepareBlock(await bitcoinApi.$getBlock(nextHash));
|
||||
nextHash = block.previousblockhash;
|
||||
returnBlocks.push(block);
|
||||
}
|
||||
currentHeight--;
|
||||
}
|
||||
|
||||
return returnBlocks;
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
// Check if it's indexed in db
|
||||
if (skipDBLookup === false && Common.blocksSummariesIndexingEnabled() === true) {
|
||||
const indexedSummary = await BlocksSummariesRepository.$getByBlockId(hash);
|
||||
if (indexedSummary !== undefined) {
|
||||
return indexedSummary.transactions;
|
||||
}
|
||||
}
|
||||
|
||||
// Call Core RPC
|
||||
const block = await bitcoinClient.getBlock(hash, 2);
|
||||
const summary = this.summarizeBlock(block);
|
||||
|
||||
// Index the response if needed
|
||||
if (Common.blocksSummariesIndexingEnabled() === true) {
|
||||
await BlocksSummariesRepository.$saveSummary({height: block.height, mined: summary});
|
||||
}
|
||||
|
||||
return summary.transactions;
|
||||
}
|
||||
|
||||
public async $getBlocks(fromHeight?: number, limit: number = 15): Promise<BlockExtended[]> {
|
||||
let currentHeight = fromHeight !== undefined ? fromHeight : await blocksRepository.$mostRecentBlockHeight();
|
||||
const returnBlocks: BlockExtended[] = [];
|
||||
|
||||
if (currentHeight < 0) {
|
||||
return returnBlocks;
|
||||
}
|
||||
|
||||
// Check if block height exist in local cache to skip the hash lookup
|
||||
const blockByHeight = this.getBlocks().find((b) => b.height === currentHeight);
|
||||
let startFromHash: string | null = null;
|
||||
if (blockByHeight) {
|
||||
startFromHash = blockByHeight.id;
|
||||
} else if (!Common.indexingEnabled()) {
|
||||
startFromHash = await bitcoinApi.$getBlockHash(currentHeight);
|
||||
}
|
||||
|
||||
let nextHash = startFromHash;
|
||||
for (let i = 0; i < limit && currentHeight >= 0; i++) {
|
||||
let block = this.getBlocks().find((b) => b.height === currentHeight);
|
||||
if (block) {
|
||||
returnBlocks.push(block);
|
||||
} else if (Common.indexingEnabled()) {
|
||||
block = await this.$indexBlock(currentHeight);
|
||||
returnBlocks.push(block);
|
||||
} else if (nextHash != null) {
|
||||
block = prepareBlock(await bitcoinClient.getBlock(nextHash));
|
||||
nextHash = block.previousblockhash;
|
||||
returnBlocks.push(block);
|
||||
}
|
||||
currentHeight--;
|
||||
}
|
||||
|
||||
return returnBlocks;
|
||||
}
|
||||
|
||||
public getLastDifficultyAdjustmentTime(): number {
|
||||
|
||||
@@ -114,7 +114,7 @@ export class Common {
|
||||
totalFees += tx.bestDescendant.fee;
|
||||
}
|
||||
|
||||
tx.effectiveFeePerVsize = Math.max(Common.isLiquid() ? 0.1 : 1, totalFees / (totalWeight / 4));
|
||||
tx.effectiveFeePerVsize = Math.max(0, totalFees / (totalWeight / 4));
|
||||
tx.cpfpChecked = true;
|
||||
|
||||
return {
|
||||
@@ -172,9 +172,53 @@ export class Common {
|
||||
|
||||
static indexingEnabled(): boolean {
|
||||
return (
|
||||
['mainnet', 'testnet', 'signet', 'regtest'].includes(config.MEMPOOL.NETWORK) &&
|
||||
['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) &&
|
||||
config.DATABASE.ENABLED === true &&
|
||||
config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== 0
|
||||
);
|
||||
}
|
||||
|
||||
static blocksSummariesIndexingEnabled(): boolean {
|
||||
return (
|
||||
Common.indexingEnabled() &&
|
||||
config.MEMPOOL.BLOCKS_SUMMARIES_INDEXING === true
|
||||
);
|
||||
}
|
||||
|
||||
static setDateMidnight(date: Date): void {
|
||||
date.setUTCHours(0);
|
||||
date.setUTCMinutes(0);
|
||||
date.setUTCSeconds(0);
|
||||
date.setUTCMilliseconds(0);
|
||||
}
|
||||
|
||||
static channelShortIdToIntegerId(channelId: string): string {
|
||||
if (channelId.indexOf('x') === -1) { // Already an integer id
|
||||
return channelId;
|
||||
}
|
||||
if (channelId.indexOf('/') !== -1) { // Topology import
|
||||
channelId = channelId.slice(0, -2);
|
||||
}
|
||||
const s = channelId.split('x').map(part => BigInt(part));
|
||||
return ((s[0] << 40n) | (s[1] << 16n) | s[2]).toString();
|
||||
}
|
||||
|
||||
/** Decodes a channel id returned by lnd as uint64 to a short channel id */
|
||||
static channelIntegerIdToShortId(id: string): string {
|
||||
if (id.indexOf('x') !== -1) { // Already a short id
|
||||
return id;
|
||||
}
|
||||
|
||||
const n = BigInt(id);
|
||||
return [
|
||||
n >> 40n, // nth block
|
||||
(n >> 16n) & 0xffffffn, // nth tx of the block
|
||||
n & 0xffffn // nth output of the tx
|
||||
].join('x');
|
||||
}
|
||||
|
||||
static utcDateToMysql(date?: number): string {
|
||||
const d = new Date((date || 0) * 1000);
|
||||
return d.toISOString().split('T')[0] + ' ' + d.toTimeString().split(' ')[0];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,11 +4,25 @@ import logger from '../logger';
|
||||
import { Common } from './common';
|
||||
|
||||
class DatabaseMigration {
|
||||
private static currentVersion = 19;
|
||||
private static currentVersion = 36;
|
||||
private queryTimeout = 120000;
|
||||
private statisticsAddedIndexed = false;
|
||||
private uniqueLogs: string[] = [];
|
||||
|
||||
private blocksTruncatedMessage = `'blocks' table has been truncated.`;
|
||||
private hashratesTruncatedMessage = `'hashrates' table has been truncated.`;
|
||||
|
||||
/**
|
||||
* Avoid printing multiple time the same message
|
||||
*/
|
||||
private uniqueLog(loggerFunction: any, msg: string) {
|
||||
if (this.uniqueLogs.includes(msg)) {
|
||||
return;
|
||||
}
|
||||
this.uniqueLogs.push(msg);
|
||||
loggerFunction(msg);
|
||||
}
|
||||
|
||||
constructor() { }
|
||||
/**
|
||||
* Entry point
|
||||
*/
|
||||
@@ -39,6 +53,16 @@ class DatabaseMigration {
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion === 0) {
|
||||
logger.info('Initializing database (first run, clean install)');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion <= 2) {
|
||||
// Disable some spam logs when they're not relevant
|
||||
this.uniqueLogs.push(this.blocksTruncatedMessage);
|
||||
this.uniqueLogs.push(this.hashratesTruncatedMessage);
|
||||
}
|
||||
|
||||
logger.debug('MIGRATIONS: Current state.schema_version ' + databaseSchemaVersion);
|
||||
logger.debug('MIGRATIONS: Latest DatabaseMigration.version is ' + DatabaseMigration.currentVersion);
|
||||
if (databaseSchemaVersion >= DatabaseMigration.currentVersion) {
|
||||
@@ -56,10 +80,13 @@ class DatabaseMigration {
|
||||
}
|
||||
|
||||
if (DatabaseMigration.currentVersion > databaseSchemaVersion) {
|
||||
logger.notice('MIGRATIONS: Upgrading database schema');
|
||||
try {
|
||||
await this.$migrateTableSchemaFromVersion(databaseSchemaVersion);
|
||||
logger.notice(`MIGRATIONS: OK. Database schema have been migrated from version ${databaseSchemaVersion} to ${DatabaseMigration.currentVersion} (latest version)`);
|
||||
if (databaseSchemaVersion === 0) {
|
||||
logger.notice(`MIGRATIONS: OK. Database schema has been properly initialized to version ${DatabaseMigration.currentVersion} (latest version)`);
|
||||
} else {
|
||||
logger.notice(`MIGRATIONS: OK. Database schema have been migrated from version ${databaseSchemaVersion} to ${DatabaseMigration.currentVersion} (latest version)`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('MIGRATIONS: Unable to migrate database, aborting. ' + e);
|
||||
}
|
||||
@@ -75,121 +102,227 @@ class DatabaseMigration {
|
||||
await this.$setStatisticsAddedIndexedFlag(databaseSchemaVersion);
|
||||
|
||||
const isBitcoin = ['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK);
|
||||
try {
|
||||
await this.$executeQuery(this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
|
||||
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
|
||||
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
|
||||
await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
|
||||
}
|
||||
if (databaseSchemaVersion < 3) {
|
||||
await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
|
||||
}
|
||||
if (databaseSchemaVersion < 4) {
|
||||
await this.$executeQuery('DROP table IF EXISTS blocks;');
|
||||
await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
|
||||
}
|
||||
if (databaseSchemaVersion < 5 && isBitcoin === true) {
|
||||
logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 6 && isBitcoin === true) {
|
||||
logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
// Cleanup original blocks fields type
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
|
||||
// We also fix the pools.id type so we need to drop/re-create the foreign key
|
||||
await this.$executeQuery('ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
|
||||
// Add new block indexing fields
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
|
||||
}
|
||||
await this.$executeQuery(this.getCreateElementsTableQuery(), await this.$checkIfTableExists('elements_pegs'));
|
||||
await this.$executeQuery(this.getCreateStatisticsQuery(), await this.$checkIfTableExists('statistics'));
|
||||
if (databaseSchemaVersion < 2 && this.statisticsAddedIndexed === false) {
|
||||
await this.$executeQuery(`CREATE INDEX added ON statistics (added);`);
|
||||
}
|
||||
if (databaseSchemaVersion < 3) {
|
||||
await this.$executeQuery(this.getCreatePoolsTableQuery(), await this.$checkIfTableExists('pools'));
|
||||
}
|
||||
if (databaseSchemaVersion < 4) {
|
||||
await this.$executeQuery('DROP table IF EXISTS blocks;');
|
||||
await this.$executeQuery(this.getCreateBlocksTableQuery(), await this.$checkIfTableExists('blocks'));
|
||||
}
|
||||
if (databaseSchemaVersion < 5 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `reward` double unsigned NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 7 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP table IF EXISTS hashrates;');
|
||||
await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
|
||||
}
|
||||
if (databaseSchemaVersion < 6 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
// Cleanup original blocks fields type
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `height` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `tx_count` smallint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `size` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `weight` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` double NOT NULL DEFAULT "0"');
|
||||
// We also fix the pools.id type so we need to drop/re-create the foreign key
|
||||
await this.$executeQuery('ALTER TABLE blocks DROP FOREIGN KEY IF EXISTS `blocks_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE pools MODIFY `id` smallint unsigned AUTO_INCREMENT');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `pool_id` smallint unsigned NULL');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD FOREIGN KEY (`pool_id`) REFERENCES `pools` (`id`)');
|
||||
// Add new block indexing fields
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `version` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `bits` integer unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `nonce` bigint unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `merkle_root` varchar(65) NOT NULL DEFAULT ""');
|
||||
await this.$executeQuery('ALTER TABLE blocks ADD `previous_block_hash` varchar(65) NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 8 && isBitcoin === true) {
|
||||
logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
|
||||
}
|
||||
if (databaseSchemaVersion < 7 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP table IF EXISTS hashrates;');
|
||||
await this.$executeQuery(this.getCreateDailyStatsTableQuery(), await this.$checkIfTableExists('hashrates'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 9 && isBitcoin === true) {
|
||||
logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
|
||||
}
|
||||
if (databaseSchemaVersion < 8 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP INDEX `PRIMARY`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `id` int NOT NULL AUTO_INCREMENT PRIMARY KEY FIRST');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `share` float NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD `type` enum("daily", "weekly") DEFAULT "daily"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 10 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
|
||||
}
|
||||
if (databaseSchemaVersion < 9 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `state` CHANGE `name` `name` varchar(100)');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` ADD UNIQUE `hashrate_timestamp_pool_id` (`hashrate_timestamp`, `pool_id`)');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 11 && isBitcoin === true) {
|
||||
logger.warn(`'blocks' table has been truncated. Re-indexing from scratch.`);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery(`ALTER TABLE blocks
|
||||
ADD avg_fee INT UNSIGNED NULL,
|
||||
ADD avg_fee_rate INT UNSIGNED NULL
|
||||
`);
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 10 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `blockTimestamp` (`blockTimestamp`)');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 12 && isBitcoin === true) {
|
||||
// No need to re-index because the new data type can contain larger values
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 11 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.blocksTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE blocks;'); // Need to re-index
|
||||
await this.$executeQuery(`ALTER TABLE blocks
|
||||
ADD avg_fee INT UNSIGNED NULL,
|
||||
ADD avg_fee_rate INT UNSIGNED NULL
|
||||
`);
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `reward` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` INT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 13 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 12 && isBitcoin === true) {
|
||||
// No need to re-index because the new data type can contain larger values
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `fees` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 14 && isBitcoin === true) {
|
||||
logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
if (databaseSchemaVersion < 13 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `difficulty` DOUBLE UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `median_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE blocks MODIFY `avg_fee_rate` BIGINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 16 && isBitcoin === true) {
|
||||
logger.warn(`'hashrates' table has been truncated. Re-indexing from scratch.`);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
|
||||
}
|
||||
if (databaseSchemaVersion < 14 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` DROP FOREIGN KEY `hashrates_ibfk_1`');
|
||||
await this.$executeQuery('ALTER TABLE `hashrates` MODIFY `pool_id` SMALLINT UNSIGNED NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 17 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
|
||||
}
|
||||
if (databaseSchemaVersion < 16 && isBitcoin === true) {
|
||||
this.uniqueLog(logger.notice, this.hashratesTruncatedMessage);
|
||||
await this.$executeQuery('TRUNCATE hashrates;'); // Need to re-index because we changed timestamps
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 18 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `hash` (`hash`);');
|
||||
}
|
||||
if (databaseSchemaVersion < 17 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `pools` ADD `slug` CHAR(50) NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 19) {
|
||||
await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
}

if (databaseSchemaVersion < 18 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks` ADD INDEX `hash` (`hash`);');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 19) {
|
||||
await this.$executeQuery(this.getCreateRatesTableQuery(), await this.$checkIfTableExists('rates'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 20 && isBitcoin === true) {
|
||||
await this.$executeQuery(this.getCreateBlocksSummariesTableQuery(), await this.$checkIfTableExists('blocks_summaries'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 21) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `rates`');
|
||||
await this.$executeQuery(this.getCreatePricesTableQuery(), await this.$checkIfTableExists('prices'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 22 && isBitcoin === true) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `difficulty_adjustments`');
|
||||
await this.$executeQuery(this.getCreateDifficultyAdjustmentsTableQuery(), await this.$checkIfTableExists('difficulty_adjustments'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 23) {
|
||||
await this.$executeQuery('TRUNCATE `prices`');
|
||||
await this.$executeQuery('ALTER TABLE `prices` DROP `avg_prices`');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `USD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `EUR` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `GBP` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `CAD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `CHF` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `AUD` float DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `JPY` float DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 24 && isBitcoin == true) {
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_audits`');
|
||||
await this.$executeQuery(this.getCreateBlocksAuditsTableQuery(), await this.$checkIfTableExists('blocks_audits'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 25 && isBitcoin === true) {
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('last_node_stats', 0, '1970-01-01');`);
|
||||
await this.$executeQuery(this.getCreateLightningStatisticsQuery(), await this.$checkIfTableExists('lightning_stats'));
|
||||
await this.$executeQuery(this.getCreateNodesQuery(), await this.$checkIfTableExists('nodes'));
|
||||
await this.$executeQuery(this.getCreateChannelsQuery(), await this.$checkIfTableExists('channels'));
|
||||
await this.$executeQuery(this.getCreateNodesStatsQuery(), await this.$checkIfTableExists('node_stats'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 26 && isBitcoin === true) {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.uniqueLog(logger.notice, `'lightning_stats' table has been truncated.`);
|
||||
}
|
||||
await this.$executeQuery(`TRUNCATE lightning_stats`);
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD tor_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD unannounced_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 27 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD avg_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_capacity bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_fee_rate int(11) unsigned NOT NULL DEFAULT "0"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD med_base_fee_mtokens bigint(20) unsigned NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 28 && isBitcoin === true) {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.uniqueLog(logger.notice, `'lightning_stats' and 'node_stats' tables have been truncated.`);
|
||||
}
|
||||
await this.$executeQuery(`TRUNCATE lightning_stats`);
|
||||
await this.$executeQuery(`TRUNCATE node_stats`);
|
||||
await this.$executeQuery(`ALTER TABLE lightning_stats MODIFY added DATE`);
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 29 && isBitcoin === true) {
|
||||
await this.$executeQuery(this.getCreateGeoNamesTableQuery(), await this.$checkIfTableExists('geo_names'));
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD as_number int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD city_id int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD country_id int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD accuracy_radius int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD subdivision_id int(11) unsigned NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD longitude double NULL DEFAULT NULL');
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD latitude double NULL DEFAULT NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 30 && isBitcoin === true) {
|
||||
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization") NOT NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 31 && isBitcoin == true) { // Link blocks to prices
|
||||
await this.$executeQuery('ALTER TABLE `prices` ADD `id` int NULL AUTO_INCREMENT UNIQUE');
|
||||
await this.$executeQuery('DROP TABLE IF EXISTS `blocks_prices`');
|
||||
await this.$executeQuery(this.getCreateBlocksPricesTableQuery(), await this.$checkIfTableExists('blocks_prices'));
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 32 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `blocks_summaries` ADD `template` JSON DEFAULT "[]"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 33 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `geo_names` CHANGE `type` `type` enum("city","country","division","continent","as_organization", "country_iso_code") NOT NULL');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 34 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD clearnet_tor_nodes int(11) NOT NULL DEFAULT "0"');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 35 && isBitcoin == true) {
|
||||
await this.$executeQuery('DELETE from `lightning_stats` WHERE added > "2021-09-19"');
|
||||
await this.$executeQuery('ALTER TABLE `lightning_stats` ADD CONSTRAINT added_unique UNIQUE (added);');
|
||||
}
|
||||
|
||||
if (databaseSchemaVersion < 36 && isBitcoin == true) {
|
||||
await this.$executeQuery('ALTER TABLE `nodes` ADD status TINYINT NOT NULL DEFAULT "1"');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -228,7 +361,7 @@ class DatabaseMigration {
|
||||
/**
|
||||
* Small query execution wrapper to log all executed queries
|
||||
*/
|
||||
private async $executeQuery(query: string, silent: boolean = false): Promise<any> {
|
||||
private async $executeQuery(query: string, silent = false): Promise<any> {
|
||||
if (!silent) {
|
||||
logger.debug('MIGRATIONS: Execute query:\n' + query);
|
||||
}
|
||||
@@ -257,21 +390,17 @@ class DatabaseMigration {
|
||||
* Create the `state` table
|
||||
*/
|
||||
private async $createMigrationStateTable(): Promise<void> {
|
||||
try {
|
||||
const query = `CREATE TABLE IF NOT EXISTS state (
|
||||
name varchar(25) NOT NULL,
|
||||
number int(11) NULL,
|
||||
string varchar(100) NULL,
|
||||
CONSTRAINT name_unique UNIQUE (name)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
await this.$executeQuery(query);
|
||||
const query = `CREATE TABLE IF NOT EXISTS state (
|
||||
name varchar(25) NOT NULL,
|
||||
number int(11) NULL,
|
||||
string varchar(100) NULL,
|
||||
CONSTRAINT name_unique UNIQUE (name)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
await this.$executeQuery(query);
|
||||
|
||||
// Set initial values
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('schema_version', 0, NULL);`);
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
|
||||
} catch (e) {
|
||||
throw e;
|
||||
}
|
||||
// Set initial values
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('schema_version', 0, NULL);`);
|
||||
await this.$executeQuery(`INSERT INTO state VALUES('last_elements_block', 0, NULL);`);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -282,6 +411,8 @@ class DatabaseMigration {
|
||||
for (const query of this.getMigrationQueriesFromVersion(version)) {
|
||||
transactionQueries.push(query);
|
||||
}
|
||||
|
||||
logger.notice(`MIGRATIONS: ${version > 0 ? 'Upgrading' : 'Initializing'} database schema version number to ${DatabaseMigration.currentVersion}`);
|
||||
transactionQueries.push(this.getUpdateToLatestSchemaVersionQuery());
|
||||
|
||||
try {
|
||||
@@ -305,6 +436,9 @@ class DatabaseMigration {
|
||||
|
||||
if (version < 1) {
|
||||
if (config.MEMPOOL.NETWORK !== 'liquid' && config.MEMPOOL.NETWORK !== 'liquidtestnet') {
|
||||
if (version > 0) {
|
||||
logger.notice(`MIGRATIONS: Migrating (shifting) statistics table data`);
|
||||
}
|
||||
queries.push(this.getShiftStatisticsQuery());
|
||||
}
|
||||
}
|
||||
@@ -470,7 +604,7 @@ class DatabaseMigration {
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateRatesTableQuery(): string {
|
||||
private getCreateRatesTableQuery(): string { // This table has been replaced by the prices table
|
||||
return `CREATE TABLE IF NOT EXISTS rates (
|
||||
height int(10) unsigned NOT NULL,
|
||||
bisq_rates JSON NOT NULL,
|
||||
@@ -478,8 +612,145 @@ class DatabaseMigration {
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateBlocksSummariesTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS blocks_summaries (
|
||||
height int(10) unsigned NOT NULL,
|
||||
id varchar(65) NOT NULL,
|
||||
transactions JSON NOT NULL,
|
||||
PRIMARY KEY (id),
|
||||
INDEX (height)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreatePricesTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS prices (
|
||||
time timestamp NOT NULL,
|
||||
avg_prices JSON NOT NULL,
|
||||
PRIMARY KEY (time)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateDifficultyAdjustmentsTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS difficulty_adjustments (
|
||||
time timestamp NOT NULL,
|
||||
height int(10) unsigned NOT NULL,
|
||||
difficulty double unsigned NOT NULL,
|
||||
adjustment float NOT NULL,
|
||||
PRIMARY KEY (height),
|
||||
INDEX (time)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateLightningStatisticsQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS lightning_stats (
|
||||
id int(11) NOT NULL AUTO_INCREMENT,
|
||||
added datetime NOT NULL,
|
||||
channel_count int(11) NOT NULL,
|
||||
node_count int(11) NOT NULL,
|
||||
total_capacity double unsigned NOT NULL,
|
||||
PRIMARY KEY (id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateNodesQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS nodes (
|
||||
public_key varchar(66) NOT NULL,
|
||||
first_seen datetime NOT NULL,
|
||||
updated_at datetime NOT NULL,
|
||||
alias varchar(200) CHARACTER SET utf8mb4 NOT NULL,
|
||||
color varchar(200) NOT NULL,
|
||||
sockets text DEFAULT NULL,
|
||||
PRIMARY KEY (public_key),
|
||||
KEY alias (alias(10))
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateChannelsQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS channels (
|
||||
id bigint(11) unsigned NOT NULL,
|
||||
short_id varchar(15) NOT NULL DEFAULT '',
|
||||
capacity bigint(20) unsigned NOT NULL,
|
||||
transaction_id varchar(64) NOT NULL,
|
||||
transaction_vout int(11) NOT NULL,
|
||||
updated_at datetime DEFAULT NULL,
|
||||
created datetime DEFAULT NULL,
|
||||
status int(11) NOT NULL DEFAULT 0,
|
||||
closing_transaction_id varchar(64) DEFAULT NULL,
|
||||
closing_date datetime DEFAULT NULL,
|
||||
closing_reason int(11) DEFAULT NULL,
|
||||
node1_public_key varchar(66) NOT NULL,
|
||||
node1_base_fee_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node1_cltv_delta int(11) DEFAULT NULL,
|
||||
node1_fee_rate bigint(11) DEFAULT NULL,
|
||||
node1_is_disabled tinyint(1) DEFAULT NULL,
|
||||
node1_max_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node1_min_htlc_mtokens bigint(20) DEFAULT NULL,
|
||||
node1_updated_at datetime DEFAULT NULL,
|
||||
node2_public_key varchar(66) NOT NULL,
|
||||
node2_base_fee_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node2_cltv_delta int(11) DEFAULT NULL,
|
||||
node2_fee_rate bigint(11) DEFAULT NULL,
|
||||
node2_is_disabled tinyint(1) DEFAULT NULL,
|
||||
node2_max_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node2_min_htlc_mtokens bigint(20) unsigned DEFAULT NULL,
|
||||
node2_updated_at datetime DEFAULT NULL,
|
||||
PRIMARY KEY (id),
|
||||
KEY node1_public_key (node1_public_key),
|
||||
KEY node2_public_key (node2_public_key),
|
||||
KEY status (status),
|
||||
KEY short_id (short_id),
|
||||
KEY transaction_id (transaction_id),
|
||||
KEY closing_transaction_id (closing_transaction_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateNodesStatsQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS node_stats (
|
||||
id int(11) unsigned NOT NULL AUTO_INCREMENT,
|
||||
public_key varchar(66) NOT NULL DEFAULT '',
|
||||
added date NOT NULL,
|
||||
capacity bigint(20) unsigned NOT NULL DEFAULT 0,
|
||||
channels int(11) unsigned NOT NULL DEFAULT 0,
|
||||
PRIMARY KEY (id),
|
||||
UNIQUE KEY added (added,public_key),
|
||||
KEY public_key (public_key)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateBlocksAuditsTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS blocks_audits (
|
||||
time timestamp NOT NULL,
|
||||
hash varchar(65) NOT NULL,
|
||||
height int(10) unsigned NOT NULL,
|
||||
missing_txs JSON NOT NULL,
|
||||
added_txs JSON NOT NULL,
|
||||
match_rate float unsigned NOT NULL,
|
||||
PRIMARY KEY (hash),
|
||||
INDEX (height)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
private getCreateGeoNamesTableQuery(): string {
|
||||
return `CREATE TABLE geo_names (
|
||||
id int(11) unsigned NOT NULL,
|
||||
type enum('city','country','division','continent') NOT NULL,
|
||||
names text DEFAULT NULL,
|
||||
UNIQUE KEY id (id,type),
|
||||
KEY id_2 (id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`
|
||||
}
|
||||
|
||||
private getCreateBlocksPricesTableQuery(): string {
|
||||
return `CREATE TABLE IF NOT EXISTS blocks_prices (
|
||||
height int(10) unsigned NOT NULL,
|
||||
price_id int(10) unsigned NOT NULL,
|
||||
PRIMARY KEY (height),
|
||||
INDEX (price_id)
|
||||
) ENGINE=InnoDB DEFAULT CHARSET=utf8;`;
|
||||
}
|
||||
|
||||
public async $truncateIndexedData(tables: string[]) {
|
||||
const allowedTables = ['blocks', 'hashrates'];
|
||||
const allowedTables = ['blocks', 'hashrates', 'prices'];
|
||||
|
||||
try {
|
||||
for (const table of tables) {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import * as fs from 'fs';
|
||||
const fsPromises = fs.promises;
|
||||
import * as cluster from 'cluster';
|
||||
import cluster from 'cluster';
|
||||
import memPool from './mempool';
|
||||
import blocks from './blocks';
|
||||
import logger from '../logger';
|
||||
@@ -19,7 +19,7 @@ class DiskCache {
|
||||
constructor() { }
|
||||
|
||||
async $saveCacheToDisk(): Promise<void> {
|
||||
if (!cluster.isMaster) {
|
||||
if (!cluster.isPrimary) {
|
||||
return;
|
||||
}
|
||||
if (this.isWritingCache) {
|
||||
@@ -43,14 +43,15 @@ class DiskCache {
|
||||
await fsPromises.writeFile(DiskCache.FILE_NAME, JSON.stringify({
|
||||
cacheSchemaVersion: this.cacheSchemaVersion,
|
||||
blocks: blocks.getBlocks(),
|
||||
blockSummaries: blocks.getBlockSummaries(),
|
||||
mempool: {},
|
||||
mempoolArray: mempoolArray.splice(0, chunkSize),
|
||||
}), {flag: 'w'});
|
||||
}), { flag: 'w' });
|
||||
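// The remaining mempool transactions are spread across the numbered chunk files, chunkSize entries per file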
for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
|
||||
await fsPromises.writeFile(DiskCache.FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
|
||||
mempool: {},
|
||||
mempoolArray: mempoolArray.splice(0, chunkSize),
|
||||
}), {flag: 'w'});
|
||||
}), { flag: 'w' });
|
||||
}
|
||||
logger.debug('Mempool and blocks data saved to disk cache');
|
||||
this.isWritingCache = false;
|
||||
@@ -66,7 +67,7 @@ class DiskCache {
|
||||
fs.unlinkSync(DiskCache.FILE_NAMES.replace('{number}', i.toString()));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
loadMempoolCache() {
|
||||
if (!fs.existsSync(DiskCache.FILE_NAME)) {
|
||||
return;
|
||||
@@ -109,6 +110,7 @@ class DiskCache {
|
||||
|
||||
memPool.setMempool(data.mempool);
|
||||
blocks.setBlocks(data.blocks);
|
||||
blocks.setBlockSummaries(data.blockSummaries || []);
|
||||
} catch (e) {
|
||||
logger.warn('Failed to parse mempool and blocks cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
backend/src/api/explorer/channels.api.ts (new file, 467 lines)
@@ -0,0 +1,467 @@
|
||||
import logger from '../../logger';
|
||||
import DB from '../../database';
|
||||
import nodesApi from './nodes.api';
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
import { ILightningApi } from '../lightning/lightning-api.interface';
|
||||
import { Common } from '../common';
|
||||
|
||||
class ChannelsApi {
|
||||
public async $getAllChannels(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getAllChannels error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getAllChannelsGeo(publicKey?: string): Promise<any[]> {
|
||||
try {
|
||||
const params: string[] = [];
|
||||
let query = `SELECT nodes_1.public_key as node1_public_key, nodes_1.alias AS node1_alias,
|
||||
nodes_1.latitude AS node1_latitude, nodes_1.longitude AS node1_longitude,
|
||||
nodes_2.public_key as node2_public_key, nodes_2.alias AS node2_alias,
|
||||
nodes_2.latitude AS node2_latitude, nodes_2.longitude AS node2_longitude,
|
||||
channels.capacity
|
||||
FROM channels
|
||||
JOIN nodes AS nodes_1 on nodes_1.public_key = channels.node1_public_key
|
||||
JOIN nodes AS nodes_2 on nodes_2.public_key = channels.node2_public_key
|
||||
WHERE nodes_1.latitude IS NOT NULL AND nodes_1.longitude IS NOT NULL
|
||||
AND nodes_2.latitude IS NOT NULL AND nodes_2.longitude IS NOT NULL
|
||||
`;
|
||||
|
||||
if (publicKey !== undefined) {
|
||||
query += ' AND (nodes_1.public_key = ? OR nodes_2.public_key = ?)';
|
||||
params.push(publicKey);
|
||||
params.push(publicKey);
|
||||
}
|
||||
|
||||
const [rows]: any = await DB.query(query, params);
|
||||
return rows.map((row) => [
|
||||
row.node1_public_key, row.node1_alias, row.node1_longitude, row.node1_latitude,
|
||||
row.node2_public_key, row.node2_alias, row.node2_longitude, row.node2_latitude,
|
||||
row.capacity]);
|
||||
} catch (e) {
|
||||
logger.err('$getAllChannelsGeo error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $searchChannelsById(search: string): Promise<any[]> {
|
||||
try {
|
||||
const searchStripped = search.replace('%', '') + '%';
|
||||
const query = `SELECT id, short_id, capacity FROM channels WHERE id LIKE ? OR short_id LIKE ? LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$searchChannelsById error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsByStatus(status: number): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE status = ?`;
|
||||
const [rows]: any = await DB.query(query, [status]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsByStatus error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getClosedChannelsWithoutReason(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE status = 2 AND closing_reason IS NULL AND closing_transaction_id != ''`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getClosedChannelsWithoutReason error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsWithoutCreatedDate(): Promise<any[]> {
|
||||
try {
|
||||
const query = `SELECT * FROM channels WHERE created IS NULL`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsWithoutCreatedDate error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannel(id: string): Promise<any> {
|
||||
try {
|
||||
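// The MAX(id) subqueries below pin ns1/ns2 to the most recent node_stats row for each channel endpoint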
const query = `
|
||||
SELECT n1.alias AS alias_left, n1.longitude as node1_longitude, n1.latitude as node1_latitude,
|
||||
n2.alias AS alias_right, n2.longitude as node2_longitude, n2.latitude as node2_latitude,
|
||||
channels.*,
|
||||
ns1.channels AS channels_left, ns1.capacity AS capacity_left, ns2.channels AS channels_right, ns2.capacity AS capacity_right
|
||||
FROM channels
|
||||
LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key
|
||||
LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key
|
||||
LEFT JOIN node_stats AS ns1 ON ns1.public_key = channels.node1_public_key
|
||||
LEFT JOIN node_stats AS ns2 ON ns2.public_key = channels.node2_public_key
|
||||
WHERE (
|
||||
ns1.id = (
|
||||
SELECT MAX(id)
|
||||
FROM node_stats
|
||||
WHERE public_key = channels.node1_public_key
|
||||
)
|
||||
AND ns2.id = (
|
||||
SELECT MAX(id)
|
||||
FROM node_stats
|
||||
WHERE public_key = channels.node2_public_key
|
||||
)
|
||||
)
|
||||
AND channels.id = ?
|
||||
`;
|
||||
|
||||
const [rows]: any = await DB.query(query, [id]);
|
||||
if (rows[0]) {
|
||||
return this.convertChannel(rows[0]);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$getChannel error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsStats(): Promise<any> {
|
||||
try {
|
||||
// Feedback from zerofeerouting:
|
||||
// "I would argue > 5000ppm can be ignored. Channels charging more than .5% fee are ignored by CLN for example."
|
||||
const ignoredFeeRateThreshold = 5000;
|
||||
const ignoredBaseFeeThreshold = 5000;
|
||||
|
||||
// Capacity
|
||||
let query = `SELECT AVG(capacity) AS avgCapacity FROM channels WHERE status = 1 ORDER BY capacity`;
|
||||
const [avgCapacity]: any = await DB.query(query);
|
||||
|
||||
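// Median capacity: fetch all open-channel capacities in ascending order and take the middle element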
query = `SELECT capacity FROM channels WHERE status = 1 ORDER BY capacity`;
|
||||
let [capacity]: any = await DB.query(query);
|
||||
capacity = capacity.map(capacity => capacity.capacity);
|
||||
const medianCapacity = capacity[Math.floor(capacity.length / 2)];
|
||||
|
||||
// Fee rates
|
||||
query = `SELECT node1_fee_rate FROM channels WHERE node1_fee_rate < ${ignoredFeeRateThreshold} AND status = 1`;
|
||||
let [feeRates1]: any = await DB.query(query);
|
||||
feeRates1 = feeRates1.map(rate => rate.node1_fee_rate);
|
||||
query = `SELECT node2_fee_rate FROM channels WHERE node2_fee_rate < ${ignoredFeeRateThreshold} AND status = 1`;
|
||||
let [feeRates2]: any = await DB.query(query);
|
||||
feeRates2 = feeRates2.map(rate => rate.node2_fee_rate);
|
||||
|
||||
let feeRates = (feeRates1.concat(feeRates2)).sort((a, b) => a - b);
|
||||
let avgFeeRate = 0;
|
||||
for (const rate of feeRates) {
|
||||
avgFeeRate += rate;
|
||||
}
|
||||
avgFeeRate /= feeRates.length;
|
||||
const medianFeeRate = feeRates[Math.floor(feeRates.length / 2)];
|
||||
|
||||
// Base fees
|
||||
query = `SELECT node1_base_fee_mtokens FROM channels WHERE node1_base_fee_mtokens < ${ignoredBaseFeeThreshold} AND status = 1`;
|
||||
let [baseFees1]: any = await DB.query(query);
|
||||
baseFees1 = baseFees1.map(rate => rate.node1_base_fee_mtokens);
|
||||
query = `SELECT node2_base_fee_mtokens FROM channels WHERE node2_base_fee_mtokens < ${ignoredBaseFeeThreshold} AND status = 1`;
|
||||
let [baseFees2]: any = await DB.query(query);
|
||||
baseFees2 = baseFees2.map(rate => rate.node2_base_fee_mtokens);
|
||||
|
||||
let baseFees = (baseFees1.concat(baseFees2)).sort((a, b) => a - b);
|
||||
let avgBaseFee = 0;
|
||||
for (const fee of baseFees) {
|
||||
avgBaseFee += fee;
|
||||
}
|
||||
avgBaseFee /= baseFees.length;
|
||||
const medianBaseFee = baseFees[Math.floor(baseFees.length / 2)];
|
||||
|
||||
return {
|
||||
avgCapacity: parseInt(avgCapacity[0].avgCapacity, 10),
|
||||
avgFeeRate: avgFeeRate,
|
||||
avgBaseFee: avgBaseFee,
|
||||
medianCapacity: medianCapacity,
|
||||
medianFeeRate: medianFeeRate,
|
||||
medianBaseFee: medianBaseFee,
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
logger.err(`Cannot calculate channels statistics. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsByTransactionId(transactionIds: string[]): Promise<any[]> {
|
||||
try {
|
||||
transactionIds = transactionIds.map((id) => '\'' + id + '\'');
|
||||
const query = `SELECT n1.alias AS alias_left, n2.alias AS alias_right, channels.* FROM channels LEFT JOIN nodes AS n1 ON n1.public_key = channels.node1_public_key LEFT JOIN nodes AS n2 ON n2.public_key = channels.node2_public_key WHERE channels.transaction_id IN (${transactionIds.join(', ')}) OR channels.closing_transaction_id IN (${transactionIds.join(', ')})`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
const channels = rows.map((row) => this.convertChannel(row));
|
||||
return channels;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelByTransactionId error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsForNode(public_key: string, index: number, length: number, status: string): Promise<any[]> {
|
||||
try {
|
||||
let channelStatusFilter;
|
||||
if (status === 'open') {
|
||||
channelStatusFilter = '< 2';
|
||||
} else if (status === 'closed') {
|
||||
channelStatusFilter = '= 2';
|
||||
}
|
||||
|
||||
// Channels originating from node
|
||||
let query = `
|
||||
SELECT node2.alias, node2.public_key, channels.status, channels.node1_fee_rate,
|
||||
channels.capacity, channels.short_id, channels.id
|
||||
FROM channels
|
||||
JOIN nodes AS node2 ON node2.public_key = channels.node2_public_key
|
||||
WHERE node1_public_key = ? AND channels.status ${channelStatusFilter}
|
||||
`;
|
||||
const [channelsFromNode]: any = await DB.query(query, [public_key, index, length]);
|
||||
|
||||
// Channels incoming to node
|
||||
query = `
|
||||
SELECT node1.alias, node1.public_key, channels.status, channels.node2_fee_rate,
|
||||
channels.capacity, channels.short_id, channels.id
|
||||
FROM channels
|
||||
JOIN nodes AS node1 ON node1.public_key = channels.node1_public_key
|
||||
WHERE node2_public_key = ? AND channels.status ${channelStatusFilter}
|
||||
`;
|
||||
const [channelsToNode]: any = await DB.query(query, [public_key, index, length]);
|
||||
|
||||
let allChannels = channelsFromNode.concat(channelsToNode);
|
||||
allChannels.sort((a, b) => {
|
||||
return b.capacity - a.capacity;
|
||||
});
|
||||
allChannels = allChannels.slice(index, index + length);
|
||||
|
||||
const channels: any[] = []
|
||||
for (const row of allChannels) {
|
||||
const activeChannelsStats: any = await nodesApi.$getActiveChannelsStats(row.public_key);
|
||||
channels.push({
|
||||
status: row.status,
|
||||
capacity: row.capacity ?? 0,
|
||||
short_id: row.short_id,
|
||||
id: row.id,
|
||||
fee_rate: row.node1_fee_rate ?? row.node2_fee_rate ?? 0,
|
||||
node: {
|
||||
alias: row.alias.length > 0 ? row.alias : row.public_key.slice(0, 20),
|
||||
public_key: row.public_key,
|
||||
channels: activeChannelsStats.active_channel_count ?? 0,
|
||||
capacity: activeChannelsStats.capacity ?? 0,
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return channels;
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsForNode error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getChannelsCountForNode(public_key: string, status: string): Promise<any> {
|
||||
try {
|
||||
// Default active and inactive channels
|
||||
let statusQuery = '< 2';
|
||||
// Closed channels only
|
||||
if (status === 'closed') {
|
||||
statusQuery = '= 2';
|
||||
}
|
||||
const query = `
|
||||
SELECT COUNT(*) AS count
|
||||
FROM channels
|
||||
WHERE (node1_public_key = ? OR node2_public_key = ?)
|
||||
AND status ${statusQuery}
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [public_key, public_key]);
|
||||
return rows[0]['count'];
|
||||
} catch (e) {
|
||||
logger.err('$getChannelsForNode error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
private convertChannel(channel: any): any {
|
||||
return {
|
||||
'id': channel.id,
|
||||
'short_id': channel.short_id,
|
||||
'capacity': channel.capacity,
|
||||
'transaction_id': channel.transaction_id,
|
||||
'transaction_vout': channel.transaction_vout,
|
||||
'closing_transaction_id': channel.closing_transaction_id,
|
||||
'closing_reason': channel.closing_reason,
|
||||
'updated_at': channel.updated_at,
|
||||
'created': channel.created,
|
||||
'status': channel.status,
|
||||
'node_left': {
|
||||
'alias': channel.alias_left,
|
||||
'public_key': channel.node1_public_key,
|
||||
'channels': channel.channels_left,
|
||||
'capacity': channel.capacity_left,
|
||||
'base_fee_mtokens': channel.node1_base_fee_mtokens,
|
||||
'cltv_delta': channel.node1_cltv_delta,
|
||||
'fee_rate': channel.node1_fee_rate,
|
||||
'is_disabled': channel.node1_is_disabled,
|
||||
'max_htlc_mtokens': channel.node1_max_htlc_mtokens,
|
||||
'min_htlc_mtokens': channel.node1_min_htlc_mtokens,
|
||||
'updated_at': channel.node1_updated_at,
|
||||
'longitude': channel.node1_longitude,
|
||||
'latitude': channel.node1_latitude,
|
||||
},
|
||||
'node_right': {
|
||||
'alias': channel.alias_right,
|
||||
'public_key': channel.node2_public_key,
|
||||
'channels': channel.channels_right,
|
||||
'capacity': channel.capacity_right,
|
||||
'base_fee_mtokens': channel.node2_base_fee_mtokens,
|
||||
'cltv_delta': channel.node2_cltv_delta,
|
||||
'fee_rate': channel.node2_fee_rate,
|
||||
'is_disabled': channel.node2_is_disabled,
|
||||
'max_htlc_mtokens': channel.node2_max_htlc_mtokens,
|
||||
'min_htlc_mtokens': channel.node2_min_htlc_mtokens,
|
||||
'updated_at': channel.node2_updated_at,
|
||||
'longitude': channel.node2_longitude,
|
||||
'latitude': channel.node2_latitude,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Save or update a channel present in the graph
|
||||
*/
|
||||
public async $saveChannel(channel: ILightningApi.Channel): Promise<void> {
|
||||
const [ txid, vout ] = channel.chan_point.split(':');
|
||||
|
||||
const policy1: Partial<ILightningApi.RoutingPolicy> = channel.node1_policy || {};
|
||||
const policy2: Partial<ILightningApi.RoutingPolicy> = channel.node2_policy || {};
|
||||
|
||||
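// Upsert: most parameters are bound twice below, once for the INSERT values and once for the ON DUPLICATE KEY UPDATE clause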
const query = `INSERT INTO channels
|
||||
(
|
||||
id,
|
||||
short_id,
|
||||
capacity,
|
||||
transaction_id,
|
||||
transaction_vout,
|
||||
updated_at,
|
||||
status,
|
||||
node1_public_key,
|
||||
node1_base_fee_mtokens,
|
||||
node1_cltv_delta,
|
||||
node1_fee_rate,
|
||||
node1_is_disabled,
|
||||
node1_max_htlc_mtokens,
|
||||
node1_min_htlc_mtokens,
|
||||
node1_updated_at,
|
||||
node2_public_key,
|
||||
node2_base_fee_mtokens,
|
||||
node2_cltv_delta,
|
||||
node2_fee_rate,
|
||||
node2_is_disabled,
|
||||
node2_max_htlc_mtokens,
|
||||
node2_min_htlc_mtokens,
|
||||
node2_updated_at
|
||||
)
|
||||
VALUES (?, ?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
capacity = ?,
|
||||
updated_at = ?,
|
||||
status = 1,
|
||||
node1_public_key = ?,
|
||||
node1_base_fee_mtokens = ?,
|
||||
node1_cltv_delta = ?,
|
||||
node1_fee_rate = ?,
|
||||
node1_is_disabled = ?,
|
||||
node1_max_htlc_mtokens = ?,
|
||||
node1_min_htlc_mtokens = ?,
|
||||
node1_updated_at = ?,
|
||||
node2_public_key = ?,
|
||||
node2_base_fee_mtokens = ?,
|
||||
node2_cltv_delta = ?,
|
||||
node2_fee_rate = ?,
|
||||
node2_is_disabled = ?,
|
||||
node2_max_htlc_mtokens = ?,
|
||||
node2_min_htlc_mtokens = ?,
|
||||
node2_updated_at = ?
|
||||
;`;
|
||||
|
||||
await DB.query(query, [
|
||||
Common.channelShortIdToIntegerId(channel.channel_id),
|
||||
Common.channelIntegerIdToShortId(channel.channel_id),
|
||||
channel.capacity,
|
||||
txid,
|
||||
vout,
|
||||
Common.utcDateToMysql(channel.last_update),
|
||||
channel.node1_pub,
|
||||
policy1.fee_base_msat,
|
||||
policy1.time_lock_delta,
|
||||
policy1.fee_rate_milli_msat,
|
||||
policy1.disabled,
|
||||
policy1.max_htlc_msat,
|
||||
policy1.min_htlc,
|
||||
Common.utcDateToMysql(policy1.last_update),
|
||||
channel.node2_pub,
|
||||
policy2.fee_base_msat,
|
||||
policy2.time_lock_delta,
|
||||
policy2.fee_rate_milli_msat,
|
||||
policy2.disabled,
|
||||
policy2.max_htlc_msat,
|
||||
policy2.min_htlc,
|
||||
Common.utcDateToMysql(policy2.last_update),
|
||||
channel.capacity,
|
||||
Common.utcDateToMysql(channel.last_update),
|
||||
channel.node1_pub,
|
||||
policy1.fee_base_msat,
|
||||
policy1.time_lock_delta,
|
||||
policy1.fee_rate_milli_msat,
|
||||
policy1.disabled,
|
||||
policy1.max_htlc_msat,
|
||||
policy1.min_htlc,
|
||||
Common.utcDateToMysql(policy1.last_update),
|
||||
channel.node2_pub,
|
||||
policy2.fee_base_msat,
|
||||
policy2.time_lock_delta,
|
||||
policy2.fee_rate_milli_msat,
|
||||
policy2.disabled,
|
||||
policy2.max_htlc_msat,
|
||||
policy2.min_htlc,
|
||||
Common.utcDateToMysql(policy2.last_update)
|
||||
]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set all channels not in `graphChannelsIds` as inactive (status = 0)
|
||||
*/
|
||||
public async $setChannelsInactive(graphChannelsIds: string[]): Promise<void> {
|
||||
if (graphChannelsIds.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await DB.query<ResultSetHeader>(`
|
||||
UPDATE channels
|
||||
SET status = 0
|
||||
WHERE id NOT IN (
|
||||
${graphChannelsIds.map(id => `"${id}"`).join(',')}
|
||||
)
|
||||
AND status != 2
|
||||
`);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
|
||||
} else {
|
||||
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not in the graph`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$setChannelsInactive() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new ChannelsApi();
|
||||
backend/src/api/explorer/channels.routes.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
|
||||
import config from '../../config';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import channelsApi from './channels.api';
|
||||
|
||||
class ChannelsRoutes {
|
||||
constructor() { }
|
||||
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/txids', this.$getChannelsByTransactionIds)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/search/:search', this.$searchChannelsById)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels/:short_id', this.$getChannel)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels', this.$getChannelsForNode)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo', this.$getAllChannelsGeo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/channels-geo/:publicKey', this.$getAllChannelsGeo)
|
||||
;
|
||||
}
|
||||
|
||||
private async $searchChannelsById(req: Request, res: Response) {
|
||||
try {
|
||||
const channels = await channelsApi.$searchChannelsById(req.params.search);
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getChannel(req: Request, res: Response) {
|
||||
try {
|
||||
const channel = await channelsApi.$getChannel(req.params.short_id);
|
||||
if (!channel) {
|
||||
res.status(404).send('Channel not found');
|
||||
return;
|
||||
}
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(channel);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getChannelsForNode(req: Request, res: Response) {
|
||||
try {
|
||||
if (typeof req.query.public_key !== 'string') {
|
||||
res.status(400).send('Missing parameter: public_key');
|
||||
return;
|
||||
}
|
||||
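// Pagination: 'index' is the offset into the node's channel list; the page size is fixed at 10 below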
const index = parseInt(typeof req.query.index === 'string' ? req.query.index : '0', 10) || 0;
|
||||
const status: string = typeof req.query.status === 'string' ? req.query.status : '';
|
||||
const channels = await channelsApi.$getChannelsForNode(req.query.public_key, index, 10, status);
|
||||
const channelsCount = await channelsApi.$getChannelsCountForNode(req.query.public_key, status);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.header('X-Total-Count', channelsCount.toString());
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getChannelsByTransactionIds(req: Request, res: Response) {
|
||||
try {
|
||||
if (!Array.isArray(req.query.txId)) {
|
||||
res.status(400).send('Not an array');
|
||||
return;
|
||||
}
|
||||
const txIds: string[] = [];
|
||||
for (const _txId in req.query.txId) {
|
||||
if (typeof req.query.txId[_txId] === 'string') {
|
||||
txIds.push(req.query.txId[_txId].toString());
|
||||
}
|
||||
}
|
||||
const channels = await channelsApi.$getChannelsByTransactionId(txIds);
|
||||
const inputs: any[] = [];
|
||||
const outputs: any[] = [];
|
||||
for (const txid of txIds) {
|
||||
const foundChannelInputs = channels.find((channel) => channel.closing_transaction_id === txid);
|
||||
if (foundChannelInputs) {
|
||||
inputs.push(foundChannelInputs);
|
||||
} else {
|
||||
inputs.push(null);
|
||||
}
|
||||
const foundChannelOutputs = channels.find((channel) => channel.transaction_id === txid);
|
||||
if (foundChannelOutputs) {
|
||||
outputs.push(foundChannelOutputs);
|
||||
} else {
|
||||
outputs.push(null);
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
inputs: inputs,
|
||||
outputs: outputs,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getAllChannelsGeo(req: Request, res: Response) {
|
||||
try {
|
||||
const channels = await channelsApi.$getAllChannelsGeo(req.params?.publicKey);
|
||||
res.json(channels);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default new ChannelsRoutes();
|
||||
backend/src/api/explorer/general.routes.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
|
||||
import config from '../../config';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import nodesApi from './nodes.api';
|
||||
import channelsApi from './channels.api';
|
||||
import statisticsApi from './statistics.api';
|
||||
class GeneralLightningRoutes {
|
||||
constructor() { }
|
||||
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/search', this.$searchNodesAndChannels)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/statistics/latest', this.$getGeneralStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/statistics/:interval', this.$getStatistics)
|
||||
;
|
||||
}
|
||||
|
||||
private async $searchNodesAndChannels(req: Request, res: Response) {
|
||||
if (typeof req.query.searchText !== 'string') {
|
||||
res.status(400).send('Missing parameter: searchText');
|
||||
return;
|
||||
}
|
||||
try {
|
||||
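// A single search string is matched against both node public keys/aliases and channel ids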
const nodes = await nodesApi.$searchNodeByPublicKeyOrAlias(req.query.searchText);
|
||||
const channels = await channelsApi.$searchChannelsById(req.query.searchText);
|
||||
res.json({
|
||||
nodes: nodes,
|
||||
channels: channels,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getStatistics(req: Request, res: Response) {
|
||||
try {
|
||||
const statistics = await statisticsApi.$getStatistics(req.params.interval);
|
||||
const statisticsCount = await statisticsApi.$getStatisticsCount();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', statisticsCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(statistics);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getGeneralStats(req: Request, res: Response) {
|
||||
try {
|
||||
const statistics = await statisticsApi.$getLatestStatistics();
|
||||
res.json(statistics);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new GeneralLightningRoutes();
|
||||
backend/src/api/explorer/nodes.api.ts (new file, 388 lines)
@@ -0,0 +1,388 @@
|
||||
import logger from '../../logger';
|
||||
import DB from '../../database';
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
import { ILightningApi } from '../lightning/lightning-api.interface';
|
||||
|
||||
class NodesApi {
|
||||
public async $getNode(public_key: string): Promise<any> {
|
||||
try {
|
||||
// General info
|
||||
let query = `
|
||||
SELECT public_key, alias, UNIX_TIMESTAMP(first_seen) AS first_seen,
|
||||
UNIX_TIMESTAMP(updated_at) AS updated_at, color, sockets as sockets,
|
||||
as_number, city_id, country_id, subdivision_id, longitude, latitude,
|
||||
geo_names_iso.names as iso_code, geo_names_as.names as as_organization, geo_names_city.names as city,
|
||||
geo_names_country.names as country, geo_names_subdivision.names as subdivision
|
||||
FROM nodes
|
||||
LEFT JOIN geo_names geo_names_as on geo_names_as.id = as_number
|
||||
LEFT JOIN geo_names geo_names_city on geo_names_city.id = city_id
|
||||
LEFT JOIN geo_names geo_names_subdivision on geo_names_subdivision.id = subdivision_id
|
||||
LEFT JOIN geo_names geo_names_country on geo_names_country.id = country_id
|
||||
LEFT JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
WHERE public_key = ?
|
||||
`;
|
||||
let [rows]: any[] = await DB.query(query, [public_key]);
|
||||
if (rows.length === 0) {
|
||||
throw new Error(`This node does not exist, or our node is not seeing it yet`);
|
||||
}
|
||||
|
||||
const node = rows[0];
|
||||
node.as_organization = JSON.parse(node.as_organization);
|
||||
node.subdivision = JSON.parse(node.subdivision);
|
||||
node.city = JSON.parse(node.city);
|
||||
node.country = JSON.parse(node.country);
|
||||
|
||||
// Active channels and capacity
|
||||
const activeChannelsStats: any = await this.$getActiveChannelsStats(public_key);
|
||||
node.active_channel_count = activeChannelsStats.active_channel_count ?? 0;
|
||||
node.capacity = activeChannelsStats.capacity ?? 0;
|
||||
|
||||
// Opened channels count
|
||||
query = `
|
||||
SELECT count(short_id) as opened_channel_count
|
||||
FROM channels
|
||||
WHERE status != 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
[rows] = await DB.query(query, [public_key, public_key]);
|
||||
node.opened_channel_count = 0;
|
||||
if (rows.length > 0) {
|
||||
node.opened_channel_count = rows[0].opened_channel_count;
|
||||
}
|
||||
|
||||
// Closed channels count
|
||||
query = `
|
||||
SELECT count(short_id) as closed_channel_count
|
||||
FROM channels
|
||||
WHERE status = 2 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
[rows] = await DB.query(query, [public_key, public_key]);
|
||||
node.closed_channel_count = 0;
|
||||
if (rows.length > 0) {
|
||||
node.closed_channel_count = rows[0].closed_channel_count;
|
||||
}
|
||||
|
||||
return node;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get node information for ${public_key}. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getActiveChannelsStats(node_public_key: string): Promise<unknown> {
|
||||
const query = `
|
||||
SELECT count(short_id) as active_channel_count, sum(capacity) as capacity
|
||||
FROM channels
|
||||
WHERE status = 1 AND (channels.node1_public_key = ? OR channels.node2_public_key = ?)
|
||||
`;
|
||||
const [rows]: any[] = await DB.query(query, [node_public_key, node_public_key]);
|
||||
if (rows.length > 0) {
|
||||
return {
|
||||
active_channel_count: rows[0].active_channel_count,
|
||||
capacity: rows[0].capacity
|
||||
};
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getAllNodes(): Promise<any> {
|
||||
try {
|
||||
const query = `SELECT * FROM nodes`;
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getAllNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodeStats(public_key: string): Promise<any> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT UNIX_TIMESTAMP(added) AS added, capacity, channels
|
||||
FROM node_stats
|
||||
WHERE public_key = ?
|
||||
ORDER BY added DESC
|
||||
`;
|
||||
const [rows]: any = await DB.query(query, [public_key]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getNodeStats error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getTopCapacityNodes(): Promise<any> {
|
||||
try {
|
||||
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
|
||||
const latestDate = rows[0].maxAdded;
|
||||
|
||||
const query = `
|
||||
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.capacity, node_stats.channels
|
||||
FROM node_stats
|
||||
JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
WHERE added = FROM_UNIXTIME(${latestDate})
|
||||
ORDER BY capacity DESC
|
||||
LIMIT 10;
|
||||
`;
|
||||
[rows] = await DB.query(query);
|
||||
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getTopCapacityNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getTopChannelsNodes(): Promise<any> {
|
||||
try {
|
||||
let [rows]: any[] = await DB.query('SELECT UNIX_TIMESTAMP(MAX(added)) as maxAdded FROM node_stats');
|
||||
const latestDate = rows[0].maxAdded;
|
||||
|
||||
const query = `
|
||||
SELECT nodes.public_key, IF(nodes.alias = '', SUBSTRING(nodes.public_key, 1, 20), alias) as alias, node_stats.capacity, node_stats.channels
|
||||
FROM node_stats
|
||||
JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
WHERE added = FROM_UNIXTIME(${latestDate})
|
||||
ORDER BY channels DESC
|
||||
LIMIT 10;
|
||||
`;
|
||||
[rows] = await DB.query(query);
|
||||
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getTopChannelsNodes error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $searchNodeByPublicKeyOrAlias(search: string) {
|
||||
try {
|
||||
const searchStripped = search.replace('%', '') + '%';
|
||||
const query = `SELECT nodes.public_key, nodes.alias, node_stats.capacity FROM nodes LEFT JOIN node_stats ON node_stats.public_key = nodes.public_key WHERE nodes.public_key LIKE ? OR nodes.alias LIKE ? GROUP BY nodes.public_key ORDER BY node_stats.capacity DESC LIMIT 10`;
|
||||
const [rows]: any = await DB.query(query, [searchStripped, searchStripped]);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$searchNodeByPublicKeyOrAlias error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodesISPRanking(groupBy: string, showTor: boolean) {
|
||||
try {
|
||||
const orderBy = groupBy === 'capacity' ? `CAST(SUM(capacity) as INT)` : `COUNT(DISTINCT nodes.public_key)`;
|
||||
|
||||
// Clearnet
|
||||
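// Clearnet nodes: group by AS organization name and rank by either total capacity or distinct node count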
let query = `SELECT GROUP_CONCAT(DISTINCT(nodes.as_number)) as ispId, geo_names.names as names,
|
||||
COUNT(DISTINCT nodes.public_key) as nodesCount, CAST(SUM(capacity) as INT) as capacity
|
||||
FROM nodes
|
||||
JOIN geo_names ON geo_names.id = nodes.as_number
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
GROUP BY geo_names.names
|
||||
ORDER BY ${orderBy} DESC
|
||||
`;
|
||||
const [nodesCountPerAS]: any = await DB.query(query);
|
||||
|
||||
let total = 0;
|
||||
const nodesPerAs: any[] = [];
|
||||
|
||||
for (const asGroup of nodesCountPerAS) {
|
||||
if (groupBy === 'capacity') {
|
||||
total += asGroup.capacity;
|
||||
} else {
|
||||
total += asGroup.nodesCount;
|
||||
}
|
||||
}
|
||||
|
||||
// Tor
|
||||
if (showTor) {
|
||||
query = `SELECT COUNT(DISTINCT nodes.public_key) as nodesCount, CAST(SUM(capacity) as INT) as capacity
|
||||
FROM nodes
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
ORDER BY ${orderBy} DESC
|
||||
`;
|
||||
const [nodesCountTor]: any = await DB.query(query);
|
||||
|
||||
total += groupBy === 'capacity' ? nodesCountTor[0].capacity : nodesCountTor[0].nodesCount;
|
||||
nodesPerAs.push({
|
||||
ispId: null,
|
||||
name: 'Tor',
|
||||
count: nodesCountTor[0].nodesCount,
|
||||
share: Math.floor((groupBy === 'capacity' ? nodesCountTor[0].capacity : nodesCountTor[0].nodesCount) / total * 10000) / 100,
|
||||
capacity: nodesCountTor[0].capacity,
|
||||
});
|
||||
}
|
||||
|
||||
for (const as of nodesCountPerAS) {
|
||||
nodesPerAs.push({
|
||||
ispId: as.ispId,
|
||||
name: JSON.parse(as.names),
|
||||
count: as.nodesCount,
|
||||
share: Math.floor((groupBy === 'capacity' ? as.capacity : as.nodesCount) / total * 10000) / 100,
|
||||
capacity: as.capacity,
|
||||
});
|
||||
}
|
||||
|
||||
return nodesPerAs;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes grouped by AS. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodesPerCountry(countryId: string) {
|
||||
try {
|
||||
const query = `
|
||||
SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
|
||||
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city
|
||||
FROM node_stats
|
||||
JOIN (
|
||||
SELECT public_key, MAX(added) as last_added
|
||||
FROM node_stats
|
||||
GROUP BY public_key
|
||||
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
|
||||
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
WHERE geo_names_country.id = ?
|
||||
ORDER BY capacity DESC
|
||||
`;
|
||||
|
||||
const [rows]: any = await DB.query(query, [countryId]);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
}
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes for country id ${countryId}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodesPerISP(ISPId: string) {
|
||||
try {
|
||||
const query = `
|
||||
SELECT nodes.public_key, CAST(COALESCE(node_stats.capacity, 0) as INT) as capacity, CAST(COALESCE(node_stats.channels, 0) as INT) as channels,
|
||||
nodes.alias, UNIX_TIMESTAMP(nodes.first_seen) as first_seen, UNIX_TIMESTAMP(nodes.updated_at) as updated_at,
|
||||
geo_names_city.names as city, geo_names_country.names as country
|
||||
FROM node_stats
|
||||
JOIN (
|
||||
SELECT public_key, MAX(added) as last_added
|
||||
FROM node_stats
|
||||
GROUP BY public_key
|
||||
) as b ON b.public_key = node_stats.public_key AND b.last_added = node_stats.added
|
||||
RIGHT JOIN nodes ON nodes.public_key = node_stats.public_key
|
||||
JOIN geo_names geo_names_country ON geo_names_country.id = nodes.country_id AND geo_names_country.type = 'country'
|
||||
LEFT JOIN geo_names geo_names_city ON geo_names_city.id = nodes.city_id AND geo_names_city.type = 'city'
|
||||
WHERE nodes.as_number IN (?)
|
||||
ORDER BY capacity DESC
|
||||
`;
|
||||
|
||||
const [rows]: any = await DB.query(query, [ISPId.split(',')]);
|
||||
for (let i = 0; i < rows.length; ++i) {
|
||||
rows[i].country = JSON.parse(rows[i].country);
|
||||
rows[i].city = JSON.parse(rows[i].city);
|
||||
}
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes for ISP id ${ISPId}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNodesCountries() {
|
||||
try {
|
||||
let query = `SELECT geo_names.names as names, geo_names_iso.names as iso_code, COUNT(DISTINCT nodes.public_key) as nodesCount, SUM(capacity) as capacity
|
||||
FROM nodes
|
||||
JOIN geo_names ON geo_names.id = nodes.country_id AND geo_names.type = 'country'
|
||||
JOIN geo_names geo_names_iso ON geo_names_iso.id = nodes.country_id AND geo_names_iso.type = 'country_iso_code'
|
||||
JOIN channels ON channels.node1_public_key = nodes.public_key OR channels.node2_public_key = nodes.public_key
|
||||
GROUP BY country_id
|
||||
ORDER BY COUNT(DISTINCT nodes.public_key) DESC
|
||||
`;
|
||||
const [nodesCountPerCountry]: any = await DB.query(query);
|
||||
|
||||
query = `SELECT COUNT(*) as total FROM nodes WHERE country_id IS NOT NULL`;
|
||||
const [nodesWithAS]: any = await DB.query(query);
|
||||
|
||||
const nodesPerCountry: any[] = [];
|
||||
for (const country of nodesCountPerCountry) {
|
||||
nodesPerCountry.push({
|
||||
name: JSON.parse(country.names),
|
||||
iso: country.iso_code,
|
||||
count: country.nodesCount,
|
||||
share: Math.floor(country.nodesCount / nodesWithAS[0].total * 10000) / 100,
|
||||
capacity: country.capacity,
|
||||
})
|
||||
}
|
||||
|
||||
return nodesPerCountry;
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get nodes grouped by country. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save or update a node present in the graph
|
||||
*/
|
||||
public async $saveNode(node: ILightningApi.Node): Promise<void> {
|
||||
try {
|
||||
const sockets = (node.addresses?.map(a => a.addr).join(',')) ?? '';
|
||||
const query = `INSERT INTO nodes(
|
||||
public_key,
|
||||
first_seen,
|
||||
updated_at,
|
||||
alias,
|
||||
color,
|
||||
sockets,
|
||||
status
|
||||
)
|
||||
VALUES (?, NOW(), FROM_UNIXTIME(?), ?, ?, ?, 1)
|
||||
ON DUPLICATE KEY UPDATE updated_at = FROM_UNIXTIME(?), alias = ?, color = ?, sockets = ?, status = 1`;
|
||||
|
||||
await DB.query(query, [
|
||||
node.pub_key,
|
||||
node.last_update,
|
||||
node.alias,
|
||||
node.color,
|
||||
sockets,
|
||||
node.last_update,
|
||||
node.alias,
|
||||
node.color,
|
||||
sockets,
|
||||
]);
|
||||
} catch (e) {
|
||||
logger.err('$saveNode() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set all nodes not in `nodesPubkeys` as inactive (status = 0)
|
||||
*/
|
||||
public async $setNodesInactive(graphNodesPubkeys: string[]): Promise<void> {
|
||||
if (graphNodesPubkeys.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await DB.query<ResultSetHeader>(`
|
||||
UPDATE nodes
|
||||
SET status = 0
|
||||
WHERE public_key NOT IN (
|
||||
${graphNodesPubkeys.map(pubkey => `"${pubkey}"`).join(',')}
|
||||
)
|
||||
`);
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
logger.info(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
|
||||
} else {
|
||||
logger.debug(`Marked ${result[0].changedRows} nodes as inactive because they are not in the graph`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$setNodesInactive() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new NodesApi();
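Aside: the ranking methods above compute each group's share as Math.floor(value / total * 10000) / 100, i.e. a percentage truncated to two decimals. A small illustration with made-up numbers:

// Hypothetical values, only to illustrate the share arithmetic used in $getNodesISPRanking / $getNodesCountries.
const ispCapacity = 1_234_567;    // capacity of one ISP group (example)
const totalCapacity = 50_000_000; // capacity across all groups (example)
const share = Math.floor(ispCapacity / totalCapacity * 10000) / 100;
console.log(share); // 2.46 — percent, truncated to two decimals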
|
||||
backend/src/api/explorer/nodes.routes.ts (new file, 161 lines)
@@ -0,0 +1,161 @@
|
||||
import config from '../../config';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import nodesApi from './nodes.api';
|
||||
import DB from '../../database';
|
||||
|
||||
class NodesRoutes {
|
||||
constructor() { }
|
||||
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/country/:country', this.$getNodesPerCountry)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/search/:search', this.$searchNode)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/top', this.$getTopNodes)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp-ranking', this.$getISPRanking)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/isp/:isp', this.$getNodesPerISP)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/countries', this.$getNodesCountries)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key/statistics', this.$getHistoricalNodeStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'lightning/nodes/:public_key', this.$getNode)
|
||||
;
|
||||
}
|
||||
|
||||
private async $searchNode(req: Request, res: Response) {
|
||||
try {
|
||||
const nodes = await nodesApi.$searchNodeByPublicKeyOrAlias(req.params.search);
|
||||
res.json(nodes);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNode(req: Request, res: Response) {
|
||||
try {
|
||||
const node = await nodesApi.$getNode(req.params.public_key);
|
||||
if (!node) {
|
||||
res.status(404).send('Node not found');
|
||||
return;
|
||||
}
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(node);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalNodeStats(req: Request, res: Response) {
|
||||
try {
|
||||
const statistics = await nodesApi.$getNodeStats(req.params.public_key);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(statistics);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getTopNodes(req: Request, res: Response) {
|
||||
try {
|
||||
const topCapacityNodes = await nodesApi.$getTopCapacityNodes();
|
||||
const topChannelsNodes = await nodesApi.$getTopChannelsNodes();
|
||||
res.json({
|
||||
topByCapacity: topCapacityNodes,
|
||||
topByChannels: topChannelsNodes,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getISPRanking(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const groupBy = req.query.groupBy as string;
|
||||
const showTor = (req.query.showTor as string) === 'true';
|
||||
|
||||
if (!['capacity', 'node-count'].includes(groupBy)) {
|
||||
res.status(400).send(`groupBy must be one of 'capacity' or 'node-count'`);
|
||||
return;
|
||||
}
|
||||
|
||||
const nodesPerAs = await nodesApi.$getNodesISPRanking(groupBy, showTor);
|
||||
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
|
||||
res.json(nodesPerAs);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodesPerCountry(req: Request, res: Response) {
|
||||
try {
|
||||
const [country]: any[] = await DB.query(
|
||||
`SELECT geo_names.id, geo_names_country.names as country_names
|
||||
FROM geo_names
|
||||
JOIN geo_names geo_names_country on geo_names.id = geo_names_country.id AND geo_names_country.type = 'country'
|
||||
WHERE geo_names.type = 'country_iso_code' AND geo_names.names = ?`,
|
||||
[req.params.country]
|
||||
);
|
||||
|
||||
if (country.length === 0) {
|
||||
res.status(404).send(`This country does not exist or does not host any lightning nodes on clearnet`);
|
||||
return;
|
||||
}
|
||||
|
||||
const nodes = await nodesApi.$getNodesPerCountry(country[0].id);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json({
|
||||
country: JSON.parse(country[0].country_names),
|
||||
nodes: nodes,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodesPerISP(req: Request, res: Response) {
|
||||
try {
|
||||
const [isp]: any[] = await DB.query(
|
||||
`SELECT geo_names.names as isp_name
|
||||
FROM geo_names
|
||||
WHERE geo_names.type = 'as_organization' AND geo_names.id = ?`,
|
||||
[req.params.isp]
|
||||
);
|
||||
|
||||
if (isp.length === 0) {
|
||||
res.status(404).send(`This ISP does not exist or does not host any lightning nodes on clearnet`);
|
||||
return;
|
||||
}
|
||||
|
||||
const nodes = await nodesApi.$getNodesPerISP(req.params.isp);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json({
|
||||
isp: JSON.parse(isp[0].isp_name),
|
||||
nodes: nodes,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getNodesCountries(req: Request, res: Response) {
|
||||
try {
|
||||
const nodesPerAs = await nodesApi.$getNodesCountries();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 600).toUTCString());
|
||||
res.json(nodesPerAs);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new NodesRoutes();
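For context, a minimal sketch of how a client could call the isp-ranking route registered above. The base URL is a placeholder; the real prefix comes from config.MEMPOOL.API_URL_PREFIX.

import axios from 'axios';

// Placeholder base URL — adjust to wherever this backend is served.
const BASE = 'http://localhost:8999/api/v1/';

async function fetchIspRanking(): Promise<void> {
  // groupBy must be 'capacity' or 'node-count'; showTor adds the aggregated Tor entry.
  const { data } = await axios.get(BASE + 'lightning/nodes/isp-ranking', {
    params: { groupBy: 'capacity', showTor: 'true' },
  });
  console.log(data); // [{ ispId, name, count, share, capacity }, ...]
}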
|
||||
backend/src/api/explorer/statistics.api.ts (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
import logger from '../../logger';
|
||||
import DB from '../../database';
|
||||
import { Common } from '../common';
|
||||
|
||||
class StatisticsApi {
|
||||
public async $getStatistics(interval: string | null = null): Promise<any> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT UNIX_TIMESTAMP(added) AS added, channel_count, total_capacity, tor_nodes, clearnet_nodes, unannounced_nodes
|
||||
FROM lightning_stats`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE added BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
}
|
||||
|
||||
query += ` ORDER BY added DESC`;
|
||||
|
||||
try {
|
||||
const [rows]: any = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('$getStatistics error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getLatestStatistics(): Promise<any> {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1`);
|
||||
const [rows2]: any = await DB.query(`SELECT * FROM lightning_stats ORDER BY added DESC LIMIT 1 OFFSET 7`);
|
||||
return {
|
||||
latest: rows[0],
|
||||
previous: rows2[0],
|
||||
};
|
||||
} catch (e) {
|
||||
logger.err('$getLatestStatistics error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getStatisticsCount(): Promise<number> {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT count(*) as count FROM lightning_stats`);
|
||||
return rows[0].count;
|
||||
} catch (e) {
|
||||
logger.err('$getLatestStatistics error: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new StatisticsApi();
|
||||
backend/src/api/lightning/clightning/clightning-client.ts (new file, 272 lines)
@@ -0,0 +1,272 @@
|
||||
// Imported from https://github.com/shesek/lightning-client-js
|
||||
|
||||
'use strict';
|
||||
|
||||
const methods = [
|
||||
'addgossip',
|
||||
'autocleaninvoice',
|
||||
'check',
|
||||
'checkmessage',
|
||||
'close',
|
||||
'connect',
|
||||
'createinvoice',
|
||||
'createinvoicerequest',
|
||||
'createoffer',
|
||||
'createonion',
|
||||
'decode',
|
||||
'decodepay',
|
||||
'delexpiredinvoice',
|
||||
'delinvoice',
|
||||
'delpay',
|
||||
'dev-listaddrs',
|
||||
'dev-rescan-outputs',
|
||||
'disableoffer',
|
||||
'disconnect',
|
||||
'estimatefees',
|
||||
'feerates',
|
||||
'fetchinvoice',
|
||||
'fundchannel',
|
||||
'fundchannel_cancel',
|
||||
'fundchannel_complete',
|
||||
'fundchannel_start',
|
||||
'fundpsbt',
|
||||
'getchaininfo',
|
||||
'getinfo',
|
||||
'getlog',
|
||||
'getrawblockbyheight',
|
||||
'getroute',
|
||||
'getsharedsecret',
|
||||
'getutxout',
|
||||
'help',
|
||||
'invoice',
|
||||
'keysend',
|
||||
'legacypay',
|
||||
'listchannels',
|
||||
'listconfigs',
|
||||
'listforwards',
|
||||
'listfunds',
|
||||
'listinvoices',
|
||||
'listnodes',
|
||||
'listoffers',
|
||||
'listpays',
|
||||
'listpeers',
|
||||
'listsendpays',
|
||||
'listtransactions',
|
||||
'multifundchannel',
|
||||
'multiwithdraw',
|
||||
'newaddr',
|
||||
'notifications',
|
||||
'offer',
|
||||
'offerout',
|
||||
'openchannel_abort',
|
||||
'openchannel_bump',
|
||||
'openchannel_init',
|
||||
'openchannel_signed',
|
||||
'openchannel_update',
|
||||
'pay',
|
||||
'payersign',
|
||||
'paystatus',
|
||||
'ping',
|
||||
'plugin',
|
||||
'reserveinputs',
|
||||
'sendinvoice',
|
||||
'sendonion',
|
||||
'sendonionmessage',
|
||||
'sendpay',
|
||||
'sendpsbt',
|
||||
'sendrawtransaction',
|
||||
'setchannelfee',
|
||||
'signmessage',
|
||||
'signpsbt',
|
||||
'stop',
|
||||
'txdiscard',
|
||||
'txprepare',
|
||||
'txsend',
|
||||
'unreserveinputs',
|
||||
'utxopsbt',
|
||||
'waitanyinvoice',
|
||||
'waitblockheight',
|
||||
'waitinvoice',
|
||||
'waitsendpay',
|
||||
'withdraw'
|
||||
];
|
||||
|
||||
|
||||
import EventEmitter from 'events';
|
||||
import { existsSync, statSync } from 'fs';
|
||||
import { createConnection, Socket } from 'net';
|
||||
import { homedir } from 'os';
|
||||
import path from 'path';
|
||||
import { createInterface, Interface } from 'readline';
|
||||
import logger from '../../../logger';
|
||||
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import { convertAndmergeBidirectionalChannels, convertNode } from './clightning-convert';
|
||||
|
||||
class LightningError extends Error {
|
||||
type: string = 'lightning';
|
||||
message: string = 'lightning-client error';
|
||||
|
||||
constructor(error) {
|
||||
super();
|
||||
this.type = error.type;
|
||||
this.message = error.message;
|
||||
}
|
||||
}
|
||||
|
||||
const defaultRpcPath = path.join(homedir(), '.lightning')
|
||||
, fStat = (...p) => statSync(path.join(...p))
|
||||
, fExists = (...p) => existsSync(path.join(...p))
|
||||
|
||||
export default class CLightningClient extends EventEmitter implements AbstractLightningApi {
|
||||
private rpcPath: string;
|
||||
private reconnectWait: number;
|
||||
private reconnectTimeout;
|
||||
private reqcount: number;
|
||||
private client: Socket;
|
||||
private rl: Interface;
|
||||
private clientConnectionPromise: Promise<unknown>;
|
||||
|
||||
constructor(rpcPath = defaultRpcPath) {
|
||||
if (!path.isAbsolute(rpcPath)) {
|
||||
throw new Error('The rpcPath must be an absolute path');
|
||||
}
|
||||
|
||||
if (!fExists(rpcPath) || !fStat(rpcPath).isSocket()) {
|
||||
// network directory provided, use the lightning-rpc within it
|
||||
if (fExists(rpcPath, 'lightning-rpc')) {
|
||||
rpcPath = path.join(rpcPath, 'lightning-rpc');
|
||||
}
|
||||
|
||||
// main data directory provided, default to using the bitcoin mainnet subdirectory
|
||||
// to be removed in v0.2.0
|
||||
else if (fExists(rpcPath, 'bitcoin', 'lightning-rpc')) {
|
||||
logger.warn(`[CLightningClient] ${rpcPath}/lightning-rpc is missing, using the bitcoin mainnet subdirectory at ${rpcPath}/bitcoin instead.`)
|
||||
logger.warn(`[CLightningClient] specifying the main lightning data directory is deprecated, please specify the network directory explicitly.\n`)
|
||||
rpcPath = path.join(rpcPath, 'bitcoin', 'lightning-rpc')
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug(`[CLightningClient] Connecting to ${rpcPath}`);
|
||||
|
||||
super();
|
||||
this.rpcPath = rpcPath;
|
||||
this.reconnectWait = 0.5;
|
||||
this.reconnectTimeout = null;
|
||||
this.reqcount = 0;
|
||||
|
||||
const _self = this;
|
||||
|
||||
this.client = createConnection(rpcPath).on(
|
||||
'error', () => {
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
}
|
||||
);
|
||||
this.rl = createInterface({ input: this.client }).on(
|
||||
'error', () => {
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
}
|
||||
);
|
||||
|
||||
this.clientConnectionPromise = new Promise<void>(resolve => {
|
||||
_self.client.on('connect', () => {
|
||||
logger.info(`[CLightningClient] Lightning client connected`);
|
||||
_self.reconnectWait = 1;
|
||||
resolve();
|
||||
});
|
||||
|
||||
_self.client.on('end', () => {
|
||||
logger.err('[CLightningClient] Lightning client connection closed, reconnecting');
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
});
|
||||
|
||||
_self.client.on('error', error => {
|
||||
logger.err(`[CLightningClient] Lightning client connection error: ${error}`);
|
||||
_self.increaseWaitTime();
|
||||
_self.reconnect();
|
||||
});
|
||||
});
|
||||
|
||||
this.rl.on('line', line => {
|
||||
line = line.trim();
|
||||
if (!line) {
|
||||
return;
|
||||
}
|
||||
const data = JSON.parse(line);
|
||||
// logger.debug(`[CLightningClient] #${data.id} <-- ${JSON.stringify(data.error || data.result)}`);
|
||||
_self.emit('res:' + data.id, data);
|
||||
});
|
||||
}
|
||||
|
||||
increaseWaitTime(): void {
|
||||
if (this.reconnectWait >= 16) {
|
||||
this.reconnectWait = 16;
|
||||
} else {
|
||||
this.reconnectWait *= 2;
|
||||
}
|
||||
}
|
||||
|
||||
reconnect(): void {
|
||||
const _self = this;
|
||||
|
||||
if (this.reconnectTimeout) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.reconnectTimeout = setTimeout(() => {
|
||||
logger.debug('[CLightningClient] Trying to reconnect...');
|
||||
|
||||
_self.client.connect(_self.rpcPath);
|
||||
_self.reconnectTimeout = null;
|
||||
}, this.reconnectWait * 1000);
|
||||
}
|
||||
|
||||
call(method, args = []): Promise<any> {
|
||||
const _self = this;
|
||||
|
||||
const callInt = ++this.reqcount;
|
||||
const sendObj = {
|
||||
jsonrpc: '2.0',
|
||||
method,
|
||||
params: args,
|
||||
id: '' + callInt
|
||||
};
|
||||
|
||||
// logger.debug(`[CLightningClient] #${callInt} --> ${method} ${args}`);
|
||||
|
||||
// Wait for the client to connect
|
||||
return this.clientConnectionPromise
|
||||
.then(() => new Promise((resolve, reject) => {
|
||||
// Wait for a response
|
||||
this.once('res:' + callInt, res => res.error == null
|
||||
? resolve(res.result)
|
||||
: reject(new LightningError(res.error))
|
||||
);
|
||||
|
||||
// Send the command
|
||||
_self.client.write(JSON.stringify(sendObj));
|
||||
}));
|
||||
}
|
||||
|
||||
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
|
||||
const listnodes: any[] = await this.call('listnodes');
|
||||
const listchannels: any[] = await this.call('listchannels');
|
||||
const channelsList = await convertAndmergeBidirectionalChannels(listchannels['channels']);
|
||||
|
||||
return {
|
||||
nodes: listnodes['nodes'].map(node => convertNode(node)),
|
||||
edges: channelsList,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const protify = s => s.replace(/-([a-z])/g, m => m[1].toUpperCase());
|
||||
|
||||
methods.forEach(k => {
|
||||
CLightningClient.prototype[protify(k)] = function (...args: any) {
|
||||
return this.call(k, args);
|
||||
};
|
||||
});
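A usage sketch, assuming a reachable Core Lightning socket: protify camelCases hyphenated RPC names, so each entry in the methods list becomes a prototype method that proxies to call(). The path and the cast below are illustrative only.

// Hypothetical usage of the dynamically generated wrappers above.
const client = new CLightningClient('/home/user/.lightning/bitcoin/lightning-rpc');

// 'getinfo' keeps its name; 'dev-listaddrs' becomes devListaddrs via protify().
(client as any).getinfo().then(info => console.log(info));
(client as any).devListaddrs().then(addrs => console.log(addrs));
// Both are equivalent to client.call('getinfo') / client.call('dev-listaddrs').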
|
||||
backend/src/api/lightning/clightning/clightning-convert.ts (new file, 131 lines)
@@ -0,0 +1,131 @@
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import FundingTxFetcher from '../../../tasks/lightning/sync-tasks/funding-tx-fetcher';
|
||||
import logger from '../../../logger';
|
||||
import { Common } from '../../common';
|
||||
|
||||
/**
|
||||
* Convert a clightning "listnode" entry to a lnd node entry
|
||||
*/
|
||||
export function convertNode(clNode: any): ILightningApi.Node {
|
||||
return {
|
||||
alias: clNode.alias ?? '',
|
||||
color: `#${clNode.color ?? ''}`,
|
||||
features: [], // TODO parse and return clNode.feature
|
||||
pub_key: clNode.nodeid,
|
||||
addresses: clNode.addresses?.map((addr) => {
|
||||
return {
|
||||
network: addr.type,
|
||||
addr: `${addr.address}:${addr.port}`
|
||||
};
|
||||
}),
|
||||
last_update: clNode?.last_timestamp ?? 0,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert clightning "listchannels" response to lnd "describegraph.edges" format
|
||||
*/
|
||||
export async function convertAndmergeBidirectionalChannels(clChannels: any[]): Promise<ILightningApi.Channel[]> {
|
||||
logger.info('Converting clightning nodes and channels to lnd graph format');
|
||||
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
let channelProcessed = 0;
|
||||
|
||||
const consolidatedChannelList: ILightningApi.Channel[] = [];
|
||||
const clChannelsDict = {};
|
||||
const clChannelsDictCount = {};
|
||||
|
||||
for (const clChannel of clChannels) {
|
||||
if (!clChannelsDict[clChannel.short_channel_id]) {
|
||||
clChannelsDict[clChannel.short_channel_id] = clChannel;
|
||||
clChannelsDictCount[clChannel.short_channel_id] = 1;
|
||||
} else {
|
||||
consolidatedChannelList.push(
|
||||
await buildFullChannel(clChannel, clChannelsDict[clChannel.short_channel_id])
|
||||
);
|
||||
delete clChannelsDict[clChannel.short_channel_id];
|
||||
clChannelsDictCount[clChannel.short_channel_id]++;
|
||||
}
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Building complete channels from clightning output. Channels processed: ${channelProcessed + 1} of ${clChannels.length}`);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
++channelProcessed;
|
||||
}
|
||||
|
||||
channelProcessed = 0;
|
||||
const keys = Object.keys(clChannelsDict);
|
||||
for (const short_channel_id of keys) {
|
||||
consolidatedChannelList.push(await buildIncompleteChannel(clChannelsDict[short_channel_id]));
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Building partial channels from clightning output. Channels processed: ${channelProcessed + 1} of ${keys.length}`);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
return consolidatedChannelList;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert two clightning "getchannels" entries into a full a lnd "describegraph.edges" format
|
||||
* In this case, clightning knows the channel policy for both nodes
|
||||
*/
|
||||
async function buildFullChannel(clChannelA: any, clChannelB: any): Promise<ILightningApi.Channel> {
|
||||
const lastUpdate = Math.max(clChannelA.last_update ?? 0, clChannelB.last_update ?? 0);
|
||||
|
||||
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannelA.short_channel_id);
|
||||
const parts = clChannelA.short_channel_id.split('x');
|
||||
const outputIdx = parts[2];
|
||||
|
||||
return {
|
||||
channel_id: Common.channelShortIdToIntegerId(clChannelA.short_channel_id),
|
||||
capacity: clChannelA.satoshis,
|
||||
last_update: lastUpdate,
|
||||
node1_policy: convertPolicy(clChannelA),
|
||||
node2_policy: convertPolicy(clChannelB),
|
||||
chan_point: `${tx.txid}:${outputIdx}`,
|
||||
node1_pub: clChannelA.source,
|
||||
node2_pub: clChannelB.source,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert one clightning "getchannels" entry into a full a lnd "describegraph.edges" format
|
||||
* In this case, clightning knows the channel policy of only one node
|
||||
*/
|
||||
async function buildIncompleteChannel(clChannel: any): Promise<ILightningApi.Channel> {
|
||||
const tx = await FundingTxFetcher.$fetchChannelOpenTx(clChannel.short_channel_id);
|
||||
const parts = clChannel.short_channel_id.split('x');
|
||||
const outputIdx = parts[2];
|
||||
|
||||
return {
|
||||
channel_id: Common.channelShortIdToIntegerId(clChannel.short_channel_id),
|
||||
capacity: clChannel.satoshis,
|
||||
last_update: clChannel.last_update ?? 0,
|
||||
node1_policy: convertPolicy(clChannel),
|
||||
node2_policy: null,
|
||||
chan_point: `${tx.txid}:${outputIdx}`,
|
||||
node1_pub: clChannel.source,
|
||||
node2_pub: clChannel.destination,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a clightning "listnode" response to a lnd channel policy format
|
||||
*/
|
||||
function convertPolicy(clChannel: any): ILightningApi.RoutingPolicy {
|
||||
return {
|
||||
time_lock_delta: 0, // TODO
|
||||
min_htlc: clChannel.htlc_minimum_msat.slice(0, -4),
|
||||
max_htlc_msat: clChannel.htlc_maximum_msat.slice(0, -4),
|
||||
fee_base_msat: clChannel.base_fee_millisatoshi,
|
||||
fee_rate_milli_msat: clChannel.fee_per_millionth,
|
||||
disabled: !clChannel.active,
|
||||
last_update: clChannel.last_update ?? 0,
|
||||
};
|
||||
}
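Note on the slice(0, -4) calls above: they assume Core Lightning reports amounts as strings carrying an 'msat' suffix, e.g. '1000msat'. A tiny illustration of that assumption:

// Assumed clightning amount format — slicing drops the trailing 'msat'.
const htlcMinimumMsat = '1000msat'; // example value
const minHtlc = htlcMinimumMsat.slice(0, -4); // '1000'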
|
||||
backend/src/api/lightning/lightning-api-abstract-factory.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
|
||||
import { ILightningApi } from './lightning-api.interface';
|
||||
|
||||
export interface AbstractLightningApi {
|
||||
$getNetworkGraph(): Promise<ILightningApi.NetworkGraph>;
|
||||
}
|
||||
backend/src/api/lightning/lightning-api-factory.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
import config from '../../config';
|
||||
import CLightningClient from './clightning/clightning-client';
|
||||
import { AbstractLightningApi } from './lightning-api-abstract-factory';
|
||||
import LndApi from './lnd/lnd-api';
|
||||
|
||||
function lightningApiFactory(): AbstractLightningApi {
|
||||
switch (config.LIGHTNING.ENABLED === true && config.LIGHTNING.BACKEND) {
|
||||
case 'cln':
|
||||
return new CLightningClient(config.CLIGHTNING.SOCKET);
|
||||
case 'lnd':
|
||||
default:
|
||||
return new LndApi();
|
||||
}
|
||||
}
|
||||
|
||||
export default lightningApiFactory();
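The factory switches on the LIGHTNING and CLIGHTNING config sections referenced above; a sketch of the corresponding configuration, with placeholder values only (key names match the config usages in these files):

// Hypothetical config excerpt — values are examples, not defaults.
const exampleConfig = {
  LIGHTNING: { ENABLED: true, BACKEND: 'cln' },
  CLIGHTNING: { SOCKET: '/home/user/.lightning/bitcoin/lightning-rpc' },
  LND: { REST_API_URL: 'https://localhost:8080', MACAROON_PATH: '/path/to/admin.macaroon', TLS_CERT_PATH: '/path/to/tls.cert' },
};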
|
||||
backend/src/api/lightning/lightning-api.interface.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
export namespace ILightningApi {
|
||||
export interface NetworkInfo {
|
||||
graph_diameter: number;
|
||||
avg_out_degree: number;
|
||||
max_out_degree: number;
|
||||
num_nodes: number;
|
||||
num_channels: number;
|
||||
total_network_capacity: string;
|
||||
avg_channel_size: number;
|
||||
min_channel_size: string;
|
||||
max_channel_size: string;
|
||||
median_channel_size_sat: string;
|
||||
num_zombie_chans: string;
|
||||
}
|
||||
|
||||
export interface NetworkGraph {
|
||||
nodes: Node[];
|
||||
edges: Channel[];
|
||||
}
|
||||
|
||||
export interface Channel {
|
||||
channel_id: string;
|
||||
chan_point: string;
|
||||
last_update: number;
|
||||
node1_pub: string;
|
||||
node2_pub: string;
|
||||
capacity: string;
|
||||
node1_policy: RoutingPolicy | null;
|
||||
node2_policy: RoutingPolicy | null;
|
||||
}
|
||||
|
||||
export interface RoutingPolicy {
|
||||
time_lock_delta: number;
|
||||
min_htlc: string;
|
||||
fee_base_msat: string;
|
||||
fee_rate_milli_msat: string;
|
||||
disabled: boolean;
|
||||
max_htlc_msat: string;
|
||||
last_update: number;
|
||||
}
|
||||
|
||||
export interface Node {
|
||||
last_update: number;
|
||||
pub_key: string;
|
||||
alias: string;
|
||||
addresses: {
|
||||
network: string;
|
||||
addr: string;
|
||||
}[];
|
||||
color: string;
|
||||
features: { [key: number]: Feature };
|
||||
}
|
||||
|
||||
export interface Info {
|
||||
identity_pubkey: string;
|
||||
alias: string;
|
||||
num_pending_channels: number;
|
||||
num_active_channels: number;
|
||||
num_peers: number;
|
||||
block_height: number;
|
||||
block_hash: string;
|
||||
synced_to_chain: boolean;
|
||||
testnet: boolean;
|
||||
uris: string[];
|
||||
best_header_timestamp: string;
|
||||
version: string;
|
||||
num_inactive_channels: number;
|
||||
chains: {
|
||||
chain: string;
|
||||
network: string;
|
||||
}[];
|
||||
color: string;
|
||||
synced_to_graph: boolean;
|
||||
features: { [key: number]: Feature };
|
||||
commit_hash: string;
|
||||
/** Available on LND since v0.15.0-beta */
|
||||
require_htlc_interceptor?: boolean;
|
||||
}
|
||||
|
||||
export interface Feature {
|
||||
name: string;
|
||||
is_required: boolean;
|
||||
is_known: boolean;
|
||||
}
|
||||
}
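For orientation, a sketch of a Node object in the shape declared above; every value is a placeholder:

const exampleNode: ILightningApi.Node = {
  last_update: 1652140000,
  pub_key: '02aabbcc00112233445566778899aabbccddeeff00112233445566778899aabbcc', // placeholder key
  alias: 'example-node',
  addresses: [{ network: 'ipv4', addr: '203.0.113.5:9735' }],
  color: '#ff9900',
  features: { 9: { name: 'tlv-onion', is_required: false, is_known: true } }, // example feature entry
};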
|
||||
backend/src/api/lightning/lnd/lnd-api.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
import axios, { AxiosRequestConfig } from 'axios';
|
||||
import { Agent } from 'https';
|
||||
import * as fs from 'fs';
|
||||
import { AbstractLightningApi } from '../lightning-api-abstract-factory';
|
||||
import { ILightningApi } from '../lightning-api.interface';
|
||||
import config from '../../../config';
|
||||
|
||||
class LndApi implements AbstractLightningApi {
|
||||
axiosConfig: AxiosRequestConfig = {};
|
||||
|
||||
constructor() {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.axiosConfig = {
|
||||
headers: {
|
||||
'Grpc-Metadata-macaroon': fs.readFileSync(config.LND.MACAROON_PATH).toString('hex')
|
||||
},
|
||||
httpsAgent: new Agent({
|
||||
ca: fs.readFileSync(config.LND.TLS_CERT_PATH)
|
||||
}),
|
||||
timeout: 10000
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async $getNetworkInfo(): Promise<ILightningApi.NetworkInfo> {
|
||||
return axios.get<ILightningApi.NetworkInfo>(config.LND.REST_API_URL + '/v1/graph/info', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
async $getInfo(): Promise<ILightningApi.Info> {
|
||||
return axios.get<ILightningApi.Info>(config.LND.REST_API_URL + '/v1/getinfo', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
|
||||
async $getNetworkGraph(): Promise<ILightningApi.NetworkGraph> {
|
||||
return axios.get<ILightningApi.NetworkGraph>(config.LND.REST_API_URL + '/v1/graph', this.axiosConfig)
|
||||
.then((response) => response.data);
|
||||
}
|
||||
}
|
||||
|
||||
export default LndApi;
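A minimal usage sketch of the class above, assuming LIGHTNING.ENABLED is true and the configured macaroon/TLS paths point at a reachable lnd REST endpoint:

// Illustrative only — construction reads the macaroon and TLS cert from the configured paths.
const lnd = new LndApi();
lnd.$getInfo().then(info => console.log(info.alias, info.block_height));
lnd.$getNetworkGraph().then(graph => console.log(graph.nodes.length, 'nodes'));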
|
||||
backend/src/api/liquid/liquid.routes.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
import axios from 'axios';
|
||||
import { Application, Request, Response } from 'express';
|
||||
import config from '../../config';
|
||||
import elementsParser from './elements-parser';
|
||||
import icons from './icons';
|
||||
|
||||
class LiquidRoutes {
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/icons', this.getAllLiquidIcon)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/featured', this.$getAllFeaturedLiquidAssets)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'asset/:assetId/icon', this.getLiquidIcon)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/group/:id', this.$getAssetGroup)
|
||||
;
|
||||
|
||||
if (config.DATABASE.ENABLED) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'liquid/pegs/month', this.$getElementsPegsByMonth)
|
||||
;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private getLiquidIcon(req: Request, res: Response) {
|
||||
const result = icons.getIconByAssetId(req.params.assetId);
|
||||
if (result) {
|
||||
res.setHeader('content-type', 'image/png');
|
||||
res.setHeader('content-length', result.length);
|
||||
res.send(result);
|
||||
} else {
|
||||
res.status(404).send('Asset icon not found');
|
||||
}
|
||||
}
|
||||
|
||||
private getAllLiquidIcon(req: Request, res: Response) {
|
||||
const result = icons.getAllIconIds();
|
||||
if (result) {
|
||||
res.json(result);
|
||||
} else {
|
||||
res.status(404).send('Asset icons not found');
|
||||
}
|
||||
}
|
||||
|
||||
private async $getAllFeaturedLiquidAssets(req: Request, res: Response) {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.LIQUID_API}/assets/featured`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
}
|
||||
|
||||
private async $getAssetGroup(req: Request, res: Response) {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.LIQUID_API}/assets/group/${parseInt(req.params.id, 10)}`,
|
||||
{ responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
}
|
||||
|
||||
private async $getElementsPegsByMonth(req: Request, res: Response) {
|
||||
try {
|
||||
const pegs = await elementsParser.$getPegDataByMonth();
|
||||
res.json(pegs);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new LiquidRoutes();
|
||||
251
backend/src/api/mining/mining-routes.ts
Normal file
251
backend/src/api/mining/mining-routes.ts
Normal file
@@ -0,0 +1,251 @@
|
||||
import { Application, Request, Response } from 'express';
|
||||
import config from "../../config";
|
||||
import logger from '../../logger';
|
||||
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
|
||||
import BlocksRepository from '../../repositories/BlocksRepository';
|
||||
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
|
||||
import HashratesRepository from '../../repositories/HashratesRepository';
|
||||
import bitcoinClient from '../bitcoin/bitcoin-client';
|
||||
import mining from "./mining";
|
||||
|
||||
class MiningRoutes {
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pools/:interval', this.$getPools)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/hashrate', this.$getPoolHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks', this.$getPoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks/:height', this.$getPoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug', this.$getPool)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/pools/:interval', this.$getPoolsHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/:interval', this.$getHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments', this.$getDifficultyAdjustments)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/reward-stats/:blockCount', this.$getRewardStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fees/:interval', this.$getHistoricalBlockFees)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/rewards/:interval', this.$getHistoricalBlockRewards)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fee-rates/:interval', this.$getHistoricalBlockFeeRates)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', this.$getHistoricalBlockSizeAndWeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/difficulty-adjustments/:interval', this.$getDifficultyAdjustments)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/predictions/:interval', this.$getHistoricalBlockPrediction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/audit/:hash', this.$getBlockAudit)
|
||||
;
|
||||
}
|
||||
|
||||
private async $getPool(req: Request, res: Response): Promise<void> {
|
||||
try {
|
||||
const stats = await mining.$getPoolStat(req.params.slug);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(stats);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
|
||||
res.status(404).send(e.message);
|
||||
} else {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPoolBlocks(req: Request, res: Response) {
|
||||
try {
|
||||
const poolBlocks = await BlocksRepository.$getBlocksByPool(
|
||||
req.params.slug,
|
||||
req.params.height === undefined ? undefined : parseInt(req.params.height, 10),
|
||||
);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(poolBlocks);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
|
||||
res.status(404).send(e.message);
|
||||
} else {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPools(req: Request, res: Response) {
|
||||
try {
|
||||
const stats = await mining.$getPoolsStats(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(stats);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPoolsHistoricalHashrate(req: Request, res: Response) {
|
||||
try {
|
||||
const hashrates = await HashratesRepository.$getPoolsWeeklyHashrate(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
res.json(hashrates);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getPoolHistoricalHashrate(req: Request, res: Response) {
|
||||
try {
|
||||
const hashrates = await HashratesRepository.$getPoolWeeklyHashrate(req.params.slug);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
res.json(hashrates);
|
||||
} catch (e) {
|
||||
if (e instanceof Error && e.message.indexOf('This mining pool does not exist') > -1) {
|
||||
res.status(404).send(e.message);
|
||||
} else {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalHashrate(req: Request, res: Response) {
|
||||
let currentHashrate = 0, currentDifficulty = 0;
|
||||
try {
|
||||
currentHashrate = await bitcoinClient.getNetworkHashPs();
|
||||
currentDifficulty = await bitcoinClient.getDifficulty();
|
||||
} catch (e) {
|
||||
logger.debug('Bitcoin Core is not available, using zeroed value for current hashrate and difficulty');
|
||||
}
|
||||
|
||||
try {
|
||||
const hashrates = await HashratesRepository.$getNetworkDailyHashrate(req.params.interval);
|
||||
const difficulty = await DifficultyAdjustmentsRepository.$getAdjustments(req.params.interval, false);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
res.json({
|
||||
hashrates: hashrates,
|
||||
difficulty: difficulty,
|
||||
currentHashrate: currentHashrate,
|
||||
currentDifficulty: currentDifficulty,
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockFees(req: Request, res: Response) {
|
||||
try {
|
||||
const blockFees = await mining.$getHistoricalBlockFees(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(blockFees);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockRewards(req: Request, res: Response) {
|
||||
try {
|
||||
const blockRewards = await mining.$getHistoricalBlockRewards(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(blockRewards);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockFeeRates(req: Request, res: Response) {
|
||||
try {
|
||||
const blockFeeRates = await mining.$getHistoricalBlockFeeRates(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(blockFeeRates);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockSizeAndWeight(req: Request, res: Response) {
|
||||
try {
|
||||
const blockSizes = await mining.$getHistoricalBlockSizes(req.params.interval);
|
||||
const blockWeights = await mining.$getHistoricalBlockWeights(req.params.interval);
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json({
|
||||
sizes: blockSizes,
|
||||
weights: blockWeights
|
||||
});
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getDifficultyAdjustments(req: Request, res: Response) {
|
||||
try {
|
||||
const difficulty = await DifficultyAdjustmentsRepository.$getRawAdjustments(req.params.interval, true);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
res.json(difficulty.map(adj => [adj.time, adj.height, adj.difficulty, adj.adjustment]));
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $getRewardStats(req: Request, res: Response) {
|
||||
try {
|
||||
const response = await mining.$getRewardStats(parseInt(req.params.blockCount, 10));
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(response);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
}
|
||||
|
||||
private async $getHistoricalBlockPrediction(req: Request, res: Response) {
|
||||
try {
|
||||
const blockPredictions = await mining.$getBlockPredictionsHistory(req.params.interval);
|
||||
const blockCount = await BlocksAuditsRepository.$getPredictionsCount();
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.header('X-total-count', blockCount.toString());
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
res.json(blockPredictions.map(prediction => [prediction.time, prediction.height, prediction.match_rate]));
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
|
||||
public async $getBlockAudit(req: Request, res: Response) {
|
||||
try {
|
||||
const audit = await BlocksAuditsRepository.$getBlockAudit(req.params.hash);
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 3600 * 24).toUTCString());
|
||||
res.json(audit);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new MiningRoutes();
|
||||
@@ -1,23 +1,39 @@
|
||||
import { PoolInfo, PoolStats, RewardStats } from '../mempool.interfaces';
|
||||
import BlocksRepository from '../repositories/BlocksRepository';
|
||||
import PoolsRepository from '../repositories/PoolsRepository';
|
||||
import HashratesRepository from '../repositories/HashratesRepository';
|
||||
import bitcoinClient from './bitcoin/bitcoin-client';
|
||||
import logger from '../logger';
|
||||
import { Common } from './common';
|
||||
import loadingIndicators from './loading-indicators';
|
||||
import { BlockPrice, PoolInfo, PoolStats, RewardStats } from '../../mempool.interfaces';
|
||||
import BlocksRepository from '../../repositories/BlocksRepository';
|
||||
import PoolsRepository from '../../repositories/PoolsRepository';
|
||||
import HashratesRepository from '../../repositories/HashratesRepository';
|
||||
import bitcoinClient from '../bitcoin/bitcoin-client';
|
||||
import logger from '../../logger';
|
||||
import { Common } from '../common';
|
||||
import loadingIndicators from '../loading-indicators';
|
||||
import { escape } from 'mysql2';
|
||||
import DifficultyAdjustmentsRepository from '../../repositories/DifficultyAdjustmentsRepository';
|
||||
import config from '../../config';
|
||||
import BlocksAuditsRepository from '../../repositories/BlocksAuditsRepository';
|
||||
import PricesRepository from '../../repositories/PricesRepository';
|
||||
|
||||
class Mining {
|
||||
blocksPriceIndexingRunning = false;
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get historical block predictions match rate
|
||||
*/
|
||||
public async $getBlockPredictionsHistory(interval: string | null = null): Promise<any> {
|
||||
return await BlocksAuditsRepository.$getBlockPredictionsHistory(
|
||||
this.getTimeRange(interval),
|
||||
Common.getSqlInterval(interval)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get historical block total fee
|
||||
*/
|
||||
public async $getHistoricalBlockFees(interval: string | null = null): Promise<any> {
|
||||
return await BlocksRepository.$getHistoricalBlockFees(
|
||||
this.getTimeRange(interval),
|
||||
this.getTimeRange(interval, 5),
|
||||
Common.getSqlInterval(interval)
|
||||
);
|
||||
}
|
||||
@@ -159,26 +175,25 @@ class Mining {
|
||||
*/
|
||||
public async $generatePoolHashrateHistory(): Promise<void> {
|
||||
const now = new Date();
|
||||
const lastestRunDate = await HashratesRepository.$getLatestRun('last_weekly_hashrates_indexing');
|
||||
|
||||
try {
|
||||
const lastestRunDate = await HashratesRepository.$getLatestRun('last_weekly_hashrates_indexing');
|
||||
|
||||
// Run only if:
|
||||
// * lastestRunDate is set to 0 (node backend restart, reorg)
|
||||
// * we started a new week (around Monday midnight)
|
||||
const runIndexing = lastestRunDate === 0 || now.getUTCDay() === 1 && lastestRunDate !== now.getUTCDate();
|
||||
if (!runIndexing) {
|
||||
return;
|
||||
}
|
||||
} catch (e) {
|
||||
throw e;
|
||||
// Run only if:
|
||||
// * lastestRunDate is set to 0 (node backend restart, reorg)
|
||||
// * we started a new week (around Monday midnight)
|
||||
const runIndexing = lastestRunDate === 0 || now.getUTCDay() === 1 && lastestRunDate !== now.getUTCDate();
|
||||
if (!runIndexing) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
|
||||
|
||||
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
|
||||
const genesisTimestamp = genesisBlock.time * 1000;
|
||||
|
||||
const indexedTimestamp = await HashratesRepository.$getWeeklyHashrateTimestamps();
|
||||
const hashrates: any[] = [];
|
||||
const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
|
||||
|
||||
|
||||
const lastMonday = new Date(now.setDate(now.getDate() - (now.getDay() + 6) % 7));
|
||||
const lastMondayMidnight = this.getDateMidnight(lastMonday);
|
||||
let toTimestamp = lastMondayMidnight.getTime();
|
||||
@@ -193,7 +208,7 @@ class Mining {
|
||||
logger.debug(`Indexing weekly mining pool hashrate`);
|
||||
loadingIndicators.setProgress('weekly-hashrate-indexing', 0);
|
||||
|
||||
while (toTimestamp > genesisTimestamp) {
|
||||
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
|
||||
const fromTimestamp = toTimestamp - 604800000;
|
||||
|
||||
// Skip already indexed weeks
|
||||
@@ -203,14 +218,6 @@ class Mining {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if we have blocks for the previous week (which mean that the week
|
||||
// we are currently indexing has complete data)
|
||||
const blockStatsPreviousWeek: any = await BlocksRepository.$blockCountBetweenTimestamp(
|
||||
null, (fromTimestamp - 604800000) / 1000, (toTimestamp - 604800000) / 1000);
|
||||
if (blockStatsPreviousWeek.blockCount === 0) { // We are done indexing
|
||||
break;
|
||||
}
|
||||
|
||||
const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
|
||||
null, fromTimestamp / 1000, toTimestamp / 1000);
|
||||
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
|
||||
@@ -218,34 +225,35 @@ class Mining {
|
||||
|
||||
let pools = await PoolsRepository.$getPoolsInfoBetween(fromTimestamp / 1000, toTimestamp / 1000);
|
||||
const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
|
||||
pools = pools.map((pool: any) => {
|
||||
pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
|
||||
pool.share = (pool.blockCount / totalBlocks);
|
||||
return pool;
|
||||
});
|
||||
|
||||
for (const pool of pools) {
|
||||
hashrates.push({
|
||||
hashrateTimestamp: toTimestamp / 1000,
|
||||
avgHashrate: pool['hashrate'],
|
||||
poolId: pool.poolId,
|
||||
share: pool['share'],
|
||||
type: 'weekly',
|
||||
if (totalBlocks > 0) {
|
||||
pools = pools.map((pool: any) => {
|
||||
pool.hashrate = (pool.blockCount / totalBlocks) * lastBlockHashrate;
|
||||
pool.share = (pool.blockCount / totalBlocks);
|
||||
return pool;
|
||||
});
|
||||
}
|
||||
|
||||
newlyIndexed += hashrates.length;
|
||||
await HashratesRepository.$saveHashrates(hashrates);
|
||||
hashrates.length = 0;
|
||||
for (const pool of pools) {
|
||||
hashrates.push({
|
||||
hashrateTimestamp: toTimestamp / 1000,
|
||||
avgHashrate: pool['hashrate'],
|
||||
poolId: pool.poolId,
|
||||
share: pool['share'],
|
||||
type: 'weekly',
|
||||
});
|
||||
}
|
||||
|
||||
newlyIndexed += hashrates.length;
|
||||
await HashratesRepository.$saveHashrates(hashrates);
|
||||
hashrates.length = 0;
|
||||
}
|
||||
|
||||
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
|
||||
if (elapsedSeconds > 1) {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const weeksPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
|
||||
const progress = Math.round(totalIndexed / totalWeekIndexed * 10000) / 100;
|
||||
const timeLeft = Math.round((totalWeekIndexed - totalIndexed) / weeksPerSeconds);
|
||||
const formattedDate = new Date(fromTimestamp).toUTCString();
|
||||
logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);
|
||||
logger.debug(`Getting weekly pool hashrate for ${formattedDate} | ~${weeksPerSeconds.toFixed(2)} weeks/sec | total: ~${totalIndexed}/${Math.round(totalWeekIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
indexedThisRun = 0;
|
||||
loadingIndicators.setProgress('weekly-hashrate-indexing', progress, false);
|
||||
@@ -257,11 +265,14 @@ class Mining {
|
||||
}
|
||||
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', new Date().getUTCDate());
|
||||
if (newlyIndexed > 0) {
|
||||
logger.info(`Indexed ${newlyIndexed} pools weekly hashrate`);
|
||||
logger.notice(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
|
||||
} else {
|
||||
logger.debug(`Weekly mining pools hashrates indexing completed: indexed ${newlyIndexed}`);
|
||||
}
|
||||
loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
|
||||
} catch (e) {
|
||||
loadingIndicators.setProgress('weekly-hashrate-indexing', 100);
|
||||
logger.err(`Weekly mining pools hashrates indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
}
|
||||
}
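A note on the weekly indexer above: the network hashrate estimate for the window is split across pools in proportion to the blocks each pool mined, and the loop only advances once the previous week also has blocks, i.e. the week being indexed has complete data. A minimal sketch of the share computation, using hypothetical simplified types rather than the repository models:

interface PoolBlockCount { poolId: number; blockCount: number; }
interface PoolHashrate { poolId: number; share: number; hashrate: number; }

// Distribute a network hashrate estimate across pools by block share.
// Returns an empty array when no blocks were mined in the window,
// mirroring the `totalBlocks > 0` guard added above.
function splitHashrateByPool(pools: PoolBlockCount[], networkHashrate: number): PoolHashrate[] {
  const totalBlocks = pools.reduce((acc, pool) => acc + pool.blockCount, 0);
  if (totalBlocks === 0) {
    return [];
  }
  return pools.map((pool) => ({
    poolId: pool.poolId,
    share: pool.blockCount / totalBlocks,
    hashrate: (pool.blockCount / totalBlocks) * networkHashrate,
  }));
}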
|
||||
@@ -270,20 +281,19 @@ class Mining {
|
||||
* [INDEXING] Generate daily hashrate data
|
||||
*/
|
||||
public async $generateNetworkHashrateHistory(): Promise<void> {
|
||||
try {
|
||||
// We only run this once a day around midnight
|
||||
const latestRunDate = await HashratesRepository.$getLatestRun('last_hashrates_indexing');
|
||||
const now = new Date().getUTCDate();
|
||||
if (now === latestRunDate) {
|
||||
return;
|
||||
}
|
||||
} catch (e) {
|
||||
throw e;
|
||||
// We only run this once a day around midnight
|
||||
const latestRunDate = await HashratesRepository.$getLatestRun('last_hashrates_indexing');
|
||||
const now = new Date().getUTCDate();
|
||||
if (now === latestRunDate) {
|
||||
return;
|
||||
}
|
||||
|
||||
const oldestConsecutiveBlockTimestamp = 1000 * (await BlocksRepository.$getOldestConsecutiveBlock()).timestamp;
|
||||
|
||||
try {
|
||||
const indexedTimestamp = (await HashratesRepository.$getNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
|
||||
const genesisTimestamp = 1231006505000; // bitcoin-cli getblock 000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f
|
||||
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
|
||||
const genesisTimestamp = genesisBlock.time * 1000;
|
||||
const indexedTimestamp = (await HashratesRepository.$getRawNetworkDailyHashrate(null)).map(hashrate => hashrate.timestamp);
|
||||
const lastMidnight = this.getDateMidnight(new Date());
|
||||
let toTimestamp = Math.round(lastMidnight.getTime());
|
||||
const hashrates: any[] = [];
|
||||
@@ -298,27 +308,19 @@ class Mining {
|
||||
logger.debug(`Indexing daily network hashrate`);
|
||||
loadingIndicators.setProgress('daily-hashrate-indexing', 0);
|
||||
|
||||
while (toTimestamp > genesisTimestamp) {
|
||||
while (toTimestamp > genesisTimestamp && toTimestamp > oldestConsecutiveBlockTimestamp) {
|
||||
const fromTimestamp = toTimestamp - 86400000;
|
||||
|
||||
// Skip already indexed weeks
|
||||
// Skip already indexed days
|
||||
if (indexedTimestamp.includes(toTimestamp / 1000)) {
|
||||
toTimestamp -= 86400000;
|
||||
++totalIndexed;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if we have blocks for the previous day (which means that the day
|
||||
// we are currently indexing has complete data)
|
||||
const blockStatsPreviousDay: any = await BlocksRepository.$blockCountBetweenTimestamp(
|
||||
null, (fromTimestamp - 86400000) / 1000, (toTimestamp - 86400000) / 1000);
|
||||
if (blockStatsPreviousDay.blockCount === 0) { // We are done indexing
|
||||
break;
|
||||
}
|
||||
|
||||
const blockStats: any = await BlocksRepository.$blockCountBetweenTimestamp(
|
||||
null, fromTimestamp / 1000, toTimestamp / 1000);
|
||||
const lastBlockHashrate = await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
|
||||
const lastBlockHashrate = blockStats.blockCount === 0 ? 0 : await bitcoinClient.getNetworkHashPs(blockStats.blockCount,
|
||||
blockStats.lastBlockHeight);
|
||||
|
||||
hashrates.push({
|
||||
@@ -340,9 +342,8 @@ class Mining {
|
||||
const runningFor = Math.max(1, Math.round((new Date().getTime() / 1000) - startedAt));
|
||||
const daysPerSeconds = Math.max(1, Math.round(indexedThisRun / elapsedSeconds));
|
||||
const progress = Math.round(totalIndexed / totalDayIndexed * 10000) / 100;
|
||||
const timeLeft = Math.round((totalDayIndexed - totalIndexed) / daysPerSeconds);
|
||||
const formattedDate = new Date(fromTimestamp).toUTCString();
|
||||
logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds | left: ~${timeLeft} seconds`);
|
||||
logger.debug(`Getting network daily hashrate for ${formattedDate} | ~${daysPerSeconds.toFixed(2)} days/sec | total: ~${totalIndexed}/${Math.round(totalDayIndexed)} (${progress}%) | elapsed: ${runningFor} seconds`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
indexedThisRun = 0;
|
||||
loadingIndicators.setProgress('daily-hashrate-indexing', progress);
|
||||
@@ -354,11 +355,12 @@ class Mining {
|
||||
}
|
||||
|
||||
// Add genesis block manually
|
||||
if (toTimestamp <= genesisTimestamp && !indexedTimestamp.includes(genesisTimestamp)) {
|
||||
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && !indexedTimestamp.includes(genesisTimestamp / 1000)) {
|
||||
hashrates.push({
|
||||
hashrateTimestamp: genesisTimestamp,
|
||||
hashrateTimestamp: genesisTimestamp / 1000,
|
||||
avgHashrate: await bitcoinClient.getNetworkHashPs(1, 1),
|
||||
poolId: null,
|
||||
poolId: 0,
|
||||
share: 1,
|
||||
type: 'daily',
|
||||
});
|
||||
}
|
||||
@@ -368,15 +370,155 @@ class Mining {
|
||||
|
||||
await HashratesRepository.$setLatestRun('last_hashrates_indexing', new Date().getUTCDate());
|
||||
if (newlyIndexed > 0) {
|
||||
logger.info(`Indexed ${newlyIndexed} day of network hashrate`);
|
||||
logger.notice(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
|
||||
} else {
|
||||
logger.debug(`Daily network hashrate indexing completed: indexed ${newlyIndexed} days`);
|
||||
}
|
||||
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
|
||||
} catch (e) {
|
||||
loadingIndicators.setProgress('daily-hashrate-indexing', 100);
|
||||
logger.err(`Daily network hashrate indexing failed. Trying again in 10 seconds. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index difficulty adjustments
|
||||
*/
|
||||
public async $indexDifficultyAdjustments(): Promise<void> {
|
||||
const indexedHeightsArray = await DifficultyAdjustmentsRepository.$getAdjustmentsHeights();
|
||||
const indexedHeights = {};
|
||||
for (const height of indexedHeightsArray) {
|
||||
indexedHeights[height] = true;
|
||||
}
|
||||
|
||||
const blocks: any = await BlocksRepository.$getBlocksDifficulty();
|
||||
const genesisBlock = await bitcoinClient.getBlock(await bitcoinClient.getBlockHash(0));
|
||||
let currentDifficulty = genesisBlock.difficulty;
|
||||
let totalIndexed = 0;
|
||||
|
||||
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT === -1 && indexedHeights[0] !== true) {
|
||||
await DifficultyAdjustmentsRepository.$saveAdjustments({
|
||||
time: genesisBlock.time,
|
||||
height: 0,
|
||||
difficulty: currentDifficulty,
|
||||
adjustment: 0.0,
|
||||
});
|
||||
}
|
||||
|
||||
const oldestConsecutiveBlock = await BlocksRepository.$getOldestConsecutiveBlock();
|
||||
if (config.MEMPOOL.INDEXING_BLOCKS_AMOUNT !== -1) {
|
||||
currentDifficulty = oldestConsecutiveBlock.difficulty;
|
||||
}
|
||||
|
||||
let totalBlockChecked = 0;
|
||||
let timer = new Date().getTime() / 1000;
|
||||
|
||||
for (const block of blocks) {
|
||||
if (block.difficulty !== currentDifficulty) {
|
||||
if (indexedHeights[block.height] === true) { // Already indexed
|
||||
if (block.height >= oldestConsecutiveBlock.height) {
|
||||
currentDifficulty = block.difficulty;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
let adjustment = block.difficulty / currentDifficulty;
|
||||
adjustment = Math.round(adjustment * 1000000) / 1000000; // Round to remove floating point noise
|
||||
|
||||
await DifficultyAdjustmentsRepository.$saveAdjustments({
|
||||
time: block.time,
|
||||
height: block.height,
|
||||
difficulty: block.difficulty,
|
||||
adjustment: adjustment,
|
||||
});
|
||||
|
||||
totalIndexed++;
|
||||
if (block.height >= oldestConsecutiveBlock.height) {
|
||||
currentDifficulty = block.difficulty;
|
||||
}
|
||||
}
|
||||
|
||||
totalBlockChecked++;
|
||||
const elapsedSeconds = Math.max(1, Math.round((new Date().getTime() / 1000) - timer));
|
||||
if (elapsedSeconds > 5) {
|
||||
const progress = Math.round(totalBlockChecked / blocks.length * 100);
|
||||
logger.info(`Indexing difficulty adjustment at block #${block.height} | Progress: ${progress}%`);
|
||||
timer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
if (totalIndexed > 0) {
|
||||
logger.notice(`Indexed ${totalIndexed} difficulty adjustments`);
|
||||
} else {
|
||||
logger.debug(`Indexed ${totalIndexed} difficulty adjustments`);
|
||||
}
|
||||
}
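The adjustment stored for each difficulty change is simply the new difficulty divided by the previous one, rounded to six decimal places. A small illustrative helper (not part of the diff) with a worked example:

// Ratio between consecutive difficulties, rounded to 6 decimals
// to strip floating point noise, as in the loop above.
function difficultyAdjustmentRatio(previousDifficulty: number, newDifficulty: number): number {
  const adjustment = newDifficulty / previousDifficulty;
  return Math.round(adjustment * 1000000) / 1000000;
}

// Example: a move from 25T to 26.5T is stored as a 1.06 (+6%) adjustment.
// difficultyAdjustmentRatio(25_000_000_000_000, 26_500_000_000_000) === 1.06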
|
||||
|
||||
/**
|
||||
* Create a link between blocks and the latest price at the time they were mined
|
||||
*/
|
||||
public async $indexBlockPrices() {
|
||||
if (this.blocksPriceIndexingRunning === true) {
|
||||
return;
|
||||
}
|
||||
this.blocksPriceIndexingRunning = true;
|
||||
|
||||
try {
|
||||
const prices: any[] = await PricesRepository.$getPricesTimesAndId();
|
||||
const blocksWithoutPrices: any[] = await BlocksRepository.$getBlocksWithoutPrice();
|
||||
|
||||
let totalInserted = 0;
|
||||
const blocksPrices: BlockPrice[] = [];
|
||||
|
||||
for (const block of blocksWithoutPrices) {
|
||||
// Quick optimisation: our mtgox feed only goes back to 2010-07-19 02:00:00, so skip the first 68951 blocks
|
||||
if (['mainnet', 'testnet'].includes(config.MEMPOOL.NETWORK) && block.height < 68951) {
|
||||
blocksPrices.push({
|
||||
height: block.height,
|
||||
priceId: prices[0].id,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
for (const price of prices) {
|
||||
if (block.timestamp < price.time) {
|
||||
blocksPrices.push({
|
||||
height: block.height,
|
||||
priceId: price.id,
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (blocksPrices.length >= 100000) {
|
||||
totalInserted += blocksPrices.length;
|
||||
let logStr = `Linking ${blocksPrices.length} blocks to their closest price`;
|
||||
if (blocksWithoutPrices.length > 200000) {
|
||||
logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
|
||||
}
|
||||
logger.debug(logStr);
|
||||
await BlocksRepository.$saveBlockPrices(blocksPrices);
|
||||
blocksPrices.length = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (blocksPrices.length > 0) {
|
||||
totalInserted += blocksPrices.length;
|
||||
let logStr = `Linking ${blocksPrices.length} blocks to their closest price`;
|
||||
if (blocksWithoutPrices.length > 200000) {
|
||||
logStr += ` | Progress ${Math.round(totalInserted / blocksWithoutPrices.length * 100)}%`;
|
||||
}
|
||||
logger.debug(logStr);
|
||||
await BlocksRepository.$saveBlockPrices(blocksPrices);
|
||||
}
|
||||
} catch (e) {
|
||||
this.blocksPriceIndexingRunning = false;
|
||||
throw e;
|
||||
}
|
||||
|
||||
this.blocksPriceIndexingRunning = false;
|
||||
}
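The price-linking loop above walks the chronologically ordered price list and attaches each block to the first quote whose timestamp is later than the block's, flushing to the database in batches of 100,000. A simplified sketch of that lookup with hypothetical types; like the real loop, it yields nothing when no later quote exists yet:

interface PriceRecord { id: number; time: number; } // unix seconds, sorted ascending

// First price quoted after the block timestamp, or null if none exists yet
// (in that case the indexer above simply does not link the block).
function priceIdForBlock(blockTimestamp: number, prices: PriceRecord[]): number | null {
  for (const price of prices) {
    if (blockTimestamp < price.time) {
      return price.id;
    }
  }
  return null;
}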
|
||||
|
||||
private getDateMidnight(date: Date): Date {
|
||||
date.setUTCHours(0);
|
||||
date.setUTCMinutes(0);
|
||||
@@ -386,18 +528,18 @@ class Mining {
|
||||
return date;
|
||||
}
|
||||
|
||||
private getTimeRange(interval: string | null): number {
|
||||
private getTimeRange(interval: string | null, scale = 1): number {
|
||||
switch (interval) {
|
||||
case '3y': return 43200; // 12h
|
||||
case '2y': return 28800; // 8h
|
||||
case '1y': return 28800; // 8h
|
||||
case '6m': return 10800; // 3h
|
||||
case '3m': return 7200; // 2h
|
||||
case '1m': return 1800; // 30min
|
||||
case '1w': return 300; // 5min
|
||||
case '3d': return 1;
|
||||
case '24h': return 1;
|
||||
default: return 86400; // 24h
|
||||
case '3y': return 43200 * scale; // 12h
|
||||
case '2y': return 28800 * scale; // 8h
|
||||
case '1y': return 28800 * scale; // 8h
|
||||
case '6m': return 10800 * scale; // 3h
|
||||
case '3m': return 7200 * scale; // 2h
|
||||
case '1m': return 1800 * scale; // 30min
|
||||
case '1w': return 300 * scale; // 5min
|
||||
case '3d': return 1 * scale;
|
||||
case '24h': return 1 * scale;
|
||||
default: return 86400 * scale;
|
||||
}
|
||||
}
|
||||
}
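The new `scale` parameter on `getTimeRange` lets callers coarsen the sampling step without duplicating the interval table. A hypothetical call site:

// Default granularity for a 1-year chart: 8h buckets.
const step = this.getTimeRange('1y');          // 28800 seconds
// Twice as coarse, e.g. when fewer data points are wanted: 16h buckets.
const coarseStep = this.getTimeRange('1y', 2); // 57600 seconds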
|
||||
@@ -1,6 +1,7 @@
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import config from '../config';
|
||||
import BlocksRepository from '../repositories/BlocksRepository';
|
||||
|
||||
interface Pool {
|
||||
name: string;
|
||||
@@ -32,7 +33,6 @@ class PoolsParser {
|
||||
// First we save every entry without paying attention to pool duplication
|
||||
const poolsDuplicated: Pool[] = [];
|
||||
|
||||
logger.debug('Parse coinbase_tags');
|
||||
const coinbaseTags = Object.entries(poolsJson['coinbase_tags']);
|
||||
for (let i = 0; i < coinbaseTags.length; ++i) {
|
||||
poolsDuplicated.push({
|
||||
@@ -43,7 +43,6 @@ class PoolsParser {
|
||||
'slug': ''
|
||||
});
|
||||
}
|
||||
logger.debug('Parse payout_addresses');
|
||||
const addressesTags = Object.entries(poolsJson['payout_addresses']);
|
||||
for (let i = 0; i < addressesTags.length; ++i) {
|
||||
poolsDuplicated.push({
|
||||
@@ -56,7 +55,6 @@ class PoolsParser {
|
||||
}
|
||||
|
||||
// Then, we find unique mining pool names
|
||||
logger.debug('Identify unique mining pools');
|
||||
const poolNames: string[] = [];
|
||||
for (let i = 0; i < poolsDuplicated.length; ++i) {
|
||||
if (poolNames.indexOf(poolsDuplicated[i].name) === -1) {
|
||||
@@ -119,8 +117,15 @@ class PoolsParser {
|
||||
'slug': slug
|
||||
};
|
||||
|
||||
if (existingPools.find((pool) => pool.name === poolNames[i]) !== undefined) {
|
||||
finalPoolDataUpdate.push(poolObj);
|
||||
const existingPool = existingPools.find((pool) => pool.name === poolNames[i]);
|
||||
if (existingPool !== undefined) {
|
||||
// Check if any data was actually updated
|
||||
const equals = (a, b) =>
|
||||
a.length === b.length &&
|
||||
a.every((v, i) => v === b[i]);
|
||||
if (!equals(JSON.parse(existingPool.addresses), poolObj.addresses) || !equals(JSON.parse(existingPool.regexes), poolObj.regexes)) {
|
||||
finalPoolDataUpdate.push(poolObj);
|
||||
}
|
||||
} else {
|
||||
logger.debug(`Add '${finalPoolName}' mining pool`);
|
||||
finalPoolDataAdd.push(poolObj);
|
||||
@@ -140,40 +145,51 @@ class PoolsParser {
|
||||
return;
|
||||
}
|
||||
|
||||
logger.debug(`Update pools table now`);
|
||||
if (finalPoolDataAdd.length > 0 || finalPoolDataUpdate.length > 0) {
|
||||
logger.debug(`Update pools table now`);
|
||||
|
||||
// Add new mining pools into the database
|
||||
let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
|
||||
for (let i = 0; i < finalPoolDataAdd.length; ++i) {
|
||||
queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
|
||||
'${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
|
||||
${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
|
||||
}
|
||||
queryAdd = queryAdd.slice(0, -1) + ';';
|
||||
// Add new mining pools into the database
|
||||
let queryAdd: string = 'INSERT INTO pools(name, link, regexes, addresses, slug) VALUES ';
|
||||
for (let i = 0; i < finalPoolDataAdd.length; ++i) {
|
||||
queryAdd += `('${finalPoolDataAdd[i].name}', '${finalPoolDataAdd[i].link}',
|
||||
'${JSON.stringify(finalPoolDataAdd[i].regexes)}', '${JSON.stringify(finalPoolDataAdd[i].addresses)}',
|
||||
${JSON.stringify(finalPoolDataAdd[i].slug)}),`;
|
||||
}
|
||||
queryAdd = queryAdd.slice(0, -1) + ';';
|
||||
|
||||
// Updated existing mining pools in the database
|
||||
const updateQueries: string[] = [];
|
||||
for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
|
||||
updateQueries.push(`
|
||||
UPDATE pools
|
||||
SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
|
||||
regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
|
||||
slug='${finalPoolDataUpdate[i].slug}'
|
||||
WHERE name='${finalPoolDataUpdate[i].name}'
|
||||
;`);
|
||||
// Updated existing mining pools in the database
|
||||
const updateQueries: string[] = [];
|
||||
for (let i = 0; i < finalPoolDataUpdate.length; ++i) {
|
||||
updateQueries.push(`
|
||||
UPDATE pools
|
||||
SET name='${finalPoolDataUpdate[i].name}', link='${finalPoolDataUpdate[i].link}',
|
||||
regexes='${JSON.stringify(finalPoolDataUpdate[i].regexes)}', addresses='${JSON.stringify(finalPoolDataUpdate[i].addresses)}',
|
||||
slug='${finalPoolDataUpdate[i].slug}'
|
||||
WHERE name='${finalPoolDataUpdate[i].name}'
|
||||
;`);
|
||||
}
|
||||
|
||||
try {
|
||||
await this.$deleteBlocskToReindex(finalPoolDataUpdate);
|
||||
|
||||
if (finalPoolDataAdd.length > 0) {
|
||||
await DB.query({ sql: queryAdd, timeout: 120000 });
|
||||
}
|
||||
for (const query of updateQueries) {
|
||||
await DB.query({ sql: query, timeout: 120000 });
|
||||
}
|
||||
await this.insertUnknownPool();
|
||||
logger.info('Mining pools.json import completed');
|
||||
} catch (e) {
|
||||
logger.err(`Cannot import pools in the database`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
if (finalPoolDataAdd.length > 0) {
|
||||
await DB.query({ sql: queryAdd, timeout: 120000 });
|
||||
}
|
||||
for (const query of updateQueries) {
|
||||
await DB.query({ sql: query, timeout: 120000 });
|
||||
}
|
||||
await this.insertUnknownPool();
|
||||
logger.info('Mining pools.json import completed');
|
||||
} catch (e) {
|
||||
logger.err(`Cannot import pools in the database`);
|
||||
logger.err(`Cannot insert unknown pool in the database`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@@ -201,6 +217,36 @@ class PoolsParser {
|
||||
logger.err('Unable to insert "Unknown" mining pool');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete blocks which need to be reindexed
|
||||
*/
|
||||
private async $deleteBlocskToReindex(finalPoolDataUpdate: any[]) {
|
||||
if (config.MEMPOOL.AUTOMATIC_BLOCK_REINDEXING === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
const blockCount = await BlocksRepository.$blockCount(null, null);
|
||||
if (blockCount === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const updatedPool of finalPoolDataUpdate) {
|
||||
const [pool]: any[] = await DB.query(`SELECT id, name from pools where slug = "${updatedPool.slug}"`);
|
||||
if (pool.length > 0) {
|
||||
logger.notice(`Deleting blocks from ${pool[0].name} mining pool for future re-indexing`);
|
||||
await DB.query(`DELETE FROM blocks WHERE pool_id = ${pool[0].id}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Ignore the early days of Bitcoin as there were no mining pools yet
|
||||
logger.notice('Deleting blocks with unknown mining pool from height 130635 for future re-indexing');
|
||||
const [unknownPool] = await DB.query(`SELECT id from pools where slug = "unknown"`);
|
||||
await DB.query(`DELETE FROM blocks WHERE pool_id = ${unknownPool[0].id} AND height > 130635`);
|
||||
|
||||
logger.notice('Truncating hashrates for future re-indexing');
|
||||
await DB.query(`DELETE FROM hashrates`);
|
||||
}
|
||||
}
|
||||
|
||||
export default new PoolsParser();
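The update path above only rewrites a pool row when the freshly parsed addresses or regexes differ from the JSON stored in the database, using an element-wise array comparison. A standalone sketch of that check with hypothetical sample data:

// Element-wise equality, equivalent to the `equals` helper used above.
const arraysEqual = (a: string[], b: string[]): boolean =>
  a.length === b.length && a.every((v, i) => v === b[i]);

// Stored column (JSON string) vs. freshly parsed list: identical, so no UPDATE is queued.
const storedAddresses = '["addr-one","addr-two"]';
const parsedAddresses = ['addr-one', 'addr-two'];
const needsUpdate = !arraysEqual(JSON.parse(storedAddresses), parsedAddresses); // false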
|
||||
|
||||
@@ -1,160 +1,11 @@
|
||||
import memPool from './mempool';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import { Statistic, OptimizedStatistic } from '../../mempool.interfaces';
|
||||
|
||||
import { Statistic, TransactionExtended, OptimizedStatistic } from '../mempool.interfaces';
|
||||
import config from '../config';
|
||||
import { Common } from './common';
|
||||
|
||||
class Statistics {
|
||||
protected intervalTimer: NodeJS.Timer | undefined;
|
||||
protected newStatisticsEntryCallback: ((stats: OptimizedStatistic) => void) | undefined;
|
||||
class StatisticsApi {
|
||||
protected queryTimeout = 120000;
|
||||
|
||||
public setNewStatisticsEntryCallback(fn: (stats: OptimizedStatistic) => void) {
|
||||
this.newStatisticsEntryCallback = fn;
|
||||
}
|
||||
|
||||
constructor() { }
|
||||
|
||||
public startStatistics(): void {
|
||||
logger.info('Starting statistics service');
|
||||
|
||||
const now = new Date();
|
||||
const nextInterval = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(),
|
||||
Math.floor(now.getMinutes() / 1) * 1 + 1, 0, 0);
|
||||
const difference = nextInterval.getTime() - now.getTime();
|
||||
|
||||
setTimeout(() => {
|
||||
this.runStatistics();
|
||||
this.intervalTimer = setInterval(() => {
|
||||
this.runStatistics();
|
||||
}, 1 * 60 * 1000);
|
||||
}, difference);
|
||||
}
|
||||
|
||||
private async runStatistics(): Promise<void> {
|
||||
if (!memPool.isInSync()) {
|
||||
return;
|
||||
}
|
||||
const currentMempool = memPool.getMempool();
|
||||
const txPerSecond = memPool.getTxPerSecond();
|
||||
const vBytesPerSecond = memPool.getVBytesPerSecond();
|
||||
|
||||
logger.debug('Running statistics');
|
||||
|
||||
let memPoolArray: TransactionExtended[] = [];
|
||||
for (const i in currentMempool) {
|
||||
if (currentMempool.hasOwnProperty(i)) {
|
||||
memPoolArray.push(currentMempool[i]);
|
||||
}
|
||||
}
|
||||
// Remove 0 and undefined
|
||||
memPoolArray = memPoolArray.filter((tx) => tx.effectiveFeePerVsize);
|
||||
|
||||
if (!memPoolArray.length) {
|
||||
try {
|
||||
const insertIdZeroed = await this.$createZeroedStatistic();
|
||||
if (this.newStatisticsEntryCallback && insertIdZeroed) {
|
||||
const newStats = await this.$get(insertIdZeroed);
|
||||
if (newStats) {
|
||||
this.newStatisticsEntryCallback(newStats);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert zeroed statistics. ' + e);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
memPoolArray.sort((a, b) => a.effectiveFeePerVsize - b.effectiveFeePerVsize);
|
||||
const totalWeight = memPoolArray.map((tx) => tx.vsize).reduce((acc, curr) => acc + curr) * 4;
|
||||
const totalFee = memPoolArray.map((tx) => tx.fee).reduce((acc, curr) => acc + curr);
|
||||
|
||||
const logFees = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100, 125, 150, 175, 200,
|
||||
250, 300, 350, 400, 500, 600, 700, 800, 900, 1000, 1200, 1400, 1600, 1800, 2000];
|
||||
|
||||
const weightVsizeFees: { [feePerWU: number]: number } = {};
|
||||
const lastItem = logFees.length - 1;
|
||||
|
||||
memPoolArray.forEach((transaction) => {
|
||||
for (let i = 0; i < logFees.length; i++) {
|
||||
if (
|
||||
(Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize * 10 < logFees[i + 1]))
|
||||
||
|
||||
(!Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize < logFees[i + 1]))
|
||||
) {
|
||||
if (weightVsizeFees[logFees[i]]) {
|
||||
weightVsizeFees[logFees[i]] += transaction.vsize;
|
||||
} else {
|
||||
weightVsizeFees[logFees[i]] = transaction.vsize;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
const insertId = await this.$create({
|
||||
added: 'NOW()',
|
||||
unconfirmed_transactions: memPoolArray.length,
|
||||
tx_per_second: txPerSecond,
|
||||
vbytes_per_second: Math.round(vBytesPerSecond),
|
||||
mempool_byte_weight: totalWeight,
|
||||
total_fee: totalFee,
|
||||
fee_data: '',
|
||||
vsize_1: weightVsizeFees['1'] || 0,
|
||||
vsize_2: weightVsizeFees['2'] || 0,
|
||||
vsize_3: weightVsizeFees['3'] || 0,
|
||||
vsize_4: weightVsizeFees['4'] || 0,
|
||||
vsize_5: weightVsizeFees['5'] || 0,
|
||||
vsize_6: weightVsizeFees['6'] || 0,
|
||||
vsize_8: weightVsizeFees['8'] || 0,
|
||||
vsize_10: weightVsizeFees['10'] || 0,
|
||||
vsize_12: weightVsizeFees['12'] || 0,
|
||||
vsize_15: weightVsizeFees['15'] || 0,
|
||||
vsize_20: weightVsizeFees['20'] || 0,
|
||||
vsize_30: weightVsizeFees['30'] || 0,
|
||||
vsize_40: weightVsizeFees['40'] || 0,
|
||||
vsize_50: weightVsizeFees['50'] || 0,
|
||||
vsize_60: weightVsizeFees['60'] || 0,
|
||||
vsize_70: weightVsizeFees['70'] || 0,
|
||||
vsize_80: weightVsizeFees['80'] || 0,
|
||||
vsize_90: weightVsizeFees['90'] || 0,
|
||||
vsize_100: weightVsizeFees['100'] || 0,
|
||||
vsize_125: weightVsizeFees['125'] || 0,
|
||||
vsize_150: weightVsizeFees['150'] || 0,
|
||||
vsize_175: weightVsizeFees['175'] || 0,
|
||||
vsize_200: weightVsizeFees['200'] || 0,
|
||||
vsize_250: weightVsizeFees['250'] || 0,
|
||||
vsize_300: weightVsizeFees['300'] || 0,
|
||||
vsize_350: weightVsizeFees['350'] || 0,
|
||||
vsize_400: weightVsizeFees['400'] || 0,
|
||||
vsize_500: weightVsizeFees['500'] || 0,
|
||||
vsize_600: weightVsizeFees['600'] || 0,
|
||||
vsize_700: weightVsizeFees['700'] || 0,
|
||||
vsize_800: weightVsizeFees['800'] || 0,
|
||||
vsize_900: weightVsizeFees['900'] || 0,
|
||||
vsize_1000: weightVsizeFees['1000'] || 0,
|
||||
vsize_1200: weightVsizeFees['1200'] || 0,
|
||||
vsize_1400: weightVsizeFees['1400'] || 0,
|
||||
vsize_1600: weightVsizeFees['1600'] || 0,
|
||||
vsize_1800: weightVsizeFees['1800'] || 0,
|
||||
vsize_2000: weightVsizeFees['2000'] || 0,
|
||||
});
|
||||
|
||||
if (this.newStatisticsEntryCallback && insertId) {
|
||||
const newStats = await this.$get(insertId);
|
||||
if (newStats) {
|
||||
this.newStatisticsEntryCallback(newStats);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert statistics. ' + e);
|
||||
}
|
||||
}
|
||||
|
||||
private async $createZeroedStatistic(): Promise<number | undefined> {
|
||||
public async $createZeroedStatistic(): Promise<number | undefined> {
|
||||
try {
|
||||
const query = `INSERT INTO statistics(
|
||||
added,
|
||||
@@ -212,7 +63,7 @@ class Statistics {
|
||||
}
|
||||
}
|
||||
|
||||
private async $create(statistics: Statistic): Promise<number | undefined> {
|
||||
public async $create(statistics: Statistic): Promise<number | undefined> {
|
||||
try {
|
||||
const query = `INSERT INTO statistics(
|
||||
added,
|
||||
@@ -413,7 +264,7 @@ class Statistics {
|
||||
ORDER BY statistics.added DESC;`;
|
||||
}
|
||||
|
||||
private async $get(id: number): Promise<OptimizedStatistic | undefined> {
|
||||
public async $get(id: number): Promise<OptimizedStatistic | undefined> {
|
||||
try {
|
||||
const query = `SELECT *, UNIX_TIMESTAMP(added) as added FROM statistics WHERE id = ?`;
|
||||
const [rows] = await DB.query(query, [id]);
|
||||
@@ -574,7 +425,6 @@ class Statistics {
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
export default new Statistics();
|
||||
export default new StatisticsApi();
|
||||
67
backend/src/api/statistics/statistics.routes.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { Application, Request, Response } from 'express';
|
||||
import config from '../../config';
|
||||
import statisticsApi from './statistics-api';
|
||||
|
||||
class StatisticsRoutes {
|
||||
public initRoutes(app: Application) {
|
||||
app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2h', this.$getStatisticsByTime.bind(this, '2h'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/24h', this.$getStatisticsByTime.bind(this, '24h'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1w', this.$getStatisticsByTime.bind(this, '1w'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1m', this.$getStatisticsByTime.bind(this, '1m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3m', this.$getStatisticsByTime.bind(this, '3m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/6m', this.$getStatisticsByTime.bind(this, '6m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1y', this.$getStatisticsByTime.bind(this, '1y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', this.$getStatisticsByTime.bind(this, '2y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', this.$getStatisticsByTime.bind(this, '3y'))
|
||||
;
|
||||
}
|
||||
|
||||
private async $getStatisticsByTime(time: '2h' | '24h' | '1w' | '1m' | '3m' | '6m' | '1y' | '2y' | '3y', req: Request, res: Response) {
|
||||
res.header('Pragma', 'public');
|
||||
res.header('Cache-control', 'public');
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 300).toUTCString());
|
||||
|
||||
try {
|
||||
let result;
|
||||
switch (time as string) {
|
||||
case '2h':
|
||||
result = await statisticsApi.$list2H();
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 30).toUTCString());
|
||||
break;
|
||||
case '24h':
|
||||
result = await statisticsApi.$list24H();
|
||||
res.setHeader('Expires', new Date(Date.now() + 1000 * 60).toUTCString());
|
||||
break;
|
||||
case '1w':
|
||||
result = await statisticsApi.$list1W();
|
||||
break;
|
||||
case '1m':
|
||||
result = await statisticsApi.$list1M();
|
||||
break;
|
||||
case '3m':
|
||||
result = await statisticsApi.$list3M();
|
||||
break;
|
||||
case '6m':
|
||||
result = await statisticsApi.$list6M();
|
||||
break;
|
||||
case '1y':
|
||||
result = await statisticsApi.$list1Y();
|
||||
break;
|
||||
case '2y':
|
||||
result = await statisticsApi.$list2Y();
|
||||
break;
|
||||
case '3y':
|
||||
result = await statisticsApi.$list3Y();
|
||||
break;
|
||||
default:
|
||||
result = await statisticsApi.$list2H();
|
||||
}
|
||||
res.json(result);
|
||||
} catch (e) {
|
||||
res.status(500).send(e instanceof Error ? e.message : e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new StatisticsRoutes();
|
||||
153
backend/src/api/statistics/statistics.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
import memPool from '../mempool';
|
||||
import logger from '../../logger';
|
||||
import { TransactionExtended, OptimizedStatistic } from '../../mempool.interfaces';
|
||||
import { Common } from '../common';
|
||||
import statisticsApi from './statistics-api';
|
||||
|
||||
class Statistics {
|
||||
protected intervalTimer: NodeJS.Timer | undefined;
|
||||
protected newStatisticsEntryCallback: ((stats: OptimizedStatistic) => void) | undefined;
|
||||
|
||||
public setNewStatisticsEntryCallback(fn: (stats: OptimizedStatistic) => void) {
|
||||
this.newStatisticsEntryCallback = fn;
|
||||
}
|
||||
|
||||
public startStatistics(): void {
|
||||
logger.info('Starting statistics service');
|
||||
|
||||
const now = new Date();
|
||||
const nextInterval = new Date(now.getFullYear(), now.getMonth(), now.getDate(), now.getHours(),
|
||||
Math.floor(now.getMinutes() / 1) * 1 + 1, 0, 0);
|
||||
const difference = nextInterval.getTime() - now.getTime();
|
||||
|
||||
setTimeout(() => {
|
||||
this.runStatistics();
|
||||
this.intervalTimer = setInterval(() => {
|
||||
this.runStatistics();
|
||||
}, 1 * 60 * 1000);
|
||||
}, difference);
|
||||
}
|
||||
|
||||
private async runStatistics(): Promise<void> {
|
||||
if (!memPool.isInSync()) {
|
||||
return;
|
||||
}
|
||||
const currentMempool = memPool.getMempool();
|
||||
const txPerSecond = memPool.getTxPerSecond();
|
||||
const vBytesPerSecond = memPool.getVBytesPerSecond();
|
||||
|
||||
logger.debug('Running statistics');
|
||||
|
||||
let memPoolArray: TransactionExtended[] = [];
|
||||
for (const i in currentMempool) {
|
||||
if (currentMempool.hasOwnProperty(i)) {
|
||||
memPoolArray.push(currentMempool[i]);
|
||||
}
|
||||
}
|
||||
// Remove 0 and undefined
|
||||
memPoolArray = memPoolArray.filter((tx) => tx.effectiveFeePerVsize);
|
||||
|
||||
if (!memPoolArray.length) {
|
||||
try {
|
||||
const insertIdZeroed = await statisticsApi.$createZeroedStatistic();
|
||||
if (this.newStatisticsEntryCallback && insertIdZeroed) {
|
||||
const newStats = await statisticsApi.$get(insertIdZeroed);
|
||||
if (newStats) {
|
||||
this.newStatisticsEntryCallback(newStats);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert zeroed statistics. ' + e);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
memPoolArray.sort((a, b) => a.effectiveFeePerVsize - b.effectiveFeePerVsize);
|
||||
const totalWeight = memPoolArray.map((tx) => tx.vsize).reduce((acc, curr) => acc + curr) * 4;
|
||||
const totalFee = memPoolArray.map((tx) => tx.fee).reduce((acc, curr) => acc + curr);
|
||||
|
||||
const logFees = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100, 125, 150, 175, 200,
|
||||
250, 300, 350, 400, 500, 600, 700, 800, 900, 1000, 1200, 1400, 1600, 1800, 2000];
|
||||
|
||||
const weightVsizeFees: { [feePerWU: number]: number } = {};
|
||||
const lastItem = logFees.length - 1;
|
||||
|
||||
memPoolArray.forEach((transaction) => {
|
||||
for (let i = 0; i < logFees.length; i++) {
|
||||
if (
|
||||
(Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize * 10 < logFees[i + 1]))
|
||||
||
|
||||
(!Common.isLiquid() && (i === lastItem || transaction.effectiveFeePerVsize < logFees[i + 1]))
|
||||
) {
|
||||
if (weightVsizeFees[logFees[i]]) {
|
||||
weightVsizeFees[logFees[i]] += transaction.vsize;
|
||||
} else {
|
||||
weightVsizeFees[logFees[i]] = transaction.vsize;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
const insertId = await statisticsApi.$create({
|
||||
added: 'NOW()',
|
||||
unconfirmed_transactions: memPoolArray.length,
|
||||
tx_per_second: txPerSecond,
|
||||
vbytes_per_second: Math.round(vBytesPerSecond),
|
||||
mempool_byte_weight: totalWeight,
|
||||
total_fee: totalFee,
|
||||
fee_data: '',
|
||||
vsize_1: weightVsizeFees['1'] || 0,
|
||||
vsize_2: weightVsizeFees['2'] || 0,
|
||||
vsize_3: weightVsizeFees['3'] || 0,
|
||||
vsize_4: weightVsizeFees['4'] || 0,
|
||||
vsize_5: weightVsizeFees['5'] || 0,
|
||||
vsize_6: weightVsizeFees['6'] || 0,
|
||||
vsize_8: weightVsizeFees['8'] || 0,
|
||||
vsize_10: weightVsizeFees['10'] || 0,
|
||||
vsize_12: weightVsizeFees['12'] || 0,
|
||||
vsize_15: weightVsizeFees['15'] || 0,
|
||||
vsize_20: weightVsizeFees['20'] || 0,
|
||||
vsize_30: weightVsizeFees['30'] || 0,
|
||||
vsize_40: weightVsizeFees['40'] || 0,
|
||||
vsize_50: weightVsizeFees['50'] || 0,
|
||||
vsize_60: weightVsizeFees['60'] || 0,
|
||||
vsize_70: weightVsizeFees['70'] || 0,
|
||||
vsize_80: weightVsizeFees['80'] || 0,
|
||||
vsize_90: weightVsizeFees['90'] || 0,
|
||||
vsize_100: weightVsizeFees['100'] || 0,
|
||||
vsize_125: weightVsizeFees['125'] || 0,
|
||||
vsize_150: weightVsizeFees['150'] || 0,
|
||||
vsize_175: weightVsizeFees['175'] || 0,
|
||||
vsize_200: weightVsizeFees['200'] || 0,
|
||||
vsize_250: weightVsizeFees['250'] || 0,
|
||||
vsize_300: weightVsizeFees['300'] || 0,
|
||||
vsize_350: weightVsizeFees['350'] || 0,
|
||||
vsize_400: weightVsizeFees['400'] || 0,
|
||||
vsize_500: weightVsizeFees['500'] || 0,
|
||||
vsize_600: weightVsizeFees['600'] || 0,
|
||||
vsize_700: weightVsizeFees['700'] || 0,
|
||||
vsize_800: weightVsizeFees['800'] || 0,
|
||||
vsize_900: weightVsizeFees['900'] || 0,
|
||||
vsize_1000: weightVsizeFees['1000'] || 0,
|
||||
vsize_1200: weightVsizeFees['1200'] || 0,
|
||||
vsize_1400: weightVsizeFees['1400'] || 0,
|
||||
vsize_1600: weightVsizeFees['1600'] || 0,
|
||||
vsize_1800: weightVsizeFees['1800'] || 0,
|
||||
vsize_2000: weightVsizeFees['2000'] || 0,
|
||||
});
|
||||
|
||||
if (this.newStatisticsEntryCallback && insertId) {
|
||||
const newStats = await statisticsApi.$get(insertId);
|
||||
if (newStats) {
|
||||
this.newStatisticsEntryCallback(newStats);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('Unable to insert statistics. ' + e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new Statistics();
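The statistics runner aggregates mempool vsize into fixed fee-rate bands: each transaction's vsize is added to the first band whose upper bound exceeds its effective fee rate (Liquid rates are multiplied by 10 first). A trimmed sketch of the non-Liquid case:

// Fee-rate band lower bounds (sat/vB); truncated here for brevity.
const FEE_BANDS = [1, 2, 3, 4, 5, 6, 8, 10, 12, 15, 20];

function bucketVsizeByFeeRate(txs: { effectiveFeePerVsize: number; vsize: number }[]): { [band: number]: number } {
  const buckets: { [band: number]: number } = {};
  for (const tx of txs) {
    for (let i = 0; i < FEE_BANDS.length; i++) {
      // The last band catches everything above it, same as the `lastItem` check above.
      if (i === FEE_BANDS.length - 1 || tx.effectiveFeePerVsize < FEE_BANDS[i + 1]) {
        buckets[FEE_BANDS[i]] = (buckets[FEE_BANDS[i]] || 0) + tx.vsize;
        break;
      }
    }
  }
  return buckets;
}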
|
||||
@@ -1,7 +1,9 @@
|
||||
import logger from '../logger';
|
||||
import * as WebSocket from 'ws';
|
||||
import { BlockExtended, TransactionExtended, WebsocketResponse, MempoolBlock, MempoolBlockDelta,
|
||||
OptimizedStatistic, ILoadingIndicators, IConversionRates } from '../mempool.interfaces';
|
||||
import {
|
||||
BlockExtended, TransactionExtended, WebsocketResponse, MempoolBlock, MempoolBlockDelta,
|
||||
OptimizedStatistic, ILoadingIndicators, IConversionRates
|
||||
} from '../mempool.interfaces';
|
||||
import blocks from './blocks';
|
||||
import memPool from './mempool';
|
||||
import backendInfo from './backend-info';
|
||||
@@ -14,6 +16,8 @@ import transactionUtils from './transaction-utils';
|
||||
import rbfCache from './rbf-cache';
|
||||
import difficultyAdjustment from './difficulty-adjustment';
|
||||
import feeApi from './fee-api';
|
||||
import BlocksAuditsRepository from '../repositories/BlocksAuditsRepository';
|
||||
import BlocksSummariesRepository from '../repositories/BlocksSummariesRepository';
|
||||
|
||||
class WebsocketHandler {
|
||||
private wss: WebSocket.Server | undefined;
|
||||
@@ -164,7 +168,7 @@ class WebsocketHandler {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
this.wss.clients.forEach((client: WebSocket) => {
|
||||
this.wss.clients.forEach((client) => {
|
||||
if (client.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
@@ -179,7 +183,7 @@ class WebsocketHandler {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
this.wss.clients.forEach((client: WebSocket) => {
|
||||
this.wss.clients.forEach((client) => {
|
||||
if (client.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
@@ -192,7 +196,7 @@ class WebsocketHandler {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
this.wss.clients.forEach((client: WebSocket) => {
|
||||
this.wss.clients.forEach((client) => {
|
||||
if (client.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
@@ -224,7 +228,7 @@ class WebsocketHandler {
|
||||
throw new Error('WebSocket.Server is not set');
|
||||
}
|
||||
|
||||
this.wss.clients.forEach((client: WebSocket) => {
|
||||
this.wss.clients.forEach((client) => {
|
||||
if (client.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
@@ -255,7 +259,7 @@ class WebsocketHandler {
|
||||
memPool.handleRbfTransactions(rbfTransactions);
|
||||
const recommendedFees = feeApi.getRecommendedFee();
|
||||
|
||||
this.wss.clients.forEach(async (client: WebSocket) => {
|
||||
this.wss.clients.forEach(async (client) => {
|
||||
if (client.readyState !== WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
@@ -414,17 +418,56 @@ class WebsocketHandler {
|
||||
|
||||
if (_mempoolBlocks[0]) {
|
||||
const matches: string[] = [];
|
||||
const added: string[] = [];
|
||||
const missing: string[] = [];
|
||||
|
||||
for (const txId of txIds) {
|
||||
if (_mempoolBlocks[0].transactionIds.indexOf(txId) > -1) {
|
||||
matches.push(txId);
|
||||
} else {
|
||||
added.push(txId);
|
||||
}
|
||||
delete _memPool[txId];
|
||||
}
|
||||
|
||||
matchRate = Math.round((matches.length / (txIds.length - 1)) * 100);
|
||||
for (const txId of _mempoolBlocks[0].transactionIds) {
|
||||
if (matches.includes(txId) || added.includes(txId)) {
|
||||
continue;
|
||||
}
|
||||
missing.push(txId);
|
||||
}
|
||||
|
||||
matchRate = Math.round((Math.max(0, matches.length - missing.length - added.length) / txIds.length * 100) * 100) / 100;
|
||||
mempoolBlocks.updateMempoolBlocks(_memPool);
|
||||
mBlocks = mempoolBlocks.getMempoolBlocks();
|
||||
mBlockDeltas = mempoolBlocks.getMempoolBlockDeltas();
|
||||
|
||||
if (Common.indexingEnabled()) {
|
||||
const stripped = _mempoolBlocks[0].transactions.map((tx) => {
|
||||
return {
|
||||
txid: tx.txid,
|
||||
vsize: tx.vsize,
|
||||
fee: tx.fee ? Math.round(tx.fee) : 0,
|
||||
value: tx.value,
|
||||
};
|
||||
});
|
||||
BlocksSummariesRepository.$saveSummary({
|
||||
height: block.height,
|
||||
template: {
|
||||
id: block.id,
|
||||
transactions: stripped
|
||||
}
|
||||
});
|
||||
|
||||
BlocksAuditsRepository.$saveAudit({
|
||||
time: block.timestamp,
|
||||
height: block.height,
|
||||
hash: block.id,
|
||||
addedTxs: added,
|
||||
missingTxs: missing,
|
||||
matchRate: matchRate,
|
||||
});
|
||||
}
|
||||
}
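The reworked match rate above scores a block template against the mined block by subtracting both the transactions the template expected but the block dropped and the ones the block added that were not projected, relative to the block's transaction count. A small illustrative helper:

// Match rate as a percentage with two decimals, following the formula above.
function auditMatchRate(matched: number, missing: number, added: number, blockTxCount: number): number {
  const score = Math.max(0, matched - missing - added) / blockTxCount;
  return Math.round(score * 100 * 100) / 100;
}

// Example: 1900 matched, 60 missing, 40 added out of 2000 txs -> 90% match rate.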
|
||||
|
||||
if (block.extras) {
|
||||
|
||||
@@ -15,6 +15,7 @@ interface IConfig {
|
||||
INITIAL_BLOCKS_AMOUNT: number;
|
||||
MEMPOOL_BLOCKS_AMOUNT: number;
|
||||
INDEXING_BLOCKS_AMOUNT: number;
|
||||
BLOCKS_SUMMARIES_INDEXING: boolean;
|
||||
PRICE_FEED_UPDATE_INTERVAL: number;
|
||||
USE_SECOND_NODE_FOR_MINFEE: boolean;
|
||||
EXTERNAL_ASSETS: string[];
|
||||
@@ -22,10 +23,28 @@ interface IConfig {
|
||||
EXTERNAL_RETRY_INTERVAL: number;
|
||||
USER_AGENT: string;
|
||||
STDOUT_LOG_MIN_PRIORITY: 'emerg' | 'alert' | 'crit' | 'err' | 'warn' | 'notice' | 'info' | 'debug';
|
||||
AUTOMATIC_BLOCK_REINDEXING: boolean;
|
||||
POOLS_JSON_URL: string,
|
||||
POOLS_JSON_TREE_URL: string,
|
||||
};
|
||||
ESPLORA: {
|
||||
REST_API_URL: string;
|
||||
};
|
||||
LIGHTNING: {
|
||||
ENABLED: boolean;
|
||||
BACKEND: 'lnd' | 'cln' | 'ldk';
|
||||
TOPOLOGY_FOLDER: string;
|
||||
STATS_REFRESH_INTERVAL: number;
|
||||
GRAPH_REFRESH_INTERVAL: number;
|
||||
};
|
||||
LND: {
|
||||
TLS_CERT_PATH: string;
|
||||
MACAROON_PATH: string;
|
||||
REST_API_URL: string;
|
||||
};
|
||||
CLIGHTNING: {
|
||||
SOCKET: string;
|
||||
};
|
||||
ELECTRUM: {
|
||||
HOST: string;
|
||||
PORT: number;
|
||||
@@ -87,6 +106,12 @@ interface IConfig {
|
||||
BISQ_URL: string;
|
||||
BISQ_ONION: string;
|
||||
};
|
||||
MAXMIND: {
|
||||
ENABLED: boolean;
|
||||
GEOLITE2_CITY: string;
|
||||
GEOLITE2_ASN: string;
|
||||
GEOIP2_ISP: string;
|
||||
},
|
||||
}
|
||||
|
||||
const defaults: IConfig = {
|
||||
@@ -104,6 +129,7 @@ const defaults: IConfig = {
|
||||
'INITIAL_BLOCKS_AMOUNT': 8,
|
||||
'MEMPOOL_BLOCKS_AMOUNT': 8,
|
||||
'INDEXING_BLOCKS_AMOUNT': 11000, // 0 = disable indexing, -1 = index all blocks
|
||||
'BLOCKS_SUMMARIES_INDEXING': false,
|
||||
'PRICE_FEED_UPDATE_INTERVAL': 600,
|
||||
'USE_SECOND_NODE_FOR_MINFEE': false,
|
||||
'EXTERNAL_ASSETS': [],
|
||||
@@ -111,6 +137,9 @@ const defaults: IConfig = {
|
||||
'EXTERNAL_RETRY_INTERVAL': 0,
|
||||
'USER_AGENT': 'mempool',
|
||||
'STDOUT_LOG_MIN_PRIORITY': 'debug',
|
||||
'AUTOMATIC_BLOCK_REINDEXING': false,
|
||||
'POOLS_JSON_URL': 'https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json',
|
||||
'POOLS_JSON_TREE_URL': 'https://api.github.com/repos/mempool/mining-pools/git/trees/master',
|
||||
},
|
||||
'ESPLORA': {
|
||||
'REST_API_URL': 'http://127.0.0.1:3000',
|
||||
@@ -156,6 +185,21 @@ const defaults: IConfig = {
|
||||
'ENABLED': false,
|
||||
'DATA_PATH': '/bisq/statsnode-data/btc_mainnet/db'
|
||||
},
|
||||
'LIGHTNING': {
|
||||
'ENABLED': false,
|
||||
'BACKEND': 'lnd',
|
||||
'TOPOLOGY_FOLDER': '',
|
||||
'STATS_REFRESH_INTERVAL': 600,
|
||||
'GRAPH_REFRESH_INTERVAL': 600,
|
||||
},
|
||||
'LND': {
|
||||
'TLS_CERT_PATH': '',
|
||||
'MACAROON_PATH': '',
|
||||
'REST_API_URL': 'https://localhost:8080',
|
||||
},
|
||||
'CLIGHTNING': {
|
||||
'SOCKET': '',
|
||||
},
|
||||
'SOCKS5PROXY': {
|
||||
'ENABLED': false,
|
||||
'USE_ONION': true,
|
||||
@@ -164,18 +208,24 @@ const defaults: IConfig = {
|
||||
'USERNAME': '',
|
||||
'PASSWORD': ''
|
||||
},
|
||||
"PRICE_DATA_SERVER": {
|
||||
'PRICE_DATA_SERVER': {
|
||||
'TOR_URL': 'http://wizpriceje6q5tdrxkyiazsgu7irquiqjy2dptezqhrtu7l2qelqktid.onion/getAllMarketPrices',
|
||||
'CLEARNET_URL': 'https://price.bisq.wiz.biz/getAllMarketPrices'
|
||||
},
|
||||
"EXTERNAL_DATA_SERVER": {
|
||||
'EXTERNAL_DATA_SERVER': {
|
||||
'MEMPOOL_API': 'https://mempool.space/api/v1',
|
||||
'MEMPOOL_ONION': 'http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion/api/v1',
|
||||
'LIQUID_API': 'https://liquid.network/api/v1',
|
||||
'LIQUID_ONION': 'http://liquidmom47f6s3m53ebfxn47p76a6tlnxib3wp6deux7wuzotdr6cyd.onion/api/v1',
|
||||
'BISQ_URL': 'https://bisq.markets/api',
|
||||
'BISQ_ONION': 'http://bisqmktse2cabavbr2xjq7xw3h6g5ottemo5rolfcwt6aly6tp5fdryd.onion/api'
|
||||
}
|
||||
},
|
||||
"MAXMIND": {
|
||||
'ENABLED': false,
|
||||
"GEOLITE2_CITY": "/usr/local/share/GeoIP/GeoLite2-City.mmdb",
|
||||
"GEOLITE2_ASN": "/usr/local/share/GeoIP/GeoLite2-ASN.mmdb",
|
||||
"GEOIP2_ISP": "/usr/local/share/GeoIP/GeoIP2-ISP.mmdb"
|
||||
},
|
||||
};
|
||||
|
||||
class Config implements IConfig {
|
||||
@@ -188,9 +238,13 @@ class Config implements IConfig {
|
||||
SYSLOG: IConfig['SYSLOG'];
|
||||
STATISTICS: IConfig['STATISTICS'];
|
||||
BISQ: IConfig['BISQ'];
|
||||
LIGHTNING: IConfig['LIGHTNING'];
|
||||
LND: IConfig['LND'];
|
||||
CLIGHTNING: IConfig['CLIGHTNING'];
|
||||
SOCKS5PROXY: IConfig['SOCKS5PROXY'];
|
||||
PRICE_DATA_SERVER: IConfig['PRICE_DATA_SERVER'];
|
||||
EXTERNAL_DATA_SERVER: IConfig['EXTERNAL_DATA_SERVER'];
|
||||
MAXMIND: IConfig['MAXMIND'];
|
||||
|
||||
constructor() {
|
||||
const configs = this.merge(configFile, defaults);
|
||||
@@ -203,9 +257,13 @@ class Config implements IConfig {
|
||||
this.SYSLOG = configs.SYSLOG;
|
||||
this.STATISTICS = configs.STATISTICS;
|
||||
this.BISQ = configs.BISQ;
|
||||
this.LIGHTNING = configs.LIGHTNING;
|
||||
this.LND = configs.LND;
|
||||
this.CLIGHTNING = configs.CLIGHTNING;
|
||||
this.SOCKS5PROXY = configs.SOCKS5PROXY;
|
||||
this.PRICE_DATA_SERVER = configs.PRICE_DATA_SERVER;
|
||||
this.EXTERNAL_DATA_SERVER = configs.EXTERNAL_DATA_SERVER;
|
||||
this.MAXMIND = configs.MAXMIND;
|
||||
}
|
||||
|
||||
merge = (...objects: object[]): IConfig => {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import config from './config';
|
||||
import { createPool, Pool, PoolConnection } from 'mysql2/promise';
|
||||
import logger from './logger';
|
||||
import { PoolOptions } from 'mysql2/typings/mysql';
|
||||
import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } from 'mysql2/typings/mysql';
|
||||
|
||||
class DB {
|
||||
constructor() {
|
||||
@@ -28,7 +28,9 @@ import { PoolOptions } from 'mysql2/typings/mysql';
|
||||
}
|
||||
}
|
||||
|
||||
public async query(query, params?) {
|
||||
public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
|
||||
OkPacket[] | ResultSetHeader>(query, params?): Promise<[T, FieldPacket[]]>
|
||||
{
|
||||
this.checkDBFlag();
|
||||
const pool = await this.getPool();
|
||||
return pool.query(query, params);
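With the generic signature on `DB.query`, call sites can describe the rows they expect instead of working with untyped results. A hypothetical usage sketch (table and columns are illustrative):

import { RowDataPacket } from 'mysql2';

interface PoolRow extends RowDataPacket {
  id: number;
  name: string;
}

// Inside an async function: `rows` is typed as PoolRow[] rather than any.
const [rows] = await DB.query<PoolRow[]>('SELECT id, name FROM pools WHERE slug = ?', ['unknown']);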
|
||||
|
||||
@@ -1,17 +1,14 @@
|
||||
import { Express, Request, Response, NextFunction } from 'express';
|
||||
import * as express from 'express';
|
||||
import express from "express";
|
||||
import { Application, Request, Response, NextFunction } from 'express';
|
||||
import * as http from 'http';
|
||||
import * as WebSocket from 'ws';
|
||||
import * as cluster from 'cluster';
|
||||
import axios from 'axios';
|
||||
|
||||
import cluster from 'cluster';
|
||||
import DB from './database';
|
||||
import config from './config';
|
||||
import routes from './routes';
|
||||
import blocks from './api/blocks';
|
||||
import memPool from './api/mempool';
|
||||
import diskCache from './api/disk-cache';
|
||||
import statistics from './api/statistics';
|
||||
import statistics from './api/statistics/statistics';
|
||||
import websocketHandler from './api/websocket-handler';
|
||||
import fiatConversion from './api/fiat-conversion';
|
||||
import bisq from './api/bisq/bisq';
|
||||
@@ -27,11 +24,22 @@ import icons from './api/liquid/icons';
|
||||
import { Common } from './api/common';
|
||||
import poolsUpdater from './tasks/pools-updater';
|
||||
import indexer from './indexer';
|
||||
import nodesRoutes from './api/explorer/nodes.routes';
|
||||
import channelsRoutes from './api/explorer/channels.routes';
|
||||
import generalLightningRoutes from './api/explorer/general.routes';
|
||||
import lightningStatsUpdater from './tasks/lightning/stats-updater.service';
|
||||
import networkSyncService from './tasks/lightning/network-sync.service';
|
||||
import statisticsRoutes from './api/statistics/statistics.routes';
|
||||
import miningRoutes from './api/mining/mining-routes';
|
||||
import bisqRoutes from './api/bisq/bisq.routes';
|
||||
import liquidRoutes from './api/liquid/liquid.routes';
|
||||
import bitcoinRoutes from './api/bitcoin/bitcoin.routes';
|
||||
import fundingTxFetcher from "./tasks/lightning/sync-tasks/funding-tx-fetcher";
|
||||
|
||||
class Server {
|
||||
private wss: WebSocket.Server | undefined;
|
||||
private server: http.Server | undefined;
|
||||
private app: Express;
|
||||
private app: Application;
|
||||
private currentBackendRetryInterval = 5;
|
||||
|
||||
constructor() {
|
||||
@@ -42,7 +50,7 @@ class Server {
|
||||
return;
|
||||
}
|
||||
|
||||
if (cluster.isMaster) {
|
||||
if (cluster.isPrimary) {
|
||||
logger.notice(`Mempool Server (Master) is running on port ${config.MEMPOOL.HTTP_PORT} (${backendInfo.getShortCommitHash()})`);
|
||||
|
||||
const numCPUs = config.MEMPOOL.SPAWN_CLUSTER_PROCS;
|
||||
@@ -76,7 +84,7 @@ class Server {
|
||||
})
|
||||
.use(express.urlencoded({ extended: true }))
|
||||
.use(express.text())
|
||||
;
|
||||
;
|
||||
|
||||
this.server = http.createServer(this.app);
|
||||
this.wss = new WebSocket.Server({ server: this.server });
|
||||
@@ -104,7 +112,7 @@ class Server {
|
||||
}
|
||||
}
|
||||
|
||||
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && cluster.isMaster) {
|
||||
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED && cluster.isPrimary) {
|
||||
statistics.startStatistics();
|
||||
}
|
||||
|
||||
@@ -128,6 +136,10 @@ class Server {
|
||||
bisqMarkets.startBisqService();
|
||||
}
|
||||
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
this.$runLightningBackend();
|
||||
}
|
||||
|
||||
this.server.listen(config.MEMPOOL.HTTP_PORT, () => {
|
||||
if (worker) {
|
||||
logger.info(`Mempool Server worker #${process.pid} started`);
|
||||
@@ -171,6 +183,18 @@ class Server {
|
||||
}
|
||||
}
|
||||
|
||||
async $runLightningBackend() {
|
||||
try {
|
||||
await fundingTxFetcher.$init();
|
||||
await networkSyncService.$startService();
|
||||
await lightningStatsUpdater.$startService();
|
||||
} catch(e) {
|
||||
logger.err(`Lightning backend crashed. Restarting in 1 minute. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
await Common.sleep$(1000 * 60);
|
||||
this.$runLightningBackend();
|
||||
};
|
||||
}
|
||||
|
||||
setUpWebsocketHandling() {
|
||||
if (this.wss) {
|
||||
websocketHandler.setWebsocketServer(this.wss);
|
||||
@@ -193,165 +217,23 @@ class Server {
|
||||
}
|
||||
|
||||
setUpHttpApiRoutes() {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'transaction-times', routes.getTransactionTimes)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'cpfp/:txId', routes.getCpfpInfo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'difficulty-adjustment', routes.getDifficultyChange)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/recommended', routes.getRecommendedFees)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'fees/mempool-blocks', routes.getMempoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'backend-info', routes.getBackendInfo)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'init-data', routes.getInitData)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'validate-address/:address', routes.validateAddress)
|
||||
.post(config.MEMPOOL.API_URL_PREFIX + 'tx/push', routes.$postTransactionForm)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'donations', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'donations/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/donations/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'contributors/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/contributors/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'translators', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators`, { responseType: 'stream', timeout: 10000 });
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'translators/images/:id', async (req, res) => {
|
||||
try {
|
||||
const response = await axios.get(`${config.EXTERNAL_DATA_SERVER.MEMPOOL_API}/translators/images/${req.params.id}`, {
|
||||
responseType: 'stream', timeout: 10000
|
||||
});
|
||||
response.data.pipe(res);
|
||||
} catch (e) {
|
||||
res.status(500).end();
|
||||
}
|
||||
})
|
||||
;
|
||||
|
||||
bitcoinRoutes.initRoutes(this.app);
|
||||
if (config.STATISTICS.ENABLED && config.DATABASE.ENABLED) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2h', routes.$getStatisticsByTime.bind(routes, '2h'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/24h', routes.$getStatisticsByTime.bind(routes, '24h'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1w', routes.$getStatisticsByTime.bind(routes, '1w'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1m', routes.$getStatisticsByTime.bind(routes, '1m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3m', routes.$getStatisticsByTime.bind(routes, '3m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/6m', routes.$getStatisticsByTime.bind(routes, '6m'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/1y', routes.$getStatisticsByTime.bind(routes, '1y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/2y', routes.$getStatisticsByTime.bind(routes, '2y'))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'statistics/3y', routes.$getStatisticsByTime.bind(routes, '3y'))
|
||||
;
|
||||
statisticsRoutes.initRoutes(this.app);
|
||||
}
|
||||
|
||||
if (Common.indexingEnabled()) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pools/:interval', routes.$getPools)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/hashrate', routes.$getPoolHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks', routes.$getPoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug/blocks/:height', routes.$getPoolBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/pool/:slug', routes.$getPool)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/pools/:interval', routes.$getPoolsHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/hashrate/:interval', routes.$getHistoricalHashrate)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/reward-stats/:blockCount', routes.$getRewardStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fees/:interval', routes.$getHistoricalBlockFees)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/rewards/:interval', routes.$getHistoricalBlockRewards)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/fee-rates/:interval', routes.$getHistoricalBlockFeeRates)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mining/blocks/sizes-weights/:interval', routes.$getHistoricalBlockSizeAndWeight)
|
||||
;
|
||||
miningRoutes.initRoutes(this.app);
|
||||
}
|
||||
|
||||
if (config.BISQ.ENABLED) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/stats', routes.getBisqStats)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/tx/:txId', routes.getBisqTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/block/:hash', routes.getBisqBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/tip/height', routes.getBisqTip)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/blocks/:index/:length', routes.getBisqBlocks)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/address/:address', routes.getBisqAddress)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/txs/:index/:length', routes.getBisqTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/currencies', routes.getBisqMarketCurrencies.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/depth', routes.getBisqMarketDepth.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/hloc', routes.getBisqMarketHloc.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/markets', routes.getBisqMarketMarkets.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/offers', routes.getBisqMarketOffers.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/ticker', routes.getBisqMarketTicker.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/trades', routes.getBisqMarketTrades.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes', routes.getBisqMarketVolumes.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'bisq/markets/volumes/7d', routes.getBisqMarketVolumes7d.bind(routes))
|
||||
;
|
||||
bisqRoutes.initRoutes(this.app);
|
||||
}
|
||||
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks', routes.getBlocks.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/:height', routes.getBlocks.bind(routes))
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash', routes.getBlock);
|
||||
|
||||
if (config.MEMPOOL.BACKEND !== 'esplora') {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool', routes.getMempool)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/txids', routes.getMempoolTxIds)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'mempool/recent', routes.getRecentMempoolTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId', routes.getTransaction)
|
||||
.post(config.MEMPOOL.API_URL_PREFIX + 'tx', routes.$postTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/hex', routes.getRawTransaction)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/status', routes.getTransactionStatus)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'tx/:txId/outspends', routes.getTransactionOutspends)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/header', routes.getBlockHeader)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'blocks/tip/height', routes.getBlockTipHeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs', routes.getBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txs/:index', routes.getBlockTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block/:hash/txids', routes.getTxIdsForBlock)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'block-height/:height', routes.getBlockHeight)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address', routes.getAddress)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs', routes.getAddressTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address/:address/txs/chain/:txId', routes.getAddressTransactions)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'address-prefix/:prefix', routes.getAddressPrefix)
|
||||
;
|
||||
}
|
||||
|
||||
if (Common.isLiquid()) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/icons', routes.getAllLiquidIcon)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/featured', routes.$getAllFeaturedLiquidAssets)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'asset/:assetId/icon', routes.getLiquidIcon)
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'assets/group/:id', routes.$getAssetGroup)
|
||||
;
|
||||
liquidRoutes.initRoutes(this.app);
|
||||
}
|
||||
|
||||
if (Common.isLiquid() && config.DATABASE.ENABLED) {
|
||||
this.app
|
||||
.get(config.MEMPOOL.API_URL_PREFIX + 'liquid/pegs/month', routes.$getElementsPegsByMonth)
|
||||
;
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
generalLightningRoutes.initRoutes(this.app);
|
||||
nodesRoutes.initRoutes(this.app);
|
||||
channelsRoutes.initRoutes(this.app);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
import { Common } from './api/common';
|
||||
import blocks from './api/blocks';
|
||||
import mempool from './api/mempool';
|
||||
import mining from './api/mining';
|
||||
import mining from './api/mining/mining';
|
||||
import logger from './logger';
|
||||
import HashratesRepository from './repositories/HashratesRepository';
|
||||
import bitcoinClient from './api/bitcoin/bitcoin-client';
|
||||
import priceUpdater from './tasks/price-updater';
|
||||
import PricesRepository from './repositories/PricesRepository';
|
||||
|
||||
class Indexer {
|
||||
runIndexer = true;
|
||||
indexerRunning = false;
|
||||
|
||||
constructor() {
|
||||
}
|
||||
tasksRunning: string[] = [];
|
||||
|
||||
public reindex() {
|
||||
if (Common.indexingEnabled()) {
|
||||
@@ -18,6 +19,28 @@ class Indexer {
|
||||
}
|
||||
}
|
||||
|
||||
public async runSingleTask(task: 'blocksPrices') {
|
||||
if (!Common.indexingEnabled()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (task === 'blocksPrices' && !this.tasksRunning.includes(task)) {
|
||||
this.tasksRunning.push(task);
|
||||
const latestPriceId = await PricesRepository.$getLatestPriceId();
|
||||
if (priceUpdater.historyInserted === false || latestPriceId === null) {
|
||||
logger.debug(`Blocks prices indexer is waiting for the price updater to complete`);
|
||||
setTimeout(() => {
|
||||
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
|
||||
this.runSingleTask('blocksPrices');
|
||||
}, 10000);
|
||||
} else {
|
||||
logger.debug(`Blocks prices indexer will run now`);
|
||||
await mining.$indexBlockPrices();
|
||||
this.tasksRunning = this.tasksRunning.filter(runningTask => runningTask !== task);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async $run() {
|
||||
if (!Common.indexingEnabled() || this.runIndexer === false ||
|
||||
this.indexerRunning === true || mempool.hasPriority()
|
||||
@@ -25,20 +48,48 @@ class Indexer {
|
||||
return;
|
||||
}
|
||||
|
||||
// Do not attempt to index anything unless Bitcoin Core is fully synced
|
||||
const blockchainInfo = await bitcoinClient.getBlockchainInfo();
|
||||
if (blockchainInfo.blocks !== blockchainInfo.headers) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.runIndexer = false;
|
||||
this.indexerRunning = true;
|
||||
|
||||
logger.debug(`Running mining indexer`);
|
||||
|
||||
try {
|
||||
await blocks.$generateBlockDatabase();
|
||||
await this.$resetHashratesIndexingState();
|
||||
await priceUpdater.$run();
|
||||
|
||||
const chainValid = await blocks.$generateBlockDatabase();
|
||||
if (chainValid === false) {
|
||||
// The chain of block hashes was invalid, so we need to reindex. Stop here and continue at the next iteration
|
||||
logger.warn(`The chain of block hashes is invalid, re-indexing invalid data in 10 seconds.`);
|
||||
setTimeout(() => this.reindex(), 10000);
|
||||
this.indexerRunning = false;
|
||||
return;
|
||||
}
|
||||
|
||||
this.runSingleTask('blocksPrices');
|
||||
await mining.$indexDifficultyAdjustments();
|
||||
await this.$resetHashratesIndexingState(); // TODO - Remove this as it's not efficient
|
||||
await mining.$generateNetworkHashrateHistory();
|
||||
await mining.$generatePoolHashrateHistory();
|
||||
await blocks.$generateBlocksSummariesDatabase();
|
||||
} catch (e) {
|
||||
this.reindex();
|
||||
logger.err(`Indexer failed, trying again later. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
this.indexerRunning = false;
|
||||
logger.err(`Indexer failed, trying again in 10 seconds. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
setTimeout(() => this.reindex(), 10000);
|
||||
this.indexerRunning = false;
|
||||
return;
|
||||
}
|
||||
|
||||
this.indexerRunning = false;
|
||||
|
||||
const runEvery = 1000 * 3600; // 1 hour
|
||||
logger.debug(`Indexing completed. Next run planned at ${new Date(new Date().getTime() + runEvery).toUTCString()}`);
|
||||
setTimeout(() => this.reindex(), runEvery);
|
||||
}
|
||||
|
||||
async $resetHashratesIndexingState() {
|
||||
@@ -47,6 +98,7 @@ class Indexer {
|
||||
await HashratesRepository.$setLatestRun('last_weekly_hashrates_indexing', 0);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot reset hashrate indexing timestamps. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -73,6 +73,9 @@ class Logger {
|
||||
}
|
||||
|
||||
private getNetwork(): string {
|
||||
if (config.LIGHTNING.ENABLED) {
|
||||
return 'lightning';
|
||||
}
|
||||
if (config.BISQ.ENABLED) {
|
||||
return 'bisq';
|
||||
}
|
||||
|
||||
@@ -22,6 +22,15 @@ export interface PoolStats extends PoolInfo {
|
||||
emptyBlocks: number;
|
||||
}
|
||||
|
||||
export interface BlockAudit {
|
||||
time: number,
|
||||
height: number,
|
||||
hash: string,
|
||||
missingTxs: string[],
|
||||
addedTxs: string[],
|
||||
matchRate: number,
|
||||
}
|
||||
|
||||
export interface MempoolBlock {
|
||||
blockSize: number;
|
||||
blockVSize: number;
|
||||
@@ -100,12 +109,23 @@ export interface BlockExtension {
|
||||
avgFee?: number;
|
||||
avgFeeRate?: number;
|
||||
coinbaseRaw?: string;
|
||||
usd?: number | null;
|
||||
}
|
||||
|
||||
export interface BlockExtended extends IEsploraApi.Block {
|
||||
extras: BlockExtension;
|
||||
}
|
||||
|
||||
export interface BlockSummary {
|
||||
id: string;
|
||||
transactions: TransactionStripped[];
|
||||
}
|
||||
|
||||
export interface BlockPrice {
|
||||
height: number;
|
||||
priceId: number;
|
||||
}
|
||||
|
||||
export interface TransactionMinerInfo {
|
||||
vin: VinStrippedToScriptsig[];
|
||||
vout: VoutStrippedToScriptPubkey[];
|
||||
@@ -219,6 +239,13 @@ export interface IDifficultyAdjustment {
|
||||
timeOffset: number;
|
||||
}
|
||||
|
||||
export interface IndexedDifficultyAdjustment {
|
||||
time: number; // UNIX timestamp
|
||||
height: number; // Block height
|
||||
difficulty: number;
|
||||
adjustment: number;
|
||||
}
|
||||
|
||||
export interface RewardStats {
|
||||
totalReward: number;
|
||||
totalFee: number;
|
||||
|
||||
76
backend/src/repositories/BlocksAuditsRepository.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import transactionUtils from '../api/transaction-utils';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { BlockAudit } from '../mempool.interfaces';
|
||||
|
||||
class BlocksAuditRepositories {
|
||||
public async $saveAudit(audit: BlockAudit): Promise<void> {
|
||||
try {
|
||||
await DB.query(`INSERT INTO blocks_audits(time, height, hash, missing_txs, added_txs, match_rate)
|
||||
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?)`, [audit.time, audit.height, audit.hash, JSON.stringify(audit.missingTxs),
|
||||
JSON.stringify(audit.addedTxs), audit.matchRate]);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Cannot save block audit for block ${audit.hash} because it has already been indexed, ignoring`);
|
||||
} else {
|
||||
logger.err(`Cannot save block audit into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async $getBlockPredictionsHistory(div: number, interval: string | null): Promise<any> {
|
||||
try {
|
||||
let query = `SELECT UNIX_TIMESTAMP(time) as time, height, match_rate FROM blocks_audits`;
|
||||
|
||||
if (interval !== null) {
|
||||
query += ` WHERE time BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
}
|
||||
|
||||
query += ` GROUP BY UNIX_TIMESTAMP(time) DIV ${div} ORDER BY height`;
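// Grouping by UNIX_TIMESTAMP(time) DIV ${div} keeps one audit row per bucket of `div` seconds
// (e.g. one data point per day when div is 86400; illustrative value).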
|
||||
|
||||
const [rows] = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot fetch block prediction history. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getPredictionsCount(): Promise<number> {
|
||||
try {
|
||||
const [rows] = await DB.query(`SELECT count(hash) as count FROM blocks_audits`);
|
||||
return rows[0].count;
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot fetch block predictions count. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getBlockAudit(hash: string): Promise<any> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(
|
||||
`SELECT blocks.height, blocks.hash as id, UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.size,
|
||||
blocks.weight, blocks.tx_count,
|
||||
transactions, template, missing_txs as missingTxs, added_txs as addedTxs, match_rate as matchRate
|
||||
FROM blocks_audits
|
||||
JOIN blocks ON blocks.hash = blocks_audits.hash
|
||||
JOIN blocks_summaries ON blocks_summaries.id = blocks_audits.hash
|
||||
WHERE blocks_audits.hash = "${hash}"
|
||||
`);
|
||||
|
||||
rows[0].missingTxs = JSON.parse(rows[0].missingTxs);
|
||||
rows[0].addedTxs = JSON.parse(rows[0].addedTxs);
|
||||
rows[0].transactions = JSON.parse(rows[0].transactions);
|
||||
rows[0].template = JSON.parse(rows[0].template);
|
||||
|
||||
return rows[0];
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot fetch block audit from db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new BlocksAuditRepositories();
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { BlockExtended } from '../mempool.interfaces';
|
||||
import { BlockExtended, BlockPrice } from '../mempool.interfaces';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { Common } from '../api/common';
|
||||
@@ -6,6 +6,8 @@ import { prepareBlock } from '../utils/blocks-utils';
|
||||
import PoolsRepository from './PoolsRepository';
|
||||
import HashratesRepository from './HashratesRepository';
|
||||
import { escape } from 'mysql2';
|
||||
import BlocksSummariesRepository from './BlocksSummariesRepository';
|
||||
import DifficultyAdjustmentsRepository from './DifficultyAdjustmentsRepository';
|
||||
|
||||
class BlocksRepository {
|
||||
/**
|
||||
@@ -254,7 +256,7 @@ class BlocksRepository {
|
||||
|
||||
const params: any[] = [];
|
||||
let query = ` SELECT
|
||||
height,
|
||||
blocks.height,
|
||||
hash as id,
|
||||
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
|
||||
size,
|
||||
@@ -306,7 +308,7 @@ class BlocksRepository {
|
||||
public async $getBlockByHeight(height: number): Promise<object | null> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`SELECT
|
||||
height,
|
||||
blocks.height,
|
||||
hash,
|
||||
hash as id,
|
||||
UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp,
|
||||
@@ -334,7 +336,7 @@ class BlocksRepository {
|
||||
avg_fee_rate
|
||||
FROM blocks
|
||||
JOIN pools ON blocks.pool_id = pools.id
|
||||
WHERE height = ${height};
|
||||
WHERE blocks.height = ${height}
|
||||
`);
|
||||
|
||||
if (rows.length <= 0) {
|
||||
@@ -355,15 +357,15 @@ class BlocksRepository {
|
||||
public async $getBlockByHash(hash: string): Promise<object | null> {
|
||||
try {
|
||||
const query = `
|
||||
SELECT *, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp, hash as id,
|
||||
SELECT *, blocks.height, UNIX_TIMESTAMP(blocks.blockTimestamp) as blockTimestamp, hash as id,
|
||||
pools.id as pool_id, pools.name as pool_name, pools.link as pool_link, pools.slug as pool_slug,
|
||||
pools.addresses as pool_addresses, pools.regexes as pool_regexes,
|
||||
previous_block_hash as previousblockhash
|
||||
FROM blocks
|
||||
JOIN pools ON blocks.pool_id = pools.id
|
||||
WHERE hash = '${hash}';
|
||||
WHERE hash = ?;
|
||||
`;
|
||||
const [rows]: any[] = await DB.query(query);
|
||||
const [rows]: any[] = await DB.query(query, [hash]);
|
||||
|
||||
if (rows.length <= 0) {
|
||||
return null;
|
||||
@@ -380,51 +382,25 @@ class BlocksRepository {
|
||||
/**
|
||||
* Return blocks difficulty
|
||||
*/
|
||||
public async $getBlocksDifficulty(interval: string | null): Promise<object[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
// :D ... Yeah don't ask me about this one https://stackoverflow.com/a/40303162
|
||||
// Basically, using temporary user defined fields, we are able to extract all
|
||||
// difficulty adjustments from the blocks tables.
|
||||
// This allows us to avoid indexing it in another table.
|
||||
let query = `
|
||||
SELECT
|
||||
*
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
UNIX_TIMESTAMP(blockTimestamp) as timestamp, difficulty, height,
|
||||
IF(@prevStatus = YT.difficulty, @rn := @rn + 1,
|
||||
IF(@prevStatus := YT.difficulty, @rn := 1, @rn := 1)
|
||||
) AS rn
|
||||
FROM blocks YT
|
||||
CROSS JOIN
|
||||
(
|
||||
SELECT @prevStatus := -1, @rn := 1
|
||||
) AS var
|
||||
`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
}
|
||||
|
||||
query += `
|
||||
ORDER BY YT.height
|
||||
) AS t
|
||||
WHERE t.rn = 1
|
||||
ORDER BY t.height
|
||||
`;
|
||||
|
||||
public async $getBlocksDifficulty(): Promise<object[]> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(query);
|
||||
|
||||
for (const row of rows) {
|
||||
delete row['rn'];
|
||||
}
|
||||
|
||||
const [rows]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(blockTimestamp) as time, height, difficulty FROM blocks`);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot generate difficulty history. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot get blocks difficulty list from the db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return blocks height
|
||||
*/
|
||||
public async $getBlocksHeightsAndTimestamp(): Promise<object[]> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`SELECT height, blockTimestamp as timestamp FROM blocks`);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot get blocks height and timestamp from the db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
@@ -451,26 +427,6 @@ class BlocksRepository {
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Check that the chain of the last 10 blocks is valid
|
||||
*/
|
||||
public async $validateRecentBlocks(): Promise<boolean> {
|
||||
try {
|
||||
const [lastBlocks]: any[] = await DB.query(`SELECT height, hash, previous_block_hash FROM blocks ORDER BY height DESC LIMIT 10`);
|
||||
|
||||
for (let i = 0; i < lastBlocks.length - 1; ++i) {
|
||||
if (lastBlocks[i].previous_block_hash !== lastBlocks[i + 1].hash) {
|
||||
logger.warn(`Chain divergence detected at block ${lastBlocks[i].height}, re-indexing most recent data`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (e) {
|
||||
return true; // Don't do anything if there is a db error
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that the chain of block hashes is valid and delete data from the stale branch if needed
|
||||
*/
|
||||
@@ -493,15 +449,17 @@ class BlocksRepository {
|
||||
}
|
||||
|
||||
if (blocks[idx].previous_block_hash !== blocks[idx - 1].hash) {
|
||||
logger.warn(`Chain divergence detected at block ${blocks[idx - 1].height}, re-indexing newer blocks and hashrates`);
|
||||
logger.warn(`Chain divergence detected at block ${blocks[idx - 1].height}`);
|
||||
await this.$deleteBlocksFrom(blocks[idx - 1].height);
|
||||
await BlocksSummariesRepository.$deleteBlocksFrom(blocks[idx - 1].height);
|
||||
await HashratesRepository.$deleteHashratesFromTimestamp(blocks[idx - 1].timestamp - 604800);
|
||||
await DifficultyAdjustmentsRepository.$deleteAdjustementsFromHeight(blocks[idx - 1].height);
|
||||
return false;
|
||||
}
|
||||
++idx;
|
||||
}
|
||||
|
||||
logger.info(`${idx} block hashes validated in ${new Date().getTime() - start} ms`);
|
||||
logger.debug(`${idx} block hashes validated in ${new Date().getTime() - start} ms`);
|
||||
return true;
|
||||
} catch (e) {
|
||||
logger.err('Cannot validate chain of block hash. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
@@ -528,10 +486,14 @@ class BlocksRepository {
|
||||
public async $getHistoricalBlockFees(div: number, interval: string | null): Promise<any> {
|
||||
try {
|
||||
let query = `SELECT
|
||||
CAST(AVG(height) as INT) as avgHeight,
|
||||
CAST(AVG(blocks.height) as INT) as avgHeight,
|
||||
CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
|
||||
CAST(AVG(fees) as INT) as avgFees
|
||||
FROM blocks`;
|
||||
CAST(AVG(fees) as INT) as avgFees,
|
||||
prices.USD
|
||||
FROM blocks
|
||||
JOIN blocks_prices on blocks_prices.height = blocks.height
|
||||
JOIN prices on prices.id = blocks_prices.price_id
|
||||
`;
|
||||
|
||||
if (interval !== null) {
|
||||
query += ` WHERE blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
@@ -553,10 +515,14 @@ class BlocksRepository {
|
||||
public async $getHistoricalBlockRewards(div: number, interval: string | null): Promise<any> {
|
||||
try {
|
||||
let query = `SELECT
|
||||
CAST(AVG(height) as INT) as avgHeight,
|
||||
CAST(AVG(blocks.height) as INT) as avgHeight,
|
||||
CAST(AVG(UNIX_TIMESTAMP(blockTimestamp)) as INT) as timestamp,
|
||||
CAST(AVG(reward) as INT) as avgRewards
|
||||
FROM blocks`;
|
||||
CAST(AVG(reward) as INT) as avgRewards,
|
||||
prices.USD
|
||||
FROM blocks
|
||||
JOIN blocks_prices on blocks_prices.height = blocks.height
|
||||
JOIN prices on prices.id = blocks_prices.price_id
|
||||
`;
|
||||
|
||||
if (interval !== null) {
|
||||
query += ` WHERE blockTimestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
@@ -652,6 +618,77 @@ class BlocksRepository {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of blocks that have been indexed
|
||||
*/
|
||||
public async $getIndexedBlocks(): Promise<any[]> {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT height, hash FROM blocks ORDER BY height DESC`);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot get the list of indexed blocks. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the oldest block of the consecutive chain of blocks, starting from the most recent one
|
||||
*/
|
||||
public async $getOldestConsecutiveBlock(): Promise<any> {
|
||||
try {
|
||||
const [rows]: any = await DB.query(`SELECT height, UNIX_TIMESTAMP(blockTimestamp) as timestamp, difficulty FROM blocks ORDER BY height DESC`);
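// Walk down from the tip: the first gap in consecutive heights marks the oldest block of the unbroken chain.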
|
||||
for (let i = 0; i < rows.length - 1; ++i) {
|
||||
if (rows[i].height - rows[i + 1].height > 1) {
|
||||
return rows[i];
|
||||
}
|
||||
}
|
||||
return rows[rows.length - 1];
|
||||
} catch (e) {
|
||||
logger.err('Cannot get the oldest consecutive block. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all blocks which have not been linked to a price yet
|
||||
*/
|
||||
public async $getBlocksWithoutPrice(): Promise<object[]> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`
|
||||
SELECT UNIX_TIMESTAMP(blocks.blockTimestamp) as timestamp, blocks.height
|
||||
FROM blocks
|
||||
LEFT JOIN blocks_prices ON blocks.height = blocks_prices.height
|
||||
WHERE blocks_prices.height IS NULL
|
||||
ORDER BY blocks.height
|
||||
`);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot get blocks without price from the db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save block prices in batch
|
||||
*/
|
||||
public async $saveBlockPrices(blockPrices: BlockPrice[]): Promise<void> {
|
||||
try {
|
||||
let query = `INSERT INTO blocks_prices(height, price_id) VALUES`;
|
||||
for (const price of blockPrices) {
|
||||
query += ` (${price.height}, ${price.priceId}),`
|
||||
}
|
||||
query = query.slice(0, -1);
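// The assembled statement looks like, e.g.: INSERT INTO blocks_prices(height, price_id) VALUES (700000, 42), (700001, 42) (illustrative values).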
|
||||
await DB.query(query);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Cannot save blocks prices for blocks [${blockPrices[0].height} to ${blockPrices[blockPrices.length - 1].height}] because they have already been indexed, ignoring`);
|
||||
} else {
|
||||
logger.err(`Cannot save blocks prices for blocks [${blockPrices[0].height} to ${blockPrices[blockPrices.length - 1].height}] into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new BlocksRepository();
|
||||
|
||||
69
backend/src/repositories/BlocksSummariesRepository.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { BlockSummary } from '../mempool.interfaces';
|
||||
|
||||
class BlocksSummariesRepository {
|
||||
public async $getByBlockId(id: string): Promise<BlockSummary | undefined> {
|
||||
try {
|
||||
const [summary]: any[] = await DB.query(`SELECT * from blocks_summaries WHERE id = ?`, [id]);
|
||||
if (summary.length > 0) {
|
||||
summary[0].transactions = JSON.parse(summary[0].transactions);
|
||||
return summary[0];
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get block summary for block id ${id}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
public async $saveSummary(params: { height: number, mined?: BlockSummary, template?: BlockSummary}) {
|
||||
const blockId = params.mined?.id ?? params.template?.id;
|
||||
try {
|
||||
const [dbSummary]: any[] = await DB.query(`SELECT * FROM blocks_summaries WHERE id = "${blockId}"`);
|
||||
if (dbSummary.length === 0) { // First insertion
|
||||
await DB.query(`INSERT INTO blocks_summaries VALUE (?, ?, ?, ?)`, [
|
||||
params.height, blockId, JSON.stringify(params.mined?.transactions ?? []), JSON.stringify(params.template?.transactions ?? [])
|
||||
]);
|
||||
} else if (params.mined !== undefined) { // Update mined block summary
|
||||
await DB.query(`UPDATE blocks_summaries SET transactions = ? WHERE id = "${params.mined.id}"`, [JSON.stringify(params.mined.transactions)]);
|
||||
} else if (params.template !== undefined) { // Update template block summary
|
||||
await DB.query(`UPDATE blocks_summaries SET template = ? WHERE id = "${params.template.id}"`, [JSON.stringify(params.template?.transactions)]);
|
||||
}
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Cannot save block summary for ${blockId} because it has already been indexed, ignoring`);
|
||||
} else {
|
||||
logger.debug(`Cannot save block summary for ${blockId}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async $getIndexedSummariesId(): Promise<string[]> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`SELECT id from blocks_summaries`);
|
||||
return rows.map(row => row.id);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get block summaries id list. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete block summaries from the database starting at blockHeight
|
||||
*/
|
||||
public async $deleteBlocksFrom(blockHeight: number) {
|
||||
logger.info(`Deleting newer block summaries from height ${blockHeight} from the database`);
|
||||
|
||||
try {
|
||||
await DB.query(`DELETE FROM blocks_summaries where height >= ${blockHeight}`);
|
||||
} catch (e) {
|
||||
logger.err('Cannot delete indexed blocks summaries. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new BlocksSummariesRepository();
|
||||
|
||||
124
backend/src/repositories/DifficultyAdjustmentsRepository.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
import { Common } from '../api/common';
|
||||
import config from '../config';
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { IndexedDifficultyAdjustment } from '../mempool.interfaces';
|
||||
|
||||
class DifficultyAdjustmentsRepository {
|
||||
public async $saveAdjustments(adjustment: IndexedDifficultyAdjustment): Promise<void> {
|
||||
if (adjustment.height === 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const query = `INSERT INTO difficulty_adjustments(time, height, difficulty, adjustment) VALUE (FROM_UNIXTIME(?), ?, ?, ?)`;
|
||||
const params: any[] = [
|
||||
adjustment.time,
|
||||
adjustment.height,
|
||||
adjustment.difficulty,
|
||||
adjustment.adjustment,
|
||||
];
|
||||
await DB.query(query, params);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Cannot save difficulty adjustment at block ${adjustment.height}, already indexed, ignoring`);
|
||||
} else {
|
||||
logger.err(`Cannot save difficulty adjustment at block ${adjustment.height}. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async $getAdjustments(interval: string | null, descOrder: boolean = false): Promise<IndexedDifficultyAdjustment[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT
|
||||
CAST(AVG(UNIX_TIMESTAMP(time)) as INT) as time,
|
||||
CAST(AVG(height) AS INT) as height,
|
||||
CAST(AVG(difficulty) as DOUBLE) as difficulty,
|
||||
CAST(AVG(adjustment) as DOUBLE) as adjustment
|
||||
FROM difficulty_adjustments`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE time BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
}
|
||||
|
||||
query += ` GROUP BY UNIX_TIMESTAMP(time) DIV ${86400}`;
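// Grouping by UNIX_TIMESTAMP(time) DIV 86400 averages the adjustments into one data point per day.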
|
||||
|
||||
if (descOrder === true) {
|
||||
query += ` ORDER BY height DESC`;
|
||||
} else {
|
||||
query += ` ORDER BY height`;
|
||||
}
|
||||
|
||||
try {
|
||||
const [rows] = await DB.query(query);
|
||||
return rows as IndexedDifficultyAdjustment[];
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getRawAdjustments(interval: string | null, descOrder: boolean = false): Promise<IndexedDifficultyAdjustment[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT
|
||||
UNIX_TIMESTAMP(time) as time,
|
||||
height as height,
|
||||
difficulty as difficulty,
|
||||
adjustment as adjustment
|
||||
FROM difficulty_adjustments`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE time BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()`;
|
||||
}
|
||||
|
||||
if (descOrder === true) {
|
||||
query += ` ORDER BY height DESC`;
|
||||
} else {
|
||||
query += ` ORDER BY height`;
|
||||
}
|
||||
|
||||
try {
|
||||
const [rows] = await DB.query(query);
|
||||
return rows as IndexedDifficultyAdjustment[];
|
||||
} catch (e) {
|
||||
logger.err(`Cannot get difficulty adjustments from the database. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getAdjustmentsHeights(): Promise<number[]> {
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(`SELECT height FROM difficulty_adjustments`);
|
||||
return rows.map(block => block.height);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot get difficulty adjustment block heights. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $deleteAdjustementsFromHeight(height: number): Promise<void> {
|
||||
try {
|
||||
logger.info(`Deleting newer difficulty adjustments from height ${height} from the database`);
|
||||
await DB.query(`DELETE FROM difficulty_adjustments WHERE height >= ?`, [height]);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot delete difficulty adjustments from the database. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $deleteLastAdjustment(): Promise<void> {
|
||||
try {
|
||||
logger.info(`Deleting the last difficulty adjustment from the database`);
|
||||
await DB.query(`DELETE FROM difficulty_adjustments ORDER BY time LIMIT 1`);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot delete last difficulty adjustment from the database. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new DifficultyAdjustmentsRepository();
|
||||
|
||||
@@ -29,10 +29,12 @@ class HashratesRepository {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNetworkDailyHashrate(interval: string | null): Promise<any[]> {
|
||||
public async $getRawNetworkDailyHashrate(interval: string | null): Promise<any[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp, avg_hashrate as avgHashrate
|
||||
let query = `SELECT
|
||||
UNIX_TIMESTAMP(hashrate_timestamp) as timestamp,
|
||||
avg_hashrate as avgHashrate
|
||||
FROM hashrates`;
|
||||
|
||||
if (interval) {
|
||||
@@ -53,6 +55,33 @@ class HashratesRepository {
|
||||
}
|
||||
}
|
||||
|
||||
public async $getNetworkDailyHashrate(interval: string | null): Promise<any[]> {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
let query = `SELECT
|
||||
CAST(AVG(UNIX_TIMESTAMP(hashrate_timestamp)) as INT) as timestamp,
|
||||
CAST(AVG(avg_hashrate) as DOUBLE) as avgHashrate
|
||||
FROM hashrates`;
|
||||
|
||||
if (interval) {
|
||||
query += ` WHERE hashrate_timestamp BETWEEN DATE_SUB(NOW(), INTERVAL ${interval}) AND NOW()
|
||||
AND hashrates.type = 'daily'`;
|
||||
} else {
|
||||
query += ` WHERE hashrates.type = 'daily'`;
|
||||
}
|
||||
|
||||
query += ` GROUP BY UNIX_TIMESTAMP(hashrate_timestamp) DIV ${86400}`;
|
||||
query += ` ORDER by hashrate_timestamp`;
|
||||
|
||||
try {
|
||||
const [rows]: any[] = await DB.query(query);
|
||||
return rows;
|
||||
} catch (e) {
|
||||
logger.err('Cannot fetch network hashrate history. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getWeeklyHashrateTimestamps(): Promise<number[]> {
|
||||
const query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp
|
||||
FROM hashrates
|
||||
@@ -75,6 +104,9 @@ class HashratesRepository {
|
||||
interval = Common.getSqlInterval(interval);
|
||||
|
||||
const topPoolsId = (await PoolsRepository.$getPoolsInfo('1w')).map((pool) => pool.poolId);
|
||||
if (topPoolsId.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let query = `SELECT UNIX_TIMESTAMP(hashrate_timestamp) as timestamp, avg_hashrate as avgHashrate, share, pools.name as poolName
|
||||
FROM hashrates
|
||||
|
||||
52
backend/src/repositories/PricesRepository.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { Prices } from '../tasks/price-updater';
|
||||
|
||||
class PricesRepository {
|
||||
public async $savePrices(time: number, prices: Prices): Promise<void> {
|
||||
if (prices.USD === -1) {
|
||||
// Some historical price entries have no USD price, so we just ignore them to avoid future UX issues
|
||||
// As of today there are only 4 (on 2013-09-05, 2013-09-19, 2013-09-12 and 2013-09-26) so that's fine
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await DB.query(`
|
||||
INSERT INTO prices(time, USD, EUR, GBP, CAD, CHF, AUD, JPY)
|
||||
VALUE (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ? )`,
|
||||
[time, prices.USD, prices.EUR, prices.GBP, prices.CAD, prices.CHF, prices.AUD, prices.JPY]
|
||||
);
|
||||
} catch (e: any) {
|
||||
logger.err(`Cannot save exchange rate into db. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
public async $getOldestPriceTime(): Promise<number> {
|
||||
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time LIMIT 1`);
|
||||
return oldestRow[0] ? oldestRow[0].time : 0;
|
||||
}
|
||||
|
||||
public async $getLatestPriceId(): Promise<number | null> {
|
||||
const [oldestRow] = await DB.query(`SELECT id from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
|
||||
return oldestRow[0] ? oldestRow[0].id : null;
|
||||
}
|
||||
|
||||
public async $getLatestPriceTime(): Promise<number> {
|
||||
const [oldestRow] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time DESC LIMIT 1`);
|
||||
return oldestRow[0] ? oldestRow[0].time : 0;
|
||||
}
|
||||
|
||||
public async $getPricesTimes(): Promise<number[]> {
|
||||
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time from prices WHERE USD != -1 ORDER BY time`);
|
||||
return times.map(time => time.time);
|
||||
}
|
||||
|
||||
public async $getPricesTimesAndId(): Promise<number[]> {
|
||||
const [times]: any[] = await DB.query(`SELECT UNIX_TIMESTAMP(time) as time, id, USD from prices ORDER BY time`);
|
||||
return times;
|
||||
}
|
||||
}
|
||||
|
||||
export default new PricesRepository();
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
import DB from '../database';
|
||||
import logger from '../logger';
|
||||
import { IConversionRates } from '../mempool.interfaces';
|
||||
|
||||
class RatesRepository {
|
||||
public async $saveRate(height: number, rates: IConversionRates) {
|
||||
try {
|
||||
await DB.query(`INSERT INTO rates(height, bisq_rates) VALUE (?, ?)`, [height, JSON.stringify(rates)]);
|
||||
} catch (e: any) {
|
||||
if (e.errno === 1062) { // ER_DUP_ENTRY - This scenario is possible upon node backend restart
|
||||
logger.debug(`Rate already exists for block ${height}, ignoring`);
|
||||
} else {
|
||||
logger.err(`Cannot save exchange rate into db for block ${height}. Reason: ` + (e instanceof Error ? e.message : e));
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default new RatesRepository();
|
||||
|
||||
File diff suppressed because it is too large
372
backend/src/tasks/lightning/network-sync.service.ts
Normal file
@@ -0,0 +1,372 @@
|
||||
import DB from '../../database';
|
||||
import logger from '../../logger';
|
||||
import channelsApi from '../../api/explorer/channels.api';
|
||||
import bitcoinApi from '../../api/bitcoin/bitcoin-api-factory';
|
||||
import config from '../../config';
|
||||
import { IEsploraApi } from '../../api/bitcoin/esplora-api.interface';
|
||||
import { ILightningApi } from '../../api/lightning/lightning-api.interface';
|
||||
import { $lookupNodeLocation } from './sync-tasks/node-locations';
|
||||
import lightningApi from '../../api/lightning/lightning-api-factory';
|
||||
import nodesApi from '../../api/explorer/nodes.api';
|
||||
import { ResultSetHeader } from 'mysql2';
|
||||
import fundingTxFetcher from './sync-tasks/funding-tx-fetcher';
|
||||
|
||||
class NetworkSyncService {
|
||||
loggerTimer = 0;
|
||||
|
||||
constructor() {}
|
||||
|
||||
public async $startService(): Promise<void> {
|
||||
logger.info('Starting lightning network sync service');
|
||||
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
|
||||
await this.$runTasks();
|
||||
}
|
||||
|
||||
private async $runTasks(): Promise<void> {
|
||||
try {
|
||||
logger.info(`Updating nodes and channels`);
|
||||
|
||||
const networkGraph = await lightningApi.$getNetworkGraph();
|
||||
if (networkGraph.nodes.length === 0 || networkGraph.edges.length === 0) {
|
||||
logger.info(`LN Network graph is empty, retrying in 10 seconds`);
|
||||
setTimeout(() => { this.$runTasks(); }, 10000);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.$updateNodesList(networkGraph.nodes);
|
||||
await this.$updateChannelsList(networkGraph.edges);
|
||||
await this.$deactivateChannelsWithoutActiveNodes();
|
||||
await this.$lookUpCreationDateFromChain();
|
||||
await this.$updateNodeFirstSeen();
|
||||
await this.$scanForClosedChannels();
|
||||
if (config.MEMPOOL.BACKEND === 'esplora') {
|
||||
await this.$runClosedChannelsForensics();
|
||||
}
|
||||
|
||||
} catch (e) {
|
||||
logger.err('$runTasks() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
|
||||
setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.GRAPH_REFRESH_INTERVAL);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the `nodes` table to reflect the current network graph state
|
||||
*/
|
||||
private async $updateNodesList(nodes: ILightningApi.Node[]): Promise<void> {
|
||||
let progress = 0;
|
||||
|
||||
const graphNodesPubkeys: string[] = [];
|
||||
for (const node of nodes) {
|
||||
await nodesApi.$saveNode(node);
|
||||
graphNodesPubkeys.push(node.pub_key);
|
||||
++progress;
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating node ${progress}/${nodes.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
logger.info(`${progress} nodes updated`);
|
||||
|
||||
// If a node is not present in the graph, mark it as inactive
|
||||
nodesApi.$setNodesInactive(graphNodesPubkeys);
|
||||
|
||||
if (config.MAXMIND.ENABLED) {
|
||||
$lookupNodeLocation();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the `channels` table to reflect the current network graph state
|
||||
*/
|
||||
private async $updateChannelsList(channels: ILightningApi.Channel[]): Promise<void> {
|
||||
try {
|
||||
let progress = 0;
|
||||
|
||||
const graphChannelsIds: string[] = [];
|
||||
for (const channel of channels) {
|
||||
await channelsApi.$saveChannel(channel);
|
||||
graphChannelsIds.push(channel.channel_id);
|
||||
++progress;
|
||||
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating channel ${progress}/${channels.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`${progress} channels updated`);
|
||||
|
||||
// If a channel is not present in the graph, mark it as inactive
|
||||
channelsApi.$setChannelsInactive(graphChannelsIds);
|
||||
} catch (e) {
|
||||
logger.err(`Cannot update channel list. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// This method looks up the creation date of the earliest channel of each node
|
||||
// and updates the node's first seen date to that date, so it reflects the earliest known activity
|
||||
private async $updateNodeFirstSeen(): Promise<void> {
|
||||
let progress = 0;
|
||||
let updated = 0;
|
||||
|
||||
try {
|
||||
const [nodes]: any[] = await DB.query(`
|
||||
SELECT nodes.public_key, UNIX_TIMESTAMP(nodes.first_seen) AS first_seen,
|
||||
(
|
||||
SELECT MIN(UNIX_TIMESTAMP(created))
|
||||
FROM channels
|
||||
WHERE channels.node1_public_key = nodes.public_key
|
||||
) AS created1,
|
||||
(
|
||||
SELECT MIN(UNIX_TIMESTAMP(created))
|
||||
FROM channels
|
||||
WHERE channels.node2_public_key = nodes.public_key
|
||||
) AS created2
|
||||
FROM nodes
|
||||
`);
|
||||
|
||||
for (const node of nodes) {
|
||||
const lowest = Math.min(
|
||||
node.created1 ?? Number.MAX_SAFE_INTEGER,
|
||||
node.created2 ?? Number.MAX_SAFE_INTEGER,
|
||||
node.first_seen ?? Number.MAX_SAFE_INTEGER
|
||||
);
|
||||
if (lowest < node.first_seen) {
|
||||
const query = `UPDATE nodes SET first_seen = FROM_UNIXTIME(?) WHERE public_key = ?`;
|
||||
const params = [lowest, node.public_key];
|
||||
await DB.query(query, params);
|
||||
}
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating node first seen date ${progress}/${nodes.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
++updated;
|
||||
}
|
||||
}
|
||||
logger.info(`Updated ${updated} node first seen dates`);
|
||||
} catch (e) {
|
||||
logger.err('$updateNodeFirstSeen() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $lookUpCreationDateFromChain(): Promise<void> {
|
||||
let progress = 0;
|
||||
|
||||
logger.info(`Running channel creation date lookup`);
|
||||
try {
|
||||
const channels = await channelsApi.$getChannelsWithoutCreatedDate();
|
||||
for (const channel of channels) {
|
||||
const transaction = await fundingTxFetcher.$fetchChannelOpenTx(channel.short_id);
|
||||
await DB.query(`
|
||||
UPDATE channels SET created = FROM_UNIXTIME(?) WHERE channels.id = ?`,
|
||||
[transaction.timestamp, channel.id]
|
||||
);
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating channel creation date ${progress}/${channels.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
logger.info(`Updated ${channels.length} channels' creation date`);
|
||||
} catch (e) {
|
||||
logger.err('$lookUpCreationDateFromChain() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If a channel does not have any active node linked to it, then also
|
||||
* mark that channel as inactive
|
||||
*/
|
||||
private async $deactivateChannelsWithoutActiveNodes(): Promise<void> {
|
||||
logger.info(`Finding channels whose nodes are offline`);
|
||||
|
||||
try {
|
||||
const result = await DB.query<ResultSetHeader>(`
|
||||
UPDATE channels
|
||||
SET status = 0
|
||||
WHERE channels.status = 1
|
||||
AND (
|
||||
(
|
||||
SELECT COUNT(*)
|
||||
FROM nodes
|
||||
WHERE nodes.public_key = channels.node1_public_key
|
||||
AND nodes.status = 1
|
||||
) = 0
|
||||
OR (
|
||||
SELECT COUNT(*)
|
||||
FROM nodes
|
||||
WHERE nodes.public_key = channels.node2_public_key
|
||||
AND nodes.status = 1
|
||||
) = 0)
|
||||
`);
|
||||
|
||||
if ((result[0].changedRows ?? 0) > 0) {
|
||||
logger.info(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
|
||||
} else {
|
||||
logger.debug(`Marked ${result[0].changedRows} channels as inactive because they are not linked to any active node`);
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err('$deactivateChannelsWithoutActiveNodes() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private async $scanForClosedChannels(): Promise<void> {
|
||||
let progress = 0;
|
||||
|
||||
try {
|
||||
logger.info(`Starting closed channels scan...`);
|
||||
const channels = await channelsApi.$getChannelsByStatus(0);
|
||||
for (const channel of channels) {
|
||||
const spendingTx = await bitcoinApi.$getOutspend(channel.transaction_id, channel.transaction_vout);
|
||||
if (spendingTx.spent === true && spendingTx.status?.confirmed === true) {
|
||||
logger.debug('Marking channel: ' + channel.id + ' as closed.');
|
||||
await DB.query(`UPDATE channels SET status = 2, closing_date = FROM_UNIXTIME(?) WHERE id = ?`,
|
||||
[spendingTx.status.block_time, channel.id]);
|
||||
if (spendingTx.txid && !channel.closing_transaction_id) {
|
||||
await DB.query(`UPDATE channels SET closing_transaction_id = ? WHERE id = ?`, [spendingTx.txid, channel.id]);
|
||||
}
|
||||
}
|
||||
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Checking if channel has been closed ${progress}/${channels.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
logger.info(`Closed channels scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$scanForClosedChannels() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
1. Mutually closed
|
||||
2. Forced closed
|
||||
3. Forced closed with penalty
|
||||
*/
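// For reference, these are the numeric codes written to channels.closing_reason below,
// e.g. (illustrative constant, not part of this change):
// const ClosingReason = { Mutual: 1, Force: 2, ForceWithPenalty: 3 } as const;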
|
||||
|
||||
private async $runClosedChannelsForensics(): Promise<void> {
|
||||
if (!config.ESPLORA.REST_API_URL) {
|
||||
return;
|
||||
}
|
||||
|
||||
let progress = 0;
|
||||
|
||||
try {
|
||||
logger.info(`Started running closed channel forensics...`);
|
||||
const channels = await channelsApi.$getClosedChannelsWithoutReason();
|
||||
for (const channel of channels) {
|
||||
let reason = 0;
|
||||
// Only Esplora backend can retrieve spent transaction outputs
|
||||
const outspends = await bitcoinApi.$getOutspends(channel.closing_transaction_id);
|
||||
const lightningScriptReasons: number[] = [];
|
||||
for (const outspend of outspends) {
|
||||
if (outspend.spent && outspend.txid) {
|
||||
const spendingTx = await bitcoinApi.$getRawTransaction(outspend.txid);
|
||||
const lightningScript = this.findLightningScript(spendingTx.vin[outspend.vin || 0]);
|
||||
lightningScriptReasons.push(lightningScript);
|
||||
}
|
||||
}
|
||||
if (lightningScriptReasons.length === outspends.length
|
||||
&& lightningScriptReasons.filter((r) => r === 1).length === outspends.length) {
|
||||
reason = 1;
|
||||
} else {
|
||||
const filteredReasons = lightningScriptReasons.filter((r) => r !== 1);
|
||||
if (filteredReasons.length) {
|
||||
if (filteredReasons.some((r) => r === 2 || r === 4)) {
|
||||
reason = 3;
|
||||
} else {
|
||||
reason = 2;
|
||||
}
|
||||
} else {
|
||||
/*
|
||||
We can detect a commitment transaction (force close) by reading Sequence and Locktime
|
||||
https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction
|
||||
*/
|
||||
const closingTx = await bitcoinApi.$getRawTransaction(channel.closing_transaction_id);
|
||||
const sequenceHex: string = closingTx.vin[0].sequence.toString(16);
|
||||
const locktimeHex: string = closingTx.locktime.toString(16);
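// Per BOLT #3, a commitment transaction sets the top 8 bits of nSequence to 0x80 and the top 8 bits
// of nLockTime to 0x20, which is what the hex-prefix check below detects.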
|
||||
if (sequenceHex.substring(0, 2) === '80' && locktimeHex.substring(0, 2) === '20') {
|
||||
reason = 2; // Here we can't be sure if it's a penalty or not
|
||||
} else {
|
||||
reason = 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (reason) {
|
||||
logger.debug('Setting closing reason ' + reason + ' for channel: ' + channel.id + '.');
|
||||
await DB.query(`UPDATE channels SET closing_reason = ? WHERE id = ?`, [reason, channel.id]);
|
||||
}
|
||||
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - this.loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Running closed channel forensics ${progress}/${channels.length}`);
|
||||
this.loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
logger.info(`Closed channels forensics scan complete.`);
|
||||
} catch (e) {
|
||||
logger.err('$runClosedChannelsForensics() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
private findLightningScript(vin: IEsploraApi.Vin): number {
|
||||
const topElement = vin.witness[vin.witness.length - 2];
|
||||
if (/^OP_IF OP_PUSHBYTES_33 \w{66} OP_ELSE OP_PUSH(NUM_\d+|BYTES_(1 \w{2}|2 \w{4})) OP_CSV OP_DROP OP_PUSHBYTES_33 \w{66} OP_ENDIF OP_CHECKSIG$/.test(vin.inner_witnessscript_asm)) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#commitment-transaction-outputs
|
||||
if (topElement === '01') {
|
||||
// top element is '01' to get in the revocation path
|
||||
// 'Revoked Lightning Force Close';
|
||||
// Penalty force closed
|
||||
return 2;
|
||||
} else {
|
||||
// top element is '', this is a delayed to_local output
|
||||
// 'Lightning Force Close';
|
||||
return 3;
|
||||
}
|
||||
} else if (
|
||||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_NOTIF OP_DROP OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm) ||
|
||||
/^OP_DUP OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUAL OP_IF OP_CHECKSIG OP_ELSE OP_PUSHBYTES_33 \w{66} OP_SWAP OP_SIZE OP_PUSHBYTES_1 20 OP_EQUAL OP_IF OP_HASH160 OP_PUSHBYTES_20 \w{40} OP_EQUALVERIFY OP_PUSHNUM_2 OP_SWAP OP_PUSHBYTES_33 \w{66} OP_PUSHNUM_2 OP_CHECKMULTISIG OP_ELSE OP_DROP OP_PUSHBYTES_3 \w{6} OP_CLTV OP_DROP OP_CHECKSIG OP_ENDIF (OP_PUSHNUM_1 OP_CSV OP_DROP |)OP_ENDIF$/.test(vin.inner_witnessscript_asm)
|
||||
) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#offered-htlc-outputs
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#received-htlc-outputs
|
||||
if (topElement.length === 66) {
|
||||
// top element is a public key
|
||||
// 'Revoked Lightning HTLC'; Penalty force closed
|
||||
return 4;
|
||||
} else if (topElement) {
|
||||
// top element is a preimage
|
||||
// 'Lightning HTLC';
|
||||
return 5;
|
||||
} else {
|
||||
// top element is '' to get in the expiry of the script
|
||||
// 'Expired Lightning HTLC';
|
||||
return 6;
|
||||
}
|
||||
} else if (/^OP_PUSHBYTES_33 \w{66} OP_CHECKSIG OP_IFDUP OP_NOTIF OP_PUSHNUM_16 OP_CSV OP_ENDIF$/.test(vin.inner_witnessscript_asm)) {
|
||||
// https://github.com/lightning/bolts/blob/master/03-transactions.md#to_local_anchor-and-to_remote_anchor-output-option_anchors
|
||||
if (topElement) {
|
||||
// top element is a signature
|
||||
// 'Lightning Anchor';
|
||||
return 7;
|
||||
} else {
|
||||
// top element is '', it has been swept after 16 blocks
|
||||
// 'Swept Lightning Anchor';
|
||||
return 8;
|
||||
}
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
export default new NetworkSyncService();
|
||||
34
backend/src/tasks/lightning/stats-updater.service.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import logger from '../../logger';
|
||||
import lightningApi from '../../api/lightning/lightning-api-factory';
|
||||
import LightningStatsImporter from './sync-tasks/stats-importer';
|
||||
import config from '../../config';
|
||||
import { Common } from '../../api/common';
|
||||
|
||||
class LightningStatsUpdater {
|
||||
public async $startService(): Promise<void> {
|
||||
logger.info('Starting Lightning Stats service');
|
||||
|
||||
await this.$runTasks();
|
||||
LightningStatsImporter.$run();
|
||||
}
|
||||
|
||||
private async $runTasks(): Promise<void> {
|
||||
await this.$logStatsDaily();
|
||||
|
||||
setTimeout(() => { this.$runTasks(); }, 1000 * config.LIGHTNING.STATS_REFRESH_INTERVAL);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the latest entry for each node every config.LIGHTNING.STATS_REFRESH_INTERVAL seconds
|
||||
*/
|
||||
private async $logStatsDaily(): Promise<void> {
|
||||
const date = new Date();
|
||||
Common.setDateMidnight(date);
|
||||
const networkGraph = await lightningApi.$getNetworkGraph();
|
||||
await LightningStatsImporter.computeNetworkStats(date.getTime() / 1000, networkGraph);
|
||||
|
||||
logger.info(`Updated latest network stats`);
|
||||
}
|
||||
}
|
||||
|
||||
export default new LightningStatsUpdater();
|
||||
118
backend/src/tasks/lightning/sync-tasks/funding-tx-fetcher.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import { existsSync, promises } from 'fs';
|
||||
import bitcoinClient from '../../../api/bitcoin/bitcoin-client';
|
||||
import { Common } from '../../../api/common';
|
||||
import config from '../../../config';
|
||||
import logger from '../../../logger';
|
||||
|
||||
const fsPromises = promises;
|
||||
|
||||
const BLOCKS_CACHE_MAX_SIZE = 100;
|
||||
const CACHE_FILE_NAME = config.MEMPOOL.CACHE_DIR + '/ln-funding-txs-cache.json';
|
||||
|
||||
class FundingTxFetcher {
|
||||
private running = false;
|
||||
private blocksCache = {};
|
||||
private channelNewlyProcessed = 0;
|
||||
public fundingTxCache = {};
|
||||
|
||||
async $init(): Promise<void> {
|
||||
// Load funding tx disk cache
|
||||
if (Object.keys(this.fundingTxCache).length === 0 && existsSync(CACHE_FILE_NAME)) {
|
||||
try {
|
||||
this.fundingTxCache = JSON.parse(await fsPromises.readFile(CACHE_FILE_NAME, 'utf-8'));
|
||||
} catch (e) {
|
||||
logger.err(`Unable to parse channels funding txs disk cache. Starting from scratch`);
|
||||
this.fundingTxCache = {};
|
||||
}
|
||||
logger.debug(`Imported ${Object.keys(this.fundingTxCache).length} funding tx amounts from the disk cache`);
|
||||
}
|
||||
}
|
||||
|
||||
async $fetchChannelsFundingTxs(channelIds: string[]): Promise<void> {
|
||||
if (this.running) {
|
||||
return;
|
||||
}
|
||||
this.running = true;
|
||||
|
||||
const globalTimer = new Date().getTime() / 1000;
|
||||
let cacheTimer = new Date().getTime() / 1000;
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
let channelProcessed = 0;
|
||||
this.channelNewlyProcessed = 0;
|
||||
for (const channelId of channelIds) {
|
||||
await this.$fetchChannelOpenTx(channelId);
|
||||
++channelProcessed;
|
||||
|
||||
let elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
elapsedSeconds = Math.round((new Date().getTime() / 1000) - globalTimer);
|
||||
logger.info(`Indexing channels funding tx ${channelProcessed + 1} of ${channelIds.length} ` +
|
||||
`(${Math.floor(channelProcessed / channelIds.length * 10000) / 100}%) | ` +
|
||||
`elapsed: ${elapsedSeconds} seconds`
|
||||
);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
elapsedSeconds = Math.round((new Date().getTime() / 1000) - cacheTimer);
|
||||
if (elapsedSeconds > 60) {
|
||||
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
|
||||
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
|
||||
cacheTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.channelNewlyProcessed > 0) {
|
||||
logger.info(`Indexed ${this.channelNewlyProcessed} additional channels funding tx`);
|
||||
logger.debug(`Saving ${Object.keys(this.fundingTxCache).length} funding txs cache into disk`);
|
||||
fsPromises.writeFile(CACHE_FILE_NAME, JSON.stringify(this.fundingTxCache));
|
||||
}
|
||||
|
||||
this.running = false;
|
||||
}
|
||||
|
||||
public async $fetchChannelOpenTx(channelId: string): Promise<{timestamp: number, txid: string, value: number}> {
|
||||
if (channelId.indexOf('x') === -1) {
|
||||
channelId = Common.channelIntegerIdToShortId(channelId);
|
||||
}
|
||||
|
||||
if (this.fundingTxCache[channelId]) {
|
||||
return this.fundingTxCache[channelId];
|
||||
}
|
||||
|
||||
const parts = channelId.split('x');
|
||||
const blockHeight = parts[0];
|
||||
const txIdx = parts[1];
|
||||
const outputIdx = parts[2];
|
||||
|
||||
let block = this.blocksCache[blockHeight];
|
||||
// Fetch it from core
|
||||
if (!block) {
|
||||
const blockHash = await bitcoinClient.getBlockHash(parseInt(blockHeight, 10));
|
||||
block = await bitcoinClient.getBlock(blockHash, 1);
|
||||
}
|
||||
this.blocksCache[block.height] = block;
|
||||
|
||||
const blocksCacheHashes = Object.keys(this.blocksCache).sort((a, b) => parseInt(b) - parseInt(a)).reverse();
|
||||
if (blocksCacheHashes.length > BLOCKS_CACHE_MAX_SIZE) {
|
||||
for (let i = 0; i < 10; ++i) {
|
||||
delete this.blocksCache[blocksCacheHashes[i]];
|
||||
}
|
||||
}
|
||||
|
||||
const txid = block.tx[txIdx];
|
||||
const rawTx = await bitcoinClient.getRawTransaction(txid);
|
||||
const tx = await bitcoinClient.decodeRawTransaction(rawTx);
|
||||
|
||||
this.fundingTxCache[channelId] = {
|
||||
timestamp: block.time,
|
||||
txid: txid,
|
||||
value: tx.vout[outputIdx].value,
|
||||
};
|
||||
|
||||
++this.channelNewlyProcessed;
|
||||
|
||||
return this.fundingTxCache[channelId];
|
||||
}
|
||||
}
|
||||
|
||||
export default new FundingTxFetcher;
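
For reference, $fetchChannelOpenTx above decodes a short channel id of the form `<blockHeight>x<txIndex>x<outputIndex>` (that is what the split('x') call relies on). A minimal standalone sketch of just that decoding step, under that assumed format; the helper name is illustrative and not part of the codebase:

// Minimal sketch of the short channel id parsing done in $fetchChannelOpenTx above.
// Format assumed from the split('x') logic: "<blockHeight>x<txIndex>x<outputIndex>".
interface ShortChannelIdParts {
  blockHeight: number;
  txIndex: number;
  outputIndex: number;
}

function decodeShortChannelId(shortId: string): ShortChannelIdParts {
  const [blockHeight, txIndex, outputIndex] = shortId.split('x').map((part) => parseInt(part, 10));
  return { blockHeight, txIndex, outputIndex };
}

console.log(decodeShortChannelId('700000x1234x1')); // { blockHeight: 700000, txIndex: 1234, outputIndex: 1 }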
|
||||
111  backend/src/tasks/lightning/sync-tasks/node-locations.ts  Normal file
@@ -0,0 +1,111 @@
|
||||
import * as net from 'net';
|
||||
import maxmind, { CityResponse, AsnResponse, IspResponse } from 'maxmind';
|
||||
import nodesApi from '../../../api/explorer/nodes.api';
|
||||
import config from '../../../config';
|
||||
import DB from '../../../database';
|
||||
import logger from '../../../logger';
|
||||
|
||||
export async function $lookupNodeLocation(): Promise<void> {
|
||||
let loggerTimer = new Date().getTime() / 1000;
|
||||
let progress = 0;
|
||||
|
||||
logger.info(`Running node location updater using Maxmind`);
|
||||
try {
|
||||
const nodes = await nodesApi.$getAllNodes();
|
||||
const lookupCity = await maxmind.open<CityResponse>(config.MAXMIND.GEOLITE2_CITY);
|
||||
const lookupAsn = await maxmind.open<AsnResponse>(config.MAXMIND.GEOLITE2_ASN);
|
||||
const lookupIsp = await maxmind.open<IspResponse>(config.MAXMIND.GEOIP2_ISP);
|
||||
|
||||
for (const node of nodes) {
|
||||
const sockets: string[] = node.sockets.split(',');
|
||||
for (const socket of sockets) {
|
||||
const ip = socket.substring(0, socket.lastIndexOf(':')).replace('[', '').replace(']', '');
|
||||
const hasClearnet = [4, 6].includes(net.isIP(ip));
|
||||
|
||||
if (hasClearnet && ip !== '127.0.1.1' && ip !== '127.0.0.1') {
|
||||
const city = lookupCity.get(ip);
|
||||
const asn = lookupAsn.get(ip);
|
||||
const isp = lookupIsp.get(ip);
|
||||
|
||||
if (city && (asn || isp)) {
|
||||
const query = `
|
||||
UPDATE nodes SET
|
||||
as_number = ?,
|
||||
city_id = ?,
|
||||
country_id = ?,
|
||||
subdivision_id = ?,
|
||||
longitude = ?,
|
||||
latitude = ?,
|
||||
accuracy_radius = ?
|
||||
WHERE public_key = ?
|
||||
`;
|
||||
|
||||
const params = [
|
||||
isp?.autonomous_system_number ?? asn?.autonomous_system_number,
|
||||
city.city?.geoname_id,
|
||||
city.country?.geoname_id,
|
||||
city.subdivisions ? city.subdivisions[0].geoname_id : null,
|
||||
city.location?.longitude,
|
||||
city.location?.latitude,
|
||||
city.location?.accuracy_radius,
|
||||
node.public_key
|
||||
];
|
||||
await DB.query(query, params);
|
||||
|
||||
// Store Continent
|
||||
if (city.continent?.geoname_id) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'continent', ?)`,
|
||||
[city.continent?.geoname_id, JSON.stringify(city.continent?.names)]);
|
||||
}
|
||||
|
||||
// Store Country
|
||||
if (city.country?.geoname_id) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country', ?)`,
|
||||
[city.country?.geoname_id, JSON.stringify(city.country?.names)]);
|
||||
}
|
||||
|
||||
// Store Country ISO code
|
||||
if (city.country?.iso_code) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country_iso_code', ?)`,
|
||||
[city.country?.geoname_id, city.country?.iso_code]);
|
||||
}
|
||||
|
||||
// Store Division
|
||||
if (city.subdivisions && city.subdivisions[0]) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'division', ?)`,
|
||||
[city.subdivisions[0].geoname_id, JSON.stringify(city.subdivisions[0]?.names)]);
|
||||
}
|
||||
|
||||
// Store City
|
||||
if (city.city?.geoname_id) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'city', ?)`,
|
||||
[city.city?.geoname_id, JSON.stringify(city.city?.names)]);
|
||||
}
|
||||
|
||||
// Store AS name
|
||||
if (isp?.autonomous_system_organization ?? asn?.autonomous_system_organization) {
|
||||
await DB.query(
|
||||
`INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'as_organization', ?)`,
|
||||
[isp?.autonomous_system_number ?? asn?.autonomous_system_number, JSON.stringify(isp?.isp ?? asn?.autonomous_system_organization)]);
|
||||
}
|
||||
}
|
||||
|
||||
++progress;
|
||||
const elapsedSeconds = Math.round((new Date().getTime() / 1000) - loggerTimer);
|
||||
if (elapsedSeconds > 10) {
|
||||
logger.info(`Updating node location data ${progress}/${nodes.length}`);
|
||||
loggerTimer = new Date().getTime() / 1000;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
logger.info(`Updated location data for ${progress} nodes`);
|
||||
} catch (e) {
|
||||
logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
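
The socket-to-IP normalisation above (strip the trailing ":port", drop any IPv6 brackets, then keep only addresses that pass net.isIP()) is easy to get wrong for IPv6; here is a small standalone sketch of that step as a hedged illustration, with the helper name being illustrative only:

import * as net from 'net';

// Sketch of the normalisation performed before the Maxmind lookups in $lookupNodeLocation above.
function extractClearnetIp(socket: string): string | null {
  const ip = socket.substring(0, socket.lastIndexOf(':')).replace('[', '').replace(']', '');
  return [4, 6].includes(net.isIP(ip)) ? ip : null;
}

console.log(extractClearnetIp('8.8.8.8:9735'));        // '8.8.8.8'
console.log(extractClearnetIp('[2001:db8::1]:9735'));  // '2001:db8::1'
console.log(extractClearnetIp('somenode.onion:9735')); // null (not a clearnet IP, skipped above)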
|
||||
411  backend/src/tasks/lightning/sync-tasks/stats-importer.ts  Normal file
@@ -0,0 +1,411 @@
|
||||
import DB from '../../../database';
|
||||
import { promises } from 'fs';
|
||||
import { XMLParser } from 'fast-xml-parser';
|
||||
import logger from '../../../logger';
|
||||
import fundingTxFetcher from './funding-tx-fetcher';
|
||||
import config from '../../../config';
|
||||
|
||||
const fsPromises = promises;
|
||||
|
||||
interface Node {
|
||||
id: string;
|
||||
timestamp: number;
|
||||
features: string;
|
||||
rgb_color: string;
|
||||
alias: string;
|
||||
addresses: unknown[];
|
||||
out_degree: number;
|
||||
in_degree: number;
|
||||
}
|
||||
|
||||
interface Channel {
|
||||
channel_id: string;
|
||||
node1_pub: string;
|
||||
node2_pub: string;
|
||||
timestamp: number;
|
||||
features: string;
|
||||
fee_base_msat: number;
|
||||
fee_rate_milli_msat: number;
|
||||
htlc_minimim_msat: number;
|
||||
cltv_expiry_delta: number;
|
||||
htlc_maximum_msat: number;
|
||||
}
|
||||
|
||||
class LightningStatsImporter {
|
||||
topologiesFolder = config.LIGHTNING.TOPOLOGY_FOLDER;
|
||||
parser = new XMLParser();
|
||||
|
||||
async $run(): Promise<void> {
|
||||
logger.info(`Importing historical lightning stats`);
|
||||
|
||||
const [channels]: any[] = await DB.query('SELECT short_id from channels;');
|
||||
logger.info('Caching funding txs for currently existing channels');
|
||||
await fundingTxFetcher.$fetchChannelsFundingTxs(channels.map(channel => channel.short_id));
|
||||
|
||||
await this.$importHistoricalLightningStats();
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate LN network stats for one day
|
||||
*/
|
||||
public async computeNetworkStats(timestamp: number, networkGraph): Promise<unknown> {
|
||||
// Node counts and network shares
|
||||
let clearnetNodes = 0;
|
||||
let torNodes = 0;
|
||||
let clearnetTorNodes = 0;
|
||||
let unannouncedNodes = 0;
|
||||
|
||||
for (const node of networkGraph.nodes) {
|
||||
let hasOnion = false;
|
||||
let hasClearnet = false;
|
||||
let isUnnanounced = true;
|
||||
|
||||
for (const socket of (node.addresses ?? [])) {
|
||||
hasOnion = hasOnion || ['torv2', 'torv3'].includes(socket.network);
|
||||
hasClearnet = hasClearnet || ['ipv4', 'ipv6'].includes(socket.network);
|
||||
}
|
||||
if (hasOnion && hasClearnet) {
|
||||
clearnetTorNodes++;
|
||||
isUnnanounced = false;
|
||||
} else if (hasOnion) {
|
||||
torNodes++;
|
||||
isUnnanounced = false;
|
||||
} else if (hasClearnet) {
|
||||
clearnetNodes++;
|
||||
isUnnanounced = false;
|
||||
}
|
||||
if (isUnnanounced) {
|
||||
unannouncedNodes++;
|
||||
}
|
||||
}
|
||||
|
||||
// Channels and node historical stats
|
||||
const nodeStats = {};
|
||||
let capacity = 0;
|
||||
let avgFeeRate = 0;
|
||||
let avgBaseFee = 0;
|
||||
const capacities: number[] = [];
|
||||
const feeRates: number[] = [];
|
||||
const baseFees: number[] = [];
|
||||
const alreadyCountedChannels = {};
|
||||
|
||||
for (const channel of networkGraph.edges) {
|
||||
let short_id = channel.channel_id;
|
||||
if (short_id.indexOf('/') !== -1) {
|
||||
short_id = short_id.slice(0, -2);
|
||||
}
|
||||
|
||||
const tx = await fundingTxFetcher.$fetchChannelOpenTx(short_id);
|
||||
if (!tx) {
|
||||
logger.err(`Unable to fetch funding tx for channel ${short_id}. Capacity and creation date are unknown. Skipping channel.`);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!nodeStats[channel.node1_pub]) {
|
||||
nodeStats[channel.node1_pub] = {
|
||||
capacity: 0,
|
||||
channels: 0,
|
||||
};
|
||||
}
|
||||
if (!nodeStats[channel.node2_pub]) {
|
||||
nodeStats[channel.node2_pub] = {
|
||||
capacity: 0,
|
||||
channels: 0,
|
||||
};
|
||||
}
|
||||
|
||||
if (!alreadyCountedChannels[short_id]) {
|
||||
capacity += Math.round(tx.value * 100000000);
|
||||
capacities.push(Math.round(tx.value * 100000000));
|
||||
alreadyCountedChannels[short_id] = true;
|
||||
|
||||
nodeStats[channel.node1_pub].capacity += Math.round(tx.value * 100000000);
|
||||
nodeStats[channel.node1_pub].channels++;
|
||||
nodeStats[channel.node2_pub].capacity += Math.round(tx.value * 100000000);
|
||||
nodeStats[channel.node2_pub].channels++;
|
||||
}
|
||||
|
||||
if (channel.node1_policy !== undefined) { // Coming from the node
|
||||
for (const policy of [channel.node1_policy, channel.node2_policy]) {
|
||||
if (policy && policy.fee_rate_milli_msat < 5000) {
|
||||
avgFeeRate += parseInt(policy.fee_rate_milli_msat, 10);
|
||||
feeRates.push(parseInt(policy.fee_rate_milli_msat, 10));
|
||||
}
|
||||
if (policy && policy.fee_base_msat < 5000) {
|
||||
avgBaseFee += parseInt(policy.fee_base_msat, 10);
|
||||
baseFees.push(parseInt(policy.fee_base_msat, 10));
|
||||
}
|
||||
}
|
||||
} else { // Coming from the historical import
|
||||
if (channel.fee_rate_milli_msat < 5000) {
|
||||
avgFeeRate += parseInt(channel.fee_rate_milli_msat, 10);
|
||||
feeRates.push(parseInt(channel.fee_rate_milli_msat, 10));
|
||||
}
|
||||
if (channel.fee_base_msat < 5000) {
|
||||
avgBaseFee += parseInt(channel.fee_base_msat, 10);
|
||||
baseFees.push(parseInt(channel.fee_base_msat, 10));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
avgFeeRate /= Math.max(networkGraph.edges.length, 1);
|
||||
avgBaseFee /= Math.max(networkGraph.edges.length, 1);
|
||||
const medCapacity = capacities.sort((a, b) => b - a)[Math.round(capacities.length / 2 - 1)];
|
||||
const medFeeRate = feeRates.sort((a, b) => b - a)[Math.round(feeRates.length / 2 - 1)];
|
||||
const medBaseFee = baseFees.sort((a, b) => b - a)[Math.round(baseFees.length / 2 - 1)];
|
||||
const avgCapacity = Math.round(capacity / Math.max(capacities.length, 1));
|
||||
|
||||
let query = `INSERT INTO lightning_stats(
|
||||
added,
|
||||
channel_count,
|
||||
node_count,
|
||||
total_capacity,
|
||||
tor_nodes,
|
||||
clearnet_nodes,
|
||||
unannounced_nodes,
|
||||
clearnet_tor_nodes,
|
||||
avg_capacity,
|
||||
avg_fee_rate,
|
||||
avg_base_fee_mtokens,
|
||||
med_capacity,
|
||||
med_fee_rate,
|
||||
med_base_fee_mtokens
|
||||
)
|
||||
VALUES (FROM_UNIXTIME(?), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
added = FROM_UNIXTIME(?),
|
||||
channel_count = ?,
|
||||
node_count = ?,
|
||||
total_capacity = ?,
|
||||
tor_nodes = ?,
|
||||
clearnet_nodes = ?,
|
||||
unannounced_nodes = ?,
|
||||
clearnet_tor_nodes = ?,
|
||||
avg_capacity = ?,
|
||||
avg_fee_rate = ?,
|
||||
avg_base_fee_mtokens = ?,
|
||||
med_capacity = ?,
|
||||
med_fee_rate = ?,
|
||||
med_base_fee_mtokens = ?
|
||||
`;
|
||||
|
||||
await DB.query(query, [
|
||||
timestamp,
|
||||
capacities.length,
|
||||
networkGraph.nodes.length,
|
||||
capacity,
|
||||
torNodes,
|
||||
clearnetNodes,
|
||||
unannouncedNodes,
|
||||
clearnetTorNodes,
|
||||
avgCapacity,
|
||||
avgFeeRate,
|
||||
avgBaseFee,
|
||||
medCapacity,
|
||||
medFeeRate,
|
||||
medBaseFee,
|
||||
timestamp,
|
||||
capacities.length,
|
||||
networkGraph.nodes.length,
|
||||
capacity,
|
||||
torNodes,
|
||||
clearnetNodes,
|
||||
unannouncedNodes,
|
||||
clearnetTorNodes,
|
||||
avgCapacity,
|
||||
avgFeeRate,
|
||||
avgBaseFee,
|
||||
medCapacity,
|
||||
medFeeRate,
|
||||
medBaseFee,
|
||||
]);
|
||||
|
||||
for (const public_key of Object.keys(nodeStats)) {
|
||||
query = `INSERT INTO node_stats(
|
||||
public_key,
|
||||
added,
|
||||
capacity,
|
||||
channels
|
||||
)
|
||||
VALUES (?, FROM_UNIXTIME(?), ?, ?)
|
||||
ON DUPLICATE KEY UPDATE
|
||||
added = FROM_UNIXTIME(?),
|
||||
capacity = ?,
|
||||
channels = ?
|
||||
`;
|
||||
|
||||
await DB.query(query, [
|
||||
public_key,
|
||||
timestamp,
|
||||
nodeStats[public_key].capacity,
|
||||
nodeStats[public_key].channels,
|
||||
timestamp,
|
||||
nodeStats[public_key].capacity,
|
||||
nodeStats[public_key].channels,
|
||||
]);
|
||||
}
|
||||
|
||||
return {
|
||||
added: timestamp,
|
||||
node_count: networkGraph.nodes.length
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Import topology files LN historical data into the database
|
||||
*/
|
||||
async $importHistoricalLightningStats(): Promise<void> {
|
||||
let latestNodeCount = 1;
|
||||
|
||||
const fileList = await fsPromises.readdir(this.topologiesFolder);
|
||||
// Insert history from the most recent to the oldest
|
||||
// This also puts the .json cached files first
|
||||
fileList.sort().reverse();
|
||||
|
||||
const [rows]: any[] = await DB.query(`
|
||||
SELECT UNIX_TIMESTAMP(added) AS added, node_count
|
||||
FROM lightning_stats
|
||||
ORDER BY added DESC
|
||||
`);
|
||||
const existingStatsTimestamps = {};
|
||||
for (const row of rows) {
|
||||
existingStatsTimestamps[row.added] = row;
|
||||
}
|
||||
|
||||
// For logging purposes
|
||||
let processed = 10;
|
||||
let totalProcessed = -1;
|
||||
|
||||
for (const filename of fileList) {
|
||||
processed++;
|
||||
totalProcessed++;
|
||||
|
||||
const timestamp = parseInt(filename.split('_')[1], 10);
|
||||
|
||||
// Stats exist already, don't calculate/insert them
|
||||
if (existingStatsTimestamps[timestamp] !== undefined) {
|
||||
latestNodeCount = existingStatsTimestamps[timestamp].node_count;
|
||||
continue;
|
||||
}
|
||||
|
||||
logger.debug(`Reading ${this.topologiesFolder}/${filename}`);
|
||||
const fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
|
||||
|
||||
let graph;
|
||||
if (filename.indexOf('.json') !== -1) {
|
||||
try {
|
||||
graph = JSON.parse(fileContent);
|
||||
} catch (e) {
|
||||
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content`);
|
||||
continue;
|
||||
}
|
||||
} else {
|
||||
graph = this.parseFile(fileContent);
|
||||
if (!graph) {
|
||||
logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content`);
|
||||
continue;
|
||||
}
|
||||
await fsPromises.writeFile(`${this.topologiesFolder}/${filename}.json`, JSON.stringify(graph));
|
||||
}
|
||||
|
||||
if (timestamp > 1556316000) {
|
||||
// "No, the reason most likely is just that I started collection in 2019,
|
||||
// so what I had before that is just the survivors from before, which weren't that many"
|
||||
const diffRatio = graph.nodes.length / latestNodeCount;
|
||||
if (diffRatio < 0.9) {
|
||||
// Ignore a drop of more than 10% in the node count, as it's probably a missing data point
|
||||
logger.debug(`Nodes count diff ratio threshold reached, ignore the data for this day ${graph.nodes.length} nodes vs ${latestNodeCount}`);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
latestNodeCount = graph.nodes.length;
|
||||
|
||||
const datestr = `${new Date(timestamp * 1000).toUTCString()} (${timestamp})`;
|
||||
logger.debug(`${datestr}: Found ${graph.nodes.length} nodes and ${graph.edges.length} channels`);
|
||||
|
||||
if (processed > 10) {
|
||||
logger.info(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
|
||||
processed = 0;
|
||||
} else {
|
||||
logger.debug(`Generating LN network stats for ${datestr}. Processed ${totalProcessed}/${fileList.length} files`);
|
||||
}
|
||||
await fundingTxFetcher.$fetchChannelsFundingTxs(graph.edges.map(channel => channel.channel_id.slice(0, -2)));
|
||||
const stat = await this.computeNetworkStats(timestamp, graph);
|
||||
|
||||
existingStatsTimestamps[timestamp] = stat;
|
||||
}
|
||||
|
||||
logger.info(`Lightning network stats historical import completed`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse the file content into XML, and return a list of nodes and channels
|
||||
*/
|
||||
private parseFile(fileContent): any {
|
||||
const graph = this.parser.parse(fileContent);
|
||||
if (Object.keys(graph).length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const nodes: Node[] = [];
|
||||
const channels: Channel[] = [];
|
||||
|
||||
// If there is only one entry, the parser does not return an array, so we override this
|
||||
if (!Array.isArray(graph.graphml.graph.node)) {
|
||||
graph.graphml.graph.node = [graph.graphml.graph.node];
|
||||
}
|
||||
if (!Array.isArray(graph.graphml.graph.edge)) {
|
||||
graph.graphml.graph.edge = [graph.graphml.graph.edge];
|
||||
}
|
||||
|
||||
for (const node of graph.graphml.graph.node) {
|
||||
if (!node.data) {
|
||||
continue;
|
||||
}
|
||||
const addresses: unknown[] = [];
|
||||
const sockets = node.data[5].split(',');
|
||||
for (const socket of sockets) {
|
||||
const parts = socket.split('://');
|
||||
addresses.push({
|
||||
network: parts[0],
|
||||
addr: parts[1],
|
||||
});
|
||||
}
|
||||
nodes.push({
|
||||
id: node.data[0],
|
||||
timestamp: node.data[1],
|
||||
features: node.data[2],
|
||||
rgb_color: node.data[3],
|
||||
alias: node.data[4],
|
||||
addresses: addresses,
|
||||
out_degree: node.data[6],
|
||||
in_degree: node.data[7],
|
||||
});
|
||||
}
|
||||
|
||||
for (const channel of graph.graphml.graph.edge) {
|
||||
if (!channel.data) {
|
||||
continue;
|
||||
}
|
||||
channels.push({
|
||||
channel_id: channel.data[0],
|
||||
node1_pub: channel.data[1],
|
||||
node2_pub: channel.data[2],
|
||||
timestamp: channel.data[3],
|
||||
features: channel.data[4],
|
||||
fee_base_msat: channel.data[5],
|
||||
fee_rate_milli_msat: channel.data[6],
|
||||
htlc_minimim_msat: channel.data[7],
|
||||
cltv_expiry_delta: channel.data[8],
|
||||
htlc_maximum_msat: channel.data[9],
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
nodes: nodes,
|
||||
edges: channels,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default new LightningStatsImporter;
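
As a usage note, $importHistoricalLightningStats derives each snapshot timestamp from the topology file name via filename.split('_')[1]. A hedged sketch of that convention follows; the "topology_" prefix is only an assumption for illustration, while the underscore/timestamp split is taken from the code above:

// Sketch of the filename-to-timestamp convention implied by filename.split('_')[1] above.
// The "topology_" prefix is illustrative; only the second underscore-separated token matters.
function timestampFromTopologyFilename(filename: string): number {
  return parseInt(filename.split('_')[1], 10);
}

console.log(timestampFromTopologyFilename('topology_1640995200'));      // 1640995200
console.log(timestampFromTopologyFilename('topology_1640995200.json')); // 1640995200 (cached copy)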
|
||||
@@ -12,12 +12,11 @@ import * as https from 'https';
|
||||
*/
|
||||
class PoolsUpdater {
|
||||
lastRun: number = 0;
|
||||
currentSha: any = undefined;
|
||||
currentSha: string | undefined = undefined;
|
||||
poolsUrl: string = config.MEMPOOL.POOLS_JSON_URL;
|
||||
treeUrl: string = config.MEMPOOL.POOLS_JSON_TREE_URL;
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
public async updatePoolsJson() {
|
||||
public async updatePoolsJson(): Promise<void> {
|
||||
if (['mainnet', 'testnet', 'signet'].includes(config.MEMPOOL.NETWORK) === false) {
|
||||
return;
|
||||
}
|
||||
@@ -32,11 +31,10 @@ class PoolsUpdater {
|
||||
|
||||
this.lastRun = now;
|
||||
|
||||
logger.info('Updating latest mining pools from Github');
|
||||
if (config.SOCKS5PROXY.ENABLED) {
|
||||
logger.info('List of public pools will be queried over the Tor network');
|
||||
logger.info(`Updating latest mining pools from ${this.poolsUrl} over the Tor network`);
|
||||
} else {
|
||||
logger.info('List of public pools will be queried over clearnet');
|
||||
logger.info(`Updating latest mining pools from ${this.poolsUrl} over clearnet`);
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -46,7 +44,7 @@ class PoolsUpdater {
|
||||
}
|
||||
|
||||
if (config.DATABASE.ENABLED === true) {
|
||||
this.currentSha = await this.getShaFromDb();
|
||||
this.currentSha = await this.getShaFromDb();
|
||||
}
|
||||
|
||||
logger.debug(`Pools.json sha | Current: ${this.currentSha} | Github: ${githubSha}`);
|
||||
@@ -54,8 +52,12 @@ class PoolsUpdater {
|
||||
return;
|
||||
}
|
||||
|
||||
logger.warn('Pools.json is outdated, fetch latest from github');
|
||||
const poolsJson = await this.query('https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json');
|
||||
if (this.currentSha === undefined) {
|
||||
logger.info(`Downloading pools.json for the first time from ${this.poolsUrl}`);
|
||||
} else {
|
||||
logger.warn(`Pools.json is outdated, fetch latest from ${this.poolsUrl}`);
|
||||
}
|
||||
const poolsJson = await this.query(this.poolsUrl);
|
||||
if (poolsJson === undefined) {
|
||||
return;
|
||||
}
|
||||
@@ -65,21 +67,21 @@ class PoolsUpdater {
|
||||
|
||||
} catch (e) {
|
||||
this.lastRun = now - (oneWeek - oneDay); // Try again in 24h instead of waiting next week
|
||||
logger.err('PoolsUpdater failed. Will try again in 24h. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('PoolsUpdater failed. Will try again in 24h. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch our latest pools.json sha from the db
|
||||
*/
|
||||
private async updateDBSha(githubSha: string) {
|
||||
private async updateDBSha(githubSha: string): Promise<void> {
|
||||
this.currentSha = githubSha;
|
||||
if (config.DATABASE.ENABLED === true) {
|
||||
try {
|
||||
await DB.query('DELETE FROM state where name="pools_json_sha"');
|
||||
await DB.query(`INSERT INTO state VALUES('pools_json_sha', NULL, '${githubSha}')`);
|
||||
} catch (e) {
|
||||
logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot save github pools.json sha into the db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -92,7 +94,7 @@ class PoolsUpdater {
|
||||
const [rows]: any[] = await DB.query('SELECT string FROM state WHERE name="pools_json_sha"');
|
||||
return (rows.length > 0 ? rows[0].string : undefined);
|
||||
} catch (e) {
|
||||
logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Cannot fetch pools.json sha from db. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
@@ -101,7 +103,7 @@ class PoolsUpdater {
|
||||
* Fetch our latest pools.json sha from github
|
||||
*/
|
||||
private async fetchPoolsSha(): Promise<string | undefined> {
|
||||
const response = await this.query('https://api.github.com/repos/mempool/mining-pools/git/trees/master');
|
||||
const response = await this.query(this.treeUrl);
|
||||
|
||||
if (response !== undefined) {
|
||||
for (const file of response['tree']) {
|
||||
@@ -111,7 +113,7 @@ class PoolsUpdater {
|
||||
}
|
||||
}
|
||||
|
||||
logger.err('Cannot to find latest pools.json sha from github api response');
|
||||
logger.err(`Cannot find "pools.json" in git tree (${this.treeUrl})`);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
@@ -125,7 +127,7 @@ class PoolsUpdater {
|
||||
};
|
||||
timeout: number;
|
||||
httpsAgent?: https.Agent;
|
||||
}
|
||||
};
|
||||
const setDelay = (secs: number = 1): Promise<void> => new Promise(resolve => setTimeout(() => resolve(), secs * 1000));
|
||||
const axiosOptions: axiosOptions = {
|
||||
headers: {
|
||||
@@ -135,7 +137,7 @@ class PoolsUpdater {
|
||||
};
|
||||
let retry = 0;
|
||||
|
||||
while(retry < config.MEMPOOL.EXTERNAL_MAX_RETRY) {
|
||||
while (retry < config.MEMPOOL.EXTERNAL_MAX_RETRY) {
|
||||
try {
|
||||
if (config.SOCKS5PROXY.ENABLED) {
|
||||
const socksOptions: any = {
|
||||
@@ -156,14 +158,14 @@ class PoolsUpdater {
|
||||
|
||||
axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
|
||||
}
|
||||
|
||||
|
||||
const data: AxiosResponse = await axios.get(path, axiosOptions);
|
||||
if (data.statusText === 'error' || !data.data) {
|
||||
throw new Error(`Could not fetch data from Github, Error: ${data.status}`);
|
||||
throw new Error(`Could not fetch data from ${path}, Error: ${data.status}`);
|
||||
}
|
||||
return data.data;
|
||||
} catch (e) {
|
||||
logger.err('Could not connect to Github. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
logger.err('Could not connect to Github. Reason: ' + (e instanceof Error ? e.message : e));
|
||||
retry++;
|
||||
}
|
||||
await setDelay(config.MEMPOOL.EXTERNAL_RETRY_INTERVAL);
|
||||
|
||||
43  backend/src/tasks/price-feeds/bitfinex-api.ts  Normal file
@@ -0,0 +1,43 @@
|
||||
import { query } from '../../utils/axios-query';
|
||||
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';
|
||||
|
||||
class BitfinexApi implements PriceFeed {
|
||||
public name: string = 'Bitfinex';
|
||||
public currencies: string[] = ['USD', 'EUR', 'GBP', 'JPY'];
|
||||
|
||||
public url: string = 'https://api.bitfinex.com/v1/pubticker/BTC';
|
||||
public urlHist: string = 'https://api-pub.bitfinex.com/v2/candles/trade:{GRANULARITY}:tBTC{CURRENCY}/hist';
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
public async $fetchPrice(currency): Promise<number> {
|
||||
const response = await query(this.url + currency);
|
||||
return response ? parseInt(response['last_price'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
if (this.currencies.includes(currency) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '1h' : '1D').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
const time = Math.round(price[0] / 1000);
|
||||
if (priceHistory[time] === undefined) {
|
||||
priceHistory[time] = priceUpdater.getEmptyPricesObj();
|
||||
}
|
||||
priceHistory[time][currency] = price[2];
|
||||
}
|
||||
}
|
||||
|
||||
return priceHistory;
|
||||
}
|
||||
}
|
||||
|
||||
export default BitfinexApi;
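
The historical endpoint above is built by plain placeholder substitution on urlHist. A small sketch of that templating, using the Bitfinex granularity strings from $fetchRecentPrice; the helper name is illustrative:

// Sketch of the {GRANULARITY}/{CURRENCY} substitution performed in $fetchRecentPrice above.
const bitfinexUrlHist = 'https://api-pub.bitfinex.com/v2/candles/trade:{GRANULARITY}:tBTC{CURRENCY}/hist';

function buildHistUrl(template: string, type: 'hour' | 'day', currency: string): string {
  return template
    .replace('{GRANULARITY}', type === 'hour' ? '1h' : '1D')
    .replace('{CURRENCY}', currency);
}

console.log(buildHistUrl(bitfinexUrlHist, 'day', 'EUR'));
// https://api-pub.bitfinex.com/v2/candles/trade:1D:tBTCEUR/hist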
|
||||
24  backend/src/tasks/price-feeds/bitflyer-api.ts  Normal file
@@ -0,0 +1,24 @@
|
||||
import { query } from '../../utils/axios-query';
|
||||
import { PriceFeed, PriceHistory } from '../price-updater';
|
||||
|
||||
class BitflyerApi implements PriceFeed {
|
||||
public name: string = 'Bitflyer';
|
||||
public currencies: string[] = ['USD', 'EUR', 'JPY'];
|
||||
|
||||
public url: string = 'https://api.bitflyer.com/v1/ticker?product_code=BTC_';
|
||||
public urlHist: string = '';
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
public async $fetchPrice(currency): Promise<number> {
|
||||
const response = await query(this.url + currency);
|
||||
return response ? parseInt(response['ltp'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
export default BitflyerApi;
|
||||
42  backend/src/tasks/price-feeds/coinbase-api.ts  Normal file
@@ -0,0 +1,42 @@
|
||||
import { query } from '../../utils/axios-query';
|
||||
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';
|
||||
|
||||
class CoinbaseApi implements PriceFeed {
|
||||
public name: string = 'Coinbase';
|
||||
public currencies: string[] = ['USD', 'EUR', 'GBP'];
|
||||
|
||||
public url: string = 'https://api.coinbase.com/v2/prices/spot?currency=';
|
||||
public urlHist: string = 'https://api.exchange.coinbase.com/products/BTC-{CURRENCY}/candles?granularity={GRANULARITY}';
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
public async $fetchPrice(currency): Promise<number> {
|
||||
const response = await query(this.url + currency);
|
||||
return response ? parseInt(response['data']['amount'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
if (this.currencies.includes(currency) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '3600' : '86400').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
if (priceHistory[price[0]] === undefined) {
|
||||
priceHistory[price[0]] = priceUpdater.getEmptyPricesObj();
|
||||
}
|
||||
priceHistory[price[0]][currency] = price[4];
|
||||
}
|
||||
}
|
||||
|
||||
return priceHistory;
|
||||
}
|
||||
}
|
||||
|
||||
export default CoinbaseApi;
|
||||
43  backend/src/tasks/price-feeds/ftx-api.ts  Normal file
@@ -0,0 +1,43 @@
|
||||
import { query } from '../../utils/axios-query';
|
||||
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';
|
||||
|
||||
class FtxApi implements PriceFeed {
|
||||
public name: string = 'FTX';
|
||||
public currencies: string[] = ['USD', 'BRZ', 'EUR', 'JPY', 'AUD'];
|
||||
|
||||
public url: string = 'https://ftx.com/api/markets/BTC/';
|
||||
public urlHist: string = 'https://ftx.com/api/markets/BTC/{CURRENCY}/candles?resolution={GRANULARITY}';
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
public async $fetchPrice(currency): Promise<number> {
|
||||
const response = await query(this.url + currency);
|
||||
return response ? parseInt(response['result']['last'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
if (this.currencies.includes(currency) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '3600' : '86400').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response['result'] : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
const time = Math.round(price['time'] / 1000);
|
||||
if (priceHistory[time] === undefined) {
|
||||
priceHistory[time] = priceUpdater.getEmptyPricesObj();
|
||||
}
|
||||
priceHistory[time][currency] = price['close'];
|
||||
}
|
||||
}
|
||||
|
||||
return priceHistory;
|
||||
}
|
||||
}
|
||||
|
||||
export default FtxApi;
|
||||
43  backend/src/tasks/price-feeds/gemini-api.ts  Normal file
@@ -0,0 +1,43 @@
|
||||
import { query } from '../../utils/axios-query';
|
||||
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';
|
||||
|
||||
class GeminiApi implements PriceFeed {
|
||||
public name: string = 'Gemini';
|
||||
public currencies: string[] = ['USD', 'EUR', 'GBP', 'SGD'];
|
||||
|
||||
public url: string = 'https://api.gemini.com/v1/pubticker/BTC';
|
||||
public urlHist: string = 'https://api.gemini.com/v2/candles/BTC{CURRENCY}/{GRANULARITY}';
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
public async $fetchPrice(currency): Promise<number> {
|
||||
const response = await query(this.url + currency);
|
||||
return response ? parseInt(response['last'], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
if (this.currencies.includes(currency) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', type === 'hour' ? '1hr' : '1day').replace('{CURRENCY}', currency));
|
||||
const pricesRaw = response ? response : [];
|
||||
|
||||
for (const price of pricesRaw as any[]) {
|
||||
const time = Math.round(price[0] / 1000);
|
||||
if (priceHistory[time] === undefined) {
|
||||
priceHistory[time] = priceUpdater.getEmptyPricesObj();
|
||||
}
|
||||
priceHistory[time][currency] = price[4];
|
||||
}
|
||||
}
|
||||
|
||||
return priceHistory;
|
||||
}
|
||||
}
|
||||
|
||||
export default GeminiApi;
|
||||
99  backend/src/tasks/price-feeds/kraken-api.ts  Normal file
@@ -0,0 +1,99 @@
|
||||
import logger from '../../logger';
|
||||
import PricesRepository from '../../repositories/PricesRepository';
|
||||
import { query } from '../../utils/axios-query';
|
||||
import priceUpdater, { PriceFeed, PriceHistory } from '../price-updater';
|
||||
|
||||
class KrakenApi implements PriceFeed {
|
||||
public name: string = 'Kraken';
|
||||
public currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
|
||||
|
||||
public url: string = 'https://api.kraken.com/0/public/Ticker?pair=XBT';
|
||||
public urlHist: string = 'https://api.kraken.com/0/public/OHLC?interval={GRANULARITY}&pair=XBT';
|
||||
|
||||
constructor() {
|
||||
}
|
||||
|
||||
private getTicker(currency) {
|
||||
let ticker = `XXBTZ${currency}`;
|
||||
if (['CHF', 'AUD'].includes(currency)) {
|
||||
ticker = `XBT${currency}`;
|
||||
}
|
||||
return ticker;
|
||||
}
|
||||
|
||||
public async $fetchPrice(currency): Promise<number> {
|
||||
const response = await query(this.url + currency);
|
||||
return response ? parseInt(response['result'][this.getTicker(currency)]['c'][0], 10) : -1;
|
||||
}
|
||||
|
||||
public async $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory> {
|
||||
const priceHistory: PriceHistory = {};
|
||||
|
||||
for (const currency of currencies) {
|
||||
if (this.currencies.includes(currency) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', '60') + currency);
|
||||
const pricesRaw = response ? response['result'][this.getTicker(currency)] : [];
|
||||
|
||||
for (const price of pricesRaw) {
|
||||
if (priceHistory[price[0]] === undefined) {
|
||||
priceHistory[price[0]] = priceUpdater.getEmptyPricesObj();
|
||||
}
|
||||
priceHistory[price[0]][currency] = price[4];
|
||||
}
|
||||
}
|
||||
|
||||
return priceHistory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch weekly price and save it into the database
|
||||
*/
|
||||
public async $insertHistoricalPrice(): Promise<void> {
|
||||
const existingPriceTimes = await PricesRepository.$getPricesTimes();
|
||||
|
||||
// EUR weekly price history goes back to timestamp 1378339200 (September 5, 2013)
|
||||
// USD weekly price history goes back to timestamp 1380758400 (October 3, 2013)
|
||||
// GBP weekly price history goes back to timestamp 1415232000 (November 6, 2014)
|
||||
// JPY weekly price history goes back to timestamp 1415232000 (November 6, 2014)
|
||||
// CAD weekly price history goes back to timestamp 1436400000 (July 9, 2015)
|
||||
// CHF weekly price history goes back to timestamp 1575504000 (December 5, 2019)
|
||||
// AUD weekly price history goes back to timestamp 1591833600 (June 11, 2020)
|
||||
|
||||
let priceHistory: any = {}; // map: timestamp -> Prices
|
||||
|
||||
for (const currency of this.currencies) {
|
||||
const response = await query(this.urlHist.replace('{GRANULARITY}', '10080') + currency);
|
||||
const priceHistoryRaw = response ? response['result'][this.getTicker(currency)] : [];
|
||||
|
||||
for (const price of priceHistoryRaw) {
|
||||
if (existingPriceTimes.includes(parseInt(price[0]))) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// prices[0] = kraken price timestamp
|
||||
// prices[4] = closing price
|
||||
if (priceHistory[price[0]] === undefined) {
|
||||
priceHistory[price[0]] = priceUpdater.getEmptyPricesObj();
|
||||
}
|
||||
priceHistory[price[0]][currency] = price[4];
|
||||
}
|
||||
}
|
||||
|
||||
for (const time in priceHistory) {
|
||||
if (priceHistory[time].USD === -1) {
|
||||
delete priceHistory[time];
|
||||
continue;
|
||||
}
|
||||
await PricesRepository.$savePrices(parseInt(time, 10), priceHistory[time]);
|
||||
}
|
||||
|
||||
if (Object.keys(priceHistory).length > 0) {
|
||||
logger.notice(`Inserted ${Object.keys(priceHistory).length} Kraken EUR, USD, GBP, JPY, CAD, CHF and AUD weekly price history into db`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default KrakenApi;
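
For reference, the pair naming encoded in getTicker() above reflects Kraken's legacy asset codes: most fiat pairs use the "XXBTZ" prefix while CHF and AUD use the shorter "XBT" prefix. A one-function sketch of that mapping (function name illustrative):

// Sketch of the Kraken pair naming used by getTicker() above.
function krakenTicker(currency: string): string {
  return ['CHF', 'AUD'].includes(currency) ? `XBT${currency}` : `XXBTZ${currency}`;
}

console.log(krakenTicker('USD')); // XXBTZUSD
console.log(krakenTicker('CHF')); // XBTCHF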
|
||||
762  backend/src/tasks/price-feeds/mtgox-weekly.json  Normal file
@@ -0,0 +1,762 @@
|
||||
[
|
||||
{
|
||||
"ct": 1279497600,
|
||||
"c": "0.08584"
|
||||
},
|
||||
{
|
||||
"ct": 1280102400,
|
||||
"c": "0.0505"
|
||||
},
|
||||
{
|
||||
"ct": 1280707200,
|
||||
"c": "0.0611"
|
||||
},
|
||||
{
|
||||
"ct": 1281312000,
|
||||
"c": "0.0609"
|
||||
},
|
||||
{
|
||||
"ct": 1281916800,
|
||||
"c": "0.06529"
|
||||
},
|
||||
{
|
||||
"ct": 1282521600,
|
||||
"c": "0.066"
|
||||
},
|
||||
{
|
||||
"ct": 1283126400,
|
||||
"c": "0.064"
|
||||
},
|
||||
{
|
||||
"ct": 1283731200,
|
||||
"c": "0.06165"
|
||||
},
|
||||
{
|
||||
"ct": 1284336000,
|
||||
"c": "0.0615"
|
||||
},
|
||||
{
|
||||
"ct": 1284940800,
|
||||
"c": "0.0627"
|
||||
},
|
||||
{
|
||||
"ct": 1285545600,
|
||||
"c": "0.0622"
|
||||
},
|
||||
{
|
||||
"ct": 1286150400,
|
||||
"c": "0.06111"
|
||||
},
|
||||
{
|
||||
"ct": 1286755200,
|
||||
"c": "0.0965"
|
||||
},
|
||||
{
|
||||
"ct": 1287360000,
|
||||
"c": "0.102"
|
||||
},
|
||||
{
|
||||
"ct": 1287964800,
|
||||
"c": "0.11501"
|
||||
},
|
||||
{
|
||||
"ct": 1288569600,
|
||||
"c": "0.1925"
|
||||
},
|
||||
{
|
||||
"ct": 1289174400,
|
||||
"c": "0.34"
|
||||
},
|
||||
{
|
||||
"ct": 1289779200,
|
||||
"c": "0.27904"
|
||||
},
|
||||
{
|
||||
"ct": 1290384000,
|
||||
"c": "0.27675"
|
||||
},
|
||||
{
|
||||
"ct": 1290988800,
|
||||
"c": "0.27"
|
||||
},
|
||||
{
|
||||
"ct": 1291593600,
|
||||
"c": "0.19"
|
||||
},
|
||||
{
|
||||
"ct": 1292198400,
|
||||
"c": "0.2189"
|
||||
},
|
||||
{
|
||||
"ct": 1292803200,
|
||||
"c": "0.2401"
|
||||
},
|
||||
{
|
||||
"ct": 1293408000,
|
||||
"c": "0.263"
|
||||
},
|
||||
{
|
||||
"ct": 1294012800,
|
||||
"c": "0.29997"
|
||||
},
|
||||
{
|
||||
"ct": 1294617600,
|
||||
"c": "0.323"
|
||||
},
|
||||
{
|
||||
"ct": 1295222400,
|
||||
"c": "0.38679"
|
||||
},
|
||||
{
|
||||
"ct": 1295827200,
|
||||
"c": "0.4424"
|
||||
},
|
||||
{
|
||||
"ct": 1296432000,
|
||||
"c": "0.4799"
|
||||
},
|
||||
{
|
||||
"ct": 1297036800,
|
||||
"c": "0.8968"
|
||||
},
|
||||
{
|
||||
"ct": 1297641600,
|
||||
"c": "1.05"
|
||||
},
|
||||
{
|
||||
"ct": 1298246400,
|
||||
"c": "0.865"
|
||||
},
|
||||
{
|
||||
"ct": 1298851200,
|
||||
"c": "0.89"
|
||||
},
|
||||
{
|
||||
"ct": 1299456000,
|
||||
"c": "0.8999"
|
||||
},
|
||||
{
|
||||
"ct": 1300060800,
|
||||
"c": "0.89249"
|
||||
},
|
||||
{
|
||||
"ct": 1300665600,
|
||||
"c": "0.75218"
|
||||
},
|
||||
{
|
||||
"ct": 1301270400,
|
||||
"c": "0.82754"
|
||||
},
|
||||
{
|
||||
"ct": 1301875200,
|
||||
"c": "0.779"
|
||||
},
|
||||
{
|
||||
"ct": 1302480000,
|
||||
"c": "0.7369"
|
||||
},
|
||||
{
|
||||
"ct": 1303084800,
|
||||
"c": "1.1123"
|
||||
},
|
||||
{
|
||||
"ct": 1303689600,
|
||||
"c": "1.6311"
|
||||
},
|
||||
{
|
||||
"ct": 1304294400,
|
||||
"c": "3.03311"
|
||||
},
|
||||
{
|
||||
"ct": 1304899200,
|
||||
"c": "3.8659"
|
||||
},
|
||||
{
|
||||
"ct": 1305504000,
|
||||
"c": "6.98701"
|
||||
},
|
||||
{
|
||||
"ct": 1306108800,
|
||||
"c": "6.6901"
|
||||
},
|
||||
{
|
||||
"ct": 1306713600,
|
||||
"c": "8.4"
|
||||
},
|
||||
{
|
||||
"ct": 1307318400,
|
||||
"c": "16.7"
|
||||
},
|
||||
{
|
||||
"ct": 1307923200,
|
||||
"c": "18.5464"
|
||||
},
|
||||
{
|
||||
"ct": 1308528000,
|
||||
"c": "17.51"
|
||||
},
|
||||
{
|
||||
"ct": 1309132800,
|
||||
"c": "16.45001"
|
||||
},
|
||||
{
|
||||
"ct": 1309737600,
|
||||
"c": "15.44049"
|
||||
},
|
||||
{
|
||||
"ct": 1310342400,
|
||||
"c": "14.879"
|
||||
},
|
||||
{
|
||||
"ct": 1310947200,
|
||||
"c": "13.16"
|
||||
},
|
||||
{
|
||||
"ct": 1311552000,
|
||||
"c": "13.98001"
|
||||
},
|
||||
{
|
||||
"ct": 1312156800,
|
||||
"c": "13.35"
|
||||
},
|
||||
{
|
||||
"ct": 1312761600,
|
||||
"c": "7.9"
|
||||
},
|
||||
{
|
||||
"ct": 1313366400,
|
||||
"c": "10.7957"
|
||||
},
|
||||
{
|
||||
"ct": 1313971200,
|
||||
"c": "11.31125"
|
||||
},
|
||||
{
|
||||
"ct": 1314576000,
|
||||
"c": "9.07011"
|
||||
},
|
||||
{
|
||||
"ct": 1315180800,
|
||||
"c": "8.17798"
|
||||
},
|
||||
{
|
||||
"ct": 1315785600,
|
||||
"c": "5.86436"
|
||||
},
|
||||
{
|
||||
"ct": 1316390400,
|
||||
"c": "5.2"
|
||||
},
|
||||
{
|
||||
"ct": 1316995200,
|
||||
"c": "5.33"
|
||||
},
|
||||
{
|
||||
"ct": 1317600000,
|
||||
"c": "5.02701"
|
||||
},
|
||||
{
|
||||
"ct": 1318204800,
|
||||
"c": "4.10288"
|
||||
},
|
||||
{
|
||||
"ct": 1318809600,
|
||||
"c": "3.5574"
|
||||
},
|
||||
{
|
||||
"ct": 1319414400,
|
||||
"c": "3.12657"
|
||||
},
|
||||
{
|
||||
"ct": 1320019200,
|
||||
"c": "3.27"
|
||||
},
|
||||
{
|
||||
"ct": 1320624000,
|
||||
"c": "2.95959"
|
||||
},
|
||||
{
|
||||
"ct": 1321228800,
|
||||
"c": "2.99626"
|
||||
},
|
||||
{
|
||||
"ct": 1321833600,
|
||||
"c": "2.2"
|
||||
},
|
||||
{
|
||||
"ct": 1322438400,
|
||||
"c": "2.47991"
|
||||
},
|
||||
{
|
||||
"ct": 1323043200,
|
||||
"c": "2.82809"
|
||||
},
|
||||
{
|
||||
"ct": 1323648000,
|
||||
"c": "3.2511"
|
||||
},
|
||||
{
|
||||
"ct": 1324252800,
|
||||
"c": "3.193"
|
||||
},
|
||||
{
|
||||
"ct": 1324857600,
|
||||
"c": "4.225"
|
||||
},
|
||||
{
|
||||
"ct": 1325462400,
|
||||
"c": "5.26766"
|
||||
},
|
||||
{
|
||||
"ct": 1326067200,
|
||||
"c": "7.11358"
|
||||
},
|
||||
{
|
||||
"ct": 1326672000,
|
||||
"c": "7.00177"
|
||||
},
|
||||
{
|
||||
"ct": 1327276800,
|
||||
"c": "6.3097"
|
||||
},
|
||||
{
|
||||
"ct": 1327881600,
|
||||
"c": "5.38191"
|
||||
},
|
||||
{
|
||||
"ct": 1328486400,
|
||||
"c": "5.68881"
|
||||
},
|
||||
{
|
||||
"ct": 1329091200,
|
||||
"c": "5.51468"
|
||||
},
|
||||
{
|
||||
"ct": 1329696000,
|
||||
"c": "4.38669"
|
||||
},
|
||||
{
|
||||
"ct": 1330300800,
|
||||
"c": "4.922"
|
||||
},
|
||||
{
|
||||
"ct": 1330905600,
|
||||
"c": "4.8201"
|
||||
},
|
||||
{
|
||||
"ct": 1331510400,
|
||||
"c": "4.90901"
|
||||
},
|
||||
{
|
||||
"ct": 1332115200,
|
||||
"c": "5.27943"
|
||||
},
|
||||
{
|
||||
"ct": 1332720000,
|
||||
"c": "4.55001"
|
||||
},
|
||||
{
|
||||
"ct": 1333324800,
|
||||
"c": "4.81922"
|
||||
},
|
||||
{
|
||||
"ct": 1333929600,
|
||||
"c": "4.79253"
|
||||
},
|
||||
{
|
||||
"ct": 1334534400,
|
||||
"c": "4.96892"
|
||||
},
|
||||
{
|
||||
"ct": 1335139200,
|
||||
"c": "5.20352"
|
||||
},
|
||||
{
|
||||
"ct": 1335744000,
|
||||
"c": "4.90441"
|
||||
},
|
||||
{
|
||||
"ct": 1336348800,
|
||||
"c": "5.04991"
|
||||
},
|
||||
{
|
||||
"ct": 1336953600,
|
||||
"c": "4.92996"
|
||||
},
|
||||
{
|
||||
"ct": 1337558400,
|
||||
"c": "5.09002"
|
||||
},
|
||||
{
|
||||
"ct": 1338163200,
|
||||
"c": "5.13896"
|
||||
},
|
||||
{
|
||||
"ct": 1338768000,
|
||||
"c": "5.2051"
|
||||
},
|
||||
{
|
||||
"ct": 1339372800,
|
||||
"c": "5.46829"
|
||||
},
|
||||
{
|
||||
"ct": 1339977600,
|
||||
"c": "6.16382"
|
||||
},
|
||||
{
|
||||
"ct": 1340582400,
|
||||
"c": "6.35002"
|
||||
},
|
||||
{
|
||||
"ct": 1341187200,
|
||||
"c": "6.62898"
|
||||
},
|
||||
{
|
||||
"ct": 1341792000,
|
||||
"c": "6.79898"
|
||||
},
|
||||
{
|
||||
"ct": 1342396800,
|
||||
"c": "7.62101"
|
||||
},
|
||||
{
|
||||
"ct": 1343001600,
|
||||
"c": "8.4096"
|
||||
},
|
||||
{
|
||||
"ct": 1343606400,
|
||||
"c": "8.71027"
|
||||
},
|
||||
{
|
||||
"ct": 1344211200,
|
||||
"c": "10.86998"
|
||||
},
|
||||
{
|
||||
"ct": 1344816000,
|
||||
"c": "11.6239"
|
||||
},
|
||||
{
|
||||
"ct": 1345420800,
|
||||
"c": "7.98"
|
||||
},
|
||||
{
|
||||
"ct": 1346025600,
|
||||
"c": "10.61"
|
||||
},
|
||||
{
|
||||
"ct": 1346630400,
|
||||
"c": "10.2041"
|
||||
},
|
||||
{
|
||||
"ct": 1347235200,
|
||||
"c": "11.02"
|
||||
},
|
||||
{
|
||||
"ct": 1347840000,
|
||||
"c": "11.87"
|
||||
},
|
||||
{
|
||||
"ct": 1348444800,
|
||||
"c": "12.19331"
|
||||
},
|
||||
{
|
||||
"ct": 1349049600,
|
||||
"c": "12.4"
|
||||
},
|
||||
{
|
||||
"ct": 1349654400,
|
||||
"c": "11.8034"
|
||||
},
|
||||
{
|
||||
"ct": 1350259200,
|
||||
"c": "11.7389"
|
||||
},
|
||||
{
|
||||
"ct": 1350864000,
|
||||
"c": "11.63107"
|
||||
},
|
||||
{
|
||||
"ct": 1351468800,
|
||||
"c": "10.69998"
|
||||
},
|
||||
{
|
||||
"ct": 1352073600,
|
||||
"c": "10.80011"
|
||||
},
|
||||
{
|
||||
"ct": 1352678400,
|
||||
"c": "10.84692"
|
||||
},
|
||||
{
|
||||
"ct": 1353283200,
|
||||
"c": "11.65961"
|
||||
},
|
||||
{
|
||||
"ct": 1353888000,
|
||||
"c": "12.4821"
|
||||
},
|
||||
{
|
||||
"ct": 1354492800,
|
||||
"c": "12.50003"
|
||||
},
|
||||
{
|
||||
"ct": 1355097600,
|
||||
"c": "13.388"
|
||||
},
|
||||
{
|
||||
"ct": 1355702400,
|
||||
"c": "13.30002"
|
||||
},
|
||||
{
|
||||
"ct": 1356307200,
|
||||
"c": "13.31202"
|
||||
},
|
||||
{
|
||||
"ct": 1356912000,
|
||||
"c": "13.45001"
|
||||
},
|
||||
{
|
||||
"ct": 1357516800,
|
||||
"c": "13.5199"
|
||||
},
|
||||
{
|
||||
"ct": 1358121600,
|
||||
"c": "14.11601"
|
||||
},
|
||||
{
|
||||
"ct": 1358726400,
|
||||
"c": "15.7"
|
||||
},
|
||||
{
|
||||
"ct": 1359331200,
|
||||
"c": "17.95"
|
||||
},
|
||||
{
|
||||
"ct": 1359936000,
|
||||
"c": "20.59"
|
||||
},
|
||||
{
|
||||
"ct": 1360540800,
|
||||
"c": "23.96975"
|
||||
},
|
||||
{
|
||||
"ct": 1361145600,
|
||||
"c": "26.8146"
|
||||
},
|
||||
{
|
||||
"ct": 1361750400,
|
||||
"c": "29.88999"
|
||||
},
|
||||
{
|
||||
"ct": 1362355200,
|
||||
"c": "34.49999"
|
||||
},
|
||||
{
|
||||
"ct": 1362960000,
|
||||
"c": "46"
|
||||
},
|
||||
{
|
||||
"ct": 1363564800,
|
||||
"c": "47.4"
|
||||
},
|
||||
{
|
||||
"ct": 1364169600,
|
||||
"c": "71.93"
|
||||
},
|
||||
{
|
||||
"ct": 1364774400,
|
||||
"c": "93.03001"
|
||||
},
|
||||
{
|
||||
"ct": 1365379200,
|
||||
"c": "162.30102"
|
||||
},
|
||||
{
|
||||
"ct": 1365984000,
|
||||
"c": "89.99999"
|
||||
},
|
||||
{
|
||||
"ct": 1366588800,
|
||||
"c": "119.2"
|
||||
},
|
||||
{
|
||||
"ct": 1367193600,
|
||||
"c": "134.44444"
|
||||
},
|
||||
{
|
||||
"ct": 1367798400,
|
||||
"c": "115.98"
|
||||
},
|
||||
{
|
||||
"ct": 1368403200,
|
||||
"c": "114.82002"
|
||||
},
|
||||
{
|
||||
"ct": 1369008000,
|
||||
"c": "122.49999"
|
||||
},
|
||||
{
|
||||
"ct": 1369612800,
|
||||
"c": "133.5"
|
||||
},
|
||||
{
|
||||
"ct": 1370217600,
|
||||
"c": "122.5"
|
||||
},
|
||||
{
|
||||
"ct": 1370822400,
|
||||
"c": "100.43743"
|
||||
},
|
||||
{
|
||||
"ct": 1371427200,
|
||||
"c": "99.9"
|
||||
},
|
||||
{
|
||||
"ct": 1372032000,
|
||||
"c": "107.90001"
|
||||
},
|
||||
{
|
||||
"ct": 1372636800,
|
||||
"c": "97.51"
|
||||
},
|
||||
{
|
||||
"ct": 1373241600,
|
||||
"c": "76.5"
|
||||
},
|
||||
{
|
||||
"ct": 1373846400,
|
||||
"c": "94.41986"
|
||||
},
|
||||
{
|
||||
"ct": 1374451200,
|
||||
"c": "91.998"
|
||||
},
|
||||
{
|
||||
"ct": 1375056000,
|
||||
"c": "98.78008"
|
||||
},
|
||||
{
|
||||
"ct": 1375660800,
|
||||
"c": "105.12"
|
||||
},
|
||||
{
|
||||
"ct": 1376265600,
|
||||
"c": "105"
|
||||
},
|
||||
{
|
||||
"ct": 1376870400,
|
||||
"c": "113.38"
|
||||
},
|
||||
{
|
||||
"ct": 1377475200,
|
||||
"c": "122.11102"
|
||||
},
|
||||
{
|
||||
"ct": 1378080000,
|
||||
"c": "146.01003"
|
||||
},
|
||||
{
|
||||
"ct": 1378684800,
|
||||
"c": "126.31501"
|
||||
},
|
||||
{
|
||||
"ct": 1379289600,
|
||||
"c": "138.3002"
|
||||
},
|
||||
{
|
||||
"ct": 1379894400,
|
||||
"c": "134.00001"
|
||||
},
|
||||
{
|
||||
"ct": 1380499200,
|
||||
"c": "143.88402"
|
||||
},
|
||||
{
|
||||
"ct": 1381104000,
|
||||
"c": "137.8"
|
||||
},
|
||||
{
|
||||
"ct": 1381708800,
|
||||
"c": "147.53"
|
||||
},
|
||||
{
|
||||
"ct": 1382313600,
|
||||
"c": "186.1"
|
||||
},
|
||||
{
|
||||
"ct": 1382918400,
|
||||
"c": "207.0001"
|
||||
},
|
||||
{
|
||||
"ct": 1383523200,
|
||||
"c": "224.01001"
|
||||
},
|
||||
{
|
||||
"ct": 1384128000,
|
||||
"c": "336.33101"
|
||||
},
|
||||
{
|
||||
"ct": 1384732800,
|
||||
"c": "528"
|
||||
},
|
||||
{
|
||||
"ct": 1385337600,
|
||||
"c": "795"
|
||||
},
|
||||
{
|
||||
"ct": 1385942400,
|
||||
"c": "1004.42392"
|
||||
},
|
||||
{
|
||||
"ct": 1386547200,
|
||||
"c": "804.5"
|
||||
},
|
||||
{
|
||||
"ct": 1387152000,
|
||||
"c": "919.985"
|
||||
},
|
||||
{
|
||||
"ct": 1387756800,
|
||||
"c": "639.48"
|
||||
},
|
||||
{
|
||||
"ct": 1388361600,
|
||||
"c": "786.98"
|
||||
},
|
||||
{
|
||||
"ct": 1388966400,
|
||||
"c": "1015"
|
||||
},
|
||||
{
|
||||
"ct": 1389571200,
|
||||
"c": "940"
|
||||
},
|
||||
{
|
||||
"ct": 1390176000,
|
||||
"c": "954.995"
|
||||
},
|
||||
{
|
||||
"ct": 1390780800,
|
||||
"c": "1007.98999"
|
||||
},
|
||||
{
|
||||
"ct": 1391385600,
|
||||
"c": "954"
|
||||
},
|
||||
{
|
||||
"ct": 1391990400,
|
||||
"c": "659.49776"
|
||||
},
|
||||
{
|
||||
"ct": 1392595200,
|
||||
"c": "299.702"
|
||||
},
|
||||
{
|
||||
"ct": 1393200000,
|
||||
"c": "310.00001"
|
||||
},
|
||||
{
|
||||
"ct": 1393804800,
|
||||
"c": "135"
|
||||
}
|
||||
]
|
||||
264  backend/src/tasks/price-updater.ts  Normal file
@@ -0,0 +1,264 @@
|
||||
import * as fs from 'fs';
|
||||
import { Common } from '../api/common';
|
||||
import config from '../config';
|
||||
import logger from '../logger';
|
||||
import PricesRepository from '../repositories/PricesRepository';
|
||||
import BitfinexApi from './price-feeds/bitfinex-api';
|
||||
import BitflyerApi from './price-feeds/bitflyer-api';
|
||||
import CoinbaseApi from './price-feeds/coinbase-api';
|
||||
import FtxApi from './price-feeds/ftx-api';
|
||||
import GeminiApi from './price-feeds/gemini-api';
|
||||
import KrakenApi from './price-feeds/kraken-api';
|
||||
|
||||
export interface PriceFeed {
|
||||
name: string;
|
||||
url: string;
|
||||
urlHist: string;
|
||||
currencies: string[];
|
||||
|
||||
$fetchPrice(currency): Promise<number>;
|
||||
$fetchRecentPrice(currencies: string[], type: string): Promise<PriceHistory>;
|
||||
}
|
||||
|
||||
export interface PriceHistory {
|
||||
[timestamp: number]: Prices;
|
||||
}
|
||||
|
||||
export interface Prices {
|
||||
USD: number;
|
||||
EUR: number;
|
||||
GBP: number;
|
||||
CAD: number;
|
||||
CHF: number;
|
||||
AUD: number;
|
||||
JPY: number;
|
||||
}
|
||||
|
||||
class PriceUpdater {
|
||||
public historyInserted = false;
|
||||
lastRun = 0;
|
||||
lastHistoricalRun = 0;
|
||||
running = false;
|
||||
feeds: PriceFeed[] = [];
|
||||
currencies: string[] = ['USD', 'EUR', 'GBP', 'CAD', 'CHF', 'AUD', 'JPY'];
|
||||
latestPrices: Prices;
|
||||
|
||||
constructor() {
|
||||
this.latestPrices = this.getEmptyPricesObj();
|
||||
|
||||
this.feeds.push(new BitflyerApi()); // Does not have historical endpoint
|
||||
this.feeds.push(new FtxApi());
|
||||
this.feeds.push(new KrakenApi());
|
||||
this.feeds.push(new CoinbaseApi());
|
||||
this.feeds.push(new BitfinexApi());
|
||||
this.feeds.push(new GeminiApi());
|
||||
}
|
||||
|
||||
public getEmptyPricesObj(): Prices {
|
||||
return {
|
||||
USD: -1,
|
||||
EUR: -1,
|
||||
GBP: -1,
|
||||
CAD: -1,
|
||||
CHF: -1,
|
||||
AUD: -1,
|
||||
JPY: -1,
|
||||
};
|
||||
}
|
||||
|
||||
public async $run(): Promise<void> {
|
||||
if (this.running === true) {
|
||||
return;
|
||||
}
|
||||
this.running = true;
|
||||
|
||||
if ((Math.round(new Date().getTime() / 1000) - this.lastHistoricalRun) > 3600 * 24) {
|
||||
// Once a day, look for missing prices (could happen due to network connectivity issues)
|
||||
this.historyInserted = false;
|
||||
}
|
||||
|
||||
try {
|
||||
if (this.historyInserted === false && config.DATABASE.ENABLED === true) {
|
||||
await this.$insertHistoricalPrices();
|
||||
} else {
|
||||
await this.$updatePrice();
|
||||
}
|
||||
} catch (e) {
|
||||
logger.err(`Cannot save BTC prices in db. Reason: ${e instanceof Error ? e.message : e}`);
|
||||
}
|
||||
|
||||
this.running = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch last BTC price from exchanges, average them, and save it in the database once every hour
|
||||
*/
|
||||
private async $updatePrice(): Promise<void> {
|
||||
if (this.lastRun === 0 && config.DATABASE.ENABLED === true) {
|
||||
this.lastRun = await PricesRepository.$getLatestPriceTime();
|
||||
}
|
||||
|
||||
if ((Math.round(new Date().getTime() / 1000) - this.lastRun) < 3600) {
|
||||
// Refresh only once every hour
|
||||
return;
|
||||
}
|
||||
|
||||
const previousRun = this.lastRun;
|
||||
this.lastRun = new Date().getTime() / 1000;
|
||||
|
||||
for (const currency of this.currencies) {
|
||||
let prices: number[] = [];
|
||||
|
||||
for (const feed of this.feeds) {
|
||||
// Fetch prices from API which supports `currency`
|
||||
if (feed.currencies.includes(currency)) {
|
||||
try {
|
||||
const price = await feed.$fetchPrice(currency);
|
||||
if (price > 0) {
|
||||
prices.push(price);
|
||||
}
|
||||
logger.debug(`${feed.name} BTC/${currency} price: ${price}`);
|
||||
} catch (e) {
|
||||
logger.debug(`Could not fetch BTC/${currency} price at ${feed.name}. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (prices.length === 1) {
|
||||
logger.debug(`Only ${prices.length} feed available for BTC/${currency} price`);
|
||||
}
|
||||
|
||||
// Compute average price, non weighted
|
||||
prices = prices.filter(price => price > 0);
|
||||
this.latestPrices[currency] = Math.round((prices.reduce((partialSum, a) => partialSum + a, 0)) / prices.length);
|
||||
}
|
||||
|
||||
logger.info(`Latest BTC fiat averaged price: ${JSON.stringify(this.latestPrices)}`);
|
||||
|
||||
if (config.DATABASE.ENABLED === true) {
|
||||
// Save everything in db
|
||||
try {
|
||||
const p = 60 * 60 * 1000; // milliseconds in an hour
|
||||
const nowRounded = new Date(Math.round(new Date().getTime() / p) * p); // https://stackoverflow.com/a/28037042
|
||||
await PricesRepository.$savePrices(nowRounded.getTime() / 1000, this.latestPrices);
|
||||
} catch (e) {
|
||||
this.lastRun = previousRun + 5 * 60;
|
||||
logger.err(`Cannot save latest prices into db. Trying again in 5 minutes. Reason: ${(e instanceof Error ? e.message : e)}`);
|
||||
}
|
||||
}
|
||||
|
||||
this.lastRun = new Date().getTime() / 1000;
|
||||
}
|
||||
|
||||
  /**
   * Called once by the database migration to initialize historical prices data (weekly)
   * We use MtGox weekly prices from July 19, 2010 to September 30, 2013
   * We use Kraken weekly prices from October 3, 2013 up to last month
   * We use Kraken hourly prices for the past month
   */
  private async $insertHistoricalPrices(): Promise<void> {
    const existingPriceTimes = await PricesRepository.$getPricesTimes();

    // Insert MtGox weekly prices
    const pricesJson: any[] = JSON.parse(fs.readFileSync('./src/tasks/price-feeds/mtgox-weekly.json').toString());
    const prices = this.getEmptyPricesObj();
    let insertedCount: number = 0;
    for (const price of pricesJson) {
      if (existingPriceTimes.includes(price['ct'])) {
        continue;
      }

      // From 1380758400 onwards we use the Kraken price, as it followed MtGox closely but was not affected as much
      // by the MtGox exchange collapse a few months later
      if (price['ct'] > 1380758400) {
        break;
      }
      prices.USD = price['c'];
      await PricesRepository.$savePrices(price['ct'], prices);
      ++insertedCount;
    }
    if (insertedCount > 0) {
      logger.notice(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
    } else {
      logger.debug(`Inserted ${insertedCount} MtGox USD weekly price history into db`);
    }

    // Insert Kraken weekly prices
    await new KrakenApi().$insertHistoricalPrice();

    // Insert missing recent hourly prices
    await this.$insertMissingRecentPrices('day');
    await this.$insertMissingRecentPrices('hour');

    this.historyInserted = true;
    this.lastHistoricalRun = Math.round(new Date().getTime() / 1000); // seconds, to match the daily check in $run()
  }

  /**
   * Find missing hourly prices and insert them in the database
   * It has a limited backward range and it depends on which APIs are available
   */
  private async $insertMissingRecentPrices(type: 'hour' | 'day'): Promise<void> {
    const existingPriceTimes = await PricesRepository.$getPricesTimes();

    logger.info(`Fetching ${type === 'day' ? 'dai' : 'hour'}ly price history from exchanges and saving missing ones into the database, this may take a while`);

    const historicalPrices: PriceHistory[] = [];

    // Fetch all historical hourly prices
    for (const feed of this.feeds) {
      try {
        historicalPrices.push(await feed.$fetchRecentPrice(this.currencies, type));
      } catch (e) {
        logger.err(`Cannot fetch hourly historical price from ${feed.name}. Ignoring this feed. Reason: ${e instanceof Error ? e.message : e}`);
      }
    }

    // Group them by timestamp and currency, for example:
    // grouped[123456789]['USD'] = [1, 2, 3, 4];
    const grouped: { [timestamp: string]: { [currency: string]: number[] } } = {};
    for (const historicalEntry of historicalPrices) {
      for (const time in historicalEntry) {
        if (existingPriceTimes.includes(parseInt(time, 10))) {
          continue;
        }

        if (grouped[time] === undefined) {
          grouped[time] = {
            USD: [], EUR: [], GBP: [], CAD: [], CHF: [], AUD: [], JPY: []
          };
        }

        for (const currency of this.currencies) {
          const price = historicalEntry[time][currency];
          if (price > 0) {
            grouped[time][currency].push(parseInt(price, 10));
          }
        }
      }
    }

    // Average prices and insert everything into the db
    let totalInserted = 0;
    for (const time in grouped) {
      const prices: Prices = this.getEmptyPricesObj();
      for (const currency in grouped[time]) {
        if (grouped[time][currency].length === 0) {
          continue;
        }
        prices[currency] = Math.round((grouped[time][currency].reduce(
          (partialSum, a) => partialSum + a, 0)
        ) / grouped[time][currency].length);
      }
      await PricesRepository.$savePrices(parseInt(time, 10), prices);
      ++totalInserted;
    }

    if (totalInserted > 0) {
      logger.notice(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
    } else {
      logger.debug(`Inserted ${totalInserted} ${type === 'day' ? 'dai' : 'hour'}ly historical prices into the db`);
    }
  }
}

export default new PriceUpdater();
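The updater only relies on a small contract from each feed object ($fetchPrice, $fetchRecentPrice, name, currencies). The real PriceFeed and PriceHistory types live elsewhere in the backend and are not part of this diff; the sketch below is an assumption about their shape, shown only to make the calls above easier to follow.

```typescript
// Sketch only: the feed contract implied by the price updater's calls.
interface PriceHistory {
  [timestamp: number]: { [currency: string]: number };
}

interface PriceFeed {
  name: string;          // used in log messages, e.g. 'Kraken'
  currencies: string[];  // fiat tickers this feed can quote, e.g. ['USD', 'EUR']

  // Current BTC price in one currency; non-positive values are ignored by the updater.
  $fetchPrice(currency: string): Promise<number>;

  // Recent prices keyed by unix timestamp, at hourly or daily granularity.
  $fetchRecentPrice(currencies: string[], type: 'hour' | 'day'): Promise<PriceHistory>;
}
```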
63 backend/src/utils/axios-query.ts Normal file
@@ -0,0 +1,63 @@
import axios, { AxiosResponse } from 'axios';
import { SocksProxyAgent } from 'socks-proxy-agent';
import backendInfo from '../api/backend-info';
import config from '../config';
import logger from '../logger';
import * as https from 'https';

export async function query(path: string): Promise<object | undefined> {
  type axiosOptions = {
    headers: {
      'User-Agent': string
    };
    timeout: number;
    httpsAgent?: https.Agent;
  };
  const setDelay = (secs: number = 1): Promise<void> => new Promise(resolve => setTimeout(() => resolve(), secs * 1000));
  const axiosOptions: axiosOptions = {
    headers: {
      'User-Agent': (config.MEMPOOL.USER_AGENT === 'mempool') ? `mempool/v${backendInfo.getBackendInfo().version}` : `${config.MEMPOOL.USER_AGENT}`
    },
    timeout: config.SOCKS5PROXY.ENABLED ? 30000 : 10000
  };
  let retry = 0;

  while (retry < config.MEMPOOL.EXTERNAL_MAX_RETRY) {
    try {
      if (config.SOCKS5PROXY.ENABLED) {
        const socksOptions: any = {
          agentOptions: {
            keepAlive: true,
          },
          hostname: config.SOCKS5PROXY.HOST,
          port: config.SOCKS5PROXY.PORT
        };

        if (config.SOCKS5PROXY.USERNAME && config.SOCKS5PROXY.PASSWORD) {
          socksOptions.username = config.SOCKS5PROXY.USERNAME;
          socksOptions.password = config.SOCKS5PROXY.PASSWORD;
        } else {
          // Retry with different tor circuits https://stackoverflow.com/a/64960234
          socksOptions.username = `circuit${retry}`;
        }

        axiosOptions.httpsAgent = new SocksProxyAgent(socksOptions);
      }

      const data: AxiosResponse = await axios.get(path, axiosOptions);
      if (data.statusText === 'error' || !data.data) {
        throw new Error(`Could not fetch data from ${path}, Error: ${data.status}`);
      }
      return data.data;
    } catch (e) {
      logger.warn(`Could not connect to ${path} (Attempt ${retry + 1}/${config.MEMPOOL.EXTERNAL_MAX_RETRY}). Reason: ` + (e instanceof Error ? e.message : e));
      retry++;
    }
    if (retry < config.MEMPOOL.EXTERNAL_MAX_RETRY) {
      await setDelay(config.MEMPOOL.EXTERNAL_RETRY_INTERVAL);
    }
  }

  logger.err(`Could not connect to ${path}. All ${config.MEMPOOL.EXTERNAL_MAX_RETRY} attempts failed`);
  return undefined;
}
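A hypothetical caller, for illustration only: a price feed fetching a ticker through the shared query() helper above. The endpoint URL and the response field name ('last') are assumptions, not a real feed definition from the repository.

```typescript
import { query } from '../utils/axios-query';

async function fetchExamplePrice(currency: string): Promise<number> {
  const response: any = await query(`https://api.example.com/btc/ticker?fiat=${currency}`);
  if (response === undefined) {
    return -1; // all retries failed; callers treat non-positive values as "no data"
  }
  return parseFloat(response.last);
}
```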
@@ -3,14 +3,14 @@ import { BlockExtended } from '../mempool.interfaces';
 export function prepareBlock(block: any): BlockExtended {
   return <BlockExtended>{
     id: block.id ?? block.hash, // hash for indexed block
-    timestamp: block.timestamp ?? block.blockTimestamp, // blockTimestamp for indexed block
+    timestamp: block.timestamp ?? block.time ?? block.blockTimestamp, // blockTimestamp for indexed block
     height: block.height,
     version: block.version,
-    bits: block.bits,
+    bits: (typeof block.bits === 'string' ? parseInt(block.bits, 16) : block.bits),
     nonce: block.nonce,
     difficulty: block.difficulty,
-    merkle_root: block.merkle_root,
-    tx_count: block.tx_count,
+    merkle_root: block.merkle_root ?? block.merkleroot,
+    tx_count: block.tx_count ?? block.nTx,
     size: block.size,
     weight: block.weight,
     previousblockhash: block.previousblockhash,
@@ -27,6 +27,7 @@ export function prepareBlock(block: any): BlockExtended {
       name: block.pool_name,
       slug: block.pool_slug,
     } : undefined),
+    usd: block?.extras?.usd ?? block.usd ?? null,
   }
 };
}
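The new `bits` expression normalizes the two upstream formats: Bitcoin Core's RPC reports `bits` as a hex string, while Esplora reports it as a number. A minimal illustration (the sample values are examples, not taken from the repository):

```typescript
const fromCoreRpc = { bits: '1d00ffff' };   // hex string from bitcoind getblock
const fromEsplora = { bits: 486604799 };    // integer from an Esplora block object

const normalizeBits = (block: { bits: string | number }): number =>
  typeof block.bits === 'string' ? parseInt(block.bits, 16) : block.bits;

console.log(normalizeBits(fromCoreRpc) === normalizeBits(fromEsplora)); // true
```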
@@ -1,5 +1,6 @@
 {
   "compilerOptions": {
+    "types": ["node"],
     "module": "commonjs",
     "target": "esnext",
     "lib": ["es2019", "dom"],
@@ -11,7 +12,8 @@
     "typeRoots": [
       "node_modules/@types"
     ],
-    "allowSyntheticDefaultImports": true
+    "allowSyntheticDefaultImports": true,
+    "esModuleInterop": true
   },
   "include": [
     "src/**/*.ts"
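A minimal illustration of what the new compiler flags allow, not taken from the repository: with "esModuleInterop" (and "allowSyntheticDefaultImports"), CommonJS packages such as axios can be imported with default-import syntax, which is the style used by the new axios-query.ts above. The request below is only an example.

```typescript
import axios from 'axios'; // default import of a CommonJS module, enabled by esModuleInterop

axios.get('https://mempool.space/api/blocks/tip/height')
  .then((res) => console.log(res.data));
```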
@@ -1,137 +0,0 @@
|
||||
{
|
||||
"rules": {
|
||||
"arrow-return-shorthand": true,
|
||||
"callable-types": true,
|
||||
"class-name": true,
|
||||
"comment-format": [
|
||||
true,
|
||||
"check-space"
|
||||
],
|
||||
"curly": true,
|
||||
"deprecation": {
|
||||
"severity": "warn"
|
||||
},
|
||||
"eofline": true,
|
||||
"forin": false,
|
||||
"import-blacklist": [
|
||||
true,
|
||||
"rxjs",
|
||||
"rxjs/Rx"
|
||||
],
|
||||
"import-spacing": true,
|
||||
"indent": [
|
||||
true,
|
||||
"spaces"
|
||||
],
|
||||
"interface-over-type-literal": true,
|
||||
"label-position": true,
|
||||
"max-line-length": [
|
||||
true,
|
||||
140
|
||||
],
|
||||
"member-access": false,
|
||||
"member-ordering": [
|
||||
true,
|
||||
{
|
||||
"order": [
|
||||
"static-field",
|
||||
"instance-field",
|
||||
"static-method",
|
||||
"instance-method"
|
||||
]
|
||||
}
|
||||
],
|
||||
"no-arg": true,
|
||||
"no-bitwise": true,
|
||||
"no-console": [
|
||||
true,
|
||||
"debug",
|
||||
"info",
|
||||
"time",
|
||||
"timeEnd",
|
||||
"trace"
|
||||
],
|
||||
"no-construct": true,
|
||||
"no-debugger": true,
|
||||
"no-duplicate-super": true,
|
||||
"no-empty": false,
|
||||
"no-empty-interface": true,
|
||||
"no-eval": true,
|
||||
"no-inferrable-types": false,
|
||||
"no-misused-new": true,
|
||||
"no-non-null-assertion": true,
|
||||
"no-shadowed-variable": true,
|
||||
"no-string-literal": false,
|
||||
"no-string-throw": true,
|
||||
"no-switch-case-fall-through": true,
|
||||
"no-trailing-whitespace": true,
|
||||
"no-unnecessary-initializer": true,
|
||||
"no-unused-expression": true,
|
||||
"no-use-before-declare": true,
|
||||
"no-var-keyword": true,
|
||||
"object-literal-sort-keys": false,
|
||||
"one-line": [
|
||||
true,
|
||||
"check-open-brace",
|
||||
"check-catch",
|
||||
"check-else",
|
||||
"check-whitespace"
|
||||
],
|
||||
"prefer-const": true,
|
||||
"quotemark": [
|
||||
true,
|
||||
"single"
|
||||
],
|
||||
"radix": true,
|
||||
"semicolon": [
|
||||
true,
|
||||
"always"
|
||||
],
|
||||
"triple-equals": [
|
||||
true,
|
||||
"allow-null-check"
|
||||
],
|
||||
"typedef-whitespace": [
|
||||
true,
|
||||
{
|
||||
"call-signature": "nospace",
|
||||
"index-signature": "nospace",
|
||||
"parameter": "nospace",
|
||||
"property-declaration": "nospace",
|
||||
"variable-declaration": "nospace"
|
||||
}
|
||||
],
|
||||
"unified-signatures": true,
|
||||
"variable-name": false,
|
||||
"whitespace": [
|
||||
true,
|
||||
"check-branch",
|
||||
"check-decl",
|
||||
"check-operator",
|
||||
"check-separator",
|
||||
"check-type"
|
||||
],
|
||||
"directive-selector": [
|
||||
true,
|
||||
"attribute",
|
||||
"app",
|
||||
"camelCase"
|
||||
],
|
||||
"component-selector": [
|
||||
true,
|
||||
"element",
|
||||
"app",
|
||||
"kebab-case"
|
||||
],
|
||||
"no-output-on-prefix": true,
|
||||
"use-input-property-decorator": true,
|
||||
"use-output-property-decorator": true,
|
||||
"use-host-property-decorator": true,
|
||||
"no-input-rename": true,
|
||||
"no-output-rename": true,
|
||||
"use-life-cycle-interface": true,
|
||||
"use-pipe-transform-interface": true,
|
||||
"component-class-suffix": true,
|
||||
"directive-class-suffix": true
|
||||
}
|
||||
}
|
||||
3 contributors/erikarvstedt.txt Normal file
@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 6, 2022.

Signed: erikarvstedt
3 contributors/oleonardolima.txt Normal file
@@ -0,0 +1,3 @@
I hereby accept the terms of the Contributor License Agreement in the CONTRIBUTING.md file of the mempool/mempool git repository as of July 25, 2022.

Signed: oleonardolima
@@ -4,6 +4,8 @@ This directory contains the Dockerfiles used to build and release the official i

 If you are looking to use these Docker images to deploy your own instance of Mempool, note that they only containerize Mempool's frontend and backend. You will still need to deploy and configure Bitcoin Core and an Electrum Server separately, along with any other utilities specific to your use case (e.g., a reverse proxy, etc). Such configuration is mostly beyond the scope of the Mempool project, so please only proceed if you know what you're doing.

+See a video guide of this installation method by k3tan [on BitcoinTV.com](https://bitcointv.com/w/8fpAx6rf5CQ16mMhospwjg).
+
 Jump to a section in this doc:
 - [Configure with Bitcoin Core Only](#configure-with-bitcoin-core-only)
 - [Configure with Bitcoin Core + Electrum Server](#configure-with-bitcoin-core--electrum-server)
@@ -100,7 +102,9 @@ Below we list all settings from `mempool-config.json` and the corresponding over
   "PRICE_FEED_UPDATE_INTERVAL": 600,
   "USE_SECOND_NODE_FOR_MINFEE": false,
   "EXTERNAL_ASSETS": ["https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json"],
-  "STDOUT_LOG_MIN_PRIORITY": "info"
+  "STDOUT_LOG_MIN_PRIORITY": "info",
+  "POOLS_JSON_URL": "https://raw.githubusercontent.com/mempool/mining-pools/master/pools.json",
+  "POOLS_JSON_TREE_URL": "https://api.github.com/repos/mempool/mining-pools/git/trees/master"
 },
 ```

@@ -124,6 +128,8 @@ Corresponding `docker-compose.yml` overrides:
       MEMPOOL_USE_SECOND_NODE_FOR_MINFEE: ""
       MEMPOOL_EXTERNAL_ASSETS: ""
       MEMPOOL_STDOUT_LOG_MIN_PRIORITY: ""
+      MEMPOOL_POOLS_JSON_URL: ""
+      MEMPOOL_POOLS_JSON_TREE_URL: ""
       ...
 ```

@@ -233,7 +239,7 @@ Corresponding `docker-compose.yml` overrides:
       DATABASE_HOST: ""
       DATABASE_PORT: ""
       DATABASE_DATABASE: ""
-      DATABASE_USERAME: ""
+      DATABASE_USERNAME: ""
       DATABASE_PASSWORD: ""
       ...
 ```
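The new POOLS_JSON_URL setting points the backend at the mining-pools definitions listed above. A sketch of how it could be consumed, reusing the query() helper added earlier in this diff; apart from query() and the POOLS_JSON_URL key itself, the function and property names here are illustrative assumptions.

```typescript
import { query } from '../utils/axios-query';
import config from '../config';

// Fetch the pools definition file configured via POOLS_JSON_URL
// (defaults to the mempool/mining-pools repository shown in the README above).
async function fetchPoolsJson(): Promise<object | undefined> {
  return query(config.MEMPOOL.POOLS_JSON_URL);
}
```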
@@ -1,4 +1,4 @@
-FROM node:16.15.0-buster-slim AS builder
+FROM node:16.16.0-buster-slim AS builder

 ARG commitHash
 ENV DOCKER_COMMIT_HASH=${commitHash}
@@ -8,10 +8,10 @@ COPY . .

 RUN apt-get update
 RUN apt-get install -y build-essential python3 pkg-config
-RUN npm install
+RUN npm install --omit=dev --omit=optional
 RUN npm run build

-FROM node:16.15.0-buster-slim
+FROM node:16.16.0-buster-slim

 WORKDIR /backend

@@ -19,7 +19,9 @@
     "EXTERNAL_RETRY_INTERVAL": __MEMPOOL_EXTERNAL_RETRY_INTERVAL__,
     "USER_AGENT": "__MEMPOOL_USER_AGENT__",
     "STDOUT_LOG_MIN_PRIORITY": "__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__",
-    "INDEXING_BLOCKS_AMOUNT": __MEMPOOL_INDEXING_BLOCKS_AMOUNT__
+    "INDEXING_BLOCKS_AMOUNT": __MEMPOOL_INDEXING_BLOCKS_AMOUNT__,
+    "BLOCKS_SUMMARIES_INDEXING": __MEMPOOL_BLOCKS_SUMMARIES_INDEXING__,
+    "AUTOMATIC_BLOCK_REINDEXING": __MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__
   },
   "CORE_RPC": {
     "HOST": "__CORE_RPC_HOST__",
@@ -14,6 +14,7 @@ __MEMPOOL_BLOCK_WEIGHT_UNITS__=${MEMPOOL_BLOCK_WEIGHT_UNITS:=4000000}
|
||||
__MEMPOOL_INITIAL_BLOCKS_AMOUNT__=${MEMPOOL_INITIAL_BLOCKS_AMOUNT:=8}
|
||||
__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__=${MEMPOOL_MEMPOOL_BLOCKS_AMOUNT:=8}
|
||||
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=11000}
|
||||
__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__=${MEMPOOL_BLOCKS_SUMMARIES_INDEXING:=false}
|
||||
__MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__=${MEMPOOL_PRICE_FEED_UPDATE_INTERVAL:=600}
|
||||
__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__=${MEMPOOL_USE_SECOND_NODE_FOR_MINFEE:=false}
|
||||
__MEMPOOL_EXTERNAL_ASSETS__=${MEMPOOL_EXTERNAL_ASSETS:=[]}
|
||||
@@ -21,6 +22,10 @@ __MEMPOOL_EXTERNAL_MAX_RETRY__=${MEMPOOL_EXTERNAL_MAX_RETRY:=1}
|
||||
__MEMPOOL_EXTERNAL_RETRY_INTERVAL__=${MEMPOOL_EXTERNAL_RETRY_INTERVAL:=0}
|
||||
__MEMPOOL_USER_AGENT__=${MEMPOOL_USER_AGENT:=mempool}
|
||||
__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__=${MEMPOOL_STDOUT_LOG_MIN_PRIORITY:=info}
|
||||
__MEMPOOL_INDEXING_BLOCKS_AMOUNT__=${MEMPOOL_INDEXING_BLOCKS_AMOUNT:=false}
|
||||
__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__=${MEMPOOL_AUTOMATIC_BLOCK_REINDEXING:=false}
|
||||
__MEMPOOL_POOLS_JSON_URL__=${MEMPOOL_POOLS_JSON_URL:=false}
|
||||
__MEMPOOL_POOLS_JSON_TREE_URL__=${MEMPOOL_POOLS_JSON_TREE_URL:=false}
|
||||
|
||||
# CORE_RPC
|
||||
__CORE_RPC_HOST__=${CORE_RPC_HOST:=127.0.0.1}
|
||||
@@ -101,10 +106,18 @@ sed -i "s/__MEMPOOL_BLOCK_WEIGHT_UNITS__/${__MEMPOOL_BLOCK_WEIGHT_UNITS__}/g" me
|
||||
sed -i "s/__MEMPOOL_INITIAL_BLOCKS_AMOUNT__/${__MEMPOOL_INITIAL_BLOCKS_AMOUNT__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__/${__MEMPOOL_MEMPOOL_BLOCKS_AMOUNT__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__/${__MEMPOOL_BLOCKS_SUMMARIES_INDEXING__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__/${__MEMPOOL_PRICE_FEED_UPDATE_INTERVAL__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__/${__MEMPOOL_USE_SECOND_NODE_FOR_MINFEE__}/g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_EXTERNAL_ASSETS__!${__MEMPOOL_EXTERNAL_ASSETS__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_EXTERNAL_MAX_RETRY__!${__MEMPOOL_EXTERNAL_MAX_RETRY__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_EXTERNAL_RETRY_INTERVAL__!${__MEMPOOL_EXTERNAL_RETRY_INTERVAL__}!g" mempool-config.json
|
||||
sed -i "s!__MEMPOOL_USER_AGENT__!${__MEMPOOL_USER_AGENT__}!g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__/${__MEMPOOL_STDOUT_LOG_MIN_PRIORITY__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_INDEXING_BLOCKS_AMOUNT__/${__MEMPOOL_INDEXING_BLOCKS_AMOUNT__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__/${__MEMPOOL_AUTOMATIC_BLOCK_REINDEXING__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_POOLS_JSON_URL__/${__MEMPOOL_POOLS_JSON_URL__}/g" mempool-config.json
|
||||
sed -i "s/__MEMPOOL_POOLS_JSON_TREE_URL__/${__MEMPOOL_POOLS_JSON_TREE_URL__}/g" mempool-config.json
|
||||
|
||||
sed -i "s/__CORE_RPC_HOST__/${__CORE_RPC_HOST__}/g" mempool-config.json
|
||||
sed -i "s/__CORE_RPC_PORT__/${__CORE_RPC_PORT__}/g" mempool-config.json
|
||||
@@ -144,6 +157,7 @@ sed -i "s/__BISQ_ENABLED__/${__BISQ_ENABLED__}/g" mempool-config.json
|
||||
sed -i "s!__BISQ_DATA_PATH__!${__BISQ_DATA_PATH__}!g" mempool-config.json
|
||||
|
||||
sed -i "s/__SOCKS5PROXY_ENABLED__/${__SOCKS5PROXY_ENABLED__}/g" mempool-config.json
|
||||
sed -i "s/__SOCKS5PROXY_USE_ONION__/${__SOCKS5PROXY_USE_ONION__}/g" mempool-config.json
|
||||
sed -i "s/__SOCKS5PROXY_HOST__/${__SOCKS5PROXY_HOST__}/g" mempool-config.json
|
||||
sed -i "s/__SOCKS5PROXY_PORT__/${__SOCKS5PROXY_PORT__}/g" mempool-config.json
|
||||
sed -i "s/__SOCKS5PROXY_USERNAME__/${__SOCKS5PROXY_USERNAME__}/g" mempool-config.json
|
||||
@@ -152,4 +166,11 @@ sed -i "s/__SOCKS5PROXY_PASSWORD__/${__SOCKS5PROXY_PASSWORD__}/g" mempool-config
|
||||
sed -i "s!__PRICE_DATA_SERVER_TOR_URL__!${__PRICE_DATA_SERVER_TOR_URL__}!g" mempool-config.json
|
||||
sed -i "s!__PRICE_DATA_SERVER_CLEARNET_URL__!${__PRICE_DATA_SERVER_CLEARNET_URL__}!g" mempool-config.json
|
||||
|
||||
sed -i "s!__EXTERNAL_DATA_SERVER_MEMPOOL_API__!${__EXTERNAL_DATA_SERVER_MEMPOOL_API__}!g" mempool-config.json
|
||||
sed -i "s!__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__!${__EXTERNAL_DATA_SERVER_MEMPOOL_ONION__}!g" mempool-config.json
|
||||
sed -i "s!__EXTERNAL_DATA_SERVER_LIQUID_API__!${__EXTERNAL_DATA_SERVER_LIQUID_API__}!g" mempool-config.json
|
||||
sed -i "s!__EXTERNAL_DATA_SERVER_LIQUID_ONION__!${__EXTERNAL_DATA_SERVER_LIQUID_ONION__}!g" mempool-config.json
|
||||
sed -i "s!__EXTERNAL_DATA_SERVER_BISQ_URL__!${__EXTERNAL_DATA_SERVER_BISQ_URL__}!g" mempool-config.json
|
||||
sed -i "s!__EXTERNAL_DATA_SERVER_BISQ_ONION__!${__EXTERNAL_DATA_SERVER_BISQ_ONION__}!g" mempool-config.json
|
||||
|
||||
node /backend/dist/index.js
|
||||
|
||||
@@ -1,4 +1,4 @@
-FROM node:16.15.0-buster-slim AS builder
+FROM node:16.16.0-buster-slim AS builder

 ARG commitHash
 ENV DOCKER_COMMIT_HASH=${commitHash}
@@ -8,7 +8,7 @@ WORKDIR /build
 COPY . .
 RUN apt-get update
 RUN apt-get install -y build-essential rsync
-RUN npm i
+RUN npm install --omit=dev --omit=optional
 RUN npm run build

 FROM nginx:1.17.8-alpine
@@ -8,6 +8,10 @@ indent_size = 2
 insert_final_newline = true
 trim_trailing_whitespace = true

+[*.ts]
+quote_type = single
+
 [*.md]
 max_line_length = off
 trim_trailing_whitespace = false

3 frontend/.eslintignore Normal file
@@ -0,0 +1,3 @@
node_modules
dist
frontend
37 frontend/.eslintrc Normal file
@@ -0,0 +1,37 @@
{
  "root": true,
  "parser": "@typescript-eslint/parser",
  "plugins": [
    "@typescript-eslint"
  ],
  "extends": [
    "eslint:recommended",
    "plugin:@typescript-eslint/eslint-recommended",
    "plugin:@typescript-eslint/recommended"
  ],
  "rules": {
    "@typescript-eslint/ban-ts-comment": 1,
    "@typescript-eslint/ban-types": 1,
    "@typescript-eslint/no-empty-function": 1,
    "@typescript-eslint/no-explicit-any": 1,
    "@typescript-eslint/no-inferrable-types": 0,
    "@typescript-eslint/no-namespace": 1,
    "@typescript-eslint/no-this-alias": 1,
    "@typescript-eslint/no-var-requires": 1,
    "@typescript-eslint/explicit-function-return-type": 1,
    "no-case-declarations": 1,
    "no-console": 1,
    "no-constant-condition": 1,
    "no-dupe-else-if": 1,
    "no-empty": 1,
    "no-extra-boolean-cast": 1,
    "no-prototype-builtins": 1,
    "no-self-assign": 1,
    "no-useless-catch": 1,
    "no-var": 1,
    "prefer-const": 1,
    "prefer-rest-params": 1,
    "quotes": [1, "single", { "allowTemplateLiterals": true }],
    "semi": 1
  }
}
2 frontend/.prettierignore Normal file
@@ -0,0 +1,2 @@
node_modules
package-lock.json
6 frontend/.prettierrc Normal file
@@ -0,0 +1,6 @@
{
  "endOfLine": "lf",
  "printWidth": 80,
  "tabWidth": 2,
  "trailingComma": "es5"
}
@@ -34,7 +34,7 @@ $ npm run config:defaults:bisq

 ### 3. Run the Frontend

-_Make sure to use Node.js 16.15 and npm 7._
+_Make sure to use Node.js 16.10 and npm 7._

 Install project dependencies and run the frontend server:

@@ -71,13 +71,13 @@ Set up the [Mempool backend](../backend/) first, if you haven't already.

 ### 1. Build the Frontend

-_Node.js 16 and npm 7 are recommended._
+_Make sure to use Node.js 16.10 and npm 7._

 Build the frontend:

 ```
 cd frontend
-npm install # add --prod for production
+npm install
 npm run build
 ```

@@ -248,23 +248,6 @@
|
||||
"browserTarget": "mempool:build"
|
||||
}
|
||||
},
|
||||
"test": {
|
||||
"builder": "@angular-devkit/build-angular:karma",
|
||||
"options": {
|
||||
"main": "src/test.ts",
|
||||
"polyfills": "src/polyfills.ts",
|
||||
"tsConfig": "tsconfig.spec.json",
|
||||
"karmaConfig": "karma.conf.js",
|
||||
"assets": [
|
||||
"src/favicon.ico",
|
||||
"src/resources"
|
||||
],
|
||||
"styles": [
|
||||
"src/styles.scss"
|
||||
],
|
||||
"scripts": []
|
||||
}
|
||||
},
|
||||
"e2e": {
|
||||
"builder": "@cypress/schematic:cypress",
|
||||
"options": {
|
||||
|
||||
@@ -35,21 +35,23 @@ const getRectangle = ($el) => $el[0].getBoundingClientRect();
|
||||
describe('Mainnet', () => {
|
||||
beforeEach(() => {
|
||||
//cy.intercept('/sockjs-node/info*').as('socket');
|
||||
cy.intercept('/api/block-height/*').as('block-height');
|
||||
cy.intercept('/api/block/*').as('block');
|
||||
cy.intercept('/api/block/*/txs/0').as('block-txs');
|
||||
cy.intercept('/api/tx/*/outspends').as('tx-outspends');
|
||||
cy.intercept('/resources/pools.json').as('pools');
|
||||
// cy.intercept('/api/block-height/*').as('block-height');
|
||||
// cy.intercept('/api/v1/block/*').as('block');
|
||||
// cy.intercept('/api/block/*/txs/0').as('block-txs');
|
||||
// cy.intercept('/api/v1/block/*/summary').as('block-summary');
|
||||
// cy.intercept('/api/v1/outspends/*').as('outspends');
|
||||
// cy.intercept('/api/tx/*/outspends').as('tx-outspends');
|
||||
// cy.intercept('/resources/pools.json').as('pools');
|
||||
|
||||
// Search Auto Complete
|
||||
cy.intercept('/api/address-prefix/1wiz').as('search-1wiz');
|
||||
cy.intercept('/api/address-prefix/1wizS').as('search-1wizS');
|
||||
cy.intercept('/api/address-prefix/1wizSA').as('search-1wizSA');
|
||||
|
||||
Cypress.Commands.add('waitForBlockData', () => {
|
||||
cy.wait('@tx-outspends');
|
||||
cy.wait('@pools');
|
||||
});
|
||||
// Cypress.Commands.add('waitForBlockData', () => {
|
||||
// cy.wait('@tx-outspends');
|
||||
// cy.wait('@pools');
|
||||
// });
|
||||
});
|
||||
|
||||
if (baseModule === 'mempool') {
|
||||
@@ -121,20 +123,20 @@ describe('Mainnet', () => {
|
||||
cy.visit('/');
|
||||
cy.get('.search-box-container > .form-control').type('1wiz').then(() => {
|
||||
cy.wait('@search-1wiz');
|
||||
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 10);
|
||||
cy.get('app-search-results button.dropdown-item').should('have.length', 10);
|
||||
});
|
||||
|
||||
cy.get('.search-box-container > .form-control').type('S').then(() => {
|
||||
cy.wait('@search-1wizS');
|
||||
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 5);
|
||||
cy.get('app-search-results button.dropdown-item').should('have.length', 5);
|
||||
});
|
||||
|
||||
cy.get('.search-box-container > .form-control').type('A').then(() => {
|
||||
cy.wait('@search-1wizSA');
|
||||
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 1)
|
||||
cy.get('app-search-results button.dropdown-item').should('have.length', 1)
|
||||
});
|
||||
|
||||
cy.get('ngb-typeahead-window button.dropdown-item.active').click().then(() => {
|
||||
cy.get('app-search-results button.dropdown-item.active').click().then(() => {
|
||||
cy.url().should('include', '/address/1wizSAYSbuyXbt9d8JV8ytm5acqq2TorC');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('.text-center').should('not.have.text', 'Invalid Bitcoin address');
|
||||
@@ -145,8 +147,8 @@ describe('Mainnet', () => {
|
||||
it(`allows searching for partial case insensitive bech32m addresses: ${searchTerm}`, () => {
|
||||
cy.visit('/');
|
||||
cy.get('.search-box-container > .form-control').type(searchTerm).then(() => {
|
||||
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 1);
|
||||
cy.get('ngb-typeahead-window button.dropdown-item.active').click().then(() => {
|
||||
cy.get('app-search-results button.dropdown-item').should('have.length', 1);
|
||||
cy.get('app-search-results button.dropdown-item.active').click().then(() => {
|
||||
cy.url().should('include', '/address/bc1pqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyqszqgpqyqsyjer9e');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('.text-center').should('not.have.text', 'Invalid Bitcoin address');
|
||||
@@ -159,8 +161,8 @@ describe('Mainnet', () => {
|
||||
it(`allows searching for partial case insensitive bech32 addresses: ${searchTerm}`, () => {
|
||||
cy.visit('/');
|
||||
cy.get('.search-box-container > .form-control').type(searchTerm).then(() => {
|
||||
cy.get('ngb-typeahead-window button.dropdown-item').should('have.length', 1);
|
||||
cy.get('ngb-typeahead-window button.dropdown-item.active').click().then(() => {
|
||||
cy.get('app-search-results button.dropdown-item').should('have.length', 1);
|
||||
cy.get('app-search-results button.dropdown-item.active').click().then(() => {
|
||||
cy.url().should('include', '/address/bc1q000375vxcuf5v04lmwy22vy2thvhqkxghgq7dy');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('.text-center').should('not.have.text', 'Invalid Bitcoin address');
|
||||
@@ -409,7 +411,7 @@ describe('Mainnet', () => {
|
||||
|
||||
it('loads the tv screen - desktop', () => {
|
||||
cy.viewport('macbook-16');
|
||||
cy.visit('/');
|
||||
cy.visit('/graphs/mempool');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('#btn-tv').click().then(() => {
|
||||
cy.viewport('macbook-16');
|
||||
|
||||
171 frontend/cypress/e2e/mainnet/mining.spec.ts Normal file
@@ -0,0 +1,171 @@
|
||||
const baseModule = Cypress.env("BASE_MODULE");
|
||||
|
||||
describe('Mainnet - Mining Features', () => {
|
||||
beforeEach(() => {
|
||||
//https://github.com/cypress-io/cypress/issues/14459
|
||||
if (Cypress.browser.family === 'chromium') {
|
||||
Cypress.automation('remote:debugger:protocol', {
|
||||
command: 'Network.enable',
|
||||
params: {}
|
||||
});
|
||||
Cypress.automation('remote:debugger:protocol', {
|
||||
command: 'Network.setCacheDisabled',
|
||||
params: { cacheDisabled: true }
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
if (baseModule === 'mempool') {
|
||||
|
||||
describe('Miner page', () => {
|
||||
beforeEach(() => {
|
||||
cy.intercept('/api/v1/mining/pool/**').as('pool');
|
||||
cy.intercept('/api/v1/mining/hashrate/pools/**').as('hashrate');
|
||||
cy.intercept('/api/tx/**').as('tx');
|
||||
cy.intercept('/api/v1/outspends/**').as('outspends');
|
||||
});
|
||||
it('loads the mining pool page from the dashboard', () => {
|
||||
cy.visit('/mining');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('[data-cy="bitcoin-block-0-pool"]').click().then(() => {
|
||||
cy.waitForSkeletonGone();
|
||||
cy.wait('@pool');
|
||||
cy.url().should('match', /\/mining\/pool\/(\w+)/);
|
||||
});
|
||||
});
|
||||
|
||||
it('loads the mining pool page from the blocks page', () => {
|
||||
cy.visit('/mining');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('[data-cy="bitcoin-block-0-height"]').click().then(() => {
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('[data-cy="block-details-miner-badge"]').click().then(() => {
|
||||
cy.waitForSkeletonGone();
|
||||
cy.wait('@pool');
|
||||
cy.url().should('match', /\/mining\/pool\/(\w+)/);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Mining Dashboard Landing page widgets', () => {
|
||||
|
||||
beforeEach(() => {
|
||||
cy.visit('/mining');
|
||||
cy.waitForSkeletonGone();
|
||||
});
|
||||
|
||||
it('shows the mempool blocks', () => {
|
||||
cy.get('[data-cy="mempool-block-0-fees"]').invoke('text').should('match', /~(.*) sat\/vB/);
|
||||
cy.get('[data-cy="mempool-block-0-fee-span"]').invoke('text').should('match', /(.*) - (.*) sat\/vB/);
|
||||
cy.get('[data-cy="mempool-block-0-total-fees"]').invoke('text').should('match', /(.*) BTC/);
|
||||
cy.get('[data-cy="mempool-block-0-transaction-count"]').invoke('text').should('match', /(.*) transactions/);
|
||||
cy.get('[data-cy="mempool-block-0-time"]').invoke('text').should('match', /In ~(.*) minutes/);
|
||||
});
|
||||
|
||||
it('shows the mined blocks', () => {
|
||||
cy.get('[data-cy="bitcoin-block-0-height"]').invoke('text').should('match', /(\d)/);
|
||||
cy.get('[data-cy="bitcoin-block-0-fees"]').invoke('text').should('match', /~(.*) sat\/vB/);
|
||||
cy.get('[data-cy="bitcoin-block-0-fee-span"]').invoke('text').should('match', /(.*) - (.*) sat\/vB/);
|
||||
cy.get('[data-cy="bitcoin-block-0-total-fees"]').invoke('text').should('match', /(.*) BTC/);
|
||||
cy.get('[data-cy="bitcoin-block-0-transactions"]').invoke('text').should('match', /(.*) transactions/);
|
||||
cy.get('[data-cy="bitcoin-block-0-time"]').invoke('text').should('match', /((.*) ago|Just now)/);
|
||||
cy.get('[data-cy="bitcoin-block-0-pool"]').invoke('text').should('match', /(\w)/);
|
||||
});
|
||||
|
||||
it('shows the reward stats for the last 144 blocks', () => {
|
||||
cy.get('[data-cy="reward-stats"]');
|
||||
});
|
||||
|
||||
it('shows the difficulty adjustment stats', () => {
|
||||
cy.get('[data-cy="difficulty-adjustment"]');
|
||||
});
|
||||
|
||||
it('shows the latest blocks', () => {
|
||||
cy.get('[data-cy="latest-blocks"]');
|
||||
});
|
||||
|
||||
it('shows the pools pie chart', () => {
|
||||
cy.get('[data-cy="pool-distribution"]');
|
||||
});
|
||||
|
||||
it('shows the hashrate graph', () => {
|
||||
cy.get('[data-cy="hashrate-graph"]');
|
||||
});
|
||||
it('shows the latest blocks', () => {
|
||||
cy.get('[data-cy="latest-blocks"]');
|
||||
});
|
||||
|
||||
it('shows the latest adjustments', () => {
|
||||
cy.get('[data-cy="difficulty-adjustments-table"]');
|
||||
});
|
||||
});
|
||||
|
||||
describe.only('mining graphs', () => {
|
||||
describe('pools ranking', () => {
|
||||
it('loads the graph', () => {
|
||||
cy.visit('/graphs/mining/pools');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.waitForPageIdle();
|
||||
cy.get('.spinner-border').should('not.exist');
|
||||
});
|
||||
});
|
||||
|
||||
describe('pools dominance', () => {
|
||||
it('loads the graph', () => {
|
||||
cy.visit('/graphs/mining/pools-dominance');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.waitForPageIdle();
|
||||
cy.get('.spinner-border').should('not.exist');
|
||||
});
|
||||
});
|
||||
|
||||
describe('hashrate & difficulty', () => {
|
||||
it('loads the graph', () => {
|
||||
cy.visit('/graphs/mining/hashrate-difficulty');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.waitForPageIdle();
|
||||
cy.get('.spinner-border').should('not.exist');
|
||||
});
|
||||
});
|
||||
|
||||
describe('block fee rates', () => {
|
||||
it('loads the graph', () => {
|
||||
cy.visit('/graphs/mining/block-fee-rates');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.waitForPageIdle();
|
||||
cy.get('.spinner-border').should('not.exist');
|
||||
});
|
||||
});
|
||||
|
||||
describe('block fees', () => {
|
||||
it('loads the graph', () => {
|
||||
cy.visit('/graphs/mining/block-fees');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.waitForPageIdle();
|
||||
cy.get('.spinner-border').should('not.exist');
|
||||
});
|
||||
});
|
||||
|
||||
describe('block rewards', () => {
|
||||
it('loads the graph', () => {
|
||||
cy.visit('/graphs/mining/block-rewards');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.waitForPageIdle();
|
||||
cy.get('.spinner-border').should('not.exist');
|
||||
});
|
||||
});
|
||||
|
||||
describe('block sizes and weights', () => {
|
||||
it('loads the graph', () => {
|
||||
cy.visit('/graphs/mining/block-sizes-weights');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.waitForPageIdle();
|
||||
cy.get('.spinner-border').should('not.exist');
|
||||
});
|
||||
});
|
||||
});
|
||||
} else {
|
||||
it.skip(`Tests cannot be run on the selected BASE_MODULE ${baseModule}`);
|
||||
}
|
||||
});
|
||||
@@ -60,10 +60,10 @@ describe('Signet', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('tv mode', () => {
|
||||
describe.skip('tv mode', () => {
|
||||
it('loads the tv screen - desktop', () => {
|
||||
cy.viewport('macbook-16');
|
||||
cy.visit('/signet');
|
||||
cy.visit('/signet/graphs');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('#btn-tv').click().then(() => {
|
||||
cy.get('.chart-holder').should('be.visible');
|
||||
@@ -73,19 +73,17 @@ describe('Signet', () => {
|
||||
});
|
||||
|
||||
it('loads the tv screen - mobile', () => {
|
||||
cy.visit('/signet');
|
||||
cy.visit('/signet/graphs');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('#btn-tv').click().then(() => {
|
||||
cy.viewport('iphone-8');
|
||||
cy.get('.chart-holder').should('be.visible');
|
||||
cy.get('.tv-only').should('not.exist');
|
||||
//TODO: Remove comment when the bug is fixed
|
||||
//cy.get('#mempool-block-0').should('be.visible');
|
||||
cy.get('#mempool-block-0').should('be.visible');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
it('loads the api screen', () => {
|
||||
cy.visit('/signet');
|
||||
cy.waitForSkeletonGone();
|
||||
|
||||
@@ -63,18 +63,17 @@ describe('Testnet', () => {
|
||||
describe('tv mode', () => {
|
||||
it('loads the tv screen - desktop', () => {
|
||||
cy.viewport('macbook-16');
|
||||
cy.visit('/testnet');
|
||||
cy.visit('/testnet/graphs');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('#btn-tv').click().then(() => {
|
||||
cy.wait(1000);
|
||||
cy.get('.tv-only').should('not.exist');
|
||||
//TODO: Remove comment when the bug is fixed
|
||||
//cy.get('#mempool-block-0').should('be.visible');
|
||||
cy.get('#mempool-block-0').should('be.visible');
|
||||
});
|
||||
});
|
||||
|
||||
it('loads the tv screen - mobile', () => {
|
||||
cy.visit('/testnet');
|
||||
cy.visit('/testnet/graphs');
|
||||
cy.waitForSkeletonGone();
|
||||
cy.get('#btn-tv').click().then(() => {
|
||||
cy.viewport('iphone-6');
|
||||
|
||||
@@ -1,32 +0,0 @@
|
||||
// Karma configuration file, see link for more information
|
||||
// https://karma-runner.github.io/1.0/config/configuration-file.html
|
||||
|
||||
module.exports = function (config) {
|
||||
config.set({
|
||||
basePath: '',
|
||||
frameworks: ['jasmine', '@angular-devkit/build-angular'],
|
||||
plugins: [
|
||||
require('karma-jasmine'),
|
||||
require('karma-chrome-launcher'),
|
||||
require('karma-jasmine-html-reporter'),
|
||||
require('karma-coverage-istanbul-reporter'),
|
||||
require('@angular-devkit/build-angular/plugins/karma')
|
||||
],
|
||||
client: {
|
||||
clearContext: false // leave Jasmine Spec Runner output visible in browser
|
||||
},
|
||||
coverageIstanbulReporter: {
|
||||
dir: require('path').join(__dirname, './coverage/mempool'),
|
||||
reports: ['html', 'lcovonly', 'text-summary'],
|
||||
fixWebpackSourcePaths: true
|
||||
},
|
||||
reporters: ['progress', 'kjhtml'],
|
||||
port: 9876,
|
||||
colors: true,
|
||||
logLevel: config.LOG_INFO,
|
||||
autoWatch: true,
|
||||
browsers: ['Chrome'],
|
||||
singleRun: false,
|
||||
restartOnFileChange: true
|
||||
});
|
||||
};
|
||||
Some files were not shown because too many files have changed in this diff.