Merge pull request #2385 from mempool/nymkappa/bugfix/incorrect-log
Log correct maxmind mysql updates - fix stats import processed files counter
Commit e402be1dd2
@@ -4,11 +4,14 @@ import nodesApi from '../../../api/explorer/nodes.api';
 import config from '../../../config';
 import DB from '../../../database';
 import logger from '../../../logger';
+import { ResultSetHeader } from 'mysql2';
 import * as IPCheck from '../../../utils/ipcheck.js';

 export async function $lookupNodeLocation(): Promise<void> {
   let loggerTimer = new Date().getTime() / 1000;
   let progress = 0;
+  let nodesUpdated = 0;
+  let geoNamesInserted = 0;

   logger.info(`Running node location updater using Maxmind`);
   try {
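In the hunk below, each write now captures the query result, so the two new counters only advance when MySQL reports an actual change rather than on every query issued.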
@@ -71,51 +74,72 @@ export async function $lookupNodeLocation(): Promise<void> {
           city.location?.accuracy_radius,
           node.public_key
         ];
-        await DB.query(query, params);
+        let result = await DB.query<ResultSetHeader>(query, params);
+        if (result[0].changedRows ?? 0 > 0) {
+          ++nodesUpdated;
+        }

         // Store Continent
         if (city.continent?.geoname_id) {
-          await DB.query(
+          result = await DB.query<ResultSetHeader>(
             `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'continent', ?)`,
             [city.continent?.geoname_id, JSON.stringify(city.continent?.names)]);
+          if (result[0].changedRows ?? 0 > 0) {
+            ++geoNamesInserted;
+          }
         }

         // Store Country
         if (city.country?.geoname_id) {
-          await DB.query(
+          result = await DB.query<ResultSetHeader>(
             `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country', ?)`,
             [city.country?.geoname_id, JSON.stringify(city.country?.names)]);
+          if (result[0].changedRows ?? 0 > 0) {
+            ++geoNamesInserted;
+          }
         }

         // Store Country ISO code
         if (city.country?.iso_code) {
-          await DB.query(
+          result = await DB.query<ResultSetHeader>(
             `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'country_iso_code', ?)`,
             [city.country?.geoname_id, city.country?.iso_code]);
+          if (result[0].changedRows ?? 0 > 0) {
+            ++geoNamesInserted;
+          }
         }

         // Store Division
         if (city.subdivisions && city.subdivisions[0]) {
-          await DB.query(
+          result = await DB.query<ResultSetHeader>(
             `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'division', ?)`,
             [city.subdivisions[0].geoname_id, JSON.stringify(city.subdivisions[0]?.names)]);
+          if (result[0].changedRows ?? 0 > 0) {
+            ++geoNamesInserted;
+          }
         }

         // Store City
         if (city.city?.geoname_id) {
-          await DB.query(
+          result = await DB.query<ResultSetHeader>(
             `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'city', ?)`,
             [city.city?.geoname_id, JSON.stringify(city.city?.names)]);
+          if (result[0].changedRows ?? 0 > 0) {
+            ++geoNamesInserted;
+          }
         }

         // Store AS name
         if (isp?.autonomous_system_organization ?? asn?.autonomous_system_organization) {
-          await DB.query(
+          result = await DB.query<ResultSetHeader>(
             `INSERT IGNORE INTO geo_names (id, type, names) VALUES (?, 'as_organization', ?)`,
             [
               asOverwrite?.asn ?? isp?.autonomous_system_number ?? asn?.autonomous_system_number,
               JSON.stringify(asOverwrite?.name ?? isp?.isp ?? asn?.autonomous_system_organization)
             ]);
+          if (result[0].changedRows ?? 0 > 0) {
+            ++geoNamesInserted;
+          }
         }
       }

@@ -128,7 +152,12 @@ export async function $lookupNodeLocation(): Promise<void> {
         }
       }
     }
-    logger.info(`${progress} nodes location data updated`);
+    if (nodesUpdated > 0) {
+      logger.info(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
+    } else {
+      logger.debug(`${nodesUpdated} nodes maxmind data updated, ${geoNamesInserted} geo names inserted`);
+    }
   } catch (e) {
     logger.err('$lookupNodeLocation() error: ' + (e instanceof Error ? e.message : e));
   }
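A note on the guard used throughout the hunk above: in TypeScript the relational `>` binds tighter than `??`, so `result[0].changedRows ?? 0 > 0` parses as `result[0].changedRows ?? (0 > 0)`. It still behaves as intended (a nonzero changedRows is truthy; a missing one falls back to false), but `(result[0].changedRows ?? 0) > 0` says the same thing unambiguously. As background, here is a minimal standalone sketch of the technique, using a hypothetical table and placeholder connection settings rather than anything from this patch:

import { createConnection } from 'mysql2/promise';
import { ResultSetHeader } from 'mysql2';

// Returns true only when the UPDATE really changed stored values.
async function updateNodeCity(publicKey: string, city: string): Promise<boolean> {
  const db = await createConnection({ host: 'localhost', user: 'root', database: 'test' }); // placeholders
  const [header] = await db.execute<ResultSetHeader>(
    'UPDATE nodes SET city = ? WHERE public_key = ?',
    [city, publicKey]
  );
  await db.end();
  // affectedRows counts rows matched by the WHERE clause;
  // changedRows counts rows whose values actually differ afterwards.
  return (header.changedRows ?? 0) > 0;
}

The remaining hunks are in the stats importer, where every early `continue` now bumps the processed-files counter.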
@@ -360,9 +360,11 @@ class LightningStatsImporter {
         fileContent = await fsPromises.readFile(`${this.topologiesFolder}/${filename}`, 'utf8');
       } catch (e: any) {
         if (e.errno == -1) { // EISDIR - Ignore directorie
+          totalProcessed++;
           continue;
         }
         logger.err(`Unable to open ${this.topologiesFolder}/${filename}`);
+        totalProcessed++;
         continue;
       }

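An aside on the `e.errno == -1` check in the context above (commented as EISDIR): numeric errno values vary by platform, while the string `e.code` is stable, and Node sets it to 'EISDIR' when readFile is given a directory. A small sketch of the portable check, independent of this patch:

import { promises as fsPromises } from 'fs';

// Returns the file's content, or null when the path is a directory.
async function readIfFile(path: string): Promise<string | null> {
  try {
    return await fsPromises.readFile(path, 'utf8');
  } catch (e: any) {
    if (e.code === 'EISDIR') {
      return null;
    }
    throw e; // anything else is a genuine error
  }
}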
@@ -372,6 +374,7 @@ class LightningStatsImporter {
         graph = await this.cleanupTopology(graph);
       } catch (e) {
         logger.debug(`Invalid topology file ${this.topologiesFolder}/${filename}, cannot parse the content. Reason: ${e instanceof Error ? e.message : e}`);
+        totalProcessed++;
         continue;
       }

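The importer fix enforces a simple invariant: every file that enters the loop increments totalProcessed exactly once, whether it imports cleanly, is a directory, is unreadable, or fails to parse. A try/finally makes that invariant structural instead of repeating the increment before each continue; a minimal sketch with hypothetical helpers, not the importer's actual code:

import { promises as fsPromises } from 'fs';

async function importTopologyFiles(folder: string, filenames: string[]): Promise<number> {
  let totalProcessed = 0;
  for (const filename of filenames) {
    try {
      const content = await fsPromises.readFile(`${folder}/${filename}`, 'utf8');
      JSON.parse(content); // stand-in for the real parse/cleanup/import steps
    } catch {
      continue; // skipped files fall through to finally...
    } finally {
      totalProcessed++; // ...so every path out of the try still counts the file
    }
  }
  return totalProcessed;
}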