Merge branch 'master' into mononaut/refactor-task-scheduler
commit a7c64c0df3
@@ -2,6 +2,7 @@ import * as fs from 'fs';
 import path from 'path';
 import config from './config';
 import { createPool, Pool, PoolConnection } from 'mysql2/promise';
+import { LogLevel } from './logger';
 import logger from './logger';
 import { FieldPacket, OkPacket, PoolOptions, ResultSetHeader, RowDataPacket } from 'mysql2/typings/mysql';
 import { execSync } from 'child_process';
@@ -33,7 +34,7 @@ import { execSync } from 'child_process';
   }
 
   public async query<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
-    OkPacket[] | ResultSetHeader>(query, params?, connection?: PoolConnection): Promise<[T, FieldPacket[]]>
+    OkPacket[] | ResultSetHeader>(query, params?, errorLogLevel: LogLevel | 'silent' = 'debug', connection?: PoolConnection): Promise<[T, FieldPacket[]]>
   {
     this.checkDBFlag();
     let hardTimeout;
@@ -55,7 +56,9 @@ import { execSync } from 'child_process';
         }).then(result => {
           resolve(result);
         }).catch(error => {
-          logger.debug(`database query "${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}" failed!`);
+          if (errorLogLevel !== 'silent') {
+            logger[errorLogLevel](`database query "${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}" failed!`);
+          }
           reject(error);
         }).finally(() => {
           clearTimeout(timer);
@@ -66,7 +69,9 @@ import { execSync } from 'child_process';
         const pool = await this.getPool();
         return pool.query(query, params);
       } catch (e) {
-        logger.debug(`database query "${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}" failed!`);
+        if (errorLogLevel !== 'silent') {
+          logger[errorLogLevel](`database query "${query?.sql?.slice(0, 160) || (typeof(query) === 'string' || query instanceof String ? query?.slice(0, 160) : 'unknown query')}" failed!`);
+        }
         throw e;
       }
     }
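
The two hunks above cover both code paths inside query (the hard-timeout promise chain and the plain pool call), so a failed query is now logged at whatever level the caller chooses, or not at all for 'silent'. A minimal usage sketch, assuming a repository-style caller; the table names, import paths and chosen levels are illustrative, not part of this commit:

import { RowDataPacket } from 'mysql2/promise';
import DB from '../database';

// Escalate failures of an important query to 'err' instead of the default 'debug'.
async function countChannels(): Promise<number> {
  const [rows] = await DB.query<RowDataPacket[]>('SELECT COUNT(*) AS n FROM channels', [], 'err');
  return rows[0].n;
}

// Silence logging for a query whose failure is expected and handled by the caller.
async function recordSeenId(id: number): Promise<void> {
  try {
    await DB.query('INSERT INTO seen_ids(id) VALUE (?)', [id], 'silent');
  } catch (e: any) {
    if (e.errno !== 1062) { // ER_DUP_ENTRY is expected here; rethrow anything else
      throw e;
    }
  }
}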
@@ -82,7 +87,7 @@ import { execSync } from 'child_process';
   }
 
   public async $atomicQuery<T extends RowDataPacket[][] | RowDataPacket[] | OkPacket |
-    OkPacket[] | ResultSetHeader>(queries: { query, params }[]): Promise<[T, FieldPacket[]][]>
+    OkPacket[] | ResultSetHeader>(queries: { query, params }[], errorLogLevel: LogLevel | 'silent' = 'debug'): Promise<[T, FieldPacket[]][]>
   {
     const pool = await this.getPool();
     const connection = await pool.getConnection();
@@ -91,7 +96,7 @@ import { execSync } from 'child_process';
 
       const results: [T, FieldPacket[]][] = [];
       for (const query of queries) {
-        const result = await this.query(query.query, query.params, connection) as [T, FieldPacket[]];
+        const result = await this.query(query.query, query.params, errorLogLevel, connection) as [T, FieldPacket[]];
         results.push(result);
       }
 
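
$atomicQuery forwards the same errorLogLevel to every inner this.query call on the shared connection, as the hunk above shows. A sketch of a batch caller passing a level through, reusing the illustrative DB import from the previous sketch; the statements are hypothetical and the connection handling around the loop is untouched by this commit:

// If either statement fails, DB.query logs it at 'notice' rather than the default 'debug'.
async function transferBalance(from: number, to: number, amount: number): Promise<void> {
  await DB.$atomicQuery([
    { query: 'UPDATE balances SET amount = amount - ? WHERE id = ?', params: [amount, from] },
    { query: 'UPDATE balances SET amount = amount + ? WHERE id = ?', params: [amount, to] },
  ], 'notice');
}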
@@ -157,4 +157,6 @@ class Logger {
   }
 }
 
+export type LogLevel = 'emerg' | 'alert' | 'crit' | 'err' | 'warn' | 'notice' | 'info' | 'debug';
+
 export default new Logger();
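
The new LogLevel export enumerates the syslog-style levels, and database.ts imports it so the errorLogLevel parameter and the dynamic logger[errorLogLevel](...) call type-check. A small sketch of that pattern, assuming each level names a matching method on the logger instance (the diff itself only shows err and debug being called directly):

import logger, { LogLevel } from './logger';

// Every member of LogLevel names a logger method with the same signature,
// so indexing the logger with a LogLevel value is type-safe.
function logAt(level: LogLevel | 'silent', message: string): void {
  if (level !== 'silent') {
    logger[level](message);
  }
}

logAt('warn', 'database pool is running low on connections'); // hypothetical message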
@@ -14,7 +14,7 @@ class NodesSocketsRepository {
       await DB.query(`
         INSERT INTO nodes_sockets(public_key, socket, type)
         VALUE (?, ?, ?)
-      `, [socket.publicKey, socket.addr, socket.network]);
+      `, [socket.publicKey, socket.addr, socket.network], 'silent');
     } catch (e: any) {
       if (e.errno !== 1062) { // ER_DUP_ENTRY - Not an issue, just ignore this
         logger.err(`Cannot save node socket (${[socket.publicKey, socket.addr, socket.network]}) into db. Reason: ` + (e instanceof Error ? e.message : e));