From e5f97ace8b13c8bec9d9f8d4575bbb9a7f4ed5dc Mon Sep 17 00:00:00 2001
From: Mononaut
Date: Mon, 1 May 2023 14:30:30 -0600
Subject: [PATCH] delay writing disk cache until block handler completes

---
 backend/src/api/blocks.ts     |  4 ++++
 backend/src/api/disk-cache.ts | 33 +++++++++++++++++++++++++++++++++
 backend/src/index.ts          |  2 ++
 3 files changed, 39 insertions(+)

diff --git a/backend/src/api/blocks.ts b/backend/src/api/blocks.ts
index 2837d40a0..c50c38107 100644
--- a/backend/src/api/blocks.ts
+++ b/backend/src/api/blocks.ts
@@ -533,6 +533,8 @@ class Blocks {
     // warn if this run stalls the main loop for more than 2 minutes
     const timer = this.startTimer();
 
+    diskCache.lock();
+
     let fastForwarded = false;
     const blockHeightTip = await bitcoinApi.$getBlockHeightTip();
     this.updateTimerProgress(timer, 'got block height tip');
@@ -697,6 +699,8 @@ class Blocks {
       this.updateTimerProgress(timer, `async callbacks completed for ${this.currentBlockHeight}`);
     }
 
+    diskCache.unlock();
+
     this.clearTimer(timer);
   }
 
diff --git a/backend/src/api/disk-cache.ts b/backend/src/api/disk-cache.ts
index 7a38e7da0..220d22b8e 100644
--- a/backend/src/api/disk-cache.ts
+++ b/backend/src/api/disk-cache.ts
@@ -22,6 +22,11 @@ class DiskCache {
   private static CHUNK_FILES = 25;
   private isWritingCache = false;
+  private semaphore: { resume: (() => void)[], locks: number } = {
+    resume: [],
+    locks: 0,
+  };
+
   constructor() {
     if (!cluster.isPrimary) {
       return;
     }
@@ -77,6 +82,7 @@
         fs.renameSync(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), DiskCache.FILE_NAMES.replace('{number}', i.toString()));
       }
     } else {
+      await this.$yield();
       await fsPromises.writeFile(DiskCache.TMP_FILE_NAME, JSON.stringify({
         network: config.MEMPOOL.NETWORK,
         cacheSchemaVersion: this.cacheSchemaVersion,
@@ -86,6 +92,7 @@
         mempoolArray: mempoolArray.splice(0, chunkSize),
       }), { flag: 'w' });
       for (let i = 1; i < DiskCache.CHUNK_FILES; i++) {
+        await this.$yield();
         await fsPromises.writeFile(DiskCache.TMP_FILE_NAMES.replace('{number}', i.toString()), JSON.stringify({
           mempool: {},
           mempoolArray: mempoolArray.splice(0, chunkSize),
@@ -240,6 +247,32 @@
       logger.warn('Failed to parse rbf cache. Skipping. Reason: ' + (e instanceof Error ? e.message : e));
     }
   }
+
+  private $yield(): Promise<void> {
+    if (this.semaphore.locks) {
+      logger.debug('Pause writing mempool and blocks data to disk cache (async)');
+      return new Promise((resolve) => {
+        this.semaphore.resume.push(resolve);
+      });
+    } else {
+      return Promise.resolve();
+    }
+  }
+
+  public lock(): void {
+    this.semaphore.locks++;
+  }
+
+  public unlock(): void {
+    this.semaphore.locks = Math.max(0, this.semaphore.locks - 1);
+    if (!this.semaphore.locks && this.semaphore.resume.length) {
+      const nextResume = this.semaphore.resume.shift();
+      if (nextResume) {
+        logger.debug('Resume writing mempool and blocks data to disk cache (async)');
+        nextResume();
+      }
+    }
+  }
 }
 
 export default new DiskCache();
diff --git a/backend/src/index.ts b/backend/src/index.ts
index 384f96c38..3887aac2c 100644
--- a/backend/src/index.ts
+++ b/backend/src/index.ts
@@ -205,6 +205,8 @@ class Server {
         logger.debug(`AxiosError: ${e?.message}`);
       }
       setTimeout(this.runMainUpdateLoop.bind(this), 1000 * this.currentBackendRetryInterval);
+    } finally {
+      diskCache.unlock();
     }
   }
 
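Aside (editor's illustration, not part of the patch): the change amounts to a small async semaphore. `$updateBlocks()` takes a lock before processing a block, the cache writer calls `$yield()` before each chunk write and parks itself while any lock is held, and `unlock()` wakes the waiting writer when the block handler finishes (or when the main loop's `finally` releases it after an error). Below is a minimal standalone sketch of the same pattern, using a hypothetical `Gate` class and `console.log` in place of the mempool logger:

```typescript
// sketch.ts — standalone illustration of the DiskCache lock/unlock/$yield pattern.
// `Gate` is a hypothetical name; the patch implements the same logic inline in DiskCache.
class Gate {
  private resume: (() => void)[] = [];
  private locks = 0;

  public lock(): void {
    this.locks++;
  }

  public unlock(): void {
    this.locks = Math.max(0, this.locks - 1);
    // wake the first queued waiter once no locks remain
    if (!this.locks && this.resume.length) {
      const next = this.resume.shift();
      if (next) {
        next();
      }
    }
  }

  // resolves immediately when unlocked, otherwise parks the caller until unlock()
  public $yield(): Promise<void> {
    if (this.locks) {
      return new Promise((resolve) => {
        this.resume.push(resolve);
      });
    }
    return Promise.resolve();
  }
}

const gate = new Gate();

// stands in for DiskCache.$saveCacheToDisk(): yields before each chunk write
async function writeCacheChunks(): Promise<void> {
  for (let i = 0; i < 3; i++) {
    await gate.$yield();
    console.log(`wrote chunk ${i}`);
  }
}

// stands in for Blocks.$updateBlocks(): holds the lock while a block is processed
async function handleBlock(): Promise<void> {
  gate.lock();
  console.log('block handler started, cache writes paused');
  await new Promise<void>((resolve) => setTimeout(resolve, 100));
  gate.unlock();
  console.log('block handler finished, cache writes resumed');
}

void writeCacheChunks();
void handleBlock();
```

Note that `unlock()` wakes only the first queued waiter; that appears sufficient here because `$saveCacheToDisk` runs one write loop at a time (the class keeps an `isWritingCache` flag), so at most one caller should ever be waiting on the semaphore.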