From 3101a4045bd7030f232d6cb2ae61339214968321 Mon Sep 17 00:00:00 2001
From: Hiroo Ono <49257691+oikumene@users.noreply.github.com>
Date: Wed, 10 Jul 2024 01:31:49 +0900
Subject: [PATCH] fix: limit concurrent open files during 'npm cache verify'
 (#7631)

This change solves https://github.com/npm/cli/issues/4783

During 'npm cache verify', all cache files are currently opened at the
same time, which can cause EMFILE errors in environments that limit the
maximum number of open files. This change uses the p-map module to limit
how many files garbageCollect() holds open concurrently, avoiding the
problem.

## References

Fixes #4783
---
 node_modules/cacache/lib/entry-index.js | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/node_modules/cacache/lib/entry-index.js b/node_modules/cacache/lib/entry-index.js
index 5bc2189382770..f73a11ca29ddf 100644
--- a/node_modules/cacache/lib/entry-index.js
+++ b/node_modules/cacache/lib/entry-index.js
@@ -19,6 +19,10 @@ const hashToSegments = require('./util/hash-to-segments')
 const indexV = require('../package.json')['cache-version'].index
 const { moveFile } = require('@npmcli/fs')
 
+const pMap = require('p-map')
+const lsStreamConcurrency = 5
+
+
 module.exports.NotFoundError = class NotFoundError extends Error {
   constructor (cache, key) {
     super(`No cache entry for ${key} found in ${cache}`)
@@ -182,15 +186,15 @@ function lsStream (cache) {
   // Set all this up to run on the stream and then just return the stream
   Promise.resolve().then(async () => {
     const buckets = await readdirOrEmpty(indexDir)
-    await Promise.all(buckets.map(async (bucket) => {
+    await pMap(buckets, async (bucket) => {
       const bucketPath = path.join(indexDir, bucket)
       const subbuckets = await readdirOrEmpty(bucketPath)
-      await Promise.all(subbuckets.map(async (subbucket) => {
+      await pMap(subbuckets, async (subbucket) => {
         const subbucketPath = path.join(bucketPath, subbucket)
 
         // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
         const subbucketEntries = await readdirOrEmpty(subbucketPath)
-        await Promise.all(subbucketEntries.map(async (entry) => {
+        await pMap(subbucketEntries, async (entry) => {
           const entryPath = path.join(subbucketPath, entry)
           try {
             const entries = await bucketEntries(entryPath)
@@ -213,9 +217,12 @@
             }
             throw err
           }
-        }))
-      }))
-    }))
+        },
+        { concurrency: lsStreamConcurrency })
+      },
+      { concurrency: lsStreamConcurrency })
+    },
+    { concurrency: lsStreamConcurrency })
     stream.end()
     return stream
   }).catch(err => stream.emit('error', err))
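
Note (illustrative, not part of the patch): the core of the fix is replacing unbounded `Promise.all(items.map(...))` fan-out with `p-map`, whose `concurrency` option caps how many mapper calls run at once. Below is a minimal standalone sketch of that pattern; the file-reading helpers and names are placeholders, and only `pMap(iterable, mapper, { concurrency })` reflects the module the patch adds.

```js
// Sketch: bound the number of concurrently open files with p-map.
const fs = require('fs/promises')
const path = require('path')
const pMap = require('p-map')

// Unbounded: every readFile starts immediately, so a large directory can
// exhaust the process's open-file limit and fail with EMFILE.
async function readAllUnbounded (dir, names) {
  return Promise.all(names.map((name) => fs.readFile(path.join(dir, name), 'utf8')))
}

// Bounded: at most `concurrency` readFile calls are in flight at any time.
async function readAllBounded (dir, names) {
  return pMap(
    names,
    (name) => fs.readFile(path.join(dir, name), 'utf8'),
    { concurrency: 5 } // mirrors lsStreamConcurrency in the patch
  )
}
```

Since each of the three nested levels in lsStream() is capped independently at lsStreamConcurrency (5), the worst case is roughly 5 × 5 × 5 = 125 index files being read at once, rather than one open file per cache entry as before.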