
Commit ba4e83c

Merge pull request #15373 from webpack/fix/issue-14907
if cache pack is too big, we should batch writing
2 parents 18c3590 + 7badefd commit ba4e83c

File tree

2 files changed: +44 -13 lines

lib/serialization/FileMiddleware.js

Lines changed: 44 additions & 9 deletions
@@ -40,6 +40,8 @@ Section -> Buffer
 
 // "wpc" + 1 in little-endian
 const VERSION = 0x01637077;
+const WRITE_LIMIT_TOTAL = 0x7fff0000;
+const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;
 
 /**
  * @param {Buffer[]} buffers buffers
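For scale (a quick illustrative check, not part of the patch): 0x7fff0000 is 2,147,418,112 bytes, i.e. 2 GiB minus 64 KiB, and 511 * 1024 * 1024 is 535,822,336 bytes, so four full-size chunks fit into one batch. The exact values presumably keep any single queued batch safely below the roughly 2 GiB that a single write can handle on many platforms.

// illustrative arithmetic only, not part of the patch
const WRITE_LIMIT_TOTAL = 0x7fff0000; // 2147418112 bytes = 2 GiB - 64 KiB
const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024; // 535822336 bytes = 511 MiB

console.log(WRITE_LIMIT_TOTAL / (1024 * 1024)); // 2047.9375 (MiB)
console.log(WRITE_LIMIT_CHUNK / (1024 * 1024)); // 511 (MiB)
console.log(Math.floor(WRITE_LIMIT_TOTAL / WRITE_LIMIT_CHUNK)); // 4 full-size chunks per batch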
@@ -87,7 +89,7 @@ const readUInt64LE = Buffer.prototype.readBigUInt64LE
  * @param {FileMiddleware} middleware this
  * @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
  * @param {string | boolean} name file base name
- * @param {function(string | false, Buffer[]): Promise<void>} writeFile writes a file
+ * @param {function(string | false, Buffer[], number): Promise<void>} writeFile writes a file
  * @param {string | Hash} hashFunction hash function to use
  * @returns {Promise<SerializeResult>} resulting file pointer and promise
  */
@@ -212,9 +214,9 @@ const serialize = async (
 		if (name === true) {
 			name = hashForName(buf, hashFunction);
 		}
-		backgroundJobs.push(writeFile(name, buf));
 		let size = 0;
 		for (const b of buf) size += b.length;
+		backgroundJobs.push(writeFile(name, buf, size));
 		return {
 			size,
 			name,
@@ -422,7 +424,7 @@ class FileMiddleware extends SerializerMiddleware {
 		// It's important that we don't touch existing files during serialization
 		// because serialize may read existing files (when deserializing)
 		const allWrittenFiles = new Set();
-		const writeFile = async (name, content) => {
+		const writeFile = async (name, content, size) => {
 			const file = name
 				? join(this.fs, filename, `../${name}${extension}`)
 				: filename;
@@ -441,10 +443,7 @@ class FileMiddleware extends SerializerMiddleware {
 							[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
 							[zConstants.BROTLI_PARAM_QUALITY]: 2,
 							[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
-							[zConstants.BROTLI_PARAM_SIZE_HINT]: content.reduce(
-								(size, b) => size + b.length,
-								0
-							)
+							[zConstants.BROTLI_PARAM_SIZE_HINT]: size
 						}
 					});
 				}
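With the total size now passed in from serialize(), the Brotli size hint reuses the precomputed value instead of reducing over content a second time. A minimal standalone sketch of the same zlib option (the helper name and totalSize parameter are illustrative; only the zlib/zConstants constants appear in the patch):

const zlib = require("zlib");

// create a Brotli compressor that is told the input size up front,
// mirroring how the patch passes the precomputed total as the hint
const createCompressorWithHint = totalSize =>
	zlib.createBrotliCompress({
		params: {
			[zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT,
			[zlib.constants.BROTLI_PARAM_QUALITY]: 2,
			[zlib.constants.BROTLI_PARAM_SIZE_HINT]: totalSize
		}
	});

// usage (illustrative): pipe data through createCompressorWithHint(totalBytes) into a file stream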
@@ -456,8 +455,44 @@ class FileMiddleware extends SerializerMiddleware {
 					stream.on("error", err => reject(err));
 					stream.on("finish", () => resolve());
 				}
-				for (const b of content) stream.write(b);
-				stream.end();
+				// split into chunks for WRITE_LIMIT_CHUNK size
+				const chunks = [];
+				for (const b of content) {
+					if (b.length < WRITE_LIMIT_CHUNK) {
+						chunks.push(b);
+					} else {
+						for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
+							chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
+						}
+					}
+				}
+
+				const len = chunks.length;
+				let i = 0;
+				const batchWrite = err => {
+					// will be handled in "on" error handler
+					if (err) return;
+
+					if (i === len) {
+						stream.end();
+						return;
+					}
+
+					// queue up a batch of chunks up to the write limit
+					// end is exclusive
+					let end = i;
+					let sum = chunks[end++].length;
+					while (end < len) {
+						sum += chunks[end].length;
+						if (sum > WRITE_LIMIT_TOTAL) break;
+						end++;
+					}
+					while (i < end - 1) {
+						stream.write(chunks[i++]);
+					}
+					stream.write(chunks[i++], batchWrite);
+				};
+				batchWrite();
 			});
 			if (name) allWrittenFiles.add(file);
 		};
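The net effect: writeFile never passes the stream a single buffer larger than WRITE_LIMIT_CHUNK, and it queues at most roughly WRITE_LIMIT_TOTAL bytes before waiting for the callback of the batch's last write to schedule the next batch. Below is a self-contained sketch of the same pattern outside webpack; the file path, limits, and function names are made up for illustration, and the limits are shrunk so the example is trivial to run.

const fs = require("fs");

// illustrative limits, much smaller than the real WRITE_LIMIT_* constants
const LIMIT_CHUNK = 16 * 1024; // largest single write
const LIMIT_TOTAL = 64 * 1024; // most bytes queued per batch

const writeBuffersInBatches = (file, buffers) =>
	new Promise((resolve, reject) => {
		const stream = fs.createWriteStream(file);
		stream.on("error", reject);
		stream.on("finish", resolve);

		// 1. split every buffer into slices no larger than LIMIT_CHUNK
		const chunks = [];
		for (const b of buffers) {
			if (b.length < LIMIT_CHUNK) {
				chunks.push(b);
			} else {
				for (let i = 0; i < b.length; i += LIMIT_CHUNK) {
					chunks.push(b.slice(i, i + LIMIT_CHUNK));
				}
			}
		}

		// 2. write at most LIMIT_TOTAL bytes, then let the callback of the
		//    batch's last write schedule the next batch
		let i = 0;
		const batchWrite = err => {
			if (err) return; // the "error" listener rejects the promise
			if (i === chunks.length) return void stream.end();
			let end = i;
			let sum = chunks[end++].length;
			while (end < chunks.length && sum + chunks[end].length <= LIMIT_TOTAL) {
				sum += chunks[end++].length;
			}
			while (i < end - 1) stream.write(chunks[i++]);
			stream.write(chunks[i++], batchWrite); // last write drives the next batch
		};
		batchWrite();
	});

// usage (illustrative): writeBuffersInBatches("./pack.tmp", [Buffer.alloc(100 * 1024)])

Attaching the completion callback only to the last write of each batch keeps the amount of buffered data bounded without tracking every write individually, which is the same trade-off the patch makes.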

test/TestCasesCachePack.longtest.js

Lines changed: 0 additions & 4 deletions
@@ -25,10 +25,6 @@ describe("TestCases", () => {
 		["no-string"]:
 			/^Pack got invalid because of write to: Compilation\/modules.+no-string[/\\]loader\.js!.+no-string[/\\]file\.js$/
 	},
-	large: {
-		["big-assets"]:
-			/^Pack got invalid because of write to: ResolverCachePlugin|normal|dependencyType=|esm|path=|.+|request=|\.\/large\/big-assets\/$/
-	},
 	parsing: {
 		// Module parse failed
 		context:
