diff --git a/test/eslint.config_partial.mjs b/test/eslint.config_partial.mjs index d19e98a4bf3131..c5f6b06c02a01b 100644 --- a/test/eslint.config_partial.mjs +++ b/test/eslint.config_partial.mjs @@ -190,6 +190,10 @@ export default [ 'wasm-allocation', 'wpt', ].join(',')}}/**/*.{js,mjs,cjs}`, +      `test/parallel/test-{${ +        // 0x61 is the char code for 'a' and 42 is '*'; this builds glob prefixes for letters descending from 'z' — with length 2 it yields exactly 'z*,y*' (bump `length` to cover more letters) +        Array.from({ length: 2 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',') +      }}.{js,mjs,cjs}`, ], rules: { 'node-core/must-call-assert': 'error', diff --git a/test/parallel/test-zlib-brotli-16GB.js b/test/parallel/test-zlib-brotli-16GB.js index 9b894320e91ebd..7bd5a137f908fd 100644 --- a/test/parallel/test-zlib-brotli-16GB.js +++ b/test/parallel/test-zlib-brotli-16GB.js @@ -2,7 +2,7 @@ const common = require('../common'); const { createBrotliDecompress } = require('node:zlib'); -const strictEqual = require('node:assert').strictEqual; +const assert = require('node:assert'); const { getDefaultHighWaterMark } = require('stream'); // This tiny HEX string is a 16GB file. @@ -19,5 +19,5 @@ decoder.end(buf); // to process the data and the buffer is not empty. 
setTimeout(common.mustCall(() => { // There is only one chunk in the buffer - strictEqual(decoder._readableState.buffer.length, getDefaultHighWaterMark() / (16 * 1024)); + assert.strictEqual(decoder._readableState.buffer.length, getDefaultHighWaterMark() / (16 * 1024)); }), common.platformTimeout(500)); diff --git a/test/parallel/test-zlib-brotli-kmaxlength-rangeerror.js b/test/parallel/test-zlib-brotli-kmaxlength-rangeerror.js index 6a59ad34b0a174..69f23542cb9ac0 100644 --- a/test/parallel/test-zlib-brotli-kmaxlength-rangeerror.js +++ b/test/parallel/test-zlib-brotli-kmaxlength-rangeerror.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); // This test ensures that zlib throws a RangeError if the final buffer needs to // be larger than kMaxLength and concatenation fails. @@ -18,9 +18,9 @@ buffer.kMaxLength = oldkMaxLength; const encoded = Buffer.from('G38A+CXCIrFAIAM=', 'base64'); // Async -zlib.brotliDecompress(encoded, function(err) { +zlib.brotliDecompress(encoded, common.mustCall((err) => { assert.ok(err instanceof RangeError); -}); +})); // Sync assert.throws(function() { diff --git a/test/parallel/test-zlib-from-concatenated-gzip.js b/test/parallel/test-zlib-from-concatenated-gzip.js index 1de36dacf95f3d..31985aa2bbe791 100644 --- a/test/parallel/test-zlib-from-concatenated-gzip.js +++ b/test/parallel/test-zlib-from-concatenated-gzip.js @@ -47,9 +47,7 @@ const pmmResultBuffers = []; fs.createReadStream(pmmFileGz) .pipe(zlib.createGunzip()) - .on('error', (err) => { - assert.ifError(err); - }) + .on('error', common.mustNotCall()) .on('data', (data) => pmmResultBuffers.push(data)) .on('finish', common.mustCall(() => { // Result should match original random garbage @@ -61,9 +59,7 @@ fs.createReadStream(pmmFileGz) const resultBuffers = []; const unzip = zlib.createGunzip() - .on('error', (err) => { - assert.ifError(err); - }) + .on('error', common.mustNotCall()) .on('data', (data) => resultBuffers.push(data)) 
.on('finish', common.mustCall(() => { assert.strictEqual( diff --git a/test/parallel/test-zlib-kmaxlength-rangeerror.js b/test/parallel/test-zlib-kmaxlength-rangeerror.js index 9803630214eb36..bae49a6530a902 100644 --- a/test/parallel/test-zlib-kmaxlength-rangeerror.js +++ b/test/parallel/test-zlib-kmaxlength-rangeerror.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); // This test ensures that zlib throws a RangeError if the final buffer needs to // be larger than kMaxLength and concatenation fails. @@ -18,9 +18,9 @@ buffer.kMaxLength = oldkMaxLength; const encoded = Buffer.from('H4sIAAAAAAAAA0tMHFgAAIw2K/GAAAAA', 'base64'); // Async -zlib.gunzip(encoded, function(err) { +zlib.gunzip(encoded, common.mustCall((err) => { assert.ok(err instanceof RangeError); -}); +})); // Sync assert.throws(function() { diff --git a/test/parallel/test-zlib-maxOutputLength.js b/test/parallel/test-zlib-maxOutputLength.js index 9af0b3736f8815..ec293aabe7fc04 100644 --- a/test/parallel/test-zlib-maxOutputLength.js +++ b/test/parallel/test-zlib-maxOutputLength.js @@ -17,9 +17,7 @@ assert.throws(function() { }, RangeError); // Async -zlib.brotliDecompress(encoded, { maxOutputLength: 256 }, function(err) { - assert.strictEqual(err, null); -}); +zlib.brotliDecompress(encoded, { maxOutputLength: 256 }, common.mustSucceed()); // Sync zlib.brotliDecompressSync(encoded, { maxOutputLength: 256 }); diff --git a/test/parallel/test-zlib-reset-before-write.js b/test/parallel/test-zlib-reset-before-write.js index afa207f12c1b30..05c1cdce2c4972 100644 --- a/test/parallel/test-zlib-reset-before-write.js +++ b/test/parallel/test-zlib-reset-before-write.js @@ -21,9 +21,7 @@ for (const fn of [ const output = []; inflate - .on('error', (err) => { - assert.ifError(err); - }) + .on('error', common.mustNotCall()) .on('data', (chunk) => output.push(chunk)) .on('end', common.mustCall( () => assert.strictEqual(Buffer.concat(output).toString(), 'abc'))); diff --git 
a/test/parallel/test-zlib-truncated.js b/test/parallel/test-zlib-truncated.js index 94bc0e21cb7a5f..c489388a674e52 100644 --- a/test/parallel/test-zlib-truncated.js +++ b/test/parallel/test-zlib-truncated.js @@ -1,7 +1,7 @@ 'use strict'; // Tests zlib streams with truncated compressed input -require('../common'); +const common = require('../common'); const assert = require('assert'); const zlib = require('zlib'); @@ -23,8 +23,7 @@ const errMessage = /unexpected end of file/; { comp: 'deflate', decomp: 'inflate', decompSync: 'inflateSync' }, { comp: 'deflateRaw', decomp: 'inflateRaw', decompSync: 'inflateRawSync' }, ].forEach(function(methods) { - zlib[methods.comp](inputString, function(err, compressed) { - assert.ifError(err); + zlib[methods.comp](inputString, common.mustSucceed((compressed) => { const truncated = compressed.slice(0, compressed.length / 2); const toUTF8 = (buffer) => buffer.toString('utf-8'); @@ -33,10 +32,9 @@ const errMessage = /unexpected end of file/; assert.strictEqual(toUTF8(decompressed), inputString); // async sanity - zlib[methods.decomp](compressed, function(err, result) { - assert.ifError(err); + zlib[methods.decomp](compressed, common.mustSucceed((result) => { assert.strictEqual(toUTF8(result), inputString); - }); + })); // Sync truncated input test assert.throws(function() { @@ -44,9 +42,9 @@ const errMessage = /unexpected end of file/; }, errMessage); // Async truncated input test - zlib[methods.decomp](truncated, function(err, result) { + zlib[methods.decomp](truncated, common.mustCall((err) => { assert.match(err.message, errMessage); - }); + })); const syncFlushOpt = { finishFlush: zlib.constants.Z_SYNC_FLUSH }; @@ -55,10 +53,9 @@ const errMessage = /unexpected end of file/; assert.strictEqual(result, inputString.slice(0, result.length)); // Async truncated input test, finishFlush = Z_SYNC_FLUSH - zlib[methods.decomp](truncated, syncFlushOpt, function(err, decompressed) { - assert.ifError(err); + zlib[methods.decomp](truncated, 
syncFlushOpt, common.mustSucceed((decompressed) => { const result = toUTF8(decompressed); assert.strictEqual(result, inputString.slice(0, result.length)); - }); - }); + })); + })); }); diff --git a/test/parallel/test-zlib-type-error.js b/test/parallel/test-zlib-type-error.js index 3432d75e346ef1..912b59fd9ca923 100644 --- a/test/parallel/test-zlib-type-error.js +++ b/test/parallel/test-zlib-type-error.js @@ -1,20 +1,9 @@ 'use strict'; require('../common'); -const assert = require('assert').strict; +const assert = require('assert'); const test = require('node:test'); const { DecompressionStream } = require('stream/web'); -async function expectTypeError(promise) { - let threw = false; - try { - await promise; - } catch (err) { - threw = true; - assert(err instanceof TypeError, `Expected TypeError, got ${err}`); - } - assert(threw, 'Expected promise to reject'); -} - test('DecompressStream deflat emits error on trailing data', async () => { const valid = new Uint8Array([120, 156, 75, 4, 0, 0, 98, 0, 98]); // deflate('a') const empty = new Uint8Array(1); @@ -22,10 +11,11 @@ test('DecompressStream deflat emits error on trailing data', async () => { const double = new Uint8Array([...valid, ...valid]); for (const chunk of [[invalid], [valid, empty], [valid, valid], [valid, double]]) { - await expectTypeError( + await assert.rejects( Array.fromAsync( new Blob([chunk]).stream().pipeThrough(new DecompressionStream('deflate')) - ) + ), + { name: 'TypeError' }, ); } }); @@ -37,10 +27,11 @@ test('DecompressStream gzip emits error on trailing data', async () => { const invalid = new Uint8Array([...valid, ...empty]); const double = new Uint8Array([...valid, ...valid]); for (const chunk of [[invalid], [valid, empty], [valid, valid], [double]]) { - await expectTypeError( + await assert.rejects( Array.fromAsync( new Blob([chunk]).stream().pipeThrough(new DecompressionStream('gzip')) - ) + ), + { name: 'TypeError' }, ); } }); diff --git 
a/test/parallel/test-zlib-unzip-one-byte-chunks.js b/test/parallel/test-zlib-unzip-one-byte-chunks.js index b379584a0438ec..8fb8bcd39a7a28 100644 --- a/test/parallel/test-zlib-unzip-one-byte-chunks.js +++ b/test/parallel/test-zlib-unzip-one-byte-chunks.js @@ -1,38 +1,30 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('node:assert'); const zlib = require('node:zlib'); -const { test } = require('node:test'); -test('zlib should unzip one byte chunks', async () => { - const { promise, resolve } = Promise.withResolvers(); - const data = Buffer.concat([ - zlib.gzipSync('abc'), - zlib.gzipSync('def'), - ]); +const data = Buffer.concat([ + zlib.gzipSync('abc'), + zlib.gzipSync('def'), +]); - const resultBuffers = []; +const resultBuffers = []; - const unzip = zlib.createUnzip() - .on('error', (err) => { - assert.ifError(err); - }) - .on('data', (data) => resultBuffers.push(data)) - .on('finish', () => { - const unzipped = Buffer.concat(resultBuffers).toString(); - assert.strictEqual(unzipped, 'abcdef', - `'${unzipped}' should match 'abcdef' after zipping ` + - 'and unzipping'); - resolve(); - }); +const unzip = zlib.createUnzip() + .on('error', common.mustNotCall()) + .on('data', (data) => resultBuffers.push(data)) + .on('finish', common.mustCall(() => { + const unzipped = Buffer.concat(resultBuffers).toString(); + assert.strictEqual(unzipped, 'abcdef', + `'${unzipped}' should match 'abcdef' after zipping ` + + 'and unzipping'); + })); - for (let i = 0; i < data.length; i++) { - // Write each single byte individually. - unzip.write(Buffer.from([data[i]])); - } +for (let i = 0; i < data.length; i++) { + // Write each single byte individually. 
+ unzip.write(Buffer.from([data[i]])); +} - unzip.end(); - await promise; -}); +unzip.end(); diff --git a/test/parallel/test-zlib-write-after-close.js b/test/parallel/test-zlib-write-after-close.js index 2d5d4965b0fec4..6b2b1c8cd0172b 100644 --- a/test/parallel/test-zlib-write-after-close.js +++ b/test/parallel/test-zlib-write-after-close.js @@ -21,25 +21,17 @@ 'use strict'; -require('../common'); +const common = require('../common'); const zlib = require('node:zlib'); const assert = require('node:assert'); -const { test } = require('node:test'); -test('zlib should not allow writing after close', async (t) => { - const { promise, resolve } = Promise.withResolvers(); - const closeCallback = t.mock.fn(); - zlib.gzip('hello', function() { - const unzip = zlib.createGunzip(); - unzip.close(closeCallback); - unzip.write('asd', function(err) { - assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); - assert.strictEqual(err.name, 'Error'); - assert.strictEqual(err.message, 'Cannot call write after a stream was destroyed'); - resolve(); - }); - }); - await promise; - assert.strictEqual(closeCallback.mock.callCount(), 1); -}); +zlib.gzip('hello', common.mustCall(() => { + const unzip = zlib.createGunzip(); + unzip.close(common.mustCall()); + unzip.write('asd', common.mustCall((err) => { + assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED'); + assert.strictEqual(err.name, 'Error'); + assert.strictEqual(err.message, 'Cannot call write after a stream was destroyed'); + })); +})); diff --git a/test/parallel/test-zlib-write-after-flush.js b/test/parallel/test-zlib-write-after-flush.js index fbe7c9859ea9fe..9449ab46823b80 100644 --- a/test/parallel/test-zlib-write-after-flush.js +++ b/test/parallel/test-zlib-write-after-flush.js @@ -21,39 +21,33 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('node:assert'); const zlib = require('node:zlib'); -const { test } = require('node:test'); -test('zlib should accept writing after 
flush', async () => { - for (const [ createCompress, createDecompress ] of [ - [ zlib.createGzip, zlib.createGunzip ], - [ zlib.createBrotliCompress, zlib.createBrotliDecompress ], - [ zlib.createZstdCompress, zlib.createZstdDecompress ], - ]) { - const { promise, resolve, reject } = Promise.withResolvers(); - const gzip = createCompress(); - const gunz = createDecompress(); +for (const [ createCompress, createDecompress ] of [ + [ zlib.createGzip, zlib.createGunzip ], + [ zlib.createBrotliCompress, zlib.createBrotliDecompress ], + [ zlib.createZstdCompress, zlib.createZstdDecompress ], +]) { + const gzip = createCompress(); + const gunz = createDecompress(); - gzip.pipe(gunz); + gzip.pipe(gunz); - let output = ''; - const input = 'A line of data\n'; - gunz.setEncoding('utf8'); - gunz.on('error', reject); - gunz.on('data', (c) => output += c); - gunz.on('end', () => { - assert.strictEqual(output, input); - resolve(); - }); + let output = ''; + const input = 'A line of data\n'; + gunz.setEncoding('utf8'); + gunz.on('error', common.mustNotCall()); + gunz.on('data', (c) => output += c); + gunz.on('end', common.mustCall(() => { + assert.strictEqual(output, input); + })); - // Make sure that flush/write doesn't trigger an assert failure - gzip.flush(); - gzip.write(input); - gzip.end(); - gunz.read(0); - await promise; - } -}); + // Make sure that flush/write doesn't trigger an assert failure + gzip.flush(); + gzip.write(input); + gzip.end(); + gunz.read(0); +} diff --git a/test/parallel/test-zlib-zero-byte.js b/test/parallel/test-zlib-zero-byte.js index a6120d4b92ed29..c3e56bd180ec93 100644 --- a/test/parallel/test-zlib-zero-byte.js +++ b/test/parallel/test-zlib-zero-byte.js @@ -21,35 +21,29 @@ 'use strict'; -require('../common'); +const common = require('../common'); const assert = require('node:assert'); const zlib = require('node:zlib'); -const { test } = require('node:test'); -test('zlib should properly handle zero byte input', async () => { - const compressors = [ 
- [zlib.Gzip, 20], - [zlib.BrotliCompress, 1], - [zlib.ZstdCompress, 9], - ]; +const compressors = [ + [zlib.Gzip, 20], + [zlib.BrotliCompress, 1], + [zlib.ZstdCompress, 9], +]; - for (const [Compressor, expected] of compressors) { - const { promise, resolve, reject } = Promise.withResolvers(); - const gz = new Compressor(); - const emptyBuffer = Buffer.alloc(0); - let received = 0; - gz.on('data', function(c) { - received += c.length; - }); - gz.on('error', reject); - gz.on('end', function() { - assert.strictEqual(received, expected, - `${received}, ${expected}, ${Compressor.name}`); - resolve(); - }); - gz.write(emptyBuffer); - gz.end(); - await promise; - } -}); +for (const [Compressor, expected] of compressors) { + const gz = new Compressor(); + const emptyBuffer = Buffer.alloc(0); + let received = 0; + gz.on('data', function(c) { + received += c.length; + }); + gz.on('error', common.mustNotCall()); + gz.on('end', common.mustCall(() => { + assert.strictEqual(received, expected, + `${received}, ${expected}, ${Compressor.name}`); + })); + gz.write(emptyBuffer); + gz.end(); +} diff --git a/test/parallel/test-zlib-zstd-kmaxlength-rangeerror.js b/test/parallel/test-zlib-zstd-kmaxlength-rangeerror.js index 58ad8ff2c98dea..4537f16370148a 100644 --- a/test/parallel/test-zlib-zstd-kmaxlength-rangeerror.js +++ b/test/parallel/test-zlib-zstd-kmaxlength-rangeerror.js @@ -1,5 +1,5 @@ 'use strict'; -require('../common'); +const common = require('../common'); // This test ensures that zlib throws a RangeError if the final buffer needs to // be larger than kMaxLength and concatenation fails. @@ -19,9 +19,9 @@ buffer.kMaxLength = oldkMaxLength; const encoded = Buffer.from('KLUv/SCARQAAEGFhAQA7BVg=', 'base64'); // Async -zlib.zstdDecompress(encoded, function(err) { +zlib.zstdDecompress(encoded, common.mustCall((err) => { assert.ok(err instanceof RangeError); -}); +})); // Sync assert.throws(function() {