
Commit

fs: writeFile support AsyncIterable, Iterable & Stream as `data` argument

Fixes: nodejs#37391
HiroyukiYagihashi committed Feb 23, 2021
1 parent 75cc41e commit b5538ff
Showing 3 changed files with 107 additions and 13 deletions.
doc/api/fs.md (3 changes: 2 additions & 1 deletion)
@@ -3866,7 +3866,8 @@ changes:
-->
* `file` {string|Buffer|URL|integer} filename or file descriptor
* `data` {string|Buffer|TypedArray|DataView|Object}
* `data` {string|Buffer|TypedArray|DataView|Object|AsyncIterable|Iterable
|Stream}
* `options` {Object|string}
* `encoding` {string|null} **Default:** `'utf8'`
* `mode` {integer} **Default:** `0o666`
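For reference, a minimal usage sketch of the documented change (not part of the commit; the file names and temp-directory helper below are illustrative only):

```js
// A minimal sketch, assuming this commit's behavior: `data` may now be a
// Readable stream, an Iterable, or an AsyncIterable, and each chunk is
// written to the file in order.
'use strict';
const fsPromises = require('fs').promises;
const { Readable } = require('stream');
const path = require('path');
const os = require('os');

async function main() {
  const dir = os.tmpdir();

  // Plain string, as before.
  await fsPromises.writeFile(path.join(dir, 'example-string.txt'), 'hello');

  // Readable stream: chunks are consumed with `for await` and written in order.
  await fsPromises.writeFile(path.join(dir, 'example-stream.txt'),
                             Readable.from(['a', 'b', 'c']));

  // Async iterable: each yielded chunk is written as it is produced.
  async function* chunks() {
    yield 'x';
    yield 'y';
  }
  await fsPromises.writeFile(path.join(dir, 'example-iterable.txt'), chunks());
}

main().catch(console.error);
```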
lib/internal/fs/promises.js (63 changes: 51 additions & 12 deletions)
@@ -10,6 +10,7 @@ const kReadFileMaxChunkSize = 2 ** 14;
const kWriteFileMaxChunkSize = 2 ** 14;

const {
ArrayIsArray,
ArrayPrototypePush,
Error,
MathMax,
@@ -21,7 +22,9 @@ const {
PromiseResolve,
SafeArrayIterator,
Symbol,
Uint8Array,
SymbolAsyncIterator,
SymbolIterator,
Uint8Array
} = primordials;

const {
@@ -41,7 +44,7 @@
ERR_INVALID_ARG_VALUE,
ERR_METHOD_NOT_IMPLEMENTED,
} = codes;
const { isArrayBufferView } = require('internal/util/types');
const { isArrayBufferView, isTypedArray } = require('internal/util/types');
const { rimrafPromises } = require('internal/fs/rimraf');
const {
copyObject,
@@ -663,19 +666,55 @@ async function writeFile(path, data, options) {
options = getOptions(options, { encoding: 'utf8', mode: 0o666, flag: 'w' });
const flag = options.flag || 'w';

if (!isArrayBufferView(data)) {
validateStringAfterArrayBufferView(data, 'data');
data = Buffer.from(data, options.encoding || 'utf8');
}
if (isIterable(data)) {
if (options.signal?.aborted) {
throw lazyDOMException('The operation was aborted', 'AbortError');
}
const fd = await open(path, flag, options.mode);
try {
if (options.signal?.aborted) {
throw lazyDOMException('The operation was aborted', 'AbortError');
}
for await (const buf of data) {
if (options.signal?.aborted) {
throw lazyDOMException('The operation was aborted', 'AbortError');
}
await fd.write(buf);
if (options.signal?.aborted) {
throw lazyDOMException('The operation was aborted', 'AbortError');
}
}
} finally {
await fd.close();
}
} else {
if (!isArrayBufferView(data)) {
validateStringAfterArrayBufferView(data, 'data');
data = Buffer.from(data, options.encoding || 'utf8');
}

if (path instanceof FileHandle)
return writeFileHandle(path, data, options.signal);
if (path instanceof FileHandle) {
return writeFileHandle(path, data, options.signal);
}

const fd = await open(path, flag, options.mode);
if (options.signal?.aborted) {
throw lazyDOMException('The operation was aborted', 'AbortError');
const fd = await open(path, flag, options.mode);
if (options.signal?.aborted) {
throw lazyDOMException('The operation was aborted', 'AbortError');
}
return PromisePrototypeFinally(writeFileHandle(fd, data), fd.close);
}
return PromisePrototypeFinally(writeFileHandle(fd, data), fd.close);
}

function isIterable(obj) {
if (obj == null) {
return false;
}

return SymbolAsyncIterator in obj || (
SymbolIterator in obj &&
typeof obj !== 'string' &&
!ArrayIsArray(obj) &&
!isTypedArray(obj));
}

async function appendFile(path, data, options) {
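For reference, a minimal sketch of the abort behavior this code path enables (not part of the commit; the slow generator, timings, and file name are assumptions):

```js
// A minimal sketch, assuming the behavior added above: writeFile checks
// options.signal between chunk writes, so a slow iterable can be aborted
// mid-write and reject with an AbortError.
'use strict';
const fsPromises = require('fs').promises;
const path = require('path');
const os = require('os');
const { setTimeout: sleep } = require('timers/promises');

async function* slowChunks() {
  for (const chunk of ['a', 'b', 'c']) {
    await sleep(100); // simulate a slow producer
    yield chunk;
  }
}

const controller = new AbortController();
setTimeout(() => controller.abort(), 150); // abort while chunks are still arriving

fsPromises
  .writeFile(path.join(os.tmpdir(), 'abort-example.txt'), slowChunks(),
             { signal: controller.signal })
  .catch((err) => {
    console.error(err.name); // expected: 'AbortError'
  });
```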
test/parallel/test-fs-promises-writefile.js (54 changes: 54 additions & 0 deletions)
@@ -7,20 +7,70 @@ const path = require('path');
const tmpdir = require('../common/tmpdir');
const assert = require('assert');
const tmpDir = tmpdir.path;
const { Readable } = require('stream');

tmpdir.refresh();

const dest = path.resolve(tmpDir, 'tmp.txt');
const otherDest = path.resolve(tmpDir, 'tmp-2.txt');
const buffer = Buffer.from('abc'.repeat(1000));
const buffer2 = Buffer.from('xyz'.repeat(1000));
const stream = Readable.from(['a', 'b', 'c']);
const stream2 = Readable.from(['a', 'b', 'c']);
const iterable = {
[Symbol.iterator]: function*() {
yield 'a';
yield 'b';
yield 'c';
}
};
const asyncIterable = {
async* [Symbol.asyncIterator]() {
yield 'a';
yield 'b';
yield 'c';
}
};

async function doWrite() {
await fsPromises.writeFile(dest, buffer);
const data = fs.readFileSync(dest);
assert.deepStrictEqual(data, buffer);
}

async function doWriteStream() {
await fsPromises.writeFile(dest, stream);
let expected = '';
for await (const v of stream2) expected += v;
const data = fs.readFileSync(dest, 'utf-8');
assert.deepStrictEqual(data, expected);
}

async function doWriteStreamWithCancel() {
const controller = new AbortController();
const { signal } = controller;
process.nextTick(() => controller.abort());
assert.rejects(fsPromises.writeFile(otherDest, stream, { signal }), {
name: 'AbortError'
});
}

async function doWriteIterable() {
await fsPromises.writeFile(dest, iterable);
let expected = '';
for await (const v of iterable) expected += v;
const data = fs.readFileSync(dest, 'utf-8');
assert.deepStrictEqual(data, expected);
}

async function doWriteAsyncIterable() {
await fsPromises.writeFile(dest, asyncIterable);
let expected = '';
for await (const v of asyncIterable) expected += v;
const data = fs.readFileSync(dest, 'utf-8');
assert.deepStrictEqual(data, expected);
}

async function doWriteWithCancel() {
const controller = new AbortController();
const { signal } = controller;
@@ -55,4 +105,8 @@ doWrite()
.then(doAppend)
.then(doRead)
.then(doReadWithEncoding)
.then(doWriteStream)
.then(doWriteStreamWithCancel)
.then(doWriteIterable)
.then(doWriteAsyncIterable)
.then(common.mustCall());
