1 change: 1 addition & 0 deletions .vscode/settings.json
@@ -1,6 +1,7 @@
{
"rust-analyzer.cargo.allTargets": true,
"rust-analyzer.cargo.features": "all",
"rust-analyzer.procMacro.ignored": { "napi-derive": ["napi"] },
"rust-analyzer.linkedProjects": [
"${workspaceFolder}/core/Cargo.toml",
"${workspaceFolder}/bindings/python/Cargo.toml",
166 changes: 162 additions & 4 deletions bindings/nodejs/generated.d.ts
@@ -74,6 +74,158 @@ export interface StatOptions {
*/
overrideContentDisposition?: string
}
export interface ReadOptions {
/**
* Set `version` for this operation.
*
* This option can be used to retrieve the data of a specified version of the given path.
*/
version?: string
/**
* Set `concurrent` for the operation.
*
* OpenDAL reads files without concurrency by default, which is inefficient when users
* read large chunks of data. By setting `concurrent`, OpenDAL will fetch chunks
* concurrently, using the given chunk size, on storage services that support it.
*/
concurrent?: number
/**
* Sets the chunk size for this operation.
*
* OpenDAL uses the service's preferred chunk size by default. Users can set a chunk size based on their own needs.
*/
chunk?: number
/**
* Controls the optimization strategy for range reads in [`Reader::fetch`].
*
* When performing range reads, if the gap between two requested ranges is smaller than
* the configured `gap` size, OpenDAL will merge these ranges into a single read request
* and discard the unrequested data in between. This helps reduce the number of API calls
* to remote storage services.
*
* This optimization is particularly useful when performing multiple small range reads
* that are close to each other, as it reduces the overhead of multiple network requests
* at the cost of transferring some additional data.
*/
gap?: bigint
/**
* Sets the offset (starting position) for range read operations.
* The read will start from this position in the file.
*/
offset?: bigint
/**
* Sets the size (length) for range read operations.
* The read will continue for this many bytes after the offset.
*/
size?: bigint
/**
* Sets if-match condition for this operation.
* If file exists and its etag doesn't match, an error will be returned.
*/
ifMatch?: string
/**
* Sets if-none-match condition for this operation.
* If file exists and its etag matches, an error will be returned.
*/
ifNoneMatch?: string
/**
* Sets if-modified-since condition for this operation.
* If file exists and hasn't been modified since the specified time, an error will be returned.
* The value is an ISO 8601 formatted date string, see
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString
*/
ifModifiedSince?: string
/**
* Sets if-unmodified-since condition for this operation.
* If file exists and has been modified since the specified time, an error will be returned.
* The value is an ISO 8601 formatted date string, see
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString
*/
ifUnmodifiedSince?: string
/**
* Specify the `content-type` header that should be sent back by the operation.
*
* This option is only meaningful when used along with presign.
*/
contentType?: string
/**
* Specify the `cache-control` header that should be sent back by the operation.
*
* This option is only meaningful when used along with presign.
*/
cacheControl?: string
/**
* Specify the `content-disposition` header that should be sent back by the operation.
*
* This option is only meaningful when used along with presign.
*/
contentDisposition?: string
}
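The new `ReadOptions` surface is easiest to see in use. A minimal sketch, assuming an `Operator` configured for a service that supports ranged and conditional reads (the S3 configuration, paths, and etag are illustrative):

```ts
import { Operator } from "opendal";

async function main() {
  // Illustrative configuration; any service with the matching capabilities works.
  const op = new Operator("s3", { bucket: "example-bucket", region: "us-east-1" });

  // Plain read, unchanged call shape.
  const whole = await op.read("path/to/file");

  // Range read: start at byte 1024 and read the following 4096 bytes.
  const part = await op.read("path/to/file", { offset: 1024n, size: 4096n });

  // Conditional read: fail if the object no longer carries a known etag.
  const guarded = await op.read("path/to/file", { ifMatch: '"known-etag"' });

  console.log(whole.length, part.length, guarded.length);
}

main().catch(console.error);
```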
export interface ReaderOptions {
/**
* Set `version` for this operation.
*
* This option can be used to retrieve the data of a specified version of the given path.
*/
version?: string
/**
* Set `concurrent` for the operation.
*
* OpenDAL reads files without concurrency by default, which is inefficient when users
* read large chunks of data. By setting `concurrent`, OpenDAL will fetch chunks
* concurrently, using the given chunk size, on storage services that support it.
*/
concurrent?: number
/**
* Sets the chunk size for this operation.
*
* OpenDAL uses the service's preferred chunk size by default. Users can set a chunk size based on their own needs.
*/
chunk?: number
/**
* Controls the optimization strategy for range reads in [`Reader::fetch`].
*
* When performing range reads, if the gap between two requested ranges is smaller than
* the configured `gap` size, OpenDAL will merge these ranges into a single read request
* and discard the unrequested data in between. This helps reduce the number of API calls
* to remote storage services.
*
* This optimization is particularly useful when performing multiple small range reads
* that are close to each other, as it reduces the overhead of multiple network requests
* at the cost of transferring some additional data.
*/
gap?: bigint
/**
* Sets if-match condition for this operation.
* If file exists and its etag doesn't match, an error will be returned.
*/
ifMatch?: string
/**
* Sets if-none-match condition for this operation.
* If file exists and its etag matches, an error will be returned.
*/
ifNoneMatch?: string
/**
* Sets if-modified-since condition for this operation.
* If file exists and hasn't been modified since the specified time, an error will be returned.
* The value is an ISO 8601 formatted date string, see
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString
*/
ifModifiedSince?: string
/**
* Sets if-unmodified-since condition for this operation.
* If file exists and has been modified since the specified time, an error will be returned.
* The value is an ISO 8601 formatted date string, see
* https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString
*/
ifUnmodifiedSince?: string
}
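For large objects, `ReaderOptions` lets callers tune chunking before streaming the data out. A sketch, assuming the returned `Reader` keeps its existing `read(buf)` method that fills the supplied buffer and resolves with the number of bytes read:

```ts
import { Operator } from "opendal";

async function streamCopy(op: Operator, path: string): Promise<number> {
  // Fetch 8 MiB chunks, four at a time, and merge ranges that are
  // less than 1 MiB apart into a single request.
  const reader = await op.reader(path, {
    chunk: 8 * 1024 * 1024,
    concurrent: 4,
    gap: 1024n * 1024n,
  });

  const buf = Buffer.alloc(64 * 1024);
  let total = 0;
  for (;;) {
    // Assumed Reader API: fills `buf`, resolves with the bytes written.
    const n = Number(await reader.read(buf));
    if (n === 0) break;
    // ... hand buf.subarray(0, n) to whatever consumes the data ...
    total += n;
  }
  return total;
}
```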
export const enum EntryMode {
/** FILE means the path has data to read. */
FILE = 0,
@@ -157,6 +309,12 @@ export class Capability {
get statWithOverrideContentDisposition(): boolean
/** If operator supports read. */
get read(): boolean
/** If operator supports read with version. */
get readWithVersion(): boolean
/** If operator supports read with if modified since. */
get readWithIfModifiedSince(): boolean
/** If operator supports read with if unmodified since. */
get readWithIfUnmodifiedSince(): boolean
/** If operator supports read with if matched. */
get readWithIfMatch(): boolean
/** If operator supports read with if none match. */
@@ -332,13 +490,13 @@ export class Operator {
* const buf = await op.read("path/to/file");
* ```
*/
read(path: string): Promise<Buffer>
read(path: string, options?: ReadOptions | undefined | null): Promise<Buffer>
/**
* Create a reader to read the given path.
*
* It can be used to read large files in a streaming way.
*/
reader(path: string): Promise<Reader>
reader(path: string, options?: ReaderOptions | undefined | null): Promise<Reader>
/**
* Read the whole path into a buffer synchronously.
*
@@ -347,13 +505,13 @@
* const buf = op.readSync("path/to/file");
* ```
*/
readSync(path: string): Buffer
readSync(path: string, options?: ReadOptions | undefined | null): Buffer
/**
* Create a reader to read the given path synchronously.
*
* It can be used to read large files in a streaming way.
*/
readerSync(path: string): BlockingReader
readerSync(path: string, options?: ReaderOptions | undefined | null): BlockingReader
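The blocking variants take the same option objects; a short sketch under the same assumptions as above:

```ts
import { Operator } from "opendal";

function readPinnedVersion(op: Operator, path: string, version: string): Buffer {
  // Synchronously read one specific version of the object.
  return op.readSync(path, { version });
}

function openBlockingReader(op: Operator, path: string) {
  // BlockingReader accepts the same tuning knobs as the async reader.
  return op.readerSync(path, { chunk: 4 * 1024 * 1024, concurrent: 2 });
}
```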
/**
* Write bytes into a path.
*
18 changes: 18 additions & 0 deletions bindings/nodejs/src/capability.rs
@@ -96,6 +96,24 @@ impl Capability {
self.0.read
}

/// If operator supports read with version.
#[napi(getter)]
pub fn read_with_version(&self) -> bool {
self.0.read_with_version
}

/// If operator supports read with if modified since.
#[napi(getter)]
pub fn read_with_if_modified_since(&self) -> bool {
self.0.read_with_if_modified_since
}

/// If operator supports read with if unmodified since.
#[napi(getter)]
pub fn read_with_if_unmodified_since(&self) -> bool {
self.0.read_with_if_unmodified_since
}

/// If operator supports read with if matched.
#[napi(getter)]
pub fn read_with_if_match(&self) -> bool {
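Not every service implements these features, so callers can gate the new options on the matching capability flags. A sketch, assuming the binding's existing `op.capability()` accessor:

```ts
import { Operator } from "opendal";

async function readMaybeVersioned(op: Operator, path: string, version?: string): Promise<Buffer> {
  const cap = op.capability();
  if (version && cap.readWithVersion) {
    // Only forward `version` when the backing service supports versioned reads.
    return op.read(path, { version });
  }
  return op.read(path);
}
```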
39 changes: 29 additions & 10 deletions bindings/nodejs/src/lib.rs
@@ -27,7 +27,7 @@ use std::time::Duration;
use futures::AsyncReadExt;
use futures::TryStreamExt;
use napi::bindgen_prelude::*;
use opendal::options::StatOptions;
use opendal::options::{ReadOptions, ReaderOptions, StatOptions};

mod capability;
mod options;
@@ -214,10 +214,15 @@ impl Operator {
/// const buf = await op.read("path/to/file");
/// ```
#[napi]
pub async fn read(&self, path: String) -> Result<Buffer> {
pub async fn read(
&self,
path: String,
options: Option<options::ReadOptions>,
) -> Result<Buffer> {
let options = options.map_or_else(ReadOptions::default, ReadOptions::from);
let res = self
.async_op
.read(&path)
.read_options(&path, options)
.await
.map_err(format_napi_error)?
.to_vec();
@@ -228,15 +233,20 @@
///
/// It can be used to read large files in a streaming way.
#[napi]
pub async fn reader(&self, path: String) -> Result<Reader> {
pub async fn reader(
&self,
path: String,
options: Option<options::ReaderOptions>,
) -> Result<Reader> {
let options = options.map_or_else(ReaderOptions::default, ReaderOptions::from);
let r = self
.async_op
.reader(&path)
.reader_options(&path, options)
.await
.map_err(format_napi_error)?;
Ok(Reader {
inner: r
.into_futures_async_read(..)
.into_futures_async_read(std::ops::RangeFull)
.await
.map_err(format_napi_error)?,
})
@@ -249,10 +259,11 @@
/// const buf = op.readSync("path/to/file");
/// ```
#[napi]
pub fn read_sync(&self, path: String) -> Result<Buffer> {
pub fn read_sync(&self, path: String, options: Option<options::ReadOptions>) -> Result<Buffer> {
let options = options.map_or_else(ReadOptions::default, ReadOptions::from);
let res = self
.blocking_op
.read(&path)
.read_options(&path, options)
.map_err(format_napi_error)?
.to_vec();
Ok(res.into())
@@ -262,8 +273,16 @@
///
/// It can be used to read large files in a streaming way.
#[napi]
pub fn reader_sync(&self, path: String) -> Result<BlockingReader> {
let r = self.blocking_op.reader(&path).map_err(format_napi_error)?;
pub fn reader_sync(
&self,
path: String,
options: Option<options::ReaderOptions>,
) -> Result<BlockingReader> {
let options = options.map_or_else(ReaderOptions::default, ReaderOptions::from);
let r = self
.blocking_op
.reader_options(&path, options)
.map_err(format_napi_error)?;
Ok(BlockingReader {
inner: r.into_std_read(..).map_err(format_napi_error)?,
})
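Because the binding falls back to `ReadOptions::default()` when the argument is omitted, existing call sites keep working unchanged and new call sites can opt in incrementally. A sketch of the time-based conditions, which take ISO 8601 strings (the one-minute window is illustrative):

```ts
import { Operator } from "opendal";

async function readIfChangedRecently(op: Operator, path: string): Promise<Buffer> {
  // Old call shape still works: options default on the Rust side.
  const latest = await op.read(path);
  console.log(latest.length);

  // New call shape: fail unless the object was modified within the last minute.
  const since = new Date(Date.now() - 60_000).toISOString();
  return op.read(path, { ifModifiedSince: since });
}
```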