Skip to content

Commit b29849a

Browse files
authored
[js/common] fix typedoc warnings (#19933)
### Description Fix a few warnings in typedoc (for generating JS API): ``` [warning] The signature TrainingSession.loadParametersBuffer has an @param with name "buffer", which was not used. [warning] NonTensorType, defined in ./lib/onnx-value.ts, is referenced by OnnxValue but not included in the documentation. [warning] TensorFactory, defined in ./lib/tensor-factory.ts, is referenced by Tensor but not included in the documentation. [warning] ExternalDataFileType, defined in ./lib/onnx-model.ts, is referenced by InferenceSession.SessionOptions.externalData but not included in the documentation. [warning] TensorToDataUrlOptions, defined in ./lib/tensor-conversion.ts, is referenced by Tensor.toDataURL.toDataURL.options but not included in the documentation. [warning] TensorToImageDataOptions, defined in ./lib/tensor-conversion.ts, is referenced by Tensor.toImageData.toImageData.options but not included in the documentation. [warning] Failed to resolve link to "GpuBufferType" in comment for Env.WebGpuFlags.adapter. [warning] Failed to resolve link to "GpuBufferType" in comment for Env.WebGpuFlags.device. ``` Changes highlighted: - Merge `CoreMlExecutionProviderOption` and `CoreMLExecutionProviderOption`. They expose 2 sets of different options for React Native and the ORT Node.js binding. This should be fixed in the future. - Fix a few inconsistencies in names between JSDoc and parameters - Fix broken type links - Exclude trace functions
1 parent acb0df2 commit b29849a

File tree

10 files changed

+68
-25
lines changed

10 files changed

+68
-25
lines changed

js/common/lib/backend.ts

+3-3
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ export interface TrainingSessionHandler extends SessionHandler {
5858
options: InferenceSession.RunOptions): Promise<SessionHandler.ReturnType>;
5959

6060
getParametersSize(trainableOnly: boolean): Promise<number>;
61-
loadParametersBuffer(array: Uint8Array, trainableOnly: boolean): Promise<void>;
61+
loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise<void>;
6262
getContiguousParameters(trainableOnly: boolean): Promise<OnnxValue>;
6363
}
6464

@@ -77,8 +77,8 @@ export interface Backend {
7777
Promise<InferenceSessionHandler>;
7878

7979
createTrainingSessionHandler?
80-
(checkpointStateUriOrBuffer: TrainingSession.URIorBuffer, trainModelUriOrBuffer: TrainingSession.URIorBuffer,
81-
evalModelUriOrBuffer: TrainingSession.URIorBuffer, optimizerModelUriOrBuffer: TrainingSession.URIorBuffer,
80+
(checkpointStateUriOrBuffer: TrainingSession.UriOrBuffer, trainModelUriOrBuffer: TrainingSession.UriOrBuffer,
81+
evalModelUriOrBuffer: TrainingSession.UriOrBuffer, optimizerModelUriOrBuffer: TrainingSession.UriOrBuffer,
8282
options: InferenceSession.SessionOptions): Promise<TrainingSessionHandler>;
8383
}
8484

js/common/lib/env.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -173,7 +173,7 @@ export declare namespace Env {
173173
* When use with TypeScript, the type of this property is `GPUAdapter` defined in "@webgpu/types".
174174
* Use `const adapter = env.webgpu.adapter as GPUAdapter;` in TypeScript to access this property with correct type.
175175
*
176-
* see comments on {@link GpuBufferType}
176+
* see comments on {@link Tensor.GpuBufferType}
177177
*/
178178
readonly adapter: unknown;
179179
/**
@@ -184,7 +184,7 @@ export declare namespace Env {
184184
* When use with TypeScript, the type of this property is `GPUDevice` defined in "@webgpu/types".
185185
* Use `const device = env.webgpu.device as GPUDevice;` in TypeScript to access this property with correct type.
186186
*
187-
* see comments on {@link GpuBufferType} for more details about why not use types defined in "@webgpu/types".
187+
* see comments on {@link Tensor.GpuBufferType} for more details about why not use types defined in "@webgpu/types".
188188
*/
189189
readonly device: unknown;
190190
/**

js/common/lib/index.ts

+3
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,9 @@ export * from './backend.js';
2121
export * from './env.js';
2222
export * from './inference-session.js';
2323
export * from './tensor.js';
24+
export * from './tensor-conversion.js';
25+
export * from './tensor-factory.js';
2426
export * from './trace.js';
27+
export * from './onnx-model.js';
2528
export * from './onnx-value.js';
2629
export * from './training-session.js';

js/common/lib/inference-session.ts

+35-8
Original file line numberDiff line numberDiff line change
@@ -186,22 +186,22 @@ export declare namespace InferenceSession {
186186
// #region execution providers
187187

188188
// Currently, we have the following backends to support execution providers:
189-
// Backend Node.js binding: supports 'cpu' and 'cuda'.
189+
// Backend Node.js binding: supports 'cpu', 'dml' (win32), 'coreml' (macOS) and 'cuda' (linux).
190190
// Backend WebAssembly: supports 'cpu', 'wasm', 'webgpu' and 'webnn'.
191191
// Backend ONNX.js: supports 'webgl'.
192192
// Backend React Native: supports 'cpu', 'xnnpack', 'coreml' (iOS), 'nnapi' (Android).
193193
interface ExecutionProviderOptionMap {
194+
coreml: CoreMLExecutionProviderOption;
194195
cpu: CpuExecutionProviderOption;
195-
coreml: CoreMlExecutionProviderOption;
196196
cuda: CudaExecutionProviderOption;
197197
dml: DmlExecutionProviderOption;
198+
nnapi: NnapiExecutionProviderOption;
198199
tensorrt: TensorRtExecutionProviderOption;
199200
wasm: WebAssemblyExecutionProviderOption;
200201
webgl: WebGLExecutionProviderOption;
201-
xnnpack: XnnpackExecutionProviderOption;
202202
webgpu: WebGpuExecutionProviderOption;
203203
webnn: WebNNExecutionProviderOption;
204-
nnapi: NnapiExecutionProviderOption;
204+
xnnpack: XnnpackExecutionProviderOption;
205205
}
206206

207207
type ExecutionProviderName = keyof ExecutionProviderOptionMap;
@@ -219,10 +219,6 @@ export declare namespace InferenceSession {
219219
readonly name: 'cuda';
220220
deviceId?: number;
221221
}
222-
export interface CoreMlExecutionProviderOption extends ExecutionProviderOption {
223-
readonly name: 'coreml';
224-
coreMlFlags?: number;
225-
}
226222
export interface DmlExecutionProviderOption extends ExecutionProviderOption {
227223
readonly name: 'dml';
228224
deviceId?: number;
@@ -253,8 +249,39 @@ export declare namespace InferenceSession {
253249
}
254250
export interface CoreMLExecutionProviderOption extends ExecutionProviderOption {
255251
readonly name: 'coreml';
252+
/**
253+
* The bit flags for CoreML execution provider.
254+
*
255+
* ```
256+
* COREML_FLAG_USE_CPU_ONLY = 0x001
257+
* COREML_FLAG_ENABLE_ON_SUBGRAPH = 0x002
258+
* COREML_FLAG_ONLY_ENABLE_DEVICE_WITH_ANE = 0x004
259+
* COREML_FLAG_ONLY_ALLOW_STATIC_INPUT_SHAPES = 0x008
260+
* COREML_FLAG_CREATE_MLPROGRAM = 0x010
261+
* ```
262+
*
263+
* See include/onnxruntime/core/providers/coreml/coreml_provider_factory.h for more details.
264+
*
265+
* This flag is available only in ONNXRuntime (Node.js binding).
266+
*/
267+
coreMlFlags?: number;
268+
/**
269+
* Specify whether to use CPU only in CoreML EP.
270+
*
271+
* This setting is available only in ONNXRuntime (react-native).
272+
*/
256273
useCPUOnly?: boolean;
274+
/**
275+
* Specify whether to enable CoreML EP on subgraph.
276+
*
277+
* This setting is available only in ONNXRuntime (react-native).
278+
*/
257279
enableOnSubgraph?: boolean;
280+
/**
281+
* Specify whether to only enable CoreML EP for Apple devices with ANE (Apple Neural Engine).
282+
*
283+
* This setting is available only in ONNXRuntime (react-native).
284+
*/
258285
onlyEnableDeviceWithANE?: boolean;
259286
}
260287
export interface NnapiExecutionProviderOption extends ExecutionProviderOption {

js/common/lib/onnx-value.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33

44
import {Tensor} from './tensor.js';
55

6-
type NonTensorType = never;
6+
export type NonTensorType = never;
77

88
/**
99
* Type OnnxValue Represents both tensors and non-tensors value for model's inputs/outputs.

js/common/lib/tensor-factory.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -253,7 +253,7 @@ export interface TensorFactory {
253253
/**
254254
* create a tensor from an ImageBitmap object
255255
*
256-
* @param bitMap - the ImageBitmap object to create tensor from
256+
* @param bitmap - the ImageBitmap object to create tensor from
257257
* @param options - An optional object representing options for creating tensor from URL.
258258
*
259259
* The following default settings will be applied:

js/common/lib/tensor.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ export interface Tensor extends TypedTensorBase<Tensor.Type>, TypedTensorUtils<T
160160
/**
161161
* type TensorConstructor defines the constructors of 'Tensor' to create CPU tensor instances.
162162
*/
163-
export interface TensorConstructor {
163+
export interface TensorConstructor extends TensorFactory {
164164
// #region CPU tensor - specify element type
165165
/**
166166
* Construct a new string tensor object from the given type, data and dims.
@@ -326,4 +326,4 @@ export interface TensorConstructor {
326326
}
327327

328328
// eslint-disable-next-line @typescript-eslint/naming-convention
329-
export const Tensor = TensorImpl as (TensorConstructor & TensorFactory);
329+
export const Tensor = TensorImpl as TensorConstructor;

js/common/lib/trace.ts

+9
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,9 @@
33

44
import {env} from './env-impl.js';
55

6+
/**
7+
* @ignore
8+
*/
69
export const TRACE = (deviceType: string, label: string) => {
710
if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {
811
return;
@@ -29,13 +32,19 @@ const TRACE_FUNC = (msg: string, extraMsg?: string) => {
2932
}
3033
};
3134

35+
/**
36+
* @ignore
37+
*/
3238
export const TRACE_FUNC_BEGIN = (extraMsg?: string) => {
3339
if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {
3440
return;
3541
}
3642
TRACE_FUNC('BEGIN', extraMsg);
3743
};
3844

45+
/**
46+
* @ignore
47+
*/
3948
export const TRACE_FUNC_END = (extraMsg?: string) => {
4049
if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {
4150
return;

js/common/lib/training-session.ts

+8-8
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ export declare namespace TrainingSession {
1111
/**
1212
* Either URI file path (string) or Uint8Array containing model or checkpoint information.
1313
*/
14-
type URIorBuffer = string|Uint8Array;
14+
type UriOrBuffer = string|Uint8Array;
1515
}
1616

1717
/**
@@ -98,13 +98,13 @@ export interface TrainingSession {
9898
getParametersSize(trainableOnly: boolean): Promise<number>;
9999

100100
/**
101-
* Copies parameter values from the given array to the training state. Currently, only supporting models with
101+
* Copies parameter values from the given buffer to the training state. Currently, only supporting models with
102102
* parameters of type Float32.
103103
*
104-
* @param buffer - Float32 buffer containing parameters converted to a Uint8Array.
104+
* @param buffer - A Uint8Array representation of Float32 parameters.
105105
* @param trainableOnly - True if trainable parameters only to be modified, false otherwise. Default value is true.
106106
*/
107-
loadParametersBuffer(array: Uint8Array, trainableOnly: boolean): Promise<void>;
107+
loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise<void>;
108108

109109
/**
110110
* Copies the model parameters to a contiguous buffer. Usually used in the context of Federated Learning.
@@ -157,19 +157,19 @@ export interface TrainingSessionCreateOptions {
157157
/**
158158
* URI or buffer for a .ckpt file that contains the checkpoint for the training model.
159159
*/
160-
checkpointState: TrainingSession.URIorBuffer;
160+
checkpointState: TrainingSession.UriOrBuffer;
161161
/**
162162
* URI or buffer for the .onnx training file.
163163
*/
164-
trainModel: TrainingSession.URIorBuffer;
164+
trainModel: TrainingSession.UriOrBuffer;
165165
/**
166166
* Optional. URI or buffer for the .onnx optimizer model file.
167167
*/
168-
optimizerModel?: TrainingSession.URIorBuffer;
168+
optimizerModel?: TrainingSession.UriOrBuffer;
169169
/**
170170
* Optional. URI or buffer for the .onnx eval model file.
171171
*/
172-
evalModel?: TrainingSession.URIorBuffer;
172+
evalModel?: TrainingSession.UriOrBuffer;
173173
}
174174

175175
/**

tools/ci_build/github/azure-pipelines/templates/linux-web-init-and-check.yml

+4
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,10 @@ steps:
3131
node -e "a=require('child_process').execSync('git diff --name-only').toString();if(a)throw new Error('Following source files are not formatted: (did you run \"npm run format\"?)\n'+a)"
3232
workingDirectory: '$(Build.SourcesDirectory)/js'
3333
displayName: 'Check unformatted files'
34+
- script: |
35+
npx typedoc --emit none --treatWarningsAsErrors
36+
workingDirectory: '$(Build.SourcesDirectory)/js/common'
37+
displayName: 'TypeDoc Validation'
3438
- script: |
3539
npm run build:doc
3640
workingDirectory: '$(Build.SourcesDirectory)/js/web'

0 commit comments

Comments
 (0)