Improvements on Exposed ORT support #976

Open · wants to merge 3 commits into main
src/backends/onnx.js (21 changes: 17 additions & 4 deletions)
@@ -54,12 +54,25 @@ const supportedDevices = [];
 /** @type {ONNXExecutionProviders[]} */
 let defaultDevices;
 let ONNX;
-const ORT_SYMBOL = Symbol.for('onnxruntime');
 
-if (ORT_SYMBOL in globalThis) {
-    // If the JS runtime exposes their own ONNX runtime, use it
-    ONNX = globalThis[ORT_SYMBOL];
+// If the JS runtime exposes its own ONNX runtime, use it
+if (apis.IS_EXPOSED_RUNTIME_ENV) {
+    const onnxruntime = globalThis[apis.EXPOSED_RUNTIME_SYMBOL];
+
+    // Ensure that the runtime implements the necessary members.
+    // Consider using an array/map if more required members need to be checked.
+    if (!Object.hasOwn(onnxruntime, 'Tensor')) {
+        throw new Error(`Invalid "globalThis[${String(apis.EXPOSED_RUNTIME_SYMBOL)}]" definition. Missing required exported member "Tensor".`);
+    }
+
+    if (!Object.hasOwn(onnxruntime, 'InferenceSession')) {
+        throw new Error(`Invalid "globalThis[${String(apis.EXPOSED_RUNTIME_SYMBOL)}]" definition. Missing required exported member "InferenceSession".`);
+    }
+    if (!Object.hasOwn(onnxruntime?.InferenceSession, 'create')) {
+        throw new Error(`Invalid "globalThis[${String(apis.EXPOSED_RUNTIME_SYMBOL)}].InferenceSession" definition. Missing required exported member "InferenceSession.create".`);
+    }
+
+    ONNX = onnxruntime;
 } else if (apis.IS_NODE_ENV) {
     ONNX = ONNX_NODE.default ?? ONNX_NODE;

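For context, a minimal sketch (not part of this diff) of how a host environment could satisfy the check above, assuming an onnxruntime-compatible module is available; the `onnxruntime-node` import is only an illustrative stand-in for whatever runtime the host actually bundles:

// Illustrative host-side setup, assuming an onnxruntime-compatible module.
// The backend above looks up Symbol.for('onnxruntime') on globalThis and
// requires `Tensor`, `InferenceSession`, and `InferenceSession.create`.
import * as ort from 'onnxruntime-node';

globalThis[Symbol.for('onnxruntime')] = ort;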
src/env.js (9 changes: 9 additions & 0 deletions)
@@ -35,6 +35,9 @@ const IS_WEB_CACHE_AVAILABLE = typeof self !== "undefined" && 'caches' in self;
 const IS_WEBGPU_AVAILABLE = typeof navigator !== 'undefined' && 'gpu' in navigator;
 const IS_WEBNN_AVAILABLE = typeof navigator !== 'undefined' && 'ml' in navigator;
 
+const EXPOSED_RUNTIME_SYMBOL = Symbol.for('onnxruntime');
+const IS_EXPOSED_RUNTIME_ENV = EXPOSED_RUNTIME_SYMBOL in globalThis;
+
 const IS_PROCESS_AVAILABLE = typeof process !== 'undefined';
 const IS_NODE_ENV = IS_PROCESS_AVAILABLE && process?.release?.name === 'node';
 const IS_FS_AVAILABLE = !isEmpty(fs);
@@ -59,6 +62,12 @@ export const apis = Object.freeze({
     /** Whether the WebNN API is available */
     IS_WEBNN_AVAILABLE,
 
+    /** Symbol under which a JS environment may expose its own ONNX runtime */
+    EXPOSED_RUNTIME_SYMBOL,
+
+    /** Whether we are running in a JS environment that exposes its own ONNX runtime */
+    IS_EXPOSED_RUNTIME_ENV,
+
     /** Whether the Node.js process API is available */
     IS_PROCESS_AVAILABLE,
 
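A small sketch (not part of this diff) of how downstream code could consume the two new `apis` members; the relative import path is an assumption:

// Feature-detect a host-provided ONNX runtime via the frozen `apis` object.
import { apis } from './env.js';

if (apis.IS_EXPOSED_RUNTIME_ENV) {
    // The symbol is exposed too, so callers can reach the runtime directly.
    const hostRuntime = globalThis[apis.EXPOSED_RUNTIME_SYMBOL];
    console.log('Host ONNX runtime members:', Object.keys(hostRuntime));
}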
src/models.js (9 changes: 6 additions & 3 deletions)
@@ -165,9 +165,12 @@ async function getSession(pretrained_model_name_or_path, fileName, options) {
     }
 
     // If the device is not specified, we use the default (supported) execution providers.
-    const selectedDevice = /** @type {import("./utils/devices.js").DeviceType} */(
-        device ?? (apis.IS_NODE_ENV ? 'cpu' : 'wasm')
+    let selectedDevice = /** @type {import("./utils/devices.js").DeviceType} */ (
+        // Do not assign a default device if 'IS_EXPOSED_RUNTIME_ENV'
+        device ?? (apis.IS_EXPOSED_RUNTIME_ENV ? undefined
+            : (apis.IS_NODE_ENV ? 'cpu' : 'wasm'))
     );
 
     const executionProviders = deviceToExecutionProviders(selectedDevice);
 
     // If options.dtype is specified, we use it to choose the suffix for the model file.
@@ -235,7 +238,7 @@ async function getSession(pretrained_model_name_or_path, fileName, options) {
     const free_dimension_overrides = custom_config.free_dimension_overrides;
     if (free_dimension_overrides) {
         session_options.freeDimensionOverrides ??= free_dimension_overrides;
-    } else if (selectedDevice.startsWith('webnn') && !session_options.freeDimensionOverrides) {
+    } else if (selectedDevice?.startsWith('webnn') && !session_options.freeDimensionOverrides) {
         console.warn(
             'WebNN does not currently support dynamic shapes and requires `free_dimension_overrides` to be set in config.json as a field within "transformers.js_config". ' +
             'When `free_dimension_overrides` is not set, you may experience significant performance degradation.'
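As a usage illustration (not part of this diff), the effect of the change is that callers in an exposed-runtime host can simply omit the `device` option; the package name and default task model below are assumptions, not taken from this PR:

// With this change, omitting `device` no longer forces 'cpu'/'wasm' onto a
// host that exposes its own ONNX runtime; selection is left to that runtime.
import { pipeline } from '@huggingface/transformers';

const classifier = await pipeline('sentiment-analysis');
console.log(await classifier('Exposed ORT hosts keep their own execution providers.'));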