Commit
1 parent 2058d63 · commit a6ca33c
Showing 42 changed files with 41,792 additions and 3 deletions.
@@ -0,0 +1,110 @@
# Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved.
# Licensed under the MIT License.

#set(CMAKE_VERBOSE_MAKEFILE on)

# Setup Java compilation
include(FindJava)
find_package(Java REQUIRED)
find_package(JNI REQUIRED)
include(UseJava)
include_directories(${JNI_INCLUDE_DIRS})
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -std=c11")

set(JAVA_ROOT ${REPO_ROOT}/java)
set(CMAKE_JAVA_COMPILE_FLAGS "-source" "1.8" "-target" "1.8" "-encoding" "UTF-8")
if (onnxruntime_RUN_ONNX_TESTS)
  set(JAVA_DEPENDS onnxruntime ${test_data_target})
else()
  set(JAVA_DEPENDS onnxruntime)
endif()

# Specify the Java source files
set(onnxruntime4j_src
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/MapInfo.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/NodeInfo.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxRuntime.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxJavaType.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxMap.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxSequence.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxTensor.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OnnxValue.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtAllocator.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtEnvironment.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtException.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtSession.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/OrtUtil.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/package-info.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/SequenceInfo.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/TensorInfo.java
    ${REPO_ROOT}/java/src/main/java/ai/onnxruntime/ValueInfo.java
    )

# Build the jar and generate the native headers
add_jar(onnxruntime4j SOURCES ${onnxruntime4j_src} VERSION ${ORT_VERSION} GENERATE_NATIVE_HEADERS onnxruntime4j_generated DESTINATION ${REPO_ROOT}/java/src/main/native/)

# Specify the native sources (without the generated headers)
file(GLOB onnxruntime4j_native_src
    "${REPO_ROOT}/java/src/main/native/*.c"
    "${REPO_ROOT}/java/src/main/native/OrtJniUtil.h"
    "${REPO_ROOT}/include/onnxruntime/core/session/*.h"
    )

# Build the JNI library
add_library(onnxruntime4j_jni SHARED ${onnxruntime4j_native_src} ${onnxruntime4j_generated})
onnxruntime_add_include_to_target(onnxruntime4j_jni onnxruntime_session)
target_include_directories(onnxruntime4j_jni PRIVATE ${REPO_ROOT}/include ${REPO_ROOT}/java/src/main/native)
target_link_libraries(onnxruntime4j_jni PUBLIC ${JNI_LIBRARIES} onnxruntime onnxruntime4j_generated)

# Now that the jar, the JNI binary and the shared library have been built, build the jar with the binaries added.

# This block creates the new jar name
get_property(onnxruntime_jar_name TARGET onnxruntime4j PROPERTY JAR_FILE)
get_filename_component(onnxruntime_jar_abs ${onnxruntime_jar_name} ABSOLUTE)
get_filename_component(jar_path ${onnxruntime_jar_abs} DIRECTORY)
set(onnxruntime_jar_binaries_name "${jar_path}/onnxruntime4j-${ORT_VERSION}-with-binaries.jar")
set(onnxruntime_jar_binaries_platform "$<SHELL_PATH:${onnxruntime_jar_binaries_name}>")

# Copy the current jar
add_custom_command(TARGET onnxruntime4j_jni PRE_BUILD
                   COMMAND ${CMAKE_COMMAND} -E copy
                   ${onnxruntime_jar_name}
                   ${onnxruntime_jar_binaries_platform})

# Make a temp directory to store the binaries
add_custom_command(TARGET onnxruntime4j_jni POST_BUILD
                   COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_CURRENT_BINARY_DIR}/java-libs/lib")

# Copy the binaries
add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy "$<TARGET_FILE:onnxruntime4j_jni>" ${CMAKE_CURRENT_BINARY_DIR}/java-libs/lib/)

if (WIN32)
  add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy "$<TARGET_FILE:onnxruntime>" ${CMAKE_CURRENT_BINARY_DIR}/java-libs/lib/)
  # Update the with-binaries jar so it includes the binaries
  add_custom_command(
    TARGET onnxruntime4j_jni POST_BUILD
    COMMAND ${Java_JAR_EXECUTABLE} -uf ${onnxruntime_jar_binaries_platform} -C ${CMAKE_CURRENT_BINARY_DIR}/java-libs lib/$<TARGET_FILE_NAME:onnxruntime4j_jni> -C ${CMAKE_CURRENT_BINARY_DIR}/java-libs lib/$<TARGET_FILE_NAME:onnxruntime>
    DEPENDS onnxruntime4j
    COMMENT "Rebuilding Java archive ${_JAVA_TARGET_OUTPUT_NAME}"
    VERBATIM
  )
else ()
  add_custom_command(TARGET onnxruntime4j_jni POST_BUILD COMMAND ${CMAKE_COMMAND} -E copy "$<TARGET_LINKER_FILE:onnxruntime>" ${CMAKE_CURRENT_BINARY_DIR}/java-libs/lib/)
  # Update the with-binaries jar so it includes the binaries
  add_custom_command(
    TARGET onnxruntime4j_jni POST_BUILD
    COMMAND ${Java_JAR_EXECUTABLE} -uf ${onnxruntime_jar_binaries_platform} -C ${CMAKE_CURRENT_BINARY_DIR}/java-libs lib/$<TARGET_FILE_NAME:onnxruntime4j_jni> -C ${CMAKE_CURRENT_BINARY_DIR}/java-libs lib/$<TARGET_LINKER_FILE_NAME:onnxruntime>
    DEPENDS onnxruntime4j
    COMMENT "Rebuilding Java archive ${_JAVA_TARGET_OUTPUT_NAME}"
    VERBATIM
  )
endif()

create_javadoc(onnxruntime4j_javadoc
               FILES ${onnxruntime4j_src}
               DOCTITLE "Onnx Runtime Java API"
               WINDOWTITLE "OnnxRuntime-Java-API"
               AUTHOR FALSE
               USE TRUE
               VERSION FALSE
               )
@@ -0,0 +1,56 @@
# ONNX Runtime Java API
ONNX Runtime provides a Java binding for running inference on ONNX models on a JVM, using Java 8 or newer.

Two jar files are created during the build process: one contains the onnxruntime shared library, the JNI binding and the Java class files, and the other contains only the class files. By default the shared libraries are loaded from a `/lib` folder on the classpath; if you wish to have them loaded from `java.library.path` instead, supply `-DORT_LOAD_FROM_LIBRARY_PATH` to the JVM at runtime.
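
For example, a hypothetical launch that opts into `java.library.path` loading (the jar version, native library directory, and main class below are placeholders):

    java -DORT_LOAD_FROM_LIBRARY_PATH \
         -Djava.library.path=/path/to/native/libs \
         -cp onnxruntime4j-<version>.jar:myapp.jar com.example.MyApp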

## Sample Code

The unit tests contain several examples of loading models, inspecting input/output node shapes and types, and constructing tensors for scoring.

* [../java/src/test/java/ai/onnxruntime/InferenceTest.java#L66](../java/src/test/java/ai/onnxruntime/InferenceTest.java#L66)

## Getting Started
Here is a simple tutorial for getting started with running inference on an existing ONNX model with given input data. The model is typically trained using one of the well-known training frameworks and exported into the ONNX format.
Note that the code presented below uses syntax available from Java 10 onwards. The Java 8 syntax is similar but more verbose.
To start a scoring session, first create the `OrtEnvironment`, then open a session using the `OrtSession` class, passing in the file path to the model as a parameter.

    var env = OrtEnvironment.getEnvironment();
    var session = env.createSession("model.onnx", new OrtSession.SessionOptions());

Once a session is created, you can execute queries using the `run` method of the `OrtSession` object.
At the moment we support `OnnxTensor` inputs, and models can produce `OnnxTensor`, `OnnxSequence` or `OnnxMap` outputs. The latter two are more likely when scoring models produced by frameworks like scikit-learn.
The run call expects a `Map<String,OnnxTensor>` whose keys match the input node names stored in the model. These can be viewed by calling `session.getInputNames()` or `session.getInputInfo()` on an instantiated session.
The run call produces a `Result` object, which contains a `Map<String,OnnxValue>` representing the output. The `Result` object is `AutoCloseable` and can be used in a try-with-resources statement to prevent references from leaking out. Once the `Result` object is closed, all its child `OnnxValue`s are closed too.

    OnnxTensor t1, t2;
    var inputs = Map.of("name1", t1, "name2", t2);
    try (var results = session.run(inputs)) {
        // manipulate the results
    }
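
Continuing the sketch above, the snippet below shows one way to read values back out. The output name `"label"` is hypothetical, and `Result.get(String)` returning an `Optional<OnnxValue>` together with `OnnxValue.getValue()` are assumed from the Javadoc rather than stated in this README:

    System.out.println(session.getInputNames()); // the names the input map keys must match

    try (var results = session.run(inputs)) {
        // look up a single output by its (hypothetical) name
        OnnxValue value = results.get("label").get();
        // getValue() copies the value out of the native heap into a plain
        // Java object, e.g. a float[] or float[][] depending on the shape
        Object output = value.getValue();
    }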

You can load your input data into `OnnxTensor` objects in several ways. The most efficient way is to use a `java.nio.Buffer`, but it's possible to use multidimensional arrays too. If constructed using arrays, the arrays must not be ragged.

    FloatBuffer sourceData;  // assume your data is loaded into a FloatBuffer
    long[] dimensions;       // and the dimensions of the input are stored here
    var tensorFromBuffer = OnnxTensor.createTensor(env, sourceData, dimensions);

    float[][] sourceArray = new float[28][28];  // assume your data is loaded into a float array
    var tensorFromArray = OnnxTensor.createTensor(env, sourceArray);
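
For instance, the `FloatBuffer` and dimensions assumed above could be prepared from a flat `float[]`; a minimal sketch, where the {1, 784} shape is purely illustrative:

    float[] rawData = new float[784];                   // your flattened input values
    FloatBuffer sourceData = FloatBuffer.wrap(rawData); // java.nio.FloatBuffer, wraps without copying
    long[] dimensions = new long[] {1, 784};            // a batch of one, 784 features
    var tensorFromBuffer = OnnxTensor.createTensor(env, sourceData, dimensions);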

Here is a [complete sample program](../java/sample/ScoreMNIST.java) that runs inference on a pretrained MNIST model.

## Running on a GPU or with another provider (Optional)
To enable other execution providers like GPUs, simply turn on the appropriate flag on `SessionOptions` when creating an `OrtSession`.

    int gpuDeviceId = 0; // The GPU device ID to execute on
    var sessionOptions = new OrtSession.SessionOptions();
    sessionOptions.addCUDA(gpuDeviceId);
    var session = environment.createSession("model.onnx", sessionOptions);

The execution providers are preferred in the order they were enabled.
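
As a minimal sketch of what that ordering means in practice (only the `addCUDA` call shown above is used; the built-in CPU provider always remains as the final fallback):

    var sessionOptions = new OrtSession.SessionOptions();
    sessionOptions.addCUDA(0); // registered first, so CUDA is preferred for every node it supports
    // anything the CUDA provider cannot run falls back to the default CPU provider
    var session = environment.createSession("model.onnx", sessionOptions);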

## API Reference

The Javadoc is available [here](https://microsoft.github.io/onnxruntime/java/index.html).