Commit 25e8d04: code optimization

heliqi committed Mar 3, 2022
1 parent 4e9a2ee commit 25e8d04

Showing 9 changed files with 29 additions and 35 deletions.
10 changes: 6 additions & 4 deletions cmake/external/onnxruntime.cmake
@@ -31,6 +31,8 @@ SET(ONNXRUNTIME_SOURCE_DIR ${THIRD_PARTY_PATH}/onnxruntime/src/${ONNXRUNTIME
 SET(ONNXRUNTIME_INSTALL_DIR ${THIRD_PARTY_PATH}/install/onnxruntime)
 SET(ONNXRUNTIME_INC_DIR "${ONNXRUNTIME_INSTALL_DIR}/include" CACHE PATH "onnxruntime include directory." FORCE)
 SET(ONNXRUNTIME_LIB_DIR "${ONNXRUNTIME_INSTALL_DIR}/lib" CACHE PATH "onnxruntime lib directory." FORCE)
+SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${ONNXRUNTIME_LIB_DIR}")
+

 if (WIN32)
 SET(ONNXRUNTIME_URL "https://github.com/microsoft/onnxruntime/releases/download/v1.10.0/onnxruntime-win-x64-1.10.0.zip")
@@ -44,16 +46,16 @@ endif()
 INCLUDE_DIRECTORIES(${ONNXRUNTIME_INC_DIR}) # For ONNXRUNTIME code to include internal headers.
 if (WIN32)
 SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.dll" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
-SET(ONNXRUNTIME_SHARE_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.dll" CACHE FILEPATH "ONNXRUNTIME share library." FORCE)
+SET(ONNXRUNTIME_SHARED_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.dll" CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
 SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/onnxruntime.lib" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
 elseif (APPLE)
 SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/libonnxruntime.1.10.0.dylib" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
 SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/libonnxruntime.1.10.0.dylib" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
-SET(ONNXRUNTIME_SHARE_LIB ${ONNXRUNTIME_LIB} CACHE FILEPATH "ONNXRUNTIME share library." FORCE)
+SET(ONNXRUNTIME_SHARED_LIB ${ONNXRUNTIME_LIB} CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
 else ()
 SET(ONNXRUNTIME_SOURCE_LIB "${ONNXRUNTIME_SOURCE_DIR}/lib/libonnxruntime.so.1.10.0" CACHE FILEPATH "ONNXRUNTIME source library." FORCE)
 SET(ONNXRUNTIME_LIB "${ONNXRUNTIME_INSTALL_DIR}/lib/libonnxruntime.so.1.10.0" CACHE FILEPATH "ONNXRUNTIME static library." FORCE)
-SET(ONNXRUNTIME_SHARE_LIB ${ONNXRUNTIME_LIB} CACHE FILEPATH "ONNXRUNTIME share library." FORCE)
+SET(ONNXRUNTIME_SHARED_LIB ${ONNXRUNTIME_LIB} CACHE FILEPATH "ONNXRUNTIME shared library." FORCE)
 endif ()


@@ -67,7 +69,7 @@ if (WIN32)
 CONFIGURE_COMMAND ""
 BUILD_COMMAND ""
 UPDATE_COMMAND ""
-INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB} ${ONNXRUNTIME_SHARE_LIB} &&
+INSTALL_COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_LIB} ${ONNXRUNTIME_SHARED_LIB} &&
 ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SOURCE_DIR}/lib/onnxruntime.lib ${ONNXRUNTIME_LIB} &&
 ${CMAKE_COMMAND} -E copy_directory ${ONNXRUNTIME_SOURCE_DIR}/include ${ONNXRUNTIME_INC_DIR}
 BUILD_BYPRODUCTS ${ONNXRUNTIME_LIB}
5 changes: 3 additions & 2 deletions cmake/external/paddle2onnx.cmake
@@ -29,19 +29,20 @@ SET(PADDLE2ONNX_INSTALL_DIR ${THIRD_PARTY_PATH}/install/paddle2onnx)
 SET(PADDLE2ONNX_INC_DIR "${PADDLE2ONNX_INSTALL_DIR}/include" CACHE PATH "paddle2onnx include directory." FORCE)
 SET(PADDLE2ONNX_REPOSITORY ${GIT_URL}/PaddlePaddle/Paddle2ONNX.git)
 SET(PADDLE2ONNX_TAG cpp)

 SET(LIBDIR "lib")
+SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_RPATH}" "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}")

 INCLUDE_DIRECTORIES(${PADDLE2ONNX_INC_DIR}) # For PADDLE2ONNX code to include internal headers.
 if(WIN32)
 SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}/paddle2onnx.lib" CACHE FILEPATH "paddle2onnx static library." FORCE)
-SET(PADDLE2ONNX_SHARE_LIB "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}/paddle2onnx.dll" CACHE FILEPATH "paddle2onnx share library." FORCE)
+SET(PADDLE2ONNX_SHARED_LIB "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}/paddle2onnx.dll" CACHE FILEPATH "paddle2onnx shared library." FORCE)
 elseif(APPLE)
 SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}/libpaddle2onnx.dylib" CACHE FILEPATH "PADDLE2ONNX library." FORCE)
 else()
 SET(PADDLE2ONNX_LIB "${PADDLE2ONNX_INSTALL_DIR}/${LIBDIR}/libpaddle2onnx.so" CACHE FILEPATH "PADDLE2ONNX library." FORCE)
 endif(WIN32)


 # The protoc path is required to compile onnx.
 string(REPLACE "/" ";" PROTOC_BIN_PATH ${PROTOBUF_PROTOC_EXECUTABLE})
 list(POP_BACK PROTOC_BIN_PATH)
2 changes: 1 addition & 1 deletion cmake/inference_lib.cmake
@@ -123,7 +123,7 @@ function(copy_part_of_thrid_party TARGET DST)
 set(dst_dir "${DST}/third_party/install/paddle2onnx")
 if(WIN32)
 copy(${TARGET}
-SRCS ${PADDLE2ONNX_INC_DIR}/paddle2onnx ${PADDLE2ONNX_SHARE_LIB} ${PADDLE2ONNX_LIB}
+SRCS ${PADDLE2ONNX_INC_DIR}/paddle2onnx ${PADDLE2ONNX_SHARED_LIB} ${PADDLE2ONNX_LIB}
 DSTS ${dst_dir}/include ${dst_dir}/lib ${dst_dir}/lib)
 else()
 copy(${TARGET}
9 changes: 6 additions & 3 deletions paddle/fluid/inference/api/analysis_predictor.cc
@@ -36,7 +36,6 @@
 #include "paddle/fluid/inference/analysis/helper.h"
 #include "paddle/fluid/inference/analysis/passes/memory_optimize_pass.h"
 #include "paddle/fluid/inference/api/helper.h"
-#include "paddle/fluid/inference/api/onnxruntime_predictor.h"
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #include "paddle/fluid/inference/api/paddle_inference_pass.h"
 #include "paddle/fluid/inference/utils/io_utils.h"
@@ -57,6 +56,10 @@
 #include "paddle/fluid/inference/api/mkldnn_quantizer.h"
 #endif

+#ifdef PADDLE_WITH_ONNXRUNTIME
+#include "paddle/fluid/inference/api/onnxruntime_predictor.h"
+#endif
+
 #if PADDLE_WITH_TENSORRT
 #include "paddle/fluid/inference/tensorrt/convert/op_converter.h"
 #include "paddle/fluid/inference/tensorrt/helper.h"
@@ -1495,8 +1498,8 @@ Predictor::Predictor(const Config &config) {
 if (config.use_onnxruntime()) {
 #ifdef PADDLE_WITH_ONNXRUNTIME
 if (config.use_gpu()) {
-LOG(WARNING) << "ONNXRuntime not support gpu for now, fall back to "
-"using Paddle Inference.";
+LOG(WARNING) << "The current ONNXRuntime backend doesn't support GPU,"
+"and it falls back to use Paddle Inference.";
 } else if (!paddle::CheckConvertToONNX(config)) {
 LOG(WARNING)
 << "Paddle2ONNX do't support convert the Model, fall back to using "
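Note: the change above moves the onnxruntime_predictor.h include behind the same PADDLE_WITH_ONNXRUNTIME guard that already wraps the construction site, so builds without the ONNXRuntime option keep compiling. A minimal, self-contained C++ sketch of this guard-plus-runtime-fallback shape; FakeConfig, RunWithOnnxRuntime, and RunWithPaddle are illustrative stand-ins, not Paddle symbols:

// Backend-specific code only exists when the build defines
// PADDLE_WITH_ONNXRUNTIME; unsupported configurations fall back at runtime.
#include <iostream>

struct FakeConfig {
  bool use_onnxruntime = false;
  bool use_gpu = false;
};

void RunWithPaddle(const FakeConfig&) { std::cout << "Paddle Inference\n"; }

#ifdef PADDLE_WITH_ONNXRUNTIME
void RunWithOnnxRuntime(const FakeConfig&) { std::cout << "ONNXRuntime\n"; }
#endif

void Dispatch(const FakeConfig& config) {
  if (config.use_onnxruntime) {
#ifdef PADDLE_WITH_ONNXRUNTIME
    if (config.use_gpu) {
      // Mirrors the warning path above: CPU-only backend, GPU requested.
      std::cerr << "ONNXRuntime backend is CPU-only; falling back to "
                   "Paddle Inference.\n";
    } else {
      RunWithOnnxRuntime(config);
      return;
    }
#else
    std::cerr << "Built without PADDLE_WITH_ONNXRUNTIME; falling back to "
                 "Paddle Inference.\n";
#endif
  }
  RunWithPaddle(config);
}

int main() {
  FakeConfig config;
  config.use_onnxruntime = true;
  Dispatch(config);
  return 0;
}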
13 changes: 4 additions & 9 deletions paddle/fluid/inference/api/onnxruntime_predictor.cc
@@ -66,10 +66,6 @@ framework::proto::VarType::Type ConvertONNXType(
 }
 }

-/*
-bool CheckConvertToONNX(const AnalysisConfig &config) { return true; }
-*/
-
 bool CheckConvertToONNX(const AnalysisConfig &config) {
 if (!config.model_dir().empty()) {
 LOG(ERROR) << "Paddle2ONNX not support model_dir config";
@@ -104,11 +100,14 @@ bool ONNXRuntimePredictor::Init() {
 config_.model_from_memory());

 Ort::SessionOptions session_options;
-// Turn optimization off first, and then turn it on when it's stabl
+// Turn optimization off first, and then turn it on when it's stable
 // session_options.SetGraphOptimizationLevel(GraphOptimizationLevel::ORT_ENABLE_ALL);
 // session_options.SetExecutionMode(ExecutionMode::ORT_SEQUENTIAL);
 // session_options.EnableCpuMemArena();
 // session_options.EnableMemPattern();
+// session_options.SetInterOpNumThreads(config_.cpu_math_library_num_threads());
+session_options.SetIntraOpNumThreads(config_.cpu_math_library_num_threads());
+VLOG(2) << "ONNXRuntime threads " << config_.cpu_math_library_num_threads();
 if (config_.profile_enabled()) {
 LOG(WARNING) << "ONNXRuntime Profiler is activated, which might affect the "
 "performance";
@@ -122,10 +121,6 @@ bool ONNXRuntimePredictor::Init() {
 "will be "
 "generated.";
 }
-// session_options.SetInterOpNumThreads(config_.cpu_math_library_num_threads());
-session_options.SetIntraOpNumThreads(config_.cpu_math_library_num_threads());
-VLOG(2) << "ONNXRuntime threads " << config_.cpu_math_library_num_threads();
-// session_ = {env_, config_.prog_file().c_str(), session_options};
 session_ = {env_, onnx_proto.data(), onnx_proto.size(), session_options};

 auto memory_info =
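For reference, the session setup Init() performs above, reduced to a standalone C++ sketch; it assumes the ONNX Runtime 1.10 C++ headers, and the model path and thread count are illustrative. The predictor itself loads the Paddle2ONNX-converted proto from memory via the (env, data, size, options) Session overload rather than from a file:

#include <onnxruntime_cxx_api.h>

int main() {
  Ort::Env env(ORT_LOGGING_LEVEL_WARNING, "demo");

  Ort::SessionOptions session_options;
  // Pin intra-op parallelism (the predictor wires this to
  // cpu_math_library_num_threads()); graph optimization and inter-op
  // threading stay at their defaults until they prove stable.
  session_options.SetIntraOpNumThreads(4);

  // ORTCHAR_T is char on Linux/macOS and wchar_t on Windows, so this
  // literal path is for the non-Windows case.
  Ort::Session session(env, "model.onnx", session_options);
  return 0;
}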
10 changes: 0 additions & 10 deletions paddle/fluid/inference/api/onnxruntime_predictor.h
@@ -28,11 +28,9 @@
 #include "paddle/fluid/platform/device/gpu/gpu_types.h"
 #include "paddle/fluid/string/printf.h"

-#ifdef PADDLE_WITH_ONNXRUNTIME
 #include "onnxruntime_c_api.h" // NOLINT
 #include "onnxruntime_cxx_api.h" // NOLINT
 #include "paddle2onnx/converter.h"
-#endif

 ///
 /// \file onnxruntime_predictor.h
@@ -51,9 +49,7 @@ bool CheckConvertToONNX(const AnalysisConfig &config);
 struct ONNXDesc {
 std::string name;
 std::vector<int64_t> shape;
-#ifdef PADDLE_WITH_ONNXRUNTIME
 ONNXTensorElementDataType dtype;
-#endif
 };

 ///
@@ -95,9 +91,7 @@ class ONNXRuntimePredictor : public PaddlePredictor {
 explicit ONNXRuntimePredictor(const AnalysisConfig &config)
 : config_(config) {
 predictor_id_ = inference::GetUniqueId();
-#ifdef PADDLE_WITH_ONNXRUNTIME
 env_ = Ort::Env(ORT_LOGGING_LEVEL_VERBOSE, "onnx");
-#endif
 }
 ///
 /// \brief Destroy the ONNXRuntime Predictor object
@@ -181,7 +175,6 @@ class ONNXRuntimePredictor : public PaddlePredictor {
 std::shared_ptr<framework::Scope> scope_;

 private:
-#ifdef PADDLE_WITH_ONNXRUNTIME
 ///
 /// \brief get the Ort Value(input Tensor).
 ///
@@ -203,16 +196,13 @@
 /// \return get a Ort::Value
 ///
 void AsTensor(const Ort::Value &value, const ONNXDesc &desc);
-#endif

 private:
 AnalysisConfig config_;

-#ifdef PADDLE_WITH_ONNXRUNTIME
 // ONNXRuntime
 Ort::Env env_;
 Ort::Session session_{nullptr};
-#endif

 platform::Place place_;
 framework::Scope *sub_scope_{nullptr};
4 changes: 2 additions & 2 deletions paddle/fluid/pybind/CMakeLists.txt
@@ -246,13 +246,13 @@ if(WITH_PYTHON)
 endif()
 if(WITH_ONNXRUNTIME)
 ADD_CUSTOM_COMMAND(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/paddle2onnx.dll
-COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE2ONNX_SHARE_LIB} ${CMAKE_CURRENT_BINARY_DIR}
+COMMAND ${CMAKE_COMMAND} -E copy ${PADDLE2ONNX_SHARED_LIB} ${CMAKE_CURRENT_BINARY_DIR}
 DEPENDS paddle2onnx)
 list(APPEND OP_IMPL_DEPS ${CMAKE_CURRENT_BINARY_DIR}/paddle2onnx.dll)
 list(APPEND EAGER_OP_IMPL_DEPS ${CMAKE_CURRENT_BINARY_DIR}/paddle2onnx.dll)

 ADD_CUSTOM_COMMAND(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/onnxruntime.dll
-COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SHARE_LIB} ${CMAKE_CURRENT_BINARY_DIR}
+COMMAND ${CMAKE_COMMAND} -E copy ${ONNXRUNTIME_SHARED_LIB} ${CMAKE_CURRENT_BINARY_DIR}
 DEPENDS onnxruntime)
 list(APPEND OP_IMPL_DEPS ${CMAKE_CURRENT_BINARY_DIR}/onnxruntime.dll)
 list(APPEND EAGER_OP_IMPL_DEPS ${CMAKE_CURRENT_BINARY_DIR}/onnxruntime.dll)
7 changes: 5 additions & 2 deletions paddle/fluid/pybind/inference_api.cc
@@ -28,12 +28,15 @@
 #include <vector>
 #include "paddle/fluid/inference/api/analysis_predictor.h"
 #include "paddle/fluid/inference/api/helper.h"
-#include "paddle/fluid/inference/api/onnxruntime_predictor.h"
 #include "paddle/fluid/inference/api/paddle_infer_contrib.h"
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #include "paddle/fluid/inference/api/paddle_pass_builder.h"
 #include "paddle/fluid/inference/utils/io_utils.h"

+#ifdef PADDLE_WITH_ONNXRUNTIME
+#include "paddle/fluid/inference/api/onnxruntime_predictor.h"
+#endif
+
 namespace py = pybind11;

 namespace pybind11 {
@@ -559,7 +562,7 @@ void BindAnalysisConfig(py::module *m) {
 .def("disable_gpu", &AnalysisConfig::DisableGpu)
 .def("enable_onnxruntime", &AnalysisConfig::EnableONNXRuntime)
 .def("disable_onnxruntime", &AnalysisConfig::DisableONNXRuntime)
-.def("onnxruntime_enable", &AnalysisConfig::use_onnxruntime)
+.def("onnxruntime_enabled", &AnalysisConfig::use_onnxruntime)
 .def("use_gpu", &AnalysisConfig::use_gpu)
 .def("use_xpu", &AnalysisConfig::use_xpu)
 .def("use_npu", &AnalysisConfig::use_npu)
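The rename from onnxruntime_enable to onnxruntime_enabled matches the getter's semantics: it queries state rather than changing it. A minimal pybind11 sketch of the same binding pattern; Config here is a toy class, not Paddle's AnalysisConfig:

#include <pybind11/pybind11.h>

namespace py = pybind11;

class Config {
 public:
  void EnableONNXRuntime() { use_onnxruntime_ = true; }
  bool use_onnxruntime() const { return use_onnxruntime_; }

 private:
  bool use_onnxruntime_ = false;
};

PYBIND11_MODULE(demo, m) {
  py::class_<Config>(m, "Config")
      .def(py::init<>())
      // Imperative name for the mutator, past-tense name for the query.
      .def("enable_onnxruntime", &Config::EnableONNXRuntime)
      .def("onnxruntime_enabled", &Config::use_onnxruntime);
}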
4 changes: 2 additions & 2 deletions python/setup.py.in
@@ -504,9 +504,9 @@ if '${WITH_MKLDNN}' == 'ON':
     package_data['paddle.libs']+=['mkldnn.dll']

 if '${WITH_ONNXRUNTIME}' == 'ON':
-    shutil.copy('${ONNXRUNTIME_SHARE_LIB}', libs_path)
+    shutil.copy('${ONNXRUNTIME_SHARED_LIB}', libs_path)
     if os.name == 'nt':
-        shutil.copy('${PADDLE2ONNX_SHARE_LIB}', libs_path)
+        shutil.copy('${PADDLE2ONNX_SHARED_LIB}', libs_path)
         package_data['paddle.libs']+=['paddle2onnx.dll', 'onnxruntime.dll']
     else:
         shutil.copy('${PADDLE2ONNX_LIB}', libs_path)
