Merge pull request PaddlePaddle#1964 from HydrogenSulfate/add_paddle2onnx_tipc

add paddle2onnx tipc chain
RainFrost1 authored Jun 7, 2022
2 parents a43f853 + 1740116 commit ff81fa0
Showing 21 changed files with 339 additions and 19 deletions.
8 changes: 6 additions & 2 deletions test_tipc/README.md
@@ -35,18 +35,22 @@
│   ├── MobileNetV3                                # test config directory for the MobileNetV3 series
│   │   ├── MobileNetV3_large_x1_0_train_infer_python.txt                                    # basic training + inference config
│   │   ├── MobileNetV3_large_x1_0_train_linux_gpu_fleet_amp_infer_python_linux_gpu_cpu.txt  # multi-node multi-GPU training + inference config
│   │   ├── MobileNetV3_large_x1_0_train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt # mixed-precision training + inference config
│   │   └── MobileNetV3_large_x1_0_paddle2onnx_infer_python.txt                              # paddle2onnx inference test config
│   └── ResNet                                     # test config directory for the ResNet series
│       ├── ResNet50_vd_train_infer_python.txt                                    # basic training + inference config
│       ├── ResNet50_vd_train_linux_gpu_fleet_amp_infer_python_linux_gpu_cpu.txt  # multi-node multi-GPU training + inference config
│       ├── ResNet50_vd_train_linux_gpu_normal_amp_infer_python_linux_gpu_cpu.txt # mixed-precision training + inference config
│       └── ResNet50_vd_paddle2onnx_infer_python.txt                              # paddle2onnx inference test config
|   ......
├── docs
│   ├── guide.png
│   └── test.png
├── prepare.sh                        # downloads the data and models required to run the test_*.sh scripts
├── README.md                         # usage documentation
├── results                           # pre-saved inference results, used for accuracy comparison against the actual results
├── test_paddle2onnx.sh               # main script for testing paddle2onnx inference
└── test_train_inference_python.sh    # main script for testing Python training and inference
```

@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:MobileNetV3_large_x1_0
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/MobileNetV3_large_x1_0_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/MobileNetV3_large_x1_0_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/MobileNetV3_large_x1_0_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/MobileNetV3_large_x1_0_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
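
For reference, each `key:value` line in a `paddle2onnx_params` config maps directly onto the commands the TIPC harness runs. Below is a minimal sketch of how the MobileNetV3_large_x1_0 config above could expand into the conversion and inference steps; the actual command assembly lives in `test_paddle2onnx.sh`, so this is an illustration only, with paths and flags taken from the config lines and the overall shape following the logs shown in `test_tipc/docs/test_paddle2onnx.md`:

```shell
# Sketch (not part of this commit): expanding the config above into commands.
MODEL_DIR=./deploy/models/MobileNetV3_large_x1_0_infer

# 1) Model conversion: each "--flag:value" config line becomes "--flag=value".
paddle2onnx --model_dir=${MODEL_DIR} \
            --model_filename=inference.pdmodel \
            --params_filename=inference.pdiparams \
            --save_file=${MODEL_DIR}/inference.onnx \
            --opset_version=10 \
            --enable_onnx_checker=True

# 2) ONNX inference: each "Global.*" config line becomes a "-o" override.
cd deploy && python3.7 ./python/predict_cls.py \
    -c configs/inference_cls.yaml \
    -o Global.use_onnx=True \
    -o Global.inference_model_dir=./models/MobileNetV3_large_x1_0_infer \
    -o Global.use_gpu=False
```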
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PP-ShiTu_general_rec
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/general_PPLCNet_x2_5_lite_v1.0_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/general_PPLCNet_x2_5_lite_v1.0_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/general_PPLCNet_x2_5_lite_v1.0_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/general_PPLCNet_x2_5_lite_v1.0_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PP-ShiTu_mainbody_det
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/rec/models/inference/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/picodet_PPLCNet_x2_5_mainbody_lite_v1.0_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,15 @@
===========================paddle2onnx_params===========================
model_name:PPHGNet_small
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPHGNet_small_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPHGNet_small_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPHGNet_small_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,15 @@
===========================paddle2onnx_params===========================
model_name:PPHGNet_tiny
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPHGNet_tiny_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPHGNet_tiny_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPHGNet_tiny_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,15 @@
===========================paddle2onnx_params===========================
model_name:PPLCNet_x0_25
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPLCNet_x0_25_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPLCNet_x0_25_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPLCNet_x0_25_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PPLCNet_x0_25
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPLCNet_x0_25_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPLCNet_x0_25_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_25_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPLCNet_x0_25_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PPLCNet_x0_75
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPLCNet_x0_75_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPLCNet_x0_75_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x0_75_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPLCNet_x0_75_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PPLCNet_x1_0
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPLCNet_x1_0_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPLCNet_x1_0_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_0_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPLCNet_x1_0_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PPLCNet_x1_5
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPLCNet_x1_5_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPLCNet_x1_5_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x1_5_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPLCNet_x1_5_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PPLCNet_x2_0
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPLCNet_x2_0_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPLCNet_x2_0_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_0_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPLCNet_x2_0_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PPLCNet_x2_5
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPLCNet_x2_5_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPLCNet_x2_5_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNet_x2_5_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPLCNet_x2_5_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:PPLCNetV2_base
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/PPLCNetV2_base_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/PPLCNetV2_base_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/PPLCNetV2_base_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/PPLCNetV2_base_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:ResNet50
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/ResNet50_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/ResNet50_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/ResNet50_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -8,7 +8,9 @@ python:python3.7
--save_file:./deploy/models/ResNet50_vd_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar
inference: python/predict_cls.py -c configs/inference_cls.yaml
Global.use_onnx:True
Global.inference_model_dir:models/ResNet50_vd_infer/
Global.use_gpu:False
-c:configs/inference_cls.yaml
@@ -0,0 +1,16 @@
===========================paddle2onnx_params===========================
model_name:SwinTransformer_tiny_patch4_window7_224
python:python3.7
2onnx: paddle2onnx
--model_dir:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/
--model_filename:inference.pdmodel
--params_filename:inference.pdiparams
--save_file:./deploy/models/SwinTransformer_tiny_patch4_window7_224_infer/inference.onnx
--opset_version:10
--enable_onnx_checker:True
inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/SwinTransformer_tiny_patch4_window7_224_infer.tar
inference:./python/predict_cls.py
Global.use_onnx:True
Global.inference_model_dir:./models/SwinTransformer_tiny_patch4_window7_224_infer
Global.use_gpu:False
-c:configs/inference_cls.yaml
52 changes: 52 additions & 0 deletions test_tipc/docs/test_paddle2onnx.md
@@ -0,0 +1,52 @@
# Paddle2ONNX inference test

The main script for the Paddle2ONNX inference test is `test_paddle2onnx.sh`. It tests Paddle2ONNX model conversion and verifies that the converted model predicts correctly.

## 1. Summary of test coverage

Depending on whether quantization is used during training, the models covered by this test fall into `normal models` and `quantized models`. Paddle2ONNX inference support for the two types is summarized below:

| Model type | Device |
| ---- | ---- |
| normal model | GPU |
| normal model | CPU |


## 2. Test procedure

The following uses the paddle2onnx test of the `ResNet50` model as an example.

### 2.1 Functional test

First run `prepare.sh` to prepare the required data and model, then run `test_paddle2onnx.sh`. Log files with the `paddle2onnx_infer_*.log` suffix are generated under `test_tipc/output/ResNet50`.
The test commands and results for `ResNet50` are shown below.

```shell
bash test_tipc/prepare.sh ./test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt paddle2onnx_infer

# Usage:
bash test_tipc/test_paddle2onnx.sh ./test_tipc/config/ResNet/ResNet50_linux_gpu_normal_normal_paddle2onnx_python_linux_cpu.txt
```

#### Results

The outcome of each test is recorded in `./test_tipc/output/ResNet50/results_paddle2onnx.log`.
On success, the log contains lines such as:

```
Run successfully with command - paddle2onnx --model_dir=./deploy/models/ResNet50_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/ResNet50_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
Run successfully with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/ResNet50_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/ResNet50/paddle2onnx_infer_cpu.log 2>&1 && cd ../!
```

On failure, it contains lines such as:

```
Run failed with command - paddle2onnx --model_dir=./deploy/models/ResNet50_infer/ --model_filename=inference.pdmodel --params_filename=inference.pdiparams --save_file=./deploy/models/ResNet50_infer/inference.onnx --opset_version=10 --enable_onnx_checker=True!
Run failed with command - cd deploy && python3.7 ./python/predict_cls.py -o Global.inference_model_dir=./models/ResNet50_infer -o Global.use_onnx=True -o Global.use_gpu=False -c=configs/inference_cls.yaml > ../test_tipc/output/ResNet50/paddle2onnx_infer_cpu.log 2>&1 && cd ../!
...
```


## 3. Further reading

This document covers only the functional test. For a more detailed tutorial on Paddle2ONNX inference, see [Paddle2ONNX](https://github.com/PaddlePaddle/Paddle2ONNX).
14 changes: 10 additions & 4 deletions test_tipc/prepare.sh
@@ -174,13 +174,19 @@ fi
if [ ${MODE} = "paddle2onnx_infer" ];then
# prepare paddle2onnx env
python_name=$(func_parser_value "${lines[2]}")
inference_model_url=$(func_parser_value "${lines[10]}")
tar_name=${inference_model_url##*/}

${python_name} -m pip install paddle2onnx
${python_name} -m pip install onnxruntime

# wget model
cd deploy && mkdir models && cd models
wget -nc https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar && tar xf ResNet50_vd_infer.tar
cd deploy
mkdir models
cd models
wget -nc ${inference_model_url}
tar xf ${tar_name}
cd ../../


fi

if [ ${MODE} = "benchmark_train" ];then
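
The `func_parser_value` calls in the snippet above come from the shared TIPC helper functions, which are not part of this diff. Below is a hypothetical stand-in, assuming each config entry is a single `key:value` line and the value is everything after the first colon; the real helper may differ:

```shell
# Hypothetical stand-in for func_parser_value (the real helper is defined
# elsewhere in the TIPC harness): return everything after the first ':'.
parse_config_value() {
    local line="$1"
    echo "${line#*:}"
}

# Example: pull the model download URL out of a paddle2onnx config line.
url=$(parse_config_value "inference_model_url:https://paddle-imagenet-models-name.bj.bcebos.com/dygraph/inference/ResNet50_vd_infer.tar")
tar_name=${url##*/}   # -> ResNet50_vd_infer.tar, as computed in prepare.sh above
```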