CONFIG_PATH="/neural-compressor/examples/.config/model_params_onnxrt.json"
model_src_dir=$(jq -r ".\"onnxrt\".\"$model\".\"model_src_dir\"" "$CONFIG_PATH")
- dataset_location=$(jq -r ".\"onnxrt\".\"$model\".\"dataset_location\"" "$CONFIG_PATH")
+ if [[ $model == *"resnet"* ]]; then
+     dataset_location="/tf_dataset2/datasets/imagenet/ImagenetRaw/ImagenetRaw_small_5000/ILSVRC2012_img_val"
+     label_path="/tf_dataset2/datasets/imagenet/ImagenetRaw/ImagenetRaw_small_5000/val.txt"
+ else
+     dataset_location=$(jq -r ".\"onnxrt\".\"$model\".\"dataset_location\"" "$CONFIG_PATH")
+ fi
+
input_model=$(jq -r ".\"onnxrt\".\"$model\".\"input_model\"" "$CONFIG_PATH")

function run_prepare_model() {
@@ -32,10 +38,18 @@ function run_prepare_model() {
}

function run_quantize() {
-     bash run_quant.sh --input_model="$input_model" \
-                       --dataset_location="$dataset_location" \
-                       --label_path="$model" \
-                       --output_model="./model_tune"
+     if [ "$model" == "bert-base-uncased" ]; then
+         bash run_quant.sh --input_model="$input_model" \
+                           --dataset_location="$dataset_location" \
+                           --label_path="$model" \
+                           --output_model="./model_tune" \
+                           --quant_format="QDQ"
+     else
+         bash run_quant.sh --input_model="$input_model" \
+                           --dataset_location="$dataset_location" \
+                           --label_path="$model" \
+                           --output_model="./model_tune"
+     fi
}

function run_accuracy() {
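For reference, a minimal, self-contained sketch of the per-model jq lookup pattern this script relies on. The config path, model name, and field values below are illustrative stand-ins, not the real contents of model_params_onnxrt.json; only the nesting (onnxrt -> model -> field) is taken from the queries above.

#!/bin/bash
# Illustrative config; real entries live in examples/.config/model_params_onnxrt.json.
CONFIG_PATH="./model_params_onnxrt.json"
model="bert-base-uncased"

cat > "$CONFIG_PATH" <<'EOF'
{
  "onnxrt": {
    "bert-base-uncased": {
      "model_src_dir": "path/to/example/dir",
      "dataset_location": "/path/to/dataset",
      "input_model": "/path/to/model.onnx"
    }
  }
}
EOF

# Same lookup shape as the script: ."onnxrt"."<model>"."<field>", with -r for raw strings.
model_src_dir=$(jq -r ".\"onnxrt\".\"$model\".\"model_src_dir\"" "$CONFIG_PATH")
dataset_location=$(jq -r ".\"onnxrt\".\"$model\".\"dataset_location\"" "$CONFIG_PATH")
input_model=$(jq -r ".\"onnxrt\".\"$model\".\"input_model\"" "$CONFIG_PATH")

echo "$model_src_dir $dataset_location $input_model"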