Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Commit

Permalink
fix CI
Browse files Browse the repository at this point in the history
  • Loading branch information
wuxun-zhang committed Jun 12, 2019
1 parent 4b60163 commit 19ced82
Showing 1 changed file with 7 additions and 5 deletions.
12 changes: 7 additions & 5 deletions cpp-package/example/inference/unit_test_imagenet_inference.sh
Original file line number Diff line number Diff line change
# Run the imagenet inference smoke tests. On macOS the dynamic linker reads
# DYLD_LIBRARY_PATH (not LD_LIBRARY_PATH); on Linux it is LD_LIBRARY_PATH.
if [ "$(uname)" == "Darwin" ]; then

    echo ">>> INFO: FP32 dummy data"
    DYLD_LIBRARY_PATH=${DYLD_LIBRARY_PATH}:../../../lib ./imagenet_inference --symbol_file "./model/Inception-BN-symbol.json" --batch_size 1 --num_inference_batches 500 --benchmark

    echo ">>> INFO: INT8 dummy data"
    # Fix: this was LD_LIBRARY_PATH, which macOS's dyld ignores; use DYLD_LIBRARY_PATH
    # for consistency with the FP32 run above.
    DYLD_LIBRARY_PATH=${DYLD_LIBRARY_PATH}:../../../lib ./imagenet_inference --symbol_file "./model/resnet50_v1_int8-symbol.json" --batch_size 1 --num_inference_batches 500 --benchmark
else
    echo ">>> INFO: FP32 real data"
    LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:../../../lib ./imagenet_inference --symbol_file "./model/Inception-BN-symbol.json" --params_file "./model/Inception-BN-0126.params" --dataset "./data/val_256_q90.rec" --rgb_mean "123.68 116.779 103.939" --batch_size 1 --num_skipped_batches 50 --num_inference_batches 500

    echo ">>> INFO: FP32 dummy data"
    LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:../../../lib ./imagenet_inference --symbol_file "./model/Inception-BN-symbol.json" --batch_size 1 --num_inference_batches 500 --benchmark

    # Run the quantized (INT8) model only when MXNet was built with MKL-DNN:
    # look for any mkldnn library file in the lib directory instead of parsing
    # `ls` output through grep/tail (the old check's regex test was vacuous).
    if compgen -G "../../../lib/*mkldnn*" > /dev/null; then
        echo ">>> INFO: INT8 dummy data"
        LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:../../../lib ./imagenet_inference --symbol_file "./model/resnet50_v1_int8-symbol.json" --batch_size 1 --num_inference_batches 500 --benchmark
    else
        echo "Skipped INT8 test because mkldnn was not found which is required for running inference with quantized models."
    fi
fi

0 comments on commit 19ced82

Please sign in to comment.