
[MXNET-910] Multithreading inference. #12456

Merged 20 commits on Sep 19, 2018
2 changes: 1 addition & 1 deletion example/image-classification/predict-cpp/Makefile
@@ -15,7 +15,7 @@ LDFLAGS+=`pkg-config --libs opencv`
export MXNET_ROOT=`pwd`/../../..

CFLAGS+=-Wall -I$(MXNET_ROOT)/include
LDFLAGS+=$(MXNET_ROOT)/lib/libmxnet.so
LDFLAGS+=$(MXNET_ROOT)/lib/libmxnet.so -lpthread

image-classification-predict: image-classification-predict.o
	g++ -O3 -o image-classification-predict image-classification-predict.o $(LDFLAGS)
6 changes: 3 additions & 3 deletions example/image-classification/predict-cpp/README.md
@@ -1,5 +1,5 @@
# Image Classification Example Using the C Predict API
This is a simple predictor which shows how to use the MXNet C Predict API for image classification with a pre-trained ImageNet model.
This is a simple predictor which shows how to use the MXNet C Predict API for image classification with a pre-trained ImageNet model, in either a single thread or multiple threads.

## Prerequisites

@@ -45,10 +45,10 @@ Run the example by passing it an image that you want to classify. If you don't h
wget https://upload.wikimedia.org/wikipedia/commons/thumb/f/f4/Honeycrisp.jpg/1920px-Honeycrisp.jpg
```

Then run the `image-classification-predict` program, passing the image as the argument.
Then run the `image-classification-predict` program, passing the image as the first argument and the number of threads as the second argument.

```bash
./image-classification-predict 1920px-Honeycrisp.jpg
./image-classification-predict 1920px-Honeycrisp.jpg 1
```

## Tips
example/image-classification/predict-cpp/image-classification-predict.cc
@@ -37,6 +37,7 @@
#include <fstream>
#include <vector>
#include <memory>
#include <thread>
#include <iomanip>
#include <opencv2/opencv.hpp>
// Path for c_predict_api
@@ -179,14 +180,56 @@ void PrintOutputResult(const std::vector<float>& data, const std::vector<std::st
"accuracy=" << std::setprecision(8) << best_accuracy << ")" << std::endl;
}

void predict(PredictorHandle pred_hnd, const std::vector<mx_float> &image_data,
             NDListHandle nd_hnd, const std::string &synset_file, int i) {
  auto image_size = image_data.size();
  // Set Input Image
  MXPredSetInput(pred_hnd, "data", image_data.data(), static_cast<mx_uint>(image_size));

  // Do Predict Forward
  MXPredForward(pred_hnd);

  mx_uint output_index = 0;

  mx_uint* shape = nullptr;
  mx_uint shape_len;

  // Get Output Result
  MXPredGetOutputShape(pred_hnd, output_index, &shape, &shape_len);

  std::size_t size = 1;
  for (mx_uint i = 0; i < shape_len; ++i) { size *= shape[i]; }

  std::vector<float> data(size);

  MXPredGetOutput(pred_hnd, output_index, &(data[0]), static_cast<mx_uint>(size));

  // Release NDList
  if (nd_hnd) {
    MXNDListFree(nd_hnd);
  }

  // Release Predictor
  MXPredFree(pred_hnd);

  // Load the synset file (modify the path for your model)
  auto synset = LoadSynset(synset_file);

  // Print Output Data
  PrintOutputResult(data, synset);
}

int main(int argc, char* argv[]) {
  if (argc < 2) {
    std::cout << "No test image here." << std::endl
              << "Usage: ./image-classification-predict apple.jpg" << std::endl;
              << "Usage: ./image-classification-predict apple.jpg [num_threads]" << std::endl;
    return EXIT_FAILURE;
  }

  std::string test_file(argv[1]);
  int num_threads = 1;
  if (argc == 3)
    num_threads = std::atoi(argv[2]);

  // Models path for your model, you have to modify it
  std::string json_file = "model/Inception/Inception-BN-symbol.json";
@@ -214,25 +257,11 @@ int main(int argc, char* argv[]) {
                                        static_cast<mx_uint>(channels),
                                        static_cast<mx_uint>(height),
                                        static_cast<mx_uint>(width) };
  PredictorHandle pred_hnd = nullptr;

  if (json_data.GetLength() == 0 || param_data.GetLength() == 0) {
    return EXIT_FAILURE;
  }

  // Create Predictor
  MXPredCreate(static_cast<const char*>(json_data.GetBuffer()),
               static_cast<const char*>(param_data.GetBuffer()),
               static_cast<int>(param_data.GetLength()),
               dev_type,
               dev_id,
               num_input_nodes,
               input_keys,
               input_shape_indptr,
               input_shape_data,
               &pred_hnd);
  assert(pred_hnd);

  auto image_size = static_cast<std::size_t>(width * height * channels);

  // Read Mean Data
@@ -259,40 +288,46 @@

  GetImageFile(test_file, image_data.data(), channels, cv::Size(width, height), nd_data);

  // Set Input Image
  MXPredSetInput(pred_hnd, "data", image_data.data(), static_cast<mx_uint>(image_size));

  // Do Predict Forward
  MXPredForward(pred_hnd);

  mx_uint output_index = 0;

  mx_uint* shape = nullptr;
  mx_uint shape_len;

  // Get Output Result
  MXPredGetOutputShape(pred_hnd, output_index, &shape, &shape_len);

  std::size_t size = 1;
  for (mx_uint i = 0; i < shape_len; ++i) { size *= shape[i]; }

  std::vector<float> data(size);

  MXPredGetOutput(pred_hnd, output_index, &(data[0]), static_cast<mx_uint>(size));

  // Release NDList
  if (nd_hnd) {
    MXNDListFree(nd_hnd);
  if (num_threads == 1) {
    // Create Predictor
    PredictorHandle pred_hnd;
    MXPredCreate(static_cast<const char*>(json_data.GetBuffer()),
                 static_cast<const char*>(param_data.GetBuffer()),
                 static_cast<int>(param_data.GetLength()),
                 dev_type,
                 dev_id,
                 num_input_nodes,
                 input_keys,
                 input_shape_indptr,
                 input_shape_data,
                 &pred_hnd);
    assert(pred_hnd);

    predict(pred_hnd, image_data, nd_hnd, synset_file, 0);
  } else {
    // Create Predictors, one per thread
    std::vector<PredictorHandle> pred_hnds(num_threads, nullptr);
    MXPredCreateMultiThread(static_cast<const char*>(json_data.GetBuffer()),
                            static_cast<const char*>(param_data.GetBuffer()),
                            static_cast<int>(param_data.GetLength()),
                            dev_type,
                            dev_id,
                            num_input_nodes,
                            input_keys,
                            input_shape_indptr,
                            input_shape_data,
                            pred_hnds.size(),
                            pred_hnds.data());
    for (auto hnd : pred_hnds)
      assert(hnd);

    std::vector<std::thread> threads;
    // predict() frees the NDList handle, so pass it to the first thread only.
    for (int i = 0; i < num_threads; i++)
      threads.emplace_back(predict, pred_hnds[i], image_data,
                           i == 0 ? nd_hnd : nullptr, synset_file, i);
    for (int i = 0; i < num_threads; i++)
      threads[i].join();
  }

  // Release Predictor
  MXPredFree(pred_hnd);

  // Synset path for your model, you have to modify it
  auto synset = LoadSynset(synset_file);

  // Print Output Data
  PrintOutputResult(data, synset);
  printf("run successfully\n");

  return EXIT_SUCCESS;
}
33 changes: 33 additions & 0 deletions include/mxnet/c_predict_api.h
@@ -119,6 +119,39 @@ MXNET_DLL int MXPredCreatePartialOut(const char* symbol_json_str,
                                     mx_uint num_output_nodes,
                                     const char** output_keys,
                                     PredictorHandle* out);

/*!
 * \brief create predictors for multiple threads. One predictor per thread.
 * \param symbol_json_str The JSON string of the symbol.
 * \param param_bytes The in-memory raw bytes of parameter ndarray file.
 * \param param_size The size of parameter ndarray file.
 * \param dev_type The device type, 1: cpu, 2: gpu
 * \param dev_id The device id of the predictor.
 * \param num_input_nodes Number of input nodes to the net,
 *    For feedforward net, this is 1.
 * \param input_keys The name of the input argument.
 *    For feedforward net, this is {"data"}
 * \param input_shape_indptr Index pointer of shapes of each input node.
 *    The length of this array = num_input_nodes + 1.
 *    For feedforward net that takes 4 dimensional input, this is {0, 4}.
 * \param input_shape_data A flattened data of shapes of each input node.
 *    For feedforward net that takes 4 dimensional input, this is the shape data.
 * \param num_threads The number of threads that will run the predictors.
 * \param out An array of created predictor handles. The array has to be large
 *    enough to hold `num_threads` predictors.
 * \return 0 when success, -1 when failure.
 */
MXNET_DLL int MXPredCreateMultiThread(const char* symbol_json_str,
                                      const void* param_bytes,
                                      int param_size,
                                      int dev_type, int dev_id,
                                      mx_uint num_input_nodes,
                                      const char** input_keys,
                                      const mx_uint* input_shape_indptr,
                                      const mx_uint* input_shape_data,
                                      int num_threads,
                                      PredictorHandle* out);

/*!
 * \brief Change the input shape of an existing predictor.
 * \param num_input_nodes Number of input nodes to the net,
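For context, the sketch below shows one way the new call could be driven end to end: `MXPredCreateMultiThread` fills one predictor handle per worker thread, and each thread runs the usual set-input / forward / get-output sequence on its own handle, much like the `predict()` worker in the example program above. It is a minimal illustration only, not part of this PR; `run_threads`, `json_str`, `param_bytes`, `param_size`, `input_data`, and the 1x3x224x224 input shape are assumed placeholders, and error handling is omitted.

```cpp
#include <cassert>
#include <cstddef>
#include <thread>
#include <vector>

#include <mxnet/c_predict_api.h>

// Sketch only: run one predictor per worker thread.  json_str, param_bytes,
// param_size and input_data are assumed to have been loaded elsewhere.
void run_threads(const char* json_str, const void* param_bytes, int param_size,
                 const std::vector<mx_float>& input_data, int num_threads) {
  const char* input_keys[] = {"data"};
  const mx_uint input_shape_indptr[] = {0, 4};
  const mx_uint input_shape_data[] = {1, 3, 224, 224};  // assumed NCHW input shape

  // One handle per thread, filled in by MXPredCreateMultiThread.
  std::vector<PredictorHandle> handles(num_threads, nullptr);
  MXPredCreateMultiThread(json_str, param_bytes, param_size,
                          1 /* dev_type: cpu */, 0 /* dev_id */,
                          1, input_keys, input_shape_indptr, input_shape_data,
                          num_threads, handles.data());

  std::vector<std::thread> workers;
  for (int t = 0; t < num_threads; ++t) {
    workers.emplace_back([&handles, &input_data, t]() {
      PredictorHandle h = handles[t];
      assert(h);
      // Each thread drives only its own predictor handle.
      MXPredSetInput(h, "data", input_data.data(),
                     static_cast<mx_uint>(input_data.size()));
      MXPredForward(h);

      mx_uint* shape = nullptr;
      mx_uint shape_len = 0;
      MXPredGetOutputShape(h, 0, &shape, &shape_len);
      std::size_t size = 1;
      for (mx_uint i = 0; i < shape_len; ++i) size *= shape[i];

      std::vector<float> output(size);
      MXPredGetOutput(h, 0, output.data(), static_cast<mx_uint>(size));
      MXPredFree(h);  // each thread releases the handle it used
    });
  }
  for (auto& w : workers) w.join();
}
```

The design point, per the doc comment, is one predictor handle per thread: each handle is created once up front and then driven and released only by the thread that owns it, mirroring how the example program hands `pred_hnds[i]` to `predict()` in thread `i`.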