
Commit

Add cpp_package build option in CI (#5844)

* add cpp_package build option in CI

* Change usage of Reshape in cpp-package
eric-haibin-lin authored and piiswrong committed Apr 18, 2017
1 parent 093a920 commit 53d3c63
Showing 3 changed files with 11 additions and 48 deletions.
3 changes: 3 additions & 0 deletions Jenkinsfile
@@ -81,6 +81,7 @@ stage('Build') {
init_git()
def flag = """ \
USE_PROFILER=1 \
USE_CPP_PACKAGE=1 \
USE_BLAS=openblas \
-j\$(nproc)
"""
@@ -99,6 +100,7 @@ USE_BLAS=openblas \
USE_CUDA=1 \
USE_CUDA_PATH=/usr/local/cuda \
USE_CUDNN=1 \
USE_CPP_PACKAGE=1 \
-j\$(nproc)
"""
make('gpu', flag)
@@ -126,6 +128,7 @@ USE_MKL2017_EXPERIMENTAL=1 \
USE_CUDA=1 \
USE_CUDA_PATH=/usr/local/cuda \
USE_CUDNN=1 \
USE_CPP_PACKAGE=1 \
-j\$(nproc)
"""
make('mklml_gpu', flag)
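Adding USE_CPP_PACKAGE=1 to each flag string makes every CI configuration build the C++ language binding (cpp-package) alongside libmxnet, which is why the two C++ files below must now compile cleanly. As a rough sketch of what that binding provides, the snippet below builds a toy symbolic graph with the same calls charRNN.cpp uses; it assumes the usual mxnet-cpp/MxNetCpp.h umbrella header produced by this option is on the include path.

#include "mxnet-cpp/MxNetCpp.h"
using namespace mxnet::cpp;

int main() {
  // Toy symbolic graph using calls that also appear in charRNN.cpp below.
  Symbol data  = Symbol::Variable("data");
  Symbol label = Symbol::Variable("softmax_label");
  Symbol out   = SoftmaxOutput("softmax", data, label);
  return 0;
}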
13 changes: 7 additions & 6 deletions cpp-package/example/charRNN.cpp
@@ -115,7 +115,7 @@ Symbol LSTMUnroll(int num_lstm_layer, int sequence_length, int input_dim,

auto label = Symbol::Variable("softmax_label");
label = transpose(label);
label = Reshape(label, Shape(), false, false, Shape(-1)); // -1: infer from graph
label = Reshape(label, Shape(), false, Shape(-1), false); // -1: infer from graph
auto sm = SoftmaxOutput("softmax", pred, label);
if (isTrain)
return sm;
@@ -141,7 +141,7 @@ Symbol LSTMWithBuiltInRNNOp(int num_lstm_layer, int sequence_length, int input_d
auto label = Symbol::Variable("softmax_label");
label = transpose(label);
label = Reshape(label, Shape(), false,
false, Shape(-1)); // FullyConnected requires one dimension
Shape(-1), false); // FullyConnected requires one dimension
if (!TIME_MAJOR && isTrain)
embed = SwapAxis(embed, 0, 1); // Change to time-major as cuDNN requires

@@ -151,7 +151,7 @@ Symbol LSTMWithBuiltInRNNOp(int num_lstm_layer, int sequence_length, int input_d
auto rnn_params = Symbol::Variable("LSTM_parameters"); // See explanations near RNNXavier class
auto rnn = RNN(embed, rnn_params, rnn_h_init, rnn_c_init, num_hidden, num_lstm_layer,
RNNMode::lstm, false, dropout, !isTrain);
auto hidden = Reshape(rnn[0], Shape(), false, false, Shape(-1, num_hidden));
auto hidden = Reshape(rnn[0], Shape(), false, Shape(-1, num_hidden), false);

auto cls_weight = Symbol::Variable("cls_weight");
auto cls_bias = Symbol::Variable("cls_bias");
@@ -194,7 +194,8 @@ class Shuffler {

class BucketSentenceIter : public DataIter {
Shuffler* random;
int batch, current, end, sequence_length;
int batch, current, end;
unsigned int sequence_length;
Context device;
vector<vector<mx_float>> sequences;
vector<wchar_t> index2chars;
@@ -582,7 +583,7 @@ void predict(wstring* ptext, int sequence_length, const string param_file,
LoadCheckpoint(param_file, exe);

mx_float index;
wchar_t next;
wchar_t next = 0;
vector<mx_float> softmax;
softmax.resize(input_dim);
for (auto c : *ptext) {
@@ -642,7 +643,7 @@ void predictWithBuiltInRNNOp(wstring* ptext, int sequence_length, const string p
LoadCheckpoint(param_file, exe);

mx_float index;
wchar_t next;
wchar_t next = 0;
vector<mx_float> softmax;
softmax.resize(input_dim);
for (auto c : *ptext) {
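The Reshape edits above only reorder arguments: the generated wrapper now appears to take the legacy target_shape Shape before the trailing keep_highest bool, so Shape(-1) moves from the last position to the fourth (the parameter names here are my reading of the generated signature, not something this commit states). The int to unsigned int change for sequence_length and the wchar_t next = 0 initializations look like fixes for signed/unsigned-comparison and possibly-uninitialized warnings once the example is built in CI. A minimal sketch of the updated call pattern, copied from the lines above:

// Assumed argument order of the generated Reshape wrapper:
// (data, shape, reverse, target_shape, keep_highest) -- a guess, not confirmed by this commit.
Symbol label = Symbol::Variable("softmax_label");
label = transpose(label);
label = Reshape(label, Shape(), false, Shape(-1), false);  // -1: infer from graph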
43 changes: 1 addition & 42 deletions cpp-package/include/mxnet-cpp/op_suppl.h
@@ -119,48 +119,7 @@ inline Symbol Crop(const std::string& symbol_name,


/*!
* \breif Slice input equally along specified axis.
* \param data input symbol.
* \param num_outputs Number of outputs to be sliced.
* \param axis Dimension along which to slice.
* \param squeeze_axis If true AND the sliced dimension becomes 1, squeeze that dimension.
* \return new symbol
*/
inline Symbol SliceChannel(Symbol data,
int num_outputs,
int axis = 1,
bool squeeze_axis = false) {
return Operator("SliceChannel")
.SetParam("num_outputs", num_outputs)
.SetParam("axis", axis)
.SetParam("squeeze_axis", squeeze_axis) (data)
.CreateSymbol();
}


/*!
* \breif Slice input equally along specified axis.
* \param symbol_name name of the resulting symbol.
* \param data input symbol.
* \param num_outputs Number of outputs to be sliced.
* \param axis Dimension along which to slice.
* \param squeeze_axis If true AND the sliced dimension becomes 1, squeeze that dimension.
* \return new symbol
*/
inline Symbol SliceChannel(const std::string& symbol_name,
Symbol data,
int num_outputs,
int axis = 1,
bool squeeze_axis = false) {
return Operator("SliceChannel")
.SetParam("num_outputs", num_outputs)
.SetParam("axis", axis)
.SetParam("squeeze_axis", squeeze_axis) (data)
.CreateSymbol(symbol_name);
}

/*!
* \breif Apply activation function to input.
* \brief Apply activation function to input.
* Softmax Activation is only available with CUDNN on GPUand will be
* computed at each location across channel if input is 4D.
* \param symbol_name name of the resulting symbol.
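The two handwritten SliceChannel overloads are dropped from op_suppl.h, presumably because an equivalent wrapper is available elsewhere (the commit does not say). For reference, the removed helpers were thin wrappers over the generic Operator API; a sketch of the equivalent direct call, with illustrative parameter values:

// Direct Operator call equivalent to the removed SliceChannel helpers.
Symbol data = Symbol::Variable("data");
Symbol parts = Operator("SliceChannel")
                   .SetParam("num_outputs", 4)       // split into 4 equal slices
                   .SetParam("axis", 1)              // slice along the channel axis
                   .SetParam("squeeze_axis", false)
                   (data)
                   .CreateSymbol("slice");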
