From 5486828ea5b5ddcd5d5441533664820aab937edf Mon Sep 17 00:00:00 2001
From: Connor Goggins
Date: Thu, 20 Feb 2020 10:20:08 -0800
Subject: [PATCH] [Large Tensor] Fixed SoftmaxActivation op (#17634)

* Changed dtype for data & gradient dimensions

* Add nightly test
---
 src/operator/nn/softmax_activation-inl.h | 14 +++++++-------
 tests/nightly/test_large_array.py        | 12 ++++++++++++
 2 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/src/operator/nn/softmax_activation-inl.h b/src/operator/nn/softmax_activation-inl.h
index 1c6738079dda..9f8e581e2fe3 100644
--- a/src/operator/nn/softmax_activation-inl.h
+++ b/src/operator/nn/softmax_activation-inl.h
@@ -82,9 +82,9 @@ void SoftmaxActivationCompute(const nnvm::NodeAttrs& attrs,
   } else {
     CHECK_GE(in_data.ndim(), 3)
         << "Input need to have a least 3 dimensions when mode=channel";
-    int n = in_data.size(0);
-    int k = in_data.size(1);
-    Shape<3> s3 = Shape3(n, k, static_cast<int>(in_data.Size()/n/k));
+    index_t n = in_data.size(0);
+    index_t k = in_data.size(1);
+    Shape<3> s3 = Shape3(n, k, static_cast<index_t>(in_data.Size()/n/k));
     Tensor<xpu, 3, DType> data = in_data.get_with_shape<xpu, 3, DType>(s3, s);
     Tensor<xpu, 3, DType> out = out_data.get_with_shape<xpu, 3, DType>(s3, s);
     Softmax(out, data);
@@ -107,10 +107,10 @@ void SoftmaxActivationGradCompute(const nnvm::NodeAttrs& attrs,
   const OpReqType &req = reqs[0];
   const TBlob &in_grad = outputs[0];
   // Use 3d tensor for both mode -> {instance, channel}. Get shapes
-  int total_size = in_grad.Size();
-  int batch_size = in_grad.shape_[0];
-  int channel_num = in_grad.shape_[1];
-  int rest_size = total_size / (batch_size * channel_num);
+  index_t total_size = in_grad.Size();
+  index_t batch_size = in_grad.shape_[0];
+  index_t channel_num = in_grad.shape_[1];
+  index_t rest_size = total_size / (batch_size * channel_num);
   const Shape<3> data_shape = Shape3(batch_size, channel_num, rest_size);
   // Get tensors
   Stream<xpu> *s = ctx.get_stream<xpu>();
diff --git a/tests/nightly/test_large_array.py b/tests/nightly/test_large_array.py
index 379030a4d84e..4e09cfe66bfc 100644
--- a/tests/nightly/test_large_array.py
+++ b/tests/nightly/test_large_array.py
@@ -127,6 +127,17 @@ def check_softmax_output():
             expected_grad_out[k] = -1
         assert np.isclose(grad_out - softmax_out, expected_grad_out).all()
 
+    def check_softmax_activation():
+        data = nd.random_normal(shape=(2**29, 2, 2, 2))
+        out = nd.random_normal(shape=(2**29, 2, 2, 2))
+
+        res = nd.SoftmaxActivation(data=data, out=out)
+
+        assert res.shape[0] == 536870912
+        assert res.shape[1] == 2
+        assert res.shape[2] == 2
+        assert res.shape[3] == 2
+
     def np_softmax(x, axis=-1, temperature=1.0):
         x = x - np.max(x, axis=axis, keepdims=True)
         x = np.exp(x/temperature)
@@ -450,6 +461,7 @@ def npy_instance_norm(data, gamma, beta, axis, eps=1E-5):
     check_softmax()
     check_softmax_cross_entropy()
     check_softmax_output()
+    check_softmax_activation()
     check_log_softmax()
     check_leaky_relu()
     check_pooling()
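
Reviewer note, not part of the patch: the nightly test above allocates a
(2**29, 2, 2, 2) tensor, i.e. 2^32 elements, which is exactly the regime where
the old 32-bit `int` bookkeeping breaks. Below is a minimal standalone sketch
of that failure mode; it is an illustration, not MXNet code, and assumes only
that `index_t` is a 64-bit signed integer, as in mshadow's large-tensor
configuration.

// Standalone sketch (not MXNet code): why 32-bit indices break on the
// tensor shape used in check_softmax_activation() above.
#include <cstdint>
#include <iostream>

using index_t = int64_t;  // stand-in for mshadow's 64-bit index type

int main() {
  // (2**29, 2, 2, 2) holds 2^32 elements -- past INT32_MAX (2^31 - 1).
  const index_t total_size = (int64_t{1} << 29) * 2 * 2 * 2;

  // Old code: `int total_size = in_grad.Size();` narrows the element count.
  // 2^32 wraps to 0 on two's-complement targets, so every shape value
  // derived from it (e.g. rest_size) is garbage.
  const int narrowed = static_cast<int>(total_size);

  // New code keeps the arithmetic in 64 bits, so the division in
  // SoftmaxActivationGradCompute stays exact.
  const index_t batch_size = int64_t{1} << 29;
  const index_t channel_num = 2;
  const index_t rest_size = total_size / (batch_size * channel_num);

  std::cout << "int total_size:     " << narrowed   << "\n"   // 0
            << "index_t total_size: " << total_size << "\n"   // 4294967296
            << "rest_size:          " << rest_size  << "\n";  // 4
  return 0;
}

With the patch applied, total_size, batch_size, channel_num, and rest_size all
carry index_t, so Shape3() receives the true extents instead of wrapped values.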