diff --git a/src/operator/tensor/matrix_op-inl.h b/src/operator/tensor/matrix_op-inl.h
index 49847502258f..597c92a8b124 100644
--- a/src/operator/tensor/matrix_op-inl.h
+++ b/src/operator/tensor/matrix_op-inl.h
@@ -1481,7 +1481,7 @@ struct ClipParam : public dmlc::Parameter<ClipParam> {
 struct clip {
   template<typename DType>
   MSHADOW_XINLINE static void Map(index_t i, DType* out, const DType* datas,
-                                  DType a_min, DType a_max) {
+                                  const float a_min, const float a_max) {
     DType data = datas[i];
     if (data > a_max) {
       out[i] = a_max;
@@ -1497,7 +1497,7 @@ struct clip {
 struct clip_grad {
   template<typename DType>
   MSHADOW_XINLINE static void Map(index_t i, DType* out, const DType* grad, const DType* datas,
-                                  DType a_min, DType a_max) {
+                                  const float a_min, const float a_max) {
     DType data = datas[i];
     if (data > a_max) {
       out[i] = 0;
@@ -1524,7 +1524,7 @@ void Clip(const nnvm::NodeAttrs& attrs,
   MSHADOW_TYPE_SWITCH(outputs[0].type_flag_, DType, {
     mxnet_op::Kernel<mxnet_op::clip, xpu>::Launch(s, outputs[0].Size(), outputs[0].dptr<DType>(),
                                                   inputs[0].dptr<DType>(),
-                                                  DType(param.a_min), DType(param.a_max));
+                                                  param.a_min, param.a_max);
   });
 }
 
@@ -1553,7 +1553,7 @@ void ClipGrad_(const nnvm::NodeAttrs& attrs,
   Stream<xpu> *s = ctx.get_stream<xpu>();
   MSHADOW_TYPE_SWITCH(outputs[0].type_flag_, DType, {
     Kernel<clip_grad, xpu>::Launch(s, outputs[0].Size(), outputs[0].dptr<DType>(),
-      inputs[0].dptr<DType>(), inputs[1].dptr<DType>(), DType(param.a_min), DType(param.a_max));
+      inputs[0].dptr<DType>(), inputs[1].dptr<DType>(), param.a_min, param.a_max);
   });
 }
 
diff --git a/src/operator/tensor/matrix_op.cc b/src/operator/tensor/matrix_op.cc
index 59e8386d6679..c8e7b60d9548 100644
--- a/src/operator/tensor/matrix_op.cc
+++ b/src/operator/tensor/matrix_op.cc
@@ -702,9 +702,11 @@ MXNET_ADD_SPARSE_OP_ALIAS(clip)
 .describe(R"code(Clips (limits) the values in an array.
 
 Given an interval, values outside the interval are clipped to the interval edges.
-Clipping ``x`` between `a_min` and `a_x` would be::
+Clipping ``x`` between `a_min` and `a_max` would be::
 
-   clip(x, a_min, a_max) = max(min(x, a_max), a_min))
+.. math::
+
+   clip(x, a_min, a_max) = \max(\min(x, a_max), a_min)
 
 Example::
 
@@ -766,7 +768,7 @@ parameter values:
 .add_arguments(ClipParam::__FIELDS__());
 
 NNVM_REGISTER_OP(_backward_clip)
-.set_num_inputs(1)
+.set_num_inputs(2)
 .set_num_outputs(1)
 .set_attr_parser(ParamParser<ClipParam>)
 .set_attr<nnvm::TIsBackward>("TIsBackward", true)
diff --git a/tests/python/unittest/test_operator.py b/tests/python/unittest/test_operator.py
index 157acd252e16..799bf8db99cf 100644
--- a/tests/python/unittest/test_operator.py
+++ b/tests/python/unittest/test_operator.py
@@ -4174,15 +4174,25 @@ def test_special_functions_using_scipy():
 
 
 @with_seed()
-@unittest.skip("Flaky test, tracked at https://github.com/apache/incubator-mxnet/issues/12901")
 def test_clip():
     data = mx.symbol.Variable('data')
     shape = (30, 30)
-    data_tmp = np.random.uniform(-1, 1, shape)
+    data_tmp = np.random.uniform(-1, 1, shape).astype('float32')
     test = mx.sym.clip(data, a_max=0.6, a_min=-0.6)
     check_symbolic_forward(test, [data_tmp], [np.clip(data_tmp, -0.6, 0.6)])
     check_symbolic_backward(test, [data_tmp], [np.ones(shape)],
-                            [np.where(data_tmp < 0.6, [1], [0]) * np.where(data_tmp > -0.6, [1], [0])])
+                            [np.where(data_tmp <= 0.6, [1], [0]) * np.where(data_tmp >= -0.6, [1], [0])])
+
+    # Test monitor on symbol using clip
+
+    def simple_callback(name, arr):
+        pass
+
+    exe = test.simple_bind(ctx=mx.current_context(), data=shape)
+    exe.set_monitor_callback(simple_callback, monitor_all=True)
+    exe.forward(is_train=True)
+    exe.backward(out_grads=mx.nd.ones(shape))
+    mx.nd.waitall()
 
 
 @with_seed()