diff --git a/src/operator/tensor/elemwise_unary_op.h b/src/operator/tensor/elemwise_unary_op.h
index 83b86bf1d94c..961d776c7a29 100644
--- a/src/operator/tensor/elemwise_unary_op.h
+++ b/src/operator/tensor/elemwise_unary_op.h
@@ -34,6 +34,9 @@
 #include "../mxnet_op.h"
 #include "../elemwise_op_common.h"
 #include "../../ndarray/ndarray_function.h"
+#if MSHADOW_USE_MKL == 1
+#include "mkl.h"
+#endif
 
 namespace mxnet {
 namespace op {
@@ -348,6 +351,40 @@ class UnaryOp : public OpBase {
       LogUnimplementedOp(attrs, ctx, inputs, req, outputs);
     }
   }
+
+#if MSHADOW_USE_MKL == 1
+#define MKLLOG(fname, DType) \
+static void MKLLog(size_t size, const DType* pIn, DType* pOut) { \
+  fname(size, pIn, pOut); \
+}
+
+MKLLOG(vsLn, float)
+MKLLOG(vdLn, double)
+#endif
+
+  template<typename xpu, typename OP>
+  static void LogCompute(const nnvm::NodeAttrs& attrs,
+                         const OpContext& ctx,
+                         const std::vector<TBlob>& inputs,
+                         const std::vector<OpReqType>& req,
+                         const std::vector<TBlob>& outputs) {
+    if (req[0] == kNullOp) return;
+    // if defined MSHADOW_USE_MKL then call mkl log when req is KWriteTo and type_flag
+    // is mshadow::kFloat32 or mshadow::kFloat64
+#if MSHADOW_USE_MKL == 1
+    auto type_flag = inputs[0].type_flag_;
+    if (req[0] == kWriteTo && (type_flag == mshadow::kFloat32
+        || type_flag == mshadow::kFloat64)) {
+      MSHADOW_SGL_DBL_TYPE_SWITCH(type_flag, DType, {
+        MKLLog(inputs[0].Size(), inputs[0].dptr<DType>(), outputs[0].dptr<DType>());
+      })
+    } else {
+      Compute<xpu, OP>(attrs, ctx, inputs, req, outputs);
+    }
+#else
+    Compute<xpu, OP>(attrs, ctx, inputs, req, outputs);
+#endif
+  }
 };
 
 /*! \brief Map legacy unary_bwd to backward_grad */
diff --git a/src/operator/tensor/elemwise_unary_op_basic.cc b/src/operator/tensor/elemwise_unary_op_basic.cc
index 301fc48d2128..9730d0096e58 100644
--- a/src/operator/tensor/elemwise_unary_op_basic.cc
+++ b/src/operator/tensor/elemwise_unary_op_basic.cc
@@ -940,7 +940,7 @@ The storage type of ``exp`` output is always dense
 .set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseOut{"_mul"});
 
 // log
-MXNET_OPERATOR_REGISTER_UNARY_WITH_SPARSE_DR(log, cpu, mshadow_op::log)
+MXNET_OPERATOR_REGISTER_UNARY(log)
 MXNET_ADD_SPARSE_OP_ALIAS(log)
 .describe(R"code(Returns element-wise Natural logarithmic value of the input.
@@ -949,6 +949,7 @@ The natural logarithm is logarithm in base *e*, so that ``log(exp(x)) = x``
 The storage type of ``log`` output is always dense
 
 )code" ADD_FILELINE)
+.set_attr<FCompute>("FCompute<cpu>", UnaryOp::LogCompute<cpu, mshadow_op::log>)
 .set_attr<nnvm::FGradient>("FGradient", ElemwiseGradUseIn{"_backward_log"});
 
 // log10