
Zero initialization to avoid error message on CentOS
mozga-intel committed Nov 4, 2021
1 parent 683c974 commit a470710
Showing 77 changed files with 132 additions and 128 deletions.
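
The change applied across these files is uniform: each operator wrapper's parameter struct is value-initialized with `= {}` instead of being default-constructed, so its trivially-typed members start at zero rather than holding indeterminate values. The CentOS "error message" is presumably an uninitialized-variable diagnostic (for example GCC's -Wmaybe-uninitialized promoted by -Werror) from the toolchain used there; that reading is an assumption, not stated in the commit. A minimal sketch of the pattern, using an illustrative ExampleParam rather than MXNet's actual parameter types:

```cpp
#include <cstdio>

// Illustrative stand-in for an operator parameter struct. MXNet's real
// parameter types (e.g. op::EighParam) are dmlc::Parameter subclasses, but
// the initialization concern for their trivial members is the same.
struct ExampleParam {
  char UPLO;
  double rcond;
  bool hermitian;
};

int main() {
  ExampleParam defaulted;    // default-initialized: members hold indeterminate values
  ExampleParam zeroed = {};  // value-initialized: every member is zeroed

  // The API wrappers assign only the fields they parse; with `= {}` the
  // untouched members still have a well-defined (zero) value, which keeps
  // uninitialized-read diagnostics quiet.
  defaulted.UPLO = 'L';
  zeroed.UPLO = 'L';
  std::printf("%c %c %f %d\n",
              defaulted.UPLO, zeroed.UPLO, zeroed.rcond,
              static_cast<int>(zeroed.hermitian));
  return 0;
}
```

Each hunk below is that one-line swap; the surrounding argument-parsing code is unchanged.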
2 changes: 1 addition & 1 deletion src/api/operator/numpy/linalg/np_eig.cc
@@ -45,7 +45,7 @@ MXNET_REGISTER_API("_npi.eigh").set_body([](runtime::MXNetArgs args, runtime::MX
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_eigh");
nnvm::NodeAttrs attrs;
op::EighParam param;
op::EighParam param = {};
param.UPLO = *((args[1].operator std::string()).c_str());
attrs.parsed = param;
attrs.op = op;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/linalg/np_eigvals.cc
@@ -46,7 +46,7 @@ MXNET_REGISTER_API("_npi.eigvalsh")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_eigvalsh");
nnvm::NodeAttrs attrs;
op::EigvalshParam param;
op::EigvalshParam param = {};
param.UPLO = *((args[1].operator std::string()).c_str());
attrs.parsed = param;
attrs.op = op;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/linalg/np_lstsq.cc
@@ -32,7 +32,7 @@ MXNET_REGISTER_API("_npi.lstsq").set_body([](runtime::MXNetArgs args, runtime::M
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_lstsq");
nnvm::NodeAttrs attrs;
op::LstsqParam param;
op::LstsqParam param = {};
if (args[2].type_code() == kNull) {
param.rcond = static_cast<double>(1);
} else if (args[2].type_code() == kStr) {
4 changes: 2 additions & 2 deletions src/api/operator/numpy/linalg/np_matrix_rank.cc
@@ -31,7 +31,7 @@ namespace mxnet {
inline static void _npi_matrix_rank_none_tol(runtime::MXNetArgs args, runtime::MXNetRetValue* ret) {
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_matrix_rank_none_tol");
op::MatrixRankNoneTolParam param;
op::MatrixRankNoneTolParam param = {};
nnvm::NodeAttrs attrs;
param.hermitian = args[2].operator bool();
param.finfoEps32 = args[3].operator double();
@@ -49,7 +49,7 @@ inline static void _npi_matrix_rank_none_tol(runtime::MXNetArgs args, runtime::M
inline static void _npi_matrix_rank(runtime::MXNetArgs args, runtime::MXNetRetValue* ret) {
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_matrix_rank");
op::MatrixRankParam param;
op::MatrixRankParam param = {};
nnvm::NodeAttrs attrs;
param.hermitian = args[2].operator bool();
attrs.parsed = param;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/linalg/np_norm.cc
@@ -32,7 +32,7 @@ MXNET_REGISTER_API("_npi.norm").set_body([](runtime::MXNetArgs args, runtime::MX
using namespace runtime;
nnvm::NodeAttrs attrs;
const nnvm::Op* op = Op::Get("_npi_norm");
op::NumpyNormParam param;
op::NumpyNormParam param = {};
param.ord = args[1].operator double();
if (args[2].type_code() == kNull) {
param.axis = dmlc::optional<mxnet::TShape>();
4 changes: 2 additions & 2 deletions src/api/operator/numpy/linalg/np_pinv.cc
@@ -31,7 +31,7 @@ namespace mxnet {
inline static void _npi_pinv(runtime::MXNetArgs args, runtime::MXNetRetValue* ret) {
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_pinv");
op::PinvParam param;
op::PinvParam param = {};
nnvm::NodeAttrs attrs;
param.hermitian = args[2].operator bool();
attrs.parsed = param;
@@ -47,7 +47,7 @@ inline static void _npi_pinv(runtime::MXNetArgs args, runtime::MXNetRetValue* re
inline static void _npi_pinv_scalar_rcond(runtime::MXNetArgs args, runtime::MXNetRetValue* ret) {
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_pinv_scalar_rcond");
op::PinvScalarRcondParam param;
op::PinvScalarRcondParam param = {};
nnvm::NodeAttrs attrs;
param.rcond = args[1].operator double();
param.hermitian = args[2].operator bool();
2 changes: 1 addition & 1 deletion src/api/operator/numpy/linalg/np_potrf.cc
@@ -33,7 +33,7 @@ MXNET_REGISTER_API("_npi.cholesky")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_cholesky");
nnvm::NodeAttrs attrs;
op::LaCholeskyParam param;
op::LaCholeskyParam param = {};
param.lower = args[1].operator bool();
attrs.parsed = param;
attrs.op = op;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/linalg/np_tensorinv.cc
@@ -33,7 +33,7 @@ MXNET_REGISTER_API("_npi.tensorinv")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_tensorinv");
nnvm::NodeAttrs attrs;
op::TensorinvParam param;
op::TensorinvParam param = {};
param.ind = args[1].operator int();
attrs.parsed = param;
attrs.op = op;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/linalg/np_tensorsolve.cc
@@ -33,7 +33,7 @@ MXNET_REGISTER_API("_npi.tensorsolve")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_tensorsolve");
nnvm::NodeAttrs attrs;
op::TensorsolveParam param;
op::TensorsolveParam param = {};
if (args[2].type_code() == kNull) {
param.a_axes = Tuple<int>();
} else {
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_bincount_op.cc
@@ -32,7 +32,7 @@ MXNET_REGISTER_API("_npi.bincount")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_bincount");
nnvm::NodeAttrs attrs;
op::NumpyBincountParam param;
op::NumpyBincountParam param = {};

int num_outputs = 0;
if (args[1].type_code() == kNull) {
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_broadcast_reduce_op_boolean.cc
@@ -33,7 +33,7 @@ MXNET_REGISTER_API("_npi.all").set_body([](runtime::MXNetArgs args, runtime::MXN
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_all");
nnvm::NodeAttrs attrs;
op::NumpyReduceAxesBoolParam param;
op::NumpyReduceAxesBoolParam param = {};

NDArray* out = args[3].operator mxnet::NDArray*();
NDArray** outputs = out == nullptr ? nullptr : &out;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_broadcast_reduce_op_index.cc
@@ -34,7 +34,7 @@ MXNET_REGISTER_API("_npi.argmax")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_argmax");
nnvm::NodeAttrs attrs;
op::ReduceAxisParam param;
op::ReduceAxisParam param = {};
// param.axis
if (args[1].type_code() == kNull) {
param.axis = dmlc::nullopt;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_broadcast_reduce_op_value.cc
@@ -34,7 +34,7 @@ MXNET_REGISTER_API("_npi.broadcast_to")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_broadcast_to");
nnvm::NodeAttrs attrs;
op::BroadcastToParam param;
op::BroadcastToParam param = {};
if (args[1].type_code() == kDLInt) {
param.shape = TShape(1, args[1].operator int64_t());
} else {
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_cross.cc
@@ -32,7 +32,7 @@ MXNET_REGISTER_API("_npi.cross").set_body([](runtime::MXNetArgs args, runtime::M
using namespace runtime;
nnvm::NodeAttrs attrs;
const nnvm::Op* op = Op::Get("_npi_cross");
op::NumpyCrossParam param;
op::NumpyCrossParam param = {};
param.axisa = args[2].operator int();
param.axisb = args[3].operator int();
param.axisc = args[4].operator int();
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_cumsum.cc
@@ -33,7 +33,7 @@ MXNET_REGISTER_API("_npi.cumsum")
using namespace runtime;
nnvm::NodeAttrs attrs;
const nnvm::Op* op = Op::Get("_npi_cumsum");
op::CumsumParam param;
op::CumsumParam param = {};
// axis
if (args[1].type_code() == kNull) {
param.axis = dmlc::nullopt;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_delete_op.cc
@@ -34,7 +34,7 @@ MXNET_REGISTER_API("_npi.delete")
using namespace runtime;
static const nnvm::Op* op = Op::Get("_npi_delete");
nnvm::NodeAttrs attrs;
op::NumpyDeleteParam param;
op::NumpyDeleteParam param = {};
int num_inputs = 0;
param.start = dmlc::nullopt;
param.step = dmlc::nullopt;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_diff_op.cc
@@ -31,7 +31,7 @@ MXNET_REGISTER_API("_npi.diff").set_body([](runtime::MXNetArgs args, runtime::MX
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_diff");
nnvm::NodeAttrs attrs;
op::DiffParam param;
op::DiffParam param = {};
param.n = args[1].operator int();
param.axis = args[2].operator int();

2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_ediff1d_op.cc
@@ -32,7 +32,7 @@ MXNET_REGISTER_API("_npi.ediff1d")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_ediff1d");
nnvm::NodeAttrs attrs;
op::EDiff1DParam param;
op::EDiff1DParam param = {};
int num_inputs = 1;
NDArray* inputs[3];
inputs[0] = args[0].operator mxnet::NDArray*();
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_einsum_op.cc
@@ -34,7 +34,7 @@ MXNET_REGISTER_API("_npi.einsum")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_einsum");
nnvm::NodeAttrs attrs;
op::NumpyEinsumParam param;
op::NumpyEinsumParam param = {};
int args_size = args.size();
// param.num_args
param.num_args = args_size - 3;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_elemwise_unary_op_basic.cc
@@ -96,7 +96,7 @@ MXNET_REGISTER_API("_npi.around")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_around");
nnvm::NodeAttrs attrs;
op::AroundParam param;
op::AroundParam param = {};
param.decimals = args[1].operator int64_t();
attrs.parsed = param;
attrs.op = op;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_fill_diagonal_op.cc
@@ -32,7 +32,7 @@ MXNET_REGISTER_API("_npi.fill_diagonal")
const nnvm::Op* op = Op::Get("_npi_fill_diagonal");
nnvm::NodeAttrs attrs;

op::NumpyFillDiagonalParam param;
op::NumpyFillDiagonalParam param = {};
int num_inputs = 1;
NDArray* inputs[] = {args[0].operator mxnet::NDArray*()};

2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_histogram_op.cc
@@ -34,7 +34,7 @@ MXNET_REGISTER_API("_npi.histogram")
using namespace runtime;
nnvm::NodeAttrs attrs;
const nnvm::Op* op = Op::Get("_npi_histogram");
op::HistogramParam param;
op::HistogramParam param = {};
// parse bin_cnt
if (args[2].type_code() == kNull) {
param.bin_cnt = dmlc::nullopt;
26 changes: 15 additions & 11 deletions src/api/operator/numpy/np_init_op.cc
@@ -35,7 +35,7 @@ MXNET_REGISTER_API("_npi.zeros").set_body([](runtime::MXNetArgs args, runtime::M
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_zeros");
nnvm::NodeAttrs attrs;
op::InitOpParam param;
op::InitOpParam param = {};
if (args[0].type_code() == kDLInt) {
param.shape = TShape(1, args[0].operator int64_t());
} else {
Expand All @@ -62,7 +62,7 @@ MXNET_REGISTER_API("_npi.full_like")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_full_like");
nnvm::NodeAttrs attrs;
op::FullLikeOpParam param;
op::FullLikeOpParam param = {};
param.fill_value = args[1].operator double();
if (args[2].type_code() == kNull) {
param.dtype = dmlc::nullopt;
@@ -93,7 +93,7 @@ MXNET_REGISTER_API("_npi.indices")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_indices");
nnvm::NodeAttrs attrs;
op::IndicesOpParam param;
op::IndicesOpParam param = {};
// param.dimensions
if (args[0].type_code() == kDLInt) {
param.dimensions = TShape(1, args[0].operator int64_t());
@@ -124,7 +124,7 @@ MXNET_REGISTER_API("_npi.atleast_1d")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_atleast_1d");
nnvm::NodeAttrs attrs;
op::AtleastNDParam param;
op::AtleastNDParam param = {};
int args_size = args.size();
param.num_args = args_size;
attrs.parsed = param;
@@ -151,7 +151,7 @@ MXNET_REGISTER_API("_npi.atleast_2d")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_atleast_2d");
nnvm::NodeAttrs attrs;
op::AtleastNDParam param;
op::AtleastNDParam param = {};
int args_size = args.size();
param.num_args = args_size;
attrs.parsed = param;
@@ -178,7 +178,7 @@ MXNET_REGISTER_API("_npi.atleast_3d")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_atleast_3d");
nnvm::NodeAttrs attrs;
op::AtleastNDParam param;
op::AtleastNDParam param = {};
int args_size = args.size();
param.num_args = args_size;
attrs.parsed = param;
@@ -205,7 +205,7 @@ MXNET_REGISTER_API("_npi.arange")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_arange");
nnvm::NodeAttrs attrs;
op::RangeParam param;
op::RangeParam param = {};
param.start = args[0].operator double();
if (args[1].type_code() == kNull) {
param.stop = dmlc::nullopt;
@@ -236,7 +236,7 @@ MXNET_REGISTER_API("_npi.eye").set_body([](runtime::MXNetArgs args, runtime::MXN
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_eye");
nnvm::NodeAttrs attrs;
op::NumpyEyeParam param;
op::NumpyEyeParam param = {};
param.N = args[0].operator nnvm::dim_t();
if (args[1].type_code() == kNull) {
param.M = dmlc::nullopt;
@@ -317,7 +317,7 @@ MXNET_REGISTER_API("_npi.logspace")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_logspace");
nnvm::NodeAttrs attrs;
op::LogspaceParam param;
op::LogspaceParam param = {};
param.start = args[0].operator double();
param.stop = args[1].operator double();
if (features::is_enabled(features::INT64_TENSOR_SIZE))
@@ -354,7 +354,7 @@ MXNET_REGISTER_API("_npi.ones").set_body([](runtime::MXNetArgs args, runtime::MX
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_ones");
nnvm::NodeAttrs attrs;
op::InitOpParam param;
op::InitOpParam param = {};
if (args[0].type_code() == kDLInt) {
param.shape = TShape(1, args[0].operator int64_t());
} else {
@@ -380,7 +380,7 @@ MXNET_REGISTER_API("_npi.full").set_body([](runtime::MXNetArgs args, runtime::MX
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_full");
nnvm::NodeAttrs attrs;
<<<<<<< HEAD
op::NumpyInitOpWithScalarParam param;
=======
op::InitOpWithScalarParam param = {};
>>>>>>> 119a2314f (Zero intialization to avoid error message on a Centos)
if (args[0].type_code() == kDLInt) {
param.shape = TShape(1, args[0].operator int64_t());
} else {
@@ -423,7 +427,7 @@ MXNET_REGISTER_API("_npi.identity")
using namespace runtime;
const nnvm::Op* op = Op::Get("_npi_identity");
nnvm::NodeAttrs attrs;
op::InitOpParam param;
op::InitOpParam param = {};
param.shape = TShape(args[0].operator ObjectRef());
if (args[1].type_code() == kNull) {
param.dtype = mxnet::common::GetDefaultDtype();
6 changes: 3 additions & 3 deletions src/api/operator/numpy/np_insert_op.cc
@@ -37,7 +37,7 @@ MXNET_REGISTER_API("_npi.insert_scalar")
using namespace runtime;
static const nnvm::Op* op = Op::Get("_npi_insert_scalar");
nnvm::NodeAttrs attrs;
op::NumpyInsertParam param;
op::NumpyInsertParam param = {};
int num_inputs = 0;
param.start = dmlc::nullopt;
param.step = dmlc::nullopt;
@@ -78,7 +78,7 @@ MXNET_REGISTER_API("_npi.insert_slice")
using namespace runtime;
static const nnvm::Op* op = Op::Get("_npi_insert_slice");
nnvm::NodeAttrs attrs;
op::NumpyInsertParam param;
op::NumpyInsertParam param = {};
int num_inputs = 0;
if (args[1].type_code() == kDLInt || args[1].type_code() == kDLUInt ||
args[1].type_code() == kDLFloat) {
@@ -126,7 +126,7 @@ MXNET_REGISTER_API("_npi.insert_tensor")
using namespace runtime;
static const nnvm::Op* op = Op::Get("_npi_insert_tensor");
nnvm::NodeAttrs attrs;
op::NumpyInsertParam param;
op::NumpyInsertParam param = {};
param.start = dmlc::nullopt;
param.step = dmlc::nullopt;
param.stop = dmlc::nullopt;
2 changes: 1 addition & 1 deletion src/api/operator/numpy/np_interp_op.cc
@@ -33,7 +33,7 @@ MXNET_REGISTER_API("_npi.interp")
using namespace runtime;
static const nnvm::Op* op = Op::Get("_npi_interp");
nnvm::NodeAttrs attrs;
op::NumpyInterpParam param;
op::NumpyInterpParam param = {};
if (args[3].type_code() == kNull) {
param.left = dmlc::nullopt;
} else {
(Remaining changed files not shown.)
