diff --git a/include/mxnet/lib_api.h b/include/mxnet/lib_api.h
index db93dbe6ff41..2f7864fdea29 100644
--- a/include/mxnet/lib_api.h
+++ b/include/mxnet/lib_api.h
@@ -912,25 +912,25 @@ class Registry {
 
 /*! \brief declare a variable with custom name */
 #define MX_REGISTER_NAME_(Name) MXNet ## _CustomOp ## _
-#define MX_REGISTER_DEF_(Name) CustomOp MX_REGISTER_NAME_(Name)
+#define MX_REGISTER_DEF_(Name) mxnet::ext::CustomOp MX_REGISTER_NAME_(Name)
 
 #define MX_REGISTER_PROP_NAME_(Name) MXNet ## _CustomSubProp ## _
-#define MX_REGISTER_PROP_DEF_(Name) CustomPartitioner MX_REGISTER_PROP_NAME_(Name)
+#define MX_REGISTER_PROP_DEF_(Name) mxnet::ext::CustomPartitioner MX_REGISTER_PROP_NAME_(Name)
 
 #define MX_REGISTER_PASS_NAME_(Name) MXNet ## _CustomPass ## _
-#define MX_REGISTER_PASS_DEF_(Name) CustomPass MX_REGISTER_PASS_NAME_(Name)
+#define MX_REGISTER_PASS_DEF_(Name) mxnet::ext::CustomPass MX_REGISTER_PASS_NAME_(Name)
 
 /*! \brief assign a var to a value */
 #define REGISTER_OP(Name) MX_STR_CONCAT(MX_REGISTER_DEF_(Name), __COUNTER__) = \
-    Registry<CustomOp>::get()->add(MX_TOSTRING(Name))
+    mxnet::ext::Registry<mxnet::ext::CustomOp>::get()->add(MX_TOSTRING(Name))
 
 #define REGISTER_PARTITIONER(Name) \
     MX_STR_CONCAT(MX_REGISTER_PROP_DEF_(Name), __COUNTER__) = \
-    Registry<CustomPartitioner>::get()->add(MX_TOSTRING(Name))
+    mxnet::ext::Registry<mxnet::ext::CustomPartitioner>::get()->add(MX_TOSTRING(Name))
 
 #define REGISTER_PASS(Name) \
     MX_STR_CONCAT(MX_REGISTER_PASS_DEF_(Name), __COUNTER__) = \
-    Registry<CustomPass>::get()->add(MX_TOSTRING(Name))
+    mxnet::ext::Registry<mxnet::ext::CustomPass>::get()->add(MX_TOSTRING(Name))
 
 /* -------------- BELOW ARE CTYPE FUNCTIONS PROTOTYPES --------------- */
 
diff --git a/python/mxnet/gluon/block.py b/python/mxnet/gluon/block.py
index 8282c93a6f6d..850ce1fffb6d 100644
--- a/python/mxnet/gluon/block.py
+++ b/python/mxnet/gluon/block.py
@@ -999,7 +999,7 @@ def _build_cache(self, *args):
                                        'added to the parameter dicts.\n'
                                        'Please check the backend.')
 
-                param = Parameter(name)
+                param = Parameter(name, dtype=param_data.dtype)
                 param._load_init(param_data, args[0].context)
                 pair = (False, param)
 
diff --git a/python/mxnet/gluon/parameter.py b/python/mxnet/gluon/parameter.py
index 55b0f4a963a1..89456cb07f7b 100644
--- a/python/mxnet/gluon/parameter.py
+++ b/python/mxnet/gluon/parameter.py
@@ -389,10 +389,13 @@ def _reduce(self):
         ctx = context.cpu()
         if self._stype == 'default':
             block = self.list_data()
-            if is_np_array():
-                data = sum([w.copyto(ctx) for w in block]) / len(block)
+            if len(block) > 1:
+                if is_np_array():
+                    data = sum([w.copyto(ctx) for w in block]) / len(block)
+                else:
+                    data = ndarray.add_n(*(w.copyto(ctx) for w in block)) / len(block)
             else:
-                data = ndarray.add_n(*(w.copyto(ctx) for w in block)) / len(block)
+                data = self.data().copyto(ctx)
         else:
             # fetch all rows for 'row_sparse' param
             all_row_ids = ndarray.arange(0, self.shape[0], dtype='int64', ctx=ctx)
diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc
index 5e91bccde99e..123c46e5859f 100644
--- a/src/c_api/c_api.cc
+++ b/src/c_api/c_api.cc
@@ -869,7 +869,13 @@ void registerOperators(void *lib, int verbose, mxnet::ext::msgSize_t msgSize,
       auto in_first = in_shape->begin();
       auto in_last = in_first + in_shape->size() - extra_inputs;
       mxnet::ShapeVector *sg_in_shapes = new mxnet::ShapeVector(in_first, in_last);
-      return mxnet::op::DefaultSubgraphOpShape(attrs, sg_in_shapes, out_shape);
+      bool res = mxnet::op::DefaultSubgraphOpShape(attrs, sg_in_shapes, out_shape);
+
+      // assign modified input shapes to ShapeVector
+      for (unsigned i = 0; i < sg_in_shapes->size(); ++i) {
+        SHAPE_ASSIGN_CHECK(*in_shape, i, sg_in_shapes->at(i));
+      }
+      return res;
     };
 
     // lambda function to call infer type
@@ -933,7 +939,12 @@ void registerOperators(void *lib, int verbose, mxnet::ext::msgSize_t msgSize,
      auto in_last = in_first + in_type->size() - extra_inputs;
      std::vector<int> *sg_in_types = new std::vector<int>(in_first, in_last);
 
-      return mxnet::op::DefaultSubgraphOpType(attrs, sg_in_types, out_type);
+      bool res = mxnet::op::DefaultSubgraphOpType(attrs, sg_in_types, out_type);
+      // copy and assign modified input types
+      for (size_t i = 0; i < sg_in_types->size(); i++) {
+        TYPE_ASSIGN_CHECK(*in_type, i, sg_in_types->at(i));
+      }
+      return res;
     };
 
     // lambda function to convert from external mutate_inputs to internal MXNet types
@@ -1033,8 +1044,13 @@ void registerOperators(void *lib, int verbose, mxnet::ext::msgSize_t msgSize,
      auto in_last = in_first + in_stypes->size() - extra_inputs;
      std::vector<int> *sg_in_stypes = new std::vector<int>(in_first, in_last);
 
-      return mxnet::op::DefaultSubgraphOpStorageType(attrs, dev_mask, dispatch_mode,
-                                                     sg_in_stypes, out_stypes);
+      bool res = mxnet::op::DefaultSubgraphOpStorageType(attrs, dev_mask, dispatch_mode,
+                                                         sg_in_stypes, out_stypes);
+      // copy and assign modified input storage types
+      for (size_t i = 0; i < sg_in_stypes->size(); i++) {
+        STORAGE_TYPE_ASSIGN_CHECK(*in_stypes, i, sg_in_stypes->at(i));
+      }
+      return res;
     };
 
     // FGradient register lambda
@@ -1416,7 +1432,7 @@ void registerPasses(void *lib, int verbose, mxnet::ext::msgSize_t msgSize,
   // this temp workspace holds memory allocated by custom library via OpResource
   auto ndarray_alloc = [&](const mxnet::TShape &shape, Context ctx, int dtype,
                            std::string name, bool isArg) {
-    NDArray* arr = new NDArray(shape, ctx, dtype);
+    NDArray* arr = new NDArray(shape, ctx, false, dtype);
     if (isArg) {
       new_args.push_back(arr);
       new_arg_names.push_back(name);
diff --git a/tools/pip/setup.py b/tools/pip/setup.py
index 0d28362f7c78..1950f1dd28ac 100644
--- a/tools/pip/setup.py
+++ b/tools/pip/setup.py
@@ -104,6 +104,10 @@ def has_ext_modules(self):
     shutil.copytree(os.path.join(CURRENT_DIR, 'mxnet-build/3rdparty/tvm/nnvm/include/nnvm'),
                     os.path.join(CURRENT_DIR, 'mxnet/include/nnvm'))
 
+# copy cc file for mxnet extensions
+shutil.copy(os.path.join(CURRENT_DIR, 'mxnet-build/src/lib_api.cc'),
+            os.path.join(CURRENT_DIR, 'mxnet/src'))
+
 package_name = 'mxnet'
 
 variant = os.environ['mxnet_variant'].upper()
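
Note on the lib_api.h hunk above: the registration macros now expand to fully qualified names because extension libraries invoke them outside the mxnet::ext namespace, where an unqualified Registry<CustomOp> does not resolve. A minimal sketch of such a library follows; the op name my_noop and the trivial parseAttrs body are illustrative only, and the callback signature assumes the 2.x custom-op API from lib_api.h.

#include "mxnet/lib_api.h"

// parseAttrs callback: declare one input and one output tensor.
mxnet::ext::MXReturnValue parseAttrs(
    const std::unordered_map<std::string, std::string>& attrs,
    int* num_in, int* num_out) {
  *num_in = 1;
  *num_out = 1;
  return mxnet::ext::MX_SUCCESS;
}

// Expands roughly to:
//   mxnet::ext::CustomOp MXNet_CustomOp_<counter> =
//       mxnet::ext::Registry<mxnet::ext::CustomOp>::get()->add("my_noop")
// so it now compiles from the global namespace without a
// `using namespace mxnet::ext;` directive.
REGISTER_OP(my_noop)
.setParseAttrs(parseAttrs);

When built against the pip package, such a library also needs src/lib_api.cc compiled in, which appears to be why the setup.py hunk ships that file with the wheel.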