init commit
Rohit Kumar Srivastava committed Jul 18, 2019
1 parent 4d07d78 commit a0014ed
Showing 10 changed files with 116 additions and 45 deletions.
20 changes: 17 additions & 3 deletions include/mxnet/c_api.h
@@ -55,7 +55,8 @@ extern "C" {
 #endif
 
 /*! \brief manually define unsigned int */
-typedef unsigned int mx_uint;
+typedef int64_t mx_int64;
+typedef uint32_t mx_uint;
 /*! \brief manually define float */
 typedef float mx_float;
 /*! \brief data type to store dim size */
@@ -556,6 +557,13 @@ MXNET_DLL int MXNDArrayCreateEx(const mx_uint *shape,
                                 int dtype,
                                 NDArrayHandle *out);
 
+MXNET_DLL int MXNDArrayCreateExInt64(const mx_int64 *shape,
+                                     mx_uint ndim,
+                                     int dev_type,
+                                     int dev_id,
+                                     int delay_alloc,
+                                     int dtype,
+                                     NDArrayHandle *out);
 
 /*!
  * \brief create an empty sparse NDArray with specified shape and data type
@@ -793,6 +801,11 @@ MXNET_DLL int MXNDArrayReshape64(NDArrayHandle handle,
 MXNET_DLL int MXNDArrayGetShape(NDArrayHandle handle,
                                 mx_uint *out_dim,
                                 const mx_uint **out_pdata);
+
+MXNET_DLL int MXNDArrayGetShapeInt64(NDArrayHandle handle,
+                                     mx_int64 *out_dim,
+                                     const mx_int64 **out_pdata);
+
 /*!
  * \brief get the shape of the array
  * \param handle the handle to the narray
@@ -802,7 +815,8 @@ MXNET_DLL int MXNDArrayGetShape(NDArrayHandle handle,
  */
 MXNET_DLL int MXNDArrayGetShapeEx(NDArrayHandle handle,
                                   int *out_dim,
-                                  const int **out_pdata);
+                                  const int64_t **out_pdata);
 
 /*!
 * \brief get the content of the data in NDArray
 * \param handle the handle to the ndarray
@@ -1456,7 +1470,7 @@ MXNET_DLL int MXSymbolListOutputs(SymbolHandle symbol,
 * \return 0 when success, -1 when failure happens
 */
 MXNET_DLL int MXSymbolGetNumOutputs(SymbolHandle symbol,
-                                   mx_uint *output_count);
+                                    mx_uint *output_count);
 
 /*!
 * \brief Get a symbol that contains all the internals.
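For orientation, a minimal sketch of how a frontend might exercise the new 64-bit entry point through ctypes. The library path, the device enum (1 for CPU) and the dtype enum (0 for float32) are assumptions based on MXNet's usual conventions, not part of this diff, and the snippet needs a libmxnet built from this commit:

import ctypes

# Assumption: a libmxnet.so built from this commit is on the loader path.
lib = ctypes.CDLL("libmxnet.so")

# Shape extents now travel as int64, so a single axis may exceed 2**31 - 1.
shape = (2**31 + 10, 2)
c_shape = (ctypes.c_int64 * len(shape))(*shape)
handle = ctypes.c_void_p()

ret = lib.MXNDArrayCreateExInt64(
    c_shape,
    ctypes.c_uint(len(shape)),
    ctypes.c_int(1),   # dev_type: assumed kCPU == 1
    ctypes.c_int(0),   # dev_id
    ctypes.c_int(1),   # delay_alloc: defer allocation until first use
    ctypes.c_int(0),   # dtype: assumed float32 == 0
    ctypes.byref(handle))
assert ret == 0, "MXNDArrayCreateExInt64 failed"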
3 changes: 2 additions & 1 deletion include/mxnet/c_predict_api.h
@@ -42,7 +42,8 @@ extern "C" {
 #endif
 
 /*! \brief manually define unsigned int */
-typedef unsigned int mx_uint;
+typedef int64_t mx_int64;
+typedef uint32_t mx_uint;
 /*! \brief manually define float */
 typedef float mx_float;
 /*! \brief handle to Predictor */
1 change: 0 additions & 1 deletion include/mxnet/tuple.h
@@ -366,7 +366,6 @@ class Tuple {
   }
 };
 
-
 /*! \brief check if a shape's ndim is known. */
 inline bool ndim_is_known(const int ndim) {
   CHECK_GE(ndim, -1) << "shape ndim must be >= -1, while received " << ndim;
1 change: 1 addition & 0 deletions python/mxnet/base.py
@@ -215,6 +215,7 @@ def _load_lib():
 # type definitions
 mx_int = ctypes.c_int
 mx_uint = ctypes.c_uint
+mx_int64 = ctypes.c_int64
 mx_float = ctypes.c_float
 mx_float_p = ctypes.POINTER(mx_float)
 mx_real_t = _np.float32
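The new mx_int64 alias pairs with the array-module typecode 'q' (signed 64-bit), replacing the 'I' (unsigned 32-bit) typecode previously used for shapes. A small sketch of the marshalling this enables; the values are illustrative:

import ctypes
from array import array

mx_int64 = ctypes.c_int64          # the alias added above

shape = (2**32, 4)                 # first extent no longer fits in 'I'
native = array('q', shape)        # 'q' holds signed 64-bit integers
c_shape = (mx_int64 * len(native)).from_buffer(native)
print(list(c_shape))               # [4294967296, 4]
# For comparison, array('I', shape) would raise OverflowError here.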
12 changes: 7 additions & 5 deletions python/mxnet/ndarray/ndarray.py
@@ -35,7 +35,7 @@
 import numpy as np
 from ..base import _LIB, numeric_types, integer_types
 from ..base import c_str, c_array, c_array_buf, c_handle_array, mx_real_t
-from ..base import mx_uint, NDArrayHandle, check_call, DLPackHandle, mx_int
+from ..base import mx_uint, NDArrayHandle, check_call, DLPackHandle, mx_int, mx_int64
 from ..base import ctypes2buffer
 from ..context import Context, current_context
 from . import _internal
@@ -130,10 +130,12 @@ def _new_alloc_handle(shape, ctx, delay_alloc, dtype=mx_real_t):
     handle
         A new empty `NDArray` handle.
     """
+    print("shape={}".format(shape))
     hdl = NDArrayHandle()
-    check_call(_LIB.MXNDArrayCreateEx(
-        c_array_buf(mx_uint, native_array('I', shape)),
-        mx_uint(len(shape)),
+    # check_call(_LIB.MXNDArrayCreateEx(
+    check_call(_LIB.MXNDArrayCreateExInt64(
+        c_array_buf(mx_int64, native_array('q', shape)),
+        mx_int64(len(shape)),
         ctypes.c_int(ctx.device_typeid),
         ctypes.c_int(ctx.device_id),
         ctypes.c_int(int(delay_alloc)),
@@ -1847,7 +1849,7 @@ def shape(self):
         (2L, 3L, 4L)
         """
         ndim = mx_int()
-        pdata = ctypes.POINTER(mx_int)()
+        pdata = ctypes.POINTER(mx_int64)()
         check_call(_LIB.MXNDArrayGetShapeEx(
             self.handle, ctypes.byref(ndim), ctypes.byref(pdata)))
         if ndim.value == -1:
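With _new_alloc_handle routed through MXNDArrayCreateExInt64 and the shape property reading back through int64 pointers, the round trip can be checked end to end. A minimal sketch, assuming an mxnet build from this commit:

import ctypes
import mxnet as mx
from mxnet.base import _LIB, check_call, mx_int, mx_int64

a = mx.nd.ones(shape=(3, 4))
ndim = mx_int()
pdata = ctypes.POINTER(mx_int64)()   # must match the const int64_t** above
check_call(_LIB.MXNDArrayGetShapeEx(
    a.handle, ctypes.byref(ndim), ctypes.byref(pdata)))
print([pdata[i] for i in range(ndim.value)])   # [3, 4]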
45 changes: 36 additions & 9 deletions src/c_api/c_api.cc
@@ -179,13 +179,29 @@ int MXNDArrayCreate(const mx_uint *shape,
   API_END();
 }
 
+int MXNDArrayCreateExInt64(const mx_int64 *shape,
+                           mx_uint ndim,
+                           int dev_type,
+                           int dev_id,
+                           int delay_alloc,
+                           int dtype,
+                           NDArrayHandle *out) {
+  API_BEGIN();
+  *out = new NDArray(
+      mxnet::TShape(shape, shape + ndim),
+      Context::Create(static_cast<Context::DeviceType>(dev_type), dev_id),
+      delay_alloc != 0,
+      dtype);
+  API_END();
+}
+
 int MXNDArrayCreateEx(const mx_uint *shape,
-                     mx_uint ndim,
-                     int dev_type,
-                     int dev_id,
-                     int delay_alloc,
-                     int dtype,
-                     NDArrayHandle *out) {
+                      mx_uint ndim,
+                      int dev_type,
+                      int dev_id,
+                      int delay_alloc,
+                      int dtype,
+                      NDArrayHandle *out) {
   API_BEGIN();
   *out = new NDArray(
       mxnet::TShape(shape, shape + ndim),
@@ -513,9 +529,10 @@ int MXNDArrayGetShape(NDArrayHandle handle,
   API_END();
 }
 
-int MXNDArrayGetShapeEx(NDArrayHandle handle,
+template<typename IntType>
+int MXNDArrayGetShapeExInt(NDArrayHandle handle,
                         int *out_dim,
-                        const int **out_pdata) {
+                        const IntType **out_pdata) {
   MXAPIThreadLocalEntry *ret = MXAPIThreadLocalStore::Get();
   API_BEGIN();
   NDArray *arr = static_cast<NDArray*>(handle);
@@ -526,7 +543,7 @@ int MXNDArrayGetShapeEx(NDArrayHandle handle,
   }
   *out_dim = s.ndim();
   if (s.ndim() >= 0) {
-    std::vector<int> &buffer = ret->arg_shape_buffer_ex;
+    std::vector<IntType> &buffer = ret->arg_shape_buffer_ex_int64;
     buffer.resize(s.ndim());
     mxnet::ShapeTypeCast(s.begin(), s.end(), buffer.data());
     *out_pdata = buffer.data();
@@ -541,6 +558,16 @@ int MXNDArrayGetShapeEx(NDArrayHandle handle,
   API_END();
 }
 
+int MXNDArrayGetShapeEx(NDArrayHandle handle,
+                        int *out_dim,
+                        const int64_t **out_pdata) {
+#if MXNET_USE_INT64_TENSOR_SIZE == 1
+  return MXNDArrayGetShapeExInt<int64_t>(handle, out_dim, out_pdata);
+#else
+  return MXNDArrayGetShapeExInt<int32_t>(handle, out_dim, out_pdata);
+#endif
+}
+
 int MXNDArrayGetData(NDArrayHandle handle,
                      void **out_pdata) {
   API_BEGIN();
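Because MXNDArrayGetShapeEx keeps a single exported symbol while the pointed-to element width is chosen at compile time, a caller has to know how the library was built. A hedged sketch of that caller-side consequence; the flag value here is hypothetical and would come from the build configuration:

import ctypes

def shape_elem_pointer(int64_tensor_size):
    # Pick the ctypes element type matching MXNET_USE_INT64_TENSOR_SIZE:
    # the symbol name is stable, but the element width is not.
    elem = ctypes.c_int64 if int64_tensor_size else ctypes.c_int32
    return ctypes.POINTER(elem)()

# Hypothetical: a frontend built against an int64-enabled libmxnet.
pdata = shape_elem_pointer(int64_tensor_size=True)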
27 changes: 27 additions & 0 deletions src/c_api/c_api_common.h
@@ -81,10 +81,12 @@ struct MXAPIThreadLocalEntry {
   std::vector<const mx_uint*> arg_shape_data, out_shape_data, aux_shape_data;
   /*! \brief result holder for returning shape pointer */
   std::vector<const int*> arg_shape_data_ex, out_shape_data_ex, aux_shape_data_ex;
+  std::vector<const int64_t*> arg_shape_data_ex_int64, out_shape_data_ex_int64, aux_shape_data_ex_int64;
   /*! \brief uint32_t buffer for returning shape pointer */
   std::vector<uint32_t> arg_shape_buffer, out_shape_buffer, aux_shape_buffer;
   /*! \brief uint32_t buffer for returning shape pointer */
   std::vector<int> arg_shape_buffer_ex, out_shape_buffer_ex, aux_shape_buffer_ex;
+  std::vector<int64_t> arg_shape_buffer_ex_int64, out_shape_buffer_ex_int64, aux_shape_buffer_ex_int64;
   /*! \brief bool buffer */
   std::vector<bool> save_inputs, save_outputs;
   // DEPRECATED. Use SetupShapeArrayReturnWithBufferEx instead.
@@ -130,6 +132,31 @@ struct MXAPIThreadLocalEntry {
       }
     }
   }
+
+  inline static void SetupShapeArrayReturnWithBufferExInt64(
+      const mxnet::ShapeVector &shapes,
+      std::vector<int> *ndim,
+      std::vector<const int64_t*> *data,
+      std::vector<int64_t> *buffer) {
+    ndim->resize(shapes.size());
+    data->resize(shapes.size());
+    size_t size = 0;
+    for (const auto& s : shapes) {
+      if (s.ndim() > 0) {
+        size += s.ndim();
+      }
+    }
+    buffer->resize(size);
+    int64_t *ptr = buffer->data();
+    for (size_t i = 0; i < shapes.size(); ++i) {
+      ndim->at(i) = shapes[i].ndim();
+      data->at(i) = ptr;
+      if (shapes[i].ndim() > 0) {
+        ptr = mxnet::ShapeTypeCast(shapes[i].begin(), shapes[i].end(), ptr);
+      }
+    }
+  }
+
 };
 
 // define the threadlocal store.
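SetupShapeArrayReturnWithBufferExInt64 packs every shape's extents into one contiguous int64 buffer and records a pointer to where each shape starts. A small Python model of the same layout logic, with list offsets standing in for the raw pointers (names are illustrative):

def setup_shape_array_return(shapes):
    # Mirrors the helper: ndim per shape, one flat buffer of extents,
    # and offsets[i] marking where shape i begins (data->at(i) = ptr).
    ndim = [len(s) for s in shapes]
    offsets, buffer = [], []
    for s in shapes:
        offsets.append(len(buffer))   # pointer into the shared buffer
        buffer.extend(s)              # ShapeTypeCast copies the extents
    return ndim, offsets, buffer

print(setup_shape_array_return([(2, 3), (), (4,)]))
# ([2, 0, 1], [0, 2, 2], [2, 3, 4])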
42 changes: 21 additions & 21 deletions src/operator/tensor/matrix_op-inl.h
@@ -647,9 +647,9 @@ void SliceEx(const nnvm::NodeAttrs& attrs,
 
 template<int ndim>
 inline void GetIndexRange(const mxnet::TShape& dshape,
-                          const mxnet::Tuple<dmlc::optional<int>>& param_begin,
-                          const mxnet::Tuple<dmlc::optional<int>>& param_end,
-                          const mxnet::Tuple<dmlc::optional<int>>& param_step,
+                          const mxnet::Tuple<dmlc::optional<index_t>>& param_begin,
+                          const mxnet::Tuple<dmlc::optional<index_t>>& param_end,
+                          const mxnet::Tuple<dmlc::optional<index_t>>& param_step,
                           common::StaticArray<index_t, ndim>* begin,
                           common::StaticArray<index_t, ndim>* end,
                           common::StaticArray<index_t, ndim>* step) {
@@ -1010,8 +1010,8 @@ void SliceAssignOpForward(const nnvm::NodeAttrs& attrs,
 
 struct SliceAssignScalarParam : public dmlc::Parameter<SliceAssignScalarParam> {
   double scalar;
-  mxnet::Tuple<dmlc::optional<int>> begin, end;
-  mxnet::Tuple<dmlc::optional<int>> step;
+  mxnet::Tuple<dmlc::optional<index_t>> begin, end;
+  mxnet::Tuple<dmlc::optional<index_t>> step;
   DMLC_DECLARE_PARAMETER(SliceAssignScalarParam) {
     DMLC_DECLARE_FIELD(scalar)
     .set_default(0)
@@ -1021,7 +1021,7 @@ struct SliceAssignScalarParam : public dmlc::Parameter<SliceAssignScalarParam> {
     DMLC_DECLARE_FIELD(end)
     .describe("ending indices for the slice operation, supports negative indices.");
     DMLC_DECLARE_FIELD(step)
-    .set_default(mxnet::Tuple<dmlc::optional<int>>())
+    .set_default(mxnet::Tuple<dmlc::optional<index_t>>())
     .describe("step for the slice operation, supports negative values.");
   }
 };
@@ -1323,12 +1323,12 @@ inline bool SliceLikeShape(const nnvm::NodeAttrs& attrs,
 inline void SliceLikeInferRanges(const mxnet::TShape& dshape,
                                  const mxnet::TShape& fshape,
                                  const mxnet::Tuple<int>& axes,
-                                 mxnet::Tuple<dmlc::optional<int>>* param_begin,
-                                 mxnet::Tuple<dmlc::optional<int>>* param_end,
-                                 mxnet::Tuple<dmlc::optional<int>>* param_step) {
-  std::vector<dmlc::optional<int>> pb(dshape.ndim());
-  std::vector<dmlc::optional<int>> pe(dshape.ndim());
-  std::vector<dmlc::optional<int>> ps(dshape.ndim());
+                                 mxnet::Tuple<dmlc::optional<index_t>>* param_begin,
+                                 mxnet::Tuple<dmlc::optional<index_t>>* param_end,
+                                 mxnet::Tuple<dmlc::optional<index_t>>* param_step) {
+  std::vector<dmlc::optional<index_t>> pb(dshape.ndim());
+  std::vector<dmlc::optional<index_t>> pe(dshape.ndim());
+  std::vector<dmlc::optional<index_t>> ps(dshape.ndim());
   if (axes.ndim() == 0) {
     for (int i = 0; i < dshape.ndim(); ++i) {
       pb[i] = 0;
@@ -1352,9 +1352,9 @@ inline void SliceLikeInferRanges(const mxnet::TShape& dshape,
       ps[axis] = 1;
     }
   }
-  *param_begin = mxnet::Tuple<dmlc::optional<int>>(pb.begin(), pb.end());
-  *param_end = mxnet::Tuple<dmlc::optional<int>>(pe.begin(), pe.end());
-  *param_step = mxnet::Tuple<dmlc::optional<int>>(ps.begin(), ps.end());
+  *param_begin = mxnet::Tuple<dmlc::optional<index_t>>(pb.begin(), pb.end());
+  *param_end = mxnet::Tuple<dmlc::optional<index_t>>(pe.begin(), pe.end());
+  *param_step = mxnet::Tuple<dmlc::optional<index_t>>(ps.begin(), ps.end());
 }
 
 template<typename xpu>
@@ -1373,9 +1373,9 @@ void SliceLikeForward(const nnvm::NodeAttrs& attrs,
   const TBlob& out = outputs[0];
   const mxnet::TShape& ishape = data.shape_;
   const mxnet::TShape& from_shape = inputs[1].shape_;
-  mxnet::Tuple<dmlc::optional<int>> param_begin;
-  mxnet::Tuple<dmlc::optional<int>> param_end;
-  mxnet::Tuple<dmlc::optional<int>> param_step;
+  mxnet::Tuple<dmlc::optional<index_t>> param_begin;
+  mxnet::Tuple<dmlc::optional<index_t>> param_end;
+  mxnet::Tuple<dmlc::optional<index_t>> param_step;
   SliceLikeInferRanges(ishape, from_shape, param.axes, &param_begin, &param_end, &param_step);
 
   MXNET_NDIM_SWITCH(data.ndim(), ndim, {
@@ -1421,9 +1421,9 @@ void SliceLikeBackward(const nnvm::NodeAttrs& attrs,
 
   const mxnet::TShape& ishape = ograd.shape_;
   const mxnet::TShape& from_shape = outputs[1].shape_;
-  mxnet::Tuple<dmlc::optional<int>> param_begin;
-  mxnet::Tuple<dmlc::optional<int>> param_end;
-  mxnet::Tuple<dmlc::optional<int>> param_step;
+  mxnet::Tuple<dmlc::optional<index_t>> param_begin;
+  mxnet::Tuple<dmlc::optional<index_t>> param_end;
+  mxnet::Tuple<dmlc::optional<index_t>> param_step;
   SliceLikeInferRanges(ishape, from_shape, param.axes, &param_begin, &param_end, &param_step);
 
   MXNET_NDIM_SWITCH(ograd.ndim(), ndim, {
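Widening begin/end/step from int to index_t matters because GetIndexRange resolves optional and negative slice bounds against the axis length, and that arithmetic now runs on extents that can exceed 2**31 - 1. Python's slice.indices performs an analogous normalization, which makes for a compact single-axis model (a sketch, not the operator's exact error handling):

def get_index_range(dim_size, begin=None, end=None, step=None):
    # Resolve optional/negative bounds for one axis, as GetIndexRange does;
    # dim_size may exceed 2**31 - 1 once extents are 64-bit.
    return slice(begin, end, step).indices(dim_size)

print(get_index_range(2**33, begin=-1000))   # (8589933592, 8589934592, 1)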
6 changes: 3 additions & 3 deletions src/operator/tensor/slice-inl.h
@@ -34,15 +34,15 @@ namespace mxnet {
 namespace op {
 
 struct SliceParam : public dmlc::Parameter<SliceParam> {
-  mxnet::Tuple<dmlc::optional<int>> begin, end;
-  mxnet::Tuple<dmlc::optional<int>> step;
+  mxnet::Tuple<dmlc::optional<index_t>> begin, end;
+  mxnet::Tuple<dmlc::optional<index_t>> step;
   DMLC_DECLARE_PARAMETER(SliceParam) {
     DMLC_DECLARE_FIELD(begin)
     .describe("starting indices for the slice operation, supports negative indices.");
     DMLC_DECLARE_FIELD(end)
     .describe("ending indices for the slice operation, supports negative indices.");
     DMLC_DECLARE_FIELD(step)
-    .set_default(mxnet::Tuple<dmlc::optional<int>>())
+    .set_default(mxnet::Tuple<dmlc::optional<index_t>>())
     .describe("step for the slice operation, supports negative values.");
   }
   bool operator==(const SliceParam& other) const {
4 changes: 2 additions & 2 deletions tests/nightly/test_large_array.py
@@ -157,8 +157,8 @@ def test_take():
 
 
 def test_slice():
-    a = nd.ones(shape=(LARGE_X, SMALL_Y))
-    res = nd.slice(a, begin=(LARGE_X-1000, 1), end=(LARGE_X, SMALL_Y))
+    a = nd.ones(shape=(LARGE_SIZE, 2))
+    res = nd.slice(a, begin=(LARGE_SIZE-1000, 1), end=(LARGE_SIZE, 2))
     assert np.sum(res[-1].asnumpy() == 1) == res.shape[1]
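The rewritten test pins the sliced axis to a fixed width of 2 and pushes all of the size into the first axis, so the slice bounds themselves cross the int32 boundary. The constants below are assumptions following the naming in tests/nightly/test_large_array.py (the real values live in that module):

LARGE_X = 100000000            # hypothetical value of the test constant
SMALL_Y = 50                   # hypothetical value of the test constant
LARGE_SIZE = LARGE_X * SMALL_Y

INT32_MAX = 2**31 - 1
print(LARGE_SIZE > INT32_MAX)  # True: row indices alone overflow int32
print(LARGE_SIZE * 2)          # element count of nd.ones((LARGE_SIZE, 2))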
