From 822706ed8968767754feb0cd5bc2d3328faca319 Mon Sep 17 00:00:00 2001 From: wkcn Date: Fri, 3 Aug 2018 08:03:53 +0800 Subject: [PATCH 01/26] add dlpack converter api --- include/mxnet/c_api.h | 4 ++++ include/mxnet/tensor_blob.h | 17 +++++++++++++++++ src/c_api/c_api.cc | 20 ++++++++++++++++++++ 3 files changed, 41 insertions(+) diff --git a/include/mxnet/c_api.h b/include/mxnet/c_api.h index 75147cfd706d..f13150d59bfc 100644 --- a/include/mxnet/c_api.h +++ b/include/mxnet/c_api.h @@ -690,6 +690,10 @@ MXNET_DLL int MXNDArrayGetShape(NDArrayHandle handle, */ MXNET_DLL int MXNDArrayGetData(NDArrayHandle handle, void **out_pdata); +MXNET_DLL int MXNDArrayToDLTensor(NDArrayHandle handle, + void **out_pdltensor); +MXNET_DLL int MXNDArrayFromDLTensor(void *in_pdltensor, + NDArrayHandle *out_handle); /*! * \brief get the type of the data in NDArray * \param handle the handle to the narray diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index 6f604a5bb8d9..433dc2795a69 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -104,6 +104,8 @@ class TBlob { : dptr_(dptr), shape_(shape), type_flag_(type_flag) { SetDLTensor(dev_mask, dev_id); } + TBlob(const DLTensor &dltensor) : dptr_(dltensor.data), shape_(TShape(dltensor.shape, dltensor.shape + dltensor.ndim)), type_flag_(DLDataTypeTransform(dltensor.dtype)), dltensor_(dltensor) { + } /*! 
* \brief constructor from tensor * \param src source tensor @@ -336,6 +338,21 @@ class TBlob { } } } + static int DLDataTypeTransform(DLDataType dldata_type_flag) { + switch (dldata_type_flag) { + case DLDataType{kDLFloat, 32, 1}: return mshadow::kFloat32; + case DLDataType{kDLFloat, 64, 1}: return mshadow::kFloat64; + case DLDataType{kDLFloat, 16, 1}: return mshadow::kFloat16; + case DLDataType{kDLUInt, 8, 1}: return mshadow::kUint8; + case DLDataType{kDLInt, 32, 1}: return mshadow::kInt32; + case DLDataType{kDLInt, 8, 1}: return mshadow::kInt8; + case DLDataType{kDLInt, 64, 1}: return mshadow::kInt64; + default: { + LOG(FATAL) << "Unknown dldata_type_flag=" << type_flag; + return mshadow::kFloat32; + } + } + } inline void SetDLTensor(int dev_mask, int dev_id) { dltensor_.data = dptr_; diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc index 118af6793156..269c41928651 100644 --- a/src/c_api/c_api.cc +++ b/src/c_api/c_api.cc @@ -492,6 +492,26 @@ int MXNDArrayGetData(NDArrayHandle handle, API_END(); } +int MXNDArrayToDLTensor(NDArrayHandle handle, + void **out_pdltensor) { + API_BEGIN(); + NDArray *arr = static_cast(handle); + if (!arr->is_none()) { + *out_pdltensor = arr->data().dlpack(); + } else { + *out_pdltensor = nullptr; + } + API_END(); +} + +int MXNDArrayFromDLTensor(void *in_pdltensor, + NDArrayHandle *out_handle) { + API_BEGIN(); + DLTensor *pdltensor = static_cast(in_pdltensor); + *out_handle = new NDArray(TBlob(*pdltensor), pdltensor->device_id); + API_END(); +} + int MXNDArrayGetDType(NDArrayHandle handle, int *out_dtype) { API_BEGIN(); From ab6fa85c7079babf243dd82d5c675b73d6a2d0ad Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 6 Aug 2018 19:12:57 +0800 Subject: [PATCH 02/26] add to_dlpack and from_dlpack for NDArray --- 3rdparty/dmlc-core | 2 +- 3rdparty/googletest | 2 +- 3rdparty/mkldnn | 2 +- 3rdparty/mshadow | 2 +- 3rdparty/nnvm | 1 + include/mxnet/c_api.h | 36 +++++++++++++++++--- include/mxnet/ndarray.h | 20 +++++++++++ 
include/mxnet/tensor_blob.h | 50 ++++++++++++++++++++-------- python/mxnet/base.py | 13 ++++++++ python/mxnet/ndarray/ndarray.py | 59 +++++++++++++++++++++++++++++++-- src/c_api/c_api.cc | 26 +++++++++------ src/ndarray/ndarray.cc | 40 ++++++++++++++++++++++ 12 files changed, 219 insertions(+), 34 deletions(-) create mode 160000 3rdparty/nnvm diff --git a/3rdparty/dmlc-core b/3rdparty/dmlc-core index 649be18a8c55..7d88fa915177 160000 --- a/3rdparty/dmlc-core +++ b/3rdparty/dmlc-core @@ -1 +1 @@ -Subproject commit 649be18a8c55c48517861d67158a45dec54992ee +Subproject commit 7d88fa915177223e524ae5e057ca5f1afa8f6886 diff --git a/3rdparty/googletest b/3rdparty/googletest index ec44c6c1675c..e5e2ef7cd27c 160000 --- a/3rdparty/googletest +++ b/3rdparty/googletest @@ -1 +1 @@ -Subproject commit ec44c6c1675c25b9827aacd08c02433cccde7780 +Subproject commit e5e2ef7cd27cc089c1d8302a11970ef870554294 diff --git a/3rdparty/mkldnn b/3rdparty/mkldnn index 0e7ca738866d..a46b870cce30 160000 --- a/3rdparty/mkldnn +++ b/3rdparty/mkldnn @@ -1 +1 @@ -Subproject commit 0e7ca738866d22cc700aa33b8de120b938f910d0 +Subproject commit a46b870cce30fdef186be4105ab1566fa902c499 diff --git a/3rdparty/mshadow b/3rdparty/mshadow index 463c0dffe3ea..d68d3694fdfb 160000 --- a/3rdparty/mshadow +++ b/3rdparty/mshadow @@ -1 +1 @@ -Subproject commit 463c0dffe3eae8c39caf7989c85b7244823df27e +Subproject commit d68d3694fdfb44fdbb7c840c3591131ff2310a59 diff --git a/3rdparty/nnvm b/3rdparty/nnvm new file mode 160000 index 000000000000..2bc5144cd373 --- /dev/null +++ b/3rdparty/nnvm @@ -0,0 +1 @@ +Subproject commit 2bc5144cd3733fd239287e3560c7db8285d21f02 diff --git a/include/mxnet/c_api.h b/include/mxnet/c_api.h index f13150d59bfc..b248f1d79cb3 100644 --- a/include/mxnet/c_api.h +++ b/include/mxnet/c_api.h @@ -93,6 +93,8 @@ typedef void *CudaModuleHandle; typedef void *CudaKernelHandle; /*! \brief handle to a Profile object (domain, duration, counter, etc.) */ typedef void *ProfileHandle; +/*! 
\brief handle to DLManagedTensor*/ +typedef void *DLManagedTensorHandle; typedef void (*ExecutorMonitorCallback)(const char*, NDArrayHandle, @@ -690,10 +692,36 @@ MXNET_DLL int MXNDArrayGetShape(NDArrayHandle handle, */ MXNET_DLL int MXNDArrayGetData(NDArrayHandle handle, void **out_pdata); -MXNET_DLL int MXNDArrayToDLTensor(NDArrayHandle handle, - void **out_pdltensor); -MXNET_DLL int MXNDArrayFromDLTensor(void *in_pdltensor, - NDArrayHandle *out_handle); +/*! +* \brief Create a reference view of NDArray that +* represents as DLManagedTensor. +* \param handle the handle to the ndarray +* \param out_dlpack pointer holder to get pointer of DLManagedTensor +* \return 0 when success, -1 when failure happens +*/ +MXNET_DLL int MXNDArrayToDLPack(NDArrayHandle handle, + DLManagedTensorHandle *out_dlpack); +/*! +* \brief Create a NDArray backed by a dlpack tensor. +* +* This allows us to create a NDArray using the memory +* allocated by an external deep learning framework +* that is DLPack compatible. +* +* The memory is retained until the NDArray went out of scope. +* +* \param dlpack the pointer of the input DLManagedTensor +* \param out_handle pointer holder to get pointer of NDArray +* \return 0 when success, -1 when failure happens +*/ +MXNET_DLL int MXNDArrayFromDLPack(DLManagedTensorHandle dlpack, + NDArrayHandle *out_handle); +/*! + * \brief Delete a dlpack tensor + * \param dlpack the pointer of the input DLManagedTensor + * \return 0 when success, -1 when failure happens + */ +MXNET_DLL int MXNDArrayCallDLPackDeleter(DLManagedTensorHandle dlpack); /*! * \brief get the type of the data in NDArray * \param handle the handle to the narray diff --git a/include/mxnet/ndarray.h b/include/mxnet/ndarray.h index bae3ea90d5e0..446abab9e5ab 100644 --- a/include/mxnet/ndarray.h +++ b/include/mxnet/ndarray.h @@ -519,6 +519,26 @@ class NDArray { return ret; } + /*! + * \brief Create a reference view of NDArray that + * represents as DLManagedTensor. 
+ * \return A DLManagedTensor + */ + DLManagedTensor* ToDLPack() const; + + /*! + * \brief Create a NDArray backed by a dlpack tensor. + * + * This allows us to create a NDArray using the memory + * allocated by an external deep learning framework + * that is DLPack compatible. + * + * The memory is retained until the NDArray went out of scope. + * + * \return The created NDArray view. + */ + static NDArray FromDLPack(DLManagedTensor* tensor); + /*! * \brief Update ndarray chunk storage handles using existing ndarray storage handles * Also update the aux_handle, aux_shapes and aux_types. diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index 433dc2795a69..1487a64ec7b3 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -338,20 +338,42 @@ class TBlob { } } } - static int DLDataTypeTransform(DLDataType dldata_type_flag) { - switch (dldata_type_flag) { - case DLDataType{kDLFloat, 32, 1}: return mshadow::kFloat32; - case DLDataType{kDLFloat, 64, 1}: return mshadow::kFloat64; - case DLDataType{kDLFloat, 16, 1}: return mshadow::kFloat16; - case DLDataType{kDLUInt, 8, 1}: return mshadow::kUint8; - case DLDataType{kDLInt, 32, 1}: return mshadow::kInt32; - case DLDataType{kDLInt, 8, 1}: return mshadow::kInt8; - case DLDataType{kDLInt, 64, 1}: return mshadow::kInt64; - default: { - LOG(FATAL) << "Unknown dldata_type_flag=" << type_flag; - return mshadow::kFloat32; - } + static int DLDataTypeTransform(DLDataType dldata_type) { + if (dldata_type.lanes != 1) { + LOG(FATAL) << "Unsupported DLDataType whose lanes != 1"; + } + switch (dldata_type.code) { + case kDLFloat: + switch (dldata_type.bits) { + case 16: + return mshadow::kFloat16; + case 32: + return mshadow::kFloat32; + case 64: + return mshadow::kFloat64; + } + break; + case kDLUInt: + switch (dldata_type.bits) { + case 8: + return mshadow::kUint8; + } + break; + case kDLInt: + switch (dldata_type.bits) { + case 8: + return mshadow::kInt8; + case 32: + return 
mshadow::kInt32; + case 64: + return mshadow::kInt64; + } + break; } + LOG(FATAL) << "Unknown DLDataType{" << dldata_type.code + << ", " << dldata_type.bits + << ", " << dldata_type.lanes << "}"; + return mshadow::kFloat32; } inline void SetDLTensor(int dev_mask, int dev_id) { @@ -360,7 +382,7 @@ class TBlob { dltensor_.ndim = shape_.ndim(); dltensor_.dtype = DTypeTransform(type_flag_); dltensor_.shape = shape_.data(); - dltensor_.strides = NULL; + dltensor_.strides = nullptr; dltensor_.byte_offset = 0; } diff --git a/python/mxnet/base.py b/python/mxnet/base.py index 3d8ee0191757..0cd3639ee47e 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ -235,6 +235,7 @@ def _load_lib(): CudaModuleHandle = ctypes.c_void_p CudaKernelHandle = ctypes.c_void_p ProfileHandle = ctypes.c_void_p +DLPackHandle = ctypes.c_void_p #---------------------------- @@ -729,3 +730,15 @@ def write_all_str(module_file, module_all_list): module_op_file.close() write_all_str(module_internal_file, module_internal_all) module_internal_file.close() + + + +ctypes.pythonapi.PyCapsule_New.restype = ctypes.py_object +ctypes.pythonapi.PyCapsule_New.argtypes = [ctypes.c_void_p, ctypes.c_char_p, + ctypes.c_void_p] + +ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p +ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.c_void_p, ctypes.c_char_p] + +ctypes.pythonapi.PyCapsule_SetName.restype = ctypes.c_int +ctypes.pythonapi.PyCapsule_SetName.argtypes = [ctypes.py_object, ctypes.c_char_p] diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 46b21a90d4c6..7a615998897d 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -35,7 +35,7 @@ import numpy as np from ..base import _LIB, numeric_types, integer_types from ..base import c_array, c_array_buf, c_handle_array, mx_real_t -from ..base import mx_uint, NDArrayHandle, check_call +from ..base import mx_uint, NDArrayHandle, check_call, DLPackHandle from ..base import 
ctypes2buffer from ..context import Context, current_context from . import _internal @@ -46,7 +46,7 @@ "ones", "add", "arange", "eye", "divide", "equal", "full", "greater", "greater_equal", "imdecode", "lesser", "lesser_equal", "logical_and", "logical_or", "logical_xor", "maximum", "minimum", "moveaxis", "modulo", "multiply", "not_equal", "onehot_encode", - "power", "subtract", "true_divide", "waitall", "_new_empty_handle", "histogram"] + "power", "subtract", "true_divide", "waitall", "_new_empty_handle", "histogram", "to_dlpack", "from_dlpack"] _STORAGE_TYPE_UNDEFINED = -1 _STORAGE_TYPE_DEFAULT = 0 @@ -2205,6 +2205,8 @@ def tostype(self, stype): """ return op.cast_storage(self, stype=stype) + def asdlpack(self): + return to_dlpack(self) def _get_indexing_dispatch_code(key): """Returns a dispatch code for calling basic or advanced indexing functions.""" @@ -3851,3 +3853,56 @@ def histogram(a, bins=10, range=None): return _internal._histogram(data=a, bin_cnt=bins, range=range) raise ValueError("bins argument should be either an integer or an NDArray") # pylint: enable= no-member, protected-access, redefined-builtin + +def pycapsule_dlpack_deleter(dlpack): + """The deleter of DLPack Tensor + + Parameters + ---------- + dlpack: void * + """ + ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.c_void_p, ctypes.c_char_p] + try: + dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(ctypes.c_void_p(dlpack), b'dltensor')) + except: + dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(ctypes.c_void_p(dlpack), b'used_dltensor')) + check_call(_LIB.MXNDArrayCallDLPackDeleter(dlpack_handle)) + +def to_dlpack(data): + """Returns a reference view of NDArray that represents as DLManagedTensor. + + Parameters + ---------- + data: NDArray + input data. + + Returns + ------- + PyCapsule (the pointer of DLManagedTensor) + a reference view of NDArray that represents as DLManagedTensor. 
+ """ + dlpack = DLPackHandle() + check_call(_LIB.MXNDArrayToDLPack(data.handle, ctypes.byref(dlpack))) + func_def = ctypes.CFUNCTYPE(None, ctypes.c_void_p) + return ctypes.pythonapi.PyCapsule_New(dlpack, b'dltensor', func_def(pycapsule_dlpack_deleter)) + +def from_dlpack(dlpack): + """Returns a NDArray backed by a dlpack tensor. + + Parameters + ---------- + dlpack: PyCapsule (the pointer of DLManagedTensor) + input data + + Returns + ------- + NDArray + a NDArray backed by a dlpack tensor + """ + handle = NDArrayHandle() + ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.py_object, ctypes.c_char_p] + dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, b'dltensor')) + assert dlpack_handle.value != 0, ValueError('Invalid DLPack Tensor. DLTensor capsules can be consumed only once.') + check_call(_LIB.MXNDArrayFromDLPack(dlpack_handle, ctypes.byref(handle))) + ctypes.pythonapi.PyCapsule_SetName(dlpack, b'used_dltensor') + return NDArray(handle=handle) diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc index f8815eda992c..5a6703f4846b 100644 --- a/src/c_api/c_api.cc +++ b/src/c_api/c_api.cc @@ -494,23 +494,29 @@ int MXNDArrayGetData(NDArrayHandle handle, API_END(); } -int MXNDArrayToDLTensor(NDArrayHandle handle, - void **out_pdltensor) { +int MXNDArrayToDLPack(NDArrayHandle handle, + DLManagedTensorHandle *out_dlpack) { API_BEGIN(); NDArray *arr = static_cast(handle); - if (!arr->is_none()) { - *out_pdltensor = arr->data().dlpack(); - } else { - *out_pdltensor = nullptr; - } + *out_dlpack = arr->ToDLPack(); API_END(); } -int MXNDArrayFromDLTensor(void *in_pdltensor, +int MXNDArrayFromDLPack(DLManagedTensorHandle dlpack, NDArrayHandle *out_handle) { API_BEGIN(); - DLTensor *pdltensor = static_cast(in_pdltensor); - *out_handle = new NDArray(TBlob(*pdltensor), pdltensor->device_id); + NDArray *pdata = new NDArray(); + *pdata = NDArray::FromDLPack( + static_cast(dlpack)); + *out_handle = pdata; + API_END(); +} + +int 
MXNDArrayCallDLPackDeleter(DLManagedTensorHandle dlpack) { + API_BEGIN(); + DLManagedTensor *p_dlpack = static_cast(dlpack); + if (p_dlpack) + p_dlpack->deleter(p_dlpack); API_END(); } diff --git a/src/ndarray/ndarray.cc b/src/ndarray/ndarray.cc index 853838a87f4c..9b2ca79bbb81 100644 --- a/src/ndarray/ndarray.cc +++ b/src/ndarray/ndarray.cc @@ -312,6 +312,46 @@ NDArray NDArray::data_ndarray() const { return ret; } +struct NDArrayDLManager { + NDArray handle; // ref NDArray + DLManagedTensor tensor; + TShape strides; // store variable strides +}; + +DLManagedTensor* NDArray::ToDLPack() const { + NDArrayDLManager* dlmanager(new NDArrayDLManager); + dlmanager->handle = *this; + if (!is_none()) { + dlmanager->tensor.dl_tensor = data().dltensor(); + // assign value for dl_tensor.strides + if (!dlmanager->tensor.dl_tensor.strides) { + TShape &strides_ = dlmanager->strides; + strides_ = TShape(shape_.ndim()); + const uint32_t ndim = shape_.ndim(); + if (ndim >= 1) { + strides_[ndim - 1] = 1; + for (uint32_t u = 1, i = ndim - 2; u < ndim; ++u, --i) { + strides_[i] = shape_[i + 1] * strides_[i + 1]; + } + } + dlmanager->tensor.dl_tensor.strides = strides_.data(); + } else { + dlmanager->strides = TShape(dlmanager->tensor.dl_tensor.strides, + dlmanager->tensor.dl_tensor.strides + dlmanager->tensor.dl_tensor.ndim); + } + } + dlmanager->tensor.manager_ctx = dlmanager; + dlmanager->tensor.deleter = [](DLManagedTensor* dlmanager){ + delete static_cast(dlmanager->manager_ctx); + }; + return &(dlmanager->tensor); +} + +NDArray NDArray::FromDLPack(DLManagedTensor* tensor) { + const DLTensor &dl_tensor = tensor->dl_tensor; + return NDArray(TBlob(dl_tensor), dl_tensor.ctx.device_id); +} + bool NDArray::fresh_out_grad() const { if (Imperative::AGInfo::IsNone(*this)) return false; Imperative::AGInfo& info = Imperative::AGInfo::Get(entry_.node); From 8c6e9d2c45251bfd27f9d45b53e227b09f56df21 Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 6 Aug 2018 19:32:42 +0800 Subject: [PATCH 03/26] 
fix dlpack deleter and add unittest for dlpack --- python/mxnet/ndarray/ndarray.py | 4 ++-- tests/python/unittest/test_ndarray.py | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 7a615998897d..886d8e12798f 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -3864,9 +3864,9 @@ def pycapsule_dlpack_deleter(dlpack): ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.c_void_p, ctypes.c_char_p] try: dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(ctypes.c_void_p(dlpack), b'dltensor')) + check_call(_LIB.MXNDArrayCallDLPackDeleter(dlpack_handle)) except: - dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(ctypes.c_void_p(dlpack), b'used_dltensor')) - check_call(_LIB.MXNDArrayCallDLPackDeleter(dlpack_handle)) + pass def to_dlpack(data): """Returns a reference view of NDArray that represents as DLManagedTensor. 
diff --git a/tests/python/unittest/test_ndarray.py b/tests/python/unittest/test_ndarray.py index e55fa1af90e8..5a7ce9ef82d1 100644 --- a/tests/python/unittest/test_ndarray.py +++ b/tests/python/unittest/test_ndarray.py @@ -1334,6 +1334,25 @@ def test_ndarray_cpu_shared_ctx(): res = mx.nd.zeros((1, 2, 3), ctx=ctx) assert(res.context == ctx) +@with_seed() +def test_dlpack(): + for dtype in [np.float32, np.int32]: + for shape in [(3, 4, 5, 6), (2, 10), (15,)]: + a = mx.nd.random.uniform(shape = shape) + a_np = a.asnumpy() + + pack = a.asdlpack() + b = mx.nd.from_dlpack(pack) + + pack2 = mx.nd.to_dlpack(a) + c = mx.nd.from_dlpack(pack2) + + del a, pack, pack2 + + b_np = b.asnumpy() + c_np = c.asnumpy() + mx.test_utils.assert_almost_equal(a_np, b_np) + mx.test_utils.assert_almost_equal(a_np, c_np) if __name__ == '__main__': import nose From 1142787871dbac9afd64e8631e535fb336a13a32 Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 6 Aug 2018 20:20:13 +0800 Subject: [PATCH 04/26] update 3rdparty --- 3rdparty/dmlc-core | 2 +- 3rdparty/googletest | 2 +- 3rdparty/mkldnn | 2 +- 3rdparty/mshadow | 2 +- 3rdparty/nnvm | 1 - 5 files changed, 4 insertions(+), 5 deletions(-) delete mode 160000 3rdparty/nnvm diff --git a/3rdparty/dmlc-core b/3rdparty/dmlc-core index 7d88fa915177..649be18a8c55 160000 --- a/3rdparty/dmlc-core +++ b/3rdparty/dmlc-core @@ -1 +1 @@ -Subproject commit 7d88fa915177223e524ae5e057ca5f1afa8f6886 +Subproject commit 649be18a8c55c48517861d67158a45dec54992ee diff --git a/3rdparty/googletest b/3rdparty/googletest index e5e2ef7cd27c..ec44c6c1675c 160000 --- a/3rdparty/googletest +++ b/3rdparty/googletest @@ -1 +1 @@ -Subproject commit e5e2ef7cd27cc089c1d8302a11970ef870554294 +Subproject commit ec44c6c1675c25b9827aacd08c02433cccde7780 diff --git a/3rdparty/mkldnn b/3rdparty/mkldnn index a46b870cce30..0e7ca738866d 160000 --- a/3rdparty/mkldnn +++ b/3rdparty/mkldnn @@ -1 +1 @@ -Subproject commit a46b870cce30fdef186be4105ab1566fa902c499 +Subproject commit 
0e7ca738866d22cc700aa33b8de120b938f910d0 diff --git a/3rdparty/mshadow b/3rdparty/mshadow index d68d3694fdfb..463c0dffe3ea 160000 --- a/3rdparty/mshadow +++ b/3rdparty/mshadow @@ -1 +1 @@ -Subproject commit d68d3694fdfb44fdbb7c840c3591131ff2310a59 +Subproject commit 463c0dffe3eae8c39caf7989c85b7244823df27e diff --git a/3rdparty/nnvm b/3rdparty/nnvm deleted file mode 160000 index 2bc5144cd373..000000000000 --- a/3rdparty/nnvm +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 2bc5144cd3733fd239287e3560c7db8285d21f02 From 16df8d5aee61875cbaf48ba823a9f92eab0b6ace Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 6 Aug 2018 20:34:58 +0800 Subject: [PATCH 05/26] fix for cpplint --- include/mxnet/tensor_blob.h | 4 +++- src/ndarray/ndarray.cc | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index 1487a64ec7b3..003b69b81e9a 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -104,7 +104,9 @@ class TBlob { : dptr_(dptr), shape_(shape), type_flag_(type_flag) { SetDLTensor(dev_mask, dev_id); } - TBlob(const DLTensor &dltensor) : dptr_(dltensor.data), shape_(TShape(dltensor.shape, dltensor.shape + dltensor.ndim)), type_flag_(DLDataTypeTransform(dltensor.dtype)), dltensor_(dltensor) { + explicit TBlob(const DLTensor &dltensor) : dptr_(dltensor.data), + shape_(TShape(dltensor.shape, dltensor.shape + dltensor.ndim)), + type_flag_(DLDataTypeTransform(dltensor.dtype)), dltensor_(dltensor) { } /*! 
* \brief constructor from tensor diff --git a/src/ndarray/ndarray.cc b/src/ndarray/ndarray.cc index 9b2ca79bbb81..078a8fe5b62e 100644 --- a/src/ndarray/ndarray.cc +++ b/src/ndarray/ndarray.cc @@ -313,9 +313,9 @@ NDArray NDArray::data_ndarray() const { } struct NDArrayDLManager { - NDArray handle; // ref NDArray + NDArray handle; // ref NDArray DLManagedTensor tensor; - TShape strides; // store variable strides + TShape strides; // store variable strides }; DLManagedTensor* NDArray::ToDLPack() const { From bfcffa2db557dd5cc57e936388b778f9902c442f Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 6 Aug 2018 20:56:43 +0800 Subject: [PATCH 06/26] fix pylint and add destructor for dlpack --- python/mxnet/_ctypes/ndarray.py | 7 +++++-- python/mxnet/base.py | 2 +- python/mxnet/cython/base.pyi | 1 + python/mxnet/cython/ndarray.pyx | 6 +++++- python/mxnet/ndarray/ndarray.py | 14 +++++++++----- 5 files changed, 21 insertions(+), 9 deletions(-) diff --git a/python/mxnet/_ctypes/ndarray.py b/python/mxnet/_ctypes/ndarray.py index f324545a2352..fd7678e29c66 100644 --- a/python/mxnet/_ctypes/ndarray.py +++ b/python/mxnet/_ctypes/ndarray.py @@ -31,10 +31,10 @@ class NDArrayBase(object): """Base data structure for ndarray""" - __slots__ = ["handle", "writable"] + __slots__ = ["handle", "writable", "dlpack_handle"] # pylint: disable= no-member - def __init__(self, handle, writable=True): + def __init__(self, handle, writable=True, dlpack_handle=None): """initialize a new NDArray Parameters @@ -46,9 +46,12 @@ def __init__(self, handle, writable=True): assert isinstance(handle, NDArrayHandle) self.handle = handle self.writable = writable + self.dlpack_handle = dlpack_handle def __del__(self): check_call(_LIB.MXNDArrayFree(self.handle)) + if self.dlpack_handle is not None: + check_call(_LIB.MXNDArrayCallDLPackDeleter(self.dlpack_handle)) def __reduce__(self): return (_ndarray_cls, (None,), self.__getstate__()) diff --git a/python/mxnet/base.py b/python/mxnet/base.py index 
0cd3639ee47e..14535d9d254b 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ -738,7 +738,7 @@ def write_all_str(module_file, module_all_list): ctypes.c_void_p] ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p -ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.c_void_p, ctypes.c_char_p] +# ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.c_void_p, ctypes.c_char_p] or [ctypes.py_object, ctypes.c_char_p] ctypes.pythonapi.PyCapsule_SetName.restype = ctypes.c_int ctypes.pythonapi.PyCapsule_SetName.argtypes = [ctypes.py_object, ctypes.c_char_p] diff --git a/python/mxnet/cython/base.pyi b/python/mxnet/cython/base.pyi index d73e1a7d0194..8e2332e67b19 100644 --- a/python/mxnet/cython/base.pyi +++ b/python/mxnet/cython/base.pyi @@ -8,6 +8,7 @@ ctypedef void* SymbolHandle ctypedef void* NDArrayHandle ctypedef void* OpHandle ctypedef void* CachedOpHandle +ctypedef void* DLPackHandle ctypedef unsigned nn_uint cdef py_str(const char* x): diff --git a/python/mxnet/cython/ndarray.pyx b/python/mxnet/cython/ndarray.pyx index 319dc492dbb8..3d687363ac8c 100644 --- a/python/mxnet/cython/ndarray.pyx +++ b/python/mxnet/cython/ndarray.pyx @@ -30,6 +30,7 @@ cdef class NDArrayBase: # handle for symbolic operator. 
cdef NDArrayHandle chandle cdef int cwritable + cdef DLPackHandle cdlpack_handle cdef _set_handle(self, handle): cdef unsigned long long ptr @@ -52,12 +53,15 @@ cdef class NDArrayBase: def __get__(self): return bool(self.cwritable) - def __init__(self, handle, writable=True): + def __init__(self, handle, writable=True, dlpack_handle=None): self._set_handle(handle) self.cwritable = writable + self.cdlpack_handle = dlpack_handle def __dealloc__(self): CALL(MXNDArrayFree(self.chandle)) + if self.cdlpack_handle: + CALL(MXNDArrayCallDLPackDeleter(self.cdlpack_handle)) def __reduce__(self): return (_ndarray_cls, (None,), self.__getstate__()) diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 886d8e12798f..4ed9c116fd11 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -46,7 +46,8 @@ "ones", "add", "arange", "eye", "divide", "equal", "full", "greater", "greater_equal", "imdecode", "lesser", "lesser_equal", "logical_and", "logical_or", "logical_xor", "maximum", "minimum", "moveaxis", "modulo", "multiply", "not_equal", "onehot_encode", - "power", "subtract", "true_divide", "waitall", "_new_empty_handle", "histogram", "to_dlpack", "from_dlpack"] + "power", "subtract", "true_divide", "waitall", "_new_empty_handle", "histogram", + "to_dlpack", "from_dlpack"] _STORAGE_TYPE_UNDEFINED = -1 _STORAGE_TYPE_DEFAULT = 0 @@ -3863,9 +3864,11 @@ def pycapsule_dlpack_deleter(dlpack): """ ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.c_void_p, ctypes.c_char_p] try: - dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(ctypes.c_void_p(dlpack), b'dltensor')) + dlpack_handle = ctypes.c_void_p( + ctypes.pythonapi.PyCapsule_GetPointer( + ctypes.c_void_p(dlpack), b'dltensor')) check_call(_LIB.MXNDArrayCallDLPackDeleter(dlpack_handle)) - except: + except ValueError: pass def to_dlpack(data): @@ -3902,7 +3905,8 @@ def from_dlpack(dlpack): handle = NDArrayHandle() 
ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.py_object, ctypes.c_char_p] dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, b'dltensor')) - assert dlpack_handle.value != 0, ValueError('Invalid DLPack Tensor. DLTensor capsules can be consumed only once.') + assert dlpack_handle.value != 0, ValueError( + 'Invalid DLPack Tensor. DLTensor capsules can be consumed only once.') check_call(_LIB.MXNDArrayFromDLPack(dlpack_handle, ctypes.byref(handle))) ctypes.pythonapi.PyCapsule_SetName(dlpack, b'used_dltensor') - return NDArray(handle=handle) + return NDArray(handle=handle, dlpack_handle=dlpack_handle) From f5c2552419d66602a9911e44dd787ce5a52efa4d Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 6 Aug 2018 21:10:30 +0800 Subject: [PATCH 07/26] fix pylint in base.py --- python/mxnet/base.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/python/mxnet/base.py b/python/mxnet/base.py index 14535d9d254b..13dc86c7a68f 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ -733,12 +733,19 @@ def write_all_str(module_file, module_all_list): +""" +Assign ctypes types to specify the result type and the argument types +of the foreign functions +""" ctypes.pythonapi.PyCapsule_New.restype = ctypes.py_object ctypes.pythonapi.PyCapsule_New.argtypes = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p] ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p -# ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.c_void_p, ctypes.c_char_p] or [ctypes.py_object, ctypes.c_char_p] +""" +ctypes.pythonapi.PyCapsule_GetPointer.argtypes = + [ctypes.py_object|ctypes.c_void_p, ctypes.c_char_p] +""" ctypes.pythonapi.PyCapsule_SetName.restype = ctypes.c_int ctypes.pythonapi.PyCapsule_SetName.argtypes = [ctypes.py_object, ctypes.c_char_p] From 98b5d117f229497a7f8be7953045d177ffdbc844 Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 6 Aug 2018 21:15:02 +0800 Subject: [PATCH 08/26] fix lint in base.py --- 
python/mxnet/base.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/python/mxnet/base.py b/python/mxnet/base.py index 13dc86c7a68f..27ebd491c2e4 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ -731,21 +731,11 @@ def write_all_str(module_file, module_all_list): write_all_str(module_internal_file, module_internal_all) module_internal_file.close() - - -""" -Assign ctypes types to specify the result type and the argument types -of the foreign functions -""" ctypes.pythonapi.PyCapsule_New.restype = ctypes.py_object ctypes.pythonapi.PyCapsule_New.argtypes = [ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p] ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p -""" -ctypes.pythonapi.PyCapsule_GetPointer.argtypes = - [ctypes.py_object|ctypes.c_void_p, ctypes.c_char_p] -""" ctypes.pythonapi.PyCapsule_SetName.restype = ctypes.c_int ctypes.pythonapi.PyCapsule_SetName.argtypes = [ctypes.py_object, ctypes.c_char_p] From 7bdde8f282065a74ad6743ebf6e708007382c55f Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 6 Aug 2018 21:26:31 +0800 Subject: [PATCH 09/26] add document for DLPack transformation API --- include/mxnet/tensor_blob.h | 4 ++++ python/mxnet/ndarray/ndarray.py | 35 +++++++++++++++++++++++++++++++++ 2 files changed, 39 insertions(+) diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index 003b69b81e9a..25ccabc4ba78 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -104,6 +104,10 @@ class TBlob { : dptr_(dptr), shape_(shape), type_flag_(type_flag) { SetDLTensor(dev_mask, dev_id); } + /*! 
+ * \brief constructor that construct TBlob from DLTensor + * \param DLTensor Object + */ explicit TBlob(const DLTensor &dltensor) : dptr_(dltensor.data), shape_(TShape(dltensor.shape, dltensor.shape + dltensor.ndim)), type_flag_(DLDataTypeTransform(dltensor.dtype)), dltensor_(dltensor) { diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 4ed9c116fd11..776f0bb2e4eb 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -2207,6 +2207,20 @@ def tostype(self, stype): return op.cast_storage(self, stype=stype) def asdlpack(self): + """Returns a reference view of NDArray that represents as DLManagedTensor. + + Returns + ------- + PyCapsule (the pointer of DLManagedTensor) + a reference view of NDArray that represents as DLManagedTensor. + + Examples + -------- + >>> x = mx.nd.ones((2,3)) + >>> y = x.asdlpack() + >>> type(y) + + """ return to_dlpack(self) def _get_indexing_dispatch_code(key): @@ -3883,6 +3897,13 @@ def to_dlpack(data): ------- PyCapsule (the pointer of DLManagedTensor) a reference view of NDArray that represents as DLManagedTensor. + + Examples + -------- + >>> x = mx.nd.ones((2,3)) + >>> y = mx.nd.to_dlpack(x) + >>> type(y) + """ dlpack = DLPackHandle() check_call(_LIB.MXNDArrayToDLPack(data.handle, ctypes.byref(dlpack))) @@ -3901,6 +3922,20 @@ def from_dlpack(dlpack): ------- NDArray a NDArray backed by a dlpack tensor + + Examples + -------- + >>> x = mx.nd.ones((2,3)) + >>> y = mx.nd.to_dlpack(x) + >>> type(y) + + >>> z = mx.nd.from_dlpack(y) + >>> type(z) + + >>> z + [[ 1. 1. 1.] + [ 1. 1. 
1.]] + """ handle = NDArrayHandle() ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.py_object, ctypes.c_char_p] From f225d27a7a0e66e93dc9529e22510da022de2e1f Mon Sep 17 00:00:00 2001 From: wkcn Date: Tue, 7 Aug 2018 14:47:02 +0800 Subject: [PATCH 10/26] add to_dlpack_for_read and to_dlpack_for_write --- include/mxnet/c_api.h | 33 +++++-- python/mxnet/_ctypes/ndarray.py | 10 +-- python/mxnet/base.py | 4 + python/mxnet/cython/ndarray.pyx | 8 +- python/mxnet/ndarray/ndarray.py | 119 ++++++++++++++++++++------ src/c_api/c_api.cc | 43 +++++++++- src/ndarray/ndarray.cc | 39 +++++---- tests/python/unittest/test_ndarray.py | 18 +++- 8 files changed, 207 insertions(+), 67 deletions(-) diff --git a/include/mxnet/c_api.h b/include/mxnet/c_api.h index 625f5d1d9fda..d19c9281519f 100644 --- a/include/mxnet/c_api.h +++ b/include/mxnet/c_api.h @@ -95,6 +95,8 @@ typedef void *CudaKernelHandle; typedef void *ProfileHandle; /*! \brief handle to DLManagedTensor*/ typedef void *DLManagedTensorHandle; +/*! \brief handle to PyObject*/ +typedef void *PyObjectHandle; typedef void (*ExecutorMonitorCallback)(const char*, NDArrayHandle, @@ -645,14 +647,14 @@ MXNET_DLL int MXNDArraySyncCheckFormat(NDArrayHandle handle, const bool full_che * \param handle the NDArray handle * \return 0 when success, -1 when failure happens */ -MXNET_DLL int MXNDArrayWaitToRead(NDArrayHandle handle); +MXNET_DLL int MXNDArrayWaitForRead(NDArrayHandle handle); /*! * \brief Wait until all the pending read/write with respect NDArray are finished. * Always call this before write data into NDArray synchronizely. * \param handle the NDArray handle * \return 0 when success, -1 when failure happens */ -MXNET_DLL int MXNDArrayWaitToWrite(NDArrayHandle handle); +MXNET_DLL int MXNDArrayWaitForWrite(NDArrayHandle handle); /*! * \brief wait until all delayed operations in * the system is completed @@ -741,13 +743,26 @@ MXNET_DLL int MXNDArrayGetData(NDArrayHandle handle, void **out_pdata); /*! 
* \brief Create a reference view of NDArray that -* represents as DLManagedTensor. +* represents as DLManagedTensor until +* all the pending writes with respect NDArray are finished. * \param handle the handle to the ndarray * \param out_dlpack pointer holder to get pointer of DLManagedTensor * \return 0 when success, -1 when failure happens */ -MXNET_DLL int MXNDArrayToDLPack(NDArrayHandle handle, - DLManagedTensorHandle *out_dlpack); +MXNET_DLL int MXNDArrayToDLPackForRead(NDArrayHandle handle, + DLManagedTensorHandle *out_dlpack); + +/*! +* \brief Create a reference view of NDArray that +* represents as DLManagedTensor until +* all the pending read/write with respect NDArray are finished. +* \param handle the handle to the ndarray +* \param out_dlpack pointer holder to get pointer of DLManagedTensor +* \return 0 when success, -1 when failure happens +*/ +MXNET_DLL int MXNDArrayToDLPackForWrite(NDArrayHandle handle, + DLManagedTensorHandle *out_dlpack); + /*! * \brief Create a NDArray backed by a dlpack tensor. * @@ -769,6 +784,14 @@ MXNET_DLL int MXNDArrayFromDLPack(DLManagedTensorHandle dlpack, * \return 0 when success, -1 when failure happens */ MXNET_DLL int MXNDArrayCallDLPackDeleter(DLManagedTensorHandle dlpack); + +/*! + * \brief Delete a dlpack tensor + * \param dlpack_capsule the pointer of a PyCapsule storing DLManagedTensor + * \return 0 when success, -1 when failure happens + */ +MXNET_DLL void MXNDArrayCallDLPackCapsuleDeleter(PyObjectHandle dlpack_capsule); + /*! 
* \brief get the type of the data in NDArray * \param handle the handle to the narray diff --git a/python/mxnet/_ctypes/ndarray.py b/python/mxnet/_ctypes/ndarray.py index fd7678e29c66..7af00bd4a3ef 100644 --- a/python/mxnet/_ctypes/ndarray.py +++ b/python/mxnet/_ctypes/ndarray.py @@ -31,27 +31,27 @@ class NDArrayBase(object): """Base data structure for ndarray""" - __slots__ = ["handle", "writable", "dlpack_handle"] + __slots__ = ["handle", "writable", "dlpack"] # pylint: disable= no-member - def __init__(self, handle, writable=True, dlpack_handle=None): + def __init__(self, handle, writable=True, dlpack=None): """initialize a new NDArray Parameters ---------- handle : NDArrayHandle NDArray handle of C API + dlpack : PyCapsule (DLPack) + DLPack Object """ if handle is not None: assert isinstance(handle, NDArrayHandle) self.handle = handle self.writable = writable - self.dlpack_handle = dlpack_handle + self.dlpack = dlpack def __del__(self): check_call(_LIB.MXNDArrayFree(self.handle)) - if self.dlpack_handle is not None: - check_call(_LIB.MXNDArrayCallDLPackDeleter(self.dlpack_handle)) def __reduce__(self): return (_ndarray_cls, (None,), self.__getstate__()) diff --git a/python/mxnet/base.py b/python/mxnet/base.py index 27ebd491c2e4..cff483671e7c 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ -736,6 +736,10 @@ def write_all_str(module_file, module_all_list): ctypes.c_void_p] ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p +ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.py_object, ctypes.c_char_p] ctypes.pythonapi.PyCapsule_SetName.restype = ctypes.c_int ctypes.pythonapi.PyCapsule_SetName.argtypes = [ctypes.py_object, ctypes.c_char_p] + +_LIB.MXNDArrayCallDLPackCapsuleDeleter.restype = None +_LIB.MXNDArrayCallDLPackCapsuleDeleter.argtypes = [ctypes.c_void_p] diff --git a/python/mxnet/cython/ndarray.pyx b/python/mxnet/cython/ndarray.pyx index 3d687363ac8c..346b66d60681 100644 --- a/python/mxnet/cython/ndarray.pyx +++ 
b/python/mxnet/cython/ndarray.pyx @@ -30,7 +30,7 @@ cdef class NDArrayBase: # handle for symbolic operator. cdef NDArrayHandle chandle cdef int cwritable - cdef DLPackHandle cdlpack_handle + cdef object dlpack cdef _set_handle(self, handle): cdef unsigned long long ptr @@ -53,15 +53,13 @@ cdef class NDArrayBase: def __get__(self): return bool(self.cwritable) - def __init__(self, handle, writable=True, dlpack_handle=None): + def __init__(self, handle, writable=True, dlpack=None): self._set_handle(handle) self.cwritable = writable - self.cdlpack_handle = dlpack_handle + self.dlpack = dlpack def __dealloc__(self): CALL(MXNDArrayFree(self.chandle)) - if self.cdlpack_handle: - CALL(MXNDArrayCallDLPackDeleter(self.cdlpack_handle)) def __reduce__(self): return (_ndarray_cls, (None,), self.__getstate__()) diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 776f0bb2e4eb..03a7f30fdd50 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -47,7 +47,7 @@ "imdecode", "lesser", "lesser_equal", "logical_and", "logical_or", "logical_xor", "maximum", "minimum", "moveaxis", "modulo", "multiply", "not_equal", "onehot_encode", "power", "subtract", "true_divide", "waitall", "_new_empty_handle", "histogram", - "to_dlpack", "from_dlpack"] + "to_dlpack_for_read", "to_dlpack_for_write", "from_dlpack"] _STORAGE_TYPE_UNDEFINED = -1 _STORAGE_TYPE_DEFAULT = 0 @@ -179,7 +179,6 @@ class NDArray(NDArrayBase): # See C++ side of definition(kTVMNDArrayTypeCode) at include/mxmet/tensor_blob.h _tvm_tcode = 19 # pylint: disable= no-member, undefined-variable - @property def _tvm_handle(self): return self.handle.value @@ -361,7 +360,7 @@ def __len__(self): def __getstate__(self): handle = self.handle - this = {'handle' : None} + this = {'handle' : None, 'dlpack' : self.dlpack} if handle is not None: length = ctypes.c_size_t() cptr = ctypes.POINTER(ctypes.c_char)() @@ -383,6 +382,7 @@ def __setstate__(self, state): self.handle = handle 
else: self.handle = None + self.dlpack = state['dlpack'] # pylint: disable=line-too-long def __setitem__(self, key, value): @@ -2206,8 +2206,9 @@ def tostype(self, stype): """ return op.cast_storage(self, stype=stype) - def asdlpack(self): - """Returns a reference view of NDArray that represents as DLManagedTensor. + def to_dlpack_for_read(self): + """Returns a reference view of NDArray that represents as DLManagedTensor until + all previous write operations on the current array are finished. Returns ------- @@ -2217,11 +2218,40 @@ def asdlpack(self): Examples -------- >>> x = mx.nd.ones((2,3)) - >>> y = x.asdlpack() + >>> y = mx.nd.to_dlpack_for_read(x) >>> type(y) + >>> z = mx.nd.from_dlpack(y) + >>> z + [[1. 1. 1.] + [1. 1. 1.]] + """ - return to_dlpack(self) + return to_dlpack_for_read(self) + + def to_dlpack_for_write(self): + """Returns a reference view of NDArray that represents as DLManagedTensor until + all previous read/write operations on the current array are finished. + + Returns + ------- + PyCapsule (the pointer of DLManagedTensor) + a reference view of NDArray that represents as DLManagedTensor. + + Examples + -------- + >>> x = mx.nd.ones((2,3)) + >>> w = mx.nd.to_dlpack_for_write(x) + >>> type(w) + + >>> u = mx.nd.from_dlpack(w) + >>> u += 1 + >>> x + [[2. 2. 2.] + [2. 2. 
2.]] + + """ + return to_dlpack_for_write(self) def _get_indexing_dispatch_code(key): """Returns a dispatch code for calling basic or advanced indexing functions.""" @@ -3869,24 +3899,41 @@ def histogram(a, bins=10, range=None): raise ValueError("bins argument should be either an integer or an NDArray") # pylint: enable= no-member, protected-access, redefined-builtin -def pycapsule_dlpack_deleter(dlpack): - """The deleter of DLPack Tensor +pycapsule_dlpack_deleter = ctypes.CFUNCTYPE(None, ctypes.c_void_p)(_LIB.MXNDArrayCallDLPackCapsuleDeleter) + +def to_dlpack_for_read(data): + """Returns a reference view of NDArray that represents as DLManagedTensor until + all previous write operations on the current array are finished. Parameters ---------- - dlpack: void * + data: NDArray + input data. + + Returns + ------- + PyCapsule (the pointer of DLManagedTensor) + a reference view of NDArray that represents as DLManagedTensor. + + Examples + -------- + >>> x = mx.nd.ones((2,3)) + >>> y = mx.nd.to_dlpack_for_read(x) + >>> type(y) + + >>> z = mx.nd.from_dlpack(y) + >>> z + [[1. 1. 1.] + [1. 1. 1.]] + """ - ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.c_void_p, ctypes.c_char_p] - try: - dlpack_handle = ctypes.c_void_p( - ctypes.pythonapi.PyCapsule_GetPointer( - ctypes.c_void_p(dlpack), b'dltensor')) - check_call(_LIB.MXNDArrayCallDLPackDeleter(dlpack_handle)) - except ValueError: - pass + dlpack = DLPackHandle() + check_call(_LIB.MXNDArrayToDLPackForRead(data.handle, ctypes.byref(dlpack))) + return ctypes.pythonapi.PyCapsule_New(dlpack, b'dltensor', pycapsule_dlpack_deleter) -def to_dlpack(data): - """Returns a reference view of NDArray that represents as DLManagedTensor. +def to_dlpack_for_write(data): + """Returns a reference view of NDArray that represents as DLManagedTensor until + all previous read/write operations on the current array are finished. 
Parameters ---------- @@ -3901,14 +3948,19 @@ def to_dlpack(data): Examples -------- >>> x = mx.nd.ones((2,3)) - >>> y = mx.nd.to_dlpack(x) - >>> type(y) + >>> w = mx.nd.to_dlpack_for_write(x) + >>> type(w) + >>> u = mx.nd.from_dlpack(w) + >>> u += 1 + >>> x + [[2. 2. 2.] + [2. 2. 2.]] + """ dlpack = DLPackHandle() - check_call(_LIB.MXNDArrayToDLPack(data.handle, ctypes.byref(dlpack))) - func_def = ctypes.CFUNCTYPE(None, ctypes.c_void_p) - return ctypes.pythonapi.PyCapsule_New(dlpack, b'dltensor', func_def(pycapsule_dlpack_deleter)) + check_call(_LIB.MXNDArrayToDLPackForWrite(data.handle, ctypes.byref(dlpack))) + return ctypes.pythonapi.PyCapsule_New(dlpack, b'dltensor', pycapsule_dlpack_deleter) def from_dlpack(dlpack): """Returns a NDArray backed by a dlpack tensor. @@ -3926,7 +3978,7 @@ def from_dlpack(dlpack): Examples -------- >>> x = mx.nd.ones((2,3)) - >>> y = mx.nd.to_dlpack(x) + >>> y = mx.nd.to_dlpack_for_read(x) >>> type(y) >>> z = mx.nd.from_dlpack(y) @@ -3936,12 +3988,23 @@ def from_dlpack(dlpack): [[ 1. 1. 1.] [ 1. 1. 1.]] + + >>> w = mx.nd.to_dlpack_for_write(x) + >>> type(w) + + >>> u = mx.nd.from_dlpack(w) + >>> u += 1 + >>> x + [[2. 2. 2.] + [2. 2. 2.]] + """ handle = NDArrayHandle() - ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.py_object, ctypes.c_char_p] dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, b'dltensor')) assert dlpack_handle.value != 0, ValueError( 'Invalid DLPack Tensor. 
DLTensor capsules can be consumed only once.') check_call(_LIB.MXNDArrayFromDLPack(dlpack_handle, ctypes.byref(handle))) + # copy dlpack + dlpack_copy = ctypes.pythonapi.PyCapsule_New(dlpack_handle, b'dltensor', pycapsule_dlpack_deleter) ctypes.pythonapi.PyCapsule_SetName(dlpack, b'used_dltensor') - return NDArray(handle=handle, dlpack_handle=dlpack_handle) + return NDArray(handle=handle, dlpack=dlpack_copy) diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc index 5a6703f4846b..fca48d96039f 100644 --- a/src/c_api/c_api.cc +++ b/src/c_api/c_api.cc @@ -494,9 +494,19 @@ int MXNDArrayGetData(NDArrayHandle handle, API_END(); } -int MXNDArrayToDLPack(NDArrayHandle handle, +int MXNDArrayToDLPackForRead(NDArrayHandle handle, DLManagedTensorHandle *out_dlpack) { API_BEGIN(); + MXNDArrayWaitToRead(handle); + NDArray *arr = static_cast(handle); + *out_dlpack = arr->ToDLPack(); + API_END(); +} + +int MXNDArrayToDLPackForWrite(NDArrayHandle handle, + DLManagedTensorHandle *out_dlpack) { + API_BEGIN(); + MXNDArrayWaitToWrite(handle); NDArray *arr = static_cast(handle); *out_dlpack = arr->ToDLPack(); API_END(); @@ -514,12 +524,37 @@ int MXNDArrayFromDLPack(DLManagedTensorHandle dlpack, int MXNDArrayCallDLPackDeleter(DLManagedTensorHandle dlpack) { API_BEGIN(); - DLManagedTensor *p_dlpack = static_cast(dlpack); - if (p_dlpack) - p_dlpack->deleter(p_dlpack); + if (dlpack) { + DLManagedTensor *p_dlpack = static_cast(dlpack); + p_dlpack->deleter(p_dlpack); + } API_END(); } + +typedef struct { + char py_object[16]; + void *pointer; + const char *name; + void *context; + void (*destructor)(void *); +} FakePyCapsule; + +void* PyCapsule_GetPointer(PyObjectHandle o, const char *name) { + FakePyCapsule *p_capsule = static_cast(o); + if (p_capsule == nullptr || p_capsule->pointer == nullptr) { + LOG(FATAL) << "PyCapsule_GetPointer called with invalid PyCapsule"; + } + if (strncmp(p_capsule->name, name, strlen(name)) != 0) { + return nullptr; + } + return p_capsule->pointer; +} + +void 
MXNDArrayCallDLPackCapsuleDeleter(PyObjectHandle dlpack_capsule) { + MXNDArrayCallDLPackDeleter(PyCapsule_GetPointer(dlpack_capsule, "dltensor")); +} + int MXNDArrayGetDType(NDArrayHandle handle, int *out_dtype) { API_BEGIN(); diff --git a/src/ndarray/ndarray.cc b/src/ndarray/ndarray.cc index 078a8fe5b62e..83135eabd9bd 100644 --- a/src/ndarray/ndarray.cc +++ b/src/ndarray/ndarray.cc @@ -315,7 +315,6 @@ NDArray NDArray::data_ndarray() const { struct NDArrayDLManager { NDArray handle; // ref NDArray DLManagedTensor tensor; - TShape strides; // store variable strides }; DLManagedTensor* NDArray::ToDLPack() const { @@ -323,22 +322,6 @@ DLManagedTensor* NDArray::ToDLPack() const { dlmanager->handle = *this; if (!is_none()) { dlmanager->tensor.dl_tensor = data().dltensor(); - // assign value for dl_tensor.strides - if (!dlmanager->tensor.dl_tensor.strides) { - TShape &strides_ = dlmanager->strides; - strides_ = TShape(shape_.ndim()); - const uint32_t ndim = shape_.ndim(); - if (ndim >= 1) { - strides_[ndim - 1] = 1; - for (uint32_t u = 1, i = ndim - 2; u < ndim; ++u, --i) { - strides_[i] = shape_[i + 1] * strides_[i + 1]; - } - } - dlmanager->tensor.dl_tensor.strides = strides_.data(); - } else { - dlmanager->strides = TShape(dlmanager->tensor.dl_tensor.strides, - dlmanager->tensor.dl_tensor.strides + dlmanager->tensor.dl_tensor.ndim); - } } dlmanager->tensor.manager_ctx = dlmanager; dlmanager->tensor.deleter = [](DLManagedTensor* dlmanager){ @@ -349,6 +332,28 @@ DLManagedTensor* NDArray::ToDLPack() const { NDArray NDArray::FromDLPack(DLManagedTensor* tensor) { const DLTensor &dl_tensor = tensor->dl_tensor; + if (dl_tensor.strides != nullptr) { + // check strides + const int &ndim = dl_tensor.ndim; + const int64_t *shape = dl_tensor.shape; + const int64_t *strides = dl_tensor.strides; + if (ndim >= 1) { + bool err = false; + if (strides[ndim - 1] != 1) { + err = true; + } else { + for (int i = ndim - 2; i >= 0; --i) { + if (strides[i] != shape[i + 1] * strides[i + 1]) { 
+ err = true; + break; + } + } + } + if (err) { + LOG(FATAL) << "Unsupported DLPack because MXNet only support compact tensor now"; + } + } + } return NDArray(TBlob(dl_tensor), dl_tensor.ctx.device_id); } diff --git a/tests/python/unittest/test_ndarray.py b/tests/python/unittest/test_ndarray.py index e4456106e675..dc2e2c5ab380 100644 --- a/tests/python/unittest/test_ndarray.py +++ b/tests/python/unittest/test_ndarray.py @@ -1347,18 +1347,30 @@ def test_dlpack(): a = mx.nd.random.uniform(shape = shape) a_np = a.asnumpy() - pack = a.asdlpack() + pack = a.to_dlpack_for_read() b = mx.nd.from_dlpack(pack) - pack2 = mx.nd.to_dlpack(a) + a_copy = a.copy() + pack2 = a_copy.to_dlpack_for_write() c = mx.nd.from_dlpack(pack2) - del a, pack, pack2 + pack3 = mx.nd.to_dlpack_for_read(a) + d = mx.nd.from_dlpack(pack3) + + a_copy = a.copy() + pack4 = mx.nd.to_dlpack_for_write(a_copy) + e = mx.nd.from_dlpack(pack4) + + del a, pack, pack2, pack3, pack4 b_np = b.asnumpy() c_np = c.asnumpy() + d_np = d.asnumpy() + e_np = e.asnumpy() mx.test_utils.assert_almost_equal(a_np, b_np) mx.test_utils.assert_almost_equal(a_np, c_np) + mx.test_utils.assert_almost_equal(a_np, d_np) + mx.test_utils.assert_almost_equal(a_np, e_np) if __name__ == '__main__': import nose From afc1518f9c42c969a4e26c6c252337266ba88094 Mon Sep 17 00:00:00 2001 From: wkcn Date: Tue, 7 Aug 2018 14:54:35 +0800 Subject: [PATCH 11/26] fix lint for ndarray.py and fix typo in c_api.h --- include/mxnet/c_api.h | 2 +- python/mxnet/ndarray/ndarray.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/include/mxnet/c_api.h b/include/mxnet/c_api.h index d19c9281519f..90a46593add1 100644 --- a/include/mxnet/c_api.h +++ b/include/mxnet/c_api.h @@ -755,7 +755,7 @@ MXNET_DLL int MXNDArrayToDLPackForRead(NDArrayHandle handle, /*! * \brief Create a reference view of NDArray that * represents as DLManagedTensor until -* all the pending read/write with respect NDArray are finished. 
+* all the pending reads/writes with respect NDArray are finished. * \param handle the handle to the ndarray * \param out_dlpack pointer holder to get pointer of DLManagedTensor * \return 0 when success, -1 when failure happens diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 03a7f30fdd50..1117022219ef 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -3899,7 +3899,8 @@ def histogram(a, bins=10, range=None): raise ValueError("bins argument should be either an integer or an NDArray") # pylint: enable= no-member, protected-access, redefined-builtin -pycapsule_dlpack_deleter = ctypes.CFUNCTYPE(None, ctypes.c_void_p)(_LIB.MXNDArrayCallDLPackCapsuleDeleter) +pycapsule_dlpack_deleter = ctypes.CFUNCTYPE(None, ctypes.c_void_p)( + _LIB.MXNDArrayCallDLPackCapsuleDeleter) def to_dlpack_for_read(data): """Returns a reference view of NDArray that represents as DLManagedTensor until @@ -4005,6 +4006,7 @@ def from_dlpack(dlpack): 'Invalid DLPack Tensor. 
DLTensor capsules can be consumed only once.') check_call(_LIB.MXNDArrayFromDLPack(dlpack_handle, ctypes.byref(handle))) # copy dlpack - dlpack_copy = ctypes.pythonapi.PyCapsule_New(dlpack_handle, b'dltensor', pycapsule_dlpack_deleter) + dlpack_copy = ctypes.pythonapi.PyCapsule_New( + dlpack_handle, b'dltensor', pycapsule_dlpack_deleter) ctypes.pythonapi.PyCapsule_SetName(dlpack, b'used_dltensor') return NDArray(handle=handle, dlpack=dlpack_copy) From 8b397fd4264236e2e9e8307bda8ba9316695a2e7 Mon Sep 17 00:00:00 2001 From: wkcn Date: Tue, 7 Aug 2018 15:03:02 +0800 Subject: [PATCH 12/26] fix function name error in c_api --- include/mxnet/c_api.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/include/mxnet/c_api.h b/include/mxnet/c_api.h index 90a46593add1..b8b2feb2b3a0 100644 --- a/include/mxnet/c_api.h +++ b/include/mxnet/c_api.h @@ -647,14 +647,14 @@ MXNET_DLL int MXNDArraySyncCheckFormat(NDArrayHandle handle, const bool full_che * \param handle the NDArray handle * \return 0 when success, -1 when failure happens */ -MXNET_DLL int MXNDArrayWaitForRead(NDArrayHandle handle); +MXNET_DLL int MXNDArrayWaitToRead(NDArrayHandle handle); /*! * \brief Wait until all the pending read/write with respect NDArray are finished. * Always call this before write data into NDArray synchronizely. * \param handle the NDArray handle * \return 0 when success, -1 when failure happens */ -MXNET_DLL int MXNDArrayWaitForWrite(NDArrayHandle handle); +MXNET_DLL int MXNDArrayWaitToWrite(NDArrayHandle handle); /*! 
* \brief wait until all delayed operations in * the system is completed From d48074a665090e41c03b4eb03a0fc8530cb76cfc Mon Sep 17 00:00:00 2001 From: wkcn Date: Tue, 7 Aug 2018 20:44:56 +0800 Subject: [PATCH 13/26] update code indent in tensor_blob.h ans c_api.cc, remove unused type in cython/base.pyi --- include/mxnet/tensor_blob.h | 54 ++++++++++++++++++------------------ python/mxnet/cython/base.pyi | 1 - src/c_api/c_api.cc | 2 +- 3 files changed, 28 insertions(+), 29 deletions(-) diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index 25ccabc4ba78..6ad8fb4acfd0 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -346,35 +346,35 @@ class TBlob { } static int DLDataTypeTransform(DLDataType dldata_type) { if (dldata_type.lanes != 1) { - LOG(FATAL) << "Unsupported DLDataType whose lanes != 1"; + LOG(FATAL) << "Unsupported DLDataType whose lanes != 1"; } switch (dldata_type.code) { - case kDLFloat: - switch (dldata_type.bits) { - case 16: - return mshadow::kFloat16; - case 32: - return mshadow::kFloat32; - case 64: - return mshadow::kFloat64; - } - break; - case kDLUInt: - switch (dldata_type.bits) { - case 8: - return mshadow::kUint8; - } - break; - case kDLInt: - switch (dldata_type.bits) { - case 8: - return mshadow::kInt8; - case 32: - return mshadow::kInt32; - case 64: - return mshadow::kInt64; - } - break; + case kDLFloat: + switch (dldata_type.bits) { + case 16: + return mshadow::kFloat16; + case 32: + return mshadow::kFloat32; + case 64: + return mshadow::kFloat64; + } + break; + case kDLUInt: + switch (dldata_type.bits) { + case 8: + return mshadow::kUint8; + } + break; + case kDLInt: + switch (dldata_type.bits) { + case 8: + return mshadow::kInt8; + case 32: + return mshadow::kInt32; + case 64: + return mshadow::kInt64; + } + break; } LOG(FATAL) << "Unknown DLDataType{" << dldata_type.code << ", " << dldata_type.bits diff --git a/python/mxnet/cython/base.pyi b/python/mxnet/cython/base.pyi index 
8e2332e67b19..d73e1a7d0194 100644 --- a/python/mxnet/cython/base.pyi +++ b/python/mxnet/cython/base.pyi @@ -8,7 +8,6 @@ ctypedef void* SymbolHandle ctypedef void* NDArrayHandle ctypedef void* OpHandle ctypedef void* CachedOpHandle -ctypedef void* DLPackHandle ctypedef unsigned nn_uint cdef py_str(const char* x): diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc index fca48d96039f..b4c2edca8193 100644 --- a/src/c_api/c_api.cc +++ b/src/c_api/c_api.cc @@ -517,7 +517,7 @@ int MXNDArrayFromDLPack(DLManagedTensorHandle dlpack, API_BEGIN(); NDArray *pdata = new NDArray(); *pdata = NDArray::FromDLPack( - static_cast(dlpack)); + static_cast(dlpack)); *out_handle = pdata; API_END(); } From 58c5d87543458fca9c98f0efa6b196d4412885ed Mon Sep 17 00:00:00 2001 From: wkcn Date: Thu, 9 Aug 2018 15:13:14 +0800 Subject: [PATCH 14/26] use MXNDArrayToDLPack in c_api and add compactness check in TBlob --- include/mxnet/c_api.h | 18 ++++-------------- include/mxnet/tensor_blob.h | 23 +++++++++++++++++++++++ python/mxnet/ndarray/ndarray.py | 6 ++++-- src/c_api/c_api.cc | 12 +----------- src/ndarray/ndarray.cc | 22 ---------------------- 5 files changed, 32 insertions(+), 49 deletions(-) diff --git a/include/mxnet/c_api.h b/include/mxnet/c_api.h index b8b2feb2b3a0..5ea367f68814 100644 --- a/include/mxnet/c_api.h +++ b/include/mxnet/c_api.h @@ -743,26 +743,16 @@ MXNET_DLL int MXNDArrayGetData(NDArrayHandle handle, void **out_pdata); /*! * \brief Create a reference view of NDArray that -* represents as DLManagedTensor until -* all the pending writes with respect NDArray are finished. +* represents as DLManagedTensor +* Notice: MXNet uses asynchronous execution. Please call MXNDArrayWaitToRead or +* MXNDArrayWaitToWrite before calling MXNDArrayToDLPack. 
* \param handle the handle to the ndarray * \param out_dlpack pointer holder to get pointer of DLManagedTensor * \return 0 when success, -1 when failure happens */ -MXNET_DLL int MXNDArrayToDLPackForRead(NDArrayHandle handle, +MXNET_DLL int MXNDArrayToDLPack(NDArrayHandle handle, DLManagedTensorHandle *out_dlpack); -/*! -* \brief Create a reference view of NDArray that -* represents as DLManagedTensor until -* all the pending reads/writes with respect NDArray are finished. -* \param handle the handle to the ndarray -* \param out_dlpack pointer holder to get pointer of DLManagedTensor -* \return 0 when success, -1 when failure happens -*/ -MXNET_DLL int MXNDArrayToDLPackForWrite(NDArrayHandle handle, - DLManagedTensorHandle *out_dlpack); - /*! * \brief Create a NDArray backed by a dlpack tensor. * diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index 6ad8fb4acfd0..f66f329b052c 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -111,6 +111,29 @@ class TBlob { explicit TBlob(const DLTensor &dltensor) : dptr_(dltensor.data), shape_(TShape(dltensor.shape, dltensor.shape + dltensor.ndim)), type_flag_(DLDataTypeTransform(dltensor.dtype)), dltensor_(dltensor) { + // compactness check for DLTensor + if (dltensor.strides != nullptr) { + // check strides + const int &ndim = dltensor.ndim; + const int64_t *shape = dltensor.shape; + const int64_t *strides = dltensor.strides; + if (ndim >= 1) { + bool err = false; + if (strides[ndim - 1] != 1) { + err = true; + } else { + for (int i = ndim - 2; i >= 0; --i) { + if (strides[i] != shape[i + 1] * strides[i + 1]) { + err = true; + break; + } + } + } + if (err) { + LOG(FATAL) << "Unsupported DLPack because MXNet only support compact tensor now"; + } + } + } } /*! 
* \brief constructor from tensor diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 1117022219ef..f0a382f326fc 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -3928,8 +3928,9 @@ def to_dlpack_for_read(data): [1. 1. 1.]] """ + data.wait_to_read() dlpack = DLPackHandle() - check_call(_LIB.MXNDArrayToDLPackForRead(data.handle, ctypes.byref(dlpack))) + check_call(_LIB.MXNDArrayToDLPack(data.handle, ctypes.byref(dlpack))) return ctypes.pythonapi.PyCapsule_New(dlpack, b'dltensor', pycapsule_dlpack_deleter) def to_dlpack_for_write(data): @@ -3959,8 +3960,9 @@ def to_dlpack_for_write(data): [2. 2. 2.]] """ + check_call(_LIB.MXNDArrayWaitToWrite(data.handle)) dlpack = DLPackHandle() - check_call(_LIB.MXNDArrayToDLPackForWrite(data.handle, ctypes.byref(dlpack))) + check_call(_LIB.MXNDArrayToDLPack(data.handle, ctypes.byref(dlpack))) return ctypes.pythonapi.PyCapsule_New(dlpack, b'dltensor', pycapsule_dlpack_deleter) def from_dlpack(dlpack): diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc index b4c2edca8193..fc6e53420365 100644 --- a/src/c_api/c_api.cc +++ b/src/c_api/c_api.cc @@ -494,19 +494,9 @@ int MXNDArrayGetData(NDArrayHandle handle, API_END(); } -int MXNDArrayToDLPackForRead(NDArrayHandle handle, +int MXNDArrayToDLPack(NDArrayHandle handle, DLManagedTensorHandle *out_dlpack) { API_BEGIN(); - MXNDArrayWaitToRead(handle); - NDArray *arr = static_cast(handle); - *out_dlpack = arr->ToDLPack(); - API_END(); -} - -int MXNDArrayToDLPackForWrite(NDArrayHandle handle, - DLManagedTensorHandle *out_dlpack) { - API_BEGIN(); - MXNDArrayWaitToWrite(handle); NDArray *arr = static_cast(handle); *out_dlpack = arr->ToDLPack(); API_END(); diff --git a/src/ndarray/ndarray.cc b/src/ndarray/ndarray.cc index 83135eabd9bd..d8863ccf8b5c 100644 --- a/src/ndarray/ndarray.cc +++ b/src/ndarray/ndarray.cc @@ -332,28 +332,6 @@ DLManagedTensor* NDArray::ToDLPack() const { NDArray NDArray::FromDLPack(DLManagedTensor* tensor) 
{ const DLTensor &dl_tensor = tensor->dl_tensor; - if (dl_tensor.strides != nullptr) { - // check strides - const int &ndim = dl_tensor.ndim; - const int64_t *shape = dl_tensor.shape; - const int64_t *strides = dl_tensor.strides; - if (ndim >= 1) { - bool err = false; - if (strides[ndim - 1] != 1) { - err = true; - } else { - for (int i = ndim - 2; i >= 0; --i) { - if (strides[i] != shape[i + 1] * strides[i + 1]) { - err = true; - break; - } - } - } - if (err) { - LOG(FATAL) << "Unsupported DLPack because MXNet only support compact tensor now"; - } - } - } return NDArray(TBlob(dl_tensor), dl_tensor.ctx.device_id); } From ef8ffcd99cd62a0c974ccf8e574eb7e36364a0fc Mon Sep 17 00:00:00 2001 From: wkcn Date: Sat, 11 Aug 2018 14:26:41 +0800 Subject: [PATCH 15/26] use python function as destructor of DLPack --- include/mxnet/c_api.h | 7 ------- python/mxnet/base.py | 11 +---------- python/mxnet/ndarray/ndarray.py | 26 ++++++++++++++++++-------- src/c_api/c_api.cc | 24 ------------------------ 4 files changed, 19 insertions(+), 49 deletions(-) diff --git a/include/mxnet/c_api.h b/include/mxnet/c_api.h index 380086d1f9d0..d22a63ee5705 100644 --- a/include/mxnet/c_api.h +++ b/include/mxnet/c_api.h @@ -784,13 +784,6 @@ MXNET_DLL int MXNDArrayFromDLPack(DLManagedTensorHandle dlpack, */ MXNET_DLL int MXNDArrayCallDLPackDeleter(DLManagedTensorHandle dlpack); -/*! - * \brief Delete a dlpack tensor - * \param dlpack_capsule the pointer of a PyCapsule storing DLManagedTensor - * \return 0 when success, -1 when failure happens - */ -MXNET_DLL void MXNDArrayCallDLPackCapsuleDeleter(PyObjectHandle dlpack_capsule); - /*! * \brief get the type of the data in NDArray * \param handle the handle to the narray diff --git a/python/mxnet/base.py b/python/mxnet/base.py index 3505f4a3c767..6f5cd1844fff 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ -748,14 +748,5 @@ def checked_call(f, *args): assert error_t == 0, "Failing cuda call %s returns %s." 
% (f.__name__, error_t) ctypes.pythonapi.PyCapsule_New.restype = ctypes.py_object -ctypes.pythonapi.PyCapsule_New.argtypes = [ctypes.c_void_p, ctypes.c_char_p, - ctypes.c_void_p] - ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p -ctypes.pythonapi.PyCapsule_GetPointer.argtypes = [ctypes.py_object, ctypes.c_char_p] - -ctypes.pythonapi.PyCapsule_SetName.restype = ctypes.c_int -ctypes.pythonapi.PyCapsule_SetName.argtypes = [ctypes.py_object, ctypes.c_char_p] - -_LIB.MXNDArrayCallDLPackCapsuleDeleter.restype = None -_LIB.MXNDArrayCallDLPackCapsuleDeleter.argtypes = [ctypes.c_void_p] +_LIB.MXNDArrayCallDLPackDeleter.argtypes = [ctypes.c_void_p] diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index f0a382f326fc..c1b059999da3 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -34,7 +34,7 @@ from functools import reduce # pylint: disable=redefined-builtin import numpy as np from ..base import _LIB, numeric_types, integer_types -from ..base import c_array, c_array_buf, c_handle_array, mx_real_t +from ..base import c_str, c_array, c_array_buf, c_handle_array, mx_real_t from ..base import mx_uint, NDArrayHandle, check_call, DLPackHandle from ..base import ctypes2buffer from ..context import Context, current_context @@ -3899,8 +3899,16 @@ def histogram(a, bins=10, range=None): raise ValueError("bins argument should be either an integer or an NDArray") # pylint: enable= no-member, protected-access, redefined-builtin -pycapsule_dlpack_deleter = ctypes.CFUNCTYPE(None, ctypes.c_void_p)( - _LIB.MXNDArrayCallDLPackCapsuleDeleter) +PyCapsuleDestructor = ctypes.CFUNCTYPE(None, ctypes.c_void_p) +_c_str_dltensor = c_str('dltensor') + +def _dlpack_deleter(pycapsule): + pycapsule = ctypes.c_void_p(pycapsule) + if ctypes.pythonapi.PyCapsule_IsValid(pycapsule, _c_str_dltensor): + ptr = ctypes.pythonapi.PyCapsule_GetPointer(pycapsule, _c_str_dltensor) + check_call(_LIB.MXNDArrayCallDLPackDeleter(ptr)) + 
+_c_dlpack_deleter = PyCapsuleDestructor(_dlpack_deleter) def to_dlpack_for_read(data): """Returns a reference view of NDArray that represents as DLManagedTensor until @@ -3931,7 +3939,7 @@ def to_dlpack_for_read(data): data.wait_to_read() dlpack = DLPackHandle() check_call(_LIB.MXNDArrayToDLPack(data.handle, ctypes.byref(dlpack))) - return ctypes.pythonapi.PyCapsule_New(dlpack, b'dltensor', pycapsule_dlpack_deleter) + return ctypes.pythonapi.PyCapsule_New(dlpack, _c_str_dltensor, _c_dlpack_deleter) def to_dlpack_for_write(data): """Returns a reference view of NDArray that represents as DLManagedTensor until @@ -3963,7 +3971,7 @@ def to_dlpack_for_write(data): check_call(_LIB.MXNDArrayWaitToWrite(data.handle)) dlpack = DLPackHandle() check_call(_LIB.MXNDArrayToDLPack(data.handle, ctypes.byref(dlpack))) - return ctypes.pythonapi.PyCapsule_New(dlpack, b'dltensor', pycapsule_dlpack_deleter) + return ctypes.pythonapi.PyCapsule_New(dlpack, _c_str_dltensor, _c_dlpack_deleter) def from_dlpack(dlpack): """Returns a NDArray backed by a dlpack tensor. @@ -4003,12 +4011,14 @@ def from_dlpack(dlpack): """ handle = NDArrayHandle() - dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, b'dltensor')) + dlpack = ctypes.py_object(dlpack) + dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, _c_str_dltensor)) assert dlpack_handle.value != 0, ValueError( 'Invalid DLPack Tensor. 
DLTensor capsules can be consumed only once.') check_call(_LIB.MXNDArrayFromDLPack(dlpack_handle, ctypes.byref(handle))) + # delete the deleter of the old dlpack + ctypes.pythonapi.PyCapsule_SetDestructor(dlpack, None) # copy dlpack dlpack_copy = ctypes.pythonapi.PyCapsule_New( - dlpack_handle, b'dltensor', pycapsule_dlpack_deleter) - ctypes.pythonapi.PyCapsule_SetName(dlpack, b'used_dltensor') + dlpack_handle, _c_str_dltensor, _c_dlpack_deleter) return NDArray(handle=handle, dlpack=dlpack_copy) diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc index 8ad797112c55..9a810c1cbda5 100644 --- a/src/c_api/c_api.cc +++ b/src/c_api/c_api.cc @@ -527,30 +527,6 @@ int MXNDArrayCallDLPackDeleter(DLManagedTensorHandle dlpack) { API_END(); } - -typedef struct { - char py_object[16]; - void *pointer; - const char *name; - void *context; - void (*destructor)(void *); -} FakePyCapsule; - -void* PyCapsule_GetPointer(PyObjectHandle o, const char *name) { - FakePyCapsule *p_capsule = static_cast(o); - if (p_capsule == nullptr || p_capsule->pointer == nullptr) { - LOG(FATAL) << "PyCapsule_GetPointer called with invalid PyCapsule"; - } - if (strncmp(p_capsule->name, name, strlen(name)) != 0) { - return nullptr; - } - return p_capsule->pointer; -} - -void MXNDArrayCallDLPackCapsuleDeleter(PyObjectHandle dlpack_capsule) { - MXNDArrayCallDLPackDeleter(PyCapsule_GetPointer(dlpack_capsule, "dltensor")); -} - int MXNDArrayGetDType(NDArrayHandle handle, int *out_dtype) { API_BEGIN(); From afa1898c5055f2cc6d34db7b0549981cf9e14121 Mon Sep 17 00:00:00 2001 From: wkcn Date: Sat, 11 Aug 2018 14:35:22 +0800 Subject: [PATCH 16/26] remove unused PyObjectHandle and update DLDataTypeTransform --- include/mxnet/c_api.h | 2 -- include/mxnet/tensor_blob.h | 21 +++++++-------------- 2 files changed, 7 insertions(+), 16 deletions(-) diff --git a/include/mxnet/c_api.h b/include/mxnet/c_api.h index d22a63ee5705..a01cc6a77940 100644 --- a/include/mxnet/c_api.h +++ b/include/mxnet/c_api.h @@ -95,8 +95,6 @@ 
typedef void *CudaKernelHandle; typedef void *ProfileHandle; /*! \brief handle to DLManagedTensor*/ typedef void *DLManagedTensorHandle; -/*! \brief handle to PyObject*/ -typedef void *PyObjectHandle; typedef void (*ExecutorMonitorCallback)(const char*, NDArrayHandle, diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index f66f329b052c..a5f0b1822b80 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -374,28 +374,21 @@ class TBlob { switch (dldata_type.code) { case kDLFloat: switch (dldata_type.bits) { - case 16: - return mshadow::kFloat16; - case 32: - return mshadow::kFloat32; - case 64: - return mshadow::kFloat64; + case 16: return mshadow::kFloat16; + case 32: return mshadow::kFloat32; + case 64: return mshadow::kFloat64; } break; case kDLUInt: switch (dldata_type.bits) { - case 8: - return mshadow::kUint8; + case 8: return mshadow::kUint8; } break; case kDLInt: switch (dldata_type.bits) { - case 8: - return mshadow::kInt8; - case 32: - return mshadow::kInt32; - case 64: - return mshadow::kInt64; + case 8: return mshadow::kInt8; + case 32: return mshadow::kInt32; + case 64: return mshadow::kInt64; } break; } From a4d3aee762a9cdee347fc57d90422f383fc899a6 Mon Sep 17 00:00:00 2001 From: wkcn Date: Sat, 11 Aug 2018 19:39:38 +0800 Subject: [PATCH 17/26] update from_dlpack code --- python/mxnet/base.py | 1 - python/mxnet/ndarray/ndarray.py | 3 ++- src/c_api/c_api.cc | 8 +++----- 3 files changed, 5 insertions(+), 7 deletions(-) diff --git a/python/mxnet/base.py b/python/mxnet/base.py index 6f5cd1844fff..f1e77064ea95 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ -749,4 +749,3 @@ def checked_call(f, *args): ctypes.pythonapi.PyCapsule_New.restype = ctypes.py_object ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p -_LIB.MXNDArrayCallDLPackDeleter.argtypes = [ctypes.c_void_p] diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index c1b059999da3..be36b86d2a98 100644 
--- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -3905,7 +3905,8 @@ def histogram(a, bins=10, range=None): def _dlpack_deleter(pycapsule): pycapsule = ctypes.c_void_p(pycapsule) if ctypes.pythonapi.PyCapsule_IsValid(pycapsule, _c_str_dltensor): - ptr = ctypes.pythonapi.PyCapsule_GetPointer(pycapsule, _c_str_dltensor) + ptr = ctypes.c_void_p( + ctypes.pythonapi.PyCapsule_GetPointer(pycapsule, _c_str_dltensor)) check_call(_LIB.MXNDArrayCallDLPackDeleter(ptr)) _c_dlpack_deleter = PyCapsuleDestructor(_dlpack_deleter) diff --git a/src/c_api/c_api.cc b/src/c_api/c_api.cc index 9a810c1cbda5..56e318097a3c 100644 --- a/src/c_api/c_api.cc +++ b/src/c_api/c_api.cc @@ -511,16 +511,14 @@ int MXNDArrayToDLPack(NDArrayHandle handle, int MXNDArrayFromDLPack(DLManagedTensorHandle dlpack, NDArrayHandle *out_handle) { API_BEGIN(); - NDArray *pdata = new NDArray(); - *pdata = NDArray::FromDLPack( - static_cast(dlpack)); - *out_handle = pdata; + *out_handle = new NDArray(NDArray::FromDLPack( + static_cast(dlpack))); API_END(); } int MXNDArrayCallDLPackDeleter(DLManagedTensorHandle dlpack) { API_BEGIN(); - if (dlpack) { + if (dlpack != nullptr) { DLManagedTensor *p_dlpack = static_cast(dlpack); p_dlpack->deleter(p_dlpack); } From 493deb05c1f4f3762f147d6ded85b416860a6093 Mon Sep 17 00:00:00 2001 From: wkcn Date: Sat, 11 Aug 2018 19:47:43 +0800 Subject: [PATCH 18/26] fix pylint in ndarray.py --- python/mxnet/ndarray/ndarray.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index be36b86d2a98..fbfe83aa338c 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -3906,7 +3906,7 @@ def _dlpack_deleter(pycapsule): pycapsule = ctypes.c_void_p(pycapsule) if ctypes.pythonapi.PyCapsule_IsValid(pycapsule, _c_str_dltensor): ptr = ctypes.c_void_p( - ctypes.pythonapi.PyCapsule_GetPointer(pycapsule, _c_str_dltensor)) + 
ctypes.pythonapi.PyCapsule_GetPointer(pycapsule, _c_str_dltensor)) check_call(_LIB.MXNDArrayCallDLPackDeleter(ptr)) _c_dlpack_deleter = PyCapsuleDestructor(_dlpack_deleter) From adf36efa6c89b4da7283441e0e2a6158a98ee24c Mon Sep 17 00:00:00 2001 From: wkcn Date: Sun, 12 Aug 2018 08:49:58 +0800 Subject: [PATCH 19/26] rename dlpack after using it --- python/mxnet/ndarray/ndarray.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index fbfe83aa338c..4c8749ded50e 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -3901,6 +3901,7 @@ def histogram(a, bins=10, range=None): PyCapsuleDestructor = ctypes.CFUNCTYPE(None, ctypes.c_void_p) _c_str_dltensor = c_str('dltensor') +_c_str_used_dltensor = c_str('used_dltensor') def _dlpack_deleter(pycapsule): pycapsule = ctypes.c_void_p(pycapsule) @@ -4013,13 +4014,15 @@ def from_dlpack(dlpack): """ handle = NDArrayHandle() dlpack = ctypes.py_object(dlpack) - dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, _c_str_dltensor)) - assert dlpack_handle.value != 0, ValueError( + assert ctypes.pythonapi.PyCapsule_IsValid(dlpack, _c_str_dltensor), ValueError( 'Invalid DLPack Tensor. 
DLTensor capsules can be consumed only once.') + dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, _c_str_dltensor)) check_call(_LIB.MXNDArrayFromDLPack(dlpack_handle, ctypes.byref(handle))) + # Rename PyCapsule (DLPack) + ctypes.pythonapi.PyCapsule_SetName(dlpack, _c_str_used_dltensor) # delete the deleter of the old dlpack ctypes.pythonapi.PyCapsule_SetDestructor(dlpack, None) # copy dlpack dlpack_copy = ctypes.pythonapi.PyCapsule_New( - dlpack_handle, _c_str_dltensor, _c_dlpack_deleter) + dlpack_handle, _c_str_used_dltensor, _c_dlpack_deleter) return NDArray(handle=handle, dlpack=dlpack_copy) From dec838dea794cee8c74739bf563ad52c708fe0f9 Mon Sep 17 00:00:00 2001 From: wkcn Date: Wed, 22 Aug 2018 14:32:54 +0800 Subject: [PATCH 20/26] DLManagedTensor manages itself --- include/mxnet/ndarray.h | 2 +- python/mxnet/_ctypes/ndarray.py | 7 ++----- python/mxnet/cython/ndarray.pyx | 4 +--- python/mxnet/ndarray/ndarray.py | 12 ++---------- src/ndarray/ndarray.cc | 2 +- tests/python/unittest/test_ndarray.py | 3 ++- 6 files changed, 9 insertions(+), 21 deletions(-) diff --git a/include/mxnet/ndarray.h b/include/mxnet/ndarray.h index 446abab9e5ab..783bc65fa416 100644 --- a/include/mxnet/ndarray.h +++ b/include/mxnet/ndarray.h @@ -537,7 +537,7 @@ class NDArray { * * \return The created NDArray view. */ - static NDArray FromDLPack(DLManagedTensor* tensor); + static NDArray FromDLPack(const DLManagedTensor* tensor); /*! 
* \brief Update ndarray chunk storage handles using existing ndarray storage handles diff --git a/python/mxnet/_ctypes/ndarray.py b/python/mxnet/_ctypes/ndarray.py index 7af00bd4a3ef..f324545a2352 100644 --- a/python/mxnet/_ctypes/ndarray.py +++ b/python/mxnet/_ctypes/ndarray.py @@ -31,24 +31,21 @@ class NDArrayBase(object): """Base data structure for ndarray""" - __slots__ = ["handle", "writable", "dlpack"] + __slots__ = ["handle", "writable"] # pylint: disable= no-member - def __init__(self, handle, writable=True, dlpack=None): + def __init__(self, handle, writable=True): """initialize a new NDArray Parameters ---------- handle : NDArrayHandle NDArray handle of C API - dlpack : PyCapsule (DLPack) - DLPack Object """ if handle is not None: assert isinstance(handle, NDArrayHandle) self.handle = handle self.writable = writable - self.dlpack = dlpack def __del__(self): check_call(_LIB.MXNDArrayFree(self.handle)) diff --git a/python/mxnet/cython/ndarray.pyx b/python/mxnet/cython/ndarray.pyx index 346b66d60681..319dc492dbb8 100644 --- a/python/mxnet/cython/ndarray.pyx +++ b/python/mxnet/cython/ndarray.pyx @@ -30,7 +30,6 @@ cdef class NDArrayBase: # handle for symbolic operator. 
cdef NDArrayHandle chandle cdef int cwritable - cdef object dlpack cdef _set_handle(self, handle): cdef unsigned long long ptr @@ -53,10 +52,9 @@ cdef class NDArrayBase: def __get__(self): return bool(self.cwritable) - def __init__(self, handle, writable=True, dlpack=None): + def __init__(self, handle, writable=True): self._set_handle(handle) self.cwritable = writable - self.dlpack = dlpack def __dealloc__(self): CALL(MXNDArrayFree(self.chandle)) diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index 4c8749ded50e..ebbefcf4b0f0 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -360,7 +360,7 @@ def __len__(self): def __getstate__(self): handle = self.handle - this = {'handle' : None, 'dlpack' : self.dlpack} + this = {'handle' : None} if handle is not None: length = ctypes.c_size_t() cptr = ctypes.POINTER(ctypes.c_char)() @@ -382,7 +382,6 @@ def __setstate__(self, state): self.handle = handle else: self.handle = None - self.dlpack = state['dlpack'] # pylint: disable=line-too-long def __setitem__(self, key, value): @@ -4018,11 +4017,4 @@ def from_dlpack(dlpack): 'Invalid DLPack Tensor. 
DLTensor capsules can be consumed only once.') dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, _c_str_dltensor)) check_call(_LIB.MXNDArrayFromDLPack(dlpack_handle, ctypes.byref(handle))) - # Rename PyCapsule (DLPack) - ctypes.pythonapi.PyCapsule_SetName(dlpack, _c_str_used_dltensor) - # delete the deleter of the old dlpack - ctypes.pythonapi.PyCapsule_SetDestructor(dlpack, None) - # copy dlpack - dlpack_copy = ctypes.pythonapi.PyCapsule_New( - dlpack_handle, _c_str_used_dltensor, _c_dlpack_deleter) - return NDArray(handle=handle, dlpack=dlpack_copy) + return NDArray(handle=handle) diff --git a/src/ndarray/ndarray.cc b/src/ndarray/ndarray.cc index d8863ccf8b5c..d3972ee1e503 100644 --- a/src/ndarray/ndarray.cc +++ b/src/ndarray/ndarray.cc @@ -330,7 +330,7 @@ DLManagedTensor* NDArray::ToDLPack() const { return &(dlmanager->tensor); } -NDArray NDArray::FromDLPack(DLManagedTensor* tensor) { +NDArray NDArray::FromDLPack(const DLManagedTensor* tensor) { const DLTensor &dl_tensor = tensor->dl_tensor; return NDArray(TBlob(dl_tensor), dl_tensor.ctx.device_id); } diff --git a/tests/python/unittest/test_ndarray.py b/tests/python/unittest/test_ndarray.py index dc2e2c5ab380..0c493a4186e9 100644 --- a/tests/python/unittest/test_ndarray.py +++ b/tests/python/unittest/test_ndarray.py @@ -1361,7 +1361,7 @@ def test_dlpack(): pack4 = mx.nd.to_dlpack_for_write(a_copy) e = mx.nd.from_dlpack(pack4) - del a, pack, pack2, pack3, pack4 + del a b_np = b.asnumpy() c_np = c.asnumpy() @@ -1371,6 +1371,7 @@ def test_dlpack(): mx.test_utils.assert_almost_equal(a_np, c_np) mx.test_utils.assert_almost_equal(a_np, d_np) mx.test_utils.assert_almost_equal(a_np, e_np) + del pack, pack2, pack3, pack4 if __name__ == '__main__': import nose From 850c3dcb0e43cd4c9ddf1a697e53557d47477946 Mon Sep 17 00:00:00 2001 From: wkcn Date: Wed, 22 Aug 2018 16:28:20 +0800 Subject: [PATCH 21/26] add deleter for TBlob and Chunk in NDArray --- include/mxnet/ndarray.h | 14 +++++++++----- 
include/mxnet/tensor_blob.h | 22 +++++++++++++++------- python/mxnet/ndarray/ndarray.py | 4 ++++ src/ndarray/ndarray.cc | 11 +++++++++-- tests/python/unittest/test_ndarray.py | 3 +-- 5 files changed, 38 insertions(+), 16 deletions(-) diff --git a/include/mxnet/ndarray.h b/include/mxnet/ndarray.h index 783bc65fa416..f0229abc7fce 100644 --- a/include/mxnet/ndarray.h +++ b/include/mxnet/ndarray.h @@ -785,13 +785,15 @@ class NDArray { // The shape of aux data. The default value for the shape depends on the type of storage. // If aux_shapes[i].Size() is zero, aux data i is empty. std::vector aux_shapes; + // The pointer to the deleter function + std::function deleter_; /*! \brief default cosntructor */ - Chunk() : static_data(true), delay_alloc(false) {} + Chunk() : static_data(true), delay_alloc(false), deleter_(nullptr) {} /*! \brief construct a new chunk */ Chunk(TShape shape, Context ctx_, bool delay_alloc_, int dtype) - : static_data(false), delay_alloc(true), ctx(ctx_) { + : static_data(false), delay_alloc(true), ctx(ctx_), deleter_(nullptr) { auto size = shape.Size(); storage_shape = shape; var = Engine::Get()->NewVariable(); @@ -815,10 +817,11 @@ class NDArray { shandle.dptr = data.dptr_; shandle.size = data.shape_.Size() * mshadow::mshadow_sizeof(data.type_flag_); storage_shape = data.shape_; + deleter_ = data.deleter_; } Chunk(int shared_pid, int shared_id, const TShape& shape, int dtype) - : static_data(false), delay_alloc(false) { + : static_data(false), delay_alloc(false), deleter_(nullptr) { var = Engine::Get()->NewVariable(); ctx = Context::CPUShared(0); shandle.size = shape.Size() * mshadow::mshadow_sizeof(dtype); @@ -834,7 +837,8 @@ class NDArray { const std::vector &aux_shapes_) : static_data(false), delay_alloc(delay_alloc_), storage_type(storage_type_), aux_types(aux_types_), ctx(ctx_), storage_shape(storage_shape_), - aux_shapes(aux_shapes_) { + aux_shapes(aux_shapes_), + deleter_(nullptr) { shandle.ctx = ctx; var = Engine::Get()->NewVariable(); // 
aux_handles always reflect the correct number of aux data @@ -851,7 +855,7 @@ class NDArray { Chunk(const NDArrayStorageType storage_type_, const TBlob &data, const std::vector &aux_data, int dev_id) - : static_data(true), delay_alloc(false), storage_type(storage_type_) { + : static_data(true), delay_alloc(false), storage_type(storage_type_), deleter_(nullptr) { using namespace mshadow; CHECK_NE(storage_type, kDefaultStorage); // init var diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index a5f0b1822b80..a666bc8db62c 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -72,11 +72,14 @@ class TBlob { TShape shape_; /*! \brief type flag of the tensor blob */ int type_flag_; + // The pointer to the deleter function + std::function deleter_; /*! \brief default constructor, default copy assign will work */ TBlob(void) : dptr_(NULL), - type_flag_(mshadow::DataType::kFlag) { + type_flag_(mshadow::DataType::kFlag), + deleter_(nullptr) { SetDLTensor(cpu::kDevMask, 0); } /*! @@ -89,7 +92,8 @@ class TBlob { template TBlob(DType *dptr, const TShape &shape, int dev_mask, int dev_id = -1) : dptr_(dptr), shape_(shape), - type_flag_(mshadow::DataType::kFlag) { + type_flag_(mshadow::DataType::kFlag), + deleter_(nullptr) { SetDLTensor(dev_mask, dev_id); } /*! @@ -101,16 +105,20 @@ class TBlob { * \param dev_id the device id */ TBlob(void *dptr, const TShape &shape, int dev_mask, int type_flag, int dev_id = -1) - : dptr_(dptr), shape_(shape), type_flag_(type_flag) { + : dptr_(dptr), shape_(shape), type_flag_(type_flag), + deleter_(nullptr) { SetDLTensor(dev_mask, dev_id); } /*! 
* \brief constructor that construct TBlob from DLTensor - * \param DLTensor Object + * \param DLTensor Object, the deleter function */ - explicit TBlob(const DLTensor &dltensor) : dptr_(dltensor.data), - shape_(TShape(dltensor.shape, dltensor.shape + dltensor.ndim)), - type_flag_(DLDataTypeTransform(dltensor.dtype)), dltensor_(dltensor) { + explicit TBlob(const DLTensor &dltensor, const std::function& deleter = nullptr) + : dptr_(dltensor.data), + shape_(TShape(dltensor.shape, dltensor.shape + dltensor.ndim)), + type_flag_(DLDataTypeTransform(dltensor.dtype)), + deleter_(deleter), + dltensor_(dltensor) { // compactness check for DLTensor if (dltensor.strides != nullptr) { // check strides diff --git a/python/mxnet/ndarray/ndarray.py b/python/mxnet/ndarray/ndarray.py index ebbefcf4b0f0..8e0632b5c46a 100644 --- a/python/mxnet/ndarray/ndarray.py +++ b/python/mxnet/ndarray/ndarray.py @@ -4017,4 +4017,8 @@ def from_dlpack(dlpack): 'Invalid DLPack Tensor. DLTensor capsules can be consumed only once.') dlpack_handle = ctypes.c_void_p(ctypes.pythonapi.PyCapsule_GetPointer(dlpack, _c_str_dltensor)) check_call(_LIB.MXNDArrayFromDLPack(dlpack_handle, ctypes.byref(handle))) + # Rename PyCapsule (DLPack) + ctypes.pythonapi.PyCapsule_SetName(dlpack, _c_str_used_dltensor) + # delete the deleter of the old dlpack + ctypes.pythonapi.PyCapsule_SetDestructor(dlpack, None) return NDArray(handle=handle) diff --git a/src/ndarray/ndarray.cc b/src/ndarray/ndarray.cc index d3972ee1e503..7f60597d4884 100644 --- a/src/ndarray/ndarray.cc +++ b/src/ndarray/ndarray.cc @@ -109,11 +109,12 @@ NDArray::Chunk::~Chunk() { ChunkMem mem; mem.h = this->shandle; mem.aux_h = this->aux_handles; + std::function deleter(this->deleter_); #if MXNET_USE_MKLDNN == 1 // We want to delete mkldnn memory after deleting the variable. 
mem.mem = this->mkl_mem_; #endif - Engine::Get()->DeleteVariable([mem, skip_free](RunContext s) { + Engine::Get()->DeleteVariable([mem, skip_free, deleter](RunContext s) { if (skip_free == false) { #if MXNET_USE_MKLDNN == 1 if (mem.mem) { @@ -125,6 +126,7 @@ NDArray::Chunk::~Chunk() { for (size_t i = 0; i < mem.aux_h.size(); i++) { if (mem.aux_h[i].size > 0) Storage::Get()->Free(mem.aux_h[i]); } + if (deleter != nullptr) deleter(); } }, shandle.ctx, var); } @@ -332,7 +334,12 @@ DLManagedTensor* NDArray::ToDLPack() const { NDArray NDArray::FromDLPack(const DLManagedTensor* tensor) { const DLTensor &dl_tensor = tensor->dl_tensor; - return NDArray(TBlob(dl_tensor), dl_tensor.ctx.device_id); + auto deleter = [tensor](){ + if (tensor->deleter != nullptr) { + tensor->deleter(const_cast(tensor)); + } + }; + return NDArray(TBlob(dl_tensor, deleter), dl_tensor.ctx.device_id); } bool NDArray::fresh_out_grad() const { diff --git a/tests/python/unittest/test_ndarray.py b/tests/python/unittest/test_ndarray.py index 0c493a4186e9..dc2e2c5ab380 100644 --- a/tests/python/unittest/test_ndarray.py +++ b/tests/python/unittest/test_ndarray.py @@ -1361,7 +1361,7 @@ def test_dlpack(): pack4 = mx.nd.to_dlpack_for_write(a_copy) e = mx.nd.from_dlpack(pack4) - del a + del a, pack, pack2, pack3, pack4 b_np = b.asnumpy() c_np = c.asnumpy() @@ -1371,7 +1371,6 @@ def test_dlpack(): mx.test_utils.assert_almost_equal(a_np, c_np) mx.test_utils.assert_almost_equal(a_np, d_np) mx.test_utils.assert_almost_equal(a_np, e_np) - del pack, pack2, pack3, pack4 if __name__ == '__main__': import nose From fc99323b16a3be394b65e69544df57583b8f4fe1 Mon Sep 17 00:00:00 2001 From: wkcn Date: Wed, 22 Aug 2018 16:42:20 +0800 Subject: [PATCH 22/26] remove used code in python/mxnet/base.py --- python/mxnet/base.py | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/python/mxnet/base.py b/python/mxnet/base.py index f1e77064ea95..91d5058ee034 100644 --- a/python/mxnet/base.py +++ b/python/mxnet/base.py @@ 
-731,21 +731,5 @@ def write_all_str(module_file, module_all_list): write_all_str(module_internal_file, module_internal_all) module_internal_file.close() -def cint(init_val=0): - """create a C int with an optional initial value""" - return C.c_int(init_val) - -def int_addr(x): - """given a c_int, return it's address as an int ptr""" - x_addr = C.addressof(x) - int_p = C.POINTER(C.c_int) - x_int_addr = C.cast(x_addr, int_p) - return x_int_addr - -def checked_call(f, *args): - """call a cuda function and check for success""" - error_t = f(*args) - assert error_t == 0, "Failing cuda call %s returns %s." % (f.__name__, error_t) - ctypes.pythonapi.PyCapsule_New.restype = ctypes.py_object ctypes.pythonapi.PyCapsule_GetPointer.restype = ctypes.c_void_p From ffe60c6d551455cb436db023c4dacee07fff9c52 Mon Sep 17 00:00:00 2001 From: wkcn Date: Wed, 22 Aug 2018 21:36:03 +0800 Subject: [PATCH 23/26] retrigger CI From cbb17c3dfd205dffc3b7b44ddf056a96ef672c8a Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 10 Sep 2018 11:22:18 +0800 Subject: [PATCH 24/26] add deleter for shared_ptr --- include/mxnet/ndarray.h | 34 +++++++++++++++++++++++++--------- include/mxnet/tensor_blob.h | 16 +++++----------- src/ndarray/ndarray.cc | 2 +- 3 files changed, 31 insertions(+), 21 deletions(-) diff --git a/include/mxnet/ndarray.h b/include/mxnet/ndarray.h index f0229abc7fce..5ac1bd8592d9 100644 --- a/include/mxnet/ndarray.h +++ b/include/mxnet/ndarray.h @@ -116,6 +116,26 @@ class NDArray { dtype_(data.type_flag_), storage_type_(kDefaultStorage), entry_({nullptr, 0, 0}) { } + + /*! 
+ * \brief constructing a static NDArray that shares data with TBlob which is with deleter + * Use with caution: allocate ONLY ONE NDArray for each TBlob, + * make sure the memory region is available through out the life of NDArray + * \param data the memory content of static data + * \param dev_id the device id this tensor sits at + * \param deleter the function pointer of custom deleter + */ + NDArray(const TBlob &data, int dev_id, const std::function& deleter) + : ptr_(std::shared_ptr(new Chunk(data, dev_id), + [](Chunk *p) { + deleter(); // call custom deleter + delete p; // delete Chunk object + }), + shape_(data.shape_), + dtype_(data.type_flag_), storage_type_(kDefaultStorage), + entry_({nullptr, 0, 0}) { + } + /*! \brief create ndarray from shared memory */ NDArray(int shared_pid, int shared_id, const TShape& shape, int dtype) : ptr_(std::make_shared(shared_pid, shared_id, shape, dtype)), shape_(shape), @@ -785,15 +805,13 @@ class NDArray { // The shape of aux data. The default value for the shape depends on the type of storage. // If aux_shapes[i].Size() is zero, aux data i is empty. std::vector aux_shapes; - // The pointer to the deleter function - std::function deleter_; /*! \brief default cosntructor */ - Chunk() : static_data(true), delay_alloc(false), deleter_(nullptr) {} + Chunk() : static_data(true), delay_alloc(false) {} /*! 
\brief construct a new chunk */ Chunk(TShape shape, Context ctx_, bool delay_alloc_, int dtype) - : static_data(false), delay_alloc(true), ctx(ctx_), deleter_(nullptr) { + : static_data(false), delay_alloc(true), ctx(ctx_) { auto size = shape.Size(); storage_shape = shape; var = Engine::Get()->NewVariable(); @@ -817,11 +835,10 @@ class NDArray { shandle.dptr = data.dptr_; shandle.size = data.shape_.Size() * mshadow::mshadow_sizeof(data.type_flag_); storage_shape = data.shape_; - deleter_ = data.deleter_; } Chunk(int shared_pid, int shared_id, const TShape& shape, int dtype) - : static_data(false), delay_alloc(false), deleter_(nullptr) { + : static_data(false), delay_alloc(false) { var = Engine::Get()->NewVariable(); ctx = Context::CPUShared(0); shandle.size = shape.Size() * mshadow::mshadow_sizeof(dtype); @@ -837,8 +854,7 @@ class NDArray { const std::vector &aux_shapes_) : static_data(false), delay_alloc(delay_alloc_), storage_type(storage_type_), aux_types(aux_types_), ctx(ctx_), storage_shape(storage_shape_), - aux_shapes(aux_shapes_), - deleter_(nullptr) { + aux_shapes(aux_shapes_) { shandle.ctx = ctx; var = Engine::Get()->NewVariable(); // aux_handles always reflect the correct number of aux data @@ -855,7 +871,7 @@ class NDArray { Chunk(const NDArrayStorageType storage_type_, const TBlob &data, const std::vector &aux_data, int dev_id) - : static_data(true), delay_alloc(false), storage_type(storage_type_), deleter_(nullptr) { + : static_data(true), delay_alloc(false), storage_type(storage_type_) { using namespace mshadow; CHECK_NE(storage_type, kDefaultStorage); // init var diff --git a/include/mxnet/tensor_blob.h b/include/mxnet/tensor_blob.h index a666bc8db62c..496e8c7cfced 100755 --- a/include/mxnet/tensor_blob.h +++ b/include/mxnet/tensor_blob.h @@ -72,14 +72,11 @@ class TBlob { TShape shape_; /*! \brief type flag of the tensor blob */ int type_flag_; - // The pointer to the deleter function - std::function deleter_; /*! 
\brief default constructor, default copy assign will work */ TBlob(void) : dptr_(NULL), - type_flag_(mshadow::DataType::kFlag), - deleter_(nullptr) { + type_flag_(mshadow::DataType::kFlag) { SetDLTensor(cpu::kDevMask, 0); } /*! @@ -92,8 +89,7 @@ class TBlob { template TBlob(DType *dptr, const TShape &shape, int dev_mask, int dev_id = -1) : dptr_(dptr), shape_(shape), - type_flag_(mshadow::DataType::kFlag), - deleter_(nullptr) { + type_flag_(mshadow::DataType::kFlag) { SetDLTensor(dev_mask, dev_id); } /*! @@ -105,19 +101,17 @@ class TBlob { * \param dev_id the device id */ TBlob(void *dptr, const TShape &shape, int dev_mask, int type_flag, int dev_id = -1) - : dptr_(dptr), shape_(shape), type_flag_(type_flag), - deleter_(nullptr) { + : dptr_(dptr), shape_(shape), type_flag_(type_flag) { SetDLTensor(dev_mask, dev_id); } /*! * \brief constructor that construct TBlob from DLTensor - * \param DLTensor Object, the deleter function + * \param DLTensor Object */ - explicit TBlob(const DLTensor &dltensor, const std::function& deleter = nullptr) + explicit TBlob(const DLTensor &dltensor) : dptr_(dltensor.data), shape_(TShape(dltensor.shape, dltensor.shape + dltensor.ndim)), type_flag_(DLDataTypeTransform(dltensor.dtype)), - deleter_(deleter), dltensor_(dltensor) { // compactness check for DLTensor if (dltensor.strides != nullptr) { diff --git a/src/ndarray/ndarray.cc b/src/ndarray/ndarray.cc index 7f60597d4884..b25b19f1f9e6 100644 --- a/src/ndarray/ndarray.cc +++ b/src/ndarray/ndarray.cc @@ -339,7 +339,7 @@ NDArray NDArray::FromDLPack(const DLManagedTensor* tensor) { tensor->deleter(const_cast(tensor)); } }; - return NDArray(TBlob(dl_tensor, deleter), dl_tensor.ctx.device_id); + return NDArray(TBlob(dl_tensor), dl_tensor.ctx.device_id, deleter); } bool NDArray::fresh_out_grad() const { From b1204bcbb1ef0e8d8963676fd0954de924fb8758 Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 10 Sep 2018 11:54:17 +0800 Subject: [PATCH 25/26] compilation okay --- include/mxnet/ndarray.h | 4 
++-- src/ndarray/ndarray.cc | 4 +--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/include/mxnet/ndarray.h b/include/mxnet/ndarray.h index fb74fe1b8ef9..1c35882cf30a 100644 --- a/include/mxnet/ndarray.h +++ b/include/mxnet/ndarray.h @@ -126,8 +126,8 @@ class NDArray { * \param deleter the function pointer of custom deleter */ NDArray(const TBlob &data, int dev_id, const std::function& deleter) - : ptr_(std::shared_ptr(new Chunk(data, dev_id), - [](Chunk *p) { + : ptr_(new Chunk(data, dev_id), + [deleter](Chunk *p) { deleter(); // call custom deleter delete p; // delete Chunk object }), diff --git a/src/ndarray/ndarray.cc b/src/ndarray/ndarray.cc index b25b19f1f9e6..5bcb1c2bf485 100644 --- a/src/ndarray/ndarray.cc +++ b/src/ndarray/ndarray.cc @@ -109,12 +109,11 @@ NDArray::Chunk::~Chunk() { ChunkMem mem; mem.h = this->shandle; mem.aux_h = this->aux_handles; - std::function deleter(this->deleter_); #if MXNET_USE_MKLDNN == 1 // We want to delete mkldnn memory after deleting the variable. 
mem.mem = this->mkl_mem_; #endif - Engine::Get()->DeleteVariable([mem, skip_free, deleter](RunContext s) { + Engine::Get()->DeleteVariable([mem, skip_free](RunContext s) { if (skip_free == false) { #if MXNET_USE_MKLDNN == 1 if (mem.mem) { @@ -126,7 +125,6 @@ NDArray::Chunk::~Chunk() { for (size_t i = 0; i < mem.aux_h.size(); i++) { if (mem.aux_h[i].size > 0) Storage::Get()->Free(mem.aux_h[i]); } - if (deleter != nullptr) deleter(); } }, shandle.ctx, var); } From fe1387f4459ec1ea1f1e9e9b37a40f7d1d13eb1f Mon Sep 17 00:00:00 2001 From: wkcn Date: Mon, 10 Sep 2018 11:59:59 +0800 Subject: [PATCH 26/26] fix cpplint --- include/mxnet/ndarray.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/include/mxnet/ndarray.h b/include/mxnet/ndarray.h index 1c35882cf30a..afae5dcfcffe 100644 --- a/include/mxnet/ndarray.h +++ b/include/mxnet/ndarray.h @@ -128,8 +128,8 @@ class NDArray { NDArray(const TBlob &data, int dev_id, const std::function& deleter) : ptr_(new Chunk(data, dev_id), [deleter](Chunk *p) { - deleter(); // call custom deleter - delete p; // delete Chunk object + deleter(); // call custom deleter + delete p; // delete Chunk object }), shape_(data.shape_), dtype_(data.type_flag_), storage_type_(kDefaultStorage),