This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Add NHWC layout support to Pooling (cpu, gpu cuda, gpu cuDNN) #13749

Merged (33 commits, merged on Feb 16, 2019)
Commits (33); the diff below shows changes from 29 commits.
cf0dc09  Adds layout support: mx.sym.Pooling(..., layout='NHWC',...) with tests. (DickJC123, Feb 26, 2018)
55ebfb7  Docs changes (ptrendx, Nov 21, 2018)
79e07fd  Trigger (marcoabreu, Nov 23, 2018)
4e0d0e9  Skip NHWC pooling tests on non-cuDNN platforms (ptrendx, Nov 26, 2018)
c7e06a9  Fix pylint NHWC pooling (ptrendx, Dec 3, 2018)
7fcbdac  Fixes from review (ptrendx, Dec 5, 2018)
1d19ae5  Add CuDNNPoolingOp::Supports() in place of Forward()/Backward() bool … (DickJC123, Dec 7, 2018)
6d5d8cb  Add layout support to cpu implementation of Pooling, with tests. (DickJC123, Dec 12, 2018)
934f84c  Fix cpplint. (DickJC123, Dec 12, 2018)
73fbae9  Fix bug in cpu nhwc impl. (DickJC123, Dec 21, 2018)
8654d50  Add MXNet CUDA pooling in NWC, NHWC and NDHWC. Turn on 3D cuDNN pool… (DickJC123, Dec 21, 2018)
78d08a1  Add PoolingParam::GetLayout() for better default layout handling. (DickJC123, Dec 21, 2018)
226854b  Fix cpplint. (DickJC123, Dec 21, 2018)
b08747b  Throw exception for quantization pooling not NCHW. (DickJC123, Dec 21, 2018)
ed31a60  Expand nhwc pooling test coverage. (DickJC123, Dec 22, 2018)
0dd3fba  SupportMKLDNNPooling() to examine layout param. (DickJC123, Dec 22, 2018)
0625542  Compare 'std' and 'v1' pooling versions only when op definitions permit. (DickJC123, Dec 29, 2018)
19d9453  Add pooling test diagnostic output. (DickJC123, Jan 1, 2019)
086ba9e  Fix syntax. (DickJC123, Jan 2, 2019)
e913e0f  Fix pooling FInplaceOption so it can be shared by all implementations. (DickJC123, Jan 3, 2019)
179a5bf  Add missing param definition. (DickJC123, Jan 3, 2019)
21336d9  Fix #if logic. (DickJC123, Jan 3, 2019)
403bd70  Temp switch to DickJC123/mshadow: shows effect of half round-to-neare… (DickJC123, Jan 15, 2019)
d927767  Merge remote-tracking branch 'mxnet/master' into pr_nhwc_pooling2 (DickJC123, Jan 15, 2019)
95469db  Move back to dmlc/mshadow.git, now with float->half rounding. (DickJC123, Jan 28, 2019)
098bc49  Avoid underflow of lp pooling calc for dtype=float16. (DickJC123, Feb 5, 2019)
0627df4  Remove redundant pooling test. (DickJC123, Feb 5, 2019)
16bfef0  Minor variable naming fixes. (DickJC123, Feb 6, 2019)
7506c1b  Modify FInplaceOption handling per reviewer comments. Expand testing. (DickJC123, Feb 13, 2019)
7ac90ee  Correct gluon Pooling layout param description. (DickJC123, Feb 15, 2019)
a2a2103  Correct Symbol Pooling description. (DickJC123, Feb 15, 2019)
a86be07  Use 'CHECK(x)' rather than 'if (x) LOG(FATAL)'. (DickJC123, Feb 15, 2019)
f773a90  Empty commit to trigger CI. (DickJC123, Feb 15, 2019)
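For reviewers trying the branch out, here is a minimal usage sketch of what the PR enables at the operator level. This is a sketch only, assuming a build of this branch with CUDA/cuDNN available; the shapes are illustrative, and mx.nd.Pooling mirrors the mx.sym.Pooling signature mentioned in the first commit.

import mxnet as mx

# NHWC input: channels sit on the last axis, so no transpose to NCHW is needed
x = mx.nd.random.uniform(shape=(1, 32, 32, 16), ctx=mx.gpu(0))
y = mx.nd.Pooling(data=x, kernel=(2, 2), stride=(2, 2), pool_type='max',
                  layout='NHWC')
print(y.shape)  # expected (1, 16, 16, 16): H and W halved, C still last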
2 changes: 1 addition & 1 deletion 3rdparty/mshadow
Submodule mshadow updated 1 file
+94 −28 mshadow/half.h
99 changes: 63 additions & 36 deletions python/mxnet/gluon/nn/conv_layers.py
@@ -673,7 +673,7 @@ def __init__(self, channels, kernel_size, strides=(1, 1, 1), padding=(0, 0, 0),
class _Pooling(HybridBlock):
"""Abstract class for different pooling layers."""
def __init__(self, pool_size, strides, padding, ceil_mode, global_pool,
- pool_type, count_include_pad=None, **kwargs):
+ pool_type, layout, count_include_pad=None, **kwargs):
super(_Pooling, self).__init__(**kwargs)
if strides is None:
strides = pool_size
@@ -684,6 +684,7 @@ def __init__(self, pool_size, strides, padding, ceil_mode, global_pool,
self._kwargs = {
'kernel': pool_size, 'stride': strides, 'pad': padding,
'global_pool': global_pool, 'pool_type': pool_type,
+ 'layout': layout,
'pooling_convention': 'full' if ceil_mode else 'valid'}
if count_include_pad is not None:
self._kwargs['count_include_pad'] = count_include_pad
@@ -695,7 +696,8 @@ def hybrid_forward(self, F, x):
return F.Pooling(x, name='fwd', **self._kwargs)

def __repr__(self):
- s = '{name}(size={kernel}, stride={stride}, padding={pad}, ceil_mode={ceil_mode})'
+ s = '{name}(size={kernel}, stride={stride}, padding={pad}, ceil_mode={ceil_mode}'
+ s += ', global_pool={global_pool}, pool_type={pool_type}, layout={layout})'
return s.format(name=self.__class__.__name__,
ceil_mode=self._kwargs['pooling_convention'] == 'full',
**self._kwargs)
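The hunks above thread the new layout argument through the shared _Pooling base class: it is stored in self._kwargs, forwarded to F.Pooling by hybrid_forward, and now reported by __repr__. A rough illustration of the effect, assuming this branch is installed (output formatting approximate):

from mxnet.gluon import nn

pool = nn.MaxPool2D(pool_size=2, layout='NHWC')
print(pool)
# Approximately: MaxPool2D(size=(2, 2), stride=(2, 2), padding=(0, 0),
#                ceil_mode=False, global_pool=False, pool_type=max, layout=NHWC)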
@@ -716,7 +718,8 @@ class MaxPool1D(_Pooling):
If padding is non-zero, then the input is implicitly
zero-padded on both sides for padding number of points.
layout : str, default 'NCW'
- Dimension ordering of data and weight. Only supports 'NCW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCW' and 'NWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'W' stands for batch, channel, and width (time) dimensions
respectively. Pooling is applied on the W dimension.
ceil_mode : bool, default False
@@ -738,12 +741,13 @@
"""
def __init__(self, pool_size=2, strides=None, padding=0, layout='NCW',
ceil_mode=False, **kwargs):
- assert layout == 'NCW', "Only supports 'NCW' layout for now"
+ assert layout in ('NCW', 'NWC'),\
+ "Only NCW and NWC layouts are valid for 1D Pooling"
if isinstance(pool_size, numeric_types):
pool_size = (pool_size,)
assert len(pool_size) == 1, "pool_size must be a number or a list of 1 ints"
super(MaxPool1D, self).__init__(
- pool_size, strides, padding, ceil_mode, False, 'max', **kwargs)
+ pool_size, strides, padding, ceil_mode, False, 'max', layout, **kwargs)


class MaxPool2D(_Pooling):
@@ -761,7 +765,8 @@ class MaxPool2D(_Pooling):
If padding is non-zero, then the input is implicitly
zero-padded on both sides for padding number of points.
layout : str, default 'NCHW'
- Dimension ordering of data and weight. Only supports 'NCHW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCHW' and 'NHWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'H', 'W' stands for batch, channel, height, and width
dimensions respectively. padding is applied on 'H' and 'W' dimension.
ceil_mode : bool, default False
@@ -786,12 +791,13 @@
"""
def __init__(self, pool_size=(2, 2), strides=None, padding=0, layout='NCHW',
ceil_mode=False, **kwargs):
- assert layout == 'NCHW', "Only supports 'NCHW' layout for now"
+ assert layout in ('NCHW', 'NHWC'),\
+ "Only NCHW and NHWC layouts are valid for 2D Pooling"
if isinstance(pool_size, numeric_types):
pool_size = (pool_size,)*2
assert len(pool_size) == 2, "pool_size must be a number or a list of 2 ints"
super(MaxPool2D, self).__init__(
- pool_size, strides, padding, ceil_mode, False, 'max', **kwargs)
+ pool_size, strides, padding, ceil_mode, False, 'max', layout, **kwargs)


class MaxPool3D(_Pooling):
@@ -809,7 +815,8 @@ class MaxPool3D(_Pooling):
If padding is non-zero, then the input is implicitly
zero-padded on both sides for padding number of points.
layout : str, default 'NCDHW'
- Dimension ordering of data and weight. Only supports 'NCDHW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCDHW' and 'NDHWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'H', 'W', 'D' stands for batch, channel, height, width and
depth dimensions respectively. padding is applied on 'D', 'H' and 'W'
dimension.
@@ -836,12 +843,13 @@
"""
def __init__(self, pool_size=(2, 2, 2), strides=None, padding=0,
ceil_mode=False, layout='NCDHW', **kwargs):
- assert layout == 'NCDHW', "Only supports 'NCDHW' layout for now"
+ assert layout in ('NCDHW', 'NDHWC'),\
+ "Only NCDHW and NDHWC layouts are valid for 3D Pooling"
if isinstance(pool_size, numeric_types):
pool_size = (pool_size,)*3
assert len(pool_size) == 3, "pool_size must be a number or a list of 3 ints"
super(MaxPool3D, self).__init__(
- pool_size, strides, padding, ceil_mode, False, 'max', **kwargs)
+ pool_size, strides, padding, ceil_mode, False, 'max', layout, **kwargs)


class AvgPool1D(_Pooling):
@@ -858,7 +866,8 @@ class AvgPool1D(_Pooling):
If padding is non-zero, then the input is implicitly
zero-padded on both sides for padding number of points.
layout : str, default 'NCW'
- Dimension ordering of data and weight. Only supports 'NCW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCW' or 'NWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'W' stands for batch, channel, and width (time) dimensions
respectively. padding is applied on 'W' dimension.
ceil_mode : bool, default False
@@ -882,12 +891,14 @@
"""
def __init__(self, pool_size=2, strides=None, padding=0, layout='NCW',
ceil_mode=False, count_include_pad=True, **kwargs):
- assert layout == 'NCW', "Only supports 'NCW' layout for now"
+ assert layout in ('NCW', 'NWC'),\
+ "Only NCW and NWC layouts are valid for 1D Pooling"
if isinstance(pool_size, numeric_types):
pool_size = (pool_size,)
assert len(pool_size) == 1, "pool_size must be a number or a list of 1 ints"
super(AvgPool1D, self).__init__(
- pool_size, strides, padding, ceil_mode, False, 'avg', count_include_pad, **kwargs)
+ pool_size, strides, padding, ceil_mode, False, 'avg', layout, count_include_pad,
+ **kwargs)


class AvgPool2D(_Pooling):
@@ -904,7 +915,8 @@ class AvgPool2D(_Pooling):
If padding is non-zero, then the input is implicitly
zero-padded on both sides for padding number of points.
layout : str, default 'NCHW'
- Dimension ordering of data and weight. Only supports 'NCHW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCHW' or 'NHWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'H', 'W' stands for batch, channel, height, and width
dimensions respectively. padding is applied on 'H' and 'W' dimension.
ceil_mode : bool, default False
@@ -931,12 +943,14 @@
"""
def __init__(self, pool_size=(2, 2), strides=None, padding=0,
ceil_mode=False, layout='NCHW', count_include_pad=True, **kwargs):
- assert layout == 'NCHW', "Only supports 'NCHW' layout for now"
+ assert layout in ('NCHW', 'NHWC'),\
+ "Only NCHW and NHWC layouts are valid for 2D Pooling"
if isinstance(pool_size, numeric_types):
pool_size = (pool_size,)*2
assert len(pool_size) == 2, "pool_size must be a number or a list of 2 ints"
super(AvgPool2D, self).__init__(
- pool_size, strides, padding, ceil_mode, False, 'avg', count_include_pad, **kwargs)
+ pool_size, strides, padding, ceil_mode, False, 'avg', layout, count_include_pad,
+ **kwargs)


class AvgPool3D(_Pooling):
@@ -982,12 +996,14 @@ class AvgPool3D(_Pooling):
"""
def __init__(self, pool_size=(2, 2, 2), strides=None, padding=0,
ceil_mode=False, layout='NCDHW', count_include_pad=True, **kwargs):
- assert layout == 'NCDHW', "Only supports 'NCDHW' layout for now"
+ assert layout in ('NCDHW', 'NDHWC'),\
+ "Only NCDHW and NDHWC layouts are valid for 3D Pooling"
if isinstance(pool_size, numeric_types):
pool_size = (pool_size,)*3
assert len(pool_size) == 3, "pool_size must be a number or a list of 3 ints"
super(AvgPool3D, self).__init__(
- pool_size, strides, padding, ceil_mode, False, 'avg', count_include_pad, **kwargs)
+ pool_size, strides, padding, ceil_mode, False, 'avg', layout, count_include_pad,
+ **kwargs)


class GlobalMaxPool1D(_Pooling):
@@ -997,7 +1013,8 @@ class GlobalMaxPool1D(_Pooling):
Parameters
----------
layout : str, default 'NCW'
- Dimension ordering of data and weight. Only supports 'NCW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCW' or 'NWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'W' stands for batch, channel, and width (time) dimensions
respectively. Pooling is applied on the W dimension.

Expand All @@ -1011,9 +1028,10 @@ class GlobalMaxPool1D(_Pooling):
when `layout` is `NCW`.
"""
def __init__(self, layout='NCW', **kwargs):
- assert layout == 'NCW', "Only supports 'NCW' layout for now"
+ assert layout in ('NCW', 'NWC'),\
+ "Only NCW and NWC layouts are valid for 1D Pooling"
super(GlobalMaxPool1D, self).__init__(
- (1,), None, 0, True, True, 'max', **kwargs)
+ (1,), None, 0, True, True, 'max', layout, **kwargs)


class GlobalMaxPool2D(_Pooling):
@@ -1023,7 +1041,8 @@ class GlobalMaxPool2D(_Pooling):
Parameters
----------
layout : str, default 'NCHW'
- Dimension ordering of data and weight. Only supports 'NCHW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCHW' or 'NHWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'H', 'W' stands for batch, channel, height, and width
dimensions respectively. padding is applied on 'H' and 'W' dimension.

Expand All @@ -1038,9 +1057,10 @@ class GlobalMaxPool2D(_Pooling):
`(batch_size, channels, 1, 1)` when `layout` is `NCHW`.
"""
def __init__(self, layout='NCHW', **kwargs):
- assert layout == 'NCHW', "Only supports 'NCHW' layout for now"
+ assert layout in ('NCHW', 'NHWC'),\
+ "Only NCHW and NHWC layouts are valid for 2D Pooling"
super(GlobalMaxPool2D, self).__init__(
- (1, 1), None, 0, True, True, 'max', **kwargs)
+ (1, 1), None, 0, True, True, 'max', layout, **kwargs)


class GlobalMaxPool3D(_Pooling):
@@ -1050,7 +1070,8 @@ class GlobalMaxPool3D(_Pooling):
Parameters
----------
layout : str, default 'NCDHW'
- Dimension ordering of data and weight. Only supports 'NCDHW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCDHW' or 'NDHWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'H', 'W', 'D' stands for batch, channel, height, width and
depth dimensions respectively. padding is applied on 'D', 'H' and 'W'
dimension.
Expand All @@ -1066,9 +1087,10 @@ class GlobalMaxPool3D(_Pooling):
`(batch_size, channels, 1, 1, 1)` when `layout` is `NCDHW`.
"""
def __init__(self, layout='NCDHW', **kwargs):
- assert layout == 'NCDHW', "Only supports 'NCDHW' layout for now"
+ assert layout in ('NCDHW', 'NDHWC'),\
+ "Only NCDHW and NDHWC layouts are valid for 3D Pooling"
super(GlobalMaxPool3D, self).__init__(
- (1, 1, 1), None, 0, True, True, 'max', **kwargs)
+ (1, 1, 1), None, 0, True, True, 'max', layout, **kwargs)


class GlobalAvgPool1D(_Pooling):
@@ -1077,7 +1099,8 @@ class GlobalAvgPool1D(_Pooling):
Parameters
----------
layout : str, default 'NCW'
- Dimension ordering of data and weight. Only supports 'NCW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCW' or 'NWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'W' stands for batch, channel, and width (time) dimensions
respectively. padding is applied on 'W' dimension.

Expand All @@ -1090,9 +1113,10 @@ class GlobalAvgPool1D(_Pooling):
- **out**: 3D output tensor with shape `(batch_size, channels, 1)`.
"""
def __init__(self, layout='NCW', **kwargs):
- assert layout == 'NCW', "Only supports 'NCW' layout for now"
+ assert layout in ('NCW', 'NWC'),\
+ "Only NCW and NWC layouts are valid for 1D Pooling"
super(GlobalAvgPool1D, self).__init__(
- (1,), None, 0, True, True, 'avg', **kwargs)
+ (1,), None, 0, True, True, 'avg', layout, **kwargs)


class GlobalAvgPool2D(_Pooling):
@@ -1101,7 +1125,8 @@ class GlobalAvgPool2D(_Pooling):
Parameters
----------
layout : str, default 'NCHW'
- Dimension ordering of data and weight. Only supports 'NCHW' layout for now.
+ Dimension ordering of data and weight. Only supports 'NCHW' or 'NHWC'
+ (only with cuDNN) layouts for now.
'N', 'C', 'H', 'W' stands for batch, channel, height, and width
dimensions respectively.

Expand All @@ -1116,9 +1141,10 @@ class GlobalAvgPool2D(_Pooling):
`(batch_size, channels, 1, 1)` when `layout` is `NCHW`.
"""
def __init__(self, layout='NCHW', **kwargs):
- assert layout == 'NCHW', "Only supports 'NCHW' layout for now"
+ assert layout in ('NCHW', 'NHWC'),\
+ "Only NCHW and NHWC layouts are valid for 2D Pooling"
super(GlobalAvgPool2D, self).__init__(
- (1, 1), None, 0, True, True, 'avg', **kwargs)
+ (1, 1), None, 0, True, True, 'avg', layout, **kwargs)


class GlobalAvgPool3D(_Pooling):
@@ -1143,9 +1169,10 @@ class GlobalAvgPool3D(_Pooling):
`(batch_size, channels, 1, 1, 1)` when `layout` is `NCDHW`.
"""
def __init__(self, layout='NCDHW', **kwargs):
- assert layout == 'NCDHW', "Only supports 'NCDHW' layout for now"
+ assert layout in ('NCDHW', 'NDHWC'),\
+ "Only NCDHW and NDHWC layouts are valid for 3D Pooling"
super(GlobalAvgPool3D, self).__init__(
- (1, 1, 1), None, 0, True, True, 'avg', **kwargs)
+ (1, 1, 1), None, 0, True, True, 'avg', layout, **kwargs)


class ReflectionPad2D(HybridBlock):
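Taken together, the Gluon changes let channels-last inputs flow through the pooling blocks end to end. A short sketch of the intended use, assuming a cuDNN-enabled GPU build of this branch (shapes are illustrative):

import mxnet as mx
from mxnet.gluon import nn

ctx = mx.gpu(0)  # the docstrings above note the channels-last layouts currently need cuDNN
x = mx.nd.random.uniform(shape=(8, 28, 28, 3), ctx=ctx)  # NHWC input

max_pool = nn.MaxPool2D(pool_size=2, strides=2, layout='NHWC')
global_avg = nn.GlobalAvgPool2D(layout='NHWC')

print(max_pool(x).shape)    # expected (8, 14, 14, 3): H and W pooled, C stays last
print(global_avg(x).shape)  # expected (8, 1, 1, 3): spatial dims reduced to 1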