Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

Fix reverse shape inference in LayerNorm #17683

Merged
merged 2 commits into from
Feb 25, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 6 additions & 4 deletions src/operator/nn/layer_norm.cc
Original file line number Diff line number Diff line change
Expand Up @@ -52,10 +52,12 @@ static bool LayerNormShape(const nnvm::NodeAttrs& attrs,
if (!mxnet::ndim_is_known(dshape)) {
return false;
}

in_shape->at(layernorm::kGamma) = mxnet::TShape(Shape1(channelCount));
in_shape->at(layernorm::kBeta) = mxnet::TShape(Shape1(channelCount));

SHAPE_ASSIGN_CHECK(*in_shape,
layernorm::kGamma,
mxnet::TShape(Shape1(channelCount)));
SHAPE_ASSIGN_CHECK(*in_shape,
layernorm::kBeta,
mxnet::TShape(Shape1(channelCount)));
out_shape->clear();
out_shape->push_back(dshape); // kOut
mxnet::TShape moments_shape(dshape.begin(), dshape.end());
Expand Down
10 changes: 8 additions & 2 deletions tests/python/unittest/test_gluon.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import mxnet as mx
from mxnet import gluon
from mxnet.gluon import nn
from mxnet.base import py_str
from mxnet.base import py_str, MXNetError
from mxnet.test_utils import assert_almost_equal
from mxnet.util import is_np_array
from mxnet.ndarray.ndarray import _STORAGE_TYPE_STR_TO_ID
Expand Down Expand Up @@ -894,7 +894,13 @@ def test_instancenorm():
def test_layernorm():
    """Forward pass of LayerNorm plus the failure path for a channel-size mismatch."""
    check_layer_forward(nn.LayerNorm(in_channels=10), (2, 10, 10, 10))

    # An input whose normalized axis does not match in_channels must raise
    # MXNetError, in both imperative and hybridized execution.
    for hybridize in (False, True):
        layer = nn.LayerNorm(in_channels=10)
        layer.initialize()
        if hybridize:
            layer.hybridize()
        assert_raises(MXNetError, lambda: layer(mx.nd.ones((2, 11))))

@with_seed()
def test_groupnorm():
Expand Down