Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions python/tvm/relay/frontend/keras.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,14 +131,14 @@ def _convert_advanced_activation(inexpr, keras_layer, etab, data_layout, input_s

if act_type == "Softmax":
axis = keras_layer.axis
dims = len(input_shape)
dims = len(input_shape) if input_shape else 0
if isinstance(axis, list):
raise tvm.error.OpAttributeUnImplemented(f"Softmax with axes {axis} is not supported.")
if data_layout == "NCHW":
if axis == -1:
if input_shape and axis == -1:
axis = 1
else:
axis = axis + 1 if axis < dims - 1 else 1
axis = axis + 1 if axis <= dims - 1 else 1
return _op.nn.softmax(inexpr, axis=axis)
if act_type == "ReLU":
if np.isnan(keras_layer.threshold).any():
Expand Down
7 changes: 7 additions & 0 deletions tests/python/frontend/keras/test_forward.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,6 +229,13 @@ def test_forward_activations(self, keras_mod):
keras_model = keras_mod.models.Model(data, x)
verify_keras_frontend(keras_model)
verify_keras_frontend(keras_model, need_transpose=False, layout="NHWC")
# Test softmax with a 1-D input (input dimension = 1)
data = keras_mod.layers.Input(shape=(11,))
act_func = keras_mod.layers.Softmax()
x = act_func(data)
keras_model = keras_mod.models.Model(data, x)
verify_keras_frontend(keras_model)
verify_keras_frontend(keras_model, need_transpose=False, layout="NHWC")

def test_forward_activations_except(self, keras_mod):
"""
Expand Down