diff --git a/python/tvm/relay/frontend/keras.py b/python/tvm/relay/frontend/keras.py
index 1913d4a2681a..ec960b9f0b12 100644
--- a/python/tvm/relay/frontend/keras.py
+++ b/python/tvm/relay/frontend/keras.py
@@ -254,6 +254,8 @@ def _convert_dense(
     weightList = keras_layer.get_weights()
     weight = etab.new_const(weightList[0].transpose([1, 0]))
     params = {"weight": weight, "units": weightList[0].shape[1]}
+    units = list(weightList[0].shape)[1]
+    assert units > 0, "The value of units must be a positive integer"
     if input_shape is None:
         input_shape = keras_layer.input_shape
     input_dim = len(input_shape)
@@ -1008,6 +1010,7 @@ def _convert_lstm(
     if keras_layer.go_backwards:
         in_data = _op.reverse(in_data, axis=1)
     units = list(weightList[0].shape)[1]
+    assert units > 0, "The value of units must be a positive integer"
     time_steps = in_shape[1]
     in_data = _op.squeeze(in_data, axis=[0])
     in_data = _op.split(in_data, indices_or_sections=time_steps, axis=0)
@@ -1051,6 +1054,7 @@ def _convert_simple_rnn(
     if keras_layer.use_bias:
         in_bias = etab.new_const(weightList[2])
     units = list(weightList[0].shape)[1]
+    assert units > 0, "The value of units must be a positive integer"
     in_data = _op.nn.batch_flatten(in_data)
     ixh = _op.nn.dense(in_data, kernel_weight, units=units)
     if keras_layer.use_bias:
@@ -1080,6 +1084,7 @@ def _convert_gru(
     if keras_layer.use_bias:
         in_bias = etab.new_const(weightList[2])
     units = list(weightList[0].shape)[1]
+    assert units > 0, "The value of units must be a positive integer"
     in_data = _op.nn.batch_flatten(in_data)
     matrix_x = _op.nn.dense(in_data, kernel_weight, units=units)
     if keras_layer.use_bias:
diff --git a/tests/python/frontend/keras/test_forward.py b/tests/python/frontend/keras/test_forward.py
index 50a0e9850559..2f6e1098df78 100644
--- a/tests/python/frontend/keras/test_forward.py
+++ b/tests/python/frontend/keras/test_forward.py
@@ -244,7 +244,7 @@ def test_forward_activations_except(self, keras_mod):
         ):
             act_funcs = [
                 keras_mod.layers.LeakyReLU(alpha=None),
-                keras_mod.layers.LEU(2, 3, 4),
+                keras_mod.layers.ELU(2, 3, 4),
                 keras_mod.layers.ReLU(threshold=None),
             ]
             data = keras_mod.layers.Input(shape=(2, 3, 4))
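
The guard added in each converter can be exercised in isolation. The minimal sketch below uses a hypothetical weight_list as a stand-in for keras_layer.get_weights(), with a Dense kernel of shape (input_dim, units) whose second dimension is zero; it only illustrates the assertion introduced by this diff, not the full Relay conversion path.

import numpy as np

# Hypothetical stand-in for keras_layer.get_weights(): a kernel of shape
# (input_dim, units). Here units == 0, the degenerate case the new
# assertion rejects before any Relay dense op is constructed.
weight_list = [np.zeros((8, 0), dtype="float32")]

units = list(weight_list[0].shape)[1]
try:
    assert units > 0, "The value of units must be a positive integer"
except AssertionError as err:
    print(f"conversion rejected: {err}")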