
Commit dffdc3e

[Relax][Frontend] Add op tanh, exp, negative, and permute (#16711)
1 parent 6a877df commit dffdc3e

File tree: 2 files changed (+100, -0 lines)

python/tvm/relax/frontend/nn/op.py

Lines changed: 94 additions & 0 deletions
@@ -978,6 +978,100 @@ def softmax(x: Tensor, axis: int = -1, name: str = "softmax") -> Tensor:
     return wrap_nested(_op.nn.softmax(x._expr, axis), name)
 
 
+def tanh(x: Tensor, name: str = "tanh") -> Tensor:
+    r"""Applies the hyperbolic tangent function.
+
+    .. math::
+        \text{Tanh}(x) = \frac{e^x - e^{-x}}{e^x + e^{-x}}
+
+    Parameters
+    ----------
+    x : Tensor
+        The input data to the operator.
+
+    name : str
+        Name hint.
+
+    Returns
+    -------
+    result : Tensor
+        The computed result.
+
+    Note
+    ----
+    The input tensor is required to have float dtype
+    """
+    return wrap_nested(_op.tanh(x._expr), name)
+
+
+def exp(x: Tensor, name: str = "exp") -> Tensor:
+    r"""Applies the exponential function.
+
+    .. math::
+        \text{Exp}(x) = e^x
+
+    Parameters
+    ----------
+    x : Tensor
+        The input data to the operator.
+
+    name : str
+        Name hint.
+
+    Returns
+    -------
+    result : Tensor
+        The computed result.
+
+    Note
+    ----
+    The input tensor is required to have float dtype
+    """
+    return wrap_nested(_op.exp(x._expr), name)
+
+
+def permute(x: Tensor, axes: Optional[List[int]], name: str = "permute") -> Tensor:
+    """Permutes the dimensions of the input tensor.
+
+    Parameters
+    ----------
+    x : Tensor
+        The input data to the operator.
+
+    axes : Optional[List[int]]
+        The target axes order.
+
+    name : str
+        Name hint.
+
+    Returns
+    -------
+    result : Tensor
+        The transposed result.
+    """
+
+    return wrap_nested(_op.permute_dims(x._expr, axes=axes), name)
+
+
+def negative(x: Tensor, name: str = "neg") -> Tensor:
+    """Numerical negative of the input tensor.
+
+    Parameters
+    ----------
+    x : Tensor
+        The input data to the operator.
+
+    name : str
+        Name hint.
+
+    Returns
+    -------
+    result : Tensor
+        The computed result.
+    """
+    return wrap_nested(_op.negative(x._expr), name)
+
+
 def layer_norm(
     x: Tensor,
     normalized_shape: Union[int, List[int]],
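
For orientation, here is a minimal sketch of how the new wrappers compose from the nn frontend. Everything below (the module name Demo, the shapes, and the spec) is an illustrative assumption, not part of the commit:

# Hypothetical usage sketch; Demo, the shapes, and the spec are assumptions.
from tvm.relax.frontend import nn
from tvm.relax.frontend.nn import op


class Demo(nn.Module):
    def forward(self, x: nn.Tensor):
        # Chain the elementwise ops added here: negative(exp(tanh(x)))
        y = op.negative(op.exp(op.tanh(x)))
        # permute reorders the axes; [0, 2, 1] swaps the last two dimensions
        return op.permute(y, axes=[0, 2, 1])


# export_tvm lowers the module to a Relax IRModule (the input shape is arbitrary)
mod, _ = Demo().export_tvm(
    spec={"forward": {"x": nn.spec.Tensor([1, 3, 4], "float32")}}
)
mod.show()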

tests/python/relax/test_frontend_nn_op.py

Lines changed: 6 additions & 0 deletions
@@ -338,6 +338,9 @@ def test(self, x: Tensor, weight: Tensor, bias: Tensor):
         silu_out = op.silu(x)
         gelu_out = op.gelu(x)
         sigmoid_out = op.sigmoid(x)
+        tanh_out = op.tanh(x)
+        exp_out = op.exp(x)
+        negative_out = op.negative(x)
         softmax_out = op.softmax(x, axis=2)
         rms_norm_out = op.rms_norm(x, weight, axes=[-2, -1])
         rms_norm_with_bias_out = op.rms_norm(x, weight, axes=[-2, -1])
@@ -357,6 +360,9 @@ def test(
             silu: R.Tensor((2, 3, 4, 5), dtype="float32") = R.nn.silu(x)
             gelu: R.Tensor((2, 3, 4, 5), dtype="float32") = R.nn.gelu(x)
             sigmoid: R.Tensor((2, 3, 4, 5), dtype="float32") = R.sigmoid(x)
+            tanh: R.Tensor((2, 3, 4, 5), dtype="float32") = R.tanh(x)
+            exp: R.Tensor((2, 3, 4, 5), dtype="float32") = R.exp(x)
+            negative: R.Tensor((2, 3, 4, 5), dtype="float32") = R.negative(x)
             softmax: R.Tensor((2, 3, 4, 5), dtype="float32") = R.nn.softmax(x, axis=2)
             rms_norm: R.Tensor((2, 3, 4, 5), dtype="float32") = R.nn.rms_norm(
                 x, weight, axes=[-2, -1], epsilon=1.0000000000000001e-05
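
For context, the scaffolding around these hunks (not shown in the diff) follows the file's usual pattern: export the Model to Relax IR and compare it structurally against Expected. A condensed sketch, with the spec shapes assumed rather than quoted from the source; the x shape matches the (2, 3, 4, 5) annotations above, while the weight/bias shapes are guesses consistent with rms_norm over axes [-2, -1]:

# Assumed test scaffold; only the x shape is confirmed by the diff.
import tvm
from tvm.relax.frontend import nn

m = Model()
irmodule, _ = m.export_tvm(
    spec={
        "test": {
            "x": nn.spec.Tensor([2, 3, 4, 5], "float32"),
            "weight": nn.spec.Tensor([4, 5], "float32"),
            "bias": nn.spec.Tensor([4, 5], "float32"),
        }
    }
)
tvm.ir.assert_structural_equal(irmodule, Expected)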
