3 changes: 3 additions & 0 deletions backends/test/suite/operators/test_elu.py
@@ -7,6 +7,8 @@
# pyre-unsafe


import unittest

import torch
from executorch.backends.test.suite.flow import TestFlow

@@ -42,5 +44,6 @@ def test_elu_f32_multi_dim(self, flow: TestFlow) -> None:
def test_elu_f32_alpha(self, flow: TestFlow) -> None:
self._test_op(Model(alpha=0.5), (torch.randn(3, 4, 5),), flow)

@unittest.skip("In place activations aren't properly defunctionalized yet.")
Contributor: IIRC we can only do copy_ and index_put_ today e2e

def test_elu_f32_inplace(self, flow: TestFlow) -> None:
self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)
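
For context on the skip reason, a minimal sketch of the pattern these skipped tests cover (the InplaceElu name and module below are illustrative stand-ins, not the suite's actual Model class): the module mutates its input tensor, and export-time functionalization has to rewrite that mutation as an out-of-place op followed by an explicit write-back, which per the reviewer's comment is only handled end to end for copy_ and index_put_ today.

import torch
import torch.nn.functional as F


class InplaceElu(torch.nn.Module):
    """Hypothetical stand-in for the suite's Model(inplace=True) pattern."""

    def __init__(self, alpha: float = 1.0):
        super().__init__()
        self.alpha = alpha

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # F.elu_ mutates x in place; during export, functionalization must
        # rewrite this mutation as an out-of-place elu plus a copy back into
        # the input buffer, which is the step the skip message says is not
        # yet supported end to end.
        return F.elu_(x, alpha=self.alpha)


model = InplaceElu(alpha=0.5)
print(model(torch.randn(3, 4, 5)).shape)  # torch.Size([3, 4, 5])
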
3 changes: 3 additions & 0 deletions backends/test/suite/operators/test_hardsigmoid.py
@@ -7,6 +7,8 @@
# pyre-unsafe


import unittest

import torch
from executorch.backends.test.suite.flow import TestFlow

@@ -38,6 +40,7 @@ def test_hardsigmoid_f32_single_dim(self, flow: TestFlow) -> None:
def test_hardsigmoid_f32_multi_dim(self, flow: TestFlow) -> None:
self._test_op(Model(), (torch.randn(2, 3, 4, 5),), flow)

@unittest.skip("In place activations aren't properly defunctionalized yet.")
def test_hardsigmoid_f32_inplace(self, flow: TestFlow) -> None:
self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)

3 changes: 3 additions & 0 deletions backends/test/suite/operators/test_hardswish.py
@@ -7,6 +7,8 @@
# pyre-unsafe


import unittest

import torch
from executorch.backends.test.suite.flow import TestFlow

@@ -38,6 +40,7 @@ def test_hardswish_f32_single_dim(self, flow: TestFlow) -> None:
def test_hardswish_f32_multi_dim(self, flow: TestFlow) -> None:
self._test_op(Model(), (torch.randn(2, 3, 4, 5),), flow)

@unittest.skip("In place activations aren't properly defunctionalized yet.")
def test_hardswish_f32_inplace(self, flow: TestFlow) -> None:
self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)

3 changes: 3 additions & 0 deletions backends/test/suite/operators/test_hardtanh.py
@@ -7,6 +7,8 @@
# pyre-unsafe


import unittest

import torch
from executorch.backends.test.suite.flow import TestFlow

@@ -45,6 +47,7 @@ def test_hardtanh_f32_multi_dim(self, flow: TestFlow) -> None:
def test_hardtanh_f32_custom_range(self, flow: TestFlow) -> None:
self._test_op(Model(min_val=-2.0, max_val=2.0), (torch.randn(3, 4, 5),), flow)

@unittest.skip("In place activations aren't properly defunctionalized yet.")
def test_hardtanh_f32_inplace(self, flow: TestFlow) -> None:
self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)

3 changes: 3 additions & 0 deletions backends/test/suite/operators/test_leaky_relu.py
@@ -7,6 +7,8 @@
# pyre-unsafe


import unittest

import torch
from executorch.backends.test.suite.flow import TestFlow

@@ -44,6 +46,7 @@ def test_leaky_relu_f32_multi_dim(self, flow: TestFlow) -> None:
def test_leaky_relu_f32_custom_slope(self, flow: TestFlow) -> None:
self._test_op(Model(negative_slope=0.1), (torch.randn(3, 4, 5),), flow)

@unittest.skip("In place activations aren't properly defunctionalized yet.")
def test_leaky_relu_f32_inplace(self, flow: TestFlow) -> None:
self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)

3 changes: 3 additions & 0 deletions backends/test/suite/operators/test_relu.py
@@ -7,6 +7,8 @@
# pyre-unsafe


import unittest

import torch
from executorch.backends.test.suite.flow import TestFlow

@@ -38,5 +40,6 @@ def test_relu_f32_single_dim(self, flow: TestFlow) -> None:
def test_relu_f32_multi_dim(self, flow: TestFlow) -> None:
self._test_op(Model(), (torch.randn(2, 3, 4, 5),), flow)

@unittest.skip("In place activations aren't properly defunctionalized yet.")
def test_relu_f32_inplace(self, flow: TestFlow) -> None:
self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)
3 changes: 3 additions & 0 deletions backends/test/suite/operators/test_silu.py
@@ -7,6 +7,8 @@
# pyre-unsafe


import unittest

import torch
from executorch.backends.test.suite.flow import TestFlow

@@ -38,6 +40,7 @@ def test_silu_f32_single_dim(self, flow: TestFlow) -> None:
def test_silu_f32_multi_dim(self, flow: TestFlow) -> None:
self._test_op(Model(), (torch.randn(2, 3, 4, 5),), flow)

@unittest.skip("In place activations aren't properly defunctionalized yet.")
def test_silu_f32_inplace(self, flow: TestFlow) -> None:
self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)

3 changes: 3 additions & 0 deletions backends/test/suite/operators/test_threshold.py
@@ -7,6 +7,8 @@
# pyre-unsafe


import unittest

import torch
from executorch.backends.test.suite.flow import TestFlow

@@ -51,6 +53,7 @@ def test_threshold_f32_custom_value(self, flow: TestFlow) -> None:
def test_threshold_f32_custom_threshold_value(self, flow: TestFlow) -> None:
self._test_op(Model(threshold=0.5, value=1.0), (torch.randn(3, 4, 5),), flow)

@unittest.skip("In place activations aren't properly defunctionalized yet.")
def test_threshold_f32_inplace(self, flow: TestFlow) -> None:
self._test_op(Model(inplace=True), (torch.randn(3, 4, 5),), flow)
