Skip to content

Commit f4520c4

Browse files
authored
[TVMC] Improve --desired-layouts functionality (#14272)
this aims to make the `--desired-layout` argument more powerful based on the previously merged changes from #14010 by introducing two new features: 1. Allow passing multiple arguments to `--desired-layout` instead of only one, to specify one layout per transformed operator specified in `--desired-layout-ops`. (The number of arguments has to be either 1 or match the number of transformed operators.) 2. Optionally, you can now specify a non-default kernel layout as follows: `NHWC:HWIO` Example Usage: `tvmc compile … --desired-layout-ops nn.max_pool2d qnn.conv2d --desired-layout NCHW NHWC:HWIO` I also added unit tests for the new use-cases. ### Known Limitations: * It would make sense to specify individual kernel layouts for regular convolutions and depthwise ones. However, since both are usually implemented as a generalized `nn.conv2d`, we cannot transform them individually. Are there any good workarounds for this? * The arguments of `--desired-layout` have previously been checked for validity during cmdline parsing (e.g. only NCHW and NHWC are allowed), which is not possible anymore. Should I add a regular expression for that?
1 parent 946581a commit f4520c4

File tree

2 files changed

+117
-11
lines changed

2 files changed

+117
-11
lines changed

python/tvm/driver/tvmc/transform.py

Lines changed: 33 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -103,15 +103,17 @@ def convert_to_mixed_precision(mod, ops=None, calculation_type="float16", acc_ty
103103
raise TVMCException("Error converting mixed precision : {0}".format(str(err)))
104104

105105

106-
def convert_graph_layout(mod, desired_layout, ops=None):
106+
def convert_graph_layout(mod, desired_layouts, ops=None):
107107
"""Alter the layout of the input graph.
108108
109109
Parameters
110110
----------
111111
mod : tvm.IRModule
112112
The relay module to convert.
113-
desired_layout : str
114-
The layout to convert to.
113+
desired_layouts : list[str]
114+
The layouts to convert to.
115+
Expects either a single element or one str per operator.
116+
Can be only data layouts or combination of both, e.g. NHWC:HWIO
115117
ops : list
116118
List of operators to be layout converted.
117119
@@ -123,7 +125,27 @@ def convert_graph_layout(mod, desired_layout, ops=None):
123125
if ops is None:
124126
ops = ["nn.conv2d", "nn.conv2d_transpose", "qnn.conv2d"]
125127

126-
desired_layouts = {op: [desired_layout, "default"] for op in ops}
128+
if not isinstance(desired_layouts, list):
129+
# For backwards compatibility
130+
assert isinstance(desired_layouts, str)
131+
desired_layouts = [desired_layouts]
132+
133+
if len(desired_layouts) != len(ops):
134+
if len(desired_layouts) != 1:
135+
raise TVMCException(
136+
"Expected 1 or {} layouts but got {}".format(len(ops), len(desired_layouts))
137+
)
138+
desired_layouts = desired_layouts * len(ops)
139+
140+
def layout_helper(layout):
141+
if ":" in layout:
142+
data_layout, kernel_layout = layout.split(":", 1)
143+
else:
144+
data_layout = layout
145+
kernel_layout = "default"
146+
return [data_layout, kernel_layout]
147+
148+
desired_layouts = {op: layout_helper(desired_layouts[i]) for i, op in enumerate(ops)}
127149

128150
# Convert the layout of the graph where possible.
129151
seq = transform.Sequential(
@@ -137,7 +159,7 @@ def convert_graph_layout(mod, desired_layout, ops=None):
137159
try:
138160
return seq(mod)
139161
except Exception as err:
140-
raise TVMCException("Error converting layout to {0}: {1}".format(desired_layout, str(err)))
162+
raise TVMCException("Error converting layouts: {}".format(str(err)))
141163

142164

143165
def apply_graph_transforms(mod, args):
@@ -159,7 +181,7 @@ def apply_graph_transforms(mod, args):
159181
return mod
160182

161183
# AlterLayout
162-
if args.get("desired_layout", False):
184+
if args.get("desired_layout", None):
163185
mod = convert_graph_layout(
164186
mod, args["desired_layout"], args.get("desired_layout_ops", None)
165187
)
@@ -210,9 +232,11 @@ def generate_transform_args(parser):
210232
# AlterLayout
211233
parser.add_argument(
212234
"--desired-layout",
213-
choices=["NCHW", "NHWC"],
214-
default=None,
215-
help="Change the data layout of the whole graph.",
235+
nargs="+",
236+
help="Change the data/kernel layout of the graph. (i.e. NCHW or NHWC:HWIO)"
237+
"This option can be provided multiple times to specify per-operator layouts, "
238+
"e.g. '--desired-layout NHWC:HWIO' (Apply same layout for every operator)."
239+
"e.g. '--desired-layout-ops nn.conv2d nn.avg_pool2d --desired-layout NCHW NHWC'.",
216240
)
217241
parser.add_argument(
218242
"--desired-layout-ops",

tests/python/driver/tvmc/test_transform.py

Lines changed: 84 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
# specific language governing permissions and limitations
1616
# under the License.
1717

18+
import pytest
1819
from unittest.mock import MagicMock
1920

2021
import tvm
@@ -23,6 +24,7 @@
2324
from tvm.relay.expr_functor import ExprMutator
2425
from tvm.ir.instrument import pass_instrument
2526
from tvm.driver.tvmc.transform import apply_graph_transforms
27+
from tvm.driver.tvmc.model import TVMCException
2628

2729

2830
def test_layout_transform_fold_constant(relay_conv2d):
@@ -41,7 +43,7 @@ def run_after_pass(self, _, info):
4143

4244
pass_names = CollectPassNames()
4345
with tvm.transform.PassContext(opt_level=3, instruments=[pass_names]):
44-
apply_graph_transforms(relay_conv2d, {"desired_layout": desired_layout})
46+
apply_graph_transforms(relay_conv2d, {"desired_layout": [desired_layout]})
4547

4648
names = pass_names.names
4749
assert "ConvertLayout" in names
@@ -61,7 +63,7 @@ def test_layout_transform_convert_layout_pass_args(relay_conv2d, monkeypatch):
6163
monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
6264

6365
with tvm.transform.PassContext(opt_level=3):
64-
apply_graph_transforms(relay_conv2d, {"desired_layout": desired_layout})
66+
apply_graph_transforms(relay_conv2d, {"desired_layout": [desired_layout]})
6567

6668
mock_convert_layout.assert_called_once_with(
6769
{
@@ -72,6 +74,86 @@ def test_layout_transform_convert_layout_pass_args(relay_conv2d, monkeypatch):
7274
)
7375

7476

77+
def test_layout_transform_convert_kernel_layout_pass_args(relay_conv2d, monkeypatch):
78+
"""
79+
Check the convert layout desired layouts arugment is what is expected when
80+
a non-default kernel layout is provided.
81+
"""
82+
desired_layout = "NHWC:HWIO"
83+
desired_layout_ops = ["nn.conv2d"]
84+
85+
mock_convert_layout = MagicMock()
86+
mock_convert_layout.return_value = relay.transform.ConvertLayout({})
87+
monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
88+
89+
with tvm.transform.PassContext(opt_level=3):
90+
apply_graph_transforms(
91+
relay_conv2d,
92+
{"desired_layout": [desired_layout], "desired_layout_ops": desired_layout_ops},
93+
)
94+
95+
mock_convert_layout.assert_called_once_with(
96+
{
97+
"nn.conv2d": ["NHWC", "HWIO"],
98+
}
99+
)
100+
101+
102+
def test_layout_transform_convert_layout_pass_args_multiple(relay_conv2d, monkeypatch):
103+
"""
104+
Check the convert layout desired layouts arugment is what is expected when
105+
a multiple desired layouts are provided.
106+
"""
107+
desired_layout = ["NHWC", "NCHW"]
108+
desired_layout_ops = ["nn.max_pool2d", "qnn.conv2d"]
109+
110+
mock_convert_layout = MagicMock()
111+
mock_convert_layout.return_value = relay.transform.ConvertLayout({})
112+
monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
113+
114+
with tvm.transform.PassContext(opt_level=3):
115+
apply_graph_transforms(
116+
relay_conv2d,
117+
{"desired_layout": desired_layout, "desired_layout_ops": desired_layout_ops},
118+
)
119+
120+
mock_convert_layout.assert_called_once_with(
121+
{
122+
"nn.max_pool2d": ["NHWC", "default"],
123+
"qnn.conv2d": ["NCHW", "default"],
124+
}
125+
)
126+
127+
128+
@pytest.mark.parametrize(
129+
"desired",
130+
[
131+
(["NHWC", "NCHW"], ["nn.max_pool2d"]),
132+
(["NHWC", "NCHW"], None),
133+
],
134+
)
135+
def test_layout_transform_convert_layout_pass_args_multiple_invalid(
136+
relay_conv2d,
137+
monkeypatch,
138+
desired,
139+
):
140+
"""
141+
Check invalid cases when passing multiple values to the desired layouts argument.
142+
"""
143+
desired_layout, desired_layout_ops = desired
144+
145+
mock_convert_layout = MagicMock()
146+
mock_convert_layout.return_value = relay.transform.ConvertLayout({})
147+
monkeypatch.setattr(relay.transform, "ConvertLayout", mock_convert_layout)
148+
149+
with pytest.raises(TVMCException):
150+
with tvm.transform.PassContext(opt_level=3):
151+
apply_graph_transforms(
152+
relay_conv2d,
153+
{"desired_layout": desired_layout, "desired_layout_ops": desired_layout_ops},
154+
)
155+
156+
75157
def test_layout_transform_to_mixed_precision_pass_args_mock(relay_conv2d, monkeypatch):
76158
"""
77159
Check the mixed precision arugments which are expected when

0 commit comments

Comments
 (0)