Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 1 addition & 22 deletions python/tvm/relay/frontend/onnx.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,9 @@
from .. import ty as _ty
from .. import vision as _vision
from .common import (
autopad,
AttrCvt,
Renamer,
autopad,
ensure_scalar_shape,
fold_constant,
get_name,
Expand Down Expand Up @@ -523,23 +523,6 @@ def _impl_v1(cls, inputs, attr, params):
raise tvm.error.OpAttributeInvalid(msg.format(attr["auto_pad"]))
attr.pop("auto_pad")

# Check if the requested convolution is a group conv1d, if so convert it to conv2d.
# TODO(jwfromm) Remove once proper group_conv1d is supported.
group_conv1d = False
if dimension_picker("conv")(attr) == "conv1d" and attr.get("group") != 1:
group_conv1d = True
# Expand input from NCW to NCHW
data = _op.expand_dims(data, axis=2)
# Expand kernel from OIW to OIHW
kernel = _op.expand_dims(kernel, axis=2)
# Add new value to kernel_shape, strices, dilation, pads, if needed
attr["kernel_shape"] = [1] + list(attr["kernel_shape"])
if "strides" in attr:
attr["strides"] = [1] + list(attr["strides"])
if "dilations" in attr:
attr["dilations"] = [1] + list(attr["dilations"])
if "pads" in attr:
attr["pads"] = [0, attr["pads"][0], 0, attr["pads"][1]]
attr["channels"] = kernel_shapes[0][0]
out = AttrCvt(
op_name=dimension_picker("conv"),
Expand All @@ -552,10 +535,6 @@ def _impl_v1(cls, inputs, attr, params):
custom_check=dimension_constraint(),
)([data, kernel], attr, params)

# If this was a group_conv1d, squish output back to NCW.
if group_conv1d:
out = _op.squeeze(out, axis=[2])

use_bias = len(inputs) == 3
if use_bias:
out = _op.nn.bias_add(out, inputs[2])
Expand Down
3 changes: 1 addition & 2 deletions src/relay/op/nn/convolution.h
Original file line number Diff line number Diff line change
Expand Up @@ -76,8 +76,7 @@ bool Conv1DRel(const Array<Type>& types, int num_inputs, const Attrs& attrs,
if (param->kernel_size.defined() && param->channels.defined()) {
Array<IndexExpr> wshape;

wshape = {{param->channels, dshape_ncw[1], param->kernel_size[0]}};

wshape = {{param->channels, indexdiv(dshape_ncw[1], param->groups), param->kernel_size[0]}};
wshape = trans_kernel_layout.BackwardShape(wshape);
channels = param->channels;
dilated_ksize = 1 + (param->kernel_size[0] - 1) * param->dilation[0];
Expand Down
58 changes: 57 additions & 1 deletion tests/python/relay/test_op_level2.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,61 @@ def test_conv1d_infer_type():
assert yy.checked_type == relay.TensorType((n, w, 16), "int32")


@tvm.testing.uses_gpu
def test_grouped_conv1d_run():
    """Smoke-test grouped 1-D convolution on every enabled target.

    Builds a Relay ``nn.conv1d`` with ``groups > 1``, compiles it with the
    graph executor, and runs it on random data.  Only successful
    compilation + execution is checked; no numeric ground truth is
    compared (see TODO below).
    """

    # TODO(AndrewZhaoLuo): support ground truth function to have grouped support
    # and then combine with test_conv1d_run
    def run_test_conv1d(
        dtype,
        scale,
        dshape,
        kshape,
        padding=(1, 1),
        fref=None,  # unused placeholder kept for signature parity with test_conv1d_run
        groups=1,
        dilation=1,
        except_targets=None,
        **attrs,
    ):
        # Mutable-default pitfall avoided: normalize None -> fresh list.
        if except_targets is None:
            except_targets = []

        x = relay.var("x", shape=dshape, dtype=dtype)
        w = relay.var("w", dtype=dtype)
        y = relay.nn.conv1d(x, w, padding=padding, dilation=dilation, groups=groups, **attrs)
        func = relay.Function([x, w], y)
        data = np.random.uniform(-scale, scale, size=dshape).astype(dtype)
        kernel = np.random.uniform(-scale, scale, size=kshape).astype(dtype)

        for target, dev in tvm.testing.enabled_targets():
            if target in except_targets:
                continue
            # Fix: use the device that enabled_targets() already paired with
            # `target` instead of redundantly rebuilding it via
            # tvm.device(target, 0), which clobbered the yielded `dev`.
            relay.create_executor("graph", device=dev, target=target).evaluate(func)(data, kernel)

    dshape = (1, 6, 224)  # NCW: batch=1, channels=6, width=224
    # Depthwise case: groups == input channels, so each filter sees 1 channel.
    run_test_conv1d(
        "float32",
        1,
        dshape,
        kshape=(10, 1, 3),
        padding=(1, 1),
        channels=10,
        kernel_size=3,
        groups=6,
    )
    # General grouped case: 3 groups of 2 input channels each.
    run_test_conv1d(
        "float32",
        1,
        dshape,
        kshape=(10, 2, 3),
        padding=(1, 1),
        channels=10,
        kernel_size=3,
        groups=3,
    )


@tvm.testing.uses_gpu
def test_conv1d_run():
def run_test_conv1d(
Expand All @@ -81,6 +136,7 @@ def run_test_conv1d(
kshape,
padding=(1, 1),
fref=None,
groups=1,
dilation=1,
except_targets=None,
**attrs,
Expand All @@ -90,7 +146,7 @@ def run_test_conv1d(

x = relay.var("x", shape=dshape, dtype=dtype)
w = relay.var("w", dtype=dtype)
y = relay.nn.conv1d(x, w, padding=padding, dilation=dilation, **attrs)
y = relay.nn.conv1d(x, w, padding=padding, dilation=dilation, groups=groups, **attrs)
func = relay.Function([x, w], y)
data = np.random.uniform(-scale, scale, size=dshape).astype(dtype)
kernel = np.random.uniform(-scale, scale, size=kshape).astype(dtype)
Expand Down