Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add Depthwise ConvTranspose2d to MAX78002 #241

Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
118 changes: 110 additions & 8 deletions ai8x.py
Original file line number Diff line number Diff line change
Expand Up @@ -763,8 +763,8 @@ def __init__( # pylint: disable=too-many-arguments
assert dev.device != 84
opn = nn.ConvTranspose2d(in_channels, out_channels,
kernel_size=kernel_size, stride=stride,
output_padding=1,
padding=padding, dilation=dilation, bias=bias)
output_padding=1, padding=padding,
dilation=dilation, bias=bias, groups=groups)
else:
raise ValueError('Unsupported operation')
else:
Expand Down Expand Up @@ -1016,6 +1016,32 @@ def __init__(self, *args, **kwargs):
super().__init__(*args, op='ConvTranspose2d', **kwargs)


class FusedConvTranspose2dReLU(ConvTranspose2d):
    """
    Fused Transposed 2D Convolution followed by a ReLU activation.
    """
    def __init__(self, *args, **kwargs):
        # Delegate to ConvTranspose2d with the activation fixed to ReLU.
        super().__init__(*args, **kwargs, activation='ReLU')


class FusedConvTranspose2dAbs(ConvTranspose2d):
    """
    Fused Transposed 2D Convolution followed by an Abs activation.
    """
    def __init__(self, *args, **kwargs):
        # Delegate to ConvTranspose2d with the activation fixed to Abs.
        super().__init__(*args, **kwargs, activation='Abs')


class FusedConvTranspose2dBNReLU(FusedConvTranspose2dReLU):
    """
    Fused Transposed 2D Convolution, BatchNorm and ReLU
    """
    def __init__(self, *args, **kwargs):
        # Default to affine batch normalization unless the caller overrides it.
        kwargs.setdefault('batchnorm', 'Affine')
        super().__init__(*args, **kwargs)


class FusedMaxPoolConvTranspose2d(ConvTranspose2d):
"""
Fused 2D Max Pool, Transposed 2D Convolution and Activation ('ReLU', 'Abs', None)
Expand All @@ -1032,6 +1058,16 @@ def __init__(self, *args, **kwargs):
super().__init__(*args, activation='ReLU', **kwargs)


class FusedMaxPoolConvTranspose2dBNReLU(FusedMaxPoolConvTranspose2dReLU):
    """
    Fused 2D Max Pool, Transposed 2D Convolution, BatchNorm and ReLU
    """
    def __init__(self, *args, **kwargs):
        # Default to affine batch normalization unless the caller overrides it.
        kwargs.setdefault('batchnorm', 'Affine')
        super().__init__(*args, **kwargs)


class FusedMaxPoolConvTranspose2dAbs(FusedMaxPoolConvTranspose2d):
"""
Fused 2D Max Pool, Transposed 2D Convolution and Abs
Expand All @@ -1056,6 +1092,16 @@ def __init__(self, *args, **kwargs):
super().__init__(*args, activation='ReLU', **kwargs)


class FusedAvgPoolConvTranspose2dBNReLU(FusedAvgPoolConvTranspose2dReLU):
    """
    Fused 2D Avg Pool, Transposed 2D Convolution, BatchNorm and ReLU
    """
    def __init__(self, *args, **kwargs):
        # Default to affine batch normalization unless the caller overrides it.
        kwargs.setdefault('batchnorm', 'Affine')
        super().__init__(*args, **kwargs)


class FusedAvgPoolConvTranspose2dAbs(FusedAvgPoolConvTranspose2d):
"""
Fused 2D Avg Pool, Transposed 2D Convolution and Abs
Expand All @@ -1064,20 +1110,76 @@ def __init__(self, *args, **kwargs):
super().__init__(*args, activation='Abs', **kwargs)


class DepthwiseConvTranspose2d(ConvTranspose2d):
    """
    AI8X - Depthwise Transposed 2D Convolution

    Sets ``groups`` to the number of input channels so each channel is
    deconvolved independently.
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedDepthwiseConvTranspose2dReLU(FusedConvTranspose2dReLU):
    """
    AI8X - Fused Depthwise Transposed 2D Convolution and ReLU

    Sets ``groups`` to the number of input channels (depthwise).
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedDepthwiseConvTranspose2dBNReLU(FusedConvTranspose2dBNReLU):
    """
    AI8X - Fused Depthwise Transposed 2D Convolution, BatchNorm and ReLU

    Sets ``groups`` to the number of input channels (depthwise).
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedAvgPoolDepthwiseConvTranspose2d(FusedAvgPoolConvTranspose2d):
    """
    AI8X - Fused 2D Avg Pool, Depthwise Transposed 2D Convolution and no activation

    Sets ``groups`` to the number of input channels (depthwise).
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedAvgPoolDepthwiseConvTranspose2dReLU(FusedAvgPoolConvTranspose2dReLU):
    """
    AI8X - Fused 2D Avg Pool, Depthwise Transposed 2D Convolution and ReLU

    Sets ``groups`` to the number of input channels (depthwise).
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedAvgPoolDepthwiseConvTranspose2dBNReLU(FusedAvgPoolConvTranspose2dBNReLU):
    """
    AI8X - Fused 2D Avg Pool, Depthwise Transposed 2D Convolution, BatchNorm and ReLU

    Sets ``groups`` to the number of input channels (depthwise).
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedMaxPoolDepthwiseConvTranspose2d(FusedMaxPoolConvTranspose2d):
    """
    AI8X - Fused 2D Max Pool, Depthwise Transposed 2D Convolution and no activation

    Sets ``groups`` to the number of input channels (depthwise).
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedMaxPoolDepthwiseConvTranspose2dReLU(FusedMaxPoolConvTranspose2dReLU):
    """
    AI8X - Fused 2D Max Pool, Depthwise Transposed 2D Convolution and ReLU

    Sets ``groups`` to the number of input channels (depthwise).
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedMaxPoolDepthwiseConvTranspose2dBNReLU(FusedMaxPoolConvTranspose2dBNReLU):
    """
    AI8X - Fused 2D Max Pool, Depthwise Transposed 2D Convolution, BatchNorm and ReLU

    Sets ``groups`` to the number of input channels (depthwise).
    """
    def __init__(self, *args, **kwargs):
        # Accept in_channels positionally or as a keyword; the original
        # args[0] lookup raised IndexError on keyword-only calls.
        groups = args[0] if args else kwargs['in_channels']
        super().__init__(*args, groups=groups, **kwargs)


class FusedSoftwareLinearReLU(nn.Module):
Expand Down