add causal variant
lucidrains committed Jul 27, 2020
1 parent 6871021 commit 321a620
Showing 3 changed files with 7 additions and 3 deletions.
1 change: 1 addition & 0 deletions README.md
@@ -18,6 +18,7 @@ from conformer.conformer import ConformerConvModule
 
 layer = ConformerConvModule(
     dim = 512,
+    causal = False,             # auto-regressive or not - 1d conv will be made causal with padding if so
     expansion_factor = 2,       # what multiple of the dimension to expand for the depthwise convolution
     kernel_size = 32,           # kernel size, 17 - 32 was said to be optimal
     dropout = 0.                # dropout at the very end
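A minimal usage sketch of the new causal flag (illustrative, not part of this commit): assuming the module consumes tensors shaped (batch, seq, dim), as the Transpose((1, 2)) layers in the conformer.py diff below suggest, a causal instance can be exercised like this:

# Hypothetical usage sketch, not taken from the repository's README.
import torch
from conformer.conformer import ConformerConvModule

layer = ConformerConvModule(dim = 512, causal = True)

x = torch.randn(1, 1024, 512)   # (batch, seq, dim)
out = layer(x)                  # expected to keep the input shape
print(out.shape)                # expected: torch.Size([1, 1024, 512])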
7 changes: 5 additions & 2 deletions conformer/conformer.py
@@ -50,19 +50,22 @@ class ConformerConvModule(nn.Module):
     def __init__(
         self,
         dim,
+        causal = False,
         expansion_factor = 2,
         kernel_size = 31,
         dropout = 0.):
         super().__init__()
 
         inner_dim = dim * expansion_factor
+        padding = calc_same_padding(kernel_size) if not causal else (kernel_size - 1, 0)
+
         self.net = nn.Sequential(
             nn.LayerNorm(dim),
             Transpose((1, 2)),
             nn.Conv1d(dim, inner_dim * 2, 1),
             GLU(dim=1),
-            DepthWiseConv1d(inner_dim, inner_dim, kernel_size = kernel_size, padding = calc_same_padding(kernel_size)),
-            nn.BatchNorm1d(inner_dim),
+            DepthWiseConv1d(inner_dim, inner_dim, kernel_size = kernel_size, padding = padding),
+            nn.BatchNorm1d(inner_dim) if not causal else nn.Identity(),
             Swish(),
             nn.Conv1d(inner_dim, dim, 1),
             Transpose((1, 2)),
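The core of the change is the padding expression above: in the causal case the depthwise convolution is padded only on the left by kernel_size - 1, so each output step sees only current and past inputs, and nn.BatchNorm1d is swapped for nn.Identity. A small standalone sketch, not from this repository, showing why left-only padding makes a plain nn.Conv1d causal:

# Standalone sketch (illustrative): left-only padding of kernel_size - 1
# means output step t depends only on inputs at steps <= t.
import torch
import torch.nn.functional as F

kernel_size = 5
conv = torch.nn.Conv1d(1, 1, kernel_size)        # no built-in padding

x = torch.randn(1, 1, 10)                        # (batch, channels, time)
y = conv(F.pad(x, (kernel_size - 1, 0)))         # pad left only -> output has same length as x

# Zero out the "future" after step 4; outputs at steps 0..4 are unchanged.
x_masked = x.clone()
x_masked[..., 5:] = 0.
y_masked = conv(F.pad(x_masked, (kernel_size - 1, 0)))

assert torch.allclose(y[..., :5], y_masked[..., :5])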
2 changes: 1 addition & 1 deletion setup.py
@@ -3,7 +3,7 @@
 setup(
   name = 'conformer',
   packages = find_packages(),
-  version = '0.0.1',
+  version = '0.0.2',
   license='MIT',
   description = 'The convolutional module from the Conformer paper',
   author = 'Phil Wang',
