author    | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-06-05 21:18:56 +0200
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-06-05 21:18:56 +0200
commit    | 425af1bce8362efd97682a5042e76a60bfc28060 (patch)
tree      | a06bba00f825c0f7cb69a8df2f4d424af8070ef7 /text_recognizer
parent    | 6e1ad65edd7cbb0f8eb7a48991e9f000f554761d (diff)
Remove depth wise conv class
Diffstat (limited to 'text_recognizer')
-rw-r--r-- | text_recognizer/networks/conformer/conv.py            | 13
-rw-r--r-- | text_recognizer/networks/conformer/depth_wise_conv.py | 17
2 files changed, 9 insertions, 21 deletions
```diff
diff --git a/text_recognizer/networks/conformer/conv.py b/text_recognizer/networks/conformer/conv.py
index f031dc7..ac13f5d 100644
--- a/text_recognizer/networks/conformer/conv.py
+++ b/text_recognizer/networks/conformer/conv.py
@@ -4,7 +4,6 @@
 from einops.layers.torch import Rearrange
 from torch import nn, Tensor
 
-from text_recognizer.networks.conformer.depth_wise_conv import DepthwiseConv1D
 from text_recognizer.networks.conformer.glu import GLU
 
 
@@ -21,12 +20,18 @@ class ConformerConv(nn.Module):
         self.layers = nn.Sequential(
             nn.LayerNorm(dim),
             Rearrange("b n c -> b c n"),
-            nn.Conv1D(dim, 2 * inner_dim, 1),
+            nn.Conv1d(dim, 2 * inner_dim, 1),
             GLU(dim=1),
-            DepthwiseConv1D(inner_dim, inner_dim, kernel_size),
+            nn.Conv1d(
+                in_channels=inner_dim,
+                out_channels=inner_dim,
+                kernel_size=kernel_size,
+                groups=inner_dim,
+                padding="same",
+            ),
             nn.BatchNorm1d(inner_dim),
             nn.Mish(inplace=True),
-            nn.Conv1D(inner_dim, dim, 1),
+            nn.Conv1d(inner_dim, dim, 1),
             Rearrange("b c n -> b n c"),
             nn.Dropout(dropout),
         )
diff --git a/text_recognizer/networks/conformer/depth_wise_conv.py b/text_recognizer/networks/conformer/depth_wise_conv.py
deleted file mode 100644
index 1dbd0b8..0000000
--- a/text_recognizer/networks/conformer/depth_wise_conv.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Depthwise 1D convolution."""
-from torch import nn, Tensor
-
-
-class DepthwiseConv1D(nn.Module):
-    def __init__(self, in_channels: int, out_channels: int, kernel_size: int) -> None:
-        super().__init__()
-        self.conv = nn.Conv1d(
-            in_channels=in_channels,
-            out_channels=out_channels,
-            kernel_size=kernel_size,
-            groups=in_channels,
-            padding="same",
-        )
-
-    def forward(self, x: Tensor) -> Tensor:
-        return self.conv(x)
```
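For context on why the wrapper could be dropped: `nn.Conv1d` already performs a depthwise convolution when `groups` equals the number of input channels, so the removed `DepthwiseConv1D` class was only a thin pass-through around it. A minimal sketch of that equivalence (the dimensions below are hypothetical, not taken from the commit):

```python
# Sketch, not part of the commit: nn.Conv1d with groups == in_channels
# is a depthwise convolution, i.e. one kernel_size filter per channel.
import torch
from torch import nn

inner_dim, kernel_size = 64, 31  # hypothetical sizes for a Conformer conv block

depthwise = nn.Conv1d(
    in_channels=inner_dim,
    out_channels=inner_dim,
    kernel_size=kernel_size,
    groups=inner_dim,    # each output channel sees only its own input channel
    padding="same",      # keeps the sequence length unchanged (stride must be 1)
)

x = torch.randn(2, inner_dim, 100)  # (batch, channels, sequence)
print(depthwise(x).shape)           # torch.Size([2, 64, 100])

# Weight shape is (out_channels, in_channels // groups, kernel_size) = (64, 1, 31),
# so parameter count = 64 * 31 weights + 64 bias terms = 2048.
print(sum(p.numel() for p in depthwise.parameters()))  # 2048
```

Inlining the call also fixes the `nn.Conv1D` typo in the two pointwise convolutions; the PyTorch class is spelled `nn.Conv1d`.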