From 8bb76745e43c6b4967c8e91ebaf4c4295d0b8d0b Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm
Date: Sun, 12 Jun 2022 23:16:20 +0200
Subject: Remove conformer

---
 text_recognizer/networks/conformer/conv.py | 40 ------------------------------
 1 file changed, 40 deletions(-)
 delete mode 100644 text_recognizer/networks/conformer/conv.py

(limited to 'text_recognizer/networks/conformer/conv.py')

diff --git a/text_recognizer/networks/conformer/conv.py b/text_recognizer/networks/conformer/conv.py
deleted file mode 100644
index ac13f5d..0000000
--- a/text_recognizer/networks/conformer/conv.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""Conformer convolutional block."""
-from einops import rearrange
-from einops.layers.torch import Rearrange
-from torch import nn, Tensor
-
-
-from text_recognizer.networks.conformer.glu import GLU
-
-
-class ConformerConv(nn.Module):
-    def __init__(
-        self,
-        dim: int,
-        expansion_factor: int = 2,
-        kernel_size: int = 31,
-        dropout: int = 0.0,
-    ) -> None:
-        super().__init__()
-        inner_dim = expansion_factor * dim
-        self.layers = nn.Sequential(
-            nn.LayerNorm(dim),
-            Rearrange("b n c -> b c n"),
-            nn.Conv1d(dim, 2 * inner_dim, 1),
-            GLU(dim=1),
-            nn.Conv1d(
-                in_channels=inner_dim,
-                out_channels=inner_dim,
-                kernel_size=kernel_size,
-                groups=inner_dim,
-                padding="same",
-            ),
-            nn.BatchNorm1d(inner_dim),
-            nn.Mish(inplace=True),
-            nn.Conv1d(inner_dim, dim, 1),
-            Rearrange("b c n -> b n c"),
-            nn.Dropout(dropout),
-        )
-
-    def forward(self, x: Tensor) -> Tensor:
-        return self.layers(x)
--
cgit v1.2.3-70-g09d2
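
Note: the deleted module is the convolution block from the Conformer architecture (pointwise expansion, GLU gating, depthwise convolution, batch norm, Mish, pointwise projection, dropout). A minimal standalone sketch of an equivalent block follows for reference; it substitutes torch's built-in nn.GLU for the repository's removed GLU helper and uses a hypothetical class name, so it approximates the deleted code rather than reproducing it.

"""Sketch of a Conformer convolution block equivalent to the removed module (assumes nn.GLU)."""
import torch
from torch import nn, Tensor
from einops.layers.torch import Rearrange


class ConformerConvSketch(nn.Module):
    """LayerNorm -> pointwise conv -> GLU -> depthwise conv -> BatchNorm -> Mish -> pointwise conv."""

    def __init__(
        self,
        dim: int,
        expansion_factor: int = 2,
        kernel_size: int = 31,
        dropout: float = 0.0,
    ) -> None:
        super().__init__()
        inner_dim = expansion_factor * dim
        self.layers = nn.Sequential(
            nn.LayerNorm(dim),
            Rearrange("b n c -> b c n"),       # channels-last -> channels-first for Conv1d
            nn.Conv1d(dim, 2 * inner_dim, 1),  # pointwise expansion to 2 * inner_dim channels
            nn.GLU(dim=1),                     # gating halves the channels back to inner_dim
            nn.Conv1d(
                inner_dim,
                inner_dim,
                kernel_size=kernel_size,
                groups=inner_dim,              # depthwise convolution
                padding="same",
            ),
            nn.BatchNorm1d(inner_dim),
            nn.Mish(inplace=True),
            nn.Conv1d(inner_dim, dim, 1),      # pointwise projection back to dim
            Rearrange("b c n -> b n c"),
            nn.Dropout(dropout),
        )

    def forward(self, x: Tensor) -> Tensor:
        return self.layers(x)


if __name__ == "__main__":
    # Smoke test on a (batch, sequence, channels) tensor; shape is preserved.
    block = ConformerConvSketch(dim=64)
    out = block(torch.randn(2, 100, 64))
    print(out.shape)  # torch.Size([2, 100, 64])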