author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-06-05 21:16:32 +0200
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-06-05 21:17:56 +0200
commit | 6e1ad65edd7cbb0f8eb7a48991e9f000f554761d (patch)
tree | fc6ad52d2447928a25d2298e21a9b8ec261b8d1e /text_recognizer/networks/conformer/mlp.py
parent | 0982e09066a3c31cb8b2fc32b5ecbc2bb64952fb (diff)
Rename mlp to ff
Diffstat (limited to 'text_recognizer/networks/conformer/mlp.py')
-rw-r--r-- | text_recognizer/networks/conformer/mlp.py | 17
1 file changed, 0 insertions, 17 deletions
diff --git a/text_recognizer/networks/conformer/mlp.py b/text_recognizer/networks/conformer/mlp.py
deleted file mode 100644
index 031bde9..0000000
--- a/text_recognizer/networks/conformer/mlp.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Conformer feedforward block."""
-from torch import nn, Tensor
-
-
-class MLP(nn.Module):
-    def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0) -> None:
-        super().__init__()
-        self.layers = nn.Sequential(
-            nn.Linear(dim, mult * dim),
-            nn.Mish(inplace=True),
-            nn.Dropout(dropout),
-            nn.Linear(mult * dim, dim),
-            nn.Dropout(dropout),
-        )
-
-    def forward(self, x: Tensor) -> Tensor:
-        return self.layers(x)
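The replacement file introduced by this rename is not part of this diff, so the sketch below is an assumption: the same feedforward block as the deleted `MLP`, presumably living in `text_recognizer/networks/conformer/ff.py` under a new class name (here called `Feedforward`, which is hypothetical). It expands the model dimension by `mult`, applies Mish and dropout, then projects back.

```python
"""Conformer feedforward block (sketch of the renamed module; class and file
name are assumptions, since the new file is not shown in this diff)."""
import torch
from torch import nn, Tensor


class Feedforward(nn.Module):
    """Position-wise feedforward block: expand by `mult`, apply Mish, project back."""

    def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0) -> None:
        super().__init__()
        self.layers = nn.Sequential(
            nn.Linear(dim, mult * dim),  # expand to the hidden size
            nn.Mish(inplace=True),       # smooth non-linearity used in the deleted MLP
            nn.Dropout(dropout),
            nn.Linear(mult * dim, dim),  # project back to the model dimension
            nn.Dropout(dropout),
        )

    def forward(self, x: Tensor) -> Tensor:
        # Shape is preserved: (batch, seq_len, dim) -> (batch, seq_len, dim).
        return self.layers(x)


if __name__ == "__main__":
    ff = Feedforward(dim=256, mult=4, dropout=0.1)
    out = ff(torch.randn(1, 10, 256))
    print(out.shape)  # torch.Size([1, 10, 256])
```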