From 6e1ad65edd7cbb0f8eb7a48991e9f000f554761d Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm
Date: Sun, 5 Jun 2022 21:16:32 +0200
Subject: Rename mlp to ff

Rename mlp to ff
---
 text_recognizer/networks/conformer/block.py |  4 ++--
 text_recognizer/networks/conformer/ff.py    | 19 +++++++++++++++++++
 text_recognizer/networks/conformer/mlp.py   | 17 -----------------
 3 files changed, 21 insertions(+), 19 deletions(-)
 create mode 100644 text_recognizer/networks/conformer/ff.py
 delete mode 100644 text_recognizer/networks/conformer/mlp.py

diff --git a/text_recognizer/networks/conformer/block.py b/text_recognizer/networks/conformer/block.py
index d9782e8..4b31aec 100644
--- a/text_recognizer/networks/conformer/block.py
+++ b/text_recognizer/networks/conformer/block.py
@@ -5,7 +5,7 @@ from typing import Optional
 from torch import nn, Tensor
 
 from text_recognizer.networks.conformer.conv import ConformerConv
-from text_recognizer.networks.conformer.mlp import MLP
+from text_recognizer.networks.conformer.ff import Feedforward
 from text_recognizer.networks.conformer.scale import Scale
 from text_recognizer.networks.transformer.attention import Attention
 from text_recognizer.networks.transformer.norm import PreNorm
@@ -15,7 +15,7 @@ class ConformerBlock(nn.Module):
     def __init__(
         self,
         dim: int,
-        ff: MLP,
+        ff: Feedforward,
         attn: Attention,
         conv: ConformerConv,
     ) -> None:
diff --git a/text_recognizer/networks/conformer/ff.py b/text_recognizer/networks/conformer/ff.py
new file mode 100644
index 0000000..2ef4245
--- /dev/null
+++ b/text_recognizer/networks/conformer/ff.py
@@ -0,0 +1,19 @@
+"""Conformer feedforward block."""
+from torch import nn, Tensor
+
+
+class Feedforward(nn.Module):
+    def __init__(
+        self, dim: int, expansion_factor: int = 4, dropout: float = 0.0
+    ) -> None:
+        super().__init__()
+        self.layers = nn.Sequential(
+            nn.Linear(dim, expansion_factor * dim),
+            nn.Mish(inplace=True),
+            nn.Dropout(dropout),
+            nn.Linear(expansion_factor * dim, dim),
+            nn.Dropout(dropout),
+        )
+
+    def forward(self, x: Tensor) -> Tensor:
+        return self.layers(x)
diff --git a/text_recognizer/networks/conformer/mlp.py b/text_recognizer/networks/conformer/mlp.py
deleted file mode 100644
index 031bde9..0000000
--- a/text_recognizer/networks/conformer/mlp.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Conformer feedforward block."""
-from torch import nn, Tensor
-
-
-class MLP(nn.Module):
-    def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0) -> None:
-        super().__init__()
-        self.layers = nn.Sequential(
-            nn.Linear(dim, mult * dim),
-            nn.Mish(inplace=True),
-            nn.Dropout(dropout),
-            nn.Linear(mult * dim, dim),
-            nn.Dropout(dropout),
-        )
-
-    def forward(self, x: Tensor) -> Tensor:
-        return self.layers(x)
-- 
cgit v1.2.3-70-g09d2
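
Usage sketch (not part of the patch): the renamed Feedforward module is shape-preserving, expanding the feature dimension by expansion_factor inside the block and projecting back to dim. The import path follows the new file location; the concrete dim, expansion_factor, dropout, and tensor shape below are illustrative assumptions.

    # Assumed standalone check of the renamed module; values are illustrative.
    import torch

    from text_recognizer.networks.conformer.ff import Feedforward

    ff = Feedforward(dim=256, expansion_factor=4, dropout=0.1)
    x = torch.randn(2, 100, 256)  # (batch, sequence, dim)
    out = ff(x)
    assert out.shape == x.shape  # Linear(dim -> 4*dim) then Linear(4*dim -> dim)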