summary refs log tree commit diff
path: root/text_recognizer
diff options
context:
space:
mode:
author    Gustaf Rydholm <gustaf.rydholm@gmail.com>  2022-06-05 21:16:32 +0200
committer Gustaf Rydholm <gustaf.rydholm@gmail.com>  2022-06-05 21:17:56 +0200
commit    6e1ad65edd7cbb0f8eb7a48991e9f000f554761d (patch)
tree      fc6ad52d2447928a25d2298e21a9b8ec261b8d1e /text_recognizer
parent    0982e09066a3c31cb8b2fc32b5ecbc2bb64952fb (diff)
Rename mlp to ff
Rename mlp to ff
Diffstat (limited to 'text_recognizer')
-rw-r--r--  text_recognizer/networks/conformer/block.py                                            |  4
-rw-r--r--  text_recognizer/networks/conformer/ff.py (renamed from text_recognizer/networks/conformer/mlp.py)  | 10
2 files changed, 8 insertions(+), 6 deletions(-)
diff --git a/text_recognizer/networks/conformer/block.py b/text_recognizer/networks/conformer/block.py
index d9782e8..4b31aec 100644
--- a/text_recognizer/networks/conformer/block.py
+++ b/text_recognizer/networks/conformer/block.py
@@ -5,7 +5,7 @@ from typing import Optional
from torch import nn, Tensor
from text_recognizer.networks.conformer.conv import ConformerConv
-from text_recognizer.networks.conformer.mlp import MLP
+from text_recognizer.networks.conformer.ff import Feedforward
from text_recognizer.networks.conformer.scale import Scale
from text_recognizer.networks.transformer.attention import Attention
from text_recognizer.networks.transformer.norm import PreNorm
@@ -15,7 +15,7 @@ class ConformerBlock(nn.Module):
def __init__(
self,
dim: int,
- ff: MLP,
+ ff: Feedforward,
attn: Attention,
conv: ConformerConv,
) -> None:
diff --git a/text_recognizer/networks/conformer/mlp.py b/text_recognizer/networks/conformer/ff.py
index 031bde9..2ef4245 100644
--- a/text_recognizer/networks/conformer/mlp.py
+++ b/text_recognizer/networks/conformer/ff.py
@@ -2,14 +2,16 @@
from torch import nn, Tensor
-class MLP(nn.Module):
- def __init__(self, dim: int, mult: int = 4, dropout: float = 0.0) -> None:
+class Feedforward(nn.Module):
+ def __init__(
+ self, dim: int, expansion_factor: int = 4, dropout: float = 0.0
+ ) -> None:
super().__init__()
self.layers = nn.Sequential(
- nn.Linear(dim, mult * dim),
+ nn.Linear(dim, expansion_factor * dim),
nn.Mish(inplace=True),
nn.Dropout(dropout),
- nn.Linear(mult * dim, dim),
+ nn.Linear(expansion_factor * dim, dim),
nn.Dropout(dropout),
)