diff options
author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2023-09-11 22:12:41 +0200 |
---|---|---|
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2023-09-11 22:12:41 +0200 |
commit | fe693b4799cc48476d720fca5bb9fbf00fe58b23 (patch) | |
tree | 39cc519d082f3c22e8904f110e0b20c2ddbc1d75 /text_recognizer/network | |
parent | 684da19a2ca83ee61011c37e36fa71b9eeb5ca6a (diff) |
Remove separate ff class
Diffstat (limited to 'text_recognizer/network')
-rw-r--r-- | text_recognizer/network/transformer/ff.py | 22 |
1 file changed, 0 insertions, 22 deletions
diff --git a/text_recognizer/network/transformer/ff.py b/text_recognizer/network/transformer/ff.py deleted file mode 100644 index 9181323..0000000 --- a/text_recognizer/network/transformer/ff.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Feedforward layer in transformer.""" -from torch import Tensor, nn - - -class FeedForward(nn.Module): - def __init__( - self, - dim: int, - inner_dim: int, - dropout_rate: float = 0.0, - ) -> None: - super().__init__() - self.ff = nn.Sequential( - nn.LayerNorm(dim), - nn.Linear(dim, inner_dim), - nn.GELU(), - nn.Dropout(dropout_rate), - nn.Linear(inner_dim, dim), - ) - - def forward(self, x: Tensor) -> Tensor: - return self.ff(x) |