author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-01-29 16:05:15 +0100 |
---|---|---|
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-01-29 16:05:15 +0100 |
commit | 7a65b06c55689578cfe6f0380c2e475b5d4571df (patch) | |
tree | 1acd76cc4c7db74e002c31fc4e5eee27fd3da20c | |
parent | 262159039c221839792acbdd7a8a6c7b80fb0dac (diff) | |
feat(norm): add prenorm
-rw-r--r-- | text_recognizer/networks/transformer/norm.py | 16 |
1 file changed, 16 insertions, 0 deletions
diff --git a/text_recognizer/networks/transformer/norm.py b/text_recognizer/networks/transformer/norm.py
index 2b416e6..be38346 100644
--- a/text_recognizer/networks/transformer/norm.py
+++ b/text_recognizer/networks/transformer/norm.py
@@ -4,6 +4,8 @@
 Copied from lucidrains:
 https://github.com/lucidrains/x-transformers/blob/main/x_transformers/x_transformers.py
 """
+from typing import Dict, Type
+
 import torch
 from torch import nn
 from torch import Tensor
@@ -22,3 +24,17 @@ class RMSNorm(nn.Module):
         """Applies normalization."""
         norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
         return x / norm.clamp(min=self.eps) * self.g
+
+
+class PreNorm(nn.Module):
+    """Applies layer normalization then function."""
+
+    def __init__(self, normalized_shape: int, fn: Type[nn.Module]) -> None:
+        super().__init__()
+        self.norm = nn.LayerNorm(normalized_shape)
+        self.fn = fn
+
+    def forward(self, x: Tensor, **kwargs: Dict) -> Tensor:
+        """Applies pre norm."""
+        x = self.norm(x)
+        return self.fn(x, **kwargs)
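For context (not part of the commit): `PreNorm` is the usual pre-norm Transformer wrapper, applying `nn.LayerNorm` to a sublayer's input before calling the wrapped module, typically inside a residual connection. A minimal usage sketch follows; the `FeedForward` module, the dimensions, and the residual wiring are hypothetical and only illustrate how the new class is meant to be called.

```python
import torch
from torch import nn

from text_recognizer.networks.transformer.norm import PreNorm


class FeedForward(nn.Module):
    """Hypothetical sublayer used only to illustrate PreNorm."""

    def __init__(self, dim: int, hidden_dim: int) -> None:
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(dim, hidden_dim),
            nn.GELU(),
            nn.Linear(hidden_dim, dim),
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.net(x)


dim = 256

# Wrap the sublayer so LayerNorm is applied to its input (pre-norm).
block = PreNorm(dim, FeedForward(dim, hidden_dim=4 * dim))

x = torch.randn(1, 10, dim)  # (batch, sequence, dim)
out = x + block(x)           # residual connection around the pre-normed sublayer
print(out.shape)             # torch.Size([1, 10, 256])
```

Any keyword arguments given to the wrapper (for example an attention mask when wrapping an attention block) are forwarded unchanged to the wrapped module via `**kwargs`.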