diff options
author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-09-13 19:07:53 +0200 |
---|---|---|
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2022-09-13 19:07:53 +0200 |
commit | 9732617e402a5152ba822eb1459d6ff776aba15c (patch) | |
tree | 69cd4d4895f486bf41d20e7a74cad683ebd64294 /text_recognizer/networks/convnext/norm.py | |
parent | 3c411a90422af3e97f5b71402dcf897642b24db7 (diff) |
Add convnext module
Diffstat (limited to 'text_recognizer/networks/convnext/norm.py')
-rw-r--r-- | text_recognizer/networks/convnext/norm.py | 15 |
1 file changed, 15 insertions, 0 deletions
diff --git a/text_recognizer/networks/convnext/norm.py b/text_recognizer/networks/convnext/norm.py new file mode 100644 index 0000000..2d896e5 --- /dev/null +++ b/text_recognizer/networks/convnext/norm.py @@ -0,0 +1,15 @@ +"""Layer norm for conv layers.""" +import torch +from torch import nn, Tensor + + +class LayerNorm(nn.Module): + def __init__(self, dim: int) -> None: + super().__init__() + self.gamma = nn.Parameter(torch.ones(1, dim, 1, 1)) + + def forward(self, x: Tensor) -> Tensor: + eps = 1e-5 if x.dtype == torch.float32 else 1e-3 + var = torch.var(x, dim=1, unbiased=False, keepdim=True) + mean = torch.mean(x, dim=1, keepdim=True) + return (x - mean) / (var + eps).sqrt() * self.gamma |