path: root/text_recognizer/network/convnext/norm.py
author    Gustaf Rydholm <gustaf.rydholm@gmail.com>    2023-08-25 23:19:14 +0200
committer Gustaf Rydholm <gustaf.rydholm@gmail.com>    2023-08-25 23:19:14 +0200
commit    49ca6ade1a19f7f9c702171537fe4be0dfcda66d (patch)
tree      20062ed1910758481f3d5fff11159706c7b990c6 /text_recognizer/network/convnext/norm.py
parent    0421daf6bd97596703f426ba61c401599b538eeb (diff)
Rename and add flash atten
Diffstat (limited to 'text_recognizer/network/convnext/norm.py')
-rw-r--r--  text_recognizer/network/convnext/norm.py  18
1 file changed, 18 insertions(+), 0 deletions(-)
diff --git a/text_recognizer/network/convnext/norm.py b/text_recognizer/network/convnext/norm.py
new file mode 100644
index 0000000..3355de9
--- /dev/null
+++ b/text_recognizer/network/convnext/norm.py
@@ -0,0 +1,18 @@
+"""Layer norm for conv layers."""
+import torch
+from torch import Tensor, nn
+
+
+class LayerNorm(nn.Module):
+ """Layer norm for convolutions."""
+
+ def __init__(self, dim: int) -> None:
+ super().__init__()
+ self.gamma = nn.Parameter(torch.ones(1, dim, 1, 1))
+
+ def forward(self, x: Tensor) -> Tensor:
+ """Applies layer norm."""
+ eps = 1e-5 if x.dtype == torch.float32 else 1e-3
+ var = torch.var(x, dim=1, unbiased=False, keepdim=True)
+ mean = torch.mean(x, dim=1, keepdim=True)
+ return (x - mean) / (var + eps).sqrt() * self.gamma
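
As a quick illustration of the new module (not part of the commit itself), a minimal usage sketch follows. The import path mirrors the file added above, and the tensor shapes are illustrative assumptions. The layer normalizes each spatial position over the channel dimension of an NCHW tensor and rescales with the learnable per-channel gamma.

# Minimal usage sketch; shapes are assumed for illustration.
import torch

from text_recognizer.network.convnext.norm import LayerNorm

norm = LayerNorm(dim=64)        # one learnable gamma per channel
x = torch.randn(2, 64, 28, 28)  # (batch, channels, height, width)
out = norm(x)                   # normalized over dim=1 (channels)
assert out.shape == x.shape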