author     Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-05-04 23:11:44 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-05-04 23:11:44 +0200
commit     53450493e0a13d835fd1d2457c49a9d60bee0e18 (patch)
tree       ed30fcdc9dca78ef403622842493273dabac3110 /text_recognizer/networks/transformer/norm.py
parent     aa8b4d6089763e8023a1baccb14dd53b6eceea39 (diff)
Nyströmer implemented but not tested
Diffstat (limited to 'text_recognizer/networks/transformer/norm.py')
-rw-r--r--  text_recognizer/networks/transformer/norm.py  17
1 file changed, 14 insertions(+), 3 deletions(-)
diff --git a/text_recognizer/networks/transformer/norm.py b/text_recognizer/networks/transformer/norm.py
index 9160876..58c8770 100644
--- a/text_recognizer/networks/transformer/norm.py
+++ b/text_recognizer/networks/transformer/norm.py
@@ -4,7 +4,7 @@ Copied from lucidrains:
 https://github.com/lucidrains/x-transformers/blob/main/x_transformers/x_transformers.py
 """
-from typing import Callable, Dict
+from typing import Callable, Dict, Type
 import torch
 from torch import nn
@@ -31,5 +31,16 @@ class ScaleNorm(nn.Module):
     def forward(self, x: Tensor) -> Tensor:
         norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
-        return x / norm.clamp(min=self.eps) self.g
-
+        return x / norm.clamp(min=self.eps) * self.g
+
+
+class PreNorm(nn.Module):
+    def __init__(self, dim: int, fn: Type[nn.Module]) -> None:
+        super().__init__()
+        self.norm = nn.LayerNorm(dim)
+        self.fn = fn
+
+    def forward(self, x: Tensor, **kwargs: Dict) -> Tensor:
+        """Norm tensor."""
+        x = self.norm(x)
+        return self.fn(x, **kwargs)
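For context, the first hunk fixes a SyntaxError: the old line juxtaposed self.g after the clamp with no operator, and the commit inserts the missing multiplication. Below is a minimal sketch of the full ScaleNorm module after this commit. Only forward() is visible in the hunk, so the __init__ signature (dim, eps) and the single learnable gain self.g are assumptions, modeled on the lucidrains x-transformers source that the module docstring credits.

import torch
from torch import nn, Tensor


class ScaleNorm(nn.Module):
    """L2-normalizes along the last dim, then applies a learnable scalar gain."""

    def __init__(self, dim: int, eps: float = 1.0e-5) -> None:
        # NOTE: this __init__ is an assumption; only forward() appears in the diff.
        super().__init__()
        self.scale = dim ** -0.5  # 1 / sqrt(dim)
        self.eps = eps
        self.g = nn.Parameter(torch.ones(1))  # single learnable gain

    def forward(self, x: Tensor) -> Tensor:
        norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
        # The commit's fix: `* self.g` replaces the bare `self.g`,
        # which previously made this line a SyntaxError.
        return x / norm.clamp(min=self.eps) * self.g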
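The new PreNorm class implements the pre-norm residual convention: LayerNorm is applied to the input before the wrapped sublayer, rather than after the residual addition, which keeps the residual path an identity and tends to stabilize deep transformer training. A hypothetical usage sketch follows; the FeedForward sublayer and the shapes are assumptions for illustration. Note that although fn is annotated Type[nn.Module], forward() calls self.fn(x, **kwargs), so in practice an instantiated module is what gets passed.

import torch
from torch import nn, Tensor

from text_recognizer.networks.transformer.norm import PreNorm


class FeedForward(nn.Module):
    """Toy position-wise feedforward sublayer (assumed for this example)."""

    def __init__(self, dim: int, mult: int = 4) -> None:
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(dim, dim * mult),
            nn.GELU(),
            nn.Linear(dim * mult, dim),
        )

    def forward(self, x: Tensor) -> Tensor:
        return self.net(x)


dim = 64
block = PreNorm(dim, FeedForward(dim))  # wrap the sublayer with a LayerNorm
x = torch.randn(1, 10, dim)
out = x + block(x)  # pre-norm residual: x + fn(LayerNorm(x))
print(out.shape)  # torch.Size([1, 10, 64])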