From 53450493e0a13d835fd1d2457c49a9d60bee0e18 Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm
Date: Tue, 4 May 2021 23:11:44 +0200
Subject: Nyströmer implemented but not tested
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 text_recognizer/networks/transformer/norm.py | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

(limited to 'text_recognizer/networks/transformer/norm.py')

diff --git a/text_recognizer/networks/transformer/norm.py b/text_recognizer/networks/transformer/norm.py
index 9160876..58c8770 100644
--- a/text_recognizer/networks/transformer/norm.py
+++ b/text_recognizer/networks/transformer/norm.py
@@ -4,7 +4,7 @@ Copied from lucidrains:
     https://github.com/lucidrains/x-transformers/blob/main/x_transformers/x_transformers.py
 
 """
-from typing import Callable, Dict
+from typing import Callable, Dict, Type
 
 import torch
 from torch import nn
@@ -31,5 +31,16 @@ class ScaleNorm(nn.Module):
 
     def forward(self, x: Tensor) -> Tensor:
         norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
-        return x / norm.clamp(min=self.eps) self.g
-
+        return x / norm.clamp(min=self.eps) * self.g
+
+
+class PreNorm(nn.Module):
+    def __init__(self, dim: int, fn: Type[nn.Module]) -> None:
+        super().__init__()
+        self.norm = nn.LayerNorm(dim)
+        self.fn = fn
+
+    def forward(self, x: Tensor, **kwargs: Dict) -> Tensor:
+        """Norm tensor."""
+        x = self.norm(x)
+        return self.fn(x, **kwargs)
-- 
cgit v1.2.3-70-g09d2
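
A minimal usage sketch of the PreNorm wrapper this patch adds: it applies
LayerNorm to the input before delegating to the wrapped sublayer, as in
pre-norm transformer blocks. The FeedForward module, dimensions, and
residual wiring below are illustrative assumptions, not code from this
repository.

    # Hypothetical usage sketch; FeedForward and the residual wiring are
    # assumptions for illustration, not part of the patch.
    import torch
    from torch import Tensor, nn

    from text_recognizer.networks.transformer.norm import PreNorm


    class FeedForward(nn.Module):
        """Toy feed-forward sublayer, defined only for this example."""

        def __init__(self, dim: int, mult: int = 4) -> None:
            super().__init__()
            self.net = nn.Sequential(
                nn.Linear(dim, dim * mult),
                nn.GELU(),
                nn.Linear(dim * mult, dim),
            )

        def forward(self, x: Tensor) -> Tensor:
            return self.net(x)


    dim = 64
    block = PreNorm(dim, FeedForward(dim))  # LayerNorm runs before the sublayer
    x = torch.randn(2, 16, dim)             # (batch, sequence length, dim)
    out = x + block(x)                      # pre-norm residual connection
    print(out.shape)                        # torch.Size([2, 16, 64])

Note that the patch annotates fn as Type[nn.Module], but forward calls
self.fn(x, **kwargs), which expects a module instance; the sketch passes an
instance accordingly.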