diff options
author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-10-27 22:16:04 +0200 |
---|---|---|
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-10-27 22:16:04 +0200 |
commit | 810d8b2403dd0a229063c5693deac694871243f6 (patch) | |
tree | 66ec18e3f90c41d393c269c1e4d3313dd73e0b09 /text_recognizer/networks/transformer/norm.py | |
parent | 8a7d47e9a432ec927993cc546dacb89a97a05cda (diff) |
Add comments to transformer modules
Diffstat (limited to 'text_recognizer/networks/transformer/norm.py')
-rw-r--r-- | text_recognizer/networks/transformer/norm.py | 5 |
1 file changed, 3 insertions, 2 deletions
diff --git a/text_recognizer/networks/transformer/norm.py b/text_recognizer/networks/transformer/norm.py index 4930adf..c59744a 100644 --- a/text_recognizer/networks/transformer/norm.py +++ b/text_recognizer/networks/transformer/norm.py @@ -4,7 +4,7 @@ Copied from lucidrains: https://github.com/lucidrains/x-transformers/blob/main/x_transformers/x_transformers.py """ -from typing import Callable, Dict, Type +from typing import Dict, Type import torch from torch import nn @@ -19,6 +19,7 @@ class ScaleNorm(nn.Module): self.g = nn.Parameter(torch.ones(1)) def forward(self, x: Tensor) -> Tensor: + """Applies scale norm.""" norm = torch.norm(x, dim=-1, keepdim=True) * self.scale return x / norm.clamp(min=self.eps) * self.g @@ -30,6 +31,6 @@ class PreNorm(nn.Module): self.fn = fn def forward(self, x: Tensor, **kwargs: Dict) -> Tensor: - """Norm tensor.""" + """Applies pre norm.""" x = self.norm(x) return self.fn(x, **kwargs) |