author    Gustaf Rydholm <gustaf.rydholm@gmail.com>    2021-10-27 22:16:04 +0200
committer Gustaf Rydholm <gustaf.rydholm@gmail.com>    2021-10-27 22:16:04 +0200
commit    810d8b2403dd0a229063c5693deac694871243f6 (patch)
tree      66ec18e3f90c41d393c269c1e4d3313dd73e0b09 /text_recognizer/networks/transformer
parent    8a7d47e9a432ec927993cc546dacb89a97a05cda (diff)
Add comments to transformer modules
Diffstat (limited to 'text_recognizer/networks/transformer')
-rw-r--r--  text_recognizer/networks/transformer/norm.py      5
-rw-r--r--  text_recognizer/networks/transformer/residual.py  2
2 files changed, 5 insertions, 2 deletions
diff --git a/text_recognizer/networks/transformer/norm.py b/text_recognizer/networks/transformer/norm.py
index 4930adf..c59744a 100644
--- a/text_recognizer/networks/transformer/norm.py
+++ b/text_recognizer/networks/transformer/norm.py
@@ -4,7 +4,7 @@ Copied from lucidrains:
     https://github.com/lucidrains/x-transformers/blob/main/x_transformers/x_transformers.py
 """
-from typing import Callable, Dict, Type
+from typing import Dict, Type
 
 import torch
 from torch import nn
@@ -19,6 +19,7 @@ class ScaleNorm(nn.Module):
         self.g = nn.Parameter(torch.ones(1))
 
     def forward(self, x: Tensor) -> Tensor:
+        """Applies scale norm."""
        norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
        return x / norm.clamp(min=self.eps) * self.g
 
@@ -30,6 +31,6 @@ class PreNorm(nn.Module):
         self.fn = fn
 
     def forward(self, x: Tensor, **kwargs: Dict) -> Tensor:
-        """Norm tensor."""
+        """Applies pre norm."""
        x = self.norm(x)
        return self.fn(x, **kwargs)
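
For context, a minimal self-contained sketch of how ScaleNorm and PreNorm fit together. The forward passes follow the diff above; the constructor arguments (dim, eps) and the usage example are assumptions based on the lucidrains x-transformers reference, not code taken verbatim from this commit:

    # Sketch only: constructor signatures and the usage example are assumptions.
    from typing import Dict

    import torch
    from torch import nn, Tensor


    class ScaleNorm(nn.Module):
        """Scaled L2 normalization with a single learned gain."""

        def __init__(self, dim: int, eps: float = 1.0e-5) -> None:  # signature assumed
            super().__init__()
            self.scale = dim ** -0.5
            self.eps = eps
            self.g = nn.Parameter(torch.ones(1))

        def forward(self, x: Tensor) -> Tensor:
            """Applies scale norm."""
            norm = torch.norm(x, dim=-1, keepdim=True) * self.scale
            return x / norm.clamp(min=self.eps) * self.g


    class PreNorm(nn.Module):
        """Normalizes the input before applying the wrapped function."""

        def __init__(self, dim: int, fn: nn.Module) -> None:  # signature assumed
            super().__init__()
            self.norm = nn.LayerNorm(dim)
            self.fn = fn

        def forward(self, x: Tensor, **kwargs: Dict) -> Tensor:
            """Applies pre norm."""
            x = self.norm(x)
            return self.fn(x, **kwargs)


    # Usage: pre-norm wrapping of an arbitrary sub-layer.
    block = PreNorm(64, nn.Linear(64, 64))
    out = block(torch.randn(2, 10, 64))  # shape (2, 10, 64)
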
diff --git a/text_recognizer/networks/transformer/residual.py b/text_recognizer/networks/transformer/residual.py
index 1547df6..825a0fc 100644
--- a/text_recognizer/networks/transformer/residual.py
+++ b/text_recognizer/networks/transformer/residual.py
@@ -3,6 +3,8 @@ from torch import nn, Tensor
 
 
 class Residual(nn.Module):
+    """Residual block."""
+
     def forward(self, x: Tensor, residual: Tensor) -> Tensor:
         """Applies the residual function."""
         return x + residual
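
A short usage sketch of the Residual block from the diff above. The composition with a normalized sub-layer is an assumed illustration of how such modules are typically wired in a transformer, not code from this commit:

    # Sketch only: the layer composition below is an assumption for illustration.
    import torch
    from torch import nn, Tensor


    class Residual(nn.Module):
        """Residual block."""

        def forward(self, x: Tensor, residual: Tensor) -> Tensor:
            """Applies the residual function."""
            return x + residual


    # Typical wiring: out = x + f(norm(x)).
    dim = 64
    sublayer = nn.Sequential(nn.LayerNorm(dim), nn.Linear(dim, dim))  # stands in for a PreNorm-wrapped fn
    residual = Residual()

    x = torch.randn(2, 10, dim)
    out = residual(sublayer(x), x)  # same shape as x: (2, 10, 64)
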