diff options
author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-04-26 22:04:47 +0200 |
---|---|---|
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-04-26 22:04:47 +0200 |
commit | 7ae1f8f9654dcea0a9a22310ac0665a5d3202f0f (patch) | |
tree | 3e332af009cdf24fc207c779b01a8ba973fec2db /text_recognizer/networks/transformer/norm.py | |
parent | 9426cc794d8c28a65bbbf5ae5466a0a343078558 (diff) |
Reformatting transformer (work in progress)
Diffstat (limited to 'text_recognizer/networks/transformer/norm.py')
-rw-r--r-- | text_recognizer/networks/transformer/norm.py | 22 |
1 files changed, 22 insertions, 0 deletions
"""Normalization layers for transformers.

Copied from lucidrains:
    https://github.com/lucidrains/x-transformers/blob/main/x_transformers/x_transformers.py

"""
from typing import Any, Callable, Tuple

import torch
from torch import nn
from torch import Tensor


class Rezero(nn.Module):
    """ReZero residual gate (Bachlechner et al., 2020-style).

    Wraps ``fn`` and scales its primary output by a learnable scalar ``g``
    initialized to zero, so the wrapped branch contributes nothing at the
    start of training and its influence is learned.
    """

    def __init__(self, fn: Callable) -> None:
        """Store the wrapped callable and create the zero-initialized gate.

        Args:
            fn: Callable returning a tuple whose first element is a Tensor
                (e.g. an attention block that also returns auxiliary outputs).
        """
        super().__init__()
        self.fn = fn
        # Learnable residual gate, initialized to zero per the ReZero scheme.
        self.g = nn.Parameter(torch.zeros(1))

    def forward(self, x: Tensor, **kwargs: Any) -> Tuple[Tensor, ...]:
        """Apply ``fn`` and scale its first output by the gate ``g``.

        ``fn`` must return a tuple; the first element is scaled by ``g``,
        any remaining elements (e.g. attention maps) pass through unchanged.
        """
        x, *rest = self.fn(x, **kwargs)
        return (x * self.g, *rest)