author    Gustaf Rydholm <gustaf.rydholm@gmail.com>  2022-09-05 00:04:25 +0200
committer Gustaf Rydholm <gustaf.rydholm@gmail.com>  2022-09-05 00:04:25 +0200
commit    fc7fb0df5aa704aab3d73eab964631c8be924c42 (patch)
tree      ab045f83136c40d5f98485665efa2d9ef3463558 /text_recognizer/networks
parent    281c8602b4d945cf329d5bead104729acf47ed9c (diff)
Update norm
Diffstat (limited to 'text_recognizer/networks')
-rw-r--r--  text_recognizer/networks/transformer/norm.py | 16
1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/text_recognizer/networks/transformer/norm.py b/text_recognizer/networks/transformer/norm.py
index 537246d..4cd3b5b 100644
--- a/text_recognizer/networks/transformer/norm.py
+++ b/text_recognizer/networks/transformer/norm.py
@@ -4,7 +4,7 @@ Copied from lucidrains:
https://github.com/lucidrains/x-transformers/blob/main/x_transformers/x_transformers.py
"""
-from typing import Dict, Type
+from typing import Dict, Optional, Type
import torch
from torch import nn
@@ -29,12 +29,24 @@ class RMSNorm(nn.Module):
class PreNorm(nn.Module):
"""Applies layer normalization then function."""
- def __init__(self, normalized_shape: int, fn: Type[nn.Module]) -> None:
+ def __init__(
+ self,
+ normalized_shape: int,
+ fn: Type[nn.Module],
+ context_dim: Optional[int] = None,
+ ) -> None:
super().__init__()
self.norm = nn.LayerNorm(normalized_shape)
self.fn = fn
+ self.norm_context = (
+ nn.LayerNorm(context_dim) if context_dim is not None else None
+ )
def forward(self, x: Tensor, **kwargs) -> Tensor:
"""Applies pre norm."""
x = self.norm(x)
+ if self.norm_context is not None:
+ context = kwargs["context"]
+ normed_context = self.norm_context(context)
+ kwargs.update(context=normed_context)
return self.fn(x, **kwargs)
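
Below is a minimal usage sketch of the patched PreNorm. The dimensions and the CrossAttentionStub module are hypothetical, invented for illustration only; the PreNorm body is copied from the patch above (parameter type hints trimmed), with the Tensor import made explicit so the snippet runs standalone.

import torch
from torch import nn, Tensor


class PreNorm(nn.Module):
    """Applies layer normalization, then the wrapped function."""

    def __init__(self, normalized_shape, fn, context_dim=None):
        super().__init__()
        self.norm = nn.LayerNorm(normalized_shape)
        self.fn = fn
        # Only created when a context dim is given, so plain
        # self-attention blocks pay no extra cost.
        self.norm_context = (
            nn.LayerNorm(context_dim) if context_dim is not None else None
        )

    def forward(self, x: Tensor, **kwargs) -> Tensor:
        """Normalizes x, and the context kwarg if one is configured."""
        x = self.norm(x)
        if self.norm_context is not None:
            context = kwargs["context"]
            normed_context = self.norm_context(context)
            kwargs.update(context=normed_context)
        return self.fn(x, **kwargs)


class CrossAttentionStub(nn.Module):
    """Hypothetical stand-in for a cross-attention block."""

    def __init__(self, dim: int, context_dim: int) -> None:
        super().__init__()
        self.proj = nn.Linear(context_dim, dim)

    def forward(self, x: Tensor, context: Tensor) -> Tensor:
        # Pool the projected context and add it to x -- just enough
        # to exercise the context kwarg, not real attention.
        return x + self.proj(context).mean(dim=1, keepdim=True)


block = PreNorm(64, CrossAttentionStub(64, 32), context_dim=32)
x = torch.randn(2, 10, 64)        # (batch, seq, dim)
context = torch.randn(2, 7, 32)   # (batch, ctx_seq, context_dim)
out = block(x, context=context)   # both x and context are layer-normed first
print(out.shape)                  # torch.Size([2, 10, 64])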