summaryrefslogtreecommitdiff
path: root/text_recognizer/network/transformer/embedding/token.py
diff options
context:
space:
mode:
authorGustaf Rydholm <gustaf.rydholm@gmail.com>2023-08-25 23:19:14 +0200
committerGustaf Rydholm <gustaf.rydholm@gmail.com>2023-08-25 23:19:14 +0200
commit49ca6ade1a19f7f9c702171537fe4be0dfcda66d (patch)
tree20062ed1910758481f3d5fff11159706c7b990c6 /text_recognizer/network/transformer/embedding/token.py
parent0421daf6bd97596703f426ba61c401599b538eeb (diff)
Rename and add flash atten
Diffstat (limited to 'text_recognizer/network/transformer/embedding/token.py')
-rw-r--r--text_recognizer/network/transformer/embedding/token.py18
1 file changed, 18 insertions, 0 deletions
diff --git a/text_recognizer/network/transformer/embedding/token.py b/text_recognizer/network/transformer/embedding/token.py
new file mode 100644
index 0000000..1df2fd6
--- /dev/null
+++ b/text_recognizer/network/transformer/embedding/token.py
@@ -0,0 +1,18 @@
+from torch import nn, Tensor
+
+from text_recognizer.network.transformer.embedding.l2_norm import l2_norm
+
+
class TokenEmbedding(nn.Module):
    """Lookup-table token embedding with optional L2 normalisation.

    When ``use_l2`` is true the embedding weights are initialised with a
    tiny normal distribution (std=1e-5) and the looked-up vectors are
    L2-normalised on the way out; otherwise Kaiming-normal init is used
    and the raw embedding is returned.
    """

    def __init__(self, num_tokens: int, dim: int, use_l2: bool = True) -> None:
        """Build an embedding table of shape (num_tokens, dim)."""
        super().__init__()
        self.use_l2 = use_l2
        self.to_embedding = nn.Embedding(num_tokens, dim)
        weight = self.to_embedding.weight
        if use_l2:
            # Near-zero init: the subsequent L2 norm supplies the scale.
            nn.init.normal_(weight, std=1e-5)
        else:
            nn.init.kaiming_normal_(weight)

    def forward(self, x: Tensor) -> Tensor:
        """Embed token indices ``x``; L2-normalise if configured."""
        embedded = self.to_embedding(x)
        if self.use_l2:
            return l2_norm(embedded)
        return embedded