summaryrefslogtreecommitdiff
path: root/text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py
diff options
context:
space:
mode:
Diffstat (limited to 'text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py')
-rw-r--r--text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py16
1 files changed, 16 insertions, 0 deletions
diff --git a/text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py b/text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py
new file mode 100644
index 0000000..9466f6e
--- /dev/null
+++ b/text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py
@@ -0,0 +1,16 @@
+"""Absolute positional embedding."""
+from torch import nn, Tensor
+
+
class AbsolutePositionalEmbedding(nn.Module):
    """Learned absolute positional embedding.

    Maps each position index 0..seq_len-1 to a learned vector of size
    ``dim`` via an ``nn.Embedding`` table of ``max_seq_len`` rows.

    Args:
        dim: Dimensionality of each positional embedding vector.
        max_seq_len: Maximum sequence length the table can address.
    """

    def __init__(self, dim: int, max_seq_len: int) -> None:
        super().__init__()
        # Kept so forward() can give a clear error instead of an opaque
        # out-of-range embedding lookup for over-long sequences.
        self.max_seq_len = max_seq_len
        self.emb = nn.Embedding(max_seq_len, dim)
        self._weight_init()

    def _weight_init(self) -> None:
        """Initialize the embedding table (std=0.02, common for transformers)."""
        nn.init.normal_(self.emb.weight, std=0.02)

    def forward(self, x: Tensor) -> Tensor:
        """Return positional embeddings for the sequence length of ``x``.

        Args:
            x: Tensor whose second dimension is the sequence length
               (assumed layout (batch, seq, ...) — only ``x.shape[1]``
               and ``x.device`` are read).

        Returns:
            Tensor of shape (1, seq_len, dim), broadcastable over batch.

        Raises:
            ValueError: If ``x.shape[1]`` exceeds ``max_seq_len``.
        """
        seq_len = x.shape[1]
        if seq_len > self.max_seq_len:
            raise ValueError(
                f"Sequence length {seq_len} exceeds max_seq_len {self.max_seq_len}"
            )
        # NOTE: the original code referenced torch.arange without importing
        # torch at module level; `import torch` is required for this call.
        positions = torch.arange(seq_len, device=x.device)
        return self.emb(positions)[None, :, :]