summaryrefslogtreecommitdiff
path: root/text_recognizer
diff options
context:
space:
mode:
Diffstat (limited to 'text_recognizer')
-rw-r--r--text_recognizer/networks/transformer/embeddings/absolute.py17
1 file changed, 0 insertions, 17 deletions
diff --git a/text_recognizer/networks/transformer/embeddings/absolute.py b/text_recognizer/networks/transformer/embeddings/absolute.py
deleted file mode 100644
index 7140537..0000000
--- a/text_recognizer/networks/transformer/embeddings/absolute.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Absolute positional embedding."""
-import torch
-from torch import nn, Tensor
-
-
-class AbsolutePositionalEmbedding(nn.Module):
- def __init__(self, dim: int, max_seq_len: int) -> None:
- super().__init__()
- self.emb = nn.Embedding(max_seq_len, dim)
- self._weight_init()
-
- def _weight_init(self) -> None:
- nn.init.normal_(self.emb.weight, std=0.02)
-
- def forward(self, x: Tensor) -> Tensor:
- n = torch.arange(x.shape[1], device=x.device)
- return self.emb(n)[None, :, :]