author	Gustaf Rydholm <gustaf.rydholm@gmail.com>	2021-10-28 21:20:45 +0200
committer	Gustaf Rydholm <gustaf.rydholm@gmail.com>	2021-10-28 21:20:45 +0200
commit	863c42ea67823ad616ac7485fc8a1e5018cb4233 (patch)
tree	8babb81d06d2e879e9842429322bc6e77a7c8e19 /text_recognizer/networks/transformer/embeddings
parent	beab369f59c54de888e522d2f50602e758e3cc4b (diff)
Remove absolute positional embedding
Diffstat (limited to 'text_recognizer/networks/transformer/embeddings')
-rw-r--r--	text_recognizer/networks/transformer/embeddings/absolute.py	17
1 file changed, 0 insertions, 17 deletions
diff --git a/text_recognizer/networks/transformer/embeddings/absolute.py b/text_recognizer/networks/transformer/embeddings/absolute.py
deleted file mode 100644
index 7140537..0000000
--- a/text_recognizer/networks/transformer/embeddings/absolute.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Absolute positional embedding."""
-import torch
-from torch import nn, Tensor
-
-
-class AbsolutePositionalEmbedding(nn.Module):
- def __init__(self, dim: int, max_seq_len: int) -> None:
- super().__init__()
- self.emb = nn.Embedding(max_seq_len, dim)
- self._weight_init()
-
- def _weight_init(self) -> None:
- nn.init.normal_(self.emb.weight, std=0.02)
-
- def forward(self, x: Tensor) -> Tensor:
- n = torch.arange(x.shape[1], device=x.device)
- return self.emb(n)[None, :, :]
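
For context, the deleted module learned one embedding vector per position and broadcast it over the batch dimension. Below is a minimal sketch of how such an absolute positional embedding is typically consumed; the token-embedding setup, vocabulary size, and dimensions are illustrative assumptions, not code from this repository.

import torch
from torch import nn, Tensor


class AbsolutePositionalEmbedding(nn.Module):
    """The module as it existed before this commit."""

    def __init__(self, dim: int, max_seq_len: int) -> None:
        super().__init__()
        self.emb = nn.Embedding(max_seq_len, dim)
        nn.init.normal_(self.emb.weight, std=0.02)

    def forward(self, x: Tensor) -> Tensor:
        # Position ids 0..T-1 for a (B, T) batch; output is (1, T, dim)
        # so it broadcasts over the batch when added to token embeddings.
        n = torch.arange(x.shape[1], device=x.device)
        return self.emb(n)[None, :, :]


# Hypothetical usage: sum token and positional embeddings.
token_emb = nn.Embedding(100, 64)          # vocab_size=100, dim=64 (assumed)
pos_emb = AbsolutePositionalEmbedding(dim=64, max_seq_len=256)

tokens = torch.randint(0, 100, (2, 32))    # (batch=2, seq_len=32)
x = token_emb(tokens) + pos_emb(tokens)    # -> (2, 32, 64)

Note that the [None, :, :] indexing inserts a leading batch dimension, so the same learned positional table is shared across every sequence in the batch.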