summaryrefslogtreecommitdiff
path: root/text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py
diff options
context:
space:
mode:
authorGustaf Rydholm <gustaf.rydholm@gmail.com>2021-10-27 22:13:54 +0200
committerGustaf Rydholm <gustaf.rydholm@gmail.com>2021-10-27 22:13:54 +0200
commitfb90a53b1235fd836dee74452f3f2a621e0f363a (patch)
treedaae44aa5e7c1309a41a059594ce0c3fc92cbc26 /text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py
parent8c7a59d58e2ce6b18384c9fcdba2fd49e5450b0e (diff)
Rename transformer embeddings
Diffstat (limited to 'text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py')
-rw-r--r--text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py17
1 files changed, 0 insertions, 17 deletions
diff --git a/text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py b/text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py
deleted file mode 100644
index 7140537..0000000
--- a/text_recognizer/networks/transformer/positional_encodings/absolute_embedding.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Absolute positional embedding."""
-import torch
-from torch import nn, Tensor
-
-
class AbsolutePositionalEmbedding(nn.Module):
    """Learned absolute positional embedding.

    Produces a trainable embedding for each position index, returned with
    shape (1, seq_len, dim) so it broadcasts over the batch dimension when
    added to token embeddings.
    """

    def __init__(self, dim: int, max_seq_len: int) -> None:
        """Create the embedding table.

        Args:
            dim: Dimensionality of each positional embedding vector.
            max_seq_len: Maximum sequence length the table supports.
        """
        super().__init__()
        # Keep the limit so forward() can give a clear error instead of the
        # cryptic IndexError nn.Embedding raises on out-of-range indices.
        self.max_seq_len = max_seq_len
        self.emb = nn.Embedding(max_seq_len, dim)
        self._weight_init()

    def _weight_init(self) -> None:
        """Initialize the table with std=0.02, the usual transformer choice."""
        nn.init.normal_(self.emb.weight, std=0.02)

    def forward(self, x: Tensor) -> Tensor:
        """Return positional embeddings matching the sequence length of ``x``.

        Args:
            x: Tensor of shape (batch, seq_len, ...); only ``x.shape[1]`` and
                ``x.device`` are used.

        Returns:
            Tensor of shape (1, seq_len, dim), broadcastable over the batch.

        Raises:
            ValueError: If ``x.shape[1]`` exceeds ``max_seq_len``.
        """
        seq_len = x.shape[1]
        if seq_len > self.max_seq_len:
            raise ValueError(
                f"Sequence length {seq_len} exceeds max_seq_len {self.max_seq_len}."
            )
        positions = torch.arange(seq_len, device=x.device)
        return self.emb(positions)[None, :, :]