From 863c42ea67823ad616ac7485fc8a1e5018cb4233 Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm <gustaf.rydholm@gmail.com>
Date: Thu, 28 Oct 2021 21:20:45 +0200
Subject: Remove absolute positional embedding

---
 .../networks/transformer/embeddings/absolute.py         | 17 -----------------
 1 file changed, 17 deletions(-)
 delete mode 100644 text_recognizer/networks/transformer/embeddings/absolute.py

diff --git a/text_recognizer/networks/transformer/embeddings/absolute.py b/text_recognizer/networks/transformer/embeddings/absolute.py
deleted file mode 100644
index 7140537..0000000
--- a/text_recognizer/networks/transformer/embeddings/absolute.py
+++ /dev/null
@@ -1,17 +0,0 @@
-"""Absolute positional embedding."""
-import torch
-from torch import nn, Tensor
-
-
-class AbsolutePositionalEmbedding(nn.Module):
-    def __init__(self, dim: int, max_seq_len: int) -> None:
-        super().__init__()
-        self.emb = nn.Embedding(max_seq_len, dim)
-        self._weight_init()
-
-    def _weight_init(self) -> None:
-        nn.init.normal_(self.emb.weight, std=0.02)
-
-    def forward(self, x: Tensor) -> Tensor:
-        n = torch.arange(x.shape[1], device=x.device)
-        return self.emb(n)[None, :, :]
-- 
cgit v1.2.3-70-g09d2
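
Editor's note, for context on what this patch removes: the deleted module implemented a
learned absolute positional embedding, i.e. an nn.Embedding over positions 0..max_seq_len-1
(weights initialized with std 0.02) whose forward pass looks up the first seq_len positions
and returns a (1, seq_len, dim) tensor meant to be broadcast-added to token embeddings.
The sketch below shows how such a module is typically wired in; it is a minimal, hypothetical
usage example, not code from this repository. The surrounding names and sizes (token_emb,
the vocabulary size 100, dim 64, batch 2, seq_len 32) are illustrative assumptions.

    import torch
    from torch import nn, Tensor


    class AbsolutePositionalEmbedding(nn.Module):
        """Learned absolute positional embedding (as in the deleted module)."""

        def __init__(self, dim: int, max_seq_len: int) -> None:
            super().__init__()
            # One learnable vector per position, up to max_seq_len.
            self.emb = nn.Embedding(max_seq_len, dim)
            nn.init.normal_(self.emb.weight, std=0.02)

        def forward(self, x: Tensor) -> Tensor:
            # x is used only for its sequence length and device, not its values.
            n = torch.arange(x.shape[1], device=x.device)
            # (seq_len, dim) -> (1, seq_len, dim) so it broadcasts over the batch.
            return self.emb(n)[None, :, :]


    # Hypothetical usage: add positional information to token embeddings.
    token_emb = nn.Embedding(100, 64)                 # assumed vocab size / dim
    pos_emb = AbsolutePositionalEmbedding(dim=64, max_seq_len=512)

    tokens = torch.randint(0, 100, (2, 32))           # (batch, seq_len)
    x = token_emb(tokens)                             # (2, 32, 64)
    x = x + pos_emb(x)                                # + (1, 32, 64), broadcast add

After this patch, any module that imported AbsolutePositionalEmbedding from
text_recognizer/networks/transformer/embeddings/absolute.py would need to drop the
broadcast add shown above or switch to another positional scheme.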