author     Gustaf Rydholm <gustaf.rydholm@gmail.com>    2022-09-18 18:11:21 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>    2022-09-18 18:11:53 +0200
commit     2cc6aa059139b57057609817913ad515063c2eab (patch)
tree       5433f69a5eaf63e064a100bf900783127c7b1ff4 /text_recognizer/networks/transformer/embeddings
parent     88caa5c466225d4752541c352c5777235f8f0c61 (diff)
Format imports
Diffstat (limited to 'text_recognizer/networks/transformer/embeddings')
-rw-r--r--  text_recognizer/networks/transformer/embeddings/absolute.py  6
-rw-r--r--  text_recognizer/networks/transformer/embeddings/axial.py     5
-rw-r--r--  text_recognizer/networks/transformer/embeddings/fourier.py   3
-rw-r--r--  text_recognizer/networks/transformer/embeddings/rotary.py    3
4 files changed, 8 insertions, 9 deletions
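
The commit applies one import convention across all four modules: standard-library imports first, then third-party imports, each group alphabetized, with multiple names from the same module combined into a single statement. Assuming isort-style grouping, the resulting header of a module touched here looks like this (names taken from the diffs below):

# standard library
from functools import reduce
from operator import mul

# third party
import torch
import torch.nn.functional as F
from einops import rearrange
from torch import Tensor, nn
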
diff --git a/text_recognizer/networks/transformer/embeddings/absolute.py b/text_recognizer/networks/transformer/embeddings/absolute.py
index e5cdc18..9274b55 100644
--- a/text_recognizer/networks/transformer/embeddings/absolute.py
+++ b/text_recognizer/networks/transformer/embeddings/absolute.py
@@ -1,9 +1,9 @@
"""Absolute positional embedding."""
-from einops import rearrange
import torch
-from torch import nn
import torch.nn.functional as F
+from einops import rearrange
+from torch import nn
def l2norm(t, groups=1):
@@ -15,7 +15,7 @@ def l2norm(t, groups=1):
class AbsolutePositionalEmbedding(nn.Module):
def __init__(self, dim, max_seq_len, l2norm_embed=False):
super().__init__()
- self.scale = dim ** -0.5 if not l2norm_embed else 1.0
+ self.scale = dim**-0.5 if not l2norm_embed else 1.0
self.max_seq_len = max_seq_len
self.l2norm_embed = l2norm_embed
self.emb = nn.Embedding(max_seq_len, dim)
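
Besides the import reordering, the only change in absolute.py is cosmetic (`dim ** -0.5` becomes `dim**-0.5`). The `scale` is presumably multiplied onto the looked-up embedding in the forward pass, which this diff does not show. A minimal, self-contained sketch of how such a module is typically used, assuming behavior similar to the lucidrains x-transformers implementation this file resembles (the forward body and the usage are illustrative, not the repository's code):

import torch
import torch.nn.functional as F
from torch import nn

class AbsolutePositionalEmbedding(nn.Module):
    """Sketch: one learned embedding per position, scaled by dim**-0.5 unless l2-normalized."""

    def __init__(self, dim, max_seq_len, l2norm_embed=False):
        super().__init__()
        self.scale = dim**-0.5 if not l2norm_embed else 1.0
        self.max_seq_len = max_seq_len
        self.l2norm_embed = l2norm_embed
        self.emb = nn.Embedding(max_seq_len, dim)

    def forward(self, x):
        # Look up one embedding per position; l2norm_embed swaps scaling for normalization (assumed).
        pos = torch.arange(x.shape[1], device=x.device)
        pos_emb = self.emb(pos) * self.scale
        return F.normalize(pos_emb, dim=-1) if self.l2norm_embed else pos_emb

tokens = torch.randn(2, 16, 64)                      # (batch, seq_len, dim)
pos = AbsolutePositionalEmbedding(dim=64, max_seq_len=128)
out = tokens + pos(tokens)                           # broadcast add over the batch dimension
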
diff --git a/text_recognizer/networks/transformer/embeddings/axial.py b/text_recognizer/networks/transformer/embeddings/axial.py
index 7b84e12..25d8f60 100644
--- a/text_recognizer/networks/transformer/embeddings/axial.py
+++ b/text_recognizer/networks/transformer/embeddings/axial.py
@@ -3,10 +3,11 @@
Stolen from:
https://github.com/lucidrains/axial-attention/blob/eff2c10c2e76c735a70a6b995b571213adffbbb7/axial_attention/axial_attention.py#L100
"""
+from functools import reduce
+from operator import mul
+
import torch
from torch import nn
-from operator import mul
-from functools import reduce
class AxialPositionalEmbedding(nn.Module):
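
The reduce and mul moved into the standard-library group here are presumably used to multiply the per-axis shape into a total number of positions; the class body is not shown in this diff. For orientation only, a rough 2D sketch of the axial idea (one learned embedding per axis, broadcast and summed, so parameters grow with H + W rather than H * W), not the repository's implementation:

import torch
from torch import nn

class AxialPositionalEmbedding2D(nn.Module):
    def __init__(self, dim, shape):
        super().__init__()
        height, width = shape
        # One parameter table per axis, shaped for broadcasting against (batch, H, W, dim).
        self.height_emb = nn.Parameter(torch.randn(height, 1, dim) * 0.02)
        self.width_emb = nn.Parameter(torch.randn(1, width, dim) * 0.02)

    def forward(self, x):
        # x: (batch, height, width, dim); broadcasting sums the two axis embeddings.
        return x + self.height_emb + self.width_emb
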
diff --git a/text_recognizer/networks/transformer/embeddings/fourier.py b/text_recognizer/networks/transformer/embeddings/fourier.py
index 7843c60..28da7a1 100644
--- a/text_recognizer/networks/transformer/embeddings/fourier.py
+++ b/text_recognizer/networks/transformer/embeddings/fourier.py
@@ -1,8 +1,7 @@
"""Fourier positional embedding."""
import numpy as np
import torch
-from torch import nn
-from torch import Tensor
+from torch import Tensor, nn
class PositionalEncoding(nn.Module):
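
fourier.py keeps numpy alongside torch, which fits the classic fixed sinusoidal encoding: PE(pos, 2i) = sin(pos / 10000^(2i/d)) and PE(pos, 2i+1) = cos(pos / 10000^(2i/d)). The class body is not part of this diff; the sketch below is the textbook formulation, assuming an even dim:

import numpy as np
import torch
from torch import Tensor, nn

class SinusoidalPositionalEncoding(nn.Module):
    def __init__(self, dim, max_len=1000):
        super().__init__()
        position = torch.arange(max_len).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, dim, 2) * (-np.log(10000.0) / dim))
        pe = torch.zeros(max_len, dim)
        pe[:, 0::2] = torch.sin(position * div_term)   # even feature indices: sine
        pe[:, 1::2] = torch.cos(position * div_term)   # odd feature indices: cosine
        self.register_buffer("pe", pe)

    def forward(self, x: Tensor) -> Tensor:
        # x: (batch, seq_len, dim); add the fixed encoding for the first seq_len positions.
        return x + self.pe[: x.shape[1]]
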
diff --git a/text_recognizer/networks/transformer/embeddings/rotary.py b/text_recognizer/networks/transformer/embeddings/rotary.py
index 722478e..cc91206 100644
--- a/text_recognizer/networks/transformer/embeddings/rotary.py
+++ b/text_recognizer/networks/transformer/embeddings/rotary.py
@@ -7,8 +7,7 @@ Explanation of roatary:
https://blog.eleuther.ai/rotary-embeddings/
"""
import torch
-from torch import nn
-from torch import Tensor
+from torch import Tensor, nn
class RotaryEmbedding(nn.Module):
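
The rotary module's body is likewise not shown; per the EleutherAI post referenced in its docstring, rotary embeddings rotate query/key features by position-dependent angles instead of adding a vector. A minimal sketch under that assumption (names and shapes here are illustrative, not the repository's code):

import torch
from torch import Tensor, nn

class RotaryEmbedding(nn.Module):
    def __init__(self, dim):
        super().__init__()
        # Geometric range of frequencies, one per pair of feature dimensions.
        inv_freq = 1.0 / (10000 ** (torch.arange(0, dim, 2).float() / dim))
        self.register_buffer("inv_freq", inv_freq)

    def forward(self, seq_len, device):
        t = torch.arange(seq_len, device=device).type_as(self.inv_freq)
        freqs = torch.einsum("i,j->ij", t, self.inv_freq)
        return torch.cat((freqs, freqs), dim=-1)       # (seq_len, dim)

def rotate_half(x: Tensor) -> Tensor:
    x1, x2 = x.chunk(2, dim=-1)
    return torch.cat((-x2, x1), dim=-1)

def apply_rotary(x: Tensor, freqs: Tensor) -> Tensor:
    # Rotate features by the per-position angles; applied to queries and keys before attention.
    return x * freqs.cos() + rotate_half(x) * freqs.sin()

q = torch.randn(1, 8, 16, 64)                          # (batch, heads, seq_len, head_dim)
freqs = RotaryEmbedding(64)(seq_len=16, device=q.device)
q_rot = apply_rotary(q, freqs)
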