summaryrefslogtreecommitdiff
path: root/text_recognizer/networks/transformer/attention.py
diff options
context:
space:
mode:
author: Gustaf Rydholm <gustaf.rydholm@gmail.com> 2021-11-21 21:35:50 +0100
committer: Gustaf Rydholm <gustaf.rydholm@gmail.com> 2021-11-21 21:35:50 +0100
commit: fe43389ad26559cc09f2fd00441c02556ff674c3 (patch)
tree: 0ce6a70c0dc2ea54bac7e19b539af5aec4ebab19 /text_recognizer/networks/transformer/attention.py
parent: a9363f3944f1ad31590c48d5d51c45df3bbf43b1 (diff)
Format files
Diffstat (limited to 'text_recognizer/networks/transformer/attention.py')
-rw-r--r-- text_recognizer/networks/transformer/attention.py | 2
1 file changed, 0 insertions, 2 deletions
diff --git a/text_recognizer/networks/transformer/attention.py b/text_recognizer/networks/transformer/attention.py
index b86636e..87792a9 100644
--- a/text_recognizer/networks/transformer/attention.py
+++ b/text_recognizer/networks/transformer/attention.py
@@ -20,7 +20,6 @@ class Attention(nn.Module):
"""Standard attention."""
def __attrs_pre_init__(self) -> None:
- """Pre init constructor."""
super().__init__()
dim: int = attr.ib()
@@ -34,7 +33,6 @@ class Attention(nn.Module):
fc: nn.Linear = attr.ib(init=False)
def __attrs_post_init__(self) -> None:
- """Post init configuration."""
self.scale = self.dim ** -0.5
inner_dim = self.num_heads * self.dim_head