summaryrefslogtreecommitdiff
path: root/text_recognizer/networks/transformer/attention.py
diff options
context:
space:
mode:
author: Gustaf Rydholm <gustaf.rydholm@gmail.com> 2021-05-13 23:02:20 +0200
committer: Gustaf Rydholm <gustaf.rydholm@gmail.com> 2021-05-13 23:02:20 +0200
commit: 8c7768e8d321efec558e12bff9b89b2de615d541 (patch)
tree: 67f5928c5584e8826c01834d06d34cd7e60546ba /text_recognizer/networks/transformer/attention.py
parent: c9c60678673e19ad3367339eb8e7a093e5a98474 (diff)
Decoder module working
Diffstat (limited to 'text_recognizer/networks/transformer/attention.py')
-rw-r--r-- text_recognizer/networks/transformer/attention.py | 1
1 file changed, 1 insertion, 0 deletions
diff --git a/text_recognizer/networks/transformer/attention.py b/text_recognizer/networks/transformer/attention.py
index eabeadf..a3b53f0 100644
--- a/text_recognizer/networks/transformer/attention.py
+++ b/text_recognizer/networks/transformer/attention.py
@@ -23,6 +23,7 @@ class Attention(nn.Module):
dropout_rate: float = 0.0,
causal: bool = False,
) -> None:
+ super().__init__()
self.scale = dim ** -0.5
self.num_heads = num_heads
self.causal = causal