author     Gustaf Rydholm <gustaf.rydholm@gmail.com>    2021-11-21 21:35:50 +0100
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>    2021-11-21 21:35:50 +0100
commit     fe43389ad26559cc09f2fd00441c02556ff674c3 (patch)
tree       0ce6a70c0dc2ea54bac7e19b539af5aec4ebab19 /text_recognizer/networks/transformer/attention.py
parent     a9363f3944f1ad31590c48d5d51c45df3bbf43b1 (diff)
Format files
Diffstat (limited to 'text_recognizer/networks/transformer/attention.py')
-rw-r--r--  text_recognizer/networks/transformer/attention.py  2
1 files changed, 0 insertions, 2 deletions
diff --git a/text_recognizer/networks/transformer/attention.py b/text_recognizer/networks/transformer/attention.py
index b86636e..87792a9 100644
--- a/text_recognizer/networks/transformer/attention.py
+++ b/text_recognizer/networks/transformer/attention.py
@@ -20,7 +20,6 @@ class Attention(nn.Module):
     """Standard attention."""
 
     def __attrs_pre_init__(self) -> None:
-        """Pre init constructor."""
         super().__init__()
 
     dim: int = attr.ib()
@@ -34,7 +33,6 @@ class Attention(nn.Module):
     fc: nn.Linear = attr.ib(init=False)
 
     def __attrs_post_init__(self) -> None:
-        """Post init configuration."""
         self.scale = self.dim ** -0.5
         inner_dim = self.num_heads * self.dim_head
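The two deleted lines are the one-line docstrings on the attrs lifecycle hooks. For readers unfamiliar with the pattern the diff touches, below is a minimal, self-contained sketch of how an attrs-decorated nn.Module uses those hooks; the decorator arguments, default field values, and the fc layer shape are assumptions for illustration, not the repository's exact code. __attrs_pre_init__ runs before attrs assigns the declared fields, so it is the place to call nn.Module.__init__ (field assignment goes through nn.Module.__setattr__, which requires the module to be initialized first); __attrs_post_init__ runs after assignment and derives values such as the attention scale.

import attr
from torch import nn


@attr.s(eq=False)  # eq=False assumed: attrs-generated __eq__/__hash__ clash with nn.Module
class Attention(nn.Module):
    """Standard attention (sketch of the attrs + nn.Module pattern)."""

    dim: int = attr.ib()
    num_heads: int = attr.ib(default=8)    # default assumed
    dim_head: int = attr.ib(default=64)    # default assumed

    def __attrs_pre_init__(self) -> None:
        # Runs before attrs assigns the fields above; nn.Module must be
        # initialized first so attribute assignment is routed correctly.
        super().__init__()

    def __attrs_post_init__(self) -> None:
        # Runs after field assignment; derive configuration from the fields.
        self.scale = self.dim ** -0.5
        inner_dim = self.num_heads * self.dim_head
        self.fc = nn.Linear(inner_dim, self.dim)  # output projection; shape assumed

Constructing Attention(dim=256) then executes in order: __attrs_pre_init__, attrs field assignment, __attrs_post_init__.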