diff options
Diffstat (limited to 'text_recognizer/networks/transformer/attention.py')
-rw-r--r-- | text_recognizer/networks/transformer/attention.py | 2 |
1 file changed, 0 insertions, 2 deletions
diff --git a/text_recognizer/networks/transformer/attention.py b/text_recognizer/networks/transformer/attention.py index b86636e..87792a9 100644 --- a/text_recognizer/networks/transformer/attention.py +++ b/text_recognizer/networks/transformer/attention.py @@ -20,7 +20,6 @@ class Attention(nn.Module): """Standard attention.""" def __attrs_pre_init__(self) -> None: - """Pre init constructor.""" super().__init__() dim: int = attr.ib() @@ -34,7 +33,6 @@ class Attention(nn.Module): fc: nn.Linear = attr.ib(init=False) def __attrs_post_init__(self) -> None: - """Post init configuration.""" self.scale = self.dim ** -0.5 inner_dim = self.num_heads * self.dim_head |