author     Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-06-06 23:19:35 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-06-06 23:19:35 +0200
commit     01d6e5fc066969283df99c759609df441151e9c5 (patch)
tree       ecd1459e142356d0c7f50a61307b760aca813248 /text_recognizer/networks/transformer/layers.py
parent     f4688482b4898c0b342d6ae59839dc27fbf856c6 (diff)
Working on fixing decoder transformer
Diffstat (limited to 'text_recognizer/networks/transformer/layers.py')
-rw-r--r--  text_recognizer/networks/transformer/layers.py | 5 ++---
1 file changed, 2 insertions(+), 3 deletions(-)
diff --git a/text_recognizer/networks/transformer/layers.py b/text_recognizer/networks/transformer/layers.py
index b2c703f..a44a525 100644
--- a/text_recognizer/networks/transformer/layers.py
+++ b/text_recognizer/networks/transformer/layers.py
@@ -1,8 +1,6 @@
 """Generates the attention layer architecture."""
 from functools import partial
-from typing import Any, Dict, Optional, Type
-
-from click.types import Tuple
+from typing import Any, Dict, Optional, Tuple, Type
 
 from torch import nn, Tensor
 
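A note on the import fix above: click.types really does export a class named Tuple (a composite CLI parameter type), which is likely how an auto-import slipped it in unnoticed; it is not the typing generic and cannot be subscripted in annotations. A minimal sketch (mine, not part of the commit) showing the distinction:

    # Sketch: why `from click.types import Tuple` was a latent bug.
    # click.types.Tuple is a CLI parameter type; subscripting it, as an
    # annotation like Tuple[int, int] does, raises TypeError at import time.
    from typing import Tuple  # the correct generic for annotations

    def shape_of(batch: int, heads: int) -> Tuple[int, int]:
        """Toy annotated function; valid only with typing.Tuple."""
        return batch, heads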
@@ -30,6 +28,7 @@ class AttentionLayers(nn.Module):
         pre_norm: bool = True,
     ) -> None:
         super().__init__()
+        self.dim = dim
         attn_fn = partial(attn_fn, dim=dim, num_heads=num_heads, **attn_kwargs)
         norm_fn = partial(norm_fn, dim)
         ff_fn = partial(ff_fn, dim=dim, **ff_kwargs)
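The added self.dim = dim stores the layer width on the module itself. A plausible motivation (an assumption on my part; the consuming code is not shown in this commit) is that downstream components, such as the decoder being fixed, can size themselves from the attention stack without re-plumbing the config. A minimal sketch with a hypothetical TinyDecoder:

    # Sketch (hypothetical): sizing a projection off the exposed `dim` attribute.
    from torch import nn

    class TinyDecoder(nn.Module):
        def __init__(self, attention_layers: nn.Module, num_classes: int) -> None:
            super().__init__()
            self.layers = attention_layers
            # Works because AttentionLayers now sets `self.dim = dim`.
            self.head = nn.Linear(attention_layers.dim, num_classes)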