diff options
author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-06-06 23:19:35 +0200 |
---|---|---|
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-06-06 23:19:35 +0200 |
commit | 01d6e5fc066969283df99c759609df441151e9c5 (patch) | |
tree | ecd1459e142356d0c7f50a61307b760aca813248 /text_recognizer/networks/transformer/transformer.py | |
parent | f4688482b4898c0b342d6ae59839dc27fbf856c6 (diff) |
Working on fixing decoder transformer
Diffstat (limited to 'text_recognizer/networks/transformer/transformer.py')
-rw-r--r-- | text_recognizer/networks/transformer/transformer.py | 7 |
1 file changed, 4 insertions, 3 deletions
diff --git a/text_recognizer/networks/transformer/transformer.py b/text_recognizer/networks/transformer/transformer.py index 60ab1ce..31088b4 100644 --- a/text_recognizer/networks/transformer/transformer.py +++ b/text_recognizer/networks/transformer/transformer.py @@ -19,7 +19,9 @@ class Transformer(nn.Module): emb_dropout: float = 0.0, use_pos_emb: bool = True, ) -> None: + super().__init__() dim = attn_layers.dim + self.attn_layers = attn_layers emb_dim = emb_dim if emb_dim is not None else dim self.max_seq_len = max_seq_len @@ -32,7 +34,6 @@ class Transformer(nn.Module): ) self.project_emb = nn.Linear(emb_dim, dim) if emb_dim != dim else nn.Identity() - self.attn_layers = attn_layers self.norm = nn.LayerNorm(dim) self._init_weights() @@ -45,12 +46,12 @@ class Transformer(nn.Module): def forward( self, x: Tensor, - mask: Optional[Tensor], + mask: Optional[Tensor] = None, return_embeddings: bool = False, **kwargs: Any ) -> Tensor: b, n, device = *x.shape, x.device - x += self.token_emb(x) + x = self.token_emb(x) if self.pos_emb is not None: x += self.pos_emb(x) x = self.emb_dropout(x) |