commit | 913cf87dec92ed474a3c211ec8b305665513186a
---|---
tree | c934e528e884e7b0eea980268cd19d9f0e550439
parent | 5e9a7a611284c37b7382f271d989d1ef70546d10
author | Gustaf Rydholm <gustaf.rydholm@gmail.com> 2021-11-03 22:13:08 +0100
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> 2021-11-03 22:13:08 +0100
Fix output from attn modules
text_recognizer/networks/transformer/layers.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/text_recognizer/networks/transformer/layers.py b/text_recognizer/networks/transformer/layers.py
index 941c141..f740244 100644
--- a/text_recognizer/networks/transformer/layers.py
+++ b/text_recognizer/networks/transformer/layers.py
@@ -91,9 +91,9 @@ class AttentionLayers(nn.Module):
             x = norm(x)
 
             if layer_type == "a":
-                out, _ = block(x=x, mask=mask)
+                out = block(x=x, mask=mask)
             elif layer_type == "c":
-                out, _ = block(x, context=context, mask=mask, context_mask=context_mask)
+                out = block(x, context=context, mask=mask, context_mask=context_mask)
             elif layer_type == "f":
                 out = block(x)
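For context, the change assumes the attention blocks now return only the output tensor rather than an `(output, attention_weights)` tuple, so the caller drops the `out, _ = ...` unpacking. Below is a minimal sketch of that calling convention; the `Attention` module here is a hypothetical stand-in built on `nn.MultiheadAttention`, not the repository's actual implementation.

```python
import torch
from torch import nn


class Attention(nn.Module):
    """Hypothetical attention block that returns only the output tensor."""

    def __init__(self, dim: int, num_heads: int = 8) -> None:
        super().__init__()
        self.attn = nn.MultiheadAttention(dim, num_heads, batch_first=True)

    def forward(self, x, context=None, mask=None, context_mask=None):
        # Self-attention when no context is given; cross-attention otherwise.
        kv = x if context is None else context
        keep = mask if context is None else context_mask
        # PyTorch's key_padding_mask marks positions to IGNORE (True = pad),
        # while a keep-mask marks positions to ATTEND to, hence the inversion.
        key_padding_mask = None if keep is None else ~keep
        out, _ = self.attn(x, kv, kv, key_padding_mask=key_padding_mask)
        # Return a single tensor so callers can write `out = block(...)`.
        return out


# Usage mirroring the "a" (self-attention) and "c" (cross-attention) branches:
block = Attention(dim=64)
x = torch.randn(2, 10, 64)
mask = torch.ones(2, 10, dtype=torch.bool)
out = block(x=x, mask=mask)  # layer_type == "a"

ctx = torch.randn(2, 7, 64)
ctx_mask = torch.ones(2, 7, dtype=torch.bool)
out = block(x, context=ctx, mask=mask, context_mask=ctx_mask)  # layer_type == "c"
```

With this signature, the `"a"` and `"c"` branches in `AttentionLayers.forward` use plain assignment, matching the `"f"` (feed-forward) branch that already returned a single tensor.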