From 913cf87dec92ed474a3c211ec8b305665513186a Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm
Date: Wed, 3 Nov 2021 22:13:08 +0100
Subject: Fix output from attn modules

---
 text_recognizer/networks/transformer/layers.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

(limited to 'text_recognizer/networks/transformer/layers.py')

diff --git a/text_recognizer/networks/transformer/layers.py b/text_recognizer/networks/transformer/layers.py
index 941c141..f740244 100644
--- a/text_recognizer/networks/transformer/layers.py
+++ b/text_recognizer/networks/transformer/layers.py
@@ -91,9 +91,9 @@ class AttentionLayers(nn.Module):
                 x = norm(x)
 
             if layer_type == "a":
-                out, _ = block(x=x, mask=mask)
+                out = block(x=x, mask=mask)
             elif layer_type == "c":
-                out, _ = block(x, context=context, mask=mask, context_mask=context_mask)
+                out = block(x, context=context, mask=mask, context_mask=context_mask)
             elif layer_type == "f":
                 out = block(x)
 
-- 
cgit v1.2.3-70-g09d2
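
Note on the change: before this patch, the "a" (self-attention) and "c" (cross-attention) branches unpacked a tuple, which implies the attention blocks previously returned (output, attention_weights); after the patch they are expected to return the output tensor alone. The sketch below is a minimal, hypothetical illustration of that contract, assuming a small PyTorch wrapper around nn.MultiheadAttention; the Attention class, its constructor arguments, and its internals are illustrative only and are not taken from the repository.

from typing import Optional

import torch
from torch import Tensor, nn


class Attention(nn.Module):
    """Toy stand-in for the repo's attention block (hypothetical interface).

    It only illustrates the contract the patch relies on: forward() returns a
    single Tensor, not an (output, attention_weights) tuple.
    """

    def __init__(self, dim: int, num_heads: int = 4) -> None:
        super().__init__()
        self.attn = nn.MultiheadAttention(dim, num_heads, batch_first=True)

    def forward(
        self,
        x: Tensor,
        context: Optional[Tensor] = None,
        mask: Optional[Tensor] = None,
        context_mask: Optional[Tensor] = None,
    ) -> Tensor:
        # Self-attention when no context is given, cross-attention otherwise.
        # Masking is omitted to keep the sketch focused on the return type.
        kv = x if context is None else context
        out, _ = self.attn(x, kv, kv, need_weights=False)
        return out  # a single tensor, so callers write `out = block(...)`


# Call sites in AttentionLayers after the patch: no tuple unpacking.
block = Attention(dim=64)
x = torch.randn(2, 10, 64)
out = block(x=x, mask=None)                     # "a" branch style
out = block(x, context=torch.randn(2, 7, 64))   # "c" branch style
print(out.shape)  # torch.Size([2, 10, 64])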