author     Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-07-30 23:15:03 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-07-30 23:15:03 +0200
commit     7268035fb9e57342612a8cc50a1fe04e8841ca2f (patch)
tree       8d4cf3743975bd25f2c04d6a56ff3d4608a7e8d9 /training/conf/network/decoder/transformer_decoder.yaml
parent     92fc1c7ed2f9f64552be8f71d9b8ab0d5a0a88d4 (diff)
attr bug fix, properly loading network
Diffstat (limited to 'training/conf/network/decoder/transformer_decoder.yaml')
-rw-r--r--  training/conf/network/decoder/transformer_decoder.yaml | 7
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/training/conf/network/decoder/transformer_decoder.yaml b/training/conf/network/decoder/transformer_decoder.yaml
index 60c5762..3122de1 100644
--- a/training/conf/network/decoder/transformer_decoder.yaml
+++ b/training/conf/network/decoder/transformer_decoder.yaml
@@ -1,21 +1,20 @@
+defaults:
+  - rotary_emb: null
+
 _target_: text_recognizer.networks.transformer.Decoder
 dim: 256
 depth: 2
 num_heads: 8
 attn_fn: text_recognizer.networks.transformer.attention.Attention
 attn_kwargs:
-  num_heads: 8
   dim_head: 64
   dropout_rate: 0.2
 norm_fn: torch.nn.LayerNorm
 ff_fn: text_recognizer.networks.transformer.mlp.FeedForward
 ff_kwargs:
-  dim: 256
   dim_out: null
   expansion_factor: 4
   glu: true
   dropout_rate: 0.2
-rotary_emb: null
-rotary_emb_dim: null
 cross_attend: true
 pre_norm: true
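
For context, below is a minimal sketch of how the patched config could be loaded and the decoder instantiated. It is not the repository's actual training entry point: the config path, the availability of hydra-core and omegaconf next to the text_recognizer package, and the Decoder's keyword arguments are assumptions, and the `defaults` list is stripped here because it is only resolved during full Hydra composition.

# A minimal sketch (not the repo's training entry point). Assumes hydra-core and
# omegaconf are installed alongside text_recognizer and the path below matches
# this repo's layout.
from hydra.utils import instantiate
from omegaconf import OmegaConf

cfg = OmegaConf.load("training/conf/network/decoder/transformer_decoder.yaml")
cfg.pop("defaults", None)   # Hydra composition metadata, not a Decoder argument
decoder = instantiate(cfg)  # resolves _target_ -> text_recognizer.networks.transformer.Decoder
print(type(decoder))

Moving the rotary embedding into a `defaults` group (rather than the removed top-level rotary_emb/rotary_emb_dim keys) lets a rotary-embedding config be swapped in at composition time, e.g. with a command-line override of the network/decoder/rotary_emb group, while `null` keeps it disabled by default.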