author    Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-10-27 22:16:39 +0200
committer Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-10-27 22:16:39 +0200
commit    3e24b92ee1bac124ea8c7bddb15236ccc5fe300d (patch)
tree      81803764977345b9264b166926024559908cb066 /training/conf/experiment/conv_transformer_paragraphs_wp.yaml
parent    4a6550ddef7d1f1971737bc22715db6381441f79 (diff)
Update to configs
Diffstat (limited to 'training/conf/experiment/conv_transformer_paragraphs_wp.yaml')
-rw-r--r--  training/conf/experiment/conv_transformer_paragraphs_wp.yaml  6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/training/conf/experiment/conv_transformer_paragraphs_wp.yaml b/training/conf/experiment/conv_transformer_paragraphs_wp.yaml
index ebaa17a..91fba9a 100644
--- a/training/conf/experiment/conv_transformer_paragraphs_wp.yaml
+++ b/training/conf/experiment/conv_transformer_paragraphs_wp.yaml
@@ -103,14 +103,14 @@ network:
attn_fn: text_recognizer.networks.transformer.attention.Attention
attn_kwargs:
dim_head: 32
- dropout_rate: 0.2
+ dropout_rate: 0.05
norm_fn: text_recognizer.networks.transformer.norm.ScaleNorm
ff_fn: text_recognizer.networks.transformer.mlp.FeedForward
ff_kwargs:
dim_out: null
expansion_factor: 4
glu: true
- dropout_rate: 0.2
+ dropout_rate: 0.05
cross_attend: true
pre_norm: true
rotary_emb:
@@ -124,7 +124,7 @@ network:
token_pos_embedding:
_target_: text_recognizer.networks.transformer.positional_encodings.PositionalEncoding
hidden_dim: *hidden_dim
- dropout_rate: 0.2
+ dropout_rate: 0.05
max_len: *max_output_len
model:
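The commit lowers dropout_rate from 0.2 to 0.05 in three spots of the decoder config: the attention kwargs, the feed-forward kwargs, and the token positional encoding. As a minimal, hypothetical sketch of how such a Hydra-style entry is typically consumed, the `_target_` class is instantiated with the remaining keys as keyword arguments, so the lower rate ends up in an `nn.Dropout` module. The implementation below only mirrors the constructor signature implied by the config keys; it is not the actual `text_recognizer` code, and the concrete values in the usage line are placeholders.

```python
# Hypothetical sketch, not the project's implementation: a positional encoding
# whose constructor matches the config keys (hidden_dim, dropout_rate, max_len).
import torch
import torch.nn as nn


class PositionalEncoding(nn.Module):
    """Sinusoidal token positional encoding followed by dropout."""

    def __init__(self, hidden_dim: int, dropout_rate: float, max_len: int) -> None:
        super().__init__()
        self.dropout = nn.Dropout(p=dropout_rate)  # 0.05 after this change
        position = torch.arange(max_len).unsqueeze(1)
        div_term = torch.exp(
            torch.arange(0, hidden_dim, 2)
            * (-torch.log(torch.tensor(10000.0)) / hidden_dim)
        )
        pe = torch.zeros(max_len, hidden_dim)
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        self.register_buffer("pe", pe)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # x: (batch, seq_len, hidden_dim); add the encoding, then apply dropout.
        x = x + self.pe[: x.size(1)]
        return self.dropout(x)


# Hydra resolves the `_target_` / keyword entries into a call roughly like this.
# The real values come from the *hidden_dim and *max_output_len anchors elsewhere
# in the config; 256 and 1024 below are only illustrative.
token_pos_embedding = PositionalEncoding(hidden_dim=256, dropout_rate=0.05, max_len=1024)
```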