author     Gustaf Rydholm <gustaf.rydholm@gmail.com>  2022-06-19 20:58:36 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>  2022-06-19 20:58:36 +0200
commit     8689e8da65ed02a13975956d70216d7f33db1688 (patch)
tree       b5ce932f0db01ec3fe1bc0b634992a6c11e05c61 /training/conf/experiment/conv_transformer_lines.yaml
parent     2a6f02f22a58d281519d3827ea956af5c02a8ea2 (diff)
Fix lr and optim conf
Diffstat (limited to 'training/conf/experiment/conv_transformer_lines.yaml')
-rw-r--r--  training/conf/experiment/conv_transformer_lines.yaml | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/training/conf/experiment/conv_transformer_lines.yaml b/training/conf/experiment/conv_transformer_lines.yaml
index 48df78d..eb9bc9e 100644
--- a/training/conf/experiment/conv_transformer_lines.yaml
+++ b/training/conf/experiment/conv_transformer_lines.yaml
@@ -6,8 +6,8 @@ defaults:
   - override /datamodule: iam_lines
   - override /network: conv_transformer
   - override /model: lit_transformer
-  - override /lr_schedulers: null
-  - override /optimizers: null
+  - override /lr_scheduler: null
+  - override /optimizer: null
 
 epochs: &epochs 512
 ignore_index: &ignore_index 3
@@ -28,7 +28,7 @@ callbacks:
     annealing_strategy: cos
     device: null
 
-optimizers:
+optimizer:
   _target_: torch.optim.RAdam
   lr: 3.0e-4
   betas: [0.9, 0.999]
@@ -36,7 +36,7 @@ optimizers:
   eps: 1.0e-8
   parameters: network
 
-lr_schedulers:
+lr_scheduler:
   _target_: torch.optim.lr_scheduler.OneCycleLR
   max_lr: 3.0e-4
   total_steps: null
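
For context, the renamed singular optimizer and lr_scheduler blocks map onto plain PyTorch roughly as in the sketch below. This is an illustrative sketch only, not the repository's training code: the network module and the total_steps value (null in the config, resolved at runtime) are placeholders, and the parameters: network key is presumably consumed by the project's own instantiation code rather than passed to RAdam, so it is omitted here.

# Illustrative sketch only -- not the repository's code. Shows what the
# optimizer and lr_scheduler config blocks above would construct.
import torch
from torch import nn

# Placeholder for the conv_transformer network defined elsewhere in the config.
network = nn.Linear(8, 8)

# optimizer: torch.optim.RAdam with lr/betas/eps taken from the YAML above.
optimizer = torch.optim.RAdam(
    network.parameters(),
    lr=3.0e-4,
    betas=(0.9, 0.999),
    eps=1.0e-8,
)

# lr_scheduler: OneCycleLR; total_steps is null in the YAML and would be set
# at runtime (e.g. epochs * steps per epoch), so 1000 here is a placeholder.
scheduler = torch.optim.lr_scheduler.OneCycleLR(
    optimizer,
    max_lr=3.0e-4,
    total_steps=1000,
)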