From 8689e8da65ed02a13975956d70216d7f33db1688 Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm
Date: Sun, 19 Jun 2022 20:58:36 +0200
Subject: Fix lr and optim conf

---
 training/conf/experiment/conv_transformer_paragraphs.yaml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/training/conf/experiment/conv_transformer_paragraphs.yaml b/training/conf/experiment/conv_transformer_paragraphs.yaml
index d0d0314..7c6e231 100644
--- a/training/conf/experiment/conv_transformer_paragraphs.yaml
+++ b/training/conf/experiment/conv_transformer_paragraphs.yaml
@@ -6,8 +6,8 @@ defaults:
   - override /datamodule: iam_extended_paragraphs
   - override /network: conv_transformer
   - override /model: lit_transformer
-  - override /lr_schedulers: null
-  - override /optimizers: null
+  - override /lr_scheduler: null
+  - override /optimizer: null
 
 epochs: &epochs 600
 num_classes: &num_classes 58
@@ -28,7 +28,7 @@ callbacks:
     annealing_strategy: cos
     device: null
 
-optimizers:
+optimizer:
   _target_: torch.optim.RAdam
   lr: 3.0e-4
   betas: [0.9, 0.999]
@@ -36,7 +36,7 @@
   eps: 1.0e-8
   parameters: network
 
-lr_schedulers:
+lr_scheduler:
   _target_: torch.optim.lr_scheduler.OneCycleLR
   max_lr: 3.0e-4
   total_steps: null
--
cgit v1.2.3-70-g09d2
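
The rename matters because Hydra resolves config groups by key: the experiment file must expose the exact names (singular `optimizer` and `lr_scheduler`) that the training code looks up. Below is a minimal sketch, assuming Hydra-style instantiation; the helper name `build_optimization` and the placeholder `total_steps` value are hypothetical illustrations, not this repo's actual `lit_transformer` code:

```python
from typing import Any, Dict, Tuple

import torch
import torch.nn as nn
from hydra.utils import instantiate
from omegaconf import DictConfig, OmegaConf


def build_optimization(
    network: nn.Module, cfg: DictConfig
) -> Tuple[torch.optim.Optimizer, Any]:
    """Instantiate the singular `optimizer` and `lr_scheduler` config groups."""
    # Convert to plain dicts so keys that are not constructor arguments
    # can be dropped before instantiation.
    optim_cfg: Dict[str, Any] = OmegaConf.to_container(cfg.optimizer, resolve=True)
    # `parameters: network` names which attribute's parameters to optimize;
    # torch.optim.RAdam does not accept it as a keyword argument.
    optim_cfg.pop("parameters", None)
    optimizer = instantiate(optim_cfg, params=network.parameters())

    sched_cfg: Dict[str, Any] = OmegaConf.to_container(cfg.lr_scheduler, resolve=True)
    # `total_steps` is null in the YAML; a real run would compute it from
    # the datamodule, e.g. steps_per_epoch * epochs.
    if sched_cfg.get("total_steps") is None:
        sched_cfg["total_steps"] = 100  # hypothetical placeholder
    scheduler = instantiate(sched_cfg, optimizer=optimizer)
    return optimizer, scheduler
```

With the old plural keys (`optimizers:`, `lr_schedulers:`), a lookup like `cfg.optimizer` would raise a missing-key error at startup, which is presumably what this commit fixes.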