author     Gustaf Rydholm <gustaf.rydholm@gmail.com>   2022-06-19 20:58:36 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>   2022-06-19 20:58:36 +0200
commit     8689e8da65ed02a13975956d70216d7f33db1688 (patch)
tree       b5ce932f0db01ec3fe1bc0b634992a6c11e05c61 /training/conf
parent     2a6f02f22a58d281519d3827ea956af5c02a8ea2 (diff)
Fix lr and optim conf
Diffstat (limited to 'training/conf')
-rw-r--r--  training/conf/config.yaml                                  4
-rw-r--r--  training/conf/experiment/conv_transformer_lines.yaml       8
-rw-r--r--  training/conf/experiment/conv_transformer_paragraphs.yaml  8
-rw-r--r--  training/conf/lr_scheduler/cosine_annealing.yaml (renamed from training/conf/lr_schedulers/cosine_annealing.yaml)    0
-rw-r--r--  training/conf/lr_scheduler/one_cycle.yaml (renamed from training/conf/lr_schedulers/one_cycle.yaml)                  0
-rw-r--r--  training/conf/lr_scheduler/reduce_on_plateau.yaml (renamed from training/conf/lr_schedulers/reduce_on_plateau.yaml)  0
-rw-r--r--  training/conf/optimizer/radam.yaml (renamed from training/conf/optimizers/radam.yaml)                                0
7 files changed, 10 insertions, 10 deletions
diff --git a/training/conf/config.yaml b/training/conf/config.yaml
index fdeb5d2..46a1e43 100644
--- a/training/conf/config.yaml
+++ b/training/conf/config.yaml
@@ -7,10 +7,10 @@ defaults:
- datamodule: iam_extended_paragraphs
- hydra: default
- logger: wandb
- - lr_schedulers: cosine_annealing
+ - lr_scheduler: cosine_annealing
- model: lit_transformer
- network: conv_transformer
- - optimizers: radam
+ - optimizer: radam
- trainer: default
seed: 4711
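
The rename matters because Hydra resolves each name in the defaults list against a config group directory of the same name under training/conf; after this commit the groups live in lr_scheduler/ and optimizer/ and compose into the final config under those singular keys. A minimal sketch of consuming the composed config (the entry point below is illustrative, not this repo's actual training script):

    import hydra
    from omegaconf import DictConfig

    @hydra.main(config_path="training/conf", config_name="config")
    def main(cfg: DictConfig) -> None:
        # After the rename, the groups appear under singular keys.
        print(cfg.optimizer._target_)     # e.g. torch.optim.RAdam
        print(cfg.lr_scheduler._target_)  # e.g. a cosine-annealing scheduler

    if __name__ == "__main__":
        main()
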
diff --git a/training/conf/experiment/conv_transformer_lines.yaml b/training/conf/experiment/conv_transformer_lines.yaml
index 48df78d..eb9bc9e 100644
--- a/training/conf/experiment/conv_transformer_lines.yaml
+++ b/training/conf/experiment/conv_transformer_lines.yaml
@@ -6,8 +6,8 @@ defaults:
- override /datamodule: iam_lines
- override /network: conv_transformer
- override /model: lit_transformer
- - override /lr_schedulers: null
- - override /optimizers: null
+ - override /lr_scheduler: null
+ - override /optimizer: null
epochs: &epochs 512
ignore_index: &ignore_index 3
@@ -28,7 +28,7 @@ callbacks:
annealing_strategy: cos
device: null
-optimizers:
+optimizer:
_target_: torch.optim.RAdam
lr: 3.0e-4
betas: [0.9, 0.999]
@@ -36,7 +36,7 @@ optimizers:
eps: 1.0e-8
parameters: network
-lr_schedulers:
+lr_scheduler:
_target_: torch.optim.lr_scheduler.OneCycleLR
max_lr: 3.0e-4
total_steps: null
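
In both experiment files the optimizer block is a Hydra instantiation target; its parameters: network key names the module whose parameters the optimizer should receive and is not an RAdam argument. A minimal sketch of that wiring, assuming a stand-in network module (hypothetical glue code, not confirmed by this commit):

    from hydra.utils import instantiate
    from omegaconf import OmegaConf
    import torch.nn as nn

    network = nn.Linear(8, 8)  # stand-in for the conv_transformer network
    optim_cfg = OmegaConf.create(
        {
            "_target_": "torch.optim.RAdam",
            "lr": 3.0e-4,
            "betas": [0.9, 0.999],
            "eps": 1.0e-8,
        }
    )
    # `parameters: network` in the YAML selects which module's parameters
    # are handed to the optimizer at instantiation time.
    optimizer = instantiate(optim_cfg, params=network.parameters())
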
diff --git a/training/conf/experiment/conv_transformer_paragraphs.yaml b/training/conf/experiment/conv_transformer_paragraphs.yaml
index d0d0314..7c6e231 100644
--- a/training/conf/experiment/conv_transformer_paragraphs.yaml
+++ b/training/conf/experiment/conv_transformer_paragraphs.yaml
@@ -6,8 +6,8 @@ defaults:
- override /datamodule: iam_extended_paragraphs
- override /network: conv_transformer
- override /model: lit_transformer
- - override /lr_schedulers: null
- - override /optimizers: null
+ - override /lr_scheduler: null
+ - override /optimizer: null
epochs: &epochs 600
num_classes: &num_classes 58
@@ -28,7 +28,7 @@ callbacks:
annealing_strategy: cos
device: null
-optimizers:
+optimizer:
_target_: torch.optim.RAdam
lr: 3.0e-4
betas: [0.9, 0.999]
@@ -36,7 +36,7 @@ optimizers:
eps: 1.0e-8
parameters: network
-lr_schedulers:
+lr_scheduler:
_target_: torch.optim.lr_scheduler.OneCycleLR
max_lr: 3.0e-4
total_steps: null
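
OneCycleLR cannot be built with total_steps: null; it needs a concrete step count, so the null is presumably resolved at runtime before instantiation. A sketch of that step, reusing optimizer from the previous sketch and an assumed steps-per-epoch value (both illustrative):

    from hydra.utils import instantiate
    from omegaconf import OmegaConf

    sched_cfg = OmegaConf.create(
        {
            "_target_": "torch.optim.lr_scheduler.OneCycleLR",
            "max_lr": 3.0e-4,
            "total_steps": None,
        }
    )
    # OneCycleLR requires total_steps (or epochs + steps_per_epoch), so the
    # null placeholder must be filled in before instantiation.
    steps_per_epoch = 100  # assumed; len(train_dataloader) in practice
    sched_cfg.total_steps = steps_per_epoch * 600  # 600 epochs, per this file
    scheduler = instantiate(sched_cfg, optimizer=optimizer)
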
diff --git a/training/conf/lr_schedulers/cosine_annealing.yaml b/training/conf/lr_scheduler/cosine_annealing.yaml
index e8364f0..e8364f0 100644
--- a/training/conf/lr_schedulers/cosine_annealing.yaml
+++ b/training/conf/lr_scheduler/cosine_annealing.yaml
diff --git a/training/conf/lr_schedulers/one_cycle.yaml b/training/conf/lr_scheduler/one_cycle.yaml
index 20eab9f..20eab9f 100644
--- a/training/conf/lr_schedulers/one_cycle.yaml
+++ b/training/conf/lr_scheduler/one_cycle.yaml
diff --git a/training/conf/lr_schedulers/reduce_on_plateau.yaml b/training/conf/lr_scheduler/reduce_on_plateau.yaml
index 9aa9de4..9aa9de4 100644
--- a/training/conf/lr_schedulers/reduce_on_plateau.yaml
+++ b/training/conf/lr_scheduler/reduce_on_plateau.yaml
diff --git a/training/conf/optimizers/radam.yaml b/training/conf/optimizer/radam.yaml
index d11fcb5..d11fcb5 100644
--- a/training/conf/optimizers/radam.yaml
+++ b/training/conf/optimizer/radam.yaml