author     Gustaf Rydholm <gustaf.rydholm@gmail.com>   2021-09-18 17:43:23 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>   2021-09-18 17:43:23 +0200
commit     9ef2857c2d24d9c0a8fba3c5db58c7303124c79b (patch)
tree       dc7eb4a179b8cd706e39b650dd3d215bb667db85 /training/conf/experiment/vqvae.yaml
parent     0b8924f37fbab57a3d6f59421e9cd16421c9af4b (diff)
Update experiment configs
Diffstat (limited to 'training/conf/experiment/vqvae.yaml')
-rw-r--r--  training/conf/experiment/vqvae.yaml  |  38
1 file changed, 32 insertions(+), 6 deletions(-)
diff --git a/training/conf/experiment/vqvae.yaml b/training/conf/experiment/vqvae.yaml
index d3db471..d9fa2c4 100644
--- a/training/conf/experiment/vqvae.yaml
+++ b/training/conf/experiment/vqvae.yaml
@@ -2,26 +2,52 @@
defaults:
- override /network: vqvae
- - override /criterion: mae
+ - override /criterion: mse
- override /model: lit_vqvae
- override /callbacks: wandb_vae
- - override /lr_schedulers:
- - cosine_annealing
+ - override /optimizers: null
+ # - override /lr_schedulers:
+ # - cosine_annealing
+
+# lr_schedulers: null
+# network:
+# _target_: torch.optim.lr_scheduler.OneCycleLR
+# max_lr: 1.0e-2
+# total_steps: null
+# epochs: 100
+# steps_per_epoch: 200
+# pct_start: 0.1
+# anneal_strategy: cos
+# cycle_momentum: true
+# base_momentum: 0.85
+# max_momentum: 0.95
+# div_factor: 25
+# final_div_factor: 1.0e4
+# three_phase: true
+# last_epoch: -1
+# verbose: false
+
+# # Non-class arguments
+# interval: step
+# monitor: val/loss
optimizers:
network:
_target_: madgrad.MADGRAD
- lr: 3.0e-4
+ lr: 1.0e-4
momentum: 0.9
weight_decay: 0
- eps: 1.0e-6
+ eps: 1.0e-7
parameters: network
trainer:
- max_epochs: 256
+ max_epochs: 128
+ limit_train_batches: 0.01
+ limit_val_batches: 0.1
datamodule:
batch_size: 8
+ # resize: [288, 320]
summary: null
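
The criterion override switches the reconstruction loss from mae to mse. A minimal sketch of what that most likely amounts to, assuming the mae/mse criterion configs resolve to the standard torch losses (an assumption; the conf/criterion files are not part of this diff):

    import torch.nn as nn

    criterion_old = nn.L1Loss()   # mae: mean absolute error (assumed mapping)
    criterion = nn.MSELoss()      # mse: mean squared error (assumed mapping)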
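For reference, a sketch (not the repository's own training code) of what the updated optimizers.network block builds once Hydra resolves the _target_; the placeholder network and the way its parameters are passed in via the non-class key `parameters: network` are assumptions:

    import torch
    from madgrad import MADGRAD

    network = torch.nn.Linear(8, 8)  # placeholder for the actual VQVAE network

    optimizer = MADGRAD(
        network.parameters(),  # selected via `parameters: network` (assumed)
        lr=1.0e-4,             # lowered from 3.0e-4 in this commit
        momentum=0.9,
        weight_decay=0,
        eps=1.0e-7,            # tightened from 1.0e-6
    )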
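The commented-out block keeps a OneCycleLR configuration around instead of the removed cosine_annealing override; its non-class keys (interval, monitor) match PyTorch Lightning's lr scheduler dict. A sketch of what it would build if re-enabled, assuming the optimizer from the sketch above:

    from torch.optim.lr_scheduler import OneCycleLR

    scheduler = OneCycleLR(
        optimizer,
        max_lr=1.0e-2,
        total_steps=None,        # derived from epochs * steps_per_epoch = 20_000
        epochs=100,
        steps_per_epoch=200,
        pct_start=0.1,
        anneal_strategy="cos",
        cycle_momentum=True,
        base_momentum=0.85,
        max_momentum=0.95,
        div_factor=25,
        final_div_factor=1.0e4,
        three_phase=True,
        last_epoch=-1,
    )

    # Non-class arguments map onto Lightning's scheduler config:
    lr_scheduler_config = {
        "scheduler": scheduler,
        "interval": "step",      # step the schedule every optimizer step
        "monitor": "val/loss",
    }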
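The trainer overrides halve the epoch count and cap each epoch at a small fraction of the data, which reads like a quick smoke-test setup rather than a full run. Sketch of the equivalent Trainer call (standard PyTorch Lightning flags; the import alias is an assumption):

    import pytorch_lightning as pl

    trainer = pl.Trainer(
        max_epochs=128,            # halved from 256
        limit_train_batches=0.01,  # use 1% of the training batches per epoch
        limit_val_batches=0.1,     # use 10% of the validation batches
    )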