author     Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-08-08 19:59:55 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-08-08 19:59:55 +0200
commit     240f5e9f20032e82515fa66ce784619527d1041e (patch)
tree       b002d28bbfc9abe9b6af090f7db60bea0aeed6e8 /training/conf/experiment
parent     d12f70402371dda586d457af2a3df7fb5b3130ad (diff)
Add VQGAN and loss function
Diffstat (limited to 'training/conf/experiment')
-rw-r--r--  training/conf/experiment/vqgan.yaml           55
-rw-r--r--  training/conf/experiment/vqvae.yaml           11
-rw-r--r--  training/conf/experiment/vqvae_pixelcnn.yaml  24
3 files changed, 85 insertions, 5 deletions
diff --git a/training/conf/experiment/vqgan.yaml b/training/conf/experiment/vqgan.yaml
new file mode 100644
index 0000000..3d97892
--- /dev/null
+++ b/training/conf/experiment/vqgan.yaml
@@ -0,0 +1,55 @@
+# @package _global_
+
+defaults:
+  - override /network: vqvae
+  - override /criterion: vqgan_loss
+  - override /model: lit_vqgan
+  - override /callbacks: wandb_vae
+  - override /lr_schedulers: null
+
+datamodule:
+  batch_size: 8
+
+lr_schedulers:
+  - generator:
+      T_max: 256
+      eta_min: 0.0
+      last_epoch: -1
+
+      interval: epoch
+      monitor: val/loss
+
+  - discriminator:
+      T_max: 256
+      eta_min: 0.0
+      last_epoch: -1
+
+      interval: epoch
+      monitor: val/loss
+
+optimizer:
+  - generator:
+      _target_: torch.optim.lr_scheduler.CosineAnnealingLR
+      T_max: 256
+      eta_min: 0.0
+      last_epoch: -1
+
+      interval: epoch
+      monitor: val/loss
+      parameters: network
+
+  - discriminator:
+      _target_: torch.optim.lr_scheduler.CosineAnnealingLR
+      T_max: 256
+      eta_min: 0.0
+      last_epoch: -1
+
+      interval: epoch
+      monitor: val/loss
+      parameters: loss_fn
+
+trainer:
+  max_epochs: 256
+  # gradient_clip_val: 0.25
+
+summary: null
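
The generator/discriminator pairs in vqgan.yaml describe one optimizer and one scheduler per sub-model: `parameters: network` points the first pair at the VQVAE generator, while `parameters: loss_fn` points the second at the discriminator that lives inside the VQGAN loss. Note that the `_target_` under the optimizer entries is `torch.optim.lr_scheduler.CosineAnnealingLR`, a scheduler class rather than an optimizer. The sketch below is not the repo's lit_vqgan model; it only illustrates, assuming a plain Adam optimizer and a module exposing `network` and `loss_fn` attributes, how such a config maps onto the two-optimizer/two-scheduler return value PyTorch Lightning expects from configure_optimizers.

import torch
import pytorch_lightning as pl


class LitVQGANSketch(pl.LightningModule):
    """Illustrative only; attribute names and the Adam learning rate are assumptions."""

    def __init__(self, network: torch.nn.Module, loss_fn: torch.nn.Module) -> None:
        super().__init__()
        self.network = network  # generator (the VQVAE network)
        self.loss_fn = loss_fn  # VQGAN loss module holding the discriminator

    def configure_optimizers(self):
        # One optimizer per sub-model, mirroring the generator/discriminator entries.
        opt_g = torch.optim.Adam(self.network.parameters(), lr=3.0e-4)
        opt_d = torch.optim.Adam(self.loss_fn.parameters(), lr=3.0e-4)

        # One cosine schedule per optimizer, using the T_max/eta_min/last_epoch values
        # from the config; interval and monitor go into Lightning's scheduler dict.
        sched_g = torch.optim.lr_scheduler.CosineAnnealingLR(opt_g, T_max=256, eta_min=0.0, last_epoch=-1)
        sched_d = torch.optim.lr_scheduler.CosineAnnealingLR(opt_d, T_max=256, eta_min=0.0, last_epoch=-1)

        return (
            [opt_g, opt_d],
            [
                {"scheduler": sched_g, "interval": "epoch", "monitor": "val/loss"},
                {"scheduler": sched_d, "interval": "epoch", "monitor": "val/loss"},
            ],
        )
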
diff --git a/training/conf/experiment/vqvae.yaml b/training/conf/experiment/vqvae.yaml
index 7a9e643..397a039 100644
--- a/training/conf/experiment/vqvae.yaml
+++ b/training/conf/experiment/vqvae.yaml
@@ -2,17 +2,18 @@
 
 defaults:
   - override /network: vqvae
-  - override /criterion: mse
+  - override /criterion: mae
   - override /model: lit_vqvae
   - override /callbacks: wandb_vae
-  - override /lr_scheduler: cosine_annealing
+  - override /lr_schedulers:
+      - cosine_annealing
 
 trainer:
-  max_epochs: 64
+  max_epochs: 256
   # gradient_clip_val: 0.25
 
 datamodule:
-  batch_size: 16
+  batch_size: 8
 
 # lr_scheduler:
   # epochs: 64
@@ -21,4 +22,4 @@ datamodule:
 # optimizer:
   # lr: 1.0e-3
 
-summary: [1, 576, 640]
+summary: null
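
In vqvae.yaml the reconstruction criterion moves from mse to mae and the single lr_scheduler group becomes an lr_schedulers list selecting cosine_annealing. The criterion and scheduler groups themselves are not part of this diff, so the sketch below only illustrates the general mechanism, assuming the mae group targets torch.nn.L1Loss: Hydra instantiates whatever class a node's `_target_` names, while keys such as interval and monitor are consumed by Lightning's scheduler dict rather than passed to the constructor.

import torch
from hydra.utils import instantiate
from omegaconf import OmegaConf

# Assumed content of the (not shown) criterion/mae config group.
criterion_cfg = OmegaConf.create({"_target_": "torch.nn.L1Loss"})

# Scheduler node mirroring the cosine_annealing values used in vqgan.yaml.
scheduler_cfg = OmegaConf.create(
    {
        "_target_": "torch.optim.lr_scheduler.CosineAnnealingLR",
        "T_max": 256,
        "eta_min": 0.0,
        "last_epoch": -1,
    }
)

criterion = instantiate(criterion_cfg)  # -> torch.nn.L1Loss()

# A scheduler needs an optimizer, passed here as an extra keyword argument.
dummy_params = torch.nn.Linear(4, 4).parameters()  # placeholder model parameters
optimizer = torch.optim.Adam(dummy_params, lr=3.0e-4)
scheduler = instantiate(scheduler_cfg, optimizer=optimizer)
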
diff --git a/training/conf/experiment/vqvae_pixelcnn.yaml b/training/conf/experiment/vqvae_pixelcnn.yaml
new file mode 100644
index 0000000..4fae782
--- /dev/null
+++ b/training/conf/experiment/vqvae_pixelcnn.yaml
@@ -0,0 +1,24 @@
+# @package _global_
+
+defaults:
+  - override /network: vqvae_pixelcnn
+  - override /criterion: mae
+  - override /model: lit_vqvae
+  - override /callbacks: wandb_vae
+  - override /lr_schedulers:
+      - cosine_annealing
+
+trainer:
+  max_epochs: 256
+  # gradient_clip_val: 0.25
+
+datamodule:
+  batch_size: 8
+
+# lr_scheduler:
+  # epochs: 64
+  # steps_per_epoch: 1245
+
+# optimizer:
+  # lr: 1.0e-3
+
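
Both new experiment files start with `# @package _global_`, so their keys are merged at the root of the composed config rather than under an experiment node. A quick way to inspect the result is Hydra's compose API; the config_path, primary config name, and the `+experiment=` override below are assumptions about how this repo wires its defaults, not something shown in the diff.

from hydra import compose, initialize
from omegaconf import OmegaConf

# Compose the primary config with one of the new experiment files applied on top.
with initialize(config_path="training/conf"):
    cfg = compose(config_name="config", overrides=["+experiment=vqvae_pixelcnn"])

# The @package _global_ keys (trainer, datamodule, defaults overrides) land at the root.
print(OmegaConf.to_yaml(cfg))
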