summaryrefslogtreecommitdiff
path: root/training/experiments/embedding_experiment.yml
blob: 1e5f9411b10d4eb39379c51bef4ce6beb6465837 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
experiment_group: Embedding Experiments
experiments:
    - train_args:
        transformer_model: false
        # Anchored so the dataset loader batch size stays in sync (aliased below).
        batch_size: &batch_size 256
        # Anchored so the LR schedule horizon and progress bar stay in sync.
        max_epochs: &max_epochs 32
        input_shape: [[1, 28, 28]]
      dataset:
        type: EmnistDataset
        args:
          sample_to_balance: true
          subsample_fraction: null
          transform: null
          target_transform: null
          seed: 4711
        train_args:
          num_workers: 8
          train_fraction: 0.85
          batch_size: *batch_size
      model: CharacterModel
      metrics: []
      network:
        type: DenseNet
        args:
          growth_rate: 4
          block_config: [4, 4]
          in_channels: 1
          base_channels: 24
          num_classes: 128
          bn_size: 4
          dropout_rate: 0.1
          classifier: true
          activation: elu
      criterion:
        type: EmbeddingLoss
        args:
          margin: 0.2
          type_of_triplets: semihard
      optimizer:
        type: AdamW
        args:
          # Floats are written with an explicit fraction digit (1.0e-2, not
          # 1.e-02): PyYAML's YAML 1.1 resolver does not accept an empty
          # fraction part and would load "1.e-02" as a *string*, not a float.
          lr: 1.0e-2
          betas: [0.9, 0.999]
          eps: 1.0e-8
          weight_decay: 5.0e-4
          amsgrad: false
      lr_scheduler:
        type: CosineAnnealingLR
        args:
          T_max: *max_epochs
      callbacks: [Checkpoint, ProgressBar, WandbCallback]
      callback_args:
        Checkpoint:
          monitor: val_loss
          mode: min
        ProgressBar:
          epochs: *max_epochs
        WandbCallback:
          log_batch_frequency: 10
      verbosity: 1  # allowed values: 0, 1, 2
      resume_experiment: null
      train: true
      test: true
      test_metric: mean_average_precision_at_r