# @package _global_
# Hydra experiment config: MaMMUT transformer on IAM handwritten-lines data,
# trained with Adan + cosine-annealing LR for 320 epochs.
# NOTE(review): nesting reconstructed from a flattened paste — verify against
# the original file in version control.

defaults:
  - override /criterion: cross_entropy
  - override /callbacks: htr
  - override /datamodule: iam_lines
  - override /network: mammut_lines
  - override /model: lit_mammut
  - override /lr_scheduler: cosine_annealing
  - override /optimizer: adan

tags: [lines, vit]

# Anchors shared by lr_scheduler/trainer (epochs) and criterion (ignore_index).
epochs: &epochs 320
ignore_index: &ignore_index 3

# summary: [[1, 1, 56, 1024], [1, 89]]

logger:
  wandb:
    tags: ${tags}

criterion:
  # Token id excluded from the cross-entropy loss (padding).
  ignore_index: *ignore_index
  # label_smoothing: 0.05

decoder:
  # Maximum decoded sequence length; keep in sync with model.max_output_len.
  max_output_len: 89

# callbacks:
#   stochastic_weight_averaging:
#     _target_: pytorch_lightning.callbacks.StochasticWeightAveraging
#     swa_epoch_start: 0.75
#     swa_lrs: 1.0e-5
#     annealing_epochs: 10
#     annealing_strategy: cos
#     device: null

lr_scheduler:
  # Anneal over the full training run.
  T_max: *epochs

datamodule:
  batch_size: 16
  train_fraction: 0.95

model:
  max_output_len: 89

optimizer:
  lr: 1.0e-3

trainer:
  fast_dev_run: false
  gradient_clip_val: 1.0
  max_epochs: *epochs
  accumulate_grad_batches: 1
  limit_train_batches: 1.0
  limit_val_batches: 1.0
  limit_test_batches: 1.0