"""Base PyTorch Lightning model."""
from typing import Any, Dict, Optional, Tuple

import hydra
import torch
from loguru import logger as log
from omegaconf import DictConfig
from pytorch_lightning import LightningModule
from torch import nn, Tensor
from torchmetrics import Accuracy

from text_recognizer.data.mappings import EmnistMapping


class LitBase(LightningModule):
    """Abstract PyTorch Lightning class."""

    def __init__(
self,
        network: nn.Module,
        loss_fn: nn.Module,
optimizer_config: DictConfig,
lr_scheduler_config: Optional[DictConfig],
mapping: EmnistMapping,
ignore_index: Optional[int] = None,
) -> None:
super().__init__()
self.network = network
self.loss_fn = loss_fn
self.optimizer_config = optimizer_config
self.lr_scheduler_config = lr_scheduler_config
self.mapping = mapping
        # Accuracy metrics for the train, val, and test splits.
self.train_acc = Accuracy(mdmc_reduce="samplewise", ignore_index=ignore_index)
self.val_acc = Accuracy(mdmc_reduce="samplewise", ignore_index=ignore_index)
        self.test_acc = Accuracy(mdmc_reduce="samplewise", ignore_index=ignore_index)

    def optimizer_zero_grad(
self,
epoch: int,
batch_idx: int,
        optimizer: torch.optim.Optimizer,
optimizer_idx: int,
) -> None:
"""Optimal way to set grads to zero."""
optimizer.zero_grad(set_to_none=True)
def _configure_optimizer(self) -> Type[torch.optim.Optimizer]:
"""Configures the optimizer."""
log.info(f"Instantiating optimizer <{self.optimizer_config._target_}>")
return hydra.utils.instantiate(
self.optimizer_config, params=self.network.parameters()
)
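
    # A hypothetical example (not taken from the repository's config files) of
    # the Hydra optimizer config that _configure_optimizer instantiates:
    #
    #   optimizer_config = OmegaConf.create(
    #       {"_target_": "torch.optim.AdamW", "lr": 3.0e-4, "weight_decay": 1.0e-2}
    #   )
    #
    # hydra.utils.instantiate resolves _target_ to the class and calls it with
    # the remaining config keys plus the params keyword passed in the method above.
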
def _configure_lr_schedulers(
        self, optimizer: torch.optim.Optimizer
) -> Dict[str, Any]:
"""Configures the lr scheduler."""
log.info(
f"Instantiating learning rate scheduler <{self.lr_scheduler_config._target_}>"
)
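        # monitor and interval are Lightning bookkeeping keys, not scheduler
        # constructor arguments, so they are stripped from the config before
        # instantiation.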
monitor = self.lr_scheduler_config.monitor
interval = self.lr_scheduler_config.interval
del self.lr_scheduler_config.monitor
del self.lr_scheduler_config.interval
return {
"monitor": monitor,
"interval": interval,
"scheduler": hydra.utils.instantiate(
self.lr_scheduler_config, optimizer=optimizer
),
}
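
    # A hypothetical scheduler config (illustrative, not the repository's
    # actual YAML) consumed by _configure_lr_schedulers; note the extra
    # monitor and interval keys that Lightning itself consumes:
    #
    #   lr_scheduler_config = OmegaConf.create(
    #       {
    #           "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau",
    #           "mode": "min",
    #           "factor": 0.1,
    #           "monitor": "val/loss",
    #           "interval": "epoch",
    #       }
    #   )
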
    def configure_optimizers(self) -> Dict[str, Any]:
        """Configures the optimizer and, optionally, the lr scheduler."""
        optimizer = self._configure_optimizer()
        if self.lr_scheduler_config is None:
            # The scheduler config is optional; fall back to a bare optimizer.
            return {"optimizer": optimizer}
        scheduler = self._configure_lr_schedulers(optimizer)
        return {"optimizer": optimizer, "lr_scheduler": scheduler}

    def forward(self, data: Tensor) -> Tensor:
        """Feedforward pass through the wrapped network."""
        return self.network(data)

    def training_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> Tensor:
        """Training step; must be implemented by concrete subclasses."""
        raise NotImplementedError

    def validation_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> None:
        """Validation step; must be implemented by concrete subclasses."""
        raise NotImplementedError

    def test_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> None:
        """Test step; must be implemented by concrete subclasses."""
        raise NotImplementedError
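

# ---------------------------------------------------------------------------
# Minimal sketch of a concrete subclass, assuming a classification-style
# network whose logits are shaped (batch, classes, sequence). It is not part
# of the original module, and the logged metric names are illustrative
# assumptions, not the repository's conventions.
# ---------------------------------------------------------------------------
class LitExample(LitBase):
    """Illustrative subclass filling in the abstract step hooks."""

    def training_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> Tensor:
        """Computes the loss and logs training accuracy for one batch."""
        data, targets = batch
        logits = self(data)
        loss = self.loss_fn(logits, targets)
        self.train_acc(logits, targets)
        self.log("train/loss", loss)
        self.log("train/acc", self.train_acc, on_step=False, on_epoch=True)
        return loss

    def validation_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> None:
        """Logs validation loss and accuracy."""
        data, targets = batch
        logits = self(data)
        self.val_acc(logits, targets)
        self.log("val/loss", self.loss_fn(logits, targets), prog_bar=True)
        self.log("val/acc", self.val_acc, on_step=False, on_epoch=True)

    def test_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> None:
        """Logs test accuracy."""
        data, targets = batch
        self.test_acc(self(data), targets)
        self.log("test/acc", self.test_acc, on_step=False, on_epoch=True)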