Diffstat (limited to 'text_recognizer')
-rw-r--r--  text_recognizer/models/base.py   12
-rw-r--r--  text_recognizer/models/vqvae.py   3
2 files changed, 11 insertions(+), 4 deletions(-)
diff --git a/text_recognizer/models/base.py b/text_recognizer/models/base.py
index 57c5964..ab3fa35 100644
--- a/text_recognizer/models/base.py
+++ b/text_recognizer/models/base.py
@@ -26,8 +26,6 @@ class BaseLitModel(LightningModule):
loss_fn: Type[nn.Module] = attr.ib()
optimizer_config: DictConfig = attr.ib()
lr_scheduler_config: DictConfig = attr.ib()
- interval: str = attr.ib()
- monitor: str = attr.ib(default="val/loss")
train_acc: torchmetrics.Accuracy = attr.ib(
init=False, default=torchmetrics.Accuracy()
)
@@ -58,12 +56,18 @@ class BaseLitModel(LightningModule):
self, optimizer: Type[torch.optim.Optimizer]
) -> Dict[str, Any]:
"""Configures the lr scheduler."""
+ # Extract non-class arguments.
+ monitor = self.lr_scheduler_config.monitor
+ interval = self.lr_scheduler_config.interval
+ del self.lr_scheduler_config.monitor
+ del self.lr_scheduler_config.interval
+
log.info(
f"Instantiating learning rate scheduler <{self.lr_scheduler_config._target_}>"
)
scheduler = {
- "monitor": self.monitor,
- "interval": self.interval,
+ "monitor": monitor,
+ "interval": interval,
"scheduler": hydra.utils.instantiate(
self.lr_scheduler_config, optimizer=optimizer
),
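Note: the change above assumes that "monitor" and "interval" now live inside the Hydra lr_scheduler config itself instead of being separate attrs on the Lightning module. A minimal sketch of what such a config and the extraction step could look like; the concrete scheduler class and values are assumptions for illustration, not taken from this repository:

    # Hypothetical config shape; only the presence of "monitor" and
    # "interval" next to "_target_" is implied by the diff above.
    from omegaconf import OmegaConf

    lr_scheduler_config = OmegaConf.create(
        {
            "_target_": "torch.optim.lr_scheduler.ReduceLROnPlateau",
            "mode": "min",
            "factor": 0.1,
            "patience": 10,
            "monitor": "val/loss",
            "interval": "epoch",
        }
    )

    # Pop the Lightning-specific keys before hydra.utils.instantiate() is
    # called, since the scheduler class itself does not accept them.
    monitor = lr_scheduler_config.monitor
    interval = lr_scheduler_config.interval
    del lr_scheduler_config.monitor
    del lr_scheduler_config.interval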
diff --git a/text_recognizer/models/vqvae.py b/text_recognizer/models/vqvae.py
index 7f79b78..76b7ba6 100644
--- a/text_recognizer/models/vqvae.py
+++ b/text_recognizer/models/vqvae.py
@@ -23,6 +23,7 @@ class VQVAELitModel(BaseLitModel):
reconstructions, vq_loss = self(data)
loss = self.loss_fn(reconstructions, data)
loss = loss + self.latent_loss_weight * vq_loss
+ self.log("train/vq_loss", vq_loss)
self.log("train/loss", loss)
return loss
@@ -32,6 +33,7 @@ class VQVAELitModel(BaseLitModel):
reconstructions, vq_loss = self(data)
loss = self.loss_fn(reconstructions, data)
loss = loss + self.latent_loss_weight * vq_loss
+ self.log("val/vq_loss", vq_loss)
self.log("val/loss", loss, prog_bar=True)
def test_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> None:
@@ -40,4 +42,5 @@ class VQVAELitModel(BaseLitModel):
reconstructions, vq_loss = self(data)
loss = self.loss_fn(reconstructions, data)
loss = loss + self.latent_loss_weight * vq_loss
+ self.log("test/vq_loss", vq_loss)
self.log("test/loss", loss)