Diffstat (limited to 'text_recognizer/models')
-rw-r--r--   text_recognizer/models/vq_transformer.py   9
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/text_recognizer/models/vq_transformer.py b/text_recognizer/models/vq_transformer.py
index 71ca2ef..a0d3892 100644
--- a/text_recognizer/models/vq_transformer.py
+++ b/text_recognizer/models/vq_transformer.py
@@ -1,11 +1,10 @@
"""PyTorch Lightning model for base Transformers."""
-from typing import Tuple, Type, Set
+from typing import Tuple
import attr
import torch
from torch import Tensor
-from text_recognizer.models.metrics import CharacterErrorRate
from text_recognizer.models.transformer import TransformerLitModel
@@ -13,6 +12,8 @@ from text_recognizer.models.transformer import TransformerLitModel
 class VqTransformerLitModel(TransformerLitModel):
     """A PyTorch Lightning model for transformer networks."""

+    alpha: float = attr.ib(default=1.0)
+
     def forward(self, data: Tensor) -> Tensor:
         """Forward pass with the transformer network."""
         return self.predict(data)
@@ -21,7 +22,7 @@ class VqTransformerLitModel(TransformerLitModel):
"""Training step."""
data, targets = batch
logits, commitment_loss = self.network(data, targets[:-1])
- loss = self.loss_fn(logits, targets[1:]) + commitment_loss
+ loss = self.loss_fn(logits, targets[1:]) + self.alpha * commitment_loss
self.log("train/loss", loss)
self.log("train/commitment_loss", commitment_loss)
return loss
@@ -32,7 +33,7 @@ class VqTransformerLitModel(TransformerLitModel):
         # Compute the loss.
         logits, commitment_loss = self.network(data, targets[:-1])
-        loss = self.loss_fn(logits, targets[1:]) + commitment_loss
+        loss = self.loss_fn(logits, targets[1:]) + self.alpha * commitment_loss
         self.log("val/loss", loss, prog_bar=True)
         self.log("val/commitment_loss", commitment_loss)