From 64b263995159994e2cd37c1f657dfd4c98f182f7 Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm <gustaf.rydholm@gmail.com>
Date: Thu, 30 Sep 2021 23:04:46 +0200
Subject: Fix transformer target/logit dims, comment out CER/WER metrics

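targets has shape (B, S), so slicing it with targets[:-1] cut along
the batch dimension, dropping the last sample instead of shifting the
sequence for teacher forcing. Slice the sequence dimension instead:
targets[:, :-1] as decoder input, targets[:, 1:] as labels.

A minimal shape sketch with hypothetical values (B=2, S=5):

    targets = torch.randint(0, 10, (2, 5))  # (B, S)
    targets[:-1].shape     # (1, 5): wrong, drops a sample
    targets[:, :-1].shape  # (2, 4): right, shifts the sequence

The decoder also returns logits in (B, C, Sy) layout, with the class
dimension second (as torch.nn.CrossEntropyLoss expects, assuming
self.loss_fn wraps it), so greedy decoding must take the argmax over
dim=1, not dim=-1:

    logits = torch.randn(2, 10, 4)  # (B, C, Sy)
    logits.argmax(dim=-1).shape     # (2, 10): wrong, reduces over Sy
    logits.argmax(dim=1).shape      # (2, 4): right, reduces over C

The CER/accuracy metric computation in the validation steps is
commented out for now.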
---
 text_recognizer/models/transformer.py    | 18 +++++++++---------
 text_recognizer/models/vq_transformer.py | 22 +++++++++++-----------
 2 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/text_recognizer/models/transformer.py b/text_recognizer/models/transformer.py
index 75f7523..50bf73d 100644
--- a/text_recognizer/models/transformer.py
+++ b/text_recognizer/models/transformer.py
@@ -52,16 +52,16 @@ class TransformerLitModel(BaseLitModel):
         data, targets = batch
 
         # Compute the loss.
-        logits = self.network(data, targets[:-1])
-        loss = self.loss_fn(logits, targets[1:])
+        logits = self.network(data, targets[:, :-1])
+        loss = self.loss_fn(logits, targets[:, 1:])
         self.log("val/loss", loss, prog_bar=True)
 
         # Get the token prediction.
-        pred = self(data)
-        self.val_cer(pred, targets)
-        self.log("val/cer", self.val_cer, on_step=False, on_epoch=True, prog_bar=True)
-        self.test_acc(pred, targets)
-        self.log("val/acc", self.test_acc, on_step=False, on_epoch=True)
+        # pred = self(data)
+        # self.val_cer(pred, targets)
+        # self.log("val/cer", self.val_cer, on_step=False, on_epoch=True, prog_bar=True)
+        # self.test_acc(pred, targets)
+        # self.log("val/acc", self.test_acc, on_step=False, on_epoch=True)
 
     def test_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> None:
         """Test step."""
@@ -98,8 +98,8 @@ class TransformerLitModel(BaseLitModel):
 
         for Sy in range(1, self.max_output_len):
             context = output[:, :Sy]  # (B, Sy)
-            logits = self.network.decode(z, context)  # (B, Sy, C)
-            tokens = torch.argmax(logits, dim=-1)  # (B, Sy)
+            logits = self.network.decode(z, context)  # (B, C, Sy)
+            tokens = torch.argmax(logits, dim=1)  # (B, Sy)
             output[:, Sy : Sy + 1] = tokens[:, -1:]
 
             # Early stopping of prediction loop if token is end or padding token.
diff --git a/text_recognizer/models/vq_transformer.py b/text_recognizer/models/vq_transformer.py
index a0d3892..339ce09 100644
--- a/text_recognizer/models/vq_transformer.py
+++ b/text_recognizer/models/vq_transformer.py
@@ -21,8 +21,8 @@ class VqTransformerLitModel(TransformerLitModel):
     def training_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> Tensor:
         """Training step."""
         data, targets = batch
-        logits, commitment_loss = self.network(data, targets[:-1])
-        loss = self.loss_fn(logits, targets[1:]) + self.alpha * commitment_loss
+        logits, commitment_loss = self.network(data, targets[:, :-1])
+        loss = self.loss_fn(logits, targets[:, 1:]) + self.alpha * commitment_loss
         self.log("train/loss", loss)
         self.log("train/commitment_loss", commitment_loss)
         return loss
@@ -32,17 +32,17 @@ class VqTransformerLitModel(TransformerLitModel):
         data, targets = batch
 
         # Compute the loss.
-        logits, commitment_loss = self.network(data, targets[:-1])
-        loss = self.loss_fn(logits, targets[1:]) + self.alpha * commitment_loss
+        logits, commitment_loss = self.network(data, targets[:, :-1])
+        loss = self.loss_fn(logits, targets[:, 1:]) + self.alpha * commitment_loss
         self.log("val/loss", loss, prog_bar=True)
         self.log("val/commitment_loss", commitment_loss)
 
         # Get the token prediction.
-        pred = self(data)
-        self.val_cer(pred, targets)
-        self.log("val/cer", self.val_cer, on_step=False, on_epoch=True, prog_bar=True)
-        self.test_acc(pred, targets)
-        self.log("val/acc", self.test_acc, on_step=False, on_epoch=True)
+        # pred = self(data)
+        # self.val_cer(pred, targets)
+        # self.log("val/cer", self.val_cer, on_step=False, on_epoch=True, prog_bar=True)
+        # self.test_acc(pred, targets)
+        # self.log("val/acc", self.test_acc, on_step=False, on_epoch=True)
 
     def test_step(self, batch: Tuple[Tensor, Tensor], batch_idx: int) -> None:
         """Test step."""
@@ -79,8 +79,8 @@ class VqTransformerLitModel(TransformerLitModel):
 
         for Sy in range(1, self.max_output_len):
             context = output[:, :Sy]  # (B, Sy)
-            logits = self.network.decode(z, context)  # (B, Sy, C)
-            tokens = torch.argmax(logits, dim=-1)  # (B, Sy)
+            logits = self.network.decode(z, context)  # (B, C, Sy)
+            tokens = torch.argmax(logits, dim=1)  # (B, Sy)
             output[:, Sy : Sy + 1] = tokens[:, -1:]
 
             # Early stopping of prediction loop if token is end or padding token.