author     Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-04-05 23:24:20 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>  2021-04-05 23:24:20 +0200
commit     dedf8deb025ac9efdad5e9baf9165ef63d6829ff (patch)
tree       56b10fcaef479d8abe9b0e6c05e07ad5e02b9ab0 /text_recognizer/models
parent     532286b516b17d279c321358bf03dddc8adc8029 (diff)
Pre-commit fixes, optimizer loading fix
Diffstat (limited to 'text_recognizer/models')
-rw-r--r--  text_recognizer/models/base.py         | 6
-rw-r--r--  text_recognizer/models/transformer.py  | 6
2 files changed, 5 insertions, 7 deletions
diff --git a/text_recognizer/models/base.py b/text_recognizer/models/base.py
index 1004f48..11d1eb1 100644
--- a/text_recognizer/models/base.py
+++ b/text_recognizer/models/base.py
@@ -15,7 +15,7 @@ class LitBaseModel(pl.LightningModule):
def __init__(
self,
- network: Type[nn,Module],
+ network: Type[nn.Module],
optimizer: Union[OmegaConf, Dict],
lr_scheduler: Union[OmegaConf, Dict],
criterion: Union[OmegaConf, Dict],
@@ -40,14 +40,14 @@ class LitBaseModel(pl.LightningModule):
args = {} or criterion.args
return getattr(nn, criterion.type)(**args)
- def _configure_optimizer(self) -> type:
+ def _configure_optimizer(self) -> torch.optim.Optimizer:
"""Configures the optimizer."""
args = {} or self._optimizer.args
if self._optimizer.type == "MADGRAD":
optimizer_class = madgrad.MADGRAD
else:
optimizer_class = getattr(torch.optim, self._optimizer.type)
- return optimizer_class(parameters=self.parameters(), **args)
+ return optimizer_class(params=self.parameters(), **args)
def _configure_lr_scheduler(self) -> Dict[str, Any]:
"""Configures the lr scheduler."""
diff --git a/text_recognizer/models/transformer.py b/text_recognizer/models/transformer.py
index 3625ab2..983e274 100644
--- a/text_recognizer/models/transformer.py
+++ b/text_recognizer/models/transformer.py
@@ -19,16 +19,14 @@ class LitTransformerModel(LitBaseModel):
def __init__(
self,
- network: Type[nn,Module],
+ network: Type[nn, Module],
optimizer: Union[OmegaConf, Dict],
lr_scheduler: Union[OmegaConf, Dict],
criterion: Union[OmegaConf, Dict],
monitor: str = "val_loss",
mapping: Optional[List[str]] = None,
) -> None:
- super().__init__(
- network, optimizer, lr_scheduler, criterion, monitor
- )
+ super().__init__(network, optimizer, lr_scheduler, criterion, monitor)
self.mapping, ignore_tokens = self.configure_mapping(mapping)
self.val_cer = CharacterErrorRate(ignore_tokens)
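
The constructor in the hunk above also wires up a validation character error rate metric. A hedged sketch of how such a metric is typically consumed in a LightningModule validation step, assuming CharacterErrorRate follows the torchmetrics Metric interface; the `predict` decode helper and the batch layout are assumptions, not taken from this diff:

    def validation_step(self, batch, batch_idx):
        data, targets = batch
        preds = self.predict(data)    # assumed greedy-decode helper
        self.val_cer(preds, targets)  # accumulate CER across the epoch
        self.log("val_cer", self.val_cer, prog_bar=True)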