summaryrefslogtreecommitdiff
path: root/training
diff options
context:
space:
mode:
Diffstat (limited to 'training')
-rw-r--r--training/run.py37
-rw-r--r--training/utils.py84
2 files changed, 90 insertions, 31 deletions
diff --git a/training/run.py b/training/run.py
index ed1b372..5f7c927 100644
--- a/training/run.py
+++ b/training/run.py
@@ -14,35 +14,12 @@ from pytorch_lightning import (
from pytorch_lightning.loggers import LightningLoggerBase
from torch import nn
-from utils import configure_logging
-
-
-def configure_callbacks(
- config: DictConfig,
-) -> List[Type[Callback]]:
- """Configures lightning callbacks."""
- callbacks = []
- if config.get("callbacks"):
- for callback_config in config.callbacks.values():
- if config.get("_target_"):
- log.info(f"Instantiating callback <{callback_config._target_}>")
- callbacks.append(hydra.utils.instantiate(callback_config))
- return callbacks
-
-
-def configure_logger(config: DictConfig) -> List[Type[LightningLoggerBase]]:
- logger = []
- if config.get("logger"):
- for logger_config in config.logger.values():
- if config.get("_target_"):
- log.info(f"Instantiating callback <{logger_config._target_}>")
- logger.append(hydra.utils.instantiate(logger_config))
- return logger
+import utils
def run(config: DictConfig) -> Optional[float]:
"""Runs experiment."""
- configure_logging(config.logging)
+ utils.configure_logging(config.logging)
log.info("Starting experiment...")
if config.get("seed"):
@@ -65,8 +42,8 @@ def run(config: DictConfig) -> Optional[float]:
)
# Load callback and logger.
- callbacks = configure_callbacks(config)
- logger = configure_logger(config)
+ callbacks: List[Type[Callback]] = utils.configure_callbacks(config)
+ logger: List[Type[LightningLoggerBase]] = utils.configure_logger(config)
log.info(f"Instantiating trainer <{config.trainer._target_}>")
trainer: Trainer = hydra.utils.instantiate(
@@ -74,6 +51,7 @@ def run(config: DictConfig) -> Optional[float]:
)
# Log hyperparameters
+ utils.log_hyperparameters(config=config, model=model, trainer=trainer)
if config.debug:
log.info("Fast development run...")
@@ -81,7 +59,7 @@ def run(config: DictConfig) -> Optional[float]:
return None
if config.tune:
- log.info("Tuning learning rate and batch size...")
+ log.info("Tuning hyperparameters...")
trainer.tune(model, datamodule=datamodule)
if config.train:
@@ -92,4 +70,5 @@ def run(config: DictConfig) -> Optional[float]:
log.info("Testing network...")
trainer.test(model, datamodule=datamodule)
- # Make sure everything closes properly
+ log.info(f"Best checkpoint path:\n{trainer.checkpoint_callback.best_model_path}")
+    utils.finish(logger)
diff --git a/training/utils.py b/training/utils.py
index 7717fc5..4c31dc3 100644
--- a/training/utils.py
+++ b/training/utils.py
@@ -1,19 +1,52 @@
"""Util functions for training hydra configs and pytorch lightning."""
+from typing import Any, List, Type
import warnings
+import hydra
from omegaconf import DictConfig, OmegaConf
import loguru.logger as log
+from pytorch_lightning import (
+ Callback,
+ LightningModule,
+ Trainer,
+)
+from pytorch_lightning.loggers import LightningLoggerBase
from pytorch_lightning.loggers.wandb import WandbLogger
from pytorch_lightning.utilities import rank_zero_only
from tqdm import tqdm
+import wandb
@rank_zero_only
-def configure_logging(level: str) -> None:
+def configure_logging(config: DictConfig) -> None:
"""Configure the loguru logger for output to terminal and disk."""
# Remove default logger to get tqdm to work properly.
log.remove()
- log.add(lambda msg: tqdm.write(msg, end=""), colorize=True, level=level)
+ log.add(lambda msg: tqdm.write(msg, end=""), colorize=True, level=config.logging)
+
+
+def configure_callbacks(
+ config: DictConfig,
+) -> List[Type[Callback]]:
+ """Configures Lightning callbacks."""
+ callbacks = []
+ if config.get("callbacks"):
+ for callback_config in config.callbacks.values():
+            if callback_config.get("_target_"):
+ log.info(f"Instantiating callback <{callback_config._target_}>")
+ callbacks.append(hydra.utils.instantiate(callback_config))
+ return callbacks
+
+
+def configure_logger(config: DictConfig) -> List[Type[LightningLoggerBase]]:
+ """Configures Lightning loggers."""
+ logger = []
+ if config.get("logger"):
+ for logger_config in config.logger.values():
+            if logger_config.get("_target_"):
+                log.info(f"Instantiating logger <{logger_config._target_}>")
+ logger.append(hydra.utils.instantiate(logger_config))
+ return logger
def extras(config: DictConfig) -> None:
@@ -54,3 +87,50 @@ def extras(config: DictConfig) -> None:
# Disable adding new keys to config
OmegaConf.set_struct(config, True)
+
+
+def empty(*args: Any, **kwargs: Any) -> None:
+ pass
+
+
+@rank_zero_only
+def log_hyperparameters(
+ config: DictConfig,
+ model: LightningModule,
+ trainer: Trainer,
+) -> None:
+ """This method saves hyperparameters with the logger."""
+ hparams = {}
+
+ # choose which parts of hydra config will be saved to loggers
+ hparams["trainer"] = config["trainer"]
+ hparams["model"] = config["model"]
+ hparams["datamodule"] = config["datamodule"]
+ if "callbacks" in config:
+ hparams["callbacks"] = config["callbacks"]
+
+ # save number of model parameters
+ hparams["model/params_total"] = sum(p.numel() for p in model.parameters())
+ hparams["model/params_trainable"] = sum(
+ p.numel() for p in model.parameters() if p.requires_grad
+ )
+ hparams["model/params_not_trainable"] = sum(
+ p.numel() for p in model.parameters() if not p.requires_grad
+ )
+
+ # send hparams to all loggers
+ trainer.logger.log_hyperparams(hparams)
+
+ # disable logging any more hyperparameters for all loggers
+ # this is just a trick to prevent trainer from logging hparams of model,
+ # since we already did that above
+ trainer.logger.log_hyperparams = empty
+
+
+def finish(
+ logger: List[Type[LightningLoggerBase]],
+) -> None:
+ """Makes sure everything closed properly."""
+ for lg in logger:
+ if isinstance(lg, WandbLogger):
+ wandb.finish()