# training/utils.py
"""Util functions for training with hydra and pytorch lightning."""
import warnings
from typing import List

import hydra
from loguru import logger as log
from omegaconf import DictConfig, OmegaConf
from pytorch_lightning import (
    Callback,
    LightningModule,
    Trainer,
)
from pytorch_lightning.loggers import Logger
from pytorch_lightning.loggers.wandb import WandbLogger
from pytorch_lightning.utilities import rank_zero_only
from tqdm import tqdm

import wandb


def save_config(config: DictConfig) -> None:
    """Save config to the experiment directory.

    Assumes Hydra has set the working directory to the run's output directory
    (the default in Hydra < 1.2; newer versions need `hydra.job.chdir=true`),
    so the relative path below lands in that experiment directory.
    """
    with open("config.yaml", "w") as f:
        OmegaConf.save(config, f=f)


def print_config(config: DictConfig) -> None:
    """Prints config."""
    print(OmegaConf.to_yaml(config))


@rank_zero_only
def configure_logging(config: DictConfig) -> None:
    """Configure the loguru logger for output to terminal and disk."""
    # Remove default logger to get tqdm to work properly.
    log.remove()
    log.add(lambda msg: tqdm.write(msg, end=""), colorize=True, level=config.logging)
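

# `config.logging` is assumed to hold a loguru level name, e.g. a hydra config
# containing `logging: INFO` (an assumption about the config schema, not
# enforced here). Routing messages through `tqdm.write` keeps log lines from
# breaking active progress bars.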


def configure_callbacks(
    config: DictConfig,
) -> List[Callback]:
    """Configures Lightning callbacks."""

    def load_callback(callback_config: DictConfig) -> Callback:
        log.info(f"Instantiating callback <{callback_config._target_}>")
        return hydra.utils.instantiate(callback_config)

    def load_callbacks(callback_configs: DictConfig) -> List[Callback]:
        callbacks = []
        for callback_config in callback_configs.values():
            if callback_config.get("_target_"):
                callbacks.append(load_callback(callback_config))
            else:
                callbacks += load_callbacks(callback_config)
        return callbacks

    callbacks: List[Callback] = []
    if config.get("callbacks"):
        callbacks = load_callbacks(config.callbacks)
    return callbacks
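

# An illustrative sketch of the callback config layout this function handles
# (the names below are assumptions, not this repo's actual configs). Entries
# with a `_target_` are instantiated directly; nested groups without one are
# recursed into, so flat and grouped layouts can be mixed:
#
#   callbacks:
#     model_checkpoint:
#       _target_: pytorch_lightning.callbacks.ModelCheckpoint
#       monitor: val/loss
#     extra:                     # no _target_, so its children are loaded
#       lr_monitor:
#         _target_: pytorch_lightning.callbacks.LearningRateMonitor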


def configure_logger(config: DictConfig) -> List[Logger]:
    """Configures Lightning loggers."""

    def load_logger(logger_config: DictConfig) -> Logger:
        log.info(f"Instantiating logger <{logger_config._target_}>")
        return hydra.utils.instantiate(logger_config)

    logger: List[Logger] = []
    if config.get("logger"):
        for logger_config in config.logger.values():
            if logger_config.get("_target_"):
                logger.append(load_logger(logger_config))
    return logger
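

# Similarly, an illustrative logger config (the keys here are assumptions for
# the sketch, not guaranteed by this module); each entry with a `_target_` is
# instantiated:
#
#   logger:
#     wandb:
#       _target_: pytorch_lightning.loggers.wandb.WandbLogger
#       project: my-project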


def extras(config: DictConfig) -> None:
    """Sets optional utilities."""
    # Enable adding new keys.
    OmegaConf.set_struct(config, False)

    if config.get("ignore_warnings"):
        log.info("Disabling python warnings! <config.ignore_warnings=True>")
        warnings.filterwarnings("ignore")

    if config.get("debug"):
        log.info("Running in debug mode! <config.debug=True>")
        config.trainer.fast_dev_run = True

    if config.trainer.get("fast_dev_run"):
        log.info(
            "Forcing debugger friendly configuration! <config.trainer.fast_dev_run=True>"
        )
        # Debuggers do not like GPUs and multiprocessing.
        if config.trainer.get("gpus"):
            config.trainer.gpus = 0
        if config.trainer.get("precision"):
            config.trainer.precision = 32
        if config.get("datamodule"):
            if config.datamodule.get("pin_memory"):
                config.datamodule.pin_memory = False
            if config.datamodule.get("num_workers"):
                config.datamodule.num_workers = 0

    # Disable adding new keys to config
    OmegaConf.set_struct(config, True)
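

# For example, `python train.py debug=true` (a hypothetical entrypoint) sets
# trainer.fast_dev_run=True, which the block above then translates into a
# CPU-only, full-precision, single-process data loading configuration.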


@rank_zero_only
def log_hyperparameters(
    config: DictConfig,
    model: LightningModule,
    trainer: Trainer,
) -> None:
    """This method saves hyperparameters with the logger."""
    hparams = {}

    # Choose which parts of the hydra config will be saved to loggers.
    hparams["trainer"] = config["trainer"]
    hparams["model"] = config["model"]
    hparams["datamodule"] = config["datamodule"]

    # Save the number of model parameters.
    hparams["model/params/total"] = sum(p.numel() for p in model.parameters())
    hparams["model/params/trainable"] = sum(
        p.numel() for p in model.parameters() if p.requires_grad
    )
    hparams["model/params/non_trainable"] = sum(
        p.numel() for p in model.parameters() if not p.requires_grad
    )
    hparams["callbacks"] = config.get("callbacks")
    hparams["tags"] = config.get("tags")
    hparams["ckpt_path"] = config.get("ckpt_path")
    hparams["seed"] = config.get("seed")

    # Send hparams to every attached logger; `trainer.loggers` holds all of
    # them, whereas `trainer.logger` only exposes the first.
    for lg in trainer.loggers:
        lg.log_hyperparams(hparams)


def finish(
    logger: List[Logger],
) -> None:
    """Makes sure everything is closed properly."""
    for lg in logger:
        if isinstance(lg, WandbLogger):
            # wandb keeps a global run open; close it explicitly so that
            # subsequent runs (e.g. in a hydra multirun) do not reuse it.
            wandb.finish()
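

# A minimal sketch of how these helpers could be wired together in a hydra
# entrypoint. The config path/name and the `model`/`datamodule`/`trainer` keys
# are assumptions for illustration, not guaranteed by this module:
#
#   @hydra.main(config_path="../configs", config_name="train", version_base=None)
#   def train(config: DictConfig) -> None:
#       extras(config)
#       configure_logging(config)
#       print_config(config)
#       save_config(config)
#       model = hydra.utils.instantiate(config.model)
#       datamodule = hydra.utils.instantiate(config.datamodule)
#       loggers = configure_logger(config)
#       trainer = Trainer(
#           **config.trainer,
#           callbacks=configure_callbacks(config),
#           logger=loggers,
#       )
#       log_hyperparameters(config=config, model=model, trainer=trainer)
#       trainer.fit(model=model, datamodule=datamodule)
#       finish(loggers)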