path: root/text_recognizer/networks/vqvae/residual.py
"""Residual block."""
import attr
from torch import nn
from torch import Tensor

from text_recognizer.networks.util import activation_function
from text_recognizer.networks.vqvae.norm import Normalize


@attr.s(eq=False)
class Residual(nn.Module):
    """Pre-activation residual block: a small conv stack whose output is added back to the input."""

    in_channels: int = attr.ib()
    residual_channels: int = attr.ib()
    use_norm: bool = attr.ib(default=False)
    activation: str = attr.ib(default="relu")

    def __attrs_post_init__(self) -> None:
        """Post init configuration."""
        super().__init__()
        self.block = self._build_res_block()

    def _build_res_block(self) -> nn.Sequential:
        """Build the residual transform: activation -> 3x3 conv -> activation -> 1x1 conv, with optional pre-norms."""
        block = []
        activation_fn = activation_function(activation=self.activation)

        if self.use_norm:
            block.append(Normalize(num_channels=self.in_channels))

        block += [
            activation_fn,
            nn.Conv2d(
                self.in_channels,
                self.residual_channels,
                kernel_size=3,
                padding=1,
                bias=False,
            ),
        ]

        if self.use_norm:
            block.append(Normalize(num_channels=self.residual_channels))

        block += [
            activation_fn,
            nn.Conv2d(
                self.residual_channels, self.in_channels, kernel_size=1, bias=False
            ),
        ]
        return nn.Sequential(*block)

    def forward(self, x: Tensor) -> Tensor:
        """Apply the residual forward pass."""
        return x + self.block(x)
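

if __name__ == "__main__":
    # Minimal usage sketch (illustrative, not part of the original module).
    # Assumes the repo's ``activation_function`` helper returns a standard
    # activation module for "relu"; the channel sizes below are arbitrary.
    import torch

    block = Residual(in_channels=32, residual_channels=16)
    x = torch.randn(2, 32, 8, 8)  # (batch, channels, height, width)
    out = block(x)
    # The block is shape-preserving: output shape equals input shape.
    assert out.shape == x.shape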