path: root/text_recognizer/networks/transformer/axial_attention/encoder.py

"""Axial transformer encoder."""

from typing import List

import attr
from torch import nn, Tensor

from text_recognizer.networks.transformer.axial_attention.self_attention import (
    SelfAttention,
)
from text_recognizer.networks.transformer.axial_attention.utils import (
    calculate_permutations,
    PermuteToForm,
    Sequential,
)
from text_recognizer.networks.transformer.norm import PreNorm


@attr.s(eq=False)
class AxialEncoder(nn.Module):
    """Axial transfomer encoder."""

    def __attrs_pre_init__(self) -> None:
        super().__init__()

    shape: List[int] = attr.ib()  # Spatial (height, width) of the input feature map.
    dim: int = attr.ib()  # Channel/embedding dimension.
    depth: int = attr.ib()  # Number of encoder blocks.
    heads: int = attr.ib()  # Number of attention heads.
    dim_head: int = attr.ib()  # Dimension per attention head.
    dim_index: int = attr.ib()  # Index of the channel axis in the input tensor.
    fn: Sequential = attr.ib(init=False)  # Encoder layers, built in __attrs_post_init__.

    def __attrs_post_init__(self) -> None:
        self._build()

    def _build(self) -> None:
        # Permutations for attending along each of the two spatial axes
        # (height and width) of the input feature map.
        permutations = calculate_permutations(2, self.dim_index)

        def get_ff() -> nn.Sequential:
            """Builds a convolutional feed-forward block."""
            return nn.Sequential(
                nn.LayerNorm([self.dim, *self.shape]),
                nn.Conv2d(
                    in_channels=self.dim,
                    out_channels=4 * self.dim,
                    kernel_size=3,
                    padding=1,
                ),
                nn.Mish(inplace=True),
                nn.Conv2d(
                    in_channels=4 * self.dim,
                    out_channels=self.dim,
                    kernel_size=3,
                    padding=1,
                ),
            )

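        # One encoder block: axial self-attention along each spatial axis
        # (wrapped in PermuteToForm + PreNorm), followed by two convolutional
        # feed-forward blocks; repeated `depth` times.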
        layers = nn.ModuleList([])
        for _ in range(self.depth):
            attns = nn.ModuleList(
                [
                    PermuteToForm(
                        permutation=permutation,
                        fn=PreNorm(
                            self.dim,
                            SelfAttention(
                                dim=self.dim, heads=self.heads, dim_head=self.dim_head
                            ),
                        ),
                    )
                    for permutation in permutations
                ]
            )
            convs = nn.ModuleList([get_ff(), get_ff()])
            layers.append(attns)
            layers.append(convs)

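        # Chain the attention and feed-forward blocks with the axial-attention
        # Sequential helper imported from utils.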
        self.fn = Sequential(layers)

    def forward(self, x: Tensor) -> Tensor:
        """Applies the axial encoder blocks to the input."""
        return self.fn(x)
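

if __name__ == "__main__":
    # Minimal smoke-test sketch of how the encoder might be instantiated.
    # The hyperparameters below are illustrative assumptions, not values
    # taken from the repository's configs.
    import torch

    encoder = AxialEncoder(
        shape=[32, 128],  # (height, width) of the feature map
        dim=128,  # channel/embedding dimension
        depth=2,  # number of (attention, feed-forward) blocks
        heads=4,
        dim_head=32,
        dim_index=1,  # channel axis in a (B, C, H, W) tensor
    )
    x = torch.rand(1, 128, 32, 128)
    out = encoder(x)
    print(out.shape)  # Expected to match the input shape: (1, 128, 32, 128)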