"""Nyströmer encoder.

Stolen from:
    https://github.com/lucidrains/nystrom-attention/blob/main/nystrom_attention/nystrom_attention.py

"""
from typing import Optional

from torch import nn, Tensor

from text_recognizer.networks.transformer.mlp import FeedForward
from text_recognizer.networks.transformer.norm import PreNorm
from text_recognizer.networks.transformer.nystromer.attention import NystromAttention


class Nystromer(nn.Module):
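    """Nyströmer encoder: a stack of pre-norm Nyström self-attention and feed-forward blocks."""
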
    def __init__(
        self,
        *,
        dim: int,
        depth: int,
        dim_head: int = 64,
        num_heads: int = 8,
        num_landmarks: int = 256,  # number of landmark tokens in the Nyström approximation
        inverse_iter: int = 6,  # iterations of the Moore-Penrose pseudoinverse approximation
        residual: bool = True,  # add a depthwise-conv residual to the attention values
        residual_conv_kernel: int = 33,  # kernel size of that residual convolution
        dropout_rate: float = 0.0,
        glu: bool = True,  # use a gated linear unit in the feed-forward block
    ) -> None:
        super().__init__()
        self.dim = dim
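        # Each layer pairs two pre-norm residual sublayers:
        # Nyström attention followed by an (optionally gated) feed-forward MLP.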
        self.layers = nn.ModuleList(
            [
                nn.ModuleList(
                    [
                        PreNorm(
                            dim,
                            NystromAttention(
                                dim=dim,
                                dim_head=dim_head,
                                num_heads=num_heads,
                                num_landmarks=num_landmarks,
                                inverse_iter=inverse_iter,
                                residual=residual,
                                residual_conv_kernel=residual_conv_kernel,
                                dropout_rate=dropout_rate,
                            ),
                        ),
                        PreNorm(
                            dim,
                            FeedForward(dim=dim, glu=glu, dropout_rate=dropout_rate),
                        ),
                    ]
                )
                for _ in range(depth)
            ]
        )

    def forward(self, x: Tensor, mask: Optional[Tensor] = None) -> Tensor:
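        # Apply each pre-norm residual pair: attention first, then feed-forward.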
        for attn, ff in self.layers:
            x = attn(x, mask=mask) + x
            x = ff(x) + x
        return x
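

# A minimal usage sketch (not part of the original module). The input shapes and
# the boolean mask convention are assumptions based on the constructor signature
# above and the referenced lucidrains implementation.
if __name__ == "__main__":
    import torch

    encoder = Nystromer(dim=256, depth=4, num_heads=8, num_landmarks=64)
    x = torch.randn(1, 1024, 256)  # (batch size, sequence length, dim)
    mask = torch.ones(1, 1024, dtype=torch.bool)  # True = position may be attended to
    out = encoder(x, mask=mask)
    print(out.shape)  # expected: torch.Size([1, 1024, 256])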