path: root/text_recognizer/networks/coat/factor_attention.py
author    Gustaf Rydholm <gustaf.rydholm@gmail.com>    2021-04-24 23:09:20 +0200
committer Gustaf Rydholm <gustaf.rydholm@gmail.com>    2021-04-24 23:09:20 +0200
commit    4e60c836fb710baceba570c28c06437db3ad5c9b (patch)
tree      21caf6d1792bd83a47fb3d372ee7120211e83f18 /text_recognizer/networks/coat/factor_attention.py
parent    1ca8b0b9e0613c1e02f6a5d8b49e20c4d6916412 (diff)
Implementing CoaT transformer, continue tomorrow...
Diffstat (limited to 'text_recognizer/networks/coat/factor_attention.py')
-rw-r--r--  text_recognizer/networks/coat/factor_attention.py  |  9
1 file changed, 9 insertions, 0 deletions
diff --git a/text_recognizer/networks/coat/factor_attention.py b/text_recognizer/networks/coat/factor_attention.py
new file mode 100644
index 0000000..f91c5ef
--- /dev/null
+++ b/text_recognizer/networks/coat/factor_attention.py
@@ -0,0 +1,9 @@
+"""Factorized attention with convolutional relative positional encodings."""
+from torch import nn
+
+
+class FactorAttention(nn.Module):
+ """Factorized attention with relative positional encodings."""
+ def __init__(self, dim: int, num_heads: int) -> None:
+ pass
+
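
Note: the class body above is still a stub (the commit message says the implementation continues later). For reference, a minimal sketch of what CoaT-style factorized attention computes (Xu et al., 2021): keys are softmax-normalized over the token axis and aggregated against the values first, so K^T V is a small (d, d) matrix and the per-head cost is linear rather than quadratic in sequence length. The class name below, the projection layers, and the omission of the convolutional relative positional encoding named in the module docstring are assumptions of this sketch, not this commit's eventual implementation.

import torch
from torch import nn


class FactorAttentionSketch(nn.Module):
    """Hypothetical sketch: softmax over keys, then Q @ (K^T V)."""

    def __init__(self, dim: int, num_heads: int) -> None:
        super().__init__()
        self.num_heads = num_heads
        self.scale = (dim // num_heads) ** -0.5
        self.qkv = nn.Linear(dim, 3 * dim, bias=False)
        self.proj = nn.Linear(dim, dim)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        b, n, c = x.shape
        h = self.num_heads
        # Split into per-head queries, keys, and values: each (b, h, n, d).
        q, k, v = self.qkv(x).reshape(b, n, 3, h, c // h).permute(2, 0, 3, 1, 4)
        # Normalize keys over the token axis, then aggregate values first:
        # K^T V has shape (b, h, d, d), so the cost is linear in n.
        context = k.softmax(dim=2).transpose(-2, -1) @ v
        out = (self.scale * q) @ context
        return self.proj(out.transpose(1, 2).reshape(b, n, c))

Usage would look like FactorAttentionSketch(dim=256, num_heads=8) applied to a (batch, tokens, 256) tensor; the full CoaT module additionally adds a depthwise-convolutional relative positional encoding term to the output.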