Diffstat (limited to 'text_recognizer/networks/transformer/attention_layers.py')
-rw-r--r--  text_recognizer/networks/transformer/attention_layers.py  19
1 file changed, 19 insertions(+), 0 deletions(-)
diff --git a/text_recognizer/networks/transformer/attention_layers.py b/text_recognizer/networks/transformer/attention_layers.py
new file mode 100644
index 0000000..721fa27
--- /dev/null
+++ b/text_recognizer/networks/transformer/attention_layers.py
@@ -0,0 +1,19 @@
+"""Generates the attention layer architecture."""
+from typing import Type
+
+import torch
+from torch import nn, Tensor
+
+
+class AttentionLayers(nn.Module):
+    def __init__(
+        self,
+        dim: int,
+        depth: int,
+        num_heads: int,
+        norm_layer: Type[nn.Module],
+        causal: bool = False,
+        cross_attend: bool = False,
+        only_cross: bool = False,
+    ) -> None:
+        super().__init__()
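
For orientation, below is a minimal sketch of one way this stub could grow into a working module. Everything past the committed signature is an assumption: the pre-norm residual layout, the use of nn.MultiheadAttention for both the self- and cross-attention sublayers, the 4x dim feed-forward width, and the forward signature with its optional context argument are illustrative choices, not the repository's actual implementation.

"""Illustrative completion of AttentionLayers (not the committed code)."""
from typing import Optional, Type

import torch
from torch import nn, Tensor


class AttentionLayers(nn.Module):
    def __init__(
        self,
        dim: int,
        depth: int,
        num_heads: int,
        norm_layer: Type[nn.Module],
        causal: bool = False,
        cross_attend: bool = False,
        only_cross: bool = False,
    ) -> None:
        super().__init__()
        self.causal = causal
        self.layers = nn.ModuleList()
        for _ in range(depth):
            block = nn.ModuleDict()
            if not only_cross:
                # Self-attention sublayer (assumed: pre-norm + MultiheadAttention).
                block["self_norm"] = norm_layer(dim)
                block["self_attn"] = nn.MultiheadAttention(dim, num_heads, batch_first=True)
            if cross_attend or only_cross:
                # Cross-attention sublayer attending to an encoder context.
                block["cross_norm"] = norm_layer(dim)
                block["cross_attn"] = nn.MultiheadAttention(dim, num_heads, batch_first=True)
            # Position-wise feed-forward; the 4x expansion is an assumption.
            block["ff_norm"] = norm_layer(dim)
            block["ff"] = nn.Sequential(
                nn.Linear(dim, 4 * dim), nn.GELU(), nn.Linear(4 * dim, dim)
            )
            self.layers.append(block)

    def forward(self, x: Tensor, context: Optional[Tensor] = None) -> Tensor:
        mask = None
        if self.causal:
            # Boolean mask where True blocks attention to future positions.
            n = x.shape[1]
            mask = torch.triu(torch.ones(n, n, device=x.device), diagonal=1).bool()
        for block in self.layers:
            if "self_attn" in block:
                h = block["self_norm"](x)
                x = x + block["self_attn"](h, h, h, attn_mask=mask, need_weights=False)[0]
            if "cross_attn" in block and context is not None:
                h = block["cross_norm"](x)
                x = x + block["cross_attn"](h, context, context, need_weights=False)[0]
            x = x + block["ff"](block["ff_norm"](x))
        return x

Under these assumptions, AttentionLayers(dim=256, depth=4, num_heads=8, norm_layer=nn.LayerNorm, causal=True) would behave like a small decoder stack; passing cross_attend=True together with a context tensor would add encoder-decoder attention, and only_cross=True would drop the self-attention sublayer entirely.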