authorGustaf Rydholm <gustaf.rydholm@gmail.com>2021-05-09 00:36:55 +0200
committerGustaf Rydholm <gustaf.rydholm@gmail.com>2021-05-09 00:36:55 +0200
commit548f52b35062e258622ea638ed1b132d6759a07a (patch)
treee9262d0f934ac4f9392f20cb4fcf7be6033e3cb7 /text_recognizer/networks/transformer/attention_layers.py
parent805d5726c17b83e00dcea0d2608dcd83a91f723d (diff)
Attention layer soon done
Diffstat (limited to 'text_recognizer/networks/transformer/attention_layers.py')
-rw-r--r--  text_recognizer/networks/transformer/attention_layers.py | 19
1 file changed, 0 insertions(+), 19 deletions(-)
diff --git a/text_recognizer/networks/transformer/attention_layers.py b/text_recognizer/networks/transformer/attention_layers.py
deleted file mode 100644
index 721fa27..0000000
--- a/text_recognizer/networks/transformer/attention_layers.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"""Generates the attention layer architecture."""
-from typing import Type
-
-import torch
-from torch import nn, Tensor
-
-
-class AttentionLayers(nn.Module):
- def __init__(
- self,
- dim: int,
- depth: int,
- num_heads: int,
- norm_layer: Type[nn.Module],
- causal: bool = False,
- cross_attend: bool = False,
- only_cross: bool = False,
- ) -> None:
- pass
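Note: the deleted file was only a stub. The constructor stored none of its arguments, never called super().__init__(), and the class had no forward pass. For readers following the refactor, below is a minimal sketch of how such a signature is commonly filled in; the argument names (dim, depth, causal, cross_attend, only_cross) appear modeled on the AttentionLayers API of lucidrains' x-transformers. Everything beyond the constructor signature is an assumption: the FeedForward helper, the pre-norm residual wiring, and the nn.LayerNorm default are illustrative, not recovered from this repository.

"""Hypothetical completion of the deleted AttentionLayers stub (a sketch, not the author's code)."""
from typing import Optional, Type

import torch
from torch import nn, Tensor


class FeedForward(nn.Module):
    """Position-wise feedforward block (hypothetical helper, not from the repo)."""

    def __init__(self, dim: int, expansion: int = 4) -> None:
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(dim, dim * expansion),
            nn.GELU(),
            nn.Linear(dim * expansion, dim),
        )

    def forward(self, x: Tensor) -> Tensor:
        return self.net(x)


class AttentionLayers(nn.Module):
    """Stack of pre-norm self-attention / cross-attention / feedforward blocks."""

    def __init__(
        self,
        dim: int,
        depth: int,
        num_heads: int,
        norm_layer: Type[nn.Module] = nn.LayerNorm,  # default is an assumption
        causal: bool = False,
        cross_attend: bool = False,
        only_cross: bool = False,
    ) -> None:
        super().__init__()
        self.causal = causal
        self.only_cross = only_cross and cross_attend
        self.layers = nn.ModuleList()
        for _ in range(depth):
            # batch_first on nn.MultiheadAttention requires PyTorch >= 1.9.
            block = nn.ModuleDict(
                {
                    "self_norm": norm_layer(dim),
                    "self_attn": nn.MultiheadAttention(dim, num_heads, batch_first=True),
                    "ff_norm": norm_layer(dim),
                    "ff": FeedForward(dim),
                }
            )
            if cross_attend:
                block["cross_norm"] = norm_layer(dim)
                block["cross_attn"] = nn.MultiheadAttention(dim, num_heads, batch_first=True)
            self.layers.append(block)

    def forward(self, x: Tensor, context: Optional[Tensor] = None) -> Tensor:
        mask = None
        if self.causal:
            n = x.shape[1]
            # True entries above the diagonal block attention to future positions.
            mask = torch.triu(torch.ones(n, n, dtype=torch.bool, device=x.device), 1)
        for block in self.layers:
            if not self.only_cross:
                h = block["self_norm"](x)
                x = x + block["self_attn"](h, h, h, attn_mask=mask, need_weights=False)[0]
            if "cross_attn" in block and context is not None:
                h = block["cross_norm"](x)
                x = x + block["cross_attn"](h, context, context, need_weights=False)[0]
            x = x + block["ff"](block["ff_norm"](x))
        return x

Usage under these assumptions (dim must be divisible by num_heads):

    layers = AttentionLayers(dim=256, depth=4, num_heads=8, causal=True)
    out = layers(torch.randn(1, 32, 256))  # -> (1, 32, 256)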