summaryrefslogtreecommitdiff
path: root/text_recognizer/networks/transformer/attention.py
diff options
context:
space:
mode:
Diffstat (limited to 'text_recognizer/networks/transformer/attention.py')
-rw-r--r--text_recognizer/networks/transformer/attention.py4
1 file changed, 1 insertion, 3 deletions
diff --git a/text_recognizer/networks/transformer/attention.py b/text_recognizer/networks/transformer/attention.py
index e098b63..3d2ece1 100644
--- a/text_recognizer/networks/transformer/attention.py
+++ b/text_recognizer/networks/transformer/attention.py
@@ -10,9 +10,7 @@ from torch import nn
from torch import Tensor
import torch.nn.functional as F
-from text_recognizer.networks.transformer.positional_encodings.rotary_embedding import (
- apply_rotary_pos_emb,
-)
+from text_recognizer.networks.transformer.embeddings.rotary import apply_rotary_pos_emb
@attr.s(eq=False)