diff options
author | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-10-27 22:13:54 +0200 |
---|---|---|
committer | Gustaf Rydholm <gustaf.rydholm@gmail.com> | 2021-10-27 22:13:54 +0200 |
commit | fb90a53b1235fd836dee74452f3f2a621e0f363a (patch) | |
tree | daae44aa5e7c1309a41a059594ce0c3fc92cbc26 /text_recognizer/networks/transformer/attention.py | |
parent | 8c7a59d58e2ce6b18384c9fcdba2fd49e5450b0e (diff) |
Rename transformer embeddings
Diffstat (limited to 'text_recognizer/networks/transformer/attention.py')
-rw-r--r-- | text_recognizer/networks/transformer/attention.py | 4 |
1 file changed, 1 insertion(+), 3 deletions(-)
diff --git a/text_recognizer/networks/transformer/attention.py b/text_recognizer/networks/transformer/attention.py
index e098b63..3d2ece1 100644
--- a/text_recognizer/networks/transformer/attention.py
+++ b/text_recognizer/networks/transformer/attention.py
@@ -10,9 +10,7 @@
 from torch import nn
 from torch import Tensor
 import torch.nn.functional as F
-from text_recognizer.networks.transformer.positional_encodings.rotary_embedding import (
-    apply_rotary_pos_emb,
-)
+from text_recognizer.networks.transformer.embeddings.rotary import apply_rotary_pos_emb


 @attr.s(eq=False)