author     Gustaf Rydholm <gustaf.rydholm@gmail.com>  2023-08-25 23:19:14 +0200
committer  Gustaf Rydholm <gustaf.rydholm@gmail.com>  2023-08-25 23:19:14 +0200
commit     49ca6ade1a19f7f9c702171537fe4be0dfcda66d (patch)
tree       20062ed1910758481f3d5fff11159706c7b990c6 /text_recognizer/networks/transformer/embeddings/rotary.py
parent     0421daf6bd97596703f426ba61c401599b538eeb (diff)
Rename and add flash atten
Diffstat (limited to 'text_recognizer/networks/transformer/embeddings/rotary.py')
-rw-r--r--  text_recognizer/networks/transformer/embeddings/rotary.py  67
1 file changed, 0 insertions, 67 deletions
diff --git a/text_recognizer/networks/transformer/embeddings/rotary.py b/text_recognizer/networks/transformer/embeddings/rotary.py
deleted file mode 100644
index ca0a260..0000000
--- a/text_recognizer/networks/transformer/embeddings/rotary.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""Roatary embedding.
-
-Stolen from lucidrains:
- https://github.com/lucidrains/rotary-embedding-torch
-
-Explanation of rotary:
- https://blog.eleuther.ai/rotary-embeddings/
-"""
-from inspect import isfunction
-
-from einops import rearrange, repeat
-import torch
-from torch import Tensor, nn
-
-
-class RotaryEmbedding(nn.Module):
- """Rotary positional embedding."""
-
- def __init__(self, dim: int) -> None:
- super().__init__()
- inv_freqs = 1.0 / (10000 ** (torch.arange(0, dim, 2).float() / dim))
- self.register_buffer("inv_freqs", inv_freqs)
- self.cache = {}
-
- def rotate(self, t: Tensor, dim: int = -2) -> Tensor:
- """Rotate vector."""
- device, n = t.device, t.shape[dim]
- freqs = self.forward(lambda: torch.arange(n, device=device), cache_key=n)
- return apply_rotary_emb(t, freqs)
-
- def forward(self, t: Tensor, cache_key: int) -> Tensor:
- """Encodes tensor x with rotary embeddings."""
- if cache_key in self.cache:
- return self.cache[cache_key]
-
- if isfunction(t):
- t = t()
-
- freqs = self.inv_freqs
- freqs = torch.einsum("..., f -> ... f", t.type(freqs.dtype), freqs)
- freqs = repeat(freqs, "... n -> ... (n r)", r=2)
- self.cache[cache_key] = freqs
- return freqs
-
-
-def rotate_half(x: Tensor) -> Tensor:
-    x = rearrange(x, "... (d r) -> ... d r", r=2)
-    x1, x2 = x.unbind(dim=-1)
-    x = torch.stack((-x2, x1), dim=-1)
-    return rearrange(x, "... d r -> ... (d r)")
-
-
-def apply_rotary_emb(t: Tensor, freqs: Tensor, start_index: int = 0) -> Tensor:
-    freqs = freqs.to(t)
-    rot_dim = freqs.shape[-1]
-    end_index = start_index + rot_dim
-    assert rot_dim <= t.shape[-1], (
-        f"feature dimension {t.shape[-1]} is not of sufficient size to rotate "
-        f"in all the positions {rot_dim}"
-    )
-    t_left, t, t_right = (
-        t[..., :start_index],
-        t[..., start_index:end_index],
-        t[..., end_index:],
-    )
-    t = (t * freqs.cos()) + (rotate_half(t) * freqs.sin())
-    return torch.cat((t_left, t, t_right), dim=-1)
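
For reference, a minimal usage sketch of the RotaryEmbedding module removed by this commit. It is illustrative only: the import path assumes the file still existed at its old location, and the (batch, heads, seq_len, dim_head) shapes are made up for the example.

# Hedged sketch, not part of this commit: queries and keys are rotated per
# attention head; rotate() reads the sequence length from dim=-2, builds (and
# caches) the per-position frequencies, and rotates the last dimension.
import torch

from text_recognizer.networks.transformer.embeddings.rotary import RotaryEmbedding

dim_head = 64
rotary = RotaryEmbedding(dim=dim_head)

q = torch.randn(1, 8, 128, dim_head)  # (batch, heads, seq_len, dim_head), example shapes
k = torch.randn(1, 8, 128, dim_head)
q, k = rotary.rotate(q), rotary.rotate(k)

Rotating q and k in this way, rather than adding a positional vector to the token embeddings, makes the attention dot product depend on relative positions, which is the point of the rotary scheme described in the linked EleutherAI post.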