"""Absolute positional embedding."""
import torch
from torch import nn, Tensor
class AbsolutePositionalEmbedding(nn.Module):
def __init__(self, dim: int, max_seq_len: int) -> None:
super().__init__()
self.emb = nn.Embedding(max_seq_len, dim)
self._weight_init()
def _weight_init(self) -> None:
nn.init.normal_(self.emb.weight, std=0.02)
def forward(self, x: Tensor) -> Tensor:
n = torch.arange(x.shape[1], device=x.device)
return self.emb(n)[None, :, :]
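

# A minimal usage sketch, assuming the input is a batch of token embeddings
# shaped (batch, seq_len, dim); the dim/seq_len values here are illustrative,
# not prescribed by the module above. The (1, seq_len, dim) output broadcasts
# over the batch dimension when added to the token embeddings.
if __name__ == "__main__":
    pos_emb = AbsolutePositionalEmbedding(dim=64, max_seq_len=512)
    tokens = torch.randn(2, 128, 64)  # (batch=2, seq_len=128, dim=64)
    out = tokens + pos_emb(tokens)    # positional embeddings broadcast over batch
    print(out.shape)                  # torch.Size([2, 128, 64])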