"""Factorized attention with convolutional relative positional encodings."""
from torch import nn
class FactorAttention(nn.Module):
"""Factorized attention with relative positional encodings."""
def __init__(self, dim: int, num_heads: int) -> None:
pass
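

import torch  # used by the sketch below for tensor type hints


# A minimal sketch, not part of the original module: one way FactorAttention
# could be completed, assuming a CoaT-style factorization (softmax over the
# key/token axis so that K^T V is computed before Q, giving cost linear in
# sequence length) and a depthwise-convolutional relative positional encoding
# gated elementwise by Q. The class name and the `qkv`, `proj`, `crpe`, and
# `kernel_size` names below are illustrative assumptions.
class _FactorAttentionSketch(nn.Module):
    """Sketch: factorized attention plus a depthwise-conv positional term."""

    def __init__(self, dim: int, num_heads: int, kernel_size: int = 3) -> None:
        super().__init__()
        assert dim % num_heads == 0, "dim must be divisible by num_heads"
        self.num_heads = num_heads
        self.head_dim = dim // num_heads
        self.scale = self.head_dim ** -0.5
        self.qkv = nn.Linear(dim, dim * 3)   # joint Q, K, V projection
        self.proj = nn.Linear(dim, dim)      # output projection
        # Depthwise Conv2d over the 2D token grid supplies the relative
        # positional term (groups=dim keeps it per-channel).
        self.crpe = nn.Conv2d(
            dim, dim, kernel_size, padding=kernel_size // 2, groups=dim
        )

    def forward(self, x: torch.Tensor, height: int, width: int) -> torch.Tensor:
        # x: (batch, seq_len, dim), where seq_len == height * width.
        b, n, c = x.shape
        qkv = self.qkv(x).reshape(b, n, 3, self.num_heads, self.head_dim)
        q, k, v = qkv.permute(2, 0, 3, 1, 4)  # each (b, heads, n, head_dim)

        # Factorized attention: softmax over the token axis of K lets us form
        # K^T V first, so the cost is O(n * d^2) rather than O(n^2 * d).
        context = k.softmax(dim=2).transpose(-2, -1) @ v   # (b, heads, d, d)
        factor_att = self.scale * (q @ context)            # (b, heads, n, d)

        # Convolutional relative positional encoding: depthwise-convolve V on
        # the 2D grid, then gate elementwise by Q.
        v_img = v.transpose(-2, -1).reshape(b, c, height, width)
        pos = self.crpe(v_img).reshape(b, self.num_heads, self.head_dim, n)
        pos = q * pos.transpose(-2, -1)                    # (b, heads, n, d)

        out = (factor_att + pos).transpose(1, 2).reshape(b, n, c)
        return self.proj(out)


# Example usage of the sketch:
#   attn = _FactorAttentionSketch(dim=64, num_heads=8)
#   y = attn(torch.randn(2, 49, 64), height=7, width=7)   # y: (2, 49, 64)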