# -*- coding: utf-8 -*-
# Copyright (c) 2023-2025, Songlin Yang, Yu Zhang
from .abc import ABCAttention
from .attn import Attention
from .based import BasedLinearAttention
from .bitattn import BitAttention
from .delta_net import DeltaNet
from .forgetting_attn import ForgettingAttention
from .gated_deltanet import GatedDeltaNet
from .gated_deltaproduct import GatedDeltaProduct
from .gla import GatedLinearAttention
from .gsa import GatedSlotAttention
from .hgrn import HGRNAttention
from .hgrn2 import HGRN2Attention
from .lightnet import LightNetAttention
from .linear_attn import LinearAttention
from .multiscale_retention import MultiScaleRetention
from .nsa import NativeSparseAttention
from .rebased import ReBasedLinearAttention
from .rwkv6 import RWKV6Attention
from .rwkv7 import RWKV7Attention
# Public names exported by this package; mirrors the import list above,
# one entry per attention/recurrence layer implementation.
__all__ = [
    "ABCAttention",
    "Attention",
    "BasedLinearAttention",
    "BitAttention",
    "DeltaNet",
    "ForgettingAttention",
    "GatedDeltaNet",
    "GatedDeltaProduct",
    "GatedLinearAttention",
    "GatedSlotAttention",
    "HGRNAttention",
    "HGRN2Attention",
    "LightNetAttention",
    "LinearAttention",
    "MultiScaleRetention",
    "NativeSparseAttention",
    "ReBasedLinearAttention",
    "RWKV6Attention",
    "RWKV7Attention",
]
|