# -*- coding: utf-8 -*-
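"""Top-level package exports for fla (flash-linear-attention).

Re-exports the attention/token-mixing layers from ``fla.layers`` and the
corresponding Hugging Face-style model classes from ``fla.models``, so both
can be imported directly from the package root, e.g.:

    from fla import DeltaNet, DeltaNetForCausalLM
"""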
from fla.layers import (
ABCAttention,
Attention,
BasedLinearAttention,
BitAttention,
DeltaNet,
GatedDeltaNet,
GatedDeltaProduct,
GatedLinearAttention,
GatedSlotAttention,
HGRN2Attention,
HGRNAttention,
LightNetAttention,
LinearAttention,
MultiScaleRetention,
NativeSparseAttention,
ReBasedLinearAttention,
RWKV6Attention,
RWKV7Attention
)
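
# Hugging Face-style model classes (`*Model` backbones and `*ForCausalLM`
# language-model heads) corresponding to the layers above.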
from fla.models import (
ABCForCausalLM,
ABCModel,
BitNetForCausalLM,
BitNetModel,
DeltaNetForCausalLM,
DeltaNetModel,
GatedDeltaNetForCausalLM,
GatedDeltaNetModel,
GatedDeltaProductForCausalLM,
GatedDeltaProductModel,
GLAForCausalLM,
GLAModel,
GSAForCausalLM,
GSAModel,
HGRN2ForCausalLM,
HGRN2Model,
    HGRNForCausalLM,
    HGRNModel,
LightNetForCausalLM,
LightNetModel,
LinearAttentionForCausalLM,
LinearAttentionModel,
NSAForCausalLM,
NSAModel,
RetNetForCausalLM,
RetNetModel,
RWKV6ForCausalLM,
RWKV6Model,
RWKV7ForCausalLM,
RWKV7Model,
TransformerForCausalLM,
TransformerModel
)
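
# Public API: every name imported above is re-exported here.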
__all__ = [
'ABCAttention',
'Attention',
'BasedLinearAttention',
'BitAttention',
'DeltaNet',
'GatedDeltaNet',
'GatedDeltaProduct',
'GatedLinearAttention',
'GatedSlotAttention',
    'HGRN2Attention',
    'HGRNAttention',
'LightNetAttention',
'LinearAttention',
'MultiScaleRetention',
'NativeSparseAttention',
'ReBasedLinearAttention',
'RWKV6Attention',
'RWKV7Attention',
'ABCForCausalLM',
'ABCModel',
'BitNetForCausalLM',
'BitNetModel',
'DeltaNetForCausalLM',
'DeltaNetModel',
'GatedDeltaNetForCausalLM',
'GatedDeltaNetModel',
'GatedDeltaProductForCausalLM',
'GatedDeltaProductModel',
'GLAForCausalLM',
'GLAModel',
'GSAForCausalLM',
'GSAModel',
    'HGRN2ForCausalLM',
    'HGRN2Model',
    'HGRNForCausalLM',
    'HGRNModel',
'LightNetForCausalLM',
'LightNetModel',
'LinearAttentionForCausalLM',
'LinearAttentionModel',
'NSAForCausalLM',
'NSAModel',
'RetNetForCausalLM',
'RetNetModel',
'RWKV6ForCausalLM',
'RWKV6Model',
'RWKV7ForCausalLM',
'RWKV7Model',
'TransformerForCausalLM',
'TransformerModel',
]
__version__ = '0.1.2'