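# Pinned dependencies. Assumed install flow (not spelled out in this file):
# a Python 3.10 / CUDA 12.3 environment, then `pip install -r requirements.txt`.

# Prebuilt FlashAttention 2.6.2 wheel (CUDA 12.3, PyTorch 2.1, CPython 3.10,
# Linux x86_64, built without the C++11 ABI)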
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.2/flash_attn-2.6.2+cu123torch2.1cxx11abiFALSE-cp310-cp310-linux_x86_64.whl

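# PyTorch 2.1.2 with the matching torchaudio and torchvision releases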
torch==2.1.2
torchaudio==2.1.2
torchvision==0.16.2

transformers==4.44.2

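# Numerics plus audio (librosa) and video (decord) loading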
numpy==1.26.4
librosa==0.9.0
decord

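# Vector-quantization layers and the Vocos neural vocoder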
vector-quantize-pytorch==1.18.5
vocos==0.1.0