Shanshan Wang committed on
Commit 0500e0e
Parent: f109f0a
Files changed (1):
  1. requirements.txt +1 -1
requirements.txt CHANGED
@@ -11,6 +11,7 @@ torch==2.4.0
 torchvision==0.19.0
 pillow==10.2.0
 transformers @ git+https://github.com/huggingface/transformers@673440d073d5f534a6d6bedeeca94869afd8d0a7
+flash-attn @ git+https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu122torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
 peft==0.11.1
 absl-py==2.1.0
 accelerate==0.30.1
@@ -50,7 +51,6 @@ fastapi-cli==0.0.4
 fastchat==0.1.0
 ffmpy==0.3.2
 filelock==3.13.1
-flash-attn==2.6.3
 flatbuffers==24.3.25
 fonttools==4.51.0
 frozenlist==1.4.1
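
The net effect is to swap the flash-attn 2.6.3 sdist, which pip would compile from source, for a prebuilt 2.5.8 wheel; the wheel filename encodes the build it targets (CUDA 12.2, torch 2.3, CXX11 ABI off, CPython 3.10, linux x86_64). As a minimal sketch, the equivalent one-off install in a matching environment would be the direct-URL form below; note that pip direct URLs to wheel files are normally written without the git+ prefix, which pip reserves for VCS checkouts:

    pip install "flash-attn @ https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.8/flash_attn-2.5.8+cu122torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl"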