bump versions of deps (#1621)
Browse files

* bump versions of deps
* bump transformers too
* fix xformers deps and include s3fs install
- docker/Dockerfile +1 -1
- requirements.txt +8 -8
- setup.py +10 -7
docker/Dockerfile
CHANGED
|
@@ -11,7 +11,7 @@ ARG PYTORCH_VERSION="2.1.2"
|
|
| 11 |
ENV PYTORCH_VERSION=$PYTORCH_VERSION
|
| 12 |
|
| 13 |
RUN apt-get update && \
|
| 14 |
- apt-get install -y --allow-change-held-packages vim curl nano libnccl2 libnccl-dev
|
| 15 |
|
| 16 |
WORKDIR /workspace
|
| 17 |
|
|
|
|
| 11 |
ENV PYTORCH_VERSION=$PYTORCH_VERSION
|
| 12 |
|
| 13 |
RUN apt-get update && \
|
| 14 |
+ apt-get install -y --allow-change-held-packages vim curl nano libnccl2 libnccl-dev s3fs
|
| 15 |
|
| 16 |
WORKDIR /workspace
|
| 17 |
|
requirements.txt
CHANGED
|
@@ -1,22 +1,22 @@
|
|
| 1 |
--extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
|
| 2 |
packaging==23.2
|
| 3 |
peft==0.10.0
|
| 4 |
- transformers
|
| 5 |
- tokenizers==0.
|
| 6 |
- bitsandbytes==0.43.
|
| 7 |
- accelerate==0.
|
| 8 |
- deepspeed==0.
|
| 9 |
pydantic==2.6.3
|
| 10 |
addict
|
| 11 |
fire
|
| 12 |
PyYAML>=6.0
|
| 13 |
requests
|
| 14 |
- datasets==2.
|
| 15 |
- flash-attn==2.5.
|
| 16 |
sentencepiece
|
| 17 |
wandb
|
| 18 |
einops
|
| 19 |
- xformers==0.0.
|
| 20 |
optimum==1.16.2
|
| 21 |
hf_transfer
|
| 22 |
colorama
|
|
|
|
| 1 |
--extra-index-url https://huggingface.github.io/autogptq-index/whl/cu118/
|
| 2 |
packaging==23.2
|
| 3 |
peft==0.10.0
|
| 4 |
+ transformers==4.40.2
|
| 5 |
+ tokenizers==0.19.1
|
| 6 |
+ bitsandbytes==0.43.1
|
| 7 |
+ accelerate==0.30.1
|
| 8 |
+ deepspeed==0.14.2
|
| 9 |
pydantic==2.6.3
|
| 10 |
addict
|
| 11 |
fire
|
| 12 |
PyYAML>=6.0
|
| 13 |
requests
|
| 14 |
+ datasets==2.19.1
|
| 15 |
+ flash-attn==2.5.8
|
| 16 |
sentencepiece
|
| 17 |
wandb
|
| 18 |
einops
|
| 19 |
+ xformers==0.0.23.post1
|
| 20 |
optimum==1.16.2
|
| 21 |
hf_transfer
|
| 22 |
colorama
|
setup.py
CHANGED
|
@@ -30,7 +30,7 @@ def parse_requirements():
|
|
| 30 |
|
| 31 |
try:
|
| 32 |
if "Darwin" in platform.system():
|
| 33 |
- _install_requires.pop(_install_requires.index("xformers==0.0.
|
| 34 |
else:
|
| 35 |
torch_version = version("torch")
|
| 36 |
_install_requires.append(f"torch=={torch_version}")
|
|
@@ -45,9 +45,12 @@ def parse_requirements():
|
|
| 45 |
else:
|
| 46 |
raise ValueError("Invalid version format")
|
| 47 |
|
| 48 |
- if (major, minor) >= (2,
|
| 49 |
- _install_requires.pop(_install_requires.index("xformers==0.0.
|
| 50 |
- _install_requires.append("xformers>=0.0.
|
|
|
|
|
|
|
|
|
|
| 51 |
except PackageNotFoundError:
|
| 52 |
pass
|
| 53 |
|
|
@@ -68,13 +71,13 @@ setup(
|
|
| 68 |
dependency_links=dependency_links,
|
| 69 |
extras_require={
|
| 70 |
"flash-attn": [
|
| 71 |
- "flash-attn==2.5.
|
| 72 |
],
|
| 73 |
"fused-dense-lib": [
|
| 74 |
- "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.
|
| 75 |
],
|
| 76 |
"deepspeed": [
|
| 77 |
- "deepspeed==0.
|
| 78 |
"deepspeed-kernels",
|
| 79 |
],
|
| 80 |
"mamba-ssm": [
|
|
|
|
| 30 |
|
| 31 |
try:
|
| 32 |
if "Darwin" in platform.system():
|
| 33 |
+ _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
|
| 34 |
else:
|
| 35 |
torch_version = version("torch")
|
| 36 |
_install_requires.append(f"torch=={torch_version}")
|
|
|
|
| 45 |
else:
|
| 46 |
raise ValueError("Invalid version format")
|
| 47 |
|
| 48 |
+ if (major, minor) >= (2, 3):
|
| 49 |
+ _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
|
| 50 |
+ _install_requires.append("xformers>=0.0.26.post1")
|
| 51 |
+ elif (major, minor) >= (2, 2):
|
| 52 |
+ _install_requires.pop(_install_requires.index("xformers==0.0.23.post1"))
|
| 53 |
+ _install_requires.append("xformers>=0.0.25.post1")
|
| 54 |
except PackageNotFoundError:
|
| 55 |
pass
|
| 56 |
|
|
|
|
| 71 |
dependency_links=dependency_links,
|
| 72 |
extras_require={
|
| 73 |
"flash-attn": [
|
| 74 |
+ "flash-attn==2.5.8",
|
| 75 |
],
|
| 76 |
"fused-dense-lib": [
|
| 77 |
+ "fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.5.8#subdirectory=csrc/fused_dense_lib",
|
| 78 |
],
|
| 79 |
"deepspeed": [
|
| 80 |
+ "deepspeed==0.14.2",
|
| 81 |
"deepspeed-kernels",
|
| 82 |
],
|
| 83 |
"mamba-ssm": [
|