Commit 081e6c4 (verified) committed by lldacing · Parent(s): 1f33c7c

support specific CUDA Compute Capability

WindowsWhlBuilder_cuda.bat CUDA_ARCH="80;120"
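
A hedged usage sketch (values illustrative, argument names taken from the script's :parseArgs block): cmd splits batch arguments on '=' as well as spaces, so each NAME=VALUE pair arrives as %1 and %2, and the quotes keep the ';' in the arch list from being split too.

rem build only for compute capabilities 8.0 and 12.0
WindowsWhlBuilder_cuda.bat CUDA_ARCH="80;120"
rem pairs can be combined in any order
WindowsWhlBuilder_cuda.bat FORCE_CXX11_ABI=TRUE CUDA_ARCH="80;120"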

Files changed (1)
  1. WindowsWhlBuilder_cuda.bat +89 -77
WindowsWhlBuilder_cuda.bat CHANGED
@@ -1,78 +1,90 @@
-@echo off
-setlocal enabledelayedexpansion
-
-set MAX_JOBS=1
-
-:parseArgs
-rem Assigning a value to MAX_JOBS via a variable does not work in ninja, I don't know why
-rem if [%1] == [WORKERS] set MAX_JOBS=%2 & shift & shift & goto :parseargs
-if [%1] == [FORCE_CXX11_ABI] set FLASH_ATTENTION_FORCE_CXX11_ABI=%2 & shift & shift & goto :parseargs
-goto :buildContinue
-:end
-
-:buildFinalize
-set MAX_JOBS=
-set BUILD_TARGET=
-set DISTUTILS_USE_SDK=
-set FLASH_ATTENTION_FORCE_BUILD=
-set FLASH_ATTENTION_FORCE_CXX11_ABI=
-set dist_dir=
-set tmpname=
-endlocal
-goto :eof
-:end
-
-:buildContinue
-echo MAX_JOBS: %MAX_JOBS%
-echo FLASH_ATTENTION_FORCE_CXX11_ABI: %FLASH_ATTENTION_FORCE_CXX11_ABI%
-rem # We want setuptools >= 49.6.0 otherwise we can't compile the extension if system CUDA version is 11.7 and pytorch cuda version is 11.6
-rem # https://github.com/pytorch/pytorch/blob/664058fa83f1d8eede5d66418abff6e20bd76ca8/torch/utils/cpp_extension.py#L810
-rem # However this still fails so I'm using a newer version of setuptools
-rem pip install setuptools==68.0.0
-pip install "setuptools>=49.6.0" packaging wheel psutil
-rem # Limit MAX_JOBS otherwise the github runner goes OOM
-rem # CUDA 11.8 can compile with 2 jobs, but CUDA 12.3 goes OOM
-set FLASH_ATTENTION_FORCE_BUILD=TRUE
-set BUILD_TARGET=cuda
-set DISTUTILS_USE_SDK=1
-set dist_dir=dist
-
-python setup.py bdist_wheel --dist-dir=%dist_dir%
-
-
-rem rename whl
-
-rem just major version, such as cu12torch24cxx11abiFALSE
-rem for /f "delims=" %%i in ('python -c "import sys; from packaging.version import parse; import torch; python_version = f'cp{sys.version_info.major}{sys.version_info.minor}'; cxx11_abi=str(torch._C._GLIBCXX_USE_CXX11_ABI).upper(); torch_cuda_version = parse(torch.version.cuda); torch_cuda_version = parse(\"11.8\") if torch_cuda_version.major == 11 else parse(\"12.4\"); cuda_version = f'{torch_cuda_version.major}'; torch_version_raw = parse(torch.__version__); torch_version = f'{torch_version_raw.major}.{torch_version_raw.minor}'; wheel_filename = f'cu{cuda_version}torch{torch_version}cxx11abi{cxx11_abi}'; print(wheel_filename);"') do set wheel_filename=%%i
-
-rem such as cu124torch240cxx11abiFALSE
-for /f "delims=" %%i in ('python -c "import sys; from packaging.version import parse; import torch; python_version = f'cp{sys.version_info.major}{sys.version_info.minor}'; cxx11_abi=str(torch._C._GLIBCXX_USE_CXX11_ABI).upper(); torch_cuda_version = parse(torch.version.cuda); cuda_version = \"\".join(map(str, torch_cuda_version.release)); torch_version_raw = parse(torch.__version__); torch_version = \".\".join(map(str, torch_version_raw.release)); wheel_filename = f'cu{cuda_version}torch{torch_version}cxx11abi{cxx11_abi}'; print(wheel_filename);"') do set wheel_filename=%%i
-
-set tmpname=%wheel_filename%
-
-
-for %%i in (%dist_dir%\*.whl) do (
-set "filename=%%~nxi"
-
-rem check if contains +
-echo !filename! | findstr /c:+ >nul
-if errorlevel 1 (
-rem replace second '-' to wheel_filename
-set "count=0"
-for /l %%j in (0, 1, 1000) do (
-if "!filename:~%%j,1!"=="-" set /a count+=1
-if "!filename:~%%j,1!"=="-" if "!count!"=="2" (
-set "new_filename=!filename:~0,%%j!+%tmpname%!filename:~%%j!"
-
-echo Renaming !filename! to !new_filename!
-move "%%i" "!dist_dir!/!new_filename!"
-goto :next
-)
-)
-)
-:next
-rem continue
-)
-
-goto :buildFinalize
+@echo off
+setlocal enabledelayedexpansion
+
+set MAX_JOBS=1
+
+:parseArgs
+rem Assigning a value to MAX_JOBS via a variable does not work in ninja, I don't know why
+rem if [%1] == [WORKERS] set MAX_JOBS=%2 & shift & shift & goto :parseargs
+if "%~1" == "FORCE_CXX11_ABI" (
+set "FLASH_ATTENTION_FORCE_CXX11_ABI=%~2"
+shift & shift
+goto :parseArgs
+)
+if "%~1" == "CUDA_ARCH" (
+set "FLASH_ATTN_CUDA_ARCHS=%~2"
+shift & shift
+goto :parseArgs
+)
+goto :buildContinue
+:end
+
+:buildFinalize
+set MAX_JOBS=
+set BUILD_TARGET=
+set DISTUTILS_USE_SDK=
+set FLASH_ATTENTION_FORCE_BUILD=
+set FLASH_ATTENTION_FORCE_CXX11_ABI=
+set dist_dir=
+set FLASH_ATTN_CUDA_ARCHS=
+set tmpname=
+endlocal
+goto :eof
+:end
+
+:buildContinue
+echo MAX_JOBS: %MAX_JOBS%
+echo FLASH_ATTENTION_FORCE_CXX11_ABI: %FLASH_ATTENTION_FORCE_CXX11_ABI%
+echo FLASH_ATTN_CUDA_ARCHS: %FLASH_ATTN_CUDA_ARCHS%
+rem # We want setuptools >= 49.6.0 otherwise we can't compile the extension if system CUDA version is 11.7 and pytorch cuda version is 11.6
+rem # https://github.com/pytorch/pytorch/blob/664058fa83f1d8eede5d66418abff6e20bd76ca8/torch/utils/cpp_extension.py#L810
+rem # However this still fails so I'm using a newer version of setuptools
+rem pip install setuptools==68.0.0
+pip install "setuptools>=49.6.0" packaging wheel psutil
+rem # Limit MAX_JOBS otherwise the github runner goes OOM
+rem # CUDA 11.8 can compile with 2 jobs, but CUDA 12.3 goes OOM
+set FLASH_ATTENTION_FORCE_BUILD=TRUE
+set BUILD_TARGET=cuda
+set DISTUTILS_USE_SDK=1
+set dist_dir=dist
+rem set FLASH_ATTN_CUDA_ARCHS=80;120
+
+python setup.py bdist_wheel --dist-dir=%dist_dir%
+
+
+rem rename whl
+
+rem just major version, such as cu12torch24cxx11abiFALSE
+rem for /f "delims=" %%i in ('python -c "import sys; from packaging.version import parse; import torch; python_version = f'cp{sys.version_info.major}{sys.version_info.minor}'; cxx11_abi=str(torch._C._GLIBCXX_USE_CXX11_ABI).upper(); torch_cuda_version = parse(torch.version.cuda); torch_cuda_version = parse(\"11.8\") if torch_cuda_version.major == 11 else parse(\"12.4\"); cuda_version = f'{torch_cuda_version.major}'; torch_version_raw = parse(torch.__version__); torch_version = f'{torch_version_raw.major}.{torch_version_raw.minor}'; wheel_filename = f'cu{cuda_version}torch{torch_version}cxx11abi{cxx11_abi}'; print(wheel_filename);"') do set wheel_filename=%%i
+
+rem such as cu124torch240cxx11abiFALSE
+for /f "delims=" %%i in ('python -c "import sys; from packaging.version import parse; import torch; python_version = f'cp{sys.version_info.major}{sys.version_info.minor}'; cxx11_abi=str(torch._C._GLIBCXX_USE_CXX11_ABI).upper(); torch_cuda_version = parse(torch.version.cuda); cuda_version = \"\".join(map(str, torch_cuda_version.release)); torch_version_raw = parse(torch.__version__); torch_version = \".\".join(map(str, torch_version_raw.release)); wheel_filename = f'cu{cuda_version}torch{torch_version}cxx11abi{cxx11_abi}'; print(wheel_filename);"') do set wheel_filename=%%i
+
+set tmpname=%wheel_filename%
+
+
+for %%i in (%dist_dir%\*.whl) do (
+set "filename=%%~nxi"
+
+rem check if contains +
+echo !filename! | findstr /c:+ >nul
+if errorlevel 1 (
+rem replace second '-' to wheel_filename
+set "count=0"
+for /l %%j in (0, 1, 1000) do (
+if "!filename:~%%j,1!"=="-" set /a count+=1
+if "!filename:~%%j,1!"=="-" if "!count!"=="2" (
+set "new_filename=!filename:~0,%%j!+%tmpname%!filename:~%%j!"
+
+echo Renaming !filename! to !new_filename!
+move "%%i" "!dist_dir!/!new_filename!"
+goto :next
+)
+)
+)
+:next
+rem continue
+)
+
+goto :buildFinalize
 :end
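
For orientation, the renaming loop at the end of the script splices a local-version tag in front of the second '-' of the wheel that setup.py produces. Below is a minimal standalone sketch of that splice, using a hypothetical wheel name and the tag a torch 2.4.0 + CUDA 12.4 build would get from the script's embedded Python one-liner (both values are illustrative, not taken from this diff):

@echo off
setlocal enabledelayedexpansion
rem Hypothetical inputs; the real values come from setup.py's output and the torch probe.
set "filename=flash_attn-2.7.4-cp312-cp312-win_amd64.whl"
set "tmpname=cu124torch2.4.0cxx11abiFALSE"
set "count=0"
for /l %%j in (0, 1, 1000) do (
    rem count dashes; at the second one, splice "+<tag>" in front of it
    if "!filename:~%%j,1!"=="-" set /a count+=1
    if "!filename:~%%j,1!"=="-" if "!count!"=="2" (
        echo !filename:~0,%%j!+%tmpname%!filename:~%%j!
        goto :done
    )
)
:done
endlocal

Run as-is this prints flash_attn-2.7.4+cu124torch2.4.0cxx11abiFALSE-cp312-cp312-win_amd64.whl. Note that the script only exports FLASH_ATTN_CUDA_ARCHS; presumably setup.py reads that variable to restrict the nvcc gencode targets, but that side is outside this diff.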