Update app.py
app.py CHANGED

@@ -29,11 +29,6 @@ import re # <--- ADD THIS LINE FOR THE NAMEERROR
 os.environ["CUDA_HOME"] = "/usr/local/cuda"
 
 
-print(f"Is CUDA available: {torch.cuda.is_available()}")
-# True
-print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
-# Tesla T4
-
 # Flag to check if GPU is present
 HAS_GPU = False # Initialize to False, let pynvml determine
 GPU_COUNT = 0
@@ -49,6 +44,12 @@ model_file = "rwkv-5-h-world-3B" # Stick with 3B for now
 
 # Get the GPU count (this part is fine, though pynvml might warn)
 try:
+
+    print(f"Is CUDA available: {torch.cuda.is_available()}")
+    # True
+    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
+    # Tesla T4
+
     nvmlInit()
     GPU_COUNT = nvmlDeviceGetCount()
     if GPU_COUNT > 0:
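The commit moves the two CUDA diagnostic print calls from module level into the existing try: block, so a Space running without a usable GPU (or without the NVIDIA driver) no longer crashes at import time; any failure is caught together with the pynvml calls. Below is a minimal sketch of how the resulting block might read, assuming the except clause simply falls back to CPU and that the hidden if GPU_COUNT > 0: branch sets HAS_GPU; neither of those details appears in this diff.

# Sketch of app.py's GPU-detection block after this commit (not verbatim).
import os

import torch
from pynvml import nvmlDeviceGetCount, nvmlInit

os.environ["CUDA_HOME"] = "/usr/local/cuda"

# Flag to check if GPU is present
HAS_GPU = False  # Initialize to False, let pynvml determine
GPU_COUNT = 0

# Get the GPU count (this part is fine, though pynvml might warn)
try:
    # The device-name lookup raises when CUDA is unavailable, which is why
    # these prints now live inside the try block instead of at module level.
    print(f"Is CUDA available: {torch.cuda.is_available()}")  # True
    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")  # Tesla T4

    nvmlInit()
    GPU_COUNT = nvmlDeviceGetCount()
    if GPU_COUNT > 0:
        HAS_GPU = True  # assumption: the branch body (not shown in the diff) flips the flag
except Exception as exc:
    # assumption: fall back to CPU-only mode when CUDA/NVML is unavailable
    print(f"No usable GPU detected, running on CPU: {exc}")
    HAS_GPU = False
    GPU_COUNT = 0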