Spaces: Runtime error

Update app.py

app.py CHANGED
@@ -1,9 +1,14 @@
 import gradio as gr
 from llama_cpp import Llama
+import os

-#
+# Path to the first shard of the model
 model_path = "DeepSeek-R1-Zero-Q4_K_M/DeepSeek-R1-Zero-Q4_K_M-00001-of-00009.gguf"

+# Debugging: Verify working directory and model path
+print("Current working directory:", os.getcwd())
+print("Full model path:", os.path.join(os.getcwd(), model_path))
+
 # Initialize the model
 try:
     model = Llama(model_path=model_path, n_threads=8)
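The added print calls only show where the process expects the model; they do not confirm the files are actually on disk. Since llama.cpp loads a split GGUF from its first shard but needs every shard present, a stricter pre-flight check may be more useful for debugging the runtime error. A minimal sketch, assuming the remaining shards follow the same naming pattern as the path in the diff:

import os

shard_dir = "DeepSeek-R1-Zero-Q4_K_M"  # directory used in the diff above
n_shards = 9                           # from "-of-00009" in the filename

# Report any missing shard before Llama() tries (and fails) to load the model
for i in range(1, n_shards + 1):
    name = f"DeepSeek-R1-Zero-Q4_K_M-{i:05d}-of-{n_shards:05d}.gguf"
    shard = os.path.join(shard_dir, name)
    if not os.path.exists(shard):
        print(f"Missing shard: {shard}")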
@@ -16,7 +21,8 @@ def predict(prompt):
     try:
         # Generate output using the model
         output = model(prompt)
-
+        # Extract and return the text from the response
+        return output["choices"][0]["text"]
     except Exception as e:
         return f"Error during inference: {e}"

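This second hunk fixes a bug in predict: the function generated a completion but never returned it, so the Gradio output stayed empty. In llama-cpp-python, calling the model as model(prompt) returns an OpenAI-style completion dict, which is why the new line indexes output["choices"][0]["text"]. A sketch of the relevant shape (abridged to the fields the fix touches):

# Abridged shape of the dict returned by model(prompt) in llama-cpp-python
output = {
    "choices": [
        {"text": " ...generated text...", "index": 0, "finish_reason": "length"},
    ],
}
print(output["choices"][0]["text"])  # what predict() now returns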
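Taken together, and assuming the part of app.py outside the two hunks wires predict into a standard gr.Interface, the file after this commit would look roughly like the sketch below. The gr.Interface lines, the except branch after the model load, and the model-is-None guard are assumptions not shown in the diff:

import gradio as gr
from llama_cpp import Llama
import os

# Path to the first shard of the model
model_path = "DeepSeek-R1-Zero-Q4_K_M/DeepSeek-R1-Zero-Q4_K_M-00001-of-00009.gguf"

# Debugging: Verify working directory and model path
print("Current working directory:", os.getcwd())
print("Full model path:", os.path.join(os.getcwd(), model_path))

# Initialize the model
try:
    model = Llama(model_path=model_path, n_threads=8)
except Exception as e:  # assumed: the diff truncates before this branch
    model = None
    print(f"Error loading model: {e}")

def predict(prompt):
    if model is None:  # assumed guard so the UI reports load failures
        return "Model failed to load; check the logs."
    try:
        # Generate output using the model
        output = model(prompt)
        # Extract and return the text from the response
        return output["choices"][0]["text"]
    except Exception as e:
        return f"Error during inference: {e}"

# Assumed wiring; this part of app.py is outside the diff hunks
demo = gr.Interface(fn=predict, inputs="text", outputs="text")
demo.launch()

Pointing model_path at the first shard is the right call for a split GGUF: llama.cpp discovers the remaining -of-00009 parts automatically as long as they sit in the same directory.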