minor

app.py CHANGED
@@ -63,15 +63,15 @@ logs_token = os.getenv("HF_LOGS_TOKEN")
 logs_file = Path("logs/") / f"data_{uuid.uuid4()}.json"
 logs_folder = logs_file.parent
 
-scheduler = CommitScheduler(
-    repo_id="PowerInfer/SmallThinker-3B-Preview",
-    repo_type="model",
-    folder_path=logs_folder,
-    path_in_repo="data",
-    every=5,
-    token=logs_token,
-    private=True,
-)
+# scheduler = CommitScheduler(
+#     repo_id="PowerInfer/SmallThinker-3B-Preview",
+#     repo_type="model",
+#     folder_path=logs_folder,
+#     path_in_repo="data",
+#     every=5,
+#     token=logs_token,
+#     private=True,
+# )
 
 @spaces.GPU
 def stream_chat(

@@ -125,10 +125,10 @@ def stream_chat(
         new_history[-1][1] = buffer
         yield new_history
 
-    with scheduler.lock:
-        with logs_file.open("a") as f:
-            f.write(json.dumps({"input": input_text.replace(SYSTEM_PROMPT, ""), "output": buffer.replace(SYSTEM_PROMPT, ""), "model": "SmallThinker-3B"}))
-            f.write("\n")
+    # with scheduler.lock:
+    #     with logs_file.open("a") as f:
+    #         f.write(json.dumps({"input": input_text.replace(SYSTEM_PROMPT, ""), "output": buffer.replace(SYSTEM_PROMPT, ""), "model": "SmallThinker-3B"}))
+    #         f.write("\n")
 
 def clear_input():
     return ""
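
For reference, the code this commit disables follows huggingface_hub's CommitScheduler pattern for persisting user data from a Space: each chat turn is appended as one JSON line to a local file, and a background scheduler commits the log folder to the Hub at a fixed interval. Below is a minimal, self-contained sketch of that pattern reconstructed from the diff above; the log_interaction helper is an illustrative name, not part of app.py, while the repo id, folder layout, and token variable mirror the commented-out code.

import json
import os
import uuid
from pathlib import Path

from huggingface_hub import CommitScheduler

# Token must grant write access to the target repo.
logs_token = os.getenv("HF_LOGS_TOKEN")

# One uniquely named JSONL file per Space replica avoids collisions.
logs_file = Path("logs/") / f"data_{uuid.uuid4()}.json"
logs_folder = logs_file.parent
logs_folder.mkdir(parents=True, exist_ok=True)

# Background thread that commits the contents of logs_folder to the Hub
# roughly every 5 minutes.
scheduler = CommitScheduler(
    repo_id="PowerInfer/SmallThinker-3B-Preview",
    repo_type="model",
    folder_path=logs_folder,
    path_in_repo="data",
    every=5,          # minutes between scheduled commits
    token=logs_token,
    private=True,
)

def log_interaction(input_text: str, output_text: str) -> None:
    """Append one JSON line per chat turn (hypothetical helper)."""
    # scheduler.lock keeps the append from racing with an in-progress
    # upload of the same folder.
    with scheduler.lock:
        with logs_file.open("a") as f:
            f.write(json.dumps({
                "input": input_text,
                "output": output_text,
                "model": "SmallThinker-3B",
            }))
            f.write("\n")

With both the scheduler construction and the lock-guarded write commented out, the Space stops persisting chat logs entirely. Note that re-enabling only the logging block would raise a NameError, since it references scheduler.lock while the scheduler assignment stays commented out.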