fix?: system prompt
README.md CHANGED
@@ -9,5 +9,5 @@ app_file: app.py
 pinned: false
 short_description: Reason about papers using LLMs
 license: agpl-3.0
-models: [HuggingFaceTB/SmolLM2-
+models: [HuggingFaceTB/SmolLM2-135M-Instruct]
 ---
app.py CHANGED
@@ -14,7 +14,7 @@ HistoryType: TypeAlias = List[Dict[str, str]]
 
 # Initialize the LLM and Weave client
 client = weave.init("papersai")
-checkpoint: str = "HuggingFaceTB/SmolLM2-
+checkpoint: str = "HuggingFaceTB/SmolLM2-135M-Instruct"
 pipe = pipeline(
     model=checkpoint,
     torch_dtype=torch.bfloat16,
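For reference, a minimal standalone sketch of the corrected checkpoint setup. The pipeline task is not visible in this hunk, so "text-generation" is an assumption, and the snippet is not the Space's actual app.py:

# Standalone sketch (assumes transformers and torch are installed):
# load the completed SmolLM2-135M-Instruct checkpoint and run a quick
# generation to confirm the model id resolves.
import torch
from transformers import pipeline

checkpoint: str = "HuggingFaceTB/SmolLM2-135M-Instruct"
pipe = pipeline(
    "text-generation",  # assumption: the task argument is not shown in the diff
    model=checkpoint,
    torch_dtype=torch.bfloat16,
)

print(pipe("Papers are read by", max_new_tokens=20)[0]["generated_text"])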
@@ -68,7 +68,7 @@ def invoke(history: HistoryType):
         BaseMessage: Response from the model
     """
     input_text = pipe.tokenizer.apply_chat_template(
-        history
+        history,
         tokenize=False,
     )
     response = pipe(input_text, do_sample=True, top_p=0.95, max_new_tokens=100)[0][
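The change above restores the comma after `history`; without it the call is a Python syntax error. Continuing the sketch above (so `pipe` already exists), the repaired call looks roughly like this; the `add_generation_prompt` flag is an assumption not shown in the diff:

# Build a chat-style history and render it to a prompt string.
history = [
    {"role": "system", "content": "You answer questions about papers."},
    {"role": "user", "content": "What is the paper's main contribution?"},
]
input_text = pipe.tokenizer.apply_chat_template(
    history,
    tokenize=False,              # return a string, not token ids
    add_generation_prompt=True,  # assumption: flag not visible in the diff
)
response = pipe(input_text, do_sample=True, top_p=0.95, max_new_tokens=100)
print(response[0]["generated_text"])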
@@ -105,6 +105,9 @@ def update_state(history: HistoryType, message: Optional[Dict[str, str]]):
             state.context = " ".join(doc_context)[
                 : pipe.model.config.max_position_embeddings
             ]
+            history.append(
+                {"role": "system", "content": f"Context: {state.context}\n"}
+            )
     except Exception as e:
         history.append(
             {"role": "assistant", "content": f"Error loading file: {str(e)}"}
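The added lines are the commit's "system prompt" fix: once a paper is loaded, its text is injected into the chat history as a system message so later invoke() calls can see it. A simplified sketch of that step, continuing the snippets above; `doc_context` is a stand-in for the parsed paper and the `state` object is reduced to a local variable:

# Truncate the loaded document and append it as a system message.
# Note: the slice is by characters, not tokens, exactly as in the
# original code.
doc_context = ["page 1 text ...", "page 2 text ..."]  # stand-in for the parsed paper
context = " ".join(doc_context)[: pipe.model.config.max_position_embeddings]
history.append({"role": "system", "content": f"Context: {context}\n"})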