Spaces: Running on Zero
jedick committed · Commit 00c763e · 1 parent: ef0d090
Revert to all caps for label coding
app.py CHANGED
@@ -54,15 +54,15 @@ def prediction_to_df(prediction=None):
     """
     if prediction is None or prediction == "":
         # Show an empty plot for app initialization or auto-reload
-        prediction = {"
+        prediction = {"SUPPORT": 0, "NEI": 0, "REFUTE": 0}
     elif "Model" in prediction:
         # Show full-height bars when the model is changed
-        prediction = {"
+        prediction = {"SUPPORT": 1, "NEI": 1, "REFUTE": 1}
     else:
         # Convert predictions text to dictionary
         prediction = eval(prediction)
         # Use custom order for labels (pipe() returns labels in descending order of softmax score)
-        labels = ["
+        labels = ["SUPPORT", "NEI", "REFUTE"]
         prediction = {k: prediction[k] for k in labels}
     # Convert dictionary to DataFrame with one column (Probability)
     df = pd.DataFrame.from_dict(prediction, orient="index", columns=["Probability"])
@@ -93,7 +93,7 @@ with gr.Blocks(theme=my_theme, css=custom_css, head=font_awesome_html) as demo:
         with gr.Column(scale=3):
             with gr.Row():
                 gr.Markdown("# AI4citations")
-                gr.Markdown("## *AI-powered
+                gr.Markdown("## *AI-powered citation verification*")
             claim = gr.Textbox(
                 label="Claim",
                 info="aka hypothesis",
@@ -131,20 +131,23 @@ with gr.Blocks(theme=my_theme, css=custom_css, head=font_awesome_html) as demo:
                 x="Class",
                 y="Probability",
                 color="Class",
-                color_map={"
+                color_map={"SUPPORT": "green", "NEI": "#888888", "REFUTE": "#FF8888"},
                 inputs=prediction,
                 y_lim=([0, 1]),
                 visible=False,
             )
-            label = gr.Label(label="
+            label = gr.Label(label="Prediction")
             with gr.Accordion("Feedback"):
                 gr.Markdown(
-                    "*
+                    "*Provide the correct label to help improve this app*<br>**NOTE:** The claim and evidence will also be saved"
                 ),
                 with gr.Row():
                     flag_support = gr.Button("Support")
                     flag_nei = gr.Button("NEI")
                     flag_refute = gr.Button("Refute")
+                gr.Markdown(
+                    "Feedback is uploaded every 5 minutes to [AI4citations-feedback](https://huggingface.co/datasets/jedick/AI4citations-feedback)"
+                ),
             with gr.Accordion("Examples"):
                 gr.Markdown("*Examples are run when clicked*"),
                 with gr.Row():
@@ -197,12 +200,12 @@ with gr.Blocks(theme=my_theme, css=custom_css, head=font_awesome_html) as demo:
         with gr.Column(scale=2):
             gr.Markdown(
                 """
-                ### To make
+                ### To make the prediction:

                 - Hit 'Enter' in the **Claim** text box OR
                 - Hit 'Shift-Enter' in the **Evidence** text box

-
+                _The prediction is also made after clicking **Get Evidence**_
                 """
             )

@@ -220,7 +223,9 @@ with gr.Blocks(theme=my_theme, css=custom_css, head=font_awesome_html) as demo:
                 value=MODEL_NAME,
                 label="Model",
             )
-            radio = gr.Radio(
+            radio = gr.Radio(
+                ["label", "barplot"], value="label", label="Prediction"
+            )
         with gr.Accordion("Sources", open=False, elem_classes=["center_content"]):
             gr.Markdown(
                 """
@@ -247,8 +252,8 @@ with gr.Blocks(theme=my_theme, css=custom_css, head=font_awesome_html) as demo:
                 """
                 #### *Other sources*
                 - <i class="fa-brands fa-github"></i> [xhluca/bm25s](https://github.com/xhluca/bm25s) (evidence retrieval)
+                - <img src="https://plos.org/wp-content/uploads/2020/01/logo-color-blue.svg" style="height: 1.4em; display: inline-block;"> [Medicine](https://doi.org/10.1371/journal.pmed.0030197), <i class="fa-brands fa-wikipedia-w"></i> [CRISPR](https://en.wikipedia.org/wiki/CRISPR) (evidence retrieval examples)
                 - <img src="https://huggingface.co/datasets/huggingface/brand-assets/resolve/main/hf-logo.svg" style="height: 1.2em; display: inline-block;"> [nyu-mll/multi_nli](https://huggingface.co/datasets/nyu-mll/multi_nli/viewer/default/train?row=37&views%5B%5D=train) (MNLI example)
-                - <img src="https://plos.org/wp-content/uploads/2020/01/logo-color-blue.svg" style="height: 1.4em; display: inline-block;"> [Medicine](https://doi.org/10.1371/journal.pmed.0030197), <i class="fa-brands fa-wikipedia-w"></i> [CRISPR](https://en.wikipedia.org/wiki/CRISPR) (get evidence examples)
                 - <img src="https://huggingface.co/datasets/huggingface/brand-assets/resolve/main/hf-logo.svg" style="height: 1.2em; display: inline-block;"> [NoCrypt/miku](https://huggingface.co/spaces/NoCrypt/miku) (theme)
                 """
             )
@@ -272,14 +277,14 @@ with gr.Blocks(theme=my_theme, css=custom_css, head=font_awesome_html) as demo:
         }
         # Rename dictionary keys to use consistent labels across models
        prediction = {
-            ("
+            ("SUPPORT" if k in ["SUPPORT", "entailment"] else k): v
             for k, v in prediction.items()
         }
         prediction = {
             ("NEI" if k in ["NEI", "neutral"] else k): v for k, v in prediction.items()
         }
         prediction = {
-            ("
+            ("REFUTE" if k in ["REFUTE", "contradiction"] else k): v
             for k, v in prediction.items()
         }
         # Return two instances of the prediction to send to different Gradio components
@@ -361,9 +366,9 @@ with gr.Blocks(theme=my_theme, css=custom_css, head=font_awesome_html) as demo:
         if is_running_in_hf_spaces():
             # Use a thread lock to avoid concurrent writes from different users.
             with scheduler.lock:
-                append_feedback(*args, user_label="
+                append_feedback(*args, user_label="SUPPORT")
         else:
-            append_feedback(*args, user_label="
+            append_feedback(*args, user_label="SUPPORT")

     def save_feedback_nei(*args) -> None:
         """
@@ -383,9 +388,9 @@ with gr.Blocks(theme=my_theme, css=custom_css, head=font_awesome_html) as demo:
         if is_running_in_hf_spaces():
             # Use a thread lock to avoid concurrent writes from different users.
             with scheduler.lock:
-                append_feedback(*args, user_label="
+                append_feedback(*args, user_label="REFUTE")
         else:
-            append_feedback(*args, user_label="
+            append_feedback(*args, user_label="REFUTE")

     # Event listeners

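The label coding this commit reverts to is easiest to see in the renaming hunk (@@ -272,14 +277,14 @@): whatever keys a model returns (for example entailment/neutral/contradiction from an MNLI-style model) are mapped onto the all-caps SUPPORT/NEI/REFUTE labels used throughout app.py. Below is a minimal standalone sketch of that normalization; the dictionary comprehensions are taken from the diff, while the input scores are hypothetical rather than real model output.

```python
# Minimal sketch of the label normalization shown in the diff (hypothetical scores).
# An MNLI-style model emits entailment/neutral/contradiction; the app's own labels
# are already all caps, so both spellings collapse onto SUPPORT/NEI/REFUTE.
prediction = {"entailment": 0.91, "neutral": 0.06, "contradiction": 0.03}

prediction = {
    ("SUPPORT" if k in ["SUPPORT", "entailment"] else k): v
    for k, v in prediction.items()
}
prediction = {
    ("NEI" if k in ["NEI", "neutral"] else k): v for k, v in prediction.items()
}
prediction = {
    ("REFUTE" if k in ["REFUTE", "contradiction"] else k): v
    for k, v in prediction.items()
}

print(prediction)  # {'SUPPORT': 0.91, 'NEI': 0.06, 'REFUTE': 0.03}
```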