Lord-Raven committed
Commit · 6dd3e08
Parent(s): 374cec5

Trying to fix parameter issue.

Files changed:
- app.py +3 -3
- requirements.txt +1 -0
app.py CHANGED

@@ -1,5 +1,5 @@
 import gradio as gr
-
+import json
 from transformers import AutoTokenizer, pipeline
 from optimum.onnxruntime import ORTModelForSequenceClassification
 
@@ -18,12 +18,12 @@ classifier = pipeline(
 )
 
 def predict(param_0):
-    return classifier(param_0)
+    return json.dumps(classifier(param_0))
 
 demo = gr.Interface(
     fn = predict,
     inputs = 'text',
-    outputs =
+    outputs = gr.Textbox(label="JSON Output"),
 )
 
 # demo = gr.Interface.from_pipeline(classifier)
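For orientation, here is a minimal sketch of what app.py plausibly looks like after this commit. The diff only shows lines 1-5 and 18-29, so the classifier setup in between is reconstructed: the model id, the "text-classification" task, and the closing demo.launch() are assumptions, not taken from the Space.

import gradio as gr
import json
from transformers import AutoTokenizer, pipeline
from optimum.onnxruntime import ORTModelForSequenceClassification

# --- Assumed reconstruction of the lines the diff elides (roughly 6-17) ---
# The model id and the "text-classification" task are placeholders; the Space
# may load a different model or task.
model_id = "distilbert-base-uncased-finetuned-sst-2-english"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = ORTModelForSequenceClassification.from_pretrained(model_id, export=True)
classifier = pipeline(
    "text-classification",
    model=model,
    tokenizer=tokenizer,
)

def predict(param_0):
    # The pipeline returns a list of dicts; json.dumps serializes it to a
    # string so the Textbox component can render it.
    return json.dumps(classifier(param_0))

demo = gr.Interface(
    fn = predict,
    inputs = 'text',
    outputs = gr.Textbox(label="JSON Output"),
)

# demo = gr.Interface.from_pipeline(classifier)

demo.launch()  # assumed; a launch call is not visible in the diffed lines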
requirements.txt CHANGED

@@ -2,3 +2,4 @@ torch==2.4.0
 huggingface_hub==0.26.0
 transformers==4.36
 optimum[exporters,onnxruntime]==1.21.3
+json5==0.9.25
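The json module imported in app.py is part of the Python standard library; a small illustration of the serialization step predict performs, with made-up labels and scores:

import json

# Illustrative only: a text-classification pipeline typically returns a list
# of {"label": ..., "score": ...} dicts like this.
sample_result = [{"label": "POSITIVE", "score": 0.98}]

print(json.dumps(sample_result))
# [{"label": "POSITIVE", "score": 0.98}]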