Fasika committed
Commit · eebd998
1 Parent(s): 09d4351
app.py CHANGED
@@ -5,12 +5,20 @@ from transformers import AutoTokenizer, AutoModelForSequenceClassification
 checkpoint = "distilbert-base-uncased-finetuned-sst-2-english"
 tokenizer = AutoTokenizer.from_pretrained(checkpoint)
 model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
+
 sequences = ["I've been waiting for a HuggingFace course my whole life.", "So have I!"]
 
 tokens = tokenizer(sequences, padding=True, truncation=True, return_tensors="pt")
-
+
+# Perform inference without gradient tracking
+with torch.no_grad():
+    output = model(**tokens)
+
+# Convert logits to a list for JSON serialization
+logits = output.logits.tolist()
+
 app = FastAPI()
 
 @app.get("/")
 def greet_json():
-    return {"Hello":
+    return {"Hello": logits}
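A minimal sketch of how the updated Space could be exercised locally, assuming the unshown lines of app.py already import FastAPI and torch and that the app is served the usual way for a FastAPI Space (uvicorn, port 7860); the host, port, and the requests dependency below are illustrative assumptions, not part of the commit:

# Hypothetical usage sketch, not part of the commit.
# Serve the app first, e.g.: uvicorn app:app --host 0.0.0.0 --port 7860
import requests  # assumed helper dependency for this example

resp = requests.get("http://localhost:7860/")
print(resp.json())
# For the SST-2 checkpoint, output.logits has shape [2, 2], so the JSON looks like
# {"Hello": [[neg_logit, pos_logit], [neg_logit, pos_logit]]} - one pair per input sequence.

Note that the logits are computed once at import time for the two hard-coded sequences, so every request to "/" returns the same values; the endpoint does not run inference per request.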