Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -27,7 +27,9 @@ def predict(text, seed, out_seq_length, min_gen_length, sampling_strategy,
|
|
| 27 |
"min_gen_length": min_gen_length,
|
| 28 |
"sampling_strategy": sampling_strategy,
|
| 29 |
"num_beams": num_beams,
|
| 30 |
-
"max_tokens": out_seq_length
|
|
|
|
|
|
|
| 31 |
})
|
| 32 |
|
| 33 |
headers = {
|
|
@@ -36,7 +38,8 @@ def predict(text, seed, out_seq_length, min_gen_length, sampling_strategy,
|
|
| 36 |
|
| 37 |
response = requests.request("POST", url, headers=headers, data=payload)
|
| 38 |
|
| 39 |
-
|
|
|
|
| 40 |
|
| 41 |
answer = response.json()['result']['output']['raw']
|
| 42 |
if isinstance(answer, list):
|
|
|
|
| 27 |
"min_gen_length": min_gen_length,
|
| 28 |
"sampling_strategy": sampling_strategy,
|
| 29 |
"num_beams": num_beams,
|
| 30 |
+
"max_tokens": out_seq_length,
|
| 31 |
+
"no_repeat_ngram": no_repeat_ngram,
|
| 32 |
+
"seed": seed
|
| 33 |
})
|
| 34 |
|
| 35 |
headers = {
|
|
|
|
| 38 |
|
| 39 |
response = requests.request("POST", url, headers=headers, data=payload)
|
| 40 |
|
| 41 |
+
if response.json()['status'] == 1:
|
| 42 |
+
return 'Please give smaller text than max_tokens or give larger max_tokens.'
|
| 43 |
|
| 44 |
answer = response.json()['result']['output']['raw']
|
| 45 |
if isinstance(answer, list):
|