- requirements.txt +2 -1
- src/summarizer.py +1 -1
requirements.txt
CHANGED
@@ -1,5 +1,6 @@
 bs4
-streamlit
+streamlit
+lxml
 torch
 requests
 transformers
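The lxml addition presumably lets BeautifulSoup (bs4, already listed above) use the faster lxml parser when scraping pages to summarize. A minimal sketch under that assumption; the URL and the paragraph-extraction step are hypothetical, not from the diff:

# Sketch only: fetch a page and parse it with the newly added lxml backend.
# The URL and the choice to join <p> tags are assumptions for illustration.
import requests
from bs4 import BeautifulSoup

html = requests.get("https://example.com/article").text
soup = BeautifulSoup(html, "lxml")  # the "lxml" parser requires the lxml package
text = " ".join(p.get_text() for p in soup.find_all("p"))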
src/summarizer.py
CHANGED
@@ -11,6 +11,6 @@ class Summarizer:
 
     def summarize(self, text):
         input_ids = self.tokenizer(text, return_tensors="pt").input_ids.to(self.device)
-        outputs = self.model.generate(input_ids, max_length=20, min_length=20, length_penalty=2.0, num_beams=
+        outputs = self.model.generate(input_ids, max_length=20, min_length=20, length_penalty=2.0, num_beams=2, early_stopping=True)
 
         return self.tokenizer.decode(outputs[0], skip_special_tokens=True)
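For context, a minimal sketch of what the surrounding Summarizer class plausibly looks like. Only the summarize() body is confirmed by the diff; the constructor, the "t5-small" checkpoint, and the device selection are assumptions:

# Sketch under assumptions: a seq2seq checkpoint such as "t5-small" and a
# device chosen at init. Only the generate() call comes from the diff.
import torch
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

class Summarizer:
    def __init__(self, model_name="t5-small"):  # model name is an assumption
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.tokenizer = AutoTokenizer.from_pretrained(model_name)
        self.model = AutoModelForSeq2SeqLM.from_pretrained(model_name).to(self.device)

    def summarize(self, text):
        input_ids = self.tokenizer(text, return_tensors="pt").input_ids.to(self.device)
        # Beam search with 2 beams; early_stopping=True lets a beam finish as
        # soon as it emits EOS, and is only meaningful when num_beams > 1.
        outputs = self.model.generate(input_ids, max_length=20, min_length=20,
                                      length_penalty=2.0, num_beams=2,
                                      early_stopping=True)
        return self.tokenizer.decode(outputs[0], skip_special_tokens=True)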