# summarynews/app.py
import streamlit as st
from transformers import BartForConditionalGeneration, PreTrainedTokenizerFast
import requests
from bs4 import BeautifulSoup
# Load the KoBART summarization model and tokenizer
model = BartForConditionalGeneration.from_pretrained("digit82/kobart-summarization")
tokenizer = PreTrainedTokenizerFast.from_pretrained("digit82/kobart-summarization")
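# Optional sketch (not part of the original script): Streamlit re-executes this file on
# every user interaction, so the model reload could be avoided by caching the load with
# st.cache_resource. The helper name load_kobart below is illustrative only.
#
# @st.cache_resource
# def load_kobart():
#     model = BartForConditionalGeneration.from_pretrained("digit82/kobart-summarization")
#     tokenizer = PreTrainedTokenizerFast.from_pretrained("digit82/kobart-summarization")
#     return model, tokenizer
#
# model, tokenizer = load_kobart()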
# Extract the article body text from a Naver news page
def get_article_text(naver_url):
    headers = {"User-Agent": "Mozilla/5.0"}
    response = requests.get(naver_url, headers=headers)
    soup = BeautifulSoup(response.text, "html.parser")
    # Naver articles keep the body in an <article> tag or in the #dic_area container
    article_body = soup.select_one("article") or soup.select_one("#dic_area")
    return article_body.get_text(strip=True) if article_body else None
# Summarize Korean news text with KoBART
def summarize_korean_news(text):
    # Truncate the input to the model's 1024-token limit
    input_ids = tokenizer.encode(text, return_tensors="pt", max_length=1024, truncation=True)
    # Beam search with 4 beams; summary length between 30 and 128 tokens
    summary_ids = model.generate(input_ids, max_length=128, min_length=30, num_beams=4, early_stopping=True)
    return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
# Streamlit app UI
st.title("HL Mando News Summarizer")
st.write("Summarizes the latest news related to HL Mando!")

# Get a news URL from the user
url = st.text_input("Enter a news URL:")
if url:
    article = get_article_text(url)
    if article:
        summary = summarize_korean_news(article)
        st.subheader("News Summary")
        st.write(summary)
    else:
        st.write("Could not retrieve the article body.")
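# To run locally with the standard Streamlit CLI: streamlit run app.py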