# NOTE(review): the original export captured HuggingFace Spaces page furniture
# here (status banners, file size, commit hashes, a line-number gutter) that is
# not Python source and broke parsing; it has been reduced to this comment.
import streamlit as st
from gnews import GNews
from transformers import GPT2Tokenizer, GPT2LMHeadModel
from transformers import pipeline
from gnews import GNews
from newspaper import Article
def fetch_news(topic):
    """Fetch the top five Google News results for *topic*.

    Returns a list of dicts with ``title``, ``published_date``,
    ``description``, ``url`` and ``publisher`` keys.
    """
    client = GNews(language='german', country='Germany')  # You can customize this
    return [
        {
            'title': entry['title'],
            'published_date': entry['published date'],
            'description': entry['description'],
            'url': entry['url'],
            'publisher': entry['publisher'],
        }
        for entry in client.get_news(topic)[:5]  # keep only the top 5 hits
    ]
def page_trending_niche():
    """Streamlit page: list trending news articles for a user-supplied niche
    and stash the selected article's description in session state for reuse
    by other pages.
    """
    st.title("What is trending in my niche?")
    niche = st.text_input('Enter your niche', 'Technology')
    if not niche:
        return
    news_items = fetch_news(niche)
    titles = [article['title'] for article in news_items]
    chosen = st.selectbox("Select an article to generate a social media post about:", titles)
    # Look up the chosen article's description (None if somehow absent).
    chosen_description = next(
        (article['description'] for article in news_items if article['title'] == chosen),
        None,
    )
    # Save the selected article's description in the session state to use in another page
    st.session_state['selected_article_description'] = chosen_description
    for article in news_items:
        st.write(f"**Title:** {article['title']}")
        st.write(f"**Published Date:** {article['published_date']}")
        st.write(f"**Description:** {article['description']}")
        st.write(f"**Publisher:** {article['publisher']}")
        st.write(f"**URL:** [Read more]({article['url']})")
        st.write("---")
def fetch_full_article(url):
    """Download and parse the page at *url*; return the article body text."""
    page = Article(url)
    page.download()
    page.parse()
    return page.text
# Initialize the summarization pipeline with BART
# (model weights are downloaded on first use; created once at import time so
# every page reuses the same pipeline instance).
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
def split_text_into_chunks(text, chunk_size, overlap_size):
    """Split *text* into chunks of ``chunk_size`` characters, each extended by
    up to ``overlap_size`` characters of the following text so that context
    carries over between consecutive chunks.

    Fixes over the original:
    - once a chunk's overlap already reaches the end of the text, iteration
      stops, so no redundant trailing chunk (whose content was entirely
      contained in the previous chunk) is emitted;
    - a non-positive ``chunk_size`` raises instead of looping forever.

    Returns a list of strings; an empty *text* yields ``[]``.
    """
    if chunk_size <= 0:
        raise ValueError("chunk_size must be positive")
    chunks = []
    index = 0
    while index < len(text):
        # End index for the current chunk
        end_index = index + chunk_size
        # Extend the end index by the overlap, clamped to the text length.
        end_with_overlap = min(end_index + overlap_size, len(text))
        chunks.append(text[index:end_with_overlap])
        if end_with_overlap == len(text):
            break  # the overlap already covered the tail; avoid a duplicate chunk
        # Move the index to the start of the next chunk, which is end_index
        index = end_index
    return chunks
def generate_social_media_post(article_text):
    """Summarize *article_text* chunk by chunk with BART and return the
    concatenated summaries as the post content.
    """
    chunk_size = 900  # This is close to the model's maximum length for BART
    overlap_size = 50  # Overlap size to ensure continuity in the text
    summaries = []
    for segment in split_text_into_chunks(article_text, chunk_size, overlap_size):
        # Call the summarizer for each chunk
        result = summarizer(segment, max_length=130, min_length=30, do_sample=False)
        summaries.append(result[0]['summary_text'])
    return ' '.join(summaries).strip()
def page_article_to_social_post():
    """Streamlit page: pick a news article for a niche, fetch its full text,
    and generate a social-media post by summarizing it with BART.
    """
    st.title("Article to Social Media Post")
    # User input for niche
    niche = st.text_input('Enter your niche', 'Technology')
    if niche:
        # Fetch news articles
        google_news = GNews(language='german', country='Germany')  # You can customize this
        news_list = google_news.get_news(niche)
        if not news_list:
            st.write("No news found for the given niche.")
            return
        # Display article titles in a selectbox
        article_titles = [news['title'] for news in news_list[:5]]
        selected_title = st.selectbox("Select an article:", article_titles)
        # Map the chosen title back to its full news record (None if not found).
        selected_article = next((item for item in news_list if item['title'] == selected_title), None)
        if selected_article:
            selected_url = selected_article['url']
            if st.button('Fetch Full Article'):
                # Fetch the full article text
                article_text = fetch_full_article(selected_url)
                # Use an expander to display the article text in a scrollable view
                with st.expander("See full article"):
                    st.text_area("Article Text", article_text, height=500)  # Height is in pixels
                # Store the full article text in session state for later use
                # (Streamlit reruns the script on every interaction, so the text
                # must survive the rerun triggered by the next button click).
                st.session_state.full_article_text = article_text
            if st.button('Generate Social Media Post') and 'full_article_text' in st.session_state:
                with st.spinner('Generating...'):
                    # Generate a summary based on the full article text
                    post_content = generate_social_media_post(st.session_state.full_article_text)
                    st.success('Generated Content:')
                    st.write(post_content)
def page_vaccation():
    """Streamlit page: show a route between two points and nearby places of
    interest on a pydeck map.

    Bug fix: the original referenced undefined names (``start_lat``,
    ``end_lat``, ``poi1_lat``, ...), which raised NameError as soon as the
    page ran. They are now defined as explicit mock coordinates; a real
    implementation would geocode the user-entered locations via a mapping API.
    """
    import streamlit as st
    import pandas as pd
    import pydeck as pdk
    # Input for start and end points
    start_point = st.text_input("Enter start point", "Location A")
    end_point = st.text_input("Enter end point", "Location B")
    # Mock coordinates for demonstration (Berlin -> Munich); replace with
    # real geocoding/routing of start_point / end_point.
    start_lat, start_lon = 52.5200, 13.4050
    end_lat, end_lon = 48.1351, 11.5820
    poi1_lat, poi1_lon = 51.3397, 12.3731  # mock place of interest 1
    poi2_lat, poi2_lon = 49.4521, 11.0767  # mock place of interest 2
    route_data = pd.DataFrame({
        'lat': [start_lat, end_lat],
        'lon': [start_lon, end_lon]
    })
    places_of_interest = pd.DataFrame({
        'lat': [poi1_lat, poi2_lat],  # Latitudes of places of interest
        'lon': [poi1_lon, poi2_lon],  # Longitudes of places of interest
        'name': ['Place 1', 'Place 2']  # Names of places of interest
    })
    # Display the map, centred on the route's midpoint.
    st.pydeck_chart(pdk.Deck(
        map_style='mapbox://styles/mapbox/light-v9',
        initial_view_state=pdk.ViewState(
            latitude=route_data['lat'].mean(),
            longitude=route_data['lon'].mean(),
            zoom=11,
            pitch=50,
        ),
        layers=[
            pdk.Layer(
                'ScatterplotLayer',
                data=places_of_interest,
                get_position='[lon, lat]',
                get_color='[200, 30, 0, 160]',
                get_radius=200,
            ),
        ],
    ))
# Setup the sidebar with page selection
st.sidebar.title("Anne's Current Projects :star2:")
page = st.sidebar.selectbox(
    'What project do you like to see first?',
    ('trending_niche', 'page_article_to_social_post', 'Vaccation Page'))
# Display the selected page
if page == 'trending_niche':
    page_trending_niche()
elif page == 'page_article_to_social_post':
    page_article_to_social_post()
elif page == 'Vaccation Page':
    # Bug fix: the original called undefined page_test(), which raised
    # NameError when this page was selected; route to page_vaccation().
    page_vaccation()