import os
import requests
import pandas as pd
import pydeck as pdk
import streamlit as st
from gnews import GNews
from newspaper import Article
from transformers import pipeline
def fetch_news(topic):
    google_news = GNews(language='german', country='Germany')  # You can customize this
    news_list = google_news.get_news(topic)
    articles = []
    for news in news_list[:5]:  # Get top 5 news articles
        articles.append({
            'title': news['title'],
            'published_date': news['published date'],
            'description': news['description'],
            'url': news['url'],
            'publisher': news['publisher']
        })
    return articles
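
# Note (assumption about the installed gnews version): get_news() typically
# returns the 'publisher' field as a small dict (e.g. with 'title' and 'href'
# keys), so the value printed in page_trending_niche below may render as a
# dict rather than a plain publisher name.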
def page_trending_niche():
    st.title("What is trending in my niche?")
    niche = st.text_input('Enter your niche', 'Technology')
    if niche:
        news_items = fetch_news(niche)
        article_titles = [item['title'] for item in news_items]
        selected_article = st.selectbox("Select an article to generate a social media post about:", article_titles)
        selected_article_description = next((item['description'] for item in news_items if item['title'] == selected_article), None)
        # Save the selected article's description in the session state to use on another page
        st.session_state['selected_article_description'] = selected_article_description
        for item in news_items:
            st.write(f"**Title:** {item['title']}")
            st.write(f"**Published Date:** {item['published_date']}")
            st.write(f"**Description:** {item['description']}")
            st.write(f"**Publisher:** {item['publisher']}")
            st.write(f"**URL:** [Read more]({item['url']})")
            st.write("---")
def fetch_full_article(url):
    """Fetches the full text of an article given its URL."""
    article = Article(url)
    article.download()
    article.parse()
    return article.text

# Initialize the summarization pipeline with BART
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")
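
# Note: constructing the pipeline downloads the facebook/bart-large-cnn weights
# from the Hugging Face Hub on the first run (they are cached afterwards), so
# the initial startup of the app can take a while.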
def split_text_into_chunks(text, chunk_size, overlap_size):
    chunks = []
    index = 0
    while index < len(text):
        # End index for the current chunk
        end_index = index + chunk_size
        # Extend the end index to include the overlap, if possible
        end_index_with_overlap = min(end_index + overlap_size, len(text))
        # Extract the chunk with the overlap
        chunk = text[index:end_index_with_overlap]
        chunks.append(chunk)
        # Move the index to the start of the next chunk, which is end_index
        index = end_index
    return chunks
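
# Illustrative check of the chunking behaviour (example values chosen here, not
# taken from the app): with chunk_size=5 and overlap_size=2 the string
# "abcdefghij" splits into ["abcdefg", "fghij"] -- each chunk reaches
# overlap_size characters into the next one, so the summarizer keeps a little
# context across chunk boundaries.
assert split_text_into_chunks("abcdefghij", 5, 2) == ["abcdefg", "fghij"]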
def generate_social_media_post(article_text):
    chunk_size = 900  # Characters per chunk; keeps each chunk safely under BART's input limit
    overlap_size = 50  # Overlap size to ensure continuity between chunks
    chunks = split_text_into_chunks(article_text, chunk_size, overlap_size)
    summarized_text = ''
    for chunk in chunks:
        # Call the summarizer for each chunk
        summary = summarizer(chunk, max_length=130, min_length=30, do_sample=False)[0]['summary_text']
        summarized_text += summary + ' '
    return summarized_text.strip()
def page_article_to_social_post():
    st.title("Article to Social Media Post")
    # User input for niche
    niche = st.text_input('Enter your niche', 'Technology')
    if niche:
        # Fetch news articles
        google_news = GNews(language='german', country='Germany')  # You can customize this
        news_list = google_news.get_news(niche)
        if not news_list:
            st.write("No news found for the given niche.")
            return
        # Display article titles in a selectbox
        article_titles = [news['title'] for news in news_list[:5]]
        selected_title = st.selectbox("Select an article:", article_titles)
        selected_article = next((item for item in news_list if item['title'] == selected_title), None)
        if selected_article:
            selected_url = selected_article['url']
            if st.button('Fetch Full Article'):
                # Fetch the full article text
                article_text = fetch_full_article(selected_url)
                # Use an expander to display the article text in a scrollable view
                with st.expander("See full article"):
                    st.text_area("Article Text", article_text, height=500)  # Height is in pixels
                # Store the full article text in session state for later use
                st.session_state.full_article_text = article_text
            if st.button('Generate Social Media Post') and 'full_article_text' in st.session_state:
                with st.spinner('Generating...'):
                    # Generate a summary based on the full article text
                    post_content = generate_social_media_post(st.session_state.full_article_text)
                    st.success('Generated Content:')
                    st.write(post_content)
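
# Design note: Streamlit reruns the whole script on every interaction, so a value
# computed after one button click is gone by the time a second button is clicked.
# Storing the article text in st.session_state (as above) is what lets the
# 'Generate Social Media Post' button reuse it on a later rerun.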
def get_route(start_coord, end_coord):
    api_key = os.getenv('OPENSTREET_API')
    if not api_key:
        st.error("API key not found. Please set the OPENSTREET_API environment variable.")
        return None
    headers = {
        'Accept': 'application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8',
    }
    url = f"https://api.openrouteservice.org/v2/directions/driving-car?api_key={api_key}&start={start_coord[0]},{start_coord[1]}&end={end_coord[0]},{end_coord[1]}"
    response = requests.get(url, headers=headers)
    if response.status_code == 200:
        route = response.json()['features'][0]['geometry']['coordinates']
        return route
    else:
        st.error(f"Failed to get route data. Status code: {response.status_code}")
        return None
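
# Note: the GeoJSON returned by the OpenRouteService directions endpoint lists
# coordinates in [longitude, latitude] order, which is why get_route() is called
# with (lon, lat) pairs below and why the DataFrame columns are named accordingly.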
def page_vacation():
    st.title("Route Finder with OpenRouteService")
    # Inputs for start and end coordinates
    st.header("Start Point")
    start_lat = st.number_input("Start Latitude", value=49.41461)
    start_lon = st.number_input("Start Longitude", value=8.681495)
    st.header("End Point")
    end_lat = st.number_input("End Latitude", value=49.420318)
    end_lon = st.number_input("End Longitude", value=8.687872)
    if st.button("Find Route"):
        route = get_route([start_lon, start_lat], [end_lon, end_lat])
        if route:
            # Convert the route to a DataFrame of [lon, lat] points
            route_df = pd.DataFrame(route, columns=['lon', 'lat'])
            # LineLayer draws one segment per row, so pair each point with the next one;
            # otherwise every segment would start and end at the same coordinate.
            route_df[['lon2', 'lat2']] = route_df[['lon', 'lat']].shift(-1)
            route_df = route_df.dropna()
            # Display the route using PyDeck, centered on the start point
            st.pydeck_chart(pdk.Deck(
                map_style='mapbox://styles/mapbox/light-v9',
                initial_view_state=pdk.ViewState(
                    latitude=start_lat,
                    longitude=start_lon,
                    zoom=11,
                    pitch=50,
                ),
                layers=[
                    pdk.Layer(
                        'LineLayer',
                        data=route_df,
                        get_source_position='[lon, lat]',
                        get_target_position='[lon2, lat2]',
                        get_color='[255, 0, 0, 160]',
                        get_width=5,
                        pickable=True
                    ),
                ],
            ))
# Set up the sidebar with page selection
st.sidebar.title("Anne's Current Projects :star2:")
page = st.sidebar.selectbox(
    'Which project would you like to see first?',
    ('Trending in my niche', 'Article to Social Media Post', 'Vacation Page'))

# Display the selected page
if page == 'Trending in my niche':
    page_trending_niche()
elif page == 'Article to Social Media Post':
    page_article_to_social_post()
elif page == 'Vacation Page':
    page_vacation()
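
# To try this locally (assuming the script is saved as app.py -- the filename is
# an assumption -- with the dependencies above installed and an OpenRouteService
# key exported as OPENSTREET_API):
#   streamlit run app.py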