import streamlit as st
from gnews import GNews
from transformers import pipeline
from newspaper import Article
import pandas as pd
import numpy as np
import requests
import json
import os
import pydeck as pdk



def fetch_news(topic):
    """Fetch the latest Google News articles for the given topic via GNews."""
    google_news = GNews(language='german', country='Germany')  # Language/country can be customized
    news_list = google_news.get_news(topic)
    
    articles = []
    for news in news_list[:5]:  # Get top 5 news articles
        articles.append({
            'title': news['title'],
            'published_date': news['published date'],
            'description': news['description'],
            'url': news['url'],
            'publisher': news['publisher']
        })
    return articles
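
# Illustrative shape of a single fetch_news() result. The values below are
# hypothetical; the actual fields come from the GNews response:
# {
#     'title': 'Example headline ...',
#     'published_date': 'Mon, 01 Jan 2024 08:00:00 GMT',
#     'description': 'Short summary of the article ...',
#     'url': 'https://news.google.com/...',
#     'publisher': {'href': 'https://example-publisher.de', 'title': 'Example Publisher'}
# }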






def page_trending_niche():
    st.title("What is trending in my niche?")
    niche = st.text_input('Enter your niche', 'Technology')
    if niche:
        news_items = fetch_news(niche)
        article_titles = [item['title'] for item in news_items]
        selected_article = st.selectbox("Select an article to generate a social media post about:", article_titles)
        selected_article_description = next((item['description'] for item in news_items if item['title'] == selected_article), None)
        
        # Save the selected article's description in the session state to use in another page
        st.session_state['selected_article_description'] = selected_article_description

        for item in news_items:
            st.write(f"**Title:** {item['title']}")
            st.write(f"**Published Date:** {item['published_date']}")
            st.write(f"**Description:** {item['description']}")
            st.write(f"**Publisher:** {item['publisher']}")
            st.write(f"**URL:** [Read more]({item['url']})")
            st.write("---")


def fetch_full_article(url):
    """Fetches the full text of an article given its URL."""
    article = Article(url)
    article.download()
    article.parse()
    return article.text
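
# Note (assumption): newspaper's Article.download()/parse() can raise exceptions
# (e.g. ArticleException) for paywalled or unreachable pages; callers could wrap
# fetch_full_article() in try/except if the app should degrade gracefully.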
    


# Initialize the summarization pipeline with BART
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")

def split_text_into_chunks(text, chunk_size, overlap_size):
    """Split text into consecutive chunk_size-character pieces, each extended by up to overlap_size characters."""
    chunks = []
    index = 0
    while index < len(text):
        # End index for the current chunk
        end_index = index + chunk_size
        # Extend the end index to include the overlap, if possible
        end_index_with_overlap = min(end_index + overlap_size, len(text))
        # Extract the chunk with the overlap
        chunk = text[index:end_index_with_overlap]
        chunks.append(chunk)
        # Move the index to the start of the next chunk, which is end_index
        index = end_index
    return chunks
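
# Worked example (illustrative): for a 2,000-character text with chunk_size=900
# and overlap_size=50, the loop above produces chunks covering text[0:950],
# text[900:1850] and text[1800:2000]; consecutive chunks share up to
# overlap_size characters so sentences cut at a chunk boundary are not lost.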
    

def generate_social_media_post(article_text):
    """Summarize the article chunk by chunk with BART and join the partial summaries."""
    chunk_size = 900  # Chunk size in characters, chosen to stay well under BART's 1024-token input limit
    overlap_size = 50  # Overlap between chunks so text cut at a boundary is not lost
    chunks = split_text_into_chunks(article_text, chunk_size, overlap_size)
    
    summarized_text = ''
    for chunk in chunks:
        # Call the summarizer for each chunk
        summary = summarizer(chunk, max_length=130, min_length=30, do_sample=False)[0]['summary_text']
        summarized_text += summary + ' '
    return summarized_text.strip()



        
def page_article_to_social_post():
    st.title("Article to Social Media Post")
    
    # User input for niche
    niche = st.text_input('Enter your niche', 'Technology')
    
    if niche:
        # Fetch news articles
        google_news = GNews(language='german', country='Germany')  # You can customize this
        news_list = google_news.get_news(niche)
        
        if not news_list:
            st.write("No news found for the given niche.")
            return
        
        # Display article titles in a selectbox
        article_titles = [news['title'] for news in news_list[:5]]
        selected_title = st.selectbox("Select an article:", article_titles)
        
        selected_article = next((item for item in news_list if item['title'] == selected_title), None)
        if selected_article:
            selected_url = selected_article['url']


            if st.button('Fetch Full Article'):
                # Fetch the full article text
                article_text = fetch_full_article(selected_url)
                
                # Use an expander to display the article text in a scrollable view
                with st.expander("See full article"):
                    st.text_area("Article Text", article_text, height=500)  # Height is in pixels

                
                # Store the full article text in session state for later use
                st.session_state.full_article_text = article_text

            if st.button('Generate Social Media Post') and 'full_article_text' in st.session_state:
                with st.spinner('Generating...'):
                    # Generate a summary based on the full article text
                    post_content = generate_social_media_post(st.session_state.full_article_text)
                    st.success('Generated Content:')
                    st.write(post_content)



def get_route(start_coord, end_coord):
    """Request a driving route from OpenRouteService; coordinates are (longitude, latitude) pairs."""
    api_key = os.getenv('OPENSTREET_API')
    
    # Debugging: Confirm API key retrieval
    if not api_key:
        st.error("API key not found. Please set the OPENSTREET_API environment variable.")
        return None
    else:
        st.success("API key successfully retrieved.")

    # Prepare the request details
    headers = {
        'Accept': 'application/json, application/geo+json, application/gpx+xml, img/png; charset=utf-8',
    }
    url = f"https://api.openrouteservice.org/v2/directions/driving-car?api_key={api_key}&start={start_coord[0]},{start_coord[1]}&end={end_coord[0]},{end_coord[1]}"
    

    # Make the API request
    response = requests.get(url, headers=headers)

    if response.status_code == 200:
        data = response.json()
                
        if 'features' in data and len(data['features']) > 0:
            route = data['features'][0]['geometry']['coordinates']
            
            # Debugging: Confirm route data extraction
            st.success("Route data successfully extracted.")
            return route
        else:
            st.error("Route data is missing from the response.")
            return None
    else:
        response_text = response.text[:500]  # Limit the amount of text displayed for clarity
        st.error(f"Failed to get route data. Status code: {response.status_code}, Response: {response_text}")
        return None
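
# Illustrative shape of a successful get_route() return value (coordinates are
# hypothetical): a list of [longitude, latitude] pairs along the route, e.g.
# [[9.0576, 48.5216], [9.0581, 48.5220], ..., [8.9675, 48.3233]]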

        
from geopy.geocoders import Nominatim

def geocode_location(location_name):
    """Resolve a free-text location name to a (longitude, latitude) tuple via Nominatim."""
    geolocator = Nominatim(user_agent="streamlit_route_finder")
    location = geolocator.geocode(location_name)
    if location:
        return (location.longitude, location.latitude)
    else:
        st.error(f"Could not geocode location name: {location_name}")
        return None
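
# Example (illustrative): geocode_location("Tübingen, Germany") would return a
# (longitude, latitude) tuple of roughly (9.05, 48.52); exact values depend on
# Nominatim's current data, and None is returned when no match is found.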

        
# The updated page_vacation function using location names
def page_vacation():
    main_title_column, main_logo_column = st.columns([3, 1])
    
    with main_title_column:
        st.title("Scenic Routes & Stops: Your Vacation Odyssey")
    
    # Add content to the right column (picture)
    with main_logo_column:
        st.image("Vaccation_Route_finder_Logo.png")

    # Create two columns
    start_column, end_column = st.columns(2)
    
    # Inputs for start location name
    with start_column:
        st.header("Start Point")
        start_location_name = st.text_input("Enter Start Location Name", "Österbergstraße 3, 72074 Tübingen, Germany")
    
    # Inputs for end location name
    with end_column:
        st.header("End Point")
        end_location_name = st.text_input("Enter End Location Name", "Burg Hohenzollern, 72379 Germany")

    
    if st.button("Find Route"):
        # Geocode the location names to get the coordinates
        start_coords = geocode_location(start_location_name)
        end_coords = geocode_location(end_location_name)

        if start_coords and end_coords:
            # Fetch the route using the geocoded coordinates
            route = get_route(start_coords, end_coords)
            if route:
                # Convert the route to a DataFrame
                route_df = pd.DataFrame(route, columns=['lon', 'lat'])
                
                # Create the route data for PyDeck
                route_data = [{'path': route}]
                
                # Display the route using PyDeck
                st.pydeck_chart(pdk.Deck(
                    map_style='mapbox://styles/mapbox/light-v9',
                    initial_view_state=pdk.ViewState(
                        latitude=start_coords[1],
                        longitude=start_coords[0],
                        zoom=11,
                        pitch=50,
                    ),
                    layers=[
                        pdk.Layer(
                            'PathLayer',
                            data=route_data,
                            get_path='path',
                            width_scale=20,
                            width_min_pixels=2,
                            get_color=[255, 140, 0],  # Orange color
                            pickable=True
                        ),
                    ],
                ))

def page_vacation_old():
    st.title("Route Finder with OpenRouteService")

    # Inputs for start and end coordinates
    st.header("Start Point")
    start_lat = st.number_input("Start Latitude", value=48.5216)
    start_lon = st.number_input("Start Longitude", value=9.0576)

    st.header("End Point")
    end_lat = st.number_input("End Latitude", value=48.7758)
    end_lon = st.number_input("End Longitude", value=9.1829)

    if st.button("Find Route"):
        route = get_route([start_lon, start_lat], [end_lon, end_lat])
        if route:
            # Convert the route to a DataFrame
            route_df = pd.DataFrame(route, columns=['lon', 'lat'])
            
            # Create the route data for PyDeck, making sure each coordinate pair is a list within the 'path' list
            route_data = [{'path': route}]
            
            # Display the route using PyDeck
            st.pydeck_chart(pdk.Deck(
                map_style='mapbox://styles/mapbox/light-v9',
                initial_view_state=pdk.ViewState(
                    latitude=48.5216,  # This should ideally be the midpoint of the route
                    longitude=9.0576,  # This should ideally be the midpoint of the route
                    zoom=11,
                    pitch=0,
                ),
                layers=[
                    pdk.Layer(
                        'PathLayer',
                        data=route_data,
                        get_path='path',
                        width_scale=20,
                        width_min_pixels=2,
                        get_color=[255, 140, 0],  # Color the path with a visible color
                        pickable=True
                    ),
                ],
            ))

# Setup the sidebar with page selection
st.sidebar.title("Anne's Current Projects :star2:")

page = st.sidebar.selectbox(
    'Which project would you like to see first?',
    ('trending_niche', 'page_article_to_social_post', 'Vacation Page'))

# Display the selected page
if page == 'trending_niche':
    page_trending_niche()
elif page == 'page_article_to_social_post':
    page_article_to_social_post()
elif page == 'Vacation Page':
    page_vacation()