# -*- coding: utf-8 -*-
"""
Created on Thu Jun 8 03:39:02 2023

@author: mritchey
"""
# streamlit run "C:\Users\mritchey\.spyder-py3\Python Scripts\streamlit projects\hail\hail all.py"
import pandas as pd
import numpy as np
import streamlit as st
from geopy.extra.rate_limiter import RateLimiter
from geopy.geocoders import Nominatim
import folium
from streamlit_folium import st_folium
from vincenty import vincenty
import duckdb

st.set_page_config(layout="wide")


@st.cache_data
def convert_df(df):
    return df.to_csv(index=False).encode('utf-8')


def duck_sql(sql_code):
    con = duckdb.connect()
    con.execute("PRAGMA threads=2")
    con.execute("PRAGMA enable_object_cache")
    return con.execute(sql_code).df()


def get_data(lat, lon, date_str):
    # Pull hail reports within a 1-degree box around the address,
    # on or before the loss date ("#ZTIME" is a yyyymmdd UTC date).
    code = f"""
    select "#ZTIME" as "Date_utc", LON, LAT, MAXSIZE
    from 'data/*.parquet'
    where LAT <= {lat}+1 and LAT >= {lat}-1
    and LON <= {lon}+1 and LON >= {lon}-1
    and "#ZTIME" <= {date_str}
    """
    return duck_sql(code)


def map_location(address, lat, lon):
    m = folium.Map(location=[lat, lon], zoom_start=6, height=400)
    folium.Marker(
        location=[lat, lon],
        tooltip=f'Address: {address}',
    ).add_to(m)
    return m


def distance(x):
    # x is (lat1, lon1, lat2, lon2); returns the distance in miles.
    left_coords = (x[0], x[1])
    right_coords = (x[2], x[3])
    return vincenty(left_coords, right_coords, miles=True)


def geocode(address):
    try:
        # Try the Census Bureau geocoder first.
        address2 = address.replace(' ', '+').replace(',', '%2C')
        df = pd.read_json(
            f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
        results = df.iloc[:1, 0][0][0]['coordinates']
        lat, lon = results['y'], results['x']
    except Exception:
        # Fall back to Nominatim, rate-limited to one request per second.
        geolocator = Nominatim(user_agent="GTA Lookup")
        geocode_rl = RateLimiter(geolocator.geocode, min_delay_seconds=1)
        location = geocode_rl(address)
        lat, lon = location.latitude, location.longitude
    return lat, lon


# Sidebar
address = st.sidebar.text_input("Address", "Dallas, TX")
date = st.sidebar.date_input("Loss Date", pd.Timestamp(2023, 7, 14), key='date')
date_str = date.strftime("%Y%m%d")

# Geocode address
lat, lon = geocode(address)

# Filter data
df_hail_cut = get_data(lat, lon, date_str)
df_hail_cut["Lat_address"] = lat
df_hail_cut["Lon_address"] = lon
df_hail_cut['Miles to Hail'] = [
    distance(i) for i in df_hail_cut[['LAT', 'LON', 'Lat_address', 'Lon_address']].values]
df_hail_cut['MAXSIZE'] = df_hail_cut['MAXSIZE'].round(1)
df_hail_cut = df_hail_cut.query("`Miles to Hail`<10")
df_hail_cut['Category'] = np.where(df_hail_cut['Miles to Hail'] < 1, "Within 1 Mile",
                          np.where(df_hail_cut['Miles to Hail'] < 3, "Within 3 Miles",
                          np.where(df_hail_cut['Miles to Hail'] < 5, "Within 5 Miles",
                          np.where(df_hail_cut['Miles to Hail'] < 10, "Within 10 Miles",
                                   'Other'))))

# Max hail size per date within each distance band
df_hail_cut_group = pd.pivot_table(df_hail_cut, index='Date_utc',
                                   columns='Category',
                                   values='MAXSIZE', aggfunc='max')
cols = df_hail_cut_group.columns
cols_focus = ["Within 1 Mile", "Within 3 Miles",
              "Within 5 Miles", "Within 10 Miles"]

missing_cols = set(cols_focus) - set(cols)
for c in missing_cols:
    df_hail_cut_group[c] = np.nan

# Keep only dates with hail within 3 miles (x == x is False for NaN)
df_hail_cut_group2 = df_hail_cut_group[cols_focus].query(
    "`Within 3 Miles`==`Within 3 Miles`")

# The bands are nested, so carry each band's maximum outward:
# hail within 1 mile is also within 3, 5, and 10 miles.
for i in range(len(cols_focus) - 1):
    df_hail_cut_group2[cols_focus[i + 1]] = np.where(
        df_hail_cut_group2[cols_focus[i + 1]].fillna(0) < df_hail_cut_group2[cols_focus[i]].fillna(0),
        df_hail_cut_group2[cols_focus[i]],
        df_hail_cut_group2[cols_focus[i + 1]])

df_hail_cut_group2 = df_hail_cut_group2.sort_index(ascending=False)
df_hail_cut_group2.index = pd.to_datetime(
    df_hail_cut_group2.index, format='%Y%m%d').strftime("%Y-%m-%d")

# Map data
m = map_location(address, lat, lon)

# Display
col1, col2 = st.columns((3, 2))

with col1:
    st.header('Estimated Maximum Hail Size')
    st.write('Data from 2010 to 2023-09-24')
    df_hail_cut_group2  # Streamlit "magic": a bare expression renders the dataframe
    csv2 = convert_df(df_hail_cut_group2.reset_index())
    st.download_button(
        label="Download data as CSV",
        data=csv2,
        file_name=f'{address}_{date_str}.csv',
        mime='text/csv')

with col2:
    st.header('Map')
    st_folium(m, height=400)
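
# Usage sketch for the helpers above (hypothetical values, kept as comments
# so the app's behavior is unchanged):
#
#   lat, lon = geocode("Dallas, TX")                    # roughly (32.78, -96.80)
#   distance((32.7767, -96.7970, 32.7157, -96.8352))    # roughly 4.8 miles
#
# Note: vincenty() can return None when the formula fails to converge (e.g.
# near-antipodal points), so a guard around 'Miles to Hail' may be worthwhile.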