"""
Created on Thu Jun 8 03:39:02 2023

@author: mritchey

Streamlit app: look up the estimated maximum hail size near a geocoded address.
"""
import os
import urllib.parse

import duckdb
import folium
import numpy as np
import pandas as pd
import requests
import streamlit as st
from geopy.extra.rate_limiter import RateLimiter
from geopy.geocoders import Nominatim
from streamlit_folium import st_folium
from vincenty import vincenty

geocode_key = os.environ["geocode_key"]  # Geocodio API key for the fallback geocoder
st.set_page_config(layout="wide")
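# Assumes a Geocodio key is exported in the environment before launch, e.g.:
#   export geocode_key=YOUR_KEY_HERE   (placeholder, not a real key)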
|
|
@st.cache_data
def convert_df(df):
    """Convert a DataFrame to UTF-8 CSV bytes for st.download_button."""
    return df.to_csv(index=False).encode('utf-8')


def duck_sql(sql_code):
    """Run a SQL query against the local parquet files via DuckDB."""
    con = duckdb.connect()
    con.execute("PRAGMA threads=2")
    con.execute("PRAGMA enable_object_cache")
    return con.execute(sql_code).df()


def get_data(lat, lon, date_str):
    """Pull hail rows within a +/- 1 degree box of the address, up to the loss date."""
    code = f"""
    select "#ZTIME" as "Date_utc", LON, LAT, MAXSIZE
    from 'data/*.parquet'
    where LAT <= {lat}+1 and LAT >= {lat}-1
      and LON <= {lon}+1 and LON >= {lon}-1
      and "#ZTIME" <= {date_str}
    """
    return duck_sql(code)
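# Assumption about the data layout (not stated in the original): data/*.parquet
# holds hail-swath records with columns "#ZTIME" (UTC date as a YYYYMMDD
# integer, judging by the date_str comparison above), LAT, LON, and MAXSIZE
# (estimated maximum hail size, presumably in inches).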
|
|
def map_location(address, lat, lon):
    """Build a folium map centered on the address with a single marker."""
    m = folium.Map(location=[lat, lon],
                   zoom_start=9,
                   height=400)
    folium.Marker(
        location=[lat, lon],
        tooltip=f'Address: {address}',
    ).add_to(m)
    return m


def distance(x):
    """Vincenty distance in miles between (lat1, lon1) and (lat2, lon2)."""
    left_coords = (x[0], x[1])
    right_coords = (x[2], x[3])
    return vincenty(left_coords, right_coords, miles=True)
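# Note: vincenty() returns None when the iterative formula fails to converge
# (possible for near-antipodal points). That should not occur here, since the
# SQL above already restricts rows to within about one degree of the address.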
|
|
def geocode(address):
    """Geocode an address: try the US Census first, then Nominatim, then Geocodio."""
    try:
        try:
            # US Census geocoder (free, US addresses only)
            address2 = address.replace(' ', '+').replace(',', '%2C')
            df = pd.read_json(
                f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
            results = df.iloc[:1, 0][0][0]['coordinates']
            lat, lon = results['y'], results['x']
        except Exception:
            # Fall back to Nominatim, rate-limited to one request per second
            geolocator = Nominatim(user_agent="GTA Lookup")
            geocode_limited = RateLimiter(geolocator.geocode, min_delay_seconds=1)
            location = geocode_limited(address)
            lat, lon = location.latitude, location.longitude
    except Exception:
        try:
            # Last resort: Geocodio
            address = urllib.parse.quote(address)
            url = 'https://api.geocod.io/v1.7/geocode?q=' + address + f'&api_key={geocode_key}'
            json_response = requests.get(url, verify=False).json()
            lat = json_response['results'][0]['location']['lat']
            lon = json_response['results'][0]['location']['lng']
        except Exception:
            st.header("Sorry, the address was not found. Try correcting it with Google, or just use City, State & Zip.")
            st.stop()  # halt the app; lat/lon would otherwise be unbound
    return lat, lon
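# Example: geocode("Dallas, TX") should return roughly (32.78, -96.80).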
|
|
# Sidebar inputs
address = st.sidebar.text_input("Address", "Dallas, TX")
date = st.sidebar.date_input("Loss Date (Max)", pd.Timestamp(2024, 12, 11), key='date')
show_data = st.sidebar.selectbox('Show Data At Least Within:',
                                 ('Show All', '1 Mile', '3 Miles', '5 Miles'))
|
|
date_str = date.strftime("%Y%m%d")  # match the YYYYMMDD format of "#ZTIME"

lat, lon = geocode(address)

df_hail_cut = get_data(lat, lon, date_str)

df_hail_cut["Lat_address"] = lat
df_hail_cut["Lon_address"] = lon
df_hail_cut['Miles to Hail'] = [
    distance(i) for i in df_hail_cut[['LAT', 'LON', 'Lat_address', 'Lon_address']].values]
df_hail_cut['MAXSIZE'] = df_hail_cut['MAXSIZE'].round(2)

# Keep reports within 10 miles and label each row with its distance band
df_hail_cut = df_hail_cut.query("`Miles to Hail`<10")
df_hail_cut['Category'] = np.where(df_hail_cut['Miles to Hail'] < 1, "Within 1 Mile",
                          np.where(df_hail_cut['Miles to Hail'] < 3, "Within 3 Miles",
                          np.where(df_hail_cut['Miles to Hail'] < 5, "Within 5 Miles",
                          np.where(df_hail_cut['Miles to Hail'] < 10, "Within 10 Miles", 'Other'))))
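# Hedged addition (not in the original): bail out early when no hail is found
# within 10 miles, rather than rendering empty tables downstream.
if df_hail_cut.empty:
    st.warning(f"No hail found within 10 miles of {address} through {date_str}.")
    st.stop()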
|
# Max hail size per date per distance band
df_hail_cut_group = pd.pivot_table(df_hail_cut, index='Date_utc',
                                   columns='Category',
                                   values='MAXSIZE',
                                   aggfunc='max')

cols = df_hail_cut_group.columns
cols_focus = ["Within 1 Mile", "Within 3 Miles",
              "Within 5 Miles", "Within 10 Miles"]

# Make sure every band column exists, even when no hail fell in that band
missing_cols = set(cols_focus) - set(cols)
for c in missing_cols:
    df_hail_cut_group[c] = np.nan

df_hail_cut_group2 = df_hail_cut_group[cols_focus]

# Unless showing everything, keep only dates with hail inside the chosen
# radius (x == x is True only where x is not NaN)
if show_data != 'Show All':
    df_hail_cut_group2 = df_hail_cut_group2.query(
        f"`Within {show_data}`==`Within {show_data}`")

# Roll the max outward so each wider band is at least as large as the
# narrower band it contains
for i in range(len(cols_focus)-1):
    df_hail_cut_group2[cols_focus[i+1]] = np.where(df_hail_cut_group2[cols_focus[i+1]].fillna(0) <
                                                   df_hail_cut_group2[cols_focus[i]].fillna(0),
                                                   df_hail_cut_group2[cols_focus[i]],
                                                   df_hail_cut_group2[cols_focus[i+1]])

# Newest dates first; the raw YYYYMMDD index sorts the same as real dates
df_hail_cut_group2 = df_hail_cut_group2.sort_index(ascending=False)
df_hail_cut_group2.index = pd.to_datetime(df_hail_cut_group2.index, format='%Y%m%d')
df_hail_cut_group2.index = df_hail_cut_group2.index.strftime("%Y-%m-%d")
|
|
m = map_location(address, lat, lon)

col1, col2 = st.columns((3, 2))
|
with col1:
    st.header('Estimated Maximum Hail Size')
    st.write('Data from 2010 to 2024-12-11')
    df_hail_cut_group2  # Streamlit "magic" renders the DataFrame

    # Attach the queried address to the first row of the download
    data = df_hail_cut_group2.reset_index()
    data['Address'] = ''
    data.loc[0, 'Address'] = address
    csv2 = convert_df(data)

    st.download_button(
        label="Download data as CSV",
        data=csv2,
        file_name=f'{address}_{date_str}.csv',
        mime='text/csv')

with col2:
    st.header('Map')
    st_folium(m, height=400)