# -*- coding: utf-8 -*-
"""
Created on Thu Jun  8 03:39:02 2023

@author: mritchey
"""

import pandas as pd
import numpy as np
import streamlit as st
from geopy.extra.rate_limiter import RateLimiter
from geopy.geocoders import Nominatim
import folium
from streamlit_folium import st_folium
from vincenty import vincenty

st.set_page_config(layout="wide")

@st.cache_data
def convert_df(df):
    # Cache the CSV bytes so repeated downloads of the same frame are free.
    return df.to_csv(index=False).encode('utf-8')

@st.cache_data
def get_data(file='hail2010-20230920_significant_bulk_all.parquet'):
    # Load the bulk hail-report parquet once and reuse it across reruns.
    return pd.read_parquet(file)


def map_perimeters(address, lat, lon):
    # Folium map centered on the geocoded address with a single marker.
    m = folium.Map(location=[lat, lon],
                   zoom_start=6,
                   height=400)
    folium.Marker(
        location=[lat, lon],
        tooltip=f'Address: {address}',
    ).add_to(m)
    return m

def distance(x):
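    # x is a flat (report_lat, report_lon, address_lat, address_lon) row;
    # returns the Vincenty distance in miles.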
    left_coords = (x[0], x[1])
    right_coords = (x[2], x[3])
    return vincenty(left_coords, right_coords, miles=True)

def geocode(address):
    try:
        # Try the Census Bureau geocoder first.
        address2 = address.replace(' ', '+').replace(',', '%2C')
        df = pd.read_json(
            f'https://geocoding.geo.census.gov/geocoder/locations/onelineaddress?address={address2}&benchmark=2020&format=json')
        results = df.iloc[:1, 0][0][0]['coordinates']
        lat, lon = results['y'], results['x']
    except Exception:
        # Fall back to Nominatim, rate-limited to one request per second.
        geolocator = Nominatim(user_agent="GTA Lookup")
        geocode_nominatim = RateLimiter(geolocator.geocode, min_delay_seconds=1)
        location = geocode_nominatim(address)
        lat, lon = location.latitude, location.longitude
    return lat, lon





# Sidebar inputs
address = st.sidebar.text_input(
    "Address", "Dallas, TX")
date = st.sidebar.date_input("Loss Date", pd.Timestamp(2023, 7, 14), key='date')
df_hail = get_data()
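# The parquet file is assumed to contain at least LAT, LON, Date_est, and
# MAXSIZE columns; those are the only fields used below.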


# Geocode the address
lat, lon = geocode(address)

# Filter data to a rough 1-degree box around the address and to dates on or
# before the loss date.
df_hail_cut = df_hail.query(f"{lat}-1<=LAT<={lat}+1 and {lon}-1<=LON<={lon}+1")
df_hail_cut = df_hail_cut.query("Date_est<=@date")


df_hail_cut["Lat_address"] = lat
df_hail_cut["Lon_address"] = lon
df_hail_cut['Miles to Hail'] = [
    distance(i) for i in df_hail_cut[['LAT','LON','Lat_address','Lon_address']].values]
df_hail_cut['MAXSIZE'] = df_hail_cut['MAXSIZE'].round(2)

df_hail_cut = df_hail_cut.query("`Miles to Hail`<10")
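# Bucket each remaining report into the tightest distance band it falls in.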
df_hail_cut['Category'] = np.where(df_hail_cut['Miles to Hail'] < .25, "At Location",
                          np.where(df_hail_cut['Miles to Hail'] < 1, "Within 1 Mile",
                          np.where(df_hail_cut['Miles to Hail'] < 3, "Within 3 Miles",
                          np.where(df_hail_cut['Miles to Hail'] < 10, "Within 10 Miles", 'Other'))))

# One row per date, one column per distance band, holding the max hail size.
df_hail_cut_group = pd.pivot_table(df_hail_cut, index='Date_est',
                                   columns='Category',
                                   values='MAXSIZE',
                                   aggfunc='max')

cols = df_hail_cut_group.columns
cols_focus = ['At Location', "Within 1 Mile", "Within 3 Miles", "Within 10 Miles"]

# Make sure every distance band exists as a column, even if no reports fell in it.
missing_cols = set(cols_focus) - set(cols)
for c in missing_cols:
    df_hail_cut_group[c] = np.nan

df_hail_cut_group2 = df_hail_cut_group[cols_focus].copy()

# A wider band contains every report in the narrower bands, so its max size
# should never be smaller; propagate maxima outward from "At Location".
for i in range(3):
    df_hail_cut_group2[cols_focus[i+1]] = np.where(df_hail_cut_group2[cols_focus[i+1]].fillna(0) <
                                                   df_hail_cut_group2[cols_focus[i]].fillna(0),
                                                   df_hail_cut_group2[cols_focus[i]],
                                                   df_hail_cut_group2[cols_focus[i+1]])



df_hail_cut_group2 = df_hail_cut_group2.sort_index(ascending=False)  # most recent dates first

# Map data
m = map_perimeters(address, lat, lon)

# Display
col1, col2 = st.columns((3, 2))
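# Left column: results table and CSV download; right column: map of the address.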
with col1:
    st.header('Estimated Maximum Hail Size')
    st.write('Data from 2010 to 2023-09-20')
    st.dataframe(df_hail_cut_group2)
    csv2 = convert_df(df_hail_cut_group2.reset_index())
    st.download_button(
        label="Download data as CSV",
        data=csv2,
        file_name=f'{address}_{date}.csv',
        mime='text/csv')
with col2:
    st.header('Map')
    st_folium(m, height=400)