Spaces: Runtime error
poooja2012 committed
Commit 00ac9b2 • 1 Parent(s): c0594e6
data folder
Browse files
- .DS_Store +0 -0
- .gitattributes +2 -0
- .streamlit/config.toml +29 -0
- .streamlit/secrets.toml +13 -0
- Dockerfile +26 -0
- app.py +42 -0
- app.yaml +12 -0
- docker-compose.yml +9 -0
- historicalData/.DS_Store +0 -0
- historicalData/data.rtf +8 -0
- precipitation_function.py +233 -0
- references/image.png +0 -0
- requirements.txt +16 -0
- stlib/__init__.py +0 -0
- stlib/__pycache__/__init__.cpython-39.pyc +0 -0
- stlib/__pycache__/precipitation.cpython-39.pyc +0 -0
- stlib/__pycache__/temperature.cpython-39.pyc +0 -0
- stlib/precipitation.py +493 -0
- stlib/temperature.py +484 -0
- temperature_functions.py +284 -0
.DS_Store
ADDED
Binary file (6.15 kB)
.gitattributes
CHANGED
@@ -29,3 +29,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zstandard filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+historicalData filter=lfs diff=lfs merge=lfs -text
+historicalData/ filter=lfs diff=lfs merge=lfs -text
.streamlit/config.toml
ADDED
@@ -0,0 +1,29 @@
[server]

port = 8501

headless = true


[browser]

# Internet address where users should point their browsers in order to connect to the app. Can be IP address or DNS name and path.
# This is used to: - Set the correct URL for CORS and XSRF protection purposes. - Show the URL on the terminal - Open the browser
# Default: 'localhost'
serverAddress = "localhost"

# Whether to send usage statistics to Streamlit.
# Default: true
gatherUsageStats = true

# Port where users should point their browsers in order to connect to the app.
# This is used to: - Set the correct URL for CORS and XSRF protection purposes. - Show the URL on the terminal - Open the browser
# Default: whatever value is set in server.port.
serverPort = 8501



[theme]

# The preset Streamlit theme that your custom theme inherits from. One of "light" or "dark".
base = "light"
.streamlit/secrets.toml
ADDED
@@ -0,0 +1,13 @@


[gcp_service_account]
type = "data-visualizationproject"
project_id = "fe5615839c19ff35f46a8262eaa0cc48e5a21193"
private_key_id = "fe5615839c19ff35f46a8262eaa0cc48e5a21193"
private_key = "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDKI+L0Ll9Rz2C/\n5Kban6SlVYtmfZZPZlHyZSU4FzrIKj1YuBW96LHgPM1b00jGRaRizK+tVqnY0+8W\nWAdJnCPDZBoeSnFdcKlGvCQSxxGuWkJWhOc9IdxK1Rk4ckgJD2kU3JLta/fzqJhl\n8dyNE42x6Wch+O9LCSiHhjzfCKnTjNwkR6cstQFxklwydYPa44F4K4AgOwsz0vBf\nDHYr8c9bwaXJOx8ZhZ+VgzLy5HFbENyN/9rXug+B1nWS5WRnBPyYhlNKLPFVf0Dy\nmTksOkISse3H2jljSl+1HM8mehnLF7GPq2sAwd+LMnT4AKIjjFPz/2vcjaJJmTt8\nXY5w9Q1BAgMBAAECggEAAsLv3rq7swEakvKJaEEQMXFIS86VSSlGXzGsKw6PrzR7\nBTFSHDefpvbjrhCGTO2egdpGn99IR5nZGUC7fEEjoK+nVvC4yOBRMrH/KunA60Uo\nxPhwtYZG9XeA7kNK6kNFcetTv0Y3Y2LzLojZ5Mw+pj6gWd+mCrwRu3y9LAWY9tau\neYcOHtoPn8Y0qVqe1M/NXDuJrIg7zNk763T/6pRTUu12f6K4l3iVJch7Qx0WVAMV\ni+FXmikLXgdkpk2P/NZrKCOyzBdNOjVVH0wE4iIZt5P2PS/rRKjGJZRDZg/NBRzJ\n5WWO8fNhTwhMkAlwIzjOd3BkUNO5XUdCEFqyW66WQQKBgQD+39Fz6A6OdW20zjXL\nEldqorIaJ59Tjfpv3JmD18eG5UsuIgj4GdSawqRtqgulYmVJp5z8XQDih3i4+0uD\n2pjXxetm6gRsQEWfklMCez+PgxQ7pBzbi5W4p2s104006e9P6aXQqkApjP8XLjzV\nREbbPGGEfLwipwjdNECZUEJIYQKBgQDLCHFeWH0RMGkU9WecNZxSAyCtqrfARhoM\nC08m2LKhLDwNAeYwmBkQlduhWtUPfqxOxL/WmP2Kq/tJ6ZXl8DjkT2craae5EP2P\nrsIma8m4dgJuveJfO2UlnwJvZTQs5+6RGKQFBjonP2qT90OLfZZfZUE5ZH0377sk\ngkMIf6lw4QKBgQCvqrAARScTRRhyD9a/ukqJ8szyffCwdCDyTId6eZTKAuvIRikM\nZzFD4XfXJeaU/LZ3B5GJ1sgvWdP5Gyq3HElQHtfavNyDh+D01LNl+HCch2nAmaJS\nJ2jwNmQMW1zt5XbOC2n/4fgLK2T+Ix5Gl6KGpJ1J5Xb+I0ZWwBbuFIU6wQKBgHTj\n6Hkb0u1phKYnXz5k8xmYaWDyz5WoBmj2vdwsmDMIh44pkMBbYNrWnDr6U333PPag\neZ3wONB0kVkNXVqjge2X9VreGk0HBiSJxYdfOSS09FOTiNexF1ugf0No7bAI26W3\nnP6zhhmAfjiv+7g8Vq2XWwTS4PCqWYD8aeFjtkahAoGBANyoPpnY1h1qFmMtwKI5\nIlAxNBZjDOzbVeE8lSnJgVnSdeqiGPrrm85UYNgpuss+yfdhcPSTql9/k7ido0x3\nEZmPErHU9pff+fYjLTJoiS3tx/bY8F1EdFI+9KwkDJJgzFClWwXkH1LnO0f4xvvk\nWS7uakfu/xsYJ/WhBAFZn8Jo\n-----END PRIVATE KEY-----\n"
client_email ="pooja-yadav-2012@data-visualizationproject.iam.gserviceaccount.com"
client_id = "117601902564876434051"
auth_uri = "https://accounts.google.com/o/oauth2/auth"
token_uri = "https://oauth2.googleapis.com/token"
auth_provider_x509_cert_url = "https://www.googleapis.com/oauth2/v1/certs"
client_x509_cert_url = "https://www.googleapis.com/robot/v1/metadata/x509/pooja-yadav-2012%40data-visualizationproject.iam.gserviceaccount.com"
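The `[gcp_service_account]` table above is meant to be read back through `st.secrets`; the commit itself keeps the Google Cloud Storage path commented out in precipitation_function.py and loads local CSVs instead. A minimal sketch of how these secrets would be consumed, mirroring that commented-out block (bucket and file names are whatever the app would pass in, not fixed here):

# Sketch only: mirrors the commented-out GCS block in precipitation_function.py.
import io

import pandas as pd
import streamlit as st
from google.cloud import storage
from google.oauth2 import service_account

# Build a GCS client from the [gcp_service_account] table in .streamlit/secrets.toml
credentials = service_account.Credentials.from_service_account_info(
    st.secrets["gcp_service_account"]
)
client = storage.Client(credentials=credentials)

@st.cache(allow_output_mutation=True)
def read_file(bucket_name, file_path):
    # Download a zipped CSV blob from the bucket and load it into a DataFrame
    data = client.bucket(bucket_name).blob(file_path).download_as_bytes()
    return pd.read_csv(io.BytesIO(data), compression="zip")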
Dockerfile
ADDED
@@ -0,0 +1,26 @@
FROM python:3.9.12

WORKDIR /ethio_hydro

# Upgrade pip and install requirements

COPY requirements.txt requirements.txt
RUN pip install -U pip
RUN pip install -r requirements.txt

# Expose port you want your app on
EXPOSE 8501

# Copy app code and set working directory
COPY app.py app.py
COPY references references
COPY stlib stlib
COPY precipitation_function.py precipitation_function.py
COPY temperature_functions.py temperature_functions.py
COPY .streamlit .streamlit



# Run
RUN pip install streamlit
ENTRYPOINT ["streamlit", "run", "app.py"]
app.py
ADDED
@@ -0,0 +1,42 @@
#standard imports
import pandas as pd
import streamlit as st
from PIL import Image



# Setting the Page Layout as wide

st.set_page_config(
    page_title="AI GERD Dashboard",
    layout="wide")

# # Creating Container for Logo and Title
with st.container():
    col1,col2 = st.columns(2)
    #Code for adding Logo
    with col1:
        image = Image.open('references/image.png')
        st.image(image)
    #Code for Title
    with col2:
        col2.markdown("<h1 style='text-align:center; color: black;'>ETHIO HYDRO & CLIMATE HUB</h1>", unsafe_allow_html=True)

message = """
__Select an application from the list below__
"""

from stlib import precipitation
from stlib import temperature


with st.sidebar:
    st.markdown(message)
    page = st.selectbox(' ',['Temperature',"Precipitation"])


if page == 'Temperature':
    temperature.run()

elif page == 'Precipitation':
    precipitation.run()
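app.py is a thin dispatcher: the sidebar selectbox picks a page, and each page lives in a stlib module that exposes a run() function (see stlib/precipitation.py and stlib/temperature.py below). Streamlit requires st.set_page_config to be the first Streamlit call in a script, which is presumably why the stlib imports come after it here. A minimal sketch of what such a page module needs to provide; the module name and body are illustrative, not part of the commit:

# stlib/example_page.py  (hypothetical module, for illustration only)
import streamlit as st

def run():
    # Everything the page renders happens inside run(); app.py only calls it
    # after st.set_page_config() has already been executed once for the app.
    st.header("Example page")
    st.write("Widgets and charts for this page go here.")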
app.yaml
ADDED
@@ -0,0 +1,12 @@
runtime: custom
env: flex
# entrypoint: find ${VIRTUAL_ENV}/lib/python3.9/site-packages/streamlit -type f \( -iname \*.py -o -iname \*.js \) -print0 | xargs -0 sed -i 's/healthz/health-check/g' && streamlit run sim_v3.py --server.port $PORT --server.enableCORS=false

# runtime_config:
# python_version: 3
#
# manual_scaling:
# instances: 1
#
# network:
# session_affinity: true
docker-compose.yml
ADDED
@@ -0,0 +1,9 @@
version: "3.7"
services:
  streamlit:
    build:
      context: streamlit/
    volumes:
      - ./streamlit:/app
    ports:
      - 8080:8080
historicalData/.DS_Store
ADDED
Binary file (6.15 kB)
historicalData/data.rtf
ADDED
@@ -0,0 +1,8 @@
{\rtf1\ansi\ansicpg1252\cocoartf2638
\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;}
{\colortbl;\red255\green255\blue255;}
{\*\expandedcolortbl;;}
\margl1440\margr1440\vieww11520\viewh8400\viewkind0
\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0

\f0\fs24 \cf0 Hello}
precipitation_function.py
ADDED
@@ -0,0 +1,233 @@
import pandas as pd
import numpy as np
import streamlit as st



#imports for finding the nearest lat long using haversine distance

#visualization libraries to visualize different plots
import plotly.express as px
import plotly.graph_objects as go
import altair as alt
import io

#for Logo plotting
from PIL import Image

#disabling warnings
import warnings
warnings.filterwarnings("ignore")

#For parallel processing
from pandarallel import pandarallel
pandarallel.initialize(progress_bar=True)

from google.oauth2 import service_account
from google.cloud import storage

# Create API client.
# credentials = service_account.Credentials.from_service_account_info(
#     st.secrets["gcp_service_account"]
# )
# client = storage.Client(credentials=credentials)
#
# @st.cache(allow_output_mutation = True)
# def read_file(bucket_name, file_path):
#     bucket = client.bucket(bucket_name)
#     data = bucket.blob(file_path).download_as_bytes()
#     df = pd.read_csv(io.BytesIO(data),compression='zip')
#     return df




@st.cache
def date_split(df):
    df[['Year','Month','Day']] = df['date'].str.split('-',expand = True)
    return df


@st.cache(allow_output_mutation = True)
def lat_long_process_precp(df):
    df['lat_long'] = df['lat'].astype(str)+','+df['long'].astype(str)
    return df



@st.cache
def drop_dup_funct(x):
    x.drop_duplicates(inplace = True)
    return x

@st.cache(allow_output_mutation = True)
def concat_func(x,y,a,b):
    z = pd.concat([x,y,a,b],ignore_index = True)
    return z

@st.cache
def lat_long_type(nn_value):
    if isinstance(nn_value,str):
        return nn_value
    else:
        return nn_value.item((0))

@st.cache
def cumulative(df,start,end):
    df1 = df.groupby(['Year','Month'])['precip'].sum()
    df1 = df1.reset_index()
    df1 = df1.set_index('Year')
    df1 = df1.loc[str(start):str(end)]
    return df1

@st.cache
def cumulative_plot(df):
    fig = px.line(df, y='precip',title = 'Monthly Cumulative Precipitation')
    fig.update_traces(line_color = 'blue')
    fig.update_xaxes(title_text = 'Year',gridcolor = 'whitesmoke')
    fig.update_yaxes(ticklabelposition="inside top", title= 'Monthly Cumulative Precipitation in mm',gridcolor = 'whitesmoke')
    fig.update_layout(margin = dict(l=25,r=25,t=25,b=25))
    fig.update_layout(plot_bgcolor = 'rgba(0,0,0,0)')
    fig.update_layout(title = "Monthly Cumulative Precipitation")
    return fig


@st.cache(allow_output_mutation=True)
def daily_precp_data(precipitation_temp,start,end,option):
    df_daily = precipitation_temp.get_group(option)
    df_daily.set_index('date',inplace = True)
    # df_2 = df_daily.loc[str(start):str(end)]
    # df_3=df_2.reset_index()
    return df_daily

@st.cache
def daily_precp_plot(df):
    fig = px.line(df,y='precip',title = 'Daily Precipitation')
    fig.update_traces(line_color = 'blue')
    fig.update_xaxes(title_text = 'Year',gridcolor = 'whitesmoke')
    fig.update_yaxes(ticklabelposition="inside top", title= 'Daily Precipitation in mm',gridcolor = 'whitesmoke')
    fig.update_layout(margin = dict(l=25,r=25,t=25,b=25))
    fig.update_layout(plot_bgcolor = 'rgba(0,0,0,0)')
    fig.update_layout(title = "Daily Precipitation")
    return fig


def start_end_date_ui(start,end,key1,key2):
    st.markdown('**Enter Start Date**')
    start = st.date_input("",value = start,key = key1)
    if start < pd.to_datetime('2001/01/01'):
        st.write('Start date should not be less than 2001/01/01')

    st.markdown('**Enter End Date**')
    end = st.date_input("",value = end, key = key2)
    if end > pd.to_datetime('2019/12/31'):
        st.write('End date should not be greater than 2019/12/31')

    return start,end


def lat_long_ui(key1,key2):
    st.markdown('**Enter the latitude**')
    latitude_input = st.text_input('','12.55',key = key1)
    st.markdown('**Enter the longitude**')
    longitude_input = st.text_input('','42.45',key = key2)
    return latitude_input,longitude_input


def year_selection_ui(key1,key2):
    st.markdown('**Select the Start Year**')
    start_year = st.selectbox('',
        ('2001','2002','2003','2004','2005','2006','2007','2008','2009',
        '2010','2011','2012','2013','2014','2015','2016','2017','2018','2019'),key = key1)

    st.markdown('**Select the End Year**')
    end_year = st.selectbox('',
        ('2001','2002','2003','2004','2005','2006','2007','2008','2009',
        '2010','2011','2012','2013','2014','2015','2016','2017','2018','2019'),key = key2)

    return start_year,end_year

@st.cache(allow_output_mutation=True)
def monthly_mean_plot(df):
    title_text = "Monthly Mean Precipitation"
    highlight = alt.selection(
        type='single', on='mouseover', fields=['Year'], nearest=True)
    base = alt.Chart(df,title = title_text).encode(
        x = alt.X('Month:Q',scale = alt.Scale(domain=[1,12]),axis=alt.Axis(tickMinStep=1)),
        y = alt.Y('precip:Q',scale = alt.Scale(domain=[df['precip'].min(),df['precip'].max()])),
        color = alt.Color('Year:O',scale = alt.Scale(scheme = 'magma'))
    )
    points = base.mark_circle().encode(
        opacity=alt.value(0),
        tooltip=[
            alt.Tooltip('Year:O', title='Year'),
            alt.Tooltip('Month:Q', title='Month'),
            alt.Tooltip('precip:Q', title='Monthly Mean Precipitation')
        ]).add_selection(highlight)

    lines = base.mark_line().encode(
        size=alt.condition(~highlight, alt.value(1), alt.value(3)))

    mean_chart = (points + lines).properties(width=1000, height=400).interactive()
    return mean_chart

@st.cache
def annual_max_precip_plot(df):
    fig_max = px.line(df, x = 'Year',y='precip',title = 'Annual Maximum Precipitation')
    fig_max.update_traces(line_color = 'maroon')
    fig_max.update_xaxes(title_text = 'Year',gridcolor = 'whitesmoke')
    fig_max.update_yaxes(ticklabelposition="inside top", title= 'Annual Maximum Precipitation in mm',gridcolor = 'whitesmoke')
    fig_max.update_layout(margin = dict(l=25,r=25,t=25,b=25))
    fig_max.update_layout(plot_bgcolor = 'rgba(0,0,0,0)')
    fig_max.update_layout(title = "Annual Maximum Precipitation")
    return fig_max


def annual_min_precip_plot(df):
    fig_min = px.line(df, x = 'Year',y='precip',title = 'Annual Minimum Precipitation')
    fig_min.update_traces(line_color = 'blue')
    fig_min.update_xaxes(title_text = 'Year',gridcolor = 'whitesmoke')
    fig_min.update_yaxes(ticklabelposition="inside top", title= 'Annual Minimum Precipitation in mm',gridcolor = 'whitesmoke')
    fig_min.update_layout(margin = dict(l=25,r=25,t=25,b=25))
    fig_min.update_layout(plot_bgcolor = 'rgba(0,0,0,0)')
    fig_min.update_layout(title = "Annual Minimum Precipitation")
    return fig_min

def annual_avg_plot(df):
    fig_avg = px.line(df, x = 'Year',y='precip',title = 'Annual Average Precipitation')
    fig_avg.update_traces(line_color = 'dimgray')
    fig_avg.update_xaxes(title_text = 'Year',gridcolor = 'whitesmoke')
    fig_avg.update_yaxes(ticklabelposition="inside top", title= 'Annual Average Precipitation in mm',gridcolor = 'whitesmoke')
    fig_avg.update_layout(margin = dict(l=25,r=25,t=25,b=25))
    fig_avg.update_layout(plot_bgcolor = 'rgba(0,0,0,0)')
    fig_avg.update_layout(title = "Annual Average Precipitation")
    return fig_avg


@st.cache
def max_precip(precipitation_temp,option,start_year,end_year):
    maximum_precip_df = precipitation_temp.get_group(option)
    maximum_precip_df = date_split(maximum_precip_df)
    Annual_max_precip = maximum_precip_df.groupby('Year')['precip'].max()
    Annual_max_precip = Annual_max_precip.loc[str(start_year):str(end_year)]
    Annual_max_precip = Annual_max_precip.reset_index()
    return Annual_max_precip

@st.cache
def min_precip(precipitation_temp,option,start_year,end_year):
    minimum_precip_df = precipitation_temp.get_group(option)
    minimum_precip_df = date_split(minimum_precip_df)
    minimum_precip_df = minimum_precip_df.where(minimum_precip_df['precip']>0)
    minimum_precip_df = minimum_precip_df.groupby('Year')['precip'].min()
    minimum_precip_df = minimum_precip_df.loc[str(start_year):str(end_year)]
    minimum_precip_df = minimum_precip_df.reset_index()
    return minimum_precip_df

@st.cache
def avg_precip(precipitation_temp,option,start_year,end_year):
    avg_precip_df = precipitation_temp.get_group(option)
    avg_precip_df = date_split(avg_precip_df)
    avg_precip_df = avg_precip_df.groupby('Year')['precip'].mean()
    avg_precip_df_s_e = avg_precip_df.loc[str(start_year):str(end_year)]
    avg_precip_df_s_e = avg_precip_df_s_e.reset_index()
    return avg_precip_df_s_e
references/image.png
ADDED
requirements.txt
ADDED
@@ -0,0 +1,16 @@
altair==4.2.0
branca==0.5.0
folium==0.12.1.post1
geopy==2.2.0
numpy==1.21.5
pandarallel==1.6.1
pandas==1.4.2
Pillow==9.2.0
plotly==5.6.0
protobuf
seaborn==0.11.2
streamlit==1.11.0
streamlit_folium==0.6.13
google-cloud-storage==2.4.0
swifter
modin
stlib/__init__.py
ADDED
File without changes
stlib/__pycache__/__init__.cpython-39.pyc
ADDED
Binary file (150 Bytes)
stlib/__pycache__/precipitation.cpython-39.pyc
ADDED
Binary file (10.7 kB)
stlib/__pycache__/temperature.cpython-39.pyc
ADDED
Binary file (10.6 kB)
stlib/precipitation.py
ADDED
@@ -0,0 +1,493 @@
import pandas as pd
import numpy as np
import streamlit as st
import folium
import folium.plugins

from streamlit_folium import st_folium
from branca.element import Figure


from precipitation_function import date_split,lat_long_process_precp
from precipitation_function import concat_func, drop_dup_funct,lat_long_type
from precipitation_function import daily_precp_plot,start_end_date_ui,lat_long_ui,year_selection_ui
from precipitation_function import monthly_mean_plot, annual_max_precip_plot,daily_precp_data,cumulative_plot
from precipitation_function import annual_max_precip_plot,annual_min_precip_plot,annual_avg_plot,max_precip,min_precip,avg_precip,cumulative

from temperature_functions import convert_df, map_creation, search_func

def run():
    fig = Figure(width = 550,height = 350)

    nearest_lat_long = 0
    nn = 0
    start = pd.to_datetime('2001/01/01')
    end = pd.to_datetime('2019/12/31')

    #precipitation data
    # bucket_name = "timeseries_data_storage"
    # file_path_1 = "precip_data/precip1.zip"
    # file_path_2 = "precip_data/precip2.zip"
    # file_path_3 = "precip_data/precip3.zip"
    # file_path_4 = "precip_data/precip4.zip"
    # file_path_5 = "precip_data/precip5.zip"
    # file_path_6 = "precip_data/precip6.zip"
    # file_path_7 = "precip_data/precip7.zip"
    # file_path_8 = "precip_data/precip8.zip"
    # file_path_9 = "precip_data/precip9.zip"
    # file_path_10 = "precip_data/precip10.zip"
    # file_path_11 = "precip_data/precip11.zip"
    # file_path_12 = "precip_data/precip12.zip"
    # file_path_13 = "precip_data/precip13.zip"
    # file_path_14 = "precip_data/precip14.zip"
    # file_path_15 = "precip_data/precip15.zip"
    # file_path_16 = "precip_data/precip16.zip"



    precipitation_1_og = pd.read_csv('historicalData/precip1.csv')
    precipitation_2_og = pd.read_csv('historicalData/precip2.csv')
    precipitation_3_og = pd.read_csv('historicalData/precip3.csv')
    precipitation_4_og = pd.read_csv('historicalData/precip4.csv')
    precipitation_5_og = pd.read_csv('historicalData/precip5.csv')
    precipitation_6_og = pd.read_csv('historicalData/precip6.csv')
    precipitation_7_og = pd.read_csv('historicalData/precip7.csv')
    precipitation_8_og = pd.read_csv('historicalData/precip8.csv')
    precipitation_9_og = pd.read_csv('historicalData/precip9.csv')
    precipitation_10_og = pd.read_csv('historicalData/precip10.csv')
    precipitation_11_og = pd.read_csv('historicalData/precip11.csv')
    precipitation_12_og = pd.read_csv('historicalData/precip12.csv')
    precipitation_13_og = pd.read_csv('historicalData/precip13.csv')
    precipitation_14_og = pd.read_csv('historicalData/precip14.csv')
    precipitation_15_og = pd.read_csv('historicalData/precip15.csv')
    precipitation_16_og = pd.read_csv('historicalData/precip16.csv')


    lat_long_precipitation_1 = precipitation_1_og['lat_long']
    lat_long_precipitation_2 = precipitation_2_og['lat_long']
    lat_long_precipitation_3 = precipitation_3_og['lat_long']
    lat_long_precipitation_4 = precipitation_4_og['lat_long']
    lat_long_precipitation_5 = precipitation_5_og['lat_long']
    lat_long_precipitation_6 = precipitation_6_og['lat_long']
    lat_long_precipitation_7 = precipitation_7_og['lat_long']
    lat_long_precipitation_8 = precipitation_8_og['lat_long']
    lat_long_precipitation_9 = precipitation_9_og['lat_long']
    lat_long_precipitation_10 = precipitation_10_og['lat_long']
    lat_long_precipitation_11 = precipitation_11_og['lat_long']
    lat_long_precipitation_12 = precipitation_12_og['lat_long']
    lat_long_precipitation_13 = precipitation_13_og['lat_long']
    lat_long_precipitation_14 = precipitation_14_og['lat_long']
    lat_long_precipitation_15 = precipitation_15_og['lat_long']
    lat_long_precipitation_16 = precipitation_16_og['lat_long']



    lat_long_precipitation_list_1 = concat_func(lat_long_precipitation_1,lat_long_precipitation_2,lat_long_precipitation_3,lat_long_precipitation_4)
    lat_long_precipitation_list_2 = concat_func(lat_long_precipitation_5,lat_long_precipitation_6,lat_long_precipitation_7,lat_long_precipitation_8)
    lat_long_precipitation_list_3 = concat_func(lat_long_precipitation_9,lat_long_precipitation_10,lat_long_precipitation_11,lat_long_precipitation_12)
    lat_long_precipitation_list_4 = concat_func(lat_long_precipitation_13,lat_long_precipitation_14,lat_long_precipitation_15,lat_long_precipitation_16)
    lat_long_precipitation_list = concat_func(lat_long_precipitation_list_1,lat_long_precipitation_list_2,lat_long_precipitation_list_3,lat_long_precipitation_list_4)


    precipitation_conc_1 = concat_func(precipitation_1_og,precipitation_2_og,precipitation_3_og,precipitation_4_og)
    precipitation_conc_2 = concat_func(precipitation_5_og,precipitation_6_og,precipitation_7_og,precipitation_8_og)
    precipitation_conc_3 = concat_func(precipitation_9_og,precipitation_10_og,precipitation_11_og,precipitation_12_og)
    precipitation_conc_4 = concat_func(precipitation_13_og,precipitation_14_og,precipitation_15_og,precipitation_16_og)
    precipitation = concat_func(precipitation_conc_1,precipitation_conc_2,precipitation_conc_3,precipitation_conc_4)

    # precipitation = precipitation.drop([Unnamed:0.2,Unnamed:0.1,Unnamed:0],axis =1)
    precipitation.drop(precipitation.filter(regex="Unnamed"),axis=1, inplace=True)
    # st.write(precipitation.head())
    precipitation_temp = precipitation.copy()
    precipitation_temp = precipitation_temp.groupby('lat_long')

    with st.sidebar:
        data_type = st.radio("Select Data Type to View",
            ('Daily Precipitation','Cumulative Monthly Precipitation','Monthly Average Precipitation',
            'Annual Maximum Precipitation','Annual Minimum Precipitation',
            'Annual Average Precipitation','Annual Max, Min, & Average Precipitation'))

    if data_type == "Cumulative Monthly Precipitation":
        col1,col2 = st.columns(2)
        with col1:
            start_year,end_year = year_selection_ui(99,100)
        with col2:
            #calling the ui method to create a lat long input UI
            latitude_input,longitude_input = lat_long_ui(300,400)


        latitude_input,longitude_input= float(latitude_input),float(longitude_input)
        nearest_lat_long = search_func(latitude_input,longitude_input,lat_long_precipitation_list,precipitation)
        option = lat_long_type(nearest_lat_long)

        with col1:
            st.write("**Nearest latitude and longitude from the entered latitude longitude is :**",option)

        df_cumulative = precipitation_temp.get_group(option)
        df_cumulative = date_split(df_cumulative)

        lat = float(option.split(',')[0])
        long = float(option.split(',')[1])
        m = map_creation(lat,long,0,0)
        last_click = m['last_clicked']

        with col2:
            if last_click is not None:
                clicked_lat = last_click['lat']
                clicked_long = last_click['lng']
                st.markdown("**Last Clicked Latitude Longitude point is:**")
                st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
                nn = search_func(clicked_lat,clicked_long,lat_long_precipitation_list,precipitation)
                nn = lat_long_type(nn)
                st.write("**Nearest Latitude and Longitude is:**",nn)
                st.markdown(" ")

            else:
                st.markdown("**Click on the map to fetch the Latitude and Longitude**")

        if last_click is not None:
            clicked_lat = last_click['lat']
            clicked_long = last_click['lng']
            map_creation(lat,long,clicked_lat,clicked_long)

        df_c = cumulative(df_cumulative,start_year,end_year)
        fig_cumulative = cumulative_plot(df_c)
        st.plotly_chart(fig_cumulative,use_container_width=True)
        c1,c2,c3,c4,c5 = st.columns(5)
        with c3:
            st.download_button("Download Data",data = convert_df(df_c),
                file_name='cumulative_precipitation.csv',
                mime='text/csv',)


    elif data_type == "Daily Precipitation":
        col1,col2 = st.columns(2)
        with col1:
            #calling the ui method to create a start end date input UI
            start,end = start_end_date_ui(start,end,11,22)
        with col2:
            latitude_input,longitude_input = lat_long_ui(1,2)


        latitude_input,longitude_input= float(latitude_input),float(longitude_input)
        nearest_lat_long = search_func(latitude_input,longitude_input,lat_long_precipitation_list,precipitation)
        option = lat_long_type(nearest_lat_long)

        with col1:
            st.write("**Nearest latitude and longitude from the entered latitude longitude is :**",option)


        lat = float(option.split(',')[0])
        long = float(option.split(',')[1])
        m = map_creation(lat,long,0,0)
        last_click = m['last_clicked']

        with col2:
            if last_click is not None:
                clicked_lat = last_click['lat']
                clicked_long = last_click['lng']
                st.markdown("**Last Clicked Latitude Longitude point is:**")
                st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
                nn = search_func(clicked_lat,clicked_long,lat_long_precipitation_list,precipitation)
                nn = lat_long_type(nn)
                st.write("**Nearest Latitude and Longitude is:**",nn)
                st.markdown(" ")

            else:
                st.markdown("**Click on the map to fetch the Latitude and Longitude**")

        if last_click is not None:
            clicked_lat = last_click['lat']
            clicked_long = last_click['lng']
            map_creation(lat,long,clicked_lat,clicked_long)


        daily_precp_df = daily_precp_data(precipitation_temp,start,end,option)
        # st.write(daily_precp_df.head())
        daily_precp_df.drop_duplicates(inplace = True)
        daily_df = daily_precp_df.loc[str(start):str(end)]
        # st.write(daily_df)
        fig_daily_precp = daily_precp_plot(daily_df)
        st.plotly_chart(fig_daily_precp,use_container_width = True)

        c1,c2,c3,c4,c5 = st.columns(5)
        with c3:
            st.download_button("Download Data",data = convert_df(daily_precp_df),
                file_name='daily_precipitation.csv',
                mime='text/csv',)


    elif data_type == 'Monthly Average Precipitation':
        col1,col2 = st.columns(2)
        with col1:
            start_year,end_year = year_selection_ui(9,10)
        with col2:
            #calling the ui method to create a lat long input UI
            latitude_input,longitude_input = lat_long_ui(3,4)

        latitude_input,longitude_input= float(latitude_input),float(longitude_input)
        nearest_lat_long = search_func(latitude_input,longitude_input,lat_long_precipitation_list,precipitation)
        option = lat_long_type(nearest_lat_long)

        lat = float(option.split(',')[0])
        long = float(option.split(',')[1])
        m = map_creation(lat,long,0,0)

        last_click = m['last_clicked']

        with col1:
            st.write("**Nearest latitude and longitude from the entered latitude longitude is :**",option)

        with col2:
            if last_click is not None:
                clicked_lat = last_click['lat']
                clicked_long = last_click['lng']
                st.markdown("**Last Clicked Latitude Longitude point is:**")
                st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
                nn = search_func(clicked_lat,clicked_long,lat_long_precipitation_list,precipitation)
                nn = lat_long_type(nn)
                st.write("**Nearest Latitude and Longitude is:**",nn)
                st.markdown(" ")

            else:
                st.markdown("**Click on the map to fetch the Latitude and Longitude**")

        if last_click is not None:
            clicked_lat = last_click['lat']
            clicked_long = last_click['lng']
            map_creation(lat,long,clicked_lat,clicked_long)


        precipitation_monthly_avg = precipitation_temp.get_group(option)
        precipitation_monthly_avg = date_split(precipitation_monthly_avg)
        precipt_monthly_avg_df = precipitation_monthly_avg.groupby(['Year','Month'],as_index=False)['precip'].mean()
        precipt_monthly_avg_df = precipt_monthly_avg_df.set_index('Year')
        precipitation_monthly_avg_df = precipt_monthly_avg_df.loc[str(start_year):str(end_year)]
        precipitation_monthly_avg_df.reset_index(inplace = True)
        mean_chart_plot = monthly_mean_plot(precipitation_monthly_avg_df)
        st.altair_chart(mean_chart_plot)


        c1,c2,c3,c4,c5 = st.columns(5)
        with c3:
            st.download_button("Download Data",data = convert_df(precipitation_monthly_avg_df),
                file_name='Monthly_avg_precipitation.csv',
                mime='text/csv',)


    elif data_type == 'Annual Maximum Precipitation':
        col1,col2 = st.columns(2)
        with col1:
            start_year,end_year = year_selection_ui(11,12)
        with col2:
            #calling the ui method to create a lat long input UI
            latitude_input,longitude_input = lat_long_ui(5,6)

        latitude_input,longitude_input= float(latitude_input),float(longitude_input)
        nearest_lat_long = search_func(latitude_input,longitude_input,lat_long_precipitation_list,precipitation)
        option = lat_long_type(nearest_lat_long)

        lat = float(option.split(',')[0])
        long = float(option.split(',')[1])
        m = map_creation(lat,long,0,0)
        last_click = m['last_clicked']

        with col1:
            st.write("**Nearest latitude and longitude from the entered latitude longitude is :**",option)

        with col2:
            if last_click is not None:
                clicked_lat = last_click['lat']
                clicked_long = last_click['lng']
                st.markdown("**Last Clicked Latitude Longitude point is:**")
                st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
                nn = search_func(clicked_lat,clicked_long,lat_long_precipitation_list,precipitation)
                nn = lat_long_type(nn)
                st.write("**Nearest Latitude and Longitude is:**",nn)
                st.markdown(" ")

            else:
                st.markdown("**Click on the map to fetch the Latitude and Longitude**")

        if last_click is not None:
            clicked_lat = last_click['lat']
            clicked_long = last_click['lng']
            map_creation(lat,long,clicked_lat,clicked_long)

        Annual_max_precip = max_precip(precipitation_temp,option,start_year,end_year)
        fig_max = annual_max_precip_plot(Annual_max_precip)
        st.plotly_chart(fig_max,use_container_width=True)

        c1,c2,c3,c4,c5 = st.columns(5)
        with c3:
            st.download_button("Download Data",data = convert_df(Annual_max_precip),
                file_name='Annual_max_precipitation.csv',
                mime='text/csv',)


    elif data_type == 'Annual Minimum Precipitation':
        col1,col2 = st.columns(2)
        with col1:
            start_year,end_year = year_selection_ui(13,14)
        with col2:
            #calling the ui method to create a lat long input UI
            latitude_input,longitude_input = lat_long_ui(7,8)

        latitude_input,longitude_input= float(latitude_input),float(longitude_input)
        nearest_lat_long = search_func(latitude_input,longitude_input,lat_long_precipitation_list,precipitation)
        option = lat_long_type(nearest_lat_long)

        lat = float(option.split(',')[0])
        long = float(option.split(',')[1])
        m = map_creation(lat,long,0,0)
        last_click = m['last_clicked']

        with col1:
            st.write("**Nearest latitude and longitude from the entered latitude longitude is :**",option)

        with col2:
            if last_click is not None:
                clicked_lat = last_click['lat']
                clicked_long = last_click['lng']
                st.markdown("**Last Clicked Latitude Longitude point is:**")
                st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
                nn = search_func(clicked_lat,clicked_long,lat_long_precipitation_list,precipitation)
                nn = lat_long_type(nn)
                st.write("**Nearest Latitude and Longitude is:**",nn)
                st.markdown(" ")
            else:
                st.markdown("**Click on the map to fetch the Latitude and Longitude**")

        if last_click is not None:
            clicked_lat = last_click['lat']
            clicked_long = last_click['lng']
            map_creation(lat,long,clicked_lat,clicked_long)


        minimum_precip_df = min_precip(precipitation_temp,option,start_year,end_year)
        fig_min = annual_min_precip_plot(minimum_precip_df)
        st.plotly_chart(fig_min,use_container_width = True)


        c1,c2,c3,c4,c5 = st.columns(5)
        with c3:
            st.download_button("Download Data",data = convert_df(minimum_precip_df),
                file_name='Annual_minimum_precipitation.csv',
                mime='text/csv',)



    elif data_type == 'Annual Average Precipitation':
        col1,col2 = st.columns(2)
        with col1:
            start_year,end_year = year_selection_ui(15,16)
        with col2:
            #calling the ui method to create a lat long input UI
            latitude_input,longitude_input = lat_long_ui(9,10)

        latitude_input,longitude_input= float(latitude_input),float(longitude_input)
        nearest_lat_long = search_func(latitude_input,longitude_input,lat_long_precipitation_list,precipitation)
        option = lat_long_type(nearest_lat_long)

        lat = float(option.split(',')[0])
        long = float(option.split(',')[1])
        m = map_creation(lat,long,0,0)
        last_click = m['last_clicked']

        with col1:
            st.write("**Nearest latitude and longitude from the entered latitude longitude is :**",option)

        with col2:
            if last_click is not None:
                clicked_lat = last_click['lat']
                clicked_long = last_click['lng']
                st.markdown("**Last Clicked Latitude Longitude point is:**")
                st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
                nn = search_func(clicked_lat,clicked_long,lat_long_precipitation_list,precipitation)
                nn = lat_long_type(nn)
                st.write("**Nearest Latitude and Longitude is:**",nn)
                st.markdown(" ")
            else:
                st.markdown("**Click on the map to fetch the Latitude and Longitude**")

        if last_click is not None:
            clicked_lat = last_click['lat']
            clicked_long = last_click['lng']
            map_creation(lat,long,clicked_lat,clicked_long)

        avg_precip_df_se = avg_precip(precipitation_temp,option,start_year,end_year)
        fig_avg = annual_avg_plot(avg_precip_df_se)
        st.plotly_chart(fig_avg,use_container_width = True)

        c1,c2,c3,c4,c5 = st.columns(5)
        with c3:
            st.download_button("Download Data",data = convert_df(avg_precip_df_se),
                file_name='Annual_avg_precipitation.csv',
                mime='text/csv',)

    elif data_type == 'Annual Max, Min, & Average Precipitation':
        col1,col2 = st.columns(2)
        with col1:
            start_year,end_year = year_selection_ui(17,18)

        with col2:
            latitude_input,longitude_input = lat_long_ui(11,12)

        latitude_input,longitude_input= float(latitude_input),float(longitude_input)
        nearest_lat_long = search_func(latitude_input,longitude_input,lat_long_precipitation_list,precipitation)
        option = lat_long_type(nearest_lat_long)

        lat = float(option.split(',')[0])
        long = float(option.split(',')[1])
        m = map_creation(lat,long,0,0)

        last_click = m['last_clicked']
        with col1:
            st.write("**Nearest latitude and longitude from the entered latitude longitude is :**",option)

        with col2:
            if last_click is not None:
                clicked_lat = last_click['lat']
                clicked_long = last_click['lng']
                st.markdown("**Last Clicked Latitude Longitude point is:**")
                st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
                nn = search_func(clicked_lat,clicked_long,lat_long_precipitation_list,precipitation)
                nn = lat_long_type(nn)
                st.write("**Nearest Latitude and Longitude is:**",nn)
                st.markdown(" ")
            else:
                st.markdown("**Click on the map to fetch the Latitude and Longitude**")

        if last_click is not None:
            clicked_lat = last_click['lat']
            clicked_long = last_click['lng']
            map_creation(lat,long,clicked_lat,clicked_long)

        col11,col22,col33 = st.columns(3)
        with col11:
            annual_max_data = max_precip(precipitation_temp,option,start_year,end_year)
            fig_max_2 = annual_max_precip_plot(annual_max_data)
            st.plotly_chart(fig_max_2,use_container_width=True)
            st.download_button("Download Data",data = convert_df(annual_max_data),
                file_name='Annual_max_precipitation_data.csv',
                mime='text/csv',)

        with col22:
            annual_min_data = min_precip(precipitation_temp,option,start_year,end_year)
            fig_min_2 = annual_min_precip_plot(annual_min_data)
            st.plotly_chart(fig_min_2,use_container_width = True)
            st.download_button("Download Data",data = convert_df(annual_min_data),
                file_name='Annual_min_precipitation_data.csv',
                mime='text/csv',)

        with col33:
            annual_avg_data = avg_precip(precipitation_temp,option,start_year,end_year)
            fig_avg_2 = annual_avg_plot(annual_avg_data)
            st.plotly_chart(fig_avg_2, use_container_width = True)
            st.download_button("Download Data",data = convert_df(annual_avg_data),
                file_name='Annual_avg_precipitation_data.csv',
                mime='text/csv',)


if __name__ == '__main__':
    run()
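The sixteen pd.read_csv calls and the tiered concat_func chains above repeat the same work for every shard. Purely as an editorial aside, the loading step could be written as a loop; this is a sketch assuming the same historicalData/precipN.csv layout and that a plain pd.concat is acceptable in place of the cached concat_func:

import pandas as pd

# Load precip1.csv .. precip16.csv and build the same two objects the code
# above constructs by hand: the combined frame and the lat_long series.
frames = [pd.read_csv(f'historicalData/precip{i}.csv') for i in range(1, 17)]
precipitation = pd.concat(frames, ignore_index=True)
lat_long_precipitation_list = precipitation['lat_long']
precipitation.drop(precipitation.filter(regex="Unnamed"), axis=1, inplace=True)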
stlib/temperature.py
ADDED
@@ -0,0 +1,484 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
|
3 |
+
#standard imports
|
4 |
+
import pandas as pd
|
5 |
+
import numpy as np
|
6 |
+
import streamlit as st
|
7 |
+
import swifter
|
8 |
+
swifter.register_modin()
|
9 |
+
from typing import Dict, List, Optional
|
10 |
+
|
11 |
+
from temperature_functions import annual_avg, df_date_split, lat_long_process,lat_long_list_creation
|
12 |
+
from temperature_functions import group_df, daily_avg, annual_avg_plot, avg_temp_plot, annual_min_plot, annual_max_plot
|
13 |
+
from temperature_functions import daily_avg_calc, daily_avg_plot, monthly_mean_calc, selecting_mean, plot_mean_data, max_temp_plot
|
14 |
+
from temperature_functions import min_temp_plot, convert_df, map_creation, search_func
|
15 |
+
import folium
|
16 |
+
from streamlit_folium import st_folium
|
17 |
+
from branca.element import Figure
|
18 |
+
|
19 |
+
|
20 |
+
|
21 |
+
import warnings
|
22 |
+
warnings.filterwarnings("ignore")
|
23 |
+
|
24 |
+
#For parallel processing
|
25 |
+
from pandarallel import pandarallel
|
26 |
+
pandarallel.initialize(progress_bar=True)
|
27 |
+
|
28 |
+
|
29 |
+
|
30 |
+
def run():
|
31 |
+
|
32 |
+
#nearest neighbor
|
33 |
+
nn = 0
|
34 |
+
# bucket_name = 'timeseries_data_storage'
|
35 |
+
# file_path1 = 'temperature1.zip'
|
36 |
+
# file_path2 = 'temperature2.zip'
|
37 |
+
tempe1 = pd.read_csv('historicalData/temperature1.csv')
|
38 |
+
tempe2 = pd.read_csv('historicalData/temperature2.csv')
|
39 |
+
temperatureDF = pd.concat([tempe1,tempe2],axis =0 )
|
40 |
+
|
41 |
+
#creating copy of our dataframe
|
42 |
+
temperatureDataFrame = temperatureDF.copy()
|
43 |
+
|
44 |
+
#applying the function on the dataframe
|
45 |
+
temperatureDataFrame = lat_long_process(temperatureDataFrame)
|
46 |
+
|
47 |
+
#applying the lat long creation function on the dataframe
|
48 |
+
lat_long_list = lat_long_list_creation(temperatureDataFrame)
|
49 |
+
|
50 |
+
#applying the function on the dataframe
|
51 |
+
result = group_df(temperatureDataFrame,lat_long_list)
|
52 |
+
|
53 |
+
#Dataframe containing daily average
|
54 |
+
result['daily_avg'] = result.swifter.apply(lambda x: daily_avg(x.tmin,x.tmax),axis=1)
|
55 |
+
|
56 |
+
|
57 |
+
### Dashboard Creation
|
58 |
+
#creating the Sidebar Menu
|
59 |
+
with st.sidebar.empty():
|
60 |
+
with st.container():
|
61 |
+
data_type = st.radio("Select Data Type to View",
|
62 |
+
('Daily Average','Monthly Mean Temperature','Annual Maximum Temperature','Annual Minimum Temperature','Annual Average Temperature','Annual Max, Min, & Average Temperature'))
|
63 |
+
|
64 |
+
#dividing the screen column into 2 sections for daily_average
|
65 |
+
if data_type == 'Daily Average':
|
66 |
+
ab = st.empty()
|
67 |
+
a = st.empty()
|
68 |
+
b = st.empty()
|
69 |
+
|
70 |
+
start = pd.to_datetime('2001/01/01')
|
71 |
+
end = pd.to_datetime('2019/12/31')
|
72 |
+
|
73 |
+
with ab:
|
74 |
+
col1,col2 = st.columns(2)
|
75 |
+
#creating dropdown menu for entering start and end date
|
76 |
+
with col1:
|
77 |
+
st.markdown('**Enter Start Date**')
|
78 |
+
start = st.date_input("",value = start,key = 1)
|
79 |
+
if start < pd.to_datetime('2001/01/01'):
|
80 |
+
st.write('Start date should not be less than 2001/01/01')
|
81 |
+
|
82 |
+
st.markdown('**Enter End Date**')
|
83 |
+
end = st.date_input("",value = end, key = 2)
|
84 |
+
if end > pd.to_datetime('2019/12/31'):
|
85 |
+
st.write('End date should not be greater than 2019/12/31')
|
86 |
+
#dropdown menu for selecting lat long values
|
87 |
+
with col2:
|
88 |
+
st.markdown('**Select the Latitude and Longitude**')
|
89 |
+
option = st.selectbox("",pd.DataFrame(lat_long_list),key = 3)
|
90 |
+
lat = float(option.split(',')[0])
|
91 |
+
long = float(option.split(',')[1])
|
92 |
+
m = map_creation(lat,long,0,0)
|
93 |
+
last_click = m['last_clicked']
|
94 |
+
with a:
|
95 |
+
dataframe_s_e = daily_avg_calc(result,option,start,end)
|
96 |
+
fig = daily_avg_plot(dataframe_s_e,option)
|
97 |
+
st.plotly_chart(fig,use_container_width = True)
|
98 |
+
with b:
|
99 |
+
col11,col22,col33 = st.columns(3)
|
100 |
+
with col22:
|
101 |
+
st.download_button("Download Data",data = convert_df(dataframe_s_e),
|
102 |
+
file_name='daily_average_temperature.csv',
|
103 |
+
mime='text/csv',)
|
104 |
+
#finding the clicked lat long and the nearest lat long
|
105 |
+
with col2:
|
106 |
+
if last_click is not None:
|
107 |
+
clicked_lat = last_click['lat']
|
108 |
+
clicked_long = last_click['lng']
|
109 |
+
st.markdown("**Last Clicked Latitude Longitude point is:**")
|
110 |
+
st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
|
111 |
+
nn = search_func(clicked_lat,clicked_long,lat_long_list,result)
|
112 |
+
st.write("**Nearest Latitude and Longitude is:**",nn)
|
113 |
+
|
114 |
+
else:
|
115 |
+
st.markdown("**Click on the map to fetch the Latitude and Longitude**")
|
116 |
+
st.markdown(" ")
|
117 |
+
st.markdown(" ")
|
118 |
+
if last_click is not None:
|
119 |
+
clicked_lat = last_click['lat']
|
120 |
+
clicked_long = last_click['lng']
|
121 |
+
map_creation(lat,long,clicked_lat,clicked_long)
|
122 |
+
|
123 |
+
|
124 |
+
#plotting the nearest neighbors graph
|
125 |
+
if nn != 0 :
|
126 |
+
df = daily_avg_calc(result,nn,start,end)
|
127 |
+
fig_nn = daily_avg_plot(df,nn)
|
128 |
+
with a:
|
129 |
+
st.plotly_chart(fig_nn,use_container_width = True)
|
130 |
+
with b:
|
131 |
+
col11,col22,col33 = st.columns(3)
|
132 |
+
with col22:
|
133 |
+
st.download_button("Download Data",data = convert_df(df),
|
134 |
+
file_name='daily_average_temperature_nn.csv',
|
135 |
+
mime='text/csv',)
|
136 |
+
|
137 |
+
|
138 |
+
elif data_type == 'Monthly Mean Temperature':
|
139 |
+
#creating the UI for selecting the year,lat long
|
140 |
+
c = st.empty()
|
141 |
+
d = st.empty()
|
142 |
+
e = st.empty()
|
143 |
+
with c:
|
144 |
+
col1,col2 = st.columns(2)
|
145 |
+
with col1:
|
146 |
+
st.markdown('**Select the Start Year**')
|
147 |
+
start_year = st.selectbox('',
|
148 |
+
('2001','2002','2003','2004','2005','2006','2007','2008','2009',
|
149 |
+
'2010','2011','2012','2013','2014','2015','2016','2017','2018','2019'),key = 4)
|
150 |
+
|
151 |
+
st.markdown('**Select the End Year**')
|
152 |
+
end_year = st.selectbox('',
|
153 |
+
('2001','2002','2003','2004','2005','2006','2007','2008','2009',
|
154 |
+
'2010','2011','2012','2013','2014','2015','2016','2017','2018','2019'),key = 5)
|
155 |
+
with col2:
|
156 |
+
st.markdown('**Select the Latitude and Longitude**')
|
157 |
+
option_mean = st.selectbox("",pd.DataFrame(lat_long_list),key = 6)
|
158 |
+
|
159 |
+
|
160 |
+
|
161 |
+
#Finding the monthly mean/avg temperature
|
162 |
+
temperature_monthly_df = result.copy()
|
163 |
+
#function for calculating mean
|
164 |
+
df_mean = monthly_mean_calc(temperature_monthly_df,lat_long_list)
|
165 |
+
#function for selecting the specified group of lat long along with the start and end date
|
166 |
+
df = selecting_mean(df_mean,option_mean,start_year,end_year)
|
167 |
+
#function for plotting the mean data
|
168 |
+
mean_chart = plot_mean_data(df,option_mean)
|
169 |
+
with d:
|
170 |
+
st.altair_chart(mean_chart)
|
171 |
+
|
172 |
+
#for map
|
173 |
+
lat_mean = float(option_mean.split(',')[0])
|
174 |
+
long_mean = float(option_mean.split(',')[1])
|
175 |
+
|
176 |
+
with col2:
|
177 |
+
m_mean = map_creation(lat_mean,long_mean,0,0)
|
178 |
+
last_click = m_mean['last_clicked']
|
179 |
+
if last_click is not None:
|
180 |
+
clicked_lat = last_click['lat']
|
181 |
+
clicked_long = last_click['lng']
|
182 |
+
st.markdown("**Last Clicked Latitude Longitude point is:**")
|
183 |
+
st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
|
184 |
+
nn = search_func(clicked_lat,clicked_long,lat_long_list,result)
|
185 |
+
st.write("**Nearest Latitude and Longitude is:**",nn)
|
186 |
+
|
187 |
+
|
188 |
+
else:
|
189 |
+
st.markdown("**Click on the map to fetch the Latitude and Longitude**")
|
190 |
+
st.markdown(" ")
|
191 |
+
st.markdown(" ")
|
192 |
+
|
193 |
+
if last_click is not None:
|
194 |
+
clicked_lat = last_click['lat']
|
195 |
+
clicked_long = last_click['lng']
|
196 |
+
map_creation(lat_mean,long_mean,clicked_lat,clicked_long)
|
197 |
+
|
198 |
+
with e:
|
199 |
+
col11,col22,col33 = st.columns(3)
|
200 |
+
with col22:
|
201 |
+
st.download_button("Download Data",data = convert_df(df),
|
202 |
+
file_name='Monthly_mean_temperature.csv',
|
203 |
+
mime='text/csv',)
|
204 |
+
|
205 |
+
st.markdown(" ")
|
206 |
+
|
207 |
+
#code for plotting the nearest neighbors data
|
208 |
+
|
209 |
+
if nn != 0 :
|
210 |
+
df_nn_mean = selecting_mean(df_mean,nn,start_year,end_year)
|
211 |
+
fig_nn_mean = plot_mean_data(df_nn_mean,nn)
|
212 |
+
with d:
|
213 |
+
st.altair_chart(fig_nn_mean)
|
214 |
+
with e:
|
215 |
+
col11,col22,col33 = st.columns(3)
|
216 |
+
with col22:
|
217 |
+
st.download_button("Download Data",data = convert_df(df_nn_mean),
|
218 |
+
file_name='Monthly_mean_temperature_nn.csv',
|
219 |
+
mime='text/csv',)
|
220 |
+
|
221 |
+
#code for annual maximum temperature
|
222 |
+
elif data_type == 'Annual Maximum Temperature':
|
223 |
+
g = st.empty()
|
224 |
+
h = st.empty()
|
225 |
+
i = st.empty()
|
226 |
+
Annual_temp = result.copy()
|
227 |
+
with g:
|
228 |
+
col1,col2 = st.columns(2)
|
229 |
+
with col1:
|
230 |
+
st.markdown('**Select the Latitude and Longitude**')
|
231 |
+
option_annual_temp = st.selectbox("",pd.DataFrame(lat_long_list),key = 7)
|
232 |
+
|
233 |
+
df_max = annual_max_plot(Annual_temp,option_annual_temp,lat_long_list)
|
234 |
+
fig_max = max_temp_plot(df_max,option_annual_temp)
|
235 |
+
with h:
|
236 |
+
st.plotly_chart(fig_max, use_container_width=True)
|
237 |
+
|
238 |
+
#For maps
|
239 |
+
lat_annual_max = float(option_annual_temp.split(',')[0])
|
240 |
+
long_annual_max = float(option_annual_temp.split(',')[1])
|
241 |
+
|
242 |
+
with g:
|
243 |
+
with col2:
|
244 |
+
m_max = map_creation(lat_annual_max,long_annual_max,0,0)
|
245 |
+
last_click = m_max['last_clicked']
|
246 |
+
if last_click is not None:
|
247 |
+
clicked_lat = last_click['lat']
|
248 |
+
clicked_long = last_click['lng']
|
249 |
+
st.markdown("**Last Clicked Latitude Longitude point is:**")
|
250 |
+
st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
|
251 |
+
nn = search_func(clicked_lat,clicked_long,lat_long_list,result)
|
252 |
+
st.write("**Nearest Latitude and Longitude is:**",nn)
|
253 |
+
else:
|
254 |
+
st.markdown("**Click on the map to fetch the Latitude and Longitude**")
|
255 |
+
st.markdown(" ")
|
256 |
+
st.markdown(" ")
|
257 |
+
|
258 |
+
if last_click is not None:
|
259 |
+
clicked_lat = last_click['lat']
|
260 |
+
clicked_long = last_click['lng']
|
261 |
+
map_creation(lat_annual_max,long_annual_max,clicked_lat,clicked_long)
|
262 |
+
with i:
|
263 |
+
col11,col22,col33 = st.columns(3)
|
264 |
+
with col22:
|
265 |
+
st.download_button("Download Data",data = convert_df(df_max),
|
266 |
+
file_name='Annual_maximum_temperature.csv',
|
267 |
+
mime='text/csv',)
|
268 |
+
|
269 |
+
with st.container():
|
270 |
+
if nn!=0:
|
271 |
+
df_max_temp_nn = annual_max_plot(Annual_temp,nn,lat_long_list)
|
272 |
+
fig_max_nn = max_temp_plot(df_max_temp_nn,nn)
|
273 |
+
with h:
|
274 |
+
st.plotly_chart(fig_max_nn, use_container_width = True)
|
275 |
+
with i:
|
276 |
+
col11,col22,col33 = st.columns(3)
|
277 |
+
with col22:
|
278 |
+
st.download_button("Download Data",data = convert_df(df_max_temp_nn),
|
279 |
+
file_name='Annual_maximum_temperature_nn.csv',
|
280 |
+
mime='text/csv',)
|
281 |
+
|
282 |
+
|
283 |
+
elif data_type == 'Annual Minimum Temperature':
|
284 |
+
j = st.empty()
|
285 |
+
k = st.empty()
|
286 |
+
l = st.empty()
|
287 |
+
|
288 |
+
with j:
|
289 |
+
col1,col2 = st.columns(2)
|
290 |
+
with col1:
|
291 |
+
st.markdown('**Select the Latitude and Longitude**')
|
292 |
+
option_annual_min_temp = st.selectbox("", pd.DataFrame(lat_long_list),key = 8)
|
293 |
+
|
294 |
+
Annual_temp_min = result.copy()
|
295 |
+
df_min = annual_min_plot(Annual_temp_min,option_annual_min_temp,lat_long_list)
|
296 |
+
fig_min = min_temp_plot(df_min,option_annual_min_temp)
|
297 |
+
|
298 |
+
with k:
|
299 |
+
st.plotly_chart(fig_min,use_container_width = True)
|
300 |
+
|
301 |
+
#For maps
|
302 |
+
lat_min = float(option_annual_min_temp.split(',')[0])
|
303 |
+
long_min = float(option_annual_min_temp.split(',')[1])
|
304 |
+
|
305 |
+
with j:
|
306 |
+
with col2:
|
307 |
+
m_min = map_creation(lat_min,long_min,0,0)
|
308 |
+
last_click = m_min['last_clicked']
|
309 |
+
if last_click is not None:
|
310 |
+
clicked_lat = last_click['lat']
|
311 |
+
clicked_long = last_click['lng']
|
312 |
+
st.markdown("**Last Clicked Latitude Longitude point is:**")
|
313 |
+
st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
|
314 |
+
nn = search_func(clicked_lat,clicked_long,lat_long_list,result)
|
315 |
+
st.write("**Nearest Latitude and Longitude is:**",nn)
|
316 |
+
|
317 |
+
else:
|
318 |
+
st.markdown("**Click on the map to fetch the Latitude and Longitude**")
|
319 |
+
st.markdown(" ")
|
320 |
+
st.markdown(" ")
|
321 |
+
with l:
|
322 |
+
col100,col200,col300 = st.columns(3)
|
323 |
+
with col200:
|
324 |
+
st.download_button("Download Data",data = convert_df(df_min),
|
325 |
+
file_name='Annual_minimum_temperature.csv',
|
326 |
+
mime='text/csv',)
|
327 |
+
|
328 |
+
with st.container():
|
329 |
+
if nn!=0:
|
330 |
+
nn_min_temp_df = annual_min_plot(Annual_temp_min,nn,lat_long_list)
|
331 |
+
fig_min_nn = min_temp_plot(nn_min_temp_df,nn)
|
332 |
+
with k:
|
333 |
+
st.plotly_chart(fig_min_nn, use_container_width = True)
|
334 |
+
with l:
|
335 |
+
col11,col22,col33 = st.columns(3)
|
336 |
+
with col22:
|
337 |
+
st.download_button("Download Data",data = convert_df(nn_min_temp_df),
|
338 |
+
file_name='Annual_minimum_temperature_nn.csv',
|
339 |
+
mime='text/csv',)
|
340 |
+
|
341 |
+
|
342 |
+
elif data_type == 'Annual Average Temperature':
|
343 |
+
m = st.empty()
|
344 |
+
n = st.empty()
|
345 |
+
o = st.empty()
|
346 |
+
annual_avg_df = result.copy()
|
347 |
+
with m:
|
348 |
+
col1,col2 = st.columns(2)
|
349 |
+
with col1:
|
350 |
+
st.markdown('**Select the Latitude and Longitude**')
|
351 |
+
annual_avg_option = st.selectbox("",pd.DataFrame(lat_long_list),key = 9)
|
352 |
+
annual_temp = annual_avg_plot(annual_avg_df,annual_avg_option,lat_long_list)
|
353 |
+
with n:
|
354 |
+
fig_avg = avg_temp_plot(annual_temp,annual_avg_option)
|
355 |
+
st.plotly_chart(fig_avg,use_container_width = True)
|
356 |
+
with o:
|
357 |
+
col11,col22,col33 = st.columns(3)
|
358 |
+
with col22:
|
359 |
+
st.download_button("Download Data",
|
360 |
+
data = convert_df(annual_temp),
|
361 |
+
file_name='Annual_average_temperature.csv',
|
362 |
+
mime='text/csv',)
|
363 |
+
#For maps
|
364 |
+
lat_avg = float(annual_avg_option.split(',')[0])
|
365 |
+
long_avg = float(annual_avg_option.split(',')[1])
|
366 |
+
with m:
|
367 |
+
with col2:
|
368 |
+
m_avg = map_creation(lat_avg,long_avg,0,0)
|
369 |
+
last_click = m_avg['last_clicked']
|
370 |
+
if last_click is not None:
|
371 |
+
clicked_lat = last_click['lat']
|
372 |
+
clicked_long = last_click['lng']
|
373 |
+
st.markdown("**Last Clicked Latitude Longitude point is:**")
|
374 |
+
st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
|
375 |
+
nn = search_func(clicked_lat,clicked_long,lat_long_list,result)
|
376 |
+
st.write("**Nearest Latitude and Longitude is:**",nn)
|
377 |
+
|
378 |
+
else:
|
379 |
+
st.markdown("**Click on the map to fetch the Latitude and Longitude**")
|
380 |
+
|
381 |
+
with st.container():
|
382 |
+
if nn!=0:
|
383 |
+
nn_avg_temp_df = annual_avg_plot(annual_avg_df,nn,lat_long_list)
|
384 |
+
fig_avg_nn = avg_temp_plot(nn_avg_temp_df,nn)
|
385 |
+
with n:
|
386 |
+
st.plotly_chart(fig_avg_nn, use_container_width = True)
|
387 |
+
with o:
|
388 |
+
col11,col22,col33 = st.columns(3)
|
389 |
+
with col22:
|
390 |
+
st.download_button("Download Data",
|
391 |
+
data = convert_df(nn_avg_temp_df),
|
392 |
+
file_name='Annual_average_temperature_nn.csv',
|
393 |
+
mime='text/csv',)
|
394 |
+
|
395 |
+
|
396 |
+
elif data_type == 'Annual Max, Min, & Average Temperature':
|
397 |
+
df = result.copy()
|
398 |
+
col1,col2 = st.columns(2)
|
399 |
+
with col1:
|
400 |
+
st.markdown('**Select the Latitude and Longitude**')
|
401 |
+
option_lat_long = st.selectbox("",pd.DataFrame(lat_long_list),key = 10)
|
402 |
+
|
403 |
+
with col2:
|
404 |
+
#For maps
|
405 |
+
lat_avg_3 = float(option_lat_long.split(',')[0])
|
406 |
+
long_avg_3 = float(option_lat_long.split(',')[1])
|
407 |
+
m_max_min_avg = map_creation(lat_avg_3,long_avg_3,0,0)
|
408 |
+
|
409 |
+
last_click = m_max_min_avg['last_clicked']
|
410 |
+
if last_click is not None:
|
411 |
+
clicked_lat = last_click['lat']
|
412 |
+
clicked_long = last_click['lng']
|
413 |
+
st.markdown("**Last Clicked Latitude Longitude point is:**")
|
414 |
+
st.markdown(" ")
|
415 |
+
st.markdown(" ")
|
416 |
+
st.write("{:0.2f},{:0.2f}".format(clicked_lat,clicked_long))
|
417 |
+
|
418 |
+
nn = search_func(clicked_lat,clicked_long,lat_long_list,result)
|
419 |
+
st.write("**Nearest Latitude and Longitude is:**",nn)
|
420 |
+
|
421 |
+
else:
|
422 |
+
st.markdown("**Click on the map to fetch the Latitude and Longitude**")
|
423 |
+
|
424 |
+
col1,col2,col3 = st.columns(3)
|
425 |
+
with col1:
|
426 |
+
df1 = annual_max_plot(df,option_lat_long,lat_long_list)
|
427 |
+
fig_2 = max_temp_plot(df1,option_lat_long)
|
428 |
+
st.plotly_chart(fig_2, use_container_width=True)
|
429 |
+
st.download_button("Download Data",
|
430 |
+
data = convert_df(df1),
|
431 |
+
file_name='Annual_maximum_temperature.csv',
|
432 |
+
mime='text/csv',)
|
433 |
+
|
434 |
+
with st.container():
|
435 |
+
if nn!=0:
|
436 |
+
df_annual_max_2 = annual_max_plot(df,nn,lat_long_list)
|
437 |
+
fig_annual_max_nn_2 = max_temp_plot(df_annual_max_2,nn)
|
438 |
+
st.plotly_chart(fig_annual_max_nn_2, use_container_width=True)
|
439 |
+
st.download_button("Download Data",
|
440 |
+
data = convert_df(df_annual_max_2),
|
441 |
+
file_name='Annual_maximum_temperature_nn.csv',
|
442 |
+
mime='text/csv',)
|
443 |
+
|
444 |
+
with col2:
|
445 |
+
with st.container():
|
446 |
+
df2 = annual_min_plot(df,option_lat_long,lat_long_list)
|
447 |
+
fig_3 = min_temp_plot(df2,option_lat_long)
|
448 |
+
st.plotly_chart(fig_3,use_container_width = True)
|
449 |
+
st.download_button("Download Data",
|
450 |
+
data = convert_df(df2),
|
451 |
+
file_name='Annual_minimum_temperature.csv',
|
452 |
+
mime='text/csv',)
|
453 |
+
with st.container():
|
454 |
+
if nn!=0:
|
455 |
+
df_annual_min_2 = annual_min_plot(df,nn,lat_long_list)
|
456 |
+
fig_annual_min_nn_2 = min_temp_plot(df_annual_min_2,nn)
|
457 |
+
st.plotly_chart(fig_annual_min_nn_2, use_container_width=True)
|
458 |
+
st.download_button("Download Data",
|
459 |
+
data = convert_df(df_annual_min_2),
|
460 |
+
file_name='Annual_minimum_temperature_nn.csv',
|
461 |
+
mime='text/csv',)
|
462 |
+
with col3:
|
463 |
+
with st.container():
|
464 |
+
annual_average = annual_avg_plot(df,option_lat_long,lat_long_list)
|
465 |
+
fig_4 = avg_temp_plot(annual_average,option_lat_long)
|
466 |
+
st.plotly_chart(fig_4,use_container_width = True)
|
467 |
+
|
468 |
+
st.download_button("Download Data",
|
469 |
+
data = convert_df(annual_average),
|
470 |
+
file_name='Annual_average_temperature.csv',
|
471 |
+
mime='text/csv',)
|
472 |
+
|
473 |
+
with st.container():
|
474 |
+
if nn!=0:
|
475 |
+
df_annual_avg_nn_2 = annual_avg_plot(df,nn,lat_long_list)
|
476 |
+
fig_nn_4 = avg_temp_plot(df_annual_avg_nn_2,nn)
|
477 |
+
st.plotly_chart(fig_nn_4,use_container_width=True)
|
478 |
+
st.download_button("Download Data",
|
479 |
+
data = convert_df(df_annual_avg_nn_2),
|
480 |
+
file_name='Annual_average_temperature_nn.csv',
|
481 |
+
mime='text/csv',)
|
482 |
+
|
483 |
+
if __name__ == '__main__':
|
484 |
+
run()
|
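Note on the entry point: under Streamlit the module is normally executed with "streamlit run", so the guard above rarely fires; run() is presumably invoked from app.py instead. A minimal sketch of that assumed wiring (the import path and the page selector below are assumptions, not confirmed by this commit):

import streamlit as st
from stlib import temperature, precipitation  # assumed package layout based on this repo

page = st.sidebar.radio("Dataset", ("Temperature", "Precipitation"))  # hypothetical selector
if page == "Temperature":
    temperature.run()
else:
    precipitation.run()  # assumed to expose the same run() entry point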
temperature_functions.py
ADDED
@@ -0,0 +1,284 @@
1 |
+
#standard imports
|
2 |
+
import pandas as pd
|
3 |
+
import numpy as np
|
4 |
+
import streamlit as st
|
5 |
+
import os
|
6 |
+
import glob
|
7 |
+
|
8 |
+
|
9 |
+
#imports for finding the nearest lat long using haversine distance
|
10 |
+
from math import radians, cos, sin, asin, sqrt
|
11 |
+
import math
|
12 |
+
|
13 |
+
#visualization libraries to visualize different plots
|
14 |
+
import plotly.express as px
|
15 |
+
import plotly.graph_objects as go
|
16 |
+
import altair as alt
|
17 |
+
|
18 |
+
|
19 |
+
#for Logo plotting
|
20 |
+
from PIL import Image
|
21 |
+
|
22 |
+
#for Folium Map Creation
|
23 |
+
from typing import Dict, List, Optional
|
24 |
+
import folium
|
25 |
+
from streamlit_folium import st_folium
|
26 |
+
from branca.element import Figure
|
27 |
+
from precipitation_function import lat_long_type
|
28 |
+
|
29 |
+
#disabling warnings
|
30 |
+
import warnings
|
31 |
+
warnings.filterwarnings("ignore")
|
32 |
+
|
33 |
+
#For parallel processing
|
34 |
+
from pandarallel import pandarallel
|
35 |
+
pandarallel.initialize(progress_bar=True)
|
36 |
+
|
37 |
+
|
38 |
+
|
39 |
+
fig = Figure(width = 550,height = 350)
|
40 |
+
|
41 |
+
#function for calculating annual average
|
42 |
+
@st.cache
|
43 |
+
def annual_avg(df,year_lst):
|
44 |
+
annual_avg_df_lst = list()
|
45 |
+
for i in year_lst:
|
46 |
+
d = df.get_group((i))
|
47 |
+
d['Annual avg'] = (d['daily_avg'].sum())/len(d)
|
48 |
+
annual_avg_df_lst.append(d)
|
49 |
+
return annual_avg_df_lst
|
50 |
+
|
51 |
+
|
52 |
+
#function for splitting the date column values into year, month and day
|
53 |
+
def df_date_split(df,lat_long_list):
|
54 |
+
df_date_list = list()
|
55 |
+
for i in lat_long_list:
|
56 |
+
grouped_df = df.get_group(i)
|
57 |
+
grouped_df[['Year','Month','Day']] = grouped_df['date'].str.split('-',expand = True)
|
58 |
+
df_date_list.append(grouped_df)
|
59 |
+
date_split_df = pd.concat(df_date_list)
|
60 |
+
return date_split_df
|
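The split above assumes the date column holds plain 'YYYY-MM-DD' strings; a quick illustration (hypothetical value) of what that yields:

import pandas as pd

pd.Series(['2001-07-15']).str.split('-', expand=True)
# -> one row with the string columns '2001', '07', '15'
# Year/Month/Day therefore stay zero-padded strings, which is why the later
# groupby keys and the .loc[start_year:end_year] slices compare string values.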
61 |
+
|
62 |
+
|
63 |
+
@st.cache
|
64 |
+
#function for creating a 'lat_long' column by concatenating the latitude and longitude values and dropping duplicate rows
|
65 |
+
def lat_long_process(df):
|
66 |
+
df['lat_long'] = df.loc[:,'lat'].astype(str)+','+df.loc[:,'long'].astype(str)
|
67 |
+
df.drop_duplicates(inplace = True)
|
68 |
+
return df
|
69 |
+
|
70 |
+
@st.cache
|
71 |
+
#Creating a separate dataframe for lat long values and returning a list of lat_long values
|
72 |
+
def lat_long_list_creation(df):
|
73 |
+
lat_long_df = df[['lat','long']]
|
74 |
+
lat_long_df['lat_long'] = lat_long_df.loc[:,'lat'].astype(str)+','+lat_long_df.loc[:,'long'].astype(str)
|
75 |
+
lat_long_df.drop_duplicates(inplace=True)
|
76 |
+
lat_long_list = lat_long_df['lat_long']
|
77 |
+
return lat_long_list
|
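For reference, each entry produced here is a plain 'lat,long' string (assuming numeric lat/long columns), which is why the page code recovers the coordinates with float(option.split(',')[0]) and float(option.split(',')[1]). A small illustrative check:

lat, long = 9.25, 38.75             # hypothetical grid point
entry = str(lat) + ',' + str(long)  # -> '9.25,38.75', same shape as the selectbox options
assert float(entry.split(',')[0]) == 9.25
assert float(entry.split(',')[1]) == 38.75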
78 |
+
|
79 |
+
|
80 |
+
#function for grouping the data on lat long and returning only the et lat long
|
81 |
+
def group_df(df,lat_long_lst):
|
82 |
+
in_names = df.groupby(df['lat_long'])
|
83 |
+
temperaturedf_new = list()
|
84 |
+
for i in lat_long_lst:
|
85 |
+
df1 = in_names.get_group(i)
|
86 |
+
temperaturedf_new.append(df1)
|
87 |
+
result = pd.concat(temperaturedf_new)
|
88 |
+
return result
|
89 |
+
|
90 |
+
|
91 |
+
#function for calculating daily average
|
92 |
+
def daily_avg(x,y):
|
93 |
+
return (x+y)/2
|
94 |
+
|
95 |
+
|
96 |
+
|
97 |
+
@st.cache
|
98 |
+
#creating avg dataframe
|
99 |
+
def annual_avg_plot(annual_avg_df,lat_long_option,lat_long_list):
|
100 |
+
annual_avg_df = annual_avg_df.groupby('lat_long')
|
101 |
+
annual_avg_df = df_date_split(annual_avg_df,lat_long_list)
|
102 |
+
annual_avg_df = annual_avg_df.groupby('lat_long')
|
103 |
+
annual_avg_df = annual_avg_df.get_group(lat_long_option)
|
104 |
+
#returns the list of unique year values
|
105 |
+
year_lst = annual_avg_df['Year'].unique()
|
106 |
+
annual_avg_dataframe = annual_avg_df.groupby('Year')
|
107 |
+
annual_avg_result = annual_avg(annual_avg_dataframe,year_lst)
|
108 |
+
annual_average = pd.concat(annual_avg_result,axis = 0)
|
109 |
+
return annual_average
|
110 |
+
|
111 |
+
|
112 |
+
#creating average temp plotly chart
|
113 |
+
def avg_temp_plot(annual_temp,lat_long_val):
|
114 |
+
#CODE FOR PLOTTING ANNUAL AVERAGE TEMPERATURE
|
115 |
+
fig_avg = px.line(annual_temp, x= 'Year',y='Annual avg')
|
116 |
+
fig_avg.update_layout(
|
117 |
+
yaxis = dict(tickfont = dict(size=15)),
|
118 |
+
xaxis = dict(tickfont = dict(size=15)),
|
119 |
+
plot_bgcolor = 'rgba(0,0,0,0)')
|
120 |
+
fig_avg.update_traces(line_color ='dimgray')
|
121 |
+
fig_avg.update_xaxes(gridcolor='whitesmoke')
|
122 |
+
fig_avg.update_yaxes(gridcolor = 'whitesmoke')
|
123 |
+
fig_avg.update_yaxes(title = 'Annual Average Temperature (C)')
|
124 |
+
# fig_avg.update_layout(title = "Annual Average Temperature: "+str(lat_long_val))
|
125 |
+
|
126 |
+
return fig_avg
|
127 |
+
|
128 |
+
|
129 |
+
@st.cache
|
130 |
+
def annual_min_plot(Annual_temp_min,option_annual_min_temp,lat_long_list):
|
131 |
+
Annual_temp_min = Annual_temp_min.groupby('lat_long')
|
132 |
+
Annual_temp_min = df_date_split(Annual_temp_min,lat_long_list)
|
133 |
+
Annual_temp_min = Annual_temp_min.groupby(['lat_long','Year'])[['tmin']].min()
|
134 |
+
Annual_temp_min.rename(columns = {'tmin':'Yearly_minimum_temp'},inplace = True)
|
135 |
+
Annual_temp_min.reset_index(inplace = True)
|
136 |
+
Annual_temp_min = Annual_temp_min.groupby('lat_long')
|
137 |
+
df2 = Annual_temp_min.get_group(option_annual_min_temp)
|
138 |
+
return df2
|
139 |
+
|
140 |
+
@st.cache
|
141 |
+
def annual_max_plot(Annual_temp,option_annual_temp,lat_long_list):
|
142 |
+
Annual_temp = Annual_temp.groupby('lat_long')
|
143 |
+
Annual_temp = df_date_split(Annual_temp,lat_long_list)
|
144 |
+
Annual_temp = Annual_temp.groupby(['lat_long','Year'])[['tmax']].max()
|
145 |
+
Annual_temp.rename(columns = {'tmax':'Yearly_maximum_temp'},inplace = True)
|
146 |
+
Annual_temp.reset_index(inplace = True)
|
147 |
+
Annual_temp = Annual_temp.groupby('lat_long')
|
148 |
+
df1 = Annual_temp.get_group(option_annual_temp)
|
149 |
+
return df1
|
150 |
+
|
151 |
+
|
152 |
+
@st.cache
|
153 |
+
def daily_avg_calc(result,option,start,end):
|
154 |
+
grouped_temperature_df = result.groupby('lat_long')
|
155 |
+
data_frame = grouped_temperature_df.get_group(option)
|
156 |
+
data_frame.set_index('date',inplace = True)
|
157 |
+
data_frame_start_end = data_frame.loc[str(start):str(end)]
|
158 |
+
data_frame_start_end = data_frame_start_end.reset_index()
|
159 |
+
return data_frame_start_end
|
160 |
+
|
161 |
+
def daily_avg_plot(data_frame_start_end,lat_long_val):
|
162 |
+
#Plotting the line chart of the daily average
|
163 |
+
fig = px.line(data_frame_start_end, x = 'date',y='daily_avg',title = 'Daily Average Temperature')
|
164 |
+
fig.update_traces(line_color = 'blue')
|
165 |
+
fig.update_xaxes(title_text = 'Year',gridcolor = 'whitesmoke')
|
166 |
+
fig.update_yaxes(ticklabelposition="inside top", title= 'Daily Average Temperature (C)',gridcolor = 'whitesmoke')
|
167 |
+
fig.update_layout(margin = dict(l=25,r=25,t=25,b=25))
|
168 |
+
fig.update_layout(plot_bgcolor = 'rgba(0,0,0,0)')
|
169 |
+
# fig.update_layout(title = "Daily Average Temperature: "+str(lat_long_val))
|
170 |
+
return fig
|
171 |
+
|
172 |
+
@st.cache
|
173 |
+
def monthly_mean_calc(temperature_monthly_df,lat_long_list):
|
174 |
+
#monthly mean calculation
|
175 |
+
temperature_monthly_df = temperature_monthly_df.groupby('lat_long')
|
176 |
+
date_split_df = df_date_split(temperature_monthly_df,lat_long_list)
|
177 |
+
monthly_avg_temp = date_split_df.groupby(['lat_long','Year','Month'])[['daily_avg']].mean()
|
178 |
+
monthly_avg_temp.rename(columns = {'daily_avg':'Monthly mean temperature'},inplace = True)
|
179 |
+
return monthly_avg_temp
|
180 |
+
|
181 |
+
|
182 |
+
@st.cache(ttl=24*60*60)
|
183 |
+
def selecting_mean(monthly_avg_temp,option_mean,start_year,end_year):
|
184 |
+
monthly_avg_temp = monthly_avg_temp.reset_index().set_index('Year').groupby('lat_long')
|
185 |
+
grouped_monthly_mean = monthly_avg_temp.get_group(option_mean)
|
186 |
+
df = grouped_monthly_mean.loc[start_year:end_year]
|
187 |
+
df = df.reset_index()
|
188 |
+
return df
|
189 |
+
|
190 |
+
|
191 |
+
def plot_mean_data(df,lat_long_val):
|
192 |
+
title_text = "Monthly Mean Temperature: "+str(lat_long_val)
|
193 |
+
highlight = alt.selection(
|
194 |
+
type='single', on='mouseover', fields=['Year'], nearest=True)
|
195 |
+
base = alt.Chart(df,title = title_text).encode(
|
196 |
+
x = alt.X('Month:Q',scale = alt.Scale(domain=[1,12]),axis=alt.Axis(tickMinStep=1)),
|
197 |
+
y = alt.Y('Monthly mean temperature:Q',scale = alt.Scale(domain =[int(df['Monthly mean temperature'].min()),int(df['Monthly mean temperature'].max())])),
|
198 |
+
color = alt.Color('Year:O',scale = alt.Scale(scheme = 'magma'))
|
199 |
+
)
|
200 |
+
points = base.mark_circle().encode(
|
201 |
+
opacity=alt.value(0),
|
202 |
+
tooltip=[
|
203 |
+
alt.Tooltip('Year:O', title='Year'),
|
204 |
+
alt.Tooltip('Month:Q', title='Month'),
|
205 |
+
alt.Tooltip('Monthly mean temperature:Q', title='Mean temp')
|
206 |
+
]).add_selection(highlight)
|
207 |
+
|
208 |
+
lines = base.mark_line().encode(
|
209 |
+
size=alt.condition(~highlight, alt.value(1), alt.value(3)))
|
210 |
+
|
211 |
+
mean_chart = (points + lines).properties(width=1000, height=450).interactive()
|
212 |
+
return mean_chart
|
213 |
+
|
214 |
+
def max_temp_plot(df_max,lat_long_val):
|
215 |
+
fig_max = px.line(df_max,x = 'Year',y='Yearly_maximum_temp')
|
216 |
+
fig_max.update_traces(line_color = 'maroon')
|
217 |
+
fig_max.update_layout(
|
218 |
+
yaxis = dict(tickfont = dict(size=15)),
|
219 |
+
xaxis = dict(tickfont = dict(size=15)),
|
220 |
+
plot_bgcolor = 'rgba(0,0,0,0)')
|
221 |
+
fig_max.update_xaxes(gridcolor='whitesmoke')
|
222 |
+
fig_max.update_yaxes(gridcolor = 'whitesmoke')
|
223 |
+
fig_max.update_yaxes(title = "Annual Maximum Temperature (C)")
|
224 |
+
# fig_max.update_layout(title = "Yearly Maximum Temperature: "+str(lat_long_val))
|
225 |
+
|
226 |
+
return fig_max
|
227 |
+
|
228 |
+
|
229 |
+
def min_temp_plot(df_min,lat_long_val):
|
230 |
+
fig_min = px.line(df_min, x= 'Year',y = 'Yearly_minimum_temp')
|
231 |
+
fig_min.update_traces(line_color ='blue')
|
232 |
+
fig_min.update_layout(
|
233 |
+
yaxis = dict(tickfont = dict(size=15)),
|
234 |
+
xaxis = dict(tickfont = dict(size=15)),
|
235 |
+
plot_bgcolor = 'rgba(0,0,0,0)')
|
236 |
+
fig_min.update_xaxes(gridcolor='whitesmoke')
|
237 |
+
fig_min.update_yaxes(gridcolor = 'whitesmoke')
|
238 |
+
fig_min.update_yaxes(title = "Annual Minimum Temperature (C)")
|
239 |
+
# fig_min.update_layout(title = "Yearly Minimum Temperature: "+str(lat_long_val))
|
240 |
+
|
241 |
+
return fig_min
|
242 |
+
|
243 |
+
|
244 |
+
#code for downloading Data as a CSV File
|
245 |
+
@st.cache
|
246 |
+
def convert_df(df):
|
247 |
+
return df.to_csv().encode('utf-8')
|
248 |
+
|
249 |
+
# a = st.sidebar.empty()
|
250 |
+
|
251 |
+
#result dataframe contains the daily average value as well.
|
252 |
+
#Function for creating folium map and returning the latitude and Longitude of the clicked location
|
253 |
+
def map_creation(lat,long,clicked_lat,clicked_long):
|
254 |
+
with st.sidebar:
|
255 |
+
m = folium.Map(location = [9.14,40],zoom_start =7)
|
256 |
+
fig.add_child(m)
|
257 |
+
tile = folium.TileLayer(
|
258 |
+
tiles = 'https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}',
|
259 |
+
attr = 'Esri',
|
260 |
+
name = 'Esri Satellite',
|
261 |
+
overlay = False,
|
262 |
+
control = True).add_to(m)
|
263 |
+
folium.TileLayer('Stamen Terrain').add_to(m)
|
264 |
+
folium.TileLayer('Stamen Water Color').add_to(m)
|
265 |
+
folium.LayerControl().add_to(m)
|
266 |
+
if (clicked_lat == 0) and (clicked_long == 0):
|
267 |
+
folium.Marker([lat,long]).add_to(m)
|
268 |
+
else:
|
269 |
+
folium.Marker([lat,long]).add_to(m)
|
270 |
+
folium.Marker([clicked_lat,clicked_long]).add_to(m)
|
271 |
+
st_data = st_folium(m,key = 'map_fig_1')
|
272 |
+
return st_data
|
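For clarity, the dict returned by st_folium (and passed back by map_creation) is what the page code above indexes; its 'last_clicked' entry is None until the user clicks the map and then holds the clicked coordinates (illustrative values below):

# st_data = map_creation(9.25, 38.75, 0, 0)
# st_data['last_clicked']  -> None before any click
# st_data['last_clicked']  -> {'lat': 9.03, 'lng': 38.74} after a click
# which is why callers check "if last_click is not None:" before reading 'lat' and 'lng'.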
273 |
+
|
274 |
+
#df is the initial df that contains only et data
|
275 |
+
def search_func(latitude,longitude,lt_lng_lst,df):
|
276 |
+
df['lat_radian'],df['long_radian'] = np.radians(df['lat']),np.radians(df['long'])
|
277 |
+
df['dLON'] = df['long_radian'] - math.radians(longitude)
|
278 |
+
df['dLAT'] = df['lat_radian'] - math.radians(latitude)
|
279 |
+
df['distance'] = 6371 * 2 * np.arcsin(np.sqrt(np.sin(df['dLAT']/2)**2 + math.cos(math.radians(latitude)) * np.cos(df['lat_radian']) * np.sin(df['dLON']/2)**2))
|
280 |
+
a = df['distance'].idxmin()
|
281 |
+
nearest_neighbor = df._get_value(a,'lat_long')
|
282 |
+
nearest_neighbor = lat_long_type(nearest_neighbor)
|
283 |
+
# st.write("**Nearest Latitude and Longitude is :**",nearest_neighbor)
|
284 |
+
return nearest_neighbor
|
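For reference, the vectorized distance above is the haversine formula (note that it multiplies the cosines of the two latitudes); a scalar two-point version it should agree with, shown only as an illustrative sketch:

from math import radians, sin, cos, asin, sqrt

def haversine_km(lat1, lon1, lat2, lon2):
    # great-circle distance in km, Earth radius taken as 6371 km as above
    dlat = radians(lat2 - lat1)
    dlon = radians(lon2 - lon1)
    a = sin(dlat / 2) ** 2 + cos(radians(lat1)) * cos(radians(lat2)) * sin(dlon / 2) ** 2
    return 6371 * 2 * asin(sqrt(a))

# e.g. haversine_km(9.0, 38.7, 9.1, 38.8) comes out to roughly 16 km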