Arijit-hazra committed on
Commit
4391e28
1 Parent(s): 1e7b63b

new version

Browse files
Files changed (4) hide show
  1. api_data.py +36 -0
  2. app.py +83 -0
  3. data_processing.py +75 -0
  4. requirements.txt +4 -0
api_data.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ import datetime
3
+
4
def fetch_data(
    lookback,
    span=24,
    offset=3,  # data from the API is only available up to ~3 hours before now
    url="https://visual-crossing-weather.p.rapidapi.com/history",
    headers=None,
):
    """Fetch hourly weather history for Kolkata from the Visual Crossing API.

    Parameters
    ----------
    lookback : int
        Hours before "now" at which the requested window ends.
    span : int
        Length of the requested window in hours (default 24).
    offset : int
        Extra hours prepended to the window start to compensate for API lag.
    url : str
        API endpoint.
    headers : dict or None
        Request headers; when None, the default RapidAPI credentials are used.

    Returns
    -------
    requests.Response on HTTP 200, otherwise None (after printing the status).
    """
    # BUG FIX: the original used a mutable dict literal as the default value
    # for `headers`; use the None-sentinel idiom instead.
    if headers is None:
        # SECURITY NOTE(review): this API key is hard-coded and committed to
        # the repository — move it to an environment variable and rotate it.
        headers = {
            "X-RapidAPI-Key": "12c5552beamshb548b266489323cp1c7cb2jsnf8b18027cebc",
            "X-RapidAPI-Host": "visual-crossing-weather.p.rapidapi.com",
        }

    # Window covered: [now - lookback - (span + offset), now - lookback]
    end_datetime = datetime.datetime.now() - datetime.timedelta(hours=lookback)
    start_datetime = end_datetime - datetime.timedelta(hours=(span + offset))

    querystring = {
        "startDateTime": start_datetime.strftime("%Y-%m-%dT%H:%M:%S"),
        "aggregateHours": "1",  # hourly rows
        "location": "Kolkata",
        "endDateTime": end_datetime.strftime("%Y-%m-%dT%H:%M:%S"),
        "unitGroup": "us",      # Fahrenheit etc.; the app converts to Celsius
        "dayStartTime": "00:00:00",
        "contentType": "csv",   # caller parses response.content as CSV
        "dayEndTime": "23:59:00",
        "shortColumnNames": "0"
    }
    response = requests.get(url, headers=headers, params=querystring)

    if response.status_code == 200:
        return response
    # explicit None on failure (the original fell off the end implicitly)
    print("Error:", response.status_code)
    return None
app.py ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from data_processing import process_data, standardizeX
2
+ from sklearn.preprocessing import OneHotEncoder
3
+ from api_data import fetch_data
4
+ from tensorflow import keras
5
+ import pandas as pd
6
+ import gradio as gr
7
+ import numpy as np
8
+ import pickle
9
+ import io
10
+
11
#========================================================================================================================
# load the models
#========================================================================================================================
temp_model = keras.models.load_model('results/temp_model.h5')
hum_model = keras.models.load_model('results/hum_model.h5')
overview_model = keras.models.load_model('results/overview_model.h5')

#========================================================================================================================
# retrieve mean and std and encoder
#========================================================================================================================
# BUG FIX: pickle.load(open(...)) left the file handles unclosed; use `with`.
# NOTE(review): unpickling repo-local files is fine, but never unpickle
# untrusted data.
with open("results/encoder.pkl", "rb") as f:
    encoder = pickle.load(f)
with open("results/mean_std.pkl", "rb") as f:
    scale = pickle.load(f)
mean = scale["mean"]
std = scale["std"]

# feature 0 is temperature, feature 2 is humidity — presumably fixed by the
# training pipeline's column order; TODO confirm against training code.
temp_mean = mean[0]
temp_std = std[0]

hum_mean = mean[2]
hum_std = std[2]

# overview dictionary: encoder class index -> human-readable label.
# BUG FIX: the original literal listed key 2 twice ('Partially cloudy', then
# 'Rain'); Python keeps only the last duplicate, so 'Partially cloudy' could
# never be displayed. The effective mapping is preserved below. Note that
# data_processing maps both 'Partially cloudy' and 'Rain' to code 2, so the
# encoding itself is ambiguous — TODO confirm the intended label set.
overview = {
    0: 'Clear',
    1: 'Clear',
    2: 'Rain',
    3: 'Overcast',
}
40
+
41
+
42
+ #========================================================================================================================
43
+ # predict function
44
+ #========================================================================================================================
45
def forecast(time):
    """Predict temperature, humidity, and a weather overview `time` hours
    ahead (0–9) using the last 24 hourly observations from the API.

    Parameters
    ----------
    time : str
        Raw textbox value; must parse as an int in [0, 9].

    Returns
    -------
    (str, str, str)
        Temperature ("… c"), humidity ("… %"), and overview label — or an
        error message tuple when input is invalid or the API call fails.
    """
    # ROBUSTNESS FIX: the original crashed with ValueError on non-numeric
    # textbox input; treat it like any other invalid entry instead.
    try:
        time = int(time)
    except (TypeError, ValueError):
        return "please enter valid time", "0 to 9", ""
    if time > 9 or time < 0:
        return "please enter valid time", "0 to 9", ""

    # lookback shrinks as the requested horizon grows, so the model's 24-hour
    # input window ends (9 - time) hours before now
    response = fetch_data(9 - time)
    if response is None:
        return "API access denied", "", ""

    # processing data to better fit the model: parse the CSV payload and
    # shape the last 24 hourly rows into (1, 24, 17) — 17 engineered features
    df = pd.read_csv(io.StringIO(response.content.decode('utf-8')))
    df = process_data(df)
    X = np.array(df[-24:]).reshape(-1, 24, 17)
    X = standardizeX(X, mean, std)

    # predictions (still in standardized space)
    temp = temp_model.predict(X)
    hum = hum_model.predict(X)
    ovrvw = overview_model.predict(X)

    # reverse the scaling applied during training
    temp = (temp[0][0] * temp_std) + temp_mean
    hum = (hum[0][0] * hum_std) + hum_mean
    ovrvw = encoder.inverse_transform(ovrvw)

    # output formatting: API data is in US units, so convert °F -> °C
    temp = str(round(((temp - 32) * 5) / 9, 2)) + " c"
    hum = str(round(hum, 2)) + " %"
    ovrvw = overview[ovrvw[0][0]]
    return temp, hum, ovrvw
76
+
77
+
78
# Build and launch the Gradio UI: one free-text input (forecast horizon in
# hours, 0 = now … 9 = nine hours ahead) wired to forecast(), which returns
# the three labelled text outputs below.
gr.Interface(fn=forecast,
             inputs = gr.Textbox(placeholder="Provide value between 0 to 9, 0 means present, 9 means 9 hrs in future "),
             outputs = [gr.Textbox(label="Temperature"),gr.Textbox(label="Humidity"),gr.Textbox(label="Overview")]
             ).launch()
82
+
83
+
data_processing.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pandas as pd
3
+
4
+
5
+ # function to map weather values to numerical values
6
def map_weather_to_numerical(weather, ordinal_values):
    """Return the highest ordinal severity among the comma-separated tokens
    in *weather*; tokens missing from *ordinal_values* contribute 0, so an
    empty or unrecognised string yields 0."""
    severities = (
        ordinal_values.get(token.strip(), 0)
        for token in weather.split(",")
    )
    return max(severities, default=0)
14
+
15
def standardizeX(X, mean, std):
    """Z-score each feature slice X[:, :, i] using mean[i] and std[i].

    Mutates X in place and also returns it for convenience. Assumes the
    last axis of X indexes features in the same order as mean/std.
    """
    for feature_idx, feature_mean in enumerate(mean):
        X[:, :, feature_idx] = (X[:, :, feature_idx] - feature_mean) / std[feature_idx]
    return X
19
+
20
def process_data(df):
    """Transform a raw Visual Crossing weather CSV dataframe into the numeric
    feature frame the forecasting models consume.

    Steps: drop unused columns, interpolate gaps, fill categorical gaps,
    encode wind direction as sin/cos, map weather text to ordinal severities,
    and encode time-of-day / time-of-year as sin/cos pairs.

    Mutates *df* (inplace drops/renames) and returns the processed frame.
    """
    # Drop redundant columns — only those actually present in this payload.
    # (NOTE: "Snow Depth" is listed twice; harmless, but could be deduped.)
    redundant_cols = [col for col in ["Unnamed: 0", "Minimum Temperature",
                                      "Maximum Temperature", "Snow Depth", "Heat Index",
                                      "Precipitation Cover", "Wind Gust", "Wind Chill",
                                      "Snow Depth", "Info", "Latitude",
                                      "Longitude", "Address", "Resolved Address", "Name"] if col in df.columns]
    df.drop(redundant_cols, axis=1, inplace=True)

    # Interpolate missing numeric values
    df = df.interpolate()

    # Fill missing values in 'Conditions' and 'Weather Type' columns.
    # NOTE(review): column-selection fillna(inplace=True) triggers pandas'
    # chained-assignment deprecation on newer versions — verify pandas pin.
    df['Conditions'].fillna("Clear", inplace=True)
    df['Weather Type'].fillna("", inplace=True)

    # Rename column and convert the timestamp column to datetime format
    df.rename(columns={"Relative Humidity": "Humidity"}, inplace=True)
    df['DATETIME'] = pd.to_datetime(df['Date time'])

    # Drop the now-redundant raw 'Date time' column
    df.drop(["Date time"], axis=1, inplace=True)

    # Processing wind direction into a continuous circular (sin/cos) encoding.
    # NOTE(review): degrees->radians is deg*pi/180; deg*pi/360 yields HALF the
    # angle, so the encoding does not wrap at 360°. The trained models
    # presumably saw this same half-angle encoding, so it is kept as-is —
    # changing it would require retraining. TODO confirm.
    angle = (df["Wind Direction"]*np.pi)/360
    df.drop("Wind Direction", axis=1, inplace=True)
    df["sin(wind)"] = np.sin(angle)
    df["cos(wind)"] = np.cos(angle)

    # Map weather text values to ordinal severities (higher = more severe)
    rain_values = {'Heavy Rain': 7, 'Snow And Rain Showers': 6, 'Rain Showers': 5, 'Rain': 4, 'Light Rain': 3, 'Light Drizzle': 2, 'Drizzle': 1}
    storm_values = {'Dust storm': 1, 'Lightning Without Thunder': 2, 'Thunderstorm Without Precipitation': 3, 'Thunderstorm': 4}
    # NOTE(review): 'Partially cloudy' and 'Rain' both map to 2, so the
    # overview label is not uniquely recoverable from its code — confirm
    # this matches the label decoding used in the app.
    overview = {'Clear': 1, 'Partially cloudy': 2, 'Rain': 2, 'Overcast': 3}

    df["Rain"] = df['Weather Type'].apply(lambda s: map_weather_to_numerical(s, rain_values))
    df["Storm"] = df['Weather Type'].apply(lambda s: map_weather_to_numerical(s, storm_values))
    df["Overview"] = df['Conditions'].apply(lambda s: map_weather_to_numerical(s, overview))

    # Drop the raw text columns now that they are encoded
    df.drop(["Weather Type", "Conditions"], axis=1, inplace=True)

    # Convert DATETIME to seconds since the Unix epoch
    df["seconds"] = df["DATETIME"].map(pd.Timestamp.timestamp)
    df.drop("DATETIME", axis=1, inplace=True)

    # Process seconds into sin/cos pairs that capture the periodic nature of
    # days and years (365.2425 = mean Gregorian year length in days)
    day_in_seconds = 24 * 3600
    year_in_seconds = day_in_seconds * 365.2425
    df["sin(day)"] = np.sin((df["seconds"] * (2 * np.pi)) / day_in_seconds)
    df["cos(day)"] = np.cos((df["seconds"] * (2 * np.pi)) / day_in_seconds)
    df["sin(year)"] = np.sin((df["seconds"] * (2 * np.pi)) / year_in_seconds)
    df["cos(year)"] = np.cos((df["seconds"] * (2 * np.pi)) / year_in_seconds)
    df.drop("seconds", axis=1, inplace=True)

    return df
75
+
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ tensorflow
2
+ pandas
3
+ numpy
4
+ gradio
+ requests
+ scikit-learn