Orangefish00 committed on
Commit
249a185
1 Parent(s): b5d0c55

Upload functions.py

Files changed (1)
  1. functions.py +248 -0
functions.py ADDED
@@ -0,0 +1,248 @@
+ from datetime import datetime
+ import requests
+ import os
+ import joblib
+ import pandas as pd
+ import json
+
+ from dotenv import load_dotenv
+ load_dotenv()
+
+
+ def decode_features(df, feature_view):
+     """Decode features in the input DataFrame using the corresponding
+     Hopsworks Feature Store transformation functions."""
+     import inspect
+
+     df_res = df.copy()
+     td_transformation_functions = feature_view._batch_scoring_server._transformation_functions
+
+     for feature_name in td_transformation_functions:
+         if feature_name in df_res.columns:
+             td_transformation_function = td_transformation_functions[feature_name]
+             sig = inspect.signature(td_transformation_function.transformation_fn)
+             param_dict = {param.name: param.default
+                           for param in sig.parameters.values()
+                           if param.default is not inspect.Parameter.empty}
+             if td_transformation_function.name == "min_max_scaler":
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: x * (param_dict["max_value"] - param_dict["min_value"]) + param_dict["min_value"])
+             elif td_transformation_function.name == "standard_scaler":
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: x * param_dict["std_dev"] + param_dict["mean"])
+             elif td_transformation_function.name == "label_encoder":
+                 # invert the value -> index mapping to recover the original labels
+                 index_to_value = {v: k for k, v in param_dict["value_to_index"].items()}
+                 df_res[feature_name] = df_res[feature_name].map(
+                     lambda x: index_to_value[x])
+     return df_res
+
+
+ def get_model(project, model_name, evaluation_metric, sort_metrics_by):
+     """Retrieve the desired model from the local directory or download it
+     from the Hopsworks Model Registry. In the latter case, the model is
+     downloaded into the current working directory."""
+     TARGET_FILE = "model.pkl"
+     list_of_files = [os.path.join(dirpath, filename)
+                      for dirpath, _, filenames in os.walk('.')
+                      for filename in filenames if filename == TARGET_FILE]
+
+     if list_of_files:
+         # a previously downloaded model is already on disk
+         model_path = list_of_files[0]
+         model = joblib.load(model_path)
+     else:
+         # fetch the best model (according to the given metric) from the registry
+         mr = project.get_model_registry()
+         model = mr.get_best_model(model_name, evaluation_metric, sort_metrics_by)
+         model_dir = model.download()
+         model = joblib.load(model_dir + "/model.pkl")
+
+     return model
+
+
+ def get_air_json(AIR_QUALITY_API_KEY):
+     """Fetch the current air quality reading for Helsinki from the WAQI API."""
+     return requests.get(
+         f'https://api.waqi.info/feed/Helsinki/?token={AIR_QUALITY_API_KEY}'
+     ).json()['data']
+
+
+ def get_air_quality_data1():
+     """Return a reduced air quality observation: AQI, date and main pollutants."""
+     AIR_QUALITY_API_KEY = os.getenv('AIR_QUALITY_API_KEY')
+     json_data = get_air_json(AIR_QUALITY_API_KEY)
+     iaqi = json_data['iaqi']
+     return [
+         json_data['aqi'],             # AQI
+         json_data['time']['s'][:10],  # Date (YYYY-MM-DD)
+         iaqi['pm25']['v'],
+         iaqi['pm10']['v'],
+         iaqi['o3']['v'],
+         iaqi['no2']['v'],
+     ]
+
+ def get_air_quality_data():
+     """Return the full air quality observation: AQI, date, current sensor
+     readings and the daily forecast statistics."""
+     AIR_QUALITY_API_KEY = os.getenv('AIR_QUALITY_API_KEY')
+     json_data = get_air_json(AIR_QUALITY_API_KEY)
+     iaqi = json_data['iaqi']
+     forecast = json_data['forecast']['daily']
+     return [
+         json_data['aqi'],             # AQI
+         json_data['time']['s'][:10],  # Date (YYYY-MM-DD)
+         iaqi['h']['v'],               # humidity
+         iaqi['p']['v'],               # pressure
+         iaqi['pm10']['v'],
+         iaqi['t']['v'],               # temperature
+         forecast['o3'][0]['avg'],
+         forecast['o3'][0]['max'],
+         forecast['o3'][0]['min'],
+         forecast['pm10'][0]['avg'],
+         forecast['pm10'][0]['max'],
+         forecast['pm10'][0]['min'],
+         forecast['pm25'][0]['avg'],
+         forecast['pm25'][0]['max'],
+         forecast['pm25'][0]['min'],
+         forecast['uvi'][0]['avg'],
+         forecast['uvi'][0]['max'],
+         forecast['uvi'][0]['min'],
+     ]
+
+ def get_air_quality_df1(data):
+     """Build a DataFrame from the reduced air quality rows."""
+     col_names = [
+         'aqi',
+         'date',
+         'pm25',
+         'pm10',
+         'o3',
+         'no2',
+     ]
+
+     new_data = pd.DataFrame(data, columns=col_names)
+     # the WAQI date comes as YYYY-MM-DD, so use the matching parser
+     new_data.date = new_data.date.apply(timestamp_2_time1)
+
+     return new_data
+
+ def get_air_quality_df(data):
+     col_names = [
+         'aqi',
+         'date',
+         'iaqi_h',
+         'iaqi_p',
+         'iaqi_pm10',
+         'iaqi_t',
+         'o3_avg',
+         'o3_max',
+         'o3_min',
+         'pm10_avg',
+         'pm10_max',
+         'pm10_min',
+         'pm25_avg',
+         'pm25_max',
+         'pm25_min',
+         'uvi_avg',
+         'uvi_max',
+         'uvi_min',
+     ]
+
+     new_data = pd.DataFrame(data, columns=col_names)
+     new_data.date = new_data.date.apply(timestamp_2_time1)
+
+     return new_data
+
+
+ def get_weather_json(date, WEATHER_API_KEY):
+     """Fetch the daily weather summary for Helsinki from Visual Crossing."""
+     return requests.get(
+         f'https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/helsinki/{date}'
+         f'?unitGroup=metric&include=days&key={WEATHER_API_KEY}&contentType=json'
+     ).json()
+
+
+ def get_weather_data(date):
+     WEATHER_API_KEY = os.getenv('WEATHER_API_KEY')
+     json_data = get_weather_json(date, WEATHER_API_KEY)
+     data = json_data['days'][0]
+
+     return [
+         json_data['address'].capitalize(),
+         data['datetime'],
+         data['tempmax'],
+         data['tempmin'],
+         data['temp'],
+         data['feelslikemax'],
+         data['feelslikemin'],
+         data['feelslike'],
+         data['dew'],
+         data['humidity'],
+         data['precip'],
+         data['precipprob'],
+         data['precipcover'],
+         data['snow'],
+         data['snowdepth'],
+         data['windgust'],
+         data['windspeed'],
+         data['winddir'],
+         data['pressure'],
+         data['cloudcover'],
+         data['visibility'],
+         data['solarradiation'],
+         data['solarenergy'],
+         data['uvindex'],
+         data['conditions']
+     ]
+
+
+ def get_weather_df(data):
+     col_names = [
+         'city',
+         'date',
+         'tempmax',
+         'tempmin',
+         'temp',
+         'feelslikemax',
+         'feelslikemin',
+         'feelslike',
+         'dew',
+         'humidity',
+         'precip',
+         'precipprob',
+         'precipcover',
+         'snow',
+         'snowdepth',
+         'windgust',
+         'windspeed',
+         'winddir',
+         'pressure',
+         'cloudcover',
+         'visibility',
+         'solarradiation',
+         'solarenergy',
+         'uvindex',
+         'conditions'
+     ]
+
+     new_data = pd.DataFrame(data, columns=col_names)
+     new_data.date = new_data.date.apply(timestamp_2_time1)
+
+     return new_data
+
+ def timestamp_2_time1(x):
+     """Convert a 'YYYY-MM-DD' date string to a Unix timestamp in milliseconds."""
+     dt_obj = datetime.strptime(str(x), '%Y-%m-%d')
+     return int(dt_obj.timestamp() * 1000)
+
+
+ def timestamp_2_time(x):
+     """Convert a 'MM/DD/YYYY' date string to a Unix timestamp in milliseconds."""
+     dt_obj = datetime.strptime(str(x), '%m/%d/%Y')
+     return int(dt_obj.timestamp() * 1000)
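
For context, here is a minimal sketch of how these helpers could be wired into a daily feature pipeline and a batch scoring step. It is illustrative only: the Hopsworks login, the feature group name air_quality_fg, and the model name air_quality_model are assumptions, not part of this commit, and the AIR_QUALITY_API_KEY and WEATHER_API_KEY environment variables must be available to the .env file loaded by load_dotenv().

    # Hypothetical usage sketch; feature group and model names below are assumptions.
    from datetime import date
    import hopsworks

    from functions import (get_air_quality_data, get_air_quality_df,
                           get_weather_data, get_weather_df, get_model)

    # Fetch today's observations and wrap each row in a one-row DataFrame.
    df_air_quality = get_air_quality_df([get_air_quality_data()])
    df_weather = get_weather_df([get_weather_data(date.today().isoformat())])

    # Write to the feature store (feature group name is an assumption).
    project = hopsworks.login()
    fs = project.get_feature_store()
    air_quality_fg = fs.get_or_create_feature_group(
        name="air_quality_fg", version=1, primary_key=["date"])
    air_quality_fg.insert(df_air_quality)

    # Load the best registered model for batch scoring (model name is an assumption).
    model = get_model(project,
                      model_name="air_quality_model",
                      evaluation_metric="rmse",
                      sort_metrics_by="min")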