# temperature_pred/functions.py
from datetime import datetime
import inspect
import json
import os

import joblib
import pandas as pd
import requests


def decode_features(df, feature_view):
"""Decodes features in the input DataFrame using corresponding Hopsworks Feature Store transformation functions"""
df_res = df.copy()
print(df_res)
    # Transformation functions registered on the feature view's batch scoring server
    td_transformation_functions = feature_view._batch_scoring_server._transformation_functions
for feature_name in td_transformation_functions:
if feature_name in df_res.columns:
td_transformation_function = td_transformation_functions[feature_name]
            sig = inspect.signature(td_transformation_function.transformation_fn)
            param_dict = {
                param.name: param.default
                for param in sig.parameters.values()
                if param.default is not inspect.Parameter.empty
            }
            # Invert the fitted scaling/encoding so values come back in their original units
            if td_transformation_function.name == "min_max_scaler":
df_res[feature_name] = df_res[feature_name].map(
lambda x: x * (param_dict["max_value"] - param_dict["min_value"]) + param_dict["min_value"])
elif td_transformation_function.name == "standard_scaler":
df_res[feature_name] = df_res[feature_name].map(
lambda x: x * param_dict['std_dev'] + param_dict["mean"])
elif td_transformation_function.name == "label_encoder":
dictionary = param_dict['value_to_index']
dictionary_ = {v: k for k, v in dictionary.items()}
df_res[feature_name] = df_res[feature_name].map(
lambda x: dictionary_[x])
return df_res
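
# Example (sketch, not part of the pipeline): decoding scaled model output back to
# original units. The feature view lookup and the DataFrame contents below are
# illustrative assumptions, not code from this repo.
#
#   feature_view = fs.get_feature_view(name="weather_fv", version=1)
#   preds = pd.DataFrame({"temp": [0.42, 0.57]})    # scaled predictions
#   decoded = decode_features(preds, feature_view)  # back to original scale
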
def get_weather_json(date, WEATHER_API_KEY):
return requests.get(f'https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/helsinki/{date}?unitGroup=metric&include=days&key={WEATHER_API_KEY}&contentType=json').json()
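
# Example (sketch): one day of Helsinki weather from Visual Crossing. The
# WEATHER_API_KEY environment variable is assumed to be set; dates follow the
# API's 'YYYY-MM-DD' format.
#
#   weather_json = get_weather_json('2023-01-15', os.getenv('WEATHER_API_KEY'))
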
def get_weather_csv():
    # requests.Response has no .csv(); return the response body (CSV text) instead
    return requests.get('https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/shanghai?unitGroup=metric&include=days&key=FYYH5HKD9558HBXD2D6KWXDGH&contentType=csv').text
def get_weather_json_quick(date):
return requests.get(f'https://weather.visualcrossing.com/VisualCrossingWebServices/rest/services/timeline/shanghai/{date}?unitGroup=metric&include=days&key=FYYH5HKD9558HBXD2D6KWXDGH&contentType=json').json()
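
# Example (sketch): the Shanghai variants use the key hard-coded above, so they
# only need a date (or no argument at all for the CSV endpoint).
#
#   today = datetime.now().strftime('%Y-%m-%d')
#   weather_json = get_weather_json_quick(today)
#   weather_csv = get_weather_csv()
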
def get_weather_data(json):
    # Note: `json` here is the parsed API response dict, shadowing the json module
#WEATHER_API_KEY = os.getenv('WEATHER_API_KEY')
#csv = get_weather_csv()
data = json['days'][0]
print("data parsed sccessfully")
#return [
# #json['address'].capitalize(),
# data['datetime'],
# data['feelslikemax'],
# data['feelslikemin'],
# data['feelslike'],
# data['dew'],
# data['humidity'],
# data['precip'],
# data['precipprob'],
# data['precipcover'],
# data['snow'],
# data['snowdepth'],
# data['windgust'],
# data['windspeed'],
# data['winddir'],
# data['pressure'],
# data['cloudcover'],
# data['visibility'],
# data['solarradiation'],
# data['solarenergy'],
# data['uvindex'],
# data['conditions']
#]
return data
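
# Example (sketch): extracting the single-day record from a timeline response.
#
#   weather_json = get_weather_json_quick('2023-01-15')
#   day_record = get_weather_data(weather_json)   # dict of that day's fields
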
def get_weather_df(data):
col_names = [
'name',
'datetime',
'tempmax',
'tempmin',
'temp',
'feelslikemax',
'feelslikemin',
'feelslike',
'dew',
'humidity',
'precip',
'precipprob',
'precipcover',
'snow',
'snowdepth',
'windgust',
'windspeed',
'winddir',
'sealevelpressure',
'cloudcover',
'visibility',
'solarradiation',
'solarenergy',
'uvindex',
'conditions'
]
new_data = pd.DataFrame(
data,
columns=col_names
)
new_data.datetime = new_data.datetime.apply(timestamp_2_time1)
    #new_data.rename(columns={'pressure':'sealevelpressure'})
return new_data
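
# Example (sketch): building the feature DataFrame. pd.DataFrame expects
# row-shaped input, so a single day is typically wrapped as one row before
# being passed in; the exact caller-side shape is an assumption here.
#
#   row = [day_record.get(c) for c in col_names]  # col_names as listed inside get_weather_df
#   weather_df = get_weather_df([row])
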
def timestamp_2_time1(x):
    # Convert a 'YYYY-MM-DD' date string to a Unix timestamp in milliseconds
dt_obj = datetime.strptime(str(x), '%Y-%m-%d')
dt_obj = dt_obj.timestamp() * 1000
return int(dt_obj)
def timestamp_2_time(x):
    # Convert a 'MM/DD/YYYY' date string to a Unix timestamp in milliseconds
dt_obj = datetime.strptime(str(x), '%m/%d/%Y')
dt_obj = dt_obj.timestamp() * 1000
return int(dt_obj)
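
# Example (sketch): both helpers return epoch milliseconds and differ only in
# the accepted date format.
#
#   timestamp_2_time1('2023-01-15')   # 'YYYY-MM-DD' input
#   timestamp_2_time('01/15/2023')    # 'MM/DD/YYYY' input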