# tab/Generate.py
import sklearn  # required so pickled sklearn preprocessing pipelines can be unpickled
import pandas as pd

from tsai.basics import *
from tsai.inference import load_learner

import config
def get_inputs_from_user():
    # Placeholder: collecting inputs from the user is not implemented yet.
    return 0
def preprocess_data(DataFrame: pd.DataFrame) -> pd.DataFrame:
    # Apply the saved preprocessing pipeline to the raw dataframe.
    preproc = load_object(config.PREPROCESSOR_PATH)
    return preproc.fit_transform(DataFrame)
def preprocess_data_transform_generate_splits_Train(DataFrame: pd.DataFrame):
    # Drop unused columns and apply the saved preprocessing and scaling pipelines.
    DataFrame = DataFrame.drop(config.DROP_COLOUMNS, axis=1)
    preproc_pipe = load_object(config.PREPROCESSOR_PATH)
    exp_pipe = load_object(config.SCALING_DATA)
    DataFrame = preproc_pipe.fit_transform(DataFrame)
    print("dataframe processed and ready for splitting")

    # Build train/valid/test splits and the (X, y) forecasting windows.
    splits = get_forecasting_splits(DataFrame, fcst_history=config.FCST_HISTORY,
                                    fcst_horizon=config.FCST_HORIZON,
                                    datetime_col=config.DATETIME_COL,
                                    valid_size=config.VALID_SIZE, test_size=config.TEST_SIZE)
    X, y = prepare_forecasting_data(DataFrame, fcst_history=config.FCST_HISTORY,
                                    fcst_horizon=config.FCST_HORIZON,
                                    x_vars=config.COLOUMNS, y_vars=config.COLOUMNS)

    # Create the forecaster, find a learning rate, train, and export the fitted learner.
    learn = TSForecaster(X, y, splits=splits,
                         batch_size=16, path='models',
                         arch='InceptionTimePlus',  # use "PatchTST" when PatchTST is to be used
                         pipelines=[preproc_pipe, exp_pipe],
                         # arch_config=config.ARCH_CONFIG,  # uncomment only if PatchTST is used
                         metrics=[mse, mape],
                         cbs=ShowGraph())
    lr_max = learn.lr_find().valley
    learn.fit_one_cycle(n_epoch=config.N_EPOCH, lr_max=lr_max)
    learn.export("model_in.pt")
    return 0

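# Hedged usage sketch: a thin wrapper showing how the training entry point above might be
# driven from a CSV file. This helper and its csv_path argument are illustrative only; they
# are not part of the original module or of the tsai API.
def example_train_from_csv(csv_path: str):
    # Read the raw data; the datetime column name comes from config, as in training above.
    df = pd.read_csv(csv_path, parse_dates=[config.DATETIME_COL])
    # Preprocess, split, train, and export models/model_in.pt.
    preprocess_data_transform_generate_splits_Train(df)
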
# When using the PatchTST model, use the function below.
def inference_Aircomp(fcst_date: str, DataFrame: pd.DataFrame):
    # Preprocess the raw data and keep the FCST_HISTORY timestamps ending at fcst_date.
    pre = load_object(config.AIR_PREPROCESSOR_PATH)
    DataFrame = pre.fit_transform(DataFrame)
    dates = pd.date_range(end=fcst_date, periods=config.FCST_HISTORY, freq=config.FREQUENCY)
    new_df = DataFrame[DataFrame[config.AIR_DATETIME_COL].isin(dates)].reset_index(drop=True)

    # Load the exported learner, build the input window, and predict.
    predict = load_learner(config.MODEL_PATH_ITP_AIR)
    new_df = predict.transform(new_df)
    new_x, _ = prepare_forecasting_data(new_df, fcst_history=config.FCST_HISTORY, fcst_horizon=0,
                                        x_vars=config.AIR_COLOUMNS, y_vars=config.AIR_COLOUMNS)
    new_scaled_preds, *_ = predict.get_X_preds(new_x)
    new_scaled_preds = to_np(new_scaled_preds).swapaxes(1, 2).reshape(-1, len(config.AIR_COLOUMNS))

    # Attach the forecast timestamps and invert the scaling back to the original units.
    dates = pd.date_range(start=fcst_date, periods=config.FCST_HORIZON + 1, freq='1H')[1:]
    preds_df = pd.DataFrame(dates, columns=[config.AIR_DATETIME_COL])
    preds_df.loc[:, config.AIR_COLOUMNS] = new_scaled_preds
    preds_df = predict.inverse_transform(preds_df)
    return preds_df

def inference_Energy(fcst_date: str, DataFrame: pd.DataFrame):
    # Parse the timestamp column, preprocess, and keep the FCST_HISTORY window ending at fcst_date.
    pre = load_object(config.ENER_PREPROCESSOR_PATH)
    DataFrame[config.ENERGY_DATETIME] = pd.to_datetime(DataFrame[config.ENERGY_DATETIME], format='mixed')
    DataFrame = pre.fit_transform(DataFrame)
    dates = pd.date_range(end=fcst_date, periods=config.FCST_HISTORY, freq=config.FREQUENCY)
    new_df = DataFrame[DataFrame[config.ENERGY_DATETIME].isin(dates)].reset_index(drop=True)

    # Load the exported learner, build the input window, and predict.
    predict = load_learner(config.MODEL_PATH_ITP_ENER)
    new_df = predict.transform(new_df)
    new_x, _ = prepare_forecasting_data(new_df, fcst_history=config.FCST_HISTORY, fcst_horizon=0,
                                        x_vars=config.ENERGY_COLOUMNS, y_vars=config.ENERGY_COLOUMNS)
    new_scaled_preds, *_ = predict.get_X_preds(new_x)
    new_scaled_preds = to_np(new_scaled_preds).swapaxes(1, 2).reshape(-1, len(config.ENERGY_COLOUMNS))

    # Attach the forecast timestamps and invert the scaling back to the original units.
    dates = pd.date_range(start=fcst_date, periods=config.FCST_HORIZON + 1, freq='1H')[1:]
    preds_df = pd.DataFrame(dates, columns=[config.ENERGY_DATETIME])
    preds_df.loc[:, config.ENERGY_COLOUMNS] = new_scaled_preds
    preds_df = predict.inverse_transform(preds_df)
    return preds_df

def inference_boiler(fcst_date: str, DataFrame: pd.DataFrame):
    # Preprocess the raw data and keep the FCST_HISTORY window ending at fcst_date.
    pre = load_object(config.BOILER_PREPROCESSOR_PATH)
    DataFrame = pre.fit_transform(DataFrame)
    dates = pd.date_range(end=fcst_date, periods=config.FCST_HISTORY, freq=config.FREQUENCY)
    new_df = DataFrame[DataFrame[config.BOILER_DATETIME].isin(dates)].reset_index(drop=True)

    # Load the exported learner, build the input window, and predict.
    predict = load_learner(config.MODEL_PATH_ITP_BOIL)
    new_df = predict.transform(new_df)
    new_x, _ = prepare_forecasting_data(new_df, fcst_history=config.FCST_HISTORY, fcst_horizon=0,
                                        x_vars=config.BOILER_COLOUMNS, y_vars=config.BOILER_COLOUMNS)
    new_scaled_preds, *_ = predict.get_X_preds(new_x)
    new_scaled_preds = to_np(new_scaled_preds).swapaxes(1, 2).reshape(-1, len(config.BOILER_COLOUMNS))

    # Attach the forecast timestamps and invert the scaling back to the original units.
    dates = pd.date_range(start=fcst_date, periods=config.FCST_HORIZON + 1, freq='1H')[1:]
    preds_df = pd.DataFrame(dates, columns=[config.BOILER_DATETIME])
    preds_df.loc[:, config.BOILER_COLOUMNS] = new_scaled_preds
    preds_df = predict.inverse_transform(preds_df)
    return preds_df
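
# Hedged usage sketch: how the inference helpers above might be invoked. The CSV file name and
# the forecast date below are placeholders, not values defined anywhere in this repository.
# Each inference_* function expects the raw dataframe for its unit plus a timestamp string
# marking the end of the history window; forecasts start at the next hourly step.
if __name__ == "__main__":
    raw_df = pd.read_csv("air_compressor.csv")  # placeholder file name
    forecast = inference_Aircomp("2023-01-01 00:00:00", raw_df)  # placeholder date
    print(forecast.head())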