import tensorflow as tf
import os
import pandas as pd
import numpy as np
from sklearn.metrics import mean_squared_error as mse # mse
from sklearn.metrics import mean_absolute_error as mae # mae
from sklearn.metrics import mean_absolute_percentage_error as mape # mape
from sklearn.preprocessing import StandardScaler
import pandas as pd
import numpy as np
from skimage.restoration import denoise_wavelet
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import *
from keras.callbacks import ModelCheckpoint
from keras.losses import MeanSquaredError
from keras.metrics import RootMeanSquaredError
from keras.optimizers import Adam
from keras.models import *
from keras.layers import *
from keras.layers import concatenate
import tensorflow as tf
from tcn import TCN

# Load the merged dataset produced by the preprocessing stage.
df = pd.read_csv('../data_process/big_data.csv')

# Parse the timestamp column on its own; the model itself consumes only
# the numeric feature columns below.
dates = pd.to_datetime(df['date'])

# Keep columns 1..9 (everything except the leading 'date') as float features.
cols = list(df.columns)[1:10]
df = df[cols].astype(float)

def df_to_X_y(df, window_size=14):
  """Convert a feature DataFrame into sliding-window samples.

  Args:
    df: DataFrame whose FIRST column is the prediction target; every
      column is used as an input feature.
    window_size: number of consecutive rows per input sample.

  Returns:
    (X, y) where X has shape (n_samples, window_size, n_features) and
    y[i] is the first column's value immediately after window i
    (one-step-ahead target). Both are empty arrays when the frame has
    fewer than window_size + 1 rows.
  """
  df_as_np = df.to_numpy()
  n_samples = len(df_as_np) - window_size
  # Slice directly instead of copying row-by-row in an inner comprehension;
  # np.array() below materializes the copy once.
  X = [df_as_np[i:i + window_size] for i in range(n_samples)]
  y = [df_as_np[i + window_size][0] for i in range(n_samples)]
  return np.array(X), np.array(y)

def plot_predictions1(model, X, y, start=0, end=100):
  """Run *model* on X and score the predictions against y.

  NOTE(review): despite the name, this function no longer plots anything;
  it returns the raw predictions plus error metrics. The start/end
  parameters are currently unused — kept for call-site compatibility.

  Returns:
    (predictions, mse, mae, mape) as a 4-tuple.
  """
  preds = model.predict(X)
  metrics = (mse(y, preds), mae(y, preds), mape(y, preds))
  return (preds,) + metrics

# Standardize every feature column to zero mean / unit variance.
# NOTE(review): the scaler is fit on the FULL frame, so validation/test
# statistics leak into training — consider fitting on the train split only.
scaler = StandardScaler()
scaler = scaler.fit(df)
df_scaled = scaler.transform(df)

# Wavelet-denoise column 8 ('lowest' per the column list below).
# Fix: rescale_sigma expects a bool — the original passed the string 'True',
# which only behaved correctly because a non-empty string is truthy.
origin_data = df_scaled[:, 8]
data_denoise = denoise_wavelet(origin_data, method='BayesShrink', mode='soft',
                               wavelet_levels=3, wavelet='sym8',
                               rescale_sigma=True)
# TODO(review): data_denoise is never written back into df_scaled — confirm
# whether the denoised series was meant to replace column 8 before windowing.

df_scaled = pd.DataFrame(df_scaled, columns=['close','total_cases','new_cases_smoothed','total_deaths','new_deaths_smoothed','stringency_index','open','highest','lowest'])
x2, y2 = df_to_X_y(df_scaled, window_size=7)

# Chronological split: first 558 windows train, next 186 validate, rest test.
X2_train, y2_train = x2[:558], y2[:558]
X2_val, y2_val = x2[558:744], y2[558:744]
X2_test, y2_test = x2[744:], y2[744:]

print('完成数据处理')

