import h2o
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.preprocessing import MinMaxScaler, StandardScaler
import os
import numpy as np
np.random.seed(225)  # for reproducibility
import tensorflow as tf
import keras.backend.tensorflow_backend as KTF
from keras.datasets import mnist
from keras.models import Model, Sequential  # 泛型模型
from keras.layers import Dense, Input, Dropout

## TensorFlow 1.x session configuration: cap GPU memory usage at 90% per GPU
os.environ["CUDA_VISIBLE_DEVICES"]="0" # expose only GPU #0 to this process
config = tf.ConfigProto(gpu_options = tf.GPUOptions(allow_growth=True))
config.gpu_options.per_process_gpu_memory_fraction = 0.9 # limit each GPU to 90% of its memory
session = tf.Session(config=config)
## register the configured session as the Keras backend session
KTF.set_session(session)

class H2Odeeplearning:
	"""Rolling one-day-ahead return prediction with H2O deep learning.

	``data`` is a pandas DataFrame whose first column is renamed to
	'Returns' (a daily return, e.g. 0.015) and whose second column is
	renamed to 'Date' (a datetime.date).  Remaining columns are used as
	features.
	"""

	def __init__(self, data):
		self.data = data.reset_index(drop=True)
		# Normalise the first two column names so the rest of the class
		# can rely on 'Returns' and 'Date'.
		self.data = self.data.rename(
			columns={self.data.columns.values[0]: 'Returns'}, inplace=False)
		self.data = self.data.rename(
			columns={self.data.columns.values[1]: 'Date'}, inplace=False)

		# Transform the pandas DataFrame into an H2O frame.  The first row
		# is dropped -- presumably a header/placeholder row carried over by
		# from_python; TODO confirm against the caller's data layout.
		self.h2odata = h2o.H2OFrame.from_python(
			self.data, column_names=list(self.data.columns))
		self.h2odata = self.h2odata[1:self.h2odata.shape[0], :]

	def online_dl_with_autoencoder_once_a_day(self, insample_date_until,
		HiddenLayer=None, HiddenDropout=None):
		"""Online (walk-forward) learning: retrain once per day, predict the next day.

		Once a model is trained on all rows before day ``i``, it predicts
		row ``i``; this rolls forward until the very end of the data.
		There must be one column called 'Date' (datetime.date values) and
		another called 'Returns' (the return over a fixed intraday
		interval, e.g. from 11:15 to 15:00).

		:param insample_date_until: first out-of-sample date; parseable by
			``pd.to_datetime``.
		:param HiddenLayer: hidden layer sizes (default [7, 5, 3]).
		:param HiddenDropout: per-hidden-layer dropout ratios
			(default [0.2, 0.2, 0.2]).
		:return: DataFrame with columns ['Date', 'Prediction'].

		The autoencoder-pretraining framework is based on
		https://github.com/h2oai/h2o-3/blob/master/h2o-py/tests/testdir_algos/deeplearning/pyunit_autoencoderDeepLearning_large.py
		"""
		# Avoid mutable default arguments (shared across calls).
		if HiddenLayer is None:
			HiddenLayer = [7, 5, 3]
		if HiddenDropout is None:
			HiddenDropout = [0.2, 0.2, 0.2]

		# Index of the first out-of-sample day.
		start_index = np.array(self.data.index)[self.data['Date'] >= pd.to_datetime(insample_date_until).date()][0]

		# Result container.
		result = pd.DataFrame()

		# Autoencoder used only for weight initialization (pretraining).
		pretrainedmodel = h2o.estimators.H2OAutoEncoderEstimator(
			activation="Tanh",
			model_id="pretrainedmodel",
			hidden=HiddenLayer,
			hidden_dropout_ratios=HiddenDropout,
			input_dropout_ratio=0,
			standardize=True,
			seed=225,
			train_samples_per_iteration=-1,
			epochs=30)

		# Supervised deep-learning model initialized from the autoencoder.
		model = h2o.estimators.H2ODeepLearningEstimator(
			activation="Tanh",
			pretrained_autoencoder="pretrainedmodel",
			model_id="model",
			hidden=HiddenLayer,
			hidden_dropout_ratios=HiddenDropout,
			input_dropout_ratio=0,
			standardize=True,
			seed=225,
			train_samples_per_iteration=-1,
			epochs=30)

		# Loop-invariant: feature columns are everything except target/date.
		feature_cols = list(self.data.columns.difference(["Returns", "Date"]))

		for i in range(start_index, self.data.shape[0]):
			# All rows strictly before day i form the training window.
			training_frame = self.h2odata[range(0, i), :]

			# Pretrain the autoencoder on the window.
			pretrainedmodel.train(
				x=feature_cols,
				training_frame=training_frame)

			# Fine-tune the supervised model (weights seeded from the
			# pretrained autoencoder via model_id reference above).
			model.train(
				x=feature_cols,
				y="Returns",
				training_frame=training_frame)

			# Predict day i only.
			pred = model.predict(self.h2odata[i, :]).as_data_frame()
			pred = pd.DataFrame(
				[self.data.iloc[i]["Date"], pred.values[0][0]]).transpose()
			pred.columns = ["Date", "Prediction"]
			# DataFrame.append was removed in pandas 2.x; concat is the
			# forward-compatible equivalent.
			result = pd.concat([result, pred], ignore_index=True)
			print(pred)

		return result

class KerasDeeplearning:
	"""Rolling one-day-ahead return prediction with Keras networks.

	``data`` is a pandas DataFrame whose first column is renamed to
	'Returns' and whose second to 'Date'; the first row is dropped.
	``train_id`` tags the temporary weight files so concurrent runs do not
	clobber each other.
	"""

	def __init__(self, data, train_id):
		self.data = data.reset_index(drop=True)
		self.data = self.data.rename(
			columns={self.data.columns.values[0]: 'Returns'}, inplace=False)
		self.data = self.data.rename(
			columns={self.data.columns.values[1]: 'Date'}, inplace=False)
		# Drop the first row (mirrors H2Odeeplearning) -- TODO confirm why
		# the caller's first row must be discarded.
		self.data = self.data[1:self.data.shape[0]]
		self.train_id = train_id

	@staticmethod
	def _dense_dropout_stack(tensor, HiddenLayer, HiddenDropout):
		"""Append one tanh Dense + Dropout pair per entry of HiddenLayer.

		Shared by ANN_model, auto_encoder and fusion_model, which previously
		each duplicated this loop.
		"""
		for size, rate in zip(HiddenLayer, HiddenDropout):
			tensor = Dense(size, activation='tanh')(tensor)
			tensor = Dropout(rate)(tensor)
		return tensor

	def _prepare_training_data(self, insample_date_until):
		"""Return (start_index, standardized feature matrix X, target vector Y).

		start_index is the positional index of the first out-of-sample day.
		NOTE(review): the scaler is fit on the FULL data set, including the
		out-of-sample rows -- possible look-ahead bias; confirm intent.
		"""
		start_index = np.array(self.data.index)[self.data['Date'] >= pd.to_datetime(insample_date_until).date()][0]
		X = self.data[self.data.columns.difference(["Returns", "Date"])]
		X = StandardScaler().fit_transform(X)
		Y = self.data["Returns"].values
		return start_index, X, Y

	def _prediction_row(self, i, value):
		"""Wrap one prediction as a single-row DataFrame ['Date', 'Prediction']."""
		row = pd.DataFrame([self.data.iloc[i]["Date"], value]).transpose()
		row.columns = ["Date", "Prediction"]
		return row

	def ANN_model(self, input_dim, HiddenLayer, HiddenDropout):
		"""Build and compile the supervised regression network (adam / mse)."""
		# 'net_input' instead of 'input' to avoid shadowing the builtin.
		net_input = Input(shape=(input_dim,))
		hidden = self._dense_dropout_stack(net_input, HiddenLayer, HiddenDropout)
		# Linear single-unit output: plain regression head.
		output = Dense(1, activation='linear')(hidden)
		model = Model(inputs=net_input, outputs=output)
		print('训练器------------------------------------------------------------------\n', model.summary())
		model.compile(optimizer='adam', loss='mse')
		return model

	def auto_encoder(self, input_dim, HiddenLayer, HiddenDropout):
		"""Build and compile the autoencoder used for weight pretraining.

		The output layer maps the last hidden layer straight back to
		input_dim (there is no mirrored decoder stack).
		"""
		encoder_input = Input(shape=(input_dim,))
		hidden = self._dense_dropout_stack(encoder_input, HiddenLayer, HiddenDropout)
		encoder_output = Dense(input_dim, activation='tanh')(hidden)
		encoder = Model(inputs=encoder_input, outputs=encoder_output)
		print('自编码器------------------------------------------------------------------\n', encoder.summary())
		encoder.compile(optimizer='adam', loss='mse')
		return encoder

	def DL_rolling_fusionmodel(self, insample_date_until,
							   HiddenLayer=None, HiddenDropout=None):
		"""Walk-forward prediction with the fused autoencoder+regressor network.

		For each day i from the first out-of-sample date to the end, refit
		the fused model on rows [0, i) and predict row i.

		:return: DataFrame with columns ['Date', 'Prediction'].
		"""
		# Avoid mutable default arguments.
		if HiddenLayer is None:
			HiddenLayer = [7, 5, 3]
		if HiddenDropout is None:
			HiddenDropout = [0.2, 0.2, 0.2]

		start_index, X, Y = self._prepare_training_data(insample_date_until)
		result = pd.DataFrame()
		fushion_model = self.fusion_model(X.shape[1], HiddenLayer, HiddenDropout)

		for i in range(start_index, X.shape[0]):
			# Train the fused model on everything seen so far.
			X_train = X[0:i, :]
			Y_train = Y[0:i]
			fushion_model.fit(X_train, Y_train, epochs=30, batch_size=256, verbose=False)
			# Predict day i only.
			pred = fushion_model.predict(np.array([X[i]]))
			row = self._prediction_row(i, pred[0][0])
			# DataFrame.append was removed in pandas 2.x; use concat.
			result = pd.concat([result, row], ignore_index=True)
			print(row)

		return result

	def DL_rolling_autoencoder_ANN(self, insample_date_until, HiddenLayer=None, HiddenDropout=None):
		"""Walk-forward prediction: pretrain an autoencoder, transfer weights, fit ANN.

		For each day i, the autoencoder is fit on rows [0, i), its weights
		saved to disk and loaded into the ANN by name, then the ANN is fit
		on the same window and predicts row i.

		:return: DataFrame with columns ['Date', 'Prediction'].
		"""
		# Avoid mutable default arguments.
		if HiddenLayer is None:
			HiddenLayer = [7, 5, 3]
		if HiddenDropout is None:
			HiddenDropout = [0.2, 0.2, 0.2]

		start_index, X, Y = self._prepare_training_data(insample_date_until)
		result = pd.DataFrame()

		input_dim = X.shape[1]
		pretrainedmodel = self.auto_encoder(input_dim, HiddenLayer, HiddenDropout)
		model = self.ANN_model(input_dim, HiddenLayer, HiddenDropout)

		# Historic filename typo ('weigths') kept so existing runs/files
		# stay compatible.
		weight_path = 'weigths' + self.train_id + '.h5'

		for i in range(start_index, X.shape[0]):
			X_train = X[0:i, :]
			Y_train = Y[0:i]
			pretrainedmodel.fit(X_train, X_train, epochs=30, batch_size=4096, verbose=False)
			# Transfer pretrained weights via a temporary file.
			# NOTE(review): by_name=True matches layers by their (auto-
			# generated) names; since no layer is explicitly named, verify
			# that any weights are actually transferred here.
			pretrainedmodel.save_weights(weight_path)
			model.load_weights(weight_path, by_name=True)
			model.fit(X_train, Y_train, epochs=30, batch_size=256, verbose=False)
			pred = model.predict(np.array([X[i]]))

			row = self._prediction_row(i, pred[0][0])
			# DataFrame.append was removed in pandas 2.x; use concat.
			result = pd.concat([result, row], ignore_index=True)
			print(row)

		return result

	def fusion_model(self, input_dim, HiddenLayer, HiddenDropout):
		"""Build the fused network: encoder stack, reconstruction layer, regressor stack.

		The autoencoder's reconstruction layer feeds directly into the
		regressor's hidden stack, so the whole thing trains end-to-end on
		the regression loss only.
		"""
		# Autoencoder input layer.
		input_img = Input(shape=(input_dim,))
		# Autoencoder encoding stack.
		encoded = self._dense_dropout_stack(input_img, HiddenLayer, HiddenDropout)
		# Autoencoder reconstruction layer (back to input_dim).
		encoder_output = Dense(input_dim, activation='tanh')(encoded)

		# Regressor hidden stack, fed by the reconstruction layer.
		hidden = self._dense_dropout_stack(encoder_output, HiddenLayer, HiddenDropout)

		# Regressor output layer.
		model_output = Dense(1, activation='linear')(hidden)
		model = Model(inputs=input_img, outputs=model_output)
		print('混合模型----------------------------------------------------------------------\n', model.summary())

		model.compile(optimizer='adam', loss='mse')

		return model

