import numpy as np
import pandas as pd
from tensorflow.keras.callbacks import ReduceLROnPlateau, EarlyStopping
from tensorflow.keras.layers import LSTM, Dense, Dropout, Activation
from tensorflow.keras.models import Sequential
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_absolute_percentage_error
from sklearn.model_selection import train_test_split
from tensorflow.keras.layers import LeakyReLU
from keras.layers import LSTM,  Dense, Dropout, ELU, Bidirectional
from keras import backend as K
from sklearn.metrics import mean_absolute_percentage_error
from tensorflow.keras.layers import Reshape
from tensorflow.keras.models import load_model
import os
# Load the dataset (GBK-encoded CSV of hog-price data).
data = pd.read_csv('D:/Mypython/生猪价格数据集-学生.csv', encoding='gbk')
# Input feature columns (Chinese names: hog price, M2 money supply, total
# retail sales, urban disposable income, breeding-sow inventory, farming
# cost, beef price, chicken price, Baidu search index, SWS hog-farming
# sector index).
input_variables = ['生猪价格', 'M2', '社会消费品零售总额', '城镇居民可支配收入', '能繁母猪存栏', '养殖成本', '牛肉价格',
                   '白条鸡价格', '百度搜索指数', '申万行业指数——生猪养殖']
#, '生猪存栏', '豆粕价格', '玉米价格','规模以上生猪定点屠宰企业屠宰量',
# Extract the feature matrix (rows kept in file order, i.e. time order).
data_features = data[input_variables].values

# Scale every feature into [0, 1].
# NOTE(review): the scaler is fit on the FULL dataset before the train/test
# split further below, so test-set statistics leak into the scaling —
# confirm this is acceptable for the intended evaluation.
scaler = MinMaxScaler(feature_range=(0, 1))
data_features_scaled = scaler.fit_transform(data_features)

# Target series: hog price (also present as the first input feature).
data_target = data['生猪价格'].values

# Scale the target with a SEPARATE scaler so that predictions can be
# inverse-transformed back to price units later.
target_scaler = MinMaxScaler(feature_range=(0, 1))
data_target_scaled = target_scaler.fit_transform(data_target.reshape(-1, 1))

# 构建训练集的输入序列
def create_sequences(features, target, time_steps):
    """Build supervised sliding-window samples from aligned series.

    Sample ``k`` pairs the ``time_steps`` consecutive rows of ``features``
    ending just before index ``time_steps + k`` with
    ``target[time_steps + k]`` as its label.
    """
    sample_ends = range(time_steps, len(features))
    windows = [features[end - time_steps:end, :] for end in sample_ends]
    labels = [target[end] for end in sample_ends]
    return np.array(windows), np.array(labels)

# Look-back window length: each sample sees the previous 24 time steps.
time_steps = 24
X, y = create_sequences(data_features_scaled, data_target_scaled, time_steps)
# NOTE(review): train_test_split shuffles by default, so temporally adjacent
# (overlapping) windows land in both train and test sets — for a time-series
# forecast this leaks future information into training; consider a
# chronological split (e.g. sklearn TimeSeriesSplit) — confirm intent.
train_X, test_X, train_y, test_y = train_test_split(X, y, test_size=1/3, random_state=1)

# ---------------------------------------------------------------------------
# Build the BiLSTM regression model.
#
# FIX: the original passed return_sequences=True to the (only) LSTM layer, so
# the Dense stack emitted one prediction per time step — shape
# (batch, time_steps, 1) — while the target y has shape (batch, 1); the loss
# then depended on implicit broadcasting, and prediction needed an awkward
# `[:, -1, :]` slice.  With return_sequences=False the model emits a single
# (batch, 1) prediction whose shape matches the target exactly.
# ---------------------------------------------------------------------------
model = Sequential()
model.add(Bidirectional(
    LSTM(units=128, return_sequences=False,
         input_shape=(time_steps, len(input_variables))),
    merge_mode='concat'))
model.add(Dropout(0.2))
model.add(Activation('relu'))
model.add(Dense(64))
model.add(Dropout(0.2))
model.add(Activation('relu'))
model.add(Dense(32))
model.add(Activation('relu'))
model.add(Dense(16))
model.add(Dropout(0.2))
model.add(Activation('relu'))
model.add(Dense(1))  # single-step price forecast, in scaled [0, 1] units

# MSE regression loss with the Adam optimizer.
model.compile(loss='mean_squared_error', optimizer='adam')

# Training callbacks; both watch the TRAINING loss (no validation split).
# NOTE(review): EarlyStopping (patience=10) always fires before
# ReduceLROnPlateau (patience=20) can act, so the LR schedule never
# triggers — confirm whether the patience values were meant the other
# way round.
reduce_lr = ReduceLROnPlateau(monitor='loss', factor=0.8, patience=20,
                              verbose=1, mode='auto', min_lr=0)
early_stopping = EarlyStopping(monitor='loss', patience=10, verbose=1)

# Train, then predict on the held-out windows.
model.fit(train_X, train_y, epochs=80, batch_size=32,
          callbacks=[reduce_lr, early_stopping])
predictions = model.predict(test_X)  # already (n_samples, 1); no slicing needed
print(predictions)

# Map scaled predictions and targets back to price units.
predictions = target_scaler.inverse_transform(predictions)
test_y = target_scaler.inverse_transform(test_y)

# Report mean absolute percentage error on the original price scale.
mape = mean_absolute_percentage_error(test_y, predictions)
print("MAPE:%.2f%%"%(mape*100))

# Persist the trained model (legacy HDF5 format).
model.save('D:/Mypython/My_model2.h5')
# NOTE(review): the two triple-quoted strings below are disabled plotting
# code kept for reference.  They reference names never defined in this
# script (`hist`, `y_pred_future`, and `plt` for the first block), so they
# would fail if re-enabled as-is.
'''plt.figure()
loss = hist.history['loss']
val_loss = hist.history['val_loss']
epochs = range(len(loss))
plt.plot(epochs, loss, 'blue', label='Training loss')
plt.plot(epochs, val_loss, 'red', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()'''

'''
# 绘制预测结果图表
from matplotlib import pyplot as plt
plt.rcParams['font.family'] = ['sans-serif']
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.plot(y_pred_future, color='green', label='LSTM预测结果')
plt.title('预测')
plt.xlabel('时间')
plt.ylabel('生猪价格/元')
plt.legend()
plt.show()'''
