from setting import Setting
import data
import os
import numpy as np
import matplotlib.pyplot as plt
import keras
from keras import backend
from keras import layers
from keras import models
from keras import regularizers
from keras.models import Sequential

def myModel(set, num):
  """Build, train, save and plot a two-layer bidirectional-LSTM model.

  Args:
    set: Setting instance carrying the hyper-parameters read here
         (MAX_STEPS, FRONT_VOCAB_SIZE, LSTM_UNITS, DROPOUT_RATE,
         EPOCHS, BATCH_SIZE, CHECKPOINTS_PATH).
         NOTE(review): the parameter shadows the builtin ``set``; the name
         is kept so existing callers are unaffected.
    num: string prefix used in the saved model/plot file names.
  """
  # Input/output layer lists (the model currently has one of each).
  inputs = []
  outputs = []

  # Input layer: (time steps, vocabulary size)
  x_input = layers.Input((set.MAX_STEPS, set.FRONT_VOCAB_SIZE))
  # Bidirectional recurrent layer over the full sequence
  x = layers.Bidirectional(layers.LSTM(set.LSTM_UNITS, return_sequences=True))(x_input)
  # Dropout for regularization
  x = layers.Dropout(rate=set.DROPOUT_RATE)(x)
  # Per-timestep dense projection
  x = layers.TimeDistributed(layers.Dense(set.FRONT_VOCAB_SIZE * 3))(x)
  x = layers.Dropout(rate=set.DROPOUT_RATE)(x)
  # Flatten the time dimension before the dense head
  x = layers.Flatten()(x)
  # Fully-connected output head
  x = layers.Dense(set.FRONT_VOCAB_SIZE, activation='relu')(x)
  inputs.append(x_input)
  outputs.append(x)

  # Build the model
  model = models.Model(inputs, outputs)
  # BUG FIX: the original passed loss_weights=[1, 1, 1, 1, 1] to a model with
  # a single output; Keras expects one weight per output, so the mismatched
  # list is dropped (a single unweighted loss is equivalent).
  model.compile(optimizer=keras.optimizers.Adam(), metrics=['accuracy'],
    loss=keras.losses.categorical_crossentropy)
  # model.summary()

  # Fetch the train / test / predict splits
  train_x, train_y, test_x, test_y, predict_x, predict_y = data.getDate(set)
  # Directory containing this script
  path = os.path.dirname(os.path.realpath(__file__))

  # Create the checkpoints folder (idempotent and race-safe, unlike the
  # original exists()+mkdir pair)
  os.makedirs('{}/{}'.format(path, set.CHECKPOINTS_PATH), exist_ok=True)

  # Train for the configured number of epochs
  epoch = set.EPOCHS
  H = model.fit(train_x, train_y, validation_data=(test_x, test_y), batch_size=set.BATCH_SIZE, epochs=epoch)
  # Save the trained model
  model.save('{}/{}/{}model_{}_{}_{}_{}_{}.h5'.format(path,set.CHECKPOINTS_PATH, num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS))

  # Plot the loss/accuracy history and save it next to the model
  x = np.arange(0, epoch)
  plt.plot(x, H.history["loss"], label="loss")
  plt.plot(x, H.history["val_loss"], label="val_loss")
  plt.plot(x, H.history["accuracy"], label="accuracy")
  plt.plot(x, H.history["val_accuracy"], label="val_accuracy")
  plt.title('epoch:{}, BATCH_SIZE:{}, MAX_STEPS:{}, DROPOUT_RATE:{}, LSTM_UNITS:{}'.format(epoch,set.BATCH_SIZE,set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS))
  plt.legend()
  plt.savefig('{}/{}/{}model_{}_{}_{}_{}_{}.png'.format(path,set.CHECKPOINTS_PATH, num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS))
  plt.close()

  print('模型训练完成：{}_{}_{}_{}_{}'.format(epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS))



def simple(set, num):
  """Build and train a single-layer bidirectional-LSTM model, epoch by epoch.

  Training runs one epoch per fit() call so the loss can be checked for NaN
  after every epoch; on divergence the Keras session is cleared and the
  function aborts without saving anything.

  Args:
    set: Setting instance with the hyper-parameters read here
         (MAX_STEPS, FRONT_VOCAB_SIZE, LSTM_UNITS, DROPOUT_RATE,
         REGULARIZERS, EPOCHS, BATCH_SIZE, CHECKPOINTS_PATH).
         NOTE(review): shadows the builtin ``set``; name kept for
         compatibility with existing callers.
    num: string prefix for the saved model/plot file names.

  Returns:
    "" if training diverged (loss became NaN), otherwise None.
  """
  inputs = []
  outputs = []
  # Input layer: (time steps, vocabulary size)
  x_input = layers.Input((set.MAX_STEPS, set.FRONT_VOCAB_SIZE))
  # Single bidirectional LSTM with recurrent dropout and L2 regularization
  x = layers.Bidirectional(layers.LSTM(set.LSTM_UNITS,
    return_sequences=True,
    use_bias=True, bias_initializer=keras.initializers.Zeros(),
    recurrent_dropout=set.DROPOUT_RATE,
    kernel_regularizer=regularizers.l2(set.REGULARIZERS),
    activity_regularizer=regularizers.l2(set.REGULARIZERS)))(x_input)
  # Flatten the time dimension before the dense head
  x = layers.Flatten()(x)
  # Fully-connected output head
  x = layers.Dense(set.FRONT_VOCAB_SIZE, activation='relu')(x)
  inputs.append(x_input)
  outputs.append(x)
  # Build the model
  model = models.Model(inputs, outputs)
  # BUG FIX: the original passed loss_weights=[1, 1, 1, 1, 1] to a model with
  # a single output; Keras expects one weight per output, so the mismatched
  # list is removed (a single unweighted loss is equivalent).
  model.compile(optimizer=keras.optimizers.Adam(), metrics=['accuracy'],
    loss=keras.losses.categorical_crossentropy)

  # Fetch the train / test / predict splits
  train_x, train_y, test_x, test_y, predict_x, predict_y = data.getDate(set)
  # Directory containing this script
  path = os.path.dirname(os.path.realpath(__file__))
  # Create the model and plot folders (idempotent and race-safe)
  os.makedirs('{}/{}'.format(path, set.CHECKPOINTS_PATH), exist_ok=True)
  os.makedirs('{}/{}'.format(path, set.CHECKPOINTS_PATH + "_PNG"), exist_ok=True)

  # Train one epoch at a time, collecting the history manually
  epoch = set.EPOCHS
  print('模型训练开始：{}_{}_{}_{}_{}_{}_{}'.format(num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS,set.REGULARIZERS))
  loss = []
  val_loss = []
  accuracy = []
  val_accuracy = []
  for i in range(epoch):
    H = model.fit(train_x, train_y, validation_data=(test_x, test_y), batch_size=set.BATCH_SIZE, epochs=1)
    if np.isnan(H.history["loss"][0]):
      # Loss diverged: free the model graph and abort this configuration
      backend.clear_session()
      return ""
    loss.append(H.history["loss"][0])
    val_loss.append(H.history["val_loss"][0])
    accuracy.append(H.history["accuracy"][0])
    val_accuracy.append(H.history["val_accuracy"][0])

  # Plot the collected history and save the figure
  x = np.arange(0, epoch)
  plt.plot(x, loss, label="loss")
  plt.plot(x, val_loss, label="val_loss")
  plt.plot(x, accuracy, label="accuracy")
  plt.plot(x, val_accuracy, label="val_accuracy")
  plt.title('epoch:{}, BATCH_SIZE:{}, MAX_STEPS:{}, DROPOUT_RATE:{}, LSTM_UNITS:{}'.format(epoch,set.BATCH_SIZE,set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS))
  plt.legend()
  plt.savefig('{}/{}/{}_{}_{}_{}_{}_{}_{}.jpg'.format(path,set.CHECKPOINTS_PATH+"_PNG", num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS,set.REGULARIZERS), bbox_inches = 'tight')
  # Save the trained model
  model.save('{}/{}/{}_{}_{}_{}_{}_{}_{}.h5'.format(path,set.CHECKPOINTS_PATH, num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS,set.REGULARIZERS))
  # Free the model graph so repeated calls do not accumulate state
  backend.clear_session()
  plt.close()

  print('模型训练完成：{}_{}_{}_{}_{}_{}_{}'.format(num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS,set.REGULARIZERS))

def trainModel(set, num):
  """Like :func:`simple`, but loads its data via ``data.getDateFromExeQuery``.

  Builds a single bidirectional-LSTM model and trains it one epoch per
  fit() call so divergence (NaN loss) can be detected and aborted early.

  Args:
    set: Setting instance with the hyper-parameters read here
         (MAX_STEPS, FRONT_VOCAB_SIZE, LSTM_UNITS, DROPOUT_RATE,
         REGULARIZERS, EPOCHS, BATCH_SIZE, CHECKPOINTS_PATH).
         NOTE(review): shadows the builtin ``set``; name kept for
         compatibility with existing callers.
    num: string prefix for the saved model/plot file names.

  Returns:
    "" if training diverged (loss became NaN), otherwise None.
  """
  inputs = []
  outputs = []
  # Input layer: (time steps, vocabulary size)
  x_input = layers.Input((set.MAX_STEPS, set.FRONT_VOCAB_SIZE))
  # Single bidirectional LSTM with recurrent dropout and L2 regularization
  x = layers.Bidirectional(layers.LSTM(set.LSTM_UNITS,
    return_sequences=True,
    use_bias=True, bias_initializer=keras.initializers.Zeros(),
    recurrent_dropout=set.DROPOUT_RATE,
    kernel_regularizer=regularizers.l2(set.REGULARIZERS),
    activity_regularizer=regularizers.l2(set.REGULARIZERS)))(x_input)
  x = layers.Flatten()(x)
  x = layers.Dense(set.FRONT_VOCAB_SIZE, activation='relu')(x)
  inputs.append(x_input)
  outputs.append(x)
  model = models.Model(inputs, outputs)
  # BUG FIX: the original passed loss_weights=[1, 1, 1, 1, 1] to a model with
  # a single output; Keras expects one weight per output, so the mismatched
  # list is removed (a single unweighted loss is equivalent).
  model.compile(optimizer=keras.optimizers.Adam(), metrics=['accuracy'],
    loss=keras.losses.categorical_crossentropy)

  # Fetch the data splits (dict-shaped, unlike data.getDate)
  data_ = data.getDateFromExeQuery(set)
  train_x = data_["train_x"]
  train_y = data_["train_y"]
  test_x = data_["test_x"]
  test_y = data_["test_y"]
  # Directory containing this script
  path = os.path.dirname(os.path.realpath(__file__))
  # Create the model and plot folders (idempotent and race-safe)
  os.makedirs('{}/{}'.format(path, set.CHECKPOINTS_PATH), exist_ok=True)
  os.makedirs('{}/{}'.format(path, set.CHECKPOINTS_PATH + "_PNG"), exist_ok=True)

  # Train one epoch at a time, collecting the history manually
  epoch = set.EPOCHS
  print('模型训练开始：{}_{}_{}_{}_{}_{}_{}'.format(num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS,set.REGULARIZERS))
  loss = []
  val_loss = []
  accuracy = []
  val_accuracy = []
  for i in range(epoch):
    H = model.fit(train_x, train_y, validation_data=(test_x, test_y), batch_size=set.BATCH_SIZE, epochs=1)
    if np.isnan(H.history["loss"][0]):
      # Loss diverged: free the model graph and abort this configuration
      backend.clear_session()
      return ""
    loss.append(H.history["loss"][0])
    val_loss.append(H.history["val_loss"][0])
    accuracy.append(H.history["accuracy"][0])
    val_accuracy.append(H.history["val_accuracy"][0])

  # Plot the collected history and save the figure
  x = np.arange(0, epoch)
  plt.plot(x, loss, label="loss")
  plt.plot(x, val_loss, label="val_loss")
  plt.plot(x, accuracy, label="accuracy")
  plt.plot(x, val_accuracy, label="val_accuracy")
  plt.title('epoch:{}, BATCH_SIZE:{}, MAX_STEPS:{}, DROPOUT_RATE:{}, LSTM_UNITS:{}'.format(epoch,set.BATCH_SIZE,set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS))
  plt.legend()
  plt.savefig('{}/{}/{}_{}_{}_{}_{}_{}_{}.jpg'.format(path,set.CHECKPOINTS_PATH+"_PNG", num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS,set.REGULARIZERS), bbox_inches = 'tight')
  # Save the trained model
  model.save('{}/{}/{}_{}_{}_{}_{}_{}_{}.h5'.format(path,set.CHECKPOINTS_PATH, num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS,set.REGULARIZERS))
  # Free the model graph so repeated calls do not accumulate state
  backend.clear_session()
  plt.close()

  print('模型训练完成：{}_{}_{}_{}_{}_{}_{}'.format(num, epoch, set.BATCH_SIZE, set.MAX_STEPS,set.DROPOUT_RATE,set.LSTM_UNITS,set.REGULARIZERS))

if __name__ == '__main__':
  # Hyper-parameter grid search over simple(); resumes at index 151.
  import itertools

  # Renamed from `set` to avoid shadowing the builtin in module scope.
  cfg = Setting()

  # Hyper-parameter grid: 2*4*4*2*3*3 = 1152 combinations.
  EPOCHS_ = [20, 60]
  BATCH_SIZE_ = [8, 16, 32, 64]
  MAX_STEPS_ = [8, 16, 32, 64]
  DROPOUT_RATE_ = [0.3, 0.6]
  LSTM_UNITS_ = [16, 32, 64]
  REGULARIZERS_ = [0.3, 0.1, 0.01]

  # Cartesian product replaces the original six nested loops; same order.
  trainList = [list(combo) for combo in itertools.product(
      EPOCHS_, BATCH_SIZE_, MAX_STEPS_, DROPOUT_RATE_, LSTM_UNITS_, REGULARIZERS_)]

  # Resume the sweep at index 151 (earlier combinations already trained).
  for i in range(151, len(trainList)):
    epochs_v, batch_v, steps_v, drop_v, units_v, reg_v = trainList[i]
    cfg.setDef(
      EPOCHS_=epochs_v,
      BATCH_SIZE_=batch_v,
      MAX_STEPS_=steps_v,
      DROPOUT_RATE_=drop_v,
      LSTM_UNITS_=units_v,
      REGULARIZERS_=reg_v,
      LOTTO_DOWNLOAD_URL_=3000)
    # BUG FIX: the original hand-rolled padding only handled up to 3-digit
    # indices, but the grid has 1152 entries, so i >= 1000 produced
    # inconsistently-wide run ids ("a001000_"). A zero-padded format gives
    # the same ids for i < 1000 and a consistent width beyond.
    num = "a{:05d}_".format(i)
    try:
      simple(cfg, num)
    except Exception as err:
      # Best-effort sweep: log the failure and continue with the next combo.
      print(err)
