import numpy as np
from tensorflow import keras
from dataset.MCsvDatasetOneHot import MCsvDatasetOneHot
from model.MOneHotDecisionModel import MOneHotDecisionModel
import matplotlib.pyplot as plt
from dataset.Utils import Utils

# --- Experiment configuration ---------------------------------------------
task_name = "min15_es_decision_m999"
# Local CSV exports of daily and 15-minute bars for the M999 contract
# (machine-specific paths; adjust per environment).
day_csv_url = "/Users/aloudata/Downloads/train_data/M999/M9999-DAY.XDCE.csv"
min_csv_url = "/Users/aloudata/Downloads/train_data/M999/M999-MIN15.csv"
# Number of 15-minute bars fed to the model per sample (lookback window).
min_lookup_offset = 15
learning_rate = 0.0002
kernel_initializer = keras.initializers.glorot_normal
activation = keras.activations.sigmoid
# Input shape: (lookback window, features per bar).
# NOTE(review): assumes each minute bar has 6 features — confirm against
# MCsvDatasetOneHot's output.
min_shape = (min_lookup_offset, 6)
epochs = 500
# True resumes training from a previously saved model instead of building anew.
if_second_train = False



def get_model_name(name_prefix):
    """Build the model name by suffixing the minute lookback window size."""
    return '{}_{}'.format(name_prefix, min_lookup_offset)


modelCreator = MOneHotDecisionModel(activation=activation,
                                    kernel_initializer=kernel_initializer,
                                    learning_rate=learning_rate,
                                    name=get_model_name(task_name),
                                    min_shape=min_shape,
                                    epochs=epochs)
# On a second training pass, resume from the saved checkpoint; otherwise
# build a fresh model.
model = modelCreator.load_model() if if_second_train else modelCreator.create_model()
model.summary()

# Render the model architecture diagram (disabled):
# keras.utils.plot_model(model, to_file=task_name + ".png", show_shapes=True, rankdir="LR")


# Build the dataset reader: each sample pairs a window of `min_lookup`
# 15-minute bars with a one-hot target `predict_offset` steps ahead.
dataset_reader = MCsvDatasetOneHot(day_csv=day_csv_url, min_csv=min_csv_url,
                                   predict_offset=1,
                                   min_lookup=min_lookup_offset)
# NOTE(review): kwarg semantics assumed from names — normalization scales
# features, filter_no_change drops samples with no price movement, and
# start_code restricts reading to contracts from "M2005" on; confirm
# against MCsvDatasetOneHot.read_min_and_day.
day_dataset, min_dataset, target_dataset = dataset_reader.read_min_and_day(normalization=True,
                                                                           # ds_cnt=20000,
                                                                           filter_no_change=True,
                                                                           start_code="M2005")

# Chronological split: last 5% of samples held out for validation.
total_cnt = len(min_dataset)
val_cnt = int(total_cnt * 0.05)
train_cnt = total_cnt - val_cnt

train_min_ds, val_min_ds = min_dataset[:train_cnt], min_dataset[train_cnt:]
train_target_ds, val_target_ds = target_dataset[:train_cnt], target_dataset[train_cnt:]
# NOTE(review): val_day_ds is never consumed later in this script.
val_day_ds = day_dataset[train_cnt:]

# Train on the minute-bar windows only; the daily dataset is not fed to
# the model anywhere in this script.
history = modelCreator.train_model(train_data=train_min_ds,
                                   train_target=train_target_ds,
                                   val_data=val_min_ds,
                                   val_target=val_target_ds
                                   )

# Persist the trained weights so a later run with if_second_train=True
# can resume from them.
modelCreator.save_model()


def visualize_loss(history, title):
    """Plot training vs. validation loss curves from a Keras History object.

    Expects history.history to contain "loss" and "val_loss" lists of
    equal length (one entry per epoch).
    """
    train_loss = history.history["loss"]
    valid_loss = history.history["val_loss"]
    epoch_axis = range(len(train_loss))
    plt.figure()
    plt.plot(epoch_axis, train_loss, "b", label="Training loss")
    plt.plot(epoch_axis, valid_loss, "r", label="Validation loss")
    plt.title(title)
    plt.xlabel("Epochs")
    plt.ylabel("Loss")
    plt.legend()
    plt.show()


visualize_loss(history, "Training and Validation Loss")

# Evaluate on the last 50 samples.
# NOTE(review): these samples overlap the validation split (everything past
# train_cnt), so this is not a truly held-out test set.
# NOTE(review): predict_day_ds is never used below.
predict_day_ds = day_dataset[total_cnt - 50:]
predict_min_ds = min_dataset[total_cnt - 50:]
predict_target_ds = target_dataset[total_cnt - 50:]

result = model.predict(predict_min_ds)

# Tally overall hit rate plus the split between correct "up" and correct
# "down" calls. Targets are one-hot; the code below treats index 0 == 1 as
# the "up" class, so up_down_cnt[0] is the number of up samples.
right_cnt = 0
up_right_cnt = 0
down_right_cnt = 0
up_down_cnt = np.array(predict_target_ds).sum(axis=0)
print(f'{result}')
for prediction, target in zip(result, predict_target_ds):
    if not Utils.is_predict_right_one_hot(prediction[0], target[0]):
        continue
    right_cnt += 1
    if target[0] == 1:
        up_right_cnt += 1
    else:
        down_right_cnt += 1

print(
    f'up right {up_right_cnt}/{up_down_cnt[0]}, down right {down_right_cnt}/{len(result) - up_down_cnt[0]}, '
    f'total={right_cnt}/{len(predict_target_ds)}')

print(f"Predict accuracy: {round(right_cnt * 100 / len(result), 2)}%")
