##########################   Compare optimizer results: SGD, Adam, RMSprop, and LSTM optimizee   ###############################
import copy
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import torch
import LTL

#################   optimizer model parameters   ##############################
DIM = 10
Layers = 2
Hidden_nums = 20
Input_DIM = DIM
Output_DIM = DIM
output_scale_value = 1
batch_size = 128
#######   grab the learned (LSTM-based) optimizer from the LTL module   #######
LSTM_Optimizee = LTL.LSTM_Optimizee
flag = False
if flag:  # idiomatic truthiness check instead of `flag == True`
    print('\n==== > load best LSTM model')
    # NOTE(review): `last_state_dict` is captured but never used afterwards —
    # presumably kept as a backup snapshot for a manual restore; confirm before removing.
    last_state_dict = copy.deepcopy(LSTM_Optimizee.state_dict())
    # Snapshot the current weights, then swap in the best checkpoint.
    torch.save(LSTM_Optimizee.state_dict(), 'final_LSTM_optimizer.pth')
    LSTM_Optimizee.load_state_dict(torch.load('best_LSTM_optimizer.pth'))

# Always load the best saved LSTM-optimizer weights, regardless of `flag`.
LTL.LSTM_Optimizee.load_state_dict(torch.load('best_LSTM_optimizer.pth'))
# LSTM_Optimizee.load_state_dict(torch.load('final_LSTM_optimizer.pth'))
STEPS = 30
x = np.arange(STEPS)  # x-axis: optimization step indices for the loss curves

Adam = 'Adam'  # Adam is selected by name here because it runs through PyTorch's built-in optimizer

for _ in range(10):  # repeat the experiment several times — the LSTM optimizee is unstable
    # One spec per optimizer: (legend label, optimizer object/name, retain-graph flag).
    # Only the LSTM optimizee needs retain_graph_flag=True.
    specs = [
        ('SGD', LTL.SGD, False),
        ('RMS', LTL.RMS, False),
        ('Adam', Adam, False),
        ('LSTM', LTL.LSTM_Optimizee, True),
    ]

    # Run every learner first (same order as before), collecting its loss curve
    # and summed loss alongside its label.
    results = []
    for label, optimizer, retain in specs:
        kwargs = dict(eval_flag=True, reset_theta=True)
        if retain:
            kwargs['retain_graph_flag'] = True
        learner = LTL.Learner(LTL.f, optimizer, STEPS, **kwargs)
        losses, sum_loss = learner()
        results.append((label, losses, sum_loss))

    # Dash patterns per curve (LSTM stays solid, as in the original plot).
    dash_patterns = {
        'SGD': [2, 2, 2, 2],
        'RMS': [4, 2, 8, 2],
        'Adam': [3, 2, 10, 2],
    }
    handles = []
    for label, losses, _ in results:
        line, = plt.plot(x, losses, label=label)
        if label in dash_patterns:
            line.set_dashes(dash_patterns[label])
        handles.append(line)

    plt.yscale('log')
    plt.legend(handles=handles)
    plt.title('Losses')
    plt.show()
    print("\n\nsum_loss:sgd={},rms={},adam={},lstm={}".format(
        results[0][2], results[1][2], results[2][2], results[3][2]))
