# -*- coding:utf8 -*-
import argparse
import datetime
import os

import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from numpy import mean, absolute

"""
midea_draw_loss.py
使用说明
本脚本需在linux下运行。
1. 使用前安装依赖
pip install argparse -i https://pypi.tuna.tsinghua.edu.cn/simple
pip install matplotlib -i https://pypi.tuna.tsinghua.edu.cn/simple
2. 参考: 不用执行
#截取npu_loss
grep 'loss:' 0610mindspore.log | awk -F 'step:' '{print $2}'| awk -F 'loss:' '{print $2}'| awk -F 'loss:' '{print $1}' |  awk -F ',' '{print $1}' > npu_loss.txt
#截取gpu_loss
grep 'lm loss:' 0610torch_gpu.log | awk -F 'lm loss:' '{print $2}' |  awk -F '|' '{print $1}'  > gpu_loss.txt
#截取npu_lr
grep 'loss:' 0610mindspore.log | awk -F 'lr:' '{print $2}'| awk -F ',' '{print $1}' > npu_lr.txt
#截取gpu_lr
grep 'lm loss:' 0610_gpu.log | awk -F 'learning rate' '{print $2}' | awk -F '|' '{print $1}' > gpu_lr.txt 
"""

LOG_BASE_DIR = "E:/电信AI_telechat/log"
LOSS_FILE_DICT = {
    # "3b_64_wechat_high_precision": LOG_BASE_DIR + "/npu_loss.txt",
}
LR_FILE_DICT = {
    # "3b_64_wechat_high_precision": LOG_BASE_DIR + "/npu_lr.txt",
}

COST_FILE_DICT = {
    # "3b_64_wechat_high_precision": LOG_BASE_DIR + "/npu_lr.txt",
}

NORM_FILE_DICT = {
    # "3b_64_wechat_high_precision": LOG_BASE_DIR + "/npu_lr.txt",
}

TITLE_NAME = ""
DATASET_NAME = ""
# NPU每多少步打印一次LOSS
PRINT_LOSS_STEPS = 1
DATE_STR = ""
PLOT_DIFF = False
LINE_WIDTH = 0.4


def pre_process_npu(file_path):
    """Extract loss / lr / per-step-time / global-norm series from a
    MindSpore (NPU) training log.

    Shells out to dos2unix + grep/awk/sed (Linux only) to write one
    number-per-line file per metric next to ``file_path`` and returns the
    four generated paths as ``(loss, lr, cost, norm)``.  Returns ``None``
    when the log file does not exist.
    """
    print(f"\n------pre_process_npu-------")
    if not os.path.exists(file_path):
        print(f"file not exists:{file_path}")
        return
    base_name = os.path.splitext(file_path)[0]
    loss_np_file = base_name + ".loss"
    lr_np_file = base_name + ".lr"
    cost_np_file = base_name + ".cost"
    norm_np_file = base_name + ".norm"
    commands = [
        # Normalise line endings first so grep/awk see clean lines.
        f"dos2unix {file_path}",
        f"grep 'loss:' {file_path} | awk -F 'step:' '{{print $2}}'| awk -F 'loss:' '{{print $2}}'| awk -F 'loss:' '{{print $1}}' |  awk -F ',' '{{print $1}}' > {loss_np_file}",
        f"grep 'loss:' {file_path} | awk -F 'lr:' '{{print $2}}'| awk -F ',' '{{print $1}}' > {lr_np_file}",
        f"grep 'loss:' {file_path} | awk -F 'per_step_time:' '{{print $2}}'| awk -F ',' '{{print $1}}' | awk -F 'ms' '{{print $1}}'> {cost_np_file}",
        f"grep 'loss:' {file_path} | awk -F 'global_norm:' '{{print $2}}' | awk -F ',' '{{print $1}}' | sed 's/[^0-9.]//g' > {norm_np_file}",
    ]
    # Echo every command before running, matching the original output order.
    for command in commands:
        print(command)
    for command in commands:
        os.system(command)
    return loss_np_file, lr_np_file, cost_np_file, norm_np_file


def pre_process_gpu(file_path):
    """Extract loss / lr / per-step-time / grad-norm series from a
    Megatron-style (GPU) training log.

    Shells out to dos2unix + grep/awk (Linux only) to write one
    number-per-line file per metric next to ``file_path`` and returns the
    four generated paths as ``(loss, lr, cost, norm)``.  Returns ``None``
    when the log file does not exist.
    """
    print(f"\n------pre_process_gpu-------")
    if not os.path.exists(file_path):
        print(f"file not exists:{file_path}")
        return
    base_name = os.path.splitext(file_path)[0]
    loss_np_file = base_name + ".loss"
    lr_np_file = base_name + ".lr"
    cost_np_file = base_name + ".cost"
    norm_np_file = base_name + ".norm"
    commands = [
        # Normalise line endings first so grep/awk see clean lines.
        f"dos2unix {file_path}",
        f"grep 'lm loss:' {file_path} | awk -F 'lm loss:' '{{print $2}}' |  awk -F '|' '{{print $1}}'  >{loss_np_file}",
        f"grep 'lm loss:' {file_path} | awk -F 'learning rate' '{{print $2}}' | awk -F '|' '{{print $1}}' > {lr_np_file}",
        f"grep 'lm loss:' {file_path} | awk -F 'elapsed time per iteration' '{{print $2}}' | awk -F '|' '{{print $1}}' | awk -F ':' '{{print $2}}' > {cost_np_file}",
        f"grep 'lm loss:' {file_path} | awk -F 'grad norm:' '{{print $2}}' | awk -F '|' '{{print $1}}' > {norm_np_file}",
    ]
    # Echo every command before running, matching the original output order.
    for command in commands:
        print(command)
    for command in commands:
        os.system(command)
    return loss_np_file, lr_np_file, cost_np_file, norm_np_file


def get_date_str():
    """Return the current local time formatted as ``YYYY-MM-DD HH:MM``.

    Fixes the original format string, which used ``%m`` (month) instead of
    ``%M`` (minute) in the time part, producing e.g. ``12:06`` at noon in
    June regardless of the actual minute.
    """
    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M")


def plot_lr():
    """Plot the learning-rate curve of every run registered in LR_FILE_DICT.

    All series are truncated to the shortest run so they share one x axis;
    the figure is written to ``LOG_BASE_DIR/lr.png`` and then shown.
    Returns early (instead of raising) when no data file could be loaded.
    """
    print("\n---plot_lr----")
    lr_dict = LR_FILE_DICT
    matplotlib.use('TkAgg')
    lr_data = {}
    for title, path in lr_dict.items():
        if not os.path.exists(path):
            print(f"file not exists:{path}")
            continue
        if "gpu" in title:
            # PyTorch logs every step while MindSpore logs every
            # PRINT_LOSS_STEPS steps, so subsample GPU data to align them.
            # atleast_1d: loadtxt returns a 0-d array for a one-line file,
            # which would break len()/slicing below.
            lr_data[title] = np.atleast_1d(np.loadtxt(path))[::PRINT_LOSS_STEPS]
            print(title, lr_data[title])
        else:
            if "3b_old+dpmppp+no_drop.lr" in path:
                # Special case: this NPU run logged twice as often.
                lr_data[title] = np.atleast_1d(np.loadtxt(path))[::2]
                print("3b_old+mppp+no_drop.lr")
                continue
            lr_data[title] = np.atleast_1d(np.loadtxt(path))
    if not lr_data:
        # Guard: min() below raises ValueError on an empty collection.
        print("no lr data loaded, nothing to plot")
        return
    labels = list(lr_data.keys())
    min_data_len = min(len(series) for series in lr_data.values())
    data_plot = [series[:min_data_len] for series in lr_data.values()]
    axis = np.arange(0, min_data_len * PRINT_LOSS_STEPS, PRINT_LOSS_STEPS)
    print(">>>axis.shape:", axis.shape)
    print(">>>axis::", axis)
    print(labels)
    plt.figure(figsize=(10, 6))
    for d in data_plot:
        plt.plot(axis, d, linewidth=LINE_WIDTH)
        print(d)
    plt.legend(labels=labels)
    plt.xlabel(f"step  \n {DATE_STR}")
    plt.ylabel("learning rate")
    title = f"{TITLE_NAME} Learning Rate" if len(DATASET_NAME) == 0 \
        else f"{TITLE_NAME} Learning Rate \n dataset:{DATASET_NAME}"
    plt.title(title)
    plt.grid(True)
    plt.savefig(f'{LOG_BASE_DIR}/lr.png', dpi=600, bbox_inches='tight')
    plt.show()


def plot_time_cost():
    """Plot the per-step time curve of every run in COST_FILE_DICT.

    All series are truncated to the shortest run so they share one x axis;
    the figure is written to ``LOG_BASE_DIR/time_cost.png`` and then shown.
    Returns early (instead of raising) when no data file could be loaded.
    """
    print("\n---plot_time_cost----")
    cost_dict = COST_FILE_DICT
    matplotlib.use('TkAgg')
    cost_data = {}
    for title, path in cost_dict.items():
        if not os.path.exists(path):
            print(f"file not exists:{path}")
            continue
        if "gpu" in title:
            # PyTorch logs every step while MindSpore logs every
            # PRINT_LOSS_STEPS steps, so subsample GPU data to align them.
            # atleast_1d: loadtxt returns a 0-d array for a one-line file,
            # which would break len()/slicing below.
            cost_data[title] = np.atleast_1d(np.loadtxt(path))[::PRINT_LOSS_STEPS]
        else:
            if "3b_old+dpmppp+no_drop.cost" in path:
                # Special case: this NPU run logged twice as often.
                cost_data[title] = np.atleast_1d(np.loadtxt(path))[::2]
                print("3b_old+mppp+no_drop.cost")
                continue
            cost_data[title] = np.atleast_1d(np.loadtxt(path))
    if not cost_data:
        # Guard: min() below raises ValueError on an empty collection.
        print("no time-cost data loaded, nothing to plot")
        return
    labels = list(cost_data.keys())
    min_data_len = min(len(series) for series in cost_data.values())
    data_plot = [series[:min_data_len] for series in cost_data.values()]
    axis = np.arange(0, min_data_len * PRINT_LOSS_STEPS, PRINT_LOSS_STEPS)
    print(">>>axis.shape:", axis.shape)
    print(">>>axis::", axis)
    print(labels)
    plt.figure(figsize=(10, 6))
    for d in data_plot:
        plt.plot(axis, d, linewidth=LINE_WIDTH)
        print(d)
    plt.legend(labels=labels)
    plt.xlabel(f"step  \n {DATE_STR}")
    plt.ylabel("per step time")
    title = f"{TITLE_NAME} Per Step Time" if len(DATASET_NAME) == 0 \
        else f"{TITLE_NAME} Per Step Time \n dataset:{DATASET_NAME}"
    plt.title(title)
    plt.grid(True)
    plt.savefig(f'{LOG_BASE_DIR}/time_cost.png', dpi=600, bbox_inches='tight')
    plt.show()


def plot_global_norm():
    """Plot the global gradient-norm curve of every run in NORM_FILE_DICT.

    All series are truncated to the shortest run so they share one x axis;
    the figure is written to ``LOG_BASE_DIR/global_norm.png`` and then
    shown.  Returns early (instead of raising) when no data file loaded.
    """
    print("\n---plot_global_norm----")
    norm_dict = NORM_FILE_DICT
    matplotlib.use('TkAgg')
    norm_data = {}
    for title, path in norm_dict.items():
        if not os.path.exists(path):
            print(f"file not exists:{path}")
            continue
        if "gpu" in title:
            # PyTorch logs every step while MindSpore logs every
            # PRINT_LOSS_STEPS steps, so subsample GPU data to align them.
            # atleast_1d: loadtxt returns a 0-d array for a one-line file,
            # which would break len()/slicing below.
            norm_data[title] = np.atleast_1d(np.loadtxt(path))[::PRINT_LOSS_STEPS]
        else:
            if "3b_old+dpmppp+no_drop.norm" in path:
                # Special case: this NPU run logged twice as often.
                norm_data[title] = np.atleast_1d(np.loadtxt(path))[::2]
                print("3b_old+mppp+no_drop.norm")
                continue
            norm_data[title] = np.atleast_1d(np.loadtxt(path))
    if not norm_data:
        # Guard: min() below raises ValueError on an empty collection.
        print("no global-norm data loaded, nothing to plot")
        return
    labels = list(norm_data.keys())
    min_data_len = min(len(series) for series in norm_data.values())
    data_plot = [series[:min_data_len] for series in norm_data.values()]
    axis = np.arange(0, min_data_len * PRINT_LOSS_STEPS, PRINT_LOSS_STEPS)
    plt.figure(figsize=(10, 6))
    print(">>>axis.shape:", axis.shape)
    print(">>>axis::", axis)
    print(labels)
    for d in data_plot:
        plt.plot(axis, d, linewidth=LINE_WIDTH)
        print(d)
    plt.legend(labels=labels)
    plt.xlabel(f"step  \n {DATE_STR}")
    plt.ylabel("Global Norm")
    title = f"{TITLE_NAME} Global Norm" if len(DATASET_NAME) == 0 \
        else f"{TITLE_NAME} Global Norm \n dataset:{DATASET_NAME}"
    plt.title(title)
    plt.grid(True)
    plt.savefig(f'{LOG_BASE_DIR}/global_norm.png', dpi=600, bbox_inches='tight')
    plt.show()


def plot_loss():
    """Plot the loss curve of every run registered in LOSS_FILE_DICT.

    All series are truncated to the shortest run so they share one x axis;
    the figure is written to ``LOG_BASE_DIR/loss.png`` and then shown.
    When exactly two runs are plotted and PLOT_DIFF is truthy, a second
    chart with their pointwise difference (and its mean absolute deviation)
    is saved to ``LOG_BASE_DIR/diff.png``.  Returns early (instead of
    raising) when no data file could be loaded.
    """
    print("\n---plot_loss----")
    loss_dict = LOSS_FILE_DICT
    matplotlib.use('TkAgg')
    loss_data = {}
    for title, path in loss_dict.items():
        if not os.path.exists(path):
            print(f"file not exists:{path}")
            continue
        if "gpu" in title:
            # PyTorch logs every step while MindSpore logs every
            # PRINT_LOSS_STEPS steps, so subsample GPU data to align them.
            # atleast_1d: loadtxt returns a 0-d array for a one-line file,
            # which would break len()/slicing below.
            loss_data[title] = np.atleast_1d(np.loadtxt(path))[::PRINT_LOSS_STEPS]
        else:
            if "3b_old+dpmppp+no_drop.loss" in path:
                # Special case: this NPU run logged twice as often.
                loss_data[title] = np.atleast_1d(np.loadtxt(path))[::2]
                print("3b_old+mppp+no_drop.loss")
                continue
            loss_data[title] = np.atleast_1d(np.loadtxt(path))
    if not loss_data:
        # Guard: min() below raises ValueError on an empty collection.
        print("no loss data loaded, nothing to plot")
        return
    labels = list(loss_data.keys())
    min_data_len = min(len(series) for series in loss_data.values())
    data_plot = [series[:min_data_len] for series in loss_data.values()]
    axis = np.arange(0, min_data_len * PRINT_LOSS_STEPS, PRINT_LOSS_STEPS)
    print(">>>axis.shape:", axis.shape)
    print(">>>axis::", axis)
    print(labels)
    plt.figure(figsize=(10, 6))
    for d in data_plot:
        plt.plot(axis, d, linewidth=LINE_WIDTH)
        print(d)
    plt.legend(labels=labels)
    plt.xlabel(f"step  \n {DATE_STR}")
    plt.ylabel("loss")
    title = f"{TITLE_NAME} LOSS" if len(DATASET_NAME) == 0 \
        else f"{TITLE_NAME} LOSS \n dataset:{DATASET_NAME}"
    plt.title(title)
    plt.grid(True)
    plt.savefig(f'{LOG_BASE_DIR}/loss.png', dpi=1200, bbox_inches='tight')
    plt.show()

    if len(data_plot) == 2 and PLOT_DIFF:
        print("\n---plot_diff----")
        axis = np.arange(0, min_data_len * PRINT_LOSS_STEPS, PRINT_LOSS_STEPS)
        plt.figure(figsize=(10, 6))
        d = data_plot[1] - data_plot[0]
        # Mean absolute deviation of the pointwise gap between the two runs.
        abs_mean = mean(absolute(d - mean(d)))
        print(d)
        print("abs_mean:", abs_mean)
        plt.plot(axis, d, linewidth=LINE_WIDTH)
        plt.xlabel(f"step[abs_mean:{abs_mean}]\n {DATE_STR}")
        plt.ylabel("diff")
        plt.title("Diff")
        plt.grid(True)
        plt.savefig(f'{LOG_BASE_DIR}/diff.png', dpi=1200, bbox_inches='tight')
        plt.show()


def main(loss_log_dir):
    """Preprocess every ``*.log`` file under *loss_log_dir* and plot curves.

    File names containing ``gpu`` are parsed with the Megatron/PyTorch log
    format, all others with the MindSpore (NPU) format.  The extracted data
    file paths are registered in the module-level ``*_FILE_DICT`` tables
    that the ``plot_*`` helpers read.  Returns ``None`` in every case.
    """
    # Validate the input BEFORE touching module state, so a bad path does
    # not wipe registries filled by a previous successful call.
    if not os.path.isdir(loss_log_dir):
        print(f"input is not dir:{loss_log_dir}")
        return
    # Reset all four registries (the original cleared only LOSS/LR, which
    # left stale COST/NORM entries behind on repeated calls).
    LOSS_FILE_DICT.clear()
    LR_FILE_DICT.clear()
    COST_FILE_DICT.clear()
    NORM_FILE_DICT.clear()

    loss_log_files = sorted(os.listdir(loss_log_dir))
    print("----list file---")
    for log_file in loss_log_files:
        log_file_path = os.path.join(loss_log_dir, log_file)
        if not os.path.isfile(log_file_path) or not log_file_path.endswith(".log"):
            print(f"{log_file_path} is not log file")
            continue
        data_key = os.path.splitext(os.path.basename(log_file))[0]
        # Pick the parser from the file name; the isfile() check above
        # guarantees pre_process_* returns a 4-tuple (not None) here.
        pre_process = pre_process_gpu if "gpu" in data_key else pre_process_npu
        loss_np_file, lr_np_file, cost_np_file, norm_np_file = pre_process(log_file_path)
        LOSS_FILE_DICT[data_key] = loss_np_file
        LR_FILE_DICT[data_key] = lr_np_file
        COST_FILE_DICT[data_key] = cost_np_file
        NORM_FILE_DICT[data_key] = norm_np_file
    print(f"LOSS_FILE_DICT:{LOSS_FILE_DICT}")
    print(f"LR_FILE_DICT:{LR_FILE_DICT}")
    print(f"COST_FILE_DICT:{COST_FILE_DICT}")
    print(f"NORM_FILE_DICT:{NORM_FILE_DICT}")
    if not LOSS_FILE_DICT or not LR_FILE_DICT:
        # Report the directory actually scanned (the original printed the
        # global LOG_BASE_DIR, which can differ from the argument).
        print(f"no log file in {loss_log_dir}")
        return
    plot_loss()
    # plot_lr()
    # plot_time_cost()
    # plot_global_norm()


if __name__ == '__main__':
    # CLI entry point: parse arguments, override module-level config, run.
    parser = argparse.ArgumentParser(description="telechat plot charts")
    parser.add_argument('-log_dir', default=None, required=True,
                        help='log file dir,not file! log file should end with .log')
    parser.add_argument('-plot_diff', default="False", required=False,
                        help='plot error or not when comparing two sets of data ,default False')
    parser.add_argument('-line_width', default="0.2", required=False,
                        help='plot line width')
    args = parser.parse_args()
    LOG_BASE_DIR = args.log_dir
    # argparse delivers strings: the literal "False" is truthy, so the old
    # `PLOT_DIFF = args.plot_diff` enabled the diff plot unconditionally,
    # and matplotlib rejects a string linewidth.  Convert to real types.
    PLOT_DIFF = str(args.plot_diff).strip().lower() in ("true", "1", "yes")
    LINE_WIDTH = float(args.line_width)
    print(f"log_dir:{LOG_BASE_DIR}")
    print(f"PLOT_ERROR:{PLOT_DIFF}")
    print(f"line_width:{LINE_WIDTH}")
    DATE_STR = get_date_str()
    main(LOG_BASE_DIR)
