# code_sample/predo-srunout-log2json.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import re
import json
import argparse
import subprocess
from multiprocessing import Pool
import sys 

sys.path.append("..")
from package.utility import ensure_exists
from package.utility import find_targets
from package.utility import scan_runcases
from package.utility import load_runcase_npt_config
from package.utility import load_pidtimer_data
from package.utility import get_minmaxmean
from package.utility import scan_srunoutlog

'''
Purpose: merge the srun_out.log files produced by repeated runs of each
submission into a single srun_out.json.
Output format:
{
    "reapet0": {
        "Setup": {
            "tmin": ...,
            "tmax": ...,
            "tave": ...
        },
        "Solve": {
            "tmin": ...,
            "tmax": ...,
            "tave": ...
        }
    }
}
Approach:
1. Find every srun_out.log and produce a matching srun_out.json.
2. Extract the three timing values (min/max/mean) for Setup and Solve.

Date: 2023-01-14T9:40
Author: 毛润彰 (Mao Runzhang)
Adapted from: 田鸿运 (Tian Hongyun)
'''

def make_parser():
    """Build the argument parser for the srunout log-to-json tool.

    Returns:
        argparse.ArgumentParser: parser accepting a positional LOGDIR,
        ``-p/--proc`` (worker-process count) and ``--clean-cache``.
    """
    parser = argparse.ArgumentParser(
        prog="jxpamg-predo-srunout-log2json",
        description="jxpamg srunout data preprocess tool, transform log to json")
    parser.add_argument(
            "LOGDIR",
            metavar="LOGDIR",
            help="Destination log directories."
        )
    parser.add_argument(
            "-p", "--proc",
            dest="num_proc",
            # Fix: without type=int a CLI-supplied value stays a str and a
            # non-numeric value crashes later with a raw ValueError; argparse
            # now rejects it with a proper usage error instead.
            type=int,
            default=14,
            help="Number of processes used to run the task, default:%(default)s"
        )
    parser.add_argument(
            "--clean-cache",
            dest="clean_cache",
            action="store_true",
            default=False,
            help="Remove json data before transform."
        )
    return parser

def srunoutlog2json(args):
    """Transform one srunout log file's timing data into a JSON file.

    Args:
        args: tuple ``(srunoutlog, clean_cache)`` — path to the log file,
            and whether to delete a pre-existing JSON output first.
            (Packed in a tuple so the function works with ``Pool.map``.)

    Side effects:
        Writes ``<logdir>/<basename with ".result" -> ".json">`` and
        prints the output path.
    """
    srunoutlog, clean_cache = args
    srunoutlog = os.path.abspath(srunoutlog)
    logdir = os.path.dirname(srunoutlog)
    basename = os.path.basename(srunoutlog)
    # NOTE(review): only names containing ".result" are renamed; for a plain
    # "srun_out.log" the output path equals the input and the exists-check
    # below skips it entirely — confirm what scan_srunoutlog yields.
    output = os.path.join(logdir, basename.replace(".result", ".json"))

    # Remove the cached output when requested.
    if clean_cache is True and os.path.exists(output):
        os.remove(output)

    # Skip files that were already converted.
    if os.path.exists(output):
        return

    ensure_exists(srunoutlog)

    # Hoist all regexes out of the per-line loop. Also escape the decimal
    # point: the original "\d+.\d+" let "." match any character.
    program_re = re.compile(r"====run program (\w+)====")
    repeat_re = re.compile(r" >> repeat_(\d+)")
    jxpamg_res = {
        name: re.compile(
            rf" >> {name}: .*? = \((\d+\.\d+), (\d+\.\d+), (\d+\.\d+)\) seconds")
        for name in ["PAMG_Setup", "PAMG_Solve", "Total_Solve_Time"]
    }
    hypre_phase_re = re.compile(r"BoomerAMG (\w+):")
    hypre_time_re = re.compile(r"\s+wall clock time = (\d+\.\d+) seconds")

    jsondata = {}
    reapetnum = 0
    # Fix: the original raised UnboundLocalError when a line preceded the
    # first "====run program ...====" header (programname unassigned), and
    # when a hypre wall-clock line preceded any phase line (timekey
    # unassigned). Initialize both and guard below.
    programname = None
    timekey = None
    with open(srunoutlog, "r", encoding="utf-8") as fp:
        loglines = fp.readlines()
    for logline in loglines:
        match = program_re.match(logline)
        if match:
            programname = match.group(1)
            jsondata[programname] = {}

        match = repeat_re.match(logline)
        if match:
            reapetnum = match.group(1)

        if programname is None:
            # No program header seen yet; nothing to attribute timings to.
            continue

        if programname.split("_")[0].lower() == "jxpamg":
            # jxpamg reports (min, max, mean) triples per timer.
            for name, regex in jxpamg_res.items():
                run_time = regex.match(logline)
                if run_time:
                    jsondata[programname][f"reapet{reapetnum}_{name}"] = {
                        "tmin": run_time.group(1),
                        "tmax": run_time.group(2),
                        "tave": run_time.group(3)
                    }
        elif programname.split("_")[0].lower() == "hypre":
            # hypre logs a phase header line, then a single wall-clock
            # time, so min/max/mean all get the same value.
            phase = hypre_phase_re.match(logline)
            if phase:
                timekey = f"reapet{reapetnum}BoomerAMG_{phase.group(1)}"
            run_time = hypre_time_re.match(logline)
            if run_time and timekey is not None:
                jsondata[programname][timekey] = {
                        "tmin": run_time.group(1),
                        "tmax": run_time.group(1),
                        "tave": run_time.group(1)
                }

    print(f">> dump to {output}")
    with open(output, "w", encoding="utf-8") as fp:
        json.dump(jsondata, fp, indent=2, sort_keys=True)


def main():
    """CLI entry point: convert every srunout log under LOGDIR to JSON."""
    parser = make_parser()
    args = parser.parse_args()

    runcases = scan_srunoutlog(args.LOGDIR)
    tasks = [(case, args.clean_cache) for case in runcases]

    # Fix: the original never close()d/join()ed the pool, leaking worker
    # processes; the context manager tears the pool down after map() returns.
    with Pool(int(args.num_proc)) as pool:
        pool.map(srunoutlog2json, tasks)

# def my_test():
#     file = "/home/mrz/data/result_23_1_13_tian/result-20230112T103924/jxpamg_lixue_matrix/N112-P3584-T1/1095597_0112T110502/srun_out.log"
#     output = "./srun_out.json"
#     jsondata = {}
#     reapetnum = 0
#     with open(file, "r", encoding="utf-8") as fp:
#         loglines = fp.readlines()
#     for logline in loglines:

#         reapetmatch = re.compile(" >> repeat_(\d+)").match(logline)
#         if(reapetmatch):
#             reapetnum = reapetmatch.group(1)

#         for i in ["PAMG_Setup", "PAMG_Solve", "Total_Solve_Time"]:
#             tmp_i = i
#             timekey = f"reapet{reapetnum}_"
#             for k in tmp_i.split("_"):
#                 timekey = timekey + k
#             restring = f" >> {i}: .*? = \((\d+.\d+), (\d+.\d+), (\d+.\d+)\) seconds"
#             RunTime = re.compile(restring).match(logline)
#             if(RunTime):
#                 jsondata[timekey] = {
#                     "tmin": RunTime.group(1),
#                     "tmax": RunTime.group(2),
#                     "tave": RunTime.group(3)
#                 }
#                 break
#     print(f">> dump to {output}")
#     with open(output, "w", encoding="utf-8") as fp:
#         json.dump(jsondata, fp, indent=2, sort_keys=True)

# Script entry point: run the CLI only when executed directly.
if __name__ == "__main__":
    main()
    # my_test()  # manual debugging helper (commented-out definition above)


