import pandas as pd
import os
import json
from util.HyperSaver import HyperSaver
from datetime import datetime

class LoadHyperFromFiles(object):
    """Collect experiment hyper-parameters and final metrics from disk.

    Reads the experiment options (a JSON file) plus the last row of the
    tab-separated train/validation logs, then loads everything into a
    ``HyperSaver`` built from an .xlsx template so the combined record can
    be saved to, or appended onto, an experiment-log spreadsheet.
    """

    def __init__(self, opts_path, train_log_path=None, val_log_path=None,
                 hype_template_path='./template.xlsx'):
        """
        Args:
            opts_path: path to the JSON file of experiment options.
            train_log_path: path to the tab-separated training log.
            val_log_path: path to the tab-separated validation log.
            hype_template_path: .xlsx template consumed by HyperSaver.

        Raises:
            FileNotFoundError: if any of the three input files is missing
                (or a log path was left as None).
        """
        self.opts_path = opts_path
        self.train_log_path = train_log_path
        self.val_log_path = val_log_path

        if opts_path is None or not os.path.exists(opts_path):
            raise FileNotFoundError('File {} not exists.'.format(opts_path))
        with open(opts_path, 'r') as f:
            self.input_json = json.load(f)

        train_loss, train_cc, train_nss = self._read_last_metrics(train_log_path)
        valid_loss, valid_cc, valid_nss = self._read_last_metrics(val_log_path)

        # Final-epoch metrics, keyed the way the HyperSaver template expects.
        self.performance_dict = {
            'train_epoch_loss': train_loss,
            'train_cc': train_cc,
            'train_nss': train_nss,
            'val_epoch_loss': valid_loss,
            'val_cc': valid_cc,
            'val_nss': valid_nss,
        }

        self.hyperSaver = HyperSaver(hype_template_path)

    @staticmethod
    def _parse_metric(cell):
        """Extract the float from a logged cell such as "tensor(0.1234, ...)".

        Takes the text between the first '(' and the comma that follows it
        (the same slicing the original per-column code performed).
        """
        return float(cell.split(",")[0].split("(")[1])

    @classmethod
    def _read_last_metrics(cls, log_path):
        """Return (loss, cc, nss) floats from the last row of a TSV log.

        Raises:
            FileNotFoundError: if log_path is None or does not exist.
        """
        if log_path is None or not os.path.exists(log_path):
            raise FileNotFoundError('File {} not exists.'.format(log_path))
        last_row = pd.read_csv(log_path, sep='\t').iloc[-1]
        return (cls._parse_metric(last_row['loss']),
                cls._parse_metric(last_row['cc']),
                cls._parse_metric(last_row['nss']))

    def get_modify_time(self):
        """Return the validation log's mtime formatted as YYYYMMDDHHMM."""
        mtime = datetime.fromtimestamp(os.path.getmtime(self.val_log_path))
        return mtime.strftime("%Y%m%d%H%M")

    def process(self, explain=''):
        """Merge options and metrics into the HyperSaver record.

        Uses the validation log's modification time as the record timestamp
        and stores the free-text `explain` note on the output dict.
        """
        self.hyperSaver.set_config(self.input_json)
        self.hyperSaver.set_config(self.performance_dict)
        self.hyperSaver.time_str = self.get_modify_time()
        self.hyperSaver.output_dict['explain'] = explain

    def save(self, save_path):
        """Write the assembled record to a new file at save_path."""
        self.hyperSaver.save_config(save_path)

    def append(self, append_path):
        """Append the assembled record to an existing log at append_path."""
        self.hyperSaver.append_config(append_path)


if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='STAViS options and parameters')

    # Input files: experiment options (JSON) and the train/val logs (TSV).
    parser.add_argument(
        '--opts_path',
        default='./experiments/',
        type=str,
        help='Path to the experiment options JSON file')

    parser.add_argument(
        '--train_log_path',
        default='./experiments/',
        type=str,
        help='Path to the tab-separated training log file')

    parser.add_argument(
        '--val_log_path',
        default='./experiments/',
        type=str,
        help='Path to the tab-separated validation log file')

    parser.add_argument(
        '--hype_template_path',
        default='./hype_template.xlsx',
        type=str,
        help='Path to the HyperSaver .xlsx template')

    parser.add_argument(
        '--explain',
        default='',
        type=str,
        help='Free-text note stored with the record')

    args = parser.parse_args()

    # BUG FIX: the class is LoadHyperFromFiles; the script previously
    # instantiated a non-existent "LoadHypeFromFiles" and crashed with
    # NameError on every run.
    loadHype = LoadHyperFromFiles(args.opts_path,
                                  args.train_log_path,
                                  args.val_log_path,
                                  args.hype_template_path)
    loadHype.process(args.explain)
    loadHype.append("./experiments/experiment_logs/results.csv")