import os.path as osp
from glob import glob
from argparse import ArgumentParser
from tensorboard.backend.event_processing import event_accumulator
import pandas as pd
import numpy as np


def format_steps(num_steps):
    """Format a step count compactly: 1000 -> '1K', 3e6 -> '3M', 999 -> 999.

    Counts below 1000 are returned unchanged (not as a string), matching the
    original behavior; larger counts are rounded to a whole number of the
    largest fitting power-of-1000 unit.

    Args:
        num_steps: non-negative step count (int or float).

    Returns:
        A string like '2K' / '3M' / '5G', or *num_steps* itself if < 1000.
    """
    # Largest unit first, mirroring the decimal (power-of-1000) dividers the
    # previous humanfriendly-based implementation derived from disk-size units.
    units = (
        (10**24, "Y"), (10**21, "Z"), (10**18, "E"), (10**15, "P"),
        (10**12, "T"), (10**9, "G"), (10**6, "M"), (10**3, "K"),
    )
    for divider, suffix in units:
        if num_steps >= divider:
            # .0f uses round-half-to-even, same as the old '%.0f' formatting.
            return f"{float(num_steps) / divider:.0f}{suffix}"
    return num_steps


def format_seconds(secs):
    """Render *secs* (a number of seconds) as a human-readable timespan string."""
    import humanfriendly
    return humanfriendly.format_timespan(secs)


def parse_tensorboard(path, scalars):
    """Load the requested scalar series from a TensorBoard event file.

    Args:
        path: an event file, or a directory searched recursively for
            ``*tfevents*`` files (the most recently created one is used).
        scalars: iterable of scalar tag names to extract.

    Returns:
        dict mapping each tag in *scalars* to a pandas DataFrame built from
        ``EventAccumulator.Scalars`` (wall_time / step / value columns).

    Raises:
        FileNotFoundError: if *path* is a directory containing no event files.
        ValueError: if any requested tag is absent from the event file.
    """
    if osp.isdir(path):
        files = list(filter(osp.isfile, glob(f"{path}/**/*tfevents*", recursive=True)))
        if not files:
            # Previously files[-1] raised an opaque IndexError here.
            raise FileNotFoundError(f"no *tfevents* files found under {path!r}")
        files.sort(key=lambda f: osp.getctime(osp.abspath(f)))
        path = files[-1]

    ea = event_accumulator.EventAccumulator(
        path,
        size_guidance={event_accumulator.SCALARS: 0},  # 0 = keep every scalar event
    )
    ea.Reload()
    # Explicit raise instead of `assert`: asserts disappear under `python -O`,
    # and this message names the offending tags.
    available = ea.Tags()["scalars"]
    missing = [s for s in scalars if s not in available]
    if missing:
        raise ValueError(f"scalars not found in the event accumulator: {missing}")
    return {k: pd.DataFrame(ea.Scalars(k)) for k in scalars}


def analysis_series(df: pd.DataFrame, name: str, eps: float = 0.1, short: bool = False) -> None:
    """Print a one-line summary of a scalar series.

    The line shows: the first raw point, median-of-3 smoothed values at
    geometrically spaced indices, the last raw point, and a jitter figure
    "J/x%" — the fraction of points deviating more than *eps* from their
    3-point rolling median.  Each point is printed as "<steps>/<value>".

    Args:
        df: DataFrame with at least ``step`` and ``value`` columns
            (as produced by ``EventAccumulator.Scalars``).
        name: label printed before the summary.
        eps: deviation threshold for counting a point as unstable.
        short: if True, values are printed with 3 decimal places.
    """
    ser = df.value.to_numpy()
    # Rows: center value, left neighbour, right neighbour -> shape (3, len-2).
    ser3 = np.stack((ser[1:-1], ser[:-2], ser[2:]))
    # Sorting along axis 0 puts the 3-point median in row 1.
    ser3sorted = np.sort(ser3, axis=0)
    assert (ser3sorted[0] <= ser3sorted[1]).all()
    assert (ser3sorted[1] <= ser3sorted[2]).all()
    # A point is unstable when it deviates from its 3-point median by more
    # than eps.  NOTE(review): only the len-2 interior points are tested but
    # the rate is normalized by len(ser) -- slightly underestimates; confirm
    # whether this is intentional.
    unstable_rate = (np.abs(ser3sorted[1] - ser3[0]) > eps).sum() / len(ser)

    ser3med = ser3sorted[1]  # rolling median, aligned with ser[1:-1]
    steps_all = df.step.to_numpy()
    steps = steps_all[1:-1]  # steps aligned with ser3med
    report = []

    if short:
        _format_value = lambda x: f"{x:.3f}"
    else:
        _format_value = lambda x: f"{x}"

    # First raw (unsmoothed) point.
    report.append(f"{format_steps(steps_all[0].item())}/{_format_value(ser[0].item())}")
    # Smoothed points at geometrically spaced indices k-2 = 0, 2, 6, 14, ...;
    # the loop stops via IndexError when k-2 runs past the series end (EAFP).
    k = 2
    while True:
        try:
            report.append(f"{format_steps(steps[k-2].item())}/{_format_value(ser3med[k-2].item())}")
        except IndexError:  # OOB
            break
        k = k * 2
    # Last raw point, then the jitter percentage.
    report.append(f"{format_steps(steps_all[-1].item())}/{_format_value(ser[-1].item())}")
    report.append(f"J/{unstable_rate*100:.1f}%")

    print(f"{name}:", ", ".join(report))
    


if __name__ == "__main__":
    argp = ArgumentParser()
    argp.add_argument("path")
    argp.add_argument("-s", "--short", action="store_true")
    argp.add_argument("metrics", nargs="*", default=[])
    args = argp.parse_args()

    # load ckpt {
    path = args.path

    if osp.isdir(path):
        files = list(filter(osp.isfile, glob(f"{path}/**/*tfevents*", recursive=True)))
        files.sort(key=lambda f:osp.getctime(osp.abspath(f)))
        path = files[-1]

    ea = event_accumulator.EventAccumulator(
        path,
        size_guidance={event_accumulator.SCALARS: 0},
    )
    _absorb_print = ea.Reload()
    # }

    if not args.metrics:
        print("Please select one or more metrics from: ", end="")
        print(", ".join(ea.Tags()["scalars"]) + ".")

    else:
        wall_time_s = None

        for i, name in enumerate(args.metrics):
            df = pd.DataFrame(ea.Scalars(name))
            analysis_series(df, name, short=args.short)
            if i == 0:
                try:
                    latest_wall_time = df['wall_time'].values[-1]
                    first_wall_time = ea.FirstEventTimestamp()
                    wall_time_s = latest_wall_time - first_wall_time
                except: pass

        if wall_time_s is not None:
            print(f"The training has been lasting for {format_seconds(wall_time_s)} until the latest saved {args.metrics[0]}.")
