import os
import sys
# sys.path.append('/home/hch/Desktop/trjcompress/')
# sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from RLOnline.data_utils import to_traj
from method.error_compression import err_compression
from method.squish import squish
from method.squish_e import squish_e
from method.sttrace import STTrace
# from method.rlts import RLAgent
import pickle
import time
import numpy as np

from utils import getPED4GPS, getMaxError


def compress_our(points, max_ratio, metric):
    """Benchmark err_compression on every trajectory and report timing/error.

    Args:
        points: iterable of trajectories; each trajectory is a sequence of
            points whose first two elements are the 2-D coordinates.
        max_ratio: target compression ratio in (0, 1]; each trajectory keeps
            roughly ``int(max_ratio * len(seq))`` points.
        metric: error-metric name forwarded to ``getMaxError`` (e.g. "ped").

    Returns:
        compRes: list of compression results. NOTE(review): currently always
            empty — kept for interface parity with the other compress_* helpers.
    """
    err = []
    compRes = []
    timelist = []  # per-point compression time of each trajectory
    for seq in points:
        # perf_counter is monotonic and high-resolution — the right clock
        # for measuring short intervals (time.time can jump on NTP sync).
        tic1 = time.perf_counter()
        # err_compression only needs the 2-D coordinates of each point.
        ps = [[p[0], p[1]] for p in seq]
        _, idx, _ = err_compression(ps, 0, int(max_ratio * len(seq)) - 1)
        tic2 = time.perf_counter()
        timelist.append((tic2 - tic1) / len(seq))

        # Recompute the true maximum error over every retained segment with
        # the requested metric (the error returned by err_compression may be
        # based on a different measure).
        maxErr_ = 0
        for i in range(len(idx) - 1):
            _, e = getMaxError(idx[i], idx[i + 1], seq, metric)
            maxErr_ = max(maxErr_, e)
        err.append(maxErr_)

    print(f"our 压缩率 {max_ratio},耗时 {np.mean(timelist)}, error {np.mean(err)}")
    return compRes

def compress_squish(points, max_ratio, metric):
    """Benchmark the SQUISH algorithm on every trajectory.

    Args:
        points: iterable of trajectories (sequences of points).
        max_ratio: fraction of points each compressed trajectory retains.
        metric: error-metric name forwarded to ``squish`` / ``getMaxError``.

    Returns:
        compRes: list of compression results. NOTE(review): currently always
            empty — kept for interface parity with the other compress_* helpers.
    """
    err = []
    compRes = []
    timelist = []  # per-point compression time of each trajectory
    for seq in points:
        budget = int(max_ratio * len(seq))
        # perf_counter is monotonic and high-resolution — the right clock
        # for measuring short intervals (time.time can jump on NTP sync).
        tic1 = time.perf_counter()
        _, idx, _ = squish(seq, budget, mode=metric)
        tic2 = time.perf_counter()
        timelist.append((tic2 - tic1) / len(seq))

        # Sanity check: SQUISH must keep exactly the requested point budget.
        assert len(idx) == budget

        # Maximum error over all retained segments under the chosen metric.
        maxErr_ = 0
        for i in range(len(idx) - 1):
            _, e = getMaxError(idx[i], idx[i + 1], seq, metric)
            maxErr_ = max(maxErr_, e)
        err.append(maxErr_)

    print(f"squish压缩率 {max_ratio},耗时 {np.mean(timelist)}, error {np.mean(err)}")
    return compRes


def compress_squish_e(points, max_ratio, metric):
    """Benchmark the SQUISH-E algorithm on every trajectory.

    Trajectories on which ``squish_e`` raises are skipped (best-effort), but
    the failure is now reported instead of being silently swallowed.

    Args:
        points: iterable of trajectories (sequences of points).
        max_ratio: fraction of points each compressed trajectory retains.
        metric: error-metric name forwarded to ``squish_e`` / ``getMaxError``.

    Returns:
        compRes: list of compression results. NOTE(review): currently always
            empty — kept for interface parity with the other compress_* helpers.
    """
    err = []
    compRes = []
    timelist = []  # per-point compression time of each trajectory
    for seq in points:
        budget = int(max_ratio * len(seq))
        # perf_counter is monotonic and high-resolution — the right clock
        # for measuring short intervals (time.time can jump on NTP sync).
        tic1 = time.perf_counter()
        try:
            _, idx, _ = squish_e(seq, budget, mode=metric)
        except Exception as e:
            # Best-effort: skip this trajectory, but surface the failure so
            # the benchmark's sample count is not silently reduced.
            print(f"squish_e failed on a trajectory (len={len(seq)}): {e!r}",
                  file=sys.stderr)
            continue
        tic2 = time.perf_counter()
        timelist.append((tic2 - tic1) / len(seq))

        # Sanity check: SQUISH-E must keep exactly the requested point budget.
        assert len(idx) == budget

        # Maximum error over all retained segments under the chosen metric.
        maxErr_ = 0
        for i in range(len(idx) - 1):
            _, e = getMaxError(idx[i], idx[i + 1], seq, metric)
            maxErr_ = max(maxErr_, e)
        err.append(maxErr_)

    # NOTE(review): if every trajectory failed, np.mean of an empty list
    # prints nan (with a RuntimeWarning) — unchanged from original behavior.
    print(f"squish_e压缩率 {max_ratio},耗时 {np.mean(timelist)}, error {np.mean(err)}")
    return compRes


def compress_sttrace(points, max_ratio, metric):
    """Benchmark the STTrace algorithm on every trajectory.

    points: iterable of trajectories (sequences of points).
    max_ratio: fraction of points each compressed trajectory retains.
    metric: error-metric name forwarded to STTrace / getMaxError.
    Returns compRes (kept for interface parity with the other helpers).
    """
    err = []
    compRes = []
    timelist = []
    for trajectory in points:
        budget = int(max_ratio * len(trajectory))
        start = time.time()
        _, idx, maxErr = STTrace(trajectory, budget, mode=metric)
        elapsed = time.time() - start
        timelist.append(elapsed / len(trajectory))

        assert len(idx) == budget

        # Worst-case error across all retained segments under `metric`.
        worst = 0
        for lo, hi in zip(idx, idx[1:]):
            _, seg_err = getMaxError(lo, hi, trajectory, metric)
            if seg_err > worst:
                worst = seg_err
        err.append(worst)

    print(f"sttrace压缩率 {max_ratio},耗时 {np.mean(timelist)}, error {np.mean(err)}")
    return compRes


if __name__ == '__main__':
    # E:\download-firefox\Geolife Trajectories 1.3\Geolife Trajectories 1.3
    datasets = "./Geolife_out"
    data_size = 10

    # Load the first `data_size` trajectories; each lives in a numbered
    # sub-directory of the dataset root.
    trjs = [to_traj(os.path.join(datasets, str(i))) for i in range(data_size)]

    ratio = 0.1
    metric = "ped"
    # Run every compression method on the same trajectories for comparison.
    for compress in (compress_our, compress_squish,
                     compress_squish_e, compress_sttrace):
        compress(trjs, ratio, metric)

    # metric = 'dad'
    # agent = RLAgent(datasets, data_size, metric)

    # for i in range(4, 5):
    #     ratio = 0.1 * (i + 1)
    #     compress_sttrace(src, points, ratio, metric)
    #     compress_squish(src, points, ratio, metric)
    #     compress_squish_e(src, points, ratio, metric)
    #     agent.run_all(len(points), ratio, metric)
