from .InfluxdbHandler import InfluxdbHandler
from util.BasicAuth import BasicAuth
from util.time_transform import unix2str
import json
from urllib.parse import urlencode
import time
import math
from tornado import gen
from util.logConf import log
from util.my_error import InfluxdbError
from influxdb import InfluxDBClient


class LastGroupHandler3(InfluxdbHandler):
    """Paged "last value per time bucket" query served via InfluxDB's raw HTTP API.

    For one device (the measurement named by ``client``) this handler returns,
    for every ``interval``+``unit`` bucket of the requested page, the most
    recent value of each requested key.  Values are carried forward across
    empty buckets by maintaining a running "mosaic" dict that is patched with
    every non-null reading.
    """

    @BasicAuth
    @gen.coroutine
    def get(self):
        """Validate query arguments, run the chained InfluxDB queries and
        write one JSON page of grouped last-values.

        Query arguments:
            client       -- measurement (device id).
            create1/2    -- window start / end dates (checked by verify_date).
            fidis_name   -- database name hint, resolved via get_fidis_ip.
            interval     -- bucket width (default "1").
            unit         -- bucket unit: 'm', 'h' or 's'.
            keys         -- comma separated field names, or "*" for all.
            limit        -- page size (default 50).
            page_index   -- 1-based page number (default 1).

        Writes HTTP 400 with ``{"success": False, ...}`` on bad arguments.
        """
        start_search = time.time()
        clientid = self.get_argument('client', None)
        start_date = self.get_argument('create1', None)
        end_date = self.get_argument('create2', None)
        message_type = self.get_argument('message_type', None)  # accepted but currently unused
        fidis_name = self.get_argument('fidis_name', None)
        interval = self.get_argument('interval', "1")
        unit = self.get_argument('unit', 'm')
        group_by = "{}{}".format(interval, unit)
        # Timestamps are nanosecond-epoch strings (presumably — verify_date is
        # defined on the base class).
        str_unix_start, str_unix_end = self.verify_date(start_date, end_date, 0, 0)
        int_unix_start = int(str_unix_start)
        int_unix_end = int(str_unix_end)

        keys = self.get_argument('keys', "*")
        limit = self.get_argument("limit", 50)
        page = self.get_argument('page_index', 1)
        if unit not in ['m', 'h', 's']:
            dt = {
                "success": False,
                "error": "invalid unit"
            }
            self.write(dt)
            self.set_status(400)
            return
        try:
            offset = (int(page) - 1) * int(limit)
        except (TypeError, ValueError) as e:
            # Non-numeric paging arguments.
            dt = {
                "success": False,
                "error": e.args
            }
            self.write(dt)
            self.set_status(400)
            return

        fidis_name, ip = self.get_fidis_ip(fidis_name, clientid)
        log.info(fidis_name)
        log.info(ip)
        influxdb_config = {
            "host": ip,
            "port": "8086"
        }
        # Seconds per unit; used to size one group-by bucket in nanoseconds.
        unit_dict = {
            "m": 60,
            "h": 3600,
            "s": 1
        }
        select_str = ",".join(["last({})".format(key) for key in keys.split(',')])
        url = "http://{}:{}/query?".format(influxdb_config["host"], influxdb_config["port"])
        # sql0: earliest point for this device at/after the requested start.
        sql0 = "select * from \"{}\" where time >={} limit 1".format(clientid, str_unix_start)
        # sql01: newest point at/before the requested start (mosaic seed probe).
        sql01 = "select {} from \"{}\" where time <= {} order by time desc limit 1".format(select_str,
                                                                                          clientid, str_unix_start)
        dt = {
            "db": fidis_name,
            "epoch": "ns",
            "q": sql0 + ";" + sql01
        }
        url_with_encode_params = url + urlencode(dt)
        log.debug(sql0)
        log.debug(sql01)
        log.debug("section 1, used time {}".format(time.time()-start_search))

        try:
            body = yield self.query_influxdb(url_with_encode_params)
        except InfluxdbError as e:
            self.write({"success": False, "error": e.reason})
            return
        res_dict = json.loads(body)
        result_list = res_dict.get('results')
        log.debug("result_list[1]")
        log.debug(result_list[1])
        log.debug("section 2, used time {}".format(time.time() - start_search))
        try:
            # If nothing exists before the requested start, move the start
            # forward to the first time that actually has data.
            if not result_list[1].get("series"):
                int_unix_start = result_list[0].get("series")[0].get("values")[0][0]
        except (TypeError, IndexError):
            # No data before the start AND none inside the window: empty page.
            # (Narrowed from a bare except: only the shape probes above can
            # raise here.)
            dt = {
                "key_list": [],
                "total_count": 0,
                "count": 0,
                "start_date": self.unix2str(str_unix_start[:10]),
                "values_lists": []
            }
            self.write(json.dumps(dt))
            return

        # If the requested end time is in the future, clamp it to "now".
        if int_unix_end > int(self.str2unix(None)):
            str_unix_end = self.str2unix(None)
            # BUG FIX: keep the integer mirror in sync; it was previously left
            # stale, so the clamping checks below compared against the
            # unclamped (future) end time.
            int_unix_end = int(str_unix_end)
        if int_unix_start > int(self.str2unix(None)):
            # Window starts in the future: nothing can match.
            dt = {
                "key_list": [],
                "total_count": 0,
                "count": 0,
                "start_date": self.unix2str(str_unix_start[:10]),
                "values_lists": []
            }
            self.write(json.dumps(dt))
            return

        # Translate paging arguments into a concrete [start, end] slice of the
        # window, in nanoseconds.
        time_interval = float(interval) * unit_dict.get(unit) * 1000000000
        time_offset = float(time_interval) * offset
        str_start_time = str("{:.0f}".format(int_unix_start + time_offset))
        str_end_time = str("{:.0f}".format(int(str_start_time) + int(limit) * float(time_interval)))

        # Never query past the (possibly clamped) requested end time.
        if int(str_end_time) > int_unix_end:
            str_end_time = str_unix_end
        # Paging beyond the window, or a late first data point, yields nothing.
        if int(str_start_time) >= int_unix_end:
            dt = {
                "key_list": [],
                "total_count": 0,
                "count": 0,
                "start_date": self.unix2str(str_start_time[:10]),
                "values_lists": []
            }
            self.write(json.dumps(dt))
            return
        log.debug("section 3, used time {}".format(time.time() - start_search))
        log.debug("#######################")
        log.debug("time_interval={}".format(time_interval))
        log.debug("str_unix_end={}".format(str_unix_end))
        log.debug("int_unix_start={}".format(int_unix_start))
        log.debug("#######################")
        total_count = math.floor(int(str_unix_end)/time_interval) - math.floor(int_unix_start/time_interval) + 1

        # sql1: everything up to the computed page start (mosaic seed).
        sql1 = "select {} from \"{}\" where time <= {}".format(select_str, clientid, str_start_time)
        # sql2: everything up to the requested end (defines the key set).
        sql2 = "select {} from \"{}\" where time <= {}".format(select_str, clientid, str_unix_end)
        # sql3: the page itself, bucketed by group_by.
        sql3 = "select {} from \"{}\" where time >= {}  and  time <= {}  group by time({}) limit {}".format(
            select_str, clientid, str_start_time, str_end_time, group_by, limit)
        log.debug(sql1)
        log.debug(sql2)
        log.debug(sql3)
        dt = {
            "db": fidis_name,
            "epoch": "s",
            "q": sql1 + ';' + sql2 + ";" + sql3
        }
        url_with_encode_params = url + urlencode(dt)
        try:
            body = yield self.query_influxdb(url_with_encode_params)
        except InfluxdbError as e:
            self.write({"success": False, "error": e.reason})
            return
        log.debug("section 4, used time {}".format(time.time() - start_search))
        res_dict = json.loads(body)
        result_list = res_dict.get('results')
        mosic_dict = {}
        return_value_list = []
        return_key_list = []
        if result_list:
            sql1_res = result_list[0]
            sql2_res = result_list[1]
            sql3_res = result_list[2]
            # The keys seen up to the end time define the mosaic's key set;
            # every key starts out as None.
            if sql2_res.get("series"):
                columns = sql2_res.get("series")[0].get("columns")
                mosic_dict = dict.fromkeys([key_name for key_name in columns], None)
            # Seed the mosaic with the last values recorded before the page start.
            if sql1_res.get("series"):
                columns = sql1_res.get("series")[0].get("columns")
                values = sql1_res.get("series")[0].get("values")[0]
                for i in range(len(columns)):
                    key_name = columns[i]
                    key_value = values[i]
                    mosic_dict[key_name] = key_value
                mosic_dict["time"] = unix2str(mosic_dict["time"])
            return_value_list = []
            # Patch the mosaic with each bucket's non-null values, then emit a row.
            if sql3_res.get("series"):
                # With last(key1),last(key2) (rather than last(*)) the columns
                # come back as last_1, last_2, ... instead of last_<key>.
                columns = sql3_res.get("series")[0].get("columns")
                values_list = sql3_res.get("series")[0].get("values")
                for values in values_list:
                    for i in range(len(columns)):
                        key_name = columns[i]
                        key_value = values[i]
                        if key_value is not None:
                            mosic_dict[key_name] = key_value
                    mosic_dict["time"] = unix2str(mosic_dict["time"])
                    temp_mosic = mosic_dict.copy()
                    # "time" goes first in the emitted row.
                    return_value_list.append([temp_mosic.pop("time")] + list(temp_mosic.values()))
            else:
                # No bucketed data: synthesise one row per bucket from the
                # (frozen) mosaic alone.
                int_end_time = int(str_end_time)
                int_start_time = int(str_start_time)
                page_count = math.ceil((int_end_time - int_start_time) / time_interval)
                log.debug(int_end_time)
                log.debug(int_start_time)
                log.debug(time_interval)
                for i in range(int(page_count)):
                    int_timestamp = int(str_start_time) + time_interval * i
                    mosic_dict["time"] = unix2str(int_timestamp/1000000000)
                    temp_mosic = mosic_dict.copy()
                    return_value_list.append([temp_mosic.pop("time")] + list(temp_mosic.values()))
            if keys == "*":
                # Strip the "last_" prefix InfluxDB adds for last(*).
                temp_mosic = mosic_dict.copy()
                temp_mosic.pop("time")
                return_key_list = ["time"] + [k[5:] for k in list(temp_mosic.keys())]
            else:
                return_key_list = ["time"] + keys.split(",")
        # Was logged at error level by mistake; this is plain debug output.
        log.debug(return_key_list)
        log.debug("section 5, used time {}".format(time.time() - start_search))
        # payload_time, when present, is moved to the second column and
        # rendered as a date string.
        if "payload_time" in return_key_list:
            payload_time_index = return_key_list.index("payload_time")
            return_key_list.remove("payload_time")
            return_key_list.insert(1, "payload_time")
            for i in range(len(return_value_list)):
                payload_time = return_value_list[i].pop(payload_time_index)
                if payload_time:
                    return_value_list[i].insert(1, self.unix2str(str(payload_time)[:10]))
                else:
                    return_value_list[i].insert(1, None)

        dt = {
            "key_list": return_key_list,
            "total_count": total_count,
            "count": len(return_value_list),
            "start_date": self.unix2str(str_start_time[:10]),
            "values_lists": return_value_list
        }
        end_search = time.time()
        log.info("time used {}".format(end_search-start_search))

        self.write(json.dumps(dt))
        return


class LastGroupHandler4(InfluxdbHandler):
    """Paged grouped last-value query using the influxdb client library.

    Unlike LastGroupHandler3 this paginates forward in time: it keeps
    extending an estimated query window (growing ``estimated_value``) until a
    full page of data-bearing buckets has been collected or the window end is
    reached.  Only buckets that actually contain reported data (non-null
    payload_time) are returned.
    """

    @BasicAuth
    @gen.coroutine
    def get(self):
        """Validate arguments and write one page of grouped last-values.

        Query arguments:
            client     -- measurement (device id).
            create1/2  -- window start / end dates (checked by verify_date).
            fidis_name -- database name hint, resolved via get_fidis_ip.
            interval   -- bucket width (default "1").
            unit       -- bucket unit: 'm', 'h' or 's'.
            keys       -- comma separated field names, or "*" for all.
            limit      -- page size (default 50).

        Writes HTTP 400 with ``{"success": False, ...}`` on bad arguments.
        """
        start_search = time.time()
        clientid = self.get_argument('client', None)
        start_date = self.get_argument('create1', None)
        end_date = self.get_argument('create2', None)
        message_type = self.get_argument('message_type', None)  # accepted but currently unused
        fidis_name = self.get_argument('fidis_name', None)
        interval = self.get_argument('interval', "1")
        unit = self.get_argument('unit', 'm')
        group_by = "{}{}".format(interval, unit)
        # Timestamps are nanosecond-epoch strings (see verify_date on the base class).
        str_unix_start, str_unix_end = self.verify_date(start_date, end_date, 0, 0)
        int_unix_start = int(str_unix_start)
        int_unix_end = int(str_unix_end)
        keys = self.get_argument('keys', "*")
        page_size = self.get_argument("limit", 50)
        if unit not in ['m', 'h', 's']:
            dt = {
                "success": False,
                "error": "invalid unit"
            }
            self.write(dt)
            self.set_status(400)
            return
        try:
            interval = int(interval)
            page_size = int(page_size)
        except (TypeError, ValueError) as e:
            # Non-numeric interval / limit arguments.
            dt = {
                "success": False,
                "error": e.args
            }
            self.write(dt)
            self.set_status(400)
            return
        # Look-back window (ns) used to seed / key the mosaic: two days.
        cache_time = 24 * 2 * 3600 * 1000000000
        # Seconds per unit; used to size one group-by bucket in nanoseconds.
        unit_dict = {
            "h": 3600,
            "m": 60,
            "s": 1
        }
        fidis_name, ip = self.get_fidis_ip(fidis_name, clientid)
        log.info(fidis_name)
        log.info(ip)
        # If the requested end time is in the future, clamp it to "now".
        if int_unix_end > int(self.str2unix(None)):
            int_unix_end = int(self.str2unix(None))
        if int_unix_start > int(self.str2unix(None)):
            # Window starts in the future: nothing can match.
            dt = {
                "key_list": [],
                "total_count": 0,
                "count": 0,
                "start_date": self.unix2str(str(int_unix_start)[:10]),
                "values_lists": []
            }
            self.write(json.dumps(dt))
            return

        # payload_time is always queried so data-bearing buckets can be told
        # apart from carried-forward mosaic rows.
        if keys != "*" and "payload_time" not in keys:
            keys += ",payload_time"
        # One client serves every query below (no need to reconnect per query).
        influx_client = InfluxDBClient(database=fidis_name, host=ip)
        # With last(*) InfluxDB names columns last_<key>; with an explicit
        # last(k1),last(k2) list they come back as last, last_1, last_2, ...
        # so payload_time (appended last) is last_<n-1>.
        payload_time_key = "last_payload_time" if keys == "*" else "last_{}".format(len(keys.split(','))-1)
        select_str = ",".join(["last({})".format(key) for key in keys.split(',')])

        # Discover all keys present near the end of the window (mosaic key set).
        end_mosaic_sql = "select {} from \"{}\" where time <= {} and time >={}".format(select_str,
                                                                                       clientid, int_unix_end,
                                                                                       int_unix_end-cache_time)
        result_end_mosaic = influx_client.query(end_mosaic_sql)
        mosic_dict = {}
        # Initialise every discovered key to None.
        for point_dict in result_end_mosaic.get_points():
            columns = point_dict.keys()
            mosic_dict = dict.fromkeys([key_name for key_name in columns], None)
        # Build the returned key list; for "*" strip InfluxDB's "last_" prefix
        # and hide payload_time (re-appended below in a fixed position).
        if keys == "*":
            temp_mosic = mosic_dict.copy()
            temp_mosic.pop("time")
            temp_mosic.pop(payload_time_key)
            return_key_list = ["time"] + [k[5:] for k in list(temp_mosic.keys())]
        else:
            return_key_list = ["time"] + keys.split(",")
        if "payload_time" not in return_key_list:
            return_key_list.append("payload_time")

        # Seed the mosaic with the newest values just before the window start.
        start_mosaic_sql = "select {} from \"{}\" where time>={} and time <= {}".format(select_str, clientid,
                                                                                        int_unix_start - cache_time,
                                                                                        int_unix_start)
        result = influx_client.query(start_mosaic_sql)
        for point_dict in result.get_points():
            for key, value in point_dict.items():
                mosic_dict[key] = value

        log.debug(start_mosaic_sql)
        log.debug(end_mosaic_sql)
        return_value_list = []
        # Growth factor for the estimated query window; doubled after every
        # round that fails to fill the page.
        estimated_value = 1.5
        limit = page_size
        while len(return_value_list) < page_size and int_unix_start < int_unix_end:
            # Skip forward to the first real (non-mosaic) data point.
            sql0 = "select payload_time from \"{}\" where time >={} and topic !='mosaic' limit 1".format(clientid, int_unix_start)
            result = influx_client.query(sql0, epoch="ns")
            log.debug(sql0)
            for point in result.get_points():
                int_unix_start = int(point.get("time"))

            # Estimate how far ahead we must query to fill the page.
            estimated_time = int(interval * page_size * unit_dict[unit] * estimated_value * 1000000000)
            estimated_end_time = int_unix_start + estimated_time
            estimated_end_time = estimated_end_time if estimated_end_time < int_unix_end else int_unix_end

            # Bucketed query between the computed start and the estimated end.
            # NOTE: "limit {}" needs the space; the original "limit{}" rendered
            # e.g. "limit50", which InfluxDB rejects as a syntax error.
            start_end_mosaic_sql = "select {} from \"{}\" where time>={} " \
                                   "and time <= {}  group by time({}) fill(none) limit {}".format(
                select_str, clientid, int_unix_start, estimated_end_time, group_by, limit)
            log.debug(start_end_mosaic_sql)
            result = influx_client.query(start_end_mosaic_sql, epoch="ns")

            for point_dict in result.get_points():
                # A non-null payload_time means this bucket really has data.
                if point_dict.get(payload_time_key):
                    for key, value in point_dict.items():
                        # "is not None" (as in LastGroupHandler3) so legitimate
                        # falsy readings (0, False) still update the mosaic.
                        if value is not None:
                            mosic_dict[key] = value
                    # One finished row: time first, payload_time second.
                    temp_mosic = mosic_dict.copy()
                    x = str(temp_mosic.pop("time"))[:10]
                    y = str(temp_mosic.pop(payload_time_key))[:10]

                    return_value_list.append([unix2str(x),
                                              unix2str(y)]
                                             + list(temp_mosic.values()))

            # Advance the window and widen the next estimate.
            int_unix_start = estimated_end_time
            estimated_value *= 2
            limit = page_size - len(return_value_list)

        # A single row with no payload_time means only the seed mosaic matched.
        if len(return_value_list) == 1 and mosic_dict.get(payload_time_key) is None:
            return_value_list = []
        return_value_list = return_value_list[:page_size]

        dt = {
            "key_list": return_key_list,
            "page_size": page_size,
            "values_lists": return_value_list,
            "end_time": return_value_list[-1][0] if return_value_list else unix2str(str(int_unix_start)[:10])
        }
        end_search = time.time()
        log.info("time used {}".format(end_search - start_search))
        self.write(json.dumps(dt))
        return



class LastGroupHandler5(InfluxdbHandler):
    """Single-query grouped last-value endpoint using the influxdb client.

    Simpler sibling of LastGroupHandler4: one group-by query over the whole
    requested window (capped at one page) instead of an iteratively grown
    estimated window.  Only buckets with reported data (non-null
    payload_time) are returned.
    """

    @BasicAuth
    @gen.coroutine
    def get(self):
        """Validate arguments and write one page of grouped last-values.

        Query arguments:
            client     -- measurement (device id).
            create1/2  -- window start / end dates (checked by verify_date).
            fidis_name -- database name hint, resolved via get_fidis_ip.
            interval   -- bucket width (default "1").
            unit       -- bucket unit: 'm', 'h' or 's'.
            keys       -- comma separated field names, or "*" for all.
            limit      -- page size (default 50).

        Writes HTTP 400 with ``{"success": False, ...}`` on bad arguments.
        """
        start_search = time.time()
        clientid = self.get_argument('client', None)
        start_date = self.get_argument('create1', None)
        end_date = self.get_argument('create2', None)
        message_type = self.get_argument('message_type', None)  # accepted but currently unused
        fidis_name = self.get_argument('fidis_name', None)
        interval = self.get_argument('interval', "1")
        unit = self.get_argument('unit', 'm')
        group_by = "{}{}".format(interval, unit)
        # Timestamps are nanosecond-epoch strings (see verify_date on the base class).
        str_unix_start, str_unix_end = self.verify_date(start_date, end_date, 0, 0)
        int_unix_start = int(str_unix_start)
        int_unix_end = int(str_unix_end)
        keys = self.get_argument('keys', "*")
        page_size = self.get_argument("limit", 50)
        if unit not in ['m', 'h', 's']:
            dt = {
                "success": False,
                "error": "invalid unit"
            }
            self.write(dt)
            self.set_status(400)
            return
        try:
            interval = int(interval)
            page_size = int(page_size)
        except (TypeError, ValueError) as e:
            # Non-numeric interval / limit arguments.
            dt = {
                "success": False,
                "error": e.args
            }
            self.write(dt)
            self.set_status(400)
            return
        # Look-back window (ns) used to seed / key the mosaic: two days.
        cache_time = 24 * 2 * 3600 * 1000000000
        unit_dict = {
            "h": 3600,
            "m": 60,
            "s": 1
        }
        fidis_name, ip = self.get_fidis_ip(fidis_name, clientid)
        log.info(fidis_name)
        log.info(ip)
        # If the requested end time is in the future, clamp it to "now".
        if int_unix_end > int(self.str2unix(None)):
            int_unix_end = int(self.str2unix(None))
        if int_unix_start > int(self.str2unix(None)):
            # Window starts in the future: nothing can match.
            dt = {
                "key_list": [],
                "total_count": 0,
                "count": 0,
                "start_date": self.unix2str(str(int_unix_start)[:10]),
                "values_lists": []
            }
            self.write(json.dumps(dt))
            return

        # payload_time is always queried so data-bearing buckets can be told
        # apart from carried-forward mosaic rows.
        if keys != "*" and "payload_time" not in keys:
            keys += ",payload_time"
        # One client serves every query below (no need to reconnect per query).
        influx_client = InfluxDBClient(database=fidis_name, host=ip)
        # With last(*) InfluxDB names columns last_<key>; with an explicit
        # last(k1),last(k2) list they come back as last, last_1, last_2, ...
        # so payload_time (appended last) is last_<n-1>.
        payload_time_key = "last_payload_time" if keys == "*" else "last_{}".format(len(keys.split(','))-1)
        select_str = ",".join(["last({})".format(key) for key in keys.split(',')])

        # Discover all keys present near the end of the window (mosaic key set).
        end_mosaic_sql = "select {} from \"{}\" where time <= {} and time >={}".format(select_str,
                                                                                       clientid, int_unix_end,
                                                                                       int_unix_end-cache_time)
        result_end_mosaic = influx_client.query(end_mosaic_sql)
        mosic_dict = {}
        # Initialise every discovered key to None.
        for point_dict in result_end_mosaic.get_points():
            columns = point_dict.keys()
            mosic_dict = dict.fromkeys([key_name for key_name in columns], None)
        # Build the returned key list; for "*" strip InfluxDB's "last_" prefix
        # and hide payload_time (re-appended below in a fixed position).
        if keys == "*":
            temp_mosic = mosic_dict.copy()
            temp_mosic.pop("time")
            temp_mosic.pop(payload_time_key)
            return_key_list = ["time"] + [k[5:] for k in list(temp_mosic.keys())]
        else:
            return_key_list = ["time"] + keys.split(",")
        if "payload_time" not in return_key_list:
            return_key_list.append("payload_time")

        # Seed the mosaic with the newest values just before the window start.
        start_mosaic_sql = "select {} from \"{}\" where time>={} and time <= {}".format(select_str, clientid,
                                                                                        int_unix_start - cache_time,
                                                                                        int_unix_start)
        result = influx_client.query(start_mosaic_sql)
        for point_dict in result.get_points():
            for key, value in point_dict.items():
                mosic_dict[key] = value

        log.debug(start_mosaic_sql)
        log.debug(end_mosaic_sql)
        return_value_list = []
        # One bucketed query over the whole window, capped at one page.
        start_end_mosaic_sql = "select {} from \"{}\" where time>={} and time <= {} " \
                               " group by time({}) fill(none) limit {}".format(
            select_str, clientid, int_unix_start, int_unix_end, group_by, page_size)
        log.debug(start_end_mosaic_sql)
        result = influx_client.query(start_end_mosaic_sql, epoch="ns")
        for point_dict in result.get_points():
            # A non-null payload_time means this bucket really has data.
            if point_dict.get(payload_time_key):
                for key, value in point_dict.items():
                    # "is not None" (as in LastGroupHandler3) so legitimate
                    # falsy readings (0, False) still update the mosaic.
                    if value is not None:
                        mosic_dict[key] = value
                # One finished row: time first, payload_time second.
                temp_mosic = mosic_dict.copy()
                x = str(temp_mosic.pop("time"))[:10]
                y = str(temp_mosic.pop(payload_time_key))[:10]

                return_value_list.append([unix2str(x),
                                          unix2str(y)]
                                         + list(temp_mosic.values()))
        dt = {
            "key_list": return_key_list,
            "count": len(return_value_list),
            "page_size": page_size,
            "values_lists": return_value_list,
            "end_time": return_value_list[-1][0] if return_value_list else unix2str(str(int_unix_end)[:10])
        }
        end_search = time.time()

        log.info("time used {}".format(end_search - start_search))

        self.write(json.dumps(dt))
        return
