from  .InfluxdbHandler import InfluxdbHandler
from .InfluxdbHandler import InfluxdbHandler
from util.BasicAuth import BasicAuth
from util.time_transform import unix2str
from util.time_transform import str2unix
import json
from urllib.parse import urlencode
import time
from influxdb import InfluxDBClient
import math
from tornado import gen
from util.logConf import log
from util.my_error import InfluxdbError


class MosaicHandler(InfluxdbHandler):
    """Serve time-bucketed "mosaic" snapshots of a client's InfluxDB fields.

    GET parameters:
        client      -- InfluxDB measurement name (the client id).
        create1     -- window start date (parsed by ``verify_date``).
        create2     -- window end date.
        fidis_name  -- database name; resolved to a host via ``get_fidis_ip``.
        interval    -- bucket size, combined with ``unit`` (default "1").
        unit        -- bucket unit: 'm', 'h' or 's' (default 'm').
        keys        -- comma-separated field names, or "*" for all (default "*").
        order       -- bucket sort order, 'asc' or 'desc' (default 'asc').

    Writes a JSON document with the key list, the per-bucket value rows
    (each row carries forward the last known value of every key — the
    "mosaic"), and the effective start/end/next dates.
    """

    @BasicAuth
    @gen.coroutine
    def get(self):
        start_search = time.time()
        clientid = self.get_argument('client', None)
        start_date = self.get_argument('create1', None)
        end_date = self.get_argument('create2', None)
        fidis_name = self.get_argument('fidis_name', None)
        interval = self.get_argument('interval', "1")
        unit = self.get_argument('unit', 'm')
        keys = self.get_argument('keys', "*")
        order = self.get_argument('order', 'asc')
        group_by = "{}{}".format(interval, unit)
        str_unix_start, str_unix_end = self.verify_date(start_date, end_date, 0, 0)
        int_unix_start = int(str_unix_start)
        int_unix_end = int(str_unix_end)

        # Validate enum-like parameters up front.  set_status() before
        # write() so the status line is settled before the body is buffered.
        if order not in ('asc', 'desc'):
            self.set_status(400)
            self.write({
                "success": False,
                "error": "invalid order"
            })
            return
        if unit not in ('m', 'h', 's'):
            self.set_status(400)
            self.write({
                "success": False,
                "error": "invalid unit"
            })
            return

        fidis_name, ip = self.get_fidis_ip(fidis_name, clientid)
        log.info(fidis_name)
        log.info(ip)

        # Clamp the window to "now": a future end time becomes now, and a
        # future start time yields an empty result.  str2unix(None) appears
        # to return the current time (ns epoch, judging by the comparisons)
        # -- NOTE(review): confirm against InfluxdbHandler.str2unix.
        now_ns = int(self.str2unix(None))
        if int_unix_end > now_ns:
            int_unix_end = now_ns
        if int_unix_start > now_ns:
            dt = {
                "key_list": [],
                "total_count": 0,
                "count": 0,
                "start_date": self.unix2str(str(int_unix_start)[:10]),
                "values_lists": []
            }
            self.write(json.dumps(dt))
            return

        # One client serves every query below (fix: the original constructed
        # a fresh InfluxDBClient before each of the five queries).
        influx_client = InfluxDBClient(database=fidis_name, host=ip)

        # Advance the start to the first real (non-mosaic) record at or
        # after the requested start, skipping empty leading buckets.
        sql0 = "select payload_time from \"{}\" where time >={} and topic !='mosaic' limit 1".format(clientid,
                                                                                                     int_unix_start)
        log.debug(sql0)
        result = influx_client.query(sql0, epoch="ns")
        for point in result.get_points():
            int_unix_start = int(point.get("time"))
            log.debug("new payload time")
            log.debug(unix2str(str(int_unix_start)[:10]))
        if int_unix_start > int_unix_end:
            # No data at all inside the requested window.
            dt = {
                "key_list": [],
                "total_count": 0,
                "count": 0,
                "start_date": self.unix2str(str(int_unix_start)[:10]),
                "next_date": unix2str(str(int_unix_start)[:10]),
                "values_lists": []
            }
            self.write(json.dumps(dt))
            return

        in_keys = keys
        # Fix: exact membership on the split list, not a substring test on
        # the raw string (which e.g. "payload_timestamp" would satisfy).
        if keys != "*" and "payload_time" not in keys.split(','):
            keys += ",payload_time"

        # InfluxDB names last(*) columns "last_<field>"; an explicit list
        # last(k0),last(k1),... yields "last", "last_1", "last_2", ...
        # Fix: derive the payload_time column from its actual position
        # instead of assuming it is always the final selected key.
        if keys == "*":
            payload_time_key = "last_payload_time"
        else:
            payload_time_index = keys.split(',').index("payload_time")
            payload_time_key = "last" if payload_time_index == 0 \
                else "last_{}".format(payload_time_index)
        select_str = ",".join(["last({})".format(key) for key in keys.split(',')])

        # Look back up to 48 hours for the most recent value of each key.
        cache_time = 24 * 2 * 3600 * 1000000000

        # Seed the set of mosaic keys from the data closest to the end time.
        end_mosaic_sql = "select {} from \"{}\" where time <= {} and time >={}".format(select_str,
                                                                                       clientid, int_unix_end,
                                                                                       int_unix_end - cache_time)
        result_end_mosaic = influx_client.query(end_mosaic_sql)
        mosic_dict = {}
        for point_dict in result_end_mosaic.get_points():
            columns = point_dict.keys()
            mosic_dict = dict.fromkeys([key_name for key_name in columns], None)

        # Build the returned key list; time and payload_time come first and
        # payload_time itself is not repeated among the data keys.
        # Fix: pop with a default -- mosic_dict is empty when the lookback
        # window held no data, and a bare pop() raised KeyError (HTTP 500).
        temp_mosic = mosic_dict.copy()
        temp_mosic.pop("time", None)
        temp_mosic.pop(payload_time_key, None)
        if keys == "*":
            # Strip the "last_" prefix InfluxDB adds to last(*) columns.
            return_key_list = ["time", "payload_time"] + [k[5:] for k in list(temp_mosic.keys())]
        else:
            return_key_list = ["time", "payload_time"] + in_keys.split(',')

        # Seed the initial mosaic values from the data closest to the start.
        start_mosaic_sql = "select {} from \"{}\" where time>={} and time <= {}".format(select_str, clientid,
                                                                                        int_unix_start - cache_time,
                                                                                        int_unix_start)
        result = influx_client.query(start_mosaic_sql)
        for point_dict in result.get_points():
            for key, value in point_dict.items():
                mosic_dict[key] = value

        log.debug(start_mosaic_sql)
        log.debug(end_mosaic_sql)
        return_value_list = []

        # Walk the buckets between start and end; each bucket updates the
        # mosaic with any new values and emits one carried-forward row.
        start_end_mosaic_sql = "select {} from \"{}\" where time>={} and time <= {} " \
                               " group by time({}) fill(none) order by time {} ".format(
            select_str, clientid, int_unix_start, int_unix_end, group_by, order)
        log.debug(start_end_mosaic_sql)
        result = influx_client.query(start_end_mosaic_sql, epoch="ns")
        for point_dict in result.get_points():
            for key, value in point_dict.items():
                if value:
                    mosic_dict[key] = value
            # Snapshot the mosaic for this bucket; time goes in column 0
            # and payload_time in column 1, matching return_key_list.
            temp_mosic = mosic_dict.copy()
            x = str(temp_mosic.pop("time"))[:10]
            y = str(temp_mosic.pop(payload_time_key))[:10]
            return_value_list.append([unix2str(x),
                                      unix2str(y)]
                                     + list(temp_mosic.values()))
        end_search = time.time()
        log.info("time used {}".format(end_search - start_search))

        # Find the first record after the window end so the caller can page.
        sqlx = "select payload_time from \"{}\" where time >={} and topic !='mosaic' limit 1".format(clientid,
                                                                                                     int_unix_end)
        result = influx_client.query(sqlx, epoch="s")
        log.debug(sqlx)
        int_next_time = None
        for point in result.get_points():
            int_next_time = int(point.get("time"))
            log.debug("new next_date time")
            log.debug(unix2str(str(int_next_time)[:10]))
        if int_next_time:
            # Fix: pass the same str(...)[:10] form as every other call
            # site; the original passed the raw int to unix2str here.
            next_date = unix2str(str(int_next_time)[:10])
        else:
            next_date = "No data"
        dt = {
            "key_list": return_key_list,
            "total_count": len(return_value_list),
            # Added for consistency with the early-return payloads above.
            "count": len(return_value_list),
            "values_lists": return_value_list,
            "start_date": self.unix2str(str(int_unix_start)[:10]),
            "end_date": self.unix2str(str(int_unix_end)[:10]),
            "next_date": next_date,
            "time_used": end_search - start_search
        }
        self.write(json.dumps(dt))
