from  .InfluxdbHandler import InfluxdbHandler
from .InfluxdbHandler import InfluxdbHandler
from util.BasicAuth import BasicAuth
from util.time_transform import unix2str
from util.time_transform import str2unix
import json
from urllib.parse import urlencode
import time
import math
from tornado import gen
from util.logConf import log
from util.my_error import InfluxdbError


class PayloadHandler(InfluxdbHandler):
    """Serve paginated message payloads for one client from InfluxDB.

    GET parameters:
        client       -- measurement name (client id) to query.
        create1      -- optional start date; validated via ``verify_date``.
        create2      -- optional end date; validated via ``verify_date``.
        message_type -- optional exact ``topic`` filter; when absent, rows
                        with topic 'mosaic' are excluded.
        fidis_name   -- optional database name, resolved together with the
                        client ip by ``get_fidis_ip``.
        limit        -- page size (default 50).
        page_index   -- 1-based page number (default 1).

    Response: JSON with ``success``, pagination echo, ``total_count`` and
    ``rows`` of ``[client, topic, payload, time-string]``.
    """

    @BasicAuth
    @gen.coroutine
    def get(self):
        clientid = self.get_argument('client', None)
        start_date = self.get_argument('create1', None)
        end_date = self.get_argument('create2', None)
        message_type = self.get_argument('message_type', None)
        fidis_name = self.get_argument('fidis_name', None)
        unix_start, unix_end = self.verify_date(start_date, end_date, 1, 1)

        # Validate pagination up front. Converting to int here (instead of
        # interpolating the raw request strings into the SQL below) closes an
        # injection vector via limit/page_index and makes the echoed
        # page_index/page_size consistently integers.
        try:
            limit = int(self.get_argument('limit', 50))
            page = int(self.get_argument('page_index', 1))
            offset = (page - 1) * limit
        except (TypeError, ValueError) as e:
            # Narrow catch: only conversion failures are client errors here.
            self.set_status(400)
            self.write({
                "success": False,
                "error": e.args
            })
            return
        if page < 1:
            self.set_status(400)
            self.write({
                "success": False,
                "error": 'page_index must >0 '
            })
            return

        fidis_name, ip = self.get_fidis_ip(fidis_name, clientid)
        influxdb_config = {
            "host": ip,
            "port": "8086"
        }

        # NOTE(review): clientid and message_type are interpolated into
        # InfluxQL unescaped -- injection risk if these arguments can come
        # from untrusted callers; consider sanitizing upstream.
        if message_type:
            topic_str = " and topic = '{}' ".format(message_type)
        else:
            topic_str = " and topic != 'mosaic'"

        # Pick the time window clause; with no explicit window we page
        # backwards from "now" (str2unix(None)).
        if start_date and end_date:
            limit_str = "time > {} and time < {} {} order by time".format(unix_start, unix_end, topic_str)
        elif start_date:
            limit_str = "time > {} {} order by time".format(unix_start, topic_str)
        elif end_date:
            limit_str = "time < {}  {} order by time desc".format(unix_end, topic_str)
        else:
            limit_str = "time < {} {} order by time desc".format(str2unix(None), topic_str)

        # Two statements in one request: the page of rows, and the total count
        # for the same predicate (for pagination UI).
        sql = "select time, payload, topic  from \"{}\" where {} limit {} offset {}".format(clientid, limit_str, limit, offset)
        sql2 = "select count(payload) from \"{}\" where {}".format(clientid, limit_str)
        url = "http://{}:{}/query?".format(influxdb_config["host"], influxdb_config["port"])
        params = {
            "db": fidis_name,
            "epoch": "s",
            "q": sql + ';' + sql2
        }
        try:
            body = yield self.query_influxdb(url + urlencode(params))
        except InfluxdbError as e:
            self.write({"success": False, "error": e.reason})
            return

        result = json.loads(body).get('results')
        if not result:
            # BUG FIX: the original handler returned silently here, leaving
            # the client with an empty 200 body; report the failure instead.
            self.set_status(500)
            self.write({"success": False, "error": "no results from influxdb"})
            return

        # result[0]: the page of rows; result[1]: the count query.
        series = result[0].get("series")
        series_count = result[1].get('series')
        if series_count:
            total_count = series_count[0].get('values')[0][1]
        else:
            total_count = 0

        rows = []
        if series:
            # Each value row is [time, payload, topic] (epoch seconds first).
            for insert_time, payload, topic in series[0].get("values"):
                rows.append([clientid, topic, payload, unix2str(insert_time)])

        self.write({
            "success": True,
            "page_index": page,
            "page_size": limit,
            "offset": offset,
            "total_count": total_count,
            "rows": rows
        })