from .InfluxdbHandler import InfluxdbHandler
from util.BasicAuth import BasicAuth
from util.PyMysql import PyMysql
import json
import time
from datetime import datetime
import requests
from influxdb import InfluxDBClient


class InfluxdbCache(InfluxdbHandler):
    """Handlers that (re)build "mosaic" cache points in InfluxDB.

    POST backfills the cache for every fidis instance, one point per day
    from each measurement's earliest payload_time up to now.
    GET writes a single cache point at the current time for every instance.
    """

    # Host deliberately excluded from caching (hard-coded skip in the original).
    EXCLUDED_IP = "211.154.155.23"

    # One day expressed in nanoseconds (InfluxDB timestamps are ns).
    TIME_CHUNK_NS = 24 * 3600 * 1000000000

    def _fidis_rows(self):
        """Return [(fidis_id, ip), ...] from MySQL, skipping the excluded host.

        The connection is closed before returning so it is not held open
        during the long InfluxDB work that follows.  Assumes the cursor
        yields dict rows with "fidis_id"/"ip" keys — TODO confirm the
        PyMysql cursor class.
        """
        my_sql = PyMysql()
        con = my_sql.get_connection()
        try:
            with con.cursor() as cursor:
                cursor.execute("SELECT fidis_id,ip FROM gards_fidis_ip")
                rows = cursor.fetchall()
        finally:
            # The original leaked this connection; always release it.
            con.close()
        return [(r.get("fidis_id"), r.get("ip"))
                for r in rows if r.get("ip") != self.EXCLUDED_IP]

    # Rebuild (backfill) the mosaic cache from scratch.
    @BasicAuth
    def post(self):
        """Backfill the mosaic cache for every registered fidis instance."""
        for fidis_id, ip in self._fidis_rows():
            self.do(ip, fidis_id)
        # Report success like GET does (the original POST sent no body).
        self.write(json.dumps({"success": "ok"}))

    # Run the cache once at the current time.
    @BasicAuth
    def get(self):
        """Write one cache point at "now" for every registered fidis instance."""
        for fidis_id, ip in self._fidis_rows():
            self.doJob(ip, fidis_id)
        self.write(json.dumps({"success": "ok"}))

    def do(self, ip, fidis_id):
        """Backfill cache points for every measurement of one fidis instance.

        Starting from the measurement's earliest payload_time, writes one
        cache point per day (TIME_CHUNK_NS) until reaching the current time.
        Query failures for a single measurement are logged and skipped so
        the rest of the backfill can proceed.
        """
        client = InfluxDBClient(database=fidis_id, host=ip)
        result = client.query("show measurements")
        now = int(time.time()) * 1000000000  # current time in ns
        for point in result.get_points():
            clientid = point.get("name")
            try:
                res = client.query("select payload_time from \"{}\" order by time limit 1".format(clientid))
            except Exception:  # was a bare except; keep best-effort behaviour, just narrower
                print("query error {} {} {}".format(clientid, fidis_id, ip))
                continue
            for p in res.get_points():
                payload_time = int(p.get("payload_time"))
                # A nanosecond timestamp has 19 digits; anything shorter is
                # in the wrong unit and would produce bogus dates — skip it.
                if len(str(payload_time)) < 19:
                    print("payload_time {} has fewer than 19 digits (not nanoseconds), skipping".format(payload_time))
                    continue
                tm = datetime.fromtimestamp(payload_time / 1000000000)
                print("start payload_time is ")
                print(tm.strftime("%Y-%m-%d"))
                end_time = payload_time + self.TIME_CHUNK_NS
                tm = datetime.fromtimestamp(now / 1000000000)
                print("now is ")
                print(tm.strftime("%Y-%m-%d"))
                while end_time < now:
                    print("cache once")
                    tm = datetime.fromtimestamp(end_time / 1000000000)
                    print(tm.strftime("%Y-%m-%d"))
                    self.cache(fidis_id=fidis_id, clientid=clientid, cache_time=end_time)
                    end_time += self.TIME_CHUNK_NS
                    tm = datetime.fromtimestamp(end_time / 1000000000)
                    print("cache done")
                    print(tm.strftime("%Y-%m-%d"))

    def doJob(self, ip, fidis_id):
        """Write a single cache point at the current time for every measurement."""
        client = InfluxDBClient(database=fidis_id, host=ip)
        result = client.query("show measurements")
        now = int(time.time()) * 1000000000  # current time in ns
        for point in result.get_points():
            self.cache(fidis_id, clientid=point.get("name"), cache_time=now)

    def cache(self, fidis_id, clientid, cache_time):
        """Copy the last field values of *clientid* at/before *cache_time*
        into a new point tagged ``topic=mosaic`` via the InfluxDB HTTP
        write API.

        ``last(*)`` returns fields named ``last_<field>``; the prefix is
        stripped to recover the original field names before re-writing.
        """
        sql = "select last(*) from \"{}\" where time <={}".format(clientid, cache_time)
        fidis_name, ip = self.get_fidis_ip(fidis_id, clientid)
        port = "8086"
        client = InfluxDBClient(database=fidis_name, host=ip, port=port)

        res = client.query(sql)
        url = "http://{}:{}/write?db={}".format(ip, port, fidis_id)
        for point in res.get_points():
            kv_list = []
            for k, v in point.items():
                if k == "time":
                    continue
                if v == "None" or v is None:
                    continue

                field = k.replace("last_", "")
                if field == "payload_time":
                    # Force an integer field in line protocol.
                    kv_list.append("{}={}i".format(field, v))
                elif field == "payload":
                    # json.dumps doubles as a line-protocol string escape here.
                    kv_list.append("{}={}".format(field, json.dumps(v)))
                elif isinstance(v, str):
                    kv_list.append("{}=\"{}\"".format(field, v))
                else:
                    kv_list.append("{}={}".format(field, v))
            post_data = "{},topic=mosaic {} {}".format(clientid, ",".join(kv_list), cache_time)
            try:
                r = requests.post(url, data=post_data.encode("utf-8"))
                print("-----------------------------------------------------")
                print(r.status_code)
                print("-----------------------------------------------------")
            except Exception as e:
                print(post_data)
                print(e)
                print(e.args)
                # NOTE(review): 10000 seconds is ~2.8 hours; this looks like
                # it was meant to be 10 — confirm before changing.
                time.sleep(10000)