import datetime
import json

from db.client_service import DbClientService, MigrateConfig, DbServerService
import sqlparse
from sqlparse.sql import Identifier, Statement, Token, Where
from influxdb import InfluxDBClient
from db.connection_cache import cache
from entity import migrate_pb2
from util import date_util


class InfluxdbClientService(DbClientService):
    """Client side of an InfluxDB migration.

    Parses the requested SQL statements, splits each table's time range
    into at-most-one-day shards, reads each shard from the source
    database, and converts the rows into ``write_points``-style dicts
    for the destination side.
    """

    # Class-level defaults kept for backward compatibility; __init__
    # always assigns per-instance values. (Previously the mutable list
    # below was shared across every instance -- a latent bug.)
    fix_num = None
    # source connection (mysql,db_client)
    conn: InfluxDBClient = None
    sharding_result = []

    def __init__(self, origin_url, dest_url, request, fix_num=50):
        """
        :param origin_url: source InfluxDB URL, parsed via ``config.parse_url``
        :param dest_url: destination URL, stored on the migration config
        :param request: iterable of SQL statements selecting the data to move
        :param fix_num: shard count hint used while parsing
        """
        self.fix_num = fix_num
        # Per-instance list so instances never share shard state.
        self.sharding_result = []
        # NOTE(review): self.config is assumed to be provided by
        # DbClientService -- confirm it is initialized before this runs.
        self.config.original_url = origin_url
        self.config.dest_url = dest_url
        self.request = request

        host, port, db, user, password = self.config.parse_url(origin_url)

        self.conn = InfluxDBClient(host=host, port=int(port), username=user,
                                   password=password, database=db)

    def parse_sql(self):
        """Parse each request statement into a table-name/condition dict.

        For every statement the last ``Identifier`` token becomes
        ``table_name`` and the ``WHERE`` clause (if any) becomes
        ``condition``; each dict is appended to ``self.config.tables``.
        """
        for raw_sql in self.request:
            for statement in sqlparse.parse(raw_sql):
                table = dict()
                for token in statement.tokens:
                    if isinstance(token, Identifier):
                        table['table_name'] = str(token)
                    if isinstance(token, Where):
                        table['condition'] = str(token)
                self.config.tables.append(table)

    def generate_sharding(self, table):
        """Split one table's time range into shards of at most one day.

        The condition is scanned for ``time >= ...`` / ``time <= ...``
        bounds (format ``%Y-%m-%dT%H:%M:%SZ``); a missing bound defaults
        to 30 days from the present one. Shard dicts are appended to
        ``self.sharding_info``.

        :param table: dict with ``table_name`` and optional ``condition``
        :raises ValueError: when the condition contains no time bound at all
        """
        table_name = table['table_name']
        condition: str = table.get('condition', 'where 1=1')

        gt = None
        lt = None
        # e.g. "where time >= '2018-07-03T00:00:00Z' and time <= '...'"
        for sub_cond in condition.replace("where", "").strip().split("and"):
            if ">=" in sub_cond:
                gt_str = sub_cond.split(">=")[1].replace("'", "").strip()
                if gt_str:
                    gt = datetime.datetime.strptime(gt_str, "%Y-%m-%dT%H:%M:%SZ")
            if "<=" in sub_cond:
                lt_str = sub_cond.split("<=")[1].replace("'", "").strip()
                if lt_str:
                    lt = datetime.datetime.strptime(lt_str, "%Y-%m-%dT%H:%M:%SZ")
        if gt is None and lt is None:
            # Previously this fell through to an opaque TypeError on
            # ``None + timedelta``; fail with a clear message instead.
            raise ValueError(
                "condition of table {} contains no time bound".format(table_name))
        if gt is None:
            gt = lt - datetime.timedelta(days=30)
        if lt is None:
            lt = gt + datetime.timedelta(days=30)

        is_first = True
        start_time = gt
        index = 0
        temp_sharing_info = []
        while True:
            end_time = start_time + datetime.timedelta(days=1)
            if end_time >= lt:
                # Final (possibly partial) shard, clamped to the upper bound.
                temp_sharing_info.append(dict(
                    left=start_time,
                    right=lt,
                    table_name=table_name,
                    is_first=is_first,
                    batch_num=index
                ))
                break

            temp_sharing_info.append(dict(
                left=start_time,
                right=end_time,
                table_name=table_name,
                is_first=is_first,
                batch_num=index
            ))
            start_time = end_time
            is_first = False
            index = index + 1

        # "Migration of table {} will be split into {} shards, each spanning
        # at most one day" (runtime string kept unchanged).
        print("当前表{}的迁移将被分为{}片,每片最大跨度1天".format(
            table_name,
            len(temp_sharing_info)))
        self.sharding_info.extend(temp_sharing_info)

    def get_sharding_result(self, sharding_info):
        """Read one shard from the source and build insertable rows.

        The first shard uses an inclusive lower bound (``time >=``);
        later shards use an exclusive one (``time >``) so boundary rows
        are not migrated twice.

        :param sharding_info: dict with ``left``/``right`` datetimes,
            ``table_name`` and ``is_first``
        :return: list of dicts shaped for InfluxDB ``write_points``
        """
        start = sharding_info['left']
        end = sharding_info['right']
        table_name = sharding_info['table_name']
        is_first = sharding_info.get("is_first")

        if is_first:
            sql = "select * from {} where time>='{}' and time<='{}'".format(table_name,
                                                                        date_util.dt_to_str(start),
                                                                        date_util.dt_to_str(end))
        else:
            sql = "select * from {} where  time>'{}' and time<='{}'".format(table_name,
                                                                        date_util.dt_to_str(start),
                                                                        date_util.dt_to_str(end))

        resp = list(self.conn.query(sql))
        if not resp:
            return []

        results = resp[0]

        # NOTE(review): ``fields=result`` also carries the ``time`` key inside
        # the field map -- confirm the destination tolerates that.
        return [dict(measurement=table_name, time=result['time'], fields=result)
                for result in results]

    def start_sharding(self):
        """Generate shards for every parsed table."""
        for table in self.config.tables:
            self.generate_sharding(table)

    def show_create_table(self, stub, db):
        """Ask the destination side to (re)create each table.

        InfluxDB creates measurements automatically on insert, so the
        ``schema`` sent over is always ``None``.

        :param stub: gRPC stub exposing ``start``
        :param db: destination database identifier
        """
        for table in self.config.tables:
            table_name = table['table_name']
            schema = None

            params = dict()
            params['key'] = self.config.dest_url
            params['dest_url'] = self.config.dest_url
            params['schema'] = schema
            params['table_name'] = table_name

            response = stub.start(migrate_pb2.Data(db=db, type='createTable',
                                                   data=json.dumps(params, ensure_ascii=False)))
            if response.message == 'ok':
                print("数据库建表执行完毕:{}".format(table_name))


class InfluxServerService(DbServerService):
    """Server side of an InfluxDB migration: owns the destination
    connection and writes the rows the client side produces."""

    def __init__(self, key, db_type, dest_url):
        """
        :param key: cache key identifying the pooled connection
        :param db_type: database type tag passed to the connection cache
        :param dest_url: destination InfluxDB URL
        """
        super().__init__(dest_url)

        self.config.dest_url = dest_url
        parsed = self.config.parse_url(dest_url)
        host, port, db, user, password = parsed
        pooled = cache.get(key, db_type, host, port, user, password, db)
        self.conn = pooled.get_conn()

    def create_table(self, table_name, schema):
        """Reset a measurement before migration.

        InfluxDB creates measurements automatically on insert, so
        "creating" here only drops any existing one.

        :param table_name: measurement to reset
        :param schema: unused for InfluxDB
        """
        self.conn.drop_measurement(table_name)

    def execute_insert(self, row_list):
        """Write a batch of point dicts into the destination database.

        :param row_list: list of ``write_points``-compatible dicts
        """
        self.conn.write_points(row_list)


if __name__ == "__main__":
    gt_str = None
    lt_str = None
    gt = None
    lt = None
    condition = "where time >= '2018-07-03 00:00:00'"
    # condition = "where time >= '2018-07-03 00:00:00' and time <= '{dt_end_tz}'"
    # condition = "where time >= '2018-07-03 00:00:00' and time <= '{dt_end_tz}'"
    condition_list = condition.replace("where","").strip().split("and")
    for sub_cond in condition_list:
        if ">=" in sub_cond:
            gt_str = sub_cond.split(">=")[1].replace("'","").strip()
        if "<=" in sub_cond:
            lt_str = sub_cond.split("<=")[1].replace("'","").strip()
    if gt_str:
        gt = datetime.datetime.strptime(gt_str,"%Y-%m-%d %H:%M:%S")
    if lt_str:
        lt = datetime.datetime.strptime(lt_str,"%Y-%m-%d %H:%M:%S")
    if gt is None and lt is not None:
        gt = lt-datetime.timedelta(days=30)
    if gt is not None and lt is None:
        lt = gt+datetime.timedelta(days=30)
    pass



    # service = MysqlClientService(
    #     "10.8.0.2:3306/ks-tequila-v2?username=root&password=Luck158,",
    #     "",
    #     "select * from task_record"
    # )
    # service.start()
    #
    # all = []
    # sharding_info_list = service.sharding_info

