from db.client_service import DbClientService, MigrateConfig, DbServerService
import sqlparse
from sqlparse.sql import Identifier, Statement, Token, Where
import pymysql
from typing import List

from entity import migrate_pb2
from util import sql_util
from db.connection_cache import cache
import json


class MysqlClientService(DbClientService):
    """Source-side MySQL client for the migration pipeline.

    Parses the user's SELECT request, splits each table into consecutive
    ``_rowid`` shards of at most ``fix_num`` rows, and renders per-row INSERT
    statements for the destination side.

    NOTE(review): ``self.config`` and ``self.sharding_info`` are not created
    here — presumably supplied by ``DbClientService``; verify against the base
    class.
    """

    # Maximum number of rows per shard (overridden per-instance in __init__).
    fix_num = None
    # Live pymysql connection to the origin database (set in __init__).
    conn = None
    # NOTE(review): class-level mutable default is shared across instances;
    # __init__ now shadows it with a per-instance list (see fix below).
    sharding_result = []

    def __init__(self, origin_url, dest_url, request, fix_num=50):
        """
        :param origin_url: source database URL, parsed by ``config.parse_url``
        :param dest_url: destination database URL
        :param request: SQL SELECT statement describing what to migrate
        :param fix_num: maximum number of rows per shard when splitting
        """
        self.fix_num = fix_num
        self.config.original_url = origin_url
        self.config.dest_url = dest_url
        self.request = request
        # Fix: give every instance its own result list instead of mutating the
        # shared class-level one.
        self.sharding_result = []

        host, port, db, user, password = self.config.parse_url(origin_url)

        # DictCursor so rows come back as dicts keyed by column name
        # (generate_sharding and get_sharding_result rely on this).
        self.conn = pymysql.Connect(
            host=host,
            port=int(port),
            user=user,
            passwd=password,
            db=db,
            charset='utf8',
            cursorclass=pymysql.cursors.DictCursor)

    def parse_sql(self):
        """Parse ``self.request`` and record the target table and WHERE clause.

        Only the first statement of the request is considered.  The extracted
        ``{'table_name': ..., 'condition': ...}`` dict is appended to
        ``self.config.tables``.
        """
        statements = sqlparse.parse(self.request)
        statement: Statement = statements[0]
        table = dict()
        for token in statement.tokens:
            if isinstance(token, Identifier):
                table['table_name'] = str(token)
            if isinstance(token, Where):
                table['condition'] = str(token)
        self.config.tables.append(table)

    def generate_sharding(self, table):
        """Split *table* into ``_rowid`` ranges of at most ``fix_num`` rows.

        One descriptor dict per range is appended to ``self.sharding_info``.

        :param table: dict with ``'table_name'`` and optional ``'condition'``
                      (as produced by :meth:`parse_sql`).
        """
        fix_num = self.fix_num
        table_name = table['table_name']
        condition = table.get('condition', 'where 1=1')

        cursor = self.conn.cursor()
        try:
            # Find the smallest matching _rowid to start from.
            first_sql = "select _rowid from {} {}".format(table_name, condition)
            first_sql = first_sql + " and _rowid>0 order by _rowid asc limit 1"
            cursor.execute(first_sql)
            result = cursor.fetchone()
            if result is None:
                # No matching rows: nothing to shard.
                return

            row_id = result['_rowid']
            is_first = True
            index = 0
            while True:
                sql = "select _rowid from {} {}".format(table_name, condition)
                # The very first shard must include the starting _rowid itself,
                # otherwise that row would be skipped and the total count wrong.
                if is_first:
                    sql_with_rowid = sql + " and _rowid>={} order by _rowid limit {}".format(row_id, fix_num)
                else:
                    sql_with_rowid = sql + " and _rowid>{} order by _rowid limit {}".format(row_id, fix_num)
                cursor.execute(sql_with_rowid)
                results: List = cursor.fetchall()
                if not results:
                    break
                left = row_id
                row_id = results[-1]['_rowid']
                right = row_id

                self.sharding_info.append(dict(
                    left=left,
                    right=right,
                    condition=condition,
                    table_name=table_name,
                    size=len(results),
                    is_first=is_first,
                    batch_num=index
                ))
                is_first = False
                index += 1
                # A short page means the table is exhausted; skip the extra
                # empty round-trip the original code performed.
                if len(results) < fix_num:
                    break

            print("当前表{}的迁移将被分为{}片,每片最大{}条".format(
                table_name,
                len(self.sharding_info),
                fix_num))
        finally:
            # Fix: the original leaked the cursor on the empty-table early return.
            cursor.close()

    def get_sharding_result(self, sharding_info):
        """Render the INSERT statements for one shard.

        :param sharding_info: one descriptor from ``self.sharding_info``.
        :return: list of INSERT SQL strings, one per row in the shard.
        """
        left = sharding_info['left']
        right = sharding_info['right']
        table_name = sharding_info['table_name']
        condition = sharding_info.get("condition")
        is_first = sharding_info.get("is_first")

        # First shard is the closed interval [left, right]; later shards are
        # half-open (left, right] so adjacent shards never overlap.
        if is_first:
            sql = "select * from {} {} and _rowid>={} and _rowid<={}"
        else:
            sql = "select * from {} {} and _rowid>{} and _rowid<={}"

        cursor = self.conn.cursor()
        try:
            cursor.execute(sql.format(table_name, condition, left, right))
            results = cursor.fetchall()
        finally:
            # Fix: close the cursor even if the query raises.
            cursor.close()

        return [sql_util.generate_insert_sql(result, table_name) for result in results]

    def start_sharding(self):
        """Generate shard descriptors for every parsed table."""
        for table in self.config.tables:
            self.generate_sharding(table)

    def show_create_table(self, stub, db):
        """Replay each source table's DDL on the destination via the gRPC *stub*.

        :param stub: gRPC stub exposing ``start(migrate_pb2.Data)``.
        :param db: database identifier forwarded in the request.
        """
        # Fix: reuse one cursor and close it; the original opened a cursor per
        # table and never closed any of them.
        cursor = self.conn.cursor()
        try:
            for table in self.config.tables:
                table_name = table['table_name']
                cursor.execute("show create table " + table_name)
                result = cursor.fetchone()
                schema = result['Create Table']

                # NOTE(review): 'key' is set to dest_url, mirroring the server
                # side's cache key — confirm this is intentional.
                params = dict()
                params['key'] = self.config.dest_url
                params['dest_url'] = self.config.dest_url
                params['schema'] = schema
                params['table_name'] = table_name

                response = stub.start(migrate_pb2.Data(db=db, type='createTable',
                                                       data=json.dumps(params, ensure_ascii=False)))
                if response.message == 'ok':
                    print("数据库建表执行完毕:{}".format(table_name))
        finally:
            cursor.close()


class MySqlServerService(DbServerService):
    """Destination-side MySQL service.

    Resolves a pooled connection from the module-level cache and applies DDL /
    INSERT statements produced by the client side.
    """

    def __init__(self, key, db_type, dest_url):
        """
        :param key: cache key identifying the pooled connection
        :param db_type: database type forwarded to the connection cache
        :param dest_url: destination database URL (also passed to the base class)
        """
        super().__init__(dest_url)

        self.config.dest_url = dest_url
        host, port, db, user, password = self.config.parse_url(dest_url)
        connection = cache.get(key, db_type, host, port, user, password, db)
        self.conn = connection.get_conn()

    def create_table(self, table_name, schema):
        """(Re)create *table_name* from its ``show create table`` DDL.

        Destructive: any existing table with the same name is dropped first.
        """
        cursor = self.conn.cursor()
        try:
            # NOTE(review): table_name is concatenated into SQL; it originates
            # from our own parser, but parameterization is impossible for
            # identifiers — keep inputs trusted.
            cursor.execute("drop table if exists " + table_name)
            cursor.execute(schema)
            self.conn.commit()
        finally:
            # Fix: close the cursor even when a statement raises.
            cursor.close()

    def execute_insert(self, sql_list):
        """Execute a batch of INSERT statements in one transaction.

        :param sql_list: iterable of fully-rendered INSERT SQL strings.
        """
        cursor = self.conn.cursor()
        try:
            for sql in sql_list:
                # Fix: dropped the redundant explicit ``None`` args argument.
                cursor.execute(sql)
            self.conn.commit()
        finally:
            cursor.close()


if __name__ == "__main__":
    # Ad-hoc smoke run against a test database.
    # NOTE(review): credentials are hard-coded in source — move them to an
    # environment variable or config file before this leaves development.
    service = MysqlClientService(
        "10.8.0.2:3306/ks-tequila-v2?username=root&password=Luck158,",
        "",
        "select * from task_record"
    )
    # `start` is presumably defined on DbClientService — confirm.
    service.start()

    # Fix: do not shadow the builtin ``all``.
    collected = []
    sharding_info_list = service.sharding_info
