#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
@author: guoguangchuan
"""
import os
import sys
import traceback
import sqlite3
import json
import datetime

from logger import process_log
from tools import get_md5, row2str, get_db_tb_key
from opends import OpenDS

# Python 2 idiom: sys.setdefaultencoding is deleted during interpreter
# startup; reload(sys) re-exposes it so implicit str<->unicode coercion
# throughout this process uses UTF-8 instead of ASCII.
reload(sys)
sys.setdefaultencoding("utf-8")


class MD5_Diff:
    """Full/incremental sync of a MySQL table into OpenDS via MD5 diffing.

    A local SQLite snapshot file (one per host/port/database/table, named
    by the MD5 of that tuple, under ./check/) stores one
    (primary_key, md5_of_row) pair per source row.  A full sync streams
    the whole table to OpenDS and rebuilds the snapshot; an incremental
    sync pages the live table and the snapshot in lockstep and pushes
    only the rows that were inserted, updated, or deleted.
    """

    def __init__(self):
        # Rows fetched from the source DB / batched to OpenDS per page.
        self.fetch = 10000

    def _snapshot_path(self, info):
        """Return the SQLite snapshot file path for this source table.

        The path is check/<md5(host + port + database + table)>; the
        check/ directory is created on demand.
        """
        db = info['db']
        record = '%s%s%s%s' % (db.conn.get_host_info(), db.conn.port,
                               info['db_database'], info['db_table_name'])
        if not os.path.exists('check'):
            os.mkdir('check')
        return 'check/%s' % get_md5(record)

    def full_sync_md5_diff(self, info):
        """Fully re-sync one table into OpenDS and rebuild its snapshot.

        Streams every source row to OpenDS in batches of self.fetch and
        records (primary_key, md5(row)) pairs in a freshly created SQLite
        snapshot for later incremental runs.  Logs and returns without
        touching the snapshot when the table has no primary key.
        """
        database = info['db_database']
        table = info['db_table_name']
        ds_token = info['ds_token']
        ds_tb_id = info['ds_tb_id']
        field_names = [field['name'] for field in info['ds_fields']]
        OpenDS().tb_clean(ds_token, ds_tb_id)
        cursor = info['db'].cursor

        # Locate the primary-key column: (lowercased name, type, 1-based
        # ordinal position).  NOTE(review): identifiers come from our own
        # config, but this query is still built by interpolation.
        key_sql = "select lower(column_name), data_type, ordinal_position from information_schema.columns where table_schema = '%s' and table_name = '%s' and column_key='PRI'" % (
            database, table)
        cursor.execute(key_sql)
        key_res = cursor.fetchall()
        if not key_res:
            process_log().info('Increase full_sync:there is no primary key in %s' % table)
            return
        key_res = key_res[0]

        # Rebuild the snapshot from scratch (checked for a primary key
        # first, so an aborted run no longer leaves an empty file behind).
        database_file = self._snapshot_path(info)
        if os.path.exists(database_file):
            os.remove(database_file)
        sqlite_conn = sqlite3.connect(database_file)
        sqlite_cursor = sqlite_conn.cursor()
        try:
            try:
                sqlite_cursor.execute(
                    "create table if not exists %s (%s %s primary key, md5 varchar(32));" % (table, key_res[0], key_res[1]))
                sqlite_conn.commit()
            except Exception:
                # Best effort: log the failure and continue; the inserts
                # below will fail loudly if the table really is unusable.
                process_log().error('Increase full_sync:%s' % traceback.format_exc())

            start_time = datetime.datetime.now()
            cursor.execute('select * from %s.%s' % (database, table))
            res = cursor.fetchmany(self.fetch)
            while res:
                process_log().info('insert %s rows' % len(res))
                temp_res = [row2str(row) for row in res]
                OpenDS().tb_insert(ds_token, ds_tb_id, field_names, temp_res)
                # ordinal_position is 1-based, hence the -1 when indexing.
                keys = [(col[key_res[2] - 1], get_md5(json.dumps(col)))
                        for col in temp_res]
                sqlite_cursor.executemany('insert into %s values (?, ?)' % table, keys)
                sqlite_conn.commit()
                res = cursor.fetchmany(self.fetch)
            OpenDS().tb_commit(ds_token, ds_tb_id)
        finally:
            sqlite_conn.close()
        end_time = datetime.datetime.now()
        info['db_table_last_time'] = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
        process_log().info(
            'full_sync cost %s seconds' % (end_time - start_time).seconds)

    def increase_sync(self, info):
        """Run one sync pass for a table.

        Delegates to full_sync_md5_diff when info['type'] == 'full';
        otherwise consumes diff batches from md5_diff_increase() and
        forwards inserts/deletes to OpenDS, committing only when at
        least one batch was pushed.
        """
        if info['type'] == 'full':
            self.full_sync_md5_diff(info)
            return
        field_names = [f['name'] for f in info['ds_fields']]
        database = info['db_database']
        table = info['db_table_name']
        db_table_key_name = info['db_table_key_name']
        ds_tb_id = info['ds_tb_id']
        ds_token = info['ds_token']
        is_sync = False
        for sync_data in self.md5_diff_increase(info):
            if 'insert' in sync_data:
                process_log().info(
                    '[%s] total insert %s rows' % (get_db_tb_key(database, table), len(sync_data['insert'])))
                OpenDS().tb_insert(ds_token, ds_tb_id, field_names, sync_data['insert'])
                is_sync = True
            elif 'delete' in sync_data:
                process_log().info(
                    '[%s] total delete %s rows' % (get_db_tb_key(database, table), len(sync_data['delete'])))
                delete = {
                    'data': sync_data['delete']
                }
                OpenDS().data_delete(ds_token, ds_tb_id, [db_table_key_name], delete)
                is_sync = True
        if is_sync:
            OpenDS().tb_commit(ds_token, ds_tb_id)

    def md5_diff_increase(self, info):
        """Yield diff batches between the source table and its snapshot.

        Generator.  Pages both the live table and the SQLite snapshot in
        lockstep, comparing primary keys and per-row MD5s, and yields
        dicts of the form {'insert': [row, ...]} (new or changed rows)
        and {'delete': [[key], ...]} (rows gone from the source), each
        flushed at roughly self.fetch entries.  The snapshot is updated
        in place as differences are found.  Falls back to a full sync
        when the snapshot table does not exist yet.
        """
        start_time = datetime.datetime.now()
        database = info['db_database']
        table = info['db_table_name']
        key_position = info['db_table_key_position']
        key_name = info['db_table_key_name']
        cursor = info['db'].cursor
        database_file = self._snapshot_path(info)
        sqlite_conn = sqlite3.connect(database_file)
        sqlite_cursor = sqlite_conn.cursor()
        try:
            # Probe the snapshot table; only sqlite errors (missing table)
            # mean "never fully synced" -- anything else should propagate.
            sqlite_cursor.execute('select * from %s limit 1' % table)
        except sqlite3.Error:
            sqlite_conn.close()
            self.full_sync_md5_diff(info)
            return
        try:
            # Sentinel offsets ("guards") into the live table (db_guard)
            # and the snapshot (sl_guard); both advance page by page.
            db_guard = 0
            sl_guard = 0
            shard_len = self.fetch
            db_gone = False
            sl_gone = False
            insert_res = {"insert": []}
            delete_res = {"delete": []}
            while True:
                inc_res = {'delete': [], 'update': [], 'insert': []}
                db_shard_sql = 'select * from %s.%s limit %s, %s' % (database, table, db_guard, shard_len)
                cursor.execute(db_shard_sql)
                db_shard_res = cursor.fetchall()
                process_log().info('md5_diff_increase db_guard:%s' % db_guard)
                if not db_shard_res:
                    db_gone = True
                sl_shard_sql = 'select * from %s  limit %s, %s' % (table, sl_guard, shard_len)
                sqlite_cursor.execute(sl_shard_sql)
                sl_shard_res = sqlite_cursor.fetchall()
                process_log().info('md5_diff_increase sl_guard:%s' % sl_guard)
                if not sl_shard_res:
                    sl_gone = True
                if sl_gone or db_gone:
                    break
                sl_index = 0
                sl_shard_len = len(sl_shard_res)
                for db_row in db_shard_res:
                    db_row = row2str(db_row)
                    while sl_index < sl_shard_len:
                        sl_row = sl_shard_res[sl_index]
                        sl_row = row2str(sl_row)
                        sl_index += 1
                        sl_guard += 1
                        sl_key, sl_md5 = sl_row
                        db_key = db_row[key_position]
                        if str(db_key) != str(sl_key):
                            # Re-check against the row just consumed; if it
                            # actually matches, rewind the snapshot guard and
                            # move on to the next source row.
                            if sl_index > 0:
                                sl_row = sl_shard_res[sl_index - 1]
                                sl_key_m = str(sl_row[0])
                                if str(db_key) == sl_key_m:
                                    sl_guard -= 1
                                    continue
                            inc_res['delete'].append(sl_key)
                        else:
                            db_guard += 1
                            # Same key, different content hash -> row changed.
                            if get_md5(json.dumps(db_row)) != sl_md5:
                                inc_res['update'].append([db_key, db_row])
                            break
                    if sl_index >= sl_shard_len:
                        break
                if inc_res['delete'] or inc_res['update'] or inc_res['insert']:
                    self.del_sqlite(sqlite_cursor, sqlite_conn, inc_res, table, key_name)
                    # Updated rows are re-sent to OpenDS as inserts.
                    for row in inc_res['update']:
                        insert_res['insert'].append(row[1])
                    if len(insert_res['insert']) >= self.fetch:
                        yield insert_res
                        insert_res = {"insert": []}
                    for row in inc_res['delete']:
                        delete_res['delete'].append([row])
                    if len(delete_res['delete']) >= self.fetch:
                        yield delete_res
                        delete_res = {"delete": []}
                    if inc_res['delete']:
                        # Deleting snapshot rows shifts its pagination back,
                        # so rewind the snapshot guard by the same amount.
                        sl_guard -= len(inc_res['delete'])
            if db_gone and not sl_gone:
                # Source exhausted first: every remaining snapshot row was
                # deleted from the source.
                while True:
                    inc_res = {'delete': [], 'update': [], 'insert': []}
                    sl_shard_sql = 'select * from %s limit %s, %s' % (table, sl_guard, shard_len)
                    sqlite_cursor.execute(sl_shard_sql)
                    sl_shard_res = sqlite_cursor.fetchall()
                    if not sl_shard_res:
                        break
                    for row in sl_shard_res:
                        row = row2str(row)
                        inc_res['delete'].append(row[0])
                    self.del_sqlite(sqlite_cursor, sqlite_conn, inc_res, table, key_name)
                    for row in inc_res['delete']:
                        delete_res['delete'].append([row])
                    if len(delete_res['delete']) > self.fetch:
                        yield delete_res
                        delete_res = {"delete": []}
                    sl_guard += self.fetch
            elif not db_gone and sl_gone:
                # Snapshot exhausted first: remaining source rows are new.
                while True:
                    inc_res = {'delete': [], 'update': [], 'insert': []}
                    db_shard_sql = 'select * from %s.%s limit %s, %s' % (database, table, db_guard, shard_len)
                    cursor.execute(db_shard_sql)
                    db_shard_res = cursor.fetchall()
                    if not db_shard_res:
                        break
                    for row in db_shard_res:
                        row = row2str(row)
                        inc_res['insert'].append([row[key_position], row])
                    self.del_sqlite(sqlite_cursor, sqlite_conn, inc_res, table, key_name)
                    for row in inc_res['insert']:
                        insert_res['insert'].append(row[1])
                    if len(insert_res['insert']) > self.fetch:
                        yield insert_res
                        insert_res = {"insert": []}
                    db_guard += self.fetch
            # Flush whatever is left in the pending batches.
            if insert_res['insert']:
                yield insert_res
            if delete_res['delete']:
                yield delete_res
        finally:
            sqlite_conn.close()
        end_time = datetime.datetime.now()
        info['db_table_last_time'] = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
        process_log().info(
            'md5_diff_increase cost %s seconds' % (end_time - start_time).seconds)

    def del_sqlite(self, sqlite_cursor, sqlite_conn, inc_res, table, key_name):
        """Apply one diff batch to the SQLite snapshot.

        Deletes snapshot rows by key, rewrites stored md5 values for
        updated rows, and upserts (key, md5) pairs for inserted rows.
        Identifiers (table/column names) come from trusted config and are
        interpolated; all values are bound as parameters.
        """
        if not inc_res:
            return
        insert = inc_res['insert']
        delete = inc_res['delete']
        update = inc_res['update']
        if delete:
            ele = [(de,) for de in delete]
            sqlite_cursor.executemany('delete from %s where %s=(?)' % (table, key_name), ele)
            sqlite_conn.commit()
        if update:
            # Bind values as parameters so keys containing quotes cannot
            # break (or inject into) the statement.
            for up in update:
                sqlite_cursor.execute(
                    'update %s set md5=? where %s=?' % (table, key_name),
                    (get_md5(json.dumps(up[1])), up[0]))
            sqlite_conn.commit()
        if insert:
            key_md5s = [(key, get_md5(json.dumps(row))) for key, row in insert]
            try:
                sqlite_cursor.executemany('replace into %s values (?, ?)' % table, key_md5s)
            except Exception as e:
                process_log().error("primary key conflict:%s" % e)
            sqlite_conn.commit()
