#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
@author: guoguangchuan
"""
import os
import sys
import time
import re
import urllib2
import tarfile
import traceback
import sqlite3
import process_binlog
from logger import process_log
from tools import get_md5, row2str
import json

# Python-2-only hack: site.py removes sys.setdefaultencoding at startup;
# reload(sys) restores it so implicit str<->unicode coercions use UTF-8
# instead of ASCII (the binlog rows contain non-ASCII text).
reload(sys)
sys.setdefaultencoding("utf-8")


class Increase:
    """Incremental-sync driver for one MySQL table.

    Yields batches of changes (``{"insert": [...]}`` / ``{"delete": [...]}``,
    plus raw 'alter' events) so a downstream consumer can mirror the table.
    Three strategies are supported, selected by ``info['db_increase_type']``:

    * ``aliyun_binlogs`` - download/extract Aliyun RDS binlog archives and parse them
    * ``local_binlogs``  - parse binlog files from a local directory
    * ``md5_diff``       - diff per-row MD5s against a local SQLite checkpoint db

    Note: ``info`` is shared mutable state - this class writes the advanced
    watermark back into ``info['db_table_last_time']``.
    """

    def __init__(self, info):
        # Config/state dict supplied by the caller; kept so methods can both
        # read settings and write back progress (e.g. db_table_last_time).
        self.info = info
        self.db = info['db']
        # Live MySQL connection/cursor owned by the wrapper in info['db'].
        self.conn = self.db.conn
        self.cursor = self.conn.cursor()
        self.database = info['db_database']
        self.table = info['db_table_name']
        # Primary-key column: its name (for SQL) and its positional index
        # within a fetched row tuple.
        self.key_name = info['db_table_key_name']
        self.key_position = info['db_table_key_position']
        # Batch size: results are yielded once this many rows accumulate.
        self.fetch = self.db.fetch

    def increase_sync(self):
        """Dispatch to the configured incremental strategy and relay its
        yielded batches.  'where' is accepted but not implemented yet."""
        db_increase_type = self.info['db_increase_type']
        if db_increase_type == 'aliyun_binlogs':
            for res in self.aliyun_binlogs():
                yield res
        elif db_increase_type == 'local_binlogs':
            for res in self.local_binlogs():
                yield res
        elif db_increase_type == 'md5_diff':
            for res in self.md5_diff():
                yield res
        elif db_increase_type == 'where':
            pass

    def aliyun_binlogs(self):
        """Download, unpack and parse Aliyun RDS binlog archives; yield
        batched insert/delete changes and pass 'alter' events through as-is.

        On success, advances ``info['db_table_last_time']`` to the newest
        statement time seen so the next run skips processed events.
        """
        db_binlog_files = self.info['db_binlog_files']
        db_table_last_time = self.info['db_table_last_time']
        # Frozen copy of the watermark: used as parse_mysqlbinlog's start
        # filter while db_table_last_time itself advances during the scan.
        db_table_last_time_temp = db_table_last_time
        try:
            for link in db_binlog_files:
                '''
                end_time = link['LogEndTime']
                if end_time > db_table_last_time:
                    db_table_last_time = end_time
                '''
                link_str = link["DownloadLink"]
                # Archive name embedded in the download URL, e.g. mysql-bin.000123.tar.gz
                pattern = re.compile(r"mysql-bin\.\d+\.tar\.gz")
                gz_file_name = pattern.search(link_str)
                if gz_file_name:
                    gz_file_name = gz_file_name.group()
                    # Strip the trailing ".tar.gz" (7 chars) to get the binlog name.
                    file_name = gz_file_name[:-7]
                    process_log().info("----------------------------%s" % file_name)
                    # Prefix with the RDS instance id so binlogs from different
                    # instances cannot collide in the shared binlogs/ dir.
                    final_name = '%s_%s' % (self.info['db_binlog_instance'], file_name)
                    if os.path.exists(final_name):
                        # NOTE(review): this checks final_name without the
                        # 'binlogs/' prefix used everywhere else, so the cache
                        # check likely never hits - confirm intended path.
                        pass
                    else:
                        # Download the archive, extract the binlog, then rename
                        # it to the instance-prefixed name and drop the archive.
                        f = urllib2.urlopen(link_str)
                        with open("binlogs/%s" % gz_file_name, "wb") as file_:
                            file_.write(f.read())
                        t = tarfile.open('binlogs/%s' % gz_file_name)
                        t.extractall(path='binlogs')
                        t.close()
                        os.remove('binlogs/%s' % gz_file_name)
                        os.rename('binlogs/%s' % file_name, 'binlogs/%s' % final_name)
                    # Parse events newer than the start watermark for just this
                    # database/table (parse_mysqlbinlog semantics live in
                    # process_binlog - presumed to yield dicts keyed by event
                    # type plus 's_time' timestamps; verify there).
                    sqls = process_binlog.parse_mysqlbinlog('binlogs/%s' % final_name, db_table_last_time_temp, '',
                                                            [self.database], [self.table],
                                                            self.info['db_binlog']['mysqlbinlog'])
                    process_log().info('mysqlbinlog analysed')
                    # del_type tracks which kind ('insert'/'delete') the current
                    # batch is accumulating; batches are flushed whenever the
                    # kind switches, the batch fills, or an 'alter' arrives.
                    del_type = ""
                    res = {"insert": [], "delete": []}
                    for sql in sqls:
                        # print sql
                        # Schema change: flush any pending batch first so row
                        # changes are applied before the ALTER, then pass the
                        # alter event through unmodified.
                        if 'alter' in sql:
                            if res['insert'] or res['delete']:
                                yield res
                                res = {"insert": [], "delete": []}
                                del_type = ''
                            yield sql
                            continue
                        # Timestamp marker event: advance the watermark, no rows.
                        if 's_time' in sql:
                            if sql['s_time'] > db_table_last_time:
                                db_table_last_time = sql['s_time']
                            continue
                        # Normalize the event into {'type': ..., 'value': ...}.
                        sql_res = self.del_sql(sql)
                        if del_type == '':

                            del_type = sql_res['type']
                        elif del_type != sql_res['type']:
                            # Event kind switched: flush the old-kind batch
                            # before starting to accumulate the new kind.
                            if res[del_type]:
                                yield res
                                del_type = sql_res['type']
                                res = {"insert": [], "delete": []}
                        res[del_type].append(sql_res['value'])
                        # Flush once the batch reaches the configured size.
                        if len(res[del_type]) >= self.fetch:
                            yield res
                            res = {"insert": [], "delete": []}
                    # Flush the tail batch for this binlog file.
                    if del_type and res[del_type]:
                        yield res
            # Persist the advanced watermark back into the shared info dict.
            self.info['db_table_last_time'] = db_table_last_time
        except Exception, e:
            process_log().error('aliyun get_response:%s' % traceback.format_exc())

    def local_binlogs(self):
        """Parse binlog files found in a local directory; same batching and
        watermark behaviour as :meth:`aliyun_binlogs`, minus the download.

        Only files whose creation time is newer than the stored watermark
        are considered.
        """
        db_table_last_time = self.info['db_table_last_time']
        # Frozen start watermark for parse_mysqlbinlog (see aliyun_binlogs).
        db_table_last_time_temp = db_table_last_time
        binlog_path = self.info['db_binlog']['path']
        binlog_fiels = os.listdir(binlog_path)
        final_binlogs = []
        for binlog_file in binlog_fiels:
            file_path = os.path.join(binlog_path, binlog_file)
            # ctime formatted to match the watermark's string format so the
            # comparison below is a plain lexicographic timestamp compare.
            file_ctime = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.localtime(os.path.getctime(file_path)))
            if file_ctime > db_table_last_time:
                final_binlogs.append([file_ctime, file_path])
        # NOTE(review): sorts on x[1] (the file path), not x[0] (ctime) -
        # correct only while binlog filenames sort chronologically; confirm.
        final_binlogs.sort(key=lambda x: x[1])
        try:
            for final_name in final_binlogs:
                final_name = final_name[1]
                sqls = process_binlog.parse_mysqlbinlog(final_name, db_table_last_time_temp, '', [self.database],
                                                        [self.table], self.info['db_binlog']['mysqlbinlog'])
                process_log().info('mysqlbinlog analysed')
                # Same batch-accumulation scheme as aliyun_binlogs: del_type is
                # the kind currently being collected; flush on kind switch,
                # full batch, or 'alter'.
                del_type = ""
                res = {"insert": [], "delete": []}
                for sql in sqls:
                    # print sql
                    # Schema change: flush pending insert/delete rows first,
                    # then forward the alter event unmodified.
                    if 'alter' in sql:
                        if res['insert'] or res['delete']:
                            yield res
                            res = {"insert": [], "delete": []}
                            del_type = ''
                        yield sql
                        continue
                    # Timestamp marker: advance the watermark only.
                    if 's_time' in sql:
                        if sql['s_time'] > db_table_last_time:
                            db_table_last_time = sql['s_time']
                        continue
                    sql_res = self.del_sql(sql)
                    if del_type == '':
                        del_type = sql_res['type']
                    elif del_type != sql_res['type']:
                        if res[del_type]:
                            yield res
                            del_type = sql_res['type']
                            res = {"insert": [], "delete": []}
                    res[del_type].append(sql_res['value'])
                    if len(res[del_type]) >= self.fetch:
                        yield res
                        res = {"insert": [], "delete": []}
                # Flush the tail batch for this file.
                if del_type and res[del_type]:
                    yield res
            self.info['db_table_last_time'] = db_table_last_time
        except Exception, e:
            # NOTE(review): message says 'aliyun' but this is the local-binlog
            # path - copy/paste artifact, consider retagging the log line.
            process_log().error('aliyun get_response:%s' % traceback.format_exc())

    def md5_diff(self):
        """Diff the live MySQL table against a per-table SQLite checkpoint of
        (key, md5) pairs and yield the insert/delete batches needed to
        reconcile a downstream copy.

        The checkpoint file lives under check/<md5-of-host:port:db:table> and
        is updated in place (via del_sqlite) as differences are found.

        NOTE(review): both sides are paged with LIMIT/OFFSET and no ORDER BY,
        so correctness depends on both engines returning stable row order -
        confirm this assumption holds for the deployed MySQL storage engine.
        """
        database = self.info['db_database']
        table = self.info['db_table_name']
        host = self.conn.get_host_info()
        port = self.conn.port
        # One checkpoint file per (host, port, database, table) tuple.
        record = '%s%s%s%s' % (host, port, database, table)
        record = get_md5(record)
        if not os.path.exists('check'):
            os.mkdir('check')
        database_file = 'check/%s' % (record)
        ####
        sqlite_conn = sqlite3.connect(database_file)
        sqlite_cursor = sqlite_conn.cursor()
        # Paging cursors ("sentinels"): current OFFSET into the MySQL table
        # (db_guard) and the SQLite checkpoint (sl_guard).
        db_guard = 0
        sl_guard = 0
        shard_len = self.fetch
        # Exhaustion flags for each side.
        db_gone = False
        sl_gone = False
        # processing_logger().info('sync percent:%.1f%%' % (float(i)*100/len_json_md5))
        insert_res = {"insert": []}
        delete_res = {"delete": []}
        # Phase 1: walk both sides in lockstep pages until either runs out.
        while True:
            inc_res = {'delete': [], 'update': [], 'insert': []}
            db_shard_sql = 'select * from %s.%s limit %s, %s' % (self.database, self.table, db_guard, shard_len)
            self.cursor.execute(db_shard_sql)
            db_shard_res = self.cursor.fetchall()
            if not db_shard_res:
                db_gone = True
            #
            sl_shard_sql = 'select * from %s  limit %s, %s' % (self.table, sl_guard, shard_len)
            sqlite_cursor.execute(sl_shard_sql)
            sl_shard_res = sqlite_cursor.fetchall()
            if not sl_shard_res:
                sl_gone = True
            if sl_gone or db_gone:
                break
            #
            sl_index = 0
            sl_shard_len = len(sl_shard_res)
            # Merge-compare the two pages row by row, keyed on the primary key.
            for db_row in db_shard_res:
                db_row = row2str(db_row)
                while sl_index < sl_shard_len:
                    sl_row = sl_shard_res[sl_index]
                    sl_row = row2str(sl_row)
                    sl_index += 1
                    sl_guard += 1
                    # Checkpoint rows are (key, md5) pairs.
                    sl_key, sl_md5 = sl_row
                    db_key = db_row[self.key_position]
                    if str(db_key) != str(sl_key):
                        # Re-check against the raw (un-stringified) checkpoint
                        # row: if the keys actually match, undo the cursor
                        # advance and move on instead of flagging a delete.
                        if sl_index > 0:
                            sl_row = sl_shard_res[sl_index-1]
                            sl_key_m = str(sl_row[0])
                            if str(db_key) == sl_key_m:
                                sl_guard -= 1
                                continue
                        # Key exists in the checkpoint but not (here) in MySQL:
                        # schedule the downstream delete.
                        inc_res['delete'].append(sl_key)
                    else:
                        db_guard += 1
                        # Same key, different content hash -> row was updated.
                        if get_md5(json.dumps(db_row)) != sl_md5:
                            inc_res['update'].append([db_key, db_row])
                        break
                if sl_index >= sl_shard_len:
                    break
            if inc_res['delete'] or inc_res['update'] or inc_res['insert']:
                # Apply the detected differences to the checkpoint db first.
                self.del_sqlite(sqlite_cursor, sqlite_conn, inc_res)

                # Updated rows are re-emitted downstream as inserts.
                for row in inc_res['update']:
                    insert_res['insert'].append(row[1])
                if len(insert_res['insert']) >= self.fetch:
                    yield insert_res
                    insert_res = {"insert": []}
                for row in inc_res['delete']:
                    delete_res['delete'].append([row])
                if len(delete_res['delete']) >= self.fetch:
                    yield delete_res
                    delete_res = {"delete": []}
                # Deleted checkpoint rows shift subsequent OFFSETs back.
                if inc_res['delete']:
                    sl_guard -= len(inc_res['delete'])
        # Phase 2: drain whichever side still has rows.
        if db_gone and sl_gone:
            pass
        elif db_gone and not sl_gone:
            # MySQL exhausted: every remaining checkpoint row is a delete.
            while True:
                inc_res = {'delete': [], 'update': [], 'insert': []}
                sl_shard_sql = 'select * from %s limit %s, %s' % (self.table, sl_guard, shard_len)
                sqlite_cursor.execute(sl_shard_sql)
                sl_shard_res = sqlite_cursor.fetchall()
                if sl_shard_res:
                    for row in sl_shard_res:
                        row = row2str(row)
                        inc_res['delete'].append(row[0])
                    self.del_sqlite(sqlite_cursor, sqlite_conn, inc_res)
                    for row in inc_res['delete']:
                        delete_res['delete'].append([row])
                    if len(delete_res['delete']) > self.fetch:
                        yield delete_res
                        delete_res = {"delete": []}
                    sl_guard += self.fetch
                else:
                    break
        elif not db_gone and sl_gone:
            # Checkpoint exhausted: every remaining MySQL row is an insert.
            while True:
                inc_res = {'delete': [], 'update': [], 'insert': []}
                sl_shard_sql = 'select * from %s.%s limit %s, %s' % (self.database, self.table, db_guard, shard_len)
                self.cursor.execute(sl_shard_sql)
                db_shard_res = self.cursor.fetchall()
                if db_shard_res:
                    for row in db_shard_res:
                        row = row2str(row)
                        inc_res['insert'].append([row[self.key_position], row])
                    self.del_sqlite(sqlite_cursor, sqlite_conn, inc_res)
                    for row in inc_res['insert']:
                        insert_res['insert'].append(row[1])
                    if len(insert_res['insert']) > self.fetch:
                        yield insert_res
                        insert_res = {"insert": []}
                    db_guard += self.fetch
                else:
                    break
        # Flush any partially-filled tail batches.
        if insert_res['insert']:
            yield insert_res
        if delete_res['delete']:
            yield delete_res

    def del_sql(self, sql):
        """Normalize one parsed binlog event dict into
        ``{'type': 'insert'|'delete', 'value': ...}``.

        Updates are mapped to inserts (downstream upsert semantics); deletes
        carry only the primary-key value, wrapped in a one-element list.
        """
        res = {}
        if 'update' in sql:
            res['type'] = 'insert'
            res['value'] = sql['update']
        if 'insert' in sql:
            res['type'] = 'insert'
            res['value'] = sql['insert']
        if 'delete' in sql:
            res['type'] = 'delete'
            res['value'] = [sql['delete'][self.key_position]]
        return res

    def del_sqlite(self, sqlite_cursor, sqlite_conn, inc_res):
        """Apply one batch of detected differences to the SQLite checkpoint:
        delete removed keys, refresh md5s for updated rows, insert new
        (key, md5) pairs.  Commits after each operation group.
        """
        if inc_res:
            insert = inc_res['insert']
            delete = inc_res['delete']
            update = inc_res['update']
            if delete:
                ele = []
                for de in delete:
                    ele.append((de,))
                sqlite_cursor.executemany('delete from %s where %s=(?)' % (self.table, self.key_name), ele)
                sqlite_conn.commit()
            if update:
                # NOTE(review): values are spliced into the SQL string rather
                # than bound as parameters - breaks on quotes in keys and is
                # injection-prone; consider parameterizing like the branches
                # above/below.
                for up in update:
                    sqlite_cursor.execute("update %s set md5='%s' where %s='%s'" % (
                        self.table, get_md5(json.dumps(up[1])), self.key_name, up[0]))
                sqlite_conn.commit()
            if insert:
                key_md5s = []
                for key, row in insert:
                    key_md5s.append((key, get_md5(json.dumps(row))))
                try:
                    sqlite_cursor.executemany('insert OR IGNORE into %s values (?, ?)' % self.table, key_md5s)
                except Exception, e:
                    process_log().error("primary key conflict:%s" % e)
                sqlite_conn.commit()


if __name__ == '__main__':
    # Smoke test: build a throwaway SQLite db with the same (id, md5) schema
    # the md5_diff checkpoint uses, reset it, and seed two rows.
    sqlite_conn = sqlite3.connect('test.sql')
    try:
        sqlite_cursor = sqlite_conn.cursor()
        sqlite_cursor.execute("CREATE TABLE IF NOT EXISTS test (id INT, md5 INT);")
        sqlite_conn.commit()
        # Clear any rows left over from a previous run.
        sqlite_cursor.execute("DELETE FROM test WHERE id!=-1;")
        sqlite_conn.commit()
        sqlite_cursor.executemany("INSERT INTO test (id, md5) VALUES (?, ?);", ((1, 1), (2, 2)))
        sqlite_conn.commit()
    finally:
        # Fix: the connection was previously never closed (leaked handle,
        # and on some platforms the file stays locked).
        sqlite_conn.close()