#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import re
import tarfile
import traceback
import urllib2
import time

from util.process_binlog2 import parse_mysqlbinlog
from util.logger import process_log
from util.opends import OpenDS
from util.tools import is_binlog
from util.tools import get_md5

reload(sys)
sys.setdefaultencoding("utf-8")
__author__ = 'guoguangchuan'


class Increase:
    """Incremental MySQL -> DS (OpenDS) synchroniser driven by parsed binlogs.

    Events come from util.process_binlog2.parse_mysqlbinlog and are replayed
    against OpenDS tables.  A single shared ``data_pool`` dict acts as both
    configuration and mutable per-table batch state: keys of the form
    '<db>+++<table>' hold {'data': [...], 'pool_type': '', 'ds_tb_id': ...,
    'ds_token': ..., ...}; other keys ('db_increase_type', 'global_db',
    'db_binlog', ...) are global settings.  This file targets Python 2
    (urllib2, print statement in __main__).
    """

    def init_schema(self, ds_token, db, database, ds_id, table_name, ds_tb_id, binlog_sync_type):
        """Create or reconcile the DS-side schema for one MySQL table.

        If ``ds_tb_id`` is falsy, a DS table is created from the MySQL schema
        (columns flagged 'uniq_index' become the unique key).  Otherwise the
        existing DS field list is diffed against MySQL: DS-only fields are
        deleted and MySQL-only fields are added -- but only when
        ``binlog_sync_type`` is falsy.

        Returns {'ds_tb_id': ..., 'ds_fields': <MySQL table schema>}.
        Note: 'ds_fields' in the return is always the MySQL-side schema, even
        on the reconcile path where DS may differ until the diff completes.
        """
        table_schema = db.get_table_info(database, table_name)
        field_names = []
        uniq_key = []
        for t in table_schema:
            field_names.append(t['name'])
            if t['uniq_index']:
                uniq_key.append(t['name'])
        if ds_tb_id:
            ds_fields = OpenDS().field_list(ds_token, ds_tb_id)
            ds_field_names = [f['name'] for f in ds_fields]
            # Drop DS fields that no longer exist in MySQL.
            for ds_field_name in ds_field_names:
                if ds_field_name not in field_names and not binlog_sync_type:
                    OpenDS().field_del(ds_token, ds_tb_id, ds_field_name)
                    process_log().info('sync delete field:%s' % ds_field_name)
            # Add MySQL fields that are missing on the DS side.
            for field in table_schema:
                field_name = field['name']
                if field_name not in ds_field_names and not binlog_sync_type:
                    OpenDS().field_add(ds_token, ds_tb_id, field['name'], field['type'], field['uniq_index'])
                    process_log().info('sync add field:%s' % field_name)
        else:
            ds_tb_id = OpenDS().tb_create(ds_token, ds_id, table_name, table_schema, uniq_key)['tb_id']
            process_log().info(
                'init table %s with schema %s and uniq index is %s' % (table_name, table_schema, uniq_key))
        return {'ds_tb_id': ds_tb_id, 'ds_fields': table_schema}

    def increase_sync(self, data_pool):
        """Replay batched binlog events from factory_binlog() into DS.

        Consumes batches yielded by :meth:`factory_binlog` and applies them:
        'insert' rows via tb_insert, 'delete' rows via data_delete (keyed on
        the table's configured key column), 'alter' add/drop by mutating the
        DS field list (refreshing the cached 'ds_fields' in data_pool),
        and 'end_time' batches by recording the latest sync timestamp.
        Tables touched by insert/delete are flagged with modify=1 and
        committed at the end.

        Returns end_times: {'<db>+++<table>': end_time} for every per-table
        key in data_pool (keys containing '+++'), when an end_time was seen.
        """
        end_time = None
        end_times = {}
        for sync_data in self.factory_binlog(data_pool):
            pool_type = sync_data['pool_type']
            db_tb = sync_data['db_tb']
            data = sync_data['data']
            if pool_type != 'end_time':
                ds_fields = data_pool[db_tb]['ds_fields']
                field_names = [f['name'] for f in ds_fields]
                db_table_key_name = data_pool[db_tb]['db_table_key_name']
                db_table_key_position = data_pool[db_tb]['db_table_key_position']
                ds_tb_id = data_pool[db_tb]['ds_tb_id']
                ds_token = data_pool[db_tb]['ds_token']
                if pool_type == 'insert':
                    process_log().info('[%s] total insert %s rows' % (db_tb, len(data)))
                    OpenDS().tb_insert(ds_token, ds_tb_id, field_names, data)
                    data_pool[db_tb]['modify'] = 1

                elif pool_type == 'delete':
                    process_log().info('[%s] total delete %s rows' % (db_tb, len(data)))
                    delete = {
                        'data': []
                    }
                    # Deletes are issued by key column only: extract the key
                    # value from each row at its known position.
                    for d in data:
                        delete['data'].append([d[db_table_key_position]])
                    OpenDS().data_delete(ds_token, ds_tb_id, [db_table_key_name], delete)
                    data_pool[db_tb]['modify'] = 1
                elif pool_type == 'alter':
                    # 'alter' batches are lists like ['add', name, type] /
                    # ['change', ...] / ['drop', name].
                    for al in data:
                        t = al[0]
                        if t == 'add':
                            if al[1] not in field_names:
                                # Commit pending rows before changing schema.
                                OpenDS().tb_commit(ds_token, ds_tb_id)
                                OpenDS().field_add(ds_token, ds_tb_id, al[1], data_pool['db'].trans_field_type(al[2]),
                                                   0)
                                process_log().info('%s add field: %s' % (db_tb, al[1]))
                                ds_fields = OpenDS().field_list(ds_token, ds_tb_id)
                                data_pool[db_tb]['ds_fields'] = ds_fields
                        elif t == 'change':
                            # NOTE(review): 'change' only commits -- the column
                            # change itself is not propagated to DS.
                            OpenDS().tb_commit(ds_token, ds_tb_id)
                            pass
                        elif t == 'drop':
                            if al[1] in field_names:
                                OpenDS().tb_commit(ds_token, ds_tb_id)
                                OpenDS().field_del(ds_token, ds_tb_id, al[1])
                                process_log().info('%s drop field: %s' % (db_tb, al[1]))
                                ds_fields = OpenDS().field_list(ds_token, ds_tb_id)
                                data_pool[db_tb]['ds_fields'] = ds_fields
            else:
                end_time = data
        db_increase_type = data_pool['db_increase_type']
        if db_increase_type == 'aliyun_binlogs':
            sync_end_time = data_pool['aliyun_binlog_end_time']
        elif db_increase_type == 'local_binlogs':
            sync_end_time = data_pool['local_binlog_end_time']
        # NOTE(review): if db_increase_type is neither value above,
        # sync_end_time is unbound here and this raises NameError.
        if sync_end_time > end_time:
            end_time = sync_end_time
        # '+++' marks per-table keys; record end_time and commit any table
        # whose data was modified during this pass.
        for k, v in data_pool.items():
            if len(k.split('+++')) > 1:
                if end_time:
                    end_times[k] = end_time
                if 'modify' in v and v['modify'] == 1:
                    process_log().info('[%s] commit' % k)
                    OpenDS().tb_commit(v['ds_token'], v['ds_tb_id'])
        return end_times

    def factory_binlog(self, data_pool):
        """Dispatch to the binlog source selected by data_pool['db_increase_type'].

        Yields batch dicts {'pool_type', 'data', 'db_tb'} from either
        aliyun_binlogs() or local_binlogs(); yields nothing for other types.
        """
        if data_pool['db_increase_type'] == 'aliyun_binlogs':
            for sync_data in self.aliyun_binlogs(data_pool):
                yield sync_data
        elif data_pool['db_increase_type'] == 'local_binlogs':
            for sync_data in self.local_binlogs(data_pool):
                yield sync_data

    def aliyun_binlogs(self, data_pool):
        """Yield event batches parsed from Aliyun RDS binlog download links.

        Each entry in data_pool['db_binlog_files'] carries a 'DownloadLink';
        the archive is downloaded (cached under binlogs/ as
        'mysql-bin.<md5 of the URL path>'), extracted, then parsed with
        parse_mysqlbinlog.  insert/delete rows are accumulated per table and
        flushed as a batch when the event type switches, when a batch reaches
        10000 rows, or on an 'alter' event; leftovers are flushed at the end.

        NOTE(review): the whole body is wrapped in a bare except that only
        logs the traceback -- any failure silently ends the generator.
        """
        global_db = data_pool['global_db']
        data_pool = data_pool
        aliyun_binlog_end_time = data_pool['aliyun_binlog_end_time']
        '''
        global_db = {
            'haizhi':['t_warn_raw_log_13w', 't_chart_raw_log_200w']
        }
        data_pool = {'haizhi+++t_warn_raw_log_13w':{'data':[], 'pool_type':''}, 'haizhi+++t_chart_raw_log_200w':{'data':[], 'pool_type':''}}
        '''
        db_binlog_files = data_pool['db_binlog_files']
        try:
            for link in db_binlog_files:
                link_str = link["DownloadLink"]
                # Cache key: md5 of the URL with its query string stripped.
                link_md5 = get_md5(link_str[:link_str.find('?')])
                gz_file_name = '%s.tar.gz' % link_md5
                # NOTE(review): gz_file_name is always a non-empty string,
                # so this condition is always true.
                if gz_file_name:
                    final_name = 'mysql-bin.%s' % link_md5
                    process_log().info("----------------------------%s" % final_name)
                    if os.path.exists('binlogs/%s' % final_name):
                        # Already downloaded in a previous run; skip download.
                        process_log().info("%s已存在跳过下载" % final_name)
                        pass
                    else:
                        f = urllib2.urlopen(link_str)
                        with open("binlogs/%s" % gz_file_name, "wb") as file_:
                            file_.write(f.read())
                        t = tarfile.open('binlogs/%s' % gz_file_name)
                        # Assumes the archive contains a single binlog file.
                        file_name = t.getnames()[0]
                        t.extractall(path='binlogs')
                        t.close()
                        os.remove('binlogs/%s' % gz_file_name)
                        os.rename('binlogs/%s' % file_name, 'binlogs/%s' % final_name)
                        process_log().info("%s下载完毕" % final_name)
                    sqls = parse_mysqlbinlog('binlogs/%s' % final_name, aliyun_binlog_end_time, '',
                                             data_pool['db_binlog']['mysqlbinlog'], global_db, data_pool)
                    for line in sqls:
                        line_date = line['data']
                        line_type = line['line_type']
                        if line_type == 'end_time':
                            yield {'pool_type': 'end_time', 'data': line_date, 'db_tb': None}
                            continue
                        d = data_pool[line['db_tb']]
                        pool_data = d['data']
                        pool_type = d['pool_type']

                        if line_type == 'insert' or line_type == 'delete':
                            # Event type switched: flush the pending batch of
                            # the old type, start a new batch with this row.
                            if pool_type != line_type:
                                if pool_type != '':
                                    yield {'pool_type': pool_type, 'data': pool_data, 'db_tb': line['db_tb']}
                                    d['data'] = [line_date]
                                    d['pool_type'] = line_type
                                    continue
                            if pool_type == '':
                                d['pool_type'] = line_type
                            ds_fields = data_pool[line['db_tb']]['ds_fields']
                            # Only keep rows whose column count matches the
                            # cached DS schema (skips rows from before/after
                            # a schema change).
                            if len(line_date) == len(ds_fields):
                                pool_data.append(line_date)
                            if len(pool_data) >= 10000:
                                yield {'pool_type': pool_type, 'data': pool_data, 'db_tb': line['db_tb']}
                                d['data'] = []

                        elif line_type == 'alter':
                            # Flush pending rows before the schema change,
                            # then emit the alter itself and reset the pool.
                            if pool_data:
                                yield {'pool_type': pool_type, 'data': pool_data, 'db_tb': line['db_tb']}
                            yield {'pool_type': line_type, 'data': line_date, 'db_tb': line['db_tb']}
                            d['pool_type'] = ''
                            d['data'] = []
            # Flush leftover partial batches for every per-table key.
            for k, v in data_pool.items():
                if len(k.split('+++')) > 1:
                    if len(v['data']) > 0:
                        yield {'pool_type': v['pool_type'], 'data': v['data'], 'db_tb': k, }
        except:
            process_log().error('aliyun get_response:%s' % traceback.format_exc())

    def local_binlogs(self, data_pool):
        """Yield event batches parsed from binlog files on the local disk.

        Scans data_pool['db_binlog']['path'] for files whose mtime (formatted
        'YYYY-mm-ddTHH:MM:SSZ' and compared as a string) is newer than
        local_binlog_end_time and that pass is_binlog(); parses each with
        parse_mysqlbinlog and batches rows like aliyun_binlogs().  Finally
        yields an 'end_time' batch taken from the last entry's mtime.

        NOTE(review): differences from aliyun_binlogs that look accidental:
        the list is sorted by file *path* (x[1]) not mtime; the
        len(line_date) == len(ds_fields) check is a no-op (pass) and the row
        is appended unconditionally; the error log label still says 'aliyun'.
        """
        global_db = data_pool['global_db']
        data_pool = data_pool
        binlog_path = data_pool['db_binlog']['path']
        local_binlog_end_time = data_pool['local_binlog_end_time']
        binlog_fiels = os.listdir(binlog_path)
        final_binlogs = []
        for binlog_file in binlog_fiels:
            file_path = os.path.join(binlog_path, binlog_file)
            # mtime -> 'YYYY-mm-ddTHH:MM:SSZ' so it can be string-compared
            # with local_binlog_end_time.  NOTE(review): time.ctime drops the
            # timezone; the trailing 'Z' is asserted, not derived -- confirm
            # the stored end_time is produced the same way.
            file_ctime = time.strftime('%Y-%m-%dT%H:%M:%SZ',
                                       time.strptime(time.ctime(os.path.getmtime(file_path)), '%a %b %d %H:%M:%S %Y'))
            if file_ctime > local_binlog_end_time:
                if is_binlog(file_path):
                    final_binlogs.append([file_ctime, file_path])
        # NOTE(review): sorts by path, not timestamp -- only correct if file
        # names sort chronologically (e.g. mysql-bin.000001, .000002, ...).
        final_binlogs.sort(key=lambda x: x[1])
        try:
            for final_name in final_binlogs:
                final_name = final_name[1]
                process_log().info(final_name)
                sqls = parse_mysqlbinlog(final_name, local_binlog_end_time, '',
                                         data_pool['db_binlog']['mysqlbinlog'], global_db, data_pool)
                for line in sqls:
                    line_date = line['data']
                    line_type = line['line_type']
                    if line_type == 'end_time':
                        # Per-file end_time is ignored; a single end_time is
                        # emitted after all files (see below).
                        # yield {'pool_type': 'end_time', 'data': line_date, 'db_tb': None}
                        continue
                    d = data_pool[line['db_tb']]
                    pool_data = d['data']
                    pool_type = d['pool_type']

                    if line_type == 'insert' or line_type == 'delete':
                        # Event type switched: flush old batch, restart with
                        # this row.
                        if pool_type != line_type:
                            if pool_type != '':
                                yield {'pool_type': pool_type, 'data': pool_data, 'db_tb': line['db_tb']}
                                d['data'] = [line_date]
                                d['pool_type'] = line_type
                                continue
                        if pool_type == '':
                            d['pool_type'] = line_type
                        ds_fields = data_pool[line['db_tb']]['ds_fields']
                        # NOTE(review): this length check does nothing (pass)
                        # and the append below is unconditional -- in
                        # aliyun_binlogs the append is guarded by it.
                        if len(line_date) == len(ds_fields):
                            pass
                        pool_data.append(line_date)
                        if len(pool_data) >= 10000:
                            yield {'pool_type': pool_type, 'data': pool_data, 'db_tb': line['db_tb']}
                            d['data'] = []

                    elif line_type == 'alter':
                        # Flush pending rows before the schema change, then
                        # emit the alter itself and reset the pool.
                        if pool_data:
                            yield {'pool_type': pool_type, 'data': pool_data, 'db_tb': line['db_tb']}
                        yield {'pool_type': line_type, 'data': line_date, 'db_tb': line['db_tb']}
                        d['pool_type'] = ''
                        d['data'] = []
            # Flush leftover partial batches for every per-table key.
            for k, v in data_pool.items():
                if len(k.split('+++')) > 1:
                    if len(v['data']) > 0:
                        yield {'pool_type': v['pool_type'], 'data': v['data'], 'db_tb': k, }
            if final_binlogs:
                # mtime of the last file in path-sorted order (see sort note).
                yield {'pool_type': 'end_time', 'data': final_binlogs[-1][0], 'db_tb': None}
        except:
            # NOTE(review): label says 'aliyun' but this is the local path --
            # copy-pasted log message (runtime string left unchanged here).
            process_log().error('aliyun get_response:%s' % traceback.format_exc())


if __name__ == '__main__':
    # NOTE(review): main() is not defined anywhere in this file -- running it
    # as a script raises NameError as written.  Confirm whether a main()
    # was removed or lives in a fuller version of this module.
    # (Python 2 print statement -- consistent with urllib2/reload(sys) above.)
    for data in main():
        print data['pool_type'], len(data['data']), data['db_tb']