#!/usr/bin/python
# -*- coding: utf-8 -*-
import json
import time
import os
import shutil
import traceback
import atexit
from optparse import OptionParser
from traceback import format_exc
import datetime
from odbc_p.DB import loader
from odbc_p.odbc import SyncException
from util.binlog_increase import Increase
from util.config import Configuration
from util.opends import OpenDS
from util.logger import error_log
from util.logger import process_log
from util.md5_diff import MD5_Diff
from util.app_global import g, runtime_path
from util.tools import one_instance, get_db_tb_key
from util.synchelper import GEA


class OpenDSClient:
    """Synchronise a local database with the bdp (OpenDS) platform.

    One instance handles a single configured data source: resolve or
    create the remote data source, push the table schemas, then stream
    the table data (full or incremental, depending on configuration).
    """

    # All attributes below are populated by check_init().
    access_token = None   # OpenDS API token
    pwd = None            # database password
    uid = None            # database user id
    port = None           # database port
    db_type = None        # e.g. 'mysql', 'ACCESS', 'GEA'
    modify = None         # True if remote schemas may be altered
    database = None       # database (or schema) name to sync
    email = None
    append_table = None   # tables synced in append mode
    replace_table = None  # tables synced by full replacement
    ds = None             # remote data-source name
    ds_id = None          # remote data-source id
    exist_tables = None   # {table_name: tb_id} already on the platform
    tb_list = None        # tables selected for this run

    def combin_sync(self, info):
        """Dispatch one table sync to full or incremental mode.

        :param info: dict with connection data and per-table sync state;
                     ``info['db']`` is the loaded DB driver object.
        :returns: True when the dispatch completed without raising.
        """
        self.info = info
        if info['db_increase_type'] == 'force_full':
            process_log().info(u'db_increase_type:force_full强制全量')
            info['db'].full_sync(info)
        elif not info['db_table_last_time'] or not info['ds_table_exist']:
            # No previous checkpoint, or the remote table is missing:
            # fall back to a full sync whatever the configured mode is.
            if info['db_increase_type'] == 'md5_diff':
                process_log().info(u'db_increase_type:md5_diff,强制全量')
                info['type'] = 'full'
                MD5_Diff().full_sync_md5_diff(info)
            else:
                process_log().info(u'db_increase_type:%s,强制全量' % info['db_increase_type'])
                info['db'].full_sync(info)
        else:
            process_log().info(u'db_increase_type:%s,增量同步' % info['db_increase_type'])
            if info['db_increase_type'] == 'md5_diff':
                info['type'] = 'increase'
                MD5_Diff().increase_sync(info)
            else:
                info['db'].increase_sync(info)
        return True

    def where_sync(self, db_info, user_info):
        """Entry point: initialise state, then sync schema and data."""
        if not self.check_init(db_info, user_info):
            return
        db_type = db_info.get('type')
        server = db_info.get('server')
        if db_type == 'ACCESS' and os.path.exists(server):
            # Work on a local copy so the live Access file is not held open.
            if not os.path.isdir('tmp'):
                os.mkdir('tmp')
            tmp = 'tmp/%s' % os.path.basename(server)
            try:
                shutil.copyfile(server, tmp)
                process_log().info("Copy database from `%s` to `%s`" % (server, tmp))
            except Exception as e:
                error_log().error(format_exc())
                raise e
        elif db_type == 'GEA':
            # GEA sources have their own dedicated sync helper.
            return GEA(db_info=db_info, user_info=user_info).sync()

        self.db = loader(db_info)
        self.sync_schema()
        # Re-run check_init so exist_tables picks up tables that
        # sync_schema may have just created on the platform.
        self.check_init(db_info, user_info)
        self.sync_data()

    def sync_data(self):
        """Push the data of every selected table, then mark them updated."""
        tables = []
        for table in self.tb_list:
            tables.append({
                'access_token': self.access_token,
                'database': self.database,
                'table': table,
                'ds_id': self.ds_id,
                'tb_id': self.exist_tables.get(table['name'], None)
            })
        update_tbs = []
        for table in tables:
            tb_id = table.get('tb_id')
            if tb_id:
                update_tbs.append(tb_id)
                self.sync_table(table)
            else:
                # Without a remote tb_id there is nothing to push into.
                error_log().error('Table %s is not exist in bdp platform or create failed, skipped.'
                                  % table['table']['name'])
        if len(update_tbs) > 0:
            OpenDS().tb_update(self.access_token, update_tbs)

    def sync_table(self, table):
        """Sync one table, tracking UI state and handling sync errors."""
        try:
            g.sync_table_state(self.database, table['table']['name'], g.STATE_SYNCING_COLOR)
            self.db.sync_data(table)
            g.sync_table_state(self.database, table['table']['name'], g.STATE_SUCCESS_COLOR)
        except SyncException as e:
            # Recoverable platform-side condition; may retry.
            self.sync_error_handler(e)
        except Exception as e:
            info = str(e).decode('unicode_escape')
            g.sync_table_state(self.database, table['table']['name'], g.STATE_FAILED_COLOR)
            process_log().error(info)
            error_log().error(format_exc())

    def sync_error_handler(self, error):
        """Retry a table whose sync failed because the platform was busy."""
        if error.error == error.ERROR_SYNCING:
            process_log().info('Waiting for 20s to sync `%s` again' % error.table.get('table', {}).get('name'))
            time.sleep(20)
            table = error.table
            # Resume from where the interrupted sync stopped.
            table['breakpoint'] = error.breakpoint
            self.sync_table(error.table)

    def sync_schema(self):
        """Create/align the remote schema of every selected table."""
        if not self.tb_list or self.tb_list == []:
            # No explicit table list configured: sync every table.
            self.tb_list = self.db.get_all_tables(self.database)
        process_log().info('Sync Table list: %s' % [tb['name'] for tb in self.tb_list])
        for table in self.tb_list:
            self.db.modify_schema(self.access_token, self.database, table, self.ds_id, self.exist_tables, self.modify)
        return

    @staticmethod
    def ds_exist(token, name):
        """Return ids for data source `name`, creating it when absent.

        :returns: {'ds_id': ..., 'exist_tables': {name: tb_id} or None};
                  exist_tables is None only when the source was just created.
        """
        try:
            exist_ds_list = OpenDS().ds_list(token)
        except Exception as e:
            process_log().error(e.message)
            raise e
        for source in exist_ds_list['data_source']:
            if source['name'] == name:
                exist_tables = {}
                # Each entry is a (table_name, tb_id) pair.
                for tb in source['tables']:
                    exist_tables[tb[0]] = tb[1]
                return {'ds_id': source['ds_id'], 'exist_tables': exist_tables}
        ds_id = OpenDS().ds_create(token, name)['ds_id']
        return {'ds_id': ds_id, 'exist_tables': None}

    def check_init(self, db_info, user_info):
        """Populate instance state from config; return False when unusable.

        Logs in (or reuses the configured token), copies connection
        settings, resolves the remote data source and builds tb_list.
        """
        if user_info.get('token'):
            self.access_token = user_info.get('token')
        else:
            self.access_token = OpenDS().ds_login(user_info['domain'],
                                                  user_info['username'],
                                                  user_info['password']).get('access_token')
        # Connection info (all optional except the database name).
        self.pwd = db_info['pwd'] if 'pwd' in db_info else None
        self.uid = db_info['uid'] if 'uid' in db_info else None
        self.port = db_info['port'] if 'port' in db_info else None
        self.db_type = db_info['type'] if 'type' in db_info else 'mysql'
        self.modify = True if 'modify' in db_info else False
        if 'database' in db_info:
            if db_info['database'] == '':
                return False
            self.database = db_info['database']
        elif 'schema' in db_info:
            self.database = db_info['schema']
        else:
            error_log().info('no database in conf file, can not sync data')
            return False

        self.ds = db_info['ds_name']
        res = self.ds_exist(self.access_token, self.ds)
        self.email = db_info['email'] if 'email' in db_info else None
        self.append_table = db_info['append_table'] if 'append_table' in db_info else []
        self.replace_table = db_info['replace_table'] if 'replace_table' in db_info else []
        self.ds_id = res['ds_id']
        self.exist_tables = res['exist_tables']
        if 'tb_list' not in self.__dict__:
            # Only build the list once per instance; a second check_init
            # call (after sync_schema) must not reset it.
            self.tb_list = []
            for table in self.append_table:
                self.tb_list.append(table)
            # BUG FIX: the original evaluated `self.tb_list + self.replace_table`
            # and discarded the result, so replace tables were never synced.
            self.tb_list.extend(self.replace_table)

        return True

    def get_ds_table_info(self, token, database, table):
        """Look up ds_id/ds_tb_id for `database`.`table` on the platform.

        Creates the data source when it does not exist; ds_tb_id stays
        None when the table is unknown remotely.
        """
        table_info = {'ds_id': None, 'ds_tb_id': None}
        try:
            exist_ds_list = OpenDS().ds_list(token)
        except Exception as e:
            process_log().error(e.message)
            return table_info
        for db in exist_ds_list['data_source']:
            if db['name'] == database:
                table_info['ds_id'] = db['ds_id']
                for tb in db['tables']:
                    if table == tb[0]:
                        table_info['ds_tb_id'] = tb[1]
                        return table_info
        ds_id = OpenDS().ds_create(token, database)['ds_id']
        table_info['ds_id'] = ds_id
        return table_info


def _sync():
    """Run one full synchronisation pass over every configured database.

    Flow: parse the optional force-full CLI arguments, load and validate
    the configuration, batch-process binlog-based incremental sources
    (aliyun/local), then sync the remaining databases table by table,
    persisting the per-table checkpoint time after each success.
    """
    # Oracle client encoding for any Oracle sources.
    os.environ['NLS_LANG'] = 'SIMPLIFIED CHINESE_CHINA.UTF8'
    parser = OptionParser(usage="usage:%prog [options] filepath")
    # NOTE(review): help text looks copy-pasted from another option —
    # -f actually forces a full sync of one <database> <table> pair.
    parser.add_option("-f", "--force_full",
                      action="store_true",
                      dest="force_full",
                      default=False,
                      help="Specify analysis execution time limit"
                      )
    (options, args) = parser.parse_args()
    force_full = options.force_full
    db_force_full_name = None
    db_force_full_table_name = None
    if force_full:
        # -f requires exactly two positional args: <database> <table>.
        if len(args) == 2:
            db_force_full_name = args[0]
            db_force_full_table_name = args[1]
            process_log().info(u"强制全量同步%s.%s" % (db_force_full_name, db_force_full_table_name))
        else:
            process_log().error(u'强制全量同步，请指定数据库名和表名')
            return

    configuration = Configuration()
    # check_file_json maps "<db>.<table>" keys to the last sync time.
    check_file_json = configuration.conf_check_time()
    res = configuration.conf_check(check_file_json)
    if res['status']:
        dbs = configuration.config['db']
        user_info = configuration.config['user_info']
        aliyun_binlog_data_pool = {}
        local_binlog_data_pool = {}
        has_aliyun_binlogs = False
        has_local_binlogs = False
        db_exist_tb_ids = []
        for db in dbs:
            if db.get('db_increase_type') == 'file':
                continue
            for tb in db['append_table']:
                if db['db_increase_type'] != 'origin' and tb["db_table_exist"]:
                    db_exist_tb_ids.append(tb['ds_tb_id'])
            # Assemble the parameters required for aliyun_binlogs.
            if db['db_increase_type'] == 'aliyun_binlogs':
                aliyun_binlog_data_pool['db_increase_type'] = 'aliyun_binlogs'
                aliyun_binlog_data_pool['db_binlog'] = db['db_binlog']
                aliyun_binlog_data_pool['db'] = db['db']
                aliyun_binlog_data_pool['ds_token'] = user_info['ds_token']
                aliyun_binlog_data_pool['aliyun_binlog_end_time'] = configuration.config['aliyun_binlog_end_time']
                aliyun_binlog_data_pool['db_binlog_instance'] = configuration.config['db_binlog_instance']
                aliyun_binlog_data_pool['db_binlog_files'] = configuration.config['db_binlog_files']
                db_name = db['database']
                # global_db collects, per database, the tables eligible
                # for binlog-based incremental sync.
                if 'global_db' not in aliyun_binlog_data_pool:
                    aliyun_binlog_data_pool['global_db'] = {db_name: []}
                else:
                    if db_name not in aliyun_binlog_data_pool['global_db']:
                        aliyun_binlog_data_pool['global_db'][db_name] = []
                change_force_tables = []
                for tb in db['append_table']:
                    tb_name = tb['name']
                    db_tb_time_key = get_db_tb_key(db_name, tb_name)
                    # Binlog sync needs a previous checkpoint AND the table
                    # present on both sides; otherwise fall back to full.
                    if db_tb_time_key in check_file_json and tb["db_table_exist"] and tb['ds_table_exist']:
                        has_aliyun_binlogs = True
                        aliyun_binlog_data_pool['global_db'][db_name].append(tb_name)
                        aliyun_binlog_data_pool[db_tb_time_key] = {'data': [], 'pool_type': '',
                                                                   'db_table_key_name': tb['db_table_key_name'],
                                                                   'db_table_key_position': tb['db_table_key_position'],
                                                                   'ds_token': user_info['ds_token'],
                                                                   'ds_tb_id': tb['ds_tb_id'],
                                                                   'ds_fields': tb['ds_fields'],
                                                                   'db_table_charset': tb['charset']}
                    else:
                        change_force_tables.append(tb)
                # Remaining tables are re-queued as forced full syncs below.
                db['append_table'] = change_force_tables
                db['db_increase_type'] = 'force_full'
            # Assemble the parameters required for local_binlogs.
            elif db['db_increase_type'] == 'local_binlogs':
                local_binlog_data_pool['db_increase_type'] = 'local_binlogs'
                local_binlog_data_pool['db_binlog'] = db['db_binlog']
                local_binlog_data_pool['db'] = db['db']
                local_binlog_data_pool['ds_token'] = user_info['ds_token']
                local_binlog_data_pool['local_binlog_end_time'] = configuration.config['local_binlog_end_time']
                db_name = db['database']
                if 'global_db' not in local_binlog_data_pool:
                    local_binlog_data_pool['global_db'] = {db_name: []}
                else:
                    if db_name not in local_binlog_data_pool['global_db']:
                        local_binlog_data_pool['global_db'][db_name] = []
                change_force_tables = []
                for tb in db['append_table']:
                    tb_name = tb['name']
                    db_tb_time_key = get_db_tb_key(db_name, tb_name)
                    if db_tb_time_key in check_file_json and tb["db_table_exist"] and tb['ds_table_exist']:
                        has_local_binlogs = True
                        local_binlog_data_pool['global_db'][db_name].append(tb_name)
                        local_binlog_data_pool[db_tb_time_key] = {'data': [], 'pool_type': '',
                                                                  'db_table_key_name': tb['db_table_key_name'],
                                                                  'db_table_key_position': tb['db_table_key_position'],
                                                                  'ds_token': user_info['ds_token'],
                                                                  'ds_tb_id': tb['ds_tb_id'],
                                                                  'ds_fields': tb['ds_fields'],
                                                                  'db_table_charset': tb['charset']}
                    else:
                        change_force_tables.append(tb)
                db['append_table'] = change_force_tables
                db['db_increase_type'] = 'force_full'
        if has_aliyun_binlogs:
            try:
                process_log().info(
                    '================【aliyun_binlogs sync begin】==============')
                end_times = Increase().increase_sync(aliyun_binlog_data_pool)
                if end_times:
                    # Merge the new checkpoint times and persist them.
                    check_file_json = dict(check_file_json, **end_times)
                    configuration.dump_check_time(check_file_json)
                process_log().info(
                    '================【aliyun_binlogs sync finished】================')
            except:
                process_log().error(traceback.format_exc())
        if has_local_binlogs:
            try:
                process_log().info(
                    '================【local_binlogs sync begin】==============')
                end_times = Increase().increase_sync(local_binlog_data_pool)
                if end_times:
                    check_file_json = dict(check_file_json, **end_times)
                    configuration.dump_check_time(check_file_json)
                process_log().info(
                    '================【local_binlogs sync finished】================')
            except:
                process_log().error(traceback.format_exc())
        # Second pass: per-table sync for everything that is not handled
        # by the binlog pools above.
        for db in dbs:
            if db['db_increase_type'] in ('origin', 'file'):
                # Whole-database modes are handled by where_sync.
                OpenDSClient().where_sync(db, user_info)
                continue
            for tb in db['append_table']:
                if not tb["db_table_exist"]:
                    continue
                info = {
                    'ds_name': user_info['username'],
                    'ds_password': user_info['password'],
                    'ds_domain': user_info['domain'],
                    'ds_token': user_info['ds_token'],
                    'db_host': db['server'],
                    'db_port': db['port'],
                    'db_database': db['database'],
                    'db_password': db['pwd'],
                    'ds_id': tb['ds_id'],
                    'db_type': db['type'],
                    'db': db['db'],
                    'db_table_name': tb['name'],
                    'ds_tb_id': tb['ds_tb_id'],
                    'db_table_key_name': tb['db_table_key_name'],
                    'db_table_key_position': tb['db_table_key_position'],
                    'ds_fields': tb['ds_fields'],
                    'ds_table_exist': tb['ds_table_exist']
                }
                db_increase_type = db['db_increase_type']
                info['db_increase_type'] = db_increase_type
                db_tb_time_key = get_db_tb_key(info['db_database'], info['db_table_name'])
                info['db_table_last_time'] = check_file_json[
                    db_tb_time_key] if db_tb_time_key in check_file_json else None
                info['db_end_time'] = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ')
                if force_full:
                    # With -f, only the named table is synced (fully:
                    # dropping db_table_last_time forces a full sync).
                    # NOTE(review): compares against db['db_database'] while
                    # the config elsewhere uses db['database'] — verify key.
                    if db_force_full_name == db['db_database'] and db_force_full_table_name == tb['name']:
                        info['db_table_last_time'] = None
                    else:
                        continue

                try:
                    process_log().info(
                        '================【%s.%s sync begin】==============' % (
                            info['db_database'], info['db_table_name']))
                    OpenDSClient().combin_sync(info)
                    process_log().info(
                        '================【%s.%s sync finished】================' % (
                            info['db_database'], info['db_table_name']))
                    # Persist the checkpoint only after a successful sync.
                    check_file_json[db_tb_time_key] = info['db_table_last_time']
                    configuration.dump_check_time(check_file_json)
                except:
                    process_log().error(traceback.format_exc())
        if db_exist_tb_ids:
            OpenDS().tb_update(user_info['ds_token'], db_exist_tb_ids)


@atexit.register
def exit_hook():
    """At interpreter exit, try to report runtime stats; on any failure,
    record the traceback and dump the runtime state to disk instead."""
    try:
        report()
    except:
        # Bare except is deliberate here: nothing may escape an atexit hook.
        g.runtime['reportException'] = traceback.format_exc()
        report_error()


def report():
    """Stamp the current runtime record and print it together with any
    previously failed (on-disk) reports collected by need_report()."""
    g.runtime['reportAt'] = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
    reports = need_report()
    reports.append(g.runtime)
    # print(x) with a single argument behaves identically under Python 2.
    print(reports)


def report_error():
    """Persist the current runtime record to a timestamped file so a
    later run can pick it up and report it."""
    base_dir = runtime_path
    if not os.path.exists(base_dir):
        os.mkdir(base_dir)
    target = os.path.join(base_dir, 'runtime.%s' % time.time())
    with open(target, 'w') as out:
        json.dump(g.runtime, out)


def need_report():
    """Return the file names of all pending runtime reports.

    os.listdir already returns a fresh list, so the original manual
    copy loop was redundant.
    """
    return os.listdir(runtime_path)


@one_instance
def sync():
    """Top-level entry point: run one sync pass (single-instance guarded),
    log duration, record any fatal error, and always signal completion."""
    try:
        process_log().info('Sync with url:%s' % OpenDS().url_prefix)
        started = time.time()
        _sync()
        elapsed = time.time() - started
        process_log().info('Sync done, cost %s sec.' % elapsed)
    except:
        failure = traceback.format_exc()
        process_log().error(failure)
        g.runtime['exitException'] = failure
    finally:
        g.send_signal(g.SYNC_STATE, 'done')


if __name__ == '__main__':
    # NOTE(review): this __main__ block never calls sync() — it is a
    # leftover urllib2/thread-pool benchmark with the pool code commented
    # out, so running the script directly only prints a ~0s timing.
    # Confirm whether the entry point should be sync() instead.
    import urllib2


    def request(_param):
        # Fetch one URL and print how long it took.
        _url = _param[0]
        _index = _param[1]
        print 'start task %s:%s' % (_index, _url)
        _start = time.time()
        urllib2.urlopen(_url)
        print 'task %s:%s, cost %s sec' % (_index, _url, time.time() - _start)


    urls = [
        'http://www.baidu.com',
        'http://www.soso.com',
        'http://www.qq.com',
        'http://www.soso.com',
        'http://www.python.org/',
        'http://www.python.org/getit/',
        'http://www.python.org/community/',
        'https://wiki.python.org/moin/',
        'http://planet.python.org/',
        'https://wiki.python.org/moin/LocalUserGroups',
        'http://www.python.org/psf/',
        'http://docs.python.org/devguide/',
        'http://www.python.org/community/awards/'
        # etc..
    ]
    params = [(url, index) for index, url in enumerate(urls)]
    start = time.time()
    # Make the Pool of workers
    # pool = ThreadPool(13)
    # Open the urls in their own threads
    # and return the results
    # results = pool.map(request, params)
    # close the pool and wait for the work to finish
    # pool.close()
    # pool.join()
    # for param in params:
    #     request(param[0], param[1])
    print "total :%s sec" % (time.time() - start)
