#!/usr/bin/python
# -*- coding: utf-8 -*-
import datetime
import traceback
import MySQLdb
import MySQLdb.cursors as cursors
import os
from DBUtils.PooledDB import PooledDB
import re
import time
from odbc import ODBC, SyncException, CSV_SPLIT
from util.logger import error_log, process_log, warn_log as warn
from util.opends import OpenDS
from util.synchelper import CSVSync
from util.tools import row2str, calculate_date_day


class MYSQL(ODBC):
    """MySQL implementation of the ODBC sync adapter.

    Connects through a DBUtils connection pool using server-side cursors
    (SSCursor) so large result sets are streamed rather than buffered
    in memory.
    """

    # Built-in MySQL system schemas that should be excluded from syncing.
    sys_db = ['information_schema', 'mysql', 'performance_schema', 'sys']

    # Class-level default; replaced with a real PooledDB in __init__.
    pool = None

    def __init__(self, connect_info):
        """Initialise the adapter and open a pooled MySQL connection.

        Args:
            connect_info (dict): connection parameters (server, port, uid,
                pwd, database) consumed by create_pool().
        """
        ODBC.__init__(self, connect_info)
        # Batch size used by get_all_rows() when streaming the table.
        self.fetch = 10000
        # self.connect_info is presumably set by ODBC.__init__ from
        # connect_info -- TODO confirm against the base class.
        self.pool = self.create_pool(self.connect_info)
        # Long-lived cursor shared by the metadata helper methods below.
        self.cursor = self.pool.connection().cursor()

    @staticmethod
    def create_pool(connect_info):
        """
            Create a mysql pool
        Args:
            connect_info (dict): must contain 'server'; may contain 'port',
                'uid', 'pwd' and 'database'.

        Returns:
            pool (PooledDB): single-connection pool using a server-side
                cursor class (SSCursor) so results stream instead of
                being buffered client-side.

        Raises:
            Exception: if 'port' cannot be converted to an integer.
        """
        try:
            port = int(connect_info.get('port', 3306))
        except (TypeError, ValueError):
            # Narrowed from a bare `except:` so control-flow exceptions
            # (KeyboardInterrupt, SystemExit) are no longer converted
            # into a bogus configuration error.
            raise Exception('Port must be integer.')
        pool = PooledDB(MySQLdb, maxconnections=1,
                        host=connect_info['server'],
                        port=port,
                        user=connect_info.get('uid', ''),
                        passwd=connect_info.get('pwd', ''),
                        db=connect_info.get('database', ''),
                        cursorclass=cursors.SSCursor,
                        charset='utf8')
        return pool

    def re_conn(self):
        """Rebuild the connection pool after a connection failure.

        Returns:
            bool: True if a new pool was created, False otherwise.
        """
        # Renamed the result flag from `re` (which shadowed the imported
        # `re` module) and dropped the `return` inside `finally`, which
        # silently swallowed any exception the bare `except:` missed.
        ok = False
        try:
            self.pool = self.create_pool(self.connect_info)
            ok = True
        except Exception:
            process_log().error(traceback.format_exc())
        return ok

    def get_cursor(self):
        """Open a fresh pooled connection and return (connection, cursor)."""
        connection = self.pool.connection()
        return connection, connection.cursor()

    def fetch_many(self, sql, fetch_row):
        """Yield row batches for `sql`, retrying up to 5 times on failure.

        Bug fixed: the retry loop previously re-ran the query on every
        iteration even after a successful pass (fetch_many_loop always
        ends with a False sentinel), so callers received every batch up
        to five times. Now the loop stops after the first attempt that
        produced data.

        Args:
            sql (str): query to execute.
            fetch_row (int): batch size passed to fetchmany().

        Yields:
            tuple: one batch of rows per iteration.
        """
        for attempt in xrange(5):
            self.re_conn()
            produced = False
            for res in self.fetch_many_loop(sql, fetch_row):
                if res is False:
                    # Sentinel from fetch_many_loop: this attempt ended
                    # (cleanly or with a logged error).
                    break
                produced = True
                yield res
            if produced:
                # Successful attempt -- do not re-run the query.
                return

    def fetch_many_loop(self, sql, fetch_row):
        """Execute `sql` and yield row batches, ending with a False sentinel.

        The trailing `yield False` in the finally block tells fetch_many()
        that this attempt is over, whether it succeeded or failed; errors
        are logged rather than raised.

        Args:
            sql (str): query to execute.
            fetch_row (int): batch size passed to fetchmany().

        Yields:
            tuple | bool: row batches, then False exactly once.
        """
        conn = None
        cursor = None
        try:
            conn, cursor = self.get_cursor()
            cursor.execute(sql)
            res = cursor.fetchmany(fetch_row)
            while res:
                yield res
                res = cursor.fetchmany(fetch_row)
        except Exception:
            # The original two handlers (MySQLdb driver errors vs. a bare
            # `except:`) had identical bodies, so they are merged; using
            # `except Exception` also stops swallowing SystemExit and
            # KeyboardInterrupt.
            process_log().error(traceback.format_exc())
        finally:
            self.close_conn_cursor(conn, cursor)
            # NOTE(review): yielding inside finally raises RuntimeError if
            # the generator is closed early; acceptable here because
            # fetch_many() always drains it to the sentinel.
            yield False

    def fetch_all(self, sql):
        """Execute `sql` and return the full result set.

        Args:
            sql (str): query to execute.

        Returns:
            tuple | None: all rows, or None if execution failed.
        """
        conn = None
        cursor = None
        res = None
        try:
            conn, cursor = self.get_cursor()
            cursor.execute(sql)
            res = cursor.fetchall()
        except (MySQLdb.OperationalError, MySQLdb.InternalError):
            # Connection-level failure: log the traceback (previously this
            # branch reconnected silently) and rebuild the pool so the next
            # call gets a fresh connection.
            process_log().error(traceback.format_exc())
            self.re_conn()
        except Exception:
            process_log().error(traceback.format_exc())
        finally:
            self.close_conn_cursor(conn, cursor)
        return res

    def close_conn_cursor(self, conn, cursor):
        """Close the cursor, then the connection, skipping None values."""
        for closeable in (cursor, conn):
            if closeable:
                closeable.close()

    def get_secure_path(self):
        """Return the server's secure_file_priv directory (CSV export path)."""
        self.cursor.execute('SHOW VARIABLES LIKE "secure_file_priv"')
        first_row = self.cursor.fetchall()[0]
        return first_row[1]

    def fetch_schema(self, database, table_name):
        """
            Fetch the schema of table.
        Args:
            database (str):
            table_name (str):

        Returns:
            tuple | None: (TABLE_NAME, COLUMN_NAME, DATA_TYPE, COLUMN_KEY)
                rows, or None when table_name is empty.
        """
        if not table_name or table_name == []:
            error_log().info('table name is None, so do not sync any table')
            return
        # Parameterized query instead of % string interpolation to avoid
        # SQL injection through database/table names.
        sql = "SELECT `TABLE_NAME`, `COLUMN_NAME`,`DATA_TYPE`,`COLUMN_KEY` " \
              "FROM information_schema.columns " \
              "where TABLE_SCHEMA = %s AND TABLE_NAME = %s "
        self.cursor.execute(sql, (database, table_name))
        return self.cursor.fetchall()

    def field_join(self, table, field):
        """Return the SELECT expression for one column, converting types
        (bit, decimal, datetime/date) that need an explicit cast so the
        CSV dump receives plain text values."""
        meta = self.field_dict.get(table, {}).get(field, {})
        raw_type = meta.get('raw_type', '').lower()
        quoted = u'`%s`.`%s`' % (table, field)
        if raw_type == 'bit':
            return u'bin(%s+0)' % quoted
        if raw_type == 'decimal':
            return u'cast(%s as char)' % quoted
        if raw_type in ('datetime', 'date'):
            return u'cast(%s as datetime)' % quoted
        return quoted

    def dump(self, database_name, table_name, fields_name, where, tb_id, return_cursor=False, breakpoint=-1):
        """
            Get the fetch rows cursor by giving info
        Args:
            breakpoint: resume offset for the CSV dump (-1 = from scratch)
            return_cursor (bool): return the executed cursor if true else csv cached path
            tb_id (str):
            database_name (str):
            table_name (str):
            fields_name (list):
            where (str): the where condition of sync.

        Returns:
            str | cursors.SSCursor
        """
        # map(lambda ...) over a prebuilt pair list replaced by a direct
        # comprehension.
        fields_with_table = [self.field_join(table_name, field) for field in fields_name]
        fields_sql = ', '.join(fields_with_table)
        sql = "select %s from `%s`.`%s` %s " % (fields_sql, database_name, table_name, where)
        process_log().info("Fetch data by sql: %s" % sql)

        if return_cursor:
            # Bug fixed: the connection/cursor used to be created on every
            # call even when dumping to CSV, leaking one pooled connection
            # per dump. Only open it when the caller wants the cursor.
            cursor = self.pool.connection().cursor()
            cursor.execute(sql)
            return cursor

        return self._sql_dump_csv(sql, tb_id, breakpoint)

    def text_convert(self, table, tb_id, field, field_id, delta, fmt):
        """Build an incremental-sync WHERE clause for a text date column.

        Reads the last synced value for the field, shifts it by `delta`
        days via calculate_date_day, and returns '' when no checkpoint
        exists yet.
        """
        checkpoint = self.read_index(tb_id, field_id)
        checkpoint = calculate_date_day(checkpoint, fmt, delta)
        if not checkpoint:
            return ''
        return "where `%s`.`%s` >= '%s'" % (table, field, checkpoint)

    def count(self, database, table, where):
        """
            Get the count of sync by giving info
        Args:
            database (str):
            table (str):
            where (str):

        Returns:
            sql_count (str): the row count as a string, '0' when empty.
        """
        sql = 'select count(*) from `%s`.`%s` %s' % (database, table, where)
        process_log().info('Fetch rows count by sql statement: %s' % sql)
        self.cursor.execute(sql)
        rows = self.cursor.fetchall()
        if not rows:
            return '0'
        return str(rows[0][0])

    def max_value(self, database, table, index, where):
        """
            Get the max value of index field.
        Args:
            database (str):
            table (str):
            index (str):
            where (str):

        Returns:
            max (str): stringified MAX(index), or '' when there is no
                index field or no result.
        """
        if not index:
            return ''
        sql = 'select max(%s) from `%s`.`%s` %s' % (index, database, table, where)
        process_log().info('Fetch max value by sql: %s' % sql)
        self.cursor.execute(sql)
        rows = self.cursor.fetchall()
        if not rows:
            return ''
        return str(rows[0][0])

    def sync_data(self, kwargs):
        """
        :param kwargs:
                    access_token, the token for access open.bdp.cn
                    database, the database which to sync now
                    table, the table which to sync now
                    ds_id, the data source identity for bdp
                    tb_id, the virtual table identity for bdp
                    encode, the database's coding

        """
        access_token = kwargs.get('access_token', '')
        database = kwargs.get('database', '')
        table = kwargs.get('table', {})
        tb_id = kwargs.get('tb_id', '')
        # -1 means a fresh sync; any other value is a row offset to resume
        # from after a previous SyncException.
        breakpoint = kwargs.get('breakpoint', -1)
        # encode = kwargs.pop('encode', 'utf8')
        fields = OpenDS().field_list(access_token, tb_id)
        fields_name = [field['name'] for field in fields]
        fields_id = [field['field_id'] for field in fields]

        index_field = table.get('index_field', {}).get('name')
        text_date = table.get('text_date', {})
        try:
            fetch_rows = int(table.get('fetch_rows', '10000'))
        except ValueError:
            warn().warning('The configure fetch_rows for %s.%s is \'%s\'(not a number), set to default 10000.'
                           % (database, table.get('name'), table.get('fetch_rows')))
            fetch_rows = 10000
        # self.where presumably comes from the ODBC base class and builds
        # the incremental WHERE clause -- TODO confirm.
        where, index_id = self.where(table, tb_id, fields)

        # No incremental marker of any kind: full re-sync, so wipe the
        # remote table first.
        if not (index_field or text_date):
            OpenDS().tb_clean(access_token, tb_id)

        # Snapshot the new high-water mark BEFORE dumping, so rows added
        # during the sync are picked up again next time.
        new_max_value = self.max_value(database, table['name'], index_field, where) if index_field else None

        try:
            path = self.dump(database, table['name'], fields_name, where, tb_id, breakpoint=breakpoint)
        except Exception, e:
            # CSV dump failed: fall back to the generic (cursor-based)
            # sync implemented by the base class.
            warn().warning("Try cache table failed, ignored. reason: %s" % str(e))
            return ODBC.sync_data(self, kwargs)
        csv = CSVSync(path, delimiter=CSV_SPLIT)

        data_count = self.count(database, table['name'], where)
        process_log().info('Begin sync table %s, total data count is %s, schema: %s' %
                           (table['name'], data_count, [field['name'] for field in fields]))
        buff = []
        total = breakpoint if breakpoint != -1 else 0
        end_insert = start = time.time()
        try:
            for line in csv.fetch(breakpoint=breakpoint):
                buff.append(line)
                total += 1
                if len(buff) >= fetch_rows:
                    before_insert = time.time()
                    OpenDS().tb_insert(access_token, tb_id, fields_name, buff)
                    # NOTE(review): total * 100 / int(data_count) raises
                    # ZeroDivisionError if count() returned '0' while the
                    # CSV still has rows -- confirm this cannot happen.
                    process_log().info('Syncing %s/%s(%s)%% of table:`%s`, read:%f sync:%f' %
                                       (total, data_count, total * 100 / int(data_count), table['name'],
                                        before_insert - end_insert, time.time() - before_insert))
                    end_insert = time.time()
                    buff = []
            else:
                # The loop body never breaks, so this for/else clause
                # always runs: it flushes the final partial batch.
                if len(buff) > 0:
                    before_insert = time.time()
                    OpenDS().tb_insert(access_token, tb_id, fields_name, buff)
                    process_log().info('Syncing %s/%s(%s)%% of table:`%s`, read:%f sync:%f' %
                                       (data_count, data_count, 100, table['name'],
                                        before_insert - end_insert, time.time() - before_insert))
            OpenDS().tb_commit(access_token, tb_id)
        except Exception, e:
            error_log().error(traceback.format_exc())
            if breakpoint == -1:
                # First failure: raise a resumable exception carrying the
                # offset of the last fully inserted batch.
                raise SyncException(database, SyncException.ERROR_SYNCING, 'Sync error',
                                    table=kwargs, breakpoint=total - fetch_rows)
            else:
                # Already a resumed attempt: give up and propagate.
                raise e
        end = time.time()
        process_log().info(
            'Sync table %s cost %s seconds to write %s record(s)' % (table['name'], int(end - start) + 1, total))
        remote_count = OpenDS().tb_info(access_token, tb_id).get('data_count')
        process_log().info("Remote data count is %s, local is %s." % (remote_count, data_count))

        # max_value() returns the literal string 'None' when MAX() is NULL,
        # so this string comparison appears intentional -- TODO confirm.
        if index_field and new_max_value != 'None':
            field_id = 'WARNING'
            for index, field in enumerate(fields_name):
                if field == index_field:
                    field_id = fields_id[index]
                    break
            self.write_index(tb_id, field_id, new_max_value)

    def init_schema(self, info):
        """Create or reconcile the remote table schema for one source table.

        Side effects: may clean the remote table, add/delete remote fields,
        or create a new remote table; writes 'ds_tb_id' and 'ds_fields'
        back into `info`.
        """
        ds_token = info['ds_token']
        table_name = info['db_table_name']
        database = info['db_database']
        ds_id = info['ds_id']
        ds_tb_id = info['ds_tb_id']
        if not table_name:
            process_log().info('table name is None, so do not sync any table')
            return
        table_schema = self.get_table_info(database, table_name)
        field_names = [col['name'] for col in table_schema]
        uniq_key = [col['name'] for col in table_schema if col['uniq_index']]
        if ds_tb_id:
            # Remote table exists: drop its rows, then diff the fields.
            OpenDS().tb_clean(ds_token, ds_tb_id)
            remote_fields = OpenDS().field_list(ds_token, ds_tb_id)
            remote_names = [f['name'] for f in remote_fields]
            # Delete remote-only fields first...
            for remote_name in remote_names:
                if remote_name not in field_names:
                    OpenDS().field_del(ds_token, ds_tb_id, remote_name)
            # ...then add fields that exist only locally.
            for field in table_schema:
                if field['name'] not in remote_names:
                    OpenDS().field_add(ds_token, ds_tb_id, field['name'], field['type'], field['uniq_index'])
        else:
            ds_tb_id = OpenDS().tb_create(ds_token, ds_id, table_name, table_schema, uniq_key)['tb_id']

        process_log().info(
            'init table %s with schema %s and uniq index is %s' % (table_name, table_schema, uniq_key))
        info['ds_tb_id'] = ds_tb_id
        info['ds_fields'] = table_schema

    def get_table_info(self, database, table_name):
        """Describe a table's columns for schema creation.

        Args:
            database (str):
            table_name (str):

        Returns:
            list[dict]: one {'name', 'type', 'uniq_index'} per column
                (name lowercased, uniq_index 1 for primary-key columns),
                or [] when the table has no columns / does not exist.
        """
        # Parameterized query instead of % interpolation to avoid SQL
        # injection through database/table names.
        sql = "SELECT `TABLE_NAME`, `COLUMN_NAME`,`DATA_TYPE`,`COLUMN_KEY` " \
              "FROM information_schema.columns " \
              "where TABLE_SCHEMA = %s AND TABLE_NAME = %s"
        self.cursor.execute(sql, (database, table_name))
        columns = self.cursor.fetchall()
        # `columns == [] or not columns` collapsed to the single truth test.
        if not columns:
            process_log().info('no columns read from database for table %s' % table_name)
            return []
        return [{
            'name': col[1].lower(),
            'type': self.trans_field_type(col[2]),
            'uniq_index': 1 if col[3] == 'PRI' else 0
        } for col in columns]

    def full_sync(self, info):
        """Push every row of the source table to the remote data source,
        cleaning the remote table first and committing at the end."""
        ds_token = info['ds_token']
        ds_tb_id = info['ds_tb_id']
        OpenDS().tb_clean(ds_token, ds_tb_id)
        start_time = datetime.datetime.now()
        field_names = [field['name'] for field in info['ds_fields']]

        for batch in self.get_all_rows(info['db_database'], info['db_table_name']):
            OpenDS().tb_insert(ds_token, ds_tb_id, field_names, batch)
        OpenDS().tb_commit(ds_token, ds_tb_id)
        end_time = datetime.datetime.now()
        # Record when this sync STARTED so rows written during the sync
        # are re-examined by the next incremental pass.
        info['db_table_last_time'] = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
        process_log().info(
            'full_sync cost %s seconds' % (end_time - start_time).seconds)

    def get_key_position(self, database, table):
        """Return [primary_key_name, zero_based_position] for the table's
        first primary-key column, or [] when it has none."""
        key_sql = "select ordinal_position, column_name " \
                  "from information_schema.columns " \
                  "where table_schema = '%s' and table_name = '%s' and column_key='PRI'" % (database, table)
        self.cursor.execute(key_sql)
        rows = self.cursor.fetchall()
        if not rows:
            return []
        position, name = rows[0]
        return [name, int(position) - 1]

    def fetch_databases(self):
        """Return every schema name visible to the connected user."""
        self.cursor.execute("SELECT DISTINCT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA")
        return self.cursor.fetchall()

    def fetch_tables(self, database, view=True):
        """List the table names of a schema.

        Args:
            database (str): schema to inspect.
            view (bool): include views when True; restrict to base tables
                when False.

        Returns:
            tuple: one-element rows of table names.
        """
        # The original duplicated the entire query for the view=False
        # case; build it once and append the extra predicate instead.
        sql = '''SELECT
                    DISTINCT TABLE_NAME
                FROM
                    information_schema.`TABLES`
                WHERE
                    TABLE_SCHEMA = "%s" ''' % database
        if not view:
            sql += "AND TABLE_TYPE = 'BASE TABLE' "
        self.cursor.execute(sql)
        return self.cursor.fetchall()

    def fetch_fields(self, database, table):
        """Return (COLUMN_NAME, DATA_TYPE) pairs for a table's columns.

        Args:
            database (str):
            table (str):

        Returns:
            tuple: rows of (name, data_type).
        """
        # Parameterized query instead of % interpolation to avoid SQL
        # injection through database/table names.
        sql = "select distinct COLUMN_NAME,DATA_TYPE from information_schema.columns " \
              "where TABLE_SCHEMA = %s AND TABLE_NAME = %s"
        self.cursor.execute(sql, (database, table))
        return self.cursor.fetchall()

    def fetch_record_count(self, database, table):
        """Return the table's row count as the raw fetchall() result."""
        self.cursor.execute("select count(1) from `%s`.`%s`" % (database, table))
        return self.cursor.fetchall()

    def has_table(self, database, table):
        """Return True when `database`.`table` has at least one column
        listed in information_schema (i.e. the table exists)."""
        key_sql = "select * from information_schema.columns where table_schema = '%s' and table_name = '%s'" % (
            database, table)
        self.cursor.execute(key_sql)
        return bool(self.cursor.fetchall())

    def get_table_status(self, database):
        """Return the rows of SHOW TABLE STATUS for `database`."""
        self.cursor.execute('show table status  from %s' % database)
        return self.cursor.fetchall()

    def get_table_charset(self, database, table):
        """Parse the table's charset out of SHOW CREATE TABLE output.

        Falls back to 'utf8' whenever the DDL cannot be read or matched.
        """
        sql = 'show create table %s.%s' % (database, table)
        self.cursor.execute(sql)
        rows = self.cursor.fetchall()
        # Expect exactly one (table_name, create_statement) row.
        if len(rows) != 1 or len(rows[0]) != 2:
            return 'utf8'
        ddl = rows[0][1].lower().replace('\n', ' ')
        matched = re.match(r'.*charset=(?P<charset>\S+).*', ddl)
        if matched:
            return matched.group('charset')
        return 'utf8'

    def get_all_rows(self, database, table):
        """Stream the whole table as batches of stringified rows.

        Temporal columns (any type containing 'date', 'time' or 'year')
        are formatted server-side with DATE_FORMAT so every value arrives
        as plain text.

        Args:
            database (str):
            table (str):

        Yields:
            list[str]: up to self.fetch rows per batch, each converted
                with row2str().
        """
        name_types = self.fetch_fields(database, table)
        fields = []
        for name, f_type in name_types:
            # `x.find(s) != -1` replaced by the idiomatic `s in x`.
            if 'date' in f_type or 'time' in f_type or 'year' in f_type:
                fields.append("DATE_FORMAT(%s, '%%Y-%%m-%%d %%H:%%i:%%S')" % name)
            else:
                fields.append(name)
        sql = "select %s from %s.%s" % (','.join(fields), database, table)
        self.cursor.execute(sql)
        res = self.cursor.fetchmany(self.fetch)
        while res:
            process_log().info('insert %s rows' % len(res))
            yield [row2str(row) for row in res]
            res = self.cursor.fetchmany(self.fetch)
