#!/usr/bin/python
# -*- coding: utf-8 -*-
import csv
import string
import os
import time
import abc
import sys
from decimal import Decimal
from util.logger import error_log, process_log, warn_log as warn
from util.opends import OpenDS
from util.synchelper import CursorFetcher, quote


class SyncException(Exception):
    """Raised when syncing a database against the BDP platform fails.

    Attributes:
        database: name of the database being synced when the error occurred
        error: one of the ERROR_* codes below
        message: human-readable failure reason
        table: table being synced ('' when the error is not table-specific)
        breakpoint: resume point for breakpoint-based retries (0 = none)
    """
    # error codes
    ERROR_SYNCING = 0
    ERROR_CONNECT = 1

    def __init__(self, database, error, message, table='', breakpoint=0):
        # forward the message to the base class so Exception.args is populated
        super(SyncException, self).__init__(message)
        self.database = database
        self.error = error
        self.table = table
        self.breakpoint = breakpoint
        self.message = message

    def __str__(self):
        return u"Exception when syncing [%s], reason:[%s]" % (self.database, self.message)

    def __repr__(self):
        return self.__str__()


# CSV column delimiter. NOTE(review): appears unused within this module —
# confirm external users before removing.
CSV_SPLIT = ','


class ODBC:
    """Abstract base class for database sync adapters.

    Concrete engines subclass this and implement the abstract fetch_* /
    dump / count / max_value / text_convert methods; this base class drives
    remote schema maintenance (modify_schema) and batched data upload
    (sync_data) against the BDP open platform.
    """

    # Maps a lower-cased native column type (size suffix stripped, see
    # trans_field_type) to one of the platform's logical types:
    # 'date', 'number' or 'string'. Unknown types fall back to 'string'.
    type_mapper = {

        # for mysql, sql server, oracle
        "datetime": 'date',
        "datetime2": 'date',
        "date": 'date',
        "time": 'date',
        "timestamp": 'date',
        "smalldatetime": "date",

        "int": 'number',
        "tinyint": 'number',
        "smallint": 'number',
        "bigint": 'number',
        "float": 'number',
        "double": 'number',
        "decimal": 'number',
        "number": 'number',
        "long": "number",
        "mediumint": "number",
        "numeric": "number",
        "real": "number",
        "money": "number",
        "binary_float": "number",
        "binary_double": "number",

        'char': 'string',
        'varchar': 'string',
        'longtext': 'string',
        "tinytext": "string",
        "mediumtext": "string",
        'nchar': 'string',
        "nvchar2": "string",
        "varchar2": "string",

        # for firebird database (numeric type codes)
        "7": "number",
        "8": "number",
        "9": "number",
        "10": "number",
        "11": "number",
        "12": "date",
        "13": "date",
        "14": "string",
        "16": "number",
        "27": "number",
        "35": "date",
        "37": "string",
        "40": "string",

        # for ms access database
        "counter": "number",
        "integer": "number",
        "longchar": "string",
        "currency": "number",
        "guid": "string"
    }
    # NOTE(review): the three attributes below are mutable class-level
    # attributes shared by every instance and every subclass — confirm the
    # sharing is intentional before running several syncs in one process.
    sys_db = []  # system database names hidden by get_all_databases
    field_dict = {}  # per-table field metadata filled by modify_schema
    _cache_path = 'cache'  # directory used by _sql_dump_csv for csv dumps

    def __init__(self, connect_info):
        # engine-specific connection parameters; interpreted by subclasses
        self.connect_info = connect_info
        # database cursor; set up later by a subclass (create_cursor hook)
        self.cursor = None

    @abc.abstractmethod
    def fetch_schema(self, database, table_name):
        """
        Get the table schema; must be overridden by a sub class.

        :param database: the name of the database
        :param table_name: the name of the table
        :rtype : tuple
        :return: rows of (table, field, type, 'PRI' or '') — one per column
            (see modify_schema for how each element is consumed)
        """
        pass

    def modify_schema(self, access_token, database, table, ds_id, exist_tables, modify=True):
        """
            Modify the remote table schema by user conf for the table.

            Creates the table on the BDP platform when it does not exist yet;
            otherwise (when ``modify`` is True) deletes remote fields that no
            longer exist locally and adds local fields missing remotely.
        Args:
            access_token: the access_token for open.bdp.com
            database: the database name which contains the sync table
            table: the table conf for the sync table; contains the table name,
                field selection (select/mask/text_date) and sync condition
            ds_id: the data source id on the bdp platform
            exist_tables: mapping of table name -> tb_id for tables that
                already exist on the bdp platform
            modify: modify the remote schema if True, otherwise leave it as is

        Returns:
            True on success, False when no usable columns were found
        """
        table_name = table.get('name', '')
        mask = table.get('mask', [])
        select = table.get('select', [])
        text_date = table.get('text_date', {})
        columns = self.fetch_schema(database, table_name)
        if not columns:
            error_log().error('no columns read from database for table %s' % table_name)
            return False
        # keep only the selected columns; when no explicit selection is
        # configured, drop the masked ones instead
        columns = [col for col in columns
                   if (col[1] in select if select else col[1] not in mask)]
        if not columns:
            # fixed message: columns were read, the select/mask filter removed them
            error_log().error('no columns left after select/mask filter for table %s' % table_name)
            return False
        table_schema = []
        uniq_key = []
        self.field_dict[table_name] = {}
        for col in columns:
            col_name = col[1]
            # a configured text_date field is forced to 'date' regardless of
            # its native type
            data_type = 'date' if col[1] == text_date.get('field', None) else self.trans_field_type(col[2])
            table_schema.append({
                'name': col_name,
                'type': data_type,
            })
            self.field_dict[table_name][col_name] = {
                'type': data_type,
                'raw_type': col[2],
                'uniq_index': 1 if col[3] == 'PRI' else 0
            }
            if col[3] == 'PRI':
                uniq_key.append(col_name)
        if (not exist_tables) or (table_name not in exist_tables):
            process_log().info(
                'create table %s with schema %s and uniq index is %s' % (table_name, table_schema, uniq_key))
            OpenDS().tb_create(access_token, ds_id, table_name, table_schema, uniq_key)
        elif modify:
            old_fields = OpenDS().field_list(access_token, exist_tables[table_name])
            # drop remote fields that no longer exist locally; aggregated
            # fields are platform-side and must be kept
            for old_field in old_fields:
                if 'aggregator' in old_field:
                    continue
                if old_field['name'] not in self.field_dict[table_name]:
                    process_log().info('delete field %s from table %s' % (old_field['name'], table_name))
                    try:
                        OpenDS().field_del(access_token, exist_tables[table_name], old_field['name'])
                    except Exception as e:
                        warn().warning('delete failed, skip. reason: %s' % str(e))
                    continue
            # add local fields that are missing remotely
            old_names = [fd['name'] for fd in old_fields]
            for f_name, field in self.field_dict[table_name].items():
                if f_name not in old_names:
                    process_log().info('Add field %s to table %s' % (f_name, table_name))
                    OpenDS().field_add(access_token, exist_tables[table_name], f_name, field['type'], field['uniq_index'])
        return True

    @abc.abstractmethod
    def field_join(self, table, field):
        """Build the SQL fragment for *field* of *table*; implemented by sub
        classes. NOTE(review): exact quoting/joining semantics are
        engine-specific and not visible here — see the concrete adapters."""
        pass

    def fetch_data(self, access_token=None, database=None, table=None, tb_id=None):
        """
            Fetch the data by remote database schema from bdp platform.
        Args:
            access_token: the access_token
            database: database name
            table: table conf dict (name, index_field, text_date, where, ...)
            tb_id: the table's id in bdp platform

        Returns:
            dict with the remote field names/types, date column indexes,
            local row count, new max index value, a fetch cursor and the
            index field id.
        """
        if not table:
            table = {}

        index_field = table.get('index_field', {})

        # filter fields name which not in local database.
        local_fields_name = [field['name'] for field in self.get_all_fields(database, table['name'])]

        remote_fields = OpenDS().field_list(access_token, tb_id)

        # materialize as a list: it is iterated twice below (a lazy filter
        # object would be exhausted after the first pass under Python 3)
        fetch_fields = [f for f in remote_fields if f.get('name') in local_fields_name]

        fetch_fields_name = [field.get('name') for field in fetch_fields]
        fetch_fields_type = [field.get('type') for field in fetch_fields]

        where, index_id = self.where(table, tb_id, remote_fields)

        date_index = [index for index, field in enumerate(remote_fields) if field['type'] == 'date']
        # full sync (no incremental index configured): wipe the remote table first
        # todo change logic
        if not index_field and not table.get('text_date'):
            OpenDS().tb_clean(access_token, tb_id)

        count = self.count(database, table['name'], where)

        # incremental index: an explicit index_field wins over text_date
        index = index_field.get('name') or table.get('text_date', {}).get('field')
        max_value = self.max_value(database, table['name'], index, where) if index else 0

        cursor = self.dump(database, table['name'], fetch_fields_name, where, tb_id, return_cursor=True)
        result = {
            'remote_fields_name': fetch_fields_name,
            'remote_fields_type': fetch_fields_type,
            'date_index': date_index,
            'data_count': count,
            'new_max_value': max_value,
            'cursor': cursor,
            'index_id': index_id
        }
        return result

    def fetch_many(self, fetch_rows):
        # hook for sub classes: fetch up to `fetch_rows` rows; no-op here
        pass

    def create_cursor(self, connect_info):
        # hook for sub classes: open a database cursor for `connect_info`
        # and assign it to self.cursor; no-op here
        pass

    def sync_data(self, kwargs):
        """
        Sync one table: fetch all rows from the local database, normalize
        each value according to its remote type, and upload the rows to the
        bdp platform in batches of `fetch_rows`.

        :param kwargs:
                    access_token, the token for access open.bdp.cn
                    database, the database which to sync now
                    table, the table which to sync now
                    ds_id, the data source identity for bdp
                    tb_id, the virtual table identity for bdp
                    encode, the database's coding

        """
        access_token = kwargs.get('access_token')
        database = kwargs.get('database')
        table = kwargs.get('table')
        tb_id = kwargs.get('tb_id')
        # batch size: configurable per table, defaults to 10000
        try:
            fetch_rows = int(table.get('fetch_rows', '10000'))
        except ValueError:
            warn().warning('The configure fetch_rows for %s.%s is \'%s\'(not a number), set to default 10000.'
                           % (database, table.get('name'), table.get('fetch_rows')))
            fetch_rows = 10000
        # encode = kwargs.pop('encode', 'utf8')

        # overwrite in sub class
        res = self.fetch_data(access_token, database, table, tb_id)
        remote_fields_name = res['remote_fields_name']
        remote_fields_type = res['remote_fields_type']
        data_count = res['data_count']
        new_max_value = res['new_max_value']
        fetcher = CursorFetcher(res['cursor'], row=fetch_rows)
        index_id = res['index_id']

        process_log().info('Begin sync table %s, total data count is %s, schema: %s' %
                           (table['name'], data_count, remote_fields_name))
        buf = []
        total = 0
        _time_start_fetch = _time_start_sync = time.time()
        res = fetcher.fetch()
        scanning_count = 1
        while res:
            _time_start_process = time.time()
            for row in res:
                row_processed = []
                # normalize each cell according to its remote logical type
                for index, x in enumerate(row):
                    if x is not None:
                        # Decimal values are stringified before upload
                        if type(x) is Decimal:
                            x = str(x)
                        if remote_fields_type[index] == 'number':
                            # expand scientific notation (e.g. 1e-05) into a
                            # plain decimal string, then trim trailing zeros
                            # and a dangling decimal point
                            if str(x).find('e') > 0 or str(x).find('E') > 0:
                                try:
                                    x = '%.16f' % x
                                    while x.endswith('0') or x.endswith('.'):
                                        x = x[:-1]
                                except Exception:
                                    pass
                        elif remote_fields_type[index] == 'date':
                            # drop the fractional-second part of timestamps
                            x = str(x).split('.')[0]
                        else:
                            # everything else is sent as text
                            try:
                                if x is not None:
                                    # if os.name == 'nt' and str(self.__class__) == 'odbc_p.mssql.MSSQL':
                                        # x = x.encode('latin1').decode('gbk')
                                    x = u'%s' % x
                            except Exception:
                                x = None
                    row_processed.append(x)
                buf.append(row_processed)
                # buffer full: push one batch to the platform and log timings
                if len(buf) >= fetch_rows:
                    _time_start_upload = time.time()
                    OpenDS().tb_insert(access_token, tb_id, remote_fields_name, buf)
                    _time_end_upload = time.time()
                    total += fetch_rows
                    log = {
                        'total': total,
                        'count': data_count,
                        'read': _time_start_process - _time_start_fetch,
                        'process': _time_start_upload - _time_start_process,
                        'upload': _time_end_upload - _time_start_upload
                    }
                    process_log().info('Synced %(total)s/%(count)s, read: %(read)f, '
                                       'process: %(process)f, upload: %(upload)f'
                                       % log)
                    buf = []
            scanning_count += 1
            _time_start_fetch = time.time()
            res = fetcher.fetch()
        # flush the last partial batch, then commit the whole upload
        if len(buf) > 0:
            process_log().info('Syncing %s of %s ...' % (data_count, data_count))
            OpenDS().tb_insert(access_token, tb_id, remote_fields_name, buf)
        OpenDS().tb_commit(access_token, tb_id)
        total += len(buf)
        end = time.time()
        process_log().info(
            'Sync table %s cost %s seconds to write %s record(s)' % (table['name'], int(end - _time_start_sync) + 1, total))
        remote_count = OpenDS().tb_info(access_token, tb_id).get('data_count')
        process_log().info("Remote data count is %s, local is %s." % (remote_count, data_count))

        # persist the new max index value so the next run can resume incrementally
        if new_max_value != 'None' and index_id:
            self.write_index(tb_id, index_id, new_max_value)
        return

    @abc.abstractmethod
    def dump(self, database_name, table_name, fields_name, where, tb_id, return_cursor=False, breakpoint=-1):
        """
            Get the fetch rows cursor (or a csv cache path) for the given table.
        Args:
            breakpoint: resume point; -1 means start from scratch
            return_cursor (bool): return the cursor.fetchmany if true else csv cached path
            tb_id (str): bdp table id (used as the cache file name)
            database_name (str): source database name
            table_name (str): source table name
            fields_name (list): columns to dump
            where (str): the where condition of sync.

        Returns:
            str | cursors.SSCursor
        """
        pass

    def _sql_dump_csv(self, sql, tb_id, breakpoint):
        """
            Dump a sql query result to a csv cache file.
        Args:
            sql (str): the query to execute on self.cursor
            tb_id (str): bdp table id, used as the cache file name
            breakpoint (int): when != -1 an existing cache file is reused
                (breakpoint resume) instead of being regenerated

        Returns:
            cache_path (str): absolute path of the csv cache file

        Raises:
            Exception: when no cursor has been created yet
        """
        if not self.cursor:
            # fixed message grammar (was: 'Cursor is not exist')
            raise Exception('Cursor does not exist')

        if not os.path.exists(self._cache_path):
            os.mkdir(self._cache_path)
        # double the backslashes so the Windows path survives later escaping
        cache_path = os.path.abspath(os.path.join(self._cache_path, tb_id)).replace('\\', '\\\\')

        # resuming from a breakpoint: reuse the existing dump
        if breakpoint != -1 and os.path.exists(cache_path):
            return cache_path

        if os.path.exists(cache_path):
            os.remove(cache_path)
        start = time.time()
        # allow arbitrarily large fields; set before any csv writing happens
        csv.field_size_limit(sys.maxsize)
        self.cursor.execute(sql)
        with open(cache_path, 'wb') as cache:
            _csv = csv.writer(cache, quoting=csv.QUOTE_MINIMAL)
            rows = self.cursor.fetchmany(10000)
            while rows:
                _csv.writerows(map(quote, rows))
                rows = self.cursor.fetchmany(10000)
        process_log().info('Cached to `%s`, cost %f second.' % (cache_path, time.time() - start))
        return cache_path

    @abc.abstractmethod
    def count(self, database, table, where):
        """
            Get the number of rows the sync will transfer.
        Args:
            database (str): source database name
            table (str): source table name
            where (str): filter clause ('' for a full count)

        Returns:
            sql_count (int):
        """
        pass

    @abc.abstractmethod
    def max_value(self, database, table, index, where):
        """
            Get the max value of the index field (persisted later as the
            next incremental-sync breakpoint, see write_index).
        Args:
            database (str): source database name
            table (str): source table name
            index (str): index field name
            where (str): filter clause applied before taking the max

        Returns:
            max (str):
        """
        pass

    def where(self, table, tb_id, fields):
        """
            Generate where clause for table
        Args:
            table (dict):
            tb_id (str):
            fields (list):

        Returns:
            str
        """
        if not table and not table.get('name'):
            raise Exception('Table is None, skipped.')

        index_field = table.get('index_field', {})
        where = table.get('where', '')

        text_date = table.get('text_date', {})

        where_clause = ''

        field = index_field.get('name') if index_field.get('name') else text_date.get('field')
        if not field:
            return where_clause, ''

        # get the index field's id
        filter_by_name = filter(lambda x: x.get('name') == field, fields)
        field_id = filter_by_name.pop().get('field_id', 'WARNING') if len(filter_by_name) > 0 else 'WARNING'

        if where:
            # deal with $VALUE$ where condition
            where_clause = self.deal_where(tb_id, field_id, where)
        elif text_date:
            field = text_date.get('field')
            delta = text_date.get('value')
            fmt = text_date.get('format')
            where_clause = self.text_convert(table.get('name'), tb_id, field, field_id, delta, fmt)

        return where_clause, field_id

    @abc.abstractmethod
    def text_convert(self, table, tb_id, field, field_id, delta, fmt):
        """Build a where clause for a text-typed date field; implemented by
        sub classes. `delta` and `fmt` come from the table's text_date conf
        ('value' and 'format' keys) — presumably an offset and a date format
        string; confirm in the concrete adapters."""
        pass

    def deal_where(self, tb_id, field_id, where):
        """

        Args:
            field_id (str):
            tb_id (str): the value tb_id
            where (str):

        Returns:
            where (str): where sql statement
        """

        index = where.lower().find('$value$')
        if index < 0:
            return where

        data = self.read_index(tb_id, field_id)

        return where.replace('$VALUE$', data) if data != '' else ''

    @staticmethod
    def read_index(tb_id, field_id):
        where_dir = 'where'
        where_file_old = os.path.join(where_dir, tb_id)
        where_file = os.path.join(where_dir, '%s_%s' % (tb_id, field_id))
        data = ''
        if os.path.exists(where_file_old):
            with open(where_file_old, 'r') as value:
                data = value.readline()

            os.remove(where_file_old)
            process_log().info('Read $VALUE$ from [%s], value:[%s]' % (tb_id, data))

        elif os.path.exists(where_file):
            with open(where_file, 'r') as value:
                data = value.readline()
            process_log().info('Read $VALUE$ from [%s_%s], value:[%s]' % (tb_id, field_id, data))
        return data

    @staticmethod
    def write_index(tb_id, field_id, new_data):
        if not new_data:
            return
        new_data = str(new_data).split('.')[0]
        if not os.path.exists('%s/where' % os.getcwd()):
            os.mkdir('%s/where' % os.getcwd())
        if new_data != 'None':
            with open('./where/%s_%s' % (tb_id, field_id), 'w') as fp:
                fp.write(str(new_data))
                process_log().info('Write $VALUE$ to [%s_%s], value:[%s]' % (tb_id, field_id, new_data))
        return

    def trans_field_type(self, col_type):
        col_type = string.lower(str(col_type)).split('(')[0]
        return self.type_mapper.get(col_type, 'string')

    @abc.abstractmethod
    def fetch_databases(self):
        """
        Fetch the raw database list; implemented by sub classes.

        :rtype : list
        :return: rows whose first element is the database name
            (consumed by get_all_databases)
        """
        pass

    def get_all_databases(self):
        """
        List user databases as [{'name': ..., 'selected': 0}, ...],
        hiding any database whose name appears in `sys_db`.
        """
        rows = self.fetch_databases()
        res = []
        if rows:
            for row in rows:
                if row[0] in self.sys_db:
                    continue
                res.append({'name': row[0], 'selected': 0})
        process_log().info('Got %s databases.' % len(res))
        return res

    @abc.abstractmethod
    def fetch_tables(self, database, view=True):
        """
        Fetch the raw table list of a database; implemented by sub classes.

        :rtype : list.
            ----example: [table_name, table_name, ...]

        Args:
            view: include views when True
            database: the database name
        """
        pass

    def get_all_tables(self, database, view=True):
        """
            List tables (and optionally views) of a database as conf skeletons.
        Args:
            database (str): the database name
            view (bool): include views when True

        Returns:
            res (list): one conf dict per table, [] on error
        """
        try:
            columns = self.fetch_tables(database, view)
        except Exception as e:  # `as` syntax: valid on Python 2.6+ and 3
            process_log().error(e)
            return []
        res = [
            {
                'name': u'%s' % r[0],
                'selected': 0,
                'increased': {
                    'cond': '',
                    'value': ''
                },
                'text_date': {
                    'format': '',
                    'value': ''
                },
                'increase_field': ''
            }
            for r in columns] if columns else []
        process_log().info('Got %s tables from database %s.' % (len(res), database))
        return res

    @abc.abstractmethod
    def fetch_fields(self, database, table):
        """
            Fetch field info by a sql query; implemented by sub classes.
        Args:
            database (str): source database name
            table (str): source table name
        Returns:
            fields (tuple): rows of (field_name, native_type, ...) —
                consumed by get_all_fields
        """
        pass

    def get_all_fields(self, database, table):
        """
            Get all field info of a table in a database.
        Args:
            database (str): the database name
            table (str): the table name

        Returns:
            res (list): [{'name', 'type', 'selected'}, ...], [] on error
        """
        try:
            columns = self.fetch_fields(database, table)
        except Exception as e:  # `as` syntax: valid on Python 2.6+ and 3
            process_log().error(e)
            return []
        res = [{'name': u'%s' % r[0], 'type': self.trans_field_type(r[1]), 'selected': 0} for r in columns]
        # processing_logger().info('Got %s fields from %s.%s' % (len(res), database, table))
        return res

    @abc.abstractmethod
    def fetch_record_count(self, database, table):
        """Fetch the record count of a table; implemented by sub classes.
        NOTE(review): not called by get_record_count in this base class —
        confirm whether it is used elsewhere."""
        pass

    @staticmethod
    def get_record_count(database, table):
        # NOTE(review): this is effectively a stub — it always returns '0'
        # and never queries the database; the except branch is unreachable
        # because the try body cannot raise, and fetch_record_count is not
        # called (a staticmethod has no instance to call it on). Confirm
        # whether real counting was disabled on purpose.
        try:
            count = '0'
        except Exception:
            error_log().error('Error when fetch table record form `%s`.`%s`' % (database, table))
            count = '-1'
        return count

    def get_all_db_info(self, database='', view=True):
        """
            Get all db info in the given database, including table and field info.
        Args:
            database (str): the database name; when empty, every database is scanned
            view (bool): get views too if True

        Returns:
            dbs (dict): {db_name: {table_name: table_conf, ...}, ...}
        """
        def _fill(db_name, tb):
            # attach field list and record count to a table conf in place
            tb['fields'] = self.get_all_fields(db_name, tb['name'])
            tb['count'] = self.get_record_count(db_name, tb['name'])
            return tb

        if not database:
            dbs = {}
            for db in self.get_all_databases():
                try:
                    tables = self.get_all_tables(db['name'], view)
                except Exception as e:  # `as` syntax: valid on Python 2.6+ and 3
                    warn().warning(u'Try to fetch tables in database %s failed, skipped. reason:%s' % (db['name'], e))
                    err_name = u"%s (读取表信息失败)" % db['name']
                    dbs[err_name] = {}
                    continue
                dbs[db['name']] = {}
                for tb in tables:
                    dbs[db['name']][tb['name']] = _fill(db['name'], tb)
            return dbs
        else:
            dbs = {
                database: {}
            }
            for tb in self.get_all_tables(database, view):
                dbs[database][tb['name']] = _fill(database, tb)
            return dbs

if __name__ == '__main__':
    # library module; nothing to run directly
    pass
