# -*- coding: utf-8 -*-
import copy
import json
import threading
import traceback

from bdp.i_crawler.i_data_saver.ttypes import DataSaverRsp

import common
from common import log, TimecostHelper
from conf import config as conf
from data_save_task import DataSaveTask
from merge_manager import MergeManager
from statistics.statistics_writer import StatisticsWriter
from topic_manager import topic_manager
from validate_manager import ValidateManager

from pymongo.errors import DuplicateKeyError

class DataFetcher(object):
    """Loads the pre-existing document for a task's record_id from mongo."""

    def __init__(self, data_saver):
        # Shared TopicManager: topic_dict maps topic_id -> {'collection': ..., ...}
        self._topic_manager = data_saver.topic_manager

    def fetch(self, task):
        """Look up the current document keyed by task.record_id and stash it
        on task.old_entity_data (None when no such record exists yet).

        Raises Exception when record_id is not a string type.
        """
        record_id = task.record_id
        # Short-circuit keeps the original check order: str first, unicode only
        # when the str test fails.
        if not (isinstance(record_id, str) or isinstance(record_id, unicode)):
            raise Exception('DataFetcher.fetch: record_id[%s] is not string or unicode!!' % record_id)

        collection = self._topic_manager.topic_dict[task.topic_id]['collection']
        log.debug('DataFetcher.fetch start: topic_id[%s], db[%s], collection[%s], record_id[%s]' %
            (task.topic_id, collection.database.name, collection.name, record_id))
        # find_one returns the matching document or None; either is a valid
        # "old" state for the later merge step.
        task.old_entity_data = collection.find_one(
            filter = {
                common.FIELDNAME_RECORD_ID: record_id
            }
        )
        log.debug('DataFetcher.fetch finished: topic_id[%s], db[%s], collection[%s], record_id[%s]' %
            (task.topic_id, collection.database.name, collection.name, record_id))

class DataUpdater(object):
    """Upserts a task's merged document into its topic's mongo collection."""

    def __init__(self, data_saver):
        # Shared TopicManager: topic_dict maps topic_id -> {'collection': ..., ...}
        self._topic_manager = data_saver.topic_manager

    def update(self, task):
        """Replace (or insert, via upsert) the document keyed by
        task.record_id with task.current_entity_data.

        Returns the pymongo UpdateResult.  Re-raises DuplicateKeyError when
        the upsert still fails after all client-side retries; any other
        exception propagates immediately (no retry).
        """
        collection = self._topic_manager.topic_dict[task.topic_id]['collection']
        log.debug('DataUpdater.update start: topic_id[%s], db[%s], collection[%s]' 
            % (task.topic_id, collection.database.name, collection.name)
        )

        # Workaround for mongo bug #SERVER-14322: concurrent upserts on the
        # same unique key can make replace_one fail with DuplicateKeyError,
        # so retry on the client side.
        retry_count = 1000 # Should be more than enough
        while True:
            retry_count -= 1
            try:
                update_result = collection.replace_one(
                    filter = {
                        common.FIELDNAME_RECORD_ID: task.record_id
                    },
                    upsert = True,
                    replacement = task.current_entity_data
                )
            except DuplicateKeyError:
                # BUGFIX: the old loop fell through after exhausting its
                # retries, leaving update_result unbound and raising a
                # NameError below instead of the real error.  Re-raise the
                # DuplicateKeyError on the final attempt instead.
                if retry_count <= 0:
                    raise
            else:
                break

        log.debug('DataUpdater.update finished: topic_id[%s], db[%s], collection[%s], record_id[%s], update_result[%s]' 
            % (task.topic_id, collection.database.name, collection.name, task.record_id, update_result.raw_result)
        )
        return update_result

class DataSaver(object):
    """Facade over the save pipeline: fetch -> merge -> validate -> update,
    plus success/failure statistics.

    Every instance registers itself in a class-level pool so configuration
    reloads can be broadcast to all live DataSaver objects via reload_all().
    """

    lock = threading.Lock()  # protects obj_pool against concurrent registration
    obj_pool = []            # every constructed DataSaver instance

    @classmethod
    def reload_all(cls, topic_id = None):
        # Reload the validators of every pooled instance (topic_id=None: all).
        for obj in cls.obj_pool:
            obj._validate_manager.reload(topic_id)

    def reload(self, topic_id):
        """Reload topic config and all validators, then return the topic schema."""
        self.topic_manager.reload(topic_id)
        DataSaver.reload_all(topic_id)
        return self.get_schema(topic_id)

    def __init__(self):
        self.topic_manager      = topic_manager     # shared TopicManager singleton
        self._fetcher           = DataFetcher(self)
        self._merge_manager     = MergeManager(self)
        self._validate_manager  = ValidateManager(self)
        self._updater           = DataUpdater(self)
        self._statistics_writer = StatisticsWriter(self)
        # self._kafka_server      = Pykafka(conf.kafka)
        # Register this instance in the pool so reload_all() can reach it.
        with self.lock:
            DataSaver.obj_pool.append(self)

    def check_data(self, entity_extractor_info):
        """Dry run: exercise fetch/merge/validate without writing to mongo."""
        return self.save_data(entity_extractor_info, False)

    def save_data(self, entity_extractor_info, do_save = True):
        """Run the pipeline for one record; with do_save=False it stops after
        validation (used by check_data).

        Returns a DataSaverRsp: status 0 on success, status 1 (with the
        traceback embedded in message) on any failure.
        """
        log.info('processing data, do_save[%s]' % do_save)
        th_total = TimecostHelper()
        th = TimecostHelper()
        task = None
        try:
            # `in` instead of the deprecated dict.has_key().
            if entity_extractor_info.topic_id not in self.topic_manager.topic_dict:
                raise Exception('topic_id[%s] is unknown !!!' % (entity_extractor_info.topic_id))

            # TODO: fetch and update could be batched (mongo bulk ops) to cut
            # round trips and raise throughput; needs a larger redesign.
            task = DataSaveTask(entity_extractor_info)
            log.debug('time_cost_ms[%s] for DataSaveTask create.' % (1000 * th.get_timecost_and_restart()))

            self._fetcher.fetch(task)
            log.debug('time_cost_ms[%s] for fetch.' % (1000 * th.get_timecost_and_restart()))

            self._merge_manager.merge(task)
            log.debug('time_cost_ms[%s] for merge.' % (1000 * th.get_timecost_and_restart()))

            self._validate_manager.validate(task)
            log.debug('time_cost_ms[%s] for validate.' % (1000 * th.get_timecost_and_restart()))

            if do_save:
                update_result = self._updater.update(task)
                # NOTE(review): kafka publishing is deliberately disabled by
                # the `False and` guard, and self._kafka_server is never
                # constructed (see the commented line in __init__) -- do not
                # re-enable this branch without restoring that attribute.
                if False and task.topic_id == conf.kafka['topic_id']:
                    try:
                        record_to_send = copy.copy(task.current_entity_data)
                        if update_result.upserted_id is not None:
                            record_to_send['_id'] = str(update_result.upserted_id)
                        else:
                            record_to_send['_id'] = str(record_to_send.get('_id', ''))
                        msg_raw = json.dumps(record_to_send)
                        self._kafka_server.produce_message(msg_raw)
                        log.info('kafka\tproduce_message\tsuccess')  # fixed 'kafa' typo
                    except Exception:
                        log.error('kafka\tproduce_message\tfail')
                log.debug('time_cost_ms[%s] for update.' % (1000 * th.get_timecost_and_restart()))
        except Exception:
            error_info = 'data process failed!!! total time_cost_ms[%s]. traceback[%s], entity_extractor_info[%s]' % \
                (1000 * th_total.get_timecost(), traceback.format_exc(), repr(entity_extractor_info))
            log.error(error_info)

            # Record a failure statistic (only when we were actually saving).
            if do_save and task is not None:
                th.restart()
                self._statistics_writer.log_failure(task)
                log.debug('time_cost_ms[%s] for statistics.' % (1000 * th.get_timecost_and_restart()))

            return DataSaverRsp(
                    status = 1,
                    message = error_info,
                    data = None
                )
        else:
            success_info = 'data successfully processed, total time_cost_ms[%s]!' % (1000 * th_total.get_timecost())
            log.info(success_info)

            # Record success statistics (only when we actually saved).
            # update_result is always bound here: do_save=True implies the
            # update branch above ran without raising.
            if do_save and task is not None:
                th.restart()
                if update_result.matched_count > 0:
                    log.debug('UPDATE an existing document !')
                    self._statistics_writer.log_success_update(task)
                if update_result.upserted_id is not None:
                    log.debug('INSERT a new document !')
                    self._statistics_writer.log_success_insert(task)
                log.debug('time_cost_ms[%s] for statistics.' % (1000 * th.get_timecost_and_restart()))

            return DataSaverRsp(
                    status = 0,
                    message = success_info,
                    data = None
                )

    def get_schema(self, topic_id):
        """Return the topic's schema (utf-8 encoded) in a DataSaverRsp;
        status 1 with repr(exception) for unknown topics or other errors."""
        try:
            schema_data = self.topic_manager.topic_dict[topic_id]['schema']
            return DataSaverRsp(
                    status = 0,
                    message = 'OK',
                    data = schema_data.encode('utf8')
                )
        except Exception as e:
            return DataSaverRsp(
                    status = 1,
                    message = repr(e),
                    data = None
                )

datasaver=DataSaver()   # module-level DataSaver singleton, shared by importers
