from scrapy.utils.project import get_project_settings

from copy import deepcopy
from amac_spider.utils.log import make_logger
from amac_spider.utils.mysql_connection_pool.server import DBUtilSyncMysqlConnectionPoolMaker


# MySQL connection parameters, read once from the Scrapy project settings.
# The original called get_project_settings() once per key; one call is enough
# (and avoids re-resolving the settings module six times at import time).
_settings = get_project_settings()
MYSQL_CONFIG = {
    'host': _settings['MYSQL_HOST'],
    'port': _settings['MYSQL_PORT'],
    'user': _settings['MYSQL_USER'],
    'password': _settings['MYSQL_PASSWORD'],
    'db': _settings['MYSQL_DB'],
    'charset': _settings['MYSQL_CHARSET'],
}


class BasePipeline(object):
    """Base Scrapy item pipeline with per-target batching.

    Items are buffered per ``target_name`` in ``self._datas`` and flushed in
    batches via :meth:`send_datas`, which delegates the actual write to
    :meth:`do_sends` (to be overridden by storage-specific subclasses).
    """

    def __init__(self, crawler):
        self.crawler = crawler
        # target_name -> {'data': [...], 'receive': int, 'send': int}
        self._datas = {}
        # target_name -> {'uniques': [...]} (unique-key columns for upserts)
        self._meta_info = {}

    @classmethod
    def from_crawler(cls, crawler):
        # Standard Scrapy extension hook.
        return cls(crawler)

    def open_spider(self, spider):
        '''
        Called when the spider starts; typically opens storage connections.
        :param spider: the running spider instance
        :return:
        '''
        raise NotImplementedError()

    def close_spider(self, spider):
        '''
        Called when the spider closes; typically releases connections and
        collects statistics.
        :param spider: the running spider instance
        :return:
        '''
        raise NotImplementedError()

    def add_datas(self, data, target_name, uniques):
        '''
        Buffer data for later batch sending.
        :param data: a single item (dict) or a list of items to buffer
        :param target_name: identifier of the write target (e.g. table name)
        :param uniques: unique-key field name(s) for the target; a single
                        value is wrapped into a one-element list
        :return:
        '''
        # Deep-copy so later mutation of the caller's item cannot corrupt
        # the buffered copy.
        data = deepcopy(data)
        if target_name not in self._datas:
            self._datas[target_name] = {
                'data': [],         # buffered items awaiting send
                'receive': 0,       # total items received
                'send': 0           # total items sent
            }
        # Collect the data.
        if isinstance(data, list):
            # BUG FIX: list has no ``extends`` method — this raised
            # AttributeError for every list payload. Use ``extend``.
            self._datas[target_name]['data'].extend(data)
            self._datas[target_name]['receive'] += len(data)
        else:
            self._datas[target_name]['data'].append(data)
            self._datas[target_name]['receive'] += 1

        # Collect the storage meta information (unique-key columns).
        if target_name not in self._meta_info:
            self._meta_info[target_name] = {
                'uniques': []
            }
        if isinstance(uniques, list):
            self._meta_info[target_name]['uniques'] = uniques
        else:
            self._meta_info[target_name]['uniques'] = [uniques]

    def get_uniques(self, target_name):
        """Return the unique-key columns recorded for *target_name*,
        or None if the target is unknown."""
        meta_info = self._meta_info.get(target_name)
        if meta_info:
            return meta_info['uniques']

    def send_datas(self, target_name, max_size):
        '''
        Flush the buffer for *target_name* when it has reached *max_size*
        items (or unconditionally when ``max_size <= 0``).
        :param target_name: target identifier
        :param max_size: flush threshold; <= 0 forces an immediate flush
        :return:
        '''
        if not self._datas.get(target_name):
            return

        if max_size > 0 and len(self._datas[target_name]['data']) < max_size:
            return
        success = self.do_sends(self._datas[target_name]['data'], target_name)
        self._datas[target_name]['send'] += success
        # Reset the buffer after sending.
        self._datas[target_name]['data'] = []

    def do_sends(self, datas, target_name):
        '''
        Actual send routine; subclasses must override with their storage
        backend and support batch writes.
        :param datas: list of buffered items to write
        :param target_name: target identifier
        :return: number of items successfully sent (0 in this base class)
        '''
        return 0

    def send_all_datas(self):
        """Force-flush every target's buffer regardless of size."""
        # Only the keys are needed; send_datas never adds/removes targets,
        # so iterating the dict directly is safe.
        for target_name in self._datas:
            self.send_datas(target_name, -1)

    def print_datas_info(self, spider_name, target_name):
        """Print buffering statistics for *target_name* (debug aid)."""
        if target_name not in self._datas:
            print('spider {} current has not target_name:{}'.format(spider_name, target_name))
            return
        data = self._datas[target_name]
        # BUG FIX: the buffer key is 'data', not 'datas' — the original
        # raised KeyError for every existing target.
        print('spider {} target_name:{} current has datas:{}, receive:{}, send:{}'.format(
            spider_name, target_name, len(data['data']), data['receive'], data['send']))


class BaseMysqlPipeline(BasePipeline):
    """Pipeline that flushes buffered items into MySQL via a connection pool."""

    def __init__(self, crawler):
        super().__init__(crawler)
        # Pool implementation selector, taken from the project settings.
        self.pool_type = self.crawler.settings.getint('MYSQL_DB_POOL_TYPE')

    def open_spider(self, spider):
        # Attach a per-spider logger and build the pooled MySQL handler.
        spider.my_logger = make_logger(self.crawler.spidercls.name)
        pool_maker = DBUtilSyncMysqlConnectionPoolMaker(**MYSQL_CONFIG)
        self.handler = pool_maker()

    def do_sends(self, datas, target_name):
        # Batch-insert into the table named after the target, using the
        # unique-key columns recorded for it.
        self.handler.insert(
            item=datas,
            unique=self.get_uniques(target_name),
            table_name=target_name,
        )
        return len(datas)

    def close_spider(self, spider):
        # Release the MySQL connection pool.
        self.handler.close()