# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
from scrapy.utils.project import get_project_settings

from amac_spider.utils.log import make_logger
from amac_spider.utils.handler import get_unique, get_table_name, get_uniques
from amac_spider.utils.mysql_connection_pool.server import DBUtilSyncMysqlConnectionPoolMaker, TwistedAsyncMysqlConnectionPoolMaker
from amac_spider.utils.base_pipeline import BaseMysqlPipeline


# Read the Scrapy project settings once instead of re-resolving them for
# every key (the original called get_project_settings() six times).
_settings = get_project_settings()

# MySQL connection parameters consumed by the connection-pool makers below.
MYSQL_CONFIG = {
    'host': _settings['MYSQL_HOST'],
    'port': _settings['MYSQL_PORT'],
    'user': _settings['MYSQL_USER'],
    'password': _settings['MYSQL_PASSWORD'],
    'db': _settings['MYSQL_DB'],
    'charset': _settings['MYSQL_CHARSET'],
}


class AmacspiderInsertKafkaPipeline(object):
    """Placeholder pipeline intended to publish scraped items to Kafka.

    The Kafka producer logic is not implemented yet; items pass through
    unchanged so later pipelines still receive them.
    """

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory hook: build the pipeline from a crawler."""
        return cls(crawler)

    def __init__(self, crawler):
        # Keep the crawler around for future access to settings/signals.
        self.crawler = crawler

    def process_item(self, item, spider):
        # TODO: push the item onto Kafka here.
        return item

    def close_spider(self, spider):
        # TODO: close the Kafka connection here.
        pass


class AmacspiderInsertMysqlPipeline(object):
    """Pipeline that upserts scraped items into MySQL via an async pool."""

    def __init__(self, crawler):
        # Crawler reference; used to resolve the spider class name for logging.
        self.crawler = crawler

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy factory hook: build the pipeline from a crawler."""
        return cls(crawler)

    def process_item(self, item, spider):
        """Upsert the item into MySQL and hand it to the next pipeline.

        Fix: the original did not return the item, so any pipeline ordered
        after this one silently received ``None`` instead of the item
        (Scrapy requires ``process_item`` to return the item or raise
        ``DropItem``).
        """
        unique = get_unique(spider.name)
        if not isinstance(unique, dict):
            table_name = get_table_name(spider.name)
        else:
            # If a spider yields three or more item types, the settings hold
            # a dict of unique indexes keyed by item type; each item must then
            # carry a 'type' field so the target table can be named.
            page_type = item.get('type', None)
            unique = unique.get(page_type, None)
            table_name = get_table_name(spider.name, page_type)
        self.handler.upsert(item=item, unique=unique, table_name=table_name)
        return item

    def open_spider(self, spider):
        # Attach a dedicated logger and build the Twisted async MySQL pool.
        spider.my_logger = make_logger(self.crawler.spidercls.name)
        self.handler = TwistedAsyncMysqlConnectionPoolMaker(**MYSQL_CONFIG)()

    def close_spider(self, spider):
        # Release the MySQL connection pool.
        self.handler.close()


class TestPipeline(BaseMysqlPipeline):
    """Batching MySQL pipeline built on :class:`BaseMysqlPipeline`."""

    def process_item(self, item, spider):
        """Queue the item for batched MySQL insertion and pass it on.

        Fix: the original did not return the item, so pipelines ordered
        after this one received ``None`` instead of the item.
        """
        # Items default to the 'detail' page type when none is given.
        page_type = item.get('type', 'detail')
        table_name = get_table_name(spider.name, page_type)
        uniques = get_uniques(spider.name, page_type)
        self.add_datas(item, table_name, uniques)
        # Flush buffered rows for this table in batches of 20.
        self.send_datas(table_name, 20)
        return item



