# -*- coding: utf-8 -*-
import logging
from traceback import format_exc
from pymongo import UpdateOne
from scrapy import signals
from zc_core.client.mongo_client import Mongo

logger = logging.getLogger(__name__)


class FixDataExtension(object):
    """Scrapy extension that repairs known-bad brand data after a crawl.

    When a ``full``/``part`` spider closes, the documents listed in
    ``to_fix_brands`` are rewritten (via ``$set``) in both the shared item
    pool collection and the batch-specific data collection.
    """

    def __init__(self):
        # Bug fix: ``super()`` alone is a no-op; the base initializer must
        # actually be called.
        super().__init__()
        # Documents whose stored brand fields are known to be wrong; each
        # entry is applied as a $set update on the matching _id.
        self.to_fix_brands = [
            {'_id': '1284039', 'brandName': '佳能'},
        ]

    @classmethod
    def from_crawler(cls, crawler):
        """Standard Scrapy extension hook: build the extension and wire it
        to the ``spider_closed`` signal."""
        ext = cls()
        crawler.signals.connect(ext.spider_closed, signal=signals.spider_closed)
        return ext

    def spider_closed(self, spider):
        """Run the brand fix when a full/part spider finishes.

        Errors are logged and swallowed deliberately so that a failure in
        this extension can never break spider shutdown.

        :param spider: the spider instance being closed.
        """
        try:
            # Only act on full/part spiders (exact names or '*full*' variants).
            if spider.name and (spider.name in ['full', 'part'] or 'full' in spider.name):
                # Bug fix: not every spider defines ``batch_no``; use getattr
                # so missing attributes fall through to the "no batch" branch
                # instead of raising AttributeError.
                batch_no = getattr(spider, 'batch_no', None)
                if batch_no:
                    self.fix_brand(batch_no)
                    spider.logger.info("close full spider %s with batch %s" % (spider.name, batch_no))
                else:
                    spider.logger.info("close full spider %s without update batch ...", spider.name)
        except Exception:
            # Best-effort cleanup: log the traceback and continue shutdown.
            spider.logger.error(format_exc())

    # 修正错误品牌的商品 (fix items that carry a wrong brand)
    def fix_brand(self, batch_no):
        """Apply the configured brand corrections to both Mongo collections.

        :param batch_no: batch identifier; selects the ``data_<batch_no>``
            collection alongside the shared ``item_data_pool``.
        """
        bulk_list = [
            UpdateOne({'_id': fix.get('_id')}, {'$set': fix}, upsert=False)
            for fix in self.to_fix_brands
        ]
        # Bug fix: pymongo's bulk_write raises InvalidOperation on an empty
        # request list; skip cleanly when there is nothing to fix.
        if not bulk_list:
            return

        Mongo(batch_no=batch_no).bulk_write('item_data_pool', bulk_list)
        result = Mongo(batch_no=batch_no).bulk_write('data_{}'.format(batch_no), bulk_list)

        # Bug fix: previous message said "offline items", but this method
        # updates brand fields.
        logger.info("fix brand items [%s] in batch [%s]" % (result, batch_no))
