# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import json, re
from Icrawler9.tools import utils, configs


class Icrawler9Pipeline:
    """Default no-op pipeline: forwards every item unchanged."""

    def process_item(self, item, spider):
        # Nothing to transform here; hand the item to the next pipeline stage.
        return item

class mongodbPipeline(object):
    """Clean scraped items (NBSP, HTML tags, whitespace) and persist them
    to MongoDB, one collection per ``item.table``."""

    def __init__(self):
        # cli_mongo() is expected to return a (MongoClient, Database) pair
        # — see Icrawler9.tools.utils.
        self.client, self.db = utils.cli_mongo()

    def process_item(self, item, spider):
        """Normalize the item's fields and insert it into ``self.db[item.table]``.

        Field handling:
        - dict: for item classes in ``configs.RESOURCE_CLASSNAMES``, serialized
          to JSON (non-ASCII preserved) with NBSP replaced; otherwise left as-is.
        - list: NBSP stripped from string elements; serialized to JSON for
          resource classes, otherwise stored as the cleaned list.
        - str: NBSP replaced, HTML tags stripped (small/sub/sup preserved),
          whitespace runs collapsed to a single space.

        Returns the item on success; returns None (drops it) on insert failure.
        Duplicate-key errors are deliberately ignored (re-crawls are expected).
        """
        if not item:
            return
        post = dict(item)
        # Hoist the invariant class-name check out of the field loop.
        is_resource = item.__class__.__name__ in configs.RESOURCE_CLASSNAMES
        for key, value in post.items():
            if isinstance(value, dict):
                if is_resource:
                    post[key] = json.dumps(value, ensure_ascii=False).replace(u'\xa0', ' ')
            elif isinstance(value, list):
                cleaned = [v.replace(u'\xa0', ' ') if isinstance(v, str) else v for v in value]
                # BUG FIX: the cleaned list was previously discarded for
                # non-resource classes (rebound locally, never stored back).
                post[key] = json.dumps(cleaned, ensure_ascii=False) if is_resource else cleaned
            elif isinstance(value, str):
                # Drop all tags except <small>/<sub>/<sup> pairs, then
                # collapse whitespace runs into single spaces.
                post[key] = re.sub(r'\s+', ' ', re.sub(r'<(?!(small|/small|sub|/sub|sup|/sup))[^>]*>', '', value.replace(u'\xa0', ' ')))
        try:
            # insert_one(): Collection.insert() was deprecated in PyMongo 3
            # and removed in PyMongo 4.
            self.db[item.table].insert_one(post)
            return item
        except Exception as e:
            if not re.search('duplicate key error', str(e)):
                print(e)

    def close_spider(self, spider):
        # Release the MongoDB connection pool when the spider finishes.
        self.client.close()
