# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html

import re
from scrapy.exceptions import DropItem
import pymongo
import logging
logger = logging.getLogger(__name__)


from xiaohongshu.items import *
#Remove Prefix & Suffix
def RemovePrefixInItemURL(s):
    """
    Strip the site path prefix from a relative URL, keeping only the
    trailing identifier.

    Recognized prefixes: /discovery/item/, /user/profile/ and
    /selected_board/. Any other string is returned unchanged (the
    original implicitly returned None here, which broke later string
    processing, and raised IndexError on short inputs such as
    '/discovery').

    example: '/discovery/item/55943dffb4c4d637bd4cb2e9'==>'55943dffb4c4d637bd4cb2e9'
    item['related_note'] = map(RemovePrefixInItemURL,item['related_note'])
    """
    s_list = s.split("/")
    if len(s_list) <= 1:
        return s
    # Length guards: s_list[2] may not exist for two-segment inputs.
    if len(s_list) >= 3 and s_list[1] == 'discovery' and s_list[2] == 'item':
        return s_list[-1]
    if len(s_list) >= 3 and s_list[1] == 'user' and s_list[2] == 'profile':
        return s_list[-1]
    if s_list[1] == 'selected_board':
        return s_list[-1]
    # No known prefix: pass the input through unchanged.
    return s
        
def RemovePrefixSuffixInGoodURL(s):
    """
    Extract the goods id from a relative goods URL, dropping the query
    string.

    example: '/goods/55f648011a2b141b49e724ee?xhs_g_s=0032'==>'55f648011a2b141b49e724ee'
    item['recommendation'] = map(RemovePrefixSuffixInGoodURL,item['recommendation'])

    Strings that do not look like '/goods/<id>' are returned with only
    the query string removed (the original raised IndexError on inputs
    without '/' and implicitly returned None on other non-goods URLs).
    """
    s = s.split("?")[0]
    s_list = s.split("/")
    # Guard: require at least ['', 'goods', '<id>'] before indexing.
    if len(s_list) >= 3 and s_list[1] == 'goods':
        return s_list[2]
    return s
            
            

def RemoveEmoji(s):
    """
    Remove bracketed emoji markers such as '[smile]' from s.

    Uses a non-greedy match so text between two separate markers is
    preserved; the original greedy pattern deleted everything from the
    first '[' to the last ']' in the string.
    """
    return re.sub(r'\[.*?\]', '', s)
            
def RemoveBlanks(s):
    """
    Remove all whitespace (spaces, tabs, newlines) from s.

    None and the empty string pass through unchanged so callers can
    feed optional item fields directly.
    """
    # `is None` instead of `== None`: identity test is the Python idiom.
    if s is None or len(s) == 0:
        return s
    return "".join(s.split())
    
#############################
class DuplicatesPipeline(object):
    """Drop items and albums whose _id was already seen in this crawl."""

    def __init__(self):
        # _ids observed so far; shared across both item and album types.
        self.ids_seen = set()

    def process_item(self, item, spider):
        """Pass the item through, or raise DropItem on a repeated _id."""
        if isinstance(item, XiaohongshuItem):
            label = "item"
        elif isinstance(item, XiaohongshuAlbum):
            label = "album"
        else:
            # Other item types are not deduplicated here.
            return item
        if item['_id'] in self.ids_seen:
            raise DropItem("Duplicate %s found: %s" % (label, item))
        self.ids_seen.add(item['_id'])
        return item
#############################            
class XiaohongshuPipeline(object):
    """Normalize the string fields of XiaohongshuItem before storage."""

    def process_item(self, item, spider):
        """
        Clean URLs, whitespace and emoji markers on XiaohongshuItem
        instances; all other item types pass through untouched.
        """
        if not isinstance(item, XiaohongshuItem):
            return item
        # list(...) around map(): on Python 3 map() returns a lazy
        # iterator, which would be stored in the item (and then fail to
        # serialize correctly in downstream pipelines) instead of a list.
        item['recommendation'] = list(map(RemovePrefixSuffixInGoodURL, item['recommendation']))
        # Strip prefixes, then deduplicate related notes.
        item['related_note'] = list(set(map(RemovePrefixInItemURL, item['related_note'])))
        item['album'] = list(map(RemovePrefixInItemURL, item['album']))
        item['title'] = RemoveBlanks(item['title'])
        item['author'] = RemoveEmoji(RemoveBlanks(item['author']))
        item['author_id'] = RemovePrefixInItemURL(item['author_id'])
        item['product'] = RemoveBlanks(item['product'])
        item['description'] = RemoveEmoji(RemoveBlanks(item['description']))
        return item
           

#############################       
class MongoPipeline(object):
    """
    Persist crawled items into MongoDB, one collection per item type.

    Connection parameters come from the Scrapy settings MONGO_URI and
    MONGO_DATABASE (see from_crawler).
    """
    # Collection names per item type.
    collection_item = 'items'
    collection_search = 'searches'
    collection_album = 'albums'

    def __init__(self, mongo_uri, mongo_db):
        self.mongo_uri = mongo_uri
        self.mongo_db = mongo_db

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy factory hook: build the pipeline from project settings.
        return cls(
            mongo_uri=crawler.settings.get('MONGO_URI'),
            mongo_db=crawler.settings.get('MONGO_DATABASE')
        )

    def open_spider(self, spider):
        # One client per spider run; closed in close_spider.
        self.client = pymongo.MongoClient(self.mongo_uri)
        self.db = self.client[self.mongo_db]

    def close_spider(self, spider):
        self.client.close()

    def process_item(self, item, spider):
        """
        Insert the item into the collection matching its type; duplicate
        keys are logged and ignored. Always returns the item so later
        pipelines still receive it (the original returned None, which
        silently dropped every item from the rest of the chain).
        """
        if item is None:
            # An upstream pipeline returned None instead of the item.
            logger.debug('Item has been filtered out by some pipline! Check!')
            return item
        dict_item = dict(item)
        # insert_one() replaces the deprecated Collection.insert()
        # (removed in pymongo 4).
        if isinstance(item, XiaohongshuItem):
            try:
                self.db[self.collection_item].insert_one(dict_item)
            except pymongo.errors.DuplicateKeyError:
                logger.debug('_id %s has been existed', dict_item["_id"])
        elif isinstance(item, XiaohongshuAlbum):
            try:
                self.db[self.collection_album].insert_one(dict_item)
            except pymongo.errors.DuplicateKeyError:
                logger.debug('_id %s has been existed', dict_item["_id"])
        elif isinstance(item, XiaohongshuSearch):
            try:
                self.db[self.collection_search].insert_one(dict_item)
            except pymongo.errors.DuplicateKeyError:
                logger.debug('keyword %s has been existed', dict_item["keyword"])
        return item