from spidertools.utils.time_utils import get_current_datetime,get_current_date
from spidertools.db_utils import mongdb_utils
from crawlab import save_item
import os

class ProjectHtmlPipeline(object):
    """Scrapy pipeline that upserts crawled HTML items into MongoDB.

    Documents are keyed by ``origin_url``: an existing document is updated
    (refreshing ``update_time``), a new one is inserted with both
    ``create_time`` and ``update_time``.  When the spider closes, the
    number of items processed is reported to crawlab via ``save_item``.
    """

    def __init__(self):
        self.myclient, self.mydb = mongdb_utils.get_mongo_connect(mongdb_utils.default_mongo_url)
        self.project_env = os.environ.get("PROJECT_ENV")

        # os.environ.get returns None when PROJECT_ENV is unset, and
        # None == 'test' is already False, so a single equality check
        # is sufficient (the original `!= None and ==` was redundant).
        if self.project_env == 'test':
            self.mycol = self.mydb['project_html_test']
        else:
            self.mycol = self.mydb['project_html']
        # Items processed during this run; reported in close_spider.
        self.total_count = 0

    def open_spider(self, spider):
        # Scrapy's actual spider-open pipeline hook.  The misnamed
        # `start_spider` below is never called by Scrapy; it is kept
        # only for backward compatibility with any direct callers.
        pass

    def start_spider(self, spider):
        pass

    def close_spider(self, spider):
        # Crawlab expects the reported result to be a dict.
        current_date = get_current_date()
        result = {current_date: self.total_count}

        # Report today's processed-item count to crawlab.
        save_item(result)
        self.myclient.close()

    def process_item(self, item, spider):
        """Upsert ``item`` into MongoDB, optionally renaming keys first.

        If the spider defines a non-empty ``convert_dict``, item keys
        present in it are renamed through the mapping before storage.
        Raises ``KeyError`` if the (possibly renamed) item lacks
        ``origin_url``.  Returns the original item unchanged.
        """
        insert_item = dict(item)
        if hasattr(spider, "convert_dict") and spider.convert_dict:
            new_insert_item = {}
            for key, value in insert_item.items():
                if key in spider.convert_dict:
                    new_insert_item[spider.convert_dict[key]] = value
                else:
                    new_insert_item[key] = value
            insert_item = new_insert_item

        search_dict = {"origin_url": insert_item['origin_url']}

        # Cursor.count() was removed in PyMongo 4; count_documents with
        # limit=1 is the supported (and cheaper) existence check.
        now = get_current_datetime()
        if self.mycol.count_documents(search_dict, limit=1):
            print("update!!!!!!!")
            insert_item['update_time'] = now
            update_value = {"$set": insert_item}
            self.mycol.update_one(search_dict, update_value)
        else:
            print("insert!!!!!!!")
            # One shared timestamp so create_time and update_time match
            # exactly on insert (two separate calls could differ).
            insert_item['create_time'] = now
            insert_item['update_time'] = now
            self.mycol.insert_one(insert_item)
        self.total_count += 1

        return item
