# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exceptions import DropItem
import pymongo
from girl.items.city import *
from girl.items.city import girlItem as girl
from girl.items.dmm import *
from girl.items.hotel import *
from girl.shell.city import *
from girl.items import poke
from girl.items import night
from girl.items import host2
# from scrapy.pipelines.images import ImagesPipeline
from scrapy.http import Request
import requests as req
import hashlib
from scrapy.utils.python import to_bytes
from scrapy.utils.misc import md5sum
import redis
import platform
from spiderexceptions import *
from datetime import *
import importlib
from dbShell import conf
from pprint import pprint
# from girl.shell.city import *
# from girl.shell.download import down
from scrapy.utils.project import get_project_settings
# Load Scrapy settings now so we can locate the project's shell/ directory.
setting = get_project_settings()
import sys
# Make <project>/shell/ importable; must happen BEFORE `from download import down`.
sys.path.insert(0, setting.get("DIR") + "/shell/")
from download import down
class NewDataPipeline(object):
    """Scrapy item pipeline.

    Responsibilities:
      * queue asynchronous image downloads (via the ``down`` celery task)
        for every image URL carried by an item,
      * upsert/insert items into MongoDB (per-spider config from ``conf``),
      * on spider close, write a crawl-summary document to ``spiderjob``.
    """

    def __init__(self):
        # Map host -> authenticated MongoClient.
        # NOTE(review): credentials are hard-coded in source; they should be
        # moved to settings / environment variables.
        self.client = {}
        self.client["47.75.39.50"] = pymongo.MongoClient(
            "47.75.39.50", socketKeepAlive=True, maxPoolSize=200)
        self.client["47.75.39.50"].admin.authenticate(
            "admin321", "dsf::6666,,<<", mechanism='SCRAM-SHA-1')

        self.client["47.74.21.140"] = pymongo.MongoClient(
            "47.74.21.140", socketKeepAlive=True, maxPoolSize=200)
        self.client["47.74.21.140"].admin.authenticate(
            "admin321", "dsf::6666,,<<", mechanism='SCRAM-SHA-1')

        # Skip the localhost client on macOS dev machines.
        if platform.system() != 'Darwin':
            self.client["localhost"] = pymongo.MongoClient(
                "localhost", socketKeepAlive=True, maxPoolSize=200)
            self.client["localhost"].admin.authenticate(
                "admin321", "dsf::6666,,<<", mechanism='SCRAM-SHA-1')

    def process_item(self, item, spider):
        """Queue image downloads for the item and persist it to MongoDB.

        Returns the item so downstream pipelines keep working (the original
        code returned None, which would feed None to later pipelines).
        """
        self.config = getattr(conf, spider.name)
        # Collection name is str(item) — the item's own string representation.
        self.db = self.client[self.config["from"]['mongoIp']][self.config['mongoDb']][str(item)]

        # Fan out every image URL in the spider-declared image fields.
        for field in spider.img_field:
            urls = item.get(field)
            if not urls:
                continue
            if isinstance(urls, (str, unicode)):  # single URL -> wrap in list
                urls = [urls]
            for url in urls:
                down.delay(url)

        # Spiders that re-crawl the same pages upsert by URL; others append.
        if spider.name in ("city", "agenda", "newcity"):
            self.db.update({"url": item["url"]}, {"$set": dict(item)}, upsert=True)
        else:
            self.db.insert(dict(item))
        pprint(item)
        return item  # BUG FIX: was commented out, breaking pipeline chaining

    def close_spider(self, spider):
        """Write a crawl-summary document, then close all Mongo clients.

        BUG FIXES vs. original:
          * ``msg`` was undefined when the post-crawl shell ran successfully,
            raising NameError at the summary insert — now defaults to "".
          * clients were closed BEFORE ``self.job.insert`` used one of them —
            closing now happens last.
        """
        self.config = getattr(conf, spider.name)
        self.job = self.client[self.config["from"]['mongoIp']][self.config['mongoDb']]["spiderjob"]
        stats = spider.crawler.stats
        # Default missing counters to 0 so the comparisons below are safe.
        tdata = stats.get_value('item_scraped_count', 0)
        sdata = stats.get_value('sdata', 0)
        error = stats.get_value('log_count/ERROR')
        stime = stats.get_value('start_time')
        ftime = stats.get_value('finish_time')

        msg = ""
        if tdata < 10:
            # Fewer than 10 items scraped: record as an error condition.
            msg = '%s itemNum too little tdata:%s' % (spider.name, tdata)
        elif abs(sdata - tdata) > 100:
            # Scraped count deviates from the source site's count by > 100.
            msg = '%s itemNum too difference sdata:%s tdata:%s' % (spider.name, sdata, tdata)
        else:
            try:
                # NOTE(review): import_module returns a module object, which is
                # not callable — presumably db_shell_path is meant to resolve
                # to a callable exported by that module; confirm intent.
                obj = importlib.import_module(spider.db_shell_path)(spider.name)
                obj()
            except Exception as e:
                msg = str(e)  # str(e) instead of deprecated e.message

        self.job.insert({"date": str(date.today()), "sdata": sdata, "tdata": tdata,
                         "name": spider.name, "start_time": stime, "error": error,
                         "msg": msg, "end_time": ftime})

        # Close clients only after the summary write above.
        for client in self.client.itervalues():
            client.close()
        return