# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import redis
import whucs.settings as settings

from pymongo import MongoClient


class LectureUrlPipeline(object):
    """Queue newly discovered lecture URLs in Redis for later crawling.

    Each not-yet-crawled URL is pushed onto a Redis list of pending URLs.
    URLs already present in the crawled-URLs set are skipped (process_item
    then returns None, so the item is not passed further).
    """

    def __init__(self):
        self.redis = redis.Redis(host=settings.REDIS_HOST,
                                 port=settings.REDIS_PORT,
                                 db=settings.REDIS_DB)

        # Redis list of URLs waiting to be crawled.
        self.urls_key = settings.REDIS_LECTURE_URLS_KEY
        # Redis set of URLs that have already been crawled.
        self.crawled_urls_key = settings.REDIS_CRAWLED_URLS_KEY

    def process_item(self, item, spider):
        """Push item['url'] onto the pending list unless it was already crawled.

        Returns the item when queued; returns None for already-crawled URLs.
        """
        if not self.is_crawled(item['url']):
            # Parenthesized form works under both Python 2 and 3; the original
            # `print "..."` statement is Python-2-only syntax.
            print("---------")
            self.redis.lpush(self.urls_key, item['url'])
            return item

    def is_crawled(self, url):
        """Return True if *url* is in the Redis set of crawled URLs."""
        return bool(self.redis.sismember(self.crawled_urls_key, url))


class MtwdUrlPipeline(object):
    """Queue newly discovered MTWD URLs in Redis for later crawling.

    Each not-yet-crawled URL is pushed onto a Redis list of pending URLs.
    URLs already present in the crawled-URLs set are skipped (process_item
    then returns None, so the item is not passed further).
    """

    def __init__(self):
        self.redis = redis.Redis(host=settings.REDIS_HOST,
                                 port=settings.REDIS_PORT,
                                 db=settings.REDIS_DB)

        # Redis list of URLs waiting to be crawled.
        self.urls_key = settings.REDIS_MTWD_URLS_KEY
        # Redis set of URLs that have already been crawled.
        self.crawled_urls_key = settings.REDIS_CRAWLED_URLS_KEY

    def process_item(self, item, spider):
        """Push item['url'] onto the pending list unless it was already crawled.

        Returns the item when queued; returns None for already-crawled URLs.
        """
        if not self.is_crawled(item['url']):
            # Parenthesized form works under both Python 2 and 3; the original
            # `print "..."` statement is Python-2-only syntax.
            print("---------")
            self.redis.lpush(self.urls_key, item['url'])
            return item

    def is_crawled(self, url):
        """Return True if *url* is in the Redis set of crawled URLs."""
        return bool(self.redis.sismember(self.crawled_urls_key, url))


class LectureDetailPipeline(object):
    """Persist scraped lecture items to MongoDB and mark their URL as crawled.

    Items whose URL is not yet stored are inserted into the lecture
    collection and the URL is added to the Redis crawled-URLs set;
    duplicates (same URL) are skipped. The item is always returned so
    downstream pipelines keep receiving it.
    """

    def __init__(self):
        self.conn = MongoClient(settings.MONGODB_SERVER, settings.MONGODB_PORT)
        self.db = self.conn[settings.MONGODB_WHUCS_DB]
        # self.db.authenticate(settings.DB_USER, settings.DB_PASS)
        self.lecture_collection = self.db[settings.MONGODB_LECTURE_COLLECTION]

        self.redis = redis.Redis(host=settings.REDIS_HOST,
                                 port=settings.REDIS_PORT,
                                 db=settings.REDIS_DB)
        # Redis set of URLs that have already been crawled.
        self.crawled_urls_key = settings.REDIS_CRAWLED_URLS_KEY

    def process_item(self, item, spider):
        """Insert *item* into MongoDB if its URL is unseen; always return it."""
        if not self.is_exist(item):
            # insert_one replaces Collection.insert, which was deprecated in
            # pymongo 3.0 and removed in 4.0.
            self.lecture_collection.insert_one(dict(item))
            self.redis.sadd(self.crawled_urls_key, item['url'])
        return item

    def is_exist(self, item):
        """Return True if a document with this item's URL is already stored."""
        return bool(self.lecture_collection.find_one({"url": item['url']}))

    def close_spider(self, spider):
        """Close the MongoDB connection when the spider finishes."""
        self.conn.close()


class MtwdDetailPipeline(object):
    """Persist scraped MTWD items to MongoDB and mark their URL as crawled.

    Items whose URL is not yet stored are inserted into the MTWD
    collection and the URL is added to the Redis crawled-URLs set;
    duplicates (same URL) are skipped. The item is always returned so
    downstream pipelines keep receiving it.
    """

    def __init__(self):
        self.conn = MongoClient(settings.MONGODB_SERVER, settings.MONGODB_PORT)
        self.db = self.conn[settings.MONGODB_WHUCS_DB]
        # self.db.authenticate(settings.DB_USER, settings.DB_PASS)
        self.lecture_collection = self.db[settings.MONGODB_MTWD_COLLECTION]

        self.redis = redis.Redis(host=settings.REDIS_HOST,
                                 port=settings.REDIS_PORT,
                                 db=settings.REDIS_DB)
        # Redis set of URLs that have already been crawled.
        self.crawled_urls_key = settings.REDIS_CRAWLED_URLS_KEY

    def process_item(self, item, spider):
        """Insert *item* into MongoDB if its URL is unseen; always return it."""
        if not self.is_exist(item):
            # insert_one replaces Collection.insert, which was deprecated in
            # pymongo 3.0 and removed in 4.0.
            self.lecture_collection.insert_one(dict(item))
            self.redis.sadd(self.crawled_urls_key, item['url'])
        return item

    def is_exist(self, item):
        """Return True if a document with this item's URL is already stored."""
        return bool(self.lecture_collection.find_one({"url": item['url']}))

    def close_spider(self, spider):
        """Close the MongoDB connection when the spider finishes."""
        self.conn.close()
