import pymongo
import scrapy
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware

class PythonItem(scrapy.Item):
    '''
    Container for one scraped question record.

    Declares the fields a spider may populate for each item.
    See: https://docs.scrapy.org/en/latest/topics/items.html
    '''
    _id = scrapy.Field()        # document id (matches MongoDB's _id key)
    questions = scrapy.Field()  # question title/text
    votes = scrapy.Field()      # vote count
    answers = scrapy.Field()    # answer count
    views = scrapy.Field()      # view count
    links = scrapy.Field()      # link to the question page
    time = scrapy.Field()       # timestamp of the question
    tags = scrapy.Field()       # associated tags

class CustomHttpProxyMiddleware(HttpProxyMiddleware):
    '''
    Downloader middleware that supplies the URL of a local
    Shadowsocks HTTP proxy.

    See: https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
    '''
    @staticmethod
    def proxy_shadowsocks(host="127.0.0.1", port=1080):
        '''
        Return the proxy URL for a Shadowsocks HTTP endpoint.

        Args:
            host: proxy host; defaults to the local loopback address.
            port: proxy TCP port; defaults to 1080 (the original
                hard-coded value).

        Returns:
            A proxy URL string, e.g. "http://127.0.0.1:1080".
        '''
        return "http://{}:{}".format(host, port)

class CustomMongoDBPipeline(object):
    '''
    Item pipeline that persists scraped items to a MongoDB collection.

    Connection parameters come from the crawler settings
    (MONGODB_SERVER, MONGODB_PORT, MONGODB_DB, MONGODB_COLLECTION).
    Enable it via the ITEM_PIPELINES setting; see
    https://docs.scrapy.org/en/latest/topics/item-pipeline.html
    '''
    def __init__(self, mongo_server, mongo_port, mongo_db, mongo_collection):
        '''Store connection parameters; the client is opened in open_spider.'''
        self.mongo_server = mongo_server
        self.mongo_port = mongo_port
        self.mongo_db = mongo_db
        self.mongo_collection = mongo_collection
        # Initialized to None so process_item / close_spider are safe
        # no-ops if called before open_spider (originally these
        # attributes did not exist yet and raised AttributeError).
        self.client = None
        self.db = None
        self.collection = None


    @classmethod
    def from_crawler(cls, crawler):
        '''Scrapy hook: build the pipeline from the crawler's settings.'''
        return cls(
            mongo_server=crawler.settings.get('MONGODB_SERVER'),
            mongo_port=crawler.settings.get('MONGODB_PORT'),
            mongo_db=crawler.settings.get('MONGODB_DB'),
            mongo_collection=crawler.settings.get('MONGODB_COLLECTION'),
        )


    def open_spider(self, _):
        '''Open the MongoDB connection when the spider starts.'''
        self.client = pymongo.MongoClient(host=self.mongo_server, port=self.mongo_port)
        self.db = self.client[self.mongo_db]
        self.collection = self.db[self.mongo_collection]

    def close_spider(self, _):
        '''Close the MongoDB connection when the spider finishes.'''
        if self.client is not None:
            self.client.close()

    def process_item(self, item, _):
        '''
        Insert the item into MongoDB and pass it along.

        Returns the item so downstream pipelines still receive it
        (the original returned None, which would hand None to every
        later pipeline, breaking the item-pipeline contract).
        '''
        if self.collection is not None:
            # Convert to a plain dict: pymongo cannot encode a scrapy
            # Item directly, and insert_one mutates its argument by
            # adding an _id key — don't clobber the caller's object.
            self.collection.insert_one(dict(item))
        return item
