# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql
import pymongo


class DoubanPipeline:
    """Scrapy item pipeline that persists scraped movie items into MySQL.

    Opens a pymysql connection when the spider starts, inserts one row per
    item into ``douban_movie.movies``, and closes the cursor/connection when
    the spider finishes.
    """

    def open_spider(self, spider):
        # Connection parameters are hard-coded; NOTE(review): consider moving
        # them to Scrapy settings. `charset='utf8'` is MySQL's 3-byte utf8 —
        # 4-byte characters (emoji) would need 'utf8mb4'; kept as-is to
        # preserve behavior against the existing schema.
        self.conn = pymysql.Connect(
            host='localhost',
            port=3306,
            user='root',
            passwd='root',
            db='jobcrawler',
            charset='utf8'
        )
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert one movie item as a row and return the item unchanged.

        Uses a parameterized query: the previous code interpolated values
        directly into the SQL string, which both broke on any value
        containing a quote character and was an SQL-injection vector.
        pymysql escapes each argument when a params tuple is supplied.
        """
        sql = (
            "INSERT INTO douban_movie.movies"
            "(`title`,`rank`,`score`,`abstract`,`describe`)"
            "VALUES(%s,%s,%s,%s,%s)"
        )
        self.cursor.execute(
            sql,
            (
                item['title_detail'],
                item['rank_detail'],
                item['score'],
                item['abstract_detail'],
                item['describe'],
            ),
        )
        self.conn.commit()
        # Returning the item lets later pipelines in ITEM_PIPELINES see it.
        return item

    def close_spider(self, spider):
        # Release DB resources in acquisition-reverse order.
        self.cursor.close()
        self.conn.close()


class DoubanMongoPipeline:
    """Scrapy item pipeline that persists scraped movie items into MongoDB.

    Connects to a local MongoDB when the spider starts, writes one document
    per item into the ``douban_movie.movies`` collection, and closes the
    client when the spider finishes.
    """

    def open_spider(self, spider):
        self.conn = pymongo.MongoClient(host='localhost', port=27017)
        self.db = self.conn.douban_movie
        self.movies = self.db.movies

    def process_item(self, item, spider):
        """Insert one movie document and return the item unchanged.

        ``Collection.insert`` was deprecated and removed in PyMongo 4;
        ``insert_one`` is the supported single-document API. The item is
        returned so later pipelines in ITEM_PIPELINES still receive it
        (the original implicitly returned None, dropping the item).
        """
        self.movies.insert_one(
            {
                "title": item['title_detail'],
                "rank": item['rank_detail'],
                "score": item['score'],
                "abstract": item['abstract_detail'],
                "describe": item['describe'],
            }
        )
        return item

    def close_spider(self, spider):
        self.conn.close()
