# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html

# from getpic.mysql.shetu import shetudb
import pymongo
import redis
import json
from scrapy.utils.project import get_project_settings

class ShetuPipeline(object):
    """Persist scraped image metadata into a MongoDB collection.

    Connection parameters (host, port, db, table) are read from the
    project's ``MONGODB`` setting via ``get_project_settings()``.
    """

    # Item fields that get persisted, in insertion order.
    FIELDS = ('url', 'alt', 'name', 'size', 'pixel')

    def __init__(self):
        conf = get_project_settings()['MONGODB']
        client = pymongo.MongoClient(host=conf['host'], port=conf['port'])
        # Target collection.  (The original rebound the name `db` from the
        # config string to the database handle — avoided here for clarity.)
        self.doc = client[conf['db']][conf['table']]

    def process_item(self, item, spider):
        """Insert the selected fields of *item* into MongoDB.

        Returns the item unchanged so later pipelines still receive it.
        """
        record = {field: dict(item)[field] for field in self.FIELDS}
        # Collection.insert() was deprecated in pymongo 3.0 and removed in
        # pymongo 4.x — insert_one() is the supported replacement.
        self.doc.insert_one(record)
        return item


class DaomuPipeline(object):
    """Push selected chapter fields of each item onto a Redis list."""

    # Item fields copied into the Redis payload, in insertion order.
    FIELDS = ('bookname', 'booktitle', 'chaptername',
              'chapternum', 'chapterurl')

    def __init__(self):
        # One client (and its underlying connection pool) for the whole
        # run — the original constructed a new redis.Redis() per item.
        self._redis = redis.Redis()

    def process_item(self, item, spider):
        """Queue *item*'s chapter fields on the 'daomu:detail' list.

        Returns the item unchanged so later pipelines still receive it.
        """
        old = dict(item)
        payload = {field: old[field] for field in self.FIELDS}
        # NOTE(review): str(dict) stores a Python repr, not JSON — kept
        # byte-compatible with existing consumers; the module's unused
        # `json` import suggests json.dumps() may have been intended.
        self._redis.lpush('daomu:detail', str(payload))
        return item

class GetpicPipeline(object):
    """No-op pipeline: forwards every item unchanged."""

    def process_item(self, item, spider):
        # Nothing to persist here — hand the item to the next pipeline.
        return item
