# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from contentSpider.items import *
from contentSpider.Utils.redisUtils import OPRedis
import pymysql
from twisted.enterprise import adbapi
from contentSpider.settings import CONTENT_QUEUE

class ContentspiderPipeline(object):
    """Scrapy item pipeline that persists scraped content ids.

    For each ``ContentItem`` the pipeline pushes the content ids onto a
    per-user Redis list (for downstream consumers) and bulk-inserts
    ``(user_id, comment_id)`` rows into MySQL through a Twisted adbapi
    connection pool so database work never blocks the reactor.
    """

    def __init__(self, host, user, password, port, db):
        """Create the MySQL connection pool and the Redis helper.

        Args:
            host: MySQL server host.
            user: MySQL user name.
            password: MySQL password.
            port: MySQL server port.
            db: MySQL database name.

        All values are normally supplied by :meth:`from_crawler`.
        """
        params = dict(
            host=host,
            user=user,
            password=password,
            db=db,
            port=port,
            charset='utf8',  # pymysql expects 'utf8', not 'utf-8'
            # NOTE(review): MySQL 'utf8' is the 3-byte subset; if emoji or
            # other 4-byte characters must be stored, 'utf8mb4' is needed —
            # confirm against the table schema before changing.
            cursorclass=pymysql.cursors.DictCursor
        )
        # Twisted adbapi pool: runs blocking pymysql calls in a thread
        # pool so the reactor is never blocked by MySQL round-trips.
        self.dbpool = adbapi.ConnectionPool('pymysql', **params)
        # Redis helper used to enqueue content ids for later processing.
        self.opredis = OPRedis()

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from the MYSQL_* values in project settings."""
        host = crawler.settings.get('MYSQL_HOST')
        user = crawler.settings.get('MYSQL_USER')
        password = crawler.settings.get('MYSQL_PASSWD')
        db = crawler.settings.get('MYSQL_DBNAME')
        port = crawler.settings.get('MYSQL_PORT')
        return cls(host, user, password, port, db)

    def process_item(self, item, spider):
        """Persist a ``ContentItem`` to Redis and MySQL; pass every item on.

        Non-ContentItem items are returned untouched so other pipelines
        can handle them.
        """
        if isinstance(item, ContentItem):
            uid = item['uid']
            contentIds = item['contentIds']
            self.redisContentIdsInsert(uid, contentIds)
            # runInteraction passes a fresh cursor as the first argument
            # and commits the transaction when the callable returns.
            query = self.dbpool.runInteraction(self.do_ContentIdsInsert, uid, contentIds)
            # Log (rather than raise) DB failures so the crawl continues;
            # the errback receives the Failure as its first argument.
            query.addErrback(self.on_error, spider)
        return item

    def redisContentIdsInsert(self, uid, contentIds):
        """Push every content id onto the per-user Redis list."""
        redisKey = CONTENT_QUEUE.format(uid=uid)
        for contentId in contentIds:
            self.opredis.lpush(redisKey, contentId)

    def do_ContentIdsInsert(self, cursor, uid, contentIds):
        """Bulk-insert ``(uid, contentId)`` rows; runs in a pool thread."""
        args = [(uid, contentId) for contentId in contentIds]
        if args:  # skip the round-trip entirely when there is nothing to insert
            sql = 'insert into comment(user_id,comment_id,create_time) values(%s,%s,now())'
            cursor.executemany(sql, args)

    def do_UsersInsert(self, cursor, item):
        """Bulk-insert ``(contentId, userid)`` rows; runs in a pool thread."""
        contentId = item['contentId']
        args = [(contentId, userid) for userid in item['userids']]
        if args:
            sql = 'insert into users(comment_id,user_id,create_time) values(%s,%s,now())'
            cursor.executemany(sql, args)

    def on_error(self, failure, spider):
        """Errback for DB interactions: log the Twisted Failure object."""
        spider.logger.error(failure)

    def close_spider(self, spider):
        """Shut down the adbapi connection pool when the spider closes."""
        self.dbpool.close()
