# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
from DBUtils.PooledDB import PooledDB, SharedDBConnection
import redis
from csdn.settings import REDIS_URL,MYSQL_HOST,MYSQL_PORT,MYSQL_PARAMS

class CsdnPipeline(object):
    """Scrapy item pipeline that inserts crawled CSDN blog posts into MySQL
    through a DBUtils connection pool, and holds a Redis client for bookkeeping.
    """

    def __init__(self):
        self.fp = None    # kept for interface compatibility; not used by this pipeline
        self.POOL = None  # MySQL connection pool, created lazily in open_spider()

    def open_spider(self, spider):
        """Called once when the spider starts: build the MySQL pool and Redis client."""
        print('开始爬虫')
        self.POOL = PooledDB(
            creator=pymysql,   # DB-API module used to create connections
            maxconnections=6,  # max connections allowed in the pool (0/None = unlimited)
            mincached=2,       # idle connections created at startup (0 = create none)
            maxcached=5,       # max idle connections kept in the pool (0/None = unlimited)
            maxshared=3,       # max shared connections; effectively unused because pymysql's threadsafety is 1, so connections are never shared
            blocking=True,     # block and wait when the pool is exhausted instead of raising
            maxusage=None,     # max reuses of a single connection (None = unlimited)
            setsession=[],     # SQL run at session start, e.g. ["set datestyle to ...", "set time zone ..."]
            ping=2,            # ping MySQL when a cursor is created (0=never, 1=on request, 2=on cursor, 4=on query, 7=always)
            host=MYSQL_HOST,
            port=MYSQL_PORT,
            user=MYSQL_PARAMS['user'],
            password=MYSQL_PARAMS['password'],
            database=MYSQL_PARAMS['db'],
            charset='utf8'     # NOTE(review): consider utf8mb4 if posts may contain 4-byte chars (emoji)
        )
        # Leading NULL lets MySQL fill in the auto-increment primary key.
        self.sql = 'insert into csdn_blog values(NULL,%s,%s,%s,%s,%s,%s,%s)'
        self.reds = redis.Redis.from_url(REDIS_URL, db=9, decode_responses=True)

    def connect(self):
        """Check a connection out of the pool.

        :return: ``(connection, DictCursor)`` pair; the caller must hand it
                 back via :meth:`connect_close`.
        """
        conn = self.POOL.connection()
        cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)
        return conn, cursor

    def process_item(self, item, spider):
        """Insert one crawled item into MySQL.

        The connection is always returned to the pool — even when execute()
        raises — so a bad item cannot leak pooled connections, and a failed
        insert is rolled back instead of being left in an open transaction.
        """
        conn, cursor = self.connect()
        try:
            cursor.execute(self.sql, (
                item['title'], item['content'], item['href'],
                item['create_time'], item['nickName'], item['time'], item['tag'],
            ))
            conn.commit()
        except Exception:
            conn.rollback()  # don't leave a half-open transaction on the pooled connection
            raise            # let Scrapy log and handle the failure
        finally:
            self.connect_close(conn, cursor)
        return item

    def connect_close(self, conn, cursor):
        """Close the cursor and return the connection to the pool.

        :param conn: pooled connection obtained from :meth:`connect`
        :param cursor: cursor created on that connection
        """
        cursor.close()
        conn.close()  # for a pooled connection this returns it to the pool

    def close_spider(self, spider):
        """Called once when the spider closes: shut the pool down cleanly."""
        if self.POOL is not None:
            self.POOL.close()  # closes all pooled connections (DBUtils >= 1.3)
            self.POOL = None
        print('结束爬虫')