# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from MasuScrapy.items import *
import configparser
import pymysql


class MasuscrapyPipeline:
    """Scrapy item pipeline that persists crawled report articles to a MySQL
    database (e.g. Aliyun RDS) via pymysql.

    Connection settings are read from ``DBConfig.ini`` when the spider
    opens; one shared connection is used for the whole crawl and closed
    when the spider closes.
    """

    def open_spider(self, spider):
        """Create the single database connection for this crawl.

        Called once by Scrapy when the spider opens, which guarantees the
        connection is established exactly once. (If write throughput ever
        matters, a connection pool would be the next step.)
        """
        config = configparser.ConfigParser()
        config.read("DBConfig.ini")
        # Read connection settings from the [DATABASE] section.
        db_host = config["DATABASE"]["HOST"]
        db_name = config["DATABASE"]["DBNAME"]
        db_user = config["DATABASE"]["USER"]
        db_passwd = config["DATABASE"]["PASSWORD"]
        # BUG FIX: the configured port was read but then ignored in favor of
        # a hard-coded 3306. Honor the config value; pymysql expects an int.
        db_port = int(config["DATABASE"]["PORT"])

        self.conn = pymysql.connect(
            host=db_host,
            user=db_user,
            password=db_passwd,
            database=db_name,
            port=db_port,
        )

    def process_item(self, item, spider):
        """Route each yielded item to its handler and pass it downstream.

        Always returns the item so later pipeline stages still receive it
        (the previous implementation implicitly returned ``None`` for
        unrecognized item types, which silently dropped them).
        """
        if isinstance(item, MasuScrapyReportCategory):
            # Category items currently need no persistence.
            return item

        # Handle MasuScrapyReportArrticle items yielded by MasuSpider:
        # write the article fields to the RDS `newstable` table.
        if isinstance(item, MasuScrapyReportArrticle):
            title = item['title']                  # article title
            report_date = item['report_date']      # publication time
            report_source = item['report_source']  # publication source
            report_views = item['report_views']    # view count
            content = item['content']              # article body
            # Parameterized INSERT — values are bound by pymysql, never
            # interpolated into the SQL string.
            sql = (
                "INSERT INTO `newstable` ("
                "`title`, "
                "`report_date`,"
                "`report_source`, "
                "`report_views`,"
                "`content`) "
                "VALUES (%s, %s, %s, %s, %s)"
            )
            with self.conn.cursor() as cursor:
                try:
                    cursor.execute(
                        sql,
                        (title, report_date, report_source, report_views, content),
                    )
                except Exception as e:
                    print("数据写入失败")
                    print(e)
                    # Roll back the failed statement so the shared
                    # connection stays usable for subsequent items.
                    self.conn.rollback()
                else:
                    # Commit only when the INSERT succeeded.
                    self.conn.commit()
            return item

        # Unknown item types are passed through untouched rather than
        # being dropped from the pipeline.
        return item

    # Called once by Scrapy when the crawl finishes.
    def close_spider(self, spider):
        """Close the shared database connection at the end of the crawl."""
        self.conn.close()
