# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface

import pymysql


class CaipiaoPipeline:
    """Scrapy pipeline that writes scraped lottery draws to a CSV file."""

    def open_spider(self, spider):
        # Open the output file once per crawl and write the CSV header.
        # "utf-8-sig" adds a BOM so spreadsheet tools detect the encoding.
        self.f = open("shuagnseqiu.csv", mode='w', encoding="utf-8-sig")
        self.f.write("期,hong_number,lan_number\n")
        print("打开文件写入表头")

    def close_spider(self, spider):
        # Close the file when the crawl finishes.
        self.f.close()
        print("关闭文件")

    def process_item(self, item, spider):
        """Append one draw (period, red numbers, blue number) as a CSV row.

        Expects `item` to carry 'timer', 'hong' and 'lan' keys.
        Returns the item unchanged so later pipelines can process it.
        """
        if self.f.closed:
            # Reopen in APPEND mode. The original used mode 'w' here, which
            # truncates the file and discards the header and every row
            # written so far; 'a' preserves the existing contents.
            self.f = open("shuagnseqiu.csv", mode='a', encoding="utf-8-sig")
        self.f.write(f"{item['timer']},{item['hong']},{item['lan']}\n")
        return item


class CaipiaoPipelineMysql:
    """Scrapy pipeline that inserts scraped lottery draws into MySQL."""

    def open_spider(self, spider):
        # Open the MySQL connection once per crawl.
        self.conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', password='123456', database='shuangseqiu')

    def close_spider(self, spider):
        # Close the connection when the crawl finishes.
        self.conn.close()
        print("关闭mysql链接")

    def process_item(self, item, spider):
        """Insert one draw; commit on success, roll back on any failure.

        Expects `item` to carry 'i', 'timer', 'hong' and 'lan' keys.
        Returns the item unchanged so later pipelines can process it.
        """
        # Parameterized query: values are bound by the driver, never
        # interpolated into the SQL string.
        sql = "insert into shuangseqiu.shuangseqiu(id, qi, hong, lan) values (%s, %s, %s, %s)"
        try:
            # `with` closes the cursor even when execute() raises; the
            # original created a cursor per item and never closed any.
            with self.conn.cursor() as cursor:
                cursor.execute(sql, (item['i'], item['timer'], item['hong'], item['lan']))
            # Commit explicitly — without it the INSERT is never persisted.
            self.conn.commit()
        except Exception as e:
            print(e)
            # Roll back the failed transaction so later items start clean.
            self.conn.rollback()
        return item
