# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import openpyxl
import pymysql

# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from ssqSpider.spiders.ssq import SsqSpider


class SsqspiderPipeline:
    """Collect scraped lottery draws into an in-memory Excel workbook.

    Rows accumulate on the active worksheet while the spider runs; the
    workbook is written to disk once, when the spider closes.
    """

    def __init__(self):
        workbook = openpyxl.Workbook()
        sheet = workbook.active
        sheet.title = '双色球开奖信息'
        # Header row: issue no., date, red balls, blue ball, sales, prize pool.
        sheet.append(['期号', '日期', '红球', '篮球', '销售额', '奖池'])
        self.wb = workbook
        self.ws = sheet

    def close_spider(self, spider):
        """Persist the accumulated workbook when the spider shuts down."""
        self.wb.save('双色球开奖信息.xlsx')

    def process_item(self, item, spider):
        """Append one draw as a worksheet row, then pass the item along."""
        fields = ('qihao', 'riqi', 'kaijianghaoma_red',
                  'kaijianghaoma_blue', 'xiaoshouer', 'jiangchijiner')
        self.ws.append([item.get(name, '') for name in fields])
        return item


class SsqMysqlDbPipeline:
    """Persist each scraped lottery draw into MySQL, one INSERT per item.

    Each row is committed as soon as it is inserted, so rows scraped before
    a mid-run crash are not lost (the original committed only at shutdown).
    """

    def __init__(self):
        self.conn = pymysql.connect(host='localhost', port=3306,
                                    user='root', password='123456',
                                    database='spider_db', charset='utf8mb4')
        self.cursor = self.conn.cursor()

    def close_spider(self, spider):
        """Commit any outstanding work, then release cursor and connection."""
        try:
            self.conn.commit()
        finally:
            self.cursor.close()
            self.conn.close()

    def process_item(self, item, spider):
        """Insert one draw row and commit immediately; returns the item unchanged."""
        qihao = item.get('qihao', '')
        riqi = item.get('riqi', '')
        kaijianghaoma_red = item.get('kaijianghaoma_red', '')
        kaijianghaoma_blue = item.get('kaijianghaoma_blue', '')
        jiangchi = item.get('jiangchijiner', 0)
        xiaoshouer = item.get('xiaoshouer', 0)

        # Parameterized query: pymysql escapes the values, so no SQL injection.
        self.cursor.execute(
            'insert into ssq_info(qihao,riqi,kaijianghaoma_red,kaijianghaoma_blue,jiangchi,xiaoshouer) values(%s,%s,%s,%s,%s,%s)',
            (qihao, riqi, kaijianghaoma_red, kaijianghaoma_blue, jiangchi, xiaoshouer))
        self.conn.commit()  # per-item durability
        return item


class SsqMysqlDbBatchPipeline:
    """Persist scraped lottery draws into MySQL in batches.

    Rows are buffered in memory and flushed with one ``executemany`` every
    ``BATCH_SIZE`` items; a final partial batch is flushed at shutdown.
    """

    BATCH_SIZE = 100  # rows buffered before one executemany round-trip

    def __init__(self):
        self.conn = pymysql.connect(host='localhost', port=3306,
                                    user='root', password='123456',
                                    database='spider_db', charset='utf8mb4')
        self.cursor = self.conn.cursor()
        self.data = []  # buffered rows awaiting insertion

    def close_spider(self, spider):
        """Flush any partial batch, then always release cursor and connection."""
        try:
            if self.data:
                self._write_to_db()
        finally:
            self.cursor.close()
            self.conn.close()

    def process_item(self, item, spider):
        """Buffer one draw row; flush when the batch threshold is reached."""
        # Only handle items produced by the ssq spider; pass others through.
        if not isinstance(spider, SsqSpider):
            return item
        self.data.append((item.get('qihao', ''),
                          item.get('riqi', ''),
                          item.get('kaijianghaoma_red', ''),
                          item.get('kaijianghaoma_blue', ''),
                          item.get('jiangchijiner', 0),
                          item.get('xiaoshouer', 0)))
        # >= (not ==) so the trigger cannot be skipped if the buffer ever
        # overshoots the threshold.
        if len(self.data) >= self.BATCH_SIZE:
            self._write_to_db()
        return item

    def _write_to_db(self):
        """Insert all buffered rows in one executemany, commit, clear the buffer."""
        self.cursor.executemany(
            'insert into ssq_info(qihao,riqi,kaijianghaoma_red,kaijianghaoma_blue,jiangchi,xiaoshouer) values(%s,%s,%s,%s,%s,%s)',
            self.data)
        self.conn.commit()
        self.data.clear()  # single owner of buffer lifecycle