# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql
import xlwt
# NOTE(review): module-level counter, apparently dead — the pipeline's
# `global j` reference below is commented out and the class defines its
# own `j` attribute. Kept for backward compatibility; consider removing.
j = 1
class ZhaobiaoPipeline(object):
    """Scrapy item pipeline that inserts each scraped item as one row
    into the ``bilian`` table of the local MySQL database ``zhaobiao``.
    """

    # NOTE(review): leftover row counter from a removed xlwt/Excel export
    # path; kept only for backward compatibility. Currently unused.
    j = 1

    def __init__(self):
        # One connection per pipeline instance; released in close_spider().
        self.mysql_conn = pymysql.Connection(
            host='localhost',
            port=3306,
            user='root',
            password='123456',
            database='zhaobiao',
            # NOTE(review): MySQL 'utf8' is the 3-byte subset; switch to
            # 'utf8mb4' if items may contain emoji — confirm with schema.
            charset='utf8',
        )

    def process_item(self, item, spider):
        """Insert *item* into the ``bilian`` table and pass it on.

        Fix: the original interpolated values directly into the SQL
        string (``'"%s"' % item[key]``), which breaks on any value
        containing a double quote and is SQL-injection-prone. Values now
        go through driver-side parameter escaping; only column names
        (which come from the trusted item definition) are interpolated.
        """
        keys = list(item.keys())
        columns = ','.join(keys)
        placeholders = ','.join(['%s'] * len(keys))
        sql_str = f'insert into bilian ({columns}) values ({placeholders});'
        # Cursor is closed automatically by the context manager, even if
        # execute() raises.
        with self.mysql_conn.cursor() as cursor:
            cursor.execute(sql_str, [item[key] for key in keys])
        self.mysql_conn.commit()
        return item

    def close_spider(self, spider):
        # Called automatically by Scrapy when the spider finishes;
        # release the database connection (was previously leaked).
        self.mysql_conn.close()

