# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html

import pymysql

class ZhaobiaoPipeline:
    """Scrapy item pipeline that persists each scraped item into the
    MySQL table ``t_zhaobiao``.

    Each item's field names are used as column names and its values are
    inserted via a parameterized query (the original built the SQL by
    string interpolation, which broke on quoted values and was an
    SQL-injection vector).
    """

    def __init__(self):
        # Create the MySQL connection once when the pipeline is instantiated.
        # NOTE(review): credentials are hard-coded; consider moving them into
        # Scrapy settings and reading them via ``from_crawler``.
        self.mysql_conn = pymysql.Connect(
            host='58.87.91.119',  # Tencent Cloud host
            port=3306,
            user='root',
            password='AS-niuniu1003',
            database='zhaobiao',
            charset='utf8',
        )

    def process_item(self, item, spider):
        """Insert *item* into ``t_zhaobiao`` and return it unchanged.

        Parameters:
            item: mapping of column name -> value (a Scrapy Item or dict).
            spider: the spider that produced the item (unused).

        Raises:
            Whatever the DB driver raises on a failed insert; the
            transaction is rolled back first so one bad item does not
            poison subsequent inserts.
        """
        keys = list(item.keys())
        if not keys:
            # Nothing to insert for an empty item; pass it through.
            return item
        # Column identifiers cannot be bound as parameters, so quote them
        # with backticks; values go through driver-side placeholders.
        columns = ','.join('`%s`' % key for key in keys)
        placeholders = ','.join(['%s'] * len(keys))
        sql = 'insert into t_zhaobiao (%s) values (%s);' % (columns, placeholders)
        try:
            # Context manager closes the cursor even if execute() raises.
            with self.mysql_conn.cursor() as cs:
                cs.execute(sql, [item[key] for key in keys])
            self.mysql_conn.commit()
        except Exception:
            self.mysql_conn.rollback()
            raise
        return item

    def close_spider(self, spider):
        # Scrapy calls this hook when the spider finishes; release the
        # connection instead of leaking it for the process lifetime.
        self.mysql_conn.close()
