# Define your item pipelines here
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
from re import S
from itemadapter import ItemAdapter
import pymysql

class StockDataXuweijiePipeline:
    """Debug pipeline: echo every scraped item to stdout and pass it through."""

    def process_item(self, item, spider):
        """Print a marker banner followed by the item, then return the item unchanged."""
        banner = "-----------pipelines----rhb------------"
        # Single print with newline separator — byte-identical stdout to two prints.
        print(banner, item, sep="\n")
        return item

class StockDataRHBMysqlPipeline:
    """Persist scraped stock items into a MySQL `stock` table via pymysql.

    Lifecycle: `open_spider` connects and creates the table if absent,
    `process_item` inserts one row per item, `close_spider` releases the
    cursor and connection.
    """

    def open_spider(self, spider):
        """Connect to MySQL and ensure the `stock` table exists."""
        print("爬虫开始执行储存数据操作····")
        # NOTE(review): credentials are hard-coded; they belong in Scrapy
        # settings (spider.settings) or environment variables.
        self.db = pymysql.connect(
            host='192.168.18.13', 
            user='root', 
            password='123456', 
            database='rhb', 
            port=33046, 
            # NOTE(review): MySQL 'utf8' is the 3-byte subset; 'utf8mb4' is
            # usually wanted for full Unicode — confirm before changing.
            charset='utf8'
        )
        self.cursor = self.db.cursor()
        # NOTE(review): MyISAM is non-transactional; rollback below is a
        # no-op on MyISAM tables — consider InnoDB (schema change, confirm).
        sql_create = "CREATE TABLE\
                    IF NOT EXISTS `stock` (\
	                `data_id` INT (32) PRIMARY KEY AUTO_INCREMENT,\
	                `name` VARCHAR(16) NOT NULL,\
	                `stock_code` VARCHAR(16) NOT NULL DEFAULT '0',\
	                `rank` INT (10) NOT NULL,\
	                `price` FLOAT (10) UNSIGNED NOT NULL DEFAULT '0',\
	                `zdf` VARCHAR (16) NOT NULL DEFAULT '0',\
	                `hsl` VARCHAR (16) NOT NULL DEFAULT '0',\
	                `zj` VARCHAR (40) NOT NULL DEFAULT '0'\
                    ) ENGINE = MyISAM DEFAULT CHARSET = utf8;"
        self.cursor.execute(sql_create)

    def process_item(self, item, spider):
        """Insert one item as a row; roll back and re-raise on failure.

        Returns the item so downstream pipelines still receive it.
        """
        print('-------------------')
        # Explicit column list: the statement no longer depends on the
        # table's physical column order, and the AUTO_INCREMENT key needs
        # no `null` placeholder.
        sql_insert = (
            "INSERT INTO stock"
            " (`name`, `stock_code`, `rank`, `price`, `zdf`, `hsl`, `zj`)"
            " VALUES (%s, %s, %s, %s, %s, %s, %s)"
        )
        data = [
            item['name'],
            item['stockCode'], 
            item['rank'],
            item['price'],
            item['zdf'],
            # NOTE(review): item key 'hs' feeds column 'hsl' — confirm the
            # item field is really named 'hs' and not a typo.
            item['hs'],
            item['zj']
        ]
        try:
            self.cursor.execute(sql_insert, data)
            self.db.commit()
        except pymysql.MySQLError:
            # Keep the connection's transaction state clean, then let
            # Scrapy log the failure instead of swallowing it.
            self.db.rollback()
            raise
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the spider finishes."""
        self.cursor.close()
        self.db.close()
        print("exit spider crawl")
