# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql
import time
import traceback

class TutorialPipeline():
    """Scrapy item pipeline that persists crawled stock comments to MySQL.

    Connection parameters are read from the crawler settings via
    ``from_crawler``. Each processed item wipes and repopulates the
    ``stock_comment`` table with the item's date/title/text sequences.
    """

    def __init__(self, host, database, user, password, port):
        # Store connection parameters only; the actual connection is
        # opened lazily per item in process_item.
        self.host = host
        self.database = database
        self.user = user
        self.password = password
        self.port = port
        self.conn = None
        self.cursor = None

    @classmethod
    def from_crawler(cls, crawler):
        """Scrapy entry point: build the pipeline from crawler settings."""
        return cls(
            host=crawler.settings.get("MYSQL_HOST"),
            database=crawler.settings.get("MYSQL_DATABASE"),
            user=crawler.settings.get("MYSQL_USER"),
            password=crawler.settings.get("MYSQL_PASSWORD"),
            port=crawler.settings.get("MYSQL_PORT"),
        )

    def open_database(self):
        """Open the MySQL connection and create a cursor."""
        # int() guards against MYSQL_PORT being configured as a string;
        # pymysql requires an integer port.
        self.conn = pymysql.connect(
            host=self.host,
            user=self.user,
            password=self.password,
            database=self.database,
            charset='utf8',
            port=int(self.port),
        )
        self.cursor = self.conn.cursor()

    def close_database(self):
        """Close the cursor and connection, tolerating partial setup.

        FIX: the original closed only the connection (leaking the cursor)
        and raised AttributeError when called before open_database().
        """
        if self.cursor is not None:
            self.cursor.close()
            self.cursor = None
        if self.conn is not None:
            self.conn.close()
            self.conn = None

    ## Remember: pipelines are disabled by default — enable in ITEM_PIPELINES.
    def process_item(self, items, spider):
        """Replace the stock_comment table contents with this item's data.

        Expects ``items`` to carry parallel sequences under the keys
        ``date_details``, ``title_details`` and ``text_details``.
        Errors are logged (best-effort semantics preserved) and the item
        is always returned so later pipelines can run.
        """
        try:
            self.open_database()
            dat = items['date_details']
            title = items['title_details']
            context = items['text_details']
            sql_delete = "truncate table stock_comment"
            sql_insert = "insert into stock_comment(date,title,text) values(%s,%s,%s)"
            print(f"{time.asctime()}开始更新数据")
            self.cursor.execute(sql_delete)
            # Batch all rows in one round trip instead of one execute per row.
            self.cursor.executemany(sql_insert, list(zip(dat, title, context)))
            # Commit so the truncate + inserts are persisted atomically.
            self.conn.commit()
            print(f"{time.asctime()}更新到最新数据")
        except Exception:
            # FIX: was a bare `except:` which also swallowed SystemExit and
            # KeyboardInterrupt. Keep the original best-effort behaviour of
            # logging the traceback rather than crashing the spider.
            traceback.print_exc()
        finally:
            # FIX: the original closed the connection only on success,
            # leaking it whenever any step above raised.
            self.close_database()
        return items
