# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
from datetime import datetime
import pymysql
from huobiao.settings import *

class HuobiaoPipeline:
    """Scrapy pipeline that persists scraped items into the MySQL table `fa_huobiao`.

    Opens one PyMySQL connection per spider run, keeps it alive across long
    idle gaps (MySQL drops idle links after ``wait_timeout``), and inserts one
    row per item using a parameterized query.
    """

    # Re-check the connection only after this many idle minutes.
    PING_INTERVAL_MINUTES = 2

    def open_spider(self, spider):
        """Connect to MySQL when the spider starts.

        Connection settings come from ``huobiao.settings`` (HOST, USER,
        PASSWORD, DLDATABASE). The config dict is kept so we can reconnect
        later if the link drops.
        """
        self.new_timestamp = datetime.now()  # time of last successful ping/connect
        self.config = {
            "host": HOST,
            "user": USER,
            "password": PASSWORD,
            "database": DLDATABASE,
            "cursorclass": pymysql.cursors.DictCursor,
        }
        self.db = pymysql.connect(**self.config)
        self.cursor = self.db.cursor()

    def _ensure_connection(self):
        """Ping the server if we have been idle too long; reconnect if dead.

        Uses ``ping(reconnect=True)`` so PyMySQL transparently re-establishes
        a dropped link; a full re-``connect`` is kept as the fallback for the
        case where even the ping machinery fails.
        """
        now_timestamp = datetime.now()
        # NOTE: timedelta.seconds wraps at 24h — total_seconds() is the
        # correct elapsed time (the original `.seconds` was a latent bug).
        idle_minutes = (now_timestamp - self.new_timestamp).total_seconds() / 60
        if idle_minutes <= self.PING_INTERVAL_MINUTES:
            return
        print("判断数据库链接是否存活")
        try:
            self.db.ping(reconnect=True)
        except pymysql.MySQLError:
            print("当前数据库链接已关闭，正在重新链接.......")
            self.db = pymysql.connect(**self.config)
            self.cursor = self.db.cursor()
            print("=-=-=-=-= 数据库已连接 -=-=-=-=-=")
        self.new_timestamp = now_timestamp

    def process_item(self, item, spider):
        """Insert one scraped item into `fa_huobiao` and return it unchanged.

        The INSERT no longer lives in a ``finally`` block, so it is not
        attempted when reconnection itself failed (the original ran it
        unconditionally). Raises ``pymysql.MySQLError`` on DB failure so
        Scrapy can surface the error instead of silently swallowing it.
        """
        self._ensure_connection()
        # Parameterized query: item fields come from scraped pages and are
        # untrusted — never interpolate them into SQL (the previous f-string
        # build was injectable via a single quote in any field).
        sql = (
            "INSERT INTO `fa_huobiao`"
            "(id,enname,href,title,updatetime,pubdate,search_key,source) "
            "VALUES (%s,%s,%s,%s,%s,%s,%s,%s)"
        )
        params = (
            item['id'],
            item['enname'],
            item['href'],
            item['title'],
            item['updatetime'],
            item['pubdate'],
            item['search_key'],
            '火标网',
        )
        self.cursor.execute(sql, params)
        self.db.commit()
        print(f'######### {item["id"]} 写入成功 #####################')
        return item

    def close_spider(self, spider):
        """Release the cursor and connection; ignore errors on a dead link."""
        try:
            self.cursor.close()
            self.db.close()
        except pymysql.MySQLError:
            # Connection already gone — nothing left to clean up.
            pass