# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql
from  time import sleep
import datetime
class GuoxuenamerPipeline:
    """Scrapy item pipeline that persists scraped book links into MySQL.

    For each item, updates ``co_books_look.fileurl`` on the row whose
    ``srname`` equals the item's ``art_name``.  The connection is opened
    once in ``open_spider`` and released in ``close_spider``.
    """

    conn = None      # pymysql connection, opened in open_spider
    cursor = None    # per-item cursor, created in process_item
    dateTamp = None  # numeric timestamp captured when the spider opens

    # NOTE(review): credentials are hard-coded in source; they should be
    # moved to Scrapy settings or environment variables.
    host = "192.168.0.113"
    port = 3306
    user = 'coname'
    password = 'xZS6p4LDcKCwn4Yb'
    db = "co"
    charset = "utf8"

    def open_spider(self, spider):
        """Open the MySQL connection once when the spider starts.

        A connection failure is printed but not re-raised here; if it
        happens, ``process_item`` will fail on the ``None`` connection.
        """
        try:
            self.conn = pymysql.connect(host=self.host, port=self.port,
                                        user=self.user, password=self.password,
                                        db=self.db, charset=self.charset)
            print("连接数据成功！")
        except Exception as e:
            print("连接数据失败!>>>", e)
        # Numeric timestamp used as a base id.  NOTE(review): the format is
        # month-year-day (%m%y%d) — presumably %y%m%d was intended; confirm
        # before relying on its ordering.
        self.dateTamp = int('{0:%m%y%d%H%M%S}'.format(datetime.datetime.now()))

    def process_item(self, item, spider):
        """Write the item's file URL to the matching row and return the item.

        Uses a parameterized query: the original interpolated scraped values
        straight into the SQL string, which is an injection risk and breaks
        on titles containing quotes.  The per-item ``sleep(1)`` was removed —
        crawl throttling belongs in Scrapy's DOWNLOAD_DELAY setting, not in
        a blocking sleep inside the DB pipeline.
        """
        self.cursor = self.conn.cursor()
        try:
            self.cursor.execute(
                "update  co_books_look set fileurl=%s where srname=%s",
                (item["art_url"], item["art_name"]),
            )
            self.conn.commit()
            print("插入连接成功")
            print(item)
        finally:
            # Close the per-item cursor; the original leaked one per item.
            self.cursor.close()
        return item

    def close_spider(self, spider):
        """Release DB resources when the spider closes.

        Guards against ``None`` cursor/connection (connection failed in
        ``open_spider``, or no items were processed) — the original raised
        AttributeError in that case.
        """
        if self.cursor is not None:
            self.cursor.close()
        if self.conn is not None:
            self.conn.close()