# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
import time

import pymysql
from itemadapter import ItemAdapter
from scrapy.utils.project import get_project_settings

class RotorbuildsPipeline:
    """Synchronous pipeline that persists items into the `spider_fpv` MySQL table.

    One connection is opened when the spider starts and closed when it
    finishes, instead of reconnecting for every single item (the original
    behavior), which is slow and can exhaust the server's connection limit
    on large crawls.
    """

    def __init__(self):
        # Created lazily in open_spider so the pipeline object can be
        # instantiated without a reachable database.
        self.conn = None
        self.cursor = None

    def open_spider(self, spider):
        """Open a single MySQL connection for the whole crawl.

        Called once by Scrapy when the spider is opened.
        """
        settings = get_project_settings()
        self.conn = pymysql.connect(
            host=settings['MYSQL_HOST'],
            port=settings['MYSQL_PORT'],
            user=settings['MYSQL_USER'],
            password=settings['MYSQL_PASSWD'],
            db=settings['MYSQL_DBNAME'],
            charset='utf8'          # required so non-ASCII (e.g. Chinese) text can be stored
        )
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert one scraped item and return it for downstream pipelines.

        Raises the underlying pymysql error on failure after rolling back,
        so Scrapy logs the problem instead of silently losing the item.
        """
        # NOTE: the original shadowed the `time` module with a local string
        # also named `time`; use a distinct name instead.
        now = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
        insert_sql = "INSERT INTO spider_fpv(url,spider_id,content,photos,parts,update_time,create_time) VALUE (%s,%s,%s,%s,%s,%s,%s)"
        try:
            # Parameterized query: pymysql escapes the values, so item text
            # cannot inject SQL.
            self.cursor.execute(
                insert_sql,
                (item['url'], item['spider_id'], item['content'],
                 item['photos'], item['parts'], now, now),
            )
            # Commit is required — without it nothing reaches the database.
            self.conn.commit()
        except pymysql.MySQLError:
            # Undo the failed statement so the shared connection stays
            # usable for subsequent items, then re-raise for Scrapy to log.
            self.conn.rollback()
            raise

        print("UNIQUE ID : "+item['spider_id']+" save to db")

        # Bug fix: the original returned None, which would feed None into
        # any pipeline configured after this one. Scrapy's contract is that
        # process_item returns the item (or raises DropItem).
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the crawl ends."""
        if self.cursor is not None:
            self.cursor.close()
        if self.conn is not None:
            self.conn.close()

