# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import pymysql
import pymongo


# Pipeline: store items in a local text file
class TencentSpiderPipeline:
    """Append each scraped item to ``./tencent.txt``, one record per line."""

    def open_spider(self, spider):
        # Open the file once per crawl instead of reopening it for every item.
        self.file = open("./tencent.txt", "a", encoding="utf-8")

    def close_spider(self, spider):
        # Flush and release the file handle when the crawl finishes.
        self.file.close()

    def process_item(self, item, spider):
        """Write the item's string form to the file and pass the item on.

        The trailing newline keeps records separated (the original wrote
        them back-to-back with no delimiter).
        """
        self.file.write(str(item) + "\n")
        return item


# Pipeline: store items in a MySQL database
class MysqlPipeline:
    """Persist scraped job items into the MySQL table ``tencent_data``."""

    # NOTE(review): credentials are hard-coded; in a real deployment load
    # them from Scrapy settings (e.g. via from_crawler) instead.
    def __init__(self):
        self.conn = pymysql.connect(
            host="127.0.0.1",  # local MySQL server
            port=3306,
            user="root",
            password="15096962415",
            db="13_class",
            charset="utf8mb4",  # items contain Chinese text; avoid mojibake
        )
        # Cursor used to send SQL statements to MySQL.
        self.cursor = self.conn.cursor()

    def close_spider(self, spider):
        """Release the cursor and connection when the crawl ends."""
        self.cursor.close()
        self.conn.close()

    def process_item(self, item, spider):
        """Insert one item; roll back on failure but keep the crawl alive.

        Returns the item so downstream pipelines still receive it.
        """
        sql = (
            "insert into tencent_data(RecruitPostName, CategoryName, "
            "RequireWorkYearsName, LastUpdateTime, Responsibility, Requirement) "
            "values (%s,%s,%s,%s,%s,%s)"
        )
        params = (
            item["RecruitPostName"],
            item["CategoryName"],
            item["RequireWorkYearsName"],
            item["LastUpdateTime"],
            item["Responsibility"],
            item["Requirement"],
        )
        try:
            # A single row needs execute(), not executemany() with a 1-tuple list.
            self.cursor.execute(sql, params)
            self.conn.commit()
        except Exception as e:
            # Best-effort persistence: log the error and undo the transaction.
            print(e)
            self.conn.rollback()
        # Returned OUTSIDE any finally block: a `return` in `finally` would
        # silently swallow exceptions (including KeyboardInterrupt).
        return item


# Pipeline: store items in a MongoDB database
class MongoDBPipeline:
    """Persist items into the ``tencent`` collection of the ``13_class`` db."""

    def __init__(self):
        self.client = pymongo.MongoClient(host="localhost", port=27017)
        self.db = self.client["13_class"]

    def close_spider(self, spider):
        """Close the MongoDB client when the crawl finishes."""
        self.client.close()

    def process_item(self, item, spider):
        """Insert the item as a document and pass the item on.

        Bug fix: the original returned ``None``, which would hand ``None``
        to any later pipeline configured in ITEM_PIPELINES.
        """
        self.db.tencent.insert_one(dict(item))
        return item

