# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


# useful for handling different item types with a single interface
from itemadapter import ItemAdapter


import pandas as pd
from sqlalchemy import create_engine
import pymongo

class TianyanPipeline:
    """Scrapy item pipeline that persists each scraped item to three sinks:
    a local CSV file, a MySQL table (via pandas/SQLAlchemy), and a MongoDB
    collection.
    """

    def __init__(self):
        # Connect to the MySQL server.
        # NOTE(review): credentials are hard-coded — consider moving them
        # into Scrapy settings and reading them via from_crawler.
        self.engine = create_engine('mysql+pymysql://root:12345678@127.0.0.1:3306/mysql')
        # Connect to the MongoDB server.
        self.client = pymongo.MongoClient('localhost', 27017)
        self.mysql = self.client['mysql']    # select the 'mysql' database
        self.test1 = self.mysql['test1']     # select the 'test1' collection

    def process_item(self, item, spider):
        """Persist one item to CSV, MySQL and MongoDB.

        Returns the item unchanged so that any later pipelines still
        receive it.
        """
        # Convert the item into a one-row DataFrame.
        data = pd.DataFrame(dict(item), index=[0])
        # Append the row to the local CSV file (no header row, no index
        # column; pandas documents these parameters as booleans).
        data.to_csv('./movices1.csv', mode='a+', index=False, header=False)
        # Append the row to the MySQL table 'test1'.
        data.to_sql('test1', self.engine, if_exists='append', index=False)
        # Insert the item into the MongoDB collection. Collection.insert()
        # was removed in pymongo 4.x; insert_one() is the supported API.
        self.test1.insert_one(dict(item))
        return item

    def close_spider(self, spider):
        """Release database resources when the spider finishes."""
        self.client.close()     # close the MongoDB connection
        self.engine.dispose()   # dispose of the SQLAlchemy connection pool
