# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json

import pymongo
import pymysql
# NOTE: scrapy.contrib was deprecated in Scrapy 1.0 and removed in 1.6;
# the item exporters now live in scrapy.exporters.
from scrapy.exporters import CsvItemExporter, JsonItemExporter

from .settings import DATABASE, MONGODB

class JobcrawlPipeline(object):
    """Default no-op pipeline generated by Scrapy's project template.

    Passes every item through unchanged so downstream pipelines
    (or the feed exporters) can process it.
    """

    def process_item(self, item, spider):
        """Return *item* untouched."""
        return item


# class JobcrawlJsonPipeline(object):
#     def __init__(self):
#         print("打开文件")
#         self.f =open('myjson.json','a')
#
#     def process_item(self, item, spider):
#         print("处理Spider yield item")
#         item_json = json.dumps(dict(item))
#         self.f.write(item_json)
#         return item
#
#     def close_spider(self,spider):
#         print(spider)
#         print("关闭文件")
#         self.f.close()


# class JsonPipeline(object):
#     """
#     使用自带方法
#     """
#     def __init__(self):
#         self.f = open('job2.json','wb')
#         self.exporter = JsonItemExporter(self.f,encoding='utf-8')
#         self.exporter.start_exporting()
#
#     def process_item(self, item, spider):
#         self.exporter.export_item(item)
#         return item
#
#     def close_spider(self, spider):
#         self.exporter.finish_exporting()


# class CsvPipeline(object):
#     """
#     使用自带方法
#     """
#     def __init__(self):
#         self.f = open('job3.csv', 'wb')
#         self.exporter = CsvItemExporter(self.f, encoding='utf-8')
#         self.exporter.start_exporting()
#
#     def process_item(self, item, spider):
#         self.exporter.export_item(item)
#         return item
#
#     def close_spider(self, spider):
#         self.exporter.finish_exporting()


class MySQLPipeline(object):
    """Persist the ``name`` field of scraped items into the MySQL
    ``zhilian`` table, using connection settings from ``DATABASE``.
    """

    def __init__(self):
        """Open the database connection and create a cursor.

        Connection parameters come from the project's DATABASE setting.
        """
        self.conn = pymysql.connect(
            host=DATABASE.get('host'),
            port=DATABASE.get('port'),
            user=DATABASE.get('user'),
            password=DATABASE.get('password'),
            db=DATABASE.get('db'),
            charset=DATABASE.get('charset')
        )
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert one item's ``name`` into the database.

        Uses a parameterized query (not string formatting) to prevent
        SQL injection. Items without a truthy ``name`` are passed
        through without touching the database.
        """
        name = item.get('name')
        if name:
            sql = 'insert into zhilian(name) values(%s)'
            try:
                self.cursor.execute(sql, (name,))
                self.conn.commit()
            except pymysql.MySQLError:
                # Undo the failed statement so the connection stays in a
                # clean state, then re-raise so Scrapy logs the failure.
                self.conn.rollback()
                raise
        return item

    def close_spider(self, spider):
        """Release the cursor and connection when the spider closes."""
        self.cursor.close()
        self.conn.close()


class MongoPipeline(object):
    """Persist scraped items into a MongoDB collection configured by
    the project's ``MONGODB`` setting.
    """

    def __init__(self):
        """Connect to MongoDB and resolve the target database/collection."""
        self._host = MONGODB.get('host')
        self._port = MONGODB.get('port')
        self._db = MONGODB.get('db')
        self._collection = MONGODB.get('collection')
        self.client = pymongo.MongoClient(host=self._host, port=self._port)
        self.db = self.client[self._db]
        self.coll = self.db[self._collection]

    def process_item(self, item, spider):
        """Insert one item into the collection and pass it on.

        ``Collection.insert()`` was deprecated in pymongo 3 and removed
        in pymongo 4; ``insert_one()`` is the supported replacement.
        """
        self.coll.insert_one(dict(item))
        return item

    def close_spider(self, spider):
        """Close the Mongo client when the spider shuts down."""
        self.client.close()