#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html


"""
方法一：写入json文件
"""
# # 使用JsonLinesItemExporter函数写入，写入json文件可以自动换行，相对简单方便
# from scrapy.exporters import JsonLinesItemExporter
#
#
# class FangprojectPipeline:
#     def __init__(self):
#         self.fp = open("zhengzhou_fang_all.json", "wb")
#         self.exporter = JsonLinesItemExporter(self.fp, encoding='utf-8', ensure_ascii=False)
#
#     def open_spider(self, spider):
#         print("这是爬虫开始……")
#
#     def process_item(self, item, spider):
#         self.exporter.export_item(item)
#         return item
#
#     def close_spider(self, spider):
#         self.fp.close()
#         print("爬虫运行结束了……")


"""
方法二：写入mysql数据库
"""
import pymysql


class FangprojectPipeline(object):
    """Scrapy item pipeline that persists house items into a MySQL table.

    Each item is written to ``scrapy_table`` with a parameterized INSERT
    and committed immediately, so a spider crash loses at most the row
    currently being written (previously everything was committed only at
    shutdown, and values were interpolated into the SQL string — unsafe
    and broken for values containing quotes).
    """

    def __init__(self):
        # Connection parameters are hard-coded; consider moving them into
        # Scrapy settings.  charset='utf8mb4' is required so the Chinese
        # text in the scraped fields round-trips correctly.
        self.db_conn = pymysql.connect(host='127.0.0.1', port=3306,
                                       db='scrapy_database', user='root',
                                       password='123456', charset='utf8mb4')
        self.db_cur = self.db_conn.cursor()

    def open_spider(self, spider):
        print("这是爬虫开始……")

    def process_item(self, item, spider):
        # Called once per scraped item; insert it and pass it downstream.
        print("数据正在写入……")
        self.insert_db(item)
        return item

    def close_spider(self, spider):
        # Final commit covers anything not yet flushed, then release the
        # cursor and the connection (the cursor was previously leaked).
        self.db_conn.commit()
        self.db_cur.close()
        self.db_conn.close()
        print("爬虫运行结束了……")

    def insert_db(self, item):
        """Insert one item into scrapy_table; best-effort, never raises.

        Uses driver-side parameter binding instead of str.format(), which
        fixes the SQL-injection / broken-quote bug of the old string-built
        query.  NOTE(review): column name 'HousePirce' (sic) mirrors the
        item field name — rename both together if ever fixed.
        """
        sql = ("insert into scrapy_table"
               "(HouseName, HousePirce, HouseAddress, HouseComment, HouseTel)"
               " VALUES (%s, %s, %s, %s, %s)")
        params = (item['HouseName'], item['HousePirce'], item['HouseAddress'],
                  item['HouseComment'], item['HouseTel'])
        try:
            self.db_cur.execute(sql, params)
            # Commit per item so a mid-run crash loses at most one row.
            self.db_conn.commit()
        except Exception as e:
            # Keep the original best-effort behavior: log and continue,
            # but roll back the failed statement so the connection stays usable.
            self.db_conn.rollback()
            print(e)
        return item














