# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import codecs
import json

# import pymongo
import pymysql

class SoufangPipeline(object):
    """No-op placeholder pipeline.

    The original MongoDB persistence logic was commented out (dead code
    removed in review).  A Scrapy pipeline listed in ITEM_PIPELINES must
    still implement ``process_item`` — without it Scrapy raises
    ``NotImplementedError`` — so this class simply passes items through
    unchanged to any later pipeline (e.g. ``mysqlPipeLine``).
    """

    def process_item(self, item, spider):
        """Return the item unchanged so downstream pipelines receive it."""
        return item

class mysqlPipeLine(object):
    """Persist scraped house items into the MySQL table ``qiubai``.

    One connection is opened per spider run (``open_spider``) and closed
    when the run ends (``close_spider``); each item is inserted and
    committed individually, with a rollback on failure.
    """

    conn = None    # pymysql connection, created in open_spider
    cursor = None  # most recent cursor; kept so close_spider can clean up

    def open_spider(self, spider):
        # Keyword arguments: PyMySQL's Connection parameters are
        # keyword-only in recent releases, so positional 'localhost'
        # would raise a TypeError there.
        self.conn = pymysql.Connect(
            host="localhost", user="root", passwd="root", db="qiubai")

    def process_item(self, item, spider):
        """Insert one item; print the error and roll back on failure."""
        self.cursor = self.conn.cursor()
        try:
            # Parameterized query: the driver escapes each value, which
            # prevents SQL injection and fixes breakage on values that
            # contain quotes (the old '%' string formatting did neither).
            self.cursor.execute(
                "insert into qiubai values"
                "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                (item["houseing_pattern"], item["total_price"],
                 item["unit_price"], item["covered_area"],
                 item["construction_time"], item["decorate_class"],
                 item["house_category"], item["orientation"],
                 item["address"], item["floor"]))
            self.conn.commit()
        except Exception as e:
            print(e)
            self.conn.rollback()
        return item

    def close_spider(self, spider):
        # Guard both handles: if open_spider failed, or no item was ever
        # processed, cursor (and possibly conn) is still None and the
        # original code raised AttributeError here.
        if self.cursor is not None:
            self.cursor.close()
        if self.conn is not None:
            self.conn.close()
