# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html

import pymysql

class ChengmaiPipeline(object):
    """Scrapy item pipeline that persists scraped estate records to MySQL.

    Opens one connection/cursor when the pipeline is created and inserts a
    row into the ``house2`` table for every item processed.  Enable it via
    the ITEM_PIPELINES setting.
    """

    # Columns of house2 written per item; each maps 1:1 to an item field
    # of the same name, in this order.
    _FIELDS = ('website', 'web_url', 'city', 'city_id',
               'area', 'estate', 'estate_id', 'estate_url')

    # Parameterized INSERT (pymysql uses %s placeholders).  The original
    # built the SQL by string concatenation, which breaks on values that
    # contain quotes and is vulnerable to SQL injection.
    _SQL = "insert into house2(%s) values(%s)" % (
        ",".join(_FIELDS), ",".join(["%s"] * len(_FIELDS)))

    def __init__(self):
        # NOTE(review): connection parameters are hard-coded; consider
        # moving them to Scrapy settings.  Kept as-is to preserve behavior.
        self.conn = pymysql.connect(host='localhost', user='root', passwd='',
                                    db='demo', port=3306, charset='utf8')
        self.cursor = self.conn.cursor()

    def process_item(self, item, spider):
        """Insert *item* into MySQL and return it for downstream pipelines.

        Values are passed as query parameters (safe against quoting/
        injection).  On a database error the transaction is rolled back so
        the connection stays usable, then the error is re-raised.
        """
        values = tuple(str(item[field]) for field in self._FIELDS)
        try:
            self.cursor.execute(self._SQL, values)
            self.conn.commit()
        except pymysql.MySQLError:
            self.conn.rollback()
            raise
        # Scrapy contract: a pipeline must return the item (or raise
        # DropItem) so later pipelines receive it — the original returned None.
        return item

    def close_spider(self, spider):
        """Release the cursor and database connection when the spider ends."""
        self.cursor.close()
        self.conn.close()