# -*- coding: utf-8 -*-

import pymysql.cursors

class MapspiderPipeline(object):
    """Default no-op pipeline: hands every item through unchanged."""

    def process_item(self, item, spider):
        # Nothing to persist here; pass the item to the next pipeline stage.
        return item

"""
功能：scrapy数据存储MySQL
"""
class DBPipeline(object):
    """Scrapy pipeline that persists scraped items into the MySQL
    table ``map_info``.

    Connection parameters are read from the project settings:
    MYSQL_HOST, MYSQL_PORT, MYSQL_USER, MYSQL_PASSWORD, MYSQL_DB_NAME.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # Read DB settings once at startup; stored on the class so the
        # instance created below can use them in open_spider().
        cls.MYSQL_DB_NAME = crawler.settings.get("MYSQL_DB_NAME")
        cls.HOST = crawler.settings.get("MYSQL_HOST")
        cls.PORT = crawler.settings.get("MYSQL_PORT")
        cls.USER = crawler.settings.get("MYSQL_USER")
        cls.PASSWD = crawler.settings.get("MYSQL_PASSWORD")
        return cls()

    # --- init db connection ---
    def open_spider(self, spider):
        """Open the MySQL connection when the spider starts."""
        # BUG FIX: pymysql.connect() no longer accepts positional
        # arguments (removed in PyMySQL 1.0) — use keyword arguments.
        self.connect = pymysql.connect(
            host=self.HOST,
            user=self.USER,
            password=self.PASSWD,
            database=self.MYSQL_DB_NAME,
            # Settings values may arrive as strings; pymysql needs an int.
            port=int(self.PORT),
        )
        # Cursor used for all subsequent DB operations.
        self.cursor = self.connect.cursor()

    # --- write item data to db ---
    def process_item(self, item, spider):
        """Insert every record carried by *item* into ``map_info``.

        The item fields ('name', 'mobile', 'detail', 'provice', 'city',
        'area') are parallel lists, one entry per record. A length
        mismatch raises IndexError and rolls back the whole batch.
        Returns the item unchanged so later pipelines still see it.
        """
        if not item:
            return []

        sql = """INSERT INTO map_info(name,mobile,detail,provice,city,area)
             VALUES (%s, %s, %s, %s, %s, %s)"""

        names = item.get('name')
        mobiles = item.get('mobile')
        details = item.get('detail')
        provices = item.get('provice')
        cities = item.get('city')
        areas = item.get('area')
        try:
            rows = [
                (names[i], mobiles[i], details[i], provices[i], cities[i], areas[i])
                for i in range(len(names))
            ]
            # One executemany + one commit makes the batch atomic, so the
            # rollback below can actually undo a partial insert.  (The
            # original committed per row, which made rollback a no-op.)
            self.cursor.executemany(sql, rows)
            self.connect.commit()
        except Exception:
            # BUG FIX: the original `except expression as identifier:` was
            # a leftover snippet placeholder and raised NameError whenever
            # an insert failed.  Roll back and log instead of swallowing.
            self.connect.rollback()
            if spider is not None:
                spider.logger.exception("Failed to insert map_info rows")

        return item

    # --- tear down db connection ---
    def close_spider(self, spider):
        """Release the cursor and connection when the spider closes."""
        # BUG FIX: the original leaked the cursor; close it too.
        self.cursor.close()
        self.connect.close()