# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
# -*- coding: utf-8 -*-
from twisted.enterprise import adbapi
from xiaodian.settings import *

class MysqlDpInfoPipeline:
    """Scrapy item pipeline that asynchronously inserts DpInfoItem rows
    into the MySQL table ``dianping_info`` through Twisted's adbapi
    connection pool (driver: pymysql)."""

    @classmethod
    def from_crawler(cls, crawler):
        """Build a pipeline instance from the project settings.

        Reads the MySQL connection parameters, falling back to defaults
        when a key is absent.  Note: settings are stored on the
        *instance* (the original set them on the class, which would
        share mutable state across all instances).
        """
        obj = cls()
        obj.MYSQL_DB_NAME = crawler.settings.get("MYSQL_DB_NAME", 'scrapy_default')
        obj.HOST = crawler.settings.get("MYSQL_HOST", 'localhost')
        obj.PORT = crawler.settings.get("MYSQL_PORT", 3306)
        obj.USER = crawler.settings.get("MYSQL_USER", 'root')
        obj.PASSWD = crawler.settings.get("MYSQL_PASSWORD", '')
        return obj

    def open_spider(self, spider):
        """Create the adbapi connection pool when the spider starts."""
        self.dbpool = adbapi.ConnectionPool(
            'pymysql',
            host=self.HOST,
            port=self.PORT,
            user=self.USER,
            passwd=self.PASSWD,
            db=self.MYSQL_DB_NAME,
            charset='utf8',
        )

    def close_spider(self, spider):
        """Shut down the connection pool when the spider closes."""
        self.dbpool.close()

    def process_item(self, item, spider):
        """Queue DpInfoItem instances for insertion; pass every item on.

        Bug fix: the original compared the class *object*
        (``item.__class__``) to the string ``'DpInfoItem'`` — always
        False, so nothing was ever written to the database.  Compare
        the class name instead.
        """
        if type(item).__name__ == 'DpInfoItem':
            # Run the insert on a pool thread; errors surface via the
            # returned Deferred (fire-and-forget here, as before).
            self.dbpool.runInteraction(self.insert_dp_db, item)
        return item

    def insert_dp_db(self, tx, item):
        """Insert one item into dianping_info.

        Runs inside a pool transaction; ``tx`` is a DB-API cursor.
        Uses a parameterized statement, so values are escaped by the
        driver (no SQL injection via item fields).
        """
        sql = """insert into dianping_info(shop_group_id,shop_id,shop_name,shop_type,city_id,address,alt_name,branch_name,business_hours,cross_road,glat,glng,phone_no,phone_no2,gmt_create,gmt_update) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
        tx.execute(sql, (item['shop_group_id'],
            item['shop_id'],
            item['shop_name'],
            item['shop_type'],
            item['city_id'],
            item['address'],
            item['alt_name'],
            item['branch_name'],
            item['business_hours'],
            item['cross_road'],
            item['glat'],
            item['glng'],
            item['phone_no'],
            item['phone_no2'],
            item['gmt_create'],
            item['gmt_update']))
