# -*- coding: utf-8 -*-
"""
爬取数据入库Mysql
"""
import pymysql
import logging
logging.getLogger().setLevel(logging.INFO)

from scrapy.utils.project import get_project_settings
settings = get_project_settings()


class EarthquakePipeline(object):
    """Scrapy item pipeline that persists earthquake items into MySQL.

    Deduplicates on the DID column: an item whose DID already exists in
    the EARTHQUAKE table is skipped, otherwise a new row is inserted.
    The item is always returned so downstream pipelines keep running.
    """

    def open_spider(self, spider):
        """Open the MySQL connection when the spider starts.

        Connection parameters come from the Scrapy project settings
        (MYSQL_HOST / MYSQL_PORT / MYSQL_USER / MYSQL_PASSWORD / MYSQL_DB).

        Raises:
            Exception: re-raised if the connection cannot be established,
                aborting the crawl early instead of failing on every item.
        """
        # MySQL connection parameters
        host = settings['MYSQL_HOST']
        port = settings['MYSQL_PORT']
        user = settings['MYSQL_USER']
        password = settings['MYSQL_PASSWORD']
        database = settings['MYSQL_DB']

        try:
            self.conn = pymysql.connect(
                host=host, port=port, user=user, password=password,
                database=database, charset='utf8')
            logging.info('Mysql数据库连接成功。')
        except Exception as e:
            logging.error('Mysql数据库连接失败！')
            raise e

    def process_item(self, item, spider):
        """Insert *item* into EARTHQUAKE unless its DID already exists.

        Uses parameterized queries (driver-side escaping) instead of the
        original string interpolation, which was vulnerable to SQL
        injection and broke on values containing quotes.
        """
        # Check whether a record with this DID is already stored.
        select_sql = 'SELECT COUNT(1) AS CNT FROM EARTHQUAKE WHERE DID=%s'

        try:
            # pymysql cursors are context managers: guarantees the cursor
            # is closed even if execute()/fetchone() raises.
            with self.conn.cursor() as cursor:
                cursor.execute(select_sql, (item['did'],))
                result_count, = cursor.fetchone()

            if result_count > 0:
                logging.info('数据已存在，无需重复录入！')
                return item
        except Exception:
            # Best-effort: log with traceback and pass the item on
            # rather than dropping it from the pipeline.
            logging.exception('数据查询错误：%s' % select_sql)
            return item

        # Insert the new record; VERSION is set server-side via now().
        insert_sql = (
            'INSERT INTO EARTHQUAKE(DID,EARTHQUAKE_LEVEL,EARTHQUAKE_TIME,'
            'EARTHQUAKE_LON,EARTHQUAKE_LAT,EARTHQUAKE_DEPTH,'
            'EARTHQUAKE_ADDRESS,VERSION) '
            'VALUES(%s,%s,%s,%s,%s,%s,%s,now())')

        try:
            with self.conn.cursor() as cursor:
                cursor.execute(insert_sql, (
                    item['did'],
                    item['earthquake_level'],
                    item['earthquake_time'],
                    item['earthquake_lon'],
                    item['earthquake_lat'],
                    item['earthquake_depth'],
                    item['earthquake_address']))
            logging.info('插入数据成功!')
        except Exception:
            # The original passed the exception object as a stray
            # positional %-format argument to logging.error (a formatting
            # bug); logging.exception records the traceback correctly.
            logging.exception('插入数据错误：%s' % insert_sql)
            self.conn.rollback()
            return item

        self.conn.commit()
        return item

    def close_spider(self, spider):
        """Close the MySQL connection when the spider finishes."""
        self.conn.close()
        logging.info('Mysql数据库连接已关闭。')
