# -*- coding: UTF-8 -*-
import MySQLdb
import MySQLdb.cursors
import logging
from scrapy.utils.project import get_project_settings
from pangolin.settings import DYNAMIC_RENDERING_SPIDER


class DbOperateClass(object):
    """MySQL helper used by the crawler.

    Opens a short-lived connection per operation, runs the statement,
    commits, and closes. All errors are logged; failed operations
    return None instead of raising.
    """

    def __init__(self):
        # DB_CONF is a dict with host/user/passwd/db/port/charset keys,
        # taken from the Scrapy project settings.
        settings = get_project_settings()
        self.DB_CONF = settings.get('DB_CONF')

    def get_db_conn(self):
        """Open a new database connection.

        Returns:
            (connection, cursor) on success, or None if the connection
            could not be established (the error is logged).
        """
        db_conf = self.DB_CONF
        try:
            conn = MySQLdb.connect(host=db_conf['host'],
                                   user=db_conf['user'],
                                   passwd=db_conf['passwd'],
                                   db=db_conf['db'],
                                   port=db_conf['port'],
                                   charset=db_conf['charset'])
            # ping(True) turns on automatic reconnect for dropped links.
            conn.ping(True)
            cur = conn.cursor()
            return (conn, cur)
        except MySQLdb.Error as e:
            logging.error("Mysql db Error:%s", e)
            return None

    # Release a database connection: commit pending work, then close
    # the cursor and the connection.
    @staticmethod
    def free_db_conn(conn, cursor):
        conn.commit()
        cursor.close()
        conn.close()

    # Query returning all rows. Use conn.cursor(MySQLdb.cursors.DictCursor)
    # in get_db_conn if dict rows are wanted.
    def select_by_sql(self, sql):
        """Run a SELECT and return (rows, rowcount), or None on error."""
        logging.info("select_by_sql:%s", sql)
        pair = self.get_db_conn()
        if pair is None:
            # Connection failed; already logged in get_db_conn.
            return None
        conn, cur = pair
        try:
            count = cur.execute(sql)
            fc = cur.fetchall()
            self.free_db_conn(conn, cur)
            return (fc, count)
        except MySQLdb.Error as e:
            conn.close()
            logging.error("Mysql db Error:%s", e)

    # Query returning a single row via fetchone.
    def select_one_by_sql(self, sql):
        """Run a SELECT and return the first row (or None)."""
        logging.info("select_one_by_sql:%s", sql)
        pair = self.get_db_conn()
        if pair is None:
            return None
        conn, cur = pair
        try:
            cur.execute(sql)
            data = cur.fetchone()
            self.free_db_conn(conn, cur)
            return data
        except MySQLdb.Error as e:
            conn.close()
            logging.error("Mysql db Error:%s", e)

    # Parameterized update, eg:
    #   sql='insert into python_test values(%s,%s,%s,now())'
    #   params=(6, 'C#', 'good book')
    def update_by_param(self, sql, params):
        """Execute a parameterized statement; return affected-row count or None."""
        logging.info("update_by_param:%s", sql)
        pair = self.get_db_conn()
        if pair is None:
            return None
        conn, cur = pair
        try:
            count = cur.execute(sql, params)
            self.free_db_conn(conn, cur)
            return count
        except MySQLdb.Error as e:
            # conn is guaranteed bound here (guarded above), so rollback is safe.
            conn.rollback()
            conn.close()
            logging.error("Mysql db Error:%s", e)

    # Update without bound parameters. Caller must ensure sql is trusted:
    # prefer update_by_param for anything containing external values.
    def update_by_sql(self, sql):
        """Execute a raw statement; return affected-row count or None."""
        logging.info("update_by_sql:%s", sql)
        pair = self.get_db_conn()
        if pair is None:
            return None
        conn, cur = pair
        try:
            count = cur.execute(sql)
            self.free_db_conn(conn, cur)
            return count
        except MySQLdb.Error as e:
            conn.rollback()
            conn.close()
            logging.error("Mysql db Error:%s", e)

    # Update cpa_timer.timer_status according to the spider state.
    def update_status(self, timer_status, time_type, timer_id):
        """Set timer_status for the matching timer_type/timer_id row."""
        # Values are bound as parameters (not interpolated) to prevent
        # SQL injection and quoting bugs.
        sql = ('update cpa_timer timer set timer.timer_status = %s '
               'where timer.timer_type = %s and timer.timer_id = %s')
        logging.info("UpdateStatusSql:%s", sql)
        return self.update_by_param(sql, (timer_status, time_type, timer_id))

    # Update cpa_miss_product.deal_flag.
    def update_miss_status(self, deal_flag, url, task_id):
        """Set deal_flag for the matching url/task_id row."""
        sql = ('update cpa_miss_product product set product.deal_flag = %s '
               'where product.url = %s and product.task_id = %s')
        logging.info("update_miss_status:%s", sql)
        return self.update_by_param(sql, (deal_flag, url, task_id))

    # Delete rows from a product-list table.
    def delete_pd_list_data(self, table_name, company_code, city_code, task_id, task_date):
        """Delete matching rows from table_name; return count or None.

        table_name is a SQL identifier and cannot be a bound parameter;
        it MUST come from trusted configuration, never from user input.
        """
        sql = ('DELETE FROM %s WHERE company_code = %%s AND city_code = %%s '
               'AND task_id = %%s AND task_date = %%s' % table_name)
        logging.info("DeleteSql:%s", sql)
        return self.update_by_param(sql, (company_code, city_code, task_id, task_date))

    # Delete joined rows from the product table and the commodity table.
    def delete_pd_and_cd_data(self, table_name1, table_name2, company_code, city_code, task_id, task_date):
        """Delete matching rows from both tables (joined on detail_id).

        Table names are identifiers and cannot be bound parameters; they
        MUST come from trusted configuration, never from user input.
        """
        sql = ('DELETE pd,cd FROM %s pd,%s cd WHERE pd.detail_id = cd.detail_id '
               'AND pd.company_code = %%s AND pd.city_code = %%s '
               'AND pd.task_id = %%s AND pd.task_date = %%s'
               % (table_name1, table_name2))
        logging.info("DeleteSql:%s", sql)
        return self.update_by_param(sql, (company_code, city_code, task_id, task_date))

    # Check whether dynamic rendering is enabled for a spider.
    @staticmethod
    def is_dynamic_rendering(spider_name):
        """Return True if spider_name is configured for dynamic rendering.

        Spider names needing dynamic rendering are currently listed in
        the settings file; a DB table may replace this later.
        """
        # No database access happens here, so no MySQLdb error handling
        # is needed (the original try/except was dead code).
        return spider_name in DYNAMIC_RENDERING_SPIDER