# -*- coding: utf-8 -*-
from pymysqlpool import ConnectionPool
from scrapy.log import logger
from jobbole.dao.mysql_config import db_config
from jobbole.items import JobboleArticleItem
from jobbole.service.article_service import ArticleInfoService


class ArticleInfoDao:
    """MySQL data-access object for the jobbole article info/detail tables.

    All methods are classmethods sharing one lazily created connection
    pool held in the class attribute ``pool``.
    """

    # Shared, lazily initialised connection pool (class-level singleton).
    pool = None

    def __init__(self):
        pass

    @classmethod
    def connection_pool(cls):
        """Return the shared connection pool, creating it on first use.

        NOTE(review): the check-then-create is not thread-safe; two racing
        threads could each build a pool. Confirm single-threaded use.
        """
        if not cls.pool:
            cls.pool = ConnectionPool(**db_config)
            cls.pool.connect()
        return cls.pool

    @classmethod
    def get_article_info(cls, xid):
        """Fetch the article base-info row with the given id.

        :param xid: article id; converted to str for the query parameter.
        :return: a populated JobboleArticleItem, or None when no row matches.
        """
        sql_query = "SELECT id, title, group_1, group_2, group_3, group_4, group_5, url, publish_date " \
                    "FROM zhihu.jobbole_article_info WHERE id = %s"
        with cls.connection_pool().cursor() as cursor:
            # Parameters must be a 1-tuple: "(str(xid))" is just a
            # parenthesised string, hence the trailing comma.
            cursor.execute(sql_query, (str(xid),))
            for res in cursor:
                article = JobboleArticleItem()
                # Copy every selected column straight into the item.
                for key in ("id", "title", "group_1", "group_2", "group_3",
                            "group_4", "group_5", "url", "publish_date"):
                    article[key] = res[key]
                return article
        return None

    @classmethod
    def get_article_detail(cls, xid):
        """Fetch the article content for the given id, or None if absent."""
        sql_query = "SELECT content FROM zhihu.jobbole_article_detail WHERE id = %s"
        with cls.connection_pool().cursor() as cursor:
            cursor.execute(sql_query, (str(xid),))
            for res in cursor:
                return res["content"]
        return None

    @classmethod
    def save(cls, xid, title, group_1, group_2, group_3, group_4, group_5, content, url, updated_time):
        """Insert the article base-info and detail rows, skipping duplicates.

        Each insert is wrapped in its own try/except so a failure on one
        table does not prevent the other insert from being attempted;
        errors are logged with traceback and swallowed (best-effort,
        matching the original behaviour).
        """
        # Article base-info table.
        try:
            if cls.get_article_info(xid):
                logger.info(">>>>>>>>>>>>>>>>>>>>>>>>>数据已经存在，ID=" + str(xid) + ", TITLE:" + title)
            else:
                publish_date = ArticleInfoService.tran_string_to_date(updated_time)
                sql_insert = "INSERT INTO zhihu.jobbole_article_info ( Id, title, group_1, group_2, group_3, " \
                             "group_4, group_5, url, publish_date) VALUES  ( %s, %s, %s, %s, %s, %s, %s, %s, %s)"
                with cls.connection_pool().cursor() as cursor:
                    cursor.execute(sql_insert, (str(xid), title, group_1, group_2, group_3, group_4, group_5, url,
                                                publish_date))
        except Exception:
            # logger.exception records the traceback, unlike logger.error(err).
            logger.exception(">>>>>>>>>>>>>>>>>>>>>>>>>博客信息保存时发生异常！")

        # Article detail table.
        try:
            if cls.get_article_detail(xid) is None:
                content = ArticleInfoService.tran_tag_in_content(content)
                sql_insert = "INSERT INTO zhihu.jobbole_article_detail ( Id, content ) VALUES  ( %s, %s )"
                with cls.connection_pool().cursor() as cursor:
                    cursor.execute(sql_insert, (str(xid), content))
        except Exception:
            logger.exception(">>>>>>>>>>>>>>>>>>>>>>>>>博客内容保存时发生异常！")




