"""
@Description:
@Usage:
@Author: liuxianglong
@Date: 2022/5/12 下午10:13
"""
import json
import logging
import datetime
from twisted.internet import task
from threading import Timer, Lock

logger = logging.getLogger(__name__)


class SpiderInfo:
    """Identity bundle (host / project / spider name / job id / stats
    collector) shared by the reporter classes below."""

    def __init__(self, crawler, spider):
        # NOTE(review): 'host' is a hard-coded placeholder — presumably
        # meant to become the real hostname; confirm before relying on it.
        self.host, self.project = 'host', crawler.settings.get('BOT_NAME')
        self.spider, self.stats = spider.name, crawler.stats
        # Scrapyd attaches `_job` to the spider; fall back when standalone.
        self.job_id = getattr(spider, '_job', '000000')
        # TODO: self.api = API.from_crawler(crawler)


class SpiderStats(SpiderInfo):
    """Periodically snapshots crawler stats and prints/sends a report.

    ``spider_open`` starts a twisted ``LoopingCall`` that invokes ``send``
    every ``interval`` seconds; ``spider_close`` emits one final report
    (carrying the close reason) before stopping the loop.
    """

    def __init__(self, crawler, spider):
        SpiderInfo.__init__(self, crawler, spider)
        self.task = None    # twisted LoopingCall, created in spider_open
        self.interval = 5   # seconds between reports

    def spider_open(self, spider):
        # Tag the stats collector with identity info so it appears in dumps.
        self.stats.set_value('host', self.host)
        self.stats.set_value('project', self.project)
        self.stats.set_value('spider', self.spider)
        self.stats.set_value('job_id', self.job_id)
        self.task = task.LoopingCall(self.send, spider)
        self.task.start(self.interval)

    def spider_close(self, spider, reason):
        if self.task and self.task.running:
            # Final report: carries the close reason and flips 状态 to 0.
            self.send(spider, finish_reason=reason, close=True)
            self.task.stop()

    @staticmethod
    def _normalize(key, value):
        """Convert a raw stat value into its report-friendly form.

        - start_time / finish_time: shift by +8 hours and stringify.
          NOTE(review): hard-coded CST offset — presumably the stats are
          recorded in UTC; confirm before deploying elsewhere.
        - memusage/*: bytes -> 'X.YMB' string.
        Values that are already strings pass through unchanged.
        """
        if key in ('start_time', 'finish_time') and not isinstance(value, str):
            return str(value + datetime.timedelta(hours=8))
        if key.startswith('memusage') and not isinstance(value, str):
            return str(round(value / (1024 * 1024), 1)) + 'MB'
        return value

    def send(self, spider, finish_reason=None, close=False):
        """Build, print and (eventually) send one stats report.

        :param spider: the running spider (unused; kept for LoopingCall).
        :param finish_reason: scrapy close reason, set only on the final call.
        :param close: True on the final call; sets 状态 to 0.
        :return: the report dict (the LoopingCall ignores it; returned for
                 callers/tests that want the data).

        Example report keys: 主机/项目/爬虫/任务ID, per-level log counts,
        downloader request/response counters, 内存使用 (MB as float),
        日志每小时错误率, 开始时间/完成时间, 状态 and the raw stats as JSON
        under 统计数据.
        """
        stats = self.stats.get_stats()
        new_stats = {k: self._normalize(k, v) for k, v in stats.items()}

        d = dict()
        d['主机'] = self.host
        d['项目'] = self.project
        d['爬虫'] = self.spider
        d['任务ID'] = self.job_id
        d['记录时间'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

        d['日志DEBUG计数'] = new_stats.get('log_count/DEBUG', 0)
        d['日志INFO计数'] = new_stats.get('log_count/INFO', 0)
        d['日志WARNING计数'] = new_stats.get('log_count/WARNING', 0)
        d['日志ERROR计数'] = new_stats.get('log_count/ERROR', 0)
        d['日志CRITICAL计数'] = new_stats.get('log_count/CRITICAL', 0)
        d['下载器请求总数'] = new_stats.get('downloader/request_count', 0)
        d['GET请求数'] = new_stats.get('downloader/request_methods_count/GET', 0)
        d['POST请求数'] = new_stats.get('downloader/request_methods_count/POST', 0)
        d['下载器响应总数'] = new_stats.get('downloader/response_count', 0)
        d['200响应数'] = new_stats.get('downloader/response_status_count/200', 0)
        d['404响应数'] = new_stats.get('downloader/response_status_count/404', 0)
        d['下载器异常总数'] = new_stats.get('downloader/exception_count', 0)
        d['爬虫爬取页面总数'] = new_stats.get('spider/pages_crawled', 0)
        d['抓取ITEM数'] = new_stats.get('item_scraped_count', 0)

        log_total_count = new_stats.get('log_count/log_hourly_total_count', 0)
        log_error_count = new_stats.get('log_count/log_hourly_error_count', 0)
        # Guard the division: no logs yet means a 0 error rate, not a crash.
        d['日志每小时错误率'] = round(log_error_count / log_total_count, 4) if log_total_count else 0
        d['内存使用'] = float(new_stats.get('memusage/max', '0MB').replace('MB', '').strip())
        # Reuse the counters fetched above instead of re-reading the dict.
        d['日志量'] = float(log_total_count)
        d['日志量err'] = float(log_error_count)
        d['开始时间'] = new_stats.get('start_time')
        d['完成时间'] = new_stats.get('finish_time', None)
        d['完成原因'] = finish_reason
        d['经过的时间'] = new_stats.get('elapsed_time_seconds', None)
        d['状态'] = 1 if not close else 0
        d['统计数据'] = json.dumps(new_stats, ensure_ascii=False)
        # TODO: self.api.send_stats_data(raw_json=d)
        print("##################")
        print(json.dumps(d, indent=4, ensure_ascii=False))
        print("##################")
        return d


class ErrorLogRate(SpiderInfo):
    """Periodically reports the daily and hourly log error rates,
    driven by a twisted LoopingCall between spider_open and spider_close."""

    def __init__(self, crawler, spider):
        SpiderInfo.__init__(self, crawler, spider)
        self.task = None    # twisted LoopingCall, created in spider_open
        self.interval = 5   # seconds between reports

    def spider_open(self, spider):
        self.task = task.LoopingCall(self.send, spider)
        self.task.start(self.interval)

    def spider_close(self, spider, reason):
        if self.task and self.task.running:
            # Emit one last report before stopping the loop.
            self.send(spider)
            self.task.stop()

    def _rate_fields(self, prefix):
        """Return (total, errors, rate) for the ``log_daily``/``log_hourly``
        stat counters; rate is 0 when no logs have been counted yet."""
        total = self.stats.get_value('log_count/%s_total_count' % prefix, 0)
        errors = self.stats.get_value('log_count/%s_error_count' % prefix, 0)
        rate = round(errors / total, 4) if total else 0
        return total, errors, rate

    def send(self, spider):
        """Build the error-rate report: daily fields first, then hourly.

        NOTE(review): the daily values are written into ``d`` and then
        overwritten by the hourly ones before the (commented-out) daily
        send would fire — if both reports are meant to go out, a copy of
        ``d`` must be sent at the marked point. Behavior kept as-is.

        :return: the report dict, with the hourly values in place
                 (the LoopingCall ignores it; returned for callers/tests).
        """
        now = datetime.datetime.now()

        d = dict()
        d['主机'] = self.host
        d['项目'] = self.project
        d['蜘蛛'] = self.spider
        d['job_id'] = self.job_id
        d['日志日期'] = now.date().strftime('%Y-%m-%d')
        d['记录时间'] = now.strftime('%Y-%m-%d %H:%M:%S')

        d['日志总数'], d['日志错误计数'], d['日志错误率'] = self._rate_fields('log_daily')
        # TODO: self.api.send_errlog_rate(raw_json=d)  # daily report

        d['日志总数'], d['日志错误计数'], d['日志错误率'] = self._rate_fields('log_hourly')
        d['日志小时'] = now.hour
        # TODO: self.api.send_errlog_rate(raw_json=d)  # hourly report
        return d


class LogHandler(logging.Handler, SpiderInfo):
    """Logging handler that counts log levels into crawler stats and
    buffers ERROR/CRITICAL records for batched sending.

    Daily and hourly counters restart whenever the date / hour rolls
    over. Error records accumulate in an in-memory buffer which is
    flushed when it reaches ``buffer_size`` entries, or at the latest
    ``flush_frequency_in_sec`` seconds after the first buffered record
    (via a one-shot daemon Timer).
    """

    def __init__(self, crawler, spider, *args, **kwargs):
        super().__init__(*args, **kwargs)
        SpiderInfo.__init__(self, crawler, spider)
        self.now_date = None   # last date seen, 'YYYY-MM-DD'
        self.now_hour = None   # last hour seen, 0-23
        self._buffer = []      # pending error-log dicts, guarded by _buffer_lock
        self.buffer_size = crawler.settings.get('CS_ERR_LOG_BUFFER_SIZE', 500)
        self.flush_frequency_in_sec = crawler.settings.get('CS_ERR_LOG_SEND_FREQ', 20)
        self.enable_send_err_text = crawler.settings.get('CS_ENABLE_SEND_ERR_LOG', True)
        self._buffer_lock = Lock()
        self._timer = None     # pending one-shot threading.Timer, if any

    def flush(self):
        """Flush the buffer to MySQL (the actual send is still a TODO).

        Cancels any pending timer, swaps the buffer out under the lock
        (so concurrent ``emit`` calls keep appending to a fresh list),
        and would ship the swapped-out batch outside the critical section.

        :return: None
        """
        if self._timer is not None and self._timer.is_alive():
            self._timer.cancel()
        self._timer = None

        if self._buffer:
            try:
                with self._buffer_lock:
                    logs_buffer = self._buffer
                    self._buffer = []
                # TODO: ship `logs_buffer` to MySQL, e.g.:
                #   self.api.send_errlog_content(raw_json=logs_buffer)
                # Each entry is a dict shaped like:
                #   {"host": ..., "project": ..., "spider": ..., "job_id": ...,
                #    "record_time": "YYYY-MM-DD HH:MM:SS", "content": "...",
                #    "level": "ERROR", "log_time": "YYYY-MM-DD HH:MM:SS",
                #    "module": "...", "lineno": 249,
                #    "exc_info": "Traceback ..." or None, "func_name": "..."}
            except Exception as exception:
                logger.warning(exception)

    def close(self):
        """Flush any buffered records and release the timer.

        NOTE(review): does not chain to logging.Handler.close();
        confirm that is intentional.

        :return: None
        """
        if self._timer is not None:
            self.flush()
        self._timer = None

    def __schedule_flush(self):
        # Arm a one-shot daemon timer so a partially filled buffer still
        # gets flushed within flush_frequency_in_sec seconds.
        if self._timer is None:
            self._timer = Timer(self.flush_frequency_in_sec, self.flush)
            # Fix: Timer.setDaemon() is deprecated since Python 3.10.
            self._timer.daemon = True
            self._timer.start()

    def emit(self, record):
        """Count the record into stats; buffer it if it is an error."""
        now = datetime.datetime.now()

        today = now.date().strftime('%Y-%m-%d')
        if today != self.now_date:
            # Date rolled over: restart the daily counters.
            self.now_date = today
            self.stats.set_value('log_count/log_daily_total_count', 0)
            self.stats.set_value('log_count/log_daily_error_count', 0)

        if now.hour != self.now_hour:
            # Hour rolled over: restart the hourly counters.
            self.now_hour = now.hour
            self.stats.set_value('log_count/log_hourly_total_count', 0)
            self.stats.set_value('log_count/log_hourly_error_count', 0)

        self.stats.inc_value('log_count/log_daily_total_count')
        self.stats.inc_value('log_count/log_hourly_total_count')

        if record.levelname not in ('ERROR', 'CRITICAL'):
            return
        self.stats.inc_value('log_count/log_daily_error_count')
        self.stats.inc_value('log_count/log_hourly_error_count')

        if not self.enable_send_err_text:
            return
        d = {
            "host": self.host,
            "project": self.project,
            "spider": self.spider,
            "job_id": self.job_id,
            "record_time": now.strftime('%Y-%m-%d %H:%M:%S'),
            # Fix: record.message / record.asctime only exist after a
            # Formatter has processed the record; a handler receives the
            # raw record, so use the raw-record equivalents instead.
            "content": record.getMessage(),
            "level": record.levelname,
            "log_time": datetime.datetime.fromtimestamp(record.created).strftime('%Y-%m-%d %H:%M:%S'),
            "module": record.name,
            "lineno": record.lineno,
            "exc_info": record.exc_text,
            "func_name": record.funcName,
        }
        with self._buffer_lock:
            self._buffer.append(d)
            # Fix: decide on the threshold inside the lock so two
            # concurrent emits cannot both observe a stale length.
            buffer_full = len(self._buffer) >= self.buffer_size
        if buffer_full:
            self.flush()
        else:
            self.__schedule_flush()