# -*- coding:utf8 -*-
import json
import os
import requests
from datetime import date, datetime
from squirrel_core.item import BaseItem
from squirrel_core.commons.utils.tools import calc_str_md5
from squirrel_core.commons.utils.get_config import get_config

# Directory where data that could not be delivered is backed up as JSON lines.
data_back_path = os.environ.get("DATA_BACK_PATH", "/")

# Read the "base" config section once instead of repeating the same lookup
# for every key below. Exported names are unchanged.
_base_config = get_config(sections="base")
common_url = _base_config.get("common_url", "")
score_url = _base_config.get("score_url", "")
pic_url = _base_config.get("pic_url", "")
raceno_url = _base_config.get("raceno_url", "")
wx_url = _base_config.get("wx_url", "")        # WeChat-work webhook for alerts
mq_common = _base_config.get("mq_common", "")  # fallback MQ queue names
mq_raceno = _base_config.get("mq_raceno", "")
mq_score = _base_config.get("mq_score", "")
mq_pic = _base_config.get("mq_pic", "")
__all__ = ["BasePipeline"]


class BasePipeline(object):
    """Base store pipeline: dedupes crawled items and pushes data downstream.

    Data is delivered, in order of preference, to an HTTP endpoint, then to a
    message queue, and finally to a local backup file plus a WeChat alert.
    """

    def __init__(self, crawler, *args, **kwargs):
        """Read pipeline settings from the crawler and from ``kwargs``.

        Args:
            crawler: running crawler; ``crawler.settings`` and
                ``crawler.spider.queues`` must be available.
            **kwargs: usually produced by :meth:`get_init_args`; every key is
                also exposed as an instance attribute.
        """
        settings = crawler.settings
        self.statics_item = settings.get('STATICS_ITEM', [])
        self.data_table_name = settings.get('TABLE_NAME', '')
        # Expose every keyword argument as an attribute, then pick out the
        # ones we rely on explicitly (with safe defaults).
        self.__dict__.update(**kwargs)
        self.store_dupe_filter_key = kwargs.get('store_dupe_filter_key', "")
        self.db_reconnect_retry = kwargs.get("db_reconnect_retry", 0)
        self.sleep_time_once_failed = kwargs.get("sleep_time_once_failed", 0)
        # Queue used to hand intermediate items back to the parser stage.
        self.upstream_queue = crawler.spider.queues[1]

    @classmethod
    def get_init_args(cls, settings, db_setting_key, *args, **kwargs):
        """Prepare the pipeline's common constructor arguments.

        Args:
            settings: mapping with retry and dupe-filter configuration keys.
            db_setting_key: key in ``settings`` holding the DB settings value.

        Returns:
            A tuple of two elements: an (empty) positional-argument list and a
            keyword-argument dict for :meth:`__init__`.
        """
        store_key = (settings["STORE_DUPEFILTER_KEY_PATTERN"]
                     % settings["DUPE_FILTER_KEY"])
        parser_key = (settings["PARSER_DUPEFILTER_KEY_PATTERN"]
                      % settings["PARSER_DUPE_FILTER_KEY"])
        _kwargs = {
            "db_setting": settings[db_setting_key],
            "store_dupe_filter_key": store_key,
            "parser_dupe_filter_key": parser_key,
            "db_reconnect_retry": settings["db_reconnect_retry"],
            "sleep_time_once_failed": settings["sleep_time_once_failed"],
        }
        return [], _kwargs

    def clear_store_dupefilter(self, settings, server):
        """Drop both dedupe key sets when running a full (non-incremental) crawl.

        Args:
            settings: mapping with the dupe-filter key/pattern entries.
            server: dedupe store exposing ``set_clear(key)``.
        """
        key = (settings["STORE_DUPEFILTER_KEY_PATTERN"]
               % settings["DUPE_FILTER_KEY"])
        parser_key = (settings["PARSER_DUPEFILTER_KEY_PATTERN"]
                      % settings["PARSER_DUPE_FILTER_KEY"])
        if not settings.get('CRAWL_INCREMENT', True):
            server.set_clear(key)
            server.set_clear(parser_key)

    @classmethod
    def from_crawler(cls, crawler):
        """Factory used by the framework: build the pipeline from a crawler."""
        args, kwargs = cls.get_init_args(crawler.settings, "DATABASE_DATA_SETTING")
        instance = cls(crawler, *args, **kwargs)
        instance.clear_store_dupefilter(crawler.settings, crawler.server)
        # send_data() needs the crawler to stash the MQ connection on it.
        instance.crawler = crawler
        return instance

    def get_dup_str(self, item):
        """Return ``(dup_str, dupe_filter_key)`` extracted from a ``BaseItem``.

        Non-``BaseItem`` objects yield two empty strings.
        """
        if isinstance(item, (BaseItem, )):
            return str(item.get("_dup_str", "")), str(item.get("_dup_key", ""))
        return "", ""

    def check_dup_exist(self, spider, dup_str, dupe_filter_key):
        """Return True when ``dup_str`` was already seen (item is a duplicate).

        Dedupe is skipped entirely (returns False) when the spider disables
        it or when either the string or the key is missing.
        """
        if not all([spider.need_ssdbstore_dup, dup_str, dupe_filter_key,
                    isinstance(dup_str, str)]):
            return False
        return self._is_dup_str_exists(spider, dup_str, dupe_filter_key)

    def _is_dup_str_exists(self, inst_spider, dup_str, dupe_filter_key):
        """Atomically register ``dup_str``; True means it already existed.

        Best-effort: any storage error is treated as "not a duplicate" so a
        flaky dedupe backend never drops data.
        """
        try:
            md5_key = calc_str_md5(dup_str)
            # set_add returns falsy when the member was already present.
            return not inst_spider.server.set_add(dupe_filter_key, md5_key)
        except Exception:
            return False

    def send_data(self, data, logger):
        """Deliver ``data`` via HTTP; fall back to MQ, then to file + alert.

        Args:
            data: payload dict; the internal ``_dup_str`` key is stripped.
            logger: spider logger used for progress/error messages.
        """
        data.pop("_dup_str", "")
        logger.info("准备发送数据")
        _url, mq_queue_name = self._route(data)
        if self._post_with_retry(_url, data, logger):
            return
        if self._send_to_mq(mq_queue_name, data, logger):
            return
        # Every HTTP and MQ attempt failed: keep a local copy and alert.
        self._backup_to_file(data)
        self._notify_wx(_url, logger)

    def _route(self, data):
        """Pick the HTTP endpoint and fallback MQ queue for this payload."""
        if data.get("webType", "") == "signup":
            return raceno_url, mq_raceno
        if data.get("crawlerType", "") == "pic":
            return pic_url, mq_pic
        if data.get("crawlerType", "") == "score":
            return score_url, mq_score
        return common_url, mq_common

    def _post_with_retry(self, _url, data, logger, retries=3):
        """POST ``data`` as JSON to ``_url``; True on the first success.

        NOTE: the original code wrapped this in a second try/except whose
        handler was unreachable (the inner one already caught Exception);
        that dead branch is removed.
        """
        for _ in range(retries):
            try:
                resp = requests.post(_url, json=data,
                                     headers={"Content-Type": "application/json"},
                                     timeout=10)
                logger.info(f"发送结果[{_url}]：{resp.text}")
                return True
            except Exception as e:
                logger.info(f"数据推送接口访问异常：{e}")
        return False

    def _send_to_mq(self, mq_queue_name, data, logger, retries=3):
        """Push ``data`` onto the fallback MQ queue; True on success."""
        for _ in range(retries):
            try:
                logger.info(f"准备将推送失败的数据发送至：{mq_queue_name}")
                from squirrel_core.commons.utils.mq_connection import MQConnectionSingleton
                mq = MQConnectionSingleton.get_instance()
                mq.declare_queue(mq_queue_name)
                mq.send_message(mq_queue_name, json.dumps(data, ensure_ascii=False))
                # Keep a handle so the crawler can close the connection later.
                self.crawler.mq_server = mq
                return True
            except Exception:
                # Retry; was a bare except that also swallowed SystemExit.
                continue
        return False

    def _backup_to_file(self, data):
        """Append ``data`` as one JSON line to today's local backup file."""
        try:
            full_file_name = os.path.join(data_back_path, f"{date.today()}.txt")
            with open(full_file_name, "a+", encoding="utf-8") as f:
                f.write(json.dumps(data, ensure_ascii=False) + "\n")
        except Exception:
            # Best-effort backup: never let disk trouble break the pipeline.
            pass

    def _notify_wx(self, _url, logger):
        """Send a WeChat-work alert about the failed push (best-effort)."""
        current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        payload = {
            "msgtype": "text",
            "text": {
                "content": f"【数据推送接口提醒：{current_time}】\n数据推送错误，请及时处理！（{_url}）",
                "mentioned_list": ["@all"]
            }
        }
        try:
            requests.post(wx_url, headers={"Content-Type": "text/plain"},
                          data=json.dumps(payload), timeout=10)
        except Exception as e:
            logger.info(f"代理异常监控信息发送失败, 原因: {e}")

    def process_item(self, item, spider):
        """Dedupe and dispatch one item; always return it for later pipelines."""
        logger = spider.logger
        if isinstance(item, BaseItem):
            dup_str, specific_dupe_filter_key = self.get_dup_str(item)
            if self.check_dup_exist(spider, dup_str,
                                    specific_dupe_filter_key or self.store_dupe_filter_key):
                logger.info("重复数据")
            else:
                _data = item.process_item()
                # _data_type == 0 marks final data; anything else goes back
                # to the parser stage for further processing.
                if _data.get("_data_type", 0) == 0:
                    self.send_data(_data.get("_data", {}), logger=logger)
                else:
                    self.upstream_queue.put(json.dumps(_data))
                    spider.close_spider_when_idle()
        return item