import json
import datetime
from copy import deepcopy
from jinja2 import Template
from package.connector.elastic_db import es_db
from common.analyzer_common.search import DISearch
from common.analyzer_common.operator import OP
from package.connector.kafka_db import kafka_producer
from common.cache import alert_supress_cache


class Alert:
    """Base class for alert rules.

    Workflow:
      1. query data (DISearch)
      2. evaluate the response against configured triggers
      3. emit alert events / notifications to Kafka
    """
    # Kafka topic receiving one event per matched trigger.
    Topic = 'custom_warning_log'
    # Kafka topic receiving rendered, human-facing notifications.
    NotifyTopic = 'system_notify_test'
    # Intended number of sample log documents per alert.
    # NOTE(review): not referenced in this file — samples are taken straight
    # from the search response hits; confirm whether this should cap them.
    SampleSize = 5
    # Default Jinja2 notification template; subclasses override it.
    # NOTE: this class attribute shadows the imported jinja2.Template only at
    # class-body scope; methods still resolve the module-level jinja2.Template.
    Template = """"""

    @classmethod
    def build_alert_condition(cls, *args, **kwargs):
        """Build the aggregation clause for the alert search.

        Base implementation returns None (no extra aggregations);
        subclasses override this to add group/metric aggregations.
        """
        pass

    @classmethod
    def render_content(cls, configure, variables):
        """Render the notification body with Jinja2.

        Uses configure['template'] when present, falling back to cls.Template.
        """
        tpl = configure.get('template', cls.Template)
        return Template(tpl).render(variables)

    @classmethod
    def check_trigger(cls, resp, configure):
        """Yield at most one matched trigger based on the total hit count.

        :param resp: Elasticsearch-style response dict.
        :param configure: alert configuration holding a 'triggers' list.
        :yields: {'count': <hit total>, 'trigger': <trigger dict>}
        """
        triggers = configure.get('triggers', [])

        for trigger in triggers:
            operator = trigger['operator']
            v = resp['hits']['total']['value']
            if op_func := getattr(OP, operator['op']):
                if op_func(v, operator['count']):
                    yield {'count': v, 'trigger': trigger}
                    break  # event count: once one rule matches, skip the rest

    @classmethod
    def notify(cls, triggers, supress, configure, variables: dict):
        """Send alert events and, unless suppressed, notifications.

        Matched triggers are always written to cls.Topic; the human-facing
        notifications on cls.NotifyTopic are skipped while a suppression
        window is active.  # TODO notification payload format is provisional
        """
        notifies_configure = configure.get('notifies_configure', [])
        # BUGFIX: previously `alert_supress_cache.get(id)` — the *builtin*
        # id function was used as the cache key instead of the alert id.
        alert_id = variables.get('id')
        supress_count = alert_supress_cache.get(alert_id)

        for trigger in triggers:
            kafka_producer.send(cls.Topic, value=json.dumps({**variables, 'supress_count': supress_count, **trigger},
                                                            ensure_ascii=False))
        if supress:  # notification suppression enabled
            if supress_count:  # suppression window still active
                alert_supress_cache.incr(alert_id)
                return

            # open a new suppression window of `supress` minutes
            alert_supress_cache.set(alert_id, ex=supress * 60)

        for notify_configure in notifies_configure:  # send notifications
            for trigger in triggers:
                content = cls.render_content(configure, {**variables, **trigger})
                kafka_producer.send(cls.NotifyTopic, value=json.dumps({
                    'notify_configure': notify_configure,
                    'data': {'content': content}
                }, ensure_ascii=False))

    @classmethod
    def check(cls, search, configure, id=None, name='', supress=None, category=None):
        """Run the alert search and fire notifications for matched triggers.

        :param search: base DISearch query dict (deep-copied before mutation).
        :param configure: triggers / notify targets / optional template.
        :param id: alert rule id (parameter name kept for caller compatibility,
                   although it shadows the builtin).
        :param supress: suppression window in minutes, or falsy to disable.
        """
        # BUGFIX: utcnow() returns a *naive* datetime, so .timestamp()
        # interpreted it in local time — wrong epoch-ms on non-UTC hosts.
        now_ts = int(datetime.datetime.now(datetime.timezone.utc).timestamp() * 1000)

        aggs = cls.build_alert_condition(configure)
        s = deepcopy(search)
        if aggs:
            s['condition']['aggs'] = aggs
        resp = DISearch.search(s)

        matched_triggers = list(cls.check_trigger(resp, configure))
        if not matched_triggers:
            return

        variables = {
            'id': id,
            'category': category,
            'name': name,

            'start_time': None,
            'end_time': now_ts,

            'supress': supress,
            'search': search,
            'samples': [item['_source'] for item in resp.get('hits', {}).get('hits', [])]  # log samples
        }
        cls.notify(matched_triggers, supress, configure, variables=variables)
    

class CountAlert(Alert):
    """Alert on the total event count returned by the search."""
    Category = 'count'
    # BUGFIX: removed a stray '}' after {{trigger}} that Jinja2 rendered
    # literally into every notification.
    Template = """
规则名: {{name}},
告警类型: {{category}},
触发规则: {{trigger}},

当前数量: {{count}},
op: {{trigger.operator.op}},
阈值: {{trigger.operator.count}}
"""


class GroupAlert(Alert):
    """Grouped alert: each matching group fires its own trigger."""
    Category = 'group'
    # BUGFIX: removed a stray '}' after {{trigger}}, and changed {{group}} to
    # {{trigger_group}} — check_trigger yields the key 'trigger_group', so
    # {{group}} always rendered empty.
    Template = """
规则名: {{name}},
告警类型: {{category}},
触发规则: {{trigger}},
分组: {{trigger_group}},
当前数量: {{count}},
op: {{trigger.operator.op}},
阈值: {{trigger.operator.count}}
"""

    @classmethod
    def build_alert_condition(cls, configure):
        """Build composite group aggregations from the configured triggers.

        Each trigger, minus its 'operator' clause, becomes a group metric.
        """
        group_options = configure.get('group_options', [])
        triggers = configure.get('triggers', [])
        group_metrics = [{k: v for k, v in trigger.items() if k != 'operator'} for trigger in triggers]
        aggs = DISearch.build_group_aggs({**group_options, 'metrics': group_metrics})
        return aggs

    @classmethod
    def check_trigger(cls, resp, configure):
        """Yield one match per (group, trigger) pair — multiple groups may fire.

        :yields: {'count': <metric value>, 'trigger': <trigger dict>,
                  'trigger_group': <composite group key tuple>}
        """
        triggers = configure.get('triggers', [])

        datas = list(DISearch.flattern_aggs(resp['aggregations'], key_as_tuple=True))
        # map composite group key -> its metric values (everything but the key)
        group_metrics = {data['composite']: {k: v for k, v in data.items() if k != 'composite'} for data in datas}
        for group, metric in group_metrics.items():
            for trigger in triggers:
                operator = trigger['operator']
                if op_func := getattr(OP, operator['op']):
                    agg_type, field = trigger['name_or_agg'], trigger['field']
                    v = metric[f'{agg_type}_{field}']
                    if op_func(v, operator['count']):
                        yield {'count': v, 'trigger': trigger, 'trigger_group': group}


class MetricAlert(Alert):
    """Alert on aggregated metric values (e.g. value_count of a field)."""
    Category = 'metric'
    # BUGFIX: removed a stray '}' after {{trigger}} that Jinja2 rendered
    # literally into every notification.
    Template = """
规则名: {{name}},
告警类型: {{category}},
触发规则: {{trigger}},

当前数量: {{count}},
op: {{trigger.operator.op}},
阈值: {{trigger.operator.count}}
"""

    @classmethod
    def build_alert_condition(cls, configure):
        """Build metric aggregations from the configured triggers.

        Each trigger, minus its 'operator' clause, becomes a metric option.
        """
        triggers = configure.get('triggers', [])
        metric_options = [{k: v for k, v in trigger.items() if k != 'operator'} for trigger in triggers]
        aggs = DISearch.build_metric_aggs(metric_options)
        return aggs

    @classmethod
    def check_trigger(cls, resp, configure):
        """Yield at most one matched trigger based on aggregated metric values.

        Missing metrics default to 0; like the base class, stops at the
        first trigger that matches.
        """
        triggers = configure.get('triggers', [])

        datas = list(DISearch.flattern_aggs(resp['aggregations'], key_as_tuple=True))
        metric_map = {k: v for data in datas for k, v in data.items()}
        for trigger in triggers:
            operator = trigger['operator']
            agg_type, field = trigger['name_or_agg'], trigger['field']
            v = metric_map.get(f'{agg_type}_{field}', 0)
            if op_func := getattr(OP, operator['op']):
                if op_func(v, operator['count']):
                    yield {'count': v, 'trigger': trigger}
                    break


AlertMap  = {item.Category: item for item in Alert.__subclasses__()}


if __name__ == '__main__':

    from package.connector.redis_db import rdb
    from package.connector.kafka_db import kafka_producer
    from setting import setting

    # Manual smoke test against live backends (Redis / Elasticsearch / Kafka).
    # NOTE(review): connection credentials are hard-coded here — confirm they
    # are dev-only and move them into configuration.
    rdb.init(host='192.168.101.79', password='MY.io2019')
    es_db.init(hosts='http://192.168.101.80:9200')
    kafka_producer.init(**{'bootstrap.servers': setting.kafka_servers, **setting.kafka_options})

    count_result = CountAlert.check(
        search={'scope': '', 'condition': {'query': {'match_all': {}}}},
        configure={
            'triggers': [{'operator': {'op': 'gt', 'count': 5, 'level': 5, }}],
            'notifies_configure': [{'id': '0682461c30ba70928000de75cc5fbf50'}],
        })
    print(count_result)

    group_result = GroupAlert.check(
        search={'scope': '', 'condition': {'query': {'match_all': {}}}},
        configure={
            'group_options': {'groups': [
                {'name_or_agg': 'terms', 'field': '_host'},
                {'name_or_agg': 'terms', 'field': '_datamodel'},
            ]},
            'triggers': [{'name_or_agg': 'value_count', 'field': '_host',
                          'operator': {'op': 'gt', 'count': 5, 'level': 5, }}],
            'notifies_configure': [{'id': '0682461c30ba70928000de75cc5fbf50'}],
        })
    print(group_result)

    metric_result = MetricAlert.check(
        search={'scope': '', 'condition': {'query': {'match_all': {}}}},
        configure={
            'triggers': [{'name_or_agg': 'value_count', 'field': '_host',
                          'operator': {'op': 'gt', 'count': 5, 'level': 5, }}],
            'notifies_configure': [{'id': '0682461c30ba70928000de75cc5fbf50'}],
        })
    print(metric_result)
