"""System statistics over the dsm Elasticsearch log indexes, with Redis caching."""
import json


class Statistic:
    """System statistic analysis over the dsm log indexes.

    Hot indexes keep receiving writes, so their statistics are recomputed
    whenever the cached doc count is stale; read-only (warm/cold) indexes never
    change, so they are computed once and then served from a Redis hash cache.
    """
    LogIndex = 'dsm-*'  # index-name pattern covering all log indexes
    HotIndexStatisticKey = 'statistic:index:hot'  # Redis hash: statistics per hot index
    ReadIndexStatisticKey = 'statistic:index:readonly'  # Redis hash: statistics per read-only index

    def __init__(self, client, redis_client):
        """
        :param client: Elasticsearch client exposing ``ilm``, ``cat`` and ``search``.
        :param redis_client: wrapper exposing the raw redis connection as ``.redis``.
        """
        self.es_client = client
        self.rdb = redis_client
        # Partition the matching indexes by ILM phase once, at construction time.
        self.hot_indexes, self.readonly_indexes = self.statistic_index()

    def statistic_index(self):
        """Split the indexes matching ``LogIndex`` by ILM phase.

        :return: ``(hot, readonly)`` — two lists of dicts, each with
                 ``index``, ``created_at`` (epoch millis) and ``phase`` keys.
        """
        hot_indexes, readonly_indexes = [], []
        resp = self.es_client.ilm.explain_lifecycle(self.LogIndex)
        for index, item in resp['indices'].items():
            info = {
                'index': index,
                'created_at': item['lifecycle_date_millis'],
                'phase': item['phase']
            }
            # Any phase past 'hot' (warm/cold/frozen) is treated as read-only.
            if item['phase'] == 'hot':
                hot_indexes.append(info)
            else:
                readonly_indexes.append(info)
        return hot_indexes, readonly_indexes

    def _statistic_indexes(self, indexes):
        """Aggregate earliest/latest event time and doc counts per index,
        and the same per data source (``_sourceid``) inside each index.

        :param indexes: list of concrete index names; may be empty.
        :return: ``{index: {doc_count, _time_min, _time_max,
                            _source: {source_id: {doc_count, _time_min, _time_max}}}}``
        """
        if not indexes:
            return {}

        aggregations = {
            '_index': {
                # size=65535: effectively "all buckets" for this deployment.
                'terms': {'field': '_index', 'size': 65535},
                "aggs": {
                    '_time_max': {"max": {"field": "_time"}},
                    '_time_min': {"min": {"field": "_time"}},
                    '_source_bucket': {
                        "terms": {"field": "_sourceid", "size": 65535},
                        "aggs": {'_time_max': {"max": {"field": "_time"}},
                                 '_time_min': {"min": {"field": "_time"}}}
                    }
                }
            }
        }
        # size=0 / track_total_hits=False: aggregations only, no hits payload.
        resp = self.es_client.search(index=indexes, body={'aggs': aggregations}, size=0, track_total_hits=False)
        return {
            aggregation['key']: {
                'doc_count': aggregation['doc_count'],
                '_time_min': aggregation['_time_min']['value'],
                '_time_max': aggregation['_time_max']['value'],
                '_source': {
                    source['key']: {
                        'doc_count': source['doc_count'],
                        '_time_min': source['_time_min']['value'],
                        '_time_max': source['_time_max']['value'],
                    }
                    for source in aggregation['_source_bucket']['buckets']
                }
            } for aggregation in resp['aggregations']['_index']['buckets']}

    def overview_indexes(self, indexes):
        """Per-index overview via ``_cat/indices``.

        :param indexes: list of index names.
        :return: ``{index: {doc_count, size (bytes), health}}``
        """
        resp = self.es_client.cat.indices(index=indexes, bytes='b', format='json')
        return {
            item['index']: {
                'doc_count': int(item['docs.count']),
                'size': int(item['store.size']),
                'health': item['health']
            }
            for item in resp
        }

    def statistic_indexes(self):
        """Compute (or reuse cached) statistics for every current log index.

        Hot-index data keeps changing, so a hot index is recomputed whenever
        its live doc count exceeds the cached one; read-only indexes are
        computed once and then always served from the cache.

        :return: merged ``{index: statistics}`` mapping (see
                 :meth:`_statistic_indexes`) covering both hot and read-only
                 indexes.  Previously this method cached the results but
                 returned ``None``, discarding them.
        """
        hot_indexes = [item['index'] for item in self.hot_indexes]
        readonly_indexes = [item['index'] for item in self.readonly_indexes]

        hot_cache = {k.decode(): json.loads(v) for k, v in self.rdb.redis.hgetall(self.HotIndexStatisticKey).items()}
        readonly_cache = {k.decode(): json.loads(v) for k, v in
                          self.rdb.redis.hgetall(self.ReadIndexStatisticKey).items()}

        # Recompute a hot index only when its live doc count outgrew the cache.
        result = self.overview_indexes(hot_indexes)
        todo_hot_indexes = [k for k, v in result.items() if v['doc_count'] > hot_cache.get(k, {}).get('doc_count', 0)]
        todo_readonly_indexes = [item for item in readonly_indexes if item not in readonly_cache]

        hot_statistic = self._statistic_indexes(todo_hot_indexes)
        readonly_statistic = self._statistic_indexes(todo_readonly_indexes)

        # Drop cache entries for indexes that moved out of the hot phase.
        todel_hot_indexes = [idx for idx in hot_cache if idx not in hot_indexes]
        if todel_hot_indexes:
            self.rdb.redis.hdel(self.HotIndexStatisticKey, *todel_hot_indexes)

        # Drop cache entries for read-only indexes that no longer exist.
        todel_readonly_indexes = [idx for idx in readonly_cache if idx not in readonly_indexes]
        if todel_readonly_indexes:
            self.rdb.redis.hdel(self.ReadIndexStatisticKey, *todel_readonly_indexes)

        for k, v in hot_statistic.items():
            self.rdb.redis.hset(self.HotIndexStatisticKey, k, json.dumps(v))

        for k, v in readonly_statistic.items():
            self.rdb.redis.hset(self.ReadIndexStatisticKey, k, json.dumps(v))

        # Still-valid cache entries overlaid with the freshly computed ones.
        hot_result = {k: hot_cache[k] for k in hot_indexes if k in hot_cache}
        hot_result.update(hot_statistic)
        readonly_result = {k: readonly_cache[k] for k in readonly_indexes if k in readonly_cache}
        readonly_result.update(readonly_statistic)
        return {**hot_result, **readonly_result}


if __name__ == '__main__':
    # Ad-hoc smoke test against a development environment.
    # NOTE(review): hosts and credentials are hardcoded below — dev-only script.
    import time
    from common.package.elastic_db import es_db
    from common.package.redis_db import rdb
    from common.package.db import Postgres

    es_db.init(hosts='http://192.168.101.79:9200')
    rdb.init(host='192.168.101.79', password='MY.io2019')

    db = Postgres()
    db.init(url='postgresql+psycopg2://dsm:dsm@192.168.101.79:5432/dsm', pool_pre_ping=True)

    # Build the statistic object and run a full pass, timing the whole thing.
    started = time.time()
    Statistic(es_db.client, rdb).statistic_indexes()
    print(time.time() - started)
