from elasticsearch import Elasticsearch


class ElasticsearchDB:
    """Thin wrapper around the Elasticsearch client with retry defaults.

    The underlying client is created lazily: pass connection kwargs to the
    constructor, or call :meth:`init` later (useful for a module-level
    singleton that is configured at application start-up).
    """
    def __init__(self, **kwargs):
        """Store default connection settings; connect only if kwargs are given.

        :param kwargs: forwarded to :meth:`init` (e.g. ``hosts=...``).
        """
        self.timeout = 60                # request timeout in seconds
        self.retry_on_timeout = True     # retry a request that timed out
        self.max_retries = 3
        self.client = None               # created by init()

        if kwargs:
            self.init(**kwargs)

    def init(self, **kwargs):
        """Create the Elasticsearch client; kwargs override the defaults above."""
        config = {'timeout': self.timeout, 'retry_on_timeout': self.retry_on_timeout, 'max_retries': self.max_retries,
                  **kwargs}
        self.client = Elasticsearch(**config)

    def search(self, index, body, **kwargs):
        """Run a search query *body* against *index*; returns the raw response."""
        return self.client.search(index=index, body=body, **kwargs)

    def delete(self, index, body):
        """Delete documents matching *body* (delete-by-query, refreshed immediately)."""
        resp = self.client.delete_by_query(index=index, body=body, refresh=True)
        return resp

    def search_page_data(self, index, body, page, page_size, track_total_hits=True, **kwargs):
        """Paged search (*page* is 1-based); returns page info, total count and raw hits."""
        query = {'from': (page - 1) * page_size, 'size': page_size, **body}
        resp = self.search(index, body=query, track_total_hits=track_total_hits, **kwargs)
        return {
            'page': page,
            'page_size': page_size,
            'total': resp['hits']['total']['value'],
            'data': resp['hits']['hits']
        }

    def scroll_search(self, index, body, size=5000, max_size=100000, scroll="1m", **kwargs):
        """Scroll search: yield batches of ``_source`` dicts, *size* docs per batch,
        stopping after *max_size* documents in total.

        The scroll context is cleared before the generator finishes on every path.
        """
        current_size = 0
        resp = self.search(index=index, scroll=scroll, body=body, size=size, **kwargs)
        scroll_id = resp.get('_scroll_id')

        while True:
            data = [item['_source'] for item in resp.get('hits').get('hits')]
            if not data:  # no more results
                self.clear_scroll(scroll_id)
                return

            count = len(data)
            left_size = max_size - current_size
            if count >= left_size:  # cap reached: emit only the remainder and stop
                yield data[:left_size]

                self.clear_scroll(scroll_id)
                return

            # FIX: the original rebuilt the identical _source list from resp here;
            # yield the batch we already extracted above.
            yield data
            current_size += count
            resp = self.client.scroll(scroll_id=scroll_id, scroll=scroll)

    def clear_scroll(self, scroll_id):
        """Release a scroll context; a missing context (404) is ignored."""
        return self.client.clear_scroll(body={'scroll_id': [scroll_id]},
                                        ignore=(404,))

    def async_op(self, op, **kwargs):
        """Dispatch *op* ('submit' / 'get' / 'delete' / ...) on the async-search API."""
        func = getattr(self.client.async_search, op)
        resp = func(**kwargs)
        return resp

    def upsert_compose(self, name, body):
        """Create or update a component template."""
        return self.client.cluster.put_component_template(name, body)

    def upsert_template(self, name, body):
        """Create or update an index template."""
        resp = self.client.indices.put_index_template(name=name, body=body)
        return resp

    def delete_template(self, name):
        """Delete an index template; a missing template (404) is ignored."""
        resp = self.client.indices.delete_index_template(name=name, ignore=[404])
        return resp

    def upsert_lifecycle(self, name, body):
        """Create or update an ILM lifecycle policy."""
        resp = self.client.ilm.put_lifecycle(name, body)
        return resp

    def delete_lifecycle(self, name):
        """Delete an ILM lifecycle policy; 404 is ignored."""
        resp = self.client.ilm.delete_lifecycle(name, ignore=[404])
        return resp

    def create_index(self, index, body):
        """Create an index with settings/mappings/aliases from *body*."""
        resp = self.client.indices.create(index=index, body=body)
        return resp

    def delete_index(self, index):
        """Delete an index (wildcard patterns allowed); 404 is ignored."""
        resp = self.client.indices.delete(index=index, ignore=[404])
        return resp

    def rollover(self, index):
        """Roll over the write alias *index*.

        NOTE(review): max_docs 0 appears intended to make the condition always
        pass (unconditional rollover) — confirm against the target ES version.
        """
        body = {'conditions': {'max_docs': 0}}
        resp = self.client.indices.rollover(index, body=body)
        return resp

    # --------------- field management ---------------------
    def field_capacity(self, index, fields, **kwargs):
        """Return field capabilities for *fields* in *index*."""
        return self.client.field_caps(index=index, fields=fields, **kwargs)

    # --------------- cluster overview / health / monitoring / capacity ---------------------
    def cat_health(self, **kwargs):
        """Cluster health, as JSON."""
        return self.client.cat.health(format='json', **kwargs)

    def cat_allocation(self, **kwargs):
        """Per-node disk allocation in bytes, as JSON."""
        return self.client.cat.allocation(bytes='b', format='json', **kwargs)

    def cat_indices(self, index, **kwargs):
        """Index overview (doc count, size in bytes), as JSON."""
        return self.client.cat.indices(index=index, bytes='b', format='json', **kwargs)

    def cat_count(self, index, **kwargs):
        """Document count only, as JSON."""
        return self.client.cat.count(index=index, format='json', **kwargs)


es_db = ElasticsearchDB()


if __name__ == '__main__':
    # Ad-hoc smoke tests against a live cluster (hard-coded dev host).
    es_db.init(hosts='http://192.168.101.79:9200')

    def test_msearch():
        """Multi-search: several queries in one round trip."""
        r = es_db.client.msearch(body=[
            {'index': 'dsm-*'},
            {'query': {'match_all': {}}},

            {'index': 'stat*'},
            {'query': {'match_all': {}}}
        ])
        print(r)

    def test_search():
        a = es_db.search(index='ml_train', body={'query': {'match_all': {}}}, ignore=[404])
        print(a)

    def test_index_manager():
        """Exercise index generation/management: component template, ILM policy,
        index template, index creation, rollover, then clean-up."""
        patten = 'dsm-aaa'

        compose = {
            "template": {
                "settings": {
                    "index.number_of_shards": 5,
                    "index.number_of_replicas": 0,
                    "index.refresh_interval": "60s",
                    "index.codec": "default",
                    "index.routing.allocation.require.temp": "*hot*"
                },
                "mappings": {
                    "dynamic": "false",
                    "dynamic_templates": [
                        {
                            "strings_as_keywords": {
                                "match_mapping_type": "string",
                                "mapping": {
                                    "type": "keyword",
                                    "ignore_above": 256
                                }
                            }
                        }
                    ]
                }
            }
        }

        mappings = {
            "dynamic": False,
            "properties": {
                "metric_name": {
                    "type": "keyword"
                },
                "metric_value": {
                    "type": "integer"
                },
                "labels": {
                    "type": "object",
                    "dynamic": "true"
                }

            },
            "dynamic_templates": [
                {
                    "strings_as_keywords": {
                        "match_mapping_type": "string",
                        "mapping": {
                            "type": "keyword",
                            "ignore_above": 10
                        }
                    }
                }
            ]
        }

        lifecycle_data = {
            "policy": {
                "phases": {
                    "hot": {
                        "actions": {
                            "rollover": {
                                "max_size": '40gb',
                                "max_docs": '50000000',
                                'max_age': '30d'
                            },
                            "set_priority": {
                                "priority": 100
                            }
                        }
                    },
                    "warm": {
                        "actions": {
                            "allocate": {
                                "number_of_replicas": 0,
                                "require": {
                                    "temp": "*warm*"
                                }
                            },
                            "shrink": {
                                "number_of_shards": 1
                            },
                            "forcemerge": {
                                "max_num_segments": 1,
                                "index_codec": 'best_compression'
                            },
                            "set_priority": {
                                "priority": 50
                            }
                        }
                    },
                    'cold': {
                        "min_age": "60d",
                        "actions": {
                            "set_priority": {
                                "priority": 0
                            }
                        }
                    },
                    'delete': {
                        'min_age': '180d',
                        'actions': {
                            'delete': {}
                        }
                    }
                }
            }
        }

        template_data = {
            "priority": 100,
            "index_patterns": [
                f"{patten}-*"
            ],
            "template": {
                "settings": {
                    "index": {
                        "lifecycle.name": patten,
                        "lifecycle.rollover_alias": patten,
                        "number_of_replicas": 0,
                        "number_of_shards": 5
                    }
                },
                "mappings": mappings
            },
            "composed_of": [],
            "_meta": {}
        }

        index_data = {
            "aliases": {
                patten: {
                    "is_write_index": True
                },
            }
        }

        a = es_db.upsert_compose(name='compose_dsm', body=compose)
        template_data['composed_of'] = ['compose_dsm']  # reference the component template

        a = es_db.upsert_lifecycle(name=patten, body=lifecycle_data)
        b = es_db.upsert_template(name=patten, body=template_data)
        c = es_db.create_index(index=f'{patten}-000001', body=index_data)

        d = es_db.rollover(index=patten)

        e = es_db.delete_index(index=f'*{patten}*')
        f = es_db.delete_lifecycle(name=patten)
        g = es_db.delete_template(name=patten)

        print(a, b, c, d, e, f, g)


    def test_index_fields():
        a = es_db.field_capacity('*', '*')
        print(a)

    def test_cat():
        a = es_db.cat_health()
        b = es_db.cat_allocation()
        c = es_db.cat_indices(index='*')
        d = es_db.cat_count(index='*')
        print(a, b, c, d)

    def scroll_test():
        # BUG FIX: scroll_search takes `size`, not `page_size` — the old keyword
        # fell into **kwargs and was forwarded to client.search, where ES
        # rejects it as an unknown parameter.
        scroll = es_db.scroll_search(index='dsm-d0a46302d3ce11efa5def635d424d390', body={'query': {'match_all': {}}},
                                     size=1, max_size=2)

        for item in scroll:
            print(item)

    def async_op():
        async_sub = es_db.async_op('submit', index='dsm-d644a208d15e11efb3bef635d424d390',
                                   body={'query': {'match_all': {}}}, size=10000, timeout='1s')

        if not async_sub.get('is_running'):  # True: still running in background; False: finished
            return async_sub

        async_id = async_sub['id']
        async_get = es_db.async_op('get', id=async_id)
        async_del = es_db.async_op('delete', id=async_id)
        print(async_get, async_del)


    async_op()
    scroll_test()
    test_search()
    test_index_manager()
    test_index_fields()
    test_cat()
