#!/usr/bin/python3
# ******************************************************************************
# Copyright (c) Huawei Technologies Co., Ltd. 2021-2021. All rights reserved.
# licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
#     http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# ******************************************************************************
import operator

import json

import elasticsearch
from elasticsearch import helpers

# Logical index keys mapped to the concrete Elasticsearch index names
# used throughout this module.
index_dict = {
    "obs_index": "obs_info",
    "pr_index": "pr_info",
    "iso_index": "iso_info",
    "sig_index": "sig_info",
    "branch_info_index": "branch_info",
}


class Es:
    """Thin wrapper around sync and async Elasticsearch clients for the
    indices listed in ``index_dict``.

    Every method prints its response (preserving the original console
    behaviour) and additionally returns it, so callers can consume the
    result programmatically instead of scraping stdout.
    """

    def __init__(self, host="http://127.0.0.1:9200"):
        """Create one synchronous and one asynchronous client.

        :param host: Elasticsearch endpoint; defaults to the previously
            hard-coded local instance, so existing callers are unaffected.
        """
        self.es = elasticsearch.Elasticsearch(host)
        self.async_es = elasticsearch.AsyncElasticsearch(
            host, timeout=600, max_retries=3, retry_on_timeout=True,
        )

    async def get_indices(self):
        """Print and return information about every index in the cluster."""
        response = await self.async_es.indices.get(index='_all')
        print("all indices is %s" % response)
        return response

    @staticmethod
    def parse_date():
        """Load and return the JSON document stored in ``data.json``.

        NOTE(review): despite the name, this parses a JSON data file, not a
        date — the name is kept unchanged for interface compatibility.
        """
        with open('data.json', 'r', encoding='utf-8') as file:
            return json.load(file)

    async def create_index(self):
        """Create every index named in ``index_dict``; return the responses
        keyed by index name."""
        responses = {}
        for value in index_dict.values():
            response = await self.async_es.indices.create(index=value)
            print(f"create index: {value} {response.body}")
            responses[value] = response
        return responses

    async def insert_data(self, data):
        """Insert documents from ``data`` one request at a time.

        :param data: mapping of index name -> single document (dict) or
            list of documents.
        """
        for value in index_dict.values():
            content = data.get(value)
            # Normalize both accepted shapes to a list so one code path
            # handles them (a lone document behaves like a 1-element list).
            items = content if isinstance(content, list) else [content]
            for item in items:
                response = await self.async_es.index(index=value, body=item)
                print(f'insert xml_data to {value}: {response}')

    async def bulk_insert_data(self, data):
        """Insert documents from ``data`` with one bulk request.

        With the helper's default ``stats_only=False``, ``async_bulk``
        returns ``(success_count, errors)`` where ``errors`` is a LIST of
        per-document failures, not a count — the original code mislabeled
        it ``failed_num``.
        """
        actions = []
        for value in index_dict.values():
            content = data.get(value)
            items = content if isinstance(content, list) else [content]
            actions.extend({"_index": value, "_source": item} for item in items)

        success_num, errors = await helpers.async_bulk(self.async_es, actions)
        print(f'success: {success_num}, failed: {errors}')
        return success_num, errors

    def delete_index(self, index=None):
        """Delete ``index``, or every index in ``index_dict`` when ``index``
        is None.

        Missing indices are reported instead of raised (the original only
        guarded the delete-all branch; both branches now behave the same),
        so the method is safe to call repeatedly.
        """
        targets = index_dict.values() if index is None else [index]
        responses = []
        for name in targets:
            try:
                response = self.es.indices.delete(index=name)
                print(f'delete index {name},{response}')
                responses.append(response)
            except elasticsearch.NotFoundError:
                print(f'{name} does not exist')
        return responses

    async def query_data(self, index, body):
        """Run a filtered bool query built from ``body``.

        Each (field, value) pair in ``body`` becomes an exact-match ``term``
        filter; at most 100 hits are requested.
        """
        filters = [{"term": {key: value}} for key, value in body.items()]
        query_body = {"query": {"bool": {"filter": filters}}, "size": 100}
        response = await self.async_es.search(index=index, body=query_body)
        print(f"result is {response}")
        return response

    async def query_data_old(self, index, body):
        """Run a caller-supplied raw query ``body`` against ``index``
        (legacy entry point)."""
        response = await self.async_es.search(index=index, body=body, _source=None)
        print(f"result is {response}")
        return response

    def query_mapping(self, index):
        """Print and return the field mapping of ``index``."""
        response = self.es.indices.get_mapping(index=index)
        print(f"{index} mapping is {response}")
        return response

    def update_mapping(self, index):
        """Apply the mapping stored in ``sig_info_mapping.json`` to
        ``index`` and return the server response."""
        with open('sig_info_mapping.json', 'r', encoding='utf-8') as file:
            new_mapping = json.load(file)
        response = self.es.indices.put_mapping(index=index, body=new_mapping)
        print(f"update mapping is {response}")
        return response
