# coding:utf-8
import time
import json
from opensearchpy import OpenSearch


class OpensearchClient():
    """Thin wrapper around an OpenSearch cluster for group-id search.

    All query helpers target the index chosen at construction time
    (default ``test_group_id``); ``insert`` takes an explicit index name.
    """

    def __init__(self, index="test_group_id"):
        # Hard-coded cluster node list; the opensearch-py client balances
        # requests across these hosts.
        self.__hosts = [
            {'host': '10.13.1.160', 'port': 9210},
            {'host': '10.13.1.199', 'port': 9210},
            {'host': '10.13.1.154', 'port': 9210},
            {'host': '10.13.1.204', 'port': 9210},
            {'host': '10.13.1.160', 'port': 9220},
            {'host': '10.13.1.199', 'port': 9220}
        ]
        self.__index = index
        self.search_client = OpenSearch(hosts=self.__hosts, http_compress=True)

    @staticmethod
    def _apply_recall_filters(body, time_from, is_cluster_root):
        """Apply the shared recall filters to *body* in place and return it.

        Was previously duplicated in vector_search / multi_search / build_dsl.
        """
        # Restrict the recall range to documents created at/after time_from.
        if time_from and isinstance(time_from, int):
            body["query"]["bool"]["filter"] = {"range": {"ctime": {"gte": time_from}}}
        # Only recall cluster roots, to limit cluster drift.
        if is_cluster_root:
            body["query"]["bool"]["must"].append({"term": {"root": {"value": "true"}}})
        return body

    def search(self, body):
        """Run a raw query DSL *body* against the configured index."""
        return self.search_client.search(body=body, index=self.__index)

    def insert(self, doc, id, index_name):
        """Index *doc* under *id* into *index_name*, refreshing immediately.

        NOTE(review): refresh=True on every insert is expensive at scale.
        ``id`` shadows the builtin but is kept for caller compatibility.
        """
        return self.search_client.index(index=index_name, body=doc, id=id, refresh=True)

    def term_search(self, value, field):
        """Exact-value term query on *field*."""
        body = {"query": {"term": {field: {"value": value}}}}
        return self.search_client.search(body=body, index=self.__index)

    def vector_search(self, vector, field="embedding", topk=10, time_from='', is_cluster_root=False):
        """k-NN vector search; optional ctime lower bound and root-only filter."""
        body = {"size": topk, "query": {"bool": {"must": [{"knn": {field: {"vector": vector, "k": topk}}}]}}}
        self._apply_recall_filters(body, time_from, is_cluster_root)
        return self.search_client.search(body=body, index=self.__index)

    def multi_search(self, title_split, keywords, size=50, time_from='', is_cluster_root=False):
        """Fuzzy-match on title_split OR keywords (at least one clause must hit).

        Returns [] (not a response dict) when both inputs are empty.
        """
        if not title_split and not keywords:
            return []
        should = []
        if title_split:
            should.append({"match": {"title_split": {"query": title_split, "operator": "or", "minimum_should_match": "20%"}}})
        if keywords:
            should.append({"match": {"keywords": {"query": keywords, "operator": "or", "minimum_should_match": "20%"}}})
        body = {"query": {"bool": {"must": [{"bool": {"should": should, "minimum_should_match": 1}}]}}, "size": size}
        self._apply_recall_filters(body, time_from, is_cluster_root)
        return self.search_client.search(body=body, index=self.__index)

    def large(self, body=None):
        """Yield pages of hits for *body* (default: match_all) via the scroll API.

        Bug fix: the old loop yielded one trailing empty page; we now stop as
        soon as a page comes back empty, and always clear the server-side
        scroll context even if the consumer abandons the generator early.
        """
        bb = body if body else {"size": 10000, "query": {"match_all": {}}}
        result = self.search_client.search(index=self.__index, scroll='3m', body=bb)
        sid = result['_scroll_id']
        try:
            page = result['hits']['hits']
            while page:
                yield page
                result = self.search_client.scroll(scroll_id=sid, scroll='3m')
                sid = result['_scroll_id']
                page = result['hits']['hits']
        finally:
            self.search_client.clear_scroll(scroll_id=sid)

    def msearch(self, bodys):
        """Multi-search: one hit-list per non-empty query body in *bodys*."""
        assert isinstance(bodys, list)
        if not any(bodys):
            return []
        # NDJSON payload: empty header line ("{}") before each query body.
        # Bug fix: the _msearch protocol requires the data to end with '\n'.
        body = "{}\n" + "\n{}\n".join([json.dumps(b) for b in bodys if b]) + "\n"
        rets = self.search_client.msearch(body=body, index=self.__index)
        return [r['hits']['hits'] for r in rets['responses']]

    def build_dsl(self, field, value, size, search_type, time_from='', is_cluster_root=False):
        """Build a query body for *search_type* in {"vector", "match", "term"}.

        Returns {} for an unsupported search_type or an invalid *value*.
        """
        if search_type == "vector":
            # Vector search is only supported on the "embedding" field.
            if field != "embedding" or not isinstance(value, list) or not value:
                return {}
            body = {"size": size, "query": {"bool": {"must": [{"knn": {field: {"vector": value, "k": size}}}]}}}
        elif search_type == "match":
            if not isinstance(value, str) or not value:
                return {}
            body = {"query": {"bool": {"must": [{"match": {field: {"query": value, "operator": "or", "minimum_should_match": "20%"}}}]}}, "size": size}
        elif search_type == "term":
            if not isinstance(value, str) or not value:
                return {}
            body = {"query": {"bool": {"must": [{"term": {field: {"value": value}}}]}}, "size": size}
        else:
            return {}
        return self._apply_recall_filters(body, time_from, is_cluster_root)


if __name__ == "__main__":
    # Smoke test against the online index: term lookup, vector recall,
    # text recall, then (disabled below) a full scroll dump.
    search_client = OpensearchClient("group_id_online")
    st1 = time.time()
    ret = search_client.term_search(value="comos:ktzqtyu7365006", field="dataid")
    st2 = time.time()
    print("*****************************************************************")
    # Sample 64-dim embedding for the vector-recall check.  (A second dead
    # sample literal that was immediately overwritten has been removed.)
    embedding = [0.09645670652389526, 0.15912294387817383, 0.09543108940124512, -0.11754217743873596, 0.1367480754852295, 0.18715858459472656, 0.008428730070590973, 0.0723019391298294, -0.03061315044760704, -0.10637947171926498, -0.2639138698577881, -0.0009368199971504509, 0.2513260245323181, 0.0438845194876194, -0.13062889873981476, 0.08004643768072128, 0.20508694648742676, -0.0008118299883790314, 0.1669405698776245, -0.07869485020637512, 0.03829548880457878, 0.04519097879528999, -0.08231519907712936, -0.054607558995485306, -0.04866812005639076, -0.02504695951938629, -0.1334395408630371, 0.01801544986665249, -0.12321943789720535, 0.1668419986963272, -0.24418847262859344, 0.0511392205953598, 0.036674611270427704, -0.04890695959329605, 0.10027214884757996, -0.13822491466999054, 0.023933880031108856, -0.10925541818141937, 0.0456668995320797, -0.1176876574754715, 0.1281130313873291, -0.039526041597127914, -0.029549360275268555, 0.12870317697525024, 0.27949047088623047, 0.11553662270307541, -0.0030025800224393606, -0.16005900502204895, -0.16271047294139862, -0.09951239079236984, -0.03400887921452522, 0.017387300729751587, 0.3028739094734192, 0.1591845154762268, -0.0835857167840004, 0.011144829913973808, 0.10933206230401993, -0.018400559201836586, 0.04035529866814613, 0.03785755857825279, -0.1879965364933014, -0.0656064972281456, -0.13844284415245056, -0.21713027358055115]
    ret = search_client.vector_search(vector=embedding)
    st3 = time.time()
    print(ret, 1000 * (st3 - st2))  # response plus latency in milliseconds

    ret = search_client.multi_search("滑雪 晋级 中国队", keywords='', is_cluster_root=True)
    for r in ret['hits']['hits'][:10]:
        print(r)
    exit()  # the scroll dump below is intentionally unreachable
    for doc in search_client.large():
        print(doc)
    



