from datetime import datetime
from elasticsearch import Elasticsearch
from elasticsearch import helpers
import json


def connect_es():
    """Connect to a local Elasticsearch instance on localhost:9200.

    :return: a live ``Elasticsearch`` client if the cluster answers a ping,
             otherwise ``None`` — callers must check before using the result.
    """
    _es = Elasticsearch(host="localhost", port=9200)
    if _es.ping():
        return _es
    # Ping failed: report it and return None explicitly (the original fell
    # through and returned None implicitly, which is easy to miss).
    print("could not connect to es ")
    return None


def create_index(es_object, index_name, body_file):
    """Create an Elasticsearch index from a JSON settings/mappings file.

    (Originally used to build the ``mashups`` index from
    ``../data/es_index/mashups.json``.)

    :param es_object: connected Elasticsearch client
    :param index_name: name of the index to create
    :param body_file: path to a JSON file with the index settings/mappings
    :return: True if the index was created or already exists, False on error
    """
    created = False
    try:
        # Use a context manager so the settings file handle is not leaked.
        with open(body_file) as fh:
            settings = json.load(fh)
        if not es_object.indices.exists(index_name):
            es_object.indices.create(index=index_name, ignore=400, body=settings)
            print("create index success")
        # True when the index now exists, whether we created it or it was
        # already there.
        created = True
    except Exception as ex:
        # Best-effort: report and fall through to return False. The original
        # returned from inside `finally`, which silently swallowed any
        # exception raised after the except block — fixed by returning here.
        print(str(ex))
    print(created)
    return created


def test_store_data(es_object, index_name, doc_type, data_body):
    count = 1
    with open("../data/eng_sample.txt") as file:
        for line in file:
            line = line.strip()
            # es.index() 为插入,es.create()为创建
            es_object.index(index=index_name, doc_type=doc_type, body={"id": count, "title": line})
            count += 1

    # es_object.create(index=index_name, doc_type="_doc", body=data_body)


def store_data_bulk(es_object, index_name, doc_type, data_list):
    """Bulk-index a list of documents via ``helpers.bulk``.

    Each item of ``data_list`` must be a dict containing every field the
    target index expects; it becomes the ``_source`` of one bulk "index"
    action.

    :param es_object: connected Elasticsearch client
    :param index_name: target index name
    :param doc_type: document type placed in each action's ``_type``
    :param data_list: list of dicts, one per document
    :return: the result of ``helpers.bulk`` (successes, errors)
    """
    # One action per input document, in order.
    actions = [
        {
            "_op_type": "index",
            "_index": index_name,
            "_type": doc_type,
            "_source": data,
        }
        for data in data_list
    ]
    # Removed stray debug statement `print("actions")` — it printed the
    # literal string, not the payload, and was clearly a leftover.
    return helpers.bulk(client=es_object, actions=actions)


if __name__ == "__main__":
    es = connect_es()
    # One-off maintenance snippets kept for reference:
    # es.indices.delete(index="test_index")
    # create_index(es, "test_index")
    # with open("../data/eng_sample.txt") as file:
    #     datas = [{"id": i+2, "title": line.strip()}for i, line in enumerate(file)]
    # store_data_bulk(es_object=es, index_name="test_index", doc_type="members", property_names={"title", "id"}, data_list=datas)
    # print(es.get(index="test_index", id=1,doc_type="_doc"))
    # connect_es() returns None when the ping fails; guard against calling
    # .search on None (previously raised AttributeError).
    if es is not None:
        print(es.search(body={"query": {"match_all": {}}}))
