import json
from args import *
from elasticsearch import Elasticsearch
import math
import random
from provinces_departments import provinces, departments
from elasticsearch.helpers import bulk
from tqdm import tqdm
import time
with open("data_5_25.json", "r") as f:
    text = json.loads(f.read())
client = Elasticsearch(hosts=ELASTIC_ADDRESS, basic_auth=(ELASTIC_USERNAME, ELASTIC_PASSWORD), timeout=10000)


def read_data(data, batch_size=1000):
    """Fetch the full source documents for the given records from READ_INDEX.

    Looks the records up by their "nm" field using batched `terms` queries
    so very large inputs do not exceed request limits.

    Args:
        data: List of records. If the items carry an 'nm' key, those values
            are used as the lookup terms; otherwise the items themselves are
            assumed to be the terms.
        batch_size: Maximum number of terms per search request. Defaults to
            1000, the previously hard-coded value.

    Returns:
        List of the matching documents' "_source" payloads.
    """
    # Guard: the original crashed with IndexError on data[0] for empty input.
    if not data:
        return []
    if 'nm' in data[0]:
        data = [d['nm'] for d in data]
    pieces = math.ceil(len(data) / batch_size)
    results = []
    for terms in split_data(data, pieces):
        # One terms-query per batch; size matches the batch so no hits are
        # silently truncated by the default result-window size.
        response = client.search(index=READ_INDEX, body={
            "query": {
                "terms": {
                    "nm": terms
                }
            },
            "size": len(terms)
        }, timeout="100s")
        hits = response.body['hits']['hits']
        results.extend(hit["_source"] for hit in hits)
    return results

def randomize_all_province(data):
    """Assign a random "lyss" value to every record in `data`, in place.

    Each record gets a province with 75% probability, otherwise a
    department, drawn uniformly from the respective pools.
    """
    for record in data:
        if random.random() < 0.75:
            record["lyss"] = random.choice(provinces)
        else:
            record["lyss"] = random.choice(departments)



def split_data(data, pieces=11):
    """Split `data` into at most `pieces` contiguous chunks of equal size
    (the last chunk may be shorter).

    Unlike the previous version, no empty chunks are produced when
    `pieces` exceeds `len(data)` — downstream this avoided bulk calls on
    empty batches and the throttling sleep that followed each of them.

    Args:
        data: Sequence to split.
        pieces: Desired number of chunks (upper bound).

    Returns:
        List of non-empty slices covering `data` in order; `[]` for
        empty input.
    """
    if not data:
        return []
    batch_size = math.ceil(len(data) / pieces)
    return [data[i:i + batch_size] for i in range(0, len(data), batch_size)]

def mapping_and_setting():
    """Recreate INDEX_NAME: drop it if present, then create it with the
    project's mapping and settings, reporting the outcome on stdout."""
    index_exists = client.indices.exists(index=INDEX_NAME)
    if index_exists:
        # Reuse the project's deletion routine rather than deleting inline.
        from delete import delete
        print("索引存在，正在删除。。。")
        delete()
        print("删除成功！")
    response = client.indices.create(index=INDEX_NAME, body={**mapping, **settings}, ignore=400)
    if not response.get("acknowledged"):
        print("索引创建失败")
        print(response)
    else:
        print("索引创建成功")


def bulk_by_time(data, delta=60):
    """Bulk-index every batch in `data` into INDEX_NAME, sleeping `delta`
    seconds after each batch to throttle load on the cluster.

    Args:
        data: List of batches (each a list of documents), as produced by
            split_data.
        delta: Pause in seconds after each batch.
    """
    for batch in tqdm(data):
        # helpers.bulk raises BulkIndexError on failures by default, so the
        # (success_count, errors) return value does not need to be inspected.
        bulk(client, batch, index=INDEX_NAME, timeout="100s")
        time.sleep(delta)

if __name__ == "__main__":
    mapping_and_setting()
    # Read the source documents back from the database
    data = read_data(text)
    # Randomly assign a province/department to each record
    randomize_all_province(data)
    # Split the data into several parts
    data = split_data(data)
    # Import the data incrementally, pausing between batches
    bulk_by_time(data)
