import logging
import math

from elasticsearch import Elasticsearch
import requests
from elasticsearch.helpers import bulk
from tqdm import tqdm

from args import *
from logger import logger
# Shared Elasticsearch client used for both reading and bulk-writing.
# NOTE(review): timeout=10000 — presumably seconds, which is extremely
# generous; confirm the intended unit against the deployment config.
client = Elasticsearch(
    hosts=ELASTIC_ADDRESS,
    basic_auth=(ELASTIC_USERNAME, ELASTIC_PASSWORD),
    timeout=10000,
)

# Date-range filter on the "fssj" field, built once from the
# module-level start_date / end_date and reused by count and search.
query = {"query": {"range": {"fssj": {"gte": start_date, "lte": end_date}}}}

def get_count(start_date: str = start_date, end_date: str = end_date):
    """Return the number of documents in READ_INDEX_NAME whose "fssj"
    field lies in the inclusive range [start_date, end_date].

    Bug fix: the original accepted start_date/end_date but ignored them,
    always counting with the module-level ``query`` built from the
    default dates; the range is now built from the actual arguments.
    Defaults preserve the old behavior for existing callers.
    """
    range_query = {
        "query": {
            "range": {
                "fssj": {
                    "gte": start_date,
                    "lte": end_date,
                }
            }
        }
    }
    response = client.count(index=READ_INDEX_NAME, body=range_query)
    return response.body['count']


def wrap_response(data, from_, to_, timeout=300):
    """POST the slice ``data[from_:to_]`` to the vector service and
    return the list of embeddings found under the response's "data" key.

    Bug fix: the original ``requests.post`` call had no timeout, so a
    hung vector service would block this pipeline forever. *timeout* is
    a new optional parameter (seconds) with a generous default, keeping
    the call backward-compatible for existing callers.
    """
    response = requests.post(
        vector_service_url,
        json={"data": data[from_: to_]},
        timeout=timeout,
    )
    # A non-2xx or malformed reply surfaces here as a KeyError/ValueError,
    # same as before the fix.
    return response.json()['data']

def wrap_vector(news_list: list[dict]):
    """Enrich every item of *news_list* in place with three embeddings:

    - 'vector'             — embedding of the title ('sjmc')
    - 'contentVector'      — embedding of the body text ('sjms')
    - 'titleContentVector' — embedding of "title content" concatenated

    Embeddings are fetched from the vector service in batches of
    ``vector_batch_size`` items.
    """
    titles = [news.get('sjmc', "") for news in news_list]
    contents = [news.get('sjms', "") for news in news_list]
    title_content = [f'{title} {content}'
                     for title, content in zip(titles, contents)]

    title_vecs = []
    content_vecs = []
    combined_vecs = []
    total = len(news_list)
    batches = int(math.ceil(total / vector_batch_size))
    for batch in tqdm(range(batches)):
        lo = batch * vector_batch_size
        hi = min(lo + vector_batch_size, total)
        title_vecs.extend(wrap_response(titles, lo, hi))
        content_vecs.extend(wrap_response(contents, lo, hi))
        combined_vecs.extend(wrap_response(title_content, lo, hi))

    # The service is expected to return one vector per input row, so the
    # three lists line up index-for-index with news_list.
    for idx in range(len(title_vecs)):
        news_list[idx]['vector'] = title_vecs[idx]
        news_list[idx]['contentVector'] = content_vecs[idx]
        news_list[idx]['titleContentVector'] = combined_vecs[idx]


def process_keywords(news_list: list[dict]):
    """Flatten each item's "keywords" field in place, from a list of
    ``{"name": ...}`` dicts to a plain list of the name values.

    Robustness fix: a document whose "keywords" field is explicitly
    ``null`` made ``news.get("keywords", [])`` return ``None`` (the
    default only applies when the key is absent), crashing the
    comprehension with a TypeError. ``or []`` now covers both the
    missing-key and explicit-null cases.
    """
    for news in news_list:
        keywords = news.get("keywords") or []
        news['keywords'] = [k['name'] for k in keywords]

def read_batch():
    """Page through READ_INDEX_NAME with from/size and return a list of
    every hit's ``_source``, after flattening keywords and attaching
    embedding vectors to each document.

    NOTE(review): from/size paging is capped by Elasticsearch's
    ``index.max_result_window`` (10k by default) — confirm counts stay
    below that, or switch to ``search_after`` for deeper result sets.
    """
    count = get_count()
    logger.info(f"start_date: {start_date}-end_date: {end_date} count: {count}")
    logger.info(f"read_batch_size: {READ_BATCH_SIZE}")
    pages = int(math.ceil(count / READ_BATCH_SIZE))
    total_news = []
    for page in range(pages):
        offset = page * READ_BATCH_SIZE
        paged_query = {
            'query': query['query'],
            'from': offset,
            'size': READ_BATCH_SIZE,
        }
        logger.info(f"processing from: {offset}, to: {min(offset + READ_BATCH_SIZE, count)}")
        search = client.search(index=READ_INDEX_NAME, body=paged_query)
        hits = search.body['hits']['hits']
        batch_news = [hit['_source'] for hit in hits]
        process_keywords(batch_news)
        wrap_vector(batch_news)
        total_news += batch_news
    logger.info(f"read data successfully!")
    return total_news

def index(json_data):
    """Bulk-insert *json_data* documents into WRITE_INDEX_NAME and log
    how many were indexed successfully.

    NOTE(review): the name shadows the builtin ``index`` concept-wise
    only (no builtin function ``index`` at module level), but renaming
    would break the caller in the ``__main__`` guard, so it is kept.
    """
    inserted, _errors = bulk(
        client,
        json_data,
        index=WRITE_INDEX_NAME,
        chunk_size=READ_BATCH_SIZE,
        timeout="100s",
    )
    logger.info(f'{inserted}条记录成功插入')


if __name__ == '__main__':
    # Read and enrich every document in the date range, then bulk-index
    # the result into the write index.
    index(read_batch())