from elasticsearch import Elasticsearch
from args import *
import json
import re

def contains_japanese(text):
    """Return True if *text* contains any Japanese kana character.

    Covers Hiragana (U+3040-U+309F), Katakana (U+30A0-U+30FF), and the
    Katakana Phonetic Extensions block (U+31F0-U+31FF).
    """
    return re.search(r'[\u3040-\u30ff\u31f0-\u31ff]', text) is not None


def is_chinese_or_numeric(text):
    """Return True when *text* is predominantly Chinese and/or numeric.

    A text qualifies when it contains no Japanese kana and at least 70%
    of its characters are CJK ideographs (U+4E00-U+9FFF) or decimal
    digits. Empty strings never qualify.
    """
    # Reject anything with hiragana/katakana outright — kana marks the
    # text as Japanese even if it also contains CJK ideographs.
    if re.search(r'[\u3040-\u30ff\u31f0-\u31ff]', text):
        return False

    if not text:
        return False

    relevant = sum(
        1 for ch in text
        if '\u4e00' <= ch <= '\u9fff' or ch.isdigit()
    )
    # Ratio is taken over ALL characters, spaces and punctuation included.
    return relevant / len(text) >= 0.7

# Module-wide Elasticsearch client shared by all query helpers below.
# Connection address and credentials come from args (star-imported above).
# NOTE(review): timeout=10000 (seconds) looks unintentionally large, and
# newer elasticsearch-py versions prefer request_timeout — confirm both.
client = Elasticsearch(
            hosts=ELASTIC_ADDRESS, basic_auth=(ELASTIC_USERNAME, ELASTIC_PASSWORD),
            timeout=10000)


def wrap_country_query(query_str: str):
    """Build an Elasticsearch multi_match query body that searches the
    Chinese content and title fields for *query_str*."""
    multi_match = {
        "fields": ["news_content_zh", "news_title_zh"],
        "query": query_str,
    }
    return {"query": {"multi_match": multi_match}}

def get_country_bias_data(query_count: int = total_country_query):
    """Search the index for each of the first *query_count* countries and
    collect the Chinese title, content, and publish date of every hit.

    The collected records are also written to ``country_data.json``.
    Fix over the original: the body used non-standard 3-space indentation,
    inconsistent with the rest of the file; normalized to 4 spaces.
    """
    ret = []
    for i in range(query_count):
        # Each total_countries entry is an iterable of name variants;
        # join them into a single space-separated query string.
        country_str = " ".join(total_countries[i])
        query = wrap_country_query(country_str)
        response = client.search(body=query, index=INDEX_NAME)
        for hit in response.body["hits"]["hits"]:
            news = hit["_source"]
            ret.append({
                "title": news.get("news_title_zh", ""),
                "content": news.get("news_content_zh", ""),
                "date": news.get("news_publicdate", ""),
            })
    write_json(ret, "country_data.json")
    return ret

def write_json(json_, file_name):
    """Serialize *json_* to *file_name* as pretty-printed UTF-8 JSON.

    Fix: open the file with encoding="utf-8" explicitly. Because
    ensure_ascii=False writes raw non-ASCII characters (Chinese text),
    relying on the platform default encoding (e.g. cp1252 on Windows)
    would raise UnicodeEncodeError or corrupt the output.
    """
    with open(file_name, "w", encoding="utf-8") as f:
        json.dump(json_, f, ensure_ascii=False, indent=4)


def wrap_date_query(start_date: str, end_date: str, size: int = -1, from_=-1):
    """Build a bool/filter query for documents whose news_publicdate lies
    in [*start_date*, *end_date*] (inclusive).

    Pagination keys ("from"/"size") are added only when BOTH *size* and
    *from_* are supplied, i.e. neither is the -1 sentinel.
    """
    date_range = {"news_publicdate": {"gte": start_date, "lte": end_date}}
    query = {
        "query": {"bool": {"filter": {"range": date_range}}},
    }
    if size != -1 and from_ != -1:
        query["from"] = from_
        query["size"] = size
    return query


def get_random_date_data(total_count, start_date: str, end_date: str):
    """Collect up to *total_count* Chinese-language news records published
    between *start_date* and *end_date* (inclusive).

    Pages through the index in read_batch-sized windows, keeps only
    documents that judge_news_zh accepts, writes the projected records to
    ``random_date_data.json``, and returns at most *total_count* of them.
    """
    ret = []
    count = client.count(body=wrap_date_query(start_date, end_date))['count']
    from_ = 0
    while len(ret) < total_count and from_ < count:
        query = wrap_date_query(start_date, end_date, read_batch, from_)
        # BUG FIX: the offset previously advanced by read_batch * iteration
        # (a triangular-number stride: batches 0, 1, 3, 6, 10, ...), which
        # silently skipped most matching documents and could terminate well
        # short of total_count. Plain pagination moves one batch at a time.
        from_ += read_batch
        hits = client.search(body=query, index=INDEX_NAME).body['hits']['hits']
        ret += [h['_source'] for h in hits if judge_news_zh(h['_source'])]
    ret = [wrap_news(r) for r in ret]
    write_json(ret, "random_date_data.json")
    return ret[:total_count]
def judge_news_zh(news: dict):
    """Return True when the first 100 characters of "title content" look
    predominantly Chinese/numeric (see is_chinese_or_numeric)."""
    sample = " ".join((
        news.get("news_title_zh", ""),
        news.get("news_content_zh", ""),
    ))
    return is_chinese_or_numeric(sample[:100])

def wrap_news(news: dict):
    """Project a raw _source document onto the output schema
    {"title", "content", "date"}, defaulting missing fields to ""."""
    field_map = (
        ("title", "news_title_zh"),
        ("content", "news_content_zh"),
        ("date", "news_publicdate"),
    )
    return {out_key: news.get(src_key, "") for out_key, src_key in field_map}


if __name__ == "__main__":
    # Collect up to 1200 Chinese news records for the given window.
    # NOTE(review): start and end date are identical — confirm a one-day
    # window is intended.
    start_date = "2022-05-26"
    end_date = "2022-05-26"
    get_random_date_data(1200, start_date, end_date)