from func.writer.writer_func import *
from func.log.default_log import DefaultLog
from func.writer.literature_review import LiteratureReview
import argparse
import hashlib
import os
import re
import time

# Module-wide logger shared by every stage of the pipeline below.
log = DefaultLog().getLogger()

def get_title_with_abstract(datalist, abstractlist, top_k=10):
    """Pair each abstract with the title of its paper.

    Args:
        datalist: Mapping whose values are dicts carrying at least
            'paper_id' and 'paper_title'.
        abstractlist: Mapping of paper_id -> abstract text.
        top_k: Maximum number of pairs to return.

    Returns:
        A list of at most *top_k* dicts of the form
        {"title": ..., "abstract": ...}, in abstractlist iteration order.

    Raises:
        KeyError: If an abstract's paper_id has no entry in *datalist*.
    """
    # Index titles by paper_id so each abstract lookup is O(1).
    id_to_title = {
        entry['paper_id']: entry['paper_title']
        for entry in datalist.values()
    }

    # Attach the matching title to every abstract, then keep the first top_k.
    paired = [
        {"title": id_to_title[paper_id], "abstract": abstract}
        for paper_id, abstract in abstractlist.items()
    ]
    return paired[:top_k]

def run_agent(topic=None, debug=False, k=10):
    """Generate a literature review for *topic* and save it as markdown.

    Pipeline: classify the topic, extract keywords, fetch matching
    abstracts, build a numbered outline, draft all subsections in
    parallel, and write the result under ./lite_review/.

    Args:
        topic: Review topic string (callers translate Chinese topics first).
        debug: Kept for interface compatibility; not read in this body.
        k: Kept for interface compatibility; not read in this body.

    Returns:
        The populated LiteratureReview instance.
    """
    searchCore = SearchCore()
    writer = LiteratureReviewWriter()
    lit_review = LiteratureReview(topic)

    start_time = time.perf_counter()

    def _log_elapsed(stage):
        # Log the cumulative time since start_time for the named stage
        # (message format identical to the original per-stage logging).
        elapsed_time = time.perf_counter() - start_time
        log.info(f"{stage}-耗时: {elapsed_time:.6f} 秒")

    # Step 1: determine the topic's type.
    log.info(f"=========主题类型判断===========")
    topic_type = writer.topic_classification(topic)
    log.info(f"主题: {topic} 类型: {topic_type}")

    # Step 2: extract keyword features from the topic.
    log.info(f"=========问题关键字特征抽取===========")
    main_word, keyword_list = searchCore.get_keywords(topic)
    keywords = {"main_word": main_word, "keyword_list": keyword_list}
    log.info("关键字特征抽取：" + str(keyword_list))
    _log_elapsed("关键字特征抽取")

    # Step 3: fetch abstracts for the extracted keywords.
    log.info(f"=========文献摘要提取===========")
    title_with_abstract = searchCore.abstract_extract_v2(
        topic, main_word, keyword_list,
        threshold=0.4, top_k=1, sec_top_k=10, gradient=-10, max_workers=100)
    log.info("摘要数据：" + str(len(title_with_abstract)))
    # Fixed: the loop variable was named `k`, silently shadowing the `k`
    # parameter of this function.
    for item in title_with_abstract:
        pageid = item["paper_id"]
        title = item["title"]
        log.info(f"摘要标题:{title}, key：{pageid}")
    _log_elapsed("文献摘要提取")

    # Step 4: generate the review outline and add section numbering.
    log.info(f"=========文献综述结构生成===========")
    structure = writer.get_structure(topic, topic_type, data=title_with_abstract)
    lit_review.set_structure(structure)
    structure_md = lit_review.structure_to_md(structure)
    number_structure = lit_review.add_numbering_to_structure(structure)
    lit_review.set_structure(number_structure)
    # NOTE(review): the flat section list is built from the un-numbered
    # `structure`; confirm whether it should use `number_structure` instead.
    structure_list = lit_review.structure_to_list(structure)
    log.info(f"文献综述结构: {structure_md}")
    _log_elapsed("文献综述结构生成")

    # Step 5: draft every subsection of the review in parallel.
    log.info(f"=========文献综述初稿生成===========")
    sub_section_content = writer.write_subsections_content_parallel(
        keywords, structure_list, structure_md, max_workers=20)
    lit_review.set_content(sub_section_content)
    lit_review.content_to_LitReview()

    # Step 6: persist the draft as a local markdown file.
    log.info(f"=========文献综述初稿保存===========")
    os.makedirs("lite_review", exist_ok=True)
    file_path = lit_review.LitReview_to_md("./lite_review/")
    _log_elapsed("最终结果")

    # Record the output file's checksum for traceability.
    md5 = calculate_file_hash(file_path)
    log.info(f"文献生成md5:{md5}")

    return lit_review

def is_chinese(text):
    """Return True if *text* contains at least one Chinese character.

    Only the CJK Unified Ideographs block (U+4E00–U+9FFF) is checked, so
    characters from the rarer extension blocks are not detected.

    Fixed: this module never imported `re` itself — it previously worked
    only if the star import happened to leak it. `import re` is now an
    explicit top-of-file import, and the pattern is a raw string per
    regex convention.
    """
    return bool(re.search(r'[\u4e00-\u9fff]', text))

def calculate_file_hash(file_path):
    """Return the hexadecimal MD5 digest of the file at *file_path*.

    The file is read in 8 KiB chunks so arbitrarily large files can be
    hashed without loading them fully into memory.

    Raises:
        OSError: If the file cannot be opened or read.
    """
    # Fixed: call hashlib.md5() directly instead of the pointless
    # getattr(hashlib, "md5")() indirection.
    hash_func = hashlib.md5()
    with open(file_path, "rb") as f:
        while chunk := f.read(8192):
            hash_func.update(chunk)
    return hash_func.hexdigest()

if __name__ == "__main__":
    # Command-line entry point for the literature review agent.
    parser = argparse.ArgumentParser(description="Run the literature review agent")
    parser.add_argument("--topic", type=str, required=True, help="The topic for the literature review")
    parser.add_argument("--debug", action="store_true", help="Enable debugging")
    parser.add_argument("--k", type=int, default=10, help="Number of results to return")
    args = parser.parse_args()

    topic = args.topic
    debug = args.debug
    k = args.k

    # The pipeline works in English: translate Chinese topics first.
    if is_chinese(topic):
        translator = BaiduTranslate()
        log.info("原主题：" + topic)
        topic = translator.translate(topic, "zh", "en")
        log.info("翻译主题：" + topic)

    # Generate the review and report where it was written.
    literature_review = run_agent(topic=topic, debug=debug, k=k)
    file_name = literature_review.file_name
    log.info(f"Literature review generated in: {file_name}")