import os

from langchain_core.prompts import ChatPromptTemplate
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_openai import ChatOpenAI

# LLM client for the Alibaba Bailian (DashScope) OpenAI-compatible endpoint.
# API key console: https://bailian.console.aliyun.com/?apiKey=1#/api-key
# Service docs: https://help.aliyun.com/zh/model-studio/getting-started/what-is-model-studio
llm = ChatOpenAI(
    # SECURITY: prefer the DASHSCOPE_API_KEY environment variable. The literal
    # fallback preserves the original behavior, but a key committed to source
    # control should be treated as leaked — rotate it and drop the fallback.
    api_key=os.environ.get(
        "DASHSCOPE_API_KEY", "sk-0e687ddcf0164a6fb66c1096447223c4"
    ),
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    model="qwen-plus",
    temperature=0,  # deterministic output for reproducible chunk assignments
    # other params...
)

# Global chunk registry: chunk_id -> {"summary": str, "title": str,
# "propositions": list[str]}. Mutated by the functions below.
chunks = {}


def create_new_chunk(chunk_id, proposition):
    """Register a brand-new chunk under *chunk_id* seeded with one proposition.

    Asks the LLM (via structured output) for a title and summary describing
    the single proposition, then stores the new entry in the module-level
    ``chunks`` registry.
    """
    prompt = ChatPromptTemplate.from_messages([
        ("system", "Generate a new summary and a title based on the propositions."),
        ("user", "propositions:{propositions}"),
    ])
    # Pipe the prompt into an LLM constrained to emit a ChunkMeta object.
    chain = prompt | llm.with_structured_output(ChunkMeta)
    meta = chain.invoke({"propositions": [proposition]})
    chunks[chunk_id] = {
        "summary": meta.summary,
        "title": meta.title,
        "propositions": [proposition],
    }

class ChunkMeta(BaseModel):
    """Structured-output schema for a chunk: the LLM fills in title + summary.

    The Field descriptions are sent to the model as part of the structured
    output schema, so they are runtime behavior, not just documentation.
    """

    title: str = Field(description="The title of the chunk.")
    summary: str = Field(description="The summary of the chunk.")

def add_proposition(chunk_id, proposition):
    """Append *proposition* to an existing chunk and refresh its metadata.

    The LLM sees the chunk's current title/summary plus the full proposition
    list and either keeps them or produces replacements. Raises ``KeyError``
    if *chunk_id* is not present in the ``chunks`` registry.
    """
    prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "If the current_summary and title is still valid for the propositions return them.If not generate a new summary and a title based on the propositions.",
            ),
            (
                "user",
                "current_summary:{current_summary}\n\ncurrent_title:{current_title}\n\npropositions:{propositions}",
            ),
        ]
    )
    chain = prompt | llm.with_structured_output(ChunkMeta)

    entry = chunks[chunk_id]
    updated_propositions = entry["propositions"] + [proposition]

    meta = chain.invoke(
        {
            "current_summary": entry["summary"],
            "current_title": entry["title"],
            "propositions": updated_propositions,
        }
    )

    # Persist the (possibly unchanged) metadata and the extended list.
    entry["summary"] = meta.summary
    entry["title"] = meta.title
    entry["propositions"] = updated_propositions

def find_chunk_and_push_proposition(proposition):
    """Route *proposition* to the best-matching chunk, creating one if needed.

    The LLM is shown every existing chunk id + summary and asked to pick the
    best match (or invent a fresh id). Unknown ids create a new chunk; known
    ids get the proposition appended via ``add_proposition``.
    """

    class ChunkID(BaseModel):
        """Structured output: the id of the selected (or newly invented) chunk."""

        chunk_id: int = Field(description="The chunk id.")

    allocation_llm = llm.with_structured_output(ChunkID)

    # FIX: the original implicit string concatenation ran sentences together
    # ("...summariesFind the chunk...") and fused the two user fields with no
    # separator; explicit spacing/newlines keep the prompt readable to the model.
    allocation_prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You have the chunk ids and the summaries. "
                "Find the chunk that best matches the proposition. "
                "If no chunk matches, return a new chunk id. "
                "Return only the chunk id.",
            ),
            (
                "user",
                "proposition:{proposition}\n\nchunks_summaries:{chunks_summaries}",
            ),
        ]
    )

    allocation_chain = allocation_prompt | allocation_llm

    # id -> summary map: all the model needs to compare against each chunk.
    chunks_summaries = {
        chunk_id: chunk["summary"] for chunk_id, chunk in chunks.items()
    }

    best_chunk_id = allocation_chain.invoke(
        {"proposition": proposition, "chunks_summaries": chunks_summaries}
    ).chunk_id

    if best_chunk_id not in chunks:
        # BUG FIX: the original assigned create_new_chunk()'s return value
        # (always None) back to best_chunk_id; the call matters only for its
        # side effect of registering the new chunk.
        create_new_chunk(best_chunk_id, proposition)
        return

    add_proposition(best_chunk_id, proposition)



# Example driver (appended at the end of the file).
if __name__ == "__main__":
    # Sample input data — replace with your own propositions.
    input_propositions = [
    "近日，国家博物馆的一款文创产品‘凤冠冰箱贴’迎来了高光时刻。",
    "这款‘凤冠冰箱贴’上市仅8个月，销量突破100万件。",
    "‘凤冠冰箱贴’还带动了凤冠全系列文创产品的销售额过亿。",
    "为了购买这款‘凤冠冰箱贴’，有人在网上持续‘蹲库存’。",
    "还有人一大早就到国家博物馆门口排队。",
    "话题#抢凤冠冰箱贴的队排到了国博大门外#登上热搜榜。",
    "一枚‘凤冠冰箱贴’展现了古人‘顶流’的审美。",
    "这款畅销的‘凤冠冰箱贴’的原型是明代孝端皇后的九龙九凤冠。",
    "孝端皇后的九龙九凤冠重量达到2320克。",
    "九龙九凤冠共镶嵌100多块宝石和4000多颗珍珠。",
    "九龙九凤冠上装饰有花丝金龙和点翠花型，淋漓尽致地展现了明代美学风范。",
    "在‘凤冠冰箱贴’火爆的同时，一顶复刻的凤冠也在网络上走红。",
    "一名高二女生用扭扭棒和珍珠复刻了一顶精美的凤冠。",
    "前不久，在杭州第十四中学青山湖学校，一顶手工复刻的精美凤冠得到了同学和老师们的称赞。",
    "这顶凤冠由该校高二年级的4名女生合作完成。",
    "制作前，她们仔细研究了凤冠各角度的图片，并决定用扭扭棒来制作。",
    "几人商量后，下单了300根扭扭棒以及几百颗珍珠。",
    "大家分工合作，有人负责珍珠饰品组装，有人负责制作凤冠背后的博鬓。",
    "最终她们完美复刻了这顶明代凤冠。",
    "网友们纷纷点赞，表示‘用扭扭棒复刻凤冠，传统文化在新生代手中玩出新花样’。",
    "复刻文物风潮激发了年轻人的兴趣，‘文博热’持续升温。",
    "很多年轻人在打卡博物馆、领略文物之美的同时，也通过‘复刻’的方式表达对中华优秀传统文化的热爱。",
    "浙江温州的一位95后女孩小周喜欢逛博物馆，并自学用黏土复刻文物。",
    "她已经制作了100多件微缩文物，跨越十多个朝代。",
    "辽宁沈阳的一位手工达人用平常物件为女儿复刻了沈阳故宫的院藏珍品——清代黑缎嵌点翠凤戏牡丹女帽。",
    "越来越多的年轻人正在成为新生代传统技艺传承人。",
    "绒花、面塑、剪纸等‘老手艺’因年轻人而成为‘新网红’。",
    "承载着丰富历史文化信息的文物因年轻人的‘复刻’以新的面貌重回现代生活。"
    ]

    # Core loop: route every proposition into the chunk registry.
    for proposition_text in input_propositions:
        find_chunk_and_push_proposition(proposition_text)

    # Report the resulting chunks.
    print("=" * 40 + " 分块结果 " + "=" * 40)
    for cid, entry in chunks.items():
        print(f"\nChunk {cid} - {entry['title']}")
        print(f"Summary: {entry['summary']}")
        print("Propositions:")
        for item in entry['propositions']:
            print(f" - {item}")