# memory_value_demo.py
import os
from langchain_openai import ChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain


def demonstrate_memory_value(questions=None):
    """Demonstrate the core value of conversation Memory.

    Runs the same sequence of user questions twice against a Qwen model:
    first as independent, stateless prompts, then through a
    ``ConversationChain`` backed by ``ConversationBufferMemory``, so the
    contrast (only the memory-backed run can use earlier turns as
    context) is visible side by side. Finally prints the accumulated
    history buffer.

    Args:
        questions: Optional list of user utterances sent in order.
            Defaults to a four-turn Java-developer job-seeking scenario,
            preserving the original demo's behavior.

    Side effects:
        Prints prompts/responses to stdout and calls the remote
        DashScope-compatible API (requires the DASHSCOPE_API_KEY
        environment variable).
    """
    if questions is None:
        # Default scenario: details accumulate across turns, so the final
        # question only makes sense if earlier turns are remembered.
        questions = [
            "我叫李明，想找Java开发工作",
            "我有3年工作经验",
            "我的技能是Spring Boot和MySQL",
            "根据我的情况推荐合适的职位",
        ]

    print("🎯 Memory核心价值演示")
    print("=" * 50)

    llm = ChatOpenAI(
        api_key=os.getenv("DASHSCOPE_API_KEY"),
        base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
        model="qwen-turbo",
    )

    # 1. Conversation WITHOUT memory: every turn is an isolated prompt,
    # so the model never sees what the user said before.
    print("❌ 没有Memory的对话：")

    prompt_template = ChatPromptTemplate.from_template(
        "你是HR助手，请回答用户问题：{input}"
    )

    for i, question in enumerate(questions, 1):
        formatted_prompt = prompt_template.format(input=question)
        response = llm.invoke(formatted_prompt)
        print(f"   第{i}轮对话：")
        print(f"   用户：{question}")
        # Truncate to the first 100 chars to keep console output readable.
        print(f"   AI：{response.content[:100]}...")
        print()

    print("🔗 有Memory的对话：")

    # 2. Conversation WITH memory: the chain feeds the whole buffered
    # history back into every call.
    # NOTE(review): ConversationBufferMemory/ConversationChain are
    # deprecated in LangChain >= 0.2 (RunnableWithMessageHistory is the
    # modern replacement) — kept here to match this file's imports.
    memory = ConversationBufferMemory()
    conversation = ConversationChain(
        llm=llm,
        memory=memory,
        verbose=True,
    )

    for i, question in enumerate(questions, 1):
        # ConversationChain.predict returns a plain string (not a message
        # object), hence no `.content` here.
        response = conversation.predict(input=question)
        print(f"   第{i}轮对话：")
        print(f"   用户：{question}")
        print(f"   AI：{response[:100]}...")
        print()

    # 3. Dump the accumulated history so the buffering is visible.
    print("📋 完整对话历史：")
    print(memory.buffer)
    print(type(memory.buffer))


# Run the demo only when this file is executed directly — the previous
# unconditional call triggered remote API requests on mere import.
if __name__ == "__main__":
    demonstrate_memory_value()
