from chromadb.utils.embedding_functions import OllamaEmbeddingFunction
from crewai import Agent, Task, Crew, LLM
from crewai.memory import EntityMemory, ShortTermMemory, LongTermMemory
from crewai.memory.storage.ltm_sqlite_storage import LTMSQLiteStorage
from crewai.memory.storage.rag_storage import RAGStorage
from langchain_openai import ChatOpenAI
import os

# Set the OpenAI API key only if the environment does not already provide one,
# so a real key exported by the user is never clobbered by this placeholder.
os.environ.setdefault("OPENAI_API_KEY", "your-api-key")

# Minimal CrewAI chat example: one agent + one task, with RAG-backed memory
# using an Ollama embedder (bge-m3).
def simple_chat(message):
    """Run a one-shot chat turn: build an agent, task and crew, then kick off.

    Args:
        message: The user's chat message to respond to.

    Returns:
        The result of ``crew.kickoff()`` (a CrewOutput from CrewAI).
    """
    # Shared embedder settings for every RAG-backed memory store below,
    # so the Ollama endpoint/model is defined in exactly one place.
    embedder_config = {
        "provider": "ollama",
        "config": {
            "url": "http://192.168.77.47:11434",
            "model_name": "bge-m3",
        },
    }

    # SECURITY: the LLM API key is hard-coded in source below — move it to an
    # environment variable or secret store before committing/deploying.
    agent = Agent(
        role="助手",
        goal="帮助用户",
        backstory="你是一个专业的对话分析师，擅长理解自然语言，能够准确识别用户的真实需求和情感状态。",
        llm=LLM(
            model='doubao-1-5-pro-32k-character-250715',
            base_url='http://192.168.77.47:8099/api/v1/doubao',
            api_key='2e6d307b-3067-4639-ae78-ee7e55fc2800',
        ),
        telemetry=False,
    )

    # One task that simply replies to the incoming message.
    task = Task(
        description=f"回复: {message}",
        expected_output="用温和的自然语言回答",
        agent=agent,
    )

    # NOTE(review): depending on the CrewAI version, custom memory instances
    # may also require `memory=True` on Crew — verify against the docs.
    crew = Crew(
        agents=[agent],
        tasks=[task],
        # Long-term memory: persistent SQLite storage across sessions.
        long_term_memory=LongTermMemory(
            storage=LTMSQLiteStorage(
                db_path="./my_crew1/long_term_memory_storage.db"
            )
        ),
        # Short-term memory: current-context recall via RAG.
        short_term_memory=ShortTermMemory(
            storage=RAGStorage(
                embedder_config=embedder_config,
                type="short_term",
                path="./my_crew1/",
            )
        ),
        # FIX: entity memory previously reused type="short_term", which makes
        # it share the short-term memory's vector collection; give it its own
        # "entities" collection as CrewAI's default entity storage does.
        entity_memory=EntityMemory(
            storage=RAGStorage(
                embedder_config=embedder_config,
                type="entities",
                path="./my_crew1/",
            )
        ),
    )
    return crew.kickoff()


# Direct command-line usage: a tiny REPL around simple_chat().
if __name__ == "__main__":
    while True:
        # input() raises EOFError on Ctrl-D and KeyboardInterrupt on Ctrl-C;
        # treat both as a clean exit instead of a traceback.
        try:
            user_msg = input("你说: ")
        except (EOFError, KeyboardInterrupt):
            break
        # Strip whitespace so "quit " / " quit" also terminate the loop.
        if user_msg.strip() == "quit":
            break
        response = simple_chat(user_msg)
        print("AI:", response)