import chainlit as cl

from llama_index.core import Settings
from llama_index.core.chat_engine import SimpleChatEngine

from llms import deepseek_llm,gangtise_llm
from app_chat_basic_rag import create_chat_engine_rag

@cl.on_chat_start
async def start():
    """Initialize a new chat session.

    Configures the global LlamaIndex LLM, builds the RAG chat engine,
    stores it in the per-user Chainlit session, and sends a greeting
    message to the user.
    """
    # Use the gangtise LLM as the global default for all LlamaIndex components.
    Settings.llm = gangtise_llm()

    # Build the RAG-backed chat engine and keep it in the user session so
    # each user gets an independent conversation state across messages.
    chat_engine = await create_chat_engine_rag()
    cl.user_session.set("chat_engine", chat_engine)

    await cl.Message(
        author="Assistant", content="您好，我是但问智能助手，请问有什么可以帮到您的吗？"
    ).send()


@cl.on_message
async def main(message: cl.Message):
    """Handle an incoming user message.

    Retrieves the session's chat engine, runs the (blocking) streaming
    chat call off the event loop via ``cl.make_async``, and streams the
    generated tokens back to the UI as they arrive.
    """
    engine = cl.user_session.get("chat_engine")
    reply = cl.Message(content="", author="Assistant")

    # stream_chat is synchronous; make_async runs it in a worker thread
    # so the event loop is not blocked while the LLM responds.
    stream_chat_async = cl.make_async(engine.stream_chat)
    response = await stream_chat_async(message.content)

    # Forward each generated token to the UI for incremental display.
    for chunk in response.response_gen:
        await reply.stream_token(chunk)

    await reply.send()
