# run_chat.py

from bilud_chatglm3_qa_langchain import qa_chain

def main():
    """Interactive REPL for the local ChatGLM + LangChain QA system.

    Reads questions from stdin in a loop, sends each to ``qa_chain``,
    and prints the answer followed by the source documents it cites.
    The loop ends on "exit"/"quit"/"退出", or on Ctrl-C / Ctrl-D at
    the prompt.
    """
    print("\n=== ChatGLM + LangChain 本地问答系统 ===")
    while True:
        try:
            query = input("\n请输入问题：")
        except (EOFError, KeyboardInterrupt):
            # Ctrl-D / Ctrl-C at the prompt: end the session cleanly
            # instead of dumping a traceback.
            break
        query = query.strip()
        if not query:
            # Don't send empty queries to the chain; just re-prompt.
            continue
        if query.lower() in ["exit", "quit", "\u9000\u51fa"]:
            break
        result = qa_chain({"query": query})

        print("\n回答：", result["result"])
        print("\n信息来源：")
        # Some chain configs omit source_documents; default to an
        # empty list rather than raising KeyError.
        for doc in result.get("source_documents", []):
            print(" -", doc.metadata.get("source", "unknown"))

if __name__ == "__main__":
    main()
