# 加载 .env 到环境变量
import os
from dotenv import load_dotenv, find_dotenv
load_dotenv(find_dotenv(), override=True)
import argparse  
from fastapi import FastAPI
from typing import Union
import gradio as gr

from components.llms import Qwen2LLM, OpenAILLM, ZhipuLLM

def main():
    """CLI entry point: parse arguments and dispatch to the requested mode.

    --db load|clean : build the vector database (load) or wipe ./db (clean),
                      then exit without starting a chat mode.
    --chat_mode     : "shell" for terminal chat, "web" (default) for the
                      Gradio web UI.

    Raises:
        ValueError: if --chat_mode is neither "shell" nor "web".
    """
    parser = argparse.ArgumentParser(description="启动robot")
    parser.add_argument('--chat_mode', type=str, required=False, default="web", help="聊天模式")
    parser.add_argument('--db', type=str, required=False, help="切分数据集为向量")
    args = parser.parse_args()

    # Database maintenance commands short-circuit the chat modes.
    if args.db == "load":
        load_data()
        return
    if args.db == "clean":
        from common.libs import helper
        helper.delete_directory_contents("./db")
        return

    if args.chat_mode == "shell":
        chat_shell()
    elif args.chat_mode == "web":
        chat_web()
    else:
        # Fail loudly with the offending value instead of a bare Exception("error").
        raise ValueError(
            f"unknown chat_mode: {args.chat_mode!r} (expected 'shell' or 'web')"
        )

# Initialize the chat model once at module level; shared by all chat modes.
llm = Qwen2LLM()
# Alternative backends — swap in by uncommenting one of these:
# llm = OpenAILLM()
# llm = ZhipuLLM()
def load_data(folder_path: str = "./dataset", export_path: str = "./db") -> bool:
    """Read documents from *folder_path*, embed them, and export vectors.

    Chunks every file in the folder (chunk size 200, overlap 150), embeds the
    chunks with ZhipuEmbedding, and writes the vector store to *export_path*.

    Args:
        folder_path: directory containing the source documents.
        export_path: directory to write the vector database into.

    Returns:
        True on success, False if any step failed.
    """
    from components.embedding import ZhipuEmbedding
    from components.data_utils import ReadFileFolder
    from components.vector_databases import VectorDB
    try:
        # 'reader' instead of 'filter' — avoid shadowing the builtin.
        reader = ReadFileFolder(folder_path=folder_path)
        docs = reader.get_all_chunk_content(200, 150)
        # NOTE(review): the original also instantiated BGEembedding() and then
        # immediately overwrote it with ZhipuEmbedding() — dead work, removed.
        # from components.embedding import BGEembedding
        # embedding_model = BGEembedding()
        embedding_model = ZhipuEmbedding()
        database = VectorDB(docs)
        database.get_vector(embedding_model)  # computes and stores vectors on the DB
        database.export_data(export_path)
    except Exception as e:
        # Report the failure instead of swallowing it silently.
        print(f"load_data failed: {e}")
        return False
    return True


def chat_shell():
    """Interactive terminal chat loop; type "q" to end the conversation."""
    print("===============================")
    print("=开启shell聊天模式=")
    print("AI：请问我有什么能够帮你的")
    while True:
        user_text = input("用户：")
        if user_text != "q":
            # Forward the user's message to the model and echo the reply.
            print("AI：" + llm.chat(user_text))
            continue
        print("=结束对话")
        return


def chat_web():
    """Launch the Gradio web chat UI backed by chat_response."""
    interface = gr.ChatInterface(chat_response)
    interface.launch()


def chat_response(message, history):
    """Gradio ChatInterface callback: return the LLM's reply to *message*.

    Args:
        message: the latest user message.
        history: prior conversation turns — required by the ChatInterface
            callback signature but not used; each turn is answered statelessly.

    Returns:
        The model's reply text from llm.chat.
    """
    # 'global' is only needed for assignment; reading module-level 'llm'
    # works without it, so the declaration was removed.
    return llm.chat(message)

if __name__ == '__main__':
    # Run the CLI only when executed as a script, not when imported.
    main()