import os
import gradio as gr
from langchain.utilities import SerpAPIWrapper
from langchain.agents import Tool
from langchain.tools.file_management.write import WriteFileTool
from langchain.tools.file_management.read import ReadFileTool
from langchain.embeddings import OpenAIEmbeddings
import faiss
from langchain.vectorstores import FAISS
from langchain.docstore import InMemoryDocstore
from langchain_experimental.autonomous_agents import AutoGPT
from langchain.chat_models import ChatOpenAI

# API key for SerpAPI web search (used by the agent's "search" tool).
SERPAPI_API_KEY = os.getenv("SERPAPI_API_KEY")
# Proxy base URL for the embedding model; the proxy address must NOT end with /v1.
# NOTE(review): OpenAIEmbeddings() below does not receive this explicitly —
# presumably it is picked up from the environment; confirm the env var name matches.
OPENAI_API_BASE = os.getenv("OPENAI_API_BASE_EMBEDDING")
# Default OpenAI API key, pre-filled into the UI's APIKey textbox.
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")


def auto_gpt(openai_api_key, openai_api_proxy: str | None, openai_model_name, model_temperature, tool_des, ai_name, ai_role, user_input):
    """Build and run an AutoGPT agent for a single user request.

    Assembles a tool set (SerpAPI search, file read/write), an in-memory
    FAISS vector store for agent memory, and a ChatOpenAI LLM, then runs
    the AutoGPT loop on ``user_input``.

    Args:
        openai_api_key: OpenAI API key for the chat model.
        openai_api_proxy: Optional proxy base URL for the chat model.
        openai_model_name: Chat model name (e.g. "gpt-3.5-turbo").
        model_temperature: Sampling temperature; coerced to float because
            Gradio may pass it as a string/number component value.
        tool_des: Description text for the search tool (guides tool choice).
        ai_name: Display name of the agent persona.
        ai_role: Role description of the agent persona.
        user_input: The goal/task to hand to the agent.

    Returns:
        The agent run result, so the Gradio output textbox can display it.
        (Bug fix: previously nothing was returned, leaving the UI output
        permanently empty despite being wired to this function.)
    """
    model_temperature = float(model_temperature)
    # Build the AutoGPT tool set: web search plus local file read/write.
    search = SerpAPIWrapper(serpapi_api_key=SERPAPI_API_KEY)
    tools = [
        Tool(
            name="search",
            func=search.run,
            description=tool_des,
        ),
        WriteFileTool(),
        ReadFileTool(),
    ]

    # OpenAI embedding model; reads its API configuration from the environment.
    embeddings_model = OpenAIEmbeddings()
    # Dimensionality of OpenAI's text-embedding vectors.
    embedding_size = 1536
    # Exact L2 (Euclidean) FAISS index for the agent's memory store.
    index = faiss.IndexFlatL2(embedding_size)
    # In-memory FAISS vector store: (embed_fn, index, docstore, index_to_docstore_id).
    vectorstore = FAISS(embeddings_model.embed_query, index, InMemoryDocstore({}), {})

    agent = AutoGPT.from_llm_and_tools(
        ai_name=ai_name,
        ai_role=ai_role,
        tools=tools,
        llm=ChatOpenAI(model_name=openai_model_name, temperature=model_temperature, openai_proxy=openai_api_proxy, openai_api_key=openai_api_key),
        memory=vectorstore.as_retriever(),  # VectorStoreRetriever backed by FAISS
    )
    # Uncomment to print AutoGPT's internal chain logs for debugging:
    # agent.chain.verbose = True
    return agent.run([user_input])


def ui():
    """Build and launch the Gradio UI for the Auto-GPT demo.

    Wires the form inputs into :func:`auto_gpt` and shows its return value
    in the output textbox. Blocks until the Gradio server is stopped.
    """
    from theme.winter import Winter
    with gr.Blocks(title="Auto GPT UI", theme=Winter()) as interface:
        gr.Markdown('''# <span style='color:brown'>Auto GPT</span> ''')
        openai_api_key = gr.Textbox(label="APIKey", placeholder="请输入API Key", value=OPENAI_API_KEY)
        openai_api_proxy = gr.Textbox(label="OpenAI代理地址（可选）", placeholder="请输入OpenAI代理地址", value=OPENAI_API_BASE)
        # Bug fix: keyword was misspelled "nteractive"; Gradio expects "interactive".
        openai_model_name = gr.Dropdown(value="gpt-3.5-turbo", choices=["gpt-3.5-turbo", "gpt-4"], label="模型",
                                        info="注意所选模型是否支持所选API类型", interactive=True)
        model_temperature = gr.Number(label="Temperature", value=0.7, step=0.1, maximum=1.0)
        with gr.Row():
            tool_des = gr.Textbox(label="Tool描述", placeholder="请输入Agent Tool描述",
                              value="useful for when you need to answer questions about current events. You should ask targeted questions")
        with gr.Row():
            ai_name = gr.Textbox(label="AI名字", placeholder="请输入AI的名字", value="Jarvis")
            # Bug fix: default was "Assistanto", which is not one of the choices.
            ai_role = gr.Dropdown(value="Assistant", choices=["Assistant", "User", "System"], label="AI角色",
                                  interactive=True)
        user_input = gr.Textbox(label="请输入要对话的内容", lines=8)
        ai_ask = gr.Button(value="运行", variant='primary')
        model_output = gr.Textbox(label="输出内容", lines=10)

        # auto_gpt's return value populates model_output.
        ai_ask.click(auto_gpt, inputs=[openai_api_key, openai_api_proxy, openai_model_name, model_temperature, tool_des, ai_name, ai_role, user_input], outputs=model_output)
        interface.queue(max_size=8).launch()


# Launch the Gradio UI when run as a script (not on import).
if __name__ == '__main__':
    ui()
