import sys
import os
import faiss
import gradio as gr


from langchain.vectorstores import FAISS
from langchain.docstore import InMemoryDocstore
from langchain.utilities import SerpAPIWrapper
from langchain.agents import Tool
from langchain.tools.file_management.write import WriteFileTool
from langchain.tools.file_management.read import ReadFileTool
from langchain.embeddings import OpenAIEmbeddings
from langchain_experimental.autonomous_agents import AutoGPT
from langchain.chat_models import ChatOpenAI


# Make sibling modules importable when this file is run as a script.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# SECURITY(review): real-looking API keys were hard-coded and committed here.
# They must be revoked and supplied via the environment / a secrets manager.
# Note also that the same OpenAI-format ("sk-...") key was reused for
# SERPAPI_API_KEY, which is almost certainly wrong — SerpAPI keys use a
# different format.  `setdefault` (instead of plain assignment) at least
# stops this module from clobbering keys already present in the environment.
os.environ.setdefault("SERPAPI_API_KEY", "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")
os.environ.setdefault("OPENAI_API_KEY", "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")

# NOTE(review): this module-level name is never read — translation() builds a
# fresh vector store per call via faissUtils().  Kept only so any external
# code referencing it keeps working.
vectorstore = ""

def autoGPTTools():
    """Build the tool set the AutoGPT agent may invoke.

    Returns:
        list: a SerpAPI-backed web-search Tool followed by file write and
        read tools.
    """
    serp = SerpAPIWrapper()
    search_tool = Tool(
        name="search",
        func=serp.run,
        description="useful for when you need to answer questions about current events. You should ask targeted questions",
    )
    return [search_tool, WriteFileTool(), ReadFileTool()]

def autogpt(text, modelName, tools, vectorstore):
    """Instantiate an AutoGPT agent named "Jarvis".

    Args:
        text: unused here — NOTE(review): kept only for call-site compatibility.
        modelName: chat model identifier (e.g. gpt-3.5-turbo or gpt-4).
        tools: tool list the agent may call.
        vectorstore: FAISS store used as the agent's long-term memory.

    Returns:
        The configured AutoGPT agent.
    """
    chat_model = ChatOpenAI(model_name=modelName, temperature=0)
    return AutoGPT.from_llm_and_tools(
        ai_name="Jarvis",
        ai_role="Assistant",
        tools=tools,
        llm=chat_model,
        # FAISS VectorStoreRetriever serves as the agent's memory backend.
        memory=vectorstore.as_retriever(),
    )


def faissUtils():
    """Create an empty FAISS vector store backed by OpenAI embeddings.

    Returns:
        FAISS: vector store over an L2 flat index with an in-memory docstore.
    """
    embeddings = OpenAIEmbeddings()
    # 1536 = dimensionality of OpenAI's embedding vectors.
    dim = 1536
    flat_index = faiss.IndexFlatL2(dim)
    return FAISS(embeddings.embed_query, flat_index, InMemoryDocstore({}), {})

def translation(modelName, text):
    """Answer a question with a fresh AutoGPT agent.

    Args:
        modelName: chat model identifier chosen in the UI.
        text: the user's question.

    Returns:
        The agent's answer string.
    """
    # Build the tool set, then a fresh FAISS memory, then the agent itself.
    toolset = autoGPTTools()
    memory_store = faissUtils()
    agent = autogpt(text, modelName, toolset, memory_store)
    # Run the agent on the question and hand back its answer.
    return agent.run(text)

def launch_gradio():
    """Start the Gradio web UI (listens on all interfaces, with a share link)."""
    model_picker = gr.Dropdown(
        ['gpt-3.5-turbo', 'gpt-4'], label="类型", type="value", default="gpt-3.5-turbo"
    )
    question_box = gr.Textbox(label="问题", placeholder="请提问")
    demo = gr.Interface(
        fn=translation,
        title="ChatGpt",
        inputs=[model_picker, question_box],
        outputs=[gr.Text()],
        allow_flagging="never",
    )
    demo.launch(share=True, server_name="0.0.0.0")

# def initialize_translator():

if __name__ == "__main__":
   
    # initialize_translator()
    # 启动 Gradio 服务
    launch_gradio()