# Later, load the index
from llama_index.core import StorageContext, load_index_from_storage
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.core.agent.workflow import FunctionAgent
from llama_index.llms.openai import OpenAI
import asyncio
import os
from llama_index.llms.openai_like import OpenAILike
from llama_index.embeddings.openai_like import OpenAILikeEmbedding
from llama_index.core import Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

# NOTE(security): this API key was committed in source control — rotate it and
# supply it via the VENUS_API_KEY environment variable.  The hard-coded value
# is kept only as a fallback so existing behaviour is unchanged when the env
# var is unset.
_VENUS_API_BASE = "http://v2.open.venus.oa.com/llmproxy"
_VENUS_API_KEY = os.environ.get("VENUS_API_KEY", "lqySgeYWKMK3kmoaBJQ9ClRG@2745")

# Local HuggingFace embedding model — embeddings are computed on this machine,
# no remote API calls required.
Settings.embed_model = HuggingFaceEmbedding(
    model_name="BAAI/bge-small-en-v1.5"
)

# Chat LLM served behind an OpenAI-compatible proxy.  Function calling must be
# enabled because the FunctionAgent below drives a tool-calling loop.
llm = OpenAILike(
    model="qwen3-235b-a22b-fp8-local-II",
    api_base=_VENUS_API_BASE,
    api_key=_VENUS_API_KEY,
    is_chat_model=True,
    is_function_calling_model=True,
)
Settings.llm = llm
# Directory where the vector index is persisted between runs.
storage_dir = "storage"

if os.path.exists(storage_dir):
    # Reuse the previously persisted index to avoid re-embedding the corpus.
    storage_context = StorageContext.from_defaults(persist_dir=storage_dir)
    index = load_index_from_storage(storage_context)
else:
    # First run: build the index from the raw documents on disk...
    # NOTE(review): absolute path — consider making this configurable.
    documents = SimpleDirectoryReader("/home/ubuntu/ai-tester/docs").load_data()
    index = VectorStoreIndex.from_documents(documents)
    # ...and persist it so subsequent runs take the fast path above.
    index.storage_context.persist(storage_dir)

# Both branches yield an `index`; build the query engine once, not per branch.
query_engine = index.as_query_engine()

async def search_documents(query: str) -> str:
    """Useful for answering natural language questions about a personal essay written by Paul Graham.

    This docstring doubles as the tool description shown to the LLM when it
    decides whether to invoke the tool, so keep it accurate and concise.

    Args:
        query: The natural-language question to answer from the indexed docs.

    Returns:
        The query engine's response rendered as a plain string.
    """
    response = await query_engine.aquery(query)
    return str(response)


# Agent wired to the document-search tool; the system prompt frames the model
# as a testing engineer who must consult the docs before answering.
agent = FunctionAgent(
    llm=llm,
    tools=[search_documents],
    system_prompt=(
        "You are an excellent testing engineer. You are unfamiliar with the project code and documentation, so you need to read the documentation to confirm."
    ),
)


async def main():
    """Demo entry point: one question that mixes retrieval and arithmetic."""
    question = "What did the author do in college? Also, what's 7 * 8?"
    answer = await agent.run(question)
    print(answer)

async def send_request(agent, prompt: str) -> str:
    """Run *prompt* through *agent* and return the reply as text.

    Args:
        agent: Any object exposing an awaitable ``run(prompt)`` method.
        prompt: The user message to send.

    Returns:
        The agent's response coerced to ``str``.
    """
    reply = await agent.run(prompt)
    return str(reply)

# Run the agent demo only when executed as a script (not on import).
if __name__ == "__main__":
    asyncio.run(main())