# rogerscuall's picture
# Upload folder using huggingface_hub
# 890d952 verified
# /// script
# dependencies = [
# "PyYAML",
# "langchain-community", # For FAISS, HuggingFaceEmbeddings
# "langchain", # Core Langchain
# "faiss-cpu", # FAISS vector store
# "sentence-transformers", # For HuggingFaceEmbeddings
# "openai-agents", # OpenAI Agents SDK
# "gradio[mcp]",
# "gradio",
# # "unstructured" # Required by loader.py, not directly by app.py but good for environment consistency
# ]
# ///
import yaml
import gradio as gr
from agents import Agent, gen_trace_id, Runner, ModelSettings
import asyncio
from textwrap import dedent
# Import the retriever tool and port recommendations agent
from retriever_tool import retrieve_network_information
from port_recomendations import port_recommendations_agent
# Load shared prompt templates from the sibling YAML file.
# Explicit UTF-8 avoids platform-dependent default encodings.
# NOTE(review): prompt_templates is not referenced anywhere in this file —
# confirm whether downstream modules need it or whether it can be removed.
with open("prompts.yaml", "r", encoding="utf-8") as stream:
    prompt_templates = yaml.safe_load(stream)
# Create the main orchestrator agent with the port recommendations agent as a tool
# --- Orchestrator agent -----------------------------------------------------
# System prompt for the top-level routing agent.
_MAIN_AGENT_INSTRUCTIONS = dedent("""
You are a network infrastructure assistant that helps users with various network-related queries.
You have access to specialized tools and agents:
1. retrieve_network_information: For general network documentation queries
2. port_recommendations_tool: For port/interface recommendations and connectivity questions
Use the appropriate tool based on the user's request:
- For port recommendations, unused ports, interface questions, or device connectivity: use port_recommendations_tool
- For general network information, configuration details, or documentation queries: use retrieve_network_information
Always be helpful, precise, and provide detailed responses based on the tools' output.
""")

# Expose the specialist port-recommendations agent as a callable tool.
_PORT_RECOMMENDATIONS_TOOL = port_recommendations_agent.as_tool(
    tool_name="port_recommendations_tool",
    tool_description="Get port and interface recommendations for connecting devices to the network. Use this for questions about unused ports, interface recommendations, or device connectivity."
)

# Top-level agent that routes each user query to the appropriate tool.
main_agent = Agent(
    name="network_agent",
    instructions=_MAIN_AGENT_INSTRUCTIONS,
    model="gpt-4o-mini",
    # Force a tool call on every turn and keep responses deterministic.
    model_settings=ModelSettings(tool_choice="required", temperature=0.0),
    tools=[retrieve_network_information, _PORT_RECOMMENDATIONS_TOOL],
)
async def run(query: str):
    """Run one network query through the orchestrator agent.

    Returns the agent's final text output on success, or an error string
    if anything raises — the UI expects a plain string either way.
    """
    try:
        # Print a tracing link so the run can be inspected on the platform.
        trace_id = gen_trace_id()
        print(f"View trace: https://platform.openai.com/traces/trace?trace_id={trace_id}")
        # Cap the agent loop at 5 turns to bound latency and cost.
        result = await Runner.run(main_agent, f"Query: {query}", max_turns=5)
        return result.final_output
    except Exception as e:
        # Surface the failure to the caller instead of crashing the handler.
        print(f"Error during query processing: {e}")
        return f"An error occurred during processing: {str(e)}"
async def main(query: str):
    """Convenience entry point: run *query*, echo the answer, and return it."""
    answer = await run(query)
    print(answer)
    return answer
def sync_run(query: str):
    """Bridge Gradio's synchronous callback API to the async `run` coroutine."""
    answer = asyncio.run(run(query))
    return answer
# Gradio Interface
with gr.Blocks(theme=gr.themes.Default(primary_hue="blue")) as ui:
gr.Markdown("# Network Infrastructure Assistant")
gr.Markdown("Ask questions about network infrastructure, port recommendations, or device connectivity.")
with gr.Row():
with gr.Column():
query_textbox = gr.Textbox(
label="Your Question",
placeholder="e.g., 'I need an unused port for a new server' or 'What's the BGP configuration?'",
lines=3
)
run_button = gr.Button("Ask", variant="primary")
with gr.Column():
response_textbox = gr.Textbox(
label="Response",
lines=10,
interactive=False
)
# Event handlers
run_button.click(fn=sync_run, inputs=query_textbox, outputs=response_textbox)
query_textbox.submit(fn=sync_run, inputs=query_textbox, outputs=response_textbox)
# Example queries
gr.Markdown("### Example Queries:")
gr.Markdown("- I need an unused port for a new server")
gr.Markdown("- I need to dual connect a server to the network, what ports should I use?")
gr.Markdown("- What are the BGP settings for the fabric?")
gr.Markdown("- Show me the VLAN configuration")
if __name__ == "__main__":
    # Optional offline smoke test (left disabled):
    # asyncio.run(main("I need to dual connect a server to the network, what ports should I use?"))
    # Start the web UI; mcp_server=True also exposes the handlers over MCP.
    ui.launch(inbrowser=True, debug=True, mcp_server=True)