# app.py
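"""Streamlit demo of a simple tool-calling agent.

A GPT-2 text-generation pipeline is prompted to answer questions either
directly or by emitting a JSON "action" that is dispatched to one of the
simulated tools defined below (weather lookup, Wikipedia search).

Run with: streamlit run app.py
"""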
import streamlit as st
import json
from transformers import pipeline

# --- Functions (Simulated) ---
def get_current_weather(location):
    """Simulates fetching the current weather for a given location."""
    weather_data = {
        "location": location,
        "temperature": "32°C",  # Changed for consistency
        "conditions": "Sunny",  # Changed for consistency
        "humidity": "50%"      # Changed for consistency
    }
    return json.dumps(weather_data)

def search_wikipedia(query):
    """Simulates searching Wikipedia for a given query."""
    search_results = {
        "query": query,
        "summary": f"This is a simulated summary for the query: {query}. Real information would be here."
    }
    return json.dumps(search_results)

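# Dispatch table: maps the tool names the agent may emit to the functions above.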
available_functions = {
    "get_current_weather": get_current_weather,
    "search_wikipedia": search_wikipedia
}

# --- Agent Interaction Function ---
# Load the generation pipeline once at module level so it is not rebuilt on every query
model = pipeline("text-generation", model="gpt2")

def run_agent(user_query):
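    """Prompt the model with few-shot examples and return its raw generated continuation."""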
    prompt = f"""You are a helpful information-gathering agent.
    Your goal is to answer user queries effectively. You have access to the following tools (functions):
    {list(available_functions.keys())}.

    For certain questions, you might need to use these tools to get the most up-to-date information.
    When you decide to use a tool, respond in a JSON format specifying the 'action' (the function name)
    and the 'parameters' for that function.

    If you can answer the question directly without using a tool, respond in a JSON format with
    'action': 'final_answer' and 'answer': 'your direct answer'.

    Example 1:
    User query: What's the weather like in Karachi?
    Agent response: {{"action": "get_current_weather", "parameters": {{"location": "Karachi"}}}}

    Example 2:
    User query: Tell me about the capital of Pakistan.
    Agent response: {{"action": "final_answer", "answer": "The capital of Pakistan is Islamabad."}}

    Example 3:
    User query: Search for information about the Mughal Empire.
    Agent response: {{"action": "search_wikipedia", "parameters": {{"query": "Mughal Empire"}}}}

    User query: {user_query}
    Agent response: """

    # return_full_text=False returns only the continuation (not the prompt), so it can be parsed as JSON
    output = model(prompt, max_new_tokens=100, num_return_sequences=1,
                   return_full_text=False, stop_sequence="\n")[0]['generated_text']
    return output.strip()

# --- Response Processing Function ---
def process_agent_response(agent_response_json):
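    """Parse the agent's JSON response and dispatch the requested simulated tool call."""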
    try:
        response_data = json.loads(agent_response_json)
        action = response_data.get("action")

        if action == "get_current_weather":
            parameters = response_data.get("parameters", {})
            location = parameters.get("location")
            if location:
                weather = get_current_weather(location)
                return {"final_answer": f"The agent called 'get_current_weather' for '{location}'. Simulated result: {weather}"}
            else:
                return {"final_answer": "Error: Location not provided for weather lookup."}
        elif action == "search_wikipedia":
            parameters = response_data.get("parameters", {})
            query = parameters.get("query")
            if query:
                search_result = search_wikipedia(query)
                return {"final_answer": f"The agent called 'search_wikipedia' for '{query}'. Simulated result: {search_result}"}
            else:
                return {"final_answer": "Error: Query not provided for Wikipedia search."}
        elif action == "final_answer":
            return {"final_answer": response_data.get("answer", "No direct answer provided.")}
        else:
            return {"final_answer": f"Unknown action: {action}"}
    except json.JSONDecodeError:
        return {"final_answer": "Error decoding agent's response."}

# --- Streamlit App ---
def main():
    st.title("Gen AI Information Gathering Agent")
    st.write("Ask me a question, and I'll try to answer it using my simulated tools!")

    user_query = st.text_input("Your question:", "What's the weather like in London?")
    if user_query:
        agent_response = run_agent(user_query)
        st.write(f"Agent Response (JSON):")
        st.json(agent_response)  # Use st.json for better display

        processed_response = process_agent_response(agent_response)
        st.write("Processed Response:")
        st.write(processed_response["final_answer"])

if __name__ == "__main__":
    main()