import streamlit as st
from langchain.schema.runnable import RunnablePassthrough
from langchain.memory import ConversationBufferWindowMemory
from langchain.agents import AgentExecutor
from langchain.agents.format_scratchpad import format_to_openai_functions
import wikipedia
from langchain.tools.render import format_tool_to_openai_function
from langchain.prompts import MessagesPlaceholder
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.agents import tool
import requests
import datetime
from langchain.pydantic_v1 import BaseModel, Field
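# Assumes the OPENAI_API_KEY environment variable is set before the app is launched,
# e.g. (filename illustrative):  OPENAI_API_KEY=... streamlit run agent_chatbot.py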

st.set_page_config(page_title="ChatBot", page_icon="🤖")
st.header('Agent ChatBot')
st.write('Allows users to chat with GPT, search Wikipedia, and fetch the current weather temperature of any place.')
if "messages" not in st.session_state:
    st.session_state["messages"] = [{"role":"assistant","content":"Hello! How can I assist you today?"}]


chat_model = ChatOpenAI(model="gpt-3.5-turbo-1106", streaming=True)

# Define the input schema
class OpenMeteoInput(BaseModel):
    latitude: float = Field(..., description="Latitude of the location to fetch weather data for")
    longitude: float = Field(..., description="Longitude of the location to fetch weather data for")

@tool(args_schema=OpenMeteoInput)
def get_current_temperature(latitude: float, longitude: float) -> str:
    """Fetch current temperature for given coordinates."""
    
    BASE_URL = "https://api.open-meteo.com/v1/forecast"
    
    # Parameters for the request
    params = {
        'latitude': latitude,
        'longitude': longitude,
        'hourly': 'temperature_2m',
        'forecast_days': 1,
    }

    # Make the request
    response = requests.get(BASE_URL, params=params)
    
    if response.status_code == 200:
        results = response.json()
    else:
        return f"API Request failed with status code: {response.status_code}"

    current_utc_time = datetime.datetime.utcnow()
    time_list = [datetime.datetime.fromisoformat(time_str.replace('Z', '+00:00')) for time_str in results['hourly']['time']]
    temperature_list = results['hourly']['temperature_2m']
    
    closest_time_index = min(range(len(time_list)), key=lambda i: abs(time_list[i] - current_utc_time))
    current_temperature = temperature_list[closest_time_index]
    
    return f'The current temperature is {current_temperature}°C'
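
# For reference, Open-Meteo's hourly payload looks roughly like (illustrative, truncated):
#   {"hourly": {"time": ["2024-01-01T00:00", ...], "temperature_2m": [3.1, ...]}}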


@tool
def SearchWikiPedia(query: str):
    "Serach wikipedia and get page summaries."

    page_titles = wikipedia.search(query)
    
    summaries = []

    for page_title in page_titles[:3]:
        try:
            wiki_page = wikipedia.page(title=page_title, auto_suggest=False)
            summaries.append(f"Page: {wiki_page.title}\nSummary: {wiki_page.summary}")
        except Exception:
            pass

    if not summaries:
        return "No Good WikiPedia Search results found."
    return "\n\n".join(summaries)

tools = [get_current_temperature, SearchWikiPedia]

# Convert each tool to an OpenAI function definition and bind it to the chat model.
functions = [format_tool_to_openai_function(t) for t in tools]
model_with_function = chat_model.bind(functions=functions)
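# Illustrative sanity check: model_with_function.invoke("What's the temperature in Paris?")
# would typically return an AIMessage whose additional_kwargs contain a "function_call"
# naming get_current_temperature with latitude/longitude arguments.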

prompt = ChatPromptTemplate.from_messages([("system","You are helpfull assistant.You can query from wikipedia if you donot know the answer of user query also you can check current weather temperature of any place."),
    MessagesPlaceholder(variable_name="chat_history"),
    ("user","{user_input}"),
    MessagesPlaceholder(variable_name="agent_scratchpad")])

chain = prompt | model_with_function | OpenAIFunctionsAgentOutputParser()
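
# On each turn, prior tool calls and their observations ('intermediate_steps') are
# formatted into OpenAI function messages and fed into the agent_scratchpad placeholder.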

agent_chain = RunnablePassthrough.assign(
    agent_scratchpad=lambda x: format_to_openai_functions(x['intermediate_steps'])
) | chain


for msg in st.session_state["messages"]:
    st.chat_message(msg["role"]).write(msg["content"])

@st.cache_resource
def main():
    # Cache the executor so the conversation memory persists across Streamlit reruns.
    memory = ConversationBufferWindowMemory(return_messages=True, memory_key="chat_history", k=54)
    return AgentExecutor(agent=agent_chain, tools=tools, memory=memory)

def _main():
    if user_input := st.chat_input("Send Message To GPT..."):
        st.chat_message("user").write(user_input)
        st.session_state.messages.append({"role": "user", "content": user_input})
        response = main().invoke({'user_input': user_input})
        with st.chat_message("assistant"):
            st.write(response["output"])
            st.session_state.messages.append({"role": "assistant", "content": response["output"]})
try:
    _main()
except Exception as error:
    st.error(error)