import datetime
import os

import requests
import streamlit as st
import wikipedia
from dotenv import load_dotenv
from langchain.agents import AgentExecutor, tool
from langchain.agents.format_scratchpad import format_to_openai_functions
from langchain.agents.output_parsers import OpenAIFunctionsAgentOutputParser
from langchain.chat_models import ChatOpenAI
from langchain.memory import ConversationBufferWindowMemory
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain.pydantic_v1 import BaseModel, Field
from langchain.schema.runnable import RunnablePassthrough
from langchain.tools.render import format_tool_to_openai_function

load_dotenv()
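# Assumes a local .env file providing OPENAI_API_KEY; load_dotenv() above puts it
# into the environment, where ChatOpenAI picks it up automatically.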
st.set_page_config(page_title="ChatBot", page_icon="🤖")
st.header("Agent ChatBot")
st.write("Chat with GPT, search Wikipedia, and fetch the current temperature for any location.")

if "messages" not in st.session_state:
    st.session_state["messages"] = [{"role": "assistant", "content": "Hello! How can I assist you today?"}]

chat_model = ChatOpenAI(model="gpt-3.5-turbo-1106", streaming=True)
# Define the input schema for the weather tool
class OpenMeteoInput(BaseModel):
    latitude: float = Field(..., description="Latitude of the location to fetch weather data for")
    longitude: float = Field(..., description="Longitude of the location to fetch weather data for")


@tool(args_schema=OpenMeteoInput)
def get_current_temperature(latitude: float, longitude: float) -> str:
    """Fetch the current temperature for the given coordinates."""
    BASE_URL = "https://api.open-meteo.com/v1/forecast"

    # Parameters for the request
    params = {
        'latitude': latitude,
        'longitude': longitude,
        'hourly': 'temperature_2m',
        'forecast_days': 1,
    }

    # Make the request
    response = requests.get(BASE_URL, params=params)
    if response.status_code == 200:
        results = response.json()
    else:
        return f"API request failed with status code: {response.status_code}"

    # Pick the forecast hour closest to the current UTC time
    current_utc_time = datetime.datetime.utcnow()
    time_list = [
        datetime.datetime.fromisoformat(time_str.replace('Z', '+00:00'))
        for time_str in results['hourly']['time']
    ]
    temperature_list = results['hourly']['temperature_2m']
    closest_time_index = min(range(len(time_list)), key=lambda i: abs(time_list[i] - current_utc_time))
    current_temperature = temperature_list[closest_time_index]
    return f'The current temperature is {current_temperature}°C'
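# Quick sanity check outside the agent loop (illustrative coordinates for Berlin,
# any latitude/longitude pair works):
#   get_current_temperature.run({"latitude": 52.52, "longitude": 13.41})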
@tool
def SearchWikiPedia(query: str) -> str:
    """Search Wikipedia and return summaries of the top matching pages."""
    page_titles = wikipedia.search(query)
    summaries = []
    # Keep only the three best matches; skip pages that fail to load
    for page_title in page_titles[:3]:
        try:
            wiki_page = wikipedia.page(title=page_title, auto_suggest=False)
            summaries.append(f"Page: {wiki_page.title}\nSummary: {wiki_page.summary}")
        except Exception:
            pass
    if not summaries:
        return "No good Wikipedia search results found."
    return "\n\n".join(summaries)
# Expose both tools to the model as OpenAI function definitions
tools = [get_current_temperature, SearchWikiPedia]
functions = [format_tool_to_openai_function(t) for t in tools]
model_with_function = chat_model.bind(functions=functions)

prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant. You can query Wikipedia if you do not know the answer to the user's question, and you can check the current weather temperature of any place."),
    MessagesPlaceholder(variable_name="chat_history"),
    ("user", "{user_input}"),
    MessagesPlaceholder(variable_name="agent_scratchpad"),
])

chain = prompt | model_with_function | OpenAIFunctionsAgentOutputParser()

# Feed prior tool calls and observations back to the model via the agent scratchpad
agent_chain = RunnablePassthrough.assign(
    agent_scratchpad=lambda x: format_to_openai_functions(x['intermediate_steps'])
) | chain
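# One bare step of the agent chain, for illustration: with no intermediate steps yet,
# the model either answers directly (AgentFinish) or returns an AgentAction requesting
# a tool call, which the AgentExecutor below runs in a loop until the agent finishes.
#   agent_chain.invoke({"user_input": "What is the weather in Paris?",
#                       "chat_history": [], "intermediate_steps": []})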
# Replay the chat history kept in session state
for msg in st.session_state["messages"]:
    st.chat_message(msg["role"]).write(msg["content"])
def main():
    # Build the agent executor once per session and cache it in session state so the
    # conversation window memory persists across turns instead of being recreated
    # on every message
    if "agent_executor" not in st.session_state:
        memory = ConversationBufferWindowMemory(return_messages=True, memory_key="chat_history", k=54)
        st.session_state["agent_executor"] = AgentExecutor(agent=agent_chain, tools=tools, memory=memory)
    return st.session_state["agent_executor"]


def _main():
    if user_input := st.chat_input("Send Message To GPT..."):
        st.chat_message("user").write(user_input)
        st.session_state.messages.append({"role": "user", "content": user_input})
        response = main().invoke({'user_input': user_input})
        with st.chat_message("assistant"):
            st.write(response["output"])
        st.session_state.messages.append({"role": "assistant", "content": response["output"]})


try:
    _main()
except Exception as error:
    st.error(error)
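# Run locally with:  streamlit run app.py
# (substitute whatever filename this script is saved as)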