import json
import os

import requests
import gradio as gr
import openai
from dotenv import load_dotenv

# Load environment variables
load_dotenv()
openai.api_key = os.getenv("OPENAI_API_KEY")


# Define function to get current weather
def get_current_weather(location):
    weather_api_key = os.getenv("WEATHER_API_KEY")
    base_url = (
        "http://api.openweathermap.org/data/2.5/weather"
        f"?q={location}&appid={weather_api_key}&units=metric"
    )
    response = requests.get(base_url, timeout=10)
    response.raise_for_status()
    data = response.json()
    return {
        "location": location,
        "temperature": data["main"]["temp"],
        "weather": data["weather"][0]["description"],
    }


# Define chat function
def weather_chat(user_message):
    messages = [
        {
            "role": "system",
            "content": (
                "You are a weather bot. Answer only in Celsius. "
                "If two cities are asked, provide weather for both."
            ),
        },
        {"role": "user", "content": user_message},
    ]
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        temperature=0,
        max_tokens=256,
        messages=messages,
        functions=[
            {
                "name": "get_current_weather",
                "description": "Get the current weather in a given location",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {
                            "type": "string",
                            "description": "The city, e.g. San Francisco",
                        }
                    },
                    "required": ["location"],
                },
            }
        ],
    )

    message = response["choices"][0]["message"]
    function_call = message.get("function_call")

    # If the model answered directly without requesting the function, return its reply
    if function_call is None:
        return message["content"]

    # Parse the function-call arguments (a JSON string) and fetch the weather
    arguments = json.loads(function_call["arguments"])
    weather_data = get_current_weather(arguments["location"])

    # Send the function result back to the model for a natural-language answer
    messages.append({"role": "assistant", "content": None, "function_call": function_call})
    messages.append(
        {"role": "function", "name": "get_current_weather", "content": str(weather_data)}
    )
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages,
    )
    return response["choices"][0]["message"]["content"]


# Define Gradio interface
iface = gr.Interface(
    fn=weather_chat,
    inputs=gr.Textbox(label="Weather Queries"),
    outputs=gr.Textbox(label="Weather Updates"),
    title="DDS Weather Bot",
    description="Ask me anything about weather!",
)

# Launch the Gradio interface
iface.launch()