Spaces:
Runtime error
Runtime error
File size: 3,420 Bytes
583c1b1 ca00ac2 ce3bf23 386acb4 ce3bf23 ca00ac2 386acb4 ce3bf23 386acb4 3045cfe 386acb4 3045cfe 386acb4 3045cfe 583c1b1 3045cfe ca00ac2 386acb4 3045cfe ce3bf23 386acb4 ce3bf23 386acb4 583c1b1 386acb4 975d3c6 386acb4 583c1b1 386acb4 583c1b1 386acb4 583c1b1 386acb4 583c1b1 386acb4 583c1b1 ce3bf23 583c1b1 386acb4 583c1b1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 |
import os
import json
import gradio as gr
import requests
from dotenv import load_dotenv
from llama_index.core.agent import ReActAgent
from llama_index.core.tools import FunctionTool
from llama_index.llms.openai import OpenAI
# Pull configuration from a local .env file into the process environment.
load_dotenv()

# The OpenRouter API key is required; fail fast with a clear message if it
# is missing or empty rather than erroring on the first API call.
openrouter_token = os.environ.get("OPENROUTER_API_KEY")
if openrouter_token is None or openrouter_token == "":
    raise ValueError("OpenRouter token not found. Configure OPENROUTER_API_KEY in your environment variables")
# Tool backend: canned weather readings for a few demo cities.
def get_current_weather(location: str, unit: str = "fahrenheit") -> dict:
    """
    Get the current weather in a given location

    Args:
        location (str): The city name, e.g. San Francisco, Tokyo
        unit (str): The unit of temperature, either celsius or fahrenheit

    Returns:
        dict: Weather information including location, temperature and unit
    """
    # Ordered table of known cities; substring matching so inputs like
    # "Tokyo, Japan" still resolve, and first match wins (as in the
    # original if/elif chain).
    known_cities = (
        ("tokyo", {"location": "Tokyo", "temperature": "10", "unit": "celsius"}),
        ("san francisco", {"location": "San Francisco", "temperature": "72", "unit": "fahrenheit"}),
        ("paris", {"location": "Paris", "temperature": "22", "unit": "celsius"}),
    )
    query = location.lower()
    for needle, report in known_cities:
        if needle in query:
            return dict(report)
    # Unknown city: echo the (lowercased) query back with the caller's unit.
    return {"location": query, "temperature": "unknown", "unit": unit}
# Expose the weather function to the agent as a callable tool.
weather_tool = FunctionTool.from_defaults(
    get_current_weather,
    name="get_current_weather",
    description="Get the current weather in a given location",
)
# OpenRouter speaks the OpenAI wire protocol, so we reuse llama-index's
# OpenAI LLM class and point it at the OpenRouter endpoint.
class OpenRouterLLM(OpenAI):
    """OpenAI-compatible LLM wrapper that routes requests through OpenRouter.

    Args:
        model_name: OpenRouter model id, e.g. "vendor/model:variant".
        temperature: Sampling temperature forwarded to the API.
        max_tokens: Maximum tokens to generate per completion.
        api_key: OpenRouter API key (OPENROUTER_API_KEY).
    """

    def __init__(self, model_name="qwen/qwen-2.5-coder-32b-instruct:free", temperature=0.7, max_tokens=512, api_key=None):
        # Initialize with custom base URL and model name.
        super().__init__(
            model=model_name,
            temperature=temperature,
            max_tokens=max_tokens,
            api_key=api_key,
            api_base="https://openrouter.ai/api/v1",
            # Fix: the llama-index OpenAI class (a pydantic model) accepts
            # `default_headers`, not `additional_headers`; the unknown
            # keyword raised a validation error at construction time.
            default_headers={
                "HTTP-Referer": "weather-assistant-app",
                "X-Title": "Weather Assistant"
            }
        )

    @property
    def metadata(self):
        # Fix: the base class derives context size by looking the model name
        # up in a table of known OpenAI models and raises ValueError for
        # anything else; OpenRouter ids like "qwen/..." are not in that
        # table, so supply metadata ourselves.
        # NOTE(review): context_window below is a conservative default —
        # confirm the actual limit of the chosen OpenRouter model.
        from llama_index.core.llms import LLMMetadata
        return LLMMetadata(
            context_window=32768,
            num_output=self.max_tokens or 512,
            is_chat_model=True,
            model_name=self.model,
        )
# Configure the OpenRouter-backed language model from the environment key.
_llm_settings = dict(
    model_name="qwen/qwen-2.5-coder-32b-instruct:free",
    temperature=0.7,
    max_tokens=512,
    api_key=openrouter_token,
)
llm = OpenRouterLLM(**_llm_settings)

# Build a ReAct agent that can decide when to call the weather tool.
agent = ReActAgent.from_tools([weather_tool], llm=llm, verbose=False)
def respond(message, history):
    """Gradio chat callback: run the user's message through the agent.

    `history` is supplied by gr.ChatInterface but not used here — the agent
    is queried with the latest message only.
    """
    return str(agent.chat(message))
# Assemble the Gradio UI: a titled chat interface wired to the agent.
with gr.Blocks(title="Weather Assistant") as demo:
    gr.Markdown("# 🌤️ Weather Assistant")
    gr.Markdown("### Ask about the weather in Tokyo, San Francisco, or Paris")
    example_prompts = [
        "What's the weather like in Tokyo?",
        "How's the weather in San Francisco?",
        "Tell me about the current weather in Paris",
        "What should I wear in Tokyo based on the weather?",
        "Is it warm in San Francisco?",
    ]
    chatbot = gr.ChatInterface(
        respond,
        examples=example_prompts,
        title="Chat with Weather Assistant",
    )
    gr.Markdown("### Built with LlamaIndex and OpenRouter API")

# Launch the Gradio server only when executed as a script.
if __name__ == "__main__":
    demo.launch()