# of_LLms / main.py
# (HuggingFace Spaces file — commit 4f7c60f, "Update main.py" by ka1kuk, 1.42 kB)
import gradio as gr
from langchain_experimental.llms.ollama_functions import OllamaFunctions
# OpenAI-style function schema describing the weather lookup the model is
# forced to call on every invocation.
WEATHER_FUNCTION_SCHEMA = {
    "name": "get_current_weather",
    "description": "Get the current weather in a given location",
    "parameters": {
        "type": "object",
        "properties": {
            "location": {
                "type": "string",
                "description": "The city and state, e.g., San Francisco, CA",
            },
            "unit": {
                "type": "string",
                "enum": ["celsius", "fahrenheit"],
            },
        },
        "required": ["location"],
    },
}

# Initialize the Ollama model and bind the schema, pinning function_call so
# the model always responds with a get_current_weather call.
model = OllamaFunctions(model="gemma:7b")
model = model.bind(
    functions=[WEATHER_FUNCTION_SCHEMA],
    function_call={"name": "get_current_weather"},
)
def get_weather(location, unit):
    """Invoke the bound Ollama model for a location/unit pair.

    Builds the prompt as "<location>, <unit>" and returns the raw
    invocation result (the model is bound to emit a function call).
    """
    query = f"{location}, {unit}"
    return model.invoke(query)
# Assemble the Gradio UI: a location textbox plus a unit selector feeding
# get_weather, then start the app.
location_input = gr.Textbox(label="Location (e.g., 'San Francisco, CA')")
unit_input = gr.Radio(choices=["celsius", "fahrenheit"], label="Unit")

iface = gr.Interface(
    fn=get_weather,
    inputs=[location_input, unit_input],
    outputs=gr.Text(label="Weather Information"),
    title="Weather Information",
    description="Enter a location and select the unit to get the current weather.",
    allow_flagging="never",
)
iface.launch()