Spaces:
Sleeping
Sleeping
pratikshahp
committed on
Commit
•
a813590
1
Parent(s):
686662f
Update app.py
Browse files
app.py
CHANGED
@@ -1,21 +1,18 @@
|
|
1 |
import os
|
2 |
from dotenv import load_dotenv
|
3 |
import httpx
|
4 |
-
import
|
5 |
from langchain.prompts import PromptTemplate
|
6 |
from langchain_huggingface import HuggingFaceEndpoint
|
7 |
from langchain_core.messages import BaseMessage, HumanMessage
|
8 |
from langgraph.graph import MessageGraph, END
|
9 |
from typing import Sequence
|
10 |
|
|
|
11 |
load_dotenv()
|
12 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
13 |
WEATHER_TOKEN = os.getenv("WEATHER_TOKEN")
|
14 |
|
15 |
-
# streamlit app
|
16 |
-
st.title("City Weather Information with AI Review")
|
17 |
-
city = st.text_input("Enter the name of a city:")
|
18 |
-
|
19 |
# Initialize the HuggingFace inference endpoint
|
20 |
llm = HuggingFaceEndpoint(
|
21 |
repo_id="mistralai/Mistral-7B-Instruct-v0.3",
|
@@ -25,8 +22,7 @@ llm = HuggingFaceEndpoint(
|
|
25 |
)
|
26 |
|
27 |
# Define nodes
|
28 |
-
def fetch_weather_node(
|
29 |
-
city = state[0].content.strip()
|
30 |
url = f"https://api.openweathermap.org/data/2.5/weather?q={city}&appid={WEATHER_TOKEN}&units=metric"
|
31 |
|
32 |
try:
|
@@ -39,15 +35,14 @@ def fetch_weather_node(state: Sequence[BaseMessage]) -> str:
|
|
39 |
except Exception as e:
|
40 |
return f"Error: {e}"
|
41 |
|
42 |
-
def generate_review_node(
|
43 |
-
|
44 |
-
response = llm(input_text)
|
45 |
return response
|
46 |
|
47 |
# Define the prompt template for generating weather reviews
|
48 |
review_prompt_template = """
|
49 |
You are an expert weather analyst. Based on the provided weather information, generate a detailed and insightful review.
|
50 |
-
Weather Information: {weather_info
|
51 |
Your review should include an analysis of the weather conditions and finish in 150 words.
|
52 |
Review:
|
53 |
"""
|
@@ -67,27 +62,33 @@ builder.set_finish_point("generate_review")
|
|
67 |
# Compile the graph
|
68 |
graph = builder.compile()
|
69 |
|
70 |
-
#
|
71 |
-
|
72 |
if city:
|
73 |
-
|
74 |
-
|
75 |
-
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
st.subheader("Mermaid Graph")
|
85 |
-
st.write("Check out this [mermaid link](https://mermaid.live/) to display a graph with following data")
|
86 |
-
#st.write(graph.get_graph().draw_mermaid())
|
87 |
-
mermaid_code = graph.get_graph().draw_mermaid()
|
88 |
-
st.markdown(f"```mermaid\n{mermaid_code}\n```", unsafe_allow_html=True)
|
89 |
|
90 |
-
|
91 |
-
|
92 |
else:
|
93 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
import os
|
2 |
from dotenv import load_dotenv
|
3 |
import httpx
|
4 |
+
import gradio as gr
|
5 |
from langchain.prompts import PromptTemplate
|
6 |
from langchain_huggingface import HuggingFaceEndpoint
|
7 |
from langchain_core.messages import BaseMessage, HumanMessage
|
8 |
from langgraph.graph import MessageGraph, END
|
9 |
from typing import Sequence
|
10 |
|
11 |
+
# Load environment variables
|
12 |
load_dotenv()
|
13 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
14 |
WEATHER_TOKEN = os.getenv("WEATHER_TOKEN")
|
15 |
|
|
|
|
|
|
|
|
|
16 |
# Initialize the HuggingFace inference endpoint
|
17 |
llm = HuggingFaceEndpoint(
|
18 |
repo_id="mistralai/Mistral-7B-Instruct-v0.3",
|
|
|
22 |
)
|
23 |
|
24 |
# Define nodes
|
25 |
+
def fetch_weather_node(city: str) -> str:
|
|
|
26 |
url = f"https://api.openweathermap.org/data/2.5/weather?q={city}&appid={WEATHER_TOKEN}&units=metric"
|
27 |
|
28 |
try:
|
|
|
35 |
except Exception as e:
|
36 |
return f"Error: {e}"
|
37 |
|
38 |
+
def generate_review_node(weather_info: str) -> str:
    """Generate an AI-written review for the given weather-information text.

    Delegates directly to the module-level HuggingFace endpoint ``llm``;
    the raw model completion is returned unchanged.
    """
    return llm(weather_info)
40 |
return response
|
41 |
|
42 |
# Define the prompt template for generating weather reviews
|
43 |
review_prompt_template = """
|
44 |
You are an expert weather analyst. Based on the provided weather information, generate a detailed and insightful review.
|
45 |
+
Weather Information: {weather_info}
|
46 |
Your review should include an analysis of the weather conditions and finish in 150 words.
|
47 |
Review:
|
48 |
"""
|
|
|
62 |
# Compile the graph
|
63 |
graph = builder.compile()
|
64 |
|
65 |
+
# Define the Gradio interface
|
66 |
+
def get_weather_and_review(city: str) -> str:
    """Fetch weather for *city* via the LangGraph pipeline and return it
    together with an AI-generated review, formatted as Markdown.

    Returns a prompt to enter a city when *city* is empty, or an error
    message string if any pipeline step raises.
    """
    # Guard clause: empty input short-circuits before any graph work.
    if not city:
        return "Please enter a city name."
    try:
        # First graph pass: fetch raw weather for the city.
        weather_messages = graph.invoke(HumanMessage(content=city))
        # NOTE(review): index 1 assumes the fetch node's reply is the second
        # message in the returned sequence — confirm against the graph wiring.
        weather_text = weather_messages[1].content

        # Second graph pass: run the filled-in review prompt through the
        # same graph. NOTE(review): this re-enters the fetch node with the
        # prompt text as input; index 2 presumably picks the review reply.
        prompt = review_prompt_template.format(weather_info=weather_text)
        review_messages = graph.invoke(HumanMessage(content=prompt))
        review_text = review_messages[2].content

        return f"**Weather Information:**\n{weather_text}\n\n**AI Generated Weather Review:**\n{review_text}"
    except Exception as e:
        return f"Error generating weather review: {e}"
|
84 |
+
|
85 |
+
# Gradio UI: one textbox in, formatted Markdown-ish text out.
interface = gr.Interface(
    fn=get_weather_and_review,
    inputs=gr.Textbox(lines=2, placeholder="Enter the name of a city:", label="City"),
    outputs="text",
    title="City Weather Information with AI Review",
    description="Enter the name of a city to get current weather information and an AI-generated review based on that information.",
)

# Launch the app only when run as a script, not on import.
if __name__ == "__main__":
    interface.launch()
|