import streamlit as st
import requests
import openai
import json  # used to parse the JSON returned by the language model helpers below
from io import BytesIO
from PIL import Image

# Set page configuration as the first Streamlit command
st.set_page_config(page_title="Eco-Symphony", page_icon="🌱", layout="centered")

# Set API keys from Streamlit Secrets
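# Note: openai.ChatCompletion (used throughout) is the legacy pre-1.0 OpenAI Python SDK
# interface, so this app assumes an openai<1.0 release (e.g. openai==0.28) is installed.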
openai.api_key = st.secrets["OPENAI_API_KEY"]
OPENWEATHER_API_KEY = st.secrets["OPENWEATHER_API_KEY"]
HUGGINGFACE_API_KEY = st.secrets["HUGGINGFACE_API_KEY"]

# Hugging Face API URLs
MUSICGEN_API_URL = "https://api-inference.huggingface.co/models/facebook/musicgen-small"
IMAGEGEN_API_URL = "https://api-inference.huggingface.co/models/Artples/LAI-ImageGeneration-vSDXL-2"

# Headers for Hugging Face API requests
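# (the hosted Inference API can return a non-200 status, e.g. 503 while a model is
# cold-starting; generate_music/generate_image below surface such errors to the UI)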
headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}

# Inject custom CSS for green theme
st.markdown("""
    <style>
    body {
        background-color: #ffffff;
    }
    .stApp {
        color: #2e7d32;
        font-family: 'Arial', sans-serif;
    }
    .stButton>button {
        background-color: #66bb6a;
        color: #fff;
        font-weight: bold;
    }
    .stTextInput>div>input {
        background-color: #e8f5e9;
        color: #2e7d32;
    }
    .stMarkdown h1, .stMarkdown h2, .stMarkdown h3, .stMarkdown p {
        color: #388e3c;
    }
    .stMarkdown h2 {
        font-weight: bold;
    }
    </style>
""", unsafe_allow_html=True)

# Initialize session state variables
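# (session state persists across Streamlit reruns, so generated media, challenge
# progress, and points survive widget interactions such as checkbox clicks)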
if "real_data" not in st.session_state:
    st.session_state.real_data = {}
if "story" not in st.session_state:
    st.session_state.story = ""
if "music_bytes" not in st.session_state:
    st.session_state.music_bytes = None
if "image_bytes" not in st.session_state:
    st.session_state.image_bytes = None
if "ngos" not in st.session_state:
    st.session_state.ngos = []
if "points" not in st.session_state:
    st.session_state.points = 0
if "daily_challenges" not in st.session_state:
    st.session_state.daily_challenges = []

# Function to generate daily eco-friendly challenges
def generate_daily_challenges() -> list:
    prompt = "Give me 5 small, easy-to-do eco-friendly daily challenges that can be completed in a day."
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=100,
        temperature=0.8
    )
    challenges = response.choices[0].message['content'].strip().split("\n")
    return [challenge.strip() for challenge in challenges if challenge.strip()]

# Function to fetch weather data
def fetch_real_data(city: str) -> dict:
    weather_url = f'https://api.openweathermap.org/data/2.5/weather?q={city}&appid={OPENWEATHER_API_KEY}&units=metric'
    weather_response = requests.get(weather_url)
    if weather_response.status_code != 200:
        st.error("Error fetching weather data.")
        return {}
    weather_data = weather_response.json()
    return {
        "temperature": weather_data['main'].get('temp', 'Data not available'),
        "humidity": weather_data['main'].get('humidity', 'Data not available'),
        "weather_condition": weather_data['weather'][0].get('main', 'Data not available')
    }

# Function to determine mood based on weather data
def determine_mood(data: dict) -> str:
    weather_condition = data["weather_condition"].lower()
    temperature = data["temperature"]
    if "rain" in weather_condition:
        return "rainy"
    elif "clear" in weather_condition and temperature > 25:
        return "sunny"
    elif "cloud" in weather_condition:
        return "cloudy"
    elif temperature < 15:
        return "cool"
    else:
        return "neutral"

# Function to create a narrative
def create_narrative(city: str, data: dict) -> str:
    return f"In {city}, the weather is {data['weather_condition']} with a temperature of {data['temperature']}Β°C."

# Function to generate a story using OpenAI
def generate_story_with_ai(narrative: str, mood: str) -> str:
    messages = [
        {"role": "system", "content": "You are a creative storyteller using characters and imagery."},
        {"role": "user", "content": f"{narrative} The mood is '{mood}', write a story about how the environment feels in 50 words."}
    ]
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=messages,
        max_tokens=150,
        temperature=0.7
    )
    return response.choices[0].message['content'].strip()

# Function to generate music from Hugging Face API
def generate_music(description: str) -> bytes:
    payload = {"inputs": description}
    response = requests.post(MUSICGEN_API_URL, headers=headers, json=payload)
    if response.status_code != 200:
        st.error(f"Error generating music: {response.status_code} {response.text}")
        return None
    return response.content

# Function to generate an image based on the story
def generate_image(description: str) -> bytes:
    payload = {"inputs": description}
    response = requests.post(IMAGEGEN_API_URL, headers=headers, json=payload)
    if response.status_code != 200:
        st.error(f"Error generating image: {response.status_code} {response.text}")
        return None
    return response.content

# Function to fetch endangered species data
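# (single-species variant; the UI below calls fetch_all_endangered_species instead, so
# this helper is currently unused)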
def fetch_endangered_species(city: str) -> dict:
    prompt = (
        f"Provide details of an endangered species specific to the region around {city}, "
        "including its name, image description, and current population. "
        "Return the data in JSON format as a single dictionary."
    )
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=150,
        temperature=0.8
    )
    response_content = response.choices[0].message['content'].strip()
    try:
        return json.loads(response_content)  # parse the model's JSON output instead of eval-ing untrusted text
    except Exception as e:
        st.error(f"Error parsing endangered species data: {e}")
        return {}

# Function to fetch NGOs using OpenAI
def fetch_nearby_ngos_with_openai(city: str, interests: list) -> list:
    prompt = (
        f"List NGOs near {city} that focus on {', '.join(interests)}. "
        "Provide the names, locations, and focus areas in JSON format as a list of dictionaries."
    )
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=200,
        temperature=0.7
    )
    response_content = response.choices[0].message['content'].strip()
    
    try:
        ngo_list = json.loads(response_content)  # parse the model's JSON output instead of eval-ing untrusted text
        if isinstance(ngo_list, list) and all(isinstance(ngo, dict) for ngo in ngo_list):
            return ngo_list
        else:
            st.error("Unexpected response format. Could not parse NGO data.")
            return []
    except Exception as e:
        st.error(f"Error parsing NGO data: {e}")
        return []

# Streamlit UI
st.title("🌿 Eco-Symphony 🎶")
st.write("Enter a city to explore real-time environmental data, complete daily challenges, and unlock hidden content!")

city = st.text_input("Enter City Name:", placeholder="Type the name of a city...")

if st.button("Generate Environmental Data, Music, and Image"):
    st.session_state.real_data = fetch_real_data(city)
    if st.session_state.real_data:
        # Generate narrative and mood
        narrative = create_narrative(city, st.session_state.real_data)
        mood = determine_mood(st.session_state.real_data)
        
        # Generate AI story
        st.session_state.story = generate_story_with_ai(narrative, mood)

        # Generate Music and Image Based on Story and Mood
        music_description = f"{mood} mood with {st.session_state.real_data['weather_condition'].lower()} weather"
        st.session_state.music_bytes = generate_music(music_description)
        st.session_state.image_bytes = generate_image(st.session_state.story)

# Display Music and Image at the Top
if st.session_state.music_bytes:
    st.subheader("🎶 Generated Music")
    st.audio(BytesIO(st.session_state.music_bytes), format="audio/wav")

if st.session_state.image_bytes:
    st.subheader("🖼️ Generated Image")
    st.image(Image.open(BytesIO(st.session_state.image_bytes)), caption="Generated Image based on Story", use_column_width=True)

# Display Environmental Narrative and Data
if st.session_state.real_data:
    st.subheader("📜 Environmental Narrative")
    narrative = create_narrative(city, st.session_state.real_data)
    st.write(narrative)
    
    st.subheader("📊 Real Weather Data")
    st.write("Temperature (°C):", st.session_state.real_data.get("temperature", "Data not available"))
    st.write("Humidity (%):", st.session_state.real_data.get("humidity", "Data not available"))
    st.write("Weather Condition:", st.session_state.real_data.get("weather_condition", "Data not available"))

if st.session_state.story:
    st.subheader("🌈 AI-Generated Story")
    st.write(st.session_state.story)

# Daily Challenges Section
st.subheader("🏆 Daily Challenges")

if not st.session_state.daily_challenges:
    st.session_state.daily_challenges = generate_daily_challenges()

completed_challenges = []
for i, challenge in enumerate(st.session_state.daily_challenges):
    if st.checkbox(challenge, key=f"challenge_{i}"):
        completed_challenges.append(i)

# Update points based on completed challenges
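# (points are recomputed from the checked boxes on every rerun, so unchecking a
# challenge also removes its points)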
st.session_state.points = len(completed_challenges) * 10  # 10 points per challenge
st.markdown(f"<h2 style='text-align: center;'>πŸ’° Points: {st.session_state.points}</h2>", unsafe_allow_html=True)

# Function to fetch endangered species data for a region
def fetch_all_endangered_species(city: str) -> list:
    prompt = (
        f"Provide a list of endangered species found near {city}, "
        "including their names, population estimates, and descriptions. "
        "Return the data in JSON format as a list of dictionaries."
    )
    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": prompt}],
        max_tokens=300,
        temperature=0.8
    )
    response_content = response.choices[0].message['content'].strip()
    
    try:
        species_list = json.loads(response_content)  # parse the model's JSON output instead of eval-ing untrusted text
        if isinstance(species_list, list) and all(isinstance(species, dict) for species in species_list):
            return species_list
        else:
            st.error("Unexpected response format. Could not parse species data.")
            return []
    except Exception as e:
        st.error(f"Error parsing endangered species data: {e}")
        return []

# Display the endangered species section
if len(completed_challenges) == len(st.session_state.daily_challenges):
    st.success("All challenges completed! 🎉 You've unlocked the secret section!")

    # Fetch endangered species data for the user's city
    species_data_list = fetch_all_endangered_species(city)
    if species_data_list:
        st.subheader("🦋 Endangered Species in Your Region")
        
        for species_data in species_data_list:
            species_name = species_data.get('name', 'Unknown')
            st.write(f"**Species**: {species_name}")
            st.write(f"**Population**: {species_data.get('population', 'Unknown')}")
            st.write(f"**Description**: {species_data.get('description', 'No description available')}")

            # Generate an image of each endangered species
            image_description = f"Generate an image of the endangered species: {species_name}"
            species_image_bytes = generate_image(image_description)
            if species_image_bytes:
                species_image = Image.open(BytesIO(species_image_bytes))
                st.image(species_image, caption=f"Endangered Species: {species_name}", use_column_width=True)
            st.write("---")

# User's Environmental Interests Section
st.subheader("🌍 Get Involved!")
st.write("Choose your areas of interest for saving the environment:")
interests = st.multiselect(
    "Select Areas of Interest:",
    ["Afforestation", "Water Conservation", "Biodiversity Protection", "Recycling", "Climate Change Awareness"]
)

if st.button("Find Nearby NGOs"):
    if interests:
        st.session_state.ngos = fetch_nearby_ngos_with_openai(city, interests)
    else:
        st.warning("Please select at least one area of interest.")

# Display NGO information
if st.session_state.ngos:
    st.subheader("🌟 NGOs Near You")
    for ngo in st.session_state.ngos:
        st.write(f"**{ngo.get('name', 'Unknown NGO')}**")
        st.write(f"📍 Location: {ngo.get('location', 'Unknown Location')}")
        st.write(f"🌱 Focus Area: {ngo.get('focus', 'Unknown Focus Area')}")
        st.write("---")