import json
import os
import pickle
from datetime import datetime

import faiss
import numpy as np
import requests
import streamlit as st
from groq import Groq
from langdetect import detect
from rapidfuzz import process
from sentence_transformers import SentenceTransformer

# 🇵🇰 Pakistan flag image
PAK_FLAG_URL = "https://flagcdn.com/w320/pk.png"

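# Groq client for LLM chat completions; expects GROQ_API_KEY in the environment.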
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

@st.cache_resource
def load_data():
    """Load the prebuilt FAISS index and the pickled text chunks it indexes."""
    idx = faiss.read_index("resqbot_index.faiss")
    with open("resqbot_chunks.pkl", "rb") as f:
        ch = pickle.load(f)
    return idx, ch

@st.cache_resource
def load_model():
    """Load the sentence-transformer model used to embed queries."""
    return SentenceTransformer('all-MiniLM-L6-v2')

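# The embedding model is loaded once and cached across Streamlit reruns.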
embed_model = load_model()

def detect_language_fallback(text):
    """Detect Urdu vs. English, falling back to a script check when langdetect is unsure."""
    try:
        lang = detect(text)
        if lang not in ["en", "ur"]:
            # Any Arabic-script character is treated as Urdu; otherwise default to English.
            if any("\u0600" <= c <= "\u06FF" for c in text):
                return "ur"
            return "en"
        return lang
    except Exception:
        return "en"

st.title("🤖 ResQBot – Disaster QA (Urdu + English)")

with st.spinner("🛡️ Loading ResQBot..."):
    index, chunks = load_data()

st.markdown("""
    <style>
    .grid-section {
        display: grid;
        grid-template-columns: repeat(auto-fit, minmax(320px, 1fr));
        gap: 1rem;
    }
    </style>
""", unsafe_allow_html=True)

st.markdown(f"### 🌌 <img src='{PAK_FLAG_URL}' width='30' style='vertical-align:middle;'> Disaster Alerts", unsafe_allow_html=True)

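# Open the responsive alert grid container; it is closed after the flood block below.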
st.markdown('<div class="grid-section">', unsafe_allow_html=True)

# Earthquake Grid Block (DEMO DATA)
quakes = [
    {"mag": 5.4, "place": "Quetta, Balochistan", "time": "2025-07-27 03:45 AM"},
    {"mag": 4.8, "place": "Peshawar, KPK", "time": "2025-07-26 11:30 PM"},
]
st.markdown("#### Earthquake Alerts")
if quakes:
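    # List each reported quake and flag elevated activity when more than one is present.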
    for q in quakes:
        st.warning(f"Magnitude {q['mag']} quake in {q['place']} at {q['time']}")
    if len(quakes) > 1:
        st.error("⚠️ Increased seismic activity detected.")
else:
    st.success("✅ No notable earthquakes in Pakistan.")

# Flood Grid Block (DEMO DATA)
floods = [
    ("2025-07-28", 9200, "High"),
    ("2025-07-29", 7800, "Medium"),
    ("2025-07-30", 4000, "Low"),
]
st.markdown("#### Flood Forecast")
if floods:
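    # Show the full forecast whenever any day carries Medium or High risk.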
    high_risk_days = [f for f in floods if f[2] in ["High", "Medium"]]
    if high_risk_days:
        for d, v, risk in floods:
            st.info(f"{d}: Discharge {v:.1f} m³/s – Risk level: {risk}")
    else:
        st.success("✅ No significant flood risk detected in Pakistan.")
else:
    st.error("⚠️ Unable to fetch flood data at the moment.")

st.markdown('</div>', unsafe_allow_html=True)

st.markdown("---")
st.markdown("### 💬 Ask About Disaster Preparedness")
st.markdown("You can ask about earthquake, flood, shelter advice or precaution/preparations etc. in English or Urdu.")
query = st.text_input("❓ Your question (English یا اردو/or):")
if query:
    with st.spinner("🤖 Thinking..."):
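        # Embed the query and retrieve the most similar chunks from the FAISS index.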
        emb = embed_model.encode([query])
        D, I = index.search(np.asarray(emb, dtype="float32"), 3)  # top-3 nearest chunks
        context_chunks = [chunks[i] for i in I[0]]
        context = "\n".join(context_chunks)
        # Truncate overly long context to keep the prompt within a safe size.
        if len(context) > 4000:
            context = context[:4000] + "..."

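        # Build an Urdu or English prompt depending on the detected language of the question.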
        lang = detect_language_fallback(query)

        if lang == "ur":
            prompt = f"""اس سیاق و سباق کی بنیاد پر اردو میں کم از کم 3-4 لائنوں میں جواب دیں۔:\n\n{context}\n\nسوال: {query}"""
        else:
            prompt = f"""Answer in at least 3-4 lines and to the point in English based on this context:\n\n{context}\n\nQuestion: {query}"""

        resp = client.chat.completions.create(
            messages=[{"role":"user","content":prompt}],
            model="llama-3.1-8b-instant"
        )
        st.markdown("### 💬 ResQBot Answer:")
        st.write(resp.choices[0].message.content)