import wikipedia

# Reuse the single shared llm instance from vector_rag.py; the duplicate
# model/pipeline/tokenizer imports and initialization that previously lived
# here have been removed.
from vector_rag import query_vector_store, llm

wikipedia.set_lang("en")
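
# For reference, a minimal sketch of the interface this module assumes
# vector_rag.py exposes (inferred from the import above; the actual file may
# differ):
#
#     llm                 # LangChain-style LLM with .predict(prompt: str) -> str
#     query_vector_store  # def query_vector_store(query: str) -> str | None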

async def get_smart_rag_response(query: str) -> str:
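    # NOTE: the wikipedia and llm calls below are synchronous; despite the
    # async signature they block the event loop while running.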
    print(" Received Query:", query)

    # First: Try Wikipedia
    try:
        summary = wikipedia.summary(query, sentences=5)
        print("Wikipedia summary found.")
        
        prompt = f"""Use the following Wikipedia information to answer the question as clearly as possible.

Wikipedia Context:
{summary}

Question: {query}
Answer:"""
        result = llm.predict(prompt) 
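        # Some local pipelines echo the prompt in their output; strip it if present.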
        answer = result.replace(prompt, "").strip()
        return f"[Wikipedia]\n{answer}"
    except wikipedia.exceptions.PageError:
        print("Wikipedia page not found.")
    except wikipedia.exceptions.DisambiguationError as e:
        return f"The query is ambiguous. Did you mean: {', '.join(e.options[:5])}?"

    # Second: Fallback to LLM (no context)
    try:
        print("Fallback: LLM with no context")
        
        fallback_prompt = f"You are a knowledgeable assistant. Please answer the following question clearly:\n\n{query}"
        llm_answer = llm.predict(fallback_prompt) 
        answer = llm_answer.replace(fallback_prompt, "").strip()
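        # Crude confidence check: treat "not sure"-style answers as a miss so
        # the local-document fallback below gets a chance.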
        if answer and "not sure" not in answer.lower():
            return f"[LLM Fallback]\n{answer}"
    except Exception as e:
        print("Error during LLM fallback:", e)

    # Finally: Fallback to local documents
    try:
        print("Fallback: Local vector search")
        vector_answer = query_vector_store(query)
        if vector_answer:
            return f"[Local Document]\n{vector_answer}"
    except Exception as e:
        print("Error during local vector search:", e)

    return "Sorry, I couldn’t find any information to answer your question."