import os

import gradio as gr
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
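
# Quantized Mistral-7B-Instruct-v0.3 in GGUF format, fetched from the Hugging Face
# Hub and cached locally after the first run. HF_TOKEN is only needed for gated or
# private repositories.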
MODEL_REPO = "bartowski/Mistral-7B-Instruct-v0.3-GGUF"
MODEL_FILE = "Mistral-7B-Instruct-v0.3-Q4_K_M.gguf"

model_path = hf_hub_download(
    repo_id=MODEL_REPO,
    filename=MODEL_FILE,
    token=os.environ.get("HF_TOKEN"),
)

# Generation and runtime settings
MAX_TOKENS = 150
CPU_THREADS = os.cpu_count()
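
# Load the model for CPU-only inference; n_gpu_layers=0 keeps every layer on the CPU.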
llm = Llama(
    model_path=model_path,
    n_ctx=2048,
    n_threads=CPU_THREADS,
    n_gpu_layers=0,
)


def format_krishna_prompt(message, history):
    """Build a Mistral [INST] ... [/INST] prompt that carries the Krishna persona."""
    system_prompt = (
        "You are Lord Krishna, the divine charioteer of Arjuna in the Bhagavad Gita. "
        "Answer with Vedic wisdom using simple English and occasional Sanskrit terms. "
        "Maintain a compassionate, all-knowing tone."
    )

    # Mistral-Instruct has no dedicated system role, so the persona text is folded
    # into the first [INST] block; each completed turn is closed with </s>.
    turns = [f"<s>[INST] {system_prompt}\n\n"]
    for i, (human, assistant) in enumerate(history):
        opener = "" if i == 0 else "<s>[INST] "
        turns.append(f"{opener}{human} [/INST] {assistant}</s>")
    final_opener = "" if not history else "<s>[INST] "
    turns.append(f"{final_opener}{message} [/INST]")
    return "".join(turns)


def query_krishna(message, history):
    """Generate a response from Mistral 7B running entirely on the CPU."""
    try:
        prompt = format_krishna_prompt(message, history)
        output = llm(
            prompt,
            max_tokens=MAX_TOKENS,
            temperature=0.7,
            top_p=0.9,
            stop=["</s>"],
        )
        return output["choices"][0]["text"].strip()
    except Exception as e:
        return f"Divine wisdom temporarily obscured: {e}"
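

# Local avatar image; "krishna.jpg" must exist in the working directory
# (swap in any image path).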
krishna_avatar = "krishna.jpg"

with gr.Blocks(title="Divine Dialogue with Lord Krishna") as demo:
    gr.Markdown("""
# Bhagavad Gita Wisdom Chatbot
**Ask questions to Shree Krishna**
""")

    with gr.Row():
        gr.Image(krishna_avatar, height=200, show_label=False)

    gr.ChatInterface(
        query_krishna,
        chatbot=gr.Chatbot(height=500),
        examples=[
            "What is the purpose of life?",
            "How to achieve inner peace?",
            "Explain karma yoga from Bhagavad Gita",
        ],
    )


if __name__ == "__main__":
    demo.launch()
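
# To try it locally (assuming this file is saved as app.py and the packages are
# installed):
#   pip install gradio llama-cpp-python huggingface_hub
#   python app.py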