# app.py - updated version for Hugging Face token & CPU
import os
import tempfile
import textwrap
from datetime import datetime
from typing import List, Dict, Any, Optional

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

from src.conversation import ConversationMemory
from src.chatbot import LocalChatbot
# ----------------------
# HUGGING FACE SETTINGS
# ----------------------
HF_TOKEN = os.getenv("HF_TOKEN")  # Hugging Face token stored as a Space secret
MODEL_PATH = "RedHatAI/gemma-2-2b-it-quantized.w4a16"  # public or private model
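# Sketch (assumption): warn early if the token is missing. On a Space the
# token is usually added under Settings -> Variables and secrets; gated or
# private models cannot be downloaded without it.
if not HF_TOKEN:
    print("Warning: HF_TOKEN is not set; gated/private models will fail to load.")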
# ----------------------
# LOAD MODEL + TOKENIZER
# ----------------------
tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH, use_fast=True, token=HF_TOKEN)
llm = AutoModelForCausalLM.from_pretrained(
    MODEL_PATH,
    device_map="cpu",
    dtype="auto",  # requires a recent transformers; older versions use torch_dtype
    token=HF_TOKEN,
)
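# Inference-only housekeeping (sketch, assuming a plain PyTorch CPU backend):
# eval() disables dropout; capping threads can help on the small shared CPUs
# that free Spaces provide. Uncomment the torch lines if oversubscription is
# an issue.
llm.eval()
# import torch
# torch.set_num_threads(max(1, (os.cpu_count() or 2) // 2))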
# ----------------------
# MEMORY + CHATBOT
# ----------------------
memory = ConversationMemory(max_len=60)
bot = LocalChatbot(llm, memory, tokenizer=tokenizer)
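# Note (assumption): ConversationMemory and LocalChatbot are this repo's own
# classes under src/; max_len=60 presumably caps the number of stored turns,
# and bot.ask() is expected to return a plain string reply.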
INTENT_TEMPLATES = {
    "math": "You are a math solver. Solve step-by-step only.",
    "code": "You are a coding expert. Provide clean, working code.",
    "civics": "Explain clearly like a Class 10 SST teacher.",
    "exam": "Prepare concise exam-focused notes and important questions.",
}
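# Example: a message such as "math solve 2x + 3 = 7" is routed to the "math"
# template, and the "math" prefix is stripped before prompting (see
# generate_reply below).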
# ----------------------
# HELPER FUNCTIONS
# ----------------------
def now_ts() -> str:
    return datetime.now().strftime("%Y-%m-%d %H:%M:%S")
def generate_reply(user_msg: str, history: Optional[List[Dict[str, Any]]]):
    if history is None:
        history = []
    if not user_msg.strip():
        return history, ""
    # Detect an intent prefix such as "math ..." or "code ..."
    intent = None
    low = user_msg.lower()
    for key in INTENT_TEMPLATES:
        if low.startswith(key):
            intent = key
            user_msg = user_msg[len(key):].strip()
            break
    system_prefix = INTENT_TEMPLATES.get(intent)
    if system_prefix:
        prompt = f"{system_prefix}\nUser: {user_msg}"
    else:
        prompt = f"User: {user_msg}"
    # Generate a reply using LocalChatbot
    bot_reply = bot.ask(prompt)
    ts = now_ts()
    bot_reply_ts = f"{bot_reply}\n\n{ts}"
    history.append({"role": "user", "content": user_msg})
    history.append({"role": "assistant", "content": bot_reply_ts})
    try:
        memory.add(user_msg, bot_reply)
    except Exception:
        pass  # memory persistence is best-effort; never block the reply
    return history, ""  # second value clears the input textbox
# ----------------------
# EXPORT TXT/PDF
# ----------------------
def export_chat_files(history: List[Dict[str, Any]]) -> Dict[str, Optional[str]]:
    tmpdir = tempfile.gettempdir()
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    txt_path = os.path.join(tmpdir, f"chat_history_{timestamp}.txt")
    with open(txt_path, "w", encoding="utf-8") as f:
        for msg in history:
            # History is in messages format, so label each entry by its role
            role = msg.get("role", "").upper()
            content = msg.get("content", "").strip()
            f.write(f"{role}: {content}\n")
            f.write("-" * 60 + "\n")
    pdf_path = None
    try:
        from reportlab.lib.pagesizes import A4
        from reportlab.pdfgen import canvas
        pdf_path = os.path.join(tmpdir, f"chat_history_{timestamp}.pdf")
        c = canvas.Canvas(pdf_path, pagesize=A4)
        width, height = A4
        margin = 40
        textobject = c.beginText(margin, height - margin)
        textobject.setFont("Helvetica", 10)
        with open(txt_path, "r", encoding="utf-8") as fh:
            for line in fh:
                # "or ['']" keeps blank lines, which textwrap.wrap drops
                for wrapped in textwrap.wrap(line.rstrip(), 100) or [""]:
                    # Start a new page once the cursor reaches the bottom margin
                    if textobject.getY() < margin:
                        c.drawText(textobject)
                        c.showPage()
                        textobject = c.beginText(margin, height - margin)
                        textobject.setFont("Helvetica", 10)
                    textobject.textLine(wrapped)
        c.drawText(textobject)
        c.showPage()
        c.save()
    except Exception:
        pdf_path = None  # reportlab missing or PDF generation failed
    return {"txt": txt_path, "pdf": pdf_path}
# ----------------------
# UI
# ----------------------
with gr.Blocks(title="Tayyab - Chatbot (API)") as demo:
    with gr.Row():
        with gr.Column(scale=1, min_width=220):
            gr.Markdown("### Tools & Export")
            new_chat_btn = gr.Button("New Chat")
            export_btn = gr.Button("Export TXT/PDF")
        with gr.Column(scale=3):
            gr.Markdown("<h3>Smart Learning Assistant - Tayyab</h3>")
            # type="messages" matches the {"role": ..., "content": ...}
            # dicts that generate_reply appends to the history
            chatbot = gr.Chatbot(type="messages", height=480)
            msg = gr.Textbox(placeholder="Type a message", show_label=False, lines=3)
            send_btn = gr.Button("Send")
            file_txt = gr.File(visible=False)
            file_pdf = gr.File(visible=False)

    # Chat actions; the second output clears the textbox after sending
    send_btn.click(generate_reply, inputs=[msg, chatbot], outputs=[chatbot, msg])
    msg.submit(generate_reply, inputs=[msg, chatbot], outputs=[chatbot, msg])
    def new_chat():
        memory.clear()
        return []

    new_chat_btn.click(new_chat, outputs=[chatbot])

    def export_handler(history):
        files = export_chat_files(history or [])
        return (
            gr.update(value=files.get("txt"), visible=True),
            gr.update(value=files.get("pdf"), visible=bool(files.get("pdf"))),
        )

    export_btn.click(export_handler, inputs=[chatbot], outputs=[file_txt, file_pdf])
if __name__ == "__main__":
    demo.launch()