import gradio as gr
from huggingface_hub import InferenceClient

# Create a client for the model hosted on Hugging Face
client = InferenceClient(model="deepseek-ai/DeepSeek-V3.2-Exp", provider="novita")

# Backend function: builds a prompt from the chat history and queries the model
def chat_fn(message, history):
    # Flatten the (user, AI) history pairs into a plain-text transcript
    context = "\n".join([f"User: {u}\nAI: {a}" for u, a in history])
    prompt = f"{context}\nUser: {message}\nAI:"
    response = client.text_generation(prompt, max_new_tokens=200)
    return response

# Gradio interface
with gr.Blocks() as demo:
    gr.Markdown("## 🤖 DeepSeek Chatbot (API Enabled)")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Type your message here")
    clear = gr.Button("Clear conversation")

    def respond(message, history):
        # After clearing, the chatbot value is None, so fall back to an empty history
        history = history or []
        response = chat_fn(message, history)
        history.append((message, response))
        return history, ""

    msg.submit(respond, [msg, chatbot], [chatbot, msg])
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()