import gradio as gr
import torch  # was referenced but never imported — crashed with NameError at startup
from transformers import pipeline

# Load the model once at module import; prefer GPU when one is available.
pipe = pipeline(
    "text-generation",
    model="agentica-org/DeepCoder-14B-Preview",
    device="cuda" if torch.cuda.is_available() else "cpu",
)


def chat(message, history):
    """Generate a reply to *message*, giving the model the prior conversation.

    Args:
        message: The user's latest message (plain string from ChatInterface).
        history: Prior turns as a list of {"role": ..., "content": ...} dicts
            (ChatInterface messages format).

    Returns:
        The assistant's reply as a string.
    """
    # Forward the history so the model sees context; the original discarded it.
    messages = list(history) + [{"role": "user", "content": message}]
    response = pipe(messages)
    generated = response[0]["generated_text"]
    # With chat (messages) input, the pipeline returns the full conversation
    # as a list of message dicts; the assistant's reply is the last entry.
    if isinstance(generated, list):
        return generated[-1]["content"]
    return generated


if __name__ == "__main__":
    # type="messages" makes history arrive as role/content dicts, matching
    # the format chat() forwards to the pipeline.
    gr.ChatInterface(chat, type="messages").launch()