deepcoder-demo / app.py
import gradio as gr
import torch
from transformers import pipeline

# Load the text-generation pipeline, using the GPU when one is available.
pipe = pipeline(
    "text-generation",
    model="agentica-org/DeepCoder-14B-Preview",
    device="cuda" if torch.cuda.is_available() else "cpu",
)
def chat(message, history):
    # Wrap the user's message in the chat format expected by the pipeline.
    messages = [{"role": "user", "content": message}]
    response = pipe(messages)
    # The pipeline returns the whole conversation; return only the new
    # assistant message's text so ChatInterface receives a plain string.
    return response[0]["generated_text"][-1]["content"]
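
# A minimal sketch (assumption, not part of the original file): generation
# options such as max_new_tokens or temperature can be forwarded through the
# pipeline call inside chat(), for example:
#
#     response = pipe(messages, max_new_tokens=512, do_sample=True, temperature=0.6)
#
# Without max_new_tokens the pipeline falls back to the model's default
# generation length, which may truncate longer code completions.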
gr.ChatInterface(chat).launch()
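
# Note (assumption, not shown in this file): the Space would also need its
# dependencies installed, e.g. a requirements.txt listing gradio, transformers,
# and torch, and the 14B-parameter model needs a GPU with enough memory to load it.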