# NOTE: "Spaces: Sleeping / Sleeping" was Hugging Face Spaces page residue
# from the copy-paste, not part of the program; kept here only as a comment.
import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the CodeGen model and tokenizer from the Hugging Face Hub.
# NOTE: this runs at import time and downloads the weights on first use.
model_name = "Salesforce/codegen-350M-mono"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
model.eval()  # inference only — disables dropout for stable generation
# Code-generation function used as the Gradio callback.
def generate_code(prompt):
    """Generate code from a natural-language description.

    Args:
        prompt: Free-text description of the code to generate.

    Returns:
        The decoded model output — the prompt followed by the sampled
        continuation — with special tokens stripped.
    """
    inputs = tokenizer(prompt, return_tensors="pt")
    # Inference only: no_grad avoids building an autograd graph.
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            # max_new_tokens (not max_length): max_length counts the prompt
            # tokens too, so long prompts would get little or no output.
            max_new_tokens=200,
            do_sample=True,
            top_k=50,
            # CodeGen defines no pad token; silence the generate() warning.
            pad_token_id=tokenizer.eos_token_id,
        )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
# Gradio UI: one input text box (description), one output text box (code).
# Labels/title/description are intentionally left exactly as in the original
# (Arabic, as-is) since they are user-facing runtime strings.
interface = gr.Interface(
    fn=generate_code,
    inputs=gr.Textbox(lines=5, label="ุงูุชุจ ูุตู ุงูููุฏ ููุง"),
    outputs=gr.Textbox(lines=10, label="ุงูููุฏ ุงููุงุชุฌ"),
    title="ู ูููุฏ ููุฏ Python",
    description="ุงูุชุจ ูุตููุง ููููุฏ ุงูุฐู ุชุฑูุฏูุ ูุณููููุฏ ููุฏูุง ุจุงุณุชุฎุฏุงู ูู ูุฐุฌ ุฐูุงุก ุตูุงุนู.",
)
# Start the Gradio server (blocks until the app is stopped).
interface.launch()