import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load the language model and its tokenizer from the Hugging Face Hub
model_name = "aburnazy/opt-350m-hy"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
def generate_text(prompt, temperature, top_k, top_p, max_length):
    # Encode the prompt and sample a continuation with the requested decoding settings.
    # max_length and top_k are cast to int because Gradio sliders may pass floats.
    inputs = tokenizer.encode(prompt, return_tensors="pt")
    outputs = model.generate(
        inputs,
        max_length=int(max_length),
        temperature=temperature,
        top_k=int(top_k),
        top_p=top_p,
        do_sample=True,
    )
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return text
iface = gr.Interface(
    fn=generate_text,
    inputs=[
        # Armenian placeholder prompt: "It was morning. The radiant ... of the Ararat plain"
        gr.Textbox(lines=2, placeholder="Առավոտ էր: Արարատյան դաշտի լուսապայծառ "),
        gr.Slider(minimum=0, maximum=1, step=0.01, value=0.8, label='Temperature'),
        gr.Slider(minimum=0, maximum=100, step=1, value=20, label='Top K'),
        gr.Slider(minimum=0, maximum=1, step=0.01, value=0.1, label='Top P'),
        gr.Slider(minimum=10, maximum=1024, step=1, value=200, label='Max Length'),
    ],
    outputs="text",
)
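
# Optional sanity check before serving the UI: a minimal sketch that calls the
# generation function directly with the same defaults the sliders use above.
# Not part of the original app; uncomment to run it once the model has loaded.
# print(generate_text("Առավոտ էր: ", temperature=0.8, top_k=20, top_p=0.1, max_length=200))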

iface.launch()