# app.py: minimal Gradio demo serving the i99om/phi-2 causal language model.
import os

import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

model_id = "i99om/phi-2"
# Read a Hugging Face access token from the HF_TOKEN secret (needed if the model repo is private).
token = os.environ.get("HF_TOKEN")

# Load the tokenizer and model weights; `token` replaces the deprecated `use_auth_token` argument.
tokenizer = AutoTokenizer.from_pretrained(model_id, token=token)
model = AutoModelForCausalLM.from_pretrained(model_id, token=token)
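
# Optional (an assumption, not part of the original app): if the Space has a GPU, the
# model could be moved to CUDA and run in half precision to reduce memory use. Left
# commented out so the default CPU path stays unchanged; generate_text would then also
# need to move its tokenized inputs to model.device.
# import torch
# if torch.cuda.is_available():
#     model = model.to("cuda").half()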

def generate_text(prompt):
    """Generate up to 150 new tokens continuing the given prompt."""
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=150)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)
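
# Optional variant (a sketch, not used above): greedy decoding can be repetitive, so
# sampling could be enabled on the generate() call instead, e.g.:
#   outputs = model.generate(**inputs, max_new_tokens=150,
#                            do_sample=True, temperature=0.7, top_p=0.9)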

# Simple Blocks UI: a prompt box, an output box, and a button wired to generate_text.
with gr.Blocks() as demo:
    textbox = gr.Textbox(label="ุฃุฏุฎู„ ุงู„ู†ุต")  # "Enter the text"
    output = gr.Textbox(label="ุงู„ู†ุงุชุฌ")  # "Output"
    btn = gr.Button("ุชูˆู„ูŠุฏ")  # "Generate"
    btn.click(generate_text, inputs=textbox, outputs=output)

demo.launch()
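
# Example of calling the deployed app programmatically with gradio_client (assumptions:
# the Space is published as "i99om/finalyyyy" and the click handler is exposed under its
# function name, "/generate_text"):
#
#   from gradio_client import Client
#   client = Client("i99om/finalyyyy")
#   print(client.predict("ู…ุฑุญุจุง", api_name="/generate_text"))  # "ู…ุฑุญุจุง" = "Hello"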