import torch
import gradio as gr
from gradio import Textbox
from transformers import AutoModelForCausalLM, AutoTokenizer
# OpenELM ships custom modeling code on the Hub, so trust_remote_code is required.
openelm_270m_instruct = AutoModelForCausalLM.from_pretrained("apple/OpenELM-270M-Instruct", trust_remote_code=True)
# OpenELM does not bundle its own tokenizer; the model card pairs it with the Llama-2 tokenizer.
tokenizer = AutoTokenizer.from_pretrained("NousResearch/Llama-2-7b-hf")
def generate(newQuestion, num):
    # Tokenize the prompt and add a batch dimension for the model.
    tokenized_prompt = tokenizer(newQuestion)
    tokenized_prompt = torch.tensor(tokenized_prompt["input_ids"])
    tokenized_prompt = tokenized_prompt.unsqueeze(0)
    # Generate up to `num` tokens in total (the prompt counts toward max_length).
    output_ids = openelm_270m_instruct.generate(
        tokenized_prompt,
        max_length=int(num),
        pad_token_id=0,
    )
    # Decode the generated ids back into text.
    output_text = tokenizer.decode(
        output_ids[0].tolist(),
        skip_special_tokens=True,
    )
    return output_text
developer_info = """
This Space was developed by Ahmadreza Anaami \n
Feel free to use it via the API as well \n
Model: apple/OpenELM-270M-Instruct
"""
def greet(name, num):
    # Thin wrapper so the interface signature matches the two text inputs.
    return generate(name, num)
iface = gr.Interface(
    fn=greet,
    inputs=[
        Textbox(label="Enter Text Here:", type="text"),
        Textbox(label="Max total tokens (prompt + generation):", type="text"),
    ],
    outputs=[Textbox(label="Generated answer:")],
    title="OpenELM-270M-Instruct",
    # Markdown(developer_info, elem_id="dev-info"),  # alternative: render the info as a Markdown block
    description=developer_info,
    css="""
    /* Style the developer info section (optional) */
    #dev-info {
        font-size: 0.8rem;
        color: #888; /* Adjust color as desired */
        margin-top: 1rem;
        text-align: center;
    }
    /* Style the input area (optional) */
    .gr-input text {
        padding: 10px;
        border-radius: 5px;
        font-size: 1rem;
    }
    /* Style the score label (optional) */
    .gr-output.gr-slider label {
        font-weight: bold;
    }
    """,
)
iface.launch()
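
# --- Usage sketch (not part of the app) ----------------------------------
# The description above mentions calling this Space through the API as well.
# Below is a minimal client-side sketch using gradio_client; the Space id
# "Ahmadreza-Anaami/OpenELM-270M-Instruct" is an assumption, and "/predict"
# is the default endpoint that gr.Interface exposes.
#
# from gradio_client import Client
#
# client = Client("Ahmadreza-Anaami/OpenELM-270M-Instruct")  # hypothetical Space id
# answer = client.predict(
#     "Once upon a time there was",  # prompt text
#     "64",                          # max total tokens, passed as a string
#     api_name="/predict",
# )
# print(answer)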