# galactica-base / app.py — Galactica text-generation demo (Gradio)
import gradio as gr
from transformers import pipeline
from transformers import AutoTokenizer, AutoModelForCausalLM
# Load the 1.3B-parameter Galactica checkpoint once at startup
# (downloads weights on first run; both objects are reused by the pipeline).
tokenizer = AutoTokenizer.from_pretrained("facebook/galactica-1.3b")
model = AutoModelForCausalLM.from_pretrained("facebook/galactica-1.3b")
# Causal-LM generation pipeline; num_workers controls DataLoader workers
# used for batched inputs, not generation parallelism.
text2text_generator = pipeline("text-generation", model=model, tokenizer=tokenizer, num_workers=2)
def predict(text):
    """Generate a continuation of *text* with Galactica and return HTML.

    The prompt is rendered as plain text and the model's continuation is
    highlighted (bold, yellow background). Newlines become <br> tags.

    Args:
        text: Prompt string from the Gradio textbox.

    Returns:
        An HTML string: "<p>prompt<b><span ...>continuation</span></b></p>".
    """
    text = text.strip()
    generated = text2text_generator(text, max_length=128,
                                    temperature=0.7,
                                    do_sample=True,
                                    eos_token_id=tokenizer.eos_token_id,
                                    bos_token_id=tokenizer.bos_token_id,
                                    pad_token_id=tokenizer.pad_token_id,
                                    )[0]['generated_text']
    # The text-generation pipeline echoes the prompt at the start of the
    # output. Split it off by length instead of str.replace(): replace()
    # would inject the opening tags at EVERY occurrence of the prompt in
    # the generation, while only one closing tag pair is emitted.
    if generated.startswith(text):
        prompt, continuation = text, generated[len(text):]
    else:
        # Unexpected (pipeline normally echoes the prompt) — highlight all
        # output rather than dropping it.
        prompt, continuation = "", generated
    # Assemble well-nested HTML: the closing </span></b> must come BEFORE
    # </p> (the original appended it after, producing invalid markup).
    out_text = ("<p>" + prompt
                + "<b><span style='background-color: #ffffcc;'>"
                + continuation
                + "</span></b></p>")
    return out_text.replace("\n", "<br>")
# Wire the predictor into a simple Gradio UI: multi-line text in, HTML out.
iface = gr.Interface(
    fn=predict,
    inputs=gr.Textbox(lines=10),
    outputs=gr.HTML(),
    description="Galactica",
    examples=[["The attention mechanism in LLM is"]]
)
# NOTE(review): share=True is ignored when running on Hugging Face Spaces
# (Spaces already serve the app publicly); it only matters for local runs.
iface.launch(share=True)