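# app.py for "Better Call Bloom!", a Gradio Space that answers legal questions
# with the 8-bit BLOOM-3B checkpoint tomrb/bettercallbloom-3b-8bit.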
import re

import gradio as gr
from transformers import pipeline
from transformers import BloomTokenizerFast, BloomForCausalLM
description = """ | |
When in legal doubt, you better call BLOOM! Ask BLOOM any legal question: | |
<img src="https://huggingface.co/spaces/tomrb/bettercallbloom/resolve/main/img.jpeg" width=200px> | |
""" | |
title = "Better Call Bloom!" | |
examples = [["Adventurer is approached by a mysterious stranger in the tavern for a new quest."]] | |

# Load the 8-bit quantized BLOOM checkpoint fine-tuned for legal Q&A
tokenizer = BloomTokenizerFast.from_pretrained("tomrb/bettercallbloom-3b-8bit")
model = BloomForCausalLM.from_pretrained("tomrb/bettercallbloom-3b-8bit", low_cpu_mem_usage=True)

generator = pipeline("text-generation", model=model, tokenizer=tokenizer)

def preprocess(text):
    # Wrap the question with "Question:" and "Answer #1:" markers at the start
    # and end of the prompt. The newlines keep each marker on its own line so
    # the regex split in generate() below can find them.
    return "\nQuestion: " + text + "\nAnswer #1:"

def generate(text):
    preprocessed_text = preprocess(text)
    result = generator(preprocessed_text, max_length=128)
    # The model may keep generating further questions/answers, so split on the
    # prompt markers and keep only the first answer.
    output = re.split(r'\nQuestion:|Answer #|Title:', result[0]['generated_text'])[2]
    return output
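
# Quick local sanity check (illustrative only; the question is made up and the
# output depends on the model):
# print(generate("My landlord is keeping my security deposit. What can I do?"))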

examples = [
    ["I started a company with a friend. What types of legal documents should we fill in to clarify the ownership of the company?"],
    ["[CA] I got a parking ticket in Toronto. How can I contest it?"],
]

demo = gr.Interface(
    fn=generate,
    inputs=gr.Textbox(lines=5, label="Input Text", placeholder="Write your question here..."),
    outputs=gr.Textbox(label="Generated Text"),
    examples=examples,
    description=description,
    title=title,
)

demo.launch()
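
# Running this script directly (e.g. `python app.py`) serves the demo locally;
# on Hugging Face Spaces, app.py is executed automatically when the Space starts.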