import gradio as gr
from titlecase import titlecase
from transformers import BloomTokenizerFast, BloomForCausalLM

# The headline model is fine-tuned from bigscience/bloom-560m, so it reuses that tokenizer.
model_name = "marcderbauer/vice-headlines"
tokenizer = BloomTokenizerFast.from_pretrained("bigscience/bloom-560m")
model = BloomForCausalLM.from_pretrained(model_name)


# Sampling setup follows https://huggingface.co/blog/how-to-generate
def generate(text, temp=0.7, logging=True):
    input_ids = tokenizer.encode(text, return_tensors='pt')
    output = model.generate(
        input_ids,
        do_sample=True,
        max_length=30,
        top_p=0.92,
        top_k=50,
        temperature=temp,
        repetition_penalty=1.2,
        min_length=input_ids.shape[1] + 1,  # min_length counts tokens, so require at least one token beyond the prompt
    )
    decoded = tokenizer.decode(output[0], skip_special_tokens=True)
    if logging:
        print(f"\n\n{'-'*100}\nInput: {text}\nOutput: {decoded}\nTemp: {temp}")
    return titlecase(decoded)


description = "Generate Titles for the Vice YouTube Channel"
title = "Vice Headlines"

interface = gr.Interface(
    fn=generate,
    inputs=['text', gr.Slider(0.01, 1, step=0.01, value=0.7, label="Temperature")],
    outputs='text',
    examples=[["This Japanese"], ["Why"], ["North Korea"], ["Inside"], ["Spongebob"]],
    description=description,
    title=title,
)

interface.launch()