# -*- coding: utf-8 -*-
"""chatgpt-prompts-bart-long.ipynb

Automatically generated by Colaboratory.

Original file is located at
    https://colab.research.google.com/drive/185ao64S-GtYsW71yesPrKvnywS3UKxQl
    https://huggingface.co/merve/chatgpt-prompts-bart-long

# GPT to create ChatGPT prompts
"""

import gradio as gr
from transformers import AutoTokenizer, TFAutoModelForSeq2SeqLM

# Load the tokenizer and the TensorFlow seq2seq model once at module import.
tokenizer = AutoTokenizer.from_pretrained("merve/chatgpt-prompts-bart-long")
model = TFAutoModelForSeq2SeqLM.from_pretrained("merve/chatgpt-prompts-bart-long")


def generate_text(prompt):
    """Generate a ChatGPT-style prompt continuation for *prompt*.

    Encodes the input, samples up to 250 tokens from the BART model,
    and returns the decoded text with special tokens stripped.
    """
    # BUG FIX: the model is a TensorFlow model (TFAutoModelForSeq2SeqLM),
    # so the tokenizer must return TensorFlow tensors ("tf"). The original
    # code requested PyTorch tensors ("pt"), which a TF model's generate()
    # cannot consume.
    inputs = tokenizer.encode(prompt, return_tensors="tf")
    # do_sample=True with temperature=0.7 gives moderately random output.
    outputs = model.generate(inputs, max_length=250, do_sample=True, temperature=0.7)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


# Minimal Gradio text-in / text-out UI; launch() starts the local server.
ui = gr.Interface(fn=generate_text, inputs="text", outputs="text")
ui.launch()