"""Streamlit app that generates ChatGPT/BingChat prompts from a role name.

Expands a short role description (e.g. "Virtual Assistant") into a full
ChatGPT-style prompt using the merve/chatgpt-prompts-bart-long seq2seq model.
"""

import random

import streamlit as st
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

MODEL_NAME = "merve/chatgpt-prompts-bart-long"


@st.cache_resource
def load_model():
    """Load the tokenizer and model once per server process.

    Streamlit re-executes the whole script on every widget interaction;
    without caching, the BART checkpoint would be re-downloaded/re-loaded
    on each rerun. ``st.cache_resource`` keeps a single shared instance.
    """
    tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
    # from_tf=True: the checkpoint is published as TensorFlow weights.
    model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME, from_tf=True)
    return tokenizer, model


def generate(prompt):
    """Return one generated ChatGPT-style prompt for the given role string.

    Args:
        prompt: Free-text role description typed by the user.

    Returns:
        The first decoded sequence produced by the model (str).
    """
    tokenizer, model = load_model()
    batch = tokenizer(prompt, return_tensors="pt")
    generated_ids = model.generate(batch["input_ids"], max_new_tokens=150)
    output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
    return output[0]


st.title("ChatGPT-BingChat Prompt Generator")
st.write(
    "This app generates ChatGPT/BingChat & GPT-3 prompts using "
    "[this](https://huggingface.co/merve/chatgpt-prompts-bart-long) model "
    "trained by Merve. Enter a role and a prompt will be generated based on it."
)

prompt = st.text_input(
    "Enter a Role, Example: Virtual Assistant",
    placeholder="Text here",
    value="",
)

if st.button("Generate"):
    if prompt.strip():
        output = generate(prompt)
        # FIX: the original passed box=True to st.write, which is not a valid
        # keyword and raises TypeError on current Streamlit versions.
        st.write("Generated Prompt:")
        st.write("\n{}\n".format(output), unsafe_allow_html=True)
    else:
        # Avoid running the model on an empty role description.
        st.warning("Please enter a role first.")

st.write("")
st.write("\nExamples:\n", unsafe_allow_html=True)
st.write("", unsafe_allow_html=True)

with open("examples.txt", "r") as f:
    examples = f.readlines()

# FIX: random.sample raises ValueError when the sample size exceeds the
# population, so cap at the number of available example lines.
for example in random.sample(examples, min(5, len(examples))):
    st.write("\n• {}\n".format(example.strip()), unsafe_allow_html=True)