Alex committed on
Commit 02de365 • 1 Parent(s): fa8350a

Create app.py

Files changed (1)
  1. app.py +19 -0
app.py ADDED
@@ -0,0 +1,19 @@
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+ import gradio as gr
+
+ tokenizer = AutoTokenizer.from_pretrained("merve/chatgpt-prompts-bart-long")
+ model = AutoModelForSeq2SeqLM.from_pretrained("merve/chatgpt-prompts-bart-long", from_tf=True)
+
+ def generate(prompt):
+
+     batch = tokenizer(prompt, return_tensors="pt")
+     generated_ids = model.generate(batch["input_ids"], max_new_tokens=150)
+     output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
+     return output[0]
+
+ input_component = gr.Textbox(label = "Input a persona, e.g. photographer", value = "photographer")
+ output_component = gr.Textbox(label = "Prompt")
+ examples = [["photographer"], ["developer"]]
+ description = "This app generates ChatGPT prompts, it's based on a BART model trained on [this dataset](https://huggingface.co/datasets/fka/awesome-chatgpt-prompts). 📓 Simply enter a persona that you want the prompt to be generated based on. 🧙🏻🧑🏻‍🚀🧑🏻‍🎨🧑🏻‍🔬🧑🏻‍💻🧑🏼‍🏫🧑🏽‍🌾"
+ gr.Interface(generate, inputs = input_component, outputs=output_component, examples=examples, title = "👨🏻‍🎤 ChatGPT Prompt Generator 👨🏻‍🎤", description=description).launch()
+
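
For reference, below is a minimal standalone sketch of the same generation pipeline run outside the Gradio UI. The checkpoint name, from_tf=True, and max_new_tokens=150 mirror app.py above; the persona "developer" is an illustrative choice, and TensorFlow must be installed locally for the TF-to-PyTorch weight conversion to succeed.

    from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

    # Same checkpoint as app.py; weights are converted from TensorFlow on load.
    tokenizer = AutoTokenizer.from_pretrained("merve/chatgpt-prompts-bart-long")
    model = AutoModelForSeq2SeqLM.from_pretrained("merve/chatgpt-prompts-bart-long", from_tf=True)

    # Encode a persona, generate up to 150 new tokens, and decode the prompt text.
    batch = tokenizer("developer", return_tensors="pt")
    generated_ids = model.generate(batch["input_ids"], max_new_tokens=150)
    print(tokenizer.batch_decode(generated_ids, skip_special_tokens=True)[0])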