TogetherAI committed on
Commit
f9e0a72
β€’
1 Parent(s): b4d6d38

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -35
app.py CHANGED
@@ -1,42 +1,20 @@
 
1
  import gradio as gr
2
- import requests
3
- from PIL import Image
4
- from io import BytesIO
5
- import base64
6
 
7
- api_url = "https://5cb20b40-572c-426f-9466-995256f9b6eb.id.repl.co/generate_image"
 
8
 
9
def generate_image(model, prompt, seed, negative_prompt, sampler, steps):
    """Request a generated image from the remote API.

    Args:
        model: Model name to generate with.
        prompt: Text prompt.
        seed: Integer seed.
        negative_prompt: Text to steer the generation away from.
        sampler: Sampler identifier.
        steps: Number of sampling steps.

    Returns:
        A PIL Image on HTTP 200, otherwise None.
    """
    # Let requests build and URL-encode the query string. The original
    # hand-concatenated "?model=..&prompt=.." broke on prompts containing
    # spaces, '&', '#' or other reserved characters.
    params = {
        "model": model,
        "prompt": prompt,
        "seed": seed,
        "negative_prompt": negative_prompt,
        "sampler": sampler,
        "steps": steps,
    }
    response = requests.post(api_url, params=params, timeout=400)
    if response.status_code == 200:
        # NOTE(review): despite the key name "url", the payload is decoded
        # as base64 image bytes here — confirm against the server contract.
        img_bytes = base64.b64decode(response.json()["url"])
        return Image.open(BytesIO(img_bytes))
    return None
19
 
20
# --- Gradio UI wiring (legacy gr.inputs / gr.outputs API) ---

MODEL_CHOICES = ['Seek.art MEGA', 'Deliberate', 'Dreamlike Photoreal', 'Realistic Vision']
SAMPLER_CHOICES = [
    "k_lms", "k_heun", "k_euler", "k_euler_a", "k_dpm_2", "k_dpm_2_a",
    "DDIM", "k_dpm_fast", "k_dpm_adaptive", "k_dpmpp_2m", "k_dpmpp_2s_a",
    "k_dpmpp_sde",
]

# Input widgets, in the exact order generate_image expects its arguments.
dropdown_model = gr.inputs.Dropdown(MODEL_CHOICES, label="Model", default="Seek.art MEGA")
textbox_prompt = gr.inputs.Textbox(label="Prompt")
number_seed = gr.inputs.Number(label="Seed", default=0)
textbox_negative_prompt = gr.inputs.Textbox(label="Negative Prompt", default="")
dropdown_sampler = gr.inputs.Dropdown(SAMPLER_CHOICES, label="Sampler", default="k_dpmpp_2s_a")
number_steps = gr.inputs.Number(label="Steps", default=50)

inputs = [
    dropdown_model,
    textbox_prompt,
    number_seed,
    textbox_negative_prompt,
    dropdown_sampler,
    number_steps,
]

outputs = gr.outputs.Image(label="Generated Image", type="pil")

interface = gr.Interface(
    fn=generate_image,
    inputs=inputs,
    outputs=outputs,
    title="Image Generator",
    description="Select options and enter a prompt to generate an image.",
    examples=[],
    layout="vertical",
    theme="ParityError/Interstellar",  # theme specified by the author
)

interface.launch()
 
1
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
2
  import gradio as gr
 
 
 
 
3
 
4
# Pretrained BART checkpoint that maps a persona to a ChatGPT-style prompt.
# from_tf=True loads the checkpoint's TensorFlow weights into PyTorch.
_CHECKPOINT = "merve/chatgpt-prompts-bart-long"
tokenizer = AutoTokenizer.from_pretrained(_CHECKPOINT)
model = AutoModelForSeq2SeqLM.from_pretrained(_CHECKPOINT, from_tf=True)
6
 
7
def generate(prompt, max_new_tokens=150):
    """Generate a ChatGPT-style prompt for the given persona.

    Args:
        prompt: Persona text to condition the model on (e.g. "photographer").
        max_new_tokens: Upper bound on tokens generated beyond the input.
            Defaults to 150, matching the previously hard-coded limit, so
            existing callers are unaffected.

    Returns:
        The first decoded generated sequence, special tokens stripped.
    """
    batch = tokenizer(prompt, return_tensors="pt")
    generated_ids = model.generate(batch["input_ids"], max_new_tokens=max_new_tokens)
    output = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
    return output[0]
 
 
13
 
14
# --- Gradio front end: a persona textbox in, a generated prompt out ---
input_component = gr.Textbox(label="Input a persona, e.g. photographer", value="photographer")
output_component = gr.Textbox(label="Prompt")

demo = gr.Interface(
    generate,
    inputs=input_component,
    outputs=output_component,
    examples=[["photographer"], ["developer"]],
    title="πŸ‘¨πŸ»β€πŸŽ€ ChatGPT Prompt Generator πŸ‘¨πŸ»β€πŸŽ€",
    description="This app generates ChatGPT prompts, it's based on a BART model trained on [this dataset](https://huggingface.co/datasets/fka/awesome-chatgpt-prompts). πŸ““ Simply enter a persona that you want the prompt to be generated based on. πŸ§™πŸ»πŸ§‘πŸ»β€πŸš€πŸ§‘πŸ»β€πŸŽ¨πŸ§‘πŸ»β€πŸ”¬πŸ§‘πŸ»β€πŸ’»πŸ§‘πŸΌβ€πŸ«πŸ§‘πŸ½β€πŸŒΎ",
)
demo.launch()
19
 
 
20