Spaces:
Runtime error
Runtime error
gpt2prompt
Browse files
app.py
CHANGED
@@ -2,7 +2,8 @@ import gradio as gr
|
|
2 |
import os
|
3 |
import sys
|
4 |
from pathlib import Path
|
5 |
-
|
|
|
6 |
models = [
|
7 |
"Yntec/NovelAIRemix",
|
8 |
"Joeythemonster/anything-midjourney-v-4-1",
|
@@ -95,8 +96,16 @@ with gr.Blocks(css=css) as myface:
|
|
95 |
with gr.Column(scale=50):
|
96 |
input_text=gr.Textbox(label="Use this box to extend an idea automatically, by typing some words and clicking Extend Idea",lines=2)
|
97 |
see_prompts=gr.Button("Extend Idea")
|
98 |
-
|
99 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
100 |
def short_prompt(inputs):
    """Identity passthrough: hand the user's text back unchanged."""
    return inputs
|
102 |
|
@@ -105,7 +114,7 @@ with gr.Blocks(css=css) as myface:
|
|
105 |
run.click(send_it1, inputs=[magic1, model_name1], outputs=[output1])
|
106 |
|
107 |
|
108 |
-
|
109 |
|
110 |
myface.queue(concurrency_count=200)
|
111 |
myface.launch(inline=True, show_api=False, max_threads=400)
|
|
|
2 |
import os
|
3 |
import sys
|
4 |
from pathlib import Path
|
5 |
+
from transformers import pipeline

# Prompt-extension model: GPT-2 fine-tuned on Stable Diffusion prompts.
# Bug fix: transformers.pipeline() takes the checkpoint name via `model=`;
# `model_id` is not an accepted keyword and raised a TypeError at startup.
pipe = pipeline(
    'text-generation',
    model='Ar4ikov/gpt2-650k-stable-diffusion-prompt-generator',
)
|
7 |
models = [
|
8 |
"Yntec/NovelAIRemix",
|
9 |
"Joeythemonster/anything-midjourney-v-4-1",
|
|
|
96 |
with gr.Column(scale=50):
|
97 |
input_text=gr.Textbox(label="Use this box to extend an idea automatically, by typing some words and clicking Extend Idea",lines=2)
|
98 |
see_prompts=gr.Button("Extend Idea")
|
99 |
+
|
100 |
+
def get_valid_prompt(text: str) -> str:
    """Truncate generated text at the first sentence or line break.

    Returns whichever of the first '.'-delimited fragment and the first
    line of *text* is shorter — i.e. the text up to the earliest of the
    two break characters. Ties favour the sentence split.
    """
    dot_split = text.split('.')[0]
    n_split = text.split('\n')[0]
    # Bug fix: the original dict-lookup trick raised KeyError whenever the
    # first sentence was longer than the first line (no True key existed),
    # and returned the *longer* fragment otherwise (duplicate True keys).
    return dot_split if len(dot_split) <= len(n_split) else n_split
|
109 |
def short_prompt(inputs):
    """Return the given prompt text as-is (no shortening applied)."""
    return inputs
|
111 |
|
|
|
114 |
run.click(send_it1, inputs=[magic1, model_name1], outputs=[output1])
|
115 |
|
116 |
|
117 |
+
# Wire the "Extend Idea" button: run the user's text through the GPT-2
# prompt generator and write the cleaned-up result into magic1.
# Bug fix: the button variable is `see_prompts` (not `seeprompts`), and
# gradio's .click() takes (fn, inputs, outputs) — the original passed an
# eagerly-evaluated keyword argument, which raised a TypeError at launch.
see_prompts.click(
    lambda text: get_valid_prompt(pipe(text, max_length=77)[0]['generated_text']),
    inputs=[input_text],
    outputs=[magic1],
)
|
118 |
|
119 |
myface.queue(concurrency_count=200)
|
120 |
myface.launch(inline=True, show_api=False, max_threads=400)
|