treadon committed
Commit 8cc6050
Parent: b27b5dc

code cleanup

Files changed (1)
  1. app.py +64 -57
app.py CHANGED
@@ -12,6 +12,8 @@ model = None
 tokenizer = None
 
 def init_model():
+    global model, tokenizer
+
     model_id = os.environ.get("MODEL_ID") or "treadon/prompt-fungineer-355M"
     auth_token = os.environ.get("HUB_TOKEN") or True
 
@@ -43,11 +45,14 @@ def format_prompt(prompt, enhancers=True, inspiration=False, negative_prompt=Fal
 
 
 def generate_text(prompt, extra=False, top_k=100, top_p=0.95, temperature=0.85, enhancers = True, inpspiration = False , negative_prompt = False):
+    global model, tokenizer
+
     try:
         if model is None:
             init_model()
-    except:
-        pass
+    except Exception as e:
+        print(e)
+        return ["Try Again"] * 4
 
     if model is None:
         return ["Try Again"] * 4
@@ -74,59 +79,61 @@ def generate_text(prompt, extra=False, top_k=100, top_p=0.95, temperature=0.85,
 
     return samples
 
-
-with gr.Blocks() as fungineer:
-    with gr.Row():
-        gr.Markdown("""# Midjourney / Dalle 2 / Stable Diffusion Prompt Generator
-This is the 355M parameter model. There is also a 7B parameter model that is much better but far slower (access coming soon).
-Just enter a basic prompt and the fungineering model will use its wildest imagination to expand the prompt in detail. You can then use this prompt to generate images with Midjourney, Dalle 2, Stable Diffusion, Bing Image Creator, or any other image generation model.""")
-    with gr.Row():
-        with gr.Column():
-            base_prompt = gr.Textbox(lines=5, label="Base Prompt", placeholder="An astronaut in space", info="Enter a very simple prompt that will be fungineered into something exciting!")
-            extra = gr.Checkbox(value=True, label="Extra Fungineer Imagination", info="If checked, the model will be allowed to go wild with its imagination.")
-            with gr.Accordion("Advanced Generation Settings", open=False):
-                top_k = gr.Slider( minimum=10, maximum=1000, value=100, label="Top K", info="Top K sampling")
-                top_p = gr.Slider( minimum=0.1, maximum=1, value=0.95, step=0.01, label="Top P", info="Top P sampling")
-                temperature = gr.Slider( minimum=0.1, maximum=1.2, value=0.85, step=0.01, label="Temperature", info="Temperature sampling. Higher values will make the model more creative")
-
-            with gr.Accordion("Advanced Output Settings", open=False):
-                enh = gr.Checkbox(value=True, label="Enhancers", info="Add image meta information such as lens type, shutter speed, camera model, etc.")
-                insp = gr.Checkbox(value=False, label="Inspiration", info="Include inspirational photographers that are known for this type of photography. Sometimes random people will appear here, needs more training.")
-                neg = gr.Checkbox(value=False, label="Negative Prompt", info="Include a negative prompt, more often used in Stable Diffusion. If you're a Stable Diffusion user, chances are you already have a better negative prompt you like to use.")
-
-        with gr.Column():
-            outputs = [
-                gr.Textbox(lines=2, label="Fungineered Text 1"),
-                gr.Textbox(lines=2, label="Fungineered Text 2"),
-                gr.Textbox(lines=2, label="Fungineered Text 3"),
-                gr.Textbox(lines=2, label="Fungineered Text 4"),
-            ]
-
-    inputs = [base_prompt, extra, top_k, top_p, temperature, enh, insp, neg]
-
-
-    submit = gr.Button(label="Fungineer", variant="primary")
-    submit.click(generate_text, inputs=inputs, outputs=outputs)
-
-    examples = []
-    with open("examples.json") as f:
-        examples = json.load(f)
-
-    for i, example in enumerate(examples):
-        with gr.Tab(f"Example {i+1}"):
-            with gr.Row():
-                with gr.Column():
-                    gr.Markdown(f"### Base Prompt")
-                    gr.Image(value=f"{example['base']['src']}")
-                    gr.Markdown(f"{example['base']['prompt']}")
-                with gr.Column():
-                    gr.Markdown(f"### 355M Prompt Fungineered")
-                    gr.Image(value=f"{example['355M']['src']}")
-                    gr.Markdown(f"{example['355M']['prompt']}")
-                with gr.Column():
-                    gr.Markdown(f"### 7B Prompt Fungineered")
-                    gr.Markdown(f"Coming Soon!")
-
-
-fungineer.launch(enable_queue=True)
+if __name__ == "__main__":
+    with gr.Blocks() as fungineer:
+        with gr.Row():
+            gr.Markdown("""# Midjourney / Dalle 2 / Stable Diffusion Prompt Generator
+This is the 355M parameter model. There is also a 7B parameter model that is much better but far slower (access coming soon).
+Just enter a basic prompt and the fungineering model will use its wildest imagination to expand the prompt in detail. You can then use this prompt to generate images with Midjourney, Dalle 2, Stable Diffusion, Bing Image Creator, or any other image generation model.
+Treat this model more like a text-to-text model (simple prompt > complex prompt) rather than a generative model (prefix + word generation). It is a generative model under the hood.""")
+        with gr.Row():
+            with gr.Column():
+                base_prompt = gr.Textbox(lines=5, label="Base Prompt", placeholder="An astronaut in space", info="Enter a very simple prompt that will be fungineered into something exciting!")
+                extra = gr.Checkbox(value=True, label="Extra Fungineer Imagination", info="If checked, the model will be allowed to go wild with its imagination.")
+                with gr.Accordion("Advanced Generation Settings", open=False):
+                    top_k = gr.Slider( minimum=10, maximum=1000, value=100, label="Top K", info="Top K sampling")
+                    top_p = gr.Slider( minimum=0.1, maximum=1, value=0.95, step=0.01, label="Top P", info="Top P sampling")
+                    temperature = gr.Slider( minimum=0.1, maximum=1.2, value=0.85, step=0.01, label="Temperature", info="Temperature sampling. Higher values will make the model more creative")
+
+                with gr.Accordion("Advanced Output Settings", open=False):
+                    enh = gr.Checkbox(value=True, label="Enhancers", info="Add image meta information such as lens type, shutter speed, camera model, etc.")
+                    insp = gr.Checkbox(value=False, label="Inspiration", info="Include inspirational photographers that are known for this type of photography. Sometimes random people will appear here, needs more training.")
+                    neg = gr.Checkbox(value=False, label="Negative Prompt", info="Include a negative prompt, more often used in Stable Diffusion. If you're a Stable Diffusion user, chances are you already have a better negative prompt you like to use.")
+
+            with gr.Column():
+                outputs = [
+                    gr.Textbox(lines=2, label="Fungineered Text 1"),
+                    gr.Textbox(lines=2, label="Fungineered Text 2"),
+                    gr.Textbox(lines=2, label="Fungineered Text 3"),
+                    gr.Textbox(lines=2, label="Fungineered Text 4"),
+                ]
+
+        inputs = [base_prompt, extra, top_k, top_p, temperature, enh, insp, neg]
+
+
+        submit = gr.Button(label="Fungineer", variant="primary")
+        submit.click(generate_text, inputs=inputs, outputs=outputs)
+
+        examples = []
+        with open("examples.json") as f:
+            examples = json.load(f)
+
+        for i, example in enumerate(examples):
+            with gr.Tab(f"Example {i+1}"):
+                with gr.Row():
+                    with gr.Column():
+                        gr.Markdown(f"### Base Prompt")
+                        gr.Image(value=f"{example['base']['src']}")
+                        gr.Markdown(f"{example['base']['prompt']}")
+                    with gr.Column():
+                        gr.Markdown(f"### 355M Prompt Fungineered")
+                        gr.Image(value=f"{example['355M']['src']}")
+                        gr.Markdown(f"{example['355M']['prompt']}")
+                    with gr.Column():
+                        gr.Markdown(f"### 7B Prompt Fungineered")
+                        gr.Markdown(f"Coming Soon!")
+
+
+    init_model()
+    fungineer.launch(enable_queue=True)
 