Spaces:
Runtime error
Runtime error
gokaygokay
committed on
Commit
•
5b04683
1
Parent(s):
63296c8
prompt_types
Browse files
- huggingface_inference_node.py +3 -3
- ui_components.py +14 -4
huggingface_inference_node.py
CHANGED
@@ -62,10 +62,10 @@ You are allowed to make up film and branding names, and do them like 80's, 90's
|
|
62 |
"fantasy": fantasy_prompt
|
63 |
}
|
64 |
|
|
|
65 |
print(f"Received prompt_type: '{prompt_type}'") # Debug print
|
66 |
-
|
67 |
-
|
68 |
-
base_prompt = prompt_types[prompt_type.strip()]
|
69 |
print(f"Using {prompt_type} prompt")
|
70 |
elif custom_base_prompt.strip():
|
71 |
base_prompt = custom_base_prompt
|
|
|
62 |
"fantasy": fantasy_prompt
|
63 |
}
|
64 |
|
65 |
+
# Update this part to handle the prompt_type correctly
|
66 |
print(f"Received prompt_type: '{prompt_type}'") # Debug print
|
67 |
+
if prompt_type and prompt_type.strip() and prompt_type in prompt_types:
|
68 |
+
base_prompt = prompt_types[prompt_type]
|
|
|
69 |
print(f"Using {prompt_type} prompt")
|
70 |
elif custom_base_prompt.strip():
|
71 |
base_prompt = custom_base_prompt
|
ui_components.py
CHANGED
@@ -15,6 +15,9 @@ title = """<h1 align="center">FLUX Prompt Generator</h1>
|
|
15 |
</center></p>
|
16 |
"""
|
17 |
|
|
|
|
|
|
|
18 |
def create_interface():
|
19 |
prompt_generator = PromptGenerator()
|
20 |
huggingface_node = HuggingFaceInferenceNode()
|
@@ -113,6 +116,12 @@ def create_interface():
|
|
113 |
interactive=True
|
114 |
)
|
115 |
custom_base_prompt = gr.Textbox(label="Custom Base Prompt", lines=5)
|
|
|
|
|
|
|
|
|
|
|
|
|
116 |
generate_text_button = gr.Button("Generate Prompt with LLM (Llama 3.1 70B)")
|
117 |
text_output = gr.Textbox(label="Generated Text", lines=10)
|
118 |
|
@@ -171,13 +180,14 @@ def create_interface():
|
|
171 |
outputs=[output]
|
172 |
)
|
173 |
|
174 |
-
def generate_text_with_llm(output, happy_talk, compress, compression_level,
|
175 |
-
|
176 |
-
|
|
|
177 |
|
178 |
generate_text_button.click(
|
179 |
generate_text_with_llm,
|
180 |
-
inputs=[output, happy_talk, compress, compression_level,
|
181 |
outputs=text_output,
|
182 |
api_name="generate_text" # Add this line
|
183 |
)
|
|
|
15 |
</center></p>
|
16 |
"""
|
17 |
|
18 |
+
# Add this global variable
|
19 |
+
selected_prompt_type = "happy" # Default value
|
20 |
+
|
21 |
def create_interface():
|
22 |
prompt_generator = PromptGenerator()
|
23 |
huggingface_node = HuggingFaceInferenceNode()
|
|
|
116 |
interactive=True
|
117 |
)
|
118 |
custom_base_prompt = gr.Textbox(label="Custom Base Prompt", lines=5)
|
119 |
+
def update_prompt_type(value):
|
120 |
+
global selected_prompt_type
|
121 |
+
selected_prompt_type = value
|
122 |
+
print(f"Updated prompt type: {selected_prompt_type}")
|
123 |
+
return value
|
124 |
+
prompt_type.change(update_prompt_type, inputs=[prompt_type], outputs=[prompt_type])
|
125 |
generate_text_button = gr.Button("Generate Prompt with LLM (Llama 3.1 70B)")
|
126 |
text_output = gr.Textbox(label="Generated Text", lines=10)
|
127 |
|
|
|
180 |
outputs=[output]
|
181 |
)
|
182 |
|
183 |
+
def generate_text_with_llm(output, happy_talk, compress, compression_level, custom_base_prompt):
|
184 |
+
global selected_prompt_type
|
185 |
+
print(f"Prompt type selected in UI: {selected_prompt_type}") # Debug print
|
186 |
+
return huggingface_node.generate(output, happy_talk, compress, compression_level, False, selected_prompt_type, custom_base_prompt)
|
187 |
|
188 |
generate_text_button.click(
|
189 |
generate_text_with_llm,
|
190 |
+
inputs=[output, happy_talk, compress, compression_level, custom_base_prompt],
|
191 |
outputs=text_output,
|
192 |
api_name="generate_text" # Add this line
|
193 |
)
|