# filename: prompt_builder.py
import os
import json
import logging
import configparser
from collections import defaultdict
from datetime import datetime  # used when timestamping history entries
from pathlib import Path

# Configure logging
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[logging.StreamHandler()]
)
logger = logging.getLogger("PromptBuilder")

# Optional imports with graceful degradation
try:
    from llama_cpp import Llama
    LLAMA_AVAILABLE = True
except ImportError:
    logger.warning("llama-cpp-python not installed. LLM enhancement will be disabled.")
    LLAMA_AVAILABLE = False

try:
    import gradio as gr
    GRADIO_AVAILABLE = True
except ImportError:
    logger.error("Gradio not installed. Cannot launch UI.")
    GRADIO_AVAILABLE = False


class PromptBuilder:
    def __init__(self, options_dir="prompt_options", config_file="config.ini"):
        self.options_dir = options_dir
        self.config = self._load_config(config_file)
        self.options = self._load_options()
        self.llm = None
        # Initialize the LLM only if llama-cpp is importable and enabled in config;
        # getboolean() accepts "True"/"true"/"1", unlike a raw string comparison
        if LLAMA_AVAILABLE and self.config.getboolean('llm', 'enabled', fallback=False):
            self._initialize_llm()

    def _load_config(self, config_file):
        """Load configuration from config.ini file or create it with defaults."""
        config = configparser.ConfigParser()
        # Default configuration
        config['general'] = {
            'options_dir': 'prompt_options',
            'history_file': 'prompt_history.json'
        }
        config['llm'] = {
            'enabled': 'True',
            'model_path': 'models/model.gguf',
            'system_prompt': 'You are a visual prompt engineer and expert in visual descriptions influenced by keywords.',
            'n_gpu_layers': '30',
            'seed': '1337',
            'context_size': '2048'
        }
        # Try to load an existing config; the defaults above remain as fallbacks
        if os.path.exists(config_file):
            try:
                config.read(config_file)
                logger.info(f"Loaded configuration from {config_file}")
            except Exception as e:
                logger.error(f"Error loading config: {e}")
        else:
            # Save the default config so it can be edited later
            try:
                config_dir = os.path.dirname(config_file)
                if config_dir:  # dirname is '' for a bare filename; makedirs('') would raise
                    os.makedirs(config_dir, exist_ok=True)
                with open(config_file, 'w') as f:
                    config.write(f)
                logger.info(f"Created default configuration at {config_file}")
            except Exception as e:
                logger.error(f"Error creating config: {e}")
        return config
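
    # For reference, the defaults above serialize to a config.ini like:
    #   [general]
    #   options_dir = prompt_options
    #   history_file = prompt_history.json
    #
    #   [llm]
    #   enabled = True
    #   model_path = models/model.gguf
    #   n_gpu_layers = 30
    #   seed = 1337
    #   context_size = 2048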

    def _initialize_llm(self):
        """Initialize the LLM with configuration parameters."""
        try:
            model_path = self.config.get('llm', 'model_path')
            if not os.path.exists(model_path):
                logger.error(f"Model file not found: {model_path}")
                return
            self.llm = Llama(
                model_path=model_path,
                n_gpu_layers=self.config.getint('llm', 'n_gpu_layers'),
                seed=self.config.getint('llm', 'seed'),
                n_ctx=self.config.getint('llm', 'context_size'),
            )
            logger.info(f"LLM initialized successfully with model: {model_path}")
        except Exception as e:
            logger.error(f"Failed to initialize LLM: {e}")
            self.llm = None

    def _ensure_directory_exists(self, directory):
        """Ensure the specified directory exists."""
        Path(directory).mkdir(parents=True, exist_ok=True)

    def _load_options(self):
        """Load prompt options from text files, one choice per line."""
        options = defaultdict(dict)
        # Ensure the options directory exists
        self._ensure_directory_exists(self.options_dir)
        try:
            for filename in os.listdir(self.options_dir):
                if filename.endswith(".txt"):
                    path = os.path.join(self.options_dir, filename)
                    # removesuffix only strips the extension; replace() would also
                    # mangle ".txt" occurring elsewhere in the name
                    key = filename.removesuffix(".txt")
                    try:
                        with open(path, "r", encoding="utf-8") as f:
                            lines = [line.strip() for line in f if line.strip()]
                        if '.' in key:
                            # "group.field.txt" -> options[group][field]
                            group, field = key.split('.', 1)
                            options[group][field] = lines
                        else:
                            # "field.txt" -> top-level "general" group
                            options["general"][key] = lines
                    except Exception as e:
                        logger.error(f"Error loading options from {path}: {e}")
        except Exception as e:
            logger.error(f"Error accessing options directory: {e}")
        return options
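
    # Example layout matching the dropdowns wired up in the UI below:
    #   prompt_options/
    #     character.gender.txt   -> options["character"]["gender"]
    #     background.sky.txt     -> options["background"]["sky"]
    #     styles.txt             -> options["general"]["styles"]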

    def get_choices(self, group, field):
        """Get choices for a specific group and field."""
        return self.options.get(group, {}).get(field, [])

    def build_prompt(self, base_prompt="", custom_tags=None, enhance=False, **field_values):
        """Build a prompt from the selected options."""
        parts = [base_prompt] if base_prompt else []
        # Add field values to the prompt parts, phrasing the style fields naturally
        for key, value in field_values.items():
            if not value:
                continue
            if key == "styles":
                parts.append(f"in {value} style")
            elif key == "lighting":
                parts.append(f"with {value} lighting")
            elif key == "mood":
                parts.append(f"evoking a {value} mood")
            else:
                parts.append(value)
        # Add custom tags
        if custom_tags:
            parts.append(custom_tags)
        # Join parts into a basic prompt
        basic_prompt = ", ".join(filter(None, parts))
        # Enhance the prompt if requested and the LLM is available
        if enhance and self.llm is not None:
            try:
                return self.enhance_prompt(basic_prompt)
            except Exception as e:
                logger.error(f"Error enhancing prompt: {e}")
                return f"Error enhancing prompt: {e}\nBasic prompt: {basic_prompt}"
        return basic_prompt

    def enhance_prompt(self, prompt: str) -> str:
        """Enhance a prompt using the LLM."""
        if not self.llm:
            return f"LLM not available. Basic prompt: {prompt}"
        try:
            system_prompt = self.config.get('llm', 'system_prompt')
            # Llama 3 chat template; adjust if your GGUF model expects another format
            base_prompt = f"""<|begin_of_text|><|start_header_id|>system<|end_header_id|>
{system_prompt}<|eot_id|><|start_header_id|>user<|end_header_id|>
{prompt}<|eot_id|><|start_header_id|>assistant<|end_header_id|>
"""
            output = self.llm(
                base_prompt,
                max_tokens=256,  # allow room for a full visual description
                stop=["<|eot_id|>"],
                echo=False,
            )
            if output and "choices" in output and output["choices"]:
                return output["choices"][0]["text"].strip()
            logger.warning("LLM returned an empty or invalid response")
            return prompt
        except Exception as e:
            logger.error(f"Error in LLM inference: {e}")
            return f"Error enhancing: {e}\nOriginal prompt: {prompt}"

    def save_prompt_to_history(self, prompt):
        """Append a generated prompt to the JSON history file."""
        history_file = self.config.get('general', 'history_file')
        try:
            history = []
            if os.path.exists(history_file):
                with open(history_file, 'r', encoding='utf-8') as f:
                    history = json.load(f)
            # Add the new prompt with a timestamp (datetime imported at module level)
            history.append({
                "prompt": prompt,
                "timestamp": datetime.now().isoformat()
            })
            # Keep only the last 100 prompts
            history = history[-100:]
            with open(history_file, 'w', encoding='utf-8') as f:
                json.dump(history, f, indent=2)
            logger.info(f"Saved prompt to history: {prompt[:30]}...")
        except Exception as e:
            logger.error(f"Error saving prompt to history: {e}")


def create_ui(builder):
    """Create the Gradio UI."""
    with gr.Blocks(title="Prompt Builder UI") as demo:
        gr.Markdown("# 🌟 Prompt Builder\nCompose generative prompts using curated options.")
        with gr.Row():
            with gr.Column(scale=3):
                base_prompt = gr.Textbox(
                    label="Base Prompt",
                    placeholder="e.g., A portrait of...",
                    lines=2
                )
            with gr.Column(scale=1):
                llama_prompt_enhancing = gr.Checkbox(
                    label="Enhance with LLM",
                    value=builder.llm is not None,
                    interactive=builder.llm is not None,
                    info="Enhance prompt with LLM" if builder.llm is not None else "LLM not available"
                )
        custom_tags = gr.Textbox(
            label="Custom Tags",
            placeholder="e.g., cinematic, trending on ArtStation, detailed, 8k"
        )
        with gr.Tabs():
            with gr.TabItem("Character"):
                with gr.Row():
                    with gr.Column():
                        gender = gr.Dropdown(
                            choices=builder.get_choices("character", "gender"),
                            label="Gender"
                        )
                        body = gr.Dropdown(
                            choices=builder.get_choices("character", "body"),
                            label="Body"
                        )
                        clothing = gr.Dropdown(
                            choices=builder.get_choices("character", "clothing"),
                            label="Clothing"
                        )
                    with gr.Column():
                        hair = gr.Dropdown(
                            choices=builder.get_choices("character", "hair"),
                            label="Hair"
                        )
                        eyes = gr.Dropdown(
                            choices=builder.get_choices("character", "eyes"),
                            label="Eyes"
                        )
            with gr.TabItem("Background"):
                with gr.Row():
                    with gr.Column():
                        land_type = gr.Dropdown(
                            choices=builder.get_choices("background", "land_type"),
                            label="Land Type"
                        )
                        sky = gr.Dropdown(
                            choices=builder.get_choices("background", "sky"),
                            label="Sky"
                        )
                    with gr.Column():
                        flora = gr.Dropdown(
                            choices=builder.get_choices("background", "flora"),
                            label="Flora"
                        )
                        fauna = gr.Dropdown(
                            choices=builder.get_choices("background", "fauna"),
                            label="Fauna"
                        )
            with gr.TabItem("Style"):
                with gr.Row():
                    with gr.Column():
                        styles = gr.Dropdown(
                            choices=builder.get_choices("general", "styles"),
                            label="Style"
                        )
                    with gr.Column():
                        lighting = gr.Dropdown(
                            choices=builder.get_choices("general", "lighting"),
                            label="Lighting"
                        )
                        mood = gr.Dropdown(
                            choices=builder.get_choices("general", "mood"),
                            label="Mood"
                        )
        with gr.Row():
            with gr.Column(scale=4):
                output = gr.Textbox(
                    label="Generated Prompt",
                    lines=4
                )
            with gr.Column(scale=1):
                copy_btn = gr.Button("📋 Copy to Clipboard")
                save_btn = gr.Button("💾 Save to History")
                clear_btn = gr.Button("🧹 Clear All")
        with gr.Row():
            generate_btn = gr.Button("🔮 Build Prompt", variant="primary", size="lg")

        # Handle events
        def generate_prompt_handler(
            base_prompt, custom_tags, gender, body, clothing, hair, eyes,
            land_type, sky, flora, fauna, styles, lighting, mood, llama_prompt_enhancing
        ):
            return builder.build_prompt(
                base_prompt=base_prompt,
                enhance=llama_prompt_enhancing,
                custom_tags=custom_tags,
                gender=gender,
                body=body,
                clothing=clothing,
                hair=hair,
                eyes=eyes,
                land_type=land_type,
                sky=sky,
                flora=flora,
                fauna=fauna,
                styles=styles,
                lighting=lighting,
                mood=mood
            )

        def save_to_history(prompt):
            if prompt:
                builder.save_prompt_to_history(prompt)
            else:
                logger.info("Nothing to save")
            # Return an empty update so the generated prompt is not replaced
            # by a status message (which a later "Save" would otherwise store
            # as the prompt itself)
            return gr.update()

        def clear_all():
            # One value per output component, in the order wired below;
            # the checkbox resets to its initial state rather than plain False
            return ("", "", None, None, None, None, None, None,
                    None, None, None, None, None, None,
                    builder.llm is not None, "")

        # Connect event handlers
        generate_btn.click(
            fn=generate_prompt_handler,
            inputs=[
                base_prompt, custom_tags, gender, body, clothing, hair, eyes,
                land_type, sky, flora, fauna, styles, lighting, mood, llama_prompt_enhancing
            ],
            outputs=[output]
        )
        save_btn.click(
            fn=save_to_history,
            inputs=[output],
            outputs=[output]
        )
        clear_btn.click(
            fn=clear_all,
            inputs=[],
            outputs=[
                base_prompt, custom_tags, gender, body, clothing, hair, eyes,
                land_type, sky, flora, fauna, styles, lighting, mood, llama_prompt_enhancing, output
            ]
        )

        # Copy to clipboard in the browser: with fn=None, the _js function runs
        # client-side and receives the textbox value directly, so no DOM
        # selector (and no elem_id on the textbox) is needed.
        # Note: Gradio 4 renamed the `_js` keyword to `js`.
        copy_btn.click(
            None,
            inputs=[output],
            outputs=[],
            _js="(text) => { navigator.clipboard.writeText(text || ''); }"
        )

    return demo


def main():
    """Main entry point for the application."""
    if not GRADIO_AVAILABLE:
        logger.error("Cannot start UI - Gradio not available")
        return
    # Construct the builder (and load the LLM) only once the UI can launch
    builder = PromptBuilder()
    demo = create_ui(builder)
    demo.launch()


if __name__ == "__main__":
    main()