# NOTE: Hugging Face Space page residue ("Spaces: / Sleeping / Sleeping") removed.
# gemini_client.py
"""Gemini client for AcidopShell: wraps the Google GenAI SDK with graceful fallbacks."""
import os

from dotenv import load_dotenv

# Pull GEMINI_API_KEY (and anything else) from a local .env file if present.
load_dotenv()

# Try to import the official Google GenAI SDK. If it fails, fall back to a safe mock.
# IMPORTANT: this must be the ONLY import of google.generativeai in the module.
# An unconditional `import google.generativeai` above this block would raise
# ImportError before the fallback could engage, crashing the whole Space.
try:
    import google.generativeai as genai
    GENAI_AVAILABLE = True
except Exception:
    genai = None
    GENAI_AVAILABLE = False

from llm_file_generator import file_generator

api_key = os.getenv("GEMINI_API_KEY")

if GENAI_AVAILABLE:
    if not api_key:
        # Do not raise here to allow the Space to run; instead, functions will return helpful errors.
        print("Warning: GEMINI_API_KEY not set. Gemini calls will return an error.")
    else:
        genai.configure(api_key=api_key)
# System prompt for the file-generation model: base persona plus the XML file-tag
# protocol contributed by file_generator.get_enhanced_prompt() (evaluated at import time).
SYSTEM_INSTRUCTION = f"""You are AcidopShell AI Assistant - a helpful coding assistant integrated into a custom shell.
You can answer questions, explain concepts, and generate code files.
{file_generator.get_enhanced_prompt()}
Be concise but helpful. When generating files, always provide complete, working code.
You can provide explanations before or after the XML tags."""
# Model handles default to None so the ask_* helpers can degrade gracefully;
# they are only replaced with real wrappers when the SDK and key are present.
model_with_files = None
model_regular = None
if GENAI_AVAILABLE and api_key:
    try:
        model_with_files = genai.GenerativeModel(
            "gemini-2.0-flash-exp",
            system_instruction=SYSTEM_INSTRUCTION,
        )
        model_regular = genai.GenerativeModel("gemini-2.0-flash-exp")
    except Exception as e:
        # Keep the Space alive even if model construction fails; reset both
        # handles so a partially-initialized pair is never exposed.
        print(f"Warning: Failed to initialize GenAI models: {e}")
        model_with_files = None
        model_regular = None
| def _not_available_msg(): | |
| return "⚠️ Gemini SDK not available or GEMINI_API_KEY not configured in this environment." | |
def ask_gemini(prompt: str):
    """
    Send *prompt* to Gemini and return the response text.

    Returns a human-readable error string instead of raising when the SDK
    is unavailable, the key is missing, or the API call fails.
    """
    if not (GENAI_AVAILABLE and model_regular):
        return _not_available_msg()
    try:
        return model_regular.generate_content(prompt).text
    except Exception as e:
        return f"⚠️ Error calling Gemini: {e}"
def ask_gemini_with_file_generation(query: str, project_dir: str = "."):
    """
    Enhanced Gemini query with automatic file generation.
    Returns: (response_text, files_were_generated)
    """
    if not (GENAI_AVAILABLE and model_with_files):
        return _not_available_msg(), False
    try:
        reply = model_with_files.generate_content(query)
        reply_text = reply.text
        # Scan the reply for file-generation XML tags and materialize them on disk.
        created_files = file_generator.process_response(reply_text, project_dir)
        return reply_text, created_files
    except Exception as e:
        return f"⚠️ Error calling Gemini: {e}", False