"""
Cursor Rules Generator - Gemini Adapter
This module implements the LLM adapter for Google's Gemini API.
"""
import requests
from typing import Dict, List, Optional, Any
from ..config.settings import Settings
from .adapter import LLMAdapter
class GeminiAdapter(LLMAdapter):
"""Adapter for Google's Gemini API."""
def __init__(self):
"""Initialize the Gemini adapter."""
self.api_key = None
self.api_url = Settings.GEMINI_API_URL
self.initialized = False
def initialize(self, api_key: str, **kwargs) -> None:
"""Initialize the adapter with API key and optional parameters.
Args:
api_key: The Gemini API key
**kwargs: Additional provider-specific parameters
"""
self.api_key = api_key
self.api_url = kwargs.get('api_url', Settings.GEMINI_API_URL)
self.initialized = True
def validate_api_key(self, api_key: str) -> bool:
"""Validate the Gemini API key.
Args:
api_key: The API key to validate
Returns:
bool: True if the API key is valid, False otherwise
"""
        try:
            # Try to list models with the provided API key
            url = f"{self.api_url}/models?key={api_key}"
            response = requests.get(url, timeout=10)
            # A 200 response means the key was accepted
            return response.status_code == 200
        except Exception:
            return False
def get_available_models(self) -> List[Dict[str, str]]:
"""Get a list of available Gemini models.
Returns:
List[Dict[str, str]]: A list of model information dictionaries
"""
if not self.initialized:
raise ValueError("Adapter not initialized. Call initialize() first.")
try:
# Get available models
url = f"{self.api_url}/models?key={self.api_key}"
            response = requests.get(url, timeout=10)
if response.status_code != 200:
raise ValueError(f"Failed to get models: {response.text}")
data = response.json()
# Filter for Gemini models and format the response
models = []
for model in data.get('models', []):
if 'gemini' in model.get('name', '').lower():
model_id = model.get('name').split('/')[-1]
models.append({
'id': model_id,
'name': self._format_model_name(model_id)
})
# If no models found, return default models
if not models:
models = [
{'id': 'gemini-2.5-pro', 'name': 'Gemini 2.5 Pro'},
{'id': 'gemini-2.0-flash', 'name': 'Gemini 2.0 Flash'},
{'id': 'gemini-2.0-flash-lite', 'name': 'Gemini 2.0 Flash-Lite'}
]
return models
        except Exception:
            # Return the default models on any error
return [
{'id': 'gemini-2.5-pro', 'name': 'Gemini 2.5 Pro'},
{'id': 'gemini-2.0-flash', 'name': 'Gemini 2.0 Flash'},
{'id': 'gemini-2.0-flash-lite', 'name': 'Gemini 2.0 Flash-Lite'}
]
def generate_rule(
self,
model: str,
rule_type: str,
description: str,
content: str,
parameters: Optional[Dict[str, Any]] = None
) -> str:
"""Generate a Cursor Rule using Gemini.
Args:
model: The Gemini model ID to use
rule_type: The type of rule to generate
description: A short description of the rule's purpose
content: The main content of the rule
parameters: Additional parameters for rule generation
Returns:
str: The generated rule in MDC format
"""
if not self.initialized:
raise ValueError("Adapter not initialized. Call initialize() first.")
# Set default parameters if not provided
if parameters is None:
parameters = {}
# Extract parameters
temperature = parameters.get('temperature', Settings.DEFAULT_TEMPERATURE)
globs = parameters.get('globs', '')
referenced_files = parameters.get('referenced_files', '')
prompt = parameters.get('prompt', '')
# Prepare the prompt for Gemini
system_prompt = """
You are a Cursor Rules expert. Create a rule in MDC format based on the provided information.
MDC format example:
---
description: RPC Service boilerplate
globs:
alwaysApply: false
---
- Use our internal RPC pattern when defining services
- Always use snake_case for service names.
@service-template.ts
"""
user_prompt = f"""
Create a Cursor Rule with the following details:
Rule Type: {rule_type}
Description: {description}
Content: {content}
"""
if globs:
user_prompt += f"\nGlobs: {globs}"
if referenced_files:
user_prompt += f"\nReferenced Files: {referenced_files}"
if prompt:
user_prompt += f"\nAdditional Instructions: {prompt}"
# Prepare the API request
url = f"{self.api_url}/models/{model}:generateContent?key={self.api_key}"
payload = {
"contents": [
{
"role": "user",
"parts": [
{"text": system_prompt + "\n\n" + user_prompt}
]
}
],
"generationConfig": {
"temperature": temperature,
"topP": 0.8,
"topK": 40,
"maxOutputTokens": 2048
}
}
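        # The request body follows the Gemini REST generateContent format:
        # "contents" carries the combined system and user prompt as a single
        # user turn, and "generationConfig" fixes the sampling settings
        # (topP/topK/maxOutputTokens are hard-coded defaults here). A
        # successful response is expected to look roughly like
        # {"candidates": [{"content": {"parts": [{"text": "..."}]}}]},
        # which is the shape the extraction below relies on.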
# Make the API request
try:
            response = requests.post(url, json=payload, timeout=60)
if response.status_code != 200:
raise ValueError(f"Failed to generate rule: {response.text}")
data = response.json()
# Extract the generated text
generated_text = data.get('candidates', [{}])[0].get('content', {}).get('parts', [{}])[0].get('text', '')
# If no text was generated, create a basic rule
if not generated_text:
return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
return generated_text
        except Exception:
            # Fall back to a locally built basic rule on any error
return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
def _format_model_name(self, model_id: str) -> str:
"""Format a model ID into a human-readable name.
Args:
model_id: The model ID
Returns:
str: A human-readable model name
"""
        # Replace hyphens with spaces and capitalize each word,
        # e.g. "gemini-2.0-flash" -> "Gemini 2.0 Flash"
        name = model_id.replace('-', ' ').title()
        # Special case: keep the hyphen in "Flash-Lite" so the name matches
        # the default model names used above
        name = name.replace('Flash Lite', 'Flash-Lite')
        return name
def _create_basic_rule(
self,
rule_type: str,
description: str,
content: str,
globs: str = '',
referenced_files: str = ''
) -> str:
"""Create a basic rule in MDC format without using the LLM.
Args:
rule_type: The type of rule
description: The rule description
content: The rule content
globs: Glob patterns for Auto Attached rules
referenced_files: Referenced files
Returns:
str: The rule in MDC format
"""
# Create MDC format
mdc = '---\n'
mdc += f'description: {description}\n'
if rule_type == 'Auto Attached' and globs:
mdc += f'globs: {globs}\n'
if rule_type == 'Always':
mdc += 'alwaysApply: true\n'
else:
mdc += 'alwaysApply: false\n'
mdc += '---\n\n'
mdc += content + '\n'
# Add referenced files
if referenced_files:
mdc += '\n' + referenced_files
return mdc
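

# Minimal manual smoke test. This is only an illustrative sketch: it assumes
# the module is executed in its package context (the relative imports above
# require it) and that a valid key is available in the GEMINI_API_KEY
# environment variable.
if __name__ == "__main__":
    import os

    adapter = GeminiAdapter()
    adapter.initialize(api_key=os.environ.get("GEMINI_API_KEY", ""))
    rule = adapter.generate_rule(
        model="gemini-2.0-flash",
        rule_type="Always",
        description="Example rule",
        content="- Prefer explicit imports over wildcard imports",
    )
    print(rule)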