"""
Cursor Rules Generator - OpenRouter Adapter

This module implements the LLM adapter for the OpenRouter API.
"""

import requests
from typing import Dict, List, Optional, Any

from ..config.settings import Settings
from .adapter import LLMAdapter
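
# Illustrative usage sketch (comments only; the API key, model ID, and rule
# details below are placeholders, not values defined by this module):
#
#     adapter = OpenRouterAdapter()
#     adapter.initialize(api_key="sk-or-...")
#     if adapter.validate_api_key("sk-or-..."):
#         rule = adapter.generate_rule(
#             model="openai/gpt-4o",
#             rule_type="Always",
#             description="Enforce service naming conventions",
#             content="- Always use snake_case for service names.",
#         )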


class OpenRouterAdapter(LLMAdapter):
    """Adapter for the OpenRouter API."""

    def __init__(self):
        """Initialize the OpenRouter adapter."""
        self.api_key = None
        self.api_url = Settings.OPENROUTER_API_URL
        # Attribution values are populated in initialize(); start unset.
        self.site_url = None
        self.site_name = None
        self.initialized = False

    def initialize(self, api_key: str, **kwargs) -> None:
        """Initialize the adapter with an API key and optional parameters.

        Args:
            api_key: The OpenRouter API key
            **kwargs: Additional provider-specific parameters
                (api_url, site_url, site_name)
        """
        self.api_key = api_key
        self.api_url = kwargs.get('api_url', Settings.OPENROUTER_API_URL)
        self.site_url = kwargs.get('site_url', 'https://cursor-rules-generator.example.com')
        self.site_name = kwargs.get('site_name', 'Cursor Rules Generator')
        self.initialized = True
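
    # The kwargs above are optional overrides. A hypothetical call with every
    # override supplied (all values are placeholders) might look like:
    #
    #     adapter.initialize(
    #         api_key="sk-or-...",
    #         api_url=Settings.OPENROUTER_API_URL,
    #         site_url="https://my-site.example.com",
    #         site_name="My Site",
    #     )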

    def validate_api_key(self, api_key: str) -> bool:
        """Validate the OpenRouter API key.

        Args:
            api_key: The API key to validate

        Returns:
            bool: True if the API key is valid, False otherwise
        """
        try:
            url = f"{self.api_url}/models"
            headers = {
                "Authorization": f"Bearer {api_key}"
            }
            response = requests.get(url, headers=headers)
            return response.status_code == 200
        except Exception:
            return False

    def get_available_models(self) -> List[Dict[str, str]]:
        """Get a list of available OpenRouter models.

        Returns:
            List[Dict[str, str]]: A list of model information dictionaries
        """
        if not self.initialized:
            raise ValueError("Adapter not initialized. Call initialize() first.")

        # Fallback list used when the API returns no chat-capable models or the
        # request fails.
        fallback_models = [
            {'id': 'openai/gpt-4o', 'name': 'OpenAI GPT-4o'},
            {'id': 'anthropic/claude-3-opus', 'name': 'Anthropic Claude 3 Opus'},
            {'id': 'google/gemini-2.5-pro', 'name': 'Google Gemini 2.5 Pro'},
            {'id': 'meta-llama/llama-3-70b-instruct', 'name': 'Meta Llama 3 70B'}
        ]

        try:
            url = f"{self.api_url}/models"
            headers = {
                "Authorization": f"Bearer {self.api_key}"
            }
            response = requests.get(url, headers=headers)

            if response.status_code != 200:
                raise ValueError(f"Failed to get models: {response.text}")

            data = response.json()

            models = []
            for model in data.get('data', []):
                model_id = model.get('id')
                model_name = model.get('name', model_id)

                # Skip models that do not support chat completions.
                if not model.get('capabilities', {}).get('chat'):
                    continue

                models.append({
                    'id': model_id,
                    'name': model_name
                })

            return models or fallback_models
        except Exception:
            return fallback_models
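
    # For illustration, a caller might pick a model ID from the returned list
    # (the entries shown are the fallback values above, not live API data):
    #
    #     models = adapter.get_available_models()
    #     # e.g. [{'id': 'openai/gpt-4o', 'name': 'OpenAI GPT-4o'}, ...]
    #     model_id = models[0]['id']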

    def generate_rule(
        self,
        model: str,
        rule_type: str,
        description: str,
        content: str,
        parameters: Optional[Dict[str, Any]] = None
    ) -> str:
        """Generate a Cursor Rule using OpenRouter.

        Args:
            model: The OpenRouter model ID to use
            rule_type: The type of rule to generate
            description: A short description of the rule's purpose
            content: The main content of the rule
            parameters: Additional parameters for rule generation

        Returns:
            str: The generated rule in MDC format
        """
        if not self.initialized:
            raise ValueError("Adapter not initialized. Call initialize() first.")

        if parameters is None:
            parameters = {}

        temperature = parameters.get('temperature', Settings.DEFAULT_TEMPERATURE)
        globs = parameters.get('globs', '')
        referenced_files = parameters.get('referenced_files', '')
        prompt = parameters.get('prompt', '')

        system_prompt = """
        You are a Cursor Rules expert. Create a rule in MDC format based on the provided information.

        MDC format example:
        ---
        description: RPC Service boilerplate
        globs:
        alwaysApply: false
        ---

        - Use our internal RPC pattern when defining services
        - Always use snake_case for service names.

        @service-template.ts
        """

        user_prompt = f"""
        Create a Cursor Rule with the following details:

        Rule Type: {rule_type}
        Description: {description}
        Content: {content}
        """

        if globs:
            user_prompt += f"\nGlobs: {globs}"

        if referenced_files:
            user_prompt += f"\nReferenced Files: {referenced_files}"

        if prompt:
            user_prompt += f"\nAdditional Instructions: {prompt}"

        url = f"{self.api_url}/chat/completions"
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
            "HTTP-Referer": self.site_url,
            "X-Title": self.site_name
        }

        payload = {
            "model": model,
            "messages": [
                {
                    "role": "system",
                    "content": system_prompt
                },
                {
                    "role": "user",
                    "content": user_prompt
                }
            ],
            "temperature": temperature,
            "max_tokens": 2048
        }

        try:
            response = requests.post(url, headers=headers, json=payload)

            if response.status_code != 200:
                raise ValueError(f"Failed to generate rule: {response.text}")

            data = response.json()

            choices = data.get('choices') or [{}]
            generated_text = choices[0].get('message', {}).get('content', '')

            # Fall back to a locally constructed rule if the API returned nothing.
            if not generated_text:
                return self._create_basic_rule(rule_type, description, content, globs, referenced_files)

            return generated_text
        except Exception:
            # On any request or parsing error, fall back to a locally constructed rule.
            return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
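
    # An illustrative generate_rule call with a parameters dict; every key is
    # optional and the values shown are placeholders:
    #
    #     rule = adapter.generate_rule(
    #         model="openai/gpt-4o",
    #         rule_type="Auto Attached",
    #         description="RPC service conventions",
    #         content="- Use our internal RPC pattern when defining services",
    #         parameters={
    #             'temperature': 0.7,
    #             'globs': '*.service.ts',
    #             'referenced_files': '@service-template.ts',
    #             'prompt': 'Keep the rule concise.',
    #         },
    #     )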

    def _create_basic_rule(
        self,
        rule_type: str,
        description: str,
        content: str,
        globs: str = '',
        referenced_files: str = ''
    ) -> str:
        """Create a basic rule in MDC format without using the LLM.

        Args:
            rule_type: The type of rule
            description: The rule description
            content: The rule content
            globs: Glob patterns for Auto Attached rules
            referenced_files: Referenced files

        Returns:
            str: The rule in MDC format
        """
        mdc = '---\n'
        mdc += f'description: {description}\n'

        if rule_type == 'Auto Attached' and globs:
            mdc += f'globs: {globs}\n'

        if rule_type == 'Always':
            mdc += 'alwaysApply: true\n'
        else:
            mdc += 'alwaysApply: false\n'

        mdc += '---\n\n'
        mdc += content + '\n'

        if referenced_files:
            mdc += '\n' + referenced_files

        return mdc
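
    # For illustration, an 'Always' rule built by this fallback helper would
    # render as (description and content are placeholders):
    #
    #     ---
    #     description: Enforce service naming conventions
    #     alwaysApply: true
    #     ---
    #
    #     - Always use snake_case for service names.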