"""
Cursor Rules Generator - Hugging Face Spaces App
This module implements the Gradio interface for Hugging Face Spaces deployment.
All code is self-contained in this file to avoid import issues.
"""
import os
import re
import tempfile
import traceback
import requests
import gradio as gr
from dotenv import load_dotenv
from abc import ABC, abstractmethod
from typing import Dict, List, Optional, Any
# Load environment variables
load_dotenv()
# Configuration settings
class Settings:
"""Application settings."""
# Application settings
APP_NAME = "Cursor Rules Generator"
DEBUG = os.getenv("DEBUG", "False").lower() == "true"
# API keys
GEMINI_API_KEY = os.getenv("GEMINI_API_KEY", "")
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY", "")
OPENROUTER_API_KEY = os.getenv("OPENROUTER_API_KEY", "")
# Default settings
DEFAULT_PROVIDER = os.getenv("DEFAULT_PROVIDER", "gemini")
DEFAULT_RULE_TYPE = os.getenv("DEFAULT_RULE_TYPE", "Always")
# LLM provider settings
GEMINI_API_URL = "https://generativelanguage.googleapis.com/v1beta"
OPENAI_API_URL = "https://api.openai.com/v1"
OPENROUTER_API_URL = "https://openrouter.ai/api/v1"
# LLM model settings
DEFAULT_GEMINI_MODEL = os.getenv("DEFAULT_GEMINI_MODEL", "gemini-2.0-flash")
DEFAULT_OPENAI_MODEL = os.getenv("DEFAULT_OPENAI_MODEL", "gpt-4o")
DEFAULT_OPENROUTER_MODEL = os.getenv("DEFAULT_OPENROUTER_MODEL", "openai/gpt-4o")
# Rule generation settings
MAX_RULE_LENGTH = int(os.getenv("MAX_RULE_LENGTH", "10000"))
DEFAULT_TEMPERATURE = float(os.getenv("DEFAULT_TEMPERATURE", "0.7"))
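# Example .env for local runs (illustrative values only; real keys are
# supplied by the user at runtime and must never be committed):
#
#   DEBUG=true
#   DEFAULT_PROVIDER=openai
#   DEFAULT_OPENAI_MODEL=gpt-4o
#   MAX_RULE_LENGTH=10000
#   DEFAULT_TEMPERATURE=0.7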
# LLM Adapter Interface
class LLMAdapter(ABC):
"""Base adapter interface for LLM providers."""
@abstractmethod
def initialize(self, api_key: str, **kwargs) -> None:
"""Initialize the adapter with API key and optional parameters."""
pass
@abstractmethod
def validate_api_key(self, api_key: str) -> bool:
"""Validate the API key."""
pass
@abstractmethod
def get_available_models(self) -> List[Dict[str, str]]:
"""Get a list of available models from the provider."""
pass
@abstractmethod
def generate_rule(
self,
model: str,
rule_type: str,
description: str,
content: str,
parameters: Optional[Dict[str, Any]] = None
) -> str:
"""Generate a Cursor Rule using the LLM provider."""
pass
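# Typical adapter lifecycle (illustrative sketch, assuming a valid key is
# available in Settings.GEMINI_API_KEY; not executed at import time):
#
#   adapter = GeminiAdapter()
#   if adapter.validate_api_key(Settings.GEMINI_API_KEY):
#       adapter.initialize(Settings.GEMINI_API_KEY)
#       models = adapter.get_available_models()
#       rule = adapter.generate_rule(models[0]['id'], 'Always',
#                                    'My rule', '- Rule body')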
# Gemini Adapter
class GeminiAdapter(LLMAdapter):
"""Adapter for Google's Gemini API."""
def __init__(self):
"""Initialize the Gemini adapter."""
self.api_key = None
self.api_url = Settings.GEMINI_API_URL
self.initialized = False
self.last_error = None
def initialize(self, api_key: str, **kwargs) -> None:
"""Initialize the adapter with API key and optional parameters."""
self.api_key = api_key
self.api_url = kwargs.get('api_url', Settings.GEMINI_API_URL)
self.initialized = True
def validate_api_key(self, api_key: str) -> bool:
"""Validate the Gemini API key."""
try:
# Try to list models with the provided API key
url = f"{self.api_url}/models?key={api_key}"
            response = requests.get(url, timeout=30)
# Check if the request was successful
if response.status_code == 200:
return True
# Store error details for debugging
self.last_error = f"API Error: Status {response.status_code}, Response: {response.text}"
print(f"Gemini API validation failed: {self.last_error}")
return False
except Exception as e:
# Store exception details for debugging
self.last_error = f"Exception: {str(e)}\n{traceback.format_exc()}"
print(f"Gemini API validation exception: {self.last_error}")
return False
def get_available_models(self) -> List[Dict[str, str]]:
"""Get a list of available Gemini models."""
if not self.initialized:
raise ValueError("Adapter not initialized. Call initialize() first.")
try:
# Get available models
url = f"{self.api_url}/models?key={self.api_key}"
            response = requests.get(url, timeout=30)
if response.status_code != 200:
print(f"Failed to get models: Status {response.status_code}, Response: {response.text}")
raise ValueError(f"Failed to get models: {response.text}")
data = response.json()
# Filter for Gemini models and format the response
models = []
for model in data.get('models', []):
if 'gemini' in model.get('name', '').lower():
model_id = model.get('name').split('/')[-1]
models.append({
'id': model_id,
'name': self._format_model_name(model_id)
})
# If no models found, return default models
if not models:
models = [
{'id': 'gemini-2.5-pro', 'name': 'Gemini 2.5 Pro'},
{'id': 'gemini-2.0-flash', 'name': 'Gemini 2.0 Flash'},
{'id': 'gemini-2.0-flash-lite', 'name': 'Gemini 2.0 Flash-Lite'}
]
return models
except Exception as e:
print(f"Exception in get_available_models: {str(e)}\n{traceback.format_exc()}")
# Return default models on error
return [
{'id': 'gemini-2.5-pro', 'name': 'Gemini 2.5 Pro'},
{'id': 'gemini-2.0-flash', 'name': 'Gemini 2.0 Flash'},
{'id': 'gemini-2.0-flash-lite', 'name': 'Gemini 2.0 Flash-Lite'}
]
def generate_rule(
self,
model: str,
rule_type: str,
description: str,
content: str,
parameters: Optional[Dict[str, Any]] = None
) -> str:
"""Generate a Cursor Rule using Gemini."""
if not self.initialized:
raise ValueError("Adapter not initialized. Call initialize() first.")
# Set default parameters if not provided
if parameters is None:
parameters = {}
# Extract parameters
temperature = parameters.get('temperature', Settings.DEFAULT_TEMPERATURE)
globs = parameters.get('globs', '')
referenced_files = parameters.get('referenced_files', '')
prompt = parameters.get('prompt', '')
# Prepare the prompt for Gemini
system_prompt = """
You are a Cursor Rules expert. Create a rule in MDC format based on the provided information.
MDC format example:
---
description: RPC Service boilerplate
globs:
alwaysApply: false
---
- Use our internal RPC pattern when defining services
- Always use snake_case for service names.
@service-template.ts
"""
user_prompt = f"""
Create a Cursor Rule with the following details:
Rule Type: {rule_type}
Description: {description}
Content: {content}
"""
if globs:
user_prompt += f"\nGlobs: {globs}"
if referenced_files:
user_prompt += f"\nReferenced Files: {referenced_files}"
if prompt:
user_prompt += f"\nAdditional Instructions: {prompt}"
# Prepare the API request
url = f"{self.api_url}/models/{model}:generateContent?key={self.api_key}"
payload = {
"contents": [
{
"role": "user",
"parts": [
{"text": system_prompt + "\n\n" + user_prompt}
]
}
],
"generationConfig": {
"temperature": temperature,
"topP": 0.8,
"topK": 40,
"maxOutputTokens": 2048
}
}
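        # The payload follows the generativelanguage v1beta generateContent
        # REST shape: role-tagged "contents" holding text "parts", plus a
        # "generationConfig" block that controls sampling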
# Make the API request
try:
            response = requests.post(url, json=payload, timeout=60)
if response.status_code != 200:
print(f"Failed to generate rule: Status {response.status_code}, Response: {response.text}")
raise ValueError(f"Failed to generate rule: {response.text}")
data = response.json()
# Extract the generated text
generated_text = data.get('candidates', [{}])[0].get('content', {}).get('parts', [{}])[0].get('text', '')
# If no text was generated, create a basic rule
if not generated_text:
return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
return generated_text
except Exception as e:
print(f"Exception in generate_rule: {str(e)}\n{traceback.format_exc()}")
# Create a basic rule on error
return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
    def _format_model_name(self, model_id: str) -> str:
        """Format a model ID into a human-readable name, e.g. 'gemini-2.0-flash' -> 'Gemini 2.0 Flash'."""
        # Replace hyphens with spaces and capitalize each word
        return model_id.replace('-', ' ').title()
def _create_basic_rule(
self,
rule_type: str,
description: str,
content: str,
globs: str = '',
referenced_files: str = ''
) -> str:
"""Create a basic rule in MDC format without using the LLM."""
# Create MDC format
mdc = '---\n'
mdc += f'description: {description}\n'
if rule_type == 'Auto Attached' and globs:
mdc += f'globs: {globs}\n'
if rule_type == 'Always':
mdc += 'alwaysApply: true\n'
else:
mdc += 'alwaysApply: false\n'
mdc += '---\n\n'
mdc += content + '\n'
# Add referenced files
if referenced_files:
mdc += '\n' + referenced_files
return mdc
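# For reference, _create_basic_rule('Auto Attached', 'API conventions',
# '- Use REST', globs='src/**/*.ts', referenced_files='@api-template.ts')
# produces the following MDC document:
#
#   ---
#   description: API conventions
#   globs: src/**/*.ts
#   alwaysApply: false
#   ---
#
#   - Use REST
#
#   @api-template.ts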
# OpenAI Adapter
class OpenAIAdapter(LLMAdapter):
"""Adapter for OpenAI API."""
def __init__(self):
"""Initialize the OpenAI adapter."""
self.api_key = None
self.api_url = Settings.OPENAI_API_URL
self.initialized = False
self.last_error = None
def initialize(self, api_key: str, **kwargs) -> None:
"""Initialize the adapter with API key and optional parameters."""
self.api_key = api_key
self.api_url = kwargs.get('api_url', Settings.OPENAI_API_URL)
self.initialized = True
def validate_api_key(self, api_key: str) -> bool:
"""Validate the OpenAI API key."""
try:
# Try to list models with the provided API key
url = f"{self.api_url}/models"
headers = {
"Authorization": f"Bearer {api_key}"
}
            response = requests.get(url, headers=headers, timeout=30)
# Check if the request was successful
if response.status_code == 200:
return True
# Store error details for debugging
self.last_error = f"API Error: Status {response.status_code}, Response: {response.text}"
print(f"OpenAI API validation failed: {self.last_error}")
return False
except Exception as e:
# Store exception details for debugging
self.last_error = f"Exception: {str(e)}\n{traceback.format_exc()}"
print(f"OpenAI API validation exception: {self.last_error}")
return False
def get_available_models(self) -> List[Dict[str, str]]:
"""Get a list of available OpenAI models."""
if not self.initialized:
raise ValueError("Adapter not initialized. Call initialize() first.")
try:
# Get available models
url = f"{self.api_url}/models"
headers = {
"Authorization": f"Bearer {self.api_key}"
}
            response = requests.get(url, headers=headers, timeout=30)
if response.status_code != 200:
print(f"Failed to get models: Status {response.status_code}, Response: {response.text}")
raise ValueError(f"Failed to get models: {response.text}")
data = response.json()
# Filter for chat models and format the response
models = []
for model in data.get('data', []):
model_id = model.get('id')
if any(prefix in model_id for prefix in ['gpt-4', 'gpt-3.5']):
models.append({
'id': model_id,
'name': self._format_model_name(model_id)
})
# If no models found, return default models
if not models:
models = [
{'id': 'gpt-4o', 'name': 'GPT-4o'},
{'id': 'gpt-4-turbo', 'name': 'GPT-4 Turbo'},
{'id': 'gpt-3.5-turbo', 'name': 'GPT-3.5 Turbo'}
]
return models
except Exception as e:
print(f"Exception in get_available_models: {str(e)}\n{traceback.format_exc()}")
# Return default models on error
return [
{'id': 'gpt-4o', 'name': 'GPT-4o'},
{'id': 'gpt-4-turbo', 'name': 'GPT-4 Turbo'},
{'id': 'gpt-3.5-turbo', 'name': 'GPT-3.5 Turbo'}
]
def generate_rule(
self,
model: str,
rule_type: str,
description: str,
content: str,
parameters: Optional[Dict[str, Any]] = None
) -> str:
"""Generate a Cursor Rule using OpenAI."""
if not self.initialized:
raise ValueError("Adapter not initialized. Call initialize() first.")
# Set default parameters if not provided
if parameters is None:
parameters = {}
# Extract parameters
temperature = parameters.get('temperature', Settings.DEFAULT_TEMPERATURE)
globs = parameters.get('globs', '')
referenced_files = parameters.get('referenced_files', '')
prompt = parameters.get('prompt', '')
# Prepare the prompt for OpenAI
system_prompt = """
You are a Cursor Rules expert. Create a rule in MDC format based on the provided information.
MDC format example:
---
description: RPC Service boilerplate
globs:
alwaysApply: false
---
- Use our internal RPC pattern when defining services
- Always use snake_case for service names.
@service-template.ts
"""
user_prompt = f"""
Create a Cursor Rule with the following details:
Rule Type: {rule_type}
Description: {description}
Content: {content}
"""
if globs:
user_prompt += f"\nGlobs: {globs}"
if referenced_files:
user_prompt += f"\nReferenced Files: {referenced_files}"
if prompt:
user_prompt += f"\nAdditional Instructions: {prompt}"
# Prepare the API request
url = f"{self.api_url}/chat/completions"
headers = {
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json"
}
payload = {
"model": model,
"messages": [
{
"role": "system",
"content": system_prompt
},
{
"role": "user",
"content": user_prompt
}
],
"temperature": temperature,
"max_tokens": 2048
}
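        # Standard Chat Completions request body: a system message carrying
        # the MDC instructions plus a user message with the rule details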
# Make the API request
try:
            response = requests.post(url, headers=headers, json=payload, timeout=60)
if response.status_code != 200:
print(f"Failed to generate rule: Status {response.status_code}, Response: {response.text}")
raise ValueError(f"Failed to generate rule: {response.text}")
data = response.json()
# Extract the generated text
generated_text = data.get('choices', [{}])[0].get('message', {}).get('content', '')
# If no text was generated, create a basic rule
if not generated_text:
return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
return generated_text
except Exception as e:
print(f"Exception in generate_rule: {str(e)}\n{traceback.format_exc()}")
# Create a basic rule on error
return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
    def _format_model_name(self, model_id: str) -> str:
        """Format a model ID into a human-readable name, e.g. 'gpt-4-turbo' -> 'GPT-4 Turbo'."""
        # Replace hyphens with spaces and capitalize each word
        name = model_id.replace('-', ' ').title()
        # Normalize the GPT prefix ('Gpt 4' -> 'GPT-4') and lowercase a
        # trailing variant letter after a digit ('GPT-4O' -> 'GPT-4o')
        name = re.sub(r'\bGpt (\d)', r'GPT-\1', name)
        name = re.sub(r'(\d)O\b', r'\1o', name)
        return name
def _create_basic_rule(
self,
rule_type: str,
description: str,
content: str,
globs: str = '',
referenced_files: str = ''
) -> str:
"""Create a basic rule in MDC format without using the LLM."""
# Create MDC format
mdc = '---\n'
mdc += f'description: {description}\n'
if rule_type == 'Auto Attached' and globs:
mdc += f'globs: {globs}\n'
if rule_type == 'Always':
mdc += 'alwaysApply: true\n'
else:
mdc += 'alwaysApply: false\n'
mdc += '---\n\n'
mdc += content + '\n'
# Add referenced files
if referenced_files:
mdc += '\n' + referenced_files
return mdc
# OpenRouter Adapter
class OpenRouterAdapter(LLMAdapter):
"""Adapter for OpenRouter API."""
def __init__(self):
"""Initialize the OpenRouter adapter."""
self.api_key = None
self.api_url = Settings.OPENROUTER_API_URL
self.initialized = False
self.last_error = None
def initialize(self, api_key: str, **kwargs) -> None:
"""Initialize the adapter with API key and optional parameters."""
self.api_key = api_key
self.api_url = kwargs.get('api_url', Settings.OPENROUTER_API_URL)
self.site_url = kwargs.get('site_url', 'https://cursor-rules-generator.example.com')
self.site_name = kwargs.get('site_name', 'Cursor Rules Generator')
self.initialized = True
def validate_api_key(self, api_key: str) -> bool:
"""Validate the OpenRouter API key."""
try:
# Try to list models with the provided API key
url = f"{self.api_url}/models"
headers = {
"Authorization": f"Bearer {api_key}"
}
            response = requests.get(url, headers=headers, timeout=30)
# Check if the request was successful
if response.status_code == 200:
return True
# Store error details for debugging
self.last_error = f"API Error: Status {response.status_code}, Response: {response.text}"
print(f"OpenRouter API validation failed: {self.last_error}")
return False
except Exception as e:
# Store exception details for debugging
self.last_error = f"Exception: {str(e)}\n{traceback.format_exc()}"
print(f"OpenRouter API validation exception: {self.last_error}")
return False
def get_available_models(self) -> List[Dict[str, str]]:
"""Get a list of available OpenRouter models."""
if not self.initialized:
raise ValueError("Adapter not initialized. Call initialize() first.")
try:
# Get available models
url = f"{self.api_url}/models"
headers = {
"Authorization": f"Bearer {self.api_key}"
}
            response = requests.get(url, headers=headers, timeout=30)
if response.status_code != 200:
print(f"Failed to get models: Status {response.status_code}, Response: {response.text}")
raise ValueError(f"Failed to get models: {response.text}")
data = response.json()
# Format the response
models = []
            for model in data.get('data', []):
                model_id = model.get('id')
                model_name = model.get('name', model_id)
                # All OpenRouter models listed here are served through the
                # chat/completions endpoint, so no capability filtering is
                # needed (the API does not expose a 'capabilities' field)
                models.append({
                    'id': model_id,
                    'name': model_name
                })
# If no models found, return default models
if not models:
models = [
{'id': 'openai/gpt-4o', 'name': 'OpenAI GPT-4o'},
{'id': 'anthropic/claude-3-opus', 'name': 'Anthropic Claude 3 Opus'},
{'id': 'google/gemini-2.5-pro', 'name': 'Google Gemini 2.5 Pro'},
{'id': 'meta-llama/llama-3-70b-instruct', 'name': 'Meta Llama 3 70B'}
]
return models
except Exception as e:
print(f"Exception in get_available_models: {str(e)}\n{traceback.format_exc()}")
# Return default models on error
return [
{'id': 'openai/gpt-4o', 'name': 'OpenAI GPT-4o'},
{'id': 'anthropic/claude-3-opus', 'name': 'Anthropic Claude 3 Opus'},
{'id': 'google/gemini-2.5-pro', 'name': 'Google Gemini 2.5 Pro'},
{'id': 'meta-llama/llama-3-70b-instruct', 'name': 'Meta Llama 3 70B'}
]
def generate_rule(
self,
model: str,
rule_type: str,
description: str,
content: str,
parameters: Optional[Dict[str, Any]] = None
) -> str:
"""Generate a Cursor Rule using OpenRouter."""
if not self.initialized:
raise ValueError("Adapter not initialized. Call initialize() first.")
# Set default parameters if not provided
if parameters is None:
parameters = {}
# Extract parameters
temperature = parameters.get('temperature', Settings.DEFAULT_TEMPERATURE)
globs = parameters.get('globs', '')
referenced_files = parameters.get('referenced_files', '')
prompt = parameters.get('prompt', '')
# Prepare the prompt for OpenRouter
system_prompt = """
You are a Cursor Rules expert. Create a rule in MDC format based on the provided information.
MDC format example:
---
description: RPC Service boilerplate
globs:
alwaysApply: false
---
- Use our internal RPC pattern when defining services
- Always use snake_case for service names.
@service-template.ts
"""
user_prompt = f"""
Create a Cursor Rule with the following details:
Rule Type: {rule_type}
Description: {description}
Content: {content}
"""
if globs:
user_prompt += f"\nGlobs: {globs}"
if referenced_files:
user_prompt += f"\nReferenced Files: {referenced_files}"
if prompt:
user_prompt += f"\nAdditional Instructions: {prompt}"
# Prepare the API request
url = f"{self.api_url}/chat/completions"
headers = {
"Authorization": f"Bearer {self.api_key}",
"Content-Type": "application/json",
"HTTP-Referer": self.site_url,
"X-Title": self.site_name
}
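        # HTTP-Referer and X-Title are OpenRouter's optional attribution
        # headers, used to identify the calling app on openrouter.ai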
payload = {
"model": model,
"messages": [
{
"role": "system",
"content": system_prompt
},
{
"role": "user",
"content": user_prompt
}
],
"temperature": temperature,
"max_tokens": 2048
}
# Make the API request
try:
            response = requests.post(url, headers=headers, json=payload, timeout=60)
if response.status_code != 200:
print(f"Failed to generate rule: Status {response.status_code}, Response: {response.text}")
raise ValueError(f"Failed to generate rule: {response.text}")
data = response.json()
# Extract the generated text
generated_text = data.get('choices', [{}])[0].get('message', {}).get('content', '')
# If no text was generated, create a basic rule
if not generated_text:
return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
return generated_text
except Exception as e:
print(f"Exception in generate_rule: {str(e)}\n{traceback.format_exc()}")
# Create a basic rule on error
return self._create_basic_rule(rule_type, description, content, globs, referenced_files)
def _create_basic_rule(
self,
rule_type: str,
description: str,
content: str,
globs: str = '',
referenced_files: str = ''
) -> str:
"""Create a basic rule in MDC format without using the LLM."""
# Create MDC format
mdc = '---\n'
mdc += f'description: {description}\n'
if rule_type == 'Auto Attached' and globs:
mdc += f'globs: {globs}\n'
if rule_type == 'Always':
mdc += 'alwaysApply: true\n'
else:
mdc += 'alwaysApply: false\n'
mdc += '---\n\n'
mdc += content + '\n'
# Add referenced files
if referenced_files:
mdc += '\n' + referenced_files
return mdc
# LLM Adapter Factory
class LLMAdapterFactory:
"""Factory for creating LLM adapters."""
@staticmethod
def create_adapter(provider_name: str) -> LLMAdapter:
"""Create an adapter for the specified provider."""
provider_name = provider_name.lower()
if provider_name == "gemini":
return GeminiAdapter()
elif provider_name == "openai":
return OpenAIAdapter()
elif provider_name == "openrouter":
return OpenRouterAdapter()
else:
raise ValueError(f"Unsupported provider: {provider_name}")
@staticmethod
def get_supported_providers() -> Dict[str, str]:
"""Get a dictionary of supported providers."""
return {
"gemini": "Google Gemini",
"openai": "OpenAI",
"openrouter": "OpenRouter"
}
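# Factory usage (sketch):
#
#   adapter = LLMAdapterFactory.create_adapter('openai')   # -> OpenAIAdapter
#   LLMAdapterFactory.get_supported_providers()
#   # -> {'gemini': 'Google Gemini', 'openai': 'OpenAI', 'openrouter': 'OpenRouter'}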
# Rule Generator
class RuleGenerator:
"""Engine for generating Cursor Rules."""
def __init__(self):
"""Initialize the rule generator."""
self.factory = LLMAdapterFactory()
def create_rule(
self,
provider: str,
model: str,
rule_type: str,
description: str,
content: str,
api_key: str,
parameters: Optional[Dict[str, Any]] = None
) -> str:
"""Create a Cursor Rule using the specified LLM provider."""
# Set default parameters if not provided
if parameters is None:
parameters = {}
try:
# Create and initialize the adapter
adapter = self.factory.create_adapter(provider)
adapter.initialize(api_key)
# Generate the rule using the adapter
rule = adapter.generate_rule(model, rule_type, description, content, parameters)
return rule
except Exception as e:
print(f"Exception in create_rule: {str(e)}\n{traceback.format_exc()}")
# If LLM generation fails, create a basic rule
return self._create_basic_rule(rule_type, description, content, parameters)
def _create_basic_rule(
self,
rule_type: str,
description: str,
content: str,
parameters: Optional[Dict[str, Any]] = None
) -> str:
"""Create a basic rule in MDC format without using an LLM."""
# Set default parameters if not provided
if parameters is None:
parameters = {}
# Extract parameters
globs = parameters.get('globs', '')
referenced_files = parameters.get('referenced_files', '')
# Create MDC format
mdc = '---\n'
mdc += f'description: {description}\n'
if rule_type == 'Auto Attached' and globs:
mdc += f'globs: {globs}\n'
if rule_type == 'Always':
mdc += 'alwaysApply: true\n'
else:
mdc += 'alwaysApply: false\n'
mdc += '---\n\n'
mdc += content + '\n'
# Add referenced files
if referenced_files:
mdc += '\n' + referenced_files
return mdc
def validate_rule_type(self, rule_type: str) -> bool:
"""Validate if the rule type is supported."""
valid_types = ['Always', 'Auto Attached', 'Agent Requested', 'Manual']
return rule_type in valid_types
def get_rule_types(self) -> List[Dict[str, str]]:
"""Get a list of supported rule types."""
return [
{
'id': 'Always',
'name': 'Always',
'description': 'Always included in the model context'
},
{
'id': 'Auto Attached',
'name': 'Auto Attached',
'description': 'Included when files matching glob patterns are referenced'
},
{
'id': 'Agent Requested',
'name': 'Agent Requested',
'description': 'Rule is presented to the AI, which decides whether to include it'
},
{
'id': 'Manual',
'name': 'Manual',
'description': 'Only included when explicitly referenced using @ruleName'
}
]
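# End-to-end usage (sketch; not executed here because it performs a live
# API call with whatever key is configured):
#
#   generator = RuleGenerator()
#   rule = generator.create_rule(
#       provider='gemini',
#       model='gemini-2.0-flash',
#       rule_type='Auto Attached',
#       description='TypeScript service conventions',
#       content='- Use our internal RPC pattern',
#       api_key=Settings.GEMINI_API_KEY,
#       parameters={'globs': 'src/**/*.ts', 'temperature': 0.5},
#   )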
# Initialize components
rule_generator = RuleGenerator()
factory = LLMAdapterFactory()
# Get supported providers
providers = factory.get_supported_providers()
provider_choices = list(providers.keys())
# Get rule types
rule_types = rule_generator.get_rule_types()
rule_type_choices = [rt['id'] for rt in rule_types]
def validate_api_key(provider, api_key):
    """Validate an API key for a specific provider.
    Args:
        provider: The LLM provider
        api_key: The API key to validate
    Returns:
        tuple: (message, model_dropdown_update, name_to_id_map)
    """
    if not provider or not api_key:
        return "Please select a provider and enter an API key.", gr.Dropdown.update(), {}
    try:
        # Create the adapter and validate the key (the key itself is never logged)
        adapter = factory.create_adapter(provider)
        if adapter.validate_api_key(api_key):
            # Initialize the adapter and fetch its models
            adapter.initialize(api_key)
            models = adapter.get_available_models()
            name_to_id = {model['name']: model['id'] for model in models}
            # Fall back to the static defaults if the provider returned nothing
            if not name_to_id:
                name_to_id = default_models.get(provider, {})
                print(f"Using default models: {list(name_to_id.keys())}")
            model_names = list(name_to_id.keys())
            print(f"Models found: {model_names}")
            return (
                "API key validated.",
                gr.Dropdown.update(choices=model_names, value=model_names[0] if model_names else None),
                name_to_id
            )
        error_msg = getattr(adapter, 'last_error', 'Unknown error')
        return f"Invalid API key. Error: {error_msg}", gr.Dropdown.update(), {}
    except Exception as e:
        print(f"Exception in validate_api_key: {str(e)}\n{traceback.format_exc()}")
        return f"Error: {str(e)}", gr.Dropdown.update(), {}
def generate_rule(provider, api_key, model_name, model_map, rule_type, description, content, globs, referenced_files, prompt, temperature):
    """Generate a Cursor Rule.
    Args:
        provider: The LLM provider
        api_key: The API key for the provider
        model_name: The display name selected in the model dropdown
        model_map: Mapping of model display names to model IDs
        rule_type: The type of rule to generate
        description: A short description of the rule's purpose
        content: The main content of the rule
        globs: Glob patterns for Auto Attached rules
        referenced_files: Referenced files
        prompt: Additional instructions for the LLM
        temperature: Temperature parameter for generation
    Returns:
        tuple: (message, rule)
    """
    if not provider or not api_key:
        return "Please select a provider and enter an API key.", ""
    if not model_name:
        return "Please select a model. If no model can be selected, re-validate the API key.", ""
    if not rule_type or not description or not content:
        return "Please fill in the rule type, description and content fields.", ""
    # Resolve the dropdown's display name to a model ID; fall back to the
    # name itself for models that are not in the map
    model = (model_map or {}).get(model_name, model_name)
    # Validate rule type
    if not rule_generator.validate_rule_type(rule_type):
        return f"Invalid rule type: {rule_type}", ""
    # Validate globs for the Auto Attached rule type
    if rule_type == 'Auto Attached' and not globs:
        return "Glob patterns are required for the Auto Attached rule type.", ""
    try:
        # Prepare parameters
        parameters = {
            'globs': globs,
            'referenced_files': referenced_files,
            'prompt': prompt,
            'temperature': float(temperature) if temperature is not None else Settings.DEFAULT_TEMPERATURE
        }
        # Generate the rule
        rule = rule_generator.create_rule(
            provider=provider,
            model=model,
            rule_type=rule_type,
            description=description,
            content=content,
            api_key=api_key,
            parameters=parameters
        )
        return "Rule generated successfully.", rule
    except Exception as e:
        print(f"Exception in generate_rule: {str(e)}\n{traceback.format_exc()}")
        return f"An error occurred while generating the rule: {str(e)}", ""
def update_rule_type_info(rule_type):
    """Update the rule type information.
    Args:
        rule_type: The selected rule type
    Returns:
        str: Information about the selected rule type
    """
    if rule_type == 'Always':
        return "Always included in the model context."
    elif rule_type == 'Auto Attached':
        return "Included when files matching the glob patterns are referenced."
    elif rule_type == 'Agent Requested':
        return "The rule is presented to the AI, which decides whether to include it."
    elif rule_type == 'Manual':
        return "Only included when explicitly referenced using @ruleName."
    else:
        return ""
def update_globs_visibility(rule_type):
    """Update the visibility of the globs input.
    Args:
        rule_type: The selected rule type
    Returns:
        A Gradio update toggling the globs textbox visibility
    """
    return gr.Textbox.update(visible=(rule_type == 'Auto Attached'))
# Create Gradio interface
with gr.Blocks(title="Cursor Rules Generator") as demo:
    gr.Markdown("# Cursor Rules Generator")
    gr.Markdown("A dynamic Cursor Rules generator supporting the Gemini, OpenRouter and OpenAI APIs and all of their models.")
with gr.Row():
with gr.Column():
            provider = gr.Dropdown(
                choices=provider_choices,
                label="LLM Provider",
                value=provider_choices[0] if provider_choices else None
            )
            api_key = gr.Textbox(
                label="API Key",
                placeholder="Enter your API key",
                type="password"
            )
            validate_btn = gr.Button("Validate API Key")
            api_status = gr.Textbox(
                label="API Status",
                interactive=False
            )
            # Default model choices for each provider (display name -> model ID)
            default_models = {
                "gemini": {"Gemini 2.5 Pro": "gemini-2.5-pro", "Gemini 2.0 Flash": "gemini-2.0-flash", "Gemini 2.0 Flash-Lite": "gemini-2.0-flash-lite"},
                "openai": {"GPT-4o": "gpt-4o", "GPT-4 Turbo": "gpt-4-turbo", "GPT-3.5 Turbo": "gpt-3.5-turbo"},
                "openrouter": {"OpenAI GPT-4o": "openai/gpt-4o", "Anthropic Claude 3 Opus": "anthropic/claude-3-opus", "Google Gemini 2.5 Pro": "google/gemini-2.5-pro"}
            }
            initial_provider = provider_choices[0] if provider_choices else "gemini"
            model_dropdown = gr.Dropdown(
                label="Model",
                choices=list(default_models.get(initial_provider, {}).keys()),
                interactive=True
            )
            # Hidden state mapping model display names to model IDs
            model_ids = gr.State(default_models.get(initial_provider, {}))
            rule_type = gr.Dropdown(
                choices=rule_type_choices,
                label="Rule Type",
                value=rule_type_choices[0] if rule_type_choices else None
            )
            rule_type_info = gr.Textbox(
                label="Rule Type Info",
                interactive=False,
                value=update_rule_type_info(rule_type_choices[0] if rule_type_choices else "")
            )
            description = gr.Textbox(
                label="Description",
                placeholder="A short description of the rule's purpose"
            )
            globs = gr.Textbox(
                label="Glob Patterns (for Auto Attached)",
                placeholder="E.g.: *.ts, src/*.js",
                visible=False
            )
            content = gr.Textbox(
                label="Rule Content",
                placeholder="The main content of the rule",
                lines=10
            )
            referenced_files = gr.Textbox(
                label="Referenced Files (optional)",
                placeholder="One file name per line, e.g.: @service-template.ts",
                lines=3
            )
            prompt = gr.Textbox(
                label="AI Prompt (optional)",
                placeholder="Give the AI custom instructions",
                lines=3
            )
            temperature = gr.Slider(
                label="Temperature",
                minimum=0.0,
                maximum=1.0,
                value=0.7,
                step=0.1
            )
            generate_btn = gr.Button("Generate Rule")
        with gr.Column():
            generation_status = gr.Textbox(
                label="Status",
                interactive=False
            )
            rule_output = gr.Textbox(
                label="Generated Rule",
                lines=20,
                interactive=False
            )
            download_btn = gr.Button("Download")
            download_file = gr.File(label="Rule File (.mdc)")
    # Provider change handler to update the model choices and the name-to-ID map
    def update_default_models(provider_value):
        provider_defaults = default_models.get(provider_value, {})
        model_names = list(provider_defaults.keys())
        return (
            gr.Dropdown.update(choices=model_names, value=model_names[0] if model_names else None),
            provider_defaults
        )
    provider.change(
        fn=update_default_models,
        inputs=[provider],
        outputs=[model_dropdown, model_ids]
    )
# API key validation
validate_btn.click(
fn=validate_api_key,
inputs=[provider, api_key],
outputs=[api_status, model_dropdown, model_ids]
)
# Rule type change
rule_type.change(
fn=update_rule_type_info,
inputs=[rule_type],
outputs=[rule_type_info]
)
rule_type.change(
fn=update_globs_visibility,
inputs=[rule_type],
outputs=[globs]
)
# Generate rule
generate_btn.click(
fn=generate_rule,
inputs=[
provider,
api_key,
model_dropdown,
model_ids,
rule_type,
description,
content,
globs,
referenced_files,
prompt,
temperature
],
outputs=[generation_status, rule_output]
)
    # Download rule: gr.File expects a file path, so the rule is written to a
    # temporary .mdc file whose path is returned
    def download_rule(rule, description):
        if not rule:
            return None
        # Create a file name from the description
        file_name = description.lower().replace(" ", "-").replace("/", "-")
        if not file_name:
            file_name = "cursor-rule"
        file_path = os.path.join(tempfile.gettempdir(), f"{file_name}.mdc")
        with open(file_path, "w", encoding="utf-8") as f:
            f.write(rule)
        return file_path
    download_btn.click(
        fn=download_rule,
        inputs=[rule_output, description],
        outputs=[download_file]
    )
# Launch the app
if __name__ == "__main__":
    demo.launch(
        server_name="0.0.0.0",
        server_port=int(os.environ.get("PORT", 7860))
    )