# LaunchLLM / backend_config.py
# Deploy LaunchLLM - Production AI Training Platform
# (upload header: Bmccloud22, commit ec8f374 verified)
"""
Backend API Key Configuration for AURA Platform
This file stores YOUR (the platform owner's) API keys that are used
to provide managed services to end users.
Users do NOT need to provide their own API keys - everything is billed
through their AURA wallet.
SECURITY: Keep this file secure! Never commit to public repos.
Add to .gitignore immediately.
"""
import os
from pathlib import Path
from dotenv import load_dotenv
# Load environment variables from .env.backend and .env.
# load_dotenv() does not override variables that are already set, so loading
# .env.backend first gives it precedence over .env, and both defer to any
# values already present in the process environment.
def _load_env_file(path):
    """Best-effort load of a dotenv file.

    A missing or malformed file must never prevent startup; in that case we
    simply fall through to the next file / the system environment variables.
    """
    if path.exists():
        try:
            load_dotenv(path)
        except Exception:  # narrow from bare `except:` so Ctrl-C still works
            pass


backend_env_path = Path(".env.backend")
main_env_path = Path(".env")

_load_env_file(backend_env_path)
_load_env_file(main_env_path)
# ============================================================================
# AURA PLATFORM API KEYS (Your Keys - Not User Keys)
# ============================================================================
class BackendConfig:
    """
    Platform-level API keys for managed services.
    These are YOUR keys that power the platform.
    """

    # HuggingFace - For model downloads (FREE account)
    # Get at: https://huggingface.co/settings/tokens
    # Prefer the AURA_-prefixed variable, then fall back to the common names.
    AURA_HUGGINGFACE_TOKEN = (
        os.getenv("AURA_HUGGINGFACE_TOKEN")
        or os.getenv("HUGGINGFACE_TOKEN")
        or os.getenv("HF_TOKEN")
        or ""
    )

    # OpenAI - For synthetic data generation (PAID account)
    # Get at: https://platform.openai.com/api-keys
    AURA_OPENAI_API_KEY = (
        os.getenv("AURA_OPENAI_API_KEY") or os.getenv("OPENAI_API_KEY") or ""
    )

    # Anthropic - Alternative for synthetic data (PAID account)
    # Get at: https://console.anthropic.com/settings/keys
    AURA_ANTHROPIC_API_KEY = (
        os.getenv("AURA_ANTHROPIC_API_KEY") or os.getenv("ANTHROPIC_API_KEY") or ""
    )

    # RunPod - For cloud GPU infrastructure (PAID account)
    # Get at: https://www.runpod.io/console/user/settings
    AURA_RUNPOD_API_KEY = (
        os.getenv("AURA_RUNPOD_API_KEY") or os.getenv("RUNPOD_API_KEY") or ""
    )

    # WandB - For experiment tracking (PAID team plan)
    # Get at: https://wandb.ai/authorize
    AURA_WANDB_API_KEY = os.getenv("AURA_WANDB_API_KEY", "")

    # ========================================================================
    # PRICING CONFIGURATION (What You Charge Users)
    # ========================================================================

    # Synthetic Data Pricing (per example generated)
    SYNTHETIC_DATA_COST_PER_EXAMPLE = 0.10  # $0.10 per example

    # Your actual costs (for margin calculation), assuming ~1500 tokens/example
    SYNTHETIC_DATA_ACTUAL_COST_GPT4 = 0.045  # GPT-4: ~$0.045/example
    SYNTHETIC_DATA_ACTUAL_COST_GPT35 = 0.003  # GPT-3.5: ~$0.003/example
    SYNTHETIC_DATA_ACTUAL_COST_CLAUDE = 0.024  # Claude Sonnet: ~$0.024/example

    # Default provider/model for synthetic data
    DEFAULT_SYNTHETIC_PROVIDER = "openai"  # or "anthropic"
    DEFAULT_SYNTHETIC_MODEL = "gpt-4"  # or "gpt-3.5-turbo" for lower cost

    # ========================================================================
    # VALIDATION
    # ========================================================================

    @classmethod
    def validate_keys(cls):
        """Map each service name to True when its API key is non-empty."""
        keys_by_service = {
            "huggingface": cls.AURA_HUGGINGFACE_TOKEN,
            "openai": cls.AURA_OPENAI_API_KEY,
            "anthropic": cls.AURA_ANTHROPIC_API_KEY,
            "runpod": cls.AURA_RUNPOD_API_KEY,
            "wandb": cls.AURA_WANDB_API_KEY,
        }
        return {service: bool(token) for service, token in keys_by_service.items()}

    @classmethod
    def get_missing_keys(cls):
        """Return the names of services whose API key is not configured."""
        return [service for service, ok in cls.validate_keys().items() if not ok]

    @classmethod
    def is_ready_for_production(cls):
        """True when every critical service (HuggingFace, OpenAI, RunPod) has a key."""
        status = cls.validate_keys()
        return all(status[service] for service in ("huggingface", "openai", "runpod"))

    @classmethod
    def get_status_report(cls):
        """Build a human-readable report of which API keys are configured."""
        status = cls.validate_keys()
        missing = cls.get_missing_keys()
        lines = ["AURA Backend API Key Status\n"]
        for service, configured in status.items():
            icon = "[OK]" if configured else "[MISS]"
            state = "Configured" if configured else "Missing"
            lines.append(f"{icon} {service.title()}: {state}")
        ready = "Yes" if cls.is_ready_for_production() else "No"
        lines.append(f"\nReady for Production: {ready}")
        if missing:
            lines.append(f"\nMissing Keys: {', '.join(missing)}")
            lines.append("\nAction Required:")
            lines.append("1. Add missing keys to .env file")
            lines.append("2. Restart the application")
        return "\n".join(lines) + "\n"
# ============================================================================
# HELPER FUNCTIONS
# ============================================================================
def get_backend_config():
    """Return a new BackendConfig instance."""
    config = BackendConfig()
    return config
def check_backend_status():
    """Print the backend API-key configuration report to stdout."""
    report = BackendConfig.get_status_report()
    print(report)
# ============================================================================
# USAGE INSTRUCTIONS
# ============================================================================
"""
SETUP INSTRUCTIONS:
1. Create a .env file in the project root:
AURA_HUGGINGFACE_TOKEN=hf_xxxxxxxxxxxx
AURA_OPENAI_API_KEY=sk-xxxxxxxxxxxx
AURA_ANTHROPIC_API_KEY=sk-ant-xxxxxxxxxxxx
AURA_RUNPOD_API_KEY=xxxxxxxxxxxx
AURA_WANDB_API_KEY=xxxxxxxxxxxx
2. Add .env to .gitignore:
echo ".env" >> .gitignore
3. Verify configuration:
python backend_config.py
4. Use in your code:
from backend_config import BackendConfig
# Use AURA's OpenAI key for synthetic data
client = OpenAI(api_key=BackendConfig.AURA_OPENAI_API_KEY)
SECURITY NOTES:
- Never hardcode API keys in this file
- Always use environment variables
- Keep .env file secure and never commit it
- Rotate keys regularly
- Use read-only tokens where possible (e.g., HuggingFace)
"""
if __name__ == "__main__":
    # Running this module directly prints the key-configuration status report.
    check_backend_status()