"""
S4 Custom Deepseek Text Node  
Author: S4MUEL
GitHub: https://github.com/S4MUEL-404/ComfyUI-S4API

Deepseek text generation node with custom API key input.
"""

import hashlib
import json
import os
import time
from typing import Optional

import requests


class S4TextWithDeepseek:
    """
    ComfyUI node that generates text through the Deepseek chat-completions API.

    Responses are cached on disk, keyed by an MD5 hash of every generation
    parameter, so re-running a workflow with identical inputs returns the
    cached text instead of repeating the API call.
    """

    # Fixed API configuration
    API_URL = "https://api.deepseek.com/v1/chat/completions"

    def __init__(self):
        # Cache directory sits one level above this file so it can be shared
        # by the whole package; create it eagerly so later writes can't fail
        # on a missing directory.
        self.cache_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), "cache")
        os.makedirs(self.cache_dir, exist_ok=True)

    @classmethod
    def INPUT_TYPES(cls):
        """Declare the ComfyUI input sockets/widgets for this node."""
        return {
            "required": {
                "system_prompt": (
                    "STRING",
                    {
                        "multiline": True,
                        "default": "",
                        "tooltip": "System instruction for the AI",
                    },
                ),
                "user_prompt": (
                    "STRING",
                    {
                        "multiline": True,
                        "default": "",
                        "tooltip": "User prompt for Deepseek",
                    },
                ),
                "api_key": (
                    "STRING",
                    {
                        "multiline": False,
                        "default": "",
                        "tooltip": "Your Deepseek API Key"
                    }
                ),
                "model": (
                    ["deepseek-chat", "deepseek-reasoner"],
                    {
                        "default": "deepseek-chat",
                        "tooltip": "Choose Deepseek model",
                    },
                ),
                "max_tokens": (
                    "INT",
                    {
                        "default": 2048,
                        "min": 1,
                        "max": 32768,
                        "step": 1,
                        "display": "number",
                        "tooltip": "Maximum number of tokens to generate",
                    },
                ),
                "temperature": (
                    "FLOAT",
                    {
                        "default": 0.7,
                        "min": 0.0,
                        "max": 2.0,
                        "step": 0.1,
                        "display": "number",
                        "tooltip": "Sampling temperature (0.0 = deterministic, 2.0 = very random)",
                    },
                ),
                "top_p": (
                    "FLOAT",
                    {
                        "default": 0.95,
                        "min": 0.0,
                        "max": 1.0,
                        "step": 0.05,
                        "display": "number",
                        "tooltip": "Top-p sampling threshold",
                    },
                ),
                "seed": (
                    "INT",
                    {
                        "default": 0,
                        "min": 0,
                        "max": 2147483647,
                        "step": 1,
                        "display": "number",
                        "control_after_generate": True,
                        "tooltip": "Random seed for reproducible results (32-bit limit for Deepseek API)",
                    },
                ),
            },
        }

    RETURN_TYPES = ("STRING",)
    RETURN_NAMES = ("text",)
    FUNCTION = "generate_text"
    CATEGORY = "💀PromptsO"
    DESCRIPTION = "Generates text using Deepseek API with custom API key."

    def validate_inputs(self, user_prompt: str, api_key: str) -> None:
        """Raise ValueError when the user prompt or API key is missing/blank."""
        if not user_prompt or not user_prompt.strip():
            raise ValueError("User prompt cannot be empty")
        if not api_key or not api_key.strip():
            raise ValueError("API key cannot be empty")

    def generate_cache_key(self, system_prompt: str, user_prompt: str, model: str,
                          max_tokens: int, temperature: float, top_p: float, seed: int) -> str:
        """
        Return a deterministic MD5 digest of all generation parameters.

        Prompts are stripped so incidental surrounding whitespace does not
        produce a different key. The API key is deliberately excluded so
        cache entries never depend on (or leak) credentials. The dict layout
        and sort_keys serialization must stay stable: changing them would
        invalidate every existing cache file.
        """
        cache_data = {
            "system_prompt": system_prompt.strip(),
            "user_prompt": user_prompt.strip(),
            "model": model,
            "max_tokens": max_tokens,
            "temperature": temperature,
            "top_p": top_p,
            "seed": seed
        }
        # sort_keys makes the JSON byte-stable across runs; MD5 is used only
        # as a fast fingerprint here, not for security.
        cache_str = json.dumps(cache_data, sort_keys=True)
        return hashlib.md5(cache_str.encode()).hexdigest()

    def load_from_cache(self, cache_key: str) -> Optional[str]:
        """Return the cached result for *cache_key*, or None on a miss."""
        cache_file = os.path.join(self.cache_dir, f"{cache_key}.json")
        try:
            with open(cache_file, 'r', encoding='utf-8') as f:
                return json.load(f).get("result")
        except FileNotFoundError:
            # Normal cache miss — not worth logging.
            return None
        except (OSError, ValueError) as e:
            # Corrupt or unreadable entry: treat as a miss but report it.
            # ValueError covers json.JSONDecodeError.
            print(f"⚠️ Failed to load cache: {e}")
            return None

    def save_to_cache(self, cache_key: str, result: str) -> None:
        """Persist *result* under *cache_key*; failures are non-fatal."""
        cache_file = os.path.join(self.cache_dir, f"{cache_key}.json")
        cache_data = {
            "result": result,
            # Unix timestamp of when the entry was written (replaces the
            # previous meaningless placeholder string). load_from_cache only
            # reads "result", so old entries remain readable.
            "timestamp": time.time(),
        }
        try:
            with open(cache_file, 'w', encoding='utf-8') as f:
                json.dump(cache_data, f, ensure_ascii=False, indent=2)
        except OSError as e:
            print(f"⚠️ Failed to save cache: {e}")

    def _build_messages(self, system_prompt: str, user_prompt: str) -> list:
        """Build the chat message list; the system message is optional."""
        messages = []
        if system_prompt and system_prompt.strip():
            messages.append({"role": "system", "content": system_prompt.strip()})
        messages.append({"role": "user", "content": user_prompt})
        return messages

    @staticmethod
    def _extract_error_message(response) -> str:
        """Pull the API's error message out of a non-200 response body.

        Falls back to the raw body text when the body is not JSON or the
        "error" field is not the expected object shape.
        """
        try:
            error = response.json().get("error")
        except ValueError:
            return response.text
        if isinstance(error, dict):
            return error.get("message", response.text)
        return response.text

    def generate_text(
        self,
        system_prompt,
        user_prompt,
        api_key,
        model="deepseek-chat",
        max_tokens=2048,
        temperature=0.7,
        top_p=0.95,
        seed=0,
    ):
        """
        Generate text with Deepseek, consulting the on-disk cache first.

        Returns a 1-tuple ``(text,)`` as ComfyUI expects.
        Raises ValueError for invalid inputs and RuntimeError (an Exception
        subclass, so existing callers still catch it) for API failures.
        """
        self.validate_inputs(user_prompt, api_key)

        cache_key = self.generate_cache_key(
            system_prompt, user_prompt, model, max_tokens, temperature, top_p, seed
        )

        # "is not None" rather than truthiness: a legitimately cached empty
        # response must still count as a hit, otherwise it would re-call the
        # API on every run.
        cached_result = self.load_from_cache(cache_key)
        if cached_result is not None:
            print(f"💾 Using cached result for seed {seed}")
            return (cached_result,)

        data = {
            "model": model,
            "messages": self._build_messages(system_prompt, user_prompt),
            "max_tokens": max_tokens,
            "temperature": temperature,
            "top_p": top_p,
            "stream": False
        }

        # Seed 0 is the UI's "unset" sentinel, so it is omitted from the
        # request for non-deterministic sampling.
        if seed > 0:
            data["seed"] = seed

        headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json"
        }

        print("🤖 Generating text with Deepseek...")
        print(f"   • Model: {model}")
        print(f"   • User prompt: {user_prompt[:50]}{'...' if len(user_prompt) > 50 else ''}")
        if system_prompt.strip():
            print(f"   • System prompt: {system_prompt[:30]}{'...' if len(system_prompt) > 30 else ''}")
        print(f"   • Max tokens: {max_tokens}")
        print(f"   • Temperature: {temperature}")
        print(f"   • Top-p: {top_p}")
        print(f"   • Seed: {seed}")
        print(f"   • Cache key: {cache_key[:8]}...")

        try:
            response = requests.post(
                self.API_URL,
                headers=headers,
                json=data,
                timeout=120
            )

            if response.status_code != 200:
                error_msg = self._extract_error_message(response)
                raise RuntimeError(f"API request failed with status {response.status_code}: {error_msg}")

            # Parse response
            result = response.json()

            if "choices" not in result or not result["choices"]:
                raise RuntimeError("No response generated from API")

            # Extract generated text
            generated_text = result["choices"][0]["message"]["content"]

            # Log usage information
            if "usage" in result:
                usage = result["usage"]
                print("📊 Token usage:")
                print(f"   • Prompt tokens: {usage.get('prompt_tokens', 0)}")
                print(f"   • Completion tokens: {usage.get('completion_tokens', 0)}")
                print(f"   • Total tokens: {usage.get('total_tokens', 0)}")

            print("✅ Text generated successfully!")
            print(f"   • Response length: {len(generated_text)} characters")

            # Cache the successful result for identical future runs.
            self.save_to_cache(cache_key, generated_text)

            return (generated_text,)

        except Exception as e:
            print(f"❌ Error generating text: {str(e)}")
            # Bare raise preserves the original traceback; "raise e" would
            # re-anchor the reported raise site to this line.
            raise