"""Text deduplication using SimHash algorithm"""

from typing import Dict, Any, Optional, Set
import hashlib
import re
import logging
from simhash import Simhash
import redis
from datetime import timedelta

from .base_cleaner import BaseCleaner

logger = logging.getLogger(__name__)


class DeduplicationFilter(BaseCleaner):
    """Filter for detecting and removing duplicate or near-duplicate content.

    Uses the SimHash algorithm: each document is reduced to a 64-bit
    fingerprint, and two documents are considered duplicates when the
    Hamming distance between their fingerprints is at most
    ``hamming_distance`` bits.

    Fingerprints are kept in an in-process set for the current session and,
    when a Redis server is reachable, also in Redis (with a TTL) so
    duplicates can be detected across runs.
    """

    def __init__(self, config: Optional[Dict[str, Any]] = None):
        """Initialize deduplication filter.

        Args:
            config: Configuration with options:
                - similarity_threshold: Threshold for considering duplicates (default: 0.85).
                  NOTE: currently informational only — the duplicate decision is
                  driven by ``hamming_distance``, not this value.
                - hamming_distance: Max hamming distance for duplicates (default: 3)
                - redis_url: Redis URL for fingerprint storage
                - redis_prefix: Prefix for Redis keys (default: 'simhash:')
                - ttl_days: TTL for fingerprints in days (default: 30)
                - min_text_length: Minimum text length to check (default: 50)
                - filter_duplicates: If True, mark duplicates as filtered (default: False)
        """
        super().__init__(config)
        self.similarity_threshold = self.config.get('similarity_threshold', 0.85)
        self.hamming_distance = self.config.get('hamming_distance', 3)
        self.redis_url = self.config.get('redis_url', 'redis://localhost:6379')
        self.redis_prefix = self.config.get('redis_prefix', 'simhash:')
        self.ttl_days = self.config.get('ttl_days', 30)
        self.min_text_length = self.config.get('min_text_length', 50)

        # Fingerprints seen during this session. Unbounded; callers can
        # reset it with clear_cache().
        self.local_fingerprints: Set[int] = set()

        # Redis client (lazy initialization). _redis_failed records a failed
        # connection attempt so we do not retry the TCP connect on every
        # single record once the server is known to be unreachable.
        self._redis_client = None
        self._redis_failed = False

        # Running counters, exposed via get_stats()
        self.stats = {
            'checked': 0,
            'duplicates': 0,
            'near_duplicates': 0
        }

    @property
    def redis_client(self):
        """Lazily create and cache the Redis client.

        Returns:
            A connected Redis client, or None when the server is
            unreachable (deduplication then falls back to the local
            in-process cache only). A failed connection is remembered and
            not retried on subsequent accesses.
        """
        if self._redis_client is None and not self._redis_failed:
            try:
                self._redis_client = redis.from_url(self.redis_url)
                self._redis_client.ping()
                logger.info("Connected to Redis for deduplication")
            except Exception as e:
                logger.warning(f"Redis connection failed, using local cache only: {e}")
                self._redis_client = None
                self._redis_failed = True
        return self._redis_client

    def clean(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Check for duplicate content and mark/filter as needed.

        Args:
            data: Input data dictionary (title/content/description/... fields)

        Returns:
            The same dictionary, annotated under ``cleaned_data`` with
            ``simhash``, ``is_duplicate`` and ``similarity``; additionally
            marked with ``_filtered``/``_filter_reason`` when
            ``filter_duplicates`` is enabled and the content is a duplicate.
        """
        # Extract main content for duplicate checking
        content_text = self._extract_content_text(data)

        # Too-short texts produce unreliable fingerprints; skip them.
        if not content_text or len(content_text) < self.min_text_length:
            return data

        # Generate SimHash fingerprint
        simhash_value = self._generate_simhash(content_text)

        # Compare against previously-seen fingerprints
        is_duplicate, similarity = self._check_duplicate(simhash_value, content_text)

        # Store fingerprint (even for duplicates, so the freshest URL wins)
        self._store_fingerprint(simhash_value, data.get('url', ''))

        # Add deduplication metadata
        if 'cleaned_data' not in data:
            data['cleaned_data'] = {}

        data['cleaned_data']['simhash'] = str(simhash_value)
        data['cleaned_data']['is_duplicate'] = is_duplicate
        data['cleaned_data']['similarity'] = similarity

        # Update statistics; 0.95 separates exact-ish duplicates from
        # near-duplicates (>= 0.95 similarity ~= hamming distance <= 3 on
        # a 64-bit hash)
        self.stats['checked'] += 1
        if is_duplicate:
            if similarity >= 0.95:
                self.stats['duplicates'] += 1
            else:
                self.stats['near_duplicates'] += 1

        # Optionally mark duplicates for downstream filtering
        if is_duplicate and self.config.get('filter_duplicates', False):
            logger.info(f"Filtering duplicate content (similarity: {similarity:.2f})")
            data['_filtered'] = True
            data['_filter_reason'] = f"Duplicate content (similarity: {similarity:.2f})"

        return data

    def _extract_content_text(self, data: Dict[str, Any]) -> str:
        """Extract main text content for duplicate checking.

        Concatenates title, content (string or dict with
        description/text/body), description and excerpt fields, in that
        order. A field may legitimately appear twice (e.g. a top-level
        ``description`` plus ``content['description']``).

        Args:
            data: Data dictionary

        Returns:
            Combined text content (possibly empty)
        """
        text_parts = []

        # Title
        if 'title' in data and data['title']:
            text_parts.append(str(data['title']))

        # Main content: either a plain string or a dict of sub-fields
        if 'content' in data and data['content']:
            if isinstance(data['content'], str):
                text_parts.append(data['content'])
            elif isinstance(data['content'], dict):
                for field in ('description', 'text', 'body'):
                    if field in data['content']:
                        text_parts.append(str(data['content'][field]))

        # Description
        if 'description' in data and data['description']:
            text_parts.append(str(data['description']))

        # Excerpt
        if 'excerpt' in data and data['excerpt']:
            text_parts.append(str(data['excerpt']))

        return ' '.join(text_parts)

    def _generate_simhash(self, text: str) -> int:
        """Generate the 64-bit SimHash fingerprint for *text*.

        Args:
            text: Text to hash

        Returns:
            SimHash value as integer
        """
        # Tokenize (word-based plus Chinese character bigrams)
        tokens = self._tokenize(text)

        # Simhash defaults to a 64-bit fingerprint, which the similarity
        # computation in _check_duplicate relies on.
        return Simhash(tokens).value

    def _tokenize(self, text: str) -> list:
        """Tokenize text for SimHash.

        Whitespace-split words, plus character bigrams for CJK runs so
        that unsegmented Chinese text still yields useful features.

        Args:
            text: Text to tokenize

        Returns:
            List of tokens
        """
        # Remove special characters and normalize case; keep word chars,
        # whitespace and the CJK Unified Ideographs range
        text = re.sub(r'[^\w\s\u4e00-\u9fff]', ' ', text.lower())

        # Word tokens
        tokens = text.split()

        # Character bigrams for each contiguous run of Chinese characters
        for chinese_text in re.findall(r'[\u4e00-\u9fff]+', text):
            tokens.extend(chinese_text[i:i + 2]
                          for i in range(len(chinese_text) - 1))

        return tokens

    def _check_duplicate(self, simhash_value: int, text: str) -> tuple[bool, float]:
        """Check whether *simhash_value* matches a stored fingerprint.

        Args:
            simhash_value: SimHash value to check
            text: Original text (unused; kept for interface compatibility)

        Returns:
            Tuple of (is_duplicate, similarity_score). Similarity is
            1 - hamming_distance/64 for a match, else 0.0.
        """
        # Check the in-process cache first (cheapest)
        for cached_hash in self.local_fingerprints:
            distance = self._hamming_distance(simhash_value, cached_hash)
            if distance <= self.hamming_distance:
                return True, 1.0 - (distance / 64.0)  # 64-bit hash

        # Then check Redis, if available
        client = self.redis_client
        if client:
            try:
                # SCAN instead of KEYS: cursor-based, does not block the
                # Redis server. Still a linear scan of fingerprints — a
                # production deployment should use a bucketed simhash index.
                url_key_prefix = f"{self.redis_prefix}url:"
                checked = 0
                for key in client.scan_iter(match=f"{self.redis_prefix}*"):
                    key_name = key.decode() if isinstance(key, bytes) else str(key)
                    # Skip the url: reverse-mapping keys written by
                    # _store_fingerprint — their values are URLs, not
                    # fingerprints, and int() on them would abort the scan.
                    if key_name.startswith(url_key_prefix):
                        continue

                    checked += 1
                    if checked > 1000:  # Limit to prevent overload
                        break

                    stored_hash = client.get(key)
                    if not stored_hash:
                        continue
                    try:
                        stored_value = int(stored_hash)
                    except (TypeError, ValueError):
                        # Foreign/corrupt value under our prefix; ignore it
                        # rather than aborting the whole duplicate check.
                        continue

                    distance = self._hamming_distance(simhash_value, stored_value)
                    if distance <= self.hamming_distance:
                        return True, 1.0 - (distance / 64.0)

            except Exception as e:
                logger.error(f"Error checking Redis for duplicates: {e}")

        return False, 0.0

    def _hamming_distance(self, hash1: int, hash2: int) -> int:
        """Calculate the Hamming distance between two hash values.

        Args:
            hash1: First hash value
            hash2: Second hash value

        Returns:
            Number of differing bits
        """
        # Popcount of the XOR; bin().count('1') runs at C speed.
        return bin(hash1 ^ hash2).count('1')

    def _store_fingerprint(self, simhash_value: int, url: str):
        """Store a fingerprint in the local cache and (if available) Redis.

        Args:
            simhash_value: SimHash value to store
            url: URL associated with this content. Keys are derived from
                the URL's MD5, so records with the same (or empty) URL
                overwrite each other's fingerprint.
        """
        # Add to local cache
        self.local_fingerprints.add(simhash_value)

        # Store in Redis if available
        client = self.redis_client
        if client:
            try:
                # Key derived from the URL hash (MD5 used as a cheap
                # non-cryptographic digest here)
                url_hash = hashlib.md5(url.encode()).hexdigest()[:8]
                ttl = timedelta(days=self.ttl_days)

                # Fingerprint, with TTL so stale entries expire
                client.setex(
                    f"{self.redis_prefix}{url_hash}",
                    ttl,
                    str(simhash_value)
                )

                # Reverse mapping (key -> URL) kept for debugging; it is
                # skipped during duplicate scans.
                client.setex(
                    f"{self.redis_prefix}url:{url_hash}",
                    ttl,
                    url
                )

            except Exception as e:
                logger.error(f"Error storing fingerprint in Redis: {e}")

    def clear_cache(self):
        """Clear the local (in-process) fingerprint cache.

        Redis-stored fingerprints are unaffected; they expire via TTL.
        """
        self.local_fingerprints.clear()
        logger.info("Local fingerprint cache cleared")

    def get_stats(self) -> Dict[str, Any]:
        """Get deduplication statistics.

        Returns:
            Base cleaner stats extended with a 'deduplication' counter dict
            (checked/duplicates/near_duplicates) and 'local_cache_size'.
        """
        stats = super().get_stats()
        stats['deduplication'] = self.stats.copy()
        stats['local_cache_size'] = len(self.local_fingerprints)
        return stats