"""
Base DAO classes for consistent data access patterns across all database types.
Provides common functionality and optimized operations.
"""
import asyncio
import logging
from abc import ABC, abstractmethod
from typing import Dict, List, Optional, Any, Union
from datetime import datetime

from client.es_client import ESClient
from client.redis_client import RedisClientManager
from utils.mongo_util import db

logger = logging.getLogger(__name__)


class BaseDAO(ABC):
    """Abstract base for all Data Access Objects.

    Supplies a per-subclass logger and a small helper so every concrete DAO
    logs operations in a uniform format.
    """

    def __init__(self):
        # One logger per concrete subclass so log lines carry the DAO's name.
        self._logger = logging.getLogger(type(self).__name__)

    def _log_operation(self, operation: str, details: str = ""):
        """Emit an info-level audit line for a DAO operation."""
        message = f"{operation}: {details}"
        self._logger.info(message)


class BaseESDAO(BaseDAO):
    """
    Base Elasticsearch DAO with optimized connection management and common operations.

    Subclasses may set a default ``index_name``; every operation also accepts an
    explicit ``index`` argument that takes precedence over the default.
    """

    def __init__(self, index_name: Optional[str] = None):
        super().__init__()
        self.index_name = index_name  # default target index for all operations
        self._es_client = None  # lazily created ESClient instance

    async def _get_client(self) -> ESClient:
        """Get or create the ES client instance (created lazily, then reused)."""
        if self._es_client is None:
            self._es_client = ESClient()
        return self._es_client

    async def _execute_with_client(self, operation):
        """Execute ``operation(es_client)`` inside the client's async context manager.

        Args:
            operation: async callable taking the ES client and returning a result.
        """
        async with await self._get_client() as es:
            return await operation(es)

    def _resolve_index(self, index: Optional[str]) -> str:
        """Return the explicit ``index`` or fall back to ``self.index_name``.

        Raises:
            ValueError: if neither an explicit index nor a default is set.
        """
        target_index = index or self.index_name
        if not target_index:
            raise ValueError("Index name must be provided")
        return target_index

    # Common Query Builders
    def build_bool_query(self, must: Optional[List[Dict]] = None,
                         must_not: Optional[List[Dict]] = None,
                         should: Optional[List[Dict]] = None,
                         minimum_should_match: int = 1) -> Dict:
        """Build standard bool query structure.

        ``minimum_should_match`` is only attached when ``should`` clauses are given.
        """
        query = {"bool": {}}

        if must:
            query["bool"]["must"] = must
        if must_not:
            query["bool"]["must_not"] = must_not
        if should:
            query["bool"]["should"] = should
            query["bool"]["minimum_should_match"] = minimum_should_match

        return query

    def build_time_range_query(self, field: str, start_time: str, end_time: str) -> Dict:
        """Build an inclusive (``gte``/``lte``) time range query."""
        return {
            "range": {
                field: {
                    "gte": start_time,
                    "lte": end_time
                }
            }
        }

    def build_terms_query(self, field: str, values: List[Any]) -> Dict:
        """Build terms query for multiple values."""
        return {"terms": {field: values}}

    def build_term_query(self, field: str, value: Any) -> Dict:
        """Build term query for exact match."""
        return {"term": {field: value}}

    # Optimized Operations
    async def bulk_index(self, docs: List[Dict], index: Optional[str] = None) -> Dict:
        """Optimized bulk indexing with error handling.

        Returns:
            Dict with ``indexed`` count and ``errors`` list; failures are logged
            and reported in the result rather than raised.
        Raises:
            ValueError: if no index name is available.
        """
        target_index = self._resolve_index(index)

        if not docs:
            return {"indexed": 0, "errors": []}

        async def _bulk_operation(es_client):
            try:
                await es_client.bulk_index(target_index, docs)
                self._log_operation(f"Bulk indexed {len(docs)} documents to {target_index}")
                return {"indexed": len(docs), "errors": []}
            except Exception as e:
                self._logger.error(f"Bulk index failed: {e}")
                return {"indexed": 0, "errors": [str(e)]}

        return await self._execute_with_client(_bulk_operation)

    async def bulk_update(self, updates: List[Dict], index: Optional[str] = None) -> Dict:
        """Bulk update operations.

        Each update dict supplies ``_id`` and the new field values either under
        ``_source`` or as the dict itself; entries without ``_id`` are skipped.

        Returns:
            Dict with ``updated`` count and ``errors`` list.
        Raises:
            ValueError: if no index name is available.
        """
        target_index = self._resolve_index(index)

        if not updates:
            return {"updated": 0, "errors": []}

        async def _bulk_update_operation(es_client):
            try:
                operations = []
                for update in updates:
                    doc_id = update.get("_id")
                    doc_body = update.get("_source", update)
                    if doc_id:
                        # Bulk API expects action line followed by the partial doc.
                        operations.append({"update": {"_index": target_index, "_id": doc_id}})
                        operations.append({"doc": doc_body})

                await es_client.client.bulk(operations=operations)
                self._log_operation(f"Bulk updated {len(updates)} documents in {target_index}")
                return {"updated": len(updates), "errors": []}
            except Exception as e:
                self._logger.error(f"Bulk update failed: {e}")
                return {"updated": 0, "errors": [str(e)]}

        return await self._execute_with_client(_bulk_update_operation)

    async def search_with_pagination(self, query: Dict, size: int = 100,
                                     from_: int = 0, index: Optional[str] = None) -> Dict:
        """Search with pagination support.

        The caller's ``query`` dict is left untouched: pagination keys are added
        to a shallow copy (the previous version wrote ``size``/``from`` into the
        caller's dict as a side effect).

        Raises:
            ValueError: if no index name is available.
        """
        target_index = self._resolve_index(index)

        paged_query = {**query, "size": size, "from": from_}

        async def _search_operation(es_client):
            return await es_client.search(target_index, paged_query)

        return await self._execute_with_client(_search_operation)

    async def count_documents(self, query: Dict, index: Optional[str] = None) -> int:
        """Count documents matching query.

        Accepts either a full search body (``{"query": ...}``) or a bare query
        clause; both are normalized into a count request.

        Raises:
            ValueError: if no index name is available.
        """
        target_index = self._resolve_index(index)

        count_query = {"query": query.get("query", query)}

        async def _count_operation(es_client):
            result = await es_client.client.count(index=target_index, body=count_query)
            return result["count"]

        return await self._execute_with_client(_count_operation)


class BaseMongoDAO(BaseDAO):
    """
    Base MongoDB DAO with optimized operations.

    Wraps the synchronous driver behind ``asyncio.to_thread`` so blocking calls
    do not stall the event loop.
    """

    def __init__(self, collection_name: str):
        super().__init__()
        self.collection_name = collection_name
        self.collection = db[collection_name]

    async def bulk_insert(self, documents: List[Dict]) -> Dict:
        """Bulk insert with error handling.

        Returns:
            Dict with ``inserted`` count and ``errors`` list; failures are
            logged and reported rather than raised.
        """
        if not documents:
            return {"inserted": 0, "errors": []}

        try:
            # The driver call blocks; run it off the event loop.
            # ordered=False lets valid documents land even if some fail.
            result = await asyncio.to_thread(
                self.collection.insert_many, documents, ordered=False
            )
            self._log_operation(f"Bulk inserted {len(result.inserted_ids)} documents to {self.collection_name}")
            return {"inserted": len(result.inserted_ids), "errors": []}
        except Exception as e:
            self._logger.error(f"Bulk insert failed: {e}")
            return {"inserted": 0, "errors": [str(e)]}

    async def bulk_update(self, updates: List[Dict]) -> Dict:
        """Bulk update operations.

        Each entry in ``updates`` is a dict with ``filter``, ``update`` and an
        optional ``upsert`` flag.

        Returns:
            Dict with ``updated`` (modified document count) and ``errors`` list.
        """
        if not updates:
            return {"updated": 0, "errors": []}

        try:
            # PyMongo's bulk_write requires request objects; the previous raw
            # {"updateOne": {...}} dict form raises TypeError on every call.
            # Local import: pymongo is a transitive runtime dependency of
            # utils.mongo_util (which provides ``db``).
            from pymongo import UpdateOne

            operations = [
                UpdateOne(
                    update.get("filter", {}),
                    update.get("update", {}),
                    upsert=update.get("upsert", False),
                )
                for update in updates
            ]

            result = await asyncio.to_thread(
                self.collection.bulk_write, operations, ordered=False
            )
            # NOTE(review): upserted documents are not counted in modified_count;
            # kept as-is to preserve the existing return contract.
            self._log_operation(f"Bulk updated {result.modified_count} documents in {self.collection_name}")
            return {"updated": result.modified_count, "errors": []}
        except Exception as e:
            self._logger.error(f"Bulk update failed: {e}")
            return {"updated": 0, "errors": [str(e)]}

    def build_time_range_query(self, field: str, start_time: datetime, end_time: datetime) -> Dict:
        """Build an inclusive (``$gte``/``$lte``) time range query for MongoDB."""
        return {
            field: {
                "$gte": start_time,
                "$lte": end_time
            }
        }

    async def find_with_pagination(self, query: Dict, skip: int = 0,
                                   limit: int = 100, sort: Optional[List] = None) -> List[Dict]:
        """Find documents with pagination.

        Args:
            query: MongoDB filter document.
            skip: number of documents to skip.
            limit: maximum number of documents to return.
            sort: optional list of ``(field, direction)`` pairs.

        Returns:
            List of matching documents; empty list on error (errors are logged).
        """
        try:
            cursor = self.collection.find(query)
            if sort:
                cursor = cursor.sort(sort)
            cursor = cursor.skip(skip).limit(limit)

            # Materializing the cursor performs the blocking network I/O.
            return await asyncio.to_thread(list, cursor)
        except Exception as e:
            self._logger.error(f"Find with pagination failed: {e}")
            return []


class BaseRedisDAO(BaseDAO):
    """
    Base Redis DAO with optimized bulk operations.

    The blocking redis client is driven through ``asyncio.to_thread`` so calls
    do not block the event loop.
    """

    def __init__(self, db_number: int = 3):
        super().__init__()
        self.db_number = db_number
        self._client = None  # resolved lazily on first use

    def _get_client(self):
        """Return the Redis client for this DAO's database, creating it on first use."""
        if self._client is None:
            self._client = RedisClientManager.get_client(self.db_number)
        return self._client

    async def bulk_set(self, key_value_pairs: Dict[str, Any], expire_seconds: int = None) -> bool:
        """Set many keys in a single pipelined round trip, optionally with a TTL.

        Returns True on success (including the empty-input no-op), False on error.
        """
        if not key_value_pairs:
            return True

        try:
            pipeline = self._get_client().pipeline()
            for key, value in key_value_pairs.items():
                pipeline.set(key, value)
                if expire_seconds:
                    pipeline.expire(key, expire_seconds)

            await asyncio.to_thread(pipeline.execute)
            self._log_operation(f"Bulk set {len(key_value_pairs)} keys in Redis DB {self.db_number}")
            return True
        except Exception as e:
            self._logger.error(f"Bulk set failed: {e}")
            return False

    async def bulk_hset(self, hash_key: str, field_value_pairs: Dict[str, Any]) -> bool:
        """Write many fields of one hash at once via HSET with a mapping.

        Returns True on success (including the empty-input no-op), False on error.
        """
        if not field_value_pairs:
            return True

        try:
            redis_client = self._get_client()
            await asyncio.to_thread(redis_client.hset, hash_key, mapping=field_value_pairs)
            self._log_operation(f"Bulk hset {len(field_value_pairs)} fields to {hash_key}")
            return True
        except Exception as e:
            self._logger.error(f"Bulk hset failed: {e}")
            return False

    async def bulk_delete(self, keys: List[str]) -> int:
        """Delete many keys in one DEL command.

        Returns the number of keys actually removed (0 on empty input or error).
        """
        if not keys:
            return 0

        try:
            redis_client = self._get_client()
            deleted_count = await asyncio.to_thread(redis_client.delete, *keys)
            self._log_operation(f"Bulk deleted {deleted_count} keys from Redis DB {self.db_number}")
            return deleted_count
        except Exception as e:
            self._logger.error(f"Bulk delete failed: {e}")
            return 0


class HealthDataProcessor:
    """
    Utility class for processing health data with standardized business logic.

    Exception levels: 0 = normal; higher values indicate increasing severity.
    """

    # Per-metric config: source fields, display type/unit, and exception rules.
    # Rule tuples are (threshold1, threshold2, level, description[, operator]);
    # operator defaults to 'gte'. threshold2 is currently unused by the
    # simplified rule check below.
    HEALTH_METRICS = {
        'blood_pressure': {
            'fields': ['blood_pressure_high', 'blood_pressure_low'],
            'type': '血压',
            'unit': 'mmHg',
            'exception_rules': [
                (140, 90, 1, '轻度高血压'),
                (160, 100, 2, '中度高血压'),
                (180, 120, 3, '重度高血压')
            ]
        },
        'heart_rate': {
            'fields': ['heart_rate'],
            'type': '心率',
            'unit': 'bpm',
            'exception_rules': [
                (100, None, 1, '心率偏高'),
                (60, None, 1, '心率偏低', 'lt')
            ]
        },
        'body_temperature': {
            'fields': ['body_temperature'],
            'type': '体温',
            'unit': '°C',
            'exception_rules': [
                (37.2, None, 1, '发热'),
                (38.0, None, 2, '高热'),
                (39.0, None, 3, '超高热')
            ]
        }
    }

    @classmethod
    def calculate_exception_level(cls, metric_type: str, values: Dict) -> int:
        """Calculate health exception level based on metric values.

        Args:
            metric_type: key into ``HEALTH_METRICS`` (unknown keys return 0).
            values: mapping of field name -> reading (None readings ignored).

        Returns:
            The highest level among all matching rules, or 0 when normal.
        """
        if metric_type not in cls.HEALTH_METRICS:
            return 0

        exception_rules = cls.HEALTH_METRICS[metric_type]['exception_rules']

        # Take the MAX level over all matching rules. Returning the first match
        # (the previous behavior) under-reported severity, because rules are
        # declared mildest-first: e.g. a 39.5°C reading matched the 37.2 rule
        # and returned level 1 instead of 3.
        matched_levels = [
            rule[2] for rule in exception_rules
            if cls._check_exception_rule(rule, values)
        ]
        return max(matched_levels, default=0)

    @classmethod
    def _check_exception_rule(cls, rule: tuple, values: Dict) -> bool:
        """Check whether any non-None value in ``values`` triggers ``rule``.

        Simplified check: only threshold1 is compared, against every value.
        threshold2 (e.g. the diastolic blood-pressure bound) is intentionally
        unused here pending per-field rule semantics.
        """
        threshold1, threshold2, level, description = rule[:4]
        operator = rule[4] if len(rule) > 4 else 'gte'

        if operator == 'gte':
            return any(v >= threshold1 for v in values.values() if v is not None)
        elif operator == 'lt':
            return any(v < threshold1 for v in values.values() if v is not None)

        # Unknown operators never match.
        return False

    @classmethod
    def process_health_data(cls, health_data: Dict) -> Dict:
        """Process health data and attach per-metric exception levels.

        For every configured metric with at least one non-None field present,
        adds a ``<metric>_exception_level`` key to a copy of ``health_data``.
        The input dict is not mutated.
        """
        processed_data = health_data.copy()

        for metric_type in cls.HEALTH_METRICS:
            metric_fields = cls.HEALTH_METRICS[metric_type]['fields']
            metric_values = {field: health_data.get(field) for field in metric_fields}

            if any(v is not None for v in metric_values.values()):
                exception_level = cls.calculate_exception_level(metric_type, metric_values)
                processed_data[f'{metric_type}_exception_level'] = exception_level

        return processed_data