import json
import logging
from kafka import KafkaConsumer
from kafka.errors import KafkaError
from typing import List, Dict, Any, Optional
from datetime import datetime
import threading
import time

logger = logging.getLogger(__name__)


class KafkaConsumerService:
    """Background Kafka consumer that buffers recently consumed messages.

    Messages are consumed on a daemon thread and kept in a bounded in-memory
    buffer so request handlers can poll them via :meth:`get_consumed_messages`.
    The buffer is protected by a lock because it is written by the consumer
    thread and read from other threads.
    """

    # Upper bound on buffered messages; the oldest entry is dropped first.
    MAX_BUFFERED_MESSAGES = 100

    def __init__(self, bootstrap_servers: str = "localhost:9092"):
        self.bootstrap_servers = bootstrap_servers
        self.consumer = None
        self.consumed_messages: List[Dict[str, Any]] = []
        self.is_consuming = False
        self.consumer_thread = None
        # Guards consumed_messages (appended on the consumer thread,
        # read/cleared from caller threads).
        self._messages_lock = threading.Lock()

    def create_consumer(self, topics: List[str], group_id: str = "fastapi-group") -> bool:
        """Create a KafkaConsumer subscribed to *topics*; return True on success."""
        try:
            self.consumer = KafkaConsumer(
                *topics,
                bootstrap_servers=self.bootstrap_servers,
                group_id=group_id,
                # Payloads are JSON; tolerate empty (tombstone) values/keys.
                value_deserializer=lambda m: json.loads(m.decode('utf-8')) if m else None,
                key_deserializer=lambda k: k.decode('utf-8') if k else None,
                auto_offset_reset='latest',
                enable_auto_commit=True
            )
            logger.info(f"Created consumer for topics: {topics}")
            return True
        except Exception as e:
            logger.error(f"Failed to create consumer: {e}")
            return False

    def start_consuming(self, topics: List[str], group_id: str = "fastapi-group"):
        """Start the background consumer thread for *topics*.

        Returns a {"success": bool, "message": str} status dict; refuses to
        start when a consumer is already running.
        """
        if self.is_consuming:
            return {"success": False, "message": "Consumer is already running"}

        if not self.create_consumer(topics, group_id):
            return {"success": False, "message": "Failed to create consumer"}

        self.is_consuming = True
        self.consumer_thread = threading.Thread(
            target=self._consume_messages,
            name="kafka-consumer",
            daemon=True,
        )
        self.consumer_thread.start()

        return {"success": True, "message": f"Started consuming from topics: {topics}"}

    def _consume_messages(self):
        """Thread target: iterate the consumer and buffer messages until stopped."""
        try:
            for message in self.consumer:
                if not self.is_consuming:
                    break

                consumed_msg = {
                    "topic": message.topic,
                    "partition": message.partition,
                    "offset": message.offset,
                    "key": message.key,
                    "value": message.value,
                    # NOTE(review): message.timestamp is epoch milliseconds;
                    # fromtimestamp yields a *naive local-time* datetime —
                    # confirm callers don't expect UTC.
                    "timestamp": datetime.fromtimestamp(message.timestamp / 1000),
                }

                with self._messages_lock:
                    self.consumed_messages.append(consumed_msg)
                    # Bound the buffer so a long-running consumer can't grow
                    # memory without limit.
                    if len(self.consumed_messages) > self.MAX_BUFFERED_MESSAGES:
                        self.consumed_messages.pop(0)

                logger.info(f"Consumed message from {message.topic}: {message.value}")

        except Exception as e:
            # stop_consuming() closes the consumer, which makes this iterator
            # raise; only log when the failure was not a deliberate shutdown.
            if self.is_consuming:
                logger.error(f"Error consuming messages: {e}")
        finally:
            self.is_consuming = False

    def stop_consuming(self):
        """Stop the consumer thread, close the consumer, and report status."""
        self.is_consuming = False
        if self.consumer:
            # Closing wakes the blocking iteration in _consume_messages.
            try:
                self.consumer.close()
            except Exception as e:
                logger.error(f"Error closing consumer: {e}")
            self.consumer = None
        # Wait (bounded) for the consumer thread to observe the stop flag.
        if self.consumer_thread and self.consumer_thread.is_alive():
            self.consumer_thread.join(timeout=5)
        self.consumer_thread = None
        return {"success": True, "message": "Consumer stopped"}

    def get_consumed_messages(self, limit: int = 10) -> List[Dict[str, Any]]:
        """Return up to *limit* most recent messages (empty list for limit <= 0)."""
        if limit <= 0:
            # Without this guard, lst[-0:] would return the WHOLE list.
            return []
        with self._messages_lock:
            return self.consumed_messages[-limit:]

    def clear_messages(self):
        """Empty the message buffer and report status."""
        with self._messages_lock:
            self.consumed_messages.clear()
        return {"success": True, "message": "Messages cleared"}

    def get_topics(self) -> List[str]:
        """Return the broker's topic names, using a throwaway consumer if needed."""
        try:
            if not self.consumer:
                temp_consumer = KafkaConsumer(bootstrap_servers=self.bootstrap_servers)
                try:
                    return list(temp_consumer.topics())
                finally:
                    # Always release the temporary connection, even if
                    # topics() raises.
                    temp_consumer.close()
            return list(self.consumer.topics())
        except Exception as e:
            logger.error(f"Failed to get topics: {e}")
            return []