import time
import json
from functools import wraps

from kafka import (
    KafkaConsumer,
    KafkaProducer,
    TopicPartition,
    KafkaClient,
    KafkaAdminClient,
)
from kafka.admin import NewTopic, NewPartitions
from utils import logger, heartbeat


def json_string(obj):
    """Serialize *obj* to a compact JSON string.

    ``ensure_ascii=False`` keeps non-ASCII (e.g. Chinese) text readable in
    logs and on the wire; the tight separators drop padding whitespace.
    (Parameter renamed from ``l`` — PEP 8 / E741 ambiguous name; all callers
    in this module pass it positionally.)
    """
    return json.dumps(obj, ensure_ascii=False, separators=(',', ':'))


def json_parse(b):
    """Best-effort deserializer for Kafka keys/values.

    Returns the parsed object when *b* is valid JSON; otherwise falls back
    to the decoded text (for ``bytes``) or the input itself. ``None`` is
    returned unchanged — kafka-python hands the deserializer the raw
    key/value, which is ``None`` for keyless messages, and ``json.loads``
    would raise ``TypeError`` on it.
    """
    if b is None:
        return None
    try:
        return json.loads(b)
    except (json.JSONDecodeError, UnicodeDecodeError):
        # Not JSON (or not valid UTF-8): fall back to plain text.
        # errors='replace' keeps undecodable bytes from crashing the consumer.
        if isinstance(b, bytes):
            return b.decode(errors='replace')
        return b


class KafkaAdmin:
    """Thin administrative wrapper around kafka-python.

    Holds a ``KafkaClient`` (used to force cluster-metadata refreshes after
    deletions) and a ``KafkaAdminClient`` (topic/partition management).
    """

    def __init__(self, servers):
        # servers: bootstrap server string or list, e.g. "host:9092".
        self.client = KafkaClient(bootstrap_servers=servers)
        self.admin_client = KafkaAdminClient(bootstrap_servers=servers)

    def __str__(self):
        return json_string(self.topics_list)

    @property
    def topics_list(self):
        """Topic names currently known to the broker."""
        return self.admin_client.list_topics()

    def current_all_topics(self):
        # Kept for backward compatibility; identical to ``topics_list``.
        return self.topics_list

    def create_topic_and_partition(self, topic, num_partitions):
        """Create *topic* with *num_partitions* partitions (replication factor 1)."""
        new_topics = [NewTopic(name=topic, num_partitions=num_partitions, replication_factor=1)]
        self.admin_client.create_topics(new_topics)

    def create_partition(self, topic, num_partitions):
        """Grow *topic* to *num_partitions* total partitions.

        Creates the topic first when it does not exist yet.
        """
        if topic not in self.topics_list:
            self.create_topic_and_partition(topic, num_partitions)
            return
        # NewPartitions takes the desired *total* partition count, not a delta.
        self.admin_client.create_partitions({topic: NewPartitions(num_partitions)})

    @logger.catch(message="删除TOPIC异常")
    def delete_topic(self, topic, timeout=10000):
        """Delete one topic (str) or several (iterable of str).

        Afterwards forces a cluster-metadata refresh so the deletion is
        visible to subsequent ``topics_list`` reads. *timeout* is passed to
        ``delete_topics`` in milliseconds.
        """
        # Normalize: the original silently ignored non-list iterables
        # (tuple/set) while still polling for a metadata update.
        topics = [topic] if isinstance(topic, str) else list(topic)
        self.admin_client.delete_topics(topics, timeout)
        future = self.client.cluster.request_update()
        self.client.poll(future=future)

    def delete_all_topics(self):
        """Delete every topic currently known to the broker."""
        self.delete_topic(self.topics_list)


class Producer:
    """JSON-serializing Kafka producer that auto-creates missing partitions.

    Keys and values are serialized with ``json_string`` and UTF-8 encoded.
    """

    def __init__(self, servers):
        self._servers = servers
        self.connect()

    def __del__(self):
        # connect() may have failed before _producer was assigned, so guard
        # the attribute instead of crashing with AttributeError on teardown.
        producer = getattr(self, '_producer', None)
        if producer is not None:
            producer.close()

    def connect(self):
        """(Re)create the underlying ``KafkaProducer``."""
        self._producer: KafkaProducer = KafkaProducer(
            bootstrap_servers=self._servers,
            key_serializer=lambda v: json_string(v).encode(),
            value_serializer=lambda v: json_string(v).encode(),
        )

    def reconnect(self):
        """Close and re-open the producer (used after creating partitions)."""
        self._producer.close()
        time.sleep(1)  # give the broker a moment before reconnecting
        self.connect()

    def _expand_and_retry(self, retry, message, key, topic, partition, **kwargs):
        """Create the missing partition, reconnect, then retry once via *retry*.

        kafka-python raises ``AssertionError('Unrecognized partition')`` when
        the requested partition index does not exist; indexes are 0-based, so
        index N requires N + 1 partitions in total.
        """
        KafkaAdmin(self._servers).create_partition(topic, partition + 1)
        self.reconnect()
        retry(message, key, topic, partition, **kwargs)

    @logger.catch(message='kafka生产异常')
    def async_send(self, message, key, topic, partition=None, **kwargs):
        """Send *message* without waiting for broker acknowledgement."""
        if not message:
            return
        try:
            self._producer.send(topic=topic, value=message, partition=partition, key=key, **kwargs)
        except AssertionError as e:
            if str(e) == 'Unrecognized partition':
                self._expand_and_retry(self.async_send, message, key, topic, partition, **kwargs)
            else:
                raise

    @logger.catch(message='kafka生产异常')
    def send(self, message, key, topic, partition=None):
        """Send *message* and block until the broker acknowledges it."""
        if not message:
            return
        try:
            future = self._producer.send(topic=topic, value=message, partition=partition, key=key)
            future.get()
        except AssertionError as e:
            if str(e) == 'Unrecognized partition':
                # BUG FIX: the original called the unbound ``Producer.send``
                # without ``self``, so the retry raised TypeError instead of
                # resending the message.
                self._expand_and_retry(self.send, message, key, topic, partition)
            else:
                raise


class Consumer:
    """Factory for JSON-deserializing ``KafkaConsumer`` instances, plus a
    decorator that batches consumed messages and hands them to a handler.
    """

    def __init__(self, servers):
        # servers: Kafka bootstrap server string or list, e.g. "host:9092".
        self._servers = servers

    @logger.catch(message='消费kafka异常', reraise=False)
    def consumer(
            self,
            topic,
            group_id='spider-consumer-group',
            timeout=60000,
            partition=None,
            **kwargs
    ):
        """Build a ``KafkaConsumer`` for *topic*.

        If *partition* is not None, the consumer is manually assigned to that
        single partition (no group-managed subscription); otherwise it
        subscribes to the whole topic. *timeout* is ``consumer_timeout_ms``:
        iterating the consumer stops after that many milliseconds without a
        message. Keys and values are decoded with ``json_parse``.

        Returns None if construction fails (``logger.catch`` with
        ``reraise=False`` swallows the exception).
        """
        if partition is not None:
            consumer = KafkaConsumer(
                group_id=group_id,
                bootstrap_servers=self._servers,
                key_deserializer=lambda m: json_parse(m),
                value_deserializer=lambda m: json_parse(m),
                consumer_timeout_ms=timeout,
                **kwargs
            )
            # Manual assignment: exactly this one topic-partition.
            consumer.assign([TopicPartition(topic, partition)])
        else:
            consumer = KafkaConsumer(
                topic,
                group_id=group_id,
                bootstrap_servers=self._servers,
                key_deserializer=lambda m: json_parse(m),
                value_deserializer=lambda m: json_parse(m),
                consumer_timeout_ms=timeout,
                **kwargs
            )
        return consumer

    def decorator(self, func):
        """Wrap *func* so it is called repeatedly with batches of messages.

        The wrapper is driven by keyword arguments: ``topic``, ``group_id``,
        ``max_count`` (batch size, default 100), ``timeout`` (ms, default
        60000) and ``partition``. Each batch is passed to *func* via the
        ``cache`` keyword argument.

        NOTE(review): ``kwargs.get("group_id")`` yields None when the caller
        omits it, which overrides the 'spider-consumer-group' default of
        ``consumer()`` — confirm this is intended.
        """
        @wraps(func)
        @logger.catch(reraise=False)
        def wrap_func(*args, **kwargs):
            topic = kwargs.get("topic")
            group_id = kwargs.get("group_id")
            max_count = kwargs.get("max_count", 100)
            timeout = kwargs.get("timeout", 60000)
            partition = kwargs.get("partition")

            _consumer = self.consumer(topic=topic, group_id=group_id, timeout=timeout, partition=partition)

            cache = []
            try:
                # Consume forever: each pass drains up to max_count messages
                # (or until the consumer iterator times out), then flushes the
                # batch to func. cache_processing also runs the heartbeat, so
                # it is invoked even when the batch is empty.
                while 1:
                    try:
                        for msg in _consumer:
                            # TODO: data taken from Kafka should also be persisted locally.
                            cache.append(msg)
                            if len(cache) >= max_count:
                                break
                        cache_processing(func, cache, "正常", *args, **kwargs)
                    except KeyboardInterrupt:
                        # Flush whatever was buffered before exiting.
                        cache_processing(func, cache, "退出", *args, **kwargs)
                        exit(0)
                    except Exception as e:
                        # Flush the buffer, then re-raise so logger.catch
                        # (reraise=False) records the error and returns.
                        cache_processing(func, cache, "发生异常", *args, **kwargs)
                        raise e
            finally:
                _consumer.close()

        # Defined after wrap_func, but resolved at call time through the
        # enclosing closure scope, so this ordering is safe.
        def cache_processing(func, cache, action, *args, **kwargs):
            # Emit a liveness heartbeat, then — if the batch is non-empty —
            # hand it to func as kwargs['cache'] and clear it in place.
            heartbeat()
            if len(cache):
                logger.debug("{},{}并保存缓存 topic:{}|partition:{}|key:{} 中的数据,缓存数据量:{}".format(
                    func.__name__,
                    action,
                    kwargs.get('topic'),
                    kwargs.get("partition"),
                    kwargs.get("key"),
                    len(cache)
                ))
                kwargs.update({"cache": cache})
                func(*args, **kwargs)
                cache.clear()

        return wrap_func
