#
#  Copyright 2022 The Open Islands Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
import typing

from confluent_kafka import KafkaException, KafkaError
from confluent_kafka.admin import AdminClient, NewTopic

from pyoi.context.mq._manager import BaseMQManager
from pyoi.util import log_utils
from pyoi.util.base_utils import trys

# Module-level logger obtained from the project's logging helper; shared by
# all methods in this module.
LOGGER = log_utils.getLogger()


class MQManager(BaseMQManager):
    """Kafka implementation of the MQ manager.

    Manages the topic lifecycle (create / list / delete) for one federation
    group through the confluent-kafka ``AdminClient``. Topic names for a group
    use the prefixes ``{group}``, ``send-{group}`` and ``receive-{group}``.
    """

    def __init__(
            self,
            group: str = None,
            params: dict = None,
            bootstrap_servers: str = None,
    ):
        """
        :param group: federation group name, used as the topic-name prefix.
        :param params: extra parameters forwarded to :class:`BaseMQManager`.
        :param bootstrap_servers: Kafka bootstrap servers, e.g. ``"host:9092"``.
        """
        super(MQManager, self).__init__(group=group, params=params)
        self.bootstrap_servers = bootstrap_servers
        # Cache of topics this instance has already created (or found existing),
        # to avoid redundant admin round-trips.
        self._created_topic = set()
        self._admin_client = AdminClient({'bootstrap.servers': bootstrap_servers, "broker.address.family": "v4"})
        # todo: set according to task timeout
        self._message_ttl = 24 * 60 * 60 * 1000  # retention in ms (24 hours)
        # +1024 bytes of headroom over the payload limit for record headers/overhead.
        self._topic_config = {"retention.ms": str(self._message_ttl),
                              "max.message.bytes": str(self._max_message_size + 1024)}

    @trys()
    def create_topic(self, topic):
        """Create ``topic`` idempotently.

        A topic that already exists on the broker is treated as success and
        remembered, so subsequent calls short-circuit locally.

        :param topic: topic name to create.
        :raises KafkaException: on any broker error other than
            ``TOPIC_ALREADY_EXISTS``.
        """
        if topic in self._created_topic:
            LOGGER.info(f"topic {topic} already be created")
            return
        LOGGER.info(f"try to create topic {topic} with config {self._topic_config}")
        new_topics = [NewTopic(topic, num_partitions=self._num_partitions,
                               replication_factor=1, config=self._topic_config)]
        # Fix: create_topics() was previously invoked twice in a row; the first
        # call's futures were discarded, issuing a duplicate admin request that
        # raced with the second. Call it exactly once and consume its futures.
        for t, f in self._admin_client.create_topics(new_topics=new_topics).items():
            try:
                f.result()
                self._created_topic.add(t)
                LOGGER.info(f"create topic {t} success with {self._num_partitions} partitions")
            except KafkaException as e:
                # Fix: compare error codes via .code() — comparing the KafkaError
                # object directly against the integer constant is not reliable
                # across client versions, which could re-raise the benign
                # "already exists" case instead of tolerating it.
                if e.args[0].code() == KafkaError.TOPIC_ALREADY_EXISTS:
                    # Fix: remember existing topics too, so the next call
                    # short-circuits instead of re-issuing the admin request.
                    self._created_topic.add(t)
                    LOGGER.warning(f"topic {t} already exists")
                else:
                    raise

    def list_topics(self) -> typing.List[str]:
        """Return all topics on the cluster that belong to this federation group.

        A topic belongs to the group when its name starts with the group name
        itself or with the ``send-``/``receive-`` prefixed variants.
        """
        cluster_metadata = self._admin_client.list_topics()
        cluster_topics = cluster_metadata.topics.keys()
        topic_prefix = (self.group, f"send-{self.group}", f"receive-{self.group}")
        # str.startswith accepts a tuple of prefixes, testing them in one call.
        return [t for t in cluster_topics if t.startswith(topic_prefix)]

    def cleanup_topic(self, topic):
        """Delete a single topic from the cluster (fire-and-forget)."""
        self._admin_client.delete_topics(topics=[topic])

    def cleanup_group(self):
        """Delete every topic belonging to this federation group."""
        this_federation_group_topics = self.list_topics()
        self._admin_client.delete_topics(topics=this_federation_group_topics)
        LOGGER.debug(f"cleanup group topics: {this_federation_group_topics}")

    def check_group(self) -> bool:
        """Return True if the cluster still holds any topic for this group."""
        return len(self.list_topics()) > 0
