#
#  Copyright 2022 The Open Islands Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#

import time
from typing import Tuple

from confluent_kafka import Producer, Consumer, KafkaError, KafkaException, cimpl

from pyoi.constant import FederatedPacketFormat
from pyoi.context.mq._channel import BaseMQChannel
from pyoi.context.mq._utils import gen_consumer_group_id
from pyoi.context.types import FederatedPacketRouting, FederatedPacketHeaders
from pyoi.util import log_utils
from pyoi.util.base_utils import json_dumps, json_loads, string_to_bytes, bytes_to_string
from pyoi.util.base_utils import trys

# Module-level logger shared by all channel instances in this module.
LOGGER = log_utils.getLogger()


class MQChannel(BaseMQChannel):
    """Kafka-backed MQ channel for exchanging federated packets.

    Publishes through a single long-lived confluent-kafka ``Producer`` and
    creates one dedicated ``Consumer`` per :meth:`consume_on` call.  Packets
    travel as JSON envelopes ``{"headers": ..., "body": ...}``; the binary
    payload is hex-encoded for PLAIN-format packets and decoded as text
    otherwise (see :meth:`to_message` / :meth:`from_message`).
    """

    def __init__(
            self,
            routing_info: FederatedPacketRouting,
            params: dict,
            bootstrap_servers: str,
            consumer_group_dynamic: bool = True,
            header_in_body: bool = False,
    ):
        """Create the channel and its Kafka producer.

        :param routing_info: packet routing info, forwarded to the base class.
        :param params: channel parameters, forwarded to the base class.
        :param bootstrap_servers: Kafka ``bootstrap.servers`` connection string.
        :param consumer_group_dynamic: when True, a fresh consumer group id is
            generated for every :meth:`consume_on` call; otherwise
            ``self.group`` is used verbatim.
        :param header_in_body: stored flag; presumably indicates headers are
            carried inside the message body — confirm with callers.
        """
        super(MQChannel, self).__init__(routing_info=routing_info, params=params)
        self._bootstrap_servers = bootstrap_servers
        self._consumer_group_dynamic = consumer_group_dynamic
        # message.max.bytes is padded past the payload cap so the JSON
        # envelope and protocol overhead still fit.
        self._connect_args = {"bootstrap.servers": self._bootstrap_servers, "broker.address.family": "v4",
                              "message.max.bytes": str(self._max_message_size + 1024)}
        self._header_in_body = header_in_body
        LOGGER.info(f"create producer with config {self._connect_args}")
        self._producer = Producer(self._connect_args)

    def publish_to(self, topic: str, headers: FederatedPacketHeaders, data: bytes):
        """Serialize one packet and publish it to *topic*.

        ``flush()`` after every produce blocks until the broker settles the
        delivery, trading throughput for a per-packet delivery guarantee;
        failures are reported through :meth:`on_delivery`.
        """
        self._producer.produce(topic, value=self.to_message(headers, data), on_delivery=self.on_delivery)
        self._producer.flush()

    def on_delivery(self, err, msg: cimpl.Message):
        """Producer delivery callback: log failures, stay silent on success."""
        if err:
            LOGGER.error(f"delivery error: {err} {msg.error()}")

    def consume_on(self, topic: str):
        """Yield ``(consumer, headers, data)`` tuples from *topic* indefinitely.

        A dedicated consumer (auto-commit enabled) is created per call and is
        always closed when the generator is exhausted, closed, or an error
        propagates — the ``finally`` handles every exit path, so the former
        ``except Exception: raise`` wrapper was redundant and has been removed.

        :raises KafkaException: on any consumer error other than
            partition-EOF or unknown-topic (both are logged and retried).
        """
        consumer_group_id = gen_consumer_group_id(self.group) if self._consumer_group_dynamic else self.group
        LOGGER.debug(f"start consume on topic: {topic} with consumer group id: {consumer_group_id}")
        connect_args = self._connect_args.copy()
        connect_args["group.id"] = consumer_group_id
        connect_args["enable.auto.commit"] = True
        # "smallest" is the legacy alias of "earliest": start from the oldest
        # available offset when the group has no committed position.
        connect_args["auto.offset.reset"] = "smallest"
        consumer = Consumer(connect_args)
        try:
            consumer.subscribe([topic])
            while True:
                msg = consumer.poll(1)
                if msg is None:
                    continue
                if msg.error():
                    if msg.error().code() == KafkaError._PARTITION_EOF:
                        LOGGER.warning(
                            f"topic {msg.topic()} partition {msg.partition()} reached end at offset {msg.offset()}")
                    elif msg.error().code() == KafkaError.UNKNOWN_TOPIC_OR_PART:
                        # Topic may not be created yet; back off and keep polling.
                        LOGGER.warning(f"consume topic {msg.topic()} not exists now")
                        time.sleep(1)
                    else:
                        raise KafkaException(msg.error())
                else:
                    headers, data = self.from_message(msg.value())
                    yield consumer, headers, data
        finally:
            LOGGER.debug(f"close consumer: {consumer_group_id}")
            consumer.close()

    @trys()
    def ack(self, deliver):
        """No-op acknowledgement: offsets are committed by Kafka auto-commit."""
        # using auto commit
        pass

    @trys()
    def cancel(self):
        """No-op: each consume_on call owns and closes its own consumer."""
        pass

    def close(self):
        """No-op: the shared producer is left alive for the process lifetime."""
        pass

    def to_message(self, headers: FederatedPacketHeaders, data: bytes) -> bytes:
        """Encode headers plus payload into the JSON wire envelope.

        PLAIN-format payloads are hex-encoded (binary-safe); other formats
        are assumed to hold decodable text and are converted to a string.
        Inverse of :meth:`from_message`.
        """
        if headers.properties.format == FederatedPacketFormat.PLAIN.name:
            body = data.hex()
        else:
            body = bytes_to_string(data)
        packet = {"headers": headers.to_dict(), "body": body}
        return json_dumps(packet, byte=True)

    def from_message(self, message: bytes) -> Tuple[FederatedPacketHeaders, bytes]:
        """Decode the JSON wire envelope back into ``(headers, payload)``.

        The original annotation ``(FederatedPacketHeaders, bytes)`` was a
        tuple literal, not a type — fixed to ``Tuple[...]``.
        """
        packet = json_loads(message)
        headers = FederatedPacketHeaders.from_dict(packet["headers"])
        if headers.properties.format == FederatedPacketFormat.PLAIN.name:
            data = bytes.fromhex(packet["body"])
        else:
            data = string_to_bytes(packet["body"])
        return headers, data
