# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: jaewon, inkyu

import logging

import kafka
from tornado import gen
from tornado.queues import Queue


class KafkaPublisher(object):
  """Asynchronously publishes messages to Kafka on a tornado IOLoop.

  send_message() enqueues (topic, msg, timestamp) tuples; a background
  coroutine started by start() drains the queue, lazily creating (and
  re-creating after send errors) the underlying KafkaProducer.
  """

  # Publisher lifecycle states.
  _STATUS_STOPPED = 0
  _STATUS_WAITING = 1

  def __init__(self, kafka_servers=None, logger=None):
    """Args:
      kafka_servers: list of 'host:port' bootstrap servers; defaults to
        ['localhost:9092'] when None/empty.
      logger: optional logging.Logger; defaults to this module's logger.
    """
    # BUG FIX: the original validated and then re-assigned the raw
    # `kafka_servers` argument, so the None default raised TypeError on
    # `len(None)` and would have clobbered the localhost fallback.
    self._kafka_servers = kafka_servers or ['localhost:9092']
    assert len(self._kafka_servers) > 0
    self._status = self._STATUS_STOPPED

    # Unbounded buffer of (topic, msg, timestamp) tuples awaiting send.
    self._queue = Queue(maxsize=0)
    self._send_messages_handle = None

    self._ioloop = None
    # Created lazily on the IOLoop so construction never blocks on Kafka.
    self._kafka_producer = None
    self._send_error_counter = 0
    self._logger = logger or logging.getLogger(__name__)

  def __del__(self):
    self.stop()

  def send_message(self, topic, msg, timestamp=None):
    """Enqueue a message for asynchronous delivery.

    Args:
      topic: Kafka topic name.
      msg: message payload (whatever the producer serializes).
      timestamp: optional message timestamp; forwarded to the producer.
        NOTE(review): _kafka_send scales it by 1e-6 to get milliseconds,
        which implies nanoseconds — confirm the unit with callers.
    """
    # TODO(inkyu): Multithread support
    assert self._status != self._STATUS_STOPPED
    self._queue.put((topic, msg, timestamp))

  def start(self, ioloop):
    """Start draining the queue on `ioloop`. Must not already be running."""
    assert self._status == self._STATUS_STOPPED
    self._ioloop = ioloop
    self._status = self._STATUS_WAITING
    self._ioloop.add_callback(self._run_loop)

  def _init_kafka_producer(self):
    """Create the KafkaProducer; raises on connection failure."""
    self._kafka_producer = kafka.KafkaProducer(bootstrap_servers=self._kafka_servers,
                                               compression_type='gzip',
                                               retries=5,
                                               batch_size=1024,
                                               max_request_size=1048576,
                                               # One in-flight request keeps
                                               # message ordering with retries.
                                               max_in_flight_requests_per_connection=1)

  async def _run_loop(self):
    """Background coroutine: drain the queue, (re)connecting as needed."""
    counter = 0
    async for (topic, msg, timestamp) in self._queue:
      if self._status == self._STATUS_STOPPED:
        break
      while self._kafka_producer is None:
        if self._status == self._STATUS_STOPPED:
          # BUG FIX: don't keep retrying the connection forever after stop().
          return
        try:
          self._init_kafka_producer()
        except Exception as e:
          self._logger.error('Kafka error(%s): Retrying in 5 seconds.', e)
          await gen.sleep(5)
      # BUG FIX: the dequeued timestamp was previously dropped here, so
      # send_message(..., timestamp=...) never reached the producer.
      self._kafka_send(topic, msg, timestamp=timestamp)
      self._queue.task_done()
      # Prevent starvation of other IOLoop callbacks.
      counter += 1
      if counter == 100 or self._kafka_producer is None:
        await gen.sleep(0.1)
        counter = 0

  def stop(self):
    """Stop the publisher and release the producer. Idempotent."""
    if self._status == self._STATUS_STOPPED:
      return
    self._status = self._STATUS_STOPPED
    # Sentinel wakes _run_loop so it observes the STOPPED status.
    self._queue.put((None, None, None))
    self._kafka_clean_up()

  def _kafka_clean_up(self):
    """Close and discard the producer (if any)."""
    if self._kafka_producer:
      self._kafka_producer.close()
    self._kafka_producer = None

  def _kafka_send_error(self, topic, msg, error, *args, **kwargs):
    """Handle a failed send: drop the producer so _run_loop re-creates it."""
    self._logger.error('Send error: (%s)', error)
    self._kafka_clean_up()
    self._send_error_counter += 1
    if self._send_error_counter > 100000:
      # Too many cumulative failures: shut the whole IOLoop down.
      self._ioloop.add_callback(self._ioloop.stop)
      self._logger.error('Exit due to too many send error.')

  def _kafka_send(self, topic, msg, *args, timestamp=None, **kwargs):
    """Send one message; schedules _kafka_send_error on failure."""
    try:
      timestamp_ms = None
      if timestamp is not None:
        # NOTE(review): scaling by 1e-6 implies `timestamp` is in
        # nanoseconds — confirm against callers.
        timestamp_ms = int(timestamp * 1e-6)
      f = self._kafka_producer.send(topic, msg, *args, timestamp_ms=timestamp_ms, **kwargs)
      # The errback fires on the producer's I/O thread; bounce it onto the
      # IOLoop so all state mutation stays single-threaded.
      f.add_errback(self._ioloop.add_callback, self._kafka_send_error, topic, msg)
    except kafka.errors.KafkaTimeoutError as e:
      self._kafka_send_error(topic, msg, e)
