# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: jaewon, inkyu

import datetime
import logging
import os

from absl import app, flags
from tornado.ioloop import IOLoop

from coin.exchange.base.order_gateway_logger import (
    parse_file_name_from_coin2 as og_log_name_parser)
from coin.exchange.base.strategy_logger import parse_file_name as strat_log_name_parser
from coin.exchange.base.subscription_logger import (
    parse_file_name as subscription_log_name_parser)
from coin.support.proto_log.logic.util import generate_log_info
from coin.util.queue.constants import TopicType
from coin.util.queue.sync_publisher import KafkaPublisher
from coin.util.queue.tools.kafka_topic import generate_kafka_topic
from coin.util.log_watcher.proto_log_watcher import ProtoLogWatcher


def _get_log_file_name_parser(kafka_topic_type):
  """Return the log-file-name parser matching a Kafka topic type.

  Args:
    kafka_topic_type: a TopicType member.

  Returns:
    The parse-file-name callable appropriate for that topic family.

  Raises:
    ValueError: if no parser is registered for the topic type.
  """
  if kafka_topic_type in (TopicType.ACCOUNT, TopicType.ACCOUNTESTIMATE):
    return og_log_name_parser
  if kafka_topic_type in (TopicType.STRAT, TopicType.TELEMETRY):
    return strat_log_name_parser
  if kafka_topic_type == TopicType.TICKER:
    return subscription_log_name_parser
  raise ValueError('Unknown kafka topic type: %s' % kafka_topic_type.name)


class KafkaPublishingHandler:
  """Republishes proto-log records to Kafka.

  Intended to be used as the ``log_callback`` of a log watcher: each
  record is sent to the Kafka topic derived from the configured topic
  type and the log file's path.
  """

  def __init__(self, *, kafka_publisher, kafka_topic_type):
    """Initializes the handler.

    Args:
      kafka_publisher: publisher whose ``send_message`` delivers records.
      kafka_topic_type: TopicType selecting both the file-name parser and
        the destination topic naming scheme.
    """
    self._kafka_publisher = kafka_publisher
    assert isinstance(kafka_topic_type, TopicType)
    self._kafka_topic_type = kafka_topic_type
    self._log_name_parser = _get_log_file_name_parser(kafka_topic_type)
    self._logger = logging.getLogger(__name__)
    # Fix: previously a class attribute (`_written = 0` on the class),
    # which made the counter look shared across instances and left it
    # undefined in __init__. It is per-instance state, so initialize here.
    self._written = 0

  def on_log(self, path, record):
    """Publishes one log record; silently skips records with bad paths.

    Args:
      path: filesystem path of the log file the record came from.
      record: object exposing ``data`` (bytes-like payload) and
        ``timestamp``.
    """
    try:
      splitted = path.rstrip().split('/')
      filename = splitted[-1]
      # NOTE(review): assumes a .../<hostname>/<subdir>/<filename> layout
      # — confirm against the watcher's directory structure.
      hostname = splitted[-3]
      log_file_rsp = self._log_name_parser(filename)
      log_info = generate_log_info(hostname, log_file_rsp)
      topic = generate_kafka_topic(self._kafka_topic_type, log_info)
    except IndexError:
      # Path too shallow to carry a hostname component: drop the record.
      return

    self._kafka_publisher.send_message(topic, bytes(record.data), timestamp=record.timestamp)
    self._written += 1
    if self._written % 10000 == 0:
      self._logger.info('%d entries are written.', self._written)


def main(argv=None):
  """Watches --target_dir for proto logs and republishes them to Kafka.

  Runs the tornado IOLoop until --exit_after_hours elapses (normal
  shutdown) or the process is interrupted.

  Args:
    argv: unused; present for the absl ``app.run`` contract.

  Returns:
    0 on normal (timed) shutdown, 1 when interrupted.
  """
  FLAGS = flags.FLAGS
  ioloop = IOLoop.current()

  kafka_publisher = KafkaPublisher(kafka_servers=FLAGS.kafka_servers.split(','))
  kafka_publisher.start(ioloop)

  handler = KafkaPublishingHandler(
      kafka_publisher=kafka_publisher,
      kafka_topic_type=TopicType[FLAGS.kafka_topic_type])

  target_dir = os.path.abspath(os.path.normpath(
      os.path.expanduser(FLAGS.target_dir)))
  watcher = ProtoLogWatcher(target_dir=target_dir,
                            log_callback=handler.on_log)
  try:
    watcher.start(ioloop)

    # Stop the loop automatically so the process exits (and can be
    # restarted by a supervisor) instead of running forever.
    exit_after = datetime.timedelta(hours=FLAGS.exit_after_hours)
    ioloop.add_timeout(exit_after, ioloop.stop)
    ioloop.start()
    return 0
  except (KeyboardInterrupt, SystemExit):
    print()
    return 1
  finally:
    # Fix: stop() previously ran only on the normal path; an interrupt
    # returned 1 without ever stopping the watcher. `finally` covers both.
    watcher.stop()


if __name__ == "__main__":
  # Verbose root-logger setup for a long-running standalone tool.
  logging.basicConfig(
      level=logging.DEBUG,
      format='%(levelname)8s %(asctime)s %(name)s %(filename)s:%(lineno)d] %(message)s')

  flags.DEFINE_string('kafka_topic_type',
                      None,
                      'TopicType member name selecting which logs to '
                      'publish (e.g. STRAT, TICKER, ACCOUNT).')

  flags.DEFINE_string('target_dir',
                      '~/data/strat_proto_log',
                      'Directory tree to watch for proto log files.')

  flags.DEFINE_string('kafka_servers',
                      'coin-kafka.corp.prestolabs.io:9092',
                      'Kafka server list separated by comma.')

  flags.DEFINE_integer('exit_after_hours', 24,
                       'Exit the process after this many hours.')

  # Fix: without this, omitting --kafka_topic_type crashed later inside
  # main() with an opaque KeyError from TopicType[None]; absl now rejects
  # the missing flag up front with a clear usage error.
  flags.mark_flag_as_required('kafka_topic_type')

  app.run(main)
