# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: leon

import datetime
import json
import logging
import os
import subprocess
import sys

import pytz
from absl import app, flags

from coin.proto.coin_order_gateway_pb2 import OrderGatewayLog, OrderEvent
from coin.support.proto_log.app.strat_log_replayer import _get_min_max_dt
from coin.support.proto_log.logic.util import (read_og_log, OgInfo)
from coin.util.queue.config import KafkaConfig
from coin.util.queue.constants import TopicType
import google.protobuf.json_format as json_format
import coin.proto.coin_order_enums_pb2 as coin_enum

FLAGS = flags.FLAGS

# When set, only ORDER_EVENT records whose event type is DEBUG_INFO are dumped;
# all other record types are skipped.
flags.DEFINE_bool('debug_info_only', False, "debug info only")


class OnLog(object):
  """Sink that decodes serialized OrderGatewayLog records into JSON lines.

  Each accepted record becomes one JSON object per line in the output file.
  """

  def __init__(self, filename):
    self.file_obj = open(filename, "w")
    self.last_print_time = 0  # timestamp (ns) of the last progress print
    self.num = 0              # count of records written so far

  def close(self):
    self.file_obj.close()

  def on_log(self, timestamp, log):
    """Decode one OrderGatewayLog record and append it to the output file.

    Args:
      timestamp: record timestamp in nanoseconds since the epoch.
      log: serialized OrderGatewayLog bytes.
    """
    pb = OrderGatewayLog()
    pb.ParseFromString(log)

    is_event = pb.type == OrderGatewayLog.ORDER_EVENT
    # In debug-info-only mode, drop everything except DEBUG_INFO order events.
    if FLAGS.debug_info_only and not (
        is_event and pb.event.type == OrderEvent.DEBUG_INFO):
      return

    hr_ts = datetime.datetime.fromtimestamp(
        timestamp / 1e9).strftime('%Y%m%d %H:%M:%S.%f')
    data = json_format.MessageToDict(pb, preserving_proto_field_name=True)
    data['record_timestamp'] = timestamp
    data['record_timestamp_human_readable'] = hr_ts

    if pb.HasField('raw_msg_id'):
      # The top 8 bits of raw_msg_id select the sub-topic the message came from.
      data['source'] = coin_enum.OrderSubTopicId.Name(pb.raw_msg_id >> (64 - 8))

    if is_event:
      event = pb.event
      if event.HasField('error_code'):
        # Replace the numeric error code with its enum name for readability.
        data['event']['error_code'] = coin_enum.OrderErrorCode.Name(
            event.error_code)
      if event.type in (OrderEvent.DEBUG_INFO, OrderEvent.UNKNOWN_ORDER_EVENT):
        try:
          data['event']['tag'] = json.loads(event.tag)
        except ValueError:
          # Best effort: tag was not valid JSON; leave it as-is and report.
          print("data: ", data)
    elif pb.type == OrderGatewayLog.RATE_LIMIT_REPORT_MESSAGE:
      data['rate_limit_report_message']['rate_limit_info'] = json.loads(
          pb.rate_limit_report_message.rate_limit_info)

    try:
      json.dump(data, self.file_obj)
    except ValueError:
      print(data)
      raise
    self.file_obj.write("\n")

    self.num += 1
    # Print progress roughly once per hour of record time.
    if timestamp > self.last_print_time + int(3600 * 1e9):
      print(self.num, hr_ts)
      self.last_print_time = timestamp

  def on_kafka_log(self, partition, record):
    # Kafka record timestamps appear to be in milliseconds; the *1e6 scales
    # them to the nanoseconds on_log expects — TODO confirm against producer.
    self.on_log(record.timestamp * 1e6, record.value)

def main(_):
  """Dump order-gateway proto logs for a time window to a JSON-lines file.

  Reads either from files under --root_dir or from Kafka (when
  --kafka_config_filename is given) and writes one JSON object per record.
  """
  owner = FLAGS.owner
  market_type = FLAGS.market_type
  exchange = FLAGS.exchange
  trading_date = FLAGS.trading_date
  start_time = FLAGS.start_time
  end_time = FLAGS.end_time
  root_dir = os.path.expanduser(FLAGS.root_dir)
  kafka_config_filename = FLAGS.kafka_config_filename
  machine = FLAGS.machine

  # Validate required flags explicitly: `assert` is silently stripped when
  # running under `python -O`, so it must not guard user input.
  if not market_type:
    raise app.UsageError('--market_type must be specified.')
  if not exchange:
    raise app.UsageError('--exchange must be specified.')
  if not (trading_date or (start_time and end_time)):
    raise app.UsageError(
        '--trading_date or (--start_time and end_time) must be specified.')
  if not machine:
    raise app.UsageError('--machine must be specified.')
  sys.stdout.flush()

  kafka_config = None
  if kafka_config_filename is not None:
    kafka_config = KafkaConfig.from_cmd_config(kafka_config_filename)

  dt_format = '%Y%m%dT%H%M%S'
  if trading_date is not None:
    trading_date = datetime.datetime.strptime(trading_date, '%Y%m%d').date()
    start_dt, end_dt = _get_min_max_dt(trading_date)
  else:
    # start_time/end_time are guaranteed non-None by the validation above.
    start_dt = datetime.datetime.strptime(
        start_time, dt_format).replace(tzinfo=pytz.UTC)
    end_dt = datetime.datetime.strptime(
        end_time, dt_format).replace(tzinfo=pytz.UTC)

  start_dt_str = start_dt.strftime(dt_format)
  end_dt_str = end_dt.strftime(dt_format)
  filename = 'og_log.%s-%s.%s.%s.%s.json' % (
      start_dt_str, end_dt_str, owner, market_type, exchange)
  pretty_filename = 'og_log.pretty.%s-%s.%s.%s.%s.json' % (
      start_dt_str, end_dt_str, owner, market_type, exchange)

  print('Running for %s-%s %s ...' % (start_dt_str, end_dt_str, machine))
  onlog = OnLog(filename)
  # Kafka delivers (partition, record) pairs; raw logs deliver (ts, bytes).
  callback = onlog.on_kafka_log if kafka_config is not None else onlog.on_log

  og_info = OgInfo(market_type=market_type, exchange=exchange, owner=owner,
                   machine=machine, trading_date=None)
  kafka_topic_type = (TopicType[FLAGS.topic_type]
                      if FLAGS.topic_type is not None else None)
  try:
    read_og_log(og_info=og_info, start_time=start_dt, end_time=end_dt,
                callback=callback, root_dir=root_dir,
                kafka_config=kafka_config,
                kafka_topic_type=kafka_topic_type)
  finally:
    # Close the output even if reading fails, so partial output is flushed.
    onlog.close()

  # NOTE(review): the jq-based pretty-printing step that produced
  # pretty_filename is disabled, so only `filename` is actually written.
  print(filename, pretty_filename)


if __name__ == '__main__':
  # Flags are registered under the main guard so that importing this module
  # (e.g. for OnLog) does not add them to another binary's flag namespace.
  flags.DEFINE_string('owner', None, 'Account.')
  flags.DEFINE_string('market_type', None, 'Market type.')
  flags.DEFINE_string('exchange', None, 'Exchange name.')
  flags.DEFINE_string('trading_date', None, 'Trading date in form of %Y%m%d.')
  flags.DEFINE_string('start_time', None, '%Y%m%dT%H%M%S')
  flags.DEFINE_string('end_time', None, '%Y%m%dT%H%M%S')
  flags.DEFINE_string('root_dir', '~/data/og_proto_log', 'root_dir.')
  flags.DEFINE_string('kafka_config_filename', None, 'kafka config')
  flags.DEFINE_string('topic_type', None, 'topic type')
  flags.DEFINE_string('machine', None, 'Instance machine name.')
  logging.basicConfig(level='DEBUG', format='%(levelname)8s %(asctime)s %(name)s] %(message)s')
  app.run(main)
