# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: leon

import concurrent.futures
import datetime
import functools
import json
import logging
import os
import sys

import pytz
from absl import app, flags

from coin.base.param_util import to_list
from coin.base.datetime_util import to_timestamp_int
from coin.proto.coin_strategy_pb2 import StrategyLog
from coin.support.proto_log.logic.util import (read_strat_log, StratInfo)
from coin.util.queue.config import KafkaConfig
from coin.util.queue.constants import TopicType
from coin.util.queue.tools.kafka_archive import run_from_kafka_archive
import google.protobuf.json_format as json_format
import coin.proto.coin_order_enums_pb2 as coin_enum

FLAGS = flags.FLAGS


def _get_min_max_dt(trading_date):
  """Return the UTC-aware (start-of-day, end-of-day) bounds for a date.

  Args:
    trading_date: A datetime.date.

  Returns:
    Tuple of (00:00:00.000000, 23:59:59.999999) datetimes on trading_date,
    both carrying pytz.UTC tzinfo.
  """
  day_start = datetime.time.min.replace(tzinfo=pytz.UTC)
  day_end = datetime.time.max.replace(tzinfo=pytz.UTC)
  return (datetime.datetime.combine(trading_date, day_start),
          datetime.datetime.combine(trading_date, day_end))


def _get_start_end_datetime(trading_date, start_time, end_time, dt_format=None):
  """Resolve the replay window to a pair of UTC-aware datetimes.

  Args:
    trading_date: Optional '%Y%m%d' string. When given, the window covers the
      whole UTC day and start_time/end_time are ignored.
    start_time: Window start string; required when trading_date is None.
    end_time: Window end string; required when trading_date is None.
    dt_format: strptime format for start_time/end_time; defaults to
      '%Y%m%dT%H%M%S'.

  Returns:
    (start_dt, end_dt) timezone-aware UTC datetimes.
  """
  if dt_format is None:
    dt_format = '%Y%m%dT%H%M%S'
  if trading_date is None:
    # Explicit window: both endpoints must be supplied.
    assert start_time is not None and end_time is not None, (start_time, end_time)
    start_dt = datetime.datetime.strptime(start_time, dt_format)
    end_dt = datetime.datetime.strptime(end_time, dt_format)
    return (start_dt.replace(tzinfo=pytz.UTC),
            end_dt.replace(tzinfo=pytz.UTC))
  date_obj = datetime.datetime.strptime(trading_date, '%Y%m%d').date()
  return _get_min_max_dt(date_obj)


def _get_time_range_groups(start_time, end_time, timedelta):
  range_start = start_time
  range_end = start_time + timedelta
  ranges = []
  while range_end < end_time:
    ranges.append((range_start, range_end))
    range_start = range_end
    range_end += datetime.timedelta(hours=1)
  else:
    ranges.append((range_start, end_time))
  return ranges


def _replay_log(*, symbol_filter, log_type_filter, **replay_args):
  """Replay strategy logs for one time range and collect matching records.

  Args:
    symbol_filter: Optional list of symbols to keep (see OnLog).
    log_type_filter: Optional list of log types to keep (see OnLog).
    **replay_args: Forwarded verbatim to read_strat_log, with a 'callback'
      entry added. When 'kafka_config' is present and not None, the
      Kafka-shaped callback is installed; otherwise the plain one.

  Returns:
    List of decoded record dicts accumulated by the callback.
  """
  collector = OnLog(symbol_filter=symbol_filter, log_type_filter=log_type_filter)
  if replay_args.get('kafka_config') is not None:
    replay_args['callback'] = collector.on_kafka_log
  else:
    replay_args['callback'] = collector.on_log
  read_strat_log(**replay_args)
  return collector.record_list


class OnLog(object):
  """Callback holder that decodes serialized StrategyLog records.

  One instance is created per replayed time range; records that survive the
  optional symbol/log-type filters are appended to `record_list` as
  JSON-friendly dicts.
  """

  def __init__(self, symbol_filter, log_type_filter):
    # Both filters are optional lists; None disables that filter entirely.
    self._symbol_filter = symbol_filter
    self._log_type_filter = log_type_filter
    # Decoded, filtered records in arrival order.
    self.record_list = []

  def _log_filtered(self, data):
    """Return True when `data` should be dropped by the active filters.

    Records lacking a 'type' key are dropped whenever any filter is active,
    since they cannot be matched against it (proto3 MessageToDict omits
    unset fields).
    """
    if self._log_type_filter is not None:
      if 'type' not in data:
        return True
      if data['type'] not in self._log_type_filter:
        return True
    if self._symbol_filter is not None:
      if 'type' not in data:
        return True
      # The symbol filter only applies to OG order events; all other record
      # types pass through.
      if data['type'] == 'OG_LOG' and data['og_log']['type'] == 'ORDER_EVENT':
        order_event = data['og_log']['event']
        if order_event.get('symbol') not in self._symbol_filter:
          return True
    return False

  def on_log(self, timestamp, log):
    """Decode one serialized StrategyLog and buffer it unless filtered.

    Args:
      timestamp: Record timestamp in nanoseconds since the epoch.
      log: Serialized StrategyLog protobuf bytes.
    """
    pb = StrategyLog()
    pb.ParseFromString(log)
    data = json_format.MessageToDict(pb, preserving_proto_field_name=True)

    if self._log_filtered(data):
      return

    # NOTE(review): fromtimestamp() without a tz renders in the machine's
    # local timezone, while the rest of this tool works in UTC — confirm
    # local-time output is intended here.
    hr_ts = datetime.datetime.fromtimestamp(timestamp / 1e9).strftime(
        '%Y%m%d %H:%M:%S.%f')
    data['record_timestamp'] = timestamp
    data['record_timestamp_human_readable'] = hr_ts
    # Bug fix: use .get() — when both filters are None, _log_filtered() never
    # checks for 'type', so a record without it used to raise KeyError here.
    if data.get('type') == 'OG_LOG' and pb.og_log.HasField('raw_msg_id'):
      # The top 8 bits of the 64-bit raw_msg_id encode the sub-topic id.
      source = coin_enum.OrderSubTopicId.Name(pb.og_log.raw_msg_id >> (64 - 8))
      data['og_log']['source'] = source
    self.record_list.append(data)

  def on_kafka_log(self, partition, record):
    """Kafka-consumer-shaped adapter; `partition` is unused.

    Assumes record.timestamp is in milliseconds (hence * 1e6 to reach the
    nanoseconds on_log expects) — TODO confirm with the consumer config.
    """
    self.on_log(record.timestamp * 1e6, record.value)


def main(_):
  """Replay strategy proto logs over a time window and dump them to JSON.

  Splits the window into hour-long chunks, replays each chunk in its own
  worker process, concatenates the decoded records in chunk order, and
  writes them to strat_log.<start>-<end>.<strategy>.json in the current
  directory.
  """
  root_dir = os.path.expanduser(FLAGS.root_dir)
  kafka_config_filename = FLAGS.kafka_config_filename
  trading_date = FLAGS.trading_date
  machine = FLAGS.machine
  strategy_name = FLAGS.strategy_name
  start_time = FLAGS.start_time
  end_time = FLAGS.end_time
  symbol_filter = FLAGS.symbol
  log_type_filter = FLAGS.log_type
  assert machine, '--machine must be specified.'
  assert strategy_name, '--strategy_name must be specified.'
  sys.stdout.flush()

  # Without a kafka config the replay falls back to reading log files
  # under root_dir (see _replay_log's callback selection).
  kafka_config = None
  if kafka_config_filename is not None:
    kafka_config = KafkaConfig.from_cmd_config(kafka_config_filename)

  # Comma-separated flags become lists; None means "no filtering".
  if symbol_filter is not None:
    symbol_filter = [elem.strip() for elem in symbol_filter.split(',')]
  if log_type_filter is not None:
    log_type_filter = [elem.strip() for elem in log_type_filter.split(',')]

  # Either --trading_date (whole UTC day) or --start_time/--end_time.
  start_dt, end_dt = _get_start_end_datetime(trading_date, start_time, end_time)
  dt_format = '%Y%m%dT%H%M%S'
  start_dt_str = start_dt.strftime(dt_format)
  end_dt_str = end_dt.strftime(dt_format)
  print('Running for %s-%s %s ...' % (start_dt_str, end_dt_str, machine))

  strat_info = StratInfo(strategy_name=strategy_name, machine=machine, trading_date=None)
  key_futures = []
  results = []
  with concurrent.futures.ProcessPoolExecutor(max_workers=FLAGS.max_workers) as executor:
    # One worker task per hour-long sub-range of the window.
    time_ranges = _get_time_range_groups(start_dt, end_dt, datetime.timedelta(hours=1))
    for time_range in time_ranges:
      range_start, range_end = time_range
      replay_func = functools.partial(
          _replay_log,
          symbol_filter=symbol_filter, log_type_filter=log_type_filter,
          strat_info=strat_info, start_time=range_start, end_time=range_end,
          root_dir=root_dir, kafka_config=kafka_config,
          kafka_topic_type=TopicType[FLAGS.topic_type])
      key_futures.append((time_range, executor.submit(replay_func)))
    # Collect in submission order so records stay chronologically grouped
    # by sub-range; any worker failure aborts the whole run.
    for time_range, future in key_futures:
      try:
        results += future.result()
      except Exception as e:
        logging.error('Fail to replay strat log. %s' % time_range)
        raise e

  filename = 'strat_log.%s-%s.%s.json' % (start_dt_str, end_dt_str, strategy_name)
  with open(filename, 'w') as out_file:
    if FLAGS.unpretty:
      # One record per line inside a JSON array — still valid JSON, but
      # easier to grep than the indented dump below.
      records = '[\n' + ',\n'.join(json.dumps(record) for record in results) + '\n]'
      out_file.write(records)
    else:
      json.dump(results, out_file, indent=2)
  print(filename)


if __name__ == '__main__':
  # Input source: local log directory, or Kafka when a config is given.
  flags.DEFINE_string('root_dir', '~/data/strat_proto_log', 'root_dir.')
  flags.DEFINE_string('kafka_config_filename', None, 'kafka config')
  flags.DEFINE_string('topic_type', 'STRAT', 'topic type')
  # Time window: either --trading_date (whole UTC day) or both
  # --start_time and --end_time.
  flags.DEFINE_string('trading_date', None, 'Trading date in form of %Y%m%d.')
  flags.DEFINE_string('machine', None, 'Instance machine name.')
  flags.DEFINE_string('strategy_name', None, 'Strategy name.')
  flags.DEFINE_string('start_time', None, '%Y%m%dT%H%M%S')
  flags.DEFINE_string('end_time', None, '%Y%m%dT%H%M%S')
  # Optional comma-separated filters (see OnLog).
  flags.DEFINE_string('symbol', None, '')
  flags.DEFINE_string('log_type', None, '')
  # Output: compact one-record-per-line JSON instead of indented dump.
  flags.DEFINE_bool('unpretty', False, '')
  flags.DEFINE_integer('max_workers', 24, 'Max number of workers.')
  logging.basicConfig(level='DEBUG', format='%(levelname)8s %(asctime)s %(name)s] %(message)s')
  app.run(main)
