# Copyright (c) 2021 Presto Labs Pte. Ltd.
# Author: daniel

import datetime
from datetime import datetime as dt
import os
import matplotlib.pyplot as plt
import pytz
import json
from absl import app, flags

from coin.support.proto_log.app.strat_log_replayer import OnLog as strat_log_parser
from coin.support.proto_log.app.og_rate_limit_hit_analyzer import RateLimitHitAnalyzer
from coin.support.proto_log.app.og_http_request_timeout_analyzer import HttpRequestTimeoutAnalyzer
from coin.support.proto_log.app.og_fail_orders_analyzer import FailOrdersAnalyzer
from coin.support.proto_log.app.og_debug_info_analyzer import DebugInfoAnalyzer
from coin.support.proto_log.logic.util import (read_strat_log, StratInfo)


FLAGS = flags.FLAGS


def _sumzip(*items):
  return [sum(values) for values in zip(*items)]


def _get_yesterday():
  return (datetime.date.today() - datetime.timedelta(days=1)).strftime('%Y%m%d')


def _get_min_max_dt(trading_date):
  """Return (start, end) UTC datetimes covering the whole *trading_date* day."""
  day_start = datetime.time.min.replace(tzinfo=pytz.UTC)
  day_end = datetime.time.max.replace(tzinfo=pytz.UTC)
  return (datetime.datetime.combine(trading_date, day_start),
          datetime.datetime.combine(trading_date, day_end))


class OGLogger(object):
  """Replays OG_LOG/TELEMETRY strat-log records for one machine + strategy."""

  def __init__(self, root_dir, machine, strategy_name, start_dt, end_dt):
    # Time bounds are inclusive endpoints of the replay window (UTC datetimes).
    self._root_dir = root_dir
    self._machine = machine
    self._strategy_name = strategy_name
    self._start_dt = start_dt
    self._end_dt = end_dt

  def dump(self):
    """Replay the strat log over [start_dt, end_dt] and return parsed records."""
    log_parser = strat_log_parser(symbol_filter=None,
                                  log_type_filter=['OG_LOG', 'TELEMETRY'])
    info = StratInfo(strategy_name=self._strategy_name,
                     machine=self._machine,
                     trading_date=None)
    # read_strat_log pushes every log entry through the parser callback;
    # the parser accumulates them on record_list.
    read_strat_log(strat_info=info,
                   start_time=self._start_dt,
                   end_time=self._end_dt,
                   callback=log_parser.on_log,
                   kafka_config=None,
                   root_dir=self._root_dir)
    return log_parser.record_list


class OGLogAnalyzer(RateLimitHitAnalyzer,
                    HttpRequestTimeoutAnalyzer,
                    DebugInfoAnalyzer,
                    FailOrdersAnalyzer):
  """Drives the OG-log analyzers over selected machines/strategies.

  Each mixin base contributes ``generate_*_report`` /
  ``generate_*_summary_report`` methods and accumulates a ``_*_summary``
  list on the instance (e.g. ``_rate_limit_hit_summary``); this class
  drives them per strategy and combines the results into one report.
  """

  def __init__(self,
               root_dir,
               machine,
               strategy_name,
               trading_date,
               start_time,
               end_time,
               log_type_filter,
               output_dir,
               local_run,
               tmp_dir,
               slurm_run):
    """Args:
      root_dir: root of the strat proto-log tree (<root>/<machine>/<date>).
      machine: comma-separated machine names, or None for every machine.
      strategy_name: comma-separated strategy names, or None for all.
      trading_date: '%Y%m%d' date string; defaults to yesterday when None.
      start_time: optional '%Y%m%dT%H%M%S' UTC start overriding the full day.
      end_time: optional '%Y%m%dT%H%M%S' UTC end overriding the full day.
      log_type_filter: comma-separated log types to analyze, or None for all.
      output_dir: directory where report images are written (non-local runs).
      local_run: if True, show plots interactively instead of saving them.
      tmp_dir: scratch directory for per-job JSON reports on slurm runs.
      slurm_run: if True, dump JSON fragments for a later combine pass.
    """
    RateLimitHitAnalyzer.__init__(self)
    HttpRequestTimeoutAnalyzer.__init__(self)
    DebugInfoAnalyzer.__init__(self)
    FailOrdersAnalyzer.__init__(self)
    self._root_dir = root_dir
    # Keep both the full filter list and its first entry; the first entry is
    # used to name the per-job slurm report file.
    self._machine = None
    self._machine_list = []
    if machine is not None:
      self._machine_list = [elem.strip() for elem in machine.split(',')]
      self._machine = self._machine_list[0]
    self._strategy_name = None
    self._strategy_list = []
    if strategy_name is not None:
      self._strategy_list = [elem.strip() for elem in strategy_name.split(',')]
      self._strategy_name = self._strategy_list[0]
    self._start_time = start_time
    self._end_time = end_time
    self._log_type_filter = log_type_filter
    self._output_dir = output_dir
    self._local_run = local_run
    self._tmp_dir = tmp_dir
    self._slurm_run = slurm_run
    if trading_date is None:
      trading_date = _get_yesterday()
    self._trading_date = trading_date
    # The default analysis window is the whole trading date (UTC); explicit
    # start/end times narrow it.
    trading_date_tmp = datetime.datetime.strptime(trading_date, '%Y%m%d').date()
    start_dt, end_dt = _get_min_max_dt(trading_date_tmp)
    if start_time is not None:
      start_dt = datetime.datetime.strptime(start_time, '%Y%m%dT%H%M%S').replace(tzinfo=pytz.UTC)
    if end_time is not None:
      end_dt = datetime.datetime.strptime(end_time, '%Y%m%dT%H%M%S').replace(tzinfo=pytz.UTC)
    self._start_dt = start_dt
    self._end_dt = end_dt
    # An empty list means "analyze every log type".
    self._log_type_list = []
    if log_type_filter is not None:
      self._log_type_list = [elem.strip() for elem in log_type_filter.split(',')]

  def process_by_strategy(self, machine, strategy_name):
    """Dump the OG log for one machine+strategy and run each enabled analyzer."""
    start_dt_str = self._start_dt.strftime('%Y%m%dT%H%M%S')
    end_dt_str = self._end_dt.strftime('%Y%m%dT%H%M%S')
    st = dt.now()
    print(
        '%s: running for %s-%s %s %s ...' %
        (st, start_dt_str, end_dt_str, machine, strategy_name))
    og_logger = OGLogger(self._root_dir,
                         machine,
                         strategy_name,
                         self._start_dt,
                         self._end_dt)
    records = og_logger.dump()
    # Each analyzer runs when its log type is requested (or when no filter
    # was given at all).
    if len(self._log_type_list) == 0 or 'DEBUG_INFO' in self._log_type_list:
      print('analyzing debug info ...')
      self.generate_debug_info_report(records,
                                      strategy_name,
                                      machine,
                                      self._start_dt,
                                      self._end_dt,
                                      self._output_dir,
                                      self._local_run)
    if len(self._log_type_list) == 0 or 'RATE_LIMIT_HIT' in self._log_type_list:
      print('analyzing rate limit report ...')
      self.generate_rate_limit_hit_report(records,
                                          strategy_name,
                                          machine,
                                          self._start_dt,
                                          self._end_dt,
                                          self._output_dir,
                                          self._local_run)
    if len(self._log_type_list) == 0 or 'HTTP_REQUEST_TIMEOUT' in self._log_type_list:
      print('analyzing http conn timeout ...')
      self.generate_http_request_timeout_report(records,
                                                strategy_name,
                                                machine,
                                                self._start_dt,
                                                self._end_dt,
                                                self._output_dir,
                                                self._local_run)
    if len(self._log_type_list) == 0 or 'FAIL_ORDERS' in self._log_type_list:
      print('analyzing fail orders ...')
      self.generate_fail_orders_report(records,
                                       strategy_name,
                                       machine,
                                       self._start_dt,
                                       self._end_dt,
                                       self._output_dir,
                                       self._local_run)
    et = dt.now()
    print(
        '%s: done for %s-%s %s %s, spent: %s seconds' %
        (et, start_dt_str, end_dt_str, machine, strategy_name, (et - st).seconds))

  def process_by_machine(self, machine):
    """Discover strategies under <root>/<machine>/<date> and process each one."""
    fin_path = f"{self._root_dir}/{machine}/{self._trading_date}"
    if not os.path.exists(fin_path):
      print('ERROR: path non-exist %s' % fin_path)
      return
    # Strategy name is the filename prefix before the first '.'; keep
    # first-seen order and de-duplicate.
    strategies = []
    for filename in os.listdir(fin_path):
      strategy_name = filename.split('.')[0]
      if strategy_name in strategies:
        continue
      strategies.append(strategy_name)
    for strategy_name in strategies:
      if len(self._strategy_list) > 0 and strategy_name not in self._strategy_list:
        continue
      self.process_by_strategy(machine, strategy_name)

  def generate_combine_summary_report(self,
                                      trade_date,
                                      output_dir,
                                      local_run,
                                      slurm_run):
    """Combine the per-analyzer summaries into one report.

    On slurm runs the summaries are dumped as a JSON fragment (later merged
    by prepare_analysis_result); otherwise a 3-panel stacked-bar figure is
    drawn and either shown (local_run) or saved as an SVG.
    """
    print("generating combine summary report ...")
    if slurm_run:
      # another process flow for slurm run: write this job's summaries to a
      # JSON fragment named after the first strategy/machine filter entry.
      tmp_dir = f"{self._tmp_dir}/{self._trading_date}"
      if not os.path.exists(tmp_dir):
        os.makedirs(tmp_dir)
      combine_report = {
          "rate_limit_hit_summary": self._rate_limit_hit_summary,
          "http_request_timeout_summary": self._http_request_timeout_summary,
          "fail_orders_summary": self._fail_orders_summary,
          "error_stats_summary": self._error_stats_summary,
      }
      report = f"{tmp_dir}/{self._strategy_name}_{self._machine}.json"
      with open(report, 'w') as ofile:
        json.dump(combine_report, ofile, indent=2)
      return
    plt.rcParams.update({'font.size': 7})
    fig = plt.figure(figsize=(30, 16), dpi=200)
    # draw rate limit hit summary: one stacked bar per strategy, segments
    # stacked in query/place/amend/cancel order.
    plt.subplot(311)
    plt.title('rate limit hit summary')
    if len(self._rate_limit_hit_summary) > 0:
      records = sorted(self._rate_limit_hit_summary, key=lambda x: x['total_num'], reverse=True)
      color = ['steelblue', 'orange', 'chocolate', 'green']
      label = []
      query_color = []
      query_total = []
      place_color = []
      place_total = []
      amend_color = []
      amend_total = []
      cancel_color = []
      cancel_total = []
      for record in records:
        label.append(record['strategy_name'])
        query_total.append(record['query_num'])
        query_color.append(color[0])
        place_total.append(record['place_num'])
        place_color.append(color[1])
        amend_total.append(record['amend_num'])
        amend_color.append(color[2])
        cancel_total.append(record['cancel_num'])
        cancel_color.append(color[3])

      # Each layer's `bottom` is the running sum of the layers below it.
      plt.bar(label, query_total, width=0.35, label='Query', color=query_color)
      plt.bar(label, place_total, width=0.35, label='Place', color=place_color,
              bottom=_sumzip(query_total))
      plt.bar(label, amend_total, width=0.35, label='Amend', color=amend_color,
              bottom=_sumzip(query_total, place_total))
      plt.bar(label, cancel_total, width=0.35, label='Cancel', color=cancel_color,
              bottom=_sumzip(query_total, place_total, amend_total))
      plt.ylabel('total # of hits')
      plt.xticks(range(len(label)), label, rotation=270)
    # draw http request timeout summary
    plt.subplot(312)
    plt.title('http request timeout summary')
    if len(self._http_request_timeout_summary) > 0:
      records = \
          sorted(self._http_request_timeout_summary, key=lambda x: x['total_num'], reverse=True)
      label = []
      total = []
      for record in records:
        label.append(record['strategy_name'])
        total.append(record['total_num'])
      # draw bar chart
      plt.bar(label, total, width=0.35, label='')
      plt.ylabel('total # of timedout')
      plt.xticks(range(len(label)), label, rotation=270)
    # draw failed orders summary
    plt.subplot(313)
    plt.title('fail orders summary')
    if len(self._fail_orders_summary) > 0:
      records = \
          sorted(self._fail_orders_summary, key=lambda x: x['total_num'], reverse=True)
      color = ['steelblue', 'orange']
      label = []
      order_rejected_color = []
      order_rejected_total = []
      cancel_rejected_color = []
      cancel_rejected_total = []
      for record in records:
        order_rejected_num = int(record['order_rejected_num'])
        cancel_rejected_num = int(record['cancel_rejected_num'])
        total = order_rejected_num + cancel_rejected_num
        # Skip strategies with no rejections at all to keep the chart readable.
        if total == 0:
          continue
        label.append(record['strategy_name'])
        order_rejected_total.append(order_rejected_num)
        order_rejected_color.append(color[0])
        cancel_rejected_total.append(cancel_rejected_num)
        cancel_rejected_color.append(color[1])
      # draw bar chart
      plt.bar(label, order_rejected_total, width=0.35, label='Order Rejected',
              color=order_rejected_color)
      plt.bar(label, cancel_rejected_total, width=0.35, label='Cancel Rejected',
              color=cancel_rejected_color, bottom=_sumzip(order_rejected_total))
      plt.ylabel('total # of fail orders')
      plt.xticks(range(len(label)), label, rotation=270)
    plt.grid(True)
    plt.legend()
    fig.tight_layout()
    if local_run:
      plt.show()
    else:
      output_path = f"{output_dir}/{trade_date}"
      if not os.path.exists(output_path):
        os.makedirs(output_path)
      output_file = f"{trade_date}_combine_analysis_summary_report.svg"
      print(f"write to {output_path}/{output_file}")
      fig.savefig(f"{output_path}/{output_file}", dpi=fig.dpi)
    plt.close(fig)

  def prepare_analysis_result(self):
    """Merge the per-job JSON fragments written by slurm runs back onto self."""
    # read report file which generated by slurm job
    tmp_dir = f"{self._tmp_dir}/{self._trading_date}"
    if not os.path.exists(tmp_dir):
      print("dir not exist: ", tmp_dir)
      return
    smy_rpt = {
        "rate_limit_hit_summary": [],
        "http_request_timeout_summary": [],
        "fail_orders_summary": [],
        "error_stats_summary": [],
    }
    # NOTE(review): every file in tmp_dir is assumed to be a JSON fragment
    # produced by generate_combine_summary_report — verify nothing else
    # writes into this directory.
    for report in os.listdir(tmp_dir):
      path = f"{tmp_dir}/{report}"
      with open(path) as infile:
        tmp_rpt = json.load(infile)
        smy_rpt['rate_limit_hit_summary'] += tmp_rpt['rate_limit_hit_summary']
        smy_rpt['http_request_timeout_summary'] += tmp_rpt['http_request_timeout_summary']
        smy_rpt['fail_orders_summary'] += tmp_rpt['fail_orders_summary']
        smy_rpt['error_stats_summary'] += tmp_rpt['error_stats_summary']
    self._rate_limit_hit_summary = smy_rpt['rate_limit_hit_summary']
    self._http_request_timeout_summary = smy_rpt['http_request_timeout_summary']
    self._fail_orders_summary = smy_rpt['fail_orders_summary']
    self._error_stats_summary = smy_rpt['error_stats_summary']

  def generate_summary_report(self):
    """Emit each enabled per-analyzer summary, then the combined report."""
    if len(self._log_type_list) == 0 or 'RATE_LIMIT_HIT' in self._log_type_list:
      # generate rate limit hit summary report
      self.generate_rate_limit_hit_summary_report(self._trading_date,
                                                  self._output_dir,
                                                  self._local_run,
                                                  self._slurm_run)
    if len(self._log_type_list) == 0 or 'HTTP_REQUEST_TIMEOUT' in self._log_type_list:
      # generate http request timeout summary report
      self.generate_http_request_timeout_summary_report(self._trading_date,
                                                        self._output_dir,
                                                        self._local_run,
                                                        self._slurm_run)
    if len(self._log_type_list) == 0 or 'FAIL_ORDERS' in self._log_type_list:
      # generate failed orders summary report
      self.generate_fail_orders_summary_report(self._trading_date,
                                               self._output_dir,
                                               self._local_run,
                                               self._slurm_run)
    # generate combine summary report
    self.generate_combine_summary_report(self._trading_date,
                                         self._output_dir,
                                         self._local_run,
                                         self._slurm_run)

  def process(self):
    """Analyze every selected machine under root_dir, then summarize."""
    if not os.path.exists(self._root_dir):
      # Bug fix: was `self._roor_dir`, which raised AttributeError on the
      # very error path meant to report the missing directory.
      print('ERROR: root dir non-exist %s' % self._root_dir)
      return
    # analyze og log
    for machine in os.listdir(self._root_dir):
      if len(self._machine_list) > 0 and machine not in self._machine_list:
        continue
      self.process_by_machine(machine)
    # generate summary report
    self.generate_summary_report()


def main(_):
  """absl entry point: build an OGLogAnalyzer from flags and run it."""
  analyzer = OGLogAnalyzer(os.path.expanduser(FLAGS.root_dir),
                           FLAGS.machine,
                           FLAGS.strategy_name,
                           FLAGS.trading_date,
                           FLAGS.start_time,
                           FLAGS.end_time,
                           FLAGS.log_type,
                           FLAGS.output_dir,
                           FLAGS.local_run,
                           FLAGS.tmp_dir,
                           FLAGS.slurm_run)
  if FLAGS.generate_summary_report_only:
    # Combine pass: merge previously-dumped per-job reports, then summarize.
    analyzer.prepare_analysis_result()
    analyzer.generate_summary_report()
  else:
    analyzer.process()


# usage:
# .pyrunner python/coin/support/proto_log/app/strat_state_anaylyzer.py
# params(option):
#   --log_type=RATE_LIMIT_HIT,HTTP_REQUEST_TIMEOUT
#
if __name__ == '__main__':
  # (name, default, help) triples for every string-valued flag.
  _string_flag_defs = (
      ('root_dir',
       '/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_proto_log',
       'root dir specify'),
      ('output_dir',
       '/remote/iosg/strat-1/buckets/analysis.derived.coin/live/plot/og_log_plot',
       'output_dir.'),
      ('tmp_dir',
       '/tmp',
       'tmp dir for slurm run'),
      ('kafka_config_filename', None, 'kafka config'),
      ('trading_date', None, 'Trading date in form of %Y%m%d.'),
      ('machine', None, 'Instance machine name.'),
      ('strategy_name', None, 'Strategy name.'),
      ('start_time', None, '%Y%m%dT%H%M%S'),
      ('end_time', None, '%Y%m%dT%H%M%S'),
      ('symbol', None, ''),
      ('log_type', None, ''),
  )
  for _flag_name, _flag_default, _flag_help in _string_flag_defs:
    flags.DEFINE_string(_flag_name, _flag_default, _flag_help)
  flags.DEFINE_bool('local_run', False, "")
  flags.DEFINE_bool('slurm_run', False, "")
  flags.DEFINE_bool('generate_summary_report_only', False, "generate summary report only")
  app.run(main)
