# Copyright (c) 2021 Presto Labs Pte. Ltd.
# Author: daniel

import collections
import datetime
import json
import os
import signal
import time
import traceback

import matplotlib.pyplot as plt
import pytz

import google.protobuf.json_format as json_format

from absl import app, flags
from tornado.ioloop import IOLoop, PeriodicCallback
from coin.util.queue.reader import AsyncKafkaReader
from coin.util.queue.config import KafkaConfig
from coin.proto.coin_strategy_pb2 import StrategyLog
from coin.support.proto_log.logic.util import (read_strat_log, StratInfo)
from coin.support.proto_log.app.og_http_request_timeout_analyzer_v2 import (HttpRequestTimeoutAnalyzer, HttpRequestTimeoutPlotter)
from coin.support.proto_log.app.og_rate_limit_hit_analyzer_v2 import (RateLimitHitAnalyzer, RateLimitHitPlotter)
from coin.support.proto_log.app.og_fail_orders_analyzer_v2 import (FailOrdersAnalyzer, FailOrdersPlotter)
from coin.support.proto_log.app.og_debug_info_analyzer_v2 import (DebugInfoAnalyzer, DebugInfoPlotter)
from coin.base.param_util import to_list


FLAGS = flags.FLAGS


# Config for reading historic OG proto logs from disk.
OGLogConfig = collections.namedtuple(
    'OGLogConfig',
    ['root_dir', 'machine', 'strategy_name', 'trading_date', 'start_dt', 'end_dt']
)


# Config for where the plotters write their reports; tmp_dir holds the
# intermediate per-instance JSON results used by SummaryReportGenerator.
PlotterConfig = collections.namedtuple(
    'PlotterConfig',
    ['output_dir', 'machine', 'strategy_name', 'trading_date', 'start_dt', 'end_dt', 'tmp_dir']
)


# Top-level app config: exactly one of og_log_config (historic mode) or
# kafka_config (realtime mode) is set; see _get_config().
AppConfig = collections.namedtuple(
    'AppConfig',
    ['og_log_config', 'kafka_config', 'plotter_config']
)


def _get_today():
  return datetime.date.today().strftime('%Y%m%d')

def _get_yesterday():
  return (datetime.date.today() - datetime.timedelta(days=1)).strftime('%Y%m%d')


def _get_min_max_dt(trading_date):
  return (
      datetime.datetime.combine(trading_date, datetime.time.min.replace(tzinfo=pytz.UTC)),
      datetime.datetime.combine(trading_date, datetime.time.max.replace(tzinfo=pytz.UTC))
  )


class AnalyserMixin(HttpRequestTimeoutAnalyzer,
                    RateLimitHitAnalyzer,
                    FailOrdersAnalyzer,
                    DebugInfoAnalyzer):
  """Runs all four OG-log analyzers over one record batch.

  Results are collected into a single mapping keyed by
  '<analyzer>_result'; each value starts as None until process() runs.
  """

  def __init__(self, app_config):
    self._og_log_config = app_config.og_log_config

    # Initialize every base analyzer explicitly with the same log config.
    for base in (HttpRequestTimeoutAnalyzer, RateLimitHitAnalyzer,
                 FailOrdersAnalyzer, DebugInfoAnalyzer):
      base.__init__(self, self._og_log_config)

    self._results = collections.defaultdict(dict)
    for key in ('http_request_timeout_result', 'rate_limit_hit_result',
                'fail_orders_result', 'debug_info_result'):
      self._results[key] = None

  def results(self):
    """Return the most recent per-analyzer result mapping."""
    return self._results

  def process(self, records):
    """Run every analyzer over records; return the updated result mapping."""
    self._results['http_request_timeout_result'] = HttpRequestTimeoutAnalyzer.process(self, records)
    self._results['rate_limit_hit_result'] = RateLimitHitAnalyzer.process(self, records)
    self._results['fail_orders_result'] = FailOrdersAnalyzer.process(self, records)
    self._results['debug_info_result'] = DebugInfoAnalyzer.process(self, records)
    return self._results


class PlotterMixin(HttpRequestTimeoutPlotter,
                   RateLimitHitPlotter,
                   FailOrdersPlotter,
                   DebugInfoPlotter):
  """Combines the four OG-log plotters into summary/detail report writers."""

  def __init__(self, app_config):
    self._plotter_config = app_config.plotter_config

    # Initialize every base plotter explicitly with the same config.
    for base in (HttpRequestTimeoutPlotter, RateLimitHitPlotter,
                 FailOrdersPlotter, DebugInfoPlotter):
      base.__init__(self, self._plotter_config)

  def plot_summary(self, results):
    """Render one stacked three-panel summary figure as an SVG file."""
    fig = plt.figure(figsize=(30, 16), dpi=200)
    # Panels top-to-bottom: http request timeout, rate limit hit, fail orders.
    panels = (
        (HttpRequestTimeoutPlotter, 'http_request_timeout_result', 311),
        (RateLimitHitPlotter, 'rate_limit_hit_result', 312),
        (FailOrdersPlotter, 'fail_orders_result', 313),
    )
    for plotter_cls, result_key, position in panels:
      axe = plt.subplot(position)
      plotter_cls.plot_summary(self, results[result_key], axe)

    # Write the figure under <output_dir>/<trading_date>/.
    output_path = f"{self._plotter_config.output_dir}/{self._plotter_config.trading_date}"
    if not os.path.exists(output_path):
      os.makedirs(output_path)
    output_file = f"{self._plotter_config.trading_date}_combine_analysis_summary_report.svg"
    print(f"write to {output_path}/{output_file}")

    fig.tight_layout()
    fig.savefig(f"{output_path}/{output_file}", dpi=fig.dpi)
    plt.close(fig)

  def plot_details(self, results):
    """Delegate detail plotting to each base plotter, including debug info."""
    details = (
        (HttpRequestTimeoutPlotter, 'http_request_timeout_result'),
        (RateLimitHitPlotter, 'rate_limit_hit_result'),
        (FailOrdersPlotter, 'fail_orders_result'),
        (DebugInfoPlotter, 'debug_info_result'),
    )
    for plotter_cls, result_key in details:
      plotter_cls.plot_details(self, results[result_key])


class DataBuffer(object):
  """Accumulates OG_LOG / TELEMETRY strategy-log protos as plain dicts."""

  def __init__(self, app_config):
    # Only these log types are kept; everything else is dropped by _filter().
    self._log_type_filter = [StrategyLog.OG_LOG, StrategyLog.TELEMETRY]
    self._records = []

  def _filter(self, log):
    """Return True when the log should be discarded."""
    if self._log_type_filter is None:
      # No filter configured: accept everything.
      return False
    return log.type not in self._log_type_filter

  def process(self, topic, timestamp, log):
    """Convert an accepted StrategyLog proto to a dict and buffer it."""
    if not self._filter(log):
      data = json_format.MessageToDict(log, preserving_proto_field_name=True)
      self._records.append(data)

  def records(self):
    """Return the buffered record dicts (shared list, not a copy)."""
    return self._records


class OGLogReader(object):
  """Replays historic strategy proto logs from disk into a DataBuffer."""

  def __init__(self,
               data_buffer,
               oglog_config):
    self._data_buffer = data_buffer
    self._root_dir = oglog_config.root_dir
    self._machine = oglog_config.machine
    self._strategy_name = oglog_config.strategy_name
    self._start_dt = oglog_config.start_dt
    self._end_dt = oglog_config.end_dt

  def _dump(self, timestamp, log):
    """Parse one serialized StrategyLog and forward it to the buffer."""
    pb = StrategyLog()
    pb.ParseFromString(log)
    # Historic replay has no kafka topic, so an empty topic is passed.
    self._data_buffer.process("", timestamp, pb)

  def read(self):
    """Read all logs in [start_dt, end_dt] for the configured strategy."""
    # trading_date is None: read_strat_log selects files from the dt range
    # alone — TODO confirm against read_strat_log's contract.
    strat_info = StratInfo(strategy_name=self._strategy_name,
                           machine=self._machine,
                           trading_date=None)
    read_strat_log(strat_info=strat_info,
                   start_time=self._start_dt,
                   end_time=self._end_dt,
                   callback=self._dump,
                   kafka_config=None,
                   root_dir=self._root_dir)


class DataEngine(object):
  """Drives the data flow: read logs (historic or kafka), analyze, plot.

  Historic mode (kafka_config is None): replay the day's logs once, plot
  details, persist results, and let the ioloop exit. Realtime mode:
  consume kafka continuously and re-plot details periodically.
  """

  def __init__(self,
               app_config,
               data_buffer = None,
               data_analyser = None,
               data_plotter = None,
               topics = None):
    # data flow
    self._data_buffer = data_buffer
    self._data_analyser = data_analyser
    self._data_plotter = data_plotter

    self._app_config = app_config

    # kafka
    self._topics = to_list(topics) if topics is not None else None
    # Delay (as a timedelta, accepted by add_timeout) before the periodic
    # detail-plotting callback is started in realtime mode.
    self._dump_after_sec = datetime.timedelta(seconds=60)

    # ioloop
    self._ioloop = None

  def _run_once(self):
    # Historic mode: replay the configured day's logs into the buffer.
    if self._data_buffer is None:
      return
    reader = OGLogReader(self._data_buffer,
                         self._app_config.og_log_config)
    reader.read()

  async def _run_loop(self):
    # TODO(daniel): read data from kafka
    reader = AsyncKafkaReader(self._topics,
                              kafka_config=self._app_config.kafka_config,
                              timestamp_from=None)
    await reader.open()
    try:
      while True:
        # msg is kept for error reporting in the except branch below.
        msg = None
        try:
          records = await reader.read()
          for msg in records:
            topic = msg.topic
            pb = StrategyLog()
            pb.ParseFromString(msg.value)
            if self._data_buffer is None:
              continue
            # msg.timestamp * 1e6 — presumably converts kafka ms to the
            # buffer's expected unit (ns?); confirm against consumers.
            self._data_buffer.process(topic, msg.timestamp * 1e6, pb)
        except ConnectionError:
          # Transient broker issue: retry the read loop.
          continue
        except Exception:
          # Best-effort: log the offending message and keep consuming.
          # Relies on the module-level `import traceback`.
          if msg is not None:
            print(msg)
          print(traceback.format_exc())
          continue
    finally:
      # NOTE(review): blocking sleep inside a coroutine stalls the whole
      # ioloop for one second during shutdown.
      time.sleep(1)
      await reader.close()

    # Only reached if the loop above exits without an exception in flight.
    self._ioloop.stop()

  def _keep_results(self):
    """Persist the analyzer results as JSON under tmp_dir for later merging."""
    if self._data_analyser is None:
      return
    if self._data_plotter is None:
      return
    results = self._data_analyser.results()
    plotter_config = self._app_config.plotter_config
    tmp_dir = f"{plotter_config.tmp_dir}/{plotter_config.trading_date}"
    if not os.path.exists(tmp_dir):
      os.makedirs(tmp_dir)
    # One report per (strategy, machine); SummaryReportGenerator merges them.
    report = f"{tmp_dir}/{plotter_config.strategy_name}_{plotter_config.machine}.json"
    with open(report, 'w') as ofile:
      json.dump(results, ofile, indent=2)

  def _plot_details(self):
    """Analyze the buffered records and render per-analyzer detail plots."""
    if self._data_analyser is None:
      return
    if self._data_plotter is None:
      return
    records = self._data_buffer.records()
    self._data_analyser.process(records)
    results = self._data_analyser.results()
    self._data_plotter.plot_details(results)

  def start(self, *, ioloop):
    """Register callbacks on ioloop for historic or realtime mode.

    The caller is expected to call ioloop.start() afterwards.
    """
    self._ioloop = ioloop

    if self._app_config.kafka_config is None:
      # read historic data from log
      self._ioloop.add_callback(self._run_once)
      self._ioloop.add_callback(self._plot_details)
      self._ioloop.add_callback(self._keep_results)
      # stop() before start(): apparently intended so the upcoming start()
      # drains the queued callbacks once and returns, letting launch()
      # proceed to the summary report — verify with the tornado version.
      self._ioloop.stop()
    else:
      # read realtime data from kafka
      self._ioloop.add_callback(self._run_loop)
      # Bound (uncalled) start method: the 120s periodic detail plotting
      # only begins after _dump_after_sec has elapsed.
      dump = PeriodicCallback(self._plot_details, 120 * 1000).start
      self._ioloop.add_timeout(self._dump_after_sec, dump)

  def signal(self, sig, frame):
    """Signal handler: hard-exit immediately, skipping cleanup."""
    print("caught signal %s" % sig)
    os._exit(0)


class SummaryReportGenerator(object):
  """Merges per-instance analysis JSON reports and plots a combined summary."""

  # Result keys produced by AnalyserMixin / DataEngine._keep_results.
  _RESULT_KEYS = ('http_request_timeout_result',
                  'rate_limit_hit_result',
                  'fail_orders_result',
                  'debug_info_result')

  def __init__(self, data_plotter, app_config):
    self._data_plotter = data_plotter

    self._app_config = app_config

    self._results = collections.defaultdict(dict)
    for key in self._RESULT_KEYS:
      self._results[key] = None

  def _merge_result(self, result):
    """Append each analyzer's 'summary' entries from one report into _results.

    The previous implementation repeated the same merge logic once per key;
    this loops over _RESULT_KEYS with identical behavior.
    """
    for key in self._RESULT_KEYS:
      if self._results[key] is None:
        self._results[key] = {'summary': []}
      # Skip keys that are absent, falsy, or missing a 'summary' section.
      if key in result and result[key] and 'summary' in result[key]:
        self._results[key]['summary'] += result[key]['summary']

  def _save_result_to_file(self):
    # Persisting the merged result is not implemented yet.
    pass

  def _load_result_to_file(self):
    """Load every per-instance JSON report under tmp_dir and merge them."""
    plotter_config = self._app_config.plotter_config
    tmp_dir = f"{plotter_config.tmp_dir}/{plotter_config.trading_date}"
    if not os.path.exists(tmp_dir):
      print("error: dir not exist: ", tmp_dir)
      return

    for report in os.listdir(tmp_dir):
      path = f"{tmp_dir}/{report}"
      with open(path) as infile:
        result = json.load(infile)
        self._merge_result(result)

  def plot_summary(self):
    """Merge all loaded results and render the combined summary figure."""
    self._load_result_to_file()
    self._data_plotter.plot_summary(self._results)


def _get_config(root_dir,
                machine,
                strategy_name,
                trading_date,
                kafka_config_path,
                output_dir,
                tmp_dir):
  """Build the AppConfig for either historic (log) or realtime (kafka) mode.

  Historic mode (kafka_config_path is None) replays trading_date's logs
  (defaulting to yesterday) from root_dir; realtime mode reads today's data
  live from kafka. The date parsing and PlotterConfig construction were
  previously duplicated in both branches; they are built once here.

  Returns:
    An AppConfig with exactly one of og_log_config / kafka_config set.
  """
  if kafka_config_path is None:
    if trading_date is None:
      trading_date = _get_yesterday()
  else:
    # Realtime mode always analyzes the current trading day.
    trading_date = _get_today()

  start_dt, end_dt = _get_min_max_dt(
      datetime.datetime.strptime(trading_date, '%Y%m%d').date())

  plotter_config = PlotterConfig(output_dir=output_dir,
                                 machine=machine,
                                 strategy_name=strategy_name,
                                 trading_date=trading_date,
                                 start_dt=start_dt,
                                 end_dt=end_dt,
                                 tmp_dir=tmp_dir)

  if kafka_config_path is None:
    og_log_config = OGLogConfig(root_dir=root_dir,
                                machine=machine,
                                strategy_name=strategy_name,
                                trading_date=trading_date,
                                start_dt=start_dt,
                                end_dt=end_dt)
    return AppConfig(og_log_config=og_log_config,
                     kafka_config=None,
                     plotter_config=plotter_config)

  kafka_config = KafkaConfig.from_cmd_config(kafka_config_path)
  return AppConfig(og_log_config=None,
                   kafka_config=kafka_config,
                   plotter_config=plotter_config)


def launch(root_dir,
           machine,
           strategy_name,
           trading_date,
           kafka_config_path,
           output_dir,
           tmp_dir):
  """Wire up buffer/analyzer/plotter/engine and run the IOLoop."""
  app_config = _get_config(root_dir, machine, strategy_name, trading_date,
                           kafka_config_path, output_dir, tmp_dir)

  data_buffer = DataBuffer(app_config)
  data_analyser = AnalyserMixin(app_config)
  data_plotter = PlotterMixin(app_config)
  engine = DataEngine(app_config, data_buffer, data_analyser, data_plotter)

  loop = IOLoop.current()
  engine.start(ioloop=loop)
  # Hard-exit handlers for both SIGTERM and SIGINT.
  signal.signal(signal.SIGTERM, engine.signal)
  signal.signal(signal.SIGINT, engine.signal)

  try:
    loop.start()
  except (KeyboardInterrupt, SystemExit):
    print("exception")
    return

  # Plot the combined summary report once the loop has exited
  # (historic mode only; in realtime mode loop.start() blocks).
  report = SummaryReportGenerator(data_plotter, app_config)
  report.plot_summary()

def main(_):
  """absl entry point: forward the command-line flag values to launch()."""
  launch(FLAGS.root_dir, FLAGS.machine, FLAGS.strategy_name,
         FLAGS.trading_date, FLAGS.kafka_config_path,
         FLAGS.output_dir, FLAGS.tmp_dir)

# usage:
#   .pyrunner python/coin/support/proto_log/app/strat_state_anaylyzer.py
# optional params:
#   @daniel: to update
#
if __name__ == '__main__':
  # Flags are only registered when this file is run as a script, so
  # importing it as a module does not pollute the global flag namespace.
  flags.DEFINE_string('root_dir',
                      '/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_proto_log',
                      'root dir')
  flags.DEFINE_string('output_dir',
                      '.',
                      'output dir.')
  flags.DEFINE_string('tmp_dir',
                      '/tmp',
                      'tmp dir for slurm run')
  # When kafka_config_path is set the app runs in realtime mode; otherwise
  # it replays historic logs for trading_date (defaults to yesterday).
  flags.DEFINE_string('kafka_config_path', None, 'kafka config')
  flags.DEFINE_string('trading_date', None, 'trading date in form of %Y%m%d.')
  flags.DEFINE_string('machine', None, 'instance name.')
  flags.DEFINE_string('strategy_name', None, 'strategy name.')
  app.run(main)
