# Copyright (c) 2020 Presto Labs Pte. Ltd.
# Author: jhkim

import time
import copy
import os
import json
import pandas
import datetime

from absl import flags

from cc.appcoin2.strategy.run_sim_util import create_temp_file_and_dump_json
from coin.strategy.mm.tool.archive_base import get_trading_dates


def split_by_hours(from_dt, to_dt, split_hours):
  """Split [from_dt, to_dt) into consecutive chunks of length ``split_hours``.

  Args:
    from_dt: interval start (datetime-like).
    to_dt: interval end (datetime-like).
    split_hours: pandas-parsable timedelta string, e.g. "6H".

  Returns:
    A list of ``[start, end]`` lists; the final chunk may be shorter than
    ``split_hours``. Lists (not tuples) are returned so callers can adjust
    the endpoints in place (e.g. to apply a warmup offset), matching the
    shape produced by split_by_rollover.

  Raises:
    ValueError: if ``split_hours`` parses to a non-positive timedelta
      (the previous implementation looped forever in that case).
  """
  split_timedelta = pandas.Timedelta(split_hours)
  if split_timedelta <= pandas.Timedelta(0):
    raise ValueError(f"split_hours must be positive, got {split_hours!r}")
  intervals = []
  dt_iter = from_dt + split_timedelta
  while dt_iter < to_dt:
    intervals.append([from_dt, dt_iter])
    from_dt = dt_iter
    dt_iter += split_timedelta
  # Remainder chunk (shorter than split_timedelta), if any.
  if from_dt < to_dt:
    intervals.append([from_dt, to_dt])
  return intervals


def split_by_rollover(from_dt, to_dt):
  """Split [from_dt, to_dt] around the Friday 08:00 weekly rollover.

  Each Friday 08:00 strictly after ``from_dt`` and before ``to_dt`` cuts the
  range: trading stops 5 minutes before the rollover and resumes 20 minutes
  after it. Returns a list of ``[start, end]`` lists.
  """
  intervals = []
  # Walk day by day from 08:00 on from_dt's calendar date.
  cursor = datetime.datetime.combine(from_dt.date(), datetime.time(hour=8))
  one_day = datetime.timedelta(days=1)
  while cursor < to_dt:
    # isoweekday() == 5 -> Friday rollover point.
    if cursor.isoweekday() == 5 and from_dt < cursor:
      # Stop 5 minutes early; resume 20 minutes after the rollover.
      intervals.append([from_dt, cursor - datetime.timedelta(minutes=5)])
      from_dt = cursor + datetime.timedelta(minutes=20)
    cursor += one_day
  # Trailing segment after the last rollover (or the whole range if none).
  if from_dt < to_dt:
    intervals.append([from_dt, to_dt])
  return intervals


def _to_trading_intervals(trading_dates, hours, last_date, split_rollover=True, hours_warmup=None):
  """Expand trading dates into concrete [start, end] intervals.

  Per date, builds an interval from the ``hours`` spec ("8-20" or "24H"),
  optionally splits it at the Friday rollover, clips everything to
  ``last_date`` + 1 day, re-merges short Friday fragments, optionally
  re-splits by the ``split_hours`` flag, and finally shifts each start back
  by ``hours_warmup`` hours.

  Args:
    trading_dates: iterable of date-like trading days.
    hours: "START-END" hour spec, or a single end spec like "24H"
      (start defaults to hour 0).
    last_date: last date to include; intervals are clipped to last_date + 1d.
    split_rollover: split at Friday rollover; overridden by the
      ``split_rollover`` flag when that flag is defined.
    hours_warmup: optional warmup hours subtracted from each interval start.

  Returns:
    A list of mutable two-element interval containers.
  """
  # Flag overrides the keyword argument when the flag is registered at all.
  if hasattr(flags.FLAGS, "split_rollover"):
    split_rollover = bool(flags.FLAGS.split_rollover)
  if hours.find("-") >= 0:
    start_hour, end_hour = hours.split("-")
  else:
    start_hour = "0"
    end_hour = hours

  trading_intervals = []
  for trading_date in trading_dates:
    from_dt = trading_date + pandas.Timedelta(start_hour)
    to_dt = trading_date + pandas.Timedelta(end_hour)
    if split_rollover:
      trading_intervals.extend(split_by_rollover(from_dt, to_dt))
    else:
      trading_intervals.extend([[from_dt, to_dt]])

  # Clip everything to the end of the last trading day (exclusive bound).
  last_date += datetime.timedelta(days=1)
  filtered_intervals = []
  for interval in trading_intervals:
    if interval[0] >= last_date:
      # Intervals are chronological, so everything after is out of range too.
      break
    elif interval[1] > last_date:
      # NOTE(review): interval[1] is assigned in place, so every interval
      # reaching this point must be a mutable list, not a tuple.
      interval[1] = last_date
    filtered_intervals.append(interval)
  trading_intervals = filtered_intervals

  # Merge Friday before/after delivery to previous/next intervals
  merged_intervals = []
  for interval in copy.deepcopy(trading_intervals):
    if merged_intervals:
      last_interval = merged_intervals[-1]
      # Merge only adjacent intervals where one side touches a Friday and at
      # least one side is a short (<18h) rollover fragment.
      if last_interval[-1] == interval[0] and \
          (last_interval[0].isoweekday() == 5 or interval[0].isoweekday() == 5) and \
          min(last_interval[1] - last_interval[0], interval[1] - interval[0]) < \
          datetime.timedelta(hours=18):
        last_interval[-1] = copy.deepcopy(interval[-1])
      else:
        merged_intervals.append(interval)
    else:
      merged_intervals.append(interval)

  out_intervals = []
  if hasattr(flags.FLAGS, "split_hours") and flags.FLAGS.split_hours is not None:
    for interval in merged_intervals:
      out_intervals.extend(split_by_hours(*interval, flags.FLAGS.split_hours))
  else:
    out_intervals = merged_intervals

  for interval in out_intervals:
    if hours_warmup is not None:
      # NOTE(review): in-place mutation again requires list intervals; a
      # tuple-producing split_by_hours would raise TypeError here.
      interval[0] -= pandas.Timedelta(f"{hours_warmup}H")

  return out_intervals


def _exclude_trading_intervals(trading_intervals, exclude_intervals):
  time_points = []
  for interval in exclude_intervals:
    time_points.append((interval[0], 0))
    time_points.append((interval[1], 1))
  for interval in trading_intervals:
    time_points.append((interval[0], 2))
    time_points.append((interval[1], 3))
  final_intervals = []
  start_point = None
  is_trading = False
  exclude_cnt = 0
  for point, mark in sorted(time_points):
    if mark == 2:
      # trading start
      if exclude_cnt == 0:
        start_point = point
      is_trading = True
    elif mark == 3:
      # trading end
      if exclude_cnt == 0:
        final_intervals.append([start_point, point])
      is_trading = False
    elif mark == 0:
      # exclude start
      if is_trading and exclude_cnt == 0 and start_point != point:
        final_intervals.append([start_point, point])
      exclude_cnt += 1
    elif mark == 1:
      # exclude end
      if is_trading:
        start_point = point
      exclude_cnt -= 1
  return final_intervals


def to_trading_intervals(trading_dates, hours, last_date, exclude_date=None, hours_warmup=None):
  """Build trading intervals, dropping any explicitly excluded dates first.

  Args:
    trading_dates: iterable of trading days.
    hours: hour spec forwarded to _to_trading_intervals (e.g. "8-20", "24H").
    last_date: last date to include.
    exclude_date: optional comma-separated "%Y%m%d" dates to remove.
    hours_warmup: optional warmup hours, forwarded unchanged.

  Returns:
    The interval list produced by _to_trading_intervals.
  """
  if exclude_date:
    excluded = {
        datetime.datetime.strptime(token, '%Y%m%d')
        for token in exclude_date.split(',')}
    trading_dates = sorted(set(trading_dates) - excluded)
  return _to_trading_intervals(
      trading_dates, hours, last_date, hours_warmup=hours_warmup)


class SimBacktest(object):
  """Base driver for one simulated-backtest run.

  Derives the concrete trading intervals from command-line flags
  (``trading_date``, ``hours``, ``group_dates``, ``exclude_date``,
  ``hours_warmup``), writes a temporary sim-setting JSON with per-run
  overrides, and exposes ``gen_cmds*`` hooks. Subclasses implement
  ``cmd_run_on_date`` (and optionally ``cmd_prerun_on_date``) to emit the
  actual simulator command lines.
  """

  def __init__(self,
               tmp_dirname,
               analyzer_feed_machine,
               config_path,
               strat_prefix,
               *,
               sigcache=None,
               latency_mult=None,
               latency_sec=None,
               is_snapshot=None,
               use_exchange_timestamp=None,
               use_adjusted_pos=False,
               sim_feed_machine=None,
               unfold_stat=True,
               unfold=None,
               use_latency_model=False,
               use_live_latency=False,
               use_live_price=False,
               exclude_trading_halt=False,
               makerfill_tolerance_bps=None,
               bbo_from_cache=True,
               include_funding_rate=False,
               run_with_kline_dumper=False,
               warmup_hours=None,
               training_date=None,
               force_plot_debug_info=None,
               include_mm_stats=None,
               include_lp_plot=None,
               price_bp_tholds=None,
               quote_tholds=None,
               base_tholds=None,
               include_option_pnl=None,
               include_tag_info=None,
               **kwargs):
    """Set up one backtest: intervals, temp config files, strat prefix.

    Side effects: reads many absl flags, writes a sim-setting JSON and an
    empty strat config JSON into ``tmp_dirname``.
    """
    # A trading date range is mandatory for any backtest.
    assert flags.FLAGS.trading_date
    self.use_latency_model = use_latency_model
    self.use_live_latency = use_live_latency
    self.use_live_price = use_live_price
    self.makerfill_tolerance_bps = makerfill_tolerance_bps
    self.bbo_from_cache = bbo_from_cache
    self.include_funding_rate = include_funding_rate
    self.exclude_trading_halt = exclude_trading_halt
    self.unfold_stat = unfold_stat
    self.unfold = unfold
    self.strat_prefix = strat_prefix
    # Kept separate: strat_prefix grows suffixes below, the base does not.
    self.strat_prefix_base = strat_prefix
    self.sim_result_dir = flags.FLAGS.sim_result_dir
    self.dump_pnl_per_x_minute = flags.FLAGS.dump_pnl_per_x_minute
    self.dump_fill_only = flags.FLAGS.dump_fill_only
    # NOTE(review): the attribute stores the *negation* of the argument,
    # while the raw value is forwarded to override_params_on_sim_file below.
    # Confirm the inversion is intentional before relying on this attribute.
    self.use_exchange_timestamp = not use_exchange_timestamp
    self.tmp_dirname = tmp_dirname
    self.config_path = config_path
    self.config_filename = config_path.split("/")[-1]
    self.use_adjusted_pos = use_adjusted_pos
    self.adjusted_pos_run_hours = flags.FLAGS.adjusted_pos_run_hours
    # Glob pattern for result protobufs under tmp_dirname (see pb_path).
    self.pb_suffix = '*.pb'
    self.sim_info_filename = None
    self.warmup_hours = warmup_hours
    self.run_with_kline_dumper = run_with_kline_dumper
    self.training_date = training_date
    self.force_plot_debug_info = force_plot_debug_info
    self.include_mm_stats = include_mm_stats
    self.include_lp_plot = include_lp_plot
    self.hours_warmup = None
    self.price_bp_tholds = price_bp_tholds
    self.quote_tholds = quote_tholds
    self.base_tholds = base_tholds
    self.include_option_pnl = include_option_pnl
    self.include_tag_info = include_tag_info

    # One sub-list of dates per comma-separated trading_date token.
    trading_dates = [get_trading_dates(trading_date)
                     for trading_date in flags.FLAGS.trading_date.split(',')]

    # Display label spanning first to last date across all sub-ranges.
    self.trading_date = (
          f"{trading_dates[0][0].strftime('%Y%m%d')}-{trading_dates[-1][-1].strftime('%Y%m%d')}")
    last_dates = [sub_trading_dates[-1] for sub_trading_dates in trading_dates]
    if flags.FLAGS.group_dates is not None and flags.FLAGS.group_dates > 1:
      # Grouping N dates means each interval spans N*24 hours, so a custom
      # hours spec other than "24H" would conflict.
      assert flags.FLAGS.hours is None or flags.FLAGS.hours == "24H"
      group_dates = flags.FLAGS.group_dates
      # Keep every N-th date; each surviving date covers the whole group.
      trading_dates = [sub_trading_dates[::group_dates] for sub_trading_dates in trading_dates]
      self.hours = "%sH" % (24 * group_dates)
      self.time_range = "0-24"
    else:
      self.hours = flags.FLAGS.hours or "24H"
      if self.hours.find("-") >= 0:
        self.time_range = self.hours
      else:
        self.time_range = f"0-{self.hours}"
    if flags.FLAGS.hours_warmup is not None:
      # Stored as a bare hour count, e.g. "2H" -> "2".
      self.hours_warmup = flags.FLAGS.hours_warmup.replace("H", "")

    self.trading_intervals = []
    for sub_trading_dates, last_date in zip(trading_dates, last_dates):
      self.trading_intervals += to_trading_intervals(
          sub_trading_dates, self.hours, last_date, flags.FLAGS.exclude_date, self.hours_warmup)
    # interval start -> its index, for reverse lookup.
    self.trading_interval_map = {itv[0]: i for i, itv in enumerate(self.trading_intervals)}

    # Retry transient filesystem failures with linear backoff (shared tmp
    # dirs can race). NOTE(review): if all 5 attempts raise OSError,
    # self.sim_setting_filename is never assigned and later access raises
    # AttributeError.
    for i in range(5):
      try:
        self.sim_setting_filename = self.override_params_on_sim_file(
            tmp_dirname=tmp_dirname,
            sim_setting_path="data/coin2/sim/sim_setting.json",
            latency_mult=latency_mult,
            latency_sec=latency_sec,
            is_snapshot=is_snapshot,
            use_exchange_timestamp=use_exchange_timestamp,
            sim_feed_machine=sim_feed_machine,
            makerfill_tolerance_bps=makerfill_tolerance_bps)
        break
      except OSError:
        time.sleep(i + 1)

    # Empty placeholder strat config; subclasses are expected to fill it.
    self.strat_config_filename = create_temp_file_and_dump_json(
        dir=self.tmp_dirname, prefix="strat.", suffix=".json", data=None)
    # TODO(jhkim): fix below hack later.
    # btw, analyzer feed machine doesn't really matter.
    self.analyzer_feed_machine = analyzer_feed_machine
    if not self.analyzer_feed_machine:
      self.analyzer_feed_machine = 'feed-05.ap-northeast-1.aws'
      if self.config_path.find("okex") >= 0:
        self.analyzer_feed_machine = 'feed-05.cn-hongkong.aliyun'
      elif self.config_path.find("bitstamp") >= 0:
        self.analyzer_feed_machine = 'feed-02.eu-west-1.aws'

    # Encode the per-run overrides into the strat prefix so result names
    # distinguish parameterized runs.
    if latency_mult is not None:
      self.strat_prefix += f"_LATMULT_{latency_mult}"
    elif latency_sec is not None:
      self.strat_prefix += f"_LAT_{latency_sec}"
    if is_snapshot is not None:
      self.strat_prefix += f"_ISSNAP_{is_snapshot}"
    if use_exchange_timestamp is not None:
      self.strat_prefix += f"_USEET_{use_exchange_timestamp}"
    # don't want strat name getting longer
    # if sim_feed_machine is not None:
    #   self.strat_prefix += f"_SFM_{sim_feed_machine}"
    if makerfill_tolerance_bps is not None:
      self.strat_prefix += f"_MAKTOL{makerfill_tolerance_bps}"

    self.stat_csv_filename = os.path.join(tmp_dirname, 'stat.csv')

  def trading_interval_count(self):
    """Return the number of trading intervals this backtest will run."""
    return len(self.trading_intervals)

  @staticmethod
  def override_params_on_sim_file(
      *,
      tmp_dirname,
      sim_setting_path,
      latency_mult,
      latency_sec,
      is_snapshot,
      use_exchange_timestamp,
      sim_feed_machine,
      makerfill_tolerance_bps):
    """Load the sim-setting JSON, apply overrides, dump to a temp file.

    Each non-None override is applied to every entry of the setting dict.
    Returns the path of the newly written temp JSON file. Raises ValueError
    if both latency_mult and latency_sec are given.
    """
    with open(sim_setting_path) as sf:
      sim_setting_obj = json.load(sf)
    if latency_mult is not None and latency_sec is not None:
      raise ValueError("latency_mult & latency_sec is exclusive flag.")
    if latency_mult is not None:
      latency_mult = float(latency_mult)
      # Scale each entry's existing latency_sec by the multiplier.
      sim_setting_obj = {
          key: dict(simdict, latency_sec=simdict['latency_sec'] * latency_mult)
          for key, simdict in sim_setting_obj.items()
      }
    elif latency_sec is not None:
      sim_setting_obj = {
          key: dict(simdict, latency_sec=latency_sec) for key, simdict in sim_setting_obj.items()
      }
    if is_snapshot is not None:
      sim_setting_obj = {
          key: dict(simdict, is_snapshot=is_snapshot) for key, simdict in sim_setting_obj.items()
      }
    if use_exchange_timestamp is not None:
      sim_setting_obj = {
          key: dict(simdict, use_exchange_timestamp=int(use_exchange_timestamp))
          for key, simdict in sim_setting_obj.items()
      }

    if makerfill_tolerance_bps is not None:
      sim_setting_obj = {
          key: dict(simdict, makerfill_tolerance_bps=float(makerfill_tolerance_bps))
          for key, simdict in sim_setting_obj.items()
      }

    if sim_feed_machine is not None:
      sim_setting_obj = {
          key: dict(simdict, feed_machine=sim_feed_machine)
          for key, simdict in sim_setting_obj.items()
      }

    new_config_filename = create_temp_file_and_dump_json(
        dir=tmp_dirname, prefix='sim_setting.', suffix=".json", data=sim_setting_obj)
    return new_config_filename

  def gen_cmds_warmup(self):
    """Hook: warmup commands; none by default."""
    return []

  def gen_cmds_prerun(self):
    """Collect non-None pre-run commands, one per trading interval."""
    cmds = []
    for i, trading_interval in enumerate(self.trading_intervals):
      cmd = self.cmd_prerun_on_date(trading_interval, i)
      if cmd is not None:
        cmds.append(cmd)
    return cmds

  def gen_cmds_kline(self):
    """Hook: kline-dumper commands; none by default."""
    return []

  def gen_cmds(self):
    """Collect the main run command for every trading interval."""
    cmds = []
    for i, trading_interval in enumerate(self.trading_intervals):
      cmds.append(self.cmd_run_on_date(trading_interval, i))
    return cmds

  def gen_jobname(self, i):
    """Return the job name for interval index ``i``."""
    return f"{i}_{self.config_filename}"

  def gen_jobnames(self):
    """Return a job name per trading interval, in interval order."""
    cmds = []
    for i, trading_interval in enumerate(self.trading_intervals):
      cmds.append(self.gen_jobname(i))
    return cmds

  def cmd_prerun_on_date(self, trading_interval, idx):
    """Hook: per-interval pre-run command; None means no pre-run step."""
    return None

  def cmd_run_on_date(self, trading_interval, idx):
    """Hook: per-interval run command; subclasses must implement."""
    raise NotImplementedError()

  def pb_path(self):
    """Return the glob pattern matching this run's result protobufs."""
    return f"{self.tmp_dirname}/{self.pb_suffix}"

  def cmd_analyze_and_plot(self, compress=False, cpus=None):
    """Build the sim_stat_frompb analyze/plot command line.

    Returns ``(cmd, cpus_use)``, or None when there is no config_path.
    ``cpus`` caps the worker count (default: one per interval, max 6).
    """
    if self.config_path is None:
      return
    # Real-scaled time axis only when intervals are contiguous/whole-day.
    if "24H" in self.hours or len(self.trading_intervals) == 1 or flags.FLAGS.group_dates:
      extraflag = "--real_scale_t_axis=True"
    else:
      extraflag = "--real_scale_t_axis=False"

    if flags.FLAGS.split_debug_hours:
      extraflag += f" --split_hours={flags.FLAGS.split_debug_hours}"
    if flags.FLAGS.full_bbo:
      extraflag += " --full_bbo"

    # Multiple date ranges leave gaps; hide them in the plot.
    if "," in flags.FLAGS.trading_date:
      extraflag += " --hide_skipped_dates"

    cpus_use = len(self.trading_intervals) if not cpus else cpus
    cpus_use = min(cpus_use, 6)
    tsstr = ""
    if len(self.trading_intervals) == 1:
      # Single interval: pin the x-axis to its exact nanosecond bounds.
      trading_interval = self.trading_intervals[0]
      fromdt, todt = trading_interval[0], trading_interval[1]
      tss = pandas.DatetimeIndex([fromdt, todt]).astype(int)
      tsstr = f"--set_xlim={tss[0]}-{tss[1]}"
    sim_info_str = ""
    if self.sim_info_filename:
      sim_info_str = f"--sim_info {self.sim_info_filename}"
    cmd = f"""./pyrunner \
python/coin/pnl/sim_stat_frompb.py \
--pb_path "{self.pb_path()}" \
--feed_machine {self.analyzer_feed_machine} \
--sim_result_prefix={self.strat_prefix} \
--strat_prefix={self.strat_prefix_base} \
--trading_date={self.trading_date} \
--time_range={self.time_range} \
--sim_result_dir={self.sim_result_dir} \
{extraflag} \
--cpu={cpus_use} \
--compress={compress} \
{"--unfold_stat" if self.unfold_stat else "--nounfold_stat"} \
{"--unfold" if self.unfold else "--nounfold"} \
--stat_csv={self.stat_csv_filename} \
{tsstr} \
{sim_info_str} \
--use_adjusted_pos={self.use_adjusted_pos} \
--use_feed_cache \
--bbo_from_cache={self.bbo_from_cache} \
--include_funding_rate={self.include_funding_rate} \
--dump_pnl_per_x_minute={self.dump_pnl_per_x_minute} \
--use_bbo={flags.FLAGS.use_bbo} \
--force_plot_debug_info={self.force_plot_debug_info}  \
--include_mm_stats={self.include_mm_stats}  \
--include_lp_plot={self.include_lp_plot}  \
{"--price_bp_tholds=" + ",".join([str(i) for i in self.price_bp_tholds]) if self.price_bp_tholds else ""} \
{"--quote_tholds=" + ",".join([str(i) for i in self.quote_tholds]) if self.quote_tholds else ""} \
{"--base_tholds=" + ",".join([str(i) for i in self.base_tholds]) if self.base_tholds else ""} \
{"--include_option_pnl" if self.include_option_pnl else ""} \
{"--include_tag_info" if self.include_tag_info else ""}"""
    return cmd, cpus_use
