# Copyright (c) 2020 Presto Labs Pte. Ltd.
# Author: chunhui

import collections
import contextlib
import glob
import logging
import numpy as np
import os
import traceback
import pandas as pd

from concurrent.futures import ProcessPoolExecutor
from datetime import datetime

from absl import app
from coin.base import flags
from cc.appcoin2.strategy.sim_driver import batch_local
from cc.appcoin2.strategy.run_sim import define_sim_flags, run_batch_for_params
import cc.appcoin2.strategy.run_sim_util as rsutil
from coin.base.mail_util import send_mail, send_mail_with_attachments
import coin.research.read_strat_info as rsi
from coin.strategy.mm.tool.pnl_plot import init_flags

from .live_log_reader import read_live_log
from .sim_log_reader import read_sim_log
from .plot import plot_graph


def guess_sim_type(strategy_name):
  """Infer the sim type from a strategy name by prefix matching.

  Returns:
    The sim-type string for the first matching prefix, or None when no
    known prefix matches.
  """
  # Order matters: more specific prefixes (e.g. 'basis_model') must be
  # tested before their shorter overlaps (e.g. 'model').
  prefix_to_sim_type = (
      ('basis_smm', 'basis_smm'),
      ('basis_spread', 'basis_spread'),
      ('mm_currency', 'basis_spread'),
      ('mm_portfolio', 'basis_spread'),
      ('mmrev', 'mmrev'),
      ('basis_model', 'basis_lmagg'),
      ('model', 'basis_lmagg'),
      ('lm', 'lm'),
  )
  for prefix, sim_type in prefix_to_sim_type:
    if strategy_name.startswith(prefix):
      return sim_type
  return None


def run_sim(config_file_name, sim_type):
  """Run one local sim batch and return the directory holding its pb output.

  Args:
    config_file_name: either a python literal (e.g. a list of config paths)
      that eval()s successfully, or a glob pattern matching config files.
    sim_type: sim-type string passed through to run_batch_for_params.

  Returns:
    The single tmp_dirname shared by all backtests in the batch.

  Raises:
    AssertionError: if the batch wrote output to more than one directory.
  """
  # SECURITY NOTE: eval() executes arbitrary python from the flag value.
  # Kept for backward compatibility (the flag is operator-supplied), but
  # never feed untrusted input through this path.
  try:
    config_files = eval(config_file_name)
  except Exception:
    # Not a python literal -- treat it as a glob pattern instead.
    config_files = glob.glob(config_file_name)

  num_cpu = flags.FLAGS.cpu
  sparams = rsutil.get_sparams(flags.FLAGS.sparams)

  # Collect the distinct output directories; every backtest of the batch is
  # expected to land in the same tmp dir.
  pb_dirs = set()
  for sim_backtests in run_batch_for_params(sim_type, config_files, sparams, [{}], batch_local,
                                            num_cpu):
    for sim_backtest in sim_backtests:
      pb_dirs.add(sim_backtest.tmp_dirname)

  assert len(pb_dirs) == 1, pb_dirs
  pb_dir = next(iter(pb_dirs))
  print("PB dir: ", pb_dir)
  return pb_dir


def get_stats(grpdf, pnl_ccy):
  """Sum the per-row pnl/volume columns and derive ratio statistics.

  Args:
    grpdf: DataFrame containing the '(pnl_ccy)'-suffixed monetary columns
      plus 'fill_count' and 'sub_count'.
    pnl_ccy: quote currency used in the column name suffixes.

  Returns:
    A Series of summed monetary columns plus derived stats
    ('net_ret(bps)', 'fee_rate(bps)', 'fill_cnt_ratio',
    'participation_ratio'); the raw count and market-volume columns are
    dropped from the result.
  """
  def ccy(col):
    return f'{col}({pnl_ccy})'

  monetary = ['pnl_net', 'pnl_fee', 'funding_fee', 'fill_amt', 'taker_amt',
              'maker_amt', 'sub_amt']
  cols = [ccy(c) for c in monetary]
  cols += ['fill_count', 'sub_count', ccy('market_volume_amt')]
  stat = grpdf[cols].sum()

  fill_amt = stat[ccy('fill_amt')]
  stat['net_ret(bps)'] = stat[ccy('pnl_net')] / fill_amt * 1e4
  stat['fee_rate(bps)'] = stat[ccy('pnl_fee')] / fill_amt * 1e4
  stat['fill_cnt_ratio'] = stat['fill_count'] / stat['sub_count']
  stat['participation_ratio'] = fill_amt / stat[ccy('market_volume_amt')]
  # Intermediate inputs are not part of the reported summary.
  return stat.drop([ccy('market_volume_amt'), 'fill_count', 'sub_count'])


def post_process(sstats_dict):
  """Collapse per-strategy stat dicts into plot-ready pnl frames and summaries.

  Args:
    sstats_dict: mapping of strategy name -> list of stat dicts. Each stat
      dict carries 'combdf' (a DataFrame with timestamp/pnl/fill columns),
      'currency_info' and 'pnl_stat'; 'timedret' is optional.

  Returns:
    (pnlgrps, dti, pnlstats) where pnlgrps maps strategy name (plus a
    synthetic 'total' key) -> DataFrame of pnl/volume columns, dti is the
    DatetimeIndex of the combined frame, and pnlstats maps the same keys ->
    summary Series from get_stats.
    Implicitly returns None when no strategy had any fills.
    NOTE(review): callers unpack three values, so the no-fill path raises
    TypeError at the call site -- confirm this is intended.
  """
  pnlgrps = {}
  pnlstats = {}
  combdfs = []
  pnl_currency = None  # quote currency shared by all strategies; asserted below

  rows = []        # per-(date, symbol) stat rows for the current strategy
  pnl_ccys = []
  timedrets = []   # NOTE(review): collected but never returned or used below
  stats_rows = []  # rows accumulated across all strategies, for the 'total' stats
  for stratname, sstats in sstats_dict.items():
    pnldfs = []
    for sstat in sstats:
      pnldf = sstat['combdf']
      if len(pnldf) == 0:
        continue

      if 'timedret' in sstat:
        timedrets.append(sstat['timedret'])

      assert (~np.isnan(pnldf['timestamp'])).all()
      # Keep only the rows where net pnl actually changed.
      pnldf = pnldf.loc[pnldf['pnl_net'].diff() != 0]
      # Running maker/taker fill volume; the ffill below propagates each
      # cumsum across rows of the other fill type.
      pnldf['maker_fill_cumsum'] = pnldf[pnldf['fill_type'] == 'MAKER']['fill_cash'].cumsum()
      pnldf['taker_fill_cumsum'] = pnldf[pnldf['fill_type'] == 'TAKER']['fill_cash'].cumsum()
      # NOTE(review): fillna(method='ffill') is deprecated in pandas 2.x and
      # removed in 3.0 -- migrate to .ffill() when convenient.
      pnldf['maker_fill_cumsum'] = pnldf['maker_fill_cumsum'].fillna(method='ffill').fillna(0)
      pnldf['taker_fill_cumsum'] = pnldf['taker_fill_cumsum'].fillna(method='ffill').fillna(0)
      pnldf['fill_cash_cumsum'] = pnldf['fill_cash'].cumsum().fillna(method='ffill')
      # Downsample busy frames to one row per minute (keeping the last row
      # per minute) to bound plot size.
      if len(pnldf) > 2000:
        pnldf.index = pd.to_datetime(pnldf.timestamp).dt.floor(freq='60s')
        pnldf = pnldf.loc[~pnldf.index.duplicated(keep='last')]
      pnldf = pnldf.reset_index(drop=True)

      if np.isnan(pnldf['timestamp']).any():
        pnldf = pnldf.loc[~np.isnan(pnldf['timestamp'])].reset_index(drop=True)

      pnldf['pnl_diff'] = pnldf['pnl_net'].diff()
      pnldf['pnl_diff'] = pnldf['pnl_diff'].fillna(0)
      ccyinfo = sstat['currency_info']
      # When pnl is denominated in the coin rather than the quote currency,
      # convert via the mean mark price of this frame.
      coin_to_fiat_mult = 1
      if ccyinfo['pnl_currency'] != ccyinfo['quote_currency']:
        coin_to_fiat_mult = pnldf['mtm_price'].mean()
        pnldf['pnl_diff'] *= coin_to_fiat_mult

      # All strategies in one run must report in the same quote currency.
      if pnl_currency is None:
        pnl_currency = ccyinfo['quote_currency']
      else:
        assert pnl_currency == ccyinfo['quote_currency'], (pnl_currency, ccyinfo['quote_currency'])

      pnldf['from_symbol'] = stratname
      pnldfs.append(pnldf)

      # One stat row per (calendar date of the frame, strategy).
      date = pd.DatetimeIndex(sstat['combdf']['timestamp']).date[0]
      rows.append({'date': date, 'symbol': stratname})
      pnl_ccy = ccyinfo['quote_currency']
      pnl_ccys = list(set(pnl_ccys + [pnl_ccy]))

      # Flatten pnl_stat into the row, converting monetary values into the
      # quote currency and renaming them with a '(ccy)' suffix.
      rows[-1].update(sstat['pnl_stat'])
      rows[-1][f'pnl_net({pnl_ccy})'] = (sstat['pnl_stat']['pnl_net'] * coin_to_fiat_mult)
      rows[-1][f'pnl_gross({pnl_ccy})'] = (sstat['pnl_stat']['pnl_gross'] * coin_to_fiat_mult)
      rows[-1][f'pnl_fee({pnl_ccy})'] = (sstat['pnl_stat']['pnl_fee'] * coin_to_fiat_mult)
      rows[-1][f'funding_fee({pnl_ccy})'] = (sstat['pnl_stat']['funding_fee'] * coin_to_fiat_mult)
      cols_amount = [
          'fill_amt', 'taker_amt', 'maker_amt', 'sub_amt', 'sub_amt_filled',
          'market_volume_amt'
      ]
      # Amount columns are scaled by the contract multiplier, not the
      # coin-to-fiat conversion.
      for col_convert in cols_amount:
        rows[-1][f'{col_convert}({pnl_ccy})'] = (sstat['pnl_stat'][col_convert] *
                                                 sstat['pnl_stat']['multiplier'])

      # Drop the raw (unsuffixed/unconverted) fields now superseded above.
      for rm_col in ['multiplier', 'grs_ret', 'sharpe', 'pnl_net', 'pnl_gross', 'pnl_fee'
                     ] + cols_amount:
        if rm_col in rows[-1]:
          del rows[-1][rm_col]

    if len(pnldfs) == 0:
      continue
    # Stitch the cumulative volume columns across consecutive frames by
    # offsetting each frame with the previous frame's last value.
    base = collections.defaultdict(float)
    vol_keys = ['fill_cash_cumsum', 'taker_fill_cumsum', 'maker_fill_cumsum']
    for pnldf in pnldfs:
      for key in vol_keys:
        pnldf[key] = pnldf[key] + base[key]
        base[key] = pnldf.iloc[-1][key]
    combdf = pd.concat(pnldfs, axis=0).sort_values(['timestamp']).reset_index(drop=True)
    combdfs.append(combdf)
    combdf['pnl_net'] = combdf['pnl_diff'].cumsum()
    combdf['volume_diff'] = combdf['fill_cash_cumsum'].diff()
    # diff() leaves NaN in the first row; seed it with the first fill.
    combdf.loc[0, 'volume_diff'] = combdf.loc[0, 'fill_cash']
    combdf['open_cash_diff'] = combdf['open_cash'].fillna(method='ffill').diff()
    combdf.loc[0, 'open_cash_diff'] = combdf.loc[0, 'open_cash']
    pnlgrps[stratname] = combdf[['order_id', 'timestamp', 'pnl_net', 'open_cash', 'fill_cash'] +
                                vol_keys].copy()
    assert len(pnl_ccys) <= 1, pnl_ccys

    if len(pnl_ccys) == 1:
      # Consume the rows accumulated for this strategy and reset for the next.
      group_df = pd.DataFrame(rows).sort_values(['date', 'symbol']).reset_index(drop=True)
      pnlstats[stratname] = pd.Series(get_stats(group_df, pnl_ccys[0]))
      stats_rows += rows
      rows = []
    else:
      pnlstats[stratname] = pd.Series()

  pd.options.display.float_format = '{:,.6f}'.format

  # no order fill
  if len(combdfs) == 0:
    return

  # Build the cross-strategy 'total' frame from the per-strategy diffs.
  totdf = pd.concat(combdfs, axis=0, sort=False).sort_values(['timestamp']).reset_index(drop=True)
  totdf['pnl_net'] = totdf['pnl_diff'].cumsum()
  totdf['fill_cash_cumsum'] = totdf['volume_diff'].cumsum()
  totdf['open_cash'] = totdf['open_cash_diff'].cumsum()
  pnlgrps['total'] = totdf[['order_id', 'timestamp', 'pnl_net', 'open_cash', 'fill_cash'] +
                            vol_keys].copy()
  total_stats_df = pd.DataFrame(stats_rows).sort_values(['date', 'symbol']).reset_index(drop=True)
  groupstat = pd.Series(get_stats(total_stats_df, pnl_ccys[0]))
  pnlstats['total'] = groupstat

  dti = pd.DatetimeIndex(totdf['timestamp'])

  # Attach a DatetimeIndex column for plotting; the enumerate index is unused.
  for i, (key, pnldf) in enumerate(pnlgrps.items()):
    dti_i = pd.DatetimeIndex(pnldf['timestamp'])
    pnldf.loc[:, 'tindex'] = dti_i

  def normalize_sort_keys(kv_pair):
    # Strip the 'CURRENT_' prefix from keys and return the dict sorted by key.
    kv_pair = {key.replace('CURRENT_', ''): value for key, value in kv_pair.items()}
    return dict(sorted(kv_pair.items()))

  pnlgrps = normalize_sort_keys(pnlgrps)
  pnlstats = normalize_sort_keys(pnlstats)
  return pnlgrps, dti, pnlstats


def prepare_trading_date(first_live_event_ts, ori_date):
  """Rebase flags.FLAGS.trading_date to start at the first live event.

  Keeps the end date from ori_date (the part after '-') and replaces the
  start with the calendar date of first_live_event_ts, formatted YYYYMMDD.
  """
  _, to_dt = ori_date.split('-')
  first_date = pd.DatetimeIndex([first_live_event_ts]).date[0]
  from_dt = first_date.strftime('%Y%m%d')
  flags.FLAGS.trading_date = f"{from_dt}-{to_dt}"


def prepare_email_text(summary, sim_keys, live_keys, low_correlations, poor_performances):
  """Assemble the plain-text body of the sim-live report mail.

  Args:
    summary: free-form header line (stripped of surrounding whitespace).
    sim_keys: keys present in the sim results (rendered with str()).
    live_keys: keys present in the live results (rendered with str()).
    low_correlations: (name, correlation) pairs below the alert threshold.
    poor_performances: names of strategies flagged as performing poorly.

  Returns:
    The email body, one section per line, ending with a newline.
  """
  lines = [summary.strip(), f"sim_keys: {sim_keys}", f"live_keys: {live_keys}"]
  if low_correlations:
    pairs = ', '.join(f'{name}: {corr}' for name, corr in low_correlations)
    lines.append('Low correlations: ' + pairs)
  if poor_performances:
    lines.append('Poor live performance: ' + ', '.join(poor_performances))
  return '\n'.join(lines) + '\n'


def get_live_log(strategy_name):
  """Read the live order log and return its per-symbol dict.

  strategy_name is used only for the progress print; the first-event
  timestamp returned by read_live_log is discarded here.
  """
  log_dict, _first_event_ts = read_live_log()
  print(f'{strategy_name} Finish reading live log\n')
  return log_dict


def get_sim_log(driver_path, strategy_name, sim_type):
  """Obtain the sim pb directory (running the sim if needed) and read it.

  When flags.FLAGS.pb_dir is set, the sim run is skipped and that directory
  is read directly. Falls back from '*.pb' to '*.pb.lz4' when the first
  glob finds nothing.
  """
  pb_dir = flags.FLAGS.pb_dir
  if pb_dir is None:
    effective_sim_type = sim_type if sim_type is not None else guess_sim_type(strategy_name)
    pb_dir = run_sim(driver_path, effective_sim_type)
    print(f'{strategy_name} Finish running sim')
  else:
    print(f'{strategy_name} Skip running sim')
  sim_log_dict = read_sim_log(os.path.join(pb_dir, '*.pb'))
  if not sim_log_dict:
    sim_log_dict = read_sim_log(os.path.join(pb_dir, '*.pb.lz4'))
  print(f'{strategy_name} Finish reading sim log')
  return sim_log_dict


# sim_log_dict keys: {exchange}.{symbol}
# live_log_dict keys: {symbol}
# sim_log_dict keys: {exchange}.{symbol}
# live_log_dict keys: {symbol}
def inner_join_results(sim_log_dict, live_log_dict):
  """Join sim and live logs on symbol, dropping unmatched entries.

  Sim keys are stripped of their '{exchange}.' prefix first; both returned
  dicts contain only the symbols present on both sides, sorted by key.
  """
  stripped_sim = {key.split('.', 1)[-1]: val for key, val in sim_log_dict.items()}
  shared = stripped_sim.keys() & live_log_dict.keys()
  joined_sim = {sym: stripped_sim[sym] for sym in sorted(stripped_sim) if sym in shared}
  joined_live = {sym: live_log_dict[sym] for sym in sorted(live_log_dict) if sym in shared}
  return joined_sim, joined_live


def analyze_sim_live_result(
    strategy_name, sim_type, orderlog_machine, driver_path, ori_name, ori_date, output_dir,
    sim_result_dir):
  """Compare sim vs live pnl for one strategy and email/print the report.

  Reads live and sim logs (optionally in parallel), inner-joins them on
  symbol, post-processes both into pnl frames, plots each matched symbol,
  and either mails the summary with graph attachments or prints it when no
  report_email is configured. Any exception is caught, printed, and mailed
  as a failure report.

  Mutates flags.FLAGS: orderlog_machine, strategy_name, sim_result_dir,
  and (serial path only) trading_date.
  """
  flags.FLAGS.orderlog_machine = orderlog_machine
  flags.FLAGS.strategy_name = strategy_name
  flags.FLAGS.sim_result_dir = sim_result_dir

  subject = f'Sim Live Report {ori_name} on date {ori_date}'
  try:
    if flags.FLAGS.run_parallel:
      # NOTE(review): the parallel path never calls prepare_trading_date, so
      # flags.FLAGS.trading_date keeps its original value here, unlike the
      # serial branch below -- confirm this asymmetry is intended.
      with ProcessPoolExecutor(max_workers=2) as executor:
        live = executor.submit(get_live_log, strategy_name)
        sim = executor.submit(get_sim_log, driver_path, strategy_name, sim_type)
        live_log_dict = live.result()
        sim_log_dict = sim.result()
    else:
      live_log_dict, first_event_ts = read_live_log()
      prepare_trading_date(first_event_ts, ori_date)
      print(f'{strategy_name} Finish reading live log\n')
      sim_log_dict = get_sim_log(driver_path, strategy_name, sim_type)

    sim_log_dict, live_log_dict = inner_join_results(sim_log_dict, live_log_dict)

    print(sim_log_dict.keys())
    print(live_log_dict.keys())
    sim_dfs, sim_dti, sim_stats = post_process(sim_log_dict)
    live_dfs, live_dti, live_stats = post_process(live_log_dict)
    assert (len(live_dfs) == len(live_stats))
    assert (len(sim_dfs) == len(sim_stats))

    graph_paths = []
    low_correlations = []
    poor_performances = []

    for symbol, live_df in live_dfs.items():
      live_stat = live_stats[symbol]
      # Substring match: the sim key may carry extra qualifiers around the
      # live symbol. Require exactly one candidate, else skip the symbol.
      sim_name = [n for n in sim_dfs.keys() if symbol in n]
      if len(sim_name) != 1:
        print(f'live symbol {symbol} not found in sim {sim_name}')
        continue
      sim_name = sim_name[0]
      live_name = symbol
      sim_stat = sim_stats[sim_name]
      sim_df = sim_dfs[sim_name]
      print(f'sim {sim_name} vs live {live_name}')
      path, correlation, poor_performance = plot_graph(sim_df, live_df,
                                                       f'{flags.FLAGS.strategy_name}-{sim_name}',
                                                       sim_stat, live_stat, sim_dti, live_dti,
                                                       sim_name, live_name, output_dir)
      # Flag symbols whose sim/live pnl correlation falls below 0.9.
      if correlation < 0.9:
        low_correlations.append((sim_name, round(correlation, 4)))
      if poor_performance:
        poor_performances.append(sim_name)

      graph_paths.append(path)

    main_text = prepare_email_text(f"{strategy_name} {flags.FLAGS.trading_date} use_bbo={flags.FLAGS.use_bbo}",
                                   sim_dfs.keys(), live_dfs.keys(), low_correlations,
                                   poor_performances)
    if flags.FLAGS.report_email:
      send_mail_with_attachments(subject, flags.FLAGS.from_mail, flags.FLAGS.report_email, subject, main_text,
                                 graph_paths)
    else:
      print(main_text)
  except Exception as e:
    # NOTE(review): unlike the success path, the failure mail is sent even
    # when flags.FLAGS.report_email is unset -- confirm send_mail tolerates
    # an empty recipient.
    name = f'Failed Sim Live Daily Report {ori_date}'
    text = '%s\n\n%s' % (strategy_name, traceback.format_exc())
    print(e)
    print(text)
    send_mail(name, flags.FLAGS.from_mail, flags.FLAGS.report_email, name, text)


def main(_):
  """Entry point: enumerate strategy instances for the date and analyze each.

  Determines the strategy group from the name/sim type, loads the strategy
  roster (per-date infos for the lm families, static meta info otherwise),
  then fans out analyze_sim_live_result over the matching strategies with a
  process pool.
  """
  ori_name = flags.FLAGS.strategy_name
  ori_date = flags.FLAGS.trading_date

  if flags.FLAGS.sim_type is None:
    sim_type = guess_sim_type(ori_name)
  else:
    sim_type = flags.FLAGS.sim_type
  strategy_group = sim_type

  # Substring checks refine the group for the lm families; order matters
  # ('lm_agg2_highstack' must be tested before 'lm_agg2').
  if 'lm_volume' in flags.FLAGS.strategy_name:
    strategy_group = 'lm-volume'
  elif 'lm_agg2_highstack' in flags.FLAGS.strategy_name:
    strategy_group = 'lm-agg2-highstack'
  elif 'lm_agg2' in flags.FLAGS.strategy_name:
    strategy_group = 'lm-agg2'
  elif 'lm_focus' in flags.FLAGS.strategy_name:
    strategy_group = 'lm-focus'
  if strategy_group in ['lm-agg2', 'lm-volume', 'lm-agg2-highstack', 'lm-focus']:
    # lm families: look up per-date strategy infos, one row per driver.
    strategy_infos = rsi.get_strategy_infos(ori_date, ori_name, strategy_group)
    strategy_infos = strategy_infos.groupby(['trading_date', 'driver']).first().reset_index()
    del strategy_infos['subsymbol']
    print(strategy_infos)
    sim_type = 'lm'
  else:
    from cc.appcoin2.strategy.basis_strat.support.strategy_pta_info import get_strategy_meta_info
    strategy_infos = get_strategy_meta_info()[['name', 'host', 'driver']]

  ncpu = min(flags.FLAGS.analyzer_cpu, flags.FLAGS.max_cpu)
  # Optionally check out the config as of the trading date via a git helper;
  # nullcontext yields None so the checkout below is skipped.
  if flags.FLAGS.use_prev_config:
    from cc.appcoin2.strategy.basis_strat.support.git_helper import GithubPR
    context = GithubPR()
  else:
    context = contextlib.nullcontext()

  with context as git_helper:
    if git_helper:
      git_helper.checkout(flags.FLAGS.trading_date.split('-')[0], 'master')
    with ProcessPoolExecutor(max_workers=ncpu) as executor:
      for strategy_info in strategy_infos.to_dict('records'):
        strategy_name = strategy_info['name']
        # lm runs analyze every listed strategy; other groups only those
        # whose name contains the requested strategy name.
        if sim_type == 'lm' or ori_name in strategy_name:
          machine = strategy_info['host']
          driver_path = strategy_info['driver']
          output_dir = os.path.join(flags.FLAGS.output_dir, strategy_name)
          tmp_sim_result_dir = os.path.join(flags.FLAGS.sim_result_dir, strategy_name)
          runtuple = (analyze_sim_live_result, strategy_name, sim_type, machine,
                      driver_path, ori_name, ori_date, output_dir, tmp_sim_result_dir)
          if ncpu == 1:
            # Run inline so failures surface immediately in the main process.
            runtuple[0](*runtuple[1:])
          else:
            # NOTE(review): futures are discarded, so worker exceptions are
            # only reported via analyze_sim_live_result's own try/except
            # mail path -- confirm that is sufficient.
            executor.submit(*runtuple)


def define_flags():
  """Register command-line flags for the analyzer and override some defaults.

  Builds on define_sim_flags() / init_flags() from the sim and plotting
  modules, then defines analyzer-specific flags and pins a few values
  (analyzer_cpu, remove_tmp_files, feed_machine).
  """
  define_sim_flags()
  # Overrides of flags registered by define_sim_flags.
  flags.FLAGS.analyzer_cpu = 4
  flags.FLAGS.remove_tmp_files = False
  flags.DEFINE_bool('run_parallel', True,
                    'get sim and live logs in parallel. Careful with mem usage')

  flags.DEFINE_bool('use_prev_config', False, 'use config at the beginning of trading date')
  flags.DEFINE_string('out_dir', 'tmp/pnl_plot', '')
  flags.DEFINE_string('from_mail', 'basis_strategy@joomo.io', '')
  flags.DEFINE_string('pb_dir', None, 'directory of pb generated by sim')
  # moved to define_sim_flags
  # flags.DEFINE_boolean('use_bbo', str(os.environ.get("use_bbo", None)) == '1', '')
  flags.DEFINE_bool('force_rerun', False, '')
  flags.DEFINE_bool('use_full_live_log', True, '')
  flags.DEFINE_string('symbol', None, 'symbol select if needed')
  flags.DEFINE_string('output_dir', '', '')
  flags.DEFINE_integer('max_cpu', 7, '')
  flags.DEFINE_string(
      'initial_position', '', 'The initial position, full_symbol=pos;full_sym2=pos '
      'e.g.BTC-USD.20200626.Huobi=-50;BTC-USD.20200626.Okex=-50 ')
  flags.DEFINE_float('initial_position_all', None, '')

  flags.DEFINE_string('exchange', None, 'Exchange name.')

  flags.DEFINE_string('market_type', None, 'Spot, Futures')

  flags.DEFINE_string('feed_machine', None, 'feed machine name.')

  flags.DEFINE_string('feed_root', '/remote/iosg/coin/data/flow', 'feed machine name.')

  flags.DEFINE_string('time_range', '0-24', '')

  flags.DEFINE_string('worker_ids', '1,2', '')

  flags.DEFINE_string('api_override', None, '')

  flags.DEFINE_bool('use_feed_cache', True, '')

  flags.DEFINE_bool('use_swap_over_futures', False, '')

  flags.DEFINE_string('owner', None, '')

  flags.DEFINE_string('feed_cache_dir', '/remote/iosg/coin-2/feed_cache', '')

  flags.DEFINE_boolean('use_pickle', False, '')

  flags.DEFINE_boolean('dump_stats', False, '')

  flags.DEFINE_string('bbo_cache_root',
                      '/remote/iosg/data-2/buckets/feed.derived.interval_h5/coin/main/PT1M/', '')

  flags.DEFINE_bool('squeeze_stat', False, '')

  init_flags()

  # Fixed feed host for this analyzer; overrides whatever init_flags set.
  flags.FLAGS.feed_machine = 'feed-05.ap-northeast-1.aws'


if __name__ == "__main__":
  define_flags()
  logging.basicConfig(level='DEBUG', format='%(levelname)8s %(asctime)s %(name)s] %(message)s')
  app.run(main)
