import concurrent.futures
import datetime
import functools
import os
import logging
import pathlib
import signal
import tempfile
import traceback
from typing import Dict, List, Optional

import pandas as pd
from absl import (app, flags)
from jinja2 import Template
from tornado.ioloop import IOLoop

from coin.base.datetime_util import (
    convert_string_to_datetime,
    get_dt_ranges,
    to_timestamp_int,)
from coin.base.logging import init_logging
from coin.proto.coin_request_pb2 import StrategyRequestProto
from coin.proto.coin_strategy_summary_pb2 import StrategyStatLog
from coin.support.pta.logging.strategy_stat_logger import (
    gen_strategy_stat_dir,
    enumerate_strat_stat_log_into_stratinfos,
    StrategyStatReader,)
from coin.support.pta.plot.strat_plotter import StrategyPlotter
from coin.support.pta.util.info_util import enumerate_strat_infos
from coin.tool.strat_monitor.app.monitor_runner_slurm.monitor_runner_slurm import (
    _get_group_from_sbatch_args,
    SBatchRunner)
from coin.tool.strat_monitor.util.monitor_util import (
    get_active_strategy,
    get_pnl_adjust_info,
    separate_into_group,)
from xunkemgmt_client.client.util.query_util import (
    query_accounts,
    query_strategies)


FLAGS = flags.FLAGS


# Jinja2 template for the sbatch submission script generated by
# _run_with_slurm.  Rendered with:
#   coin_env - conda environment name to activate on the worker nodes
#   cmds     - one fully-formed command line per strategy bucket
# Each command is wrapped in `srun -N1 -n1 --exclusive` and fed to GNU
# parallel, which keeps NUM_PARALLEL of them in flight.  NUM_PARALLEL is
# SLURM_NTASKS + 30 — presumably deliberate oversubscription so tasks queue
# behind busy slots; TODO(review): confirm the +30 offset is intentional.
TMPL = '''#!/bin/bash
#SBATCH --spread-job

if [ -z ${SLURM_JOB_ID} ]; then
  (>&2 echo "ERROR: run this script on slurm.")
  exit 1
fi

set -e
export PYTHONUNBUFFERED=1
source ~/miniconda3/etc/profile.d/conda.sh
conda activate {{coin_env}}

NUM_PARALLEL="$((${SLURM_NTASKS} + 30))"
parallel -j "${NUM_PARALLEL}" --delay 0.1 <<EOF
{% for cmd in cmds %}
srun -N1 -n1 -l --exclusive --unbuffered {{cmd}}
{% endfor %}
EOF
'''


class _Onlog:
  def __init__(self):
    self.strat_stat_logs = []

  def on_log(self, timestamp: int, log):
    pb = StrategyStatLog()
    pb.ParseFromString(log)
    self.strat_stat_logs.append(pb)


def _dump_plot(*,
               start_dt: datetime.datetime,
               end_dt: datetime.datetime,
               root_dir: str,
               plot_dir: str,
               strategy_name: str,
               machine: str,
               account_df: pd.DataFrame,
               pnl_adj: Optional[Dict[str, List[Dict]]]=None):
  """Replay archived stat logs for one strategy/machine and render its PnL plot.

  Args:
    start_dt/end_dt: time range of logs to read and plot.
    root_dir: archive root the stat reader scans.
    plot_dir: output directory for the generated plot.
    strategy_name/machine: identify which archived stream to read.
    account_df: account metadata passed through to the plotter.
    pnl_adj: optional PnL adjustment entries passed through to the plotter.
  """
  collector = _Onlog()
  reader = StrategyStatReader()
  reader.run_from_archive(
      on_log_callback=collector.on_log,
      start_time=start_dt,
      end_time=end_dt,
      root_dir=root_dir,
      machine=machine,
      strategy_name=strategy_name,
  )
  StrategyPlotter(account_df, pnl_adj).do_pnl_plot(
      strat_name=strategy_name,
      strat_stat_logs=collector.strat_stat_logs,
      start_time=start_dt,
      end_time=end_dt,
      plot_dir=plot_dir,
  )


def _run_with_slurm(strat_list: List[str]):
  """Fan the strategy list out to slurm.

  Buckets the strategies, renders a batch script that re-invokes this file
  once per bucket (with --max_workers=1), and submits it via sbatch.

  Args:
    strat_list: strategy names to distribute across slurm tasks.
  """
  num_group = _get_group_from_sbatch_args(FLAGS.sbatch_args)
  strat_buckets = separate_into_group(strat_list, num_group)
  logging.info('num of strat buckets: %s', len(strat_buckets))
  # Re-run this very file on the worker nodes; use a cwd-relative path so the
  # generated command line works from the submission directory.
  curr_file = os.path.relpath(__file__, pathlib.Path().resolve())
  cmds = []
  for strat_bucket in strat_buckets:
    args_dict = {
        'aggregate': FLAGS.aggregate,
        'strategy_name': ','.join(strat_bucket),
        'start_time': FLAGS.start_time,
        'end_time': FLAGS.end_time,
        'pnl_adj_file': FLAGS.pnl_adj_file,
        'root_dir': FLAGS.root_dir,
        'plot_dir': FLAGS.plot_dir,
        # Each slurm task works through its bucket serially; parallelism comes
        # from running many tasks, not from a process pool inside each task.
        'max_workers': 1,
    }
    # flag_dict_to_args already yields strings; no comprehension needed.
    args_str = ' '.join(flags.flag_dict_to_args(args_dict))
    cmds.append(f'./pyrunner {curr_file} {args_str}')
  rendered = Template(TMPL).render({
      'coin_env': FLAGS.coin_env,
      'cmds': cmds,
  })
  # dir='' places the temp script in the cwd (tempfile joins dir with the
  # generated name) — presumably so a shared directory visible to the slurm
  # nodes holds the script; TODO(review): confirm this is intentional.
  with tempfile.NamedTemporaryFile(mode='w', dir='') as script_file:
    script_file.write(rendered)
    script_file.flush()
    ioloop = IOLoop.current()
    sbatch_runner = SBatchRunner(ioloop, script_file.name, FLAGS.sbatch_args)
    # Let SIGTERM / Ctrl-C cancel the submitted slurm job cleanly.
    signal.signal(signal.SIGTERM, sbatch_runner.signal_handler)
    signal.signal(signal.SIGINT, sbatch_runner.signal_handler)
    job_id = sbatch_runner.run()
    logging.info('job_id: %s', job_id)


def _run_with_process_pool(strat_list: List[str],
                           start_dt: datetime.datetime,
                           end_dt: datetime.datetime,):
  """Dump PnL plots for every strategy in strat_list, one time range at a time.

  Args:
    strat_list: strategy names to plot.
    start_dt/end_dt: overall time span; split into ranges per FLAGS.aggregate.

  Returns:
    Total number of strategy plot dumps that failed across all ranges.
  """
  root_dir = FLAGS.root_dir
  plot_dir = FLAGS.plot_dir
  max_workers = FLAGS.max_workers
  dt_ranges = get_dt_ranges(start_dt, end_dt, FLAGS.aggregate)
  pnl_adj = {}
  if FLAGS.pnl_adj_file is not None:
    pnl_adj = get_pnl_adjust_info(FLAGS.pnl_adj_file, start_dt.date(), end_dt.date())
  account_df = query_accounts()
  # Build the membership set once; previously `set(strat_list)` was rebuilt
  # inside the filter comprehension for every strat_info.
  wanted_strats = set(strat_list)

  total_num_fail = 0
  # Distinct names on purpose: the original code shadowed the start_dt/end_dt
  # parameters here, which read confusingly even though they were only needed
  # above this loop.
  for range_start, range_end in dt_ranges:
    logging.info('\nRunning for %s-%s ...',
                 range_start.strftime('%Y%m%dT%H%M%S'),
                 range_end.strftime('%Y%m%dT%H%M%S'))
    strat_infos = enumerate_strat_infos(
        root_dir, range_start, range_end, enumerate_strat_stat_log_into_stratinfos)
    strat_infos = [
        strat_info for strat_info in strat_infos
        if strat_info.strategy_name in wanted_strats
    ]
    # Pairs of (strat_info, zero-arg callable that dumps that strategy's plot).
    func_list = []
    for strat_info in strat_infos:
      each_plot_dir = gen_strategy_stat_dir(
          log_root=plot_dir,
          trading_date=range_start,
          strat_request=StrategyRequestProto(strategy_name=strat_info.strategy_name),
          machine=strat_info.machine)
      # exist_ok avoids the check-then-create race between concurrent runs.
      os.makedirs(each_plot_dir, exist_ok=True)
      func = functools.partial(_dump_plot,
                               start_dt=range_start,
                               end_dt=range_end,
                               root_dir=root_dir,
                               plot_dir=each_plot_dir,
                               strategy_name=strat_info.strategy_name,
                               machine=strat_info.machine,
                               account_df=account_df,
                               pnl_adj=pnl_adj)
      func_list.append((strat_info, func))

    if max_workers > 1:
      with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
        fut_to_strat_info = {executor.submit(func): key for key, func in func_list}
        done_futs, not_done_futs = concurrent.futures.wait(
            fut_to_strat_info.keys(), timeout=None)
        # Replace each callable with the completed future's bound .result
        # method: the accounting loop below then re-raises any worker
        # exception when it calls func().
        func_list = [(fut_to_strat_info[fut], fut.result) for fut in done_futs]
        for fut in not_done_futs:
          logging.error(
              'Fail to dump strategy log due to timeout. %s',
              fut_to_strat_info[fut]._asdict())
        # NOTE(review): reaches into executor internals to hard-kill workers so
        # a hung child cannot block the `with` exit.
        for process in executor._processes.values():
          process.terminate()

    success = 0
    num_func = len(func_list)
    for key, func in func_list:
      try:
        func()
        success += 1
      except Exception as e:
        logging.error('Fail to dump plot. %s, %s, %s', key._asdict(), type(e), e)
        logging.error(traceback.format_exc())
    logging.info('Success rate: %s/%s', success, num_func)
    total_num_fail += num_func - success
  return total_num_fail


def main(_):
  """Entry point: resolve the strategy list from flags, then dispatch.

  Dispatches to slurm when --sbatch_args is given, otherwise runs locally
  with a process pool.  Returns the process-pool failure count (the slurm
  path returns None), which app.run uses as the exit status.
  """
  init_logging()
  assert FLAGS.start_time, '--start_time must be specified.'
  assert FLAGS.end_time, '--end_time must be specified.'
  start_dt = convert_string_to_datetime(FLAGS.start_time)
  end_dt = convert_string_to_datetime(FLAGS.end_time)
  root_dir = FLAGS.root_dir
  assert root_dir and os.path.exists(root_dir)
  assert FLAGS.plot_dir is not None

  if FLAGS.strategy_name is not None:
    # Explicit list wins; no need to query the strategy universe at all.
    strat_list = [elem.strip() for elem in FLAGS.strategy_name.split(',')]
  else:
    # Only hit the strategy service when we actually need the universe
    # (previously queried unconditionally even on the branch above).
    filtered = query_strategies(as_proto=True)
    # Active = updated at or after the requested start time.
    strat_list = get_active_strategy(
        update_timestamp_threshold=to_timestamp_int(start_dt))
    if FLAGS.business_unit is not None:
      business_units = {elem.strip() for elem in FLAGS.business_unit.split(',')}
      filtered = [elem for elem in filtered if elem.business_unit in business_units]
    if FLAGS.strategy_group is not None:
      strategy_groups = {elem.strip() for elem in FLAGS.strategy_group.split(',')}
      filtered = [elem for elem in filtered if elem.strategy_group in strategy_groups]
    # Keep only active strategies that survive the universe filters.
    strat_list = list(set(strat_list).intersection(
        elem.strategy_name for elem in filtered))

  if FLAGS.sbatch_args is not None:
    ret = _run_with_slurm(strat_list)
  else:
    ret = _run_with_process_pool(strat_list, start_dt, end_dt)
  return ret


if __name__ == '__main__':
  # Flags are defined under the __main__ guard so importing this module
  # elsewhere does not register them; quoting normalized to single quotes.
  flags.DEFINE_boolean('aggregate', False, '')
  flags.DEFINE_string('business_unit', None, 'business_unit')
  flags.DEFINE_string('strategy_group', None, 'strategy_group')
  flags.DEFINE_string('strategy_name', None, 'strategy_name')
  flags.DEFINE_string('start_time', None, '%Y%m%dT%H%M%S')
  flags.DEFINE_string('end_time', None, '%Y%m%dT%H%M%S')
  flags.DEFINE_string('pnl_adj_file', None, 'pnl_adj_file')
  flags.DEFINE_string('root_dir', None, 'root_dir')
  flags.DEFINE_string('plot_dir', None, 'plot_dir')
  flags.DEFINE_integer('max_workers', 24, 'Max number of workers.')
  flags.DEFINE_string('coin_env', 'coin2_motion_env_migration', 'coin env')
  flags.DEFINE_string('sbatch_args', None, 'sbatch arguments')
  app.run(main)
