import contextlib
import datetime
import json
import pytz
import signal
import tempfile

from absl import app, flags
import jinja2
import pytz
from tornado.ioloop import IOLoop

from coin.base.database.util import read_db_config
from coin.base.datetime_util import (
    to_timestamp_int,
    convert_string_to_dates)
from coin.tool.strat_monitor.util.monitor_util import (
    get_active_strategy,
    get_strat_from_json_config,
    separate_into_group)
from coin.tool.strat_monitor.app.monitor_runner_slurm.monitor_runner_slurm import (
    _get_group_from_sbatch_args,
    SBatchRunner)
from coin.support.pta.app.pta_weekly_dumper_runner_slurm.pta_weekly_dumper_runner_slurm import get_log_size_stats
from xunkemgmt_client.client.util.query_util import query_strategies


FLAGS = flags.FLAGS


# Fallback per-strategy daily log size (bytes) when no stats row exists.
_DEFAULT_LOG_SIZE_BYTE = 25 * 10**6


def _build_strat_list(start_date):
  """Return the strategy names to dump, honoring the config/group flags.

  Args:
    start_date: datetime.date; start of the dump window, used as the
      activity threshold when no explicit config file is given.

  Returns:
    List of strategy name strings (order unspecified when group-filtered).
  """
  if FLAGS.strategy_config_filename is not None:
    strat_list = get_strat_from_json_config(FLAGS.strategy_config_filename)
  else:
    # Only consider strategies updated at/after the start of the window.
    threshold = to_timestamp_int(datetime.datetime.combine(
        start_date, datetime.time.min.replace(tzinfo=pytz.UTC)))
    strat_list = get_active_strategy(update_timestamp_threshold=threshold)
  if FLAGS.strategy_group is not None:
    strategy_groups = [elem.strip() for elem in FLAGS.strategy_group.split(',')]
    all_strats = [elem.strategy_name for elem in query_strategies(as_proto=True)
                  if elem.strategy_group in strategy_groups]
    strat_list = list(set(strat_list).intersection(set(all_strats)))
  return strat_list


def _estimate_group_mem_byte(strat_group, log_size_stats, num_days):
  """Estimate the sbatch memory requirement for one strategy group, in bytes.

  Log on disk is compressed by around 1/15 and we give extra memory by 150%
  due to oe save for safety, plus an extra 1G for code.

  Args:
    strat_group: list of strategy name strings.
    log_size_stats: pandas DataFrame indexed by strategy name with a 'mean'
      column of daily log sizes in bytes.
    num_days: number of days in the dump window.

  Returns:
    Estimated memory in bytes (float).
  """
  mem_byte = 0
  for strat in strat_group:
    if strat in log_size_stats.index:
      mem_byte += log_size_stats.loc[strat, 'mean']
    else:
      mem_byte += _DEFAULT_LOG_SIZE_BYTE
  return mem_byte * num_days * 15 * 2.5 + 10**9


def _render_script(strat_info_list, start_time, end_time):
  """Render the sbatch script from the jinja template and FLAGS.

  Args:
    strat_info_list: list of dicts with 'name' (strategy json file path) and
      'mem-per-cpu' (slurm memory string) per group.
    start_time: timezone-aware datetime for the window start.
    end_time: timezone-aware datetime for the window end.

  Returns:
    The rendered sbatch script as a string.
  """
  template_loader = jinja2.FileSystemLoader(searchpath='./')
  template_env = jinja2.Environment(loader=template_loader)
  template = template_env.get_template(
      'coin/support/pta/app/pta_dumper_runner_slurm/slurm_script.tmpl')
  return template.render(
      strat_info_list=strat_info_list,
      start_time=start_time.strftime('%Y%m%dT%H%M%S'),
      end_time=end_time.strftime('%Y%m%dT%H%M%S'),
      ignore_error=FLAGS.ignore_error,
      offset_hour=FLAGS.offset_hour,
      aggregate=FLAGS.aggregate,
      interval_hour=FLAGS.interval_hour,
      root_dir=FLAGS.root_dir,
      kafka_config_filename=FLAGS.kafka_config_filename,
      pnl_adj_file=FLAGS.pnl_adj_file,
      realtime_pnl_adj=FLAGS.realtime_pnl_adj,
      pta_db_config=FLAGS.pta_db_config,
      latency_db_config=FLAGS.latency_db_config,
      output_file=FLAGS.output_file,
      log_history_dir=FLAGS.log_history_dir,
      plot_dir=FLAGS.plot_dir,
      plot_order=FLAGS.plot_order,
      remote_home=FLAGS.remote_home,
      coin_env=FLAGS.coin_env)


def main(argv):
  """Group strategies, render an sbatch script, and submit it via slurm."""
  # Explicit usage error instead of `assert`: asserts are stripped under -O.
  if FLAGS.remote_home is None:
    raise app.UsageError('--remote_home is required.')
  start_date = convert_string_to_dates(FLAGS.start_date)[0]
  end_date = convert_string_to_dates(FLAGS.end_date)[0]
  start_time = datetime.datetime.combine(
      start_date, datetime.time.min.replace(tzinfo=pytz.UTC))
  end_time = datetime.datetime.combine(
      end_date, datetime.time.max.replace(tzinfo=pytz.UTC))
  num_days = (end_date - start_date).days
  strat_list = _build_strat_list(start_date)
  num_group = _get_group_from_sbatch_args(FLAGS.sbatch_args)
  strat_groups = separate_into_group(strat_list, num_group)
  print('num of strat groups', len(strat_groups))

  pta_db_config = read_db_config(FLAGS.pta_db_config)
  log_size_stats = get_log_size_stats(
      start_date, end_date, FLAGS.root_dir, pta_db_config)
  strat_info_list = []
  with contextlib.ExitStack() as stack:
    # The per-group strategy json files must stay alive until sbatch has been
    # submitted (the rendered script references them by name), so they are
    # all kept open on the ExitStack.
    for strat_group in strat_groups:
      mem_byte = _estimate_group_mem_byte(strat_group, log_size_stats, num_days)
      json_str = json.dumps(
          [{'strategy_name': strat} for strat in strat_group], indent=2)
      # dir='' keeps the temp file in the working directory so the slurm
      # script can reference it by relative name.
      strat_file = stack.enter_context(
          tempfile.NamedTemporaryFile(mode='w', dir=''))
      strat_file.write(json_str)
      strat_file.flush()
      strat_info_list.append(
          {'name': strat_file.name, 'mem-per-cpu': '%dM' % (mem_byte / 10**6)})

    script = _render_script(strat_info_list, start_time, end_time)

    if FLAGS.dry_run:
      print(script)
      return

    with tempfile.NamedTemporaryFile(mode='w', dir='') as script_file:
      script_file.write(script)
      script_file.flush()

      ioloop = IOLoop.current()
      sbatch_runner = SBatchRunner(ioloop, script_file.name, FLAGS.sbatch_args)
      # Let SIGTERM / Ctrl-C be handled by the runner (e.g. to cancel the job).
      signal.signal(signal.SIGTERM, sbatch_runner.signal_handler)
      signal.signal(signal.SIGINT, sbatch_runner.signal_handler)
      job_id = sbatch_runner.run()
      print(job_id)


if __name__ == '__main__':
  # Flags are defined under the __main__ guard so that importing this module
  # (e.g. for its helpers) does not register flags as a side effect.
  flags.DEFINE_string('start_date', None, 'Start date in form of %Y%m%d.')
  flags.DEFINE_string('end_date', None, 'End date in form of %Y%m%d.')
  flags.DEFINE_boolean('ignore_error', False, 'Ignore error')
  flags.DEFINE_float(
      'offset_hour', None,
      'offset hour between the start time to read data and start time of pta.')
  flags.DEFINE_string('strategy_group', None, 'filter by strategy_group')
  flags.DEFINE_boolean('aggregate', False,
                       'calculate total pnl from start_date to end_date.')
  flags.DEFINE_float('interval_hour', None, 'interval_hour')
  flags.DEFINE_string('root_dir', None, '')
  flags.DEFINE_string('kafka_config_filename', None, 'kafka config')
  flags.DEFINE_string('pnl_adj_file', None, 'pnl_adj_file')
  flags.DEFINE_boolean('realtime_pnl_adj', False, 'realtime pnl adj')
  flags.DEFINE_string('pta_db_config', None, '')
  flags.DEFINE_string('latency_db_config', None, 'path of latency db config.')
  flags.DEFINE_string('output_file', None, 'output_file')
  flags.DEFINE_string('log_history_dir', None, 'log_history_dir')
  flags.DEFINE_string('plot_dir', None, 'plot dir')
  flags.DEFINE_boolean('plot_order', False, 'do order plot')
  flags.DEFINE_string('strategy_config_filename', None,
                      'strategy_config_filename')
  flags.DEFINE_string('remote_home', None, 'path of remote home.')
  flags.DEFINE_string('coin_env', 'miniconda3/envs/coin2_motion_env',
                      'coin_env path relative to remote_home')
  flags.DEFINE_boolean('dry_run', False,
                       'print sbatch script only and not run sbatch')
  flags.DEFINE_string('sbatch_args', None, 'sbatch arguments')

  # Fail fast with a clear usage message instead of crashing inside main()
  # (main dereferences these unconditionally).
  flags.mark_flag_as_required('start_date')
  flags.mark_flag_as_required('end_date')
  flags.mark_flag_as_required('remote_home')

  app.run(main)