import contextlib
import datetime
import json
import logging
import signal
import subprocess
import sys
import tempfile
import time

from absl import app, flags
from tornado.ioloop import IOLoop, PeriodicCallback
import jinja2
import pytz

from coin.base.datetime_util import to_timestamp_int
from coin.tool.strat_monitor.util.monitor_util import (get_active_strategy,
                                                       get_topic_from_config,
                                                       get_topic_from_strat,
                                                       separate_into_group)
from coin.util.queue.config import KafkaConfig
from coin.util.queue.tools.kafka_topic import parse_kafka_topic


FLAGS = flags.FLAGS


class SBatchError(Exception):
  """Raised when the sbatch subprocess finishes with a non-zero return code."""


class SBatchRunner(object):
  """Submits a slurm script via `sbatch` and manages the subprocess lifecycle.

  The runner launches `sbatch --parsable`, spins the supplied tornado IOLoop
  until the subprocess exits (polled every 10s) or a signal requests
  termination, then reads the job id from stdout and issues `scancel` for it.
  """

  def __init__(self, ioloop, script_filepath, sbatch_args=None):
    """
    Args:
      ioloop: tornado IOLoop used to wait on the sbatch process / signals.
      script_filepath: path of the slurm script to submit.
      sbatch_args: extra sbatch arguments, either a whitespace-separated
        string or a list of tokens; defaults to no extra arguments.
    """
    self._ioloop = ioloop
    self._script_filepath = script_filepath
    self._sbatch_args = sbatch_args or []
    self._sbatch_process = None

  def run(self):
    """Submits the script and blocks until the sbatch process is done.

    Returns:
      The integer slurm job id printed by `sbatch --parsable`.

    Raises:
      SBatchError: if sbatch exited with a non-zero return code.
    """
    sbatch_args = self._sbatch_args
    if isinstance(sbatch_args, str):
      # split() with no separator collapses any whitespace run, unlike the
      # previous split(' ') which emitted empty tokens on repeated spaces.
      sbatch_args = sbatch_args.split()

    # --parsable makes sbatch print just the job id, which is parsed below.
    sbatch_args = ['--parsable'] + sbatch_args
    # The list always holds at least --parsable here, so the old emptiness
    # guard before logging was dead code and is removed.
    logging.debug('sbatch_args: %s', ' '.join(sbatch_args))

    self._sbatch_process = subprocess.Popen(
        ['sbatch', *sbatch_args, self._script_filepath],
        stdout=subprocess.PIPE)
    PeriodicCallback(self._check_exit, 10 * 1000).start()
    self._ioloop.start()

    sbatch_output = self._sbatch_process.stdout.read().decode('utf-8')
    job_id = int(sbatch_output.strip())
    sys.stderr.write('Job ID: %d\n' % job_id)
    # NOTE(review): the submitted job is cancelled as soon as the ioloop
    # exits; presumably sbatch is expected to block until the job is over
    # (e.g. via --wait in sbatch_args) — confirm with callers. scancel is
    # retried a few times for robustness.
    for _ in range(3):
      # Lazy %-style logging args instead of eager string formatting.
      logging.info('scancel %s', job_id)
      subprocess.run(['scancel', str(job_id)])
      time.sleep(0.2)

    if self._sbatch_process.returncode != 0:
      raise SBatchError(self._sbatch_process.args)
    return job_id

  def _check_exit(self):
    # Periodic poll: stop the ioloop once the sbatch subprocess has exited.
    if self._sbatch_process.poll() is not None:
      logging.info('stop ioloop.')
      self._ioloop.stop()

  def _kill_sbatch_process(self):
    # Renamed from the typo'd `_kill_sbtach_process` (private, no external
    # callers). Terminates sbatch if it is still running.
    logging.info('kill_sbatch_process.')
    if self._sbatch_process is not None:
      self._sbatch_process.terminate()

  def signal_handler(self, sig, frame):
    """SIGTERM/SIGINT handler: terminates sbatch from the ioloop thread."""
    logging.warning('Caught signal: %s', sig)
    self._ioloop.add_callback_from_signal(self._kill_sbatch_process)


def _get_group_from_sbatch_args(sbatch_args):
  group = None
  assert isinstance(sbatch_args, str)
  splitted = sbatch_args.split(' ')
  for arg in splitted:
    try:
      if arg.startswith('-n'):
        group = int(arg[2:])
    except Exception:
      continue
  return group


def _generate_slurm_script(kafka_config_filename,
                           strat_file_list,
                           offset_hour,
                           pnl_adj_file,
                           realtime_pnl_adj,
                           dump_result_after_sec,
                           dump_result_frequency,
                           exit_after_min,
                           memcached_config,
                           plot_pnl,
                           plot_order,
                           plot_dir,
                           remote_home,
                           coin_env):
  """Renders the slurm submission script from its jinja2 template.

  All arguments are forwarded verbatim into the template context; the
  template path is resolved relative to the current working directory.
  Returns the rendered script as a string.
  """
  env = jinja2.Environment(
      loader=jinja2.FileSystemLoader(searchpath='./'))
  context = dict(
      kafka_config_filename=kafka_config_filename,
      strat_file_list=strat_file_list,
      offset_hour=offset_hour,
      pnl_adj_file=pnl_adj_file,
      realtime_pnl_adj=realtime_pnl_adj,
      dump_result_after_sec=dump_result_after_sec,
      dump_result_frequency=dump_result_frequency,
      exit_after_min=exit_after_min,
      memcached_config=memcached_config,
      plot_pnl=plot_pnl,
      plot_order=plot_order,
      plot_dir=plot_dir,
      remote_home=remote_home,
      coin_env=coin_env)
  template = env.get_template(
      'coin/tool/strat_monitor/app/monitor_runner_slurm/slurm_script.tmpl')
  return template.render(**context)


def main(argv):
  """Shards active strategies into groups and submits the monitor via sbatch.

  Resolves the kafka topics to monitor, de-duplicates strategy names, writes
  one temp json file per strategy group, renders the slurm script, and either
  prints it (--dry_run) or submits it through SBatchRunner.
  """
  assert FLAGS.remote_home is not None
  # Topic discovery: explicit strategy config wins; otherwise take the
  # strategies active within the last day.
  if FLAGS.strategy_config_filename is not None:
    topics = get_topic_from_config(
        FLAGS.strategy_config_filename, FLAGS.kafka_config_filename)
  else:
    one_day_ago = to_timestamp_int(
        datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) - datetime.timedelta(days=1))
    kafka_config = KafkaConfig.from_cmd_config(FLAGS.kafka_config_filename)
    topics = get_topic_from_strat(
        get_active_strategy(update_timestamp_threshold=one_day_ago), kafka_config)

  # De-duplicate strategy names parsed out of the topic list.
  strategy_names = list({
      parse_kafka_topic(topic)[1].strategy_name for topic in topics})
  group_count = _get_group_from_sbatch_args(FLAGS.sbatch_args)
  strat_groups = separate_into_group(strategy_names, group_count)
  print('num of strat groups', len(strat_groups))

  strat_file_list = []
  with contextlib.ExitStack() as stack:
    # One temp json file per group, kept alive by the ExitStack until sbatch
    # has run, since the slurm script reads the files by name.
    for group in strat_groups:
      group_file = stack.enter_context(
          tempfile.NamedTemporaryFile(mode='w', dir=''))
      group_file.write(
          json.dumps([{'strategy_name': strat} for strat in group], indent=2))
      group_file.flush()
      strat_file_list.append(group_file.name)

    script = _generate_slurm_script(kafka_config_filename=FLAGS.kafka_config_filename,
                                    strat_file_list=strat_file_list,
                                    offset_hour=FLAGS.offset_hour,
                                    pnl_adj_file=FLAGS.pnl_adj_file,
                                    realtime_pnl_adj=FLAGS.realtime_pnl_adj,
                                    dump_result_after_sec=FLAGS.dump_result_after_sec,
                                    dump_result_frequency=FLAGS.dump_result_frequency,
                                    exit_after_min=FLAGS.exit_after_min,
                                    memcached_config=FLAGS.memcached_config,
                                    plot_pnl=FLAGS.plot_pnl,
                                    plot_order=FLAGS.plot_order,
                                    plot_dir=FLAGS.plot_dir,
                                    remote_home=FLAGS.remote_home,
                                    coin_env=FLAGS.coin_env)

    if FLAGS.dry_run:
      print(script)
      return

    with tempfile.NamedTemporaryFile(mode='w', dir='') as script_file:
      script_file.write(script)
      script_file.flush()

      ioloop = IOLoop.current()
      runner = SBatchRunner(ioloop, script_file.name, FLAGS.sbatch_args)
      # Route SIGTERM/SIGINT through the runner so sbatch gets terminated.
      signal.signal(signal.SIGTERM, runner.signal_handler)
      signal.signal(signal.SIGINT, runner.signal_handler)
      print(runner.run())


if __name__ == '__main__':
  logging.basicConfig(
      level='DEBUG',
      format='%(levelname)8s %(asctime)s %(name)s %(filename)s:%(lineno)d] %(message)s')

  # Flag table: (definer, flag name, default, help). Registered via one loop
  # so the whole set reads as a single declarative list.
  _FLAG_DEFS = (
      (flags.DEFINE_string, 'kafka_config_filename',
       '../../coin_deploy/support_monitor/config/kafka_config.json',
       'kafka config'),
      (flags.DEFINE_string, 'strategy_config_filename', None,
       'strategy_config_filename'),
      (flags.DEFINE_float, 'offset_hour', None, 'offset_hour'),
      (flags.DEFINE_string, 'pnl_adj_file', None, 'pnl_adj_file'),
      (flags.DEFINE_boolean, 'realtime_pnl_adj', False, 'realtime pnl adj'),
      (flags.DEFINE_integer, 'dump_result_after_sec', 60,
       'dump_result_after_sec'),
      (flags.DEFINE_integer, 'dump_result_frequency', 20,
       'dump_result_frequency'),
      (flags.DEFINE_integer, 'exit_after_min', 240, 'exit_after_min'),
      (flags.DEFINE_string, 'memcached_config', None,
       'path of database config.'),
      (flags.DEFINE_boolean, 'plot_pnl', False, 'plot pnl'),
      (flags.DEFINE_boolean, 'plot_order', False, 'plot order'),
      (flags.DEFINE_string, 'plot_dir', None, 'plot dir'),
      (flags.DEFINE_string, 'remote_home', None, 'path of remote home.'),
      (flags.DEFINE_string, 'coin_env', 'miniconda3/envs/coin2_motion_env',
       'coin_env path relative to remote_home'),
      (flags.DEFINE_bool, 'dry_run', False,
       'print sbatch script only and not run sbatch'),
      (flags.DEFINE_string, 'sbatch_args', None, 'sbatch arguments'),
  )
  for define_flag, flag_name, default_value, help_text in _FLAG_DEFS:
    define_flag(flag_name, default_value, help_text)

  app.run(main)
