import contextlib
import datetime
import json
import signal
import tempfile

from absl import app, flags
import jinja2
import pandas as pd
import pytz
from sqlalchemy import create_engine, MetaData
from sqlalchemy.sql import text
from sqlalchemy.orm import scoped_session, sessionmaker
from tornado.ioloop import IOLoop

from coin.base.database.util import read_db_config
from coin.base.datetime_util import to_timestamp_int
from coin.tool.strat_monitor.util.monitor_util import (
    get_active_strategy,
    get_strat_from_json_config,
    separate_into_group)
from coin.tool.strat_monitor.plot.strat_plotter import (
    get_week_range)
from coin.tool.strat_monitor.app.monitor_runner_slurm.monitor_runner_slurm import (
    _get_group_from_sbatch_args,
    SBatchRunner)


FLAGS = flags.FLAGS


def get_log_size_stats(start_date, end_date, source_info, db_config):
  """Aggregate daily per-strategy log sizes from StrategyPtaStatsBySourceView.

  Args:
    start_date: first trading date (inclusive) to query.
    end_date: last trading date (inclusive) to query.
    source_info: value matched against the view's ``source_info`` column.
    db_config: dict with 'user', 'password', 'host' and 'database' entries.

  Returns:
    pandas DataFrame indexed by ``strategy_name`` with columns
    ['sum', 'mean', 'min', 'max', 'count'] aggregated over ``log_size_byte``.
    When no rows match, an empty frame with the same columns and index name
    is returned (no spurious rows), so membership tests on ``.index`` stay
    meaningful for callers.
  """
  uri = 'mysql+mysqldb://{}:{}@{}/{}'.format(
      db_config['user'], db_config['password'], db_config['host'], db_config['database'])
  engine = create_engine(uri, echo=False, pool_recycle=3600)
  try:
    metadata = MetaData()
    metadata.reflect(engine, views=True)
    source_view = metadata.tables['StrategyPtaStatsBySourceView']
    Session = scoped_session(sessionmaker(autocommit=False,
                                          autoflush=False,
                                          bind=engine))
    try:
      cols = ['strategy_id', 'strategy_name', 'strategy_group', 'trader', 'trading_date',
              'machine', 'source_info', 'log_size_byte', 'log_num', 'log_num_dict',
              'log_replay_duration']
      cols_text = [text(col) for col in cols]
      query_obj = Session.query(*cols_text).\
          filter(source_view.c.trading_date >= start_date).\
          filter(source_view.c.trading_date <= end_date).\
          filter(source_view.c.source_info == source_info).\
          group_by(source_view.c.strategy_name, source_view.c.trading_date)
      # Use a distinct name; the original shadowed the `source_info` parameter.
      raw_stats = pd.DataFrame(query_obj.all(), columns=cols)
    finally:
      # Release the session back so no connection is left checked out.
      Session.remove()
  finally:
    # The engine is created per call, so dispose its connection pool too.
    engine.dispose()

  agg_cols = ['sum', 'mean', 'min', 'max', 'count']
  if raw_stats.empty:
    # Empty result: return a frame with the aggregation columns and the
    # index *named* 'strategy_name'. (The original passed
    # index=['strategy_name'], creating a bogus all-NaN row keyed by that
    # literal string.)
    log_size_stats = pd.DataFrame(columns=agg_cols).rename_axis('strategy_name')
  else:
    log_size_stats = raw_stats.groupby('strategy_name')['log_size_byte'].agg(agg_cols)
  return log_size_stats


def main(argv):
  """Build and submit a slurm sbatch job for the weekly PTA dumper.

  Splits candidate strategies into groups, estimates per-group memory from
  historical log sizes, writes one temp JSON file per group, renders an
  sbatch script from a jinja2 template, and (unless --dry_run) submits it
  through SBatchRunner.
  """
  assert FLAGS.remote_home is not None
  strategy_config_filename = FLAGS.strategy_config_filename
  start_date, end_date = get_week_range(FLAGS.year, FLAGS.start_week)
  pta_db_config = read_db_config(FLAGS.pta_db_config)
  num_days = (end_date - start_date).days
  if strategy_config_filename is not None:
    # Explicit strategy list supplied via --strategy_config_filename.
    strat_candidates = get_strat_from_json_config(strategy_config_filename)
  else:
    # Otherwise select strategies active since the start of the week (UTC
    # midnight converted to an integer timestamp).
    threshold = to_timestamp_int(datetime.datetime.combine(
        start_date, datetime.time.min.replace(tzinfo=pytz.UTC)))
    strat_candidates = get_active_strategy(update_timestamp_threshold=threshold)
  num_group = _get_group_from_sbatch_args(FLAGS.sbatch_args)
  strat_groups = separate_into_group(strat_candidates, num_group)
  print('num of strat groups', len(strat_groups))

  strat_info_list = []
  # NOTE(review): FLAGS.root_dir is passed as the `source_info` filter of the
  # stats view — presumably the log root identifies the source; confirm.
  log_size_stats = get_log_size_stats(
      start_date, end_date, FLAGS.root_dir, pta_db_config)
  # ExitStack keeps every per-group temp file alive until after the sbatch
  # job is submitted, since the rendered script references the files by name.
  with contextlib.ExitStack() as stack:
    total_mem_byte = 0
    for strat_group in strat_groups:
      mem_byte = 0
      for strat in strat_group:
        # Fallback daily log size (25 MB) when no history exists for a strat.
        log_size = 25 * 10**6
        if strat in log_size_stats.index:
          mean_log_size = log_size_stats.loc[strat, 'mean']
          # Guard against zero/NaN means; NaN > 0 is False, keeping fallback.
          if mean_log_size > 0:
            log_size = mean_log_size
        mem_byte += log_size
      # Logs on disk are compressed roughly 15x; add 50% headroom for safety
      # and an extra 1 GB for code.
      mem_byte = mem_byte * num_days * 15 * 1.5 + 10**9
      # NOTE(review): total_mem_byte is accumulated but never used below.
      total_mem_byte += mem_byte
      json_str = json.dumps(
          [{'strategy_name': strat} for strat in strat_group], indent=2)
      # dir='' appears to place the temp file in the current working
      # directory so the slurm job (sharing the filesystem) can read it by
      # relative name — TODO confirm.
      strat_file = tempfile.NamedTemporaryFile(mode='w', dir='')
      stack.enter_context(strat_file)
      strat_file.write(json_str)
      strat_file.flush()
      strat_info_list.append(
          {'name': strat_file.name, 'mem-per-cpu': '%dM' % (mem_byte / 10**6)})

    # Render the sbatch script; template paths are relative to the CWD.
    template_loader = jinja2.FileSystemLoader(searchpath='./')
    template_env = jinja2.Environment(loader=template_loader)
    template = template_env.get_template(
        'coin/support/pta/app/pta_weekly_dumper_runner_slurm/slurm_script.tmpl')
    script = template.render(
        strat_info_list=strat_info_list,
        year=FLAGS.year,
        start_week=FLAGS.start_week,
        end_week=FLAGS.end_week,
        plot_dir=FLAGS.plot_dir,
        ignore_error=FLAGS.ignore_error,
        strategy_group=FLAGS.strategy_group,
        kafka_config_filename=FLAGS.kafka_config_filename,
        pnl_adj_file=FLAGS.pnl_adj_file,
        realtime_pnl_adj=FLAGS.realtime_pnl_adj,
        kline_db_config=FLAGS.kline_db_config,
        kline_symbols_list=FLAGS.kline_symbols_list,
        root_dir=FLAGS.root_dir,
        remote_home=FLAGS.remote_home,
        coin_env=FLAGS.coin_env)

    if FLAGS.dry_run:
      # Print the rendered script and exit without submitting.
      print(script)
      return

    with tempfile.NamedTemporaryFile(mode='w', dir='') as script_file:
      script_file.write(script)
      script_file.flush()

      ioloop = IOLoop.current()
      sbatch_runner = SBatchRunner(ioloop, script_file.name, FLAGS.sbatch_args)
      # Route SIGTERM/SIGINT to the runner so the sbatch job can be cleaned
      # up on interruption.
      signal.signal(signal.SIGTERM, sbatch_runner.signal_handler)
      signal.signal(signal.SIGINT, sbatch_runner.signal_handler)
      job_id = sbatch_runner.run()
      print(job_id)


if __name__ == '__main__':
  # Flag specs as (definer, flag name, default, help text) tuples; defining
  # them data-driven keeps the list compact and uniform.
  _flag_specs = [
      (flags.DEFINE_integer, 'year', None, 'Year'),
      (flags.DEFINE_integer, 'start_week', None, 'Start week.'),
      (flags.DEFINE_integer, 'end_week', None, 'End week.'),
      (flags.DEFINE_string, 'plot_dir', None, 'plot dir'),
      (flags.DEFINE_boolean, 'ignore_error', False, 'Ignore error'),
      (flags.DEFINE_string, 'strategy_group', None, 'filter by strategy_group'),
      (flags.DEFINE_string, 'kafka_config_filename', None, 'kafka config'),
      (flags.DEFINE_string, 'strategy_config_filename', None,
       'strategy_config_filename'),
      (flags.DEFINE_string, 'pnl_adj_file', None, 'pnl_adj_file'),
      (flags.DEFINE_boolean, 'realtime_pnl_adj', False, 'realtime pnl adj'),
      (flags.DEFINE_string, 'kline_db_config',
       '../../coin_deploy/support_info/db_config_iosg/mysql_config_prod.json',
       ''),
      (flags.DEFINE_string, 'kline_symbols_list',
       '../../coin_deploy/support_info/kline_config/kline_symbols_list.csv',
       'kline_symbols_list'),
      (flags.DEFINE_string, 'pta_db_config', None, ''),
      (flags.DEFINE_string, 'root_dir', None, ''),
      (flags.DEFINE_string, 'remote_home', None, 'path of remote home.'),
      (flags.DEFINE_string, 'coin_env', 'miniconda3/envs/coin2_motion_env',
       'coin_env path relative to remote_home'),
      (flags.DEFINE_bool, 'dry_run', False,
       'print sbatch script only and not run sbatch'),
      (flags.DEFINE_string, 'sbatch_args', None, 'sbatch arguments'),
  ]
  for _define, _name, _default, _help in _flag_specs:
    _define(_name, _default, _help)

  app.run(main)
