# Copyright (c) 2019 Presto Labs Pte. Ltd.
# Author: fengyang

import calendar
import datetime
import json
import toml
import jinja2
import subprocess
import tempfile
import sys
import os
from absl import app, flags

from coin.support.feed_tool.feed_cache.app.coin2_fast_feed_log_checker import check_log_file

# absl flag registry handle; the flags themselves are defined in __main__ below.
FLAGS = flags.FLAGS

# Feed-writer machines whose raw feed logs this tool converts.
machines = [
    "feed-05.ap-northeast-1.aws",
    "feed-01.ap-northeast-1.aws.huobi",
    "feed-02.ap-northeast-2.aws",
    "feed-02.eu-west-1.aws",
    "feed-02.us-east-1.aws",
    "feed-05.cn-hongkong.aliyun",
    "feed-01.ap-southeast-1.aws",
    "feed-10.ap-northeast-1.aws",
]

# meas whose futures expire every Friday (split the trading day around expiry).
week_futures = ('Futures.Okex.v3', 'Futures.Okex.v5', 'Futures.Huobi.v1')
# meas whose futures expire on the last Friday of each quarter.
quarter_futures = ('Futures.Bybit.v2', 'Futures.Bybit.v3', 'Futures.Ftx.v1', 'Futures.Binance.v1-delivery')


def find_last_time_successful_cases(log_dir, trading_date_str):
  """Scan the previous run's logs and collect the cases that finished.

  Args:
    log_dir: root log directory; falls back to the jenkins workspace default
      when falsy.
    trading_date_str: 'yyyymmdd' sub-directory to scan.

  Returns:
    {machine: {mea: [group, ...]}} for every log file under
    <log_dir>/<trading_date_str>/<machine>/ that check_log_file reports as
    finished.  Empty dict when the date directory does not exist.
  """
  successful = {}
  log_dir = log_dir or '/remote/iosg/jenkins-2/bot-coin/jenkins/workspace/coin2_fastfeed/logs'
  log_date_dir = os.path.join(log_dir, trading_date_str)
  if not os.path.exists(log_date_dir):
    return successful
  # Only the first directory level holds machine names.  The previous
  # implementation nested os.walk inside os.walk (and shadowed the
  # module-level `machines` list): every deeper sub-directory was re-treated
  # as a "machine" and re-walked, recording duplicate groups under bogus
  # machine keys.  Enumerate the top level explicitly instead.
  for machine in sorted(os.listdir(log_date_dir)):
    machine_root = os.path.join(log_date_dir, machine)
    if not os.path.isdir(machine_root):
      continue
    for machine_dir, _subdirs, files in os.walk(machine_root):
      for log_file in files:
        finished, _duration = check_log_file(os.path.join(machine_dir, log_file))
        if not finished:
          continue
        # Log names look like: market.exchange.api_version.<x>.group.<y>.<z>
        market, exchange, api_version, _, group, _, _ = log_file.split('.')
        mea = ".".join((market, exchange, api_version))
        successful.setdefault(machine, {}).setdefault(mea, []).append(group)
  return successful


def get_last_friday_of_seasons(trading_date):
  """Return the last Friday of each quarter-end month in trading_date's year.

  Quarter ends are Mar 31, Jun 30, Sep 30 and Dec 31; each is stepped back
  to the nearest preceding (or same-day) Friday.
  """
  quarter_ends = ((3, 31), (6, 30), (9, 30), (12, 31))
  fridays = []
  for month, last_day in quarter_ends:
    month_end = datetime.datetime(trading_date.year, month, last_day)
    # Number of days to step back so that the weekday becomes Friday.
    back = (month_end.weekday() - calendar.FRIDAY) % 7
    fridays.append(month_end - datetime.timedelta(days=back))
  return fridays


def get_durations(trading_date, mea, recipe, group, quarter_days):
  """Split one trading day into (start, ISO-8601 duration) job windows.

  Futures with a weekly (Friday) or quarterly expiry on this date are split
  around the expiry time; the heavy Binance 'a' group is halved; everything
  else runs as a single 24-hour window.
  """
  date_str = trading_date.strftime('%Y%m%d')
  expires_weekly = trading_date.weekday() == 4 and mea in week_futures
  expires_quarterly = trading_date in quarter_days and mea in quarter_futures
  if expires_weekly or expires_quarterly:
    if recipe == 'non_tbs' and mea == 'Futures.Huobi.v1':
      # fix futures expiry issue, and skip data of warmup
      return [(date_str, 'PT07H59M'), (date_str + 'T081500', 'PT15H45M')]
    if mea == 'Futures.Ftx.v1':
      # expiry at 03:00 UTC
      return [(date_str, 'PT3H'), (date_str + 'T03', 'PT21H')]
    # expiry at 08:00 UTC
    return [(date_str, 'PT8H'), (date_str + 'T08', 'PT16H')]
  if mea == 'Futures.Binance.v1' and group == 'a' and recipe != 'bbo_price_change':
    # split to 2 jobs as consume too long time
    return [(date_str, 'PT12H'), (date_str + 'T12', 'PT12H')]
  return [(date_str, 'PT24H')]


def build_mea_to_config_file(m, config, file_path):
  """Map every (machine, mea) pair in *config* to its config file path."""
  return {(m, mea): file_path for mea in config['feed']['exchanges']}


def merge_spilt_channel(json_obj):
  """Fold every '*_split_channel' exchange entry back into its base entry.

  The split entry's channels are appended to the base mea's channels and the
  split entry is removed.  Mutates and returns *json_obj*.
  """
  exchanges = json_obj['feed']['exchanges']
  for name in [n for n in exchanges if '_split_channel' in n]:
    base = name[:-14]  # drop the 14-char '_split_channel' suffix
    exchanges[base]['channels'].extend(exchanges[name]['channels'])
    del exchanges[name]
  return json_obj


def load_feed_writer_config(machines):
  """Load the feed-writer JSON config for every machine.

  Returns (config, mea_to_config_file): config maps machine -> parsed JSON
  (with '*_split_channel' entries merged back), and mea_to_config_file maps
  (machine, mea) -> repo-relative config file path.
  """
  common_config_tmpl = "../data/coin2/feed/ops/feed-writer/%s.json"
  config = {}
  mea_to_config_file = {}
  for m in machines:
    json_config_path = common_config_tmpl % m
    with open(json_config_path, 'r') as f:
      json_obj = merge_spilt_channel(json.load(f))
      config[m] = json_obj
      # remove ../ as fastfeed_converter run at coin repo
      config_file = json_config_path[3:]
      mea_to_config_file.update(build_mea_to_config_file(m, json_obj, config_file))
    # Overlay optional per-machine extras (lmax worker config, then the
    # daily-relaunch additions) on top of the common config.
    config, mea_to_config_file = load_lmax_config(config, mea_to_config_file, m)
    config, mea_to_config_file = load_daily_relaunch_config(config, mea_to_config_file, m)
  return config, mea_to_config_file


def load_lmax_config(config, mea_to_config_file, machine):
  """Merge the machine's lmaxdigital worker-1 feed config, if it exists.

  Worker 1 and 2 share the same exchange settings (only the host differs),
  so worker 1's file is authoritative.  Returns the possibly-updated
  (config, mea_to_config_file) pair unchanged when no file is present.
  """
  worker_config_tmpl = "../data/coin2/feed/ops/feed-writer/%s.lmaxdigital.w%s.json"
  # worker 1,2 have the same exchanges setting, except host setting
  worker_path = worker_config_tmpl % (machine, "1")
  if not os.path.exists(worker_path):
    return config, mea_to_config_file
  with open(worker_path, 'r') as f:
    json_obj = json.load(f)
  config[machine]['feed']['exchanges'].update(json_obj['feed']['exchanges'])
  # remove ../ as fastfeed_converter run at coin repo
  mea_to_config_file.update(
      build_mea_to_config_file(machine, json_obj, worker_path[3:]))
  return config, mea_to_config_file


def load_daily_relaunch_config(config, mea_to_config_file, machine):
  """Merge the machine's daily-relaunch feed config, if it exists.

  Groups of meas already configured on the machine are extended; brand-new
  meas are added verbatim.  Returns the possibly-updated
  (config, mea_to_config_file) pair unchanged when no file is present.
  """
  config_path = "../data/coin2/feed/ops/feed-writer/%s.daily-relaunch.json" % machine
  if not os.path.exists(config_path):
    return config, mea_to_config_file
  with open(config_path, 'r') as f:
    json_obj = json.load(f)
  exchanges = config[machine]['feed']['exchanges']
  for mea, extra in json_obj['feed']['exchanges'].items():
    if mea in exchanges:
      exchanges[mea]['products']['groups'].extend(extra['products']['groups'])
    else:
      exchanges[mea] = extra
  # remove ../ as fastfeed_converter run at coin repo
  mea_to_config_file.update(
      build_mea_to_config_file(machine, json_obj, config_path[3:]))
  return config, mea_to_config_file


def load_subscribers_setting(subscribers_path="../data/coin2/feed/subscribers.toml"):
  """Parse and return the subscriber recipe settings TOML."""
  with open(subscribers_path, 'r') as fh:
    return toml.load(fh)


def load_symbol_groups_setting(symbol_groups_path="../data/coin2/feed/symbol_groups.toml"):
  """Parse and return the symbol-group membership TOML."""
  with open(symbol_groups_path, 'r') as fh:
    return toml.load(fh)

def convert_mea_to_toml_setting(mea):
  """Translate a 'Market.Exchange.version' mea into its TOML table key."""
  return '_'.join(mea.split('.')).lower()


def get_mea_from_config(machine, config):
  """Return the mea names configured for *machine* (a dict keys view)."""
  exchanges = config[machine]['feed']['exchanges']
  return exchanges.keys()


def get_recipe_from_config(machine, mea, config, subscribers):
  """List the recipes whose required channels are all subscribed for this mea.

  A recipe qualifies only when its channel list is non-empty and every one of
  its channels appears in the machine's configured channels for *mea*.
  """
  channels = config[machine]['feed']['exchanges'][mea]['channels']
  assert len(channels) > 0, "Only support subscribe by channel"
  subscribed = set(channels)
  recipe_channels = subscribers[convert_mea_to_toml_setting(mea)]['recipes']
  return [recipe for recipe, required in recipe_channels.items()
          if required and subscribed.issuperset(required)]


def get_groups_from_setting(machine, mea, config):
  """Return the product groups configured for (machine, mea).

  feed-10 subscribes by norm symbols rather than groups, so for that machine
  the groups are reverse-mapped from the symbol_groups setting; every other
  machine must list groups directly.
  """
  exchange_cfg = config[machine]['feed']['exchanges'][mea]
  if machine == "feed-10.ap-northeast-1.aws":
    mea_groups = load_symbol_groups_setting()[convert_mea_to_toml_setting(mea)]
    matched = {group
               for symbol in exchange_cfg["products"]["norms"]
               for group, members in mea_groups.items()
               if symbol in members}
    return list(matched)

  groups = exchange_cfg["products"]["groups"]
  assert len(groups) > 0, "Only support subscribe by group"
  return groups


def get_symbols_from_config(mea, config):
  """Flatten all symbols of every group for *mea* (config keyed by TOML name)."""
  symbols = []
  for symbol_list in config[convert_mea_to_toml_setting(mea)].values():
    symbols.extend(symbol_list)
  return symbols


def get_fastfeed_setting(machine, mea, arbitration, trading_date, long_warmup=False):
  """Pick the fastfeed-converter settings file for this machine/mea.

  The base template depends on whether arbitration applies to the mea; the
  suffix selects mirror1 NFS input and/or a longer warmup window.
  """
  if not arbitration:
    template = 'data/coin2/feed/ops/fastfeed-converter/common{suffix}.json'
  elif mea in non_arbitrate_meas:
    template = 'data/coin2/feed/ops/fastfeed-converter/common_shift{suffix}.json'
  else:
    template = 'data/coin2/feed/ops/fastfeed-converter/common_arbitration{suffix}.json'

  suffix = ''
  if machine in use_mirror1_nfs_machines and have_data_in_mirror1(machine, trading_date):
    suffix += '_mirror1'
  if long_warmup or mea in long_warmup_meas:
    suffix += '_longwarmup'
  return template.format(suffix=suffix)


def have_data_in_mirror1(machine, trading_date):
  """True when mirror1 NFS holds raw feed data for *trading_date* AND the
  previous day (both are needed for warmup)."""
  flow_root_dir = '/remote/iosg/coin-mirror-1/buckets/feed.raw.coin/'
  day = datetime.datetime.strptime(trading_date, '%Y%m%d')
  prev_day = (day - datetime.timedelta(days=1)).strftime('%Y%m%d')
  return (os.path.exists(os.path.join(flow_root_dir, machine, trading_date))
          and os.path.exists(os.path.join(flow_root_dir, machine, prev_day)))

# Machines whose raw data may also live on the mirror1 NFS mount (see
# have_data_in_mirror1 / get_fastfeed_setting).
use_mirror1_nfs_machines = [
    "feed-05.ap-northeast-1.aws",
    "feed-01.ap-northeast-1.aws.huobi",
    "feed-02.ap-northeast-2.aws",
    "feed-02.eu-west-1.aws",
    "feed-02.us-east-1.aws",
    "feed-05.cn-hongkong.aliyun",
    "feed-10.ap-northeast-1.aws",
]

# meas converted with the 'common_shift' settings instead of
# 'common_arbitration' when --arbitration is on (see get_fastfeed_setting).
non_arbitrate_meas = [
    "Spot.Huobi.v1",
    "Futures.Huobi.v1",
    "Futures.Huobi.v1-swap",
    "Spot.Huobi.v1-mbp5",
    "Futures.Bitmex.v1",
    "Spot.Quoinex.v2",
    "Futures.Bitflyer.v1",
    "Futures.Huobi.v1-noti",
    "Futures.Huobi.v1-swap-noti",
    "Futures.Huobi.v1-linear-swap",
    "Spot.Korbit.v1",
    "Spot.Bitbank.v1",
    "Spot.Lmaxdigital.v1",
    "Spot.Uniswap.v3",
    "Spot.Uniswap.v3-arbitrum",
    "Spot.Uniswap.v2",
    "Futures.Deribit.v2",
    "Options.Deribit.v2",
    "Spot.Gateio.v4",
    "Futures.Gateio.v4-btc",
    "Futures.Gateio.v4",
    "Futures.Gateio.v4-delivery-btc",
    "Futures.Gateio.v4-delivery",
    "Futures.Dydx.v3",
    "Spot.Pancakeswap.v2",
    "Spot.Mexc.v2",
    "Spot.Mexc.v3",
    "Spot.Crypto.v2",
    "Futures.Crypto.v1",
    "Spot.Bitget.v1",
    "Futures.Bitget.v1",
    "Spot.Bybit.v1",
    "Spot.Bybit.v3",
    "Spot.Mercado.v4",
    "Spot.Bit.v1",
    "Spot.Maicoin.v2",
    "Spot.Bittrex.v3",
]

# Per-mea recipes skipped by default; an explicit --recipe flag overrides
# this (see the recipe filter loop in main).
ignore_recipes = {
    'Futures.Binance.v1-delivery': ['snapshot'],
    'Futures.Binance.v1': ['snapshot', 'bbo_price_change'],
    'Futures.Huobi.v1': ['snapshot'],
    'Futures.Huobi.v1-swap': ['snapshot'],
    'Futures.Okex.v3': ['snapshot'],
    'Futures.Okex.v3-swap': ['snapshot'],
    'Futures.Okex.v5': ['snapshot'],
    'Futures.Okex.v5-swap': ['snapshot'],
    'Spot.Binance.v1':['realtime'],
    'Futures.Apollox.v1': ['snapshot'],
    'Futures.Bitget.v1': ['realtime'],
    'Spot.Bitget.v1': ['realtime'],
    'Futures.Mexc.v1': ['realtime'],
    'Spot.Mexc.v3': ['realtime'],
}

# meas that always use the '_longwarmup' converter settings
# (see get_fastfeed_setting).
long_warmup_meas = [
  "Spot.Binance.v1",
  "Spot.Gdax.v1",
  "Spot.Okex.v5",
  "Spot.Bitbank.v1",
  "Spot.Upbit.v1",
]

def is_mea_enabled(mea, arbitration):
  """Hook for disabling individual meas; currently always True (args unused)."""
  return True


def gen_lauch_script(trading_date, priority_settings, settings, log_dir):
  """Render the sbatch launch script from the coin2_fast_feed Jinja template."""
  env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="./"))
  template = env.get_template(
      'coin/support/feed_tool/feed_cache/script/coin2_fast_feed.tmpl')
  return template.render(trading_date=trading_date,
                         priority_settings=priority_settings,
                         settings=settings,
                         log_dir=log_dir)


def run_sbatch(script_filepath):
  """Submit *script_filepath* via sbatch; print the outcome and exit(1) on failure."""
  extra_args = [tok for tok in FLAGS.sbatch_args.split(' ') if tok]
  # cwd setting lets the slurm-<id>.out log land in the fastfeed deploy directory.
  result = subprocess.run(
      ['sbatch', *extra_args, script_filepath],
      stdout=subprocess.PIPE,
      cwd='/remote/iosg/jenkins-2/bot-coin/jenkins/workspace/coin2_fastfeed/')
  output = result.stdout.decode('utf-8')
  if result.returncode != 0:
    print(output + ' - FAILED')
    sys.exit(1)
  else:
    print(output + ' - SUCCESS')

def get_pi_dir(mea, trading_date):
  """Resolve the product-info search dir.

  An explicit --pi_dir flag wins; otherwise old Bybit spot data (before
  2022-04-06) needs a pinned directory; everything else returns None.
  """
  if FLAGS.pi_dir:
    return FLAGS.pi_dir
  bybit_cutover = datetime.datetime.strptime("20220406", '%Y%m%d')
  if mea == "Spot.Bybit.v1" and trading_date < bybit_cutover:
    return "/remote/iosg/jenkins-2/bot-coin/jenkins/workspace/coin2_fastfeed/spot_bybit_v1_pi_dir"
  return None


def main(argv):
  """Build per-machine/mea/recipe job settings and submit the sbatch script.

  For every configured (machine, mea) that passes the CLI filters, collects
  (worker, settings, start, duration, ...) job tuples keyed by recipe and
  group (or norm), renders them through the Jinja template and submits the
  result with sbatch (unless --script_only).
  """
  trading_date_str = FLAGS.trading_date
  assert trading_date_str, '--trading_date must be specified.'
  trading_date = datetime.datetime.strptime(trading_date_str, '%Y%m%d')

  config, mea_to_config_file = load_feed_writer_config(machines)
  subscribers = load_subscribers_setting()
  quarter_days = get_last_friday_of_seasons(trading_date)
  symbol_groups = load_symbol_groups_setting()
  workers = [1, 2]
  job_id = 0
  settings = {}
  priority_settings = {}
  # meas rendered first in the script so they are scheduled ahead of the rest.
  high_priority_mea = ['Futures.Binance.v1']
  # When only re-running failures, collect what already finished last time.
  if FLAGS.last_time_failed_only:
    successful = find_last_time_successful_cases(None, trading_date_str)
  else:
    successful = {}
  if not successful:
    last_time_empty = True
  else:
    last_time_empty = False
  for m in machines:
    if (FLAGS.machine is not None) and (m not in FLAGS.machine):
      continue
    priority_settings[m] = {}
    settings[m] = {}
    meas = get_mea_from_config(m, config)
    for mea in meas:
      if (FLAGS.mea is not None) and (mea not in FLAGS.mea):
        continue
      if not is_mea_enabled(mea, FLAGS.arbitration):
        continue
      mea_setting = {}
      pi_dir = get_pi_dir(mea, trading_date)
      recipes = get_recipe_from_config(m, mea, config, subscribers)
      groups = get_groups_from_setting(m, mea, config)
      symbols = get_symbols_from_config(mea,symbol_groups)
      filtered_recipes1 = []
      filtered_recipes2 = []  # Huobi futures non_tbs recipe need to be splitted.
      # Apply the --recipe / --recipe_include filters and the default
      # per-mea ignore list (ignore_recipes only applies without --recipe).
      for recipe in recipes:
        if (FLAGS.recipe is not None) and (recipe not in FLAGS.recipe):
          continue
        if (FLAGS.recipe_include is not None) and (FLAGS.recipe_include not in recipe):
          continue
        if (FLAGS.recipe is None) and (recipe in ignore_recipes.get(mea, [])):
          continue
        if recipe == 'non_tbs' and mea == 'Futures.Huobi.v1':
          filtered_recipes2.append(recipe)
        else:
          filtered_recipes1.append(recipe)

      for filtered_recipes in (filtered_recipes1, filtered_recipes2):
        if len(filtered_recipes) == 0:
          continue
        recipes_str = ','.join(filtered_recipes)
        threads_num = len(filtered_recipes)
        # slurm22 will bind 2x cpus_per_task number of vcore to job due to hyperthreading
        # if threads_num <= 2:
        #   cpus_per_task = 1
        # else:
        #   cpus_per_task = 2

        mea_setting[recipes_str] = {}
        fastfeed_setting = get_fastfeed_setting(m, mea, FLAGS.arbitration, trading_date_str)
        driver_settings = [fastfeed_setting, mea_to_config_file[(m, mea)]]
        # norm setting will override group setting
        if FLAGS.norm is not None :
          norms = [s for s in FLAGS.norm if s in symbols]
          if (len(norms)==0):
            continue
          mea_setting[recipes_str]['norm'] = []
          norms = ','.join(norms)
          for worker in workers:
            if (FLAGS.worker is not None) and (worker != int(FLAGS.worker)):
              continue
            start_durations = get_durations(trading_date, mea, recipes_str, None, quarter_days)
            for (start, duration) in start_durations:
              mea_setting[recipes_str]['norm'].append(
                  (norms, worker, driver_settings, start, duration, threads_num, job_id, pi_dir))
              job_id = job_id + 1
        else:
          for group in groups:
            if (FLAGS.group is not None) and (group not in FLAGS.group):
              continue
            mea_setting[recipes_str][group] = []
            for worker in workers:
              if (FLAGS.worker is not None) and (worker != int(FLAGS.worker)):
                continue
              start_durations = get_durations(trading_date, mea, recipes_str, group, quarter_days)
              # Skip groups that already finished in the previous run.
              if not last_time_empty and group in successful.get(m, {}).get(mea, []):
                continue
              for (start, duration) in start_durations:
                mea_setting[recipes_str][group].append(
                    (worker, driver_settings, start, duration, threads_num, job_id, pi_dir))
                job_id = job_id + 1
      if mea in high_priority_mea:
        priority_settings[m][mea] = mea_setting
      else:
        settings[m][mea] = mea_setting


  log_dir = "/remote/iosg/jenkins-2/bot-coin/jenkins/workspace/coin2_fastfeed/logs"
  script = gen_lauch_script(trading_date_str, priority_settings ,settings, log_dir)

  if FLAGS.script_only:
    print(script)
    return

  tmp_dir = '/remote/iosg/jenkins-2/bot-coin/jenkins/workspace/coin2_fastfeed/tmpfile'
  with tempfile.NamedTemporaryFile(mode='w', dir=tmp_dir) as script_file:
    script_file.write(script)
    script_file.flush()
    run_sbatch(script_file.name)


if __name__ == '__main__':
  # Flags are registered here, not at import time, so importing this module
  # for its helper functions does not pollute the caller's flag namespace.
  flags.DEFINE_string('trading_date', None, 'yyyymmdd')

  flags.DEFINE_bool('script_only',
                    False,
                    'If true, it will print sbatch script and not run sbatch.')

  flags.DEFINE_list('machine', None, "Separate by comma")

  flags.DEFINE_list('mea', None, "Separate by comma")

  flags.DEFINE_list('recipe', None, "Separate by comma")

  flags.DEFINE_string('recipe_include', None, "recipe substring")

  flags.DEFINE_list('group', None, "Separate by comma")

  flags.DEFINE_list('norm', None, "Separate by comma")

  flags.DEFINE_enum('worker', None, ["1", "2"], "Specify worker 1 or 2, or leave it empty")

  flags.DEFINE_bool('arbitration', False, "Generate fastfeed with arbitration")

  flags.DEFINE_bool('last_time_failed_only', False, "Re-generate last time failed cases only")

  flags.DEFINE_string(
      'sbatch_args',
      '-n325 --mem-per-cpu=1G --job-name=coin2_fastfeed --hint=multithread --priority=TOP --wait',
      'sbatch arguments')

  flags.DEFINE_string('pi_dir', None, "Product info search path")

  app.run(main)
