from datetime import timezone, datetime,timedelta
from glob import glob
import os
import json
import random
import subprocess
import toml
import re
from time import sleep
from jinja2 import Environment, FileSystemLoader, select_autoescape
from absl import app, flags
from xunkemgmt_client.tool.slack_noti import send_to_slack

FLAGS = flags.FLAGS

# Exchanges (mea ids) whose full subscribed product list is checked.
# All other exchanges only get the BTC/ETH majors plus a small random
# sample (see generate_symbol_cheklist). Overridable via --full_check_mea.
important_exchange_list = [
  'Futures.Binance.v1',
  'Futures.Binance.v1-delivery',
  'Futures.Bybit.v3',
  'Futures.Bybit.v3-linear',
  'Futures.Dydx.v3',
  'Futures.Okex.v5',
  'Futures.Okex.v5-swap',
  'Spot.Binance.v1',
  'Spot.Gdax.v1',
  'Spot.Bithumb.v2',
  'Spot.Bitbank.v1',
  'Spot.Crypto.v2',
]

# Feed machines whose subscription configs are loaded and checked.
machines = [
  "feed-01.ap-northeast-1.aws.huobi",
  "feed-02.ap-northeast-2.aws",
  "feed-02.eu-west-1.aws",
  "feed-02.us-east-1.aws",
  "feed-05.ap-northeast-1.aws",
  "feed-05.cn-hongkong.aliyun",
  "feed-01.ap-southeast-1.aws",
]


def get_product_info(check_date):
  """Load product-info JSON files for ``check_date`` ('YYYYMMDD' string).

  Looks for ../../coin_product_info/data/<date>; when that directory is
  missing, steps back one day at a time (up to 30 days), then falls back
  to the static ../data/coin2/product_info directory.

  Returns:
    dict mapping mea id -> set of product symbols (empty dict when no
    JSON files are found).
  """
  product_info_root = '../../coin_product_info/data'
  product_info_path = os.path.join(product_info_root, check_date)
  lookup_date = datetime.strptime(check_date, '%Y%m%d')
  search_limit = 30
  while not os.path.exists(product_info_path) and search_limit > 0:
    # BUG FIX: the lookup date must step back each iteration; previously
    # only "check_date - 1 day" was recomputed (and retried) 30 times, so
    # dates more than one day back were never tried.
    lookup_date -= timedelta(days=1)
    product_info_path = os.path.join(product_info_root, lookup_date.strftime('%Y%m%d'))
    search_limit -= 1
  if not os.path.exists(product_info_path):
    # Nothing found within the 30-day window: use the static fallback copy.
    product_info_path = '../data/coin2/product_info'
  product_symbol_map = {}
  for file_path in glob(product_info_path + '/*.json'):
    with open(file_path, 'r') as f:
      pis = json.load(f)
    symbols = product_symbol_map.setdefault(pis['mea'], set())
    for pi in pis['product_infos']:
      symbols.add(pi['symbol'])
  return product_symbol_map

def get_recipe_config():
  """Map each mea to its preferred recipe from subscribers.toml.

  Preference order: an explicit 'default_recipe' wins; otherwise the
  first of 'realtime', 'bbo', 'snapshot' present in the mea's recipes.
  A mea with none of these gets no entry.
  """
  subscriber_config_path = '../data/coin2/feed/subscribers.toml'
  with open(subscriber_config_path,'r') as fp:
    subscriber_config = toml.load(fp)
  recipe_config = {}
  for mea, mea_cfg in subscriber_config.items():
    if 'default_recipe' in mea_cfg.keys():
      recipe_config[mea] = mea_cfg['default_recipe']
      continue
    for candidate in ('realtime', 'bbo', 'snapshot'):
      if candidate in list(mea_cfg['recipes']):
        recipe_config[mea] = candidate
        break
  return recipe_config


def get_vanillas_configs_path(machine, check_date):
  """Return the newest vanillas-config dir (dated <= ``check_date``) that
  contains a config file for ``machine``.

  Falls back to the legacy feed-writer directory when the vanillas tree
  does not exist, or when no dated directory has a file for this machine.
  """
  vanillas_configs_path = '../data/coin2/feed/vanillas_configs'
  fallback_path = '../data/coin2/feed/ops/feed-writer/'
  if os.path.exists(vanillas_configs_path):
    # Directory names are 'YYYYMMDD' strings, so lexicographic descending
    # sort is newest-first chronologically.
    for date_dir in sorted(os.listdir(vanillas_configs_path), reverse=True):
      if date_dir > check_date:
        continue
      current_path = os.path.join(vanillas_configs_path, date_dir)
      if any(machine in file_name for file_name in os.listdir(current_path)):
        return current_path
    # BUG FIX: previously this branch fell through and returned None when
    # no dated dir had a file for the machine; the caller treated None as
    # the fallback path ("... or <fallback>"), so returning it explicitly
    # is equivalent but no longer relies on that caller-side default.
  return fallback_path
    

def get_subscribe_config(machine,vanilla_config_path=None):
  """Return {mea: products} subscribed by ``machine``.

  Reads <vanilla_config_path>/<machine>.json, merges in the optional
  lmaxdigital/daily-relaunch extra configs, then expands group names into
  concrete symbols via the machine's symbol_groups toml.

  Special case: for 'feed-10.ap-northeast-1.aws' the config enumerates
  products directly under 'norms', so lists are returned without group
  expansion; in the general case values are sets of symbols.
  """
  vanilla_config_path = vanilla_config_path or '../data/coin2/feed/ops/feed-writer/'
  extra_machine_config_path = '../data/coin2/feed/ops/feed-writer/'
  machine_config_file_path = os.path.join(vanilla_config_path,f"{machine}.json")
  subscribe_group = {}
  subscribe_products = {}
  
  with open (machine_config_file_path,'r') as f :
    feed_machine_config=json.load(f)
  if machine == 'feed-10.ap-northeast-1.aws':
    # This machine's config lists product symbols directly ('norms');
    # return them as-is (lists, not sets).
    for mea in feed_machine_config['feed']['exchanges'] :
      if '_split_channel' in mea :
        continue
      subscribe_products[mea] = feed_machine_config['feed']['exchanges'][mea]['products']['norms']
    return subscribe_products
  # General case: collect group names per mea; '_split_channel' pseudo-meas
  # are skipped in both paths.
  for mea in feed_machine_config['feed']['exchanges'] :
    if '_split_channel' in mea :
      continue
    subscribe_group[mea] = feed_machine_config['feed']['exchanges'][mea]['products']['groups']
  
  lmaxdigital_config_file_path = os.path.join(extra_machine_config_path,f"{machine}.lmaxdigital.w1.json")
  dailyrelaunch_config_file_path = os.path.join(extra_machine_config_path,f"{machine}.daily-relaunch.json")
  
  def merge_subscribe_group_info(subscribe_group,new_config_path):
    # Merge group lists from an optional extra config file; no-op when the
    # file does not exist.
    if os.path.exists(new_config_path):
      with open(new_config_path, 'r') as f:
        feed_machine_config=json.load(f)
      for mea in feed_machine_config['feed']['exchanges'] :
        if mea in subscribe_group.keys():
          subscribe_group[mea].extend(feed_machine_config['feed']['exchanges'][mea]['products']['groups'])
        else :
          subscribe_group[mea] = feed_machine_config['feed']['exchanges'][mea]['products']['groups']    
    return subscribe_group
  
  subscribe_group = merge_subscribe_group_info(subscribe_group,lmaxdigital_config_file_path)
  subscribe_group = merge_subscribe_group_info(subscribe_group,dailyrelaunch_config_file_path)
      
  symbol_groups_file_name = machine+'_symbol_groups.toml'
  symbol_groups_file_path = os.path.join(vanilla_config_path,symbol_groups_file_name)
  if vanilla_config_path == extra_machine_config_path:
    # Legacy layout keeps a single shared symbol_groups file instead of a
    # per-machine one next to the machine config.
    symbol_groups_file_path = '../data/coin2/feed/symbol_groups.toml'
  
  with open(symbol_groups_file_path,'r') as f:
    symbol_groups=toml.load(f)
  
  # Expand each group name to its symbol list (keyed by snake-cased mea).
  for mea,group_list in subscribe_group.items():
    subscribe_products[mea] = set()
    remove_group_list = set()
    for group in group_list :
      snake_mea = to_snake_mea(mea)
      if len(symbol_groups[snake_mea][group])>0:
        subscribe_products[mea].update(symbol_groups[snake_mea][group])
      else:
        remove_group_list.add(group)
    group_list = set(group_list) - remove_group_list
    # NOTE(review): this prunes empty groups from the local subscribe_group
    # dict only; subscribe_group is never returned, so the pruning has no
    # effect outside this function.
    subscribe_group[mea] = group_list
  
  return subscribe_products


def generate_symbol_cheklist(machine,start_time,use_raw):
  """Build {mea: [symbols]} to check on ``machine`` for the given window.

  Meas in full_check_mea get every valid subscribed symbol (minus symbols
  handled by the ICO machine feed-10); all other meas get the BTC/ETH
  majors plus up to 3 randomly sampled symbols. In fastfeed mode
  (use_raw=False) no mea gets a full check.
  """
  full_check_mea = FLAGS.full_check_mea or important_exchange_list
  if not use_raw:
    # Fastfeed mode: spot-check majors + random sample everywhere.
    full_check_mea = []
  check_date = start_time.split('T')[0]
  vanilla_config_path = get_vanillas_configs_path(machine,check_date)
  subscribe_products = get_subscribe_config(machine,vanilla_config_path)
  # Products archived via the ICO machine are excluded from full checks.
  ico_products = []
  for products in get_subscribe_config('feed-10.ap-northeast-1.aws').values():
      ico_products = ico_products + products
  product_info_symbol = get_product_info(check_date)
  checklist={}
  for mea,symbols in subscribe_products.items():
    # On-chain / aggregated sources have no per-symbol feed to check.
    if mea in ['Options.Deribit.v2', 'Spot.Uniswap.v3', 'Spot.Pancakeswap.v2', 'Spot.Uniswap.v2', 'Spot.Coingecko.v3', 'Spot.Uniswap.v3-arbitrum'] :
      continue
    if mea == 'Futures.Gateio.v4-btc':
      # Both Gateio sub-configs share one product-info key.
      # NOTE(review): if both 'Futures.Gateio.v4' and the '-btc' variant are
      # subscribed, the later one overwrites the earlier checklist entry.
      mea = 'Futures.Gateio.v4'
    if mea in full_check_mea:
      # Only check symbols that still exist in product info.
      symbol_intersection= symbols & product_info_symbol[mea]
      checklist[mea]=[s for s in symbol_intersection if is_ok_to_add(mea,s) and s not in ico_products]
    else:
      # BTC/ETH-USD/USDT/USDC/KRW
      if mea in product_info_symbol.keys():
        symbol_intersection= symbols & product_info_symbol[mea]
        important_product_list = [s for s in symbol_intersection if 'BTC-USD' in s or 'ETH-USD' in s or 'BTC-KRW' in s or 'ETH-KRW' in s]
        random_pick_product_list = random.sample(list(symbol_intersection), min(3 ,len(symbol_intersection)))
        important_product_list = list(set(important_product_list + random_pick_product_list))
        if len(important_product_list) >0 :
          checklist[mea] = important_product_list
  return checklist


def to_snake_mea(mea: str):
  """Convert a dotted mea id (e.g. 'Spot.Binance.v1') to snake form."""
  lowered = mea.lower()
  return '_'.join(lowered.split('.'))


def is_ok_to_add(mea, symbol):
  """Return False for symbols that must be skipped for ``mea``.

  Skips leveraged "ratio x N" token products (e.g. 'BTC3L-USDT',
  'ETH5S-USDT') and a small per-exchange blacklist of symbols that do not
  exist on the venue.
  """
  nonexist_symbols = {
    'Spot.Gdax.v1': {'INDEX-USDT', 'USDC-EUR', 'USDC-GBP', 'DAR-USDT'},
    'Spot.Bitbank.v1': {'BCHN-BTC', 'LTC-BTC', 'MONA-BTC', 'QTUM-BTC', 'XLM-BTC', 'XRP-BTC', 'ETH-BTC'},
  }
  # BUG FIX: the original pattern r'\d[S|L]-' used '[S|L]', a character
  # class containing 'S', '|' and 'L', so it also matched a literal '|'
  # after a digit. '[SL]' is the intended class.
  if re.search(r'\d[SL]-', symbol):
    # ratio x N (leveraged token) products
    return False
  if mea in nonexist_symbols and symbol in nonexist_symbols[mea]:
    return False
  return True

def generate_driver_config(machine,driver_file_dir,use_mirror1,use_raw,start_time):
  """Render one driver JSON file per mea for ``machine``.

  Args:
    machine: feed machine hostname.
    driver_file_dir: directory to write the per-mea driver files into.
    use_mirror1: passed through to the template (mirror-1 raw archive).
    use_raw: True -> RAW archive with a random worker (1 or 2);
      False -> FASTFEED archive with worker 0.
    start_time: 'YYYYMMDDTHHMMSS' window start; defaults to 3 hours ago
      (UTC), truncated to the hour.

  Returns:
    List of absolute paths of the written driver files.
  """
  start_time = start_time or (datetime.now(timezone.utc)-timedelta(hours=3)).strftime('%Y%m%dT%H0000')
  checklist = generate_symbol_cheklist(machine,start_time,use_raw)
  recipe_config = get_recipe_config()
  env = Environment(
    loader=FileSystemLoader("coin/support/feed_tool/feed_symbol_status_checker"),
    autoescape=select_autoescape()
  )
  template = env.get_template("driver_json_file.tmpl") 
  archive_type = "RAW"
  # NOTE(review): worker appears to pick one of two raw-archive workers at
  # random — confirm against the template/checker semantics.
  worker = random.choice([1,2])
  if not use_raw:
    archive_type = "FASTFEED"
    worker = 0
  driver_files = []	
  for mea,products in checklist.items():
    # str(list) renders with single quotes; swap to double quotes so the
    # substituted value is valid JSON inside the template.
    json_str = template.render(machine=machine, mea=mea, products=products.__str__().replace("'", '"'), archive_type=archive_type,
                               start_time=start_time, worker=worker, recipe=recipe_config[to_snake_mea(mea)], use_mirror1=use_mirror1)
    driver_file_path = os.path.join(driver_file_dir, f"{mea}.json")
    with open(driver_file_path,'w') as fp:
      fp.writelines(json_str)
    driver_files.append(os.path.abspath(driver_file_path))
  return driver_files

def summary_result(result_file_path,slack_send,use_raw):
  """Aggregate per-mea checker result JSONs and print/send a briefing.

  Walks ``result_file_path`` recursively; each file is expected to be a
  JSON object with 'machine', 'mea', 'worker' and optional
  'trade_missing_symbol' / 'book_missing_symbol' lists. Normal findings go
  to the plain briefing; large or unexpected gaps go to a WARNING section
  that adds slack mentions. When ``slack_send`` is set, the briefing is
  posted to #coin_feed_noti with up to 3 retries.
  """
  result = {}
  mention_list=None
  ignore_mea = FLAGS.ignore_mea or []
  full_check_mea = FLAGS.full_check_mea or important_exchange_list
  
  for root, dirs, files in os.walk(result_file_path):
    for file in files:
      with open(os.path.join(root,file),'r') as fp:
        mea_result = json.load(fp)
      if mea_result["machine"] not in result.keys():
        result[mea_result["machine"]] = {}
      if mea_result["mea"] not in result[mea_result["machine"]].keys():
        result[mea_result["machine"]][mea_result["mea"]] = {}
      # msssing_symbols[0] = symbols with no trade feed,
      # msssing_symbols[1] = symbols with no book feed.
      msssing_symbols = [set(),set()]
      if "trade_missing_symbol" in mea_result.keys():
        msssing_symbols[0] = set(mea_result["trade_missing_symbol"])
      if "book_missing_symbol" in mea_result.keys():
        msssing_symbols[1] = set(mea_result["book_missing_symbol"])
      result[mea_result["machine"]][mea_result["mea"]]['msssing_symbols']=msssing_symbols
      result[mea_result["machine"]][mea_result["mea"]]['worker']=mea_result['worker']
  if len(result.keys()) > 0:
    briefing_str ='[symbol status checker]Following products have no book&trade feed in recent 2h: \n'
    issue_briefing_str = "\n[**WARNING**]following exchanges may have issue, please check:\n"
    machine_briefing_str = []
    machine_issue_briefing_str = []
    if not use_raw:
      briefing_str ='[symbol status checker]Following products have no book&trade feed in fastfeed: \n'
    for machine in result.keys() :
      # Remember list lengths so a machine header can be inserted before
      # its entries only if that machine contributed any lines.
      machine_briefing_len = len(machine_briefing_str)
      machine_issue_briefing_len = len(machine_issue_briefing_str)
      for mea,info in result[machine].items():
        # all_missing: neither book nor trade; book_only: trade present,
        # book missing.
        all_missing = info['msssing_symbols'][0] & info['msssing_symbols'][1]
        book_only = info['msssing_symbols'][1] - info['msssing_symbols'][0]
        worker = info['worker']
        briefing_prefix = f"    {mea.ljust(30,' ')}    worker{str(worker)}    "
                
        if len(all_missing) > 0:
          # Thresholds: >30 is treated as a massive outage; ignored meas,
          # full-check meas with <6 gaps, and sampled meas with <3 gaps go
          # to the non-alerting briefing; everything else is a WARNING.
          if len(all_missing) > 30:
            machine_issue_briefing_str.append(briefing_prefix + f"MASSIVE BOOK/TRADE MISSING, amount: {str(len(all_missing))}\n")
          elif (mea in ignore_mea) or (mea in full_check_mea and len(all_missing) < 6) or (mea not in full_check_mea and len(all_missing) < 3):
            machine_briefing_str.append(briefing_prefix + f"book&trade missing: {all_missing.__str__()}\n")
          else:
            machine_issue_briefing_str.append(briefing_prefix + f"book&trade missing: {all_missing.__str__()}\n")

        if len(book_only) > 0:
          # Book-only gaps always warn unless the mea is explicitly ignored.
          if len(book_only) > 30:
            machine_issue_briefing_str.append(briefing_prefix + f"MASSIVE TRADE ONLY, amount: {str(len(book_only))}\n")
          elif mea in ignore_mea:
            machine_briefing_str.append(briefing_prefix + f"book missing: {book_only.__str__()}\n")
          else:
            machine_issue_briefing_str.append(briefing_prefix + f"book missing: {book_only.__str__()}\n")

      if len(machine_briefing_str) > machine_briefing_len:
        machine_briefing_str.insert(machine_briefing_len ,f"{machine}:\n")
      if len(machine_issue_briefing_str) > machine_issue_briefing_len:
        machine_issue_briefing_str.insert(machine_issue_briefing_len ,f"{machine}:\n")
    
    if len(machine_briefing_str) > 0:
      briefing_str += ''.join(machine_briefing_str)
    if len(machine_issue_briefing_str) > 0 :
      # Any WARNING line triggers slack mentions.
      briefing_str += issue_briefing_str + ''.join(machine_issue_briefing_str)
      mention_list = ['ziyan','junxiao']
  else :
    briefing_str = '[symbol status checker]All checked symbols are good'
  print(briefing_str)
  if slack_send :
    def send_msg(briefing_str,mention_list,retry_time):
      # Retry slack delivery up to 3 times (10s apart) before re-raising.
      try:
        send_to_slack(briefing_str,'#coin_feed_noti', 'msg',title='symbol status checker test', mention_list = mention_list)
      except Exception as e:
        if retry_time < 3:
          sleep(10)
          send_msg(briefing_str, mention_list, retry_time + 1)
        else:
          raise e
    send_msg(briefing_str,mention_list,0)
   
      
def generate_script(machines,use_mirror1,use_raw,start_time):
  """Create per-machine driver configs plus a run_checker.sh script.

  Builds a task directory under ../../log named by the current UTC minute,
  writes driver/<machine> and result/<machine> subdirs, renders the
  checker shell script from a jinja2 template, and returns the task path.
  """
  command_paras = []
  task_dir = datetime.now(timezone.utc).strftime('%Y%m%d%H%M')
  task_path = os.path.join('../../log',task_dir)
  for m in machines:
    driver_file_dir = os.path.join(task_path,'driver',m)
    result_file_dir = os.path.join(task_path,'result',m)
    os.makedirs(driver_file_dir,exist_ok=True)
    os.makedirs(result_file_dir,exist_ok=True)
    driver_files = generate_driver_config(m,driver_file_dir,use_mirror1,use_raw,start_time)
    # Each entry pairs the machine's driver files with its result dir.
    command_paras.append((driver_files,os.path.abspath(result_file_dir)))
  env = Environment(
    loader=FileSystemLoader("coin/support/feed_tool/feed_symbol_status_checker"),
    autoescape=select_autoescape()
  )
  # template = env.get_template("coin2_symbol_status_checker_slurm.tmpl")
  template = env.get_template("coin2_symbol_status_checker.tmpl")
  script=template.render(command_paras=command_paras)
  with open(os.path.join(task_path,'run_checker.sh'),'w') as fp:
    fp.writelines(script)
  return task_path

  
def main(_):
  """Generate checker configs, run the checker script, and post a summary."""
  start_time = datetime.now().timestamp()
  task_path = generate_script(machines,FLAGS.use_mirror1,FLAGS.use_raw,FLAGS.start_time)
  # ret = subprocess.run(f"sbatch {FLAGS.sbatch_args} {task_path}/run_checker.sh", capture_output=True, shell=True)
  # NOTE(review): shell=True with an interpolated path — task_path is built
  # internally from a timestamp, so not attacker-controlled here.
  ret = subprocess.run(f"bash {task_path}/run_checker.sh", capture_output=True, shell=True)
  if ret.returncode != 0 :
    print(ret.stderr.decode())
  # Summarize (and notify) even when the checker script failed, so partial
  # results still get reported; the failure is re-raised at the end.
  summary_result(os.path.join(task_path,'result'),FLAGS.slack_send,FLAGS.use_raw)
  duration = datetime.now().timestamp() - start_time
  print(f"duration:{duration}s")
  ret.check_returncode()
  
  
if __name__ == '__main__':
  # CLI flags; defined here so importing this module does not register them.
  flags.DEFINE_boolean('slack_send', True, 'send result to slack')
  flags.DEFINE_boolean('use_raw', True, 'use raw or fastfeed')
  flags.DEFINE_boolean('use_mirror1', True, 'use raw feed on coin-mirror-1 to archive')
  flags.DEFINE_string('start_time', None, 'check start time, format:YYYYMMDDTHHMMSS')
  flags.DEFINE_string(
      'sbatch_args',
      '-N2 --ntasks=100 --spread-job --mem-per-cpu=1G --job-name=coin2_symbol_status_checker --priority=TOP --wait',
      'sbatch arguments')
  flags.DEFINE_list('full_check_mea', None, 'meas will check all symbols')
  flags.DEFINE_list('ignore_mea', None, 'meas will not tigger slack mention alert')
  app.run(main)


