from absl import app, flags
import datetime
import pandas as pd
import os
import logging
import json
import toml
import glob
import tempfile
import traceback

from collections import defaultdict
from coin.base.query_util import (query_klines, query_pta)
from coin.base.format_util import formatNumber
from coin.exchange.kr_rest.product.product_impl import generate_product_from_str2
from xunkemgmt_client.tool.slack_noti import send_to_slack
from xunkemgmt_client.client.util.query_util import (
  query_accounts, query_exchange_apis)


FLAGS = flags.FLAGS

# (market_type, exchange) pairs that the kline collector cannot query.
# Symbols on these markets are filtered out of the missing-kline check
# (see get_symbol_from_pta).
UNSUPPORT_MARKET = [
                    ('Futures', 'Bitflyer'),
                    ('Spot', 'Bitflyer'),
                    ('Spot', 'Coincheck'),
                    ('Spot', 'Coinone'),
                    ('Spot', 'Korbit'),
                    ('Spot', 'Lmaxdigital'),
                    # remove the exchanges below once supported
                    ('Futures', 'Prex'),
                    ('Options', 'Otc'),
                    ('Spot', 'Bitstamp'),
                    ('Spot', 'Uniswap'),
                    ('Spot', 'Upbitid'),
                  ]


def _convert_to_symbol_contract_type(market_type, exchange, symbol, trading_date):
  func = lambda x: x.relative_norm
  if symbol == 'TOTAL':
    return symbol
  errstr = ""
  try:
    trading_date = datetime.datetime.combine(trading_date, datetime.time.min)
    return func(generate_product_from_str2(
        market_type, exchange, None, symbol, trading_date))
  except Exception as e:
    errstr += traceback.format_exc()
  try:
    trading_date = datetime.datetime.combine(trading_date, datetime.time.max)
    return func(generate_product_from_str2(
        market_type, exchange, None, symbol, trading_date))
  except Exception as e:
    errstr += traceback.format_exc()
  logging.error("Unable to generate: %s \n%s" % (
      f'{market_type}:{exchange}:{symbol}', errstr))
  return None


def gen_inferred_api_version(symbol_df, trading_date):
  """Fill in missing api_version values on symbol_df and drop unresolvable rows.

  For each row whose api_version is null: if its (market_type, exchange)
  has exactly one api_version among the product_info JSON files, use that;
  otherwise pick an api_version whose product list contains the symbol's
  normalized contract type AND that is marked active in
  query_exchange_apis(). Rows still lacking an api_version are removed.

  Args:
    symbol_df: DataFrame with at least market_type / exchange / symbol /
      api_version columns.
    trading_date: trading day used to normalize symbols.

  Returns:
    symbol_df with api_version populated and rows with no inferable
    version filtered out.
  """
  # pi_dict maps (market_type, exchange) -> {api_version: [symbols...]},
  # built from every product_info JSON file on disk.
  pi_dict = defaultdict(dict)
  pi_file_list = glob.glob('../data/coin2/product_info/*.json')
  for pi_file in pi_file_list:
    with open(pi_file) as f:
      pi = json.load(f)
      # 'mea' is a dotted market_type.exchange.api_version identifier.
      market_type, exchange, api_version = pi['mea'].split('.')
      pi_symbols_list = [product['symbol'] for product in pi['product_infos']]
      pi_dict[(market_type, exchange)].update({api_version: pi_symbols_list})
  exchange_api_df = query_exchange_apis()
  # Only active APIs are candidates; index for fast membership tests below.
  exchange_api_df = exchange_api_df[exchange_api_df['active']].set_index(
    ['market_type', 'exchange', 'api_version'])
  api_versions = []
  for idx, row in symbol_df.iterrows():
    symbol_api_version = row['api_version']
    if pd.isnull(symbol_api_version):
      me_apis = pi_dict[(row['market_type'], row['exchange'])]
      if len(me_apis) == 1:
        # Unambiguous: the market/exchange has a single known api_version.
        symbol_api_version = list(me_apis.keys())[0]
      else:
        symbol_contract_type = _convert_to_symbol_contract_type(
          row['market_type'], row['exchange'], row['symbol'], trading_date)
        # NOTE: no break — when several versions match, the last one in
        # sorted order wins. Presumably intentional (prefer newest); confirm
        # before changing.
        for api_version in sorted(me_apis):
          mea = (row['market_type'], row['exchange'], api_version)
          if symbol_contract_type in me_apis[api_version] and mea in exchange_api_df.index:
            symbol_api_version = api_version
    api_versions.append(symbol_api_version)
  symbol_df['api_version'] = api_versions
  # Rows whose version could not be inferred remain NaN and are dropped.
  symbol_df = symbol_df[symbol_df['api_version'].notna()]
  return symbol_df


def get_symbol_from_pta(trading_date):
  """Return (market_type, exchange, symbol) rows traded on trading_date.

  Queries SYMBOL-level PTA for the Coin/Day1mm/ExtDay1mm business units and
  keeps only markets the kline collector supports.

  Args:
    trading_date: the trading day to query.

  Returns:
    Deduplicated, NaN-free DataFrame with market_type/exchange/symbol.
  """
  pta_df = query_pta(trading_date,
                     trading_date,
                     business_units=['Coin', 'Day1mm', 'ExtDay1mm'],
                     pta_type='SYMBOL')
  all_trading_symbols = pta_df[['market_type', 'exchange', 'symbol']]
  all_trading_symbols = all_trading_symbols.drop_duplicates().dropna()
  # Drop markets the collector cannot query yet.
  supported_mask = all_trading_symbols.apply(
      lambda x: (x.market_type, x.exchange) not in UNSUPPORT_MARKET, axis=1)
  # FIX: the old trailing dropna(inplace=True) ran on a boolean-indexed
  # slice (SettingWithCopyWarning) and was redundant — rows are already
  # NaN-free after the dropna() above.
  return all_trading_symbols[supported_mask]


def get_exchange_from_pta(trading_date):
  """Build exchange-level rows for daily 'TOTAL' klines via CoinGecko.

  Each distinct (market_type, exchange) traded by the Coin business unit
  gets one row with symbol='TOTAL' and api_version='coingecko'.

  Args:
    trading_date: the trading day to query.

  Returns:
    DataFrame with market_type/exchange/symbol/api_version columns.
  """
  pta_df = query_pta(trading_date, trading_date, business_units=['Coin'])
  exchanges = pta_df[['market_type', 'exchange']].drop_duplicates().dropna()
  exchanges['symbol'] = 'TOTAL'
  exchanges['api_version'] = 'coingecko'
  return exchanges


def get_symbol_from_klinedb(trading_date):
  """Return the symbols already present in the kline DB for trading_date.

  Joins the stored source_exchange_api_id against query_exchange_apis() to
  attach each symbol's api_version.

  Args:
    trading_date: the trading day to query.

  Returns:
    DataFrame with market_type/exchange/symbol/exchange_api_id/api_version.
  """
  kline_df = query_klines(trading_date, trading_date)
  wanted_cols = ['market_type', 'exchange', 'symbol', 'source_exchange_api_id']
  db_symbols = kline_df[wanted_cols].drop_duplicates().dropna()
  db_symbols = db_symbols.rename(
      columns={'source_exchange_api_id': 'exchange_api_id'})
  api_df = query_exchange_apis()
  return db_symbols.merge(
      api_df[['exchange_api_id', 'api_version']], how='left')


def get_symbol_from_symbol_list(kline_symbols_list, kline_exchanges_list, trading_date):
  """Load the configured symbol and exchange CSV lists as one DataFrame.

  Symbols are normalized through product generation at the end of the
  trading day; exchange-level 'TOTAL' rows come straight from the
  exchanges CSV.

  Args:
    kline_symbols_list: path to the symbols CSV.
    kline_exchanges_list: path to the exchanges CSV.
    trading_date: trading day used to normalize symbols.

  Returns:
    DataFrame with market_type/exchange/symbol/api_version columns.
  """
  dt = datetime.datetime.combine(trading_date, datetime.time.max)
  cols_needed = ['market_type', 'exchange', 'symbol', 'api_version']
  kline_collector_symbols = pd.read_csv(kline_symbols_list)[cols_needed]
  # Exchange-level 'TOTAL' rows belong in the exchanges list, not here.
  kline_collector_symbols = kline_collector_symbols[kline_collector_symbols['symbol'] != 'TOTAL']
  kline_collector_symbols['symbol'] = kline_collector_symbols.apply(
    lambda x: generate_product_from_str2(x['market_type'], x['exchange'], None, x['symbol'], dt).symbol, axis=1)
  exchange_symbols = pd.read_csv(kline_exchanges_list)[cols_needed]
  # FIX: DataFrame.append was removed in pandas 2.0; pd.concat is the
  # supported equivalent (same default index behavior as append had).
  return pd.concat([kline_collector_symbols, exchange_symbols])


def get_symbol_from_cc_symbol_groups(mea_list):
  """Collect symbols configured in the CC symbol-group TOML for given MEAs.

  For each 'MarketType.Exchange.ApiVersion' string, takes the union of all
  its symbol groups from data/coin2/feed/symbol_groups.toml and keeps only
  symbols present in the MEA's product_info JSON.

  Args:
    mea_list: list of dotted MEA identifier strings, e.g. ['Spot.Gdax.v1'].

  Returns:
    Concatenated DataFrame with market_type/exchange/symbol/api_version.
  """
  assert isinstance(mea_list, list)
  with open('data/coin2/feed/symbol_groups.toml') as f:
    symbol_groups_universe = toml.load(f)
  frames = []
  for mea in mea_list:
    market_type, exchange, api_version = mea.split('.')
    with open(f'data/coin2/product_info/{mea}.json') as f:
      pi = json.load(f)
      assert mea == pi['mea'], (mea, pi['mea'])
    known_symbols = {product['symbol'] for product in pi['product_infos']}
    # TOML keys use underscores, e.g. 'spot_gdax_v1'.
    group_key = mea.lower().replace('.', '_')
    grouped_symbols = set()
    for symbol_group in symbol_groups_universe[group_key].values():
      grouped_symbols.update(symbol_group)
    rows = [{'market_type': market_type, 'exchange': exchange,
             'symbol': symbol, 'api_version': api_version}
            for symbol in grouped_symbols & known_symbols]
    frames.append(pd.DataFrame(rows))
  return pd.concat(frames)


def collect_kline(trading_date, symbols, legacy=False):
  """Run the kline collector subprocess for the given symbols.

  Writes the symbol list to a temporary CSV, shells out to the collector
  via ./pyrunner, and logs the subprocess output. No-op for an empty
  symbol set.

  Args:
    trading_date: trading day to collect (used as both start and end date).
    symbols: DataFrame with market_type/exchange/symbol_contract_type/
      api_version columns.
    legacy: when True, pass --legacy to select the legacy collector.
  """
  if len(symbols) == 0:
    return
  cmd_template = ('./pyrunner coin/support/market_quotes/app/kline_collector.py '
                  '--mysql_config ../../coin_deploy/support_info/db_config_iosg/mysql_config_prod.json '
                  '--start_date %s --end_date %s '
                  '--kline_symbols_list %s '
                  '--market_type Futures,Spot,Options')
  if legacy:
    cmd_template += ' --legacy'
  wanted = ['market_type', 'exchange', 'symbol_contract_type', 'api_version']
  request_df = symbols[wanted].rename(columns={'symbol_contract_type': 'symbol'})
  request_df['source'] = request_df['exchange']
  request_df['kline_period'] = 'KLINE_INTERVAL_1HOUR'
  with tempfile.TemporaryDirectory() as tmpdir:
    csv_path = tmpdir + '/kline_symbol.csv'
    request_df.to_csv(csv_path)
    date_str = trading_date.strftime('%Y%m%d')
    full_cmd = cmd_template % (date_str, date_str, csv_path)
    pipe = os.popen(full_cmd)
    output = pipe.read()
    pipe.close()
    logging.info(output)


def collect_third_party_kline(trading_date, exchanges):
  """Run the CoinGecko kline collector for exchange-level daily klines.

  Writes the exchange list to a temporary CSV, shells out to the
  third-party collector via ./pyrunner, and logs the subprocess output.
  No-op for an empty exchange set.

  Args:
    trading_date: trading day to collect (used as both start and end date).
    exchanges: DataFrame with market_type/exchange/symbol_contract_type/
      api_version columns.
  """
  if len(exchanges) == 0:
    return
  cmd_template = ('./pyrunner third_party/coingecko/kline_collector.py '
                  '--mysql_config ../../coin_deploy/support_info/db_config_iosg/mysql_config_prod.json '
                  '--start_date %s --end_date %s '
                  '--kline_symbols_list %s ')
  wanted = ['market_type', 'exchange', 'symbol_contract_type', 'api_version']
  request_df = exchanges[wanted].rename(columns={'symbol_contract_type': 'symbol'})
  request_df['source'] = 'CoinGecko'
  request_df['kline_period'] = 'KLINE_INTERVAL_1DAY'
  with tempfile.TemporaryDirectory() as tmpdir:
    csv_path = tmpdir + '/kline_exchange.csv'
    request_df.to_csv(csv_path)
    date_str = trading_date.strftime('%Y%m%d')
    full_cmd = cmd_template % (date_str, date_str, csv_path)
    pipe = os.popen(full_cmd)
    output = pipe.read()
    pipe.close()
    logging.info(output)


def _check_abnormal_klines(trading_date, turnover_threshold=None):
  """Report symbols whose hourly kline has fewer than 24 entries.

  A complete day of 1-hour klines has 24 entries; rows with fewer are
  collected together with the symbol's daily turnover_in_usd (looked up
  from the matching 1-day kline) so significant gaps can be triaged.

  Args:
    trading_date: trading day to inspect.
    turnover_threshold: if set, keep only symbols whose daily
      turnover_in_usd exceeds this value, and format the remaining
      turnovers for display.

  Returns:
    DataFrame of abnormal klines (possibly empty).
  """
  abnormal_klines_list = []
  klines = query_klines(trading_date, trading_date, period='KLINE_INTERVAL_1HOUR')
  klines_1day = query_klines(trading_date, trading_date, period='KLINE_INTERVAL_1DAY')
  for _, kline in klines.iterrows():
    try:
      market_type = kline.market_type
      exchange = kline.exchange
      symbol = kline.symbol
      symbol_contract_type = kline.symbol_contract_type
      kline_dict = json.loads(kline.kline_dict)
      if len(kline_dict['klines']) == 24:
        continue  # complete day, nothing to report
      kline_length = len(kline_dict['klines'])
      # Look up the matching daily kline to get the day's USD turnover.
      kline_dict_1day = klines_1day.loc[(klines_1day['market_type'] == market_type)
                                          & (klines_1day['exchange'] == exchange)
                                          & (klines_1day['symbol'] == symbol), 'kline_dict']
      if len(kline_dict_1day) == 0:
        continue  # no daily kline, cannot judge significance
      kline_dict_1day = json.loads(kline_dict_1day.iloc[0])
      turnover_in_usd = kline_dict_1day['klines'][0].get('turnover_in_usd', None)
      abnormal_kline = {'market_type': market_type, 'exchange': exchange, 'symbol': symbol,
                        'symbol_contract_type': symbol_contract_type, 'kline_length': kline_length,
                        'turnover_in_usd': turnover_in_usd}
      abnormal_klines_list.append(abnormal_kline)
    except Exception as e:
      # FIX: logging.error is not print(); the old call passed type(e) and e
      # as %-format args to a message with no placeholders, which raised an
      # internal formatting error instead of logging the exception.
      logging.error('Fail to check kline. %s %s', type(e), e)
      logging.error(traceback.format_exc())
      continue
  abnormal_klines_df = pd.DataFrame(abnormal_klines_list)
  if turnover_threshold and len(abnormal_klines_df) > 0:
    abnormal_klines_df = abnormal_klines_df[abnormal_klines_df['turnover_in_usd'] > turnover_threshold]
    abnormal_klines_df['turnover_in_usd'] = [formatNumber(turnover)
      for turnover in abnormal_klines_df['turnover_in_usd']]
  return abnormal_klines_df


def check_kline(trading_date):
  """Compute the set of expected-but-missing klines for trading_date.

  Gathers the expected universe from PTA (symbol and exchange level), the
  configured CSV symbol lists, and the CC symbol groups; infers missing
  api_versions; then subtracts what is already in the kline DB and the
  configured exclude list.

  Args:
    trading_date: the trading day to check.

  Returns:
    DataFrame of missing klines with market_type/exchange/api_version/
    symbol/symbol_contract_type columns, sorted; empty when nothing is
    missing.
  """
  expected_pta = get_symbol_from_pta(trading_date)
  expected_exchanges = get_exchange_from_pta(trading_date)
  in_klinedb = get_symbol_from_klinedb(trading_date)
  expected_lists = get_symbol_from_symbol_list(
      FLAGS.kline_symbols_list, FLAGS.kline_exchanges_list, trading_date)
  expected_cc = get_symbol_from_cc_symbol_groups(['Spot.Gdax.v1'])
  expected = pd.concat([expected_pta, expected_lists,
                        expected_exchanges, expected_cc]).drop_duplicates()
  expected = gen_inferred_api_version(expected, trading_date)

  symbol_cols = ['market_type', 'exchange', 'api_version', 'symbol']
  expected_set = {tuple(row) for row in expected[symbol_cols].to_numpy()}
  have_set = {tuple(row) for row in in_klinedb[symbol_cols].to_numpy()}
  missing_set = expected_set - have_set
  exclude_df = pd.read_csv(FLAGS.kline_exclude_list)
  exclude_set = {tuple(row) for row in exclude_df[symbol_cols].to_numpy()}
  missing_set = missing_set - exclude_set
  missing_df = pd.DataFrame(missing_set, columns=symbol_cols)

  if not missing_df.empty:
    missing_df['symbol_contract_type'] = missing_df.apply(
        lambda row: _convert_to_symbol_contract_type(
            row['market_type'], row['exchange'], row['symbol'], trading_date),
        axis=1)
    missing_df.sort_values(symbol_cols, inplace=True)
  return missing_df


def _send_kline_report(kline_df, title, trading_date):
  if len(kline_df) > 0:
    noti_msg = f"{title}:\nTrading_date:{trading_date}\n" + kline_df.to_string()
    send_to_slack(noti_msg, FLAGS.slack_receiver, 'file')


def main(_):
  """Check (and optionally backfill) klines for each date in the flag range.

  Walks backwards from end_date to start_date. For each day: computes the
  missing klines, optionally runs the collectors and re-checks, flags
  abnormal (incomplete) hourly klines, and sends the three reports to
  Slack when a receiver is configured.
  """
  # Today's UTC data is still accumulating, so only check up to yesterday.
  max_date = datetime.datetime.utcnow().date() - datetime.timedelta(days=1)
  if FLAGS.start_date is None and FLAGS.end_date is None:
    start_date = max_date
    end_date = start_date
  else:
    start_date = datetime.datetime.strptime(FLAGS.start_date, '%Y%m%d').date()
    end_date = datetime.datetime.strptime(FLAGS.end_date, '%Y%m%d').date()
  assert start_date <= end_date <= max_date, (start_date, end_date, max_date)
  trading_date = end_date
  while trading_date >= start_date:
    logging.info("Check Kline in %s", trading_date)
    klines_diff = check_kline(trading_date)
    klines_queried = pd.DataFrame()
    # FIX: len(klines_diff) > 0 was tested twice (outer if plus inner if);
    # one combined condition is sufficient.
    if len(klines_diff) > 0 and FLAGS.query_missing_kline:
      logging.info("begin to query kline")
      symbols_klines_to_query = klines_diff[klines_diff['symbol'] != 'TOTAL']
      exchanges_klines_to_query = klines_diff[klines_diff['symbol'] == 'TOTAL']
      collect_kline(trading_date, symbols_klines_to_query, FLAGS.legacy)
      collect_third_party_kline(trading_date, exchanges_klines_to_query)
      klines_diff_after_query = check_kline(trading_date)
      # keep=False drops rows present in both snapshots, leaving rows that
      # changed between the checks (i.e. klines filled in by the query).
      klines_queried = \
          pd.concat([klines_diff, klines_diff_after_query]).drop_duplicates(
              subset=['market_type', 'exchange', 'symbol'], keep=False)
      klines_diff = klines_diff_after_query
    # Only flag incomplete klines on symbols with > $1M daily turnover.
    abnormal_klines_df = _check_abnormal_klines(trading_date, 1e6)
    if FLAGS.slack_receiver:
      reports = [('Missing Kline', klines_diff),
                 ('Queried missing Kline', klines_queried),
                 ('Abnormal klines', abnormal_klines_df)]
      for title, report_df in reports:
        logging.info(f"{title}:" + "\n" + report_df.to_string())
        _send_kline_report(report_df, title, trading_date)
    trading_date -= datetime.timedelta(1)


if __name__ == '__main__':
  # CSV inputs describing which symbols/exchanges the collector should cover,
  # plus an exclude list subtracted from the missing-kline diff.
  flags.DEFINE_string('kline_symbols_list',
                      '../../coin_deploy/support_info/kline_config/kline_symbols_list.csv',
                      'kline_symbols_list')
  flags.DEFINE_string('kline_exchanges_list',
                      '../../coin_deploy/support_info/kline_config/kline_exchanges_list.csv',
                      'kline_exchanges_list')
  flags.DEFINE_string('kline_exclude_list',
                      '../../coin_deploy/support_info/kline_config/kline_exclude_list.csv',
                      'kline_exclude_list')
  # Date range to check; when both are unset, main() defaults to yesterday.
  flags.DEFINE_string('start_date', None, '%Y%m%d')
  flags.DEFINE_string('end_date', None, '%Y%m%d')
  flags.DEFINE_string('slack_receiver', None, 'send result to slack')
  flags.DEFINE_boolean('query_missing_kline', False, 'auto query missing kline')
  flags.DEFINE_boolean('legacy', False, 'py or cc collector')
  logging.basicConfig(level='INFO', format='%(levelname)s %(asctime)s %(name)s] %(message)s')
  app.run(main)
