# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: leon, gekim

import io
import sys
import datetime
import logging
import json
import toml
import os
import subprocess
from multiprocessing import Pool
from collections import namedtuple

import pandas as pd
from pandas.io.sql import to_sql
import jinja2

from absl import app, flags

from xunkemgmt_client.tool.slack_noti import send_to_slack

from coin.base.datetime_util import (convert_string_to_dates, iterate_date)

from coin.proto.coin_request_pb2 import ExchangeApiRequestProto
from coin.support.feed_tool.feed_stats.logic.util import (
    get_exchange_api_id_from_request,)
from coin.support.feed_tool.feed_stats.logic.util import (query_quote_to_usd_exchange_rate)
from coin.support.feed_tool.feed_stats.logic.feed_health_checker import (
    QUOTE_LIST,
    find_unhealthy_feed_stats,
    integrate_unhealthy_stats,
    FeedStatsNotReadyError,
    gen_latest_feed_health_stats_with_raw,
    FeedHealthResult,
)

from xunkemgmt_client.market.feed_stats_v2.database.connection.coin2_database import (
    coin2_feed_stats_querier_engine,
    coin2_feed_stats_importer_engine,
)

from coin.exchange.base.kr_rest.py_product import PyProduct

import pprint

FLAGS = flags.FLAGS

# Feed machines whose feed-writer configs and stats rows are checked by this
# tool; names follow "host.region.provider[.suffix]".
machines = [
    "feed-01.ap-northeast-1.aws.huobi",
    "feed-02.ap-northeast-2.aws",
    "feed-02.eu-west-1.aws",
    "feed-02.us-east-1.aws",
    "feed-05.ap-northeast-1.aws",
    "feed-05.cn-hongkong.aliyun",
    "feed-01.ap-southeast-1.aws",
]


# Key identifying one subscribed symbol: title-cased market/exchange, api
# version, normalized symbol name, and its symbol-group name.
UniqueSymbol = namedtuple("UniqueSymbol", ["market", "exchange", "api", "symbol", "group"])


def product_from_norm(unique_symbol, nano):
  """Resolve a normalized "Market:Exchange:symbol" name to its absolute norm.

  Executed in a worker process (see get_absolute_norm) so a hang inside the
  symbology library can be bounded by a timeout.
  """
  product = PyProduct.from_norm(unique_symbol, nano)
  return product.absolute_norm


def get_absolute_norm(market, exchange, symbol, pool, nano):
  """Convert a normalized symbol to its absolute name via the symbology lib.

  The conversion is delegated to a worker in `pool` with a 1 second timeout
  because the underlying symbology call can hang or fail on unknown symbols.

  Args:
    market: market type, e.g. 'spot' (title-cased before lookup).
    exchange: exchange name, e.g. 'binance' (title-cased before lookup).
    symbol: normalized symbol name.
    pool: multiprocessing.Pool used to run the conversion with a timeout.
    nano: timestamp in epoch nanoseconds at which to resolve the symbol.

  Returns:
    The absolute norm string, or None when conversion fails or times out.
  """
  unique_symbol = "{}:{}:{}".format(market.title(), exchange.title(), symbol)
  try:
    res = pool.apply_async(product_from_norm, (unique_symbol, nano))
    return res.get(timeout=1)
  except Exception as e:  # best effort: any failure maps to None
    print(str(e))
    print("{} can't be converted to absolute norm".format(unique_symbol))
    return None


# Measurements (toml-key form, see convert_mea_to_toml_setting) excluded from
# all checks; they are dropped from symbol-group settings and skipped when
# scanning feed-writer configs.
filter_out_meas = [
    'futures_bybit_v1',
    'futures_kraken_v1',
    'spot_hitbtc_v2',
    'spot_bitfinex_v2',
    'options_deribit_v2',
    'spot_bybit_v1',
    'spot_upbitsg_v1',
    'spot_coingecko_v3',
]


def search_vanilla_config(file_name, trading_date):
  """Find the newest dated vanillas config containing `file_name`.

  Scans ../data/coin2/feed/vanillas_configs/ for numeric date folders and
  returns the path to `file_name` inside the most recent folder whose date is
  <= `trading_date` ('yyyymmdd' string).

  Returns:
    The config file path, or None when the directory or file does not exist.
  """
  vanillas_configs_dir = "../data/coin2/feed/vanillas_configs/"
  if not os.path.exists(vanillas_configs_dir):
    return None
  date_folders = []
  for _, dirs, _ in os.walk(vanillas_configs_dir, topdown=False):
    date_folders.extend(name for name in dirs if name.isnumeric())
  # Newest folder first, so the first hit is the latest applicable config.
  date_folders.sort(reverse=True)
  for date_folder in date_folders:
    if int(trading_date) < int(date_folder):
      continue  # folder dated after the trading date
    file_path = os.path.join(vanillas_configs_dir, date_folder, file_name)
    if os.path.exists(file_path):
      return file_path
  return None


def load_symbol_groups_setting(machine, str_trading_date):
  """Load the machine's symbol-groups toml, minus filtered-out measurements.

  Falls back to the default symbol_groups.toml when no dated vanilla config
  exists for the machine.
  """
  symbol_groups_path = search_vanilla_config('%s_symbol_groups.toml' % machine, str_trading_date)
  if symbol_groups_path is None:
    symbol_groups_path = '../data/coin2/feed/symbol_groups.toml'
  with open(symbol_groups_path, 'r') as f:
    symbol_groups = toml.load(f)
  # Drop measurements excluded from all checks.
  for mea in filter_out_meas:
    symbol_groups.pop(mea, None)
  return symbol_groups


def convert_mea_to_toml_setting(mea):
  """Map a 'Market.Exchange.api' name to its toml key, e.g. 'spot_binance_v1'."""
  return mea.replace('.', '_').lower()


def create_map_norm_to_absolute(machine, trading_date):
  """Build {UniqueSymbol: absolute_norm} for every symbol configured on machine.

  Symbols whose conversion fails are skipped; the valid/total symbol count is
  printed for visibility. Raises AssertionError when nothing converts (which
  indicates a broken py_symbology.so).
  """
  group_setting = load_symbol_groups_setting(machine, trading_date.strftime('%Y%m%d'))
  worker_pool = Pool(1)
  # Midnight of the trading date as epoch nanoseconds (local timezone).
  midnight = datetime.datetime.combine(trading_date, datetime.datetime.min.time())
  nano_seconds = int(midnight.timestamp() * 1e+9)
  norm_to_absolute = {}
  total_symbols = 0
  for mea, groups in group_setting.items():
    market_key, exchange_key, api = mea.split('_')
    for group, symbols in groups.items():
      total_symbols += len(symbols)
      for symbol in symbols:
        absolute_norm = get_absolute_norm(market_key, exchange_key, symbol, worker_pool, nano_seconds)
        if absolute_norm is None:
          continue  # conversion failed; drop the symbol
        key = UniqueSymbol(market_key.title(), exchange_key.title(), api, symbol, group)
        norm_to_absolute[key] = absolute_norm
  worker_pool.terminate()
  assert len(norm_to_absolute) > 0, "py_symbology.so has issue"
  print("{} of {} symbols are valid in symbol_groups.toml".format(
      len(norm_to_absolute), total_symbols))
  return norm_to_absolute


def get_absolute_name_from_norm(data, market, exchange, api, norm_name, group):
  """Look up the absolute name for a normalized symbol.

  Args:
    data: mapping of UniqueSymbol -> absolute norm, as produced by
      create_map_norm_to_absolute.
    market, exchange: lower-case names (title-cased here to match the map keys).
    api: api version string.
    norm_name: normalized symbol name.
    group: symbol-group name.

  Returns:
    The absolute name, or None when the symbol is unknown.
  """
  key = UniqueSymbol(market.title(), exchange.title(), api, norm_name, group)
  return data.get(key)


class ExistChecker:
  """A checker to check subscribed symbols existing.

  Builds the list of subscribed symbols from the per-machine feed-writer json
  configs plus the symbol-groups toml, then reports every configured symbol
  that has no matching row in the day's feed stats as FEED_NOT_EXIST.
  """
  name = 'ExistChecker'

  def __init__(self, str_trading_date):
    # 'yyyymmdd' string used to locate dated config and product-info files.
    self.str_trading_date = str_trading_date
    self.symbols = self.__get_all_symbols()
    self.checked_count = 0
    self.failed_count = 0

  def __load_feed_writer_config(self):
    """Return {machine: parsed feed-writer json}, preferring dated vanilla configs."""
    json_config_tmpl = "../data/coin2/feed/ops/feed-writer/%s.json"
    config = {}
    for m in machines:
      json_config_path = search_vanilla_config("%s.json" % m, self.str_trading_date)
      if json_config_path is None :
        # Fall back to the default (undated) config location.
        json_config_path = json_config_tmpl % m
      with open(json_config_path, 'r') as f:
        json_obj = json.load(f)
        config[m] = json_obj
    return config

  def __get_check_symbol(self, mea):
    """Return the set of symbols expected to exist for `mea`.

    Returns None when no product-info file is available, in which case all
    configured symbols are checked unconditionally.
    """
    # Symbols listed in product info but known to produce no feed; excluded
    # from the existence check to avoid false alerts.
    nonexist_symbols = {
        'Spot.Coincheck.v1': {'FCT-JPY'},
        'Spot.Gdax.v1': {'INDEX-USDT', 'USDC-EUR', 'USDC-GBP', 'DAR-USDT'},
        'Spot.Bitbank.v1': {'BCHN-BTC', 'LTC-BTC', 'MONA-BTC', 'QTUM-BTC', 'XLM-BTC', 'XRP-BTC', 'ETH-BTC'},
        'Spot.Phemex.v1': {'ARG-USDT', 'BBF-USDT', 'BEND-USDT', 'BLOK-USDT', 'BRZ-USDC', 'CEEK-USDT', 'CEL-USDT', 'CRO-USDT', 'CRPT-USDT', 'DAO-USDT'},
        'Futures.Apollox.v1': {'ETH-BUSD.PERPETUAL', 'AI-BUSD.PERPETUAL', 'GDC-BUSD.PERPETUAL', 'EUR-BUSD.PERPETUAL', 'SOL-BUSD.PERPETUAL', 'OP-BUSD.PERPETUAL', 'BNB-BUSD.PERPETUAL', 'AVAX-BUSD.PERPETUAL',
                               'MASK-BUSD.PERPETUAL', 'FTM-BUSD.PERPETUAL', 'BTC-BUSD.PERPETUAL', 'MATIC-BUSD.PERPETUAL', 'CAKE-BUSD.PERPETUAL', 'DOGE-BUSD.PERPETUAL', 'GBP-BUSD.PERPETUAL', 'APT-BUSD.PERPETUAL'},
    }
    # Prefer the external dated product-info snapshot; fall back to in-repo.
    product_info_path = "../../coin_product_info/data/%s" % self.str_trading_date
    if os.path.exists(product_info_path):
      product_info_path = os.path.join(product_info_path, "%s.json" % mea)
    else:
      product_info_path = os.path.join("../data/coin2/product_info", "%s.json" % mea)
    if os.path.exists(product_info_path):
      with open(product_info_path, 'r') as f:
        pis = json.load(f)['product_infos']
      symbols = {pi['symbol'] for pi in pis}
      # Ignore products being received on feed-10.ap-northeast-1 instead of
      # the machines checked here.
      ico_info_path = search_vanilla_config('feed-10.ap-northeast-1.aws.json', self.str_trading_date)
      ico_symbols = set()
      if ico_info_path is not None:
        with open(ico_info_path, 'r') as f:
          ico_pis = json.load(f)['feed']['exchanges']
        if mea in ico_pis.keys():
          ico_symbols = set(ico_pis[mea]['products']['norms'])
          symbols = symbols - ico_symbols
      if mea in nonexist_symbols.keys():
        symbols = symbols - nonexist_symbols[mea]
      if mea == 'Futures.Ftx.v1':
        # Drop quarterly contracts except the two NEXT_QUARTER products that
        # are actually subscribed.
        symbols = {symbol for symbol in symbols if '.NNEXT_QUARTER' not in symbol and '.NEXT_QUARTER' not in symbol}
        symbols = symbols.union({"BTC-USD.NEXT_QUARTER", "ETH-USD.NEXT_QUARTER"})
      return symbols
    return None

  def __get_all_symbols(self):
    """Collect every subscribed symbol across machines as a list of dicts."""
    all_symbols = []
    subscribe_config = self.__load_feed_writer_config()
    for machine, config in subscribe_config.items():
      group_setting = load_symbol_groups_setting(machine, self.str_trading_date)
      exchanges = config['feed']['exchanges']
      for mea in exchanges.keys():
        # Skip split-channel entries and filtered-out measurements.
        if "_split_channel" in mea or convert_mea_to_toml_setting(mea) in filter_out_meas:
          continue
        check_symbol = self.__get_check_symbol(mea)
        groups = exchanges[mea]['products']['groups']
        assert len(groups) > 0, "Only support groups setting"
        toml_mea = convert_mea_to_toml_setting(mea)
        market_type, exchange, api_version = mea.split('.')
        for group in groups:
          if group not in group_setting[toml_mea]:
            continue
          symbols = group_setting[toml_mea][group]
          for symbol in symbols:
            # check_symbol None means no product info: check unconditionally.
            if check_symbol is None or symbol in check_symbol:
              data = {}
              data['machine'] = machine
              data['market_type'] = market_type
              data['exchange'] = exchange
              data['api_version'] = api_version
              data['symbol'] = symbol
              data['group'] = group
              all_symbols.append(data)
    return all_symbols

  def __health_stats(self, failed_symbols, trading_date):
    """Build a FeedHealthStats-shaped DataFrame for the missing symbols."""
    health_stats = []
    for symbol in failed_symbols:
      sub_request = ExchangeApiRequestProto(
          market_type=symbol['market_type'], 
          exchange=symbol['exchange'], 
          api_version=symbol['api_version'])
      exchange_api_id_list = get_exchange_api_id_from_request([sub_request])
      assert len(exchange_api_id_list) == 1
      exchange_api_id = exchange_api_id_list[0]
      symbol_stats = {}
      symbol_stats['trading_date'] = trading_date
      symbol_stats['machine'] = symbol['machine']
      symbol_stats['exchange_api_id'] = exchange_api_id
      symbol_stats['exchange'] = symbol['exchange']
      symbol_stats['symbol'] = symbol['symbol']
      symbol_stats['group'] = symbol['group']
      symbol_stats['health'] = FeedHealthResult.FEED_NOT_EXIST
      if 'worker' not in symbol.keys():
        reason = self.name + ": NOT EXIST"
      else:
        reason = "ExistChecker: worker {} NOT EXIST".format(symbol['worker'])
      symbol_stats['reason'] = reason
      health_stats.append(symbol_stats)

    return pd.DataFrame(health_stats)

  def check(self, dataset):
    """Return FEED_NOT_EXIST stats for subscribed symbols missing in dataset."""
    trading_date = dataset['trading_date'].unique()[0]
    norm_to_absolutes = {}
    for machine in machines:
      norm_to_absolutes[machine] = create_map_norm_to_absolute(machine, trading_date)
    failed_symbols = []
    for symbol in self.symbols:
      machine = symbol['machine']
      market_type = symbol['market_type']
      exchange = symbol['exchange']
      api_version = symbol['api_version']
      abs_name = get_absolute_name_from_norm(
          norm_to_absolutes[machine],
          market_type,
          exchange,
          api_version,
          symbol['symbol'],
          symbol['group'])
      if abs_name is None:
        # Symbol cannot be resolved to an absolute name: count as missing.
        pprint.pprint(symbol)
        failed_symbols.append(symbol)
        continue

      extracted_ds = dataset.loc[
          (dataset['machine'] == machine) &
          (dataset['exchange'] == exchange) &
          (dataset['market_type'] == market_type) &
          (dataset['api_version'] == api_version) &
          (dataset['symbol'] == abs_name)]
      if extracted_ds.shape[0] == 0:
        failed_symbols.append(symbol)

    self.checked_count = len(self.symbols)
    self.failed_count = len(failed_symbols)
    return self.__health_stats(failed_symbols, trading_date)

  def stats(self):
    """Return a summary dict: checker name, checked and failed counts."""
    return {'name': self.name, 'checked': self.checked_count, 'failed': self.failed_count}

  def is_to_slack(self):
    """Failures from this checker are alerted to slack."""
    return True


class ThresholdChecker:
  """Flags feeds whose no-book/no-trade durations exceed thresholds.

  The threshold evaluation itself is delegated to find_unhealthy_feed_stats
  and integrate_unhealthy_stats; this class shapes inputs and reason strings.
  """

  # Per check type: the no-data duration threshold (nanoseconds) together
  # with activity thresholds (USD volume, message count) — presumably used to
  # qualify symbols for the check; exact semantics live in
  # find_unhealthy_feed_stats.
  THRESHOLD_MAP = {
      'no_book': [
          ('no_book_duration', 600E+9),  # nano second
          ('volume_usd', 100000),
          ('book_count', 20000)
      ],
      'no_trade': [
          ('no_trade_duration', 600E+9),  # nano second
          ('volume_usd', 100000),
          ('trade_count', 3000)
      ],
  }

  name = 'ThresholdChecker'

  def __init__(self):
    self.checked_count = 0
    self.failed_count = 0

  def __health_stats(self, dataset, original_dataset):
    """Prefix each reason with the checker name and append stats id/worker.

    Rows still lacking a reason but marked FEED_NOT_EXIST get a synthesized
    "NOT EXIST" reason. `original_dataset` must be index-aligned with
    `dataset` so the feed_stats_id/worker lookups line up.
    """
    cond = (dataset['reason'].isna() & (dataset['health'] == FeedHealthResult.FEED_NOT_EXIST))
    dataset.loc[cond, 'reason'] = self.name + ": NOT EXIST, feed_stats_id: " + \
        original_dataset.loc[cond, 'feed_stats_id'].astype(str) + ", worker: " + \
        original_dataset.loc[cond, 'worker'].astype(str)
    dataset.loc[~cond, 'reason'] = self.name + ": " + dataset.loc[~cond, 'reason'] + \
        ", feed_stats_id: " + original_dataset.loc[~cond, 'feed_stats_id'].astype(str) + \
        ", worker: " + original_dataset.loc[~cond, 'worker'].astype(str)

    return dataset

  def check(self, dataset):
    """Return annotated health stats for feeds breaching any threshold."""
    feed_stats = filter_and_gen_columns(dataset.copy())
    latest_unhealthy_stats = pd.DataFrame()
    # Each item is a (check_name, threshold_list) tuple; results are merged
    # across check types.
    for threshold in self.THRESHOLD_MAP.items():
      unhealthy_stats = find_unhealthy_feed_stats(feed_stats, threshold)
      latest_unhealthy_stats = integrate_unhealthy_stats(latest_unhealthy_stats,
                                                         unhealthy_stats,
                                                         threshold)
    latest_feed_health_stats = gen_latest_feed_health_stats_with_raw(latest_unhealthy_stats,
                                                                     dataset)

    self.checked_count = len(dataset)
    self.failed_count = len(latest_feed_health_stats)
    return self.__health_stats(latest_feed_health_stats,
                               dataset.iloc[latest_feed_health_stats.index])

  def stats(self):
    """Return a summary dict: checker name, checked and failed counts."""
    return {'name': self.name, 'checked': self.checked_count, 'failed': self.failed_count}

  def is_to_slack(self):
    """Failures from this checker are not alerted to slack."""
    return False


class FeedLastTimeChecker:
  """Last time of book/trade message is not calculated by no_book/trade duration.

  This checker uses the raw last book/trade timestamps to detect symbols
  that went completely silent near the end of the trading day.
  """

  name = 'FeedLastTimeChecker'

  def __init__(self):
    self.checked_count = 0
    self.failed_count = 0

  def __health_stats(self, dataset):
    """Shape the failed rows into the FeedHealthStats schema with health=BAD."""
    columns = ['trading_date', 'machine', 'exchange_api_id', 'symbol']
    failed_data = dataset[columns].copy()
    failed_data['health'] = FeedHealthResult.BAD
    failed_data['reason'] = self.name + ": last 1 hour no feed, feed_stats_id: " + \
        dataset['feed_stats_id'].astype(str) + ", worker: " + \
        dataset['worker'].astype(str)

    return failed_data

  def check(self, dataset):
    """Return BAD stats for active symbols with neither book nor trade after 23:00."""
    # Trading date is inferred from the first row's last-book timestamp.
    today = dataset['time_last_book'][0].date()
    # filter out not activate symbols
    dataset = dataset[dataset['trade_count'] > 100]
    # Friday is rollover day, so only Spot feeds are checked then
    if today.isoweekday() == 5:
      dataset = dataset[dataset['market_type'] == 'Spot']
    # "last 1 hour no feed" == nothing after 23:00 of the trading date
    threshold_timestamp = pd.Timestamp(today) + pd.DateOffset(hours=23)
    last_1_hour_no_book = dataset['time_last_book'] < threshold_timestamp
    last_1_hour_no_trade = dataset['time_last_trade'] < threshold_timestamp
    last_1_hour_no_feed = dataset[last_1_hour_no_book & last_1_hour_no_trade]
    self.checked_count = len(dataset)
    self.failed_count = len(last_1_hour_no_feed)
    return self.__health_stats(last_1_hour_no_feed)

  def stats(self):
    """Return a summary dict: checker name, checked and failed counts."""
    return {'name': self.name, 'checked': self.checked_count, 'failed': self.failed_count}

  def is_to_slack(self):
    """Failures from this checker are alerted to slack."""
    return True


class MostActiveChecker:
  """Check no book/trade duration for the symbol which is the most active.

  For each (machine, exchange_api_id) pair only the symbol with the highest
  trade count is examined; if even that symbol saw a no-book/no-trade gap
  longer than `threshold`, the whole feed is reported as BAD.
  """

  name = 'MostActiveChecker'
  threshold = 1200E+9  # nanoseconds (20 minutes)

  def __init__(self):
    self.checked_count = 0
    self.failed_count = 0

  def __health_stats(self, dataset):
    """Shape the failed rows into the FeedHealthStats schema with health=BAD."""
    columns = ['trading_date', 'machine', 'exchange_api_id', 'symbol', 'reason']
    failed_data = dataset[columns].copy()
    failed_data['health'] = FeedHealthResult.BAD

    return failed_data

  def check(self, dataset):
    """Return BAD stats for feeds whose most active symbol went quiet.

    Args:
      dataset: raw feed-stats DataFrame; must contain machine,
        exchange_api_id, trade_count, no_book_duration, no_trade_duration,
        exchange and feed_stats_id columns.
    """
    # Use the 'max' aggregation name: recent pandas deprecates passing the
    # builtin `max` to transform; both resolve to the same reduction.
    idx = dataset['trade_count'] == \
        dataset.groupby(['machine', 'exchange_api_id'])['trade_count'].transform('max')
    # Ties on trade_count are broken by taking the first matching row.
    most_active_data = dataset[idx].groupby(['machine', 'exchange_api_id']).first().reset_index()

    book_failed_condition = most_active_data['no_book_duration'] > self.threshold
    # bitflyer have no trade at maintainance time
    trade_failed_condition = (most_active_data['no_trade_duration'] > self.threshold) & \
                             (most_active_data['exchange'] != "Bitflyer")
    # Partition failures into book-only, trade-only and both so each bucket
    # gets a distinct reason string.
    both_failed_condition = book_failed_condition & trade_failed_condition
    book_failed_only_condition = book_failed_condition & (~both_failed_condition)
    trade_failed_only_condition = trade_failed_condition & (~both_failed_condition)
    book_failed_only = most_active_data[book_failed_only_condition].copy()
    book_failed_only['reason'] = self.name + ": no_book_duration, feed_stats_id: " + \
        book_failed_only['feed_stats_id'].astype(str)
    trade_failed_only = most_active_data[trade_failed_only_condition].copy()
    trade_failed_only['reason'] = self.name + ": no_trade_duration, feed_stats_id: " + \
        trade_failed_only['feed_stats_id'].astype(str)
    both_failed = most_active_data[both_failed_condition].copy()
    both_failed['reason'] = self.name + ": no_book_duration, no_trade_duration, feed_stats_id: " + \
        both_failed['feed_stats_id'].astype(str)

    failed_data = pd.concat([book_failed_only, trade_failed_only, both_failed])
    self.checked_count = len(most_active_data)
    self.failed_count = len(failed_data)

    return self.__health_stats(failed_data)

  def stats(self):
    """Return a summary dict: checker name, checked and failed counts."""
    return {'name': self.name, 'checked': self.checked_count, 'failed': self.failed_count}

  def is_to_slack(self):
    """Failures from this checker are alerted to slack."""
    return True


def _gen_query_feed_stats_to_check_health_sql(trading_date):
  """Render the feed-stats health-check query from its jinja template."""
  env = jinja2.Environment(loader=jinja2.FileSystemLoader(searchpath="./"))
  sql_tmpl = ('coin/support/feed_tool/feed_stats/query_sql/'
              'coin2_query_feed_stats_to_check_health.sql.tmpl')
  return env.get_template(sql_tmpl).render(trading_date=trading_date, machines=machines)


def query_feed_stats_to_check_health(trading_date):
  """Fetch raw feed-stats rows for `trading_date` from the coin2 querier DB."""
  sql = _gen_query_feed_stats_to_check_health_sql(trading_date)
  return pd.read_sql(sql, coin2_feed_stats_querier_engine)


def filter_and_gen_columns(feed_stats):
  """Keep the latest complete row per feed and add USD volume columns.

  Drops incomplete rows, keeps only the most recent stats row per
  (machine, exchange, trading_date, symbol), restricts to supported quote
  currencies and derives quote_exchange / volume_usd columns.
  """
  # Drop rows with any missing stats column.
  feed_stats.dropna(how='any', inplace=True)
  # Most recent row per feed, by create_time.
  feed_stats = feed_stats.sort_values('create_time').groupby(
      ['machine', 'exchange', 'trading_date', 'symbol'],
      as_index=False).last().reset_index(drop=True)
  feed_stats = feed_stats.loc[feed_stats['quote'].isin(QUOTE_LIST)]
  # NOTE(review): this silences pandas chained-assignment warnings globally
  # for the rest of the process, not just this function.
  pd.options.mode.chained_assignment = None  # default='warn'
  exchange_rate = query_quote_to_usd_exchange_rate(QUOTE_LIST)
  feed_stats['quote_exchange'] = feed_stats['quote'].map(exchange_rate)
  feed_stats['volume_usd'] = feed_stats['volume'] * feed_stats['quote_exchange']
  return feed_stats


def _gen_delete_feed_health_stats_sql(trading_date):
  """Render the FeedHealthStats delete statement from its jinja template."""
  loader = jinja2.FileSystemLoader(searchpath="./")
  env = jinja2.Environment(loader=loader)
  template = env.get_template('coin/support/feed_tool/feed_stats/query_sql/'
                              'coin2_delete_feed_health_stats.sql.tmpl')
  return template.render(trading_date=trading_date, machines=machines)


def _insert_feed_health_db(feed_health_stats):
  """Append the health-stats columns to the FeedHealthStats table."""
  engine = coin2_feed_stats_importer_engine
  columns = ['trading_date', 'machine', 'exchange_api_id', 'symbol', 'health', 'reason']
  # DataFrame.to_sql is the public pandas API; pandas.io.sql.to_sql (imported
  # at the top of this file) is a private module function.
  feed_health_stats[columns].to_sql('FeedHealthStats', engine,
                                    if_exists='append', index=False)


def insert_unhealthy_to_feed_health_stats_db(trading_date, latest_feed_health_stats):
  """Replace the day's FeedHealthStats rows: delete existing, then insert."""
  delete_sql = _gen_delete_feed_health_stats_sql(trading_date)
  with coin2_feed_stats_importer_engine.connect() as connection:
    connection.execute(delete_sql)

  _insert_feed_health_db(latest_feed_health_stats)


def print_checker_stats(stats, f):
  """Write a short aligned summary of one checker's results to stream `f`.

  `stats` is the dict returned by a checker's stats() method
  (keys: 'name', 'checked', 'failed').
  """
  print(f"Checker : {stats['name']}", file=f)
  print(f"Checked : {stats['checked']:d}", file=f)
  print(f"Failed  : {stats['failed']:d}", file=f)
  print(file=f)


def run_feed_health_check(trading_date):
  """Run every enabled checker for `trading_date` ('yyyymmdd' string).

  Prints each checker's results, persists unhealthy stats to the
  FeedHealthStats table, and sends a slack alert when --slack_send is set
  and an alerting checker failed.

  Raises:
    FeedStatsNotReadyError: when no feed-stats rows exist for the date.
  """
  raw_feed_stats = query_feed_stats_to_check_health(trading_date)
  if len(raw_feed_stats) == 0:
    raise FeedStatsNotReadyError('Feed not ready on %s.' % trading_date)

  print("Feed stats data: %d" % len(raw_feed_stats))
  print()

  # Only ExistChecker is currently enabled; the others are kept here for
  # reference.
  # checkers = [ExistChecker(), ThresholdChecker(), FeedLastTimeChecker(),
  #             MostActiveChecker()]
  checkers = [ExistChecker(trading_date)]

  latest_feed_health_stats = pd.DataFrame()
  slack_io = io.StringIO()
  alert = False
  for checker in checkers:
    unhealthy_stats = checker.check(raw_feed_stats.copy())
    if checker.stats()['failed'] > 0:
      # Widen pandas display limits so the full failure table is printed.
      pd.set_option('display.max_rows', 10000)
      pd.set_option('display.max_columns', 10000)
      pd.set_option('display.width', 10000)
      print(unhealthy_stats)
    print_checker_stats(checker.stats(), sys.stdout)
    if checker.is_to_slack() and checker.stats()['failed'] > 0:
      pd.set_option('display.unicode.ambiguous_as_wide', True)
      print_checker_stats(checker.stats(), slack_io)
      print(unhealthy_stats, file=slack_io)
      print(file=slack_io)
      alert = True
    latest_feed_health_stats = pd.concat([latest_feed_health_stats, unhealthy_stats],
                                         axis=0, sort=True,
                                         ignore_index=True)

  if not latest_feed_health_stats.empty:
    insert_unhealthy_to_feed_health_stats_db(trading_date, latest_feed_health_stats)
  if FLAGS.slack_send and alert:
    slack_title = "Feed stats checker " + trading_date + '\n'
    mention_list = ['leon', 'fengyang', 'junxiao', "ziyan"]
    send_to_slack(slack_title + slack_io.getvalue(), '#coin_feed_noti', 'msg', mention_list=mention_list)


def run_message_time_check(trading_date):
  """Alert when Futures.Binance.v1 message-time diffs degrade vs prior days.

  Compares today's avg/max message time diff per (machine, symbol) against
  the mean over the 5 preceding dates; ratios above 3x (avg) or 5x (max) are
  printed and, with --slack_send, sent to slack.

  Args:
    trading_date: a date/datetime object (not a string).
  """
  td_df = query_message_time_diff(trading_date.strftime('%Y%m%d'))

  # Baseline: per-(machine, symbol) mean over the 5 days before trading_date.
  reference_df = []
  reference_dates = [td for td in iterate_date(trading_date - datetime.timedelta(days=5), trading_date)]
  for reference_date in reference_dates:
    td_str = reference_date.strftime('%Y%m%d')
    reference_df.append(query_message_time_diff(td_str))
  reference_df = pd.concat(reference_df,axis='index')
  reference_df = reference_df.groupby(['machine','symbol']).mean()

  failed = []
  for row in td_df.iterrows():
    row = row[1]  # iterrows yields (index, Series); keep the Series
    reference_row = reference_df.loc[(row['machine'],row['symbol'])]

    # Average diff more than 3x the 5-day baseline counts as degraded.
    if row['avg_message_time_diff'] / reference_row['avg_message_time_diff'] > 3 :
      failed.append({
        "type":"avg_message_time_diff",
        "machine":row["machine"],
        "mea":"Futures.Binance.v1",
        "symbol":row["symbol"],
        "average_last_5days": round(reference_row['avg_message_time_diff'],2),
        "value_today": round(row['avg_message_time_diff'],2)
      })

    # Max diff more than 5x the 5-day baseline counts as degraded.
    if row['max_message_time_diff'] / reference_row['max_message_time_diff'] > 5 :
      failed.append({
        "type":"max_message_time_diff",
        "machine":row["machine"],
        "mea":"Futures.Binance.v1",
        "symbol":row["symbol"],
        "average_last_5days": round(reference_row['max_message_time_diff'],2),
        "value_today": round(row['max_message_time_diff'],2)
      })

  slack_io = io.StringIO()
  if len(failed)>0:
    failed = pd.DataFrame(failed)
    # Widen pandas display limits so the full table is rendered.
    pd.set_option('display.max_rows', 10000)
    pd.set_option('display.max_columns', 10000)
    pd.set_option('display.width', 10000)
    print(failed,file=slack_io)
    slack_title = "Feed ft-et checker " + trading_date.strftime('%Y%m%d') + '\n'
    mention_list = ['leon', 'fengyang', 'junxiao', "ziyan"]
    print(slack_title + slack_io.getvalue())
    if FLAGS.slack_send :
      send_to_slack(slack_title + slack_io.getvalue(), '#coin_feed_noti', 'msg', mention_list=mention_list)
    

def query_message_time_diff(td_str):
  """Query avg/max message time diffs for the BTC/ETH perpetuals per machine.

  Issues one realtime Futures.Binance.v1 StatsView query per
  (machine, symbol) pair for trading date `td_str` ('yyyymmdd') and returns a
  single DataFrame with machine/symbol columns appended.

  NOTE(review): the SQL is built by %-interpolation; inputs are internal
  (dates, machine list, fixed symbols) so injection risk is low, but
  parameterized queries would be safer.
  """
  query_template = "SELECT avg_message_time_diff, max_message_time_diff\n" + \
      "FROM feed_stat_20200803.StatsView\n" + \
      "WHERE market_type = 'Futures' and exchange = 'Binance' and api_version = 'v1' and recipe = 'realtime' and " + \
      "trading_date = '%s' and machine = '%s' and symbol = '%s'"

  dfs = []
  for m in machines:
    for s in ["BTC-USDT.PERPETUAL", "ETH-USDT.PERPETUAL"]:
      query_str = query_template % (td_str, m, s)
      df = pd.read_sql(query_str, coin2_feed_stats_querier_engine)
      df["machine"] = m
      df["symbol"] = s
      dfs.append(df)
  return pd.concat(dfs,axis='index')
    

def main(_):
  """Run feed health and message-time checks for every date in the flag range.

  Requires --start_date and --end_date ('yyyymmdd'); per the flag help,
  end_date itself is not checked.

  Raises:
    app.UsageError: when a required date flag is missing. Using an explicit
      error instead of `assert`, which is stripped under `python -O`.
  """
  start_date = FLAGS.start_date
  if not start_date:
    raise app.UsageError('--start_date must be specified.')
  end_date = FLAGS.end_date
  if not end_date:
    raise app.UsageError('--end_date must be specified.')

  start_date = convert_string_to_dates(start_date)[0]
  end_date = convert_string_to_dates(end_date)[0]

  for trading_date in iterate_date(start_date, end_date):
    td_str = trading_date.strftime('%Y%m%d')
    print('\nRunning feed health check for %s ...' % td_str)
    run_feed_health_check(td_str)
    run_message_time_check(trading_date)


if __name__ == '__main__':
  flags.DEFINE_string('start_date', None, 'yyyymmdd.')

  flags.DEFINE_string('end_date', None, 'yyyymmdd. Feed at end_date is not checked.')

  # default py_symbology.so is build in same repository: locate the repo root
  # via git so the bazel output directory can be derived from it.
  # NOTE(review): shell=True is required for the $(...) substitution; the
  # command contains no external input.
  result = subprocess.run(["realpath ./$(git rev-parse --show-cdup)"],
                          capture_output=True,
                          shell=True,
                          encoding="utf8")
  if result.returncode == 0:
    lib_dir = os.path.join(result.stdout.strip(), 'bazel-bin/cc/appcoin2/support/symbology')
  else:
    # Not inside a git checkout: the flag must be passed explicitly.
    lib_dir = None
  flags.DEFINE_string('py_product_lib_dir', lib_dir, 'py_symbology.so directory.')

  flags.DEFINE_boolean('slack_send', False, 'send result to slack')

  logging.basicConfig(level='DEBUG', format='%(levelname)8s %(asctime)s %(name)s] %(message)s')

  app.run(main)
