import os
import sys
import datetime
import traceback
from typing import List, Tuple
import ujson
import time
from concurrent.futures import (ProcessPoolExecutor, ThreadPoolExecutor, as_completed, wait,
                                ALL_COMPLETED)
from collections import OrderedDict

fastfeature_dir_path = os.path.expandvars("$REMOTE_HOME/workspace/fastfeature")
sys.path.append(os.path.join(fastfeature_dir_path, "coinstrat"))
os.environ["BATCH_INSTALL_ROOT"] = f"{fastfeature_dir_path}/third_party"

from telethon.sync import TelegramClient
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

import pandas as pd
import numpy as np
from numerize.numerize import numerize
from absl import app, flags
from google.protobuf.json_format import MessageToDict

from coin.base.datetime_util import to_datetime, to_timestamp_int
from xunkemgmt_client.client.api_client import XunkemgmtClient
from xunkemgmt_client.tool.slack_noti import send_to_slack
import coin2.service.strategy.trading_summary_service_pb2 as tss_pb2
from coinstrat.strat_lm.app.midfreq.utils.reader import IntervalReader
import coin.research.read_strat_info as rsi
from coin.proto.coin_strategy_pb2 import StrategyLog
from coin.proto.coin_order_gateway_pb2 import OrderGatewayLog, OrderEvent
from coin.support.proto_log.logic.util import read_strat_log, StratInfo

from .tier_const import (STAT_MAP, DIVIDER, XUNKE_COVERED_EXCHANGES, XUNKE_SPOT_ONLY_EXCHANGES,
                         XUNKE_OPTIONS_EXCHANGES, XUNKE_FUTURES_ONLY_EXCHANGES,
                         XUNKE_EXCLUDE_AGG_TYPES_EXCHANGES, XUNKE_COVERED_BUSINESS_UNITS)

FLAGS = flags.FLAGS

# Window (in days) for rolling means of day-over-day deltas in the reports.
DELTA_ROLLING_DAYS = 5

# Root directory of the 4-hour (PT240M) derived interval H5 feed buckets.
DIRPATH = "/remote/iosg/data-2/buckets/feed.derived2.interval_h5/coin/main/PT240M"

# Pin the process timezone to GMT so naive datetime / strftime arithmetic
# below is effectively UTC-based regardless of the host's locale.
os.environ["TZ"] = "GMT"
time.tzset()


def calc_rolling_delta(delta_rolling_days: int, **kwargs) -> str:
  """Render rolling means of day-over-day deltas plus a formatted table.

  Args:
    delta_rolling_days: window length (days) for the rolling mean of diffs.
    **kwargs: pd.Series keyed by their display label.

  Returns:
    One line per series with its latest rolling delta mean, followed by the
    combined table; just "\n" when no series are supplied.
  """
  if not kwargs:
    return "\n"
  # Build the display table up front (mirrors the original evaluation order).
  table = pd.concat(kwargs, axis=1).applymap(format_tvr_or_ms)
  parts = []
  for label, series in kwargs.items():
    assert isinstance(series, pd.Series), f"{label}\nshould be pd.Series"
    rolled = series.sort_index().diff().dropna().rolling(delta_rolling_days).mean()
    parts.append(f"{label} delta {delta_rolling_days}D rolling mean: "
                 f"{format_tvr_or_ms(rolled.iloc[-1])}\n")
  return "".join(parts) + table.to_string() + "\n"


def convert_tvr_in_ccy(xunke_df: pd.DataFrame, info_name: str) -> pd.Series:
  """Convert daily traded volume into the stat's accounting currency.

  Only "Deribit Volume" is supported: ETH-accounted volume is converted to
  BTC via the daily ETH/BTC perpetual index ratio and added to the
  BTC-accounted volume.

  Args:
    xunke_df: summary rows with 'accounting_currency', 'volume' and
      'trading_date' columns, indexed by datetime.
    info_name: name of the stat being evaluated.

  Returns:
    Daily turnover series denominated in BTC.

  Raises:
    NotImplementedError: for any info_name other than "Deribit Volume".
  """
  if info_name != "Deribit Volume":
    # BUG FIX: the original constructed NotImplementedError() without raising
    # it, then crashed with UnboundLocalError on the return statement below.
    raise NotImplementedError(info_name)
  reader = IntervalReader(start_date=xunke_df.index.min().strftime("%Y%m%d"),
                          end_date=xunke_df.index.max().strftime("%Y%m%d"),
                          exchange='Futures.Deribit.v2',
                          use_pit_pi_info=True)
  index_df = reader.get_df_by_key('CLOSE_INDEX')
  index_df.index = pd.to_datetime(index_df.index)
  # Daily closes of the two perpetual index prices give the ETH->BTC rate.
  perp_df = index_df[['BTC-USD.PERPETUAL', 'ETH-USD.PERPETUAL']].resample('1D').last()
  eth_2_btc = perp_df['ETH-USD.PERPETUAL'] / perp_df['BTC-USD.PERPETUAL']
  btc_vol = xunke_df.loc[xunke_df['accounting_currency'] == 'BTC', 'volume']
  daily_eth_vol = xunke_df[xunke_df['accounting_currency'] == 'ETH'].groupby(
      ['trading_date'])['volume'].sum()
  daily_eth_vol.index = pd.to_datetime(daily_eth_vol.index.astype(str))
  eth_2_btc_vol = daily_eth_vol.multiply(eth_2_btc)
  return btc_vol + eth_2_btc_vol


def crawl_binance_promotional_spot_symbols() -> np.array:
  """Fetch Binance spot symbols currently under a maker-zero fee promotion.

  Pages through the public commission-activity endpoint and returns the
  BASE-QUOTE symbol names whose maker fee is zero.
  """
  url_tmpl = ("https://www.binance.com/bapi/accounts/v1/public/commission/symbol-activity-list?"
              "currentPage={currpage}&pageSize=100")

  def extract_symbols(df: pd.DataFrame) -> np.array:
    # No promotional entries at all -> empty result.
    if df.empty:
      return np.array([])
    df["symbol"] = df["baseAsset"] + "-" + df["quoteAsset"]
    return df.loc[df["makerZero"], "symbol"].to_numpy()

  first = requests.get(url_tmpl.format(currpage=1))
  first.raise_for_status()
  payload = first.json()
  rows = payload["data"]["symbolCommissionActivityList"]
  total_pages = int(payload["data"]["pages"])
  assert total_pages >= 1, "total_pages should be at least 1"
  for page in range(2, total_pages + 1):
    rsp = requests.get(url_tmpl.format(currpage=page))
    rsp.raise_for_status()
    rows.extend(rsp.json()["data"]["symbolCommissionActivityList"])
    # Throttle to be polite to the public endpoint.
    time.sleep(0.5)
  return extract_symbols(pd.DataFrame(rows))


def crawl_binance_spot_tvr(start_dt: str, end_dt: str) -> pd.DataFrame:
  """Crawl per-symbol daily spot turnover from Binance klines, in USD.

  Fetches 1d klines for every TRADING spot symbol over [start_dt, end_dt]
  (both "YYYYMMDD"), converts each symbol's quote-denominated turnover into
  USD via BUSD/USDT cross rates, and returns a ts x symbol pivot table with
  columns renamed to internal product-info symbols.
  """
  def req_with_retry(url, symbol):
    # Fresh session per request, retrying transient HTTP failures with backoff.
    session = requests.Session()
    retries = Retry(total=5, backoff_factor=0.1, status_forcelist=[429, 500, 502, 503, 504])
    adapter = HTTPAdapter(max_retries=retries)
    session.mount('http://', adapter)
    session.mount('https://', adapter)
    return session.get(url), symbol

  # Universe of actively trading spot symbols: (official symbol, BASE-QUOTE).
  rsp, _ = req_with_retry("https://api.binance.com/api/v3/exchangeInfo", None)
  univ = [(i["symbol"], f'{i["baseAsset"]}-{i["quoteAsset"]}')
          for i in rsp.json()["symbols"]
          if i['status'] == "TRADING"]
  data = []
  start_dt = datetime.datetime.strptime(start_dt, "%Y%m%d")
  end_dt = datetime.datetime.strptime(end_dt, "%Y%m%d")
  futures = []
  with ThreadPoolExecutor(max_workers=5) as executor:
    for off_symbol, symbol in univ:
      futures.append(
          executor.submit(
              req_with_retry,
              f"https://api.binance.com/api/v3/klines?symbol={off_symbol}&interval=1d&"
              f"startTime={int(start_dt.timestamp() * 1000)}&endTime={int(end_dt.timestamp() * 1000)}",
              symbol))
    active_threads = []
    # NOTE(review): this batches already-completed futures in groups of 5 and
    # sleeps between batches — presumably crude rate limiting of result
    # processing; confirm intent before changing.
    for future in as_completed(futures):
      active_threads.append(future)
      if len(active_threads) >= 5:
        wait(active_threads, return_when=ALL_COMPLETED)
        active_threads = []
        time.sleep(0.2)
      try:
        res, symbol = future.result()
        # Kline fields: [0]=open time (ms), [4]=close, [5]=base volume,
        # [7]=quote-asset turnover.
        for i in res.json():
          data.append({
              "symbol": symbol,
              "volume": float(i[5]),
              "turnover": float(i[7]),
              "ts": i[0],
              "close": float(i[4])
          })
      except Exception:
        # Best-effort: a failed symbol is logged and skipped, not fatal.
        traceback.print_exc()
        pass
    wait(active_threads, return_when=ALL_COMPLETED)
  df = pd.DataFrame(data)
  # Candidate pairs converting each symbol's quote currency into BUSD/USDT,
  # in both direct (XXX-BUSD) and inverted (BUSD-XXX) quoting.
  df["quote_symbol"] = df["symbol"].apply(lambda x: f"{x.split('-')[1]}-BUSD")
  df["quote_symbol2"] = df["symbol"].apply(lambda x: f"{x.split('-')[1]}-USDT")
  df["quote_symbol3"] = df["symbol"].apply(lambda x: f"BUSD-{x.split('-')[1]}")
  df["quote_symbol4"] = df["symbol"].apply(lambda x: f"USDT-{x.split('-')[1]}")
  for col in ["quote_symbol", "quote_symbol2", "quote_symbol3", "quote_symbol4"]:
    tmp_df = df[["ts", "symbol", "close"]].copy()
    tmp_df.rename(columns={"symbol": col}, inplace=True)
    if col in ["quote_symbol3", "quote_symbol4"]:
      # Inverted pairs convert via the reciprocal of the close price.
      tmp_df["close"] = 1 / tmp_df["close"]
    tmp_df.rename(columns={"close": col.replace("quote_symbol", "conversion")}, inplace=True)
    df = df.merge(tmp_df, on=["ts", col], how="left")
  # Average whichever conversion rates matched; unmatched ones are NaN and
  # are ignored by mean(axis=1).
  df["final_conversion"] = df[["conversion", "conversion2", "conversion3",
                               "conversion4"]].mean(axis=1)
  df["turnover_in_usd"] = df["turnover"] * df["final_conversion"]
  final_df = df.pivot_table(index="ts", columns="symbol", values="turnover_in_usd")
  # Rename native BASE-QUOTE columns to internal product-info symbols.
  with open(os.path.expandvars("$COIN_REPO/data/coin2/product_info/Spot.Binance.v1.json"),
            "r") as f:
    pi_dict = ujson.load(f)
  pi_map = {
      f'{v["native_base"]}-{v["native_quote"]}': v["symbol"] for v in pi_dict["product_infos"]
  }
  final_df.rename(columns=pi_map, inplace=True)
  return final_df


def crawl_bithumb_spot_turnover(start_dt: str,
                                end_dt: str) -> Tuple[pd.Series, bool, str, pd.Series]:
  """Crawl Bithumb's official daily KRW turnover from its Telegram channel.

  The "trades_alert" channel posts daily messages like:

      00시 기준  2023년 7월 3일
      440,700백만원 입니다.

      As of  00:00 3/7/2023 (KST)
      440,700 million won.

  The English half is parsed into a per-day KRW turnover series. An
  independent estimate is also rebuilt from the Spot.Bithumb.v2 interval
  feed for cross-checking.

  Returns:
    (telegram_turnover_sr, success_flag, raw_message_text,
    interval_feed_turnover_sr). On failure the series may be empty and the
    flag is False.
  """
  error_txt = ""
  interval_feed_exchange_sr = pd.Series()
  try:
    api_id = os.environ["TELEGRAM_API_ID"]
    api_hash = os.environ["TELEGRAM_API_HASH"]
    username = os.environ["TELEGRAM_USERNAME"]
    phone_number = os.environ["TELEGRAM_PHONE_NUMBER"]
    password = os.environ["TELEGRAM_PASSWORD"]

    client = TelegramClient(
        username,
        api_id,
        api_hash,
    )

    client.start(phone=phone_number, password=password)
    channel = client.get_entity("trades_alert")
    days = (datetime.datetime.strptime(end_dt, "%Y%m%d") -
            datetime.datetime.strptime(start_dt, "%Y%m%d")).days + 1
    msgs = client.get_messages(channel, limit=days + 1)
    datas = []
    # Start one day early so the KST day boundary is fully covered.
    reader = IntervalReader(start_date=(datetime.datetime.strptime(start_dt, "%Y%m%d") -
                                        datetime.timedelta(days=1)).strftime('%Y%m%d'),
                            end_date=end_dt,
                            exchange="Spot.Bithumb.v2",
                            use_pit_pi_info=True)
    tvrdf = reader.get_liquidity()
    tvrdf.index = pd.to_datetime(tvrdf.index, utc=True).tz_convert("Asia/Seoul")
    btc_closedf = reader.get_df_by_key("CLOSE_TRADE")['BTC-KRW']
    btc_closedf.index = pd.to_datetime(btc_closedf.index, utc=True).tz_convert("Asia/Seoul")
    daily_btc_closedf = btc_closedf.resample("D").last()
    btcdf = tvrdf.filter(regex=r".*-BTC").resample("D").sum().multiply(daily_btc_closedf, axis=0)
    krwdf = tvrdf.filter(regex=r".*-KRW").resample("D").sum()
    # NOTE(review): btcdf was already multiplied by daily_btc_closedf above;
    # multiplying a second time here, and adding krwdf both inside the .add()
    # and again in the trailing sum, looks like double counting — confirm the
    # intended formula before relying on this estimate.
    converted_krwdf = btcdf.multiply(daily_btc_closedf, axis=0)
    interval_feed_exchange_sr = converted_krwdf.add(krwdf).sum(axis=1) + krwdf.sum(axis=1)
    interval_feed_exchange_sr.index = interval_feed_exchange_sr.index.strftime("%Y%m%d")
    interval_feed_exchange_sr = interval_feed_exchange_sr.loc[
        (interval_feed_exchange_sr.index >= start_dt) & (interval_feed_exchange_sr.index <= end_dt)]
    interval_feed_exchange_sr.sort_index(ascending=False, inplace=True)
    for msg in msgs:
      txt = msg.text
      # Message describes the *previous* KST day.
      dt = (msg.date.astimezone(datetime.timezone(datetime.timedelta(hours=9))) -
            datetime.timedelta(days=1)).date()
      for split_txt in txt.split("\n\n"):
        if "of" in split_txt:
          dt_from_txt = datetime.datetime.strptime(
              split_txt.strip("As of").strip(" ").split("(KST)")[0].strip(" "),
              "%H:%M %d/%m/%Y").date()
          final_txt = split_txt.split("(KST)")[-1].lstrip(" \n").replace(" won.", "")
          if " " in final_txt:
            number, unit = final_txt.split(" ")
          else:
            # No separator: split at the first alphabetic character.
            for idx, char in enumerate(final_txt):
              if char.isalpha():
                number = final_txt[:idx]
                unit = final_txt[idx:]
          if unit == "thousand":
            day_exchange_krw_tvr = float(number.replace(",", "")) * 1e3
          elif unit == "million":
            day_exchange_krw_tvr = float(number.replace(",", "")) * 1e6
          elif unit == "billion":
            day_exchange_krw_tvr = float(number.replace(",", "")) * 1e9
          else:
            error_txt += txt + "\n--------------------\n"
            # BUG FIX: the original returned a 3-tuple here while the
            # signature and callers unpack 4 values, so an unparseable unit
            # caused a ValueError at the call site instead of a clean failure.
            return pd.Series(), False, error_txt, interval_feed_exchange_sr
          datas.append({"ts": dt_from_txt, "exchange_krw_tvr": day_exchange_krw_tvr})
      error_txt += txt + "\n--------------------\n"

      if dt > datetime.datetime.strptime(end_dt, "%Y%m%d").date():
        continue
      if dt < datetime.datetime.strptime(start_dt, "%Y%m%d").date():
        break
  except Exception:
    # Best effort: report failure to the caller instead of crashing the run.
    traceback.print_exc()
    return pd.Series(), False, error_txt, interval_feed_exchange_sr
  res = pd.DataFrame(datas).set_index("ts")["exchange_krw_tvr"].sort_index(ascending=False)
  res = res.loc[(res.index >= datetime.datetime.strptime(start_dt, "%Y%m%d").date()) &
                (res.index <= datetime.datetime.strptime(end_dt, "%Y%m%d").date())]
  return res, True, error_txt, interval_feed_exchange_sr


class OnLog(object):
  """Callback collector for read_strat_log.

  Records the first log timestamp at/after `start_dt` and `delta_start_dt`,
  the last timestamp seen, and every ORDER_FILLED event on `exchange`.
  """

  def __init__(self, exchange, strategy, machine, delta_start_dt, start_dt) -> None:
    self._exchange = exchange
    self._strategy = strategy
    self._machine = machine
    self._delta_start_dt = delta_start_dt
    self._start_dt = start_dt
    # Fill dicts collected by on_log: ts / symbol / fill_price / fill_qty.
    self._fills = []
    # First ts at/after start_dt resp. delta_start_dt; last ts seen so far.
    self._start_ts = None
    self._delta_start_ts = None
    self._end_ts = None
    self._ticker = []

  @property
  def exchange(self):
    return self._exchange

  @property
  def strategy(self):
    return self._strategy

  @property
  def machine(self):
    return self._machine

  @property
  def start_ts(self):
    return self._start_ts

  @property
  def delta_start_ts(self):
    return self._delta_start_ts

  @property
  def end_ts(self):
    return self._end_ts

  @property
  def fills(self) -> list:
    return self._fills

  def set_fills(self, fills):
    self._fills = fills

  def on_log(self, ts: int, log: bytes):
    """Consume one serialized StrategyLog record emitted at timestamp `ts`."""
    if self._start_ts is None and to_datetime(ts) >= self._start_dt:
      self._start_ts = ts
    if self._delta_start_ts is None and to_datetime(ts) >= self._delta_start_dt:
      self._delta_start_ts = ts
    self._end_ts = ts
    record = StrategyLog()
    record.ParseFromString(log)
    is_our_fill = (record.type == StrategyLog.OG_LOG and
                   record.og_log.type == OrderGatewayLog.ORDER_EVENT and
                   record.og_log.account_request.exchange == self._exchange and
                   record.og_log.event.type == OrderEvent.ORDER_FILLED)
    if is_our_fill:
      self._fills.append({
          "ts": ts,
          "symbol": record.og_log.event.symbol,
          "fill_price": record.og_log.event.fill_price,
          "fill_qty": record.og_log.event.fill_qty
      })


def crawl_bithumb_turnover_by_strategy(kst_start_dt: datetime.datetime,
                                       kst_delta_start_dt: datetime.datetime,
                                       kst_end_dt: datetime.datetime, strat_name: str,
                                       machine: str) -> OnLog:
  """Replay one strategy's proto logs and collect its Bithumb fill events.

  Reads logs from min(kst_start_dt, kst_delta_start_dt) through one day past
  kst_end_dt and returns the populated OnLog collector.
  """
  collector = OnLog(exchange="Bithumb",
                    strategy=strat_name,
                    machine=machine,
                    delta_start_dt=kst_delta_start_dt,
                    start_dt=kst_start_dt)
  info = StratInfo(strategy_name=strat_name, machine=machine, trading_date=None)
  earliest = min(kst_start_dt, kst_delta_start_dt)
  read_strat_log(strat_info=info,
                 start_time=earliest,
                 end_time=kst_end_dt + datetime.timedelta(days=1),
                 callback=collector.on_log,
                 root_dir="/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_proto_log")
  return collector


def crawl_bithumb_strategy(start_dt: str, delta_start_dt: str,
                           end_dt: str) -> Tuple[pd.DataFrame, pd.DataFrame]:
  """Compute per-strategy Bithumb KRW turnover from proto fill logs.

  Bithumb trading days are KST, so the "YYYYMMDD" boundaries are shifted
  back 9 hours to UTC log time. Delta-hedge strategies are excluded.

  Returns:
    (summary_df, daily_df): one row per strategy with total krw_tvr and its
    log time span, plus a per-KST-day, per-strategy turnover frame indexed
    by naive day timestamps. Strategies with no fills in the window are
    omitted.
  """
  kst_start_dt = datetime.datetime.strptime(start_dt, "%Y%m%d") - datetime.timedelta(hours=9)
  kst_delta_start_dt = datetime.datetime.strptime(delta_start_dt,
                                                  "%Y%m%d") - datetime.timedelta(hours=9)
  kst_end_dt = datetime.datetime.strptime(end_dt, "%Y%m%d") - datetime.timedelta(hours=9)
  stratinfo_df = pd.DataFrame(
      rsi.get_strategy_info(trading_date=pd.date_range(kst_start_dt.strftime("%Y%m%d"), (
          kst_end_dt + datetime.timedelta(days=1)).strftime("%Y%m%d")).tolist()))
  bithumb_df = stratinfo_df.loc[(stratinfo_df["exchange"] == "Bithumb") & (
      ~stratinfo_df["strategy_name"].isin(["vmm_orb_btc_delta_hedge", "vmm_grnd_btc_delta_hedge"])),
                                ["machine", "strategy_name", "owner"]].drop_duplicates()
  on_log_res = []
  # BUG FIX: max(1, ...) avoids ValueError(max_workers=0) when no Bithumb
  # strategies match the window.
  with ProcessPoolExecutor(max_workers=max(1, min(10, len(bithumb_df)))) as executor:
    futures = []
    for _, row in bithumb_df.iterrows():
      futures.append(
          executor.submit(crawl_bithumb_turnover_by_strategy, kst_start_dt, kst_delta_start_dt,
                          kst_end_dt, row["strategy_name"], row["machine"]))
    for future in as_completed(futures):
      on_log_res.append(future.result())
  datas = []
  daily_datas = []

  for on_log in on_log_res:
    filldf = pd.DataFrame(on_log.fills)
    if filldf.empty:
      # BUG FIX: with no fills, pd.DataFrame([]) has no columns and the
      # original crashed with KeyError on filldf["ts"] before its emptiness
      # check ever ran. Strategies without fills are simply skipped.
      continue
    filldf["ts"] = pd.to_datetime(filldf["ts"])
    filldf["pq"] = filldf["fill_price"] * filldf["fill_qty"]
    # Total turnover only counts fills at/after the report window start.
    krw_tvr = filldf.loc[filldf["ts"] >= to_datetime(on_log.start_ts), "pq"].sum()
    datas.append({
        "strategy_name": on_log.strategy,
        "machine": on_log.machine,
        "log_start_ts": to_datetime(on_log.start_ts),
        "log_end_ts": to_datetime(on_log.end_ts),
        "krw_tvr": krw_tvr
    })
    # Bucket fills into KST calendar days for the daily breakdown.
    filldf.set_index("ts", inplace=True)
    filldf.index = filldf.index.tz_localize("UTC")
    filldf.index = filldf.index.tz_convert("Asia/Seoul")
    daily_pq = filldf["pq"].resample("D").sum()
    daily_df = daily_pq.to_frame(name="krw_tvr")
    daily_df["strategy_name"] = on_log.strategy
    daily_df["machine"] = on_log.machine
    daily_datas.append(daily_df)
  if daily_datas:
    daily_df = pd.concat(daily_datas)
    # remove tzinfo of index
    daily_df.index = daily_df.index.tz_localize(None)
  else:
    # BUG FIX: pd.concat([]) raises; return an empty, well-typed frame so
    # downstream index operations still work.
    daily_df = pd.DataFrame(columns=["krw_tvr", "strategy_name", "machine"],
                            index=pd.DatetimeIndex([], name="ts"))
  return pd.DataFrame(datas), daily_df


def format_tvr_or_ms(val) -> str:
  """Format a turnover (numerized, e.g. "1.2M") or a market share ("%").

  Strings pass through unchanged. Magnitudes above 1 are treated as
  turnovers and numerized; anything else is rendered as a 4-decimal
  percentage.
  """
  if isinstance(val, str):
    return val
  if isinstance(val, (np.float32, np.float64)):
    val = float(val)
  if abs(val) > 1:
    return numerize(val)
  return f"{val:.4%}"


def query_mmprogram_summary(end_dt: str) -> str:
  """Build the Presto VIP/MM tier evaluation report as a Slack code block.

  Queries Xunke trading summaries for all covered exchanges, then, per stat
  in STAT_MAP, compares Presto's turnover / market share against the tier
  thresholds and appends rolling-delta diagnostics. "Bithumb Club B" is
  evaluated via Telegram-crawled exchange turnover plus proto fill logs;
  all other stats use interval-feed liquidity.

  Args:
    end_dt: last trading date to evaluate, "YYYYMMDD".

  Returns:
    The full report as one triple-backquoted string.
  """
  # Pull enough history to cover both month-to-date windows and the delta
  # rolling lookback.
  tss_args = {
      "business_units":
          XUNKE_COVERED_BUSINESS_UNITS,
      "start_timestamp":
          to_timestamp_int(
              datetime.datetime.strptime(end_dt, "%Y%m%d") -
              datetime.timedelta(days=max(29, DELTA_ROLLING_DAYS))),
      "end_timestamp":
          to_timestamp_int(
              datetime.datetime.strptime(end_dt, "%Y%m%d") + datetime.timedelta(days=1))
  }
  agg_types = ["EXCHANGE", "MARKET_TYPE", "SYMBOL", "TRADING_DATE"]
  datas = []
  tss_args_list = []
  # One request spec per exchange, restricted to the market types it covers.
  for exchange in XUNKE_COVERED_EXCHANGES:
    tmp_tss_args = tss_args.copy()
    tmp_tss_args['exchanges'] = [exchange]
    if exchange not in XUNKE_EXCLUDE_AGG_TYPES_EXCHANGES:
      tmp_tss_args["agg_types"] = agg_types
    if exchange in XUNKE_SPOT_ONLY_EXCHANGES:
      tmp_tss_args["market_types"] = ["Spot"]
    elif exchange in XUNKE_FUTURES_ONLY_EXCHANGES:
      tmp_tss_args["market_types"] = ["Futures"]
    elif exchange in XUNKE_OPTIONS_EXCHANGES:
      tmp_tss_args["market_types"] = ["Options"]
    else:
      tmp_tss_args["market_types"] = ["Futures", "Spot", "Options"]
    tss_args_list.append(tmp_tss_args)
  with XunkemgmtClient() as client:
    for target_tss_args in tss_args_list:
      rsp = client.query_trading_summary_interval_histories(
          tss_pb2.QueryTradingSummaryIntervalHistoriesRequestProto(**target_tss_args))
      for hist in rsp.histories:
        hist_dict = MessageToDict(hist, preserving_proto_field_name=True)
        tmp = {}
        for col in ["exchange", "market_type", "symbol", "accounting_currency", "trading_date"]:
          if col in hist_dict["summary_info"]:
            tmp[col] = hist_dict["summary_info"][col]
        # Maker/taker USD turnovers include the "inferred" mark components.
        tmp["turnover_in_usd"] = hist_dict["summary"]["turnover_mark_in_usd"]
        tmp["turnover_maker_in_usd"] = hist_dict["summary"][
            "turnover_maker_mark_in_usd"] + hist_dict["summary"][
                "turnover_maker_inferred_mark_in_usd"]
        tmp["turnover_taker_in_usd"] = hist_dict["summary"][
            "turnover_taker_mark_in_usd"] + hist_dict["summary"][
                "turnover_taker_inferred_mark_in_usd"]
        for col in ["turnover_maker", "turnover_taker", "turnover", "volume"]:
          if f"{col}_mark" in hist_dict["summary"]:
            tmp[col] = hist_dict["summary"][f"{col}_mark"] + hist_dict["summary"].get(
                f"{col}_inferred_mark", 0.0)
        datas.append(tmp)
  xunke_df = pd.DataFrame(datas)
  final_str = f"""```Presto VIP/MM Tier Evaluation (As of {FLAGS.end_dt})\n"""
  final_str += f"""Business Units: {",".join(XUNKE_COVERED_BUSINESS_UNITS)}\n"""
  final_str += DIVIDER * 3 + "\n"
  delta_start_dt = (datetime.datetime.strptime(end_dt, "%Y%m%d") -
                    datetime.timedelta(days=DELTA_ROLLING_DAYS)).strftime("%Y%m%d")
  for info_name, info_dict in STAT_MAP.items():

    tmp_str = f"""{info_name}\n"""
    # Resolve the evaluation window start from the stat's backdays spec
    # (month-to-date, week-to-date, or a fixed number of days).
    if "backdays" in info_dict and info_dict["backdays"] == "MTD":
      start_dt = datetime.datetime(
          datetime.datetime.strptime(end_dt, "%Y%m%d").year,
          datetime.datetime.strptime(end_dt, "%Y%m%d").month, 1)
    elif "backdays" in info_dict and info_dict["backdays"] == "WTD":
      start_dt = (datetime.datetime.strptime(end_dt, "%Y%m%d") -
                  datetime.timedelta(days=datetime.datetime.strptime(end_dt, "%Y%m%d").weekday()))
    elif "backdays" in info_dict and info_dict["backdays"].endswith("D"):
      start_dt = (datetime.datetime.strptime(end_dt, "%Y%m%d") -
                  datetime.timedelta(days=int(info_dict["backdays"][:-1]) - 1))
    else:
      raise ValueError("backdays not supported")
    min_start_dt = min(datetime.datetime.strptime(delta_start_dt, "%Y%m%d"),
                       start_dt).strftime("%Y%m%d")
    start_dt = start_dt.strftime("%Y%m%d")
    if info_name == "Bithumb Club B":
      # Bithumb is evaluated against Telegram-published exchange turnover and
      # our proto fill logs rather than xunke summaries.
      exchange_sr, is_done, bithumb_txt, intv_exchange_sr = crawl_bithumb_spot_turnover(
          min_start_dt, end_dt)
      strat_df, daily_strat_df = crawl_bithumb_strategy(start_dt, delta_start_dt, end_dt)
      daily_strat_df.index = pd.to_datetime(daily_strat_df.index.astype(str))
      exchange_sr.index = pd.to_datetime(exchange_sr.index.astype(str))
      intv_exchange_sr.index = pd.to_datetime(intv_exchange_sr.index.astype(str))
      kwargs = {
          "daily_exchange_turnover_in_krw (Telegram)":
              exchange_sr,
          "daily_exchange_turnover_in_krw (IntvFeed * 2)":
              intv_exchange_sr * 2,
          "daily_turnover_in_krw":
              daily_strat_df["krw_tvr"].resample("1D").sum(),
          "daily_ms_turnover_in_krw (Telegram)":
              daily_strat_df["krw_tvr"].resample("1D").sum() / exchange_sr,
          "daily_ms_turnover_in_krw (IntvFeed * 2)":
              daily_strat_df["krw_tvr"].resample("1D").sum() / (intv_exchange_sr * 2)
      }
      strat_tvr = daily_strat_df.loc[daily_strat_df.index >= start_dt, "krw_tvr"].sum()
      if is_done:
        for eval_thold_dict, eval_metric in zip(info_dict["eval_thold"], info_dict["eval_metric"]):
          eval_score = strat_tvr / exchange_sr.loc[exchange_sr.index >= start_dt].sum()
          eval_score2 = strat_tvr / (
              intv_exchange_sr.loc[intv_exchange_sr.index >= start_dt].sum() * 2)
          tmp_str += (
              f"""Period:{start_dt} - {end_dt} ({info_dict["backdays"]}) """
              f"""Evaluation Metric: {eval_metric}, """
              f"""Presto {format_tvr_or_ms(eval_score)} """
              f"""({format_tvr_or_ms(strat_tvr)} / """
              f"""{format_tvr_or_ms(exchange_sr.loc[exchange_sr.index >= start_dt].sum())})"""
              f""", Presto (IntvFeed * 2) {format_tvr_or_ms(eval_score2)} """
              f"""({format_tvr_or_ms(strat_tvr)} / """
              f"""{format_tvr_or_ms((intv_exchange_sr.loc[intv_exchange_sr.index >= start_dt].sum() * 2))})"""
          )
          level = ""
          thold = ""
          # Thresholds are ordered best-first; take the first one we clear.
          for tmp_level, tmp_eval_thold in eval_thold_dict.items():
            if eval_score > tmp_eval_thold:
              tmp_str += (f"""Thold {format_tvr_or_ms(tmp_eval_thold)} """
                          f"""(Level: {tmp_level})\n""")
              level = tmp_level
              thold = tmp_eval_thold
              break
          if level == "" and thold == "":
            _, last_value = list(eval_thold_dict.items())[-1]
            tmp_str += (f"""Under {format_tvr_or_ms(last_value)} """
                        """(Level: 0)\n""")
          tmp_str += calc_rolling_delta(DELTA_ROLLING_DAYS, **kwargs)
        tmp_str += "@@@ Bithumb Official Telegram MSG @@@\n" + bithumb_txt + "\n"
        strat_df["log_start_ts"] = strat_df["log_start_ts"].apply(
            lambda x: datetime.datetime.strftime(x, "%Y-%m-%d %H:%M:%S"))
        strat_df["log_end_ts"] = strat_df["log_end_ts"].apply(
            lambda x: datetime.datetime.strftime(x, "%Y-%m-%d %H:%M:%S"))
        strat_df["krw_tvr_ms"] = strat_df["krw_tvr"] / exchange_sr.loc[
            exchange_sr.index >= start_dt].sum()
        tmp_str += strat_df.set_index("strategy_name").sort_index().applymap(
            format_tvr_or_ms).to_string() + "\n"
        tmp_str += "\n@@@ DAILY SUMMARY @@@\n"
        combdf = daily_strat_df.join(exchange_sr.to_frame(), how="left").reset_index().set_index([
            "ts", "strategy_name"
        ]).sort_index(level=["ts", "strategy_name"],
                      ascending=[True, True]).reindex(["machine", "krw_tvr", "exchange_krw_tvr"],
                                                      axis=1)
        combdf["krw_tvr_ms"] = combdf["krw_tvr"] / combdf["exchange_krw_tvr"]
        tmp_str += combdf.applymap(format_tvr_or_ms).to_string() + "\n"
      else:
        tmp_str += bithumb_txt + "\n"
      tmp_str += DIVIDER * 3 + "\n"
      final_str += tmp_str
      continue
    # Generic path: exchange liquidity comes from the interval feed, one
    # reader per market-exchange-account (mea) spec.
    exchanges = []
    market_types = []
    tvr_df = []
    for mea in info_dict["mea"]:
      market_type, exchange, _ = mea.split(".")
      reader = IntervalReader(start_date=min_start_dt,
                              end_date=end_dt,
                              exchange=mea,
                              use_pit_pi_info=True)
      tmp_tvr_df = reader.get_liquidity()
      tvr_df.append(tmp_tvr_df)
      market_types.append(market_type)
      exchanges.append(exchange)
    tvr_df = pd.concat(tvr_df, axis=1)
    eval_metric = info_dict["eval_metric"]
    for symbol_regex, eval_thold_dict, eval_metric in zip(info_dict["symbol_regex"],
                                                          info_dict["eval_thold"],
                                                          info_dict["eval_metric"]):
      tmp_xunke_df = xunke_df[(xunke_df["exchange"].isin(exchanges)) &
                              (xunke_df["market_type"].isin(market_types)) &
                              (xunke_df["symbol"].str.match(symbol_regex)) &
                              (pd.to_datetime(xunke_df["trading_date"].astype(str)) >=
                               datetime.datetime.strptime(min_start_dt, "%Y%m%d"))].copy()
      tmp_tvr_df = tvr_df.filter(regex=symbol_regex)

      if info_name == "Binance Spot MM":
        # Binance excludes promotional (maker-zero) symbols from the program,
        # and the exchange-side turnover comes from crawled klines instead.
        tvr_from_kline = crawl_binance_spot_tvr(min_start_dt, end_dt)
        exclude_symbols = crawl_binance_promotional_spot_symbols()
        tmp_xunke_df = tmp_xunke_df[~tmp_xunke_df["symbol"].isin(exclude_symbols)]
        tmp_tvr_df = tvr_from_kline.loc[:, ~tvr_from_kline.columns.isin(exclude_symbols)]
        tmp_tvr_df.index = pd.to_datetime(tmp_tvr_df.index, unit='ms')
      kwargs = {}
      tmp_xunke_df.set_index("trading_date", inplace=True)
      tmp_xunke_df.index = pd.to_datetime(tmp_xunke_df.index.astype(str))
      daily_exchange_tvr = tmp_tvr_df.sum(axis=1)
      daily_exchange_tvr.index = pd.to_datetime(daily_exchange_tvr.index)
      daily_exchange_tvr = daily_exchange_tvr.resample("1D").sum()
      # "delta_*" frames keep the longer lookback used for rolling deltas;
      # the originals are then trimmed to the evaluation window.
      delta_exchange_tvr = daily_exchange_tvr.loc[
          daily_exchange_tvr.index >= datetime.datetime.strptime(min_start_dt, "%Y%m%d")]
      delta_xunke_df = tmp_xunke_df.loc[
          tmp_xunke_df.index >= datetime.datetime.strptime(min_start_dt, "%Y%m%d")]
      kwargs["daily_exchange_turnover_in_usd"] = delta_exchange_tvr.resample("1D").sum()
      tmp_xunke_df = tmp_xunke_df.loc[
          tmp_xunke_df.index >= datetime.datetime.strptime(start_dt, "%Y%m%d")]
      daily_exchange_tvr = daily_exchange_tvr.loc[
          daily_exchange_tvr.index >= datetime.datetime.strptime(start_dt, "%Y%m%d")]
      exchange_tvr = daily_exchange_tvr.sum()
      if eval_metric in [
          "turnover_in_usd", "turnover", "turnover_maker_in_usd", "turnover_taker_in_usd"
      ]:
        eval_score = tmp_xunke_df[eval_metric].sum()
        kwargs[f"daily_{eval_metric}"] = delta_xunke_df[eval_metric].resample("1D").sum()
      elif eval_metric == "ms_maker":
        eval_score = tmp_xunke_df["turnover_maker_in_usd"].sum() / exchange_tvr
        agged_data = delta_xunke_df["turnover_maker_in_usd"].resample("1D").sum()
        kwargs["daily_turnover_maker_in_usd"] = agged_data
        kwargs["daily_ms_maker"] = agged_data / delta_exchange_tvr
      elif eval_metric == "ms_taker":
        eval_score = tmp_xunke_df["turnover_taker_in_usd"].sum() / exchange_tvr
        agged_data = delta_xunke_df["turnover_taker_in_usd"].resample("1D").sum()
        kwargs["daily_turnover_taker_in_usd"] = agged_data
        kwargs["daily_ms_taker"] = agged_data / delta_exchange_tvr
      elif eval_metric.startswith("turnover_in"):
        daily_tvr = convert_tvr_in_ccy(tmp_xunke_df, info_name)
        delta_tvr = convert_tvr_in_ccy(delta_xunke_df, info_name)
        kwargs[f"daily_{eval_metric}"] = delta_tvr.resample("1D").sum()
        eval_score = daily_tvr.sum()
      else:
        # BUG FIX: the original built NotImplementedError(eval_metric) without
        # raising it, so an unknown metric silently reused a stale eval_score
        # from a previous iteration (or hit NameError on the first one).
        raise NotImplementedError(eval_metric)
      tmp_str += (f"""Period:{start_dt} - {end_dt} ({info_dict["backdays"]}) """
                  f"""Evaluation Metric: {eval_metric}, """
                  f"""Presto {format_tvr_or_ms(eval_score)}, """)
      level = ""
      thold = ""
      # Thresholds are ordered best-first; take the first one we clear.
      for tmp_level, tmp_eval_thold in eval_thold_dict.items():
        if eval_score > tmp_eval_thold:
          tmp_str += (f"""Thold {format_tvr_or_ms(tmp_eval_thold)} """
                      f"""(Level: {tmp_level})\n""")
          level = tmp_level
          thold = tmp_eval_thold
          break
      if level == "" and thold == "":
        _, last_value = list(eval_thold_dict.items())[-1]
        tmp_str += (f"""Under {format_tvr_or_ms(last_value)} """
                    """(Level: 0)\n""")
      tmp_str += calc_rolling_delta(DELTA_ROLLING_DAYS, **kwargs)
      tmp_str += f"""used meas: {info_dict["mea"]}\n"""
      tmp_str += f"""used symbol regex: {symbol_regex}\n"""
      tmp_str += "" if tmp_xunke_df["accounting_currency"].dropna().empty\
        else f"""accounting_currency: {tmp_xunke_df["accounting_currency"].dropna().unique().tolist()[0]}\n"""
      tmp_str += ujson.dumps(OrderedDict(
          (k, format_tvr_or_ms(v)) for k, v in eval_thold_dict.items()),
                             indent=2) + "\n"
    tmp_str += DIVIDER * 3 + "\n"
    final_str += tmp_str
  final_str += """```"""
  return final_str


def _query_hist_dicts(start_dt: str, end_dt: str, strat_groups: List[str],
                      agg_types: List[str]) -> Tuple[list, list]:
  """Query trading-summary histories twice: by `agg_types`, then also by TRADING_DATE.

  Args:
    start_dt: inclusive start date, "%Y%m%d".
    end_dt: inclusive end date, "%Y%m%d" (query extends to start of next day).
    strat_groups: strategy groups to filter on.
    agg_types: base aggregation types (copied; not mutated for the caller).

  Returns:
    A pair of lists of history dicts (proto field names preserved): the first
    aggregated by `agg_types`, the second additionally by "TRADING_DATE".
  """
  with XunkemgmtClient() as client:
    args = {
        "business_units": ["Coin"],
        "start_timestamp":
            to_timestamp_int(datetime.datetime.strptime(start_dt, "%Y%m%d")),
        # end_dt is inclusive, so the window runs to the start of the next day.
        "end_timestamp":
            to_timestamp_int(
                datetime.datetime.strptime(end_dt, "%Y%m%d") + datetime.timedelta(days=1)),
        "agg_types": list(agg_types),
        "strategy_groups": strat_groups,
    }
    rsp = client.query_trading_summary_interval_histories(
        tss_pb2.QueryTradingSummaryIntervalHistoriesRequestProto(**args))
    args["agg_types"].append("TRADING_DATE")
    rsp2 = client.query_trading_summary_interval_histories(
        tss_pb2.QueryTradingSummaryIntervalHistoriesRequestProto(**args))
  return ([MessageToDict(h, preserving_proto_field_name=True) for h in rsp.histories],
          [MessageToDict(h, preserving_proto_field_name=True) for h in rsp2.histories])


def query_strat_summary_by_group(start_dt: str,
                                 end_dt: str,
                                 strat_groups: "List[str] | None" = None) -> str:
  """Build a slack-formatted (code-fenced) performance report for strategy groups.

  The report has three sections: per-strategy pnl/turnover/ret_bp, strategies
  whose ret_bp is below FLAGS.thold_bp, and per-symbol underperformers below
  FLAGS.symbol_thold_bp (optionally limited to FLAGS.top_loss_symbol_num rows).
  Reads FLAGS.thold_bp / FLAGS.symbol_thold_bp / FLAGS.top_loss_symbol_num.

  Args:
    start_dt: inclusive start date, "%Y%m%d".
    end_dt: inclusive end date, "%Y%m%d".
    strat_groups: strategy groups to query; defaults to ["lm-volume"].

  Returns:
    The report as a single string wrapped in ``` for slack.
  """
  # Avoid the mutable-default-argument pitfall; preserve the historical default.
  strat_groups = ["lm-volume"] if strat_groups is None else strat_groups

  # --- Section 1: per-strategy summary -------------------------------------
  hist_dicts, hist_dicts2 = _query_hist_dicts(start_dt, end_dt, strat_groups,
                                              ["STRATEGY_NAME"])
  datas = []
  for hist_dict in hist_dicts:
    tmp = dict(hist_dict["summary_info"])
    for field_name in ["pnl_mark_in_usd", "turnover_mark_in_usd", "ret_bp"]:
      # MessageToDict omits default-valued proto fields; guard with NaN
      # (consistent with the symbol-level section below).
      tmp[field_name] = hist_dict["summary"].get(field_name, np.nan)
    datas.append(tmp)
  datas2 = [dict(hist_dict["summary_info"]) for hist_dict in hist_dicts2]
  df = pd.DataFrame(datas)
  df2 = pd.DataFrame(datas2)
  df[["pnl_mark_in_usd", "turnover_mark_in_usd"]] = df[["pnl_mark_in_usd",
                                                        "turnover_mark_in_usd"]].applymap(numerize)
  df.set_index("strategy_name", inplace=True)
  # Human-readable "first-last" trading-date range per strategy.
  trd_df = df2.groupby(["strategy_name"])["trading_date"].apply(lambda x: f"{x.min()}-{x.max()}")

  df = trd_df.to_frame().join(df)
  df.sort_values(by="ret_bp", ascending=True, inplace=True)
  # Use the function's own arguments (not FLAGS) so the header always matches
  # the queried period, even when called with non-FLAGS dates.
  msg = f"""```{",".join(strat_groups)} report ({start_dt} - {end_dt})\n\n"""
  msg += f"""{df.to_string()}\n"""
  msg += DIVIDER * 3 + "\n"
  msg += f"""Underperformed Strategies (ret_thold_bp: {FLAGS.thold_bp:,.2f}bp)\n\n"""
  msg += f"""{df[df["ret_bp"] < FLAGS.thold_bp].to_string()}\n"""
  msg += DIVIDER * 3 + "\n"
  msg += f"""TOP {int(FLAGS.top_loss_symbol_num)} """ if FLAGS.top_loss_symbol_num else ""
  msg += f"""Underperformed Symbols (ret_thold_bp: {FLAGS.symbol_thold_bp:,.2f}bp)\n\n"""
  # Free the strategy-level frames before building the (larger) symbol-level ones.
  del trd_df, df2, df, datas, datas2

  # --- Section 2: per-(strategy, symbol) underperformers --------------------
  hist_dicts, hist_dicts2 = _query_hist_dicts(start_dt, end_dt, strat_groups,
                                              ["STRATEGY_NAME", "SYMBOL"])
  datas = []
  for hist_dict in hist_dicts:
    tmp = dict(hist_dict["summary_info"])
    for field_name in [
        "pnl_mark_from_fill_in_usd", "turnover_mark_in_usd", "ret_bp_from_fill",
        "sharpe_ratio_from_fill"
    ]:
      tmp[field_name] = hist_dict["summary"].get(field_name, np.nan)
    datas.append(tmp)
  datas2 = []
  for hist_dict in hist_dicts2:
    tmp = dict(hist_dict["summary_info"])
    for field_name in ["pnl_mark_from_fill_in_usd", "turnover_mark_in_usd"]:
      tmp[field_name] = hist_dict["summary"].get(field_name, np.nan)
    datas2.append(tmp)
  df = pd.DataFrame(datas)
  df2 = pd.DataFrame(datas2)
  # Drop (strategy, symbol) pairs that look inactive: zero turnover on end_dt,
  # or whose latest trading date predates end_dt entirely.
  max_dt_sr = df2.groupby(["strategy_name", "symbol"])["trading_date"].max()
  remove_symbols = df2.loc[(
      (df2["trading_date"] == int(end_dt)) & (df2["turnover_mark_in_usd"] == 0.0)) | (
          (df2[["strategy_name", "symbol"]].apply(tuple, axis=1).isin(max_dt_sr[
              max_dt_sr != int(end_dt)].index.to_frame(False).apply(tuple, axis=1)))),
                           ["strategy_name", "symbol"]].drop_duplicates()
  df2 = df2.loc[~df2[["strategy_name", "symbol"]].apply(tuple, axis=1).
                isin(remove_symbols.apply(tuple, axis=1))]
  trd_df = df2.groupby(["strategy_name",
                        "symbol"])["trading_date"].apply(lambda x: f"{x.min()}-{x.max()}")
  df[["pnl_mark_from_fill_in_usd",
      "turnover_mark_in_usd"]] = df[["pnl_mark_from_fill_in_usd",
                                     "turnover_mark_in_usd"]].applymap(numerize)
  df.set_index(["strategy_name", "symbol"], inplace=True)
  df = trd_df.to_frame().join(df)
  df.sort_values(by="ret_bp_from_fill", ascending=True, inplace=True)
  top_num = int(FLAGS.top_loss_symbol_num) if FLAGS.top_loss_symbol_num else len(df)
  msg += f"""{df[df["ret_bp_from_fill"] < FLAGS.symbol_thold_bp].head(top_num).to_string()}\n"""
  msg += "```"
  return msg


def main(_):
  """CLI entry point: build the requested report and send it to slack.

  Args:
    _: positional argv remainder from absl's app.run (unused).

  Raises:
    app.UsageError: if required flags are missing or FLAGS.type is invalid
      (absl prints usage and exits instead of dumping a traceback).
  """
  # Validate with explicit raises, not `assert`: asserts are stripped when
  # Python runs with -O, silently disabling these checks.
  if FLAGS.type not in ("strat", "mmprogram"):
    raise app.UsageError("type must be strat or mmprogram")
  if not FLAGS.strat_group:
    raise app.UsageError("strat_group is required")
  if not (FLAGS.start_dt and FLAGS.end_dt):
    raise app.UsageError("start_dt and end_dt are required")
  if not FLAGS.user_list:
    raise app.UsageError("user_list is required")
  if FLAGS.type == "strat":
    msg = query_strat_summary_by_group(FLAGS.start_dt, FLAGS.end_dt, FLAGS.strat_group)
  else:  # FLAGS.type == "mmprogram", guaranteed by the check above
    msg = query_mmprogram_summary(FLAGS.end_dt)
  for user in FLAGS.user_list:
    # Bare user names become DMs ("@user"); channels already start with "#".
    send_to_slack(msg, f"@{user}" if not user.startswith("#") else user, 'msg')


if __name__ == "__main__":
  # CLI flags; date flags expect the %Y%m%d format.
  flags.DEFINE_string("type", "", "type of function to run")
  flags.DEFINE_list("strat_group", None, "strat group")
  flags.DEFINE_list("user_list", "lucasyoo,beomsoo", "member list to send slack")
  flags.DEFINE_string("start_dt", None, "%Y%m%d")
  flags.DEFINE_string("end_dt", None, "%Y%m%d")
  flags.DEFINE_float("thold_bp", -0.2, "thold_bp for checking ret_bp")
  flags.DEFINE_float("symbol_thold_bp", -1.0, "thold_bp for checking ret_bp in strategy")
  flags.DEFINE_integer("top_loss_symbol_num", None, "top loss symbol num")
  # absl aborts at startup with a usage message if any of these is missing.
  for _required in ("strat_group", "start_dt", "end_dt"):
    flags.mark_flag_as_required(_required)
  app.run(main)
