# Copyright (c) 2020 Presto Labs Pte. Ltd.
# Author: jhkim

import gc
import os
import sys
import datetime
import collections
import glob
import functools

import pandas
import bisect
import numpy
import numexpr as ne
import json
from difflib import SequenceMatcher

from concurrent.futures import as_completed, ProcessPoolExecutor
from google.protobuf.json_format import MessageToDict

from coin.exchange.kr_rest.product.product_impl import generate_product_from_str2
from coin.strategy.mm.tool.signal_plot_base import (dump_book, dump_book_from_cache,
                                                    dump_funding_rate, BboTuple, BookCompress)
import coin.strategy.mm.tool.archive_base as abase
import coin.pnl.sim_stat_calc2 as sscalc2
import coin.pnl.sim_stat_plot as ssplot
import coin.pnl.sim_stat_option as ssopt
import coin.pnl.pnl_compact_stat as pcstat
import coin.strategy.mm.tool.order_plot_util as oputil
import coin.strategy.mm.tool.lp_plot_util as lputil
import coin2.exchange.order.system_pb2 as ospec
from coin.tool.strat_monitor.handlers.check_balance_handler import BalanceChecker

from absl import app, flags

import matplotlib
import matplotlib.ticker
import matplotlib.dates as mdates

matplotlib.use("Agg")
matplotlib.rcParams['agg.path.chunksize'] = 10000
import matplotlib.pyplot as plt

import warnings
from matplotlib.backends.backend_pdf import PdfPages

# NOTE: please git pull coin_binary before use.
import pyorio.record.record as cc_impl
import pyorio.record.simple_record as py_impl


def plot_debug_info():
  """Whether detailed debug info should be plotted.

  Requires --use_bbo; then true for short runs (<= 2 trading dates) or when
  forced via --force_plot_debug_info / --include_tag_info.
  """
  if not flags.FLAGS.use_bbo:
    return False
  short_run = len(abase.get_trading_dates(flags.FLAGS.trading_date)) <= 2
  return short_run or flags.FLAGS.force_plot_debug_info or flags.FLAGS.include_tag_info


def squeeze_order_df(orderdf):
  """Return a memory-slimmed copy of an order DataFrame.

  Converts 'type' to categorical, tightens dtypes of the core columns, and
  drops debug-only columns (plus 'tag' unless --include_tag_info is set).
  """
  slim = orderdf.copy()
  if 'type' in slim:
    slim['type'] = slim['type'].astype('category')
  if len(slim) > 0:
    slim['post_only'] = slim['post_only'].astype(bool)
    slim['time'] = slim['time'].astype(int)
    if 'submit_time' in slim:
      # Kept as float because it may contain NaN.
      slim['submit_time'] = slim['submit_time'].astype(float)
    slim['order_id'] = slim['order_id'].astype(int)
  drop_cols = [
      'ask0_price_on_accepted', 'bid0_price_on_accepted', 'last_feed_symbol', 'last_feed_type',
      'queue_pos_init', 'queue_pos', 'queue_pos_ratio'
  ]
  if not flags.FLAGS.include_tag_info:
    drop_cols.append('tag')
  for col in drop_cols:
    if col in slim:
      del slim[col]
  return slim


def get_pnlstr(pnldict):
  """Build a compact one-line summary string from a pnl stat dict.

  Keys are matched by prefix, so suffixed variants (e.g. 'pnl_net(USD)...')
  are picked up. Missing or non-finite values render as 'None'/0 instead of
  raising (the original crashed with TypeError when 'mdd', 'pnl_net' or
  'sub_count' keys were absent).
  """
  def get_first_value(dict_pick, key_name):
    # First value whose key starts with key_name, or None if no key matches.
    rets = [value for key, value in dict_pick.items() if key.startswith(key_name)]
    if len(rets) == 0:
      return None
    return float(rets[0])

  def get_first(dict_pick, key_name, format_str):
    # Formatted first matching value; None when missing or non-finite.
    fval = get_first_value(dict_pick, key_name)
    if fval is not None and not numpy.isnan(fval) and not numpy.isinf(fval):
      return format_str % fval
    return None

  # Prefer BTC- then USD-denominated pnl; fall back to raw pnl_net.
  if "pnl_net(BTC)" in pnldict:
    pnl_s = get_first(pnldict, "pnl_net(BTC)", "%+09.4f")
    pnl_head = "pnlbtc"
  elif "pnl_net(USD)" in pnldict:
    pnl_s = get_first(pnldict, "pnl_net(USD)", "%+09d")
    pnl_head = "pnlusd"
  else:
    pnl_s = get_first(pnldict, "pnl_net", "%+09d")
    pnl_head = "pnl"

  mdd_value = get_first_value(pnldict, "mdd")
  pnl_value = get_first_value(pnldict, "pnl_net")
  # Guard missing keys (None) as well as zero mdd to avoid TypeError/ZeroDivisionError.
  if not mdd_value or pnl_value is None:
    pnl2mdd = "0"
  else:
    pnl2mdd = "%.2f" % (abs(pnl_value) / abs(mdd_value))
  roe_s = get_first(pnldict, "return_on_equity", "%.2f")
  ffr_s = get_first(pnldict, "full_fill_ratio", "%.2f")
  # Mid-term ("focus_mt") sims are judged on daily sortino; otherwise 30-minute.
  target_sharpe_col = 'sortino_1440min' if flags.FLAGS.flag_values_dict().get('is_focus_mt', False) else 'sortino_30min'
  sp_s = get_first(pnldict, target_sharpe_col, "%.2f")
  nflip_s = get_first(pnldict, "flip_count", "%.0f")
  nret_s = get_first(pnldict, "net_ret(bps)", "%.2f")
  sub_count = get_first_value(pnldict, 'sub_count')
  duration_day = get_first_value(pnldict, 'duration_day')
  # Orders per day; 0 when duration is missing/zero or sub_count is absent.
  if not duration_day or sub_count is None:
    norder_day = 0
  else:
    norder_day = "%.0f" % (sub_count / duration_day)
  return (f"{pnl_head}_{pnl_s}_pnl2mdd_{pnl2mdd}_roe_{roe_s}_ffr_{ffr_s}" +
          f"_nordday_{norder_day}_sp_{sp_s}_nflip_{nflip_s}_ret_{nret_s}")


def gen_schedule(from_dt, to_dt):
  """Yield (midnight, from_hours, to_hours) slices covering [from_dt, to_dt) day by day.

  Hours are floats relative to each day's midnight; full interior days appear
  as (day, 0, 24). An empty/negative final slice is not yielded.
  """
  day_start = datetime.datetime.combine(from_dt.date(), datetime.time())
  start_h = (from_dt - day_start).total_seconds() / 3600
  end_h = (to_dt - day_start).total_seconds() / 3600
  # Peel off whole days until the remaining span fits inside one day.
  while end_h > 24:
    yield day_start, start_h, 24
    start_h = 0
    end_h -= 24
    day_start += datetime.timedelta(days=1)
  if end_h > start_h:
    yield day_start, start_h, end_h


def run_on_records(records, account_info_eod, compress, last_ending_ts):
  """Replay one order-record stream into a per-symbol sim stat.

  records[0] carries the account/symbol header (merged with account_info_eod);
  records[1:] are the order records. Returns (key, simstat) where key is
  "<exchange>.<symbol>" and simstat may be None when nothing is computable.

  Fix vs. original: DataFrame.append (removed in pandas 2.0) replaced with
  the equivalent pandas.concat calls.
  """
  symbol_info = records[0].account_info
  symbol_info.MergeFrom(account_info_eod)
  # Resume from the previous chunk's end when given, else from the first order record.
  from_dt = last_ending_ts if last_ending_ts is not None else records[1].time
  from_dt = pandas.DatetimeIndex([from_dt]).to_pydatetime()[0]
  to_dt = symbol_info.end_timestamp if symbol_info.end_timestamp > 0 else records[-1].time
  if hasattr(account_info_eod, "symbol_mtm_time") and symbol_info.symbol_mtm_time > 0:
    to_dt = account_info_eod.symbol_mtm_time
  to_dt = pandas.DatetimeIndex([to_dt]).to_pydatetime()[0]
  product = generate_product_from_str2(symbol_info.market_type, symbol_info.exchange,
                                       symbol_info.api, symbol_info.relative_norm, from_dt)

  # Short ranges (< 1h) can afford the uncompressed book.
  if (to_dt - from_dt).total_seconds() <= 3600:
    print("short range. use full book")
    compress_opt = BookCompress.BBO_P_CHANGE
  else:
    compress_opt = BookCompress.EVERY_1_SECOND if compress \
        else BookCompress.BBO_P_CHANGE

  funding_df = dump_funding_rate(product, from_dt, to_dt, flags.FLAGS.bbo_cache_root)

  bbos = []
  for trading_date, from_hours, to_hours in gen_schedule(from_dt, to_dt):
    time_range = f"{'%.4f' % from_hours}-{'%.4f' % to_hours}"
    if flags.FLAGS.use_bbo:
      abbos = []
      if flags.FLAGS.bbo_from_cache:
        abbos = dump_book_from_cache(product, trading_date, time_range, flags.FLAGS.bbo_cache_root)
        if flags.FLAGS.squeeze_stat:
          # Downsample the cached book 5x to cut memory.
          abbos = abbos[::5]
      if len(abbos) == 0:
        # Cache miss: dump the book directly with a sampling policy matching the mode.
        if flags.FLAGS.full_bbo:
          compress_opt = BookCompress.BBO_P_CHANGE
        if compress_opt == BookCompress.BBO_P_CHANGE:
          every_n_sec = None
          every_n_bp = None
        elif plot_debug_info():
          every_n_sec = None
          every_n_bp = 5
        else:
          every_n_sec = 30
          every_n_bp = None
        abbos = dump_book(product=product,
                          feed_sub_request=None,
                          compress=compress_opt,
                          trading_date=trading_date,
                          time_range=time_range,
                          every_n_sec=every_n_sec,
                          every_n_bp=every_n_bp)
      if len(abbos) > 0:
        bbos.extend(abbos)

  bbodf = pandas.DataFrame(bbos, columns=BboTuple._fields)
  if len(bbodf) != 0:
    # Drop garbage quotes where bid/ask disagree by >5% in log space.
    bbodf = bbodf[numpy.log(bbodf['bid0_price'] / bbodf['ask0_price']).abs() < 0.05]
    if last_ending_ts is not None:
      # Keep only the book from just before (60s) the previous chunk's end onward.
      indexes = numpy.where(bbodf.timestamp <= last_ending_ts + 60 * 1e9)[0]
      if len(indexes) > 0:
        bbodf = bbodf[indexes[-1]:]
  stdate, eddate = flags.FLAGS.trading_date.split("-")
  sthrs, edhrs = flags.FLAGS.time_range.split("-")

  def to_hours(time_str):
    # Parse "3H"/"30M"/"90S" (or a bare number of hours) into float hours.
    if time_str.endswith("H"):
      return float(time_str.replace("H", ""))
    elif time_str.endswith("M"):
      return float(time_str.replace("M", "")) / 60
    elif time_str.endswith("S"):
      return float(time_str.replace("S", "")) / 3600
    else:
      return float(time_str)

  start_dt = (pandas.DatetimeIndex([stdate]).to_pydatetime()[0] +
              datetime.timedelta(hours=to_hours(sthrs)))
  end_dt = (pandas.DatetimeIndex([eddate]).to_pydatetime()[0] +
            datetime.timedelta(hours=to_hours(edhrs)))

  df = squeeze_order_df(
      pandas.DataFrame(
          [MessageToDict(record, preserving_proto_field_name=True) for record in records[1:]]))

  if 'fill_qty' not in df:
    df['fill_qty'] = 0
    df['fill_price'] = 0
    df['fill_type'] = 'MAKER'
  else:
    # Pad synthetic zero-fill rows at the range boundaries so downstream
    # time-bucketing spans the whole requested window.
    start_ts = pandas.DatetimeIndex([start_dt]).astype(int)[0]
    if df.iloc[0]['time'] - start_ts < 4 * 3600e9:
      # DataFrame.append was removed in pandas 2.0; concat is the supported equivalent.
      df = pandas.concat([
          df,
          pandas.DataFrame([{
              'order_id': df['order_id'].min() - 1,
              'time': start_ts,
              'fill_qty': 0,
              'fill_price': numpy.nan,
              'fill_type': 'MAKER'
          }])
      ], ignore_index=True)
    end_ts = pandas.DatetimeIndex([end_dt]).astype(int)[0]
    if end_ts - df.iloc[-1]['time'] < 4 * 3600e9:
      df = pandas.concat([
          df,
          pandas.DataFrame([{
              'order_id': df['order_id'].max() + 1,
              'time': end_ts,
              'fill_qty': 0,
              'fill_price': numpy.nan,
              'fill_type': 'MAKER'
          }])
      ], ignore_index=True)
    # Stable sort keeps original order among equal timestamps.
    df.sort_values(['time'], inplace=True, kind='mergesort')

  stratname = (product.subscription_symbol
               if hasattr(product, "subscription_symbol") else product.symbol)
  key = f"{symbol_info.exchange}.{stratname}"

  initial_pos_add = 0
  if flags.FLAGS.hours_warmup is not None:
    # Drop the warmup window, folding its net fills into the initial position.
    cut_dt = (pandas.DatetimeIndex([symbol_info.start_timestamp]).to_pydatetime()[0] +
              pandas.Timedelta(f"{flags.FLAGS.hours_warmup}H"))
    idx = df['time'] < pandas.DatetimeIndex([cut_dt]).astype(int)[0]
    if not funding_df.empty:
       funding_idx = funding_df['timestamp'] < pandas.DatetimeIndex([cut_dt]).astype(int)[0]
       funding_df = funding_df.loc[~funding_idx]
    initial_pos_add += (df.loc[idx, 'sign'] * df.loc[idx, 'fill_qty']).sum()
    df = df.loc[~idx]
    if len(bbodf) > 0:
      idx = bbodf['timestamp'] < pandas.DatetimeIndex([cut_dt]).astype(int)[0]
      bbodf = bbodf.loc[~idx]
    if len(df) == 0 and initial_pos_add == 0:
      return key, None

  compact_stat = pcstat.PnlCompactStat.from_orderdf(
      product=product,
      orderdf=df,
      symbol_info=symbol_info,
      squeeze_stat=flags.FLAGS.squeeze_stat,
  )

  if compact_stat is None:
    return key, None

  # Backward compatibility
  initial_position = symbol_info.initial_position if hasattr(symbol_info, "initial_position") else 0

  # Overwrite initial position by flags
  for key_value in flags.FLAGS.initial_position.split(';'):
    if len(key_value.split('=')) == 2:
      key_, value = key_value.split('=')
      if key_ in product._full_symbol:
        initial_position = float(value)

  if flags.FLAGS.initial_position_all is not None:
    initial_position = flags.FLAGS.initial_position_all

  initial_position += initial_pos_add

  sresp = sscalc2.SimResponseCompact(param_label=None,
                                     compact_stat=compact_stat,
                                     miscinfo={
                                         'initial_pos': initial_position,
                                         'funding_df': funding_df
                                     },
                                     bbodf=bbodf,
                                     product=product)

  simstat = sscalc2.get_stat_df(
      sresp,
      {},
      is_sliced=flags.FLAGS.include_mm_stats is False,
      pnl_ccy_override=getattr(flags.FLAGS, 'pnl_ccy_override', None))
  return key, simstat


def plot_each_strat(stratname, simstat, plttype, pnl_currency):
  """Render one strategy's figure: 'detail' delegates to ssplot; 'pnl' draws
  an mtm-price panel plus an open_cash/pnl twin-axis panel."""
  if plttype == 'detail':
    ssplot.plot_detail_plotonly(stratname, simstat)
    return
  if plttype != 'pnl':
    raise ValueError(plttype)

  pnldf = simstat['combdf']
  plt.rcParams["figure.figsize"] = 8, 6
  ssplot.setup_plt()

  # Top third: mark-to-market price.
  plt.subplot(311)
  plt.plot(pnldf['mtm_price'], 'k-', lw=0.5, drawstyle='steps-post', rasterized=True)
  plt.title(f'{stratname} mtm_price')

  # Bottom two-thirds: open cash on the left axis, pnl on the right.
  plt.subplot(3, 1, (2, 3))
  plt.plot(pnldf['open_cash'], 'r:', lw=0.5, markersize=0.3, drawstyle='steps-post',
           rasterized=True)
  plt.axhline(y=0, color='black', alpha=0.3, linewidth=0.5, rasterized=True)
  plt.ylabel('open_cash', rasterized=True)
  plt.twinx()
  # The dummy NaN series keeps a red-dotted legend entry for open_cash on this axis.
  plt.plot(pnldf.index, pnldf['pnl_net'], 'g-', numpy.nan, numpy.nan, 'r:', lw=0.5,
           markersize=0.2, drawstyle='steps-post', rasterized=True)
  plt.grid(False)
  plt.title(stratname, rasterized=True)
  plt.ylabel('pnl')
  plt.legend([f"pnl({pnl_currency})", "open_cash"])

  # Four evenly spaced timestamp ticks with marker lines.
  dti = pandas.DatetimeIndex(pnldf['timestamp'])
  tick_idx = list(numpy.linspace(0, len(dti) - 1, 4).astype(int))
  plt.xticks(tick_idx, dti[tick_idx].strftime("%m%d %H:%M"), rotation=40, fontsize=6,
             rasterized=True)
  axvlines(tick_idx, color='b', lw=0.2, rasterized=True)
  plt.tight_layout()


def get_stats_dict_unroll(pb_path, compress=False, last_ending_ts=None):
  """Read an order-record pb file and compute per-symbol sim stats.

  Returns (sstats_dict, sim_elapsed_time): sstats_dict maps
  "<exchange>.<symbol>" -> list of stat dicts; sim_elapsed_time comes from
  the trailing account-info record (0 when the file holds no records).

  Change vs. original: dropped the no-op reassignment of sstat['combdf'].
  """
  sstats_dict = collections.defaultdict(list)
  record_reader = cc_impl.SimpleReader(pb_path)
  records = []
  try:
    # Read until EOF (Read() returns None) or a truncated tail raises.
    while True:
      data = record_reader.Read()
      if data is None:
        break
      record = ospec.OrderRecordProto()
      record.ParseFromString(data)
      records.append(record)
  except RuntimeError:
    # Best-effort: keep whatever parsed cleanly before the failure.
    pass
  # Need at least header + one order + trailing EOD record to be meaningful.
  if len(records) > 2:
    key, sstat = run_on_records(records[:-1],
                                account_info_eod=records[-1].account_info,
                                compress=compress,
                                last_ending_ts=last_ending_ts)
    if key is not None and sstat is not None:
      if flags.FLAGS.use_bbo:
        bbodf = sstat['combdf'][['timestamp', 'bid0_price', 'ask0_price']].copy()
        bbodf['mid_price'] = 0.5 * (bbodf['ask0_price'] + bbodf['bid0_price'])
      else:
        bbodf = pandas.DataFrame()
      sstat['bbodf'] = bbodf
      sstats_dict[key].append(sstat)
  return (sstats_dict, 0 if len(records) == 0 else records[-1].account_info.sim_elapsed_time)

def get_stats_dict(*args, **kwargs):
  """Like get_stats_dict_unroll, but discards the sim-elapsed-time component."""
  stats, _elapsed = get_stats_dict_unroll(*args, **kwargs)
  return stats

def axvlines(xs, *args, **kwargs):
  """Draw one vertical line per x in xs, forwarding styling args to plt.axvline."""
  for xpos in xs:
    plt.axvline(xpos, *args, **kwargs)


def get_pnlrows(pnldf, fxrate_sr, interval):
  """Bucket pnldf into fixed time intervals and return per-bucket USD rows.

  Args:
    pnldf: frame with 'timestamp' (ns), 'pnl_net', 'mtm_price', 'open_cash'.
      NOTE: mutated in place — 'timestamp_interval' and 'fxrate' columns added.
    fxrate_sr: series mapping datetime.date -> fx rate applied to pnl/cash.
    interval: bucket width in nanoseconds.

  Returns a frame with 'timestamp_interval', 'mtm_price', 'pnl_diff(USD)'
  and 'open_cash(USD)' (last bucket's open cash is NaN by construction).
  """
  pnldf.loc[:, 'timestamp_interval'] = ((pnldf['timestamp'] / interval).astype(int) *
                                        interval).astype(int)
  pnldf['fxrate'] = pandas.to_datetime(pnldf['timestamp_interval']).dt.date.map(fxrate_sr)
  # Single groupby pass instead of four identical ones; GroupBy.first picks
  # the first non-null value per column, same as the per-column calls did.
  grp = pnldf.groupby('timestamp_interval').first()
  grpfx = grp['fxrate']
  pnldiff = grp['pnl_net'].diff()
  return pandas.DataFrame({
      'timestamp_interval': pnldiff.index,
      'mtm_price': grp['mtm_price'],
      'pnl_diff(USD)': pnldiff.fillna(0) * grpfx,
      'open_cash(USD)': grp['open_cash'].iloc[:-1] * grpfx.iloc[:-1]
  })


def get_sharpe(pnldf, interval):
  """Compute (sharpe, sortino) over per-interval pnl increments.

  pnldf gets a 'tsgrp' column added in place. Either ratio degrades to 0
  when its std is zero or undefined.
  """
  pnldf.loc[:, 'tsgrp'] = (pnldf['timestamp'] / interval).astype(int)
  increments = pnldf.groupby('tsgrp').first()['pnl_net'].diff()
  mean = numpy.mean(increments)
  std = numpy.std(increments)
  valid = increments[~increments.isna()]
  if len(valid) == 0:
    downside_std = 0
  else:
    # Downside deviation: std of increments below the median increment.
    downside_std = numpy.std(increments[increments < numpy.median(valid)])
  sharpe = mean / std if std > 0 else 0
  sortino = mean / downside_std if downside_std > 0 else 0
  return sharpe, sortino


def chunks(args, n):
  """Yield successive n-sized slices of args (last slice may be shorter)."""
  start = 0
  while start < len(args):
    yield args[start:start + n]
    start += n


def print_dataframe(dataframe):
  """Print a dataframe in groups of 10 columns, hiding the bulky 'pnlvec_120min'."""
  print("=" * 200)
  for colgrp in chunks(dataframe.columns, 10):
    print("-" * 100)
    keep = list(set(colgrp) - set(['pnlvec_120min']))
    print(dataframe[keep].to_string())
  print("\n")


def get_stats(apnldf, grpdf, pnl_ccy, convert_ccy=True, merged=None):
  """Fold per-row pnl (apnldf) and per-(symbol, date) sums (grpdf) into one stat Series.

  Returns the grpdf column sums augmented with derived metrics: return on
  equity, fill/participation ratios, per-interval sharpe/sortino, mdd, and
  (for non-USD pnl currencies) USD-converted pnl figures.

  NOTE(review): the `merged` parameter is unconditionally overwritten below
  and therefore has no effect on callers.
  """
  # Mid-term ("focus_mt") flavor uses 4h/1d sampling; default is 5min/30min.
  interval_mins = [240, 1440] if flags.FLAGS.flag_values_dict().get('is_focus_mt', False) else [5, 30]
  pnldf = apnldf[['order_id', 'timestamp', 'pnl_diff', 'pnl_diff_pureoos', 'fill_cash_cumsum']].copy()
  pnldf['pnl_net'] = pnldf['pnl_diff'].cumsum()
  pnldf['pnl_net_pureoos'] = pnldf['pnl_diff_pureoos'].cumsum()
  sharpes = [get_sharpe(pnldf, interval * 60e9) for interval in interval_mins]
  # Maximum drawdown of the cumulative pnl curve.
  mdd = (pnldf['pnl_net'].cummax() - pnldf['pnl_net']).max()

  pnl_ccy_postfix = f"({pnl_ccy})" if convert_ccy else ""

  # Column-wise sums of the per-day rows form the base of the stat Series.
  stat = grpdf[[
      f'pnl_net{pnl_ccy_postfix}',
      f'pnl_gross{pnl_ccy_postfix}',
      f'pnl_fee{pnl_ccy_postfix}',
      f'funding_fee{pnl_ccy_postfix}',
      f'fill_amt{pnl_ccy_postfix}',
      f'taker_amt{pnl_ccy_postfix}',
      f'maker_amt{pnl_ccy_postfix}',
      f'sub_amt{pnl_ccy_postfix}',
      'sub_count',
      f'market_volume_amt{pnl_ccy_postfix}',
      'flip_count',
      'fill_count',
      f'sub_amt_filled{pnl_ccy_postfix}',
  ]].sum()

  # Re-derive `merged`: true when a (symbol, date) pair appears more than once,
  # i.e. rows from multiple runs were combined.
  merged = False
  if 'symbol' in grpdf and 'date' in grpdf and grpdf.groupby(['symbol', 'date']).size().max() > 1:
    merged = True

  if merged:
    # Market volume was double-counted by the sum above; rebuild it counting
    # each symbol's widest date-span row exactly once.
    stat[f'market_volume_amt{pnl_ccy_postfix}'] = 0
    for _, symdf in grpdf.groupby(['symbol']):
      idx = (symdf['date'] == symdf['date'].min()) & (symdf['date_max'] == symdf['date_max'].max())
      # dang complicated.. actually data is simply not granule enough.
      if idx.sum() >= 1:
        stat[f'market_volume_amt{pnl_ccy_postfix}'] += symdf[
            f'market_volume_amt{pnl_ccy_postfix}'].loc[idx].iloc[0]
      '''
      let's say 99 strats:
      (Pdb) grpdf
            date    date_max                      symbol
  0    2023-06-08  2023-06-12  Binance_MTL-USDT.PERPETUAL
  1    2023-06-08  2023-06-12  Binance_MTL-USDT.PERPETUAL
  2    2023-06-08  2023-06-12  Binance_MTL-USDT.PERPETUAL
  3    2023-06-08  2023-06-12  Binance_MTL-USDT.PERPETUAL
  4    2023-06-08  2023-06-12  Binance_MTL-USDT.PERPETUAL
  ..          ...         ...                         ...
  96   2023-06-08  2023-06-12  Binance_MTL-USDT.PERPETUAL 1
  97   2023-06-08  2023-06-12  Binance_MTL-USDT.PERPETUAL
  98   2023-06-08  2023-06-12  Binance_MTL-USDT.PERPETUAL'''

  stat['peak_sub_count'] = grpdf['peak_sub_count'].max()
  stat['min_open_pos'] = apnldf['open_cash'].min()
  stat['max_open_pos'] = apnldf['open_cash'].max()
  stat['max_abs_open_pos'] = apnldf['open_cash_abs'].max()
  stat[f'required_margin{pnl_ccy_postfix}'] = grpdf[f'required_margin{pnl_ccy_postfix}'].sum()
  # ROE: total pnl over the larger one-sided exposure.
  stat['return_on_equity'] = \
      pnldf['pnl_diff'].sum() / max(-stat['min_open_pos'], stat['max_open_pos'])

  # Split pnl by position sign at the time of the fill.
  stat[f'+pos_pnl{pnl_ccy_postfix}'] = apnldf[apnldf.open_cash >= 0]['pnl_diff'].sum()
  stat[f'-pos_pnl{pnl_ccy_postfix}'] = apnldf[apnldf.open_cash < 0]['pnl_diff'].sum()
  # Time-weighted averages of (signed and absolute) open position.
  time_weighted_pos = apnldf.timestamp.diff() * apnldf.open_cash
  stat['time_weighted_pos'] = time_weighted_pos.fillna(0).sum() \
      / (apnldf.timestamp.max() - apnldf.timestamp.min())
  time_weighted_abs_pos = apnldf.timestamp.diff() * apnldf.open_cash_abs
  stat['time_weighted_abs_pos'] = time_weighted_abs_pos.fillna(0).sum() \
      / (apnldf.timestamp.max() - apnldf.timestamp.min())
  stat['full_fill_ratio'] = (stat[f'fill_amt{pnl_ccy_postfix}'] / stat[f'sub_amt_filled{pnl_ccy_postfix}'])

  if stat[f'market_volume_amt{pnl_ccy_postfix}'] == 0:
    stat['participation_ratio'] = numpy.nan
  else:
    stat['participation_ratio'] = stat[f'fill_amt{pnl_ccy_postfix}'] / stat[f'market_volume_amt{pnl_ccy_postfix}']
  if convert_ccy:
    del stat[f'sub_amt_filled{pnl_ccy_postfix}']
    del stat[f'market_volume_amt{pnl_ccy_postfix}']
  else:
    stat['duration'] = grpdf.groupby('date')['duration'].max().sum()

  if stat[f'fill_amt{pnl_ccy_postfix}'] > 0:
    stat['net_ret(bps)'] = stat[f'pnl_net{pnl_ccy_postfix}'] / stat[f'fill_amt{pnl_ccy_postfix}'] * 1e4
    stat['fee_rate(bps)'] = stat[f'pnl_fee{pnl_ccy_postfix}'] / stat[f'fill_amt{pnl_ccy_postfix}'] * 1e4
  else:
    stat['net_ret(bps)'] = 0
    stat['fee_rate(bps)'] = 0
  if stat[f'sub_amt{pnl_ccy_postfix}'] == 0:
    stat['fill_ratio'] = numpy.nan
  else:
    stat['fill_ratio'] = stat[f'fill_amt{pnl_ccy_postfix}'] / stat[f'sub_amt{pnl_ccy_postfix}']
  for minutes, sharpe in zip(interval_mins, sharpes):
    sharpe, sortino = sharpe
    stat[f'sharpe_{minutes}min'] = sharpe
    stat[f'sortino_{minutes}min'] = sortino
  stat[f'mdd{pnl_ccy_postfix}'] = mdd
  #if not merged:
  #  stat['#orders/min'] = (grpdf['sub_count'].sum() /
  #                        grpdf.groupby('date')['duration'].max().sum()) * 60e9
  #  stat['duration_day'] = grpdf.groupby('date')['duration'].max().sum() / 1e9 / 24 / 3600

  stat['ccyrate'] = 1
  if f'active_pnl({pnl_ccy_postfix})' in grpdf.columns:
    stat[f'active_pnl({pnl_ccy_postfix})'] = grpdf[f'active_pnl({pnl_ccy_postfix})'].sum()
  # Non-USD pnl currencies additionally get daily-fx-converted USD figures.
  if pnl_ccy != 'USD':
    dts = pandas.to_datetime(pnldf['timestamp']).dt.date.drop_duplicates().tolist()
    fxrate_sr = get_fxrate(pnl_ccy, tuple(dts))
    if not fxrate_sr.empty:
      tmp_pnldf = pnldf[['timestamp', 'pnl_diff', 'fill_cash_cumsum']].copy()
      tmp_pnldf['date'] = pandas.to_datetime(tmp_pnldf['timestamp']).dt.date
      tmp_pnldf['ccyrate'] = 1.0 / tmp_pnldf['date'].map(fxrate_sr).ffill().bfill()
      tmp_pnldf['pnl_diff(USD)'] = tmp_pnldf['pnl_diff'] * tmp_pnldf['ccyrate']
      tmp_pnldf['pnl_net(USD)'] = tmp_pnldf['pnl_diff(USD)'].cumsum()
      tmp_pnldf['fill_amt(USD)'] = tmp_pnldf['fill_cash_cumsum'] * tmp_pnldf['ccyrate']
      stat['pnl_net(USD)'] = tmp_pnldf['pnl_net(USD)'].iloc[-1]
      stat['fill_amt(USD)'] = (tmp_pnldf['fill_cash_cumsum'].diff().fillna(0.0) *
                                tmp_pnldf['ccyrate']).sum()
      stat['mdd(USD)'] = (tmp_pnldf['pnl_net(USD)'].cummax() - tmp_pnldf['pnl_net(USD)']).max()
      stat['ccyrate'] = tmp_pnldf['ccyrate'].mean()

  if flags.FLAGS.flag_values_dict().get('unfold_stat', False):
    print(apnldf['from_symbol'].unique().tolist())
    print_dataframe(stat.to_frame().T)
  return stat


@functools.lru_cache(maxsize=None)
def get_fxrate(pnl_ccy: str, dts: tuple) -> pandas.Series:
  """Return a date-indexed exchange-rate series for pnl_ccy vs USD (cached).

  USD maps to a constant 1.0 series; an empty Series is returned when the
  rate lookup yields nothing. Gaps are forward/backward filled before the
  final inversion.
  """
  if pnl_ccy == "USD":
    return pandas.Series(1.0, index=dts)
  rates = BalanceChecker.query_exchange_rates_by_dates(pnl_ccy, "USD", list(dts))
  if not rates:
    return pandas.Series()
  filled = pandas.Series(rates).reindex(dts).sort_index().ffill().bfill()
  return 1.0 / filled


def groupby_first_fast(df, groupby):
  # Get rid of missing values that break nth.
  df = df[~(df[groupby].isnull())]
  # Use nth instead of first for speed.
  return df.groupby(groupby).nth(0)


def extract_info_from_key(key):
  """Split a dotted key into (base, remainder).

  Any '.'-segment containing '-' is treated as "base-quote": its base part
  becomes the returned base (last such segment wins) and only the quote part
  is kept in the remainder. Without such a segment, base is the whole key.
  """
  base = key
  kept = []
  for segment in key.split('.'):
    if '-' not in segment:
      kept.append(segment)
      continue
    base, quote = segment.split('-')
    kept.append(quote)
  return base, ".".join(kept)

def fmt_skip_dates(plt, formator_offset):
  """Wrap the current x-axis formatter so tick labels are shifted back by the
  cumulative skipped-day offset (see hide_skipped_dates handling)."""
  base_formatter = plt.gca().xaxis.get_major_formatter()

  def shifted(x, pos=None):
    # Locate the offset bucket for x, clamping to the last bucket.
    bucket = min(bisect.bisect(formator_offset.index, x), len(formator_offset) - 1)
    return base_formatter(x + formator_offset.iloc[bucket], pos)

  plt.gca().xaxis.set_major_formatter(matplotlib.ticker.FuncFormatter(shifted))

def get_diff(seri_cum):
  """Invert a cumulative-sum series: element-wise deltas, first element kept as-is."""
  deltas = seri_cum - seri_cum.shift(1)
  deltas.iloc[0] = seri_cum.iloc[0]
  return deltas

def get_aggregated_sstats(
    sstats_dict,
    hide_skipped_dates=False,
    squeeze=False,
    convert_ccy=True):
  """Merge per-symbol sim stats into combined pnl frames and summary tables.

  Args:
    sstats_dict: mapping stratname -> list of stat dicts (each with 'combdf',
      'pnl_stat', 'currency_info', 'symbol_info', optionally 'timedret').
    hide_skipped_dates: compress gaps of whole skipped days out of the time
      axis, recording the removed offsets in `formator_offset`.
    squeeze: collapse the total frame to one row per timestamp.
    convert_ccy: convert per-day pnl_stat amounts into the pnl currency.

  Returns a dict with combdfs, pnlgrps, pnl_ccys, pnl_currency,
  formator_offset, timedrets, symbol_infos, prtstats, group_stat, totdf.
  NOTE(review): `pnl_currency` and `group_stat` are only assigned inside the
  loops/branches above the return — with empty input the return raises
  NameError; verify callers never pass an empty dict.
  """
  pnlgrps = {}
  combdfs = []
  rows = []
  pnl_ccys = []
  timedrets = []
  symbol_infos = {}
  symbol_infos['total'] = None
  formator_offset = None
  for stratname, sstats in sstats_dict.items():
    pnldfs = []
    for sstat in sstats:
      pnldf = sstat['combdf'].copy()
      if len(pnldf) == 0:
        continue

      timedret = sstat.get('timedret', None)
      if timedret is not None:
        timedrets.append(timedret)

      assert (~numpy.isnan(pnldf['timestamp'])).all()

      # "rebuilt" path: frame only carries open_cash_diff and must have
      # open_cash reconstructed by cumulative sum.
      rebuilt = False
      if 'open_cash' not in pnldf and 'open_cash_diff' in pnldf:
        rebuilt = True
        ocd_backup = pnldf['open_cash_diff'].copy()
        ocsum = pnldf['open_cash_diff'].sum()
        pnldf['open_cash'] = pnldf['open_cash_diff'].cumsum()
      else:
        # Keep only rows where pnl changes (plus the final row).
        pnldf = pnldf.loc[(pnldf['pnl_net'].diff() != 0) | (pnldf.index >= pnldf.shape[0] - 1)].copy()
      pnldf['fill_cash_cumsum'] = pnldf['fill_cash'].cumsum()

      # Large frames are downsampled to one row per minute, keeping an
      # order-count column for the dropped detail.
      if not rebuilt and len(pnldf) > 20000:
        pnldf['index'] = pandas.to_datetime(pnldf.timestamp).dt.floor(freq='60s')
        order_cnt = groupby_first_fast(pnldf, 'order_id').groupby('index')['timestamp'].count()
        pnldf = pnldf.groupby('index').first()
        pnldf['order_cnt'] = order_cnt
      pnldf = pnldf.reset_index(drop=True)

      if not rebuilt and numpy.isnan(pnldf['timestamp']).any():
        pnldf = pnldf.loc[~numpy.isnan(pnldf['timestamp'])].reset_index(drop=True)

      if rebuilt:
        # Re-derive diffs and sanity-check they still sum to the original total.
        pnldf['open_cash_diff'] = pnldf['open_cash'].diff(1).fillna(0)
        pnldf.loc[0, 'open_cash_diff'] = pnldf.loc[0, 'open_cash']
        try:
          assert (
              ((pnldf['open_cash_diff'].sum() - ocsum) < 1.) or
              numpy.isclose(pnldf['open_cash_diff'].sum(), ocsum)), sys.argv
        except:
          # Drop into the debugger for the author only; everyone else re-raises.
          if os.environ.get("USER") in ['jhkim']:
            print(pnldf['open_cash_diff'].cumsum())
            import pdb; pdb.set_trace()
          raise
        del pnldf['open_cash']

      pnldf['pnl_diff'] = pnldf['pnl_net'].diff().fillna(0)
      if 'active_pnl' in pnldf.columns:
        pnldf['active_pnl_diff'] = pnldf['active_pnl'].diff().fillna(0)

      ccyinfo = sstat['currency_info']
      coin_to_fiat_mult = 1

      # Resolve the pnl currency; coin-margined products (pnl in base ccy)
      # are converted to quote terms via the mean mtm price.
      pnl_currency = ccyinfo['quote_currency']
      if ccyinfo['pnl_currency'] != ccyinfo['quote_currency']:
        if ccyinfo['pnl_currency'] != ccyinfo['base_currency']:
          # bitmex ETH-USD.PERPETUAL case
          pnl_currency = ccyinfo['pnl_currency']
        else:
          coin_to_fiat_mult = pnldf['mtm_price'].mean()
          pnldf['pnl_diff'] *= coin_to_fiat_mult

      pnldf['pnl_diff_pureoos'] = pnldf['pnl_diff'].copy()

      # Zero out pnl on in-sample training dates for the "pure OOS" series.
      symbol_info = sstat['symbol_info']
      if symbol_info is not None:
        train_dates = pandas.DatetimeIndex(
            [str(i) for i in symbol_info.strat_aux_info.dates_train_is])
        dti = pandas.DatetimeIndex(pnldf['timestamp'])
        for train_date in train_dates:
          idx_beg = bisect.bisect_left(dti, train_date)
          idx_end = bisect.bisect_left(dti, train_date + pandas.Timedelta(days=1))
          pnldf.loc[idx_beg:idx_end, 'pnl_diff_pureoos'] = 0

      pnldf['from_symbol'] = stratname
      pnldfs.append(pnldf)

      # One per-(symbol, date-range) summary row feeding get_stats later.
      dates = pandas.DatetimeIndex(sstat['combdf']['timestamp']).date
      rows.append({'date': dates.min(), 'date_max': dates.max(), 'symbol': stratname})
      pnl_ccy = pnl_currency
      pnl_ccys = list(set(pnl_ccys + [pnl_ccy]))

      rows[-1].update(sstat['pnl_stat'])
      if convert_ccy:
        # Replace raw pnl_stat amounts with currency-suffixed, converted ones.
        rows[-1][f'pnl_net({pnl_ccy})'] = (sstat['pnl_stat']['pnl_net'] * coin_to_fiat_mult)
        rows[-1][f'pnl_gross({pnl_ccy})'] = (sstat['pnl_stat']['pnl_gross'] * coin_to_fiat_mult)
        rows[-1][f'pnl_fee({pnl_ccy})'] = (sstat['pnl_stat']['pnl_fee'] * coin_to_fiat_mult)
        rows[-1][f'funding_fee({pnl_ccy})'] = (sstat['pnl_stat']['funding_fee'] * coin_to_fiat_mult)
        rows[-1][f'required_margin({pnl_ccy})'] = (sstat['pnl_stat']['required_margin'] *
                                                  coin_to_fiat_mult)
        if 'active_pnl' in sstat['pnl_stat']:
          rows[-1][f'active_pnl({pnl_ccy})'] = (sstat['pnl_stat']['active_pnl'] * coin_to_fiat_mult)
        cols_amount = [
            'fill_amt', 'taker_amt', 'maker_amt', 'sub_amt', 'sub_amt', 'sub_amt_filled',
            'market_volume_amt'
        ]
        for col_convert in cols_amount:
          rows[-1][f'{col_convert}({pnl_ccy})'] = (sstat['pnl_stat'][col_convert] *
                                                  sstat['pnl_stat']['multiplier'])
        for rm_col in [
            'multiplier', 'grs_ret', 'sharpe', 'pnl_net', 'pnl_gross', 'pnl_fee', 'funding_fee',
            'active_pnl'
        ] + cols_amount:
          if rm_col in rows[-1]:
            del rows[-1][rm_col]

    if len(pnldfs) == 0:
      continue
    # Chain fill_cash_cumsum across the per-run frames so it stays monotone.
    base = 0
    for pnldf in pnldfs:
      pnldf['fill_cash_cumsum'] = pnldf['fill_cash_cumsum'] + base
      base = pnldf.iloc[-1]['fill_cash_cumsum']
    combdf = pandas.concat(pnldfs, axis=0).sort_values(['timestamp']).reset_index(drop=True)
    if hide_skipped_dates:
      # Shift timestamps left over whole skipped days; keep the removed
      # offsets so fmt_skip_dates can restore label values.
      days_skip = numpy.floor(combdf['timestamp'].diff(1) / (1e9 * 3600 * 24)).fillna(0)
      formator_offset = days_skip.fillna(0).cumsum()
      ts_offset = formator_offset * (1e9 * 3600 * 24)
      combdf['timestamp'] -= ts_offset
      formator_offset.index = pandas.to_datetime(combdf['timestamp']).apply(mdates.date2num)

    combdfs.append(combdf)
    if 'open_cash' not in combdf and 'open_cash_diff' in combdf:
      combdf['open_cash'] = combdf['open_cash_diff'].cumsum()
    combdf['open_cash_abs'] = combdf['open_cash'].abs()
    combdf['pnl_net'] = combdf['pnl_diff'].cumsum()
    if 'active_pnl_diff' in combdf.columns:
      combdf['active_pnl'] = combdf['active_pnl_diff'].cumsum()
    combdf['pnl_net_pureoos'] = combdf['pnl_diff_pureoos'].cumsum()
    combdf['volume_diff'] = combdf['fill_cash_cumsum'].diff()
    combdf.loc[0, 'volume_diff'] = combdf.loc[0, 'fill_cash']
    # Force a flat position at the very end of the combined frame.
    combdf.loc[combdf.iloc[-1].name, 'open_cash'] = 0
    combdf.loc[combdf.iloc[-1].name, 'open_cash_abs'] = 0
    # NOTE(review): fillna(method='ffill') is deprecated in newer pandas; .ffill() is the replacement.
    combdf['open_cash_abs_diff'] = combdf['open_cash_abs'].fillna(method='ffill').diff()
    combdf['open_cash_diff'] = combdf['open_cash'].fillna(method='ffill').diff()
    combdf.loc[0, 'open_cash_diff'] = combdf.loc[0, 'open_cash']
    combdf.loc[0, 'open_cash_abs_diff'] = combdf.loc[0, 'open_cash_abs']
    # NOTE(review): `sstat` here is the loop variable left over from the inner
    # loop, i.e. the LAST sstat of this strat — confirm that is intended.
    symbol_infos[stratname] = sstat['symbol_info']
    pnlgrps_col = [
        'order_id',
        'timestamp',
        'pnl_net',
        'pnl_net_pureoos',
        'open_cash',
        'fill_cash',
        'fill_cash_cumsum',
        'mtm_price',
    ] + ["submit_max_rate_used_ratio"] * ('submit_max_rate_used_ratio' in combdf.columns
    ) + ["cancel_max_rate_used_ratio"] * ('cancel_max_rate_used_ratio' in combdf.columns)
    if 'active_pnl' in combdf.columns:
      pnlgrps_col.append('active_pnl')
    pnlgrps[stratname] = combdf[pnlgrps_col].copy()
  totdf = None
  if len(combdfs) > 0:
    # Portfolio-level frame: all strats interleaved by timestamp, with
    # cumulative columns rebuilt from the per-row diffs.
    totdf = pandas.concat(combdfs, axis=0,
                          sort=False).sort_values(['timestamp']).reset_index(drop=True)
    totdf['pnl_net'] = totdf['pnl_diff'].cumsum()
    totdf['pnl_net_pureoos'] = totdf['pnl_diff_pureoos'].cumsum()
    totdf['fill_cash_cumsum'] = totdf['volume_diff'].cumsum()
    totdf['open_cash'] = totdf['open_cash_diff'].cumsum()
    totdf['open_cash_abs'] = totdf['open_cash_abs_diff'].cumsum()

    if squeeze:
      totdf = totdf.groupby(['timestamp']).last().reset_index()
      totdf['pnl_diff'] = get_diff(totdf['pnl_net'])
      totdf['open_cash_diff'] = get_diff(totdf['open_cash'])

    # pnl curve restarted from zero in 5 equal chunks (drift visualization).
    diff2 = totdf['pnl_diff'].to_numpy()
    totdf['pnl_net_reset'] = numpy.hstack([numpy.nancumsum(a) for a in numpy.array_split(diff2, 5)])
    totdf_col = [
        'order_id', 'timestamp', 'pnl_net', 'pnl_net_pureoos', 'pnl_net_reset', 'open_cash',
        'open_cash_abs', 'fill_cash_cumsum', 'fill_cash'
    ] + ["submit_max_rate_used_ratio"] * ('submit_max_rate_used_ratio' in totdf.columns
    ) + ["cancel_max_rate_used_ratio"] * ('cancel_max_rate_used_ratio' in totdf.columns)
    if 'active_pnl_diff' in totdf.columns:
      totdf['active_pnl'] = totdf['active_pnl_diff'].cumsum()
      totdf_col.append('active_pnl')
    pnlgrps['total'] = \
        totdf[totdf_col].copy()

  # Summary stats per symbol and for the total, in the (single) pnl currency.
  prtstats = {}
  if len(pnl_ccys) > 0:
    pnl_ccy = pnl_ccys[0]
    group_df = pandas.DataFrame(rows).sort_values(['date', 'symbol']).reset_index(drop=True)
    for symbol, atotdf in totdf.groupby('from_symbol'):
      symbol_df = group_df.loc[group_df['symbol'] == symbol]
      astat = get_stats(atotdf, symbol_df, pnl_ccy, convert_ccy=convert_ccy)
      prtstats[symbol] = astat
    groupstat = get_stats(totdf, group_df, pnl_ccy, convert_ccy=convert_ccy)
    group_stat = pandas.Series(groupstat)
    prtstats['Total'] = groupstat
    dts = pandas.to_datetime(totdf['timestamp']).dt.date.drop_duplicates().tolist()
    fxrate_sr = 1.0 / get_fxrate(pnl_ccy, tuple(dts))
    prtstats['Total']['pnlvec_120min'] = get_pnlrows(totdf, fxrate_sr,
                                                     120 * 60e9)['pnl_diff(USD)'].to_list()
    prtstats['Total']['pnlvec_rows'] = get_pnlrows(totdf, fxrate_sr, 10 * 60e9).to_dict('list')
    if hasattr(flags.FLAGS, "include_option_pnl") and flags.FLAGS.include_option_pnl:
      tmp_df = totdf[['timestamp', 'from_symbol', 'pnl_diff', 'mtm_price', 'open_pos']].copy()
      tmp_df['pnl_diff(USD)'] = tmp_df['pnl_diff'] * fxrate_sr
      prtstats['Total']['each_pnl_df'] = tmp_df
    # Collect train dates (IS + OOS) from the first symbol's aux info, if any.
    if symbol_infos[rows[0]['symbol']] is not None:
      train_dates = pandas.concat([
          pandas.Series(symbol_infos[rows[0]['symbol']].strat_aux_info.dates_train_is).astype(str),
          pandas.Series(symbol_infos[rows[0]['symbol']].strat_aux_info.dates_train_oos).astype(str)
      ])
      prtstats['Total']['train_date'] = sorted(train_dates.drop_duplicates().to_list())
    else:
      prtstats['Total']['train_date'] = []

  return dict(
    combdfs=combdfs,
    pnlgrps=pnlgrps,
    pnl_ccys=pnl_ccys,
    pnl_currency=pnl_currency,
    formator_offset=formator_offset,
    timedrets=timedrets,
    symbol_infos=symbol_infos,
    prtstats=prtstats,
    group_stat=group_stat,
    totdf=totdf)

def plot_aggregated_pnl(sstats_dict,
                        real_scale_t_axis=False,
                        xlim=None,
                        print_df=False,
                        hide_skipped_dates=False,
                        sim_elapsed_time=None,
                        grouped=False,
                        plot_api_rate_used=True,
                        plot_fill_intensity=True):
  """Render the aggregated PnL summary onto the current matplotlib figure.

  Aggregates per-strategy sim stats via get_aggregated_sstats and lays out a
  10-row subplot grid (top to bottom): net pnl (+ normalized mtm prices on a
  twin axis), oos net pnl, optional api-rate-used and fill-intensity panels,
  per-key open position, total position/GMV, and a formatted stats table.

  Args:
    sstats_dict: mapping of strategy key -> sequence of per-day sim stats.
    real_scale_t_axis: if True the x axis is real timestamps; otherwise an
      integer row index into the combined timestamp series.
    xlim: optional (lo, hi) applied to the time subplots.
    print_df: if True, print the per-product stats table to stdout.
    hide_skipped_dates: if True, apply the skipped-date axis formatter.
    sim_elapsed_time: optional sim wall-clock duration in ns, used to derive
      the sim_elapsed_sec_per_day stat.
    grouped: if True, collapse rows that share a timestamp (keep last).
    plot_api_rate_used: include the api-rate-used subplot.
    plot_fill_intensity: include the fill-intensity subplot.

  Returns:
    Tuple (timedret, total_stats): concatenated timed-return DataFrame (or
    None when there are none) and prtstats['Total'] from the aggregation.

  Side effects:
    Draws on the global pyplot figure; calls sys.exit(0) when there are no
    fills at all; may write FLAGS.stat_csv.
  """
  # NOTE(review): pnl_currency is immediately overwritten from ret below;
  # this initial None assignment appears redundant.
  pnl_currency = None
  ret = get_aggregated_sstats(sstats_dict, hide_skipped_dates=hide_skipped_dates)
  combdfs = ret['combdfs']
  pnlgrps = ret['pnlgrps']
  pnl_ccys = ret['pnl_ccys']
  pnl_currency = ret['pnl_currency']
  prtstats = ret['prtstats']
  formator_offset = ret['formator_offset']
  timedrets = ret['timedrets']
  totdf = ret['totdf']
  group_stat = ret['group_stat']

  pandas.options.display.float_format = '{:,.6f}'.format
  plt.rcParams["figure.figsize"] = 15, 17
  ssplot.setup_plt()

  # no order fill
  if len(combdfs) == 0:
    sys.exit(0)

  assert len(pnl_ccys) <= 1, pnl_ccys

  if len(pnl_ccys) > 0:
    try:
      if sim_elapsed_time is not None:
        group_stat["sim_elapsed_sec_per_day"] = (
            sim_elapsed_time * 1e-9) / group_stat["duration_day"]
    except:
      # NOTE(review): bare except silently drops any failure here (e.g. a
      # missing "duration_day" key) — consider narrowing to KeyError and
      # logging; confirm intent before changing.
      pass
    if hasattr(flags.FLAGS, "sim_result_prefix") and print_df:
      print(f'{flags.FLAGS.sim_result_prefix}_Combined')
      print_dataframe(pandas.DataFrame(prtstats).T)

    if hasattr(flags.FLAGS, "stat_csv") and flags.FLAGS.stat_csv is not None:
      prtdf = pandas.DataFrame(prtstats).T
      prtdf['sim_prefix'] = flags.FLAGS.sim_result_prefix
      prtdf['strat_prefix'] = flags.FLAGS.strat_prefix
      prtdf.to_csv(flags.FLAGS.stat_csv, index_label='product')

    # Stats table occupies the bottom two grid rows; the single-column stat
    # series is split into four side-by-side columns to fit.
    plt.subplot(10, 1, (9, 10))
    format_frame = group_stat.map('{:,.4f}'.format).to_frame()
    split_df = lambda df, index: (df.iloc[:index, :], df.iloc[index:, :])
    idx1 = numpy.ceil(format_frame.shape[0] / 4).astype(int)
    frame1, frame234 = split_df(format_frame, idx1)
    frame2, frame34 = split_df(frame234, idx1)
    frame3, frame4 = split_df(frame34, idx1)
    fframe = pandas.concat(
        [frame1.reset_index(),
         frame2.reset_index(),
         frame3.reset_index(),
         frame4.reset_index()],
        axis=1)
    fframe = fframe.fillna("")
    width1 = 0.05
    tbl = plt.table(fframe.values, colWidths=[width1] * 8, fontsize=13, loc="center")
    for key, cell in tbl.get_celld().items():
      cell.PAD = 0.001
    tbl.scale(2.6, 1.8)
    plt.axis('off')

  # Row indices where the calendar date changes — used to draw day separators
  # and date labels on the index-based x axis.
  dti = pandas.DatetimeIndex(totdf['timestamp'])
  dt = dti.date
  datesplit = numpy.where(numpy.diff(dt) > datetime.timedelta(0))[0]
  dtidx = list(numpy.linspace(0, len(dti) - 1, 10).astype(int))

  if grouped:
    totdf = totdf.groupby('timestamp').last().reset_index()
    for key in list(pnlgrps.keys()):
      pnlgrps[key] = pnlgrps[key].groupby('timestamp').last().reset_index()

  # Attach an x coordinate ('tindex') to every pnl group: real timestamps, or
  # each group's position bisected into the combined timestamp array.
  for i, (key, pnldf) in enumerate(pnlgrps.items()):
    dti_i = pandas.DatetimeIndex(pnldf['timestamp'])
    if real_scale_t_axis:
      pnldf.loc[:, 'tindex'] = dti_i
    else:
      index = sscalc2.bisect_batch(numpy.array(dti_i), numpy.array(dti))
      pnldf.loc[:, 'tindex'] = index

  cols = [
      'tab:blue', 'tab:orange', 'tab:green', 'tab:red', 'tab:purple', 'tab:brown', 'tab:pink',
      'tab:gray', 'tab:olive', 'tab:cyan'
  ]
  keys = []

  # Row 8: total open position converted to USD; long in green, short in red,
  # gross (absolute) exposure in blue.
  ax1 = plt.subplot(10, 1, 8)
  tot_df = pnlgrps['total'].copy()
  unique_dt = numpy.unique(dt).tolist()
  fx_sr = pandas.to_datetime(tot_df['timestamp']).dt.date.map(
      1.0 / get_fxrate(pnl_currency, tuple(unique_dt)))
  tot_df['open_cash'] *= fx_sr
  tot_df['open_cash_abs'] *= fx_sr
  poscash = tot_df['open_cash'].copy()
  poscash[(poscash < 0)] = numpy.nan
  negcash = tot_df['open_cash'].copy()
  negcash[(negcash > 0)] = numpy.nan
  plt.plot(tot_df['tindex'].to_numpy(),
           poscash.to_numpy(),
           'g-',
           tot_df['tindex'].to_numpy(),
           negcash.to_numpy(),
           'r-',
           tot_df['tindex'].to_numpy(),
           tot_df['open_cash_abs'].to_numpy(),
           'b-',
           linewidth=0.5,
           drawstyle='steps-post')
  plt.fill_between(tot_df['tindex'].to_numpy(),
                   tot_df['open_cash'] * 0,
                   tot_df['open_cash'],
                   where=tot_df['open_cash'] > 0,
                   facecolor='green',
                   alpha=0.2,
                   lw=0.5)
  plt.fill_between(tot_df['tindex'].to_numpy(),
                   tot_df['open_cash'] * 0,
                   tot_df['open_cash'],
                   where=tot_df['open_cash'] < 0,
                   facecolor='red',
                   alpha=0.2,
                   lw=0.5)
  plt.xticks([])
  plt.ylabel('sum pos(USD)')
  plt.legend(['+POS', '-POS', 'GMV'])
  if xlim is not None:
    plt.xlim(xlim[0], xlim[1])
  # Remember this panel's x range so later panels can be aligned to it.
  xlim_extract = ax1.get_xlim()

  # Rotate so 'total' (appended last by the aggregation) is plotted first with
  # the thicker line.
  pnlkeys = list(pnlgrps.keys())
  pnlkeys = pnlkeys[-1:] + pnlkeys[:-1]

  def timelines(y, xstart, xstop, color='b', depth=0.01):
    # Horizontal segments marking time intervals (used by the disabled
    # train-insample plot below).
    plt.hlines(y, xstart, xstop, color, lw=2)

  # NOTE(review): the triple-quoted block below is disabled code (train
  # in-sample interval plot) kept as a bare string expression — dead code.
  '''
  dt_plot_count = 0
  for i, symbol_info in enumerate(symbol_infos.values()):
    if symbol_info is None:
      continue
    dates_train_is = symbol_info.strat_aux_info.dates_train_is
    if len(dates_train_is) > 1:
      if real_scale_t_axis:
        dti_aux = pandas.DatetimeIndex([str(i) for i in symbol_info.strat_aux_info.dates_train_is])
        dt_plot_count += 1
        levels = numpy.array([len(symbol_infos) - i] * (len(dti_aux)))
        colidx = i % len(cols)
        timelines(levels, dti_aux, dti_aux.shift(1, freq='D'), color=cols[colidx])
  if dt_plot_count > 0:
    plt.xlim(xlim_extract)
    plt.title("train_insample")
    plt.ylabel("symbols")'''
  if plot_fill_intensity:
    # Row 6: fill intensity — fill_cash resampled into ~100 buckets and drawn
    # as a bubble strip whose marker size is proportional to traded cash.
    plt.subplot(10, 1, 6)
    pnldf = pnlgrps['total']
    if 'fill_cash' in pnldf:
      # NOTE(review): colidx here reuses the stale loop variable i from the
      # tindex loop above and is never used in this branch — looks vestigial.
      colidx = i % len(cols)
      fseri = pandas.Series(pnldf['fill_cash'])
      fseri.index = pnldf['tindex']
      # NOTE(review): .total_seconds()/resample assume tindex is a
      # DatetimeIndex, i.e. real_scale_t_axis=True — confirm for index mode.
      freq = max(1, int((fseri.index.max() - fseri.index.min()).total_seconds() / 100))
      # NOTE(review): the "S" resample alias is deprecated in recent pandas in
      # favor of "s" — confirm the pinned pandas version before changing.
      fseri_agg = fseri.resample(f"{freq}S").sum()
      #x = pnldf['tindex'].to_numpy()
      #y = pnldf['fill_cash'].to_numpy() * 0
      #szs = pnldf['fill_cash'].to_numpy()
      x = fseri_agg.index.to_numpy()
      y = fseri_agg.to_numpy() * 0
      szs = fseri_agg.to_numpy()
      szs /= szs.max()
      plt.scatter(x, y, szs * 500)
      plt.ylabel(f'fill_intensity')
      plt.xlim(xlim_extract)

  # Row 7: per-key open position (total excluded).
  plt.subplot(10, 1, 7)
  for i, key in enumerate(pnlkeys):
    pnldf = pnlgrps[key]
    if 'open_cash' not in pnldf or key == 'total':
      continue
    colidx = i % len(cols)
    plt.plot(pnldf['tindex'].to_numpy(),
             pnldf['open_cash'].to_numpy(),
             '-',
             color=cols[colidx],
             linewidth=0.5,
             drawstyle='steps-post')
    plt.ylabel(f'pos({pnl_currency})')
    if hide_skipped_dates:
      fmt_skip_dates(plt, formator_offset)
    keys.append(f"{key}({pnl_currency})")
  if xlim is not None:
    plt.xlim(xlim[0], xlim[1])

  def plot_pnl_aggr(colname, plot_legend):
    # Plot column `colname` for every pnl group on the current axes; the first
    # key (total, after rotation) gets the thicker line. Legend labels are
    # shortened when there are many keys.
    keys = []
    key_to_extract = pnlkeys[-1] if pnlkeys[-1] != 'total' else pnlkeys[0]
    _, exchange_quote_info = extract_info_from_key(key_to_extract)
    for i, key in enumerate(pnlkeys):
      if i == 0:
        lw = 1
      else:
        lw = 0.5
      pnldf = pnlgrps[key]
      colidx = i % len(cols)
      plt.plot(pnldf['tindex'].to_numpy(),
               pnldf[colname].to_numpy(),
               '-',
               color=cols[colidx],
               markersize=1,
               linewidth=lw,
               drawstyle='steps-post')
      if len(pnlkeys) < 30:
        keys.append(f"{key}({pnl_currency})")
      else:
        legend_key = extract_info_from_key(key)[0] if key != 'total' \
                     else f"{key}.{exchange_quote_info}({pnl_currency})"
        keys.append(legend_key)
    if plot_legend:
      plt.legend(keys, loc='upper left')

  if plot_api_rate_used:
    # Row 5 (or 6 when fill intensity is off): API rate-limit usage ratios.
    plt.subplot(10, 1, 5 + (not plot_fill_intensity))
    plt.ylabel("api rate used")
    if "submit_max_rate_used_ratio" in pnlgrps["total"] and "cancel_max_rate_used_ratio" in pnlgrps["total"]:
      plt.plot(pnlgrps['total']['tindex'].to_numpy(),
              pnlgrps['total']['submit_max_rate_used_ratio'].to_numpy(),
              '-',
              alpha=0.5)
      plt.plot(pnlgrps['total']['tindex'].to_numpy(),
              pnlgrps['total']['cancel_max_rate_used_ratio'].to_numpy(),
              '-',
              alpha=0.5)
      plt.legend(["SUBMIT", "CANCEL"], loc='lower right')
      plt.ylim([0, 1])
      if xlim is not None:
        plt.xlim(xlim[0], xlim[1])

  # Row 4 (tall when optional panels are off): out-of-sample net pnl.
  plt.subplot(10, 1, (4, 6 - plot_api_rate_used - plot_fill_intensity))
  plot_pnl_aggr('pnl_net_pureoos', False)
  if xlim is not None:
    plt.xlim(xlim[0], xlim[1])
  plt.title('oos net pnl')

  # Rows 1-3: net pnl per key, with total reset pnl (blue dashed) and optional
  # active pnl (red dashed) overlaid.
  plt.subplot(10, 1, (1, 3))
  plot_pnl_aggr('pnl_net', True)
  plt.plot(pnlgrps['total']['tindex'].to_numpy(),
           pnlgrps['total']['pnl_net_reset'].to_numpy(),
           'b--',
           markersize=1,
           linewidth=1,
           drawstyle='steps-post')
  if 'active_pnl' in pnlgrps['total']:
    plt.plot(pnlgrps['total']['tindex'].to_numpy(),
             pnlgrps['total']['active_pnl'].to_numpy(),
             'r--',
             markersize=1,
             linewidth=1,
             drawstyle='steps-post')
  plt.title('net pnl')
  # Twin axis: each symbol's mark-to-market price normalized by its first
  # value, drawn dotted for context behind the pnl curves.
  plt.twinx()
  for i, key in enumerate(pnlkeys):
    if i == 0:
      lw = 1
    else:
      lw = 0.5
    pnldf = pnlgrps[key]
    colidx = i % len(cols)
    if key != 'total':
      # NOTE(review): mtm_price[0] is label-based access; assumes a default
      # RangeIndex starting at 0 — confirm after any upstream reindexing.
      plt.plot(pnldf['tindex'].to_numpy(),
               pnldf['mtm_price'].to_numpy() / pnldf['mtm_price'][0],
               ':',
               color=cols[colidx],
               markersize=1,
               linewidth=lw,
               drawstyle='steps-post',
               alpha=0.5)
      plt.grid(False)
      plt.yticks([])

  if xlim is not None:
    plt.xlim(xlim[0], xlim[1])

  # Day separators / date tick labels on the active (top) axes.
  if real_scale_t_axis:
    axvlines(dti[datesplit], color='b', lw=0.2)
  else:
    for dtid in datesplit:
      plt.text(dtid, 0, dt[dtid].strftime("%m%d"))
    plt.xticks(dtidx, dti[dtidx].strftime("%m%d %H:%M"), rotation=40)
    axvlines(datesplit, color='b', lw=0.2)

  plt.subplots_adjust(left=0.05, hspace=0.45, wspace=0.05)

  if hide_skipped_dates:
    fmt_skip_dates(plt, formator_offset)

  # NOTE(review): this repeats the separator/tick block above verbatim after
  # subplots_adjust — possibly intentional re-application, possibly a
  # duplicate; confirm before deduplicating.
  if real_scale_t_axis:
    axvlines(dti[datesplit], color='b', lw=0.2)
  else:
    for dtid in datesplit:
      plt.text(dtid, 0, dt[dtid].strftime("%m%d"))
    plt.xticks(dtidx, dti[dtidx].strftime("%m%d %H:%M"), rotation=40)
    axvlines(datesplit, color='b', lw=0.2)
  if len(timedrets) == 0:
    return None, prtstats['Total']
  else:
    return pandas.concat(timedrets, axis=0), prtstats['Total']


def get_sstats_dict_new(sstats_dict):
  """Return {key: sstats ordered by first combdf timestamp}, keys sorted.

  Drops entries that are None or whose 'combdf' is missing or empty; keys
  with no surviving entries are omitted. Values are tuples.
  """
  ordered = {}
  for key, sstats in sorted(sstats_dict.items()):
    # Pair each usable sstat with the timestamp of its first combdf row.
    keyed = [(s['combdf']['timestamp'].iloc[0], s)
             for s in sstats
             if s is not None and 'combdf' in s and len(s['combdf']) > 0]
    if keyed:
      keyed.sort(key=lambda pair: pair[0])
      ordered[key] = tuple(s for _, s in keyed)
  return ordered


def lcs(str_a, str_b):
  """Return the longest common contiguous substring of str_a and str_b."""
  matcher = SequenceMatcher(None, str_a, str_b)
  match = matcher.find_longest_match(0, len(str_a), 0, len(str_b))
  start = match.a
  return str_a[start:start + match.size]


# Only used for adjusted position sim, need to set start&end ts properly
def get_last_ending_ts(pb_paths):
  """Map each record file to the ending timestamp of its predecessor.

  Reads the account_info header of every pb file, groups files by symbol
  (relative_norm), and pairs each file with the previous file's end
  timestamp minus a 60s margin. The first file of each symbol has no
  predecessor and gets no entry; files with no readable record or a zero
  end_timestamp are skipped.

  Args:
    pb_paths: iterable of record file paths, expected in chronological order.

  Returns:
    dict mapping pb_path -> previous chunk's end timestamp (ns, float).
  """
  all_paths = collections.defaultdict(list)
  end_timestamps = collections.defaultdict(list)
  for pb_path in pb_paths:
    record_reader = py_impl.SimpleRecordReader(pb_path)
    try:
      # Only the first record is needed for the account header.
      data = record_reader.read_record()
    finally:
      # Fix: the original leaked the reader when read_record() returned None.
      record_reader.close()
    if data is None:
      continue
    record = ospec.OrderRecordProto()
    record.ParseFromString(data)
    acc_info = record.account_info
    if acc_info.end_timestamp == 0:
      continue
    # 60s safety margin before the recorded end.
    end_ts = acc_info.end_timestamp - (60 * 1e9)
    all_paths[acc_info.relative_norm].append(pb_path)
    end_timestamps[acc_info.relative_norm].append(end_ts)

  last_ending_ts = {}
  for sym_paths, ends in zip(all_paths.values(), end_timestamps.values()):
    # Pair file i+1 with the end timestamp of file i (same symbol).
    last_ending_ts.update(zip(sym_paths[1:], ends[:-1]))
  return last_ending_ts


def main(_):
  """Entry point: load sim record files, aggregate stats, and emit plots.

  Pipeline: glob pb files -> parse each into per-symbol sim stats (optionally
  in a process pool) -> sort/clean the stats -> render the aggregated pnl
  figure(s) and, depending on flags, per-product pnl PDFs, per-day order/lp
  debug plots, and mm-stats tables.
  """
  sstats_dict = collections.defaultdict(list)
  pb_paths = list(sorted(glob.glob(flags.FLAGS.pb_path)))
  # Fallback: if the direct glob matched nothing and the pattern names a
  # pb.lz4 file, retry with a '*' directory level inserted before the file.
  if len(pb_paths) == 0 and flags.FLAGS.pb_path.find("pb.lz4") >= 0:
    pb_path_chunks = flags.FLAGS.pb_path.split("/")
    pb_path_chunks = pb_path_chunks[:-1] + ['*'] + pb_path_chunks[-1:]
    pb_path_str = "/".join(pb_path_chunks)
    print(pb_path_str)
    pb_paths = list(sorted(glob.glob(pb_path_str)))
  else:
    print(flags.FLAGS.pb_path)

  # Both cached-bbo mode and mm stats require bbo data to be loaded.
  if flags.FLAGS.bbo_from_cache or flags.FLAGS.include_mm_stats:
    flags.FLAGS.use_bbo = True

  futures = []

  compress = len(pb_paths) >= 2 or flags.FLAGS.compress
  last_ending_ts_map = get_last_ending_ts(pb_paths) if flags.FLAGS.use_adjusted_pos \
      else {}
  sim_elapsed_time_sum = 0
  if len(pb_paths) == 1 or flags.FLAGS.cpu == 1:
    # Serial path: single file or explicitly single-core.
    for pb_path in pb_paths:
      last_ending_ts = last_ending_ts_map.get(pb_path, None)
      ssdict, sim_elapsed_time = get_stats_dict_unroll(pb_path,
                                                       compress=compress,
                                                       last_ending_ts=last_ending_ts)
      sim_elapsed_time_sum += sim_elapsed_time
      for key, sstats in ssdict.items():
        sstats_dict[key].extend(sstats)
  else:
    # Parallel path: keep at most FLAGS.cpu tasks in flight, consuming one
    # completed future at a time (break out of as_completed) and gc'ing
    # between results to bound peak memory.
    futures = {}
    with ProcessPoolExecutor(max_workers=min(flags.FLAGS.cpu, flags.FLAGS.max_cpu)) as executor:
      pb_path_iter = iter(pb_paths)
      while True:
        for pb_path in pb_path_iter:
          last_ending_ts = last_ending_ts_map.get(pb_path, None)
          future = executor.submit(get_stats_dict_unroll,
                                   pb_path,
                                   compress=compress,
                                   last_ending_ts=last_ending_ts)
          futures[future] = True
          if len(futures) >= flags.FLAGS.cpu:
            break
        if len(futures) == 0:
          break

        for future in as_completed(futures):
          ssdict, sim_elapsed_time = future.result()
          sim_elapsed_time_sum += sim_elapsed_time
          for key, sstats in ssdict.items():
            sstats_dict[key].extend(sstats)
          del futures[future]
          del future
          gc.collect()
          break

  sstats_dict = get_sstats_dict_new(sstats_dict)
  os.makedirs(flags.FLAGS.sim_result_dir, exist_ok=True)

  # Optional x-axis limit, given as "<from>-<to>" datetime ints.
  xlim = None
  if flags.FLAGS.set_xlim is not None:
    fromdt, todt = flags.FLAGS.set_xlim.split("-")
    pdt = pandas.DatetimeIndex([int(fromdt), int(todt)])
    xlim = [pdt[0], pdt[1]]

  def plot_pnl(sim_result_dir,
               sim_result_prefix,
               strat_prefix,
               sstats_dict,
               dump_stat_key,
               print_df=False,
               sim_elapsed_time=None):
    # Render one aggregated pnl figure to <sim_result_dir>, record the output
    # filename, and optionally dump a stats json / option-pnl breakdown.
    os.makedirs(sim_result_dir, exist_ok=True)
    _, pnldict = plot_aggregated_pnl(sstats_dict,
                                     real_scale_t_axis=flags.FLAGS.real_scale_t_axis,
                                     xlim=xlim,
                                     print_df=print_df,
                                     hide_skipped_dates=flags.FLAGS.hide_skipped_dates,
                                     sim_elapsed_time=sim_elapsed_time,
                                     plot_api_rate_used=flags.FLAGS.plot_api_rate_used,
                                     plot_fill_intensity=flags.FLAGS.plot_fill_intensity,)
    pnlstr = get_pnlstr(pnldict)
    # Drop the common prefix shared with strat_prefix to shorten filenames.
    strat_lcs = lcs(sim_result_prefix, strat_prefix)
    if len(strat_lcs) > 10:
      short_prefix = sim_result_prefix.replace(strat_lcs, "")
    else:
      short_prefix = sim_result_prefix
    plt.suptitle(f"{strat_prefix}\n{short_prefix[:100]}\n{short_prefix[100:]}", fontsize=12)

    filename = f"s_{strat_prefix}_{pnlstr}_{short_prefix}"
    with open(f"{sim_result_dir}/filenames.txt", 'a') as txt:
      # NOTE(review): "(unknown)" literals here and below look like
      # redacted/placeholder filename expressions (the truncated `filename`
      # built above is immediately discarded) — confirm against the original
      # source before relying on the emitted names.
      txt.write(f"(unknown)\n")
    if len(filename) > 200:
      filename = filename[:200]
    filename = f"(unknown).png"
    if flags.FLAGS.sim_info:
      sim_info = json.load(open(flags.FLAGS.sim_info))
      os.makedirs(f"{sim_result_dir}/statsfile", exist_ok=True)
      with open(f"{sim_result_dir}/statsfile/(unknown).json", 'w') as sw:
        sim_info["dump_stat_key"] = dump_stat_key
        sim_info["products"] = list(sstats_dict.keys())
        sim_info["pnldict"] = pnldict.to_dict()
        json.dump(sim_info, sw, indent=2)
    plt.savefig(f"{sim_result_dir}/" + filename)
    plt.close()
    if hasattr(flags.FLAGS, "include_option_pnl") and flags.FLAGS.include_option_pnl:
      # Pick the first config file whose name contains the sim prefix, if any.
      config_lists = os.listdir(f"{sim_result_dir}/configs")
      target_config = None if len([i for i in config_lists if sim_result_prefix in i]) == 0 else [
          i for i in config_lists if sim_result_prefix in i
      ][0]
      ssopt.plot_options_and_hedge_pnls(sim_result_dir, pnldict['each_pnl_df'], target_config)

  # Per-symbol figures, one subdirectory each.
  if flags.FLAGS.unfold:
    for key, val in sstats_dict.items():
      ssplot.setup_plt()
      plot_pnl(os.path.join(flags.FLAGS.sim_result_dir, flags.FLAGS.strat_prefix + f"_{key}"),
               flags.FLAGS.sim_result_prefix + f"_{key}_",
               flags.FLAGS.strat_prefix, {key: val},
               dump_stat_key="symbol",
               print_df=False)

  # Combined figure across all symbols.
  plot_pnl(flags.FLAGS.sim_result_dir,
           flags.FLAGS.sim_result_prefix,
           flags.FLAGS.strat_prefix,
           sstats_dict,
           dump_stat_key="total",
           print_df=True,
           sim_elapsed_time=sim_elapsed_time_sum)

  if not flags.FLAGS.squeeze_stat:
    # Full stats mode: timed-return pdf plus a per-product pnl pdf, optionally
    # dumping a per-x-minute pnl csv.
    timedret, _ = plot_aggregated_pnl(sstats_dict,
                                      real_scale_t_axis=flags.FLAGS.real_scale_t_axis,
                                      xlim=xlim,
                                      hide_skipped_dates=flags.FLAGS.hide_skipped_dates,
                                      plot_api_rate_used=flags.FLAGS.plot_api_rate_used,
                                      plot_fill_intensity=flags.FLAGS.plot_fill_intensity,)
    os.makedirs(f"{flags.FLAGS.sim_result_dir}/stats", exist_ok=True)
    if timedret is not None:
      ssplot.plot_timedret(
          f"{flags.FLAGS.sim_result_dir}/stats/"
          f"sim_{flags.FLAGS.sim_result_prefix[:100]}_tret.pdf", timedret,
          f"sim_{flags.FLAGS.sim_result_prefix}")
    plt.close()
    with PdfPages(f"{flags.FLAGS.sim_result_dir}/stats/"
                  f"sim_{flags.FLAGS.sim_result_prefix[:100]}_pnl_per_product.pdf") as pdf:
      if flags.FLAGS.dump_pnl_per_x_minute:
        minute_pnldfs = []
      for stratname, sstats in sstats_dict.items():
        # Rebuild a continuous pnl_net across per-day chunks by chaining diffs.
        pnldfs = []
        pnl_currency = None
        for sstat in sstats:
          pnl_currency = sstat['currency_info']['pnl_currency']
          pnldf = sstat['combdf']
          pnldf['pnl_diff'] = pnldf['pnl_net'].diff().fillna(0)
          pnldfs.append(pnldf.reset_index(drop=True))
        if len(pnldfs) == 0:
          continue
        combdf = pandas.concat(pnldfs, axis=0).sort_values(['timestamp']).reset_index(drop=True)
        del pnldfs
        combdf['pnl_net'] = combdf['pnl_diff'].cumsum()

        if flags.FLAGS.dump_pnl_per_x_minute:
          # Bucket rows to x-minute boundaries: sum fills per bucket, keep the
          # last row of each bucket for the remaining columns.
          idx = pandas.to_datetime(
              combdf['timestamp']).dt.floor(freq=f'{flags.FLAGS.dump_pnl_per_x_minute}min')
          minute_pnl = combdf.set_index(idx)
          per_x_fill = minute_pnl.groupby(minute_pnl.index)[['fill_cash', 'fill_qty']].sum()
          minute_pnl = minute_pnl[~minute_pnl.index.duplicated(keep='last')]
          minute_pnl[['fill_cash', 'fill_qty']] = per_x_fill
          minute_pnl['symbol'] = stratname
          minute_pnldfs.append(minute_pnl)
        plot_each_strat(stratname, {'combdf': combdf}, plttype='pnl', pnl_currency=pnl_currency)
        pdf.savefig()
        plt.close()

      if flags.FLAGS.dump_pnl_per_x_minute and minute_pnldfs:
        pandas.concat(minute_pnldfs, axis=0).to_csv(
            f"{flags.FLAGS.sim_result_dir}/stats/sim_{flags.FLAGS.sim_result_prefix[:100]}_min_pnl.gzip",
            compression='gzip')
  if plot_debug_info():
    # Per-day, per-time-range order debug plots for each symbol.
    for symbol, sstats in sstats_dict.items():
      out_dir = f"{flags.FLAGS.sim_result_dir}/each/{flags.FLAGS.sim_result_prefix[:100]}"
      baseflags = abase.get_base_flags()
      baseflags.time_range = baseflags.time_range.replace("H", "")
      old_time_range = baseflags.time_range
      split_hours = flags.FLAGS.split_hours
      # trading_date may be a single date or a "<from>-<to>" range.
      if baseflags.trading_date.find("-") >= 0:
        from_date, end_date = baseflags.trading_date.split("-")
      else:
        from_date = baseflags.trading_date
        end_date = baseflags.trading_date
      trading_dates = pandas.date_range(from_date, end_date)
      for _, simstat in enumerate(sstats):
        for trading_date in trading_dates:
          baseflags.trading_date = trading_date.strftime("%Y%m%d")
          for trange in (abase.split_time_range(old_time_range, split_hours)
                         if split_hours is not None else [old_time_range]):
            baseflags.time_range = trange
            filename_prefix = f"sim_{flags.FLAGS.sim_result_prefix[:100]}_{symbol}_{baseflags.trading_date}.{trange}"
            time_ranges = abase.get_time_range(baseflags)
            oputil.plot_order_plot(filename_prefix,
                                   time_ranges[0],
                                   time_ranges[1],
                                   simstat['orderdf'],
                                   simstat['bbodf'],
                                   pnlts=simstat['combdf']['timestamp'],
                                   pnls=simstat['combdf']['pnl_net'],
                                   prcdict=simstat['prcdict'],
                                   posts=simstat['combdf']['timestamp'],
                                   poss=simstat['combdf']['open_pos'],
                                   plot_dir=out_dir,
                                   plot_tags=flags.FLAGS.include_tag_info)

  if flags.FLAGS.include_lp_plot:
    # Liquidity-provision plots per symbol, plus an accumulated depth summary.
    for symbol, sstats in sstats_dict.items():
      out_dir = f"{flags.FLAGS.sim_result_dir}/each/{flags.FLAGS.sim_result_prefix[:100]}_lp"
      baseflags = abase.get_base_flags()
      baseflags.time_range = baseflags.time_range.replace("H", "")
      old_time_range = baseflags.time_range
      split_hours = flags.FLAGS.split_hours
      total_sumdf = pandas.Series()
      total_interval_num = 0
      if baseflags.trading_date.find("-") >= 0:
        from_date, end_date = baseflags.trading_date.split("-")
      else:
        from_date = baseflags.trading_date
        end_date = baseflags.trading_date
      trading_dates = pandas.date_range(from_date, end_date)
      for _, simstat in enumerate(sstats):
        for trading_date in trading_dates:
          baseflags.trading_date = trading_date.strftime("%Y%m%d")
          for trange in (abase.split_time_range(old_time_range, split_hours)
                         if split_hours is not None else [old_time_range]):
            baseflags.time_range = trange
            filename_prefix = f"sim_{flags.FLAGS.sim_result_prefix[:100]}_{symbol}_{baseflags.trading_date}.{trange}"
            time_ranges = abase.get_time_range(baseflags)
            sumdf, interval_num = lputil.plot_lp_plot(filename_prefix,
                                                      time_ranges[0],
                                                      time_ranges[1],
                                                      simstat['orderdf'],
                                                      simstat['bbodf'],
                                                      plot_dir=out_dir)
            if total_sumdf.size == 0:
              total_sumdf = sumdf
            else:
              total_sumdf += sumdf
            total_interval_num += interval_num

        # NOTE(review): out_dir is reassigned to the depth directory inside
        # the simstat loop, so subsequent simstats' lp plots land there —
        # confirm whether this is intentional.
        filename_prefix = f"sim_{flags.FLAGS.sim_result_prefix[:100]}_{symbol}_depth"
        out_dir = f"{flags.FLAGS.sim_result_dir}/depth/"
        lputil.plot_total_depth_tb(filename_prefix, out_dir, total_sumdf, total_interval_num)

  if flags.FLAGS.include_mm_stats:
    # Market-making stats: per-symbol uptime/depth/volume tables aggregated
    # over all sim chunks, rendered both as charts and a combined table png.
    plt.style.use('fivethirtyeight')
    ssplot.setup_plt()
    out_dir = f"{flags.FLAGS.sim_result_dir}/mm_stats/{flags.FLAGS.sim_result_prefix[:100]}"
    os.makedirs(out_dir, exist_ok=True)
    fig, axs = plt.subplots(len(sstats_dict), figsize=(15, 10))
    fig.suptitle(
        f"{flags.FLAGS.strat_prefix}\n{flags.FLAGS.sim_result_prefix[:100]}\n{flags.FLAGS.sim_result_prefix[100:]}",
        fontsize=12)
    plt_idx = 0
    for symbol, sstats in sstats_dict.items():
      print(f'Calculating {symbol}')
      # plt.subplots returns a bare Axes (not an array) when there is only one.
      ax = axs[plt_idx] if isinstance(axs, numpy.ndarray) else axs
      baseflags = abase.get_base_flags()
      baseflags.time_range = baseflags.time_range.replace("H", "")
      old_time_range = baseflags.time_range
      split_hours = flags.FLAGS.split_hours
      if baseflags.trading_date.find("-") >= 0:
        from_date, end_date = baseflags.trading_date.split("-")
      else:
        from_date = baseflags.trading_date
        end_date = baseflags.trading_date

      final_res = None
      title_ts_min = None
      title_ts_max = None
      for _, simstat in enumerate(sstats):
        orderdf = simstat['orderdf']
        bbodf = simstat['bbodf']
        mea = f"{simstat['symbol_info'].market_type}.{simstat['symbol_info'].exchange}.{simstat['symbol_info'].api}"
        ts_min = min(orderdf['timestamp'].min(), bbodf['timestamp'].min())
        title_ts_min = ts_min if title_ts_min is None else min(title_ts_min, ts_min)
        ts_max = max(orderdf['timestamp'].max(), bbodf['timestamp'].max())
        title_ts_max = ts_max if title_ts_max is None else max(title_ts_max, ts_max)
        res = oputil.plot_mm_stats_table(mea=mea,
                                         symbol=simstat['symbol_info'].relative_norm,
                                         oedf=orderdf,
                                         bbodf=bbodf,
                                         price_bp_tholds=flags.FLAGS.price_bp_tholds,
                                         quote_tholds=flags.FLAGS.quote_tholds,
                                         base_tholds=flags.FLAGS.base_tholds)
        if final_res is None:
          final_res = res
        else:
          final_res += res

      # NOTE(review): bbodf/orderdf here are whatever the LAST simstat left
      # bound — presumably intentional (charts for the final chunk, table for
      # the aggregate); confirm.
      oputil.plot_mm_stats_chart(final_res,
                                 bbodf,
                                 orderdf,
                                 out_dir,
                                 f"{symbol}:{flags.FLAGS.sim_result_prefix[:100]}",
                                 is_split=True)
      oputil.plot_mm_stats_chart(final_res,
                                 bbodf,
                                 orderdf,
                                 out_dir,
                                 f"{symbol}:{flags.FLAGS.sim_result_prefix[:100]}",
                                 is_split=False)
      table_df = oputil.get_table_res(final_res).reset_index()
      # Bold the 'Avg' rows (cell row index is offset by 1 for the header).
      bold_idx = table_df[table_df['Stat'] == 'Avg'].index
      table = ax.table(cellText=table_df.to_numpy(),
                       colLabels=table_df.columns,
                       loc='center',
                       cellLoc='center')
      for key, cell in table.get_celld().items():
        row, _ = key
        if row - 1 in bold_idx:
          cell.set_text_props(fontproperties=matplotlib.font_manager.FontProperties(weight='bold'))
      table.auto_set_column_width(col=list(range(len(table_df.columns))))
      ax.axis('tight')
      ax.axis('off')
      ax.set_title(
          f"{symbol} {datetime.datetime.utcfromtimestamp(title_ts_min/1e9).strftime('%Y-%m-%d %H:%M:%S')}-{datetime.datetime.utcfromtimestamp(title_ts_max/1e9).strftime('%Y-%m-%d %H:%M:%S')}",
      )
      plt_idx += 1
    fig.subplots_adjust(hspace=0.3)
    fig.savefig(f"{out_dir}/{flags.FLAGS.sim_result_prefix[:100]}_stat_table.png")
    plt.close(fig)


def define_sim_processing_flags():
  """Register sim-processing command-line flags on absl FLAGS.

  Registration order is irrelevant to absl; flags are listed simplest first.
  """
  flags.DEFINE_boolean('use_bbo', False, '')
  flags.DEFINE_boolean('use_adjusted_pos', False, '')
  flags.DEFINE_float('initial_position_all', None, '')
  flags.DEFINE_string(
      'initial_position', '',
      'The initial position, full_symbol=pos;full_sym2=pos '
      'e.g.BTC-USD.20200626.Huobi=-50;BTC-USD.20200626.Okex=-50 ')


if __name__ == '__main__':
  # Script entry: register all flags, configure numexpr/warnings/matplotlib,
  # then hand control to absl's app.run, which parses flags and calls main.
  abase.define_base_flags()
  abase.define_feed_archive_flags()
  define_sim_processing_flags()
  # Use every available core for numexpr evaluations.
  ne.set_num_threads(ne.detect_number_of_cores())

  warnings.filterwarnings("ignore", category=UserWarning)
  warnings.simplefilter(action='ignore', category=FutureWarning)

  # Script-local flags (defined inline rather than in a define_* helper).
  flags.DEFINE_integer('cpu', 4, '')

  flags.DEFINE_integer('max_cpu', 4, '')

  flags.DEFINE_string('pb_path', None, '')

  flags.DEFINE_string('sim_info', None, '')

  flags.DEFINE_string('sim_result_dir', 'sim_test2', '')

  flags.DEFINE_string('sim_result_prefix', '', '')

  flags.DEFINE_string('strat_prefix', '', '')

  flags.DEFINE_string('stat_csv', None, '')

  flags.DEFINE_string('set_xlim', None, '')

  flags.DEFINE_string('hours_warmup', None, '')

  flags.DEFINE_bool('unfold_stat', True, '')

  flags.DEFINE_bool('full_bbo', False, '')

  flags.DEFINE_bool('compress', False, '')

  flags.DEFINE_bool('squeeze_stat', False, '')

  flags.DEFINE_float('dump_pnl_per_x_minute', None, 'dump pnl per symbol per x minute')

  flags.DEFINE_bool('is_focus_mt', False, '')

  flags.DEFINE_bool('real_scale_t_axis', True, '')

  flags.DEFINE_bool('unfold', False, '')

  flags.DEFINE_float('split_hours', 6, '')

  flags.DEFINE_bool('hide_skipped_dates', False, '')

  flags.DEFINE_bool('force_plot_debug_info', False,
                    'True if you want "each" for multiple trading dates')

  flags.DEFINE_bool('include_mm_stats', False,
                    'True if you want to include mm stats (Uptime, Order Depth, Volume, ...)')

  flags.DEFINE_bool('include_lp_plot', False, '')
  flags.DEFINE_string('pnl_ccy_override', None, 'override pnl ccy, e.g. pnl_ccy_override=USD')

  flags.DEFINE_list('price_bp_tholds', [], 'mm_stats price threshold (e.g. 75: midP +/- 75bps)')

  flags.DEFINE_list(
      'quote_tholds', [],
      'mm_stats quote threshold (e.g. 100: Check total quote sum of bid/ask orders within price_bp)'
  )

  flags.DEFINE_list(
      'base_tholds', [],
      'mm_stats base threshold (e.g. 100: Check total base qty sum of bid/ask orders within price_bp_tholds)'
  )

  flags.DEFINE_bool('include_option_pnl', False, 'True if you want to include option pnl')
  flags.DEFINE_bool('include_tag_info', False, 'True if you want to include tag info at each txt')
  flags.DEFINE_bool('plot_api_rate_used', True, 'True if you want to plot api rate used')
  # NOTE(review): the help text below duplicates the api-rate flag's wording;
  # it presumably should read "plot fill intensity" — runtime string, left
  # unchanged here.
  flags.DEFINE_bool('plot_fill_intensity', True, 'True if you want to plot api rate used')
  plt.style.use('fivethirtyeight')
  app.run(main)
