# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: jhkim

import functools
import sys
import os
import json
import ujson
import pickle
import logging
import collections
import datetime
import traceback

from absl import app

from concurrent.futures import ProcessPoolExecutor

import pandas
from google.protobuf.json_format import MessageToDict

import matplotlib

matplotlib.use("Agg")
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages

import numpy

import re

from coin.base import flags
from cc.appcoin2.strategy.sim_backtest import to_trading_intervals
import coin.strategy.mm.tool.archive_base as abase
import coin.pnl.sim_stat_calc2 as sscalc2
import coin.pnl.sim_stat_plot as ssplot
import coin.pnl.pnl_compact_stat as pcstat
import coin.pnl.sim_stat_frompb as ssfrompb
import coin.pnl.sim_stat_option as ssopt
from coin.exchange.kr_rest.product.product_impl import generate_product_from_str2
from coin.support.proto_log.logic.util import run_from_strat_log_archive
from coin.support.proto_log.logic.util import run_from_strat_log_pt1m
from coin.proto.coin_strategy_pb2 import StrategyLog
import coin.proto.coin_order_enums_pb2 as order_pb2
import coin.proto.coin_market_enums_pb2 as market_pb2
import coin.proto.coin_order_gateway_pb2 as gateway_pb2
import coin.proto.coin_telemetry_pb2 as tele_pb2
import coin.strategy.mm.tool.order_plot_util as oputil
import coin.strategy.mm.tool.lp_plot_util as lputil
import coin.strategy.mm.tool.pnl_merger as pmerger

from coin.strategy.mm.tool.signal_plot_base import (dump_book, dump_book_from_cache,
                                                    dump_funding_rate, BookCompress, BboTuple)


def plot_debug_info():
  """Decide whether debug-level plots should be produced.

  An explicit --force_plot_debug_info wins when set; otherwise debug plots
  are enabled only for short runs (<= 2 trading dates). In both cases
  --use_bbo must be on.
  """
  fl = flags.FLAGS
  forced = fl.force_plot_debug_info
  if forced is not None:
    return fl.use_bbo and forced
  return fl.use_bbo and (len(abase.get_trading_dates(fl.trading_date)) <= 2)


def split_trading_intervals():
  """Partition the configured trading-date range into fixed-size intervals.

  `group_dates` consecutive trading dates are grouped per interval (currently
  one day, i.e. 24H partitions), matching the sim partitioning.
  """
  group_dates = 1
  all_dates = abase.get_trading_dates(abase.get_base_flags().trading_date)
  final_date = all_dates[-1]
  sampled_dates = all_dates[::group_dates]
  interval_hours = "%sH" % (24 * group_dates)
  return to_trading_intervals(sampled_dates, interval_hours, final_date)


def gen_trading_intervals(first_symbol_time, last_symbol_time, trading_intervals):
  """Yield the (start, end) intervals overlapping [first_symbol_time, last_symbol_time]."""
  for interval_start, interval_end in trading_intervals:
    overlaps = (first_symbol_time <= interval_end) and (last_symbol_time >= interval_start)
    if overlaps:
      yield interval_start, interval_end


def bbo_from_cache():
  """True when BBO data should be read from cache (env var takes precedence)."""
  env_override = os.environ.get('bbo_from_cache', '0')
  return env_override == '1' or flags.FLAGS.bbo_from_cache


# Be careful with:
# - 1. Symbol changes at roll-over time
# - 2. Order events may not reach the end of day, but the bbo must
# Solution: use trading_intervals identical to the sim partitioning to settle bbo start & end times
def dump_bbo(product, first_symbol_time, last_symbol_time):
  """Dump best-bid/offer rows for `product` over every trading interval that
  overlaps [first_symbol_time, last_symbol_time].

  Interval boundaries come from split_trading_intervals() so they line up with
  the sim partitioning. Rows are read from cache when enabled, otherwise
  dumped from the book; crossed or absurd quotes (|log(bid/ask)| >= 0.05) are
  filtered out.

  Returns:
    pandas.DataFrame with BboTuple._fields columns (empty when no interval
    overlaps the requested range).
  """
  bbodfs = []
  trading_intervals = split_trading_intervals()
  for start_time, end_time in gen_trading_intervals(first_symbol_time, last_symbol_time,
                                                    trading_intervals):
    for trading_date, from_hours, to_hours in ssfrompb.gen_schedule(start_time, end_time):
      time_range = f"{from_hours:.4f}-{to_hours:.4f}"
      bbos = []
      if bbo_from_cache():
        bbos = dump_book_from_cache(product, trading_date, time_range, flags.FLAGS.bbo_cache_root)
      if len(bbos) == 0:
        # Cache miss (or cache disabled): dump from the raw book. Debug plots
        # sample every 3s instead of 30s; --full_bbo keeps every price change.
        compress_opt = BookCompress.EVERY_N_SECOND
        every_n_sec = 3 if plot_debug_info() else 30
        if flags.FLAGS.full_bbo:
          compress_opt = BookCompress.BBO_P_CHANGE
          every_n_sec = None
        bbos = dump_book(product=product,
                         feed_sub_request=None,
                         compress=compress_opt,
                         trading_date=trading_date,
                         time_range=time_range,
                         every_n_sec=every_n_sec)
      bbodf = pandas.DataFrame(bbos, columns=BboTuple._fields)
      # Drop rows whose bid/ask are crossed or more than ~5% apart (bad data).
      bbodf = bbodf[numpy.log(bbodf['bid0_price'] / bbodf['ask0_price']).abs() < 0.05]
      bbodfs.append(bbodf)
  if not bbodfs:
    # BUG FIX: pandas.concat([]) raises "ValueError: No objects to
    # concatenate" -- return an empty, correctly-shaped frame instead when no
    # interval overlaps the requested time range.
    return pandas.DataFrame(columns=BboTuple._fields)
  return pandas.concat(bbodfs, axis=0).reset_index(drop=True)


class OgpbFields(object):
  """Whitelists of order-gateway protobuf field names, grouped by the dtype
  they should receive when flattened into DataFrame rows."""
  intkeyset = {'timestamp', 'event_time', 'post_only', 'proc_order_id'}
  floatkeyset = {
      'order_price', 'order_qty', 'fill_price', 'fill_qty', 'min_pos', 'max_pos', 'reserve',
      'net_position', 'weight', 'total', 'sub_amt_filled'
  }
  catekeyset = {
      'symbol', 'type', 'currency', 'exchange_type', 'market_type', 'order_side', 'fill_type'
  }
  keyset = intkeyset | floatkeyset | catekeyset

  @staticmethod
  def GetDict(oepb):
    """Project `oepb`'s whitelisted fields into a plain dict.

    Lazily extends the whitelist with 'tag' when --include_tag_info is set.
    'weight' and 'sub_amt_filled' default to 0 when absent from the message.
    """
    if (hasattr(flags.FLAGS, 'include_tag_info') and flags.FLAGS.include_tag_info and
        'tag' not in OgpbFields.catekeyset):
      OgpbFields.catekeyset.add('tag')
      OgpbFields.keyset.add('tag')
    flattened = {}
    for field_name in OgpbFields.keyset:
      if hasattr(oepb, field_name):
        flattened[field_name] = getattr(oepb, field_name)
      elif field_name in ('weight', 'sub_amt_filled'):
        flattened[field_name] = 0
    return flattened


class StratlogDumpJob(abase.SerializedJob):
  """Serializable job that replays a strategy's proto logs into a `job_cls`
  accumulator (e.g. PnlPlot) and optionally dumps matching BBO data.

  When --use_pickle is set, results are cached under <job_dir>/pkl, keyed by
  date range, orderlog machine, strategy name and bbo usage.
  """

  def __init__(self, job_dir, job_cls, baseflags, fsflags, osflags):
    # job_cls: accumulator class constructed as job_cls(baseflags, osflags);
    # it must provide set_time_range(), on_strat_log()/on_pt1m() and get_dfset().
    self.job_cls = job_cls
    self.baseflags = baseflags
    self.fsflags = fsflags
    self.osflags = osflags
    self.strat_name = osflags.strategy_name
    # BBO data is required by any of these flag-driven features.
    self.use_bbo = flags.FLAGS.use_bbo or flags.FLAGS.include_mm_stats or flags.FLAGS.include_active_pnl or\
     flags.FLAGS.include_lp_plot
    # Environment variable "use_bbo" (0/1), when present, overrides the flags.
    use_bbo_override = os.environ.get("use_bbo", None)
    if use_bbo_override is not None:
      self.use_bbo = int(use_bbo_override) == 1

    os.makedirs(job_dir, exist_ok=True)
    # trading_date is "<start>-<end>"; exactly one '-' is assumed here.
    self.start_date, self.end_date = baseflags.trading_date.split('-')
    if flags.FLAGS.use_pickle:
      os.makedirs(job_dir + "/pkl", exist_ok=True)
      # Register the pickle cache path with the SerializedJob base.
      abase.SerializedJob.__init__(
          self,
          os.path.join(
              job_dir + "/pkl", "%s.pkl" % "_".join([
                  self.start_date, self.end_date, self.osflags.orderlog_machine, self.strat_name,
                  "usebbo" if self.use_bbo else "nousebbo"
              ])))

  def run(self):
    """Run the job, serving from the pickle cache when --use_pickle is set.

    Returns:
      (strat, bbodf_dict): the populated accumulator and a dict mapping
      (exchange, symbol) -> BBO DataFrame.
    """
    if flags.FLAGS.use_pickle:
      # Recompute when the cache file is missing or a rerun is forced.
      if not os.path.exists(self.filename) or flags.FLAGS.force_rerun:
        self.serialize(self.filename)
      (strat, bbodf_dict) = self.deserialize()
      return (strat, bbodf_dict)
    else:
      return self.serialize(None)

  def serialize(self, filename):
    """Replay the strategy logs and dump per-symbol BBO data.

    When `filename` is given (pickle mode), the result is written there and
    nothing is returned; otherwise (strat, bbodf_dict) is returned directly.
    """
    start_time, end_time = abase.get_time_range(self.baseflags)
    strat = self.job_cls(self.baseflags, self.osflags)
    strat.set_time_range(start_time, end_time)
    if flags.FLAGS.use_pt1m_dump:
      # Replay pre-aggregated pt1m stat dumps from the configured cache root.
      # The 30-minute padding widens the scan window beyond [start, end];
      # presumably to catch records straddling the boundary -- TODO confirm.
      log_root_dir = flags.FLAGS.stratlog_cache_root
      run_from_strat_log_pt1m(on_log_callback=strat.on_pt1m,
                               start_time=start_time - datetime.timedelta(minutes=30),
                               end_time=end_time + datetime.timedelta(minutes=30),
                               root_dir=log_root_dir,
                               strategy_name=self.osflags.strategy_name)
    else:
      # Features that need the full event stream read the complete live log;
      # everything else reads the smaller "slim" log.
      if (hasattr(flags.FLAGS, 'use_full_live_log') and flags.FLAGS.use_full_live_log) or (hasattr(
          flags.FLAGS, 'include_mm_stats') and flags.FLAGS.include_mm_stats) or (hasattr(
              flags.FLAGS, 'include_active_pnl') and flags.FLAGS.include_active_pnl) or (hasattr(
                  flags.FLAGS, 'include_lp_plot') and flags.FLAGS.include_lp_plot):
        log_root_dir = '/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_proto_log'
      else:
        log_root_dir = '/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_slim_proto_log'

      run_from_strat_log_archive(on_log_callback=strat.on_strat_log,
                                  start_time=start_time - datetime.timedelta(minutes=30),
                                  end_time=end_time + datetime.timedelta(minutes=30),
                                  root_dir=log_root_dir,
                                  machine=self.osflags.orderlog_machine,
                                  strategy_name=self.osflags.strategy_name)

    # Dump one BBO DataFrame per traded (exchange, symbol).
    orddf = strat.get_dfset()['orddf']
    bbodf_dict = {}
    if len(orddf) > 0:
      for (exchange, symbol), sym_orddf in orddf.groupby(['exchange_type', 'symbol']):
        first_row = sym_orddf.iloc[0]
        first_symbol_time = pandas.DatetimeIndex([int(first_row['timestamp'])]).to_pydatetime()[0]
        market = first_row.get('market_type', 'Spot')
        exchange = first_row['exchange_type']
        # The first event's time determines which product definition applies
        # (symbols can change at roll-over).
        product = generate_product_from_str2(market, exchange, None, symbol, first_symbol_time)
        last_row = sym_orddf.iloc[-1]
        last_symbol_time = pandas.DatetimeIndex([int(last_row['timestamp'])]).to_pydatetime()[0]
        if self.use_bbo:
          # NOTE(review): dump_bbo is passed (start_time, end_time), not the
          # per-symbol first/last times computed above; last_symbol_time is
          # otherwise unused. Possibly intentional since bbo must span the
          # full range (see dump_bbo's header note) -- confirm.
          bbodf = dump_bbo(product, start_time, end_time)
        else:
          bbodf = pandas.DataFrame()
        bbodf_dict[(exchange, symbol)] = bbodf
    abase.print_mem()

    if flags.FLAGS.use_pickle:
      with open(filename, 'wb') as writefile:
        pickle.dump((strat, bbodf_dict), writefile)
    else:
      return (strat, bbodf_dict)

  def deserialize_impl(self, filename):
    # Load the cached (strat, bbodf_dict) tuple.
    # NOTE(review): the file handle is never closed explicitly; consider a
    # `with` block.
    return pickle.load(open(filename, 'rb'))


@functools.lru_cache
def get_ts_buffer():
  """Half-width (ns) of the stat-dump aggregation window, inferred from flags.

  PT15M dumps get 450s, PT5M dumps 150s; everything else (including non-pt1m
  mode) gets 30s. Cached, so flags are read once.
  """
  if flags.FLAGS.use_pt1m_dump:
    cache_root = flags.FLAGS.stratlog_cache_root
    if "PT15M" in cache_root:
      return 450e9
    if "PT5M" in cache_root:
      return 150e9
  return 30e9


class PnlPlot(abase.SerializedJob):
  """Accumulates strategy proto-log events and exposes them as DataFrames.

  Two ingestion paths fill the same accumulators:
    - on_strat_log(): raw StrategyLog protobuf records from the log archive.
    - on_pt1m(): pre-aggregated per-minute stat rows, from which synthetic
      order events are reconstructed.
  Call set_time_range() first, then feed logs, then get_dfset().
  """

  def __init__(self, baseflags, osflags):
    self.baseflags = baseflags
    self.osflags = osflags
    self.strat_bals_ = []  # per-currency balance snapshots
    self.strat_poss_ = []  # per-symbol net-position snapshots
    self.pnl_bals_ = []  # pnl balance snapshots
    self.exec_configs_ = []  # aggregated executor configs
    self.oes_ = []  # order events (real, or synthesized from pt1m stats)
    self.reserves_ = []  # reserve balance snapshots
    self.signals_ = []  # rows parsed from SIGNAL_INFO extra_info
    self.lats_ = []  # rows parsed from LATENCY_INFO extra_info
    self.start_time = None  # ns-epoch bounds, set via set_time_range()
    self.end_time = None
    self.pb = StrategyLog()  # reused across ParseFromString() calls

  def del_all_data(self):
    """Release the bulky accumulators once the DataFrames have been built.

    NOTE(review): signals_ and lats_ are not deleted here -- confirm whether
    that is intentional.
    """
    del self.strat_bals_
    del self.strat_poss_
    del self.pnl_bals_
    del self.oes_
    del self.exec_configs_
    del self.reserves_

  def get_filename_prefix2(self, postfix):
    """Return "<strategy_name>_<trading_date>_<postfix>"."""
    return "_".join((self.osflags.strategy_name, self.baseflags.trading_date, postfix))

  def set_time_range(self, start_time, end_time):
    """Store the inclusive [start, end] window as ns-epoch integers."""
    self.start_time = pandas.DatetimeIndex([start_time]).astype(int)[0]
    self.end_time = pandas.DatetimeIndex([end_time]).astype(int)[0]

  def get_df_time_range(self, rows):
    """Like get_df(), but additionally clipped to [start_time, end_time]."""
    df = self.get_df(rows)
    # Rows are stamped either 'timestamp' or 'event_time' depending on source.
    timecol = 'timestamp' if 'timestamp' in df else 'event_time'
    if len(df) > 0:
      if self.start_time is not None:
        df = df.loc[df[timecol] >= self.start_time]
      if self.end_time is not None:
        df = df.loc[df[timecol] <= self.end_time]
    return df.reset_index(drop=True)

  def get_df(self, rows):
    """Build a DataFrame from dict rows, casting columns per OgpbFields sets."""
    if len(rows) == 0:
      return pandas.DataFrame()
    df = pandas.DataFrame(rows)
    df = pandas.concat([
        df[df.columns.intersection(OgpbFields.floatkeyset)].astype(float),
        df[df.columns.intersection(OgpbFields.intkeyset)].astype(int),
        df[df.columns.intersection(OgpbFields.catekeyset)],
    ],
                       axis=1)
    return df.reset_index(drop=True)

  def get_dfset(self):
    """Convert every accumulator into a DataFrame and return them as a dict.

    Order events additionally get: protobuf enum ints decoded to names, a
    maker-fill fixup, event_time -> timestamp renaming, a +/-1 'sign' column,
    and zero-quantity fills blanked to NaN.
    """
    orddf = self.get_df_time_range(self.oes_)
    # Decode stored protobuf enum integers to their symbolic names.
    for fieldname, pbtype in [
        ('market_type', market_pb2.MarketType),
        ('exchange_type', market_pb2.ExchangeType),
        ('order_type', order_pb2.OrderType),
        ('order_side', order_pb2.OrderSide),
        ('fill_type', order_pb2.FillType),
        ('type', gateway_pb2.OrderEvent.OrderEventType),
    ]:
      if fieldname in orddf:
        storedarr = orddf[fieldname]
        for pbvalue in list(storedarr.unique()):
          matchidx = pbvalue == storedarr
          pbvaluestr = pbtype.Name(pbvalue)
          orddf.loc[matchidx, fieldname] = pbvaluestr
    if 'fill_type' in orddf:
      # A filled post-only order can only have been a maker fill, even when
      # the gateway reported UNKNOWN_FILL_TYPE.
      must_be_maker = ((orddf['fill_type'] == 'UNKNOWN_FILL_TYPE') & orddf['post_only'] &
                       (orddf['fill_qty'] > 0))
      orddf.loc[must_be_maker, 'fill_type'] = 'MAKER_FILL_TYPE'
    if 'event_time' in orddf:
      orddf['timestamp'] = orddf['event_time']
      del orddf['event_time']
      orddf.rename(columns={'proc_order_id': 'order_id'}, inplace=True)
      if len(orddf) == 0:
        orddf['sign'] = 0
      else:
        # +1 for buys, -1 for sells, 0 otherwise.
        orddf['sign'] = (orddf.order_side.str.find("BUY") >=
                        0).astype(float) - (orddf.order_side.str.find("SELL") >= 0).astype(float)
      if 'fill_qty' not in orddf:
        orddf['fill_qty'] = 0
        orddf['fill_price'] = 0
      # Every fill must carry a buy/sell side.
      assert ((orddf['sign'] == 0) & (orddf['fill_qty'] > 0)).sum() == 0
    if 'fill_qty' in orddf:
      # Zero-qty "fills" carry no information; NaN them so plots skip them.
      zerofq = orddf['fill_qty'] == 0
      orddf.loc[zerofq, 'fill_qty'] = numpy.nan
      orddf.loc[zerofq, 'fill_price'] = numpy.nan
    # Low-cardinality columns become numeric when castable, else categorical.
    for column in orddf.columns:
      if len(orddf[column].unique()) < 20:
        try:
          orddf[column] = orddf[column].astype(float)
        except Exception:
          orddf[column] = orddf[column].astype('category')
    sigdf = pandas.DataFrame(self.signals_)
    latdf = pandas.DataFrame(self.lats_)
    return {
        'orddf': orddf,
        # NOTE(review): 'confdf' and 'execconfdf' are both built from
        # exec_configs_; kept for compatibility with existing consumers.
        'confdf': self.get_df(self.exec_configs_),
        'pnldf': self.get_df(self.pnl_bals_),
        'stratbaldf': self.get_df(self.strat_bals_),
        'stratposdf': self.get_df(self.strat_poss_),
        'execconfdf': self.get_df(self.exec_configs_),
        'reservedf': self.get_df(self.reserves_),
        'sigdf': sigdf,
        'latdf': latdf,
        'start_time': self.start_time,
        'end_time': self.end_time,
    }

  def get_filename_prefix(self):
    """Return "<orderlog_machine>_<strategy_name>"."""
    return "_".join((self.osflags.orderlog_machine, self.osflags.strategy_name))

  def on_strat_log(self, timestamp, log):
    """Parse one serialized StrategyLog record and route it to an accumulator.

    Args:
      timestamp: archive timestamp of the record (ns epoch).
      log: serialized StrategyLog bytes.
    """
    def dict_from_msg(proto_msg):
      # Flatten a protobuf message to a dict, stamping the archive timestamp.
      ret = MessageToDict(proto_msg, preserving_proto_field_name=True)
      ret['timestamp'] = timestamp
      return ret

    self.pb.ParseFromString(log)

    if self.pb.HasField("og_log"):
      if self.pb.og_log.HasField("balance"):
        for each_bal in self.pb.og_log.balance.each_balance:
          self.strat_bals_.append(dict_from_msg(each_bal))
          self.strat_bals_[-1]['exchange_type'] = self.pb.og_log.balance.exchange
        return
      elif self.pb.og_log.HasField("position"):
        for each_pos in self.pb.og_log.position.each_position:
          self.strat_poss_.append(OgpbFields.GetDict(each_pos))
          self.strat_poss_[-1]['exchange_type'] = self.pb.og_log.position.exchange
          self.strat_poss_[-1]['timestamp'] = timestamp
        return
      elif self.pb.og_log.HasField("event"):
        oepb = self.pb.og_log.event
        # Keep only the event types relevant for pnl/order plots.
        oepb_types = [
            oepb.ORDER_SUBMITTED, oepb.ORDER_ACCEPTED, oepb.ORDER_FILLED, oepb.ORDER_ERROR,
            oepb.CANCEL_SUBMITTED, oepb.CANCEL_CONFIRMED
        ] + ([oepb.ORDER_AUTO_CANCELED] if flags.FLAGS.include_mm_stats else [])
        if oepb.type in oepb_types:
          self.oes_.append(OgpbFields.GetDict(oepb))
          # event_time occasionally leads the archive timestamp by ~9 hours;
          # shift it back. NOTE(review): looks like a KST/UTC offset artifact
          # from some source -- confirm.
          if (oepb.event_time - timestamp > 1e9 * 60 * 60 * 8.99 and
              oepb.event_time - timestamp < 1e9 * 60 * 60 * 9.01):
            self.oes_[-1]['event_time'] -= 1e9 * 60 * 60 * 9
        return
    elif self.pb.HasField("pnl_balance"):
      self.pnl_bals_.append(dict_from_msg(self.pb.pnl_balance))
      return
    elif self.pb.HasField("agg_executor_config"):
      self.exec_configs_.append(dict_from_msg(self.pb.agg_executor_config))
      # Fall back to the match-everything pattern when no account is attached.
      self.exec_configs_[-1]['exchange_type'] = self.exec_configs_[-1].get('account_request',
                                                                           {}).get(
                                                                               'exchange', '.*')
      return
    elif self.pb.HasField("reserve"):
      for reserve in self.pb.reserve.each_balance:
        self.reserves_.append(dict_from_msg(reserve))
      return
    elif self.pb.HasField("request"):
      return
    elif self.pb.type == self.pb.EXTRA_INFO:
      # extra_info carries a JSON payload tagged SIGNAL_INFO / LATENCY_INFO.
      if "SIGNAL_INFO" in self.pb.extra_info:
        obj = ujson.loads(self.pb.extra_info)
        objcomb = obj['comb']
        for field in ['symbol', 'timestamp', 'window_size']:
          objcomb[field] = obj[field]
        self.signals_.append(objcomb)
      elif "LATENCY_INFO" in self.pb.extra_info:
        obj = ujson.loads(self.pb.extra_info)
        self.lats_.append(obj)

  def _append_pt1m_event(self, *, event_time, market_type, exch_type, symbol, fill_type,
                         post_only, oe_type, order_side, fill_qty, fill_price, order_qty,
                         order_price, proc_order_id, **extra):
    """Append one synthetic order event reconstructed from a pt1m stat row.

    `extra` carries optional fields ('weight', 'sub_amt_filled') used by the
    aggregate-submission event.
    """
    oe = {
        'fill_type': fill_type,
        'event_time': event_time,
        'type': oe_type,
        'post_only': post_only,
        'market_type': market_type,
        'fill_qty': fill_qty,
        'fill_price': fill_price,
        'symbol': symbol,
        'order_qty': order_qty,
        'order_price': order_price,
        'exchange_type': exch_type,
        'order_side': order_side,
        'proc_order_id': proc_order_id,
    }
    oe.update(extra)
    self.oes_.append(oe)

  def on_pt1m(self, timestamp, log):
    """Ingest one pre-aggregated pt1m stat row.

    log['type'] is 'pnl_currency_stat' (pnl snapshot) or 'symbol_stat'
    (balance/position snapshot plus per-window order/fill aggregates, from
    which synthetic order events are reconstructed).

    Raises:
      ValueError: when a symbol_stat row carries an unsupported market_type.
    """
    # Example symbol_stat keys: currency/total/timestamp plus SUB_*/LHS_*/RHS_*
    # aggregates; exchange_type is the numeric enum, exchange the name.
    exch = log['exchange']
    exch_type = log['exchange_type']
    market_type = log['market_type']

    timestamp += get_ts_buffer()  # shift to the middle of the aggregation window

    if log['type'] == 'pnl_currency_stat':
      self.pnl_bals_.append({
        'currency': log['currency'],
        'total': log['PNL'],
        'timestamp': timestamp,
        'exchange_type': exch,
      })
      return

    assert log['type'] == 'symbol_stat', f'Invalid log type {log["type"]}'
    currency = log['symbol'].split('-')[0]
    if market_type == 1:  # Spot: record balance + reserve for the base currency.
      self.strat_bals_.append({
        'currency': currency,
        'currency_native': currency,
        'total': log['BAL'],
        'timestamp': timestamp,
        'exchange': exch,
        'exchange_type': exch,
      })
      self.reserves_.append({
        'currency': currency,
        'total': log['RESERVE'],
        'timestamp': timestamp,
        'exchange_type': exch,
      })
    elif market_type == 3:  # Futures/perpetual: record the net position.
      self.strat_poss_.append({
        'net_position': log['POS'],
        'symbol': log['symbol'],
        'exchange_type': exch,
        'timestamp': timestamp,
      })
    else:
      # BUG FIX: the original `raise f"..."` raised a plain string, which is a
      # TypeError at runtime (exceptions must derive from BaseException).
      raise ValueError(f"invalid market type {market_type}")

    symbol = log['symbol']
    sub_time = timestamp - get_ts_buffer() / 3 * 2  # submissions plotted earliest

    # Aggregate submission counters (request count / filled amount), if any.
    if log['SUB_REQ_CNT'] > 0 or log['SUB_FILLED_AMT'] > 0:
      self._append_pt1m_event(event_time=sub_time, market_type=market_type,
                              exch_type=exch_type, symbol=symbol, fill_type=0,
                              post_only=True, oe_type=1, order_side=1,
                              fill_qty=0.0, fill_price=0.0, order_qty=0, order_price=0,
                              proc_order_id=timestamp,
                              weight=log['SUB_REQ_CNT'],
                              sub_amt_filled=log['SUB_FILLED_AMT'])
    # Per-side submission quantity / vwap.
    for direction, order_side in (('BUY', 1), ('SELL', 2)):
      qty = log[f'SUB_{direction}_QTY']
      if qty > 0:
        self._append_pt1m_event(event_time=sub_time, market_type=market_type,
                                exch_type=exch_type, symbol=symbol, fill_type=0,
                                post_only=True, oe_type=1, order_side=order_side,
                                fill_qty=0.0, fill_price=0.0,
                                order_qty=qty, order_price=log[f'SUB_{direction}_VWAP'],
                                proc_order_id=timestamp)
    # Fills: LHS fills are stamped 1/3 buffer before the shifted timestamp,
    # RHS fills 1/3 after; maker fills are post-only, taker fills are not.
    # (Loop order preserves the original append order: MAKER before TAKER
    # within each direction, BUY before SELL within each leg.)
    for leg, time_sign in (('LHS', -1), ('RHS', 1)):
      for direction, order_side in (('BUY', 1), ('SELL', 2)):
        for role, fill_type, post_only in (('MAKER', 1, True), ('TAKER', 2, False)):
          qty = log[f'{leg}_{role}_{direction}_QTY']
          if qty > 0:
            vwap = log[f'{leg}_{role}_{direction}_VWAP']
            self._append_pt1m_event(event_time=timestamp + time_sign * get_ts_buffer() / 3,
                                    market_type=market_type, exch_type=exch_type,
                                    symbol=symbol, fill_type=fill_type, post_only=post_only,
                                    oe_type=4, order_side=order_side,
                                    fill_qty=qty, fill_price=vwap,
                                    order_qty=qty, order_price=vwap,
                                    proc_order_id=timestamp)

def calibrate_fill_position(title, symbol, sym_orddf, orddf, pdf=None):
  """Reconcile the REST-queried relative position with the fill-implied one.

  Iteratively finds the first row where 'rel_position' and 'pos_from_fill'
  diverge by more than 1% of 'scale', synthesizes a virtual TAKER fill 1 ns
  before that row to close the gap, and shifts 'pos_from_fill' from that row
  onwards. Repeats until no divergence remains.

  Args:
    title: plot title prefix for the per-iteration comparison pages.
    symbol: symbol attached to the synthesized virtual fills.
    sym_orddf: per-symbol order DataFrame, MUTATED IN PLACE; must contain
      'rel_position', 'pos_from_fill', 'scale', 'type', 'time_since_fill',
      'time_since_relpos' and 'timestamp' columns.
    orddf: full order DataFrame; only used to seed the synthetic order id.
    pdf: optional PdfPages; when given, one comparison figure is saved per
      iteration (capped at the first 100 iterations).

  Returns:
    (virtual_fills, random_order_id): the list of synthesized fill dicts and
    the next unused synthetic order id.
  """
  def plot_pos_comparison(pdf, rel_position, pos_from_fill, cnt, dti=None):
    # Plot rel_position (red) against pos_from_fill (green), optionally over
    # a DatetimeIndex x-axis; no-op when no pdf is given.
    if pdf is None:
      return
    if dti is not None:
      plt.plot(dti,
               rel_position,
               'r-',
               dti,
               pos_from_fill,
               'g-',
               lw=0.5,
               rasterized=True,
               drawstyle='steps-post')
    else:
      plt.plot(rel_position,
               'r-',
               pos_from_fill,
               'g-',
               lw=0.5,
               rasterized=True,
               drawstyle='steps-post')
    plt.title(f'{title}_{cnt:03d}')
    plt.legend(['pos_from_rest_query', 'pos_from_fill'], loc='upper right')


  virtual_fills = []
  # Derive synthetic order ids by offsetting the last real order id.
  random_order_id = int(orddf['order_id'].iloc[-1]) - 100000000
  cnt = 0

  while True:
    # Save a before/after comparison figure, capped at 100 iterations.
    if cnt < 100:
      if pdf is not None:
        plt.subplot(211)
        plot_pos_comparison(pdf, sym_orddf['rel_position'], sym_orddf['pos_from_fill'], cnt)
        plt.subplot(212)
        plot_pos_comparison(pdf, sym_orddf['rel_position'], sym_orddf['pos_from_fill'], cnt, pandas.DatetimeIndex(sym_orddf['timestamp']))
        pdf.savefig()
        plt.close()
    cnt += 1
    scale = sym_orddf['scale']
    diff_pos = sym_orddf['rel_position'] - sym_orddf['pos_from_fill']
    type_is_rel_pos = sym_orddf['type'] == 'relpos'

    # Only flag rows where both sources are stale beyond the ts buffer --
    # presumably to avoid rows where one side simply hasn't updated yet; the
    # row itself must not be a relpos snapshot.
    tsdiff = numpy.minimum(sym_orddf['time_since_fill'], sym_orddf['time_since_relpos'])
    cond = (diff_pos.abs() > (0.01 * scale)) & ~type_is_rel_pos
    cond = cond & (tsdiff > get_ts_buffer())

    # Fix divergences one at a time, earliest first.
    i = sym_orddf.loc[cond].first_valid_index()
    if i is None:
      break
    virtual_fills.append({
        # 1 ns prior
        'type': 'ORDER_FILLED',
        'order_id': str(random_order_id),
        'timestamp': sym_orddf['timestamp'][i] - 1,
        'fill_qty': numpy.abs(diff_pos[i]),
        'fill_price': numpy.nan,
        'sign': numpy.sign(diff_pos[i]),
        'symbol': symbol,
        'fill_type': 'TAKER_FILL_TYPE'
    })
    random_order_id += 1
    # Shift the fill-implied position from the divergent row onwards and
    # record the adjustment on that row.
    sym_orddf.loc[i:, "pos_from_fill"] += diff_pos[i]
    sym_orddf.loc[i, 'pos_adjusted'] = diff_pos[i]

  return virtual_fills, random_order_id


def orddf_consolidate_pos(dfset, bbodf_dict):
  '''Build one consolidated order dataframe per (exchange_type, symbol).

  Interleaves raw order events with the strategy's relative-position
  snapshots, attaches a mark-to-market price (BBO mid when available,
  otherwise order prices), and lets calibrate_fill_position() inject
  synthetic "virtual" fills wherever the fill-derived position drifts away
  from the queried position. A final virtual fill flattens the residual
  position at end of day.

  Example dfset['execconfdf'] contents (captured in pdb):

       symbol  lot_size  min_pos  max_pos            timestamp
  0    BTC-USDT       0.8      0.2      1.8  1587427453761305846
  1     HT-USDT      40.0    199.0    201.0  1587427461144156060

              symbol  lot_size  min_pos  max_pos            timestamp
  0   BTC-USD.20200417     150.0   -300.0    300.0  1586995653347101777
  1   BTC-USD.20200626     300.0   -600.0    600.0  1586995653451201036

  Returns:
    (orddf, bbodf_dict): the consolidated order dataframe (or None when
    there is nothing to consolidate) and the BBO dict, sliced to the order
    time span for the symbols that were processed.
  '''
  # Reconstruct an exec config from the reserve dump when execconfdf is empty.
  if dfset['execconfdf'].empty and not dfset['reservedf'].empty:
    execconfdf = dfset['reservedf'].copy()
    execconfdf['symbol'] = (execconfdf['currency'] + ("-" + dfset['pnldf']['currency'].unique()))
    execconfdf['reserve'] = execconfdf['total']
  else:
    execconfdf = dfset['execconfdf']
  # When there is no usable position dump (or only Otc rows), derive
  # positions from the strategy balance dump instead.
  if (dfset['stratposdf'].empty or dfset['stratposdf']['exchange_type'].unique().tolist() == ['Otc']) and not dfset['stratbaldf'].empty:
    no_reserve = (flags.FLAGS.strategy_name.find("lm_focus") >= 0 and
                  ('max_pos' in execconfdf and execconfdf['max_pos'].sum() == 0))
    if execconfdf.empty or no_reserve:
      # assume zero reserve
      ccys = dfset['pnldf']['currency'].unique().tolist()
      assert len(ccys) == 1
      stratbaldf = dfset['stratbaldf']
      stratposdf = stratbaldf.loc[stratbaldf['currency'] != ccys[0]].copy()
      stratposdf['symbol'] = stratbaldf['currency'] + f"-{ccys[0]}"
      stratposdf['net_position'] = stratbaldf['total']
      execconfdf = stratposdf.groupby(['symbol',
                                       'exchange_type']).size().reset_index().drop(columns=[0])
      execconfdf['min_pos'] = 0
      execconfdf['max_pos'] = 0
      execconfdf['reserve'] = 0
      execconfdf['timestamp'] = 0
    else:
      # Expect one base currency per symbol and a single shared quote.
      symbols = pandas.Series(execconfdf.symbol.unique())
      bases = symbols.str.split('-').str[0].unique()
      quotes = symbols.str.split('-').str[1].unique()
      assert len(bases) == len(symbols), (bases, symbols)
      assert len(quotes) == 1, (quotes, symbols)
      quote = quotes[0]
      stratbaldf = dfset['stratbaldf']
      stratposdfs = []
      for base in bases:
        if len(stratbaldf) == 0 or 'currency' not in stratbaldf:
          continue
        stratposdf = stratbaldf.loc[stratbaldf['currency'] == base].reset_index(drop=True)
        symbol = f"{base}-{quote}"
        stratposdf['symbol'] = symbol
        stratposdf['net_position'] = stratposdf['total']
        stratposdfs.append(stratposdf)
      stratposdf = pandas.concat(stratposdfs, axis=0).reset_index(drop=True)
  else:
    stratposdf = dfset['stratposdf']
    if len(stratposdf) == 0:
      return None, bbodf_dict
    if execconfdf.empty:
      # No exec config at all: fabricate a zero-limit one per traded symbol.
      execconfdf = stratposdf[['symbol', 'exchange_type']].drop_duplicates()
      execconfdf['min_pos'] = 0
      execconfdf['max_pos'] = 0
      execconfdf['reserve'] = 0
      execconfdf['timestamp'] = 0
  orddf = dfset['orddf']

  if len(execconfdf) == 0 or len(orddf) == 0:
    return None, bbodf_dict

  # Drop fills that report a positive quantity but no price — unpriceable.
  rm_exception = (orddf['fill_qty'] > 0) & (numpy.isnan(orddf['fill_price']))
  orddf = orddf.loc[~rm_exception].reset_index(drop=True)
  if len(orddf) == 0:
    return None, bbodf_dict

  print(orddf.head())

  sym_orddfs = []
  sliced_bbodf_dict = {}
  for exchange_type, exch_execconfdf in execconfdf.groupby('exchange_type'):
    for symbol in exch_execconfdf.symbol.unique():
      if (flags.FLAGS.pick_symbol_regex is not None and
          not re.match(flags.FLAGS.pick_symbol_regex, symbol)):
        continue
      # Merge exec-config rows and position snapshots for this symbol into
      # one time-ordered "relative position" series.
      sym_exch_execconfdf = exch_execconfdf.loc[exch_execconfdf.symbol == symbol]
      sym_stratposdf = stratposdf.loc[(stratposdf.exchange_type.str.match(exchange_type)) &
                                      (stratposdf.symbol == symbol)]
      sym_relposdf = pandas.concat([sym_stratposdf, sym_exch_execconfdf], sort=False)
      sym_relposdf = sym_relposdf.sort_values('timestamp').reset_index(drop=True)
      sym_relposdf = sym_relposdf.fillna(method='ffill')
      # When no explicit reserve exists, use the midpoint of [min_pos, max_pos].
      if 'min_pos' in sym_relposdf and 'reserve' not in sym_relposdf:
        sym_relposdf['reserve'] = 0.5 * (sym_relposdf['min_pos'] + sym_relposdf['max_pos'])
      if sym_relposdf['reserve'].isna().all():
        sym_relposdf['reserve'] = 0.5 * (sym_relposdf['min_pos'] + sym_relposdf['max_pos'])
      sym_orddf1 = orddf.loc[(orddf.exchange_type.str.match(exchange_type)) &
                             (orddf.symbol == symbol)].copy()
      if len(sym_orddf1) == 0:
        continue
      # rel_position = net position relative to the reserve.
      sym_relposdf['rel_position'] = sym_relposdf['net_position'] - sym_relposdf['reserve']
      if not flags.FLAGS.use_pt1m_dump:
        sym_relposdf = sym_relposdf.loc[sym_relposdf['rel_position'].diff() != 0]
      sym_relposdf['scale'] = sym_relposdf['rel_position'].max() - sym_relposdf['rel_position'].min()
      sym_relposdf['reserve'] = sym_relposdf['reserve'].fillna(method='ffill').fillna(
          method='bfill')
      sym_relposdf['scale'] = sym_relposdf['scale'].fillna(method='ffill').fillna(method='bfill')
      sym_relposdf['type'] = 'relpos'
      sym_relposdf = sym_relposdf[['timestamp', 'rel_position', 'scale', 'type']]
      sym_relposdf['order_id'] = -1
      market_type = sym_orddf1['market_type'].iloc[0]

      if sym_orddf1['fill_qty'].sum() == 0 and flags.FLAGS.skip_empty_fill:
        continue

      # Interleave order events with relpos snapshots on the time axis.
      sym_orddf = pandas.concat([sym_orddf1, sym_relposdf], sort=False)

      # 99999999.0 is a sentinel for "no price" in the order dump.
      nan_order = sym_orddf['order_price'] == 99999999.0
      sym_orddf.loc[nan_order, 'order_price'] = numpy.nan
      sym_orddf.loc[nan_order, 'order_qty'] = numpy.nan

      sym_orddf['sign'] = sym_orddf['sign'].fillna(0)
      sym_orddf['fill_qty'] = sym_orddf['fill_qty'].fillna(0)
      sym_orddf['timestamp'] = sym_orddf['timestamp'].astype(int)  # ?
      sym_orddf = sym_orddf.sort_values('timestamp').reset_index(drop=True)
      sym_orddf['rel_position'] = sym_orddf['rel_position'].fillna(method='ffill')
      sym_orddf['scale'] = sym_orddf['scale'].fillna(method='ffill')
      sym_orddf = sym_orddf.loc[~numpy.isnan(sym_orddf['rel_position'])].reset_index(drop=True)

      # Attach a mark-to-market price: prefer BBO mid, fall back to order price.
      if (flags.FLAGS.use_bbo or flags.FLAGS.include_mm_stats or flags.FLAGS.include_active_pnl or
          flags.FLAGS.include_lp_plot) and (exchange_type, symbol) in bbodf_dict and not bbodf_dict[
              (exchange_type, symbol)].empty:
        bbodf = bbodf_dict[(exchange_type, symbol)]
        bbodf = bbodf[bbodf['timestamp'] <= sym_orddf['timestamp'].max()]
        sliced_bbodf_dict[(exchange_type, symbol)] = bbodf
        bbodf['mtm_price'] = (0.5 *
                              (bbodf['ask0_price'] + bbodf['bid0_price'])).astype(float).copy()
        combdf = pandas.merge(sym_orddf, bbodf, on='timestamp', how='outer')
        combdf = combdf.sort_values(['timestamp']).reset_index(drop=True).set_index('timestamp')
        combdf['mtm_price'] = combdf['mtm_price'].fillna(method='ffill').fillna(method='bfill')
        sym_orddf.index = sym_orddf['timestamp']
        sym_orddf['mtm_price'] = combdf['mtm_price'][~combdf['mtm_price'].index.duplicated(
            keep='first')].copy()
        sym_orddf = sym_orddf.reset_index(drop=True)
      else:
        sym_orddf['mtm_price'] = sym_orddf['order_price'].fillna(method='ffill') \
                                                        .fillna(method='bfill')

      # Rows with an actual fill mark at their fill price.
      filled = sym_orddf['fill_qty'] > 0
      sym_orddf.loc[filled, 'mtm_price'] = sym_orddf.loc[filled, 'fill_price']
      sym_orddf['last_fill_time'] = sym_orddf['timestamp']
      sym_orddf.loc[sym_orddf['fill_qty'] == 0, 'last_fill_time'] = numpy.nan
      sym_orddf['last_fill_time'] = sym_orddf['last_fill_time'].fillna(method='ffill')

      sym_orddf['last_relpos_time'] = sym_orddf['timestamp']
      sym_orddf.loc[sym_orddf['type'] != 'relpos', 'last_relpos_time'] = numpy.nan
      sym_orddf['last_relpos_time'] = sym_orddf['last_relpos_time'].fillna(method='ffill')

      # Elapsed time since the last fill / relpos snapshot; the position
      # calibration uses these to ignore transient discrepancies.
      sym_orddf['time_since_fill'] = (sym_orddf['timestamp'] -
                                      numpy.nan_to_num(sym_orddf['last_fill_time']))
      sym_orddf['time_since_relpos'] = (sym_orddf['timestamp'] -
                                        numpy.nan_to_num(sym_orddf['last_relpos_time']))

      # Position implied purely by fills.
      sym_orddf['pos_from_fill'] = (sym_orddf['fill_qty'] * sym_orddf['sign']).cumsum()

      if dfset['start_time'] is not None:
        within_range = sym_orddf['timestamp'] >= dfset['start_time']
        sym_orddf = sym_orddf.loc[within_range]
      if dfset['end_time'] is not None:
        within_range = sym_orddf['timestamp'] <= dfset['end_time']
        sym_orddf = sym_orddf.loc[within_range]

      if sym_orddf['fill_qty'].sum() == 0 and flags.FLAGS.skip_empty_fill:
        continue

      sym_orddf['last_pos_changed_ts'] = numpy.nan
      relpos = sym_orddf['type'] == 'relpos'
      sym_orddf.loc[relpos, 'last_pos_changed_ts'] = sym_orddf.loc[relpos, 'timestamp']
      sym_orddf['last_pos_changed_ts'] = sym_orddf['last_pos_changed_ts'].fillna(method='ffill')
      sym_orddf['pos_adjusted'] = 0

      # First a dry run on a copy just to count the needed virtual fills;
      # the count is encoded into the debug pdf filename.
      dir_calibrate = f"{flags.FLAGS.out_dir}/pos_calibrate"
      os.makedirs(dir_calibrate, exist_ok=True)
      virtual_fills, _ = calibrate_fill_position(None, symbol, sym_orddf.copy(), orddf, None)
      title = f"{flags.FLAGS.strategy_name}_{exchange_type}_{symbol}"
      pdf_filename = os.path.join(dir_calibrate, f"nfix{'%04d' % len(virtual_fills)}_{title}.pdf")
      if flags.FLAGS.plot_pos_debug:
        with PdfPages(pdf_filename) as pdf:
          virtual_fills, random_order_id = calibrate_fill_position(title, symbol, sym_orddf, orddf, pdf)
      else:
        virtual_fills, random_order_id = calibrate_fill_position(title, symbol, sym_orddf, orddf, pdf=None)
      # EOD clear
      last_elem = sym_orddf.iloc[-1]
      diff_pos = 0 - last_elem['pos_from_fill']

      virtual_fills.append({
          # 1 ns prior
          'type': 'ORDER_FILLED',
          'order_id': str(random_order_id),
          'timestamp': dfset['end_time'] - 1,
          'fill_qty': numpy.abs(last_elem['pos_from_fill']),
          'fill_price': numpy.nan,
          'sign': numpy.sign(diff_pos),
          'symbol': symbol,
          'fill_type': 'TAKER_FILL_TYPE'
      })

      # Fold the virtual fills back in and strip the helper columns.
      sym_orddf2 = pandas.concat([sym_orddf, pandas.DataFrame(virtual_fills)], sort=False)
      sym_orddf2 = sym_orddf2.sort_values('timestamp').reset_index(drop=True)
      sym_orddf2['exchange_type'].fillna(method='ffill', inplace=True)
      sym_orddf2['exchange_type'].fillna(method='bfill', inplace=True)
      sym_orddf2['market_type'] = market_type
      sym_orddf2 = sym_orddf2.loc[sym_orddf2['type'] != 'relpos']
      for catecol in ['type', 'order_side', 'symbol', 'exchange_type', 'market_type', 'fill_type']:
        sym_orddf2[catecol] = sym_orddf2[catecol].astype('category')
      sym_orddf2['order_id'] = sym_orddf2['order_id'].astype(int)
      for delcol in ['scale', 'rel_position', 'last_pos_changed_ts', 'last_fill_time']:
        del sym_orddf2[delcol]
      sym_orddf2['mtm_price'] = sym_orddf2['mtm_price'].fillna(method='ffill').fillna(
          method='bfill')
      sym_orddfs.append(sym_orddf2)
  if len(sym_orddfs) == 0:
    return None, (sliced_bbodf_dict if len(sliced_bbodf_dict) > 0 else bbodf_dict)
  orddf = pandas.concat(sym_orddfs, axis=0).reset_index(drop=True)
  for catecol in ['type', 'order_side', 'symbol', 'exchange_type', 'market_type', 'fill_type']:
    orddf[catecol] = orddf[catecol].astype('category')
  orddf['order_id'] = orddf['order_id'].astype(int)
  return orddf, sliced_bbodf_dict


def get_orddf_and_strat(trading_date, baseflags, fsflags, osflags):
  """Run one strat-log dump job for a single trading date.

  Returns (orddf, sliced_bbodf_dict, strat, auxinfo) where auxinfo carries
  the signal and latency dataframes extracted from the dump.
  """
  datestr = trading_date.strftime("%Y%m%d")
  baseflags.trading_date = f'{datestr}-{datestr}'
  job = StratlogDumpJob(flags.FLAGS.out_dir, PnlPlot, baseflags, fsflags, osflags)
  strat, bbodf_dict = job.run()
  dfs = strat.get_dfset()
  strat.del_all_data()
  orddf, sliced_bbodf_dict = orddf_consolidate_pos(dfs, bbodf_dict)
  auxinfo = {'sigdf': dfs['sigdf'], 'latdf': dfs['latdf']}
  if orddf is not None and strat.start_time is not None:
    # Clip orders to the strat's configured time window.
    within = (strat.start_time <= orddf['timestamp']) & (strat.end_time >= orddf['timestamp'])
    orddf = orddf.loc[within].reset_index(drop=True)
    orddf.rename(columns={'tag': 'order_tag'}, inplace=True)
  del dfs
  return orddf, sliced_bbodf_dict, strat, auxinfo


def get_active_pnl(simstat):
  """Add 'sod_pnl' and 'active_pnl' columns to simstat['combdf'].

  sod_pnl = init_pos * (mtm_price - init_price) is the pnl the start-of-day
  position would have earned just riding the market; active_pnl is pnl_net
  minus that, i.e. the pnl attributable to trading inside the window. The
  final active_pnl is also recorded in fill_stat / pnl_stat.
  """
  combdf = simstat['combdf']
  # First positive mtm price observed; NaN when no usable price exists.
  init_price = numpy.nan if combdf[combdf['mtm_price'] > 0.0].empty else combdf[
      combdf['mtm_price'] > 0.0].iloc[0]['mtm_price']

  if numpy.isnan(init_price):
    # No price at all: mark every derived quantity NaN and bail out.
    combdf['sod_pnl'] = numpy.nan
    combdf['active_pnl'] = numpy.nan
    simstat['combdf'] = combdf
    simstat['fill_stat']['active_pnl'] = numpy.nan
    simstat['pnl_stat']['active_pnl'] = numpy.nan
    return simstat

  # Index of the first real (priced) fill, NaN when there is none.
  real_fill_cond = (combdf['type'] == 'ORDER_FILLED') & (combdf['fill_price'] > 0.0)
  real_fill_idx = numpy.nan if combdf[real_fill_cond].empty else combdf[real_fill_cond].index[0]
  # 1. If there's first virtual_fills earlier than real fill, it's open_pos is real open_pos
  cond = (combdf['type'] == 'ORDER_FILLED') & (combdf['fill_type'] == 'TAKER') & (
      combdf['fill_price'] == 0.0) & (combdf.index != combdf.index[-1]) & (
          True if numpy.isnan(real_fill_idx) else combdf.index < real_fill_idx)
  idx = numpy.nan if combdf[cond].empty else combdf[cond].index[0]
  if not numpy.isnan(idx):
    init_pos = combdf.loc[idx, 'open_pos']
  # 2. check the first real fill
  else:
    # 3. If there's real fill, calculate the pos
    if not numpy.isnan(real_fill_idx):
      # Back out the pre-fill position by removing the fill's signed quantity.
      init_pos = combdf.loc[real_fill_idx,
                            'open_pos'] - combdf.loc[real_fill_idx,
                                                     'fill_qty'] * combdf.loc[real_fill_idx, 'sign']
    # 4. If there's no real fill, first non-zero open_pos is open_pos (or open_pos == 0.0)
    else:
      init_pos = 0.0 if combdf[combdf['open_pos'] != 0.0].empty else combdf[
          combdf['open_pos'] != 0.0].iloc[0]['open_pos']
  sod_pnl = init_pos * (combdf['mtm_price'] - init_price)
  active_pnl = combdf['pnl_net'] - sod_pnl
  # Snap float noise to exactly zero.
  active_pnl[active_pnl.abs() < 1e-10] = 0.0
  # simstat['active_pnl'] = active_pnl
  # simstat['sod_pnl'] = sod_pnl
  combdf['sod_pnl'] = sod_pnl
  combdf['active_pnl'] = active_pnl
  simstat['combdf'] = combdf
  simstat['fill_stat']['active_pnl'] = active_pnl.iloc[-1]
  simstat['pnl_stat']['active_pnl'] = active_pnl.iloc[-1]
  return simstat


def plot_sigdf(*, plot_dir, filename_prefix, exchange_type, symbol, sigdf, bbodf):
  """Scatter the signal mean (top) and signal-tagged mid price (bottom).

  Rows of sigdf are selected by substring-matching symbol and exchange_type,
  then projected onto bbodf timestamps to attach a mid price. The figure is
  written to <plot_dir>/<filename_prefix>_sigplot.png.
  """
  if sigdf.empty:
    return
  os.makedirs(plot_dir, exist_ok=True)
  mask = ((sigdf['symbol'].str.find(symbol) >= 0) &
          (sigdf['symbol'].str.find(exchange_type) >= 0))
  symdf = sigdf.loc[mask].copy().reset_index(drop=True)
  # Project each signal timestamp onto the BBO timeline to pick a mid price.
  proj = sscalc2.bisect_batch(symdf['timestamp'], bbodf['timestamp'].astype(int).tolist())
  symdf['mid_price'] = bbodf['mid_price'].to_numpy()[proj]
  pos = symdf['mean'] > 0
  neg = symdf['mean'] < 0
  dti = pandas.DatetimeIndex(symdf['timestamp'])
  # Top panel: signal mean, green above zero / red below, symmetric y-axis.
  plt.subplot(211)
  plt.plot(dti[pos].to_numpy(), symdf['mean'][pos].to_numpy(), 'g.')
  plt.plot(dti[neg].to_numpy(), symdf['mean'][neg].to_numpy(), 'r.')
  ymax = symdf['mean'].abs().max()
  plt.ylim([-ymax, ymax])
  # Bottom panel: mid-price line with the signal points overlaid.
  plt.subplot(212)
  plt.plot(pandas.DatetimeIndex(bbodf['timestamp']).to_numpy(),
           bbodf['mid_price'].to_numpy(),
           lw=0.5,
           drawstyle='steps-post')
  plt.plot(dti[pos].to_numpy(), symdf['mid_price'][pos].to_numpy(), 'g.', alpha=0.5)
  plt.plot(dti[neg].to_numpy(), symdf['mid_price'][neg].to_numpy(), 'r.', alpha=0.5)
  plt.suptitle(filename_prefix)
  plt.savefig(os.path.join(plot_dir, f"{filename_prefix}_sigplot.png"))
  plt.close()


def plot_order_debug_info(orddf, bbodf_dict, strat, auxinfo, baseflags, split_hours, out_dir,
                          strategy_name):
  """Emit per-(exchange, symbol) debug plots: order plots, optional lp/depth
  plots, signal plots, and a latency-breakdown pie chart.

  Args:
    orddf: consolidated order dataframe from orddf_consolidate_pos().
    bbodf_dict: (exchange_type, symbol) -> BBO dataframe.
    strat / auxinfo: dump objects; auxinfo carries 'sigdf' and 'latdf'.
    baseflags: mutated temporarily (time_range) and restored before return.
    split_hours: when not None, the time range is split into chunks of this
      many hours and one order plot is produced per chunk.
    out_dir / strategy_name: output location and plot naming.
  """
  old_time_range = baseflags.time_range
  for (exchange_type, symbol), sym_orddf in orddf.groupby(['exchange_type', 'symbol']):
    sym_orddf = sym_orddf.copy()
    bbodf = bbodf_dict[(exchange_type, symbol)]
    if len(bbodf) == 0:
      print("export use_bbo=1;  # please do this for order plot")
      continue
    first_row = sym_orddf.iloc[0].to_dict()
    sym_orddf['fill_type'] = sym_orddf['fill_type'].str.replace('_FILL_TYPE', '')
    product = generate_product_from_str2(
        first_row['market_type'], first_row['exchange_type'], None, first_row['symbol'],
        pandas.DatetimeIndex([first_row['timestamp']]).to_pydatetime()[0])
    abase.print_mem()
    compact_stat = pcstat.PnlCompactStat.from_orderdf(product=product, orderdf=sym_orddf)
    sresp = sscalc2.SimResponseCompact(param_label=None,
                                       compact_stat=compact_stat,
                                       miscinfo={'initial_pos': flags.FLAGS.position_diff},
                                       bbodf=bbodf,
                                       product=product)
    simstat = sscalc2.get_stat_df(sresp, {}, is_pt1m=flags.FLAGS.use_pt1m_dump)
    # lp-plot accumulators are only materialized when the feature is on.
    total_sumdf, total_interval_num = (pandas.Series(),
                                       0) if flags.FLAGS.include_lp_plot else (None, None)
    if flags.FLAGS.include_active_pnl:
      simstat = get_active_pnl(simstat)
    abase.print_mem()
    for trange in (abase.split_time_range(old_time_range, split_hours)
                   if split_hours is not None else [old_time_range]):
      baseflags.time_range = trange
      filename_prefix = strat.get_filename_prefix2(f"{exchange_type}_{symbol}_{trange}")
      time_ranges = abase.get_time_range(baseflags)
      plt.style.use('seaborn-whitegrid')
      oputil.plot_order_plot(
          filename_prefix,
          time_ranges[0],
          time_ranges[1],
          sym_orddf,
          bbodf,
          pnlts=simstat['combdf']['timestamp'],
          pnls=simstat['combdf']['pnl_net'],
          prcdict=simstat['prcdict'],
          posts=sym_orddf['timestamp'],
          poss=sym_orddf['pos_from_fill'].fillna(method='ffill'),
          plot_dir=os.path.join(out_dir, "each", strategy_name),
          active_pnl=simstat['combdf']['active_pnl'] if flags.FLAGS.include_active_pnl else None,
          plot_tags=flags.FLAGS.include_tag_info)
      if flags.FLAGS.include_lp_plot:
        sumdf, interval_num = lputil.plot_lp_plot(filename_prefix,
                                                  time_ranges[0],
                                                  time_ranges[1],
                                                  sym_orddf,
                                                  bbodf,
                                                  plot_dir=os.path.join(out_dir, "each", "lpplot"))
        if total_sumdf.size == 0:
          total_sumdf = sumdf
        else:
          total_sumdf += sumdf
        total_interval_num += interval_num
    filename_prefix = strat.get_filename_prefix2(f"{exchange_type}_{symbol}")
    if (flags.FLAGS.include_lp_plot) and (len(total_sumdf) != 0) and (total_interval_num != 0):
      depth_dir = os.path.join(out_dir, 'depth')
      lputil.plot_total_depth_tb(filename_prefix, depth_dir, total_sumdf, total_interval_num)
    plot_sigdf(plot_dir=os.path.join(out_dir, "each", "sigplot"),
               filename_prefix=filename_prefix,
               exchange_type=exchange_type,
               symbol=symbol,
               sigdf=auxinfo['sigdf'],
               bbodf=bbodf)
  if not auxinfo['latdf'].empty and 'latency_type' in auxinfo['latdf']:
    filename_prefix = strat.get_filename_prefix2("")
    LatencyType = tele_pb2.StrategyComponentLatencyStatsProto.LatencyType
    latdf = auxinfo['latdf']
    latdf = latdf.loc[~latdf['latency_type'].isna()].copy()
    if not latdf.empty:
      latdf['latency_tag'] = latdf['latency_type'].astype(int).apply(
          lambda val: LatencyType.DESCRIPTOR.values_by_number[val].name)
      latdf['twindow_latency_sum_sec'] = latdf['twindow_latency_sum'] * 1e-9
      latdf['twindow_share'] = latdf['twindow_latency_sum'] / latdf['twindow_length']
      print(latdf.groupby(['latency_tag', 'mea'])['twindow_share'].describe())
      plot_lat_dir = os.path.join(out_dir, "each", "latplot")
      os.makedirs(plot_lat_dir, exist_ok=True)
      latseri = latdf.groupby(['latency_tag', 'mea'])['twindow_share'].mean()
      # Series.iteritems() was removed in pandas 2.0; items() is the
      # long-standing equivalent and works on older pandas as well.
      latlabels = [f"{key}:{'%.6f' % row}" for key, row in latseri.items()]
      plt.suptitle(filename_prefix)
      plt.title("latency breakdown")
      plt.pie(latseri / latseri.sum(), labels=latlabels)
      plt.savefig(os.path.join(plot_lat_dir, f"{filename_prefix}.png"))
      plt.close()
  # restore baseflags.time_range to original
  baseflags.time_range = old_time_range


def launch(trading_date, baseflags, fsflags, osflags):
  """Process a single trading date; optionally emit per-date debug plots.

  Returns (orddf, bbodf_dict) for later merging across dates.
  """
  orddf, bbodf_dict, strat, auxinfo = get_orddf_and_strat(trading_date, baseflags, fsflags,
                                                          osflags)
  if flags.FLAGS.strategy_name and flags.FLAGS.strategy_name.find("rmm_") >= 0:
    # rmm strategies plot with zero taker/maker fees.
    os.environ["feebps_tm"] = "0/0"
  if plot_debug_info():
    plot_order_debug_info(orddf, bbodf_dict, strat, auxinfo, baseflags,
                          flags.FLAGS.split_debug_hours, flags.FLAGS.out_dir,
                          flags.FLAGS.strategy_name)
  strat.set_time_range(*abase.get_time_range(baseflags))
  abase.print_mem()
  return orddf, bbodf_dict


def launch_all(baseflags, fsflags, osflags):
  """Run one dump job over the whole flag-specified range and consolidate it.

  Returns (orddf, sliced_bbodf_dict) from orddf_consolidate_pos().
  """
  job = StratlogDumpJob(flags.FLAGS.out_dir, PnlPlot, baseflags, fsflags, osflags)
  strat, bbodf_dict = job.run()
  return orddf_consolidate_pos(strat.get_dfset(), bbodf_dict)


def get_xlim_given_time_range(baseflags):
  """Return [start, end] as pandas timestamps, or None for an open range."""
  start_time, end_time = abase.get_time_range(baseflags)
  if start_time is None or end_time is None:
    return None
  bounds = pandas.DatetimeIndex([start_time, end_time])
  return [bounds[0], bounds[1]]


def main(_):
  """Entry point: dump strat logs per trading date, merge the per-date order
  and BBO frames, compute per-symbol sim stats, and plot merged/each pnl plus
  optional option-pnl and mm-stats outputs."""
  baseflags = abase.get_base_flags()
  fsflags = abase.get_feed_subscription_flags()
  osflags = abase.get_order_subscription_flags()

  trading_dates = abase.get_trading_dates(baseflags.trading_date)

  args = tuple([baseflags, fsflags, osflags])
  rettuples = []
  # One launch() per trading date, serially or via a process pool.
  if flags.FLAGS.cpu == 1 or len(trading_dates) == 1:
    for trading_date in trading_dates:
      rettuples.append(launch(trading_date, *args))
  else:
    with ProcessPoolExecutor(max_workers=flags.FLAGS.cpu) as executor:
      futures = []
      for trading_date in trading_dates:
        futures.append(executor.submit(launch, trading_date, *args))
      for future in futures:
        res = future.result()
        if res is not None:
          rettuples.append(res)
  # Merge per-date results: collect BBO frames per symbol and all order dfs.
  bbodf_dicts = collections.defaultdict(list)
  orddfs = []
  for orddf, bbodf_dict in rettuples:
    if orddf is None:
      continue
    orddfs.append(orddf)
    for symbol, bbodf in bbodf_dict.items():
      bbodf_dicts[symbol].append(bbodf)

  bbodf_dict = {}
  for symbol, bbodfs in bbodf_dicts.items():
    bbodf = pandas.concat(bbodfs, axis=0, sort=False).reset_index(drop=True)
    if len(bbodf) > 0:
      bbodf = bbodf.sort_values('timestamp').reset_index(drop=True)
    bbodf_dict[symbol] = bbodf

  orddf = pandas.concat(orddfs, axis=0, sort=False).reset_index(drop=True)
  orddf = orddf.sort_values('timestamp').reset_index(drop=True)
  sstats_dict = {}
  timedrets = []
  eachpnldfs = []
  for (exchange_type, symbol), sym_orddf in orddf.groupby(['exchange_type', 'symbol']):
    if len(sym_orddf) == 0:
      continue
    sym_orddf = sym_orddf.copy()
    first_row = sym_orddf.iloc[0]
    product = generate_product_from_str2(
        first_row['market_type'], first_row['exchange_type'], None, first_row['symbol'],
        pandas.DatetimeIndex([first_row['timestamp']]).to_pydatetime()[0])
    sym_bbodf = bbodf_dict.get((exchange_type, symbol), None)
    # BUGFIX: check for a missing BBO frame BEFORE dereferencing it — the
    # cache dedup below used to run first and raised AttributeError on None.
    if sym_bbodf is None:
      print(f"{symbol} is ignored")
      continue
    if bbo_from_cache():
      # Cached BBO data can contain duplicate timestamps; keep the first.
      sym_bbodf = sym_bbodf.loc[~sym_bbodf['timestamp'].duplicated(keep='first')].reset_index(
          drop=True)
    sym_orddf['fill_type'] = sym_orddf['fill_type'].str.replace('_FILL_TYPE', '')

    compact_stat = pcstat.PnlCompactStat.from_orderdf(product=product, orderdf=sym_orddf)
    funding_df = dump_funding_rate(product, trading_dates[0],
                                   trading_dates[-1] + datetime.timedelta(days=1),
                                   flags.FLAGS.bbo_cache_root)
    sresp = sscalc2.SimResponseCompact(param_label=None,
                                       compact_stat=compact_stat,
                                       miscinfo={
                                           'initial_pos': flags.FLAGS.position_diff,
                                           'funding_df': funding_df
                                       },
                                       bbodf=sym_bbodf,
                                       product=product)
    normsymbol = (product.subscription_symbol
                  if hasattr(product, "subscription_symbol") else product.symbol)
    simstat = sscalc2.get_stat_df(
        sresp, {}, is_sliced=(flags.FLAGS.include_mm_stats or flags.FLAGS.include_lp_plot) is False, is_pt1m=flags.FLAGS.use_pt1m_dump)
    if flags.FLAGS.include_active_pnl:
      simstat = get_active_pnl(simstat)
    if flags.FLAGS.include_option_pnl:
      # Convert pnl increments to USD using the quote currency fx rate.
      tmp_df = simstat['combdf'].copy()
      tmp_df['pnl_diff(USD)'] = tmp_df['pnl_net'].diff() * (1 if symbol.split(
          "-")[1] in ["USDT", "USD"] else 1.0 / ssfrompb.get_fxrate(symbol.split("-")[1]))
      tmp_df['from_symbol'] = f'{exchange_type}.{symbol}'
      eachpnldfs.append(
          tmp_df[['timestamp', 'from_symbol', 'pnl_diff(USD)', 'mtm_price', 'open_pos']].copy())
    simstat['product'] = {
        'market_type': first_row['market_type'],
        'exchange_type': first_row['exchange_type'],
        'symbol': normsymbol
    }
    simstat['product2'] = product
    sstats_dict[f'{exchange_type}_{normsymbol}'] = [simstat]
    timedret = simstat.get('timedret', None)
    if timedret is not None:
      timedrets.append(timedret)

  name_prefix = f"{osflags.strategy_name}"
  xlim = None
  if len(trading_dates) == 1:
    xlim = get_xlim_given_time_range(baseflags)

  def plot_pnl(out_dir, stats_dict, namestr):
    # Plot the aggregated pnl for stats_dict, serialize the stats, and dump a
    # json summary next to the picture; optionally plot timed returns.
    os.makedirs(out_dir, exist_ok=True)
    plt.style.use('seaborn-whitegrid')
    timedret, pnlsr = ssfrompb.plot_aggregated_pnl(stats_dict, real_scale_t_axis=True, xlim=xlim, plot_api_rate_used=False)
    pnlstr = ssfrompb.get_pnlstr(pnlsr)
    plt.suptitle(f"{namestr}_total", fontsize=20)
    ssplot.setup_plt()
    plt.rcParams["figure.figsize"] = 6, 6
    picname = f"live_{pnlstr}_{namestr}_{flags.FLAGS.trading_date}"
    plt.savefig(f"{out_dir}/{picname}.png")
    plt.close()
    os.makedirs(f"{out_dir}/statsfile", exist_ok=True)
    pmerger.serialize_pnl(f"{out_dir}/statsfile", picname, stats_dict)
    with open(f"{out_dir}/statsfile/{picname}.json", 'w') as sw:
      pnl_info = {}
      pnl_info["strategy_name"] = flags.FLAGS.strategy_name
      pnl_info["dump_stat_key"] = namestr
      pnl_info["products"] = [sstats[0]['product'] for sstats in stats_dict.values()]
      pnldict = pnlsr.to_dict()
      # each_pnl_df is a DataFrame — not json-serializable, drop it.
      pnldict.pop('each_pnl_df', None)
      pnl_info["pnldict"] = pnldict
      json.dump(pnl_info, sw, indent=2)
    if timedret is not None and not timedret.empty and flags.FLAGS.timed_ret:
      os.makedirs(f"{out_dir}/timedret", exist_ok=True)
      ssplot.plot_timedret(f"{out_dir}/timedret/timedret_{picname}.pdf", timedret,
                           f"live_{flags.FLAGS.trading_date}_{namestr}")

  merged_dir = os.path.join(flags.FLAGS.out_dir, "merged")
  print(merged_dir)
  plot_pnl(merged_dir, sstats_dict, name_prefix)

  exchanges = set(sstats[0]['product']['exchange_type'] for sstats in sstats_dict.values())
  if flags.FLAGS.split_exch and len(exchanges) > 1:
    for exchange in exchanges:
      sstats_exchange_dict = {
          k: v for k, v in sstats_dict.items() if v[0]['product']['exchange_type'] == exchange
      }
      plot_pnl(merged_dir, sstats_exchange_dict, name_prefix + f"_{exchange}_")

  if flags.FLAGS.clip_each is not None:
    pnl_keys = [
        (
            0 if len(sim_stat[-1]['combdf']) == 0 else
            sim_stat[-1]['combdf'].iloc[-1]['pnl_net'], key
        )
        for key, sim_stat in sstats_dict.items()]
    pnl_keys = sorted(pnl_keys)
    #TODO(jhkim): seems something weird about clip_each's usage
    keys_sel = pnl_keys[:-flags.FLAGS.clip_each] + pnl_keys[flags.FLAGS.clip_each:]
    keys_sel = set(map(lambda x: x[1], keys_sel))
    sstats_dict = {key: sim_stat for key, sim_stat in sstats_dict.items() if key in keys_sel}

  each_dir = os.path.join(flags.FLAGS.out_dir, "each")
  for key, val in sstats_dict.items():
    plot_pnl(os.path.join(each_dir), {key: val}, name_prefix + f"_{key}_")
  if flags.FLAGS.include_option_pnl:
    target_config = flags.FLAGS.strategy_name + ".json"
    each_pnl_df = pandas.concat(eachpnldfs)
    ssopt.plot_options_and_hedge_pnls(flags.FLAGS.out_dir,
                                      each_pnl_df,
                                      target_config,
                                      is_pnlplot=True)
  if flags.FLAGS.include_mm_stats:
    mm_stats_dir = os.path.join(each_dir, 'mm_stats')
    os.makedirs(mm_stats_dir, exist_ok=True)
    plt.style.use('seaborn-whitegrid')
    ssplot.setup_plt()
    plt.rcParams["figure.figsize"] = 16, 10
    fig, axs = plt.subplots(len(sstats_dict))
    fig.suptitle(f"{name_prefix}", fontsize=12)
    plt_idx = 0
    for key, simstats in sstats_dict.items():
      ax = axs[plt_idx] if isinstance(axs, numpy.ndarray) else axs
      # NOTE(review): assumes neither exchange nor symbol contains '_'.
      exchange, symbol = key.split('_')
      bbodf = bbodf_dict.get((exchange, symbol), None)
      final_res = None
      title_ts_min = None
      title_ts_max = None
      for _, simstat in enumerate(simstats):
        orderdf = simstat['orderdf']
        assert simstat[
            'product2'].market_name == 'Spot', f"Only spot market is supported, not {simstat['product2'].market_name}"
        mea = f"{simstat['product2'].market_name}.{simstat['product2'].exchange_name}.{abase.get_product_api_version(simstat['product2'])}"
        ts_min = min(orderdf['timestamp'].min(), bbodf['timestamp'].min())
        title_ts_min = ts_min if title_ts_min is None else min(title_ts_min, ts_min)
        ts_max = max(orderdf['timestamp'].max(), bbodf['timestamp'].max())
        title_ts_max = ts_max if title_ts_max is None else max(title_ts_max, ts_max)
        res = oputil.plot_mm_stats_table(mea=mea,
                                         symbol=simstat['product2'].relative_norm,
                                         oedf=orderdf,
                                         bbodf=bbodf,
                                         price_bp_tholds=flags.FLAGS.price_bp_tholds,
                                         quote_tholds=flags.FLAGS.quote_tholds,
                                         base_tholds=flags.FLAGS.base_tholds,
                                         is_pnl_plot=True)
        if final_res is None:
          final_res = res
        else:
          final_res += res
      oputil.plot_mm_stats_chart(final_res,
                                 bbodf,
                                 orderdf,
                                 mm_stats_dir,
                                 f"{exchange}.{symbol}:",
                                 is_split=True,
                                 is_pnl_plot=True)
      oputil.plot_mm_stats_chart(final_res,
                                 bbodf,
                                 orderdf,
                                 mm_stats_dir,
                                 f"{exchange}.{symbol}:",
                                 is_split=False,
                                 is_pnl_plot=True)
      table_df = oputil.get_table_res(final_res).reset_index()
      bold_idx = table_df[table_df['Stat'] == 'Avg'].index
      table = ax.table(cellText=table_df.to_numpy(),
                       colLabels=table_df.columns,
                       loc='center',
                       cellLoc='center')
      # Renamed from 'key' to avoid shadowing the outer sstats_dict loop var.
      for cell_key, cell in table.get_celld().items():
        row, _ = cell_key
        if row - 1 in bold_idx:
          cell.set_text_props(fontproperties=matplotlib.font_manager.FontProperties(weight='bold'))
      table.auto_set_column_width(col=list(range(len(table_df.columns))))
      ax.axis('tight')
      ax.axis('off')
      ax.set_title(
          f"{exchange} {symbol} {datetime.datetime.utcfromtimestamp(title_ts_min/1e9).strftime('%Y-%m-%d %H:%M:%S')}-{datetime.datetime.utcfromtimestamp(title_ts_max/1e9).strftime('%Y-%m-%d %H:%M:%S')}"
      )
      plt_idx += 1
    fig.subplots_adjust(hspace=0.3)
    fig.savefig(f"{mm_stats_dir}/{name_prefix}_stat_table.png")
    plt.close(fig)


def init_flags():
  """Register all command-line flags for the PnL-plot tool and set up logging.

  Must be called before app.run() so that absl has every flag defined when it
  parses sys.argv. Registration is side-effectful and module-global.
  """
  # Shared flag groups defined by the archive tooling (trading_date, paths, ...).
  abase.define_base_flags()
  abase.define_feed_archive_flags()
  abase.define_order_archive_flags()

  flags.DEFINE_bool('is_focus_mt', False, '')
  flags.DEFINE_bool('use_bbo', True, '')
  flags.DEFINE_bool('full_bbo', False, '')
  flags.DEFINE_bool('force_rerun', True, '')
  flags.DEFINE_bool('unfold_stat', True, '')
  flags.DEFINE_bool('timed_ret', True, '')
  flags.DEFINE_bool('split_exch', False, 'Plot pnl by exchange, no effect if only one exchange')
  flags.DEFINE_bool('skip_empty_fill', True, 'Skip case when fill_qty is 0')
  flags.DEFINE_string('pick_symbol_regex', None, 'symbol select if needed')
  flags.DEFINE_string('out_dir', 'pnl_plot', 'out_dir')
  flags.DEFINE_integer('cpu', 1, '')
  flags.DEFINE_float('split_debug_hours', 6, '')
  flags.DEFINE_float('position_diff', 0, '')
  flags.DEFINE_bool('use_full_live_log', False, '')
  flags.DEFINE_bool('use_pt1m_dump', False, 'True if you want to use pt1m dump instead of strat log.')
  # Tri-state bool: None means "auto" — plot_debug_info() (see top of file)
  # falls back to a trading-date-count heuristic when this is unset.
  flags.DEFINE_bool('force_plot_debug_info', None,
                    'True if you want "each" for multiple trading dates. False then do not plot')
  flags.DEFINE_bool('include_mm_stats', False,
                    'True if you want to include mm stats (Uptime, Order Depth, Volume, ...)')
  flags.DEFINE_bool('include_lp_plot', False, 'Fancy looking black liquidity plot')
  flags.DEFINE_bool('include_option_pnl', False, 'True if you want to include option pnl')
  # Threshold lists consumed by oputil.plot_mm_stats_chart / calc_mm_stats.
  # NOTE(review): absl DEFINE_list parses command-line values as strings; the
  # defaults here are ints — downstream presumably coerces. TODO confirm.
  flags.DEFINE_list('price_bp_tholds', [75, 300],
                    'mm_stats price threshold (e.g. 75: midP +/- 75bps)')
  flags.DEFINE_list(
      'quote_tholds', [100],
      'mm_stats quote threshold (e.g. 100: Check total quote sum of bid/ask orders within price_bp)'
  )
  flags.DEFINE_list(
      'base_tholds', [],
      'mm_stats base threshold (e.g. 100: Check total base qty sum of bid/ask orders within price_bp_tholds)'
  )
  flags.DEFINE_bool('include_active_pnl', False,
                    'active_pnl = pnl - init_pos * (price[t] - init_price)')
  flags.DEFINE_integer('clip_each', None, '')  # if 2, clip top, bottom 2 pnl symbols
  flags.DEFINE_integer('plot_pos_debug', 1, '')
  flags.DEFINE_bool('include_tag_info', False, 'True if you want to include tag info at each txt')

  # Root-logger setup for the whole tool run.
  logging.basicConfig(level='DEBUG', format='%(levelname)8s %(asctime)s %(name)s] %(message)s')


if __name__ == '__main__':
  # Script entry point: register flags, then hand control to absl's app.run,
  # which parses argv and invokes main (defined earlier in this file).
  init_flags()
  try:
    app.run(main)
  except Exception:
    # Top-level boundary: log the full traceback and echo the exact command
    # line (to stderr, so piped stdout stays clean) to ease reproduction.
    traceback.print_exc()
    print(' '.join(sys.argv), file=sys.stderr)
    # Propagate failure to the shell — previously the process exited with
    # status 0 after a crash, masking failures from schedulers/pipelines.
    sys.exit(1)