# Copyright (c) 2019 Presto Labs Pte. Ltd.
# Author: yuxuan, chunhui, zijin

import matplotlib

matplotlib.use('Agg')
import matplotlib.pyplot as plt

import datetime
import glob
import json
import math
import os
import subprocess
import traceback
from concurrent.futures import ProcessPoolExecutor, as_completed
import warnings
import tempfile

import tabulate
import numpy as np
import pandas as pd
from absl import app, flags
import google.protobuf.json_format as json_format

from cc.appcoin2.strategy.basis_strat.support.live_oe_stat import live_oe_stat
from coin.base.mail_util import send_mail, send_mail_with_attachments
from coin.exchange.kr_rest.product.product_impl import generate_product_from_str2
from coin.proto.coin_order_gateway_pb2 import OrderEvent, OrderGatewayLog
from coin.proto.coin_strategy_pb2 import StrategyLog
from coin.strategy.accounting.fee import get_fee_rate
from coin.support.proto_log.logic.util import run_from_strat_log_archive
from coin.support.pta.logic.pta_stats_calculator import (get_product_multiplier, is_inverse,
                                                         get_accounting_currency)
from coin.strategy.basis_strat.analysis.util import get_all_strats_info
import coin.pnl.sim_stat_plot as ssplot
from coin.base.query_util import query_exchange_rates
from cc.appcoin2.strategy.basis_strat.support.daily_search.slack_helper import SlackHelper
from cc.appcoin2.strategy.basis_strat.support.cache_util import CACHE_ROOT, get_trading_dates
from coin.experimental.yuxuan.tools.dump_feed_from_cache import read_feeds
from python.coin.strategy.mm.tool.signal_plot_base import dump_funding_rate


FLAGS = flags.FLAGS


def plot_name(name, exchange, mkt, start, end):
  """Return the canonical PNG filename for a strategy plot."""
  fields = (name, exchange, mkt, start, end)
  return '%s_%s_%s_%s_%s.png' % fields


def is_inverse_handler(market_type, exchange, symbols):
  """Map each symbol AND its base coin to whether the product is inverse.

  Symbols for which the `is_inverse` lookup raises are skipped silently.
  """
  inverse_dict = {}
  for symbol in symbols:
    try:
      inv = is_inverse(market_type, exchange, symbol)
    except Exception:
      continue
    inverse_dict[symbol] = inv
    inverse_dict[symbol.split('-')[0]] = inv
  return inverse_dict


def get_accounting_currency_handler(market_type, exchange, symbols):
  """Map each symbol's base coin to its accounting currency.

  Symbols for which the lookup raises are skipped silently.
  """
  currency = {}
  for symbol in symbols:
    try:
      ccy = get_accounting_currency(market_type, exchange, symbol)
      currency[symbol.split('-')[0]] = ccy
    except Exception:
      continue
  return currency


class SubOnLog(object):
  """Per-symbol event sink attached to a parent log reader.

  Base class: every event handler is a no-op; subclasses override the
  handlers they care about.
  """

  def __init__(self, parent, coin):
    self.parent = parent
    self.coin = coin

  def get_ts(self):
    """Current timestamp, delegated to the parent reader."""
    return self.parent.ts

  def on_fill(self, fill):
    """Handle a fill event (no-op in the base class)."""

  def on_balance(self, balance):
    """Handle a balance snapshot (no-op in the base class)."""

  def on_reserve(self, reserve):
    """Handle a reserve update (no-op in the base class)."""

  def on_order_event(self, event):
    """Handle a raw order event (no-op in the base class)."""


def isclose(a, b, rtol=1.e-5, atol=1.e-8):
  """Scalar analogue of np.isclose: |a - b| <= atol + rtol * |b|.

  Kept as a plain-float helper to avoid numpy's array machinery when only
  two scalars are compared.
  """
  tolerance = atol + rtol * abs(b)
  return abs(a - b) <= tolerance


class FillPnlOnLog(SubOnLog):
  """Accumulates per-symbol fill / position / reserve rows for PnL calc.

  Every appended row follows get_fill_info_columns():
  [ts, fill_qty, taker, multiplier, position, reserve, price].
  """

  def __init__(self, parent, coin, is_inv, reserve):
    SubOnLog.__init__(self, parent, coin)
    self.is_fut = parent.is_futures
    self.is_inv = is_inv
    self.reserve = reserve
    self.fill_info = []

  def get_fill_info_columns(self):
    """Column names for the rows accumulated in self.fill_info."""
    return [
        'ts',
        'fill_qty',
        'taker',
        'multiplier',
        'position',
        'reserve',
        'price',
    ]

  def update_reserve(self, reserve):
    self.reserve = reserve

  def on_balance(self, balance):
    """Spot only: turn a balance snapshot into a position via the reserve."""
    if self.is_fut:
      return
    if self.reserve is None:
      return
    self.on_position(balance - self.reserve)

  def on_position(self, position):
    """Record a position snapshot, carrying forward the last seen price."""
    last_price = self.fill_info[-1][-1] if self.fill_info else np.nan
    self.fill_info.append(
        [self.get_ts(), None, None, None, position, self.reserve, last_price])

  def on_fill(self, fill):
    """Record one fill row; classify maker/taker when the gateway did not.

    Note: mutates `fill` in place when its guess_fill_type is unknown.
    """
    if fill.get('guess_fill_type', 'UNKNOWN_FILL_TYPE') == 'UNKNOWN_FILL_TYPE':
      # Fill at the order price => assume maker, otherwise taker.
      if np.isclose(fill['fill_price'], fill['order_price']):
        fill['guess_fill_type'] = 'MAKER_FILL_TYPE'
      else:
        fill['guess_fill_type'] = 'TAKER_FILL_TYPE'
    taker = fill['guess_fill_type'] == 'TAKER_FILL_TYPE'
    multiplier = get_product_multiplier(self.parent.market_type, self.parent.exchange,
                                        fill['symbol'])
    signed_qty = fill['fill_qty']
    if 'SELL' in fill['order_side']:
      signed_qty = -signed_qty
    self.fill_info.append(
        [self.get_ts(), signed_qty, taker, multiplier, None, self.reserve,
         fill['fill_price']])

  def on_reserve(self, reserve):
    self.update_reserve(reserve)

  def combine_fill_pnl_on_log(self, fill_pnl_log):
    """Append another FillPnlOnLog's accumulated rows onto this one's."""
    self.fill_info.extend(fill_pnl_log.fill_info)


class OnLog(object):
  """Strategy-log replayer for one (market_type, exchange, api).

  Fed serialized StrategyLog records via on_log(); reconstructs per-symbol
  fills, positions and reserves in FillPnlOnLog sinks (self._sublogs, keyed
  by symbol), then turns them into PnL tables and matplotlib plots.
  """

  def __init__(self, strategy_name, machine, mea, sample_symbol, start_date, end_date):
    """Args:
      strategy_name: strategy name; containing 'mm_currency' flags a
        multiple-exchange strategy (changes record validation below).
      machine: host the strategy ran on (kept for reporting).
      mea: 'MarketType.Exchange.Api' dotted string.
      sample_symbol: any of the strategy's symbols; used to derive the title
        currency and a representative product for fee lookup.
      start_date, end_date: datetime bounds of the analysed window.
    """
    self.strategy_name = strategy_name
    self.multiple_exchange = 'mm_currency' in strategy_name
    self.machine = machine
    self.mea = mea
    self.market_type, self.exchange, self.api = mea.split('.')
    self.is_futures = self.market_type == 'Futures'
    self.title_currency = sample_symbol.split('-')[-1]
    self.symbols = set()
    self.inverse_dict = dict()
    self.start_date = start_date
    self.end_date = end_date
    p = None
    try:
      p = generate_product_from_str2(self.market_type, self.exchange,
                                     None, sample_symbol, start_date)
    except Exception:
      # Best effort: the fee lookup below is still attempted with p == None.
      pass
    self.maker_fee, self.taker_fee = get_fee_rate(self.market_type, self.exchange, p)
    self._sublogs = dict()
    self._on_reserve = False
    self.ts = 0
    self.all_data = []

  def get_usd_multiplier(self):
    """Populate self.usd_multiplier: base coin -> FX rate into USD.

    Rates are queried for the day before end_date; coins whose accounting
    currency could not be resolved are dropped.
    """
    coins = [symbol.split('-')[0] for symbol in self.symbols]
    currency = get_accounting_currency_handler(self.market_type, self.exchange, self.symbols)
    coins = [coin for coin in coins if coin in currency.keys()]
    ccy_list = [currency[coin] for coin in coins]
    query_date = (self.end_date - datetime.timedelta(days=1)).date()
    rate_list = query_exchange_rates(ccy_list, 'USD', query_date)
    self.usd_multiplier = {coin: rate for coin, rate in zip(coins, rate_list)}

  def validate_pb(self, timestamp, pb):
    """Return False for records to skip.

    Skips out-of-order timestamps (single-exchange strategies only) and,
    for multiple-exchange strategies, RESERVE records whose balances look
    inconsistent with the futures/spot leg being processed.
    """
    if not self.multiple_exchange and timestamp < self.ts:
      return False
    if pb.type == StrategyLog.RESERVE and self.multiple_exchange:
      for a_data in pb.reserve.each_balance:
        reserve = a_data.total
        if self.multiple_exchange:
          if (self.is_futures and reserve > 0) or (not self.is_futures and reserve == 0):
            return False
    return True

  def on_log(self, timestamp, log):
    """Parse one serialized StrategyLog record and dispatch it to the
    per-symbol FillPnlOnLog sinks; also collects raw order events."""
    pb = StrategyLog()
    pb.ParseFromString(log)
    if not self.validate_pb(timestamp, pb):
      return
    self.ts = timestamp
    symbol = None
    # NOTE: 'AGG_EXEUCTOR_CONFIG' spelling mirrors the proto enum name.
    if pb.type == StrategyLog.AGG_EXEUCTOR_CONFIG:
      symbol = pb.agg_executor_config.symbol
      is_inv = self.is_inv(symbol)
      if is_inv is None:
        return
      reserve = pb.agg_executor_config.reserve
      if reserve is None:
        # Default reserve: midpoint of the configured position band.
        reserve = (pb.agg_executor_config.max_pos + pb.agg_executor_config.min_pos) / 2
      sub_log = self._sublogs.get(symbol, None)
      if not sub_log:
        self._sublogs[symbol] = FillPnlOnLog(self, symbol, is_inv, reserve)
      if sub_log:
        if sub_log.reserve != reserve:
          print('%s reserve changed from %f to %f' % (symbol, sub_log.reserve, reserve))
        sub_log.on_reserve(reserve)
    elif pb.type == StrategyLog.OG_LOG:
      og_log = pb.og_log
      request = og_log.account_request
      # Only process records for this OnLog's (market_type, exchange).
      if not (request.market_type == self.market_type and request.exchange == self.exchange):
        return
      if og_log.type == OrderGatewayLog.BALANCE:
        for a_data in og_log.balance.each_balance:
          ccy = a_data.currency
          # Quote/fiat currencies are not tracked as positions.
          if ccy in ['KRW', 'USD', 'USDT', 'JPY']:
            continue
          symbol = f'{ccy}-{self.title_currency}'
          if symbol in self._sublogs:
            self._sublogs[symbol].on_balance(a_data.total)
      elif og_log.type == OrderGatewayLog.POSITION:
        for a_data in og_log.position.each_position:
          symbol = a_data.symbol
          if symbol in self._sublogs and self.is_futures:
            self._sublogs[symbol].on_position(a_data.net_position)
      elif og_log.type == OrderGatewayLog.ORDER_EVENT:
        event = og_log.event
        event_data = json_format.MessageToDict(event, preserving_proto_field_name=True)
        symbol = event_data.get('symbol', None)
        if not symbol:
          return
        is_inv = self.is_inv(symbol)
        if is_inv is None:
          return
        if event.type == OrderEvent.ORDER_FILLED:
          if symbol in self._sublogs:
            self._sublogs[symbol].on_fill(event_data)
        elif event.type == OrderEvent.ORDER_SUBMITTED:
          # Futures symbols may first appear via an order, not a config.
          if symbol not in self._sublogs and self.is_futures:
            self._sublogs[symbol] = FillPnlOnLog(self, symbol, is_inv, 0)
        event_data['ts'] = timestamp
        if not flags.FLAGS.light_ver or flags.FLAGS.output_oe_only:
          self.all_data.append(event_data)
    if symbol:
      self.symbols.add(symbol)

  def get_oe_as_df(self):
    """All collected order events as a DataFrame.

    order_side is normalized to BUY_ORDER / SELL_ORDER; an empty DataFrame
    is returned as-is when nothing was collected.
    """
    df = pd.DataFrame(self.all_data)
    if df.empty:
      return df
    is_buy_order = df['order_side'].str.contains('BUY')
    is_sell_order = df['order_side'].str.contains('SELL')
    df.loc[is_buy_order.fillna(False), 'order_side'] = 'BUY_ORDER'
    df.loc[is_sell_order.fillna(False), 'order_side'] = 'SELL_ORDER'
    return df

  def combine_on_log(self, onlogs, symbols):
    """Merge per-symbol sublogs from other OnLog shards into this one,
    widening [start_date, end_date] accordingly.

    Each shard is asserted to span exactly two days; only rows after the
    shard's first half hour are kept to avoid double counting overlap.
    """
    all_fill_pnl_on_log = {symbol: [] for symbol in symbols}
    for symbol in symbols:
      if symbol in self._sublogs.keys():
        all_fill_pnl_on_log[symbol].append(self._sublogs[symbol])
      else:
        all_fill_pnl_on_log[symbol] = []
    for onlog in onlogs:
      self.start_date = min([self.start_date, onlog.start_date])
      self.end_date = max([self.end_date, onlog.end_date])
      for symbol in symbols:
        if not symbol in onlog._sublogs.keys():
          continue
        assert((onlog.start_date + datetime.timedelta(days=2)).date() == onlog.end_date.date())
        true_start_date = onlog.start_date + datetime.timedelta(hours=0.5)
        fill_info = pd.DataFrame(
            onlog._sublogs[symbol].fill_info,
            columns=onlog._sublogs[symbol].get_fill_info_columns())
        onlog._sublogs[symbol].fill_info = fill_info.loc[fill_info['ts']
                                                         >= true_start_date.timestamp() * 1e9].values.tolist()
        all_fill_pnl_on_log[symbol].append(onlog._sublogs[symbol])
    for symbol in symbols:
      fill_pnl_on_log_lst = all_fill_pnl_on_log[symbol]
      if len(fill_pnl_on_log_lst) == 0:
        continue
      # Fold all shards' rows into the first sublog for this symbol.
      fill_pnl_on_log = fill_pnl_on_log_lst[0]
      for log in fill_pnl_on_log_lst[1:]:
        fill_pnl_on_log.combine_fill_pnl_on_log(log)
      self._sublogs[symbol] = fill_pnl_on_log
    if len(symbols) != len(self._sublogs.keys()):
      lost_symbols = ','.join(
          [symbol for symbol in symbols if symbol not in self._sublogs.keys()])
      warnings.warn(f'Please check lost symbols: {lost_symbols}')
    self.symbols = self._sublogs.keys()

  def insert_bbodf(self, fill_info, bbodf, symbol):
    """Interleave close prices for `symbol` from bbodf into fill_info.

    Price-only rows are marked with fill_qty == 0; state columns are then
    forward-filled (multiplier also back-filled for the leading rows).
    """
    if symbol not in bbodf.columns:
      return fill_info
    bbodf = bbodf.rename(columns={
        'timestamp': 'ts',
        symbol: 'price'
    })[['ts', 'price']].dropna(subset=['price'])
    bbodf['fill_qty'] = 0
    # NOTE(review): DataFrame.append was removed in pandas 2.x; this file
    # assumes an older pandas.
    fill_info = fill_info.append(bbodf, ignore_index=True,
                                 sort=False).sort_values('ts').reset_index(drop=True)
    # init position
    if math.isnan(fill_info.loc[0, 'position']):
      fill_info.at[0, 'position'] = fill_info.at[0, 'position_by_fill'] = 0
    fill_info.loc[:,
                  ['multiplier', 'position', 'position_by_fill', 'price'
                   ]] = fill_info.loc[:,
                                      ['multiplier', 'position', 'position_by_fill', 'price']].ffill(
                                     )
    fill_info['multiplier'].bfill(inplace=True)
    return fill_info

  def correct_position_by_fill(self, fill_info):
    """Derive position_by_fill from fills anchored on queried positions.

    A queried position is trusted as an anchor when no fill happened for
    >10s (ts is in ns) and the queried position did not jump; between
    anchors, cumulative fill quantity is rolled forward.
    """
    fill_info['last_fill_ts'] = np.where(~pd.isna(fill_info['fill_qty']), fill_info['ts'], np.nan)
    fill_info['last_fill_ts'] = fill_info['last_fill_ts'].ffill().fillna(0)
    fill_info['query_position_change'] = fill_info['position'].ffill().diff().abs() > 1e-6
    update_fill_pos_mask = (fill_info['ts'] - fill_info['last_fill_ts'] >
                            10e9) & (~fill_info['query_position_change'])
    fill_info.loc[update_fill_pos_mask, 'position_by_fill'] = fill_info.loc[update_fill_pos_mask,
                                                                            'position']
    fill_info['fill_qty_cumsum'] = fill_info['fill_qty'].fillna(0).cumsum()
    fill_info['fill_qty_cumsum2'] = np.where(~pd.isnull(fill_info['position_by_fill']),
                                             fill_info['fill_qty_cumsum'], np.nan)
    fill_info['delta_amt'] = fill_info['fill_qty_cumsum'] - fill_info['fill_qty_cumsum2'].ffill()
    fill_info['position_by_fill'] = fill_info['position_by_fill'].ffill(
    ) + fill_info['delta_amt'].fillna(0)
    return fill_info

  def validate_funding_rate(self, funding_rate):
    """Warn when the per-day funding-rate row count is off the expected
    cadence: 24/day on Ftx, 3/day on other exchanges."""
    num_days = len(set([datetime.datetime.fromtimestamp(x / 1e9).date()
                   for x in funding_rate['timestamp']]))
    length = len(funding_rate) / num_days
    if self.exchange == 'Ftx':
      if length != 0 and length != 24:
        warnings.warn("%s product(%s) should have 24 funding fees a day. Length: %d" %
                      (self.exchange, self.strategy_name, length))
    elif length != 0 and length != 3:
      warnings.warn("%s product(%s) should have 3 funding fees a day. Length: %d" %
                    (self.exchange, self.strategy_name, length))

  def insert_funding_fee(self, fill_info, funding_rate):
    """Interleave funding-rate rows into fill_info and forward-fill state
    columns so each funding row has position/price context."""
    self.validate_funding_rate(funding_rate)
    fill_info = fill_info.append(funding_rate.rename(columns={'timestamp': 'ts'}),
                                 sort=False,
                                 ignore_index=True).sort_values('ts').reset_index(drop=True)
    if math.isnan(fill_info.loc[0, 'position']):
      fill_info.at[0, 'position'] = fill_info.at[0, 'position_by_fill'] = 0
    fill_info.loc[:,
                  ['multiplier', 'position']] = fill_info.loc[:,
                                                              ['multiplier', 'position']].ffill()
    fill_info['multiplier'].bfill(inplace=True)
    fill_info['price'].ffill(inplace=True)
    fill_info['dt'] = pd.to_datetime(fill_info['ts'])
    return fill_info

  def calculate_funding_fee(self, prev_position, prev_price, fill_info, symbol, is_fut, is_inv):
    """Futures only: compute funding_fee / cum_funding_fee and subtract
    cumulative funding from net_pnl.

    The first record of the window uses the previous end-of-day position
    (and, for linear contracts, the previous end-of-day price). Inverse
    contracts convert the fee into accounting units via usd_multiplier.
    """
    if is_fut:
      fill_info['funding_fee'] = fill_info['funding_rate'] * fill_info['position'] * fill_info[
          'multiplier']
      start_time = self.start_date.timestamp() * 1e9
      fill_info.loc[
          (fill_info['ts'] == start_time) & (fill_info['funding_rate'] > 0),
          'funding_fee'] = fill_info['funding_rate'] * prev_position * fill_info['multiplier']
      if is_inv:
        fill_info['funding_fee'] = fill_info['funding_fee'] / self.usd_multiplier[symbol.split('-')
                                                                                  [0]]
      else:
        fill_info.loc[fill_info['ts'] == start_time, 'price'] = prev_price
        fill_info['funding_fee'] = fill_info['funding_fee'] * fill_info['price'].ffill()
        fill_info['price'] = fill_info['price'].ffill()
      fill_info['cum_funding_fee'] = np.nancumsum(fill_info['funding_fee'])
      fill_info['net_pnl'] = fill_info['net_pnl'] - fill_info['cum_funding_fee']
    return fill_info

  def get_previous_eod(self, fill_info, start_date, col):
    """Last non-zero value of `col` at or before start_date (ns timestamp);
    falls back to 0 with a warning when none exists."""
    pre_eod_value = fill_info.loc[fill_info['ts'] <= start_date].dropna(subset=[col])[col].values
    if len(pre_eod_value) > 0 and pre_eod_value[-1] != 0:
      pre_eod_value = pre_eod_value[-1]
    else:
      pre_eod_value = 0
      warnings.warn("No SOD %s for %s." % (col, self.strategy_name))
    return pre_eod_value

  def calculate_pnls(self, bbodf, funding_rates):
    """Build one PnL DataFrame per symbol from the accumulated fill rows.

    Inverse futures PnL is computed in coin terms (1/price differences);
    everything else in quote-currency terms. Funding fees are folded in via
    calculate_funding_fee. Returns {symbol: fill_info DataFrame}.
    """
    maker_fee, taker_fee = self.maker_fee, self.taker_fee
    pnls = dict()
    for symbol, sublog in self._sublogs.items():
      fill_info = pd.DataFrame(
          sublog.fill_info,
          columns=sublog.get_fill_info_columns()).sort_values(
          by='ts').drop_duplicates().reset_index(
          drop=True)
      fill_info.loc[:, ['price', 'reserve']] = fill_info.loc[:, ['price', 'reserve']].ffill()
      fill_info = self.correct_position_by_fill(fill_info)
      fill_info['reserve_change'] = fill_info['reserve'].diff(1) != 0
      start_date = self.start_date.timestamp() * 1e9
      if bbodf is not None and len(bbodf.columns) > 1:
        fill_info = self.insert_bbodf(fill_info, bbodf, symbol)
      prev_position = self.get_previous_eod(fill_info, start_date, 'position')
      prev_price = self.get_previous_eod(fill_info, start_date, 'price')
      funding_rate = funding_rates[symbol]
      fill_info['funding_fee'] = fill_info['funding_rate'] = fill_info['cum_funding_fee'] = np.nan
      if len(funding_rate) > 0:
        funding_rate = funding_rate.loc[(funding_rate['timestamp'] >= start_date) & (
            funding_rate['timestamp'] < self.end_date.timestamp() * 1e9)]
        fill_info = self.insert_funding_fee(fill_info, funding_rate)
      if FLAGS.shift_start_date_hours == 0:
        fill_info = fill_info.loc[fill_info['ts'] >= start_date].reset_index(drop=True)
        # FIXME(review): `== np.nan` is always False; likely meant
        # np.isnan(...) / pd.isna(...), so this init never triggers.
        if fill_info.at[0, 'position'] == np.nan:
          fill_info.at[0, 'position'] = fill_info.at[0, 'position_by_fill'] = 0
      fill_info['dt'] = pd.to_datetime(fill_info['ts'])
      fill_info = fill_info.loc[fill_info['ts'] >= start_date]
      if sublog.is_fut and sublog.is_inv:
        # Inverse futures: PnL and fees are denominated in coin.
        fill_info['volume'] = fill_info['fill_qty'].fillna(0).abs() * fill_info['multiplier']
        fill_info.loc[fill_info['taker'] == True, 'fee_rate'] = sublog.parent.taker_fee
        fill_info.loc[fill_info['taker'] == False, 'fee_rate'] = sublog.parent.maker_fee
        fill_info['fee'] = fill_info['volume'] * fill_info['fee_rate'] / fill_info['price']
        fill_info['cum_fee'] = np.nancumsum(fill_info['fee'])
        fill_info['holding_pnl'] = fill_info['position_by_fill'].shift(1) * (
            1 / fill_info.shift(1)['price'] - 1 / fill_info['price']) * fill_info['multiplier']
        fill_info['gross_pnl'] = np.nancumsum(fill_info['holding_pnl'])
        fill_info['net_pnl'] = fill_info['holding_pnl'].cumsum() - fill_info['cum_fee']
        fill_info['dollar_position'] = fill_info['position_by_fill'] * fill_info['multiplier']
      else:
        # Linear / spot: PnL in quote-currency terms.
        fill_order_mask = ~fill_info['fill_qty'].isnull()
        fill_info.loc[fill_order_mask, 'holding_pnl'] = fill_info['position_by_fill'].shift(1) * fill_info.loc[fill_order_mask, 'price'].diff() \
                          * fill_info['multiplier']
        fill_info['reserve_change'] = fill_info['reserve_change'].bfill().ffill()
        # Reserve changes shift the position baseline; zero out that PnL.
        fill_info.loc[fill_info['reserve_change'].fillna(False), 'holding_pnl'] = 0
        fill_info.loc[fill_order_mask, 'volume'] = fill_info['fill_qty'].fillna(
            0).abs() * fill_info['price'] * fill_info['multiplier']
        fill_info['fee'] = np.where(fill_info['taker'], fill_info['volume'] * taker_fee,
                                    fill_info['volume'] * maker_fee)
        fill_info['cum_fee'] = np.nancumsum(fill_info['fee'])
        fill_info['gross_pnl'] = np.nancumsum(fill_info['holding_pnl'])
        fill_info.loc[fill_order_mask, 'net_pnl'] = fill_info['gross_pnl'] - fill_info['cum_fee']
        fill_info['position'] = fill_info['position'].ffill()
        fill_info['dollar_position'] = fill_info['position'] * \
          fill_info['price'] * fill_info['multiplier']
      fill_info.loc[fill_info['fill_qty'].isnull(), 'net_pnl'] = np.nan
      fill_info = fill_info.drop(
          ['last_fill_ts', 'query_position_change', 'fill_qty_cumsum', 'delta_amt'], axis=1)
      fill_info = self.calculate_funding_fee(prev_position, prev_price, fill_info, symbol,
                                             sublog.is_fut, sublog.is_inv)
      fill_info = fill_info.loc[fill_info['ts'] < self.end_date.timestamp() * 1e9]
      pnls[symbol] = fill_info
    return pnls

  def is_inv(self, symbol):
    """Cached inverse-contract lookup.

    Returns True/False, or None (implicitly, via fall-through) when the
    product cannot be resolved.
    """
    if symbol in self.inverse_dict:
      return self.inverse_dict[symbol]
    elif symbol.split('-')[0] in self.inverse_dict and symbol.split('.')[-1].isdigit():
      return self.inverse_dict[symbol.split('-')[0]]
    else:
      try:
        is_inv = is_inverse(self.market_type, self.exchange, symbol)
        self.inverse_dict[symbol.split(
            '-')[0]] = self.inverse_dict[symbol] = is_inv
      except Exception:
        print(f'Unable to check if {symbol} is inverse')

  def output_pnls_df(self, onlogs, all_symbols):
    """Combine shards, fetch funding rates and BBO close prices, then compute
    per-symbol PnL frames.

    Side effect: shifts self.start_date forward (30min by default, or by
    FLAGS.shift_start_date_hours) before calculating.

    Returns (pnls dict, start_date str, end_date str, bbodf).
    """
    symbols = {}
    products = {}
    if onlogs:
      self.combine_on_log(onlogs, all_symbols)
    self.get_usd_multiplier()
    for symbol in sorted(self._sublogs.keys()):
      product = generate_product_from_str2(self.market_type, self.exchange, self.api, symbol,
                                           self.end_date)
      symbols[symbol] = product.symbol
      products[symbol] = product
    funding_rates = {
        symbol: dump_funding_rate(products[symbol], self.start_date, self.end_date,
                                  FLAGS.bbo_cache_root) for symbol in products.keys()
    }
    end_date = self.end_date.strftime('%Y%m%d')
    bbo_start_date = self.start_date.strftime('%Y%m%d')
    bbo_end_date = (self.end_date - datetime.timedelta(days=1)).strftime('%Y%m%d')
    bbodf = read_feeds(CACHE_ROOT,
                       get_trading_dates(bbo_start_date, bbo_end_date),
                       self.mea,
                       list(symbols.values()),
                       'close',
                       allow_exception=True)
    if bbodf is not None:
      bbodf['dt'] = pd.to_datetime(bbodf['timestamp'])
      bbodf = bbodf.set_index('dt')
    else:
      bbodf = pd.DataFrame()
    if FLAGS.shift_start_date_hours == 0:
      # shift in order to calculate 0UTC funding fee
      self.start_date = self.start_date + datetime.timedelta(hours=0.5)
    else:
      self.start_date = self.start_date + datetime.timedelta(hours=FLAGS.shift_start_date_hours)
    start_date = self.start_date.strftime('%Y%m%d')
    pnls = self.calculate_pnls(bbodf, funding_rates)
    return pnls, start_date, end_date, bbodf

  def output_pnls(self, onlogs, all_symbols, total_volume_df):
    """Plot per-symbol (and total) position/PnL and build summary tables.

    Saves the figure under FLAGS.output_dir and returns
    (output_path, pnl_str table, agg_df, pnl_output DataFrame); returns None
    (implicitly) when there are no PnLs at all.
    """
    pnls, start_date, end_date, bbodf = self.output_pnls_df(onlogs, all_symbols)
    if len(pnls) == 0:
      return
    sample = self._sublogs[sorted(pnls.keys())[0]]
    not_fut_or_inv = not (sample.is_fut and sample.is_inv)
    pnl_data = []
    cnt = 1
    # One subplot per symbol, plus one extra for the total when applicable.
    total = len(pnls) + not_fut_or_inv
    if flags.FLAGS.landscape_layout:
      rows = total
      width = min(max(10, (self.end_date - self.start_date).days), 14)
      plt.figure(figsize=(width, 3 * rows))
    else:
      rows = int((total + 2.5) / 3)
      plt.figure(figsize=(12, 4 * rows))
    plt.suptitle(f'{self.strategy_name}_{self.market_type}_{self.exchange}')
    total_pnl = None
    total_pos = None
    # Hide the usd_multiplier column when quoting already in USD(T).
    if len(sorted(pnls.keys())) > 0:
      hide_multiplier = sorted(pnls.keys())[0].split('-')[1].split('.')[0] in ['USDT', 'USD']
    else:
      hide_multiplier = False
    for symbol in sorted(pnls.keys()):
      pnl_details = pnls[symbol]
      if not_fut_or_inv:
        a_total_pnl = pnl_details.dropna(subset=['net_pnl', 'dollar_position'])[
                                         ['dt', 'net_pnl']].copy()
        a_total_pos = pnl_details.dropna(subset=['net_pnl', 'dollar_position'])[[
            'dt', 'dollar_position'
        ]].copy()
        a_total_pnl.columns = ['dt', symbol].copy()
        a_total_pos.columns = ['dt', symbol].copy()
        if total_pnl is None:
          total_pnl = a_total_pnl
          total_pos = a_total_pos
        else:
          total_pnl = pd.merge(total_pnl, a_total_pnl, on='dt', how='outer')
          total_pos = pd.merge(total_pos, a_total_pos, on='dt', how='outer')

      pos = pd.DataFrame(pnl_details[['ts', 'position']].dropna(subset=['position']))
      pos['dt'] = pd.to_datetime(pos['ts'])

      fillpos = pd.DataFrame(pnl_details[['ts',
                                          'position_by_fill']].dropna(subset=['position_by_fill']))
      fillpos['dt'] = pd.to_datetime(fillpos['ts'])
      coin = symbol.split('-')[0]
      title = f'pnl breakdown {coin}-{self.title_currency}'
      if flags.FLAGS.landscape_layout:
        ax = plt.subplot(rows, 1, cnt)
      else:
        ax = plt.subplot(rows, min(3, total), cnt)
      plt.title(title)
      ax.plot(pos.set_index('dt')['position'],
              drawstyle="steps-post",
              color='blue',
              linewidth=1,
              alpha=0.4)
      ax.plot(fillpos.set_index('dt')['position_by_fill'],
              drawstyle="steps-post",
              color='green',
              linewidth=1,
              alpha=0.4)
      ax1 = ax.twinx()
      net_pnls = pnl_details.dropna(subset=['fill_qty', 'net_pnl']).set_index('dt')['net_pnl']
      if len(net_pnls) > 0:
        ax1.plot(net_pnls, color='red', linewidth=0.5)
      if flags.FLAGS.plot_price and symbol in bbodf.columns:
        ax2 = ax.twinx()
        # fill_qty == 0 marks the price-only rows inserted from bbodf.
        ax2.plot(pnl_details.loc[pnl_details['fill_qty'] == 0].set_index('dt')['price'],
                 alpha=0.5,
                 color='orange')
        ax2.set_yticks([])
      ax.legend(['pos', 'fill pos'], loc='upper left', frameon=False)
      ax1.legend(['pnl'], loc='upper right', frameon=False)
      cnt += 1
      pnl_details = pnl_details.dropna(subset=['fill_qty'])
      if symbol not in total_volume_df.columns:
        total_volume = 0
      else:
        total_volume = total_volume_df[symbol].sum()
      if len(pnl_details) > 0:
        data = [symbol] + pnl_details.iloc[-1][['gross_pnl',
                                                'net_pnl', 'cum_fee', 'cum_funding_fee']].tolist()
        if 'volume' in pnl_details.columns:
          taker_volume = np.nansum(np.where(pnl_details['taker'], pnl_details['volume'], 0))
          maker_volume = np.nansum(np.where(pnl_details['taker'] == False, pnl_details['volume'],
                                            0))
          market_share = np.divide((taker_volume + maker_volume), total_volume)
        # NOTE(review): taker_volume/maker_volume/market_share are only
        # assigned inside the `if` above — NameError if 'volume' is absent.
        data += [taker_volume, maker_volume, "{:.4f}%".format(market_share * 100)]
        usd_multiplier = self.usd_multiplier[symbol.split('-')[0]]
        pnl_usd = pnl_details.iloc[-1]['net_pnl'] * usd_multiplier
        volume_usd = taker_volume + maker_volume if self.is_inv(symbol) \
                     else (taker_volume + maker_volume) * usd_multiplier
        if hide_multiplier:
          data += [pnl_usd, volume_usd]
        else:
          data += [usd_multiplier, pnl_usd, volume_usd]
        pnl_data.append(data)
    # plot total pnl and pos
    if total_pnl is not None and not total_pnl.empty:
      total_pnl = total_pnl.sort_values('dt').fillna(method='ffill')
      total_pos = total_pos.sort_values('dt').fillna(method='ffill')
      if flags.FLAGS.landscape_layout:
        ax = plt.subplot(rows, 1, cnt)
      else:
        ax = plt.subplot(rows, min(3, total), cnt)
      ax.plot(total_pos.set_index('dt').sum(1),
              drawstyle="steps-post",
              color='blue',
              linewidth=1,
              alpha=0.4)
      ax1 = ax.twinx()
      ax1.plot(total_pnl.set_index('dt').sum(1), drawstyle='steps-post', color='r', linewidth=0.5)
      ax.legend(['total pos'], loc='upper left', frameon=False)
      ax1.legend(['total pnl'], loc='upper right', frameon=False)
    name = plot_name(self.strategy_name, self.exchange, self.market_type, start_date, end_date)
    if not os.path.exists(FLAGS.output_dir):
      os.makedirs(FLAGS.output_dir)
    output_path = os.path.join(FLAGS.output_dir, name)
    plt.tight_layout()
    plt.savefig(output_path)
    if hide_multiplier:
      headers = ['coin', 'gross_pnl', 'net_pnl', 'cum_fee', 'cum_funding_fee', 'taker_volume', 'maker_volume', 'market_share',
                 'net_pnl_usd', 'volume_usd']
    else:
      headers = ['coin', 'gross_pnl', 'net_pnl', 'cum_fee', 'cum_funding_fee', 'taker_volume', 'maker_volume', 'market_share', 'usd_multiplier',
                 'net_pnl_usd', 'volume_usd']
    total_row = [
        sum(col) if isinstance(col[0], float) or isinstance(col[0], int) else 'TOTAL'
        for col in zip(*pnl_data)
    ]
    if total_row and not hide_multiplier:
      total_row[8] = np.nan  # usd_multiplier
    pnl_data.append(total_row)
    if total_row:
      agg_df = pd.DataFrame([total_row], columns=headers)
    else:
      agg_df = pd.DataFrame([], columns=headers)
    pnl_str = tabulate.tabulate(pnl_data, headers=headers, tablefmt='grid')
    pnl_df = []
    # Per-symbol PnL rows for the last full trading date.
    target_time = datetime.datetime.strptime(end_date, "%Y%m%d").date() - datetime.timedelta(1)
    for symbol in sorted(pnls.keys()):
      pnl_details = pnls[symbol].dropna(subset=['net_pnl', 'fill_qty'])[['dt', 'net_pnl']]
      res = pnl_details.loc[pnl_details['dt'].dt.date == target_time]
      if len(res) > 0:
        res = res.iloc[-1]
      else:
        continue
      date = pd.to_datetime(res['dt']).strftime('%Y-%m-%d')
      pnl_df.append([self.strategy_name, date, symbol.split('-')[0], res['net_pnl']])
    columns = [
        'strategy_name',
        'trading_date',
        'accounting_currency',
        'pnl_by_fill',
    ]
    # Inverse products are reported per coin; otherwise sum into one row in
    # the title currency.
    all_inv = all(self.is_inv(symbol) for symbol in pnls.keys())
    if not pnl_df:
      pnl_output = pd.DataFrame([], columns=columns)
    elif all_inv:
      pnl_output = pd.DataFrame(pnl_df, columns=columns)
    else:
      pnl_output = pd.DataFrame([[
          self.strategy_name,
          pnl_df[0][1],
          self.title_currency.split('.')[0],
          sum([row[-1] for row in pnl_df]),
      ]],
                                columns=columns)
    return output_path, pnl_str, agg_df, pnl_output


def _analyze_oe(df):
  """Format fill-type counts and interval stats for one (symbol, side) slice.

  Mutates `df`: fills guess_fill_type in for UNKNOWN fills and backfills
  the fill_type / proc_order_id columns when absent.
  """
  filled = df['type'] == 'ORDER_FILLED'
  # Chunhui: Some strategies may not have `on_fill` in short period,
  # which will fail here
  unknown = df['guess_fill_type'] == 'UNKNOWN_FILL_TYPE'
  looks_maker = np.abs(df['order_price'] - df['fill_price']) < 1e-6
  df.loc[filled & unknown & looks_maker, 'guess_fill_type'] = 'MAKER_FILL_TYPE'
  df.loc[filled & unknown & (~looks_maker), 'guess_fill_type'] = 'TAKER_FILL_TYPE'
  if 'fill_type' not in df.columns:
    df['fill_type'] = df['guess_fill_type']
  if 'proc_order_id' not in df.columns:
    df['proc_order_id'] = df['internal_order_id']

  _, fill_stat, interval_stat = live_oe_stat(df)

  sections = [
      tabulate.tabulate([fill_stat.tolist()],
                        headers=fill_stat.index.tolist(),
                        tablefmt='grid'),
      format_describe(interval_stat.index.tolist(), interval_stat),
  ]
  return ''.join('\n' + section for section in sections)


def analyze_oe(df):
  """Per-symbol, per-side order-event summary string ('' for empty input).

  Remaps each symbol slice's order_side from the order id before grouping,
  so every row of an order carries the side of its submission.
  """
  if df.empty:
    return ''
  id_key = 'internal_order_id' if 'internal_order_id' in df.columns else 'proc_order_id'
  chunks = []
  for symbol, sdf in df.groupby('symbol'):
    chunks.append('\n\n\n************************* %s *************************' % symbol)
    vc = sdf['type'].value_counts()
    chunks.append('\n%s' % tabulate.tabulate([vc], headers='keys', tablefmt='grid'))
    pairs = sdf[[id_key, 'order_side']].drop_duplicates().dropna()
    side_by_id = {row[1]: row[2] for row in pairs.itertuples()}
    sdf['order_side'] = sdf[id_key].map(side_by_id)
    for order_side, osdf in sdf.groupby('order_side'):
      chunks.append('\n\n%s' % order_side)
      chunks.append(_analyze_oe(osdf))
  return ''.join(chunks)


def format_describe(names, desdf):
  """Tabulate the selected rows of a describe()-style frame, labelled in ms."""
  rows = [[name] + desdf.loc[name].tolist() for name in names]
  headers = ['unit: ms'] + desdf.columns.tolist()
  return tabulate.tabulate(rows, headers=headers, tablefmt='grid')


def get_agg_fill_ratios(strategy_name, machine, start, end):
  """Fetch aggregate fill ratios by running agg_fill_ratio.py on a remote host.

  Assembles a nested shell command (env -i -> cd coin_deploy -> ssh ->
  activate venv -> pyrunner) and parses the remote script's JSON stdout.

  NOTE(review): the command is built by string interpolation and executed
  with shell=True; arguments are expected to come from trusted config, not
  user input -- confirm before widening the call sites.
  """
  cmd = [
      'cd /home/ec2-user/workspace/basis_strat/20190304/coin/python', 'source ../venv/bin/activate',
      './pyrunner coin/strategy/basis_strat/analysis/agg_fill_ratio.py \
          --strategy_name=%s \
          --start=%s --end=%s' % (strategy_name, start, end)
  ]
  # ';'-joined so the three steps run inside one remote shell session.
  cmd = ';'.join(cmd)
  cmd = 'ssh -F jaewon_strategy/ssh_config/ssh_config %s \'(%s)\'' % (machine, cmd)
  cmd = ' && '.join(['env -i', 'cd coin_deploy', cmd])
  output = subprocess.check_output([cmd], shell=True)
  data = json.loads(output.strip())
  return data


def format_agg_fills(agg_fills):
  """Tabulate aggregate submit/fill quantities and the implied fill ratio
  per symbol and side."""
  rows = []
  for symbol, by_side in agg_fills.items():
    for side, vals in by_side.items():
      submitted = float(vals[0])
      filled = float(vals[1])
      rows.append([symbol, side, submitted, filled, filled / submitted])
  return tabulate.tabulate(
      rows,
      headers=['symbol', 'side', 'agg_submit_qty', 'agg_fill_qty', 'fill_ratio'],
      tablefmt='grid')


def _get_oe_output_path(strategy_name, start, end, market, exchange, output_dir):
  return os.path.join(output_dir,
                      '%s_%s_%s_%s_%s.csv' % (strategy_name, market, exchange, start, end))


def dump_order_events(logs, start, end, output_dir):
  """Concatenate order events from all (log, date) pairs into one CSV.

  Args:
    logs: list of (OnLog, date_str) pairs; each OnLog provides
      get_oe_as_df(), .strategy_name and .mea.
    start, end: YYYYMMDD strings used in the output filename.
    output_dir: directory for the CSV.

  Fix: the original raised from pd.concat([]) and hit NameError on
  `mea`/`strategy_name` when `logs` was empty, and relied on for-loop
  variable leakage for the filename metadata.
  """
  if not logs:
    return
  dfs = [log.get_oe_as_df() for log, _ in logs]
  # NOTE(review): filename metadata is taken from the last log, matching the
  # original loop-leakage behavior; assumes all logs share one strategy/mea.
  last_log = logs[-1][0]
  df = pd.concat(dfs, sort=False).sort_values('event_time').reset_index(drop=True)
  market_type, exchange, _ = last_log.mea.split('.')
  path = _get_oe_output_path(last_log.strategy_name, start, end, market_type, exchange,
                             output_dir)
  df.to_csv(path, index=False)


def _dump_to_file(strategy_name, start, end, output):
  """Write the report text to <FLAGS.output_dir>/<strategy>_<start>_<end>."""
  target = os.path.join(FLAGS.output_dir, '%s_%s_%s' % (strategy_name, start, end))
  with open(target, 'w') as fo:
    fo.write(output)


def _report_strategy(strategy_name, name, content, files):
  """Best-effort delivery of a report over Slack (optional) and mail.

  Each channel's failure is printed and swallowed so one broken channel
  does not abort the report run.
  """
  if FLAGS.report_slack and files:
    try:
      SlackHelper('.', 'team_u_ops').send_slack_noti(strategy_name, files[0])
    except BaseException:
      traceback.print_exc()
  try:
    send_mail_with_attachments(name, FLAGS.from_mail, FLAGS.to_mail, name, content, files)
  except BaseException:
    traceback.print_exc()


def extract_trading_info_from_og_log(
    strategy_name, machine, start, end, mea, sample_symbol, quiet=False, is_start=True):
  """Replay the archived strategy proto log for [start, end) into an OnLog.

  Returns the populated OnLog on success. On failure the error is reported
  (file dump or mail, per --dump_to_file) and the function implicitly
  returns None.

  NOTE(review): `quiet` is accepted but never read in this function --
  confirm whether callers still need it.
  """
  try:
    print('Running %s %s %s %s %s' % (strategy_name, machine, start, end, mea))
    assert strategy_name
    assert machine
    assert start
    assert end
    market_type, _, _ = mea.split('.')
    assert market_type in ['Spot', 'Futures']
    # Only the first chunk of a multi-day run honors --shift_start_date_hours;
    # every chunk is shifted back at least 30 minutes.
    if is_start:
      if FLAGS.shift_start_date_hours == 0:
        # shift in order to calculate 0UTC funding fee
        start_date = datetime.datetime.strptime(start, '%Y%m%d') - datetime.timedelta(hours=0.5)
      else:
        start_date = datetime.datetime.strptime(
            start, '%Y%m%d') - datetime.timedelta(hours=FLAGS.shift_start_date_hours)
    else:
      start_date = datetime.datetime.strptime(start, '%Y%m%d') - datetime.timedelta(hours=0.5)
    end_date = datetime.datetime.strptime(end, '%Y%m%d')
    on_log = OnLog(strategy_name, machine, mea, sample_symbol, start_date, end_date)
    run_from_strat_log_archive(
        on_log_callback=on_log.on_log,
        start_time=start_date,
        end_time=end_date,
        root_dir='/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_proto_log',
        machine=machine,
        strategy_name=strategy_name)
    return on_log
  except Exception as e:
    # Report rather than raise so sibling extractions in the same run keep
    # going.
    name = 'Failed Basis Strategy Daily Report Extract %s' % start
    text = '%s\n\n%s' % (strategy_name, traceback.format_exc())
    print(name)
    print(text)
    print(e)
    if FLAGS.dump_to_file:
      _dump_to_file(strategy_name, start, end, text)
    else:
      send_mail(name, FLAGS.from_mail, FLAGS.error_mail, name, text)


def report_trading_info_from_og_log(on_log, logs, strategy_name,
                                    machine, start, end, mea, symbols, total_volume, quiet=False):
  """Build and deliver the daily pnl report for one strategy/mea.

  Args:
    on_log: OnLog accumulator for the first day of the range.
    logs: remaining days' OnLog objects, or None; merged in output_pnls.
    symbols: symbols traded over the range.
    total_volume: per-symbol market-volume frame from calculate_total_volume.
    quiet: suppress printing the report body.

  Returns (agg_df, pnl_df) on success; on failure the error is reported and
  the function implicitly returns None.
  """
  try:
    market_type, exchange, _ = mea.split('.')
    df = on_log.get_oe_as_df()

    output_path, pnl_str, agg_df, pnl_df = on_log.output_pnls(logs, symbols, total_volume)
    # basis_spread2 reports carry exchange/market in the header line.
    if (strategy_name.startswith('basis_spread2')):
      pnl_str = '%s %s %s \n\nPnl breakdown:\n%s%s' \
          % (strategy_name, exchange, market_type, pnl_str, analyze_oe(df))
    else:
      pnl_str = '%s\n\nPnl breakdown:\n%s%s' % (strategy_name, pnl_str, analyze_oe(df))
    # try:
    #  agg_fills = get_agg_fill_ratios(strategy_name, machine, start, end)
    #  pnl_str += '\n\nAgg fills\n\n%s' % format_agg_fills(agg_fills)
    # except Exception as e:
    #  print(e)
    if not quiet:
      print(pnl_str)
    name = 'Basis Strategy Daily Report %s' % start
    if flags.FLAGS.light_ver:
      pnl_str += '\nlight version'
    if FLAGS.dump_to_file:
      _dump_to_file(strategy_name, start, end, pnl_str)
    else:
      _report_strategy(strategy_name, name, pnl_str, [output_path])
    if FLAGS.report_oe_per_x_sec > 0:
      # Bucket order events into report_oe_per_x_sec-second bins and dump the
      # per-bin counts for report_oe_count_by_machine.
      df['dt'] = pd.to_datetime(df['event_time'].astype(int))
      df['dt'] = df['dt'].dt.floor(freq=f'{FLAGS.report_oe_per_x_sec}S')
      gdf = df.groupby(['dt', 'type'])[['event_time']].count()
      gdf.columns = ['count']
      gdf['machine'] = machine
      gdf.to_csv(
          _get_oe_output_path(f'oe_cnt_{strategy_name}', start, end, market_type, exchange,
                              os.path.join(FLAGS.output_dir)))
    return agg_df, pnl_df
  except Exception as e:
    # Report rather than raise so sibling strategies in the same run keep going.
    name = 'Failed Basis Strategy Daily Report %s' % start
    text = '%s\n\n%s\n%s' % (strategy_name, 'No pnl.', traceback.format_exc())
    print(name)
    print(text)
    print(e)
    if FLAGS.dump_to_file:
      _dump_to_file(strategy_name, start, end, text)
    else:
      send_mail(name, FLAGS.from_mail, FLAGS.error_mail, name, text)


def get_start_date():
  """Return --start_date if set, otherwise yesterday as YYYYMMDD."""
  if FLAGS.start_date:
    return FLAGS.start_date
  yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
  return yesterday.strftime('%Y%m%d')


def get_end_date(start):
  """Return --end_date if set, otherwise the day after `start` as YYYYMMDD."""
  if FLAGS.end_date:
    return FLAGS.end_date
  next_day = datetime.datetime.strptime(start, '%Y%m%d') + datetime.timedelta(days=1)
  return next_day.strftime('%Y%m%d')


def report_oe_count_by_machine(start, end):
  """Mail a per-machine / per-strategy order-request frequency report.

  Reads the oe_cnt_*.csv files dumped by report_trading_info_from_og_log,
  counts submit + cancel-submit events per time bucket, and mails three
  tables: by machine, strategy details for the top machines, and by strategy.

  Fixes: DataFrame.append was removed in pandas 2.0 (frames are now
  collected and concatenated once); an empty file list no longer crashes
  on `df` being None.
  """
  files = glob.glob(f'{FLAGS.output_dir}/oe_cnt_*{start}*{end}*csv')
  frames = []
  for f in files:
    strat_name = os.path.basename(f)
    # Filename layout: oe_cnt_<strategy>_<start>_...; strip prefix and suffix.
    strat_name = strat_name[:strat_name.find(f'_{start}')][len('oe_cnt_'):]
    adf = pd.read_csv(f)
    # Only outgoing requests count toward the rate.
    adf = adf[adf['type'].isin(['ORDER_SUBMITTED', 'CANCEL_SUBMITTED'])].copy()
    adf['strategy_name'] = strat_name
    frames.append(adf)
  if not frames:
    print('No oe_cnt csv files found for %s %s; skipping machine report.' % (start, end))
    return
  df = pd.concat(frames, ignore_index=True)
  df['dt'] = pd.to_datetime(df['dt'])
  data = []
  headers = []
  for machine, mdf in df.groupby('machine'):
    mdf = mdf.copy()
    mdf = mdf.groupby('dt').sum()
    des = mdf.describe()
    if not headers:
      headers = ['machine'] + des.T.columns.tolist()
    data.append([machine] + des.T.values.tolist()[0])
  # Sort machines by the last describe() column, descending.
  data = sorted(data, key=lambda e: -e[-1])
  report_str = 'Order submit + cancel submit groupby machine per minute report\n\n\n'
  report_str += tabulate.tabulate(data, headers=headers, tablefmt='grid')
  sdata = []
  # Break down the five busiest machines by strategy.
  for e in data[:5]:
    machine = e[0]
    mdf = df[df['machine'] == machine].copy()
    adata = []
    for strat, sdf in mdf.groupby('strategy_name'):
      sdf = sdf.groupby('dt').sum()
      des = sdf.describe()
      aadata = []
      aadata.append(machine)
      aadata.append(strat)
      aadata += des.T.values.tolist()[0]
      adata.append(aadata)
    adata = sorted(adata, key=lambda e: -e[-1])
    # Blank out the machine name on all but the first row of each group.
    for e in adata[1:]:
      e[0] = ''
    sdata += adata

  headers = headers[:1] + ['strategy_name'] + headers[1:]
  report_str += '\n\n\nstrategy details from top machines\n'
  report_str += tabulate.tabulate(sdata, headers=headers, tablefmt='grid')

  sdata = []
  for strategy, sdf in df.groupby('strategy_name'):
    sdf = sdf.copy().groupby('dt').sum()
    adata = [strategy] + sdf.describe().T.values.tolist()[0]
    sdata.append(adata)
  sdata = sorted(sdata, key=lambda e: -e[-1])[:20]

  headers = headers[1:]
  report_str += '\n\n\nranked by strategy\n'
  report_str += tabulate.tabulate(sdata, headers=headers, tablefmt='grid')

  name = 'Basis Strategy Machine Request %s' % start
  send_mail(name, FLAGS.from_mail, FLAGS.to_mail, name, report_str)


def report_aggregate_stats(all_agg_data, all_meas, date):
  """Mail a per-mea aggregation of net pnl and volume (both in USD)."""
  df = pd.concat(all_agg_data).reset_index(drop=True)
  # all_meas lines up positionally with the concatenated rows.
  df['mea'] = pd.Series(all_meas)
  agg_df = df[['mea', 'net_pnl_usd', 'volume_usd']].groupby(['mea']).sum().reset_index()
  body = tabulate.tabulate(agg_df, headers=agg_df.columns, tablefmt='grid', numalign='right')
  subject = f'Basis Strategy Daily Report Aggregated {date}'
  send_mail(subject, FLAGS.from_mail, FLAGS.to_mail, subject, body)


def report_strat(log_dict, strat, start, end, symbols, total_volume):
  """Fan out report_trading_info_from_og_log over every (strategy, mea) key.

  Args:
    log_dict: {(strategy_name, mea): [OnLog, ...]} with the first day's log
      at index 0 (built by process_strat).
    strat: strategy info dict (used for progress printing).
    symbols: symbols traded over the range.
    total_volume: market volume frame passed through to the reporter.

  Returns:
    (all_pnls, all_agg_data, all_meas): per-strategy pnl frames, aggregate
    frames, and the mea repeated once per aggregate row (paired positionally
    by report_aggregate_stats).
  """
  all_pnls = []
  all_agg_data = []
  all_meas = []
  with ProcessPoolExecutor(max_workers=FLAGS.max_workers) as executor:
    futures = {}
    for key in log_dict:
      onlog = log_dict[key][0]
      # Logs for the remaining days, merged inside output_pnls.
      if len(log_dict[key]) > 1:
        other_logs = log_dict[key][1:]
      else:
        other_logs = None
      fut = executor.submit(
          report_trading_info_from_og_log,
          onlog,
          other_logs,
          onlog.strategy_name,
          onlog.machine,
          start,
          end,
          onlog.mea,
          symbols,
          total_volume,
          quiet=True)
      futures[fut] = [strat, onlog.mea]
    for f in as_completed(futures.keys()):
      try:
        result = f.result()
        if not result:
          continue
        agg_df, pnl_df = result
        if agg_df is None:
          continue
        if pnl_df is not None:
          all_pnls.append(pnl_df)
        all_agg_data.append(agg_df)
        all_meas += [futures[f][1]] * agg_df.shape[0]
        print(f"{futures[f][0]['name']} {futures[f][1]} report done")
      except Exception:
        # traceback is imported at module level; the redundant local import
        # was removed. One failing strategy must not abort the rest.
        traceback.print_exc()
  return all_pnls, all_agg_data, all_meas


def calculate_total_volume(start_date, end_date, mea):
  """Per-symbol total (buy + sell) dollar volume from the cached interval feeds.

  Returns an empty DataFrame when the feed cache is not ready.
  """
  try:
    dates = get_trading_dates(start_date, end_date)[:-1]
    buys = read_feeds(CACHE_ROOT, dates, mea, '', 'volume_buy_dollar')
    sells = read_feeds(CACHE_ROOT, dates, mea, '', 'volume_sell_dollar')
    total = buys
    for col in total.columns:
      if col in ('timestamp', 'datetime'):
        continue
      total.loc[:, col] = total.loc[:, col] + sells.loc[:, col]
    return total
  except BaseException:
    # Deliberately broad: the cache may simply not exist yet for this range.
    print('Interval feed for market volume is not ready.')
    return pd.DataFrame([])


def process_strat(strat, start, end, cpu):
  """Extract one strategy's logs day-by-day and build its reports.

  Args:
    strat: strategy info dict (expects keys 'name', 'host', 'mea',
      'market_type', 'exchange', 'symbols').
    start, end: YYYYMMDD strings; [start, end) is processed one day at a time.
    cpu: worker count for the per-day extraction pool.

  Returns (all_pnls, all_agg_data, all_meas) from report_strat, or three
  empty lists when skipped (existing plot, or --output_oe_only).
  """
  png = plot_name(strat['name'], strat['exchange'], strat['market_type'], start, end)
  png = os.path.join(FLAGS.output_dir, png)
  if FLAGS.ignore_exisiting and os.path.exists(png):
    print(f"{strat['name']}_{start}_{end} already exists, skipping")
    return [], [], []
  total_volume = calculate_total_volume(start, end, strat['mea'])
  start_dt = datetime.datetime.strptime(start, '%Y%m%d')
  end_dt = datetime.datetime.strptime(end, '%Y%m%d')
  cur = start_dt
  first = True
  with ProcessPoolExecutor(max_workers=cpu) as executor:
    futures = {}
    # One extraction job per day; only the first day is flagged is_start (it
    # honors the configurable start-time shift).
    while cur < end_dt:
      fut = executor.submit(extract_trading_info_from_og_log, strat['name'], strat['host'], cur.strftime('%Y%m%d'),
                            (cur + datetime.timedelta(days=1)).strftime('%Y%m%d'), strat['mea'], strat['symbols'][0], is_start=first)
      first = False
      futures[fut] = (strat, cur.strftime('%Y%m%d'))
      cur += datetime.timedelta(days=1)
    logs = []
    for f in as_completed(futures.keys()):
      try:
        logs.append((f.result(), futures[f][1]))
        print(f"{futures[f][0]['name']} {futures[f][0]['mea']} {futures[f][1]} extract done")
      except Exception:
        import traceback
        traceback.print_exc()
    # Group logs by (strategy, mea), keeping the start day's log first so
    # report_strat can use it as the primary OnLog.
    # NOTE(review): extract_trading_info_from_og_log returns None on failure;
    # a None log here would raise at `log._sublogs` -- confirm whether that
    # path can occur in practice.
    log_dict = {}
    symbols = []
    for log, date in logs:
      if len(log._sublogs) == 0:
        continue
      strat_name = log.strategy_name
      mea = log.mea
      symbols.extend(log.symbols)
      if not (strat_name, mea) in log_dict:
        log_dict[(strat_name, mea)] = []
      if date == start:
        log_dict[(strat_name, mea)].insert(0, log)
      else:
        log_dict[(strat_name, mea)].append(log)
    symbols = set(symbols)
    if FLAGS.output_oe_only:
      assert FLAGS.oe_output_dir
      dump_order_events(logs, start, end, FLAGS.oe_output_dir)
      return [], [], []
    all_pnls, all_agg_data, all_meas = report_strat(
        log_dict, strat, start, end, symbols, total_volume)
  return all_pnls, all_agg_data, all_meas


def main(argv):
  """Entry point: extract trading logs for the date range, build per-strategy
  pnl reports, and optionally machine-level order-event frequency reports.

  Fixes: os.mkdirs does not exist (AttributeError whenever ./tmp was
  missing) -- replaced with os.makedirs; a zero-length date range no longer
  divides by zero when sizing the worker pools; removed the redundant local
  `import traceback` (already imported at module level).
  """
  np.seterr(divide='ignore', invalid='ignore')
  tmp_output_dir = tempfile.mkdtemp()
  ts = datetime.datetime.now().timestamp()
  if not os.path.exists('tmp'):
    os.makedirs('tmp')
  # Record the temp dir so external tooling can locate this run's output.
  with open('tmp/daily_report_tmp_dir_%d.txt' % ts, 'w') as f:
    f.write(tmp_output_dir)
  # Re-root both output dirs under the fresh temp dir.
  FLAGS.output_dir = os.path.join(tmp_output_dir, FLAGS.output_dir)
  FLAGS.csv_output_dir = os.path.join(tmp_output_dir, FLAGS.csv_output_dir)

  if FLAGS.report_oe_per_x_sec > 0:
    assert FLAGS.output_dir
    assert not FLAGS.light_ver
  ssplot.setup_plt()
  start = get_start_date()
  end = get_end_date(start)
  if FLAGS.strategy_name:
    # Not used.
    # NOTE(review): this call no longer matches
    # report_trading_info_from_og_log's signature -- confirm before
    # re-enabling this path.
    strategy_name = FLAGS.strategy_name
    machine = FLAGS.machine
    mea = FLAGS.mea
    report_trading_info_from_og_log(strategy_name, machine, start, end, mea)
    return

  infos = get_all_strats_info(
        strategy_name_contains=flags.FLAGS.strategy_name_contains,
        strategy_type=flags.FLAGS.strategy_type,
        trade_bases=flags.FLAGS.trade_bases.split(',') if flags.FLAGS.trade_bases else None,
        trade_exchanges=flags.FLAGS.trade_exchanges.split(',')
        if flags.FLAGS.trade_exchanges else None)
  all_pnls = []
  all_agg_data = []
  all_meas = []

  # Split max_workers between the per-strategy pool (cpu1) and each
  # strategy's per-day extraction pool (cpu2).
  days = (datetime.datetime.strptime(end, '%Y%m%d') -
          datetime.datetime.strptime(start, '%Y%m%d')).days
  days = max(days, 1)
  cpu1 = max(FLAGS.max_workers // days, 1)
  cpu2 = max(FLAGS.max_workers // cpu1, 1)
  with ProcessPoolExecutor(max_workers=cpu1) as executor:
    futures = {}
    for strat in infos:
      fut = executor.submit(process_strat, strat, start, end, cpu2)
      futures[fut] = strat
    for f in as_completed(futures.keys()):
      try:
        pnls, agg_data, meas = f.result()
        all_pnls.extend(pnls)
        all_agg_data.extend(agg_data)
        all_meas.extend(meas)
        print(f"{futures[f]['name']} done")
      except Exception:
        # One failing strategy must not abort the rest of the run.
        traceback.print_exc()
  if FLAGS.output_oe_only:
    return
  if not os.path.exists(FLAGS.csv_output_dir):
    os.makedirs(FLAGS.csv_output_dir)
  if len(all_pnls) == 0:
    raise ValueError("Invalid strategy name./ No pnl for strategy.")
  pd.concat(all_pnls).sort_values(
          by='strategy_name').to_csv(
          os.path.join(
              FLAGS.csv_output_dir,
              "pnl_summary_%s.csv" %
              start))
  if len(all_agg_data) > 0:
    report_aggregate_stats(all_agg_data, all_meas, start)
  if FLAGS.report_oe_per_x_sec > 0:
    report_oe_count_by_machine(start, end)


if __name__ == '__main__':
  # Single-strategy mode (only honored when --strategy_name is set; see main).
  flags.DEFINE_string('strategy_name', '', '')
  flags.DEFINE_string('machine', '', '')
  flags.DEFINE_string('start_date', '', '')
  flags.DEFINE_string('end_date', '', '')
  flags.DEFINE_string('mea', '', '')
  # Output locations; both are re-rooted under a fresh temp dir in main().
  flags.DEFINE_string('output_dir', 'plot/basis_og_pnl', '')
  flags.DEFINE_string('csv_output_dir', 'verify_pnl', '')
  # Mail routing for reports and error notifications.
  flags.DEFINE_string('from_mail', 'basis_strategy@joomo.io', '')
  flags.DEFINE_string('to_mail', 'yuxuan@prestolabs.io', '')
  flags.DEFINE_string('error_mail', '', '')
  # NOTE(review): 'ignore_exisiting' is misspelled, but process_strat reads it
  # with the same spelling; renaming would break existing invocations.
  flags.DEFINE_bool('ignore_exisiting', False, 'Ignore already exists plot')
  flags.DEFINE_bool('dump_to_file', False, '')
  flags.DEFINE_integer('max_workers', 8, '')
  flags.DEFINE_boolean('output_oe_only', False, '')
  flags.DEFINE_string('oe_output_dir', '', '')
  # strategy filters
  flags.DEFINE_string('strategy_name_contains', '', '')
  flags.DEFINE_string('strategy_type', '', '')
  flags.DEFINE_string('trade_bases', '', '')
  flags.DEFINE_string('trade_exchanges', '', '')
  flags.DEFINE_bool('light_ver', False, 'light version only has pnl plot')
  flags.DEFINE_bool('plot_price', False, '')
  flags.DEFINE_bool('landscape_layout', False, 'better for long term plot')
  flags.DEFINE_integer('report_oe_per_x_sec', 0, 'count order event frequency')
  # 0 means "use the default 30-minute shift" (see extract_trading_info_from_og_log).
  flags.DEFINE_float('shift_start_date_hours', 0, '')
  flags.DEFINE_float('shift_funding_fee_start_date_hours', 1, '')
  flags.DEFINE_bool('report_slack', False, '')
  flags.DEFINE_string('bbo_cache_root',
                      '/remote/iosg/data-2/buckets/feed.derived.interval_h5/coin/main/PT1M/', '')
  flags.DEFINE_bool('include_funding_rate', True, '')
  app.run(main)
