# Copyright (c) 2023 Presto Labs Pte. Ltd.
# Author: lucasyoo
import os
import sys
import subprocess
import collections
import logging
import datetime
import tempfile
import git
import ujson
from concurrent.futures import ProcessPoolExecutor

from google.protobuf.json_format import MessageToDict
import numpy as np
import pandas as pd
import matplotlib

matplotlib.use("Agg")
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from absl import app, flags

from coin.base.datetime_util import (to_datetime, to_timestamp_int)
import cc.appcoin2.strategy.lm.kline_generator as klg1
import cc.appcoin2.strategy.lm.kline_generator2 as klg2
from coin.proto.coin_strategy_pb2 import StrategyLog
from coin.support.proto_log.logic.util import (read_strat_log, StratInfo)
from xunkemgmt_client.client.api_client import XunkemgmtClient
import coin2.service.strategy.trading_summary_service_pb2 as tss_pb2
import coin2.service.feed.realtime_mid_price_service_pb2 as rmp_pb2

# Module-level handle to the absl flags; the actual flags are defined in the
# __main__ guard at the bottom of this file.
FLAGS = flags.FLAGS

# One executor-reserve change observed in the deploy-repo git history.
RESERVE_HISTORY_DATA = collections.namedtuple(
    "RESERVE_HISTORY_DATA",
    "ts strategy product_str price reserve exec_reserve".split())

# One Black-Scholes option accounting record per strategy/product/timestamp.
BS_OPTION_HISTORY_DATA = collections.namedtuple(
    "BS_OPTION_HISTORY_DATA",
    "ts strategy product_str volatility risk_free_rate exec_reserve "
    "option_pnl option_delta".split())


class TeamPnLAccounting(object):
  """
  This Class Calculates Team trading_pnl + Option pnl.

  It compares per-strategy daily PnL from two sources over a date window:
  "pplot" — pnl_plot JSON output on disk (optionally (re)generated by shelling
  out to batch/pnl_plot.sh) — and "xunke" — the Xunkemgmt trading summary
  service.  Results are merged to CSV and plotted as PNG files.
  """
  # Scratch root where pnl_plot output is read from / written to.
  DBG_DIR = "/remote/iosg/home-9/lucasyoo/workspace/coin/lucasyoo_dbg"
  # Shell command template used to (re)generate pnl_plot output for one
  # strategy / orderlog machine / date range.
  DEFAULT_CMD = """
    bash {PNL_PLOT_DIR}/batch/pnl_plot.sh \
    --strategy_name {strategy_name} \
    --orderlog_machine {orderlog_machine} \
    --start_date {start_date} \
    --end_date {end_date}
  """

  def __init__(self, start_time: datetime.datetime, end_time: datetime.datetime,
               proj_strat_info_dict: dict):
    """
    Args:
      start_time: start of the accounting window (inclusive).
      end_time: end of the accounting window (inclusive).
      proj_strat_info_dict: project name -> info dict carrying "commence_date"
        and "commence_end_date" (both "%Y%m%d" strings) and "strategy"
        (strategy name -> mapping whose values are orderlog machine names).
    """
    self._start_time = start_time
    self._end_time = end_time
    self._proj_strat_info_dict = proj_strat_info_dict
    # Flat list of every strategy name across all projects.
    self._strategy_list = [
        strategy for _, proj_info in self._proj_strat_info_dict.items()
        for strategy, _ in proj_info["strategy"].items()
    ]
    # Lazily populated by _run_pnl_plot() / _query_xunkemgmt_pnl().
    self._pplot_res = None
    self._xunke_res = None

  def query_emtpy_pnl_plot(self):
    """Prints every expected pnl_plot option_plot directory that is missing or
    empty within the window.

    NOTE(review): "emtpy" looks like a typo for "empty"; kept as-is because
    external callers may reference this name.
    """
    for _, proj_info in self._proj_strat_info_dict.items():
      commence_date = proj_info["commence_date"]
      commence_end_date = proj_info["commence_end_date"]
      for strategy, hosts in proj_info["strategy"].items():
        for _, orderlog_machine in hosts.items():
          # Clamp the window to the project's commencement interval; string
          # max/min is safe because all dates are "%Y%m%d" formatted.
          start_date = max(self._start_time.strftime("%Y%m%d"), commence_date)
          end_date = min(self._end_time.strftime("%Y%m%d"), commence_end_date)
          for date in pd.date_range(start_date, end_date):
            date_str = date.strftime("%Y%m%d")
            tmpdir = (f"{TeamPnLAccounting.DBG_DIR}/pnl_plot/{date_str}/{strategy}/"
                      f"{orderlog_machine}/option_plot/{strategy}")
            if not os.path.exists(tmpdir) or len(os.listdir(tmpdir)) == 0:
              print(f"empty {tmpdir}")

  def _run_and_query_pnl_plot_by_strat(self, strat, orderlog_machine, commence_date,
                                       commence_end_date) -> pd.DataFrame:
    """Optionally runs pnl_plot for one strategy/machine pair, then loads its
    per-day JSON output into one DataFrame (empty DataFrame if nothing found).
    """
    # Clamp the window to the project's commencement interval ("%Y%m%d" strings).
    final_start_str = max(self._start_time.strftime("%Y%m%d"), commence_date)
    final_end_str = min(self._end_time.strftime("%Y%m%d"), commence_end_date)
    if FLAGS.run_pnl_plot:
      logging.info(f"pnl_plot {strat} {orderlog_machine} {final_start_str}-{final_end_str}")
      cmd_str = TeamPnLAccounting.DEFAULT_CMD.format(PNL_PLOT_DIR=TeamPnLAccounting.DBG_DIR,
                                                     strategy_name=strat,
                                                     orderlog_machine=orderlog_machine,
                                                     start_date=final_start_str,
                                                     end_date=final_end_str)
      os.system(cmd_str)
    res = []
    for date in pd.date_range(final_start_str, final_end_str):
      date_str = date.strftime("%Y%m%d")
      tmpdir = (f"{TeamPnLAccounting.DBG_DIR}/pnl_plot/{date_str}/{strat}/"
                f"{orderlog_machine}/option_plot/{strat}")
      if os.path.exists(tmpdir) and len(os.listdir(tmpdir)) > 0:
        tmpdir_files = os.listdir(tmpdir)
        tmp = None
        # Candidate PnL files: every json except mtm_pos.json.
        pnl_related = [i for i in tmpdir_files if i != "mtm_pos.json" and i.endswith("json")]
        if len(pnl_related) == 1:
          tmp = pd.read_json(os.path.join(tmpdir,
                                          [i for i in pnl_related if i.endswith("json")][0]),
                             orient="split")
        else:
          # Multiple candidates: prefer the "total*" file.
          tmp = pd.read_json(os.path.join(
              tmpdir, [i for i in pnl_related if i.endswith("json") and i.startswith("total")][0]),
                             orient="split")
        tmp["strategy"] = strat
        # Keep only rows whose index falls on this calendar day.
        tmp = tmp[tmp.index.day == date.day]
        res.append(tmp)
      elif FLAGS.force_empty_date:
        # Re-run pnl_plot for just this missing date (output is picked up on a
        # later invocation, not within this call).
        cmd_str = TeamPnLAccounting.DEFAULT_CMD.format(PNL_PLOT_DIR=TeamPnLAccounting.DBG_DIR,
                                                       strategy_name=strat,
                                                       orderlog_machine=orderlog_machine,
                                                       start_date=date_str,
                                                       end_date=date_str)
        os.system(cmd_str)
      else:
        logging.warning(f"no file found for {date_str} {strat} {orderlog_machine}")
    return pd.concat(res) if res else pd.DataFrame()

  def _run_pnl_plot(self):
    """Collects pnl_plot results for every strategy — in-process when there is
    a single project, otherwise via a process pool — and reduces them to a
    per-strategy daily PnL DataFrame stored on self._pplot_res.

    NOTE(review): the serial branch appends results unconditionally while the
    parallel branch skips empty DataFrames; harmless for concat but
    inconsistent — confirm intent.
    """
    results = []
    if len(self._proj_strat_info_dict) == 1:
      for _, proj_info in self._proj_strat_info_dict.items():
        for strategy, hosts in proj_info["strategy"].items():
          for _, orderlog_machine in hosts.items():
            results.append(
                self._run_and_query_pnl_plot_by_strat(strategy, orderlog_machine,
                                                      proj_info["commence_date"],
                                                      proj_info["commence_end_date"]))
    else:
      with ProcessPoolExecutor(max_workers=min(len(self._proj_strat_info_dict), 10)) as executor:
        futures = []
        for _, proj_info in self._proj_strat_info_dict.items():
          for strategy, hosts in proj_info["strategy"].items():
            for _, orderlog_machine in hosts.items():
              futures.append(
                  executor.submit(self._run_and_query_pnl_plot_by_strat, strategy, orderlog_machine,
                                  proj_info["commence_date"], proj_info["commence_end_date"]))
        for future in futures:
          res = future.result()
          if not res.empty:
            results.append(res)
    if results:
      results = pd.concat(results)
      results.index.name = "date"
      # Per-day delta: last observation minus first within each day.
      # NOTE(review): x[-1] / x[0] rely on positional Series indexing with a
      # non-integer index; newer pandas prefers x.iloc[-1] - x.iloc[0].
      results = results.groupby(["strategy", pd.Grouper(freq="1D")]).agg({
          "trade_pnl": lambda x: x[-1] - x[0],
          "option_value": lambda x: x[-1] - x[0],
          "total_value": lambda x: x[-1] - x[0]
      }).reset_index()
      results.rename(columns={
          "option_value": "option_pnl",
          "total_value": "total_pnl"
      },
                     inplace=True)
      # Recompute total as trade + option so missing option data counts as 0.
      results['option_pnl'].fillna(0.0, inplace=True)
      results['total_pnl'] = results['trade_pnl'] + results['option_pnl']
    else:
      results = pd.DataFrame()
    self._pplot_res = results

  def _query_xunkemgmt_pnl(self):
    """Queries the Xunkemgmt trading summary service for per-strategy daily
    trade/otc/total PnL (USD mark) over the window; stores the result on
    self._xunke_res with a parsed "date" column.
    """
    res = pd.DataFrame(columns=["date", "strategy", "trade_pnl", "option_pnl", "total_pnl"])
    with XunkemgmtClient() as client:
      rsp = client.query_trading_summary_interval_histories(
          tss_pb2.QueryTradingSummaryIntervalHistoriesRequestProto(
              business_units=["Day1mm", "ExtDay1mm"],
              start_timestamp=to_timestamp_int(self._start_time),
              # +1 day so the (inclusive) end date is fully covered —
              # presumably the service treats end_timestamp as exclusive.
              end_timestamp=to_timestamp_int(self._end_time + datetime.timedelta(days=1)),
              agg_types=["STRATEGY_NAME", "TRADING_DATE"],
              strategy_names=self._strategy_list))
      for his in rsp.histories:
        strategy_name = his.summary_info.strategy_name
        date = his.summary_info.trading_date
        trading_pnl_usd = his.summary.trading_pnl_mark_in_usd
        # OTC pnl is treated as the option pnl column downstream.
        otc_pnl_usd = his.summary.otc_pnl_mark_in_usd
        total_pnl = his.summary.pnl_mark_in_usd
        res.loc[len(res)] = [date, strategy_name, trading_pnl_usd, otc_pnl_usd, total_pnl]
    res["date"] = pd.to_datetime(res["date"], format="%Y%m%d")
    self._xunke_res = res

  def dumpy_pnl_from_strat_stat(self) -> None:
    """Fans out strategy_stat_replayer dumps, one task per strategy/host, via
    a process pool and waits for all of them.

    NOTE(review): "dumpy" looks like a typo for "dump"; kept as-is because the
    method is called by this name elsewhere.
    """
    with ProcessPoolExecutor(
        max_workers=min(sum([len(v["strategy"])
                             for _, v in self._proj_strat_info_dict.items()]), 10)) as executor:
      futures = []
      for _, proj_info in self._proj_strat_info_dict.items():
        commence_date = proj_info["commence_date"]
        commence_end_date = proj_info["commence_end_date"]
        for strategy, hosts in proj_info["strategy"].items():
          for _, orderlog_machine in hosts.items():
            # Clamp to the project's commencement interval, as datetimes here
            # (unlike the string comparisons used elsewhere in this class).
            start_date = max(self._start_time, datetime.datetime.strptime(commence_date, "%Y%m%d"))
            end_date = min(self._end_time + datetime.timedelta(days=1),
                           datetime.datetime.strptime(commence_end_date, "%Y%m%d"))
            futures.append(
                executor.submit(self._dump_pnl_from_strat_stat_by_strats, strategy, start_date,
                                end_date, orderlog_machine))
      for future in futures:
        # Propagate any worker exception.
        future.result()

  def _dump_pnl_from_strat_stat_by_strats(self, strat: str, start_dt: datetime.datetime,
                                          end_dt: datetime.datetime, host: str) -> None:
    """Shells out to strategy_stat_replayer for one strategy/host/time range."""
    cmd = f"""
    ./pyrunner python/coin/support/pta/logging/app/strategy_stat_replayer.py \
      --root_dir /remote/iosg/strat-1/buckets/analysis.derived.coin/live/pnl_stat_proto_log \
      --machine {host} \
      --strategy_name {strat} \
      --start_time {start_dt.strftime("%Y%m%dT%H%M%S")} \
      --end_time {end_dt.strftime("%Y%m%dT%H%M%S")} \
      --unpretty
    """
    os.system(cmd)

  def _query_strategy_run_pnl(self):
    """Populates both PnL sources: pplot files and the Xunkemgmt service."""
    self._run_pnl_plot()
    self._query_xunkemgmt_pnl()

  def plot_pplot_xunke_pnl_by_project(self, project: str):
    """For one project: writes a merged xunke/pplot CSV and a grid of
    cumulative trade/option/total PnL plots, one row per strategy plus an
    aggregate row when there is more than one strategy.
    """
    # Select this project's strategies by substring match on strategy name.
    xunke_df = self._xunke_res.loc[self._xunke_res['strategy'].str.contains(project.lower())].copy()
    pplot_df = self._pplot_res.loc[self._pplot_res['strategy'].str.contains(project.lower())].copy()
    # Date range for output file names; tolerate 'date' being a column or the index.
    start_dt = (xunke_df['date'].min()
                if 'date' in xunke_df.columns else xunke_df.index.min()).strftime("%Y%m%d")
    end_dt = (xunke_df['date'].max()
              if 'date' in xunke_df.columns else xunke_df.index.max()).strftime("%Y%m%d")
    pd.merge(
        xunke_df, pplot_df, how="outer", on=["date", "strategy"], suffixes=("_xunke", "_pplot")
    ).to_csv(
        f"{os.path.join(os.path.expandvars('$COIN_REPO'), FLAGS.output_dir)}/{project}_pnl_{start_dt}_{end_dt}.csv"
    )
    # Both sources must cover the same strategy set, else the plot rows would
    # silently disagree.
    assert set(xunke_df['strategy'].unique()) == set(pplot_df['strategy'].unique(
    )), f"not match: {set(xunke_df['strategy'].unique())}, {set(pplot_df['strategy'].unique())}"
    strat_num = len(xunke_df['strategy'].unique())
    plt.rcParams["figure.figsize"] = 30, 20
    plt.rcParams["font.size"] = 15
    # One row per strategy + one extra row for the all-strategy aggregate.
    fig, axes = plt.subplots(strat_num + 1, 3)
    for i, strat in enumerate(xunke_df['strategy'].unique()):
      xunke_strat_df = xunke_df.loc[xunke_df['strategy'] == strat].copy().sort_values("date")
      pplot_strat_df = pplot_df.loc[pplot_df['strategy'] == strat].copy().sort_values("date")
      xunke_strat_df.set_index("date", inplace=True)
      pplot_strat_df.set_index("date", inplace=True)
      xunke_strat_df.index.name = None
      pplot_strat_df.index.name = None
      # Cumulative series so each subplot shows running PnL.
      xunke_trade_sr = xunke_strat_df['trade_pnl'].cumsum()
      pplot_trade_sr = pplot_strat_df['trade_pnl'].cumsum()
      xunke_option_sr = xunke_strat_df['option_pnl'].cumsum()
      pplot_option_sr = pplot_strat_df['option_pnl'].cumsum()
      xunke_total_sr = xunke_strat_df['total_pnl'].cumsum()
      pplot_total_sr = pplot_strat_df['total_pnl'].cumsum()
      axes[i][0].set_title(f"{strat} trade pnl xunke: {xunke_trade_sr.iloc[-1]:,.0f} "
                           f"pplot: {pplot_trade_sr.iloc[-1]:,.0f}")
      axes[i][1].set_title(f"{strat} option pnl xunke: {xunke_option_sr.iloc[-1]:,.0f} "
                           f"pplot: {pplot_option_sr.iloc[-1]:,.0f}")
      axes[i][2].set_title(f"{strat} total pnl xunke: {xunke_total_sr.iloc[-1]:,.0f} "
                           f"pplot: {pplot_total_sr.iloc[-1]:,.0f}")
      xunke_trade_sr.plot(ax=axes[i][0], label="xunke", x_compat=True)
      xunke_option_sr.plot(ax=axes[i][1], label="xunke", x_compat=True)
      xunke_total_sr.plot(ax=axes[i][2], label="xunke", x_compat=True)
      pplot_trade_sr.plot(ax=axes[i][0], label="pplot", x_compat=True)
      pplot_option_sr.plot(ax=axes[i][1], label="pplot", x_compat=True)
      pplot_total_sr.plot(ax=axes[i][2], label="pplot", x_compat=True)
      axes[i][0].legend()
      axes[i][1].legend()
      axes[i][2].legend()
      axes[i][0].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
      axes[i][0].xaxis.set_major_locator(mdates.AutoDateLocator())
      axes[i][1].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
      axes[i][1].xaxis.set_major_locator(mdates.AutoDateLocator())
      axes[i][2].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
      axes[i][2].xaxis.set_major_locator(mdates.AutoDateLocator())

    # Aggregate row: sum across strategies by date (last row of the grid).
    if strat_num > 1:
      app_dict = {"trade_pnl": "sum", "option_pnl": "sum", "total_pnl": "sum"}
      xunke_df = xunke_df.groupby("date").agg(app_dict)
      pplot_df = pplot_df.groupby("date").agg(app_dict)
      xunke_df.index.name = None
      pplot_df.index.name = None
      xunke_trade_sr = xunke_df['trade_pnl'].cumsum()
      pplot_trade_sr = pplot_df['trade_pnl'].cumsum()
      xunke_option_sr = xunke_df['option_pnl'].cumsum()
      pplot_option_sr = pplot_df['option_pnl'].cumsum()
      xunke_total_sr = xunke_df['total_pnl'].cumsum()
      pplot_total_sr = pplot_df['total_pnl'].cumsum()
      axes[strat_num][0].set_title(f"all trade pnl xunke: {xunke_trade_sr.iloc[-1]:,.0f} "
                                   f"pplot: {pplot_trade_sr.iloc[-1]:,.0f}")
      axes[strat_num][1].set_title(f"all option pnl xunke: {xunke_option_sr.iloc[-1]:,.0f} "
                                   f"pplot: {pplot_option_sr.iloc[-1]:,.0f}")
      axes[strat_num][2].set_title(f"all total pnl xunke: {xunke_total_sr.iloc[-1]:,.0f} "
                                   f"pplot: {pplot_total_sr.iloc[-1]:,.0f}")
      xunke_trade_sr.plot(ax=axes[strat_num][0], label="xunke", x_compat=True)
      xunke_option_sr.plot(ax=axes[strat_num][1], label="xunke", x_compat=True)
      xunke_total_sr.plot(ax=axes[strat_num][2], label="xunke", x_compat=True)
      pplot_trade_sr.plot(ax=axes[strat_num][0], label="pplot", x_compat=True)
      pplot_option_sr.plot(ax=axes[strat_num][1], label="pplot", x_compat=True)
      pplot_total_sr.plot(ax=axes[strat_num][2], label="pplot", x_compat=True)
      axes[strat_num][0].legend()
      axes[strat_num][1].legend()
      axes[strat_num][2].legend()
      axes[strat_num][0].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
      axes[strat_num][0].xaxis.set_major_locator(mdates.AutoDateLocator())
      axes[strat_num][1].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
      axes[strat_num][1].xaxis.set_major_locator(mdates.AutoDateLocator())
      axes[strat_num][2].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
      axes[strat_num][2].xaxis.set_major_locator(mdates.AutoDateLocator())

    plt.tight_layout()
    plt.savefig(
        f"{os.path.join(os.path.expandvars('$COIN_REPO'), FLAGS.output_dir)}/{project}_pnl_{start_dt}_{end_dt}.png"
    )
    plt.close()

  def plot_pplot_xunke_pnl(self):
    """Runs both PnL queries, emits per-project plots (parallelized across
    projects), then a single all-project cumulative trade/option/total plot.
    """
    os.makedirs(f"{os.path.join(os.path.expandvars('$COIN_REPO'), FLAGS.output_dir)}",
                exist_ok=True)
    self._query_strategy_run_pnl()
    projects = list(self._proj_strat_info_dict.keys())
    if len(projects) == 1:
      self.plot_pplot_xunke_pnl_by_project(projects[0])
    else:
      # NOTE(review): child processes re-use self (and its cached results)
      # via pickling; plots per project are produced independently.
      with ProcessPoolExecutor(max_workers=min(10, len(projects))) as executor:
        executor.map(self.plot_pplot_xunke_pnl_by_project, projects)
    # All-project aggregate: sum by date, then cumulative sum over time.
    app_dict = {"trade_pnl": "sum", "option_pnl": "sum", "total_pnl": "sum"}
    xunke_df = self._xunke_res.groupby("date").agg(app_dict).sort_index().cumsum()
    pplot_df = self._pplot_res.groupby("date").agg(app_dict).sort_index().cumsum()
    xunke_df.index.name = None
    pplot_df.index.name = None
    plt.rcParams["figure.figsize"] = 30, 20
    plt.rcParams["font.size"] = 20
    fig, axes = plt.subplots(1, 3)
    axes[0].set_title(f"all trade pnl xunke: {xunke_df['trade_pnl'].iloc[-1]:,.0f} "
                      f"pplot: {pplot_df['trade_pnl'].iloc[-1]:,.0f}")
    axes[1].set_title(f"all option pnl xunke: {xunke_df['option_pnl'].iloc[-1]:,.0f} "
                      f"pplot: {pplot_df['option_pnl'].iloc[-1]:,.0f}")
    axes[2].set_title(f"all total pnl xunke: {xunke_df['total_pnl'].iloc[-1]:,.0f} "
                      f"pplot: {pplot_df['total_pnl'].iloc[-1]:,.0f}")
    xunke_df['trade_pnl'].plot(ax=axes[0], label="xunke", x_compat=True)
    xunke_df['option_pnl'].plot(ax=axes[1], label="xunke", x_compat=True)
    xunke_df['total_pnl'].plot(ax=axes[2], label="xunke", x_compat=True)
    pplot_df['trade_pnl'].plot(ax=axes[0], label="pplot", x_compat=True)
    pplot_df['option_pnl'].plot(ax=axes[1], label="pplot", x_compat=True)
    pplot_df['total_pnl'].plot(ax=axes[2], label="pplot", x_compat=True)
    axes[0].legend()
    axes[1].legend()
    axes[2].legend()
    plt.tight_layout()
    # After the groupby above, 'date' is the index, so the else-branch applies.
    start_dt = (xunke_df['date'].min()
                if 'date' in xunke_df.columns else xunke_df.index.min()).strftime("%Y%m%d")
    end_dt = (xunke_df['date'].max()
              if 'date' in xunke_df.columns else xunke_df.index.max()).strftime("%Y%m%d")
    plt.savefig(
        f"{os.path.join(os.path.expandvars('$COIN_REPO'), FLAGS.output_dir)}/total_pnl_{'_'.join(list(self._proj_strat_info_dict.keys()))}_{start_dt}_{end_dt}.png"
    )
    plt.close()


class FlowAccounting(object):
  """Top-level driver for the accounting pipeline over a set of projects.

  Loads the per-project strategy/machine layout from config/strats.json, mines
  executor-reserve history from the deploy git repo, optionally queries klines
  for the involved products, and delegates PnL work to TeamPnLAccounting.
  """

  def __init__(
      self,
      projects: list,
      start_time: datetime.datetime,
      end_time: datetime.datetime,
  ) -> None:
    """Args:
      projects: project names, or ["ALL"] to expand to every configured
        project whose commencement interval overlaps [start_time, end_time].
      start_time/end_time: accounting window.
    """
    self._start_time = start_time
    self._end_time = end_time
    # Strategy/machine layout per project, checked in next to this module.
    with open(os.path.join(os.path.dirname(__file__), "config/strats.json")) as f:
      self._proj_strat_machine_info = ujson.load(f)
    # "ALL" (as the only entry) expands to all projects active in the window.
    self._projects = projects if len(projects) > 1 or projects[0] != "ALL" else [
        k for k, v in self._proj_strat_machine_info.items()
        if datetime.datetime.strptime(v["commence_date"], "%Y%m%d") <= end_time and
        datetime.datetime.strptime(v["commence_end_date"], "%Y%m%d") >= start_time
    ]
    # Populated by query_coin_deploy_history() / query_kline().
    self._res_hist_df = None
    self._kline_df = None

  @staticmethod
  def _get_mea(product_str):
    """Maps a "Market:Exchange:Product" string to the market-exchange-API
    identifier (e.g. "Spot.Binance.v1") used by the kline generators.

    Falls through (implicitly returning None) for unrecognized
    market/exchange pairs and for Uniswap symbols listed in neither
    product-info file.
    """
    # maxsplit=2 keeps any additional ':' inside the product part.
    market, exchange, product = product_str.split(':', 2)
    me = f'{market}.{exchange}'
    if me == 'Spot.Binance':
      return me + '.v1'

    if me == 'Futures.Binance':
      # USDT-margined perps use the regular futures API; everything else is
      # routed to the delivery API.
      if 'USDT' in product:
        return me + '.v1'
      else:
        return me + '.v1-delivery'

    if me == 'Spot.Ftx':
      return me + '.v1'

    if me == 'Futures.Ftx':
      return me + '.v1'

    if me == "Spot.Huobi":
      return me + '.v1'

    if me == "Spot.Gateio":
      return me + '.v4'

    if me == "Spot.Mexc":
      return me + '.v3'

    if me == "Spot.Okex":
      return me + '.v5'

    if me == "Spot.Kucoin":
      return me + '.v1'

    if me == "Spot.Coinone":
      return me + '.v2'

    if me == "Spot.Mercado":
      return me + '.v4'

    if me == "Spot.Bitmart":
      return me + '.v3'
    if me == "Spot.Bithumb":
      return me + '.v2'
    if me == "Spot.Bitbank":
      return me + '.v1'
    if me == "Spot.Bybit":
      return me + '.v3'

    if me == "Spot.Uniswap":
      # Uniswap version depends on where the symbol is listed: prefer v3,
      # fall back to v2, based on the checked-in product-info files.
      pi_v2_dir = os.path.expandvars("${COIN_REPO}/data/coin2/product_info/Spot.Uniswap.v2.json")
      pi_v3_dir = os.path.expandvars("${COIN_REPO}/data/coin2/product_info/Spot.Uniswap.v3.json")
      with open(pi_v3_dir) as f:
        pi_v3 = pd.DataFrame(ujson.load(f)['product_infos'])
      with open(pi_v2_dir) as f:
        pi_v2 = pd.DataFrame(ujson.load(f)['product_infos'])
      if product in pi_v3['symbol'].tolist():  # use Uniswap V3 if available
        return me + '.v3'
      elif product in pi_v2['symbol'].tolist():
        return me + '.v2'
    if me == "Spot.Phemex":
      return me + '.v1'

  def _query_git_history_by_project(self, project) -> list:
    """Walks the deploy repo's commit history of each vmm strategy config and
    records every change of an executor's "reserve" for products whose
    product string contains the project name.

    Returns:
      A list of RESERVE_HISTORY_DATA tuples in chronological commit order
      (price/reserve fields left as NaN — only exec_reserve is mined here).
    """
    strategies = self._proj_strat_machine_info[project]["strategy"]
    repo = git.Repo(os.path.expandvars("$COIN_DEPLOY_REPO"))
    # Last seen exec_reserve per product, to emit only actual changes.
    last_exec_reserve_dict = {}
    executor_reserve_list = []
    for strategy in strategies:
      # Strategy group is the prefix before the first underscore.
      strat_group = strategy.split("_")[0]
      if strat_group == "vmm":
        # iter_commits yields newest-first; reverse to replay chronologically.
        commits = reversed(list(repo.iter_commits(paths=f"{strat_group}/config/{strategy}.json")))
        for commit in commits:
          # Normalize the commit timestamp to naive UTC.
          utc_commit_time = commit.committed_datetime.astimezone(
              datetime.timezone.utc).replace(tzinfo=None)
          for blob in commit.tree.traverse():
            if blob.path == f"{strat_group}/config/{strategy}.json":
              try:
                strat_config = ujson.loads(blob.data_stream.read().decode("utf-8"))
              except ujson.JSONDecodeError:
                # Skip commits where the config was not valid JSON.
                logging.error(f"[{blob.path}] {utc_commit_time} failed to read json"
                              f"{commit.hexsha} {commit.message}")
                continue
              for product_str, exec_config in strat_config["executors"].items():
                if project in product_str:
                  exec_reserve = exec_config["reserve"]
                  if exec_reserve != last_exec_reserve_dict.get(product_str):
                    executor_reserve_list.append(
                        RESERVE_HISTORY_DATA(utc_commit_time, strategy, product_str, np.nan, np.nan,
                                             exec_reserve))
                    logging.info(
                        f"[{strategy}, {product_str}] {utc_commit_time} exec_reserve "
                        f"{last_exec_reserve_dict.get(product_str, 0.0)} -> {exec_reserve}")
                    last_exec_reserve_dict[product_str] = exec_reserve
      elif strat_group == "hamm":
        # hamm strategies are skipped here.
        continue
      else:
        logging.error(f"unknown strategy group {strat_group}")
        continue
    return executor_reserve_list

  def query_coin_deploy_history(self) -> None:
    """Collects reserve-change history for every project (process pool when
    more than one project) and stores it on self._res_hist_df as a DataFrame.
    """
    results = []
    if len(self._projects) == 1:
      results = self._query_git_history_by_project(self._projects[0])
    else:
      with ProcessPoolExecutor(max_workers=len(self._projects)) as executor:
        futures = []
        for project in self._projects:
          futures.append(executor.submit(self._query_git_history_by_project, project))
        for future in futures:
          res = future.result()
          results.extend(res)
    self._res_hist_df = pd.DataFrame(results)

  def _query_kline_by_exchange(self, mea, symbols, resolution) -> pd.DataFrame:
    """Queries klines for all symbols of one market-exchange-API.

    Handles three fallbacks: the Spot.Mexc v2->v3 cutover on 2023-02-24,
    retrying at a coarser "1h" resolution if the interval-feed query fails,
    and the v1 kline service (klg1) for symbols the interval feed could not
    serve.  Returns a possibly-empty DataFrame with a product_str column.
    """
    # Per-symbol commencement bounds; the project key is assumed to be the
    # symbol prefix before '-'.
    commence_date_list = [
        datetime.datetime.strptime(
            self._proj_strat_machine_info[symbol.split("-")[0]]["commence_date"], "%Y%m%d")
        for symbol in symbols
    ]
    commence_end_date_list = [
        datetime.datetime.strptime(
            self._proj_strat_machine_info[symbol.split("-")[0]]["commence_end_date"], "%Y%m%d")
        for symbol in symbols
    ]
    kline_start_dt = max(self._start_time, min(commence_date_list))
    kline_end_dt = min(self._end_time, max(commence_end_date_list)) + datetime.timedelta(days=1)
    logging.info(f"querying {mea} {symbols} from {kline_start_dt} to {kline_end_dt}")
    # symbol -> list of partial DataFrames to concatenate at the end.
    final_dict = collections.defaultdict(list)
    final_failed = []
    # Spot.Mexc switched feed versions on 2023-02-24: query the v2 feed for
    # the slice before the cutover when the window straddles it.
    # NOTE(review): replace("3", "2") rewrites the first '3' anywhere in the
    # string; it works for "Spot.Mexc.v3" but is fragile.
    if mea == "Spot.Mexc.v3" and kline_start_dt < datetime.datetime(
        2023, 2, 24) and kline_end_dt >= datetime.datetime(2023, 2, 24):
      tmp_mea = mea.replace("3", "2")
      tmp_res, failed = klg2.get_kline_from_interval_feed(kline_start_dt,
                                                          datetime.datetime(2023, 2, 24),
                                                          tmp_mea,
                                                          symbols,
                                                          return_df=True,
                                                          resolution=resolution)
      for k, v in tmp_res.items():
        final_dict[k].append(v)
      if failed:
        final_failed.extend(failed)
      tmp_start_dt = datetime.datetime(2023, 2, 24)
    elif mea == "Spot.Mexc.v3" and kline_start_dt < datetime.datetime(
        2023, 2, 24) and kline_end_dt < datetime.datetime(2023, 2, 24):
      # Entire window predates the cutover: use the v2 feed throughout.
      mea = mea.replace("3", "2")
      tmp_start_dt = kline_start_dt
    else:
      tmp_start_dt = kline_start_dt
    try:
      res_dict, failed_symbols = klg2.get_kline_from_interval_feed(tmp_start_dt,
                                                                   kline_end_dt,
                                                                   mea,
                                                                   symbols,
                                                                   resolution=resolution,
                                                                   return_df=True)
    # NOTE(review): bare except also catches KeyboardInterrupt/SystemExit;
    # any failure triggers a coarser 1h-resolution retry.
    except:
      logging.error(f"failed to query kline: {mea} {symbols} {resolution}")
      res_dict, failed_symbols = klg2.get_kline_from_interval_feed(tmp_start_dt,
                                                                   kline_end_dt,
                                                                   mea,
                                                                   symbols,
                                                                   resolution="1h",
                                                                   return_df=True)
    for k, v in res_dict.items():
      final_dict[k].append(v)
    if failed_symbols:
      final_failed.extend(failed_symbols)
    final_failed = list(set(final_failed))
    if final_failed:
      # Fall back to the v1 kline service for symbols the interval feed
      # could not serve, one product at a time.
      product_strs = [":".join(mea.split(".")[:2]) + ":" + symbol for symbol in final_failed]
      for product_str in product_strs:
        # First probe availability at daily resolution...
        try:
          res_dict2, failed_symbols2 = klg1.query_kline_as_pb(kline_end_dt,
                                                              product_str,
                                                              kline_start_dt,
                                                              kline_end_dt,
                                                              kline_period="1d",
                                                              tolerance=1)
        except Exception as e:
          logging.error(f"{e} failed to query kline: {mea} {final_failed} 1d")
          continue
        if failed_symbols2 or not res_dict2.klines:
          logging.error(f"failed to query kline: {mea} {final_failed} 1d")
        else:
          # ...then fetch the real data at hourly resolution.
          try:
            res_dict2, failed_symbols2 = klg1.query_kline_as_pb(kline_end_dt,
                                                                product_str,
                                                                kline_start_dt,
                                                                kline_end_dt,
                                                                kline_period="1h",
                                                                tolerance=1)
          except NotImplementedError as e:
            logging.error(f"{e} failed to query kline: {mea} {failed_symbols} 1h")
            continue
          if failed_symbols2 or not res_dict2.klines:
            logging.error(f"failed to query kline: {mea} {failed_symbols} 1h")

        # Whichever query populated res_dict2 last, convert its protos to rows.
        if res_dict2.klines:
          tmp_df = pd.DataFrame([MessageToDict(i) for i in res_dict2.klines])
          tmp_df["klineTimestamp"] = tmp_df["klineTimestamp"].astype(int)
          final_dict[product_str.split(":")[-1]].append(tmp_df)
    if final_dict:
      # Stitch the partial frames per symbol and tag each with its product_str.
      final_res = pd.concat([
          pd.concat(df_list).assign(product_str=":".join(mea.split(".")[:2]) + ":" + symbol)
          for symbol, df_list in final_dict.items()
      ])
    else:
      final_res = pd.DataFrame()
    return final_res

  def query_kline(self, resolution="1m") -> None:
    """Queries klines for every distinct product seen in the reserve history
    (grouped per market-exchange-API, parallelized when more than one) and
    stores the combined DataFrame on self._kline_df.

    Requires query_coin_deploy_history() to have populated self._res_hist_df.
    """
    # Explode "Market:Exchange:Symbol" into columns and dedupe.
    product_strs = self._res_hist_df["product_str"].str.split(":").apply(pd.Series).rename(columns={
        0: "market_type",
        1: "exchange",
        2: "symbol"
    }).drop_duplicates()
    product_strs["product_str"] = product_strs["market_type"] + ":" + product_strs[
        "exchange"] + ":" + product_strs["symbol"]
    product_strs["mea"] = product_strs["product_str"].apply(FlowAccounting._get_mea)
    final_res = []
    if len(product_strs) == 1:
      mea = product_strs["mea"].iloc[0]
      symbols = product_strs["symbol"].tolist()
      final_res.append(self._query_kline_by_exchange(mea, symbols, resolution))
    else:
      with ProcessPoolExecutor(max_workers=len(product_strs)) as executor:
        futures = []
        for mea, symbols in product_strs.groupby("mea")["symbol"]:
          futures.append(
              executor.submit(self._query_kline_by_exchange, mea, symbols.tolist(), resolution))
        for future in futures:
          res = future.result()
          final_res.append(res)
    final_res = pd.concat(final_res)
    if not final_res.empty:
      # NOTE(review): relies on to_datetime's default unit for the integer
      # klineTimestamp column — confirm the timestamps' unit matches.
      final_res["dt"] = pd.to_datetime(final_res["klineTimestamp"])
    self._kline_df = final_res

  def run_team_pnl(self):
    """Runs the full pplot-vs-xunke PnL comparison for the selected projects."""
    pnl_acc_cls = TeamPnLAccounting(
        self._start_time, self._end_time,
        {k: v for k, v in self._proj_strat_machine_info.items() if k in self._projects})
    pnl_acc_cls.plot_pplot_xunke_pnl()
    # pnl_acc_cls.query_emtpy_pnl_plot()

  def run_specific_pnl(self):
    """Dumps per-strategy pnl stats via strategy_stat_replayer."""
    pnl_acc_cls = TeamPnLAccounting(
        self._start_time, self._end_time,
        {k: v for k, v in self._proj_strat_machine_info.items() if k in self._projects})
    pnl_acc_cls.dumpy_pnl_from_strat_stat()

  def run_active_pnl(self):
    """NOTE(review): constructs a TeamPnLAccounting but never uses it — this
    method looks unfinished; confirm intent before relying on it.
    """
    pnl_acc_cls = TeamPnLAccounting(
        self._start_time, self._end_time,
        {k: v for k, v in self._proj_strat_machine_info.items() if k in self._projects})

def main(_):
  """Entry point: parse the date window and project list from flags, refresh
  the deploy repo to an up-to-date master, then run the deploy-history query
  and the per-strategy pnl-stat dump.
  """
  window_fmt = '%Y%m%d'
  start_time = datetime.datetime.strptime(FLAGS.start_time, window_fmt)
  end_time = datetime.datetime.strptime(FLAGS.end_time, window_fmt)
  projects = FLAGS.projects.split(',')
  # Make sure the deploy repo is on master and freshly fetched before the
  # git-history mining reads from it.
  deploy_repo = os.path.expandvars('$COIN_DEPLOY_REPO')
  for git_cmd in (['git', 'switch', 'master'], ['git', 'fetch', 'origin', 'master']):
    subprocess.run(git_cmd, cwd=deploy_repo)
  flow_accounting = FlowAccounting(projects, start_time, end_time)
  flow_accounting.query_coin_deploy_history()
  # flow_accounting.query_kline()
  # flow_accounting.run_team_pnl()
  flow_accounting.run_specific_pnl()


if __name__ == "__main__":
  # Verbose logging so the kline/replayer helpers' diagnostics are visible.
  logging.basicConfig(level="DEBUG", format='%(levelname)8s %(asctime)s %(name)s] %(message)s')

  # Command-line interface (absl flags); the date window is inclusive on both
  # ends per the flag help strings.
  flags.DEFINE_string("start_time", "20230101", "%Y%m%d, include start date")
  flags.DEFINE_string("end_time", "20230331", "%Y%m%d, include end date, 2023-03-31 23:59:59")
  flags.DEFINE_string("projects", "ALL", "csv style list of projects")
  flags.DEFINE_string("output_dir", "sim_result/accounting2", "output dir")
  flags.DEFINE_bool("run_pnl_plot", True, "run pnl_plot or just query data")
  flags.DEFINE_bool("force_empty_date", False, "re-run pnl_plot which has empty date")
  app.run(main)
