import pandas as pd
import numpy as np
import traceback
import logging
import pytz
import json
import os

from pathlib import Path
from absl import app, flags
import coin.proto.coin_order_enums_pb2 as order_enums
from datetime import datetime, timedelta, time
from google.protobuf.json_format import MessageToJson
from coin.base.query_util import (query_klines, query_pta)
from coin.base.datetime_util import to_timestamp_int
from xunkemgmt_client.client.util.query_util import (
    query_accounts, query_strategies, query_exchange_apis, query_applications)
from coin.exchange.kr_rest.product.product_impl import generate_product_from_str2
from concurrent.futures import ProcessPoolExecutor, as_completed
from xunkemgmt_client.client.api_client import XunkemgmtClient
from google.protobuf.json_format import MessageToDict
from coin2.service.account.account_info_service_pb2 import \
    QueryBalanceIntervalHistoriesRequestProto
from coin.support.pta.app.working_order_dumper import \
    WorkingOrderDumper, TickerKey
from coin.proto.coin_listing_mm_report_pb2 import (ListingMMReportDataProto,
                                                   OrderDepthProto,
                                                   OrderSpreadProto,
                                                   OrderUptimeProto)
from xunkemgmt_client.market.feed_stats_v2.database.connection.coin2_database import Session
from xunkemgmt_client.market.feed_stats_v2.model.feed_stats import (
    FeedStatsIndex, BookStats)

# Small relative tolerance applied to the mid price before tick rounding in
# the depth / uptime computations below.
EPS = 1e-6
# absl flag registry; flags themselves are defined under __main__.
FLAGS = flags.FLAGS


class WorkingOrderInfoDumper(object):
  """Replays strategy order logs and computes market-making statistics.

  The underlying WorkingOrderDumper samples the set of "working" (open)
  orders at a fixed time interval over [start_time, end_time); this class
  derives per-symbol statistics from those snapshots: posted order depth,
  quoted spread versus a mid-price proxy, quote uptime ratio, and average
  order price.
  """

  def __init__(self, 
               strat_log_root_dir, 
               strat_list, 
               start_time, 
               end_time, 
               interval_in_sec=60, 
               working_order_limit_sec=3000):
    """Builds the underlying WorkingOrderDumper replayer.

    Args:
      strat_log_root_dir: root directory of the strategy proto logs.
      strat_list: list of strategy names whose logs are replayed.
      start_time: naive datetime; interpreted as UTC.
      end_time: naive datetime; interpreted as UTC.
      interval_in_sec: sampling interval between snapshots, in seconds.
      working_order_limit_sec: forwarded to WorkingOrderDumper (presumably
        an upper bound on how long an order still counts as working --
        TODO confirm against WorkingOrderDumper).
    """
    self._strat_list = strat_list
    working_order_dumper = WorkingOrderDumper(
        start_dt=start_time.replace(tzinfo=pytz.UTC),
        end_dt=end_time.replace(tzinfo=pytz.UTC),
        interval=timedelta(seconds=interval_in_sec),
        strat_list=self._strat_list,
        strat_log_root_dir=strat_log_root_dir,
        working_order_limit_sec=working_order_limit_sec)
    self._order_dumper = working_order_dumper
    # Filled by stats_replay(): maps TickerKey -> list of working-order-info
    # snapshots, one entry per sampled interval.
    self._stats = {}

  def stats_replay(self):
    """Replays the logs and caches the per-ticker snapshot stats."""
    self._order_dumper.replay()
    self._stats = self._order_dumper.get_stats()

  def _get_working_order_info_list_by_symbol(self, 
                                             market_type, 
                                             exchange, symbol):
    """Collects all snapshots for `symbol`.

    When both market_type and exchange are given, only that exact ticker
    is looked up; otherwise every ticker whose symbol matches contributes.
    """
    stats = self._stats
    if market_type and exchange:
      ticker_key = TickerKey(market_type=market_type,
                             exchange=exchange,
                             symbol=symbol)
      ticker_key_list = [ticker_key]
    else:
      ticker_key_list = [key for key in stats.keys() if key.symbol == symbol]
    working_order_info_list = []
    for ticker_key in ticker_key_list:
      working_order_info_list.extend(stats.get(ticker_key, []))
    return working_order_info_list

  def _get_order_depth_snapshot(self, 
                                working_order_info, 
                                sell_price_thold=None, 
                                buy_price_thold=None):
    """Computes the quote-denominated depth posted on each side.

    Depth is sum(price * qty) over the snapshot's working orders of a
    side, optionally restricted to orders priced within the thresholds
    (sell orders at or below sell_price_thold, buy orders at or above
    buy_price_thold).

    Returns:
      (sell_side_depth, buy_side_depth); (0, 0) when the snapshot has no
      working orders.
    """
    if working_order_info.working_order is None:
      return 0, 0
    order_info_list = []
    for order_log in working_order_info.working_order:
      order_event = order_log.event
      order_info = (order_event.order_qty, order_event.order_side, order_event.order_price)
      order_info_list.append(order_info)
    order_info_df = pd.DataFrame(order_info_list, columns=['qty', 'side', 'price'])
    sell_order_df = order_info_df[order_info_df['side'] == order_enums.SELL_ORDER]
    buy_order_df = order_info_df[order_info_df['side'] == order_enums.BUY_ORDER]
    # NOTE: a falsy threshold (None or 0) disables the price filter.
    if sell_price_thold:
      sell_order_df = sell_order_df[sell_order_df['price'] <= sell_price_thold]
    if buy_price_thold:
      buy_order_df = buy_order_df[buy_order_df['price'] >= buy_price_thold]
    sell_side_depth = (sell_order_df['price'] * sell_order_df['qty']).sum()
    buy_side_depth = (buy_order_df['price'] * buy_order_df['qty']).sum()
    return sell_side_depth, buy_side_depth

  def _get_order_spread_snapshot(self,
                                 working_order_info,
                                 mid_price,
                                 quote_thold=None,
                                 base_thold=None):
    """Computes the relative spread at a target depth for each side.

    Orders on each side are sorted best-first (sells ascending, buys
    descending by price) and cumulated; the target level is the first
    price at which the cumulative quote amount (quote_thold) or base
    quantity (base_thold) is reached.  Without a threshold the worst
    (last) level is used.

    Returns:
      (sell_spread, buy_spread) as |price - mid_price| / mid_price; a
      side is None when it cannot cover the threshold, and both are None
      when the snapshot has no working orders.
    """
    if working_order_info.working_order is None:
      return None, None
    assert not (quote_thold and base_thold), 'quote_thold and base_thold redandunt'
    prices = []
    for order_side in (order_enums.SELL_ORDER, order_enums.BUY_ORDER):
      order_list = [(order_log.event.order_price, order_log.event.order_qty)
                    for order_log in working_order_info.working_order
                    if order_log.event.order_side == order_side]
      order_df = pd.DataFrame(order_list, columns=['price', 'qty']).set_index('price')
      if order_side == order_enums.SELL_ORDER:
        order_df = order_df.sort_index(ascending=True)
      else:
        order_df = order_df.sort_index(ascending=False)
      order_df['amount'] = order_df.index.values * order_df['qty']
      # Cumulative qty and amount from the best price outwards.
      order_df = order_df.cumsum(axis=0)
      if quote_thold:
        if len(order_df) == 0 or max(order_df['amount']) < quote_thold:
          prices.append(None)
          continue
        traget_idx = next(i for i in range(len(order_df)) \
            if order_df['amount'].iloc[i] >= quote_thold)
      elif base_thold:
        if len(order_df) == 0 or max(order_df['qty']) < base_thold:
          prices.append(None)
          continue
        traget_idx = next(i for i in range(len(order_df)) \
            if order_df['qty'].iloc[i] >= base_thold)
      else:
        traget_idx = len(order_df) - 1
      prices.append(order_df.index[traget_idx])
    # NOTE: a target price of exactly 0 would also be treated as missing.
    sell_spread = abs(prices[0] - mid_price) / mid_price if prices[0] else None
    buy_spread = abs(prices[1] - mid_price) / mid_price if prices[1] else None
    return sell_spread, buy_spread

  def _check_uptime(self,
                    working_order_info,
                    mid_price,
                    pi,
                    price_bp_thold=None,
                    quote_thold=None,
                    base_thold=None):
    """Decides whether this snapshot counts as quote uptime.

    A snapshot is "up" when at least one side posts depth -- optionally
    restricted to within price_bp_thold basis points of the tick-rounded
    mid price -- meeting the given threshold.

    NOTE(review): the depths from _get_order_depth_snapshot are in quote
    units (price * qty) but are compared directly against base_thold in
    the base branch -- confirm this is intended.
    """
    # Round the mid outward to a valid tick before applying the bp band.
    sell_price_thold = pi.round_up(mid_price * (1 + EPS)) * \
        (1 + price_bp_thold * 1e-4) if price_bp_thold else None
    buy_price_thold = pi.round_down(mid_price * (1 - EPS)) * \
        (1 - price_bp_thold * 1e-4) if price_bp_thold else None
    sell_side_depth, buy_side_depth = self._get_order_depth_snapshot(
          working_order_info, sell_price_thold, buy_price_thold)
    if quote_thold:
      uptime = sell_side_depth >= quote_thold or buy_side_depth >= quote_thold
    elif base_thold:
      uptime = sell_side_depth >= base_thold or buy_side_depth >= base_thold
    else:
      # Without thresholds any nonzero depth counts (truthy float).
      uptime = sell_side_depth or buy_side_depth
    return uptime

  def get_avg_uptime_ratio(self,
                           symbol,
                           market_type, 
                           exchange,
                           *,
                           price_bp_thold=None, 
                           quote_thold=None, 
                           base_thold=None):
    """Fraction of sampled snapshots in which quotes were "up".

    Args:
      symbol: normalized product symbol, e.g. 'BTC-USDT'.
      market_type: optional market-type filter (with exchange).
      exchange: optional exchange filter (with market_type).
      price_bp_thold: basis-point band around the mid; must be <= 1e4.
      quote_thold: minimum quote-denominated depth; mutually exclusive
        with base_thold.
      base_thold: minimum base-denominated depth.

    Returns:
      num_uptime / num_usable_samples, or 0 with no usable samples.
    """
    assert price_bp_thold is None or price_bp_thold <= 1e4
    assert not (quote_thold and base_thold), 'quote_thold and base_thold redandunt'
    working_order_info_list = self._get_working_order_info_list_by_symbol(
        market_type, exchange, symbol)
    product = generate_product_from_str2(market_type, exchange, None, 
        norm_product=symbol, current_datetime=datetime.now())
    pi = product.product_holder.product_info
    num_samples = len(working_order_info_list)
    num_uptime = 0
    for working_order_info in working_order_info_list:
      try:
        # NOTE(review): uses the first ticker's last trade price as the
        # mid-price proxy.
        mid_price = working_order_info.ticker.each_ticker[0].last
        if self._check_uptime(working_order_info, mid_price, pi, price_bp_thold, quote_thold, base_thold):
          num_uptime += 1
      except Exception as e:
        # Samples that fail (e.g. missing ticker) are excluded from the
        # denominator instead of counting as downtime.
        num_samples -= 1
        continue
    uptime_ratio = num_uptime / num_samples if num_samples != 0 else 0
    return uptime_ratio

  def get_avg_order_depth(self, symbol, market_type, exchange, *, price_bp_thold=None):
    """Average per-snapshot quote-denominated depth on each side.

    Returns:
      (sell_avg_depth, buy_avg_depth); 0 when there are no usable samples.
    """
    working_order_info_list = self._get_working_order_info_list_by_symbol(
        market_type, exchange, symbol)
    sell_depth_list = []
    buy_depth_list = []
    product = generate_product_from_str2(market_type, exchange, None, 
        norm_product=symbol, current_datetime=datetime.now())
    pi = product.product_holder.product_info
    for working_order_info in working_order_info_list:
      try:
        mid_price = working_order_info.ticker.each_ticker[0].last
        sell_price_thold, buy_price_thold = None, None
        if price_bp_thold:
          # Same tick-rounded bp band as _check_uptime.
          sell_price_thold = pi.round_up(mid_price * (1 + EPS)) * (1 + price_bp_thold * 1e-4)
          buy_price_thold = pi.round_down(mid_price * (1 - EPS)) * (1 - price_bp_thold * 1e-4)
        sell_side_depth, buy_side_depth = self._get_order_depth_snapshot(
            working_order_info, sell_price_thold, buy_price_thold)
        sell_depth_list.append(sell_side_depth)
        buy_depth_list.append(buy_side_depth)
      except Exception as e:
        # Skip snapshots without a usable ticker.
        continue
    sell_avg_depth = sum(sell_depth_list) / len(sell_depth_list) if len(sell_depth_list) != 0 else 0
    buy_avg_depth = sum(buy_depth_list) / len(buy_depth_list) if len(buy_depth_list) != 0 else 0
    return sell_avg_depth, buy_avg_depth

  def get_avg_order_spread(self,
                           symbol,
                           *,
                           quote_thold=None,
                           base_thold=None,
                           market_type=None,
                           exchange=None):
    """Average per-snapshot relative spread at the target depth.

    Returns:
      (sell_avg_spread, buy_avg_spread); a side is None when it has no
      finite spread samples.
    """
    working_order_info_list = self._get_working_order_info_list_by_symbol(
        market_type, exchange, symbol)
    sell_spread_list, buy_spread_list = [], []
    for working_order_info in working_order_info_list:
      try:
        mid_price = working_order_info.ticker.each_ticker[0].last
        sell_spread, buy_spread = self._get_order_spread_snapshot(
            working_order_info, mid_price, quote_thold=quote_thold, base_thold=base_thold)
        sell_spread_list.append(sell_spread)
        buy_spread_list.append(buy_spread)
      except Exception as e:
        continue
    spreads_list = [sell_spread_list, buy_spread_list]
    result_list = []
    for i in range(len(spreads_list)):
      # Drop missing (None / 0) and infinite samples before averaging.
      tmp_spread_list = [spread for spread in spreads_list[i]
                         if spread and spread not in [np.inf, -np.inf]]
      avg_spread = sum(tmp_spread_list) / len(tmp_spread_list) if len(tmp_spread_list) > 0 else None
      result_list.append(avg_spread)
    sell_avg_spread, buy_avg_spread = result_list
    return sell_avg_spread, buy_avg_spread

  def get_avg_order_price(self, symbol, *, market_type=None, exchange=None):
    """Average of the snapshot ticker last prices while orders worked.

    Returns None when no snapshot has a readable ticker price.
    """
    working_order_info_list = self._get_working_order_info_list_by_symbol(
        market_type, exchange, symbol)
    price_list = []
    for working_order_info in working_order_info_list:
      try:
        order_mid_price = working_order_info.ticker.each_ticker[0].last
        price_list.append(order_mid_price)
      except Exception as e:
        continue
    avg_order_price = sum(price_list) / len(price_list) \
        if len(price_list) != 0 else None
    return avg_order_price


class MMReportInfoDumper(object):
  """Builds the daily listing market-making report.

  Pulls together four data sources keyed (mostly) by
  ['market_type', 'exchange', 'symbol', 'trading_date']:
    * working-order stats replayed from strategy logs,
    * market kline data and feed book stats,
    * PTA fill data,
    * account balance histories,
  and converts the merged result into ListingMMReportDataProto records.
  """

  def __init__(self, *args, **kwargs):
    """Accepts keyword configuration only.

    Expected kwargs: strat_log_root_dir, strat_list, stats_requirements
    (DataFrame with at least 'exchange' and 'symbol' columns, plus the
    per-exchange thresholds), db_config_path, start_date, end_date.
    """
    self._strat_log_root_dir = kwargs.get('strat_log_root_dir')
    self._strat_list = kwargs.get('strat_list')
    self._stats_requirements = kwargs.get('stats_requirements')
    self._db_config_path = kwargs.get('db_config_path')
    self._start_date = kwargs.get('start_date')
    self._end_date = kwargs.get('end_date')
    self._exchange_list = list(self._stats_requirements['exchange'])
    # De-duplicated symbol universe of the report.
    self._symbol = list(set(list(self._stats_requirements['symbol'])))

  def get_market_spreads(self):
    """Queries daily market-wide bid/ask spreads from feed stats.

    Returns:
      DataFrame with ['trading_date', 'market_type', 'exchange', 'symbol',
      'avg_bid_ask_spread', 'twap'] where 'avg_bid_ask_spread' has been
      normalized by TWAP (i.e. expressed as a fraction of price).
    """
    start_time_str = datetime.strftime(self._start_date, '%Y-%m-%d')
    end_time_str = datetime.strftime(self._end_date, '%Y-%m-%d')
    exchange_api_df = query_exchange_apis()
    exchange_api_df = exchange_api_df[
        exchange_api_df['exchange'].isin(self._exchange_list)]
    # Use a single DB session for both queries and always release it; the
    # previous version created two Session objects and closed neither.
    session = Session()
    try:
      feed_stats = session.query(
          FeedStatsIndex.feed_stats_id,
          FeedStatsIndex.trading_date,
          FeedStatsIndex.exchange_api_id,
          FeedStatsIndex.symbol).filter(
              FeedStatsIndex.trading_date >= start_time_str,
              FeedStatsIndex.trading_date <= end_time_str,
              FeedStatsIndex.symbol.in_(self._symbol)).all()
      book_stats = session.query(
          BookStats.feed_stats_id,
          BookStats.avg_bid_ask_spread,
          BookStats.twap).filter(
              BookStats.time_first_book >= start_time_str,
              BookStats.time_last_book <= end_time_str).all()
    finally:
      session.close()
    feed_stats_df = pd.merge(
        pd.DataFrame(feed_stats), pd.DataFrame(book_stats), on='feed_stats_id')
    feed_stats_df = pd.merge(feed_stats_df, exchange_api_df, on='exchange_api_id')
    # Keep only the latest feed_stats row per (date, api, symbol).
    feed_stats_df.sort_values(by='feed_stats_id', ascending=False, inplace=True)
    feed_stats_df.drop_duplicates(
        subset=['trading_date', 'exchange_api_id', 'symbol'], inplace=True)
    feed_stats_df = feed_stats_df[
        ['trading_date', 'market_type', 'exchange', 'symbol', 'avg_bid_ask_spread', 'twap']]
    feed_stats_df.sort_values(
        by=['market_type', 'exchange', 'symbol', 'trading_date'], inplace=True)
    feed_stats_df.reset_index(drop=True, inplace=True)
    # Normalize the absolute spread by TWAP to get a relative spread.
    feed_stats_df['avg_bid_ask_spread'] = (
        feed_stats_df['avg_bid_ask_spread'] / feed_stats_df['twap'])
    return feed_stats_df

  def get_pta_market_info(self, business_units=None):
    """Queries PTA fills, joins daily klines, and computes market share.

    Returns:
      DataFrame with PTA turnover, market kline columns and a
      'market_share' column; empty DataFrame when no PTA rows match.
    """
    if not business_units:
      business_units = ['Day1mm', 'ExtDay1mm']
    pta_df = query_pta(start_date=self._start_date,
                       end_date=self._end_date,
                       business_units=business_units,
                       strategy_names=self._strat_list,
                       exchanges=self._exchange_list,
                       symbols=self._symbol)
    if len(pta_df) == 0:
      return pd.DataFrame()
    pta_df.rename(columns={'fill_pq_in_usd': 'pta_turnover_in_usd'}, inplace=True)
    common_cols = ['market_type', 'exchange', 'symbol', 'trading_date']
    pta_df = pta_df.groupby(common_cols, as_index=False).sum()
    kline_df = query_klines(start_date=self._start_date, end_date=self._end_date,
                            exchanges=self._exchange_list, symbols=self._symbol)
    kline_df = self._generate_kline_view_df(kline_df)
    pta_kline_df = pd.merge(pta_df, kline_df, on=common_cols, how='left')
    # Market share = our turnover / total market turnover for the day.
    pta_kline_df['market_share'] = pta_kline_df.apply(
        lambda x: x['pta_turnover_in_usd'] / x['market_turnover_in_usd']
        if pd.notnull(x['market_turnover_in_usd'])
        and x['market_turnover_in_usd'] > 0 else np.nan, axis=1)
    return pta_kline_df

  def _generate_kline_view_df(self, kline_df):
    """Flattens raw kline rows into per-day volume/turnover/high/low.

    'kline_dict' arrives as a JSON string; only the first kline of each
    row (presumably the daily bar -- confirm upstream) is used.
    """
    cols = ['market_type', 'exchange', 'symbol', 'trading_date']
    # .copy() so the column assignments below never hit a view.
    kline_view_df = kline_df.drop_duplicates(subset=cols).copy()
    kline_view_df['kline_dict'] = kline_view_df['kline_dict'].apply(json.loads)
    kline_view_df['market_turnover_in_usd'] = kline_view_df['kline_dict'].apply(
        lambda x: x['klines'][0].get('turnover_in_usd'))
    kline_view_df['market_volume'] = kline_view_df['kline_dict'].apply(
        lambda x: x['klines'][0].get('volume'))
    kline_view_df['high_price'] = kline_view_df['kline_dict'].apply(
        lambda x: x['klines'][0].get('high'))
    kline_view_df['low_price'] = kline_view_df['kline_dict'].apply(
        lambda x: x['klines'][0].get('low'))
    info_needed = ['market_volume', 'market_turnover_in_usd', 'high_price', 'low_price']
    return kline_view_df[cols + info_needed]

  def get_account_balance_info(self, account_id_list=None, business_units=None):
    """Queries per-day balances of the report coin, summed per exchange.

    When account_id_list is not given, uses the accounts of the report
    strategies plus all Master accounts.

    Returns:
      DataFrame keyed by ['trading_date', 'exchange'] with a 'balance'
      column; zero-balance rows are dropped.  Empty DataFrame when the
      API returns no histories.
    """
    assert self._db_config_path is not None
    if not business_units:
      business_units = ['Coin', 'Day1mm', 'ExtDay1mm']
    if not account_id_list:
      application_df = query_applications()
      application_df = application_df[
          application_df['strategy_name'].isin(self._strat_list)]
      account_id_list = list(application_df['account_id'])
      account_df = query_accounts()
      master_id_list = list(
          account_df[account_df['account_type'] == 'Master']['account_id'])
      account_id_list += master_id_list
    # The report tracks the base coin of the (single) report symbol.
    coin = self._symbol[0].strip().split('-')[0]
    start_time = datetime.combine(self._start_date, time.min)
    end_time = datetime.combine(self._end_date, time.max)
    output_list = []
    with XunkemgmtClient() as api_client:
      balance_request = QueryBalanceIntervalHistoriesRequestProto(
          start_timestamp=to_timestamp_int(start_time),
          end_timestamp=to_timestamp_int(end_time),
          business_units=business_units,
          account_ids=account_id_list,
          currencies=[coin],
          agg_types=['MARKET_TYPE', 'EXCHANGE'])
      responses = api_client.query_balance_interval_histories(balance_request)
      for response in responses.histories:
        output_list.append({
            **MessageToDict(response.balance_info, preserving_proto_field_name=True),
            "each_balance": [
                MessageToDict(balance, preserving_proto_field_name=True)
                for balance in response.each_balance]})
    if not output_list:
      # Avoid KeyError on the column accesses below when nothing matched.
      return pd.DataFrame()
    balance_df = pd.DataFrame(output_list)
    balance_df['trading_date'] = balance_df['trading_date'].apply(
        lambda x: datetime.strptime(str(x), '%Y%m%d'))
    # Pick the total of the tracked coin out of the per-currency list.
    balance_df['balance'] = balance_df['each_balance'].apply(
        lambda x: next((b['total'] for b in x if b['currency'] == coin), 0))
    balance_df.sort_values(by=['market_type', 'exchange', 'trading_date'], inplace=True)
    balance_df = balance_df.groupby(['trading_date', 'exchange']).sum(numeric_only=True)
    balance_df = balance_df[balance_df['balance'] != 0]
    balance_df.reset_index(inplace=True)
    return balance_df

  def _prepare_working_order_data(self, trading_date, stat_requirement):
    """Replays one trading day and computes its working-order stats.

    Runs in a worker process; builds a WorkingOrderInfoDumper over the
    one-day window [trading_date, trading_date + 1d).

    Returns:
      Tuple matching the columns built in get_working_order_info().
    """
    querier = WorkingOrderInfoDumper(
        self._strat_log_root_dir, self._strat_list, trading_date,
        trading_date + timedelta(1))
    querier.stats_replay()
    market_type = stat_requirement['market_type']
    exchange = stat_requirement['exchange']
    symbol = stat_requirement['symbol']
    sell_order_depth, buy_order_depth = querier.get_avg_order_depth(
        symbol=symbol, market_type=market_type, exchange=exchange,
        price_bp_thold=stat_requirement['price_bp_thold'])
    avg_order_price = querier.get_avg_order_price(
        symbol=symbol, market_type=market_type, exchange=exchange)
    sell_order_spread, buy_order_spread = querier.get_avg_order_spread(
        symbol=symbol, market_type=market_type, exchange=exchange,
        quote_thold=stat_requirement['quote_thold'])
    # Uptime within the tight price band / depth threshold ...
    tight_uptime_ratio = querier.get_avg_uptime_ratio(
        symbol=symbol, market_type=market_type, exchange=exchange,
        price_bp_thold=stat_requirement['price_bp_thold'],
        quote_thold=stat_requirement['quote_thold'])
    # ... and unconditional uptime (any working depth counts).
    uptime_ratio = querier.get_avg_uptime_ratio(
        symbol=symbol, market_type=market_type, exchange=exchange)
    return (trading_date, market_type, exchange, symbol,
            sell_order_depth, buy_order_depth,
            sell_order_spread, buy_order_spread,
            uptime_ratio, tight_uptime_ratio, avg_order_price)

  def get_working_order_info(self, num_workers=7):
    """Computes per-day working-order stats for every requirement row.

    Fans one task per (requirement row, trading day) out to a process
    pool and collects the results as they complete.

    Args:
      num_workers: per-requirement worker count for the process pool.

    Returns:
      DataFrame sorted by ['market_type', 'exchange', 'symbol',
      'trading_date'].
    """
    futures_list = []
    # Initialize once before any loop: the previous version re-created
    # this list on every requirements iteration, relying on loop ordering
    # for correctness.
    trade_info_list = []
    # NOTE(review): pool size scales with the number of requirement rows;
    # presumably intended as num_workers per requirement -- confirm.
    # max(1, ...) keeps the pool valid when there are no requirements.
    with ProcessPoolExecutor(
        max_workers=max(1, num_workers * len(self._stats_requirements))) as executor:
      for _, stat_requirement in self._stats_requirements.iterrows():
        trading_date = self._start_date
        while trading_date < self._end_date:
          futures_list.append(executor.submit(
              self._prepare_working_order_data, trading_date, stat_requirement))
          trading_date += timedelta(1)
      for f in as_completed(futures_list):
        try:
          trade_info_list.append(f.result())
        except Exception:
          # A failed day is logged and skipped; the report keeps going.
          traceback.print_exc()
    trade_info_df = pd.DataFrame(
        trade_info_list,
        columns=['trading_date', 'market_type', 'exchange', 'symbol',
                 'sell_order_depth', 'buy_order_depth',
                 'sell_order_spread', 'buy_order_spread',
                 'uptime_ratio', 'tight_uptime_ratio', 'avg_order_price'])
    trade_info_df.sort_values(by=[
        'market_type', 'exchange', 'symbol', 'trading_date'], inplace=True)
    trade_info_df.reset_index(drop=True, inplace=True)
    return trade_info_df

  def prepare_report_data(self):
    """Merges all data sources and converts the rows to report protos.

    Returns:
      List of ListingMMReportDataProto, one per merged row.
    """
    common_cols = ['market_type', 'exchange', 'symbol', 'trading_date']
    working_order_info_df = self.get_working_order_info()
    balance_info_df = self.get_account_balance_info()
    market_info_df = self.get_pta_market_info()
    market_spreads_df = self.get_market_spreads()
    market_spreads_df['trading_date'] = pd.to_datetime(market_spreads_df['trading_date'])
    market_info_df = pd.merge(market_info_df, market_spreads_df, on=common_cols, how='left')
    listing_mm_df = pd.merge(working_order_info_df, market_info_df, on=common_cols, how='inner')
    # Balances are aggregated per (date, exchange) only; join on those keys.
    listing_mm_df = pd.merge(
        listing_mm_df, balance_info_df, on=['trading_date', 'exchange'], how='inner')
    print(f'verify listing mm result: \n {listing_mm_df.to_string()}')
    listing_mm_output = self._convert_to_listing_mm_proto(listing_mm_df)
    return listing_mm_output

  def _convert_to_listing_mm_proto(self, listing_mm_df):
    """Converts each merged report row into a ListingMMReportDataProto.

    Thresholds come from the requirement row matching the record's
    exchange; depth_unit is the quote currency ('USD') when a quote_thold
    is configured, otherwise the base coin.
    """
    listing_mm_dict_list = listing_mm_df.to_dict('records')
    result_list = []
    requirements = self._stats_requirements
    # Base coin is taken from the first requirement row's symbol.
    base = requirements['symbol'].iloc[0].split('-')[0]
    quote = 'USD'
    for record in listing_mm_dict_list:
      trading_order_depths = []
      trading_order_spreads = []
      market_order_spreads = []
      trading_order_uptimes = []
      # Requirement lookup is keyed by exchange only.
      requirement = requirements[requirements['exchange'] == record['exchange']]
      requirement = requirement.to_dict('records')[0]
      depth_thold = requirement.get('quote_thold') or requirement.get('base_thold')
      depth_unit = quote if requirement.get('quote_thold') else base
      for order_side in ('BUY_ORDER', 'SELL_ORDER'):
        order_depth = OrderDepthProto(
            spread_thold = requirement['price_bp_thold'],
            depth_unit = depth_unit,
            depth = record['{}_depth'.format(order_side.lower())],
            order_side = order_enums.OrderSide.Value(order_side))
        order_spread = OrderSpreadProto(
            depth_thold = depth_thold,
            depth_unit = depth_unit,
            spread = record['{}_spread'.format(order_side.lower())],
            order_side = order_enums.OrderSide.Value(order_side))
        # Market half-spread: the feed stat is a full bid/ask spread.
        market_spread = OrderSpreadProto(
            spread = record['avg_bid_ask_spread'] / 2,
            order_side = order_enums.OrderSide.Value(order_side))
        trading_order_depths.append(order_depth)
        trading_order_spreads.append(order_spread)
        market_order_spreads.append(market_spread)
      uptime = OrderUptimeProto(
          depth_unit = depth_unit,
          uptime = record['uptime_ratio'])
      tight_uptime = OrderUptimeProto(
          depth_thold = depth_thold,
          depth_unit = depth_unit,
          spread_thold = requirement['price_bp_thold'],
          uptime = record['tight_uptime_ratio'])
      trading_order_uptimes.extend([uptime, tight_uptime])
      output = ListingMMReportDataProto(
          trading_date = int(record['trading_date'].replace(tzinfo=pytz.UTC).timestamp()),
          market_type = record['market_type'],
          exchange = record['exchange'],
          symbol = record['symbol'],
          eod_balance = record['balance'],
          trading_volume = record['fill_qty'],
          trading_turnover_in_usd = record['pta_turnover_in_usd'],
          market_volume = record['market_volume'],
          market_turnover_in_usd = record['market_turnover_in_usd'],
          low_order_price = record['low_price'],
          high_order_price = record['high_price'],
          avg_order_price = record['avg_order_price'],
          trading_order_depths = trading_order_depths,
          trading_order_spreads = trading_order_spreads,
          market_order_spreads = market_order_spreads,
          trading_order_uptimes = trading_order_uptimes)
      result_list.append(output)
    return result_list


def dump_mm_result_to_json(lisintg_mm_result):
  """Writes each report proto to <exchange>/<symbol>/<YYYYMMDD>.json.

  Output root is ../../coin_dmm_data/listing_mm/exchange/ relative to the
  working directory.  A failure on one record is logged and skipped so a
  single bad record does not abort the whole dump.

  Args:
    lisintg_mm_result: iterable of ListingMMReportDataProto records.
  """
  parent_dir_path = "../../coin_dmm_data/listing_mm/exchange/"
  for record in lisintg_mm_result:
    try:
      # trading_date is a UTC epoch timestamp in seconds.
      trading_date_str = datetime.utcfromtimestamp(
          record.trading_date).strftime('%Y%m%d')
      output_dir = (Path(parent_dir_path) /
                    record.exchange.lower() / record.symbol.lower())
      output_dir.mkdir(parents=True, exist_ok=True)
      output_path = output_dir / '{}.json'.format(trading_date_str)
      with open(output_path, 'w') as jsfile:
        jsfile.write(MessageToJson(record, preserving_proto_field_name=True))
    except Exception:
      # Lazy %-style args: the previous version passed extra args after an
      # f-string (which breaks logging's formatting) and referenced locals
      # that may be unbound when the failure happened early in the try.
      logging.error('Failed to write %s %s %s json file',
                    record.symbol, record.exchange, record.trading_date)
      traceback.print_exc()
      continue


def main(_):
  """Entry point: builds and dumps the listing MM report.

  Uses the FLAGS date range when both bounds are given; otherwise falls
  back to the trailing seven days ending at today's midnight.
  """
  strat_df = query_strategies(business_units=['Day1mm', 'ExtDay1mm'])
  strat_list = None
  if len(strat_df) > 0:
    strat_list = list(strat_df['strategy_name'])
  stats_requirements = pd.read_csv(FLAGS.mm_report_config)
  if FLAGS.start_date and FLAGS.end_date:
    start_date = datetime.strptime(FLAGS.start_date, '%Y%m%d')
    end_date = datetime.strptime(FLAGS.end_date, '%Y%m%d')
  else:
    # Default window: the last week, ending at today's midnight.
    end_date = datetime.combine(datetime.now(), time.min)
    start_date = end_date - timedelta(7)
  mm_report_dumper = MMReportInfoDumper(
      stats_requirements=stats_requirements,
      strat_log_root_dir=FLAGS.strat_log_root_dir,
      strat_list=strat_list,
      db_config_path=FLAGS.db_config,
      start_date=start_date,
      end_date=end_date)
  lisintg_mm_result = mm_report_dumper.prepare_report_data()
  dump_mm_result_to_json(lisintg_mm_result)


if __name__ == '__main__':
  # Flags are registered here (not at import time) so importing this module
  # does not pollute the global flag namespace.
  flags.DEFINE_string('mm_report_config',
                      '',
                      'mm report stat requirement config')
  flags.DEFINE_string('db_config',
                      '../../coin_deploy/support_accounting/db_config/mysql_config_prod.json',
                      'db config file')
  flags.DEFINE_string('strat_log_root_dir',
                      '/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_slim_proto_log',
                      'strat log root dir')
  # Optional date range; when either bound is missing, main() defaults to
  # the trailing 7 days.
  flags.DEFINE_string('start_date', None, '%Y%m%d')
  flags.DEFINE_string('end_date', None, '%Y%m%d')
  app.run(main)
