import asyncio
import atexit
import datetime
import json
import logging
import os
import signal
import time
import tempfile
import traceback
import typing
from concurrent.futures import ThreadPoolExecutor

import google.protobuf.json_format as json_format
from absl import app, flags
from tornado.concurrent import run_on_executor
from tornado.ioloop import IOLoop, PeriodicCallback

import coin.base.database.util as db_util
from coin.base.datetime_util import to_timestamp_int
from coin.base.logging import init_logging
from coin.base.hostname import get_hostname
from coin.base.query_util import query_exchange_rates
from coin.base.timestamp import get_timestamp
from coin.proto.coin_meta_service_pb2 import QueryTransferRequestProto
from coin.proto.coin_request_pb2 import (
    AccountInfoProto,
    AccountRequestProto,
    StrategyInfoProto,
    TransferRequestProto)
from coin.proto.symbology_pb2 import ProductInfoProtoBundle
from coin.proto.coin_strategy_pb2 import StrategyLog
from coin.proto.coin_telemetry_pb2 import StrategyLatencyStatsProto
from coin.proto.coin_strategy_summary_pb2 import (
    StrategySummary,
    StrategySummaryMessage)
from coin.tool.strat_monitor.handlers.strategy_summary_calculator import \
    StrategySummaryCalculator
from coin.support.accounting.logic.constants import _gen_exchange_rate_coins
from coin.support.accounting.logic.query_util import AccountRequest
from coin.support.pta.logic.pta_stats_calculator import (get_accounting_currency,
                                                         is_inverse,
                                                         get_product_multiplier)
from coin.tool.strat_monitor.util.calculator_util import adjust_pnl_from_config
from coin.tool.strat_monitor.util.display_util import print_strategy_summary
from coin.tool.strat_monitor.util.info_util import AsyncCoinInfoExecutor
from coin.tool.strat_monitor.util.monitor_util import (get_pnl_adjust_info,
                                                       get_strat_from_json_config,
                                                       get_topics)
from coin.tool.strat_monitor.util.plot_util import (AsyncStrategyPlotExecutor)
from coin.util.queue.config import KafkaConfig
from coin.util.queue.constants import TopicType
from coin.util.queue.reader import AsyncKafkaReader
from coin.util.queue.tools.kafka_topic import parse_kafka_topic
from xunkemgmt_client.client.util.query_util import query_accounts
from xunkemgmt_client.support.accounting.logic.constants import FIAT_LIST
from xunkemgmt_client.support.pta.database.memcached_importer import \
    MemcachedStrategySummaryImporter


FLAGS = flags.FLAGS


def _get_coin_list():
  """Build the sorted universe of base coins from product-info bundles.

  Scans candidate product-info directories (remote share first, then
  relative fallbacks) for the newest trading date that contains each
  market's bundle, plus a fixed local path, then extracts the base leg of
  every product symbol. Exchange-rate coins are appended as well.

  Returns:
    Sorted list of unique base-coin symbols.
  """
  candidate_dirs = [
    "/remote/iosg/home-2/public/bot-coin/coin_product_info/data",
    "../coin_product_info/data",
    "../../coin_product_info/data",
  ]
  markets = ['Spot.Binance.v1', 'Spot.Huobi.v1', 'Spot.Okex.v5']
  bundle_paths = []
  for market in markets:
    for base_dir in candidate_dirs:
      if not os.path.exists(base_dir):
        continue
      # Trading dates sort lexicographically (YYYYMMDD-style) — newest first;
      # take the first date that actually has this market's bundle.
      for trading_date in sorted(os.listdir(base_dir), reverse=True):
        candidate = os.path.join(base_dir, trading_date, f'{market}.json')
        if os.path.exists(candidate):
          bundle_paths.append(candidate)
          break
  for market in markets:
    local_path = f'data/coin2/product_info/{market}.json'
    if os.path.exists(local_path):
      bundle_paths.append(local_path)

  coins = []
  for bundle_path in bundle_paths:
    with open(bundle_path) as fp:
      bundle = ProductInfoProtoBundle()
      json_format.ParseDict(json.load(fp), bundle, ignore_unknown_fields=True)
      # Base coin is the first dash-separated component of the symbol.
      coins.extend(info.symbol.split('-')[0] for info in bundle.product_infos)
  # Exchange-rate coins are part of the universe too.
  for rate_coins in _gen_exchange_rate_coins(None).values():
    coins.extend(rate_coins)
  return sorted(set(coins))


def _to_td_str(ts):
  td = datetime.datetime.utcfromtimestamp(ts / 10**9)
  td_str = td.date().strftime('%Y%m%d')
  return td_str


def _convert_summary_for_display(summary):
  """Derive turnover stats for inverse products and roll them up per quote.

  For every inverse symbol with a recorded last fill, turnover is computed
  as product multiplier times volume, and accumulated into the strategy's
  per-accounting-currency totals. Errors on individual symbols are logged
  and skipped so one bad product does not block the rest of the display.

  Args:
    summary: StrategySummary proto, mutated in place.

  Returns:
    The same (mutated) summary, for call-chaining.
  """
  assert isinstance(summary, StrategySummary)
  # Currencies whose per-quote turnover has already been reset this pass.
  # Bug fix: the original never recorded seen currencies, so the totals were
  # zeroed for every symbol and the += accumulation below never took effect.
  seen_currencies = set()
  for _, acct_stats in summary.stat_per_account.items():
    market_type = acct_stats.account_request.market_type
    exchange = acct_stats.account_request.exchange
    for symbol, symbol_stats in acct_stats.stat_per_symbol.items():
      try:
        if not is_inverse(market_type, exchange, symbol):
          continue
        if not symbol_stats.HasField('last_fill_ts'):
          continue
        ts = symbol_stats.last_fill_ts
        dt = datetime.datetime.utcfromtimestamp(ts / 10**9)
        multiplier = get_product_multiplier(market_type, exchange, symbol)
        accounting_currency = get_accounting_currency(market_type, exchange, symbol, dt)
        symbol_stats.turnover_mark = multiplier * symbol_stats.volume_mark
        symbol_stats.turnover_4hr = multiplier * symbol_stats.volume_4hr
        strat_stat = summary.stat_per_quote[accounting_currency]
        if accounting_currency not in seen_currencies:
          # Reset only on the first symbol seen for this currency so later
          # symbols accumulate instead of clobbering the totals.
          strat_stat.turnover_mark = 0
          strat_stat.turnover_4hr = 0
          seen_currencies.add(accounting_currency)
        strat_stat.turnover_mark += symbol_stats.turnover_mark
        strat_stat.turnover_4hr += symbol_stats.turnover_4hr
      except Exception:
        # Best-effort per symbol: log and keep converting the rest.
        logging.error(traceback.format_exc())
        continue
  return summary


def launch_monitor(*,
                   kafka_config: KafkaConfig,
                   topics: typing.List[str],
                   offset_hour: typing.Optional[float],
                   exit_after_min: typing.Optional[int],
                   dump_result_after_sec: typing.Optional[int],
                   dump_result_frequency: typing.Optional[int],
                   print_result: bool,
                   db_config: typing.Optional[str],
                   latency_db_config: typing.Optional[str],
                   plot_pnl: bool,
                   plot_order: bool,
                   plot_dir: typing.Optional[str],
                   realtime_pnl_adj: bool,
                   pnl_adj_file: typing.Optional[str]=None,
                   strat_stat_log_dir: typing.Optional[str]=None):
  """Construct a KafkaStrategyLogMonitor, wire it to the IOLoop, and run.

  Blocks in IOLoop.start() until the loop stops or the process is
  interrupted. When offset_hour is None, it defaults to the hours elapsed
  in the current UTC day plus a 3-hour cushion.
  """
  ioloop = IOLoop.current()
  if offset_hour is None:
    offset_hour = datetime.datetime.utcnow().hour + 3

  monitor_kwargs = dict(
      kafka_config=kafka_config,
      topics=topics,
      offset_hour=offset_hour,
      exit_after_min=exit_after_min,
      dump_result_after_sec=dump_result_after_sec,
      dump_result_frequency=dump_result_frequency,
      print_result=print_result,
      db_config=db_config,
      latency_db_config=latency_db_config,
      plot_pnl=plot_pnl,
      plot_order=plot_order,
      plot_dir=plot_dir,
      realtime_pnl_adj=realtime_pnl_adj,
      pnl_adj_file=pnl_adj_file,
      strat_stat_log_dir=strat_stat_log_dir,
  )
  monitor = KafkaStrategyLogMonitor(**monitor_kwargs)
  monitor.start(ioloop=ioloop)

  # Route termination signals through the monitor so it can exit promptly.
  for sig in (signal.SIGTERM, signal.SIGINT):
    signal.signal(sig, monitor.signal_handler)

  try:
    ioloop.start()
  except (KeyboardInterrupt, SystemExit):
    print()
    return


class KafkaStrategyLogMonitor(object):
  """Tails per-strategy Kafka StrategyLog topics and maintains summaries.

  One StrategySummaryCalculator is kept per topic. A tornado IOLoop drives
  the Kafka read loop plus periodic callbacks that dump results, refresh
  reference data (tickers, accounts, strategies, options pricing params,
  pnl adjustments) and emit plots. Database/memcached writes run on a
  thread pool via ``run_on_executor``.
  """

  def __init__(self,
               *,
               kafka_config: KafkaConfig,
               topics: typing.List[str],
               offset_hour: typing.Optional[float]=None,
               exit_after_min: typing.Optional[int]=None,
               dump_result_after_sec: typing.Optional[int]=None,
               dump_result_frequency: typing.Optional[int]=None,
               print_result: bool=False,
               db_config: typing.Optional[str]=None,
               latency_db_config: typing.Optional[str]=None,
               plot_pnl: bool=False,
               plot_order: bool=False,
               plot_dir: typing.Optional[str]=None,
               realtime_pnl_adj: bool=False,
               pnl_adj_file: typing.Optional[str]=None,
               strat_stat_log_dir: typing.Optional[str]=None,):
    """Validate config, create one handler per topic, and best-effort
    preload account info and an initial USD ticker snapshot (failures are
    logged, not raised)."""
    assert topics is not None and len(topics) > 0
    self._topics = topics
    assert offset_hour is not None and offset_hour >= 0
    self._offset_hour = offset_hour
    self._kafka_config = kafka_config
    self._kafka_consumer = None
    self._ioloop = None
    dump_result_after_sec = 0 if dump_result_after_sec is None else dump_result_after_sec
    # Warm-up delay before the first dump/plot; periodic dump interval in
    # seconds defaults to 20.
    self._dump_result_after = datetime.timedelta(seconds=dump_result_after_sec)
    self._dump_result_frequency = \
        dump_result_frequency if dump_result_frequency is not None else 20
    # timedelta.max means "never exit on a timer".
    self._exit_after = \
        datetime.timedelta.max if exit_after_min is None else \
        datetime.timedelta(minutes=exit_after_min)
    self._print_result = print_result
    self._plot_pnl = plot_pnl
    self._plot_order = plot_order
    # Flips to True after the first order plot; handlers use it to only
    # return incremental order info afterwards.
    self._plot_last = False
    self._realtime_pnl_adj = realtime_pnl_adj
    self._pnl_adj_file = pnl_adj_file
    self._pnl_adj = None
    # Current trading date string; dump_result() stops the loop on change.
    self._td_str = _to_td_str(get_timestamp())
    self._source_info = ','.join(kafka_config.kafka_servers)
    if plot_dir is not None:
      if strat_stat_log_dir is None:
        # Plotting needs a stat-log directory; fall back to a temp dir.
        strat_stat_log_dir = tempfile.mkdtemp(prefix='pnl_stat_proto_log', dir='/tmp')
    self._strat_stat_log_dir = strat_stat_log_dir

    handlers = {}
    # Best-effort account preload; query_meta() refreshes it later.
    try:
      account_universe = query_accounts(as_proto=True)
    except Exception:
      logging.info('Fail to query account info in monitor initialization.')
      account_universe = None
    self._account_info = self._gen_account_info_map(account_universe)

    # Best-effort initial USD ticker snapshot, retried a few times.
    retry = 0
    query_ts = None
    ticker = None
    while retry <= 3:
      try:
        retry += 1
        query_ts = get_timestamp()
        base_list = list(set(_get_coin_list() + FIAT_LIST))
        price_list = query_exchange_rates(base_list, 'USD')
        ticker = {'%s-%s' % (base, 'USD'): price for base, price in zip(base_list, price_list)}
        break
      except Exception:
        logging.error(traceback.format_exc())
        continue
    else:
      # while/else: runs only when every retry failed (no break).
      logging.info('Fail to query ticker info in monitor initialization.')

    # History is only needed when plots will be produced.
    save_history = True if plot_dir is not None else False
    for topic in self._topics:
      _, strat_info = parse_kafka_topic(topic)
      handler = StrategySummaryCalculator(
          strategy_name=strat_info.strategy_name,
          machine=strat_info.machine,
          source_info=self._source_info,
          save_history=save_history,
          strat_stat_log_dir=self._strat_stat_log_dir,)
      handler.update_account_info_map(self._account_info)
      if ticker is not None and query_ts is not None:
        handler.update_ticker(ticker, query_ts)
      handlers[topic] = handler
    self._account_universe = account_universe
    self._handlers = handlers

    # Importers/plotter are created lazily on first use.
    self._db_config_file = db_config
    self._latency_db_config_file = latency_db_config
    self._memcached_importer = None
    self._latency_importer = None
    self._plot_dir = plot_dir
    self._plotter = None

    self._coin_info_executor = AsyncCoinInfoExecutor()
    self._price_in_usd = {}
    # Backs the @run_on_executor methods (DB inserts off the IOLoop thread).
    self._executor = ThreadPoolExecutor(max_workers=8)

  def _gen_account_info_map(
      self, account_infos: typing.Optional[typing.List[AccountInfoProto]]
  ) -> typing.Dict[AccountRequest, AccountRequestProto]:
    """Index account infos by (market_type, exchange, owner) key.

    Returns an empty map when account_infos is None.
    """
    info_map = {}
    if account_infos is not None:
      for each_info in account_infos:
        assert each_info.HasField('account_id')
        key = AccountRequest(market_type=each_info.market_type,
                             exchange=each_info.exchange,
                             owner=each_info.owner)
        # Rebind each_info to a slimmed-down request proto for the value.
        each_info = AccountRequestProto(account_id=each_info.account_id,
                                        market_type=each_info.market_type,
                                        exchange=each_info.exchange,
                                        owner=each_info.owner)
        info_map[key] = each_info
    return info_map

  def _gen_strategy_info(
      self, strategy_infos: typing.Optional[typing.List[StrategyInfoProto]]
  ) -> typing.Dict[str, StrategyInfoProto]:
    """Index strategy infos by strategy name. Raises TypeError on None."""
    return {each_info.strategy_name: each_info for each_info in strategy_infos}

  def _gen_memcached_importer(self) -> typing.Optional[MemcachedStrategySummaryImporter]:
    """Lazily create the memcached summary importer; None without a config."""
    if not isinstance(self._memcached_importer, MemcachedStrategySummaryImporter):
      if self._db_config_file is not None:
        db_config = db_util.read_db_config(self._db_config_file)
        self._memcached_importer = MemcachedStrategySummaryImporter(db_config)
      else:
        self._memcached_importer = None
    return self._memcached_importer

  def _gen_latency_importer(self):
    """Lazily create the latency-stats importer; None without a config."""
    if self._latency_importer is None:
      if self._latency_db_config_file is not None:
        # Imported locally so the clickhouse dependency is only needed when
        # a latency DB config is actually supplied.
        from xunkemgmt_client.support.telemetry.database.clickhouse.db_importer import \
            LatencyStatsImporter
        db_config = db_util.read_db_config(self._latency_db_config_file)
        self._latency_importer = \
            LatencyStatsImporter.create_connector(db_config)
      else:
        self._latency_importer = None
    return self._latency_importer

  def _gen_plotter(self) -> typing.Optional[AsyncStrategyPlotExecutor]:
    """Create a fresh plot executor pinned to the current UTC trading date.

    Note: a new executor is built on every call (self._plotter is simply
    overwritten each time).
    """
    ts = get_timestamp()
    td = datetime.datetime.utcfromtimestamp(ts / 10**9).date()
    pnl_adj = self._pnl_adj if self._realtime_pnl_adj else None
    self._plotter = AsyncStrategyPlotExecutor(
        trading_date=td,
        strat_stat_log_dir=self._strat_stat_log_dir,
        plot_dir=self._plot_dir,
        account_infos=self._account_universe,
        pnl_adj=pnl_adj,
    )
    return self._plotter

  @run_on_executor(executor='_executor')
  def _insert_strategy_summary(self, summary_msg: StrategySummaryMessage):
    """Persist a summary message to memcached (no-op without a config).

    Runs on the thread pool; awaited from dump_result().
    """
    memcached_importer = self._gen_memcached_importer()
    if isinstance(memcached_importer, MemcachedStrategySummaryImporter):
      memcached_importer.insert_strategy_summary_history(summary_msg)

  @run_on_executor(executor='_executor')
  def _insert_latency_stats(self, latency_stats: typing.List[StrategyLatencyStatsProto]):
    """Bulk-insert latency stats (no-op without a config); errors are logged.

    Runs on the thread pool; awaited from dump_result().
    """
    latency_importer = self._gen_latency_importer()
    if latency_importer is not None:
      try:
        latency_importer.bulk_insert_strategy_latency_stats(latency_stats)
      except Exception:
        logging.error(traceback.format_exc())

  async def dump_result(self):
    """Collect per-handler summaries, print/persist them; stop on date roll.

    Stops the IOLoop when the UTC trading date has changed since startup
    (the process is expected to be restarted per trading date).
    """
    ts = get_timestamp()
    td = datetime.datetime.utcfromtimestamp(ts / 10**9).date()
    td_str = _to_td_str(ts)
    if td_str != self._td_str:
      logging.info('Exit stream processor due to trading date change.')
      self._ioloop.stop()
      return
    # No-op here (td_str == self._td_str when we reach this line).
    self._td_str = td_str
    results = []
    for handler in self._handlers.values():
      try:
        result = handler.get_result(ts)
        results.append(result)
      except Exception as e:
        # One broken handler must not block the others' results.
        logging.error('Fail to get result of %s, %s' % (
            handler._machine, handler._strategy_request.strategy_name))
        logging.error(traceback.format_exc())
        continue
    # results = filter_empty_summary(results)
    results, _ = adjust_pnl_from_config(results, None, td, td, self._pnl_adj)
    strategy_summary_message = StrategySummaryMessage(fetched_ts=ts, summaries=results)
    if self._print_result:
      print_strategy_summary(strategy_summary_message)

    await asyncio.wait_for(self._insert_strategy_summary(strategy_summary_message), 1)

    latency_stats = [result.latency_stats_1min for result in results]
    await asyncio.wait_for(self._insert_latency_stats(latency_stats), 10)

  def _get_pnl_plot_info(self):
    """Return the pnl history of every handler (one entry per topic)."""
    history = [handler.get_history() for handler in self._handlers.values()]
    return history

  def _get_order_plot_info(self):
    """Return per-handler order plot info; after the first plot,
    _plot_last=True asks handlers for incremental info only."""
    order_plot_info = [
        handler.get_order_plot_info(self._plot_last) for
        handler in self._handlers.values()
    ]
    return order_plot_info

  async def _dump_plot(self):
    """Run the enabled pnl/order plots with a 10s timeout each."""
    if self._plot_pnl:
      plotter = self._gen_plotter()
      strat_infos = [parse_kafka_topic(topic)[1] for topic in self._topics]
      await asyncio.wait_for(plotter.do_pnl_plot(strat_infos), 10)
    if self._plot_order:
      order_plot_info = self._get_order_plot_info()
      plotter = self._gen_plotter()
      await asyncio.wait_for(plotter.do_order_plot(order_plot_info), 10)
      self._plot_last = True

  async def query_price_in_usd(self):
    """Refresh the USD ticker snapshot and push it into every handler."""
    coin_list = _get_coin_list()
    base_list = list(set(coin_list + FIAT_LIST))
    query_ts = get_timestamp()
    price_list = await asyncio.wait_for(
        self._coin_info_executor.query_ticker(base_list, 'USD'), 10)
    ticker = {'%s-%s' % (base, 'USD'): price for base, price in zip(base_list, price_list)}
    for handler in self._handlers.values():
      handler.update_ticker(ticker, query_ts)

  async def query_meta(self):
    """Refresh strategy/account/exchange-API universes and fan them out to
    the handlers. Each remote query has a 10s timeout."""
    strategy_list = await asyncio.wait_for(
        self._coin_info_executor.query_all_strategies(), 10)
    account_list = await asyncio.wait_for(
        self._coin_info_executor.query_all_accounts(), 10)
    exchange_api_list = await asyncio.wait_for(
        self._coin_info_executor.query_all_exchange_api(), 10)
    self._account_universe = account_list
    self._account_info = self._gen_account_info_map(account_list)
    self._strategy_info = self._gen_strategy_info(strategy_list)
    self._exchange_api_universe = exchange_api_list
    for handler in self._handlers.values():
      handler.update_account_info_map(self._account_info)
      handler.update_strategy_info_map(self._strategy_info)
      handler.update_exchange_api_universe(self._exchange_api_universe)

  async def query_options_pricing_params(self):
    """Refresh options pricing parameters in every handler."""
    options_pricing_params = await asyncio.wait_for(
        self._coin_info_executor.query_options_pricing_params(), 10)
    for handler in self._handlers.values():
      handler.update_options_pricing_params(options_pricing_params)

  async def query_pnl_adjust_info(self):
    """Rebuild self._pnl_adj by merging file-based adjustments with
    confirmed transfers completed within +/- 1 day of today (UTC)."""
    pnl_adj = await asyncio.wait_for(
        self._coin_info_executor.query_pnl_adjust_info(self._pnl_adj_file), 10)
    curr_date = datetime.datetime.utcnow().date()
    start_ts = to_timestamp_int(curr_date - datetime.timedelta(days=1))
    end_ts = to_timestamp_int(curr_date + datetime.timedelta(days=1))
    request = QueryTransferRequestProto(
        transfer_status=TransferRequestProto.TRANSFER_CONFIRMED,
        start_completed_timestamp=start_ts,
        end_completed_timestamp=end_ts)
    pnl_adj_from_transfer = await asyncio.wait_for(
        self._coin_info_executor.query_pnl_adjust_info_from_transfer(request), 10)
    total_pnl_adj = {}
    # Union of keys; concatenate the adjustment lists per key.
    keys = set(pnl_adj.keys()).union(set(pnl_adj_from_transfer.keys()))
    for key in keys:
      total_pnl_adj[key] = pnl_adj.get(key, []) + pnl_adj_from_transfer.get(key, [])
    self._pnl_adj = total_pnl_adj

  async def _loop(self):
    """Main Kafka consume loop: read records, dispatch to topic handlers.

    Starts reading offset_hour hours in the past. Exits the process if the
    reader cannot be opened; otherwise loops forever (per-record errors are
    logged and skipped) until cancelled or the date-roll stop fires.
    """
    assert self._topics is not None
    logging.info('Hostname: %s' % get_hostname())
    logging.info('Topics: %s', ', '.join(self._topics))
    offset_nanosec = \
        int(datetime.timedelta(hours=self._offset_hour).total_seconds() * 10**9)
    start_ts = get_timestamp() - offset_nanosec
    try:
      reader = AsyncKafkaReader(self._topics,
                                kafka_config=self._kafka_config,
                                timestamp_from=start_ts)
      await reader.open()
    except Exception:
      logging.error('Fail to open kafka reader.')
      logging.error(traceback.format_exc())
      # Hard exit: without a reader this process is useless.
      os._exit(0)
    try:
      while True:
        msg = None
        try:
          records = await reader.read()
          for msg in records:
            topic = msg.topic
            pb = StrategyLog()
            pb.ParseFromString(msg.value)
            handler = self._handlers[topic]
            # NOTE(review): Kafka record timestamps are typically ms;
            # * 1e6 presumably converts to ns — confirm against handler.
            handler.handle(msg.timestamp * 1e6, pb)
        except ConnectionError:
          continue
        except Exception:
          # Log the offending record (if any) and keep consuming.
          if msg is not None:
            logging.info(msg)
          logging.error(traceback.format_exc())
          continue
    finally:
      # sleep(0.1) to prevent the Segmentation Fault caused by reader.close()
      time.sleep(0.1)
      await reader.close()

    ioloop = IOLoop.current()
    ioloop.stop()

  def start(self, *, ioloop):
    """Register the consume loop, periodic dumps/plots and refresh timers
    on the given IOLoop. Does not block; call ioloop.start() afterwards."""
    self._ioloop = ioloop
    self._ioloop.add_callback(self._loop)
    # `.start` is a bound method: the PeriodicCallback is created now but
    # only begins ticking after the warm-up delay expires.
    dump_result_func = PeriodicCallback(self.dump_result, self._dump_result_frequency * 1000).start
    self._ioloop.add_timeout(self._dump_result_after, dump_result_func)
    # Also fire one dump immediately at the same deadline.
    self._ioloop.add_timeout(self._dump_result_after, self.dump_result)
    dump_plot_func = PeriodicCallback(self._dump_plot, 120 * 1000).start
    self._ioloop.add_timeout(self._dump_result_after, dump_plot_func)
    self._ioloop.add_timeout(self._dump_result_after, self._dump_plot)
    # Reference-data refreshers start immediately and repeat on timers.
    self._ioloop.add_callback(self.query_meta)
    self._ioloop.add_callback(self.query_options_pricing_params)
    self._ioloop.add_callback(self.query_pnl_adjust_info)
    PeriodicCallback(self.query_price_in_usd, 60 * 1000).start()
    PeriodicCallback(self.query_meta, 300 * 1000).start()
    PeriodicCallback(self.query_options_pricing_params, 20 * 1000).start()
    PeriodicCallback(self.query_pnl_adjust_info, 20 * 1000).start()
    # timedelta.max effectively disables the exit timer.
    self._ioloop.add_timeout(self._exit_after, ioloop.stop)

  def signal_handler(self, sig, frame):
    """Log the signal and hard-exit immediately (skips cleanup)."""
    logging.warning('Caught signal: %s', sig)
    os._exit(0)


def main(argv):
  """CLI entry point: translate absl flags into a launch_monitor() call."""
  init_logging()
  atexit.register(lambda: logging.info('Exit stream processor'))

  # Strategy names come from either the comma-separated flag or a JSON
  # config file; otherwise an empty list is used.
  if FLAGS.strategy_name is not None:
    strategy_names = [part.strip() for part in FLAGS.strategy_name.split(',')]
  elif FLAGS.strategy_config_filename is not None:
    strategy_names = get_strat_from_json_config(FLAGS.strategy_config_filename)
  else:
    strategy_names = []

  kafka_config = KafkaConfig.from_cmd_config(FLAGS.kafka_config_filename)
  topics = get_topics(strategy_names, kafka_config,
                      topic_type=TopicType[FLAGS.kafka_topic_type])
  launch_monitor(kafka_config=kafka_config,
                 topics=topics,
                 offset_hour=FLAGS.offset_hour,
                 exit_after_min=FLAGS.exit_after_min,
                 dump_result_after_sec=FLAGS.dump_result_after_sec,
                 dump_result_frequency=FLAGS.dump_result_frequency,
                 print_result=FLAGS.print_result,
                 plot_pnl=FLAGS.plot_pnl,
                 plot_order=FLAGS.plot_order,
                 plot_dir=FLAGS.plot_dir,
                 db_config=FLAGS.memcached_config,
                 latency_db_config=FLAGS.latency_db_config,
                 pnl_adj_file=FLAGS.pnl_adj_file,
                 realtime_pnl_adj=FLAGS.realtime_pnl_adj,
                 strat_stat_log_dir=FLAGS.strat_stat_log_dir)


if __name__ == '__main__':
  # Flag declarations kept in a table so registration is uniform; tuple
  # order matches the original declaration order.
  _FLAG_DEFS = (
      (flags.DEFINE_string, 'kafka_config_filename',
       '../../coin_deploy/support_monitor/config/kafka_aws_config.json',
       'kafka config'),
      (flags.DEFINE_string, 'kafka_topic_type', 'STRAT', 'topic_type'),
      (flags.DEFINE_string, 'strategy_name', None,
       'comma separated strategy names'),
      (flags.DEFINE_string, 'strategy_config_filename', None,
       'strategy_config_filename'),
      (flags.DEFINE_float, 'offset_hour', None, 'offset_hour'),
      (flags.DEFINE_string, 'pnl_adj_file', None, 'pnl_adj_file'),
      (flags.DEFINE_boolean, 'realtime_pnl_adj', False, 'realtime pnl adj'),
      (flags.DEFINE_integer, 'dump_result_after_sec', 60,
       'dump_result_after_sec'),
      (flags.DEFINE_integer, 'dump_result_frequency', 20,
       'dump_result_frequency'),
      (flags.DEFINE_integer, 'exit_after_min', None, 'exit_after_min'),
      (flags.DEFINE_boolean, 'print_result', False, 'Print result'),
      (flags.DEFINE_boolean, 'plot_pnl', False, 'plot pnl'),
      (flags.DEFINE_boolean, 'plot_order', False, 'plot order'),
      (flags.DEFINE_string, 'memcached_config', None,
       'path of database config.'),
      (flags.DEFINE_string, 'latency_db_config', None,
       'path of latency db config.'),
      (flags.DEFINE_string, 'plot_dir', None, 'plot dir'),
      (flags.DEFINE_string, 'strat_stat_log_dir', None, 'strat_stat_log_dir'),
  )
  for _define, _flag_name, _default, _help in _FLAG_DEFS:
    _define(_flag_name, _default, _help)

  app.run(main)
