# Copyright (c) 2019 Presto Labs Pte. Ltd.
# Author: jhkim

import collections
import datetime
import enum
import functools
import glob
import h5py
import numpy
import os
import pandas
import traceback

from absl import flags

from coin.base.book.types import TradeSide
import coin.strategy.mm.tool.archive_base as abase
import coin.proto.archive_flag_pb2 as afpb
from coin.base.datetime_util import to_timestamp_int


class BookCompress(enum.Enum):
  """Controls how aggressively book updates are filtered before recording.

  See BookDumperStrategy.on_book / on_trade for how each mode is applied.
  """
  ALL = 0                     # record every book update and every trade
  BBO_PQ_CHANGE = 1           # record when best bid/ask price OR qty changes
  BBO_P_CHANGE = 2            # record only when best bid/ask price changes
  BBO_P_CHANGE_AND_TRADE = 3  # BBO_P_CHANGE plus individual trade rows
  EVERY_1_SECOND = 4          # shorthand: rewritten to EVERY_N_SECOND, n=1
  EVERY_N_SECOND = 5          # record at most one snapshot per n seconds
  EVERY_N_BP = 6              # record when mid price moves >= n basis points


# Schema of one dumped row.  Book snapshots populate the book/price/qty
# columns, trade events populate the trade_* columns, and fields that do
# not apply to a given row are left as None.
BboTuple = collections.namedtuple(
    'BboTuple',
    'timestamp book_timestamp trade_timestamp '
    'ask0_price bid0_price ask0_qty bid0_qty '
    'buy_trade_price sell_trade_price '
    'trade_price trade_side trade_qty trade_qty_interval')


class BookDumperStrategy(object):
  """Records best-bid/offer snapshots and trades for a single product.

  The strategy subscribes to book and trade callbacks, filters events
  according to a BookCompress mode, and accumulates BboTuple rows in
  ``self.bbos``.  ``export()`` writes the rows to
  ``signal_plot/bbo_<symbol>_<YYYYMMDD>.csv``.
  """

  def __init__(
      self,
      product,
      trading_date,
      compress,
      start_ts=None,
      end_ts=None,
      every_n_sec=None,
      every_n_bp=None):
    """Set up filtering state.

    Args:
      product: the product whose book/trade events are recorded.
      trading_date: date used only to build the CSV filename prefix.
      compress: BookCompress mode controlling which book updates are kept.
      start_ts: optional window start; earlier events are dropped.
      end_ts: optional window end; later events are dropped (falsy = open).
      every_n_sec: sampling period in seconds for EVERY_N_SECOND mode.
      every_n_bp: mid-price move threshold in basis points; when given the
        mode is forced to EVERY_N_BP.
    """
    self.product = product
    self.products = [product]
    self.bbos = []  # accumulated BboTuple rows, in event order
    self.prefix = "%s_%s" % (product.symbol, trading_date.strftime("%Y%m%d"))
    self.compress = compress
    # Environment override: any value in "full_detail" forces ALL mode.
    if os.environ.get("full_detail", None) is not None:
      self.compress = BookCompress.ALL
    self.bbotuple = None  # signature of the last recorded BBO, for dedup
    self._start_ts = to_timestamp_int(start_ts or 0)
    self._end_ts = to_timestamp_int(end_ts or 0)
    # EVERY_1_SECOND is just EVERY_N_SECOND with a one-second period.
    if self.compress == BookCompress.EVERY_1_SECOND:
      self.compress = BookCompress.EVERY_N_SECOND
      every_n_sec = 1
    self._every_n_bp = every_n_bp
    if every_n_bp is not None:
      self.compress = BookCompress.EVERY_N_BP
    # Timestamps appear to be nanoseconds: a seconds interval is scaled by
    # 1e9 before being compared against event timestamps below.
    self._update_interval = (every_n_sec or 0) * 1e9
    self.last_book_time = self.last_trade_time = 0
    # Optionally stamp rows with the exchange clock instead of local time.
    self.use_exchange_timestamp = bool(int(os.environ.get("use_exchange_timestamp", 0)))
    self._cum_trade_qty = 0  # total traded qty observed so far
    self._prev_cum_trade_qty = 0  # total at the previously emitted row
    self.last_midp = 0  # mid price at the last EVERY_N_BP emission

  def on_book_reset(self, book_builder_name, book_builder):
    """(Re)subscribe the book and trade callbacks after a builder reset."""
    product = self.product
    book_builder.subscribe(product, functools.partial(self.on_book, product))
    book_builder.subscribe_trade(product, functools.partial(self.on_trade, product))

  def on_book(self, product, book):
    """Book callback: possibly append one BBO row, per the compress mode."""
    # Drop events outside the configured [start_ts, end_ts] window.
    if book.timestamp < self._start_ts:
      return
    if self._end_ts and self._end_ts < book.timestamp:
      return
    if self.product != product:
      return
    # Ignore one-sided/empty books.
    if book.bid0() is None or book.ask0() is None:
      return

    midp = 0.5 * (book.bid0().price + book.ask0().price)

    # Build a "signature" of the current BBO; a row is emitted only when
    # the signature differs from the previously recorded one.
    if self.compress in [BookCompress.BBO_P_CHANGE, BookCompress.BBO_P_CHANGE_AND_TRADE]:
      bbotuple = (book.bid0().price, book.ask0().price)
    elif self.compress == BookCompress.BBO_PQ_CHANGE:
      bbotuple = (book.bid0().price, book.ask0().price, book.bid0().qty, book.ask0().qty)
    elif self.compress == BookCompress.EVERY_N_BP:
      # Skip until the mid price has moved at least every_n_bp basis points.
      if abs(self.last_midp - midp) < midp * self._every_n_bp * 1e-4:
        return
      bbotuple = (book.bid0().price, book.ask0().price)
      self.last_midp = midp
    elif self.compress == BookCompress.EVERY_N_SECOND:
      # NOTE(review): last_book_time may hold an exchange timestamp (set
      # below when use_exchange_timestamp is on) while book.timestamp is the
      # feed clock — confirm mixing the two clocks here is intended.
      if self.last_book_time + self._update_interval > book.timestamp:
        return
      bbotuple = (book.bid0().price, book.ask0().price)
    else:
      # ALL mode: no signature, so every update passes the dedup check.
      bbotuple = None

    if self.bbotuple is not None and self.bbotuple == bbotuple:
      return

    self.bbotuple = bbotuple

    ts = book.exchange_timestamp if self.use_exchange_timestamp else book.timestamp
    self.last_book_time = ts

    # Book rows leave all trade_* fields as None.
    self.bbos.append(
        BboTuple(ts,
                 ts,
                 None,
                 book.ask0().price,
                 book.bid0().price,
                 book.ask0().qty,
                 book.bid0().qty,
                 None,
                 None,
                 None,
                 None,
                 None,
                 self.get_trade_qty_interval()))

  def get_trade_qty_interval(self):
    """Return qty traded since the last emitted row and reset the interval."""
    ret = self._cum_trade_qty - self._prev_cum_trade_qty
    self._prev_cum_trade_qty = self._cum_trade_qty
    return ret

  def on_trade(self, product, trade):
    """Trade callback: accumulate volume and, in some modes, append a row."""
    if trade.timestamp < self._start_ts:
      return
    if self._end_ts and self._end_ts < trade.timestamp:
      return
    if self.product != product:
      return

    # Always accumulate volume, even when the trade row itself is dropped,
    # so trade_qty_interval on the next emitted row stays correct.
    self._cum_trade_qty += trade.qty

    if self.compress == BookCompress.EVERY_N_SECOND:
      if self.last_trade_time + self._update_interval > trade.timestamp:
        return

    ts = trade.exchange_timestamp if self.use_exchange_timestamp else trade.timestamp
    self.last_trade_time = ts

    # Trade rows are only emitted in ALL and BBO_P_CHANGE_AND_TRADE modes.
    if self.compress in [BookCompress.ALL, BookCompress.BBO_P_CHANGE_AND_TRADE]:
      self.bbos.append(
          BboTuple(ts,
                   None,
                   ts,
                   None,
                   None,
                   None,
                   None,
                   trade.price if trade.side == TradeSide.TRADE_BUY_SIDE else None,
                   trade.price if trade.side == TradeSide.TRADE_SELL_SIDE else None,
                   trade.price,
                   trade.side,
                   trade.qty,
                   self.get_trade_qty_interval()))

  def export(self):
    """Write accumulated rows to signal_plot/bbo_<prefix>.csv."""
    os.makedirs("signal_plot", exist_ok=True)
    pandas.DataFrame(self.bbos).to_csv("signal_plot/bbo_%s.csv" % self.prefix)


def parse_date(trading_date, time_range):
  """Convert an "A-B" hour range into absolute datetimes.

  Args:
    trading_date: datetime marking hour 0 of the trading day.
    time_range: string "A-B" where A and B are hour offsets (floats
      allowed), e.g. "0-24" covers the whole day.

  Returns:
    (start_time, end_time): trading_date + A hours and trading_date + B
    hours respectively.
  """
  begin_str, finish_str = time_range.split("-")
  start_time = trading_date + datetime.timedelta(hours=float(begin_str))
  end_time = trading_date + datetime.timedelta(hours=float(finish_str))
  return start_time, end_time


def dump_book(product,
              trading_date,
              time_range,
              compress=BookCompress.BBO_P_CHANGE_AND_TRADE,
              feed_sub_request=None,
              every_n_sec=None,
              every_n_bp=None):
  """Replay archived market data and return the collected BBO/trade rows.

  Runs a BookDumperStrategy over the archive for the given product and time
  window; when feed_sub_request is supplied the feed-subscription replay
  path is used instead of the per-product one.  Returns the strategy's
  accumulated list of BboTuple rows.
  """
  start_time, end_time = parse_date(trading_date, time_range)

  strategy = BookDumperStrategy(
      product, trading_date, compress, start_time, end_time,
      every_n_sec=every_n_sec, every_n_bp=every_n_bp)

  base_flags = afpb.BaseFlagProto(
      trading_date=trading_date.strftime("%Y%m%d"),
      time_range=time_range)
  feed_flags = abase.get_feed_subscription_flags()

  if feed_sub_request is None:
    mear = abase.get_mear_light(product)
    abase.run_from_archive_product(
        trading_date=base_flags.trading_date,
        time_range=time_range,
        feed_machine=feed_flags.feed_machine,
        product=product,
        api_version=mear.api_version,
        recipe=mear.recipe,
        on_book_reset=strategy.on_book_reset)
  else:
    abase.run_from_archive_base_with_feed_sub(
        baseflags=base_flags,
        feed_sub_flags_proto=feed_flags,
        feed_sub_request=feed_sub_request,
        on_book_reset=strategy.on_book_reset)
  return strategy.bbos

@functools.lru_cache(maxsize=256)
def _resolve_interval_file_paths_mea(market_type, exchange, trading_date, interval_root, file_frame):
  mea = f'{market_type}.{exchange}.*'
  date = f'{trading_date.strftime("%Y%m%d")}'
  files = glob.glob(
      f'{interval_root}/{mea}/{date}/{mea}--{file_frame}.h5')
  return files

def _resolve_interval_file_paths(product, trading_date, interval_root, file_frame):
  """Resolve interval-h5 paths for a product (Futures vs Spot sub-tree)."""
  if product.product_type == 'Futures':
    market_type = 'Futures'
  else:
    market_type = 'Spot'
  return _resolve_interval_file_paths_mea(
      market_type, product.exchange, trading_date, interval_root, file_frame)


def dump_funding_rate(product, start_dt, end_dt, interval_root, force_dump=False):
  """Load futures funding rates in [start_dt, end_dt].

  Returns a DataFrame with integer 'timestamp' (renamed from funding_time)
  and 'funding_rate' columns, or an empty DataFrame when funding rates were
  not requested (neither force_dump nor --include_funding_rate) or the
  product is not a future.
  """
  wanted = force_dump or getattr(flags.FLAGS, 'include_funding_rate', False)
  if not wanted or product.product_type != 'Futures':
    return pandas.DataFrame()

  # NOTE(review): .timestamp() on naive datetimes is local-timezone
  # dependent — confirm start_dt/end_dt use the intended convention.
  start_ts = start_dt.timestamp() * 1e9
  end_ts = end_dt.timestamp() * 1e9

  day_frames = []
  day = start_dt.date()
  last_day = end_dt.date()
  while day <= last_day:
    day_frames.append(dump_one_day_funding_rate(product, day, interval_root))
    day += datetime.timedelta(days=1)

  df = pandas.concat(day_frames, sort=True).reset_index(drop=True)
  df = df.drop_duplicates(subset=['funding_time'], keep='last').reset_index(drop=True)
  df = df[(df.funding_time >= start_ts) & (df.funding_time <= end_ts)]
  df.funding_time = df.funding_time.astype(int)
  # 'timestamp' is the column name expected by downstream sim analysis.
  return df.rename(columns={'funding_time': 'timestamp'})


def dump_one_day_funding_rate(product, trading_date, interval_root):
  """Read one day of funding times/rates for a product from interval h5 files.

  Scans candidate 'time' h5 files, locates the product's symbol column,
  and interleaves the OPEN_* and CLOSE_* observations into a single
  forward-filled DataFrame with 'funding_time' and 'funding_rate' columns.
  Returns an empty DataFrame (same columns) when nothing could be read.
  """
  funding_time_paths = _resolve_interval_file_paths(product, trading_date, interval_root, 'time')
  for funding_time_path in funding_time_paths:
    try:
      # Read universe and funding-time columns in a single open (previously
      # the same file was opened twice).
      with h5py.File(funding_time_path, 'r') as h5file:
        symbols = h5file['universe'][:].astype(str)
        # BCHABC was renamed to BCHN; normalize symbols from old archives.
        symbols = numpy.char.replace(symbols, 'BCHABC', 'BCHN')
        sym_idx = numpy.where(symbols == product.symbol)[0]
        if len(sym_idx) == 0:
          continue
        assert len(sym_idx) == 1
        sym_idx = sym_idx[0]
        close_funding_time = h5file['CLOSE_FUNDING_TIME'][:, sym_idx]
        open_funding_time = h5file['OPEN_FUNDING_TIME'][:, sym_idx]
      # NOTE(review): str.replace swaps every 'time' in the path, not only
      # the file-frame suffix — confirm interval_root never contains 'time'.
      ohlc_filepath = funding_time_path.replace('time', 'ohlc')
      with h5py.File(ohlc_filepath, 'r') as ohlc_file:
        close_funding_rate = ohlc_file['CLOSE_FUNDING_RATE'][:, sym_idx]
        open_funding_rate = ohlc_file['OPEN_FUNDING_RATE'][:, sym_idx]
      open_df = pandas.DataFrame({'funding_time': open_funding_time,
                                  'funding_rate': open_funding_rate})
      close_df = pandas.DataFrame({'funding_time': close_funding_time,
                                   'funding_rate': close_funding_rate})
      # Interleave open rows (even indices) with close rows (odd indices)
      # so each interval's open precedes its close after sorting.
      open_df.index = open_df.index * 2
      close_df.index = close_df.index * 2 + 1
      # .ffill() replaces the deprecated fillna(method='ffill').
      df = pandas.concat([open_df, close_df], sort=True).sort_index().ffill()
      return df
    except Exception:
      # Best-effort: report the failure and try the next candidate file.
      # (Uses the module-level traceback import; the old local re-import
      # was redundant.)
      traceback.print_exc()
  return pandas.DataFrame(columns=["funding_time", "funding_rate"])


def dump_book_from_cache2(
    product, trading_date, time_range,
    cache_root="/remote/iosg/data-2/buckets/kline.derived.interval_h5/coin/PT1M"):
  """Fallback dump from 1-minute kline close/volume h5 caches.

  Builds BboTuple-shaped rows where ask0_price == bid0_price == the kline
  close price and trade_qty_interval is the kline volume; all other fields
  stay NaN/None.

  Args:
    product: product whose symbol column is extracted from the cache.
    trading_date: date of the cache directory to read.
    time_range: unused here (the whole cached day is returned); kept for
      signature symmetry with dump_book_from_cache.
    cache_root: root of the kline interval-h5 cache tree.  New optional
      parameter; the default preserves the previously hard-coded location.

  Returns:
    list of rows in BboTuple field order, or [] when the cache is missing
    or unreadable.
  """
  market_type = 'Futures' if product.product_type == 'Futures' else 'Spot'
  mea = f'{market_type}.{product.exchange}*'
  date = f'{trading_date.strftime("%Y%m%d")}'
  cache_files = glob.glob(f'{cache_root}/{mea}/{date}/close.h5')
  if len(cache_files) != 1:
    return []
  cache_file = cache_files[0]
  try:
    dfread = pandas.read_hdf(cache_file)
    seri = dfread.loc[:, product.symbol]
    df = pandas.DataFrame([], columns=BboTuple._fields)
    df.timestamp = seri.index.astype(int)
    # Klines carry one close price; use it for both sides of the book.
    df.ask0_price = seri.to_numpy()
    df.bid0_price = df.ask0_price
    dfvolume = pandas.read_hdf(glob.glob(f'{cache_root}/{mea}/{date}/volume.h5')[0])
    df.trade_qty_interval = dfvolume.loc[:, product.symbol].astype(float).to_numpy()
    return df.values.tolist()
  except Exception:
    # Best-effort: a missing symbol/file yields an empty dump, not a crash.
    print(f"Error detected skip. {trading_date}, {product.symbol}")
    traceback.print_exc()
    return []


def test_dump_book_from_cache2():
  """Manual smoke test: run both cache-dump paths for one product/day.

  NOTE(review): there are no assertions — ret1/ret2 exist only so the two
  code paths execute; results are inspected by hand (e.g. in a debugger).
  """
  from coin.exchange.kr_rest.product.product_impl import generate_product_from_str2
  product = generate_product_from_str2('Futures', 'Binance', 'v1', 'BTC-USDT.PERPETUAL')
  ret1 = dump_book_from_cache2(product, datetime.datetime.strptime("20220520", "%Y%m%d"), "0-24")
  ret2 = dump_book_from_cache(product, datetime.datetime.strptime("20220520", "%Y%m%d"), "0-24",
      "/remote/iosg/data-2/buckets/feed.derived.interval_h5/coin/main/PT1M")


def dump_book_from_cache(product,
                         trading_date,
                         time_range,
                         cache_root):
  """Load one day of BBO rows from pre-aggregated interval 'ohlc' h5 caches.

  Reads CLOSE_BID0/CLOSE_ASK0 (plus the matching VOLUME file) columns for
  product.symbol within the requested time window and returns them as a
  list of rows in BboTuple field order.  Falls back to the kline cache
  (dump_book_from_cache2) when the symbol is not found in any file.
  """
  start_time, end_time = parse_date(trading_date, time_range)
  # NOTE(review): .timestamp() on a naive datetime uses the local timezone —
  # confirm the cache files' 'timestamp' dataset follows the same convention.
  start_ts = start_time.timestamp() * 1e9
  end_ts = end_time.timestamp() * 1e9
  # to cover 00:00 data
  trading_date_1day_bef = trading_date - datetime.timedelta(days=1)
  cache_files = \
    _resolve_interval_file_paths(product, trading_date_1day_bef, cache_root, 'ohlc') + \
    _resolve_interval_file_paths(product, trading_date, cache_root, 'ohlc')
  date = f'{trading_date.strftime("%Y%m%d")}'

  bbo_df_list = list()
  for cache_file in cache_files:
    try:
      with h5py.File(cache_file, 'r') as h5file:
        ts = h5file['timestamp'][:]
        symbols = h5file['universe'][:].astype(str)
        # BCHABC was renamed to BCHN; normalize symbols from old archives.
        symbols = numpy.char.replace(symbols, 'BCHABC', 'BCHN')
        sym_mask = numpy.where(symbols == product.symbol)[0]
        ts_mask = numpy.where((ts >= start_ts) & (ts <= end_ts))[0]
        if len(sym_mask) == 0 or len(ts_mask) == 0:
          continue
        bid0 = h5file['CLOSE_BID0'][:, sym_mask][ts_mask, :]
        ask0 = h5file['CLOSE_ASK0'][:, sym_mask][ts_mask, :]
      # The volume file mirrors the ohlc file's layout.
      # NOTE(review): str.replace swaps every 'ohlc' in the path, not only
      # the file-frame suffix — confirm cache_root never contains 'ohlc'.
      volume_file = cache_file.replace('ohlc', 'volume')
      with h5py.File(volume_file, 'r') as vol_file:
        volume = vol_file['VOLUME'][:, sym_mask][ts_mask, :]
      ts = ts[ts_mask]
      # Assemble a BboTuple-shaped frame; unused columns remain NaN/None.
      df = pandas.DataFrame([], columns=BboTuple._fields)
      df.timestamp = df.book_timestamp = pandas.Series(ts)
      df.ask0_price = pandas.Series(numpy.reshape(ask0, len(ask0)))
      df.bid0_price = pandas.Series(numpy.reshape(bid0, len(bid0)))
      df.trade_qty_interval = pandas.Series(numpy.reshape(volume, len(volume)))
      # Drop minutes where either side of the book is missing.
      df = df.loc[~numpy.isnan(df.ask0_price) & ~numpy.isnan(df.bid0_price)]
      bbo_df_list.append(df)
    except Exception:
      # Best-effort: report and continue with the remaining cache files.
      print(f"Error detected on date {date} in file {cache_file}")
      traceback.print_exc()
      continue
  if len(bbo_df_list) > 0:
    return pandas.concat(bbo_df_list).values.tolist()
  print(f'symbol {product.symbol} not found on date {date} in {cache_files}, will fallback to kline cache')
  return dump_book_from_cache2(product, trading_date, time_range)
