# Copyright (c) 2023 Presto Labs Pte. Ltd.
# Author: jhkim

import os
import re
import glob
import fire
import datetime
import pandas
import numpy
import h5py

# Supported target resolutions: ISO-8601-style duration name -> bar size in
# minutes. One trading day is 1440 minutes, so a day of PT1M rows aggregates
# into 1440 / <minutes> output rows.
resolutions = {
  'PT5M': 5,
  'PT15M': 15,
  'PT60M': 60,
  'PT240M': 240,
  'P1DT': 1440,
}

def get_trading_dates(trading_date_str: str):
  """Parse a trading-date spec into datetime objects.

  Accepts either a single day 'YYYYMMDD' (returned as a one-element list)
  or an inclusive range 'YYYYMMDD-YYYYMMDD' (returned as an array of
  datetimes, one per calendar day). Non-string input (e.g. an int coming
  from the CLI) is stringified first.
  """
  spec = str(trading_date_str)
  if "-" not in spec:
    return [datetime.datetime.strptime(spec, '%Y%m%d')]
  begin, end = spec.split("-")
  return pandas.date_range(begin, end).to_pydatetime()

'''
# feed convert whole day
./pyrunner cc/appcoin2/support/order/h5_pt1m_aggr.py aggregate 20230518 PT5M \
--root_dir_in=/remote/iosg/data-2/buckets/feed.derived.interval_h5/coin/main/PT1M \
--root_dir_out=$HOME/PT5M

# feed convert specific day
./pyrunner cc/appcoin2/support/order/h5_pt1m_aggr.py aggregate 20230518 PT5M \
--root_dir_in=/remote/iosg/data-2/buckets/feed.derived.interval_h5/coin/main/PT1M/Futures.Binance.v1 \
--root_dir_out=$HOME/PT5M/Futures.Binance.v1

# stratlog convert whole day
./pyrunner cc/appcoin2/support/order/h5_pt1m_aggr.py aggregate 20230518 PT5M \
--root_dir_in=/remote/iosg/data-2/buckets/order.derived.interval_h5/coin/PT1M \
--root_dir_out=$HOME/pt5m_test

./pyrunner cc/appcoin2/support/order/h5_pt1m_aggr.py aggregate 20230518 PT240M \
--root_dir_in=/remote/iosg/data-2/buckets/feed.derived.interval_h5/coin/main/PT1M/Futures.Binance.v1 \
--root_dir_out=$HOME/PT240M/Futures.Binance.v1
'''

def fix_bbo_cache_timestamp_only_inplace(
    trading_date: str,
    resolution: str, # PT5M, P1DT
    root_dir: str = '/remote/iosg/data-2/buckets/order.derived.interval_h5/coin/PT5M'):
  """Shift the 'timestamp' dataset of *--time.h5 files in place so the first
  bar of the day is stamped at minute `unit` (interval end) instead of 0.

  Args:
    trading_date: 'YYYYMMDD' or 'YYYYMMDD-YYYYMMDD' (see get_trading_dates).
    resolution: a key of `resolutions` (e.g. 'PT5M'); validated, but the
      actual bar size is re-inferred per file from its row count.
    root_dir: a measurement directory containing <YYYYMMDD>/ subdirs, or the
      resolution root itself, in which case every subdirectory is processed
      recursively.
  """
  assert resolution in resolutions
  unit = resolutions[resolution]
  # Pointed at the resolution root (".../PT5M")? Fan out into each
  # measurement subdirectory and recurse once per subdirectory.
  if root_dir.endswith(resolution) or root_dir.endswith(f"{resolution}/"):
    for dirpath in glob.glob(os.path.join(root_dir, "*")):
      if os.path.isdir(dirpath):
        fix_bbo_cache_timestamp_only_inplace(trading_date, resolution, dirpath)
    return
  tds = get_trading_dates(trading_date)
  for td in tds:
    tdstr = td.strftime("%Y%m%d")
    dir_date = os.path.join(root_dir, tdstr)
    for path_in in glob.glob(os.path.join(dir_date, "*--time.h5")):
      # Probe the file first: corrupt/truncated HDF5 files raise on open.
      try:
        with h5py.File(path_in, 'r') as h5in:
          pass
      except Exception:  # was a bare except; don't swallow KeyboardInterrupt
        print(f"! erratic {path_in}, skipping.")
        continue
      with h5py.File(path_in, 'r') as h5in:
        # Infer the bar size from the row count (1440 minutes per day);
        # this overrides the resolution-derived unit on purpose.
        unit = int(1440 / h5in['timestamp'].shape[0])
        dti = pandas.DatetimeIndex(h5in['timestamp'])
      time0 = dti[0].time()
      assert time0.second == 0
      minutes = time0.hour * 60 + time0.minute
      if minutes != unit:
        # First bar is stamped at interval start: shift every timestamp
        # forward so the first bar lands on minute `unit`.
        offset = unit - minutes
        assert offset > 0
        # NOTE(review): mode 'w' truncates the file, so this is safe only if
        # *--time.h5 files contain nothing but 'timestamp' — confirm.
        with h5py.File(path_in, 'w') as h5out:
          print(f"adding {offset} in {path_in}")
          h5out['timestamp'] = (dti + pandas.Timedelta(minutes=offset)).astype(int).to_numpy()
      else:
        # Bug fix: "good" used to print unconditionally, even for files that
        # had just been rewritten. Only report files that were already correct.
        print(f"{path_in} start time is {time0}, good")


def aggregate(
    trading_date: str, # YYYYMMDD or YYYYMMDD-YYYYMMDD
    resolution: str, # PT5M, P1DT
    root_dir_in: str = '/remote/iosg/data-2/buckets/order.derived.interval_h5/coin/PT1M',
    root_dir_out: str = '/remote/iosg/data-2/buckets/order.derived.interval_h5/coin'):
  """Downsample per-day PT1M HDF5 interval files into a coarser resolution.

  Reads <root_dir_in>/<YYYYMMDD>/*.h5 (1440 one-minute rows per day) and
  writes <root_dir_out>/<YYYYMMDD>/<same filename> with 1440/unit rows,
  where unit = resolutions[resolution]. How each dataset is aggregated is
  decided from its key:
    - CLOSE_* and timestamp: take the last row of each bucket
    - HIGH_* / LOW_*: bucket max / min
    - SPREAD* / STD* / MEAN*: bucket average
    - *CNT / *QTY / *AMT / *VOLUME*: bucket nansum
    - *VWAP*: rebuilt as aggregated AMT / aggregated QTY
    - scalar metadata (shape (1,), pnl_currencies, symbols, universe): copied
    - FEE_RATE / CONTRACT_VALUE / BAL / PNL / POS / OPEN* etc.: first row
    - LHS_/RHS_ fill stats: recombined pairwise (see below)

  Args:
    trading_date: 'YYYYMMDD' or 'YYYYMMDD-YYYYMMDD' inclusive range.
    resolution: target resolution, a key of `resolutions`.
    root_dir_in: PT1M input root. A feed root ending in 'PT1M' makes this
      recurse into every measurement subdirectory.
    root_dir_out: output root; per-date subdirectories are created as needed.
  """
  assert resolution in resolutions
  unit = resolutions[resolution]
  # Feed roots hold one subdirectory per measurement: recurse into each,
  # mirroring the directory name on the output side.
  if (root_dir_in.find("feed.derived.interval_h5") >= 0
      and (root_dir_in.endswith("PT1M") or root_dir_in.endswith("PT1M/"))):
    for dirpath in glob.glob(os.path.join(root_dir_in, "*")):
      if os.path.isdir(dirpath):
        dirname = os.path.basename(dirpath)
        root_dir_in_0 = dirpath
        root_dir_out_0 = os.path.join(root_dir_out, dirname)
        aggregate(trading_date, resolution, root_dir_in_0, root_dir_out_0)
    return

  def agg_arr_2d(arr, agg_unit, method=numpy.sum):
    # Collapse every `agg_unit` consecutive rows into one via `method`.
    arr = numpy.array(arr)
    if arr.shape[-1] == 0:
      # No columns: nothing to reduce, just stride to get the right row count.
      return arr[::agg_unit]
    return method(arr.reshape(-1, agg_unit, arr.shape[-1]), axis=1)

  tds = get_trading_dates(trading_date)

  for td in tds:
    tdstr = td.strftime("%Y%m%d")
    dir_in = os.path.join(root_dir_in, tdstr)
    dir_out = os.path.join(root_dir_out, tdstr)
    if not os.path.exists(dir_in):
      print(f"! doesn't exist {dir_in}")
      continue
    print(f"{dir_in} -> {dir_out}")
    os.makedirs(dir_out, exist_ok=True)
    for path_in in glob.glob(os.path.join(dir_in, "*.h5")):
      filename = os.path.basename(path_in)
      path_out = os.path.join(dir_out, filename)
      dict_out = {}
      # <= 96 bytes is an HDF5 superblock with no data: skip.
      if os.path.getsize(path_in) <= 96:
        print(f"! broken file {path_in}")
        continue
      # Probe the file first: corrupt/truncated HDF5 files raise on open.
      try:
        with h5py.File(path_in, 'r') as h5in:
          pass
      except Exception:  # was a bare except; don't swallow KeyboardInterrupt
        print(f"! erratic {path_in}, skipping.")
        continue
      with h5py.File(path_in, 'r') as h5in:
        # Files with fewer than 1440 rows are partial days and cannot be
        # reshaped into whole buckets.
        key_time = 'times' if 'times' in h5in else 'timestamp'
        if key_time in h5in and h5in[key_time].shape[0] < 1440:
          print(f"! dim not 1440, {path_in}, {h5in[key_time].shape}")
          continue
        key_vwaps = []
        for key in h5in.keys():
          if re.match("(RHS_.*|LHS_.*)", key):
            # Handled pairwise in the MAKER/TAKER loop below.
            continue
          if re.match("(CLOSE_.*|timestamp)", key):
            # Closes and timestamps: take the last minute of each bucket.
            dict_out[key] = h5in[key][(unit-1)::unit]
          elif re.match('.*VWAP.*', key):
            # VWAPs can't be averaged directly: rebuild from AMT/QTY later.
            key_amt = key.replace("VWAP", "AMT")
            key_qty = key.replace("VWAP", "QTY")
            if key_qty not in h5in:
              if not path_in.endswith("--vwap.h5"):
                print(f"! VWAP not recoverable. PASSING {path_in}")
              continue
            key_vwaps.append(key)
            dict_out[key_amt] = agg_arr_2d(h5in[key][:] * h5in[key_qty][:], unit, numpy.nansum)
          # exchange, market, currencies, strategy_name, symbols, ...
          elif h5in[key].shape == (1,) or key in ['pnl_currencies', 'symbols', 'universe']:
            dict_out[key] = h5in[key][:]
          elif (re.match("(.*FEE_RATE$|CONTRACT_VALUE|IS_INVERSE|times|RESERVE|BAL|PNL|POS|OPEN.*)", key)
                or re.match(".*(MARKET_CAP_IN_USD|SUPPLY|TVL).*", key)):
            # Point-in-time series: keep the first minute of each bucket.
            dict_out[key] = h5in[key][::unit]
          elif re.match("(HIGH_.*)", key):
            dict_out[key] = agg_arr_2d(h5in[key], unit, numpy.max)
          elif re.match("(LOW_.*)", key):
            dict_out[key] = agg_arr_2d(h5in[key], unit, numpy.min)
          elif re.match("(SPREAD.*|STD.*|MEAN.*)", key):
            dict_out[key] = agg_arr_2d(h5in[key], unit, numpy.average)
          elif re.match(".*(CNT|QTY|AMT|VOLUME.*)$", key):
            dict_out[key] = agg_arr_2d(h5in[key], unit, numpy.nansum)
        # Rebuild VWAPs from the aggregated amount / quantity computed above
        # (the matching QTY key was nansum-aggregated by the loop).
        for key_vwap in key_vwaps:
          key_amt = key_vwap.replace("VWAP", "AMT")
          key_qty = key_vwap.replace("VWAP", "QTY")
          dict_out[key_vwap] = dict_out[key_amt] / dict_out[key_qty]
        # NOTE(review): LHS appears to be the first fill of each PT1M bar and
        # RHS the remainder; after aggregation only the bucket's first minute
        # stays LHS and everything else is folded into RHS — confirm semantics.
        for filltype in ["MAKER", "TAKER"]:
          for side in ["BUY", "SELL"]:
            key_vwap_lhs = f"LHS_{filltype}_{side}_VWAP"
            key_vwap_rhs = f"RHS_{filltype}_{side}_VWAP"
            key_qty_lhs = f"LHS_{filltype}_{side}_QTY"
            key_qty_rhs = f"RHS_{filltype}_{side}_QTY"
            if key_vwap_lhs in h5in:
              qty_lhs = numpy.array(h5in[key_qty_lhs][:])
              vwap_lhs = numpy.array(h5in[key_vwap_lhs][:])
              qty_rhs = numpy.array(h5in[key_qty_rhs][:])
              vwap_rhs = numpy.array(h5in[key_vwap_rhs][:])
              qty_rhs_sum = agg_arr_2d(qty_rhs, unit, numpy.nansum)
              amt_rhs_sum = agg_arr_2d(qty_rhs * vwap_rhs, unit, numpy.nansum)
              qty_lhs_sum = agg_arr_2d(qty_lhs, unit, numpy.nansum)
              amt_lhs_sum = agg_arr_2d(qty_lhs * vwap_lhs, unit, numpy.nansum)
              # First minute of each bucket stays on the LHS side.
              qty_lhs_first = qty_lhs[::unit]
              amt_lhs_first = (qty_lhs * vwap_lhs)[::unit]
              # The rest of the bucket's LHS merges into RHS.
              amt_lhs_minus1 = amt_lhs_sum - amt_lhs_first
              qty_lhs_minus1 = qty_lhs_sum - qty_lhs_first
              qty_rhs_aggr = qty_lhs_minus1 + qty_rhs_sum
              amt_rhs_aggr = amt_lhs_minus1 + amt_rhs_sum
              dict_out[key_qty_lhs] = qty_lhs_first
              dict_out[key_vwap_lhs] = amt_lhs_first / qty_lhs_first
              dict_out[key_qty_rhs] = qty_rhs_aggr
              dict_out[key_vwap_rhs] = amt_rhs_aggr / qty_rhs_aggr
      with h5py.File(path_out, 'w') as h5out:
        for key in dict_out.keys():
          h5out[key] = dict_out[key]

if __name__ == "__main__":
  # Expose every module-level function as a CLI subcommand via python-fire,
  # e.g. `... h5_pt1m_aggr.py aggregate 20230518 PT5M --root_dir_in=...`.
  fire.Fire()
