# Copyright (c) 2021 Presto Labs Pte. Ltd.
# Author: hjkim

import json
import time
import copy
import os
import datetime
import re
import fire
import sys
import numpy
import logging
import zipfile
import shutil
from collections import defaultdict
from importlib.machinery import SourceFileLoader

# Root of the coin_deploy checkout; kline_util lives outside this package, so
# it is loaded directly by file path.
coin_deploy_root = os.path.expandvars("$HOME/workspace/coin/python/coin_deploy")

kline_util = SourceFileLoader("kline_util", os.path.join(coin_deploy_root, "lm_agg2/kline_util.py")).load_module()

# Make the fastfeature "coinstrat" package importable and point its batch
# installer at the checkout's bundled third-party binaries before importing.
fastfeature_dir_path = os.path.expandvars("$HOME/workspace/fastfeature")
sys.path.append(os.path.join(fastfeature_dir_path, "coinstrat"))
os.environ["BATCH_INSTALL_ROOT"] = f"{fastfeature_dir_path}/third_party"
from coinstrat.strat_lm.app.midfreq.utils.reader import IntervalReader
from coinstrat.strat_lm.app.midfreq.utils.reader2 import IntervalReader2

# NOTE(review): unused in this file as far as visible — presumably the first
# date OKEx v5 data is available; confirm before removing.
okex_v5_begin_date = datetime.datetime(2021, 10, 1)
# Baseline request merged with per-feature kline_request_dict entries in
# dump_kline_json (empty: features alone decide what is dumped).
default_kline_request_dict = {}

# Supported kline interval strings mapped to their length in minutes.
intervalstr_to_minutes = {
  "1m": 1,
  "5m": 5,
  "1h": 60,
  "1d": 60 * 24
}

def fill_with_exchange_kline(df_dict, symbol, reader, start, end, intv):
  """Overwrite rows [start, end] of each frame in *df_dict* for *symbol* with
  OHLCV rebuilt from *reader* (the fallback exchange-kline source).

  Mirrors the aggregation in get_ohlcv_df: high/low/volume are forward-looking
  rolling aggregates over the next `intv` rows, re-aligned to the bar-open row.
  Mutates *df_dict* in place; returns None.
  """
  tmp_dict = {}
  # Open falls back to the previous row's close mid when OPEN_MID is missing.
  close_shift1 = reader.get_df_by_key("CLOSE_MID")[symbol].shift(1).ffill()
  tmp_dict["open"] = reader.get_df_by_key("OPEN_MID")[symbol].fillna(close_shift1)
  # Extreme over the next `intv` rows, shifted so the value sits on the
  # bar-open row; NaN tails fall back to the open price.
  tmp_dict["high"] = reader.get_df_by_key("HIGH_TRADE")[symbol] \
    .rolling(intv, min_periods=1).max().shift(-intv + 1).fillna(tmp_dict["open"])
  tmp_dict["low"] = reader.get_df_by_key("LOW_TRADE")[symbol] \
    .rolling(intv, min_periods=1).min().shift(-intv + 1).fillna(tmp_dict["open"])
  # Close of this bar is the open of the next bar (intv rows ahead).
  tmp_dict["close"] = tmp_dict["open"].shift(-intv).fillna(tmp_dict["open"])
  tmp_dict["volume"] = reader.get_df_by_key("VOLUME")[symbol] \
    .rolling(intv, min_periods=1).sum().shift(-intv + 1).fillna(0.0)
  
  for key in df_dict.keys():
    if key in ["open", "high", "low", "close", "volume"]:
      df_dict[key].loc[start:end, symbol] = tmp_dict[key][start:end]
    elif key in ["buy_volume", "sell_volume"]:
      # The fallback source has no buy/sell split; assume an even 50/50 split.
      df_dict[key].loc[start:end, symbol] = tmp_dict["volume"][start:end] / 2
    else:
      # Fields with no fallback source (e.g. open_interest) are backfilled
      # from the first valid later row.
      df_dict[key].loc[start:end, symbol] = df_dict[key][symbol].bfill()[start:end]

def get_ohlcv_df(reader, start_dt, end_dt, intv=5):
  """Build `intv`-minute OHLCV DataFrames (plus optional futures fields).

  Args:
    reader: interval reader exposing get_df_by_key(key) -> DataFrame
      (index: "%Y-%m-%d %H:%M" strings, columns: symbols) and
      `.exchange` / `.key_dict` attributes.
    start_dt, end_dt: datetime window; rows are sliced to
      [start_dt, end_dt) — the end row itself is dropped via iloc[:-1].
    intv: bar length in rows (assumed one row per minute — TODO confirm).

  Returns:
    dict mapping field name -> DataFrame for open/high/low/close/volume/
    buy_volume/sell_volume and, on futures exchanges with usable data,
    open_interest / funding_rate.
  """
  df_dict = {}
  # Open: OPEN_MID, falling back to the previous row's close mid.
  close_shift1 = reader.get_df_by_key("CLOSE_MID").shift(1).ffill()
  df_dict["open"] = reader.get_df_by_key("OPEN_MID").fillna(close_shift1)
  # High/low: extreme over the next `intv` rows, aligned to the bar-open row;
  # NaN tails fall back to the open price.
  df_dict["high"] = reader.get_df_by_key("HIGH_TRADE") \
    .rolling(intv, min_periods=1).max().shift(-intv + 1).fillna(df_dict["open"])
  df_dict["low"] = reader.get_df_by_key("LOW_TRADE") \
    .rolling(intv, min_periods=1).min().shift(-intv + 1).fillna(df_dict["open"])
  # Close of this bar = open of the next bar (intv rows ahead).
  df_dict["close"] = df_dict["open"].shift(-intv).fillna(df_dict["open"])

  # Futures-only extras, loaded best-effort when the reader has the keys.
  additional_fields = {
    "open_interest" : ("OPEN_OPEN_INTEREST", "CLOSE_OPEN_INTEREST"),
    "funding_rate": ("OPEN_FUNDING_RATE", "CLOSE_FUNDING_RATE")
  } if reader.exchange.startswith("Futures") else {}
  for proto_field, (o_val, c_val) in additional_fields.items():
    if o_val in reader.key_dict.keys() and c_val in reader.key_dict.keys():
      try:
        cshift_temp = reader.get_df_by_key(c_val).shift(1).ffill()
        df_dict[proto_field] = reader.get_df_by_key(o_val).fillna(cshift_temp)
        # Drop the field entirely when every cell is NaN (no usable data).
        if (df_dict[proto_field].isna().astype(int) - 1).sum().sum() == 0:
          del df_dict[proto_field]
      except Exception:
        # Was a bare `except:` that also swallowed SystemExit/KeyboardInterrupt;
        # these fields are best-effort, so log and continue.
        logging.warning(f"load failed. skip {proto_field}, ..")

  # Volumes: sum over the next `intv` rows, aligned to the bar-open row.
  for dfcol, readercol in [
      ("volume", "VOLUME"),
      ("buy_volume", "VOLUME_BUY"),
      ("sell_volume", "VOLUME_SELL")]:
    df_dict[dfcol] = \
        reader.get_df_by_key(readercol) \
        .rolling(intv, min_periods=1).sum().shift(-intv + 1).fillna(0.0)
  # Slice every frame to the requested window; drop the final (end_dt) row.
  for key, df in df_dict.items():
    df_dict[key] = df.loc[start_dt.strftime("%Y-%m-%d %H:%M"):end_dt.strftime("%Y-%m-%d %H:%M")].iloc[:-1]
  return df_dict

def convert_to_klines(df_dict, symbol, intv):
  """Sample every `intv`-th row of *df_dict* for *symbol* into kline dicts.

  Rows where any field is NaN are skipped and logged. Timestamps are local
  epoch nanoseconds derived from the row's "%Y-%m-%d %H:%M" index string.
  """
  klines = []
  failed_dts = []
  first_df = next(iter(df_dict.values()))
  for dt_str in list(first_df.index)[::intv]:
    dt = datetime.datetime.strptime(dt_str, "%Y-%m-%d %H:%M")
    kline = {"kline_timestamp": int(time.mktime(dt.timetuple()) * 1e9)}
    for field, df in df_dict.items():
      kline[field] = float(df.loc[dt_str, symbol])
    if any(numpy.isnan(v) for v in kline.values()):
      failed_dts.append(dt.strftime('%Y%m%d %H%M'))
    else:
      klines.append(kline)
  if failed_dts:
    logging.warning(f"kline data failure at {sorted(list(set(failed_dts)))} {symbol}")
  return klines

def _dump_kline_to_json(kline_json_path, kline_info):
  if kline_json_path.endswith(".json.zip"):
    zipobj = zipfile.ZipFile(kline_json_path, 'w')
    zipobj.writestr("kline.json", json.dumps(kline_info, indent=2), compress_type=zipfile.ZIP_DEFLATED)
  else:
    with open(kline_json_path, 'w') as f:
      json.dump(kline_info, f, indent=2)
      f.close()

def _convert_volume_info(volume_info, datestr, from_hrs, hours_warmedup):
  volume_info_total = []
  for volume_info_exch in volume_info["volume_info_by_exchs"]:
    volume_infos = volume_info_exch["volume_infos"]

    volume_info_list = []
    timestamp_end = int(datetime.datetime.strptime(str(datestr), "%Y%m%d").replace(hour=int(from_hrs)).timestamp() * 1e9)
    timestamp_end += hours_warmedup * 3600 * 1e9
    matching_ts = 0
    for volume_info_i in volume_infos:
      ts = int(volume_info_i["timestamp"])
      if matching_ts <= ts and ts <= timestamp_end:
        matching_ts = ts
    for volume_info_i in volume_infos:
      if matching_ts == int(volume_info_i["timestamp"]):
        volume_info_list.append((volume_info_i["volume"], volume_info_i["symbol"]))

    volume_info_total.append([symbol for _, symbol in sorted(volume_info_list, reverse=True)])
  return volume_info_total

# Read each feature's config_obj from the feature repr and dump the kline json it requests.
def dump_kline_json(filepath, datestr, from_hrs, kline_json, lm_strat_json=None, hours_warmedup=0):
  """Read feature-repr config(s) and dump the kline history json they request.

  Args:
    filepath: a feature_info.json path, an exported profile ".zip" filename
      (resolved under lm_export_path), or a list of such zip filenames whose
      features/symbols are merged (all must share one exchange).
    datestr: run date as YYYYMMDD (str or int).
    from_hrs: hour-of-day of the run cutoff (parsed with int()).
    kline_json: output path; a ".json.zip" suffix yields a zipped dump.
    lm_strat_json: optional strat config path; its "volume_info" entry, when
      present, drives volume-ranked focus-symbol selection (zip inputs only).
    hours_warmedup: extra hours applied when matching volume snapshots and
      resolving quarterly expiry dates.

  Side effect: always writes *kline_json* ({} when nothing is requested).
  """
  lm_export_path = "python/lm_export/coin_lm_export"
  repr_dict = {}
  volume_info_list = []
  if type(filepath) is str:
    # NOTE(review): these profile name markers apparently never need klines —
    # confirm with the profile owners.
    if "xxgram" in filepath or "xyname" in filepath:
      _dump_kline_to_json(kline_json, {})
      return
    if filepath.endswith(".json"):
      feature_repr_filepath = filepath
      repr_dict = json.load(open(feature_repr_filepath, 'r'))
      # Skip exchanges kline_util has no API version mapping for.
      if repr_dict['exchange'].replace("Futures", "") not in kline_util.api_ver_map.keys():
        _dump_kline_to_json(kline_json, {})
        return
      volume_info_path = filepath.replace("feature_info.json", "volume_info.json")
      if os.path.exists(volume_info_path):
        volume_info_list = _convert_volume_info(json.load(open(volume_info_path)), datestr, from_hrs, hours_warmedup=hours_warmedup)
    elif filepath.endswith(".zip"):
      profile_filename = filepath
      zipobj = zipfile.ZipFile(f"{lm_export_path}/{profile_filename}", 'r')
      repr_dict = json.load(zipobj.open('feature_info.json'))
      if lm_strat_json is not None:
        lm_strat_config = json.load(open(lm_strat_json))
        if "volume_info" in lm_strat_config:
          volume_info_list = _convert_volume_info(lm_strat_config["volume_info"], datestr, from_hrs, hours_warmedup=hours_warmedup)
      zipobj.close()
  elif type(filepath) is list:
    filename_list = filepath
    assert all([f.endswith(".zip") for f in filename_list])
    for f in filename_list:
      zipobj = zipfile.ZipFile(f"{lm_export_path}/{f}", 'r')
      feature_info_obj = json.load(zipobj.open('feature_info.json'))
      if lm_strat_json is not None:
        lm_strat_config = json.load(open(lm_strat_json))
        if "volume_info" in lm_strat_config:
          volume_info_list = _convert_volume_info(lm_strat_config["volume_info"], datestr, from_hrs, hours_warmedup=hours_warmedup)
      zipobj.close()
      if len(repr_dict) == 0:
        repr_dict = copy.deepcopy(feature_info_obj)
      else:
        # Merging multiple profiles requires them to share one exchange.
        assert(repr_dict["exchange"] == feature_info_obj["exchange"])
        repr_dict["features"].extend(feature_info_obj["features"])
        repr_dict["focus_symbols"].extend(feature_info_obj["focus_symbols"])
        repr_dict["symbols"].extend(feature_info_obj["symbols"])

  kline_request_dict = copy.deepcopy(default_kline_request_dict)

  # Merge per-feature requests: for each interval key keep the max lookback.
  for config_obj in [feature['config_obj'] for feature in repr_dict['features'] if 'config_obj' in feature and 'kline_request_dict' in feature['config_obj']]:
    for key, val in config_obj['kline_request_dict'].items():
      kline_request_dict[key] = val if key not in kline_request_dict else max(kline_request_dict[key], val)

  if not kline_request_dict:
    _dump_kline_to_json(kline_json, {})
    return

  # "sequential" and "dump_ref" are control flags, not interval entries.
  sequential = False
  if "sequential" in kline_request_dict:
    sequential = bool(kline_request_dict.pop("sequential"))

  dump_ref = False
  if "dump_ref" in kline_request_dict:
    dump_ref = bool(kline_request_dict.pop("dump_ref"))

  # Order intervals from shortest to longest.
  kline_request_dict = {key: kline_request_dict[key] for key in sorted(kline_request_dict.keys(), key=lambda x: intervalstr_to_minutes[x])}

  # Compute the per-interval [start, end] window. All windows end at the run
  # cutoff unless "sequential" is set, in which case each longer interval's
  # window is chained immediately before the previous (shorter) one.
  end_dt = datetime.datetime.strptime(str(datestr), "%Y%m%d").replace(hour=int(from_hrs))
  start_dt_dict = {}
  end_dt_dict = {}
  min_start_dt = end_dt
  end_dt_it = copy.deepcopy(end_dt)
  for key, val in kline_request_dict.items():
    start_dt_dict[key] = end_dt_it - datetime.timedelta(minutes=intervalstr_to_minutes[key]*val)
    end_dt_dict[key] = end_dt_it
    min_start_dt = min(min_start_dt, start_dt_dict[key])
    if sequential:
      end_dt_it = start_dt_dict[key] - datetime.timedelta(minutes=intervalstr_to_minutes[key])
  # Iterate longest interval first in the dump loop below.
  start_dt_dict = {key: start_dt_dict[key] for key in sorted(start_dt_dict.keys(), key=lambda x: intervalstr_to_minutes[x], reverse=True)}

  # Group requested symbols by mea (exchange identifier such as "Spot.Binance"
  # — parsed out of the symbol string by kline_util). Focus symbols come
  # first; remaining symbols act as reference (ref) symbols.
  mea_symbol_universe = defaultdict(list)
  target_symbols = repr_dict["focus_symbols"]
  ref_target_symbols = [symbol for symbol in repr_dict["symbols"] if symbol not in target_symbols]

  meas = []
  mea_pns = []
  for i, symbol_list in enumerate([target_symbols, ref_target_symbols]):
    is_ref = (i > 0)  # NOTE(review): assigned but unused in this function.
    if symbol_list is None:
      continue
    for symbol_entry in symbol_list:
      if symbol_entry is None:
        continue
      mea, symbol = kline_util.parse_symbol_string(symbol_entry['symbol_string'])
      meas.append(mea)
      mea_pns.append((mea, symbol_entry['product_name']))
      mea_symbol_universe[mea].append(symbol)
  has_ref = ref_target_symbols and len(meas) > 1

  # Volume-ranked selection: rebuild the universe from the top-volume focus
  # symbols (up to vu_rank), keeping only those whose base (and optionally
  # quote) has a matching ref symbol on every ref mea.
  if volume_info_list and "focus_symbols_vu_rank" in repr_dict:
    require_quote_matching = repr_dict.get("focus_vu_require_quote_matching", False)
    mea_patt_symbol_universe = defaultdict(list)
    # volume_info_list[i] is assumed to align with mea_pns[i] — TODO confirm.
    for exch_idx, (mea, symbol_patt) in enumerate(mea_pns):
      symbols = [s for s in volume_info_list[exch_idx] if re.match(symbol_patt, s)]
      mea_patt_symbol_universe[(mea, symbol_patt)].extend(symbols)
    if repr_dict["focus_symbols_vu_rank"] == "all":
      vu_rank = 10000
    else:
      vu_rank = int(repr_dict["focus_symbols_vu_rank"].split("-")[-1])
    cnt = 0
    mea_patt_symbol_universe_static = copy.deepcopy(mea_patt_symbol_universe)
    # base_to_ref_map: "BASE-QUOTE" key -> {(mea, pattern): ref symbol}.
    base_to_ref_map = defaultdict(dict)
    if has_ref:
      for mea, symbol_patt in mea_pns[1:]:
        for symbol in mea_patt_symbol_universe_static[(mea, symbol_patt)]:
          symbol_base = kline_util.get_norm_base(symbol)
          symbol_quote = kline_util.get_quote(symbol) if require_quote_matching else ""
          key = f"{symbol_base}-{symbol_quote}"

          # only the first matching symbol in each ref mea (with more volume) is served as ref symbol (ex: FTX KSHIB/SHIB)
          if (mea, symbol_patt) not in base_to_ref_map[key]:
            base_to_ref_map[key][(mea, symbol_patt)] = symbol
    mea_symbol_universe = defaultdict(list)
    for symbol in volume_info_list[0]:
      if cnt >= vu_rank:
        break
      symbol_base = kline_util.get_norm_base(symbol)
      symbol_quote = kline_util.get_quote(symbol) if require_quote_matching else ""
      key = f"{symbol_base}-{symbol_quote}"
      if not has_ref:
        mea_symbol_universe[meas[0]].append(symbol)
        cnt += 1
      else:
        # Only keep a focus symbol when every ref mea has a match for it.
        if key in base_to_ref_map and len(base_to_ref_map[key]) == len(mea_pns) - 1:
          # focus-ref matching is n:1 (ex: both KSHIB.PERP and SHIB.PERP have SHIB-USDT as ref)
          mea_symbol_universe[meas[0]].append(symbol)
          # NOTE(review): this `symbol` shadows the outer loop variable; the
          # outer value was already appended above, so behavior is unaffected.
          for i, symbol in enumerate(base_to_ref_map[key].values()):
            if symbol not in mea_symbol_universe[meas[1 + i]]:
              mea_symbol_universe[meas[1 + i]].append(symbol)
          cnt += 1

  # Ref symbols were only needed for matching unless dump_ref was requested.
  if has_ref and not dump_ref:
    for i in range(1, len(meas)):
      mea_symbol_universe[meas[i]] = []

  kline_info = {"klines": []}
  for mea, symbols in mea_symbol_universe.items():
    # mea is "<sf>.<exchange>", e.g. "Spot.Binance"; the Spot prefix is
    # dropped from the dumped symbol name.
    sf = mea.split(".")[0]
    sf = "" if sf == "Spot" else sf
    exch_name = mea.split(".")[1]

    # Resolve ".QUARTER" placeholders to the concrete expiry date.
    symbols_with_expiry_date = []
    expiry_friday_date = kline_util.FindExpiryQuarterlyLastFriday(mea, end_dt + datetime.timedelta(hours=hours_warmedup)).strftime("%Y%m%d")
    for symbol in symbols:
      if symbol.endswith(".QUARTER"):
        symbols_with_expiry_date.append(symbol.replace(".QUARTER", f".{expiry_friday_date}"))
      else:
        symbols_with_expiry_date.append(symbol)

    # Primary data source; read one extra day before min_start_dt so the
    # leading shift/ffill in get_ohlcv_df has history to draw on.
    reader = IntervalReader(
        start_date=(min_start_dt - datetime.timedelta(days=1)).strftime("%Y%m%d"),
        end_date=end_dt.strftime("%Y%m%d"),
        exchange=mea,
        symbols=symbols_with_expiry_date,
        use_pit_pi_info=True)
    
    # Fallback source used to patch symbols missing from the primary reader.
    reader2 = IntervalReader2(
        start_date=(min_start_dt - datetime.timedelta(days=1)).strftime("%Y%m%d"),
        end_date=end_dt.strftime("%Y%m%d"),
        exchange=mea,
        symbols=symbols_with_expiry_date,
        use_pit_pi_info=True,
        )

    for interval, start_dt in start_dt_dict.items():
      interval_min = intervalstr_to_minutes[interval]
      end_dt_it = end_dt_dict[interval]
      target_list = [(reader, start_dt, end_dt_it)]

      for r, s, e in target_list:
        df_dict = get_ohlcv_df(r, s, e, intv=interval_min)
        for i, symbol in enumerate(symbols):
          # Rows with no open data at all (cumulative fillna(0) still zero)
          # are treated as missing and patched from the fallback reader.
          missing = df_dict["open"].index[df_dict["open"][symbols_with_expiry_date[i]].fillna(0).cumsum() == 0]
          if len(missing) > 0 and reader2.available:
            start2, end2 = missing[0], missing[-1]
            fill_with_exchange_kline(df_dict, symbols_with_expiry_date[i], reader2, start2, end2, interval_min)
          kline_info["klines"].append({
            "symbol": f"{exch_name}{sf}_{symbol}",
            "interval_ns": int(interval_min * 60 * 1e9),
            "ohlc": convert_to_klines(df_dict, symbols_with_expiry_date[i], intv=interval_min)
          })
  _dump_kline_to_json(kline_json, kline_info)

def copy_kline_json_from_cache(datestr, from_hrs, cache_dir, kline_json):
  """Copy the cached kline json for (datestr, from_hrs) to *kline_json*.

  No-op when *cache_dir* is falsy or no cached file matches; when several
  match, the first in directory-listing order is used.
  """
  if not cache_dir:
    return
  prefix = f"cached_kline_json.{datestr}{str(from_hrs).zfill(2)}"
  matches = [name for name in os.listdir(cache_dir) if name.startswith(prefix)]
  if matches:
    shutil.copyfile(os.path.join(cache_dir, matches[0]), kline_json)


if __name__ == "__main__":
  # CLI entry point: expose this module's functions (e.g. dump_kline_json)
  # as subcommands via python-fire.
  fire.Fire()
