# Copyright (c) 2022 Presto Labs Pte. Ltd.
# Author: gekim

import datetime as dt
import glob
import os
import re
from dataclasses import dataclass
from datetime import datetime, date
from typing import List, Optional

import h5py
import numpy as np
import pandas as pd

import cc.appcoin2.support.feed.py_fastfeed_reader as py_ffr


@dataclass(frozen=True)
class Channel:
  """A single replay request: one feed channel/value plus the symbols to replay."""

  # Channel name. Either a fastfeed channel ('trades', 'incremental_book_L2',
  # 'book_snapshot_25', 'quotes', 'liquidations') or an interval group/value,
  # e.g. 'spread', 'std', 'close_funding_time', 'open_funding_time',
  # 'volume', 'vwap', 'open_trade', 'high_trade', 'low_trade', 'close_trade'.
  name: str

  # Symbols to include when replaying this channel.
  symbols: List[str]

  # Interval bar duration, e.g. "PT1M", "P1DT"; upper-cased and used as a
  # directory component for interval-H5 lookups. Unused by fastfeed channels.
  duration: str = ""

class PrestoLabsClient:
  """Replays Presto Labs market data day by day.

  Each requested ``Channel`` is dispatched to either the interval-H5
  sub-client or the fastfeed sub-client, chosen by the channel's name.
  """

  def __init__(self):
    # Lazily (re)created sub-client; its concrete type tracks the last
    # channel kind that was replayed.
    self.sub_client_ = None

  def replay(
    self,
    mea: str,
    from_date: str,
    to_date: str,
    filters: Optional[List["Channel"]] = None):
    """Yield replayed feed rows for every day in [from_date, to_date].

    Args:
      mea: Market identifier passed through to the sub-clients.
      from_date: Inclusive ISO-format start date, e.g. "2022-01-01".
      to_date: Inclusive ISO-format end date.
      filters: Channels to replay; defaults to no channels.

    Yields:
      For interval channels: a header row (list of column names) on the
      first day that produced data, then one list of values per merged row.
      For fastfeed channels: raw feed strings straight from the reader.

    Raises:
      ValueError: If either date is missing or unparseable, if from_date is
        later than to_date, or if a channel name is not recognized by
        either sub-client.
    """
    if filters is None:
      filters = []

    self._validate(from_date, to_date)

    current_date = dt.datetime.fromisoformat(from_date)
    end_date = dt.datetime.fromisoformat(to_date)
    first_date = current_date

    while current_date <= end_date:
      dfs = {}
      for channel in filters:
        if PrestoLabsIntervalFeedClient.IS(channel.name):
          if not isinstance(self.sub_client_, PrestoLabsIntervalFeedClient):
            self.sub_client_ = PrestoLabsIntervalFeedClient()
          # Interval data accumulates into `dfs` and is merged per day below.
          self.sub_client_.replay(mea, current_date, channel, dfs)
        elif PrestoLabsFastfeedClient.IS(channel.name):
          if not isinstance(self.sub_client_, PrestoLabsFastfeedClient):
            self.sub_client_ = PrestoLabsFastfeedClient()
          # Fastfeed rows stream straight through; nothing lands in `dfs`.
          for feed in self.sub_client_.replay(mea, current_date, channel, dfs):
            yield feed
        else:
          # Previously this fell through to an assert and could replay with a
          # stale sub-client; fail loudly instead.
          raise ValueError(f"Unknown channel name: {channel.name}")

      if dfs:
        df_ = self._merge_dataframe(dfs)
        if current_date == first_date:
          # Header is emitted once, and only if the first day produced data.
          yield df_.columns.tolist()
        for row in df_.values.tolist():
          yield row
      current_date += dt.timedelta(days=1)

  def _validate(self, from_date, to_date):
    """Raise ValueError unless both dates are valid ISO strings in order."""
    if from_date is None or not self._try_parse_as_iso_date(from_date):
      raise ValueError(
          f"Invalid 'from_date' argument: {from_date}"
      )

    if to_date is None or not self._try_parse_as_iso_date(to_date):
      raise ValueError(
          f"Invalid 'to_date' argument: {to_date}."
      )

    if dt.datetime.fromisoformat(from_date) > dt.datetime.fromisoformat(to_date):
      raise ValueError(
          f"from_date is later than to_date"
      )

  def _try_parse_as_iso_date(self, date_string):
    """Return True iff `date_string` parses via datetime.fromisoformat."""
    try:
      dt.datetime.fromisoformat(date_string)
      return True
    except ValueError:
      return False

  def _merge_dataframe(self, dfs):
    """Outer-merge the per-symbol frames in `dfs` on their 'timestamp' column.

    Every frame is indexed by 'timestamp' and its columns get a positional
    suffix (`_0`, `_1`, ...) so columns from different symbols never collide.
    Mutates the frames in `dfs` in place.

    Raises:
      ValueError: If `dfs` contains no frames.
    """
    merged = None
    for i, df in enumerate(dfs.values()):
      df.set_index('timestamp', inplace=True)
      df.columns = [f'{column}_{i}' for column in df.columns]
      if merged is None:
        merged = df
      else:
        merged = pd.merge(merged, df, how='outer',
                          left_index=True, right_index=True)
    if merged is None:
      raise ValueError("no dataframes to merge")
    return merged.reset_index(level=0)


class PrestoLabsFastfeedClient:
  """Replays raw fastfeed data (trades, book, quotes, liquidations)."""

  # Root of the cached fastfeed data on the shared filesystem.
  FASTFEED_PATH = "/remote/iosg/coin-2/buckets/feed.derived.feed_cache.coin"

  # Channel names this client can serve.
  FILTER_NAMES = {'trades', 'incremental_book_L2',
                  'book_snapshot_25', 'quotes', 'liquidations'}

  @staticmethod
  def IS(filter_name):
    """Return True iff `filter_name` is a fastfeed channel name."""
    return filter_name in PrestoLabsFastfeedClient.FILTER_NAMES

  def __init__(self):
    # Date currently loaded into the reader; a new date rebuilds the reader.
    self.current_date_ = None
    self.reader_ = None

  def replay(self, mea: str, current_date, filter, dfs):
    """Yield raw feed strings for one channel on one day.

    Args:
      mea: Market identifier; suffixed per channel (".realtime", ".snapshot",
        ".bbo" / ".l1_realtime").
      current_date: The day to replay (formatted via strftime for the reader).
      filter: Channel whose `name` selects suffix/feed type and whose
        `symbols` restrict the reader.
      dfs: Unused here; present for interface parity with the interval client.

    Yields:
      Non-empty feed strings from the fastfeed reader.
    """
    # Map the channel name to the reader's mea suffix and feed type.
    mear = None
    feed_type = py_ffr.FEED_TYPE_INVALID
    if filter.name == 'trades':
      mear = mea + ".realtime"
      feed_type = py_ffr.FEED_TYPE_TRADE
    elif filter.name == 'incremental_book_L2':
      mear = mea + ".realtime"
      feed_type = py_ffr.FEED_TYPE_BOOK
    elif filter.name == 'book_snapshot_25':
      mear = mea + ".snapshot"
      feed_type = py_ffr.FEED_TYPE_BOOK
    elif filter.name == 'quotes':
      # Prefer ".bbo"; fall back to ".l1_realtime" when absent on disk.
      mear = mea + ".bbo"
      if not self.__check_mear(current_date, mear):
        mear = mea + ".l1_realtime"
      feed_type = py_ffr.FEED_TYPE_BOOK
    elif filter.name == 'liquidations':
      mear = mea + ".bbo"
      if not self.__check_mear(current_date, mear):
        mear = mea + ".l1_realtime"
      feed_type = py_ffr.FEED_TYPE_LIQUIDATION_ORDER

    if self.current_date_ != current_date:
      self.current_date_ = current_date
      self.reader_ = py_ffr.PyFastfeedReader(
          mear,
          filter.symbols,
          "feed-05.ap-northeast-1.aws",
          PrestoLabsFastfeedClient.FASTFEED_PATH,
          self.current_date_.strftime("%Y%m%dT000000"),
          self.current_date_.strftime("%Y%m%dT235959"))

    if self.reader_ and feed_type != py_ffr.FEED_TYPE_INVALID:
      while True:
        rtn, feed_str = self.reader_.load(feed_type)
        if not rtn:
          # Reader exhausted; drop it so the next date builds a fresh one.
          self.reader_ = None
          break
        if feed_str:
          yield feed_str
    return None

  def __check_mear(self, current_date, mear):
    """Return True iff a cache directory exists for `mear` on `current_date`.

    NOTE(review): `current_date` is interpolated with str(), which for a
    datetime yields "YYYY-MM-DD HH:MM:SS", while date directories elsewhere
    in this module use strftime("%Y%m%d") — confirm the expected on-disk
    layout; as written this may always return False for datetime inputs.
    """
    path = (f"{PrestoLabsFastfeedClient.FASTFEED_PATH}/"
            f"feed-05.ap-northeast-1.aws/{current_date}/{mear}")
    return os.path.exists(path)

class PrestoLabsIntervalFeedClient:
  """Replays pre-aggregated interval data stored as per-day HDF5 files."""

  # Bucket roots; "coin" vs "navi" is selected from the shape of `mea`.
  INTERVAL_H5_COIN_PATH = "/remote/iosg/data-2/buckets/feed.derived.interval_h5/coin/main"
  INTERVAL_H5_NAVI_PATH = "/remote/iosg/data-2/buckets/feed.derived.interval_h5/navi/main"

  # Group name -> individual interval values stored in that group's file.
  # A Channel may name either a whole group or one single value.
  FILTER_NAMES = {
    'other_stats': {'SPREAD', 'STD'},
    'time': {'CLOSE_FUNDING_TIME', 'OPEN_FUNDING_TIME'},
    'volume':{'VOLUME', 'VOLUME_DOLLAR',
    'VOLUME_BUY', 'VOLUME_BUY_DOLLAR', 'VOLUME_SELL', 'VOLUME_SELL_DOLLAR', 
    'VOLUME_LIQUIDATION_BUY_1',	'VOLUME_LIQUIDATION_BUY_DOLLAR_1',
    'VOLUME_LIQUIDATION_SELL_1', 'VOLUME_LIQUIDATION_SELL_DOLLAR_1'},
    'vwap':{'VWAP', 'VWAP_BUY', 'VWAP_SELL'}, 
    'ohlc':{'CLOSE_ASK0', 'CLOSE_BID0', 'CLOSE_FUNDING_RATE', 'CLOSE_INDEX', 'CLOSE_MARK_PRICE', 'CLOSE_MID', 'CLOSE_NAV', 'CLOSE_OPEN_INTEREST', 'CLOSE_TRADE',
    'HIGH_ASK0', 'HIGH_BID0', 'HIGH_FUNDING_RATE', 'HIGH_INDEX', 'HIGH_MARK_PRICE', 'HIGH_MID', 'HIGH_NAV', 'HIGH_OPEN_INTEREST', 'HIGH_TRADE',
    'LOW_ASK0', 'LOW_BID0', 'LOW_FUNDING_RATE', 'LOW_INDEX', 'LOW_MARK_PRICE', 'LOW_MID', 'LOW_NAV', 'LOW_OPEN_INTEREST', 'LOW_TRADE',
    'OPEN_ASK0', 'OPEN_BID0', 'OPEN_FUNDING_RATE', 'OPEN_INDEX', 'OPEN_MARK_PRICE', 'OPEN_MID', 'OPEN_NAV', 'OPEN_OPEN_INTEREST', 'OPEN_TRADE'}
  }

  @staticmethod
  def IS(filter_name):
    """Return True iff `filter_name` is a known group or interval value."""
    if filter_name in PrestoLabsIntervalFeedClient.FILTER_NAMES:
      return True
    return any(filter_name in values
               for values in PrestoLabsIntervalFeedClient.FILTER_NAMES.values())

  def __init__(self):
    pass

  # Produces rows keyed by (timestamp, symbol, value) via load_h5 below.
  def replay(self, mea: str, current_date, filter, dfs):
    """Load one day's interval data for `filter` into `dfs` (in place).

    Args:
      mea: Market identifier; also part of the H5 file name.
      current_date: The day to load (formatted as %Y%m%d in the path).
      filter: Channel whose `name` is a group or a single value, whose
        `duration` selects the interval directory, and whose `symbols`
        are extracted.
      dfs: Mapping symbol -> DataFrame, populated/extended in place.

    Raises:
      ValueError: If `filter.name` matches no known group or value.
      FileNotFoundError: If the expected H5 file does not exist.
    """
    interval_value = None
    if filter.name in PrestoLabsIntervalFeedClient.FILTER_NAMES:
      # Group name: load every value stored in the group's file.
      file_name = f"{mea}--{filter.name}.h5"
    else:
      # Single value: locate its group; the group names the file.
      file_name = None
      for group, values in PrestoLabsIntervalFeedClient.FILTER_NAMES.items():
        if filter.name in values:
          file_name = f"{mea}--{group}.h5"
          interval_value = filter.name
          break
      if file_name is None:
        # Previously `file_name` stayed unbound here and the code crashed
        # with a NameError; report the bad input explicitly instead.
        raise ValueError(f"Unknown interval filter: {filter.name}")

    # "exchange.product.vN"-style meas live in the coin bucket; others in navi.
    if re.search(r"^[a-zA-Z]+\.[a-zA-Z]+\.v\d$", mea):
      path = PrestoLabsIntervalFeedClient.INTERVAL_H5_COIN_PATH
    else:
      path = PrestoLabsIntervalFeedClient.INTERVAL_H5_NAVI_PATH

    h5path = f"""{path}/{filter.duration.upper()}/{mea}/\
{current_date.strftime("%Y%m%d")}/{file_name}"""
    if not os.path.exists(h5path):
      raise FileNotFoundError(h5path)

    self.load_h5(h5path, dfs, filter.symbols, interval_value)

  def load_h5(self, file_path, dfs, symbols, interval_value):
    """Read interval values for `symbols` from one H5 file into `dfs`.

    Each symbol accumulates a frame with columns
    ['timestamp', 'symbol', <value>, ...]; new value columns are concatenated
    onto any frame already present in `dfs`.

    Args:
      file_path: Path to the per-day interval H5 file.
      dfs: Mapping symbol -> DataFrame, mutated in place.
      symbols: Symbols (columns of the H5 'universe') to extract.
      interval_value: Single dataset name to load, or None to load every
        dataset except the bookkeeping ones.
    """
    with h5py.File(file_path, 'r') as h5file:
      universe = h5file['universe'][:].astype(str)
      timestamps = h5file['timestamp'][:].astype(int)

      if interval_value:
        interval_values = [interval_value]
      else:
        # Everything except the bookkeeping datasets is an interval value.
        skip = ('universe', 'timestamp', 'session_names')
        interval_values = [k for k in h5file.keys() if k not in skip]

      for iv in interval_values:
        df = pd.DataFrame(h5file[iv][:], columns=universe)
        for symbol in symbols:
          one_symbol = df[[symbol]].rename(columns={symbol: iv})
          if symbol not in dfs:
            dfs[symbol] = pd.DataFrame({'timestamp': timestamps,
                                        'symbol': symbol})
          dfs[symbol] = pd.concat([dfs[symbol], one_symbol], axis=1)
