# Copyright (c) 2019 Presto Labs Pte. Ltd.
# Author: taekwon(originally from experimental/prophet/craps/test_lt.py)

import datetime
import functools
import logging
import pandas as pd
import os

from absl import app, flags

import experimental.prophet as prophet
from experimental.prophet.graph import graph
from experimental.prophet.ops import timeseries
from experimental.prophet.ops import y_gen
from experimental.prophet.ops.fastfeed import fastfeed_coin
from experimental.prophet.ops.aggregator import scalar_value_aggregator

import coin.strategy.mm.tool.archive_base as abase

from coin.strategy.interval.universe import universe_btc


def symbol_basic(feed, nickname, inverse=False, price_multiplier=1., windows=None):
  """Build book/trade/instrument feature nodes for a single symbol.

  All nodes are created under the `nickname` graph namespace so their refs
  are prefixed per-symbol (e.g. '<nickname>/b.mid_p').

  Args:
    feed: fastfeed handle exposing `.book` and `.trade` streams.
    nickname: namespace / ref prefix for this symbol's nodes.
    inverse: if True, trade outputs[2] is a value (quote) amount and the
      quantity is derived as value / price; otherwise outputs[2] is the
      quantity and value is price * quantity.
    price_multiplier: scale applied to raw book/trade prices.
    windows: list of moving-window lengths such as '1m'; defaults to
      1m..60m minute windows.

  Returns:
    A pair (feature_vars, y_vars): the list of feature nodes, and the list
    of Y-shifted return nodes used as prediction targets.
  """
  windows = windows or \
    ['1m', '2m', '3m', '4m', '5m', '10m', '20m', '30m', '60m']

  with graph.namespace(nickname):
    # --- Best-level order-book features ---
    book = graph.create_node('fastfeed.BookBestLevel', [feed.book])
    ask0_p = (book.outputs[1] * price_multiplier).named_ref('b.ask0_p')
    ask0_q = book.outputs[2].named_ref('b.ask0_q')
    bid0_p = (book.outputs[3] * price_multiplier).named_ref('b.bid0_p')
    bid0_q = book.outputs[4].named_ref('b.bid0_q')
    spread = (ask0_p - bid0_p).named_ref('b.spread')
    mid_p = ((ask0_p + bid0_p) / 2.).named_ref('b.mid_p')
    mid_p_std = timeseries.time_moving_window(mid_p, windows).std()

    ask0_q_sum = timeseries.time_moving_window(ask0_q, windows).sum()
    bid0_q_sum = timeseries.time_moving_window(bid0_q, windows).sum()

    # Mid-price return over each window, plus its Y-shifted (future) copy
    # used as a prediction target; shift(v, 1) takes the previous sample
    # so the target does not leak the current value.
    mid_p_ret = []
    prev_y_mid_p_ret = []
    for w in windows:
      v = (mid_p - timeseries.time_shift(mid_p, w)).named_ref('mid_ret.' + w)
      mid_p_ret.append(v)
      prev_mid_p_ret = prophet.shift(v, 1)
      prev_y_mid_p_ret.append(prophet.time_shift_y(prev_mid_p_ret, w))

    book_vars = [
        ask0_p,
        ask0_q,
        *ask0_q_sum,
        bid0_p,
        bid0_q,
        *bid0_q_sum,
        spread,
        mid_p,
        *mid_p_std,
        *mid_p_ret
    ]

    # --- Instrument features (best-effort: not every product exposes them) ---
    inst_vars = []
    try:
      instrument = prophet.instrument(feed)
      oi = instrument.open_interest.named_ref('i.oi')
      # fr = instrument.funding_rate.named_ref('i.fr') # doesn't exist
      oi_w = []
      for w in windows:
        oi_w.append((oi - timeseries.time_shift(oi, w)).named_ref('oi.' + w))
      inst_vars = [oi, *oi_w]
    except Exception as e:
      # Log via the configured logger (see main) instead of print, so the
      # message is captured alongside the rest of the run's logging.
      logging.warning('%s has error in getting instrument due to %s', nickname, e)

    # --- Trade features ---
    trade = graph.create_node('fastfeed.Trade', [feed.trade])
    last_trade_time = trade.outputs[0]  # currently unused; kept for reference
    tp = (trade.outputs[1] * price_multiplier).named_ref('t.p')
    tp_max = timeseries.time_moving_window(tp, windows).max()
    tp_min = timeseries.time_moving_window(tp, windows).min()

    if not inverse:
      tq = trade.outputs[2].named_ref('t.q')
      tv = (tp * tq).named_ref('t.v')
    else:
      # Inverse products report value; derive quantity from value / price.
      tq = (trade.outputs[2] / tp).named_ref('t.q')
      tv = trade.outputs[2].named_ref('t.v')
    tside = trade.outputs[3].named_ref('t.side')

    # NOTE(review): control_if presumably gates node updates to the matching
    # trade side (1 == buy, 2 == sell by the refs below) — confirm with the
    # graph framework docs.
    with graph.control_if(tside == 1):
      buy_q_sum = timeseries.time_moving_window(tq.named_ref('t.buy.q'), windows).sum()
      buy_v_sum = timeseries.time_moving_window(tv.named_ref('t.buy.v'), windows).sum()

    with graph.control_if(tside == 2):
      sell_q_sum = timeseries.time_moving_window(tq.named_ref('t.sell.q'), windows).sum()
      sell_v_sum = timeseries.time_moving_window(tv.named_ref('t.sell.v'), windows).sum()

    tq_sum = timeseries.time_moving_window(tq, windows).sum()
    tv_sum = timeseries.time_moving_window(tv, windows).sum()
    fil_tq_sum = []
    fil_tv_sum = []
    tvwaps = []
    vwap_ret = []
    prev_y_vwap_ret = []
    for idx, window in enumerate(windows):
      # Intentional rebinding: tq/tv now refer to the windowed sums.
      tq = tq_sum[idx]
      fil_tq_sum.append(tq.named_ref(f't.q.sum{window}'))
      tv = tv_sum[idx]
      fil_tv_sum.append(tv.named_ref(f't.v.sum{window}'))
      vwap = (tv / tq).named_ref(f't.vwap.{window}')
      tvwaps.append(vwap)

      v = (vwap - timeseries.time_shift(vwap, window)).named_ref('vwap_ret.' + window)
      vwap_ret.append(v)

      prev_vwap_ret = prophet.shift(v, 1)
      # Shift the Y target by twice the window so it does not overlap the
      # feature window. Preserve the window's own unit suffix (this was
      # hard-coded to 'm', which would mislabel non-minute windows).
      double_window = str(int(window[:-1]) * 2) + window[-1]
      prev_y_vwap_ret.append(prophet.time_shift_y(prev_vwap_ret, double_window))

    tp_std = timeseries.time_moving_window(tp, windows).std()
    trade_vars = [
        tp,
        *tp_max,
        *tp_min,
        *tvwaps,
        *vwap_ret,
        *fil_tq_sum,
        *fil_tv_sum,
        *tp_std,
        *buy_q_sum,
        *buy_v_sum,
        *sell_q_sum,
        *sell_v_sum
    ]

  return book_vars + trade_vars + inst_vars, [*prev_y_mid_p_ret, *prev_y_vwap_ret]


def model(symbol_infos, date, machine=None):
  """Assemble the full feature graph for *symbol_infos* and return the
  aggregator node.

  A row is aggregated whenever the previous global timestamp falls on a
  whole-minute boundary (60 * 10**9 ns) and differs from the current one;
  features are shifted by one sample so the Y targets never see their own
  row.

  Args:
    symbol_infos: iterable of symbol descriptors exposing `.products`,
      `.sub_req`, `.nickname`, `.inverse`, `.price_multiplier`.
    date: trading date used to select each symbol's product.
    machine: feed host name; defaults to 'feed-01.ap-northeast-1.aws'.

  Returns:
    The scalar_value_aggregator node collecting
    [prev_ts, shifted feature vars, Y vars].
  """
  machine = machine or 'feed-01.ap-northeast-1.aws'
  # Created for its graph side effect; the handle itself is unused.
  # TODO(review): confirm the 1m timer is required to drive evaluation.
  timer_1m = prophet.timer('1m')
  ts = prophet.global_timestamp()

  feature_vars = []  # renamed from `vars` to avoid shadowing the builtin
  y_vars = []
  for symbol_info in symbol_infos:
    feed = fastfeed_coin(symbol_info.products[date], str(symbol_info.sub_req), machine)
    var_normal, var_ygen = symbol_basic(feed,
                                        symbol_info.nickname,
                                        inverse=symbol_info.inverse,
                                        price_multiplier=symbol_info.price_multiplier)
    feature_vars += var_normal
    y_vars += var_ygen

  prev_ts = timeseries.shift(ts, 1)
  prev_vars = timeseries.shift(feature_vars, 1)
  # 60 * 10**9 ns == one minute; fire only when time actually advanced.
  with graph.control_if((prev_ts % (60 * 10**9) == 0) & (prev_ts != ts)):
    aggregator = scalar_value_aggregator([prev_ts, *prev_vars, *y_vars])
  return aggregator


def model_wrapper(symbol_idx, date):
  """Build the graph for the single universe symbol at *symbol_idx* on *date*."""
  selected = universe_btc([date])[symbol_idx]
  return model([selected], date)


def get_dump_root_dir():
  """Return the per-user dump root under /tmp.

  '/home/' is stripped from the expanded home path, so e.g. '/home/alice'
  yields '/tmp/alice/interval_data'.
  """
  user_part = os.path.expanduser('~').replace('/home/', '')
  return f'/tmp/{user_part}/interval_data'


def get_prophet_data(symbol_idx, trading_date, start_time, end_time):
  """Run the prophet graph for one symbol/day and return a 1-minute frame.

  The raw run output columns carry shift suffixes which are stripped here;
  the result is clipped to [start_time, end_time) and resampled to a fixed
  1-minute frequency (gaps become NaN rows).
  """
  raw_df = prophet.run_from_fastfeed_multiprocess(functools.partial(model_wrapper, symbol_idx),
                                                  start_time,
                                                  end_time,
                                                  machine='feed-01.ap-northeast-1.aws',
                                                  max_workers=None,
                                                  inject_date_to_graph_func=True,
                                                  use_run_cache=False,
                                                  duration_after='7201s',
                                                  duration_before='7201s')

  # Strip the trailing '.shiftN' suffix from every column name; Y-columns
  # carry one extra suffix, so strip twice for those.
  renamed = dict()
  for col in raw_df.columns:
    stripped = col.rsplit('.', 1)[0]
    if stripped[0] == 'Y':
      stripped = stripped.rsplit('.', 1)[0]
    renamed[col] = stripped
  raw_df.rename(columns=renamed, inplace=True)
  raw_df['tdate'] = pd.to_datetime(raw_df['global_timestamp'])

  in_range = (start_time <= raw_df['tdate']) & (raw_df['tdate'] < end_time)
  out_df = raw_df[in_range].set_index('tdate', drop=True)
  out_df = out_df.asfreq('1min').reset_index().drop('tdate', axis=1)

  if flags.FLAGS.debug:
    pd.set_option('display.max_rows', None)
    product_name_str = universe_btc([trading_date])[symbol_idx].nickname
    print(product_name_str)
    print(list(out_df))
    out_df['tdate'] = pd.to_datetime(out_df['global_timestamp'])
    print(out_df[[
        'tdate',
        f'{product_name_str}/t.vwap.1m',
        f'{product_name_str}/vwap_ret.1m',
        f'Y/{product_name_str}/vwap_ret.1m',
    ]])
  return out_df


def main(argv):
  """Fan out one worker per (symbol, trading date), concatenate each
  symbol's daily frames, and optionally dump the result under the dump root.

  Args:
    argv: positional CLI args forwarded by app.run (unused).
  """
  baseflags = abase.get_base_flags()

  trading_dates = abase.get_trading_dates(flags.FLAGS.trading_date)
  symbol_nicknames = [s.nickname for s in universe_btc(trading_dates)]

  # One timestamp shared by every symbol's dump folder for this run.
  ts_for_dumping = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")

  dump_root_dir = get_dump_root_dir()

  from concurrent.futures import ProcessPoolExecutor
  with ProcessPoolExecutor(max_workers=16) as executor:
    futures = dict()
    for symbol_idx, symbol_nickname in enumerate(symbol_nicknames):
      for trading_date in trading_dates:
        # Skip Fridays for weekly products — presumably expiry/rollover day;
        # TODO(review): confirm intent.
        if 'week' in symbol_nickname and trading_date.weekday() == 4:
          continue

        # abase derives the [start, end) range from the mutated flag value.
        baseflags.trading_date = trading_date.strftime("%Y%m%d")
        start_time, end_time = abase.get_time_range(baseflags)

        logging.info('%d/%d %s %s %s',
                     symbol_idx + 1,
                     len(symbol_nicknames),
                     symbol_nickname,
                     start_time,
                     end_time)
        futures.setdefault(symbol_idx, list()).append(
            executor.submit(get_prophet_data, symbol_idx, trading_date, start_time, end_time))

    for symbol_idx, symbol_futures in futures.items():
      # future.result() re-raises any worker exception here.
      df_list = [future.result() for future in symbol_futures]
      df_total = pd.concat(df_list, ignore_index=True)
      print(df_total)
      symbol_nickname = symbol_nicknames[symbol_idx]

      if flags.FLAGS.dump:
        base_folder = f'{dump_root_dir}/{symbol_nickname}'
        folder_name = f'{base_folder}/dump_{ts_for_dumping}'
        # os.makedirs replaces the previous `os.system('mkdir -p ...')`:
        # no shell involved, and failures raise instead of passing silently.
        os.makedirs(folder_name, exist_ok=True)
        dump_file = folder_name + '/dump.pkl.gz'
        df_total.to_pickle(dump_file, compression='gzip')
        print(f'{folder_name}')
        os.makedirs(f'{dump_root_dir}/recent', exist_ok=True)
        # `ln -sfn` repoints the 'recent' symlink in one shot; the stdlib has
        # no single-call replace-existing-symlink, so keep the shell command.
        os.system(f'ln -sfn {folder_name} {dump_root_dir}/recent/{symbol_nickname}')


if __name__ == '__main__':
  # Register the shared CLI flags consumed by abase.get_base_flags() /
  # get_trading_dates() inside main(). Must run before app.run parses argv.
  abase.define_base_flags()
  abase.define_feed_archive_flags()

  # Blank out exchange/market_type defaults — presumably so the universe is
  # taken entirely from universe_btc rather than a single exchange filter;
  # TODO(review): confirm against abase flag semantics.
  flags.FLAGS.exchange = ""
  flags.FLAGS.market_type = ""
  flags.DEFINE_boolean('debug', False, 'Debug Flag')
  flags.DEFINE_boolean('dump', True, 'Dump')

  logging.basicConfig(level='DEBUG')
  app.run(main)
