# Copyright (c) 2023 Presto Labs Pte. Ltd.
# Author: soony
# pylint: disable=wrong-import-position,too-few-public-methods
# pylint: disable=unnecessary-comprehension,too-many-locals,broad-except
# pylint: disable=no-name-in-module
import os
from os.path import join as pjoin
import time
import json
import datetime
import tempfile
import concurrent.futures
from collections import OrderedDict

import tqdm
import fire
import numpy as np
import pandas as pd
from google.protobuf.json_format import MessageToDict, ParseDict

from xunkemgmt_client.tool.slack_noti import send_to_slack
from cc.appcoin2.strategy.lm.kline_generator import (
  get_symbols_from_pi, query_kline_as_pb, KlineListProto)
from cc.appcoin2.strategy.lm.kline_generator2 import (
  get_latest_data_ts, generate_request, convert_response_to_df,
  query_inverval_feed,
  get_kline_from_interval_feed as _get_kline_from_interval)

# Pin the process timezone to GMT so every naive datetime / strftime below
# is timezone-stable regardless of the host configuration.
os.environ["TZ"] = "GMT"
time.tzset()

# Quote currency used to filter tradable symbols for each exchange.
CURRENCY_MAP = {
  "Binance": "USDT",
  "Okex": "USDT",
  "Bybit": "USDT",
}
# Market/exchange/API ("MEA") identifiers per exchange. The futures entry is
# listed first; run() uses MEA_MAP[exchange][0] to probe feed freshness.
MEA_MAP = {
  "Binance": ["Futures.Binance.v1", "Spot.Binance.v1"],
  "Okex": ["Futures.Okex.v5-swap", "Spot.Okex.v5"],
  "Bybit": ["Futures.Bybit.v3-linear", "Spot.Bybit.v3"],
}

# (futures alias, spot symbol) pairs for multiplied contracts (1000x/10000x)
# whose futures name differs from the underlying spot symbol.
EXTRA_SYMBOLS = {
  "Binance": [
    ("1000SHIB-USDT", "SHIB-USDT"),
    ("1000XEC-USDT", "XEC-USDT"),
    ("1000LUNC-USDT", "LUNC-USDT")],
  "Bybit": [
    ("SHIB1000-USDT", "SHIB-USDT"),
    ("1000BONK-USDT", "BONK-USDT"),
    ("1000FLOKI-USDT", "FLOKI-USDT"),
    ("1000LUNC-USDT", "LUNC-USDT"),
    ("1000XEC-USDT", "XEC-USDT"),
    ("10000NFT-USDT", "NFT-USDT"),
    ("1000BTT-USDT", "BTT-USDT")],
}
# Per-MEA max kline count for a single REST query
# (get_kline_from_rest falls back to 200 for MEAs not listed here).
MAX_CNT = {
  "Spot.Binance.v1": 499, "Futures.Binance.v1": 499,
  "Spot.Okex.v5": 300, "Futures.Okex.v5-swap": 300,
}
# Interval-feed topics to request, keyed by topic_index:
#   0: trade-based OHLCV only;
#   1: mid-based open/close plus extended futures topics (open interest,
#      funding rate, spread, long/short ratios).
TOPICS_MAP = {
  0: ["volume", "volume_buy", "volume_sell",
      "open_trade", "close_trade", "high_trade", "low_trade"],
  1: ["volume", "volume_buy", "volume_sell",
      "open_mid", "close_mid", "high_trade", "low_trade",
      "close_open_interest", "close_funding_rate", "spread",
      "top_long_short_position_ratio", "top_long_short_account_ratio",
      "global_long_short_account_ratio"],
}
# Interval-feed column name -> output column name.
COLUMNS_NAME_MAP = {
  "timestamp": "klineTimestamp",
  "open_trade": "open",
  "high_trade": "high",
  "low_trade": "low",
  "close_trade": "close",
  "volume": "volume",
  "volume_buy": "buyVolume",
  "volume_sell": "sellVolume",
  "open_mid": "open",
  "close_mid": "close",
  "close_open_interest": "open_interest",
  "close_funding_rate": "funding_rate",
  "spread": "spread",
  "top_long_short_position_ratio": "top_long_short_position_ratio",
  "top_long_short_account_ratio": "top_long_short_account_ratio",
  "global_long_short_account_ratio": "global_long_short_account_ratio",
}
# Per-column aggregation applied when resampling to a coarser resolution
# in get_kline_from_interval.
AGG_FNC_MAP = {
  'kline_timestamp': 'first',
  'open': 'first',
  'high': 'max',
  'low': 'min',
  'close': 'last',
  'volume': 'sum',
  'buy_volume': 'sum',
  'sell_volume': 'sum',
  'open_interest': 'last',
  'funding_rate': 'last',
  'spread': 'mean',
  'top_long_short_position_ratio': 'last',
  'top_long_short_account_ratio': 'last',
  'global_long_short_account_ratio': 'last',
}

def get_kline_from_rest(mea, symbols, latest_feed_dt, resolution="1m",
                        tolerance=2):
  """Fetch klines for *symbols* over REST, from latest_feed_dt until now.

  Args:
    mea: market/exchange/API identifier, e.g. "Futures.Binance.v1".
    symbols: iterable of symbol names (e.g. "BTC-USDT").
    latest_feed_dt: start of the query window.
    resolution: kline period string; "60m" is normalized to "1h".
    tolerance: passed through to query_kline_as_pb.

  Returns:
    (kline_info, faileds): kline_info maps symbol -> list of kline dicts
    (proto field names preserved, defaults included); faileds accumulates
    the failure records returned by query_kline_as_pb.
  """
  now = datetime.datetime.utcnow()
  me = ":".join(mea.split(".")[:2])
  # Loop-invariant setup, hoisted: normalize the resolution alias once and
  # look up the per-MEA REST page size once (200 for unlisted MEAs).
  if resolution == "60m":
    resolution = "1h"
  max_cnt = MAX_CNT.get(mea, 200)
  kline_info = {}
  faileds = []
  pbar = tqdm.tqdm(symbols)
  for symbol in pbar:
    product = f"{me}:{symbol}"
    pbar.set_description(product)
    klinelist, failed = query_kline_as_pb(now, product, latest_feed_dt, now,
                                          kline_period=resolution,
                                          tolerance=tolerance, max_cnt=max_cnt)
    klinelist = MessageToDict(klinelist, including_default_value_fields=True,
                              preserving_proto_field_name=True)
    # Symbols with no "klines" field at all are simply omitted from the result.
    if "klines" in klinelist:
      kline_info[symbol] = klinelist['klines']
    faileds += failed
  return kline_info, faileds

def _get_kline_from_interval_feed(start_dt, end_dt, mea, symbols, resolution='1m', return_df=False, topic_index=0):
  """Query klines for *symbols* from the interval feed between start_dt and end_dt.

  Args:
    start_dt, end_dt: datetime window of the query.
    mea: market/exchange/API identifier, e.g. "Futures.Okex.v5-swap".
    symbols: iterable of symbol names to fetch.
    resolution: kline resolution string passed to generate_request.
    return_df: if True return {symbol: DataFrame}; otherwise a list of
      JSON-record strings (one per symbol, outer "[" "]" stripped).
    topic_index: key into TOPICS_MAP selecting which topics to query.

  Returns:
    (klines, failed_symbols): klines is a dict of DataFrames or a list of
    JSON strings depending on *return_df*; failed_symbols lists symbols
    with no data in any response.
  """
  print(f"start querying {mea} kline from interval_feed")
  # Shallow copy is enough (TOPICS_MAP values are flat lists of strings);
  # copying keeps the module-level TOPICS_MAP unmutated by the removes below.
  topics = list(TOPICS_MAP[topic_index])
  if "Okex" in mea and topic_index == 1:
    # Okex does not provide the long/short ratio topics.
    topics.remove("top_long_short_position_ratio")
    topics.remove("top_long_short_account_ratio")
    topics.remove("global_long_short_account_ratio")
  print("querying", topics)
  req_list = generate_request(
      mea=mea,
      symbols=symbols,
      start_dt=start_dt,
      end_dt=end_dt,
      resolution=resolution,
      topics=topics
  )

  # Fan the requests out over a small thread pool; futures keep submit order,
  # so zipping with req_list pairs each response with its request.
  with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
    fs = [executor.submit(query_inverval_feed, req) for req in req_list]

  errors = []
  oneday_kline_df = []
  for req, future in zip(req_list, fs):
    result = future.result()
    kline_df = convert_response_to_df(result)
    if len(kline_df) != 0:
      oneday_kline_df.append(kline_df)
      # Partial success: data came back but the feed also reported an error.
      if len(result.error_message) > 0:
        errors.append(f"EXCEPTION: {req.start_time}-{req.end_time}, {result.error_message}")
    else:
      errors.append(f"ERROR: {req.start_time}-{req.end_time}, {result.error_message}")

  for err in errors:
    print(err)

  # Rename map restricted to the topics actually queried (plus timestamp).
  columns_name_map = {k: COLUMNS_NAME_MAP[k] for k in ["timestamp"] + topics}

  mae_kline_json = []
  symbol_kline_df = {}
  failed_symbol = []
  for symbol in symbols:
    df_list = [df[symbol] for df in oneday_kline_df if symbol in df]
    if not df_list:
      print(f"Error: {symbol} have no data found in h5 file")
      failed_symbol.append(symbol)
      continue
    symbol_df = pd.concat(df_list).reset_index(drop=True)
    symbol_df = symbol_df.rename(columns_name_map, axis='columns')
    if return_df:
      symbol_kline_df[symbol] = symbol_df
      continue
    # Strip the outer brackets so callers can concatenate record lists.
    symbol_kline_json = symbol_df.to_json(orient="records")
    mae_kline_json.append(symbol_kline_json[1:-1])

  if return_df:
    return symbol_kline_df, failed_symbol
  # return json str
  return mae_kline_json, failed_symbol


def get_kline_from_interval(start_dt, end_dt, mea, symbols, resolution='1m',
                            to_resolution=5, topic_index=0):
  """Fetch klines from the interval feed, fill gaps and optionally resample.

  Args:
    start_dt, end_dt: datetime window of the query.
    mea: market/exchange/API identifier.
    symbols: iterable of symbol names.
    resolution: base feed resolution, "1m" or "1d" only.
    to_resolution: target multiple of *resolution*; > 1 triggers resampling.
    topic_index: key into TOPICS_MAP; 1 adds the extended futures topics.

  Returns:
    (klines, failed): klines maps symbol -> list of kline dicts (via a
    KlineListProto round-trip so field types match the REST path); failed
    is a list of failure-description dicts, one per symbol with no data.
  """
  assert resolution in ["1m", "1d"]

  columns_map = {
    "klineTimestamp": "kline_timestamp",
    "buyVolume": "buy_volume",
    "sellVolume": "sell_volume",
  }

  _klines, failed = _get_kline_from_interval_feed(
    start_dt, end_dt, mea, symbols, resolution, return_df=True, topic_index=topic_index)
  klines = {}
  for key, df in _klines.items():
    df.rename(columns_map, axis='columns', inplace=True)
    # Shift timestamps (ns) back by one bar — presumably converting the
    # feed's close-time stamps to open-time; confirm against the feed spec.
    if resolution == "1m":
      df['kline_timestamp'] -= 60 * 1e9
    elif resolution == "1d":
      df['kline_timestamp'] -= 24 * 60 * 60 * 1e9

    # Fill OHLC gaps: back-fill open from later rows, forward-fill close from
    # earlier rows, then patch remaining holes row-wise from sibling columns.
    # .bfill()/.ffill() replace the deprecated fillna(method=...) calls.
    ohlc = df[["open", "high", "low", "close"]].copy()
    ohlc["open"] = ohlc["open"].bfill()
    ohlc["close"] = ohlc["close"].ffill()
    ohlc = ohlc.bfill(axis=1).ffill(axis=1)
    df[["open", "high", "low", "close"]] = ohlc

    if topic_index == 1:
      # Extended topics: carry the last known value forward, default to 0.0.
      other_items1 = ["open_interest", "funding_rate", "spread"]
      df[other_items1] = df[other_items1].ffill().fillna(0.0)

      if "Okex" not in mea:
        other_items2 = ["top_long_short_position_ratio",
                        "top_long_short_account_ratio",
                        "global_long_short_account_ratio"]
        # 0.0 marks "missing" in the ratio feeds; convert to NaN before filling.
        others = df[other_items2].replace(0.0, np.nan)
        df[other_items2] = others.ffill().fillna(0.0)

    if to_resolution > 1:
      # Resample 1m -> N minutes or 1d -> N days using AGG_FNC_MAP per column.
      if resolution == "1m":
        interval = f"{to_resolution}T"
      elif resolution == "1d":
        interval = f"{to_resolution}D"

      df['timestamp'] = pd.to_datetime(df['kline_timestamp'], unit='ns')
      df = df.set_index('timestamp')
      df = df.resample(interval).agg(
        OrderedDict([(k, AGG_FNC_MAP[k]) for k in df.columns])
      )
    # Round-trip through the proto so types/field names match the REST path.
    proto = ParseDict({"klines": df.to_dict('records')}, KlineListProto())
    klines[key] = MessageToDict(proto,
                                preserving_proto_field_name=True)["klines"]
  # Expand the failed symbol names into the failure-record schema shared
  # with the REST path (consumed by the Slack report in run()).
  failed = [{
    "product": symbol,
    "kline_period": f"{to_resolution} * {resolution}",
    "trading_date": end_dt.strftime("%Y%m%d_%H%M%S"),
    "start": start_dt.strftime("%Y%m%d_%H%M%S"),
    "end": end_dt.strftime("%Y%m%d_%H%M%S"),
  } for symbol in failed]
  return klines, failed

def _query_kline_1d(mea, symbols, start_time, latest_feed_time=None,
                    end_time=None, tolerance=2, to_resolution=1, topic_index=0):
  """Query daily klines, preferring the interval feed with a REST fallback.

  Three cases: feed unavailable/out of range -> REST only; feed covers the
  whole window -> interval feed only; otherwise stitch feed head + REST tail.

  Returns:
    (klines, failed): klines maps symbol -> list of kline dicts; failed is
    a list of failure records (dicts with a "product" key).
  """
  assert to_resolution in [1, 2, 3]
  # Truncate all window boundaries to midnight.
  to_dt = lambda t: datetime.datetime(year=t.year, month=t.month, day=t.day)
  start_time = to_dt(start_time)
  end_time = to_dt(end_time) if end_time is not None else None
  if latest_feed_time is not None:
    latest_feed_time = to_dt(latest_feed_time)

  if latest_feed_time is None or latest_feed_time <= start_time:
    klines, failed = get_kline_from_rest(
      mea, symbols, start_time, f"{to_resolution}d", tolerance=tolerance)
  elif end_time is not None and end_time <= latest_feed_time:
    klines, failed = get_kline_from_interval(
      start_time, end_time, mea, symbols, "1d", to_resolution, topic_index)
  else:
    klines1, failed1 = get_kline_from_interval(
      start_time, latest_feed_time, mea, symbols, "1d", to_resolution, topic_index)
    # BUG FIX: failed1 entries are dicts ({"product": symbol, ...}), so the
    # original `symbol not in failed1` never matched and failed symbols were
    # still sent to the REST query. Compare against the product names.
    failed_products = {f["product"] for f in failed1}
    symbols1 = [symbol for symbol in symbols if symbol not in failed_products]
    klines2, failed2 = get_kline_from_rest(
      mea, symbols1, latest_feed_time, f"{to_resolution}d", tolerance=tolerance)
    failed = failed1 + failed2
    # Only symbols present in both halves can be stitched together.
    klines = {}
    for symbol in set(klines1.keys()) & set(klines2.keys()):
      klines[symbol] = klines1[symbol] + klines2[symbol]

  return klines, failed

def _query_kline_1m(mea, symbols, start_time, latest_feed_time=None,
                    end_time=None, tolerance=2, to_resolution=5, topic_index=0):
  """Query minute klines, preferring the interval feed with a REST fallback.

  Mirrors _query_kline_1d; additionally, when stitching feed + REST with
  topic_index == 1, the extended-topic fields missing from REST klines
  (reported as 0.0) are backfilled from the last interval-feed kline.

  Returns:
    (klines, failed): klines maps symbol -> list of kline dicts; failed is
    a list of failure records (dicts with a "product" key).
  """
  assert to_resolution in [1, 3, 5, 15, 30, 60]

  if latest_feed_time is None or latest_feed_time <= start_time:
    klines, failed = get_kline_from_rest(
      mea, symbols, start_time, f"{to_resolution}m", tolerance=tolerance)
  elif end_time is not None and end_time <= latest_feed_time:
    klines, failed = get_kline_from_interval(
      start_time, end_time, mea, symbols, "1m", to_resolution, topic_index)
  else:
    klines1, failed1 = get_kline_from_interval(
      start_time, latest_feed_time, mea, symbols, "1m", to_resolution, topic_index)
    # BUG FIX: failed1 entries are dicts ({"product": symbol, ...}), so the
    # original `symbol not in failed1` never matched and failed symbols were
    # still sent to the REST query. Compare against the product names.
    failed_products = {f["product"] for f in failed1}
    symbols1 = [symbol for symbol in symbols if symbol not in failed_products]
    klines2, failed2 = get_kline_from_rest(
      mea, symbols1, latest_feed_time, f"{to_resolution}m", tolerance=tolerance)
    failed = failed1 + failed2

    # Loop-invariant: the extended-topic column list does not depend on the
    # symbol, so build it once instead of per symbol.
    other_items = []
    if topic_index == 1:
      other_items = ["open_interest", "funding_rate", "spread",
                     "top_long_short_position_ratio",
                     "top_long_short_account_ratio",
                     "global_long_short_account_ratio"]
      if "Okex" in mea:
        # Okex does not provide the long/short ratio topics.
        other_items = other_items[:3]

    klines = {}
    for symbol in set(klines1.keys()) & set(klines2.keys()):
      if other_items:
        # REST klines report the extended topics as 0.0; carry the last
        # interval-feed values forward so the series stays continuous.
        last_kline_from_interval_feed = klines1[symbol][-1]
        for kline in klines2[symbol]:
          for item in other_items:
            if kline[item] == 0.0:
              kline[item] = last_kline_from_interval_feed[item]

      klines[symbol] = klines1[symbol] + klines2[symbol]

  return klines, failed

def _run(exchange, start, ckpt, now, latest_feed, tolerance,
         day_resolution=1, minute_resolution=5, is_test=False, topic_index=0):
  """Collect klines for one exchange: daily history up to ckpt, then minutes.

  Symbols are the intersection of futures and spot listings (filtered by the
  exchange's quote currency), plus any EXTRA_SYMBOLS alias pairs listed on
  both sides. Returns (kline_info, failed_info) where kline_info["klines"]
  is a list of {"symbol": ..., "ohlc": [...]} entries.
  """
  currency = CURRENCY_MAP[exchange]

  # Discover listings from both the futures and spot MEAs.
  futures_symbols, spot_symbols = [], []
  for mea in MEA_MAP[exchange]:
    if "Futures" in mea:
      futures_symbols = [s.split(".")[0]
                         for s in get_symbols_from_pi("Futures", exchange)]
    else:
      spot_symbols = get_symbols_from_pi("Spot", exchange)

  # Keep an alias pair only when both sides are actually listed.
  matched_pairs = [(fut, spot) for fut, spot in EXTRA_SYMBOLS.get(exchange, [])
                   if fut in futures_symbols and spot in spot_symbols]
  extra_futures_symbols = [fut for fut, _ in matched_pairs]
  extra_spot_symbols = [spot for _, spot in matched_pairs]

  # Trade only symbols listed on both markets and quoted in *currency*.
  symbols = sorted(set(futures_symbols) & set(spot_symbols))
  symbols = [s for s in symbols if s.split("-")[1] == currency]
  print(f"target_symbols: {symbols}\nsymbols len: {len(symbols)}")

  kline_info = {"klines": []}
  failed_info = []

  for mea in MEA_MAP[exchange]:
    is_futures = "Futures" in mea
    if is_futures:
      query_symbols = [f"{s}.PERPETUAL"
                       for s in symbols + extra_futures_symbols]
    else:
      query_symbols = symbols + extra_spot_symbols

    if is_test:
      query_symbols = query_symbols[:2]

    # Daily history only when the window extends before the checkpoint.
    day_klines, day_failed = {}, []
    if start != ckpt:
      day_klines, day_failed = _query_kline_1d(mea, query_symbols, start,
                                               latest_feed, ckpt,
                                               tolerance=tolerance,
                                               to_resolution=day_resolution,
                                               topic_index=topic_index)
    minute_klines, minute_failed = _query_kline_1m(mea, query_symbols, ckpt,
                                                   latest_feed, now,
                                                   tolerance=tolerance,
                                                   to_resolution=minute_resolution,
                                                   topic_index=topic_index)

    failed_info += day_failed + minute_failed
    suffix = "Futures" if is_futures else ""

    # Only symbols with minute data are emitted; daily data is optional.
    for symbol in minute_klines:
      kline_info["klines"].append({
        "symbol": f"{exchange}{suffix}_{symbol}",
        "ohlc": day_klines.get(symbol, []) + minute_klines[symbol]
      })
  return kline_info, failed_info

def run(now=None, kline_path=None, exchange="Binance",
        ckpt_days=5, start_days=21, tolerance=2, use_slack=False,
        day_resolution=1, minute_resolution=5, is_test=False,
        topic_index=0):
  """CLI entry point: fetch klines, dump JSON, optionally report failures.

  Args:
    now: override "now" as YYYYmmddHHMM (int or str); defaults to wall clock.
    kline_path: output JSON path; defaults to a fresh tmp directory.
    exchange: key into CURRENCY_MAP / MEA_MAP / EXTRA_SYMBOLS.
    ckpt_days / start_days: minute data covers [today - ckpt_days, now);
      daily data covers the preceding start_days - ckpt_days days.
    tolerance, day_resolution, minute_resolution, topic_index: forwarded
      to the query layer.
    use_slack: send a failure summary to #coin_midfreq_checker when set.
    is_test: limit to 2 symbols per market and pretty-print the JSON.
  """
  el_st = time.time()

  now = datetime.datetime.now() if now is None else \
    datetime.datetime.strptime(str(now), "%Y%m%d%H%M")
  today = datetime.datetime.strptime(now.strftime("%Y%m%d"), "%Y%m%d")
  ckpt = today - datetime.timedelta(days=ckpt_days)
  start = ckpt - datetime.timedelta(days=start_days - ckpt_days)

  # Latest interval-feed timestamp (ns) for this exchange's futures MEA;
  # None means the feed is unavailable and everything falls back to REST.
  latest_feed = get_latest_data_ts(MEA_MAP[exchange][0])
  if latest_feed is not None:
    latest_feed = datetime.datetime.utcfromtimestamp(latest_feed / 1e9)
  # FIX: log the query window unconditionally — the original only printed it
  # when latest_feed was available, hiding diagnostics in the REST-only case.
  print(f"start: {start}, ckpt: {ckpt}, end: {now}, latest_feed: {latest_feed}, topic_index: {topic_index}")

  kline_info, failed_info = _run(exchange, start, ckpt, now, latest_feed,
                                 tolerance=tolerance, is_test=is_test,
                                 day_resolution=day_resolution,
                                 minute_resolution=minute_resolution,
                                 topic_index=topic_index)

  dt_str = now.strftime("%Y%m%d%H")
  kline_path = kline_path or \
    pjoin(tempfile.mkdtemp(prefix=f"coin_midfreq_kline_{dt_str}", dir="/tmp"),
          "kline_info.json")
  print(f"dump kline to {kline_path}")
  with open(kline_path, "w", encoding="utf8") as fout:
    # Pretty-print only in test mode; compact output otherwise.
    json.dump(kline_info, fout, indent=2 if is_test else None)

  elapsed = time.time() - el_st
  print(f"elapsed to get kline: {elapsed:.4e} sec")
  if use_slack and failed_info:
    failed_df = pd.DataFrame(failed_info)
    failed_cnt = failed_df['product'].value_counts().sort_values()
    send_to_slack('\n'.join([
                  f'Hostname: {os.environ.get("HOSTNAME", "")}',
                  f'kline_path: {kline_path}',
                  "",
                  "Failure counts",
                  failed_cnt.to_string(),
                  "",
                  "Failures",
                  failed_df.to_string()]),
                  "#coin_midfreq_checker",
                  'file',
                  now.strftime(f"Kline failures %Y%m%d_%H%M%S {exchange}"))


if __name__ == "__main__":
  # Expose run() (and the other module functions) as a CLI via python-fire,
  # e.g. `python this_file.py run --exchange=Bybit --is_test=True`.
  fire.Fire()
