import concurrent.futures
import fire
import os
import toml
import tempfile
import pandas as pd
import coin2.service.feed.interval_feed_service_pb2 as fs_pb2
from os.path import join as pjoin
from datetime import datetime, timedelta, timezone
from xunkemgmt_client.client.api_client import XunkemgmtClient
from cc.appcoin2.strategy.lm.kline_generator import query_kline_as_pb
from google.protobuf.json_format import MessageToDict
from coin.base.datetime_util import to_timestamp_int
import json
import time

# Pin the process timezone to GMT so naive datetime parsing/formatting and
# time.* conversions behave as UTC; tzset() applies the TZ change to libc.
os.environ["TZ"] = "GMT"
time.tzset()

# Every repeated per-topic value field available on the interval-feed proto.
# convert_response_to_df iterates this list to pull all populated series;
# list order determines the resulting DataFrame column order.
proto_topic = ['timestamp', 'spread', 'std', 'close_ask0', 'close_bid0', 'close_funding_rate', 'close_index', 'close_mark_price', 'close_mid', 'close_nav',
               'close_open_interest', 'close_trade', 'high_ask0', 'high_bid0', 'high_funding_rate', 'high_index', 'high_mark_price', 'high_mid', 'high_nav',
               'high_open_interest', 'high_trade', 'low_ask0', 'low_bid0', 'low_funding_rate', 'low_index', 'low_mark_price', 'low_mid', 'low_nav', 'low_open_interest',
               'low_trade', 'open_ask0', 'open_bid0', 'open_funding_rate', 'open_index', 'open_mark_price', 'open_mid', 'open_nav', 'open_open_interest', 'open_trade',
               'close_funding_time', 'open_funding_time', 'volume', 'volume_buy', 'volume_buy_dollar', 'volume_dollar', 'volume_liquidation_buy', 'volume_liquidation_buy_dollar',
               'volume_liquidation_sell', 'volume_liquidation_sell_dollar', 'volume_sell', 'volume_sell_dollar', 'vwap', 'vwap_buy', 'vwap_sell', 'market_cap_in_usd',
               'max_supply', 'total_supply', 'tvl_in_usd', 'circulating_supply', 'top_long_short_position_ratio', 'top_long_short_account_ratio', 'global_long_short_account_ratio']

# Topics requested by default: just enough to build OHLC + volume klines.
default_query_topic = ["volume", "volume_buy", "volume_sell", "open_trade", "close_trade", "high_trade", "low_trade"]

def convert_response_to_df(response):
  """Convert a QueryIntervalFeed response into per-symbol DataFrames.

  Args:
    response: response proto exposing `status`, `result` (map of
      symbol -> interval-feed message) and repeated per-topic fields.

  Returns:
    dict mapping symbol -> pandas.DataFrame with one column per topic
    that carried data. Empty dict when response.status != "success".
  """
  interval_feed_df = {}
  if response.status == "success":
    for symbol, interval_feed in response.result.items():
      interval_feed_map = {}
      for topic in proto_topic:
        # getattr is the idiomatic (and safer) form of __getattribute__.
        feed_data = getattr(interval_feed, topic)
        # Skip topics with no data so the DataFrame only has populated columns.
        if len(feed_data) > 0:
          interval_feed_map[topic] = list(feed_data)
      interval_feed_df[symbol] = pd.DataFrame(interval_feed_map)
  return interval_feed_df


def convert_to_datetime(time_str):
  """Parse a compact timestamp string into a naive datetime.

  Accepted formats: YYYYMMDDTHHMM, YYYYMMDD, YYYYMMDDHHMM. Anything
  else prints an error and returns None.
  """
  text = str(time_str)
  if "T" in text:
    fmt = "%Y%m%dT%H%M"
  elif len(text) == 8:
    fmt = "%Y%m%d"
  elif len(text) == 12:
    fmt = "%Y%m%d%H%M"
  else:
    print(f"ERROR: wrong time format: {text}")
    return None
  return datetime.strptime(text, fmt)


def get_subscribed_symbols(mea, currency='-USDT', expiry=".PERPETUAL"):
  """Return the symbols subscribed for `mea` from the symbol-groups toml.

  Symbols are filtered to those containing `currency`; for futures meas
  an additional `expiry` substring filter applies unless expiry is None.
  """
  groups_path = os.path.expanduser("data/coin2/feed/symbol_groups.toml")
  # Toml section keys are the lowercased mea with dots replaced.
  toml_key = mea.lower().replace(".", "_")
  with open(groups_path, "r") as fp:
    symbol_group = toml.load(fp)[toml_key]
  filter_expiry = 'Futures' in mea and expiry is not None
  symbols = []
  for subscribed in symbol_group.values():
    for candidate in subscribed:
      if currency not in candidate:
        continue
      if filter_expiry and expiry not in candidate:
        continue
      symbols.append(candidate)
  return symbols


def query_inverval_feed(request_proto):
  """Execute one interval-feed query through a fresh client connection."""
  with XunkemgmtClient() as client:
    return client.query_interval_feed(request_proto)

def generate_request(mea, symbols, start_dt, end_dt, resolution='1m', topics=default_query_topic):
  """Split [start_dt, end_dt) into at most day-long query protos.

  Multi-day ranges are chopped at UTC midnights so each request covers
  at most one day; sub-day ranges produce a single request.

  Args:
    mea: market/exchange/api identifier string.
    symbols: list of symbol names to query.
    start_dt / end_dt: naive datetimes bounding the query window.
    resolution: kline resolution (e.g. '1m', '1d').
    topics: interval-feed topics to request.

  Returns:
    list of QueryIntervalFeedRequestProto covering the window in order.
  """
  query_list = []
  if (end_dt - start_dt).days > 0:
    # Walk with a cursor instead of mutating the start_dt parameter.
    cursor = start_dt
    while cursor < end_dt:
      next_day = (cursor + timedelta(days=1)).date()
      next_midnight = datetime(next_day.year, next_day.month, next_day.day)
      query_list.append(fs_pb2.QueryIntervalFeedRequestProto(
          start_time=to_timestamp_int(cursor),
          end_time=to_timestamp_int(min(next_midnight, end_dt)),
          mea=mea,
          symbols=symbols,
          resolution=resolution,
          topics=topics,
      ))
      cursor = next_midnight
  else:
    query_list.append(fs_pb2.QueryIntervalFeedRequestProto(
        start_time=to_timestamp_int(start_dt),
        end_time=to_timestamp_int(end_dt),
        mea=mea,
        symbols=symbols,
        # BUG FIX: resolution was previously omitted on this branch, so
        # sub-day queries silently fell back to the server default.
        resolution=resolution,
        topics=topics,
    ))
  return query_list

def aggregate_1day_from_1m(start_dt, end_dt, mea, symbols):
  """Build one 1-day kline row per symbol by aggregating 1m klines.

  Queries 1-minute klines for [start_dt, end_dt) and collapses each
  symbol's frame into a single-row DataFrame: first open, last close,
  extreme high/low, summed volumes, and the last kline timestamp.
  """
  minute_dfs, _ = get_kline_from_interval_feed(
      start_dt, end_dt, mea, symbols, resolution='1m', return_df=True)
  daily = {}
  for symbol in symbols:
    frame = minute_dfs[symbol]
    row = {
        'klineTimestamp': frame['klineTimestamp'].iat[-1],
        'open': frame['open'].iat[0],
        'close': frame['close'].iat[-1],
        'high': max(frame['high']),
        'low': min(frame['low']),
        'volume': sum(frame['volume']),
        'buyVolume': sum(frame['buyVolume']),
        'sellVolume': sum(frame['sellVolume']),
    }
    daily[symbol] = pd.DataFrame([row])
  return daily

def get_kline_from_interval_feed(start_dt, end_dt, mea, symbols, resolution='1m', return_df=False, topics=default_query_topic):
  """Query interval-feed klines for `symbols` over [start_dt, end_dt).

  Day-sliced requests are fanned out over a thread pool, converted to
  per-symbol DataFrames, and concatenated. For '1d' resolution on
  supported meas, a single missing trailing day is reconstructed by
  aggregating 1-minute data.

  Args:
    start_dt / end_dt: naive datetimes bounding the query window.
    mea: market/exchange/api identifier.
    symbols: symbols to query.
    resolution: kline resolution ('1m' or '1d').
    return_df: when True return per-symbol DataFrames instead of json.
    topics: interval-feed topics to request (overridden for Coingecko).

  Returns:
    (dict of symbol -> DataFrame, failed_symbols) when return_df is
    True; otherwise (list of per-symbol json record fragments with the
    outer brackets stripped, failed_symbols).
  """
  print(f"start querying {mea} {resolution} kline from interval_feed")
  # Coingecko only carries market-cap data, so force that single topic.
  if mea == "Spot.Coingecko.v3":
    req_list = generate_request(
        mea=mea,
        symbols=symbols,
        start_dt=start_dt,
        end_dt=end_dt,
        resolution=resolution,
        topics=['market_cap_in_usd']
    )
  else:
    req_list = generate_request(
        mea=mea,
        symbols=symbols,
        start_dt=start_dt,
        end_dt=end_dt,
        resolution=resolution,
        topics=topics
    )
  # Fan out one thread per day-slice; the `with` exit waits for completion.
  fs = []
  with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
    for req in req_list:
      future = executor.submit(query_inverval_feed, req)
      fs.append(future)

  # Collect results in submission order so day-slices stay chronological.
  errors = []
  oneday_kline_df = []
  aggregate = False
  for n in range(len(fs)):
    result = fs[n].result()
    kline_df = convert_response_to_df(result)
    if len(kline_df) != 0:
      oneday_kline_df.append(kline_df)
      # Partial success: data came back but the server also reported errors.
      if len(result.error_message) > 0:
        errors.append(f"EXCEPTION: {req_list[n].start_time}-{req_list[n].end_time}, {result.error_message}")
    else:
      errors.append(f"ERROR: {req_list[n].start_time}-{req_list[n].end_time}, {result.error_message}")

  if len(errors) > 0:
    for err in errors:
      print(err)

  # Determine how much of the window is missing at the tail. The last
  # timestamp is taken from an arbitrary symbol of the last day-slice
  # (timestamps are in nanoseconds, hence the //1e9).
  if oneday_kline_df:
    last_ts = (list(oneday_kline_df[-1].values())[0]['timestamp'].iat[-1])
    aggregate_start_dt = datetime.utcfromtimestamp(last_ts//1e9)
    missing_period = (end_dt - aggregate_start_dt)
  else:
    missing_period = (end_dt - start_dt)
    aggregate_start_dt = start_dt
  
  aggregate_available_mea = ["Spot.Binance.v1", "Futures.Binance.v1", "Spot.Okex.v5", 
                             "Futures.Okex.v5-swap","Spot.Bybit.v3", "Futures.Bybit.v3-linear"]
  
  # NOTE(review): only an *exactly* one-day gap triggers the 1m fallback;
  # gaps of other sizes are left missing — confirm that is intended.
  if mea in aggregate_available_mea and resolution == '1d' and missing_period == timedelta(days=1):
    print("try aggregate 1day interval feed from 1m")
    aggregate = True
    aggregate_1day_df = aggregate_1day_from_1m(aggregate_start_dt,end_dt,mea,symbols)

  # Map interval-feed topic names to the downstream kline column names.
  columns_name_map = {
      "timestamp": "klineTimestamp",
      "open_trade": "open",
      "high_trade": "high",
      "low_trade": "low",
      "close_trade": "close",
      "volume": "volume",
      "volume_buy": "buyVolume",
      "volume_sell": "sellVolume",
      "market_cap_in_usd": "marketCap",
  }

  mae_kline_json = []
  symbol_kline_df = {}
  failed_symbol = []
  for symbol in symbols:
    # Gather this symbol's frames across all day-slices (some may lack it).
    df_list = [df[symbol] for df in oneday_kline_df if symbol in df.keys()]
    symbol_df = pd.DataFrame()
    if len(df_list) == 0 and not aggregate:
      print(f"exception: {symbol} have no data found in h5 file")
      failed_symbol.append(symbol)
      continue
    elif len(df_list) > 0:  
      symbol_df = pd.concat(df_list).reset_index()
      del symbol_df['index']
      symbol_df = symbol_df.rename(columns_name_map, axis='columns')
    if aggregate:
      # Append the reconstructed trailing 1-day row.
      symbol_df = pd.concat([symbol_df,aggregate_1day_df[symbol]]).reset_index()
      del symbol_df['index']
    if return_df:
      symbol_kline_df[symbol] = symbol_df
      continue
    # [1:-1] strips the outer brackets so fragments can be comma-joined.
    symbol_kline_json = symbol_df.to_json(orient="records")
    mae_kline_json.append(symbol_kline_json[1:-1])

  if return_df:
    return symbol_kline_df, failed_symbol
  # return json str
  return mae_kline_json, failed_symbol


def get_kline_from_rest(mea, symbols, latest_feed_dt, resolution="1m"):
  """Fetch klines directly from the exchange REST API per symbol.

  Covers [latest_feed_dt, now]. Returns a pair: list of per-symbol json
  fragments (outer brackets stripped so they can be comma-joined) and
  the list of symbols that failed to query.
  """
  print(f"start querying {mea} kline from exchange")
  now = datetime.utcnow()
  # "Spot.Binance.v1" -> "Spot:Binance" product prefix.
  me = ":".join(mea.split(".")[:2])
  kline_fragments = []
  failed_symbol = []
  for symbol in symbols:
    klinelist, _ = query_kline_as_pb(
        now, f"{me}:{symbol}", latest_feed_dt, now, kline_period=resolution)
    as_dict = MessageToDict(klinelist)
    if "klines" in as_dict:
      kline_fragments.append(json.dumps(as_dict["klines"])[1:-1])
    else:
      print(f"ERROR: {latest_feed_dt}-{now} , {symbol} fail to query from exchange")
      failed_symbol.append(symbol)
  return kline_fragments, failed_symbol


def get_latest_data_ts(mea="Spot.Binance.v1", symbols=None):
  """Return the newest available data timestamp (ns) for `mea`, or None.

  Probes the last 36 hours with day-sliced volume queries, newest slice
  first, and returns the first non-zero latest_timestamp reported.
  """
  end_dt = datetime.utcnow()
  start_dt = end_dt - timedelta(days=1.5)
  if symbols is None:
    # BTC is always subscribed, so it is a safe liveness probe.
    symbols = ["BTC-USDT"] if "Spot" in mea else ["BTC-USDT.PERPETUAL"]
  req_list = generate_request(mea, symbols, start_dt, end_dt, topics=["volume"])
  for req in reversed(req_list):
    result = query_inverval_feed(req)
    if result.latest_timestamp != 0:
      return result.latest_timestamp
  return None


def run(start_time=None, end_time=None, exchange="Binance", resolution='1m', kline_path=None, report_latest_time=False, test=False):
  """Generate a kline json dump for `exchange` over a time window.

  Combines interval-feed history with a live REST top-up when the feed
  lags behind end_time, then writes {"klines": [...]} to kline_path.

  Args:
    start_time / end_time: compact time strings (see convert_to_datetime);
      default to the last 21 days up to now (UTC).
    exchange: one of Binance / Okex / Bybit.
    resolution: '1m' or '1d'.
    kline_path: output path; a fresh temp dir under /tmp is used when None.
    report_latest_time: when True, only print the feed's latest timestamp.
    test: when True, restrict products to BTC-USDT / ETH-USDT.
  """
  st = datetime.now().timestamp()
  currency_map = {"Binance": "USDT", "Okex": "USDT", "Bybit": "USDT"}
  mea_map = {"Binance": ["Spot.Binance.v1", "Futures.Binance.v1"],
             "Okex": ["Spot.Okex.v5", "Futures.Okex.v5-swap"],
             "Bybit": ["Spot.Bybit.v3", "Futures.Bybit.v3-linear"], }
  assert exchange in mea_map.keys()
  assert resolution in ["1m", "1d"]

  latest_data_ts = get_latest_data_ts(mea_map[exchange][0])
  if latest_data_ts is None:
    print("ERROR: No data found in the last 36 hours, please check if interval feed have issue")
    return
  # latest_data_ts is in nanoseconds.
  latest_feed_dt = datetime.utcfromtimestamp(latest_data_ts / 1e9)
  if report_latest_time:
    print(f"interval feed latest data timestamp: {latest_data_ts}, datetime: {latest_feed_dt}")
    return
  if start_time is None:
    # Default: midnight 21 days ago.
    start_dt = datetime.utcnow() - timedelta(days=21)
    start_dt = datetime(year=start_dt.year, month=start_dt.month, day=start_dt.day)
  else:
    start_dt = convert_to_datetime(start_time)
  if end_time is None:
    end_dt = datetime.utcnow()
  else:
    end_dt = convert_to_datetime(end_time)
  assert (start_dt is not None) and (end_dt is not None) and (start_dt < end_dt)
  if start_dt > latest_feed_dt:
    print(f"WARNING: start time({start_dt}) later than the latest h5 data ({latest_feed_dt}), may cause unexpected error")

  # Collect subscribed symbols per mea; only query products present on
  # BOTH spot and futures (when futures exist) so pairs stay aligned.
  mea_symbols = {}
  spot_products = []
  futures_products = []
  for mea in mea_map[exchange]:
    symbols = get_subscribed_symbols(mea, currency_map[exchange])
    if len(symbols) > 0:
      mea_symbols[mea] = symbols
      if 'Spot' in mea:
        spot_products.extend(symbols)
      if 'Futures' in mea:
        futures_products.extend([s.split('.')[0] for s in symbols])
  query_product = spot_products
  if len(futures_products) > 0:
    # Sorted for a deterministic symbol/output order (a bare set is not).
    query_product = sorted(set(spot_products) & set(futures_products))
  if test:
    query_product = ["BTC-USDT", "ETH-USDT"]
  print(f"query products: {query_product}")
  print(f"query mea: {list(mea_symbols.keys())}")
  print(f"query time: {start_dt} - {end_dt}")

  mea_fs = {}
  query_exchange = False
  # Submit feed (and optional REST top-up) queries per mea; the `with`
  # exit waits for all futures to finish before results are read below.
  with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
    for mea, symbols in mea_symbols.items():
      fs = []
      if 'Futures' in mea:
        mea_symbols[mea] = [s for s in mea_symbols[mea] if s.split(".")[0] in query_product]
      if 'Spot' in mea:
        mea_symbols[mea] = query_product
      future1 = executor.submit(get_kline_from_interval_feed, start_dt,
                                end_dt, mea, mea_symbols[mea], resolution)
      fs.append(future1)
      if end_dt > latest_feed_dt:
        # The feed does not cover the full window; top up from REST.
        query_exchange = True
        rest_start_dt = max(latest_feed_dt + timedelta(minutes=1), start_dt)
        future2 = executor.submit(get_kline_from_rest, mea, mea_symbols[mea],
                                  rest_start_dt)
        fs.append(future2)
      mea_fs[mea] = fs

  json_template = "{\"symbol\":\"%s\",\"olhc\":[%s]}"
  klines = []
  for mea, futures in mea_fs.items():
    feed_kline, feed_fail_symbol = futures[0].result()
    rest_kline, rest_fail_symbol = futures[1].result() if query_exchange else (None, None)
    symbol_prefix = mea.split(".")
    symbol_prefix = f"{symbol_prefix[1]}{symbol_prefix[0]}_"
    # BUG FIX: these pointers must advance across symbols. They were
    # previously reset inside the loop, so every symbol was assigned the
    # first symbol's kline data. Results are in symbol order with failed
    # symbols skipped, matching the failed-symbol checks below.
    feed_kline_pointer = 0
    rest_kline_pointer = 0
    for symbol in mea_symbols[mea]:
      symbol_kline = []
      if symbol not in feed_fail_symbol:
        symbol_kline.append(feed_kline[feed_kline_pointer])
        feed_kline_pointer += 1
      if query_exchange and symbol not in rest_fail_symbol:
        symbol_kline.append(rest_kline[rest_kline_pointer])
        rest_kline_pointer += 1
      symbol_kline = ",".join(symbol_kline)
      if symbol_kline != "":
        klines.append(json_template % (symbol_prefix + symbol, symbol_kline))

  kline_path = kline_path or pjoin(tempfile.mkdtemp(
      prefix=f"coin_midfreq_kline_{start_dt}", dir="/tmp"), "kline_info.json")
  with open(kline_path, 'w') as fp:
    fp.write("{\"klines\":[")
    # BUG FIX: the previous chunked writer dropped the comma between
    # 20-element chunks, producing invalid JSON; one join is correct.
    fp.write(",".join(klines))
    fp.write("]}")
  print(f"dump kline to: {kline_path}")
  print(f"generate compelete, total cost:{round(datetime.now().timestamp() - st,2)}s")

if __name__ == "__main__":
  # Expose run() and the helper functions as a CLI via python-fire.
  fire.Fire()
