import collections
import datetime
import os
import pandas as pd
import numpy as np

from decimal import Decimal
from coin.support.feed_tool.feed_stats.logic.util import (
    get_exchange_api_id_from_request,
    fetch_git_commit_sha_datetime,
)
from coin.support.feed_tool.feed_stats.app.feed_stats_motion.presto_feed_dataset \
    import calculate_stats
import pypapyrus.dataset.presto_feed_dataset as dataset_mdl
from pypapyrus.dataset.presto_feed_dataset import h5_io

from xunkemgmt_client.market.feed_stats_v2.database.importer_base import \
    FeedStatsImporterBase
from xunkemgmt_client.market.feed_stats_v2.model.feed_stats import FeedStatsIndex

from xunkemgmt_client.market.feed_stats_v2.database.connection.coin2_database import \
    coin2_feed_stats_importer_config

# Mapping from dataframe column names to FeedStatsInfo database columns.
feed_info_columns = {
    "date": "trading_date",
    "message__first": "start_time",
    "message__last": "end_time",
    "symbol": "symbol",
    "exchange": "exchange_api_id"
}

# Dataframe -> BookStats table columns (order-book derived stats).
book_columns = {
    "book__first": "time_first_book",
    "book__last": "time_last_book",
    "book__count": "book_count",
    "book__max_diff": "no_book_duration",
    "bid_qty__average": "avg_bid0_qty",
    "ask_qty__average": "avg_ask0_qty",
    "bid_notional__average": "avg_bid0_notional",
    "ask_notional__average": "avg_ask0_notional",
    "trade_move__average": "avg_trade_move",
    "trade_stay__average": "avg_trade_stay",
    "bidask_spread__average": "avg_bid_ask_spread",
    "mpc__count": "mpc_count",
    "crossed_book__condition_count": "crossed_book_count",
    "twap__average": "twap",
    "book_parse_time_cost__average": "avg_book_parse_time",
    "book_parse_time_cost__total": "total_book_parse_time",
    # OHLC of best ask / best bid / mid price over the day.
    "ask0_price_range__open":"ask0_price_open",
    "ask0_price_range__high":"ask0_price_high",
    "ask0_price_range__low":"ask0_price_low",
    "ask0_price_range__close":"ask0_price_close",
    "bid0_price_range__open":"bid0_price_open",
    "bid0_price_range__high":"bid0_price_high",
    "bid0_price_range__low":"bid0_price_low",
    "bid0_price_range__close":"bid0_price_close",
    "mid_price_range__open":"mid_price_open",
    "mid_price_range__high":"mid_price_high",
    "mid_price_range__low":"mid_price_low",
    "mid_price_range__close":"mid_price_close",
}

# Dataframe -> TradeStats table columns (trade-stream derived stats).
trade_columns = {
    "trade__first": "time_first_trade",
    "trade__last": "time_last_trade",
    "trade__count": "trade_count",
    "trade_volume__average": "avg_trade_size",
    "trade__max_diff": "no_trade_duration",
    "trade_volume__total": "total_volume",
    "trade_buy__total": "buy_volume",
    "trade_sell__total": "sell_volume",
    "trade_notional__average": "avg_notional",
    "trade_notional__total": "total_notional",
    "price_range__open": "price_open",
    "price_range__high": "price_high",
    "price_range__low": "price_low",
    "price_range__close": "price_close",
    "vwap__average": "vwap",
    "flip__count": "price_flip_count",
    "flip__average_stay": "avg_price_flip",
    "volatility__value": "volatility",
    # Leading-digit (Benford-style) weight of trade sizes per digit 0-9.
    "trade_leadingdigit__w_0": "trade_leadingdigit_0_w",
    "trade_leadingdigit__w_1": "trade_leadingdigit_1_w",
    "trade_leadingdigit__w_2": "trade_leadingdigit_2_w",
    "trade_leadingdigit__w_3": "trade_leadingdigit_3_w",
    "trade_leadingdigit__w_4": "trade_leadingdigit_4_w",
    "trade_leadingdigit__w_5": "trade_leadingdigit_5_w",
    "trade_leadingdigit__w_6": "trade_leadingdigit_6_w",
    "trade_leadingdigit__w_7": "trade_leadingdigit_7_w",
    "trade_leadingdigit__w_8": "trade_leadingdigit_8_w",
    "trade_leadingdigit__w_9": "trade_leadingdigit_9_w",
    "trade_parse_time_cost__average": "avg_trade_parse_time",
    "trade_parse_time_cost__total": "total_trade_parse_time",
}

# Dataframe -> CrossStats table columns (book/trade cross-checks and
# message-timing stats).
cross_columns = {
    "normal_size__count": "normal_count",
    "normal_size__average": "avg_normal_size",
    "black_size__count": "black_count",
    "black_size__average": "avg_black_size",
    "buy_sweep_size__count": "buy_sweep_count",
    "buy_sweep_size__average": "avg_buy_sweep_size",
    "sell_sweep_size__count": "sell_sweep_count",
    "sell_sweep_size__average": "avg_sell_sweep_size",
    "between_book_trade__max_count": "max_between_book_trade_count",
    "between_book_trade__max_first": "max_between_book_trade_begin",
    "between_book_trade__max_total": "max_between_book_trade_duration",
    "outside_bid_ask_trade__max_count": "max_outside_bid_ask_trade_count",
    "outside_bid_ask_trade__max_first": "max_outside_bid_ask_trade_begin",
    "outside_bid_ask_trade__max_total": "max_outside_bid_ask_trade_duration",
    "message_timediff__mean": "avg_message_time_diff",
    "message_timediff__stddev": "stddev_message_time_diff",
    "message_timediff__sharpe": "sharpe_message_time_diff",
    "message_timediff__max": "max_message_time_diff",
}

# Dataframe -> NontbsStats table columns: stats that are neither trade nor
# book/spread based (open interest and index price OHLC).
non_tbs_columns = {
    "open_interest_qty_range__open": "open_interest_qty_open",
    "open_interest_qty_range__high": "open_interest_qty_high",
    "open_interest_qty_range__low": "open_interest_qty_low",
    "open_interest_qty_range__close": "open_interest_qty_close",
    "index_price_range__open": "index_price_open",
    "index_price_range__high": "index_price_high",
    "index_price_range__low": "index_price_low",
    "index_price_range__close": "index_price_close",
}

# Dataframe -> IntervalStats table columns: optional per-interval (intraday
# resolution) stats; values are packed comma-separated strings, one element
# per interval (see transform_interval_columns).
interval_stat_columns = {
    "open_interest_qty_range_interval__resolution": "interval_resolution",
    "open_interest_qty_range_interval__open": "open_interest_qty_interval_open",
    "open_interest_qty_range_interval__high": "open_interest_qty_interval_high",
    "open_interest_qty_range_interval__low": "open_interest_qty_interval_low",
    "open_interest_qty_range_interval__close": "open_interest_qty_interval_close",
    "index_price_range_interval__open": "index_price_interval_open",
    "index_price_range_interval__high": "index_price_interval_high",
    "index_price_range_interval__low": "index_price_interval_low",
    "index_price_range_interval__close": "index_price_interval_close",
    "trade_price_range_interval__open": "trade_price_interval_open",
    "trade_price_range_interval__high": "trade_price_interval_high",
    "trade_price_range_interval__low": "trade_price_interval_low",
    "trade_price_range_interval__close": "trade_price_interval_close",
    "mid_price_range_interval__open": "mid_price_interval_open",
    "mid_price_range_interval__high": "mid_price_interval_high",
    "mid_price_range_interval__low": "mid_price_interval_low",
    "mid_price_range_interval__close": "mid_price_interval_close",
    "ask0_price_range_interval__open": "ask0_price_interval_open",
    "ask0_price_range_interval__high": "ask0_price_interval_high",
    "ask0_price_range_interval__low": "ask0_price_interval_low",
    "ask0_price_range_interval__close": "ask0_price_interval_close",
    "bid0_price_range_interval__open": "bid0_price_interval_open",
    "bid0_price_range_interval__high": "bid0_price_interval_high",
    "bid0_price_range_interval__low": "bid0_price_interval_low",
    "bid0_price_range_interval__close": "bid0_price_interval_close",
    "trade_allvolume_interval__total": "total_volume_interval",
    "trade_noside_interval__total": "noside_volume_interval",
    "trade_buy_interval__total": "buy_volume_interval",
    "trade_sell_interval__total": "sell_volume_interval",
}

# Raw prices and quantities are stored in the stats dataset as scaled
# integers; the scale factor differs per column (1e3 for quantities,
# 1e8 for prices, 1e11 for notionals/sizes — see values below).
# Dividing by the factor converts a column back to double.
int_to_double_columns = {
    # book
    "avg_bid0_qty": 1e3,
    "avg_ask0_qty": 1e3,
    "avg_bid0_notional": 1e11,
    "avg_ask0_notional": 1e11,
    "avg_trade_move": 1e3,
    "avg_trade_stay": 1e3,
    "avg_bid_ask_spread": 1e8,
    "twap": 1e8,
    "ask0_price_open": 1e8,
    "ask0_price_high": 1e8,
    "ask0_price_low": 1e8,
    "ask0_price_close": 1e8,
    "bid0_price_open": 1e8,
    "bid0_price_high": 1e8,
    "bid0_price_low": 1e8,
    "bid0_price_close": 1e8,
    "mid_price_open": 1e8,
    "mid_price_high": 1e8,
    "mid_price_low": 1e8,
    "mid_price_close": 1e8,

    # trade
    "avg_trade_size": 1e11,
    "total_volume": 1e3,
    "buy_volume": 1e3,
    "sell_volume": 1e3,
    "avg_notional": 1e11,
    "total_notional": 1e11,
    "price_open": 1e8,
    "price_high": 1e8,
    "price_low": 1e8,
    "price_close": 1e8,
    "vwap": 1e8,
    "avg_price_flip": 1e8,
    "volatility": 1e8,

    # cross
    "avg_normal_size": 1e3,
    "avg_black_size": 1e3,
    "avg_buy_sweep_size": 1e3,
    "avg_sell_sweep_size": 1e3,

    # non_tbs
    "open_interest_qty_open": 1e3,
    "open_interest_qty_high": 1e3,
    "open_interest_qty_low": 1e3,
    "open_interest_qty_close": 1e3,
    "index_price_open": 1e8,
    "index_price_high": 1e8,
    "index_price_low": 1e8,
    "index_price_close": 1e8,


    # interval
    "open_interest_qty_interval_open": 1e3,
    "open_interest_qty_interval_high": 1e3,
    "open_interest_qty_interval_low": 1e3,
    "open_interest_qty_interval_close": 1e3,
    "index_price_interval_open": 1e8,
    "index_price_interval_high": 1e8,
    "index_price_interval_low": 1e8,
    "index_price_interval_close": 1e8,
    "trade_price_interval_open": 1e8,
    "trade_price_interval_high": 1e8,
    "trade_price_interval_low": 1e8,
    "trade_price_interval_close": 1e8,
    "mid_price_interval_open": 1e8,
    "mid_price_interval_high": 1e8,
    "mid_price_interval_low": 1e8,
    "mid_price_interval_close": 1e8,
    "ask0_price_interval_open": 1e8,
    "ask0_price_interval_high": 1e8,
    "ask0_price_interval_low": 1e8,
    "ask0_price_interval_close": 1e8,
    "bid0_price_interval_open": 1e8,
    "bid0_price_interval_high": 1e8,
    "bid0_price_interval_low": 1e8,
    "bid0_price_interval_close": 1e8,
    "total_volume_interval": 1e3,
    "noside_volume_interval": 1e3,
    "buy_volume_interval":1e3,
    "sell_volume_interval":1e3,
    # "avg_bidask_spread_interval":1e8,
}

# Identifies one feed subscription for a stats run.  Only `exchange` is read
# directly in this module (get_feed_stats_info / dump_stats_to_db); the other
# fields are consumed by the helpers this tuple is passed to.
SubscriptionRequest = collections.namedtuple('SubscriptionRequest',
                                             ['market_type', 'exchange', 'api_version'])


def transform_int_to_double_columns(dataset):
  """Rescale known fixed-point integer columns back to doubles, in place.

  Every column listed in `int_to_double_columns` is divided by its
  per-column scale factor.  Returns the (mutated) dataset.
  """
  for col in set(dataset.columns) & set(int_to_double_columns):
    # Vectorized division instead of a per-element Python apply().
    dataset[col] = dataset[col] / int_to_double_columns[col]
  return dataset


def transform_time_columns(dataset, columns):
  """Convert nanosecond-epoch columns to pandas datetimes, in place.

  A value of 0 is treated as "never happened" and becomes NaT.  Columns
  not present in the dataset are ignored.  Returns the dataset.
  """
  for name in columns:
    if name not in dataset.columns:
      continue
    dataset.loc[dataset[name] == 0, name] = pd.NaT
    dataset[name] = pd.to_datetime(dataset[name], unit='ns')
  return dataset


def transform_date_columns(dataset, columns):
  """Parse YYYYMMDD-formatted columns into pandas datetimes, in place.

  Columns missing from the dataset are skipped.  Returns the dataset.
  """
  for name in columns:
    if name not in dataset.columns:
      continue
    # exact=False tolerates extra characters around the date token.
    dataset[name] = pd.to_datetime(dataset[name], format='%Y%m%d', exact=False)
  return dataset


# C++ strings carry no encoding information, so python sees them as bytes.
# pandas to_csv writes bytes as "b'xxxxxx'" and read_csv loads that column
# back as str.  Decode genuine bytes to str as a quick fix.
def transform_bytes_columns(dataset, columns):
  """Decode bytes values in the named columns to str, in place.

  Non-bytes values (ints, already-decoded strings) pass through unchanged;
  columns absent from the dataset are ignored.  Returns the dataset.
  """
  for col in set(dataset.columns) & set(columns):
    # isinstance(x, bytes) instead of `type(x) == int`: the old check
    # crashed on str values, which the CSV round trip produces.
    dataset[col] = dataset[col].apply(
        lambda x: x.decode() if isinstance(x, bytes) else x)
  return dataset


def get_max_feed_stats_id(conn):
  """Return the largest feed_stats_id in FeedStatsInfo via `conn`.

  `conn` is a DB-API/SQLAlchemy-style connection with execute().  Returns
  None when the table is empty (SQL MAX over no rows is NULL).
  """
  select_max_id = "select max(feed_stats_id) from FeedStatsInfo"
  result = conn.execute(select_max_id)
  # Renamed from `id`, which shadowed the builtin.
  max_id = result.first()[0]
  return max_id


def get_exchange_api_id(sub_request):
  """Resolve the single exchange_api_id for one subscription request."""
  ids = get_exchange_api_id_from_request([sub_request])
  # One request in, exactly one id out.
  assert len(ids) == 1
  return ids[0]


def get_feed_stats_info(
    dataset,
    columns,
    exchange,
    exchange_api_id,
    machine,
    recipe,
    worker,
    job_start_time,
    run_for_raw):
  """Build the FeedStatsInfo dataframe from the raw stats dataset.

  Selects and renames the columns per `columns`, attaches job metadata
  (machine, recipe, worker, start time, git commit) and normalizes the
  time/date columns.  Returns a dataframe ready for row-wise insertion.
  """
  # .copy() so the assignments below never warn about chained assignment.
  feed_df = dataset[list(columns.keys())].copy()
  feed_df = feed_df.rename(columns, axis='columns')

  feed_df = transform_bytes_columns(feed_df, ["symbol", "trading_date", "exchange_api_id"])
  feed_df['machine'] = machine
  feed_df['calendar_date'] = feed_df['trading_date']
  git_commit_datetime, git_commit_sha = fetch_git_commit_sha_datetime('HEAD')
  feed_df['git_commit_sha'] = git_commit_sha
  # The dataset's exchange column must agree with the requested exchange.
  # The original computed this check but discarded the result; make it an
  # actual assertion before the column is overwritten below.
  assert feed_df['exchange_api_id'].eq(exchange).all(), \
      "dataset exchange column does not match requested exchange"
  feed_df['exchange_api_id'] = exchange_api_id
  feed_df['native_symbol'] = feed_df['symbol']
  feed_df['git_commit_datetime'] = git_commit_datetime
  feed_df['recipe'] = recipe
  feed_df['worker'] = worker
  feed_df['job_start_time'] = job_start_time
  feed_df['feed_source'] = 'raw' if run_for_raw else 'fastfeed'

  feed_df = transform_time_columns(feed_df, ["start_time", "end_time", "git_commit_datetime"])
  feed_df = transform_date_columns(feed_df, ["trading_date", "calendar_date"])
  # pd.np was removed in pandas 1.0; use numpy directly (imported at top).
  feed_df = feed_df.replace({np.nan: None})
  return feed_df


def get_book_stats(dataset, columns):
  """Extract and normalize the BookStats columns from the stats dataset.

  Renames the raw stat columns per `columns`, packs the 24 hourly
  "count:max_diff" pairs into book_hour_count, converts nanosecond
  timestamps and rescales the fixed-point integer columns.
  """
  book_df = dataset[list(columns.keys())].copy()
  book_df = book_df.rename(columns, axis='columns')

  # Accumulate "count:max_diff, " for every hour into one string column.
  book_df['book_hour_count'] = ''
  for hour in range(24):
    book_df = book_df.assign(
        book_hour_count=(book_df['book_hour_count'] + dataset["book_%d__count" % hour].astype(str)
                         + ':' + dataset["book_%d__max_diff" % hour].astype(str) + ', '))

  book_df = transform_time_columns(
      book_df, ["time_first_book", "time_last_book", "time_no_book_begin", "time_no_book_end"])

  book_df = transform_int_to_double_columns(book_df)
  # pd.np was removed in pandas 1.0; use numpy directly.
  book_df = book_df.replace({np.nan: None})
  return book_df


def get_trade_stats(dataset, columns):
  """Extract and normalize the TradeStats columns from the stats dataset.

  Mirrors get_book_stats: rename per `columns`, pack the hourly
  "count:max_diff" pairs, convert times and rescale integer columns.
  """
  trade_df = dataset[list(columns.keys())].copy()
  trade_df = trade_df.rename(columns, axis='columns')

  # Accumulate "count:max_diff, " for every hour into one string column.
  trade_df['trade_hour_count'] = ''
  for hour in range(24):
    trade_df = trade_df.assign(
        trade_hour_count=(trade_df['trade_hour_count']
                          + dataset["trade_%d__count" % hour].astype(str) + ':'
                          + dataset["trade_%d__max_diff" % hour].astype(str) + ', '))

  trade_df = transform_time_columns(
      trade_df, ["time_first_trade", "time_last_trade", "time_no_trade_begin", "time_no_trade_end"])

  trade_df = transform_int_to_double_columns(trade_df)
  # pd.np was removed in pandas 1.0; use numpy directly.
  trade_df = trade_df.replace({np.nan: None})
  return trade_df


def get_cross_stats(dataset, columns):
  """Extract and normalize the CrossStats columns from the stats dataset."""
  cross_df = dataset[list(columns.keys())].copy()
  cross_df = cross_df.rename(columns, axis='columns')

  cross_df = transform_time_columns(
      cross_df, ["max_between_book_trade_begin", "max_outside_bid_ask_trade_begin"])

  cross_df = transform_int_to_double_columns(cross_df)
  # pd.np was removed in pandas 1.0; use numpy directly.  inf is also mapped
  # to NULL (e.g. sharpe_message_time_diff when the stddev is zero).
  cross_df = cross_df.replace({np.nan: None, np.inf: None})
  return cross_df

def get_non_tbs_stats(dataset, columns):
  """Extract the non-trade/book stats (open interest, index price OHLC)."""
  open_interest_df = dataset[list(columns.keys())].copy()
  open_interest_df = open_interest_df.rename(columns, axis='columns')
  open_interest_df = transform_int_to_double_columns(open_interest_df)
  # pd.np was removed in pandas 1.0; use numpy directly.  Decimal(0) is also
  # mapped to NULL — presumably an "absent" marker for these columns;
  # TODO(review): confirm Decimal values actually occur in this dataset.
  open_interest_df = open_interest_df.replace({np.nan: None, np.inf: None, Decimal(0): None})
  return open_interest_df

def get_interval_stats(dataset, columns):
  """Extract the per-interval stats and rescale their packed values."""
  renamed = dataset[list(columns.keys())].rename(columns, axis='columns')
  return transform_interval_columns(renamed)

def transform_interval_columns(dataset):
  """Rescale packed per-interval stat strings from scaled ints to doubles.

  Each interval cell is a bytes value of comma-separated scaled integers
  with a trailing separator (hence the [:-1] strip below).  Every element
  is divided by the column's factor from `int_to_double_columns`; a cell
  whose values are all zero becomes None (stored as NULL).
  """
  def transform(stats_str, divisor):
    parts = stats_str.decode("utf-8")[:-1].split(',')
    values = [float(p) / divisor for p in parts]
    if not any(values):
      # All-zero intervals mean "no data" for this cell.
      return None
    return bytes(','.join(str(v) for v in values), 'utf-8')

  for col in set(dataset.columns) & set(int_to_double_columns):
    dataset[col] = dataset[col].apply(transform, args=(int_to_double_columns[col],))
  return dataset

class FeedStatsImporter(FeedStatsImporterBase):
  """Inserts per-feed daily stats rows into the feed-stats database.

  The per-table row classes come from the base connector's SQLAlchemy
  automap of the existing schema (BookStats, TradeStats, CrossStats,
  NontbsStats, IntervalStats).
  """

  def __init__(self, *, db_config=None, connection=None):
    super().__init__(db_config=db_config, connection=connection)
    # Automapped ORM classes, one per stats table.
    self._BookStats = self._connector.automap_base.classes.BookStats
    self._TradeStats = self._connector.automap_base.classes.TradeStats
    self._CrossStats = self._connector.automap_base.classes.CrossStats
    self._NontbsStats = self._connector.automap_base.classes.NontbsStats
    self._IntervalStats = self._connector.automap_base.classes.IntervalStats

  def insert_feed_stats_value(self, feed_stats_id, feed_stats):
    """Insert one feed's per-table stats rows tagged with feed_stats_id."""
    # Stamp every per-table row dict with the parent FeedStatsInfo id.
    feed_stats['book_stats']['feed_stats_id'] = feed_stats_id
    feed_stats['trade_stats']['feed_stats_id'] = feed_stats_id
    feed_stats['cross_stats']['feed_stats_id'] = feed_stats_id
    feed_stats['non_tbs_stats']['feed_stats_id'] = feed_stats_id
    book_stats_obj = self._BookStats(**(feed_stats['book_stats']))
    trade_stats_obj = self._TradeStats(**(feed_stats['trade_stats']))
    cross_stats_obj = self._CrossStats(**(feed_stats['cross_stats']))
    non_tbs_stats_obj = self._NontbsStats(**(feed_stats['non_tbs_stats']))

    # NOTE(review): 'intreval' is a typo for 'interval', but the key is set
    # consistently by insert_one_feed below; renaming it needs a check that
    # the base class never reads this key.
    if(feed_stats['intreval_stats']):
      feed_stats['intreval_stats']['feed_stats_id'] = feed_stats_id
      intreval_stats_obj = self._IntervalStats(**(feed_stats['intreval_stats']))
      self._insert_obj_to_db([book_stats_obj, trade_stats_obj, cross_stats_obj, non_tbs_stats_obj, intreval_stats_obj])
    else:
      self._insert_obj_to_db([book_stats_obj, trade_stats_obj, cross_stats_obj, non_tbs_stats_obj])

  def insert_one_feed(self, stats_info, book, trade, cross, non_tbs, interval):
    """Insert the index row plus the per-table stats dicts for one feed.

    `interval` may be None when interval stats were not computed.
    """
    feed_stats_index = FeedStatsIndex(**stats_info)
    feed_stats = {}
    feed_stats['book_stats'] = book
    feed_stats['trade_stats'] = trade
    feed_stats['cross_stats'] = cross
    feed_stats['non_tbs_stats'] = non_tbs
    feed_stats['intreval_stats'] = interval

    self.insert_feed_stats(feed_stats_index, feed_stats)

  def insert_feed_df(self, stats_info_df, book_df, trade_df, cross_df, non_tbs_df, interval_df):
    """Insert the stats dataframes row by row; row i of every frame belongs
    to the same feed.  interval_df may be None when intervals are disabled.
    """
    # NOTE(review): only the first four frames are length-checked against
    # each other; non_tbs_df / interval_df lengths are assumed to match.
    assert ((len(stats_info_df) == len(book_df)) and
           (len(book_df) == len(trade_df)) and
           (len(trade_df) == len(cross_df))), "feeds stats data format invalid"
    for i in range(len(stats_info_df)):
      stats_info_row = stats_info_df.iloc[i].to_dict()
      book_row = book_df.iloc[i].to_dict()
      trade_row = trade_df.iloc[i].to_dict()
      cross_row = cross_df.iloc[i].to_dict()
      non_tbs_row = non_tbs_df.iloc[i].to_dict()
      interval_row = None if interval_df is None else interval_df.iloc[i].to_dict()
      self.insert_one_feed(stats_info_row, book_row, trade_row, cross_row, non_tbs_row, interval_row)


def dump_stats_to_db(*,
                     output_root,
                     date_universe,
                     duration,
                     sub_request,
                     recipe,
                     group,
                     worker,
                     machine,
                     need_plot,
                     need_feed_csv,
                     run_for_raw,
                     interval):
  """Run the stats backrun for one subscription and import the results.

  Computes stats via calculate_stats, splits the concatenated result into
  the per-table dataframes, and inserts them into the coin2 feed-stats
  database.  With need_feed_csv, additionally dumps the per-feed h5
  frames to CSV next to the h5 files.
  """
  job_start_time = datetime.datetime.now()
  backrun_plan = calculate_stats(output_root=output_root,
                                 date_universe=date_universe,
                                 duration=duration,
                                 sub_request=sub_request,
                                 recipe=recipe,
                                 group=group,
                                 worker=worker,
                                 machine=machine,
                                 need_plot=need_plot,
                                 need_feed_csv=need_feed_csv,
                                 run_for_raw=run_for_raw,
                                 interval = interval)
  # basic_stat_concat contains everything; skip the import entirely when
  # the run produced no feeds.
  if len(backrun_plan.job.agg_result.recorder_input.universe_dataframe()) > 0:
    dataset = dataset_mdl.load_dataset(backrun_plan.job.agg_result.cfilename(0))
    stats_df = dataset.basic_stat_concat

    exchange_api_id = get_exchange_api_id(sub_request)
    feed_info_df = get_feed_stats_info(stats_df,
                                       feed_info_columns,
                                       sub_request.exchange,
                                       exchange_api_id,
                                       machine,
                                       recipe,
                                       worker,
                                       job_start_time,
                                       run_for_raw)
    book_df = get_book_stats(stats_df, book_columns)
    trade_df = get_trade_stats(stats_df, trade_columns)
    cross_df = get_cross_stats(stats_df, cross_columns)
    non_tbs_df = get_non_tbs_stats(stats_df,non_tbs_columns)
    # Interval stats are optional; downstream insert accepts None.
    interval_df = get_interval_stats(stats_df,interval_stat_columns) if interval else None

    importer = FeedStatsImporter(db_config=coin2_feed_stats_importer_config)
    importer.insert_feed_df(feed_info_df, book_df, trade_df, cross_df, non_tbs_df, interval_df)

    if need_feed_csv:
      # Dump every h5 part whose name shares the feed filename prefix.
      feed_filename = backrun_plan.job.ind_result.cfilename(0).feed_filename
      feed_file_dir, feed_filename_base = feed_filename.rsplit('/', 1)
      for filename in os.listdir(feed_file_dir):
        if filename.startswith(feed_filename_base):
          # NOTE(review): with skip_if_not=True read_data_frame may skip the
          # frame (presumably returning None), in which case to_csv below
          # would fail — confirm the skip path.
          feed_df = h5_io.read_data_frame(os.path.join(feed_file_dir, filename),
                                          None,
                                          skip_if_not=True)  # skipped due to its size sometimes
          feed_df.to_csv(os.path.join(feed_file_dir, '%s.csv' % filename), index=False)
