# Copyright (c) 2023 Presto Labs Pte. Ltd.
# Author: ziyan

import pypapyrus.dataset.presto_feed_dataset as dataset_mdl
import pandas as pd
from sqlalchemy.ext.automap import automap_base
from coin.support.feed_tool.feed_stats.app.feed_stats_motion.presto_feed_dataset import calculate_stats
from coin.support.feed_tool.feed_stats.app.feed_latency_stats.metadata_importer import FeedStatsImporter

# Maps the raw interval-statistic column names emitted by the feed stats
# pipeline to the column names used by the FeedLatencyStats DB table.
latency_stat_columns = {
    "book_message_timediff_interval__mean": "book_latency_avg",
    "book_message_timediff_interval__max": "book_latency_max",
    "trade_message_timediff_interval__mean": "trade_latency_avg",
    "trade_message_timediff_interval__max": "trade_latency_max",
}

# Per-column divisors applied when decoding the serialized integer latency
# values: dividing by 1e6 turns nanoseconds into milliseconds.
int_to_double_columns = {
    "book_latency_avg": 1e6,
    "book_latency_max": 1e6,
    "trade_latency_avg": 1e6,
    "trade_latency_max": 1e6,
}


class LatencyStatsImporter(FeedStatsImporter):
  """DB importer for feed latency statistics.

  Automaps the existing schema on top of the engine provided by
  FeedStatsImporter and exposes lookup of registered symbol ids plus bulk
  insertion into the FeedLatencyStats table.
  """

  def __init__(self):
    super().__init__()
    automapped = automap_base()
    # Reflect the existing tables from the live schema; exposes the
    # LatencyStatsMetadata mapped class used for id lookups below.
    automapped.prepare(autoload_with=self._engine)
    self._LatencyStatsMetadata = automapped.classes.LatencyStatsMetadata

  def get_registered_symbol_id(self, machine, mea):
    """Return {symbol: metadata id} for rows matching (machine, mea)."""
    session = self._get_session()
    rows = session.query(self._LatencyStatsMetadata).filter(
      self._LatencyStatsMetadata.machine == machine,
      self._LatencyStatsMetadata.mea == mea).all()
    return {row.symbol: row.id for row in rows}

  def insert_into_db(self, latency_stats_df):
    """Append the given dataframe to the FeedLatencyStats table."""
    latency_stats_df.to_sql('FeedLatencyStats', self._engine, if_exists='append', index=False)


def get_meta_df(dataset,sub_request,machine,db_reader):
  meta_df = dataset[['symbol']]
  meta_df["machine"] = machine
  meta_df["symbol"] = meta_df["symbol"].apply(lambda x: x.decode())
  mea =  ".".join([sub_request.market_type,sub_request.exchange,sub_request.api_version])
  meta_df["mea"]=mea
  registered_symbol_id = db_reader.get_registered_symbol_id(machine,mea)
  meta_df["id"]=[registered_symbol_id[s] if s in registered_symbol_id.keys() else None for s in meta_df["symbol"]]
  return meta_df

def get_latency_stats(dataset, columns):
  """Select the raw latency columns, rename them to DB names, and decode them.

  `columns` maps raw stat-column names to target names (see
  latency_stat_columns); the renamed frame is passed through
  transform_interval_columns to turn serialized strings into float lists.
  """
  renamed = dataset[list(columns)].rename(columns=columns)
  return transform_interval_columns(renamed)

def transform_interval_columns(dataset):
  """Decode serialized interval-latency cells into lists of floats, in place.

  Each cell is a byte string of comma-separated values with a trailing
  separator (e.g. b"1,2,3,"). Values are divided by the per-column divisor
  from int_to_double_columns (ns -> ms) and rounded to 2 decimals. A list
  that is entirely zeros is replaced by None.
  """
  def decode_cell(raw, divisor):
    parts = raw.decode("utf-8")[:-1].split(',')
    scaled = [round(float(p) / divisor, 2) for p in parts]
    return scaled if any(scaled) else None

  # Only transform the columns that actually have a configured divisor.
  for name in set(dataset.columns) & set(int_to_double_columns):
    dataset[name] = dataset[name].apply(decode_cell, args=(int_to_double_columns[name],))
  return dataset

def generate_latency_stats_df(start_time, interval, meta_df, latency_stats):
  """Expand per-symbol latency lists into one DB row per (symbol, interval).

  Args:
    start_time: timestamp-like start of the first interval.
    interval: interval length in nanoseconds.
    meta_df: per-symbol metadata (must include 'mea' and 'id' columns);
      index-aligned with latency_stats.
    latency_stats: per-symbol frame whose latency columns hold lists,
      one entry per interval.

  Returns:
    Dataframe with one row per interval per registered symbol, carrying
    latency columns, meta_id, and the interval start_time.
  """
  latency_stats = pd.concat([meta_df, latency_stats], axis=1)
  # Symbols without a registered metadata id cannot be stored.
  latency_stats.dropna(inplace=True, subset=['id'])
  frames = []
  for _, row in latency_stats.iterrows():
    periods = len(row['trade_latency_max'])
    dti = pd.Series(pd.date_range(start_time, periods=periods,
                                  freq=pd.DateOffset(nanoseconds=interval)))
    symbol_latency_df = pd.DataFrame()
    symbol_latency_df["trade_latency_avg"] = row["trade_latency_avg"]
    symbol_latency_df["trade_latency_max"] = row["trade_latency_max"]
    if row['mea'] == 'Spot.Binance.v1':
      # No book latency recorded for this feed; store zeros.
      symbol_latency_df["book_latency_avg"] = 0
      symbol_latency_df["book_latency_max"] = 0
    else:
      symbol_latency_df["book_latency_avg"] = row["book_latency_avg"]
      symbol_latency_df["book_latency_max"] = row["book_latency_max"]
    symbol_latency_df["meta_id"] = row["id"]
    symbol_latency_df["start_time"] = dti
    # All-zero latency lists arrive as None (see transform_interval_columns);
    # store them as 0.
    symbol_latency_df.fillna(0, inplace=True)
    frames.append(symbol_latency_df)
  # Concatenate once at the end instead of inside the loop: avoids
  # quadratic copying when many symbols are processed.
  return pd.concat(frames) if frames else pd.DataFrame()
  
def dump_latency_stats_to_db(*,
                     output_root,
                     date_universe,
                     duration,
                     sub_request,
                     recipe,
                     group,
                     worker,
                     machine,
                     need_plot,
                     need_feed_csv,
                     run_for_raw,
                     interval,
                     latency_stat_only):
  """Run the feed-stats backrun and persist per-interval latency rows.

  Delegates the heavy lifting to calculate_stats, then loads the aggregated
  dataset, builds the metadata/latency frames, and appends the result to
  the FeedLatencyStats table. Does nothing when the backrun produced an
  empty universe.
  """
  plan = calculate_stats(output_root=output_root,
                         date_universe=date_universe,
                         duration=duration,
                         sub_request=sub_request,
                         recipe=recipe,
                         group=group,
                         worker=worker,
                         machine=machine,
                         need_plot=need_plot,
                         need_feed_csv=need_feed_csv,
                         run_for_raw=run_for_raw,
                         interval=interval,
                         latency_stat_only=latency_stat_only)

  agg_result = plan.job.agg_result
  if len(agg_result.recorder_input.universe_dataframe()) == 0:
    return

  stats_df = dataset_mdl.load_dataset(agg_result.cfilename(0)).basic_stat_concat
  importer = LatencyStatsImporter()
  meta_df = get_meta_df(stats_df, sub_request, machine, importer)
  # NOTE(review): when `interval` is falsy this passes None downstream —
  # confirm callers always supply a non-zero interval on this path.
  latency_stats = get_latency_stats(stats_df, latency_stat_columns) if interval else None
  latency_stats_df = generate_latency_stats_df(date_universe[0], interval, meta_df, latency_stats)
  importer.insert_into_db(latency_stats_df)