# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: leon

import datetime
import logging
import os
import re
import typing
from collections import namedtuple
from collections import OrderedDict

import h5py
import numpy as np
import pandas as pd
import pytz

from coin.base.datetime_util import (
    to_datetime,
    to_timestamp_int,
    convert_string_to_dates,
)
from coin.base.param_util import to_list
from coin.exchange.base.proto_logger import (
    gen_log_reader_iter,)
from coin.exchange.base.order_gateway_logger import (
    enumerate_logs as enumerate_og_logs)
from coin.exchange.base.strategy_logger import (
    enumerate_logs as enumerate_strat_logs)
from coin.exchange.base.subscription_logger import (
    enumerate_logs as enumerate_subscription_logs)
from coin.flow.archive import ArchiveReader
from coin.proto.coin_log_pb2 import LogFileResponseProto
from coin.proto.coin_request_pb2 import (
    AccountRequestProto, ExchangeApiRequestProto, StrategyRequestProto)
from coin.support.proto_log.logic.log_info import (
    OgInfo, StratInfo, SubscriptionInfo)
from coin.util.queue.config import KafkaConfig
from coin.util.queue.constants import TopicType
from coin.util.queue.tools.kafka_archive import run_from_kafka_archive
from coin.util.queue.tools.kafka_topic import generate_kafka_topic

def _gen_log_dir(*, log_root, trading_date, machine):
  date_str = trading_date.strftime('%Y%m%d')
  log_dir = os.path.join(log_root, machine, date_str)
  return log_dir


def _to_timestamp_int(start_time, end_time):
  """Convert a (start, end) datetime pair to integer timestamps.

  NOTE(review): `.replace(tzinfo=pytz.UTC)` discards any existing tzinfo,
  so inputs are assumed to be naive-UTC or already UTC — confirm at callers.
  """
  return (to_timestamp_int(start_time.replace(tzinfo=pytz.UTC)),
          to_timestamp_int(end_time.replace(tzinfo=pytz.UTC)))


def _get_date_range(start_time, end_time):
  date_range = pd.date_range(start_time.date(), end_time.date()).tolist()
  date_range = [bd.to_pydatetime() for bd in date_range]
  return date_range


def log_timestamp_reset():
  """Create a stateful predicate that fires once per UTC day.

  The returned callable takes an integer log timestamp and returns True on
  the first call, and again each time the timestamp reaches the next UTC
  midnight recorded at the previous reset.
  """
  next_reset_ts = None

  def _should_reset(log_ts):
    nonlocal next_reset_ts
    log_dt = to_datetime(log_ts)
    day_after = log_dt.date() + datetime.timedelta(days=1)
    midnight = datetime.datetime(
        day_after.year, day_after.month, day_after.day).replace(tzinfo=pytz.UTC)
    if next_reset_ts is not None and log_ts < next_reset_ts:
      return False
    next_reset_ts = to_timestamp_int(midnight)
    return True

  return _should_reset

# Module-level singleton: the factory name is rebound to one shared checker.
log_timestamp_reset = log_timestamp_reset()


def run_from_og_log_archive(on_log_callback,
                            market_type,
                            exchange,
                            start_time: datetime.datetime,
                            end_time: datetime.datetime,
                            root_dir,
                            machine,
                            owner=None):
  """Replay archived order-gateway logs within [start_time, end_time].

  Enumerates one log directory per calendar day under root_dir/machine,
  then invokes on_log_callback(ts, log) for every record inside the
  window; iteration stops at the first record past end_time.
  """
  acct_request = AccountRequestProto(
      market_type=market_type, exchange=exchange, owner=owner)
  start_ts, end_ts = _to_timestamp_int(start_time, end_time)
  log_paths = []
  for trading_date in _get_date_range(start_time, end_time):
    day_dir = _gen_log_dir(
        log_root=root_dir, trading_date=trading_date, machine=machine)
    log_paths.extend(enumerate_og_logs(acct_request, trading_date, day_dir))
  for ts, log in gen_log_reader_iter(log_paths):
    if ts > end_ts:
      break
    if ts >= start_ts:
      on_log_callback(ts, log)


def run_from_strat_log_archive(*,
                               on_log_callback,
                               start_time: datetime.datetime,
                               end_time: datetime.datetime,
                               root_dir,
                               machine,
                               strategy_name):
  """Replay archived strategy logs within [start_time, end_time].

  Invokes on_log_callback(ts, log) for every record inside the window;
  iteration stops at the first record past end_time.
  """
  start_ts, end_ts = _to_timestamp_int(start_time, end_time)
  log_paths = []
  for trading_date in _get_date_range(start_time, end_time):
    day_dir = _gen_log_dir(
        log_root=root_dir, trading_date=trading_date, machine=machine)
    log_paths.extend(enumerate_strat_logs(
        StrategyRequestProto(strategy_name=strategy_name), trading_date, day_dir))
  for ts, log in gen_log_reader_iter(log_paths):
    if ts > end_ts:
      break
    if ts >= start_ts:
      on_log_callback(ts, log)

def _gen_pt1m_dir(*, log_root, trading_date):
  date_str = trading_date.strftime('%Y%m%d')
  log_dir = os.path.join(log_root, date_str)
  return log_dir

def byte2str(k):
  """Decode *k* to a native str.

  Accepts bytes / np.bytes_ (decoded as UTF-8) and str / np.str_
  (returned unchanged).

  Raises:
    TypeError: for any other input type.
  """
  # isinstance replaces the original exact-type comparison and still
  # covers the numpy scalars: np.bytes_ subclasses bytes and np.str_
  # subclasses str.
  if isinstance(k, bytes):
    return k.decode('utf8')
  if isinstance(k, str):
    return k
  raise TypeError('bad type, ', type(k))

def run_from_strat_log_pt1m(*,
                            on_log_callback,
                            start_time: datetime.datetime,
                            end_time: datetime.datetime,
                            root_dir,
                            strategy_name):
  """Replay per-minute (pt1m) strategy stats from daily HDF5 archives.

  For every row whose timestamp falls inside [start_time, end_time],
  emits one 'symbol_stat' dict per symbol and one 'pnl_currency_stat'
  dict per pnl currency via on_log_callback(ts, log). Missing or
  unreadable daily files are skipped with an info log (best effort).
  """
  start_ts, end_ts = _to_timestamp_int(start_time, end_time)
  files = []
  for dt in _get_date_range(start_time, end_time):
    log_dir = _gen_pt1m_dir(log_root=root_dir, trading_date=dt)
    log_path = '%s/%s.h5' % (log_dir, strategy_name)
    try:
      files.append(h5py.File(log_path, 'r', libver="latest"))
    # Bug fix: was a bare `except:` that also swallowed
    # KeyboardInterrupt/SystemExit; h5py raises OSError on open failure.
    except OSError:
      logging.info('Fail to read pt1m log: %s' % log_path)

  try:
    for f in files:
      _emit_pt1m_file(f, start_ts, end_ts, on_log_callback)
  finally:
    # Bug fix: originally files were only closed on the happy path, so an
    # exception from a callback leaked every remaining open file handle.
    for f in files:
      f.close()


def _emit_pt1m_file(f, start_ts, end_ts, on_log_callback):
  """Emit all in-window rows of one open pt1m HDF5 file via the callback."""
  nrows = f['times'].shape[0]
  nsymbols = f['symbols'].shape[0]
  npnl_currencies = f['pnl_currencies'].shape[0]
  symbols = list(map(byte2str, f['symbols']))
  pnl_currencies = list(map(byte2str, f['pnl_currencies']))

  # Columns are classified purely by their shape, so the two counts must
  # differ or the split below would be ambiguous.
  assert nsymbols != npnl_currencies, "Cannot distinguish which is symbol column. Exceptional case"
  pnl_currency_cols = [k for k in f.keys()
                       if f[k].shape == (nrows, npnl_currencies)]
  symbol_cols = [k for k in f.keys() if f[k].shape == (nrows, nsymbols)]

  exchange = byte2str(f['exchange'][0])
  exchange_type = f['exchange_type'][0]
  market = byte2str(f['market'][0])
  market_type = f['market_type'][0]

  # Materialize each dataset once up front; per-cell h5py access inside
  # the row loop would be far slower.
  loaded = {c: np.array(f[c]) for c in symbol_cols + pnl_currency_cols}

  for i in range(nrows):
    ts = f['times'][i]
    if ts < start_ts:
      continue
    if ts > end_ts:
      # Rows are assumed time-ordered within a file, so stop scanning it.
      break
    for s in range(nsymbols):
      log = {
        'type': 'symbol_stat',
        'timestamp': ts,
        'exchange': exchange,
        'exchange_type': exchange_type,
        'market': market,
        'market_type': market_type,
        'symbol': symbols[s],
      }
      for c in symbol_cols:
        log[c] = loaded[c][i][s]
      on_log_callback(ts, log)
    for s in range(npnl_currencies):
      log = {
        'type': 'pnl_currency_stat',
        'timestamp': ts,
        'exchange': exchange,
        'exchange_type': exchange_type,
        'market': market,
        'market_type': market_type,
        'currency': pnl_currencies[s],
      }
      for c in pnl_currency_cols:
        log[c] = loaded[c][i][s]
      on_log_callback(ts, log)

def run_from_subscription_log_archive(*,
                                      on_log_callback,
                                      start_time: datetime.datetime,
                                      end_time: datetime.datetime,
                                      root_dir,
                                      machine,
                                      market_type,
                                      exchange,
                                      api_version):
  """Replay archived subscription logs within [start_time, end_time].

  Invokes on_log_callback(ts, log) for every record inside the window;
  iteration stops at the first record past end_time.
  """
  api_request = ExchangeApiRequestProto(
      market_type=market_type, exchange=exchange, api_version=api_version)
  start_ts, end_ts = _to_timestamp_int(start_time, end_time)
  log_paths = []
  for trading_date in _get_date_range(start_time, end_time):
    day_dir = _gen_log_dir(
        log_root=root_dir, trading_date=trading_date, machine=machine)
    log_paths.extend(
        enumerate_subscription_logs(api_request, trading_date, day_dir))
  for ts, log in gen_log_reader_iter(log_paths):
    if ts > end_ts:
      break
    if ts >= start_ts:
      on_log_callback(ts, log)


def resolve_trading_date_range_string(trading_date_string):
  """Parse 'YYYYMMDD' or 'YYYYMMDD-YYYYMMDD' into a list of datetimes."""
  if re.match('^[0-9]{8}-[0-9]{8}$', trading_date_string):
    return convert_string_to_dates(trading_date_string)
  return [datetime.datetime.strptime(trading_date_string, '%Y%m%d')]


def _enumerate_machines(root_dir):
  machines = []
  assert os.path.isdir(root_dir)
  for machine in os.listdir(root_dir):
    assert os.path.isdir(root_dir)
    machines.append(machine)
  return machines


def enumerate_og_log_into_oginfos(root_dir, trading_date_str, parse_f):
  """Scan <root_dir>/<machine>/<trading_date_str> and build OgInfos.

  parse_f maps a file name to a response proto, or None to skip it. One
  OgInfo is produced per distinct (market_type, exchange, owner, machine,
  date) combination, in first-seen order.
  """
  machines = _enumerate_machines(root_dir)
  assert machines, 'No machine found.'

  og_log_map = OrderedDict()
  for machine in machines:
    day_dir = os.path.join(root_dir, machine, trading_date_str)
    if not os.path.isdir(day_dir):
      continue
    for file_name in os.listdir(day_dir):
      resp = parse_f(file_name)
      if resp is None:
        continue
      acct = resp.acct_req
      key = (acct.market_type, acct.exchange, acct.owner,
             machine, trading_date_str)
      og_log_map.setdefault(key, []).append(file_name)

  return [OgInfo(*key) for key in og_log_map]


def group_og_log(og_logs):
  """Group og_log entries by (machine, strategy_name), preserving order."""
  grouped = OrderedDict()
  for entry in og_logs:
    grouped.setdefault((entry.machine, entry.strategy_name), []).append(entry)
  return grouped.values()


def enumerate_strat_log_into_stratinfos(root_dir, trading_date_str, parse_f):
  """Scan <root_dir>/<machine>/<trading_date_str> and build StratInfos.

  parse_f maps a file name to a response proto, or None to skip it.
  File names that parse_f fails on are logged and skipped (best effort).
  One StratInfo is produced per distinct (strategy_name, machine, date)
  combination, in first-seen order.
  """
  machines = _enumerate_machines(root_dir)
  assert machines, 'No machine found.'

  strat_log_map = OrderedDict()
  for machine in machines:
    log_path = os.path.join(root_dir, machine, trading_date_str)
    if not os.path.isdir(log_path):
      continue
    for file_name in os.listdir(log_path):
      try:
        resp = parse_f(file_name)
        if resp is None:
          continue
        key = (resp.strat_req.strategy_name, machine, trading_date_str)
        strat_log_map.setdefault(key, []).append(file_name)
      # Bug fix: was a bare `except:`, which also swallowed
      # KeyboardInterrupt/SystemExit. Keep the best-effort skip but only
      # for ordinary errors.
      except Exception:
        logging.info('Fail to parse file name: %s' % os.path.join(log_path, file_name))

  return [StratInfo(*key) for key in strat_log_map]


def enumerate_sub_log_into_subinfos(root_dir, trading_date_str, parse_f):
  """Scan <root_dir>/<machine>/<trading_date_str> and build SubscriptionInfos.

  parse_f maps a file name to a response proto, or None to skip it. One
  SubscriptionInfo is produced per distinct (market_type, exchange,
  api_version, machine, date) combination, in first-seen order.
  """
  machines = _enumerate_machines(root_dir)
  assert machines, 'No machine found.'

  sub_log_map = OrderedDict()
  for machine in machines:
    day_dir = os.path.join(root_dir, machine, trading_date_str)
    if not os.path.isdir(day_dir):
      continue
    for file_name in os.listdir(day_dir):
      resp = parse_f(file_name)
      if resp is None:
        continue
      acct = resp.acct_req
      key = (acct.market_type, acct.exchange, acct.api_version,
             machine, trading_date_str)
      sub_log_map.setdefault(key, []).append(file_name)

  return [SubscriptionInfo(*key) for key in sub_log_map]


def generate_log_info(machine, log_file_rsp):
  """Translate a LogFileResponseProto into the matching *Info record.

  Raises:
    ValueError: for an unrecognized log file type.
  """
  assert isinstance(log_file_rsp, LogFileResponseProto)
  trading_date = datetime.datetime.strptime(log_file_rsp.date, '%Y%m%d').date()
  if log_file_rsp.type == LogFileResponseProto.OG_LOG:
    req = log_file_rsp.acct_req
    return OgInfo(market_type=req.market_type,
                  exchange=req.exchange,
                  owner=req.owner,
                  machine=machine,
                  trading_date=trading_date)
  if log_file_rsp.type == LogFileResponseProto.STRAT_LOG:
    return StratInfo(strategy_name=log_file_rsp.strat_req.strategy_name,
                     machine=machine,
                     trading_date=trading_date)
  if log_file_rsp.type == LogFileResponseProto.SUBSCRIPTION_LOG:
    # Subscription responses also carry their request in acct_req.
    req = log_file_rsp.acct_req
    return SubscriptionInfo(market_type=req.market_type,
                            exchange=req.exchange,
                            api_version=req.api_version,
                            machine=machine,
                            trading_date=trading_date)
  raise ValueError(
      'Unknown log type %s' % log_file_rsp.LogFileType.Name(log_file_rsp.type))


def read_og_log(*, og_info, start_time, end_time, callback,
                kafka_config=None, kafka_topic_type=None, root_dir=None):
  """Replay one order-gateway stream, from Kafka if configured, else disk."""
  if kafka_config is not None:
    topic = generate_kafka_topic(
        kafka_topic_type,
        OgInfo(market_type=og_info.market_type,
               exchange=og_info.exchange,
               owner=og_info.owner,
               machine=og_info.machine,
               trading_date=None))
    run_from_kafka_archive(topics=to_list(topic),
                           kafka_config=kafka_config,
                           timestamp_from=to_timestamp_int(start_time),
                           timestamp_to=to_timestamp_int(end_time),
                           callback=callback)
  else:
    run_from_og_log_archive(on_log_callback=callback,
                            market_type=og_info.market_type,
                            exchange=og_info.exchange,
                            start_time=start_time,
                            end_time=end_time,
                            root_dir=root_dir,
                            machine=og_info.machine,
                            owner=og_info.owner)


def read_strat_log(*,
                   strat_info: typing.Union[StratInfo, typing.List[StratInfo]],
                   start_time: datetime.datetime,
                   end_time: datetime.datetime,
                   callback: typing.Callable[[int, bytes], None],
                   kafka_config: typing.Optional[KafkaConfig]=None,
                   kafka_topic_type: typing.Optional[TopicType]=None,
                   root_dir: typing.Optional[str]=None):
  """Replay strategy logs for one or more StratInfos.

  Reads from Kafka when kafka_config is given, otherwise from on-disk
  archives under root_dir. callback(ts, data) is invoked for every
  record whose timestamp lies inside [start_time, end_time].
  """
  infos = to_list(strat_info)
  if not infos:
    return

  if kafka_config is not None:
    topics = [generate_kafka_topic(kafka_topic_type,
                                   StratInfo(machine=info.machine,
                                             strategy_name=info.strategy_name,
                                             trading_date=None))
              for info in infos]
    run_from_kafka_archive(topics=topics,
                           kafka_config=kafka_config,
                           timestamp_from=to_timestamp_int(start_time),
                           timestamp_to=to_timestamp_int(end_time),
                           callback=callback)
    return

  start_ts = to_timestamp_int(start_time)
  end_ts = to_timestamp_int(end_time)
  log_files = []
  for trading_date in _get_date_range(start_time, end_time):
    for info in infos:
      day_dir = _gen_log_dir(log_root=root_dir,
                             trading_date=trading_date,
                             machine=info.machine)
      req = StrategyRequestProto(strategy_name=info.strategy_name)
      log_files.extend(enumerate_strat_logs(req, trading_date, day_dir))
  with ArchiveReader.from_file(log_files) as reader:
    while True:
      record = reader.read()
      if record is None:
        break
      ts = record.timestamp
      if ts > end_ts:
        break
      if ts >= start_ts:
        callback(ts, record.data)


def read_subscription_log(*, subscription_info, start_time, end_time, callback,
                          kafka_config=None, kafka_topic_type=None, root_dir=None):
  """Replay one subscription stream, from Kafka if configured, else disk."""
  if kafka_config is not None:
    topic = generate_kafka_topic(
        kafka_topic_type,
        SubscriptionInfo(market_type=subscription_info.market_type,
                         exchange=subscription_info.exchange,
                         api_version=subscription_info.api_version,
                         machine=subscription_info.machine,
                         trading_date=None))
    run_from_kafka_archive(topics=to_list(topic),
                           kafka_config=kafka_config,
                           timestamp_from=to_timestamp_int(start_time),
                           timestamp_to=to_timestamp_int(end_time),
                           callback=callback)
  else:
    run_from_subscription_log_archive(on_log_callback=callback,
                                      start_time=start_time,
                                      end_time=end_time,
                                      root_dir=root_dir,
                                      machine=subscription_info.machine,
                                      market_type=subscription_info.market_type,
                                      exchange=subscription_info.exchange,
                                      api_version=subscription_info.api_version)
