# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: jhkim, jshin, yuxuan

import datetime
import logging

import numpy
from absl.flags import FLAGS, DEFINE_integer, DEFINE_bool

from coin.database.base.data_util import DataUtil
from coin.database.builder.cache_builder_base import CacheBuilderBase

# Flags controlling partial ("delta") cache builds.
DEFINE_integer("delta_data_days", 5, "Number of past days to query from DB.")

# When True, a mismatch between the previous full cache and the overlapping
# rows of today's partial data is a hard failure instead of just a warning.
DEFINE_bool("assert_no_bias",
            False,
            "Crash if bias between prev full and today partial data is found.")


def _get_prev_trading_date(date):
  prev_date = datetime.datetime.strptime(str(date), '%Y%m%d') \
      - datetime.timedelta(days=1)
  return int(prev_date.strftime('%Y%m%d'))


def _get_next_trading_date(date):
  next_date = datetime.datetime.strptime(str(date), '%Y%m%d') \
      + datetime.timedelta(days=1)
  return int(next_date.strftime('%Y%m%d'))


# This class will function as interface to available db data.
# This will work as a provider for high-level, user friendly interface.
class CacheBuilderBase2(CacheBuilderBase):
  """Cache builder that can run in "partial" (delta) mode.

  In partial mode a second loader for the previous calendar date is kept, and
  build_cache() merges that date's full cache with today's delta before
  writing, instead of writing the delta as-is.
  """

  def __init__(self, date, partial=False):
    """Args:
      date: build date as a YYYYMMDD integer.
      partial: if True, only the last FLAGS.delta_data_days dates are kept in
        this builder's date axis, and build_cache() merges with the previous
        date's full cache.
    """
    CacheBuilderBase.__init__(self, date)
    self._all_dates = None    # set by init_date_symbol()
    self._all_symbols = None  # set by init_date_symbol()
    self._partial = partial
    self._assert_no_bias = FLAGS.assert_no_bias
    if self._partial:
      prev_date = _get_prev_trading_date(date)
      self._previous_loader = CacheBuilderBase2(prev_date)

  def get_all_dates(self):
    """Return all dates covered by this builder, loading metadata on demand."""
    self.ensure_initialized()
    return self._all_dates

  def get_tmr_dates(self):
    """Return dates shifted forward by one: dates[1:] plus the day after the last."""
    all_dates = self.get_all_dates()
    return list(all_dates[1:]) + [_get_next_trading_date(all_dates[-1])]

  def get_next_dates(self):
    """Return all dates plus the day after the last one."""
    all_dates = self.get_all_dates()
    return list(all_dates) + [_get_next_trading_date(all_dates[-1])]

  def get_next_date(self, dateint):
    """Return the calendar day after `dateint` (YYYYMMDD int)."""
    return _get_next_trading_date(dateint)

  def is_initialized(self):
    """True once both the date and symbol axes have been loaded."""
    return (self._all_dates is not None) and (self._all_symbols is not None)

  def ensure_initialized(self):
    """Load metadata if not yet loaded; safe to call repeatedly.

    Fix: init_metadata() asserts the builder is NOT yet initialized, so the
    previous unconditional call crashed on any second invocation (e.g. two
    calls to get_all_dates()).
    """
    if not self.is_initialized():
      self.init_metadata('presto')
    assert self.is_initialized()

  def init_date_symbol(self, all_dates, all_symbols):
    """Install the date/symbol axes; trims dates to the delta window in partial mode."""
    assert not self.is_initialized()
    if self._partial:
      all_dates = all_dates[-FLAGS.delta_data_days:]
    self._all_dates = all_dates
    self._all_symbols = all_symbols

  def init_metadata(self, metadata_dir):
    """Read date/symbol metadata from `metadata_dir` and initialize this
    builder (and, in partial mode, the previous-date loader)."""
    assert not self.is_initialized()
    all_dates, all_symbols = self.read_metadata(metadata_dir)
    self.init_date_symbol(all_dates, all_symbols)
    if self._partial:
      self._previous_loader.init_metadata(metadata_dir)

  def read_metadata(self, metadata_dir):
    """Return (all_dates, all_symbols) read from the metadata caches."""
    all_dates = self.read_cache('metadata', metadata_dir, 'dates', {'type': 'vector', 'align': 'd'})
    all_symbols = self.read_cache('metadata',
                                  metadata_dir,
                                  'symbols', {
                                      'type': 'vector', 'align': 'i'
                                  }).astype(str)
    return all_dates, all_symbols

  def build_metadata(self, metadata_dir, all_dates, all_symbols):
    """Write the dates/symbols metadata caches plus per-cell exch/ticker matrices.

    Each symbol is expected to look like "<EXCHANGE>-<TICKER>" (exactly one
    dash); the exchange and ticker parts are broadcast down the date axis.
    """
    CacheBuilderBase.build_cache(self,
                                 'metadata',
                                 metadata_dir,
                                 'dates',
                                 numpy.array(all_dates),
                                 True, {'align': 'd'})
    self.build_csv_cache('metadata', metadata_dir, 'dates', numpy.array(all_dates), {'align': 'd'})
    CacheBuilderBase.build_cache(self,
                                 'metadata',
                                 metadata_dir,
                                 'symbols',
                                 numpy.array(all_symbols),
                                 True, {'align': 'i'})
    self.build_csv_cache('metadata',
                         metadata_dir,
                         'symbols',
                         numpy.array(all_symbols), {'align': 'i'})

    # Fixed-width byte strings: tickers up to 10 chars, exchanges up to 8.
    tickers = numpy.empty((len(all_dates), len(all_symbols))).astype('S10')
    exchs = numpy.empty((len(all_dates), len(all_symbols))).astype('S8')
    tickers[:] = ""
    exchs[:] = ""
    for i, symb in enumerate(all_symbols):
      splits = symb.split("-")
      assert len(splits) == 2
      exchange, ticker = splits[0], splits[1]
      tickers[:, i] = ticker
      exchs[:, i] = exchange
    CacheBuilderBase.build_cache(self,
                                 'metadata',
                                 metadata_dir,
                                 'exch',
                                 exchs,
                                 True, {'align': 'di'})
    CacheBuilderBase.build_cache(self,
                                 'metadata',
                                 metadata_dir,
                                 'ticker',
                                 tickers,
                                 True, {'align': 'di'})

  def build_cache(self,
                  cache_dir_name,
                  vendor,
                  cache_name,
                  data,
                  compress,
                  dict_etc=None,
                  force_full=False):
    """Write a cache; in partial mode, merge `data` with the previous date's
    full cache first.

    Args:
      cache_dir_name, vendor, cache_name: cache location, passed through to
        the base class.
      data: today's data (full, or delta when partial).
      compress: passed through to the base class.
      dict_etc: optional cache options dict ('align', 'sub_item', ...).
      force_full: write `data` as-is even when this builder is partial.
    """
    if self._partial and not force_full:
      # Fix: work on a copy so we neither crash on the item assignment below
      # when dict_etc is None, nor mutate the caller's dict.
      dict_etc = dict(dict_etc) if dict_etc else {}
      # Date-by-symbol aligned caches are matrices; everything else a vector.
      dict_etc['type'] = 'matrix' \
          if dict_etc.get('align', None) in ['di', 'di2'] else 'vector'
      is_vector = dict_etc['type'] == 'vector'
      previous_data = self._previous_loader.read_cache(cache_dir_name,
                                                       vendor,
                                                       cache_name,
                                                       dict_etc=dict_etc)
      assert previous_data is not None, "prev cache not found, %s" % cache_name
      assert_no_bias = self._assert_no_bias
      sub_item = dict_etc.get('sub_item', None)
      if is_vector:
        if dict_etc.get('align', None) == 'd':
          # Date-aligned vectors grow by exactly one entry per day.
          data2 = _merge_prev_and_today_vector(previous_data, data[-1])
        else:
          data2 = _merge_prev_and_today_vector(previous_data, data)
        CacheBuilderBase.build_cache(self,
                                     cache_dir_name,
                                     vendor,
                                     cache_name,
                                     data2,
                                     compress,
                                     dict_etc=dict_etc)
      else:
        if sub_item == 'index':
          # Presumably rebases delta index offsets onto the running total
          # from the previous full data -- TODO confirm. Bias checking is
          # meaningless after this shift. NOTE: mutates `data` in place
          # (the caller's array).
          assert_no_bias = False
          data[:, :, 0] += previous_data[:, :, 1].sum()
        data2 = _merge_prev_and_today(cache_name,
                                      self._previous_loader._all_dates,
                                      self._previous_loader._all_symbols,
                                      previous_data,
                                      self._all_dates,
                                      self._all_symbols,
                                      data,
                                      assert_no_bias)
        CacheBuilderBase.build_cache(self,
                                     cache_dir_name,
                                     vendor,
                                     cache_name,
                                     data2,
                                     compress,
                                     dict_etc=dict_etc)
    else:
      # Full build (or explicitly forced): write today's data untouched.
      CacheBuilderBase.build_cache(self,
                                   cache_dir_name,
                                   vendor,
                                   cache_name,
                                   data,
                                   compress,
                                   dict_etc=dict_etc)


def _symbol_changes(prev_symbols, delta_symbols):
  if prev_symbols.shape != delta_symbols.shape:
    return True
  return numpy.any(prev_symbols != delta_symbols)


def _find_shift_vector(prev_all_symbols, delta_symbols):
  existing = numpy.isin(delta_symbols, prev_all_symbols)
  prev_symbols_to_index = {symbol: i for i, symbol in enumerate(prev_all_symbols)}
  prev_data_shift_vector = []
  new_symbols_count = 0
  for i, symbol in enumerate(delta_symbols):
    if existing[i]:
      prev_data_shift_vector.append(prev_symbols_to_index[symbol])
    else:
      prev_data_shift_vector.append(prev_all_symbols.shape[0] + new_symbols_count)
      new_symbols_count += 1
  assert new_symbols_count == numpy.sum(~existing)
  existing_symbols_index = numpy.where(existing)[0]
  return numpy.array(prev_data_shift_vector), existing_symbols_index, \
      new_symbols_count


def _has_bias(prev_data, delta_data):
  return ~numpy.all(numpy.nan_to_num(prev_data) == numpy.nan_to_num(delta_data))


def _merge_prev_and_today_vector(prev_data, delta_data):
  if delta_data.dtype.type is numpy.unicode_:
    return numpy.hstack([prev_data, delta_data.astype(numpy.string_)])
  return numpy.hstack([prev_data, delta_data])


def _get_filled_array(fill, dtype, shape):
  if dtype.type is numpy.int_ and fill is None:
    return numpy.full(shape, 0, dtype=dtype)
  return numpy.full(shape, fill, dtype=dtype)


def _merge_prev_and_today(cache_name,
                          prev_all_dates,
                          prev_all_symbols,
                          prev_data,
                          delta_dates,
                          delta_symbols,
                          delta_data,
                          assert_no_bias=True,
                          fill=None):
  """Stack today's newest delta row onto the previous full (date x symbol) data.

  Args:
    cache_name: name used only in bias warning/assert messages.
    prev_all_dates: 1-D array of dates covered by prev_data's rows.
    prev_all_symbols: 1-D array naming prev_data's columns.
    prev_data: previous full data, 2-D (date x symbol) or 3-D
      (date x symbol x 2); higher ranks raise NotImplementedError.
    delta_dates: dates covered by delta_data's rows; only the last one is
      appended to the result.
    delta_symbols: symbols naming delta_data's columns; may differ from
      prev_all_symbols (new symbols get filler-padded historical columns).
    assert_no_bias: crash when overlapping rows disagree, instead of only
      logging a warning.
    fill: filler for brand-new symbols' historical cells (None -> 0 for
      int dtypes, NaN for floats, via _get_filled_array).

  Returns:
    prev_data's rows (possibly column-shifted) plus one row for
    delta_dates[-1].
  """
  prev_date_count = prev_all_dates.shape[0]
  delta_date_count = delta_dates.shape[0]
  assert prev_date_count > 0
  assert delta_date_count > 0
  # Rows of delta_data up to and including prev's last date overlap the
  # previous full data; start_index is the size of that overlap (0 when
  # prev's last date is absent from delta_dates).
  idx = numpy.where(delta_dates == prev_all_dates[-1])[0]
  start_index = 0 if idx.shape[0] == 0 else idx[0] + 1
  all_dates = numpy.hstack([prev_all_dates, delta_dates[-1:]])
  assert all_dates.shape[0] > prev_date_count

  if not _symbol_changes(prev_all_symbols, delta_symbols):
    # Same symbol universe: bias-check the overlap, then append the single
    # newest delta row.
    if start_index > 0:
      if _has_bias(prev_data[-start_index:, :], delta_data[:start_index, :]):
        logging.warning("Bias found between previous full and delta %s" % cache_name)
        if assert_no_bias:
          assert False, "Bias found for cache %s" % cache_name
    combined_data = numpy.vstack([prev_data[:, :], delta_data[-1:, :]])
  else:
    # Symbol universe changed: reorder (and, for new symbols, widen) the
    # previous data's columns into delta's column layout.
    prev_data_shift_vector, existing_symbols_index, new_symbols_count = \
        _find_shift_vector(prev_all_symbols, delta_symbols)
    logging.info("Existing symbols count %d, keep %d" %
                 (prev_all_symbols.shape[0], prev_data_shift_vector.shape[0]))
    logging.info("New symbols count %d, keep %d" % (delta_symbols.shape[0], delta_symbols.shape[0]))
    if new_symbols_count > 0:
      # Pad historical rows with filler columns for never-seen symbols,
      # then permute every column into delta order.
      if prev_data.ndim < 3:
        patch = _get_filled_array(fill, prev_data.dtype, (prev_data.shape[0], new_symbols_count))
      elif prev_data.ndim == 3:
        patch = _get_filled_array(fill, prev_data.dtype, (prev_data.shape[0], new_symbols_count, 2))
      else:
        raise NotImplementedError("Unexpected ndim %d" % (prev_data.ndim))
      shifted_prev_data = \
          numpy.hstack([prev_data, patch])[:, prev_data_shift_vector]
    else:
      shifted_prev_data = prev_data[:, prev_data_shift_vector]
    combined_data = numpy.vstack(
        [shifted_prev_data, [delta_data[-1]] if prev_data.ndim == 3 else delta_data[-1]])
    # NOTE(review): this bias check slices delta_data[:-1], which only lines
    # up with shifted_prev_data[-start_index:] when start_index equals
    # delta_date_count - 1 (the usual case: prev ends exactly one day before
    # delta's last date). The symbol-unchanged branch above uses
    # delta_data[:start_index] instead -- confirm the asymmetry is intended.
    if _has_bias(shifted_prev_data[-start_index:, existing_symbols_index],
                 delta_data[:-1, existing_symbols_index]):
      logging.warning("Bias found between previous full and delta %s" % cache_name)
      if assert_no_bias:
        assert False, "Bias found for cache %s" % cache_name
  return combined_data
