# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: jhkim

import copy
import logging
import os

from absl.flags import FLAGS, DEFINE_string

from coin.database.base.serializer import CsvSerializer
from coin.database.base.serializer import TextSerializer
from coin.database.base.numpy_serializer import NumpySerializer
from coin.database.base.data_util import DataUtil

# When set, caches are written under "<date>_<test_build_id>" instead of
# "<date>", keeping experimental builds separate from production caches.
DEFINE_string('test_build_id', None, 'if set, build cache at YYYYMMDD_{test_build_id}')

# Root directory of the local static cache tree.
DEFINE_string('static_cache_root', os.path.expanduser("~/matrix_cache"), '')


def wait_for_futures(futures):
  """Block until every future in `futures` has completed.

  Args:
    futures: iterable of concurrent.futures.Future-like objects (anything
      exposing .result() that re-raises the task's exception).

  Raises:
    The first exception raised by any future, with its original traceback
    preserved. The failure is logged (with traceback) before re-raising.
  """
  for future in futures:
    try:
      future.result()
    except Exception:
      # Python 3 exceptions have no `.message` attribute — the old
      # `logging.info(exc.message)` raised AttributeError and masked the
      # real failure. logging.exception records the full traceback, and a
      # bare `raise` re-raises without disturbing it.
      logging.exception("future failed")
      raise


class CacheBuilderBase(object):
  """Base class for per-date cache builders.

  Resolves where caches for a target date live on disk — honoring the
  --static_cache_root and --test_build_id flags — and offers read/write
  helpers for numpy, CSV, and plain-text cache entries.
  """

  def __init__(self, date):
    self._target_date = date
    self._test_build_id = FLAGS.test_build_id
    self._static_cache_root = FLAGS.static_cache_root
    self._serializer = NumpySerializer()
    self._text_serializer = TextSerializer()
    self._csv_serializer = CsvSerializer()
    if self._test_build_id is not None:
      logging.warning("test build, id: %s", self._test_build_id)

  def get_date(self):
    """Return the date this builder targets."""
    return self._target_date

  def get_date_dirname(self):
    """Directory name for the target date; suffixed with the test build id."""
    if self._test_build_id is None:
      return str(self.get_date())
    return "%s_%s" % (self.get_date(), self._test_build_id)

  def get_local_cache_root(self):
    """Path of this date's cache directory under the static cache root."""
    return os.path.join(self._static_cache_root, self.get_date_dirname())

  def read_cache(self, cache_dir_name, vendor, cache_name, dict_etc=None):
    """Deserialize and return the numpy cache entry addressed by the args."""
    address = self.combine_dict(cache_dir_name, vendor, cache_name, dict_etc)
    return self._serializer.deserialize(address)

  def build_cache(self, cache_dir_name, vendor, cache_name, data, compress, dict_etc=None):
    """Serialize `data` as a numpy cache entry, optionally compressed."""
    address = self.combine_dict(cache_dir_name, vendor, cache_name, dict_etc)
    self._serializer.serialize(address, data, compress)

  def build_csv_cache(self, cache_dir_name, vendor, cache_name, data, dict_etc=None):
    """Serialize `data` as an uncompressed CSV cache entry."""
    address = self.combine_dict(cache_dir_name, vendor, cache_name, dict_etc)
    self._csv_serializer.serialize(address, data, False)

  def build_text_cache(self, cache_dir_name, vendor, cache_name, text, dict_etc=None):
    """Serialize `text` as an uncompressed plain-text cache entry."""
    address = self.combine_dict(cache_dir_name, vendor, cache_name, dict_etc)
    self._text_serializer.serialize(address, text, False)

  def combine_dict(self, cache_dir_name, vendor, cache_name, dict_etc):
    """Build the addressing dict the serializers consume.

    `dict_etc` is deep-copied so the caller's dict is never mutated. The
    standard addressing keys below overwrite any same-named keys from it,
    while a caller-supplied 'sub_item' is kept (default: 'value').
    """
    combined = {} if dict_etc is None else copy.deepcopy(dict_etc)
    combined['daily_feed_root'] = self._static_cache_root
    combined['date'] = self.get_date_dirname()
    combined['data_set'] = vendor
    combined['table'] = cache_dir_name
    combined['item'] = cache_name
    combined.setdefault('sub_item', 'value')
    return combined
