# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: jhkim

from absl import flags

from pyfastfeature.feature.feature_dumper import FeatureDumper

import coin.strategy.mm.dumper_base as dbase
import coin.strategy.mm.fastfeature.feed_manager as fmgr
import coin.strategy.mm.fastfeature.linear_model_factory as lmf

import coin.strategy.mm.tool.archive_base as abase


class FastFeatureDumper(object):
  """Glue between a FeedManager's feed callbacks and a pyfastfeature FeatureDumper.

  Subscribes to book/trade events via a FeedManager and forwards each event
  through the supplied feed converter into the underlying FeatureDumper.
  """

  def __init__(self,
               product_map,
               aggregation_map,
               feedspec,
               config_filename,
               recorder_config_filename,
               feed_converter):
    """Wires feed manager -> converter -> feature dumper.

    Args:
      product_map: mapping of product -> product proto; keys drive book resets,
        values configure the FeatureDumper.
      aggregation_map: aggregation config handed straight to the FeedManager.
      feedspec: feed specification handed straight to the FeedManager.
      config_filename: feature config file for the FeatureDumper.
      recorder_config_filename: recorder config file for the FeatureDumper.
      feed_converter: adapter that pushes book/trade updates into the dumper.
    """
    self._products = product_map.keys()
    self._product_proto_map = product_map
    self._feed_converter = feed_converter

    self._feature_dumper = FeatureDumper(
        self._product_proto_map.values(),
        sampler_name_subset="",
        config_filename=config_filename,
        recorder_config_filename=recorder_config_filename)

    # Feed events come back into this object via on_book / on_trade.
    self._feed_manager = fmgr.FeedManager(
        self.on_book,
        self.on_trade,
        aggregation_map,
        feedspec=feedspec)

    self._feed_converter.register_symbols(self._feature_dumper)

  def on_book_reset(self, book_builder_name, book_builder):
    """Propagates a book reset for every tracked product to the feed manager."""
    self._feed_manager.set_book_reset(
        self._products, book_builder_name, book_builder)

  def on_book(self, product, book):
    """Forwards a book update into the feature dumper via the converter."""
    self._feed_converter.update_book(product, book, self._feature_dumper)

  def on_trade(self, product, trade):
    """Forwards a trade update into the feature dumper via the converter."""
    self._feed_converter.update_trade(product, trade, self._feature_dumper)


def _product_to_sub_request_map(feed_sub_request):
  """Inverts a feed sub-request's sub->products mapping into product->sub.

  NOTE(review): reaches into the private `_sub_request_to_products_map`
  attribute, as the original code did — confirm there is no public accessor.
  """
  product_to_sub = {}
  for sub, products in feed_sub_request._sub_request_to_products_map.items():
    for product in products:
      product_to_sub[product] = sub
  return product_to_sub


def dump_single2(config_filename, recorder_config_filename, start_time, end_time, machine_str):
  """Runs a feature dump for [start_time, end_time) on the given machines.

  Builds a FeatureDumperFactory around FastFeatureDumper, then replays either
  from the slow archive feed (when fast feed is unavailable) or from the feed
  cache. In the fast-feed path, when a feed-cache sub-request exists, a
  per-sub-request remap (`sub_req_hack_map`) is built by matching products
  between the live and feed-cache sub-requests; both must cover the exact
  same product set (asserted with the diverging key lists as diagnostics).

  Args:
    config_filename: feature config file.
    recorder_config_filename: recorder config file.
    start_time: replay start time.
    end_time: replay end time.
    machine_str: comma-separated machine names; whitespace around names is
      stripped. The slow path uses all machines; the fast path only the first.
  """
  machine_list = [machine.strip() for machine in machine_str.split(",")]

  factory = lmf.FeatureDumperFactory(FastFeatureDumper,
                                     config_filename,
                                     start_time,
                                     recorder_config_filename,
                                     flags.FLAGS.force_fastfeed)

  if factory.cant_use_fastfeed:
    print("SLOW FEED")
    dbase.run_from_archive(factory.feedspec.feed_sub_request,
                           factory.feature_dumper.on_book_reset,
                           start_time,
                           end_time,
                           machine=machine_list,
                           worker_id='1',
                           use_feed_arbitration=True,
                           feed_checker_config=factory.feed_checker_config)
  else:
    print("FAST FEED")
    sub_req_hack_map = None
    if factory.feedspec.feed_sub_request_feed_cache is not None:
      # Match live sub-requests to feed-cache sub-requests via their products.
      sub1_by_product = _product_to_sub_request_map(
          factory.feedspec.feed_sub_request)
      sub2_by_product = _product_to_sub_request_map(
          factory.feedspec.feed_sub_request_feed_cache)

      # Both sides must cover exactly the same products (compared by str,
      # since product keys may not be directly comparable/sortable).
      sub1_keys = sorted(str(key) for key in sub1_by_product)
      sub2_keys = sorted(str(key) for key in sub2_by_product)
      assert sub1_keys == sub2_keys, (sub1_keys, sub2_keys)

      sub_req_hack_map = {sub1_by_product[product]: sub2_by_product[product]
                          for product in sub1_by_product}
    abase.run_from_feed_cache_with_feed_sub(factory.feedspec.feed_sub_request,
                                            factory.feature_dumper.on_book_reset,
                                            start_time,
                                            end_time,
                                            sub_req_hack_map=sub_req_hack_map,
                                            machine=machine_list[0],
                                            worker_ids=['1', '2'])
