# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: jhkim

import datetime
import logging
import sys
import tempfile
from concurrent.futures import ProcessPoolExecutor

import pandas

import coin.strategy.mm.fastfeature.fast_feature_dumper as ffdump
import coin.strategy.mm.fastfeature.experimental.universe as univ


def dump_multi(dump_configs, max_workers=1):
  """Run ffdump.dump_single for every config, optionally in parallel.

  Args:
    dump_configs: list of keyword-argument dicts for ffdump.dump_single.
    max_workers: number of worker processes. The default of 1 runs
      everything serially in-process, which is easier to debug.

  Returns:
    List of recorder config filenames, one per entry in dump_configs,
    in the same order as dump_configs.
  """
  recorder_config_filenames = []
  if max_workers > 1:
    # Submit everything first, then collect results in submission order so
    # the returned filenames line up with dump_configs. Leaving the `with`
    # block waits for all workers to finish, so .result() does not block
    # indefinitely afterwards.
    with ProcessPoolExecutor(max_workers=max_workers) as executor:
      futures = [executor.submit(ffdump.dump_single, **dump_config)
                 for dump_config in dump_configs]
    for future in futures:
      recorder_config_filenames.append(future.result())
  else:
    for dump_config in dump_configs:
      logging.debug('dumping %s', dump_config['start_time'])
      recorder_config_filenames.append(ffdump.dump_single(**dump_config))
  return recorder_config_filenames


def main(argv):
  """Dump fast-feature data for each day in a fixed date range.

  Builds one dump config per day between fromdate and todate (inclusive)
  and hands them all to dump_multi.

  Args:
    argv: command-line arguments (currently unused).

  Returns:
    Process exit code: 0 on success.
  """
  fromdate = datetime.date(2018, 4, 4)
  todate = datetime.date(2018, 4, 12)
  machine = 'feed-01.eu-west-1.aws'
  products = univ.get_products()
  datelist = pandas.date_range(fromdate, todate).tolist()
  logging.debug('dates to dump: %s', datelist)

  hours = 2  # hours of data to dump per day

  # One keyword-argument dict for ffdump.dump_single per day; all output
  # goes under a single fresh temp directory.
  dump_configs = []
  tmpdir = tempfile.mkdtemp()

  for i, date in enumerate(datelist):
    dump_configs.append({
        "output_root": tmpdir,
        "rowid": "%03d" % i,
        # Timestamp.to_datetime() was deprecated and later removed from
        # pandas; to_pydatetime() is the supported conversion to a plain
        # datetime.datetime.
        "start_time": date.to_pydatetime(),
        "hours": hours,
        "products": products,
        "machine": machine,
    })

  dump_multi(dump_configs)
  return 0


if __name__ == '__main__':
  # Configure the root logger before anything runs; main() and its helpers
  # rely on the root handler for their output.
  logging.basicConfig(level='DEBUG', format='%(levelname)8s %(asctime)s %(name)s] %(message)s')
  try:
    exit_code = main(sys.argv)
  except KeyboardInterrupt:
    # Treat a user interrupt as a normal failure exit rather than a traceback.
    exit_code = 1
  sys.exit(exit_code)
