import jinja2
import json

import pymotion.grid_util.simplepythonjob as simplepythonjob
import pymotion.grid_util.simpleshelljob as simpleshelljob
import pymotion.model_util.export_util as export_util
import pypapyrus.dataset.presto_feed_dataset as dataset_mdl
import pymotion.example_backtest.feed_backtest as backtest_mdl
import coin.support.feed_tool.feed_stats.app.feed_stats_motion.coin_env as coin_env
from coin.support.feed_tool.feed_cache.app.coin2_fast_feed_runner import get_fastfeed_setting

class FeedDatasetBacktest(backtest_mdl.FeedBacktest):
  """Feed backtest that records feed data per date and aggregates the output.

  For each (date, row) pair, `cmd_dumper` renders a subscribe config from a
  jinja2 template and builds a shell-job plan that runs the feed_dumper
  binary; `cmd_aggregator` builds a python-job plan that aggregates the
  dumped results via the generic papyrus dataset functor.
  """

  def __init__(self,
               output_root,
               run_agg_job,
               continue_on_fail_ind_job,
               continue_on_fail_agg_job,
               duration,
               sub_request,
               recipe,
               group,
               worker,
               machine,
               run_for_raw):
    """Store dump-job parameters, then delegate generic backtest setup.

    Args:
      output_root: root directory for backtest output (passed to base).
      run_agg_job: whether to run the aggregation stage (passed to base).
      continue_on_fail_ind_job: keep going when an individual job fails.
      continue_on_fail_agg_job: keep going when an aggregation job fails.
      duration: recording duration passed to the dumper binary.
      sub_request: subscription request; must expose market_type, exchange
        and api_version (see _get_mea_from_request).
      recipe: symbol-group template parameter.
      group: symbol-group template parameter.
      worker: symbol-group template parameter.
      machine: target machine, used in the template and fastfeed lookup.
      run_for_raw: if true, use a raw-feed driver config instead of the
        static fastfeed common.json.
    """
    self._duration = duration
    self._sub_request = sub_request
    self._recipe = recipe
    self._group = group
    self._worker = worker
    self._machine = machine
    self._run_for_raw = run_for_raw
    super().__init__(output_root=output_root,
                     run_agg_job=run_agg_job,
                     continue_on_fail_ind_job=continue_on_fail_ind_job,
                     continue_on_fail_agg_job=continue_on_fail_agg_job)

  def cmd_dumper(
      self,  # sink for others
      *sink_args,  # directories
      stage_backrun_directory,  # universe_dataframe
      rowid,
      date_dim__value,  # expanded_configs
      ind__config_filename,  # sink for others
      **sink_kwargs):
    """Build the shell-job plan that runs the feed dumper for one row/date.

    Renders the symbol-group subscribe config for this row, picks the driver
    config (static fastfeed json, or a raw-feed setting when run_for_raw is
    set), and returns the simpleshelljob plan for the dumper binary.
    Unused pipeline arguments are absorbed by *sink_args / **sink_kwargs.
    """
    datestr = date_dim__value.strftime("%Y%m%dT%H%M")
    trading_date = date_dim__value.strftime("%Y%m%d")
    subscribe_config = stage_backrun_directory.gen_job_filename(rowid, "subscribe")
    # Hoisted: the "<market>.<exchange>.<api>" id is needed both for the
    # template render and (possibly) the fastfeed lookup below.
    mea = self._get_mea_from_request()

    template_loader = jinja2.FileSystemLoader(searchpath="./")
    template_env = jinja2.Environment(loader=template_loader)
    template = template_env.get_template(
        'coin/support/feed_tool/feed_stats/app/feed_stats_motion/symbol_group.json.tmpl')
    json_str = template.render(mea=mea,
                               recipe=self._recipe,
                               group=self._group,
                               worker=self._worker,
                               machine=self._machine)
    export_util.export_json_file(json.loads(json_str), subscribe_config)
    common_driver = "./data/coin2/feed/ops/feed-stats/common.json"  # use fastfeed
    if self._run_for_raw:
      # use raw feed instead of the static fastfeed driver config
      common_driver = get_fastfeed_setting(
          machine=self._machine,
          mea=mea,
          arbitration=False,
          trading_date=trading_date,
          long_warmup=True)

    args = {
        "binary": "./bazel-bin/cc/appcoin2/research/feed_dumper_papyrus",
        "date": datestr,
        "duration": self._duration,
        "config_filename": ind__config_filename,
        "subscribe_config": subscribe_config,
        "common_driver": common_driver,
    }
    # Renamed from `template` so it does not shadow the jinja template above.
    cmd_template = [
        '%(binary)s',
        '--date="%(date)s"',
        '--duration="%(duration)s"',
        '--recorder_config_filename="%(config_filename)s"',
        '--driver="%(common_driver)s"',
        '--driver="%(subscribe_config)s"',
    ]

    return simpleshelljob.create_plan(command_args=args,
                                      command_template=" ".join(cmd_template),
                                      pwd=coin_env.get_wrt_BATCH_INSTALL_ROOT("../"))

  def _get_mea_from_request(self):
    """Return the "<market>.<exchange>.<api>" id built from sub_request."""
    return ".".join([self._sub_request.market_type,
                     self._sub_request.exchange,
                     self._sub_request.api_version])

  def cmd_aggregator(
      # `self` was previously absorbed silently by *sink_args (the method is
      # unused-state); declared explicitly for consistency with cmd_dumper.
      # Behavior is identical for instance-bound calls.
      self,
      *sink_args,  # directories
      stage_backrun_directory,  # universe_dataframe
      rowid,  # expanded_configs
      agg__config_filename,  # sink for others
      **sink_kwargs):
    """Build the python-job plan that aggregates dumped results for one row.

    NOTE(review): cmd_dumper calls gen_job_filename while this calls
    generate_job_filename on the same directory object — confirm both
    methods exist and the difference is intentional.
    """
    return simplepythonjob.save_create_plan(dataset_mdl.functor_aggregator, [],
                                            {
                                                'config_filename': agg__config_filename,
                                                'allow_glob': True,
                                            },
                                            stage_backrun_directory.generate_job_filename(
                                                "job", ".pk", rowid),
                                            pwd=coin_env.get_wrt_BATCH_INSTALL_ROOT("../"))


def calculate_stats(*,
                    output_root,
                    date_universe,
                    duration,
                    sub_request,
                    recipe,
                    group,
                    worker,
                    machine,
                    need_plot,
                    need_feed_csv,
                    run_for_raw,
                    interval=None,
                    latency_stat_only=None):
  """Run a feed-stats backtest over date_universe and return the run job.

  Builds a FeedDatasetBacktest, configures recorder options/outputs
  ('plot' and 'feed' outputs are added on demand), runs the plan and
  returns the finished backrun job.

  Raises:
    AssertionError: if the backrun job reports failure.
  """
  backrun_plan = FeedDatasetBacktest(output_root=output_root,
                                     run_agg_job=True,
                                     continue_on_fail_ind_job=True,
                                     continue_on_fail_agg_job=True,
                                     duration=duration,
                                     sub_request=sub_request,
                                     recipe=recipe,
                                     group=group,
                                     worker=worker,
                                     machine=machine,
                                     run_for_raw=run_for_raw)
  backrun_plan.set_recorder_option(book_print_level=5,
                                   interval=interval,
                                   latency_stat_only=latency_stat_only)
  recorder_output_request = ['basic_stat', 'basic_stat_concat']
  if need_plot:
    recorder_output_request.append('plot')
  if need_feed_csv:
    recorder_output_request.append('feed')
  backrun_plan.set_recorder_output(recorder_output_request=recorder_output_request)
  backrun_plan.set_date_universe(date_universe)
  backrun_job = backrun_plan.create_plan()
  # Explicit check instead of a bare `assert`: asserts are stripped under
  # `python -O`, which would silently ignore a failed run. AssertionError
  # is raised deliberately so existing callers that catch it still work.
  if not backrun_job.runjob():
    raise AssertionError("feed stats backrun job failed")
  return backrun_job
