import logging
import time

from absl import app, flags

import coin.base.database.util as db_util
from coin.base.param_util import filter_duplicate, split_string
from coin.support.market_quotes.logic.kafka_querier import KafkaMarketInfoQuerier
from coin.util.queue.config import KafkaConfig
from xunkemgmt_client.market.market_quotes.database.memcached_importer import \
  MemcachedMarketInfoImporter as MemcachedImporter
from coin.support.accounting.logic.query_util import \
  filter_invalid_request_and_result
from xunkemgmt_client.client.util.query_util import query_accounts


FLAGS = flags.FLAGS


def collect_market_info(request_list):
  """Query ticker history from Kafka and persist it into memcached.

  Reads `FLAGS.kafka_config_filename` and `FLAGS.memcached_config` for
  connection settings. Invalid request/response pairs are dropped before
  the import step.

  Args:
    request_list: account/market request protos to query ticker history for.
  """
  # Destination store config can be loaded up front; it is independent of
  # the Kafka query below.
  memcached_config = db_util.read_db_config(FLAGS.memcached_config)

  ticker_querier = KafkaMarketInfoQuerier(
      KafkaConfig.from_cmd_config(FLAGS.kafka_config_filename))
  responses = ticker_querier.query_ticker_history(request_list)

  # Keep only the (request, response) pairs that passed validation.
  valid_requests, valid_responses = filter_invalid_request_and_result(
      request_list, responses)

  MemcachedImporter(db_config=memcached_config).insert_ticker_history(
      valid_requests, valid_responses)


def main(_):
  """Entry point: repeatedly collect market info until `FLAGS.duration` elapses.

  Builds the request list once from active accounts (filtered by the
  `market_type`/`exchange` flags, de-duplicated per
  (market_type, exchange, api_version)), then loops collecting market
  info every `FLAGS.query_frequency` seconds. Runs forever when
  `FLAGS.duration` is None.

  Raises:
    RuntimeError: if no accounts match the given market_type/exchange
      filters (nothing to collect).
  """
  start_time = time.time()

  request_list = query_accounts(
      active=True,
      market_types=split_string(FLAGS.market_type),
      exchanges=split_string(FLAGS.exchange),
      as_proto=True)
  request_list = filter_duplicate(
      request_list,
      key_func=lambda x: (x.market_type, x.exchange, x.api_version))
  # Explicit check instead of `assert`: asserts are stripped under `python -O`
  # and would otherwise let the collector spin forever on an empty list.
  if not request_list:
    raise RuntimeError(
        'No active accounts matched market_type=%r exchange=%r' %
        (FLAGS.market_type, FLAGS.exchange))

  while True:
    logging.info('start markets info collection')
    collect_market_info(request_list)
    # Check the deadline after each collection pass so a configured
    # duration always completes at least one full collection.
    if FLAGS.duration is not None and \
       time.time() - start_time > FLAGS.duration:
      break
    time.sleep(FLAGS.query_frequency)


if __name__ == '__main__':
  # Collection cadence and total runtime.
  flags.DEFINE_integer('query_frequency', 30, 'query frequency in seconds.')
  flags.DEFINE_integer(
      'duration',
      None,
      'Duration for running collector in seconds. Eg. If set to 300, program will '
      'stop after running 300 seconds. If set to None, program will run forever')

  # Which markets to collect.
  flags.DEFINE_string('market_type', None, 'Comma separated market_type.')
  flags.DEFINE_string('exchange', None, 'Comma separated exchange.')

  # Source (Kafka) and destination (memcached) connection configs.
  flags.DEFINE_string('kafka_config_filename', None, 'kafka config')
  flags.DEFINE_string(
      'memcached_config',
      '../../coin_deploy/support_accounting/db_config/memcached_config_prod.json',
      'path of database config.')

  logging.basicConfig(level='INFO', format='%(levelname)8s %(asctime)s %(message)s')
  app.run(main)
