import os
import time
import logging
import traceback

from absl import app, flags

import coin.base.database.util as db_util
from coin.base.param_util import split_string
from coin.support.accounting.app.account_info_collector import generate_account_loggers
from coin.support.accounting.logic.balance_estimate import \
  generate_balance_estimate_history_result
from coin.support.accounting.logic.kafka_querier import KafkaAccountInfoQuerier
from coin.support.accounting.logic.position_estimate import \
  generate_position_estimate_history_result
from coin.support.accounting.logic.query_util import is_accounting_redundant
from xunkemgmt_client.support.accounting.database.mysql_importer import \
    MysqlAccountInfoImporter as MysqlImporter
from coin.support.accounting.logic.query_util import filter_invalid_request_and_result
from coin.util.queue.config import KafkaConfig
from xunkemgmt_client.support.accounting.database.memcached_importer import \
  MemcachedAccountInfoImporter as MemcachedImporter
from xunkemgmt_client.client.util.query_util import query_accounts


FLAGS = flags.FLAGS


def get_querier_and_importer():
  """Build the Kafka querier and DB importers from command-line FLAGS.

  Returns:
    A ``(querier, importers)`` tuple. ``importers`` always holds a
    MemcachedImporter first and, only when --mysql_accounting_config is
    given, a MysqlImporter second.
  """
  memcached_config = db_util.read_db_config(FLAGS.memcached_config)
  kafka_config = KafkaConfig.from_cmd_config(FLAGS.kafka_config_filename)

  querier = KafkaAccountInfoQuerier(kafka_config)
  importers = [MemcachedImporter(db_config=memcached_config)]
  # MySQL import is optional; only read its config when the flag is set.
  if FLAGS.mysql_accounting_config is not None:
    mysql_config = db_util.read_db_config(FLAGS.mysql_accounting_config)
    importers.append(MysqlImporter(db_config=mysql_config))
  return (querier, importers)


def collect_balance_estimate(request_list, account_loggers):
  """Query balance estimates, persist them, and append to per-account logs.

  Args:
    request_list: account request protos (with market_type/exchange/owner).
    account_loggers: dict keyed by (market_type, exchange, owner) tuples.
  """
  # Skip accounts whose accounting is handled redundantly elsewhere.
  request_list = [
      req for req in request_list
      if not is_accounting_redundant(req.market_type, req.exchange, req.owner)]
  if not request_list:
    return
  querier, importers = get_querier_and_importer()

  logging.info('start balance estimate query')
  response_list = generate_balance_estimate_history_result(
      request_list, querier)
  request_list, response_list = filter_invalid_request_and_result(
      request_list, response_list)

  logging.info('start balance estimate insertion')
  for importer in importers:
    try:
      importer.insert_balance_estimate_history(request_list, response_list)
    except AssertionError as e:
      # Best-effort insertion: log the failure and keep going so the other
      # importers and the log writing below still run.
      logging.error('Fail to insert balance estimate to db. %s', e)
      logging.error(traceback.format_exc())

  logging.info('start account estimate log writing')
  for request, response in zip(request_list, response_list):
    logger_key = (request.market_type, request.exchange, request.owner)
    account_logger = account_loggers[logger_key]
    # As balance and ticker are also written to log in other apps, no need
    # to write them in estimate log.
    balance_estimate = response.balance_estimate
    balance_estimate.balance.ClearField('each_balance')
    for exchange_ticker in balance_estimate.each_ticker:
      exchange_ticker.ClearField('each_ticker')
    account_logger.write_balance_estimate(balance_estimate)


def collect_position_estimate(request_list, account_loggers):
  """Query position estimates, persist them, and append to per-account logs.

  Only Futures/Options accounts carry positions, so other market types are
  filtered out up front.

  Args:
    request_list: account request protos (with market_type/exchange/owner).
    account_loggers: dict keyed by (market_type, exchange, owner) tuples.
  """
  request_list = [
      req for req in request_list
      if req.market_type in ('Futures', 'Options')]
  if not request_list:
    return
  querier, importers = get_querier_and_importer()

  logging.info('start position estimate query')
  response_list = generate_position_estimate_history_result(
      request_list, querier)
  request_list, response_list = filter_invalid_request_and_result(
      request_list, response_list)

  logging.info('start position estimate insertion')
  for importer in importers:
    try:
      importer.insert_position_estimate_history(request_list, response_list)
    except AssertionError as e:
      # Best-effort insertion: log the failure and keep going so the other
      # importers and the log writing below still run.
      logging.error('Fail to insert position estimate to db. %s', e)
      logging.error(traceback.format_exc())

  logging.info('start account estimate log writing')
  for request, response in zip(request_list, response_list):
    logger_key = (request.market_type, request.exchange, request.owner)
    account_logger = account_loggers[logger_key]
    # As position and ticker are also written to log in other apps, no need
    # to write them in estimate log.
    position_estimate = response.position_estimate
    position_estimate.position.ClearField('each_position')
    for exchange_ticker in position_estimate.each_ticker:
      exchange_ticker.ClearField('each_ticker')
    account_logger.write_position_estimate(position_estimate)


def collect_transfers(request_list, start_time, end_time):
  """Query transfer history for a time window and upsert it to MySQL.

  Args:
    request_list: account request protos to query transfers for.
    start_time: window start, epoch nanoseconds (see main()).
    end_time: window end, epoch nanoseconds.

  Raises:
    RuntimeError: if no MySQL importer is configured, i.e.
      --mysql_accounting_config was not set; transfers are only stored there.
  """
  if not request_list:
    return
  logging.info('start transfer history query')
  account_info_querier, importers = get_querier_and_importer()
  # The old code did `importers[1]` which raised a bare IndexError when
  # --mysql_accounting_config was unset (importers then only holds the
  # memcached importer), and its `assert isinstance(...)` is stripped under
  # `python -O`. Locate the MySQL importer explicitly and fail with a clear
  # message instead.
  mysql_importer = next(
      (imp for imp in importers if isinstance(imp, MysqlImporter)), None)
  if mysql_importer is None:
    raise RuntimeError(
        'collect_transfers requires --mysql_accounting_config to be set')
  logging.info(f'query transfer time range {start_time} - {end_time}')
  transfer_response = account_info_querier.query_transfer_history(
      request_list, start_time, end_time)
  transfer_request, transfer_response = \
      filter_invalid_request_and_result(request_list, transfer_response)
  logging.info('start transfer history insertion')
  mysql_importer.upsert_transfer_history(transfer_request, transfer_response)


def collect_account_fills(request_list, start_time, end_time):
  """Query account fill history for a time window and insert it into MySQL.

  Args:
    request_list: account request protos to query fills for.
    start_time: window start, epoch nanoseconds (see main()).
    end_time: window end, epoch nanoseconds.

  Raises:
    RuntimeError: if no MySQL importer is configured, i.e.
      --mysql_accounting_config was not set; fills are only stored there.
  """
  if not request_list:
    return
  logging.info('start account fill history query')
  account_info_querier, importers = get_querier_and_importer()
  # The old code did `importers[1]` which raised a bare IndexError when
  # --mysql_accounting_config was unset (importers then only holds the
  # memcached importer), and its `assert isinstance(...)` is stripped under
  # `python -O`. Locate the MySQL importer explicitly and fail with a clear
  # message instead.
  mysql_importer = next(
      (imp for imp in importers if isinstance(imp, MysqlImporter)), None)
  if mysql_importer is None:
    raise RuntimeError(
        'collect_account_fills requires --mysql_accounting_config to be set')
  logging.info(f'query account fill time range {start_time} - {end_time}')
  response_list = account_info_querier.query_account_fill_history(
      request_list, start_time, end_time)
  request_list, response_list = \
      filter_invalid_request_and_result(request_list, response_list)
  logging.info('start account fill history insertion')
  mysql_importer.insert_account_fill_history(request_list, response_list)


def main(_):
  """Collector entry point: loop forever (or for --duration seconds),
  gathering estimates, transfers, and fills each cycle."""
  start_time = time.time()

  request_list = query_accounts(
      active=True,
      market_types=split_string(FLAGS.market_type),
      exchanges=split_string(FLAGS.exchange),
      owners=split_string(FLAGS.owner),
      as_proto=True)
  assert len(request_list) > 0

  account_loggers = generate_account_loggers(
      request_list,
      log_root=os.path.expanduser('~/data/account_estimate_proto_log'))
  # Transfer/fill queries cover [window_start, window_end) in epoch ns,
  # advancing the window each cycle so nothing is queried twice.
  window_start = int(time.time() * 10 ** 9)

  while True:
    collect_balance_estimate(request_list, account_loggers)
    collect_position_estimate(request_list, account_loggers)
    window_end = int(time.time() * 10 ** 9)
    logging.info("end estimate collector")
    collect_transfers(request_list, window_start, window_end)
    logging.info("end transfer collector")
    collect_account_fills(request_list, window_start, window_end)
    logging.info("end account fill collector")
    window_start = window_end
    elapsed = time.time() - start_time
    if FLAGS.duration is not None and elapsed > FLAGS.duration:
      break
    time.sleep(FLAGS.query_frequency)


if __name__ == '__main__':
  # Account-selection flags (all comma separated; empty means "all").
  flags.DEFINE_string('market_type', None, 'Comma separated market_type.')
  flags.DEFINE_string('exchange', None, 'Comma separated exchange.')
  flags.DEFINE_string('owner', None, 'Comma separated account owner.')
  flags.DEFINE_string('currency', None, 'Comma separated currency. WILL BE IGNORED.')

  # Data-source / sink configuration.
  flags.DEFINE_string('kafka_config_filename', None, 'kafka config')
  flags.DEFINE_string('mysql_accounting_config', None, 'mysql db config')
  flags.DEFINE_string(
      'memcached_config',
      "../../coin_deploy/support_accounting/db_config/memcached_config_prod.json",
      'path of database config.')

  # Loop scheduling.
  flags.DEFINE_integer('query_frequency', 30, 'query frequency in seconds.')
  flags.DEFINE_integer(
      'duration',
      None,
      'Duration for running collector in seconds. Eg. If set to 300, program will '
      'stop after running 300 seconds. If set to None, program will run forever')

  logging.basicConfig(level='INFO', format='%(levelname)8s %(asctime)s %(message)s')
  app.run(main)
