import sys
import datetime
import logging
import json
import pandas as pd
import coin.base.database.util as db_util

from absl import app, flags
from sqlalchemy import create_engine, text
from coin.base.datetime_util import to_timestamp_int
from coin.base.query_util import query_exchange_rates
from xunkemgmt_client.client.util.query_util import query_accounts
from xunkemgmt_client.support.accounting.logic.constants import (
    ACCOUNTING_CURRENCY_CANDIDATES)
from xunkemgmt_client.tool.slack_noti import send_to_slack

FLAGS = flags.FLAGS

# NOTE: the SQL templates below are filled with Python %-formatting (no bind
# parameters); inputs come from internal configs/DB ids, not untrusted users.

# Latest balance row (max balance_hid) per account within a one-day window.
# %s slots: IN-list of account ids, window start, window end (exclusive).
QUERY_BALANCE_SQL = """
SELECT * from AccountBalanceHistory
WHERE balance_hid in (
  SELECT max(balance_hid)
  FROM AccountBalanceHistory
  WHERE account_id IN %s
  AND query_timestamp >= '%s'
  AND query_timestamp < '%s'
  GROUP BY account_id);
"""

# Latest position row (max position_hid) per account within a one-day window.
# %s slots: IN-list of account ids, window start, window end (exclusive).
QUERY_POSITION_SQL = """
SELECT * from AccountPositionHistory
WHERE position_hid in (
  SELECT max(position_hid)
  FROM AccountPositionHistory
  WHERE account_id IN %s
  AND query_timestamp >= '%s'
  AND query_timestamp < '%s'
  GROUP BY account_id);
"""

# Position estimates matching a set of position_hids. %s slot: IN-list.
QUERY_POSITION_ESTIMATE_SQL = """
SELECT * from PositionEstimateHistory
WHERE position_hid IN %s;
"""

# Clears one trading day from a destination table before re-inserting.
# %s slots: table name, trading_date.
DELETE_SYNC_SQL = """
DELETE FROM %s WHERE trading_date = '%s'
"""

# Accounts whose balance estimate failed, as
# (market_type, exchange, owner, accounting_currency) tuples; appended by
# _gen_balance_estimate_df and read by main for the Slack report / exit code.
MISSING_ESTIMATE = list()


def _query_daily_snapshot(engine, trading_date):
  """Fetch the latest balance and position snapshot per account for one day.

  Queries the [trading_date, trading_date + 1 day) window and returns the
  single newest history row per account for both tables.

  Args:
    engine: SQLAlchemy engine pointed at the source database.
    trading_date: datetime.date of the trading day to query.

  Returns:
    Tuple (balance_df, position_df) of pandas DataFrames.

  Raises:
    ValueError: if no accounts are configured.
  """
  def _in_clause(values):
    # Render the SQL IN-list explicitly instead of relying on str(tuple):
    # a 1-element tuple formats as "('x',)" whose trailing comma is invalid
    # SQL, and numpy scalars repr as e.g. "np.int64(5)" under numpy 2.
    if not values:
      raise ValueError('No account ids to query')
    return '(%s)' % ', '.join(
        "'%s'" % v.replace("'", "''") if isinstance(v, str) else str(v)
        for v in values)

  account_ids = tuple(query_accounts()['account_id'])
  in_list = _in_clause(account_ids)
  next_date = trading_date + datetime.timedelta(days=1)
  with engine.connect() as conn:
    # disable this parameter in mariadb 10.3.24, otherwise IN clause will be very slow
    # https://mariadb.com/kb/en/conversion-of-big-in-predicates-into-subqueries/
    conn.execute(text('SET in_predicate_conversion_threshold = 0'))
    balance_df = pd.read_sql(
        QUERY_BALANCE_SQL % (in_list, trading_date, next_date), conn)
    position_df = pd.read_sql(
        QUERY_POSITION_SQL % (in_list, trading_date, next_date), conn)
  return balance_df, position_df


def _gen_engine(db_config):
  """Create a SQLAlchemy MySQL engine from a DB config mapping.

  Args:
    db_config: dict with 'user', 'password', 'host' and 'database' keys.

  Returns:
    A SQLAlchemy Engine (mysqldb driver, 1h connection recycling).
  """
  url = 'mysql+mysqldb://{user}:{password}@{host}/{database}'.format(
      user=db_config['user'],
      password=db_config['password'],
      host=db_config['host'],
      database=db_config['database'])
  return create_engine(url, echo=False, pool_recycle=3600)


def _gen_balance_estimate_detail(trading_date, balance_dict, accounting_currency, excluded_assets):
  """Value each held currency in the accounting currency and in USD.

  Args:
    trading_date: date used to look up exchange rates.
    balance_dict: parsed balance JSON; its 'each_balance' list of
      {'currency': ..., 'total': ...} entries is consumed.
    accounting_currency: target currency for 'total_value'.
    excluded_assets: currencies allowed to have no price (skipped silently).

  Returns:
    List of dicts with keys accounting_currency / symbol / total_value /
    total_value_in_usd, one per priceable balance entry.

  Raises:
    ValueError: a price is missing for a currency not in excluded_assets.
  """
  balances = balance_dict.get('each_balance', [])
  currencies = list({entry['currency'] for entry in balances})
  rates = query_exchange_rates(currencies, accounting_currency, trading_date)
  usd_rates = query_exchange_rates(currencies, 'USD', trading_date)
  # Drop currencies the rate service could not price; the per-entry loop
  # below decides whether that is fatal.
  price_dict = {c: p for c, p in zip(currencies, rates) if p is not None}
  price_in_usd_dict = {c: p for c, p in zip(currencies, usd_rates) if p is not None}
  details = []
  for entry in balances:
    symbol = entry['currency']
    price = price_dict.get(symbol)
    usd_price = price_in_usd_dict.get(symbol)
    if price is None or usd_price is None:
      if symbol in excluded_assets:
        continue
      raise ValueError('Fail to query price of %s %s on %s' %
                       (balance_dict.get('exchange'), symbol, trading_date))
    details.append({
        'accounting_currency': accounting_currency,
        'symbol': symbol,
        'total_value': price * entry['total'],
        'total_value_in_usd': usd_price * entry['total'],
    })
  return details


def _gen_balance_estimate_df(balance_df):
  """Build one balance-estimate row per account from snapshot rows.

  For each balance snapshot, resolves the account's accounting currency,
  values every held currency via exchange rates, and emits a row for
  BalanceEstimateIntervalHistory.  Accounts whose estimate fails are
  appended to the module-level MISSING_ESTIMATE list (read later by main
  for the Slack report and exit code) and get None totals.

  Args:
    balance_df: AccountBalanceHistory rows; must carry a 'trading_date'
      column (added in place by sync_daily_snapshot before this runs).

  Returns:
    pandas.DataFrame of estimate rows (may be empty).
  """
  global MISSING_ESTIMATE
  estimates = []
  all_account_df = query_accounts()
  # Optional CSV listing assets that are allowed to have no price quote.
  excluded_assets = list(
    pd.read_csv(FLAGS.exclude_estimate_config)['asset']) if FLAGS.exclude_estimate_config else []
  for _, row in balance_df.iterrows():
    account_df = all_account_df[all_account_df['account_id'] == row['account_id']]
    if len(account_df) != 1:
      # Fixed typo (was 'accout'); lazy %-args per logging best practice.
      logging.info('Fail to find account: %s', row['account_id'])
      continue
    account_df = account_df.iloc[0]
    # First candidate currency configured for this (market_type, exchange).
    accounting_currency = ACCOUNTING_CURRENCY_CANDIDATES[
        (account_df['market_type'], account_df['exchange'])][0]
    try:
      estimate_detail = _gen_balance_estimate_detail(
          row['trading_date'], json.loads(row['balance_dict']), accounting_currency, excluded_assets)
      total_estimate = sum(i['total_value'] for i in estimate_detail)
      total_estimate_in_usd = sum(i['total_value_in_usd'] for i in estimate_detail)
    except Exception:
      # Deliberate best-effort: record the failure and continue so a single
      # bad account does not abort the whole sync.  logging.exception keeps
      # the original message and adds the traceback (replaces the former
      # logging.error(e) + logging.error(msg) pair).
      acct_info = (account_df['market_type'], account_df['exchange'],
                   account_df['owner'], accounting_currency)
      logging.exception(
          'Fail to calculate balance estimate. %s.%s.%s %s', *acct_info)
      MISSING_ESTIMATE.append(acct_info)
      estimate_detail = []
      total_estimate, total_estimate_in_usd = None, None
    # JSON blob stored alongside the scalar columns for auditing/debugging.
    estimate_dict = {
        'account_request': {
            'market_type': account_df['market_type'],
            'exchange': account_df['exchange'],
            'owner': account_df['owner'],},
        'query_ts': to_timestamp_int(row['query_timestamp']),
        'estimate': {
            'accounting_currency': accounting_currency,
            'total_value': total_estimate,
            'total_value_in_usd': total_estimate_in_usd,},
        'estimate_detail': estimate_detail
    }
    estimate = {
        'account_id': row['account_id'],
        'accounting_currency': accounting_currency,
        'trading_date': row['trading_date'],
        'query_timestamp': row['query_timestamp'],
        'total_value': total_estimate,
        'total_value_in_usd': total_estimate_in_usd,
        'balance_hid': row['balance_hid'],
        'estimate_dict': json.dumps(estimate_dict),
    }
    estimates.append(estimate)
  return pd.DataFrame(estimates)


def _gen_position_estimate_df(position_df):
  """Fetch position estimates for the given snapshot rows, tagged with date.

  Args:
    position_df: AccountPositionHistory rows; must carry 'position_hid' and
      'trading_date' columns (the latter added by sync_daily_snapshot).

  Returns:
    pandas.DataFrame of PositionEstimateHistory rows plus a 'trading_date'
    column.
  """
  position_hids = tuple(position_df['position_hid'])
  # Render the IN-list explicitly: str() of a 1-element tuple ("(5,)") has a
  # trailing comma that is invalid SQL.
  in_list = '(%s)' % ', '.join(
      "'%s'" % h if isinstance(h, str) else str(h) for h in position_hids)
  engine_src = _gen_engine(db_util.read_db_config(FLAGS.mysql_config_src))
  with engine_src.connect() as conn:
    estimates_df = pd.read_sql(QUERY_POSITION_ESTIMATE_SQL % (in_list,), conn)
  # Map trading_date through position_hid.  The previous raw Series
  # assignment aligned on row index, silently producing NaN/misaligned dates
  # whenever the two frames' indexes differed.
  hid_to_date = dict(zip(position_df['position_hid'], position_df['trading_date']))
  estimates_df['trading_date'] = estimates_df['position_hid'].map(hid_to_date)
  return estimates_df


def sync_daily_snapshot(engine, trading_date, account_info_df):
  """Replace one trading day's rows in the destination interval table.

  Picks the target table from the DataFrame's key column, deletes any
  existing rows for trading_date, then appends the new snapshot.

  NOTE: mutates account_info_df in place by adding a 'trading_date'
  column; downstream estimate code reads that column from the same frame.

  Args:
    engine: SQLAlchemy engine for the destination database.
    trading_date: datetime.date being synced.
    account_info_df: balance or position snapshot rows.

  Raises:
    ValueError: if the frame has neither balance_hid nor position_hid.
  """
  table = None
  for key_column, candidate in (
      ('balance_hid', 'AccountBalanceIntervalHistory'),
      ('position_hid', 'AccountPositionIntervalHistory')):
    if key_column in account_info_df.columns:
      table = candidate
      break
  if table is None:
    raise ValueError('abnormal account info data')
  account_info_df['trading_date'] = trading_date
  # engine.begin() wraps delete + insert in one transaction.
  with engine.begin() as conn:
    conn.execute(text(DELETE_SYNC_SQL % (table, trading_date)))
    account_info_df.to_sql(table, conn, if_exists='append', index=False)


def populate_daily_estimate_snapshot(engine, trading_date, balance_df, position_df):
  """Rebuild both estimate interval tables for one trading day.

  Computes balance and position estimate frames, then — in a single
  transaction — deletes the day's existing rows and appends the new ones.

  Args:
    engine: SQLAlchemy engine for the destination database.
    trading_date: datetime.date being populated.
    balance_df: balance snapshot rows (input to the balance estimates).
    position_df: position snapshot rows (input to the position estimates).
  """
  # Compute both frames before opening the transaction, as the original did.
  targets = (
      ('BalanceEstimateIntervalHistory', _gen_balance_estimate_df(balance_df)),
      ('PositionEstimateIntervalHistory', _gen_position_estimate_df(position_df)),
  )
  with engine.begin() as conn:
    for table, estimate_df in targets:
      conn.execute(text(DELETE_SYNC_SQL % (table, trading_date)))
      estimate_df.to_sql(table, conn, if_exists='append', index=False)


def main(_):
  """Sync one day's snapshots and estimates, then report via log/Slack.

  Uses --trading_date when given, otherwise yesterday.  Exits with the
  number of accounts whose estimate could not be computed (0 = success).
  """
  trading_date = FLAGS.trading_date
  if not trading_date:
    trading_date = datetime.date.today() - datetime.timedelta(days=1)
  elif isinstance(trading_date, str):
    trading_date = datetime.datetime.strptime(trading_date, "%Y-%m-%d").date()

  engine_src = _gen_engine(db_util.read_db_config(FLAGS.mysql_config_src))
  balance_df, position_df = _query_daily_snapshot(engine_src, trading_date)
  engine_dst = _gen_engine(db_util.read_db_config(FLAGS.mysql_config_dst))

  # NOTE: sync_daily_snapshot adds a 'trading_date' column to these frames
  # in place; populate_daily_estimate_snapshot relies on it.
  sync_daily_snapshot(engine_dst, trading_date, balance_df)
  logging.info('Succeed to insert %s balance' % trading_date)
  sync_daily_snapshot(engine_dst, trading_date, position_df)
  logging.info('Succeed to insert %s position' % trading_date)
  populate_daily_estimate_snapshot(engine_dst, trading_date, balance_df, position_df)
  logging.info('Succeed to insert %s balance estimate' % trading_date)

  missing_df = pd.DataFrame(
      MISSING_ESTIMATE,
      columns=['market_type', 'exchange', 'owner', 'accounting_currency'])
  missing_repr = missing_df.to_string() if len(missing_df) > 0 else None
  noti_msg = (f'[{trading_date}] Daily-balance\n'
              f'Missing account estimate:\n{missing_repr}')
  logging.info(noti_msg)
  if FLAGS.slack_receiver:
    send_to_slack(noti_msg, FLAGS.slack_receiver, 'file')
  # Exit code doubles as the count of accounts with missing estimates.
  sys.exit(len(MISSING_ESTIMATE))


if __name__ == '__main__':
  # Trading day to sync; defaults to yesterday when unset (see main()).
  flags.DEFINE_string('trading_date', None, '%Y-%m-%d')
  # Source (read) database config.
  flags.DEFINE_string('mysql_config_src',
                      "../../coin_deploy/support_accounting/db_config/mysql_config_prod.json",
                      'path of aws database config.')
  # Destination (write) database config.
  flags.DEFINE_string('mysql_config_dst',
                      "../../coin_deploy/support_accounting/db_config/iosg_config_prod.json",
                      'path of iosg database config.')
  # CSV of assets allowed to lack a price quote (skipped during estimates).
  flags.DEFINE_string('exclude_estimate_config',
                      "../../coin_deploy/support_accounting/balance_config/estimate_exclude_list.csv",
                      'path of kline price exclude config.')
  # Optional Slack channel for the missing-estimate report.
  flags.DEFINE_string('slack_receiver', None, 'slack channel')
  app.run(main)
