import collections
import datetime
import logging
import math
import traceback
from concurrent.futures import ThreadPoolExecutor

import pandas as pd
from absl import app, flags

import coin.base.database.util as db_util
from coin.support.accounting.app.sync_daily_balance import _gen_engine

# Handle to absl's global flag registry; this script's flags are registered
# under the __main__ guard at the bottom of the file.
FLAGS = flags.FLAGS


def _get_entry_to_drop(engine, start_dt, end_dt, account_id, table, columns):
  assert table in ('BalanceEstimateHistory', 'PositionEstimateHistory'), table
  primary_key = 'hid'
  assert primary_key in columns, (primary_key, columns)

  sql0 = """
SELECT COUNT(*) as count FROM %s
WHERE account_id = %s AND query_timestamp >= '%s' AND query_timestamp <= '%s'
""" % (table, account_id, start_dt, end_dt)

  sql1 = """
SELECT MIN(%s) AS id FROM %s WHERE account_id = %s and query_timestamp >= '%s'
""" % (primary_key, table, account_id, start_dt)

  sql2 = """
SELECT MAX(%s) AS id FROM %s WHERE account_id = %s and query_timestamp <= '%s'
""" % (primary_key, table, account_id, end_dt)

  sql3 = """
SELECT %s FROM %s
WHERE account_id = %s AND %s >= %s AND %s <= %s
""" % (','.join(columns), table, account_id, primary_key, '%s', primary_key, '%s')

  num_entries = 10000
  interval_sec = FLAGS.resample_sec
  interval = '%dS' % interval_sec

  drop_df = pd.DataFrame()
  count = pd.read_sql(sql0, engine)['count'][0]
  min_id_df = pd.read_sql(sql1, engine)
  max_id_df = pd.read_sql(sql2, engine)
  if count > 0:
    min_id = pd.read_sql(sql1, engine)['id'][0]
    max_id = pd.read_sql(sql2, engine)['id'][0]
    logging.debug('min_id: %s, max_id: %s' % (min_id, max_id))
    iter_cnt = math.ceil(float(count) / num_entries)
    iter_step = math.ceil((max_id - min_id) / float(iter_cnt))
    logging.debug('iter_cnt: %s, iter_step: %s' % (iter_cnt, iter_step))
    curr_id = min_id
    while min_id <= curr_id <= max_id:
      next_id = curr_id + iter_step
      sql = sql3 % (curr_id, next_id if next_id <= max_id else max_id)
      curr_id = next_id
      df = pd.read_sql(sql, engine)
      if len(df) == 0:
        continue
      df['query_timestamp'] = pd.to_datetime(df['query_timestamp'])
      df = df.set_index('query_timestamp')
      resampled_df = df[primary_key].resample(interval).first()
      orig_hid = set(df[primary_key])
      resampled_hid = set([int(i) for i in resampled_df.dropna()])
      hid_to_drop = list(orig_hid.difference(resampled_hid))
      df = df[df[primary_key].isin(hid_to_drop)]
      drop_df = pd.concat([drop_df, df])
  return drop_df


def cleanup_balance_data(engine, start_dt, end_dt, account_id):
  """Thins balance history for one account to one entry per interval.

  Deletes the surplus rows from BalanceEstimateHistory and the linked
  AccountBalanceHistory rows (via balance_hid).

  Returns:
    Dict mapping table name to the number of rows deleted from it.
  """
  entry_df = _get_entry_to_drop(
      engine, start_dt, end_dt, account_id,
      'BalanceEstimateHistory', ['hid', 'balance_hid', 'query_timestamp'])
  balance_hids = []
  sql = """
DELETE FROM %s WHERE %s in (%s)
"""
  if len(entry_df) > 0:
    with engine.connect() as conn:
      # Join ids explicitly: %-formatting a 1-element tuple would emit
      # "(5,)", which is invalid SQL.
      conn.execute(sql % ('BalanceEstimateHistory', 'hid',
                          ','.join(str(int(h)) for h in entry_df['hid'])))
      balance_hids = entry_df[pd.notnull(entry_df['balance_hid'])]['balance_hid']
      if len(balance_hids) > 0:  # "IN ()" would be a syntax error.
        conn.execute(sql % ('AccountBalanceHistory', 'balance_hid',
                            ','.join(str(int(h)) for h in balance_hids)))
  # Count rows against the table they were actually deleted from
  # (previously the two labels were swapped).
  dropped = {
      'BalanceEstimateHistory': len(entry_df),
      'AccountBalanceHistory': len(balance_hids),
  }
  for key, value in dropped.items():
    logging.info('account_id: %s, table: %s, dropped: %s' % (
        account_id, key, value))
  return dropped


def cleanup_position_data(engine, start_dt, end_dt, account_id):
  """Thins position history for one account to one entry per interval.

  Deletes the surplus rows from PositionEstimateHistory and the linked
  AccountPositionHistory rows (via position_hid).

  Returns:
    Dict mapping table name to the number of rows deleted from it.
  """
  entry_df = _get_entry_to_drop(
      engine, start_dt, end_dt, account_id,
      'PositionEstimateHistory', ['hid', 'position_hid', 'query_timestamp'])
  position_hids = []
  sql = """
DELETE FROM %s WHERE %s in (%s);
"""
  if len(entry_df) > 0:
    with engine.connect() as conn:
      # Join ids explicitly: %-formatting a 1-element tuple would emit
      # "(5,)", which is invalid SQL.
      conn.execute(sql % ('PositionEstimateHistory', 'hid',
                          ','.join(str(int(h)) for h in entry_df['hid'])))
      position_hids = entry_df[pd.notnull(entry_df['position_hid'])]['position_hid']
      if len(position_hids) > 0:  # "IN ()" would be a syntax error.
        conn.execute(sql % ('AccountPositionHistory', 'position_hid',
                            ','.join(str(int(h)) for h in position_hids)))
  # Count rows against the table they were actually deleted from
  # (previously the two labels were swapped).
  dropped = {
      'PositionEstimateHistory': len(entry_df),
      'AccountPositionHistory': len(position_hids),
  }
  for key, value in dropped.items():
    logging.info('account_id: %s, table: %s, dropped: %s' % (
        account_id, key, value))
  return dropped


def cleanup_accounting_data(engine, start_dt, end_dt, data_type):
  """Runs per-account history cleanup for one data type across all accounts.

  Fans out one cleanup task per distinct account_id over a thread pool and
  logs a per-table total of dropped rows. A failure for one account is
  logged and does not abort the others.

  Args:
    engine: SQLAlchemy engine shared by the worker threads.
    start_dt: inclusive query_timestamp lower bound.
    end_dt: inclusive query_timestamp upper bound.
    data_type: 'balance' or 'position'.

  Raises:
    ValueError: for any other data_type.
  """
  if data_type == 'balance':
    table = 'AccountBalanceHistory'
    cleanup_func = cleanup_balance_data
  elif data_type == 'position':
    table = 'AccountPositionHistory'
    cleanup_func = cleanup_position_data
  else:
    raise ValueError(data_type)

  sql = """
SELECT DISTINCT(account_id) AS account_id FROM %s
""" % table
  df = pd.read_sql(sql, engine)
  max_workers = 16
  # Leaving the `with` block joins the pool, so every future is finished
  # before results are collected below.
  with ThreadPoolExecutor(max_workers=max_workers) as executor:
    future_list = [
        (executor.submit(cleanup_func, engine, start_dt, end_dt, account_id),
         account_id)
        for account_id in df['account_id']
    ]

  total_dropped = collections.defaultdict(int)
  for future, account_id in future_list:
    try:
      for key, value in future.result().items():
        total_dropped[key] += value
    except Exception:  # Best-effort: log and continue with other accounts.
      logging.error('Fail to clean up account_id: %s, data_type: %s.' % (
          account_id, data_type))
      logging.error(traceback.format_exc())
  for key, value in total_dropped.items():
    logging.info('%s total dropped entries: %s' % (key, value))


def main(argv):
  """Entry point: validates flags, then thins balance and position history."""
  # Validate all flags before opening any DB connection so that bad input
  # fails fast (previously the engine was created first).
  assert FLAGS.resample_sec > 0
  start_dt = datetime.datetime.strptime(FLAGS.start_date, '%Y%m%d')
  end_dt = datetime.datetime.strptime(FLAGS.end_date, '%Y%m%d')
  # History before 2022-07-21 is out of scope for this cleanup.
  assert end_dt > start_dt and start_dt >= datetime.datetime(2022, 7, 21)
  db_config = db_util.read_db_config(FLAGS.db_config)
  engine = _gen_engine(db_config)
  cleanup_accounting_data(engine, start_dt, end_dt, 'balance')
  cleanup_accounting_data(engine, start_dt, end_dt, 'position')


if __name__ == '__main__':
  # Configure the root logger explicitly; absl's app.run does not set the
  # format used here.
  logging.basicConfig(level='INFO', format='%(levelname)s %(asctime)s %(name)s] %(message)s')
  # Flags are registered under the __main__ guard so importing this module
  # does not define them. All four are consumed by main().
  flags.DEFINE_string('db_config', None, '')
  flags.DEFINE_string('start_date', None, '')  # parsed as YYYYMMDD in main()
  flags.DEFINE_string('end_date', None, '')  # parsed as YYYYMMDD in main()
  flags.DEFINE_integer('resample_sec', None, '')  # keep one entry per this many seconds
  app.run(main)
