# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: inkyu, jaewon

import concurrent.futures
import datetime
import functools
import logging
import json
import urllib.request

import click
import kafka
import tabulate
from tornado.ioloop import IOLoop
from tornado import gen
from tornado.concurrent import run_on_executor
from absl import app, flags

from coin.base.datetime_util import to_timestamp_int
from coin.base.param_util import to_list
from coin.strategy.tool.monitor.log_parser_runner import create_stream_log_parser_v1
from coin.strategy.tool.monitor import config_v1
from coin.strategy.tool.monitor.plot import (get_plotter, refine_balance, refine_timeseries)


@functools.lru_cache()
def _fetch_bitmex_xbtusd_volume_24h():
  """Fetch the XBTUSD 24h contract volume from the public bitmex API.

  Cached with lru_cache so the (blocking, 5s-timeout) HTTP request is made at
  most once per process on success. lru_cache does not memoize raised
  exceptions, so failed attempts are retried on a later call.

  Returns:
    int: the 'volume24h' field of the XBTUSD instrument.

  Raises:
    Exception: on any network, HTTP, JSON, or schema error.
  """
  xbtusd_info_url = 'https://www.bitmex.com/api/v1/instrument?symbol=XBTUSD&count=1'
  content = urllib.request.urlopen(xbtusd_info_url, timeout=5).read()
  parsed = json.loads(content)
  return int(parsed[0]['volume24h'])


def get_bitmex_xbtusd_volume_24h():
  """Return the XBTUSD 24h market volume, or None when it cannot be fetched.

  Bug fix: the lru_cache used to wrap this function directly, so a single
  transient failure cached None for the lifetime of the process; now only
  successful fetches are cached and failures are retried.
  """
  try:
    return _fetch_bitmex_xbtusd_volume_24h()
  except Exception:
    # Best-effort: report() falls back to printing turnover without a
    # market-share figure when the market volume is unavailable.
    return None


def report(results, sort_by_name=True):
  """Print a tabulated per-strategy summary followed by aggregate totals.

  Args:
    results: iterable of dicts containing at least every key in `headers`
      plus 'quote'.
    sort_by_name: if True, rows are ordered by strategy name.
  """
  headers = [
      'name',
      'pnl_mark',
      'pnl_4h',
      'pnl_24h',
      'turnover_mark',
      'turnover_4h',
      'turnover_24h',
      'position_1',
      'position_2',
      'fill_1',
      'fill_2',
      'mark_time'
  ]

  data = []
  pnl_mark_sum = 0
  position_sum = 0
  turnover_mark_sum = 0
  turnover_24h_sum = 0
  xbtusd_turnover_24h_sum = 0

  if sort_by_name:
    results = sorted(results, key=lambda v: v['name'])

  for result in results:
    # Retired strategies keep their pnl history but hold no live positions,
    # so their position columns are blanked and excluded from the total.
    retired = result['name'].startswith('RETIRED')
    entry = []
    for header in headers:
      if retired and header.startswith('position_'):
        entry.append(None)
      else:
        entry.append(result[header])

    data.append(entry)
    # `or 0` treats missing/None metrics as zero in every aggregate.
    pnl_mark_sum += (result['pnl_mark'] or 0)
    if not retired:
      position_sum += (result['position_1'] or 0)
      position_sum += (result['position_2'] or 0)
    turnover_mark_sum += (result['turnover_mark'] or 0)
    turnover_24h_sum += (result['turnover_24h'] or 0)

    # 'xbtusd' also matches 'bitmex-xbtusd'; the old second clause was
    # redundant and has been dropped.
    if result['quote'] == 'BTC' and 'xbtusd' in result['name']:
      xbtusd_turnover_24h_sum += (result['turnover_24h'] or 0)

  print(tabulate.tabulate(data, headers=headers, floatfmt='.3f'))
  if 'pnl_mark' in headers:
    print(' * Total pnl_mark:      {:18,.03f}'.format(pnl_mark_sum))
  if 'position_1' in headers:
    print(' * Total position:      {:18,.03f}'.format(position_sum))
  if 'turnover_mark' in headers and turnover_mark_sum != 0:
    print(' * Total turnover_mark: {:18,.03f}'.format(turnover_mark_sum))
  # Bug fix: this line was gated on turnover_mark_sum, so the 24h total could
  # be hidden (or shown as 0) based on the wrong accumulator.
  if 'turnover_24h' in headers and turnover_24h_sum != 0:
    print(' * Total turnover_24h:  {:18,.03f}'.format(turnover_24h_sum))
  if 'turnover_24h' in headers and xbtusd_turnover_24h_sum != 0:
    xbtusd_market_volume = get_bitmex_xbtusd_volume_24h()
    if xbtusd_market_volume is None:
      print(' * XBTUSD turnover_24h: {0:18,.03f}'.format(xbtusd_turnover_24h_sum))
    else:
      market_share_pct = 100. * xbtusd_turnover_24h_sum / xbtusd_market_volume
      print(' * XBTUSD turnover_24h: {0:18,.03f}  (market share: {1:,.02f}%)'.format(
          xbtusd_turnover_24h_sum, market_share_pct))


def plot_balance_pos(results, begin_time, end_time, mark_time, filename):
  """Plot each result's balance on the primary axis and its position dump on
  the secondary axis, over [begin_time, end_time]."""
  assert begin_time < end_time
  with get_plotter(filename, begin_time, end_time, plot_secondary=True) as plots:
    balance_plot, position_plot = plots[0], plots[1]
    for res in results:
      balance_plot(*refine_balance(res, begin_time, end_time, mark_time))
      position_plot(*refine_timeseries(res, begin_time, end_time, 'position_dump'))


def _aggregate_by_key(iterable, key):
  res = {}
  for elem in iterable:
    k = elem[key]
    if k not in res:
      res[k] = []
    res[k].append(elem)
  return res


class KafkaSubscription(object):
  """Two-way registry: kafka topic -> strategy name -> strategy config."""

  def __init__(self):
    self._name_by_topic = {}
    self._config_by_name = {}

  def subscribe_configs_v1(self, configs):
    """Register v1 strategy configs; non-dict entries are skipped.

    Returns self so construction can be chained fluently.
    """
    for cfg in configs:
      if not isinstance(cfg, dict):
        continue
      cfg_name = cfg['name']
      self._config_by_name[cfg_name] = cfg
      self._name_by_topic.update(
          {topic: cfg_name for topic in cfg.get('topics', set())})
    return self

  def get_name(self, topic):
    """Strategy name owning *topic*; KeyError if unknown."""
    return self._name_by_topic[topic]

  def get_config(self, name):
    """Config dict registered under *name*; KeyError if unknown."""
    return self._config_by_name[name]

  def get_all_topics(self):
    """All subscribed topics, sorted."""
    return sorted(self._name_by_topic)

  def __getitem__(self, topic):
    """Config dict for *topic*, or None when the topic is not subscribed."""
    name = self._name_by_topic.get(topic)
    return self._config_by_name.get(name) if name is not None else None


class CoroutineReader(object):
  """Interface for readers that deliver records via coroutines."""

  def read(self):
    """Return (a future resolving to) the next record; subclasses override."""
    raise NotImplementedError()


class KafkaCoroutineReader(CoroutineReader):
  """CoroutineReader backed by a blocking kafka-python consumer.

  All consumer calls run on a dedicated single-thread executor (via
  tornado's run_on_executor), so read() can be awaited from the IOLoop
  without blocking it, and consumer access stays serialized on one thread.
  """

  def __init__(self, *, kafka_servers, topics):
    assert topics, 'Specify topic'
    self._kafka_consumer = None
    self._kafka_servers = kafka_servers
    self._topics = topics
    # Must be named exactly `executor`: tornado's run_on_executor resolves
    # this attribute by name on `self`. Created in start().
    self.executor = None

    # topic -> offset of the last record returned by _kafka_read().
    self._last_offsets = {}
    # topic -> end-offset snapshot taken by update_end_offsets().
    self._end_offsets = {}

  def start(self, start_from=None):
    """Create the consumer, assign all partitions, and seek each one.

    Args:
      start_from: optional datetime. When given, each partition is rewound to
        the first offset at/after that time; partitions with no such offset
        (and all partitions when start_from is None) seek to the end.
    """
    assert self.executor is None
    self.executor = concurrent.futures.ThreadPoolExecutor(1)
    self._kafka_consumer = kafka.KafkaConsumer(bootstrap_servers=self._kafka_servers)

    partitions = []
    for topic in self._topics:
      partition_ids = self._kafka_consumer.partitions_for_topic(topic)
      partitions += [kafka.TopicPartition(topic, partition_id) for partition_id in partition_ids]

    if start_from is None:
      offsets = {partition: None for partition in partitions}
    else:
      # offsets_for_times() expects epoch milliseconds. NOTE(review): the
      # /1.e6 implies to_timestamp_int returns nanoseconds — confirm its
      # unit against coin.base.datetime_util.
      timestamp = int(to_timestamp_int(start_from) / 1.e6)
      timestamps = {partition: timestamp for partition in partitions}
      offsets = self._kafka_consumer.offsets_for_times(timestamps)

    self._kafka_consumer.assign(partitions)
    for partition, offset in offsets.items():
      if offset:
        self._kafka_consumer.seek(partition, offset.offset)
      else:
        self._kafka_consumer.seek_to_end(partition)

  def stop(self):
    """Shut down the worker executor without waiting for in-flight reads.

    NOTE(review): the KafkaConsumer is never close()d, so its sockets live
    until process exit. Likely acceptable for this CLI tool, but closing it
    safely would need coordination with the (possibly blocked) worker thread.
    """
    assert self.executor
    self.executor.shutdown(wait=False)

  @run_on_executor
  def _kafka_read(self):
    """Block for one record (runs on the executor thread).

    Also enforces that offsets are strictly increasing per topic, i.e. each
    record is observed at most once.
    """
    record = next(self._kafka_consumer)
    last_offset = self._last_offsets.get(record.topic, -1)
    assert record.offset > last_offset, \
           '%d <= %d' % (record.offset, last_offset)
    self._last_offsets[record.topic] = record.offset
    return record

  @gen.coroutine
  def read(self):
    """Coroutine: resolve to the next kafka record."""
    data = yield self._kafka_read()
    return data

  def _partition_for_topic(self, topic):
    """Return the single TopicPartition of *topic*, or None if it has none.

    A single partition per topic is assumed so per-topic offsets are totally
    ordered (see _kafka_read / is_end).
    """
    partition_ids = list(self._kafka_consumer.partitions_for_topic(topic))
    assert len(partition_ids) <= 1, 'Multiple partition ids for topic: %s' % topic
    if len(partition_ids) == 1:
      return kafka.TopicPartition(topic, partition_ids[0])
    else:
      return None

  @run_on_executor
  def update_end_offsets(self, topics=None):
    """Snapshot each topic's current end offset (runs on the executor thread).

    is_end() compares against this snapshot; call once before consuming to
    define what "caught up" means.
    """
    if topics is None:
      topics = self._topics

    # TODO(inkyu): Make this more efficient.
    for topic in to_list(topics):
      partition = self._partition_for_topic(topic)
      end_offset = self._kafka_consumer.end_offsets([partition])[partition]
      self._end_offsets[topic] = end_offset

  def is_end(self, topic):
    """True once we've consumed past the end offset snapshot for *topic*.

    Returns False before the first record of the topic has been read.
    """
    if topic not in self._last_offsets:
      return False
    assert topic in self._end_offsets
    return self._last_offsets[topic] >= self._end_offsets[topic]


class Monitor(object):
  """Feeds strategy log records into per-strategy parsers and periodically
  prints a pnl/position report grouped by quote currency."""

  def __init__(self, *, coroutine_reader, subscription, clear_screen=False):
    assert isinstance(coroutine_reader, CoroutineReader), coroutine_reader

    self._coroutine_reader = coroutine_reader
    self._subscription = subscription
    self._clear_screen = clear_screen
    self._parsers = {}    # strategy name -> stream log parser
    self._is_ready = {}   # strategy name -> True once its topic caught up

  def _append(self, topic, lines):
    """Route one kafka record payload into the parser for its strategy."""
    config = self._subscription[topic]
    if config is None:
      return  # topic not subscribed; ignore
    name = config['name']
    parser = self._parsers.get(name)
    if parser is None:
      parser = create_stream_log_parser_v1(config=config)
      self._parsers[name] = parser
    # Mark the strategy ready the first time its topic has caught up with
    # the end-offset snapshot; never un-mark it afterwards.
    if name not in self._is_ready and self._coroutine_reader.is_end(topic):
      self._is_ready[name] = True
    for line in lines.split('\n'):
      parser.parse_log_line(line)

  @gen.coroutine
  def _run_loop(self):
    """Consume kafka records forever, feeding each into _append()."""
    yield self._coroutine_reader.update_end_offsets()
    while True:
      record = yield self._coroutine_reader.read()
      self._append(record.topic, record.value.decode('utf-8'))

  def _print_stat(self):
    """Print one report cycle: only strategies marked ready are shown."""
    if self._clear_screen:
      click.clear()

    now = datetime.datetime.now()

    results = []
    # get_result() is invoked for every parser (ready or not) so parser-side
    # state advances uniformly; only ready strategies are reported.
    for name, parser in self._parsers.items():
      res = parser.get_result(time_current=now)
      if self._is_ready.get(name):
        results.append(res)

    if results:
      print('Time: %s' % now.strftime('%Y%m%d-%H%M%S'))

    for quote, quote_results in _aggregate_by_key(results, 'quote').items():
      print('Quote: %s' % quote)
      report(quote_results)
      print()

  @gen.coroutine
  def _run_report_loop(self):
    """Print stats every two seconds, forever."""
    while True:
      self._print_stat()
      yield gen.sleep(2)

  def start(self, ioloop):
    """Schedule the consume loop and the report loop on *ioloop*."""
    ioloop.add_callback(self._run_loop)
    ioloop.add_callback(self._run_report_loop)


def main(argv):
  """Entry point: subscribe to all strategy topics and run the live monitor."""
  FLAGS = flags.FLAGS
  ioloop = IOLoop.current()

  # Topic subscription is derived from the v1 strategy configs.
  subscription = KafkaSubscription().subscribe_configs_v1(config_v1.STRATEGIES)

  reader = KafkaCoroutineReader(
      kafka_servers=['coin-kafka.corp.prestolabs.io:9092'],
      topics=subscription.get_all_topics())
  # Rewind 26h so the 24h pnl/turnover windows are fully populated at startup.
  reader.start(start_from=datetime.datetime.now() - datetime.timedelta(hours=26))

  monitor = Monitor(coroutine_reader=reader,
                    subscription=subscription,
                    clear_screen=FLAGS.clear_screen)
  monitor.start(ioloop)

  try:
    ioloop.start()
  except (SystemExit, KeyboardInterrupt):
    print()  # leave the prompt on a fresh line on Ctrl-C


if __name__ == '__main__':
  # DEBUG root logging on purpose: kafka-python is chatty at this level,
  # which helps diagnose broker connectivity issues.
  logging.basicConfig(level='DEBUG', format='%(levelname)8s %(asctime)s %(name)s] %(message)s')

  # Flags must be defined before app.run() parses argv.
  flags.DEFINE_bool('clear_screen', True, 'If true, clear screen before printing monitor output.')

  app.run(main)
