# Copyright (c) 2018 Presto Labs Pte. Ltd.
# Author: leon

import csv
import datetime
import pathlib
import sys
from collections import namedtuple
from concurrent.futures import ProcessPoolExecutor
from functools import partial

from absl import (flags, app)

from coin.base.timestamp import get_timestamp
from coin.base.book.book_builder_base import ThinBookError
from coin.strategy.mm.dumper_base import run_from_archive
from coin.exchange.util.feed_checker import get_default_feed_checker_config
from coin.support.feed_tool.feed_research.all_products import (
    gen_huobi_feed_sub_requests,
    gen_okex_feed_sub_requests,
)
from coin.strategy.mm.tool.archive_base import run_from_feed_cache_with_feed_sub
from coin.exchange.util.feed_checker.types import FeedCheckerState
from coin.exchange.bitmex.kr_rest.futures_product import BitmexFuturesProduct
from coin.exchange.deribit_v1.kr_rest.futures_product import DeribitFuturesProduct
from coin.exchange.okex_futures.kr_rest.futures_product import OkexFuturesProduct
from coin.exchange.huobi_futures.kr_rest.futures_product import HuobiFuturesProduct
from coin.exchange.bitfinex_v2.kr_rest.product import BitfinexProduct
from coin.exchange.bitflyer_v1.kr_rest.futures_product import BitflyerFuturesProduct
from coin.exchange.bithumb.kr_rest.product import BithumbProduct
from coin.exchange.binance.kr_rest.product import BinanceProduct
from coin.exchange.okex.kr_rest.product import OkexProduct
from coin.exchange.gdax.kr_rest.product import GdaxProduct
from coin.exchange.upbit_v1.kr_rest.product import UpbitProduct
from coin.exchange.kraken.kr_rest.product import KrakenProduct
from coin.exchange.quoinex_v2.kr_rest.product import QuoinexProduct
from coin.exchange.hitbtc_v2.kr_rest.product import HitbtcProduct
from coin.exchange.huobi.kr_rest.product import HuobiProduct
from coin.exchange.binance_futures.kr_rest.futures_product import BinanceFuturesProduct

FLAGS = flags.FLAGS  # absl flag container; flags themselves are defined under __main__.

# Per-product bundle of open CSV file handles/writers plus the notional
# quantity used for true bid/ask price lookups (see get_notional_qty).
CsvWriter = namedtuple('CsvWriter',
                       ['book_writer', 'book_file', 'notional_qty', 'trade_writer', 'trade_file'])


def get_notional_qty(product):
  """Return the notional quantity used for true bid/ask price lookup.

  The returned value is passed to ``book.get_bid_price_by_qty`` /
  ``book.get_ask_price_by_qty`` in ``DumpStrategy.on_book``; it differs
  per exchange/product family.

  Args:
    product: A product instance from one of the supported exchanges.

  Returns:
    The notional quantity (int or float) for the product's exchange.

  Raises:
    ValueError: If ``product`` is not an instance of any supported
      product class.
  """
  if isinstance(product, (BitmexFuturesProduct, DeribitFuturesProduct)):
    return 5000
  # Okex futures and Huobi futures share the same notional quantity,
  # so the two branches are merged into one isinstance tuple.
  elif isinstance(product, (OkexFuturesProduct, HuobiFuturesProduct)):
    return 100
  elif isinstance(product,
                  (BitflyerFuturesProduct,
                   BinanceProduct,
                   BithumbProduct,
                   UpbitProduct,
                   BitfinexProduct,
                   GdaxProduct,
                   OkexProduct,
                   KrakenProduct,
                   QuoinexProduct,
                   HitbtcProduct,
                   HuobiProduct,
                   BinanceFuturesProduct)):
    return 1.2
  else:
    raise ValueError(product)


class DumpStrategy(object):
  """Dumps per-product book snapshots and trades to CSV files.

  One CSV pair is created per product: ``<csv_root>/book/<symbol>.csv`` and
  ``<csv_root>/trade/<symbol>.csv``. Rows are appended from book/trade
  callbacks registered in ``on_book_reset``.
  """

  def __init__(self, feed_sub_request, csv_root, book_levels):
    """Records the subscription and creates the output directory tree.

    Args:
      feed_sub_request: Feed subscription object; ``gen_iter()`` yields
        (sub_request, products) pairs.
      csv_root: Root directory for the output CSV tree.
      book_levels: Number of book levels written per snapshot row.
    """
    # NOTE(review): only the LAST (sub_request, products) pair yielded by
    # gen_iter() is kept -- confirm each feed_sub_request yields exactly
    # one pair, otherwise earlier pairs are silently dropped.
    for sub_request, products in feed_sub_request.gen_iter():
      self._sub_request = sub_request
      self._products = products
    self._csv_root = csv_root
    self._book_levels = book_levels
    pathlib.Path('%s/book' % csv_root).mkdir(parents=True, exist_ok=True)
    pathlib.Path('%s/trade' % csv_root).mkdir(parents=True, exist_ok=True)
    # Maps product -> CsvWriter; populated in on_book_reset.
    self._csv_writers = {}

  def prepare_book_csv_file(self, product):
    """Opens the per-product book CSV and writes its header row.

    Returns:
      (csv.writer, file object) for the opened book CSV.
    """
    csv_file = open('%s/book/%s.csv' % (self._csv_root, product.full_symbol), 'w', encoding='utf-8')
    writer = csv.writer(csv_file, dialect='excel')
    csv_header = ['timestamp', 'true_bid', 'true_ask', 'bid_depth', 'ask_depth']
    # One (price, qty) column pair per side per level.
    for level in range(self._book_levels):
      csv_header += ['bid%d' % level, 'bid_qty%d' % level, 'ask%d' % level, 'ask_qty%d' % level]
    csv_header.append('invalid')
    writer.writerow(csv_header)
    csv_file.flush()
    return writer, csv_file

  def prepare_trade_csv_file(self, product):
    """Opens the per-product trade CSV and writes its header row.

    Returns:
      (csv.writer, file object) for the opened trade CSV.
    """
    csv_file = open('%s/trade/%s.csv' % (self._csv_root, product.full_symbol),
                    'w',
                    encoding='utf-8')
    writer = csv.writer(csv_file, dialect='excel')
    csv_header = ['timestamp', 'price', 'qty', 'side', 'invalid']
    writer.writerow(csv_header)
    csv_file.flush()
    return writer, csv_file

  def on_book_reset(self, book_builder_name, book_builder):
    """Subscribes callbacks and (re)creates CSV writers for our products.

    Called by the replay runner when a book builder (re)starts. Only
    builders matching our sub request are subscribed.
    """
    if FLAGS.use_feed_cache:
      for product in self._products:
        if book_builder_name == self._sub_request:
          # NOTE(review): this branch calls `subscribe` while the archive
          # branch below calls `subscribe_book` -- confirm both register a
          # book callback on the respective builder APIs.
          # No feed checker in the cache path, hence the None first arg.
          book_builder.subscribe(product, partial(self.on_book, None, product))
          book_builder.subscribe_trade(product, partial(self.on_trade, None, product))
          book_writer, book_csv_file = self.prepare_book_csv_file(product)
          trade_writer, trade_csv_file = self.prepare_trade_csv_file(product)
          self._csv_writers[product] = CsvWriter(book_writer,
                                                 book_csv_file,
                                                 get_notional_qty(product),
                                                 trade_writer,
                                                 trade_csv_file)
    else:
      # Archive path: validate feed health per product via the feed checker.
      feed_checker = book_builder._feed_handler.feed_checker
      for product in self._products:
        if book_builder_name == self._sub_request:
          book_builder.subscribe_book(product, partial(self.on_book, feed_checker, product))
          book_builder.subscribe_trade(product, partial(self.on_trade, feed_checker, product))
          book_writer, book_csv_file = self.prepare_book_csv_file(product)
          trade_writer, trade_csv_file = self.prepare_trade_csv_file(product)
          self._csv_writers[product] = CsvWriter(book_writer,
                                                 book_csv_file,
                                                 get_notional_qty(product),
                                                 trade_writer,
                                                 trade_csv_file)
      feed_checker.start(timestamp=get_timestamp())

  def on_book(self, feed_checker, product, book):
    """Book callback: appends one snapshot row to the product's book CSV.

    Args:
      feed_checker: Feed checker for validity flagging, or None (cache path).
      product: Product this book belongs to.
      book: Book snapshot object.
    """
    writer = self._csv_writers.get(product, None)
    assert writer is not None

    bids = book.get_bid_array(self._book_levels)
    asks = book.get_ask_array(self._book_levels)

    # Skip rows where the book is too thin to quote the notional qty.
    try:
      csv_line = [
          book.timestamp,
          book.get_bid_price_by_qty(writer.notional_qty),
          book.get_ask_price_by_qty(writer.notional_qty),
          book.depth_bid(),
          book.depth_ask()
      ]
    except (IndexError, ThinBookError):
      return
    for level in range(self._book_levels):
      csv_line += [bids[level][0], bids[level][1], asks[level][0], asks[level][1]]

    if feed_checker is not None:
      state = feed_checker.check_per_product_state(product=product, timestamp=book.timestamp)
    else:
      # Cache path has no checker; READY is presumably never invalid, so
      # the restart below is unreachable with feed_checker=None -- verify.
      state = FeedCheckerState.READY

    csv_line.append(state.invalid)
    writer.book_writer.writerow(csv_line)
    writer.book_file.flush()
    if state.invalid:
      feed_checker.restart(timestamp=book.timestamp)

  def on_trade(self, feed_checker, product, trade):
    """Trade callback: appends one trade row to the product's trade CSV.

    Args:
      feed_checker: Feed checker for validity flagging, or None (cache path).
      product: Product this trade belongs to.
      trade: Trade event with timestamp/price/qty/side.
    """
    writer = self._csv_writers.get(product, None)
    assert writer is not None

    if feed_checker is not None:
      state = feed_checker.check_per_product_state(product=product, timestamp=trade.timestamp)
    else:
      state = FeedCheckerState.READY
    csv_line = [trade.timestamp, trade.price, trade.qty, trade.side, state.invalid]
    writer.trade_writer.writerow(csv_line)
    writer.trade_file.flush()
    if state.invalid:
      feed_checker.restart(timestamp=trade.timestamp)

  def close_csvs(self):
    """Flushes and closes every open book/trade CSV file."""
    for writer in self._csv_writers.values():
      writer.book_file.flush()
      writer.book_file.close()
      writer.trade_file.flush()
      writer.trade_file.close()

def launch(trading_date,
           machine,
           feed_sub_request,
           feed_root,
           endpoint_url,
           csv_root,
           book_levels,
           hours):
  """Replays one feed subscription for a trading day and dumps CSVs.

  Runs either the feed-cache replayer or the archive replayer depending on
  --use_feed_cache, driving a DumpStrategy that writes the CSV files.

  Args:
    trading_date: datetime at 00:00:00 UTC of the day to replay.
    machine: Instance machine name the feed was recorded on.
    feed_sub_request: Feed subscription to replay.
    feed_root: Feed files root directory (archive path only).
    endpoint_url: Minio endpoint url (archive path only).
    csv_root: Output csv files root directory.
    book_levels: Number of book levels written per snapshot row.
    hours: Replay span in hours from 00:00:00 UTC.

  Returns:
    0 on completion.
  """
  checker_config = get_default_feed_checker_config(feed_sub_request.all_products)

  begin = trading_date  # Day starts at 00:00:00 UTC.
  end = begin + datetime.timedelta(hours=hours)

  strategy = DumpStrategy(feed_sub_request, csv_root, book_levels)

  if FLAGS.use_feed_cache:
    run_from_feed_cache_with_feed_sub(feed_sub_request,
                                      strategy.on_book_reset,
                                      begin,
                                      end,
                                      machine=machine,
                                      worker_ids=['1', '2'])
  else:
    run_from_archive(feed_sub_request,
                     strategy.on_book_reset,
                     begin,
                     end,
                     machine=[machine],
                     root_dir=feed_root,
                     endpoint_url=endpoint_url,
                     feed_checker_config=checker_config,
                     skip_failed_product=False)

  strategy.close_csvs()
  return 0


def main(argv):
  """Entry point: fans out one `launch` per feed sub request across processes.

  Reads all configuration from absl flags, generates the feed sub requests
  for the selected exchange (Okex by default), and runs `launch` for each
  request in a process pool.

  Args:
    argv: Unused positional args from app.run.
  """
  trading_date = FLAGS.trading_date
  machine = FLAGS.machine
  api_version = FLAGS.api_version
  feed_root = FLAGS.feed_root
  endpoint_url = FLAGS.endpoint_url
  csv_root = FLAGS.csv_root
  book_levels = FLAGS.book_levels
  assert trading_date, '--trading_date must be specified.'
  assert machine, '--machine must be specified.'
  assert csv_root, '--csv_root must be specified.'
  print('Running for %s %s ...' % (trading_date, machine))
  sys.stdout.flush()
  trading_date = datetime.datetime.strptime(trading_date, '%Y%m%d')

  # Okex is the default; --exchange=Huobi selects the Huobi generator.
  # (Previously the Okex generator was invoked twice when --exchange=Okex.)
  if FLAGS.exchange == 'Huobi':
    feed_sub_requests = gen_huobi_feed_sub_requests(trading_date, api_version)
  else:
    feed_sub_requests = gen_okex_feed_sub_requests(trading_date, api_version)

  with ProcessPoolExecutor(max_workers=16) as executor:
    futures = [executor.submit(launch,
                               trading_date,
                               machine,
                               feed_sub_request,
                               feed_root,
                               endpoint_url,
                               csv_root,
                               book_levels,
                               FLAGS.hours)
               for feed_sub_request in feed_sub_requests]
  # Re-raise any exception that occurred in a worker process; previously
  # the futures were discarded and failures in `launch` went unnoticed.
  for future in futures:
    future.result()


if __name__ == '__main__':
  # Flags are registered only at script entry so importing this module
  # elsewhere does not pollute the global flag namespace.
  flags.DEFINE_boolean('use_feed_cache', True, 'Whether to use feed cache or not.')
  flags.DEFINE_string('trading_date', None, 'Trading date in form of %Y%m%d.')
  flags.DEFINE_string('machine', None, 'Instance machine name.')
  flags.DEFINE_string('api_version', None, 'api_version for okex case.')
  flags.DEFINE_string('feed_root', None, 'Feed files root directory.')
  flags.DEFINE_string('endpoint_url', None, 'Minio endpoint url.')
  flags.DEFINE_string('csv_root', None, 'Output csv files root directory.')
  flags.DEFINE_integer('book_levels', 5, 'How many levels of book to print in csv.')
  flags.DEFINE_integer('hours', 24, 'Time span in hours from 00:00:00 UTC.')
  flags.DEFINE_string('exchange', None, 'Okex or Huobi.')

  app.run(main)
