import functools
from typing import Generic, List, Optional, Tuple, TypeVar

import coin.proto.coin_data_replay_pb2 as cdr_pb2
import coin.support.proto_log.logic.util as proto_log_util
import coin2.service.account.account_info_service_pb2 as ais_pb2
from coin.base.datetime_util import to_datetime
from coin.base.param_util import to_list
from coin.exchange.base.strategy_logger import (
    enumerate_logs as enumerate_strat_logs)
from coin.flow.reader import (
    _ContainerBase,
    MergeReader)
from coin.proto.coin_request_pb2 import StrategyRequestProto
from coin.support.data_replay.constants import topic_to_kafka_topic_map
from coin.support.data_replay.reader.archive_reader import ArchiveReader
from coin.support.data_replay.reader.grpc_reader import (
    BalanceRealtimeHistoriesGrpcReader)
from coin.support.data_replay.reader.hdf5_reader import IntervalFeedHdf5Reader
from coin.support.data_replay.reader.kafka_reader import KafkaReader
from coin.support.data_replay.reader.interface import (
    RawData,
    IRawDataReader,
    TopicData,
    ITopicDataReader,)
from coin.support.proto_log.logic.util import StratInfo
from coin.util.queue.config import KafkaConfig
from coin.util.queue.tools.kafka_topic import generate_kafka_topic


class _TopicDataTimestampContainer(_ContainerBase):
  """Priority container for merged TopicData streams.

  The priority is the tuple (timestamp, topic_type, data_source) of the
  next record, so merged output is time-ordered with deterministic
  tie-breaking across topics and data sources.
  """

  def query_priority(self):
    """Return the merge priority for this container's next record.

    Always returns a 3-tuple so that priorities from different containers
    remain mutually comparable inside the priority queue.
    """
    if self.reader is None:
      # Bug fix: this branch previously returned the bare scalar
      # self.INT64MIN, which is not comparable with the tuple priorities
      # returned by the branches below and would raise TypeError when the
      # priority queue compares containers. Use the same sentinel tuple
      # as the EOF branch.
      return (self.INT64MIN, cdr_pb2.UNKNOWN_TOPIC_DATA_TYPE, '')
    record = self.reader.peek()
    if record is None:
      # On EOF, return the lowest number to make it out of pqueue.
      return (self.INT64MIN, cdr_pb2.UNKNOWN_TOPIC_DATA_TYPE, '')
    return (record.topic_data_value.timestamp,
            record.topic_data_info.topic_type,
            record.topic_data_info.data_source)

  def __lt__(self, other):
    # Arbitrary but deterministic tie-break by object identity; only used
    # when two containers have exactly equal priority tuples.
    return id(self) < id(other)


class _RawDataTimestampContainer(_ContainerBase):
  """Priority container that orders raw-data records purely by timestamp."""

  def query_priority(self):
    """Return the next record's timestamp, or INT64MIN when there is no
    reader or the reader has reached EOF (so the container drains out of
    the priority queue)."""
    reader = self.reader
    if reader is None:
      return self.INT64MIN
    next_record = reader.peek()
    # On EOF, return the lowest number to make it out of pqueue.
    return self.INT64MIN if next_record is None else next_record.timestamp

  def __lt__(self, other):
    # Arbitrary but deterministic tie-break by object identity.
    return id(self) < id(other)


class TimestampMergeReader(MergeReader):
  """MergeReader whose ordering comes from a timestamp-based container.

  The actual ordering is defined by the container type handed to the base
  class (e.g. ``_RawDataTimestampContainer``); this subclass adds no
  behavior of its own and exists to name the intent at call sites.
  """

  def __init__(self, container_type):
    super().__init__(container_type)


class TopicDataReader(ITopicDataReader):
  """Reads one topic's records through a backend-specific raw-data reader.

  The backend is selected from the request: ``kafka_config_file`` selects
  Kafka, ``root_dir`` selects archived proto logs on disk, and
  ``grpc_request`` selects a gRPC service. Every record returned is a
  TopicData pairing the raw value with this reader's TopicDataInfo.
  """

  def __init__(self, request: cdr_pb2.TopicDataRequest):
    self._validate_request(request)
    self._topic_data_info, self._reader = self._create_reader(request)

  @property
  def topic_data_info(self) -> cdr_pb2.TopicDataInfo:
    """Metadata (topic type, data source, machine, strategy) being read."""
    return self._topic_data_info

  def _create_topic_data(self, data: RawData) -> Optional[TopicData]:
    """Pair a raw record with this reader's metadata; None passes through."""
    if data is None:
      return None
    return TopicData(topic_data_info=self._topic_data_info,
                     topic_data_value=data)

  def _validate_request(self, request: cdr_pb2.TopicDataRequest):
    """Assert the request carries the fields every backend requires."""
    assert request.HasField('topic_type')
    assert request.HasField('start_timestamp')
    assert request.HasField('end_timestamp')
    # Strategy/telemetry streams are additionally keyed by machine+strategy.
    if request.topic_type in (cdr_pb2.STRATEGY, cdr_pb2.TELEMETRY):
      assert request.HasField('machine')
      assert request.HasField('strategy_name')

  def _create_kafka_reader(self, request: cdr_pb2.TopicDataRequest
  ) -> Tuple[cdr_pb2.TopicDataInfo, IRawDataReader]:
    """Build a KafkaReader and its TopicDataInfo for the request window."""
    kafka_topic_type = topic_to_kafka_topic_map[request.topic_type]
    kafka_config = KafkaConfig.from_cmd_config(request.kafka_config_file)
    if request.topic_type not in (cdr_pb2.STRATEGY, cdr_pb2.TELEMETRY):
      raise ValueError('unknown topic type: %s' % request.topic_type)
    topic_info = StratInfo(strategy_name=request.strategy_name,
                           machine=request.machine,
                           trading_date=None)
    # The broker list doubles as the data-source identifier.
    topic_data_info = cdr_pb2.TopicDataInfo(
        topic_type=request.topic_type,
        data_source=','.join(kafka_config.kafka_servers),
        machine=request.machine,
        strategy_name=request.strategy_name)
    reader = KafkaReader(
        topics=to_list(generate_kafka_topic(kafka_topic_type, topic_info)),
        kafka_config=kafka_config,
        start_timestamp=request.start_timestamp,
        end_timestamp=request.end_timestamp)
    return topic_data_info, reader

  def _create_archive_reader(self, request: cdr_pb2.TopicDataRequest
  ) -> Tuple[cdr_pb2.TopicDataInfo, IRawDataReader]:
    """Build an ArchiveReader over the per-day proto-log files under
    ``request.root_dir`` that cover the requested time window."""
    dt_range = proto_log_util._get_date_range(
        to_datetime(request.start_timestamp),
        to_datetime(request.end_timestamp))
    if request.topic_type not in (cdr_pb2.STRATEGY, cdr_pb2.TELEMETRY):
      raise ValueError('unknown topic type: %s' % request.topic_type)
    strat_req = StrategyRequestProto(strategy_name=request.strategy_name)
    enumerate_func = functools.partial(enumerate_strat_logs, strat_req)
    topic_data_info = cdr_pb2.TopicDataInfo(
        topic_type=request.topic_type,
        data_source=request.root_dir,
        machine=request.machine,
        strategy_name=request.strategy_name)
    # One log directory per trading date; concatenate all matching files.
    log_files = []
    for dt in dt_range:
      log_dir = proto_log_util._gen_log_dir(
          log_root=request.root_dir,
          trading_date=dt,
          machine=request.machine)
      log_files += enumerate_func(dt, log_dir)
    archive_reader = ArchiveReader(
        request.start_timestamp, request.end_timestamp, log_files)
    return topic_data_info, archive_reader

  def _create_grpc_reader(self, request: cdr_pb2.TopicDataRequest
  ) -> Tuple[cdr_pb2.TopicDataInfo, IRawDataReader]:
    """Build a gRPC-backed reader from the packed ``grpc_request`` Any."""
    if request.topic_type not in (cdr_pb2.BALANCE_REALTIME_HISTORIES,):
      raise ValueError('unknown topic type: %s' % request.topic_type)
    grpc_request = ais_pb2.QueryBalanceRealtimeHistoriesRequestProto()
    request.grpc_request.Unpack(grpc_request)
    grpc_reader = BalanceRealtimeHistoriesGrpcReader(
        start_timestamp=request.start_timestamp,
        end_timestamp=request.end_timestamp,
        grpc_request=grpc_request)
    topic_data_info = cdr_pb2.TopicDataInfo(
        topic_type=request.topic_type,
        grpc_request=request.grpc_request)
    return topic_data_info, grpc_reader

  def _create_reader(self, request: cdr_pb2.TopicDataRequest
  ) -> Tuple[cdr_pb2.TopicDataInfo, IRawDataReader]:
    """Dispatch to the backend factory matching the set source field."""
    if request.HasField('kafka_config_file'):
      return self._create_kafka_reader(request)
    if request.HasField('root_dir'):
      return self._create_archive_reader(request)
    if request.HasField('grpc_request'):
      return self._create_grpc_reader(request)
    raise ValueError('unknown data source')

  def read(self) -> Optional[TopicData]:
    """Consume and return the next TopicData, or None at EOF."""
    return self._create_topic_data(self._reader.read())

  def peek(self) -> Optional[TopicData]:
    """Return the next TopicData without consuming it, or None at EOF."""
    return self._create_topic_data(self._reader.peek())


_ReaderType = TypeVar('_ReaderType')
_DataType = TypeVar('_DataType')


class DataMergeReader(Generic[_ReaderType, _DataType]):
  """Merges several readers into one timestamp-ordered record stream.

  Ordering is delegated to a TimestampMergeReader parameterized by the
  given container type; subclasses may override ``_on_reader_eof`` to
  react when an underlying reader is exhausted.
  """

  def __init__(self, readers: List[_ReaderType], container_type):
    self._merge_reader = TimestampMergeReader(container_type)
    for underlying in readers:
      self._merge_reader.add_reader(underlying, self._on_reader_eof)

  def _on_reader_eof(self, reader):
    # Hook invoked when a merged reader hits EOF; no-op by default.
    pass

  def read(self) -> Optional[_DataType]:
    """Pop and return the next record across all readers, or None at EOF."""
    return self._merge_reader.read()

  def peek(self) -> Optional[_DataType]:
    """Return the next record without consuming it, or None at EOF."""
    return self._merge_reader.peek()


if __name__ == '__main__':
  # Ad-hoc smoke test: builds a Kafka reader, an archive reader, and an
  # HDF5 interval-feed reader, merges them by timestamp, and prints one
  # JSON object per merged record while asserting timestamps are
  # monotonically non-decreasing.
  import datetime
  import json
  # NOTE(review): json_format, StrategyLog and Feed are only needed by the
  # commented-out MessageToDict line further below; they are unused as the
  # code currently stands.
  import google.protobuf.json_format as json_format
  from coin.base.datetime_util import to_timestamp_int
  from coin.proto.coin_strategy_pb2 import StrategyLog
  from coin.feed.fastfeed.feed_pb2 import Feed

  kafka_config_file = '../../coin_deploy/support_monitor/config/kafka_aws_config.json'

  machine_strats = [
      ('strategy-327.ap-northeast-1', 'vmm_sfty_delta_hedge'),
      ('strategy-327.ap-northeast-1', 'vmm_cfx_delta_hedge'),
  ]
  readers = []
  # One-hour window on 2023-06-13 (00:00 to 01:00).
  start_timestamp = to_timestamp_int(datetime.datetime(2023, 6, 13))
  end_timestamp=to_timestamp_int(datetime.datetime(2023, 6, 13, 1))
  # NOTE(review): each TopicDataReader built in this loop is discarded —
  # `reader` is overwritten every iteration and never appended to
  # `readers`. Looks like leftover debug code or a missing
  # `readers.append(reader)`; confirm intent before relying on it.
  for machine, strategy_name in machine_strats:
    request = cdr_pb2.TopicDataRequest(
        topic_type=cdr_pb2.STRATEGY,
        kafka_config_file=kafka_config_file,
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        machine=machine,
        strategy_name=strategy_name,
    )
    reader = TopicDataReader(request)
  # NOTE(review): this archive-backed request is constructed but never
  # used; presumably it was meant to feed a TopicDataReader too.
  request = cdr_pb2.TopicDataRequest(
        topic_type=cdr_pb2.STRATEGY,
        root_dir='/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_proto_log/',
        start_timestamp=start_timestamp,
        end_timestamp=end_timestamp,
        machine='strategy-327.ap-northeast-1',
        strategy_name='vmm_oas_delta_hedge',
    )

  # Raw KafkaReader on a hand-written topic name (bypasses TopicDataReader).
  reader = KafkaReader(
      topics=[
        'STRAT_strategy-327.ap-northeast-1_vmm_sfty_delta_hedge',
      ],
      kafka_config=KafkaConfig.from_cmd_config(kafka_config_file),
      start_timestamp=start_timestamp,
      end_timestamp=end_timestamp)
  readers.append(reader)

  # Archive reader built manually: enumerate per-day strategy log files
  # under the hard-coded proto-log root for the same machine/strategy.
  strat_req = StrategyRequestProto(strategy_name='vmm_oas_delta_hedge')
  machine = 'strategy-327.ap-northeast-1'
  log_files = []
  dt_range = proto_log_util._get_date_range(
      to_datetime(start_timestamp), to_datetime(end_timestamp))
  for dt in dt_range:
    log_dir = '/remote/iosg/strat-1/buckets/log.raw.coin/live/strat_proto_log/%s/%s' % (
      machine, dt.strftime('%Y%m%d'))
    log_files += enumerate_strat_logs(strat_req, dt, log_dir)
  reader = ArchiveReader(start_timestamp, end_timestamp, log_files)
  readers.append(reader)

  # HDF5 interval-feed reader over a fixed OHLC file and symbol/column set.
  files = ['/remote/iosg/data-2/buckets/feed.derived.interval_h5/coin/main/PT1M/Spot.Mexc.v3/20230613/Spot.Mexc.v3--ohlc.h5']
  symbols = ['BTC-USDT', 'ETH-USDT']
  columns = ['OPEN_MID', 'CLOSE_MID']
  reader = IntervalFeedHdf5Reader(
      start_timestamp, end_timestamp, symbols, columns, files)
  readers.append(reader)

  # Merge all appended readers by raw-record timestamp and drain the stream.
  merge_reader = DataMergeReader(readers, _RawDataTimestampContainer)
  datas = []
  prev_ts = 0
  while True:
    record = merge_reader.read()
    if record is None:
      break
    curr_ts = record.timestamp
    # Sanity check: the merged stream must be time-ordered.
    assert curr_ts >= prev_ts, (prev_ts, curr_ts)
    prev_ts = curr_ts
    hr_ts = to_datetime(curr_ts).strftime('%Y%m%d %H:%M:%S.%f')
    data = {}
    #data = json_format.MessageToDict(pb, preserving_proto_field_name=True)
    data['record_timestamp'] = curr_ts
    data['record_timestamp_human_readable'] = hr_ts
    datas.append(data)
  # Emit a JSON array with one record per line.
  records = '[\n' + ',\n'.join(json.dumps(record) for record in datas) + '\n]'
  print(records)
