# from .add_factors import add_batch_factors
from .add_factors import add_batch_factors_dag
import asyncio
from common.exchange import (
    get_async_binance_portfolio_exchange,
    get_async_binance_portfolio_exchange_no_key,
)
from common.message import get_whole_factor_topic, iter_topic, send_topic
from common.variables import time_counts, TimeFrameType

# from pytypes import typechecked
import json
import pandas as pd
import queue
import ray
import threading
import time
from trade.data.add_factors import notify_factors, write_factor_to_db
from trade.factors import name_bfname


def get_topic_batch_symbols_and_factors(consider_symbols, consider_fnames, timeframe):
    """Build a per-run topic name from the timeframe topic, the symbol and
    factor counts, and the current unix timestamp (seconds)."""
    base_topic = get_whole_factor_topic(timeframe)
    stamp = int(time.time())
    return f"{base_topic}_{len(consider_symbols)}_{len(consider_fnames)}_{stamp}"


class WatchFactorCenter:
    """Collects factor rows arriving on the whole-factor topic and assembles
    them into complete per-time-slice batches.

    A batch is complete when one value has been cached for every
    (symbol, fname) combination at the same ``xdatetime``; complete batches
    are pushed onto a bounded queue and handed to consumers via
    ``get_batch_factors()``. A background thread (``start()``) feeds the
    cache from the message topic.
    """

    def __init__(
        self,
        consider_symbols: "list[str]",
        consider_fnames: "list[str]",
        timeframe: "TimeFrameType",
    ):
        self.consider_fnames = consider_fnames
        self.consider_symbols = consider_symbols
        self.timeframe = timeframe
        # {(symbol, fname): value} for the time slice currently being filled
        self.cache_datas = {}
        # entry count a complete batch must reach before being dispatched
        self.need_num = len(consider_symbols) * len(consider_fnames)
        # xdatetime of the slice currently being assembled (None = none open)
        self.deal_time = None
        self.stop_sig = False
        # batch-factor names that produce the considered factor names
        self.consider_bfnames = list({name_bfname[x] for x in consider_fnames})
        print(f"consider bf_names: {self.consider_bfnames}")
        # bounded so a slow consumer back-pressures the watcher thread
        self.batch_factors = queue.Queue(10)
        # xdatetime of the last fully dispatched batch (None = nothing yet)
        self.last_deal = None

    def add_factor_to_table(self, datas):
        """Persist one factor message to the factor DB, indexed by xdatetime."""
        df = pd.DataFrame(datas["factors"])
        df["symbol"] = datas["symbol"]
        df.set_index("xdatetime", inplace=True)
        write_factor_to_db(
            df,
            datas["timeframe"],
            datas["bfname"],
            datas["symbol"],
            datas["start"],
            datas["end"],
        )

    def push_kline_factors(self, datas: dict):
        """Ingest one factor message; cache its values and emit full batches.

        For "kline" messages the raw kline is first persisted and the derived
        (non-kline) batch factors are recomputed over the same window, with
        ``notify_factors`` re-feeding their results through the topic.
        """
        if datas["bfname"] == "kline":
            self.add_factor_to_table(datas)
            add_batch_factors_dag(
                [x for x in self.consider_bfnames if x != "kline"],
                [],
                self.consider_symbols,
                datas["start"],
                datas["end"],
                datas["timeframe"],
                1000,
                n_jobs=1,
                verbose=1,
                callbacks=[notify_factors],
            )

        symbol = datas["symbol"]
        for row in datas["factors"]:
            xdatetime = row["xdatetime"]
            if self.last_deal and xdatetime <= self.last_deal:
                print(f"已经处理过这之前的数据: {xdatetime}")
                continue
            if self.deal_time and xdatetime > self.deal_time:
                # A newer time slice arrived before the current one filled up:
                # drop the incomplete cache and start over from the new slice.
                print(
                    "Error: 缓存数据未补齐，但是已经有新数据了, 或者这是时间片上处理过后又新增的部分数据"
                )
                self.cache_datas = {}
                self.deal_time = None

            if self.deal_time and xdatetime < self.deal_time:
                print("之前已经有处理过的数据了，不再进行处理")
                continue

            self.deal_time = xdatetime
            for fname, value in row.items():
                if fname in self.consider_fnames:
                    self.cache_datas[(symbol, fname)] = value

            if len(self.cache_datas) >= self.need_num:
                print("cache datas full", self.cache_datas)
                self.batch_factors.put(self.cache_datas, block=True)
                # BUG FIX: record the dispatched slice BEFORE clearing
                # deal_time. The original cleared deal_time first and then
                # assigned last_deal = deal_time, so last_deal was always
                # reset to None and already-processed rows were never
                # skipped by the guard above.
                self.last_deal = self.deal_time
                self.cache_datas = {}
                self.deal_time = None

    def get_batch_factors(self):
        """Block until the next complete batch is available and return it."""
        return self.batch_factors.get(True)

    def keep_watch_and_push(self):
        """Consume the whole-factor topic, forwarding matching messages.

        Runs on the watcher thread started by ``start()``. The stop flag is
        checked inside the message loop so ``stop()`` actually terminates it
        (the original only checked after the topic iterator ended, which for
        a live topic may be never).
        """
        print("Start watch data")
        while not self.stop_sig:
            with iter_topic(get_whole_factor_topic(self.timeframe)) as it:
                for results in it:  # each item: JSON factor message
                    if self.stop_sig:
                        break
                    results = json.loads(results)
                    print(f"received results: {results}")
                    if (
                        results["timeframe"] != self.timeframe
                        or results["bfname"] not in self.consider_bfnames
                    ):
                        continue
                    print(f"to push results: {results}")
                    self.push_kline_factors(results)

    def start(self):
        """Launch the daemon watcher thread."""
        self.stop_sig = False
        self.th = threading.Thread(target=self.keep_watch_and_push)
        self.th.daemon = True
        self.th.start()
        print("Start watch data thread")

    def stop(self):
        """Signal the watcher thread to exit at the next loop iteration."""
        self.stop_sig = True

    def join(self):
        """Wait for the watcher thread to finish."""
        self.th.join()


class WatchDataCenter:
    """Watches live OHLCV candles for a set of (symbol, timeframe) pairs and
    publishes each completed candle as a "kline" factor message on the
    per-timeframe whole-factor topic.

    Two emission paths exist:
      * ``continue_watch_job`` pushes a candle as soon as a newer candle for
        the same (timeframe, symbol) appears in the exchange stream;
      * ``send_records`` is a periodic flusher that pushes a cached candle
        once its period has elapsed on the wall clock, covering quiet markets
        where no newer candle arrives.
    ``last_record_lock`` guards the shared caches between the two coroutines.
    """

    def __init__(
        self,
        symbolsAndTimeframes: "list[tuple[str, TimeFrameType]]",
        is_trade: bool = False,
    ):
        self.symbols_timeframes = list(set([tuple(x) for x in symbolsAndTimeframes]))
        self.symbols = list(set([x[0] for x in symbolsAndTimeframes]))
        # keyed exchange only when trading; read-only exchange otherwise
        if is_trade:
            self.exchange = get_async_binance_portfolio_exchange()
        else:
            self.exchange = get_async_binance_portfolio_exchange_no_key()
        self.stop_sig = False
        self.th = None
        self.gs = self.group_by_tf(symbolsAndTimeframes)
        self.last_send_time = {}  # {(tf, symbol): xdatetime of last sent candle}
        self.last_records = {}  # {(tf, symbol): [xdatetime, o, h, l, c, v]}
        self.senders = {}  # {tf: topic sender context manager}
        self.last_record_lock = asyncio.Lock()

    @staticmethod
    def group_by_tf(symbolsAndTimeframes: "list[tuple[str, TimeFrameType]]"):
        """Group (symbol, timeframe) pairs by timeframe:
        {tf: [(symbol, tf), ...]}."""
        results = {}
        for symbol, tf in symbolsAndTimeframes:
            results.setdefault(tf, []).append((symbol, tf))
        return results

    def add_watches(self, symbolsAndTimeframes):
        """Merge additional (symbol, timeframe) pairs into the watch set,
        deduplicating against the existing ones."""
        self.symbols_timeframes = list(
            set([tuple(x) for x in self.symbols_timeframes + symbolsAndTimeframes])
        )

    @staticmethod
    def _kline_msg(timeframe, symbol, pk):
        """Build the topic message for one completed candle.

        ``pk`` is an OHLCV row: [xdatetime, open, high, low, close, volume].
        Shared by both emission paths so the message shape stays consistent.
        """
        return {
            "timeframe": timeframe,
            "bfname": "kline",
            "symbol": symbol,
            "start": pk[0],
            "end": pk[0],
            "factors": [
                {
                    "xdatetime": pk[0],
                    "open": pk[1],
                    "high": pk[2],
                    "low": pk[3],
                    "close": pk[4],
                    "volume": pk[5],
                }
            ],
        }

    async def send_records(self):
        """Periodically flush cached candles whose period has elapsed."""
        while True:
            await asyncio.sleep(0.1)

            async with self.last_record_lock:
                to_deletes = []
                # hoisted: one clock read per sweep, consistent across entries
                now_ms = time.time() * 1000
                for (frame, symbol), pk in self.last_records.items():
                    # candle is closed once its open time + one period passed
                    if pk[0] + time_counts[frame] * 1000 < now_ms:
                        msg = self._kline_msg(frame, symbol, pk)
                        print(f"ontime send msg: {msg}")
                        self.senders[frame].send(json.dumps(msg))
                        self.last_send_time[(frame, symbol)] = pk[0]
                        to_deletes.append((frame, symbol))
                for kk in to_deletes:
                    del self.last_records[kk]

    async def continue_watch_job(self):
        """Main loop: stream candles from the exchange and publish completed
        ones; runs until ``stop_sig`` is set or an exception propagates."""
        print("Start watch data")
        asyncio.create_task(self.send_records())
        try:
            while True:
                for frame, pairs in self.gs.items():
                    results = await self.exchange.watch_ohlcv_for_symbols(pairs)
                    if frame not in self.senders:
                        # lazily open one sender per timeframe topic
                        self.senders[frame] = send_topic(get_whole_factor_topic(frame))
                        self.senders[frame].__enter__()
                    print(f"watch results: {results}")
                    async with self.last_record_lock:
                        for symbol, batch in results.items():
                            print(f"deal watch {symbol}")
                            # batch: {tf: 2-D OHLCV rows}
                            for tf, datas in batch.items():
                                print(f"deal watch {tf}")
                                for row in datas:
                                    # BUG FIX: the original tested membership
                                    # with (tf, symbol) but indexed/stored with
                                    # (frame, symbol) — a KeyError whenever
                                    # tf != frame. One consistent key is used
                                    # throughout.
                                    key = (tf, symbol)
                                    if (
                                        key in self.last_send_time
                                        and row[0] <= self.last_send_time[key]
                                    ):
                                        continue
                                    print(f"{time.time()} deal watch {row}")
                                    if (
                                        key in self.last_records
                                        and row[0] > self.last_records[key][0]
                                    ):
                                        # a newer candle opened, so the cached
                                        # one is final: publish it
                                        pk = self.last_records[key]
                                        msg = self._kline_msg(tf, symbol, pk)
                                        print(f"next push send msg: {msg}")
                                        self.senders[frame].send(json.dumps(msg))
                                        self.last_send_time[key] = pk[0]
                                    self.last_records[key] = row
                if self.stop_sig:
                    await self.exchange.close()
                    break
        finally:
            # Close senders on BOTH the normal stop path and the error path.
            # The original only closed them on exception and re-raised with
            # `raise Exception(e)`, which destroyed the traceback; letting the
            # exception propagate naturally preserves it.
            for sender in self.senders.values():
                sender.__exit__(None, None, None)

    def start(self):
        """Launch the watch loop on a daemon thread with its own event loop."""
        self.stop_sig = False
        self.th = threading.Thread(
            target=asyncio.run, args=(self.continue_watch_job(),)
        )
        self.th.daemon = True
        self.th.start()
        print("Start watch data thread")

    def stop(self):
        """Signal the watch loop to close the exchange and exit."""
        self.stop_sig = True

    def join(self):
        """Wait for the watch thread to finish."""
        self.th.join()


@ray.remote(resources={"binance_data": 1}, max_retries=-1, retry_exceptions=True)
def remote_watch_ohlcv(
    symbolsAndTimeframes: list[tuple[str, TimeFrameType]], is_trade: bool = False
):
    """Ray task: run a WatchDataCenter watch loop on a binance_data node.

    Blocks for the lifetime of the watch loop; ray restarts it on failure
    (max_retries=-1 with retry_exceptions enabled).
    """
    center = WatchDataCenter(symbolsAndTimeframes, is_trade)
    asyncio.run(center.continue_watch_job())


@ray.remote
def remote_watch_multi_factors(
    consider_symbols: list[str],
    consider_fnames: list[str],
    timeframe: TimeFrameType,
):
    """Ray task: watch the factor topic and print each completed batch.

    Starts a WatchFactorCenter watcher thread, then loops forever pulling
    complete (symbol, fname) batches off its queue.
    """
    watcher = WatchFactorCenter(consider_symbols, consider_fnames, timeframe)
    watcher.start()
    while True:
        print(watcher.get_batch_factors())
