# from pytypes import typechecked
from ..factors import bfname_names
from .table_edit import add_factor_to_table, get_factor_from_table_only_time
from .table_orm import get_kline_table_orm
from common.message import get_whole_factor_topic, send_topic
from common.tools import get_cut_range_from_exist_range, get_unfilled_datetime
from common.variables import TimeFrameType
import itertools
import joblib
import json
import pandas as pd
import ray
from tqdm import tqdm
from trade.factors import bfname_names, bfname_obj, bfnames_order, name_bfname
from typing import Callable


def write_factor_to_db(
    results: pd.DataFrame | ray.ObjectRef,
    timeframe: TimeFrameType,
    bfname: str,
    symbol: str,
    start: int,
    end: int,
):
    """Persist computed factor values for one symbol into the kline table.

    Keeps only rows whose index falls inside ``[start, end]``, attaches the
    symbol column, melts the wide frame (one column per factor name) into
    long ``(datetime, symbol, fname, value)`` rows, and hands them to
    ``add_factor_to_table``.

    :param results: wide DataFrame indexed by datetime, one column per
        factor name. NOTE(review): the annotation also admits a
        ``ray.ObjectRef`` but the body assumes a materialized DataFrame —
        confirm callers always pass a DataFrame.
    :param timeframe: timeframe used to resolve the target ORM table.
    :param bfname: base factor name; expanded to column names via
        ``bfname_names``.
    :param symbol: e.g. ``"BTC/USDT"``.
    :param start: inclusive lower bound on the datetime index.
    :param end: inclusive upper bound on the datetime index.
    """
    orm = get_kline_table_orm(timeframe)
    # Filter into a copy instead of dropping in place: the same DataFrame is
    # shared with other callbacks in add_factor, so mutating it here
    # (row drops, reset_index, extra symbol column) leaked side effects to
    # the caller.
    results = results[(results.index >= start) & (results.index <= end)].copy()
    results.reset_index(inplace=True)
    results[orm.symbol.name] = symbol
    results.dropna(inplace=True)
    to_write = results.melt(
        id_vars=[orm.xdatetime.name, orm.symbol.name],
        var_name=orm.fname.name,
        value_name=orm.value.name,
    )

    add_factor_to_table(
        to_write, timeframe, True, start, end, [symbol], bfname_names[bfname]
    )


def notify_factors(df, timeframe, bfname, symbol, start, end):
    """Publish freshly computed factors on the whole-factor topic.

    The topic sender is created lazily on the first call and cached as a
    function attribute, so every later call reuses the same sender.
    """
    sender = getattr(notify_factors, "sender", None)
    if sender is None:
        sender = send_topic(get_whole_factor_topic(timeframe))
        notify_factors.sender = sender
    payload = {
        "timeframe": timeframe,
        "bfname": bfname,
        "symbol": symbol,
        "start": start,
        "end": end,
        "factors": df.to_dict("records"),
    }
    sender.send(json.dumps(payload))


def add_factor(
    bfname: str,
    symbol: str,
    start: int,
    end: int,
    timeframe: TimeFrameType,
    callbacks: list[Callable] | None = None,
):
    """Calculate one base factor for a symbol and run callbacks on the result.

    ``write_factor_to_db`` is always appended as the final callback, so the
    computed values are persisted after any user-supplied callbacks have run.

    :param bfname: factor name, a key into ``bfname_obj``.
    :param symbol: e.g. ``"BTC/USDT"``.
    :param start: e.g. ``20240101``.
    :param end: e.g. ``20240531``.
    :param timeframe: bar timeframe the factor is computed on.
    :param callbacks: optional extra callables, each invoked as
        ``cb(results, timeframe, bfname, symbol, start, end)``.
    :return: ``(results, timeframe, bfname, symbol, start, end)`` on success,
        or ``None`` when the factor produced no data.
    """
    if callbacks is None:
        callbacks = []
    # Build a new list rather than appending, so the caller's list is never
    # mutated across calls.
    callbacks = [*callbacks, write_factor_to_db]
    factor_obj = bfname_obj[bfname]

    results = factor_obj.calculate(symbol, timeframe, start, end)
    if results is None or results.empty:
        # Explicit None: nothing computed, nothing written or notified.
        return None

    for func in callbacks:
        func(results, timeframe, bfname, symbol, start, end)
    return results, timeframe, bfname, symbol, start, end


@ray.remote(max_retries=3, resources={"binance_data": 1})
def remote_add_factor(*args, **kwargs):
    # Ray task wrapper around add_factor: reserves one "binance_data"
    # custom resource unit and retries up to 3 times on worker failure.
    return add_factor(*args, **kwargs)


def add_batch_factors(
    bfname_list: list[str],
    symbol_list: list[str],
    start: int,
    end: int,
    timeframe: TimeFrameType,
    max_part: int = 1000,
    n_jobs: int | None = None,
    verbose: int = 10,
    callbacks: list[Callable] | None = None,
    is_cover: bool = False,
):
    """Compute factors for every ``(symbol, bfname)`` pair over [start, end].

    For each pair, already-filled datetime ranges are subtracted (unless
    ``is_cover``) and the remaining gaps are cut into chunks of at most
    ``max_part``. Chunks run inline when ``n_jobs <= 1``, otherwise as ray
    tasks.

    :param bfname_list: base factor names to compute.
    :param symbol_list: symbols to compute them for.
    :param start: inclusive range start, e.g. ``20240101``.
    :param end: inclusive range end.
    :param timeframe: bar timeframe.
    :param max_part: maximum chunk size handed to one job.
    :param n_jobs: parallelism; ``None`` (the default) resolves to
        ``joblib.cpu_count()`` at call time instead of freezing the value at
        import time.
    :param verbose: kept for interface compatibility; currently unused.
    :param callbacks: extra callbacks forwarded to ``add_factor``.
    :param is_cover: when True, recompute the whole [start, end] range even
        where values already exist.
    """
    if n_jobs is None:
        n_jobs = joblib.cpu_count()
    start = int(start)
    end = int(end)
    job_params = []
    for symbol, bfname in tqdm(
        itertools.product(symbol_list, bfname_list), desc="deal add_batch"
    ):
        fname_list = bfname_names[bfname]
        if is_cover:
            # Cover mode: treat the whole requested range as unfilled.
            unfill_list = [[start, end]]
        else:
            # Only compute the datetimes not already present in the table;
            # checking the first fname is enough since a base factor writes
            # all of its fnames together.
            exist_datetime = get_factor_from_table_only_time(
                timeframe, start, end, symbol, fname_list[0]
            )
            unfill_list = get_unfilled_datetime(exist_datetime, timeframe, start, end)
        unfill_cuts = get_cut_range_from_exist_range(unfill_list, timeframe, max_part)

        for xstart, xend in tqdm(unfill_cuts, desc=f"sub add_batch {symbol} {bfname}"):
            # xend is exclusive in the cut ranges; add_factor takes an
            # inclusive end, hence xend - 1.
            params = (bfname, symbol, xstart, xend - 1, timeframe, callbacks)
            if n_jobs <= 1:
                add_factor(*params)
            else:
                job_params.append(params)
    if job_params:
        refs = [remote_add_factor.remote(*params) for params in job_params]
        # Block until every task has finished. NOTE(review): ray.wait does
        # not raise on task errors — failures surface only in ray logs;
        # use ray.get(refs) if errors should propagate.
        ray.wait(refs, num_returns=len(refs))


def add_batch_factors_dag(
    bfname_list: list[str],
    fname_list: list[str],
    symbol_list: list[str],
    start: int,
    end: int,
    timeframe: TimeFrameType,
    max_part: int = 1000,
    n_jobs: int | None = None,
    verbose: int = 10,
    callbacks: list[Callable] | None = None,
    is_cover: bool = False,
):
    """Compute a set of factors in dependency order.

    Merges ``bfname_list`` with the base factors backing ``fname_list``
    (via ``name_bfname``), orders them into dependency batches with
    ``bfnames_order``, and runs ``add_batch_factors`` batch by batch so
    that a factor is computed only after the factors it depends on.

    :param fname_list: concrete factor column names; each is mapped back to
        its base factor name.
    :param n_jobs: parallelism; ``None`` (the default) resolves to
        ``joblib.cpu_count()`` at call time instead of freezing the value at
        import time.

    Other parameters are forwarded unchanged to ``add_batch_factors``.
    """
    print(
        "start add batch factors dag", bfname_list, fname_list, symbol_list, start, end
    )
    if n_jobs is None:
        n_jobs = joblib.cpu_count()
    # Deduplicate: an fname's base factor may already be in bfname_list.
    all_bfnames = list(set(bfname_list + [name_bfname[x] for x in fname_list]))
    batch_bfnames = bfnames_order(all_bfnames)
    for bfnames in batch_bfnames:
        add_batch_factors(
            bfnames,
            symbol_list,
            start,
            end,
            timeframe,
            max_part,
            n_jobs,
            verbose,
            callbacks,
            is_cover,
        )


@ray.remote
def remote_add_batch_factors_dag(*args, **kwargs):
    # Ray task wrapper around add_batch_factors_dag so the whole DAG run can
    # be scheduled on the cluster; all arguments are passed through as-is.
    return add_batch_factors_dag(*args, **kwargs)
