# from pytypes import typechecked
from common.clickhouse_query import (
    get_cut_range_from_table,
    get_factor_unfilled_range,
    write_to_feature_tb,
)
from common.kafka_message import get_factor_topic
from common.tools import safe_delayed
from common.variables import time_counts, TimeFrameType
from datetime import datetime
import itertools
import joblib
import json
from kafka import KafkaProducer
import pandas as pd
from tqdm import tqdm
from trade.factors import bfname_names, bfname_obj, bfnames_order, name_bfname
from trade.watch.kafka_watcher import KafkaWatcherKline
from typing import Callable


def add_factor(
    bfname: str,
    symbol: str,
    start: str,
    end: str,
    timeframe: TimeFrameType,
    callbacks: list[Callable] = None,
):
    """
    Compute one batch-factor for a symbol over a time range and persist it.

    ``write_to_feature_tb`` is always invoked first, followed by any extra
    *callbacks* supplied by the caller.

    :param bfname: factor name key in ``bfname_obj`` (factor_map)
    :param symbol: trading pair, e.g. ``BTC/USDT``
    :param start: range start, e.g. ``20240101``
    :param end: range end, e.g. ``20240531``
    :param timeframe: kline timeframe
    :param callbacks: optional extra callbacks run after calculation
    :return: None
    """
    # Normalize here: the previous `[write_to_feature_tb] + callbacks`
    # raised TypeError whenever callbacks was left at its None default.
    extra = list(callbacks) if callbacks else []
    factor_obj = bfname_obj[bfname]
    factor_obj.calculate(
        symbol, timeframe, start, end, callbacks=[write_to_feature_tb] + extra
    )


def send_factor_kafka_message(
    symbol: str, df: pd.DataFrame, timeframe: TimeFrameType, obj
):
    """
    Publish a factor DataFrame to the factor Kafka topic in long format.

    The wide *df* (one column per fname plus ``time``) is melted to
    ``(time, fname, value, symbol)`` records and sent as one JSON message.

    :param symbol: trading pair, e.g. ``BTC/USDT``
    :param df: factor values; must contain a ``time`` column
    :param timeframe: kline timeframe (part of the topic name)
    :param obj: factor object providing ``dfname`` for topic resolution
    :return: None
    """
    kafka_producer = KafkaProducer(
        bootstrap_servers="xx_kafka:9092",  # Kafka service address
        value_serializer=lambda v: json.dumps(v).encode(
            "utf-8"
        ),  # serialize message payload as JSON
    )
    try:
        topic = get_factor_topic(symbol, obj.dfname, timeframe)
        df = df.melt(id_vars="time", var_name="fname", value_name="value")
        df["symbol"] = symbol
        kafka_producer.send(topic, df.to_dict(orient="records"))
        # send() is asynchronous; without flush() the message may never leave
        # the client before the producer is garbage-collected.
        kafka_producer.flush()
    finally:
        kafka_producer.close()


def _unfilled_cuts(
    symbol: str,
    fname_list: list[str],
    start: str,
    end: str,
    timeframe: TimeFrameType,
    max_part: int,
    is_cover: bool,
):
    """Return the (start, end) datetime cut pairs that still need computing.

    With ``is_cover`` the whole requested range is treated as unfilled
    (recompute everything); otherwise only the gaps reported by ClickHouse.
    """
    if is_cover:
        delta_minutes = (
            datetime.fromisoformat(end) - datetime.fromisoformat(start)
        ).total_seconds() / 60  # .seconds wraps at 24h; total_seconds() does not
        unfill_df = pd.DataFrame(
            [{"start": start, "end": end, "delta": delta_minutes}]
        )
    else:
        unfill_df = get_factor_unfilled_range(
            start, end, symbol, fname_list, timeframe
        )
    return get_cut_range_from_table(unfill_df, timeframe, max_part)


def add_batch_factors(
    bfname_list: list[str],
    symbol_list: list[str],
    start: str,
    end: str,
    timeframe: TimeFrameType,
    max_part: int = 1000,
    n_jobs: int = joblib.cpu_count(),
    verbose: int = 10,
    callbacks: list[Callable] = None,
    is_cover: bool = False,
):
    """
    Compute every (symbol, bfname) combination over [start, end].

    Runs serially with progress bars when ``n_jobs <= 1``, otherwise fans
    the per-cut jobs out through joblib.

    :param bfname_list: batch-factor names (keys of ``bfname_names``)
    :param symbol_list: trading pairs, e.g. ``["BTC/USDT"]``
    :param start: ISO range start
    :param end: ISO range end
    :param timeframe: kline timeframe
    :param max_part: maximum rows per cut handed to one ``add_factor`` call
    :param n_jobs: parallel workers; <= 1 means serial execution
    :param verbose: joblib verbosity
    :param callbacks: extra callbacks forwarded to ``add_factor``
    :param is_cover: recompute the full range instead of only missing gaps
    :return: None
    """
    if n_jobs <= 1:
        for symbol, bfname in tqdm(
            itertools.product(symbol_list, bfname_list), desc="deal add_batch"
        ):
            fname_list = bfname_names[bfname]
            unfill_cuts = _unfilled_cuts(
                symbol, fname_list, start, end, timeframe, max_part, is_cover
            )
            for xstart, xend in tqdm(
                unfill_cuts, desc=f"sub add_batch {symbol} {bfname}"
            ):
                xstart: datetime
                xend: datetime
                add_factor(
                    bfname,
                    symbol,
                    xstart.isoformat(),
                    xend.isoformat(),
                    timeframe,
                    callbacks,
                )
        return

    job_params = []
    for symbol, bfname in itertools.product(symbol_list, bfname_list):
        fname_list = bfname_names[bfname]
        # Previously the parallel path ignored is_cover; use the shared
        # helper so both paths select cuts identically.
        unfill_cuts = _unfilled_cuts(
            symbol, fname_list, start, end, timeframe, max_part, is_cover
        )
        for xstart, xend in unfill_cuts:
            xstart: datetime
            xend: datetime
            job_params.append(
                (
                    bfname,
                    symbol,
                    xstart.isoformat(),
                    xend.isoformat(),
                    timeframe,
                    callbacks,
                )
            )
    pool = joblib.Parallel(n_jobs=n_jobs, verbose=verbose)
    pool(safe_delayed(add_factor)(*params) for params in job_params)


def add_batch_factors_dag(
    bfname_list: list[str],
    fname_list: list[str],
    symbol_list: list[str],
    start: str,
    end: str,
    timeframe: TimeFrameType,
    max_part: int = 1000,
    n_jobs: int = joblib.cpu_count(),
    verbose: int = 10,
    callbacks: list[callable] = None,
):
    """
    Compute factors layer by layer in dependency (DAG) order.

    Requested factor names are merged with the batch-factors that own each
    entry of *fname_list*, ordered via ``bfnames_order``, and each resulting
    layer is handed to :func:`add_batch_factors`.

    :param bfname_list: batch-factor names requested directly
    :param fname_list: individual factor names; mapped to owners via ``name_bfname``
    :param symbol_list: trading pairs
    :param start: ISO range start
    :param end: ISO range end
    :param timeframe: kline timeframe
    :param max_part: maximum rows per computation cut
    :param n_jobs: parallel workers per layer
    :param verbose: joblib verbosity
    :param callbacks: extra callbacks forwarded to each computation
    :return: None
    """
    # Union of explicitly requested bfnames and the owners of each fname.
    wanted = set(bfname_list)
    wanted.update(name_bfname[fname] for fname in fname_list)
    for layer in bfnames_order(list(wanted)):
        add_batch_factors(
            layer,
            symbol_list,
            start,
            end,
            timeframe,
            max_part=max_part,
            n_jobs=n_jobs,
            verbose=verbose,
            callbacks=callbacks,
        )


def watch_kline_add_factors(
    bfname_list: list[str],
    symbol_list: list[str],
    timeframe: TimeFrameType,
    n_jobs: int = joblib.cpu_count(),
    verbose: int = 10,
):
    """
    Follow live kline messages and compute factors for each new bar.

    For every timestamp yielded by the Kafka kline watcher, the factors are
    computed over the single-bar window [ts, ts + timeframe) and published
    through ``send_factor_kafka_message``.

    :param bfname_list: batch-factor names to compute per bar
    :param symbol_list: trading pairs to watch
    :param timeframe: kline timeframe
    :param n_jobs: parallel workers per batch
    :param verbose: joblib verbosity
    :return: None (loops for as long as the watcher yields)
    """
    watcher = KafkaWatcherKline(timeframe, "factor_from_kline")
    bar_seconds = time_counts[timeframe]
    for tick in watcher.watch_batch_symbols(symbol_list):
        window_start = datetime.fromtimestamp(tick).isoformat()
        window_end = datetime.fromtimestamp(tick + bar_seconds).isoformat()
        add_batch_factors(
            bfname_list,
            symbol_list,
            window_start,
            window_end,
            timeframe,
            n_jobs=n_jobs,
            max_part=1000,
            verbose=verbose,
            callbacks=[send_factor_kafka_message],
        )


if __name__ == "__main__":
    # Manual backfill entry point: compute the rsi50 batch-factor for
    # BTC/USDT on 1-minute bars over 2020-01-01..2025-02-19, using
    # 4 parallel workers and up to 10000 rows per cut.
    # add_factor("rvi14", "BTC/USDT", "2020-11-09", "2021-02-19", "1m")
    add_batch_factors(
        ["rsi50"],  # "pascal14a",cfo14a_ohlc
        ["BTC/USDT"],
        "2020-01-01",
        "2025-02-19",
        "1m",
        n_jobs=4,
        max_part=10000,
    )
