import base64
import asyncio
from utils.util import Util
from utils import settings
from utils import restful_tdengine
from utils import async_tdengine3


async def send_index(res: list, topic_sn: str, kafka_producer):
    """
    Forward subscribed index data to Kafka and persist it to TDengine.

    Only records whose Code is one of 100/101/107/112 are forwarded;
    everything else in the subscription payload is ignored.

    :param res: subscribed data — a list of channel records, each with
                "ChNo", "Time" (list of timestamp strings) and "Content"
                (list of {"Code": ..., "Values": [...]})
    :param topic_sn: gathering-station serial number, used in the Kafka key
    :param kafka_producer: Kafka producer wrapper exposing an async produce()
    """
    for line in res:
        # BUG FIX: the original had a trailing comma here, which turned
        # ch_no into a one-element tuple and corrupted both the Kafka
        # message key and the "ch_no" field of every data line.
        ch_no = line["ChNo"]
        for con in line['Content']:
            code = con["Code"]
            if code not in (100, 101, 107, 112):
                continue
            data_list = []
            for n, val in enumerate(con['Values']):
                data_line = {
                    "topic_sn": topic_sn,
                    "ch_no": ch_no,
                    "code": code,
                    "type": "float",
                    # One timestamp per value, aligned by index.
                    "timestamp": await Util.str_to_timestamp(line['Time'][n]),
                    "value": str(val)
                }
                # Fire-and-forget publish to Kafka; we do not await the
                # result, so delivery errors surface only in the task.
                asyncio.create_task(
                    kafka_producer.produce(
                        topic=settings.KAFKA_ALGO_PUSH_SETTING["kafka_topic"],
                        data=data_line,
                        key=f"{topic_sn}_{ch_no}_{code}",
                        partition=0,
                    )
                )
                data_list.append(data_line)
            database = settings.TDENGINE_SETTINGS['database']
            sqls = restful_tdengine.naive_insert_sqls(data_list, database)
            # BUG FIX: the original wrote create_task(await coro), which
            # awaited the coroutine inline and then passed its non-awaitable
            # result to create_task, raising TypeError. Pass the coroutine
            # object itself so the insert runs concurrently.
            asyncio.create_task(
                async_tdengine3.async_data(
                    data=sqls,
                    url=settings.TDENGINE_DATABASE['url'].format(database),
                    auth=settings.TDENGINE_DATABASE['auth'],
                    func=async_tdengine3.fmt_td,
                    mapping=False
                )
            )


async def send_wave(res, topic_sn, redis_client, kafka_producer, minio_producer):
    """
    Assemble and forward waveform data to Kafka and MinIO.

    Multi-packet waveforms (PackCount != 1) are buffered fragment by
    fragment in a Redis list until all PackCount fragments have arrived,
    then decoded and concatenated; single-packet waveforms are decoded
    directly. A complete waveform is published to Kafka and archived to
    MinIO as fire-and-forget tasks.

    :param res: subscribed waveform packet (dict with "ChNo", "Code",
                "Id", "Time", "PackCount", base64 "Content", ...)
    :param topic_sn: gathering-station serial number, used in the Kafka key
    :param redis_client: Redis client used as the fragment-reassembly buffer
    :param kafka_producer: Kafka producer wrapper exposing an async produce()
    :param minio_producer: MinIO writer exposing async insert_wave_data()
    """
    ch_no, code = res["ChNo"], res["Code"]
    # BUG FIX: the original used the decoded bytes themselves as the
    # "ready to send" sentinel, so a fully assembled payload whose content
    # decoded to empty bytes (b"" is falsy) was silently dropped, and
    # original_values_list was only conditionally bound. Use an explicit
    # None sentinel on the values list instead.
    original_values_list = None
    if res['PackCount'] != 1:
        # Key uniquely identifies one logical waveform across fragments.
        name_key = f'{res["Id"]}{res.get("ChType", "")}{ch_no}{res["Time"]}{code}'
        # Buffer this fragment in Redis.
        # NOTE(review): redis_client appears to be a synchronous client;
        # these calls block the event loop — confirm whether an async
        # client should be used here.
        redis_client.rpush(name_key, res['Content'])
        # Once every fragment has arrived, reassemble and clean up.
        if redis_client.llen(name_key) >= res['PackCount']:
            all_val = redis_client.lrange(name_key, 0, -1)
            redis_client.delete(name_key)
            original_values_list = []
            for fragment in all_val:
                decoded = base64.b64decode(fragment)
                original_values_list.extend(await Util.format_data(res, decoded))
    else:
        decoded = base64.b64decode(res["Content"])
        original_values_list = await Util.format_data(res, decoded)

    # Only forward once a waveform is fully assembled (partial multi-packet
    # waveforms fall through with original_values_list still None).
    if original_values_list is not None:
        data_line = {
            "timestamp": await Util.str_to_timestamp(res['Time']),
            "type": "list",
            "topic_sn": topic_sn,
            "ch_no": ch_no,
            "code": code,
            "value": original_values_list
        }
        # Fire-and-forget publish to Kafka.
        asyncio.create_task(
            kafka_producer.produce(
                topic=settings.KAFKA_ALGO_PUSH_SETTING["kafka_topic"],
                data=data_line,
                key=f"{topic_sn}_{ch_no}_{code}",
                partition=0,
            )
        )
        # Fire-and-forget archive to MinIO.
        asyncio.create_task(minio_producer.insert_wave_data(data_line))
