import json
import time
import uuid

from loguru import logger

from CloudEngine.IO.BdfLoader import BdfLoader
from Connector.client import res_success, res_err
from CloudEngine.utils.memTable import memTbl
from service.DataCenter import db

# from RsFx import search_free_edge_cell


class CmdMeshFeaturesSearch:
    """Command handler that searches mesh features (free / shared / T edges).

    The actual search pipeline (the Rust ``search_free_edge_cell`` call plus
    result post-processing) is currently disabled together with its import
    (see the commented-out ``RsFx`` import at the top of the file), so
    ``execute`` only performs the precondition check and replies with an
    error until that dependency is restored.
    """

    # Wire-protocol command name that clients use to invoke this handler.
    name = "cmd_mesh_features_search"
    # NOTE(review): evaluated once at class-definition time, so every
    # instance shares the same value; the name also shadows the builtin
    # `id` on attribute lookup. Confirm whether a per-instance id
    # (assigned in __init__) was intended.
    id = uuid.uuid4()

    def __init__(self, service, Params, Tag, Type):
        """Capture the request context for later use by ``execute``.

        Args:
            service: Owning service object (opaque to this handler).
            Params: Request parameter payload.
            Tag: Correlation tag echoed back in success/error responses.
            Type: Request type echoed back in success/error responses.
        """
        self.service = service
        self.params = Params
        self.tag = Tag
        self.type = Type

    async def execute(self):
        """Run the feature search, replying with an error when no BDF data
        is cached in the in-memory table.

        Currently only the precondition check is live; the full pipeline
        below is parked (see class docstring).
        """
        if not memTbl.get("bdf"):
            # No BDF mesh cached in memory -> nothing to search; report back.
            await res_err(self.tag, self.type, "内存中无可用数据")
            return
        # ------------------------------------------------------------------
        # Disabled pipeline, kept verbatim pending the `RsFx` import above:
        # parse the cached BDF, run the Rust edge search, then shape the
        # three result sets (free / shared / T edges) for the response,
        # uploading the payload to S3 when it exceeds ~10 KiB.
        # ------------------------------------------------------------------
        # bdf_data = memTbl.get("bdf")
        # loader = BdfLoader("")
        # bdf_obj = json.loads(bdf_data)
        # loader.parse(bdf_obj)
        # rust_start = time.time()
        # all_res = search_free_edge_cell(loader.elements_id_index_map, loader.cells_id_index_map)
        # rust_end = time.time()
        # logger.info("rust计算消耗 {} s", rust_end - rust_start)
        # free_edge_result = all_res[0]
        # share_edge_result = all_res[1]
        # t_edge_result = all_res[2]
        # free_edge_back_data = []
        # share_edge_back_data = []
        # t_edge_back_data = []

        # # 处理数据
        # def set_result(i_result, i_back_data):
        #     for item in i_result:
        #         [point_index_1, point_index_2, cell_index] = item.split("#")
        #         i_back_data.append({
        #             "CellId": loader.cell_index_to_cell_id(int(cell_index)),
        #             "CellIndex": int(cell_index),
        #             "line": [int(point_index_1), int(point_index_2)],
        #             "line_points": [
        #                 bdf_obj["nodes"][3 * int(point_index_1)],
        #                 bdf_obj["nodes"][3 * int(point_index_1) + 1],
        #                 bdf_obj["nodes"][3 * int(point_index_1) + 2],
        #                 bdf_obj["nodes"][3 * int(point_index_2)],
        #                 bdf_obj["nodes"][3 * int(point_index_2) + 1],
        #                 bdf_obj["nodes"][3 * int(point_index_2) + 2],
        #             ],
        #             "points": loader.point_index_to_point_coordinate(
        #                 int(point_index_1)) + loader.point_index_to_point_coordinate(
        #                 int(point_index_2)),
        #         })

        # set_result(free_edge_result, free_edge_back_data)
        # set_result(share_edge_result, share_edge_back_data)
        # set_result(t_edge_result, t_edge_back_data)

        # IsBigData = False

        # back_data = {
        #     "FreeEdge": free_edge_back_data,
        #     "ShareEdge": share_edge_back_data,
        #     "TEdge": t_edge_back_data,
        # }
        # logger.success("py处理消耗 {}s", time.time() - rust_end)
        # size = free_edge_back_data.__sizeof__() + share_edge_back_data.__sizeof__() + t_edge_back_data.__sizeof__()
        # print("数据大小", size)
        # if size > 1024 * 10:
        #     IsBigData = True
        #     Datapath = db.S3.upload_json("/cmd/" + str(uuid.uuid4()) + ".json",
        #                                  json.dumps(back_data))
        #     logger.info("总消耗 {} s", time.time() - rust_start)
        #     await res_success(self.tag, self.type, {
        #         "IsBigData": IsBigData,
        #         "Data": "",
        #         "DataPath": Datapath,
        #         "DataEngine": "FileServer",
        #     })
        # else:
        #     logger.info("总消耗 {} s", time.time() - rust_start)
        #     await res_success(self.tag, self.type, {
        #         "IsBigData": IsBigData,
        #         "Data": back_data,
        #         "DataPath": "",
        #         "DataEngine": "FileServer",
        #     })
