import threading
import random
import time
from django.views.decorators.csrf import csrf_exempt
from rest_framework.decorators import api_view
from rest_framework.response import Response
from Util.SQL import *
from Model.ModelManage import Controller
#from Util.Space import space_model
import os


@csrf_exempt
@api_view(['GET', 'POST'])
def get_model(request):
    """Return every row of the ``model`` table with its ``json`` column expanded."""
    rows = select("SELECT * FROM model;")
    return Response(data=model_data_process(rows))


@csrf_exempt
@api_view(['GET', 'POST'])
def get_one_model(request, model_id):
    """Return the ``model`` row(s) matching *model_id*, json column expanded.

    ``model_id`` comes from the URL, i.e. untrusted input; single quotes are
    doubled so it cannot break out of the SQL string literal.
    NOTE(review): bound parameters would be preferable — confirm whether
    Util.SQL's ``select`` supports them.
    """
    safe_id = str(model_id).replace("'", "''")  # escape for SQL string literal
    model = select(f"SELECT * FROM model WHERE model_id = '{safe_id}';")
    data_dict = model_data_process(model)
    return Response(data=data_dict)


@csrf_exempt
@api_view(['GET', 'POST'])
def model_upload(request):
    """Receive one chunk of a chunked model upload; merge when all chunks exist.

    Expects multipart POST fields: ``file`` (the chunk bytes), ``chunkIndex``,
    ``totalChunks`` and ``modelInfo`` (a JSON string with at least
    ``model_id``, ``type``, ``raw``, ``input_size`` and ``final`` keys).
    The client calls this endpoint once per chunk; the request that completes
    the set triggers the merge and model registration.
    """
    if request.method == 'POST':

        file = request.FILES.get('file')
        chunk_index = int(request.POST.get('chunkIndex'))
        total_chunks = int(request.POST.get('totalChunks'))
        modelInfo = request.POST.get('modelInfo')

        # Directory where merged raw .pth files are stored.
        raw_path = get_path(os.path.abspath(__file__), 4, get_config("model", "raw_path"))

        modelInfo_dict = json.loads(modelInfo)
        model_name = f"{modelInfo_dict['model_id']}.pth"
        # NOTE(review): this inserts a lock row on EVERY chunk request, not
        # just the first — confirm the "locks" table tolerates duplicates.
        insert([{"model_id": modelInfo_dict['model_id'], "model_lock": 0}], "locks")

        temp_folder = os.path.join(raw_path, "temp_chunks", model_name)
        os.makedirs(temp_folder, exist_ok=True)

        # Persist this chunk as <index>.part inside the temp folder.
        chunk_file_path = os.path.join(temp_folder, f"{chunk_index}.part")
        with open(chunk_file_path, 'wb') as f:
            for chunk in file.chunks():
                f.write(chunk)

        # Check whether every expected chunk file is now present on disk.
        chunk_path = []
        for i in range(total_chunks):
            chunk_path.append(os.path.join(temp_folder, f"{i}.part"))
        all_chunks_uploaded = all(
            os.path.exists(chunk_path[i]) for i in range(total_chunks)
        )

        # Once all chunks are present, merge them in order into the final file.
        if all_chunks_uploaded:
            condition = f"model_id = '{modelInfo_dict['model_id']}'"
            # Random sub-millisecond sleep to reduce the chance that two
            # concurrent "last chunk" requests both read model_lock == 0.
            # NOTE(review): this check-then-set is NOT atomic — a DB-level
            # transaction or row lock is needed for a real guarantee.
            random_microseconds = random.randint(0, 100)
            time.sleep(random_microseconds / 1e6)
            lock = select(f"SELECT model_lock from locks WHERE {condition}")[0]
            if lock["model_lock"] == 0:
                update({"model_lock": 1}, condition, "locks")
                # Merge on a background thread; combine_files clears the lock.
                threading.Thread(target=combine_files, args=(raw_path, model_name, total_chunks, chunk_path, temp_folder, condition)).start()

                modelInfo_dict['format'] = {"raw": "pth", "ready": "onnx"}
                modelInfo_dict['json'] = ''
                if modelInfo_dict['type'] == "识别":
                    modelInfo_dict['type'] = "recognition"
                # Move the flat upload fields under the "format" key.
                modelInfo_dict["format"]["raw"] = modelInfo_dict.pop("raw")
                modelInfo_dict["format"]["input_size"] = modelInfo_dict.pop("input_size")
                modelInfo_dict["format"]["final"] = modelInfo_dict.pop("final")

                controller = Controller()
                controller.process('upload', input_json=[modelInfo_dict])
                # Read back the format the controller stored and mirror it
                # onto the model row.
                sql_string = f"SELECT format FROM model WHERE model_id = '{modelInfo_dict['model_id']}'"
                format_dict = select(sql_string)[0]
                update(format_dict, condition, "model")

    return Response()




@csrf_exempt
@api_view(['GET', 'POST'])
def model_validate(request, model_id):
    """Validate a model; on failure delete its row and report False.

    Returns ``Response(data=True)`` when the Controller 'validate' step
    completes, ``Response(data=False)`` otherwise.
    """
    state = True
    try:
        controller = Controller(model_id=model_id)
        controller.process('validate')
    except Exception:
        # Validation failed: remove the half-registered model row.
        # NOTE(review): model_id is interpolated into SQL — escape or bind it
        # if it can ever be user-supplied.
        delete(f"model_id = '{model_id}'", "model")
        state = False
    # The original returned from ``finally``, which silently swallows any
    # exception raised above (including ones from the except handler itself).
    # A plain return after the try block preserves the intended behavior
    # without that trap.
    return Response(data=state)


@csrf_exempt
@api_view(['GET', 'POST'])
def model_compress(request, model_id):
    """Run the compression pipeline for *model_id*; skip if already compressed.

    Returns ``Response(data=True)`` on success (or when a compressed .onnx
    already exists), ``Response(data=False)`` when the pipeline raises.
    """
    state = True

    # Compute the compression workspace once (the original called
    # get_path/get_config twice for the same value).
    compress_root = get_path(os.path.abspath(__file__), 4, get_config("model", "compress_path"))
    compress_path = os.path.join(compress_root, model_id)
    input_path = os.path.join(compress_path, "input")
    output_path = os.path.join(compress_path, "output")

    # Already compressed on a previous run — nothing to do.
    src_file = os.path.join(output_path, f"{model_id}.onnx")
    if os.path.exists(src_file):
        return Response(data=state)

    # Create the workspace folders for this compression run
    # (makedirs creates compress_path implicitly as a parent).
    os.makedirs(input_path, exist_ok=True)
    os.makedirs(output_path, exist_ok=True)

    try:
        controller = Controller(model_id=model_id)
        controller.process('compress', {'algorithm_id': 1, 'dataset_id': "imagenet", 'input_size': 224})
    except Exception:
        state = False

    # Best-effort cleanup of the parameter file the compressor drops.
    try:
        os.remove('../compress_params.bin')
    except FileNotFoundError:
        pass  # original printed a blank line here; nothing to clean up
    return Response(data=state)


@csrf_exempt
@api_view(['GET', 'POST'])
def model_test(request, model_id):
    """Evaluate a model and return its processed json metrics.

    Returns the processed metrics (first row, json expanded) on success,
    or ``Response(data=False)`` when the evaluate pipeline raises.
    """
    # The original wrapped a bare ``pass`` ("copy model to evaluate folder",
    # never implemented) in try/except FileNotFoundError; a ``pass`` cannot
    # raise, so that branch was unreachable and the nesting is flattened here.
    try:
        controller = Controller(model_id=model_id)
        controller.process('evaluate', {'device': 'gpu', 'dataset_id': "imagenet", 'input_size': 224})
    except Exception:
        # NOTE(review): consider logging the failure instead of discarding it.
        return Response(data=False)
    sql_string = f"SELECT json FROM model WHERE model_id = '{model_id}';"
    data = select(sql_string)
    data_dict = model_data_process(data)
    return Response(data=data_dict)


@csrf_exempt
@api_view(['GET', 'POST'])
def model_change(request):
    """Update editable metadata fields of a model from the request body.

    Expects ``name``, ``location``, ``type``, ``attribute``, ``state`` and
    ``modelId`` keys in ``request.data``; returns the ``update`` status.
    """
    data_dict = request.data
    update_data = {"name": data_dict['name'], "location": data_dict['location'],
                   "type": data_dict['type'], "attribute": data_dict['attribute'],
                   "state": data_dict['state']}
    # modelId is client-supplied: double single quotes so it cannot break out
    # of the SQL string literal. NOTE(review): bound parameters preferred.
    safe_id = str(data_dict['modelId']).replace("'", "''")
    condition = f"model_id = '{safe_id}'"
    state = update(update_data, condition, "model")
    return Response(data=state)


@csrf_exempt
@api_view(['GET', 'POST'])
def model_data(request, model_id):
    """Return the raw ``model`` table row for *model_id* (None if absent)."""
    # model_id comes from the URL: escape single quotes for the SQL literal.
    safe_id = str(model_id).replace("'", "''")
    data = select(f"SELECT * FROM model WHERE model_id = '{safe_id}';")
    # Guard the previous unconditional data[0], which raised IndexError
    # (HTTP 500) for an unknown model_id.
    return Response(data=data[0] if data else None)


@csrf_exempt
@api_view(['GET', 'POST'])
def model_save(request, model_id):
    """Mark the given model's row as ready and return the update status."""
    result = update({"state": "ready"}, f"model_id = '{model_id}'", "model")
    return Response(data=result)


@csrf_exempt
@api_view(['POST'])
def deploy(request):
    """Record a model deployment (satellite, model, gps) in the deploy table."""
    record = {
        "satellite": request.POST.get('satellite_id'),
        "model": request.POST.get('model_id'),
        "gps": request.POST.get('gpsValue'),
    }
    # space_model(satellite_id, model_id, 0)
    return Response(data=insert([record], "deploy"))


@csrf_exempt
@api_view(['POST'])
def show_deploy(request):
    """List deploy-table rows for the POSTed ``satellite`` identifier."""
    satellite = request.POST.get('satellite')
    # satellite is client input: double single quotes so it cannot break out
    # of the SQL string literal. NOTE(review): bound parameters preferred.
    safe_sat = str(satellite).replace("'", "''")
    models = select(f"SELECT * FROM deploy WHERE satellite = '{safe_sat}';")
    return Response(data=models)


def model_data_process(data):
    """Expand each row's ``json`` string column into a nested dict, in place.

    Each item must carry a ``json`` key holding either an empty string or a
    pseudo-JSON string (single-quoted, possibly containing backslashes).
    Missing ``evaluate``/``compress`` sections are filled with empty-string
    templates, and ``evaluate.test_image`` is prefixed with the configured
    result path.  Returns the same list, mutated.
    """
    # Shared empty templates (the original duplicated these literals twice).
    empty_evaluate = {'test_image': '', 'acc_value': '', 'cpu_occupancy': '',
                      'gpu_occupancy': '', 'memory_occupancy': ''}
    empty_compress = {'compress_algorithm_id': '', 'pre-compression_acc': '',
                      'compressed_acc': '', 'pre-compression_size': '',
                      'compressed_size': '', 'compression_ratio': ''}
    for item in data:
        data_str = item["json"]
        if not data_str:
            data_dict = {'evaluate': dict(empty_evaluate),
                         'compress': dict(empty_compress)}
        else:
            # Stored strings use single quotes and raw backslashes; normalize
            # them into strict JSON before parsing.
            data_str = data_str.replace("\\", "\\\\")
            data_dict = json.loads(data_str.replace("'", '"'))
            if "evaluate" in data_dict:
                # Fetch result_path lazily: the original called get_config for
                # every row even when the value was never used.
                result_path = get_config("model", "result_path")
                data_dict["evaluate"]["test_image"] = result_path + '/' + data_dict["evaluate"]["test_image"]
            else:
                data_dict["evaluate"] = dict(empty_evaluate)
            if "compress" not in data_dict:
                data_dict["compress"] = dict(empty_compress)
        item["json"] = data_dict
    return data


def combine_files(raw_path, model_name, total_chunks, chunk_path, temp_folder, condition):
    """Merge uploaded chunk files into the final model file, then clean up.

    Runs on a background thread started by ``model_upload``.  The lock row
    matched by *condition* is now cleared in a ``finally`` block: previously
    any exception while merging left ``model_lock`` stuck at 1 forever,
    permanently blocking re-uploads of that model.
    """
    try:
        final_file_path = os.path.join(raw_path, model_name)
        with open(final_file_path, 'wb') as final_file:
            for i in range(total_chunks):
                with open(chunk_path[i], 'rb') as chunk_file:
                    final_file.write(chunk_file.read())

        # Best-effort cleanup of the per-upload temp chunk files and folder.
        try:
            for path in chunk_path:
                os.remove(path)
            os.rmdir(temp_folder)
        except FileNotFoundError:
            pass
    finally:
        # Always release the upload lock, even if the merge failed.
        update({"model_lock": 0}, condition, "locks")