#include "lprnet_postprocess.h"

namespace sp::Model::LPRnet{

/// @brief Free-function entry point that registers the lprnet_postprocess
///        factories with the global Function registry (see registe()).
///        Presumably called once at startup by the module loader — verify.
void registe_lprnet_postprocess(){
    lprnet_postprocess::registe();
}

/// @brief Parameterized constructor. Forwards the hard-coded module/repo paths
///        and the "lprnet" module/class names to the PythonFunction base, then
///        fills in this function's identity and supported I/O types.
/// @param params extra constructor parameters; this function accepts none, so
///               the list is required to be empty.
lprnet_postprocess::lprnet_postprocess(std::vector<std::string> params): PythonFunction("/home/lx/SmartPipe/src/core/functions/Model/LPRnet/lprnet/", "/home/lx/SmartPipe/src/core/functions/Model/LPRnet/lprnet/repo/plate/", "lprnet", "lprnet", params){
    // This function takes no configuration parameters.
    assert(params.size() == 0);
    name = "lprnet_postprocess";
    class_name = "Model-LPRnet-lprnet_postprocess";
    // Declares one supported mapping: a TENSOR input produces a STRING output.
    std::pair<std::vector<uint8_t>, uint8_t> io_mapping({TENSOR}, STRING);
    data_type_support.push_back(io_mapping);
}

/// @brief Default constructor. Uses the PythonFunction default base constructor
///        and then assigns the same identity, Python module paths, and I/O type
///        support as the parameterized constructor.
lprnet_postprocess::lprnet_postprocess(): PythonFunction(){
    // Identity within the Function registry.
    name = "lprnet_postprocess";
    class_name = "Model-LPRnet-lprnet_postprocess";
    // Location of the embedded Python module and its home/repo directory.
    pModuleName = "lprnet";
    pClassName = "lprnet";
    pModulePath = "/home/lx/SmartPipe/src/core/functions/Model/LPRnet/lprnet/";
    pModuleHomePath = "/home/lx/SmartPipe/src/core/functions/Model/LPRnet/lprnet/repo/plate/";
    // Declares one supported mapping: a TENSOR input produces a STRING output.
    std::pair<std::vector<uint8_t>, uint8_t> io_mapping({TENSOR}, STRING);
    data_type_support.push_back(io_mapping);
}

/// @brief Destructor; this class owns no resources beyond what the
///        PythonFunction base manages, so the default is sufficient.
lprnet_postprocess::~lprnet_postprocess() = default;

/// @brief Registers both factory functions under the class key
///        "Model-LPRnet-lprnet_postprocess".
/// NOTE(review): the same key is registered twice with two different factory
/// signatures (with-params and no-params). Presumably Function::Register is
/// overloaded and stores each factory kind in a separate registry; if it is a
/// single map keyed by string, the second call would overwrite the first —
/// verify against the Function base class.
void lprnet_postprocess::registe(){
    Function::Register("Model-LPRnet-lprnet_postprocess", createObject);
    Function::Register("Model-LPRnet-lprnet_postprocess", createObject2);
}

/// @brief Factory: builds an lprnet_postprocess from a parameter list.
/// @param params constructor parameters (must be empty; see the constructor).
/// @return a heap-allocated instance, owned by the caller/registry.
Function* lprnet_postprocess::createObject(std::vector<std::string> params){
    Function* instance = new lprnet_postprocess(params);
    return instance;
}

/// @brief Parameterless factory: builds a default-constructed instance.
/// @return a heap-allocated instance, owned by the caller/registry.
Function* lprnet_postprocess::createObject2(){
    Function* instance = new lprnet_postprocess();
    return instance;
}

/// @brief Initializes the embedded Python side and constructs the Python
///        lprnet instance (calling the class object with no arguments).
/// NOTE(review): the result of PyObject_CallObject is not checked for nullptr,
/// so a failing Python constructor would leave pInstance null and crash later
/// in process(); any previous pInstance is also overwritten without a DECREF —
/// confirm lifetime handling in the PythonFunction base.
void lprnet_postprocess::start(){
    // Build the Python instance.
    PythonFunction::defaultStart();
    pInstance = PyObject_CallObject(pConstruct, nullptr);
}

/// @brief Resource-acquisition hook; this function needs no exclusive
///        resources, so it always reports ready.
bool lprnet_postprocess::waitForResource(){
    return true;
}

/*
    Data flow:
        vector<vector<Data*>> -> [Tensor, Tensor, ...]
            lprnet_postprocess: [Tensor, Tensor, ...] -> [[String, String, ...], [String, String, ...], ...]
        [[String, String, ...], [String, String, ...], ...] -> vector<Head*> (Head* -> String* -> String* -> ...)
*/
/// @brief Runs the Python "postprocess" method over a batch of input tensors
///        and converts the Python result back into C++ Data objects.
/// @param data_input  one inner vector per batch element; each is expected to
///                    hold exactly one TENSOR Data (only the first element's
///                    size is asserted here).
/// @param data_output receives the converted results (filled by convertToCpp).
void lprnet_postprocess::process(std::vector<std::vector<Data*>>& data_input, std::vector<Data*>& data_output){
    assert(data_input.size() != 0);
    assert(data_input[0].size() == 1);
    // Convert the input tensors into a Python object.
    PyObject* data = convertToPython(data_input);
    // Invoke the Python method.
    // NOTE(review): `data` is placed in a 1-tuple whose reference is then passed
    // as the single "O" argument, so Python receives the tuple (data,) rather
    // than data itself — confirm the Python postprocess() signature expects
    // this. PyTuple_SetItem steals `data`'s reference; pArgs/pReturn appear to
    // be members released elsewhere (PythonFunction?) — verify no per-call leak.
    pArgs = PyTuple_New(1);
    PyTuple_SetItem(pArgs, 0, data);
    pReturn = PyObject_CallMethod(pInstance, "postprocess", "O", pArgs);
    // Parse the Python return value into data_output.
    convertToCpp(pReturn, data_input, data_output, 1);
    // Free input tensors whose reference count has dropped to zero.
    // (Range-for replaces the previous signed/unsigned `int i < size()` loop.)
    for(auto& batch : data_input){
        assert(batch[0]->type == TENSOR);
        if(batch[0]->subRefCount() == 0){
            executor->freeAll(batch[0]);
        }
    }
}

/// @brief Resource-release hook; nothing was acquired in waitForResource(),
///        so there is nothing to release.
bool lprnet_postprocess::releaseResource(){
    return true;
}

/// @brief Shutdown hook; delegates Python-side teardown to the base class.
void lprnet_postprocess::finish(){
    PythonFunction::defaultFinish();
}

/// @brief Copies common Function state from `other` using the base-class
///        default copy; this subclass adds no extra state to copy.
void lprnet_postprocess::copy(Function* other){
    Function::defaultCopy(other);
}

};