/***************************************************************************
 创建者: 华磊
 开始时间: 2020.8.14
 copyright: (C) 华友高科
 修改说明: (每次有修改就添加一条，带有 修改人，修改时间，修改描述)
 example (1) hualei 2020.5.8 类的具体实现代码编写
 ***************************************************************************/

#include "visiontrack.h"
#include "belttrack.h"
#include "GeneralDefine.h"

#include "coordinatemanager.h"
#include "regdata.h"
#include "motionmessage.h"
#include "frames.hpp"

#define D_VISION_PR_1_INDEX 51
#define D_VISION_PR_2_INDEX 52
#define D_VISION_POINT_COUNT_R_INDEX 49 //+visionId distinguishes the vision channel number
#define D_IF_USE_EXPOSURE_LATCH 0 //whether to use exposure latching

#define D_TRIG_DEBUG 1

/**
 * @brief Constructs one vision-tracking channel bound to a single camera.
 *
 * Caches the injected collaborators (register data, belt tracker, message
 * sink), zeroes all runtime state, wires up the internal TCP signal/slot
 * plumbing and precomputes the camera-offset transform from the config.
 *
 * @param visionIdIn      id of this vision channel (also offsets R-register index)
 * @param regDataIn       register storage the vision results are written into
 * @param configIn        vision configuration (camera ip/port, trigger mode, offsets)
 * @param motionMessageIn message sink used by addMsg (stored, not used here)
 * @param deltaTimeIn     realtime loop period, used to convert trigger time to ticks
 * @param trackIn         belt tracker (may be NULL for non-tracking use)
 * @param parent          Qt parent object
 */
VisionTrack::VisionTrack(int visionIdIn, RegData *regDataIn, VisionTrackConfig configIn, MotionMessage *motionMessageIn
                         , double deltaTimeIn, BeltTrack *trackIn, QObject *parent) : QObject(parent)
{
    // Cache constructor arguments and reset all runtime flags/counters.
    visionId=visionIdIn;
    regData=regDataIn;
    visionConfig=configIn;
    beltTrack=trackIn;
    motionMessage=motionMessageIn;
    deltaTime=deltaTimeIn;
    isConnectedStatus=false;
    socketErrorCode=0;
    currentEncoderValue_compare=0;
    isCameraNeedResponse=false;
    visionResponseDifference=0;
    noResponseCount=0;
    robotId=1;
    isAutoTrig=false;
    debugFlag=0;
    isInWork=false;

    reset();

    // Belt-tracking vision should have its socket created in the main thread so that
    // receiving data is not affected by whether the command thread is idle.
    // Static (non-tracking) vision can be created in any thread.
    // Otherwise the socket object would be created in the caller's thread, and receives
    // would only be serviced when that thread is free. BlockingQueuedConnection makes
    // the object be created in the slot-receiving thread.
   qDebug() << "Client Start: " << QObject::connect(this, SIGNAL(tcpStart_client_signal(QString,int,bool)),
                                                    this, SLOT(tcpStart_client_slot(QString,int,bool)),Qt::BlockingQueuedConnection);
   qDebug() << "Client Read: " << QObject::connect(this, SIGNAL(tcpRead_client_signal(QString,int,QByteArray&)),
                                                   this, SLOT(tcpRead_Client_slot(QString,int,QByteArray&)),Qt::DirectConnection);
   qDebug() << "Client Write: " << QObject::connect(this, SIGNAL(tcpWrite_clien_signal(QString,int,QByteArray)),
                                                    this, SLOT(tcpWrite_Client_slot(QString,int,QByteArray)),Qt::DirectConnection);

    // Socket operation status flags and last-received camera sample.
    readFinishedStatus = false;
    startFinishedStatus = false;
    writeFinishedStatus = false;
    lastReceiveData_camera.pointCount=0;
    lastReceiveData_camera.x=0;
    lastReceiveData_camera.y=0;
    lastReceiveData_camera.r=0;

    trigTimeOrDistance_offset=0;

    // Initialize the vision transfer matrix: rotation about Z by the configured
    // camera offset angle (degrees -> radians) plus the XY offset translation.
    frame_visionlink_in_userlink=new KDL::Frame;
    KDL::Rotation tmpRotation;
    tmpRotation=tmpRotation.RPY(0,0,visionConfig.cameraOffset_r*M_PI/180.0);
    KDL::Vector tmpVector(visionConfig.cameraOffset_x,visionConfig.cameraOffset_y,0);
    *frame_visionlink_in_userlink =KDL::Frame(tmpRotation,tmpVector);


}

int VisionTrack::forceConnectCamera()
{
    isAutoTrig=true;
    if(1!=tcpStart_client(QString::fromStdString(visionConfig.cameraIp),visionConfig.cameraPort,true))
    {
        addMsg(ENUM_MSG_ERROR, "McControllerInterpreter", "VisionTrack", 22102, robotId);
        return -1;
    }

    return 1;
}

// Record whether the vision channel is currently part of a running program
// (affects whether latch-miss conditions are reported as errors or reminders).
int VisionTrack::setWork(bool isOnIn)
{
    isInWork = isOnIn;
    return 1;
}

// Store the requested debug verbosity flag. Always succeeds.
int VisionTrack::setDebugFlag(int flagIn)
{
    debugFlag = flagIn;
    return 1;
}

// Manually trigger the camera once via the hardware trigger line.
// Returns 0 if the camera is not connected, -1 for unsupported trigger
// modes. NOTE(review): cases 0-3 and 5 have an empty body with no break,
// so they fall through into case 4/default and also return -1 — i.e. as
// written every trigger mode reports "unsupported". Confirm whether the
// empty block was meant to contain a trigger implementation.
// The isHighIn parameter is currently unused.
int VisionTrack::forceTrigCamera(bool isHighIn)
{
    if(false==isConnectedStatus)
    {
        qDebug()<<"error,,,camera not connected";
        return 0;
    }
    switch(visionConfig.cameraTrigMethod)
    {
    case 0://timer trigger
    case 1://belt displacement trigger
    case 2://sensor detects object, then trigger after fixed belt displacement
    case 3://sensor detects object, then fixed displacement trigger (cardboard length auto-computed)
    case 5://fixed-distance trigger after DI + object length + DI filtering
    {


    }
    case 4://command trigger
    default:
    {
        qDebug()<<"error,,,unsupport cameraTrigMethod";
        return -1;
    }

    }
    return -2;

}

// Delegate to the belt tracker: query the digital-output device type and
// index used for the camera trigger line.
int VisionTrack::getDoType(int &doDeviceTypeOut, int &doIndexOut)
{
    const int result = beltTrack->getDoType(doDeviceTypeOut, doIndexOut);
    return result;
}

// Update the object length used by the distance-based trigger modes.
int VisionTrack::setVisionTrigObjectLength( double lengthIn)
{
    visionConfig.objectLength = lengthIn;
    return 1;
}



// Return the tracker to its idle state: clear the buffered vision points and
// latched encoder values, and re-arm the trigger state machine.
int VisionTrack::reset()
{
    qDebug()<<"VisionTrack::reset currentEncoderLatchValueList.clear()";

    // Trigger / response bookkeeping back to defaults.
    isInWork = false;
    isAutoTrig = false;
    noResponseCount = 0;
    visionResponseDifference = 0;
    isCameraNeedResponse = false;

    // The point buffer and latch list are shared across threads — guard them.
    usingMutex.lock();
    visionPointBuffer.clear();
    currentEncoderLatchValueList.clear();
    usingMutex.unlock();

    trigTimeCount = 0;
    trigStage = 0;
    tmpSiOld = false;
    tmpTrigStage = 0;

    // Re-baseline the encoder comparison value when a belt tracker is attached.
    if (beltTrack != NULL)
    {
        currentEncoderValue_compare = beltTrack->getCurrentEncoderValue();
    }

    return 1;
}

// Fill visionInfoOut with a snapshot of the cached debug structure, then
// overlay the live connection/response fields on top of it.
int VisionTrack::getVisionTrackDebugInfo(VisionTrackDebugInfo &visionInfoOut)
{
    visionInfoOut = debugInfo;

    // Live fields maintained outside of debugInfo.
    visionInfoOut.cameraData = lastReceiveData_camera;
    visionInfoOut.isConnectedStatus = isConnectedStatus;
    visionInfoOut.socketErrorCode = socketErrorCode;
    visionInfoOut.noResponseCount = noResponseCount;
    visionInfoOut.responseDifference = visionResponseDifference;
    visionInfoOut.trigTimeOrDistance_offset = trigTimeOrDistance_offset;

    return 1;
}


// Report whether initialization completed successfully.
bool VisionTrack::isInitialOk()
{
    return isInitialOkStatus;
}

// Report whether the camera TCP connection is currently considered alive.
bool VisionTrack::isSocketOkStatus()
{
    return isConnectedStatus;
}

// Replace the active configuration and rebuild the camera-offset transform
// (rotation about Z by cameraOffset_r degrees, plus the XY translation).
int VisionTrack::modifyVisionTrackConfig(VisionTrackConfig configIn)
{
    visionConfig = configIn;

    const double yawRadians = visionConfig.cameraOffset_r * M_PI / 180.0;
    const KDL::Rotation offsetRotation = KDL::Rotation::RPY(0, 0, yawRadians);
    const KDL::Vector offsetTranslation(visionConfig.cameraOffset_x, visionConfig.cameraOffset_y, 0);
    *frame_visionlink_in_userlink = KDL::Frame(offsetRotation, offsetTranslation);

    return 1;
}

// Return the user-coordinate index this vision channel is configured to use.
int VisionTrack::getUserCoordinateIndex()
{
    return visionConfig.userCoordinateIndex;
}



int VisionTrack::runVision()
{

    isAutoTrig=true;
    if(1!=tcpStart_client(QString::fromStdString(visionConfig.cameraIp),visionConfig.cameraPort,false))
    {
        addMsg(ENUM_MSG_ERROR, "McControllerInterpreter", "VisionTrack", 22102, robotId);
        return -1;
    }

    return 1;

}

int VisionTrack::trigVisionOnce()
{

    qDebug()<<"VisionTrack::trigVisionOnce() visionId"<<visionId;
    //本处不支持硬件触发
    QString tmpCommand;
    //发送命令帧
    switch(visionConfig.cameraProtocol)
    {
    case 1://default 3个数据
    {
        tmpCommand="\nstartGrab\r";
        break;
    }
    case 2://KEBA
    {
        tmpCommand="\nstartGrab\r";
        break;
    }
    default:{
        return -100;
    }
    }

    if(1!=tcpWrite_client(QString::fromStdString(visionConfig.cameraIp),visionConfig.cameraPort,
                          tmpCommand.toLatin1()))
    {
        addMsg(ENUM_MSG_ERROR, "McControllerInterpreter", "VisionTrack", 22103, robotId);
        return -1;
    }

    isCameraNeedResponse=true;
    visionResponseDifference++;
    if(0)
    {
        qDebug()<<"trig once visionResponseDifference++"<<visionResponseDifference<<"visionId"<<visionId;
    }

    return 1;
}

int VisionTrack::getVisionData()
{
    usingMutex.lock();
    if(visionPointBuffer.size()>0)
    {
        regData->setPrRegValueType(D_VISION_PR_1_INDEX,true);
        regData->setPrIJValue(D_VISION_PR_1_INDEX,0,visionPointBuffer.front().pos_x);
        regData->setPrIJValue(D_VISION_PR_1_INDEX,1,visionPointBuffer.front().pos_y);
        regData->setPrIJValue(D_VISION_PR_1_INDEX,5,visionPointBuffer.front().pos_r);
        regData->setPrRegValueType(D_VISION_PR_2_INDEX,true);
        regData->setPrIJValue(D_VISION_PR_2_INDEX,0,visionPointBuffer.front().pos_x);
        regData->setPrIJValue(D_VISION_PR_2_INDEX,1,visionPointBuffer.front().pos_y);
        regData->setPrIJValue(D_VISION_PR_2_INDEX,5,visionPointBuffer.front().pos_r);
        visionPointBuffer.dequeue();
    }

    usingMutex.unlock();
    return 1;

}

// Discard all buffered vision data. In belt-tracking mode the belt tracker's
// own object queue is reset first, then this channel's state via reset().
int VisionTrack::clearVisionData()
{
    const bool trackingMode = visionConfig.isTrack;
    if (trackingMode)
    {
        beltTrack->reset();
    }
    return reset();
}

// Non-realtime polling step: reads pending camera data, supervises the
// response timeout, parses received frames and stores the resulting points
// either into the belt tracker (tracking mode) or into the local buffer
// (static mode). Also publishes the buffered-object count to an R register.
// Returns 1 normally, 0 on timeout / latch errors.
int VisionTrack::loopCommontimeOnce()
{
//        qDebug()<<"VisionTrack::loopCommontimeOnce()";
        sendRobotStatusInfo();

        QByteArray receiveData;
        tcpRead_client(QString::fromStdString(visionConfig.cameraIp),visionConfig.cameraPort,receiveData);

        // No data while a response is outstanding: count towards the
        // communication timeout; any data (or no outstanding request) resets it.
        if(0==receiveData.size() && true==isCameraNeedResponse)
        {
            noResponseCount++;
            if(0)
            {
                qDebug()<<"loopCommontimeOnce noResponseCount++"<<noResponseCount;
            }

            if(noResponseCount>visionConfig.cameraCommunicateTimeout)
            {
                qDebug()<<"noResponseCount>visionConfig.cameraCommunicateTimeout "<<noResponseCount
                       <<visionResponseDifference;
                addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22104,
                       robotId,visionConfig.cameraCommunicateTimeout,visionResponseDifference);
                return 0;
            }

        }
        else
        {
            isCameraNeedResponse=false;
            noResponseCount=0;
        }
        QVector<QVector<CameraReturnInfo>> resultOut;
        // If two or more frames arrive at once they must all be stored correctly.
        int tmpKey=processReceiveData(visionConfig.cameraProtocol,receiveData,resultOut);
        if(1==tmpKey && resultOut.size()>0)
        {
//            qDebug()<<"receive camera data"<<receiveData<<"size"<<resultOut.size();
//            int tmpReceiveCount=0;
//            for(int i=0;i<resultOut.size();i++)
//            {
//                tmpReceiveCount+=resultOut[i].size();
//            }
            // One outstanding response is consumed per received frame group.
            visionResponseDifference-=resultOut.size();
            if(D_TRIG_DEBUG)
            {
                qDebug()<<"visionResponseDifference-=tmpReceiveCount visionResponseDifference"
                       <<visionResponseDifference<<"resultOut.size()"<<resultOut.size()
                      <<"visionId"<<visionId<<"receiveData"<<receiveData;
            }
            lastReceiveData_camera=resultOut.last().last();
            for(int i=0;i<resultOut.size();i++)
            {
                VisionObjectInfo tmpPoint;
                if(visionConfig.isTrack)
                {   if(D_IF_USE_EXPOSURE_LATCH)
                    {
                        // Exposure latching is compiled out (D_IF_USE_EXPOSURE_LATCH == 0).
                        qDebug()<<"error, not support getCurrentEncoderLatchValue";
                        return 0;
//                       tmpPoint.latch_encoderValue=beltTrack->getCurrentEncoderLatchValue();
                    }
                    else
                    {
                        // Pair this frame with the encoder value latched at trigger time.
                        if(0>getLatchValue(tmpPoint.latch_encoderValue))
                        {
                            qDebug()<<"0>getLatchValue(tmpPoint.latch_encoderValue)"<<receiveData;
                            // Severity depends on whether a program is running.
                            if(isInWork)
                            {
                                addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22110, robotId,visionConfig.cameraCommunicateTimeout);
                            }
                            else
                            {
                                addMsg(ENUM_MSG_REMIND, "ProgramEngine", "ProgramEngine", 22110, robotId,visionConfig.cameraCommunicateTimeout);
                            }

                            return 0;
                        }
                    }
                }

                for(int j=0;j<resultOut[i].size();j++)
                {
                    // Only frames reporting exactly one point are used.
                    if(1==resultOut[i][j].pointCount)
                    {

                        translateCameraPointToRobotPoint(resultOut[i][j],tmpPoint);
                        tmpPoint.beltIndex=visionConfig.beltTrackIndex;
                        if(visionConfig.isTrack)
                        {
                            beltTrack->addVisionObject(tmpPoint);
                        }
                        else
                        {
                            // For static (fixed-point) recognition the camera program is
                            // expected to filter duplicate detections by minimum object
                            // spacing; no de-duplication is done here.
                            visionPointBuffer.enqueue(tmpPoint);

                        }
                    }

                }

            }

        }
        else if(0==tmpKey)
        {
            // No data received at all.
//            qDebug()<<"VisionTrack::getVisionData()没有接收到任何数据";
        }
        else if(-2==tmpKey)
        {
            // Data did not end with a carriage return.
//            qDebug()<<"VisionTrack::getVisionData()数据末尾没有回车符";
        }
        else if(-1==tmpKey)
        {
            // Data had no space separator.
//            qDebug()<<"VisionTrack::getVisionData()数据没有空格分隔符";
        }
        else
        {
            // Unknown data-format error.
//            qDebug()<<"VisionTrack::getVisionData()未知数据格式错误";
        }



//        qDebug()<<"VisionTrack::loopCommontimeOnce() toend";
        // Publish the current buffered-object count.
        usingMutex.lock();
        regData->setRRegValue(D_VISION_POINT_COUNT_R_INDEX+visionId,visionPointBuffer.size());
        debugInfo.objectCount=visionPointBuffer.size();
        usingMutex.unlock();


        return 1;
}

/**
 * @brief Realtime trigger state machine, called once per realtime cycle.
 *
 * Drives the camera hardware-trigger digital output according to the
 * configured trigger mode (timer, belt displacement, sensor + displacement,
 * command, DI + distance + object length). Each mode runs a 4-stage cycle:
 * stage 0 raises the DO, stages 1/3 wait, stage 2 lowers the DO (the camera
 * exposes on the falling edge), records the encoder latch value and counts
 * one outstanding camera response.
 *
 * @param detectSiIn      current state of the object-detection sensor input
 * @param digitalOutput   [out] desired trigger DO level (valid when isDoChangeOut)
 * @param isDoChangeOut   [out] true when digitalOutput must be applied this cycle
 * @return 1 on a normal cycle, 0 on error (missing belt tracker or
 *         unsupported trigger method).
 *
 * Fixed: the function is declared to return int but previously fell off the
 * end without a return statement (undefined behavior); it now returns 1.
 */
int VisionTrack::loopRealtimeOnce(bool detectSiIn,bool &digitalOutput,bool &isDoChangeOut)
{
    tickCount++;
    isDoChangeOut=false;


    if(isAutoTrig)
    {
        if(0) // debug toggle
        {
            qDebug()<<"cameraTrigMethod"<<visionConfig.cameraTrigMethod<<"currentEncoderValue_compare"
                   <<currentEncoderValue_compare<<"CurrentEncoderValue"
                    <<beltTrack->getCurrentEncoderValue()<<"trigStage"<<trigStage;
        }
        switch(visionConfig.cameraTrigMethod)
        {
        case 0://timer-based trigger
        {
            if(false==isConnectedStatus)
            {
                if(1)
                {
                    qDebug()<<"false==isConnectedStatus isConnectedStatus"<<isConnectedStatus;
                }
                break;
            }
            if(NULL==beltTrack)
            {
                addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22108, robotId);
                return 0;
            }
            trigTimeCount++;
            switch(trigStage)
            {
            case 0: // raise the trigger DO
            {
                if(1)
                {
                    qDebug()<<"time trigStage 0,trigTimeCount"<<trigTimeCount;
                }
                isDoChangeOut=true;
                digitalOutput=true;
                trigStage=1;
                break;
            }
            case 1: // hold high for half the trigger period
            {
                if(trigTimeCount>visionConfig.trigTimeOrDistance/deltaTime/2)
                {
                    trigStage=2;
                    if(1)
                    {
                        qDebug()<<"time trigStage 1,trigTimeCount"<<trigTimeCount;
                    }
                }
                break;
            }
            case 2: // falling edge: camera exposes when DO goes 1 -> 0
            {
                if(1)
                {
                    qDebug()<<"time trigged 2,trigTimeCount"<<trigTimeCount;
                }
                isDoChangeOut=true;
                digitalOutput=false;//camera is triggered on the 1 -> 0 transition

                checkVisionResponceDifference();
                recordLatchValue();
                if(isConnectedStatus)
                {
                    visionResponseDifference++;
                    isCameraNeedResponse=true;
                    if(0)
                    {
                        qDebug()<<"time trigged visionResponseDifference++"<<visionResponseDifference<<"visionId"<<visionId;
                    }
                }
                trigStage=3;
                break;
            }
            case 3: // hold low until the full trigger period elapses
            {
                if(trigTimeCount>visionConfig.trigTimeOrDistance/deltaTime)
                {
                    trigStage=0;

                    if(1)
                    {
                        qDebug()<<"time trigged 3,trigTimeCount"<<trigTimeCount;
                    }
                    trigTimeCount=0;

                }
                break;
            }
            default:
            {
                trigStage=0;
                trigTimeCount=0;
            }
            }
            break;

        }
        case 1://belt displacement trigger
        {
            if(false==isConnectedStatus )
            {
                if(1)
                {
                    qDebug()<<"false==isConnectedStatus  isConnectedStatus"<<isConnectedStatus;
                }
                break;
            }
            if(NULL==beltTrack)
            {
                addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22108, robotId);
                return 0;
            }



            double movedLength=beltTrack->getActualMovedDistance(currentEncoderValue_compare,
                                                                 beltTrack->getCurrentEncoderValue());
            if(0)
            {
                qDebug()<<"vision loop,,,movedLength"<<movedLength<<"visionId"<<visionId<<currentEncoderValue_compare
                       <<beltTrack->getCurrentEncoderValue();
            }
            // Implausible displacement (encoder jump / reverse): re-baseline.
            if(movedLength<0 || movedLength>10)
            {
                trigStage=0;
                currentEncoderValue_compare=beltTrack->getCurrentEncoderValue();
                movedLength=0;
                if(1)
                {
                    qDebug()<<"warn: movedLength<0 || movedLength>10cameraTrig movedLength"<<movedLength<<"visionId"<<visionId;
                }
            }


            switch(trigStage)
            {
            case 0: // raise the trigger DO
            {
                if(D_TRIG_DEBUG)
                {
                    qDebug()<<"distance trigStage 0 movedLength"<<movedLength<<"visionId"<<visionId
                           <<"tick"<<tickCount<<currentEncoderValue_compare
                          <<beltTrack->getCurrentEncoderValue();
                }
                isDoChangeOut=true;
                digitalOutput=true;
                trigStage=1;
                break;
            }
            case 1: // wait until the belt has moved half the trigger distance
            {
                if(movedLength>visionConfig.trigTimeOrDistance/2.0)
                {
                    if(D_TRIG_DEBUG)
                    {
                        qDebug()<<"distance trigStage 1 movedLength"<<movedLength<<"visionId"<<visionId
                               <<"tick"<<tickCount<<currentEncoderValue_compare
                              <<beltTrack->getCurrentEncoderValue();
                    }
                    trigStage=2;
                }
                break;
            }
            case 2: // falling edge: trigger the camera and latch the encoder
            {
                if(D_TRIG_DEBUG)
                {
                    qDebug()<<"distance trigStage 2 movedLength"<<movedLength<<"visionId"<<visionId
                           <<"tick"<<tickCount<<currentEncoderValue_compare
                          <<beltTrack->getCurrentEncoderValue();
                }
                isDoChangeOut=true;
                digitalOutput=false;

                checkVisionResponceDifference();
                recordLatchValue();
                if(isConnectedStatus)
                {
                    visionResponseDifference++;
                    isCameraNeedResponse=true;
                    if(D_TRIG_DEBUG)
                    {
                        qDebug()<<"distance trigged visionResponseDifference++"
                               <<visionResponseDifference<<"visionId"<<visionId;
                    }
                }
                currentEncoderValue_compare=beltTrack->getCurrentEncoderValue();
                trigStage=3;
                break;
            }
            case 3: // wait another half distance before restarting the cycle
            {
                if(movedLength>visionConfig.trigTimeOrDistance/2.0)
                {
                    if(D_TRIG_DEBUG)
                    {
                        qDebug()<<"distance trigStage 3 movedLength"<<movedLength<<"trigTimeOrDistance"
                               <<visionConfig.trigTimeOrDistance<<"visionId"<<visionId
                              <<"tick"<<tickCount<<currentEncoderValue_compare
                             <<beltTrack->getCurrentEncoderValue();
                    }
                    currentEncoderValue_compare=beltTrack->getCurrentEncoderValue();
                    trigStage=0;

                }
                break;
            }
            default:
            {
                trigStage=0;
            }
            }

            break;
        }
        case 2://sensor detects object, then trigger after fixed belt displacement.
               //TODO: add DI debouncing — do not start a new trigger cycle before the previous one ends.
        {
            if(false==isConnectedStatus )
            {
                break;
            }
            if(NULL==beltTrack)
            {
                addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22108, robotId);
                return 0;
            }

            // Rising edge of the sensor: re-baseline the displacement reference.
            // NOTE(review): tmpTrigStage is only ever reset to 0, so the
            // 0==tmpTrigStage guard is currently always true — confirm intent.
            if(false==tmpSiOld &&  true==detectSiIn && 0==tmpTrigStage)
            {
                currentEncoderValue_compare=beltTrack->getCurrentEncoderValue();

            }
            tmpSiOld=detectSiIn;
            double movedLength=beltTrack->getActualMovedDistance(currentEncoderValue_compare,
                                                                 beltTrack->getCurrentEncoderValue());


            switch(trigStage)
            {
            case 0:
            {
                isDoChangeOut=true;
                digitalOutput=true;
                trigStage=1;
                break;
            }
            case 1:
            {
                // NOTE(review): comparison direction is '<' here, while the
                // analogous wait in trigger mode 1 uses '>' — confirm this is
                // intentional and not an inverted condition.
                if(movedLength<visionConfig.trigTimeOrDistance)
                {
                    trigStage=2;
                }
                break;
            }
            case 2:
            {
                isDoChangeOut=true;
                digitalOutput=false;

                checkVisionResponceDifference();
                recordLatchValue();
                if(isConnectedStatus)
                {
                    visionResponseDifference++;
                    isCameraNeedResponse=true;
                    if(0)
                    {
                        qDebug()<<"4 trigged visionResponseDifference++"<<visionResponseDifference;
                    }
                }
                trigStage=3;
                break;
            }
            case 3:
            {
                if(movedLength<2*visionConfig.trigTimeOrDistance)
                {
                    trigStage=0;

                }
                break;
            }
            default:
            {
                trigStage=0;
            }
            }



            break;
        }
        case 3://sensor detects object, then fixed displacement trigger (cardboard length auto-computed).
               //TODO: add DI debouncing — do not start a new trigger cycle before the previous one ends.
        {

            if(false==isConnectedStatus )
            {
                break;
            }
            if(NULL==beltTrack)
            {
                addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22108, robotId);
                return 0;
            }
            // Continuously measure the work-piece length from the sensor edges.
            detectWorkPieceLength(detectSiIn,trigTimeOrDistance_offset);


            if(false==tmpSiOld &&  true==detectSiIn && 0==tmpTrigStage)
            {
                currentEncoderValue_compare=beltTrack->getCurrentEncoderValue();

            }
            tmpSiOld=detectSiIn;
            double movedLength=beltTrack->getActualMovedDistance(currentEncoderValue_compare,
                                                                 beltTrack->getCurrentEncoderValue());

            switch(trigStage)
            {
            case 0:
            {
                isDoChangeOut=true;
                digitalOutput=true;
                trigStage=1;
                break;
            }
            case 1:
            {
                // NOTE(review): '<' comparison — see the note in trigger mode 2.
                if(movedLength<(visionConfig.trigTimeOrDistance+trigTimeOrDistance_offset))
                {
                    trigStage=2;
                }
                break;
            }
            case 2:
            {
                isDoChangeOut=true;
                digitalOutput=false;

                checkVisionResponceDifference();
                recordLatchValue();
                if(isConnectedStatus)
                {
                    visionResponseDifference++;
                    isCameraNeedResponse=true;
                    if(0)
                    {
                        qDebug()<<"5 trigged visionResponseDifference++"<<visionResponseDifference;
                    }
                }
                trigStage=3;
                break;
            }
            case 3:
            {
                if(movedLength<visionConfig.trigTimeOrDistance+2.1*trigTimeOrDistance_offset)
                {
                    trigStage=0;

                }
                break;
            }
            default:
            {
                trigStage=0;
            }
            }



            break;
        }
        case 4://command trigger: handled elsewhere (trigVisionOnce), nothing to do per cycle
        {
            break;
        }
        case 5://fixed-distance trigger after DI + object length + DI filtering.
               //DI debouncing: do not start a new trigger cycle before the previous one ends.
        {
            if(false==isConnectedStatus )
            {
                break;
            }
            if(NULL==beltTrack)
            {
                addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22108, robotId);
                return 0;
            }


            if(false==tmpSiOld &&  true==detectSiIn && 0==tmpTrigStage)
            {
                currentEncoderValue_compare=beltTrack->getCurrentEncoderValue();

            }
            tmpSiOld=detectSiIn;
            double movedLength=beltTrack->getActualMovedDistance(currentEncoderValue_compare,
                                                                 beltTrack->getCurrentEncoderValue());


            switch(trigStage)
            {
            case 0:
            {
                isDoChangeOut=true;
                digitalOutput=true;
                trigStage=1;
                break;
            }
            case 1:
            {
                // NOTE(review): '<' comparison — see the note in trigger mode 2.
                if(movedLength<(visionConfig.trigTimeOrDistance+visionConfig.objectLength/2.0))
                {
                    trigStage=2;
                }
                break;
            }
            case 2:
            {
                isDoChangeOut=true;
                digitalOutput=false;

                checkVisionResponceDifference();
                recordLatchValue();
                if(isConnectedStatus)
                {
                    visionResponseDifference++;
                    isCameraNeedResponse=true;
                    if(0)
                    {
                        qDebug()<<"6 trigged visionResponseDifference++"<<visionResponseDifference;
                    }
                }
                trigStage=3;
                break;
            }
            case 3:
            {
                if(movedLength<visionConfig.trigTimeOrDistance+visionConfig.objectLength*(0.5+0.5*visionConfig.trigFilterRatio))
                {
                    trigStage=0;
                }
                break;
            }
            default:
            {
                trigStage=0;
            }
            }

            break;
        }
        default:
        {
            addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22109, robotId,visionConfig.cameraTrigMethod);
            return 0;
        }

        }

    }

    return 1;
}

// Qt slot: a socket error occurred — remember the error code and mark the
// connection as down so the polling loops stop triggering.
void VisionTrack::socketError_slot(QAbstractSocket::SocketError errorNum)
{
    qDebug()<<"VisionTrack::socketError_slot"<<errorNum;
    socketErrorCode = errorNum;
    isConnectedStatus = false;
}

// Placeholder: sending robot status to the camera is not implemented yet.
// Fixed: the function is declared to return int but had no return statement
// (undefined behavior when the caller reads the result); it now explicitly
// reports success. The only caller (loopCommontimeOnce) ignores the value.
int VisionTrack::sendRobotStatusInfo()
{
    // TODO: implement status transmission if the camera protocol requires it.
    return 1;
}

// Before issuing a new trigger, reconcile the count of outstanding camera
// responses (visionResponseDifference). If exactly one response is missing,
// the stale latch value recorded for it is discarded; if more are missing,
// an error is raised. The if(0)/else pair is a compile-time toggle between
// two camera behaviors; only the else branch is active.
int VisionTrack::checkVisionResponceDifference()
{

    if(0)
    {
        // Variant for cameras that return a string even when no target is found.
        if(0==visionResponseDifference)
        {
            return 1;

        }
        else if(1==visionResponseDifference)
        {
            // One response never arrived: drop the latch value recorded for it.
            usingMutex.lock();
            currentEncoderLatchValueList.removeLast();
            if(0)
            {
                qDebug()<<"VisionTrack::currentEncoderLatchValueList.removeLast()";
            }
            usingMutex.unlock();
            addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22112, robotId,
                                                visionResponseDifference);
            return 0;
        }
        else
        {
            // More than one missing response: unrecoverable mismatch.
            addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22106, robotId,
                                                visionResponseDifference);
            return -1;
        }



    }
    else
    {
        // Active variant: the camera returns nothing when no target is found,
        // so a single missing response is normal — silently drop its latch.
        if(0==visionResponseDifference)
        {
            return 1;

        }
        else if(1==visionResponseDifference)
        {
            usingMutex.lock();
            currentEncoderLatchValueList.removeLast();
            if(0)
            {
                qDebug()<<"VisionTrack::currentEncoderLatchValueList.removeLast()";
            }
            usingMutex.unlock();
            visionResponseDifference=0;
            return 1;
        }
        else
        {
            addMsg(ENUM_MSG_ERROR, "ProgramEngine", "ProgramEngine", 22106, robotId,
                                                visionResponseDifference);
            return -1;
        }

    }


    return 1;
}

// Dequeue the oldest latched encoder value into valueOut.
// Returns 1 on success; -1 when no latch is pending (valueOut untouched).
int VisionTrack::getLatchValue(int &valueOut)
{
    int resultKey = 1;

    usingMutex.lock();
    if (currentEncoderLatchValueList.isEmpty())
    {
        resultKey = -1;
    }
    else
    {
        valueOut = currentEncoderLatchValueList.dequeue();
    }
    usingMutex.unlock();

    return resultKey;
}

// Snapshot the current belt encoder value at trigger time so the next camera
// response can be paired with the belt position at exposure.
int VisionTrack::recordLatchValue()
{
    usingMutex.lock();

    const int encoderNow = beltTrack->getCurrentEncoderValue();
    currentEncoderLatchValueList.enqueue(encoderNow);

    if (0) // debug toggle
    {
        qDebug()<<"VisionTrack::recordLatchValue"<<beltTrack->getCurrentEncoderValue();
    }

    usingMutex.unlock();
    return 1;
}

// Measure a new work-piece length from the detection sensor edges: latch the
// encoder value on the rising edge, and on the falling edge write the belt
// distance travelled in between to detectLengthOut (unchanged otherwise).
int VisionTrack::detectWorkPieceLength(bool detectSiIn,double &detectLengthOut)
{
    const bool risingEdge = (!tmpSiOld && detectSiIn);
    const bool fallingEdge = (tmpSiOld && !detectSiIn);

    if (risingEdge)
    {
        encoderValueForPieceLength = beltTrack->getCurrentEncoderValue();
    }
    else if (fallingEdge)
    {
        detectLengthOut = beltTrack->getActualMovedDistance(encoderValueForPieceLength,
                                                            beltTrack->getCurrentEncoderValue());
    }

    tmpSiOld = detectSiIn;
    return 1;
}

/* a is the register, b is the bit to operate on */
#define BIT_SET(a,b) ((a) |= (1<<(b)))              //set bit
#define BIT_CLEAR(a,b) ((a) &= ~(1<<(b)))    //clear bit
#define BIT_FLIP(a,b) ((a) ^= (1<<(b)))           //toggle bit

// Set (isHigh) or clear (!isHigh) bit bitIndex of outPutValue in place.
int VisionTrack::setOutPut(int bitIndex,bool isHigh, unsigned short &outPutValue)
{
    if (isHigh)
    {
        BIT_SET(outPutValue, bitIndex);
    }
    else
    {
        BIT_CLEAR(outPutValue, bitIndex);
    }
    return 1;
}


// Qt slot: open (or re-open) the TCP client connection to ip:port.
// Sockets are kept in tcpClientList keyed by "ip:port". A fresh socket is
// created on first use; an existing one is reused unless forceReconnect is
// set or it is no longer connected. Result is reported through
// startFinishedStatus (true on success).
void VisionTrack::tcpStart_client_slot(const QString &ip, int port,bool forceReconnect)
{
    QString tmpId=ip+":"+QString::number(port);
    if(tcpClientList.keys().indexOf(tmpId) == -1)
    {
        // No socket for this endpoint yet: create and connect one.
        QTcpSocket *tmpClientSocket = new QTcpSocket;
        qDebug() << " +++++++++connected flag:"
                 << connect(tmpClientSocket,SIGNAL(error(QAbstractSocket::SocketError)),
                            this,SLOT(socketError_slot(QAbstractSocket::SocketError)),Qt::DirectConnection);
        tmpClientSocket->abort();
        tmpClientSocket->connectToHost(ip,port);
        if(!tmpClientSocket->waitForConnected(500))//blocks for up to this timeout; other main-thread-driven logic is affected meanwhile
        {
            // Connection failed: discard the socket.
            qDebug() << "TcpManager::tcpStart_client_slot,TCP_START 登录失败:" << tmpClientSocket->errorString();
            delete tmpClientSocket;
            startFinishedStatus = false;
            return;
        }
        startFinishedStatus = true;
//        mutex.lock();
        tcpClientList.insert(tmpId,tmpClientSocket);
//        mutex.unlock();
    }
    else
    {
        qDebug()<<"tcpStart_client_slot"<<tcpClientList.value(tmpId)->state()
                  <<"forceReconnect-------------------"<<forceReconnect;


            if(tcpClientList.value(tmpId)->state() == QTcpSocket::ConnectedState
                    && false==forceReconnect)
            {
                // Already connected and no forced reconnect requested.
                startFinishedStatus = true;
                return;
            }
            else
            {
                // Not connected (or reconnect forced): retry the connection.
                tcpClientList.value(tmpId)->abort();
                tcpClientList.value(tmpId)->connectToHost(ip,port);
                if(!tcpClientList.value(tmpId)->waitForConnected(800))
                {
                    // Reconnect failed: drop the socket from the map entirely.
                    qDebug() << "TCP_START 登录失败:" << tcpClientList.value(tmpId)->errorString();
                    startFinishedStatus = false;
                    delete tcpClientList.value(tmpId);
                    tcpClientList.remove(tmpId);
                    isConnectedStatus=false;
                    return;
                }
            }

     }

    startFinishedStatus = true;
}

void VisionTrack::tcpRead_Client_slot(const QString &ip, int port, QByteArray &dataOut)
{
    QString tmpId=ip+":"+QString::number(port);

    if(tcpClientList.keys().indexOf(tmpId) == -1)
    {
//        qDebug() << "tcpRead Client Error:无客户端Socket对象!";
        readFinishedStatus = false;
        return;
    }
    else
    {
        if(!tcpClientList.value(tmpId)->isValid())
        {
            readFinishedStatus = false;
            return;
        }

//        if(!tcpClientList.value(tmpId)->waitForReadyRead(100))//堵塞等待数据到来
//        {
//            readFinishedStatus = false;
//            return;
//        }
        dataOut = tcpClientList.value(tmpId)->readAll();
        tcpClientList.value(tmpId)->flush();
        readFinishedStatus = true;
//        qDebug() << "TCP_READ Recive Data:" << dataOut;
        return;
    }
}

void VisionTrack::tcpWrite_Client_slot(const QString &ip, int port, const QByteArray &dataIn)
{

    QString tmpId=ip+":"+QString::number(port);
    qDebug()<<"VisionTrack::tcpWrite_Client_slot"<<tmpId;
    if(tcpClientList.keys().indexOf(tmpId) == -1)
    {
        writeFinishedStatus = false;
        addMsg(ENUM_MSG_WARNING, "ProgramEngine", "ProgramEngine", 22111, robotId);
        return;
    }
    else
    {
        if(tcpClientList.value(tmpId)->state() != QTcpSocket::ConnectedState)
        {
            qDebug() << "TCP_WRITE socket 未连接:" << tcpClientList.value(tmpId)->errorString();
            addMsg(ENUM_MSG_WARNING, "ProgramEngine", "ProgramEngine", 22107, robotId);
            writeFinishedStatus = false;
            return;
        }

        qDebug() << "tmpManager client write Data:" << dataIn;
        tcpClientList.value(tmpId)->write(dataIn);
//        if(!tcpClientList.value(tmpId)->waitForBytesWritten(20))//不用等发送结果
//        {
//            qDebug() << "TCP_WRITE 发送数据失败:" << tcpClientList.value(tmpId)->errorString();
//            addMsg(ENUM_MSG_WARNING, "ProgramEngine", "ProgramEngine", 12035, robotId);
//            writeFinishedStatus = false;
//            return;
//        }
        // 刷新缓存
        tcpClientList.value(tmpId)->flush();
//        tcpClientList.value(tmpId)->close();
    }
    writeFinishedStatus = true;
}

//void VisionTrack::newConnectionSlot()
//{

//}

int VisionTrack::tcpStart_client(const QString &ip, int port, bool forceReconnect)
{
    // Hand the connect request to the thread that owns the socket objects;
    // the BlockingQueuedConnection set up in the constructor makes this emit
    // synchronous, so startFinishedStatus is valid immediately afterwards.
    emit tcpStart_client_signal(ip, port, forceReconnect);

    if (startFinishedStatus)
    {
        isConnectedStatus = true;
        return 1; // connected (or already connected)
    }

    qDebug()<<"failed VisionTrack::tcpStart_client port"<<port;
    return -1; // connect attempt failed
}

int VisionTrack::tcpWrite_client(const QString &ip, int port, const QByteArray &dataIn)
{
    // Thin wrapper: calls the slot directly rather than via the signal/slot
    // queue — only the thread that created the socket may drive send/receive,
    // and the direct call avoids queued-connection latency.
    tcpWrite_Client_slot(ip, port, dataIn);
    return 1;
}

int VisionTrack::tcpRead_client(const QString &ip, int port, QByteArray &dataOut)
{
    // Thin wrapper: the signal/slot path was abandoned because of its queue
    // delay; only the socket-owning thread may call the receive slot directly.
    tcpRead_Client_slot(ip, port, dataOut);
    return 1;
}

int VisionTrack::processReceiveData(int protocol, const QByteArray &dataIn,
                                                QVector<QVector<CameraReturnInfo> > &resultOut)
{
    // Dispatch the raw camera payload to the parser for the configured
    // protocol. Returns the parser's result, or -100 for an unknown protocol.
    // (The original had unreachable break statements after each return and an
    // unreachable trailing "return 0" — removed.)
    switch(protocol)
    {
    case 1: // default protocol: "count x y r" frames terminated by '\r'
        return processReceiveData_3value(dataIn, resultOut);
    case 2: // KEBA protocol: "[X:..;Y:..;A:..]" lines, frames separated by "Done"
        return processReceiveData_keba(dataIn, resultOut);
    default:
        return -100; // unsupported protocol id
    }
}

int VisionTrack::processReceiveData_3value(const QByteArray &dataIn, QVector<QVector<CameraReturnInfo>> &resultOut)
{
    //    qDebug()<<"VisionTrack::processReceiveData_3value";
        resultOut.clear();
        if(0==dataIn.size())
        {
    //        qDebug()<<"0==dataIn.size()";
            return 0;
        }
        //拆分成多条
        QString tmp_frame=dataIn;

        int tmpCount=tmp_frame.count("\r");
        if(0>tmpCount)
        {
            return -1;
        }
        for(int i=0;i<tmpCount;i++)
        {
            QString tmpStr=tmp_frame.section("\r", i, i);

            if(0==tmpStr.size())
            {
                continue;
            }
            else
            {
//                tmpStr+="\r";
                QVector<CameraReturnInfo> tmpResult;
                if(1==processReceiveData_3value_oneTime(tmpStr,tmpResult))
                {
                    resultOut.append(tmpResult);
                }
                else
                {
                    CameraReturnInfo tmpPoint;
                    tmpPoint.pointCount=0;
                    tmpPoint.x=0;
                    tmpPoint.y=0;
                    tmpPoint.r=0;
                    tmpResult.clear();
                    tmpResult.append(tmpPoint);

                    resultOut.append(tmpResult);
                    qDebug() << "warn,,,相机反馈数据不符合格式要求"<<tmpStr;
                    addMsg(ENUM_MSG_WARNING, "ProgramEngine", "ProgramEngine", 22105, robotId,0,0,tmpStr);
                }



            }
        }

        if(resultOut.size()>0)
        {
           return 1;
        }

        return 0;

}

int VisionTrack::processReceiveData_3value_oneTime(const QString &dataIn, QVector<CameraReturnInfo> &resultOut)
{
    //    qDebug()<<"processReceiveData_keba_oneTime";

        //1个数据帧里面可能含有多个物体点　todo
        CameraReturnInfo tmpResult;

        //                tmpStr+="\r";
        if(1==processReceiveData_3value_single(dataIn,tmpResult))
        {
            resultOut.append(tmpResult);
        }
        else
        {
//                qDebug() << "warn,,,相机反馈数据不符合格式要求"<<tmpStr;
//                        addMsg(ENUM_MSG_WARNING, "ProgramEngine", "ProgramEngine", 22105, robotId,0,0,tmpStr);
        }

        if(resultOut.size()>0)
        {
           return 1;
        }
        return 0;
}

int VisionTrack::processReceiveData_3value_single(QString tmp_frame, CameraReturnInfo &resultOut)
{
    // Parse one frame of the form "<count> <x> <y> <r>", e.g. "1 12.5 -3.0 90".
    // Returns 1 and fills resultOut on success, -1 if the frame does not match.
    // Generalization: the original pattern "(\\d)" only accepted a single-digit
    // point count; "(\\d+)" accepts multi-digit counts while still matching
    // every frame the old pattern matched.
    QString pattern = "(\\d+) (-?\\d*\\.*\\d*) (-?\\d*\\.*\\d*) (-?\\d*\\.*\\d*)";
    QRegExp rx(pattern);

    if (!rx.exactMatch(tmp_frame))
    {
        return -1; // not a valid 3-value frame
    }

    resultOut.pointCount = rx.cap(1).toInt();
    resultOut.x = rx.cap(2).toDouble();
    resultOut.y = rx.cap(3).toDouble();
    resultOut.r = rx.cap(4).toDouble();

    return 1;
}

int VisionTrack::processReceiveData_keba(const QByteArray &dataIn, QVector<QVector<CameraReturnInfo> > &resultOut)
{
    //一个接收不允许有不同时间传过来的物体，因为锁存编码器不一样．
//    qDebug()<<"processReceiveData_keba";
    resultOut.clear();
    if(0==dataIn.size())
    {
//            qDebug()<<"0==dataIn.size()";
        return 0;
    }
//    qDebug()<<"VisionTrack::processReceiveData_keba"<<dataIn;
    //拆分成多条
    QString tmp_frame=dataIn;

    //done分割
    int tmpCount=tmp_frame.count("Done");
    if(0>=tmpCount)
    {
        return -1;
    }
    for(int i=0;i<tmpCount;i++)
    {

        QString tmpStr=tmp_frame.section("Done", i, i);

        if(0==tmpStr.size())
        {
            continue;
        }
        else
        {
//                tmpStr+="\r";
            QVector<CameraReturnInfo> tmpResult;
            if(1==processReceiveData_keba_oneTime(tmpStr,tmpResult))
            {
                resultOut.append(tmpResult);
            }
            else
            {
                CameraReturnInfo tmpPoint;
                tmpPoint.pointCount=0;
                tmpPoint.x=0;
                tmpPoint.y=0;
                tmpPoint.r=0;
                tmpResult.clear();
                tmpResult.append(tmpPoint);

                resultOut.append(tmpResult);
            }
        }
    }
    if(resultOut.size()>0)
    {
       return 1;
    }

    return 0;
}

int VisionTrack::processReceiveData_keba_oneTime(const QString &dataIn, QVector<CameraReturnInfo> &resultOut)
{
//    qDebug()<<"processReceiveData_keba_oneTime";

        //1个数据帧里面可能含有多个物体点　todo
    CameraReturnInfo tmpResult;

    int tmpCount=dataIn.count("\r\n");
    if(0>=tmpCount)
    {
        return -1;
    }
    for(int i=0;i<tmpCount;i++)
    {
        QString tmpStr=dataIn.section("\r\n", i, i);

        if(0==tmpStr.size())
        {
            continue;
        }
        else
        {
    //                tmpStr+="\r";
            if(1==processReceiveData_keba_single(tmpStr,tmpResult))
            {
                resultOut.append(tmpResult);
            }
            else
            {
//                qDebug() << "warn,,,相机反馈数据不符合格式要求"<<tmpStr;
    //                        addMsg(ENUM_MSG_WARNING, "ProgramEngine", "ProgramEngine", 22105, robotId,0,0,tmpStr);
            }
        }
    }
    if(resultOut.size()>0)
    {
       return 1;
    }
    return 0;

}

int VisionTrack::processReceiveData_keba_single(QString tmp_frame, CameraReturnInfo &resultOut)
{
    // Parse one KEBA line of the shape "...[X:<x>;Y:<y>;A:<angle>...".
    // Only a single object per line is supported for now (original TODO).
    // Returns 1 and fills resultOut on a match, 0 otherwise.
    const QString pattern = "\\S*\\[X:(-?\\d*\\.*\\d*);Y:(-?\\d*\\.*\\d*);A:(-?\\d*\\.*\\d*)\\S*";
    QRegExp rx(pattern);

    if (!rx.exactMatch(tmp_frame))
    {
        return 0; // not a KEBA coordinate line
    }

    resultOut.pointCount = 1; // a matching KEBA line always carries one point
    resultOut.x = rx.cap(1).toDouble();
    resultOut.y = rx.cap(2).toDouble();
    resultOut.r = rx.cap(3).toDouble();

    return 1;
}



int VisionTrack::translateCameraPointToRobotPoint(const CameraReturnInfo &cameraPointIn, VisionObjectInfo &pointOut)
{

    //像素转换为ｍｍ
    double tmpX=cameraPointIn.x*visionConfig.cameraMmPerPix_x;
    double tmpY=cameraPointIn.y*visionConfig.cameraMmPerPix_y;
    KDL::Rotation tmpRotation;
    tmpRotation=tmpRotation.RPY(0,0,cameraPointIn.r*M_PI/180.0);
    KDL::Vector tmpVector(tmpX,tmpY,0);
    KDL::Frame frame_object_in_vision=KDL::Frame(tmpRotation,tmpVector);

    //坐标系转移
    KDL::Frame frame_object_in_user=(*frame_visionlink_in_userlink)*frame_object_in_vision;
//     qDebug()<<"frame_baselink_in_world"<<"x="<<frame_baselink_in_world.p[0]<<" ,y="<<frame_baselink_in_world.p[1]<<" ,r=";

    double tmp_roll,tmp_pitch,tmp_yaw;
    pointOut.pos_x=frame_object_in_user.p[0];
    pointOut.pos_y=frame_object_in_user.p[1];
    frame_object_in_user.M.GetRPY(tmp_roll,tmp_pitch,tmp_yaw);
    pointOut.pos_r=tmp_yaw*180.0/M_PI;
//    transformToPi(vmarkInWorldOut.rotate);
    return 1;


    //卡诺普相机标定流程
    //1)通过９点标定图像板，保证像素坐标与标定板平行，计算像素比。
    //2)重置视觉坐标系原点
    //3)如果工作区域不重合，则需要借助传送带做偏移，才可以标定用户坐标系和视觉坐标系的关系。如果重合，跳过该步骤。
    //4)建立用户坐标系，原点和视觉坐标系原点重合，方向也一样。

}

void VisionTrack::addMsg(int messageLevel, std::string componentName, std::string messageType
                            , int messageCode, int robotIdIn, int parameter1, int parameter2,
                         QString paraStr)
{
    Message tmpMsg;
    tmpMsg.MessageLevel = messageLevel;
    tmpMsg.componentClassName = componentName;
    tmpMsg.messageCode = messageCode;
    tmpMsg.messageType = messageType;
    tmpMsg.robotId = robotIdIn;

    string infomationStr;
    QString tmpStr;

    switch(messageCode)
    {
    case 22112:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机请求与应答次数偏差为1，丢弃上一次触发锁存！相机").arg(parameter1)
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22111:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机未连接！相机").arg(parameter1)
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22110:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")收到相机坐标时，编码器锁存队列无数据！跟踪功能仅支持相机硬触发方式！相机")
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")
                + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22109:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机触发方式(%1)不支持！相机").arg(parameter1)
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22108:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机触发方式设置错误，非跟踪模式，不支持该触发！相机")
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22107:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机套节字已经断开！相机")
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22106:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机请求与应答次数偏差%1过大！相机").arg(parameter1)
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22105:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机反馈数据不符合格式要求！相机")+paraStr
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22104:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机超过%1次没有应答请求！请求偏差为%2 。相机").arg(parameter1).arg(parameter2)
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22103:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")发送相机拍照命令失败！相机")
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22102:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")相机套接字创建失败！相机")
                + QString::fromStdString(visionConfig.cameraIp) +QObject::tr(":")  + QString::number(visionConfig.cameraPort);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22101:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(")皮带号(%1)超出范围！").arg(parameter1);
        infomationStr = tmpStr.toStdString();
        break;
    }
    case 22100:
    {
        tmpStr =  QObject::tr("机器人:")  + QString::number(robotId);
        tmpStr = tmpStr + QObject::tr(",跟踪工艺文件读取失败");
        infomationStr = tmpStr.toStdString();
        break;
    }



        default:
        {
            qDebug()<<"VisionTrack::addMsg====not match error code";
            break;
        }
    }

    tmpMsg.MessageInformation = infomationStr;

    #ifndef MOTION_SERVER_UNIT_TEST
    motionMessage->addMsg(tmpMsg);
    #endif
}
