﻿#include "RTMapperDataset.h"
#include "RTMapperNetInterface.h"

#include <QCoreApplication>
#include <QFileInfo>
#include <QDir>
#include <QImage>
#include <QtNetwork/QTcpSocket>
#include <QSocketNotifier>
#include <QPixmap>

#include <GSLAM/core/VecParament.h>
#include <GSLAM/core/Timer.h>
#include <GSLAM/core/Utils.h>
#include <GSLAM/core/Messenger.h>

#include <GSLAM/core/Event.h>
#include <GSLAM/core/TileManager.h>
#include <GSLAM/core/Svar.h>

extern "C"{
#include "libavcodec/avcodec.h"
#include "libavfilter/avfilter.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
}

#include "utils_xml.h"
#include "OfflineFIFO.h"
#include "Datastream_FIFO.h"
#include "QtUtils.h"

using GSLAM::FramePtr;


class Decoder{
public:
    Decoder()
        :isFFmpegInitialized(0)
    {
        initialize();
    }

    ~Decoder(){
        if (m_pCodecCtx)
        {
            avcodec_close(m_pCodecCtx);
            m_pCodecCtx = NULL;
        }

        av_free(m_pYUVFrame);
        av_free(m_pCodecCtx);
        av_parser_close(m_pCodecPaser);
        av_free(avPicture);
        if(m_pImgCtx) sws_freeContext(m_pImgCtx);
        if(m_PicBuf) delete[] m_PicBuf;
    }

    int initialize()
    {
        int codecID;
        codecID = AV_CODEC_ID_H264;
        //codecID = AV_CODEC_ID_H265;

        if (isFFmpegInitialized == 0)
        {
            avcodec_register_all();
            av_register_all();
            isFFmpegInitialized = 1;
        }
        m_pAVCodec = avcodec_find_decoder((AVCodecID) codecID);
        m_pCodecCtx = avcodec_alloc_context3(m_pAVCodec);
        m_pCodecPaser = av_parser_init((AVCodecID) codecID);
        if (m_pAVCodec == NULL || m_pCodecCtx == NULL)
        {
            qDebug()<<("m_pAVCodec == NULL||m_pCodecCtx == NULL");
            return 0;
        }

        // FIXME: new ffmpeg do not have these definitions
        //if (m_pAVCodec->capabilities & CODEC_CAP_TRUNCATED)
        //    m_pCodecCtx->flags |= CODEC_FLAG_TRUNCATED;

        m_pCodecCtx->thread_count = 4;
        m_pCodecCtx->thread_type = FF_THREAD_FRAME;
        m_pCodecCtx->flags2 |= AV_CODEC_FLAG2_SHOW_ALL;

        if (avcodec_open2(m_pCodecCtx, m_pAVCodec,NULL) < 0)
        {
            m_pAVCodec = NULL;
            return 0;
        }

        m_pYUVFrame = av_frame_alloc();
        avPicture=av_frame_alloc();

        if (m_pYUVFrame == NULL)
        {
            qDebug()<<(" CDecoder avcodec_alloc_frame() == NULL ");
            return 0;
        }
        qDebug()<<("CDecoder::prepare()2");
        return 1;
    }

    bool decode(RTMapperNetHeader& header,QByteArray& imageBuffer,GSLAM::GImage& image)
    {
        localBuffer.append(imageBuffer);
        int size=localBuffer.size();
        uint8_t *buff = (uint8_t*) localBuffer.data();
        int parseSize=0;
        if(size >= fillersize2 && memcmp(fillerbuffer2, buff+size-fillersize2, fillersize2) == 0)
        {
            parseSize=size-fillersize2;
        }
        else if (size >= audaudsize2 && memcmp(audaudbuffer2, buff+size-audaudsize2, audaudsize2) == 0)
        {
            parseSize=size-audaudsize2;
        }
        else if (size >= audsize2 && memcmp(audbuffer2, buff+size-audsize2, audsize2) == 0)
        {
            parseSize=size-audsize2;
        }
        else
        {
            parseSize=size;
        }

        int paserLength_In = parseSize;
        int paserLen;
        int decode_data_length;
        int got_picture = 0;
        while (paserLength_In > 0)
        {
            AVPacket packet;
            av_init_packet(&packet);
            if (m_pCodecPaser == NULL) {
                qDebug()<<("m_pCodecPaser == NULL");
                initialize();
            }
            if (m_pCodecCtx == NULL) {
                qDebug()<<("m_pCodecCtx == NULL");
                initialize();
            }
            paserLen = av_parser_parse2(m_pCodecPaser, m_pCodecCtx, &packet.data, &packet.size, buff,
                                        parseSize, AV_NOPTS_VALUE, AV_NOPTS_VALUE, AV_NOPTS_VALUE);

            localBuffer.remove(0,paserLen);

            if (packet.size > 0)
            {
                int got;
                paserLen= avcodec_decode_video2(m_pCodecCtx,m_pYUVFrame,&got,&packet);
                if(paserLen<0)
                {
                    av_free_packet(&packet);
                    return false;
                }

                if(got)
                {
                    int bytes=avpicture_get_size(AV_PIX_FMT_RGB32,m_pCodecCtx->width,m_pCodecCtx->height);
                    if(m_PicBytes!=bytes)
                    {
                        m_PicBytes=bytes;
                        if(m_PicBuf) delete[]m_PicBuf;
                        m_PicBuf =new uint8_t[m_PicBytes];
                        avpicture_fill((AVPicture*)avPicture,m_PicBuf,AV_PIX_FMT_RGB32,
                                       m_pCodecCtx->width,m_pCodecCtx->height);
                        if(m_pImgCtx) sws_freeContext(m_pImgCtx);
                        m_pImgCtx=NULL;
                    }

                    if(!m_pImgCtx)
                        m_pImgCtx=sws_getContext(m_pCodecCtx->width,m_pCodecCtx->height,m_pCodecCtx->pix_fmt,
                                                 m_pCodecCtx->width,m_pCodecCtx->height,AV_PIX_FMT_RGB32,SWS_BICUBIC,NULL,NULL,NULL);

                    //                        m_pYUVFrame->data[0]+= m_pYUVFrame->linesize[0]*(m_pCodecCtx->height-1);
                    //                        m_pYUVFrame->linesize[0]*=-1;
                    //                        m_pYUVFrame->data[1]+= m_pYUVFrame->linesize[1]*(m_pCodecCtx->height/2-1);
                    //                        m_pYUVFrame->linesize[1]*=-1;
                    //                        m_pYUVFrame->data[2]+= m_pYUVFrame->linesize[2]*(m_pCodecCtx->height/2-1);
                    //                        m_pYUVFrame->linesize[2]*=-1;

                    sws_scale(m_pImgCtx,(const uint8_t* const*)m_pYUVFrame->data,
                              m_pYUVFrame->linesize,0,m_pCodecCtx->height,avPicture->data,avPicture->linesize);

                    //                        QImage img=QImage(avPicture->data[0],m_pCodecCtx->width,m_pCodecCtx->height,QImage::Format_RGB32).copy();
                    image=GSLAM::GImage(m_pCodecCtx->height,m_pCodecCtx->width,
                                        GSLAM::GImageType<uchar,4>::Type,avPicture->data[0]);
                    if(image.empty()) return false;

                    // FIXME: if the image is DJI, and take photo every few seconds,
                    //        then, the video's width/height may changed
                    //        THIS MAY CAUSE segment fault in SLAM, therefore, disable take phote in App, and just
                    //        use video stream
                    if(image.cols==1280&&image.rows==720 && 0)
                    {
                        int64_t sum=0;
                        for(int y=0;y<image.rows;y++)
                            for(int x=0;x<160;x++)
                            {
                                pi::Array_<uchar,4> bytes=image.at<pi::Array_<uchar,4> >(x,y);
                                sum+=bytes.data[1];
                            }

                        if(sum/(image.rows*160)<10)
                        {
                            /*
                                GSLAM::GImage gimage(720,960,image.type());
                                for(int i=0;i<image.rows;i++)
                                    memcpy(gimage.data+i*4*960,image.data+i*4*1280+4*160,4*960);
                                image=gimage;
                                */

                            image=GSLAM::GImage();
                        }
                        else
                        {
                            image=image.clone();
                        }
                    }
                    else
                    {
                        image=image.clone();
                    }

                    av_free_packet(&packet);
                    return true;
                }
                av_free_packet(&packet);
                return false;

            }
            av_free_packet(&packet);
            return false;
        }

        return false;
    }


    bool decode2(RTMapperNetHeader& header, QByteArray& imageBuffer, GSLAM::GImage& image)
    {
        bool retval = false;

        AVPacket packet;
        av_new_packet(&packet, imageBuffer.size());
        memcpy(packet.data, imageBuffer.data(), imageBuffer.size());

        int got;
        int paserLen = avcodec_decode_video2(m_pCodecCtx, m_pYUVFrame, &got, &packet);
        if(paserLen<0)
        {
            goto DECODE2_RET;
        }

        if( got )
        {
            int bytes=avpicture_get_size(AV_PIX_FMT_RGB32, m_pCodecCtx->width, m_pCodecCtx->height);
            if( m_PicBytes != bytes )
            {
                m_PicBytes = bytes;
                if(m_PicBuf) delete[]m_PicBuf;
                m_PicBuf = new uint8_t[m_PicBytes];
                avpicture_fill((AVPicture*)avPicture, m_PicBuf, AV_PIX_FMT_RGB32,
                               m_pCodecCtx->width, m_pCodecCtx->height);
                if(m_pImgCtx) sws_freeContext(m_pImgCtx);
                m_pImgCtx=NULL;
            }

            if( !m_pImgCtx )
                m_pImgCtx = sws_getContext(m_pCodecCtx->width,m_pCodecCtx->height,m_pCodecCtx->pix_fmt,
                                           m_pCodecCtx->width,m_pCodecCtx->height,AV_PIX_FMT_RGB32,SWS_BICUBIC,NULL,NULL,NULL);

            sws_scale(m_pImgCtx, (const uint8_t* const*)m_pYUVFrame->data,
                      m_pYUVFrame->linesize,0,m_pCodecCtx->height,avPicture->data,avPicture->linesize);

            image=GSLAM::GImage(m_pCodecCtx->height,m_pCodecCtx->width,
                                GSLAM::GImageType<uchar,4>::Type,avPicture->data[0]);
            if(image.empty()) { goto DECODE2_RET; }

            // FIXME: if the image is DJI, and take photo every few seconds,
            //        then, the video's width/height may changed
            //        THIS MAY CAUSE segment fault in SLAM, therefore, disable take phote in App, and just
            //        use video stream
            if(image.cols==1280&&image.rows==720 && 0)
            {
                int64_t sum=0;
                for(int y=0;y<image.rows;y++)
                    for(int x=0;x<160;x++)
                    {
                        pi::Array_<uchar,4> bytes=image.at<pi::Array_<uchar,4> >(x,y);
                        sum+=bytes.data[1];
                    }

                if(sum/(image.rows*160)<10)
                {
                    /*
                        GSLAM::GImage gimage(720,960,image.type());
                        for(int i=0;i<image.rows;i++)
                            memcpy(gimage.data+i*4*960,image.data+i*4*1280+4*160,4*960);
                        image=gimage;
                        */

                    image=GSLAM::GImage();
                }
                else
                {
                    image=image.clone();
                }
            }
            else
            {
                image=image.clone();
            }

            retval = true;
        }

    DECODE2_RET:
        av_free_packet(&packet);
        return retval;
    }


    uint8_t audbuffer2[6] = {0x00,0x00,0x00,0x01,0x09,0x10};
    uint8_t audsize2 = 6;
    uint8_t fillerbuffer2[11] = {0x00,0x00,0x00,0x01,0x0C,0x00,0x00,0x00,0x01,0x09,0x10};
    uint8_t fillersize2 = 11;
    uint8_t audaudbuffer2[12] = {0x00,0x00,0x00,0x01,0x09,0x10, 0x00,0x00,0x00,0x01,0x09,0x10};
    uint8_t audaudsize2 = 12;
    int m_PicBytes=0;
    uint8_t *m_PicBuf=NULL;
    int isFFmpegInitialized;

    QByteArray localBuffer;

    AVFrame* m_pYUVFrame,*avPicture;
    AVCodecContext* m_pCodecCtx;
    AVCodec* m_pAVCodec;
    AVCodecParserContext* m_pCodecPaser;
    SwsContext* m_pImgCtx=NULL;
};

class DroneMapSaver
{
public:
    DroneMapSaver(std::string datasetPath="dronemap"):firstFrame(true),datasetFolder(datasetPath){
        svar.ParseLine("system mkdir -p "+datasetPath+"/rgb");
        traj.open(datasetPath+"/trajectory.txt");
    }
    GSLAM::SO3 PYR2Rotation(double pitch,double yaw,double roll)
    {
        if(pitch>-30) pitch=-90;
        if(fabs(180-fabs(roll))<30) roll+=180;
        GSLAM::SO3 camera2IMU(-0.5,0.5,-0.5,0.5);
        GSLAM::SO3 imu2world;
        imu2world.FromEulerAngle(-pitch,90.-yaw,roll);
        return imu2world*camera2IMU;
    }
    void append(GSLAM::FramePtr frame){
        if(firstFrame)
        {
            firstFrame=false;
            GSLAM::Svar var;
            GSLAM::Camera cam=frame->getCamera();
            GSLAM::Point3d pt;
            frame->getGPSLLA(pt);

            plane2ecef.get_translation()=GSLAM::GPS<>::GPS2XYZ(pt.y,pt.x,0);
            double D2R=3.1415925/180.;
            double lon=pt.x*D2R;
            double lat=pt.y*D2R;
            GSLAM::Point3d up(cos(lon)*cos(lat), sin(lon)*cos(lat), sin(lat));
            GSLAM::Point3d east(-sin(lon), cos(lon), 0);
            GSLAM::Point3d north=up.cross(east);
            double R[9]={east.x, north.x, up.x,
                         east.y, north.y, up.y,
                         east.z, north.z, up.z};
            plane2ecef.get_rotation().fromMatrix(R);

            var.insert("Plane","0 0 0 0 0 0 1");
            var.insert("GPS.Origin",std::to_string(pt.x)+" "+std::to_string(pt.y)+" 0");

            var.insert("Camera.CameraType","PinHole");
            var.insert("Camera.Paraments",VecParament<double>(cam.getParameters()).toString());
            var.save2file(datasetFolder+"/config.cfg");
        }

        GSLAM::Point3d t;
        if(!frame->getGPSECEF(t)) return ;

        GSLAM::SE3 se3;
        se3.get_translation()=plane2ecef.inverse()*t;
        if(!frame->getPitchYawRoll(t)) return;
        se3.get_rotation()=PYR2Rotation(t.x,t.y,t.z);

        std::string time=std::to_string(frame->timestamp());

        traj<<time<<" "<<se3<<std::endl;

        GSLAM::GImage img=frame->getImage();
        QImage qimage(img.data,img.cols,img.rows,QImage::Format_ARGB32);
        qimage.save(QString::fromStdString(datasetFolder)+"/rgb/"+QString(time.c_str())+".jpg");
    }

    bool          firstFrame;
    GSLAM::SE3    plane2ecef;
    std::string   datasetFolder;
    std::ofstream traj;
};
Decoder                 *decoder = NULL;

class DatasetNetwork: public RTMapperDataset
{
public:
    // for image queue use
    // Fixed-size header written in front of each raw image in the offline
    // image queue (_imageQueue); the pixel data follows it in the same buffer.
    struct ImageBufItem
    {
        RTMapperNetHeader   netHeader;      // network header of the source packet
        int                 frameID;        // sequential frame counter at capture time
        int                 imageType;      // GSLAM::GImage type code of the pixels

        char                camName[64];    // camera name (NUL-terminated C string)
        double              camParam[16];   // camera parameters, first camParamN valid
        int                 camParamN;      // number of valid entries in camParam
    };

    // RTMV frame
    // One RTMV packet: a parsed header plus its payload bytes.
    struct RTMV_Frame
    {
    public:
        RTMapperNetHeader   header;   // packet header ("PaVE" framing)
        QByteArray          buf;      // payload, sized by malloc()

    public:
        RTMV_Frame() { memset(&header, 0, sizeof(RTMapperNetHeader)); }

        ~RTMV_Frame() { release(); }

        // Size `buf` to hold the payload announced by a valid header,
        // dropping any previously held bytes first.
        void malloc(void) {
            release();
            if( header.isValid() )
                buf.resize(header.payload_size);
        }

        // Drop the payload bytes held so far (if any).
        void release(void) {
            if( buf.size() )
                buf.clear();
        }
    };

public:
    // Construct and immediately open the dataset from `config`
    // (expects "ip"/"port"/"skip"/"calib" entries, see open()).
    DatasetNetwork(sv::Svar config):_frameId(0),_lastGrabId(0), _beginCap(0),_stopSignal(false)
                    ,_startNum(0),_autoStart(false),_autoStop(false)
                    ,_frameStatus("0"),_status("0"),_shouldStop(false)
                    ,_picNum(0)
//                    ,_socket(SPtr<QTcpSocket>(new QTcpSocket()))
    {
        // Share one process-wide decoder across all dataset instances.
        if( decoder == NULL ) decoder = new Decoder();
        // External control hooks: stop capture / start capture messages.
        _subStop = GSLAM::Messenger::instance().subscribe("sbt.input.stop",0,&DatasetNetwork::getStopSignal,this);
        _subSignalStart = GSLAM::Messenger::instance().subscribe("signal.start",0,&DatasetNetwork::getStartSignal,this);

        options=config;
        open(config);
    }

    // Signal both worker threads to stop, then join them.
    ~DatasetNetwork() {
        _beginCap = false;
        _shouldStop = true;

        // Spin until both threads are joinable before joining.
        // NOTE(review): if open() ever failed to start the threads this loop
        // would spin forever — currently open() always starts both, confirm
        // if that changes.
        while( !_readthread.joinable() || !_decodethread.joinable() ) GSLAM::Rate::sleep(0.01);
        _readthread.join();
        _decodethread.join();
    }

    virtual std::string type() const{return "DatasetNetwork";}

    // Generic command entry point; only "Stop" is recognized, which ends
    // capture and asks the worker threads to terminate.
    virtual void  call(const std::string& command,void* arg=NULL) {
        if( command != "Stop" ) return;

        _shouldStop = true;
        _beginCap = 0;
    }

    // Always reports opened: connection retry/failure handling lives inside
    // the reader threads, so there is no failure state to surface here.
    virtual bool        isOpened(){
        return true;
    }

    // Configure the source from `options` ("ip", "port", "skip"), create the
    // per-run FIFO cache folder, and start the reader + decoder threads.
    // An ip ending in ".rtmv" selects file playback, anything else TCP.
    virtual bool        open(sv::Svar options)
    {
        ip   = stdstring2qstring(options.GetString("ip", "127.0.0.1"));
        port = options.GetInt("port", 1212);

        // Unique cache directory under the system temp folder.
        QDir cacheDir;
        QString cachePath = cacheDir.tempPath() + "/Sibitu/"
                + QString("%1").arg(uint64_t(GSLAM::TicToc::timestamp()*1e15));
        LOG(INFO) << "FIFO cache dir is :" << cachePath.toStdString();
        cacheDir.setPath(cachePath);
        if( !cacheDir.exists() ) cacheDir.mkpath(cachePath);

        _imageQueue = SPtr<OfflineFifo>(new OfflineFifo(qstring2stdstring(cachePath) + "/vb"));

        _beginCap  = 0;
        _videoSkip = options.GetInt("skip", 3);

        // Pick the reader by the extension of `ip`.
        QFileInfo ipInfo(ip);
        QString suffix = ipInfo.suffix().toLower();

        // Launch the worker threads.
        _shouldStop = false;
        _decodethread = std::thread(&DatasetNetwork::thread_decode, this);

        if( suffix == "rtmv" )
            _readthread = std::thread(&DatasetNetwork::thread_read_file, this);
        else
            _readthread = std::thread(&DatasetNetwork::thread_read_tcp, this);

        getStartSignal(true);
        return true;
    }

    // Reader thread for recorded *.rtmv files: scans the file for
    // RTMapperNetHeader-framed packets ("PaVE" tag), paces playback to the
    // recorded timestamps, and pushes complete RTMV_Frame objects onto
    // _rtmvQueue for thread_decode().
    void thread_read_file()
    {
        using namespace pi::utils;
        typedef Datastream_FIFO<unsigned char> FIFO_u8;

        const int RTMV_PACK_MAXSIZE = 1024*1024*16;  // 16 MiB cap per payload

        unsigned char *buf;
        int buf_len, nread, nfind, packIdx = 0;

        buf_len = 2160; // 128+2032
        buf = new unsigned char[RTMV_PACK_MAXSIZE];  // scratch sized for a whole pack

        FIFO_u8 fifo(RTMV_PACK_MAXSIZE);
        RTMV_Frame *rtmvFrame = NULL;

        double startTimestamp = -1;  // wall-clock offset fixed by the first packet
        int beginTS = 0;             // 0 until the pacing origin is set

        // open file
        QByteArray baStr = ip.toLocal8Bit();
        FILE *fp = fopen(baStr.data(), "rb");
        if( fp == NULL )
        {
            fprintf(stderr, "ERR: Can not open file %s\n", baStr.data());
            goto THREAD_READ_FILE_RET;
        }

        while( !feof(fp) && !_shouldStop )
        {
            // read some data & push to fifo
            nread = fread(buf, 1, sizeof(RTMapperNetHeader), fp);
            if( nread <= 0 ) break;
            fifo.write(buf, nread);

            // find package tag (either byte order)
            nfind = fifo.find_tag("PaVE");
            if( nfind == -1 ) nfind = fifo.find_tag("EVaP");
            if( nfind == -1 ) continue;

            // skip unused data
            if( nfind > 0 ) fifo.read(buf, nfind);

            // get header (wait until a full header is buffered)
            if( fifo.used() < sizeof(RTMapperNetHeader) )
                continue;

            // Re-use slot: drop any frame left over from an earlier iteration.
            if( rtmvFrame ) delete rtmvFrame;
            rtmvFrame = new RTMV_Frame;
            RTMapperNetHeader *pHeader = &(rtmvFrame->header);

            fifo.read((unsigned char*) pHeader, sizeof(RTMapperNetHeader));
            pHeader->convert();  // byte-order normalization
            if( !pHeader->isValid() )
            {
                fprintf(stderr, "Header not PaVE.\n");
                continue;
            }

            if( svar.GetInt("Debug.DatasetNetwork", 0) )
            {
                printf("dataframe[%8d]: timestamp = %f, payload_size=%6d\n",
                       packIdx++, pHeader->timestamp, pHeader->payload_size);
                fflush(stdout);
            }

            // read new data to FIFO to fill payload
            if( !pHeader->isValid() || pHeader->payload_size <= 0 )
                continue;

            // Clamp oversized payloads to the FIFO capacity.
            if( pHeader->payload_size > RTMV_PACK_MAXSIZE ) pHeader->payload_size = RTMV_PACK_MAXSIZE;
            rtmvFrame->malloc();

            while( fifo.used() < pHeader->payload_size )
            {
                int n = pHeader->payload_size - fifo.used();
                nread = fread(buf, 1, n, fp);
                if( nread <= 0 ) break;

                fifo.write(buf, nread);
            }
            // EOF before the payload completed: stop playback.
            if( fifo.used() < pHeader->payload_size )
                break;

            // read payload & send it
            int bufn = pHeader->payload_size;
            fifo.read((unsigned char*) rtmvFrame->buf.data(), bufn);

            // read flight status(FIXME: temporarily use 'pHeader->battery' to indicate flight status)
//            int flightStatus = pHeader->battery;
//            if(flightStatus == 100)

            // sync to stream's timestamp: sleep so packets are delivered at
            // the same relative times they were recorded.
            if( !beginTS )
            {
                startTimestamp = GSLAM::TicToc::timestamp() - pHeader->timestamp;
                beginTS = 1;
            }
            else
            {
                double ts = startTimestamp + pHeader->timestamp - GSLAM::TicToc::timestamp();
                GSLAM::Rate::sleep(ts);
            }

            // push to message queue (ownership moves to _rtmvQueue)
            {
                GSLAM::WriteMutex lock(_rtmvQueueMutex);

                _rtmvQueue.push_back(rtmvFrame);
                rtmvFrame = NULL;
            }
        }

    THREAD_READ_FILE_RET:
        if( rtmvFrame ) delete rtmvFrame;
        if( fp ) fclose(fp);
        if( buf ) delete [] buf;
    }

    // Reader thread for live TCP streams: (re)connects to ip:port, scans the
    // byte stream for RTMapperNetHeader-framed packets ("PaVE" tag), and
    // pushes complete RTMV_Frame objects onto _rtmvQueue for thread_decode().
    void thread_read_tcp()
    {
//        LOG(INFO) << "thread_read_tcp ";
        using namespace pi::utils;
        typedef Datastream_FIFO<unsigned char> FIFO_u8;

        const int RTMV_PACK_MAXSIZE = 1024*1024*16;  // 16 MiB cap per payload

        unsigned char *buf;
        int buf_len, nread, nfind, packIdx = 0;

        buf_len = 2160; // 128+2032
        buf = new unsigned char[buf_len];  // per-read scratch buffer

        SPtr<FIFO_u8> fifo = SPtr<FIFO_u8>(new FIFO_u8(RTMV_PACK_MAXSIZE));
        SPtr<QTcpSocket> _socket = SPtr<QTcpSocket>(new QTcpSocket());
        RTMV_Frame *rtmvFrame = NULL;

//        LOG(INFO) << "_shouldStop: " << _shouldStop;
//        LOG(INFO) << "socketType: " << _socket->socketType();
//        LOG(INFO) << "SocketError" << _socket->error();
        while( !_shouldStop )
        {
            // try connect to remote side (retried every 0.1 s until up)
            if( _socket->state() != QTcpSocket::SocketState::ConnectedState )
            {
                _socket->connectToHost(ip, port, QTcpSocket::ReadWrite);

                if( _socket->waitForConnected(3000) )
                {
                    // Announce ourselves to the server.
                    _socket->write("DatasetNetwork\n");
                    _socket->flush();

                    LOG(INFO) << "connect to host: " << ip.toStdString() << ":" << port;
                }

                GSLAM::Rate::sleep(0.1);
                continue;
            }

            // read data & insert to FIFO
            if( _socket->bytesAvailable() < buf_len )
                if( !_socket->waitForReadyRead(10) ) continue;
            nread = _socket->read((char*) buf, (qint64) buf_len);

            if( nread <= 0 ) continue;

            fifo->write(buf, nread);

            // find package tag (either byte order)
            nfind = fifo->find_tag("PaVE");
            if( nfind == -1 ) nfind = fifo->find_tag("EVaP");
            if( nfind == -1 ) continue;

            // skip unused data
            if( nfind > 0 ) fifo->read(buf, nfind);
            // get header (wait until a full header is buffered)
            if( fifo->used() < sizeof(RTMapperNetHeader) )
                continue;

            // Re-use slot: drop any frame left over from an earlier iteration.
            if( rtmvFrame ) delete rtmvFrame;
            rtmvFrame = new RTMV_Frame;
            RTMapperNetHeader *pHeader = &(rtmvFrame->header);

            fifo->read((unsigned char*) pHeader, sizeof(RTMapperNetHeader));
            pHeader->convert();  // byte-order normalization

            if( svar.GetInt("Debug.DatasetNetwork", 0) )
            {
                printf("dataframe[%8d]: timestamp = %f, payload_size=%6d, bufsize=%6d\n",
                       packIdx++, pHeader->timestamp, pHeader->payload_size, _rtmvQueue.size());
                fflush(stdout);
            }

            // read new data to FIFO to fill payload
//            LOG(INFO) << "pHeader payload size: " << pHeader->payload_size;
            if( !pHeader->isValid() || pHeader->payload_size <= 0 )
                continue;

            // Clamp oversized payloads to the FIFO capacity.
            if( pHeader->payload_size > RTMV_PACK_MAXSIZE ) pHeader->payload_size = RTMV_PACK_MAXSIZE;
            rtmvFrame->malloc();

            // Keep reading from the socket until the full payload is buffered.
            // NOTE(review): if the peer goes silent mid-payload this loop only
            // exits via more data arriving — confirm a disconnect is detected
            // upstream.
            while( fifo->used() < pHeader->payload_size )
            {
                int n = buf_len;
                nread = pHeader->payload_size - fifo->used();
                if( nread < buf_len ) n = nread;

                if( _socket->bytesAvailable() < n )
                    if( !_socket->waitForReadyRead(10) ) continue;
                nread = _socket->read((char*) buf, (qint64) n);
                if( nread <= 0 ) continue;

                fifo->write(buf, nread);
            }

            // read payload & push frame to queue
            {
                int bufn = pHeader->payload_size;
                int n;
                // Shadows the outer `buf` on purpose: destination payload buffer.
                unsigned char *buf = (unsigned char*) rtmvFrame->buf.data();

                nread = 0;
                // Copy out of the FIFO in buf_len-sized chunks.
                while( bufn > 0 )
                {
                    n = bufn;
                    if( n > buf_len ) n = buf_len;
                    fifo->read(buf+nread, n);

                    bufn  -= n;
                    nread += n;
                }

                {
                    GSLAM::WriteMutex lock(_rtmvQueueMutex);
//                    LOG(INFO) << "rtmvFrame.buf: " << rtmvFrame->buf.size();
                    _rtmvQueue.push_back(rtmvFrame);  // ownership moves to the queue
                    rtmvFrame = NULL;
                }
            }
        }

        if( rtmvFrame ) delete rtmvFrame;
        if( buf ) delete [] buf;
    }

    // Decoder thread: pops RTMV frames from _rtmvQueue, decodes them into
    // RTMapperFrame objects, publishes each to "UI.output.curframe", and —
    // while capture is enabled — appends every _videoSkip-th frame
    // (header + raw pixels) to the offline image queue.
    void thread_decode()
    {
        RTMV_Frame *rtf;

        FILE *fpDebug = NULL;

        // Optional raw-stream dump for offline debugging.
        if( svar.GetInt("Debug.DatasetNetwork", 0) )
        {
            fpDebug = fopen("ffmpeg_stream", "wb");
        }

//        LOG(INFO) << "shouldStop: " << _shouldStop;
        while( !_shouldStop )
        {
            // wait for image queue size less then 10
            // NOTE: the "0 &&" keeps this backpressure check disabled.
//            LOG(INFO) << "image queue size: " << _imageQueue->size();
            if( 0 && _imageQueue->size() > 10 )
            {
                GSLAM::Rate::sleep(0.01);
                continue;
            }

            // get a RTMV frame (unlocked size probe, then re-check under lock)
//            LOG(INFO) << "rtmv queue size: " << _rtmvQueue.size();
            if( _rtmvQueue.size() < 1 )
            {
                GSLAM::Rate::sleep(0.01);
                continue;
            }
            else
            {
                GSLAM::WriteMutex lock(_rtmvQueueMutex);

                if( _rtmvQueue.size() < 1 ) continue;

                rtf = _rtmvQueue.front();
                _rtmvQueue.pop_front();
            }

            if( svar.GetInt("Debug.DatasetNetwork", 0) )
            {
                if( fpDebug )
                    fwrite(rtf->buf.data(), 1, rtf->buf.size(), fpDebug);
            }

//            LOG(INFO) << "rtf->buf.size: " << rtf->buf.size();
            // decode image (header is a member; decode() may update it)
            header = rtf->header;

            SPtr<RTMapperFrame> fr = decode(header, rtf->buf);

            delete rtf;
            rtf = NULL;

            if( !fr ) continue;

//            LOG(INFO) << "publish curframe successful!";
            GSLAM::Messenger::instance().publish("UI.output.curframe",FramePtr(fr));

            // Announce the first frame's GPS position once.
            if(_frameId == 1)
            {
                GSLAM::Point3d lla;
                fr->getGPSLLA(lla);
                static GSLAM::Publisher pub = GSLAM::Messenger::instance().advertise<GSLAM::Point3d>("dataset.firstFrameLLA", 0);
                pub.publish(lla);
            }

            // push header & image to image queue
            if( _beginCap && (_frameId % _videoSkip == 0) )
            {
                ImageBufItem msgHeader;
                int buf_len;
                uint8_t *buf = NULL;

                GSLAM::GImage img = fr->_image;
                int imgBufSize = img.getWidth()*img.getHeight()*img.elemSize();

                memset(&msgHeader, 0, sizeof(ImageBufItem));
                msgHeader.netHeader = header;
                msgHeader.frameID = _frameId;
                msgHeader.imageType = img.type();

                // NOTE(review): camName is 64 bytes; strcpy would overflow on
                // a longer camera name — confirm the upstream length limit.
                strcpy(msgHeader.camName, fr->_cameraName.c_str());
                VecParament<double> camP = fr->_camera.getParameters();
                msgHeader.camParamN = camP.size();
                for(int i=0; i<camP.size(); i++) msgHeader.camParam[i] = camP[i];

                // Serialize header + pixels into one contiguous buffer.
                // NOTE(review): buf is malloc'd here; presumably _imageQueue
                // takes ownership in push() — confirm OfflineFifo frees it.
                buf_len = sizeof(msgHeader) + imgBufSize;
                buf = (uint8_t*) malloc(buf_len);

                memcpy(buf, &msgHeader, sizeof(msgHeader));
                memcpy(buf+sizeof(msgHeader), img.data, imgBufSize);

                {
                    GSLAM::WriteMutex lock(_mutexFrame);
                    _imageQueue->push(buf, buf_len);
                }
            }
        }

        if( svar.GetInt("Debug.DatasetNetwork", 0) )
        {
            if( fpDebug )
                fclose(fpDebug);
        }
    }


    // Resolve the camera model for the current stream. The camera name is
    // "<uavname>_<width>_<height>" (spaces replaced by '_'). Lookup order:
    //   1. in-memory cache (_cameraMap)
    //   2. calibration file <calib>/<name>.cam
    //   3. a default pinhole model (focal = 0.6 * width, centered principal point)
    // Always returns true; _camera/_cameraName are set on return.
    bool loadCamera(RTMapperNetHeader& header)
    {
        QString cn = QString("%1_%2_%3").arg(header.uavname).arg(_imgWidth).arg(_imgHeight);
        cn.replace(' ', '_');
        _cameraName = cn.toStdString();

        // 1. already resolved for this stream?
        if(_cameraMap.find(_cameraName)!=_cameraMap.end()){
            _camera=_cameraMap[_cameraName];
            return true;
        }

        // 2. try load from *.cam
        {
            std::string camFolder = options.GetString("calib","calib");
            GSLAM::Svar camVar;
            camVar.ParseFile(camFolder+"/"+_cameraName+".cam");
            VecParament<double> vecPara;
            vecPara=camVar.get_var(_cameraName+".Paraments",vecPara);

            // 11 params: pinhole + distortion; 6 params: plain pinhole.
            if(vecPara.size()==11||vecPara.size()==6){
                _camera=GSLAM::Camera(vecPara.data);
                _cameraMap[_cameraName]=_camera;

                LOG(INFO) <<"Loaded calibrated camera: "<< _cameraName;
                return true;
            }
        }

        // 3. fall back to a default pinhole model.
        {
            VecParament<double> camP(6, 0);
            camP[0] = _imgWidth;
            camP[1] = _imgHeight;
            camP[2] = _imgWidth * 0.6;   // assumed focal length
            camP[3] = camP[2];
            camP[4] = camP[0] * 0.5;     // principal point at image center
            camP[5] = camP[1] * 0.5;

            _camera = GSLAM::Camera(camP.data);
            _cameraMap[_cameraName]=_camera;
            // FIX: this warning was logged twice (once here, once after the
            // block); keep a single occurrence.
            LOG(INFO)<<"WARNING: No calibration detected, use default "<<_cameraName;
        }

        return true;
    }

    // Digital zoom: crop the central (zoomScale * w) x (zoomScale * h) region
    // of imgIn and scale it back up to the original size. Returns an empty
    // image for unsupported channel counts.
    GSLAM::GImage imageZoom(GSLAM::GImage& imgIn, double zoomScale)
    {
        const int w = imgIn.cols;
        const int h = imgIn.rows;

        // Wrap the raw pixel buffer in a QImage matching the channel layout.
        QImage wrapped;
        switch( imgIn.channels() )
        {
        case 4:
            wrapped = QImage((uchar*) imgIn.data, w, h, QImage::Format_RGB32);
            break;
        case 3:
            wrapped = QImage((uchar*) imgIn.data, w, h, QImage::Format_RGB888);
            break;
        case 1:
            wrapped = QImage((uchar*) imgIn.data, w, h, QImage::Format_Indexed8);
            break;
        default:
            return GSLAM::GImage();
        }

        // Central crop rectangle for the requested zoom.
        const int x0 = int(w*(1.0 - zoomScale)/2.0);
        const int y0 = int(h*(1.0 - zoomScale)/2.0);
        const int cw = int(w*zoomScale);
        const int ch = int(h*zoomScale);

        QImage cropped = wrapped.copy(QRect(x0, y0, cw, ch));
        QImage zoomed  = cropped.scaled(w, h);
        return qtg(zoomed);
    }

    // Save imgIn to fnImg (format chosen by extension), creating the target
    // directory if needed. Returns 0 on success, -1 for unsupported channel
    // counts.
    int imageSave(GSLAM::GImage& imgIn, const std::string& fnImg)
    {
        int nc = imgIn.channels();
        int w = imgIn.cols;
        int h = imgIn.rows;

        QImage img1;

        // Wrap the raw pixel buffer in a QImage matching the channel layout.
        if ( nc == 4 )
        {
            QImage qimg((uchar*) imgIn.data, imgIn.cols, imgIn.rows, QImage::Format_RGB32);
            img1 = qimg;
        }
        else if ( nc == 3 )
        {
            QImage qimg((uchar*) imgIn.data, imgIn.cols, imgIn.rows, QImage::Format_RGB888);
            img1 = qimg;
        }
        else if ( nc == 1 )
        {
            QImage qimg((uchar*) imgIn.data, imgIn.cols, imgIn.rows, QImage::Format_Indexed8);
            img1 = qimg;
        }
        else
        {
            return -1;
        }

        QString fnPath = stdstring2qstring(fnImg);
        QFileInfo fInfo(fnPath);
        QDir imgDir = fInfo.absoluteDir();
        // FIX: mkdir() only creates the last path component and fails when
        // parents are missing; mkpath() creates the whole chain.
        if( !imgDir.exists() ) imgDir.mkpath(imgDir.absolutePath());

        img1.save(fnPath);
        return 0;
    }


    // Decode one network packet (header + payload) into an RTMapperFrame.
    //
    // The payload is decompressed according to header.video_codec
    // (JPEG / H.264 / DJI H.264 / raw RGB888). On success the frame is
    // populated with image, camera model and GPS/attitude priors.
    //
    // @param header       packet metadata; display_width/height are
    //                     overwritten with the decoded image size
    // @param imageBuffer  compressed (or raw) image bytes
    // @return the decoded frame, or a NULL pointer when decoding fails
    //         or yields an empty image
    SPtr<RTMapperFrame> decode(RTMapperNetHeader& header, QByteArray& imageBuffer)
    {
        GSLAM::GImage image;
        if(header.video_codec==VIDEOCODEC_JPEG)
        {
            // JPEG packets each carry a complete image, so no skipping
            _videoSkip = 1;
            QImage qimage;
//            qimage.loadFromData(imageBuffer,QString("JPG").toLocal8Bit().data());
            qimage.loadFromData(imageBuffer,"JPG");

//            FILE *fpDebug = NULL;
//            if ( (fpDebug=fopen("D:/testImage.jpg", "wb"))==NULL)
//                std::cout<<"Open File failed!"<<endl;
//            fwrite(imageBuffer.data(), imageBuffer.size(), 1, fpDebug);
//            fclose(fpDebug);

            image = qtg(qimage);
        }
        else if(header.video_codec==VIDEOCODEC_H264)
        {
            if(!decoder->decode(header,imageBuffer,image))
                return SPtr<RTMapperFrame>(NULL);
        }
        else if(header.video_codec == VIDEOCODEC_H264_DJINEW )
        {
            if(!decoder->decode2(header,imageBuffer,image))
                return SPtr<RTMapperFrame>(NULL);
        }
        else if(header.video_codec==VIDEOCODEC_NONE)
        {
            // Uncompressed payload: wrap it directly as RGB888
            QImage qimage=QImage((uchar*)imageBuffer.data(),header.display_width,header.display_height,
                         QImage::Format_RGB888);
            image=qtg(qimage);
        }

        if(image.empty()) return SPtr<RTMapperFrame>(NULL);

//        LOG(INFO) << "header.display_width: " << header.display_width << ", image.cols: " << image.cols << "x" << image.rows;

        // Trust the decoder's output size over the advertised one
        header.display_width  = image.cols;
        header.display_height = image.rows;
        _imgWidth  = image.cols;
        _imgHeight = image.rows;

        loadCamera(header);

        // The battery field doubles as a status flag from the sender:
        // "100" triggers auto-start, "0" resets the start counter.
        QString droneStatus = QString("%1").arg(header.battery);
//        LOG(INFO) << "droneStatus: " << droneStatus.toStdString().c_str();
        bool autoStart = false;
        if(droneStatus != _status)
        {
            if(droneStatus == "100")
            {
                autoStart = true;
            }
            else if(droneStatus == "0")
            {
                autoStart = false;
                _startNum = 0;
            }
            _status = droneStatus;
        }

        // Only auto-start once per status cycle
        if(autoStart && _startNum == 0)
        {
            GSLAM::Messenger::instance().publish("UI.dataset.start",true);
            _stopSignal = false;
            _startNum++;
        }

        SPtr<RTMapperFrame> fr(new RTMapperFrame(++_frameId, header.timestamp));

        // NOTE: just for demonstration, it can be remove later!
        // Optional image down/up-scaling controlled by "Dataset.zoomImage".
        if( 1 )
        {
            double zoomImage = svar.GetDouble("Dataset.zoomImage", 1.0);
            if( fabs(zoomImage-1.0) < 1e-6 )
            {
                fr->_camera = _camera;
                fr->_image = image;
            }
            else
            {
                // Rescale intrinsics once; the Svar key latches the fact
                // that _camera has already been adjusted.
                if( !svar.exist("Dataset.zoomImage.CameraScaled") )
                {
                    VecParament<double> camP = _camera.getParameters();

                    if( camP.size() > 4 )
                    {
                        // apply scal to [fx, fy]
                        // (camP[2], camP[3] assumed to be fx, fy)
                        camP[2] *= 1.0/zoomImage;
                        camP[3] *= 1.0/zoomImage;
                    }
                    _camera = GSLAM::Camera(camP.data);

                    svar.insert("Dataset.zoomImage.CameraScaled", "1");
                }

                fr->_image = imageZoom(image, zoomImage);
                fr->_camera = _camera;
            }
        }

        // NOTE: just for test, it can be remove later!
        //       save image to file
        if( 1 )
        {
            std::string svf = options.GetString("save_images", "");
            // NOTE(review): assumes _videoSkip != 0 here — confirm it is
            // initialized for non-JPEG codecs before the first frame.
            if( svf.size() > 0 && (_frameId % _videoSkip == 0) )
            {
                char fnBuf[1024];
                // NOTE(review): %08d assumes fr->id() fits an int — confirm
                sprintf(fnBuf, "%s/%08d.jpg", svf.c_str(), fr->id());
                imageSave(image, fnBuf);
            }
        }

        fr->_cameraName = _cameraName;

        // Implausible gimbal pitch is forced to nadir (-90 degrees)
        if(header.cam_pitch>-30) header.cam_pitch=-90;
        // GPS prior layout: lng,lat,alt; their sigmas (huge when lng is
        // out of the valid [-180,180] range); height + sigma; pitch,yaw,
        // roll + sigmas.
        fr->_gpshpyr={header.lng,header.lat,header.alt,
                      fabs(header.lng)<=180?5:1e6,fabs(header.lng)<=180?5:1e6,fabs(header.lng)<=180?10:1e6,
                      header.H,10,
                      header.cam_pitch,header.cam_yaw,header.cam_roll,10,10,10};

        // Optionally convert lng/lat from radians to degrees
        if(jvar.get("is_rad",false)){
            fr->_gpshpyr[0]*=180/3.1415926;
            fr->_gpshpyr[1]*=180/3.1415926;
        }
//        GSLAM::Messenger::instance().publish("sbt.output.curframe",FramePtr(fr));
        return fr;
    }

    // Convert a QImage to a GSLAM::GImage (pixel data is copied).
    // Supported formats: RGB32 (4ch), RGB888 (3ch), Indexed8 (1ch);
    // any other format yields an empty GImage.
    GSLAM::GImage qtg(QImage qimage)
    {
        int gimgType;
        switch (qimage.format())
        {
        case QImage::Format_RGB32:
            gimgType = GSLAM::GImageType<uchar,4>::Type;
            break;
        case QImage::Format_RGB888:
            gimgType = GSLAM::GImageType<uchar,3>::Type;
            break;
        case QImage::Format_Indexed8:
            gimgType = GSLAM::GImageType<uchar,1>::Type;
            break;
        default:
            return GSLAM::GImage();
        }

        return GSLAM::GImage(qimage.height(), qimage.width(),
                             gimgType, qimage.bits(), true);
    }

    // Drain the image queue and return the newest frame.
    //
    // Spins (10 ms sleeps) until the queue yields at least one item or a
    // stop is signalled; all queued items are consumed and only the last
    // one is returned, so a slow consumer always sees the latest frame.
    // Each queue item is an ImageBufItem header followed by raw image
    // bytes; the image data is copied into the frame and the buffer freed.
    // When "mainWindow.manualContral" is "0", the drone status encoded in
    // the battery field ("100" = start, "0" = stop) gates delivery.
    //
    // @return the latest frame, or an empty FramePtr once stopped.
    FramePtr grabLatestFrame()
    {
        _beginCap = 1;

        GSLAM::FramePtr finalframe;

        while( !_stopSignal )
        {
            if ( _imageQueue->size() > 0 )
            {
                uint8_t *buf = NULL;
                uint64_t buf_len = 0;

                // get a image item
                {
                    GSLAM::WriteMutex lock(_mutexFrame);

                    if( 0 != _imageQueue->pop(&buf, &buf_len) ) continue;

                    if( buf == NULL ) continue;
                }

                // restore RTMapperNetHeader, image buffer
                ImageBufItem *pHeader = (ImageBufItem*) buf;
                RTMapperNetHeader netHeader = pHeader->netHeader;

                SPtr<RTMapperFrame> fr(new RTMapperFrame(pHeader->frameID, netHeader.timestamp));

                // restore camera settings
                VecParament<double> camParm(pHeader->camParamN, 0.0);
                for(int i=0; i<pHeader->camParamN; i++) camParm[i] = pHeader->camParam[i];
                fr->_camera = GSLAM::Camera(camParm.data);
                fr->_cameraName = pHeader->camName;

                // restore image (deep copy, so buf can be freed below)
                fr->_image = GSLAM::GImage(fr->_camera.height(), fr->_camera.width(),
                                           pHeader->imageType,
                                           buf+sizeof(ImageBufItem), true);

                // implausible gimbal pitch is forced to nadir (-90 deg)
                if(netHeader.cam_pitch>-30) netHeader.cam_pitch=-90;
                // GPS prior: lng,lat,alt; sigmas (huge for invalid lng);
                // height + sigma; pitch,yaw,roll + sigmas
                fr->_gpshpyr={netHeader.lng,netHeader.lat,netHeader.alt,
                              fabs(netHeader.lng)<=180?5:1e6,fabs(netHeader.lng)<=180?5:1e6,fabs(netHeader.lng)<=180?10:1e6,
                              netHeader.H,10,
                              netHeader.cam_pitch,netHeader.cam_yaw,netHeader.cam_roll,
                              10,10,10};

                // optionally convert lng/lat from radians to degrees
                if(jvar.get("is_rad",false)){
                    fr->_gpshpyr[0]*=180/3.1415926;
                    fr->_gpshpyr[1]*=180/3.1415926;
                }
                free(buf);

                // battery field doubles as a start/stop status flag
                QString droneStatus = QString("%1").arg(netHeader.battery);
                if(droneStatus != _frameStatus)
                {
                    if(droneStatus == "100")
                    {
                        _autoStart = true;
                        _autoStop = false;
                    }
                    else if(droneStatus == "0")
                    {
                        _autoStart = false;
                        _autoStop = true;
                    }

                    _frameStatus = droneStatus;
                }

                std::string manualContral = svar.GetString("mainWindow.manualContral","1");
                if(manualContral == "0")
                {
                    if(_autoStart)
                        finalframe=fr;
                    if(_autoStop)
                        GSLAM::Messenger::instance().publish("UI.dataset.stop",true);
                }
                else
                    finalframe=fr;
                // keep draining: only the newest frame is kept
                continue;
            }

            if(finalframe)
                return finalframe;
            GSLAM::Rate::sleep(0.01);
        }

        return FramePtr();
    }

    // Blocking grab of the next frame from the image queue.
    //
    // Spins (10 ms sleeps) until a frame is available or a stop is
    // signalled. Each queue item is an ImageBufItem header followed by
    // raw image bytes; the image data is copied into the frame and the
    // buffer freed. When "mainWindow.manualContral" is "0", the drone
    // status encoded in the battery field ("100" = start, "0" = stop)
    // gates frame delivery.
    //
    // @return the next frame, or an empty FramePtr once stopped.
    virtual FramePtr grabFrame()
    {
        _beginCap = 1;

        while( !_stopSignal )
        {
            if ( _imageQueue->size() > 0 )
            {
                uint8_t *buf = NULL;
                uint64_t buf_len = 0;

                // get a image item
                {
                    GSLAM::WriteMutex lock(_mutexFrame);

                    if( 0 != _imageQueue->pop(&buf, &buf_len) ) continue;

                    if( buf == NULL ) continue;
                }

                // restore RTMapperNetHeader, image buffer
                ImageBufItem *pHeader = (ImageBufItem*) buf;
                RTMapperNetHeader netHeader = pHeader->netHeader;

                SPtr<RTMapperFrame> fr(new RTMapperFrame(pHeader->frameID, netHeader.timestamp));

                // restore camera settings
                VecParament<double> camParm(pHeader->camParamN, 0.0);
                for(int i=0; i<pHeader->camParamN; i++) camParm[i] = pHeader->camParam[i];
                fr->_camera = GSLAM::Camera(camParm.data);
                fr->_cameraName = pHeader->camName;

                // restore image (deep copy, so buf can be freed below)
                fr->_image = GSLAM::GImage(fr->_camera.height(), fr->_camera.width(),
                                           pHeader->imageType,
                                           buf+sizeof(ImageBufItem), true);

                // implausible gimbal pitch is forced to nadir (-90 deg)
                if(netHeader.cam_pitch>-30) netHeader.cam_pitch=-90;
                // GPS prior: lng,lat,alt; sigmas (huge for invalid lng);
                // height + sigma; pitch,yaw,roll + sigmas
                fr->_gpshpyr={netHeader.lng,netHeader.lat,netHeader.alt,
                              fabs(netHeader.lng)<=180?5:1e6,fabs(netHeader.lng)<=180?5:1e6,fabs(netHeader.lng)<=180?10:1e6,
                              netHeader.H,10,
                              netHeader.cam_pitch,netHeader.cam_yaw,netHeader.cam_roll,
                              10,10,10};

                // optionally convert lng/lat from radians to degrees
                if(jvar.get("is_rad",false)){
                    fr->_gpshpyr[0]*=180/3.1415926;
                    fr->_gpshpyr[1]*=180/3.1415926;
                }
                free(buf);

                // battery field doubles as a start/stop status flag
                QString droneStatus = QString("%1").arg(netHeader.battery);
                if(droneStatus != _frameStatus)
                {
                    if(droneStatus == "100")
                    {
                        _autoStart = true;
                        _autoStop = false;
                    }
                    else if(droneStatus == "0")
                    {
                        _autoStart = false;
                        _autoStop = true;
                    }

                    _frameStatus = droneStatus;
                }

                std::string manualContral = svar.GetString("mainWindow.manualContral","1");
                if(manualContral == "0")
                {
                    if(_autoStart)
                        return fr;
                    if(_autoStop)
                        GSLAM::Messenger::instance().publish("UI.dataset.stop",true);
                }
                else
                    return fr;

            }

            GSLAM::Rate::sleep(0.01);
        }

        return FramePtr();
    }

    // Slot for the stop signal: latch it and, when stopping, reset the
    // auto start/stop state machine back to its idle ("0") state.
    void getStopSignal( const bool& sign)
    {
        _stopSignal = sign;
        if (!sign) return;

        _autoStart   = false;
        _autoStop    = false;
        _frameStatus = "0";
        _status      = "0";
    }

    // Slot for the start signal: just latch the flag.
    void getStartSignal(const bool& sign)
    {
        _startSignal = sign;
    }

    // This dataset is a live network stream, so it is always "online".
    virtual bool  isOnline() const { return true; }

    // Register the handler to be notified of new frames. Only the pointer
    // is stored; ownership stays with the caller.
    virtual bool  setNewFrameCallback(GSLAM::GObjectHandle* handle) {
        _handle = handle;
        return true;
    }


protected:
//    SPtr<QTcpSocket> _socket;

    GSLAM::GObjectHandle*   _handle = NULL;              // new-frame callback (not owned)
    std::thread             _readthread, _decodethread;  // network read / decode workers


    GSLAM::Camera           _camera;       // current camera model
    std::string             _cameraName;   // current camera identifier

    // camera models / names keyed by camera identifier
    std::map<std::string, GSLAM::Camera> _cameraMap;
    std::map<std::string, std::string>   _camNameMap;

    QString                 ip,_status,_frameStatus;  // peer address; last seen drone status strings
    quint16                 port;                     // peer port

    sv::Svar                options;  // dataset options (e.g. "save_images")
    RTMapperNetHeader       header;   // last received packet header

    int                     _imgWidth, _imgHeight;  // decoded image size

    int                     _beginCap;   // set once grabbing has started
    int                     _videoSkip;  // frame-skip divisor for image saving

    int                     _frameId,_lastGrabId,_startNum,_picNum;  // frame counters

    SPtr<OfflineFifo>       _imageQueue;  // decoded-frame byte buffers
    GSLAM::MutexRW          _mutexFrame;  // guards _imageQueue pops

    std::deque<RTMV_Frame*> _rtmvQueue;       // raw RTMV frames awaiting decode
    GSLAM::MutexRW          _rtmvQueueMutex;  // guards _rtmvQueue

    GSLAM::Subscriber       _subStop,_subSignalStart;  // UI start/stop subscriptions
    // control flags driven by UI signals and the drone status field
    bool                    _stopSignal,_startSignal,_autoStop,_autoStart,_shouldStop;
};
};

using GSLAM::DatasetFactory;

// Toggle whether incoming GPS lng/lat values are treated as radians
// (and converted to degrees by the frame decoders) instead of degrees.
void set_gps_rad(bool is_rad){
   jvar["is_rad"]=is_rad;
}

// Script-level bindings for this module: exposes the network dataset,
// GImage, MapFrame and the Messenger pub/sub machinery to Svar scripts.
REGISTER_SVAR_MODULE(rtmv){
    jvar["set_gps_rad"] = set_gps_rad;
    // Dataset entry points usable from scripts
    sv::Class<DatasetNetwork>("DatasetNetwork")
            .unique_construct<sv::Svar>()
            .def("grabFrame",&DatasetNetwork::grabFrame)
            .def("grabLatestFrame",&DatasetNetwork::grabLatestFrame)
            ;

    using namespace GSLAM;
    using namespace std;

    // GImage binding, including buffer-protocol style conversion to and
    // from numpy-like SvarBuffer objects
    sv::Class<GImage>("GImage")
                .construct<>()
                .construct<GImage>()
                .construct<int,int,int,uchar*,bool>()
                .def("empty",&GImage::empty)
                .def_readonly("data",&GImage::data)
                .def("elemSize",&GImage::elemSize)
                .def("elemSize1",&GImage::elemSize1)
                .def("channels",&GImage::channels)
                .def("type",&GImage::type)
                .def("total",&GImage::total)
                .def("clone",&GImage::clone)
                .def("row",&GImage::row)
                .def("width",&GImage::getWidth)
                .def("height",&GImage::getHeight)
                .def("__repr__",[](const GImage& img){
                    return to_string(img.cols)+"x"
                            +to_string(img.rows)+"x"+to_string(img.channels());})
                // Construct a GImage from a generic buffer (2D = 1 channel,
                // 3D = interleaved channels in the last dimension)
                .def("__init__",[](sv::SvarBuffer buffer){
            if(buffer._holder.is<GImage>())
                return buffer._holder.as<GImage>();
            std::string format=buffer._format;
            std::vector<ssize_t> shape=buffer.shape;
            int channels=1;
            if(shape.size()==2)
                channels=1;
            else if(shape.size()==3)
                channels=shape[2];
            else
                LOG(FATAL)<<"Shape should be 2 or 3 demisson";

            int rows=shape[0];
            int cols=shape[1];

            // Map struct-style format characters to GImage element types
            static int lut[256]={0};
            lut['b']=0;lut['B']=1;
            lut['h']=2;lut['H']=3;
            lut['i']=4;lut['I']=5;
            lut['f']=6;lut['d']=7;
            int type=lut[format.front()]+((channels-1)<<3);
            return GImage(rows,cols,type,(uchar*)buffer.ptr(),true);
        })
        // Expose the pixel data back as a buffer (no copy; shares holder)
        .def("__buffer__",[](GImage& self){
            std::string formats[]={"b","B","h","H","i","I","f","d"};
            std::string format   =formats[(self.type()&0x7)];
            return sv::SvarBuffer(self.data,self.elemSize1(),format,{self.rows,self.cols,self.channels()},{},self.holder);
        })
                ;

    // MapFrame binding: poses, camera, image, IMU/GPS priors, keypoints,
    // observations and covisibility graph accessors
    sv::Class<MapFrame>("MapFrame")
            .def("id",[](MapFrame& self){return (int)self.id();})
                .def("timestamp",&MapFrame::timestamp)
                .def("setPose",(void (MapFrame::*)(const SE3&))&MapFrame::setPose)
                .def("setPoseSim3",(void (MapFrame::*)(const SIM3&))&MapFrame::setPose)
                .def("getPose",(SE3(MapFrame::*)()const)&MapFrame::getPose)
                .def("getPoseScale",&MapFrame::getPoseScale)
                .def("cameraNum",&MapFrame::cameraNum)
                .def("getCameraPose",&MapFrame::getCameraPose)
                .def("imageChannels",&MapFrame::imageChannels)
                .def("getCamera",&MapFrame::getCamera)
                .def("getImage",&MapFrame::getImage)
                .def("setImage",&MapFrame::setImage)
                .def("setCamera",&MapFrame::setCamera)
                .def("getIMUNum",&MapFrame::getIMUNum)
                .def("getIMUPose",&MapFrame::getIMUPose)
                .def("getAcceleration",&MapFrame::getAcceleration)
                .def("getAngularVelocity",&MapFrame::getAngularVelocity)
                .def("getMagnetic",&MapFrame::getMagnetic)
                .def("getAccelerationNoise",&MapFrame::getAccelerationNoise)
                .def("getAngularVNoise",&MapFrame::getAngularVNoise)
                .def("getPitchYawRoll",[](MapFrame& self)->sv::Svar{
        Point3d pyr;
        self.getPitchYawRoll(pyr);
        return {{"pitch",pyr.x},{"yaw",pyr.y},{"roll",pyr.z}};
    })
                .def("getPYRSigma",&MapFrame::getPYRSigma)
                .def("getGPSNum",&MapFrame::getGPSNum)
                .def("getGPSPose",&MapFrame::getGPSPose)
                .def("getGPSLLA",[](MapFrame& self)->sv::Svar{
        Point3d lla;
        self.getGPSLLA(lla);
        return {{"lng",lla.x},{"lat",lla.y},{"alt",lla.z}};
    })
                .def("getGPSLLASigma",&MapFrame::getGPSLLASigma)
                .def("getGPSECEF",&MapFrame::getGPSECEF)
                .def("getHeight2Ground",[](MapFrame& self)->sv::Svar{
        Point2d height;
        self.getHeight2Ground(height);
        return {{"height",height.x},{"sigma",height.y}};
    })
                .def("keyPointNum",&MapFrame::keyPointNum)
                .def("setKeyPoints",&MapFrame::setKeyPoints)
                .def("getKeyPoints",(std::vector<KeyPoint>(MapFrame::*)() const)&MapFrame::getKeyPoints)
                .def("getKeyPointColor",&MapFrame::getKeyPointColor)
                .def("getKeyPointIDepthInfo",&MapFrame::getKeyPointIDepthInfo)
                .def("getKeyPointObserve",&MapFrame::getKeyPointObserve)
                .def("getDescriptor",&MapFrame::getDescriptor)
                .def("getBoWVector",(BowVector (MapFrame::*)()const)&MapFrame::getBoWVector)
                .def("getFeatureVector",(FeatureVector (MapFrame::*)()const)&MapFrame::getFeatureVector)
                .def("getFeaturesInArea",&MapFrame::getFeaturesInArea)
                .def("observationNum",&MapFrame::observationNum)
                .def("getObservations",(std::map<GSLAM::PointID,size_t>(MapFrame::*)()const)&MapFrame::getObservations)
                .def("addObservation",&MapFrame::addObservation)
                .def("eraseObservation",&MapFrame::eraseObservation)
                .def("clearObservations",&MapFrame::clearObservations)
                .def("getParent",&MapFrame::getParent)
                .def("getChild",&MapFrame::getChild)
                .def("getParents",(FrameConnectionMap (MapFrame::*)()const)&MapFrame::getParents)
                .def("getChildren",(FrameConnectionMap (MapFrame::*)()const)&MapFrame::getChildren)
                .def("addParent",&MapFrame::addParent)
                .def("addChildren",&MapFrame::addChildren)
                .def("eraseParent",&MapFrame::eraseParent)
                .def("eraseChild",&MapFrame::eraseChild)
                .def("clearParents",&MapFrame::clearParents)
                .def("clearChildren",&MapFrame::clearChildren)
                .def("getMedianDepth",&MapFrame::getMedianDepth)
                .def("channelTypeString",&MapFrame::channelTypeString)
                .def("channelString",&MapFrame::channelString)
                ;

    // Messenger pub/sub binding: scripts advertise, subscribe and publish
    // with Svar payloads
    sv::Class<Messenger>("Messenger")
                .construct<>()
                .def_static("instance",&Messenger::instance)
                .def("getPublishers",&Messenger::getPublishers)
                .def("getSubscribers",&Messenger::getSubscribers)
                .def("introduction",&Messenger::introduction)
                .def("advertise",[](Messenger msg,const std::string& topic,int queue_size){
            return msg.advertise<sv::Svar>(topic,queue_size);
        })
        .def("subscribe",[](Messenger msger,
             const std::string& topic, int queue_size,
             const sv::SvarFunction& callback){
            return msger.subscribe(topic,queue_size,callback);
        })
        .def("publish",[](Messenger* msger,std::string topic,sv::Svar msg){return msger->publish(topic,msg);});

        sv::Class<Publisher>("Publisher")
                .def("shutdown",&Publisher::shutdown)
                .def("getTopic",&Publisher::getTopic)
                .def("getTypeName",&Publisher::getTypeName)
                .def("getNumSubscribers",&Publisher::getNumSubscribers)
                .def("publish",[](Publisher* pubptr,sv::Svar msg){return pubptr->publish(msg);});

        sv::Class<Subscriber>("Subscriber")
                .def("shutdown",&Subscriber::shutdown)
                .def("getTopic",&Subscriber::getTopic)
                .def("getTypeName",&Subscriber::getTypeName)
                .def("getNumPublishers",&Subscriber::getNumPublishers);

        // Make the global messenger instance reachable from scripts
        jvar["messenger"]=Messenger::instance();
}

EXPORT_SVAR_INSTANCE
