﻿#include "RTMapperDataset.h"

#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <deque>
//#include <sys/socket.h>
//#include <netinet/in.h>
//#include <arpa/inet.h>
#include <fstream>
#include <list>
#include <mutex>
#include <queue>
#include <string>
#include <thread>
#include <vector>

#include <QtNetwork/QTcpSocket>
#include <QtNetwork/QTcpServer>

#include <QObject>
#include <QSocketNotifier>
#include <QCoreApplication>
#include <QImage>
#include <QDateTime>
#include <QString>
#include <QStringList>
#include <QFile>

#include <GSLAM/core/VecParament.h>
#include <GSLAM/core/Timer.h>
#include <GSLAM/core/Utils.h>
#include <GSLAM/core/Messenger.h>

extern "C"{
#include "libavcodec/avcodec.h"
#include "libavfilter/avfilter.h"
#include "libavformat/avformat.h"
#include "libswscale/swscale.h"
#include "libavutil/imgutils.h"
}

#include "utils_xml.h"
#include "OfflineFIFO.h"
#include "json.hpp"

using GSLAM::FramePtr;

/// One GPS pose record parsed from the pose file / TCP pose stream.
/// All fields default to -1, which marks an invalid/unset pose.
struct PoseInformation
{
    PoseInformation() = default;
    PoseInformation(double _pose_time_stamp, double _lat, double _lng, double _alt)
        : pose_time_stamp(_pose_time_stamp), lat(_lat), lng(_lng), alt(_alt) {}

    double pose_time_stamp = -1;  // seconds (GPS/stream time base)
    double lat = -1;              // latitude,  degrees
    double lng = -1;              // longitude, degrees
    double alt = -1;              // altitude,  meters
};

//inline std::string& str_tolower(std::string &s)
//{
//    for(size_t i=0; i < s.size(); i++) {
//        s[i] = tolower(s[i]);
//    }

//    return s;
//}

/// Convert an integer to its decimal string representation.
/// Replaces the old sprintf-into-a-256-byte-stack-buffer implementation
/// with the bounds-safe standard-library equivalent.
inline std::string int2str(int i)
{
    return std::to_string(i);
}


class DatasetVideoFile: public RTMapperDataset
{
public:
    // Subscribes to the global messenger topics controlling capture:
    // "sbt.input.stop" raises _stopSignal (makes grabFrame() return empty),
    // "signal.start" raises _startSignal. Worker threads start later, in open().
    DatasetVideoFile():_frameId(0),_lastGrabId(0), _beginCap(0),_stopSignal(false), videoDelayTime(0),
                       firstFrameTime(-1), firstPoseTime(-1)
    {
        _subStop = GSLAM::Messenger::instance().subscribe("sbt.input.stop",0,&DatasetVideoFile::getStopSignal,this);
        _subSignalStart = GSLAM::Messenger::instance().subscribe("signal.start",0,&DatasetVideoFile::getStartSignal,this);
    }

    /// Signal the worker threads to stop, then join them before destruction.
    /// Joining only when joinable() fixes a hang in the old code, which
    /// spin-waited *until* joinable() became true and therefore never returned
    /// when open() had not been called (a default-constructed std::thread is
    /// never joinable).
    ~DatasetVideoFile()
    {
        _shouldStop=true;   // decode/pose loops poll this flag
        _beginCap = 0;
        if(_readthread.joinable()) _readthread.join();
        if(_listenPose.joinable()) _listenPose.join();
        LOG(INFO) << "thread close";
    }

    /// @return the dataset implementation identifier used by the factory.
    virtual std::string type() const
    {
        return std::string("DatasetVideoFile");
    }

    /// Generic command entry point. Only "Stop" is understood: it halts the
    /// decode loop and disables capture. @p arg is unused.
    virtual void  call(const std::string& command,void* arg=NULL)
    {
        if( command != "Stop" ) return;   // unknown commands are ignored

        _shouldStop = true;
        _beginCap = 0;
    }

    // Always reports opened: the decode thread keeps retrying the stream
    // internally, so callers never observe a "closed" state from here.
    virtual bool        isOpened()
    {
        return true;
    }

    /// Parse the dataset description (XML or Svar file), normalize the video
    /// URL (appending a receive buffer size for network streams), create the
    /// on-disk frame FIFO, and spawn the decode and (optional) pose threads.
    /// @param dataset path of the dataset description file
    /// @return false when the description file cannot be parsed
    virtual bool        open(const std::string& dataset)
    {
        LOG(INFO) << "open this dataset: " << dataset;
        if(!tinyxml2::loadSvarXML1(var, dataset))
            if(!var.ParseFile(dataset)) return false;

        videoDelayTime = var.GetDouble("Dataset.DelayTime", 0.0);
        videoURL = var.GetString("Dataset.VideoFile", "udp://127.0.0.1:1234");
        videoFPS = var.GetDouble("Dataset.VideoFPS", 30);
        poseURL = var.GetString("Dataset.PoseFile", "");

        // keep every videoSkip-th decoded frame (minimum 1 = keep all)
        videoSkip = var.GetInt("Dataset.Skip", 3);
        if( videoSkip <= 0 ) videoSkip = 1;

        // network receive buffer size, in MiB
        int bufSize = var.GetInt("Dataset.BufSize", 50);
        if( bufSize < 1 ) bufSize = 1;

        // check whether this is a network URL
        {
            std::string u = str_tolower(videoURL);
            std::string uh = u.substr(0, 6);
            std::string::size_type pa;
            std::string ub;

            if( "udp://" == uh || "http:/" == uh )
            {
                // FIXME: if user want to set their other argument?
                // strip any existing query string, then append our buffer_size
                pa = u.find_last_of("?");
                if( pa == std::string::npos ) ub = u;
                else                          ub = u.substr(0, pa);

                videoURL = ub + "?buffer_size=" + int2str(bufSize*1024*1024);
            }
        }

        // create offline FIFO backing store under the user temp folder
        std::string tmpPath = svar.GetString("MainWindow.UserFolder", ".") + "/temp";
        GSLAM::utils::path_mkdir(tmpPath);
        _imageQueue = SPtr<OfflineFifo>(new OfflineFifo(tmpPath + "/vb"));

        // create read thread (and pose thread only when a pose source is set)
        _readthread = std::thread(&DatasetVideoFile::decodeVideo, this);
        if (!poseURL.empty())
            _listenPose = std::thread(&DatasetVideoFile::decodePose, this);

        return true;
    }

    /**
     * Decide whether @a url points to a local pose file (*.txt) or to a
     * network endpoint.  For a network endpoint the members posePort and
     * poseURL are updated as a side effect ("tcp://host:port" is split into
     * host and port, the "tcp://" scheme prefix is stripped).
     *
     * The caller passes the member poseURL itself as @a url, so we work on a
     * local copy: the old code read from @a url *after* overwriting it and
     * only produced the right answer by accident of that aliasing.  A local
     * variable shadowing bug (two `i`s) is fixed as well.
     *
     * @return true for a local *.txt file, false for a network endpoint.
     */
    bool isLocalFile(std::string &url)
    {
        const std::string u = url;   // alias-safe copy (url may be this->poseURL)

        // extension = everything after the last '.' (whole string if none)
        std::string::size_type dot = u.find_last_of('.');
        std::string suffix = (dot == std::string::npos) ? u : u.substr(dot + 1);
        std::cout << "suffix: " << suffix << std::endl;
        if (suffix == "txt")
            return true;

        // network endpoint: split host and port on the last ':'
        std::string::size_type colon = u.find_last_of(':');
        std::string port = (colon == std::string::npos) ? u : u.substr(colon + 1);
        posePort = std::atoi(port.c_str());
        poseURL  = (colon == std::string::npos) ? u : u.substr(0, colon);

        if (poseURL.substr(0, 3) == "tcp")
            poseURL = poseURL.substr(6);   // drop the "tcp://" scheme prefix
        LOG(INFO) << "port: " << port;
        LOG(INFO) << "tcp url: " << poseURL;
        return false;
    }

    /**
     * Worker thread body: read pose records (one JSON object per line with
     * "timestamp", "lat", "lng", "alt" keys) either from a local *.txt file
     * or from a TCP stream, and append them to poseInfoQueue (guarded by
     * poseMutex).  Runs until _shouldStop is raised.
     *
     * Fixes over the previous version:
     *  - declared int but fell off the end (undefined behavior);
     *  - the local-file branch used the throwing json::parse overload, so one
     *    malformed line killed the whole process (uncaught exception in a
     *    std::thread calls std::terminate);
     *  - the TCP branch funneled bytes through one std::stringstream; after
     *    getline() hit EOF the stream's failbit made every later write a
     *    silent no-op, dropping all poses after the first batch.  A plain
     *    string buffer with explicit '\n' splitting also preserves trailing
     *    partial lines until they complete.
     *
     * @return 0 on normal shutdown.
     */
    int decodePose(void)
    {
        LOG(INFO) << "start pose decode";
        if (isLocalFile(poseURL)) {
            std::ifstream file(poseURL);
            if(!file.is_open())
                LOG(INFO) << "the local pose file is not open";

            std::string temp;
            while(getline(file, temp) && !_shouldStop){
                // non-throwing parse; malformed lines yield a discarded value
                nlohmann::json j = nlohmann::json::parse(temp, nullptr, false);

                if (!j.is_discarded() && j.find("timestamp") != j.end() && j.find("lat") != j.end() && j.find("lng") != j.end() && j.find("alt") != j.end())
                {
                    std::unique_lock<std::mutex> lock(poseMutex);
                    poseInfoQueue.emplace_back(PoseInformation(j["timestamp"], j["lat"], j["lng"], j["alt"]));
                }

                GSLAM::Rate::sleep(0.0001);
            }
            file.close();
            return 0;
        }

        SPtr<QTcpSocket> _socket = SPtr<QTcpSocket>(new QTcpSocket());
        double lastTime=-1;
        double minPosTimeDiff = svar.GetDouble("Dataset.MinPosTimeDiff",2);

        std::string pending;   // received bytes not yet terminated by '\n'

        while( !_shouldStop ){
            // (re)connect when the socket is not in the connected state
            if (_socket->state() != QTcpSocket::SocketState::ConnectedState)
            {
                _socket->connectToHost(poseURL.c_str(), posePort, QTcpSocket::ReadWrite);
                if (_socket->waitForConnected(3000))
                {
                    LOG(INFO) << "connect to host: " << poseURL;
                }

                GSLAM::Rate::sleep(0.1);
                continue;
            }
            if(!_socket->waitForReadyRead(100) )
                continue;

            QByteArray bytes = _socket->readAll();
            if(bytes.size()) {
                pending.append(bytes.data(), bytes.size());
            }

            // consume every complete line; keep a trailing partial line
            std::string::size_type eol;
            while((eol = pending.find('\n')) != std::string::npos)
            {
                std::string line = pending.substr(0, eol);
                pending.erase(0, eol + 1);

                nlohmann::json j = nlohmann::json::parse(line,nullptr,false);
                if (!j.is_discarded() && j.find("timestamp") != j.end() && j.find("lat") != j.end() && j.find("lng") != j.end() && j.find("alt") != j.end())
                {
                    double pose_time=j["timestamp"].get<double>();
                    // rate limit: keep at most one pose per minPosTimeDiff seconds
                    if(lastTime+minPosTimeDiff > pose_time) continue;
                    std::unique_lock<std::mutex> lock(poseMutex);
                    poseInfoQueue.emplace_back(PoseInformation(pose_time, j["lat"], j["lng"], j["alt"]));
                    LOG(INFO) << "time: " << pose_time << ", lat: " << j["lat"] << ", lng: " << j["lng"] << ", alt: " << j["alt"];
                    lastTime = pose_time;
                }
            }

            GSLAM::Rate::sleep(0.0001);
        }

        LOG(INFO) << "tcp close";
        return 0;
    }

    int decodeVideo(void)
    {
        LOG(INFO) << "start decode video";
        GSLAM::Point3d lla(0,0,0);
        static GSLAM::Publisher pub = GSLAM::Messenger::instance().advertise<GSLAM::Point3d>("dataset.firstFrameLLA", 0);
        pub.publish(lla);

        //get ts stream data
        AVFormatContext *pFormatCtx;
        int i,videoindex;
        AVCodecContext  *pCodecCtx;
        AVCodec         *pCodec;
        AVFrame         *pFrame, *pFrameBGR;
        unsigned char   *out_buffer;
        AVPacket        *packet;
        int             ret,got_picture;
        struct SwsContext *img_convert_ctx;

        double          frameTimestamp;

        _cameraName = "";
        _shouldStop = false;

        int count=0;
        int skip = videoSkip;

        GSLAM::Rate fpsRate(videoFPS);

        // regist FFMPEG codec
        av_register_all();
        avformat_network_init();
        pFormatCtx = avformat_alloc_context();
        // open video URL
        // support rtsp
        AVDictionary * opts = NULL;
        av_dict_set(&opts, "buffer_size", "1024000",0);
        av_dict_set(&opts, "stimeout", "3000000",0);
        int res = avformat_open_input(&pFormatCtx,videoURL.c_str(),NULL,&opts);
        while(res != 0)
        {
            LOG(INFO) << "Couldn't open input stream! Try Again";
            fprintf(stderr, "Couldn't open input stream! Try Again\n");
            res = avformat_open_input(&pFormatCtx,videoURL.c_str(),NULL,&opts);
            fpsRate.sleep();

            if( _shouldStop ) return -1;
        }

        LOG(INFO) <<"avformat input ok";
        while(avformat_find_stream_info(pFormatCtx,NULL) < 0)
        {
            LOG(INFO) << "Couldn't find stream information.\n";
            fprintf(stderr, "Couldn't find stream information.\n");
            fpsRate.sleep();

            if( _shouldStop ) return -1;
        }

        LOG(INFO) << "avformat stream ok";
        videoindex = -1;
        for (i = 0; i < pFormatCtx->nb_streams; i++){
            if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
            {
                videoindex = i;
                break;
            }
        }

        if (videoindex == -1)
        {
            LOG(INFO) <<"Didn't find a video stream.";
            fprintf(stderr, "Didn't find a video stream.\n");
            return -1;
        }
        pCodecCtx = pFormatCtx->streams[videoindex]->codec;
        pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
        pCodecCtx->width=pCodecCtx->coded_width;
        pCodecCtx->height=pCodecCtx->coded_height;
        pCodecCtx->pix_fmt=AV_PIX_FMT_YUV420P;
        pCodecCtx->bit_rate = 6000000;
        if (pCodec == NULL) {
            LOG(INFO) << "Codec not found.\n";
            printf("Codec not found.\n");
            return -1;
        }

        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
            LOG(INFO) << "Could not open codec.";
            printf("Could not open codec.\n");
            return -1;
        }

        pFrame = av_frame_alloc();
        pFrameBGR = av_frame_alloc();

        out_buffer = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1));
        av_image_fill_arrays(pFrameBGR->data, pFrameBGR->linesize,
                             out_buffer,
                             AV_PIX_FMT_BGR24, pCodecCtx->width, pCodecCtx->height, 1);


        packet = (AVPacket *)av_malloc(sizeof(AVPacket));

        //Output video info
//        printf("--------------- File Information ----------------\n");
        av_dump_format(pFormatCtx, 0, videoURL.c_str(), 0);
//        printf("-------------------------------------------------\n");
        std::cout.flush();
        img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
                                         pCodecCtx->width, pCodecCtx->height,
                                         AV_PIX_FMT_BGR24, SWS_BILINEAR,
                                         NULL, NULL, NULL);

        LOG(INFO) << "decode video initialization ok";
        while (!_shouldStop)
        {
            if (av_read_frame(pFormatCtx, packet) < 0){
                av_free_packet(packet);
                continue;
            }
            if (packet->stream_index == videoindex)
            {
                ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);

                if (ret < 0)
                {
                    LOG(INFO) << "Decode Error";
                    printf("Decode Error.\n");
                    goto NEXT_FRAME;
                }

                if (got_picture)
                {
                    // sleep if read local file
                    fpsRate.sleep();
                    if ( count++ % skip != 0 && poseURL.empty()) goto NEXT_FRAME;

                    AVStream *stream = pFormatCtx->streams[packet->stream_index];
                    frameTimestamp = pFrame->pts * av_q2d(stream->time_base);

                    PoseInformation pos;
                    if(!getGoodPose(frameTimestamp,pos))
                        goto NEXT_FRAME;

//                    frameTimestamp = 0.001 * QDateTime::currentMSecsSinceEpoch();

                    sws_scale(img_convert_ctx, (const uchar* const*)pFrame->data, pFrame->linesize,
                              0, pCodecCtx->height,
                              pFrameBGR->data, pFrameBGR->linesize);

                    videoHeight = pCodecCtx->height;
                    videoWidth  = pCodecCtx->width;
                    {
                        SPtr<RTMapperFrame> fr(new RTMapperFrame(_frameId++, frameTimestamp));
                        fr->_image = GSLAM::GImage(videoHeight, videoWidth,
                                                   GSLAM::GImageType<uchar,3>::Type, pFrameBGR->data[0], true);
                        if(!poseURL.empty()){
                            fr->_gpshpyr = std::vector<double>({pos.lng,pos.lat,pos.alt,5.,5.,10.});
                        }

                        loadCamera();
                        fr->_camera=_camera;

                        if( _handle ) _handle->handle(fr);

                        if( _beginCap )
                        {
                            int buf_len;
                            uint8_t *buf;
                            size_t *p1;
                            double *p2;
                            double *p3;

                            buf_len = sizeof(size_t) + sizeof(double) + videoHeight*videoWidth*3 + 3 * sizeof(double);
                            buf = (uint8_t*) malloc(buf_len);

                            p1 = (size_t*) buf;                  *p1 = fr->id();
                            p2 = (double*) (buf+sizeof(size_t)); *p2 = frameTimestamp;
                            p3 = (double*) (buf+sizeof(size_t) + sizeof(double));
//                            *p3 = 0;
//                            *(p3 + 1) = 0;
//                            *(p3 + 2) = 0;
                            *p3 = pos.lng;
                            *(p3 + 1) = pos.lat;
                            *(p3 + 2) = pos.alt;

                            memcpy(buf+sizeof(size_t)+sizeof(double) + 3 * sizeof(double), pFrameBGR->data[0], videoHeight*videoWidth*3);

                            {
                                GSLAM::WriteMutex lock(_mutexFrame);
                                _imageQueue->push(buf, buf_len, true);
                            }

                            free(buf);
                        }

                        GSLAM::Messenger::instance().publish("UI.output.curframe",FramePtr(fr));

                        QString aaa = QString::number(pos.lat,'f',8)+","+QString::number(pos.lng,'f',8)+","+QString::number(pos.alt,'f',8)+"\r";
                        QFile file("D:/pos.txt");
//                        file.open(QIODevice::WriteOnly | QIODevice::Text);
                        file.open(QIODevice::WriteOnly|QIODevice::Append);
                        file.write(aaa.toUtf8());
                        file.close();
                        {
                            int nc = fr->_image.channels();
                            QImage img1;

                            if ( nc == 4 ) {
                                QImage qimg((uchar*) fr->_image.data, fr->_image.cols, fr->_image.rows, QImage::Format_RGB32);
                                img1 = qimg;
                            } else if ( nc == 3 ) {
                                QImage qimg((uchar*) fr->_image.data, fr->_image.cols, fr->_image.rows, QImage::Format_RGB888);
                                img1 = qimg;
                            } else if ( nc == 1 ) {
                                QImage qimg((uchar*) fr->_image.data, fr->_image.cols, fr->_image.rows, QImage::Format_Indexed8);
                                img1 = qimg;
                            }

                            QString fnPath = QString("D:/zxc/%1.jpg").arg(fr->id());
                            QFileInfo fInfo(fnPath);
                            QDir imgDir = fInfo.absoluteDir();
                            if( !imgDir.exists() ) imgDir.mkdir(imgDir.absolutePath());
                            img1.save(fnPath);
                        }
                    }
                }
            }

NEXT_FRAME:
            av_free_packet(packet);
        }

        LOG(INFO) << "decode video close";
        sws_freeContext(img_convert_ctx);
        av_frame_free(&pFrameBGR);
        av_frame_free(&pFrame);
        avcodec_close(pCodecCtx);
        avformat_close_input(&pFormatCtx);
    }

    /**
     * Find the pose whose timestamp best matches the given video timestamp.
     * The first successful lookup estimates video2gps_offset (gps time minus
     * video time, minus the configured videoDelayTime); every consumed pose
     * (and all older ones) is removed from the queue.
     *
     * @param videotime  frame timestamp in the video's time base (seconds)
     * @param pos        out: the matched pose, valid only when returning true
     * @return true when a pose within 0.1 s of the offset-corrected video time
     *         was found, or unconditionally when no pose source is configured.
     *
     * Cleanups: removed an unused QDateTime local, and replaced a fragile
     * dangling-else (the intended else-branch LOG was commented out, silently
     * turning `return false` into the else statement) with an explicit return.
     */
    bool getGoodPose(double videotime,PoseInformation& pos) {
        if(poseURL.empty()) return true;   // no pose source: accept every frame
        std::unique_lock<std::mutex> lock(poseMutex);
        if(videotime<0)
            return false;

        if(poseInfoQueue.empty()) return false;

        // first call: estimate the constant gps-vs-video time offset
        if(video2gps_offset<0) {
            video2gps_offset = poseInfoQueue.front().pose_time_stamp - videotime - videoDelayTime;
        }

        double curTime= videotime + video2gps_offset;

        // locate the newest pose that is not newer than curTime
        int last=-1,next=-1;
        for(size_t i= 0;i< poseInfoQueue.size();++i){
            auto& posCur = poseInfoQueue[i];
            if(posCur.pose_time_stamp<=curTime)
            {
                last=int(i);
                continue;
            }

            next=int(i);
            break;
        }

        if(last<0){
            // every queued pose is newer than the frame; when the queue is
            // empty of usable data at all, report that we are still waiting
            if(next<0)
                LOG(INFO)<<"Waiting pose";
            return false;
        }

        pos = poseInfoQueue[last];

        // drop the consumed pose and everything older
        poseInfoQueue.erase(poseInfoQueue.begin(),poseInfoQueue.begin()+last+1);

        // accept only when pose and frame time agree within 100 ms
        return fabs(pos.pose_time_stamp-curTime)<0.1;
    }

    /**
     * Historical hook for pairing buffered frames with poses.  Its entire
     * implementation (interleaving frameInfoList with poseInfoQueue) had been
     * commented out and superseded by getGoodPose() inside decodeVideo(), so
     * the dead commented code was removed.  The method is kept as a no-op to
     * preserve the class interface.
     */
    void handle_video_frame(SPtr<RTMapperFrame> fr){
        (void)fr;   // intentionally unused
    }

    /**
     * Resolve the camera model for the current video resolution, once
     * (_cameraName acts as the "already resolved" flag):
     *  1. camera parameters embedded in the project file (Dataset.Camera);
     *  2. a <name>_<w>_<h>.cam file in the calibration folder;
     *  3. fallback: auto-detected pinhole parameters from the video size.
     *
     * Cleanup: the old code also parsed the parameter string into a QString /
     * std::vector<double> `paras` that was never used (its own FIXME admitted
     * get_var was what actually took effect) — that dead computation is gone.
     *
     * @return true when a configured/calibrated camera matching the video
     *         size was loaded; false when the auto-detected fallback is used.
     */
    bool loadCamera(void)
    {
        bool res = false;
        // create camera
        if( _cameraName.isEmpty() )
        {
            std::string camName_prj = var.GetString("Dataset.Camera", "CamAuto");
            std::string cam = camName_prj+".Paraments";
            if(var.exist(cam))
            {
                // parameters embedded in the project file
                //FIXME:get_var will get the last data;
                VecParament<double> camP = var.get_var(camName_prj+".Paraments", VecParament<double>());
                _camera = GSLAM::Camera(camP.data);
                _cameraName = camName_prj.c_str();
                res = true;
            }
            else
            {
                QString cameraName;
                std::string camNameOnly;

                if( camName_prj != "CamAuto" )
                {
                    // build "<name>_<width>_<height>" from the bare camera name
                    std::string::size_type p = camName_prj.find_first_of("_");
                    if( p == std::string::npos ) camNameOnly = camName_prj;
                    else camNameOnly = camName_prj.substr(0, p);

                    cameraName = QString("%1_%2_%3").arg((char*)camNameOnly.c_str()).arg(videoWidth).arg(videoHeight);
                    cameraName.replace(' ','_');
                    _cameraName = cameraName;
                }
                else
                {
                    _cameraName = QString::fromStdString(camName_prj);
                }

                // try the per-camera calibration file
                std::string cam=_cameraName.toStdString();
                std::string camFolder = svar.GetString("MainWindow.CalibFolder",svar.GetString("MainWindow.DataFolder","")+"/calib");
                GSLAM::Svar camVar;
                LOG(INFO) << camFolder+"/"+cam+".cam";
                res = camVar.ParseFile(camFolder+"/"+cam+".cam");
                if( res )
                {
                    VecParament<double> vecPara;
                    vecPara=camVar.get_var(cam+".Paraments",vecPara);
                    _camera=GSLAM::Camera(vecPara.data);
                }
            }

            // reject parameters that do not match the actual video size
            if( _camera.width() != videoWidth || _camera.height() != videoHeight ) res = false;

            // fallback: auto-detected pinhole (focal = 0.6 * width, centered)
            if( !res )
            {
                VecParament<double> camP(6, 0);
                camP[0] = videoWidth;
                camP[1] = videoHeight;
                camP[2] = camP[0] * 0.6;
                camP[3] = camP[2];
                camP[4] = camP[0] * 0.5;
                camP[5] = camP[1] * 0.5;

                _camera = GSLAM::Camera(camP.data);
            }
        }

        return res;
    }

    /**
     * Pop the oldest buffered frame from the offline FIFO and return it as an
     * RTMapperFrame.  Busy-waits until an item is available; returns an empty
     * FramePtr when a stop is signalled.
     *
     * Fixes over the previous version: a failed/NULL pop was ignored (the
     * original `goto NEXT_TRY` guards were commented out) and the NULL buffer
     * was then dereferenced — now it retries instead.  The unused `ntry`
     * counter was removed.
     */
    virtual FramePtr grabFrame()
    {
        _beginCap = 1;   // tell the decode thread to start queueing frames

        while( !_stopSignal && !_shouldStop )
        {
            if ( _imageQueue->size() > 0 )
            {
                uint8_t *buf = NULL;
                uint64_t buf_len;
                // get an image item; skip this attempt if the pop fails
                {
                    GSLAM::WriteMutex lock(_mutexFrame);

                    if( 0 != _imageQueue->pop(&buf, &buf_len) || buf == NULL )
                        continue;
                }

                // deserialize: [frameID|timestamp|lng,lat,alt|BGR pixels]
                size_t frameID, *p1;
                double frameTS, *p2;
                double *p3;
                double lng, lat, alt;
                uint8_t *imgBuf = buf + sizeof(size_t) + sizeof(double) + 3 * sizeof(double);

                p1 = (size_t*) buf;                     frameID = *p1;
                p2 = (double*) (buf+sizeof(size_t));    frameTS = *p2;
                p3 = (double*) (buf+ sizeof(size_t)+ sizeof(double));
                lng = *p3; lat = *(p3 + 1); alt = *(p3 + 2);

                // load camera (no-op after the first successful resolution)
                loadCamera();

                // create frame (image data is copied out of buf)
                SPtr<RTMapperFrame> fr(new RTMapperFrame(frameID, frameTS));
                fr->_camera = _camera;
                fr->_cameraName = _cameraName.toStdString();

                fr->_image = GSLAM::GImage(videoHeight, videoWidth,
                                           GSLAM::GImageType<uchar,3>::Type, imgBuf, true);

                if (!poseURL.empty())
                {
                    fr->_gpshpyr = std::vector<double>{lng, lat, alt, 0, 0, 0};
                    GSLAM::Point3d pos(lng,lat,alt);
                    LOG(INFO) << pos;
                    GSLAM::Messenger::instance().publish("setHomePOS",pos);
                }

                free(buf);

                GSLAM::Point3d lla;
                fr->getGPSLLA(lla);
                return fr;
            }
        }
        return FramePtr();
    }

    // Messenger callback for "sbt.input.stop": raises/clears the flag that
    // makes grabFrame() return an empty frame.
    void getStopSignal( const bool& sign)
    {
        _stopSignal = sign;
    }

    // Messenger callback for "signal.start": records the start request.
    void getStartSignal(const bool& sign)
    {
        _startSignal = sign;
    }

    // Live-stream semantics: frames not grabbed in time may be dropped.
    virtual bool  isOnline()const{return true;}

    // Install the handler that receives every decoded frame (not owned).
    virtual bool  setNewFrameCallback(GSLAM::GObjectHandle* handle){
        _handle = handle;
        return true;
    }

protected:
    // callback that receives every decoded frame (observer, not owned)
    GSLAM::GObjectHandle*               _handle=NULL;
    // disk-backed FIFO buffering serialized frames between decode and grab
    SPtr<OfflineFifo>                   _imageQueue;

    // worker threads: _readthread runs decodeVideo(), _listenPose runs
    // decodePose() (only started when a pose source is configured)
    std::thread             _readthread;
    std::thread             _listenPose;

    GSLAM::Camera           _camera;
    QString                 _cameraName;      // empty until loadCamera() resolves it

    std::string             videoURL;         // video file path or stream URL
    double                  videoFPS;         // playback pacing rate
    int                     videoSkip;        // keep every videoSkip-th frame

    int                     _beginCap;        // non-zero: decode thread feeds the FIFO
    int                     videoWidth, videoHeight;

    GSLAM::Svar             var;              // dataset/project configuration

    int                     _frameId,_lastGrabId,_shouldStop;
    GSLAM::MutexRW          _mutexFrame;      // guards _imageQueue push/pop

    GSLAM::Subscriber       _subStop,_subSignalStart;
    bool                    _stopSignal,_startSignal;

    double firstPoseTime;
    double firstFrameTime;
    // videoDelayTime: configured pose latency; video2gps_offset: estimated
    // gps-time minus video-time, computed lazily in getGoodPose()
    double videoDelayTime, video2gps_offset=-1;
    int socketFd;                             // legacy raw-socket fd (unused)
    int posePort;                             // tcp port of the pose stream
//    struct sockaddr_in addr;
    std::string poseURL;                      // pose file path or tcp host
    // poses waiting to be matched against frames (guarded by poseMutex)
    std::vector<PoseInformation> poseInfoQueue;
    std::vector<SPtr<RTMapperFrame>> frameInfoList;
    std::mutex poseMutex;
};
using GSLAM::DatasetFactory;

// Register this implementation under the "video" dataset type so the
// DatasetFactory can instantiate it from a matching dataset description.
REGISTER_DATASET(DatasetVideoFile, video);

