#include<opencv2/opencv.hpp>
#include<iostream>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>

using namespace cv;
using namespace std;

/*
Demonstrates two ways to read a video stream — from a live camera or from an
offline video file — using the OpenCV C++ class cv::VideoCapture.
*/

int main()
{
    static GstClockTime timestamp = 0;
    // VideoCapture打开视频流方法1：直接在创建类时指定参数
    VideoCapture capFromCamera(0);
    capFromCamera.set(CV_CAP_PROP_FRAME_WIDTH,1280);
    capFromCamera.set(CV_CAP_PROP_FRAME_HEIGHT,720);
    
    //VideoCapture capFromFile("E:\\video\\video.avi");

    // VideoCapture打开视频流方法2：通过默认构造函数创建对象，然后通过open方法打开视频流
    //VideoCapture capFromFile;
    //capFromFile.open("E:\\video\\1.avi");

    if (!capFromCamera.isOpened())
    {
        return -1;
    }
        guint size = 1280*720*(3);
        GstBuffer *buffer;
        GstMemory *memory;
         //buffer = gst_buffer_new();
        // memory = gst_allocator_alloc(NULL, size, NULL);
    while (1)
    {
        Mat image;
        // get a new frame from camera
        bool frameA = capFromCamera.read(image); 
        // get a new frame from file 
      //  bool frameB = capFromFile.read(frameFromFile); 
    
      double frame_rate = capFromCamera.get(CV_CAP_PROP_FPS);
      //int frame_count = capFromCamera.get(CV_CAP_PROP_FRAME_COUNT);
      int frame_width = capFromCamera.get(CV_CAP_PROP_FRAME_WIDTH);
      int frame_height = capFromCamera.get(CV_CAP_PROP_FRAME_HEIGHT);
      

      std::cout<<"\r\n current:"
            << "  frame rate: "<<frame_rate<<" FPS"
            << "  frame_size: "<< frame_width<< " * " << frame_height<<std::endl;

      std::cout<<"\r\n cv mat:"
            << "  channel: "<<"---"
            << "  frame_size: "<< image.rows<< " * " << image.cols<<std::endl;

        // if (!frameA )
        // {
        //     cout << "摄像头关闭或视频文件读取到结尾" << endl;
        //     break;
        // }

    //     gst_buffer_insert_memory(buffer, -1, memory);
         GstMapInfo map;
	//     gst_buffer_map(buffer, &map, GST_MAP_WRITE);
    //     memcpy((guchar *)map.data, image.data, gst_buffer_get_size(buffer));

    //    GST_BUFFER_PTS(buffer) = timestamp;
	//    timestamp += GST_BUFFER_DURATION(buffer);


	//g_signal_emit_by_name(mAppSrc, "push-buffer", buffer, &ret);
	// gst_buffer_unmap(buffer, &map);
	// gst_buffer_unref(buffer);        


    }
    // 这两句可以省略，在程序结束前，VideoCapture会调用析构函数进行资源的释放
    capFromCamera.release();
  //  capFromFile.release();
    destroyAllWindows();
    cin.get();
    return 0;

}