#include "camera.hpp"

Camera::Camera(){
    // Initialize the camera SDK, enumerate attached devices, configure the
    // first camera found, allocate the per-eye sample ring buffers, and spawn
    // the background acquisition thread (CameraSampleLoop).
    iCameraCounts = 1;
    iStatus=-1;
    iDisplayFrames = 100;
    iplImage = nullptr;
    channel=3;


    pic_index_ = 0;
    sample_buffer_index_ = 0;

    CameraSdkInit(1);
    iStatus = CameraEnumerateDevice(&tCameraEnumList,&iCameraCounts);

	printf("count = %d\n", iCameraCounts);
    // At least one camera is connected
    if(iCameraCounts>0){
        // Initialize the camera. Only after a successful init may any other
        // camera-related API be called.
        iStatus = CameraInit(&tCameraEnumList,-1,-1,&hCamera);
        printf("state = %d\n", iStatus);
        // Initialization succeeded
        
        if(iStatus==CAMERA_STATUS_SUCCESS){
            // Load camera settings from the configuration file
            CameraReadParameterFromFile(hCamera,"camera.Config");
            waitKey(10);    // 10 ms wait
            // Fetch the camera capability descriptor. It contains the valid
            // ranges of every configurable parameter and constrains the
            // arguments of the related setter functions.
            CameraGetCapability(hCamera,&tCapability);

            //g_pRgbBuffer_left = (unsigned char*)malloc(tCapability.sResolutionRange.iHeightMax*tCapability.sResolutionRange.iWidthMax*3);
            //g_pRgbBuffer_right = (unsigned char*)malloc(tCapability.sResolutionRange.iHeightMax*tCapability.sResolutionRange.iWidthMax*3);
            for(int i=0;i<sample_buffer_size;i++){
                // One byte per pixel (MONO8 output below), 8-byte aligned.
                g_pRgbBuffer_left[i] = (unsigned char*)CameraAlignMalloc(tCapability.sResolutionRange.iHeightMax*tCapability.sResolutionRange.iWidthMax,8);
                g_pRgbBuffer_right[i] = (unsigned char*)CameraAlignMalloc(tCapability.sResolutionRange.iHeightMax*tCapability.sResolutionRange.iWidthMax,8);            
            }
            /* Put the SDK into working mode so it starts receiving image
               data from the camera. If the camera is in trigger mode, the
               image is only updated once a trigger frame arrives. */
            CameraPlay(hCamera);
            //CameraSetIspOutFormat(hCamera,CAMERA_MEDIA_TYPE_BGR8);
            CameraSetIspOutFormat(hCamera,CAMERA_MEDIA_TYPE_MONO8);
            CameraSetMonochrome(hCamera,false);
            // outfile.open("pic_time.txt",ios::trunc);
            cam_outfile.open("pic_data.txt",ios::trunc);
            sleep(1);        // 1 s wait for the camera to settle
            // Start the camera acquisition thread

            camera_running_.store(true);
            camera_thread_ = std::thread(std::bind(&Camera::CameraSampleLoop,this));
            
        }
    }
}

Camera::~Camera(){
     // Delegate all shutdown work (thread stop, SDK uninit, buffer release)
     // to CameraStop(); safe even if the camera never initialized, since
     // CameraStop() guards on iCameraCounts/iStatus.
     CameraStop();
}


/**
 * Stop acquisition and release all camera resources.
 *
 * Shutdown order matters:
 *   1. signal the sampling thread to exit and join it — the thread calls
 *      CameraGetImageBuffer(hCamera, ...), so the camera handle must stay
 *      valid until the thread has finished;
 *   2. uninitialize the camera handle;
 *   3. free the aligned sample buffers (no longer referenced by anyone).
 *
 * Idempotent: iCameraCounts is zeroed and iStatus is downgraded so a second
 * call (e.g. explicit CameraStop() followed by the destructor) is a no-op.
 */
void Camera::CameraStop(){
    if(iCameraCounts>0){
        if(iStatus==CAMERA_STATUS_SUCCESS){
            // The thread was only started on successful init; stop it BEFORE
            // CameraUnInit so it cannot touch an uninitialized handle.
            camera_running_.store(false);
            if(camera_thread_.joinable())
                camera_thread_.join();      // join() fully synchronizes; no extra sleep needed
        }
        CameraUnInit(hCamera);
        iCameraCounts = 0;
        if(iStatus==CAMERA_STATUS_SUCCESS){
                // outfile.close();
                // Free per-index buffers; check each element, not the array name.
                for(int i=0;i<sample_buffer_size;i++){
                    if(g_pRgbBuffer_left[i]!=NULL)
                        CameraAlignFree(g_pRgbBuffer_left[i]);
                    if(g_pRgbBuffer_right[i]!=NULL)
                        CameraAlignFree(g_pRgbBuffer_right[i]);
                }
                // free(g_pRgbBuffer_right);
                // free(g_pRgbBuffer_left);
                iStatus = CAMERA_STATUS_TIME_OUT;  // sentinel: cleanup done
                cam_outfile.close();
        }
    }
}

void Camera::CameraSampleLoop(){
    // Background acquisition loop: grab a raw multi-eye frame from the SDK,
    // run per-eye ISP into the ring buffers, release the SDK frame, then
    // hand the pair to the stereo undistortion stage. Runs until
    // camera_running_ is cleared by CameraStop().

    // boost::format left_fmt("./left_pic/%06dl");
    // boost::format right_fmt("./right_pic/%06dr");

    while(camera_running_.load()){
        // 2000 ms timeout per grab; on timeout/failure just loop and retry.
        if(CameraGetImageBuffer(hCamera,&sFrameInfo,&pbyBuffer,2000) == CAMERA_STATUS_SUCCESS)
		{
            pic_index_++;
            // auto tp = std::chrono::system_clock::now();
            // std::time_t tt = std::chrono::system_clock::to_time_t(tp);
            // outfile << pic_index << "        " << tt << "seconds from 1970-01-01 00:00:00 UTC" << endl;
            CameraGetExposureTime(hCamera,&exposure_time);  //us
            exposure_time = exposure_time/1000;//ms
            // Log frame index and exposure time (ms) for offline use.
            cam_outfile << pic_index_ << "        " << exposure_time  << endl;
		    //CameraImageProcess(hCamera, pbyBuffer, g_pRgbBuffer,&sFrameInfo);
            // Run ISP on one eye of the multi-eye frame (eye 0 = left,
            // eye 1 = right), writing MONO8 output into this slot of the
            // sample ring buffer.
           
		    CameraMultiEyeImageProcess(hCamera,0,pbyBuffer,&sFrameInfo,g_pRgbBuffer_left[sample_buffer_index_],&sFrameInfo_left,CAMERA_MEDIA_TYPE_MONO8,0);
            CameraMultiEyeImageProcess(hCamera,1,pbyBuffer,&sFrameInfo,g_pRgbBuffer_right[sample_buffer_index_],&sFrameInfo_right,CAMERA_MEDIA_TYPE_MONO8,0);

            // string left_path = (left_fmt % pic_index_).str();
            // string right_path = (right_fmt % pic_index_).str();
    
            // CameraSaveImage(hCamera, (char *)left_path.c_str() ,g_pRgbBuffer_left[sample_buffer_index_], &sFrameInfo_left, FILE_BMP_8BIT, 80);
            // CameraSaveImage(hCamera, (char *)right_path.c_str() ,g_pRgbBuffer_right[sample_buffer_index_], &sFrameInfo_right, FILE_BMP_8BIT, 80);
            
            //CameraSaveImage(hCamera, (char *)left_path.c_str() ,g_pRgbBuffer_left, &sFrameInfo_left, FILE_JPG, 80);
            //CameraSaveImage(hCamera, (char *)right_path.c_str() ,g_pRgbBuffer_right, &sFrameInfo_right, FILE_JPG, 80);
            
            printf("get %d th pictures\n",pic_index_);
            // After a successful CameraGetImageBuffer, CameraReleaseImageBuffer
            // MUST be called to return the buffer to the SDK; otherwise the
            // next CameraGetImageBuffer blocks forever waiting for a release
            // from another thread. Safe to release here: the ISP calls above
            // already copied the data into g_pRgbBuffer_left/right.
			CameraReleaseImageBuffer(hCamera,pbyBuffer);

            stereo_undistort_->Stereo_Undistort_Process(g_pRgbBuffer_left[sample_buffer_index_],g_pRgbBuffer_right[sample_buffer_index_]);
            sample_buffer_index_ = (sample_buffer_index_+1)%sample_buffer_size;
            // if(pic_index>=1000)
            //     camera_running_.store(false);
        }
    }
}