// Runs on the client side.
#include "CameraApi.h"
//#include "hi_comm_isp.h"
//#include "mpi_isp.h"
//Problem: fields we do not intend to set would also be filled in. --> Use a bool per field to flag whether it needs changing.
#include <pthread.h>
#include <signal.h>
#include <sys/stat.h>
#include <fcntl.h>

#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <stdio.h>
#include <arpa/inet.h>
#include <unistd.h>
#include <malloc.h>
#include "common_define.h"
#include <string.h>
#define MAX_DEVICE_NUM 3
#ifdef TICTOC
#include "time_fun.h"

static struct timespec t1;
static float diff;
#endif
//Global structures shared by all API calls in this translation unit.
stcamera_param  stcamera_param_g[MAX_DEVICE_NUM];
stcamera_return stcameraReturn_g;
//A global cannot be initialized with a runtime expression, so no malloc here:
//stcamera_param_g = (stcamera_param *)malloc(sizeof(stcamera_param));
int socket_fd_g=-1;
fd_set myset_g;
#ifdef MOCK
const u32 OP_TYPE_AUTO = 1;
const u32 OP_TYPE_MANUAL = 2;
#endif
/*
 * Block until cln_sock is writable, then send `size` bytes from send_buff.
 *
 * cln_sock  connected socket descriptor
 * myset     fd_set containing cln_sock (passed by value; select() may modify it)
 * send_buff bytes to transmit
 * size      number of bytes to transmit
 *
 * Returns the number of bytes sent, or -1 if select() or send() failed
 * (the failure is reported via perror).
 */
int tcp_send(int cln_sock, fd_set myset, BYTE* send_buff, int size)
{
    /* NULL timeout: block indefinitely until the socket is writable. */
    int ret = select(cln_sock+1, NULL, &myset,  NULL, NULL);
    if(ret>=0) {
        if( (ret = send(cln_sock, send_buff, size, 0)) == -1)
        {
            /* Bug fix: the original printed perror("recv") for a send() failure. */
            perror("send");
        }
    }
    else
    {
        perror("select");
    }

    return ret;
}
/*
 * Block until cln_sock is readable, then receive up to `size` bytes into
 * recv_buff.
 *
 * cln_sock  connected socket descriptor
 * myset     fd_set containing cln_sock (passed by value; select() may modify it)
 * recv_buff destination buffer (caller-allocated, at least `size` bytes)
 * size      maximum number of bytes to receive
 * flag      flags forwarded to recv() (e.g. MSG_WAITALL)
 *
 * Returns the number of bytes received, or -1 if select() or recv() failed
 * (the failure is reported via perror).
 */
int tcp_recv(int cln_sock, fd_set myset, BYTE* recv_buff, int size,int flag)
{
    /* NULL timeout: block indefinitely until data is available. */
    int ret = select(cln_sock+1, &myset, NULL, NULL, NULL);
#ifdef TICTOC
    t1 =tic();
#endif
    if(ret>=0) {
        if( (ret = recv(cln_sock, recv_buff, size, flag)) == -1)
        {
            perror("recv");
        }
    }
    else
    {
        perror("select");
    }
#ifdef TICTOC
    /* Bug fix: the original referenced `tdiff`, which is not declared in this
       file; the file-scope timer variable is `diff` (declared next to `t1`).
       NOTE(review): if time_fun.h happens to declare a global `tdiff`, this
       change still compiles and measures the same interval — confirm. */
    toc(t1,&diff);
    printf( "%s recv_from_server %f us\n", __FUNCTION__, diff );
#endif
    return ret;
}

//Currently only device 0 is supported.
/*
 * Open the TCP connection to cam_server (SERVER_IPADDR:SERVER_PORT) and
 * store the socket in socket_fd_g / myset_g.
 *
 * NOTE(review): the #ifdef MOCK / #else / #endif below spans the textual
 * boundary between init() and CameraInit_Hawk():
 *   - MOCK defined:     the closing brace after the #else is compiled OUT,
 *                       so the while(1) handshake loop below belongs to
 *                       init(), and CameraInit_Hawk() is never compiled.
 *   - MOCK not defined: init() ends right after the #else with NO return
 *                       statement (undefined behavior if the caller uses
 *                       its value), and the handshake loop belongs to
 *                       CameraInit_Hawk().
 * Restructuring this safely requires a build-system check; left as-is.
 */
MVSDK_API CameraSdkStatus  init(void)
{
    int ret = -1;
    char buff[512] = {0};
    struct sockaddr_in serverAddr = {0};
    socket_fd_g = socket(AF_INET, SOCK_STREAM, 0);
    if (socket_fd_g < 0)
    {
        perror("");
        return -1;
    }

    serverAddr.sin_family = AF_INET;
    serverAddr.sin_port = htons(SERVER_PORT);
    serverAddr.sin_addr.s_addr = inet_addr(SERVER_IPADDR);

    ret = connect(socket_fd_g, (const struct sockaddr *)&serverAddr, (socklen_t)sizeof(struct sockaddr));
    if (ret < 0)
    {
        perror("");
        return -1;
    }

    FD_ZERO(&myset_g);
    FD_SET(socket_fd_g, &myset_g);

#ifdef MOCK
    /* NOTE(review): sends sizeof(stcamera_param) bytes from an 8-byte string
       literal — reads past the end of "request"; mock-only, but still UB. */
    const char *c_str_buf="request";
    send(socket_fd_g, c_str_buf, sizeof(stcamera_param), 0);
#else
    /* Non-MOCK: init() falls off the end here with no return value. */



}

/*
 * Hand-shake with cam_server for camera `hCamera` (0..MAX_DEVICE_NUM-1):
 * send the parameter block with first_time_get_param raised, then block
 * until the server echoes the populated parameters back.
 * Returns 0 on success, -1 for an out-of-range handle.
 */
MVSDK_API CameraSdkStatus  CameraInit_Hawk(
//        tSdkCameraDevInfo*  pCameraInfo,

        CameraHandle       hCamera
)
{
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }
    /* Fixed handle -> VI pipe mapping. */
    int pipe[MAX_DEVICE_NUM]={0,1,2};
    stcamera_param_g[hCamera].ViPipe = pipe[hCamera];


    stcamera_param_g[hCamera].first_time_get_param = TRUE;
//    send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    tcp_send( socket_fd_g,  myset_g, (BYTE* )&stcamera_param_g[hCamera], sizeof(stcamera_param)) ;


#endif
    /* Spin until the server's reply arrives. */
    while(1)
    {

        // readable
#ifdef MOCK
        int size=recv(socket_fd_g, buff, sizeof(stcamera_param), 0);
        if(size>0) {
            fprintf(stderr, "after request %s\n", buff);
            break;
        }
#else
        /* Overwrite our parameter block with the server's populated copy. */
        int size=recv(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
        if(size>0) {
            fprintf(stderr, "%s %d after request expsure value is %d\n",__FUNCTION__,__LINE__,stcamera_param_g[hCamera].stExpAttr.stManual.u32ExpTime);
            break;
        }
        /* NOTE(review): only reached when recv returned <= 0 — likely meant
           to run after the loop; confirm intent. */
        stcamera_param_g[hCamera].first_time_get_param = FALSE;
#endif
    }
#ifdef TEST_RTSP_ONLY
    //====for test start
    stcamera_param_g[hCamera].rtsp = TRUE;
    //====for test
#endif


    /* Acknowledge the handshake.  NOTE(review): sends sizeof(stcamera_param)
       bytes from a 15-byte string literal — out-of-bounds read; confirm the
       server only inspects the leading string. */
    const char *c_str_buf1="shakehand_pass";
    send(socket_fd_g, c_str_buf1, sizeof(stcamera_param), 0);
    return 0;

}


/*Ezio add 191225*/
//What about multiple cameras?
#ifdef MOCK
#else
/*
 * Put every AE sub-control (exposure time, analog/digital/ISP gain) of
 * camera hCamera into automatic mode and push the change to cam_server.
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Auto_All(
        CameraHandle    hCamera
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.enOpType = OP_TYPE_AUTO;
    stcamera_param_g[hCamera].stExpAttr.stManual.enAGainOpType = OP_TYPE_AUTO;
    stcamera_param_g[hCamera].stExpAttr.stManual.enDGainOpType = OP_TYPE_AUTO;
    stcamera_param_g[hCamera].stExpAttr.stManual.enExpTimeOpType = OP_TYPE_AUTO;
    stcamera_param_g[hCamera].stExpAttr.stManual.enISPDGainOpType = OP_TYPE_AUTO;

    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }

    return CAMERA_STATUS_SUCCESS;
}

/*
 * Switch camera hCamera's exposure time to manual mode with the given value
 * and push the change to cam_server.
 *
 * fExposureTime is truncated to u32ExpTime (double -> u32); fractional
 * microseconds are lost.
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Mannual_ExposureTime(
        CameraHandle    hCamera,
        double          fExposureTime
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.enOpType = OP_TYPE_MANUAL;
    stcamera_param_g[hCamera].stExpAttr.stManual.enExpTimeOpType = OP_TYPE_MANUAL;
    stcamera_param_g[hCamera].stExpAttr.stManual.u32ExpTime = fExposureTime;

    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }

    return CAMERA_STATUS_SUCCESS;
}


/*
 * Return camera hCamera's exposure time to automatic mode and push the
 * change to cam_server.
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Auto_ExposureTime(
        CameraHandle    hCamera
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.stManual.enExpTimeOpType = OP_TYPE_AUTO;
    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }

    return CAMERA_STATUS_SUCCESS;
}



/*
 * Switch camera hCamera's analog gain to manual mode with the given value
 * and push the change to cam_server.
 *
 * AGain is truncated to u32AGain (double -> u32).
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Mannual_AGain(
        CameraHandle    hCamera,
        double          AGain
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.enOpType = OP_TYPE_MANUAL;
    stcamera_param_g[hCamera].stExpAttr.stManual.enAGainOpType = OP_TYPE_MANUAL;
    stcamera_param_g[hCamera].stExpAttr.stManual.u32AGain = AGain;

    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }

    return CAMERA_STATUS_SUCCESS;
}
/*
 * Return camera hCamera's analog gain to automatic mode and push the
 * change to cam_server.
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Auto_AGain(
        CameraHandle    hCamera
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.stManual.enAGainOpType = OP_TYPE_AUTO;
    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }
    return CAMERA_STATUS_SUCCESS;
}
/*
 * Switch camera hCamera's digital gain to manual mode with the given value
 * and push the change to cam_server.
 *
 * DGain is truncated to u32DGain (double -> u32).
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Mannual_DGain(
        CameraHandle    hCamera,
        double          DGain
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.enOpType = OP_TYPE_MANUAL;
    stcamera_param_g[hCamera].stExpAttr.stManual.enDGainOpType = OP_TYPE_MANUAL;
    stcamera_param_g[hCamera].stExpAttr.stManual.u32DGain = DGain;

    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }
    return CAMERA_STATUS_SUCCESS;
}
/*
 * Return camera hCamera's digital gain to automatic mode and push the
 * change to cam_server.
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Auto_DGain(
        CameraHandle    hCamera
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.stManual.enDGainOpType = OP_TYPE_AUTO;
    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }
    return CAMERA_STATUS_SUCCESS;
}
/*
 * Switch camera hCamera's ISP digital gain to manual mode with the given
 * value and push the change to cam_server.
 *
 * ISPDGain is truncated to u32ISPDGain (double -> u32).
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Mannual_ISPDGain(
        CameraHandle    hCamera,
        double          ISPDGain
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.enOpType = OP_TYPE_MANUAL;
    stcamera_param_g[hCamera].stExpAttr.stManual.enISPDGainOpType = OP_TYPE_MANUAL;
    stcamera_param_g[hCamera].stExpAttr.stManual.u32ISPDGain = ISPDGain;

    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }
    return CAMERA_STATUS_SUCCESS;
}
/*
 * Return camera hCamera's ISP digital gain to automatic mode and push the
 * change to cam_server.
 * Returns CAMERA_STATUS_SUCCESS on success, -1 on bad handle or send failure.
 */
MVSDK_API CameraSdkStatus CameraSetAeState_Auto_ISPDGain(
        CameraHandle    hCamera
)
{
    /* stcamera_param_g has only MAX_DEVICE_NUM slots. */
    if(hCamera >= MAX_DEVICE_NUM)
    {
        return -1;
    }

    stcamera_param_g[hCamera].stExpAttr.stManual.enISPDGainOpType = OP_TYPE_AUTO;
    /* setparam marks this packet as a parameter change for the server. */
    stcamera_param_g[hCamera].setparam = TRUE;
    //send to cam_server
    int sent = send(socket_fd_g, &stcamera_param_g[hCamera], sizeof(stcamera_param), 0);
    stcamera_param_g[hCamera].setparam = FALSE;
    if (sent == -1)
    {
        perror("send");  /* original silently ignored send() failures */
        return -1;
    }
    return CAMERA_STATUS_SUCCESS;
}

#endif


/*
 * Receive one complete stcamera_return reply from cam_server into
 * *stcameraReturn_.  MSG_WAITALL makes recv() block until the whole
 * structure has arrived (or the connection fails).
 *
 * Returns 0 on success, -1 on a select()/recv() failure.
 */
int recv_from_server(stcamera_return *stcameraReturn_)
{
    /* %zu is the correct format for sizeof (was %d, undefined behavior). */
    fprintf(stderr, "stcamera_return size %zu\n", sizeof(stcamera_return));
    int size = tcp_recv( socket_fd_g,  myset_g, (BYTE* )stcameraReturn_, sizeof(stcamera_return),MSG_WAITALL);
    if(size <0)
    {
        perror("recv");
        return -1;
    }
    /* Removed: unused `index` local and the no-op self-assignment
       `stcameraReturn_ = (stcamera_return*) stcameraReturn_;`. */
    fprintf(stderr, "recv size: %d\n", size);
    return 0;
}

/*
 * Equivalent to a "snap": request one frame from cam_server and copy the
 * frame header and pixel data back to the caller.
 *
 * hCamera    device handle (index into stcamera_param_g)
 * pFrameInfo receives the frame header (copied from the server reply)
 * pbyBuffer  receives the raw frame bytes; caller-allocated and must be
 *            large enough for the returned FrameBufferSize
 * wTimes     unused; kept for API compatibility
 *
 * Protocol: send-then-receive — raise the `snap` flag, ship the parameter
 * block, then block for the stcamera_return reply.
 *
 * Returns 0 on success, -1 on a send/receive failure.
 */
MVSDK_API CameraSdkStatus  CameraGetImageBuffer(
        CameraHandle        hCamera,
        tSdkFrameHead*      pFrameInfo,
        BYTE*               pbyBuffer,
        UINT                wTimes
)
{
    (void)wTimes;  /* not used by the TCP protocol */
    printf("camera handler is %d\n", hCamera);

    /* Raise the snap flag just for the duration of this request. */
    stcamera_param_g[hCamera].snap = TRUE;
    int sent = tcp_send( socket_fd_g,  myset_g, (BYTE* )&stcamera_param_g[hCamera], sizeof(stcamera_param)) ;
    stcamera_param_g[hCamera].snap = FALSE;
    if (sent == -1)
    {
        /* Original ignored a failed request send and then blocked on recv. */
        return -1;
    }
    printf("after send \n");

    /* Optional receive timeout (disabled):
       struct timeval timeout = {2,0};
       setsockopt(socket_fd_g,SOL_SOCKET,SO_RCVTIMEO,(char *)&timeout,sizeof(struct timeval));
     */

    int ret = recv_from_server(&stcameraReturn_g);
    printf("after recv \n");
    /* recv_from_server returns 0 on success, -1 on failure. */
    if(ret >= 0 ) {
        /* Copy frame bytes and header out of the global reply buffer.
           NOTE(review): assumes the caller's pbyBuffer can hold
           FrameBufferSize bytes — the API has no size parameter to check. */
        memcpy(pbyBuffer, stcameraReturn_g.pFrameBuffer, stcameraReturn_g.FrameBufferSize);
        memcpy(pFrameInfo, &(stcameraReturn_g.FrameInfo), sizeof(stcameraReturn_g.FrameInfo));
        printf("after memcpy \n");
        return 0;
    } else
    {
        return -1;
    }
    /* Removed: a large `#if 0` chunked-recv implementation that was
       unreachable (both branches above already return). */
}