#include "mycamera.h"
#include <QPixmap>
#include "mainwindow.h"
#include "client.h"
//extern int allarb[W*H];
// Camera capture module: YUYV -> RGB conversion helpers and the capture thread.
// Run flag for the capture loop; volatile because it may be changed from
// another thread (the loop that reads it in run() is currently commented out).
volatile int flag=1;
// Helper functions below convert YUV pixel data to RGB.
extern  bool  is_memcpy;//继承主Main的

// Convert one pixel's YUV components into a packed 0x00RRGGBB value
// (ARGB layout with alpha fixed to 0, as the LCD expects).
// y, u, v: 8-bit luma/chroma components (0..255 expected).
// Returns the packed ARGB pixel.
int yuvtorgb(int y,int u,int v)
{
    // Floating-point BT.601-style conversion; the results can fall outside
    // the 0..255 byte range and must be clamped before packing.
    int red   = y + 1.4075*(v-128);
    int green = y - 0.3455*(u-128) - 0.7169*(v-128);
    int blue  = y + 1.779*(u-128);

    // Clamp each channel into [0, 255].
    red   = red   < 0 ? 0 : (red   > 255 ? 255 : red);
    green = green < 0 ? 0 : (green > 255 ? 255 : green);
    blue  = blue  < 0 ? 0 : (blue  > 255 ? 255 : blue);

    // Pack as ARGB with alpha = 0x00.
    return (0x00<<24) | (red<<16) | (green<<8) | blue;
}

/*
    Convert one full frame of YUYV data into packed ARGB pixels.
    Parameters: yuyvdata -- raw YUYV source frame (2 bytes per pixel)
                argbdata -- output buffer, receives W*H packed ARGB ints
    Returns 0.

    Each 4-byte YUYV group (Y0 U Y1 V) yields two pixels that share the
    same chroma, so the loop runs (W*H)/2 times.
*/
int yuyvtoargb(char *yuyvdata,int *argbdata)
{
    int i,j;
    // Read the raw bytes as unsigned: plain `char` may be signed, in which
    // case component values above 127 would be seen as negative numbers and
    // get clamped to the wrong color by yuvtorgb().
    unsigned char *src=(unsigned char *)yuyvdata;
    for(i=0,j=0; i<(W*H); i+=2,j+=4)
    {
        // src[j]=Y0, src[j+1]=U, src[j+2]=Y1, src[j+3]=V (shared chroma)
        argbdata[i]=yuvtorgb(src[j],src[j+1],src[j+3]);
        argbdata[i+1]=yuvtorgb(src[j+2],src[j+1],src[j+3]);
    }
    return 0;
}

/*
    Convert one full frame of YUYV data into tightly packed 24-bit RGB bytes.
    Parameters: yuyvdata -- raw YUYV source frame (2 bytes per pixel)
                rgbdata  -- output buffer, receives W*H*3 bytes (R,G,B per pixel)
    Returns 0.
*/
int allyuyvtorgb(char *yuyvdata,char *rgbdata)
{
    int i,j;
    int pix;
    // Read the source through an unsigned pointer: plain `char` may be
    // signed, and bytes above 127 would otherwise be read as negative values
    // and clamp to the wrong color inside yuvtorgb().
    unsigned char *src=(unsigned char *)yuyvdata;
    // W*H pixels total; each iteration consumes 4 YUYV bytes and emits
    // 2 pixels (6 RGB bytes), i.e. W*H*3/6 iterations.
    for(i=0,j=0; i<W*H*3; i+=6,j+=4)
    {
        pix=yuvtorgb(src[j],src[j+1],src[j+3]);
        // Extract channels by shifting rather than aliasing the int through
        // a char pointer -- same result on the little-endian target, but
        // endianness-independent and well-defined.
        rgbdata[i]  =(pix>>16)&0xFF; // R
        rgbdata[i+1]=(pix>>8)&0xFF;  // G
        rgbdata[i+2]=pix&0xFF;       // B

        pix=yuvtorgb(src[j+2],src[j+1],src[j+3]);
        rgbdata[i+3]=(pix>>16)&0xFF;
        rgbdata[i+4]=(pix>>8)&0xFF;
        rgbdata[i+5]=pix&0xFF;
    }
    return 0;
}
// Default constructor: nothing to initialise here -- the V4L2 setup happens
// in camera_init() / run().
mycamera::mycamera()
{

}

// One-shot V4L2 initialisation of the member `camerafd`: open the device,
// set the capture format, request/map/queue buffers and start streaming.
// Currently compiled out (#if 0): run() performs its own equivalent setup
// with local state instead.
// Returns 0 on success (or when compiled out), -1 on any failure.
int mycamera::camera_init()
{
#if 0
    int ret;
    int i;
    // Step 1: open the camera driver.
    camerafd=open("/dev/video7",O_RDWR);
    if(camerafd==-1)
    {
        perror("打开摄像头驱动失败了!\n");
        return -1;
    }


    // Step 2: configure the capture parameters (width, height, pixel format, ...).
    struct v4l2_format myfmt;
    bzero(&myfmt,sizeof(myfmt));
    myfmt.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    myfmt.fmt.pix.width=W;  // frame width
    myfmt.fmt.pix.height=H; // frame height
    myfmt.fmt.pix.pixelformat=V4L2_PIX_FMT_YUYV; // YUYV pixel format
    ret=ioctl(camerafd,VIDIOC_S_FMT,&myfmt);
    if(ret==-1)
    {
        perror("设置采集格式失败了!\n");
        return -1;
    }

    // Step 3: request driver buffers (4 of them, memory-mapped).
    struct v4l2_requestbuffers mybuf;
    bzero(&mybuf,sizeof(mybuf));
    mybuf.count=4;
    mybuf.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    mybuf.memory=V4L2_MEMORY_MMAP;
    ret=ioctl(camerafd,VIDIOC_REQBUFS,&mybuf);
    if(ret==-1)
    {
        perror("申请缓冲区失败了!\n");
        return -1;
    }

    // Step 4: query each buffer and mmap it to get its user-space address.
    struct v4l2_buffer otherbuf;
    for(i=0; i<4; i++)
    {
        bzero(&otherbuf,sizeof(otherbuf));
        otherbuf.index=i; // buffer index within the queue
        otherbuf.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
        otherbuf.memory=V4L2_MEMORY_MMAP;
        ret=ioctl(camerafd,VIDIOC_QUERYBUF,&otherbuf);
        if(ret==-1)
        {
            perror("分配缓冲区失败了!\n");
            return -1;
        }

        // Map the buffer into this process's address space.
        array[i].len=otherbuf.length; // remember the buffer size
        array[i].start=mmap(NULL,otherbuf.length,PROT_READ|PROT_WRITE,MAP_SHARED,camerafd,otherbuf.m.offset);
        if(array[i].start==NULL)
        {
            // NOTE(review): mmap signals failure with MAP_FAILED, not NULL,
            // so this check would not actually catch a failed mapping.
            perror("映射缓冲区失败了!\n");
            return -1;
        }

        // Queue the buffer now -- buffers must be enqueued before streaming starts.
        ret=ioctl(camerafd,VIDIOC_QBUF,&otherbuf);
        if(ret==-1)
        {
            perror("入队失败了!\n");
            return -1;
        }
    }

    // Step 5: start the capture stream.
    enum v4l2_buf_type mytype=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ret=ioctl(camerafd,VIDIOC_STREAMON,&mytype);
    if(ret==-1)
    {
        perror("启动摄像头采集画面失败了!\n");
        return -1;
    }
#endif
    return 0;
}

/*
     Dequeue/requeue the four mmap'ed buffers once, converting each captured
     frame, emitting it for display and periodically sending it to the server.
     Parameters: x -- top-left x coordinate of the display area
                 y -- top-left y coordinate of the display area
                 (kept for interface compatibility; unused in this Qt path)
     Returns 0 on success, -1 on ioctl failure.
*/
#include <QDebug>
int mycamera::camera_capture(int x,int y)
{
    static int count = 0;   // frame counter used to throttle network sends
    int i;
    int ret;
    // Buffer for the converted ARGB pixel data.
    int argbbuf[W*H];
    struct v4l2_buffer otherbuf;
    // Step 6: loop dequeue -> convert -> display -> requeue.
    for(i=0; i<4; i++)
    {
        // Dequeue a filled buffer. VIDIOC_DQBUF hands back whichever buffer
        // the driver finished first and stores its index in otherbuf.index --
        // it is not necessarily the loop counter `i`.
        bzero(&otherbuf,sizeof(otherbuf));
        otherbuf.type=V4L2_BUF_TYPE_VIDEO_CAPTURE;
        otherbuf.memory=V4L2_MEMORY_MMAP;
        ret=ioctl(camerafd,VIDIOC_DQBUF,&otherbuf);
        if(ret==-1)
        {
            perror("出队失败了!\n");
            return -1;
        }

        // Use the index the driver actually dequeued, not the loop counter,
        // so we read the buffer that really contains this frame.
        yuyvtoargb((char *)(array[otherbuf.index].start),argbbuf);

        char rgbbuf[W*H*3];
        allyuyvtorgb((char *)(array[otherbuf.index].start),rgbbuf);
        if(count > 5)
        {
            count = 0;
            //qDebug() << "send";
            Client::get()->sendMsg(rgbbuf, sizeof(rgbbuf));
        }

        // QImage does NOT copy an external buffer; rgbbuf is a stack array
        // that dies when this function returns, so deep-copy before emitting
        // (the connected slot may run later via a queued connection).
        QImage image=QImage((uchar *)rgbbuf,W,H,QImage::Format_RGB888).copy();
        emit sigImg(image);

        // Hand the buffer back to the driver to be refilled.
        ret=ioctl(camerafd,VIDIOC_QBUF,&otherbuf);
        if(ret==-1)
        {
            perror("入队失败了!\n");
            return -1;
        }

        msleep(1);
        count++;
    }
    return 0;
}

// Describes one mmap'ed V4L2 capture buffer as seen from user space.
struct usrbuf
{
    void *start;  // user-space address returned by mmap()
    int length;   // size of the mapped region in bytes
};
//线程的任务函数
void mycamera::run()
{
    qDebug() << "run";
    //while(flag) //用线程来执行死循环
    {
        int camerafd;
        int ret;

        //打开摄像头
        camerafd=open("/dev/video7",O_RDWR);
        if(camerafd==-1)
        {
            perror("open camera failed!\n");
            return ;
        }
        //设置采集格式
        struct v4l2_format format;
        memset(&format, 0, sizeof format);
        format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        format.fmt.pix.width = 640;
        format.fmt.pix.height = 480;
        format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
        format.fmt.pix.field = V4L2_FIELD_NONE;
        ret=ioctl(camerafd,VIDIOC_S_FMT, &format);
        if(ret==-1)
        {
            perror("设置采集格式失败!\n");
            return ;
        }
        //申请缓存
        struct v4l2_requestbuffers req;
        memset(&req, 0, sizeof req);
        req.count = 4;
        req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        req.memory = V4L2_MEMORY_MMAP;
        if(ioctl(camerafd,VIDIOC_REQBUFS, &req)== -1)
        {
            perror("申请缓存失败!\n");
            return ;
        }
        //分配你刚才申请的缓冲块 --- 队列中设置了4个缓冲区 pbuf[i],
        struct usrbuf *pbuf= (struct usrbuf *)calloc(4,sizeof(struct usrbuf));
        for(int i = 0; i < 4; i++)
        {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof buf);
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            buf.index = i;
            if (ioctl(camerafd,VIDIOC_QUERYBUF,&buf) == -1)
            {
                perror("分配缓存失败!\n");
                return ;
            }
            // 四个缓冲区是直接映射camerafd数据的
            pbuf[i].start = mmap(NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED,
                   camerafd, buf.m.offset);
            pbuf[i].length=buf.length;
         }
        //入队VIDIOC_QBUF
        for (int i = 0; i < 4; i++)
        {
            struct v4l2_buffer buf;
            memset(&buf, 0, sizeof buf);
            buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            buf.memory = V4L2_MEMORY_MMAP;
            buf.index = i;
            if (ioctl(camerafd,VIDIOC_QBUF,&buf) == -1)
            {
                perror("入队失败!\n");
                return ;
            }
        }
        //开始采集VIDIOC_STREAMON
        enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if(ioctl(camerafd, VIDIOC_STREAMON, &type) == -1)
            {
            perror("开始采集失败!\n");
            return ;
        }

        int lcdmem[800*480*4] = {0};
        unsigned char* rgb = (unsigned char*)malloc(640*480*3);
        struct timeval timeout;
        timeout.tv_sec = 1;
        timeout.tv_usec = 0;

        int count = 0;
        while(1)
        {
            struct timeval timeout;
            timeout.tv_sec = 1;
            timeout.tv_usec = 0;
            fd_set fds;
            FD_ZERO(&fds);
            FD_SET(camerafd, &fds);
            int r = select(camerafd + 1, &fds, 0, 0, &timeout); // 只要这个摄像头文件有动静就会结束阻塞
            for(int  i=0; i<4; i++)
            {
                struct v4l2_buffer buf;
                memset(&buf, 0, sizeof buf);
                buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
                buf.memory = V4L2_MEMORY_MMAP;
                buf.index=i;
                if (ioctl(camerafd, VIDIOC_DQBUF, &buf) == -1)// 读不出数据出错了 VIDIOC_DQBUF
                {
                    perror("出队失败!\n");
                    return ;
                }
                if(ioctl(camerafd, VIDIOC_QBUF, &buf) == -1)//  写不进数据
                {
                    perror("入队失败!\n");
                    return ;
                }

                // 正常数据
                // 把V4L2输出的YUYV格式数据转换成RGB
                yuyv2rgb0((unsigned char *)pbuf[i].start,rgb,640,480);


                emit sigImg(QImage((uchar *)rgb,W,H,QImage::Format_RGB888));
                if(count >= 5)
                {
                    count = 0;
                    Client::get()->sendMsg(rgb, 640*480*3);
                }
                msleep(10);

                count++;
            }
        }
    }
    qDebug() << "end";
}


// Stop the capture stream on the member `camerafd` and close the device.
// Returns 0 on success, -1 if VIDIOC_STREAMOFF fails (the fd is left open
// in that case, matching the original behaviour).
int mycamera::camera_uninit()
{
    int ret;
    enum v4l2_buf_type mytype=V4L2_BUF_TYPE_VIDEO_CAPTURE;
    // Stop streaming before closing the device.
    ret=ioctl(camerafd,VIDIOC_STREAMOFF,&mytype);
    if(ret==-1)
    {
        perror("关闭摄像头失败了!\n");
        return -1;
    }

    // NOTE(review): buffers mmap'ed during init are not munmap'ed here --
    // fine for process lifetime, a leak if init/uninit is cycled.
    close(camerafd);
    return 0;
}

// Convert one pixel's YUV components to RGB using integer BT.601-style
// arithmetic (coefficients scaled by 1000, e.g. 1.596 -> 1596).
// The channel bytes are written into the first three bytes of the returned
// int's storage (pixel[0]=R, pixel[1]=G, pixel[2]=B), so on a little-endian
// target the result reads as 0x00BBGGRR.
int mycamera::yuyv2rgb(int y, int u, int v)
{
     unsigned int pixel24 = 0;
     unsigned char *pixel = (unsigned char *)&pixel24;
     int r, g, b;
     // Chroma contributions (fixed-point, x1000). These were previously
     // declared `static`, which made this pure function non-reentrant and
     // thread-unsafe for no benefit; plain locals compute the same result.
     int ruv, guv, buv;

     ruv = 1596*(v-128);
     guv = 391*(u-128) + 813*(v-128);
     buv = 2018*(u-128);

     // Scale luma (offset 16), combine with chroma, then de-scale by 1000.
     r = (1164*(y-16) + ruv) / 1000;
     g = (1164*(y-16) - guv) / 1000;
     b = (1164*(y-16) + buv) / 1000;

     // Clamp each channel to the valid 0..255 byte range.
     if(r > 255) r = 255;
     if(g > 255) g = 255;
     if(b > 255) b = 255;
     if(r < 0) r = 0;
     if(g < 0) g = 0;
     if(b < 0) b = 0;

     pixel[0] = r;
     pixel[1] = g;
     pixel[2] = b;

     return pixel24;
}


// Convert a whole YUYV frame (2 bytes/pixel) into tightly packed RGB888
// (3 bytes/pixel). Each 4-byte group [Y0][U][Y1][V] yields two RGB pixels
// that share the same chroma pair.
// yuv:    source frame, width*height*2 bytes
// rgb:    destination buffer, width*height*3 bytes
// Returns 0.
int mycamera::yuyv2rgb0(unsigned char *yuv, unsigned char *rgb, unsigned int width, unsigned int height)
{
     unsigned int src, dst;
     unsigned int packed;
     unsigned char *bytes = (unsigned char *)&packed;
     unsigned int total = width*height*2;

     for(src = 0, dst = 0; src < total; src += 4, dst += 6)
     {
          // Unpack one YUYV group.
          int luma0   = yuv[src+0];
          int chromaU = yuv[src+1];
          int luma1   = yuv[src+2];
          int chromaV = yuv[src+3];

          // First pixel of the pair.
          packed = yuyv2rgb(luma0, chromaU, chromaV);
          rgb[dst+0] = bytes[0];
          rgb[dst+1] = bytes[1];
          rgb[dst+2] = bytes[2];

          // Second pixel reuses the shared chroma.
          packed = yuyv2rgb(luma1, chromaU, chromaV);
          rgb[dst+3] = bytes[0];
          rgb[dst+4] = bytes[1];
          rgb[dst+5] = bytes[2];
     }
     return 0;
}
