#include <stdio.h>
#include <stdlib.h> //malloc
#include <string.h> //memset
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/ip.h> //sockaddr_in
#include <arpa/inet.h> //inet_ntop
#include <signal.h>
#include <unistd.h> //chroot
#include <sys/ioctl.h>       //ioctl
#include <linux/videodev2.h> //V4L2
#include <sys/mman.h>        //mmap
//open
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

#include <jpeglib.h>  //libjpeg头文件

#define BUF_NUM 3                    //number of V4L2 capture buffers to request
#define BOUNDARY "video boundary--"  //multipart/x-mixed-replace part separator

//One driver capture buffer mmap'ed into this process.
struct userbuf{
    void* addr;   //start of the mmap'ed region (MAP_FAILED/unset until camera_init)
    size_t size;  //length of the region in bytes
};

struct userbuf buffers[BUF_NUM];

//image width in pixels
int width = 640;
//image height in pixels
int height = 480;
//JPEG compression quality (0-100)
int quality = 80;

//Open /dev/video0, switch it to YUYV at width x height, then request, mmap
//and queue BUF_NUM streaming buffers into the global buffers[] array.
//Returns the camera file descriptor on success, or -1 on any failure
//(never a positive non-fd value, so callers may rely on fd < 0 == error).
int camera_init()
{
    //open the device file; read/write access is required for mmap streaming
    int fd = open("/dev/video0", O_RDWR);
    if (fd < 0)
    {
        perror("open");
        return -1;
    }

    int mapped = 0; //buffers mmap'ed so far; unwound on the fail path

    //query which operations the device supports
    struct v4l2_capability cap;
    memset(&cap, 0, sizeof(cap));
    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
    {
        perror("VIDIOC_QUERYCAP");
        goto fail;
    }

    //must support single-planar video capture
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        fprintf(stderr, "The device does not handle single-planar video capture.\n");
        goto fail;
    }

    //must support streaming (mmap) I/O
    if (!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        fprintf(stderr, "The device does not handle frame streaming.\n");
        goto fail;
    }

    //read back the current (default) image format;
    //zero the struct first: the kernel expects unused/reserved fields cleared
    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(fd, VIDIOC_G_FMT, &format) < 0)
    {
        perror("VIDIOC_G_FMT");
        goto fail;
    }

    //pixelformat is a fourcc code packed little-endian
    printf("default format: %c%c%c%c %dx%d\n",
           format.fmt.pix.pixelformat & 0xff,
           format.fmt.pix.pixelformat >> 8 & 0xff,
           format.fmt.pix.pixelformat >> 16 & 0xff,
           format.fmt.pix.pixelformat >> 24 & 0xff,
           format.fmt.pix.width,
           format.fmt.pix.height);

    //request YUYV at the configured resolution
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    format.fmt.pix.width = width;
    format.fmt.pix.height = height;
    if (ioctl(fd, VIDIOC_S_FMT, &format) < 0)
    {
        perror("VIDIOC_S_FMT");
        //was "return EXIT_FAILURE" (=1): a positive value callers checking
        //fd < 0 would mistake for a valid descriptor
        goto fail;
    }

    printf("set format: YUYV %dx%d\n", width, height);

    //ask the driver for BUF_NUM mmap-able buffers
    //(multiple buffers allow capture and processing to overlap)
    struct v4l2_requestbuffers bufrequest;
    memset(&bufrequest, 0, sizeof(bufrequest));
    bufrequest.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    bufrequest.memory = V4L2_MEMORY_MMAP;
    bufrequest.count = BUF_NUM;

    if (ioctl(fd, VIDIOC_REQBUFS, &bufrequest) < 0)
    {
        perror("VIDIOC_REQBUFS");
        goto fail;
    }

    for (int i = 0; i < BUF_NUM; i++)
    {
        //query each buffer's length and offset within the device
        struct v4l2_buffer bufferinfo;
        memset(&bufferinfo, 0, sizeof(bufferinfo));
        bufferinfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        bufferinfo.memory = V4L2_MEMORY_MMAP;
        bufferinfo.index = i;

        if (ioctl(fd, VIDIOC_QUERYBUF, &bufferinfo) < 0)
        {
            perror("VIDIOC_QUERYBUF");
            goto fail;
        }

        //map the driver buffer into our address space
        buffers[i].addr = mmap(NULL,
                               bufferinfo.length,
                               PROT_READ | PROT_WRITE,
                               MAP_SHARED,
                               fd,
                               bufferinfo.m.offset);
        if (buffers[i].addr == MAP_FAILED)
        {
            perror("mmap");
            goto fail;
        }

        buffers[i].size = bufferinfo.length;
        mapped++;

        //buffers must be queued before streaming is started
        if (ioctl(fd, VIDIOC_QBUF, &bufferinfo) < 0)
        {
            perror("VIDIOC_QBUF1");
            goto fail;
        }
    }

    return fd;

fail:
    //undo partial initialization so nothing leaks on error
    for (int j = 0; j < mapped; j++)
        munmap(buffers[j].addr, buffers[j].size);
    close(fd);
    return -1;
}

//解析请求头，返回请求路径
//Parse an HTTP request line ("METHOD PATH PROTOCOL") and return the path.
//The returned string is allocated by sscanf's %m modifier; the caller owns
//it and must free() it. Returns NULL if the line is malformed.
char* parse_request(char *line)
{
    char method[10] = "";
    char protocol[10] = "";
    char* path = NULL;

    puts(line);

    //%ms makes sscanf allocate the buffer for the path.
    //require at least method + path; the original ignored the return value
    //and then printf'd a NULL path (undefined behavior) on malformed input
    if (sscanf(line, "%9s %ms %9s", method, &path, protocol) < 2 || path == NULL)
    {
        free(path); //no-op when NULL; defensive if only method matched
        return NULL;
    }

    printf("method = %s\n", method);
    printf("path = %s\n", path);
    printf("protocol = %s\n", protocol);

    return path;
}

//Create a TCP socket listening on port 80 (all interfaces), then chroot to
//the current directory. Needs root for both the privileged port and chroot.
//Returns the listening socket, or -1 on failure (no fd is leaked on error).
int server_init(void)
{
    //a client closing mid-stream would otherwise kill the process with
    //SIGPIPE; ignoring it lets write errors surface through ferror() instead
    signal(SIGPIPE, SIG_IGN);

    int listenfd = socket(AF_INET, SOCK_STREAM, 0);
    if (listenfd < 0)
    {
        perror("socket");
        return -1;
    }

    //allow quick restarts while old connections sit in TIME_WAIT
    int opt = 1;
    if (setsockopt(listenfd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt)) < 0)
    {
        perror("setsockopt"); //non-fatal: only affects restart convenience
    }

    struct sockaddr_in saddr;
    memset(&saddr, 0, sizeof(saddr));
    saddr.sin_family = AF_INET;
    saddr.sin_addr.s_addr = htonl(INADDR_ANY);
    saddr.sin_port = htons(80);

    if (bind(listenfd, (struct sockaddr *)&saddr, sizeof(saddr)) < 0)
    {
        perror("bind");
        goto fail;
    }

    if (listen(listenfd, 64) < 0)
    {
        perror("listen");
        goto fail;
    }

    //confine the process to the current directory
    if (chroot(".") < 0)
    {
        perror("chroot");
        goto fail;
    }

    return listenfd;

fail:
    close(listenfd); //don't leak the socket on partial initialization
    return -1;
}

int jpeg_encode(const char* src, size_t src_size, unsigned char** dst, size_t* dst_size, int quality)
{
    //JPEG compression object
    struct jpeg_compress_struct cinfo;
    //JPEG error handler
    struct jpeg_error_mgr jerr;
    //set up the error handler
    cinfo.err = jpeg_std_error(&jerr);
    //initialize the JPEG compression object
    jpeg_create_compress(&cinfo);
    //将编码之后的图像写入内存(自动分配内存空间)
    jpeg_mem_dest(&cinfo, dst, dst_size);
    //压缩图像大小
    cinfo.image_width = width;
    cinfo.image_height = height;
    //每像素通道数
    cinfo.input_components = 3;
    //使用YCbCr颜色空间
    cinfo.in_color_space = JCS_YCbCr;
    //设置默认压缩参数
    jpeg_set_defaults(&cinfo);
    //设置压缩质量
    jpeg_set_quality(&cinfo, quality, TRUE);
    //开始压缩
    jpeg_start_compress(&cinfo, TRUE);

    JSAMPROW jrow;
    unsigned char buf[width * 3];
    while (cinfo.next_scanline < cinfo.image_height)
    {
        //将每个像素由YUV422转为YUV444
        for (int i = 0; i < cinfo.image_width; i += 2)
        {
            //Y0U0 Y1V1 Y2U2 Y3V3
            buf[i * 3] = src[i * 2];         //Y0 = Y0
            buf[i * 3 + 1] = src[i * 2 + 1]; //U0 = U0
            buf[i * 3 + 2] = src[i * 2 + 3]; //V0 = V1
            buf[i * 3 + 3] = src[i * 2 + 2]; //Y1 = Y1
            buf[i * 3 + 4] = src[i * 2 + 1]; //U1 = U0
            buf[i * 3 + 5] = src[i * 2 + 3]; //V1 = V1
        }
        jrow = (JSAMPROW)&buf;
        jpeg_write_scanlines(&cinfo, &jrow, 1);
        src += width * 2;
    }
    //停止压缩
    jpeg_finish_compress(&cinfo);
    //释放内存
    jpeg_destroy_compress(&cinfo);

    return 0;
}

//Send one JPEG frame to the client as a multipart part inside an HTTP/1.1
//chunk. The part is assembled in memory first so the exact chunk length is
//known up front. buf may be NULL only when size is 0.
void send_frame(const unsigned char* buf, size_t size, FILE* sfp)
{
    char* response = NULL;
    size_t resplen = 0;

    FILE* mfp = open_memstream(&response, &resplen);
    if (mfp == NULL)
    {
        //the original dereferenced a NULL stream on allocation failure
        perror("open_memstream");
        return;
    }
    fprintf(mfp, "Content-Type: image/jpeg\r\n\r\n");
    if (size > 0)
    {
        fwrite(buf, 1, size, mfp);
    }
    fprintf(mfp, "\r\n\r\n--%s\r\n", BOUNDARY);
    fclose(mfp); //flushes and finalizes response/resplen

    //chunked transfer encoding: hex length, CRLF, payload, CRLF
    fprintf(sfp, "%zx\r\n", resplen);
    fwrite(response, 1, resplen, sfp);
    fprintf(sfp, "\r\n");
    free(response);
}

//Accept HTTP clients one at a time and stream MJPEG frames to each until it
//disconnects. Single-threaded: a connected client blocks further accepts.
int main()
{
    int fd = camera_init();
    if (fd < 0)
    {
        return 1;
    }

    int listenfd = server_init();
    if (listenfd < 0)
    {
        return 1;
    }

    puts("web server start");

    while (1)
    {
        struct sockaddr_in caddr;
        socklen_t addrlen = sizeof(caddr);
        int connfd = accept(listenfd, (struct sockaddr *)&caddr, &addrlen);
        //check accept first: the original read caddr (uninitialized on
        //failure) before this check
        if (connfd < 0)
        {
            perror("accept");
            continue;
        }

        char ipstr[INET_ADDRSTRLEN];
        inet_ntop(AF_INET, &caddr.sin_addr, ipstr, sizeof ipstr);
        printf("client %s connected\n", ipstr);

        FILE* sfp = fdopen(connfd, "r+");
        if (!sfp)
        {
            perror("fdopen");
            close(connfd); //fdopen failed, so nothing else owns connfd
            continue;
        }

        char buf[BUFSIZ];

        //request line, e.g. "GET / HTTP/1.1"
        if (!fgets(buf, BUFSIZ, sfp))
        {
            fclose(sfp);
            continue;
        }
        char *path = parse_request(buf);
        free(path); //allocated by sscanf %m; this server ignores the path

        //skip the remaining request headers, up to the empty "\r\n" line
        while (fgets(buf, BUFSIZ, sfp) && strlen(buf) > 2)
        {
            //never printf(buf): the header is untrusted input and would be
            //interpreted as a format string
            fputs(buf, stdout);
        }

        //start streaming (a no-op if a previous client already started it)
        int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        if (ioctl(fd, VIDIOC_STREAMON, &type) < 0)
        {
            perror("VIDIOC_STREAMON");
            return EXIT_FAILURE;
        }

        //response header for an endless multipart stream, chunk-encoded
        fprintf(sfp, "HTTP/1.1 200 OK\r\n");
        fprintf(sfp, "Content-Type: multipart/x-mixed-replace; boundary=%s\r\n", BOUNDARY);
        fprintf(sfp, "Transfer-Encoding: chunked\r\n");
        fprintf(sfp, "\r\n");

        while (1)
        {
            //DQBUF requires type/memory set (and reserved fields zeroed)
            struct v4l2_buffer bufferinfo;
            memset(&bufferinfo, 0, sizeof(bufferinfo));
            bufferinfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
            bufferinfo.memory = V4L2_MEMORY_MMAP;

            //block until a captured frame is available, then dequeue it
            if (ioctl(fd, VIDIOC_DQBUF, &bufferinfo) < 0)
            {
                perror("VIDIOC_DQBUF");
                break;
            }

            unsigned char* jpeg_image = NULL;
            size_t image_size = 0;
            //compress the raw YUYV frame to JPEG
            int encoded = jpeg_encode(buffers[bufferinfo.index].addr,
                                      bufferinfo.bytesused,
                                      &jpeg_image, &image_size, quality);

            //requeue the buffer immediately so capture continues while we send
            bufferinfo.flags = 0;
            if (ioctl(fd, VIDIOC_QBUF, &bufferinfo) < 0)
            {
                perror("VIDIOC_QBUF2");
                free(jpeg_image);
                break;
            }

            if (encoded == 0 && jpeg_image != NULL)
            {
                send_frame(jpeg_image, image_size, sfp);
            }
            //free before the disconnect check: the original leaked the
            //frame on every client disconnect (break came before free)
            free(jpeg_image);

            if (ferror(sfp)) //client closed the connection
            {
                printf("client disconnected\n");
                break;
            }
        }

        fclose(sfp); //also closes connfd
    }
}
