#include <stdio.h>
#include <stdlib.h> //malloc
#include <string.h> //memset
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/ip.h> //sockaddr_in
#include <arpa/inet.h> //inet_ntop
#include <signal.h>
#include <unistd.h> //chroot
#include <sys/ioctl.h>       //ioctl
#include <linux/videodev2.h> //V4L2
#include <sys/mman.h>        //mmap
//open
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>

#include <jpeglib.h>  //libjpeg头文件

#define BOUNDARY "frame-boundary--"
#define ARRAY_SIZE(a) (sizeof(a)/sizeof(a[0]))

//One driver capture buffer mapped into user space via mmap().
struct userbuf{
    void* addr;   //start of the mapping returned by mmap()
    size_t size;  //length of the mapping in bytes
};

//State for one open V4L2 capture device.
struct camera
{
    int fd;       //device file descriptor (e.g. /dev/video0)
    int width;    //configured frame width in pixels
    int height;   //configured frame height in pixels
    int quality;  //JPEG compression quality (0-100)
    struct userbuf buffers[2]; //mmap'd driver capture buffers
};

int jpeg_encode(const char* src, size_t src_size, unsigned char** dst, size_t* dst_size, int width, int height, int quality)
{
    //JPEG compression object
    struct jpeg_compress_struct cinfo;
    //JPEG error handler
    struct jpeg_error_mgr jerr;
    //set up the error handler
    cinfo.err = jpeg_std_error(&jerr);
    //initialize the JPEG compression object
    jpeg_create_compress(&cinfo);
    //将编码之后的图像写入内存(自动分配内存空间)
    jpeg_mem_dest(&cinfo, dst, dst_size);
    //压缩图像大小
    cinfo.image_width = width;
    cinfo.image_height = height;
    //每像素通道数
    cinfo.input_components = 3;
    //使用YCbCr颜色空间
    cinfo.in_color_space = JCS_YCbCr;
    //设置默认压缩参数
    jpeg_set_defaults(&cinfo);
    //设置压缩质量
    jpeg_set_quality(&cinfo, quality, TRUE);
    //开始压缩
    jpeg_start_compress(&cinfo, TRUE);

    JSAMPROW jrow;
    unsigned char buf[width * 3];
    while (cinfo.next_scanline < cinfo.image_height)
    {
        //将每个像素由YUV422转为YUV444
        for (int i = 0; i < cinfo.image_width; i += 2)
        {
            //Y0U0 Y1V1 Y2U2 Y3V3
            buf[i * 3] = src[i * 2];         //Y0 = Y0
            buf[i * 3 + 1] = src[i * 2 + 1]; //U0 = U0
            buf[i * 3 + 2] = src[i * 2 + 3]; //V0 = V1
            buf[i * 3 + 3] = src[i * 2 + 2]; //Y1 = Y1
            buf[i * 3 + 4] = src[i * 2 + 1]; //U1 = U0
            buf[i * 3 + 5] = src[i * 2 + 3]; //V1 = V1
        }
        jrow = (JSAMPROW)&buf;
        jpeg_write_scanlines(&cinfo, &jrow, 1);
        src += width * 2;
    }
    //停止压缩
    jpeg_finish_compress(&cinfo);
    //释放内存
    jpeg_destroy_compress(&cinfo);

    return 0;
}

/**
 * Open a V4L2 capture device, configure a packed YUYV format of
 * width x height, and map the driver's capture buffers into user space.
 *
 * Returns a heap-allocated camera handle on success (release it with
 * camera_close()), or NULL on any failure. On failure every resource
 * acquired so far (fd, partial mmaps, handle) is released.
 */
struct camera* camera_open(const char* dev, int width, int height)
{
    struct camera* cam = malloc(sizeof(*cam));
    if (!cam)
    {
        perror("malloc");
        return NULL;
    }
    //default JPEG compression quality
    cam->quality = 80;
    //the device node must be opened read/write for streaming I/O
    cam->fd = open(dev, O_RDWR);
    if (cam->fd < 0)
    {
        perror("open");
        free(cam);
        return NULL;
    }

    //number of buffers successfully mmap'd so far, for cleanup on failure
    size_t mapped = 0;

    //query device capabilities (which operations the device supports)
    struct v4l2_capability cap;
    if (ioctl(cam->fd, VIDIOC_QUERYCAP, &cap) < 0)
    {
        perror("VIDIOC_QUERYCAP");
        goto fail;
    }

    //must support single-planar video capture
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        fprintf(stderr, "The device does not handle single-planar video capture.\n");
        goto fail;
    }

    //must support streaming (mmap) I/O
    if (!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        fprintf(stderr, "The device does not handle frame streaming.\n");
        goto fail;
    }

    //read back the current (default) image format
    //zero-initialize first: the kernel expects unused fields to be 0
    struct v4l2_format format;
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    if (ioctl(cam->fd, VIDIOC_G_FMT, &format) < 0)
    {
        perror("VIDIOC_G_FMT");
        goto fail;
    }

    //pixelformat is a fourcc packed little-endian; print it byte by byte
    printf("default format: %c%c%c%c %dx%d\n",
           format.fmt.pix.pixelformat & 0xff,
           format.fmt.pix.pixelformat >> 8 & 0xff,
           format.fmt.pix.pixelformat >> 16 & 0xff,
           format.fmt.pix.pixelformat >> 24 & 0xff,
           format.fmt.pix.width,
           format.fmt.pix.height);

    //request packed YUYV 4:2:2 at the caller's resolution
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    format.fmt.pix.width = cam->width = width;
    format.fmt.pix.height = cam->height = height;
    if (ioctl(cam->fd, VIDIOC_S_FMT, &format) < 0)
    {
        perror("VIDIOC_S_FMT");
        goto fail;
    }

    printf("set format: YUYV %dx%d\n", width, height);

    //ask the driver for mmap-able capture buffers
    //(multiple buffers let capture and processing overlap)
    struct v4l2_requestbuffers bufrequest;
    memset(&bufrequest, 0, sizeof(bufrequest));
    bufrequest.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    bufrequest.memory = V4L2_MEMORY_MMAP;
    bufrequest.count = ARRAY_SIZE(cam->buffers);

    if (ioctl(cam->fd, VIDIOC_REQBUFS, &bufrequest) < 0)
    {
        perror("VIDIOC_REQBUFS");
        goto fail;
    }

    for (size_t i = 0; i < ARRAY_SIZE(cam->buffers); i++)
    {
        //query the offset/length of buffer i
        struct v4l2_buffer bufferinfo;
        memset(&bufferinfo, 0, sizeof(bufferinfo));
        bufferinfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        bufferinfo.memory = V4L2_MEMORY_MMAP;
        bufferinfo.index = i;

        if (ioctl(cam->fd, VIDIOC_QUERYBUF, &bufferinfo) < 0)
        {
            perror("VIDIOC_QUERYBUF");
            goto fail;
        }

        //map the driver buffer into our address space
        cam->buffers[i].addr = mmap(NULL,
                                    bufferinfo.length,
                                    PROT_READ | PROT_WRITE,
                                    MAP_SHARED,
                                    cam->fd,
                                    bufferinfo.m.offset);
        if (cam->buffers[i].addr == MAP_FAILED)
        {
            perror("mmap");
            goto fail;
        }

        cam->buffers[i].size = bufferinfo.length;
        mapped = i + 1;

        //buffers must be queued before streaming can be started
        if (ioctl(cam->fd, VIDIOC_QBUF, &bufferinfo) < 0)
        {
            perror("VIDIOC_QBUF1");
            goto fail;
        }
    }

    return cam;

fail:
    //undo any mappings made so far (previously leaked on late failures),
    //then release the fd and the handle
    for (size_t i = 0; i < mapped; i++)
    {
        munmap(cam->buffers[i].addr, cam->buffers[i].size);
    }
    close(cam->fd);
    free(cam);
    return NULL;
}

/**
 * Release a camera handle opened by camera_open(): unmap the driver
 * buffers, close the device fd and free the handle.
 * Safe to call with NULL.
 */
void camera_close(struct camera* cam)
{
    if (!cam)
    {
        return;
    }
    //unmap the buffers mapped in camera_open() (previously leaked here)
    for (size_t i = 0; i < ARRAY_SIZE(cam->buffers); i++)
    {
        munmap(cam->buffers[i].addr, cam->buffers[i].size);
    }
    close(cam->fd);
    free(cam);
}

/**
 * Start capturing into the queued buffers.
 * Returns the VIDIOC_STREAMON ioctl result (0 on success, -1 on error).
 */
int camera_start(struct camera* cam)
{
    //the ioctl takes a pointer to the buffer type being streamed
    int buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    return ioctl(cam->fd, VIDIOC_STREAMON, &buf_type);
}

/**
 * Stop capturing frames.
 * Returns the VIDIOC_STREAMOFF ioctl result (0 on success, -1 on error).
 */
int camera_stop(struct camera* cam)
{
    //the ioctl takes a pointer to the buffer type being streamed
    int buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    return ioctl(cam->fd, VIDIOC_STREAMOFF, &buf_type);
}

//从摄像头读取一帧图像，并压缩为JPEG格式
/**
 * Dequeue one captured frame, compress it to JPEG and re-queue the buffer.
 *
 * paddr  out: *paddr receives a malloc'd JPEG buffer (caller frees)
 * psize  out: JPEG size in bytes
 *
 * Blocks until a frame is available (the fd is in blocking mode).
 * Returns 0 on success, -1 on error.
 */
int camera_read(struct camera* cam, unsigned char** paddr, size_t* psize)
{
    //type, memory and the reserved fields must be set before dequeuing;
    //memset covers the reserved fields
    struct v4l2_buffer bufferinfo;
    memset(&bufferinfo, 0, sizeof(bufferinfo));
    bufferinfo.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    bufferinfo.memory = V4L2_MEMORY_MMAP;

    //wait for the next filled buffer and dequeue it
    if (ioctl(cam->fd, VIDIOC_DQBUF, &bufferinfo) < 0)
    {
        perror("VIDIOC_DQBUF"); //was mislabeled "VIDIOC_QBUF"
        return -1;
    }

    //compress the raw YUYV frame to JPEG; *paddr is allocated by libjpeg
    //(return value was previously ignored)
    int err = jpeg_encode(cam->buffers[bufferinfo.index].addr,
                          bufferinfo.bytesused,
                          paddr,
                          psize,
                          cam->width,
                          cam->height,
                          cam->quality);

    //re-queue the buffer so the driver can reuse it; clear stale flags
    bufferinfo.flags = 0;

    if (ioctl(cam->fd, VIDIOC_QBUF, &bufferinfo) < 0)
    {
        perror("VIDIOC_QBUF2");
        free(*paddr);
        *paddr = NULL; //don't hand a dangling pointer back to the caller
        return -1;
    }
    return err ? -1 : 0;
}

//解析请求头，返回请求路径
/**
 * Parse an HTTP request line of the form "METHOD PATH PROTOCOL".
 *
 * line  the request line (read only)
 *
 * Returns a malloc'd copy of the request path (caller must free()),
 * or NULL if the line could not be parsed.
 */
char* parse_request(char *line)
{
    char method[10] = "";
    char protocol[10] = "";
    char* path = NULL;

    puts(line);
    //%ms makes sscanf allocate the path string itself (POSIX extension);
    //check the match count: previously a malformed line left method and
    //protocol uninitialized and printed a NULL path
    if (sscanf(line, "%9s %ms %9s", method, &path, protocol) < 2)
    {
        //no path captured; %ms did not allocate, so nothing to free
        return NULL;
    }

    printf("method = %s\n", method);
    printf("path = %s\n", path);
    printf("protocol = %s\n", protocol);

    return path;
}

/**
 * Create a listening TCP socket bound to all interfaces on the given
 * port, then chroot() into the current directory.
 *
 * Returns the listening fd on success, -1 on failure.
 * NOTE(review): chroot() requires root privileges.
 */
int server_init(int port)
{
    //a client disconnecting mid-write raises SIGPIPE; ignore it so
    //writes simply fail with EPIPE instead of killing the process
    signal(SIGPIPE, SIG_IGN);

    int listenfd = socket(AF_INET, SOCK_STREAM, 0);
    if (listenfd < 0)
    {
        perror("socket");
        return -1;
    }

    //allow fast restarts while old connections sit in TIME_WAIT
    int opt = 1;
    setsockopt(listenfd, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt));

    struct sockaddr_in saddr;
    memset(&saddr, 0, sizeof(saddr));

    saddr.sin_family = AF_INET;
    saddr.sin_addr.s_addr = htonl(INADDR_ANY);
    saddr.sin_port = htons(port);
    if (bind(listenfd, (struct sockaddr *)&saddr, sizeof(saddr)) < 0)
    {
        perror("bind");
        close(listenfd); //fd was previously leaked on this path
        return -1;
    }

    if (listen(listenfd, 64) < 0)
    {
        perror("listen");
        close(listenfd);
        return -1;
    }

    //confine the process to the current directory
    if (chroot(".") < 0)
    {
        perror("chroot");
        close(listenfd);
        return -1;
    }

    return listenfd;
}

/**
 * Debug helper: write a memory buffer to a file (created/truncated).
 *
 * filename  destination path
 * buf       data to write
 * size      number of bytes to write
 */
void dumpfile(char* filename, char* buf, size_t size)
{
    FILE* fp = fopen(filename, "w+");
    if (!fp)
    {
        perror("fopen");
        return;
    }

    //report short writes instead of silently losing data
    if (fwrite(buf, 1, size, fp) != size)
    {
        perror("fwrite");
    }

    //fclose flushes buffered data; an error here means the write failed
    if (fclose(fp) != 0)
    {
        perror("fclose");
    }
}

/**
 * Write one multipart/x-mixed-replace part: boundary line, part
 * headers, then the JPEG payload.
 *
 * buf   JPEG image data
 * size  image size in bytes
 * sfp   client connection stream
 */
void send_frame(const unsigned char* buf, size_t size, FILE* sfp)
{
    fprintf(sfp, "--%s\r\n", BOUNDARY);
    fprintf(sfp, "Content-Type: image/jpeg\r\n");
    //%zu matches size_t; %zd (for ssize_t) was a format/argument mismatch
    fprintf(sfp, "Content-Length: %zu\r\n", size);
    fprintf(sfp, "\r\n");
    fwrite(buf, 1, size, sfp);
    fprintf(sfp, "\r\n\r\n");
}

/**
 * Handle GET /stream: send an endless multipart/x-mixed-replace MJPEG
 * stream until the client disconnects. Takes ownership of sfp and
 * closes it on all paths.
 */
void handle_stream(FILE* sfp, struct camera* cam)
{
    //drain the remaining request headers up to the blank line
    //(header lines end in "\r\n"; the terminating blank line has length 2)
    char buf[BUFSIZ];
    while(fgets(buf, BUFSIZ, sfp) && strlen(buf) > 2)
    {
        printf("%s", buf);
    }

    if (camera_start(cam) < 0)
    {
        perror("camera_start");
        fclose(sfp); //sfp was previously leaked on this path
        return;
    }

    //response header: a multipart stream with one JPEG image per part
    fprintf(sfp, "HTTP/1.1 200 OK\r\n");
    fprintf(sfp, "Content-Type: multipart/x-mixed-replace; boundary=%s\r\n", BOUNDARY);
    fprintf(sfp, "Connection: close\r\n");
    fprintf(sfp, "\r\n");

    while (1)
    {
        unsigned char* frame = NULL;
        size_t frame_size = 0;
        //block until a frame has been captured and JPEG-compressed
        if (camera_read(cam, &frame, &frame_size) < 0)
        {
            perror("camera_read"); //was mislabeled "camera_start"
            continue;
        }

        //push the compressed frame to the client
        send_frame(frame, frame_size, sfp);
        //ferror() is set once the client has disconnected
        int disconnected = ferror(sfp);
        //free the JPEG buffer allocated by camera_read on every path
        free(frame);
        if (disconnected)
        {
            printf("client disconnected\n");
            break;
        }
    }

    camera_stop(cam);
    fclose(sfp);
}

/**
 * Handle GET /snapshot: capture a single frame and send it as one JPEG
 * image. Takes ownership of sfp and closes it on all paths.
 */
void handle_snapshot(FILE* sfp, struct camera* cam)
{
    //drain the remaining request headers up to the blank line
    char buf[BUFSIZ];
    while(fgets(buf, BUFSIZ, sfp) && strlen(buf) > 2)
    {
        printf("%s", buf);
    }

    if (camera_start(cam) < 0)
    {
        perror("camera_start");
        fclose(sfp); //sfp was previously leaked on this path
        return;
    }

    unsigned char* frame = NULL;
    size_t frame_size = 0;
    //block until a frame has been captured and JPEG-compressed
    int err = camera_read(cam, &frame, &frame_size);
    if (err < 0)
    {
        perror("camera_read"); //was mislabeled "camera_start"
    }

    camera_stop(cam);

    if (err < 0)
    {
        //no frame captured: previously this still sent a 200 response
        //and passed a NULL frame to fwrite
        fclose(sfp);
        return;
    }

    //response header for a single image
    fprintf(sfp, "HTTP/1.1 200 OK\r\n");
    //was "Content-Type: Content-Type: image/jpeg" (duplicated header name)
    fprintf(sfp, "Content-Type: image/jpeg\r\n");
    //%zu matches size_t (%zd is for ssize_t)
    fprintf(sfp, "Content-Length: %zu\r\n", frame_size);
    fprintf(sfp, "Connection: close\r\n");
    fprintf(sfp, "\r\n");

    //send the compressed image
    fwrite(frame, 1, frame_size, sfp);
    if (ferror(sfp)) //client disconnected mid-write
    {
        printf("client disconnected\n");
    }

    //free the JPEG buffer allocated by camera_read
    free(frame);
    fclose(sfp);
}

/**
 * Send a 404 plain-text response carrying the given message body.
 * Takes ownership of sfp and closes it.
 */
void handle_notfound(FILE* sfp, char* msg)
{
    //drain the remaining request headers up to the blank line
    char buf[BUFSIZ];
    while(fgets(buf, BUFSIZ, sfp) && strlen(buf) > 2)
    {
        printf("%s", buf);
    }

    //response header + body
    fprintf(sfp, "HTTP/1.1 404 Not Found\r\n");
    fprintf(sfp, "Content-Type: text/plain; charset=utf-8\r\n");
    //%zu matches size_t, the return type of strlen (%zd is for ssize_t)
    fprintf(sfp, "Content-Length: %zu\r\n", strlen(msg));
    fprintf(sfp, "Connection: close\r\n");
    fprintf(sfp, "\r\n");
    fprintf(sfp, "%s", msg);

    fclose(sfp);
}

/**
 * Entry point: open the camera, start the HTTP server on port 80 and
 * dispatch /stream and /snapshot requests, one client at a time.
 */
int main()
{
    struct camera * cam = camera_open("/dev/video0", 640, 480);
    if (!cam)
    {
        return EXIT_FAILURE;
    }

    int listenfd = server_init(80);
    if (listenfd < 0)
    {
        camera_close(cam); //cam was previously leaked on this path
        return EXIT_FAILURE;
    }

    puts("web server start");

    while (1)
    {
        struct sockaddr_in caddr;
        socklen_t addrlen = sizeof(caddr);
        //wait for a browser to connect
        int connfd = accept(listenfd, (struct sockaddr *)&caddr, &addrlen);
        //check accept() before reading caddr: its contents are
        //unspecified when accept fails (previously printed first)
        if (connfd < 0)
        {
            perror("accept");
            continue;
        }

        char ipstr[INET_ADDRSTRLEN];
        inet_ntop(AF_INET, &caddr.sin_addr, ipstr, sizeof ipstr);
        printf("client %s connected\n", ipstr);

        FILE* sfp = fdopen(connfd, "r+");
        if (!sfp)
        {
            perror("fdopen");
            close(connfd); //fd was previously leaked on this path
            continue;
        }

        //read the HTTP request line; bail out if the client sent nothing
        char line[1024];
        if (!fgets(line, sizeof(line), sfp))
        {
            fclose(sfp);
            continue;
        }
        //path is malloc'd by parse_request (or NULL); it was previously
        //leaked on every request — free it once the route is decided
        char* path = parse_request(line);

        if (path && !strcmp(path, "/stream"))
        {
            free(path);
            handle_stream(sfp, cam);
            continue;
        }

        if (path && !strcmp(path, "/snapshot"))
        {
            free(path);
            handle_snapshot(sfp, cam);
            continue;
        }

        free(path);
        handle_notfound(sfp, "请访问/stream或/snapshot路径");
    }

    camera_close(cam);
    return EXIT_SUCCESS;
}
