#include <stdio.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/ip.h>
#include <arpa/inet.h>
#include <signal.h>
#include <unistd.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>
#include <string.h>
#include <sys/mman.h>
#include <stdlib.h>
#include <pthread.h>
#include <jpeglib.h>

// Pool of mmap'd V4L2 capture buffers, indexed by v4l2_buffer.index.
void* buffers[20];

// Latest JPEG-encoded frame, shared between the capture thread and the
// HTTP client handler; all fields are guarded by `lock`.
struct frame
{
    void* addr;            // malloc'd JPEG data (replaced each frame)
    size_t size;           // JPEG size in bytes; 0 means "no frame yet"
    pthread_mutex_t lock;  // protects addr/size
} current_frame = { .lock = PTHREAD_MUTEX_INITIALIZER }; // portable static mutex init

// Image width in pixels
int width = 640;
// Image height in pixels
int height = 480;
// JPEG compression quality (0-100)
int quality = 80;

//将YUV422压缩为JPEG
int jpeg_encode(const char *src, size_t src_size, unsigned char **dst, size_t *dst_size, int quality)
{
    //JPEG compression object
    struct jpeg_compress_struct cinfo;
    //JPEG error handler
    struct jpeg_error_mgr jerr;
    //set up the error handler
    cinfo.err = jpeg_std_error(&jerr);
    //initialize the JPEG compression object
    jpeg_create_compress(&cinfo);
    //将编码之后的图像写入内存(自动分配内存空间)
    jpeg_mem_dest(&cinfo, dst, dst_size);
    //压缩图像大小
    cinfo.image_width = width;
    cinfo.image_height = height;
    //每像素通道数
    cinfo.input_components = 3;
    //使用YCbCr颜色空间
    cinfo.in_color_space = JCS_YCbCr;
    //设置默认压缩参数
    jpeg_set_defaults(&cinfo);
    //设置压缩质量
    jpeg_set_quality(&cinfo, quality, TRUE);
    //开始压缩
    jpeg_start_compress(&cinfo, TRUE);

    JSAMPROW jrow;
    unsigned char buf[width * 3];
    while (cinfo.next_scanline < cinfo.image_height)
    {
        //将每个像素由YUV422转为YUV444
        for (int i = 0; i < cinfo.image_width; i += 2)
        {
            //Y0U0 Y1V1 Y2U2 Y3V3
            buf[i * 3] = src[i * 2];         //Y0 = Y0
            buf[i * 3 + 1] = src[i * 2 + 1]; //U0 = U0
            buf[i * 3 + 2] = src[i * 2 + 3]; //V0 = V1
            buf[i * 3 + 3] = src[i * 2 + 2]; //Y1 = Y1
            buf[i * 3 + 4] = src[i * 2 + 1]; //U1 = U0
            buf[i * 3 + 5] = src[i * 2 + 3]; //V1 = V1
        }
        jrow = (JSAMPROW)&buf;
        jpeg_write_scanlines(&cinfo, &jrow, 1);
        src += width * 2;
    }
    //停止压缩
    jpeg_finish_compress(&cinfo);
    //释放内存
    jpeg_destroy_compress(&cinfo);

    return 0;
}

//Print every image format supported by the capture device behind fd,
//and for each format its discrete frame sizes.
void print_format(int fd)
{
    struct v4l2_fmtdesc fmtdesc;
    //The V4L2 API requires reserved fields to be zeroed before the ioctl.
    memset(&fmtdesc, 0, sizeof(fmtdesc));
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmtdesc.index = 0;
    puts("support formats:");
    while (!ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc))
    {
        printf("%s\n", fmtdesc.description);
        //Enumerate the resolutions available for this pixel format.
        struct v4l2_frmsizeenum frmsize;
        memset(&frmsize, 0, sizeof(frmsize));
        frmsize.pixel_format = fmtdesc.pixelformat;
        frmsize.index = 0;
        while (!ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frmsize))
        {
            if (V4L2_FRMSIZE_TYPE_DISCRETE == frmsize.type)
            {
                //width/height are __u32, so print with %u
                printf("  %ux%u\n", frmsize.discrete.width, frmsize.discrete.height);
                frmsize.index++;
            }
            else
            {
                //Stepwise/continuous size ranges are not listed here.
                break;
            }
        }
        fmtdesc.index++;
    }
}

//Initialize the camera.
//Opens `device`, verifies capture + streaming support, selects a
//width x height YUYV format, requests `buffer_count` mmap buffers,
//maps each into `buffers[]`, and queues them all for capture.
//Returns the open file descriptor on success, a negative code on error.
int cam_init(char* device, void* buffers[], int buffer_count)
{
    //Open the camera device file read/write.
    int fd = open(device, O_RDWR);
    if (fd == -1)
    {
        perror("open");
        return -1;
    }

    struct v4l2_capability cap;
    //Query the device capabilities.
    int error = ioctl(fd, VIDIOC_QUERYCAP, &cap);
    if (error == -1)
    {
        perror("VIDIOC_QUERYCAP");
        close(fd); //don't leak the descriptor on error paths
        return -2;
    }
    //The device must support video capture...
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        printf("Device does not support video capture interface\n");
        close(fd);
        return -3;
    }
    //...and streaming (mmap) I/O.
    if (!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        printf("Device does not support streaming IO method\n");
        close(fd);
        return -4;
    }

    struct v4l2_format fmt;
    memset(&fmt, 0, sizeof(fmt)); //zero reserved fields before the ioctl
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    //Read back the driver's current image format.
    error = ioctl(fd, VIDIOC_G_FMT, &fmt);
    if (error == -1)
    {
        perror("VIDIOC_G_FMT");
        close(fd);
        return -5;
    }

    //pixelformat is a fourcc packed into a __u32, least significant
    //byte first; width/height are __u32 (%u).
    printf("default format: %c%c%c%c %ux%u\n",
           fmt.fmt.pix.pixelformat & 0xff,
           (fmt.fmt.pix.pixelformat >> 8) & 0xff,
           (fmt.fmt.pix.pixelformat >> 16) & 0xff,
           (fmt.fmt.pix.pixelformat >> 24) & 0xff,
           fmt.fmt.pix.width,
           fmt.fmt.pix.height);

    print_format(fd);

    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    //The JPEG encoder assumes packed YUYV (YUV 4:2:2); request it
    //explicitly instead of relying on the driver's default format.
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;

    //Apply the image format.
    error = ioctl(fd, VIDIOC_S_FMT, &fmt);
    if (error == -1)
    {
        perror("VIDIOC_S_FMT");
        close(fd);
        return -6;
    }

    struct v4l2_requestbuffers reqbuf;
    memset(&reqbuf, 0, sizeof(reqbuf));
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; //buffer type
    reqbuf.memory = V4L2_MEMORY_MMAP;          //memory-mapped I/O
    reqbuf.count = buffer_count;               //number of buffers wanted

    //Ask the driver to allocate the buffers.
    error = ioctl(fd, VIDIOC_REQBUFS, &reqbuf);
    if (error == -1)
    {
        perror("VIDIOC_REQBUFS");
        close(fd);
        return -7;
    }
    //The driver may grant fewer buffers than requested.
    if (reqbuf.count != buffer_count)
    {
        printf("Not enough buffer memory\n");
        close(fd);
        return -8;
    }

    //Map each driver buffer into our address space and enqueue it.
    size_t lengths[buffer_count]; //mapping lengths, kept for cleanup
    int i;
    for (i = 0; i < buffer_count; i++)
    {
        struct v4l2_buffer buffer;
        memset(&buffer, 0, sizeof(buffer));
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        buffer.index = i;
        //Look up the buffer's offset and length.
        error = ioctl(fd, VIDIOC_QUERYBUF, &buffer);
        if (error == -1)
        {
            perror("VIDIOC_QUERYBUF");
            goto fail;
        }

        //m.offset and length are __u32 values, not pointers.
        printf("buffer offset: %u, length: %u\n", buffer.m.offset, buffer.length);
        //Map the kernel buffer into user space.
        buffers[i] = mmap(NULL, buffer.length,
                              PROT_READ | PROT_WRITE,
                              MAP_SHARED,
                              fd, buffer.m.offset);
        if (buffers[i] == MAP_FAILED)
        {
            perror("mmap");
            goto fail;
        }
        lengths[i] = buffer.length;
        //Queue the buffer so the driver can fill it.
        error = ioctl(fd, VIDIOC_QBUF, &buffer);
        if (error == -1)
        {
            perror("VIDIOC_QBUF");
            i++; //buffers[i] is already mapped; include it in cleanup
            goto fail;
        }
    }

    return fd;

fail:
    //Undo the mappings made so far and close the device; the original
    //code returned fd here, making a partial setup look like success.
    while (i-- > 0)
    {
        munmap(buffers[i], lengths[i]);
    }
    close(fd);
    return -9;
}

//Start streaming frames from the camera described by fd.
//Returns 0 on success, 1 on failure.
int cam_start(int fd)
{
    int buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    //Kick off streaming I/O; the ioctl returns non-zero on failure.
    if (ioctl(fd, VIDIOC_STREAMON, &buf_type))
    {
        perror("VIDIOC_STREAMON");
        return 1;
    }
    return 0;
}

//Stop the camera's streaming I/O.
//Returns 0 on success, 1 on failure.
int cam_stop(int fd)
{
    int buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    //Tear down streaming; the ioctl returns non-zero on failure.
    if (ioctl(fd, VIDIOC_STREAMOFF, &buf_type))
    {
        perror("VIDIOC_STREAMOFF");
        return 1;
    }
    return 0;
}

//Dequeue a filled buffer from the driver (blocks until a frame is ready).
//Returns a malloc'd v4l2_buffer that the caller must hand back via
//cam_queue(), or NULL on failure.
struct v4l2_buffer* cam_dequeue(int fd)
{
    struct v4l2_buffer* buffer = malloc(sizeof(*buffer));
    if (!buffer)
    {
        perror("malloc");
        return NULL;
    }
    memset(buffer, 0, sizeof(*buffer));
    buffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer->memory = V4L2_MEMORY_MMAP;
    //Wait for the driver to hand over a filled buffer.
    int error = ioctl(fd, VIDIOC_DQBUF, buffer);
    if (error == -1)
    {
        perror("VIDIOC_DQBUF");
        free(buffer); //was leaked here before
        return NULL;
    }

    return buffer;
}

//Hand a buffer back to the driver for reuse and release the descriptor
//allocated by cam_dequeue(). Takes ownership of `buffer` in all cases.
//Returns 0 on success, 1 on failure.
int cam_queue(int fd, struct v4l2_buffer* buffer)
{
    int rc = 0;
    int error = ioctl(fd, VIDIOC_QBUF, buffer);
    if (error == -1)
    {
        perror("VIDIOC_QBUF");
        rc = 1; //descriptor is still freed below (was leaked on this path)
    }

    free(buffer);
    return rc;
}

//Return the size of `filename` in bytes, or 0 on any error.
//Note: a genuinely empty file is indistinguishable from an error.
size_t get_file_size(char* filename)
{
    //Open in binary mode so the offset is a raw byte count.
    FILE* fp = fopen(filename, "rb");
    if (!fp)
    {
        perror("get_file_size");
        return 0;
    }
    //Seek to the end; the resulting offset is the file size.
    long size = 0;
    if (fseek(fp, 0, SEEK_END) == 0)
    {
        size = ftell(fp);
    }
    fclose(fp);
    //ftell() returns -1 on failure; converting that straight to size_t
    //would yield a huge bogus size, so report 0 instead.
    return size < 0 ? 0 : (size_t)size;
}

//Send a file to the client.
//filename: file to send
//fp: stdio stream bound to the client connection
//Returns 0 on success, non-zero on failure.
int send_file(char* filename, FILE* fp)
{
    FILE* file = fopen(filename, "r");
    if (!file)
    {
        perror("send_file");
        return 1;
    }

    int status = 0;
    char buf[BUFSIZ];
    size_t readn;
    //Copy in chunks until EOF or error.
    while ((readn = fread(buf, 1, sizeof(buf), file)) > 0)
    {
        //A short write means the client went away or the stream errored;
        //the original ignored this and reported success anyway.
        if (fwrite(buf, 1, readn, fp) != readn)
        {
            status = 1;
            break;
        }
    }
    //Distinguish a read error from plain EOF.
    if (ferror(file))
    {
        status = 1;
    }
    fclose(file);
    return status;
}

//Serve one HTTP client connection.
// connectfd: connected client socket
// Returns 0 when the client disconnects, non-zero on setup failure.
int handle_client(int connectfd)
{

    //Bind the connected socket to a stdio stream so we can use
    //formatted I/O for the HTTP exchange.
    FILE* fp = fdopen(connectfd, "r+");
    if (!fp)
    {
        perror("fdopen");
        return 1;
    }

    char line[80];
    //Handle requests until the client disconnects.
    while (1)
    {
        char method[10];
        char path[20];
        char protocol[10];
        //Parse the request line. Field widths are capped to the buffer
        //sizes (untrusted input), and a result other than 3 means EOF or
        //a malformed request — the original never checked this, leaving
        //the buffers uninitialized and looping forever on disconnect.
        if (fscanf(fp, "%9s %19s %9s\r\n", method, path, protocol) != 3)
        {
            printf("client disconnected\n");
            break;
        }

        //Consume header lines up to the blank line ending the request.
        while (fgets(line, sizeof(line), fp))
        {
            if (line[0] == '\r')
            {
                break;
            }
        }
        //Stop on stream error OR clean EOF; ferror() alone misses a
        //normal client close (feof is set, ferror is not).
        if (ferror(fp) || feof(fp))
        {
            printf("client disconnected\n");
            break;
        }

        //Send the most recent captured frame to the client.
        pthread_mutex_lock(&current_frame.lock);
        if (current_frame.size > 0)
        {
            fprintf(fp, "HTTP/1.1 200 OK\r\n");
            fprintf(fp, "Content-Type: image/jpeg\r\n");
            //size_t must be printed with %zu, not %d
            fprintf(fp, "Content-Length: %zu\r\n", current_frame.size);
            fprintf(fp, "\r\n");
            fwrite(current_frame.addr, 1, current_frame.size, fp);
        }
        else
        {
            //No frame captured yet; don't fwrite() a NULL pointer.
            fprintf(fp, "HTTP/1.1 503 Service Unavailable\r\n");
            fprintf(fp, "Content-Length: 0\r\n");
            fprintf(fp, "\r\n");
        }
        pthread_mutex_unlock(&current_frame.lock);
        fflush(fp); //push the response before waiting for the next request
    }

    //Close the TCP connection (fclose also closes connectfd).
    fclose(fp);
    return 0;
}

//Capture thread: pull frames from the camera, JPEG-encode them, and
//publish the result as current_frame for the HTTP handler.
void* capture_thread(void* data)
{
    int fd = *(int*)data;

    if (cam_start(fd))
    {
        //Streaming could not be started; nothing to capture.
        return NULL;
    }

    while (1) //process every frame the camera delivers
    {
        //Blocking dequeue of the next filled buffer.
        struct v4l2_buffer* buffer = cam_dequeue(fd);
        if (buffer == NULL)
        {
            continue;
        }
        //Swap in a freshly encoded frame.
        pthread_mutex_lock(&current_frame.lock);
        if (current_frame.size)
        {
            free(current_frame.addr);
            //jpeg_mem_dest() reuses a non-NULL *dst as an existing
            //buffer, so the dangling pointer must be cleared before
            //re-encoding (use-after-free otherwise).
            current_frame.addr = NULL;
            current_frame.size = 0;
        }
        jpeg_encode(buffers[buffer->index], buffer->bytesused,
                    (unsigned char**)&current_frame.addr, &current_frame.size, quality);
        pthread_mutex_unlock(&current_frame.lock);

        //Return the buffer to the driver for reuse.
        cam_queue(fd, buffer);
    }

    cam_stop(fd); //unreachable: the loop runs for the process lifetime

    return NULL;
}

//Entry point: initialize the camera, start the capture thread, and run
//a single-threaded HTTP loop serving the latest JPEG frame on port 80.
int main()
{
    int fd = cam_init("/dev/video0", buffers,
                      sizeof(buffers)/sizeof(buffers[0]));
    if (fd < 0)
    {
        return 1;
    }

    pthread_t tid;
    pthread_attr_t attr;
    pthread_attr_init(&attr);
    //Spawn the video-capture thread; fd stays valid for its lifetime
    //because main never returns while the thread runs.
    if (pthread_create(&tid, &attr, capture_thread, &fd))
    {
        perror("pthread_create");
        return 1;
    }

    //Create the listening socket.
    int listenfd = socket(PF_INET, SOCK_STREAM, 0);
    if (listenfd == -1)
    {
        perror("socket");
        return 1;
    }
    //Allow quick restarts without waiting out TIME_WAIT.
    int reuse = 1;
    setsockopt(listenfd, SOL_SOCKET, SO_REUSEADDR, &reuse, sizeof(reuse));

    //Local address and port to serve on.
    struct sockaddr_in addr;
    memset(&addr, 0, sizeof(addr)); //also clears sin_zero padding
    addr.sin_family = AF_INET; //IPv4
    addr.sin_port = htons(80); //network byte order; ports < 1024 need root
    addr.sin_addr.s_addr = htonl(INADDR_ANY); //listen on every local address
    //addr.sin_addr.s_addr = inet_addr("192.168.1.175"); //or a specific address

    //Bind the address to the listening socket.
    int error = bind(listenfd, (struct sockaddr*)&addr, sizeof(addr));
    if (error)
    {
        perror("bind");
        return 1;
    }

    error = listen(listenfd, 3);
    if (error)
    {
        perror("listen");
        return 2;
    }

    //Ignore SIGPIPE so writing to a closed connection doesn't kill us.
    signal(SIGPIPE, SIG_IGN);
    //Confine the server to the current directory (requires root).
    if (chroot(".") == 0)
    {
        chdir("/"); //don't leave the cwd pointing outside the new root
    }
    else
    {
        perror("chroot"); //non-fatal: continue unconfined
    }
    printf("server start\n");

    //Iterative server: one client at a time.
    while (1) //wait for the next client connection
    {
        struct sockaddr_in client_addr;
        socklen_t addrlen = sizeof(client_addr);
        //Block until a client connects.
        int connectfd = accept(listenfd, (struct sockaddr*)&client_addr, &addrlen);
        if (connectfd == -1)
        {
            perror("accept");
            continue; //try again
        }
        printf("client is connected, ip: %s, port: %d\n", inet_ntoa(client_addr.sin_addr), ntohs(client_addr.sin_port));

        handle_client(connectfd);
    }

    return 0;
}
