/*******************************************************************************
 * Copyleft (c) 2021 Kcode
 *
 * @file    myuvc.c
 * @brief   实现USB摄像头的视频数据传输（修改支持二合一摄像头，MJPEG）
 * @author  K
 * @version 0.0.1
 * @date    2021-07-22
 * @license MulanPSL-1.0
 *
 * 文件修改历史：
 * <时间>       | <版本>    | <作者>  | <描述>
 * 2021-07-24   | v0.0.1    | Kcode   | 实现USB摄像头的视频数据传输
 * -----------------------------------------------------------------------------
 ******************************************************************************/

#include <linux/kernel.h>
#include <linux/vmalloc.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/usb.h>
#include <linux/videodev2.h>
#include <linux/wait.h>
#include <asm/atomic.h>
#include <asm/unaligned.h>


#include <media/v4l2-ctrls.h>
#include <media/v4l2-event.h>
#include <media/v4l2-ioctl.h>
#include <media/v4l2-common.h>
#include <media/videobuf-core.h>


#include "uvcvideo.h"

#define MYDRIVER_VERSION_NUMBER            1    /**< myuvc版本 */
#define MYMAX_PACKETS_NUM                 32    /**< usb允许最大传输次数 */
#define MYUVC_URBS                         5    /**< 分配的urb_buffers数量 */
#define MYUVC_CTRL_CONTROL_TIMEOUT       300    /**< 控制接口超时传输时间 */        
#define MYUVC_CTRL_STREAMING_TIMEOUT	3000    /**< 视频流接口超时传输时间 */

/* Values for bmHeaderInfo (Video and Still Image Payload Headers, 2.4.3.3) */
#define UVC_STREAM_EOH	(1 << 7)
#define UVC_STREAM_ERR	(1 << 6)
#define UVC_STREAM_STI	(1 << 5)
#define UVC_STREAM_RES	(1 << 4)
#define UVC_STREAM_SCR	(1 << 3)
#define UVC_STREAM_PTS	(1 << 2)
#define UVC_STREAM_EOF	(1 << 1)
#define UVC_STREAM_FID	(1 << 0)

/*!
 * Frame (resolution) descriptor.
 */
typedef struct frame_desc {
	int width;		/**< horizontal resolution in pixels */
	int height;		/**< vertical resolution in pixels */
} FRAME_DESC_S;

/*!
 * UVC streaming control block used for probe/commit negotiation.
 * See: UVC 1.5 Class specification, "Video Probe and Commit Controls".
 */
typedef struct myuvc_streaming_control {
	__u16 bmHint;           /**< hint bitmap: which fields shall stay fixed during \
	                             negotiation; e.g. bit(0) set = keep dwFrameInterval \
	                             (prefer frame rate over quality). */
	__u8  bFormatIndex;     /**< index of the selected video format */
	__u8  bFrameIndex;      /**< index of the selected video frame (resolution) */
	__u32 dwFrameInterval;  /**< frame interval in 100 ns units */
	__u16 wKeyFrameRate;    /**< key-frame rate per video frame unit */
	__u16 wPFrameRate;      /**< number of P-frames per key frame */
	__u16 wCompQuality;     /**< compression quality, abstract units: 1 (lowest) .. 10000 (highest) */
	__u16 wCompWindowSize;  /**< compression window size */
	__u16 wDelay;           /**< internal latency from capture to USB presentation, in ms */
	__u32 dwMaxVideoFrameSize;          /**< maximum size of a single video frame */
	__u32 dwMaxPayloadTransferSize;     /**< maximum bytes the device sends/receives in one payload transfer */
	__u32 dwClockFrequency;             /**< device clock frequency in Hz */
	__u8  bmFramingInfo;                /**< framing info bitfield */
	__u8  bPreferedVersion;             /**< preferred payload format version */
	__u8  bMinVersion;                  /**< minimum supported payload format version */
	__u8  bMaxVersion;                  /**< maximum supported payload format version */
}MYUVC_STREAMING_CONTROL_S;

/*!
 * Per-buffer bookkeeping for one capture buffer.
 */
typedef struct myuvc_buffer {
    int state;                      /**< videobuf state (VIDEOBUF_IDLE/QUEUED/ACTIVE/DONE/...) */
    int vma_use_count;              /**< number of active mmap() mappings of this buffer */
	struct v4l2_buffer buf;         /**< the V4L2 query information for this buffer */
    wait_queue_head_t wait;         /**< readers sleep here until the buffer has data */
    struct list_head stream;        /**< node on mainqueue (consumed by the application) */
    struct list_head irq;           /**< node on irqqueue (filled by the completion handler) */
}MYUVC_BUFFER_S; 

/*!
 * The single large capture allocation plus the URB resources.
 */
typedef struct mvuvc_video_queue {
    void *mem;                          /**< one big vmalloc'd region holding all buffers */
    int count;                          /**< number of buffers carved out of mem */
    int buf_size;                       /**< page-aligned size of each buffer */	
	MYUVC_BUFFER_S buffer[32];          /**< per-buffer bookkeeping */

    struct urb *urb[MYMAX_PACKETS_NUM];        /**< URB descriptors */
    char *urb_buffer[MYMAX_PACKETS_NUM];       /**< URB transfer buffers (data) */
    dma_addr_t urb_dma[MYMAX_PACKETS_NUM];     /**< DMA addresses of the transfer buffers */
    unsigned int urb_size;                     /**< size in bytes of each URB transfer buffer (not a count) */
    
    struct list_head mainqueue;         /**< head of the queue consumed by the application */
    struct list_head irqqueue;          /**< head of the queue filled by the completion handler */
}MYUVC_VIDEO_QUEUE_S;

static int uvc_version = 0x100;             /**< UVC spec version, read by hand from the descriptors */
static int s_myuvc_streaming_intf;        /**< VideoStreaming interface number */
static int s_myuvc_streaming_setting = 5;   /**< alternate setting index used on the streaming interface */
static int s_myuvc_control_intf;   /**< VideoControl interface number */
static struct video_device *s_myuvc_vdev;
static struct v4l2_format s_myuvc_format;	/**< currently configured format of the USB camera */
static struct usb_device *s_myuvc_udev;
static MYUVC_VIDEO_QUEUE_S s_myuvc_queue;	/**< the one big capture-buffer allocation */

static MYUVC_STREAMING_CONTROL_S s_myuvc_params;    /**< negotiated streaming parameters */

static int s_last_fid     = -1;         /* FID bit of the previous payload; -1 = none seen yet */
static int s_pixel_bits  = 0;			/**< bits per pixel of the camera format */
static int s_frame_index = 1;			/**< index into s_frame_arr for the active resolution */
static int s_wMaxPackSie = 800;         /**< max bytes of one isochronous transfer (800 bytes) */
static int s_bInterval   = 1;           /**< bInterval, taken by hand from the endpoint descriptor */
static int s_PU_ID        = 3;          /**< Processing Unit ID, taken by hand from the descriptors */
static int s_bEndpointAddress = 0x82; /**< isochronous IN endpoint address, fixed by inspection */

static int s_dwMaxVideoFrameSize = 77312;   /**< max video frame size for the chosen resolution, fixed by hand */

static int s_cnt = 0;

static int s_frame_idx = 1;           /* frame index of the chosen resolution */
static FRAME_DESC_S s_frame_arr[] = {
	{640, 480},
	{320, 240},
	{160, 120},
};	/**< all resolutions supported by this USB camera */

/*!
 * USB interface classes handled by this driver.
 */
static struct usb_device_id myuvc_ids[] = {
	/* Generic USB Video Class */
	{ USB_INTERFACE_INFO(USB_CLASS_VIDEO, 1, 0) },	 /**< VideoControl interface */
	{ USB_INTERFACE_INFO(USB_CLASS_VIDEO, 2, 0) }, 	/**< VideoStreaming interface */
	{}
};

/*!
 * @brief  Step1: open the device file.  Nothing to set up; always succeeds.
 */
static int myuvc_open(struct file *filp)
{
	return 0;
}

static int myuvc_vidioc_streamoff(struct file *file, 
									void *priv, enum v4l2_buf_type t);

/*!
 * Release the device file: stop any active streaming and reset the
 * frame counter used for debugging.
 */
static int myuvc_close(struct file *file)
{
    myuvc_vidioc_streamoff(NULL, NULL, 0);
    s_cnt = 0 ;
	return 0;
}

/*!
 * @brief  Step2: report device capabilities (video capture device).
 *         Reference: uvc_v4l2_do_ioctl()
 */
static int myuvc_vidioc_querycap(struct file *file, void  *priv,
					struct v4l2_capability *cap)
{	
	/*!
	 * Clear the struct, then fill in driver name and version.
	 * NOTE(review): kernel style prefers a bounded copy (strlcpy/strscpy)
	 * over strcpy for these fixed-size fields — the literals fit today.
	 */
	memset(cap, 0, sizeof *cap);
	strcpy(cap->driver, "myuvc");
	strcpy(cap->card, "myuvc");
	cap->version = MYDRIVER_VERSION_NUMBER;

	/*!
	 * V4L2_CAP_VIDEO_CAPTURE - this is a video capture device
	 * V4L2_CAP_STREAMING     - data is read via the streaming (mmap) ioctls
	 */
	cap->capabilities = V4L2_CAP_VIDEO_CAPTURE | V4L2_CAP_STREAMING;

	return 0;
}

/*!
 * @brief  Step3: enumerate the pixel formats this camera supports.
 *         Only one format (MJPEG) is exposed, as seen in the descriptors.
 *         Reference: uvc_fmt()
 */
static int myuvc_vidioc_enum_fmt_vid_cap(struct file *file, 
							void *priv, struct v4l2_fmtdesc *f)
{
	/* a single format only: every index other than 0 is invalid */
	if (f->index != 0)
		return -EINVAL;

	f->type        = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	f->pixelformat = V4L2_PIX_FMT_MJPEG;
	strcpy(f->description, "MJPEG");

	return 0;
}

/*!
 * @brief  Step4：返回当前所使用的格式
 */
static int myuvc_vidioc_g_fmt_vid_cap(struct file *file, 
							void *priv, struct v4l2_format *f)
{
	memcpy(f, &s_myuvc_format, sizeof(s_myuvc_format));
	return 0;
}

/*!
 * @brief  Step5：测试驱动程序是否支持某种格式，强制设定格式
 *         参考：uvc_v4l2_try_format()/myvivi_vidioc_try_fmt_vid_cap()
 */
static int myuvc_vidioc_try_fmt_vid_cap(struct file *file,
							void *priv, struct v4l2_format *f)
{
	if ((f->type != V4L2_BUF_TYPE_VIDEO_CAPTURE) || \
		(f->fmt.pix.pixelformat != V4L2_PIX_FMT_MJPEG))
		return -EINVAL;

	/*!
	 * 手工确定分辨率、像素位、图片大小信息
	 */
	f->fmt.pix.width  = s_frame_arr[s_frame_index].width;
	f->fmt.pix.height = s_frame_arr[s_frame_index].height;

	f->fmt.pix.bytesperline = (f->fmt.pix.width * s_pixel_bits) >> 3;
	f->fmt.pix.sizeimage    = s_dwMaxVideoFrameSize;
    f->fmt.pix.field        = V4L2_FIELD_NONE;
    f->fmt.pix.colorspace   = V4L2_COLORSPACE_SRGB;
    f->fmt.pix.priv         = 0;
	
	return 0;
}

/*!
 * @brief  Step6：设置所支持的格式
 *         参考：myvivi_vidioc_s_fmt_vid_cap()
 */
static int myuvc_vidioc_s_fmt_vid_cap(struct file *file,
							void *priv, struct v4l2_format *f)
{
	int ret;

	/*!
	 * 测试是否支持该格式（强制设置格式）
	 */
	ret = myuvc_vidioc_try_fmt_vid_cap(file, NULL, f);	
	if (ret < 0)
		return ret;
	
	memcpy(&s_myuvc_format, f, sizeof(s_myuvc_format));

	return 0;
}

/*!
 * @brief  Free the big capture allocation (if any) and reset all queue
 *         bookkeeping to zero.
 * @return always 0
 */
int myuvc_free_buffers(void)
{
    if (s_myuvc_queue.mem != NULL) {
        vfree(s_myuvc_queue.mem);
        /* zeroing the whole struct also clears the mem pointer */
        memset(&s_myuvc_queue, 0, sizeof(s_myuvc_queue));
    }
    return 0;
}

/*!
 * @brief  Step7: allocate the capture buffers as one big vmalloc region and
 *         initialise the per-buffer bookkeeping.
 *         Reference: uvc_alloc_buffers()
 * @return number of buffers actually allocated on success, negative on error
 */
static int myuvc_vidioc_reqbufs(struct file *file,
						void *priv, struct v4l2_requestbuffers *p)
{
	int buf_num   = p->count ;
	int buf_size_unalign = s_myuvc_format.fmt.pix.sizeimage;
	int buf_size_align    = PAGE_ALIGN(s_myuvc_format.fmt.pix.sizeimage);
	unsigned int i;
	void *mem = NULL;
	int ret;

	if (buf_num > UVC_MAX_VIDEO_BUFFERS)
		buf_num = UVC_MAX_VIDEO_BUFFERS;

	/* release any previously allocated buffers first */
	if ((ret = myuvc_free_buffers()) < 0)
		goto done;

	/* a count of zero only frees; nothing more to do */
	if (buf_num == 0)
		goto done;

	/* retry with fewer buffers until the allocation succeeds */
	for (; buf_num > 0; --buf_num) {
		mem = vmalloc_32(buf_num * buf_size_align);
		if (mem != NULL)
			break;
	}

	if (mem == NULL) {
		ret = -ENOMEM;
		goto done;
	}

	memset(&s_myuvc_queue, 0, sizeof(s_myuvc_queue));
	
	/*!
	 * initialise the mainqueue and irqqueue list heads
     */
    INIT_LIST_HEAD(&s_myuvc_queue.mainqueue);
    INIT_LIST_HEAD(&s_myuvc_queue.irqqueue);
    
    /*!
	 * The buffers live inside one contiguous allocation; each buffer's
	 * m.offset locates it within that region (also used by mmap()).
	 */
	for (i = 0; i < buf_num; ++i) {
		s_myuvc_queue.buffer[i].buf.index     = i;
		s_myuvc_queue.buffer[i].buf.m.offset  = i * buf_size_align;
		s_myuvc_queue.buffer[i].buf.length    = buf_size_unalign;
		s_myuvc_queue.buffer[i].buf.type      = V4L2_BUF_TYPE_VIDEO_CAPTURE;
		s_myuvc_queue.buffer[i].buf.sequence  = 0;
		s_myuvc_queue.buffer[i].buf.field     = V4L2_FIELD_NONE;
		s_myuvc_queue.buffer[i].buf.memory    = V4L2_MEMORY_MMAP;
		s_myuvc_queue.buffer[i].buf.flags     = 0;

        /* buffer starts out idle (owned by nobody) */
		s_myuvc_queue.buffer[i].state         = VIDEOBUF_IDLE;

        /* readers will sleep on this queue until the buffer is filled */
		init_waitqueue_head(&s_myuvc_queue.buffer[i].wait);
	}

	s_myuvc_queue.mem = mem;
	s_myuvc_queue.count = buf_num;
	s_myuvc_queue.buf_size = buf_size_align;
	ret = buf_num;

done:
	return ret;
}

/*!
 * @brief  Step8: report the state, size and mmap offset of one buffer so
 *         the application can map it with mmap().
 *         Reference: uvc_query_buffer()
 * @return 0 on success, negative on error
 */
static int myuvc_vidioc_querybuf(struct file *file, 
								void *priv, struct v4l2_buffer *v4l2_buf)
{
    int ret = 0;

    if (v4l2_buf->index >= s_myuvc_queue.count) {
        ret = -EINVAL;
        goto done;
    }

    /* copy the stored per-buffer information out to the caller */
    memcpy(v4l2_buf, &s_myuvc_queue.buffer[v4l2_buf->index].buf,
                sizeof(*v4l2_buf));

    /*!
     * report MAPPED if the buffer currently has an mmap() mapping
     */
    if (s_myuvc_queue.buffer[v4l2_buf->index].vma_use_count)
        v4l2_buf->flags |= V4L2_BUF_FLAG_MAPPED;

    /*!
     * translate the internal videobuf state into v4l2 buffer flags
     */
	switch (s_myuvc_queue.buffer[v4l2_buf->index].state) {
	case VIDEOBUF_ERROR:
	case VIDEOBUF_DONE:
		v4l2_buf->flags |= V4L2_BUF_FLAG_DONE;
		break;
	case VIDEOBUF_QUEUED:
	case VIDEOBUF_ACTIVE:
		v4l2_buf->flags |= V4L2_BUF_FLAG_QUEUED;
		break;
	case VIDEOBUF_IDLE:
	default:
		break;
    }

done:
	return ret;
}

/* vma open hook: one more mapping references this buffer */
static void myuvc_vm_open(struct vm_area_struct *vma)
{
    MYUVC_BUFFER_S *b = vma->vm_private_data;

    b->vma_use_count += 1;
}

/* vma close hook: one mapping of this buffer went away */
static void myuvc_vm_close(struct vm_area_struct *vma)
{
    MYUVC_BUFFER_S *b = vma->vm_private_data;

    b->vma_use_count -= 1;
}

static struct vm_operations_struct myuvc_vm_ops = {
    .open       = myuvc_vm_open,
    .close      = myuvc_vm_close,
};

/*!
 * @brief  Step9: map one capture buffer into the application's address
 *         space so it can read frame data directly.
 *         Reference: uvc_v4l2_mmap()
 * @return 0 on success, negative on error
 */
static int myuvc_mmap(struct file *file, struct vm_area_struct *vma)
{
    MYUVC_BUFFER_S *buffer;
    struct page *page;
    unsigned long addr, start, size;
    unsigned int i;
    int ret = 0;

    /* start address and length of the requested mapping */
    start = vma->vm_start;
    size = vma->vm_end - vma->vm_start;

    /*!
     * The application passes the buffer's m.offset as the mmap offset;
     * use it to locate the matching buffer.
     */
    for (i = 0; i < s_myuvc_queue.count; ++i) {
        buffer = &s_myuvc_queue.buffer[i];

        /* buffer offset (in pages) must equal the vma's page offset */
        if ((buffer->buf.m.offset >> PAGE_SHIFT) == vma->vm_pgoff)
            break;
    }

    /* no matching buffer, or the mapping size does not match */
    if (i == s_myuvc_queue.count || size != s_myuvc_queue.buf_size) {
        ret = -EINVAL;
        goto done;
    }

    /*
    * VM_IO marks the area as being an mmaped region for I/O to a
    * device. It also prevents the region from being core dumped.
    */
    vma->vm_flags |= VM_IO;

    /*!
     * Walk the buffer page by page, resolving each vmalloc'd page and
     * inserting it into the user mapping.
     */
    addr = (unsigned long)s_myuvc_queue.mem + buffer->buf.m.offset;
    while (size > 0) {
        page = vmalloc_to_page((void *)addr);

        /* map this page at the next user virtual address */
        if ((ret = vm_insert_page(vma, start, page)) < 0)
            goto done;

        start += PAGE_SIZE;
        addr += PAGE_SIZE;
        size -= PAGE_SIZE;
    }

    vma->vm_ops = &myuvc_vm_ops;
    vma->vm_private_data = buffer;
    myuvc_vm_open(vma);

done:
    return ret;
}

/*!
 * @brief  Step10: queue an empty buffer so the completion handler can fill
 *         it with frame data.
 *         Reference: uvc_queue_buffer()
 * @return 0 on success, negative on error
 */
static int myuvc_vidioc_qbuf(struct file *file, 
								void *priv, struct v4l2_buffer *v4l2_buf)
{
    int ret = 0;
    struct myuvc_buffer *buf;

    /*!
     * only mmap-backed video-capture buffers are accepted
     */
    if (v4l2_buf->type != V4L2_BUF_TYPE_VIDEO_CAPTURE ||
	    v4l2_buf->memory != V4L2_MEMORY_MMAP)	    
		return -EINVAL;

        
    /*!
     * index must refer to one of the buffers allocated in reqbufs
     */
    if (v4l2_buf->index >= s_myuvc_queue.count) {
		ret = -EINVAL;
		goto done;
	}

    buf = &s_myuvc_queue.buffer[v4l2_buf->index];
    if (buf->state != VIDEOBUF_IDLE) { 
        ret = -EINVAL;
        goto done;
    }
        
    /*!
     * mark the buffer queued and empty
     */
    buf->state = VIDEOBUF_QUEUED;
    buf->buf.bytesused = 0;

    /*!
     * Queue 1 (mainqueue): consumed by the application.
     * Empty buffers wait here; the application dequeues them once they
     * have been filled.
     */
	list_add_tail(&buf->stream, &s_myuvc_queue.mainqueue);

    /*!
     * Queue 2 (irqqueue): used by the data producer.
     * The URB completion handler takes the first buffer from this queue
     * and copies incoming payload data into it.
     */
	list_add_tail(&buf->irq, &s_myuvc_queue.irqqueue);

done:
	return ret;
}

/*!
 * @brief  Dump the negotiated streaming parameters to the kernel log.
 *         (fixed: the last line printed bMinVersion twice instead of
 *         bMaxVersion)
 */
static void myuvc_print_streaming_params(MYUVC_STREAMING_CONTROL_S *ctrl)
{
        printk("video params:\n");
        printk("bmHint                   = %d\n", ctrl->bmHint);
        printk("bFormatIndex             = %d\n", ctrl->bFormatIndex);
        printk("bFrameIndex              = %d\n", ctrl->bFrameIndex);
        printk("dwFrameInterval          = %d\n", ctrl->dwFrameInterval);
        printk("wKeyFrameRate            = %d\n", ctrl->wKeyFrameRate);
        printk("wPFrameRate              = %d\n", ctrl->wPFrameRate);
        printk("wCompQuality             = %d\n", ctrl->wCompQuality);
        printk("wCompWindowSize          = %d\n", ctrl->wCompWindowSize);
        printk("wDelay                   = %d\n", ctrl->wDelay);
        printk("dwMaxVideoFrameSize      = %d\n", ctrl->dwMaxVideoFrameSize);
        printk("dwMaxPayloadTransferSize = %d\n", ctrl->dwMaxPayloadTransferSize);
        printk("dwClockFrequency         = %d\n", ctrl->dwClockFrequency);
        printk("bmFramingInfo            = %d\n", ctrl->bmFramingInfo);
        printk("bPreferedVersion         = %d\n", ctrl->bPreferedVersion);
        printk("bMinVersion              = %d\n", ctrl->bMinVersion);
        printk("bMaxVersion              = %d\n", ctrl->bMaxVersion);
}

/*!
 * @brief  Read the current streaming parameters from the device
 *         (GET_CUR on the VideoStreaming probe control) and unpack the
 *         little-endian wire format into @ctrl.
 *         Reference: uvc_get_video_ctrl()
 * @return 0 on success, negative error code otherwise
 */
static int myuvc_get_streaming_params(MYUVC_STREAMING_CONTROL_S *ctrl)
{
    __u8 *data;
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
	__u16 size;
	int ret = 0;
    unsigned int pipe;  /**< control pipe */

    /*!
     * the control packet is 34 bytes from UVC 1.1 on, 26 bytes before
     */
	size = uvc_version >= 0x0110 ? 34 : 26;
	data = kmalloc(size, GFP_KERNEL);
	if (data == NULL)
		return -ENOMEM;

    /* pick the control pipe matching the request direction */
    pipe = (GET_CUR & 0x80) ? usb_rcvctrlpipe(s_myuvc_udev, 0)
                  : usb_sndctrlpipe(s_myuvc_udev, 0);

    /* request-type direction bit */
    type |= (GET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    /* use the named timeout instead of the magic 300 */
    ret = usb_control_msg(s_myuvc_udev, pipe, GET_CUR, type, VS_PROBE_CONTROL << 8,
            0 << 8 | s_myuvc_streaming_intf, data, size,
            MYUVC_CTRL_CONTROL_TIMEOUT);
    if (ret < 0)
        goto done;

    /* unpack the little-endian fields */
    ctrl->bmHint = le16_to_cpup((__le16 *)&data[0]);
	ctrl->bFormatIndex = data[2];
	ctrl->bFrameIndex = data[3];
	ctrl->dwFrameInterval = le32_to_cpup((__le32 *)&data[4]);
	ctrl->wKeyFrameRate   = le16_to_cpup((__le16 *)&data[8]);
	ctrl->wPFrameRate     = le16_to_cpup((__le16 *)&data[10]);
	ctrl->wCompQuality    = le16_to_cpup((__le16 *)&data[12]);
	ctrl->wCompWindowSize = le16_to_cpup((__le16 *)&data[14]);
	ctrl->wDelay = le16_to_cpup((__le16 *)&data[16]);
	ctrl->dwMaxVideoFrameSize = get_unaligned_le32(&data[18]);
	ctrl->dwMaxPayloadTransferSize = get_unaligned_le32(&data[22]);

	if (size == 34) {
		ctrl->dwClockFrequency = get_unaligned_le32(&data[26]);
		ctrl->bmFramingInfo = data[30];
		ctrl->bPreferedVersion = data[31];
		ctrl->bMinVersion = data[32];
		ctrl->bMaxVersion = data[33];
	} else {
	    //ctrl->dwClockFrequency = video->dev->clock_frequency;
		ctrl->bmFramingInfo = 0;
		ctrl->bPreferedVersion = 0;
		ctrl->bMinVersion = 0;
		ctrl->bMaxVersion = 0;
	}

done:
    kfree(data);
    return (ret < 0) ? ret : 0;
}

/*!
 * @brief  Probe the device with our desired streaming parameters
 *         (SET_CUR on the VideoStreaming probe control) to check support.
 *         Reference: uvc_v4l2_try_format()/uvc_set_video_ctrl()
 * @return 0 on success, negative error code otherwise
 */
static int myuvc_try_streaming_params(MYUVC_STREAMING_CONTROL_S *ctrl)
{
    __u8 *data;
    __u16 size;
    int ret;
	__u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
	unsigned int pipe;
    
	memset(ctrl, 0, sizeof *ctrl);
    
	ctrl->bmHint = 1;	/* keep dwFrameInterval fixed */
	ctrl->bFormatIndex = 1;
	ctrl->bFrameIndex  = s_frame_idx + 1;   /* descriptor frame indices are 1-based */
	ctrl->dwFrameInterval = 333333;         /* 30 fps in 100 ns units */

    /* 34 bytes from UVC 1.1 on, 26 bytes before */
    size = uvc_version >= 0x0110 ? 34 : 26;
    data = kzalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    /*!
     * pack the control block into its little-endian wire format
     */
    *(__le16 *)&data[0] = cpu_to_le16(ctrl->bmHint);
    data[2] = ctrl->bFormatIndex;
    data[3] = ctrl->bFrameIndex;
    *(__le32 *)&data[4] = cpu_to_le32(ctrl->dwFrameInterval);
    *(__le16 *)&data[8] = cpu_to_le16(ctrl->wKeyFrameRate);
    *(__le16 *)&data[10] = cpu_to_le16(ctrl->wPFrameRate);
    *(__le16 *)&data[12] = cpu_to_le16(ctrl->wCompQuality);
    *(__le16 *)&data[14] = cpu_to_le16(ctrl->wCompWindowSize);
    *(__le16 *)&data[16] = cpu_to_le16(ctrl->wDelay);
    put_unaligned_le32(ctrl->dwMaxVideoFrameSize, &data[18]);
    put_unaligned_le32(ctrl->dwMaxPayloadTransferSize, &data[22]);

    if (size == 34) {
        put_unaligned_le32(ctrl->dwClockFrequency, &data[26]);
        data[30] = ctrl->bmFramingInfo;
        data[31] = ctrl->bPreferedVersion;
        data[32] = ctrl->bMinVersion;
        data[33] = ctrl->bMaxVersion;
    }

    pipe = (SET_CUR & 0x80) ? usb_rcvctrlpipe(s_myuvc_udev, 0)
                  : usb_sndctrlpipe(s_myuvc_udev, 0);
    type |= (SET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    /*!
     * send it to the probe control (named timeout instead of magic 300)
     */
    ret = usb_control_msg(s_myuvc_udev, pipe, SET_CUR, type, VS_PROBE_CONTROL << 8,
            0 << 8 | s_myuvc_streaming_intf, data, size,
            MYUVC_CTRL_CONTROL_TIMEOUT);

    kfree(data);
    
    return (ret < 0) ? ret : 0;
}

/*!
 * @brief  Commit the negotiated streaming parameters
 *         (SET_CUR on the VideoStreaming commit control).
 *         Reference: uvc_v4l2_try_format()/uvc_set_video_ctrl()
 * @return 0 on success, negative error code otherwise
 */
static int myuvc_set_streaming_params(MYUVC_STREAMING_CONTROL_S *ctrl)
{
    __u8 *data;
    __u16 size;
    int ret;
	__u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
	unsigned int pipe;

    /* 34 bytes from UVC 1.1 on, 26 bytes before */
    size = uvc_version >= 0x0110 ? 34 : 26;
    data = kzalloc(size, GFP_KERNEL);
    if (data == NULL)
        return -ENOMEM;

    /*!
     * pack the control block into its little-endian wire format
     */
    *(__le16 *)&data[0] = cpu_to_le16(ctrl->bmHint);
    data[2] = ctrl->bFormatIndex;
    data[3] = ctrl->bFrameIndex;
    *(__le32 *)&data[4] = cpu_to_le32(ctrl->dwFrameInterval);
    *(__le16 *)&data[8] = cpu_to_le16(ctrl->wKeyFrameRate);
    *(__le16 *)&data[10] = cpu_to_le16(ctrl->wPFrameRate);
    *(__le16 *)&data[12] = cpu_to_le16(ctrl->wCompQuality);
    *(__le16 *)&data[14] = cpu_to_le16(ctrl->wCompWindowSize);
    *(__le16 *)&data[16] = cpu_to_le16(ctrl->wDelay);
    put_unaligned_le32(ctrl->dwMaxVideoFrameSize, &data[18]);
    put_unaligned_le32(ctrl->dwMaxPayloadTransferSize, &data[22]);

    if (size == 34) {
        put_unaligned_le32(ctrl->dwClockFrequency, &data[26]);
        data[30] = ctrl->bmFramingInfo;
        data[31] = ctrl->bPreferedVersion;
        data[32] = ctrl->bMinVersion;
        data[33] = ctrl->bMaxVersion;
    }

    pipe = (SET_CUR & 0x80) ? usb_rcvctrlpipe(s_myuvc_udev, 0)
                  : usb_sndctrlpipe(s_myuvc_udev, 0);
    type |= (SET_CUR & 0x80) ? USB_DIR_IN : USB_DIR_OUT;

    /*!
     * send it to the commit control (named timeout instead of magic 300)
     */
    ret = usb_control_msg(s_myuvc_udev, pipe, SET_CUR, type, VS_COMMIT_CONTROL << 8,
            0 << 8 | s_myuvc_streaming_intf, data, size,
            MYUVC_CTRL_CONTROL_TIMEOUT);

    kfree(data);
    
    return (ret < 0) ? ret : 0;
}

/*!
 * @brief  Release the URB transfer buffers and URB structures.  Called on
 *         allocation failure and on stream shutdown.
 */
static void myuvc_uninit_urbs(void)
{
    int i;

    for (i = 0; i < MYUVC_URBS; i++) {
        if (s_myuvc_queue.urb_buffer[i]) {
            /* fixed: pass the buffer pointer itself, not its address —
             * usb_free_coherent() expects the CPU address returned by
             * usb_alloc_coherent() */
            usb_free_coherent(s_myuvc_udev, s_myuvc_queue.urb_size,
                s_myuvc_queue.urb_buffer[i], s_myuvc_queue.urb_dma[i]);
            
            s_myuvc_queue.urb_buffer[i] = NULL;
        }
        
        if (s_myuvc_queue.urb[i]) {
            usb_free_urb(s_myuvc_queue.urb[i]);
            s_myuvc_queue.urb[i] = NULL;
        }
    }
}

/*!
 * @brief  URB completion handler.  Takes the first free buffer from the
 *         irqqueue and appends every valid isochronous payload (with its
 *         header stripped) to it, then resubmits the URB.
 *         Packets inside one URB may belong to different video frames, so
 *         the FID bit of each payload header is used to detect the start
 *         of a new frame and to complete the previous buffer.
 *         Reference: uvc_video_complete()
 */
static void myuvc_video_complete(struct urb *urb)
{
    u8 *src;
    u8 *dest;
    int i;
    int ret;
    int len;
    int nbytes;         /**< number of payload bytes copied into the buffer */
    int maxlen;         /**< remaining capacity of the destination buffer */
    static int fid;     /**< frame id (FID bit) of the payload being handled */
    MYUVC_BUFFER_S *buf;

    /* scratch pointer used when patching the JPEG header in place */
    unsigned char *point_mem;
    static unsigned char *mem_temp = NULL;

    /* current size of the temporary patch buffer */
    static unsigned int nArrayTemp_Size = 1000;
    /* NOTE(review): mem_temp is never freed anywhere — it leaks when the
     * module is unloaded; consider releasing it on stream shutdown. */

	switch (urb->status) {
	case 0:
		break;

	default:
		printk("Non-zero status (%d) in video "
			"completion handler.\n", urb->status);
        return ;
	}

    /* take the first free buffer from the irqqueue */
	if (!list_empty(&s_myuvc_queue.irqqueue))
		buf = list_first_entry(&s_myuvc_queue.irqqueue, MYUVC_BUFFER_S, irq);
    else
        buf = NULL;
    
    /*!
     * Walk every isochronous packet of this URB; each packet carries one
     * payload (header + data).  Valid payload data is appended to the
     * current capture buffer.
     */
    for (i = 0; i < urb->number_of_packets; ++i) {
	    if (urb->iso_frame_desc[i].status < 0) {
		    printk("USB isochronous frame "
			    "lost (%d).\n", urb->iso_frame_desc[i].status);
		    continue;
	    }

        /* source: this packet inside the URB transfer buffer */
        src  = urb->transfer_buffer + urb->iso_frame_desc[i].offset;

        /* number of bytes actually received in this packet */
        len  = urb->iso_frame_desc[i].actual_length;
        
        /*!
         * src[0] is the payload header length;
         * drop malformed payloads
         */
        if (len < 2 || src[0] < 2 || src[0] > len)
            continue;

        /*!
         * src[1] carries the status flags;
         * skip payloads flagged with an error
         */
        if (src[1] & UVC_STREAM_ERR) {
            printk("Dropping payload (error bit set).\n");
            continue;
        }

        /*!
         * Derive the FID.  The ip2970/ip2977 bridge (vendor 0x1B3B) does
         * not toggle the FID bit itself, so a JPEG SOI marker inside the
         * data is used to emulate the toggle.
         * NOTE(review): descriptor.idVendor is little-endian on the wire;
         * this bare comparison assumes a little-endian host — confirm.
         */
        if (s_myuvc_udev->descriptor.idVendor == 0x1B3B) {
            /* have data in buffer */
            if ( len >= 16 )  {
                /* payload data starts at src[12]; earlier bytes are header */
                if ( (src[12]==0xFF && src[13]==0xD8 && src[14]==0xFF) ||
                    (src[12]==0xD8 && src[13]==0xFF && src[14]==0xC4)) {
                    if(s_last_fid)
                        fid &= ~UVC_STREAM_FID;
                    else
                        fid |= UVC_STREAM_FID;
                }
            }
        } else {
            fid = src[1] & UVC_STREAM_FID;
        }

        /*!
         * no free buffer available: just track the FID and move on
         */
        if (buf == NULL) {
		    s_last_fid = fid;
		    continue;
	    }

        /*!
         * VIDEOBUF_ACTIVE means the buffer is currently receiving data
         */
        if (buf->state != VIDEOBUF_ACTIVE) /* not receiving yet */ {   
            /* a new frame must start with a toggled FID */
            if (fid == s_last_fid)
            	continue;
            
            /* first payload of a new frame: mark the buffer active */
            buf->state = VIDEOBUF_ACTIVE;
        }

        /*!
         * FID toggled while the buffer already holds data: the previous
         * frame is complete — unlink the buffer and wake up readers
         */
        if (fid != s_last_fid && buf->buf.bytesused != 0) {
            buf->state = VIDEOBUF_DONE;

            /* remove from irqqueue and wake up the waiting process */
            list_del(&buf->irq);
            wake_up(&buf->wait);

            /* continue with the next free buffer, if any */
            if (!list_empty(&s_myuvc_queue.irqqueue))
                buf = list_first_entry(&s_myuvc_queue.irqqueue, MYUVC_BUFFER_S, irq);
            else
                buf = NULL;

            continue;
        }

        s_last_fid = fid;   /**< remember the current FID */
        
        /* destination: first unused byte of the capture buffer */
        dest = s_myuvc_queue.mem + buf->buf.m.offset + buf->buf.bytesused;
        
        len -= src[0];  /**< payload length after stripping the header */
    	maxlen = buf->buf.length - buf->buf.bytesused;
    	nbytes = min(len, maxlen);

        /*!
         * copy the payload data (header stripped) into the buffer,
         * clamped to the space that is left
         */
        memcpy(dest, src + src[0], nbytes);

        buf->buf.bytesused += nbytes;   /**< account for the copied bytes */

        /*!
         * ip2970/ip2977 quirk: re-insert the missing leading 0xFF of the
         * JPEG SOI marker.
         * Fixed: allocations here must use GFP_ATOMIC — a URB completion
         * handler runs in interrupt context where GFP_KERNEL may sleep.
         * NOTE(review): the kmalloc results are still unchecked before use.
         */
        if (s_myuvc_udev->descriptor.idVendor == 0x1B3B)
        {
            if(mem_temp == NULL)
                mem_temp = kmalloc(nArrayTemp_Size, GFP_ATOMIC);

            /* grow the scratch buffer when the payload got bigger */
            else if(nArrayTemp_Size <= nbytes) { 
                kfree(mem_temp);
                nArrayTemp_Size += 500;
                mem_temp = kmalloc(nArrayTemp_Size, GFP_ATOMIC);
            }
            
            memset(mem_temp, 0x00, nArrayTemp_Size);
            /* point at the data just written */
            point_mem = (unsigned char *)dest;
            if( *(point_mem) == 0xD8 && *(point_mem + 1) == 0xFF && *(point_mem + 2) == 0xC4){
                memcpy( mem_temp + 1, point_mem, nbytes);
                mem_temp[0] = 0xFF;
                memcpy( point_mem, mem_temp, nbytes + 1);
            }
        }
        
        /*!
         * payload larger than the remaining space: force-complete
         */
        if (len > maxlen) {
            printk("Frame complete (overflow). \n");
            buf->state = VIDEOBUF_DONE;
        }
        
        /*!
         * EOF flag set and the buffer holds data: the frame is complete
         */
        if (src[1] & UVC_STREAM_EOF && buf->buf.bytesused != 0) {
            printk("Frame complete (EOF found).\n");

            /* EOF arrived inside an empty payload */
            if (len == 0)
                printk("EOF in empty payload.\n");

            /* mark the buffer done */
            buf->state = VIDEOBUF_DONE;
        }

        /*!
         * Buffer finished (done or error): unlink it, wake up readers and
         * continue with the next free buffer.
         * Fixed: this used to compare against UVC_BUF_STATE_DONE/ERROR
         * (uvcvideo.h enum) while every other site in this driver uses the
         * videobuf enum — use VIDEOBUF_DONE/VIDEOBUF_ERROR consistently.
         */
        if (buf->state == VIDEOBUF_DONE ||
            buf->state == VIDEOBUF_ERROR) {
            list_del(&buf->irq);
            wake_up(&buf->wait);

            /* take the next free buffer from the irqqueue */
            if (!list_empty(&s_myuvc_queue.irqqueue))
                buf = list_first_entry(&s_myuvc_queue.irqqueue, MYUVC_BUFFER_S, irq);
            else
                buf = NULL;
        }
    }

    /*!
     * resubmit the URB so streaming continues
     */
    if ((ret = usb_submit_urb(urb, GFP_ATOMIC)) < 0) {
		printk("Failed to resubmit video URB (%d).\n", ret);
	}
}

/*!
 * @brief  Allocate and initialise the isochronous URBs.
 *         Reference: uvc_init_video_isoc()
 * @return 0 on success, -ENOMEM on allocation failure
 */
static int myuvc_alloc_init_urbs(void)
{
    u16 psize;
    u32 size;
    int i, j;
    int npackets;
    struct urb *urb;

    psize = s_wMaxPackSie;
    size  = s_myuvc_params.dwMaxVideoFrameSize; /**< max length of one video frame */
    npackets = DIV_ROUND_UP(size, psize);        /**< packets needed per URB */

    /*!
     * Fixed: clamp the packet count BEFORE sizing the transfer buffer.
     * The original computed size first, so after clamping the buffer
     * length no longer matched number_of_packets.
     */
    if (npackets > MYMAX_PACKETS_NUM)
        npackets = MYMAX_PACKETS_NUM;

    /*!
     * total size of all packets in one urb_buffer
     */
    size  = s_myuvc_queue.urb_size = psize * npackets;

    /*!
     * Allocate the urb_buffers (DMA-coherent data buffers) and the
     * urb structures that will describe them.
     */
	for (i = 0; i < MYUVC_URBS; ++i) {
		s_myuvc_queue.urb_buffer[i] = usb_alloc_coherent(
			s_myuvc_udev, size, GFP_KERNEL | __GFP_NOWARN, 
			   &s_myuvc_queue.urb_dma[i]);

        s_myuvc_queue.urb[i]  = usb_alloc_urb(npackets, GFP_KERNEL);

        if (!s_myuvc_queue.urb_buffer[i] || !s_myuvc_queue.urb[i]) {
            myuvc_uninit_urbs();
            return -ENOMEM;
        }
    }

    /*!
     * initialise each urb
     */
    for (i = 0; i < MYUVC_URBS; ++i) {
        urb = s_myuvc_queue.urb[i];
        
        urb->dev = s_myuvc_udev;
        urb->context = NULL;
        urb->pipe = usb_rcvisocpipe(s_myuvc_udev, s_bEndpointAddress);
        urb->transfer_flags = URB_ISO_ASAP | URB_NO_TRANSFER_DMA_MAP;
        urb->interval = s_bInterval;
        urb->transfer_buffer = s_myuvc_queue.urb_buffer[i];
        urb->transfer_dma = s_myuvc_queue.urb_dma[i];
        urb->complete = myuvc_video_complete;
        urb->number_of_packets = npackets;
        urb->transfer_buffer_length = size;

        /* per-packet offsets and lengths inside the transfer buffer */
        for (j = 0; j < npackets; ++j) {
            urb->iso_frame_desc[j].offset = j * psize;
            urb->iso_frame_desc[j].length = psize;
        }
   } 
    
    return 0;
}

/*!
 * @brief  Step11：启动数据传输
 *         参考：uvc_video_enable()-->uvc_commit_video()/uvc_init_video()
 * @return 0：成功
 */
static int myuvc_vidioc_streamon(struct file *file, 
									void *priv, enum v4l2_buf_type t)
{
    int i;
    int ret = 0;
    
    /*！
     * 1. 向USB摄像头设置参数: 比如使用哪个format, 使用这个format下的哪个frame(分辨率) 
     * 参考: uvc_set_video_ctrl / uvc_get_video_ctrl
     * 1.1 根据一个结构体uvc_streaming_control设置数据包: 可以手工设置,也可以读出后再修改
     * 1.2 调用usb_control_msg发出数据包
     */
     
    /* a.测试参数 */
    ret = myuvc_try_streaming_params(&s_myuvc_params);
    if (ret < 0)
        printk("myuvc_try_streaming_params ret : %d\n", ret);

    /* b.取出参数 */
    ret = myuvc_get_streaming_params(&s_myuvc_params);
    if (ret < 0)
        printk("myuvc_get_streaming_params ret : %d\n", ret);

    /* c.设置参数 */
    ret = myuvc_set_streaming_params(&s_myuvc_params);
    if (ret < 0)
        printk("myuvc_set_streaming_params ret : %d\n", ret);

    /* d.打印参数 */
    myuvc_print_streaming_params(&s_myuvc_params);

    /*!
     * e. 设置VideoStreaming Interface所使用的setting
     * e.1 从myuvc_params确定带宽
     * e.2 根据setting的endpoint能传输的wMaxPacketSize
     *     找到能满足该带宽的setting
     */
    /*!
     * 手工确定:
     * bandwidth = s_myuvc_params.dwMaxPayloadTransferSize = 800
     * 观察lsusb -v -d 0x1e4e:的结果:
     *                wMaxPacketSize     0x0320  1x 1024 bytes
     * bAlternateSetting       5
     */
    usb_set_interface(s_myuvc_udev, s_myuvc_streaming_intf, 
                                        s_myuvc_streaming_setting);
    
    /*!
     * 2. 分配设置URB
     */
    myuvc_alloc_init_urbs();
    
    /*!
     * 3. 提交URB以接收数据
     */
    for (i = 0; i < UVC_URBS; ++i) {
		if ((ret = usb_submit_urb(s_myuvc_queue.urb[i], GFP_KERNEL)) < 0) {
			printk("Failed to submit URB %u (%d).\n", i, ret);
			myuvc_uninit_urbs();
        
			return ret;
		}
	}

	return 0;
}

/*!
 * @brief  Step12: poll/select support — report whether the oldest queued
 *         buffer has data ready for the application.
 *         Reference: uvc_v4l2_poll()
 */
static unsigned int myuvc_poll(struct file *file, 
									struct poll_table_struct *wait)
{
    MYUVC_BUFFER_S *buf;
	unsigned int mask = 0;

    /* nothing queued at all: signal an error condition */
	if (list_empty(&s_myuvc_queue.mainqueue)) {
		mask |= POLLERR;
		goto done;
	}

    /* look at the first (oldest) buffer on the mainqueue */
	buf = list_first_entry(&s_myuvc_queue.mainqueue, MYUVC_BUFFER_S, stream);

    /* register this buffer's wait queue with the poll table */
	poll_wait(file, &buf->wait, wait);
    
	if (buf->state == VIDEOBUF_DONE ||
	    buf->state == VIDEOBUF_ERROR)
		mask |= POLLIN | POLLRDNORM;

done:
	return mask;
}


/*!
 * @brief  Step13: after poll/select reports data, dequeue the first filled
 *         buffer from the mainqueue and hand its description to the app.
 *         Reference: uvc_dequeue_buffer()
 */
static int myuvc_vidioc_dqbuf(struct file *file,
								void *priv, struct v4l2_buffer *v4l2_buf)
{
    int ret = 0;
    MYUVC_BUFFER_S *buf;

    /*!
     * nothing queued: nothing to dequeue
     */
    if (list_empty(&s_myuvc_queue.mainqueue)) {
        printk("[E] Empty buffer queue.\n");
        ret = -EINVAL;
        goto done;
    }

    /*!
     * take the first (oldest) buffer from the queue
     */
    buf = list_first_entry(&s_myuvc_queue.mainqueue, MYUVC_BUFFER_S, stream);

   /*!
    * only DONE (or ERROR, which still returns the buffer with -EIO)
    * buffers may be dequeued
    */
    switch (buf->state) {
    case VIDEOBUF_ERROR:    /**< completed with error: report -EIO */
        ret = -EIO;
        /* fall through - the buffer is still returned to the app */

    case VIDEOBUF_DONE:     /**< completed: back to idle */
        buf->state =VIDEOBUF_IDLE;
        break;

    /*!
     * still owned by the driver
     */
    case VIDEOBUF_IDLE:
    case VIDEOBUF_QUEUED:
    case VIDEOBUF_ACTIVE:
        
    default:
        ret = -EINVAL;
        goto done;
    }

    /*!
     * unlink the buffer from the mainqueue
     */
    list_del(&buf->stream);

    /*!
     * copy the buffer description (offset, bytesused, ...) to the caller
     */
    memcpy(v4l2_buf, &buf->buf, sizeof *v4l2_buf);

done:
    return ret;
}

/*!
 * @brief  Step14：APP已经mmap映射缓存，可直接读数据
 *         Step15：再次调用myuvc_vidioc_dqbuf()，把缓存尾插法放入队列
 *         Step16：在其调用myuvc_poll()
 */

/*!
 * @brief  Step17: stop the camera data transfer when streaming is switched
 *         off.
 *         Reference: uvc_video_enable(dev, 0)
 */
static int myuvc_vidioc_streamoff(struct file *file,
					void *priv, enum v4l2_buf_type t)
{
	unsigned int n;

	/*!
	 * Cancel every in-flight URB and wait for its completion handler
	 * to finish before dropping our reference to it.
	 */
	for (n = 0; n < MYUVC_URBS; ++n) {
		struct urb *cur = s_myuvc_queue.urb[n];

		if (cur == NULL)
			continue;

		usb_kill_urb(cur);
		s_myuvc_queue.urb[n] = NULL;
	}

	/*!
	 * Free the URBs and their transfer buffers.
	 */
	myuvc_uninit_urbs();

	/*!
	 * Select altsetting 0 of the VideoStreaming interface
	 * (zero bandwidth), which stops the isochronous stream.
	 */
	usb_set_interface(s_myuvc_udev, s_myuvc_streaming_intf, 0);

	return 0;
}


/*!
 * @brief  Decode a little-endian 16-bit control value (used for the
 *         brightness control) from a UVC control payload buffer and
 *         sign-extend it to 32 bits.
 *         Reference: uvc_get_le_value()
 */
static __s32 myuvc_get_le_value(const __u8 *data)
{
    const int total_bits = 16;
    int remaining = total_bits;
    int shift = 0;
    __s32 result = 0;
    __u8 mask;

    data += shift / 8;
    shift &= 7;
    mask = ((1LL << remaining) - 1) << shift;

    while (remaining > 0) {
        __u8 chunk = *data & mask;

        result |= shift > 0 ? (chunk >> shift) : (chunk << (-shift));
        remaining -= 8 - (shift > 0 ? shift : 0);
        shift -= 8;
        mask = (1 << remaining) - 1;
        data++;
    }

    /* Sign-extend from bit 15 so negative control values decode correctly. */
    result |= -(result & (1 << (total_bits - 1)));

    return result;
}

/*!
 * @brief  Encode a 16-bit control value into a little-endian UVC control
 *         payload buffer (low byte first).
 *         Reference: uvc_set_le_value()
 */
static void myuvc_set_le_value(__s32 value, __u8 *data)
{
	const int total_bits = 16;
	int remaining = total_bits;
	int shift = 0;
	__u8 mask;

	data += shift / 8;
	shift &= 7;

	while (remaining > 0) {
		mask = ((1LL << remaining) - 1) << shift;
		*data = (*data & ~mask) | ((value << shift) & mask);
		value >>= shift ? shift : 8;
		remaining -= 8 - shift;
		shift = 0;
		data++;
	}
}

/*!
 * @brief  Query the brightness control: issue USB control transfers to read
 *         the minimum, maximum, resolution (step) and default values from
 *         the camera's Processing Unit.
 *         Reference: uvc_query_v4l2_ctrl()
 *
 * Fix: the request type must keep USB_TYPE_CLASS | USB_RECIP_INTERFACE and
 * only OR in the direction bit.  The previous plain assignment
 * (type = USB_DIR_IN) produced bmRequestType 0x80 — a standard request —
 * instead of the UVC class-specific 0xA1, so the GET_* requests were
 * malformed.
 */
static int myuvc_vidioc_queryctrl(struct file * file,
                            void * fh, struct v4l2_queryctrl * ctrl)
{
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;
    int ret;
    u8 data[2];

    /* Only the brightness control is supported. */
    if (ctrl->id != V4L2_CID_BRIGHTNESS)
        return -EINVAL;

    /*!
     * Describe the control to user space.
     */
    memset(ctrl, 0, sizeof *ctrl);
    strcpy(ctrl->name, "MYUVC_BRIGHTNESS");
    ctrl->id = V4L2_CID_BRIGHTNESS;
    ctrl->type = V4L2_CTRL_TYPE_INTEGER;
    ctrl->flags = 0;

    /*!
     * Values are read from the device: IN direction on the default
     * control pipe.
     */
    pipe = usb_rcvctrlpipe(s_myuvc_udev, 0);
    type |= USB_DIR_IN;     /* fix: was "type = USB_DIR_IN" */

    /*!
     * GET_MIN: minimum brightness from the Processing Unit on the
     * VideoControl interface.
     */
    ret = usb_control_msg(s_myuvc_udev, pipe, GET_MIN, type, PU_BRIGHTNESS_CONTROL << 8,
                s_PU_ID << 8 | s_myuvc_control_intf,
                data, 2, MYUVC_CTRL_CONTROL_TIMEOUT);
    if (ret != 2)
        return -EIO;
    ctrl->minimum = myuvc_get_le_value(data);      /**< decode LE16 payload */

    /*!
     * GET_MAX: maximum brightness.
     */
    ret = usb_control_msg(s_myuvc_udev, pipe, GET_MAX, type, PU_BRIGHTNESS_CONTROL << 8,
                s_PU_ID << 8 | s_myuvc_control_intf, data, 2, MYUVC_CTRL_CONTROL_TIMEOUT);
    if (ret != 2)
        return -EIO;
    ctrl->maximum = myuvc_get_le_value(data);

    /*!
     * GET_RES: adjustment step.
     */
    ret = usb_control_msg(s_myuvc_udev, pipe, GET_RES, type, PU_BRIGHTNESS_CONTROL << 8,
                s_PU_ID << 8 | s_myuvc_control_intf, data, 2, MYUVC_CTRL_CONTROL_TIMEOUT);
    if (ret != 2)
        return -EIO;
    ctrl->step = myuvc_get_le_value(data);

    /*!
     * GET_DEF: power-on default brightness.
     */
    ret = usb_control_msg(s_myuvc_udev, pipe, GET_DEF, type, PU_BRIGHTNESS_CONTROL << 8,
                s_PU_ID << 8 | s_myuvc_control_intf, data, 2, MYUVC_CTRL_CONTROL_TIMEOUT);
    if (ret != 2)
        return -EIO;
    ctrl->default_value = myuvc_get_le_value(data);

    printk("Brightness: min =%d, max = %d, step = %d, default = %d\n", 
                ctrl->minimum, ctrl->maximum, ctrl->step, ctrl->default_value);

    return 0;
}

/*!
 * @brief  Get the current brightness: issue a GET_CUR control transfer to
 *         the camera's Processing Unit and decode the result.
 *         Reference: uvc_ctrl_get()
 *
 * Fixes: (1) keep the class/recipient bits in the request type and only OR
 * in the direction (was "type = USB_DIR_IN", producing bmRequestType 0x80
 * instead of 0xA1); (2) return 0 on success — V4L2 ioctl handlers must not
 * return the positive transfer length.
 */
static int myuvc_vidioc_g_ctrl(struct file * file, 
                            void * fh, struct v4l2_control * ctrl)
                            
{
    __u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;
    int ret = 0;
    u8 data[2];

    /* Only the brightness control is supported. */
    if (ctrl->id != V4L2_CID_BRIGHTNESS)
        return -EINVAL;

    /*!
     * Value is read from the device: IN direction on the default
     * control pipe.
     */
    pipe = usb_rcvctrlpipe(s_myuvc_udev, 0);
    type |= USB_DIR_IN;     /* fix: was "type = USB_DIR_IN" */

    /*!
     * GET_CUR: current brightness from the Processing Unit.
     */
    ret = usb_control_msg(s_myuvc_udev, pipe, GET_CUR, type, PU_BRIGHTNESS_CONTROL << 8,
			    s_PU_ID << 8 | s_myuvc_control_intf, data, 2, MYUVC_CTRL_CONTROL_TIMEOUT);
    if (ret != 2)
        return -EIO;
    ctrl->value = myuvc_get_le_value(data);      /**< decode LE16 payload */

    return 0;   /* fix: was "return ret" (== 2) on success */
}
                            
/*!
 * @brief  Set the brightness: encode the requested value and send it to the
 *         camera's Processing Unit with a SET_CUR control transfer.
 *         Reference: uvc_ctrl_set()/uvc_ctrl_commit()
 *
 * Fixes: (1) keep the class/recipient bits in the request type — the
 * previous "type = USB_DIR_OUT" cleared them, yielding bmRequestType 0x00
 * instead of the UVC class-specific 0x21; (2) return 0 on success instead
 * of the positive transfer length.
 */
static int myuvc_vidioc_s_ctrl(struct file * file,
                            void * fh, struct v4l2_control * ctrl)
{
    u8 type = USB_TYPE_CLASS | USB_RECIP_INTERFACE;
    unsigned int pipe;
    int ret = 0;
    u8 data[2];

    /* Only the brightness control is supported. */
    if (ctrl->id != V4L2_CID_BRIGHTNESS)
        return -EINVAL;

    /*!
     * Encode the value as a little-endian 16-bit payload.
     */
    myuvc_set_le_value(ctrl->value, data);

    /*!
     * Value is written to the device: OUT direction on the default
     * control pipe.
     */
    pipe = usb_sndctrlpipe(s_myuvc_udev, 0);
    type |= USB_DIR_OUT;    /* fix: was "type = USB_DIR_OUT" (cleared class bits) */

    /*!
     * SET_CUR: program the new brightness.
     */
    ret = usb_control_msg(s_myuvc_udev, pipe, SET_CUR, type, PU_BRIGHTNESS_CONTROL << 8,
			    s_PU_ID << 8 | s_myuvc_control_intf, data, 2, MYUVC_CTRL_CONTROL_TIMEOUT);
    if (ret != 2)
        return -EIO;

    return 0;   /* fix: was "return ret" (== 2) on success */
}

/*!
 * Supported ioctl handlers.
 */
static const struct v4l2_ioctl_ops myuvc_ioctl_ops = {
	// Identifies this node as a video capture (camera) device
	.vidioc_querycap      = myuvc_vidioc_querycap,

	/* Enumerate / get / try / set the capture data format */
	.vidioc_enum_fmt_vid_cap  = myuvc_vidioc_enum_fmt_vid_cap,
	.vidioc_g_fmt_vid_cap     = myuvc_vidioc_g_fmt_vid_cap,
	.vidioc_try_fmt_vid_cap   = myuvc_vidioc_try_fmt_vid_cap,
	.vidioc_s_fmt_vid_cap     = myuvc_vidioc_s_fmt_vid_cap,

	/* Buffer handling: request / query / queue / dequeue */
	.vidioc_reqbufs       = myuvc_vidioc_reqbufs,
	.vidioc_querybuf      = myuvc_vidioc_querybuf,
	.vidioc_qbuf          = myuvc_vidioc_qbuf,
	.vidioc_dqbuf         = myuvc_vidioc_dqbuf,

    /* Query / get / set controls (brightness only) */
    .vidioc_queryctrl     = myuvc_vidioc_queryctrl,
    .vidioc_g_ctrl        = myuvc_vidioc_g_ctrl,
    .vidioc_s_ctrl        = myuvc_vidioc_s_ctrl,

	/* Start / stop streaming */
	.vidioc_streamon      = myuvc_vidioc_streamon,
	.vidioc_streamoff     = myuvc_vidioc_streamoff,   
};

/* File operations for the video device node; ioctls are dispatched
 * through video_ioctl2 to myuvc_ioctl_ops. */
static const struct v4l2_file_operations myuvc_fops = {
	.owner			= THIS_MODULE,
    .open       	= myuvc_open,
    .release    	= myuvc_close,
    .mmap       	= myuvc_mmap,
    .unlocked_ioctl = video_ioctl2,
    .poll       	= myuvc_poll,
};

/* video_device release callback: intentionally empty — the structure is
 * freed explicitly with video_device_release() in myuvc_disconnect(), but
 * registration requires a non-NULL release hook. */
static void myuvc_release(struct video_device *vdev)
{}

static int myuvc_probe(struct usb_interface *intf,
		     const struct usb_device_id *id)
{	

    int ret = 0;

    /* 1. 根据probe函数的参数intf获取usb_device接口并存储在s_myuvc_udev参数中 */
    struct usb_device *dev = interface_to_usbdev(intf);   
    s_myuvc_udev = dev;
	printk("myuvc_probe : cnt = %d\n", s_cnt++);

    /* 2. 根据probe的调用次数，获取对应的数据流和控制接口号 */
    if (s_cnt == 1) {
        s_myuvc_control_intf = intf->cur_altsetting->desc.bInterfaceNumber;
    } else if (s_cnt == 2) {
        s_myuvc_streaming_intf = intf->cur_altsetting->desc.bInterfaceNumber;
    }

	/*!
	 * myuvc_probe()调用第二次后执行
	 */
    if (s_cnt == 2) {
		/* 1、分配一个video_device结构体 */
		s_myuvc_vdev = video_device_alloc();

		/* 2、设置 */
		/* 注册过程需要用到release，必须设置 */
		s_myuvc_vdev->release     = myuvc_release;
		s_myuvc_vdev->fops        = &myuvc_fops;
		s_myuvc_vdev->ioctl_ops	  = &myuvc_ioctl_ops;

		/*! 
		 * 3、注册结构体 
		 * -1 - 自动分配次设备号
		 */
		video_register_device(s_myuvc_vdev, VFL_TYPE_GRABBER, -1);

       
       // myuvc_vidioc_streamon(NULL, NULL, 0);
	}
	
    return 0;
}
			 
static void myuvc_disconnect(struct usb_interface *intf)
{
	s_cnt = 0;

	printk("myuvc_disconnect : cnt = %d\n", s_cnt++);
	
	/*!
	 * myuvc_disconnect()调用第二次后执行
	 */
	if (s_cnt == 2) {
		/* 注销结构体 */
		video_unregister_device(s_myuvc_vdev);
		
		/* 释放结构体 */
		video_device_release(s_myuvc_vdev);		
	}

    s_cnt = 0;
}

/* USB driver glue: matches devices listed in myuvc_ids and binds each
 * matching interface through myuvc_probe(). */
struct usb_driver myuvc_driver = {
	.name		= "myuvcvideo",
	.probe		= myuvc_probe,
	.disconnect	= myuvc_disconnect,
	.id_table	= myuvc_ids,
};
   
/* Module entry point: register the USB driver with the USB core. */
static int myuvc_init(void)
{
	int ret = usb_register(&myuvc_driver);

	if (ret != 0)
		printk("USB register error!\n");

	return ret;
}

/* Module exit point: unregister the USB driver. */
static void myuvc_cleanup(void)
{	
	usb_deregister(&myuvc_driver);
}

/* Module registration and license declaration. */
module_init(myuvc_init);
module_exit(myuvc_cleanup);
MODULE_LICENSE("GPL");
