﻿#include "webcamframesource.h"

#include <stdio.h>
#include <fcntl.h>
#include <sys/syscall.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

#define VIDEO_WIDTH 640
#define VIDEO_HEIGHT 480
#define FRAME_PER_SEC 10.0

// Return the caller's kernel thread id (Linux-specific); used to tag log lines.
// NOTE(review): glibc >= 2.30 declares its own gettid(); this local definition
// may clash on newer toolchains — confirm the target glibc version.
pid_t gettid()
{
    const long tid = syscall(SYS_gettid);
    return static_cast<pid_t>(tid);
}

// Construct the frame source and open the webcam.  The process is aborted
// outright if the device cannot be opened, since the stream is unusable.
WebcamFrameSource::WebcamFrameSource(UsageEnvironment &env)
    : FramedSource(env)
{
    fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);

    const int ok = capture_open("/dev/video0", VIDEO_WIDTH, VIDEO_HEIGHT, FRAME_PER_SEC);
    if (!ok) {
        envir() << __func__<< ": open /dev/video0 err\n";
        ::exit(-1);  // fatal: no camera, no stream
    }

    // No delayed-capture task is scheduled yet.
    m_started = 0;
    mp_token = 0;
}

// Tear down: cancel any pending capture task, then release the device.
WebcamFrameSource::~WebcamFrameSource()
{
    fprintf(stderr, "[%d] %s .... calling\n", gettid(), __func__);

    // A non-zero m_started means a delayed task is still queued; cancel it
    // before the object goes away.
    if (m_started)
        envir().taskScheduler().unscheduleDelayedTask(mp_token);

    // capture_close() sets vid to -1, so only close an open device.
    if (vid >= 0)
        capture_close();
}

// Open and configure the V4L2 capture device:
//   open -> query caps -> set H.264 format -> set fps -> request/mmap/queue
//   3 driver buffers -> STREAMON.
// Returns 1 on success, 0 on any failure (device closed again on failure).
int WebcamFrameSource::capture_open(const char *dev_name, int width, int height, int fps)
{
    // Open the camera device for read/write.
    vid = open(dev_name, O_RDWR);
    if (vid < 0) return 0;

    // Query device capabilities.
    // BUG FIX: the ioctl result used to be ignored, and the capability tests
    // were written as 'caps.capabilities & FLAG == 0', which parses as
    // 'caps.capabilities & (FLAG == 0)' because '==' binds tighter than '&'
    // — i.e. always false, so the checks never fired.  Both corrected here.
    v4l2_capability caps = {};
    if (ioctl(vid, VIDIOC_QUERYCAP, &caps) < 0) {
        envir() << __func__<< ": VIDIOC_QUERYCAP err\n";
        ::close(vid);
        return 0;
    }
    if ((caps.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) { // must support video capture
        envir() << __func__<< ": can't support video capture!\n";
        ::close(vid);
        return 0;
    }
    if ((caps.capabilities & V4L2_CAP_STREAMING) == 0) { // must support streaming I/O
        envir() << __func__<< ": can't support streaming mode\n";
        ::close(vid);
        return 0;
    }

    int rc;
    // Enumerate and print every pixel format the driver supports (debug aid).
    v4l2_fmtdesc fmt_desc = {};
    fmt_desc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    while ((rc = ioctl(vid, VIDIOC_ENUM_FMT, &fmt_desc)) >= 0) {
        fprintf(stderr, "\t support %s\n", fmt_desc.description);
        fmt_desc.index++;  // advance to the next format index
    }

    // Select the capture format: H.264 at the requested resolution.
    // BUG FIX: 'fmt' was previously left uninitialized before VIDIOC_S_FMT.
    v4l2_format fmt = {};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_H264;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    rc = ioctl(vid, VIDIOC_S_FMT, &fmt);
    if (rc < 0) {
        envir() << __func__<< ": VIDIOC_S_FMT err\n";
        ::close(vid);
        return 0;
    } else if (fmt.fmt.pix.pixelformat != V4L2_PIX_FMT_H264) {
        // The driver may silently substitute a different format; verify it.
        envir() << __func__<< ": can't support V4L2_PIX_FMT_H264\n";
        ::close(vid);
        return 0;
    }
    fprintf(stderr, "capture_width=%d, height=%d, bytesperline=%d\n", fmt.fmt.pix.width,
            fmt.fmt.pix.height, fmt.fmt.pix.bytesperline);

    // Request the desired frame rate: timeperframe = 1/fps seconds.
    v4l2_streamparm setfps = {};
    setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    setfps.parm.capture.timeperframe.numerator = 1;
    setfps.parm.capture.timeperframe.denominator = fps;
    rc = ioctl(vid, VIDIOC_S_PARM, &setfps);
    if (rc < 0) {
        envir() << __func__<< ": VIDIOC_S_PARM err\n";
        ::close(vid);
        return 0;
    }

    // Request 3 driver buffers for memory-mapped (MMAP) streaming I/O.
    // NOTE(review): bufs[] is declared in the header; it is assumed to hold
    // at least 3 entries, matching the hard-coded count here — confirm.
    v4l2_requestbuffers reqBufs = {};
    reqBufs.count = 3;
    reqBufs.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqBufs.memory = V4L2_MEMORY_MMAP;
    rc = ioctl(vid, VIDIOC_REQBUFS, &reqBufs);
    if (rc < 0) {
        envir() << __func__<< ": don't support MEMORY_MMAP mode!\n";
        ::close(vid);
        return 0;
    }
    fprintf(stderr, "%s: using MEMORY_MMAP mode, buf cnt=%d\n", __func__, reqBufs.count);

    // Map each driver buffer into our address space and enqueue it.
    for (int i = 0; i < 3; i++) {
        v4l2_buffer buf = {};
        buf.index = i;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if (ioctl(vid, VIDIOC_QUERYBUF, &buf) < 0) {
            envir() << __func__<< ": VIDIOC_QUERYBUF ERR\n";
            ::close(vid);
            return 0;
        }

        bufs[i].length = buf.length;
        bufs[i].start = mmap(NULL, buf.length, PROT_READ|PROT_WRITE,
                             MAP_SHARED, vid, buf.m.offset);
        if (bufs[i].start == MAP_FAILED) {
            envir() << __func__<< ": mmap() ERR\n";
            ::close(vid);
            return 0;
        }

        // Hand the (empty) buffer to the driver's incoming queue.
        if (ioctl(vid, VIDIOC_QBUF, &buf) < 0) {
            envir() << __func__<< ": VIDIOC_QBUF err\n";
            ::close(vid);
            return 0;
        }
    }

    // Start the capture stream.
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(vid, VIDIOC_STREAMON, &type) < 0) {
        envir() << __func__<< ": VIDIOC_STREAMON err\n";
        ::close(vid);
        return 0;
    }

    return 1;
}

// Stop the capture stream, unmap the 3 driver buffers, and close the device.
// Sets vid to -1 so the destructor knows the device is already closed.
void WebcamFrameSource::capture_close()
{
    // Stop streaming.
    int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl(vid, VIDIOC_STREAMOFF, &type) < 0) {
        // BUG FIX: this error path previously logged "VIDIOC_STREAMON err",
        // mislabeling a STREAMOFF failure.
        envir() << __func__<< ": VIDIOC_STREAMOFF err\n";
    }

    // Unmap the buffers mapped in capture_open().
    for (int i = 0; i < 3; i++) {
        munmap(bufs[i].start, bufs[i].length);
    }
    // Close the video device and mark it closed.
    ::close(vid);
    vid = -1;
}

// live555 entry point: schedule delivery of the next frame one frame period
// from now.  Re-entrant calls while a capture is pending are ignored.
void WebcamFrameSource::doGetNextFrame()
{
    if (m_started) return;  // a capture task is already queued
    m_started = 1;

    // One frame period in microseconds: (1000/fps) ms * 1000.
    const int frame_period_us = (int)((1000.0 / FRAME_PER_SEC) * 1000);

    mp_token = envir().taskScheduler().scheduleDelayedTask(frame_period_us,
            getNextFrame, this);
}

void WebcamFrameSource::getNextFrame(void *ptr)
{
    ((WebcamFrameSource*)ptr)->getNextFrame1();
}

// Dequeue one captured H.264 buffer, copy it into live555's output buffer
// (fTo), requeue the buffer, and notify the downstream object.
void WebcamFrameSource::getNextFrame1()
{
    // Pull a filled buffer off the driver's outgoing queue.
    // Zero-init the whole struct rather than relying on partial aggregate init.
    v4l2_buffer buf = {};
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (ioctl(vid, VIDIOC_DQBUF, &buf) < 0) {
        fprintf(stderr, "%s: VIDIOC_DQBUF err\n", __func__);
        m_started = 0;
        return;
    }

    // Stamp the frame and copy it out, truncating if it exceeds fMaxSize.
    gettimeofday(&fPresentationTime, 0);
    fFrameSize = buf.bytesused;
    if (fFrameSize > fMaxSize) {
        fNumTruncatedBytes = fFrameSize - fMaxSize;  // bytes we must drop
        fFrameSize = fMaxSize;
    }
    else {
        fNumTruncatedBytes = 0;
    }

    memmove(fTo, bufs[buf.index].start, fFrameSize);

    // The data has been copied out, so the buffer can go back to the driver
    // before we notify downstream.
    if (ioctl(vid, VIDIOC_QBUF, &buf) < 0) {
        fprintf(stderr, "%s: VIDIOC_QBUF err\n", __func__);
    }

    // BUG FIX: clear m_started BEFORE afterGetting(), and make afterGetting()
    // the very last action.  afterGetting() may synchronously re-enter
    // doGetNextFrame(); with the old order (afterGetting while m_started was
    // still 1) that re-entry was silently dropped and never rescheduled,
    // stalling the stream.
    m_started = 0;
    afterGetting(this);
}

// On-demand subsession for the webcam stream.  The 'True' argument tells
// live555 to reuse the first source for all clients (one camera, many viewers).
WebcamMediaSubsession::WebcamMediaSubsession(UsageEnvironment &env)
    : OnDemandServerMediaSubsession(env, True /* reuseFirstSource */),
      fAuxSDPLine(NULL),
      fDoneFlag(0),
      fDummyRTPSink(NULL)
{
    // All state lives in the initializer list.
}

WebcamMediaSubsession::~WebcamMediaSubsession()
{
    // fAuxSDPLine comes from strDup(), which allocates with new[];
    // delete[] of NULL is a safe no-op if it was never set.
    delete[] fAuxSDPLine;
}

void WebcamMediaSubsession::chkForAuxSDPLine(void *ptr)
{
    ((WebcamMediaSubsession *)ptr)->chkForAuxSDPLine1();
}

void WebcamMediaSubsession::chkForAuxSDPLine1 ()
{
    if (fDummyRTPSink->auxSDPLine()) {
        envir().taskScheduler().unscheduleDelayedTask(nextTask());
        fDoneFlag = 0xff;
    }
    else {
        int delay = 100*1000;	// 100ms
        nextTask() = envir().taskScheduler().scheduleDelayedTask(delay,
                chkForAuxSDPLine, this);
    }
}

// Produce the SDP "a=fmtp:..." line for the H.264 stream.  The SPS/PPS the
// line carries are only known after the sink has consumed some frames, so we
// start a dummy playback and block in a nested event loop until it appears.
// The result is cached in fAuxSDPLine for subsequent calls.
char const* WebcamMediaSubsession::getAuxSDPLine(RTPSink* rtpSink, FramedSource* inputSource)
{
    if (fAuxSDPLine) return fAuxSDPLine;  // already computed on a prior call

    fDummyRTPSink = rtpSink;
    // Begin streaming into the sink so it can discover SPS/PPS.
    fDummyRTPSink->startPlaying(*inputSource, NULL, NULL);
    //fDummyRTPSink->startPlaying(*inputSource, afterPlayingDummy, this);
    // Start the polling chain; it sets fDoneFlag once auxSDPLine() is ready.
    chkForAuxSDPLine1();

    // Blocks here until fDoneFlag becomes non-zero (set by the poller above).
    envir().taskScheduler().doEventLoop(&fDoneFlag);
    // Cache a heap copy; the dtor releases it with delete[].
    fAuxSDPLine = strDup(fDummyRTPSink->auxSDPLine());
    fDummyRTPSink->stopPlaying();

    return fAuxSDPLine;
}

// Build the RTP sink for this subsession: an H.264 sink on the given socket
// with the given RTP payload type.  'source' is not needed to build the sink.
RTPSink* WebcamMediaSubsession::createNewRTPSink(Groupsock *rtpsock, unsigned char type, FramedSource *source)
{
    (void)source;  // unused
    return H264VideoRTPSink::createNew(envir(), rtpsock, type);
}

// Build the per-stream source chain: raw V4L2 H.264 bytes from the webcam,
// wrapped in live555's H.264 framer (which splits the stream into NAL units).
FramedSource* WebcamMediaSubsession::createNewStreamSource (unsigned /*clientSessionId*/, unsigned& estBitrate)
{
    estBitrate = 500;  // bitrate estimate (kbps) reported to live555

    WebcamFrameSource *camera = new WebcamFrameSource(envir());
    return H264VideoStreamFramer::createNew(envir(), camera);
}
