﻿#include "InputDevice.h"
#include "CameraTool.h"

#pragma comment(lib,"strmiids.lib")

/** Constructs the capture wrapper, creates the underlying DShow device
 *  and registers this instance as the library's log sink. */
InputDevice::InputDevice()
    : _index(false)
    , _device(new DShow::Device())
{
    // Route library diagnostics through our static log handler.
    DShow::SetLogCallback(InputDevice::onLog, this);
}

/** Releases the underlying DShow device. */
InputDevice::~InputDevice()
{
    // delete on a null pointer is a no-op, so no guard is needed.
    delete _device;
    _device = nullptr;
}

/**
 * Configures the capture graph for the requested devices.
 *
 * @param videoDevice  camera to capture from, or nullptr to skip video.
 * @param videoInfo    resolution capability to use (minCX/minCY are taken);
 *                     both videoDevice and videoInfo must be non-null for
 *                     video to be configured.
 * @param audioDevice  microphone to capture from, or nullptr to skip audio.
 *
 * Resets the existing graph, installs the member buffer callbacks, and
 * reconnects the filters. Frame data subsequently arrives through
 * onVideoBuffer / onAudioBuffer.
 */
void InputDevice::setup(DShow::VideoDevice* videoDevice, DShow::VideoInfo* videoInfo, DShow::AudioDevice* audioDevice)
{
    _device->ResetGraph();

    if (videoDevice && videoInfo)
    {
        DShow::VideoProc function = std::bind(
            &InputDevice::onVideoBuffer, this,
            std::placeholders::_1, std::placeholders::_2, std::placeholders::_3,
            std::placeholders::_4, std::placeholders::_5, std::placeholders::_6);

        DShow::VideoConfig videoConfig;

        videoConfig.name = videoDevice->name;
        videoConfig.path = videoDevice->path;

        videoConfig.useDefaultConfig = false;

        videoConfig.callback = function;
        // The minimum supported resolution of the chosen capability is used.
        videoConfig.cx = videoInfo->minCX;
        videoConfig.cy_abs = videoInfo->minCY;
        videoConfig.format = DShow::VideoFormat::Any;

        _device->SetVideoConfig(&videoConfig);
    }

    // Guard against devices that report no capabilities: calling front()
    // on an empty vector is undefined behavior.
    if (audioDevice && !audioDevice->caps.empty())
    {
        DShow::AudioProc function = std::bind(
            &InputDevice::onAudioBuffer, this,
            std::placeholders::_1, std::placeholders::_2, std::placeholders::_3,
            std::placeholders::_4, std::placeholders::_5);

        DShow::AudioInfo& audioInfoItem = audioDevice->caps.front();
        DShow::AudioConfig audioConfig;

        audioConfig.name = audioDevice->name;
        audioConfig.path = audioDevice->path;

        audioConfig.useDefaultConfig = false;

        audioConfig.callback = function;
        // Pick the richest settings the first capability advertises.
        audioConfig.sampleRate = audioInfoItem.maxSampleRate;
        audioConfig.channels = audioInfoItem.maxChannels;

        _device->SetAudioConfig(&audioConfig);
    }

    _device->ConnectFilters();
}

/** Registers the consumer callback fired after each video frame is cached. */
void InputDevice::setVideoCallBack(VideoCallBack& callBack)
{
    _videoCallBack = callBack;
}

/** Registers the consumer callback that receives raw audio buffers. */
void InputDevice::setAudioCallBack(AudioCallBack& callBack)
{
    _audioCallBack = callBack;
}

/** Starts capturing on the configured graph. */
void InputDevice::start()
{
    _device->Start();
}

/** Stops capturing on the configured graph. */
void InputDevice::stop()
{
    _device->Stop();
}

/** Returns a reference to the cached frame slot selected by _index.
 *  NOTE(review): onVideoBuffer writes _buffer[_index] and THEN flips
 *  _index, so this accessor returns the slot that will be written next
 *  (i.e. the previous frame, and the slot the producer is about to
 *  overwrite) rather than the frame just produced — confirm this
 *  double-buffer orientation is intentional before relying on it. */
std::shared_ptr<Device::VideoBuffer>& InputDevice::buffer()
{
    return _buffer[_index];
}

/** Enumerates the available camera (video capture) devices.
 *  @param devices  filled with the discovered devices.
 *  @return true on success. */
bool InputDevice::EnumVideoDevices(std::vector<DShow::VideoDevice>& devices)
{
    const bool ok = DShow::Device::EnumVideoDevices(devices);
    return ok;
}

/** Enumerates the available microphone (audio capture) devices.
 *  @param devices  filled with the discovered devices.
 *  @return true on success. */
bool InputDevice::EnumAudioDevices(std::vector<DShow::AudioDevice>& devices)
{
    const bool ok = DShow::Device::EnumAudioDevices(devices);
    return ok;
}

/** Camera frame callback: converts the incoming frame to YUV420, stores it
 *  in the double buffer and notifies the registered consumer.
 *
 *  @param config    active video configuration (dimensions, format, interval).
 *  @param data      raw frame bytes from the driver.
 *  @param size      byte length of data.
 *  @param startTime / stopTime  driver timestamps, passed through unchanged.
 *  @param rotationt frame rotation (currently unused).
 */
void InputDevice::onVideoBuffer(const DShow::VideoConfig& config, unsigned char* data,
    size_t size, long long startTime, long long stopTime, long rotationt)
{
    if (!_videoCallBack)
    {
        return;
    }

    // Own the frame header from the start so no path — including the
    // unsupported-format default branch, which previously leaked the raw
    // `new Device::VideoBuffer()` — can leak it.
    std::shared_ptr<Device::VideoBuffer> videoBuffer = std::make_shared<Device::VideoBuffer>();
    videoBuffer->width = config.cx;
    videoBuffer->height = config.cy_abs;
    videoBuffer->startTime = startTime;
    videoBuffer->stopTime = stopTime;
    videoBuffer->frameInterval = config.frameInterval;

    switch (config.format)
    {
    case DShow::VideoFormat::YUY2:
    {
        int width = config.cx;
        int height = config.cy_abs;

        unsigned char* yuvData = (unsigned char*)malloc(size);
        yuyv422_to_yuv420(data, yuvData, width, height);

        // Write into the cache. The pixel buffer came from malloc, so it
        // must be released with free — the default shared_ptr deleter would
        // call delete on it, which is undefined behavior.
        videoBuffer->type = Device::ImageType::YUV420;
        videoBuffer->buffer = std::shared_ptr<unsigned char>(yuvData, free);

        _buffer[_index] = videoBuffer;
        _index = !_index;
        break;
    }
    case DShow::VideoFormat::MJPEG:
    {
        int width = config.cx;
        int height = config.cy_abs;

        // Worst-case output size for the decoded image (YUV422 = w*h*2).
        unsigned char* yuvData = (unsigned char*)malloc(width * height * 2);
        // Sentinel byte placed past the YUV420 payload (w*h*1.5): if the
        // decoder overwrites it, it produced more data than YUV420 (i.e.
        // YUV422) and a conversion is needed.
        // NOTE(review): this probing trick assumes JpegToYuv fills the
        // buffer contiguously from the start, and that it does not
        // reallocate the buffer through the out-parameter (if it did,
        // `flag` would dangle) — confirm against the decoder.
        unsigned char* flag = yuvData + int(width * height * 1.7);

        if (flag)
        {
            *flag = 'a';
        }

        // Decode the JPEG frame into yuvData.
        JpegToYuv(data, size, &yuvData, width, height);

        if (flag && *flag != 'a')
        {
            // Sentinel was clobbered: decoder emitted YUV422 — convert.
            unsigned char* yuv420Data = (unsigned char*)malloc(width * height * 2);
            yuv422_to_yuv420(yuvData, yuv420Data, width, height);
            free(yuvData);
            // malloc'd memory is paired with free, never delete.
            videoBuffer->buffer = std::shared_ptr<unsigned char>(yuv420Data, free);
        }
        else
        {
            videoBuffer->buffer = std::shared_ptr<unsigned char>(yuvData, free);
        }

        // Publish into the cache.
        videoBuffer->type = Device::ImageType::YUV420;

        _buffer[_index] = videoBuffer;
        _index = !_index;
        break;
    }
    default:
        // Unsupported pixel format: the frame is dropped. videoBuffer is
        // released automatically by the shared_ptr.
        break;
    }

    // Notify the consumer (fires even for unsupported formats, preserving
    // the original behavior).
    _videoCallBack();
}

/** Microphone data callback: forwards the raw audio buffer and its
 *  timestamps straight to the registered consumer, if any. */
void InputDevice::onAudioBuffer(const DShow::AudioConfig& config, unsigned char* data, 
    size_t size, long long startTime, long long stopTime)
{
    if (_audioCallBack)
    {
        _audioCallBack(config, data, size, startTime, stopTime);
    }
}

/** Log callback: prints DShow library diagnostics to stdout.
 *  @param type   severity/category reported by the library.
 *  @param msg    wide-character log message.
 *  @param param  context pointer registered in the constructor (unused). */
void InputDevice::onLog(DShow::LogType type, const wchar_t* msg, void* param)
{
    (void)param; // registered context is not needed here
    // Cast the enum explicitly: passing an enum (especially a scoped one)
    // through printf's varargs and formatting it with %d is not portable.
    printf("LogType:%d, message:%ls\n", static_cast<int>(type), msg);
}
