// DXGI + D3D11 Desktop Duplication -> NV12 GPU conversion -> save raw NV12
// Compile with: d3d11.lib dxgi.lib

#include <windows.h>
#include <d3d11.h>
#include <dxgi1_2.h>
#include <wrl/client.h>

#include <cstdint>
#include <fstream>
#include <stdio.h>
#include <string>
#include <vector>

using namespace Microsoft::WRL;

#pragma comment(lib, "d3d11.lib")
#pragma comment(lib, "dxgi.lib")

// Number of desktop frames to capture and dump to disk.
#define FRAME_COUNT 10

// Process-wide capture state, created once at startup and shared by all
// helpers (single-threaded use only; nothing here is synchronized).
ComPtr<ID3D11Device> gDevice;                       // D3D11 device (InitD3D11)
ComPtr<ID3D11DeviceContext> gContext;               // immediate context (InitD3D11)
ComPtr<ID3D11VideoDevice> gVideoDevice;             // video interface queried from gDevice
ComPtr<ID3D11VideoContext> gVideoContext;           // video interface queried from gContext
ComPtr<ID3D11VideoProcessorEnumerator> gVPEnum;     // video-processor enumerator (InitVideoProcessor)
ComPtr<ID3D11VideoProcessor> gVP;                   // processor performing the NV12 conversion blt
ComPtr<ID3D11VideoProcessorOutputView> gOutputView; // output view bound to gNV12Tex
ComPtr<ID3D11Texture2D> gNV12Tex;                   // DXGI_FORMAT_NV12 destination texture

// Creates the hardware D3D11 device/immediate context and queries the
// video interfaces needed for the NV12 conversion.
// Returns false if device creation or any interface query fails.
bool InitD3D11() {
    D3D_FEATURE_LEVEL level;
    // BGRA support is requested because desktop duplication surfaces are
    // BGRA-format textures.
    if (FAILED(D3D11CreateDevice(
        nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
        D3D11_CREATE_DEVICE_BGRA_SUPPORT, nullptr, 0,
        D3D11_SDK_VERSION, &gDevice, &level, &gContext))) return false;

    // Bug fix: these QueryInterface results were ignored. On a device
    // without video support the globals stayed null and every later
    // gVideoDevice/gVideoContext call would crash.
    if (FAILED(gDevice.As(&gVideoDevice))) return false;
    if (FAILED(gContext.As(&gVideoContext))) return false;
    return true;
}

// Creates an IDXGIOutputDuplication for the primary output of the adapter
// that owns gDevice and reports the duplicated mode's dimensions via
// width/height. Returns a null ComPtr (and zeroed dimensions) on failure.
ComPtr<IDXGIOutputDuplication> InitDuplication(UINT& width, UINT& height) {
    width = 0;
    height = 0;

    // Bug fix: every HRESULT below was ignored; in particular a failed
    // DuplicateOutput (E_ACCESSDENIED on the secure desktop, unsupported
    // session, etc.) left `duplication` null and GetDesc crashed.
    ComPtr<IDXGIDevice> dxgiDevice;
    if (FAILED(gDevice.As(&dxgiDevice))) return nullptr;

    ComPtr<IDXGIAdapter> adapter;
    if (FAILED(dxgiDevice->GetAdapter(&adapter))) return nullptr;

    // Output 0 = primary monitor.
    ComPtr<IDXGIOutput> output;
    if (FAILED(adapter->EnumOutputs(0, &output))) return nullptr;

    // Duplication requires the IDXGIOutput1 interface (DXGI 1.2+).
    ComPtr<IDXGIOutput1> output1;
    if (FAILED(output.As(&output1))) return nullptr;

    ComPtr<IDXGIOutputDuplication> duplication;
    if (FAILED(output1->DuplicateOutput(gDevice.Get(), &duplication))) return nullptr;

    DXGI_OUTDUPL_DESC duplDesc;
    duplication->GetDesc(&duplDesc);
    width = duplDesc.ModeDesc.Width;
    height = duplDesc.ModeDesc.Height;

    return duplication;
}

// Sets up the GPU color-conversion pipeline: a video-processor enumerator
// and processor for a progressive, same-size width x height conversion,
// the NV12 destination texture, and the output view bound to it.
// Returns false as soon as any creation call fails.
bool InitVideoProcessor(UINT width, UINT height) {
    // Describe the conversion: progressive frames, no scaling.
    D3D11_VIDEO_PROCESSOR_CONTENT_DESC contentDesc = {};
    contentDesc.InputFrameFormat = D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE;
    contentDesc.InputWidth   = width;
    contentDesc.InputHeight  = height;
    contentDesc.OutputWidth  = width;
    contentDesc.OutputHeight = height;
    contentDesc.Usage = D3D11_VIDEO_USAGE_PLAYBACK_NORMAL;

    HRESULT hr = gVideoDevice->CreateVideoProcessorEnumerator(&contentDesc, &gVPEnum);
    if (FAILED(hr)) return false;

    // Rate-conversion capability index 0 is sufficient for a plain blt.
    hr = gVideoDevice->CreateVideoProcessor(gVPEnum.Get(), 0, &gVP);
    if (FAILED(hr)) return false;

    // NV12 destination the processor writes into each frame.
    D3D11_TEXTURE2D_DESC nv12Desc = {};
    nv12Desc.Width            = width;
    nv12Desc.Height           = height;
    nv12Desc.MipLevels        = 1;
    nv12Desc.ArraySize        = 1;
    nv12Desc.Format           = DXGI_FORMAT_NV12;
    nv12Desc.SampleDesc.Count = 1;
    nv12Desc.Usage            = D3D11_USAGE_DEFAULT;
    nv12Desc.BindFlags        = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;

    hr = gDevice->CreateTexture2D(&nv12Desc, nullptr, &gNV12Tex);
    if (FAILED(hr)) return false;

    // Output view over mip 0 of the NV12 texture.
    D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC viewDesc = {};
    viewDesc.ViewDimension      = D3D11_VPOV_DIMENSION_TEXTURE2D;
    viewDesc.Texture2D.MipSlice = 0;

    hr = gVideoDevice->CreateVideoProcessorOutputView(
        gNV12Tex.Get(), gVPEnum.Get(), &viewDesc, &gOutputView);
    return SUCCEEDED(hr);
}

// Copies an NV12 GPU texture to a CPU-readable staging texture and writes
// the tightly-packed planes (width*height bytes of Y, then width*height/2
// bytes of interleaved UV) to "frame_%04u.nv12". Failures are skipped
// silently (void return, matching the original contract).
void SaveNV12(ID3D11Texture2D* nv12Tex, UINT width, UINT height, UINT frameIdx) {
    D3D11_TEXTURE2D_DESC desc = {};
    nv12Tex->GetDesc(&desc);

    // Reuse the source description, changed to a CPU-readable staging copy.
    desc.Usage = D3D11_USAGE_STAGING;
    desc.BindFlags = 0;
    desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
    desc.MiscFlags = 0;

    // Bug fix: creation and Map results were ignored; a failed Map left
    // mapped.pData null and the memcpy loops dereferenced it.
    ComPtr<ID3D11Texture2D> stagingTex;
    if (FAILED(gDevice->CreateTexture2D(&desc, nullptr, &stagingTex))) return;

    gContext->CopyResource(stagingTex.Get(), nv12Tex);

    D3D11_MAPPED_SUBRESOURCE mapped = {};
    if (FAILED(gContext->Map(stagingTex.Get(), 0, D3D11_MAP_READ, 0, &mapped))) return;

    const UINT ySize = width * height;
    const UINT uvSize = width * height / 2; // UV plane is half height, full width
    std::vector<uint8_t> data(ySize + uvSize);

    const BYTE* src = static_cast<const BYTE*>(mapped.pData);
    // Strip the driver's row padding: each source row is RowPitch bytes
    // wide but only `width` bytes are payload. In a mapped NV12 staging
    // texture the UV plane follows the Y plane, starting at row `height`
    // of the same RowPitch-strided allocation.
    for (UINT y = 0; y < height; ++y) {
        memcpy(data.data() + y * width, src + y * mapped.RowPitch, width);
    }
    for (UINT y = 0; y < height / 2; ++y) {
        memcpy(data.data() + ySize + y * width, src + (height + y) * mapped.RowPitch, width);
    }

    gContext->Unmap(stagingTex.Get(), 0);

    char filename[64];
    // Bug fix: frameIdx is unsigned, so use %u instead of %d.
    sprintf_s(filename, "frame_%04u.nv12", frameIdx);
    std::ofstream ofs(filename, std::ios::binary);
    if (ofs) {
        ofs.write(reinterpret_cast<const char*>(data.data()),
                  static_cast<std::streamsize>(data.size()));
    }
    // ofstream flushes and closes in its destructor (RAII).
}

int main() {
    if (!InitD3D11()) return -1;

    UINT width = 0, height = 0;
    auto duplication = InitDuplication(width, height);
    if (!duplication) return -1;

    if (!InitVideoProcessor(width, height)) return -1;

    DXGI_OUTDUPL_FRAME_INFO frameInfo = {};
    ComPtr<IDXGIResource> desktopResource;
    ComPtr<ID3D11Texture2D> acquiredTex;

    for (UINT i = 0; i < FRAME_COUNT; ++i) {
        HRESULT hr = duplication->AcquireNextFrame(100, &frameInfo, &desktopResource);
        if (FAILED(hr)) continue;

        desktopResource.As(&acquiredTex);

        ComPtr<ID3D11VideoProcessorInputView> inputView;
        D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC ivDesc = {};
        ivDesc.ViewDimension = D3D11_VPIV_DIMENSION_TEXTURE2D;
        ivDesc.Texture2D.ArraySlice = 0;

        gVideoDevice->CreateVideoProcessorInputView(
            acquiredTex.Get(), gVPEnum.Get(), &ivDesc, &inputView);

        D3D11_VIDEO_PROCESSOR_STREAM stream = {};
        stream.Enable = TRUE;
        stream.pInputSurface = inputView.Get();

        gVideoContext->VideoProcessorBlt(
            gVP.Get(), gOutputView.Get(), 0, 1, &stream);

        SaveNV12(gNV12Tex.Get(), width, height, i);

        duplication->ReleaseFrame();
    }

    return 0;
}
