#include <cstring>
#include "glad/glad.h"
#include <GL/glext.h>

#include <X11/Xlib.h>
#include <GL/glx.h>

#include <algorithm>
#include <cstdlib>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

#define XR_USE_PLATFORM_XLIB //target platform
#define XR_USE_GRAPHICS_API_OPENGL //graphics API to bind
#define XR_EXTENSION_PROTOTYPES
#include <openxr/openxr.h>
#include <openxr/openxr_platform.h>

#include <GLFW/glfw3.h>

//判断openxr接口调用情况
// Checks the result of an OpenXR call and throws std::runtime_error on failure.
// FIX: uses XR_FAILED() instead of `XR_SUCCESS != result`, so non-error success
// codes (e.g. XR_FRAME_DISCARDED, XR_SESSION_LOSS_PENDING) are not treated as
// errors. The expression is evaluated exactly once, the numeric XrResult is
// included in the message, and do/while(0) makes the macro statement-safe.
#define CALL_XR(result) do {                                          \
     XrResult callXrResult_ = (result);                               \
     if(XR_FAILED(callXrResult_)){                                    \
            std::string info;                                         \
            info += __FILE__;                                         \
            info += "[" + std::to_string(__LINE__) + "]";             \
            info += ": call openxr function error~";                  \
            info += " (XrResult=" + std::to_string(callXrResult_) + ")"; \
            throw std::runtime_error(info);                           \
     }                                                                \
} while(0)

// Window dimensions; also used as the swapchain image and viewport size.
const int WIDTH = 600;
const int HEIGHT = 400;
// Pass-through vertex shader: forwards position (attribute 0) and color (attribute 1).
static const char* vertexShaderSource =
        "#version 330 core\n"
        "layout(location = 0) in vec3 aPos;\n"
        "layout(location = 1) in vec4 aColor;\n"
        "out vec4 vertexColor;\n"
        "void main() {\n"
        "	gl_Position = vec4(aPos, 1.0);\n"
        "	vertexColor = aColor;\n"
        "}\n";

// Fragment shader: outputs the interpolated per-vertex color unchanged.
static const char* fragmentShaderSource =
        "#version 330 core\n"
        "in vec4 vertexColor;\n"
        "out vec4 FragColor;\n"
        "void main() {\n"
        "	FragColor = vertexColor;\n"
        "}\n";

// GL resources used to render the demo triangle.
struct Geometry {
    GLuint shaderProgramId; // linked shader program
    std::vector<GLuint> frameBuffers;// one framebuffer per view (see initGl)
    GLuint VAO;// vertex array object holding the triangle attributes
};

// --- global OpenXR / window / render state ---
XrInstance instance;
XrSystemId systemId;
XrSession session;
GLFWwindow* pWindow;
XrSpace appSpace;                                        // LOCAL reference space (createSpace)
std::vector<XrViewConfigurationView> configurationViews; // per-view config (resolution, samples)
std::vector<XrView> views;                               // filled by xrLocateViews each frame
std::vector<XrSwapchain> swapchains;                     // one swapchain per configuration view
std::vector<XrSwapchainImageOpenGLKHR> swapchainImages;  // GL images of the last-enumerated swapchain
std::vector<XrCompositionLayerProjectionView> projectionViews;
Geometry geometry;
// Intended to track the xrBeginSession/xrEndSession interval.
// NOTE(review): never assigned anywhere in this file — verify state handling.
bool isSessionRunning;
XrSessionState sessionState;

// Identity pose: identity quaternion (XrQuaternionf is {x,y,z,w}, so w = 1) at the origin.
XrPosef identity = {
        .orientation = {0, 0, 0, 1},
        .position = {0, 0, 0},
};
// Zero field of view; placeholder until real FOVs come from the runtime.
XrFovf identityFov = {
        .angleLeft = 0,
        .angleRight = 0,
        .angleUp = 0,
        .angleDown = 0,
};

// Top-level lifecycle: one-time setup, main loop, teardown.
void init();
void frame();
void cleanUp();

//init functions
void createInstance();
void initWindow(XrGraphicsBindingOpenGLXlibKHR* graphicsBinding);
void createSession();
void createSpace();
void createSwapchain();
void initViews();
void initGl();
//per-frame: event pump and rendering
void pollXrEvents(bool* quitLoop, bool* shouldRender);
void rendFrame();
void renderImage(int viewIndex, GLuint frameBuffer, const XrSwapchainImageOpenGLKHR& image);

/**
 * @param array candidate format list
 * @param arrayCount number of entries in the list
 * @param shouldChoose preferred format
 * @param fallback whether to fall back to the first entry when the preferred one is absent
 * @return the chosen format
 */
int64_t chooseFormat(const int64_t* array, uint32_t arrayCount, int64_t shouldChoose, bool fallback);
/**
 * Compiles one shader stage.
 * @param type shader type (GL_VERTEX_SHADER / GL_FRAGMENT_SHADER)
 * @param source GLSL source code
 * @return shader id; throws runtime_error on compile failure
 */
GLuint compileShader(GLuint type, const char* source);
/**
 * Builds the vertex array object for the demo triangle.
 * @param vao Vertex Array Object (output)
 */
void setVAO(GLuint* vao);
/**
 * Handles a session state change event.
 * @param event the event
 * @param quitLoop set to true when the main loop should exit
 * @param shouldRender set to whether rendering may proceed
 */
void onSessionStateChangedEvent(XrEventDataSessionStateChanged* event, bool* quitLoop, bool* shouldRender);

// Entry point: set up OpenXR/OpenGL, run the frame loop, then tear down.
// Exceptions thrown by CALL_XR / setup propagate and terminate the process.
int main() {
    std::cout << "Hello, OpenXR demo!" << std::endl;

    init();     // instance, session, space, swapchains, views, GL resources
    frame();    // loop until window close or XR requests exit
    cleanUp();  // destroy GL and XR objects

    return EXIT_SUCCESS;
}

// One-time setup sequence; order matters (instance before session,
// session before space/swapchains, swapchains before views/GL).
void init(){
    createInstance();
    createSession();   // also creates the window + GL context via initWindow()
    createSpace();
    createSwapchain();
    initViews();
    initGl();
}

// Main loop: pumps GLFW and OpenXR events, renders when allowed.
void frame(){
    bool quitLoop = false;     // set when the loop should end
    // FIX: was uninitialized — pollXrEvents only writes it when a session
    // state event arrives, so the first reads were undefined behavior.
    bool shouldRender = false; // whether the session state allows rendering

    while (!quitLoop) {
        glfwPollEvents();
        if(glfwWindowShouldClose(pWindow)) {
            quitLoop = true;
        }

        pollXrEvents(&quitLoop, &shouldRender);
        if(quitLoop){
            break; // exit the loop
        }

        // render when the session state permits it
        if(shouldRender) {
            rendFrame();
        }
    }
}

void cleanUp(){
    glDeleteVertexArrays(1, &geometry.VAO);
    glDeleteProgram(geometry.shaderProgramId);
    glDeleteFramebuffers(geometry.frameBuffers.size(), geometry.frameBuffers.data());

    for (const auto &item: swapchains) {
        xrDestroySwapchain(item);
    }
    xrDestroySpace(appSpace);
    glfwDestroyWindow(pWindow);
    glfwTerminate();
    xrDestroySession(session);
    xrDestroyInstance(instance);
}

void createInstance(){
    const char *extensions[] = { XR_KHR_OPENGL_ENABLE_EXTENSION_NAME };
    int extension_count = sizeof(extensions) / sizeof(extensions[0]);

    XrApplicationInfo applicationInfo = {
            .applicationName = "hello openxr",
            .applicationVersion = 1,
            .engineName = "no engine",
            .engineVersion = 0,
            .apiVersion = XR_CURRENT_API_VERSION,
    };

    XrInstanceCreateInfo createInfo = {
        .type = XR_TYPE_INSTANCE_CREATE_INFO,
        .next = nullptr,
        .createFlags = 0,
        .applicationInfo = applicationInfo,
        .enabledApiLayerCount = 0,
        .enabledApiLayerNames = nullptr,
        .enabledExtensionCount = static_cast<uint32_t>(extension_count),
        .enabledExtensionNames = reinterpret_cast<const char *const *>(extensions),
    };
    CALL_XR(xrCreateInstance(&createInfo, &instance));
}

// Queries the XR system, checks the OpenGL requirements (required by the spec
// before session creation), creates the window + GLX binding, then the session.
void createSession(){

    // obtain the system id
    XrSystemGetInfo systemGetInfo = {
            .type = XR_TYPE_SYSTEM_GET_INFO,
            .next = nullptr,
            // XR_FORM_FACTOR_HEAD_MOUNTED_DISPLAY: display tracked on the user's head;
            // the user cannot touch it (e.g. a VR headset).
            // XR_FORM_FACTOR_HANDHELD_DISPLAY: display held in the hand, independent of
            // the head; touchable, allows screen-space UI (e.g. a passthrough AR phone).
            .formFactor = XR_FORM_FACTOR_HEAD_MOUNTED_DISPLAY,
    };
    CALL_XR(xrGetSystem(instance, &systemGetInfo, &systemId));

    // query graphics API requirements; xrGetOpenGLGraphicsRequirementsKHR must
    // be called before xrCreateSession, and is an extension entry point that
    // has to be loaded via xrGetInstanceProcAddr.
    PFN_xrGetOpenGLGraphicsRequirementsKHR pfnXrGetOpenGlGraphicsRequirementsKhr;
    CALL_XR(xrGetInstanceProcAddr(instance, "xrGetOpenGLGraphicsRequirementsKHR",
                          reinterpret_cast<PFN_xrVoidFunction*>(&pfnXrGetOpenGlGraphicsRequirementsKhr)));
    XrGraphicsRequirementsOpenGLKHR graphicsRequirements = {XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_KHR};
    // NOTE(review): the returned min/max GL versions are never checked against
    // the context created in initWindow() — confirm they are satisfied.
    CALL_XR(pfnXrGetOpenGlGraphicsRequirementsKhr(instance, systemId, &graphicsRequirements));

    // graphics binding passed to the session via the .next chain
    XrGraphicsBindingOpenGLXlibKHR graphicsBinding = {
            .type = XR_TYPE_GRAPHICS_BINDING_OPENGL_XLIB_KHR,
            .next = nullptr,
            //... xDisplay/glxContext/glxDrawable are filled in initWindow()
    };

    // create the window and GL context, fill the binding
    initWindow(&graphicsBinding);

    // create the session
    XrSessionCreateInfo sessionCreateInfo = {
        .type = XR_TYPE_SESSION_CREATE_INFO,
        .next = reinterpret_cast<const XrBaseInStructure*>(&graphicsBinding),
        .createFlags = 0,
        .systemId = systemId
    };

    CALL_XR(xrCreateSession(instance, &sessionCreateInfo, &session));
}

// GLFW key callback: ESC requests window close (ends the main loop).
static void keyCallback(GLFWwindow* window, int key, int scancode, int action, int mods){
    (void) scancode; // unused
    (void) mods;     // unused
    if(GLFW_KEY_ESCAPE == key && action == GLFW_PRESS){
        // FIX: act on the callback's window argument instead of the global pWindow
        glfwSetWindowShouldClose(window, GLFW_TRUE);
    }
}

// Initializes GLFW, creates the window + GL 4.0 core context, loads GL entry
// points via glad, and fills the Xlib/GLX fields of the graphics binding.
// @param graphicsBinding binding whose xDisplay/glxContext/glxDrawable are set here
void initWindow(XrGraphicsBindingOpenGLXlibKHR* graphicsBinding){
    if(!glfwInit()){
        throw std::runtime_error("failed to init glfw.");
    }
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);// GL major version
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0);// GL minor version
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);//core profile

    pWindow = glfwCreateWindow(WIDTH, HEIGHT, "hello openxr", nullptr, nullptr);
    if(!pWindow){
        throw std::runtime_error("failed to create window.");
    }
    glfwMakeContextCurrent(pWindow);

    // load GL function pointers through GLFW's loader
    bool bLoadSuc = gladLoadGLLoader((GLADloadproc) glfwGetProcAddress);
    if(!bLoadSuc){
        throw std::runtime_error("failed glad load glProcAddress!");
    }

    //debug output (context was not created with a debug flag; drivers may not deliver messages)
    glEnable(GL_DEBUG_OUTPUT);

    glfwSetKeyCallback(pWindow, keyCallback);

    // fill the XR graphics binding from the current GLX state
    // NOTE(review): XOpenDisplay(nullptr) opens a NEW X connection while the
    // GLX context/drawable belong to GLFW's connection — confirm the runtime
    // accepts this; glfwGetX11Display() would match exactly.
    graphicsBinding->xDisplay = XOpenDisplay(nullptr);
    graphicsBinding->glxContext = glXGetCurrentContext();
    graphicsBinding->glxDrawable = glXGetCurrentDrawable();
}

void createSpace(){
    XrReferenceSpaceCreateInfo createInfo = {
        .type = XR_TYPE_REFERENCE_SPACE_CREATE_INFO,
        .next = nullptr,
        //暂时使用本地参考系
        .referenceSpaceType = XR_REFERENCE_SPACE_TYPE_LOCAL,
        .poseInReferenceSpace = identity
    };
    CALL_XR(xrCreateReferenceSpace(session, &createInfo, &appSpace));
}

void createSwapchain(){
    //1、查询交换链支持的格式
    uint32_t formatCountOutput;
    CALL_XR(xrEnumerateSwapchainFormats(session, 0, &formatCountOutput, nullptr));
    if(formatCountOutput <= 0){
        throw std::runtime_error("can not find swapchain support formats!");
    }
    int64_t formats[formatCountOutput];
    CALL_XR(xrEnumerateSwapchainFormats(session, formatCountOutput, &formatCountOutput, formats));
    //color format
    int64_t  colorFormat = chooseFormat(formats, formatCountOutput, GL_SRGB8_ALPHA8_EXT, true);

    //2、查询视图数量
    uint32_t configViewCount;
    CALL_XR(xrEnumerateViewConfigurationViews(instance, systemId, XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO,
                                              0, &configViewCount,nullptr));
    if(configViewCount <= 0){
        throw std::runtime_error("can not find config views!");
    }
    configurationViews.resize(configViewCount);
    CALL_XR(xrEnumerateViewConfigurationViews(instance, systemId, XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO,
                                              0, &configViewCount, configurationViews.data()));

    for (uint32_t i = 0; i < configViewCount; i++) {
        printf("View Configuration View %d:\n", i);
        printf("\tResolution       : Recommended %dx%d, Max: %dx%d\n",
               configurationViews[i].recommendedImageRectWidth,
               configurationViews[i].recommendedImageRectHeight, configurationViews[i].maxImageRectWidth,
               configurationViews[i].maxImageRectHeight);
        printf("\tSwapchain Samples: Recommended: %d, Max: %d)\n",
               configurationViews[i].recommendedSwapchainSampleCount,
               configurationViews[i].maxSwapchainSampleCount);
    }

    //3、创建swapchain和设置swapchain images
    swapchains.resize(configViewCount);
    for(int i = 0; i < configViewCount; i++){
        XrSwapchainCreateInfo createInfo = {
            .type = XR_TYPE_SWAPCHAIN_CREATE_INFO,
            .next = nullptr,
            .createFlags = 0,
            .usageFlags = XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT | XR_SWAPCHAIN_USAGE_SAMPLED_BIT,
            .format = colorFormat,
            .sampleCount = configurationViews[i].maxSwapchainSampleCount,
            .width = WIDTH,
            .height = HEIGHT,
            .faceCount = 1,
            .arraySize = 1,
            .mipCount = 1
        };
        CALL_XR(xrCreateSwapchain(session, &createInfo, &swapchains[i]));

        uint32_t imageCountOutput;
        CALL_XR(xrEnumerateSwapchainImages(swapchains[i], 0, &imageCountOutput, nullptr));
        if(imageCountOutput > 0){
            swapchainImages.resize(imageCountOutput);
            for (auto &item: swapchainImages) {
                item.type = XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_KHR;
            }

            CALL_XR(xrEnumerateSwapchainImages(swapchains[i], 0, &imageCountOutput,
                                               reinterpret_cast<XrSwapchainImageBaseHeader *>(swapchainImages.data())));
        }
    }
}

/**
 * Picks a format from the runtime-supported list.
 * @param array candidate format list
 * @param arrayCount number of entries in the list
 * @param shouldChoose preferred format
 * @param fallback if true and the preferred format is absent, fall back to array[0]
 * @return the chosen format, or -1 when not found and fallback is false
 * @throws std::runtime_error when the list is empty or null
 */
int64_t chooseFormat(const int64_t* array, uint32_t arrayCount, int64_t shouldChoose, bool fallback) {
    // FIX: arrayCount is unsigned, so `<= 0` only ever meant `== 0`; also guard
    // against a null list so the fallback path cannot dereference it.
    if(arrayCount == 0 || array == nullptr){
        throw std::runtime_error("arrayCount is required!");
    }

    // prefer the requested format when the runtime offers it
    const int64_t* end = array + arrayCount;
    const int64_t* found = std::find(array, end, shouldChoose);
    if (found != end) {
        return *found;
    }

    // otherwise degrade to the runtime's first (preferred) format if allowed
    return fallback ? array[0] : -1;
}

void initViews(){
    views.resize(configurationViews.size());
    for (auto &item: views) {
        item.type = XR_TYPE_VIEW;
        item.next = nullptr;
    }

    uint32_t size = configurationViews.size();
    projectionViews.resize(size);
    for(int i = 0; i < size; i++){
        projectionViews[i].type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
        projectionViews[i].next = nullptr;
        projectionViews[i].pose = identity;
        projectionViews[i].fov = identityFov;
        projectionViews[i].subImage.swapchain = swapchains[i];
        projectionViews[i].subImage.imageRect.offset = {0, 0};
        projectionViews[i].subImage.imageRect.extent = {WIDTH, HEIGHT};
        projectionViews[i].subImage.imageArrayIndex = 0;
    }
}

void initGl() {
    uint32_t size = configurationViews.size();
    geometry.frameBuffers.resize(size);
    for(int i = 0; i < size; i++){
        glGenFramebuffers(1, &geometry.frameBuffers[i]);
    }

    //编译着色器
    GLuint vsId = compileShader(GL_VERTEX_SHADER, vertexShaderSource);
    GLuint fsId = compileShader(GL_FRAGMENT_SHADER, fragmentShaderSource);
    geometry.shaderProgramId = glCreateProgram();
    glAttachShader(geometry.shaderProgramId, vsId);
    glAttachShader(geometry.shaderProgramId, fsId);

    glDeleteShader(vsId);
    glDeleteShader(fsId);

    glLinkProgram(geometry.shaderProgramId);
    GLint suc;
    glGetProgramiv(geometry.shaderProgramId, GL_LINK_STATUS, &suc);
    if(suc < 0){
        GLchar log[512];
        glGetProgramInfoLog(geometry.shaderProgramId, 512, nullptr, log);
        throw std::runtime_error(log);
    }

    setVAO(&geometry.VAO);
}

/**
 * Compiles one shader stage.
 * @param type shader type (GL_VERTEX_SHADER / GL_FRAGMENT_SHADER)
 * @param source GLSL source code
 * @return shader id; throws runtime_error on compile failure
 */
GLuint compileShader(GLuint type, const char* source){
    GLuint shaderId = glCreateShader(type);
    const GLchar* sources[] = {source};
    glShaderSource(shaderId, 1, sources, nullptr);
    glCompileShader(shaderId);

    GLint compiled;
    glGetShaderiv(shaderId, GL_COMPILE_STATUS, &compiled);
    // FIX: GL_COMPILE_STATUS is GL_TRUE/GL_FALSE — the original `suc < 0`
    // could never be true, so compile errors were silently ignored.
    if(compiled != GL_TRUE){
        GLchar log[512];
        glGetShaderInfoLog(shaderId, sizeof(log), nullptr, log);
        glDeleteShader(shaderId); // do not leak the failed shader object
        throw std::runtime_error(log);
    }
    return shaderId;
}

/**
 * Builds the vertex array object for the demo triangle: two tightly-packed
 * static buffers, attribute 0 = vec3 position, attribute 1 = vec4 color.
 * @param vao Vertex Array Object (output)
 */
void setVAO(GLuint* vao){
    const float positionData[] = {
            0.0f, 0.5f, 0.0f, // vertex 0
            0.0f, 0.0f, 0.0f, // vertex 1
            0.5f, 0.0f, 0.0f, // vertex 2
    };

    const float colorData[] = {
            1.0f, 0.0f, 0.0f, 1.0f,
            0.0f, 1.0f, 0.0f, 1.0f,
            0.0f, 0.0f, 1.0f, 0.0f
    };

    glGenVertexArrays(1, vao);
    glBindVertexArray(*vao);

    GLuint bufferIds[2];
    glGenBuffers(2, bufferIds);

    // attribute 0: position (vec3)
    glBindBuffer(GL_ARRAY_BUFFER, bufferIds[0]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(positionData), positionData, GL_STATIC_DRAW);
    glEnableVertexAttribArray(0);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, nullptr);

    // attribute 1: color (vec4)
    glBindBuffer(GL_ARRAY_BUFFER, bufferIds[1]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(colorData), colorData, GL_STATIC_DRAW);
    glEnableVertexAttribArray(1);
    glVertexAttribPointer(1, 4, GL_FLOAT, GL_FALSE, 0, nullptr);

    glBindVertexArray(0);
}

// Drains one event from the OpenXR event queue and dispatches it.
// @param quitLoop set to true when the main loop should exit
// @param shouldRender written only when a session-state event arrives
void pollXrEvents(bool* quitLoop, bool* shouldRender){
    XrEventDataBuffer eventDataBuffer = {
        .type = XR_TYPE_EVENT_DATA_BUFFER,
        .next = nullptr,
    };
    // xrPollEvent returns a non-XR_SUCCESS code (e.g. XR_EVENT_UNAVAILABLE)
    // when there is nothing to dispatch.
    XrResult result = xrPollEvent(instance, &eventDataBuffer);
    if(result != XR_SUCCESS){
        return;
    }

    // dispatch the event
    // NOTE(review): only one event is handled per call; runtimes may queue
    // several per frame — consider looping until XR_EVENT_UNAVAILABLE.
    switch (eventDataBuffer.type) {
        default:
            std::cout << "do not dispose this event: " << eventDataBuffer.type << std::endl;
            break;
        case XR_TYPE_EVENT_DATA_INSTANCE_LOSS_PENDING: //instance is about to be lost
            std::cout << "instance_loss_pending" << std::endl;
            *quitLoop = true;
            break;
        case XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED: //session state transition
            std::cout << "session_state_changed" << std::endl;
            auto* event = (XrEventDataSessionStateChanged*) &eventDataBuffer;
            onSessionStateChangedEvent(event, quitLoop, shouldRender);
            break;
    }
}

// Maps an XrSessionState value to a human-readable name for logging.
// Unknown values fall through to an empty string.
static std::string print_session_state(int64_t state){
    const char* name = "";
    switch (state) {
        case XR_SESSION_STATE_UNKNOWN:      name = "unknown"; break;
        case XR_SESSION_STATE_IDLE:         name = "idle"; break;
        case XR_SESSION_STATE_READY:        name = "ready"; break;
        case XR_SESSION_STATE_SYNCHRONIZED: name = "synchronized"; break;
        case XR_SESSION_STATE_VISIBLE:      name = "visible"; break;
        case XR_SESSION_STATE_FOCUSED:      name = "focused"; break;
        case XR_SESSION_STATE_STOPPING:     name = "stopping"; break;
        case XR_SESSION_STATE_LOSS_PENDING: name = "loss_pending"; break;
        case XR_SESSION_STATE_EXITING:      name = "exiting"; break;
        case XR_SESSION_STATE_MAX_ENUM:     name = "MAX_ENUM～～it is a bug!"; break;
    }
    return name;
}

// Handles a session state transition: begins/ends the session at the right
// states and reports whether rendering may proceed.
// @param event the state-change event
// @param quitLoop set to true when the main loop should exit
// @param shouldRender set to whether rendering may proceed
void onSessionStateChangedEvent(XrEventDataSessionStateChanged* event, bool* quitLoop, bool* shouldRender){
    auto state = static_cast<XrSessionState>(event->state);
    std::cout << "监测到session状态变化，从" << print_session_state(sessionState) << "变化到：" << print_session_state(state) << std::endl;
    sessionState = state;

    XrSessionBeginInfo beginInfo = {
            .type = XR_TYPE_SESSION_BEGIN_INFO,
            .next = nullptr,
            .primaryViewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO
    };

    switch (state) {
        case XR_SESSION_STATE_MAX_ENUM:
        case XR_SESSION_STATE_IDLE:
        case XR_SESSION_STATE_UNKNOWN:
            *shouldRender = false;
            break;

        case XR_SESSION_STATE_FOCUSED:
        case XR_SESSION_STATE_SYNCHRONIZED:
        case XR_SESSION_STATE_VISIBLE:
            *shouldRender = true;
            break;

        case XR_SESSION_STATE_READY:
            if(!isSessionRunning) {
                CALL_XR(xrBeginSession(session, &beginInfo));
                // FIX: flag was never set, so a repeated READY would call
                // xrBeginSession twice and STOPPING could never end the session.
                isSessionRunning = true;
            }
            *shouldRender = true;
            break;

        case XR_SESSION_STATE_STOPPING:
            if(isSessionRunning) {
                CALL_XR(xrEndSession(session));
                isSessionRunning = false; // FIX: mirror the begin-side update
            }
            *shouldRender = false;
            break;

        case XR_SESSION_STATE_EXITING:
        case XR_SESSION_STATE_LOSS_PENDING:
            // NOTE(review): cleanUp() also destroys the session — consider
            // guarding there against the double destroy.
            CALL_XR(xrDestroySession(session));
            *shouldRender = false;
            *quitLoop = true;
            break;
    }
}

void rendFrame(){
    //1.waitFrame， 注意需要和 beginFrame成对出现
    XrFrameWaitInfo waitInfo = {
            .type = XR_TYPE_FRAME_WAIT_INFO,
            .next = nullptr
    };
    XrFrameState frameState = {
            .type = XR_TYPE_FRAME_STATE,
            .next = nullptr,
    };
    CALL_XR(xrWaitFrame(session, &waitInfo, &frameState));

    //2.beginFrame
    XrFrameBeginInfo beginInfo = {
            .type = XR_TYPE_FRAME_BEGIN_INFO,
            .next = nullptr
    };
    CALL_XR(xrBeginFrame(session, &beginInfo));

    if(frameState.shouldRender){
        for(int i = 0; i < configurationViews.size(); i++){
            //3. 获取交换链中图片
            XrSwapchainImageAcquireInfo acquireInfo = {
                    .type = XR_TYPE_SWAPCHAIN_IMAGE_ACQUIRE_INFO,
                    .next = nullptr,
            };
            uint32_t imgIndex;
            CALL_XR(xrAcquireSwapchainImage(swapchains[i], &acquireInfo, &imgIndex));

            XrSwapchainImageWaitInfo imageWaitInfo = {
                    .type = XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO,
                    .next = nullptr,
                    .timeout = 1000
            };
            xrWaitSwapchainImage(swapchains[i], &imageWaitInfo);

            //4. 绘制
            renderImage(i, geometry.frameBuffers[i], swapchainImages[i]);

            //5. 释放交换链中贴图
            XrSwapchainImageReleaseInfo releaseInfo = {
                    .type = XR_TYPE_SWAPCHAIN_IMAGE_RELEASE_INFO,
                    .next = nullptr
            };
            xrReleaseSwapchainImage(swapchains[i], &releaseInfo);
        }
    }

    //6. 获得视图位置
    uint32_t viewCountOutput;
    XrViewState viewState = {
            .type = XR_TYPE_VIEW_STATE,
            .next = nullptr,
            .viewStateFlags = XR_VIEW_STATE_POSITION_VALID_BIT
    };
    XrViewLocateInfo locateInfo = {
            .type = XR_TYPE_VIEW_LOCATE_INFO,
            .next = nullptr,
            .viewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO,
            .displayTime = frameState.predictedDisplayTime,
            .space = appSpace,
    };
    xrLocateViews(session, &locateInfo, &viewState, 0, &viewCountOutput, views.data());

    int submittedLayerCount = 1;
    const XrCompositionLayerBaseHeader* submittedLayers[] = {
            (const XrCompositionLayerBaseHeader* const)&projectionViews};

    //如果view State 返回 orientation_valid_bit，就不提交Layer了
    if ((viewState.viewStateFlags & XR_VIEW_STATE_ORIENTATION_VALID_BIT) == 0) {
        submittedLayerCount = 0;
    }
    //如果不能渲染，也不提交Layer了
    if (!frameState.shouldRender) {
        submittedLayerCount = 0;
    }

    //7.提交Layers，并执行xrEndFrame()
    XrFrameEndInfo endInfo = {
            .type = XR_TYPE_FRAME_END_INFO,
            .next = nullptr,
            .displayTime = frameState.predictedDisplayTime,
            .environmentBlendMode = XR_ENVIRONMENT_BLEND_MODE_OPAQUE,
            .layerCount = static_cast<uint32_t>(submittedLayerCount),
            .layers = submittedLayers,
    };
    CALL_XR(xrEndFrame(session, &endInfo));

    glfwSwapBuffers(pWindow);
}

void renderImage(int viewIndex, GLuint frameBuffer, const XrSwapchainImageOpenGLKHR& image){
    glBindFramebuffer(1, frameBuffer);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, image.image, 0);

    glViewport(0, 0, WIDTH, HEIGHT);
    glScissor(0, 0, WIDTH, HEIGHT);//裁剪
    glClearColor(0.0f, 0.0f, 0.2f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    glUseProgram(geometry.shaderProgramId);
    glBindVertexArray(geometry.VAO);
    glPointSize(5);
    glDrawArrays(GL_TRIANGLES, 0, 3);

    if (viewIndex == 0) {
        glBlitNamedFramebuffer((GLuint)frameBuffer,             // readFramebuffer
                               (GLuint)0,                       // backbuffer     // drawFramebuffer
                               (GLint)0,                        // srcX0
                               (GLint)0,                        // srcY0
                               (GLint)WIDTH,                    // srcX1
                               (GLint)HEIGHT,                   // srcY1
                               (GLint)0,                        // dstX0
                               (GLint)0,                        // dstY0
                               (GLint)WIDTH,                    // dstX1
                               (GLint)HEIGHT,                   // dstY1
                               (GLbitfield)GL_COLOR_BUFFER_BIT, // mask
                               (GLenum)GL_LINEAR);              // filter
    }

    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}










