#include "agxv_private.h"
#include "vk_descriptors.h"

/*
 * vkCreateDescriptorSetLayout implementation.
 *
 * The layout object is reference-counted: descriptor sets and pipeline
 * layouts take references that can outlive vkDestroyDescriptorSetLayout.
 * The final free can therefore happen long after this call returns, when
 * the application's pAllocator (and the VkAllocationCallbacks struct it
 * points to) may no longer be valid.  To stay safe we allocate the layout
 * from the device allocator only and leave set_layout->alloc NULL --
 * vk_free2 falls back to the device allocator when its second argument
 * is NULL, so the destroy path needs no changes.
 */
VKAPI_ATTR VkResult VKAPI_CALL agxv_CreateDescriptorSetLayout(
    VkDevice _device,
    const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator,
    VkDescriptorSetLayout *pSetLayout)
{
    AGXV_FROM_HANDLE(agxv_device, device, _device);
    struct agxv_descriptor_set_layout *set_layout;

    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

    /* Binding numbers may be sparse and unordered; size the binding array
     * by the highest binding number + 1 and count the immutable samplers
     * so they can live in the same allocation. */
    uint32_t num_bindings = 0;
    uint32_t immutable_sampler_count = 0;
    for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
        num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);

        /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
         *
         *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
         *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
         *    pImmutableSamplers can be used to initialize a set of immutable
         *    samplers. [...]  If descriptorType is not one of these descriptor
         *    types, then pImmutableSamplers is ignored.
         *
         * We need to be careful here and only parse pImmutableSamplers if we
         * have one of the right descriptor types.
         */
        VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
        if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
             desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
            pCreateInfo->pBindings[j].pImmutableSamplers)
            immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
    }

    size_t size = sizeof(struct agxv_descriptor_set_layout) +
                  num_bindings * sizeof(set_layout->binding[0]) +
                  immutable_sampler_count * sizeof(struct agxv_sampler *);

    /* Device allocator only: the layout is refcounted and may be freed after
     * pAllocator has gone out of scope (see comment at the top). */
    set_layout = vk_zalloc(&device->vk.alloc, size, 8,
                           VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!set_layout)
        return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

    vk_object_base_init(&device->vk, &set_layout->base,
                        VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
    set_layout->ref_cnt = 1;
    /* We just allocate all the samplers at the end of the struct */
    struct agxv_sampler **samplers =
        (struct agxv_sampler **)&set_layout->binding[num_bindings];

    /* NULL makes vk_free2 in the destroy path fall back to the device
     * allocator, matching the allocation above. */
    set_layout->alloc = NULL;
    set_layout->binding_count = num_bindings;
    set_layout->shader_stages = 0;
    set_layout->size = 0;

    /* Sort the bindings so descriptor indices are assigned in binding
     * order; gaps stay zeroed (valid == false) from vk_zalloc. */
    VkDescriptorSetLayoutBinding *bindings = NULL;
    VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                                pCreateInfo->bindingCount,
                                                &bindings);
    if (result != VK_SUCCESS) {
        vk_object_base_finish(&set_layout->base);
        vk_free(&device->vk.alloc, set_layout);
        return vk_error(device, result);
    }

    uint32_t dynamic_offset_count = 0;
    for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
        const VkDescriptorSetLayoutBinding *binding = bindings + j;
        uint32_t b = binding->binding;

        set_layout->binding[b].array_size = binding->descriptorCount;
        set_layout->binding[b].descriptor_index = set_layout->size;
        set_layout->binding[b].type = binding->descriptorType;
        set_layout->binding[b].valid = true;
        set_layout->size += binding->descriptorCount;

        /* Per-stage resource indices start out unassigned. */
        for (gl_shader_stage stage = MESA_SHADER_VERTEX; stage < MESA_SHADER_STAGES; stage++) {
            set_layout->binding[b].stage[stage].const_buffer_index = -1;
            set_layout->binding[b].stage[stage].shader_buffer_index = -1;
            set_layout->binding[b].stage[stage].sampler_index = -1;
            set_layout->binding[b].stage[stage].sampler_view_index = -1;
            set_layout->binding[b].stage[stage].image_index = -1;
        }

        if (binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
            binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
            set_layout->binding[b].dynamic_index = dynamic_offset_count;
            dynamic_offset_count += binding->descriptorCount;
        }
        switch (binding->descriptorType) {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            // agxv_foreach_stage(s, binding->stageFlags) {
            //     set_layout->binding[b].stage[s].sampler_index = set_layout->stage[s].sampler_count;
            //     set_layout->stage[s].sampler_count += binding->descriptorCount;
            // }
            /* Carve this binding's slice out of the sampler area allocated
             * at the end of the struct. */
            if (binding->pImmutableSamplers) {
                set_layout->binding[b].immutable_samplers = samplers;
                samplers += binding->descriptorCount;

                for (uint32_t i = 0; i < binding->descriptorCount; i++)
                    set_layout->binding[b].immutable_samplers[i] =
                        agxv_sampler_from_handle(binding->pImmutableSamplers[i]);
            }
            break;
        default:
            break;
        }

        switch (binding->descriptorType) {
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
            // agxv_foreach_stage(s, binding->stageFlags) {
            //     set_layout->binding[b].stage[s].const_buffer_index = set_layout->stage[s].const_buffer_count;
            //     set_layout->stage[s].const_buffer_count += binding->descriptorCount;
            // }
            break;
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
            // agxv_foreach_stage(s, binding->stageFlags) {
            //     set_layout->binding[b].stage[s].shader_buffer_index = set_layout->stage[s].shader_buffer_count;
            //     set_layout->stage[s].shader_buffer_count += binding->descriptorCount;
            // }
            break;

        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            // agxv_foreach_stage(s, binding->stageFlags) {
            //     set_layout->binding[b].stage[s].image_index = set_layout->stage[s].image_count;
            //     set_layout->stage[s].image_count += binding->descriptorCount;
            // }
            break;
        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            // agxv_foreach_stage(s, binding->stageFlags) {
            //     set_layout->binding[b].stage[s].sampler_view_index = set_layout->stage[s].sampler_view_count;
            //     set_layout->stage[s].sampler_view_count += binding->descriptorCount;
            // }
            break;
        default:
            break;
        }

        set_layout->shader_stages |= binding->stageFlags;
    }

    free(bindings);

    set_layout->dynamic_offset_count = dynamic_offset_count;

    *pSetLayout = agxv_descriptor_set_layout_to_handle(set_layout);

    return VK_SUCCESS;
}

/* Final teardown of a set layout; only legal once every reference is gone. */
static void agxv_descriptor_set_layout_destroy(struct agxv_device *device,
                                               struct agxv_descriptor_set_layout *set_layout)
{
    assert(set_layout->ref_cnt == 0);

    vk_object_base_finish(&set_layout->base);
    vk_free2(&device->vk.alloc, set_layout->alloc, set_layout);
}

static inline void
agxv_descriptor_set_layout_unref(struct agxv_device *device,
                                 struct agxv_descriptor_set_layout *layout)
{
    assert(layout && layout->ref_cnt >= 1);
    if (p_atomic_dec_zero(&layout->ref_cnt))
        agxv_descriptor_set_layout_destroy(device, layout);
}

static inline void
agxv_descriptor_set_layout_ref(struct agxv_descriptor_set_layout *layout)
{
    assert(layout && layout->ref_cnt >= 1);
    p_atomic_inc(&layout->ref_cnt);
}

/*
 * vkDestroyDescriptorSetLayout: drops the creation reference.  The actual
 * free happens whenever the last holder (descriptor set or pipeline layout)
 * releases its reference.
 */
VKAPI_ATTR void VKAPI_CALL agxv_DestroyDescriptorSetLayout(VkDevice _device,
                                                          VkDescriptorSetLayout _set_layout,
                                                          const VkAllocationCallbacks *pAllocator)
{
    AGXV_FROM_HANDLE(agxv_descriptor_set_layout, set_layout, _set_layout);
    AGXV_FROM_HANDLE(agxv_device, device, _device);

    /* Destroying VK_NULL_HANDLE is a no-op. */
    if (set_layout == NULL)
        return;

    agxv_descriptor_set_layout_unref(device, set_layout);
}

/*
 * vkCreatePipelineLayout: records the referenced set layouts (taking a
 * reference on each) and the total push-constant footprint, aligned to 16.
 */
VKAPI_ATTR VkResult VKAPI_CALL agxv_CreatePipelineLayout(VkDevice _device,
                                                         const VkPipelineLayoutCreateInfo *pCreateInfo,
                                                         const VkAllocationCallbacks *pAllocator,
                                                         VkPipelineLayout *pPipelineLayout)
{
    AGXV_FROM_HANDLE(agxv_device, device, _device);

    assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

    struct agxv_pipeline_layout *layout =
        vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*layout), 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (layout == NULL)
        return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

    vk_object_base_init(&device->vk, &layout->base,
                        VK_OBJECT_TYPE_PIPELINE_LAYOUT);

    layout->num_sets = pCreateInfo->setLayoutCount;
    for (uint32_t i = 0; i < pCreateInfo->setLayoutCount; i++) {
        AGXV_FROM_HANDLE(agxv_descriptor_set_layout, set_layout,
                         pCreateInfo->pSetLayouts[i]);
        /* Keep the set layout alive for as long as this pipeline layout. */
        agxv_descriptor_set_layout_ref(set_layout);
        layout->set[i].layout = set_layout;
    }

    /* The push-constant size is the furthest byte touched by any range. */
    uint32_t push_size = 0;
    for (uint32_t i = 0; i < pCreateInfo->pushConstantRangeCount; i++) {
        const VkPushConstantRange *range = &pCreateInfo->pPushConstantRanges[i];
        push_size = MAX2(push_size, range->offset + range->size);
    }
    layout->push_constant_size = align(push_size, 16);

    *pPipelineLayout = agxv_pipeline_layout_to_handle(layout);

    return VK_SUCCESS;
}

/*
 * vkDestroyPipelineLayout: releases the references taken on each set layout
 * at creation time, then frees the object.
 */
VKAPI_ATTR void VKAPI_CALL agxv_DestroyPipelineLayout(VkDevice _device,
                                                      VkPipelineLayout _pipelineLayout,
                                                      const VkAllocationCallbacks *pAllocator)
{
    AGXV_FROM_HANDLE(agxv_pipeline_layout, layout, _pipelineLayout);
    AGXV_FROM_HANDLE(agxv_device, device, _device);

    /* Destroying VK_NULL_HANDLE is a no-op. */
    if (layout == NULL)
        return;

    for (uint32_t set = 0; set < layout->num_sets; set++)
        agxv_descriptor_set_layout_unref(device, layout->set[set].layout);

    vk_object_base_finish(&layout->base);
    vk_free2(&device->vk.alloc, pAllocator, layout);
}

/*
 * Allocates and initializes one descriptor set for the given layout.
 * On success the set holds a reference on the layout and immutable
 * samplers are pre-filled.  Returns VK_ERROR_OUT_OF_HOST_MEMORY on
 * allocation failure.
 */
static VkResult
agxv_descriptor_set_create(struct agxv_device *device,
                           struct agxv_descriptor_set_layout *layout,
                           struct agxv_descriptor_set **out_set)
{
    struct agxv_descriptor_set *set;
    size_t size = sizeof(*set) + layout->size * sizeof(set->descriptors[0]);

    /* A descriptor set may not be 100% filled; zero-allocate so holes can
     * be detected later (vk_zalloc replaces the old vk_alloc + memset). */
    set = vk_zalloc(&device->vk.alloc /* XXX: Use the pool */, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (!set)
        return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

    vk_object_base_init(&device->vk, &set->base,
                        VK_OBJECT_TYPE_DESCRIPTOR_SET);
    set->layout = layout;
    agxv_descriptor_set_layout_ref(layout);

    /* Go through and fill out immutable samplers if we have any */
    struct agxv_descriptor *desc = set->descriptors;
    for (uint32_t b = 0; b < layout->binding_count; b++) {
        if (layout->binding[b].immutable_samplers) {
            for (uint32_t i = 0; i < layout->binding[b].array_size; i++)
                desc[i].info.sampler = layout->binding[b].immutable_samplers[i];
        }
        desc += layout->binding[b].array_size;
    }

    *out_set = set;

    return VK_SUCCESS;
}

/*
 * vkAllocateDescriptorSets: creates one set per requested layout and links
 * it into the pool.  On any failure, all sets created so far are freed and
 * every entry of pDescriptorSets is set to VK_NULL_HANDLE, as the spec
 * requires.
 */
VKAPI_ATTR VkResult VKAPI_CALL agxv_AllocateDescriptorSets(
    VkDevice _device,
    const VkDescriptorSetAllocateInfo *pAllocateInfo,
    VkDescriptorSet *pDescriptorSets)
{
    AGXV_FROM_HANDLE(agxv_device, device, _device);
    AGXV_FROM_HANDLE(agxv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
    VkResult result = VK_SUCCESS;
    struct agxv_descriptor_set *set;
    uint32_t i;

    for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
        AGXV_FROM_HANDLE(agxv_descriptor_set_layout, layout,
                         pAllocateInfo->pSetLayouts[i]);

        result = agxv_descriptor_set_create(device, layout, &set);
        if (result != VK_SUCCESS)
            break;

        list_addtail(&set->link, &pool->sets);
        pDescriptorSets[i] = agxv_descriptor_set_to_handle(set);
    }

    if (result != VK_SUCCESS) {
        /* Only the first i entries were populated. */
        agxv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                                i, pDescriptorSets);
        /* "If the creation of any of those descriptor sets fails, [...]
         * sets all entries of the pDescriptorSets array to VK_NULL_HANDLE". */
        for (uint32_t j = 0; j < pAllocateInfo->descriptorSetCount; j++)
            pDescriptorSets[j] = VK_NULL_HANDLE;
    }

    return result;
}

/* Releases a descriptor set: drops its layout reference and frees it. */
static void
agxv_descriptor_set_destroy(struct agxv_device *device,
                            struct agxv_descriptor_set *set)
{
    agxv_descriptor_set_layout_unref(device, set->layout);

    vk_object_base_finish(&set->base);
    vk_free(&device->vk.alloc, set);
}

/*
 * vkFreeDescriptorSets: unlinks each non-NULL set from its pool and
 * destroys it.  VK_NULL_HANDLE entries are skipped per the spec.
 */
VKAPI_ATTR VkResult VKAPI_CALL agxv_FreeDescriptorSets(
    VkDevice _device,
    VkDescriptorPool descriptorPool,
    uint32_t count,
    const VkDescriptorSet *pDescriptorSets)
{
    AGXV_FROM_HANDLE(agxv_device, device, _device);

    for (uint32_t idx = 0; idx < count; idx++) {
        AGXV_FROM_HANDLE(agxv_descriptor_set, set, pDescriptorSets[idx]);

        if (set == NULL)
            continue;

        list_del(&set->link);
        agxv_descriptor_set_destroy(device, set);
    }

    return VK_SUCCESS;
}

/*
 * vkCreateDescriptorPool: sets are individually heap-allocated for now, so
 * the pool itself is just a flags field plus a list of live sets.
 */
VKAPI_ATTR VkResult VKAPI_CALL agxv_CreateDescriptorPool(
    VkDevice _device,
    const VkDescriptorPoolCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator,
    VkDescriptorPool *pDescriptorPool)
{
    AGXV_FROM_HANDLE(agxv_device, device, _device);

    struct agxv_descriptor_pool *pool =
        vk_zalloc2(&device->vk.alloc, pAllocator,
                   sizeof(struct agxv_descriptor_pool), 8,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    if (pool == NULL)
        return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

    vk_object_base_init(&device->vk, &pool->base,
                        VK_OBJECT_TYPE_DESCRIPTOR_POOL);

    pool->flags = pCreateInfo->flags;
    list_inithead(&pool->sets);

    *pDescriptorPool = agxv_descriptor_pool_to_handle(pool);

    return VK_SUCCESS;
}

/*
 * Destroys every set still owned by the pool.  Goes through
 * agxv_descriptor_set_destroy so the object base is finished and the
 * layout reference is dropped -- the previous open-coded version skipped
 * vk_object_base_finish, diverging from the vkFreeDescriptorSets path.
 */
static void agxv_reset_descriptor_pool(struct agxv_device *device,
                                       struct agxv_descriptor_pool *pool)
{
    struct agxv_descriptor_set *set, *tmp;
    LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->sets, link) {
        list_del(&set->link);
        agxv_descriptor_set_destroy(device, set);
    }
}

/*
 * vkUpdateDescriptorSets: applies writes, then copies.
 *
 * Descriptors for a binding live contiguously at
 * binding_layout->descriptor_index; dstArrayElement offsets into that run.
 *
 * Fixed here: immutable samplers are now indexed by
 * (dstArrayElement + j) to match the descriptor being written -- the old
 * code indexed by j alone, picking the wrong sampler for writes starting
 * at a nonzero array element.  Likewise, VK_DESCRIPTOR_TYPE_SAMPLER writes
 * no longer clobber immutable samplers: per the spec, the sampler member of
 * pImageInfo is ignored when the binding has immutable samplers.
 *
 * NOTE(review): writes/copies whose count runs past the end of a binding
 * must roll over into the next binding per the spec; this implementation
 * does not handle that -- TODO confirm whether callers rely on it.
 */
VKAPI_ATTR void VKAPI_CALL agxv_UpdateDescriptorSets(
    VkDevice _device,
    uint32_t descriptorWriteCount,
    const VkWriteDescriptorSet *pDescriptorWrites,
    uint32_t descriptorCopyCount,
    const VkCopyDescriptorSet *pDescriptorCopies)
{
    for (uint32_t i = 0; i < descriptorWriteCount; i++) {
        const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
        AGXV_FROM_HANDLE(agxv_descriptor_set, set, write->dstSet);
        const struct agxv_descriptor_set_binding_layout *bind_layout =
            &set->layout->binding[write->dstBinding];
        struct agxv_descriptor *desc =
            &set->descriptors[bind_layout->descriptor_index];
        desc += write->dstArrayElement;

        switch (write->descriptorType) {
        case VK_DESCRIPTOR_TYPE_SAMPLER:
            for (uint32_t j = 0; j < write->descriptorCount; j++) {
                struct agxv_sampler *sampler;

                /* If the binding has immutable samplers, the sampler in
                 * pImageInfo is ignored per the spec; keep the immutable
                 * one for the element actually being written. */
                if (bind_layout->immutable_samplers) {
                    sampler =
                        bind_layout->immutable_samplers[write->dstArrayElement + j];
                } else {
                    AGXV_FROM_HANDLE(agxv_sampler, s,
                                     write->pImageInfo[j].sampler);
                    sampler = s;
                }

                desc[j] = (struct agxv_descriptor) {
                    .type = VK_DESCRIPTOR_TYPE_SAMPLER,
                    .info.sampler = sampler,
                };
            }
            break;

        case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            for (uint32_t j = 0; j < write->descriptorCount; j++) {
                AGXV_FROM_HANDLE(agxv_image_view, iview,
                                 write->pImageInfo[j].imageView);
                desc[j].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
                desc[j].info.iview = iview;
                /*
                 * All consecutive bindings updated via a single
                 * VkWriteDescriptorSet structure, except those with a
                 * descriptorCount of zero, must all either use immutable
                 * samplers or must all not use immutable samplers.
                 */
                if (bind_layout->immutable_samplers) {
                    /* Index by the element being written, not the write-local
                     * loop counter. */
                    desc[j].info.sampler =
                        bind_layout->immutable_samplers[write->dstArrayElement + j];
                } else {
                    AGXV_FROM_HANDLE(agxv_sampler, sampler,
                                     write->pImageInfo[j].sampler);

                    desc[j].info.sampler = sampler;
                }
            }
            break;

        case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
        case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
        case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            for (uint32_t j = 0; j < write->descriptorCount; j++) {
                AGXV_FROM_HANDLE(agxv_image_view, iview,
                                 write->pImageInfo[j].imageView);

                desc[j] = (struct agxv_descriptor) {
                    .type = write->descriptorType,
                    .info.iview = iview,
                };
            }
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            for (uint32_t j = 0; j < write->descriptorCount; j++) {
                AGXV_FROM_HANDLE(agxv_buffer_view, bview,
                                 write->pTexelBufferView[j]);

                desc[j] = (struct agxv_descriptor) {
                    .type = write->descriptorType,
                    .info.buffer_view = bview,
                };
            }
            break;

        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
        case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
        case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
            for (uint32_t j = 0; j < write->descriptorCount; j++) {
                assert(write->pBufferInfo[j].buffer);
                AGXV_FROM_HANDLE(agxv_buffer, buffer, write->pBufferInfo[j].buffer);
                assert(buffer);
                desc[j] = (struct agxv_descriptor) {
                    .type = write->descriptorType,
                    .info.offset = write->pBufferInfo[j].offset,
                    .info.buffer = buffer,
                    .info.range = write->pBufferInfo[j].range,
                };
            }
            break;

        default:
            break;
        }
    }

    for (uint32_t i = 0; i < descriptorCopyCount; i++) {
        const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
        AGXV_FROM_HANDLE(agxv_descriptor_set, src, copy->srcSet);
        AGXV_FROM_HANDLE(agxv_descriptor_set, dst, copy->dstSet);

        const struct agxv_descriptor_set_binding_layout *src_layout =
            &src->layout->binding[copy->srcBinding];
        struct agxv_descriptor *src_desc =
            &src->descriptors[src_layout->descriptor_index];
        src_desc += copy->srcArrayElement;

        const struct agxv_descriptor_set_binding_layout *dst_layout =
            &dst->layout->binding[copy->dstBinding];
        struct agxv_descriptor *dst_desc =
            &dst->descriptors[dst_layout->descriptor_index];
        dst_desc += copy->dstArrayElement;

        for (uint32_t j = 0; j < copy->descriptorCount; j++)
            dst_desc[j] = src_desc[j];
    }
}
