blob: 62f2b32bc21bfcd98e5c68d8327c810390960461 [file] [log] [blame]
/*
* Copyright (c) 2015-2021 The Khronos Group Inc.
* Copyright (c) 2015-2021 Valve Corporation
* Copyright (c) 2015-2021 LunarG, Inc.
* Copyright (c) 2015-2021 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Author: Chia-I Wu <olvaffe@gmail.com>
* Author: Chris Forbes <chrisf@ijw.co.nz>
* Author: Courtney Goeltzenleuchter <courtney@LunarG.com>
* Author: Mark Lobodzinski <mark@lunarg.com>
* Author: Mike Stroyan <mike@LunarG.com>
* Author: Tobin Ehlis <tobine@google.com>
* Author: Tony Barbour <tony@LunarG.com>
* Author: Cody Northrop <cnorthrop@google.com>
* Author: Dave Houlton <daveh@lunarg.com>
* Author: Jeremy Kniager <jeremyk@lunarg.com>
* Author: Shannon McPherson <shannon@lunarg.com>
* Author: John Zulauf <jzulauf@lunarg.com>
*/
#include "../layer_validation_tests.h"
#include "vk_extension_helper.h"
#include <algorithm>
#include <array>
#include <chrono>
#include <memory>
#include <mutex>
#include <thread>
#include "cast_utils.h"
//
// POSITIVE VALIDATION TESTS
//
// These tests do not expect to encounter ANY validation errors, and pass only if that is true
TEST_F(VkPositiveLayerTest, CopyNonupdatedDescriptors) {
    TEST_DESCRIPTION("Copy non-updated descriptors");
    ASSERT_NO_FATAL_FAILURE(Init());

    // Source layout has three bindings; destination layout mirrors the first two, so each
    // copy below is between bindings of identical descriptor type and count.
    OneOffDescriptorSet src_descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {2, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });
    OneOffDescriptorSet dst_descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                         {1, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });

    m_errorMonitor->ExpectSuccess();

    // Copying from descriptors that were never written must not trigger validation errors;
    // value-initialization zeroes every member (including pNext) before the fields are filled in.
    const uint32_t copy_size = 2;
    VkCopyDescriptorSet copy_ds_update[copy_size] = {};
    for (uint32_t i = 0; i < copy_size; i++) {
        copy_ds_update[i].sType = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET;
        copy_ds_update[i].srcSet = src_descriptor_set.set_;
        copy_ds_update[i].srcBinding = i;
        copy_ds_update[i].dstSet = dst_descriptor_set.set_;
        copy_ds_update[i].dstBinding = i;
        copy_ds_update[i].descriptorCount = 1;
    }
    // No writes (count 0), copies only.
    vk::UpdateDescriptorSets(m_device->device(), 0, NULL, copy_size, copy_ds_update);
    m_errorMonitor->VerifyNotFound();
}
TEST_F(VkPositiveLayerTest, DeleteDescriptorSetLayoutsBeforeDescriptorSets) {
    TEST_DESCRIPTION("Create DSLayouts and DescriptorSets and then delete the DSLayouts before the DescriptorSets.");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    VkResult err;
    m_errorMonitor->ExpectSuccess();

    // Pool with a single sampler descriptor; FREE_DESCRIPTOR_SET_BIT is required so the set
    // can be explicitly freed below.
    VkDescriptorPoolSize ds_type_count = {};
    ds_type_count.type = VK_DESCRIPTOR_TYPE_SAMPLER;
    ds_type_count.descriptorCount = 1;

    VkDescriptorPoolCreateInfo ds_pool_ci = {};
    ds_pool_ci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    ds_pool_ci.pNext = NULL;
    ds_pool_ci.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT;
    ds_pool_ci.maxSets = 1;
    ds_pool_ci.poolSizeCount = 1;
    ds_pool_ci.pPoolSizes = &ds_type_count;

    VkDescriptorPool ds_pool_one;
    err = vk::CreateDescriptorPool(m_device->device(), &ds_pool_ci, NULL, &ds_pool_one);
    ASSERT_VK_SUCCESS(err);

    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 0;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_ALL;
    dsl_binding.pImmutableSamplers = NULL;

    VkDescriptorSet descriptorSet;
    {
        // The layout lives only inside this scope; per the spec, a descriptor set remains valid
        // after the layout it was allocated with is destroyed.
        const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});

        VkDescriptorSetAllocateInfo alloc_info = {};
        alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
        alloc_info.descriptorSetCount = 1;
        alloc_info.descriptorPool = ds_pool_one;
        alloc_info.pSetLayouts = &ds_layout.handle();
        err = vk::AllocateDescriptorSets(m_device->device(), &alloc_info, &descriptorSet);
        ASSERT_VK_SUCCESS(err);
    }  // ds_layout destroyed

    // Freeing the set after its layout is gone must succeed without validation errors.
    err = vk::FreeDescriptorSets(m_device->device(), ds_pool_one, 1, &descriptorSet);
    ASSERT_VK_SUCCESS(err);  // previously the result of FreeDescriptorSets was silently discarded

    vk::DestroyDescriptorPool(m_device->device(), ds_pool_one, NULL);
    m_errorMonitor->VerifyNotFound();
}
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, IgnoreUnrelatedDescriptor) {
    TEST_DESCRIPTION(
        "Ensure that the vkUpdateDescriptorSets validation code is ignoring VkWriteDescriptorSet members that are not related to "
        "the descriptor type specified by VkWriteDescriptorSet::descriptorType. Correct validation behavior will result in the "
        "test running to completion without validation errors.");

    // Deliberately-bogus pointer planted in the "unrelated" VkWriteDescriptorSet members below.
    // A layer that wrongly dereferences those members will most likely crash, which is how this
    // test detects the failure (in addition to any reported validation errors).
    const uintptr_t invalid_ptr = 0xcdcdcdcd;

    ASSERT_NO_FATAL_FAILURE(Init());

    // Verify VK_FORMAT_R8_UNORM supports VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT
    // (needed for the texel-buffer case at the end; skip the whole test if unsupported).
    const VkFormat format_texel_case = VK_FORMAT_R8_UNORM;
    const char *format_texel_case_string = "VK_FORMAT_R8_UNORM";
    VkFormatProperties format_properties;
    vk::GetPhysicalDeviceFormatProperties(gpu(), format_texel_case, &format_properties);
    if (!(format_properties.bufferFeatures & VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT)) {
        printf("%s Test requires %s to support VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT\n", kSkipPrefix, format_texel_case_string);
        return;
    }

    // Image Case: only pImageInfo is relevant for VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE.
    {
        m_errorMonitor->ExpectSuccess();

        VkImageObj image(m_device);
        image.Init(32, 32, 1, VK_FORMAT_B8G8R8A8_UNORM, VK_IMAGE_USAGE_SAMPLED_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
        VkImageView view = image.targetView(VK_FORMAT_B8G8R8A8_UNORM);

        OneOffDescriptorSet descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });

        VkDescriptorImageInfo image_info = {};
        image_info.imageView = view;
        image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

        VkWriteDescriptorSet descriptor_write;
        memset(&descriptor_write, 0, sizeof(descriptor_write));
        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        descriptor_write.dstSet = descriptor_set.set_;
        descriptor_write.dstBinding = 0;
        descriptor_write.descriptorCount = 1;
        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        descriptor_write.pImageInfo = &image_info;

        // Set pBufferInfo and pTexelBufferView to invalid values, which should
        // be
        // ignored for descriptorType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE.
        // This will most likely produce a crash if the parameter_validation
        // layer
        // does not correctly ignore pBufferInfo.
        descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);
        descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);

        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
        m_errorMonitor->VerifyNotFound();
    }

    // Buffer Case: only pBufferInfo is relevant for VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER.
    {
        m_errorMonitor->ExpectSuccess();

        uint32_t queue_family_index = 0;
        VkBufferCreateInfo buffer_create_info = {};
        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        buffer_create_info.size = 1024;
        buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
        buffer_create_info.queueFamilyIndexCount = 1;
        buffer_create_info.pQueueFamilyIndices = &queue_family_index;

        VkBufferObj buffer;
        buffer.init(*m_device, buffer_create_info);

        OneOffDescriptorSet descriptor_set(m_device, {
                                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     });

        VkDescriptorBufferInfo buffer_info = {};
        buffer_info.buffer = buffer.handle();
        buffer_info.offset = 0;
        buffer_info.range = 1024;

        VkWriteDescriptorSet descriptor_write;
        memset(&descriptor_write, 0, sizeof(descriptor_write));
        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        descriptor_write.dstSet = descriptor_set.set_;
        descriptor_write.dstBinding = 0;
        descriptor_write.descriptorCount = 1;
        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        descriptor_write.pBufferInfo = &buffer_info;

        // Set pImageInfo and pTexelBufferView to invalid values, which should
        // be
        // ignored for descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER.
        // This will most likely produce a crash if the parameter_validation
        // layer
        // does not correctly ignore pImageInfo.
        descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
        descriptor_write.pTexelBufferView = reinterpret_cast<const VkBufferView *>(invalid_ptr);

        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
        m_errorMonitor->VerifyNotFound();
    }

    // Texel Buffer Case: only pTexelBufferView is relevant for VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER.
    {
        m_errorMonitor->ExpectSuccess();

        uint32_t queue_family_index = 0;
        VkBufferCreateInfo buffer_create_info = {};
        buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        buffer_create_info.size = 1024;
        buffer_create_info.usage = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
        buffer_create_info.queueFamilyIndexCount = 1;
        buffer_create_info.pQueueFamilyIndices = &queue_family_index;

        VkBufferObj buffer;
        buffer.init(*m_device, buffer_create_info);

        // The buffer view is a raw handle (not a wrapper object) and is destroyed explicitly at
        // the end of this scope.
        VkBufferViewCreateInfo buff_view_ci = {};
        buff_view_ci.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
        buff_view_ci.buffer = buffer.handle();
        buff_view_ci.format = format_texel_case;
        buff_view_ci.range = VK_WHOLE_SIZE;
        VkBufferView buffer_view;
        VkResult err = vk::CreateBufferView(m_device->device(), &buff_view_ci, NULL, &buffer_view);
        ASSERT_VK_SUCCESS(err);

        OneOffDescriptorSet descriptor_set(m_device,
                                           {
                                               {0, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                           });

        VkWriteDescriptorSet descriptor_write;
        memset(&descriptor_write, 0, sizeof(descriptor_write));
        descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        descriptor_write.dstSet = descriptor_set.set_;
        descriptor_write.dstBinding = 0;
        descriptor_write.descriptorCount = 1;
        descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER;
        descriptor_write.pTexelBufferView = &buffer_view;

        // Set pImageInfo and pBufferInfo to invalid values, which should be
        // ignored for descriptorType ==
        // VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER.
        // This will most likely produce a crash if the parameter_validation
        // layer
        // does not correctly ignore pImageInfo and pBufferInfo.
        descriptor_write.pImageInfo = reinterpret_cast<const VkDescriptorImageInfo *>(invalid_ptr);
        descriptor_write.pBufferInfo = reinterpret_cast<const VkDescriptorBufferInfo *>(invalid_ptr);

        vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
        m_errorMonitor->VerifyNotFound();

        vk::DestroyBufferView(m_device->device(), buffer_view, NULL);
    }
}
TEST_F(VkPositiveLayerTest, ImmutableSamplerOnlyDescriptor) {
    TEST_DESCRIPTION("Bind a DescriptorSet with only an immutable sampler and make sure that we don't warn for no update.");
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    // Create the sampler first so it can actually be embedded as an immutable sampler in the
    // set layout. The previous version passed nullptr for pImmutableSamplers, so the binding
    // was an ordinary (never-updated) sampler binding and the test did not match its name.
    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    VkSampler sampler;
    VkResult err = vk::CreateSampler(m_device->device(), &sampler_ci, NULL, &sampler);
    ASSERT_VK_SUCCESS(err);

    OneOffDescriptorSet descriptor_set(m_device, {
                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, &sampler},
                                                 });

    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});

    // Binding a set whose only descriptor is an immutable sampler must not produce a
    // "descriptor never updated" warning.
    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                              &descriptor_set.set_, 0, nullptr);
    m_errorMonitor->VerifyNotFound();
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();

    // Destroy the sampler only after recording has finished; previously it was destroyed while
    // the command buffer referencing its set layout was still recording.
    vk::DestroySampler(m_device->device(), sampler, NULL);
}
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, EmptyDescriptorUpdateTest) {
    TEST_DESCRIPTION("Update last descriptor in a set that includes an empty binding");
    VkResult err;
    ASSERT_NO_FATAL_FAILURE(Init());
    if (IsPlatform(kNexusPlayer)) {
        printf("%s This test should not run on Nexus Player\n", kSkipPrefix);
        return;
    }
    m_errorMonitor->ExpectSuccess();

    // Create layout with two uniform buffer descriptors w/ empty binding between them
    // (binding 1 has descriptorCount == 0, which the spec permits; updating binding 2 must not
    // be confused by the hole).
    OneOffDescriptorSet ds(m_device, {
                                         {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                         {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0 /*!*/, 0, nullptr},
                                         {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                     });

    // Create a buffer to be used for update
    VkBufferCreateInfo buff_ci = {};
    buff_ci.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buff_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buff_ci.size = 256;
    buff_ci.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    VkBuffer buffer;
    err = vk::CreateBuffer(m_device->device(), &buff_ci, NULL, &buffer);
    ASSERT_VK_SUCCESS(err);

    // Have to bind memory to buffer before descriptor update
    VkMemoryAllocateInfo mem_alloc = {};
    mem_alloc.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    mem_alloc.pNext = NULL;
    mem_alloc.allocationSize = 512;  // one allocation for both buffers
    mem_alloc.memoryTypeIndex = 0;

    VkMemoryRequirements mem_reqs;
    vk::GetBufferMemoryRequirements(m_device->device(), buffer, &mem_reqs);
    // Pick any memory type compatible with the buffer; skip (and clean up) if none exists.
    bool pass = m_device->phy().set_memory_type(mem_reqs.memoryTypeBits, &mem_alloc, 0);
    if (!pass) {
        printf("%s Failed to allocate memory.\n", kSkipPrefix);
        vk::DestroyBuffer(m_device->device(), buffer, NULL);
        return;
    }
    // Make sure allocation is sufficiently large to accommodate buffer requirements
    if (mem_reqs.size > mem_alloc.allocationSize) {
        mem_alloc.allocationSize = mem_reqs.size;
    }

    VkDeviceMemory mem;
    err = vk::AllocateMemory(m_device->device(), &mem_alloc, NULL, &mem);
    ASSERT_VK_SUCCESS(err);
    err = vk::BindBufferMemory(m_device->device(), buffer, mem, 0);
    ASSERT_VK_SUCCESS(err);

    // Only update the descriptor at binding 2
    VkDescriptorBufferInfo buff_info = {};
    buff_info.buffer = buffer;
    buff_info.offset = 0;
    buff_info.range = VK_WHOLE_SIZE;
    VkWriteDescriptorSet descriptor_write = {};
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstBinding = 2;
    descriptor_write.descriptorCount = 1;
    descriptor_write.pTexelBufferView = nullptr;
    descriptor_write.pBufferInfo = &buff_info;
    descriptor_write.pImageInfo = nullptr;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_write.dstSet = ds.set_;

    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
    m_errorMonitor->VerifyNotFound();

    // Cleanup
    vk::FreeMemory(m_device->device(), mem, NULL);
    vk::DestroyBuffer(m_device->device(), buffer, NULL);
}
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, PushDescriptorNullDstSetTest) {
    TEST_DESCRIPTION("Use null dstSet in CmdPushDescriptorSetKHR");

    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());
    m_errorMonitor->ExpectSuccess();

    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());

    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 2;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    dsl_binding.pImmutableSamplers = NULL;

    const VkDescriptorSetLayoutObj ds_layout(m_device, {dsl_binding});

    // Create push descriptor set layout
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);

    // Use helper to create graphics pipeline
    CreatePipelineHelper helper(*this);
    helper.InitInfo();
    helper.InitState();
    helper.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &ds_layout});
    helper.CreateGraphicsPipeline();

    const float vbo_data[3] = {1.f, 0.f, 1.f};
    VkConstantBufferObj vbo(m_device, sizeof(vbo_data), (const void *)&vbo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);

    VkDescriptorBufferInfo buff_info;
    buff_info.buffer = vbo.handle();
    buff_info.offset = 0;
    buff_info.range = sizeof(vbo_data);
    VkWriteDescriptorSet descriptor_write = {};
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstBinding = 2;
    descriptor_write.descriptorCount = 1;
    descriptor_write.pTexelBufferView = nullptr;
    descriptor_write.pBufferInfo = &buff_info;
    descriptor_write.pImageInfo = nullptr;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    // dstSet is ignored by vkCmdPushDescriptorSetKHR; a null handle must not trigger a
    // validation error (use the idiomatic VK_NULL_HANDLE rather than a bare 0).
    descriptor_write.dstSet = VK_NULL_HANDLE;  // Should not cause a validation error

    // Find address of extension call and make the call
    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    assert(vkCmdPushDescriptorSetKHR != nullptr);
    m_commandBuffer->begin();

    // In Intel GPU, it needs to bind pipeline before push descriptor set.
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_);
    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, helper.pipeline_layout_.handle(), 0, 1,
                              &descriptor_write);

    // End recording before verifying; the original left the command buffer in the recording
    // state, which leaks the recording session at test teardown.
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, PushDescriptorUnboundSetTest) {
    TEST_DESCRIPTION("Ensure that no validation errors are produced for not bound push descriptor sets");
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s Push Descriptors Extension not supported, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    // Create descriptor set layout
    // The same binding description is reused for both the regular set (set 1) and the push
    // descriptor set (set 0).
    VkDescriptorSetLayoutBinding dsl_binding = {};
    dsl_binding.binding = 2;
    dsl_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    dsl_binding.descriptorCount = 1;
    dsl_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
    dsl_binding.pImmutableSamplers = NULL;

    OneOffDescriptorSet descriptor_set(m_device, {dsl_binding}, 0, nullptr, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
                                       nullptr);

    // Create push descriptor set layout
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {dsl_binding}, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);

    // Create PSO
    // The shader consumes binding 2 from BOTH sets, so the draw requires set 0 to be pushed
    // and set 1 to be bound.
    char const fsSource[] = R"glsl(
        #version 450
        layout(location=0) out vec4 x;
        layout(set=0) layout(binding=2) uniform foo1 { float x; } bar1;
        layout(set=1) layout(binding=2) uniform foo2 { float y; } bar2;
        void main(){
           x = vec4(bar1.x) + vec4(bar2.y);
        }
    )glsl";
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.InitState();
    // Now use the descriptor layouts to create a pipeline layout
    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&push_ds_layout, &descriptor_set.layout_});
    pipe.CreateGraphicsPipeline();

    const float bo_data[1] = {1.f};
    VkConstantBufferObj buffer(m_device, sizeof(bo_data), (const void *)&bo_data, VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);

    // Update descriptor set
    descriptor_set.WriteDescriptorBufferInfo(2, buffer.handle(), 0, sizeof(bo_data));
    descriptor_set.UpdateDescriptorSets();

    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    assert(vkCmdPushDescriptorSetKHR != nullptr);

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);

    // Push descriptors and bind descriptor set
    // Set 0 is pushed (never "bound" in the classic sense); set 1 is bound normally. The draw
    // must not report set 0 as unbound.
    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
                              descriptor_set.descriptor_writes.data());
    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 1, 1,
                              &descriptor_set.set_, 0, NULL);

    // No errors should be generated.
    vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
// This is a positive test. No failures are expected.
TEST_F(VkPositiveLayerTest, BindingPartiallyBound) {
    TEST_DESCRIPTION("Ensure that no validation errors for invalid descriptors if binding is PARTIALLY_BOUND");
    SetTargetApiVersion(VK_API_VERSION_1_1);
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    InitFramework(m_errorMonitor);
    // VK_EXT_descriptor_indexing depends on VK_KHR_maintenance3; both must be present.
    bool descriptor_indexing = DeviceExtensionSupported(gpu(), nullptr, VK_KHR_MAINTENANCE_3_EXTENSION_NAME);
    descriptor_indexing =
        descriptor_indexing && DeviceExtensionSupported(gpu(), nullptr, VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
    if (descriptor_indexing) {
        m_device_extension_names.push_back(VK_KHR_MAINTENANCE_3_EXTENSION_NAME);
        m_device_extension_names.push_back(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
    } else {
        printf("%s %s and/or %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_MAINTENANCE_3_EXTENSION_NAME,
               VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME);
        return;
    }

    // Query descriptorBindingPartiallyBound support via the features2 chain before device creation.
    VkPhysicalDeviceFeatures2KHR features2 = {};
    auto indexing_features = LvlInitStruct<VkPhysicalDeviceDescriptorIndexingFeaturesEXT>();
    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
    features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&indexing_features);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));

    if (!indexing_features.descriptorBindingPartiallyBound) {
        printf("Partially bound bindings not supported, skipping test\n");
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    // Binding 0 is a normal binding; binding 1 is PARTIALLY_BOUND and is deliberately left
    // without an update below. Accessing it behind a never-taken branch must not be an error.
    VkDescriptorBindingFlagsEXT ds_binding_flags[2] = {};
    VkDescriptorSetLayoutBindingFlagsCreateInfoEXT layout_createinfo_binding_flags = {};
    ds_binding_flags[0] = 0;
    // No Error
    ds_binding_flags[1] = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
    // Uncomment for Error
    // ds_binding_flags[1] = 0;

    layout_createinfo_binding_flags.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT;
    layout_createinfo_binding_flags.pNext = NULL;
    layout_createinfo_binding_flags.bindingCount = 2;
    layout_createinfo_binding_flags.pBindingFlags = ds_binding_flags;

    // Prepare descriptors
    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                           {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                       },
                                       0, &layout_createinfo_binding_flags, 0);
    const VkPipelineLayoutObj pipeline_layout(m_device, {&descriptor_set.layout_});
    uint32_t qfi = 0;
    VkBufferCreateInfo buffer_create_info = {};
    buffer_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buffer_create_info.size = 32;
    buffer_create_info.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buffer_create_info.queueFamilyIndexCount = 1;
    buffer_create_info.pQueueFamilyIndices = &qfi;

    VkBufferObj buffer;
    buffer.init(*m_device, buffer_create_info);

    VkDescriptorBufferInfo buffer_info[2] = {};
    buffer_info[0].buffer = buffer.handle();
    buffer_info[0].offset = 0;
    buffer_info[0].range = sizeof(uint32_t);

    // Index buffer whose single index (value 0, since memory contents are unspecified-but-small
    // is irrelevant here) drives the indexed draw below.
    VkBufferCreateInfo index_buffer_create_info = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
    index_buffer_create_info.size = sizeof(uint32_t);
    index_buffer_create_info.usage = VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    VkBufferObj index_buffer;
    index_buffer.init(*m_device, index_buffer_create_info);

    // Only update binding 0
    VkWriteDescriptorSet descriptor_writes[2] = {};
    descriptor_writes[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_writes[0].dstSet = descriptor_set.set_;
    descriptor_writes[0].dstBinding = 0;
    descriptor_writes[0].descriptorCount = 1;
    descriptor_writes[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_writes[0].pBufferInfo = buffer_info;
    vk::UpdateDescriptorSets(m_device->device(), 1, descriptor_writes, 0, NULL);

    // Shader reads binding 1 only on the else-branch; with PARTIALLY_BOUND set, the unwritten
    // binding must not produce a validation error.
    char const *shader_source = R"glsl(
        #version 450
        layout(set = 0, binding = 0) uniform foo_0 { int val; } doit;
        layout(set = 0, binding = 1) uniform foo_1 { int val; } readit;
        void main() {
            if (doit.val == 0)
                gl_Position = vec4(0.0);
            else
                gl_Position = vec4(readit.val);
        }
    )glsl";
    VkShaderObj vs(m_device, shader_source, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkPipelineObj pipe(m_device);
    pipe.AddShader(&vs);
    pipe.AddDefaultColorAttachment();
    pipe.CreateVKPipeline(pipeline_layout.handle(), m_renderPass);
    VkCommandBufferBeginInfo begin_info = {};
    begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    m_commandBuffer->begin(&begin_info);
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                              &descriptor_set.set_, 0, nullptr);
    vk::CmdBindIndexBuffer(m_commandBuffer->handle(), index_buffer.handle(), 0, VK_INDEX_TYPE_UINT32);
    VkViewport viewport = {0, 0, 16, 16, 0, 1};
    vk::CmdSetViewport(m_commandBuffer->handle(), 0, 1, &viewport);
    VkRect2D scissor = {{0, 0}, {16, 16}};
    vk::CmdSetScissor(m_commandBuffer->handle(), 0, 1, &scissor);
    vk::CmdDrawIndexed(m_commandBuffer->handle(), 1, 1, 0, 0, 0);
    vk::CmdEndRenderPass(m_commandBuffer->handle());
    m_commandBuffer->end();
    // Submit so that GPU-side validation (if any) also observes the partially-bound access.
    m_commandBuffer->QueueCommandBuffer();
    m_errorMonitor->VerifyNotFound();
}
TEST_F(VkPositiveLayerTest, PushDescriptorSetUpdatingSetNumber) {
    TEST_DESCRIPTION(
        "Ensure that no validation errors are produced when the push descriptor set number changes "
        "between two vk::CmdPushDescriptorSetKHR calls.");
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());

    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }

    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();

    // Create a descriptor to push
    const uint32_t buffer_data[4] = {4, 5, 6, 7};
    VkConstantBufferObj buffer_obj(
        m_device, sizeof(buffer_data), &buffer_data,
        VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT);
    ASSERT_TRUE(buffer_obj.initialized());

    VkDescriptorBufferInfo buffer_info = {buffer_obj.handle(), 0, VK_WHOLE_SIZE};

    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    ASSERT_TRUE(vkCmdPushDescriptorSetKHR != nullptr);

    const VkDescriptorSetLayoutBinding ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                       nullptr};
    const VkDescriptorSetLayoutBinding ds_binding_1 = {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                       nullptr};
    const VkDescriptorSetLayoutObj ds_layout(m_device, {ds_binding_0, ds_binding_1});
    ASSERT_TRUE(ds_layout.initialized());

    const VkDescriptorSetLayoutBinding push_ds_binding_0 = {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT,
                                                            nullptr};
    const VkDescriptorSetLayoutObj push_ds_layout(m_device, {push_ds_binding_0},
                                                  VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
    ASSERT_TRUE(push_ds_layout.initialized());

    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);

    // Two pipelines, each with a pipeline layout placing the push descriptor layout at a
    // DIFFERENT set number; both are recorded into the same command buffer.
    VkPipelineObj pipe0(m_device);
    VkPipelineObj pipe1(m_device);
    {
        // Note: the push descriptor set is set number 2.
        const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &push_ds_layout, &ds_layout});
        ASSERT_TRUE(pipeline_layout.initialized());

        char const *fsSource = R"glsl(
            #version 450
            layout(location=0) out vec4 x;
            layout(set=2) layout(binding=0) uniform foo { vec4 y; } bar;
            void main(){
               x = bar.y;
            }
        )glsl";

        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
        VkPipelineObj &pipe = pipe0;
        pipe.SetViewport(m_viewports);
        pipe.SetScissor(m_scissors);
        pipe.AddShader(&vs);
        pipe.AddShader(&fs);
        pipe.AddDefaultColorAttachment();
        pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());

        // dstSet/dstBinding in the template write come from the helper; dstSet is ignored for
        // push descriptors, so a default-constructed DescriptorSet is sufficient here.
        const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
            vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);

        // Note: pushing to descriptor set number 2.
        vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 2, 1,
                                  &descriptor_write);
        vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    }
    m_errorMonitor->VerifyNotFound();

    {
        // Note: the push descriptor set is now set number 3.
        const VkPipelineLayoutObj pipeline_layout(m_device, {&ds_layout, &ds_layout, &ds_layout, &push_ds_layout});
        ASSERT_TRUE(pipeline_layout.initialized());

        const VkWriteDescriptorSet descriptor_write = vk_testing::Device::write_descriptor_set(
            vk_testing::DescriptorSet(), 0, 0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, &buffer_info);

        char const *fsSource = R"glsl(
            #version 450
            layout(location=0) out vec4 x;
            layout(set=3) layout(binding=0) uniform foo { vec4 y; } bar;
            void main(){
               x = bar.y;
            }
        )glsl";

        VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
        VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
        VkPipelineObj &pipe = pipe1;
        pipe.SetViewport(m_viewports);
        pipe.SetScissor(m_scissors);
        pipe.AddShader(&vs);
        pipe.AddShader(&fs);
        pipe.AddDefaultColorAttachment();
        pipe.CreateVKPipeline(pipeline_layout.handle(), renderPass());

        vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.handle());

        // Note: now pushing to descriptor set number 3. Changing the push set number between
        // two pushes in the same command buffer must not produce validation errors.
        vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 3, 1,
                                  &descriptor_write);
        vk::CmdDraw(m_commandBuffer->handle(), 3, 1, 0, 0);
    }
    m_errorMonitor->VerifyNotFound();

    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
TEST_F(VkPositiveLayerTest, DynamicOffsetWithInactiveBinding) {
    // Create a descriptorSet w/ dynamic descriptors where 1 binding is inactive
    // We previously had a bug where dynamic offset of inactive bindings was still being used
    m_errorMonitor->ExpectSuccess();
    ASSERT_NO_FATAL_FAILURE(Init());
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    // Bindings are deliberately listed out of order (2, 0, 1) to also exercise
    // binding-index vs. declaration-order handling in the layers.
    OneOffDescriptorSet descriptor_set(m_device,
                                       {
                                           {2, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                           {0, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                           {1, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, nullptr},
                                       });
    // Create two buffers to update the descriptors with
    // The first will be 2k and used for bindings 0 & 1, the second is 1k for binding 2
    uint32_t qfi = 0;
    VkBufferCreateInfo buffCI = {};
    buffCI.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buffCI.size = 2048;
    buffCI.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    buffCI.queueFamilyIndexCount = 1;
    buffCI.pQueueFamilyIndices = &qfi;
    VkBufferObj dynamic_uniform_buffer_1, dynamic_uniform_buffer_2;
    dynamic_uniform_buffer_1.init(*m_device, buffCI);
    buffCI.size = 1024;
    dynamic_uniform_buffer_2.init(*m_device, buffCI);
    // Update descriptors: bindings 0 and 1 alias disjoint regions of the 2k
    // buffer; binding 2 points at the separate 1k buffer.
    const uint32_t BINDING_COUNT = 3;
    VkDescriptorBufferInfo buff_info[BINDING_COUNT] = {};
    buff_info[0].buffer = dynamic_uniform_buffer_1.handle();
    buff_info[0].offset = 0;
    buff_info[0].range = 256;
    buff_info[1].buffer = dynamic_uniform_buffer_1.handle();
    buff_info[1].offset = 256;
    buff_info[1].range = 512;
    buff_info[2].buffer = dynamic_uniform_buffer_2.handle();
    buff_info[2].offset = 0;
    buff_info[2].range = 512;
    // One write covering all three consecutive bindings starting at binding 0.
    VkWriteDescriptorSet descriptor_write;
    memset(&descriptor_write, 0, sizeof(descriptor_write));
    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
    descriptor_write.dstSet = descriptor_set.set_;
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = BINDING_COUNT;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
    descriptor_write.pBufferInfo = buff_info;
    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, NULL);
    m_commandBuffer->begin();
    m_commandBuffer->BeginRenderPass(m_renderPassBeginInfo);
    // Create PSO to be used for draw-time errors below.
    // The fragment shader only references bindings 0 and 2, making binding 1
    // statically inactive for this pipeline.
    char const *fsSource = R"glsl(
        #version 450
        layout(location=0) out vec4 x;
        layout(set=0) layout(binding=0) uniform foo1 { int x; int y; } bar1;
        layout(set=0) layout(binding=2) uniform foo2 { int x; int y; } bar2;
        void main(){
           x = vec4(bar1.y) + vec4(bar2.y);
        }
    )glsl";
    VkShaderObj vs(m_device, bindStateVertShaderText, VK_SHADER_STAGE_VERTEX_BIT, this);
    VkShaderObj fs(m_device, fsSource, VK_SHADER_STAGE_FRAGMENT_BIT, this);
    CreatePipelineHelper pipe(*this);
    pipe.InitInfo();
    pipe.InitState();
    pipe.shader_stages_ = {vs.GetStageCreateInfo(), fs.GetStageCreateInfo()};
    pipe.pipeline_layout_ = VkPipelineLayoutObj(m_device, {&descriptor_set.layout_});
    pipe.CreateGraphicsPipeline();
    vk::CmdBindPipeline(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_);
    // This update should succeed, but offset of inactive binding 1 oversteps binding 2 buffer size
    // we used to have a bug in this case.
    // Because binding 1 is unused by the shader, its (intentionally large)
    // dynamic offset must not be validated against any buffer at draw time.
    uint32_t dyn_off[BINDING_COUNT] = {0, 1024, 256};
    vk::CmdBindDescriptorSets(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipe.pipeline_layout_.handle(), 0, 1,
                              &descriptor_set.set_, BINDING_COUNT, dyn_off);
    m_commandBuffer->Draw(1, 0, 0, 0);
    m_errorMonitor->VerifyNotFound();
    m_commandBuffer->EndRenderPass();
    m_commandBuffer->end();
}
TEST_F(VkPositiveLayerTest, CreateDescriptorSetBindingWithIgnoredSamplers) {
    TEST_DESCRIPTION("Test that layers conditionally do ignore the pImmutableSamplers on vkCreateDescriptorSetLayout");
    // Spec: pImmutableSamplers is consulted only when descriptorType is
    // SAMPLER or COMBINED_IMAGE_SAMPLER; for every other type it must be
    // ignored, so a garbage pointer there must not be dereferenced.
    bool prop2_found = false;
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        prop2_found = true;
    } else {
        printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    bool push_descriptor_found = false;
    if (prop2_found && DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
        // In addition to the extension being supported we need to have at least one available
        // Some implementations report an invalid maxPushDescriptors of 0
        push_descriptor_found = GetPushDescriptorProperties(instance(), gpu()).maxPushDescriptors > 0;
    } else {
        printf("%s %s Extension not supported, skipping push descriptor sub-tests\n", kSkipPrefix,
               VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    }
    ASSERT_NO_FATAL_FAILURE(InitState());
    // Build a pointer-sized poison value; if a layer ever dereferences it the
    // test will (hopefully) crash rather than silently pass.
    const uint64_t fake_address_64 = 0xCDCDCDCDCDCDCDCD;
    const uint64_t fake_address_32 = 0xCDCDCDCD;
    const void *fake_pointer =
        sizeof(void *) == 8 ? reinterpret_cast<void *>(fake_address_64) : reinterpret_cast<void *>(fake_address_32);
    const VkSampler *hopefully_undereferencable_pointer = reinterpret_cast<const VkSampler *>(fake_pointer);
    // regular descriptors
    m_errorMonitor->ExpectSuccess();
    {
        // Every non-sampler descriptor type, each with the poison pointer in
        // pImmutableSamplers — layout creation must still succeed cleanly.
        const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
            {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {6, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {7, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            {8, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
        };
        const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr, 0,
                                                       static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
        VkDescriptorSetLayout dsl;
        const VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
        ASSERT_VK_SUCCESS(err);
        vk::DestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
    }
    m_errorMonitor->VerifyNotFound();
    if (push_descriptor_found) {
        // push descriptors
        // Same check, but with the PUSH_DESCRIPTOR layout flag set. Dynamic
        // buffer types are omitted because they are not allowed in push
        // descriptor set layouts.
        m_errorMonitor->ExpectSuccess();
        {
            const VkDescriptorSetLayoutBinding non_sampler_bindings[] = {
                {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {1, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {3, VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {4, VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {5, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
                {6, VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 1, VK_SHADER_STAGE_FRAGMENT_BIT, hopefully_undereferencable_pointer},
            };
            const VkDescriptorSetLayoutCreateInfo dslci = {VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO, nullptr,
                                                           VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
                                                           static_cast<uint32_t>(size(non_sampler_bindings)), non_sampler_bindings};
            VkDescriptorSetLayout dsl;
            const VkResult err = vk::CreateDescriptorSetLayout(m_device->device(), &dslci, nullptr, &dsl);
            ASSERT_VK_SUCCESS(err);
            vk::DestroyDescriptorSetLayout(m_device->device(), dsl, nullptr);
        }
        m_errorMonitor->VerifyNotFound();
    }
}
TEST_F(VkPositiveLayerTest, PushingDescriptorSetWithImmutableSampler) {
    TEST_DESCRIPTION("Use a push descriptor with an immutable sampler.");
    if (InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    } else {
        printf("%s Did not find VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME; skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (DeviceExtensionSupported(gpu(), nullptr, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME)) {
        m_device_extension_names.push_back(VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
    } else {
        printf("%s %s Extension not supported, skipping tests\n", kSkipPrefix, VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME);
        return;
    }
    auto push_descriptor_prop = GetPushDescriptorProperties(instance(), gpu());
    if (push_descriptor_prop.maxPushDescriptors < 1) {
        // Some implementations report an invalid maxPushDescriptors of 0
        printf("%s maxPushDescriptors is zero, skipping tests\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState());
    VkSamplerCreateInfo sampler_ci = SafeSaneSamplerCreateInfo();
    vk_testing::Sampler sampler;
    sampler.init(*m_device, sampler_ci);
    VkSampler sampler_handle = sampler.handle();
    VkImageObj image(m_device);
    image.InitNoLayout(32, 32, 1, VK_FORMAT_R8G8B8A8_UNORM, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_TILING_OPTIMAL, 0);
    VkImageView imageView = image.targetView(VK_FORMAT_R8G8B8A8_UNORM);
    auto vkCmdPushDescriptorSetKHR =
        (PFN_vkCmdPushDescriptorSetKHR)vk::GetDeviceProcAddr(m_device->device(), "vkCmdPushDescriptorSetKHR");
    // Binding 0 carries the sampler as an *immutable* sampler in the layout.
    std::vector<VkDescriptorSetLayoutBinding> ds_bindings = {
        {0, VK_DESCRIPTOR_TYPE_SAMPLER, 1, VK_SHADER_STAGE_ALL, &sampler_handle}};
    OneOffDescriptorSet descriptor_set(m_device, ds_bindings);
    VkDescriptorSetLayoutObj push_dsl(m_device, ds_bindings, VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR);
    VkPipelineLayoutObj pipeline_layout(m_device, {&push_dsl});
    VkDescriptorImageInfo img_info = {};
    img_info.sampler = sampler_handle;
    img_info.imageView = imageView;
    img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    VkWriteDescriptorSet descriptor_write = LvlInitStruct<VkWriteDescriptorSet>();
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = 1;
    descriptor_write.pTexelBufferView = nullptr;
    descriptor_write.pBufferInfo = nullptr;
    descriptor_write.pImageInfo = &img_info;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
    descriptor_write.dstSet = descriptor_set.set_;
    // Pushing a SAMPLER write to a binding with an immutable sampler is legal;
    // the write's sampler is ignored in favor of the immutable one and no
    // validation error should fire.
    m_errorMonitor->ExpectSuccess();
    m_commandBuffer->begin();
    vkCmdPushDescriptorSetKHR(m_commandBuffer->handle(), VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout.handle(), 0, 1,
                              &descriptor_write);
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
TEST_F(VkPositiveLayerTest, BindVertexBuffers2EXTNullDescriptors) {
    // Fixed typo: "wih" -> "with" in the test description string.
    TEST_DESCRIPTION("Test nullDescriptor works with CmdBindVertexBuffers variants");
    if (!InstanceExtensionSupported(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
        printf("%s Did not find required instance extension %s; skipped.\n", kSkipPrefix,
               VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
        return;
    }
    m_instance_extension_names.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_ROBUSTNESS_2_EXTENSION_NAME)) {
        printf("%s Extension %s not supported by device; skipped.\n", kSkipPrefix, VK_EXT_ROBUSTNESS_2_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_EXT_ROBUSTNESS_2_EXTENSION_NAME);
    if (!DeviceExtensionSupported(gpu(), nullptr, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME)) {
        printf("%s Extension %s is not supported; skipped.\n", kSkipPrefix, VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
        return;
    }
    m_device_extension_names.push_back(VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME);
    // nullDescriptor (robustness2) is required for VK_NULL_HANDLE resources to
    // be legal in descriptor writes and vertex buffer binds.
    auto robustness2_features = LvlInitStruct<VkPhysicalDeviceRobustness2FeaturesEXT>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&robustness2_features);
    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    if (!robustness2_features.nullDescriptor) {
        printf("%s nullDescriptor feature not supported, skipping test\n", kSkipPrefix);
        return;
    }
    // Fetched via instance proc addr because the device does not exist yet;
    // the loader trampoline dispatches it to the right device.
    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT =
        (PFN_vkCmdBindVertexBuffers2EXT)vk::GetInstanceProcAddr(instance(), "vkCmdBindVertexBuffers2EXT");
    ASSERT_TRUE(vkCmdBindVertexBuffers2EXT != nullptr);
    VkCommandPoolCreateFlags pool_flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2, pool_flags));
    ASSERT_NO_FATAL_FAILURE(InitViewport());
    ASSERT_NO_FATAL_FAILURE(InitRenderTarget());
    m_errorMonitor->ExpectSuccess();
    // With nullDescriptor enabled, writing VK_NULL_HANDLE image views, buffers
    // and buffer views into a descriptor set must not trigger errors.
    OneOffDescriptorSet descriptor_set(m_device, {
                                                     {0, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     {1, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                     {2, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 1, VK_SHADER_STAGE_ALL, nullptr},
                                                 });
    descriptor_set.WriteDescriptorImageInfo(0, VK_NULL_HANDLE, VK_NULL_HANDLE, VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE);
    descriptor_set.WriteDescriptorBufferInfo(1, VK_NULL_HANDLE, 0, VK_WHOLE_SIZE, VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
    VkBufferView buffer_view = VK_NULL_HANDLE;
    descriptor_set.WriteDescriptorBufferView(2, buffer_view, VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
    descriptor_set.UpdateDescriptorSets();
    descriptor_set.descriptor_writes.clear();
    m_commandBuffer->begin();
    // Binding a VK_NULL_HANDLE vertex buffer must succeed with both the core
    // entry point and the extended-dynamic-state variant.
    VkBuffer buffer = VK_NULL_HANDLE;
    VkDeviceSize offset = 0;
    vk::CmdBindVertexBuffers(m_commandBuffer->handle(), 0, 1, &buffer, &offset);
    vkCmdBindVertexBuffers2EXT(m_commandBuffer->handle(), 0, 1, &buffer, &offset, nullptr, nullptr);
    m_commandBuffer->end();
    m_errorMonitor->VerifyNotFound();
}
TEST_F(VkPositiveLayerTest, CopyMutableDescriptors) {
    TEST_DESCRIPTION("Copy mutable descriptors.");
    AddRequiredExtensions(VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME);
    ASSERT_NO_FATAL_FAILURE(InitFramework(m_errorMonitor));
    if (!AreRequestedExtensionsEnabled()) {
        printf("%s Extension %s is not supported, skipping test.\n", kSkipPrefix, VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME);
        return;
    }
    auto mutable_descriptor_type_features = LvlInitStruct<VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE>();
    auto features2 = LvlInitStruct<VkPhysicalDeviceFeatures2KHR>(&mutable_descriptor_type_features);
    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR =
        (PFN_vkGetPhysicalDeviceFeatures2KHR)vk::GetInstanceProcAddr(instance(), "vkGetPhysicalDeviceFeatures2KHR");
    // Guard the function pointer before calling it, matching the pattern used
    // by the other tests in this file (previously dereferenced unchecked).
    ASSERT_TRUE(vkGetPhysicalDeviceFeatures2KHR != nullptr);
    vkGetPhysicalDeviceFeatures2KHR(gpu(), &features2);
    if (mutable_descriptor_type_features.mutableDescriptorType == VK_FALSE) {
        printf("%s mutableDescriptorType feature not supported. Skipped.\n", kSkipPrefix);
        return;
    }
    ASSERT_NO_FATAL_FAILURE(InitState(nullptr, &features2));
    m_errorMonitor->ExpectSuccess();
    // Binding 0 of each set is MUTABLE and may hold either a uniform buffer or
    // a combined image sampler; list 1 is empty because binding 1 is a plain
    // (non-mutable) uniform buffer.
    VkDescriptorType descriptor_types[] = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER};
    VkMutableDescriptorTypeListVALVE mutable_descriptor_type_lists[2] = {};
    mutable_descriptor_type_lists[0].descriptorTypeCount = 2;
    mutable_descriptor_type_lists[0].pDescriptorTypes = descriptor_types;
    mutable_descriptor_type_lists[1].descriptorTypeCount = 0;
    mutable_descriptor_type_lists[1].pDescriptorTypes = nullptr;
    VkMutableDescriptorTypeCreateInfoVALVE mdtci = LvlInitStruct<VkMutableDescriptorTypeCreateInfoVALVE>();
    mdtci.mutableDescriptorTypeListCount = 2;
    mdtci.pMutableDescriptorTypeLists = mutable_descriptor_type_lists;
    VkDescriptorPoolSize pool_sizes[2] = {};
    pool_sizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    pool_sizes[0].descriptorCount = 2;
    pool_sizes[1].type = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE;
    pool_sizes[1].descriptorCount = 2;
    VkDescriptorPoolCreateInfo ds_pool_ci = LvlInitStruct<VkDescriptorPoolCreateInfo>(&mdtci);
    ds_pool_ci.maxSets = 2;
    ds_pool_ci.poolSizeCount = 2;
    ds_pool_ci.pPoolSizes = pool_sizes;
    vk_testing::DescriptorPool pool;
    pool.init(*m_device, ds_pool_ci);
    VkDescriptorSetLayoutBinding bindings[2] = {};
    bindings[0].binding = 0;
    bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE;
    bindings[0].descriptorCount = 1;
    bindings[0].stageFlags = VK_SHADER_STAGE_ALL;
    bindings[0].pImmutableSamplers = nullptr;
    bindings[1].binding = 1;
    bindings[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    bindings[1].descriptorCount = 1;
    bindings[1].stageFlags = VK_SHADER_STAGE_ALL;
    bindings[1].pImmutableSamplers = nullptr;
    VkDescriptorSetLayoutCreateInfo create_info = LvlInitStruct<VkDescriptorSetLayoutCreateInfo>(&mdtci);
    create_info.bindingCount = 2;
    create_info.pBindings = bindings;
    vk_testing::DescriptorSetLayout set_layout;
    set_layout.init(*m_device, create_info);
    VkDescriptorSetLayout set_layout_handle = set_layout.handle();
    VkDescriptorSetLayout layouts[2] = {set_layout_handle, set_layout_handle};
    VkDescriptorSetAllocateInfo allocate_info = LvlInitStruct<VkDescriptorSetAllocateInfo>();
    allocate_info.descriptorPool = pool.handle();
    allocate_info.descriptorSetCount = 2;
    allocate_info.pSetLayouts = layouts;
    VkDescriptorSet descriptor_sets[2];
    // Check the allocation result so later writes don't operate on garbage
    // handles (previously ignored; sibling tests assert VK_SUCCESS).
    const VkResult alloc_err = vk::AllocateDescriptorSets(device(), &allocate_info, descriptor_sets);
    ASSERT_VK_SUCCESS(alloc_err);
    VkBufferCreateInfo buffer_ci = LvlInitStruct<VkBufferCreateInfo>();
    buffer_ci.size = 32;
    buffer_ci.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
    VkBufferObj buffer;
    buffer.init(*m_device, buffer_ci);
    VkDescriptorBufferInfo buffer_info = {};
    buffer_info.buffer = buffer.handle();
    buffer_info.offset = 0;
    buffer_info.range = buffer_ci.size;
    // Write a uniform buffer into the MUTABLE binding of set 0 ...
    VkWriteDescriptorSet descriptor_write = LvlInitStruct<VkWriteDescriptorSet>();
    descriptor_write.dstSet = descriptor_sets[0];
    descriptor_write.dstBinding = 0;
    descriptor_write.descriptorCount = 1;
    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    descriptor_write.pBufferInfo = &buffer_info;
    vk::UpdateDescriptorSets(m_device->device(), 1, &descriptor_write, 0, nullptr);
    // ... then copy it into the plain UNIFORM_BUFFER binding of set 1; the
    // active type (uniform buffer) matches, so no error is expected.
    VkCopyDescriptorSet copy_set = LvlInitStruct<VkCopyDescriptorSet>();
    copy_set.srcSet = descriptor_sets[0];
    copy_set.srcBinding = 0;
    copy_set.dstSet = descriptor_sets[1];
    copy_set.dstBinding = 1;
    copy_set.descriptorCount = 1;
    vk::UpdateDescriptorSets(m_device->device(), 0, nullptr, 1, &copy_set);
    m_errorMonitor->VerifyNotFound();
}