/*
 * Shader runner which uses libvkd3d-shader to compile HLSL -> D3D bytecode -> SPIR-V
 *
 * Copyright 2020-2022 Zebediah Figura for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

#ifndef _WIN32

#define VK_NO_PROTOTYPES
#define VKD3D_TEST_NO_DEFS
#include "config.h"
#include "vulkan/vulkan.h"
#include "vkd3d_shader.h"
#include "vkd3d.h"
#include "shader_runner.h"
#include "vkd3d_test.h"

struct vulkan_resource
{
    struct resource r;

    VkBuffer buffer;
    VkImage image;
    VkImageView view;
    VkDeviceMemory memory;

    uint32_t binding;
};

static struct vulkan_resource *vulkan_resource(struct resource *r)
{
    return CONTAINING_RECORD(r, struct vulkan_resource, r);
}

#define DECLARE_VK_PFN(name) PFN_##name name;
DECLARE_VK_PFN(vkGetInstanceProcAddr)

struct vulkan_shader_runner
{
    struct shader_runner r;

    VkInstance instance;
    VkPhysicalDevice phys_device;
    VkDevice device;
    VkQueue queue;
    VkCommandPool command_pool;
    VkCommandBuffer cmd_buffer;
    VkDescriptorPool descriptor_pool;

    struct vulkan_sampler
    {
        VkSampler vk_sampler;
        uint32_t binding;
    } samplers[MAX_SAMPLERS];

    DECLARE_VK_PFN(vkCreateInstance);
#define VK_INSTANCE_PFN DECLARE_VK_PFN
#define VK_DEVICE_PFN DECLARE_VK_PFN
#include "vulkan_procs.h"
};

static struct vulkan_shader_runner *vulkan_shader_runner(struct shader_runner *r)
{
    return CONTAINING_RECORD(r, struct vulkan_shader_runner, r);
}

#define VK_CALL(f) (runner->f)

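/* Command buffer helpers. Work is recorded into a single primary command buffer
 * and submitted synchronously: end_command_buffer() submits to the queue and
 * waits for it to go idle, so no fences or semaphores are needed. */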
static void begin_command_buffer(struct vulkan_shader_runner *runner)
{
    VkCommandBufferBeginInfo buffer_begin_desc = {.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};

    VK_CALL(vkBeginCommandBuffer(runner->cmd_buffer, &buffer_begin_desc));
}

static void end_command_buffer(struct vulkan_shader_runner *runner)
{
    VkSubmitInfo submit_desc = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO};

    VK_CALL(vkEndCommandBuffer(runner->cmd_buffer));

    submit_desc.commandBufferCount = 1;
    submit_desc.pCommandBuffers = &runner->cmd_buffer;
    VK_CALL(vkQueueSubmit(runner->queue, 1, &submit_desc, VK_NULL_HANDLE));
    VK_CALL(vkQueueWaitIdle(runner->queue));
}

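/* Transition the single colour subresource of an image between layouts. For
 * simplicity this uses a full ALL_COMMANDS barrier with MEMORY_READ|WRITE
 * access masks rather than computing precise stage and access masks per use. */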
static void transition_image_layout(struct vulkan_shader_runner *runner,
        VkImage image, VkImageLayout src_layout, VkImageLayout dst_layout)
{
    VkImageMemoryBarrier barrier = {.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER};

    barrier.srcAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
    barrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
    barrier.oldLayout = src_layout;
    barrier.newLayout = dst_layout;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = image;
    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.baseMipLevel = 0;
    barrier.subresourceRange.levelCount = 1;
    barrier.subresourceRange.baseArrayLayer = 0;
    barrier.subresourceRange.layerCount = 1;

    VK_CALL(vkCmdPipelineBarrier(runner->cmd_buffer, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0, 0, NULL, 0, NULL, 1, &barrier));
}

static unsigned int select_vulkan_memory_type(const struct vulkan_shader_runner *runner,
        uint32_t memory_type_mask, VkMemoryPropertyFlags required_flags)
{
    VkPhysicalDeviceMemoryProperties memory_info;
    unsigned int i;

    VK_CALL(vkGetPhysicalDeviceMemoryProperties(runner->phys_device, &memory_info));

    for (i = 0; i < memory_info.memoryTypeCount; ++i)
    {
        if (!(memory_type_mask & (1u << i)))
            continue;
        if ((memory_info.memoryTypes[i].propertyFlags & required_flags) == required_flags)
            return i;
    }

    fatal_error("No valid memory types found matching mask %#x, property flags %#x.\n",
            memory_type_mask, required_flags);
}

static VkDeviceMemory allocate_memory(const struct vulkan_shader_runner *runner,
        const VkMemoryRequirements *memory_reqs, VkMemoryPropertyFlags flags)
{
    VkMemoryAllocateInfo alloc_info = {.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
    VkDeviceMemory vk_memory;
    VkResult vr;

    alloc_info.allocationSize = memory_reqs->size;
    alloc_info.memoryTypeIndex = select_vulkan_memory_type(runner,
            memory_reqs->memoryTypeBits, flags);
    vr = VK_CALL(vkAllocateMemory(runner->device, &alloc_info, NULL, &vk_memory));
    ok(vr == VK_SUCCESS, "Got unexpected VkResult %d.\n", vr);

    return vk_memory;
}

static VkBuffer create_buffer(const struct vulkan_shader_runner *runner, VkDeviceSize size,
        VkBufferUsageFlags usage, VkMemoryPropertyFlags memory_flags, VkDeviceMemory *memory)
{
    VkBufferCreateInfo buffer_info = {.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO};
    VkMemoryRequirements memory_reqs;
    VkBuffer buffer;

    buffer_info.size = size;
    buffer_info.usage = usage;
    buffer_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;

    VK_CALL(vkCreateBuffer(runner->device, &buffer_info, NULL, &buffer));
    VK_CALL(vkGetBufferMemoryRequirements(runner->device, buffer, &memory_reqs));
    *memory = allocate_memory(runner, &memory_reqs, memory_flags);
    VK_CALL(vkBindBufferMemory(runner->device, buffer, *memory, 0));

    return buffer;
}

static VkImage create_2d_image(const struct vulkan_shader_runner *runner, uint32_t width, uint32_t height,
        VkImageUsageFlags usage, VkFormat format, VkDeviceMemory *memory)
{
    VkImageCreateInfo image_info = {.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
    VkMemoryRequirements memory_reqs;
    VkImage image;

    image_info.imageType = VK_IMAGE_TYPE_2D;
    image_info.format = format;
    image_info.extent.width = width;
    image_info.extent.height = height;
    image_info.extent.depth = 1;
    image_info.mipLevels = 1;
    image_info.arrayLayers = 1;
    image_info.samples = VK_SAMPLE_COUNT_1_BIT;
    image_info.tiling = VK_IMAGE_TILING_OPTIMAL;
    image_info.usage = usage;
    image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    VK_CALL(vkCreateImage(runner->device, &image_info, NULL, &image));

    VK_CALL(vkGetImageMemoryRequirements(runner->device, image, &memory_reqs));
    *memory = allocate_memory(runner, &memory_reqs, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
    VK_CALL(vkBindImageMemory(runner->device, image, *memory, 0));

    return image;
}

static VkImageView create_2d_image_view(const struct vulkan_shader_runner *runner, VkImage image, VkFormat format)
{
    VkImageViewCreateInfo view_info = {.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
    VkImageView view;

    view_info.image = image;
    view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
    view_info.format = format;
    view_info.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
    view_info.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
    view_info.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
    view_info.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
    view_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    view_info.subresourceRange.baseMipLevel = 0;
    view_info.subresourceRange.levelCount = 1;
    view_info.subresourceRange.baseArrayLayer = 0;
    view_info.subresourceRange.layerCount = 1;

    VK_CALL(vkCreateImageView(runner->device, &view_info, NULL, &view));
    return view;
}

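/* Create a Vulkan resource for a test resource description: render targets and
 * textures/UAVs become 2D images (texture data is uploaded through a
 * host-visible staging buffer), and vertex buffers become host-visible
 * VkBuffers filled directly from the test data. */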
static struct resource *vulkan_runner_create_resource(struct shader_runner *r, const struct resource_params *params)
{
    struct vulkan_shader_runner *runner = vulkan_shader_runner(r);

    struct vulkan_resource *resource;
    VkDevice device = runner->device;
    VkBufferImageCopy region = {0};
    VkDeviceMemory staging_memory;
    VkBuffer staging_buffer;
    VkFormat format;
    void *data;

    resource = calloc(1, sizeof(*resource));
    init_resource(&resource->r, params);

    switch (params->type)
    {
        case RESOURCE_TYPE_RENDER_TARGET:
            format = vkd3d_get_vk_format(params->format);

            resource->image = create_2d_image(runner, params->width, params->height,
                    VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, format, &resource->memory);
            resource->view = create_2d_image_view(runner, resource->image, format);

            begin_command_buffer(runner);
            transition_image_layout(runner, resource->image,
                    VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
            end_command_buffer(runner);
            break;

        case RESOURCE_TYPE_TEXTURE:
        case RESOURCE_TYPE_UAV:
        {
            VkImageUsageFlagBits usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
            VkImageLayout layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

            format = vkd3d_get_vk_format(params->format);

            if (params->type == RESOURCE_TYPE_UAV)
            {
                layout = VK_IMAGE_LAYOUT_GENERAL;
                usage |= VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
            }

            resource->image = create_2d_image(runner, params->width, params->height, usage, format, &resource->memory);
            resource->view = create_2d_image_view(runner, resource->image, format);

            staging_buffer = create_buffer(runner, params->data_size,
                    VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, &staging_memory);
            VK_CALL(vkMapMemory(device, staging_memory, 0, VK_WHOLE_SIZE, 0, &data));
            memcpy(data, params->data, params->data_size);
            VK_CALL(vkUnmapMemory(device, staging_memory));

            begin_command_buffer(runner);

            transition_image_layout(runner, resource->image,
                    VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

            region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            region.imageSubresource.layerCount = 1;
            region.imageExtent.width = params->width;
            region.imageExtent.height = params->height;
            region.imageExtent.depth = 1;
            VK_CALL(vkCmdCopyBufferToImage(runner->cmd_buffer, staging_buffer, resource->image,
                    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region));

            transition_image_layout(runner, resource->image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, layout);

            end_command_buffer(runner);

            VK_CALL(vkFreeMemory(device, staging_memory, NULL));
            VK_CALL(vkDestroyBuffer(device, staging_buffer, NULL));
            break;
        }

        case RESOURCE_TYPE_VERTEX_BUFFER:
            resource->buffer = create_buffer(runner, params->data_size, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
                    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, &resource->memory);

            VK_CALL(vkMapMemory(device, resource->memory, 0, VK_WHOLE_SIZE, 0, &data));
            memcpy(data, params->data, params->data_size);
            VK_CALL(vkUnmapMemory(device, resource->memory));
            break;
    }

    return &resource->r;
}

static void vulkan_runner_destroy_resource(struct shader_runner *r, struct resource *res)
{
    struct vulkan_shader_runner *runner = vulkan_shader_runner(r);
    struct vulkan_resource *resource = vulkan_resource(res);
    VkDevice device = runner->device;

    if (resource->memory)
        VK_CALL(vkFreeMemory(device, resource->memory, NULL));
    if (resource->image)
        VK_CALL(vkDestroyImage(device, resource->image, NULL));
    if (resource->view)
        VK_CALL(vkDestroyImageView(device, resource->view, NULL));
    if (resource->buffer)
        VK_CALL(vkDestroyBuffer(device, resource->buffer, NULL));

    free(resource);
}

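/* Compile the HLSL source in two steps: HLSL -> D3D bytecode (DXBC/TPF), then
 * DXBC -> SPIR-V, passing the descriptor bindings and push-constant layout
 * assigned by create_descriptor_set_layout() through the interface info. */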
static bool compile_shader(const struct vulkan_shader_runner *runner, const char *source, const char *type,
        struct vkd3d_shader_code *dxbc, struct vkd3d_shader_code *spirv)
{
    struct vkd3d_shader_spirv_target_info spirv_info = {.type = VKD3D_SHADER_STRUCTURE_TYPE_SPIRV_TARGET_INFO};
    struct vkd3d_shader_interface_info interface_info = {.type = VKD3D_SHADER_STRUCTURE_TYPE_INTERFACE_INFO};
    struct vkd3d_shader_hlsl_source_info hlsl_info = {.type = VKD3D_SHADER_STRUCTURE_TYPE_HLSL_SOURCE_INFO};
    struct vkd3d_shader_compile_info info = {.type = VKD3D_SHADER_STRUCTURE_TYPE_COMPILE_INFO};
    struct vkd3d_shader_resource_binding bindings[MAX_RESOURCES + MAX_SAMPLERS];
    struct vkd3d_shader_push_constant_buffer push_constants;
    struct vkd3d_shader_resource_binding *binding;
    char profile[7];
    unsigned int i;
    char *messages;
    int ret;

    static const char *const shader_models[] =
    {
        [SHADER_MODEL_2_0] = "4_0",
        [SHADER_MODEL_4_0] = "4_0",
        [SHADER_MODEL_4_1] = "4_1",
        [SHADER_MODEL_5_0] = "5_0",
        [SHADER_MODEL_5_1] = "5_1",
    };

    info.next = &hlsl_info;
    info.source.code = source;
    info.source.size = strlen(source);
    info.source_type = VKD3D_SHADER_SOURCE_HLSL;
    info.target_type = VKD3D_SHADER_TARGET_DXBC_TPF;
    info.log_level = VKD3D_SHADER_LOG_WARNING;

    hlsl_info.entry_point = "main";
    sprintf(profile, "%s_%s", type, shader_models[runner->r.minimum_shader_model]);
    hlsl_info.profile = profile;

    ret = vkd3d_shader_compile(&info, dxbc, &messages);
    if (messages && vkd3d_test_state.debug_level)
        trace("%s\n", messages);
    vkd3d_shader_free_messages(messages);
    if (ret)
        return false;

    info.next = &spirv_info;
    info.source = *dxbc;
    info.source_type = VKD3D_SHADER_SOURCE_DXBC_TPF;
    info.target_type = VKD3D_SHADER_TARGET_SPIRV_BINARY;

    spirv_info.next = &interface_info;
    spirv_info.environment = VKD3D_SHADER_SPIRV_ENVIRONMENT_VULKAN_1_0;

    push_constants.register_space = 0;
    push_constants.register_index = 0;
    push_constants.shader_visibility = VKD3D_SHADER_VISIBILITY_ALL;
    push_constants.offset = 0;
    push_constants.size = runner->r.uniform_count * sizeof(*runner->r.uniforms);

    for (i = 0; i < runner->r.resource_count; ++i)
    {
        const struct vulkan_resource *resource = vulkan_resource(runner->r.resources[i]);

        switch (resource->r.type)
        {
            case RESOURCE_TYPE_RENDER_TARGET:
            case RESOURCE_TYPE_VERTEX_BUFFER:
                break;

            case RESOURCE_TYPE_TEXTURE:
            case RESOURCE_TYPE_UAV:
                binding = &bindings[interface_info.binding_count++];
                if (resource->r.type == RESOURCE_TYPE_UAV)
                    binding->type = VKD3D_SHADER_DESCRIPTOR_TYPE_UAV;
                else
                    binding->type = VKD3D_SHADER_DESCRIPTOR_TYPE_SRV;
                binding->register_space = 0;
                binding->register_index = resource->r.slot;
                binding->shader_visibility = VKD3D_SHADER_VISIBILITY_ALL;
                binding->flags = VKD3D_SHADER_BINDING_FLAG_IMAGE;
                binding->binding.set = 0;
                binding->binding.binding = resource->binding;
                binding->binding.count = 1;
                break;
        }
    }

    for (i = 0; i < runner->r.sampler_count; ++i)
    {
        binding = &bindings[interface_info.binding_count++];
        binding->type = VKD3D_SHADER_DESCRIPTOR_TYPE_SAMPLER;
        binding->register_space = 0;
        binding->register_index = runner->r.samplers[i].slot;
        binding->shader_visibility = VKD3D_SHADER_VISIBILITY_ALL;
        binding->flags = VKD3D_SHADER_BINDING_FLAG_IMAGE;
        binding->binding.set = 0;
        binding->binding.binding = runner->samplers[i].binding;
        binding->binding.count = 1;
    }

    interface_info.bindings = bindings;

    interface_info.push_constant_buffer_count = 1;
    interface_info.push_constant_buffers = &push_constants;

    ret = vkd3d_shader_compile(&info, spirv, &messages);
    if (messages && vkd3d_test_state.debug_level)
        trace("%s\n", messages);
    vkd3d_shader_free_messages(messages);
    if (ret)
        return false;

    return true;
}

static bool create_shader_stage(const struct vulkan_shader_runner *runner,
        VkPipelineShaderStageCreateInfo *stage_info, const char *type, enum VkShaderStageFlagBits stage,
        const char *source, struct vkd3d_shader_code *dxbc_ptr)
{
    VkShaderModuleCreateInfo module_info = {.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO};
    struct vkd3d_shader_code spirv, dxbc;

    if (!dxbc_ptr)
        dxbc_ptr = &dxbc;

    if (!compile_shader(runner, source, type, dxbc_ptr, &spirv))
        return false;

    if (dxbc_ptr == &dxbc)
        vkd3d_shader_free_shader_code(&dxbc);

    memset(stage_info, 0, sizeof(*stage_info));
    stage_info->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stage_info->stage = stage;
    stage_info->pName = "main";

    module_info.codeSize = spirv.size;
    module_info.pCode = spirv.code;

    VK_CALL(vkCreateShaderModule(runner->device, &module_info, NULL, &stage_info->module));
    vkd3d_shader_free_shader_code(&spirv);
    return true;
}

static VkPrimitiveTopology vulkan_primitive_topology_from_d3d(D3D_PRIMITIVE_TOPOLOGY topology)
{
    switch (topology)
    {
        default:
            fatal_error("Unhandled primitive topology %#x.\n", topology);
            /* fall through */
        case D3D_PRIMITIVE_TOPOLOGY_TRIANGLELIST:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
        case D3D_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
    }
}

static VkPipelineLayout create_pipeline_layout(const struct vulkan_shader_runner *runner,
        VkDescriptorSetLayout set_layout)
{
    VkPipelineLayoutCreateInfo layout_desc = {.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO};
    VkPushConstantRange push_constant_range;
    VkPipelineLayout pipeline_layout;

    layout_desc.setLayoutCount = 1;
    layout_desc.pSetLayouts = &set_layout;

    if (runner->r.uniform_count)
    {
        layout_desc.pushConstantRangeCount = 1;
        layout_desc.pPushConstantRanges = &push_constant_range;

        push_constant_range.stageFlags = VK_SHADER_STAGE_ALL;
        push_constant_range.offset = 0;
        push_constant_range.size = runner->r.uniform_count * sizeof(*runner->r.uniforms);
    }

    VK_CALL(vkCreatePipelineLayout(runner->device, &layout_desc, NULL, &pipeline_layout));

    return pipeline_layout;
}

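/* Build a graphics pipeline from the test's vertex and pixel shaders. The vertex
 * input layout is derived by matching the test's input elements against the
 * parsed vertex shader input signature, and one blend attachment is set up per
 * render target resource. */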
static VkPipeline create_graphics_pipeline(const struct vulkan_shader_runner *runner, VkRenderPass render_pass,
        VkPipelineLayout pipeline_layout, D3D_PRIMITIVE_TOPOLOGY primitive_topology)
{
    VkPipelineInputAssemblyStateCreateInfo ia_desc = {.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO};
    VkPipelineRasterizationStateCreateInfo rs_desc = {.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO};
    VkPipelineVertexInputStateCreateInfo input_desc = {.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO};
    VkPipelineColorBlendStateCreateInfo blend_desc = {.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO};
    VkPipelineMultisampleStateCreateInfo ms_desc = {.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO};
    static const VkViewport viewport = {.width = RENDER_TARGET_WIDTH, .height = RENDER_TARGET_HEIGHT, .maxDepth = 1};
    VkPipelineViewportStateCreateInfo vp_desc = {.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO};
    static const VkRect2D rt_rect = {.extent.width = RENDER_TARGET_WIDTH, .extent.height = RENDER_TARGET_HEIGHT};
    VkGraphicsPipelineCreateInfo pipeline_desc = {.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO};
    VkPipelineColorBlendAttachmentState attachment_desc[MAX_RESOURCES] = {0};
    VkVertexInputAttributeDescription input_attributes[32];
    VkVertexInputBindingDescription input_bindings[32];
    struct vkd3d_shader_signature vs_input_signature;
    VkPipelineShaderStageCreateInfo stage_desc[2];
    struct vkd3d_shader_code vs_dxbc;
    VkDevice device = runner->device;
    VkPipeline pipeline;
    unsigned int i, j;
    int ret;

    memset(stage_desc, 0, sizeof(stage_desc));
    ret = create_shader_stage(runner, &stage_desc[0], "vs", VK_SHADER_STAGE_VERTEX_BIT, runner->r.vs_source, &vs_dxbc)
            && create_shader_stage(runner, &stage_desc[1], "ps", VK_SHADER_STAGE_FRAGMENT_BIT, runner->r.ps_source, NULL);
    todo_if (runner->r.is_todo) ok(ret, "Failed to compile shaders.\n");
    if (!ret)
    {
        VK_CALL(vkDestroyShaderModule(device, stage_desc[0].module, NULL));
        VK_CALL(vkDestroyShaderModule(device, stage_desc[1].module, NULL));
        return VK_NULL_HANDLE;
    }

    ret = vkd3d_shader_parse_input_signature(&vs_dxbc, &vs_input_signature, NULL);
    ok(!ret, "Failed to parse input signature, error %d.\n", ret);

    if (runner->r.input_element_count > ARRAY_SIZE(input_attributes))
        fatal_error("Input element count %zu is too high.\n", runner->r.input_element_count);

    for (i = 0; i < runner->r.input_element_count; ++i)
    {
        VkVertexInputAttributeDescription *attribute = &input_attributes[i];
        const struct input_element *element = &runner->r.input_elements[i];
        const struct vkd3d_shader_signature_element *signature_element;

        signature_element = vkd3d_shader_find_signature_element(&vs_input_signature, element->name, element->index, 0);

        attribute->location = signature_element->register_index;
        attribute->binding = element->slot;
        attribute->format = vkd3d_get_vk_format(element->format);
        /* The offset will be filled below. */
    }

    input_desc.vertexAttributeDescriptionCount = runner->r.input_element_count;
    input_desc.pVertexAttributeDescriptions = input_attributes;
    input_desc.pVertexBindingDescriptions = input_bindings;

    blend_desc.attachmentCount = 0;
    blend_desc.pAttachments = attachment_desc;

    for (i = 0; i < runner->r.resource_count; ++i)
    {
        const struct vulkan_resource *resource = vulkan_resource(runner->r.resources[i]);

        switch (resource->r.type)
        {
            case RESOURCE_TYPE_TEXTURE:
            case RESOURCE_TYPE_UAV:
                break;

            case RESOURCE_TYPE_RENDER_TARGET:
                attachment_desc[blend_desc.attachmentCount++].colorWriteMask =
                        VK_COLOR_COMPONENT_R_BIT | VK_COLOR_COMPONENT_G_BIT
                        | VK_COLOR_COMPONENT_B_BIT | VK_COLOR_COMPONENT_A_BIT;
                break;

            case RESOURCE_TYPE_VERTEX_BUFFER:
            {
                VkVertexInputBindingDescription *binding = &input_bindings[input_desc.vertexBindingDescriptionCount++];

                binding->binding = resource->r.slot;
                binding->stride = 0;
                binding->inputRate = VK_VERTEX_INPUT_RATE_VERTEX;

                for (j = 0; j < runner->r.input_element_count; ++j)
                {
                    if (runner->r.input_elements[j].slot == resource->r.slot)
                    {
                        input_attributes[j].offset = binding->stride;
                        binding->stride += runner->r.input_elements[j].texel_size;
                    }
                }
                break;
            }
        }
    }

    ia_desc.topology = vulkan_primitive_topology_from_d3d(primitive_topology);

    vp_desc.viewportCount = 1;
    vp_desc.pViewports = &viewport;
    vp_desc.scissorCount = 1;
    vp_desc.pScissors = &rt_rect;

    rs_desc.cullMode = VK_CULL_MODE_NONE;
    rs_desc.lineWidth = 1.0f;

    ms_desc.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;

    pipeline_desc.stageCount = ARRAY_SIZE(stage_desc);
    pipeline_desc.pStages = stage_desc;
    pipeline_desc.pVertexInputState = &input_desc;
    pipeline_desc.pInputAssemblyState = &ia_desc;
    pipeline_desc.pViewportState = &vp_desc;
    pipeline_desc.pRasterizationState = &rs_desc;
    pipeline_desc.pMultisampleState = &ms_desc;
    pipeline_desc.pColorBlendState = &blend_desc;
    pipeline_desc.layout = pipeline_layout;
    pipeline_desc.renderPass = render_pass;
    pipeline_desc.subpass = 0;

    VK_CALL(vkCreateGraphicsPipelines(runner->device, VK_NULL_HANDLE, 1, &pipeline_desc, NULL, &pipeline));

    VK_CALL(vkDestroyShaderModule(device, stage_desc[0].module, NULL));
    VK_CALL(vkDestroyShaderModule(device, stage_desc[1].module, NULL));
    vkd3d_shader_free_shader_signature(&vs_input_signature);
    vkd3d_shader_free_shader_code(&vs_dxbc);

    return pipeline;
}

static VkPipeline create_compute_pipeline(const struct vulkan_shader_runner *runner, VkPipelineLayout pipeline_layout)
{
    VkComputePipelineCreateInfo pipeline_desc = {.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO};
    VkPipeline pipeline;
    bool ret;

    ret = create_shader_stage(runner, &pipeline_desc.stage, "cs",
            VK_SHADER_STAGE_COMPUTE_BIT, runner->r.cs_source, NULL);
    todo_if (runner->r.is_todo) ok(ret, "Failed to compile shader.\n");
    if (!ret)
        return VK_NULL_HANDLE;

    pipeline_desc.layout = pipeline_layout;

    VK_CALL(vkCreateComputePipelines(runner->device, VK_NULL_HANDLE, 1, &pipeline_desc, NULL, &pipeline));

    VK_CALL(vkDestroyShaderModule(runner->device, pipeline_desc.stage.module, NULL));

    return pipeline;
}

static VkSamplerAddressMode vk_address_mode_from_d3d12(D3D12_TEXTURE_ADDRESS_MODE mode)
{
    switch (mode)
    {
        case D3D12_TEXTURE_ADDRESS_MODE_WRAP:
            return VK_SAMPLER_ADDRESS_MODE_REPEAT;
        case D3D12_TEXTURE_ADDRESS_MODE_MIRROR:
            return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
        case D3D12_TEXTURE_ADDRESS_MODE_CLAMP:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        case D3D12_TEXTURE_ADDRESS_MODE_BORDER:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
        default:
            fatal_error("Unhandled sampler address mode %#x.\n", mode);
            return VK_SAMPLER_ADDRESS_MODE_REPEAT;
    }
}

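/* Build the descriptor set layout, assigning a contiguous binding index to each
 * texture/UAV resource and to each sampler. Samplers are created here and bound
 * as immutable samplers. */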
static VkDescriptorSetLayout create_descriptor_set_layout(struct vulkan_shader_runner *runner)
{
    VkDescriptorSetLayoutCreateInfo set_desc = {.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO};
    VkDescriptorSetLayoutBinding bindings[MAX_RESOURCES + MAX_SAMPLERS];
    VkDescriptorSetLayoutBinding *binding;
    VkDescriptorSetLayout set_layout;
    uint32_t binding_index = 0;
    size_t i;

    if (runner->r.resource_count > ARRAY_SIZE(bindings))
        fatal_error("Resource count %zu is too high.\n", runner->r.resource_count);

    set_desc.pBindings = bindings;

    for (i = 0; i < runner->r.resource_count; ++i)
    {
        struct vulkan_resource *resource = vulkan_resource(runner->r.resources[i]);

        switch (resource->r.type)
        {
            case RESOURCE_TYPE_RENDER_TARGET:
            case RESOURCE_TYPE_VERTEX_BUFFER:
                break;

            case RESOURCE_TYPE_TEXTURE:
            case RESOURCE_TYPE_UAV:
                binding = &bindings[set_desc.bindingCount++];

                resource->binding = binding_index++;

                binding->binding = resource->binding;
                if (resource->r.type == RESOURCE_TYPE_UAV)
                    binding->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
                else
                    binding->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
                binding->descriptorCount = 1;
                binding->stageFlags = VK_SHADER_STAGE_ALL;
                binding->pImmutableSamplers = NULL;
                break;
        }
    }

    for (i = 0; i < runner->r.sampler_count; ++i)
    {
        VkSamplerCreateInfo sampler_desc = {.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO};
        struct vulkan_sampler *vulkan_sampler = &runner->samplers[i];
        const struct sampler *sampler = &runner->r.samplers[i];

        sampler_desc.magFilter = (sampler->filter & 0x4) ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
        sampler_desc.minFilter = (sampler->filter & 0x1) ? VK_FILTER_LINEAR : VK_FILTER_NEAREST;
        sampler_desc.mipmapMode = (sampler->filter & 0x10) ? VK_SAMPLER_MIPMAP_MODE_LINEAR : VK_SAMPLER_MIPMAP_MODE_NEAREST;
        sampler_desc.addressModeU = vk_address_mode_from_d3d12(sampler->u_address);
        sampler_desc.addressModeV = vk_address_mode_from_d3d12(sampler->v_address);
        sampler_desc.addressModeW = vk_address_mode_from_d3d12(sampler->w_address);

        VK_CALL(vkCreateSampler(runner->device, &sampler_desc, NULL, &vulkan_sampler->vk_sampler));
        vulkan_sampler->binding = binding_index++;

        binding = &bindings[set_desc.bindingCount++];

        binding->binding = vulkan_sampler->binding;
        binding->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
        binding->descriptorCount = 1;
        binding->stageFlags = VK_SHADER_STAGE_ALL;
        binding->pImmutableSamplers = &vulkan_sampler->vk_sampler;
    }

    VK_CALL(vkCreateDescriptorSetLayout(runner->device, &set_desc, NULL, &set_layout));

    return set_layout;
}

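/* Allocate and fill a descriptor set for the current resources, bind vertex
 * buffers for graphics, and upload the uniforms as push constants. */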
static void bind_resources(struct vulkan_shader_runner *runner, VkPipelineBindPoint bind_point,
        VkDescriptorSetLayout set_layout, VkPipelineLayout pipeline_layout)
{
    VkDescriptorSetAllocateInfo set_desc = {.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO};
    VkCommandBuffer cmd_buffer = runner->cmd_buffer;
    VkDescriptorSet descriptor_set;
    unsigned int i;

    set_desc.descriptorPool = runner->descriptor_pool;
    set_desc.descriptorSetCount = 1;
    set_desc.pSetLayouts = &set_layout;

    VK_CALL(vkAllocateDescriptorSets(runner->device, &set_desc, &descriptor_set));

    for (i = 0; i < runner->r.resource_count; ++i)
    {
        const struct vulkan_resource *resource = vulkan_resource(runner->r.resources[i]);
        VkWriteDescriptorSet write = {.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET};
        static const VkDeviceSize zero_offset;
        VkDescriptorImageInfo image_info;

        switch (resource->r.type)
        {
            case RESOURCE_TYPE_TEXTURE:
            case RESOURCE_TYPE_UAV:
                image_info.imageView = resource->view;
                image_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

                write.dstSet = descriptor_set;
                write.dstBinding = resource->binding;
                write.dstArrayElement = 0;
                write.descriptorCount = 1;
                write.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
                write.pImageInfo = &image_info;

                if (resource->r.type == RESOURCE_TYPE_UAV)
                {
                    image_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
                    write.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
                }

                VK_CALL(vkUpdateDescriptorSets(runner->device, 1, &write, 0, NULL));
                break;

            case RESOURCE_TYPE_VERTEX_BUFFER:
                if (bind_point == VK_PIPELINE_BIND_POINT_GRAPHICS)
                    VK_CALL(vkCmdBindVertexBuffers(cmd_buffer, resource->r.slot, 1, &resource->buffer, &zero_offset));
                break;

            case RESOURCE_TYPE_RENDER_TARGET:
                break;
        }
    }

    VK_CALL(vkCmdBindDescriptorSets(cmd_buffer, bind_point, pipeline_layout, 0, 1, &descriptor_set, 0, NULL));

    if (runner->r.uniform_count)
        VK_CALL(vkCmdPushConstants(cmd_buffer, pipeline_layout, VK_SHADER_STAGE_ALL, 0,
                runner->r.uniform_count * sizeof(*runner->r.uniforms), runner->r.uniforms));

    /* The descriptor set will be freed by resetting the descriptor pool. */
}

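/* Create a render pass and framebuffer with one colour attachment per render
 * target resource, loading and storing the existing contents. */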
static void create_render_pass_and_framebuffer(struct vulkan_shader_runner *runner,
        VkRenderPass *render_pass, VkFramebuffer *fb)
{
    VkRenderPassCreateInfo render_pass_desc = {.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
    VkFramebufferCreateInfo fb_desc = {.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO};
    VkAttachmentDescription attachment_descs[MAX_RESOURCES] = {0};
    VkAttachmentReference color_refs[MAX_RESOURCES] = {0};
    VkSubpassDescription subpass_desc = {0};
    VkImageView rtvs[MAX_RESOURCES];
    unsigned int rt_count = 0;
    unsigned int i;

    for (i = 0; i < runner->r.resource_count; ++i)
    {
        const struct vulkan_resource *resource = vulkan_resource(runner->r.resources[i]);
        VkAttachmentDescription *attachment_desc = &attachment_descs[rt_count];
        VkAttachmentReference *color_ref = &color_refs[rt_count];

        if (resource->r.type != RESOURCE_TYPE_RENDER_TARGET)
            continue;

        attachment_desc->format = vkd3d_get_vk_format(resource->r.format);
        attachment_desc->samples = VK_SAMPLE_COUNT_1_BIT;
        attachment_desc->loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
        attachment_desc->storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        attachment_desc->stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        attachment_desc->stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        attachment_desc->initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
        attachment_desc->finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        color_ref->attachment = rt_count;
        color_ref->layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        rtvs[rt_count] = resource->view;

        ++rt_count;
    }

    subpass_desc.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
    subpass_desc.colorAttachmentCount = rt_count;
    subpass_desc.pColorAttachments = color_refs;

    render_pass_desc.attachmentCount = rt_count;
    render_pass_desc.pAttachments = attachment_descs;
    render_pass_desc.subpassCount = 1;
    render_pass_desc.pSubpasses = &subpass_desc;

    VK_CALL(vkCreateRenderPass(runner->device, &render_pass_desc, NULL, render_pass));

    fb_desc.renderPass = *render_pass;
    fb_desc.attachmentCount = rt_count;
    fb_desc.pAttachments = rtvs;
    fb_desc.width = RENDER_TARGET_WIDTH;
    fb_desc.height = RENDER_TARGET_HEIGHT;
    fb_desc.layers = 1;

    VK_CALL(vkCreateFramebuffer(runner->device, &fb_desc, NULL, fb));
}

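/* Run a compute shader test: build the layouts and compute pipeline, record the
 * dispatch, and submit it synchronously. */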
static bool vulkan_runner_dispatch(struct shader_runner *r, unsigned int x, unsigned int y, unsigned int z)
{
    struct vulkan_shader_runner *runner = vulkan_shader_runner(r);

    VkCommandBuffer cmd_buffer = runner->cmd_buffer;
    VkDescriptorSetLayout set_layout;
    VkPipelineLayout pipeline_layout;
    VkDevice device = runner->device;
    VkPipeline pipeline;
    bool ret = false;
    unsigned int i;

    /* Create this before compiling shaders, it will assign resource bindings. */
    set_layout = create_descriptor_set_layout(runner);

    pipeline_layout = create_pipeline_layout(runner, set_layout);
    if (!(pipeline = create_compute_pipeline(runner, pipeline_layout)))
        goto out;

    begin_command_buffer(runner);

    VK_CALL(vkCmdBindPipeline(cmd_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline));

    bind_resources(runner, VK_PIPELINE_BIND_POINT_COMPUTE, set_layout, pipeline_layout);

    VK_CALL(vkCmdDispatch(cmd_buffer, x, y, z));

    end_command_buffer(runner);

    VK_CALL(vkDestroyPipeline(device, pipeline, NULL));
    VK_CALL(vkResetDescriptorPool(device, runner->descriptor_pool, 0));

    ret = true;
out:
    for (i = 0; i < runner->r.sampler_count; ++i)
        VK_CALL(vkDestroySampler(device, runner->samplers[i].vk_sampler, NULL));

    VK_CALL(vkDestroyPipelineLayout(device, pipeline_layout, NULL));
    VK_CALL(vkDestroyDescriptorSetLayout(device, set_layout, NULL));

    return ret;
}

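/* Run a draw test: build the render pass, framebuffer, layouts and graphics
 * pipeline, record a single draw inside the render pass, and submit it. */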
static bool vulkan_runner_draw(struct shader_runner *r,
        D3D_PRIMITIVE_TOPOLOGY primitive_topology, unsigned int vertex_count)
{
    struct vulkan_shader_runner *runner = vulkan_shader_runner(r);

    static const VkRect2D rt_rect = {.extent.width = RENDER_TARGET_WIDTH, .extent.height = RENDER_TARGET_HEIGHT};
    VkRenderPassBeginInfo pass_begin_desc = {.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO};
    VkCommandBuffer cmd_buffer = runner->cmd_buffer;
    VkDescriptorSetLayout set_layout;
    VkPipelineLayout pipeline_layout;
    VkDevice device = runner->device;
    VkRenderPass render_pass;
    VkPipeline pipeline;
    VkFramebuffer fb;
    bool ret = false;
    unsigned int i;

    create_render_pass_and_framebuffer(runner, &render_pass, &fb);

    /* Create this before compiling shaders, it will assign resource bindings. */
    set_layout = create_descriptor_set_layout(runner);

    pipeline_layout = create_pipeline_layout(runner, set_layout);
    if (!(pipeline = create_graphics_pipeline(runner, render_pass, pipeline_layout, primitive_topology)))
        goto out;

    begin_command_buffer(runner);

    pass_begin_desc.renderPass = render_pass;
    pass_begin_desc.framebuffer = fb;
    pass_begin_desc.renderArea = rt_rect;

    VK_CALL(vkCmdBeginRenderPass(cmd_buffer, &pass_begin_desc, VK_SUBPASS_CONTENTS_INLINE));

    VK_CALL(vkCmdBindPipeline(cmd_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline));

    bind_resources(runner, VK_PIPELINE_BIND_POINT_GRAPHICS, set_layout, pipeline_layout);

    VK_CALL(vkCmdDraw(cmd_buffer, vertex_count, 1, 0, 0));

    VK_CALL(vkCmdEndRenderPass(cmd_buffer));
    end_command_buffer(runner);

    VK_CALL(vkDestroyPipeline(device, pipeline, NULL));
    VK_CALL(vkResetDescriptorPool(device, runner->descriptor_pool, 0));

    ret = true;
out:
    for (i = 0; i < runner->r.sampler_count; ++i)
        VK_CALL(vkDestroySampler(device, runner->samplers[i].vk_sampler, NULL));

    VK_CALL(vkDestroyPipelineLayout(device, pipeline_layout, NULL));
    VK_CALL(vkDestroyDescriptorSetLayout(device, set_layout, NULL));
    VK_CALL(vkDestroyRenderPass(device, render_pass, NULL));
    VK_CALL(vkDestroyFramebuffer(device, fb, NULL));

    return ret;
}

struct vulkan_resource_readback
{
    struct resource_readback rb;
    VkDeviceMemory memory;
    VkBuffer buffer;
};

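/* Read back an image resource by copying it into a host-visible buffer and
 * mapping that buffer; the image is transitioned back to its original layout
 * afterwards. */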
static struct resource_readback *vulkan_runner_get_resource_readback(struct shader_runner *r, struct resource *res)
{
    struct vulkan_shader_runner *runner = vulkan_shader_runner(r);
    struct vulkan_resource_readback *rb = malloc(sizeof(*rb));
    struct vulkan_resource *resource = vulkan_resource(res);
    VkDevice device = runner->device;
    VkBufferImageCopy region = {0};
    VkImageLayout layout;

    if (resource->r.type == RESOURCE_TYPE_RENDER_TARGET)
        layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    else
        layout = VK_IMAGE_LAYOUT_GENERAL;

    rb->rb.width = resource->r.width;
    rb->rb.height = resource->r.height;
    rb->rb.depth = 1;

    rb->rb.row_pitch = rb->rb.width * resource->r.texel_size;

    rb->buffer = create_buffer(runner, rb->rb.row_pitch * rb->rb.height,
            VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT, &rb->memory);

    begin_command_buffer(runner);

    transition_image_layout(runner, resource->image, layout, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

    region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    region.imageSubresource.layerCount = 1;
    region.imageExtent.width = resource->r.width;
    region.imageExtent.height = resource->r.height;
    region.imageExtent.depth = 1;

    VK_CALL(vkCmdCopyImageToBuffer(runner->cmd_buffer, resource->image,
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, rb->buffer, 1, &region));

    transition_image_layout(runner, resource->image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, layout);

    end_command_buffer(runner);

    VK_CALL(vkMapMemory(device, rb->memory, 0, VK_WHOLE_SIZE, 0, &rb->rb.data));
    return &rb->rb;
}

static void vulkan_runner_release_readback(struct shader_runner *r, struct resource_readback *rb)
{
    struct vulkan_resource_readback *vulkan_rb = CONTAINING_RECORD(rb, struct vulkan_resource_readback, rb);
    struct vulkan_shader_runner *runner = vulkan_shader_runner(r);
    VkDevice device = runner->device;

    VK_CALL(vkUnmapMemory(device, vulkan_rb->memory));

    VK_CALL(vkFreeMemory(device, vulkan_rb->memory, NULL));
    VK_CALL(vkDestroyBuffer(device, vulkan_rb->buffer, NULL));
    free(vulkan_rb);
}

static const struct shader_runner_ops vulkan_runner_ops =
{
    .create_resource = vulkan_runner_create_resource,
    .destroy_resource = vulkan_runner_destroy_resource,
    .dispatch = vulkan_runner_dispatch,
    .draw = vulkan_runner_draw,
    .get_resource_readback = vulkan_runner_get_resource_readback,
    .release_readback = vulkan_runner_release_readback,
};

static bool get_graphics_queue_index(const struct vulkan_shader_runner *runner, uint32_t *index)
{
    VkQueueFamilyProperties *queue_properties;
    uint32_t count, i;

    count = 0;
    VK_CALL(vkGetPhysicalDeviceQueueFamilyProperties(runner->phys_device, &count, NULL));
    queue_properties = malloc(count * sizeof(*queue_properties));
    VK_CALL(vkGetPhysicalDeviceQueueFamilyProperties(runner->phys_device, &count, queue_properties));

    for (i = 0; i < count; ++i)
    {
        if (queue_properties[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)
        {
            free(queue_properties);
            *index = i;
            return true;
        }
    }

    free(queue_properties);
    return false;
}

static const char *const required_device_extensions[] =
{
    VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME,
};

static bool has_extension(const VkExtensionProperties *extensions, uint32_t count, const char *extension_name)
{
    uint32_t i;

    for (i = 0; i < count; ++i)
    {
        if (!strcmp(extensions[i].extensionName, extension_name))
            return true;
    }
    return false;
}

static bool check_device_extensions(struct vulkan_shader_runner *runner)
{
    VkPhysicalDevice phys_device = runner->phys_device;
    VkExtensionProperties *extensions;
    uint32_t i, count;

    VK_CALL(vkEnumerateDeviceExtensionProperties(phys_device, NULL, &count, NULL));
    extensions = calloc(count, sizeof(*extensions));
    VK_CALL(vkEnumerateDeviceExtensionProperties(phys_device, NULL, &count, extensions));

    for (i = 0; i < ARRAY_SIZE(required_device_extensions); ++i)
    {
        if (!has_extension(extensions, count, required_device_extensions[i]))
        {
            skip("The selected Vulkan device does not support %s.\n", required_device_extensions[i]);
            return false;
        }
    }

    return true;
}

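/* Create the Vulkan instance, device, queue, command buffer and descriptor pool
 * used by the runner, skipping the tests if a requirement is missing. */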
static bool init_vulkan_runner(struct vulkan_shader_runner *runner)
{
    VkDescriptorPoolCreateInfo descriptor_pool_desc = {.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO};
    VkCommandBufferAllocateInfo cmd_buffer_desc = {.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO};
    VkCommandPoolCreateInfo command_pool_desc = {.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO};
    VkDeviceQueueCreateInfo queue_desc = {.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO};
    VkInstanceCreateInfo instance_desc = {.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
    VkDeviceCreateInfo device_desc = {.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO};
    VkPhysicalDeviceFeatures ret_features, features;
    VkDescriptorPoolSize descriptor_pool_sizes[3];
    static const float queue_priority = 1.0f;
    VkFormatProperties format_props;
    uint32_t count, graphics_index;
    VkDevice device;
    void *libvulkan;
    VkResult vr;

    if (!(libvulkan = vkd3d_dlopen(SONAME_LIBVULKAN)))
    {
        skip("Failed to load %s: %s.\n", SONAME_LIBVULKAN, vkd3d_dlerror());
        return false;
    }
    vkGetInstanceProcAddr = vkd3d_dlsym(libvulkan, "vkGetInstanceProcAddr");

    runner->vkCreateInstance = (void *)vkGetInstanceProcAddr(NULL, "vkCreateInstance");

    if ((vr = VK_CALL(vkCreateInstance(&instance_desc, NULL, &runner->instance))) < 0)
    {
        skip("Failed to create a Vulkan instance, vr %d.\n", vr);
        return false;
    }

#define VK_INSTANCE_PFN(name) runner->name = (void *)vkGetInstanceProcAddr(runner->instance, #name);
#include "vulkan_procs.h"

    count = 1;
    if ((vr = VK_CALL(vkEnumeratePhysicalDevices(runner->instance, &count, &runner->phys_device))) < 0)
    {
        skip("Failed to enumerate physical devices, vr %d.\n", vr);
        goto out_destroy_instance;
    }

    if (!count)
    {
        skip("No Vulkan devices are available.\n");
        goto out_destroy_instance;
    }

    if (!get_graphics_queue_index(runner, &graphics_index))
    {
        skip("The selected Vulkan device does not support graphics operations.\n");
        goto out_destroy_instance;
    }

    device_desc.pQueueCreateInfos = &queue_desc;
    device_desc.queueCreateInfoCount = 1;
    device_desc.enabledExtensionCount = ARRAY_SIZE(required_device_extensions);
    device_desc.ppEnabledExtensionNames = required_device_extensions;

    queue_desc.queueFamilyIndex = graphics_index;
    queue_desc.queueCount = 1;
    queue_desc.pQueuePriorities = &queue_priority;

    if (!check_device_extensions(runner))
        goto out_destroy_instance;

    VK_CALL(vkGetPhysicalDeviceFormatProperties(runner->phys_device, VK_FORMAT_R32G32B32A32_SFLOAT, &format_props));
    if (!(format_props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
    {
        skip("The selected Vulkan device does not support R32G32B32A32_SFLOAT render targets.\n");
        goto out_destroy_instance;
    }

    VK_CALL(vkGetPhysicalDeviceFeatures(runner->phys_device, &ret_features));

    device_desc.pEnabledFeatures = &features;
    memset(&features, 0, sizeof(features));

    /* FIXME: Probably make these optional. */

#define ENABLE_FEATURE(x) \
    if (!ret_features.x) \
    { \
        skip("The selected Vulkan device does not support " #x ".\n"); \
        goto out_destroy_instance; \
    } \
    features.x = VK_TRUE

    ENABLE_FEATURE(fragmentStoresAndAtomics);
    ENABLE_FEATURE(shaderImageGatherExtended);
    ENABLE_FEATURE(shaderStorageImageWriteWithoutFormat);

    if ((vr = VK_CALL(vkCreateDevice(runner->phys_device, &device_desc, NULL, &device))))
    {
        skip("Failed to create device, vr %d.\n", vr);
        goto out_destroy_instance;
    }
    runner->device = device;

#define VK_DEVICE_PFN(name) runner->name = (void *)VK_CALL(vkGetDeviceProcAddr(device, #name));
#include "vulkan_procs.h"

    VK_CALL(vkGetDeviceQueue(device, graphics_index, 0, &runner->queue));

    command_pool_desc.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    command_pool_desc.queueFamilyIndex = graphics_index;

    VK_CALL(vkCreateCommandPool(device, &command_pool_desc, NULL, &runner->command_pool));

    cmd_buffer_desc.commandPool = runner->command_pool;
    cmd_buffer_desc.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    cmd_buffer_desc.commandBufferCount = 1;

    VK_CALL(vkAllocateCommandBuffers(device, &cmd_buffer_desc, &runner->cmd_buffer));

    descriptor_pool_sizes[0].type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
    descriptor_pool_sizes[0].descriptorCount = MAX_RESOURCES;
    descriptor_pool_sizes[1].type = VK_DESCRIPTOR_TYPE_SAMPLER;
    descriptor_pool_sizes[1].descriptorCount = MAX_SAMPLERS;
    descriptor_pool_sizes[2].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
    descriptor_pool_sizes[2].descriptorCount = MAX_RESOURCES;

    descriptor_pool_desc.maxSets = 1;
    descriptor_pool_desc.poolSizeCount = ARRAY_SIZE(descriptor_pool_sizes);
    descriptor_pool_desc.pPoolSizes = descriptor_pool_sizes;

    VK_CALL(vkCreateDescriptorPool(device, &descriptor_pool_desc, NULL, &runner->descriptor_pool));

    return true;

out_destroy_instance:
    VK_CALL(vkDestroyInstance(runner->instance, NULL));
    return false;
}

static void cleanup_vulkan_runner(struct vulkan_shader_runner *runner)
{
    VkDevice device = runner->device;

    VK_CALL(vkDestroyDescriptorPool(device, runner->descriptor_pool, NULL));
    VK_CALL(vkFreeCommandBuffers(device, runner->command_pool, 1, &runner->cmd_buffer));
    VK_CALL(vkDestroyCommandPool(device, runner->command_pool, NULL));
    VK_CALL(vkDestroyDevice(device, NULL));
    VK_CALL(vkDestroyInstance(runner->instance, NULL));
}

void run_shader_tests_vulkan(void)
{
    struct vulkan_shader_runner runner = {0};

    if (!init_vulkan_runner(&runner))
        return;

    run_shader_tests(&runner.r, &vulkan_runner_ops);

    cleanup_vulkan_runner(&runner);
}

#endif