2016-02-21 18:05:01 +01:00
# define __STDC_LIMIT_MACROS
2015-10-10 16:41:19 +02:00
# include <cstdlib>
2016-02-21 18:05:01 +01:00
# include <cstdint>
2015-10-10 16:41:19 +02:00
# include <assert.h>
# include <cstring>
# include <iostream>
2016-03-13 16:22:46 +01:00
# include "base/basictypes.h"
2018-01-20 21:47:16 +01:00
# include "base/display.h"
2015-10-10 16:41:19 +02:00
# include "VulkanContext.h"
2017-02-25 00:25:46 +01:00
# include "GPU/Common/ShaderCommon.h"
2017-11-09 12:26:08 +01:00
# include "Common/StringUtils.h"
2018-01-20 21:47:16 +01:00
# include "Core/Config.h"
2015-10-10 16:41:19 +02:00
2017-11-26 16:13:04 +01:00
// Change this to 1, 2, and 3 to fake failures in a few places, so that
// we can test our fallback-to-GL code.
# define SIMULATE_VULKAN_FAILURE 0
2015-10-10 16:41:19 +02:00
# ifdef USE_CRT_DBG
# undef new
# endif
2016-02-21 18:05:01 +01:00
# ifdef _MSC_VER
2015-12-20 23:39:03 +01:00
# pragma warning(push)
# pragma warning(disable:4996)
2016-02-21 18:05:01 +01:00
# endif
# include "ext/glslang/SPIRV/GlslangToSpv.h"
# ifdef _MSC_VER
2015-12-20 23:39:03 +01:00
# pragma warning(pop)
2016-02-21 18:05:01 +01:00
# endif
2015-10-10 16:41:19 +02:00
# ifdef USE_CRT_DBG
# define new DBG_NEW
# endif
2016-03-13 16:22:46 +01:00
// Layer names requested when validation is enabled (VULKAN_FLAG_VALIDATE).
// Both the instance and the device layer name lists are fed from this table.
static const char *validationLayers[] = {
	"VK_LAYER_LUNARG_standard_validation",
	/*
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_draw_state",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_mem_tracker",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_param_checker",
	*/
	/*
	// For layers included in the Android NDK.
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_parameter_validation",
	"VK_LAYER_LUNARG_core_validation",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_swapchain",
	"VK_LAYER_GOOGLE_unique_objects",
	*/
};
2017-11-09 12:26:08 +01:00
std : : string VulkanVendorString ( uint32_t vendorId ) {
switch ( vendorId ) {
case VULKAN_VENDOR_INTEL : return " Intel " ;
case VULKAN_VENDOR_NVIDIA : return " nVidia " ;
case VULKAN_VENDOR_AMD : return " AMD " ;
case VULKAN_VENDOR_ARM : return " ARM " ;
case VULKAN_VENDOR_QUALCOMM : return " Qualcomm " ;
case VULKAN_VENDOR_IMGTEC : return " Imagination " ;
default :
return StringFromFormat ( " %08x " , vendorId ) ;
}
}
2017-11-10 13:02:24 +01:00
// Returns a short printable name for a VkPresentModeKHR value,
// or "UNKNOWN" for anything not in the core set.
const char *PresentModeString(VkPresentModeKHR presentMode) {
	if (presentMode == VK_PRESENT_MODE_IMMEDIATE_KHR)
		return "IMMEDIATE";
	if (presentMode == VK_PRESENT_MODE_MAILBOX_KHR)
		return "MAILBOX";
	if (presentMode == VK_PRESENT_MODE_FIFO_KHR)
		return "FIFO";
	if (presentMode == VK_PRESENT_MODE_FIFO_RELAXED_KHR)
		return "FIFO_RELAXED";
	return "UNKNOWN";
}
2017-08-28 14:12:56 +02:00
// Loads the Vulkan library and pre-enumerates instance layers/extensions.
// On load failure, init_error_ is set and the context is left unusable.
VulkanContext::VulkanContext() {
#if SIMULATE_VULKAN_FAILURE == 1
	return;
#endif
	if (!VulkanLoad()) {
		// Driver shared library missing entirely.
		init_error_ = "Failed to load Vulkan driver library";
		return;
	}

	// Layers and extensions can be queried without an instance; gather them
	// now so CreateInstance() can decide what to enable.
	GetInstanceLayerProperties();
	GetInstanceLayerExtensionList(nullptr, instance_extension_properties_);
}
2017-12-18 12:22:12 +01:00
// Creates the VkInstance: picks the platform surface extension, optionally
// enables the validation layers + debug-report extension, and enumerates the
// physical devices. On any failure, init_error_ is set, instance_ is left
// null (destroyed if partially created) and an error code is returned.
VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
	if (!vkCreateInstance) {
		init_error_ = "Vulkan not loaded - can't create instance";
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	flags_ = info.flags;

	// List extensions to try to enable.
	instance_extensions_enabled_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
#ifdef _WIN32
	instance_extensions_enabled_.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#elif defined(__ANDROID__)
	instance_extensions_enabled_.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
#else
	// Desktop Linux/BSD: only add the window-system extensions actually
	// reported by the loader.
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
	}
#endif
//#if defined(VK_USE_PLATFORM_XCB_KHR)
//	instance_extensions_enabled_.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
//#endif
//#if defined(VK_USE_PLATFORM_MIR_KHR)
//	instance_extensions_enabled_.push_back(VK_KHR_MIR_SURFACE_EXTENSION_NAME);
//#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
	}
#endif
#endif

	if (flags_ & VULKAN_FLAG_VALIDATE) {
		if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_REPORT_EXTENSION_NAME)) {
			// The same layer set is requested for both instance and device.
			for (size_t i = 0; i < ARRAY_SIZE(validationLayers); i++) {
				instance_layer_names_.push_back(validationLayers[i]);
				device_layer_names_.push_back(validationLayers[i]);
			}
			instance_extensions_enabled_.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
		} else {
			ELOG("Validation layer extension not available - not enabling Vulkan validation.");
			flags_ &= ~VULKAN_FLAG_VALIDATE;
		}
	}

	// Validate that all the instance extensions we ask for are actually available.
	for (auto ext : instance_extensions_enabled_) {
		if (!IsInstanceExtensionAvailable(ext))
			WLOG("WARNING: Does not seem that instance extension '%s' is available. Trying to proceed anyway.", ext);
	}

	VkApplicationInfo app_info{ VK_STRUCTURE_TYPE_APPLICATION_INFO };
	app_info.pApplicationName = info.app_name;
	app_info.applicationVersion = info.app_ver;
	app_info.pEngineName = info.app_name;
	// Let's increment this when we make major engine/context changes.
	app_info.engineVersion = 2;
	app_info.apiVersion = VK_API_VERSION_1_0;

	VkInstanceCreateInfo inst_info{ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
	inst_info.flags = 0;
	inst_info.pApplicationInfo = &app_info;
	inst_info.enabledLayerCount = (uint32_t)instance_layer_names_.size();
	inst_info.ppEnabledLayerNames = instance_layer_names_.size() ? instance_layer_names_.data() : nullptr;
	inst_info.enabledExtensionCount = (uint32_t)instance_extensions_enabled_.size();
	inst_info.ppEnabledExtensionNames = instance_extensions_enabled_.size() ? instance_extensions_enabled_.data() : nullptr;

#if SIMULATE_VULKAN_FAILURE == 2
	VkResult res = VK_ERROR_INCOMPATIBLE_DRIVER;
#else
	VkResult res = vkCreateInstance(&inst_info, nullptr, &instance_);
#endif
	if (res != VK_SUCCESS) {
		if (res == VK_ERROR_LAYER_NOT_PRESENT) {
			WLOG("Validation on but layers not available - dropping layers");
			// Drop the validation layers and try again.
			instance_layer_names_.clear();
			device_layer_names_.clear();
			inst_info.enabledLayerCount = 0;
			inst_info.ppEnabledLayerNames = nullptr;
			res = vkCreateInstance(&inst_info, nullptr, &instance_);
			if (res != VK_SUCCESS)
				ELOG("Failed to create instance even without validation: %d", res);
		} else {
			ELOG("Failed to create instance : %d", res);
		}
	}
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to create Vulkan instance";
		return res;
	}

	VulkanLoadInstanceFunctions(instance_);
	if (!CheckLayers(instance_layer_properties_, instance_layer_names_)) {
		// Deliberately non-fatal; we keep going without the missing layers.
		WLOG("CheckLayers for instance failed");
		// init_error_ = "Failed to validate instance layers";
		// return;
	}

	uint32_t gpu_count = 1;
#if SIMULATE_VULKAN_FAILURE == 3
	gpu_count = 0;
#else
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, nullptr);
#endif
	if (gpu_count <= 0) {
		// A loader/driver can exist with no usable GPU behind it.
		ELOG("Vulkan driver found but no supported GPU is available");
		init_error_ = "No Vulkan physical devices found";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	assert(gpu_count > 0);
	physical_devices_.resize(gpu_count);
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, physical_devices_.data());
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to enumerate physical devices";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return res;
	}

	return VK_SUCCESS;
}
// The instance must already have been torn down via DestroyInstance()
// before the context object itself is destroyed.
VulkanContext::~VulkanContext() {
	assert(instance_ == VK_NULL_HANDLE);
}
// Destroys the VkInstance and unloads the Vulkan library.
// Order matters: the instance must be destroyed before the loader is freed.
void VulkanContext::DestroyInstance() {
	vkDestroyInstance(instance_, nullptr);
	VulkanFree();
	instance_ = VK_NULL_HANDLE;
}
2017-08-19 17:32:10 +02:00
void VulkanContext : : BeginFrame ( ) {
2016-01-03 00:46:41 +01:00
FrameData * frame = & frame_ [ curFrame_ ] ;
2016-01-06 00:38:45 +01:00
// Process pending deletes.
frame - > deleteList . PerformDeletes ( device_ ) ;
2017-05-07 11:28:57 +02:00
}
2016-01-02 02:08:05 +01:00
2017-05-07 11:28:57 +02:00
void VulkanContext : : EndFrame ( ) {
2017-08-19 17:32:10 +02:00
frame_ [ curFrame_ ] . deleteList . Take ( globalDeleteList_ ) ;
2017-08-17 17:55:21 +02:00
curFrame_ + + ;
if ( curFrame_ > = inflightFrames_ ) {
curFrame_ = 0 ;
}
2016-01-02 02:08:05 +01:00
}
2016-01-06 00:38:45 +01:00
// Blocks until the graphics queue has finished all submitted work.
// Should almost never be used - it stalls the whole pipeline.
void VulkanContext::WaitUntilQueueIdle() {
	// Should almost never be used
	vkQueueWaitIdle(gfx_queue_);
}
bool VulkanContext : : MemoryTypeFromProperties ( uint32_t typeBits , VkFlags requirements_mask , uint32_t * typeIndex ) {
// Search memtypes to find first index with those properties
for ( uint32_t i = 0 ; i < 32 ; i + + ) {
if ( ( typeBits & 1 ) = = 1 ) {
// Type is available, does it match user properties?
if ( ( memory_properties . memoryTypes [ i ] . propertyFlags & requirements_mask ) = = requirements_mask ) {
* typeIndex = i ;
return true ;
}
}
typeBits > > = 1 ;
}
// No memory types matched, return failure
return false ;
}
2017-08-19 17:32:10 +02:00
bool VulkanContext : : InitObjects ( ) {
2017-11-12 21:50:54 -08:00
if ( ! InitQueue ( ) ) {
return false ;
}
2015-10-10 16:41:19 +02:00
2017-08-19 17:32:10 +02:00
if ( ! InitSwapchain ( ) ) {
2017-11-20 11:57:54 +01:00
// Destroy queue?
2017-04-15 16:26:26 -07:00
return false ;
}
return true ;
2015-10-10 16:41:19 +02:00
}
void VulkanContext : : DestroyObjects ( ) {
2017-11-09 16:58:59 +01:00
ILOG ( " VulkanContext::DestroyObjects (including swapchain) " ) ;
2017-08-19 17:32:10 +02:00
if ( swapchain_ ! = VK_NULL_HANDLE )
vkDestroySwapchainKHR ( device_ , swapchain_ , nullptr ) ;
swapchain_ = VK_NULL_HANDLE ;
2016-03-21 19:41:20 -07:00
vkDestroySurfaceKHR ( instance_ , surface_ , nullptr ) ;
surface_ = VK_NULL_HANDLE ;
2015-10-10 16:41:19 +02:00
}
2017-08-28 13:45:04 +02:00
// Fills 'extensions' with the instance extensions exposed by 'layerName'
// (nullptr = core/implicit extensions). Retries while the loader reports
// VK_INCOMPLETE, since the set can change between the two calls.
VkResult VulkanContext::GetInstanceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
	VkResult result;
	do {
		uint32_t count;
		result = vkEnumerateInstanceExtensionProperties(layerName, &count, nullptr);
		if (result != VK_SUCCESS)
			return result;
		if (count == 0)
			return VK_SUCCESS;
		extensions.resize(count);
		result = vkEnumerateInstanceExtensionProperties(layerName, &count, extensions.data());
	} while (result == VK_INCOMPLETE);
	return result;
}
2017-08-28 14:12:56 +02:00
// Enumerates all instance layers, and for each one its extension list,
// appending the results to instance_layer_properties_.
//
// The layer count can (rarely) change between the count query and the data
// query - e.g. an installer adding layers in between. The loader signals
// this with VK_INCOMPLETE, in which case we simply retry.
VkResult VulkanContext::GetInstanceLayerProperties() {
	uint32_t layerCount;
	std::vector<VkLayerProperties> props;
	VkResult result;
	do {
		result = vkEnumerateInstanceLayerProperties(&layerCount, nullptr);
		if (result != VK_SUCCESS)
			return result;
		if (!layerCount)
			return VK_SUCCESS;
		props.resize(layerCount);
		result = vkEnumerateInstanceLayerProperties(&layerCount, props.data());
	} while (result == VK_INCOMPLETE);

	// Now gather the extension list for each instance layer.
	for (uint32_t i = 0; i < layerCount; i++) {
		LayerProperties layerProps;
		layerProps.properties = props[i];
		result = GetInstanceLayerExtensionList(layerProps.properties.layerName, layerProps.extensions);
		if (result != VK_SUCCESS)
			return result;
		instance_layer_properties_.push_back(layerProps);
	}
	return result;
}
2017-08-28 13:45:04 +02:00
// Fills 'extensions' with the device extensions exposed by 'layerName'.
// Pass layerName == nullptr to get the extension list for the device itself.
// Retries while the loader reports VK_INCOMPLETE.
VkResult VulkanContext::GetDeviceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
	VkResult result;
	do {
		uint32_t count;
		result = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &count, nullptr);
		if (result != VK_SUCCESS)
			return result;
		if (!count)
			return VK_SUCCESS;
		extensions.resize(count);
		result = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &count, extensions.data());
	} while (result == VK_INCOMPLETE);
	return result;
}
2017-08-28 13:45:04 +02:00
// Enumerates all layers of the selected physical device, and for each one
// its extension list, appending the results to device_layer_properties_.
//
// As with instance layers, the count can change between the count query and
// the data query; the loader returns VK_INCOMPLETE and we retry.
VkResult VulkanContext::GetDeviceLayerProperties() {
	uint32_t layerCount;
	std::vector<VkLayerProperties> props;
	VkResult result;
	do {
		result = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &layerCount, nullptr);
		if (result != VK_SUCCESS)
			return result;
		if (layerCount == 0)
			return VK_SUCCESS;
		props.resize(layerCount);
		result = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &layerCount, props.data());
	} while (result == VK_INCOMPLETE);

	// Gather the list of extensions for each device layer.
	for (uint32_t i = 0; i < layerCount; i++) {
		LayerProperties layerProps;
		layerProps.properties = props[i];
		result = GetDeviceLayerExtensionList(layerProps.properties.layerName, layerProps.extensions);
		if (result != VK_SUCCESS)
			return result;
		device_layer_properties_.push_back(layerProps);
	}
	return result;
}
2017-08-28 14:12:56 +02:00
// Returns true if all layer names specified in check_names can be found in given layer properties.
bool VulkanContext : : CheckLayers ( const std : : vector < LayerProperties > & layer_props , const std : : vector < const char * > & layer_names ) const {
2015-10-10 16:41:19 +02:00
uint32_t check_count = ( uint32_t ) layer_names . size ( ) ;
uint32_t layer_count = ( uint32_t ) layer_props . size ( ) ;
for ( uint32_t i = 0 ; i < check_count ; i + + ) {
2017-08-28 14:12:56 +02:00
bool found = false ;
2015-10-10 16:41:19 +02:00
for ( uint32_t j = 0 ; j < layer_count ; j + + ) {
if ( ! strcmp ( layer_names [ i ] , layer_props [ j ] . properties . layerName ) ) {
2017-08-28 14:12:56 +02:00
found = true ;
2015-10-10 16:41:19 +02:00
}
}
if ( ! found ) {
std : : cout < < " Cannot find layer: " < < layer_names [ i ] < < std : : endl ;
2017-08-28 14:12:56 +02:00
return false ;
2015-10-10 16:41:19 +02:00
}
}
2017-08-28 14:12:56 +02:00
return true ;
2015-10-10 16:41:19 +02:00
}
2017-11-09 12:21:20 +01:00
int VulkanContext : : GetBestPhysicalDevice ( ) {
// Rules: Prefer discrete over embedded.
// Prefer nVidia over Intel.
int maxScore = - 1 ;
int best = - 1 ;
for ( size_t i = 0 ; i < physical_devices_ . size ( ) ; i + + ) {
int score = 0 ;
VkPhysicalDeviceProperties props ;
vkGetPhysicalDeviceProperties ( physical_devices_ [ i ] , & props ) ;
switch ( props . deviceType ) {
case VK_PHYSICAL_DEVICE_TYPE_CPU :
score + = 1 ;
break ;
case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU :
score + = 20 ;
break ;
case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU :
score + = 10 ;
break ;
}
if ( props . vendorID = = VULKAN_VENDOR_AMD ) {
score + = 5 ;
} else if ( props . vendorID = = VULKAN_VENDOR_NVIDIA ) {
score + = 5 ;
}
if ( score > maxScore ) {
2017-11-09 12:26:08 +01:00
best = ( int ) i ;
2017-11-09 12:21:20 +01:00
maxScore = score ;
}
}
return best ;
}
2017-08-28 14:12:56 +02:00
void VulkanContext : : ChooseDevice ( int physical_device ) {
physical_device_ = physical_device ;
2017-12-15 15:29:19 +01:00
ILOG ( " Chose physical device %d: %p " , physical_device , physical_devices_ [ physical_device ] ) ;
2015-10-10 16:41:19 +02:00
2017-08-28 15:22:18 +02:00
GetDeviceLayerProperties ( ) ;
if ( ! CheckLayers ( device_layer_properties_ , device_layer_names_ ) ) {
2017-11-09 12:21:20 +01:00
WLOG ( " CheckLayers for device %d failed " , physical_device ) ;
2017-08-28 15:22:18 +02:00
}
2017-08-28 14:12:56 +02:00
vkGetPhysicalDeviceQueueFamilyProperties ( physical_devices_ [ physical_device_ ] , & queue_count , nullptr ) ;
2016-03-13 09:33:39 -07:00
assert ( queue_count > = 1 ) ;
2015-10-10 16:41:19 +02:00
2016-03-13 09:33:39 -07:00
queue_props . resize ( queue_count ) ;
2017-08-28 14:12:56 +02:00
vkGetPhysicalDeviceQueueFamilyProperties ( physical_devices_ [ physical_device_ ] , & queue_count , queue_props . data ( ) ) ;
2016-03-13 09:33:39 -07:00
assert ( queue_count > = 1 ) ;
2015-10-10 16:41:19 +02:00
2016-03-21 20:11:28 +01:00
// Detect preferred formats, in this order.
static const VkFormat depthStencilFormats [ ] = {
VK_FORMAT_D24_UNORM_S8_UINT ,
VK_FORMAT_D32_SFLOAT_S8_UINT ,
VK_FORMAT_D16_UNORM_S8_UINT ,
} ;
deviceInfo_ . preferredDepthStencilFormat = VK_FORMAT_UNDEFINED ;
2016-03-27 09:25:25 -07:00
for ( size_t i = 0 ; i < ARRAY_SIZE ( depthStencilFormats ) ; i + + ) {
2016-03-21 20:11:28 +01:00
VkFormatProperties props ;
2017-08-28 14:12:56 +02:00
vkGetPhysicalDeviceFormatProperties ( physical_devices_ [ physical_device_ ] , depthStencilFormats [ i ] , & props ) ;
2016-03-21 20:11:28 +01:00
if ( props . optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT ) {
2016-03-21 20:23:53 +01:00
deviceInfo_ . preferredDepthStencilFormat = depthStencilFormats [ i ] ;
2016-03-21 20:11:28 +01:00
break ;
}
}
2017-11-15 09:07:51 +01:00
if ( deviceInfo_ . preferredDepthStencilFormat = = VK_FORMAT_UNDEFINED ) {
// WTF? This is bad.
ELOG ( " Could not find a usable depth stencil format. " ) ;
}
2016-03-21 20:11:28 +01:00
2016-03-13 09:33:39 -07:00
// This is as good a place as any to do this
2017-08-28 14:12:56 +02:00
vkGetPhysicalDeviceMemoryProperties ( physical_devices_ [ physical_device_ ] , & memory_properties ) ;
vkGetPhysicalDeviceProperties ( physical_devices_ [ physical_device_ ] , & gpu_props ) ;
2015-10-10 16:41:19 +02:00
2016-01-03 14:00:05 +01:00
// Optional features
2017-08-28 14:12:56 +02:00
vkGetPhysicalDeviceFeatures ( physical_devices_ [ physical_device_ ] , & featuresAvailable_ ) ;
2016-01-03 14:00:05 +01:00
memset ( & featuresEnabled_ , 0 , sizeof ( featuresEnabled_ ) ) ;
// Enable a few safe ones if they are available.
if ( featuresAvailable_ . dualSrcBlend ) {
2016-03-19 01:57:13 +01:00
featuresEnabled_ . dualSrcBlend = true ;
2016-01-03 14:00:05 +01:00
}
if ( featuresAvailable_ . largePoints ) {
featuresEnabled_ . largePoints = true ;
}
if ( featuresAvailable_ . wideLines ) {
featuresEnabled_ . wideLines = true ;
}
if ( featuresAvailable_ . geometryShader ) {
featuresEnabled_ . geometryShader = true ;
}
if ( featuresAvailable_ . logicOp ) {
featuresEnabled_ . logicOp = true ;
}
2016-01-19 18:41:45 +01:00
if ( featuresAvailable_ . depthClamp ) {
featuresEnabled_ . depthClamp = true ;
}
if ( featuresAvailable_ . depthBounds ) {
featuresEnabled_ . depthBounds = true ;
}
2016-03-17 21:59:16 -07:00
if ( featuresAvailable_ . samplerAnisotropy ) {
featuresEnabled_ . samplerAnisotropy = true ;
}
2017-08-28 13:45:04 +02:00
// For easy wireframe mode, someday.
if ( featuresEnabled_ . fillModeNonSolid ) {
featuresEnabled_ . fillModeNonSolid = true ;
}
2016-01-03 14:00:05 +01:00
2017-08-28 13:45:04 +02:00
GetDeviceLayerExtensionList ( nullptr , device_extension_properties_ ) ;
device_extensions_enabled_ . push_back ( VK_KHR_SWAPCHAIN_EXTENSION_NAME ) ;
2017-08-28 14:12:56 +02:00
}
bool VulkanContext : : EnableDeviceExtension ( const char * extension ) {
for ( auto & iter : device_extension_properties_ ) {
if ( ! strcmp ( iter . extensionName , extension ) ) {
device_extensions_enabled_ . push_back ( extension ) ;
return true ;
}
2017-08-28 13:45:04 +02:00
}
2017-08-28 14:12:56 +02:00
return false ;
}
// Creates the logical device with one graphics queue and the previously
// selected layers, extensions and features. Returns VK_SUCCESS or an error
// code; on failure, init_error_ is set.
VkResult VulkanContext::CreateDevice() {
	if (!init_error_.empty() || physical_device_ < 0) {
		ELOG("Vulkan init failed: %s", init_error_.c_str());
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	VkDeviceQueueCreateInfo queue_info = { VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO };
	float queue_priorities[1] = { 1.0f };
	queue_info.queueCount = 1;
	queue_info.pQueuePriorities = queue_priorities;
	bool found = false;
	for (int i = 0; i < (int)queue_count; i++) {
		if (queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
			queue_info.queueFamilyIndex = i;
			found = true;
			break;
		}
	}
	assert(found);
	if (!found) {
		// Previously this was assert-only, so release builds would proceed
		// with an uninitialized queueFamilyIndex. Fail cleanly instead.
		init_error_ = "Unable to find a graphics queue";
		ELOG("Unable to find a graphics queue");
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	deviceExtensionsLookup_.DEDICATED_ALLOCATION = EnableDeviceExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);

	VkDeviceCreateInfo device_info{ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
	device_info.queueCreateInfoCount = 1;
	device_info.pQueueCreateInfos = &queue_info;
	device_info.enabledLayerCount = (uint32_t)device_layer_names_.size();
	device_info.ppEnabledLayerNames = device_info.enabledLayerCount ? device_layer_names_.data() : nullptr;
	device_info.enabledExtensionCount = (uint32_t)device_extensions_enabled_.size();
	device_info.ppEnabledExtensionNames = device_info.enabledExtensionCount ? device_extensions_enabled_.data() : nullptr;
	device_info.pEnabledFeatures = &featuresEnabled_;

	VkResult res = vkCreateDevice(physical_devices_[physical_device_], &device_info, nullptr, &device_);
	if (res != VK_SUCCESS) {
		init_error_ = "Unable to create Vulkan device";
		ELOG("Unable to create Vulkan device");
	} else {
		VulkanLoadDeviceFunctions(device_);
		// Fixed: this was logged unconditionally, even when creation failed.
		ILOG("Device created.\n");
	}
	return res;
}
2015-12-31 14:06:18 +01:00
// Registers a debug-report callback with the given filter bits and user data.
// Silently succeeds (without registering) when validation isn't enabled.
// Any creation failure is reported as VK_ERROR_INITIALIZATION_FAILED.
VkResult VulkanContext::InitDebugMsgCallback(PFN_vkDebugReportCallbackEXT dbgFunc, int bits, void *userdata) {
	if (!(flags_ & VULKAN_FLAG_VALIDATE)) {
		WLOG("Not registering debug report callback - extension not enabled!");
		return VK_SUCCESS;
	}
	ILOG("Registering debug report callback");

	VkDebugReportCallbackCreateInfoEXT cb = {};
	cb.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
	cb.pNext = nullptr;
	cb.flags = bits;
	cb.pfnCallback = dbgFunc;
	cb.pUserData = userdata;

	VkDebugReportCallbackEXT msg_callback;
	VkResult res = dyn_vkCreateDebugReportCallbackEXT(instance_, &cb, nullptr, &msg_callback);
	if (res != VK_SUCCESS) {
		// Out-of-host-memory or anything else: report a generic init failure.
		return VK_ERROR_INITIALIZATION_FAILED;
	}
	msg_callbacks.push_back(msg_callback);
	return res;
}
void VulkanContext : : DestroyDebugMsgCallback ( ) {
2016-04-03 00:01:56 +02:00
while ( msg_callbacks . size ( ) > 0 ) {
2017-12-15 15:29:19 +01:00
dyn_vkDestroyDebugReportCallbackEXT ( instance_ , msg_callbacks . back ( ) , nullptr ) ;
2016-04-03 00:01:56 +02:00
msg_callbacks . pop_back ( ) ;
}
2015-10-10 16:41:19 +02:00
}
2017-12-18 12:22:12 +01:00
// Stores the window-system handles and creates the initial surface.
// The meaning of data1/data2 depends on winsys - see ReinitSurface()
// (e.g. HINSTANCE/HWND on Win32, Display*/Window on Xlib).
void VulkanContext::InitSurface(WindowSystem winsys, void *data1, void *data2, int width, int height) {
	winsys_ = winsys;
	winsysData1_ = data1;
	winsysData2_ = data2;
	ReinitSurface(width, height);
}
void VulkanContext : : ReinitSurface ( int width , int height ) {
if ( surface_ ! = VK_NULL_HANDLE ) {
ILOG ( " Destroying Vulkan surface (%d, %d) " , width_ , height_ ) ;
vkDestroySurfaceKHR ( instance_ , surface_ , nullptr ) ;
surface_ = VK_NULL_HANDLE ;
}
ILOG ( " Creating Vulkan surface (%d, %d) " , width , height ) ;
switch ( winsys_ ) {
2016-01-24 11:22:06 +01:00
# ifdef _WIN32
2017-12-18 12:22:12 +01:00
case WINDOWSYSTEM_WIN32 :
{
HINSTANCE connection = ( HINSTANCE ) winsysData1_ ;
HWND window = ( HWND ) winsysData2_ ;
2015-10-10 16:41:19 +02:00
2017-12-18 12:22:12 +01:00
RECT rc ;
GetClientRect ( window , & rc ) ;
width = rc . right - rc . left ;
height = rc . bottom - rc . top ;
2016-03-21 19:41:20 -07:00
2017-12-18 12:22:12 +01:00
VkWin32SurfaceCreateInfoKHR win32 { VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR } ;
win32 . flags = 0 ;
win32 . hwnd = window ;
win32 . hinstance = connection ;
VkResult res = vkCreateWin32SurfaceKHR ( instance_ , & win32 , nullptr , & surface_ ) ;
assert ( res = = VK_SUCCESS ) ;
break ;
2016-03-21 19:41:20 -07:00
}
2017-12-18 12:22:12 +01:00
# endif
# if defined(__ANDROID__)
case WINDOWSYSTEM_ANDROID :
{
ANativeWindow * wnd = ( ANativeWindow * ) winsysData1_ ;
VkAndroidSurfaceCreateInfoKHR android { VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR } ;
android . flags = 0 ;
android . window = wnd ;
VkResult res = vkCreateAndroidSurfaceKHR ( instance_ , & android , nullptr , & surface_ ) ;
assert ( res = = VK_SUCCESS ) ;
break ;
}
# endif
2017-12-13 22:58:45 +01:00
# if defined(VK_USE_PLATFORM_XLIB_KHR)
case WINDOWSYSTEM_XLIB :
{
VkXlibSurfaceCreateInfoKHR xlib = { VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR } ;
xlib . flags = 0 ;
xlib . dpy = ( Display * ) winsysData1_ ;
xlib . window = ( Window ) winsysData2_ ;
VkResult res = vkCreateXlibSurfaceKHR ( instance_ , & xlib , nullptr , & surface_ ) ;
assert ( res = = VK_SUCCESS ) ;
break ;
}
# endif
# if defined(VK_USE_PLATFORM_XCB_KHR)
case WINDOWSYSTEM_XCB :
{
VkXCBSurfaceCreateInfoKHR xcb = { VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR } ;
xcb . flags = 0 ;
xcb . connection = ( Connection * ) winsysData1_ ;
xcb . window = ( Window ) ( uintptr_t ) winsysData2_ ;
VkResult res = vkCreateXcbSurfaceKHR ( instance_ , & xcb , nullptr , & surface_ ) ;
2017-12-26 02:55:37 +03:00
assert ( res = = VK_SUCCESS ) ;
break ;
}
# endif
# if defined(VK_USE_PLATFORM_WAYLAND_KHR)
case WINDOWSYSTEM_WAYLAND :
{
VkWaylandSurfaceCreateInfoKHR wayland = { VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR } ;
wayland . flags = 0 ;
wayland . display = ( wl_display * ) winsysData1_ ;
wayland . surface = ( wl_surface * ) winsysData2_ ;
VkResult res = vkCreateWaylandSurfaceKHR ( instance_ , & wayland , nullptr , & surface_ ) ;
2017-12-13 22:58:45 +01:00
assert ( res = = VK_SUCCESS ) ;
break ;
}
# endif
2017-12-18 12:22:12 +01:00
default :
_assert_msg_ ( G3D , false , " Vulkan support for chosen window system not implemented " ) ;
break ;
2016-03-21 19:41:20 -07:00
}
width_ = width ;
height_ = height ;
2016-01-24 11:22:06 +01:00
}
2017-11-12 21:50:54 -08:00
bool VulkanContext : : InitQueue ( ) {
2015-10-10 16:41:19 +02:00
// Iterate over each queue to learn whether it supports presenting:
2016-03-21 19:41:20 -07:00
VkBool32 * supportsPresent = new VkBool32 [ queue_count ] ;
2015-10-10 16:41:19 +02:00
for ( uint32_t i = 0 ; i < queue_count ; i + + ) {
2017-08-28 14:12:56 +02:00
vkGetPhysicalDeviceSurfaceSupportKHR ( physical_devices_ [ physical_device_ ] , i , surface_ , & supportsPresent [ i ] ) ;
2015-10-10 16:41:19 +02:00
}
// Search for a graphics queue and a present queue in the array of queue
// families, try to find one that supports both
uint32_t graphicsQueueNodeIndex = UINT32_MAX ;
uint32_t presentQueueNodeIndex = UINT32_MAX ;
for ( uint32_t i = 0 ; i < queue_count ; i + + ) {
if ( ( queue_props [ i ] . queueFlags & VK_QUEUE_GRAPHICS_BIT ) ! = 0 ) {
if ( graphicsQueueNodeIndex = = UINT32_MAX ) {
graphicsQueueNodeIndex = i ;
}
if ( supportsPresent [ i ] = = VK_TRUE ) {
graphicsQueueNodeIndex = i ;
presentQueueNodeIndex = i ;
break ;
}
}
}
if ( presentQueueNodeIndex = = UINT32_MAX ) {
// If didn't find a queue that supports both graphics and present, then
// find a separate present queue.
for ( uint32_t i = 0 ; i < queue_count ; + + i ) {
if ( supportsPresent [ i ] = = VK_TRUE ) {
presentQueueNodeIndex = i ;
break ;
}
}
}
delete [ ] supportsPresent ;
// Generate error if could not find both a graphics and a present queue
if ( graphicsQueueNodeIndex = = UINT32_MAX | | presentQueueNodeIndex = = UINT32_MAX ) {
2017-11-12 21:50:54 -08:00
ELOG ( " Could not find a graphics and a present queue " ) ;
return false ;
2015-10-10 16:41:19 +02:00
}
2016-01-02 02:08:05 +01:00
graphics_queue_family_index_ = graphicsQueueNodeIndex ;
2015-10-10 16:41:19 +02:00
// Get the list of VkFormats that are supported:
2017-12-15 15:29:19 +01:00
uint32_t formatCount = 0 ;
2017-08-28 14:12:56 +02:00
VkResult res = vkGetPhysicalDeviceSurfaceFormatsKHR ( physical_devices_ [ physical_device_ ] , surface_ , & formatCount , nullptr ) ;
2017-12-15 15:29:19 +01:00
_assert_msg_ ( G3D , res = = VK_SUCCESS , " Failed to get formats for device %p: %d surface: %p " , physical_devices_ [ physical_device_ ] , ( int ) res , surface_ ) ;
if ( res ! = VK_SUCCESS ) {
2017-11-12 21:50:54 -08:00
return false ;
2017-12-15 15:29:19 +01:00
}
std : : vector < VkSurfaceFormatKHR > surfFormats ( formatCount ) ;
res = vkGetPhysicalDeviceSurfaceFormatsKHR ( physical_devices_ [ physical_device_ ] , surface_ , & formatCount , surfFormats . data ( ) ) ;
2015-10-10 16:41:19 +02:00
assert ( res = = VK_SUCCESS ) ;
2017-11-12 21:50:54 -08:00
if ( res ! = VK_SUCCESS ) {
return false ;
}
2015-10-10 16:41:19 +02:00
// If the format list includes just one entry of VK_FORMAT_UNDEFINED,
// the surface has no preferred format. Otherwise, at least one
// supported format will be returned.
2016-07-01 10:39:34 -07:00
if ( formatCount = = 0 | | ( formatCount = = 1 & & surfFormats [ 0 ] . format = = VK_FORMAT_UNDEFINED ) ) {
2016-02-25 18:52:33 +01:00
ILOG ( " swapchain_format: Falling back to B8G8R8A8_UNORM " ) ;
2017-08-19 17:32:10 +02:00
swapchainFormat_ = VK_FORMAT_B8G8R8A8_UNORM ;
2015-10-10 16:41:19 +02:00
} else {
2017-08-19 17:32:10 +02:00
swapchainFormat_ = VK_FORMAT_UNDEFINED ;
2016-07-01 10:39:34 -07:00
for ( uint32_t i = 0 ; i < formatCount ; + + i ) {
if ( surfFormats [ i ] . colorSpace ! = VK_COLORSPACE_SRGB_NONLINEAR_KHR ) {
continue ;
}
if ( surfFormats [ i ] . format = = VK_FORMAT_B8G8R8A8_UNORM | | surfFormats [ i ] . format = = VK_FORMAT_R8G8B8A8_UNORM ) {
2017-08-19 17:32:10 +02:00
swapchainFormat_ = surfFormats [ i ] . format ;
2016-07-01 10:39:34 -07:00
break ;
}
}
2017-08-19 17:32:10 +02:00
if ( swapchainFormat_ = = VK_FORMAT_UNDEFINED ) {
2016-07-01 10:39:34 -07:00
// Okay, take the first one then.
2017-08-19 17:32:10 +02:00
swapchainFormat_ = surfFormats [ 0 ] . format ;
2016-07-01 10:39:34 -07:00
}
2017-08-19 17:32:10 +02:00
ILOG ( " swapchain_format: %d (/%d) " , swapchainFormat_ , formatCount ) ;
2015-10-10 16:41:19 +02:00
}
2016-01-02 02:08:05 +01:00
vkGetDeviceQueue ( device_ , graphics_queue_family_index_ , 0 , & gfx_queue_ ) ;
2016-02-25 18:52:33 +01:00
ILOG ( " gfx_queue_: %p " , gfx_queue_ ) ;
2017-11-12 21:50:54 -08:00
return true ;
2015-10-10 16:41:19 +02:00
}
2017-08-19 17:32:10 +02:00
bool VulkanContext : : InitSwapchain ( ) {
2017-12-18 12:22:12 +01:00
VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR ( physical_devices_ [ physical_device_ ] , surface_ , & surfCapabilities_ ) ;
2015-10-10 16:41:19 +02:00
assert ( res = = VK_SUCCESS ) ;
uint32_t presentModeCount ;
2017-08-28 14:12:56 +02:00
res = vkGetPhysicalDeviceSurfacePresentModesKHR ( physical_devices_ [ physical_device_ ] , surface_ , & presentModeCount , nullptr ) ;
2015-10-10 16:41:19 +02:00
assert ( res = = VK_SUCCESS ) ;
2016-01-10 14:24:10 +01:00
VkPresentModeKHR * presentModes = new VkPresentModeKHR [ presentModeCount ] ;
2015-10-10 16:41:19 +02:00
assert ( presentModes ) ;
2017-08-28 14:12:56 +02:00
res = vkGetPhysicalDeviceSurfacePresentModesKHR ( physical_devices_ [ physical_device_ ] , surface_ , & presentModeCount , presentModes ) ;
2015-10-10 16:41:19 +02:00
assert ( res = = VK_SUCCESS ) ;
VkExtent2D swapChainExtent ;
// width and height are either both -1, or both not -1.
2017-11-15 13:57:22 +01:00
if ( surfCapabilities_ . currentExtent . width = = ( uint32_t ) - 1 ) {
2015-10-10 16:41:19 +02:00
// If the surface size is undefined, the size is set to
// the size of the images requested.
2016-03-21 19:41:20 -07:00
ILOG ( " initSwapchain: %dx%d " , width_ , height_ ) ;
swapChainExtent . width = width_ ;
swapChainExtent . height = height_ ;
2016-01-06 23:08:26 +01:00
} else {
2015-10-10 16:41:19 +02:00
// If the surface size is defined, the swap chain size must match
2017-11-15 13:57:22 +01:00
swapChainExtent = surfCapabilities_ . currentExtent ;
2015-10-10 16:41:19 +02:00
}
2016-02-25 18:52:33 +01:00
// TODO: Find a better way to specify the prioritized present mode while being able
// to fall back in a sensible way.
2016-04-07 22:51:52 +02:00
VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_MAX_ENUM_KHR ;
2015-10-10 16:41:19 +02:00
for ( size_t i = 0 ; i < presentModeCount ; i + + ) {
2017-11-10 13:02:24 +01:00
ILOG ( " Supported present mode: %d (%s) " , presentModes [ i ] , PresentModeString ( presentModes [ i ] ) ) ;
2016-02-25 18:52:33 +01:00
}
for ( size_t i = 0 ; i < presentModeCount ; i + + ) {
2016-04-07 22:51:52 +02:00
if ( swapchainPresentMode = = VK_PRESENT_MODE_MAX_ENUM_KHR ) {
2016-02-25 18:52:33 +01:00
// Default to the first present mode from the list.
swapchainPresentMode = presentModes [ i ] ;
}
2016-01-02 02:08:05 +01:00
if ( ( flags_ & VULKAN_FLAG_PRESENT_MAILBOX ) & & presentModes [ i ] = = VK_PRESENT_MODE_MAILBOX_KHR ) {
2015-10-10 16:41:19 +02:00
swapchainPresentMode = VK_PRESENT_MODE_MAILBOX_KHR ;
break ;
}
2016-01-06 23:08:26 +01:00
if ( ( flags_ & VULKAN_FLAG_PRESENT_FIFO_RELAXED ) & & presentModes [ i ] = = VK_PRESENT_MODE_FIFO_RELAXED_KHR ) {
swapchainPresentMode = VK_PRESENT_MODE_FIFO_RELAXED_KHR ;
break ;
}
2016-01-02 02:08:05 +01:00
if ( ( flags_ & VULKAN_FLAG_PRESENT_IMMEDIATE ) & & presentModes [ i ] = = VK_PRESENT_MODE_IMMEDIATE_KHR ) {
2015-10-10 16:41:19 +02:00
swapchainPresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR ;
2016-01-02 02:08:05 +01:00
break ;
2015-10-10 16:41:19 +02:00
}
}
2016-10-12 11:13:16 +02:00
# ifdef __ANDROID__
2016-02-25 18:52:33 +01:00
// HACK
swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR ;
# endif
2017-11-10 13:02:24 +01:00
ILOG ( " Chosen present mode: %d (%s) " , swapchainPresentMode , PresentModeString ( swapchainPresentMode ) ) ;
2016-01-10 14:24:10 +01:00
delete [ ] presentModes ;
2015-10-10 16:41:19 +02:00
// Determine the number of VkImage's to use in the swap chain (we desire to
// own only 1 image at a time, besides the images being displayed and
// queued for display):
2017-11-15 13:57:22 +01:00
uint32_t desiredNumberOfSwapChainImages = surfCapabilities_ . minImageCount + 1 ;
2016-02-25 18:52:33 +01:00
ILOG ( " numSwapChainImages: %d " , desiredNumberOfSwapChainImages ) ;
2017-11-15 13:57:22 +01:00
if ( ( surfCapabilities_ . maxImageCount > 0 ) & &
( desiredNumberOfSwapChainImages > surfCapabilities_ . maxImageCount ) )
2015-10-10 16:41:19 +02:00
{
// Application must settle for fewer images than desired:
2017-11-15 13:57:22 +01:00
desiredNumberOfSwapChainImages = surfCapabilities_ . maxImageCount ;
2015-10-10 16:41:19 +02:00
}
VkSurfaceTransformFlagBitsKHR preTransform ;
2017-11-15 13:57:22 +01:00
if ( surfCapabilities_ . supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR ) {
2015-10-10 16:41:19 +02:00
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR ;
} else {
2017-11-15 13:57:22 +01:00
preTransform = surfCapabilities_ . currentTransform ;
2015-10-10 16:41:19 +02:00
}
2016-04-02 23:57:23 +02:00
VkSwapchainCreateInfoKHR swap_chain_info = { VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR } ;
2016-02-25 18:52:33 +01:00
swap_chain_info . surface = surface_ ;
2015-10-10 16:41:19 +02:00
swap_chain_info . minImageCount = desiredNumberOfSwapChainImages ;
2017-08-19 17:32:10 +02:00
swap_chain_info . imageFormat = swapchainFormat_ ;
2016-07-01 10:39:34 -07:00
swap_chain_info . imageColorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR ;
2015-10-10 16:41:19 +02:00
swap_chain_info . imageExtent . width = swapChainExtent . width ;
swap_chain_info . imageExtent . height = swapChainExtent . height ;
swap_chain_info . preTransform = preTransform ;
swap_chain_info . imageArrayLayers = 1 ;
swap_chain_info . presentMode = swapchainPresentMode ;
swap_chain_info . oldSwapchain = VK_NULL_HANDLE ;
swap_chain_info . clipped = true ;
2017-11-15 13:57:22 +01:00
swap_chain_info . imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT ;
if ( surfCapabilities_ . supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT )
swap_chain_info . imageUsage | = VK_IMAGE_USAGE_TRANSFER_DST_BIT ;
2017-11-15 13:18:29 +01:00
# ifndef ANDROID
// We don't support screenshots on Android
2017-11-15 13:57:22 +01:00
// Add more usage flags if they're supported.
if ( surfCapabilities_ . supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT )
swap_chain_info . imageUsage | = VK_IMAGE_USAGE_TRANSFER_SRC_BIT ;
2017-11-15 13:18:29 +01:00
# endif
2015-10-10 16:41:19 +02:00
swap_chain_info . imageSharingMode = VK_SHARING_MODE_EXCLUSIVE ;
swap_chain_info . queueFamilyIndexCount = 0 ;
swap_chain_info . pQueueFamilyIndices = NULL ;
2016-10-10 22:06:40 -07:00
// OPAQUE is not supported everywhere.
2017-11-15 13:57:22 +01:00
if ( surfCapabilities_ . supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR ) {
2016-10-10 22:06:40 -07:00
swap_chain_info . compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR ;
} else {
// This should be supported anywhere, and is the only thing supported on the SHIELD TV, for example.
swap_chain_info . compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR ;
}
2015-10-10 16:41:19 +02:00
2017-08-19 17:32:10 +02:00
res = vkCreateSwapchainKHR ( device_ , & swap_chain_info , NULL , & swapchain_ ) ;
2017-04-15 16:26:26 -07:00
if ( res ! = VK_SUCCESS ) {
2017-11-20 11:57:54 +01:00
ELOG ( " vkCreateSwapchainKHR failed! " ) ;
2017-04-15 16:26:26 -07:00
return false ;
}
2015-10-10 16:41:19 +02:00
2017-04-15 16:26:26 -07:00
return true ;
2015-10-10 16:41:19 +02:00
}
2016-01-02 02:08:05 +01:00
VkFence VulkanContext::CreateFence(bool presignalled) {
	// Creates a fence, optionally already in the signaled state so the first
	// wait on it passes immediately. Caller owns the returned fence.
	VkFenceCreateInfo fenceInfo{ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
	if (presignalled) {
		fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
	}
	VkFence fence;
	vkCreateFence(device_, &fenceInfo, NULL, &fence);
	return fence;
}
void VulkanContext : : DestroyDevice ( ) {
2017-11-09 16:02:05 +01:00
ILOG ( " VulkanContext::DestroyDevice (performing deletes) " ) ;
2017-08-22 17:18:54 +02:00
// If there happen to be any pending deletes, now is a good time.
for ( int i = 0 ; i < ARRAY_SIZE ( frame_ ) ; i + + ) {
frame_ [ i ] . deleteList . PerformDeletes ( device_ ) ;
}
Delete ( ) . PerformDeletes ( device_ ) ;
2016-03-21 19:41:20 -07:00
vkDestroyDevice ( device_ , nullptr ) ;
device_ = nullptr ;
2015-10-10 16:41:19 +02:00
}
2016-01-03 18:31:03 +01:00
VkPipelineCache VulkanContext::CreatePipelineCache() {
	// Creates an empty pipeline cache (not seeded with any initial data).
	// Caller owns the returned cache.
	VkPipelineCacheCreateInfo pc{ VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO };
	pc.flags = 0;
	pc.initialDataSize = 0;
	pc.pInitialData = nullptr;
	VkPipelineCache cache;
	VkResult res = vkCreatePipelineCache(device_, &pc, nullptr, &cache);
	assert(VK_SUCCESS == res);
	return cache;
}
2016-01-05 21:18:43 +01:00
bool VulkanContext : : CreateShaderModule ( const std : : vector < uint32_t > & spirv , VkShaderModule * shaderModule ) {
2017-10-20 18:09:05 +02:00
VkShaderModuleCreateInfo sm { VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO } ;
2016-01-05 21:18:43 +01:00
sm . pCode = spirv . data ( ) ;
sm . codeSize = spirv . size ( ) * sizeof ( uint32_t ) ;
sm . flags = 0 ;
2017-08-28 13:45:04 +02:00
VkResult result = vkCreateShaderModule ( device_ , & sm , nullptr , shaderModule ) ;
2016-01-05 21:18:43 +01:00
if ( result ! = VK_SUCCESS ) {
return false ;
} else {
return true ;
}
}
2015-10-10 16:41:19 +02:00
2017-12-05 13:05:11 +01:00
// Records a single image-memory-barrier pipeline barrier into cmd, transitioning
// the given mip range of `image` from oldImageLayout to newImageLayout with the
// supplied stage and access masks. No queue family ownership transfer is done.
void TransitionImageLayout2(VkCommandBuffer cmd, VkImage image, int baseMip, int numMipLevels, VkImageAspectFlags aspectMask,
	VkImageLayout oldImageLayout, VkImageLayout newImageLayout,
	VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
	VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask) {
	VkImageMemoryBarrier barrier{ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
	barrier.image = image;
	barrier.oldLayout = oldImageLayout;
	barrier.newLayout = newImageLayout;
	barrier.srcAccessMask = srcAccessMask;
	barrier.dstAccessMask = dstAccessMask;
	// No ownership transfer between queue families.
	barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	barrier.subresourceRange.aspectMask = aspectMask;
	barrier.subresourceRange.baseMipLevel = baseMip;
	barrier.subresourceRange.levelCount = numMipLevels;
	// We never use more than one layer, and old Mali drivers have problems
	// with VK_REMAINING_ARRAY_LAYERS/VK_REMAINING_MIP_LEVELS.
	barrier.subresourceRange.layerCount = 1;
	vkCmdPipelineBarrier(cmd, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1, &barrier);
}
2015-10-10 16:41:19 +02:00
// Maps a Vulkan shader stage bit to the corresponding glslang stage enum.
// Unrecognized stages fall back to the vertex stage.
EShLanguage FindLanguage(const VkShaderStageFlagBits shader_type) {
	switch (shader_type) {
	case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
		return EShLangTessControl;
	case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
		return EShLangTessEvaluation;
	case VK_SHADER_STAGE_GEOMETRY_BIT:
		return EShLangGeometry;
	case VK_SHADER_STAGE_FRAGMENT_BIT:
		return EShLangFragment;
	case VK_SHADER_STAGE_COMPUTE_BIT:
		return EShLangCompute;
	case VK_SHADER_STAGE_VERTEX_BIT:
	default:
		return EShLangVertex;
	}
}
// Compile a given string containing GLSL into SPV for use by VK
// Return value of false means an error was encountered.
bool GLSLtoSPV ( const VkShaderStageFlagBits shader_type ,
const char * pshader ,
2016-01-02 02:08:05 +01:00
std : : vector < unsigned int > & spirv , std : : string * errorMessage ) {
2015-10-10 16:41:19 +02:00
2016-01-10 14:24:10 +01:00
glslang : : TProgram program ;
2015-10-10 16:41:19 +02:00
const char * shaderStrings [ 1 ] ;
TBuiltInResource Resources ;
init_resources ( Resources ) ;
// Enable SPIR-V and Vulkan rules when parsing GLSL
EShMessages messages = ( EShMessages ) ( EShMsgSpvRules | EShMsgVulkanRules ) ;
EShLanguage stage = FindLanguage ( shader_type ) ;
2016-01-10 14:24:10 +01:00
glslang : : TShader shader ( stage ) ;
2015-10-10 16:41:19 +02:00
shaderStrings [ 0 ] = pshader ;
2016-01-10 14:24:10 +01:00
shader . setStrings ( shaderStrings , 1 ) ;
2015-10-10 16:41:19 +02:00
2016-01-10 14:24:10 +01:00
if ( ! shader . parse ( & Resources , 100 , false , messages ) ) {
puts ( shader . getInfoLog ( ) ) ;
puts ( shader . getInfoDebugLog ( ) ) ;
2016-01-02 02:08:05 +01:00
if ( errorMessage ) {
2016-01-10 14:24:10 +01:00
* errorMessage = shader . getInfoLog ( ) ;
( * errorMessage ) + = shader . getInfoDebugLog ( ) ;
2016-01-02 02:08:05 +01:00
}
2015-10-10 16:41:19 +02:00
return false ; // something didn't work
}
2016-01-10 14:24:10 +01:00
// Note that program does not take ownership of &shader, so this is fine.
program . addShader ( & shader ) ;
2015-10-10 16:41:19 +02:00
if ( ! program . link ( messages ) ) {
2016-01-10 14:24:10 +01:00
puts ( shader . getInfoLog ( ) ) ;
puts ( shader . getInfoDebugLog ( ) ) ;
2016-01-02 02:08:05 +01:00
if ( errorMessage ) {
2016-01-10 14:24:10 +01:00
* errorMessage = shader . getInfoLog ( ) ;
( * errorMessage ) + = shader . getInfoDebugLog ( ) ;
2016-01-02 02:08:05 +01:00
}
2015-10-10 16:41:19 +02:00
return false ;
}
2016-01-02 02:08:05 +01:00
// Can't fail, parsing worked, "linking" worked.
2015-10-10 16:41:19 +02:00
glslang : : GlslangToSpv ( * program . getIntermediate ( stage ) , spirv ) ;
return true ;
}
// Initializes glslang's process-wide state. Must be called once before any
// GLSLtoSPV() call; pair with finalize_glslang() at shutdown.
void init_glslang() {
	glslang::InitializeProcess();
}
// Tears down glslang's process-wide state; counterpart to init_glslang().
void finalize_glslang() {
	glslang::FinalizeProcess();
}
2015-12-31 01:07:06 +01:00
// Returns a human-readable name for a VkResult, for logging.
// Unknown values map to a generic placeholder string.
const char *VulkanResultToString(VkResult res) {
	switch (res) {
	case VK_SUCCESS: return "VK_SUCCESS";  // was missing: success logged as "unknown" otherwise
	case VK_NOT_READY: return "VK_NOT_READY";
	case VK_TIMEOUT: return "VK_TIMEOUT";
	case VK_EVENT_SET: return "VK_EVENT_SET";
	case VK_EVENT_RESET: return "VK_EVENT_RESET";
	case VK_INCOMPLETE: return "VK_INCOMPLETE";
	case VK_ERROR_OUT_OF_HOST_MEMORY: return "VK_ERROR_OUT_OF_HOST_MEMORY";
	case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
	case VK_ERROR_INITIALIZATION_FAILED: return "VK_ERROR_INITIALIZATION_FAILED";
	case VK_ERROR_DEVICE_LOST: return "VK_ERROR_DEVICE_LOST";
	case VK_ERROR_MEMORY_MAP_FAILED: return "VK_ERROR_MEMORY_MAP_FAILED";
	case VK_ERROR_LAYER_NOT_PRESENT: return "VK_ERROR_LAYER_NOT_PRESENT";
	case VK_ERROR_EXTENSION_NOT_PRESENT: return "VK_ERROR_EXTENSION_NOT_PRESENT";
	case VK_ERROR_FEATURE_NOT_PRESENT: return "VK_ERROR_FEATURE_NOT_PRESENT";
	case VK_ERROR_INCOMPATIBLE_DRIVER: return "VK_ERROR_INCOMPATIBLE_DRIVER";
	case VK_ERROR_TOO_MANY_OBJECTS: return "VK_ERROR_TOO_MANY_OBJECTS";
	case VK_ERROR_FORMAT_NOT_SUPPORTED: return "VK_ERROR_FORMAT_NOT_SUPPORTED";
	case VK_ERROR_SURFACE_LOST_KHR: return "VK_ERROR_SURFACE_LOST_KHR";
	case VK_SUBOPTIMAL_KHR: return "VK_SUBOPTIMAL_KHR";
	case VK_ERROR_OUT_OF_DATE_KHR: return "VK_ERROR_OUT_OF_DATE_KHR";
	case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
	case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
	case VK_ERROR_OUT_OF_POOL_MEMORY_KHR: return "VK_ERROR_OUT_OF_POOL_MEMORY_KHR";
	case VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR: return "VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR";

	default:
		return "VK_ERROR_...(unknown)";
	}
}
2017-11-09 17:40:32 +01:00
void VulkanDeleteList : : Take ( VulkanDeleteList & del ) {
2018-01-16 17:32:50 +01:00
assert ( cmdPools_ . empty ( ) ) ;
assert ( descPools_ . empty ( ) ) ;
assert ( modules_ . empty ( ) ) ;
assert ( buffers_ . empty ( ) ) ;
assert ( bufferViews_ . empty ( ) ) ;
assert ( images_ . empty ( ) ) ;
assert ( imageViews_ . empty ( ) ) ;
assert ( deviceMemory_ . empty ( ) ) ;
assert ( samplers_ . empty ( ) ) ;
assert ( pipelines_ . empty ( ) ) ;
assert ( pipelineCaches_ . empty ( ) ) ;
assert ( renderPasses_ . empty ( ) ) ;
assert ( framebuffers_ . empty ( ) ) ;
assert ( pipelineLayouts_ . empty ( ) ) ;
assert ( descSetLayouts_ . empty ( ) ) ;
assert ( callbacks_ . empty ( ) ) ;
2017-11-09 17:40:32 +01:00
cmdPools_ = std : : move ( del . cmdPools_ ) ;
descPools_ = std : : move ( del . descPools_ ) ;
modules_ = std : : move ( del . modules_ ) ;
buffers_ = std : : move ( del . buffers_ ) ;
bufferViews_ = std : : move ( del . bufferViews_ ) ;
images_ = std : : move ( del . images_ ) ;
imageViews_ = std : : move ( del . imageViews_ ) ;
deviceMemory_ = std : : move ( del . deviceMemory_ ) ;
samplers_ = std : : move ( del . samplers_ ) ;
pipelines_ = std : : move ( del . pipelines_ ) ;
pipelineCaches_ = std : : move ( del . pipelineCaches_ ) ;
renderPasses_ = std : : move ( del . renderPasses_ ) ;
framebuffers_ = std : : move ( del . framebuffers_ ) ;
pipelineLayouts_ = std : : move ( del . pipelineLayouts_ ) ;
descSetLayouts_ = std : : move ( del . descSetLayouts_ ) ;
callbacks_ = std : : move ( del . callbacks_ ) ;
2017-12-10 14:36:24 +01:00
del . cmdPools_ . clear ( ) ;
del . descPools_ . clear ( ) ;
del . modules_ . clear ( ) ;
del . buffers_ . clear ( ) ;
del . images_ . clear ( ) ;
del . imageViews_ . clear ( ) ;
del . deviceMemory_ . clear ( ) ;
del . samplers_ . clear ( ) ;
del . pipelines_ . clear ( ) ;
del . pipelineCaches_ . clear ( ) ;
del . renderPasses_ . clear ( ) ;
del . framebuffers_ . clear ( ) ;
del . pipelineLayouts_ . clear ( ) ;
del . descSetLayouts_ . clear ( ) ;
del . callbacks_ . clear ( ) ;
2017-11-09 17:40:32 +01:00
}
void VulkanDeleteList : : PerformDeletes ( VkDevice device ) {
for ( auto & cmdPool : cmdPools_ ) {
vkDestroyCommandPool ( device , cmdPool , nullptr ) ;
}
cmdPools_ . clear ( ) ;
for ( auto & descPool : descPools_ ) {
vkDestroyDescriptorPool ( device , descPool , nullptr ) ;
}
descPools_ . clear ( ) ;
for ( auto & module : modules_ ) {
vkDestroyShaderModule ( device , module , nullptr ) ;
}
modules_ . clear ( ) ;
for ( auto & buf : buffers_ ) {
vkDestroyBuffer ( device , buf , nullptr ) ;
}
buffers_ . clear ( ) ;
for ( auto & bufView : bufferViews_ ) {
vkDestroyBufferView ( device , bufView , nullptr ) ;
}
bufferViews_ . clear ( ) ;
for ( auto & image : images_ ) {
vkDestroyImage ( device , image , nullptr ) ;
}
images_ . clear ( ) ;
for ( auto & imageView : imageViews_ ) {
vkDestroyImageView ( device , imageView , nullptr ) ;
}
imageViews_ . clear ( ) ;
for ( auto & mem : deviceMemory_ ) {
vkFreeMemory ( device , mem , nullptr ) ;
}
deviceMemory_ . clear ( ) ;
for ( auto & sampler : samplers_ ) {
vkDestroySampler ( device , sampler , nullptr ) ;
}
samplers_ . clear ( ) ;
for ( auto & pipeline : pipelines_ ) {
vkDestroyPipeline ( device , pipeline , nullptr ) ;
}
pipelines_ . clear ( ) ;
for ( auto & pcache : pipelineCaches_ ) {
vkDestroyPipelineCache ( device , pcache , nullptr ) ;
}
pipelineCaches_ . clear ( ) ;
for ( auto & renderPass : renderPasses_ ) {
vkDestroyRenderPass ( device , renderPass , nullptr ) ;
}
renderPasses_ . clear ( ) ;
for ( auto & framebuffer : framebuffers_ ) {
vkDestroyFramebuffer ( device , framebuffer , nullptr ) ;
}
framebuffers_ . clear ( ) ;
for ( auto & pipeLayout : pipelineLayouts_ ) {
vkDestroyPipelineLayout ( device , pipeLayout , nullptr ) ;
}
pipelineLayouts_ . clear ( ) ;
for ( auto & descSetLayout : descSetLayouts_ ) {
vkDestroyDescriptorSetLayout ( device , descSetLayout , nullptr ) ;
}
descSetLayouts_ . clear ( ) ;
for ( auto & callback : callbacks_ ) {
callback . func ( callback . userdata ) ;
}
callbacks_ . clear ( ) ;
}