2016-02-21 18:05:01 +01:00
# define __STDC_LIMIT_MACROS
2015-10-10 16:41:19 +02:00
# include <cstdlib>
2016-02-21 18:05:01 +01:00
# include <cstdint>
2015-10-10 16:41:19 +02:00
# include <cstring>
# include <iostream>
2016-03-13 16:22:46 +01:00
# include "base/basictypes.h"
2018-01-20 21:47:16 +01:00
# include "base/display.h"
2020-08-15 12:12:57 +02:00
# include "Common/Log.h"
2020-06-21 22:49:44 +02:00
# include "Common/Vulkan/VulkanContext.h"
# include "Common/Vulkan/VulkanDebug.h"
2017-02-25 00:25:46 +01:00
# include "GPU/Common/ShaderCommon.h"
2017-11-09 12:26:08 +01:00
# include "Common/StringUtils.h"
2018-01-20 21:47:16 +01:00
# include "Core/Config.h"
2015-10-10 16:41:19 +02:00
2017-11-26 16:13:04 +01:00
// Change this to 1, 2, and 3 to fake failures in a few places, so that
// we can test our fallback-to-GL code.
# define SIMULATE_VULKAN_FAILURE 0
2015-10-10 16:41:19 +02:00
# ifdef USE_CRT_DBG
# undef new
# endif
2016-02-21 18:05:01 +01:00
# ifdef _MSC_VER
2015-12-20 23:39:03 +01:00
# pragma warning(push)
# pragma warning(disable:4996)
2016-02-21 18:05:01 +01:00
# endif
# include "ext/glslang/SPIRV/GlslangToSpv.h"
# ifdef _MSC_VER
2015-12-20 23:39:03 +01:00
# pragma warning(pop)
2016-02-21 18:05:01 +01:00
# endif
2015-10-10 16:41:19 +02:00
# ifdef USE_CRT_DBG
# define new DBG_NEW
# endif
2020-06-21 22:56:01 +02:00
// Global log options; handed to the Vulkan debug-utils callback as user data
// (see InitDebugUtilsCallback below).
VulkanLogOptions g_LogOptions;

// Layer names requested when validation is enabled (VULKAN_FLAG_VALIDATE).
// These are pushed into both instance_layer_names_ and device_layer_names_.
static const char *validationLayers[] = {
	"VK_LAYER_KHRONOS_validation",
	/*
	// For layers included in the Android NDK.
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_parameter_validation",
	"VK_LAYER_LUNARG_core_validation",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_swapchain",
	"VK_LAYER_GOOGLE_unique_objects",
	*/
};
2017-11-09 12:26:08 +01:00
std : : string VulkanVendorString ( uint32_t vendorId ) {
switch ( vendorId ) {
case VULKAN_VENDOR_INTEL : return " Intel " ;
2019-02-04 14:33:01 +01:00
case VULKAN_VENDOR_NVIDIA : return " NVIDIA " ;
2017-11-09 12:26:08 +01:00
case VULKAN_VENDOR_AMD : return " AMD " ;
case VULKAN_VENDOR_ARM : return " ARM " ;
case VULKAN_VENDOR_QUALCOMM : return " Qualcomm " ;
case VULKAN_VENDOR_IMGTEC : return " Imagination " ;
default :
return StringFromFormat ( " %08x " , vendorId ) ;
}
}
2017-11-10 13:02:24 +01:00
// Returns a static, human-readable name for a VkPresentModeKHR value,
// or "UNKNOWN" for values we don't handle.
const char *PresentModeString(VkPresentModeKHR presentMode) {
	switch (presentMode) {
	case VK_PRESENT_MODE_IMMEDIATE_KHR:
		return "IMMEDIATE";
	case VK_PRESENT_MODE_MAILBOX_KHR:
		return "MAILBOX";
	case VK_PRESENT_MODE_FIFO_KHR:
		return "FIFO";
	case VK_PRESENT_MODE_FIFO_RELAXED_KHR:
		return "FIFO_RELAXED";
	case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:
		return "SHARED_DEMAND_REFRESH_KHR";
	case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR:
		return "SHARED_CONTINUOUS_REFRESH_KHR";
	default:
		return "UNKNOWN";
	}
}
2017-08-28 14:12:56 +02:00
// Construction is deliberately cheap; all real initialization happens in
// CreateInstance() / CreateDevice().
VulkanContext::VulkanContext() {}
2017-12-18 12:22:12 +01:00
// Creates the VkInstance: picks the platform surface extension, optionally
// enables validation (VK_EXT_debug_utils + the Khronos validation layer),
// then enumerates physical devices and caches their properties.
// On failure, sets init_error_ and returns a non-success VkResult.
VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
	if (!vkCreateInstance) {
		// The loader didn't resolve the entry points - no Vulkan on this system.
		init_error_ = "Vulkan not loaded - can't create instance";
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	instance_layer_names_.clear();
	device_layer_names_.clear();

	// We can get the list of layers and extensions without an instance so we can use this information
	// to enable the extensions we need that are available.
	GetInstanceLayerProperties();
	GetInstanceLayerExtensionList(nullptr, instance_extension_properties_);

	if (!IsInstanceExtensionAvailable(VK_KHR_SURFACE_EXTENSION_NAME)) {
		// Cannot create a Vulkan display without VK_KHR_SURFACE_EXTENSION.
		init_error_ = "Vulkan not loaded - no surface extension";
		return VK_ERROR_INITIALIZATION_FAILED;
	}
	flags_ = info.flags;

	// List extensions to try to enable.
	instance_extensions_enabled_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
#ifdef _WIN32
	instance_extensions_enabled_.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#elif defined(__ANDROID__)
	instance_extensions_enabled_.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
#else
	// Desktop Linux/macOS: probe for whichever windowing-system surface
	// extensions the build supports and the driver exposes.
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
	}
#endif
//#if defined(VK_USE_PLATFORM_XCB_KHR)
//	instance_extensions_enabled_.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
//#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_METAL_EXT)
	if (IsInstanceExtensionAvailable(VK_EXT_METAL_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME);
	}
#endif
#endif

	if (flags_ & VULKAN_FLAG_VALIDATE) {
		if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
			// Enable the validation layers
			for (size_t i = 0; i < ARRAY_SIZE(validationLayers); i++) {
				instance_layer_names_.push_back(validationLayers[i]);
				device_layer_names_.push_back(validationLayers[i]);
			}
			instance_extensions_enabled_.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
			extensionsLookup_.EXT_debug_utils = true;
			INFO_LOG(G3D, "Vulkan debug_utils validation enabled.");
		} else {
			// Can't validate without the debug_utils extension; clear the flag
			// so later code doesn't assume validation is on.
			ERROR_LOG(G3D, "Validation layer extension not available - not enabling Vulkan validation.");
			flags_ &= ~VULKAN_FLAG_VALIDATE;
		}
	}

	if (IsInstanceExtensionAvailable(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
		extensionsLookup_.KHR_get_physical_device_properties2 = true;
	}

	// Validate that all the instance extensions we ask for are actually available.
	for (auto ext : instance_extensions_enabled_) {
		if (!IsInstanceExtensionAvailable(ext))
			WARN_LOG(G3D, "WARNING: Does not seem that instance extension '%s' is available. Trying to proceed anyway.", ext);
	}

	VkApplicationInfo app_info{ VK_STRUCTURE_TYPE_APPLICATION_INFO };
	app_info.pApplicationName = info.app_name;
	app_info.applicationVersion = info.app_ver;
	app_info.pEngineName = info.app_name;
	// Let's increment this when we make major engine/context changes.
	app_info.engineVersion = 2;
	app_info.apiVersion = VK_API_VERSION_1_0;

	VkInstanceCreateInfo inst_info{ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
	inst_info.flags = 0;
	inst_info.pApplicationInfo = &app_info;
	inst_info.enabledLayerCount = (uint32_t)instance_layer_names_.size();
	inst_info.ppEnabledLayerNames = instance_layer_names_.size() ? instance_layer_names_.data() : nullptr;
	inst_info.enabledExtensionCount = (uint32_t)instance_extensions_enabled_.size();
	inst_info.ppEnabledExtensionNames = instance_extensions_enabled_.size() ? instance_extensions_enabled_.data() : nullptr;

#if SIMULATE_VULKAN_FAILURE == 2
	// Simulated failure path for testing the GL fallback.
	VkResult res = VK_ERROR_INCOMPATIBLE_DRIVER;
#else
	VkResult res = vkCreateInstance(&inst_info, nullptr, &instance_);
#endif
	if (res != VK_SUCCESS) {
		if (res == VK_ERROR_LAYER_NOT_PRESENT) {
			WARN_LOG(G3D, "Validation on but instance layer not available - dropping layers");
			// Drop the validation layers and try again.
			instance_layer_names_.clear();
			device_layer_names_.clear();
			inst_info.enabledLayerCount = 0;
			inst_info.ppEnabledLayerNames = nullptr;
			res = vkCreateInstance(&inst_info, nullptr, &instance_);
			if (res != VK_SUCCESS)
				ERROR_LOG(G3D, "Failed to create instance even without validation: %d", res);
		} else {
			ERROR_LOG(G3D, "Failed to create instance : %d", res);
		}
	}
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to create Vulkan instance";
		return res;
	}

	// Resolve instance-level entry points now that we have an instance.
	VulkanLoadInstanceFunctions(instance_, extensionsLookup_);
	if (!CheckLayers(instance_layer_properties_, instance_layer_names_)) {
		WARN_LOG(G3D, "CheckLayers for instance failed");
		// init_error_ = "Failed to validate instance layers";
		// return;
	}

	uint32_t gpu_count = 1;
#if SIMULATE_VULKAN_FAILURE == 3
	// Simulated "no GPUs" failure for testing the GL fallback.
	gpu_count = 0;
#else
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, nullptr);
#endif
	if (gpu_count <= 0) {
		ERROR_LOG(G3D, "Vulkan driver found but no supported GPU is available");
		init_error_ = "No Vulkan physical devices found";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	_dbg_assert_(gpu_count > 0);
	physical_devices_.resize(gpu_count);
	physicalDeviceProperties_.resize(gpu_count);
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, physical_devices_.data());
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to enumerate physical devices";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return res;
	}

	if (extensionsLookup_.KHR_get_physical_device_properties2) {
		// Query extended properties (push descriptor and external-host-memory
		// limits) via the pNext chain in a single call per device.
		for (uint32_t i = 0; i < gpu_count; i++) {
			VkPhysicalDeviceProperties2 props2{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 };
			VkPhysicalDevicePushDescriptorPropertiesKHR pushProps{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR };
			VkPhysicalDeviceExternalMemoryHostPropertiesEXT extHostMemProps{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT };
			props2.pNext = &pushProps;
			pushProps.pNext = &extHostMemProps;
			vkGetPhysicalDeviceProperties2KHR(physical_devices_[i], &props2);
			// Don't want bad pointers sitting around.
			props2.pNext = nullptr;
			pushProps.pNext = nullptr;
			physicalDeviceProperties_[i].properties = props2.properties;
			physicalDeviceProperties_[i].pushDescriptorProperties = pushProps;
			physicalDeviceProperties_[i].externalMemoryHostProperties = extHostMemProps;
		}
	} else {
		for (uint32_t i = 0; i < gpu_count; i++) {
			vkGetPhysicalDeviceProperties(physical_devices_[i], &physicalDeviceProperties_[i].properties);
		}
	}

	if (extensionsLookup_.EXT_debug_utils) {
		InitDebugUtilsCallback();
	}

	return VK_SUCCESS;
}
VulkanContext::~VulkanContext() {
	// The owner must call DestroyInstance() before destruction; we only verify
	// that here rather than destroying anything ourselves.
	_dbg_assert_(instance_ == VK_NULL_HANDLE);
}
void VulkanContext : : DestroyInstance ( ) {
2020-06-21 22:59:55 +02:00
if ( extensionsLookup_ . EXT_debug_utils ) {
while ( utils_callbacks . size ( ) > 0 ) {
vkDestroyDebugUtilsMessengerEXT ( instance_ , utils_callbacks . back ( ) , nullptr ) ;
utils_callbacks . pop_back ( ) ;
}
}
2017-08-28 13:45:04 +02:00
vkDestroyInstance ( instance_ , nullptr ) ;
2016-02-21 18:05:01 +01:00
VulkanFree ( ) ;
2017-11-09 16:02:05 +01:00
instance_ = VK_NULL_HANDLE ;
2015-10-10 16:41:19 +02:00
}
2017-08-19 17:32:10 +02:00
void VulkanContext : : BeginFrame ( ) {
2016-01-03 00:46:41 +01:00
FrameData * frame = & frame_ [ curFrame_ ] ;
2016-01-06 00:38:45 +01:00
// Process pending deletes.
frame - > deleteList . PerformDeletes ( device_ ) ;
2017-05-07 11:28:57 +02:00
}
2016-01-02 02:08:05 +01:00
2017-05-07 11:28:57 +02:00
void VulkanContext : : EndFrame ( ) {
2017-08-19 17:32:10 +02:00
frame_ [ curFrame_ ] . deleteList . Take ( globalDeleteList_ ) ;
2017-08-17 17:55:21 +02:00
curFrame_ + + ;
if ( curFrame_ > = inflightFrames_ ) {
curFrame_ = 0 ;
}
2016-01-02 02:08:05 +01:00
}
2020-02-29 23:40:55 -08:00
// Changes the number of in-flight frames (1..MAX_INFLIGHT_FRAMES), wrapping
// the current frame index back to 0 if it now points past the end.
void VulkanContext::UpdateInflightFrames(int n) {
	_dbg_assert_(n >= 1 && n <= MAX_INFLIGHT_FRAMES);
	inflightFrames_ = n;
	if (curFrame_ >= inflightFrames_) {
		curFrame_ = 0;
	}
}
2016-01-06 00:38:45 +01:00
// Blocks until the graphics queue has drained completely.
void VulkanContext::WaitUntilQueueIdle() {
	// Should almost never be used
	vkQueueWaitIdle(gfx_queue_);
}
bool VulkanContext : : MemoryTypeFromProperties ( uint32_t typeBits , VkFlags requirements_mask , uint32_t * typeIndex ) {
// Search memtypes to find first index with those properties
for ( uint32_t i = 0 ; i < 32 ; i + + ) {
if ( ( typeBits & 1 ) = = 1 ) {
// Type is available, does it match user properties?
if ( ( memory_properties . memoryTypes [ i ] . propertyFlags & requirements_mask ) = = requirements_mask ) {
* typeIndex = i ;
return true ;
}
}
typeBits > > = 1 ;
}
// No memory types matched, return failure
return false ;
}
2020-07-18 20:35:39 +02:00
void VulkanContext : : DestroySwapchain ( ) {
if ( swapchain_ ! = VK_NULL_HANDLE ) {
vkDestroySwapchainKHR ( device_ , swapchain_ , nullptr ) ;
swapchain_ = VK_NULL_HANDLE ;
2017-11-12 21:50:54 -08:00
}
2015-10-10 16:41:19 +02:00
}
2020-07-18 20:35:39 +02:00
void VulkanContext : : DestroySurface ( ) {
if ( surface_ ! = VK_NULL_HANDLE ) {
vkDestroySurfaceKHR ( instance_ , surface_ , nullptr ) ;
surface_ = VK_NULL_HANDLE ;
}
2015-10-10 16:41:19 +02:00
}
2017-08-28 13:45:04 +02:00
// Fills `extensions` with the instance extensions provided by `layerName`
// (or by the implementation itself when layerName is nullptr).
// Uses the standard count/resize/fetch dance, retrying on VK_INCOMPLETE in
// case the count changed between the two calls.
VkResult VulkanContext::GetInstanceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
	VkResult res;
	do {
		uint32_t count = 0;
		res = vkEnumerateInstanceExtensionProperties(layerName, &count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (count == 0)
			return VK_SUCCESS;
		extensions.resize(count);
		res = vkEnumerateInstanceExtensionProperties(layerName, &count, extensions.data());
	} while (res == VK_INCOMPLETE);
	return res;
}
2017-08-28 14:12:56 +02:00
// Enumerates all available instance layers and, for each, the extensions it
// provides, appending the results to instance_layer_properties_.
VkResult VulkanContext::GetInstanceLayerProperties() {
	/*
	 * It's possible, though very rare, that the number of
	 * instance layers could change. For example, installing something
	 * could include new layers that the loader would pick up
	 * between the initial query for the count and the
	 * request for VkLayerProperties. The loader indicates that
	 * by returning a VK_INCOMPLETE status and will update the
	 * the count parameter.
	 * The count parameter will be updated with the number of
	 * entries loaded into the data pointer - in case the number
	 * of layers went down or is smaller than the size given.
	 */
	uint32_t instance_layer_count;
	std::vector<VkLayerProperties> vk_props;
	VkResult res;
	do {
		res = vkEnumerateInstanceLayerProperties(&instance_layer_count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (!instance_layer_count)
			return VK_SUCCESS;
		vk_props.resize(instance_layer_count);
		res = vkEnumerateInstanceLayerProperties(&instance_layer_count, vk_props.data());
	} while (res == VK_INCOMPLETE);

	// Now gather the extension list for each instance layer.
	for (uint32_t i = 0; i < instance_layer_count; i++) {
		LayerProperties layer_props;
		layer_props.properties = vk_props[i];
		res = GetInstanceLayerExtensionList(layer_props.properties.layerName, layer_props.extensions);
		if (res != VK_SUCCESS)
			return res;
		instance_layer_properties_.push_back(layer_props);
	}
	return res;
}
2017-08-28 13:45:04 +02:00
// Pass layerName == nullptr to get the extension list for the device.
// Fills `extensions` with the device extensions exposed by `layerName` on the
// currently chosen physical device, retrying on VK_INCOMPLETE.
VkResult VulkanContext::GetDeviceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
	VkResult res;
	do {
		uint32_t count = 0;
		res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (count == 0)
			return VK_SUCCESS;
		extensions.resize(count);
		res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &count, extensions.data());
	} while (res == VK_INCOMPLETE);
	return res;
}
2017-08-28 13:45:04 +02:00
// Enumerates the layers exposed by the currently chosen physical device and,
// for each, its device extensions, appending to device_layer_properties_.
VkResult VulkanContext::GetDeviceLayerProperties() {
	/*
	 * It's possible, though very rare, that the number of
	 * instance layers could change. For example, installing something
	 * could include new layers that the loader would pick up
	 * between the initial query for the count and the
	 * request for VkLayerProperties. The loader indicates that
	 * by returning a VK_INCOMPLETE status and will update the
	 * the count parameter.
	 * The count parameter will be updated with the number of
	 * entries loaded into the data pointer - in case the number
	 * of layers went down or is smaller than the size given.
	 */
	uint32_t device_layer_count;
	std::vector<VkLayerProperties> vk_props;
	VkResult res;
	do {
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &device_layer_count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (device_layer_count == 0)
			return VK_SUCCESS;
		vk_props.resize(device_layer_count);
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &device_layer_count, vk_props.data());
	} while (res == VK_INCOMPLETE);

	// Gather the list of extensions for each device layer.
	for (uint32_t i = 0; i < device_layer_count; i++) {
		LayerProperties layer_props;
		layer_props.properties = vk_props[i];
		res = GetDeviceLayerExtensionList(layer_props.properties.layerName, layer_props.extensions);
		if (res != VK_SUCCESS)
			return res;
		device_layer_properties_.push_back(layer_props);
	}
	return res;
}
2017-08-28 14:12:56 +02:00
// Returns true if all layer names specified in check_names can be found in given layer properties.
bool VulkanContext : : CheckLayers ( const std : : vector < LayerProperties > & layer_props , const std : : vector < const char * > & layer_names ) const {
2015-10-10 16:41:19 +02:00
uint32_t check_count = ( uint32_t ) layer_names . size ( ) ;
uint32_t layer_count = ( uint32_t ) layer_props . size ( ) ;
for ( uint32_t i = 0 ; i < check_count ; i + + ) {
2017-08-28 14:12:56 +02:00
bool found = false ;
2015-10-10 16:41:19 +02:00
for ( uint32_t j = 0 ; j < layer_count ; j + + ) {
if ( ! strcmp ( layer_names [ i ] , layer_props [ j ] . properties . layerName ) ) {
2017-08-28 14:12:56 +02:00
found = true ;
2015-10-10 16:41:19 +02:00
}
}
if ( ! found ) {
std : : cout < < " Cannot find layer: " < < layer_names [ i ] < < std : : endl ;
2017-08-28 14:12:56 +02:00
return false ;
2015-10-10 16:41:19 +02:00
}
}
2017-08-28 14:12:56 +02:00
return true ;
2015-10-10 16:41:19 +02:00
}
2018-04-15 09:56:37 +02:00
int VulkanContext : : GetPhysicalDeviceByName ( std : : string name ) {
for ( size_t i = 0 ; i < physical_devices_ . size ( ) ; i + + ) {
2019-02-05 10:05:22 +01:00
if ( physicalDeviceProperties_ [ i ] . properties . deviceName = = name )
2018-04-15 09:56:37 +02:00
return ( int ) i ;
}
return - 1 ;
}
2017-11-09 12:21:20 +01:00
int VulkanContext : : GetBestPhysicalDevice ( ) {
// Rules: Prefer discrete over embedded.
// Prefer nVidia over Intel.
int maxScore = - 1 ;
int best = - 1 ;
for ( size_t i = 0 ; i < physical_devices_ . size ( ) ; i + + ) {
int score = 0 ;
VkPhysicalDeviceProperties props ;
vkGetPhysicalDeviceProperties ( physical_devices_ [ i ] , & props ) ;
switch ( props . deviceType ) {
case VK_PHYSICAL_DEVICE_TYPE_CPU :
score + = 1 ;
break ;
2018-06-17 11:30:22 -07:00
case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU :
score + = 2 ;
break ;
2017-11-09 12:21:20 +01:00
case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU :
score + = 20 ;
break ;
case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU :
score + = 10 ;
break ;
2018-06-17 11:30:22 -07:00
default :
break ;
2017-11-09 12:21:20 +01:00
}
if ( props . vendorID = = VULKAN_VENDOR_AMD ) {
score + = 5 ;
} else if ( props . vendorID = = VULKAN_VENDOR_NVIDIA ) {
score + = 5 ;
}
if ( score > maxScore ) {
2017-11-09 12:26:08 +01:00
best = ( int ) i ;
2017-11-09 12:21:20 +01:00
maxScore = score ;
}
}
return best ;
}
2017-08-28 14:12:56 +02:00
// Selects which enumerated physical device to use and caches what we need
// from it: queue family info, a preferred depth/stencil format, memory
// properties, the available/enabled feature set and device extension list.
// Must be called before CreateDevice().
void VulkanContext::ChooseDevice(int physical_device) {
	physical_device_ = physical_device;
	INFO_LOG(G3D, "Chose physical device %d: %p", physical_device, physical_devices_[physical_device]);

	GetDeviceLayerProperties();
	if (!CheckLayers(device_layer_properties_, device_layer_names_)) {
		// Non-fatal: we proceed without the missing layers.
		WARN_LOG(G3D, "CheckLayers for device %d failed", physical_device);
	}

	vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, nullptr);
	_dbg_assert_(queue_count >= 1);

	queueFamilyProperties_.resize(queue_count);
	vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, queueFamilyProperties_.data());
	_dbg_assert_(queue_count >= 1);

	// Detect preferred formats, in this order.
	static const VkFormat depthStencilFormats[] = {
		VK_FORMAT_D24_UNORM_S8_UINT,
		VK_FORMAT_D32_SFLOAT_S8_UINT,
		VK_FORMAT_D16_UNORM_S8_UINT,
	};
	deviceInfo_.preferredDepthStencilFormat = VK_FORMAT_UNDEFINED;
	for (size_t i = 0; i < ARRAY_SIZE(depthStencilFormats); i++) {
		VkFormatProperties props;
		vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], depthStencilFormats[i], &props);
		if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
			deviceInfo_.preferredDepthStencilFormat = depthStencilFormats[i];
			break;
		}
	}
	if (deviceInfo_.preferredDepthStencilFormat == VK_FORMAT_UNDEFINED) {
		// WTF? This is bad. The spec requires support for at least one of these.
		ERROR_LOG(G3D, "Could not find a usable depth stencil format.");
	}

	// This is as good a place as any to do this. Log the memory types for diagnostics.
	vkGetPhysicalDeviceMemoryProperties(physical_devices_[physical_device_], &memory_properties);
	INFO_LOG(G3D, "Memory Types (%d):", memory_properties.memoryTypeCount);
	for (int i = 0; i < (int)memory_properties.memoryTypeCount; i++) {
		// Don't bother printing dummy memory types.
		if (!memory_properties.memoryTypes[i].propertyFlags)
			continue;
		INFO_LOG(G3D, "  %d: Heap %d; Flags: %s%s%s%s", i, memory_properties.memoryTypes[i].heapIndex,
			(memory_properties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) ? "DEVICE_LOCAL " : "",
			(memory_properties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ? "HOST_VISIBLE " : "",
			(memory_properties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) ? "HOST_CACHED " : "",
			(memory_properties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) ? "HOST_COHERENT " : "");
	}

	// Optional features
	if (extensionsLookup_.KHR_get_physical_device_properties2) {
		VkPhysicalDeviceFeatures2 features2{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR };
		vkGetPhysicalDeviceFeatures2KHR(physical_devices_[physical_device_], &features2);
		deviceFeatures_.available = features2.features;
	} else {
		vkGetPhysicalDeviceFeatures(physical_devices_[physical_device_], &deviceFeatures_.available);
	}

	deviceFeatures_.enabled = {};
	// Enable a few safe ones if they are available.
	if (deviceFeatures_.available.dualSrcBlend) {
		deviceFeatures_.enabled.dualSrcBlend = true;
	}
	if (deviceFeatures_.available.largePoints) {
		deviceFeatures_.enabled.largePoints = true;
	}
	if (deviceFeatures_.available.wideLines) {
		deviceFeatures_.enabled.wideLines = true;
	}
	if (deviceFeatures_.available.logicOp) {
		deviceFeatures_.enabled.logicOp = true;
	}
	if (deviceFeatures_.available.depthClamp) {
		deviceFeatures_.enabled.depthClamp = true;
	}
	if (deviceFeatures_.available.depthBounds) {
		deviceFeatures_.enabled.depthBounds = true;
	}
	if (deviceFeatures_.available.samplerAnisotropy) {
		deviceFeatures_.enabled.samplerAnisotropy = true;
	}
	// For easy wireframe mode, someday.
	if (deviceFeatures_.available.fillModeNonSolid) {
		deviceFeatures_.enabled.fillModeNonSolid = true;
	}

	GetDeviceLayerExtensionList(nullptr, device_extension_properties_);

	// The swapchain extension is mandatory for us - we can't present without it.
	device_extensions_enabled_.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
}
bool VulkanContext : : EnableDeviceExtension ( const char * extension ) {
for ( auto & iter : device_extension_properties_ ) {
if ( ! strcmp ( iter . extensionName , extension ) ) {
device_extensions_enabled_ . push_back ( extension ) ;
return true ;
}
2017-08-28 13:45:04 +02:00
}
2017-08-28 14:12:56 +02:00
return false ;
}
// Creates the logical VkDevice on the chosen physical device: picks a
// graphics-capable queue family, enables optional device extensions (recorded
// in extensionsLookup_) and the features selected in ChooseDevice(), then
// loads device-level function pointers. Sets init_error_ on failure.
VkResult VulkanContext::CreateDevice() {
	if (!init_error_.empty() || physical_device_ < 0) {
		ERROR_LOG(G3D, "Vulkan init failed: %s", init_error_.c_str());
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	VkDeviceQueueCreateInfo queue_info{ VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO };
	float queue_priorities[1] = { 1.0f };
	queue_info.queueCount = 1;
	queue_info.pQueuePriorities = queue_priorities;
	// Use the first queue family that supports graphics.
	bool found = false;
	for (int i = 0; i < (int)queue_count; i++) {
		if (queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
			queue_info.queueFamilyIndex = i;
			found = true;
			break;
		}
	}
	_dbg_assert_(found);

	// Optional extensions; EnableDeviceExtension returns whether each one is
	// actually available, which we remember for runtime checks.
	extensionsLookup_.KHR_maintenance1 = EnableDeviceExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME);
	extensionsLookup_.KHR_maintenance2 = EnableDeviceExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME);
	extensionsLookup_.KHR_maintenance3 = EnableDeviceExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME);
	extensionsLookup_.KHR_multiview = EnableDeviceExtension(VK_KHR_MULTIVIEW_EXTENSION_NAME);

	// Dedicated allocation depends on get_memory_requirements2.
	if (EnableDeviceExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
		extensionsLookup_.KHR_get_memory_requirements2 = true;
		extensionsLookup_.KHR_dedicated_allocation = EnableDeviceExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
	}
	// External host memory depends on the external-memory chain.
	if (EnableDeviceExtension(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME)) {
		if (EnableDeviceExtension(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
			extensionsLookup_.EXT_external_memory_host = EnableDeviceExtension(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
		}
	}
	// Depth/stencil resolve depends on create_renderpass2.
	if (EnableDeviceExtension(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME)) {
		extensionsLookup_.KHR_create_renderpass2 = true;
		extensionsLookup_.KHR_depth_stencil_resolve = EnableDeviceExtension(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME);
	}
	extensionsLookup_.EXT_shader_stencil_export = EnableDeviceExtension(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME);

	VkDeviceCreateInfo device_info{ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
	device_info.queueCreateInfoCount = 1;
	device_info.pQueueCreateInfos = &queue_info;
	device_info.enabledLayerCount = (uint32_t)device_layer_names_.size();
	device_info.ppEnabledLayerNames = device_info.enabledLayerCount ? device_layer_names_.data() : nullptr;
	device_info.enabledExtensionCount = (uint32_t)device_extensions_enabled_.size();
	device_info.ppEnabledExtensionNames = device_info.enabledExtensionCount ? device_extensions_enabled_.data() : nullptr;
	device_info.pEnabledFeatures = &deviceFeatures_.enabled;

	VkResult res = vkCreateDevice(physical_devices_[physical_device_], &device_info, nullptr, &device_);
	if (res != VK_SUCCESS) {
		init_error_ = "Unable to create Vulkan device";
		ERROR_LOG(G3D, "Unable to create Vulkan device");
	} else {
		VulkanLoadDeviceFunctions(device_, extensionsLookup_);
	}
	INFO_LOG(G3D, "Device created.\n");
	VulkanSetAvailable(true);
	return res;
}
2020-06-21 22:56:01 +02:00
// Registers a VK_EXT_debug_utils messenger so validation-layer messages are
// routed to VulkanDebugUtilsCallback (with g_LogOptions as user data).
// Returns the result of vkCreateDebugUtilsMessengerEXT; on success the
// messenger is stored in utils_callbacks for destruction in DestroyInstance().
VkResult VulkanContext::InitDebugUtilsCallback() {
	// We're intentionally skipping VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT and
	// VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, just too spammy.
	// (Fixed: the ERROR bit was previously OR'ed in twice.)
	int bits = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
		| VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;

	VkDebugUtilsMessengerCreateInfoEXT callback1{ VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT };
	callback1.messageSeverity = bits;
	callback1.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
	callback1.pfnUserCallback = &VulkanDebugUtilsCallback;
	callback1.pUserData = (void *)&g_LogOptions;
	VkDebugUtilsMessengerEXT messenger;
	VkResult res = vkCreateDebugUtilsMessengerEXT(instance_, &callback1, nullptr, &messenger);
	if (res != VK_SUCCESS) {
		ERROR_LOG(G3D, "Failed to register debug callback with vkCreateDebugUtilsMessengerEXT");
		// Do error handling for VK_ERROR_OUT_OF_MEMORY
	} else {
		INFO_LOG(G3D, "Debug callback registered with vkCreateDebugUtilsMessengerEXT.");
		utils_callbacks.push_back(messenger);
	}
	return res;
}
2020-08-08 21:29:29 +02:00
void VulkanContext : : SetDebugNameImpl ( uint64_t handle , VkObjectType type , const char * name ) {
VkDebugUtilsObjectNameInfoEXT info { VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT } ;
info . pObjectName = name ;
info . objectHandle = handle ;
info . objectType = type ;
vkSetDebugUtilsObjectNameEXT ( device_ , & info ) ;
}
2019-01-25 12:49:18 +01:00
// Stores the window-system handles so the surface can be recreated later
// (ReinitSurface is also called on resize/orientation changes), then creates it.
// The meaning of data1/data2 depends on winsys (e.g. HINSTANCE/HWND on Win32).
VkResult VulkanContext::InitSurface(WindowSystem winsys, void *data1, void *data2) {
	winsys_ = winsys;
	winsysData1_ = data1;
	winsysData2_ = data2;
	return ReinitSurface();
}
2019-01-25 12:49:18 +01:00
// (Re)creates the VkSurfaceKHR for the stored window-system handles, destroying
// any previous surface first, then re-runs queue selection (ChooseQueue).
// Returns VK_ERROR_INITIALIZATION_FAILED if the window system is unsupported
// or no suitable queue exists.
VkResult VulkanContext::ReinitSurface() {
	if (surface_ != VK_NULL_HANDLE) {
		INFO_LOG(G3D, "Destroying Vulkan surface (%d, %d)", swapChainExtent_.width, swapChainExtent_.height);
		vkDestroySurfaceKHR(instance_, surface_, nullptr);
		surface_ = VK_NULL_HANDLE;
	}

	INFO_LOG(G3D, "Creating Vulkan surface for window (%p %p)", winsysData1_, winsysData2_);

	VkResult retval = VK_SUCCESS;

	switch (winsys_) {
#ifdef _WIN32
	case WINDOWSYSTEM_WIN32:
	{
		VkWin32SurfaceCreateInfoKHR win32{ VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
		win32.flags = 0;
		win32.hwnd = (HWND)winsysData2_;
		win32.hinstance = (HINSTANCE)winsysData1_;
		retval = vkCreateWin32SurfaceKHR(instance_, &win32, nullptr, &surface_);
		break;
	}
#endif
#if defined(__ANDROID__)
	case WINDOWSYSTEM_ANDROID:
	{
		ANativeWindow *wnd = (ANativeWindow *)winsysData1_;
		VkAndroidSurfaceCreateInfoKHR android{ VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR };
		android.flags = 0;
		android.window = wnd;
		retval = vkCreateAndroidSurfaceKHR(instance_, &android, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_METAL_EXT)
	case WINDOWSYSTEM_METAL_EXT:
	{
		VkMetalSurfaceCreateInfoEXT metal{ VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT };
		metal.flags = 0;
		metal.pLayer = winsysData1_;
		metal.pNext = winsysData2_;
		retval = vkCreateMetalSurfaceEXT(instance_, &metal, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	case WINDOWSYSTEM_XLIB:
	{
		VkXlibSurfaceCreateInfoKHR xlib{ VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR };
		xlib.flags = 0;
		xlib.dpy = (Display *)winsysData1_;
		xlib.window = (Window)winsysData2_;
		retval = vkCreateXlibSurfaceKHR(instance_, &xlib, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_XCB_KHR)
	case WINDOWSYSTEM_XCB:
	{
		// Fixed: the struct type was misspelled VkXCBSurfaceCreateInfoKHR (no such
		// type in the Vulkan headers), and the casts now use the xcb types the
		// struct members are declared with (xcb_connection_t * / xcb_window_t).
		VkXcbSurfaceCreateInfoKHR xcb{ VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };
		xcb.flags = 0;
		xcb.connection = (xcb_connection_t *)winsysData1_;
		xcb.window = (xcb_window_t)(uintptr_t)winsysData2_;
		retval = vkCreateXcbSurfaceKHR(instance_, &xcb, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	case WINDOWSYSTEM_WAYLAND:
	{
		VkWaylandSurfaceCreateInfoKHR wayland{ VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR };
		wayland.flags = 0;
		wayland.display = (wl_display *)winsysData1_;
		wayland.surface = (wl_surface *)winsysData2_;
		retval = vkCreateWaylandSurfaceKHR(instance_, &wayland, nullptr, &surface_);
		break;
	}
#endif
	default:
		_assert_msg_(false, "Vulkan support for chosen window system not implemented");
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	if (retval != VK_SUCCESS) {
		return retval;
	}
	if (!ChooseQueue()) {
		return VK_ERROR_INITIALIZATION_FAILED;
	}
	return VK_SUCCESS;
}
2020-07-18 20:35:39 +02:00
bool VulkanContext : : ChooseQueue ( ) {
2015-10-10 16:41:19 +02:00
// Iterate over each queue to learn whether it supports presenting:
2016-03-21 19:41:20 -07:00
VkBool32 * supportsPresent = new VkBool32 [ queue_count ] ;
2015-10-10 16:41:19 +02:00
for ( uint32_t i = 0 ; i < queue_count ; i + + ) {
2017-08-28 14:12:56 +02:00
vkGetPhysicalDeviceSurfaceSupportKHR ( physical_devices_ [ physical_device_ ] , i , surface_ , & supportsPresent [ i ] ) ;
2015-10-10 16:41:19 +02:00
}
// Search for a graphics queue and a present queue in the array of queue
// families, try to find one that supports both
uint32_t graphicsQueueNodeIndex = UINT32_MAX ;
uint32_t presentQueueNodeIndex = UINT32_MAX ;
for ( uint32_t i = 0 ; i < queue_count ; i + + ) {
2019-08-21 09:02:23 +02:00
if ( ( queueFamilyProperties_ [ i ] . queueFlags & VK_QUEUE_GRAPHICS_BIT ) ! = 0 ) {
2015-10-10 16:41:19 +02:00
if ( graphicsQueueNodeIndex = = UINT32_MAX ) {
graphicsQueueNodeIndex = i ;
}
if ( supportsPresent [ i ] = = VK_TRUE ) {
graphicsQueueNodeIndex = i ;
presentQueueNodeIndex = i ;
break ;
}
}
}
if ( presentQueueNodeIndex = = UINT32_MAX ) {
// If didn't find a queue that supports both graphics and present, then
// find a separate present queue.
for ( uint32_t i = 0 ; i < queue_count ; + + i ) {
if ( supportsPresent [ i ] = = VK_TRUE ) {
presentQueueNodeIndex = i ;
break ;
}
}
}
delete [ ] supportsPresent ;
// Generate error if could not find both a graphics and a present queue
if ( graphicsQueueNodeIndex = = UINT32_MAX | | presentQueueNodeIndex = = UINT32_MAX ) {
2020-08-15 12:12:57 +02:00
ERROR_LOG ( G3D , " Could not find a graphics and a present queue " ) ;
2017-11-12 21:50:54 -08:00
return false ;
2015-10-10 16:41:19 +02:00
}
2016-01-02 02:08:05 +01:00
graphics_queue_family_index_ = graphicsQueueNodeIndex ;
2015-10-10 16:41:19 +02:00
// Get the list of VkFormats that are supported:
2017-12-15 15:29:19 +01:00
uint32_t formatCount = 0 ;
2017-08-28 14:12:56 +02:00
VkResult res = vkGetPhysicalDeviceSurfaceFormatsKHR ( physical_devices_ [ physical_device_ ] , surface_ , & formatCount , nullptr ) ;
2020-07-19 17:47:02 +02:00
_assert_msg_ ( res = = VK_SUCCESS , " Failed to get formats for device %d: %d " , physical_device_ , ( int ) res ) ;
2017-12-15 15:29:19 +01:00
if ( res ! = VK_SUCCESS ) {
2017-11-12 21:50:54 -08:00
return false ;
2017-12-15 15:29:19 +01:00
}
std : : vector < VkSurfaceFormatKHR > surfFormats ( formatCount ) ;
res = vkGetPhysicalDeviceSurfaceFormatsKHR ( physical_devices_ [ physical_device_ ] , surface_ , & formatCount , surfFormats . data ( ) ) ;
2020-08-15 12:12:57 +02:00
_dbg_assert_ ( res = = VK_SUCCESS ) ;
2017-11-12 21:50:54 -08:00
if ( res ! = VK_SUCCESS ) {
return false ;
}
2015-10-10 16:41:19 +02:00
// If the format list includes just one entry of VK_FORMAT_UNDEFINED,
// the surface has no preferred format. Otherwise, at least one
// supported format will be returned.
2016-07-01 10:39:34 -07:00
if ( formatCount = = 0 | | ( formatCount = = 1 & & surfFormats [ 0 ] . format = = VK_FORMAT_UNDEFINED ) ) {
2020-08-15 12:12:57 +02:00
INFO_LOG ( G3D , " swapchain_format: Falling back to B8G8R8A8_UNORM " ) ;
2017-08-19 17:32:10 +02:00
swapchainFormat_ = VK_FORMAT_B8G8R8A8_UNORM ;
2015-10-10 16:41:19 +02:00
} else {
2017-08-19 17:32:10 +02:00
swapchainFormat_ = VK_FORMAT_UNDEFINED ;
2016-07-01 10:39:34 -07:00
for ( uint32_t i = 0 ; i < formatCount ; + + i ) {
if ( surfFormats [ i ] . colorSpace ! = VK_COLORSPACE_SRGB_NONLINEAR_KHR ) {
continue ;
}
if ( surfFormats [ i ] . format = = VK_FORMAT_B8G8R8A8_UNORM | | surfFormats [ i ] . format = = VK_FORMAT_R8G8B8A8_UNORM ) {
2017-08-19 17:32:10 +02:00
swapchainFormat_ = surfFormats [ i ] . format ;
2016-07-01 10:39:34 -07:00
break ;
}
}
2017-08-19 17:32:10 +02:00
if ( swapchainFormat_ = = VK_FORMAT_UNDEFINED ) {
2016-07-01 10:39:34 -07:00
// Okay, take the first one then.
2017-08-19 17:32:10 +02:00
swapchainFormat_ = surfFormats [ 0 ] . format ;
2016-07-01 10:39:34 -07:00
}
2020-08-15 12:12:57 +02:00
INFO_LOG ( G3D , " swapchain_format: %d (/%d) " , swapchainFormat_ , formatCount ) ;
2015-10-10 16:41:19 +02:00
}
2016-01-02 02:08:05 +01:00
vkGetDeviceQueue ( device_ , graphics_queue_family_index_ , 0 , & gfx_queue_ ) ;
2017-11-12 21:50:54 -08:00
return true ;
2015-10-10 16:41:19 +02:00
}
2019-01-25 12:49:18 +01:00
// Clamp x to the inclusive range [a, b] (a is checked first, matching the
// original's behavior when a > b).
int clamp(int x, int a, int b) {
	return (x < a) ? a : ((x > b) ? b : x);
}
2019-06-20 23:58:18 +02:00
static std : : string surface_transforms_to_string ( VkSurfaceTransformFlagsKHR transformFlags ) {
std : : string str ;
if ( transformFlags & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR ) str + = " IDENTITY " ;
if ( transformFlags & VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR ) str + = " ROTATE_90 " ;
if ( transformFlags & VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR ) str + = " ROTATE_180 " ;
if ( transformFlags & VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR ) str + = " ROTATE_270 " ;
if ( transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR ) str + = " HMIRROR " ;
if ( transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR ) str + = " HMIRROR_90 " ;
if ( transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR ) str + = " HMIRROR_180 " ;
if ( transformFlags & VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR ) str + = " HMIRROR_270 " ;
if ( transformFlags & VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR ) str + = " INHERIT " ;
return str ;
}
2017-08-19 17:32:10 +02:00
// Creates the swapchain for the current surface: queries capabilities and
// present modes, clamps the extent, picks a present mode according to flags_,
// sets up pre-rotation state (g_display_rotation / g_display_rot_matrix) for
// platforms that report a rotated currentTransform, and finally calls
// vkCreateSwapchainKHR. Returns false if swapchain creation fails.
bool VulkanContext::InitSwapchain() {
	VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_devices_[physical_device_], surface_, &surfCapabilities_);
	_dbg_assert_(res == VK_SUCCESS);
	uint32_t presentModeCount;
	res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, nullptr);
	_dbg_assert_(res == VK_SUCCESS);
	VkPresentModeKHR *presentModes = new VkPresentModeKHR[presentModeCount];
	// NOTE(review): operator new throws rather than returning null, so this
	// assert can't actually fire - kept for byte-compatibility.
	_dbg_assert_(presentModes);
	res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, presentModes);
	_dbg_assert_(res == VK_SUCCESS);

	// Clamp the surface's current extent into the min/max range it reports.
	swapChainExtent_.width = clamp(surfCapabilities_.currentExtent.width, surfCapabilities_.minImageExtent.width, surfCapabilities_.maxImageExtent.width);
	swapChainExtent_.height = clamp(surfCapabilities_.currentExtent.height, surfCapabilities_.minImageExtent.height, surfCapabilities_.maxImageExtent.height);

	INFO_LOG(G3D, "surfCapabilities_.current: %dx%d min: %dx%d max: %dx%d computed: %dx%d",
		surfCapabilities_.currentExtent.width, surfCapabilities_.currentExtent.height,
		surfCapabilities_.minImageExtent.width, surfCapabilities_.minImageExtent.height,
		surfCapabilities_.maxImageExtent.width, surfCapabilities_.maxImageExtent.height,
		swapChainExtent_.width, swapChainExtent_.height);

	// TODO: Find a better way to specify the prioritized present mode while being able
	// to fall back in a sensible way.
	VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_MAX_ENUM_KHR;
	// Build a comma-separated list of all supported modes for the log.
	std::string modes = "";
	for (size_t i = 0; i < presentModeCount; i++) {
		modes += PresentModeString(presentModes[i]);
		if (i != presentModeCount - 1) {
			modes += ", ";
		}
	}
	INFO_LOG(G3D, "Supported present modes: %s", modes.c_str());
	// Pick the first mode that matches one of the requested flags_; if none
	// matches, the first reported mode is kept as the fallback.
	for (size_t i = 0; i < presentModeCount; i++) {
		bool match = false;
		match = match || ((flags_ & VULKAN_FLAG_PRESENT_MAILBOX) && presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR);
		match = match || ((flags_ & VULKAN_FLAG_PRESENT_FIFO_RELAXED) && presentModes[i] == VK_PRESENT_MODE_FIFO_RELAXED_KHR);
		match = match || ((flags_ & VULKAN_FLAG_PRESENT_FIFO) && presentModes[i] == VK_PRESENT_MODE_FIFO_KHR);
		match = match || ((flags_ & VULKAN_FLAG_PRESENT_IMMEDIATE) && presentModes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR);
		// Default to the first present mode from the list.
		if (match || swapchainPresentMode == VK_PRESENT_MODE_MAX_ENUM_KHR) {
			swapchainPresentMode = presentModes[i];
		}
		if (match) {
			break;
		}
	}
#ifdef __ANDROID__
	// HACK
	swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
#endif
	delete[] presentModes;
	// Determine the number of VkImage's to use in the swap chain (we desire to
	// own only 1 image at a time, besides the images being displayed and
	// queued for display):
	uint32_t desiredNumberOfSwapChainImages = surfCapabilities_.minImageCount + 1;
	if ((surfCapabilities_.maxImageCount > 0) &&
		(desiredNumberOfSwapChainImages > surfCapabilities_.maxImageCount))
	{
		// Application must settle for fewer images than desired:
		desiredNumberOfSwapChainImages = surfCapabilities_.maxImageCount;
	}

	INFO_LOG(G3D, "Chosen present mode: %d (%s). numSwapChainImages: %d/%d",
		swapchainPresentMode, PresentModeString(swapchainPresentMode),
		desiredNumberOfSwapChainImages, surfCapabilities_.maxImageCount);

	// We mostly follow the practices from
	// https://arm-software.github.io/vulkan_best_practice_for_mobile_developers/samples/surface_rotation/surface_rotation_tutorial.html
	//
	VkSurfaceTransformFlagBitsKHR preTransform;
	std::string supportedTransforms = surface_transforms_to_string(surfCapabilities_.supportedTransforms);
	std::string currentTransform = surface_transforms_to_string(surfCapabilities_.currentTransform);
	g_display_rotation = DisplayRotation::ROTATE_0;
	g_display_rot_matrix.setIdentity();
	// NOTE(review): swapChainExtentSwap appears unused below - candidate for removal.
	bool swapChainExtentSwap = false;
	if (surfCapabilities_.currentTransform & (VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR | VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR)) {
		preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
	} else if (surfCapabilities_.currentTransform & (VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR)) {
		// Normal, sensible rotations. Let's handle it.
		// We do the rotation ourselves in the shader (g_display_rot_matrix) and
		// tell the compositor not to rotate, which is much faster on mobile.
		preTransform = surfCapabilities_.currentTransform;
		g_display_rot_matrix.setIdentity();
		switch (surfCapabilities_.currentTransform) {
		case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
			g_display_rotation = DisplayRotation::ROTATE_90;
			g_display_rot_matrix.setRotationZ90();
			std::swap(swapChainExtent_.width, swapChainExtent_.height);
			break;
		case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
			g_display_rotation = DisplayRotation::ROTATE_180;
			g_display_rot_matrix.setRotationZ180();
			break;
		case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
			g_display_rotation = DisplayRotation::ROTATE_270;
			g_display_rot_matrix.setRotationZ270();
			std::swap(swapChainExtent_.width, swapChainExtent_.height);
			break;
		default:
			_dbg_assert_(false);
		}
	} else {
		// Let the OS rotate the image (potentially slow on many Android devices)
		preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
	}

	std::string preTransformStr = surface_transforms_to_string(preTransform);

	INFO_LOG(G3D, "Transform supported: %s current: %s chosen: %s", supportedTransforms.c_str(), currentTransform.c_str(), preTransformStr.c_str());

	if (physicalDeviceProperties_[physical_device_].properties.vendorID == VULKAN_VENDOR_IMGTEC) {
		INFO_LOG(G3D, "Applying PowerVR hack (rounding off the width!)");
		// Swap chain width hack to avoid issue #11743 (PowerVR driver bug).
		// To keep the size consistent even with pretransform, do this after the swap. Should be fine.
		// This is fixed in newer PowerVR drivers but I don't know the cutoff.
		swapChainExtent_.width &= ~31;
	}

	VkSwapchainCreateInfoKHR swap_chain_info{ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
	swap_chain_info.surface = surface_;
	swap_chain_info.minImageCount = desiredNumberOfSwapChainImages;
	swap_chain_info.imageFormat = swapchainFormat_;
	swap_chain_info.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
	swap_chain_info.imageExtent.width = swapChainExtent_.width;
	swap_chain_info.imageExtent.height = swapChainExtent_.height;
	swap_chain_info.preTransform = preTransform;
	swap_chain_info.imageArrayLayers = 1;
	swap_chain_info.presentMode = swapchainPresentMode;
	swap_chain_info.oldSwapchain = VK_NULL_HANDLE;
	swap_chain_info.clipped = true;
	swap_chain_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

	// Don't ask for TRANSFER_DST for the swapchain image, we don't use that.
	// if (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT)
	// 	swap_chain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;

#ifndef ANDROID
	// We don't support screenshots on Android
	// Add more usage flags if they're supported.
	if (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT)
		swap_chain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
#endif

	swap_chain_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
	swap_chain_info.queueFamilyIndexCount = 0;
	swap_chain_info.pQueueFamilyIndices = NULL;
	// OPAQUE is not supported everywhere.
	if (surfCapabilities_.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR) {
		swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
	} else {
		// This should be supported anywhere, and is the only thing supported on the SHIELD TV, for example.
		swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
	}

	res = vkCreateSwapchainKHR(device_, &swap_chain_info, NULL, &swapchain_);
	if (res != VK_SUCCESS) {
		ERROR_LOG(G3D, "vkCreateSwapchainKHR failed!");
		return false;
	}
	INFO_LOG(G3D, "Created swapchain: %dx%d", swap_chain_info.imageExtent.width, swap_chain_info.imageExtent.height);
	return true;
}
2016-01-02 02:08:05 +01:00
// Creates a fence, optionally already in the signaled state (useful so the
// first wait on a per-frame fence doesn't block forever).
VkFence VulkanContext::CreateFence(bool presignalled) {
	VkFenceCreateInfo fenceInfo{ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
	if (presignalled) {
		fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
	}
	VkFence fence;
	vkCreateFence(device_, &fenceInfo, NULL, &fence);
	return fence;
}
2018-03-11 17:14:11 +01:00
void VulkanContext : : PerformPendingDeletes ( ) {
2017-08-22 17:18:54 +02:00
for ( int i = 0 ; i < ARRAY_SIZE ( frame_ ) ; i + + ) {
frame_ [ i ] . deleteList . PerformDeletes ( device_ ) ;
}
Delete ( ) . PerformDeletes ( device_ ) ;
2018-03-11 17:14:11 +01:00
}
// Destroys the logical device. The swapchain and surface must already be gone
// by this point; if not, we log errors (but continue - the device teardown
// below would invalidate them anyway). All pending deferred deletions are
// flushed first since they reference device_.
void VulkanContext::DestroyDevice() {
	if (swapchain_) {
		ERROR_LOG(G3D, "DestroyDevice: Swapchain should have been destroyed.");
	}
	if (surface_) {
		ERROR_LOG(G3D, "DestroyDevice: Surface should have been destroyed.");
	}

	INFO_LOG(G3D, "VulkanContext::DestroyDevice (performing deletes)");
	PerformPendingDeletes();

	vkDestroyDevice(device_, nullptr);
	device_ = nullptr;
}
2016-01-05 21:18:43 +01:00
bool VulkanContext : : CreateShaderModule ( const std : : vector < uint32_t > & spirv , VkShaderModule * shaderModule ) {
2017-10-20 18:09:05 +02:00
VkShaderModuleCreateInfo sm { VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO } ;
2016-01-05 21:18:43 +01:00
sm . pCode = spirv . data ( ) ;
sm . codeSize = spirv . size ( ) * sizeof ( uint32_t ) ;
sm . flags = 0 ;
2017-08-28 13:45:04 +02:00
VkResult result = vkCreateShaderModule ( device_ , & sm , nullptr , shaderModule ) ;
2016-01-05 21:18:43 +01:00
if ( result ! = VK_SUCCESS ) {
return false ;
} else {
return true ;
}
}
2015-10-10 16:41:19 +02:00
2017-12-05 13:05:11 +01:00
// Records a vkCmdPipelineBarrier that transitions mip levels
// [baseMip, baseMip + numMipLevels) of 'image' from oldImageLayout to
// newImageLayout, with the given stage and access masks. Only array layer 0
// is touched (layerCount = 1; see comment below).
void TransitionImageLayout2(VkCommandBuffer cmd, VkImage image, int baseMip, int numMipLevels, VkImageAspectFlags aspectMask,
	VkImageLayout oldImageLayout, VkImageLayout newImageLayout,
	VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask,
	VkAccessFlags srcAccessMask, VkAccessFlags dstAccessMask) {
#ifdef VULKAN_USE_GENERAL_LAYOUT_FOR_COLOR
	if (aspectMask == VK_IMAGE_ASPECT_COLOR_BIT) {
		// Hack to disable transaction elimination on ARM Mali.
		// Forcing GENERAL instead of the optimal color layouts.
		if (oldImageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL || oldImageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
			oldImageLayout = VK_IMAGE_LAYOUT_GENERAL;
		if (newImageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL || newImageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
			newImageLayout = VK_IMAGE_LAYOUT_GENERAL;
	}
#endif
#ifdef VULKAN_USE_GENERAL_LAYOUT_FOR_DEPTH_STENCIL
	if (aspectMask != VK_IMAGE_ASPECT_COLOR_BIT) {
		// Hack to disable transaction elimination on ARM Mali.
		// Same idea as above, but for the depth/stencil layouts.
		if (oldImageLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL || oldImageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
			oldImageLayout = VK_IMAGE_LAYOUT_GENERAL;
		if (newImageLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL || newImageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
			newImageLayout = VK_IMAGE_LAYOUT_GENERAL;
	}
#endif

	VkImageMemoryBarrier image_memory_barrier{ VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER };
	image_memory_barrier.srcAccessMask = srcAccessMask;
	image_memory_barrier.dstAccessMask = dstAccessMask;
	image_memory_barrier.oldLayout = oldImageLayout;
	image_memory_barrier.newLayout = newImageLayout;
	image_memory_barrier.image = image;
	image_memory_barrier.subresourceRange.aspectMask = aspectMask;
	image_memory_barrier.subresourceRange.baseMipLevel = baseMip;
	image_memory_barrier.subresourceRange.levelCount = numMipLevels;
	image_memory_barrier.subresourceRange.layerCount = 1;  // We never use more than one layer, and old Mali drivers have problems with VK_REMAINING_ARRAY_LAYERS/VK_REMAINING_MIP_LEVELS.
	image_memory_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	image_memory_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	vkCmdPipelineBarrier(cmd, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1, &image_memory_barrier);
}
2015-10-10 16:41:19 +02:00
// Maps a Vulkan shader stage bit to the corresponding glslang stage enum.
// Vertex - and any unrecognized stage - maps to EShLangVertex.
EShLanguage FindLanguage(const VkShaderStageFlagBits shader_type) {
	EShLanguage lang = EShLangVertex;
	switch (shader_type) {
	case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
		lang = EShLangTessControl;
		break;
	case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
		lang = EShLangTessEvaluation;
		break;
	case VK_SHADER_STAGE_GEOMETRY_BIT:
		lang = EShLangGeometry;
		break;
	case VK_SHADER_STAGE_FRAGMENT_BIT:
		lang = EShLangFragment;
		break;
	case VK_SHADER_STAGE_COMPUTE_BIT:
		lang = EShLangCompute;
		break;
	case VK_SHADER_STAGE_VERTEX_BIT:
	default:
		break;  // keep the EShLangVertex fallback
	}
	return lang;
}
// Compile a given string containing GLSL into SPV for use by VK
// Return value of false means an error was encountered.
bool GLSLtoSPV ( const VkShaderStageFlagBits shader_type ,
const char * pshader ,
2016-01-02 02:08:05 +01:00
std : : vector < unsigned int > & spirv , std : : string * errorMessage ) {
2015-10-10 16:41:19 +02:00
2016-01-10 14:24:10 +01:00
glslang : : TProgram program ;
2015-10-10 16:41:19 +02:00
const char * shaderStrings [ 1 ] ;
2018-08-30 22:51:55 +08:00
EProfile profile = ECoreProfile ;
int defaultVersion = 450 ;
2015-10-10 16:41:19 +02:00
TBuiltInResource Resources ;
init_resources ( Resources ) ;
// Enable SPIR-V and Vulkan rules when parsing GLSL
EShMessages messages = ( EShMessages ) ( EShMsgSpvRules | EShMsgVulkanRules ) ;
EShLanguage stage = FindLanguage ( shader_type ) ;
2016-01-10 14:24:10 +01:00
glslang : : TShader shader ( stage ) ;
2015-10-10 16:41:19 +02:00
shaderStrings [ 0 ] = pshader ;
2016-01-10 14:24:10 +01:00
shader . setStrings ( shaderStrings , 1 ) ;
2015-10-10 16:41:19 +02:00
2018-08-30 22:51:55 +08:00
if ( ! shader . parse ( & Resources , defaultVersion , profile , false , true , messages ) ) {
2016-01-10 14:24:10 +01:00
puts ( shader . getInfoLog ( ) ) ;
puts ( shader . getInfoDebugLog ( ) ) ;
2016-01-02 02:08:05 +01:00
if ( errorMessage ) {
2016-01-10 14:24:10 +01:00
* errorMessage = shader . getInfoLog ( ) ;
( * errorMessage ) + = shader . getInfoDebugLog ( ) ;
2016-01-02 02:08:05 +01:00
}
2015-10-10 16:41:19 +02:00
return false ; // something didn't work
}
2016-01-10 14:24:10 +01:00
// Note that program does not take ownership of &shader, so this is fine.
program . addShader ( & shader ) ;
2015-10-10 16:41:19 +02:00
if ( ! program . link ( messages ) ) {
2016-01-10 14:24:10 +01:00
puts ( shader . getInfoLog ( ) ) ;
puts ( shader . getInfoDebugLog ( ) ) ;
2016-01-02 02:08:05 +01:00
if ( errorMessage ) {
2016-01-10 14:24:10 +01:00
* errorMessage = shader . getInfoLog ( ) ;
( * errorMessage ) + = shader . getInfoDebugLog ( ) ;
2016-01-02 02:08:05 +01:00
}
2015-10-10 16:41:19 +02:00
return false ;
}
2016-01-02 02:08:05 +01:00
// Can't fail, parsing worked, "linking" worked.
2018-08-30 22:51:55 +08:00
glslang : : SpvOptions options ;
options . disableOptimizer = false ;
options . optimizeSize = false ;
options . generateDebugInfo = false ;
glslang : : GlslangToSpv ( * program . getIntermediate ( stage ) , spirv , & options ) ;
2015-10-10 16:41:19 +02:00
return true ;
}
// Process-wide glslang setup; must be called once before any GLSLtoSPV call.
void init_glslang() {
	glslang::InitializeProcess();
}
// Process-wide glslang teardown; pairs with init_glslang().
void finalize_glslang() {
	glslang::FinalizeProcess();
}
2015-12-31 01:07:06 +01:00
// Converts a VkResult to its enum name for logging. Values not in the table
// come back as a generic "unknown" placeholder.
const char *VulkanResultToString(VkResult res) {
	switch (res) {
	case VK_SUCCESS: return "VK_SUCCESS";  // was missing - success logged as "unknown"
	case VK_NOT_READY: return "VK_NOT_READY";
	case VK_TIMEOUT: return "VK_TIMEOUT";
	case VK_EVENT_SET: return "VK_EVENT_SET";
	case VK_EVENT_RESET: return "VK_EVENT_RESET";
	case VK_INCOMPLETE: return "VK_INCOMPLETE";
	case VK_ERROR_OUT_OF_HOST_MEMORY: return "VK_ERROR_OUT_OF_HOST_MEMORY";
	case VK_ERROR_OUT_OF_DEVICE_MEMORY: return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
	case VK_ERROR_INITIALIZATION_FAILED: return "VK_ERROR_INITIALIZATION_FAILED";
	case VK_ERROR_DEVICE_LOST: return "VK_ERROR_DEVICE_LOST";
	case VK_ERROR_MEMORY_MAP_FAILED: return "VK_ERROR_MEMORY_MAP_FAILED";
	case VK_ERROR_LAYER_NOT_PRESENT: return "VK_ERROR_LAYER_NOT_PRESENT";
	case VK_ERROR_EXTENSION_NOT_PRESENT: return "VK_ERROR_EXTENSION_NOT_PRESENT";
	case VK_ERROR_FEATURE_NOT_PRESENT: return "VK_ERROR_FEATURE_NOT_PRESENT";
	case VK_ERROR_INCOMPATIBLE_DRIVER: return "VK_ERROR_INCOMPATIBLE_DRIVER";
	case VK_ERROR_TOO_MANY_OBJECTS: return "VK_ERROR_TOO_MANY_OBJECTS";
	case VK_ERROR_FORMAT_NOT_SUPPORTED: return "VK_ERROR_FORMAT_NOT_SUPPORTED";
	case VK_ERROR_SURFACE_LOST_KHR: return "VK_ERROR_SURFACE_LOST_KHR";
	case VK_SUBOPTIMAL_KHR: return "VK_SUBOPTIMAL_KHR";
	case VK_ERROR_OUT_OF_DATE_KHR: return "VK_ERROR_OUT_OF_DATE_KHR";
	case VK_ERROR_INCOMPATIBLE_DISPLAY_KHR: return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
	case VK_ERROR_NATIVE_WINDOW_IN_USE_KHR: return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
	case VK_ERROR_OUT_OF_POOL_MEMORY_KHR: return "VK_ERROR_OUT_OF_POOL_MEMORY_KHR";
	case VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR: return "VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR";

	default:
		return "VK_ERROR_...(unknown)";
	}
}
2017-11-09 17:40:32 +01:00
void VulkanDeleteList : : Take ( VulkanDeleteList & del ) {
2020-08-15 12:12:57 +02:00
_dbg_assert_ ( cmdPools_ . empty ( ) ) ;
_dbg_assert_ ( descPools_ . empty ( ) ) ;
_dbg_assert_ ( modules_ . empty ( ) ) ;
_dbg_assert_ ( buffers_ . empty ( ) ) ;
_dbg_assert_ ( bufferViews_ . empty ( ) ) ;
_dbg_assert_ ( images_ . empty ( ) ) ;
_dbg_assert_ ( imageViews_ . empty ( ) ) ;
_dbg_assert_ ( deviceMemory_ . empty ( ) ) ;
_dbg_assert_ ( samplers_ . empty ( ) ) ;
_dbg_assert_ ( pipelines_ . empty ( ) ) ;
_dbg_assert_ ( pipelineCaches_ . empty ( ) ) ;
_dbg_assert_ ( renderPasses_ . empty ( ) ) ;
_dbg_assert_ ( framebuffers_ . empty ( ) ) ;
_dbg_assert_ ( pipelineLayouts_ . empty ( ) ) ;
_dbg_assert_ ( descSetLayouts_ . empty ( ) ) ;
_dbg_assert_ ( callbacks_ . empty ( ) ) ;
2017-11-09 17:40:32 +01:00
cmdPools_ = std : : move ( del . cmdPools_ ) ;
descPools_ = std : : move ( del . descPools_ ) ;
modules_ = std : : move ( del . modules_ ) ;
buffers_ = std : : move ( del . buffers_ ) ;
bufferViews_ = std : : move ( del . bufferViews_ ) ;
images_ = std : : move ( del . images_ ) ;
imageViews_ = std : : move ( del . imageViews_ ) ;
deviceMemory_ = std : : move ( del . deviceMemory_ ) ;
samplers_ = std : : move ( del . samplers_ ) ;
pipelines_ = std : : move ( del . pipelines_ ) ;
pipelineCaches_ = std : : move ( del . pipelineCaches_ ) ;
renderPasses_ = std : : move ( del . renderPasses_ ) ;
framebuffers_ = std : : move ( del . framebuffers_ ) ;
pipelineLayouts_ = std : : move ( del . pipelineLayouts_ ) ;
descSetLayouts_ = std : : move ( del . descSetLayouts_ ) ;
callbacks_ = std : : move ( del . callbacks_ ) ;
2017-12-10 14:36:24 +01:00
del . cmdPools_ . clear ( ) ;
del . descPools_ . clear ( ) ;
del . modules_ . clear ( ) ;
del . buffers_ . clear ( ) ;
del . images_ . clear ( ) ;
del . imageViews_ . clear ( ) ;
del . deviceMemory_ . clear ( ) ;
del . samplers_ . clear ( ) ;
del . pipelines_ . clear ( ) ;
del . pipelineCaches_ . clear ( ) ;
del . renderPasses_ . clear ( ) ;
del . framebuffers_ . clear ( ) ;
del . pipelineLayouts_ . clear ( ) ;
del . descSetLayouts_ . clear ( ) ;
del . callbacks_ . clear ( ) ;
2017-11-09 17:40:32 +01:00
}
// Destroys every queued Vulkan object on 'device' and empties the list.
// Must only be called once the GPU is guaranteed to be done with these
// objects (e.g. after the owning frame's fence has signaled).
void VulkanDeleteList::PerformDeletes(VkDevice device) {
	// Run user callbacks first - they may still reference objects that
	// the loops below are about to destroy.
	for (auto &callback : callbacks_) {
		callback.func(callback.userdata);
	}
	callbacks_.clear();
	for (auto &cmdPool : cmdPools_) {
		vkDestroyCommandPool(device, cmdPool, nullptr);
	}
	cmdPools_.clear();
	for (auto &descPool : descPools_) {
		vkDestroyDescriptorPool(device, descPool, nullptr);
	}
	descPools_.clear();
	for (auto &module : modules_) {
		vkDestroyShaderModule(device, module, nullptr);
	}
	modules_.clear();
	for (auto &buf : buffers_) {
		vkDestroyBuffer(device, buf, nullptr);
	}
	buffers_.clear();
	for (auto &bufView : bufferViews_) {
		vkDestroyBufferView(device, bufView, nullptr);
	}
	bufferViews_.clear();
	for (auto &image : images_) {
		vkDestroyImage(device, image, nullptr);
	}
	images_.clear();
	for (auto &imageView : imageViews_) {
		vkDestroyImageView(device, imageView, nullptr);
	}
	imageViews_.clear();
	// Memory is freed after the images/buffers that may be bound to it.
	for (auto &mem : deviceMemory_) {
		vkFreeMemory(device, mem, nullptr);
	}
	deviceMemory_.clear();
	for (auto &sampler : samplers_) {
		vkDestroySampler(device, sampler, nullptr);
	}
	samplers_.clear();
	for (auto &pipeline : pipelines_) {
		vkDestroyPipeline(device, pipeline, nullptr);
	}
	pipelines_.clear();
	for (auto &pcache : pipelineCaches_) {
		vkDestroyPipelineCache(device, pcache, nullptr);
	}
	pipelineCaches_.clear();
	for (auto &renderPass : renderPasses_) {
		vkDestroyRenderPass(device, renderPass, nullptr);
	}
	renderPasses_.clear();
	for (auto &framebuffer : framebuffers_) {
		vkDestroyFramebuffer(device, framebuffer, nullptr);
	}
	framebuffers_.clear();
	for (auto &pipeLayout : pipelineLayouts_) {
		vkDestroyPipelineLayout(device, pipeLayout, nullptr);
	}
	pipelineLayouts_.clear();
	for (auto &descSetLayout : descSetLayouts_) {
		vkDestroyDescriptorSetLayout(device, descSetLayout, nullptr);
	}
	descSetLayouts_.clear();
}
2019-02-07 14:56:29 +01:00
void VulkanContext : : GetImageMemoryRequirements ( VkImage image , VkMemoryRequirements * mem_reqs , bool * dedicatedAllocation ) {
2020-08-08 23:18:17 +02:00
if ( Extensions ( ) . KHR_dedicated_allocation ) {
2019-02-07 14:56:29 +01:00
VkImageMemoryRequirementsInfo2KHR memReqInfo2 { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR } ;
memReqInfo2 . image = image ;
VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR } ;
VkMemoryDedicatedRequirementsKHR memDedicatedReq { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR } ;
memReq2 . pNext = & memDedicatedReq ;
vkGetImageMemoryRequirements2KHR ( GetDevice ( ) , & memReqInfo2 , & memReq2 ) ;
* mem_reqs = memReq2 . memoryRequirements ;
* dedicatedAllocation =
( memDedicatedReq . requiresDedicatedAllocation ! = VK_FALSE ) | |
( memDedicatedReq . prefersDedicatedAllocation ! = VK_FALSE ) ;
} else {
vkGetImageMemoryRequirements ( GetDevice ( ) , image , mem_reqs ) ;
* dedicatedAllocation = false ;
}
}
2019-09-04 21:42:47 +02:00
// Heuristic: old ARM Mali drivers reported a raw hash instead of a structured
// version, while modern ones only use the major field. A hash is overwhelmingly
// likely to set bits in the patch field, or to decode to an implausibly large
// major version.
// Fixes: 'minor' was computed but never used, and the old second check
// (branch > 100) was unreachable because branch > 0 had already returned.
bool IsHashMaliDriverVersion(const VkPhysicalDeviceProperties &props) {
	uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
	uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
	return branch > 0 || major > 100;
}
// From Sascha's code
std : : string FormatDriverVersion ( const VkPhysicalDeviceProperties & props ) {
if ( props . vendorID = = VULKAN_VENDOR_NVIDIA ) {
// For whatever reason, NVIDIA has their own scheme.
// 10 bits = major version (up to r1023)
// 8 bits = minor version (up to 255)
// 8 bits = secondary branch version/build version (up to 255)
// 6 bits = tertiary branch/build version (up to 63)
uint32_t major = ( props . driverVersion > > 22 ) & 0x3ff ;
uint32_t minor = ( props . driverVersion > > 14 ) & 0x0ff ;
uint32_t secondaryBranch = ( props . driverVersion > > 6 ) & 0x0ff ;
uint32_t tertiaryBranch = ( props . driverVersion ) & 0x003f ;
return StringFromFormat ( " %d.%d.%d.%d " , major , minor , secondaryBranch , tertiaryBranch ) ;
} else if ( props . vendorID = = VULKAN_VENDOR_ARM ) {
// ARM used to just put a hash here. No point in splitting it up.
if ( IsHashMaliDriverVersion ( props ) ) {
return StringFromFormat ( " (hash) % 08 x " , props.driverVersion) ;
}
}
// Qualcomm has an inscrutable versioning scheme. Let's just display it as normal.
// Standard scheme, use the standard macros.
uint32_t major = VK_VERSION_MAJOR ( props . driverVersion ) ;
uint32_t minor = VK_VERSION_MINOR ( props . driverVersion ) ;
uint32_t branch = VK_VERSION_PATCH ( props . driverVersion ) ;
return StringFromFormat ( " %d.%d.%d (%08x) " , major, minor, branch, props.driverVersion) ;
}