2017-08-16 23:03:30 +02:00
# pragma once
2017-10-28 00:37:06 +02:00
// VulkanRenderManager takes the role that a GL driver does of sequencing and optimizing render passes.
// Only draws and binds are handled here, resource creation and allocations are handled as normal -
// that's the nice thing with Vulkan.
2021-09-10 23:35:31 -07:00
# include <algorithm>
2017-11-05 08:13:18 -08:00
# include <atomic>
2017-08-19 17:32:10 +02:00
# include <condition_variable>
2017-11-05 08:13:18 -08:00
# include <cstdint>
# include <mutex>
# include <thread>
2019-06-16 21:57:22 +02:00
# include <queue>
2017-08-16 23:03:30 +02:00
2022-10-10 10:53:27 +02:00
# include "Common/Math/Statistics.h"
2022-06-11 23:28:26 +02:00
# include "Common/Thread/Promise.h"
2020-10-04 10:10:55 +02:00
# include "Common/System/Display.h"
2020-10-04 23:24:14 +02:00
# include "Common/GPU/Vulkan/VulkanContext.h"
2020-10-04 00:25:21 +02:00
# include "Common/Data/Convert/SmallDataConvert.h"
2023-08-02 11:38:31 +02:00
# include "Common/Data/Collections/FastVec.h"
2020-10-04 00:25:21 +02:00
# include "Common/Math/math_util.h"
2020-10-04 23:24:14 +02:00
# include "Common/GPU/DataFormat.h"
2022-11-24 10:38:49 +01:00
# include "Common/GPU/MiscTypes.h"
2020-10-04 23:24:14 +02:00
# include "Common/GPU/Vulkan/VulkanQueueRunner.h"
2022-11-27 10:22:37 +01:00
# include "Common/GPU/Vulkan/VulkanFramebuffer.h"
2022-12-03 14:52:06 -08:00
# include "Common/GPU/thin3d.h"
2017-08-16 23:03:30 +02:00
2021-11-22 09:53:09 +01:00
// Forward declaration
VK_DEFINE_HANDLE ( VmaAllocation ) ;
2020-10-11 13:07:08 +02:00
// Axis-aligned integer rectangle, used to accumulate the union of scissor
// rectangles within a render pass (the effective render area).
struct BoundingRect {
	int x1;
	int y1;
	int x2;
	int y2;

	BoundingRect() {
		Reset();
	}

	// Puts the rect in an "inverted infinite" state, so that any Apply() will
	// shrink-wrap it around the applied rectangle.
	void Reset() {
		x1 = 65535;
		y1 = 65535;
		x2 = -65535;
		y2 = -65535;
	}

	// True if nothing has been applied since Reset().
	bool Empty() const {
		return x2 < 0;
	}

	// NOTE(review): despite the parameter names, width/height are stored directly
	// as x2/y2, i.e. they act as absolute right/bottom coordinates here — confirm
	// against callers before "fixing".
	void SetRect(int x, int y, int width, int height) {
		x1 = x;
		y1 = y;
		x2 = width;
		y2 = height;
	}

	// Expands the bounding rect to include the given Vulkan rect.
	void Apply(const VkRect2D &rect) {
		x1 = std::min(x1, rect.offset.x);
		y1 = std::min(y1, rect.offset.y);
		const int right = rect.offset.x + (int)rect.extent.width;
		const int bottom = rect.offset.y + (int)rect.extent.height;
		x2 = std::max(x2, right);
		y2 = std::max(y2, bottom);
	}

	// Converts back to Vulkan's offset+extent representation.
	VkRect2D ToVkRect2D() const {
		VkRect2D rect;
		rect.offset.x = x1;
		rect.offset.y = y1;
		rect.extent.width = x2 - x1;
		rect.extent.height = y2 - y1;
		return rect;
	}
};
2019-06-16 20:29:38 +02:00
// All the data needed to create a graphics pipeline.
2022-12-13 15:16:11 +01:00
// TODO: Compress this down greatly.
2023-05-29 12:41:06 +02:00
class VKRGraphicsPipelineDesc : public Draw : : RefCountedObject {
public :
VKRGraphicsPipelineDesc ( ) : Draw : : RefCountedObject ( " VKRGraphicsPipelineDesc " ) { }
2019-06-16 20:29:38 +02:00
VkPipelineCache pipelineCache = VK_NULL_HANDLE ;
VkPipelineColorBlendStateCreateInfo cbs { VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO } ;
VkPipelineColorBlendAttachmentState blend0 { } ;
VkPipelineDepthStencilStateCreateInfo dss { VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO } ;
VkDynamicState dynamicStates [ 6 ] { } ;
VkPipelineDynamicStateCreateInfo ds { VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO } ;
VkPipelineRasterizationStateCreateInfo rs { VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO } ;
2022-06-11 23:28:26 +02:00
// Replaced the ShaderStageInfo with promises here so we can wait for compiles to finish.
2022-09-07 12:37:45 +02:00
Promise < VkShaderModule > * vertexShader = nullptr ;
Promise < VkShaderModule > * fragmentShader = nullptr ;
2022-10-01 20:01:23 -07:00
Promise < VkShaderModule > * geometryShader = nullptr ;
2022-06-11 23:28:26 +02:00
2022-11-07 22:33:15 +01:00
// These are for pipeline creation failure logging.
// TODO: Store pointers to the string instead? Feels iffy but will probably work.
std : : string vertexShaderSource ;
std : : string fragmentShaderSource ;
std : : string geometryShaderSource ;
2022-12-13 15:16:11 +01:00
VkPrimitiveTopology topology ;
2019-06-16 20:29:38 +02:00
VkVertexInputAttributeDescription attrs [ 8 ] { } ;
VkVertexInputBindingDescription ibd { } ;
VkPipelineVertexInputStateCreateInfo vis { VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO } ;
VkPipelineViewportStateCreateInfo views { VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO } ;
2022-09-06 13:30:18 +02:00
2022-09-07 12:37:45 +02:00
VkPipelineLayout pipelineLayout = VK_NULL_HANDLE ;
2022-09-06 13:30:18 +02:00
// Does not include the render pass type, it's passed in separately since the
// desc is persistent.
2022-09-07 12:37:45 +02:00
RPKey rpKey { } ;
2019-06-16 20:29:38 +02:00
} ;
// All the data needed to create a compute pipeline.
struct VKRComputePipelineDesc {
	// Initialized to VK_NULL_HANDLE to avoid an indeterminate handle value
	// (consistent with VKRGraphicsPipelineDesc::pipelineCache).
	VkPipelineCache pipelineCache = VK_NULL_HANDLE;
	VkComputePipelineCreateInfo pipe{ VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO };
};
2022-09-07 16:11:15 +02:00
// Wrapped pipeline. Doesn't own desc.
2019-06-16 20:29:38 +02:00
struct VKRGraphicsPipeline {
2022-11-28 23:56:55 +01:00
VKRGraphicsPipeline ( PipelineFlags flags , const char * tag ) : flags_ ( flags ) , tag_ ( tag ) { }
2023-01-09 09:48:41 +01:00
~ VKRGraphicsPipeline ( ) ;
2022-09-23 10:56:46 +02:00
2023-02-01 11:42:25 +01:00
bool Create ( VulkanContext * vulkan , VkRenderPass compatibleRenderPass , RenderPassType rpType , VkSampleCountFlagBits sampleCount , double scheduleTime , int countToCompile ) ;
2022-09-07 16:11:15 +02:00
2022-12-01 23:41:31 +01:00
void DestroyVariants ( VulkanContext * vulkan , bool msaaOnly ) ;
2022-11-28 18:20:30 +01:00
2022-09-07 16:11:15 +02:00
// This deletes the whole VKRGraphicsPipeline, you must remove your last pointer to it when doing this.
void QueueForDeletion ( VulkanContext * vulkan ) ;
u32 GetVariantsBitmask ( ) const ;
2022-11-07 22:33:15 +01:00
void LogCreationFailure ( ) const ;
2022-12-03 14:52:06 -08:00
VKRGraphicsPipelineDesc * desc = nullptr ;
2022-11-05 22:06:53 +01:00
Promise < VkPipeline > * pipeline [ ( size_t ) RenderPassType : : TYPE_COUNT ] { } ;
2022-11-28 18:20:30 +01:00
VkSampleCountFlagBits SampleCount ( ) const { return sampleCount_ ; }
2023-01-13 10:14:29 +01:00
const char * Tag ( ) const { return tag_ . c_str ( ) ; }
2023-02-01 11:42:25 +01:00
2022-11-28 18:20:30 +01:00
private :
2023-01-09 09:48:41 +01:00
void DestroyVariantsInstant ( VkDevice device ) ;
2022-11-28 18:20:30 +01:00
std : : string tag_ ;
2022-11-28 23:56:55 +01:00
PipelineFlags flags_ ;
2022-11-28 18:20:30 +01:00
VkSampleCountFlagBits sampleCount_ = VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM ;
2019-06-16 20:29:38 +02:00
} ;
struct VKRComputePipeline {
2022-09-23 10:56:46 +02:00
~ VKRComputePipeline ( ) {
delete pipeline ;
2019-06-16 21:57:22 +02:00
}
2022-09-23 10:56:46 +02:00
2019-06-16 20:29:38 +02:00
VKRComputePipelineDesc * desc = nullptr ;
2022-09-23 10:56:46 +02:00
Promise < VkPipeline > * pipeline = nullptr ;
2019-06-16 20:29:38 +02:00
2022-06-12 13:23:24 +02:00
bool CreateAsync ( VulkanContext * vulkan ) ;
2021-12-02 17:57:40 -08:00
bool Pending ( ) const {
return pipeline = = VK_NULL_HANDLE & & desc ! = nullptr ;
}
2019-06-16 20:29:38 +02:00
} ;
2019-06-16 21:57:22 +02:00
struct CompileQueueEntry {
2022-11-28 11:50:28 +01:00
CompileQueueEntry ( VKRGraphicsPipeline * p , VkRenderPass _compatibleRenderPass , RenderPassType _renderPassType , VkSampleCountFlagBits _sampleCount )
: type ( Type : : GRAPHICS ) , graphics ( p ) , compatibleRenderPass ( _compatibleRenderPass ) , renderPassType ( _renderPassType ) , sampleCount ( _sampleCount ) { }
2023-01-13 10:14:29 +01:00
CompileQueueEntry ( VKRComputePipeline * p ) : type ( Type : : COMPUTE ) , compute ( p ) , renderPassType ( RenderPassType : : DEFAULT ) , sampleCount ( VK_SAMPLE_COUNT_1_BIT ) , compatibleRenderPass ( VK_NULL_HANDLE ) { } // renderpasstype here shouldn't matter
2019-06-16 21:57:22 +02:00
enum class Type {
GRAPHICS ,
COMPUTE ,
} ;
Type type ;
2022-09-06 13:30:18 +02:00
VkRenderPass compatibleRenderPass ;
RenderPassType renderPassType ;
2019-06-16 21:57:22 +02:00
VKRGraphicsPipeline * graphics = nullptr ;
VKRComputePipeline * compute = nullptr ;
2022-11-28 11:50:28 +01:00
VkSampleCountFlagBits sampleCount ;
2019-06-16 21:57:22 +02:00
} ;
2017-08-16 23:03:30 +02:00
class VulkanRenderManager {
public :
2023-08-16 11:45:26 +02:00
VulkanRenderManager ( VulkanContext * vulkan , bool useThread , HistoryBuffer < FrameTimeData , FRAME_TIME_HISTORY_LENGTH > & frameTimeHistory ) ;
2017-08-19 17:32:10 +02:00
~ VulkanRenderManager ( ) ;
2017-08-18 15:08:40 +02:00
// Makes sure that the GPU has caught up enough that we can start writing buffers of this frame again.
2021-12-19 22:49:42 +01:00
void BeginFrame ( bool enableProfiling , bool enableLogProfiler ) ;
2023-08-10 09:50:01 +02:00
// These can run on a different thread!
2017-10-28 16:47:08 +02:00
void Finish ( ) ;
2023-08-10 09:50:01 +02:00
void Present ( ) ;
2017-11-01 21:42:19 +01:00
// Zaps queued up commands. Use if you know there's a risk you've queued up stuff that has already been deleted. Can happen during in-game shutdown.
void Wipe ( ) ;
2022-11-24 10:38:49 +01:00
void SetInvalidationCallback ( InvalidationCallback callback ) {
invalidationCallback_ = callback ;
}
2022-10-18 00:26:10 +02:00
// This starts a new step containing a render pass (unless it can be trivially merged into the previous one, which is pretty common).
2020-05-24 20:27:58 +02:00
//
// After a "CopyFramebuffer" or the other functions that start "steps", you need to call this beforce
// making any new render state changes or draw calls.
//
// The following dynamic state needs to be reset by the caller after calling this (and will thus not safely carry over from
// the previous one):
// * Viewport/Scissor
// * Stencil parameters
// * Blend color
//
// (Most other state is directly decided by your choice of pipeline and descriptor set, so not handled here).
//
// It can be useful to use GetCurrentStepId() to figure out when you need to send all this state again, if you're
// not keeping track of your calls to this function on your own.
2022-10-23 11:21:35 +02:00
void BindFramebufferAsRenderTarget ( VKRFramebuffer * fb , VKRRenderPassLoadAction color , VKRRenderPassLoadAction depth , VKRRenderPassLoadAction stencil , uint32_t clearColor , float clearDepth , uint8_t clearStencil , const char * tag ) ;
2020-05-24 20:27:58 +02:00
// Returns an ImageView corresponding to a framebuffer. Is called BindFramebufferAsTexture to maintain a similar interface
// as the other backends, even though there's no actual binding happening here.
2022-10-18 00:26:10 +02:00
// For layer, we use the same convention as thin3d, where layer = -1 means all layers together. For texturing, that means that you
// get an array texture view.
VkImageView BindFramebufferAsTexture ( VKRFramebuffer * fb , int binding , VkImageAspectFlags aspectBits , int layer ) ;
2020-05-24 20:27:58 +02:00
2023-02-05 10:52:52 +01:00
bool CopyFramebufferToMemory ( VKRFramebuffer * src , VkImageAspectFlags aspectBits , int x , int y , int w , int h , Draw : : DataFormat destFormat , uint8_t * pixels , int pixelStride , Draw : : ReadbackMode mode , const char * tag ) ;
2020-05-21 11:24:05 +02:00
void CopyImageToMemorySync ( VkImage image , int mipLevel , int x , int y , int w , int h , Draw : : DataFormat destFormat , uint8_t * pixels , int pixelStride , const char * tag ) ;
2017-08-19 17:32:10 +02:00
2017-11-22 12:24:05 +01:00
void CopyFramebuffer ( VKRFramebuffer * src , VkRect2D srcRect , VKRFramebuffer * dst , VkOffset2D dstPos , VkImageAspectFlags aspectMask , const char * tag ) ;
void BlitFramebuffer ( VKRFramebuffer * src , VkRect2D srcRect , VKRFramebuffer * dst , VkRect2D dstRect , VkImageAspectFlags aspectMask , VkFilter filter , const char * tag ) ;
2017-08-19 17:32:10 +02:00
2019-06-16 20:29:38 +02:00
// Deferred creation, like in GL. Unlike GL though, the purpose is to allow background creation and avoiding
// stalling the emulation thread as much as possible.
2022-09-06 13:30:18 +02:00
// We delay creating pipelines until the end of the current render pass, so we can create the right type immediately.
2022-09-07 15:19:20 +02:00
// Unless a variantBitmask is passed in, in which case we can just go ahead.
// WARNING: desc must stick around during the lifetime of the pipeline! It's not enough to build it on the stack and drop it.
2023-01-13 10:14:29 +01:00
VKRGraphicsPipeline * CreateGraphicsPipeline ( VKRGraphicsPipelineDesc * desc , PipelineFlags pipelineFlags , uint32_t variantBitmask , VkSampleCountFlagBits sampleCount , bool cacheLoad , const char * tag ) ;
2022-09-07 15:19:20 +02:00
VKRComputePipeline * CreateComputePipeline ( VKRComputePipelineDesc * desc ) ;
2019-06-16 21:57:22 +02:00
2022-09-07 15:19:20 +02:00
void NudgeCompilerThread ( ) {
2019-06-16 21:57:22 +02:00
compileMutex_ . lock ( ) ;
compileCond_ . notify_one ( ) ;
compileMutex_ . unlock ( ) ;
2019-06-16 20:29:38 +02:00
}
2022-09-01 14:21:34 +02:00
void BindPipeline ( VKRGraphicsPipeline * pipeline , PipelineFlags flags , VkPipelineLayout pipelineLayout ) {
2023-09-11 16:57:18 +02:00
_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER & & pipeline ! = nullptr ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : BIND_GRAPHICS_PIPELINE ;
2022-09-07 15:19:20 +02:00
pipelinesToCheck_ . push_back ( pipeline ) ;
2022-09-03 22:04:01 +02:00
data . graphics_pipeline . pipeline = pipeline ;
2022-09-01 14:21:34 +02:00
data . graphics_pipeline . pipelineLayout = pipelineLayout ;
2022-11-24 10:03:16 +01:00
// This can be used to debug cases where depth/stencil rendering is used on color-only framebuffers.
// if ((flags & PipelineFlags::USES_DEPTH_STENCIL) && curRenderStep_->render.framebuffer && !curRenderStep_->render.framebuffer->HasDepth()) {
// DebugBreak();
// }
2019-06-16 20:29:38 +02:00
curPipelineFlags_ | = flags ;
}
2022-09-01 14:21:34 +02:00
void BindPipeline ( VKRComputePipeline * pipeline , PipelineFlags flags , VkPipelineLayout pipelineLayout ) {
2019-06-16 20:29:38 +02:00
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER ) ;
_dbg_assert_ ( pipeline ! = nullptr ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : BIND_COMPUTE_PIPELINE ;
2022-09-03 22:04:01 +02:00
data . compute_pipeline . pipeline = pipeline ;
2022-09-01 14:21:34 +02:00
data . compute_pipeline . pipelineLayout = pipelineLayout ;
2019-06-16 20:29:38 +02:00
curPipelineFlags_ | = flags ;
}
2017-08-18 15:08:40 +02:00
void SetViewport ( const VkViewport & vp ) {
2020-07-19 17:47:02 +02:00
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER ) ;
_dbg_assert_ ( ( int ) vp . width > = 0 ) ;
_dbg_assert_ ( ( int ) vp . height > = 0 ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : VIEWPORT ;
2019-10-13 20:43:26 +02:00
data . viewport . vp . x = vp . x ;
data . viewport . vp . y = vp . y ;
data . viewport . vp . width = vp . width ;
data . viewport . vp . height = vp . height ;
// We can't allow values outside this range unless we use VK_EXT_depth_range_unrestricted.
// Sometimes state mapping produces 65536/65535 which is slightly outside.
// TODO: This should be fixed at the source.
data . viewport . vp . minDepth = clamp_value ( vp . minDepth , 0.0f , 1.0f ) ;
data . viewport . vp . maxDepth = clamp_value ( vp . maxDepth , 0.0f , 1.0f ) ;
2020-05-17 20:40:22 -07:00
curStepHasViewport_ = true ;
2017-08-16 23:03:30 +02:00
}
2021-12-08 22:34:47 +01:00
// It's OK to set scissor outside the valid range - the function will automatically clip.
void SetScissor ( int x , int y , int width , int height ) {
2020-07-19 17:47:02 +02:00
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER ) ;
2020-10-11 13:07:08 +02:00
2021-12-08 22:34:47 +01:00
if ( x < 0 ) {
width + = x ; // since x is negative, this shrinks width.
x = 0 ;
}
if ( y < 0 ) {
height + = y ;
y = 0 ;
2020-10-11 13:07:08 +02:00
}
2021-09-10 01:13:28 +02:00
2021-12-08 22:34:47 +01:00
if ( x + width > curWidth_ ) {
width = curWidth_ - x ;
}
if ( y + height > curHeight_ ) {
height = curHeight_ - y ;
2020-10-11 13:07:08 +02:00
}
2021-12-07 21:46:10 +01:00
2021-12-08 22:34:47 +01:00
// Check validity.
if ( width < 0 | | height < 0 | | x > = curWidth_ | | y > = curHeight_ ) {
// TODO: If any of the dimensions are now zero or negative, we should flip a flag and not do draws, probably.
// Instead, if we detect an invalid scissor rectangle, we just put a 1x1 rectangle in the upper left corner.
x = 0 ;
y = 0 ;
width = 1 ;
height = 1 ;
}
VkRect2D rc ;
rc . offset . x = x ;
rc . offset . y = y ;
rc . extent . width = width ;
rc . extent . height = height ;
2021-12-07 21:46:10 +01:00
2020-10-11 13:07:08 +02:00
curRenderArea_ . Apply ( rc ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : SCISSOR ;
2019-10-13 19:56:25 +02:00
data . scissor . scissor = rc ;
2020-05-17 21:23:32 -07:00
curStepHasScissor_ = true ;
2017-08-16 23:03:30 +02:00
}
2017-08-22 13:25:45 +02:00
void SetStencilParams ( uint8_t writeMask , uint8_t compareMask , uint8_t refValue ) {
2020-07-19 17:47:02 +02:00
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : STENCIL ;
2017-08-22 13:25:45 +02:00
data . stencil . stencilWriteMask = writeMask ;
data . stencil . stencilCompareMask = compareMask ;
data . stencil . stencilRef = refValue ;
}
2019-10-13 21:15:01 +02:00
void SetBlendFactor ( uint32_t color ) {
2020-07-19 17:47:02 +02:00
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : BLEND ;
2019-10-13 21:15:01 +02:00
data . blendColor . color = color ;
2017-08-18 15:08:40 +02:00
}
2017-11-01 14:18:39 +01:00
void PushConstants ( VkPipelineLayout pipelineLayout , VkShaderStageFlags stages , int offset , int size , void * constants ) {
2020-07-19 17:47:02 +02:00
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER ) ;
2020-08-16 00:38:55 +02:00
_dbg_assert_ ( size + offset < 40 ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : PUSH_CONSTANTS ;
2017-11-01 14:18:39 +01:00
data . push . stages = stages ;
data . push . offset = offset ;
data . push . size = size ;
memcpy ( data . push . data , constants , size ) ;
}
2017-08-16 23:03:30 +02:00
void Clear ( uint32_t clearColor , float clearZ , int clearStencil , int clearMask ) ;
2022-08-28 23:16:48 +02:00
// Cheaply set that we don't care about the contents of a surface at the start of the current render pass.
// This set the corresponding load-op of the current render pass to DONT_CARE.
// Useful when we don't know at bind-time whether we will overwrite the surface or not.
void SetLoadDontCare ( VkImageAspectFlags aspects ) {
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER ) ;
if ( aspects & VK_IMAGE_ASPECT_COLOR_BIT )
curRenderStep_ - > render . colorLoad = VKRRenderPassLoadAction : : DONT_CARE ;
if ( aspects & VK_IMAGE_ASPECT_DEPTH_BIT )
curRenderStep_ - > render . depthLoad = VKRRenderPassLoadAction : : DONT_CARE ;
if ( aspects & VK_IMAGE_ASPECT_STENCIL_BIT )
curRenderStep_ - > render . stencilLoad = VKRRenderPassLoadAction : : DONT_CARE ;
}
// Cheaply set that we don't care about the contents of a surface at the end of the current render pass.
// This set the corresponding store-op of the current render pass to DONT_CARE.
void SetStoreDontCare ( VkImageAspectFlags aspects ) {
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER ) ;
if ( aspects & VK_IMAGE_ASPECT_COLOR_BIT )
curRenderStep_ - > render . colorStore = VKRRenderPassStoreAction : : DONT_CARE ;
if ( aspects & VK_IMAGE_ASPECT_DEPTH_BIT )
curRenderStep_ - > render . depthStore = VKRRenderPassStoreAction : : DONT_CARE ;
if ( aspects & VK_IMAGE_ASPECT_STENCIL_BIT )
curRenderStep_ - > render . stencilStore = VKRRenderPassStoreAction : : DONT_CARE ;
}
2022-09-01 14:21:34 +02:00
void Draw ( VkDescriptorSet descSet , int numUboOffsets , const uint32_t * uboOffsets , VkBuffer vbuffer , int voffset , int count , int offset = 0 ) {
2020-07-19 17:47:02 +02:00
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER & & curStepHasViewport_ & & curStepHasScissor_ ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : DRAW ;
2017-08-18 15:08:40 +02:00
data . draw . count = count ;
2020-05-10 20:34:42 -07:00
data . draw . offset = offset ;
2017-08-18 15:08:40 +02:00
data . draw . ds = descSet ;
2017-08-16 23:03:30 +02:00
data . draw . vbuffer = vbuffer ;
2017-08-18 15:08:40 +02:00
data . draw . voffset = voffset ;
data . draw . numUboOffsets = numUboOffsets ;
2020-08-16 00:38:55 +02:00
_dbg_assert_ ( numUboOffsets < = ARRAY_SIZE ( data . draw . uboOffsets ) ) ;
2017-08-18 15:08:40 +02:00
for ( int i = 0 ; i < numUboOffsets ; i + + )
data . draw . uboOffsets [ i ] = uboOffsets [ i ] ;
2017-08-22 12:55:30 +02:00
curRenderStep_ - > render . numDraws + + ;
2017-08-16 23:03:30 +02:00
}
2023-05-30 00:16:14 +02:00
void DrawIndexed ( VkDescriptorSet descSet , int numUboOffsets , const uint32_t * uboOffsets , VkBuffer vbuffer , int voffset , VkBuffer ibuffer , int ioffset , int count , int numInstances ) {
2020-07-19 17:47:02 +02:00
_dbg_assert_ ( curRenderStep_ & & curRenderStep_ - > stepType = = VKRStepType : : RENDER & & curStepHasViewport_ & & curStepHasScissor_ ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : DRAW_INDEXED ;
2017-08-18 15:08:40 +02:00
data . drawIndexed . count = count ;
2017-08-22 13:25:45 +02:00
data . drawIndexed . instances = numInstances ;
2017-08-18 15:08:40 +02:00
data . drawIndexed . ds = descSet ;
2017-08-16 23:03:30 +02:00
data . drawIndexed . vbuffer = vbuffer ;
data . drawIndexed . voffset = voffset ;
data . drawIndexed . ibuffer = ibuffer ;
data . drawIndexed . ioffset = ioffset ;
2017-08-18 15:08:40 +02:00
data . drawIndexed . numUboOffsets = numUboOffsets ;
2020-08-16 00:38:55 +02:00
_dbg_assert_ ( numUboOffsets < = ARRAY_SIZE ( data . drawIndexed . uboOffsets ) ) ;
2017-08-18 15:08:40 +02:00
for ( int i = 0 ; i < numUboOffsets ; i + + )
data . drawIndexed . uboOffsets [ i ] = uboOffsets [ i ] ;
2017-08-22 12:55:30 +02:00
curRenderStep_ - > render . numDraws + + ;
2017-08-16 23:03:30 +02:00
}
2022-10-13 22:34:21 +02:00
// These can be useful both when inspecting in RenderDoc, and when manually inspecting recorded commands
// in the debugger.
void DebugAnnotate ( const char * annotation ) {
2023-02-27 10:39:49 +01:00
_dbg_assert_ ( curRenderStep_ ) ;
2023-05-17 01:10:40 +02:00
VkRenderData & data = curRenderStep_ - > commands . push_uninitialized ( ) ;
data . cmd = VKRRenderCommand : : DEBUG_ANNOTATION ;
2022-10-13 22:34:21 +02:00
data . debugAnnotation . annotation = annotation ;
}
2017-08-19 17:32:10 +02:00
VkCommandBuffer GetInitCmd ( ) ;
2017-10-27 22:10:36 +02:00
2020-11-01 23:37:32 +01:00
bool CreateBackbuffers ( ) ;
2017-08-22 12:55:30 +02:00
void DestroyBackbuffers ( ) ;
2017-11-12 21:50:54 -08:00
bool HasBackbuffers ( ) {
2022-09-17 08:43:13 +02:00
return queueRunner_ . HasBackbuffers ( ) ;
2017-11-12 21:50:54 -08:00
}
2020-03-01 08:53:46 -08:00
void SetInflightFrames ( int f ) {
newInflightFrames_ = f < 1 | | f > VulkanContext : : MAX_INFLIGHT_FRAMES ? VulkanContext : : MAX_INFLIGHT_FRAMES : f ;
2020-02-29 23:40:55 -08:00
}
2018-01-17 13:59:32 +01:00
VulkanContext * GetVulkanContext ( ) {
return vulkan_ ;
}
2018-03-29 14:36:04 +02:00
// Be careful with this. Only meant to be used for fetching render passes for shader cache initialization.
VulkanQueueRunner * GetQueueRunner ( ) {
return & queueRunner_ ;
}
2019-08-21 00:03:00 +02:00
std : : string GetGpuProfileString ( ) const {
2019-09-18 00:16:08 +02:00
return frameData_ [ vulkan_ - > GetCurFrame ( ) ] . profile . profileSummary ;
2019-08-21 00:03:00 +02:00
}
2020-06-21 22:34:37 +02:00
bool NeedsSwapchainRecreate ( ) const {
// Accepting a few of these makes shutdown simpler.
return outOfDateFrames_ > VulkanContext : : MAX_INFLIGHT_FRAMES ;
}
2022-10-10 10:53:27 +02:00
void ResetStats ( ) ;
2023-05-02 22:04:14 +02:00
void DrainCompileQueue ( ) ;
2022-10-10 10:53:27 +02:00
2017-08-18 15:08:40 +02:00
private :
2020-10-11 11:47:24 +02:00
void EndCurRenderStep ( ) ;
2022-09-23 15:24:26 +02:00
void ThreadFunc ( ) ;
void CompileThreadFunc ( ) ;
2022-09-23 19:39:00 +02:00
void Run ( VKRRenderThreadTask & task ) ;
2017-10-28 18:03:27 +02:00
// Bad for performance but sometimes necessary for synchronous CPU readbacks (screenshots and whatnot).
void FlushSync ( ) ;
2017-11-09 16:28:22 +01:00
void StopThread ( ) ;
2017-11-05 08:40:11 -08:00
2023-08-01 18:04:44 +02:00
void PresentWaitThreadFunc ( ) ;
2023-08-03 12:59:25 +02:00
void PollPresentTiming ( ) ;
2023-08-01 18:04:44 +02:00
2022-09-19 18:07:50 +02:00
FrameDataShared frameDataShared_ ;
2017-08-19 17:32:10 +02:00
FrameData frameData_ [ VulkanContext : : MAX_INFLIGHT_FRAMES ] ;
2020-02-29 23:40:55 -08:00
int newInflightFrames_ = - 1 ;
2020-03-08 17:03:58 +01:00
int inflightFramesAtStart_ = 0 ;
2017-08-19 17:32:10 +02:00
2020-06-21 22:34:37 +02:00
int outOfDateFrames_ = 0 ;
2017-08-22 13:25:45 +02:00
// Submission time state
2020-10-11 13:07:08 +02:00
// Note: These are raw backbuffer-sized. Rotated.
int curWidthRaw_ = - 1 ;
int curHeightRaw_ = - 1 ;
// Pre-rotation (as you'd expect).
2019-06-18 00:18:40 +02:00
int curWidth_ = - 1 ;
int curHeight_ = - 1 ;
2020-10-11 13:07:08 +02:00
2017-08-22 12:55:30 +02:00
bool insideFrame_ = false ;
2022-09-23 19:39:00 +02:00
bool run_ = false ;
2023-07-23 19:20:55 +02:00
bool useRenderThread_ = true ;
2023-08-30 10:47:20 +02:00
bool measurePresentTime_ = false ;
2023-07-23 19:20:55 +02:00
2020-05-24 22:39:29 -07:00
// This is the offset within this frame, in case of a mid-frame sync.
2017-10-31 12:02:10 +01:00
VKRStep * curRenderStep_ = nullptr ;
2020-05-17 20:40:22 -07:00
bool curStepHasViewport_ = false ;
2020-05-17 21:23:32 -07:00
bool curStepHasScissor_ = false ;
2022-02-19 20:40:27 +01:00
PipelineFlags curPipelineFlags_ { } ;
2020-10-11 13:07:08 +02:00
BoundingRect curRenderArea_ ;
2020-10-11 11:47:24 +02:00
2017-08-22 13:25:45 +02:00
std : : vector < VKRStep * > steps_ ;
2017-08-19 17:32:10 +02:00
2017-08-22 13:25:45 +02:00
// Execution time state
VulkanContext * vulkan_ ;
2017-08-22 17:18:54 +02:00
std : : thread thread_ ;
2017-10-27 22:10:36 +02:00
VulkanQueueRunner queueRunner_ ;
2017-08-19 17:32:10 +02:00
2022-09-23 19:39:00 +02:00
// For pushing data on the queue.
std : : mutex pushMutex_ ;
std : : condition_variable pushCondVar_ ;
2023-05-17 00:55:04 +02:00
std : : queue < VKRRenderThreadTask * > renderThreadQueue_ ;
2022-09-23 19:39:00 +02:00
// For readbacks and other reasons we need to sync with the render thread.
std : : mutex syncMutex_ ;
std : : condition_variable syncCondVar_ ;
2019-06-16 21:57:22 +02:00
// Shader compilation thread to compile while emulating the rest of the frame.
// Only one right now but we could use more.
std : : thread compileThread_ ;
// Sync
std : : condition_variable compileCond_ ;
std : : mutex compileMutex_ ;
std : : vector < CompileQueueEntry > compileQueue_ ;
2023-08-01 18:04:44 +02:00
// Thread for measuring presentation delay.
std : : thread presentWaitThread_ ;
2022-09-07 15:19:20 +02:00
// pipelines to check and possibly create at the end of the current render pass.
std : : vector < VKRGraphicsPipeline * > pipelinesToCheck_ ;
2022-10-10 10:53:27 +02:00
// For nicer output in the little internal GPU profiler.
SimpleStat initTimeMs_ ;
SimpleStat totalGPUTimeMs_ ;
SimpleStat renderCPUTimeMs_ ;
2022-11-24 10:38:49 +01:00
2022-12-01 19:15:38 +01:00
std : : function < void ( InvalidationCallbackFlags ) > invalidationCallback_ ;
2023-08-02 11:38:31 +02:00
2023-08-16 12:16:31 +02:00
uint64_t frameIdGen_ = FRAME_TIME_HISTORY_LENGTH ;
HistoryBuffer < FrameTimeData , FRAME_TIME_HISTORY_LENGTH > & frameTimeHistory_ ;
2017-08-19 17:32:10 +02:00
} ;