Divide Framework 0.1
A free and open-source 3D Framework under heavy development
Loading...
Searching...
No Matches
VKWrapper.cpp
Go to the documentation of this file.
1
2
3#include "Headers/VKWrapper.h"
4
10
12
20
25
27
30
31#include <SDL2/SDL_vulkan.h>
32
33#define VMA_IMPLEMENTATION
35
namespace
{
    // Debug messenger callback registered with the Vulkan validation layers (VK_EXT_debug_utils).
    // Filters out known-noisy validation messages, then routes the rest to the engine
    // console at the matching severity. Always returns VK_FALSE, as required by the spec.
    inline VKAPI_ATTR VkBool32 VKAPI_CALL divide_debug_callback( VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                                                 VkDebugUtilsMessageTypeFlagsEXT messageType,
                                                                 const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
                                                                 void* )
    {
        // Early-out when API debugging has been explicitly toggled off at runtime
        // (flag pointer set AND pointing at 'false').
        if ( Divide::VK_API::GetStateTracker()._enabledAPIDebugging && !(*Divide::VK_API::GetStateTracker()._enabledAPIDebugging) )
        {
            return VK_FALSE;
        }

        using namespace Divide;

        // Map the (single-bit) severity flag to a printable label.
        const auto to_string_message_severity = []( VkDebugUtilsMessageSeverityFlagBitsEXT s ) -> const char*
        {
            switch ( s )
            {
                case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: return "VERBOSE";
                case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT: return "ERROR";
                case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: return "WARNING";
                case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT: return "INFO";
                default: return "UNKNOWN";
            }
        };

        // Build a "[General][Validation]..." tag string from the type bit-mask
        // (multiple bits may be set at once).
        const auto to_string_message_type = []( VkDebugUtilsMessageTypeFlagsEXT s )
        {
            Str<64> ret{};
            if ( s & VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT )
            {
                ret.append("[General]");
            }
            if ( s & VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT )
            {
                ret.append( "[Validation]" );
            }
            if ( s & VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT )
            {
                ret.append( "[Performance]" );
            }
            if ( s & VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT )
            {
                ret.append( "[Address Binding]" );
            }
            if ( ret.empty() )
            {
                ret.append("[Unknown]");
            }

            return ret;
        };

        // Best-practices message IDs we deliberately suppress (known-noisy / not actionable here).
        constexpr const char* kSkippedMessages[] = {
            "UNASSIGNED-BestPractices-vkCreateInstance-specialuse-extension-debugging",
            "UNASSIGNED-BestPractices-vkCreateDevice-specialuse-extension-d3demulation",
            "UNASSIGNED-BestPractices-vkCreateDevice-specialuse-extension-glemulation",
            "UNASSIGNED-BestPractices-vkBindMemory-small-dedicated-allocation",
            "UNASSIGNED-BestPractices-vkAllocateMemory-small-allocation",
            "UNASSIGNED-BestPractices-SpirvDeprecated_WorkgroupSize"
        };

        if ( pCallbackData->pMessageIdName != nullptr )
        {
            if ( strstr( pCallbackData->pMessageIdName, "UNASSIGNED-BestPractices-Error-Result") != nullptr )
            {
                // We don't care about this error since we use VMA for our allocations and this is standard behaviour with that library
                if ( strstr( pCallbackData->pMessage, "vkAllocateMemory()" ) != nullptr )
                {
                    return VK_FALSE;
                }
            }
            else
            {
                // Drop any message whose ID matches the suppression list above.
                for ( const char* msg : kSkippedMessages )
                {
                    if ( strstr( pCallbackData->pMessageIdName, msg ) != nullptr )
                    {
                        return VK_FALSE;
                    }
                }
            }
        }
        // Compose "[SEVERITY] [tags] : message" for the console.
        const string outputError = Util::StringFormat("[ {} ] {} : {}\n",
                                                      to_string_message_severity( messageSeverity ),
                                                      to_string_message_type( messageType ).c_str(),
                                                      pCallbackData->pMessage );

        // Save current console flags so they can be restored after printing:
        // we want the message flushed immediately and without the console's own
        // severity decoration (the text above already carries one).
        const bool isConsoleImmediate = Console::IsFlagSet( Console::Flags::PRINT_IMMEDIATE );
        const bool severityDecoration = Console::IsFlagSet( Console::Flags::DECORATE_SEVERITY );

        Console::ToggleFlag( Console::Flags::PRINT_IMMEDIATE, true );
        Console::ToggleFlag( Console::Flags::DECORATE_SEVERITY, false );

        if ( messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT ||
             messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT )
        {
            Console::printfn( outputError.c_str() );
        }
        else if ( messageSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT )
        {
            Console::warnfn( outputError.c_str() );
        }
        else
        {
            Console::errorfn( outputError.c_str() );
            // Assert fires unless _assertOnAPIError is set AND currently false,
            // i.e. API errors become fatal when the flag is enabled (or unset).
            DIVIDE_ASSERT( VK_API::GetStateTracker()._assertOnAPIError && !(*VK_API::GetStateTracker()._assertOnAPIError), outputError.c_str() );
        }

        Console::ToggleFlag( Console::Flags::DECORATE_SEVERITY, severityDecoration );
        Console::ToggleFlag( Console::Flags::PRINT_IMMEDIATE, isConsoleImmediate );

        return VK_FALSE; // Applications must return false here
    }
}
152
153namespace Divide
154{
    // Extension entry points that must be resolved at runtime via vkGetDeviceProcAddr.
    // Loaded during initRenderingAPI() only when the corresponding extension is supported.

    // VK_EXT_extended_dynamic_state3 (dynamic blend state):
    static PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = VK_NULL_HANDLE;
    static PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = VK_NULL_HANDLE;
    static PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = VK_NULL_HANDLE;
    // VK_KHR_push_descriptor:
    static PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = VK_NULL_HANDLE;

    // VK_EXT_descriptor_buffer:
    static PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = VK_NULL_HANDLE;
    static PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = VK_NULL_HANDLE;
    static PFN_vkGetDescriptorEXT vkGetDescriptorEXT = VK_NULL_HANDLE;
    static PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = VK_NULL_HANDLE;
    static PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = VK_NULL_HANDLE;
    static PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = VK_NULL_HANDLE;
166
    namespace
    {
        // On-disk file name for the serialized VkPipelineCache blob.
        const ResourcePath PipelineCacheFileName{ "pipeline_cache.dvd" };

        // NOTE(review): signature elided in this view — returns the directory where
        // the Vulkan pipeline cache is stored, per build type.
        {
            return Paths::Shaders::g_cacheLocation / Paths::g_buildTypeLocation / Paths::Shaders::g_cacheLocationVK;
        }

        // True for triangle-based primitive topologies.
        // NOTE(review): the remaining operands of this condition are elided in this view.
        [[nodiscard]] FORCE_INLINE bool IsTriangles( const PrimitiveTopology topology )
        {
            return topology == PrimitiveTopology::TRIANGLES ||
        }

        // Converts an engine ShaderStageVisibility bit-mask to VkShaderStageFlags.
        // NOTE(review): the per-stage 'if' conditions inside the else-branch are
        // elided in this view — each guards one stage bit of the input mask.
        {
            VkShaderStageFlags ret = 0u;

            if ( mask != to_base( ShaderStageVisibility::NONE ) )
            {
                if ( mask == to_base( ShaderStageVisibility::ALL ) )
                {
                    ret = VK_SHADER_STAGE_ALL;
                }
                else if ( mask == to_base( ShaderStageVisibility::ALL_DRAW ) )
                {
                    ret = VK_SHADER_STAGE_ALL_GRAPHICS;
                }
                else if ( mask == to_base( ShaderStageVisibility::COMPUTE ) )
                {
                    ret = VK_SHADER_STAGE_COMPUTE_BIT;
                }
                else
                {
                    {
                        ret |= VK_SHADER_STAGE_VERTEX_BIT;
                    }
                    {
                        ret |= VK_SHADER_STAGE_GEOMETRY_BIT;
                    }
                    {
                        ret |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
                    }
                    {
                        ret |= VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
                    }
                    {
                        ret |= VK_SHADER_STAGE_FRAGMENT_BIT;
                    }
                    {
                        ret |= VK_SHADER_STAGE_COMPUTE_BIT;
                    }
                }
            }

            return ret;
        }

        // One dynamic-buffer binding: descriptor info plus the stages that read it.
        // NOTE(review): the struct's name/declaration line is elided in this view.
        {
            VkDescriptorBufferInfo _info{};
            VkShaderStageFlags _stageFlags{};
        };

        using DynamicBufferEntry = std::array<DynamicEntry, MAX_BINDINGS_PER_DESCRIPTOR_SET>;
        // Set when the bound pipeline changes so dynamic state is re-applied.
        thread_local bool s_pipelineReset = true;

        // Clears all cached dynamic-buffer bindings and flags a pipeline reset.
        // NOTE(review): function signature elided in this view.
        {
            for ( auto& bindings : s_dynamicBindings )
            {
                bindings.fill( {} );
            }
            s_pipelineReset = true;
        }
    }
255
    // PCI vendor IDs, used to identify the GPU manufacturer from
    // VkPhysicalDeviceProperties::vendorID during device initialization.
    constexpr U32 VK_VENDOR_ID_AMD = 0x1002;
    constexpr U32 VK_VENDOR_ID_IMGTECH = 0x1010;
    constexpr U32 VK_VENDOR_ID_NVIDIA = 0x10DE;
    constexpr U32 VK_VENDOR_ID_ARM = 0x13B5;
    constexpr U32 VK_VENDOR_ID_QUALCOMM = 0x5143;
    constexpr U32 VK_VENDOR_ID_INTEL = 0x8086;
262

    // Shader programs that finished a hot-reload; their cached pipelines are
    // invalidated during the next end-of-frame pass.
    eastl::stack<vkShaderProgram*> VK_API::s_reloadedShaders;

278 VkPipeline PipelineBuilder::build_pipeline( VkDevice device, VkPipelineCache pipelineCache, const bool graphics )
279 {
280 if ( graphics )
281 {
282 return build_graphics_pipeline( device, pipelineCache );
283 }
284
285 return build_compute_pipeline( device, pipelineCache );
286 }
287
288 VkPipeline PipelineBuilder::build_compute_pipeline( VkDevice device, VkPipelineCache pipelineCache )
289 {
290 VkComputePipelineCreateInfo pipelineInfo = vk::computePipelineCreateInfo( _pipelineLayout );
291 pipelineInfo.stage = _shaderStages.front();
292 pipelineInfo.layout = _pipelineLayout;
293
294 //it's easy to error out on create graphics pipeline, so we handle it a bit better than the common VK_CHECK case
295 VkPipeline newPipeline;
296 if ( vkCreateComputePipelines( device, pipelineCache, 1, &pipelineInfo, nullptr, &newPipeline ) != VK_SUCCESS )
297 {
298 Console::errorfn( LOCALE_STR( "ERROR_VK_PIPELINE_COMPUTE_FAILED" ) );
299 return VK_NULL_HANDLE; // failed to create graphics pipeline
300 }
301
302 return newPipeline;
303 }
304
    /// Builds a graphics pipeline from all the state structs accumulated in the builder.
    /// Most state is declared dynamic (set per draw-call), and dynamic rendering is used
    /// via the VkPipelineRenderingCreateInfo chained through pNext (no VkRenderPass).
    /// Returns the new pipeline handle, or VK_NULL_HANDLE on failure.
    VkPipeline PipelineBuilder::build_graphics_pipeline( VkDevice device, VkPipelineCache pipelineCache)
    {
        //make viewport state from our stored viewport and scissor.
        //at the moment we won't support multiple viewports or scissors
        VkPipelineViewportStateCreateInfo viewportState = vk::pipelineViewportStateCreateInfo( 1, 1 );
        viewportState.pViewports = &_viewport;
        viewportState.pScissors = &_scissor;

        const VkPipelineColorBlendStateCreateInfo colorBlending = vk::pipelineColorBlendStateCreateInfo( to_U32( _colorBlendAttachments.size() ), _colorBlendAttachments.data() );

        // States we set dynamically on the command buffer instead of baking into the
        // pipeline. The three *_EXT blend entries MUST stay last (see count trim below).
        constexpr VkDynamicState dynamicStates[] = {
            VK_DYNAMIC_STATE_VIEWPORT,
            VK_DYNAMIC_STATE_SCISSOR,
            VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
            VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
            VK_DYNAMIC_STATE_STENCIL_REFERENCE,
            VK_DYNAMIC_STATE_DEPTH_BIAS,
            VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
            VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
            VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
            VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
            VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
            VK_DYNAMIC_STATE_STENCIL_OP,
            VK_DYNAMIC_STATE_CULL_MODE,
            VK_DYNAMIC_STATE_FRONT_FACE,
            VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
            VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,

            VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT,
            VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT,
            VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT,

            /*ToDo:
            VK_DYNAMIC_STATE_BLEND_CONSTANTS,
            VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
            VK_DYNAMIC_STATE_DEPTH_BOUNDS,
            VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
            VK_DYNAMIC_STATE_LINE_WIDTH,
            VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE*/
        };

        constexpr U32 stateCount = to_U32( std::size( dynamicStates ) );

        // Drop the trailing 3 EXT blend states when extended_dynamic_state3 isn't available.
        const VkPipelineDynamicStateCreateInfo dynamicState = vk::pipelineDynamicStateCreateInfo( dynamicStates, stateCount - (VK_API::s_hasDynamicBlendStateSupport ? 0u : 3u) );

        //build the actual pipeline
        //we now use all of the info structs we have been writing into into this one to create the pipeline
        VkGraphicsPipelineCreateInfo pipelineInfo = vk::pipelineCreateInfo( _pipelineLayout, VK_NULL_HANDLE );
        pipelineInfo.pDynamicState = &dynamicState;
        pipelineInfo.stageCount = to_U32( _shaderStages.size() );
        pipelineInfo.pStages = _shaderStages.data();
        pipelineInfo.pVertexInputState = &_vertexInputInfo;
        pipelineInfo.pInputAssemblyState = &_inputAssembly;
        pipelineInfo.pViewportState = &viewportState;
        pipelineInfo.pRasterizationState = &_rasterizer;
        pipelineInfo.pMultisampleState = &_multisampling;
        pipelineInfo.pColorBlendState = &colorBlending;
        pipelineInfo.pDepthStencilState = &_depthStencil;
        pipelineInfo.pTessellationState = &_tessellation;
        pipelineInfo.subpass = 0;
        // Dynamic rendering: attachment formats come from the state tracker, not a render pass.
        pipelineInfo.pNext = &VK_API::GetStateTracker()._pipelineRenderInfo;

        //it's easy to error out on create graphics pipeline, so we handle it a bit better than the common VK_CHECK case
        VkPipeline newPipeline;
        if ( vkCreateGraphicsPipelines( device, pipelineCache, 1, &pipelineInfo, nullptr, &newPipeline ) != VK_SUCCESS )
        {
            Console::errorfn( LOCALE_STR( "ERROR_VK_PIPELINE_GRAPHICS_FAILED" ) );
            return VK_NULL_HANDLE; // failed to create graphics pipeline
        }

        return newPipeline;
    }
377
    // Queues a deletion callback. NOTE(review): signature elided in this view.
    {
        // Transient queues keep entries alive for MAX_FRAMES_IN_FLIGHT + 1 frames
        // before they become eligible for flushing; other queues flush them ASAP.
        _deletionQueue.emplace_back( MOV( function ), (flags() & to_base(Flags::TREAT_AS_TRANSIENT)) ? Config::MAX_FRAMES_IN_FLIGHT + 1u : 0 );
    }
383
    /// Executes queued deletion callbacks whose frame counter reached zero
    /// (or all of them when 'force' is set), then removes the executed entries.
    void VKDeletionQueue::flush( VkDevice device, const bool force )
    {
        bool needsClean = false;
        for ( const auto& it : _deletionQueue )
        {
            // it.first = deletion callback, it.second = remaining frame delay.
            if (it.second == 0u || force)
            {
                (it.first)(device);
                needsClean = true;
            }
        }

        if ( needsClean )
        {
            if ( force )
            {
                // Everything ran above, so the whole queue can go.
                _deletionQueue.clear();
            }
            else
            {
                // Only drop the entries that were actually executed.
                std::erase_if( _deletionQueue, []( const auto it )
                {
                    return it.second == 0u;
                });
            }
        }
    }
412
    // Ages queued deletions: ticks each entry's frame counter down once per frame
    // until it reaches zero and becomes eligible for the next flush().
    // NOTE(review): signature elided in this view.
    {
        for ( auto& it : _deletionQueue )
        {
            if ( it.second > 0u )
            {
                it.second -= 1u;
            }
        }
    }
424
    // True when no deletion callbacks are pending. NOTE(review): signature elided in this view.
    {
        return _deletionQueue.empty();
    }
430
        // VKImmediateCmdContext constructor (signature elided in this view):
        // sets up one submit fence and one primary command buffer per ring-buffer slot,
        // all allocated from a resettable command pool for the requested queue type.
        : _context( context )
        , _type(type)
        , _queueIndex(context.getQueue(type)._index)
    {
        const VkFenceCreateInfo fenceCreateInfo = vk::fenceCreateInfo();
        for (U8 i = 0u; i < BUFFER_COUNT; ++i )
        {
            vkCreateFence( _context.getVKDevice(), &fenceCreateInfo, nullptr, &_bufferFences[i]);
        }

        _commandPool = _context.createCommandPool( _context.getQueue( type )._index, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT );

        const VkCommandBufferAllocateInfo cmdBufAllocateInfo = vk::commandBufferAllocateInfo( _commandPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, BUFFER_COUNT );

        VK_CHECK( vkAllocateCommandBuffers( _context.getVKDevice(), &cmdBufAllocateInfo, _commandBuffers.data() ) );
    }
448
    // Destructor body (signature elided in this view): destroying the pool frees
    // the command buffers allocated from it; fences are destroyed individually.
    {
        vkDestroyCommandPool( _context.getVKDevice(), _commandPool, nullptr );
        for ( U8 i = 0u; i < BUFFER_COUNT; ++i )
        {
            vkDestroyFence( _context.getVKDevice(), _bufferFences[i], nullptr);
            _bufferFences[i] = VK_NULL_HANDLE;
        }
    }
458
    /// Records the caller-provided work into the current ring-buffer command buffer
    /// and submits it to this context's queue, using per-slot fences to avoid
    /// overwriting a buffer that is still in flight.
    void VKImmediateCmdContext::flushCommandBuffer( FlushCallback&& function, const char* scopeName )
    {
        const VkCommandBufferBeginInfo cmdBeginInfo = vk::commandBufferBeginInfo( VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT );

        VkFence fence = _bufferFences[_bufferIndex];

        // After the first full pass over the ring, this slot's previous submission
        // may still be executing: wait on its fence before reusing the buffer.
        if ( _wrapCounter > 0u )
        {
            vkWaitForFences( _context.getVKDevice(), 1, &fence, true, 9999999999 );
            vkResetFences( _context.getVKDevice(), 1, &fence );
        }

        VkCommandBuffer cmd = _commandBuffers[_bufferIndex];
        VK_CHECK( vkBeginCommandBuffer( cmd, &cmdBeginInfo ) );

        // Label the submission for debug tooling (RenderDoc etc.).
        VK_API::PushDebugMessage( cmd, scopeName );

        // Execute the function
        function( cmd, _type, _queueIndex );

        VK_CHECK( vkEndCommandBuffer( cmd ) );

        VkSubmitInfo submitInfo = vk::submitInfo();
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = &cmd;

        // The fence is signaled on completion and guards this slot's reuse.
        _context.submitToQueue( _type, submitInfo, fence );

        // NOTE(review): the ring-index advance is elided in this view — _wrapCounter
        // tracking below implies _bufferIndex wraps back to 0; confirm against full source.
        if ( _bufferIndex == 0u )
        {
            ++_wrapCounter;
        }
    }
497
    /// Initializes the state tracker: records the main window and creates one
    /// immediate-submission command context per queue type.
    void VKStateTracker::init( VKDevice* device, VKPerWindowState* mainWindow )
    {
        // Both dependencies are mandatory; there is no valid tracker without them.
        DIVIDE_ASSERT(device != nullptr && mainWindow != nullptr);

        _activeWindow = mainWindow;
        for ( U8 t = 0u; t < to_base(QueueType::COUNT); ++t )
        {
            _cmdContexts[t] = std::make_unique<VKImmediateCmdContext>( *device, static_cast<QueueType>(t) );
        }

    }
510
    // Teardown counterpart to init(): releases the per-queue immediate command contexts.
    // NOTE(review): signature elided in this view.
    {
        _cmdContexts = {};
    }

    // State reset (signature elided in this view): clears cached pipeline/window/
    // indirect-buffer bindings and re-tags the dynamic-rendering info struct.
    {
        _pipeline = {};
        _activeWindow = nullptr;
        _drawIndirectBuffer = VK_NULL_HANDLE;
        _pushConstantsValid = false;
        _pipelineRenderInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO;
    }

    // Accessor for the immediate-command context of the given queue type.
    // NOTE(review): signature elided in this view.
    {
        return _cmdContexts[to_base( type )].get();
    }
536
    /// Registers a deferred deletion callback. Transient resources go to the
    /// frame-delayed queue; everything else is destroyed via the device-level queue.
    void VK_API::RegisterCustomAPIDelete( DELEGATE<void, VkDevice>&& cbk, const bool isResourceTransient )
    {
        if ( isResourceTransient )
        {
            // NOTE(review): transient branch body elided in this view — presumably
            // pushes to s_transientDeleteQueue; confirm against the full source.
        }
        else
        {
            s_deviceDeleteQueue.push( MOV( cbk ) );
        }
    }
548
    // Queues a buffer transfer request and marks the transfer queue dirty so it
    // gets processed. NOTE(review): signature elided in this view.
    {
        s_transferQueue._requests.push_back( request );
        s_transferQueue._dirty.store(true);
    }
555
    /// Thin constructor: stores the owning GFXDevice only.
    /// Actual Vulkan setup happens later in initRenderingAPI().
    VK_API::VK_API( GFXDevice& context ) noexcept
        : _context( context )
    {
    }
560
561 VkCommandBuffer VK_API::getCurrentCommandBuffer() const noexcept
562 {
563 return s_stateTracker._activeWindow->_swapChain->getFrameData()._commandBuffer;
564 }
565
    /// Vulkan needs no per-idle work; kept as an explicit no-op to satisfy the
    /// rendering API interface.
    void VK_API::idle( [[maybe_unused]] const bool fast ) noexcept
    {
        NOP();
    }
570
    // Begin-frame path for a window (signature elided in this view): lazily creates
    // per-window state, keeps the swapchain sized to the drawable area, and returns
    // false (skipping this frame) when the swapchain had to be rebuilt.
    {
        VKPerWindowState& windowState = _perWindowState[window.getGUID()];
        if ( windowState._window == nullptr )
        {
            // First frame for this window: create surface + swapchain.
            windowState._window = &window;
            initStatePerWindow(windowState);
        }

        GetStateTracker()._activeWindow = &windowState;

        const vec2<U16> windowDimensions = window.getDrawableSize();
        VkExtent2D surfaceExtent = windowState._swapChain->surfaceExtent();

        // Window resized since the swapchain was created -> rebuild before acquiring.
        if ( windowDimensions.width != surfaceExtent.width || windowDimensions.height != surfaceExtent.height )
        {
            recreateSwapChain( windowState );
            surfaceExtent = windowState._swapChain->surfaceExtent();
        }

        const VkResult result = windowState._swapChain->beginFrame();
        if ( result != VK_SUCCESS )
        {
            // OUT_OF_DATE / SUBOPTIMAL are expected after resizes; anything else is a real error.
            if ( result != VK_ERROR_OUT_OF_DATE_KHR && result != VK_SUBOPTIMAL_KHR )
            {
                Console::errorfn( LOCALE_STR( "ERROR_GENERIC_VK" ), VKErrorString( result ).c_str() );
            }

            recreateSwapChain( windowState );
            windowState._skipEndFrame = true;
            return false;
        }

        return true;
    }
609
    // Intentionally empty window hooks: Vulkan needs no work at these points.
    // NOTE(review): the first two signatures are elided in this view.
    {
    }

    {
    }

    void VK_API::prepareFlushWindow( [[maybe_unused]] DisplayWindow& window )
    {
    }
621
    // End-of-frame flush for a window (signature elided in this view): presents the
    // swapchain image and rebuilds the swapchain when presentation reports it stale.
    {
        VKPerWindowState& windowState = _perWindowState[window.getGUID()];
        assert( windowState._window != nullptr );

            GetStateTracker()._activeWindow = nullptr;
        };  // NOTE(review): closes a scope-exit lambda whose opening line is elided in this view.

        // beginFrame() failed earlier -> nothing was recorded, so skip presenting once.
        if ( windowState._skipEndFrame )
        {
            windowState._skipEndFrame = false;
            return;
        }

        VkCommandBuffer cmd = windowState._swapChain->getFrameData()._commandBuffer;
        // Reset per-frame cached binding state.
        windowState._activeState = {};
        s_dynamicOffsets.clear();

        const VkResult result = windowState._swapChain->endFrame();

        if ( result != VK_SUCCESS )
        {
            if ( result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR )
            {
                // Stale swapchain after resize/mode change: rebuild for next frame.
                recreateSwapChain( windowState );
            }
            else
            {
                Console::errorfn( LOCALE_STR( "ERROR_GENERIC_VK" ), VKErrorString( result ).c_str() );
            }
        }
    }
659
    // Frame-start bookkeeping (signature elided in this view): flips the per-usage
    // descriptor allocator pools and invalidates the cached dummy descriptor set.
    {
        for ( U8 i = 0u; i < to_base(DescriptorSetUsage::COUNT); ++i )
        {
            PROFILE_SCOPE( "Flip descriptor pools", Profiler::Category::Graphics);
            // NOTE(review): the declaration of 'pool' is elided in this view.
            if ( pool._frameCount > 1u )
            {
                pool._allocatorPool->Flip();
                pool._handle = pool._allocatorPool->GetAllocator();
            }
        }
        _dummyDescriptorSet = VK_NULL_HANDLE;

        return true;
    }
679
    // Frame-end bookkeeping (signature elided in this view): destroys cached
    // pipelines built from shader programs that were hot-reloaded this frame.
    {

        while ( !s_reloadedShaders.empty() )
        {
            vkShaderProgram* program = s_reloadedShaders.top();
            for ( auto& it : _compiledPipelines )
            {
                if ( !it.second._isValid )
                {
                    continue;
                }
                // Matching GUID => this pipeline was built from the reloaded program.
                if ( it.second._program->getGUID() == program->getGUID() )
                {
                    destroyPipeline( it.second, true );
                }
            }
            s_reloadedShaders.pop();
        }

        //vkResetCommandPool(_device->getVKDevice(), _device->graphicsCommandPool(), VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT);

        return true;
    }
709
    // Swapchain (re)creation for a window (signature elided in this view).
    {
        if ( windowState._window->minimized() )
        {
            // Can't create a zero-sized swapchain; idle while minimized.
            idle( false );
        }
        if ( windowState._window->minimized() )
        {
            return;
        }

        // Only the main window drains the device delete queue and waits for device idle here.
        if (windowState._window->getGUID() == _context.context().mainWindow().getGUID() )
        {
            s_deviceDeleteQueue.flush( _device->getVKDevice(), true );
            vkDeviceWaitIdle( _device->getVKDevice() );
        }
        const ErrorCode err = windowState._swapChain->create( windowState._window->flags() & to_base(WindowFlags::VSYNC),
                                                              windowState._surface );

        // Clear ALL sync objects as they are all invalid after recreating the swapchain. vkDeviceWaitIdle should resolve potential sync issues.
    }
738
    // First-time per-window setup (signature elided in this view): creates the SDL
    // Vulkan surface (once) and the swapchain wrapper, then sizes the swapchain.
    {
        DIVIDE_ASSERT(windowState._window != nullptr);
        if (windowState._surface == nullptr )
        {
            SDL_Vulkan_CreateSurface( windowState._window->getRawWindow(), _vkbInstance.instance, &windowState._surface );
            DIVIDE_ASSERT(windowState._surface != nullptr);
        }

        windowState._swapChain = std::make_unique<VKSwapChain>( *this, *_device, *windowState._window );
        recreateSwapChain( windowState );
    }
751
    // Per-window teardown (signature elided in this view): destroys the swapchain,
    // then the surface (if the instance is still alive), and resets the state struct.
    {
        windowState._swapChain.reset();

        if ( _vkbInstance.instance != nullptr )
        {
            vkDestroySurfaceKHR( _vkbInstance.instance, windowState._surface, nullptr );
        }

        windowState = {};
    }
763
764 ErrorCode VK_API::initRenderingAPI( [[maybe_unused]] I32 argc, [[maybe_unused]] char** argv, Configuration& config ) noexcept
765 {
766 _descriptorSets.fill( VK_NULL_HANDLE );
767 _dummyDescriptorSet = VK_NULL_HANDLE ;
768
769 s_transientDeleteQueue.flags( s_transientDeleteQueue.flags() | to_base( VKDeletionQueue::Flags::TREAT_AS_TRANSIENT ) );
770
771 DisplayWindow* window = _context.context().app().windowManager().mainWindow();
772
773 auto systemInfoRet = vkb::SystemInfo::get_system_info();
774 if ( !systemInfoRet )
775 {
776 Console::errorfn( LOCALE_STR( "ERROR_VK_INIT" ), systemInfoRet.error().message().c_str() );
778 }
779
780 //make the Vulkan instance, with basic debug features
781 vkb::InstanceBuilder builder{};
782 builder.set_app_name( window->title() )
783 .set_engine_name( Config::ENGINE_NAME )
785 .require_api_version( 1, Config::DESIRED_VULKAN_MINOR_VERSION, 0 )
786 .request_validation_layers( Config::ENABLE_GPU_VALIDATION && config.debug.renderer.enableRenderAPIDebugging )
787 .set_debug_callback( divide_debug_callback )
788 .set_debug_callback_user_data_pointer( this );
789
790 vkb::SystemInfo& systemInfo = systemInfoRet.value();
791
792 s_hasValidationFeaturesSupport = false;
793 s_hasDebugMarkerSupport = false;
794 if ( Config::ENABLE_GPU_VALIDATION && (config.debug.renderer.enableRenderAPIDebugging || config.debug.renderer.enableRenderAPIBestPractices) )
795 {
796 if (systemInfo.is_extension_available( VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME ) )
797 {
798 builder.enable_extension( VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME );
799 // Don't count config.debug.renderer.useExtensions against this as validation is basically a core part of the Vulkan dev environment
800 s_hasValidationFeaturesSupport = true;
801 }
802
803 if ( systemInfo.is_extension_available( VK_EXT_DEBUG_UTILS_EXTENSION_NAME ) )
804 {
805 builder.enable_extension( VK_EXT_DEBUG_UTILS_EXTENSION_NAME );
806 builder.add_validation_feature_enable( VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT );
807 if (config.debug.renderer.enableRenderAPIBestPractices )
808 {
809 builder.add_validation_feature_enable( VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT );
810 }
811
812 s_hasDebugMarkerSupport = config.debug.renderer.useExtensions;
813 }
814
815 if ( systemInfo.validation_layers_available )
816 {
817 builder.enable_validation_layers();
818 }
819 }
820
821 auto instanceRet = builder.build();
822 if ( !instanceRet )
823 {
824 Console::errorfn( LOCALE_STR( "ERROR_VK_INIT" ), instanceRet.error().message().c_str() );
826 }
827
828 _vkbInstance = instanceRet.value();
829
830 auto& perWindowContext = _perWindowState[window->getGUID()];
831 perWindowContext._window = window;
832
833 // get the surface of the window we opened with SDL
834 SDL_Vulkan_CreateSurface( perWindowContext._window->getRawWindow(), _vkbInstance.instance, &perWindowContext._surface );
835
836 if ( perWindowContext._surface == nullptr )
837 {
839 }
840
841 _device = std::make_unique<VKDevice>( _vkbInstance, perWindowContext._surface );
842
843 VkDevice vkDevice = _device->getVKDevice();
844 if ( vkDevice == VK_NULL_HANDLE )
845 {
847 }
848
849 if ( _device->getQueue( QueueType::GRAPHICS )._index == INVALID_VK_QUEUE_INDEX )
850 {
852 }
853
854 if ( _device->getPresentQueueIndex() == INVALID_VK_QUEUE_INDEX )
855 {
857 }
858
859 VKQueue graphicsQueue = _device->getQueue( QueueType::GRAPHICS );
860 VkPhysicalDevice physicalDevice = _device->getVKPhysicalDevice();
861 PROFILE_VK_INIT( &vkDevice, &physicalDevice, &graphicsQueue._queue, &graphicsQueue._index, 1, nullptr);
862
863 if ( s_hasDebugMarkerSupport )
864 {
865 Debug::vkCmdBeginDebugUtilsLabelEXT = (PFN_vkCmdBeginDebugUtilsLabelEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdBeginDebugUtilsLabelEXT" );
866 Debug::vkCmdEndDebugUtilsLabelEXT = (PFN_vkCmdEndDebugUtilsLabelEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdEndDebugUtilsLabelEXT" );
867 Debug::vkCmdInsertDebugUtilsLabelEXT = (PFN_vkCmdInsertDebugUtilsLabelEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdInsertDebugUtilsLabelEXT" );
868 Debug::vkSetDebugUtilsObjectNameEXT = (PFN_vkSetDebugUtilsObjectNameEXT)vkGetDeviceProcAddr( vkDevice, "vkSetDebugUtilsObjectNameEXT" );
869 Debug::vkSetDebugUtilsObjectTagEXT = (PFN_vkSetDebugUtilsObjectTagEXT)vkGetDeviceProcAddr( vkDevice, "vkSetDebugUtilsObjectTagEXT" );
870 }
871
872 s_hasDynamicBlendStateSupport = config.debug.renderer.useExtensions && _device->supportsDynamicExtension3();
873 if ( s_hasDynamicBlendStateSupport )
874 {
875 vkCmdSetColorBlendEnableEXT = (PFN_vkCmdSetColorBlendEnableEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdSetColorBlendEnableEXT" );
876 vkCmdSetColorBlendEquationEXT = (PFN_vkCmdSetColorBlendEquationEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdSetColorBlendEquationEXT" );
877 vkCmdSetColorWriteMaskEXT = (PFN_vkCmdSetColorWriteMaskEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdSetColorWriteMaskEXT" );
878 }
879
880 s_hasPushDescriptorSupport = config.debug.renderer.useExtensions && _device->supportsPushDescriptors();
881 if ( s_hasPushDescriptorSupport )
882 {
883 vkCmdPushDescriptorSetKHR = (PFN_vkCmdPushDescriptorSetKHR)vkGetDeviceProcAddr( vkDevice, "vkCmdPushDescriptorSetKHR" );
884 }
885
886 s_hasDescriptorBufferSupport = config.debug.renderer.useExtensions && _device->supportsDescriptorBuffers();
887 if ( s_hasDescriptorBufferSupport )
888 {
889 vkGetDescriptorSetLayoutSizeEXT = (PFN_vkGetDescriptorSetLayoutSizeEXT)vkGetDeviceProcAddr( vkDevice, "vkGetDescriptorSetLayoutSizeEXT" );
890 vkGetDescriptorSetLayoutBindingOffsetEXT = (PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)vkGetDeviceProcAddr( vkDevice, "vkGetDescriptorSetLayoutBindingOffsetEXT" );
891 vkGetDescriptorEXT = (PFN_vkGetDescriptorEXT)vkGetDeviceProcAddr( vkDevice, "vkGetDescriptorEXT" );
892 vkCmdBindDescriptorBuffersEXT = (PFN_vkCmdBindDescriptorBuffersEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdBindDescriptorBuffersEXT" );
893 vkCmdSetDescriptorBufferOffsetsEXT = (PFN_vkCmdSetDescriptorBufferOffsetsEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdSetDescriptorBufferOffsetsEXT" );
894 vkCmdBindDescriptorBufferEmbeddedSamplersEXT = (PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)vkGetDeviceProcAddr( vkDevice, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT");
895 }
896
897 VKUtil::OnStartup( vkDevice );
898
899 VkFormatProperties2 properties{};
900 properties.sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
901
902 vkGetPhysicalDeviceFormatProperties2( physicalDevice, VK_FORMAT_D24_UNORM_S8_UINT, &properties );
903 s_depthFormatInformation._d24s8Supported = properties.formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
904 vkGetPhysicalDeviceFormatProperties2( physicalDevice, VK_FORMAT_D32_SFLOAT_S8_UINT, &properties );
905 s_depthFormatInformation._d32s8Supported = properties.formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
906 DIVIDE_ASSERT( s_depthFormatInformation._d24s8Supported || s_depthFormatInformation._d32s8Supported );
907
908
909 vkGetPhysicalDeviceFormatProperties2( physicalDevice, VK_FORMAT_X8_D24_UNORM_PACK32, &properties );
910 s_depthFormatInformation._d24x8Supported = properties.formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
911 vkGetPhysicalDeviceFormatProperties2( physicalDevice, VK_FORMAT_D32_SFLOAT, &properties );
912 s_depthFormatInformation._d32FSupported = properties.formatProperties.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
913 DIVIDE_ASSERT( s_depthFormatInformation._d24x8Supported || s_depthFormatInformation._d32FSupported );
914
915 VkPhysicalDeviceProperties deviceProperties{};
916 vkGetPhysicalDeviceProperties( physicalDevice, &deviceProperties );
917
918 DeviceInformation deviceInformation{};
919 deviceInformation._renderer = GPURenderer::UNKNOWN;
920 switch ( deviceProperties.vendorID )
921 {
923 deviceInformation._vendor = GPUVendor::NVIDIA;
924 deviceInformation._renderer = GPURenderer::GEFORCE;
925 break;
927 deviceInformation._vendor = GPUVendor::INTEL;
928 deviceInformation._renderer = GPURenderer::INTEL;
929 break;
930 case VK_VENDOR_ID_AMD:
931 deviceInformation._vendor = GPUVendor::AMD;
932 deviceInformation._renderer = GPURenderer::RADEON;
933 break;
934 case VK_VENDOR_ID_ARM:
935 deviceInformation._vendor = GPUVendor::ARM;
936 deviceInformation._renderer = GPURenderer::MALI;
937 break;
939 deviceInformation._vendor = GPUVendor::QUALCOMM;
940 deviceInformation._renderer = GPURenderer::ADRENO;
941 break;
943 deviceInformation._vendor = GPUVendor::IMAGINATION_TECH;
944 deviceInformation._renderer = GPURenderer::POWERVR;
945 break;
946 case VK_VENDOR_ID_MESA:
947 deviceInformation._vendor = GPUVendor::MESA;
948 deviceInformation._renderer = GPURenderer::SOFTWARE;
949 break;
950 default:
951 deviceInformation._vendor = GPUVendor::OTHER;
952 break;
953 }
954
955 Console::printfn( LOCALE_STR( "VK_VENDOR_STRING" ),
956 deviceProperties.deviceName,
957 deviceProperties.vendorID,
958 deviceProperties.deviceID,
959 deviceProperties.driverVersion,
960 deviceProperties.apiVersion );
961
962 {
963 U32 toolCount = 0u;
964 VK_CHECK( vkGetPhysicalDeviceToolProperties( physicalDevice, &toolCount, NULL ) );
965
966 if ( toolCount > 0u )
967 {
968 std::vector<VkPhysicalDeviceToolPropertiesEXT> tools;
969 tools.resize( toolCount );
970 VK_CHECK( vkGetPhysicalDeviceToolProperties( physicalDevice, &toolCount, tools.data() ) );
971
972 Console::printfn( LOCALE_STR( "VK_TOOL_INFO" ), toolCount );
973
974 for ( VkPhysicalDeviceToolPropertiesEXT& tool : tools )
975 {
976 Console::printfn( "\t{} {}\n", tool.name, tool.version );
977 }
978 }
979 }
980
981 deviceInformation._versionInfo._major = 1u;
982 deviceInformation._versionInfo._minor = to_U8( VK_API_VERSION_MINOR( deviceProperties.apiVersion ) );
983
984 deviceInformation._maxTextureUnits = deviceProperties.limits.maxDescriptorSetSampledImages;
985 deviceInformation._maxVertAttributeBindings = deviceProperties.limits.maxVertexInputBindings;
986 deviceInformation._maxVertAttributes = deviceProperties.limits.maxVertexInputAttributes;
987 deviceInformation._maxRTColourAttachments = deviceProperties.limits.maxColorAttachments;
988 deviceInformation._maxDrawIndirectCount = deviceProperties.limits.maxDrawIndirectCount;
989 deviceInformation._maxTextureSize = deviceProperties.limits.maxImageDimension2D;
990
991 deviceInformation._shaderCompilerThreads = 0xFFFFFFFF;
992 CLAMP( config.rendering.maxAnisotropicFilteringLevel,
993 U8_ZERO,
994 to_U8( deviceProperties.limits.maxSamplerAnisotropy ) );
995 deviceInformation._maxAnisotropy = config.rendering.maxAnisotropicFilteringLevel;
996
997 DIVIDE_ASSERT( PushConstantsStruct::Size() <= deviceProperties.limits.maxPushConstantsSize );
998
999 const VkSampleCountFlags counts = deviceProperties.limits.framebufferColorSampleCounts & deviceProperties.limits.framebufferDepthSampleCounts;
1000 U8 maxMSAASamples = 0u;
1001 if ( counts & VK_SAMPLE_COUNT_2_BIT )
1002 {
1003 maxMSAASamples = 2u;
1004 }
1005 if ( counts & VK_SAMPLE_COUNT_4_BIT )
1006 {
1007 maxMSAASamples = 4u;
1008 }
1009 if ( counts & VK_SAMPLE_COUNT_8_BIT )
1010 {
1011 maxMSAASamples = 8u;
1012 }
1013 if ( counts & VK_SAMPLE_COUNT_16_BIT )
1014 {
1015 maxMSAASamples = 16u;
1016 }
1017 if ( counts & VK_SAMPLE_COUNT_32_BIT )
1018 {
1019 maxMSAASamples = 32u;
1020 }
1021 if ( counts & VK_SAMPLE_COUNT_64_BIT )
1022 {
1023 maxMSAASamples = 64u;
1024 }
1025 // If we do not support MSAA on a hardware level for whatever reason, override user set MSAA levels
1026 config.rendering.MSAASamples = std::min( config.rendering.MSAASamples, maxMSAASamples );
1027 config.rendering.shadowMapping.csm.MSAASamples = std::min( config.rendering.shadowMapping.csm.MSAASamples, maxMSAASamples );
1028 config.rendering.shadowMapping.spot.MSAASamples = std::min( config.rendering.shadowMapping.spot.MSAASamples, maxMSAASamples );
1030
1031 // How many workgroups can we have per compute dispatch
1032 for ( U8 i = 0u; i < 3; ++i )
1033 {
1034 deviceInformation._maxWorgroupCount[i] = deviceProperties.limits.maxComputeWorkGroupCount[i];
1035 deviceInformation._maxWorgroupSize[i] = deviceProperties.limits.maxComputeWorkGroupSize[i];
1036 }
1037 deviceInformation._maxWorgroupInvocations = deviceProperties.limits.maxComputeWorkGroupInvocations;
1038 deviceInformation._maxComputeSharedMemoryBytes = deviceProperties.limits.maxComputeSharedMemorySize;
1039 Console::printfn( LOCALE_STR( "MAX_COMPUTE_WORK_GROUP_INFO" ),
1040 deviceInformation._maxWorgroupCount[0], deviceInformation._maxWorgroupCount[1], deviceInformation._maxWorgroupCount[2],
1041 deviceInformation._maxWorgroupSize[0], deviceInformation._maxWorgroupSize[1], deviceInformation._maxWorgroupSize[2],
1042 deviceInformation._maxWorgroupInvocations );
1043 Console::printfn( LOCALE_STR( "MAX_COMPUTE_SHARED_MEMORY_SIZE" ), deviceInformation._maxComputeSharedMemoryBytes / 1024 );
1044
1045 // Maximum number of varying components supported as outputs in the vertex shader
1046 deviceInformation._maxVertOutputComponents = deviceProperties.limits.maxVertexOutputComponents;
1047 Console::printfn( LOCALE_STR( "MAX_VERTEX_OUTPUT_COMPONENTS" ), deviceInformation._maxVertOutputComponents );
1048
1049 deviceInformation._offsetAlignmentBytesUBO = deviceProperties.limits.minUniformBufferOffsetAlignment;
1050 deviceInformation._maxSizeBytesUBO = deviceProperties.limits.maxUniformBufferRange;
1051 deviceInformation._offsetAlignmentBytesSSBO = deviceProperties.limits.minStorageBufferOffsetAlignment;
1052 deviceInformation._maxSizeBytesSSBO = deviceProperties.limits.maxStorageBufferRange;
1053 deviceInformation._maxSSBOBufferBindings = deviceProperties.limits.maxPerStageDescriptorStorageBuffers;
1054
1055 const bool UBOSizeOver1Mb = deviceInformation._maxSizeBytesUBO / 1024 > 1024;
1056 Console::printfn( LOCALE_STR( "GL_VK_UBO_INFO" ),
1057 deviceProperties.limits.maxDescriptorSetUniformBuffers,
1058 (deviceInformation._maxSizeBytesUBO / 1024) / (UBOSizeOver1Mb ? 1024 : 1),
1059 UBOSizeOver1Mb ? "Mb" : "Kb",
1060 deviceInformation._offsetAlignmentBytesUBO );
1061 Console::printfn( LOCALE_STR( "GL_VK_SSBO_INFO" ),
1062 deviceInformation._maxSSBOBufferBindings,
1063 deviceInformation._maxSizeBytesSSBO / 1024 / 1024,
1064 deviceProperties.limits.maxDescriptorSetStorageBuffers,
1065 deviceInformation._offsetAlignmentBytesSSBO );
1066
1067 deviceInformation._maxClipAndCullDistances = deviceProperties.limits.maxCombinedClipAndCullDistances;
1068 deviceInformation._maxClipDistances = deviceProperties.limits.maxClipDistances;
1069 deviceInformation._maxCullDistances = deviceProperties.limits.maxCullDistances;
1070
1071 GFXDevice::OverrideDeviceInformation( deviceInformation );
1072
1073 VK_API::s_stateTracker._device = _device.get();
1074
1075
1077
1078 VmaAllocatorCreateInfo allocatorInfo = {};
1079 allocatorInfo.physicalDevice = physicalDevice;
1080 allocatorInfo.device = vkDevice;
1081 allocatorInfo.instance = _vkbInstance.instance;
1082 allocatorInfo.vulkanApiVersion = VK_API_VERSION_1_3;
1083 allocatorInfo.preferredLargeHeapBlockSize = 0;
1084
1085 vmaCreateAllocator( &allocatorInfo, &_allocator );
1086 GetStateTracker()._allocatorInstance._allocator = &_allocator;
1087
1088 VkPipelineCacheCreateInfo pipelineCacheCreateInfo{};
1089 pipelineCacheCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
1090
1091 vector<Byte> pipeline_data;
1092 std::ifstream data;
1093 const FileError errCache = readFile( PipelineCacheLocation(), PipelineCacheFileName.string(), FileType::BINARY, data );
1094 if ( errCache == FileError::NONE )
1095 {
1096 data.seekg(0, std::ios::end);
1097 const size_t fileSize = to_size(data.tellg());
1098 data.seekg(0);
1099 pipeline_data.resize(fileSize);
1100 data.read(reinterpret_cast<char*>(pipeline_data.data()), fileSize);
1101
1102 pipelineCacheCreateInfo.initialDataSize = fileSize;
1103 pipelineCacheCreateInfo.pInitialData = pipeline_data.data();
1104 }
1105 else
1106 {
1107 Console::errorfn( LOCALE_STR( "ERROR_VK_PIPELINE_CACHE_LOAD" ), Names::fileError[to_base( errCache )] );
1108 }
1109
1110 if (data.is_open())
1111 {
1112 data.close();
1113 }
1114
1115 if ( _context.context().config().runtime.usePipelineCache )
1116 {
1117 VK_CHECK( vkCreatePipelineCache( vkDevice, &pipelineCacheCreateInfo, nullptr, &_pipelineCache ) );
1118 }
1119
1120 initStatePerWindow( perWindowContext );
1121
1122 s_stateTracker.init(_device.get(), &perWindowContext);
1123 s_stateTracker._assertOnAPIError = &config.debug.renderer.assertOnRenderAPIError;
1124 s_stateTracker._enabledAPIDebugging = &config.debug.renderer.enableRenderAPIDebugging;
1125
1126 return ErrorCode::NO_ERR;
1127 }
1128
1129 void VK_API::destroyPipeline( CompiledPipeline& pipeline, bool defer )
1130 {
1131 if ( !pipeline._isValid )
1132 {
1133 // This should be the only place where this flag is set, and, as such, we already handled the destruction of the pipeline
1134 DIVIDE_ASSERT( pipeline._vkPipelineLayout == VK_NULL_HANDLE );
1135 DIVIDE_ASSERT( pipeline._vkPipeline == VK_NULL_HANDLE );
1136 DIVIDE_ASSERT( pipeline._vkPipelineWireframe == VK_NULL_HANDLE );
1137 return;
1138 }
1139
1140 DIVIDE_ASSERT( pipeline._vkPipelineLayout != VK_NULL_HANDLE );
1141 DIVIDE_ASSERT( pipeline._vkPipeline != VK_NULL_HANDLE );
1142
1143 const auto deletePipeline = [layout = pipeline._vkPipelineLayout, pipeline = pipeline._vkPipeline, wireframePipeline = pipeline._vkPipelineWireframe]( VkDevice device )
1144 {
1145 vkDestroyPipelineLayout( device, layout, nullptr );
1146 vkDestroyPipeline( device, pipeline, nullptr );
1147 if ( wireframePipeline != VK_NULL_HANDLE )
1148 {
1149 vkDestroyPipeline( device, wireframePipeline, nullptr );
1150 }
1151 };
1152
1153 if (!defer )
1154 {
1155 deletePipeline(_device->getVKDevice());
1156 }
1157 else
1158 {
1159 VK_API::RegisterCustomAPIDelete(deletePipeline, true );
1160 }
1161
1162 pipeline._vkPipelineLayout = VK_NULL_HANDLE;
1163 pipeline._vkPipeline = VK_NULL_HANDLE;
1164 pipeline._vkPipelineWireframe = VK_NULL_HANDLE;
1165 pipeline._isValid = false;
1166 }
1167
// NOTE(review): the declaring line (doc listing line 1168) is missing from this
// extraction, so the exact signature is unknown here. The body destroys every
// cached CompiledPipeline immediately (defer == false) and then empties the
// cache map — presumably VK_API's "destroy all pipelines" teardown helper.
// Confirm the name/signature against the full source file.
1169 {
1170 for ( auto& it : _compiledPipelines )
1171 {
// defer == false: the vkDestroy* calls in destroyPipeline run immediately.
1172 destroyPipeline( it.second, false );
1173 }
1174
// All entries are now invalid; drop them from the cache.
1175 _compiledPipelines.clear();
1176 }
1177
// NOTE(review): the declaring line (doc listing line 1178) and several interior
// lines (doc lines 1180, 1191, 1209, 1238) are missing from this extraction.
// The body reads like the renderer shutdown path (likely closeRenderingAPI):
// it destroys samplers, drains the device, saves the pipeline cache, tears
// down the VMA allocator, per-window state and finally the vkb instance.
// Confirm the signature and the omitted lines against the full file.
1179 {
1181
1182 // Destroy sampler objects
1183 {
1184 for ( auto& sampler : s_samplerMap )
1185 {
1186 vkSamplerObject::Destruct( sampler.second );
1187 }
1188 s_samplerMap.clear();
1189 }
1190
1192 if ( _device != nullptr )
1193 {
1194 if ( _device->getVKDevice() != VK_NULL_HANDLE )
1195 {
// Block until the GPU is idle, then force-flush both delete queues and drop
// all descriptor state. (The second argument `true` presumably means "flush
// everything regardless of age" — verify against the queue implementation.)
1196 vkDeviceWaitIdle( _device->getVKDevice() );
1197 s_transientDeleteQueue.flush( _device->getVKDevice(), true );
1198 s_deviceDeleteQueue.flush( _device->getVKDevice(), true );
1199 for ( auto& pool : s_stateTracker._descriptorAllocators )
1200 {
1201 pool._handle = {};
1202 pool._allocatorPool.reset();
1203 }
1204 _descriptorLayoutCache.reset();
1205 _descriptorSetLayouts.fill( VK_NULL_HANDLE );
1206 _descriptorSets.fill( VK_NULL_HANDLE );
1207 _dummyDescriptorSet = VK_NULL_HANDLE;
1208
1210 }
1211
// Serialize the driver's pipeline cache to disk so the next run can warm-start
// pipeline creation; failure to write is logged but not fatal.
1212 if ( _pipelineCache != nullptr )
1213 {
1214 size_t size{};
1215 VK_CHECK( vkGetPipelineCacheData( _device->getVKDevice(), _pipelineCache, &size, nullptr ) );
1216 /* Get data of pipeline cache */
1217 vector<Byte> data( size );
1218 VK_CHECK( vkGetPipelineCacheData( _device->getVKDevice(), _pipelineCache, &size, data.data() ) );
1219 /* Write pipeline cache data to a file in binary format */
1220 const FileError err = writeFile( PipelineCacheLocation(), PipelineCacheFileName.string(), data.data(), size, FileType::BINARY );
1221 if ( err != FileError::NONE )
1222 {
1223 Console::errorfn( LOCALE_STR( "ERROR_VK_PIPELINE_CACHE_SAVE" ), Names::fileError[to_base( err )] );
1224 }
1225 vkDestroyPipelineCache( _device->getVKDevice(), _pipelineCache, nullptr );
1226 }
1227 if ( _allocator != VK_NULL_HANDLE )
1228 {
1229 vmaDestroyAllocator( _allocator );
1230 _allocator = VK_NULL_HANDLE;
1231 }
1232
1233 for ( auto& state : _perWindowState )
1234 {
1235 destroyStatePerWindow(state.second);
1236 }
1237 _perWindowState.clear();
1239 _device.reset();
1240 }
1241
// Finally tear down the vkb-created VkInstance (and its debug messenger).
1242 vkb::destroy_instance( _vkbInstance );
1243 _vkbInstance = {};
1244 }
1245
1246
/// Submits a single draw command. Two paths:
///  - no source buffer (_id == 0): a raw, bufferless draw; if the vertex count
///    is zero, a topology-dependent default count is substituted first.
///  - with a source buffer: the owning VertexDataInterface issues the draw,
///    using a thread_local 1-entry cache to avoid pool lookups in hot loops.
/// Returns false only when the active topology has no default vertex count
/// (the `default:` case of the switch).
/// NOTE(review): several case labels (doc lines 1263-1265, 1267-1271 — e.g.
/// LINES / TRIANGLES variants) are missing from this extraction; confirm the
/// full switch against the original file.
1247 bool VK_API::Draw( const GenericDrawCommand& cmd, VkCommandBuffer cmdBuffer )
1248 {
1250
1252
1253 if ( cmd._sourceBuffer._id == 0u )
1254 {
// Bufferless draws cannot be indexed.
1255 DIVIDE_ASSERT( cmd._cmd.indexCount == 0u );
1256
1257 if ( cmd._cmd.vertexCount == 0u )
1258 {
// Copy the command so we can patch in a non-zero vertex count.
1259 GenericDrawCommand drawCmd = cmd;
1260 switch ( VK_API::GetStateTracker()._pipeline._topology )
1261 {
1262 case PrimitiveTopology::POINTS: drawCmd._cmd.vertexCount = 1u; break;
1266 case PrimitiveTopology::LINES_ADJANCENCY: drawCmd._cmd.vertexCount = 2u; break;
1272 case PrimitiveTopology::PATCH: drawCmd._cmd.vertexCount = 4u; break;
1273 default: return false;
1274 }
1275 VKUtil::SubmitRenderCommand(drawCmd, cmdBuffer, false);
1276 }
1277 else
1278 {
1279 VKUtil::SubmitRenderCommand( cmd, cmdBuffer, false );
1280 }
1281 }
1282 else
1283 {
1284 // Because this can only happen on the main thread, try and avoid costly lookups for hot-loop drawing
1285 thread_local VertexDataInterface::Handle s_lastID = { U16_MAX, 0u };
1286 thread_local VertexDataInterface* s_lastBuffer = nullptr;
1287
// Single-entry memo: only hit the VDI pool when the source buffer changes.
1288 if ( s_lastID != cmd._sourceBuffer )
1289 {
1290 s_lastID = cmd._sourceBuffer;
1291 s_lastBuffer = VertexDataInterface::s_VDIPool.find( s_lastID );
1292 }
1293
1294 DIVIDE_ASSERT( s_lastBuffer != nullptr );
// Pass the command buffer through to the VDI's backend-specific draw.
1295 vkUserData userData{};
1296 userData._cmdBuffer = &cmdBuffer;
1297
1298 s_lastBuffer->draw( cmd, &userData );
1299 }
1300
1301 return true;
1302 }
1303
1304 namespace
1305 {
1306 [[nodiscard]] bool IsEmpty( const ShaderProgram::BindingsPerSetArray& bindings ) noexcept
1307 {
1308 for ( const auto& binding : bindings )
1309 {
1310 if ( binding._type != DescriptorSetBindingType::COUNT && binding._visibility != 0u )
1311 {
1312 return false;
1313 }
1314 }
1315
1316 return true;
1317 }
1318 }
1319
/// Translates engine-side descriptor set entries into Vulkan descriptor
/// writes, then either pushes them (push-descriptor path, PER_DRAW only when
/// supported) or builds/updates real descriptor sets and binds them with
/// vkCmdBindDescriptorSets, collecting dynamic offsets along the way.
/// NOTE(review): this extraction is missing the switch-case labels for the
/// binding types (doc lines 1365-1366, 1424, 1469, 1508), the DescriptorBuilder
/// declaration (doc lines 1523-1524), the push-descriptor command call header
/// (doc line 1539) and the dummy-set allocation (doc lines 1568-1569). Confirm
/// all of those against the full source file.
1320 bool VK_API::bindShaderResources( const DescriptorSetEntries& descriptorSetEntries )
1321 {
1323
1324 auto& program = GetStateTracker()._pipeline._program;
1325 DIVIDE_ASSERT( program != nullptr );
1326 auto& drawDescriptor = program->perDrawDescriptorSetLayout();
1327 const bool targetDescriptorEmpty = IsEmpty( drawDescriptor );
1328 const auto& setUsageData = program->setUsage();
1329
// Scratch storage reused across calls on this thread; imageInfoArray entries
// must stay alive until vkUpdateDescriptorSets / the push below consumes them.
1330 thread_local VkDescriptorImageInfo imageInfoArray[MAX_BINDINGS_PER_DESCRIPTOR_SET];
1331 thread_local eastl::fixed_vector<VkWriteDescriptorSet, MAX_BINDINGS_PER_DESCRIPTOR_SET> descriptorWrites;
1332 U8 imageInfoIndex = 0u;
1333
1334 bool needsBind = false;
1335 for ( const DescriptorSetEntry& entry : descriptorSetEntries )
1336 {
1337 const BaseType<DescriptorSetUsage> usageIdx = to_base( entry._usage );
1338
// Skip sets the active program does not use at all.
1339 if ( !setUsageData[usageIdx] )
1340 {
1341 continue;
1342 }
1343
// PER_DRAW with an empty layout has nothing to bind.
1344 if ( entry._usage == DescriptorSetUsage::PER_DRAW && targetDescriptorEmpty )
1345 {
1346 continue;
1347 }
1348
1349 const bool isPushDescriptor = s_hasPushDescriptorSupport && entry._usage == DescriptorSetUsage::PER_DRAW;
1350
1351 for ( U8 i = 0u; i < entry._set->_bindingCount; ++i )
1352 {
1353 const DescriptorSetBinding& srcBinding = entry._set->_bindings[i];
1354
// Ignore PER_DRAW bindings the program's layout does not declare.
1355 if ( entry._usage == DescriptorSetUsage::PER_DRAW &&
1356 drawDescriptor[srcBinding._slot]._type == DescriptorSetBindingType::COUNT )
1357 {
1358 continue;
1359 }
1360
1361 const VkShaderStageFlags stageFlags = GetFlagsForStageVisibility( srcBinding._shaderStageVisibility );
1362
1363 switch ( srcBinding._data._type )
1364 {
// Buffer binding case (case label omitted from this extraction — doc 1365-1366).
1367 {
1369
1370 const ShaderBufferEntry& bufferEntry = srcBinding._data._buffer;
1371
1372 DIVIDE_ASSERT( bufferEntry._buffer != nullptr );
1373
1374 VkBuffer buffer = static_cast<vkBufferImpl*>(bufferEntry._buffer->getBufferImpl())->_buffer;
1375
1376 const size_t readOffset = bufferEntry._queueReadIndex * bufferEntry._buffer->alignedBufferSize();
1377
1378 if ( entry._usage == DescriptorSetUsage::PER_BATCH && srcBinding._slot == 0 )
1379 {
1380 // Draw indirect buffer!
1384 }
1385 else
1386 {
1387 const VkDeviceSize offset = bufferEntry._range._startOffset * bufferEntry._buffer->getPrimitiveSize() + readOffset;
1388 DIVIDE_ASSERT( bufferEntry._range._length > 0u );
1389 const size_t boundRange = bufferEntry._range._length* bufferEntry._buffer->getPrimitiveSize();
1390
// Emit a write only when the cached binding no longer matches (or on the
// push-descriptor path, which always re-records).
1391 DynamicEntry& crtBufferInfo = s_dynamicBindings[usageIdx][srcBinding._slot];
1392 if ( isPushDescriptor || crtBufferInfo._info.buffer != buffer || crtBufferInfo._info.range > boundRange || (crtBufferInfo._stageFlags & stageFlags) != stageFlags)
1393 {
// Non-push bindings use offset 0 here; the actual offset goes through the
// dynamic-offset array instead.
1394 crtBufferInfo._info.buffer = buffer;
1395 crtBufferInfo._info.offset = isPushDescriptor ? offset : 0u;
1396 crtBufferInfo._info.range = boundRange;
1397 crtBufferInfo._stageFlags |= stageFlags;
1398
1399
1400 VkDescriptorSetLayoutBinding newBinding{};
1401 newBinding.descriptorCount = 1u;
1402 newBinding.descriptorType = VKUtil::vkDescriptorType( srcBinding._data._type, isPushDescriptor );
1403 newBinding.stageFlags = crtBufferInfo._stageFlags;
1404 newBinding.binding = srcBinding._slot;
1405 newBinding.pImmutableSamplers = nullptr;
1406
1407 descriptorWrites.push_back( vk::writeDescriptorSet( newBinding.descriptorType, newBinding.binding, &crtBufferInfo._info, 1u ) );
1408 }
1409 if (!isPushDescriptor )
1410 {
// Record the byte offset for the matching dynamic binding; the assert below
// requires that a slot entry exists for every non-push buffer binding.
1411 for ( auto& dynamicBinding : _descriptorDynamicBindings[usageIdx] )
1412 {
1413 if ( dynamicBinding._slot == srcBinding._slot )
1414 {
1415 dynamicBinding._offset = to_U32(offset);
1416 needsBind = true;
1417 break;
1418 }
1419 }
1420 DIVIDE_ASSERT( needsBind );
1421 }
1422 }
1423 } break;
// Combined image sampler case (case label omitted — doc 1424).
1425 {
1426 PROFILE_SCOPE( "Bind image sampler", Profiler::Category::Graphics );
1427
1428 if ( srcBinding._slot == INVALID_TEXTURE_BINDING )
1429 {
1430 continue;
1431 }
1432
1433 const DescriptorCombinedImageSampler& imageSampler = srcBinding._data._sampledImage;
1434 if ( imageSampler._image._srcTexture == nullptr ) [[unlikely]]
1435 {
1436 NOP(); //unbind request;
1437 }
1438 else
1439 {
1440 DIVIDE_ASSERT( TargetType( imageSampler._image ) != TextureType::COUNT );
1441
1442 const vkTexture* vkTex = static_cast<const vkTexture*>(imageSampler._image._srcTexture);
1443
// (The descriptor's declaration line is omitted from this extraction — doc 1444.)
1445 descriptor._usage = ImageUsage::SHADER_READ;
1446 descriptor._format = vkTex->vkFormat();
1447 descriptor._type = TargetType( imageSampler._image );
1448 descriptor._subRange = imageSampler._image._subRange;
1449
// Depth textures are sampled in the depth read-only layout.
1450 const VkImageLayout targetLayout = IsDepthTexture( vkTex->descriptor()._packing ) ? VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
1451
1452 size_t samplerHash = imageSampler._samplerHash;
1453 const VkSampler samplerHandle = GetSamplerHandle( imageSampler._sampler, samplerHash );
1454 const VkImageView imageView = vkTex->getImageView( descriptor );
1455
1456 VkDescriptorImageInfo& imageInfo = imageInfoArray[imageInfoIndex++];
1457 imageInfo = vk::descriptorImageInfo( samplerHandle, imageView, targetLayout );
1458
1459 VkDescriptorSetLayoutBinding newBinding{};
1460
1461 newBinding.descriptorCount = 1;
1462 newBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
1463 newBinding.stageFlags = stageFlags;
1464 newBinding.binding = srcBinding._slot;
1465
1466 descriptorWrites.push_back( vk::writeDescriptorSet( newBinding.descriptorType, newBinding.binding, &imageInfo, 1u ) );
1467 }
1468 } break;
// Storage image case (case label omitted — doc 1469).
1470 {
1472
1473 const DescriptorImageView& imageView = srcBinding._data._imageView;
1474 if ( imageView._image._srcTexture == nullptr )
1475 {
1476 continue;
1477 }
1478
// Storage-image bindings must target exactly one mip level.
1479 DIVIDE_ASSERT( imageView._image._srcTexture != nullptr && imageView._image._subRange._mipLevels._count == 1u );
1480
1481 const vkTexture* vkTex = static_cast<const vkTexture*>(imageView._image._srcTexture);
1482
1484
1486 descriptor._usage = imageView._usage;
1487 descriptor._format = vkTex->vkFormat();
1488 descriptor._type = TargetType( imageView._image );
1489 descriptor._subRange = imageView._image._subRange;
1490
1491 // Should use TextureType::TEXTURE_CUBE_ARRAY
1492 DIVIDE_ASSERT( descriptor._type != TextureType::TEXTURE_CUBE_MAP || descriptor._subRange._layerRange._count == 1u );
1493
// Writable images live in GENERAL; pure reads can use the read-only layout.
1494 const VkImageLayout targetLayout = descriptor._usage == ImageUsage::SHADER_READ ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL;
1495 VkDescriptorImageInfo& imageInfo = imageInfoArray[imageInfoIndex++];
1496 imageInfo = vk::descriptorImageInfo( VK_NULL_HANDLE, vkTex->getImageView( descriptor ), targetLayout );
1497
1498
1499 VkDescriptorSetLayoutBinding newBinding{};
1500
1501 newBinding.descriptorCount = 1;
1502 newBinding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
1503 newBinding.stageFlags = stageFlags;
1504 newBinding.binding = srcBinding._slot;
1505
1506 descriptorWrites.push_back( vk::writeDescriptorSet( newBinding.descriptorType, newBinding.binding, &imageInfo, 1u ) );
1507 } break;
// Remaining case (label omitted — doc 1508): intentionally does nothing.
1509 {
1511 } break;
1512 };
1513 }
1514
1515 if (!descriptorWrites.empty())
1516 {
1517 if ( !isPushDescriptor )
1518 {
1519 PROFILE_SCOPE( "Build and update sets", Profiler::Category::Graphics );
1520 PROFILE_TAG("Usage IDX", usageIdx);
1521
// (The builder declaration — doc 1523-1524 — is omitted from this extraction.)
1522 {
1525 builder.buildSetFromLayout( _descriptorSets[usageIdx], _descriptorSetLayouts[usageIdx], _device->getVKDevice() );
1526 }
1527 {
// Retarget every queued write at the freshly built set, then flush them.
1528 for ( VkWriteDescriptorSet& w : descriptorWrites )
1529 {
1530 w.dstSet = _descriptorSets[usageIdx];
1531 }
1532 VK_PROFILE( vkUpdateDescriptorSets, _device->getVKDevice(), to_U32( descriptorWrites.size() ), descriptorWrites.data(), 0, nullptr );
1533 }
1534 needsBind = true;
1535 }
1536 else
1537 {
// Push-descriptor path: the command call header (doc 1539, presumably
// vkCmdPushDescriptorSetKHR) is omitted from this extraction.
1538 const auto& pipeline = GetStateTracker()._pipeline;
1540 pipeline._bindPoint,
1541 pipeline._vkPipelineLayout,
1542 0,
1543 to_U32(descriptorWrites.size()),
1544 descriptorWrites.data());
1545 }
1546 descriptorWrites.clear();
1547 s_dynamicBindings[usageIdx] = {};
1548 }
1549 }
1550
1551 if ( needsBind )
1552 {
1553 PROFILE_SCOPE( "Bind descriptor sets", Profiler::Category::Graphics );
1554
// Lazily created empty layout used to allocate a dummy set for unused slots.
1555 thread_local VkDescriptorSetLayout tempLayout{ VK_NULL_HANDLE };
1556
1557 if ( _dummyDescriptorSet == VK_NULL_HANDLE )
1558 {
1559 if ( tempLayout == VK_NULL_HANDLE )
1560 {
1561 VkDescriptorSetLayoutCreateInfo layoutInfo{
1562 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
1563 .bindingCount = 0u
1564 };
1565 tempLayout = _descriptorLayoutCache->createDescriptorLayout( &layoutInfo );
1566 }
1567
// (The dummy-set allocation — doc 1568-1569 — is omitted from this extraction.)
1570 }
1571 }
1572 VkDescriptorSet tempSets[to_base(DescriptorSetUsage::COUNT)];
1573 s_dynamicOffsets.clear();
1574
// With push-descriptor support, set 0 (PER_DRAW) is pushed, so the regular
// bind starts at set index 1.
1575 const U8 offset = s_hasPushDescriptorSupport ? 1u : 0u;
1576 U8 setCount = 0u;
1577 for ( U8 i = 0; i < to_base( DescriptorSetUsage::COUNT ) - offset; ++i )
1578 {
1579 const bool setUsed = setUsageData[i + offset];
1580 tempSets[setCount++] = setUsed ? _descriptorSets[i + offset] : _dummyDescriptorSet;
1581 if ( setUsed )
1582 {
// Dynamic offsets must be pushed in binding order for every used set.
1583 for ( const DynamicBinding& binding : _descriptorDynamicBindings[i + offset] )
1584 {
1585 if ( binding._slot != U8_MAX )
1586 {
1587 s_dynamicOffsets.push_back( binding._offset );
1588 }
1589 }
1590 }
1591 }
1592
1593 const auto& pipeline = GetStateTracker()._pipeline;
1594 VK_PROFILE( vkCmdBindDescriptorSets, getCurrentCommandBuffer(),
1595 pipeline._bindPoint,
1596 pipeline._vkPipelineLayout,
1597 offset,
1598 setCount,
1599 tempSets,
1600 to_U32( s_dynamicOffsets.size() ),
1601 s_dynamicOffsets.data() );
1602 }
1603
1604
1605 return true;
1606 }
1607
/// Applies the delta between the last recorded dynamic state and
/// `currentState`/`blendStates` via vkCmdSet* dynamic-state commands,
/// issuing a command only for values that changed (or for everything when no
/// previous state was recorded, i.e. !activeState._isSet). On any change the
/// cached state block is updated at the end.
1608 void VK_API::bindDynamicState( const RenderStateBlock& currentState, const RTBlendStates& blendStates, VkCommandBuffer cmdBuffer ) noexcept
1609 {
1611
// `ret` tracks whether any vkCmdSet* was recorded this call.
1612 bool ret = false;
1613
1614 auto& activeState = GetStateTracker()._activeWindow->_activeState;
1615
1616 if ( currentState._stencilEnabled )
1617 {
1618 if ( !activeState._isSet || !activeState._block._stencilEnabled )
1619 {
1620 vkCmdSetStencilTestEnable( cmdBuffer, VK_TRUE );
1621 ret = true;
1622 }
1623 if ( !activeState._isSet || activeState._block._stencilMask != currentState._stencilMask )
1624 {
1625 vkCmdSetStencilCompareMask( cmdBuffer, VK_STENCIL_FACE_FRONT_AND_BACK, currentState._stencilMask );
1626 ret = true;
1627 }
1628 if ( !activeState._isSet || activeState._block._stencilWriteMask != currentState._stencilWriteMask )
1629 {
1630 vkCmdSetStencilWriteMask( cmdBuffer, VK_STENCIL_FACE_FRONT_AND_BACK, currentState._stencilWriteMask );
1631 ret = true;
1632 }
1633 if ( !activeState._isSet || activeState._block._stencilRef != currentState._stencilRef )
1634 {
1635 vkCmdSetStencilReference( cmdBuffer, VK_STENCIL_FACE_FRONT_AND_BACK, currentState._stencilRef );
1636 ret = true;
1637 }
1638 if ( !activeState._isSet ||
1639 activeState._block._stencilFailOp != currentState._stencilFailOp ||
1640 activeState._block._stencilPassOp != currentState._stencilPassOp ||
1641 activeState._block._stencilZFailOp != currentState._stencilZFailOp ||
1642 activeState._block._stencilFunc != currentState._stencilFunc )
1643 {
1644 vkCmdSetStencilOp(cmdBuffer,
1645 VK_STENCIL_FACE_FRONT_AND_BACK,
1646 vkStencilOpTable[to_base( currentState._stencilFailOp )],
1647 vkStencilOpTable[to_base( currentState._stencilPassOp )],
1648 vkStencilOpTable[to_base( currentState._stencilZFailOp )],
1649 vkCompareFuncTable[to_base( currentState._stencilFunc )]);
1650 ret = true;
1651 }
1652 }
// NOTE(review): this condition looks inverted — to disable the stencil test
// only when it was previously enabled, it should read
// `!activeState._isSet || activeState._block._stencilEnabled`. As written it
// re-disables when already disabled and skips the disable when the previous
// state had stencil enabled. Confirm against the full source / upstream.
1653 else if ( !activeState._isSet || !activeState._block._stencilEnabled )
1654 {
1655 vkCmdSetStencilTestEnable( cmdBuffer, VK_FALSE );
1656 ret = true;
1657 }
1658
1659 if ( !activeState._isSet || activeState._block._zFunc != currentState._zFunc )
1660 {
1661 vkCmdSetDepthCompareOp( cmdBuffer, vkCompareFuncTable[to_base( currentState._zFunc )] );
1662 ret = true;
1663 }
1664
1665 if ( !activeState._isSet || activeState._block._depthWriteEnabled != currentState._depthWriteEnabled )
1666 {
1667 vkCmdSetDepthWriteEnable( cmdBuffer, currentState._depthWriteEnabled );
1668 ret = true;
1669 }
1670
// Depth bias: a zero bias disables biasing entirely instead of setting zeros.
1671 if ( !activeState._isSet || !COMPARE( activeState._block._zBias, currentState._zBias ) || !COMPARE( activeState._block._zUnits, currentState._zUnits ) )
1672 {
1673 if ( !IS_ZERO( currentState._zBias ) )
1674 {
1675 vkCmdSetDepthBiasEnable(cmdBuffer, VK_TRUE);
1676 vkCmdSetDepthBias( cmdBuffer, currentState._zUnits, 0.f, currentState._zBias );
1677 }
1678 else
1679 {
1680 vkCmdSetDepthBiasEnable( cmdBuffer, VK_FALSE );
1681 }
1682 ret = true;
1683 }
1684
1685 if ( !activeState._isSet || activeState._block._cullMode != currentState._cullMode )
1686 {
1687 vkCmdSetCullMode( cmdBuffer, vkCullModeTable[to_base( currentState._cullMode )] );
1688 ret = true;
1689 }
1690
1691 if ( !activeState._isSet || activeState._block._frontFaceCCW != currentState._frontFaceCCW )
1692 {
1693 vkCmdSetFrontFace( cmdBuffer, currentState._frontFaceCCW ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE );
1694 ret = true;
1695 }
1696
1697 if ( !activeState._isSet || activeState._block._depthTestEnabled != currentState._depthTestEnabled )
1698 {
1699 vkCmdSetDepthTestEnable( cmdBuffer, currentState._depthTestEnabled );
1700 ret = true;
1701 }
1702
1703 if ( !activeState._isSet || activeState._block._rasterizationEnabled != currentState._rasterizationEnabled )
1704 {
// Vulkan's flag is "discard", i.e. the inverse of "rasterization enabled".
1705 vkCmdSetRasterizerDiscardEnable( cmdBuffer, !currentState._rasterizationEnabled );
1706 ret = true;
1707 }
1708
1709 if ( !activeState._isSet || activeState._block._primitiveRestartEnabled != currentState._primitiveRestartEnabled )
1710 {
1711 vkCmdSetPrimitiveRestartEnable( cmdBuffer, currentState._primitiveRestartEnabled );
1712 ret = true;
1713 }
1714
// Extended-dynamic-state path (EXT): colour write mask + blend equations.
// NOTE(review): `count` used below is declared on a line omitted from this
// extraction (doc line 1717) — presumably the colour-attachment count.
1715 if ( s_hasDynamicBlendStateSupport )
1716 {
1718 if ( !activeState._isSet || activeState._block._colourWrite != currentState._colourWrite )
1719 {
1720 thread_local std::array<VkColorComponentFlags, to_base( RTColourAttachmentSlot::COUNT )> writeMask;
1721 const VkColorComponentFlags colourFlags = (currentState._colourWrite.b[0] == 1 ? VK_COLOR_COMPONENT_R_BIT : 0) |
1722 (currentState._colourWrite.b[1] == 1 ? VK_COLOR_COMPONENT_G_BIT : 0) |
1723 (currentState._colourWrite.b[2] == 1 ? VK_COLOR_COMPONENT_B_BIT : 0) |
1724 (currentState._colourWrite.b[3] == 1 ? VK_COLOR_COMPONENT_A_BIT : 0);
1725 writeMask.fill(colourFlags);
1726 vkCmdSetColorWriteMaskEXT( cmdBuffer, 0, count, writeMask.data() );
1727 ret = true;
1728 }
1729
1730 if ( !activeState._isSet || activeState._blendStates != blendStates )
1731 {
1732 thread_local std::array<VkBool32, to_base( RTColourAttachmentSlot::COUNT )> blendEnabled;
1733 thread_local std::array<VkColorBlendEquationEXT, to_base( RTColourAttachmentSlot::COUNT )> blendEquations;
1734
1735 for ( U8 i = 0u; i < to_base( RTColourAttachmentSlot::COUNT ); ++i )
1736 {
1737 const BlendingSettings& blendState = blendStates._settings[i];
1738
1739 blendEnabled[i] = blendState.enabled() ? VK_TRUE : VK_FALSE;
1740
1741 auto& equation = blendEquations[i];
1742 equation.srcColorBlendFactor = vkBlendTable[to_base( blendState.blendSrc() )];
1743 equation.dstColorBlendFactor = vkBlendTable[to_base( blendState.blendDest() )];
1744 equation.colorBlendOp = vkBlendOpTable[to_base( blendState.blendOp() )];
// When no separate alpha op is specified, mirror the colour equation.
1745 if ( blendState.blendOpAlpha() != BlendOperation::COUNT )
1746 {
1747 equation.alphaBlendOp = vkBlendOpTable[to_base( blendState.blendOpAlpha() )];
1748 equation.dstAlphaBlendFactor = vkBlendTable[to_base( blendState.blendDestAlpha() )];
1749 equation.srcAlphaBlendFactor = vkBlendTable[to_base( blendState.blendSrcAlpha() )];
1750 }
1751 else
1752 {
1753 equation.srcAlphaBlendFactor = equation.srcColorBlendFactor;
1754 equation.dstAlphaBlendFactor = equation.dstColorBlendFactor;
1755 equation.alphaBlendOp = equation.colorBlendOp;
1756 }
1757 }
1758
1759 vkCmdSetColorBlendEnableEXT(cmdBuffer, 0, count, blendEnabled.data());
1760 vkCmdSetColorBlendEquationEXT(cmdBuffer, 0, count, blendEquations.data());
1761
1762 activeState._blendStates = blendStates;
1763 ret = true;
1764 }
1765 }
1766
// Only refresh the cached block when something was actually recorded.
1767 if ( ret )
1768 {
1769 activeState._block = currentState;
1770 activeState._isSet = true;
1771 }
1772 }
1773
1774 ShaderResult VK_API::bindPipeline( const Pipeline& pipeline, VkCommandBuffer cmdBuffer )
1775 {
1777
1778 size_t stateHash = pipeline.stateHash();
1779 Util::Hash_combine(stateHash, GetStateTracker()._renderTargetFormatHash );
1781 {
1782 Util::Hash_combine(stateHash, pipeline.blendStateHash());
1783 }
1784
1785 CompiledPipeline& compiledPipeline = _compiledPipelines[stateHash];
1786 if ( !compiledPipeline._isValid )
1787 {
1789
1790 thread_local RenderStateBlock defaultState{};
1791 thread_local VkDescriptorSetLayout dummyLayout = VK_NULL_HANDLE;
1792
1793 if ( dummyLayout == VK_NULL_HANDLE )
1794 {
1795 VkDescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo
1796 {
1797 .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
1798 .bindingCount = 0u
1799 };
1800 dummyLayout = _descriptorLayoutCache->createDescriptorLayout( &descriptorSetLayoutCreateInfo );
1801 }
1802
1803 const PipelineDescriptor& pipelineDescriptor = pipeline.descriptor();
1804 ShaderProgram* program = Get( pipelineDescriptor._shaderProgramHandle );
1805 if ( program == nullptr )
1806 {
1807 const auto handle = pipelineDescriptor._shaderProgramHandle;
1808 Console::errorfn( LOCALE_STR( "ERROR_GLSL_INVALID_HANDLE" ), handle._index, handle._generation );
1809 return ShaderResult::Failed;
1810 }
1811
1812 compiledPipeline._program = static_cast<vkShaderProgram*>(program);
1813 compiledPipeline._topology = pipelineDescriptor._primitiveTopology;
1814 const RenderStateBlock& currentState = pipelineDescriptor._stateBlock;
1815
1816 VkPushConstantRange push_constant;
1817 push_constant.offset = 0u;
1818 push_constant.size = to_U32( PushConstantsStruct::Size() );
1819 push_constant.stageFlags = compiledPipeline._program->stageMask();
1820 compiledPipeline._stageFlags = push_constant.stageFlags;
1821
1822 VkPipelineLayoutCreateInfo pipeline_layout_info = vk::pipelineLayoutCreateInfo( 0u );
1823 pipeline_layout_info.pPushConstantRanges = &push_constant;
1824 pipeline_layout_info.pushConstantRangeCount = 1;
1825
1826 const ShaderProgram::BindingsPerSetArray& drawLayout = compiledPipeline._program->perDrawDescriptorSetLayout();
1827
1828 DynamicBindings dynamicBindings{};
1830 compiledPipeline._program->dynamicBindings(dynamicBindings);
1831 compiledPipeline._program->descriptorSetLayout( _descriptorSetLayouts[to_base( DescriptorSetUsage::PER_DRAW )] );
1832
1833 const auto& setUsageData = compiledPipeline._program->setUsage();
1834
1835 VkDescriptorSetLayout tempLayouts[to_base( DescriptorSetUsage::COUNT )];
1836
1837 for ( U8 i = 0u; i < to_base( DescriptorSetUsage::COUNT ); ++i )
1838 {
1839 tempLayouts[i] = setUsageData[i] ? _descriptorSetLayouts[i] : dummyLayout;
1840 }
1841 pipeline_layout_info.pSetLayouts = tempLayouts;
1842 pipeline_layout_info.setLayoutCount = to_base( DescriptorSetUsage::COUNT );
1843
1844 VK_CHECK( vkCreatePipelineLayout( _device->getVKDevice(), &pipeline_layout_info, nullptr, &compiledPipeline._vkPipelineLayout ) );
1845
1846 //build the stage-create-info for both vertex and fragment stages. This lets the pipeline know the shader modules per stage
1847 const auto& shaderStages = compiledPipeline._program->shaderStages();
1848
1849 PipelineBuilder pipelineBuilder;
1850
1851 bool isGraphicsPipeline = false;
1852 for ( const auto& stage : shaderStages )
1853 {
1854 pipelineBuilder._shaderStages.push_back( vk::pipelineShaderStageCreateInfo( stage._shader->stageMask(), stage._shader->handle() ) );
1855 isGraphicsPipeline = isGraphicsPipeline || stage._shader->stageMask() != VK_SHADER_STAGE_COMPUTE_BIT;
1856 }
1857 compiledPipeline._bindPoint = isGraphicsPipeline ? VK_PIPELINE_BIND_POINT_GRAPHICS : VK_PIPELINE_BIND_POINT_COMPUTE;
1858
1859 //vertex input controls how to read vertices from vertex buffers. We aren't using it yet
1861 //connect the pipeline builder vertex input info to the one we get from Vertex
1862 const VertexInputDescription vertexDescription = getVertexDescription( pipelineDescriptor._vertexFormat );
1863 pipelineBuilder._vertexInputInfo.pVertexAttributeDescriptions = vertexDescription.attributes.data();
1864 pipelineBuilder._vertexInputInfo.vertexAttributeDescriptionCount = to_U32( vertexDescription.attributes.size() );
1865 pipelineBuilder._vertexInputInfo.pVertexBindingDescriptions = vertexDescription.bindings.data();
1866 pipelineBuilder._vertexInputInfo.vertexBindingDescriptionCount = to_U32( vertexDescription.bindings.size() );
1867
1868 //input assembly is the configuration for drawing triangle lists, strips, or individual points.
1869 //we are just going to draw triangle list
1870 pipelineBuilder._inputAssembly = vk::pipelineInputAssemblyStateCreateInfo( vkPrimitiveTypeTable[to_base( pipelineDescriptor._primitiveTopology )], 0u, defaultState._primitiveRestartEnabled );
1871 //configure the rasterizer to draw filled triangles
1873 vkFillModeTable[to_base( currentState._fillMode )],
1874 vkCullModeTable[to_base( defaultState._cullMode)],
1875 defaultState._frontFaceCCW ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE);
1876 pipelineBuilder._rasterizer.rasterizerDiscardEnable = !defaultState._rasterizationEnabled;
1877
1878 VkSampleCountFlagBits msaaSampleFlags = VK_SAMPLE_COUNT_1_BIT;
1879 const U8 msaaSamples = GetStateTracker()._activeMSAASamples;
1880 if ( msaaSamples > 0u )
1881 {
1882 assert( isPowerOfTwo( msaaSamples ) );
1883 msaaSampleFlags = static_cast<VkSampleCountFlagBits>(msaaSamples);
1884 }
1885 pipelineBuilder._multisampling = vk::pipelineMultisampleStateCreateInfo( msaaSampleFlags );
1886 pipelineBuilder._multisampling.minSampleShading = 1.f;
1887 pipelineBuilder._multisampling.alphaToCoverageEnable = pipelineDescriptor._alphaToCoverage ? VK_TRUE : VK_FALSE;
1888 if ( msaaSamples > 0u )
1889 {
1890 pipelineBuilder._multisampling.sampleShadingEnable = VK_TRUE;
1891 }
1892 VkStencilOpState stencilOpState{};
1893 stencilOpState.failOp = vkStencilOpTable[to_base( defaultState._stencilFailOp )];
1894 stencilOpState.passOp = vkStencilOpTable[to_base( defaultState._stencilPassOp )];
1895 stencilOpState.depthFailOp = vkStencilOpTable[to_base( defaultState._stencilZFailOp )];
1896 stencilOpState.compareOp = vkCompareFuncTable[to_base( defaultState._stencilFunc )];
1897 stencilOpState.compareMask = defaultState._stencilMask;
1898 stencilOpState.writeMask = defaultState._stencilWriteMask;
1899 stencilOpState.reference = defaultState._stencilRef;
1900
1901 pipelineBuilder._depthStencil = vk::pipelineDepthStencilStateCreateInfo( defaultState._depthTestEnabled, defaultState._depthWriteEnabled, vkCompareFuncTable[to_base(defaultState._zFunc)]);
1902 pipelineBuilder._depthStencil.stencilTestEnable = defaultState._stencilEnabled;
1903 pipelineBuilder._depthStencil.front = stencilOpState;
1904 pipelineBuilder._depthStencil.back = stencilOpState;
1905 pipelineBuilder._rasterizer.depthBiasEnable = !IS_ZERO(defaultState._zBias);
1906 pipelineBuilder._rasterizer.depthBiasConstantFactor = defaultState._zUnits;
1907 pipelineBuilder._rasterizer.depthBiasClamp = defaultState._zUnits;
1908 pipelineBuilder._rasterizer.depthBiasSlopeFactor = defaultState._zBias;
1909
1911 {
1912 const P32 cWrite = currentState._colourWrite;
1913 VkPipelineColorBlendAttachmentState blend = vk::pipelineColorBlendAttachmentState(
1914 (cWrite.b[0] == 1 ? VK_COLOR_COMPONENT_R_BIT : 0) |
1915 (cWrite.b[1] == 1 ? VK_COLOR_COMPONENT_G_BIT : 0) |
1916 (cWrite.b[2] == 1 ? VK_COLOR_COMPONENT_B_BIT : 0) |
1917 (cWrite.b[3] == 1 ? VK_COLOR_COMPONENT_A_BIT : 0),
1918 VK_FALSE );
1919
1920 for ( U8 i = 0u; i < to_base( RTColourAttachmentSlot::COUNT ); ++i )
1921 {
1922 const BlendingSettings& blendState = pipelineDescriptor._blendStates._settings[i];
1923
1924 blend.blendEnable = blendState.enabled() ? VK_TRUE : VK_FALSE;
1925 blend.colorBlendOp = vkBlendOpTable[to_base( blendState.blendOp() )];
1926 blend.srcColorBlendFactor = vkBlendTable[to_base( blendState.blendSrc() )];
1927 blend.dstColorBlendFactor = vkBlendTable[to_base( blendState.blendDest() )];
1928 if ( blendState.blendOpAlpha() != BlendOperation::COUNT )
1929 {
1930 blend.alphaBlendOp = vkBlendOpTable[to_base( blendState.blendOpAlpha() )];
1931 blend.dstAlphaBlendFactor = vkBlendTable[to_base( blendState.blendDestAlpha() )];
1932 blend.srcAlphaBlendFactor = vkBlendTable[to_base( blendState.blendSrcAlpha() )];
1933 }
1934 else
1935 {
1936 blend.srcAlphaBlendFactor = blend.srcColorBlendFactor;
1937 blend.dstAlphaBlendFactor = blend.dstColorBlendFactor;
1938 blend.alphaBlendOp = blend.colorBlendOp;
1939 }
1940 pipelineBuilder._colorBlendAttachments.emplace_back( blend );
1941 }
1942 }
1943
1944 //use the triangle layout we created
1945 pipelineBuilder._pipelineLayout = compiledPipeline._vkPipelineLayout;
1947
1948 compiledPipeline._vkPipeline = pipelineBuilder.build_pipeline( _device->getVKDevice(), _pipelineCache, isGraphicsPipeline );
1949
1950 if ( isGraphicsPipeline && IsTriangles( pipelineDescriptor._primitiveTopology ) )
1951 {
1952 pipelineBuilder._rasterizer.polygonMode = VK_POLYGON_MODE_LINE;
1953 compiledPipeline._vkPipelineWireframe = pipelineBuilder.build_pipeline( _device->getVKDevice(), _pipelineCache, true );
1954
1955 Debug::SetObjectName( _device->getVKDevice(), (uint64_t)compiledPipeline._vkPipelineWireframe, VK_OBJECT_TYPE_PIPELINE, Util::StringFormat("{}_wireframe", program->resourceName().c_str()).c_str());
1956 }
1957
1958 Debug::SetObjectName( _device->getVKDevice(), (uint64_t)compiledPipeline._vkPipelineLayout, VK_OBJECT_TYPE_PIPELINE_LAYOUT, program->resourceName().c_str() );
1959 Debug::SetObjectName( _device->getVKDevice(), (uint64_t)compiledPipeline._vkPipeline, VK_OBJECT_TYPE_PIPELINE, program->resourceName().c_str() );
1960
1961 compiledPipeline._isValid = true;
1962 }
1963
1964 VK_PROFILE( vkCmdBindPipeline, cmdBuffer, compiledPipeline._bindPoint, compiledPipeline._vkPipeline );
1965
1966 GetStateTracker()._pipeline = compiledPipeline;
1967 if ( GetStateTracker()._pipelineStageMask != compiledPipeline._stageFlags )
1968 {
1969 GetStateTracker()._pipelineStageMask = compiledPipeline._stageFlags;
1971 }
1972
1973 bindDynamicState( pipeline.descriptor()._stateBlock, pipeline.descriptor()._blendStates, cmdBuffer );
1974 ResetDescriptorDynamicOffsets();
1975
1976 const U8 stageIdx = to_base( DescriptorSetUsage::PER_DRAW );
1977 _descriptorSetLayouts[stageIdx] = compiledPipeline._program->descriptorSetLayout();
1979 {
1980 _descriptorDynamicBindings[stageIdx] = compiledPipeline._program->dynamicBindings();
1981 }
1982 return compiledPipeline._program->validatePreBind(false);
1983 }
1984
1986 {
1988
1989 if ( _uniformsNeedLock )
1990 {
1991 _uniformsNeedLock = false;
1994 }
1995 }
1996
1997 namespace
1998 {
2000 {
2001 VkBuffer _srcBuffer{ VK_NULL_HANDLE };
2002 VkBuffer _dstBuffer{ VK_NULL_HANDLE };
2004 };
2005
2007 using BarrierContainer = eastl::fixed_vector<VkBufferMemoryBarrier2, 32, true>;
2008 using BatchedTransferQueue = eastl::fixed_vector<VKTransferQueue::TransferRequest, 64, false>;
2009
// Fills "memBarrierOut" with a VkBufferMemoryBarrier2 covering the destination
// range of "request".
//  - toWrite == true : order the request's previous consumers (dstStageMask /
//    dstAccessMask) BEFORE an upcoming transfer write (acquire-for-write).
//  - toWrite == false: publish a completed transfer write TO the request's
//    destination stages/accesses (release-after-write).
// Only the destination buffer is barriered; the staging source needs none.
2010 void PrepareTransferRequest( const VKTransferQueue::TransferRequest& request, bool toWrite, VkBufferMemoryBarrier2& memBarrierOut )
2011 {
2012 memBarrierOut = vk::bufferMemoryBarrier2();
2014 if ( toWrite )
2015 {
// Previous users of the range become the source scope of the barrier ...
2016 memBarrierOut.srcStageMask = request.dstStageMask;
2017 memBarrierOut.srcAccessMask = request.dstAccessMask;
2018
// ... and the pending transfer write is the destination scope.
2019 memBarrierOut.dstStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
2020 memBarrierOut.dstAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT;
2021 }
2022 else
2023 {
// Mirror image: the finished transfer write is made visible to the
// stages/accesses that will consume the data next.
2024 memBarrierOut.srcStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
2025 memBarrierOut.srcAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT;
2026
2027 memBarrierOut.dstStageMask = request.dstStageMask;
2028 memBarrierOut.dstAccessMask = request.dstAccessMask;
2029 }
2030
// Barrier the exact byte range the copy touches in the destination buffer.
2031 memBarrierOut.offset = request.dstOffset;
2032 memBarrierOut.size = request.size;
2033 memBarrierOut.buffer = request.dstBuffer;
2034 }
2035
// Appends one barrier per batched transfer request (direction controlled by
// "toWrite", see PrepareTransferRequest), then records all accumulated
// barriers - including any already present in "barriers" from the caller -
// as a single vkCmdPipelineBarrier2 and empties the container.
// No-op if the container ends up empty.
2036 void FlushBarriers( BarrierContainer& barriers, BatchedTransferQueue& transferQueueBatched, VkCommandBuffer cmd, bool toWrite )
2037 {
2039
2040 for ( const auto& request : transferQueueBatched )
2041 {
2042 PrepareTransferRequest( request, toWrite, barriers.emplace_back() );
2043 }
2044
2045 if ( !barriers.empty() )
2046 {
// One submission for the whole batch keeps driver overhead down.
2047 VkDependencyInfo dependencyInfo = vk::dependencyInfo();
2048 dependencyInfo.bufferMemoryBarrierCount = to_U32( barriers.size() );
2049 dependencyInfo.pBufferMemoryBarriers = barriers.data();
2050
2051 VK_PROFILE( vkCmdPipelineBarrier2, cmd, &dependencyInfo );
// Clear but keep capacity; the container is reused across flushes.
2052 efficient_clear( barriers );
2053 }
2054 }
2055
// Records one vkCmdCopyBuffer2 per (src, dst) buffer pair, each carrying all
// of the regions previously batched for that pair by PrepareBufferCopyBarriers.
// Assumes the required pre-copy barriers have already been recorded.
2056 void FlushCopyRequests( CopyContainer& copyRequests, VkCommandBuffer cmd )
2057 {
2059
2060 for ( const PerBufferCopies& request : copyRequests )
2061 {
2062 VkCopyBufferInfo2 copyInfo = { .sType = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2 };
2063 copyInfo.dstBuffer = request._dstBuffer;
2064 copyInfo.srcBuffer = request._srcBuffer;
2065 copyInfo.regionCount = to_U32( request._copiesPerBuffer.size() );
2066 copyInfo.pRegions = request._copiesPerBuffer.data();
2067
2068 VK_PROFILE( vkCmdCopyBuffer2, cmd, &copyInfo );
2069 }
2070 }
2071
// Groups the batched transfer requests into per-(src, dst)-buffer-pair copy
// lists so each pair can later be flushed with a single vkCmdCopyBuffer2.
// NOTE: despite the name, this builds VkBufferCopy2 regions, not barriers.
// NOTE(review): the duplicate-pair search is a linear scan over copyRequests
// for every request (O(n*m)) - presumably fine for the small batch sizes the
// fixed_vector caps imply; confirm before growing those limits.
2072 void PrepareBufferCopyBarriers( CopyContainer& copyRequests, BatchedTransferQueue& transferQueueBatched )
2073 {
2075
2076 copyRequests.clear();
2077 copyRequests.reserve( transferQueueBatched.size() );
2078
// Reused for every region; only offsets/size change per request.
2079 VkBufferCopy2 copy{ .sType = VK_STRUCTURE_TYPE_BUFFER_COPY_2 };
2080
2081 for ( const auto& request : transferQueueBatched )
2082 {
2083 copy.dstOffset = request.dstOffset;
2084 copy.srcOffset = request.srcOffset;
2085 copy.size = request.size;
2086
// Append to an existing (src, dst) bucket if one exists ...
2087 bool found = false;
2088 for ( PerBufferCopies& entry : copyRequests )
2089 {
2090 if ( entry._srcBuffer == request.srcBuffer && entry._dstBuffer == request.dstBuffer )
2091 {
2092 entry._copiesPerBuffer.emplace_back( copy );
2093 found = true;
2094 break;
2095 }
2096 }
2097
// ... otherwise start a new bucket for this buffer pair.
2098 if ( !found )
2099 {
2100 PerBufferCopies& cRequest = copyRequests.emplace_back();
2101 cRequest._srcBuffer = request.srcBuffer;
2102 cRequest._dstBuffer = request.dstBuffer;
2103 cRequest._copiesPerBuffer.emplace_back( copy );
2104 }
2105 }
2106 }
2107
// Drains "transferQueue", splitting its requests in two:
//  - requests with a valid srcBuffer are real copies and are collected into
//    "transferQueueBatched" for later batched recording;
//  - srcBuffer == VK_NULL_HANDLE requests are barrier-only and are converted
//    directly into release barriers appended to "barriers" (flushed by the
//    caller's first FlushBarriers pass).
// The queue is left empty on return.
2108 void BatchTransferQueue(BarrierContainer& barriers, BatchedTransferQueue& transferQueueBatched, VKTransferQueue& transferQueue )
2109 {
2111
2112 transferQueueBatched.clear();
2113
2114 while ( !transferQueue._requests.empty() )
2115 {
2116 const VKTransferQueue::TransferRequest& request = transferQueue._requests.front();
2117 if ( request.srcBuffer != VK_NULL_HANDLE )
2118 {
2119 transferQueueBatched.push_back( request );
2120 }
2121 else
2122 {
// toWrite == false: publish-only barrier, no copy follows for this request.
2123 PrepareTransferRequest( request, false, barriers.emplace_back() );
2124 }
2125
2126 transferQueue._requests.pop_front();
2127 }
2128 }
2129
// Records every pending transfer in "transferQueue" on "cmdBuffer" as:
// pre-copy barriers -> batched copies -> post-copy (publish) barriers,
// then clears the queue's dirty flag.
// Scratch containers are thread_local so their capacity is reused across
// calls without reallocation; safe because recording happens on one thread
// per command buffer.
2130 void FlushTransferQueue( VkCommandBuffer cmdBuffer, VKTransferQueue& transferQueue )
2131 {
2133
2134 thread_local vector<PerBufferCopies> s_copyRequests;
2135 thread_local BarrierContainer s_barriers{};
2136 thread_local BatchedTransferQueue s_transferQueueBatched;
2137
// Split barrier-only requests from real copies (fills s_barriers for the former).
2138 BatchTransferQueue(s_barriers, s_transferQueueBatched, transferQueue );
// Acquire-for-write barriers for the batched copies + any barrier-only requests.
2139 FlushBarriers(s_barriers, s_transferQueueBatched, cmdBuffer, true );
2140 PrepareBufferCopyBarriers( s_copyRequests, s_transferQueueBatched );
2141 FlushCopyRequests( s_copyRequests, cmdBuffer );
// Publish the written ranges to their destination stages.
2142 FlushBarriers(s_barriers, s_transferQueueBatched, cmdBuffer, false );
2143
2144 s_transferQueueBatched.clear();
2145 transferQueue._dirty.store(false);
2146 }
2147 };
2148
2149 void VK_API::SubmitTransferRequest( const VKTransferQueue::TransferRequest& request, VkCommandBuffer cmd )
2150 {
2152
2153 VkBufferMemoryBarrier2 barriers[2] = {};
2154 VkDependencyInfo dependencyInfo = vk::dependencyInfo();
2155 dependencyInfo.bufferMemoryBarrierCount = 1u;
2156 if ( request.srcBuffer != VK_NULL_HANDLE )
2157 {
2158 PrepareTransferRequest( request, true, barriers[0] );
2159
2160 VkBufferCopy2 copy{ .sType = VK_STRUCTURE_TYPE_BUFFER_COPY_2 };
2161 copy.dstOffset = request.dstOffset;
2162 copy.srcOffset = request.srcOffset;
2163 copy.size = request.size;
2164
2165 VkCopyBufferInfo2 copyInfo = { .sType = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2 };
2166 copyInfo.dstBuffer = request.dstBuffer;
2167 copyInfo.srcBuffer = request.srcBuffer;
2168 copyInfo.regionCount = 1u;
2169 copyInfo.pRegions = &copy;
2170
2171 vkCmdCopyBuffer2( cmd, &copyInfo );
2172 dependencyInfo.bufferMemoryBarrierCount = 2u;
2173 }
2174
2175 PrepareTransferRequest( request, false, barriers[1] );
2176 dependencyInfo.pBufferMemoryBarriers = barriers;
2177 VK_PROFILE( vkCmdPipelineBarrier2, cmd, &dependencyInfo );
2178 }
2179
2181 {
2183
2184 if ( s_transferQueue._dirty.load() )
2185 {
2186 VK_API::GetStateTracker().IMCmdContext( QueueType::GRAPHICS )->flushCommandBuffer([](VkCommandBuffer cmd, [[maybe_unused]] const QueueType queue, [[maybe_unused]] const bool isDedicatedQueue )
2187 {
2189 }, "Deferred Buffer Uploads" );
2190 }
2191 }
2192
// Flushes any pending deferred buffer uploads onto "cmdBuffer".
// Fast path: a single pending request is recorded directly via
// SubmitTransferRequest; otherwise the whole queue goes through the batched
// FlushTransferQueue path (which also clears the dirty flag).
// NOTE(review): s_transferQueue._requests is read/popped here with only an
// atomic dirty-flag check visible; this rendered listing elides some lines in
// this function, presumably including a lock over the queue - confirm against
// the full source before assuming this is thread-safe.
2193 void VK_API::FlushBufferTransferRequests( VkCommandBuffer cmdBuffer )
2194 {
2196
2197 if ( s_transferQueue._dirty.load() )
2198 {
2201
2202 if ( s_transferQueue._requests.size() == 1 )
2203 {
2204 SubmitTransferRequest( s_transferQueue._requests.front(), cmdBuffer );
2205 s_transferQueue._requests.pop_front();
2206 s_transferQueue._dirty.store(false);
2207 }
2208 else
2209 {
2210 FlushTransferQueue( cmdBuffer, s_transferQueue );
2211 }
2212 }
2213 }
2214
2216 {
2217 static mat4<F32> s_defaultPushConstants[2] = { MAT4_ZERO, MAT4_ZERO };
2218 auto& stateTracker = GetStateTracker();
2219
2220 VkCommandBuffer cmdBuffer = getCurrentCommandBuffer();
2222
2223 if ( GFXDevice::IsSubmitCommand( cmd->type() ) )
2224 {
2225 FlushBufferTransferRequests();
2226 }
2227
2228 if ( stateTracker._activeRenderTargetID == SCREEN_TARGET_ID )
2229 {
2230 flushPushConstantsLocks();
2231 }
2232
2233 switch ( cmd->type() )
2234 {
2236 {
2237 PROFILE_SCOPE( "BEGIN_RENDER_PASS", Profiler::Category::Graphics );
2238
2239 thread_local VkRenderingAttachmentInfo dummyAttachment
2240 {
2241 .sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
2242 .imageView = VK_NULL_HANDLE,
2243 .imageLayout = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL,
2244 .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
2245 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
2246 .clearValue =
2247 {
2248 .color =
2249 {
2254 }
2255 }
2256 };
2257
2258 thread_local VkRenderingInfo renderingInfo{};
2259 thread_local vector<VkRenderingAttachmentInfo> attachmentInfo{ to_base( RTColourAttachmentSlot::COUNT ), dummyAttachment };
2260 thread_local vector<VkFormat> swapChainImageFormat( to_base( RTColourAttachmentSlot::COUNT ), VK_FORMAT_UNDEFINED);
2261
2263 PushDebugMessage( cmdBuffer, crtCmd->_name.c_str() );
2264
2265 stateTracker._activeRenderTargetID = crtCmd->_target;
2266
2267 // We can do this outside of a renderpass
2268 FlushBufferTransferRequests(cmdBuffer);
2269
2270 if ( crtCmd->_target == SCREEN_TARGET_ID )
2271 {
2272 PROFILE_SCOPE( "Draw to screen", Profiler::Category::Graphics);
2273
2274 VKSwapChain* swapChain = stateTracker._activeWindow->_swapChain.get();
2275
2276 attachmentInfo[0].imageView = swapChain->getCurrentImageView();
2277 swapChainImageFormat[0] = swapChain->getSwapChain().image_format;
2278 stateTracker._pipelineRenderInfo.colorAttachmentCount = to_U32(swapChainImageFormat.size());
2279 stateTracker._pipelineRenderInfo.pColorAttachmentFormats = swapChainImageFormat.data();
2280
2281 renderingInfo = {
2282 .sType = VK_STRUCTURE_TYPE_RENDERING_INFO,
2283 .renderArea = {
2284 .offset = {0, 0},
2285 .extent = swapChain->surfaceExtent()
2286 },
2287 .layerCount = 1u,
2288 .colorAttachmentCount = to_U32( attachmentInfo.size() ),
2289 .pColorAttachments = attachmentInfo.data(),
2290 };
2291
2292 VkImageMemoryBarrier2 imageBarrier = vk::imageMemoryBarrier2();
2293 imageBarrier.image = swapChain->getCurrentImage();
2294 imageBarrier.subresourceRange = {
2295 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
2296 .baseMipLevel = 0,
2297 .levelCount = 1,
2298 .baseArrayLayer = 0,
2299 .layerCount = 1,
2300 };
2301
2302 imageBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
2303 imageBarrier.dstStageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
2304 imageBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2305
2306 imageBarrier.srcAccessMask = VK_ACCESS_2_NONE;
2307 imageBarrier.srcStageMask = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT;
2308 imageBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2309
2310 VkDependencyInfo dependencyInfo = vk::dependencyInfo();
2311 dependencyInfo.imageMemoryBarrierCount = 1u;
2312 dependencyInfo.pImageMemoryBarriers = &imageBarrier;
2313
2314 VK_PROFILE( vkCmdPipelineBarrier2, cmdBuffer, &dependencyInfo);
2315
2316 stateTracker._activeMSAASamples = 1u;
2317 }
2318 else
2319 {
2320 vkRenderTarget* rt = static_cast<vkRenderTarget*>(_context.renderTargetPool().getRenderTarget( crtCmd->_target ));
2321 Attorney::VKAPIRenderTarget::begin( *rt, cmdBuffer, crtCmd->_descriptor, crtCmd->_clearDescriptor, stateTracker._pipelineRenderInfo );
2322 renderingInfo = rt->renderingInfo();
2323
2324 stateTracker._activeMSAASamples = rt->getSampleCount();
2325 }
2326
2327 {
2328 PROFILE_SCOPE( "Begin Rendering", Profiler::Category::Graphics );
2329
2330 stateTracker._renderTargetFormatHash = 0u;
2331 for ( U32 i = 0u; i < stateTracker._pipelineRenderInfo.colorAttachmentCount; ++i )
2332 {
2333 Util::Hash_combine( stateTracker._renderTargetFormatHash, stateTracker._pipelineRenderInfo.pColorAttachmentFormats[i]);
2334 }
2335
2336 const Rect<I32> renderArea = {
2337 renderingInfo.renderArea.offset.x,
2338 renderingInfo.renderArea.offset.y,
2339 to_I32(renderingInfo.renderArea.extent.width),
2340 to_I32(renderingInfo.renderArea.extent.height)
2341 };
2342
2343 stateTracker._activeRenderTargetDimensions = { renderArea.sizeX, renderArea.sizeY};
2344
2345 _context.setViewport( renderArea );
2346 _context.setScissor( renderArea );
2347 VK_PROFILE( vkCmdBeginRendering, cmdBuffer, &renderingInfo);
2348 }
2349 } break;
2351 {
2352 PROFILE_SCOPE( "END_RENDER_PASS", Profiler::Category::Graphics );
2353
2354 VK_PROFILE( vkCmdEndRendering, cmdBuffer );
2355 if ( stateTracker._activeRenderTargetID == SCREEN_TARGET_ID )
2356 {
2357 VkImageMemoryBarrier2 imageBarrier = vk::imageMemoryBarrier2();
2358 imageBarrier.image = stateTracker._activeWindow->_swapChain->getCurrentImage();
2359 imageBarrier.subresourceRange = {
2360 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
2361 .baseMipLevel = 0,
2362 .levelCount = 1,
2363 .baseArrayLayer = 0,
2364 .layerCount = 1,
2365 };
2366
2367 imageBarrier.dstAccessMask = VK_ACCESS_2_NONE;
2368 imageBarrier.dstStageMask = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT;
2369 imageBarrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
2370
2371 imageBarrier.srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT;
2372 imageBarrier.srcStageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
2373 imageBarrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2374
2375 VkDependencyInfo dependencyInfo = vk::dependencyInfo();
2376 dependencyInfo.imageMemoryBarrierCount = 1u;
2377 dependencyInfo.pImageMemoryBarriers = &imageBarrier;
2378
2379 VK_PROFILE( vkCmdPipelineBarrier2,cmdBuffer, &dependencyInfo );
2380 }
2381 else
2382 {
2383 vkRenderTarget* rt = static_cast<vkRenderTarget*>(_context.renderTargetPool().getRenderTarget( stateTracker._activeRenderTargetID ));
2385 Attorney::VKAPIRenderTarget::end( *rt, cmdBuffer, crtCmd->_transitionMask );
2386 stateTracker._activeRenderTargetID = SCREEN_TARGET_ID;
2387 }
2388
2389 PopDebugMessage( cmdBuffer );
2390 stateTracker._renderTargetFormatHash = 0u;
2391 stateTracker._activeMSAASamples = _context.context().config().rendering.MSAASamples;
2392 stateTracker._activeRenderTargetDimensions = s_stateTracker._activeWindow->_window->getDrawableSize();
2393 // We can do this outside of a renderpass
2394 FlushBufferTransferRequests( cmdBuffer );
2395 }break;
2397 {
2399
2401 vkRenderTarget* source = static_cast<vkRenderTarget*>(_context.renderTargetPool().getRenderTarget( crtCmd->_source ));
2402 vkRenderTarget* destination = static_cast<vkRenderTarget*>(_context.renderTargetPool().getRenderTarget( crtCmd->_destination ));
2403 Attorney::VKAPIRenderTarget::blitFrom( *destination, cmdBuffer, source, crtCmd->_params );
2404
2405 } break;
2407 {
2408 PROFILE_SCOPE( "BEGIN_GPU_QUERY", Profiler::Category::Graphics );
2409 }break;
2411 {
2412 PROFILE_SCOPE( "END_GPU_QUERY", Profiler::Category::Graphics );
2413 }break;
2415 {
2417
2418 const GFX::CopyTextureCommand* crtCmd = cmd->As<GFX::CopyTextureCommand>();
2419 vkTexture::Copy( cmdBuffer,
2420 static_cast<vkTexture*>(Get(crtCmd->_source)),
2421 crtCmd->_sourceMSAASamples,
2422 static_cast<vkTexture*>(Get(crtCmd->_destination)),
2424 crtCmd->_params );
2425 }break;
2427 {
2428 PROFILE_SCOPE( "CLEAR_TEXTURE", Profiler::Category::Graphics );
2429
2431 if ( crtCmd->_texture != INVALID_HANDLE<Texture> )
2432 {
2433 static_cast<vkTexture*>(Get(crtCmd->_texture))->clearData( cmdBuffer, crtCmd->_clearColour, crtCmd->_layerRange, crtCmd->_mipLevel );
2434 }
2435 }break;
2437 {
2439
2440 const GFX::ReadTextureCommand* crtCmd = cmd->As<GFX::ReadTextureCommand>();
2441 if ( crtCmd->_texture != INVALID_HANDLE<Texture> )
2442 {
2443 const ImageReadbackData data = static_cast<vkTexture*>(Get(crtCmd->_texture))->readData( cmdBuffer, crtCmd->_mipLevel, crtCmd->_pixelPackAlignment);
2444 crtCmd->_callback( data );
2445 }
2446 }break;
2448 {
2449 PROFILE_SCOPE( "BIND_PIPELINE", Profiler::Category::Graphics );
2450
2451 const Pipeline* pipeline = cmd->As<GFX::BindPipelineCommand>()->_pipeline;
2452 assert( pipeline != nullptr );
2453 if ( bindPipeline( *pipeline, cmdBuffer ) == ShaderResult::Failed )
2454 {
2455 const auto handle = pipeline->descriptor()._shaderProgramHandle;
2456 Console::errorfn( LOCALE_STR( "ERROR_GLSL_INVALID_BIND" ), handle._index, handle._generation );
2457 }
2458 } break;
2460 {
2461 PROFILE_SCOPE( "SEND_PUSH_CONSTANTS", Profiler::Category::Graphics );
2462
2463 if ( stateTracker._pipeline._vkPipeline != VK_NULL_HANDLE )
2464 {
2466 UniformData* uniforms = pushConstantsCmd->_uniformData;
2467 if ( uniforms != nullptr )
2468 {
2469 if ( stateTracker._pipeline._program->uploadUniformData( *uniforms, _context.descriptorSet( DescriptorSetUsage::PER_DRAW ).impl(), _uniformsMemCommand ) )
2470 {
2471 _context.descriptorSet( DescriptorSetUsage::PER_DRAW ).dirty( true );
2472 _uniformsNeedLock = _uniformsNeedLock || _uniformsMemCommand._bufferLocks.empty();
2473 }
2474 }
2475 if ( pushConstantsCmd->_fastData.set() )
2476 {
2477 VK_PROFILE( vkCmdPushConstants, cmdBuffer,
2478 stateTracker._pipeline._vkPipelineLayout,
2479 stateTracker._pipeline._program->stageMask(),
2480 0,
2482 pushConstantsCmd->_fastData.dataPtr() );
2483
2484 stateTracker._pushConstantsValid = true;
2485 }
2486 }
2487 } break;
2489 {
2490 PROFILE_SCOPE( "BEGIN_DEBUG_SCOPE", Profiler::Category::Graphics );
2491
2493 PushDebugMessage( cmdBuffer, crtCmd->_scopeName.c_str(), crtCmd->_scopeId );
2494 } break;
2496 {
2497 PROFILE_SCOPE( "END_DEBUG_SCOPE", Profiler::Category::Graphics );
2498
2499 PopDebugMessage( cmdBuffer );
2500 } break;
2502 {
2503 PROFILE_SCOPE( "ADD_DEBUG_MESSAGE", Profiler::Category::Graphics );
2504
2506 InsertDebugMessage( cmdBuffer, crtCmd->_msg.c_str(), crtCmd->_msgId );
2507 }break;
2509 {
2510 PROFILE_SCOPE( "COMPUTE_MIPMAPS", Profiler::Category::Graphics );
2511
2514
2515 PROFILE_SCOPE( "VK: View - based computation", Profiler::Category::Graphics );
2516 if ( crtCmd->_texture != INVALID_HANDLE<Texture> )
2517 {
2518 static_cast<vkTexture*>(Get( crtCmd->_texture ))->generateMipmaps( cmdBuffer, 0u, crtCmd->_layerRange._offset, crtCmd->_layerRange._count, crtCmd->_usage );
2519 }
2520 }break;
2522 {
2523 PROFILE_SCOPE( "DRAW_COMMANDS", Profiler::Category::Graphics );
2524
2525 const auto& drawCommands = cmd->As<GFX::DrawCommand>()->_drawCommands;
2526
2527 if ( stateTracker._pipeline._vkPipeline != VK_NULL_HANDLE )
2528 {
2529 if ( !stateTracker._pushConstantsValid )
2530 {
2531 VK_PROFILE( vkCmdPushConstants, cmdBuffer,
2532 stateTracker._pipeline._vkPipelineLayout,
2533 stateTracker._pipeline._program->stageMask(),
2534 0,
2536 &s_defaultPushConstants[0].mat );
2537 stateTracker._pushConstantsValid = true;
2538 }
2539
2540
2541 U32 drawCount = 0u;
2542 for ( const GenericDrawCommand& currentDrawCommand : drawCommands )
2543 {
2544 DIVIDE_ASSERT( currentDrawCommand._drawCount < _context.GetDeviceInformation()._maxDrawIndirectCount );
2545
2546 if ( isEnabledOption( currentDrawCommand, CmdRenderOptions::RENDER_GEOMETRY ) )
2547 {
2548 Draw( currentDrawCommand, cmdBuffer );
2549 ++drawCount;
2550 }
2551
2552 if ( isEnabledOption( currentDrawCommand, CmdRenderOptions::RENDER_WIREFRAME ) )
2553 {
2554 PrimitiveTopology oldTopology = stateTracker._pipeline._topology;
2555 stateTracker._pipeline._topology = PrimitiveTopology::LINES;
2556 VK_PROFILE( vkCmdBindPipeline, cmdBuffer, stateTracker._pipeline._bindPoint, stateTracker._pipeline._vkPipelineWireframe );
2557 Draw( currentDrawCommand, cmdBuffer );
2558 ++drawCount;
2559 VK_PROFILE( vkCmdBindPipeline, cmdBuffer, stateTracker._pipeline._bindPoint, stateTracker._pipeline._vkPipeline );
2560 stateTracker._pipeline._topology = oldTopology;
2561 }
2562 }
2563
2564 _context.registerDrawCalls( drawCount );
2565 }
2566 }break;
2568 {
2569 PROFILE_SCOPE( "DISPATCH_COMPUTE", Profiler::Category::Graphics );
2570
2571 if ( !stateTracker._pushConstantsValid )
2572 {
2573 VK_PROFILE( vkCmdPushConstants, cmdBuffer,
2574 stateTracker._pipeline._vkPipelineLayout,
2575 stateTracker._pipeline._program->stageMask(),
2576 0,
2578 &s_defaultPushConstants[0].mat );
2579 stateTracker._pushConstantsValid = true;
2580 }
2581
2582 DIVIDE_ASSERT( stateTracker._pipeline._topology == PrimitiveTopology::COMPUTE );
2583 if ( stateTracker._pipeline._vkPipeline != VK_NULL_HANDLE )
2584 {
2586 VK_PROFILE( vkCmdDispatch, cmdBuffer, crtCmd->_computeGroupSize.x, crtCmd->_computeGroupSize.y, crtCmd->_computeGroupSize.z );
2587 }
2588 } break;
2590 {
2591 PROFILE_SCOPE( "MEMORY_BARRIER", Profiler::Category::Graphics );
2592
2593 constexpr U8 MAX_BUFFER_BARRIERS_PER_CMD{64};
2594
2595 std::array<VkImageMemoryBarrier2, RT_MAX_ATTACHMENT_COUNT> imageBarriers{};
2596 U8 imageBarrierCount = 0u;
2597
2598 std::array<VkBufferMemoryBarrier2, MAX_BUFFER_BARRIERS_PER_CMD> bufferBarriers{};
2599 U8 bufferBarrierCount = 0u;
2600
2602
2603 SyncObjectHandle handle{};
2604 for ( const BufferLock& lock : crtCmd->_bufferLocks )
2605 {
2606 if ( lock._buffer == nullptr || lock._range._length == 0u )
2607 {
2608 continue;
2609 }
2610
2611 vkBufferImpl* vkBuffer = static_cast<vkBufferImpl*>(lock._buffer);
2612
2613 VkBufferMemoryBarrier2& memoryBarrier = bufferBarriers[bufferBarrierCount++];
2614 memoryBarrier = vk::bufferMemoryBarrier2();
2615 memoryBarrier.offset = lock._range._startOffset;
2616 memoryBarrier.size = lock._range._length == U32_MAX ? VK_WHOLE_SIZE : lock._range._length;
2617 memoryBarrier.buffer = vkBuffer->_buffer;
2618
2619 const bool isCommandBuffer = vkBuffer->_params._flags._usageType == BufferUsageType::COMMAND_BUFFER;
2620
2621 switch (lock._type )
2622 {
2624 {
2625 if ( handle._id == SyncObjectHandle::INVALID_SYNC_ID )
2626 {
2628 }
2629
2630 memoryBarrier.srcStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
2631 memoryBarrier.srcAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT;
2632 memoryBarrier.dstStageMask = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT | ALL_SHADER_STAGES;
2633 memoryBarrier.dstAccessMask = VK_ACCESS_2_MEMORY_READ_BIT;
2634 if ( isCommandBuffer )
2635 {
2636 memoryBarrier.dstStageMask |= VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT;
2637 memoryBarrier.dstAccessMask |= VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT;
2638 }
2639
2640 if ( !lock._buffer->lockRange( lock._range, handle ) )
2641 {
2643 }
2644 } break;
2646 {
2647 memoryBarrier.srcStageMask = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
2648 memoryBarrier.srcAccessMask = VK_ACCESS_2_SHADER_WRITE_BIT;
2649 memoryBarrier.dstStageMask = VK_PIPELINE_STAGE_2_HOST_BIT;
2650 memoryBarrier.dstAccessMask = VK_ACCESS_2_HOST_READ_BIT;
2651 } break;
2653 {
2654 memoryBarrier.srcStageMask = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
2655 memoryBarrier.srcAccessMask = VK_ACCESS_2_SHADER_WRITE_BIT;
2656 memoryBarrier.dstStageMask = ALL_SHADER_STAGES;
2657 memoryBarrier.dstAccessMask = VK_ACCESS_2_SHADER_READ_BIT;
2658 if ( isCommandBuffer )
2659 {
2660 memoryBarrier.dstStageMask |= VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT;
2661 memoryBarrier.dstAccessMask |= VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT;
2662 }
2663 } break;
2665 {
2666 memoryBarrier.srcStageMask = ALL_SHADER_STAGES;
2667 memoryBarrier.srcAccessMask = VK_ACCESS_2_SHADER_READ_BIT;
2668 if ( isCommandBuffer )
2669 {
2670 memoryBarrier.srcStageMask |= VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT;
2671 memoryBarrier.srcAccessMask |= VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT;
2672 }
2673 memoryBarrier.dstStageMask = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
2674 memoryBarrier.dstAccessMask = VK_ACCESS_2_SHADER_WRITE_BIT;
2675 } break;
2677 {
2678 memoryBarrier.srcStageMask = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
2679 memoryBarrier.srcAccessMask = VK_ACCESS_2_SHADER_WRITE_BIT;
2680 memoryBarrier.dstStageMask = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
2681 memoryBarrier.dstAccessMask = VK_ACCESS_2_SHADER_WRITE_BIT | VK_ACCESS_2_SHADER_READ_BIT;
2682 if ( isCommandBuffer )
2683 {
2684 memoryBarrier.dstStageMask |= VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT;
2685 memoryBarrier.dstAccessMask |= VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT;
2686 }
2687 } break;
2689 {
2690 memoryBarrier.srcStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
2691 memoryBarrier.srcAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT;
2692 memoryBarrier.dstStageMask = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT;
2693 memoryBarrier.dstAccessMask = VK_ACCESS_2_MEMORY_READ_BIT;
2694 }break;
2696 {
2697 memoryBarrier.srcStageMask = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT;
2698 memoryBarrier.srcAccessMask = VK_ACCESS_2_MEMORY_READ_BIT;
2699 memoryBarrier.dstStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
2700 memoryBarrier.dstAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT;
2701 }break;
2703 {
2704 memoryBarrier.srcStageMask = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT;
2705 memoryBarrier.srcAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT;
2706 memoryBarrier.dstStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT;
2707 memoryBarrier.dstAccessMask = VK_ACCESS_2_MEMORY_WRITE_BIT | VK_ACCESS_2_MEMORY_READ_BIT;
2708 }break;
2709 default : DIVIDE_UNEXPECTED_CALL(); break;
2710 }
2711
2712 if ( bufferBarrierCount == MAX_BUFFER_BARRIERS_PER_CMD )
2713 {
2714 // Too many buffer barriers. Flushing ...
2715 VkDependencyInfo dependencyInfo = vk::dependencyInfo();
2716 dependencyInfo.bufferMemoryBarrierCount = bufferBarrierCount;
2717 dependencyInfo.pBufferMemoryBarriers = bufferBarriers.data();
2718
2719 VK_PROFILE( vkCmdPipelineBarrier2, cmdBuffer, &dependencyInfo );
2720 bufferBarrierCount = 0u;
2721 }
2722 }
2723
2724 for ( const auto& it : crtCmd->_textureLayoutChanges )
2725 {
2726 if ( it._sourceLayout == it._targetLayout )
2727 {
2728 continue;
2729 }
2730 DIVIDE_ASSERT( it._targetLayout != ImageUsage::UNDEFINED);
2731
2732 const vkTexture* vkTex = static_cast<const vkTexture*>(it._targetView._srcTexture);
2733
2734 const bool isDepthTexture = IsDepthTexture( vkTex->descriptor()._packing );
2735
2737
2738 switch ( it._targetLayout )
2739 {
2741 {
2743 } break;
2745 {
2746 switch ( it._sourceLayout )
2747 {
2749 {
2751 } break;
2753 {
2755 } break;
2757 {
2759 }break;
2761 {
2763 } break;
2765 {
2767 } break;
2769 {
2771 } break;
2773 {
2775 } break;
2776 default:
2777 {
2779 } break;
2780 }
2781 } break;
2783 {
2784 switch ( it._sourceLayout )
2785 {
2787 {
2789 } break;
2791 {
2793 } break;
2795 {
2797 }break;
2799 {
2800 NOP(); // Both in general layout
2801 } break;
2803 {
2805 } break;
2807 {
2809 } break;
2811 {
2813 } break;
2814 default:
2815 {
2817 } break;
2818 }
2819 } break;
2821 {
2822 switch ( it._sourceLayout )
2823 {
2825 {
2827 } break;
2829 {
2831 } break;
2833 {
2834 NOP(); // Both in general layout
2835 }break;
2837 {
2839 } break;
2841 {
2843 } break;
2845 {
2847 } break;
2849 {
2851 } break;
2852 default:
2853 {
2855 } break;
2856 }
2857 } break;
2859 {
2861 } break;
2863 {
2865 } break;
2867 {
2869 } break;
2870 default: DIVIDE_UNEXPECTED_CALL();
2871 };
2872
2873 if ( transitionType != vkTexture::TransitionType::COUNT )
2874 {
2875 auto subRange = it._targetView._subRange;
2876 const VkImageSubresourceRange subResourceRange = {
2877 .aspectMask = vkTexture::GetAspectFlags( vkTex->descriptor() ),
2878 .baseMipLevel = subRange._mipLevels._offset,
2879 .levelCount = subRange._mipLevels._count == U16_MAX ? VK_REMAINING_MIP_LEVELS : subRange._mipLevels._count,
2880 .baseArrayLayer = subRange._layerRange._offset,
2881 .layerCount = subRange._layerRange._count == U16_MAX ? VK_REMAINING_ARRAY_LAYERS : subRange._layerRange._count,
2882 };
2883
2884 vkTexture::TransitionTexture( transitionType, subResourceRange, vkTex->image()->_image, imageBarriers[imageBarrierCount++] );
2885 }
2886 }
2887
2888 if ( imageBarrierCount > 0u || bufferBarrierCount > 0u)
2889 {
2890 VkDependencyInfo dependencyInfo = vk::dependencyInfo();
2891 dependencyInfo.imageMemoryBarrierCount = imageBarrierCount;
2892 dependencyInfo.pImageMemoryBarriers = imageBarriers.data();
2893 dependencyInfo.bufferMemoryBarrierCount = bufferBarrierCount;
2894 dependencyInfo.pBufferMemoryBarriers = bufferBarriers.data();
2895
2896 VK_PROFILE( vkCmdPipelineBarrier2, cmdBuffer, &dependencyInfo );
2897 }
2898 } break;
2899
2911
2913 default: DIVIDE_UNEXPECTED_CALL(); break;
2914 }
2915 }
2916
// Hook invoked before a GFX command buffer is replayed into Vulkan commands.
// NOTE(review): the executable statements of this body are elided in this
// rendered listing; judging by the surviving comment it conservatively resets
// cached push-constant state so the next draw re-sends defaults - confirm
// against the full source.
2917 void VK_API::preFlushCommandBuffer( [[maybe_unused]] const Handle<GFX::CommandBuffer> commandBuffer )
2918 {
2920
2923 // We don't really know what happened before this state and at worst this is going to end up into an
2924 // extra vkCmdPushConstants call with default data, so better safe.
2927 }
2928
// Hook invoked after a GFX command buffer has been fully replayed:
// releases uniform-buffer locks, destroys transient per-submit Vulkan objects,
// and resets the tracked render-target state back to the active window.
2929 void VK_API::postFlushCommandBuffer( [[maybe_unused]] const Handle<GFX::CommandBuffer> commandBuffer ) noexcept
2930 {
2932
2933 flushPushConstantsLocks();
// Transient resources queued for deletion during this submit can go now.
2934 s_transientDeleteQueue.flush( _device->getDevice() );
2935 GetStateTracker()._activeRenderTargetID = INVALID_RENDER_TARGET_ID;
// Fall back to the OS window's drawable size until the next render pass begins.
2936 GetStateTracker()._activeRenderTargetDimensions = s_stateTracker._activeWindow->_window->getDrawableSize();
2937 }
2938
2939 bool VK_API::setViewportInternal( const Rect<I32>& newViewport ) noexcept
2940 {
2941 return setViewportInternal( newViewport, getCurrentCommandBuffer() );
2942 }
2943
2944 bool VK_API::setViewportInternal( const Rect<I32>& newViewport, VkCommandBuffer cmdBuffer ) noexcept
2945 {
2947
2948 VkViewport targetViewport{};
2949 targetViewport.width = to_F32( newViewport.sizeX );
2950 targetViewport.height = -to_F32( newViewport.sizeY );
2951 targetViewport.x = to_F32(newViewport.offsetX);
2952 if ( newViewport.offsetY == 0 )
2953 {
2954 targetViewport.y = to_F32(newViewport.sizeY);
2955 }
2956 else
2957 {
2958 targetViewport.y = to_F32(/*newViewport.sizeY - */newViewport.offsetY);
2959 targetViewport.y = GetStateTracker()._activeRenderTargetDimensions.height + targetViewport.y;
2960 }
2961 targetViewport.minDepth = 0.f;
2962 targetViewport.maxDepth = 1.f;
2963
2964 vkCmdSetViewport( cmdBuffer, 0, 1, &targetViewport );
2965 return true;
2966 }
2967
2968 bool VK_API::setScissorInternal( const Rect<I32>& newScissor ) noexcept
2969 {
2970 return setScissorInternal( newScissor, getCurrentCommandBuffer() );
2971 }
2972
2973 bool VK_API::setScissorInternal( const Rect<I32>& newScissor, VkCommandBuffer cmdBuffer ) noexcept
2974 {
2976
2977 const VkOffset2D offset{ std::max( 0, newScissor.offsetX ), std::max( 0, newScissor.offsetY ) };
2978 const VkExtent2D extent{ to_U32( newScissor.sizeX ),to_U32( newScissor.sizeY ) };
2979 const VkRect2D targetScissor{ offset, extent };
2980 vkCmdSetScissor( cmdBuffer, 0, 1, &targetScissor );
2981 return true;
2982 }
2983
2984 VkDescriptorSetLayout VK_API::createLayoutFromBindings(const DescriptorSetUsage usage, const ShaderProgram::BindingsPerSetArray& bindings, DynamicBindings& dynamicBindings )
2985 {
2987
2988 thread_local eastl::fixed_vector<VkDescriptorSetLayoutBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET, false> layoutBinding{};
2989
2990 layoutBinding.clear();
2991 dynamicBindings.clear();
2992
2993 const bool isPushDescriptor = s_hasPushDescriptorSupport && usage == DescriptorSetUsage::PER_DRAW;
2994
2995 for ( U8 slot = 0u; slot < MAX_BINDINGS_PER_DESCRIPTOR_SET; ++slot )
2996 {
2997 if ( bindings[slot]._type == DescriptorSetBindingType::COUNT || (slot == 0 && usage == DescriptorSetUsage::PER_BATCH ))
2998 {
2999 continue;
3000 }
3001
3002 VkDescriptorSetLayoutBinding& newBinding = layoutBinding.emplace_back();
3003 newBinding.descriptorCount = 1u;
3004 newBinding.descriptorType = VKUtil::vkDescriptorType( bindings[slot]._type, isPushDescriptor );
3005 newBinding.stageFlags = GetFlagsForStageVisibility( bindings[slot]._visibility );
3006 newBinding.binding = slot;
3007 newBinding.pImmutableSamplers = nullptr;
3008
3009 if ( !isPushDescriptor && (bindings[slot]._type == DescriptorSetBindingType::UNIFORM_BUFFER || bindings[slot]._type == DescriptorSetBindingType::SHADER_STORAGE_BUFFER ))
3010 {
3011 dynamicBindings.emplace_back(DynamicBinding{
3012 ._offset = 0u,
3013 ._slot = slot
3014 });
3015 }
3016 }
3017
3018 eastl::sort(begin(dynamicBindings), end(dynamicBindings), []( const DynamicBinding& bindingA, const DynamicBinding& bindingB ) { return bindingA._slot <= bindingB._slot; });
3019
3020 VkDescriptorSetLayoutCreateInfo layoutCreateInfo = vk::descriptorSetLayoutCreateInfo( layoutBinding.data(), to_U32( layoutBinding.size() ) );
3021 if ( isPushDescriptor )
3022 {
3023 layoutCreateInfo.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;
3024 }
3025 return _descriptorLayoutCache->createDescriptorLayout( &layoutCreateInfo );
3026 }
3027
// Initializes the descriptor-layout cache and one descriptor allocator pool per
// DescriptorSetUsage. Pool-size multipliers are tuned per usage from the shader
// programs' declared binding counts; descriptor types the engine never allocates
// are zeroed out so the pools don't reserve space for them.
// NOTE(review): the function signature (VK_API::initDescriptorSets) and two 'if'
// conditions are elided in this listing — confirm against full source.
3029 {
3031
3032 _descriptorLayoutCache = std::make_unique<DescriptorLayoutCache>( _device->getVKDevice() );
3033
3034 for ( U8 i = 0u; i < to_base( DescriptorSetUsage::COUNT ); ++i )
3035 {
// (condition elided in this listing)
3037 {
3039 }
3040 }
3041
3042 for ( U8 i = 0u; i < to_base( DescriptorSetUsage::COUNT ); ++i )
3043 {
3044 auto& pool = s_stateTracker._descriptorAllocators[i];
// One pool generation per in-flight frame, plus one currently being recorded.
3045 pool._frameCount = Config::MAX_FRAMES_IN_FLIGHT + 1u;
3046 pool._allocatorPool.reset( vke::DescriptorAllocatorPool::Create( _device->getVKDevice(), pool._frameCount) );
3047
// Descriptor types never allocated by this backend: reserve zero pool space for them.
3048 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, 0.f );
3049 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER, 0.f );
3050 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER, 0.f );
3051 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, 0.f );
3052
// Size image/sampler pools from the shader-declared binding counts for this usage.
3053 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, ShaderProgram::GetBindingCount(static_cast<DescriptorSetUsage>(i), DescriptorSetBindingType::COMBINED_IMAGE_SAMPLER) * 1.f);
3054 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, ShaderProgram::GetBindingCount( static_cast<DescriptorSetUsage>(i), DescriptorSetBindingType::IMAGE ) * 1.f );
// (condition elided in this listing — presumably selects between plain and DYNAMIC
// buffer descriptors, e.g. for push-descriptor sets; confirm in full source)
3056 {
3057 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, ShaderProgram::GetBindingCount( static_cast<DescriptorSetUsage>(i), DescriptorSetBindingType::UNIFORM_BUFFER ) * 1.f );
3058 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, ShaderProgram::GetBindingCount( static_cast<DescriptorSetUsage>(i), DescriptorSetBindingType::SHADER_STORAGE_BUFFER ) * 1.f );
3059 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, 0.f);
3060 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, 0.f);
3061 }
3062 else
3063 {
3064 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC, ShaderProgram::GetBindingCount( static_cast<DescriptorSetUsage>(i), DescriptorSetBindingType::UNIFORM_BUFFER ) * 1.f );
3065 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC, ShaderProgram::GetBindingCount( static_cast<DescriptorSetUsage>(i), DescriptorSetBindingType::SHADER_STORAGE_BUFFER ) * 1.f );
3066 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 0.f );
3067 pool._allocatorPool->SetPoolSizeMultiplier( VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, 0.f );
3068 }
3069
3070 pool._handle = pool._allocatorPool->GetAllocator();
3071 }
3072 }
3073
/// Per-thread initialization hook required by the RenderAPI interface.
/// Intentionally a no-op for the Vulkan backend: no thread-local GL-style context
/// setup is needed here.
3074 void VK_API::onThreadCreated( [[maybe_unused]] const std::thread::id& threadID, [[maybe_unused]] const bool isMainRenderThread ) noexcept
3075 {
3076 }
3077
// VK_API::OnShaderReloaded (signature line elided in this listing): queues a reloaded
// shader program so pipelines referencing it can be refreshed later; null programs
// are ignored.
3079 {
3080 if ( program == nullptr )
3081 {
3082 return;
3083 }
3084
// Processed later by the render loop (s_reloadedShaders is drained elsewhere).
3085 s_reloadedShaders.push(program);
3086 }
3087
// VK_API::GetStateTracker (signature line elided in this listing): accessor for the
// backend-wide static state tracker instance.
3089 {
3090 return s_stateTracker;
3091 }
3092
/// Inserts a single (non-scoped) debug label into the command buffer, and records it as
/// the last inserted message for diagnostics. The label is green in capture tools.
/// NOTE(review): the guard condition around the label emission is elided in this listing —
/// presumably a debug-marker-support / GPU-validation check; confirm in full source.
3093 void VK_API::InsertDebugMessage( VkCommandBuffer cmdBuffer, const char* message, const U32 id )
3094 {
3096 {
3098
// Green label colour, RGBA.
3099 constexpr F32 color[4] = { 0.0f, 1.0f, 0.0f, 1.f };
3100
3101 VkDebugUtilsLabelEXT labelInfo{};
3102 labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
3103 labelInfo.pLabelName = message;
3104 memcpy( labelInfo.color, &color[0], sizeof( F32 ) * 4 );
3105
3106 Debug::vkCmdInsertDebugUtilsLabelEXT( cmdBuffer, &labelInfo );
3107 }
3108
// Remember the message/id pair even when label emission is disabled.
3109 GetStateTracker()._lastInsertedDebugMessage = { message, id };
3110 }
3111
/// Opens a scoped debug label region on the command buffer (paired with PopDebugMessage).
/// The label is grey in capture tools. Asserts that the nesting depth stays within
/// Config::MAX_DEBUG_SCOPE_DEPTH.
/// NOTE(review): the guard condition and the scope-depth bookkeeping line are elided in
/// this listing — confirm in full source.
3112 void VK_API::PushDebugMessage( VkCommandBuffer cmdBuffer, const char* message, const U32 id )
3113 {
3115 {
3117
// Grey label colour, RGBA.
3118 constexpr F32 color[4] = { 0.5f, 0.5f, 0.5f, 1.f };
3119
3120 VkDebugUtilsLabelEXT labelInfo{};
3121 labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
3122 labelInfo.pLabelName = message;
3123 memcpy( labelInfo.color, &color[0], sizeof( F32 ) * 4 );
3124 Debug::vkCmdBeginDebugUtilsLabelEXT( cmdBuffer, &labelInfo );
3125 }
3126
3127 assert( GetStateTracker()._debugScopeDepth < Config::MAX_DEBUG_SCOPE_DEPTH );
3129 }
3130
/// Closes the innermost scoped debug label region opened by PushDebugMessage.
/// NOTE(review): the body is almost entirely elided in this listing — presumably a
/// guarded vkCmdEndDebugUtilsLabelEXT call plus scope-depth bookkeeping; confirm in
/// full source.
3131 void VK_API::PopDebugMessage( VkCommandBuffer cmdBuffer )
3132 {
3134 {
3136
3138 }
3139
3142 }
3143
/// Returns the VkSampler for the given sampler descriptor, computing and caching its hash
/// in 'samplerHashInOut' if the caller passed INVALID_SAMPLER_HASH. Uses a per-thread
/// single-entry cache in front of the shared sampler map, and lazily constructs the
/// sampler object on a cache miss (double-checked lookup).
/// NOTE(review): the lock-acquisition lines inside both scoped blocks are elided in this
/// listing — presumably a shared lock for the first lookup and an exclusive lock for the
/// create path, guarding s_samplerMap via s_samplerMapLock; confirm in full source.
3145 VkSampler VK_API::GetSamplerHandle( const SamplerDescriptor sampler, size_t& samplerHashInOut )
3146 {
// Per-thread fast path: remembers the last hash/handle pair this thread resolved.
3147 thread_local size_t cached_hash = 0u;
3148 thread_local VkSampler cached_handle = VK_NULL_HANDLE;
3149
3151
3152 if ( samplerHashInOut == SamplerDescriptor::INVALID_SAMPLER_HASH )
3153 {
3154 samplerHashInOut = GetHash( sampler );
3155 }
3156 DIVIDE_ASSERT( samplerHashInOut != 0u );
3157
3158 if ( cached_hash == samplerHashInOut )
3159 {
3160 return cached_handle;
3161 }
3162 cached_hash = samplerHashInOut;
3163
3164 {
// First lookup: if a sampler object already exists for this hash, reuse its handle.
3166 // If we fail to find the sampler object for the given hash, fall through to the create path below.
3167 const SamplerObjectMap::const_iterator it = s_samplerMap.find( cached_hash );
3168 if ( it != std::cend( s_samplerMap ) )
3169 {
3170 // Return the Vulkan handle for the sampler object matching the specified hash value
3171 cached_handle = it->second;
3172 return cached_handle;
3173 }
3174 }
3175
3176 cached_handle = VK_NULL_HANDLE;
3177 {
3179 // Check again
3180 const SamplerObjectMap::const_iterator it = s_samplerMap.find( cached_hash );
3181 if ( it == std::cend( s_samplerMap ) )
3182 {
3183 // Cache miss. Create the sampler object now.
3184 // Create and store the newly created sampler object. VK_API is responsible for deleting these!
3185 const VkSampler samplerHandler = vkSamplerObject::Construct( sampler );
3186 emplace( s_samplerMap, cached_hash, samplerHandler );
3187 cached_handle = samplerHandler;
3188 }
3189 else
3190 {
// Another thread created it between the two lookups; reuse its handle.
3191 cached_handle = it->second;
3192 }
3193 }
3194
3195 return cached_handle;
3196 }
3197
3198 RenderTarget_uptr VK_API::newRT( const RenderTargetDescriptor& descriptor ) const
3199 {
3200 return std::make_unique<vkRenderTarget>( _context, descriptor );
3201 }
3202
3203 GenericVertexData_ptr VK_API::newGVD( U32 ringBufferLength, const std::string_view name ) const
3204 {
3205 return std::make_shared<vkGenericVertexData>( _context, ringBufferLength, name );
3206 }
3207
3208 ShaderBuffer_uptr VK_API::newSB( const ShaderBufferDescriptor& descriptor ) const
3209 {
3210 return std::make_unique<vkShaderBuffer>( _context, descriptor );
3211 }
3212}; //namespace Divide
VkFlags VkShaderStageFlags
Definition: GLSLToSPIRV.h:37
#define LOCALE_STR(X)
Definition: Localization.h:91
#define MOV(...)
#define SCOPE_EXIT
#define DIVIDE_ASSERT(...)
#define DIVIDE_UNEXPECTED_CALL()
#define NOP()
#define FORCE_INLINE
#define PROFILE_SCOPE_AUTO(CATEGORY)
Definition: Profiler.h:87
#define PROFILE_VK_INIT(DEVICES, PHYSICAL_DEVICES, CMD_QUEUES, CMD_QUEUES_FAMILY, NUM_CMD_QUEUS, FUNCTIONS)
Definition: Profiler.h:94
#define PROFILE_TAG(NAME,...)
Definition: Profiler.h:88
#define PROFILE_SCOPE(NAME, CATEGORY)
Definition: Profiler.h:86
#define PROFILE_VK_EVENT_AUTO_AND_CONTEX(BUFFER)
Definition: Profiler.h:101
char * argv[]
Definition: main.cpp:8
WindowManager & windowManager() noexcept
Definition: Application.inl:77
static void MaxMSAASamples(const U8 maxSampleCount) noexcept
Definition: Application.h:238
static void blitFrom(vkRenderTarget &rt, VkCommandBuffer cmdBuffer, vkRenderTarget *source, const RTBlitParams &params) noexcept
static void end(vkRenderTarget &rt, VkCommandBuffer cmdBuffer, const RTTransitionMask &mask)
static void begin(vkRenderTarget &rt, VkCommandBuffer cmdBuffer, const RTDrawDescriptor &descriptor, const RTClearDescriptor &clearPolicy, VkPipelineRenderingCreateInfo &pipelineRenderingCreateInfoOut)
static DescriptorBuilder Begin(DescriptorLayoutCache *layoutCache, vke::DescriptorAllocatorHandle *allocator)
bool buildSetFromLayout(VkDescriptorSet &set, const VkDescriptorSetLayout &layoutIn, VkDevice device)
vec2< U16 > getDrawableSize() const noexcept
bool minimized() const noexcept
const char * title() const noexcept
SDL_Window * getRawWindow() const noexcept
A rough-around-the-edges Adapter pattern that abstracts the actual rendering API and access to the GPU.
Definition: GFXDevice.h:215
static bool IsSubmitCommand(GFX::CommandType type) noexcept
Definition: GFXDevice.inl:168
static const DeviceInformation & GetDeviceInformation() noexcept
Definition: GFXDevice.inl:158
static U64 FrameCount() noexcept
Definition: GFXDevice.h:340
static void OverrideDeviceInformation(const DeviceInformation &info) noexcept
Definition: GFXDevice.inl:163
FORCE_INLINE I64 getGUID() const noexcept
Definition: GUIDWrapper.h:51
static void CleanExpiredSyncObjects(RenderAPI api, U64 frameNumber)
Definition: LockManager.cpp:29
static void Clear()
Definition: LockManager.cpp:64
static SyncObjectHandle CreateSyncObject(RenderAPI api, U8 flag=DEFAULT_SYNC_FLAG_INTERNAL)
T * find(PoolHandle handle) const
Definition: ObjectPool.inl:46
PlatformContext & context() noexcept
DisplayWindow & mainWindow() noexcept
Application & app() noexcept
Configuration & config() noexcept
U8 getSampleCount() const noexcept
FORCE_INLINE BufferUsageType getUsage() const noexcept
Definition: ShaderBuffer.h:72
virtual LockableBuffer * getBufferImpl()=0
FORCE_INLINE size_t getPrimitiveSize() const noexcept
Definition: ShaderBuffer.h:71
std::array< BindingsPerSetArray, to_base(DescriptorSetUsage::COUNT)> BindingSetData
static void DestroyStaticData()
std::array< BindingsPerSet, MAX_BINDINGS_PER_DESCRIPTOR_SET > BindingsPerSetArray
static BindingSetData & GetBindingSetData() noexcept
static U32 GetBindingCount(DescriptorSetUsage usage, DescriptorSetBindingType type)
static DepthFormatInformation s_depthFormatInformation
Definition: VKWrapper.h:184
static bool s_hasDescriptorBufferSupport
Definition: VKWrapper.h:181
static void PushDebugMessage(VkCommandBuffer cmdBuffer, const char *message, U32 id=U32_MAX)
Definition: VKWrapper.cpp:3112
static void RegisterTransferRequest(const VKTransferQueue::TransferRequest &request)
Definition: VKWrapper.cpp:549
static void PopDebugMessage(VkCommandBuffer cmdBuffer)
Definition: VKWrapper.cpp:3131
bool drawToWindow(DisplayWindow &window) override
Definition: VKWrapper.cpp:571
void closeRenderingAPI() override
Definition: VKWrapper.cpp:1178
static void RegisterCustomAPIDelete(DELEGATE< void, VkDevice > &&cbk, bool isResourceTransient)
Definition: VKWrapper.cpp:537
static bool s_hasValidationFeaturesSupport
Definition: VKWrapper.h:183
VK_API(GFXDevice &context) noexcept
Definition: VKWrapper.cpp:556
static bool s_hasDynamicBlendStateSupport
Definition: VKWrapper.h:182
bool frameStarted() override
Definition: VKWrapper.cpp:660
static eastl::stack< vkShaderProgram * > s_reloadedShaders
Definition: VKWrapper.h:168
void flushCommand(GFX::CommandBase *cmd) noexcept override
Definition: VKWrapper.cpp:2215
void idle(bool fast) noexcept override
Definition: VKWrapper.cpp:566
static SamplerObjectMap s_samplerMap
Definition: VKWrapper.h:162
hashMap< size_t, CompiledPipeline > _compiledPipelines
Definition: VKWrapper.h:149
static VKStateTracker & GetStateTracker() noexcept
Definition: VKWrapper.cpp:3088
static void OnShaderReloaded(vkShaderProgram *program)
Definition: VKWrapper.cpp:3078
static VKDeletionQueue s_transientDeleteQueue
Definition: VKWrapper.h:164
void destroyStatePerWindow(VKPerWindowState &windowState)
Definition: VKWrapper.cpp:752
std::array< VkDescriptorSetLayout, to_base(DescriptorSetUsage::COUNT)> _descriptorSetLayouts
Definition: VKWrapper.h:153
bool setScissorInternal(const Rect< I32 > &newScissor) noexcept override
Definition: VKWrapper.cpp:2968
void onThreadCreated(const std::thread::id &threadID, bool isMainRenderThread) noexcept override
Definition: VKWrapper.cpp:3074
GenericVertexData_ptr newGVD(U32 ringBufferLength, const std::string_view name) const override
Definition: VKWrapper.cpp:3203
ErrorCode initRenderingAPI(I32 argc, char **argv, Configuration &config) noexcept override
Definition: VKWrapper.cpp:764
bool bindShaderResources(const DescriptorSetEntries &descriptorSetEntries) override
Definition: VKWrapper.cpp:1320
void destroyPipeline(CompiledPipeline &pipeline, bool defer)
Definition: VKWrapper.cpp:1129
static void SubmitTransferRequest(const VKTransferQueue::TransferRequest &request, VkCommandBuffer cmd)
Definition: VKWrapper.cpp:2149
VkCommandBuffer getCurrentCommandBuffer() const noexcept
Definition: VKWrapper.cpp:561
GFXDevice & _context
Definition: VKWrapper.h:138
void postFlushCommandBuffer(Handle< GFX::CommandBuffer > commandBuffer) noexcept override
Definition: VKWrapper.cpp:2929
void preFlushCommandBuffer(Handle< GFX::CommandBuffer > commandBuffer) override
Definition: VKWrapper.cpp:2917
static VKStateTracker s_stateTracker
Definition: VKWrapper.h:163
bool _uniformsNeedLock
Definition: VKWrapper.h:156
std::array< VkDescriptorSet, to_base(DescriptorSetUsage::COUNT)> _descriptorSets
Definition: VKWrapper.h:151
static void FlushBufferTransferRequests()
Definition: VKWrapper.cpp:2180
void prepareFlushWindow(DisplayWindow &window) override
Definition: VKWrapper.cpp:618
vkb::Instance _vkbInstance
Definition: VKWrapper.h:139
std::array< DynamicBindings, to_base(DescriptorSetUsage::COUNT)> _descriptorDynamicBindings
Definition: VKWrapper.h:152
bool setViewportInternal(const Rect< I32 > &newViewport) noexcept override
Definition: VKWrapper.cpp:2939
void flushPushConstantsLocks()
Definition: VKWrapper.cpp:1985
hashMap< I64, VKPerWindowState > _perWindowState
Definition: VKWrapper.h:148
bool frameEnded() override
Definition: VKWrapper.cpp:680
static bool s_hasDebugMarkerSupport
Definition: VKWrapper.h:179
void initStatePerWindow(VKPerWindowState &windowState)
Definition: VKWrapper.cpp:739
VkDescriptorSetLayout createLayoutFromBindings(const DescriptorSetUsage usage, const ShaderProgram::BindingsPerSetArray &bindings, DynamicBindings &dynamicBindings)
Definition: VKWrapper.cpp:2984
VkPipelineCache _pipelineCache
Definition: VKWrapper.h:142
static VkSampler GetSamplerHandle(SamplerDescriptor sampler, size_t &samplerHashInOut)
Return the Vulkan sampler object's handle for the given hash value.
Definition: VKWrapper.cpp:3145
ShaderResult bindPipeline(const Pipeline &pipeline, VkCommandBuffer cmdBuffer)
Definition: VKWrapper.cpp:1774
static bool Draw(const GenericDrawCommand &cmd, VkCommandBuffer cmdBuffer)
Definition: VKWrapper.cpp:1247
RenderTarget_uptr newRT(const RenderTargetDescriptor &descriptor) const override
Definition: VKWrapper.cpp:3198
GFX::MemoryBarrierCommand _uniformsMemCommand
Definition: VKWrapper.h:145
void onRenderThreadLoopStart() override
Definition: VKWrapper.cpp:610
void bindDynamicState(const RenderStateBlock &currentState, const RTBlendStates &blendStates, VkCommandBuffer cmdBuffer) noexcept
Definition: VKWrapper.cpp:1608
static bool s_hasPushDescriptorSupport
Definition: VKWrapper.h:180
void flushWindow(DisplayWindow &window) override
Definition: VKWrapper.cpp:622
static VKTransferQueue s_transferQueue
Definition: VKWrapper.h:166
DescriptorLayoutCache_uptr _descriptorLayoutCache
Definition: VKWrapper.h:146
void initDescriptorSets() override
Definition: VKWrapper.cpp:3028
ShaderBuffer_uptr newSB(const ShaderBufferDescriptor &descriptor) const override
Definition: VKWrapper.cpp:3208
hashMap< size_t, VkSampler, NoHash< size_t > > SamplerObjectMap
Definition: VKWrapper.h:159
VkDescriptorSet _dummyDescriptorSet
Definition: VKWrapper.h:143
static SharedMutex s_samplerMapLock
Definition: VKWrapper.h:161
void recreateSwapChain(VKPerWindowState &windowState)
Definition: VKWrapper.cpp:710
VKDevice_uptr _device
Definition: VKWrapper.h:140
static VKDeletionQueue s_deviceDeleteQueue
Definition: VKWrapper.h:165
VmaAllocator _allocator
Definition: VKWrapper.h:141
void onRenderThreadLoopEnd() override
Definition: VKWrapper.cpp:614
static void InsertDebugMessage(VkCommandBuffer cmdBuffer, const char *message, U32 id=U32_MAX)
Definition: VKWrapper.cpp:3093
void destroyPipelineCache()
Definition: VKWrapper.cpp:1168
VkCommandPool createCommandPool(uint32_t queueFamilyIndex, VkCommandPoolCreateFlags createFlags=VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT) const
Definition: vkDevice.cpp:223
void submitToQueue(QueueType queue, const VkSubmitInfo &submitInfo, VkFence &fence) const
Definition: vkDevice.cpp:235
VKQueue getQueue(QueueType type) const noexcept
Definition: vkDevice.cpp:152
VkDevice getVKDevice() const noexcept
Definition: vkDevice.cpp:203
vkb::Swapchain _swapChain
Definition: vkSwapChain.h:83
VkImageView getCurrentImageView() const noexcept
VkImage getCurrentImage() const noexcept
vkb::Swapchain & getSwapChain() noexcept
virtual void draw(const GenericDrawCommand &command, VDIUserData *data)=0
static void Destruct(VkSampler &handle)
static VkSampler Construct(const SamplerDescriptor &descriptor)
ShaderResult validatePreBind(const bool rebind) override
const vkShaders & shaderStages() const noexcept
VkShaderStageFlags stageMask() const noexcept
static void TransitionTexture(TransitionType type, const VkImageSubresourceRange &subresourceRange, VkImage image, VkImageMemoryBarrier2 &memBarrier)
Definition: vkTexture.cpp:1011
static void Copy(VkCommandBuffer cmdBuffer, const vkTexture *source, const U8 sourceSamples, const vkTexture *destination, const U8 destinationSamples, CopyTexParams params)
Definition: vkTexture.cpp:969
VkImageView getImageView(const CachedImageView::Descriptor &descriptor) const
Definition: vkTexture.cpp:914
static VkImageAspectFlags GetAspectFlags(const TextureDescriptor &descriptor) noexcept
Definition: vkTexture.cpp:177
static DescriptorAllocatorPool * Create(const VkDevice &device, Divide::I32 nFrames=3)
constexpr U8 MAX_DEBUG_SCOPE_DEPTH
Maximum number of nested debug scopes we support in the renderer.
Definition: config.h:147
constexpr auto ENGINE_VERSION_MINOR
Definition: config.h:202
constexpr unsigned char MINIMUM_VULKAN_MINOR_VERSION
Definition: config.h:93
constexpr bool ENABLE_GPU_VALIDATION
Error callbacks, validations, buffer checks, etc. are controlled by this flag. Heavy performance impact when enabled.
Definition: config.h:192
constexpr unsigned char DESIRED_VULKAN_MINOR_VERSION
Definition: config.h:94
constexpr auto ENGINE_VERSION_PATCH
Definition: config.h:203
constexpr U8 MAX_FRAMES_IN_FLIGHT
Maximum number of active frames until we start waiting on a fence/sync.
Definition: config.h:100
constexpr auto ENGINE_VERSION_MAJOR
Definition: config.h:201
constexpr char ENGINE_NAME[]
Definition: config.h:200
void SetObjectName(VkDevice device, uint64_t object, VkObjectType objectType, const char *name)
Definition: vkResources.cpp:35
PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT
Definition: vkResources.cpp:31
PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT
Definition: vkResources.cpp:32
PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT
Definition: vkResources.cpp:30
PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT
Definition: vkResources.cpp:28
PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT
Definition: vkResources.cpp:29
constexpr Optick::Category::Type Graphics
Definition: Profiler.h:60
Str StringFormat(const char *fmt, Args &&...args)
void Hash_combine(size_t &seed, const T &v, const Rest &... rest) noexcept
Combines a seed with the hashes of the given values, à la Boost's hash_combine.
Definition: MathHelper.inl:799
VkDescriptorType vkDescriptorType(DescriptorSetBindingType type, bool isPushDescriptor) noexcept
void SubmitRenderCommand(const GenericDrawCommand &drawCommand, const VkCommandBuffer commandBuffer, bool indexed)
Note: If internal format is not GL_NONE, an indexed draw is issued!
Definition: vkResources.cpp:67
void OnStartup(VkDevice device)
void PrepareTransferRequest(const VKTransferQueue::TransferRequest &request, bool toWrite, VkBufferMemoryBarrier2 &memBarrierOut)
Definition: VKWrapper.cpp:2010
FORCE_INLINE ResourcePath PipelineCacheLocation()
Definition: VKWrapper.cpp:171
FORCE_INLINE bool IsTriangles(const PrimitiveTopology topology)
Definition: VKWrapper.cpp:176
void BatchTransferQueue(BarrierContainer &barriers, BatchedTransferQueue &transferQueueBatched, VKTransferQueue &transferQueue)
Definition: VKWrapper.cpp:2108
std::array< DynamicEntry, MAX_BINDINGS_PER_DESCRIPTOR_SET > DynamicBufferEntry
Definition: VKWrapper.cpp:241
VkShaderStageFlags GetFlagsForStageVisibility(const BaseType< ShaderStageVisibility > mask) noexcept
Definition: VKWrapper.cpp:185
eastl::fixed_vector< VkBufferMemoryBarrier2, 32, true > BarrierContainer
Definition: VKWrapper.cpp:2007
thread_local std::array< DynamicBufferEntry, to_base(DescriptorSetUsage::COUNT)> s_dynamicBindings
Definition: VKWrapper.cpp:242
void FlushCopyRequests(CopyContainer &copyRequests, VkCommandBuffer cmd)
Definition: VKWrapper.cpp:2056
vector< PerBufferCopies > CopyContainer
Definition: VKWrapper.cpp:2006
void PrepareBufferCopyBarriers(CopyContainer &copyRequests, BatchedTransferQueue &transferQueueBatched)
Definition: VKWrapper.cpp:2072
void FlushTransferQueue(VkCommandBuffer cmdBuffer, VKTransferQueue &transferQueue)
Definition: VKWrapper.cpp:2130
thread_local eastl::fixed_vector< U32, MAX_BINDINGS_PER_DESCRIPTOR_SET *to_base(DescriptorSetUsage::COUNT), false > s_dynamicOffsets
Definition: VKWrapper.cpp:243
void FlushBarriers(BarrierContainer &barriers, BatchedTransferQueue &transferQueueBatched, VkCommandBuffer cmd, bool toWrite)
Definition: VKWrapper.cpp:2036
eastl::fixed_vector< VKTransferQueue::TransferRequest, 64, false > BatchedTransferQueue
Definition: VKWrapper.cpp:2008
VkPipelineRasterizationStateCreateInfo pipelineRasterizationStateCreateInfo(VkPolygonMode polygonMode, VkCullModeFlags cullMode, VkFrontFace frontFace, VkPipelineRasterizationStateCreateFlags flags=0)
VkCommandBufferAllocateInfo commandBufferAllocateInfo(VkCommandPool commandPool, VkCommandBufferLevel level, uint32_t bufferCount)
VkDescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo(const VkDescriptorSetLayoutBinding *pBindings, uint32_t bindingCount)
VkSubmitInfo submitInfo()
VkFenceCreateInfo fenceCreateInfo(VkFenceCreateFlags flags=0)
VkPipelineShaderStageCreateInfo pipelineShaderStageCreateInfo(VkShaderStageFlagBits stage, VkShaderModule shaderModule, const char *entryPoint="main")
VkWriteDescriptorSet writeDescriptorSet(VkDescriptorType type, uint32_t binding, VkDescriptorBufferInfo *bufferInfo, uint32_t descriptorCount=1)
VkPipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo(const VkDynamicState *pDynamicStates, uint32_t dynamicStateCount, VkPipelineDynamicStateCreateFlags flags=0)
VkPipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo(VkSampleCountFlagBits rasterizationSamples, VkPipelineMultisampleStateCreateFlags flags=0)
VkPipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo(VkBool32 depthTestEnable, VkBool32 depthWriteEnable, VkCompareOp depthCompareOp)
VkComputePipelineCreateInfo computePipelineCreateInfo(VkPipelineLayout layout, VkPipelineCreateFlags flags=0)
VkDependencyInfo dependencyInfo()
VkPipelineTessellationStateCreateInfo pipelineTessellationStateCreateInfo(uint32_t patchControlPoints)
VkDescriptorImageInfo descriptorImageInfo(VkSampler sampler, VkImageView imageView, VkImageLayout imageLayout)
VkCommandBufferBeginInfo commandBufferBeginInfo(VkCommandBufferUsageFlags flags=0)
VkImageMemoryBarrier2 imageMemoryBarrier2()
Initialize an image memory barrier with no image transfer ownership.
VkPipelineColorBlendAttachmentState pipelineColorBlendAttachmentState(VkColorComponentFlags colorWriteMask, VkBool32 blendEnable)
VkPipelineViewportStateCreateInfo pipelineViewportStateCreateInfo(uint32_t viewportCount, uint32_t scissorCount, VkPipelineViewportStateCreateFlags flags=0)
VkGraphicsPipelineCreateInfo pipelineCreateInfo()
VkBufferMemoryBarrier2 bufferMemoryBarrier2()
VkPipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo()
VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo(const VkDescriptorSetLayout *pSetLayouts, uint32_t setLayoutCount=1)
VkPipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateCreateInfo(VkPrimitiveTopology topology, VkPipelineInputAssemblyStateCreateFlags flags, VkBool32 primitiveRestartEnable)
VkPipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo(uint32_t attachmentCount, const VkPipelineColorBlendAttachmentState *pAttachments)
Handle console commands that start with a forward slash.
Definition: AIProcessor.cpp:7
std::array< VkBlendFactor, to_base(BlendProperty::COUNT)> vkBlendTable
Definition: vkResources.cpp:13
std::array< DescriptorSetEntry, to_base(DescriptorSetUsage::COUNT)> DescriptorSetEntries
DELEGATE_STD< Ret, Args... > DELEGATE
std::lock_guard< mutex > LockGuard
Definition: SharedMutex.h:55
std::array< VkPolygonMode, to_base(FillMode::COUNT)> vkFillModeTable
Definition: vkResources.cpp:18
bool IS_ZERO(const T X) noexcept
constexpr U32 to_U32(const T value)
void Draw()
TextureType TargetType(const ImageView &imageView) noexcept
static PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT
Definition: VKWrapper.cpp:155
bool isEnabledOption(const GenericDrawCommand &cmd, CmdRenderOptions option) noexcept
std::array< VkBlendOp, to_base(BlendOperation::COUNT)> vkBlendOpTable
Definition: vkResources.cpp:14
constexpr U64 U64_MAX
FileError writeFile(const ResourcePath &filePath, const std::string_view fileName, const char *content, const size_t length, const FileType fileType)
FileError readFile(const ResourcePath &filePath, std::string_view fileName, FileType fileType, std::ifstream &sreamOut)
eastl::fixed_vector< DynamicBinding, MAX_BINDINGS_PER_DESCRIPTOR_SET, false > DynamicBindings
Definition: vkResources.h:145
constexpr RenderTargetID SCREEN_TARGET_ID
static PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT
Definition: VKWrapper.cpp:163
int32_t I32
constexpr U32 VK_VENDOR_ID_QUALCOMM
Definition: VKWrapper.cpp:260
uint8_t U8
static PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT
Definition: VKWrapper.cpp:164
constexpr U8 MAX_BINDINGS_PER_DESCRIPTOR_SET
static PFN_vkGetDescriptorEXT vkGetDescriptorEXT
Definition: VKWrapper.cpp:162
static PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT
Definition: VKWrapper.cpp:161
constexpr U32 VK_VENDOR_ID_INTEL
Definition: VKWrapper.cpp:261
constexpr F32 to_F32(const T value)
std::shared_mutex SharedMutex
Definition: SharedMutex.h:43
size_t GetHash(const PropertyDescriptor< T > &descriptor) noexcept
Definition: Resource.inl:40
std::array< VkCompareOp, to_base(ComparisonFunction::COUNT)> vkCompareFuncTable
Definition: vkResources.cpp:15
static PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT
Definition: VKWrapper.cpp:165
@ COUNT
Place all properties above this.
eastl::vector< Type > vector
Definition: Vector.h:42
bool IsDepthTexture(GFXImagePacking packing) noexcept
std::shared_lock< mutex > SharedLock
Definition: SharedMutex.h:49
static PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT
Definition: VKWrapper.cpp:160
static PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT
Definition: VKWrapper.cpp:156
constexpr U8 INVALID_TEXTURE_BINDING
constexpr U16 U16_MAX
std::underlying_type_t< Type > BaseType
constexpr U32 VK_VENDOR_ID_ARM
Definition: VKWrapper.cpp:259
constexpr RenderTargetID INVALID_RENDER_TARGET_ID
constexpr bool isPowerOfTwo(const T x) noexcept
@ Vulkan
not supported yet
constexpr U8 U8_MAX
static PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT
Definition: VKWrapper.cpp:157
void efficient_clear(eastl::fixed_vector< T, nodeCount, bEnableOverflow, OverflowAllocator > &fixed_vector)
Definition: Vector.h:52
constexpr U8 to_U8(const T value)
bool IsEmpty(const BufferLocks &locks) noexcept
Definition: BufferLocks.cpp:8
constexpr U32 U32_MAX
static const mat4< F32 > MAT4_ZERO
Definition: MathMatrices.h:729
constexpr U32 VK_VENDOR_ID_IMGTECH
Definition: VKWrapper.cpp:257
constexpr size_t to_size(const T value)
std::string VKErrorString(VkResult errorCode)
Definition: vkResources.h:293
std::array< VkPrimitiveTopology, to_base(PrimitiveTopology::COUNT)> vkPrimitiveTypeTable
Definition: vkResources.cpp:21
constexpr U32 INVALID_VK_QUEUE_INDEX
Definition: vkResources.h:82
::value constexpr void CLAMP(T &n, T min, T max) noexcept
Clamps value n between min and max.
Definition: MathHelper.inl:114
bool COMPARE(T X, U Y) noexcept
std::array< VkCullModeFlags, to_base(CullMode::COUNT)> vkCullModeTable
Definition: vkResources.cpp:17
VertexInputDescription getVertexDescription(const AttributeMap &vertexFormat)
constexpr I32 to_I32(const T value)
DescriptorSetUsage
FORCE_INLINE T * Get(const Handle< T > handle)
constexpr U32 VK_VENDOR_ID_NVIDIA
Definition: VKWrapper.cpp:258
uint32_t U32
constexpr U32 VK_VENDOR_ID_AMD
Definition: VKWrapper.cpp:256
Project const SceneEntry & entry
Definition: DefaultScene.h:41
constexpr U8 U8_ZERO
std::array< VkStencilOp, to_base(StencilOperation::COUNT)> vkStencilOpTable
Definition: vkResources.cpp:16
static PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR
Definition: VKWrapper.cpp:158
constexpr auto to_base(const Type value) -> Type
VKAPI_ATTR VkBool32 VKAPI_CALL divide_debug_callback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageType, const VkDebugUtilsMessengerCallbackDataEXT *pCallbackData, void *)
Definition: VKWrapper.cpp:38
BufferUsageType _usageType
Definition: BufferParams.h:41
BufferRange _range
Definition: BufferLocks.h:57
BufferSyncUsage _type
Definition: BufferLocks.h:58
LockableBuffer * _buffer
Definition: BufferLocks.h:60
BufferFlags _flags
Definition: BufferParams.h:48
vkShaderProgram * _program
Definition: vkResources.h:102
VkPipeline _vkPipelineWireframe
Definition: vkResources.h:104
PrimitiveTopology _topology
Definition: vkResources.h:106
VkShaderStageFlags _stageFlags
Definition: vkResources.h:107
VkPipelineLayout _vkPipelineLayout
Definition: vkResources.h:105
VkPipelineBindPoint _bindPoint
Definition: vkResources.h:101
struct Divide::Configuration::Runtime runtime
static NO_INLINE void errorfn(const char *format, T &&... args)
static NO_INLINE void printfn(const char *format, T &&... args)
vke::DescriptorAllocatorHandle _handle
Definition: vkResources.h:184
DescriptorSetBindingType _type
DescriptorCombinedImageSampler _sampledImage
DescriptorSetBindingData _data
RTClearDescriptor _clearDescriptor
Definition: Commands.inl:97
UColour4 _clearColour
r = depth, g = stencil if target is a depth(+stencil) attachment
Definition: Commands.inl:138
FORCE_INLINE T * As()
Definition: Commands.h:60
Handle< Texture > _destination
Definition: Commands.inl:122
RTTransitionMask _transitionMask
Definition: Commands.inl:102
TextureLayoutChanges _textureLayoutChanges
Definition: Commands.inl:191
PixelAlignment _pixelPackAlignment
Definition: Commands.inl:130
Handle< Texture > _texture
Definition: Commands.inl:129
DELEGATE_STD< void, const ImageReadbackData & > _callback
Definition: Commands.inl:132
IndirectIndexedDrawCommand _cmd
ImageSubRange _subRange
const Texture * _srcTexture
bool lockRange(BufferRange range, SyncObjectHandle &sync) const
Definition: BufferLocks.cpp:26
VkPipelineLayout _pipelineLayout
Definition: vkResources.h:122
VkPipelineMultisampleStateCreateInfo _multisampling
Definition: vkResources.h:121
VkPipelineInputAssemblyStateCreateInfo _inputAssembly
Definition: vkResources.h:116
std::vector< VkPipelineShaderStageCreateInfo > _shaderStages
Definition: vkResources.h:114
VkPipeline build_graphics_pipeline(VkDevice device, VkPipelineCache pipelineCache)
Definition: VKWrapper.cpp:305
VkPipelineTessellationStateCreateInfo _tessellation
Definition: vkResources.h:124
eastl::fixed_vector< VkPipelineColorBlendAttachmentState, to_base(RTColourAttachmentSlot::COUNT), false > _colorBlendAttachments
Definition: vkResources.h:120
VkPipelineVertexInputStateCreateInfo _vertexInputInfo
Definition: vkResources.h:115
VkPipelineDepthStencilStateCreateInfo _depthStencil
Definition: vkResources.h:123
VkPipeline build_compute_pipeline(VkDevice device, VkPipelineCache pipelineCache)
Definition: VKWrapper.cpp:288
VkPipelineRasterizationStateCreateInfo _rasterizer
Definition: vkResources.h:119
VkPipeline build_pipeline(VkDevice device, VkPipelineCache pipelineCache, bool graphics)
Definition: VKWrapper.cpp:278
PrimitiveTopology _primitiveTopology
Definition: Pipeline.h:48
AttributeMap _vertexFormat
Definition: Pipeline.h:49
Handle< ShaderProgram > _shaderProgramHandle
Definition: Pipeline.h:47
RTBlendStates _blendStates
Definition: Pipeline.h:45
RenderStateBlock _stateBlock
Definition: Pipeline.h:46
const F32 * dataPtr() const
Definition: PushConstants.h:51
bool set() const noexcept
Definition: PushConstants.h:44
static constexpr size_t Size() noexcept
Definition: PushConstants.h:50
std::array< BlendingSettings, to_base(RTColourAttachmentSlot::COUNT)> _settings
StringReturnType< N > string() const noexcept
Definition: ResourcePath.h:64
static constexpr size_t INVALID_SAMPLER_HASH
BufferRange _range
I32 _queueReadIndex
ShaderBuffer * _buffer
static constexpr size_t INVALID_SYNC_ID
Definition: LockManager.h:62
std::deque< std::pair< QueuedItem, U8 > > _deletionQueue
Definition: vkResources.h:269
void push(QueuedItem &&function)
Definition: VKWrapper.cpp:378
void flush(VkDevice device, bool force=false)
Definition: VKWrapper.cpp:384
static constexpr U8 BUFFER_COUNT
Definition: vkResources.h:149
VKImmediateCmdContext(VKDevice &context, QueueType type)
Definition: VKWrapper.cpp:431
void flushCommandBuffer(FlushCallback &&function, const char *scopeName)
Definition: VKWrapper.cpp:459
std::function< void(VkCommandBuffer cmd, QueueType queue, U32 queueIndex)> FlushCallback
Definition: vkResources.h:151
std::array< VkCommandBuffer, BUFFER_COUNT > _commandBuffers
Definition: vkResources.h:168
std::array< VkFence, BUFFER_COUNT > _bufferFences
Definition: vkResources.h:167
struct Divide::VKPerWindowState::VKDynamicState _activeState
DisplayWindow * _window
Definition: vkResources.h:190
VKSwapChain_uptr _swapChain
Definition: vkResources.h:191
VkQueue _queue
Definition: vkResources.h:93
VkBuffer _drawIndirectBuffer
Definition: vkResources.h:224
CompiledPipeline _pipeline
Definition: vkResources.h:220
DebugScope _lastInsertedDebugMessage
Definition: vkResources.h:234
VKImmediateCmdContext * IMCmdContext(QueueType type) const
Definition: VKWrapper.cpp:532
VkShaderStageFlags _pipelineStageMask
Definition: vkResources.h:227
vec2< U16 > _activeRenderTargetDimensions
Definition: vkResources.h:231
size_t _drawIndirectBufferOffset
Definition: vkResources.h:225
std::array< VKImmediateCmdContext_uptr, to_base(QueueType::COUNT)> _cmdContexts
Definition: vkResources.h:246
RenderTargetID _activeRenderTargetID
Definition: vkResources.h:229
void init(VKDevice *device, VKPerWindowState *mainWindow)
Definition: VKWrapper.cpp:498
std::array< DescriptorAllocator, to_base(DescriptorSetUsage::COUNT)> _descriptorAllocators
Definition: vkResources.h:219
VKPerWindowState * _activeWindow
Definition: vkResources.h:215
DebugScope _debugScope[Config::MAX_DEBUG_SCOPE_DEPTH]
Definition: vkResources.h:233
VkPipelineRenderingCreateInfo _pipelineRenderInfo
Definition: vkResources.h:222
std::deque< TransferRequest > _requests
Definition: vkResources.h:288
std::atomic_bool _dirty
Definition: vkResources.h:289
const BufferParams _params
Definition: vkBufferImpl.h:56
vector< VkVertexInputBindingDescription > bindings
Definition: vkBufferImpl.h:43
vector< VkVertexInputAttributeDescription > attributes
Definition: vkBufferImpl.h:44
Definition: VKWrapper.cpp:236
VkCommandBuffer * _cmdBuffer
Definition: VKWrapper.h:55
#define VK_PROFILE(FUNCTION,...)
Definition: vkResources.h:340
#define VK_CHECK(x)
Definition: vkResources.h:327
#define VK_FLAGS_NONE
Definition: vkResources.h:56