From d649554c205ceb6209fe3a3556dd71b1f5ab0095 Mon Sep 17 00:00:00 2001 From: Qining Date: Wed, 25 Oct 2017 12:10:11 -0400 Subject: [PATCH] Support sparse binding in Vulkan for buffers and opaque images (#1237) * WIP: Support sparse binding * WIP: cache sparse binding info on the tracing side * WIP: recreate buffer sparse binding and opaque image sparse binding * WIP: update the state block in the trace side * Log error when the memories for sparse bindings for opaque images or buffers do not exist during recreating state --- core/cc/interval_list.h | 9 + gapii/cc/vulkan_extras.inl | 57 ++++++- gapii/cc/vulkan_inlines.inl | 96 +++++------ gapii/cc/vulkan_mid_execution.cpp | 43 ++++- gapis/api/vulkan/CMakeFiles.cmake | 2 + gapis/api/vulkan/custom_replay.go | 110 ++++++++++++ gapis/api/vulkan/externs.go | 47 +++++ gapis/api/vulkan/sparse_binding_test.go | 161 ++++++++++++++++++ gapis/api/vulkan/sparse_bindings.go | 90 ++++++++++ .../vulkan/templates/vk_spy_helpers.cpp.tmpl | 42 +++++ gapis/api/vulkan/vulkan.api | 96 +++++++++-- gapis/api/vulkan/vulkan.go | 18 +- 12 files changed, 689 insertions(+), 82 deletions(-) create mode 100644 gapis/api/vulkan/sparse_binding_test.go create mode 100644 gapis/api/vulkan/sparse_bindings.go diff --git a/core/cc/interval_list.h b/core/cc/interval_list.h index 455802140b..c533363bcd 100644 --- a/core/cc/interval_list.h +++ b/core/cc/interval_list.h @@ -118,6 +118,10 @@ class CustomIntervalList { // end() returns the pointer to one-past the last interval in the list. inline const T* end() const; + // operator[] returns the const reference to the element at the specified + // location pos. + inline const T& operator[](size_t pos) const; + protected: // rangeFirst returns the index of the first interval + bias that touches or // exceeds start. 
@@ -240,6 +244,11 @@ inline const T* CustomIntervalList::end() const { } } +template +inline const T& CustomIntervalList::operator[](size_t pos) const { + return mIntervals[pos]; +} + template inline ssize_t CustomIntervalList::rangeFirst(interval_unit_type start, interval_unit_type bias) const { ssize_t l = 0; diff --git a/gapii/cc/vulkan_extras.inl b/gapii/cc/vulkan_extras.inl index d0602d973c..1379c32fc1 100644 --- a/gapii/cc/vulkan_extras.inl +++ b/gapii/cc/vulkan_extras.inl @@ -142,7 +142,9 @@ void SpyOverride_RecreateImage( VkMemoryRequirements* pMemoryRequirements, uint32_t sparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {} void SpyOverride_RecreateBindImageMemory(VkDevice, VkImage, VkDeviceMemory, - VkDeviceSize offset) {} + VkDeviceSize offset, + uint32_t bindCount, + VkSparseMemoryBind* binds) {} void SpyOverride_RecreateImageData(VkDevice, VkImage, uint32_t /*VkImageLayout*/, uint32_t hostMemoryIndex, VkQueue, @@ -165,7 +167,9 @@ void SpyOverride_RecreateComputePipeline(VkDevice, VkPipelineCache, VkPipeline*) {} void SpyOverride_RecreateBuffer(VkDevice, VkBufferCreateInfo*, VkBuffer*) {} void SpyOverride_RecreateBindBufferMemory(VkDevice, VkBuffer, VkDeviceMemory, - VkDeviceSize offset) {} + VkDeviceSize offset, + uint32_t bindCount, + VkSparseMemoryBind* binds) {} void SpyOverride_RecreateBufferData(VkDevice, VkBuffer, uint32_t hostBufferMemoryIndex, VkQueue, void* data) {} @@ -208,3 +212,52 @@ uint32_t SpyOverride_createImageAndCacheMemoryRequirements( void SpyOverride_cacheImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t count, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + + +class SparseBindingInterval { + public: + SparseBindingInterval(const VkSparseMemoryBind& bind) + : resourceOffset_(bind.mresourceOffset), + size_(bind.msize), + memory_(bind.mmemory), + memoryOffset_(bind.mmemoryOffset), + flags_(bind.mflags) {} + SparseBindingInterval(const SparseBindingInterval&) 
= default; + SparseBindingInterval(SparseBindingInterval&&) = default; + SparseBindingInterval& operator=(const SparseBindingInterval&) = default; + SparseBindingInterval& operator=(SparseBindingInterval&&) = default; + + VkSparseMemoryBind sparseMemoryBind() const { + return VkSparseMemoryBind(resourceOffset_, size_, memory_, memoryOffset_, + flags_); + } + + using interval_unit_type = VkDeviceSize; + inline VkDeviceSize start() const { return resourceOffset_; } + inline VkDeviceSize end() const { return resourceOffset_ + size_; } + inline void adjust(VkDeviceSize start, VkDeviceSize end) { + VkDeviceSize new_size = end - start; + if (start > resourceOffset_) { + VkDeviceSize x = start - resourceOffset_; + resourceOffset_ += x; + memoryOffset_ += x; + } else { + VkDeviceSize x = resourceOffset_ - start; + resourceOffset_ -= x; + memoryOffset_ -= x; + } + size_ = new_size; + } + + private: + VkDeviceSize resourceOffset_; + VkDeviceSize size_; + VkDeviceMemory memory_; + VkDeviceSize memoryOffset_; + VkSparseMemoryBindFlags flags_; +}; + +using SparseBindingList = core::CustomIntervalList; + +std::unordered_map mBufferSparseBindings; +std::unordered_map mOpaqueImageSparseBindings; diff --git a/gapii/cc/vulkan_inlines.inl b/gapii/cc/vulkan_inlines.inl index 6050d6556c..8eff3dafee 100644 --- a/gapii/cc/vulkan_inlines.inl +++ b/gapii/cc/vulkan_inlines.inl @@ -716,7 +716,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdBindPipeline, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdBindPipeline, map.size() - 1, 0, 0, nullptr ); } template<> @@ -726,7 +726,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; 
const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetViewport, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetViewport, map.size() - 1, 0, 0, nullptr ); } template<> @@ -736,7 +736,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetScissor, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetScissor, map.size() - 1, 0, 0, nullptr ); } template<> @@ -746,7 +746,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetLineWidth, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetLineWidth, map.size() - 1, 0, 0, nullptr ); } template<> @@ -756,7 +756,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetDepthBias, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetDepthBias, map.size() - 1, 0, 0, nullptr ); } template<> @@ -766,7 +766,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetBlendConstants, map.size() - 1, 0, 0 + 
buffer, reference_idx, CommandType::cmd_vkCmdSetBlendConstants, map.size() - 1, 0, 0, nullptr ); } template<> @@ -776,7 +776,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetDepthBounds, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetDepthBounds, map.size() - 1, 0, 0, nullptr ); } template<> @@ -786,7 +786,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetStencilCompareMask, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetStencilCompareMask, map.size() - 1, 0, 0, nullptr ); } template<> @@ -796,7 +796,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetStencilWriteMask, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetStencilWriteMask, map.size() - 1, 0, 0, nullptr ); } template<> @@ -806,7 +806,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetStencilReference, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetStencilReference, map.size() - 1, 0, 0, nullptr ); } template<> @@ -816,7 +816,7 @@ 
inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdBindDescriptorSets, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdBindDescriptorSets, map.size() - 1, 0, 0, nullptr ); } template<> @@ -826,7 +826,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdBindIndexBuffer, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdBindIndexBuffer, map.size() - 1, 0, 0, nullptr ); } template<> @@ -836,7 +836,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdBindVertexBuffers, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdBindVertexBuffers, map.size() - 1, 0, 0, nullptr ); } template<> @@ -846,7 +846,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDraw, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdDraw, map.size() - 1, 0, 0, nullptr ); } template<> @@ -856,7 +856,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t 
reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDrawIndexed, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdDrawIndexed, map.size() - 1, 0, 0, nullptr ); } template<> @@ -866,7 +866,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDrawIndirect, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdDrawIndirect, map.size() - 1, 0, 0, nullptr ); } template<> @@ -876,7 +876,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDrawIndexedIndirect, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdDrawIndexedIndirect, map.size() - 1, 0, 0, nullptr ); } template<> @@ -886,7 +886,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDispatch, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdDispatch, map.size() - 1, 0, 0, nullptr ); } template<> @@ -896,7 +896,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDispatchIndirect, map.size() - 1, 0, 0 + buffer, 
reference_idx, CommandType::cmd_vkCmdDispatchIndirect, map.size() - 1, 0, 0, nullptr ); } template<> @@ -906,7 +906,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdCopyBuffer, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdCopyBuffer, map.size() - 1, 0, 0, nullptr ); } template<> @@ -916,7 +916,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdCopyImage, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdCopyImage, map.size() - 1, 0, 0, nullptr ); } template<> @@ -926,7 +926,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdBlitImage, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdBlitImage, map.size() - 1, 0, 0, nullptr ); } template<> @@ -936,7 +936,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdCopyBufferToImage, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdCopyBufferToImage, map.size() - 1, 0, 0, nullptr ); } template<> @@ -946,7 +946,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, 
std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdCopyImageToBuffer, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdCopyImageToBuffer, map.size() - 1, 0, 0, nullptr ); } template<> @@ -956,7 +956,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdUpdateBuffer, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdUpdateBuffer, map.size() - 1, 0, 0, nullptr ); } template<> @@ -966,7 +966,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdFillBuffer, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdFillBuffer, map.size() - 1, 0, 0, nullptr ); } template<> @@ -976,7 +976,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdClearColorImage, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdClearColorImage, map.size() - 1, 0, 0, nullptr ); } template<> @@ -986,7 +986,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = 
CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdClearDepthStencilImage, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdClearDepthStencilImage, map.size() - 1, 0, 0, nullptr ); } template<> @@ -996,7 +996,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdClearAttachments, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdClearAttachments, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1006,7 +1006,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdResolveImage, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdResolveImage, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1016,7 +1016,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdSetEvent, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdSetEvent, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1026,7 +1026,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdResetEvent, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdResetEvent, 
map.size() - 1, 0, 0, nullptr ); } template<> @@ -1036,7 +1036,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdWaitEvents, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdWaitEvents, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1046,7 +1046,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdPipelineBarrier, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdPipelineBarrier, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1056,7 +1056,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdBeginQuery, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdBeginQuery, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1066,7 +1066,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdEndQuery, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdEndQuery, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1076,7 +1076,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = 
spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdResetQueryPool, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdResetQueryPool, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1086,7 +1086,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdWriteTimestamp, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdWriteTimestamp, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1096,7 +1096,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdCopyQueryPoolResults, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdCopyQueryPoolResults, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1106,7 +1106,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdPushConstants, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdPushConstants, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1116,7 +1116,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - 
buffer, reference_idx, CommandType::cmd_vkCmdBeginRenderPass, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdBeginRenderPass, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1126,7 +1126,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdNextSubpass, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdNextSubpass, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1136,7 +1136,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdEndRenderPass, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdEndRenderPass, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1146,7 +1146,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdExecuteCommands, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdExecuteCommands, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1156,7 +1156,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDebugMarkerBeginEXT, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdDebugMarkerBeginEXT, 
map.size() - 1, 0, 0, nullptr ); } template<> @@ -1166,7 +1166,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDebugMarkerEndEXT, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdDebugMarkerEndEXT, map.size() - 1, 0, 0, nullptr ); } template<> @@ -1176,7 +1176,7 @@ inline void AppendCommand(VkCommandBuffer buffer, VulkanSpy* spy, std::shared_pt auto& references = spy->CommandBuffers[buffer]->mCommandReferences; const uint32_t reference_idx = references.size(); references[reference_idx] = CommandReference( - buffer, reference_idx, CommandType::cmd_vkCmdDebugMarkerInsertEXT, map.size() - 1, 0, 0 + buffer, reference_idx, CommandType::cmd_vkCmdDebugMarkerInsertEXT, map.size() - 1, 0, 0, nullptr ); } //////////////// Command Buffer Insertion @@ -1435,4 +1435,4 @@ inline bool RecreateCommand(CallObserver* observer, } } -inline void VulkanSpy::notifyPendingCommandAdded(CallObserver*, VkQueue) {} \ No newline at end of file +inline void VulkanSpy::notifyPendingCommandAdded(CallObserver*, VkQueue) {} diff --git a/gapii/cc/vulkan_mid_execution.cpp b/gapii/cc/vulkan_mid_execution.cpp index ad6db0e6a4..490045689c 100644 --- a/gapii/cc/vulkan_mid_execution.cpp +++ b/gapii/cc/vulkan_mid_execution.cpp @@ -708,7 +708,12 @@ void VulkanSpy::EnumerateVulkanResources(CallObserver* observer) { VkBuffer copy_buffer; VkDeviceMemory copy_memory; - if (buffer.second->mMemory) { + bool denseBound = buffer.second->mMemory != nullptr; + bool sparseBound = (mBufferSparseBindings.find(buffer.first) != + mBufferSparseBindings.end()) && + (mBufferSparseBindings[buffer.first].count() > 0); + + if (denseBound || sparseBound) { need_to_clean_up_temps = true; VkPhysicalDeviceMemoryProperties properties; 
mImports.mVkInstanceFunctions[instance] @@ -836,11 +841,19 @@ void VulkanSpy::EnumerateVulkanResources(CallObserver* observer) { device_functions.vkDestroyCommandPool(device, pool, nullptr); } + uint32_t sparseBindCount = sparseBound ? mBufferSparseBindings[buffer.first].count() : 0; + std::vector sparseBinds; + for (const auto& b : mBufferSparseBindings[buffer.first]) { + sparseBinds.emplace_back(b.sparseMemoryBind()); + } + RecreateBindBufferMemory( observer, buffer.second->mDevice, buffer.second->mVulkanHandle, - buffer.second->mMemory ? buffer.second->mMemory->mVulkanHandle - : VkDeviceMemory(0), - buffer.second->mMemoryOffset); + denseBound ? buffer.second->mMemory->mVulkanHandle + : VkDeviceMemory(0), + buffer.second->mMemoryOffset, + sparseBound ? mBufferSparseBindings[buffer.first].count() : 0, + sparseBound ? sparseBinds.data() : nullptr); RecreateBufferData(observer, buffer.second->mDevice, buffer.second->mVulkanHandle, host_buffer_memory_index, @@ -920,7 +933,13 @@ void VulkanSpy::EnumerateVulkanResources(CallObserver* observer) { uint32_t imageLayout = info.mLayout; - if (image.second->mBoundMemory && + bool denseBound = image.second->mBoundMemory != nullptr; + bool opaqueSparseBound = + (mOpaqueImageSparseBindings.find(image.first) != + mOpaqueImageSparseBindings.end()) && + (mOpaqueImageSparseBindings[image.first].count() > 0); + + if ((denseBound || opaqueSparseBound) && info.mSamples == VkSampleCountFlagBits::VK_SAMPLE_COUNT_1_BIT && // Don't capture images with undefined layout. The resulting data // itself will be undefined. @@ -1113,11 +1132,19 @@ void VulkanSpy::EnumerateVulkanResources(CallObserver* observer) { device_functions.vkDestroyCommandPool(device, pool, nullptr); } + uint32_t opaqueSparseBindCount = opaqueSparseBound ? 
mOpaqueImageSparseBindings[image.first].count() : 0; + std::vector opaqueSparseBinds; + for (const auto& b : mOpaqueImageSparseBindings[image.first]) { + opaqueSparseBinds.emplace_back(b.sparseMemoryBind()); + } + RecreateBindImageMemory( observer, image.second->mDevice, image.second->mVulkanHandle, - image.second->mBoundMemory ? image.second->mBoundMemory->mVulkanHandle - : VkDeviceMemory(0), - image.second->mBoundMemoryOffset); + denseBound ? image.second->mBoundMemory->mVulkanHandle + : VkDeviceMemory(0), + image.second->mBoundMemoryOffset, + opaqueSparseBound ? opaqueSparseBindCount : 0, + opaqueSparseBound ? opaqueSparseBinds.data() : nullptr); RecreateImageData(observer, image.second->mDevice, image.second->mVulkanHandle, imageLayout, diff --git a/gapis/api/vulkan/CMakeFiles.cmake b/gapis/api/vulkan/CMakeFiles.cmake index 88e4dbd7c1..1a16acb344 100644 --- a/gapis/api/vulkan/CMakeFiles.cmake +++ b/gapis/api/vulkan/CMakeFiles.cmake @@ -38,6 +38,8 @@ set(files resolvables.pb.go resolvables.proto resources.go + sparse_binding_test.go + sparse_bindings.go state.go vulkan.go vulkan_terminator.go diff --git a/gapis/api/vulkan/custom_replay.go b/gapis/api/vulkan/custom_replay.go index aac0caeb94..d74ce555b3 100644 --- a/gapis/api/vulkan/custom_replay.go +++ b/gapis/api/vulkan/custom_replay.go @@ -18,6 +18,7 @@ import ( "context" "strings" + "github.com/google/gapid/core/log" "github.com/google/gapid/gapis/api" "github.com/google/gapid/gapis/memory" "github.com/google/gapid/gapis/replay/builder" @@ -1173,6 +1174,53 @@ func (a *RecreateBindImageMemory) Mutate(ctx context.Context, id api.CmdID, s *a return err } } + if a.OpaqueSparseBindCount > 0 { + cb := CommandBuilder{Thread: a.thread} + for _, bind := range a.POpaqueSparseBinds.Slice(0, uint64(a.OpaqueSparseBindCount), s.MemoryLayout).MustRead(ctx, a, s, nil) { + if !GetState(s).DeviceMemories.Contains(bind.Memory) { + // TODO: Move this message to report view + log.E(ctx, "Sparse memory binding for opaque image: 
%v, Memory: %v does not exist.", a.Image, bind.Memory) + } + } + opaqueMemBindInfo := VkSparseImageOpaqueMemoryBindInfo{ + Image: a.Image, + BindCount: a.OpaqueSparseBindCount, + PBinds: a.POpaqueSparseBinds, + } + opaqueMemBindInfoData := s.AllocDataOrPanic(ctx, opaqueMemBindInfo) + defer opaqueMemBindInfoData.Free() + queueBindInfo := VkBindSparseInfo{ + SType: VkStructureType_VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + PNext: NewVoidᶜᵖ(memory.Nullptr), + WaitSemaphoreCount: 0, + PWaitSemaphores: NewVkSemaphoreᶜᵖ(memory.Nullptr), + BufferBindCount: 0, + PBufferBinds: NewVkSparseBufferMemoryBindInfoᶜᵖ(memory.Nullptr), + ImageOpaqueBindCount: 1, + PImageOpaqueBinds: NewVkSparseImageOpaqueMemoryBindInfoᶜᵖ(opaqueMemBindInfoData.Ptr()), + ImageBindCount: 0, + PImageBinds: NewVkSparseImageMemoryBindInfoᶜᵖ(memory.Nullptr), + SignalSemaphoreCount: 0, + PSignalSemaphores: NewVkSemaphoreᶜᵖ(memory.Nullptr), + } + queueBindInfoData := s.AllocDataOrPanic(ctx, queueBindInfo) + defer queueBindInfoData.Free() + + queue := findSupportedQueueForDevice(a.Device, s, VkQueueFlags(VkQueueFlagBits_VK_QUEUE_SPARSE_BINDING_BIT)) + err := cb.VkQueueBindSparse( + queue, + 1, + queueBindInfoData.Ptr(), + VkFence(0), + VkResult_VK_SUCCESS, + ).AddRead( + queueBindInfoData.Data(), + ).AddRead( + opaqueMemBindInfoData.Data(), + ).Mutate(ctx, id, s, b) + + return err + } return nil } @@ -1343,6 +1391,53 @@ func (a *RecreateBindBufferMemory) Mutate(ctx context.Context, id api.CmdID, s * return err } } + if a.SparseBindCount > 0 { + cb := CommandBuilder{Thread: a.thread} + for _, bind := range a.PSparseBinds.Slice(0, uint64(a.SparseBindCount), s.MemoryLayout).MustRead(ctx, a, s, nil) { + if !GetState(s).DeviceMemories.Contains(bind.Memory) { + // TODO: Move this message to report view + log.E(ctx, "Sparse memory binding for buffer: %v, Memory: %v does not exist.", a.Buffer, bind.Memory) + } + } + bufMemBindInfo := VkSparseBufferMemoryBindInfo{ + Buffer: a.Buffer, + BindCount: a.SparseBindCount, + 
PBinds: a.PSparseBinds, + } + bufMemBindInfoData := s.AllocDataOrPanic(ctx, bufMemBindInfo) + defer bufMemBindInfoData.Free() + queueBindInfo := VkBindSparseInfo{ + SType: VkStructureType_VK_STRUCTURE_TYPE_BIND_SPARSE_INFO, + PNext: NewVoidᶜᵖ(memory.Nullptr), + WaitSemaphoreCount: 0, + PWaitSemaphores: NewVkSemaphoreᶜᵖ(memory.Nullptr), + BufferBindCount: 1, + PBufferBinds: NewVkSparseBufferMemoryBindInfoᶜᵖ(bufMemBindInfoData.Ptr()), + ImageOpaqueBindCount: 0, + PImageOpaqueBinds: NewVkSparseImageOpaqueMemoryBindInfoᶜᵖ(memory.Nullptr), + ImageBindCount: 0, + PImageBinds: NewVkSparseImageMemoryBindInfoᶜᵖ(memory.Nullptr), + SignalSemaphoreCount: 0, + PSignalSemaphores: NewVkSemaphoreᶜᵖ(memory.Nullptr), + } + queueBindInfoData := s.AllocDataOrPanic(ctx, queueBindInfo) + defer queueBindInfoData.Free() + + queue := findSupportedQueueForDevice(a.Device, s, VkQueueFlags(VkQueueFlagBits_VK_QUEUE_SPARSE_BINDING_BIT)) + err := cb.VkQueueBindSparse( + queue, + 1, + queueBindInfoData.Ptr(), + VkFence(0), + VkResult_VK_SUCCESS, + ).AddRead( + queueBindInfoData.Data(), + ).AddRead( + bufMemBindInfoData.Data(), + ).Mutate(ctx, id, s, b) + + return err + } return nil } @@ -1453,6 +1548,21 @@ func findGraphicsAndComputeQueueForDevice(device VkDevice, s *api.GlobalState) V return backupQueue } +// Returns a queue capable of the sorts of operations specified in the flags. +// If such a queue cannot be found, returns a VkQueue(0). 
+func findSupportedQueueForDevice(device VkDevice, s *api.GlobalState, flags VkQueueFlags) VkQueue { + c := GetState(s) + for _, v := range c.Queues { + if v.Device == device { + family := c.PhysicalDevices[c.Devices[device].PhysicalDevice].QueueFamilyProperties[v.Family] + if uint32(family.QueueFlags)&uint32(flags) == uint32(flags) { + return v.VulkanHandle + } + } + } + return VkQueue(0) +} + func (a *RecreateQueryPool) Mutate(ctx context.Context, id api.CmdID, s *api.GlobalState, b *builder.Builder) error { defer EnterRecreate(ctx, s)() l := s.MemoryLayout diff --git a/gapis/api/vulkan/externs.go b/gapis/api/vulkan/externs.go index a671034d40..dc8e787252 100644 --- a/gapis/api/vulkan/externs.go +++ b/gapis/api/vulkan/externs.go @@ -907,6 +907,9 @@ func (e externs) execPendingCommands(queue VkQueue) { if command.SemaphoreUpdate == SemaphoreUpdate_Unsignal { o.Semaphores[command.Semaphore].Signaled = false } + if command.SparseBinds != nil { + bindSparse(e.ctx, e.s, command.SparseBinds) + } if command.Buffer == VkCommandBuffer(0) { continue } @@ -1027,3 +1030,47 @@ func (e externs) popAndPushMarkerForNextSubpass(nextSubpass uint32) { GetState(e.s).pushMarkerGroup(name, true, RenderPassMarker) } } + +func bindSparse(ctx context.Context, s *api.GlobalState, binds *QueuedSparseBinds) { + st := GetState(s) + for buffer, binds := range binds.BufferBinds { + for _, bind := range binds.SparseMemoryBinds { + if _, ok := st.bufferSparseBindings[buffer]; !ok { + st.bufferSparseBindings[buffer] = sparseBindingList{} + } + st.bufferSparseBindings[buffer] = addBinding( + st.bufferSparseBindings[buffer], bind) + } + // update the data for UI + bufObj := st.Buffers.Get(buffer) + for i := 0; i < len(st.bufferSparseBindings[buffer]) || i < len(bufObj.SparseMemoryBindings); i++ { + if i >= len(st.bufferSparseBindings[buffer]) { + delete(bufObj.SparseMemoryBindings, uint32(i)) + } + bufObj.SparseMemoryBindings[uint32(i)] = st.bufferSparseBindings[buffer][i] + } + } + for image, 
binds := range binds.OpaqueImageBinds { + for _, bind := range binds.SparseMemoryBinds { + if _, ok := st.opaqueImageSparseBindings[image]; !ok { + st.opaqueImageSparseBindings[image] = sparseBindingList{} + } + st.opaqueImageSparseBindings[image] = addBinding( + st.opaqueImageSparseBindings[image], bind) + } + // update the data for UI + imgObj := st.Images.Get(image) + for i := 0; i < len(st.opaqueImageSparseBindings[image]) || i < len(imgObj.OpaqueSparseMemoryBindings); i++ { + if i >= len(st.opaqueImageSparseBindings[image]) { + delete(imgObj.OpaqueSparseMemoryBindings, uint32(i)) + } + imgObj.OpaqueSparseMemoryBindings[uint32(i)] = st.opaqueImageSparseBindings[image][i] + } + } + for image, binds := range binds.ImageBinds { + for _, bind := range binds.SparseImageMemoryBinds { + log.W(ctx, "sparse binding: image: %v, bindinfo: %v", image, bind) + log.W(ctx, "Image sparse residency binding is currently not supported") + } + } +} diff --git a/gapis/api/vulkan/sparse_binding_test.go b/gapis/api/vulkan/sparse_binding_test.go new file mode 100644 index 0000000000..2fda73a22d --- /dev/null +++ b/gapis/api/vulkan/sparse_binding_test.go @@ -0,0 +1,161 @@ +// Copyright (C) 2017 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package vulkan + +import ( + "reflect" + "testing" + + "github.com/google/gapid/core/assert" +) + +func TestAddBinding(t *testing.T) { + checkRecordedBindings := func(incoming []VkSparseMemoryBind, expected sparseBindingList) { + l := sparseBindingList{} + for _, i := range incoming { + l = addBinding(l, i) + } + assert.To(t).For("Expected recorded bindings: %v\nActual recorded bindings: %v", expected, l).That( + reflect.DeepEqual(expected, l)).Equals(true) + } + + newBinding := func(offset, size, mem, memoffset uint64) VkSparseMemoryBind { + return VkSparseMemoryBind{ + ResourceOffset: VkDeviceSize(offset), + Size: VkDeviceSize(size), + Memory: VkDeviceMemory(mem), + MemoryOffset: VkDeviceSize(memoffset), + } + } + + // empty + checkRecordedBindings([]VkSparseMemoryBind{}, sparseBindingList{}) + + // empty bindings + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(0, 0, 0, 0), + }, sparseBindingList{ + newBinding(0, 0, 0, 0), + }) + + // non-empty bindings + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(0, 512, 0, 10), + }, sparseBindingList{ + newBinding(0, 512, 0, 10), + }) + + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(768, 1024*1024, 0xffffffff11223344, 1024*10), + }, sparseBindingList{ + newBinding(768, 1024*1024, 0xffffffff11223344, 1024*10), + }) + + // order + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(1024*5, 1024, 0xf, 100), + newBinding(1024*4, 1024, 0xe, 100), + newBinding(1024*3, 1024, 0xd, 100), + newBinding(1024*2, 1024, 0xc, 100), + newBinding(1024, 1024, 0xb, 100), + newBinding(0, 1024, 0xa, 100), + }, sparseBindingList{ + newBinding(0, 1024, 0xa, 100), + newBinding(1024, 1024, 0xb, 100), + newBinding(1024*2, 1024, 0xc, 100), + newBinding(1024*3, 1024, 0xd, 100), + newBinding(1024*4, 1024, 0xe, 100), + newBinding(1024*5, 1024, 0xf, 100), + }) + + // conflict with existing bindings + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(1024, 1024, 0xa, 100), + newBinding(512, 1024, 0xb, 100), + 
}, sparseBindingList{ + newBinding(512, 1024, 0xb, 100), + newBinding(1024+512, 512, 0xa, 100+512), + }) + + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(0, 1024, 0xa, 100), + newBinding(512, 1024, 0xb, 100), + }, sparseBindingList{ + newBinding(0, 512, 0xa, 100), + newBinding(512, 1024, 0xb, 100), + }) + + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(0, 2048, 0xa, 100), + newBinding(512, 1024, 0xb, 100), + }, sparseBindingList{ + newBinding(0, 512, 0xa, 100), + newBinding(512, 1024, 0xb, 100), + newBinding(1024+512, 512, 0xa, 100+1024+512), + }) + + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(512, 1024, 0xa, 100), + newBinding(0, 2048, 0xb, 100), + }, sparseBindingList{ + newBinding(0, 2048, 0xb, 100), + }) + + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(0, 1000, 0xa, 100), + newBinding(100, 1000, 0xb, 100), + newBinding(200, 1000, 0xc, 100), + newBinding(500, 500, 0xd, 100), + newBinding(600, 100, 0xe, 100), + newBinding(300, 700, 0xf, 100), + }, sparseBindingList{ + newBinding(0, 100, 0xa, 100), + newBinding(100, 100, 0xb, 100), + newBinding(200, 100, 0xc, 100), + newBinding(300, 700, 0xf, 100), + newBinding(1000, 200, 0xc, 900), + }) + + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(0, 1000, 0xa, 100), + newBinding(100, 1000, 0xb, 100), + newBinding(200, 1000, 0xc, 100), + newBinding(500, 500, 0xd, 100), + newBinding(600, 100, 0xe, 100), + newBinding(300, 700, 0xf, 100), + newBinding(500, 100, 0xa, 200), + }, sparseBindingList{ + newBinding(0, 100, 0xa, 100), + newBinding(100, 100, 0xb, 100), + newBinding(200, 100, 0xc, 100), + newBinding(300, 200, 0xf, 100), + newBinding(500, 100, 0xa, 200), + newBinding(600, 400, 0xf, 400), + newBinding(1000, 200, 0xc, 900), + }) + + checkRecordedBindings([]VkSparseMemoryBind{ + newBinding(0, 1000, 0xa, 100), + newBinding(100, 1000, 0xb, 100), + newBinding(200, 1000, 0xc, 100), + newBinding(500, 500, 0xd, 100), + newBinding(600, 100, 0xe, 100), + newBinding(300, 
700, 0xf, 100), + newBinding(500, 100, 0xa, 200), + newBinding(0, 2000, 0xb, 500), + }, sparseBindingList{ + newBinding(0, 2000, 0xb, 500), + }) +} diff --git a/gapis/api/vulkan/sparse_bindings.go b/gapis/api/vulkan/sparse_bindings.go new file mode 100644 index 0000000000..400be95d28 --- /dev/null +++ b/gapis/api/vulkan/sparse_bindings.go @@ -0,0 +1,90 @@ +// Copyright (C) 2017 Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package vulkan + +import "github.com/google/gapid/core/math/interval" + +type sparseBindingList []VkSparseMemoryBind + +// Implements the interval.List interface. +func (l sparseBindingList) Length() int { + return len(l) +} + +func (l sparseBindingList) GetSpan(index int) interval.U64Span { + return l[index].span() +} + +func (b VkSparseMemoryBind) span() interval.U64Span { + return interval.U64Span{Start: uint64(b.ResourceOffset), End: uint64(b.ResourceOffset) + uint64(b.Size)} +} + +func addBinding(l sparseBindingList, b VkSparseMemoryBind) sparseBindingList { + first, count := interval.Intersect(l, b.span()) + if count == 0 { + // no conflict + i := interval.Search(l, func(span interval.U64Span) bool { + return span.Start >= b.span().End + }) + l = append(l[:i], append(sparseBindingList{b}, l[i:]...)...) + } else { + // has conflicts, truncate the existing spans to remove conflict, then add + // the incoming bind again as if there is no conflict. 
Note that it is + // guaranteed that there is no conflict among the existing spans + i := first + for i < first+count { + sp := l.GetSpan(i) + if sp.Start < b.span().Start && + sp.End <= b.span().End && + sp.End > b.span().Start { + // truncate the tail of sp + overlap := VkDeviceSize(sp.End - b.span().Start) + l[i].Size = l[i].Size - VkDeviceSize(overlap) + i++ + + } else if sp.Start >= b.span().Start && + sp.End > b.span().End && + sp.Start < b.span().End { + // truncate the head of sp + overlap := VkDeviceSize(b.span().End - sp.Start) + l[i].Size = l[i].Size - VkDeviceSize(overlap) + l[i].MemoryOffset += overlap + l[i].ResourceOffset += overlap + i++ + + } else if sp.Start < b.span().Start && + sp.End > b.span().End { + // split sp + newB := l[i] + newB.MemoryOffset += VkDeviceSize(b.span().End - sp.Start) + newB.ResourceOffset += VkDeviceSize(b.span().End - sp.Start) + newB.Size -= VkDeviceSize(b.span().End - sp.Start) + + l[i].Size -= VkDeviceSize(sp.End - b.span().Start) + l = addBinding(l, newB) + // Should not have any other intersects + break + + } else if sp.Start >= b.span().Start && + sp.End <= b.span().End { + // remove sp, no need to i++ + l = append(l[:i], l[i+1:]...) 
+ count-- + } + } + l = addBinding(l, b) + } + return l +} diff --git a/gapis/api/vulkan/templates/vk_spy_helpers.cpp.tmpl b/gapis/api/vulkan/templates/vk_spy_helpers.cpp.tmpl index 1a4fd0b06b..50a3b8c503 100644 --- a/gapis/api/vulkan/templates/vk_spy_helpers.cpp.tmpl +++ b/gapis/api/vulkan/templates/vk_spy_helpers.cpp.tmpl @@ -533,6 +533,48 @@ void VulkanSpy::execPendingCommands(CallObserver* observer, VkQueue queue) { } else if (cmd.mSemaphoreUpdate == SemaphoreUpdate::Unsignal) { Semaphores[cmd.mSemaphore]->mSignaled = false; } + if (cmd.mSparseBinds != nullptr) { + for (const auto& bb : cmd.mSparseBinds->mBufferBinds) { + VkBuffer buf = bb.first; + if (mBufferSparseBindings.find(buf) == mBufferSparseBindings.end()) { + mBufferSparseBindings[buf] = SparseBindingList(); + } + for (const auto& b : bb.second->mSparseMemoryBinds) { + mBufferSparseBindings[buf].replace( + VulkanSpy::SparseBindingInterval(b.second)); + } + for (uint32_t i = 0; i < Buffers[buf]->mSparseMemoryBindings.size() || + i < mBufferSparseBindings[buf].count(); + i++) { + if (i >= mBufferSparseBindings[buf].count()) { + Buffers[buf]->mSparseMemoryBindings.erase(i); + } + Buffers[buf]->mSparseMemoryBindings[i] = + mBufferSparseBindings[buf][i].sparseMemoryBind(); + } + } + for (const auto& ob : cmd.mSparseBinds->mOpaqueImageBinds) { + VkImage img = ob.first; + if (mOpaqueImageSparseBindings.find(img) == + mOpaqueImageSparseBindings.end()) { + mOpaqueImageSparseBindings[img] = SparseBindingList(); + } + for (const auto& b : ob.second->mSparseMemoryBinds) { + mOpaqueImageSparseBindings[img].replace( + VulkanSpy::SparseBindingInterval(b.second)); + } + for (uint32_t i = 0; + i < Images[img]->mOpaqueSparseMemoryBindings.size() || + i < mOpaqueImageSparseBindings[img].count(); + i++) { + if (i >= mOpaqueImageSparseBindings[img].count()) { + Images[img]->mOpaqueSparseMemoryBindings.erase(i); + } + Images[img]->mOpaqueSparseMemoryBindings[i] = + mOpaqueImageSparseBindings[img][i].sparseMemoryBind(); + } 
+ } + } if (cmd.mBuffer == 0) { continue; } diff --git a/gapis/api/vulkan/vulkan.api b/gapis/api/vulkan/vulkan.api index ca70940af4..39605a7c1d 100644 --- a/gapis/api/vulkan/vulkan.api +++ b/gapis/api/vulkan/vulkan.api @@ -1597,7 +1597,7 @@ class VkSparseImageMemoryBind { class VkSparseImageMemoryBindInfo { VkImage image u32 bindCount - const VkSparseMemoryBind* pBinds + const VkSparseImageMemoryBind* pBinds } @serialize @@ -2797,7 +2797,7 @@ cmd VkResult vkQueueSubmit( for j in (0 .. info.waitSemaphoreCount) { LastBoundQueue.PendingCommands[len(LastBoundQueue.PendingCommands)] = CommandReference(as!VkCommandBuffer(0), 0, cmd_vkNoCommand, 0, - Unsignal, wait_semaphores[j]) + Unsignal, wait_semaphores[j], null) } read(info.pWaitDstStageMask[0:info.waitSemaphoreCount]) @@ -2838,7 +2838,7 @@ cmd VkResult vkQueueSubmit( for j in (0 .. info.signalSemaphoreCount) { LastBoundQueue.PendingCommands[len(LastBoundQueue.PendingCommands)] = CommandReference(as!VkCommandBuffer(0), 0, cmd_vkNoCommand, 0, - Signal, signal_semaphores[j]) + Signal, signal_semaphores[j], null) } nextSubcontext() } @@ -3128,7 +3128,12 @@ cmd void RecreateBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, - VkDeviceSize offset) { + VkDeviceSize offset, + u32 sparseBindCount, + const VkSparseMemoryBind* pSparseBinds) { + if sparseBindCount > 0 { + read(pSparseBinds[0:sparseBindCount]) + } } @indirect("VkDevice") @@ -3164,7 +3169,12 @@ cmd void RecreateBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, - VkDeviceSize offset) { + VkDeviceSize offset, + u32 opaqueSparseBindCount, + const VkSparseMemoryBind* pOpaqueSparseBinds) { + if opaqueSparseBindCount > 0 { + read(pOpaqueSparseBinds[0:opaqueSparseBindCount]) + } } @indirect("VkDevice") @@ -3247,30 +3257,67 @@ cmd VkResult vkQueueBindSparse( u32 bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) { + LastBoundQueue = Queues[queue] infos := pBindInfo[0:bindInfoCount] + enterSubcontext() for i in 
(0 .. bindInfoCount) { - read(infos[i].pWaitSemaphores[0:infos[i].waitSemaphoreCount]) + info := infos[i] + wait_semaphores := info.pWaitSemaphores[0:info.waitSemaphoreCount] + for j in (0 .. info.waitSemaphoreCount) { + LastBoundQueue.PendingCommands[len(LastBoundQueue.PendingCommands)] + = CommandReference(as!VkCommandBuffer(0), 0, cmd_vkNoCommand, 0, + Unsignal, wait_semaphores[j], null) + } + + queuedBinds := new!QueuedSparseBinds() - bufferBinds := infos[i].pBufferBinds[0:infos[i].bufferBindCount] - for j in (0 .. infos[i].bufferBindCount) { + bufferBinds := info.pBufferBinds[0:info.bufferBindCount] + for j in (0 .. info.bufferBindCount) { bufferBindInfo := bufferBinds[j] - read(bufferBindInfo.pBinds[0:bufferBindInfo.bindCount]) + bindsToQueue := new!SparseMemoryBinds() + memoryBinds := bufferBindInfo.pBinds[0:bufferBindInfo.bindCount] + for k in (0 .. bufferBindInfo.bindCount) { + bindsToQueue.SparseMemoryBinds[k] = memoryBinds[k] + } + queuedBinds.BufferBinds[bufferBindInfo.buffer] = bindsToQueue } - opaqueBinds := infos[i].pImageOpaqueBinds[0:infos[i].imageOpaqueBindCount] - for j in (0 .. infos[i].imageOpaqueBindCount) { + opaqueBinds := info.pImageOpaqueBinds[0:info.imageOpaqueBindCount] + for j in (0 .. info.imageOpaqueBindCount) { opaqueBindInfo := opaqueBinds[j] - read(opaqueBindInfo.pBinds[0:opaqueBindInfo.bindCount]) + bindsToQueue := new!SparseMemoryBinds() + memoryBinds := opaqueBindInfo.pBinds[0:opaqueBindInfo.bindCount] + for k in (0 .. opaqueBindInfo.bindCount) { + bindsToQueue.SparseMemoryBinds[k] = memoryBinds[k] + } + queuedBinds.OpaqueImageBinds[opaqueBindInfo.image] = bindsToQueue } - imageBinds := infos[i].pImageBinds[0:infos[i].imageBindCount] - for j in (0 .. infos[i].imageBindCount) { + imageBinds := info.pImageBinds[0:info.imageBindCount] + for j in (0 .. 
info.imageBindCount) { imageBindInfo := imageBinds[j] - read(imageBindInfo.pBinds[0:imageBindInfo.bindCount]) + bindsToQueue := new!SparseImageMemoryBinds() + imageMemoryBinds := imageBindInfo.pBinds[0:imageBindInfo.bindCount] + for k in (0 .. imageBindInfo.bindCount) { + bindsToQueue.SparseImageMemoryBinds[k] = imageMemoryBinds[k] + } + queuedBinds.ImageBinds[imageBindInfo.image] = bindsToQueue } - read(infos[i].pSignalSemaphores[0:infos[i].signalSemaphoreCount]) + LastBoundQueue.PendingCommands[len(LastBoundQueue.PendingCommands)] + = CommandReference(as!VkCommandBuffer(0), 0, cmd_vkNoCommand, 0, + None, as!VkSemaphore(0), queuedBinds) + + signal_semaphores := info.pSignalSemaphores[0:info.signalSemaphoreCount] + for j in (0 .. info.signalSemaphoreCount) { + LastBoundQueue.PendingCommands[len(LastBoundQueue.PendingCommands)] + = CommandReference(as!VkCommandBuffer(0), 0, cmd_vkNoCommand, 0, + Signal, signal_semaphores[j], null) + } } + leaveSubcontext() + execPendingCommands(queue) + fence return ? 
} @@ -8946,6 +8993,20 @@ enum SemaphoreUpdate { Signal = 2 } +@internal class SparseMemoryBinds { + @unused map!(u32, VkSparseMemoryBind) SparseMemoryBinds +} + +@internal class SparseImageMemoryBinds { + @unused map!(u32, VkSparseImageMemoryBind) SparseImageMemoryBinds +} + +@internal class QueuedSparseBinds { + @unused map!(VkBuffer, ref!SparseMemoryBinds) BufferBinds + @unused map!(VkImage, ref!SparseMemoryBinds) OpaqueImageBinds + @unused map!(VkImage, ref!SparseImageMemoryBinds) ImageBinds +} + @internal class CommandReference { @unused VkCommandBuffer Buffer @unused u32 CommandIndex @@ -8953,6 +9014,7 @@ enum SemaphoreUpdate { @unused u32 MapIndex @unused SemaphoreUpdate SemaphoreUpdate @unused VkSemaphore Semaphore + @unused ref!QueuedSparseBinds SparseBinds } @internal class BufferCommands { @@ -9047,6 +9109,7 @@ enum SemaphoreUpdate { @unused BufferInfo Info ref!DeviceMemoryObject Memory VkDeviceSize MemoryOffset + map!(u32, VkSparseMemoryBind) SparseMemoryBindings @unused ref!QueueObject LastBoundQueue @unused ref!VulkanDebugMarkerInfo DebugInfo } @@ -9083,6 +9146,7 @@ enum SemaphoreUpdate { @unused ref!QueueObject LastBoundQueue ref!DeviceMemoryObject BoundMemory VkDeviceSize BoundMemoryOffset + map!(u32, VkSparseMemoryBind) OpaqueSparseMemoryBindings @unused bool IsSwapchainImage VkImage VulkanHandle ImageInfo Info diff --git a/gapis/api/vulkan/vulkan.go b/gapis/api/vulkan/vulkan.go index bd41d8b2ad..9a663f890d 100644 --- a/gapis/api/vulkan/vulkan.go +++ b/gapis/api/vulkan/vulkan.go @@ -30,14 +30,16 @@ import ( ) type CustomState struct { - SubCmdIdx api.SubCmdIdx - CurrentSubmission api.Cmd - PreSubcommand func(interface{}) - PostSubcommand func(interface{}) - AddCommand func(interface{}) - IsRebuilding bool - pushMarkerGroup func(name string, next bool, ty MarkerType) - popMarkerGroup func(ty MarkerType) + SubCmdIdx api.SubCmdIdx + CurrentSubmission api.Cmd + PreSubcommand func(interface{}) + PostSubcommand func(interface{}) + AddCommand 
func(interface{}) + IsRebuilding bool + pushMarkerGroup func(name string, next bool, ty MarkerType) + popMarkerGroup func(ty MarkerType) + bufferSparseBindings map[VkBuffer]sparseBindingList + opaqueImageSparseBindings map[VkImage]sparseBindingList } func getStateObject(s *api.GlobalState) *State {