35class VulkanDescriptorSet;
// Clamps a uniform-buffer binding range so that it never extends past the end
// of the buffer and never exceeds the device's maximum UBO binding size.
//
// @param offset      byte offset of the binding within the buffer
// @param bufferSize  total size of the buffer, in bytes
// @param maxSize     device limit (VkPhysicalDeviceLimits::maxUniformBufferRange)
// @return the largest number of bytes that may safely be bound at `offset`
//         (0 if the offset is at or past the end of the buffer)
uint64_t clamp_ubo_binding_size(uint64_t offset,
                                uint64_t bufferSize,
                                uint64_t maxSize) {
    // Guard against unsigned wraparound: the original `bufferSize - offset`
    // would underflow for offset > bufferSize and report maxSize bytes
    // available, binding past the end of the buffer.
    if (offset >= bufferSize) {
        return 0;
    }
    const uint64_t remainSize = bufferSize - offset;
    return std::min(remainSize, maxSize);
}
54 if (sharedContext->
isProtected() == Protected::kYes) {
68 CreateCommandPool(sharedContext->
device(), &cmdPoolInfo,
nullptr, &
pool));
81 VkCommandBuffer primaryCmdBuffer;
85 AllocateCommandBuffers(sharedContext->
device(), &cmdInfo, &primaryCmdBuffer));
88 DestroyCommandPool(sharedContext->
device(),
pool,
nullptr));
98VulkanCommandBuffer::VulkanCommandBuffer(VkCommandPool
pool,
99 VkCommandBuffer primaryCommandBuffer,
103 , fPrimaryCommandBuffer(primaryCommandBuffer)
104 , fSharedContext(sharedContext)
105 , fResourceProvider(resourceProvider) {
119 DestroyFence(fSharedContext->
device(), fSubmitFence,
nullptr));
123 DestroyCommandPool(fSharedContext->
device(), fPool,
nullptr));
129 fActiveGraphicsPipeline =
nullptr;
130 fBindUniformBuffers =
true;
132 fBoundIndexBufferOffset = 0;
134 fBoundIndirectBufferOffset = 0;
136 fNumTextureSamplers = 0;
137 fUniformBuffersToBind.fill({
nullptr, 0});
138 for (
int i = 0; i < 4; ++i) {
139 fCachedBlendConstant[i] = -1.0;
141 for (
auto& boundInputBuffer : fBoundInputBuffers) {
144 for (
auto& boundInputOffset : fBoundInputBufferOffsets) {
145 boundInputOffset = 0;
154void VulkanCommandBuffer::begin() {
159 cmdBufferBeginInfo.
pNext =
nullptr;
164 BeginCommandBuffer(fPrimaryCommandBuffer, &cmdBufferBeginInfo));
168void VulkanCommandBuffer::end() {
172 this->submitPipelineBarriers();
181 if (!waitSemaphores) {
186 for (
size_t i = 0; i < numWaitSemaphores; ++i) {
187 auto& semaphore = waitSemaphores[i];
189 fWaitSemaphores.
push_back(semaphore.getVkSemaphore());
196 if (!signalSemaphores) {
201 for (
size_t i = 0; i < numSignalSemaphores; ++i) {
202 auto& semaphore = signalSemaphores[i];
204 fSignalSemaphores.
push_back(semaphore.getVkSemaphore());
219 newLayout =
texture->currentLayout();
224 uint32_t currentQueueFamilyIndex =
texture->currentQueueFamilyIndex();
226 auto isSpecialQueue = [](uint32_t queueFamilyIndex) {
230 if (isSpecialQueue(currentQueueFamilyIndex) && isSpecialQueue(newQueueFamilyIndex)) {
236 texture->setImageLayoutAndQueueIndex(
this,
241 newQueueFamilyIndex);
248 const VkSemaphore* waitSemaphores,
250 uint32_t commandBufferCount,
251 const VkCommandBuffer* commandBuffers,
252 uint32_t signalCount,
253 const VkSemaphore* signalSemaphores,
256 if (protectedContext == Protected::kYes) {
259 protectedSubmitInfo.
pNext =
nullptr;
266 submitInfo.
pNext = protectedContext == Protected::kYes ? &protectedSubmitInfo :
nullptr;
290 fSharedContext, err, CreateFence(
device, &fenceInfo,
nullptr, &fSubmitFence));
302 int waitCount = fWaitSemaphores.
size();
304 for (
int i = 0; i < waitCount; ++i) {
313 fWaitSemaphores.
data(),
316 &fPrimaryCommandBuffer,
317 fSignalSemaphores.
size(),
318 fSignalSemaphores.
data(),
320 fWaitSemaphores.
clear();
321 fSignalSemaphores.
clear();
339 GetFenceStatus(fSharedContext->
device(), fSubmitFence));
349 SKGPU_LOG_F(
"Error calling vkGetFenceStatus. Error: %d", err);
350 SK_ABORT(
"Got an invalid fence status");
360 WaitForFences(fSharedContext->
device(),
367void VulkanCommandBuffer::updateRtAdjustUniform(
const SkRect& viewport) {
368 SkASSERT(fActive && !fActiveRenderPass);
375 float invTwoW = 2.f / viewport.
width();
376 float invTwoH = 2.f / viewport.
height();
377 const float rtAdjust[4] = {invTwoW, invTwoH, -1.f -
x * invTwoW, -1.f -
y * invTwoH};
380 const VulkanBuffer* intrinsicVulkanBuffer =
381 static_cast<VulkanBuffer*
>(intrinsicUniformBuffer.
get());
385 {intrinsicUniformBuffer.
get(), 0};
387 this->updateBuffer(intrinsicVulkanBuffer,
400 const Texture* depthStencilTexture,
403 for (
const auto& drawPass : drawPasses) {
408 drawPass->sampledTextures();
412 textureProxy->texture()));
418 this->submitPipelineBarriers();
422 this->updateRtAdjustUniform(viewport);
423 this->setViewport(viewport);
425 if (!this->beginRenderPass(renderPassDesc, colorTexture, resolveTexture, depthStencilTexture)) {
429 for (
const auto& drawPass : drawPasses) {
430 this->addDrawPass(drawPass.get());
433 this->endRenderPass();
437bool VulkanCommandBuffer::updateLoadMSAAVertexBuffer() {
442 const VulkanBuffer* vulkanVertexBuffer =
static_cast<const VulkanBuffer*
>(vertexBuffer);
448 static constexpr float kVertices[8] = { 1.f, 1.f,
452 this->updateBuffer(vulkanVertexBuffer,
463bool VulkanCommandBuffer::updateAndBindLoadMSAAInputAttachment(
const VulkanTexture& resolveTexture)
479 textureInfo.
imageLayout = resolveTexture.currentLayout();
484 writeInfo.
pNext =
nullptr;
485 writeInfo.
dstSet = *
set->descriptorSet();
495 UpdateDescriptorSets(fSharedContext->
device(),
502 CmdBindDescriptorSets(fPrimaryCommandBuffer,
504 fActiveGraphicsPipeline->
layout(),
507 set->descriptorSet(),
515bool VulkanCommandBuffer::loadMSAAFromResolve(
const RenderPassDesc& renderPassDesc,
516 VulkanTexture& resolveTexture,
519 fResourceProvider->findOrCreateLoadMSAAPipeline(renderPassDesc);
521 SKGPU_LOG_E(
"Unable to create pipeline to load resolve texture into MSAA attachment");
525 this->bindGraphicsPipeline(loadPipeline.
get());
528 fBindUniformBuffers =
false;
529 fBindTextureSamplers =
false;
531 this->setScissor(0, 0, dstDimensions.
width(), dstDimensions.
height());
533 if (!this->updateAndBindLoadMSAAInputAttachment(resolveTexture)) {
534 SKGPU_LOG_E(
"Unable to update and bind an input attachment descriptor for loading MSAA "
557 fBindUniformBuffers =
true;
558 fBindTextureSamplers =
true;
563void setup_texture_layouts(VulkanCommandBuffer* cmdBuf,
564 VulkanTexture* colorTexture,
565 VulkanTexture* resolveTexture,
566 VulkanTexture* depthStencilTexture,
567 bool loadMSAAFromResolve) {
569 colorTexture->setImageLayout(cmdBuf,
575 if (resolveTexture) {
576 if (loadMSAAFromResolve) {
581 resolveTexture->setImageLayout(cmdBuf,
589 resolveTexture->setImageLayout(cmdBuf,
598 if (depthStencilTexture) {
599 depthStencilTexture->setImageLayout(cmdBuf,
608void track_attachments(VulkanCommandBuffer* cmdBuf,
609 VulkanTexture* colorTexture,
610 VulkanTexture* resolveTexture,
611 VulkanTexture* depthStencilTexture) {
613 cmdBuf->trackResource(
sk_ref_sp(colorTexture));
616 cmdBuf->trackResource(
sk_ref_sp(resolveTexture));
618 if (depthStencilTexture) {
619 cmdBuf->trackResource(
sk_ref_sp(depthStencilTexture));
624 VulkanTexture* colorTexture,
625 VulkanTexture* resolveTexture,
626 VulkanTexture* depthStencilTexture) {
628 VkImageView& colorAttachmentView = attachmentViews.
push_back();
629 colorAttachmentView =
632 if (resolveTexture) {
633 VkImageView& resolveView = attachmentViews.
push_back();
639 if (depthStencilTexture) {
640 VkImageView& stencilView = attachmentViews.
push_back();
646void gather_clear_values(
648 const RenderPassDesc& renderPassDesc,
649 VulkanTexture* colorTexture,
650 VulkanTexture* depthStencilTexture,
651 int depthStencilAttachmentIdx) {
657 colorAttachmentClear.
color = {{renderPassDesc.fClearColor[0],
658 renderPassDesc.fClearColor[1],
659 renderPassDesc.fClearColor[2],
660 renderPassDesc.fClearColor[3]}};
663 if (depthStencilTexture) {
664 VkClearValue& depthStencilAttachmentClear = clearValues.
at(depthStencilAttachmentIdx);
665 memset(&depthStencilAttachmentClear, 0,
sizeof(
VkClearValue));
666 depthStencilAttachmentClear.
depthStencil = {renderPassDesc.fClearDepth,
667 renderPassDesc.fClearStencil};
673bool VulkanCommandBuffer::beginRenderPass(
const RenderPassDesc& renderPassDesc,
674 const Texture* colorTexture,
675 const Texture* resolveTexture,
676 const Texture* depthStencilTexture) {
678 VulkanTexture* vulkanColorTexture =
679 const_cast<VulkanTexture*
>(
static_cast<const VulkanTexture*
>(colorTexture));
680 VulkanTexture* vulkanResolveTexture =
681 const_cast<VulkanTexture*
>(
static_cast<const VulkanTexture*
>(resolveTexture));
682 VulkanTexture* vulkanDepthStencilTexture =
683 const_cast<VulkanTexture*
>(
static_cast<const VulkanTexture*
>(depthStencilTexture));
690 bool loadMSAAFromResolve = renderPassDesc.fColorResolveAttachment.fTextureInfo.isValid() &&
691 renderPassDesc.fColorResolveAttachment.fLoadOp ==
LoadOp::kLoad;
692 if (loadMSAAFromResolve && (!vulkanResolveTexture || !vulkanColorTexture ||
693 !vulkanResolveTexture->supportsInputAttachmentUsage())) {
694 SKGPU_LOG_E(
"Cannot begin render pass. In order to load MSAA from resolve, the color "
695 "attachment must have input attachment usage and both the color and resolve "
696 "attachments must be valid.");
700 track_attachments(
this, vulkanColorTexture, vulkanResolveTexture, vulkanDepthStencilTexture);
703 setup_texture_layouts(
this,
705 vulkanResolveTexture,
706 vulkanDepthStencilTexture,
707 loadMSAAFromResolve);
709 static constexpr int kMaxNumAttachments = 3;
712 gather_attachment_views(
713 attachmentViews, vulkanColorTexture, vulkanResolveTexture, vulkanDepthStencilTexture);
719 int depthStencilAttachmentIndex = resolveTexture ? 2 : 1;
720 gather_clear_values(clearValues,
723 vulkanDepthStencilTexture,
724 depthStencilAttachmentIndex);
727 fResourceProvider->findOrCreateRenderPass(renderPassDesc,
false);
728 if (!vulkanRenderPass) {
732 this->submitPipelineBarriers();
735 int frameBufferWidth = 0;
736 int frameBufferHeight = 0;
740 frameBufferWidth = colorTexture->dimensions().width();
741 frameBufferHeight = colorTexture->dimensions().height();
742 }
else if (depthStencilTexture) {
743 frameBufferWidth = depthStencilTexture->dimensions().width();
744 frameBufferHeight = depthStencilTexture->dimensions().height();
752 SKGPU_LOG_W(
"Could not create Vulkan Framebuffer");
759 beginInfo.
pNext =
nullptr;
760 beginInfo.
renderPass = vulkanRenderPass->renderPass();
761 beginInfo.
framebuffer = framebuffer->framebuffer();
763 { (
unsigned int) frameBufferWidth, (
unsigned int) frameBufferHeight }};
770 if (loadMSAAFromResolve) {
777 if (!this->updateLoadMSAAVertexBuffer()) {
778 SKGPU_LOG_E(
"Failed to update vertex buffer for loading MSAA from resolve");
785 this->submitPipelineBarriers();
788 CmdBeginRenderPass(fPrimaryCommandBuffer,
791 fActiveRenderPass =
true;
793 if (loadMSAAFromResolve && !this->loadMSAAFromResolve(renderPassDesc,
794 *vulkanResolveTexture,
795 vulkanColorTexture->dimensions())) {
797 this->endRenderPass();
806void VulkanCommandBuffer::endRenderPass() {
809 fActiveRenderPass =
false;
812void VulkanCommandBuffer::addDrawPass(
const DrawPass* drawPass) {
813 drawPass->addResourceRefs(
this);
816 case DrawPassCommands::Type::kBindGraphicsPipeline: {
817 auto bgp =
static_cast<DrawPassCommands::BindGraphicsPipeline*
>(cmdPtr);
818 this->bindGraphicsPipeline(drawPass->getPipeline(bgp->fPipelineIndex));
821 case DrawPassCommands::Type::kSetBlendConstants: {
822 auto sbc =
static_cast<DrawPassCommands::SetBlendConstants*
>(cmdPtr);
823 this->setBlendConstants(sbc->fBlendConstants);
826 case DrawPassCommands::Type::kBindUniformBuffer: {
827 auto bub =
static_cast<DrawPassCommands::BindUniformBuffer*
>(cmdPtr);
828 this->recordBufferBindingInfo(bub->fInfo, bub->fSlot);
831 case DrawPassCommands::Type::kBindDrawBuffers: {
832 auto bdb =
static_cast<DrawPassCommands::BindDrawBuffers*
>(cmdPtr);
833 this->bindDrawBuffers(
834 bdb->fVertices, bdb->fInstances, bdb->fIndices, bdb->fIndirect);
837 case DrawPassCommands::Type::kBindTexturesAndSamplers: {
838 auto bts =
static_cast<DrawPassCommands::BindTexturesAndSamplers*
>(cmdPtr);
839 this->recordTextureAndSamplerDescSet(*drawPass, *bts);
842 case DrawPassCommands::Type::kSetScissor: {
843 auto ss =
static_cast<DrawPassCommands::SetScissor*
>(cmdPtr);
848 case DrawPassCommands::Type::kDraw: {
849 auto draw =
static_cast<DrawPassCommands::Draw*
>(cmdPtr);
850 this->draw(draw->fType, draw->fBaseVertex, draw->fVertexCount);
853 case DrawPassCommands::Type::kDrawIndexed: {
854 auto draw =
static_cast<DrawPassCommands::DrawIndexed*
>(cmdPtr);
856 draw->fType, draw->fBaseIndex, draw->fIndexCount, draw->fBaseVertex);
859 case DrawPassCommands::Type::kDrawInstanced: {
860 auto draw =
static_cast<DrawPassCommands::DrawInstanced*
>(cmdPtr);
861 this->drawInstanced(draw->fType,
865 draw->fInstanceCount);
868 case DrawPassCommands::Type::kDrawIndexedInstanced: {
869 auto draw =
static_cast<DrawPassCommands::DrawIndexedInstanced*
>(cmdPtr);
870 this->drawIndexedInstanced(draw->fType,
875 draw->fInstanceCount);
878 case DrawPassCommands::Type::kDrawIndirect: {
879 auto draw =
static_cast<DrawPassCommands::DrawIndirect*
>(cmdPtr);
880 this->drawIndirect(draw->fType);
883 case DrawPassCommands::Type::kDrawIndexedIndirect: {
884 auto draw =
static_cast<DrawPassCommands::DrawIndexedIndirect*
>(cmdPtr);
885 this->drawIndexedIndirect(draw->fType);
892void VulkanCommandBuffer::bindGraphicsPipeline(
const GraphicsPipeline* graphicsPipeline) {
893 fActiveGraphicsPipeline =
static_cast<const VulkanGraphicsPipeline*
>(graphicsPipeline);
897 fActiveGraphicsPipeline->
pipeline()));
901 fBindUniformBuffers =
true;
904void VulkanCommandBuffer::setBlendConstants(
float* blendConstants) {
906 if (0 != memcmp(blendConstants, fCachedBlendConstant, 4 *
sizeof(
float))) {
908 CmdSetBlendConstants(fPrimaryCommandBuffer, blendConstants));
909 memcpy(fCachedBlendConstant, blendConstants, 4 *
sizeof(
float));
913void VulkanCommandBuffer::recordBufferBindingInfo(
const BindBufferInfo&
info,
UniformSlot slot) {
914 unsigned int bufferIndex = 0;
926 fUniformBuffersToBind[bufferIndex] =
info;
927 fBindUniformBuffers =
true;
930void VulkanCommandBuffer::syncDescriptorSets() {
931 if (fBindUniformBuffers) {
932 this->bindUniformBuffers();
936 fBindTextureSamplers =
true;
938 if (fBindTextureSamplers) {
939 this->bindTextureSamplers();
943void VulkanCommandBuffer::bindUniformBuffers() {
944 fBindUniformBuffers =
false;
962 SKGPU_LOG_E(
"Unable to find or create descriptor set");
965 static uint64_t maxUniformBufferRange =
static_cast<const VulkanSharedContext*
>(
966 fSharedContext)->vulkanCaps().maxUniformBufferRange();
968 for (
int i = 0; i < descriptors.
size(); i++) {
969 int descriptorBindingIndex = descriptors.
at(i).bindingIndex;
970 SkASSERT(
static_cast<unsigned long>(descriptorBindingIndex)
971 < fUniformBuffersToBind.size());
972 if (fUniformBuffersToBind[descriptorBindingIndex].fBuffer) {
975 auto vulkanBuffer =
static_cast<const VulkanBuffer*
>(
976 fUniformBuffersToBind[descriptorBindingIndex].fBuffer);
977 bufferInfo.
buffer = vulkanBuffer->vkBuffer();
978 bufferInfo.
offset = fUniformBuffersToBind[descriptorBindingIndex].fOffset;
979 bufferInfo.
range = clamp_ubo_binding_size(bufferInfo.
offset, vulkanBuffer->size(),
980 maxUniformBufferRange);
985 writeInfo.
pNext =
nullptr;
986 writeInfo.
dstSet = *
set->descriptorSet();
987 writeInfo.
dstBinding = descriptorBindingIndex;
1001 UpdateDescriptorSets(fSharedContext->
device(),
1009 CmdBindDescriptorSets(fPrimaryCommandBuffer,
1011 fActiveGraphicsPipeline->
layout(),
1014 set->descriptorSet(),
1020void VulkanCommandBuffer::bindDrawBuffers(
const BindBufferInfo& vertices,
1021 const BindBufferInfo& instances,
1022 const BindBufferInfo& indices,
1023 const BindBufferInfo& indirect) {
1024 this->bindVertexBuffers(vertices.fBuffer,
1028 this->bindIndexBuffer(indices.fBuffer, indices.fOffset);
1029 this->bindIndirectBuffer(indirect.fBuffer, indirect.fOffset);
1032void VulkanCommandBuffer::bindVertexBuffers(
const Buffer* vertexBuffer,
1033 size_t vertexOffset,
1034 const Buffer* instanceBuffer,
1035 size_t instanceOffset) {
1036 this->bindInputBuffer(vertexBuffer, vertexOffset,
1038 this->bindInputBuffer(instanceBuffer, instanceOffset,
1045 VkBuffer vkBuffer =
static_cast<const VulkanBuffer*
>(
buffer)->vkBuffer();
1047 if (vkBuffer != fBoundInputBuffers[binding] ||
1048 offset != fBoundInputBufferOffsets[binding]) {
1050 CmdBindVertexBuffers(fPrimaryCommandBuffer,
1055 fBoundInputBuffers[binding] = vkBuffer;
1056 fBoundInputBufferOffsets[binding] =
offset;
1062void VulkanCommandBuffer::bindIndexBuffer(
const Buffer* indexBuffer,
size_t offset) {
1064 VkBuffer vkBuffer =
static_cast<const VulkanBuffer*
>(indexBuffer)->vkBuffer();
1066 if (vkBuffer != fBoundIndexBuffer ||
offset != fBoundIndexBufferOffset) {
1071 fBoundIndexBuffer = vkBuffer;
1072 fBoundIndexBufferOffset =
offset;
1077 fBoundIndexBufferOffset = 0;
1081void VulkanCommandBuffer::bindIndirectBuffer(
const Buffer* indirectBuffer,
size_t offset) {
1083 if (indirectBuffer) {
1084 fBoundIndirectBuffer =
static_cast<const VulkanBuffer*
>(indirectBuffer)->vkBuffer();
1085 fBoundIndirectBufferOffset =
offset;
1089 fBoundIndirectBufferOffset = 0;
1093void VulkanCommandBuffer::recordTextureAndSamplerDescSet(
1094 const DrawPass& drawPass,
const DrawPassCommands::BindTexturesAndSamplers& command) {
1095 if (
command.fNumTexSamplers == 0) {
1096 fNumTextureSamplers = 0;
1098 fBindTextureSamplers =
false;
1103 for (
int i = 0; i <
command.fNumTexSamplers; i++) {
1113 SKGPU_LOG_E(
"Unable to find or create descriptor set");
1114 fNumTextureSamplers = 0;
1116 fBindTextureSamplers =
false;
1122 for (
int i = 0; i <
command.fNumTexSamplers; ++i) {
1123 auto texture =
const_cast<VulkanTexture*
>(
static_cast<const VulkanTexture*
>(
1124 drawPass.getTexture(
command.fTextureIndices[i])));
1125 auto sampler =
static_cast<const VulkanSampler*
>(
1126 drawPass.getSampler(
command.fSamplerIndices[i]));
1130 SKGPU_LOG_E(
"Texture and sampler must not be null");
1131 fNumTextureSamplers = 0;
1133 fBindTextureSamplers =
false;
1139 textureInfo.
sampler = sampler->vkSampler();
1147 writeInfo.
pNext =
nullptr;
1148 writeInfo.
dstSet = *
set->descriptorSet();
1159 UpdateDescriptorSets(fSharedContext->
device(),
1161 &writeDescriptorSets[0],
1168 fTextureSamplerDescSetToBind = *
set->descriptorSet();
1169 fBindTextureSamplers =
true;
1170 fNumTextureSamplers =
command.fNumTexSamplers;
1174void VulkanCommandBuffer::bindTextureSamplers() {
1175 fBindTextureSamplers =
false;
1179 CmdBindDescriptorSets(fPrimaryCommandBuffer,
1181 fActiveGraphicsPipeline->
layout(),
1184 &fTextureSamplerDescSetToBind,
1190void VulkanCommandBuffer::setScissor(
unsigned int left,
unsigned int top,
unsigned int width,
1193 {(int32_t)
left, (int32_t)top},
1197 CmdSetScissor(fPrimaryCommandBuffer,
1204 unsigned int baseVertex,
1205 unsigned int vertexCount) {
1207 this->syncDescriptorSets();
1210 CmdDraw(fPrimaryCommandBuffer,
1218 unsigned int baseIndex,
1219 unsigned int indexCount,
1220 unsigned int baseVertex) {
1222 this->syncDescriptorSets();
1225 CmdDrawIndexed(fPrimaryCommandBuffer,
1234 unsigned int baseVertex,
1235 unsigned int vertexCount,
1236 unsigned int baseInstance,
1237 unsigned int instanceCount) {
1239 this->syncDescriptorSets();
1242 CmdDraw(fPrimaryCommandBuffer,
1249void VulkanCommandBuffer::drawIndexedInstanced(
PrimitiveType,
1250 unsigned int baseIndex,
1251 unsigned int indexCount,
1252 unsigned int baseVertex,
1253 unsigned int baseInstance,
1254 unsigned int instanceCount) {
1256 this->syncDescriptorSets();
1259 CmdDrawIndexed(fPrimaryCommandBuffer,
1269 this->syncDescriptorSets();
1274 CmdDrawIndirect(fPrimaryCommandBuffer,
1275 fBoundIndirectBuffer,
1276 fBoundIndirectBufferOffset,
1281void VulkanCommandBuffer::drawIndexedIndirect(
PrimitiveType) {
1283 this->syncDescriptorSets();
1288 CmdDrawIndexedIndirect(fPrimaryCommandBuffer,
1289 fBoundIndirectBuffer,
1290 fBoundIndirectBufferOffset,
1302 auto vkSrcBuffer =
static_cast<const VulkanBuffer*
>(srcBuffer);
1303 auto vkDstBuffer =
static_cast<const VulkanBuffer*
>(dstBuffer);
1310 region.srcOffset = srcOffset;
1311 region.dstOffset = dstOffset;
1314 this->submitPipelineBarriers();
1317 CmdCopyBuffer(fPrimaryCommandBuffer,
1318 vkSrcBuffer->vkBuffer(),
1319 vkDstBuffer->vkBuffer(),
1329 size_t bufferOffset,
1330 size_t bufferRowBytes) {
1337 texture->textureInfo().getVulkanTextureInfo(&srcTextureInfo);
1343 region.bufferOffset = bufferOffset;
1345 region.bufferRowLength = (uint32_t)(bufferRowBytes/bytesPerBlock);
1346 region.bufferImageHeight = 0;
1348 region.imageOffset = { srcRect.
left(), srcRect.
top(), 0 };
1349 region.imageExtent = { (uint32_t)srcRect.
width(), (uint32_t)srcRect.
height(), 1 };
1352 const_cast<VulkanTexture*
>(srcTexture)->setImageLayout(
this,
1358 const_cast<VulkanBuffer*
>(dstBuffer)->setBufferAccess(
this,
1362 this->submitPipelineBarriers();
1365 CmdCopyImageToBuffer(fPrimaryCommandBuffer,
1368 dstBuffer->vkBuffer(),
1384 dstTexture->
textureInfo().getVulkanTextureInfo(&dstTextureInfo);
1391 for (
int i = 0; i <
count; ++i) {
1397 region.bufferRowLength =
1398 (uint32_t)((copyData[i].fBufferRowBytes/bytesPerBlock) * oneBlockDims.
fWidth);
1399 region.bufferImageHeight = 0;
1401 region.imageOffset = { copyData[i].
fRect.
left(),
1404 region.imageExtent = { (uint32_t)copyData[i].
fRect.
width(),
1410 const_cast<VulkanTexture*
>(dstTexture)->setImageLayout(
this,
1416 this->submitPipelineBarriers();
1419 CmdCopyBufferToImage(fPrimaryCommandBuffer,
1420 srcBuffer->vkBuffer(),
1442 copyRegion.
extent = { (uint32_t)srcRect.
width(), (uint32_t)srcRect.
height(), 1 };
1445 const_cast<VulkanTexture*
>(srcTexture)->setImageLayout(
this,
1451 const_cast<VulkanTexture*
>(dstTexture)->setImageLayout(
this,
1457 this->submitPipelineBarriers();
1460 CmdCopyImage(fPrimaryCommandBuffer,
1476 *outDidResultInWork =
true;
1489 this->pipelineBarrier(resource,
1493 kBufferMemory_BarrierType,
1504 this->pipelineBarrier(
nullptr,
1508 kBufferMemory_BarrierType,
1518 this->pipelineBarrier(resource,
1522 kImageMemory_BarrierType,
1526void VulkanCommandBuffer::pipelineBarrier(
const Resource* resource,
1530 BarrierType barrierType,
1540 bool isValidSubpassBarrier =
false;
1541 if (barrierType == kImageMemory_BarrierType) {
1548 SkASSERT(!fActiveRenderPass || isValidSubpassBarrier);
1551 if (barrierType == kBufferMemory_BarrierType) {
1555 SkASSERT(barrierType == kImageMemory_BarrierType);
1563 for (
int i = 0; i < fImageBarriers.
size(); ++i) {
1565 if (barrierPtr->
image == currentBarrier.
image) {
1575 if (std::max(newStart, oldStart) <= std::min(newEnd, oldEnd)) {
1576 this->submitPipelineBarriers();
1583 fBarriersByRegion |= byRegion;
1584 fSrcStageMask = fSrcStageMask | srcStageMask;
1585 fDstStageMask = fDstStageMask | dstStageMask;
1590 if (fActiveRenderPass) {
1591 this->submitPipelineBarriers(
true);
1595void VulkanCommandBuffer::submitPipelineBarriers(
bool forSelfDependency) {
1601 if (fBufferBarriers.
size() || fImageBarriers.
size()) {
1606 SkASSERT(!fActiveRenderPass || forSelfDependency);
1609 SkASSERT(fSrcStageMask && fDstStageMask);
1613 CmdPipelineBarrier(fPrimaryCommandBuffer, fSrcStageMask, fDstStageMask,
1616 fBufferBarriers.
size(), fBufferBarriers.
begin(),
1617 fImageBarriers.
size(), fImageBarriers.
begin()));
1618 fBufferBarriers.
clear();
1619 fImageBarriers.
clear();
1620 fBarriersByRegion =
false;
1631void VulkanCommandBuffer::updateBuffer(
const VulkanBuffer*
buffer,
1636 SkASSERT(fActive && !fActiveRenderPass);
1638 SKGPU_LOG_W(
"VulkanCommandBuffer::updateBuffer requires a valid VulkanBuffer pointer backed"
1639 "by a valid VkBuffer handle");
1647 this->submitPipelineBarriers();
1656void VulkanCommandBuffer::nextSubpass() {
1662void VulkanCommandBuffer::setViewport(
const SkRect& viewport) {
1672 CmdSetViewport(fPrimaryCommandBuffer,
static void info(const char *fmt,...) SK_PRINTF_LIKE(1
#define SKGPU_LOG_E(fmt,...)
#define SKGPU_LOG_W(fmt,...)
#define SKGPU_LOG_F(fmt,...)
#define SK_ABORT(message,...)
static bool left(const SkPoint &p0, const SkPoint &p1)
sk_sp< T > sk_ref_sp(T *obj)
#define VULKAN_CALL(IFACE, X)
#define VULKAN_CALL_ERRCHECK(SHARED_CONTEXT, X)
#define VULKAN_CALL_RESULT(SHARED_CONTEXT, RESULT, X)
#define VULKAN_CALL_RESULT_NOCHECK(IFACE, RESULT, X)
Type::kYUV Type::kRGBA() int(0.7 *637)
SkIVector fReplayTranslation
void trackResource(sk_sp< Resource > resource)
Protected isProtected() const
SkTextureCompressionType compressionType() const
const Texture * texture() const
const TextureInfo & textureInfo() const
bool setNewCommandBufferResources() override
void prepareSurfaceForStateUpdate(SkSurface *targetSurface, const MutableTextureState *newState) override
bool onCopyTextureToBuffer(const Texture *, SkIRect srcRect, const Buffer *, size_t bufferOffset, size_t bufferRowBytes) override
bool onSynchronizeBufferToCpu(const Buffer *, bool *outDidResultInWork) override
void addBufferMemoryBarrier(const Resource *resource, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkBufferMemoryBarrier *barrier)
bool onClearBuffer(const Buffer *, size_t offset, size_t size) override
~VulkanCommandBuffer() override
bool onAddComputePass(DispatchGroupSpan) override
bool onAddRenderPass(const RenderPassDesc &, const Texture *colorTexture, const Texture *resolveTexture, const Texture *depthStencilTexture, SkRect viewport, const DrawPassList &) override
void addSignalSemaphores(size_t numWaitSemaphores, const BackendSemaphore *signalSemaphores) override
bool onCopyBufferToTexture(const Buffer *, const Texture *, const BufferTextureCopyData *copyData, int count) override
bool onCopyBufferToBuffer(const Buffer *srcBuffer, size_t srcOffset, const Buffer *dstBuffer, size_t dstOffset, size_t size) override
bool onCopyTextureToTexture(const Texture *src, SkIRect srcRect, const Texture *dst, SkIPoint dstPoint, int mipLevel) override
void addImageMemoryBarrier(const Resource *, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, bool byRegion, VkImageMemoryBarrier *barrier)
void addWaitSemaphores(size_t numWaitSemaphores, const BackendSemaphore *waitSemaphores) override
void onResetCommandBuffer() override
static std::unique_ptr< VulkanCommandBuffer > Make(const VulkanSharedContext *, VulkanResourceProvider *)
static constexpr unsigned int kPaintUniformBufferIndex
static constexpr unsigned int kInputAttachmentBindingIndex
static const DescriptorData kRenderStepUniformDescriptor
bool hasFragmentUniforms() const
static constexpr unsigned int kInstanceBufferIndex
static const DescriptorData kInputAttachmentDescriptor
static const DescriptorData kIntrinsicUniformBufferDescriptor
static constexpr unsigned int kInputAttachmentDescSetIndex
bool hasStepUniforms() const
static constexpr unsigned int kVertexBufferIndex
static constexpr unsigned int kIntrinsicUniformBufferIndex
int numTextureSamplers() const
static constexpr unsigned int kRenderStepUniformBufferIndex
static const DescriptorData kPaintUniformDescriptor
VkPipeline pipeline() const
static constexpr unsigned int kTextureBindDescSetIndex
static constexpr unsigned int kUniformBufferDescSetIndex
VkPipelineLayout layout() const
static constexpr int kColorAttachmentIdx
static constexpr int kMaxExpectedAttachmentCount
const Buffer * loadMSAAVertexBuffer() const
static constexpr size_t kLoadMSAAVertexBufferSize
static constexpr size_t kIntrinsicConstantSize
sk_sp< Buffer > refIntrinsicConstantBuffer() const
const skgpu::VulkanInterface * interface() const
uint32_t queueIndex() const
void setImageLayout(VulkanCommandBuffer *buffer, VkImageLayout newLayout, VkAccessFlags dstAccessMask, VkPipelineStageFlags dstStageMask, bool byRegion) const
static VkAccessFlags LayoutToSrcAccessMask(const VkImageLayout layout)
static VkPipelineStageFlags LayoutToPipelineSrcStageFlags(const VkImageLayout layout)
static const uint8_t buffer[]
sk_sp< SkBlender > blender SkRect rect
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not set
SK_API uint32_t GetVkQueueFamilyIndex(const MutableTextureState &state)
SK_API VkImageLayout GetVkImageLayout(const MutableTextureState &state)
static bool submit_to_queue(const VulkanSharedContext *sharedContext, VkQueue queue, VkFence fence, uint32_t waitCount, const VkSemaphore *waitSemaphores, const VkPipelineStageFlags *waitStages, uint32_t commandBufferCount, const VkCommandBuffer *commandBuffers, uint32_t signalCount, const VkSemaphore *signalSemaphores, Protected protectedContext)
@ kCombinedTextureSampler
VkDescriptorType DsTypeEnumToVkDs(DescriptorType type)
static constexpr size_t VkFormatBytesPerBlock(VkFormat vkFormat)
SkISize CompressedDimensions(SkTextureCompressionType type, SkISize baseDimensions)
constexpr int32_t y() const
constexpr int32_t x() const
constexpr int32_t top() const
constexpr int32_t height() const
int32_t fTop
smaller y-axis bounds
constexpr int32_t width() const
int32_t fLeft
smaller x-axis bounds
constexpr int32_t left() const
constexpr int32_t width() const
constexpr int32_t height() const
SkScalar fLeft
smaller x-axis bounds
constexpr float x() const
constexpr float y() const
constexpr float height() const
constexpr float width() const
SkScalar fTop
smaller y-axis bounds
const VkCommandBufferInheritanceInfo * pInheritanceInfo
VkCommandBufferUsageFlags flags
VkImageLayout imageLayout
VkImageSubresourceLayers srcSubresource
VkImageSubresourceLayers dstSubresource
uint32_t dstQueueFamilyIndex
VkImageSubresourceRange subresourceRange
uint32_t srcQueueFamilyIndex
VkImageAspectFlags aspectMask
const VkClearValue * pClearValues
VkFramebuffer framebuffer
uint32_t waitSemaphoreCount
const VkPipelineStageFlags * pWaitDstStageMask
uint32_t commandBufferCount
const VkSemaphore * pWaitSemaphores
uint32_t signalSemaphoreCount
const VkCommandBuffer * pCommandBuffers
const VkSemaphore * pSignalSemaphores
const VkBufferView * pTexelBufferView
const VkDescriptorImageInfo * pImageInfo
const VkDescriptorBufferInfo * pBufferInfo
VkDescriptorType descriptorType
VkClearDepthStencilValue depthStencil
VkFlags VkPipelineStageFlags
@ VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
@ VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
@ VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
@ VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
@ VK_IMAGE_LAYOUT_UNDEFINED
@ VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
@ VK_COMMAND_BUFFER_LEVEL_PRIMARY
@ VK_DEPENDENCY_BY_REGION_BIT
VkFlags VkDependencyFlags
VkFlags VkCommandPoolCreateFlags
@ VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT
@ VK_PIPELINE_BIND_POINT_GRAPHICS
@ VK_IMAGE_ASPECT_COLOR_BIT
#define VK_QUEUE_FAMILY_FOREIGN_EXT
@ VK_BUFFER_USAGE_TRANSFER_DST_BIT
@ VK_BUFFER_USAGE_VERTEX_BUFFER_BIT
@ VK_BUFFER_USAGE_TRANSFER_SRC_BIT
@ VK_ACCESS_HOST_READ_BIT
@ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
@ VK_ACCESS_TRANSFER_WRITE_BIT
@ VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT
@ VK_ACCESS_INPUT_ATTACHMENT_READ_BIT
@ VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
@ VK_ACCESS_TRANSFER_READ_BIT
@ VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
@ VK_ACCESS_SHADER_READ_BIT
@ VK_ACCESS_UNIFORM_READ_BIT
@ VK_ACCESS_COLOR_ATTACHMENT_READ_BIT
@ VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER
#define VK_QUEUE_FAMILY_EXTERNAL
@ VK_SUBPASS_CONTENTS_INLINE
@ VK_COMMAND_POOL_CREATE_TRANSIENT_BIT
@ VK_COMMAND_POOL_CREATE_PROTECTED_BIT
@ VK_PIPELINE_STAGE_VERTEX_INPUT_BIT
@ VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
@ VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT
@ VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
@ VK_PIPELINE_STAGE_HOST_BIT
@ VK_PIPELINE_STAGE_TRANSFER_BIT
@ VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
#define VK_QUEUE_FAMILY_IGNORED
@ VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO
@ VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET
@ VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO
@ VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO
@ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO
@ VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO
@ VK_STRUCTURE_TYPE_SUBMIT_INFO
@ VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO