Flutter Engine
VulkanCommandBuffer.h
/*
 * Copyright 2022 Google LLC
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef skgpu_graphite_VulkanCommandBuffer_DEFINED
#define skgpu_graphite_VulkanCommandBuffer_DEFINED

namespace skgpu::graphite {

class VulkanBuffer;
class VulkanResourceProvider;
class VulkanSharedContext;
class VulkanTexture;
class Buffer;

class VulkanCommandBuffer final : public CommandBuffer {
public:
    static std::unique_ptr<VulkanCommandBuffer> Make(const VulkanSharedContext*,
                                                     VulkanResourceProvider*);
    ~VulkanCommandBuffer() override;

    bool setNewCommandBufferResources() override;

    bool submit(VkQueue);

    bool isFinished();

    void waitUntilFinished();

    void addBufferMemoryBarrier(const Resource* resource,
                                VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addBufferMemoryBarrier(VkPipelineStageFlags srcStageMask,
                                VkPipelineStageFlags dstStageMask,
                                VkBufferMemoryBarrier* barrier);
    void addImageMemoryBarrier(const Resource*,
                               VkPipelineStageFlags srcStageMask,
                               VkPipelineStageFlags dstStageMask,
                               bool byRegion,
                               VkImageMemoryBarrier* barrier);

private:
    VulkanCommandBuffer(VkCommandPool pool,
                        VkCommandBuffer primaryCommandBuffer,
                        const VulkanSharedContext* sharedContext,
                        VulkanResourceProvider* resourceProvider);

    void onResetCommandBuffer() override;

    void begin();
    void end();

    void addWaitSemaphores(size_t numWaitSemaphores,
                           const BackendSemaphore* waitSemaphores) override;
    void addSignalSemaphores(size_t numSignalSemaphores,
                             const BackendSemaphore* signalSemaphores) override;
    void prepareSurfaceForStateUpdate(SkSurface* targetSurface,
                                      const MutableTextureState* newState) override;

    bool onAddRenderPass(const RenderPassDesc&,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture,
                         SkRect viewport,
                         const DrawPassList&) override;

    bool beginRenderPass(const RenderPassDesc&,
                         const Texture* colorTexture,
                         const Texture* resolveTexture,
                         const Texture* depthStencilTexture);
    void endRenderPass();

    void addDrawPass(const DrawPass*);

    // Track descriptor changes for binding prior to draw calls
    void recordBufferBindingInfo(const BindBufferInfo& info, UniformSlot);
    void recordTextureAndSamplerDescSet(
            const DrawPass&, const DrawPassCommands::BindTexturesAndSamplers&);

    void bindTextureSamplers();
    void bindUniformBuffers();
    void syncDescriptorSets();

    void bindGraphicsPipeline(const GraphicsPipeline*);
    void setBlendConstants(float* blendConstants);
    void bindDrawBuffers(const BindBufferInfo& vertices,
                         const BindBufferInfo& instances,
                         const BindBufferInfo& indices,
                         const BindBufferInfo& indirect);
    void bindVertexBuffers(const Buffer* vertexBuffer, size_t vertexOffset,
                           const Buffer* instanceBuffer, size_t instanceOffset);
    void bindInputBuffer(const Buffer* buffer, VkDeviceSize offset, uint32_t binding);
    void bindIndexBuffer(const Buffer* indexBuffer, size_t offset);
    void bindIndirectBuffer(const Buffer* indirectBuffer, size_t offset);
    void setScissor(unsigned int left, unsigned int top,
                    unsigned int width, unsigned int height);

    void draw(PrimitiveType type, unsigned int baseVertex, unsigned int vertexCount);
    void drawIndexed(PrimitiveType type, unsigned int baseIndex, unsigned int indexCount,
                     unsigned int baseVertex);
    void drawInstanced(PrimitiveType type,
                       unsigned int baseVertex, unsigned int vertexCount,
                       unsigned int baseInstance, unsigned int instanceCount);
    void drawIndexedInstanced(PrimitiveType type, unsigned int baseIndex,
                              unsigned int indexCount, unsigned int baseVertex,
                              unsigned int baseInstance, unsigned int instanceCount);
    void drawIndirect(PrimitiveType type);
    void drawIndexedIndirect(PrimitiveType type);

    // TODO: The virtuals in this class have not yet been implemented as we still haven't
    // implemented the objects they use.
    bool onAddComputePass(DispatchGroupSpan) override;

    bool onCopyBufferToBuffer(const Buffer* srcBuffer,
                              size_t srcOffset,
                              const Buffer* dstBuffer,
                              size_t dstOffset,
                              size_t size) override;
    bool onCopyTextureToBuffer(const Texture*,
                               SkIRect srcRect,
                               const Buffer*,
                               size_t bufferOffset,
                               size_t bufferRowBytes) override;
    bool onCopyBufferToTexture(const Buffer*,
                               const Texture*,
                               const BufferTextureCopyData* copyData,
                               int count) override;
    bool onCopyTextureToTexture(const Texture* src,
                                SkIRect srcRect,
                                const Texture* dst,
                                SkIPoint dstPoint,
                                int mipLevel) override;

    bool onSynchronizeBufferToCpu(const Buffer*, bool* outDidResultInWork) override;
    bool onClearBuffer(const Buffer*, size_t offset, size_t size) override;

    enum BarrierType {
        kBufferMemory_BarrierType,
        kImageMemory_BarrierType
    };
    void pipelineBarrier(const Resource* resource,
                         VkPipelineStageFlags srcStageMask,
                         VkPipelineStageFlags dstStageMask,
                         bool byRegion,
                         BarrierType barrierType,
                         void* barrier);
    void submitPipelineBarriers(bool forSelfDependency = false);

    // Update the intrinsic constant uniform with the latest rtAdjust value as determined by a
    // given viewport. The resource provider is responsible for finding a suitable buffer and
    // managing its lifetime.
    void updateRtAdjustUniform(const SkRect& viewport);

    bool updateLoadMSAAVertexBuffer();
    bool loadMSAAFromResolve(const RenderPassDesc&,
                             VulkanTexture& resolveTexture,
                             SkISize dstDimensions);
    bool updateAndBindLoadMSAAInputAttachment(const VulkanTexture& resolveTexture);
    void updateBuffer(const VulkanBuffer* buffer,
                      const void* data,
                      size_t dataSize,
                      size_t dstOffset = 0);
    void nextSubpass();
    void setViewport(const SkRect& viewport);

    VkCommandPool fPool;
    VkCommandBuffer fPrimaryCommandBuffer;
    const VulkanSharedContext* fSharedContext;
    VulkanResourceProvider* fResourceProvider;

    // begin() has been called, but not end()
    bool fActive = false;
    // Track whether there is currently an active render pass (beginRenderPass has been called, but
    // not endRenderPass)
    bool fActiveRenderPass = false;

    const VulkanGraphicsPipeline* fActiveGraphicsPipeline = nullptr;

    VkFence fSubmitFence = VK_NULL_HANDLE;

    // Current semaphores

    // Tracking of memory barriers so that we can submit them all in a batch together.
    bool fBarriersByRegion = false;
    VkPipelineStageFlags fSrcStageMask = 0;
    VkPipelineStageFlags fDstStageMask = 0;

    // Track whether certain descriptor sets need to be bound
    bool fBindUniformBuffers = false;
    bool fBindTextureSamplers = false;

    std::array<BindBufferInfo, VulkanGraphicsPipeline::kNumUniformBuffers> fUniformBuffersToBind
            = {{{nullptr, 0}}};
    VkDescriptorSet fTextureSamplerDescSetToBind = VK_NULL_HANDLE;

    int fNumTextureSamplers = 0;

    VkBuffer fBoundInputBuffers[VulkanGraphicsPipeline::kNumInputBuffers];
    size_t fBoundInputBufferOffsets[VulkanGraphicsPipeline::kNumInputBuffers];

    VkBuffer fBoundIndexBuffer = VK_NULL_HANDLE;
    VkBuffer fBoundIndirectBuffer = VK_NULL_HANDLE;
    size_t fBoundIndexBufferOffset = 0;
    size_t fBoundIndirectBufferOffset = 0;

    float fCachedBlendConstant[4];
};

} // namespace skgpu::graphite

#endif // skgpu_graphite_VulkanCommandBuffer_DEFINED
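
Based on the public interface declared above, the expected lifecycle is: create the command buffer with Make(), prepare per-submission state with setNewCommandBufferResources(), record work through the CommandBuffer base-class API, submit to a VkQueue, and then poll or block on completion. The sketch below illustrates that flow only; the sharedContext, resourceProvider, and queue parameters are assumed to come from the surrounding Graphite Vulkan backend setup, and error handling is minimal.

// Minimal usage sketch (illustrative, not part of the header). Assumes the
// caller already owns a VulkanSharedContext, VulkanResourceProvider, and VkQueue.
void recordAndSubmit(const skgpu::graphite::VulkanSharedContext* sharedContext,
                     skgpu::graphite::VulkanResourceProvider* resourceProvider,
                     VkQueue queue) {
    using skgpu::graphite::VulkanCommandBuffer;

    std::unique_ptr<VulkanCommandBuffer> cmdBuffer =
            VulkanCommandBuffer::Make(sharedContext, resourceProvider);
    if (!cmdBuffer || !cmdBuffer->setNewCommandBufferResources()) {
        return;  // could not allocate or begin a primary command buffer
    }

    // Recording goes through the CommandBuffer base-class API (render passes,
    // copies, compute passes). Once recording is done, hand the work to the queue.
    if (!cmdBuffer->submit(queue)) {
        return;  // submission failed
    }

    // Either poll for completion or block on the submit fence.
    if (!cmdBuffer->isFinished()) {
        cmdBuffer->waitUntilFinished();
    }
}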