Flutter Engine
The Flutter Engine
render_pass_vk.cc
Go to the documentation of this file.
1// Copyright 2013 The Flutter Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <array>
8#include <cstdint>
9#include <vector>
10
11#include "fml/status.h"
27#include "vulkan/vulkan_handles.hpp"
28
29namespace impeller {
30
31// Warning: if any of the constant values or layouts are changed in the
32// framebuffer fetch shader, then this input binding may need to be
33// manually changed.
34//
35// See: impeller/entity/shaders/blending/framebuffer_blend.frag
36static constexpr size_t kMagicSubpassInputBinding = 64u;
37
38static vk::ClearColorValue VKClearValueFromColor(Color color) {
39 vk::ClearColorValue value;
40 value.setFloat32(
41 std::array<float, 4>{color.red, color.green, color.blue, color.alpha});
42 return value;
43}
44
45static vk::ClearDepthStencilValue VKClearValueFromDepthStencil(uint32_t stencil,
46 Scalar depth) {
47 vk::ClearDepthStencilValue value;
48 value.depth = depth;
49 value.stencil = stencil;
50 return value;
51}
52
53static std::vector<vk::ClearValue> GetVKClearValues(
54 const RenderTarget& target) {
55 std::vector<vk::ClearValue> clears;
56
57 for (const auto& [_, color] : target.GetColorAttachments()) {
58 clears.emplace_back(VKClearValueFromColor(color.clear_color));
59 if (color.resolve_texture) {
60 clears.emplace_back(VKClearValueFromColor(color.clear_color));
61 }
62 }
63
64 const auto& depth = target.GetDepthAttachment();
65 const auto& stencil = target.GetStencilAttachment();
66
67 if (depth.has_value()) {
68 clears.emplace_back(VKClearValueFromDepthStencil(
69 stencil ? stencil->clear_stencil : 0u, depth->clear_depth));
70 } else if (stencil.has_value()) {
71 clears.emplace_back(VKClearValueFromDepthStencil(
72 stencil->clear_stencil, depth ? depth->clear_depth : 0.0f));
73 }
74
75 return clears;
76}
77
// Creates (or reuses) the VkRenderPass that matches this pass's render target.
//
// As a required side effect, every color (and resolve) attachment is
// transitioned to vk::ImageLayout::eGeneral via a pipeline barrier on the
// supplied command buffer. These layout transitions happen even when a
// recycled render pass is returned, which is why the early-return for the
// recycled pass sits after the attachment loop below.
SharedHandleVK<vk::RenderPass> RenderPassVK::CreateVKRenderPass(
    const ContextVK& context,
    const SharedHandleVK<vk::RenderPass>& recycled_renderpass,
    const std::shared_ptr<CommandBufferVK>& command_buffer) const {
  // Barrier that takes attachments from fragment-shader reads to a state
  // writable by color-attachment output and transfer operations.
  BarrierVK barrier;
  barrier.new_layout = vk::ImageLayout::eGeneral;
  barrier.cmd_buffer = command_buffer->GetEncoder()->GetCommandBuffer();
  barrier.src_access = vk::AccessFlagBits::eShaderRead;
  barrier.src_stage = vk::PipelineStageFlagBits::eFragmentShader;
  barrier.dst_access = vk::AccessFlagBits::eColorAttachmentWrite |
                       vk::AccessFlagBits::eTransferWrite;
  barrier.dst_stage = vk::PipelineStageFlagBits::eColorAttachmentOutput |
                      vk::PipelineStageFlagBits::eTransfer;

  RenderPassBuilderVK builder;

  // Describe each color attachment to the builder and transition its layout
  // (and its resolve target's layout, when present).
  for (const auto& [bind_point, color] : render_target_.GetColorAttachments()) {
    builder.SetColorAttachment(
        bind_point,                                          //
        color.texture->GetTextureDescriptor().format,        //
        color.texture->GetTextureDescriptor().sample_count,  //
        color.load_action,                                   //
        color.store_action                                   //
    );
    TextureVK::Cast(*color.texture).SetLayout(barrier);
    if (color.resolve_texture) {
      TextureVK::Cast(*color.resolve_texture).SetLayout(barrier);
    }
  }

  // A depth attachment (combined depth/stencil) takes precedence over a
  // stencil-only attachment.
  if (auto depth = render_target_.GetDepthAttachment(); depth.has_value()) {
    builder.SetDepthStencilAttachment(
        depth->texture->GetTextureDescriptor().format,        //
        depth->texture->GetTextureDescriptor().sample_count,  //
        depth->load_action,                                   //
        depth->store_action                                   //
    );
  } else if (auto stencil = render_target_.GetStencilAttachment();
             stencil.has_value()) {
    builder.SetStencilAttachment(
        stencil->texture->GetTextureDescriptor().format,        //
        stencil->texture->GetTextureDescriptor().sample_count,  //
        stencil->load_action,                                   //
        stencil->store_action                                   //
    );
  }

  // Reuse a compatible cached render pass if one was supplied. The layout
  // transitions above have already been recorded at this point.
  if (recycled_renderpass != nullptr) {
    return recycled_renderpass;
  }

  auto pass = builder.Build(context.GetDevice());

  if (!pass) {
    VALIDATION_LOG << "Failed to create render pass for framebuffer.";
    return {};
  }

  context.SetDebugName(pass.get(), debug_label_.c_str());

  return MakeSharedVK(std::move(pass));
}
140
// Sets up the Vulkan render pass: resolves the primary color/resolve textures,
// tracks attachment lifetimes on the encoder, creates (or reuses cached)
// VkRenderPass/VkFramebuffer objects, records vkCmdBeginRenderPass, and
// establishes the initial dynamic state (viewport, scissor, stencil
// reference). On any failure, is_valid_ is left false and the pass is unusable.
RenderPassVK::RenderPassVK(const std::shared_ptr<const Context>& context,
                           const RenderTarget& target,
                           std::shared_ptr<CommandBufferVK> command_buffer)
    : RenderPass(context, target), command_buffer_(std::move(command_buffer)) {
  // Assumes a color attachment exists at index 0; the find(0u) result is
  // dereferenced unchecked. resolve_texture may legitimately be null.
  color_image_vk_ =
      render_target_.GetColorAttachments().find(0u)->second.texture;
  resolve_image_vk_ =
      render_target_.GetColorAttachments().find(0u)->second.resolve_texture;

  const auto& vk_context = ContextVK::Cast(*context);
  const std::shared_ptr<CommandEncoderVK>& encoder =
      command_buffer_->GetEncoder();
  command_buffer_vk_ = encoder->GetCommandBuffer();
  // Keep every attachment texture alive for the lifetime of the encoder.
  render_target_.IterateAllAttachments(
      [&encoder](const auto& attachment) -> bool {
        encoder->Track(attachment.texture);
        encoder->Track(attachment.resolve_texture);
        return true;
      });

  // The resolve texture caches a compatible render pass and framebuffer from
  // prior frames so they can be reused instead of recreated.
  SharedHandleVK<vk::RenderPass> recycled_render_pass;
  SharedHandleVK<vk::Framebuffer> recycled_framebuffer;
  if (resolve_image_vk_) {
    recycled_render_pass =
        TextureVK::Cast(*resolve_image_vk_).GetCachedRenderPass();
    recycled_framebuffer =
        TextureVK::Cast(*resolve_image_vk_).GetCachedFramebuffer();
  }

  const auto& target_size = render_target_.GetRenderTargetSize();

  render_pass_ =
      CreateVKRenderPass(vk_context, recycled_render_pass, command_buffer_);
  if (!render_pass_) {
    VALIDATION_LOG << "Could not create renderpass.";
    is_valid_ = false;
    return;
  }

  auto framebuffer = (recycled_framebuffer == nullptr)
                         ? CreateVKFramebuffer(vk_context, *render_pass_)
                         : recycled_framebuffer;
  if (!framebuffer) {
    VALIDATION_LOG << "Could not create framebuffer.";
    is_valid_ = false;
    return;
  }

  // The framebuffer and render pass must outlive command buffer execution.
  if (!encoder->Track(framebuffer) || !encoder->Track(render_pass_)) {
    is_valid_ = false;
    return;
  }
  // Cache the objects on the resolve texture for reuse next frame.
  if (resolve_image_vk_) {
    TextureVK::Cast(*resolve_image_vk_).SetCachedFramebuffer(framebuffer);
    TextureVK::Cast(*resolve_image_vk_).SetCachedRenderPass(render_pass_);
  }

  auto clear_values = GetVKClearValues(render_target_);

  vk::RenderPassBeginInfo pass_info;
  pass_info.renderPass = *render_pass_;
  pass_info.framebuffer = *framebuffer;
  pass_info.renderArea.extent.width = static_cast<uint32_t>(target_size.width);
  pass_info.renderArea.extent.height =
      static_cast<uint32_t>(target_size.height);
  pass_info.setClearValues(clear_values);

  command_buffer_vk_.beginRenderPass(pass_info, vk::SubpassContents::eInline);

  // Set the initial viewport. The negative height plus shifted Y origin flips
  // the viewport vertically so rendering matches Impeller's top-left origin.
  const auto vp = Viewport{.rect = Rect::MakeSize(target_size)};
  vk::Viewport viewport = vk::Viewport()
                              .setWidth(vp.rect.GetWidth())
                              .setHeight(-vp.rect.GetHeight())
                              .setY(vp.rect.GetHeight())
                              .setMinDepth(0.0f)
                              .setMaxDepth(1.0f);
  command_buffer_vk_.setViewport(0, 1, &viewport);

  // Set the initial scissor to cover the full render target.
  const auto sc = IRect::MakeSize(target_size);
  vk::Rect2D scissor =
      vk::Rect2D()
          .setOffset(vk::Offset2D(sc.GetX(), sc.GetY()))
          .setExtent(vk::Extent2D(sc.GetWidth(), sc.GetHeight()));
  command_buffer_vk_.setScissor(0, 1, &scissor);

  // Set the initial stencil reference.
  command_buffer_vk_.setStencilReference(
      vk::StencilFaceFlagBits::eVkStencilFrontAndBack, 0u);

  is_valid_ = true;
}
234
235RenderPassVK::~RenderPassVK() = default;
236
// Reports whether construction (render pass, framebuffer, begin) succeeded.
bool RenderPassVK::IsValid() const {
  return is_valid_;
}
240
241void RenderPassVK::OnSetLabel(std::string label) {
242#ifdef IMPELLER_DEBUG
243 ContextVK::Cast(*context_).SetDebugName(render_pass_->Get(),
244 std::string(label).c_str());
245#endif // IMPELLER_DEBUG
246}
247
248SharedHandleVK<vk::Framebuffer> RenderPassVK::CreateVKFramebuffer(
249 const ContextVK& context,
250 const vk::RenderPass& pass) const {
251 vk::FramebufferCreateInfo fb_info;
252
253 fb_info.renderPass = pass;
254
255 const auto target_size = render_target_.GetRenderTargetSize();
256 fb_info.width = target_size.width;
257 fb_info.height = target_size.height;
258 fb_info.layers = 1u;
259
260 std::vector<vk::ImageView> attachments;
261
262 // This bit must be consistent to ensure compatibility with the pass created
263 // earlier. Follow this order: Color attachments, then depth-stencil, then
264 // stencil.
265 for (const auto& [_, color] : render_target_.GetColorAttachments()) {
266 // The bind point doesn't matter here since that information is present in
267 // the render pass.
268 attachments.emplace_back(
269 TextureVK::Cast(*color.texture).GetRenderTargetView());
270 if (color.resolve_texture) {
271 attachments.emplace_back(
272 TextureVK::Cast(*color.resolve_texture).GetRenderTargetView());
273 }
274 }
275 if (auto depth = render_target_.GetDepthAttachment(); depth.has_value()) {
276 attachments.emplace_back(
277 TextureVK::Cast(*depth->texture).GetRenderTargetView());
278 } else if (auto stencil = render_target_.GetStencilAttachment();
279 stencil.has_value()) {
280 attachments.emplace_back(
281 TextureVK::Cast(*stencil->texture).GetRenderTargetView());
282 }
283
284 fb_info.setAttachments(attachments);
285
286 auto [result, framebuffer] =
287 context.GetDevice().createFramebufferUnique(fb_info);
288
289 if (result != vk::Result::eSuccess) {
290 VALIDATION_LOG << "Could not create framebuffer: " << vk::to_string(result);
291 return {};
292 }
293
294 return MakeSharedVK(std::move(framebuffer));
295}
296
297// |RenderPass|
298void RenderPassVK::SetPipeline(
299 const std::shared_ptr<Pipeline<PipelineDescriptor>>& pipeline) {
300 pipeline_ = pipeline.get();
301 if (!pipeline_) {
302 return;
303 }
304
305 pipeline_uses_input_attachments_ =
306 pipeline_->GetDescriptor().GetVertexDescriptor()->UsesInputAttacments();
307
308 if (pipeline_uses_input_attachments_) {
309 if (bound_image_offset_ >= kMaxBindings) {
310 pipeline_ = nullptr;
311 return;
312 }
313 vk::DescriptorImageInfo image_info;
314 image_info.imageLayout = vk::ImageLayout::eGeneral;
315 image_info.sampler = VK_NULL_HANDLE;
316 image_info.imageView = TextureVK::Cast(*color_image_vk_).GetImageView();
317 image_workspace_[bound_image_offset_++] = image_info;
318
319 vk::WriteDescriptorSet write_set;
320 write_set.dstBinding = kMagicSubpassInputBinding;
321 write_set.descriptorCount = 1u;
322 write_set.descriptorType = vk::DescriptorType::eInputAttachment;
323 write_set.pImageInfo = &image_workspace_[bound_image_offset_ - 1];
324
325 write_workspace_[descriptor_write_offset_++] = write_set;
326 }
327}
328
// |RenderPass|
// Opens a debug group on the encoder for the next draw; Draw() closes it.
// Only active in debug builds.
void RenderPassVK::SetCommandLabel(std::string_view label) {
#ifdef IMPELLER_DEBUG
  command_buffer_->GetEncoder()->PushDebugGroup(label);
  has_label_ = true;
#endif  // IMPELLER_DEBUG
}
336
// |RenderPass|
// Records the dynamic stencil reference for both front and back faces.
void RenderPassVK::SetStencilReference(uint32_t value) {
  command_buffer_vk_.setStencilReference(
      vk::StencilFaceFlagBits::eVkStencilFrontAndBack, value);
}
342
// |RenderPass|
// Stashes the vertex offset; consumed (and reset) by the next Draw().
void RenderPassVK::SetBaseVertex(uint64_t value) {
  base_vertex_ = value;
}
347
348// |RenderPass|
349void RenderPassVK::SetViewport(Viewport viewport) {
350 vk::Viewport viewport_vk = vk::Viewport()
351 .setWidth(viewport.rect.GetWidth())
352 .setHeight(-viewport.rect.GetHeight())
353 .setY(viewport.rect.GetHeight())
354 .setMinDepth(0.0f)
355 .setMaxDepth(1.0f);
356 command_buffer_vk_.setViewport(0, 1, &viewport_vk);
357}
358
359// |RenderPass|
360void RenderPassVK::SetScissor(IRect scissor) {
361 vk::Rect2D scissor_vk =
362 vk::Rect2D()
363 .setOffset(vk::Offset2D(scissor.GetX(), scissor.GetY()))
364 .setExtent(vk::Extent2D(scissor.GetWidth(), scissor.GetHeight()));
365 command_buffer_vk_.setScissor(0, 1, &scissor_vk);
366}
367
// |RenderPass|
// Stashes the instance count; consumed (and reset to 1) by the next Draw().
void RenderPassVK::SetInstanceCount(size_t count) {
  instance_count_ = count;
}
372
// |RenderPass|
// Validates and binds the vertex buffer (and index buffer, when present) on
// the command buffer, tracking both on the encoder so they outlive execution.
// Returns false on any validation or tracking failure.
//
// NOTE(review): has_index_buffer_ is set to true *before* the index buffer is
// validated below, so a failed validation leaves the flag true even though
// this returns false. Callers appear expected to abandon the pass on a false
// return — confirm before relying on the flag after a failure.
bool RenderPassVK::SetVertexBuffer(VertexBuffer buffer) {
  vertex_count_ = buffer.vertex_count;
  if (buffer.index_type == IndexType::kUnknown || !buffer.vertex_buffer) {
    return false;
  }

  // Keep the vertex buffer alive until the command buffer finishes.
  if (!command_buffer_->GetEncoder()->Track(buffer.vertex_buffer.buffer)) {
    return false;
  }

  // Bind the vertex buffer.
  vk::Buffer vertex_buffer_handle =
      DeviceBufferVK::Cast(*buffer.vertex_buffer.buffer).GetBuffer();
  vk::Buffer vertex_buffers[] = {vertex_buffer_handle};
  vk::DeviceSize vertex_buffer_offsets[] = {buffer.vertex_buffer.range.offset};

  command_buffer_vk_.bindVertexBuffers(0u, 1u, vertex_buffers,
                                       vertex_buffer_offsets);

  // Bind the index buffer. kNone means non-indexed drawing (Draw() then uses
  // vkCmdDraw instead of vkCmdDrawIndexed).
  if (buffer.index_type != IndexType::kNone) {
    has_index_buffer_ = true;
    const BufferView& index_buffer_view = buffer.index_buffer;
    if (!index_buffer_view) {
      return false;
    }

    const std::shared_ptr<const DeviceBuffer>& index_buffer =
        index_buffer_view.buffer;
    if (!index_buffer) {
      VALIDATION_LOG << "Failed to acquire device buffer"
                     << " for index buffer view";
      return false;
    }

    if (!command_buffer_->GetEncoder()->Track(index_buffer)) {
      return false;
    }

    vk::Buffer index_buffer_handle =
        DeviceBufferVK::Cast(*index_buffer).GetBuffer();
    command_buffer_vk_.bindIndexBuffer(index_buffer_handle,
                                       index_buffer_view.range.offset,
                                       ToVKIndexType(buffer.index_type));
  } else {
    has_index_buffer_ = false;
  }
  return true;
}
423
424// |RenderPass|
426 if (!pipeline_) {
428 "No valid pipeline is bound to the RenderPass.");
429 }
430
431 //----------------------------------------------------------------------------
432 /// If there are immutable samplers referenced in the render pass, the base
433 /// pipeline variant is no longer valid and needs to be re-constructed to
434 /// reference the samplers.
435 ///
436 /// This is an instance of JIT creation of PSOs that can cause jank. It is
437 /// unavoidable because it isn't possible to know all possible combinations of
438 /// target YUV conversions. Fortunately, this will only ever happen when
439 /// rendering to external textures. Like Android Hardware Buffers on Android.
440 ///
441 /// Even when JIT creation is unavoidable, pipelines will cache their variants
442 /// when able and all pipeline creation will happen via a base pipeline cache
443 /// anyway. So the jank can be mostly entirely ameliorated and it should only
444 /// ever happen when the first unknown YUV conversion is encountered.
445 ///
446 /// Jank can be completely eliminated by pre-populating known YUV conversion
447 /// pipelines.
448 if (immutable_sampler_) {
449 std::shared_ptr<PipelineVK> pipeline_variant =
450 PipelineVK::Cast(*pipeline_)
451 .CreateVariantForImmutableSamplers(immutable_sampler_);
452 if (!pipeline_variant) {
453 return fml::Status(
455 "Could not create pipeline variant with immutable sampler.");
456 }
457 pipeline_ = pipeline_variant.get();
458 }
459
460 const auto& context_vk = ContextVK::Cast(*context_);
461 const auto& pipeline_vk = PipelineVK::Cast(*pipeline_);
462
463 auto descriptor_result =
464 command_buffer_->GetEncoder()->AllocateDescriptorSets(
465 pipeline_vk.GetDescriptorSetLayout(), context_vk);
466 if (!descriptor_result.ok()) {
468 "Could not allocate descriptor sets.");
469 }
470 const auto descriptor_set = descriptor_result.value();
471 const auto pipeline_layout = pipeline_vk.GetPipelineLayout();
472 command_buffer_vk_.bindPipeline(vk::PipelineBindPoint::eGraphics,
473 pipeline_vk.GetPipeline());
474
475 for (auto i = 0u; i < descriptor_write_offset_; i++) {
476 write_workspace_[i].dstSet = descriptor_set;
477 }
478
479 context_vk.GetDevice().updateDescriptorSets(descriptor_write_offset_,
480 write_workspace_.data(), 0u, {});
481
482 command_buffer_vk_.bindDescriptorSets(
483 vk::PipelineBindPoint::eGraphics, // bind point
484 pipeline_layout, // layout
485 0, // first set
486 1, // set count
487 &descriptor_set, // sets
488 0, // offset count
489 nullptr // offsets
490 );
491
492 if (pipeline_uses_input_attachments_) {
494 command_buffer_vk_, TextureVK::Cast(*color_image_vk_).GetImage());
495 }
496
497 if (has_index_buffer_) {
498 command_buffer_vk_.drawIndexed(vertex_count_, // index count
499 instance_count_, // instance count
500 0u, // first index
501 base_vertex_, // vertex offset
502 0u // first instance
503 );
504 } else {
505 command_buffer_vk_.draw(vertex_count_, // vertex count
506 instance_count_, // instance count
507 base_vertex_, // vertex offset
508 0u // first instance
509 );
510 }
511
512#ifdef IMPELLER_DEBUG
513 if (has_label_) {
514 command_buffer_->GetEncoder()->PopDebugGroup();
515 }
516#endif // IMPELLER_DEBUG
517 has_label_ = false;
518 has_index_buffer_ = false;
519 bound_image_offset_ = 0u;
520 bound_buffer_offset_ = 0u;
521 descriptor_write_offset_ = 0u;
522 instance_count_ = 1u;
523 base_vertex_ = 0u;
524 vertex_count_ = 0u;
525 pipeline_ = nullptr;
526 pipeline_uses_input_attachments_ = false;
527 immutable_sampler_ = nullptr;
528 return fml::Status();
529}
530
531// The RenderPassVK binding methods only need the binding, set, and buffer type
532// information.
533bool RenderPassVK::BindResource(ShaderStage stage,
535 const ShaderUniformSlot& slot,
536 const ShaderMetadata& metadata,
537 BufferView view) {
538 return BindResource(slot.binding, type, view);
539}
540
541bool RenderPassVK::BindResource(
542 ShaderStage stage,
544 const ShaderUniformSlot& slot,
545 const std::shared_ptr<const ShaderMetadata>& metadata,
546 BufferView view) {
547 return BindResource(slot.binding, type, view);
548}
549
550bool RenderPassVK::BindResource(size_t binding,
552 const BufferView& view) {
553 if (bound_buffer_offset_ >= kMaxBindings) {
554 return false;
555 }
556
557 const std::shared_ptr<const DeviceBuffer>& device_buffer = view.buffer;
558 auto buffer = DeviceBufferVK::Cast(*device_buffer).GetBuffer();
559 if (!buffer) {
560 return false;
561 }
562
563 if (!command_buffer_->GetEncoder()->Track(device_buffer)) {
564 return false;
565 }
566
567 uint32_t offset = view.range.offset;
568
569 vk::DescriptorBufferInfo buffer_info;
570 buffer_info.buffer = buffer;
571 buffer_info.offset = offset;
572 buffer_info.range = view.range.length;
573 buffer_workspace_[bound_buffer_offset_++] = buffer_info;
574
575 vk::WriteDescriptorSet write_set;
576 write_set.dstBinding = binding;
577 write_set.descriptorCount = 1u;
578 write_set.descriptorType = ToVKDescriptorType(type);
579 write_set.pBufferInfo = &buffer_workspace_[bound_buffer_offset_ - 1];
580
581 write_workspace_[descriptor_write_offset_++] = write_set;
582 return true;
583}
584
585bool RenderPassVK::BindResource(ShaderStage stage,
587 const SampledImageSlot& slot,
588 const ShaderMetadata& metadata,
589 std::shared_ptr<const Texture> texture,
590 const std::unique_ptr<const Sampler>& sampler) {
591 if (bound_buffer_offset_ >= kMaxBindings) {
592 return false;
593 }
594 if (!texture->IsValid() || !sampler) {
595 return false;
596 }
597 const TextureVK& texture_vk = TextureVK::Cast(*texture);
598 const SamplerVK& sampler_vk = SamplerVK::Cast(*sampler);
599
600 if (!command_buffer_->GetEncoder()->Track(texture)) {
601 return false;
602 }
603
604 if (!immutable_sampler_) {
605 immutable_sampler_ = texture_vk.GetImmutableSamplerVariant(sampler_vk);
606 }
607
608 vk::DescriptorImageInfo image_info;
609 image_info.imageLayout = vk::ImageLayout::eShaderReadOnlyOptimal;
610 image_info.sampler = sampler_vk.GetSampler();
611 image_info.imageView = texture_vk.GetImageView();
612 image_workspace_[bound_image_offset_++] = image_info;
613
614 vk::WriteDescriptorSet write_set;
615 write_set.dstBinding = slot.binding;
616 write_set.descriptorCount = 1u;
617 write_set.descriptorType = vk::DescriptorType::eCombinedImageSampler;
618 write_set.pImageInfo = &image_workspace_[bound_image_offset_ - 1];
619
620 write_workspace_[descriptor_write_offset_++] = write_set;
621 return true;
622}
623
624bool RenderPassVK::OnEncodeCommands(const Context& context) const {
625 command_buffer_->GetEncoder()->GetCommandBuffer().endRenderPass();
626
627 // If this render target will be consumed by a subsequent render pass,
628 // perform a layout transition to a shader read state.
629 const std::shared_ptr<Texture>& result_texture =
630 resolve_image_vk_ ? resolve_image_vk_ : color_image_vk_;
631 if (result_texture->GetTextureDescriptor().usage &
632 TextureUsage::kShaderRead) {
633 BarrierVK barrier;
634 barrier.cmd_buffer = command_buffer_vk_;
635 barrier.src_access = vk::AccessFlagBits::eColorAttachmentWrite |
636 vk::AccessFlagBits::eTransferWrite;
637 barrier.src_stage = vk::PipelineStageFlagBits::eColorAttachmentOutput |
638 vk::PipelineStageFlagBits::eTransfer;
639 barrier.dst_access = vk::AccessFlagBits::eShaderRead;
640 barrier.dst_stage = vk::PipelineStageFlagBits::eFragmentShader;
641
642 barrier.new_layout = vk::ImageLayout::eShaderReadOnlyOptimal;
643
644 if (!TextureVK::Cast(*result_texture).SetLayout(barrier)) {
645 return false;
646 }
647 }
648
649 return true;
650}
651
652} // namespace impeller
int count
Definition: FontMgrTest.cpp:50
GLenum type
static TextureVK & Cast(Texture &base)
Definition: backend_cast.h:13
const RenderTarget render_target_
Definition: render_pass.h:179
const std::map< size_t, ColorAttachment > & GetColorAttachments() const
const std::optional< DepthAttachment > & GetDepthAttachment() const
const std::optional< StencilAttachment > & GetStencilAttachment() const
void SetCachedFramebuffer(const SharedHandleVK< vk::Framebuffer > &framebuffer)
Definition: texture_vk.cc:177
SharedHandleVK< vk::RenderPass > GetCachedRenderPass() const
Definition: texture_vk.cc:191
SharedHandleVK< vk::Framebuffer > GetCachedFramebuffer() const
Definition: texture_vk.cc:187
bool SetLayout(const BarrierVK &barrier) const
Definition: texture_vk.cc:159
void SetCachedRenderPass(const SharedHandleVK< vk::RenderPass > &render_pass)
Definition: texture_vk.cc:182
static void Draw(SkCanvas *canvas, const SkRect &rect)
DlColor color
uint8_t value
GAsyncResult * result
uint32_t * target
FlTexture * texture
SK_API sk_sp< SkSurface > RenderTarget(GrRecordingContext *context, skgpu::Budgeted budgeted, const SkImageInfo &imageInfo, int sampleCount, GrSurfaceOrigin surfaceOrigin, const SkSurfaceProps *surfaceProps, bool shouldCreateWithMips=false, bool isProtected=false)
@ kNone
Definition: layer.h:53
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified vm service A custom Dart VM Service port The default is to pick a randomly available open port disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode disable vm service Disable mDNS Dart VM Service publication Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set endless trace buffer
Definition: switches.h:126
float Scalar
Definition: scalar.h:18
constexpr vk::IndexType ToVKIndexType(IndexType index_type)
Definition: formats_vk.h:349
constexpr vk::DescriptorType ToVKDescriptorType(DescriptorType type)
Definition: formats_vk.h:266
static constexpr size_t kMaxBindings
Definition: pipeline_vk.h:26
IRect64 IRect
Definition: rect.h:772
static constexpr size_t kMagicSubpassInputBinding
static vk::ClearDepthStencilValue VKClearValueFromDepthStencil(uint32_t stencil, Scalar depth)
static std::vector< vk::ClearValue > GetVKClearValues(const RenderTarget &target)
void InsertBarrierForInputAttachmentRead(const vk::CommandBuffer &buffer, const vk::Image &image)
Inserts the appropriate barriers to ensure that subsequent commands can read from the specified image...
static vk::ClearColorValue VKClearValueFromColor(Color color)
auto MakeSharedVK(vk::UniqueHandle< T, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE > handle)
Task::Status Status
Definition: TaskList.cpp:15
Definition: ref_ptr.h:256
static SkString to_string(int n)
Definition: nanobench.cpp:119
SeparatedVector2 offset
static constexpr TRect MakeSize(const TSize< U > &size)
Definition: rect.h:146
#define VALIDATION_LOG
Definition: validation.h:73
#define VK_NULL_HANDLE
Definition: vulkan_core.h:46