146 const std::shared_ptr<Allocator>& transients_allocator,
148 const std::vector<Command>& commands,
149 const std::shared_ptr<GPUTracerGLES>& tracer) {
150 TRACE_EVENT0(
"impeller",
"RenderPassGLES::EncodeCommandsInReactor");
154 tracer->MarkFrameStart(gl);
158 [&gl]() { gl.PopDebugGroup(); });
159 if (!pass_data.
label.empty()) {
160 gl.PushDebugGroup(pass_data.
label);
162 pop_pass_debug_marker.Release();
165 GLuint fbo = GL_NONE;
167 if (fbo != GL_NONE) {
168 gl.BindFramebuffer(GL_FRAMEBUFFER, GL_NONE);
169 gl.DeleteFramebuffers(1u, &fbo);
174 const bool is_default_fbo = color_gles.IsWrapped();
176 if (is_default_fbo) {
177 if (color_gles.GetFBO().has_value()) {
179 gl.BindFramebuffer(GL_FRAMEBUFFER, *color_gles.GetFBO());
183 gl.GenFramebuffers(1u, &fbo);
184 gl.BindFramebuffer(GL_FRAMEBUFFER, fbo);
186 if (!color_gles.SetAsFramebufferAttachment(
187 GL_FRAMEBUFFER, TextureGLES::AttachmentType::kColor0)) {
192 if (!depth->SetAsFramebufferAttachment(
193 GL_FRAMEBUFFER, TextureGLES::AttachmentType::kDepth)) {
198 if (!stencil->SetAsFramebufferAttachment(
199 GL_FRAMEBUFFER, TextureGLES::AttachmentType::kStencil)) {
204 auto status = gl.CheckFramebufferStatus(GL_FRAMEBUFFER);
205 if (status != GL_FRAMEBUFFER_COMPLETE) {
218 if (gl.DepthRangef.IsAvailable()) {
228 GLenum clear_bits = 0u;
230 clear_bits |= GL_COLOR_BUFFER_BIT;
233 clear_bits |= GL_DEPTH_BUFFER_BIT;
236 clear_bits |= GL_STENCIL_BUFFER_BIT;
239 gl.Disable(GL_SCISSOR_TEST);
240 gl.Disable(GL_DEPTH_TEST);
241 gl.Disable(GL_STENCIL_TEST);
242 gl.Disable(GL_CULL_FACE);
243 gl.Disable(GL_BLEND);
244 gl.ColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
245 gl.DepthMask(GL_TRUE);
246 gl.StencilMaskSeparate(GL_FRONT, 0xFFFFFFFF);
247 gl.StencilMaskSeparate(GL_BACK, 0xFFFFFFFF);
249 gl.Clear(clear_bits);
251 for (
const auto& command : commands) {
252 if (command.instance_count != 1u) {
253 VALIDATION_LOG <<
"GLES backend does not support instanced rendering.";
257 if (!command.pipeline) {
264 [&gl]() { gl.PopDebugGroup(); });
265 if (!command.label.empty()) {
266 gl.PushDebugGroup(command.label);
268 pop_cmd_debug_marker.Release();
272 const auto& pipeline = PipelineGLES::Cast(*command.pipeline);
274 const auto* color_attachment =
275 pipeline.GetDescriptor().GetLegacyCompatibleColorAttachment();
276 if (!color_attachment) {
278 <<
"Color attachment is too complicated for a legacy renderer.";
296 pipeline.GetDescriptor().GetDepthStencilAttachmentDescriptor();
298 gl.Enable(GL_DEPTH_TEST);
300 gl.DepthMask(depth->depth_write_enabled ? GL_TRUE : GL_FALSE);
302 gl.Disable(GL_DEPTH_TEST);
313 const auto& viewport = command.viewport.value_or(pass_data.
viewport);
314 gl.Viewport(viewport.rect.GetX(),
315 target_size.height - viewport.rect.GetY() -
316 viewport.rect.GetHeight(),
317 viewport.rect.GetWidth(),
318 viewport.rect.GetHeight()
321 if (gl.DepthRangef.IsAvailable()) {
322 gl.DepthRangef(viewport.depth_range.z_near, viewport.depth_range.z_far);
324 gl.DepthRange(viewport.depth_range.z_near, viewport.depth_range.z_far);
331 if (command.scissor.has_value()) {
332 const auto& scissor = command.scissor.value();
333 gl.Enable(GL_SCISSOR_TEST);
336 target_size.height - scissor.GetY() - scissor.GetHeight(),
341 gl.Disable(GL_SCISSOR_TEST);
347 switch (pipeline.GetDescriptor().GetCullMode()) {
348 case CullMode::kNone:
349 gl.Disable(GL_CULL_FACE);
351 case CullMode::kFrontFace:
352 gl.Enable(GL_CULL_FACE);
353 gl.CullFace(GL_FRONT);
355 case CullMode::kBackFace:
356 gl.Enable(GL_CULL_FACE);
357 gl.CullFace(GL_BACK);
363 switch (pipeline.GetDescriptor().GetWindingOrder()) {
364 case WindingOrder::kClockwise:
367 case WindingOrder::kCounterClockwise:
368 gl.FrontFace(GL_CCW);
372 if (command.vertex_buffer.index_type == IndexType::kUnknown) {
376 auto vertex_desc_gles = pipeline.GetBufferBindings();
381 auto& vertex_buffer_view = command.vertex_buffer.vertex_buffer;
383 if (!vertex_buffer_view) {
387 auto vertex_buffer = vertex_buffer_view.buffer;
389 if (!vertex_buffer) {
393 const auto& vertex_buffer_gles = DeviceBufferGLES::Cast(*vertex_buffer);
394 if (!vertex_buffer_gles.BindAndUploadDataIfNecessary(
395 DeviceBufferGLES::BindingType::kArrayBuffer)) {
402 if (!pipeline.BindProgram()) {
409 if (!vertex_desc_gles->BindVertexAttributes(
410 gl, vertex_buffer_view.range.offset)) {
417 if (!vertex_desc_gles->BindUniformData(gl,
418 *transients_allocator,
419 command.vertex_bindings,
420 command.fragment_bindings
433 auto mode = pipeline.GetDescriptor().GetPolygonMode() == PolygonMode::kLine
435 :
ToMode(pipeline.GetDescriptor().GetPrimitiveType());
440 if (command.vertex_buffer.index_type == IndexType::kNone) {
441 gl.DrawArrays(mode, command.base_vertex,
442 command.vertex_buffer.vertex_count);
445 auto index_buffer_view = command.vertex_buffer.index_buffer;
446 auto index_buffer = index_buffer_view.buffer;
447 const auto& index_buffer_gles = DeviceBufferGLES::Cast(*index_buffer);
448 if (!index_buffer_gles.BindAndUploadDataIfNecessary(
449 DeviceBufferGLES::BindingType::kElementArrayBuffer)) {
452 gl.DrawElements(mode,
453 command.vertex_buffer.vertex_count,
455 reinterpret_cast<const GLvoid*
>(
static_cast<GLsizei
>(
456 index_buffer_view.range.offset))
463 if (!vertex_desc_gles->UnbindVertexAttributes(gl)) {
470 if (!pipeline.UnbindProgram()) {
475 if (gl.DiscardFramebufferEXT.IsAvailable()) {
476 std::vector<GLenum> attachments;
481 bool angle_safe = gl.GetCapabilities()->IsANGLE() ? !is_default_fbo :
true;
484 attachments.push_back(is_default_fbo ? GL_COLOR_EXT
485 : GL_COLOR_ATTACHMENT0);
488 attachments.push_back(is_default_fbo ? GL_DEPTH_EXT
489 : GL_DEPTH_ATTACHMENT);
493 attachments.push_back(is_default_fbo ? GL_STENCIL_EXT
494 : GL_STENCIL_ATTACHMENT);
496 gl.DiscardFramebufferEXT(GL_FRAMEBUFFER,
503 if (is_default_fbo) {
504 tracer->MarkFrameEnd(gl);
512bool RenderPassGLES::OnEncodeCommands(
const Context& context)
const {
516 const auto& render_target = GetRenderTarget();
517 if (!render_target.HasColorAttachment(0u)) {
520 const auto& color0 = render_target.GetColorAttachments().at(0u);
521 const auto& depth0 = render_target.GetDepthAttachment();
522 const auto& stencil0 = render_target.GetStencilAttachment();
524 auto pass_data = std::make_shared<RenderPassData>();
525 pass_data->label = label_;
526 pass_data->viewport.rect = Rect::MakeSize(GetRenderTargetSize());
531 pass_data->color_attachment = color0.texture;
532 pass_data->clear_color = color0.clear_color;
534 pass_data->discard_color_attachment =
540 if (color0.resolve_texture) {
542 pass_data->discard_color_attachment =
false;
548 if (depth0.has_value()) {
549 pass_data->depth_attachment = depth0->texture;
550 pass_data->clear_depth = depth0->clear_depth;
552 pass_data->discard_depth_attachment =
559 if (stencil0.has_value()) {
560 pass_data->stencil_attachment = stencil0->texture;
561 pass_data->clear_stencil = stencil0->clear_stencil;
562 pass_data->clear_stencil_attachment =
564 pass_data->discard_stencil_attachment =
568 std::shared_ptr<const RenderPassGLES> shared_this = shared_from_this();
569 auto tracer = ContextGLES::Cast(context).GetGPUTracer();
570 return reactor_->AddOperation([pass_data,
572 render_pass = std::move(shared_this),
573 tracer](
const auto& reactor) {
574 auto result = EncodeCommandsInReactor(*pass_data, allocator, reactor,
575 render_pass->commands_, tracer);
576 FML_CHECK(result) <<
"Must be able to encode GL commands without error.";
virtual const std::shared_ptr< const Capabilities > & GetCapabilities() const =0
Get the capabilities of the Impeller context. All optionally supported features of the platform,...
virtual std::shared_ptr< Allocator > GetResourceAllocator() const =0
Returns the allocator used to create textures and buffers on the device.