{
  // Nothing to do if ImGui produced no draw lists this frame.
  if (draw_data->CmdListsCount == 0) {
    return;
  }

  using VS = impeller::ImguiRasterVertexShader;
  using FS = impeller::ImguiRasterFragmentShader;

  auto* bd = ImGui_ImplImpeller_GetBackendData();
  IM_ASSERT(bd != nullptr && "Did you call ImGui_ImplImpeller_Init()?");

  size_t total_vtx_bytes = draw_data->TotalVtxCount * sizeof(VS::PerVertexData);
  size_t total_idx_bytes = draw_data->TotalIdxCount * sizeof(ImDrawIdx);
  if (!total_vtx_bytes || !total_idx_bytes) {
    return;  // Nothing to render.
  }

  // Allocate one device buffer large enough to hold all vertex data for the
  // frame, followed by all index data.
  impeller::DeviceBufferDescriptor buffer_desc;
  buffer_desc.size = total_vtx_bytes + total_idx_bytes;
  // NOTE(review): the descriptor setup lines were lost in extraction; the
  // storage mode must permit CopyHostBuffer below — confirm kHostVisible
  // matches the original.
  buffer_desc.storage_mode = impeller::StorageMode::kHostVisible;

  auto buffer = bd->context->GetResourceAllocator()->CreateBuffer(buffer_desc);
  buffer->SetLabel("ImGui vertex+index buffer");

  auto display_rect = impeller::Rect::MakeXYWH(
      draw_data->DisplayPos.x, draw_data->DisplayPos.y,
      draw_data->DisplaySize.x, draw_data->DisplaySize.y);

  // The viewport covers the display rect scaled into framebuffer pixels.
  auto viewport = impeller::Viewport{
      .rect = display_rect.Scale(draw_data->FramebufferScale.x,
                                 draw_data->FramebufferScale.y)};

  // Build the orthographic MVP that maps ImGui display space onto the
  // viewport and emplace it in the transient host buffer.
  // NOTE(review): reconstructed from the MakeOrthographic/Translate/
  // EmplaceUniform references — confirm against the original file.
  VS::UniformBuffer uniforms;
  uniforms.mvp = impeller::Matrix::MakeOrthographic(display_rect.GetSize())
                     .Translate(-display_rect.GetOrigin());
  auto vtx_uniforms = bd->host_buffer->EmplaceUniform(uniforms);

  // Vertex data is packed at the start of the shared buffer; index data is
  // appended after all vertices.
  size_t vertex_buffer_offset = 0;
  size_t index_buffer_offset = total_vtx_bytes;

  for (int draw_list_i = 0; draw_list_i < draw_data->CmdListsCount;
       draw_list_i++) {
    const ImDrawList* cmd_list = draw_data->CmdLists[draw_list_i];

    // Convert ImGui's interleaved `ImDrawVert` layout (packed u32 color)
    // into the vertex shader's `PerVertexData` layout (float RGBA color).
    std::vector<VS::PerVertexData> vtx_data;
    vtx_data.reserve(cmd_list->VtxBuffer.size());
    for (const auto& v : cmd_list->VtxBuffer) {
      ImVec4 color = ImGui::ColorConvertU32ToFloat4(v.col);
      vtx_data.push_back({{v.pos.x, v.pos.y},
                          {v.uv.x, v.uv.y},
                          {color.x, color.y, color.z, color.w}});
    }

    auto draw_list_vtx_bytes =
        static_cast<size_t>(vtx_data.size() * sizeof(VS::PerVertexData));
    auto draw_list_idx_bytes =
        static_cast<size_t>(cmd_list->IdxBuffer.size_in_bytes());
    if (!buffer->CopyHostBuffer(reinterpret_cast<uint8_t*>(vtx_data.data()),
                                impeller::Range{0, draw_list_vtx_bytes},
                                vertex_buffer_offset)) {
      IM_ASSERT(false && "Could not copy vertices to buffer.");
    }
    if (!buffer->CopyHostBuffer(
            reinterpret_cast<uint8_t*>(cmd_list->IdxBuffer.Data),
            impeller::Range{0, draw_list_idx_bytes}, index_buffer_offset)) {
      IM_ASSERT(false && "Could not copy indices to buffer.");
    }

    for (int cmd_i = 0; cmd_i < cmd_list->CmdBuffer.Size; cmd_i++) {
      const ImDrawCmd* pcmd = &cmd_list->CmdBuffer[cmd_i];

      if (pcmd->UserCallback) {
        // A user callback replaces rendering for this command entirely.
        pcmd->UserCallback(cmd_list, pcmd);
      } else {
        // Project the clip rect from ImGui display space into framebuffer
        // pixel space.
        auto clip_rect = impeller::Rect::MakeLTRB(
            (pcmd->ClipRect.x - draw_data->DisplayPos.x) *
                draw_data->FramebufferScale.x,
            (pcmd->ClipRect.y - draw_data->DisplayPos.y) *
                draw_data->FramebufferScale.y,
            (pcmd->ClipRect.z - draw_data->DisplayPos.x) *
                draw_data->FramebufferScale.x,
            (pcmd->ClipRect.w - draw_data->DisplayPos.y) *
                draw_data->FramebufferScale.y);
        {
          // Clamp the clip to the viewport; skip fully clipped commands.
          auto visible_clip = clip_rect.Intersection(viewport.rect);
          if (!visible_clip.has_value()) {
            continue;
          }
          clip_rect = visible_clip.value();
        }
        {
          // Also clamp to the render target bounds so the scissor can never
          // be set outside the attachment.
          // NOTE(review): the intersection argument was lost in extraction;
          // MakeSize(render_pass.GetRenderTargetSize()) is inferred from the
          // MakeSize tooltip — confirm.
          auto visible_clip = clip_rect.Intersection(
              impeller::Rect::MakeSize(render_pass.GetRenderTargetSize()));
          if (!visible_clip.has_value()) {
            continue;
          }
          clip_rect = visible_clip.value();
        }

        render_pass.SetCommandLabel(
            std::format("ImGui draw list {} (command {})", draw_list_i, cmd_i));
        render_pass.SetViewport(viewport);
        render_pass.SetScissor(impeller::IRect::RoundOut(clip_rect));
        render_pass.SetPipeline(bd->pipeline);
        VS::BindUniformBuffer(render_pass, vtx_uniforms);
        FS::BindTex(render_pass, bd->font_texture, bd->sampler);

        // Byte offset of this command's first vertex within the shared
        // vertex region of the buffer.
        size_t vb_start =
            vertex_buffer_offset + pcmd->VtxOffset * sizeof(ImDrawVert);

        // NOTE(review): the BufferView/VertexBuffer assembly below was
        // reconstructed from surviving fragments (the index range ends with
        // `pcmd->ElemCount * sizeof(ImDrawIdx)`) — confirm the exact
        // construction against impeller's current VertexBuffer API.
        impeller::VertexBuffer vertex_buffer;
        vertex_buffer.vertex_buffer = {
            .buffer = buffer, .range = impeller::Range(vb_start, draw_list_vtx_bytes)};
        vertex_buffer.index_buffer = {
            .buffer = buffer,
            .range = impeller::Range(
                index_buffer_offset + pcmd->IdxOffset * sizeof(ImDrawIdx),
                pcmd->ElemCount * sizeof(ImDrawIdx))};
        vertex_buffer.vertex_count = pcmd->ElemCount;
        vertex_buffer.index_type = impeller::IndexType::k16bit;
        render_pass.SetVertexBuffer(std::move(vertex_buffer));
        render_pass.SetBaseVertex(pcmd->VtxOffset);

        // Result intentionally ignored: a failed draw of the debug overlay
        // is not fatal to the frame.
        render_pass.Draw().ok();
      }
    }

    vertex_buffer_offset += draw_list_vtx_bytes;
    index_buffer_offset += draw_list_idx_bytes;
  }
}
BufferView EmplaceUniform(const UniformType &uniform)
Emplace uniform data onto the host buffer. Ensures that backend-specific uniform alignment requirements are respected.
void Reset()
Resets the contents of the HostBuffer to nothing so it can be reused.
static ImGui_ImplImpeller_Data * ImGui_ImplImpeller_GetBackendData()
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set profile Make the profiler discard new samples once the profiler sample buffer is full When this flag is not the profiler sample buffer is used as a ring buffer
LinePipeline::FragmentShader FS
LinePipeline::VertexShader VS
static constexpr Matrix MakeOrthographic(TSize< T > size)
constexpr Matrix Translate(const Vector3 &t) const
RoundOut(const TRect< U > &r)
static constexpr TRect MakeXYWH(Type x, Type y, Type width, Type height)
constexpr TRect Scale(Type scale) const
static constexpr TRect MakeSize(const TSize< U > &size)
static constexpr TRect MakeLTRB(Type left, Type top, Type right, Type bottom)
BufferView index_buffer
The index buffer binding used by the vertex shader stage.