Flutter Engine — mock_vulkan.cc (Doxygen source listing)
1// Copyright 2013 The Flutter Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
6
7#include <cstdint>
8#include <cstring>
9#include <utility>
10#include <vector>
11
13#include "impeller/renderer/backend/vulkan/vk.h" // IWYU pragma: keep.
14#include "third_party/swiftshader/include/vulkan/vulkan_core.h"
15#include "vulkan/vulkan.hpp"
16#include "vulkan/vulkan_core.h"
17
18namespace impeller {
19namespace testing {
20
21namespace {
22
// Mock stand-in for VkCommandBuffer. Records the names of the vk* entry
// points invoked against it into |called_functions_| (shared with the owning
// MockDevice) and captures the image barriers passed to vkCmdPipelineBarrier.
struct MockCommandBuffer {
  explicit MockCommandBuffer(
      std::shared_ptr<std::vector<std::string>> called_functions)
      : called_functions_(std::move(called_functions)) {}
  std::shared_ptr<std::vector<std::string>> called_functions_;
  std::vector<VkImageMemoryBarrier> image_memory_barriers_;
};

// Empty tag types backing the opaque Vulkan handles this mock hands out; the
// heap addresses of their instances serve as the handle values.
struct MockQueryPool {};

struct MockCommandPool {};

struct MockDescriptorPool {};

struct MockSurfaceKHR {};

struct MockImage {};

// Backs VkSwapchainKHR: a fixed ring of three images plus the index of the
// most recently acquired one (advanced by vkAcquireNextImageKHR).
struct MockSwapchainKHR {
  std::array<MockImage, 3> images;
  size_t current_image = 0;
};

struct MockSemaphore {};

struct MockFramebuffer {};

// Extent reported by vkGetPhysicalDeviceSurfaceCapabilitiesKHR. Presumably
// mutated by test helpers elsewhere in this file — confirm before relying on
// it being constant.
static ISize currentImageSize = ISize{1, 1};
51
52class MockDevice final {
53 public:
54 explicit MockDevice() : called_functions_(new std::vector<std::string>()) {}
55
56 MockCommandBuffer* NewCommandBuffer() {
57 auto buffer = std::make_unique<MockCommandBuffer>(called_functions_);
58 MockCommandBuffer* result = buffer.get();
59 Lock lock(command_buffers_mutex_);
60 command_buffers_.emplace_back(std::move(buffer));
61 return result;
62 }
63
64 MockCommandPool* NewCommandPool() {
65 auto pool = std::make_unique<MockCommandPool>();
66 MockCommandPool* result = pool.get();
67 Lock lock(commmand_pools_mutex_);
68 command_pools_.emplace_back(std::move(pool));
69 return result;
70 }
71
72 void DeleteCommandPool(MockCommandPool* pool) {
73 Lock lock(commmand_pools_mutex_);
74 auto it = std::find_if(command_pools_.begin(), command_pools_.end(),
75 [pool](const std::unique_ptr<MockCommandPool>& p) {
76 return p.get() == pool;
77 });
78 if (it != command_pools_.end()) {
79 command_pools_.erase(it);
80 }
81 }
82
83 const std::shared_ptr<std::vector<std::string>>& GetCalledFunctions() {
84 return called_functions_;
85 }
86
87 void AddCalledFunction(const std::string& function) {
88 Lock lock(called_functions_mutex_);
89 called_functions_->push_back(function);
90 }
91
92 private:
93 MockDevice(const MockDevice&) = delete;
94
95 MockDevice& operator=(const MockDevice&) = delete;
96
97 Mutex called_functions_mutex_;
98 std::shared_ptr<std::vector<std::string>> called_functions_
99 IPLR_GUARDED_BY(called_functions_mutex_);
100
101 Mutex command_buffers_mutex_;
102 std::vector<std::unique_ptr<MockCommandBuffer>> command_buffers_
103 IPLR_GUARDED_BY(command_buffers_mutex_);
104
105 Mutex commmand_pools_mutex_;
106 std::vector<std::unique_ptr<MockCommandPool>> command_pools_
107 IPLR_GUARDED_BY(commmand_pools_mutex_);
108};
109
// Do-nothing function used where a benign function pointer is required.
void noop() {}

// Extension names reported by vkEnumerateInstanceExtensionProperties.
// thread_local so concurrently running tests can configure it independently.
static thread_local std::vector<std::string> g_instance_extensions;
113
114VkResult vkEnumerateInstanceExtensionProperties(
115 const char* pLayerName,
116 uint32_t* pPropertyCount,
117 VkExtensionProperties* pProperties) {
118 if (!pProperties) {
119 *pPropertyCount = g_instance_extensions.size();
120 } else {
121 uint32_t count = 0;
122 for (const std::string& ext : g_instance_extensions) {
123 strncpy(pProperties[count].extensionName, ext.c_str(),
124 sizeof(VkExtensionProperties::extensionName));
125 pProperties[count].specVersion = 0;
126 count++;
127 }
128 }
129 return VK_SUCCESS;
130}
131
// Layer names reported by vkEnumerateInstanceLayerProperties; thread_local
// for the same per-test isolation as g_instance_extensions.
static thread_local std::vector<std::string> g_instance_layers;
133
134VkResult vkEnumerateInstanceLayerProperties(uint32_t* pPropertyCount,
135 VkLayerProperties* pProperties) {
136 if (!pProperties) {
137 *pPropertyCount = g_instance_layers.size();
138 } else {
139 uint32_t count = 0;
140 for (const std::string& layer : g_instance_layers) {
141 strncpy(pProperties[count].layerName, layer.c_str(),
142 sizeof(VkLayerProperties::layerName));
143 pProperties[count].specVersion = 0;
144 count++;
145 }
146 }
147 return VK_SUCCESS;
148}
149
// Reports exactly one physical device whose handle is the sentinel value
// 0xfeedface (never dereferenced; the mock dispatches on the handle only).
VkResult vkEnumeratePhysicalDevices(VkInstance instance,
                                    uint32_t* pPhysicalDeviceCount,
                                    VkPhysicalDevice* pPhysicalDevices) {
  if (!pPhysicalDevices) {
    *pPhysicalDeviceCount = 1;
  } else {
    pPhysicalDevices[0] = reinterpret_cast<VkPhysicalDevice>(0xfeedface);
  }
  return VK_SUCCESS;
}

// Test-installed hook supplying format capabilities for the mock device.
static thread_local std::function<void(VkPhysicalDevice physicalDevice,
                                       VkFormat format,
                                       VkFormatProperties* pFormatProperties)>
    g_format_properties_callback;

// Delegates entirely to g_format_properties_callback.
// NOTE(review): unlike vkGetPhysicalDeviceProperties below, the callback is
// invoked without a null check; if unset, calling the empty std::function
// throws std::bad_function_call — presumably the test harness always
// installs one. Confirm before relying on this without setup.
void vkGetPhysicalDeviceFormatProperties(
    VkPhysicalDevice physicalDevice,
    VkFormat format,
    VkFormatProperties* pFormatProperties) {
  g_format_properties_callback(physicalDevice, format, pFormatProperties);
}

// Optional test hook for overriding/augmenting device properties.
static thread_local std::function<void(VkPhysicalDevice physicalDevice,
                                       VkPhysicalDeviceProperties* pProperties)>
    g_physical_device_properties_callback;

// Fills in baseline limits (1x/4x color sample counts, 4096 max 2D image
// dimension, timestampPeriod of 1), then lets the optional callback override
// anything it wants.
void vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
                                   VkPhysicalDeviceProperties* pProperties) {
  pProperties->limits.framebufferColorSampleCounts =
      static_cast<VkSampleCountFlags>(VK_SAMPLE_COUNT_1_BIT |
                                      VK_SAMPLE_COUNT_4_BIT);
  pProperties->limits.maxImageDimension2D = 4096;
  pProperties->limits.timestampPeriod = 1;
  if (g_physical_device_properties_callback) {
    g_physical_device_properties_callback(physicalDevice, pProperties);
  }
}

// Advertises a single queue family with three queues supporting graphics,
// compute, and transfer.
void vkGetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice physicalDevice,
    uint32_t* pQueueFamilyPropertyCount,
    VkQueueFamilyProperties* pQueueFamilyProperties) {
  if (!pQueueFamilyProperties) {
    *pQueueFamilyPropertyCount = 1;
  } else {
    pQueueFamilyProperties[0].queueCount = 3;
    pQueueFamilyProperties[0].queueFlags = static_cast<VkQueueFlags>(
        VK_QUEUE_TRANSFER_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT);
  }
}
201
// The mock advertises exactly one device extension: VK_KHR_swapchain.
// (strcpy is safe here: the literal is far shorter than extensionName.)
VkResult vkEnumerateDeviceExtensionProperties(
    VkPhysicalDevice physicalDevice,
    const char* pLayerName,
    uint32_t* pPropertyCount,
    VkExtensionProperties* pProperties) {
  if (!pProperties) {
    *pPropertyCount = 1;
  } else {
    strcpy(pProperties[0].extensionName, "VK_KHR_swapchain");
    pProperties[0].specVersion = 0;
  }
  return VK_SUCCESS;
}

// Hands back a freshly allocated MockDevice as the VkDevice handle; freed by
// vkDestroyDevice.
VkResult vkCreateDevice(VkPhysicalDevice physicalDevice,
                        const VkDeviceCreateInfo* pCreateInfo,
                        const VkAllocationCallbacks* pAllocator,
                        VkDevice* pDevice) {
  *pDevice = reinterpret_cast<VkDevice>(new MockDevice());
  return VK_SUCCESS;
}

// The instance handle is a sentinel constant; the mock keeps no per-instance
// state.
VkResult vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo,
                          const VkAllocationCallbacks* pAllocator,
                          VkInstance* pInstance) {
  *pInstance = reinterpret_cast<VkInstance>(0xbaadf00d);
  return VK_SUCCESS;
}

// Describes two 1 GiB device-local heaps: memory type 0 is additionally
// host-visible and host-coherent; type 1 is device-local only.
void vkGetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
  pMemoryProperties->memoryTypeCount = 2;
  pMemoryProperties->memoryHeapCount = 2;
  pMemoryProperties->memoryTypes[0].heapIndex = 0;
  pMemoryProperties->memoryTypes[0].propertyFlags =
      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
      VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
  pMemoryProperties->memoryTypes[1].heapIndex = 1;
  pMemoryProperties->memoryTypes[1].propertyFlags =
      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
  pMemoryProperties->memoryHeaps[0].size = 1024 * 1024 * 1024;
  pMemoryProperties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
  pMemoryProperties->memoryHeaps[1].size = 1024 * 1024 * 1024;
  pMemoryProperties->memoryHeaps[1].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
}
249
// Logs the call and returns a sentinel pipeline-cache handle (no state).
VkResult vkCreatePipelineCache(VkDevice device,
                               const VkPipelineCacheCreateInfo* pCreateInfo,
                               const VkAllocationCallbacks* pAllocator,
                               VkPipelineCache* pPipelineCache) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkCreatePipelineCache");
  *pPipelineCache = reinterpret_cast<VkPipelineCache>(0xb000dead);
  return VK_SUCCESS;
}

// Logs the call and returns a MockCommandPool owned by the mock device.
VkResult vkCreateCommandPool(VkDevice device,
                             const VkCommandPoolCreateInfo* pCreateInfo,
                             const VkAllocationCallbacks* pAllocator,
                             VkCommandPool* pCommandPool) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkCreateCommandPool");
  *pCommandPool =
      reinterpret_cast<VkCommandPool>(mock_device->NewCommandPool());
  return VK_SUCCESS;
}

// Logs under a distinct name when the release-resources bit is set so tests
// can distinguish the two reset modes.
VkResult vkResetCommandPool(VkDevice device,
                            VkCommandPool commandPool,
                            VkCommandPoolResetFlags flags) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  if (flags & VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT) {
    mock_device->AddCalledFunction("vkResetCommandPoolReleaseResources");
  } else {
    mock_device->AddCalledFunction("vkResetCommandPool");
  }
  return VK_SUCCESS;
}

// Logs the call and hands out a device-owned MockCommandBuffer.
// NOTE(review): only pCommandBuffers[0] is written regardless of
// pAllocateInfo->commandBufferCount — presumably callers only ever request
// one; confirm before reusing with larger counts.
VkResult vkAllocateCommandBuffers(
    VkDevice device,
    const VkCommandBufferAllocateInfo* pAllocateInfo,
    VkCommandBuffer* pCommandBuffers) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkAllocateCommandBuffers");
  *pCommandBuffers =
      reinterpret_cast<VkCommandBuffer>(mock_device->NewCommandBuffer());
  return VK_SUCCESS;
}

// Recording is not modeled; unconditionally succeeds.
VkResult vkBeginCommandBuffer(VkCommandBuffer commandBuffer,
                              const VkCommandBufferBeginInfo* pBeginInfo) {
  return VK_SUCCESS;
}
298
// Returns a sentinel image handle; image contents are not modeled.
VkResult vkCreateImage(VkDevice device,
                       const VkImageCreateInfo* pCreateInfo,
                       const VkAllocationCallbacks* pAllocator,
                       VkImage* pImage) {
  *pImage = reinterpret_cast<VkImage>(0xD0D0CACA);
  return VK_SUCCESS;
}

// Every image claims a fixed 1 KiB requirement satisfiable by memory type 0.
void vkGetImageMemoryRequirements2KHR(
    VkDevice device,
    const VkImageMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
  pMemoryRequirements->memoryRequirements.size = 1024;
  pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
}

// Returns a sentinel device-memory handle; no backing store is allocated.
VkResult vkAllocateMemory(VkDevice device,
                          const VkMemoryAllocateInfo* pAllocateInfo,
                          const VkAllocationCallbacks* pAllocator,
                          VkDeviceMemory* pMemory) {
  *pMemory = reinterpret_cast<VkDeviceMemory>(0xCAFEB0BA);
  return VK_SUCCESS;
}

// Binding is not modeled; unconditionally succeeds.
VkResult vkBindImageMemory(VkDevice device,
                           VkImage image,
                           VkDeviceMemory memory,
                           VkDeviceSize memoryOffset) {
  return VK_SUCCESS;
}

// Returns a sentinel image-view handle.
VkResult vkCreateImageView(VkDevice device,
                           const VkImageViewCreateInfo* pCreateInfo,
                           const VkAllocationCallbacks* pAllocator,
                           VkImageView* pView) {
  *pView = reinterpret_cast<VkImageView>(0xFEE1DEAD);
  return VK_SUCCESS;
}

// Returns a sentinel buffer handle.
VkResult vkCreateBuffer(VkDevice device,
                        const VkBufferCreateInfo* pCreateInfo,
                        const VkAllocationCallbacks* pAllocator,
                        VkBuffer* pBuffer) {
  *pBuffer = reinterpret_cast<VkBuffer>(0xDEADDEAD);
  return VK_SUCCESS;
}

// Every buffer claims the same fixed 1 KiB requirement as images.
void vkGetBufferMemoryRequirements2KHR(
    VkDevice device,
    const VkBufferMemoryRequirementsInfo2* pInfo,
    VkMemoryRequirements2* pMemoryRequirements) {
  pMemoryRequirements->memoryRequirements.size = 1024;
  pMemoryRequirements->memoryRequirements.memoryTypeBits = 1;
}

// Binding is not modeled; unconditionally succeeds.
VkResult vkBindBufferMemory(VkDevice device,
                            VkBuffer buffer,
                            VkDeviceMemory memory,
                            VkDeviceSize memoryOffset) {
  return VK_SUCCESS;
}
360
// Logs the call and returns a sentinel render-pass handle.
VkResult vkCreateRenderPass(VkDevice device,
                            const VkRenderPassCreateInfo* pCreateInfo,
                            const VkAllocationCallbacks* pAllocator,
                            VkRenderPass* pRenderPass) {
  *pRenderPass = reinterpret_cast<VkRenderPass>(0x12341234);
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkCreateRenderPass");
  return VK_SUCCESS;
}

// Returns a sentinel descriptor-set-layout handle; not logged.
VkResult vkCreateDescriptorSetLayout(
    VkDevice device,
    const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDescriptorSetLayout* pSetLayout) {
  *pSetLayout = reinterpret_cast<VkDescriptorSetLayout>(0x77777777);
  return VK_SUCCESS;
}

// Returns a sentinel pipeline-layout handle; not logged.
VkResult vkCreatePipelineLayout(VkDevice device,
                                const VkPipelineLayoutCreateInfo* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                VkPipelineLayout* pPipelineLayout) {
  *pPipelineLayout = reinterpret_cast<VkPipelineLayout>(0x88888888);
  return VK_SUCCESS;
}

// Logs the call and returns a sentinel pipeline handle.
// NOTE(review): only pPipelines[0] is written regardless of createInfoCount —
// presumably callers create one pipeline at a time; confirm before reuse.
VkResult vkCreateGraphicsPipelines(
    VkDevice device,
    VkPipelineCache pipelineCache,
    uint32_t createInfoCount,
    const VkGraphicsPipelineCreateInfo* pCreateInfos,
    const VkAllocationCallbacks* pAllocator,
    VkPipeline* pPipelines) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkCreateGraphicsPipelines");
  *pPipelines = reinterpret_cast<VkPipeline>(0x99999999);
  return VK_SUCCESS;
}
400
401void vkDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator) {
402 MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
403 mock_device->AddCalledFunction("vkDestroyDevice");
404 delete reinterpret_cast<MockDevice*>(device);
405}
406
// Destruction is log-only; the handle was a sentinel constant.
void vkDestroyPipeline(VkDevice device,
                       VkPipeline pipeline,
                       const VkAllocationCallbacks* pAllocator) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkDestroyPipeline");
}

// Logs the call and returns a sentinel shader-module handle.
VkResult vkCreateShaderModule(VkDevice device,
                              const VkShaderModuleCreateInfo* pCreateInfo,
                              const VkAllocationCallbacks* pAllocator,
                              VkShaderModule* pShaderModule) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkCreateShaderModule");
  *pShaderModule = reinterpret_cast<VkShaderModule>(0x11111111);
  return VK_SUCCESS;
}

// Log-only destruction.
void vkDestroyShaderModule(VkDevice device,
                           VkShaderModule shaderModule,
                           const VkAllocationCallbacks* pAllocator) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkDestroyShaderModule");
}

// Log-only destruction.
void vkDestroyPipelineCache(VkDevice device,
                            VkPipelineCache pipelineCache,
                            const VkAllocationCallbacks* pAllocator) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkDestroyPipelineCache");
}

// No-op: MockSurfaceKHR instances are not tracked at the instance level.
void vkDestroySurfaceKHR(VkInstance instance,
                         VkSurfaceKHR surface,
                         const VkAllocationCallbacks* pAllocator) {
  return;
}

// vkCmd* entry points below record their own names into the command buffer's
// shared call log; no command state is replayed.
void vkCmdBindPipeline(VkCommandBuffer commandBuffer,
                       VkPipelineBindPoint pipelineBindPoint,
                       VkPipeline pipeline) {
  MockCommandBuffer* mock_command_buffer =
      reinterpret_cast<MockCommandBuffer*>(commandBuffer);
  mock_command_buffer->called_functions_->push_back("vkCmdBindPipeline");
}

// Logs the call and additionally captures each image barrier so tests can
// assert on layout transitions.
void vkCmdPipelineBarrier(VkCommandBuffer commandBuffer,
                          VkPipelineStageFlags srcStageMask,
                          VkPipelineStageFlags dstStageMask,
                          VkDependencyFlags dependencyFlags,
                          uint32_t memoryBarrierCount,
                          const VkMemoryBarrier* pMemoryBarriers,
                          uint32_t bufferMemoryBarrierCount,
                          const VkBufferMemoryBarrier* pBufferMemoryBarriers,
                          uint32_t imageMemoryBarrierCount,
                          const VkImageMemoryBarrier* pImageMemoryBarriers) {
  MockCommandBuffer* mock_command_buffer =
      reinterpret_cast<MockCommandBuffer*>(commandBuffer);
  mock_command_buffer->called_functions_->push_back("vkCmdPipelineBarrier");
  if (pImageMemoryBarriers) {
    for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
      mock_command_buffer->image_memory_barriers_.push_back(
          pImageMemoryBarriers[i]);
    }
  }
}

void vkCmdSetStencilReference(VkCommandBuffer commandBuffer,
                              VkStencilFaceFlags faceMask,
                              uint32_t reference) {
  MockCommandBuffer* mock_command_buffer =
      reinterpret_cast<MockCommandBuffer*>(commandBuffer);
  mock_command_buffer->called_functions_->push_back("vkCmdSetStencilReference");
}

void vkCmdSetScissor(VkCommandBuffer commandBuffer,
                     uint32_t firstScissor,
                     uint32_t scissorCount,
                     const VkRect2D* pScissors) {
  MockCommandBuffer* mock_command_buffer =
      reinterpret_cast<MockCommandBuffer*>(commandBuffer);
  mock_command_buffer->called_functions_->push_back("vkCmdSetScissor");
}

void vkCmdSetViewport(VkCommandBuffer commandBuffer,
                      uint32_t firstViewport,
                      uint32_t viewportCount,
                      const VkViewport* pViewports) {
  MockCommandBuffer* mock_command_buffer =
      reinterpret_cast<MockCommandBuffer*>(commandBuffer);
  mock_command_buffer->called_functions_->push_back("vkCmdSetViewport");
}

// Log-only; the MockCommandBuffer itself stays owned by the MockDevice.
void vkFreeCommandBuffers(VkDevice device,
                          VkCommandPool commandPool,
                          uint32_t commandBufferCount,
                          const VkCommandBuffer* pCommandBuffers) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkFreeCommandBuffers");
}

// Releases the device-owned MockCommandPool, then logs the call.
void vkDestroyCommandPool(VkDevice device,
                          VkCommandPool commandPool,
                          const VkAllocationCallbacks* pAllocator) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->DeleteCommandPool(
      reinterpret_cast<MockCommandPool*>(commandPool));
  mock_device->AddCalledFunction("vkDestroyCommandPool");
}

// Recording is not modeled; unconditionally succeeds.
VkResult vkEndCommandBuffer(VkCommandBuffer commandBuffer) {
  return VK_SUCCESS;
}
519
// Logs the call and hands back a heap-allocated MockFence (declared in the
// mock's header, outside this listing) as the fence handle.
VkResult vkCreateFence(VkDevice device,
                       const VkFenceCreateInfo* pCreateInfo,
                       const VkAllocationCallbacks* pAllocator,
                       VkFence* pFence) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkCreateFence");
  *pFence = reinterpret_cast<VkFence>(new MockFence());
  return VK_SUCCESS;
}

// Frees the MockFence created above. NOTE(review): declared to return
// VkResult although the real vkDestroyFence returns void — harmless through
// the proc-address cast, but confirm this is intentional.
VkResult vkDestroyFence(VkDevice device,
                        VkFence fence,
                        const VkAllocationCallbacks* pAllocator) {
  delete reinterpret_cast<MockFence*>(fence);
  return VK_SUCCESS;
}

// Submission is not modeled; unconditionally succeeds.
VkResult vkQueueSubmit(VkQueue queue,
                       uint32_t submitCount,
                       const VkSubmitInfo* pSubmits,
                       VkFence fence) {
  return VK_SUCCESS;
}

// Never blocks; all waits succeed immediately.
VkResult vkWaitForFences(VkDevice device,
                         uint32_t fenceCount,
                         const VkFence* pFences,
                         VkBool32 waitAll,
                         uint64_t timeout) {
  return VK_SUCCESS;
}
551
552VkResult vkGetFenceStatus(VkDevice device, VkFence fence) {
553 MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
554 MockFence* mock_fence = reinterpret_cast<MockFence*>(fence);
555 return mock_fence->GetStatus();
556}
557
// Fence state transitions are not modeled; unconditionally succeeds.
VkResult vkResetFences(VkDevice device,
                       uint32_t fenceCount,
                       const VkFence* fences) {
  return VK_SUCCESS;
}

// Debug messenger is accepted but never invoked; *pMessenger is left
// unwritten.
VkResult vkCreateDebugUtilsMessengerEXT(
    VkInstance instance,
    const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo,
    const VkAllocationCallbacks* pAllocator,
    VkDebugUtilsMessengerEXT* pMessenger) {
  return VK_SUCCESS;
}

// Object naming is a no-op in the mock.
VkResult vkSetDebugUtilsObjectNameEXT(
    VkDevice device,
    const VkDebugUtilsObjectNameInfoEXT* pNameInfo) {
  return VK_SUCCESS;
}

// Allocates a MockQueryPool as the handle and logs the call.
VkResult vkCreateQueryPool(VkDevice device,
                           const VkQueryPoolCreateInfo* pCreateInfo,
                           const VkAllocationCallbacks* pAllocator,
                           VkQueryPool* pQueryPool) {
  *pQueryPool = reinterpret_cast<VkQueryPool>(new MockQueryPool());
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkCreateQueryPool");
  return VK_SUCCESS;
}

// Logs the call and frees the MockQueryPool created above.
void vkDestroyQueryPool(VkDevice device,
                        VkQueryPool queryPool,
                        const VkAllocationCallbacks* pAllocator) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkDestroyQueryPool");
  delete reinterpret_cast<MockQueryPool*>(queryPool);
}

// Produces synthetic query results sized by |dataSize| (32- or 64-bit).
// NOTE(review): each loop iteration overwrites data[0] (not data[i]), the
// loop bound is queryCount rather than firstQuery + queryCount, and |stride|
// and |flags| are ignored — so only the final index value lands in the first
// slot. This looks deliberate for the tests that consume it, but confirm
// before relying on multi-query results.
VkResult vkGetQueryPoolResults(VkDevice device,
                               VkQueryPool queryPool,
                               uint32_t firstQuery,
                               uint32_t queryCount,
                               size_t dataSize,
                               void* pData,
                               VkDeviceSize stride,
                               VkQueryResultFlags flags) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  if (dataSize == sizeof(uint32_t)) {
    uint32_t* data = static_cast<uint32_t*>(pData);
    for (auto i = firstQuery; i < queryCount; i++) {
      data[0] = i;
    }
  } else if (dataSize == sizeof(int64_t)) {
    uint64_t* data = static_cast<uint64_t*>(pData);
    for (auto i = firstQuery; i < queryCount; i++) {
      data[0] = i;
    }
  }
  mock_device->AddCalledFunction("vkGetQueryPoolResults");
  return VK_SUCCESS;
}

// Allocates a MockDescriptorPool as the handle and logs the call.
VkResult vkCreateDescriptorPool(VkDevice device,
                                const VkDescriptorPoolCreateInfo* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                VkDescriptorPool* pDescriptorPool) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  *pDescriptorPool =
      reinterpret_cast<VkDescriptorPool>(new MockDescriptorPool());
  mock_device->AddCalledFunction("vkCreateDescriptorPool");
  return VK_SUCCESS;
}

// Logs the call and frees the MockDescriptorPool created above.
void vkDestroyDescriptorPool(VkDevice device,
                             VkDescriptorPool descriptorPool,
                             const VkAllocationCallbacks* pAllocator) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkDestroyDescriptorPool");
  delete reinterpret_cast<MockDescriptorPool*>(descriptorPool);
}

// Log-only; pool contents are not modeled.
VkResult vkResetDescriptorPool(VkDevice device,
                               VkDescriptorPool descriptorPool,
                               VkDescriptorPoolResetFlags flags) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkResetDescriptorPool");
  return VK_SUCCESS;
}

// Log-only. NOTE(review): pDescriptorSets is never written; callers get
// uninitialized handles — confirm consumers only check the VkResult.
VkResult vkAllocateDescriptorSets(
    VkDevice device,
    const VkDescriptorSetAllocateInfo* pAllocateInfo,
    VkDescriptorSet* pDescriptorSets) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkAllocateDescriptorSets");
  return VK_SUCCESS;
}
655
// Advertises a single surface format: RGBA8 UNORM in the sRGB non-linear
// color space.
VkResult vkGetPhysicalDeviceSurfaceFormatsKHR(
    VkPhysicalDevice physicalDevice,
    VkSurfaceKHR surface,
    uint32_t* pSurfaceFormatCount,
    VkSurfaceFormatKHR* pSurfaceFormats) {
  *pSurfaceFormatCount = 1u;
  if (pSurfaceFormats != nullptr) {
    pSurfaceFormats[0] =
        VkSurfaceFormatKHR{.format = VK_FORMAT_R8G8B8A8_UNORM,
                           .colorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR};
  }
  return VK_SUCCESS;
}

// Reports capabilities derived from the file-level currentImageSize: 3-6
// images, identity transform only, pre-multiplied alpha, color-attachment
// usage.
VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
    VkPhysicalDevice physicalDevice,
    VkSurfaceKHR surface,
    VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) {
  *pSurfaceCapabilities = VkSurfaceCapabilitiesKHR{
      .minImageCount = 3,
      .maxImageCount = 6,
      .currentExtent =
          VkExtent2D{
              .width = static_cast<uint32_t>(currentImageSize.width),
              .height = static_cast<uint32_t>(currentImageSize.height),
          },
      .minImageExtent =
          VkExtent2D{
              .width = 0,
              .height = 0,
          },
      .maxImageExtent =
          VkExtent2D{
              .width = static_cast<uint32_t>(currentImageSize.width),
              .height = static_cast<uint32_t>(currentImageSize.height),
          },
      .maxImageArrayLayers = 1,
      .supportedTransforms =
          VkSurfaceTransformFlagBitsKHR::VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
      .currentTransform =
          VkSurfaceTransformFlagBitsKHR::VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
      .supportedCompositeAlpha = VkCompositeAlphaFlagBitsKHR::
          VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
      .supportedUsageFlags =
          VkImageUsageFlagBits::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT};
  return VK_SUCCESS;
}

// Every queue family claims presentation support.
VkResult vkGetPhysicalDeviceSurfaceSupportKHR(VkPhysicalDevice physicalDevice,
                                              uint32_t queueFamilyIndex,
                                              VkSurfaceKHR surface,
                                              VkBool32* pSupported) {
  *pSupported = VK_TRUE;
  return VK_SUCCESS;
}

// Allocates a MockSwapchainKHR (three images) as the handle.
VkResult vkCreateSwapchainKHR(VkDevice device,
                              const VkSwapchainCreateInfoKHR* pCreateInfo,
                              const VkAllocationCallbacks* pAllocator,
                              VkSwapchainKHR* pSwapchain) {
  *pSwapchain = reinterpret_cast<VkSwapchainKHR>(new MockSwapchainKHR());
  return VK_SUCCESS;
}

// Frees the MockSwapchainKHR created above.
void vkDestroySwapchainKHR(VkDevice device,
                           VkSwapchainKHR swapchain,
                           const VkAllocationCallbacks* pAllocator) {
  delete reinterpret_cast<MockSwapchainKHR*>(swapchain);
}

// Hands out the addresses of the swapchain's MockImage slots as VkImage
// handles.
VkResult vkGetSwapchainImagesKHR(VkDevice device,
                                 VkSwapchainKHR swapchain,
                                 uint32_t* pSwapchainImageCount,
                                 VkImage* pSwapchainImages) {
  MockSwapchainKHR* mock_swapchain =
      reinterpret_cast<MockSwapchainKHR*>(swapchain);
  auto& images = mock_swapchain->images;
  *pSwapchainImageCount = images.size();
  if (pSwapchainImages != nullptr) {
    for (size_t i = 0; i < images.size(); i++) {
      pSwapchainImages[i] = reinterpret_cast<VkImage>(&images[i]);
    }
  }
  return VK_SUCCESS;
}

// Allocates a MockSemaphore as the handle.
VkResult vkCreateSemaphore(VkDevice device,
                           const VkSemaphoreCreateInfo* pCreateInfo,
                           const VkAllocationCallbacks* pAllocator,
                           VkSemaphore* pSemaphore) {
  *pSemaphore = reinterpret_cast<VkSemaphore>(new MockSemaphore());
  return VK_SUCCESS;
}

// Frees the MockSemaphore created above.
void vkDestroySemaphore(VkDevice device,
                        VkSemaphore semaphore,
                        const VkAllocationCallbacks* pAllocator) {
  delete reinterpret_cast<MockSemaphore*>(semaphore);
}
755
756VkResult vkAcquireNextImageKHR(VkDevice device,
757 VkSwapchainKHR swapchain,
758 uint64_t timeout,
759 VkSemaphore semaphore,
760 VkFence fence,
761 uint32_t* pImageIndex) {
762 auto current_index =
763 reinterpret_cast<MockSwapchainKHR*>(swapchain)->current_image++;
764 *pImageIndex = (current_index + 1) % 3u;
765 return VK_SUCCESS;
766}
767
// Allocates a MockFramebuffer as the handle.
VkResult vkCreateFramebuffer(VkDevice device,
                             const VkFramebufferCreateInfo* pCreateInfo,
                             const VkAllocationCallbacks* pAllocator,
                             VkFramebuffer* pFramebuffer) {
  *pFramebuffer = reinterpret_cast<VkFramebuffer>(new MockFramebuffer());
  return VK_SUCCESS;
}

// Frees the MockFramebuffer created above.
void vkDestroyFramebuffer(VkDevice device,
                          VkFramebuffer framebuffer,
                          const VkAllocationCallbacks* pAllocator) {
  delete reinterpret_cast<MockFramebuffer*>(framebuffer);
}

// Log-only; trimming is not modeled.
void vkTrimCommandPool(VkDevice device,
                       VkCommandPool commandPool,
                       VkCommandPoolTrimFlags flags) {
  MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
  mock_device->AddCalledFunction("vkTrimCommandPool");
}
788
789VkResult vkGetPipelineCacheData(VkDevice device,
790 VkPipelineCache pipelineCache,
791 size_t* pDataSize,
792 void* pData) {
793 if (pData) {
794 const std::array<uint8_t, 5> cache_data{1, 2, 3, 4, 5};
795 size_t dst_buffer_size = *pDataSize;
796 size_t length = std::min(dst_buffer_size, cache_data.size());
797 std::memcpy(pData, cache_data.data(), length);
798 *pDataSize = length;
799 return (dst_buffer_size >= length) ? VK_SUCCESS : VK_INCOMPLETE;
800 } else {
801 *pDataSize = 10;
802 return VK_SUCCESS;
803 }
804}
805
806PFN_vkVoidFunction GetMockVulkanProcAddress(VkInstance instance,
807 const char* pName) {
808 if (strcmp("vkEnumerateInstanceExtensionProperties", pName) == 0) {
809 return reinterpret_cast<PFN_vkVoidFunction>(
810 vkEnumerateInstanceExtensionProperties);
811 } else if (strcmp("vkEnumerateInstanceLayerProperties", pName) == 0) {
812 return reinterpret_cast<PFN_vkVoidFunction>(
813 vkEnumerateInstanceLayerProperties);
814 } else if (strcmp("vkEnumeratePhysicalDevices", pName) == 0) {
815 return reinterpret_cast<PFN_vkVoidFunction>(vkEnumeratePhysicalDevices);
816 } else if (strcmp("vkGetPhysicalDeviceFormatProperties", pName) == 0) {
817 return reinterpret_cast<PFN_vkVoidFunction>(
818 vkGetPhysicalDeviceFormatProperties);
819 } else if (strcmp("vkGetPhysicalDeviceProperties", pName) == 0) {
820 return reinterpret_cast<PFN_vkVoidFunction>(vkGetPhysicalDeviceProperties);
821 } else if (strcmp("vkGetPhysicalDeviceQueueFamilyProperties", pName) == 0) {
822 return reinterpret_cast<PFN_vkVoidFunction>(
823 vkGetPhysicalDeviceQueueFamilyProperties);
824 } else if (strcmp("vkEnumerateDeviceExtensionProperties", pName) == 0) {
825 return reinterpret_cast<PFN_vkVoidFunction>(
826 vkEnumerateDeviceExtensionProperties);
827 } else if (strcmp("vkCreateDevice", pName) == 0) {
828 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateDevice);
829 } else if (strcmp("vkCreateInstance", pName) == 0) {
830 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateInstance);
831 } else if (strcmp("vkGetPhysicalDeviceMemoryProperties", pName) == 0) {
832 return reinterpret_cast<PFN_vkVoidFunction>(
833 vkGetPhysicalDeviceMemoryProperties);
834 } else if (strcmp("vkCreatePipelineCache", pName) == 0) {
835 return reinterpret_cast<PFN_vkVoidFunction>(vkCreatePipelineCache);
836 } else if (strcmp("vkCreateCommandPool", pName) == 0) {
837 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateCommandPool);
838 } else if (strcmp("vkResetCommandPool", pName) == 0) {
839 return reinterpret_cast<PFN_vkVoidFunction>(vkResetCommandPool);
840 } else if (strcmp("vkAllocateCommandBuffers", pName) == 0) {
841 return reinterpret_cast<PFN_vkVoidFunction>(vkAllocateCommandBuffers);
842 } else if (strcmp("vkBeginCommandBuffer", pName) == 0) {
843 return reinterpret_cast<PFN_vkVoidFunction>(vkBeginCommandBuffer);
844 } else if (strcmp("vkCreateImage", pName) == 0) {
845 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateImage);
846 } else if (strcmp("vkGetInstanceProcAddr", pName) == 0) {
847 return reinterpret_cast<PFN_vkVoidFunction>(GetMockVulkanProcAddress);
848 } else if (strcmp("vkGetDeviceProcAddr", pName) == 0) {
849 return reinterpret_cast<PFN_vkVoidFunction>(GetMockVulkanProcAddress);
850 } else if (strcmp("vkGetImageMemoryRequirements2KHR", pName) == 0 ||
851 strcmp("vkGetImageMemoryRequirements2", pName) == 0) {
852 return reinterpret_cast<PFN_vkVoidFunction>(
853 vkGetImageMemoryRequirements2KHR);
854 } else if (strcmp("vkAllocateMemory", pName) == 0) {
855 return reinterpret_cast<PFN_vkVoidFunction>(vkAllocateMemory);
856 } else if (strcmp("vkBindImageMemory", pName) == 0) {
857 return reinterpret_cast<PFN_vkVoidFunction>(vkBindImageMemory);
858 } else if (strcmp("vkCreateImageView", pName) == 0) {
859 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateImageView);
860 } else if (strcmp("vkCreateBuffer", pName) == 0) {
861 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateBuffer);
862 } else if (strcmp("vkGetBufferMemoryRequirements2KHR", pName) == 0 ||
863 strcmp("vkGetBufferMemoryRequirements2", pName) == 0) {
864 return reinterpret_cast<PFN_vkVoidFunction>(
865 vkGetBufferMemoryRequirements2KHR);
866 } else if (strcmp("vkBindBufferMemory", pName) == 0) {
867 return reinterpret_cast<PFN_vkVoidFunction>(vkBindBufferMemory);
868 } else if (strcmp("vkCreateRenderPass", pName) == 0) {
869 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateRenderPass);
870 } else if (strcmp("vkCreateDescriptorSetLayout", pName) == 0) {
871 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateDescriptorSetLayout);
872 } else if (strcmp("vkCreatePipelineLayout", pName) == 0) {
873 return reinterpret_cast<PFN_vkVoidFunction>(vkCreatePipelineLayout);
874 } else if (strcmp("vkCreateGraphicsPipelines", pName) == 0) {
875 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateGraphicsPipelines);
876 } else if (strcmp("vkDestroyDevice", pName) == 0) {
877 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyDevice);
878 } else if (strcmp("vkDestroyPipeline", pName) == 0) {
879 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyPipeline);
880 } else if (strcmp("vkCreateShaderModule", pName) == 0) {
881 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateShaderModule);
882 } else if (strcmp("vkDestroyShaderModule", pName) == 0) {
883 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyShaderModule);
884 } else if (strcmp("vkDestroyPipelineCache", pName) == 0) {
885 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyPipelineCache);
886 } else if (strcmp("vkCmdBindPipeline", pName) == 0) {
887 return reinterpret_cast<PFN_vkVoidFunction>(vkCmdBindPipeline);
888 } else if (strcmp("vkCmdPipelineBarrier", pName) == 0) {
889 return reinterpret_cast<PFN_vkVoidFunction>(vkCmdPipelineBarrier);
890 } else if (strcmp("vkCmdSetStencilReference", pName) == 0) {
891 return reinterpret_cast<PFN_vkVoidFunction>(vkCmdSetStencilReference);
892 } else if (strcmp("vkCmdSetScissor", pName) == 0) {
893 return reinterpret_cast<PFN_vkVoidFunction>(vkCmdSetScissor);
894 } else if (strcmp("vkCmdSetViewport", pName) == 0) {
895 return reinterpret_cast<PFN_vkVoidFunction>(vkCmdSetViewport);
896 } else if (strcmp("vkDestroyCommandPool", pName) == 0) {
897 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyCommandPool);
898 } else if (strcmp("vkFreeCommandBuffers", pName) == 0) {
899 return reinterpret_cast<PFN_vkVoidFunction>(vkFreeCommandBuffers);
900 } else if (strcmp("vkEndCommandBuffer", pName) == 0) {
901 return reinterpret_cast<PFN_vkVoidFunction>(vkEndCommandBuffer);
902 } else if (strcmp("vkCreateFence", pName) == 0) {
903 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateFence);
904 } else if (strcmp("vkDestroyFence", pName) == 0) {
905 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyFence);
906 } else if (strcmp("vkQueueSubmit", pName) == 0) {
907 return reinterpret_cast<PFN_vkVoidFunction>(vkQueueSubmit);
908 } else if (strcmp("vkWaitForFences", pName) == 0) {
909 return reinterpret_cast<PFN_vkVoidFunction>(vkWaitForFences);
910 } else if (strcmp("vkGetFenceStatus", pName) == 0) {
911 return reinterpret_cast<PFN_vkVoidFunction>(vkGetFenceStatus);
912 } else if (strcmp("vkResetFences", pName) == 0) {
913 return reinterpret_cast<PFN_vkVoidFunction>(vkResetFences);
914 } else if (strcmp("vkCreateDebugUtilsMessengerEXT", pName) == 0) {
915 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateDebugUtilsMessengerEXT);
916 } else if (strcmp("vkSetDebugUtilsObjectNameEXT", pName) == 0) {
917 return reinterpret_cast<PFN_vkVoidFunction>(vkSetDebugUtilsObjectNameEXT);
918 } else if (strcmp("vkCreateQueryPool", pName) == 0) {
919 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateQueryPool);
920 } else if (strcmp("vkDestroyQueryPool", pName) == 0) {
921 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyQueryPool);
922 } else if (strcmp("vkGetQueryPoolResults", pName) == 0) {
923 return reinterpret_cast<PFN_vkVoidFunction>(vkGetQueryPoolResults);
924 } else if (strcmp("vkCreateDescriptorPool", pName) == 0) {
925 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateDescriptorPool);
926 } else if (strcmp("vkDestroyDescriptorPool", pName) == 0) {
927 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyDescriptorPool);
928 } else if (strcmp("vkResetDescriptorPool", pName) == 0) {
929 return reinterpret_cast<PFN_vkVoidFunction>(vkResetDescriptorPool);
930 } else if (strcmp("vkAllocateDescriptorSets", pName) == 0) {
931 return reinterpret_cast<PFN_vkVoidFunction>(vkAllocateDescriptorSets);
932 } else if (strcmp("vkGetPhysicalDeviceSurfaceFormatsKHR", pName) == 0) {
933 return reinterpret_cast<PFN_vkVoidFunction>(
934 vkGetPhysicalDeviceSurfaceFormatsKHR);
935 } else if (strcmp("vkGetPhysicalDeviceSurfaceCapabilitiesKHR", pName) == 0) {
936 return reinterpret_cast<PFN_vkVoidFunction>(
937 vkGetPhysicalDeviceSurfaceCapabilitiesKHR);
938 } else if (strcmp("vkGetPhysicalDeviceSurfaceSupportKHR", pName) == 0) {
939 return reinterpret_cast<PFN_vkVoidFunction>(
940 vkGetPhysicalDeviceSurfaceSupportKHR);
941 } else if (strcmp("vkCreateSwapchainKHR", pName) == 0) {
942 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateSwapchainKHR);
943 } else if (strcmp("vkDestroySwapchainKHR", pName) == 0) {
944 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroySwapchainKHR);
945 } else if (strcmp("vkGetSwapchainImagesKHR", pName) == 0) {
946 return reinterpret_cast<PFN_vkVoidFunction>(vkGetSwapchainImagesKHR);
947 } else if (strcmp("vkCreateSemaphore", pName) == 0) {
948 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateSemaphore);
949 } else if (strcmp("vkDestroySemaphore", pName) == 0) {
950 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroySemaphore);
951 } else if (strcmp("vkDestroySurfaceKHR", pName) == 0) {
952 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroySurfaceKHR);
953 } else if (strcmp("vkAcquireNextImageKHR", pName) == 0) {
954 return reinterpret_cast<PFN_vkVoidFunction>(vkAcquireNextImageKHR);
955 } else if (strcmp("vkCreateFramebuffer", pName) == 0) {
956 return reinterpret_cast<PFN_vkVoidFunction>(vkCreateFramebuffer);
957 } else if (strcmp("vkDestroyFramebuffer", pName) == 0) {
958 return reinterpret_cast<PFN_vkVoidFunction>(vkDestroyFramebuffer);
959 } else if (strcmp("vkTrimCommandPool", pName) == 0) {
960 return reinterpret_cast<PFN_vkVoidFunction>(vkTrimCommandPool);
961 } else if (strcmp("vkGetPipelineCacheData", pName) == 0) {
962 return reinterpret_cast<PFN_vkVoidFunction>(vkGetPipelineCacheData);
963 }
964 return noop;
965}
966
967} // namespace
968
970 : instance_extensions_({"VK_KHR_surface", "VK_MVK_macos_surface"}),
971 format_properties_callback_([](VkPhysicalDevice physicalDevice,
972 VkFormat format,
973 VkFormatProperties* pFormatProperties) {
974 if (format == VK_FORMAT_R8G8B8A8_UNORM) {
975 pFormatProperties->optimalTilingFeatures =
976 static_cast<VkFormatFeatureFlags>(
977 vk::FormatFeatureFlagBits::eColorAttachment);
978 } else if (format == VK_FORMAT_D32_SFLOAT_S8_UINT) {
979 pFormatProperties->optimalTilingFeatures =
980 static_cast<VkFormatFeatureFlags>(
981 vk::FormatFeatureFlagBits::eDepthStencilAttachment);
982 } else if (format == VK_FORMAT_S8_UINT) {
983 pFormatProperties->optimalTilingFeatures =
984 static_cast<VkFormatFeatureFlags>(
985 vk::FormatFeatureFlagBits::eDepthStencilAttachment);
986 }
987 }) {}
988
989std::shared_ptr<ContextVK> MockVulkanContextBuilder::Build() {
990 auto message_loop = fml::ConcurrentMessageLoop::Create();
991 ContextVK::Settings settings;
992 settings.proc_address_callback = GetMockVulkanProcAddress;
993 if (settings_callback_) {
994 settings_callback_(settings);
995 }
996 g_instance_extensions = instance_extensions_;
997 g_instance_layers = instance_layers_;
998 g_format_properties_callback = format_properties_callback_;
999 g_physical_device_properties_callback = physical_properties_callback_;
1000 settings.embedder_data = embedder_data_;
1001 std::shared_ptr<ContextVK> result = ContextVK::Create(std::move(settings));
1002 return result;
1003}
1004
1005std::shared_ptr<std::vector<std::string>> GetMockVulkanFunctions(
1006 VkDevice device) {
1007 MockDevice* mock_device = reinterpret_cast<MockDevice*>(device);
1008 return mock_device->GetCalledFunctions();
1009}
1010
1012 currentImageSize = size;
1013}
1014
1015std::vector<VkImageMemoryBarrier>& GetImageMemoryBarriers(
1016 VkCommandBuffer buffer) {
1017 MockCommandBuffer* mock_command_buffer =
1018 reinterpret_cast<MockCommandBuffer*>(buffer);
1019 return mock_command_buffer->image_memory_barriers_;
1020}
1021
1022} // namespace testing
1023} // namespace impeller
static std::shared_ptr< ConcurrentMessageLoop > Create(size_t worker_count=std::thread::hardware_concurrency())
static std::shared_ptr< ContextVK > Create(Settings settings)
MockCommandBuffer(std::weak_ptr< const Context > context)
Definition mocks.h:120
std::shared_ptr< ContextVK > Build()
Create a Vulkan context with Vulkan functions mocked. The caller is given a chance to tinker on the s...
FlutterVulkanImage * image
VkSwapchainKHR swapchain
Definition main.cc:80
VkDevice device
Definition main.cc:69
VkInstance instance
Definition main.cc:64
VkQueue queue
Definition main.cc:71
size_t length
size_t current_image
std::shared_ptr< std::vector< std::string > > called_functions_
std::array< MockImage, 3 > images
std::vector< VkImageMemoryBarrier > image_memory_barriers_
it will be possible to load the file into Perfetto s trace viewer use test Running tests that layout and measure text will not yield consistent results across various platforms Enabling this option will make font resolution default to the Ahem test font on all disable asset Prevents usage of any non test fonts unless they were explicitly Loaded via prefetched default font Indicates whether the embedding started a prefetch of the default font manager before creating the engine run In non interactive keep the shell running after the Dart script has completed enable serial On low power devices with low core running concurrent GC tasks on threads can cause them to contend with the UI thread which could potentially lead to jank This option turns off all concurrent GC activities domain network JSON encoded network policy per domain This overrides the DisallowInsecureConnections switch Embedder can specify whether to allow or disallow insecure connections at a domain level old gen heap size
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot data
Definition switch_defs.h:36
DEF_SWITCHES_START aot vmservice shared library Name of the *so containing AOT compiled Dart assets for launching the service isolate vm snapshot The VM snapshot data that will be memory mapped as read only SnapshotAssetPath must be present isolate snapshot The isolate snapshot data that will be memory mapped as read only SnapshotAssetPath must be present cache dir Path to the cache directory This is different from the persistent_cache_path in embedder which is used for Skia shader cache icu native lib Path to the library file that exports the ICU data vm service The hostname IP address on which the Dart VM Service should be served If not defaults to or::depending on whether ipv6 is specified disable vm Disable the Dart VM Service The Dart VM Service is never available in release mode Bind to the IPv6 localhost address for the Dart VM Service Ignored if vm service host is set profile Make the profiler discard new samples once the profiler sample buffer is full When this flag is not the profiler sample buffer is used as a ring buffer
Definition switch_defs.h:98
std::shared_ptr< std::vector< std::string > > GetMockVulkanFunctions(VkDevice device)
std::vector< VkImageMemoryBarrier > & GetImageMemoryBarriers(VkCommandBuffer buffer)
void SetSwapchainImageSize(ISize size)
Override the image size returned by all swapchain images.
ISize64 ISize
Definition size.h:162
Definition ref_ptr.h:261
int32_t height
PFN_vkGetInstanceProcAddr proc_address_callback
Definition context_vk.h:80
std::optional< EmbedderData > embedder_data
Definition context_vk.h:90
#define IPLR_GUARDED_BY(x)