#ifndef skgpu_graphite_MtlComputeCommandEncoder_DEFINED
#define skgpu_graphite_MtlComputeCommandEncoder_DEFINED

#include "include/core/SkRefCnt.h"
#include "include/ports/SkCFObject.h"
#include "src/gpu/graphite/ComputeTypes.h"
#include "src/gpu/graphite/Resource.h"

#import <Metal/Metal.h>

namespace skgpu::graphite {

/**
 * Wraps a MTLComputeCommandEncoder object and tracks bound state so redundant binds are skipped.
 */
class MtlComputeCommandEncoder : public Resource {
public:
    static sk_sp<MtlComputeCommandEncoder> Make(const SharedContext* sharedContext,
                                                id<MTLCommandBuffer> commandBuffer) {
        // Retain the encoder so our reference outlives the surrounding autorelease pool.
        sk_cfp<id<MTLComputeCommandEncoder>> encoder =
                sk_ret_cfp([commandBuffer computeCommandEncoder]);
        return sk_sp<MtlComputeCommandEncoder>(
                new MtlComputeCommandEncoder(sharedContext, std::move(encoder)));
    }
    const char* getResourceType() const override { return "Metal Compute Command Encoder"; }
    void setComputePipelineState(id<MTLComputePipelineState> pso) {
        if (fCurrentComputePipelineState != pso) {
            [(*fCommandEncoder) setComputePipelineState:pso];
            fCurrentComputePipelineState = pso;
        }
    }
    void setBuffer(id<MTLBuffer> buffer, NSUInteger offset, NSUInteger index) {
        SkASSERT(index < kMaxExpectedBuffers);
        // If the same buffer is already bound, only the offset needs to change.
        if (@available(macOS 10.11, iOS 8.3, tvOS 9.0, *)) {
            if (fBuffers[index] == buffer) {
                this->setBufferOffset(offset, index);
                return;
            }
        }
        if (fBuffers[index] != buffer || fBufferOffsets[index] != offset) {
            [(*fCommandEncoder) setBuffer:buffer offset:offset atIndex:index];
            fBuffers[index] = buffer;
            fBufferOffsets[index] = offset;
        }
    }
    void setBufferOffset(NSUInteger offset, NSUInteger index)
            SK_API_AVAILABLE(macos(10.11), ios(8.3), tvos(9.0)) {
        SkASSERT(index < kMaxExpectedBuffers);
        if (fBufferOffsets[index] != offset) {
            [(*fCommandEncoder) setBufferOffset:offset atIndex:index];
            fBufferOffsets[index] = offset;
        }
    }
    void setTexture(id<MTLTexture> texture, NSUInteger index) {
        SkASSERT(index < kMaxExpectedTextures);
        if (fTextures[index] != texture) {
            [(*fCommandEncoder) setTexture:texture atIndex:index];
            fTextures[index] = texture;
        }
    }
    void setSamplerState(id<MTLSamplerState> sampler, NSUInteger index) {
        SkASSERT(index < kMaxExpectedTextures);
        if (fSamplers[index] != sampler) {
            [(*fCommandEncoder) setSamplerState:sampler atIndex:index];
            fSamplers[index] = sampler;
        }
    }
    void dispatchThreadgroups(const WorkgroupSize& globalSize, const WorkgroupSize& localSize) {
        MTLSize threadgroupCount =
                MTLSizeMake(globalSize.fWidth, globalSize.fHeight, globalSize.fDepth);
        MTLSize threadsPerThreadgroup =
                MTLSizeMake(localSize.fWidth, localSize.fHeight, localSize.fDepth);
        [(*fCommandEncoder) dispatchThreadgroups:threadgroupCount
                           threadsPerThreadgroup:threadsPerThreadgroup];
    }
    void dispatchThreadgroupsWithIndirectBuffer(id<MTLBuffer> indirectBuffer,
                                                NSUInteger offset,
                                                const WorkgroupSize& localSize) {
        MTLSize threadsPerThreadgroup =
                MTLSizeMake(localSize.fWidth, localSize.fHeight, localSize.fDepth);
        [(*fCommandEncoder) dispatchThreadgroupsWithIndirectBuffer:indirectBuffer
                                              indirectBufferOffset:offset
                                             threadsPerThreadgroup:threadsPerThreadgroup];
    }
private:
    inline static constexpr int kMaxExpectedBuffers = 16;
    inline static constexpr int kMaxExpectedTextures = 16;
    MtlComputeCommandEncoder(const SharedContext* sharedContext,
                             sk_cfp<id<MTLComputeCommandEncoder>> encoder)
            : Resource(sharedContext,
                       Ownership::kOwned,
                       skgpu::Budgeted::kYes,
                       /*gpuMemorySize=*/0)
            , fCommandEncoder(std::move(encoder)) {
        for (int i = 0; i < kMaxExpectedBuffers; i++) {
            fBuffers[i] = nil;
        }
        for (int i = 0; i < kMaxExpectedTextures; i++) {
            fTextures[i] = nil;
            fSamplers[i] = nil;
        }
    }
    void freeGpuData() override { fCommandEncoder.reset(); }
    sk_cfp<id<MTLComputeCommandEncoder>> fCommandEncoder;

    id<MTLComputePipelineState> fCurrentComputePipelineState = nil;

    id<MTLBuffer> fBuffers[kMaxExpectedBuffers];
    NSUInteger fBufferOffsets[kMaxExpectedBuffers];

    id<MTLTexture> fTextures[kMaxExpectedTextures];
    id<MTLSamplerState> fSamplers[kMaxExpectedTextures];
};

}  // namespace skgpu::graphite

#endif  // skgpu_graphite_MtlComputeCommandEncoder_DEFINED
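// Illustrative usage sketch (not part of the original header). The command buffer,
// pipeline state, buffers, and dispatch sizes below are assumed placeholders:
//
//     sk_sp<MtlComputeCommandEncoder> encoder =
//             MtlComputeCommandEncoder::Make(sharedContext, mtlCommandBuffer);
//     encoder->setComputePipelineState(pipelineState);
//     encoder->setBuffer(inputBuffer, /*offset=*/0, /*index=*/0);
//     encoder->setBuffer(outputBuffer, /*offset=*/0, /*index=*/1);
//     // Rebinding the same buffer at the same offset is a no-op thanks to the tracked state.
//     encoder->setBuffer(inputBuffer, /*offset=*/0, /*index=*/0);
//     encoder->dispatchThreadgroups(/*globalSize=*/{64, 1, 1}, /*localSize=*/{256, 1, 1});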