Flutter Engine
The Flutter Engine
CoverageMaskRenderStep.cpp
Go to the documentation of this file.
1/*
2 * Copyright 2023 Google LLC
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
8
15
16namespace skgpu::graphite {
17
18// The device origin is applied *before* the maskToDeviceRemainder matrix so that it can be
19// combined with the mask atlas origin. This is necessary so that the mask bounds can be inset or
20// outset for clamping w/o affecting the alignment of the mask sampling.
21static skvx::float2 get_device_translation(const SkM44& localToDevice) {
22 float m00 = localToDevice.rc(0,0), m01 = localToDevice.rc(0,1);
23 float m10 = localToDevice.rc(1,0), m11 = localToDevice.rc(1,1);
24
25 float det = m00*m11 - m01*m10;
26 if (SkScalarNearlyZero(det)) {
27 // We can't extract any pre-translation, since the upper 2x2 is not invertible. Return (0,0)
28 // so that the maskToDeviceRemainder matrix remains the full transform.
29 return {0.f, 0.f};
30 }
31
32 // Calculate inv([[m00,m01][m10,m11]])*[[m30][m31]] to get the pre-remainder device translation.
33 float tx = localToDevice.rc(0,3), ty = localToDevice.rc(1,3);
34 skvx::float4 invT = skvx::float4{m11, -m10, -m01, m00} * skvx::float4{tx,tx,ty,ty};
35 return (invT.xy() + invT.zw()) / det;
36}
37
// NOTE(review): the Doxygen extraction this text came from dropped several original
// lines (38, 46-47, 56, 58-64), including the constructor's signature line and most of
// the instance-attribute entries -- 'maskBoundsIn', 'deviceOrigin', 'depth',
// 'ssboIndices', and 'mat0'..'mat2' are referenced by vertexSkSL()/writeVertices()
// elsewhere in this file but are not visible here. Recover the full constructor from
// the original source before editing this block.
39 : RenderStep("CoverageMaskRenderStep",
40 "",
41 // The mask will have AA outsets baked in, but the original bounds for clipping
42 // still require the outset for analytic coverage.
43 Flags::kPerformsShading | Flags::kHasTextures | Flags::kEmitsCoverage |
44 Flags::kOutsetBoundsForAA,
45 /*uniforms=*/{{"maskToDeviceRemainder", SkSLType::kFloat3x3}},
48 /*vertexAttrs=*/{},
49 /*instanceAttrs=*/
50 // Draw bounds and mask bounds are normalized relative to the mask texture,
51 // but 'drawBounds' is stored in float since the coords may map outside of
52 // [0,1] for inverse-filled masks. 'drawBounds' is relative to the logical mask
53 // entry's origin, while 'maskBoundsIn' is atlas-relative. Inverse fills swap
54 // the order in 'maskBoundsIn' to be RBLT.
55 {{"drawBounds", VertexAttribType::kFloat4 , SkSLType::kFloat4}, // ltrb
57 // Remaining translation extracted from actual 'maskToDevice' transform.
61 // deviceToLocal matrix for producing local coords for shader evaluation
65 /*varyings=*/
66 {// `maskBounds` are the atlas-relative, sorted bounds of the coverage mask.
67 // `textureCoords` are the atlas-relative UV coordinates of the draw, which
68 // can spill beyond `maskBounds` for inverse fills.
69 // TODO: maskBounds is constant for all fragments for a given instance,
70 // could we store them in the draw's SSBO?
71 {"maskBounds" , SkSLType::kFloat4},
72 {"textureCoords", SkSLType::kFloat2},
73 // 'invert' is set to 0 to use unmodified coverage, and set to 1 for "1-c".
74 {"invert", SkSLType::kHalf}}) {}
75
// NOTE(review): extraction dropped original line 76, which carried this function's
// signature (presumably `std::string CoverageMaskRenderStep::vertexSkSL() const {`);
// restore it from the original source before editing.
77 // Returns the body of a vertex function, which must define a float4 devPosition variable and
78 // must write to an already-defined float2 stepLocalCoords variable.
// The quad corner is derived from sk_VertexID in [0,3]: (id >> 1, id & 1) selects one
// of the four unit-square corners, matching the 4-vertex instances in writeVertices().
79 return "float4 devPosition = coverage_mask_vertex_fn("
80 "float2(sk_VertexID >> 1, sk_VertexID & 1), "
81 "maskToDeviceRemainder, drawBounds, maskBoundsIn, deviceOrigin, "
82 "depth, float3x3(mat0, mat1, mat2), "
83 "maskBounds, textureCoords, invert, stepLocalCoords);\n";
84}
85
// NOTE(review): extraction dropped original line 86 with the start of this function's
// signature; per the index at the bottom of the dump it is
// `std::string texturesAndSamplersSkSL(const ResourceBindingRequirements&, int*) const override`.
87 const ResourceBindingRequirements& bindingReqs, int* nextBindingIndex) const {
// Declares the atlas sampler used by fragmentCoverageSkSL(); EmitSamplerLayout
// presumably supplies the backend-appropriate layout/binding prefix -- confirm against
// its definition.
88 return EmitSamplerLayout(bindingReqs, nextBindingIndex) + " sampler2D pathAtlas;";
89}
90
// NOTE(review): extraction dropped original line 91, which carried this function's
// signature; per the index at the bottom of the dump it is
// `const char* fragmentCoverageSkSL() const override`.
// Clamping textureCoords to [maskBounds.LT, maskBounds.RB] keeps the sample inside the
// (inset/outset) mask entry; 'invert' selects between c and 1-c via mix().
92 return R"(
93 half c = sample(pathAtlas, clamp(textureCoords, maskBounds.LT, maskBounds.RB)).r;
94 outputCoverage = half4(mix(c, 1 - c, invert));
95 )";
96}
97
// NOTE(review): extraction dropped original line 98, which carried the start of this
// function's signature; per the index at the bottom of the dump it is
// `void writeVertices(DrawWriter*, const DrawParams&, skvx::ushort2 ssboIndices) const override`,
// and the DrawWriter parameter is named 'dw' below.
99 const DrawParams& params,
100 skvx::ushort2 ssboIndices) const {
101 const CoverageMaskShape& coverageMask = params.geometry().coverageMaskShape();
102 const TextureProxy* proxy = coverageMask.textureProxy();
103 SkASSERT(proxy);
104
105 // A quad is a 4-vertex instance. The coordinates are derived from the vertex IDs.
106 DrawWriter::Instances instances(*dw, {}, {}, 4);
107
108 // The device origin is the translation extracted from the mask-to-device matrix so
109 // that the remaining matrix uniform has less variance between draws.
110 const auto& maskToDevice = params.transform().matrix();
111 skvx::float2 deviceOrigin = get_device_translation(maskToDevice);
112
113 // Relative to mask space (device origin and mask-to-device remainder must be applied in shader)
114 skvx::float4 maskBounds = coverageMask.bounds().ltrb();
115 skvx::float4 drawBounds;
116
117 if (coverageMask.inverted()) {
118 // Only mask filters trigger complex transforms, and they are never inverse filled. Since
119 // we know this is an inverted mask, then we can exactly map the draw's clip bounds to mask
120 // space so that the clip is still fully covered without branching in the vertex shader.
121 SkASSERT(maskToDevice == SkM44::Translate(deviceOrigin.x(), deviceOrigin.y()));
122 drawBounds = params.clip().drawBounds().makeOffset(-deviceOrigin).ltrb();
123
124 // If the mask is fully clipped out, then the shape's mask info should be (0,0,0,0).
125 // If it's not fully clipped out, then the mask info should be non-empty.
126 SkASSERT(!params.clip().transformedShapeBounds().isEmptyNegativeOrNaN() ^
127 all(maskBounds == 0.f));
128
129 if (params.clip().transformedShapeBounds().isEmptyNegativeOrNaN()) {
130 // The inversion check is strict inequality, so (0,0,0,0) would not be detected. Adjust
131 // to (0,0,1/2,1/2) to restrict sampling to the top-left quarter of the top-left pixel,
132 // which should have a value of 0 regardless of filtering mode.
133 maskBounds = skvx::float4{0.f, 0.f, 0.5f, 0.5f};
134 } else {
135 // Add 1/2px outset to the mask bounds so that clamped coordinates sample the texel
136 // center of the padding around the atlas entry.
137 maskBounds += skvx::float4{-0.5f, -0.5f, 0.5f, 0.5f};
138 }
139
140 // and store RBLT so that the 'maskBoundsIn' attribute has xy > zw to detect inverse fill.
141 maskBounds = skvx::shuffle<2,3,0,1>(maskBounds);
142 } else {
143 // If we aren't inverted, then the originally assigned values don't need to be adjusted, but
144 // also ensure the mask isn't empty (otherwise the draw should have been skipped earlier).
145 SkASSERT(!coverageMask.bounds().isEmptyNegativeOrNaN());
146 SkASSERT(all(maskBounds.xy() < maskBounds.zw()));
147
148 // Since the mask bounds and draw bounds are 1-to-1 with each other, the clamping of texture
149 // coords is mostly a formality. We inset the mask bounds by 1/2px so that we clamp to the
150 // texel center of the outer row/column of the mask. This should be a no-op for nearest
151 // sampling but prevents any linear sampling from incorporating adjacent data; for atlases
152 // this would just be 0 but for non-atlas coverage masks that might not have padding this
153 // avoids filtering unknown values in an approx-fit texture.
154 drawBounds = maskBounds;
155 maskBounds -= skvx::float4{-0.5f, -0.5f, 0.5f, 0.5f};
156 }
157
158 // Move 'drawBounds' and 'maskBounds' into the atlas coordinate space, then adjust the
159 // device translation to undo the atlas origin automatically in the vertex shader.
160 skvx::float2 textureOrigin = skvx::cast<float>(coverageMask.textureOrigin());
161 maskBounds += textureOrigin.xyxy();
162 drawBounds += textureOrigin.xyxy();
163 deviceOrigin -= textureOrigin;
164
165 // Normalize drawBounds and maskBounds after possibly correcting drawBounds for inverse fills.
166 // The maskToDevice matrix uniform will handle de-normalizing drawBounds for vertex positions.
167 auto atlasSizeInv = skvx::float2{1.f / proxy->dimensions().width(),
168 1.f / proxy->dimensions().height()};
169 drawBounds *= atlasSizeInv.xyxy();
170 maskBounds *= atlasSizeInv.xyxy();
171 deviceOrigin *= atlasSizeInv;
172
173 // Since the mask bounds define normalized texels of the texture, we can encode them as
174 // ushort_norm without losing precision to save space.
175 SkASSERT(all((maskBounds >= 0.f) & (maskBounds <= 1.f)));
// Scale to the ushort-norm range; the +0.5 rounds to nearest when the cast to uint16_t
// below truncates.
176 maskBounds = 65535.f * maskBounds + 0.5f;
177
178 const SkM44& m = coverageMask.deviceToLocal();
// NOTE(review): the streamed field order must match the instance-attribute declaration
// in the constructor -- several of those attribute lines were dropped by extraction, so
// verify against the full original before reordering anything here.
179 instances.append(1) << drawBounds << skvx::cast<uint16_t>(maskBounds) << deviceOrigin
180 << params.order().depthAsFloat() << ssboIndices
181 << m.rc(0,0) << m.rc(1,0) << m.rc(3,0) // mat0
182 << m.rc(0,1) << m.rc(1,1) << m.rc(3,1) // mat1
183 << m.rc(0,3) << m.rc(1,3) << m.rc(3,3); // mat2
184}
185
// NOTE(review): extraction dropped original line 186, which carried the start of this
// function's signature; per the index at the bottom of the dump it is
// `void writeUniformsAndTextures(const DrawParams&, PipelineDataGatherer*) const override`,
// and the DrawParams parameter is named 'params' below.
187 PipelineDataGatherer* gatherer) const {
188 SkDEBUGCODE(UniformExpectationsValidator uev(gatherer, this->uniforms());)
189
190 const CoverageMaskShape& coverageMask = params.geometry().coverageMaskShape();
191 const TextureProxy* proxy = coverageMask.textureProxy();
192 SkASSERT(proxy);
193
194 // Most coverage masks are aligned with the device pixels, so the params' transform is an
195 // integer translation matrix. This translation is extracted as an instance attribute so that
196 // the remaining transform has a much lower frequency of changing (only complex-transformed
197 // mask filters).
198 skvx::float2 deviceOrigin = get_device_translation(params.transform().matrix());
199 SkMatrix maskToDevice = params.transform().matrix().asM33();
200 maskToDevice.preTranslate(-deviceOrigin.x(), -deviceOrigin.y());
201
202 // The mask coordinates in the vertex shader will be normalized, so scale by the proxy size
203 // to get back to Skia's texel-based coords.
204 maskToDevice.preScale(proxy->dimensions().width(), proxy->dimensions().height());
205
206 // Write uniforms:
207 gatherer->write(maskToDevice);
208
209 // Write textures and samplers:
// Nearest filtering is only used when the transform is an axis-aligned rect-stays-rect
// mapping, has no scaling, and translates by (near-)integer texels; anything else
// samples with linear filtering.
210 const bool pixelAligned =
211 params.transform().type() <= Transform::Type::kSimpleRectStaysRect &&
212 params.transform().maxScaleFactor() == 1.f &&
213 all(deviceOrigin == floor(deviceOrigin + SK_ScalarNearlyZero));
214 constexpr SkTileMode kTileModes[2] = {SkTileMode::kClamp, SkTileMode::kClamp};
215 gatherer->add(sk_ref_sp(proxy),
216 {pixelAligned ? SkFilterMode::kNearest : SkFilterMode::kLinear, kTileModes});
217}
218
219} // namespace skgpu::graphite
#define SkASSERT(cond)
Definition: SkAssert.h:116
sk_sp< T > sk_ref_sp(T *obj)
Definition: SkRefCnt.h:381
static bool SkScalarNearlyZero(SkScalar x, SkScalar tolerance=SK_ScalarNearlyZero)
Definition: SkScalar.h:101
#define SK_ScalarNearlyZero
Definition: SkScalar.h:99
SkDEBUGCODE(SK_SPI) SkThreadID SkGetThreadID()
SkTileMode
Definition: SkTileMode.h:13
Definition: SkM44.h:150
SkScalar rc(int r, int c) const
Definition: SkM44.h:261
static SkM44 Translate(SkScalar x, SkScalar y, SkScalar z=0)
Definition: SkM44.h:225
SkMatrix & preTranslate(SkScalar dx, SkScalar dy)
Definition: SkMatrix.cpp:263
SkMatrix & preScale(SkScalar sx, SkScalar sy, SkScalar px, SkScalar py)
Definition: SkMatrix.cpp:315
std::string texturesAndSamplersSkSL(const ResourceBindingRequirements &, int *nextBindingIndex) const override
const char * fragmentCoverageSkSL() const override
void writeUniformsAndTextures(const DrawParams &, PipelineDataGatherer *) const override
void writeVertices(DrawWriter *, const DrawParams &, skvx::ushort2 ssboIndices) const override
const TextureProxy * textureProxy() const
AI bool isEmptyNegativeOrNaN() const
Definition: Rect.h:102
AI float4 ltrb() const
Definition: Rect.h:82
SkSpan< const Uniform > uniforms() const
Definition: Renderer.h:143
const EmbeddedViewParams * params
static constexpr DepthStencilSettings kDirectDepthGreaterPass
static skvx::float2 get_device_translation(const SkM44 &localToDevice)
std::string EmitSamplerLayout(const ResourceBindingRequirements &bindingReqs, int *binding)
SIT bool all(const Vec< 1, T > &x)
Definition: SkVx.h:582
SIN Vec< N, float > floor(const Vec< N, float > &x)
Definition: SkVx.h:703
constexpr int32_t width() const
Definition: SkSize.h:36
constexpr int32_t height() const
Definition: SkSize.h:37
Definition: SkVx.h:83