AtlasPathRenderer.cpp
/*
 * Copyright 2019 Google LLC.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/base/SkVx.h"
#include "src/core/SkIPoint16.h"

using namespace skia_private;

namespace {

// Returns the rect [topLeftFloor, botRightCeil], which is the rect [r] rounded out to integer
// boundaries.
std::pair<skvx::float2, skvx::float2> round_out(const SkRect& r) {
    return {floor(skvx::float2::Load(&r.fLeft)),
            ceil(skvx::float2::Load(&r.fRight))};
}

// Returns whether the given proxyOwner uses the atlasProxy.
template<typename T> bool refs_atlas(const T* proxyOwner, const GrSurfaceProxy* atlasProxy) {
    bool refsAtlas = false;
    auto checkForAtlasRef = [atlasProxy, &refsAtlas](GrSurfaceProxy* proxy, skgpu::Mipmapped) {
        if (proxy == atlasProxy) {
            refsAtlas = true;
        }
    };
    if (proxyOwner) {
        proxyOwner->visitProxies(checkForAtlasRef);
    }
    return refsAtlas;
}

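// Conservative overlap test between a path's device-space bounds and the integer clip bounds,
// done with vectorized float compares so both axes are checked at once.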
bool is_visible(const SkRect& pathDevBounds, const SkIRect& clipBounds) {
    auto pathTopLeft = skvx::float2::Load(&pathDevBounds.fLeft);
    auto pathBotRight = skvx::float2::Load(&pathDevBounds.fRight);
    // Empty paths are never visible. Phrase this as a NOT of positive logic so we also return false
    // in the case of NaN.
    if (!all(pathTopLeft < pathBotRight)) {
        return false;
    }
    auto clipTopLeft = skvx::cast<float>(skvx::int2::Load(&clipBounds.fLeft));
    auto clipBotRight = skvx::cast<float>(skvx::int2::Load(&clipBounds.fRight));
    static_assert(sizeof(clipBounds) == sizeof(clipTopLeft) + sizeof(clipBotRight));
    return all(pathTopLeft < clipBotRight) && all(pathBotRight > clipTopLeft);
}

#ifdef SK_DEBUG
// Ensures the atlas dependencies are set up such that each atlas will be totally out of service
// before we render the next one in line. This means there will only ever be one atlas active at a
// time and that they can all share the same texture.
void validate_atlas_dependencies(
        const TArray<sk_sp<skgpu::ganesh::AtlasRenderTask>>& atlasTasks) {
    for (int i = atlasTasks.size() - 1; i >= 1; --i) {
        auto atlasTask = atlasTasks[i].get();
        auto previousAtlasTask = atlasTasks[i - 1].get();
        // Double check that atlasTask depends on every dependent of its previous atlas. If this
        // fires it might mean previousAtlasTask gained a new dependent after atlasTask came into
        // service (maybe by an op that hadn't yet been added to an opsTask when we registered the
        // new atlas with the drawingManager).
        for (GrRenderTask* previousAtlasUser : previousAtlasTask->dependents()) {
            SkASSERT(atlasTask->dependsOn(previousAtlasUser));
        }
    }
}
#endif

}  // anonymous namespace

namespace skgpu::ganesh {

constexpr static auto kAtlasAlpha8Type = GrColorType::kAlpha_8;
constexpr static int kAtlasInitialSize = 512;

// The atlas is only used for small-area paths, which means at least one dimension of every path is
// guaranteed to be quite small. So if we transpose tall paths, then every path will have a small
// height, which lends very well to efficient pow2 atlas packing.
constexpr static auto kAtlasAlgorithm = GrDynamicAtlas::RectanizerAlgorithm::kPow2;

// Ensure every path in the atlas falls in or below the 256px high rectanizer band.
constexpr static int kAtlasMaxPathHeight = 256;

// If we have MSAA to fall back on, paths are already fast enough that we really only benefit from
// atlasing when they are very small.
constexpr static int kAtlasMaxPathHeightWithMSAAFallback = 128;

// http://skbug.com/12291 -- The way GrDynamicAtlas works, a single 2048x1 path is given an entire
// 2048x2048 atlas with draw bounds of 2048x1025. Limit the max width to 1024 to avoid this
// landmine until it's resolved.
constexpr static int kAtlasMaxPathWidth = 1024;

bool AtlasPathRenderer::IsSupported(GrRecordingContext* rContext) {
#ifdef SK_BUILD_FOR_IOS
    // b/195095846: There is a bug with the atlas path renderer on OpenGL iOS. Disable until we can
    // investigate.
    if (rContext->backend() == GrBackendApi::kOpenGL) {
        return false;
    }
#endif
#ifdef SK_BUILD_FOR_WIN
    // http://skbug.com/13519 There is a bug with the atlas path renderer on Direct3D, running on
    // Radeon hardware and possibly others. Disable until we can investigate.
    if (rContext->backend() == GrBackendApi::kDirect3D) {
        return false;
    }
#endif
    const GrCaps& caps = *rContext->priv().caps();
    auto atlasFormat = caps.getDefaultBackendFormat(kAtlasAlpha8Type, GrRenderable::kYes);
    return rContext->asDirectContext() &&  // The atlas doesn't support DDL yet.
           caps.internalMultisampleCount(atlasFormat) > 1 &&
           // GrAtlasRenderTask currently requires tessellation. In the future it could use the
           // default path renderer when tessellation isn't available.
           TessellationPathRenderer::IsSupported(caps);
}

AtlasPathRenderer::AtlasPathRenderer(GrDirectContext* dContext) {
    SkASSERT(IsSupported(dContext));
    const GrCaps& caps = *dContext->priv().caps();
#if defined(GR_TEST_UTILS)
    fAtlasMaxSize = dContext->priv().options().fMaxTextureAtlasSize;
#else
    fAtlasMaxSize = 2048;
#endif
    fAtlasMaxSize = SkPrevPow2(std::min(fAtlasMaxSize, (float)caps.maxPreferredRenderTargetSize()));
    fAtlasMaxPathWidth = std::min((float)kAtlasMaxPathWidth, fAtlasMaxSize);
    fAtlasInitialSize = SkNextPow2(std::min(kAtlasInitialSize, (int)fAtlasMaxSize));
}

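// Checks whether a path's rounded-out device bounds are small enough to go in the atlas: the
// largest dimension must fit within fAtlasMaxPathWidth, and the area must not exceed
// atlasMaxPathHeight^2 so that, after any transposition in addPathToAtlas(), the entry's height
// stays within the rectanizer's height bands.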
bool AtlasPathRenderer::pathFitsInAtlas(const SkRect& pathDevBounds,
                                        GrAAType fallbackAAType) const {
    SkASSERT(fallbackAAType != GrAAType::kNone);  // The atlas doesn't support non-AA.
    float atlasMaxPathHeight_p2 = (fallbackAAType == GrAAType::kMSAA)
            ? kAtlasMaxPathHeightWithMSAAFallback * kAtlasMaxPathHeightWithMSAAFallback
            : kAtlasMaxPathHeight * kAtlasMaxPathHeight;
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    auto size = botRightCeil - topLeftFloor;
    return // Ensure the path's largest dimension fits in the atlas.
           all(size <= fAtlasMaxPathWidth) &&
           // Since we will transpose tall skinny paths, limiting to atlasMaxPathHeight^2 pixels
           // guarantees heightInAtlas <= atlasMaxPathHeight, while also allowing paths that are
           // very wide and short.
           size[0] * size[1] <= atlasMaxPathHeight_p2;
}

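// Key for the atlas path cache: the path's generation ID, the six values of its (affine) view
// matrix, and the fill rule. Perspective matrices never reach the atlas (see onCanDrawPath), so
// the 2x3 affine part is sufficient.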
void AtlasPathRenderer::AtlasPathKey::set(const SkMatrix& m, const SkPath& path) {
    fPathGenID = path.getGenerationID();
    fAffineMatrix[0] = m.getScaleX();
    fAffineMatrix[1] = m.getSkewX();
    fAffineMatrix[2] = m.getTranslateX();
    fAffineMatrix[3] = m.getSkewY();
    fAffineMatrix[4] = m.getScaleY();
    fAffineMatrix[5] = m.getTranslateY();
    fFillRule = (uint32_t)GrFillRuleForSkPath(path);  // Fill rule doesn't affect the path's genID.
}

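// Adds the path to the current atlas (starting a new atlas if the current one is missing or full)
// and returns its integer device bounds, its location in the atlas, and whether it was stored
// transposed. Returns false if the atlas is full and a new one cannot be started because the draw
// already references the current atlas.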
bool AtlasPathRenderer::addPathToAtlas(GrRecordingContext* rContext,
                                       const SkMatrix& viewMatrix,
                                       const SkPath& path,
                                       const SkRect& pathDevBounds,
                                       SkIRect* devIBounds,
                                       SkIPoint16* locationInAtlas,
                                       bool* transposedInAtlas,
                                       const DrawRefsAtlasCallback& drawRefsAtlasCallback) {
    SkASSERT(!viewMatrix.hasPerspective());  // See onCanDrawPath().

    pathDevBounds.roundOut(devIBounds);
#ifdef SK_DEBUG
    // is_visible() should have guaranteed the path's bounds were representable as ints, since clip
    // bounds within the max render target size are nowhere near INT_MAX.
    auto [topLeftFloor, botRightCeil] = round_out(pathDevBounds);
    SkASSERT(all(skvx::cast<float>(skvx::int2::Load(&devIBounds->fLeft)) == topLeftFloor));
    SkASSERT(all(skvx::cast<float>(skvx::int2::Load(&devIBounds->fRight)) == botRightCeil));
#endif

    int widthInAtlas = devIBounds->width();
    int heightInAtlas = devIBounds->height();
    // is_visible() should have guaranteed the path's bounds were non-empty.
    SkASSERT(widthInAtlas > 0 && heightInAtlas > 0);

    if (SkNextPow2(widthInAtlas) == SkNextPow2(heightInAtlas)) {
        // Both dimensions go to the same pow2 band in the atlas. Use the larger dimension as
        // height for more efficient packing.
        *transposedInAtlas = widthInAtlas > heightInAtlas;
    } else {
        // Both dimensions go to different pow2 bands in the atlas. Use the smaller pow2 band for
        // most efficient packing.
        *transposedInAtlas = heightInAtlas > widthInAtlas;
    }
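    // Illustrative example (numbers not from the original source): a 100x300 path has
    // SkNextPow2(100) == 128 and SkNextPow2(300) == 512, so its dimensions fall in different pow2
    // bands; it is stored transposed as 300 wide by 100 tall, packing into the 128px band rather
    // than the 512px band.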
    if (*transposedInAtlas) {
        std::swap(heightInAtlas, widthInAtlas);
    }
    // pathFitsInAtlas() should have guaranteed these constraints on the path size.
    SkASSERT(widthInAtlas <= (int)fAtlasMaxPathWidth);
    SkASSERT(heightInAtlas <= kAtlasMaxPathHeight);

    // Check if this path is already in the atlas. This is mainly for clip paths.
    AtlasPathKey atlasPathKey;
    if (!path.isVolatile()) {
        atlasPathKey.set(viewMatrix, path);
        if (const SkIPoint16* existingLocation = fAtlasPathCache.find(atlasPathKey)) {
            *locationInAtlas = *existingLocation;
            return true;
        }
    }

    if (fAtlasRenderTasks.empty() ||
        !fAtlasRenderTasks.back()->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                           heightInAtlas, *transposedInAtlas, locationInAtlas)) {
        // We either don't have an atlas yet or the current one is full. Try to replace it.
        auto currentAtlasTask = (!fAtlasRenderTasks.empty()) ? fAtlasRenderTasks.back().get()
                                                             : nullptr;
        if (currentAtlasTask &&
            drawRefsAtlasCallback &&
            drawRefsAtlasCallback(currentAtlasTask->atlasProxy())) {
            // The draw already refs the current atlas. Give up. Otherwise the draw would ref two
            // different atlases and they couldn't share a texture.
            return false;
        }
        // Replace the atlas with a new one.
        auto dynamicAtlas = std::make_unique<GrDynamicAtlas>(
                kAtlasAlpha8Type, GrDynamicAtlas::InternalMultisample::kYes,
                SkISize{fAtlasInitialSize, fAtlasInitialSize}, fAtlasMaxSize,
                *rContext->priv().caps(), kAtlasAlgorithm);
        auto newAtlasTask = sk_make_sp<AtlasRenderTask>(rContext,
                                                        sk_make_sp<GrArenas>(),
                                                        std::move(dynamicAtlas));
        rContext->priv().drawingManager()->addAtlasTask(newAtlasTask, currentAtlasTask);
        SkAssertResult(newAtlasTask->addPath(viewMatrix, path, devIBounds->topLeft(), widthInAtlas,
                                             heightInAtlas, *transposedInAtlas, locationInAtlas));
        fAtlasRenderTasks.push_back(std::move(newAtlasTask));
        fAtlasPathCache.reset();
    }

    // Remember this path's location in the atlas, in case it gets drawn again.
    if (!path.isVolatile()) {
        fAtlasPathCache.set(atlasPathKey, *locationInAtlas);
    }
    return true;
}

PathRenderer::CanDrawPath AtlasPathRenderer::onCanDrawPath(const CanDrawPathArgs& args) const {
#ifdef SK_DEBUG
    if (!fAtlasRenderTasks.empty()) {
        // args.fPaint should NEVER reference our current atlas. If it does, it means somebody
        // intercepted a clip FP meant for a different op and will cause rendering artifacts.
        const GrSurfaceProxy* atlasProxy = fAtlasRenderTasks.back()->atlasProxy();
        SkASSERT(!refs_atlas(args.fPaint->getColorFragmentProcessor(), atlasProxy));
        SkASSERT(!refs_atlas(args.fPaint->getCoverageFragmentProcessor(), atlasProxy));
    }
    SkASSERT(!args.fHasUserStencilSettings);  // See onGetStencilSupport().
#endif
    bool canDrawPath = args.fShape->style().isSimpleFill() &&
#ifdef SK_DISABLE_ATLAS_PATH_RENDERER_WITH_COVERAGE_AA
                       // The MSAA requirement is a temporary limitation in order to preserve
                       // functionality for refactoring. TODO: Allow kCoverage AA types.
                       args.fAAType == GrAAType::kMSAA &&
#else
                       args.fAAType != GrAAType::kNone &&
#endif
                       // Non-DMSAA convex paths should be handled by the convex tessellator.
                       // (With DMSAA we continue to use the atlas for these paths in order to
                       // avoid triggering MSAA.)
                       (args.fProxy->numSamples() == 1 || !args.fShape->knownToBeConvex()) &&
                       !args.fShape->style().hasPathEffect() &&
                       !args.fViewMatrix->hasPerspective() &&
                       this->pathFitsInAtlas(args.fViewMatrix->mapRect(args.fShape->bounds()),
                                             args.fAAType);
    return canDrawPath ? CanDrawPath::kYes : CanDrawPath::kNo;
}

bool AtlasPathRenderer::onDrawPath(const DrawPathArgs& args) {
    SkPath path;
    args.fShape->asPath(&path);

    const SkRect pathDevBounds = args.fViewMatrix->mapRect(args.fShape->bounds());
    SkASSERT(this->pathFitsInAtlas(pathDevBounds, args.fAAType));

    if (!is_visible(pathDevBounds, args.fClip->getConservativeBounds())) {
        // The path is empty or outside the clip. No mask is needed.
        if (path.isInverseFillType()) {
            args.fSurfaceDrawContext->drawPaint(args.fClip, std::move(args.fPaint),
                                                *args.fViewMatrix);
        }
        return true;
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    SkAssertResult(this->addPathToAtlas(args.fContext, *args.fViewMatrix, path, pathDevBounds,
                                        &devIBounds, &locationInAtlas, &transposedInAtlas,
                                        nullptr/*DrawRefsAtlasCallback -- see onCanDrawPath()*/));

    const SkIRect& fillBounds = args.fShape->inverseFilled()
            ? (args.fClip
                    ? args.fClip->getConservativeBounds()
                    : args.fSurfaceDrawContext->asSurfaceProxy()->backingStoreBoundsIRect())
            : devIBounds;
    const GrCaps& caps = *args.fSurfaceDrawContext->caps();
    auto op = GrOp::Make<DrawAtlasPathOp>(args.fContext,
                                          args.fSurfaceDrawContext->arenaAlloc(),
                                          fillBounds, *args.fViewMatrix,
                                          std::move(args.fPaint), locationInAtlas,
                                          devIBounds, transposedInAtlas,
                                          fAtlasRenderTasks.back()->readView(caps),
                                          args.fShape->inverseFilled());
    args.fSurfaceDrawContext->addDrawOp(args.fClip, std::move(op));
    return true;
}

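// Returns a fragment processor that modulates inputFP by the path's coverage, sampled from its
// mask in the atlas, for use as a clip effect. On failure the unmodified inputFP is handed back so
// the caller can fall through to another clipping method.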
GrFPResult AtlasPathRenderer::makeAtlasClipEffect(const SurfaceDrawContext* sdc,
                                                  const GrOp* opBeingClipped,
                                                  std::unique_ptr<GrFragmentProcessor> inputFP,
                                                  const SkIRect& drawBounds,
                                                  const SkMatrix& viewMatrix,
                                                  const SkPath& path) {
    if (viewMatrix.hasPerspective()) {
        return GrFPFailure(std::move(inputFP));
    }

    const SkRect pathDevBounds = viewMatrix.mapRect(path.getBounds());
    if (!is_visible(pathDevBounds, drawBounds)) {
        // The path is empty or outside the drawBounds. No mask is needed. We explicitly allow the
        // returned successful "fp" to be null in case this bypassed atlas clip effect was the
        // first clip to be processed by the clip stack (at which point inputFP is null).
        return path.isInverseFillType() ? GrFPNullableSuccess(std::move(inputFP))
                                        : GrFPFailure(std::move(inputFP));
    }

    auto fallbackAAType = (sdc->numSamples() > 1 || sdc->canUseDynamicMSAA()) ? GrAAType::kMSAA
                                                                              : GrAAType::kCoverage;
    if (!this->pathFitsInAtlas(pathDevBounds, fallbackAAType)) {
        // The path is too big.
        return GrFPFailure(std::move(inputFP));
    }

    SkIRect devIBounds;
    SkIPoint16 locationInAtlas;
    bool transposedInAtlas;
    // Called if the atlas runs out of room, to determine if it's safe to create a new one. (Draws
    // can never access more than one atlas.)
    auto drawRefsAtlasCallback = [opBeingClipped, &inputFP](const GrSurfaceProxy* atlasProxy) {
        return refs_atlas(opBeingClipped, atlasProxy) ||
               refs_atlas(inputFP.get(), atlasProxy);
    };
    // addPathToAtlas() ignores inverseness of the fill. See GrAtlasRenderTask::getAtlasUberPath().
    if (!this->addPathToAtlas(sdc->recordingContext(), viewMatrix, path, pathDevBounds, &devIBounds,
                              &locationInAtlas, &transposedInAtlas, drawRefsAtlasCallback)) {
        // The atlas ran out of room and we were unable to start a new one.
        return GrFPFailure(std::move(inputFP));
    }

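    // Build the matrix that maps device-space coordinates to the path's location in the atlas.
    // When the path was stored transposed, device x and y swap roles: the setAll() below maps
    // (x, y) to (y + atlasX - top, x + atlasY - left).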
    SkMatrix atlasMatrix;
    auto [atlasX, atlasY] = locationInAtlas;
    if (!transposedInAtlas) {
        atlasMatrix = SkMatrix::Translate(atlasX - devIBounds.left(), atlasY - devIBounds.top());
    } else {
        atlasMatrix.setAll(0, 1, atlasX - devIBounds.top(),
                           1, 0, atlasY - devIBounds.left(),
                           0, 0, 1);
    }
    auto flags = GrModulateAtlasCoverageEffect::Flags::kNone;
    if (path.isInverseFillType()) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kInvertCoverage;
    }
    if (!devIBounds.contains(drawBounds)) {
        flags |= GrModulateAtlasCoverageEffect::Flags::kCheckBounds;
        // At this point in time we expect callers to tighten the scissor for "kIntersect" clips, as
        // opposed to us having to check the path bounds. Feel free to remove this assert if that
        // ever changes.
        SkASSERT(path.isInverseFillType());
    }
    GrSurfaceProxyView atlasView = fAtlasRenderTasks.back()->readView(*sdc->caps());
    return GrFPSuccess(std::make_unique<GrModulateAtlasCoverageEffect>(flags, std::move(inputFP),
                                                                       std::move(atlasView),
                                                                       atlasMatrix, devIBounds));
}

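// Called by the drawing manager before flush. Instantiates every atlas render task, sharing one
// backing texture where possible, then resets the per-flush atlas state.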
bool AtlasPathRenderer::preFlush(GrOnFlushResourceProvider* onFlushRP) {
    if (fAtlasRenderTasks.empty()) {
        SkASSERT(fAtlasPathCache.count() == 0);
        return true;
    }

    // Verify the atlases can all share the same texture.
    SkDEBUGCODE(validate_atlas_dependencies(fAtlasRenderTasks);)

    bool successful;

#if defined(GR_TEST_UTILS)
    if (onFlushRP->failFlushTimeCallbacks()) {
        successful = false;
    } else
#endif
    {
        // TODO: it seems like this path renderer's backing-texture reuse could be greatly
        // improved. Please see skbug.com/13298.

        // Instantiate the first atlas.
        successful = fAtlasRenderTasks[0]->instantiate(onFlushRP);

        // Instantiate the remaining atlases.
        GrTexture* firstAtlas = fAtlasRenderTasks[0]->atlasProxy()->peekTexture();
        SkASSERT(firstAtlas);
        for (int i = 1; successful && i < fAtlasRenderTasks.size(); ++i) {
            auto atlasTask = fAtlasRenderTasks[i].get();
            if (atlasTask->atlasProxy()->backingStoreDimensions() == firstAtlas->dimensions()) {
                successful &= atlasTask->instantiate(onFlushRP, sk_ref_sp(firstAtlas));
            } else {
                // The atlases are expected to all be full size except possibly the final one.
                SkASSERT(i == fAtlasRenderTasks.size() - 1);
                SkASSERT(atlasTask->atlasProxy()->backingStoreDimensions().area() <
                         firstAtlas->dimensions().area());
                // TODO: Recycle the larger atlas texture anyway?
                successful &= atlasTask->instantiate(onFlushRP);
            }
        }
    }

    // Reset all atlas data.
    fAtlasRenderTasks.clear();
    fAtlasPathCache.reset();
    return successful;
}

}  // namespace skgpu::ganesh