        (void)fUsageRefCnt.fetch_add(+1, std::memory_order_relaxed);
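        // Releasing a usage ref: the acq_rel decrement pairs with the relaxed
        // increments above; removing the last outstanding ref disposes the
        // resource via internalDispose() on a const_cast'd pointer.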
        bool shouldFree = false;
        if (1 == fUsageRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            mutableThis->internalDispose();
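        // Command-buffer refs keep a resource alive while the GPU may still be
        // using it. When fCommandBufferRefsAsUsageRefs is set, these refs are
        // folded into the usage count instead of the dedicated counter below.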
        if (fCommandBufferRefsAsUsageRefs) {
        (void)fCommandBufferRefCnt.fetch_add(+1, std::memory_order_relaxed);
        if (fCommandBufferRefsAsUsageRefs) {
        bool shouldFree = false;
        SkASSERT(this->hasCommandBufferRef());
        if (1 == fCommandBufferRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            mutableThis->internalDispose();
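// Test-only queries, compiled only when GRAPHITE_TEST_UTILS is defined.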
#if defined(GRAPHITE_TEST_UTILS)
    bool testingShouldDeleteASAP() const { return fDeleteASAP == DeleteASAP::kYes; }
    virtual const Texture* asTexture() const { return nullptr; }
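    // A Resource is constructed against a SharedContext, with a debug label and
    // an option to count command-buffer refs as usage refs.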
    Resource(const SharedContext*,
             std::string_view label,
             bool commandBufferRefsAsUsageRefs = false);
                                        const char* dumpName) const {}
    bool debugHasCommandBufferRef() const {
        return hasCommandBufferRef();
    }
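    // DeleteASAP records whether the cache has decided this resource's GPU memory
    // should be freed as soon as the resource becomes purgeable, rather than
    // waiting for a normal cache purge.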
    enum class DeleteASAP : bool {
        kNo = false,
        kYes = true,
    };
    DeleteASAP shouldDeleteASAP() const { return fDeleteASAP; }
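    // The last-access time feeds the cache's time-based purging of idle resources.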
    void updateAccessTime() {
        fLastAccess = skgpu::StdSteadyClock::now();
    }
    skgpu::StdSteadyClock::time_point lastAccessTime() const {
        return fLastAccess;
    }
    friend ResourceCache;
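    // The ResourceCache (a friend, above) hands out the first usage ref itself.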
    void initialUsageRef() const {
        (void)fUsageRefCnt.fetch_add(+1, std::memory_order_relaxed);
    }
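    // Cache bookkeeping: purgeability, the resource's positions in the cache's
    // internal arrays, and a timestamp the cache uses to order resources.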
    bool isPurgeable() const;
    int* accessReturnIndex() const { return &fReturnIndex; }
    int* accessCacheIndex() const { return &fCacheArrayIndex; }

    uint32_t timestamp() const { return fTimestamp; }
    void setTimestamp(uint32_t ts) { fTimestamp = ts; }
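    // The cache keeps its own ref count so a resource stays alive while it is held
    // in the ResourceCache, independent of usage and command-buffer refs.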
    void refCache() const {
        (void)fCacheRefCnt.fetch_add(+1, std::memory_order_relaxed);
    }
    void unrefCache() const {
        bool shouldFree = false;
        if (1 == fCacheRefCnt.fetch_add(-1, std::memory_order_acq_rel)) {
            mutableThis->internalDispose();
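    // Whether the cache may hand this resource back out as a scratch (reusable)
    // resource.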
    bool isUsableAsScratch() const {
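    // Ref-count queries. The acquire loads ensure that once a count is observed at
    // zero, the prior owners' unrefs (acq_rel decrements) are fully visible.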
    bool hasUsageRef() const {
        if (0 == fUsageRefCnt.load(std::memory_order_acquire)) {
            return false;
        }
        return true;
    }
    bool hasCommandBufferRef() const {
        if (0 == fCommandBufferRefCnt.load(std::memory_order_acquire)) {
            return false;
        }
        SkASSERT(!fCommandBufferRefsAsUsageRefs);
        return true;
    }
    bool hasCacheRef() const {
        if (0 == fCacheRefCnt.load(std::memory_order_acquire)) {
            return false;
        }
        return true;
    }
    bool hasAnyRefs() const {
        return this->hasUsageRef() || this->hasCommandBufferRef() || this->hasCacheRef();
    }
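    // Final teardown; invoked from the unref paths above once a resource has no
    // remaining refs.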
    void internalDispose();
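    // Guards the zero-ref handling in the unref paths.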
    mutable SkMutex fUnrefMutex;
    SkDEBUGCODE(mutable bool fCalledRemovedFromCache = false;)
    const SharedContext* fSharedContext;
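    // Three independent ref counts: usage refs from client code, command-buffer
    // refs held while GPU work is pending, and refs held by the ResourceCache.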
    mutable std::atomic<int32_t> fUsageRefCnt;
    mutable std::atomic<int32_t> fCommandBufferRefCnt;
    mutable std::atomic<int32_t> fCacheRefCnt;
    const bool fCommandBufferRefsAsUsageRefs = false;
    GraphiteResourceKey fKey;
    mutable int fReturnIndex = -1;
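    // The GPU memory size is computed lazily; kInvalidGpuMemorySize marks it as
    // not yet computed.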
    static const size_t kInvalidGpuMemorySize = ~static_cast<size_t>(0);
    mutable size_t fGpuMemorySize = kInvalidGpuMemorySize;
    DeleteASAP fDeleteASAP = DeleteASAP::kNo;
    mutable int fCacheArrayIndex = -1;
    skgpu::StdSteadyClock::time_point fLastAccess;
    const UniqueID fUniqueID;
    SkDEBUGCODE(mutable bool fNonShareableInCache = false;)