/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CacheManager.h"

#include <GrContextOptions.h>
#include <SkExecutor.h>
#include <SkGraphics.h>
#include <math.h>
#include <utils/Trace.h>

#include <set>

#include "CanvasContext.h"
#include "DeviceInfo.h"
#include "Layer.h"
#include "Properties.h"
#include "RenderThread.h"
#include "VulkanManager.h"
#include "pipeline/skia/ATraceMemoryDump.h"
#include "pipeline/skia/ShaderCache.h"
#include "pipeline/skia/SkiaMemoryTracer.h"
#include "renderstate/RenderState.h"
#include "thread/CommonPool.h"

namespace android {
namespace uirenderer {
namespace renderthread {

CacheManager::CacheManager(RenderThread& thread)
        : mRenderThread(thread), mMemoryPolicy(loadMemoryPolicy()) {
    mMaxSurfaceArea = static_cast<size_t>((DeviceInfo::getWidth() * DeviceInfo::getHeight()) *
                                          mMemoryPolicy.initialMaxSurfaceAreaScale);
    setupCacheLimits();
}

static inline int countLeadingZeros(uint32_t mask) {
    // __builtin_clz(0) is undefined, so we have to detect that case.
    return mask ? __builtin_clz(mask) : 32;
}

// Return the smallest power-of-2 >= n.
static inline uint32_t nextPowerOfTwo(uint32_t n) {
    return n ? (1 << (32 - countLeadingZeros(n - 1))) : 1;
}
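// Derives all cache budgets from mMaxSurfaceArea and the current MemoryPolicy, then pushes them
// to Skia. Purely illustrative example (hypothetical numbers, not the shipped defaults): a
// 1080x1920 surface gives mMaxSurfaceArea = 2,073,600; with a surfaceSizeMultiplier of 4 that
// yields mMaxResourceBytes of roughly 8.3MB, and the GPU font atlas cap rounds 2,073,600 up to
// the next power of two, 2,097,152.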
void CacheManager::setupCacheLimits() {
    mMaxResourceBytes = mMaxSurfaceArea * mMemoryPolicy.surfaceSizeMultiplier;
    mBackgroundResourceBytes = mMaxResourceBytes * mMemoryPolicy.backgroundRetentionPercent;
    // This sets the maximum size for a single texture atlas in the GPU font cache. If
    // necessary, the cache can allocate additional textures that are counted against the
    // total cache limits provided to Skia.
    mMaxGpuFontAtlasBytes = nextPowerOfTwo(mMaxSurfaceArea);
    // This sets the maximum size of the CPU font cache to be at least as large as the total
    // of the GPU font caches (i.e. 4 separate GPU atlases).
    mMaxCpuFontCacheBytes = std::max(mMaxGpuFontAtlasBytes * 4, SkGraphics::GetFontCacheLimit());
    mBackgroundCpuFontCacheBytes = mMaxCpuFontCacheBytes * mMemoryPolicy.backgroundRetentionPercent;

    SkGraphics::SetFontCacheLimit(mMaxCpuFontCacheBytes);
    if (mGrContext) {
        mGrContext->setResourceCacheLimit(mMaxResourceBytes);
    }
}
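// Installs (or clears) the GrDirectContext that the cache limits apply to. Passing a context
// other than the current one destroys the existing caches first.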
void CacheManager::reset(sk_sp<GrDirectContext> context) {
    if (context != mGrContext) {
        destroy();
    }

    if (context) {
        mGrContext = std::move(context);
        mGrContext->setResourceCacheLimit(mMaxResourceBytes);
        mLastDeferredCleanup = systemTime(CLOCK_MONOTONIC);
    }
}

void CacheManager::destroy() {
    // cleanup any caches here as the GrContext is about to go away...
    mGrContext.reset(nullptr);
}

class CommonPoolExecutor : public SkExecutor {
public:
    virtual void add(std::function<void(void)> func) override { CommonPool::post(std::move(func)); }
};

static CommonPoolExecutor sDefaultExecutor;
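// Called while the Skia context is being created: enables path mask caching, points Skia's
// glyph atlas budget and background-work executor at HWUI's values, and wires up the
// persistent shader disk cache keyed by the provided identity blob.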
void CacheManager::configureContext(GrContextOptions* contextOptions, const void* identity,
                                    ssize_t size) {
    contextOptions->fAllowPathMaskCaching = true;
    contextOptions->fGlyphCacheTextureMaximumBytes = mMaxGpuFontAtlasBytes;
    contextOptions->fExecutor = &sDefaultExecutor;

    auto& cache = skiapipeline::ShaderCache::get();
    cache.initShaderDiskCache(identity, size);
    contextOptions->fPersistentCache = &cache;
}
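// Responds to memory-pressure signals. BACKGROUND frees all GPU resources and tears down the
// rendering context entirely; UI_HIDDEN keeps the context but evicts unlocked resources down
// to the background budgets before restoring the normal limits.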
void CacheManager::trimMemory(TrimLevel mode) {
    if (!mGrContext) {
        return;
    }

    // flush and submit all work to the gpu and wait for it to finish
    mGrContext->flushAndSubmit(/*syncCpu=*/true);

    switch (mode) {
        case TrimLevel::BACKGROUND:
            mGrContext->freeGpuResources();
            SkGraphics::PurgeAllCaches();
            mRenderThread.destroyRenderingContext();
            break;
        case TrimLevel::UI_HIDDEN:
            // Here we purge all the unlocked scratch resources and then toggle the resources cache
            // limits between the background and max amounts. This causes the unlocked resources
            // that have persistent data to be purged in LRU order.
            mGrContext->setResourceCacheLimit(mBackgroundResourceBytes);
            SkGraphics::SetFontCacheLimit(mBackgroundCpuFontCacheBytes);
            mGrContext->purgeUnlockedResources(mMemoryPolicy.purgeScratchOnly);
            mGrContext->setResourceCacheLimit(mMaxResourceBytes);
            SkGraphics::SetFontCacheLimit(mMaxCpuFontCacheBytes);
            break;
        default:
            break;
    }
}
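// Flushes pending GPU work, then purges resources that have gone unused for the last 30 seconds.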
void CacheManager::trimStaleResources() {
    if (!mGrContext) {
        return;
    }
    mGrContext->flushAndSubmit();
    mGrContext->purgeResourcesNotUsedInMs(std::chrono::seconds(30));
}
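// Reports the total CPU- and GPU-side cache footprint in bytes, as seen by Skia's memory
// tracing; both outputs are zero when there is no GrContext.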
void CacheManager::getMemoryUsage(size_t* cpuUsage, size_t* gpuUsage) {
    *cpuUsage = 0;
    *gpuUsage = 0;
    if (!mGrContext) {
        return;
    }

    skiapipeline::SkiaMemoryTracer cpuTracer("category", true);
    SkGraphics::DumpMemoryStatistics(&cpuTracer);
    *cpuUsage += cpuTracer.total();

    skiapipeline::SkiaMemoryTracer gpuTracer("category", true);
    mGrContext->dumpMemoryStatistics(&gpuTracer);
    *gpuUsage += gpuTracer.total();
}
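// Writes a human-readable report of the active memory policy, registered canvas contexts,
// CPU/GPU cache breakdowns, and per-layer usage into |log|.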
void CacheManager::dumpMemoryUsage(String8& log, const RenderState* renderState) {
    log.appendFormat(R"(Memory policy:
  Max surface area: %zu
  Max resource usage: %.2fMB (x%.0f)
  Background retention: %.0f%% (altUiHidden = %s)
)",
                     mMaxSurfaceArea, mMaxResourceBytes / 1000000.f,
                     mMemoryPolicy.surfaceSizeMultiplier,
                     mMemoryPolicy.backgroundRetentionPercent * 100.0f,
                     mMemoryPolicy.useAlternativeUiHidden ? "true" : "false");
    if (Properties::isSystemOrPersistent) {
        log.appendFormat("  IsSystemOrPersistent\n");
    }
    log.appendFormat("  GPU Context timeout: %" PRIu64 "\n", ns2s(mMemoryPolicy.contextTimeout));
    size_t stoppedContexts = 0;
    for (auto context : mCanvasContexts) {
        if (context->isStopped()) stoppedContexts++;
    }
    log.appendFormat("Contexts: %zu (stopped = %zu)\n", mCanvasContexts.size(), stoppedContexts);

    auto vkInstance = VulkanManager::peekInstance();
    if (!mGrContext) {
        if (!vkInstance) {
            log.appendFormat("No GPU context.\n");
        } else {
            log.appendFormat("No GrContext; however %d remaining Vulkan refs",
                             vkInstance->getStrongCount() - 1);
        }
        return;
    }
    std::vector<skiapipeline::ResourcePair> cpuResourceMap = {
            {"skia/sk_resource_cache/bitmap_", "Bitmaps"},
            {"skia/sk_resource_cache/rrect-blur_", "Masks"},
            {"skia/sk_resource_cache/rects-blur_", "Masks"},
            {"skia/sk_resource_cache/tessellated", "Shadows"},
            {"skia/sk_glyph_cache", "Glyph Cache"},
    };
    skiapipeline::SkiaMemoryTracer cpuTracer(cpuResourceMap, false);
    SkGraphics::DumpMemoryStatistics(&cpuTracer);
    if (cpuTracer.hasOutput()) {
        log.appendFormat("CPU Caches:\n");
        cpuTracer.logOutput(log);
        log.appendFormat("  Glyph Count: %d \n", SkGraphics::GetFontCacheCountUsed());
        log.appendFormat("Total CPU memory usage:\n");
        cpuTracer.logTotals(log);
    }

    skiapipeline::SkiaMemoryTracer gpuTracer("category", true);
    mGrContext->dumpMemoryStatistics(&gpuTracer);
    if (gpuTracer.hasOutput()) {
        log.appendFormat("GPU Caches:\n");
        gpuTracer.logOutput(log);
    }

    if (renderState && renderState->mActiveLayers.size() > 0) {
        log.appendFormat("Layer Info:\n");

        const char* layerType = Properties::getRenderPipelineType() == RenderPipelineType::SkiaGL
                                        ? "GlLayer"
                                        : "VkLayer";
        size_t layerMemoryTotal = 0;
        for (std::set<Layer*>::iterator it = renderState->mActiveLayers.begin();
             it != renderState->mActiveLayers.end(); it++) {
            const Layer* layer = *it;
            log.appendFormat("    %s size %dx%d\n", layerType, layer->getWidth(),
                             layer->getHeight());
            layerMemoryTotal += layer->getWidth() * layer->getHeight() * 4;
        }
        log.appendFormat("  Layers Total   %6.2f KB (numLayers = %zu)\n",
                         layerMemoryTotal / 1024.0f, renderState->mActiveLayers.size());
    }

    log.appendFormat("Total GPU memory usage:\n");
    gpuTracer.logTotals(log);
}
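// Called after every completed frame: records the completion time, cancels any pending context
// teardown, and (when atrace is capturing) emits cache counters for the frame.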
void CacheManager::onFrameCompleted() {
    cancelDestroyContext();
    mFrameCompletions.next() = systemTime(CLOCK_MONOTONIC);
    if (ATRACE_ENABLED()) {
        static skiapipeline::ATraceMemoryDump tracer;
        tracer.startFrame();
        SkGraphics::DumpMemoryStatistics(&tracer);
        if (mGrContext) {
            mGrContext->dumpMemoryStatistics(&tracer);
        }
        tracer.logTraces();
    }
}
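// Runs when the RenderThread goes idle. At most once every 25ms it asks Skia to drop cached
// resources that have been unused for longer than the retention window, measured from the most
// recent frame completion.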
void CacheManager::onThreadIdle() {
    if (!mGrContext || mFrameCompletions.size() == 0) return;

    const nsecs_t now = systemTime(CLOCK_MONOTONIC);
    // Rate limiting: perform the deferred cleanup at most once every 25ms.
    if ((now - mLastDeferredCleanup) > 25_ms) {
        mLastDeferredCleanup = now;
        const nsecs_t frameCompleteNanos = mFrameCompletions[0];
        const nsecs_t frameDiffNanos = now - frameCompleteNanos;
        const nsecs_t cleanupMillis =
                ns2ms(std::max(frameDiffNanos, mMemoryPolicy.minimumResourceRetention));
        mGrContext->performDeferredCleanup(std::chrono::milliseconds(cleanupMillis),
                                           mMemoryPolicy.purgeScratchOnly);
    }
}
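// Schedules the rendering context to be torn down after the policy's idle timeout. The
// generation id lets cancelDestroyContext() invalidate a previously scheduled teardown, e.g.
// when a new frame arrives before the timeout fires.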
void CacheManager::scheduleDestroyContext() {
    if (mMemoryPolicy.contextTimeout > 0) {
        // Mark destruction as pending so cancelDestroyContext() can invalidate this task.
        mIsDestructionPending = true;
        mRenderThread.queue().postDelayed(mMemoryPolicy.contextTimeout,
                                          [this, genId = mGenerationId] {
                                              if (mGenerationId != genId) return;
                                              // GenID should have already stopped this, but just
                                              // in case
                                              if (!areAllContextsStopped()) return;
                                              mRenderThread.destroyRenderingContext();
                                          });
    }
}

void CacheManager::cancelDestroyContext() {
    if (mIsDestructionPending) {
        mIsDestructionPending = false;
        mGenerationId++;
    }
}

bool CacheManager::areAllContextsStopped() {
    for (auto context : mCanvasContexts) {
        if (!context->isStopped()) return false;
    }
    return true;
}
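// When the alternative UI_HIDDEN policy is enabled, treat "every registered context is stopped"
// as the UI going hidden and trim caches accordingly.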
void CacheManager::checkUiHidden() {
    if (!mGrContext) return;

    if (mMemoryPolicy.useAlternativeUiHidden && areAllContextsStopped()) {
        trimMemory(TrimLevel::UI_HIDDEN);
    }
}

void CacheManager::registerCanvasContext(CanvasContext* context) {
    mCanvasContexts.push_back(context);
    cancelDestroyContext();
}

void CacheManager::unregisterCanvasContext(CanvasContext* context) {
    std::erase(mCanvasContexts, context);
    checkUiHidden();
    if (mCanvasContexts.empty()) {
        scheduleDestroyContext();
    }
}

void CacheManager::onContextStopped(CanvasContext* context) {
    checkUiHidden();
    if (mMemoryPolicy.releaseContextOnStoppedOnly && areAllContextsStopped()) {
        scheduleDestroyContext();
    }
}
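// Grows the surface-area estimate (and therefore all derived cache budgets) if an upcoming frame
// is larger than anything seen so far; budgets never shrink here.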
void CacheManager::notifyNextFrameSize(int width, int height) {
    int frameArea = width * height;
    if (frameArea > mMaxSurfaceArea) {
        mMaxSurfaceArea = frameArea;
        setupCacheLimits();
    }
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */