/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CanvasContext.h"

#include <apex/window.h>
#include <fcntl.h>
#include <strings.h>
#include <sys/stat.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include <gui/TraceUtils.h>
#include "../Properties.h"
#include "AnimationContext.h"
#include "Frame.h"
#include "LayerUpdateQueue.h"
#include "Properties.h"
#include "RenderThread.h"
#include "hwui/Canvas.h"
#include "pipeline/skia/SkiaOpenGLPipeline.h"
#include "pipeline/skia/SkiaPipeline.h"
#include "pipeline/skia/SkiaVulkanPipeline.h"
#include "thread/CommonPool.h"
#include "utils/GLUtils.h"
#include "utils/TimeUtils.h"

#define TRIM_MEMORY_COMPLETE 80
#define TRIM_MEMORY_UI_HIDDEN 20

#define LOG_FRAMETIME_MMA 0

#if LOG_FRAMETIME_MMA
static float sBenchMma = 0;
static int sFrameCount = 0;
static const float NANOS_PER_MILLIS_F = 1000000.0f;
#endif

namespace android {
namespace uirenderer {
namespace renderthread {

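// RAII helper that publishes the CanvasContext currently executing on the RenderThread
// so it can be looked up via CanvasContext::getActiveContext(); the pointer is cleared
// automatically when the scope ends.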
namespace {
class ScopedActiveContext {
public:
    ScopedActiveContext(CanvasContext* context) { sActiveContext = context; }

    ~ScopedActiveContext() { sActiveContext = nullptr; }

    static CanvasContext* getActiveContext() { return sActiveContext; }

private:
    static CanvasContext* sActiveContext;
};

CanvasContext* ScopedActiveContext::sActiveContext = nullptr;
} /* namespace */

CanvasContext* CanvasContext::create(RenderThread& thread, bool translucent,
                                     RenderNode* rootRenderNode, IContextFactory* contextFactory) {
    auto renderType = Properties::getRenderPipelineType();

    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaOpenGLPipeline>(thread));
        case RenderPipelineType::SkiaVulkan:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaVulkanPipeline>(thread));
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
    return nullptr;
}

void CanvasContext::invokeFunctor(const RenderThread& thread, Functor* functor) {
    ATRACE_CALL();
    auto renderType = Properties::getRenderPipelineType();
    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            skiapipeline::SkiaOpenGLPipeline::invokeFunctor(thread, functor);
            break;
        case RenderPipelineType::SkiaVulkan:
            skiapipeline::SkiaVulkanPipeline::invokeFunctor(thread, functor);
            break;
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
}

void CanvasContext::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    skiapipeline::SkiaPipeline::prepareToDraw(thread, bitmap);
}

CanvasContext::CanvasContext(RenderThread& thread, bool translucent, RenderNode* rootRenderNode,
                             IContextFactory* contextFactory,
                             std::unique_ptr<IRenderPipeline> renderPipeline)
        : mRenderThread(thread)
        , mGenerationID(0)
        , mOpaque(!translucent)
        , mAnimationContext(contextFactory->createAnimationContext(mRenderThread.timeLord()))
        , mJankTracker(&thread.globalProfileData())
        , mProfiler(mJankTracker.frames(), thread.timeLord().frameIntervalNanos())
        , mContentDrawBounds(0, 0, 0, 0)
        , mRenderPipeline(std::move(renderPipeline)) {
    rootRenderNode->makeRoot();
    mRenderNodes.emplace_back(rootRenderNode);
    mProfiler.setDensity(DeviceInfo::getDensity());
}

CanvasContext::~CanvasContext() {
    destroy();
    for (auto& node : mRenderNodes) {
        node->clearRoot();
    }
    mRenderNodes.clear();
}

void CanvasContext::addRenderNode(RenderNode* node, bool placeFront) {
    int pos = placeFront ? 0 : static_cast<int>(mRenderNodes.size());
    node->makeRoot();
    mRenderNodes.emplace(mRenderNodes.begin() + pos, node);
}

void CanvasContext::removeRenderNode(RenderNode* node) {
    node->clearRoot();
    mRenderNodes.erase(std::remove(mRenderNodes.begin(), mRenderNodes.end(), node),
                       mRenderNodes.end());
}

void CanvasContext::destroy() {
    stopDrawing();
    setSurface(nullptr);
    setSurfaceControl(nullptr);
    freePrefetchedLayers();
    destroyHardwareResources();
    mAnimationContext->destroy();
}

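// Sizes the window's buffer queue from the driver-reported minimum undequeued buffer count
// (see the comment inside for why only +2 is added on top of that minimum).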
static void setBufferCount(ANativeWindow* window) {
    int query_value;
    int err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &query_value);
    if (err != 0 || query_value < 0) {
        ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err, query_value);
        return;
    }
    auto min_undequeued_buffers = static_cast<uint32_t>(query_value);

    // We only need to set min_undequeued + 2 because the renderahead amount was already
    // factored into the query for min_undequeued
    int bufferCount = min_undequeued_buffers + 2;
    native_window_set_buffer_count(window, bufferCount);
}

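// Swaps in a new output window: wraps it in a ReliableSurface, optionally arms a dequeue
// timeout, carries over any extra-buffer request, then rebinds the render pipeline via
// setupPipelineSurface().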
void CanvasContext::setSurface(ANativeWindow* window, bool enableTimeout) {
    ATRACE_CALL();

    if (window) {
        int extraBuffers = 0;
        native_window_get_extra_buffer_count(window, &extraBuffers);

        mNativeSurface = std::make_unique<ReliableSurface>(window);
        mNativeSurface->init();
        if (enableTimeout) {
            // TODO: Fix error handling & re-shorten timeout
            ANativeWindow_setDequeueTimeout(window, 4000_ms);
        }
        mNativeSurface->setExtraBufferCount(extraBuffers);
    } else {
        mNativeSurface = nullptr;
    }
    setupPipelineSurface();
}

void CanvasContext::setSurfaceControl(ASurfaceControl* surfaceControl) {
    if (surfaceControl == mSurfaceControl) return;

    auto funcs = mRenderThread.getASurfaceControlFunctions();

    if (surfaceControl == nullptr) {
        setASurfaceTransactionCallback(nullptr);
    }

    if (mSurfaceControl != nullptr) {
        funcs.unregisterListenerFunc(this, &onSurfaceStatsAvailable);
        funcs.releaseFunc(mSurfaceControl);
    }
    mSurfaceControl = surfaceControl;
    mExpectSurfaceStats = surfaceControl != nullptr;
    if (mSurfaceControl != nullptr) {
        funcs.acquireFunc(mSurfaceControl);
        funcs.registerListenerFunc(surfaceControl, this, &onSurfaceStatsAvailable);
    }
}

void CanvasContext::setupPipelineSurface() {
    bool hasSurface = mRenderPipeline->setSurface(
            mNativeSurface ? mNativeSurface->getNativeWindow() : nullptr, mSwapBehavior);

    if (mNativeSurface && !mNativeSurface->didSetExtraBuffers()) {
        setBufferCount(mNativeSurface->getNativeWindow());
    }

    mFrameNumber = -1;

    if (mNativeSurface != nullptr && hasSurface) {
        mHaveNewSurface = true;
        mSwapHistory.clear();
        // Enable frame stats after the surface has been bound to the appropriate graphics API.
        // Order is important when new and old surfaces are the same, because old surface has
        // its frame stats disabled automatically.
        native_window_enable_frame_timestamps(mNativeSurface->getNativeWindow(), true);
    } else {
        mRenderThread.removeFrameCallback(this);
        mGenerationID++;
    }
}

void CanvasContext::setSwapBehavior(SwapBehavior swapBehavior) {
    mSwapBehavior = swapBehavior;
}

bool CanvasContext::pauseSurface() {
    mGenerationID++;
    return mRenderThread.removeFrameCallback(this);
}

void CanvasContext::setStopped(bool stopped) {
    if (mStopped != stopped) {
        mStopped = stopped;
        if (mStopped) {
            mGenerationID++;
            mRenderThread.removeFrameCallback(this);
            mRenderPipeline->onStop();
        } else if (mIsDirty && hasSurface()) {
            mRenderThread.postFrameCallback(this);
        }
    }
}

void CanvasContext::allocateBuffers() {
    if (mNativeSurface) {
        ANativeWindow_tryAllocateBuffers(mNativeSurface->getNativeWindow());
    }
}

void CanvasContext::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mLightInfo.ambientShadowAlpha = ambientShadowAlpha;
    mLightInfo.spotShadowAlpha = spotShadowAlpha;
}

void CanvasContext::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mLightGeometry.center = lightCenter;
    mLightGeometry.radius = lightRadius;
}

void CanvasContext::setOpaque(bool opaque) {
    mOpaque = opaque;
}

void CanvasContext::setColorMode(ColorMode mode) {
    mRenderPipeline->setSurfaceColorProperties(mode);
    setupPipelineSurface();
}

bool CanvasContext::makeCurrent() {
    if (mStopped) return false;

    auto result = mRenderPipeline->makeCurrent();
    switch (result) {
        case MakeCurrentResult::AlreadyCurrent:
            return true;
        case MakeCurrentResult::Failed:
            mHaveNewSurface = true;
            setSurface(nullptr);
            return false;
        case MakeCurrentResult::Succeeded:
            mHaveNewSurface = true;
            return true;
        default:
            LOG_ALWAYS_FATAL("unexpected result %d from IRenderPipeline::makeCurrent",
                             (int32_t)result);
    }

    return true;
}

static bool wasSkipped(FrameInfo* info) {
    return info && ((*info)[FrameInfoIndex::Flags] & FrameInfoFlags::SkippedFrame);
}

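// Heuristic for detecting a saturated swap chain: once the swap history ring is full, every
// recorded frame must have spent a long time in dequeue or queue, with no multi-frame gaps
// between swaps; any healthy or widely spaced entry means the queue is not considered stuffed.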
bool CanvasContext::isSwapChainStuffed() {
    static const auto SLOW_THRESHOLD = 6_ms;

    if (mSwapHistory.size() != mSwapHistory.capacity()) {
        // We want at least 3 frames of history before attempting to
        // guess if the queue is stuffed
        return false;
    }
    nsecs_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    auto& swapA = mSwapHistory[0];

    // Was there a happy queue & dequeue time? If so, don't
    // consider it stuffed
    if (swapA.dequeueDuration < SLOW_THRESHOLD && swapA.queueDuration < SLOW_THRESHOLD) {
        return false;
    }

    for (size_t i = 1; i < mSwapHistory.size(); i++) {
        auto& swapB = mSwapHistory[i];

        // If there's a multi-frameInterval gap we effectively already dropped a frame,
        // so consider the queue healthy.
        if (std::abs(swapA.swapCompletedTime - swapB.swapCompletedTime) > frameInterval * 3) {
            return false;
        }

        // Was there a happy queue & dequeue time? If so, don't
        // consider it stuffed
        if (swapB.dequeueDuration < SLOW_THRESHOLD && swapB.queueDuration < SLOW_THRESHOLD) {
            return false;
        }

        swapA = swapB;
    }

    // All signs point to a stuffed swap chain
    ATRACE_NAME("swap chain stuffed");
    return true;
}

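// Sync stage: imports the latest UI-thread frame info, runs animators, prepares every render
// node tree, and decides whether this frame can be drawn on the RenderThread
// (info.out.canDrawThisFrame) or must be skipped / deferred back to the UI thread.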
void CanvasContext::prepareTree(TreeInfo& info, int64_t* uiFrameInfo, int64_t syncQueued,
                                RenderNode* target) {
    mRenderThread.removeFrameCallback(this);

    // If the previous frame was dropped we don't need to hold onto it, so
    // just keep using the previous frame's structure instead
    if (!wasSkipped(mCurrentFrameInfo)) {
        mCurrentFrameInfo = mJankTracker.startFrame();
    }

    mCurrentFrameInfo->importUiThreadInfo(uiFrameInfo);
    mCurrentFrameInfo->set(FrameInfoIndex::SyncQueued) = syncQueued;
    mCurrentFrameInfo->markSyncStart();

    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.damageGenerationId = mDamageId++;
    info.out.canDrawThisFrame = true;

    mAnimationContext->startFrame(info.mode);
    for (const sp<RenderNode>& node : mRenderNodes) {
        // Only the primary target node will be drawn full - all other nodes would get drawn in
        // real time mode. In case of a window, the primary node is the window content and the
        // other node(s) are non client / filler nodes.
        info.mode = (node.get() == target ? TreeInfo::MODE_FULL : TreeInfo::MODE_RT_ONLY);
        node->prepareTree(info);
        GL_CHECKPOINT(MODERATE);
    }
    mAnimationContext->runRemainingAnimations(info);
    GL_CHECKPOINT(MODERATE);

    freePrefetchedLayers();
    GL_CHECKPOINT(MODERATE);

    mIsDirty = true;

    if (CC_UNLIKELY(!hasSurface())) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        info.out.canDrawThisFrame = false;
        return;
    }

    if (CC_LIKELY(mSwapHistory.size() && !Properties::forceDrawFrame)) {
        nsecs_t latestVsync = mRenderThread.timeLord().latestVsync();
        SwapHistory& lastSwap = mSwapHistory.back();
        nsecs_t vsyncDelta = std::abs(lastSwap.vsyncTime - latestVsync);
        // The slight fudge-factor is to deal with cases where
        // the vsync was estimated due to being slow handling the signal.
        // See the logic in TimeLord#computeFrameTimeNanos or in
        // Choreographer.java for details on when this happens
        if (vsyncDelta < 2_ms) {
            // Already drew for this vsync pulse, UI draw request missed
            // the deadline for RT animations
            info.out.canDrawThisFrame = false;
        }
    } else {
        info.out.canDrawThisFrame = true;
    }

    // TODO: Do we need to abort out if the backdrop is added but not ready? Should that even
    // be an allowable combination?
    if (mRenderNodes.size() > 2 && !mRenderNodes[1]->isRenderable()) {
        info.out.canDrawThisFrame = false;
    }

    if (info.out.canDrawThisFrame) {
        int err = mNativeSurface->reserveNext();
        if (err != OK) {
            mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
            info.out.canDrawThisFrame = false;
            ALOGW("reserveNext failed, error = %d (%s)", err, strerror(-err));
            if (err != TIMED_OUT) {
                // A timed out surface can still recover, but assume others are permanently dead.
                setSurface(nullptr);
                return;
            }
        }
    } else {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
    }

    bool postedFrameCallback = false;
    if (info.out.hasAnimations || !info.out.canDrawThisFrame) {
        if (CC_UNLIKELY(!Properties::enableRTAnimations)) {
            info.out.requiresUiRedraw = true;
        }
        if (!info.out.requiresUiRedraw) {
            // If animationsNeedsRedraw is set don't bother posting for an RT anim
            // as we will just end up fighting the UI thread.
            mRenderThread.postFrameCallback(this);
            postedFrameCallback = true;
        }
    }

    if (!postedFrameCallback &&
        info.out.animatedImageDelay != TreeInfo::Out::kNoAnimatedImageDelay) {
        // Subtract the time of one frame so it can be displayed on time.
        const nsecs_t kFrameTime = mRenderThread.timeLord().frameIntervalNanos();
        if (info.out.animatedImageDelay <= kFrameTime) {
            mRenderThread.postFrameCallback(this);
        } else {
            const auto delay = info.out.animatedImageDelay - kFrameTime;
            int genId = mGenerationID;
            mRenderThread.queue().postDelayed(delay, [this, genId]() {
                if (mGenerationID == genId) {
                    mRenderThread.postFrameCallback(this);
                }
            });
        }
    }
}

void CanvasContext::stopDrawing() {
    mRenderThread.removeFrameCallback(this);
    mAnimationContext->pauseAnimators();
    mGenerationID++;
}

void CanvasContext::notifyFramePending() {
    ATRACE_CALL();
    mRenderThread.pushBackFrameCallback(this);
}

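// Draw stage: computes the damage rect, hands the frame to the render pipeline, swaps buffers,
// records swap timing in the history used by computeDirtyRect()/isSwapChainStuffed(), and feeds
// the jank tracker and frame-complete callbacks. Returns the dequeue-buffer duration of the frame.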
nsecs_t CanvasContext::draw() {
    if (auto grContext = getGrContext()) {
        if (grContext->abandoned()) {
            LOG_ALWAYS_FATAL("GrContext is abandoned/device lost at start of CanvasContext::draw");
            return 0;
        }
    }
    SkRect dirty;
    mDamageAccumulator.finish(&dirty);

    if (dirty.isEmpty() && Properties::skipEmptyFrames && !surfaceRequiresRedraw()) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        // Notify the callbacks, even if there's nothing to draw so they aren't waiting
        // indefinitely
        waitOnFences();
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, mFrameNumber);
        }
        mFrameCompleteCallbacks.clear();
        return 0;
    }

    ScopedActiveContext activeContext(this);
    mCurrentFrameInfo->set(FrameInfoIndex::FrameInterval) =
            mRenderThread.timeLord().frameIntervalNanos();

    mCurrentFrameInfo->markIssueDrawCommandsStart();

    Frame frame = mRenderPipeline->getFrame();
    SkRect windowDirty = computeDirtyRect(frame, &dirty);

    bool drew = mRenderPipeline->draw(frame, windowDirty, dirty, mLightGeometry, &mLayerUpdateQueue,
                                      mContentDrawBounds, mOpaque, mLightInfo, mRenderNodes,
                                      &(profiler()));

    int64_t frameCompleteNr = getFrameNumber();

    waitOnFences();

    if (mNativeSurface) {
        // TODO(b/165985262): measure performance impact
        const auto vsyncId = mCurrentFrameInfo->get(FrameInfoIndex::FrameTimelineVsyncId);
        if (vsyncId != UiFrameInfoBuilder::INVALID_VSYNC_ID) {
            const auto inputEventId =
                    static_cast<int32_t>(mCurrentFrameInfo->get(FrameInfoIndex::InputEventId));
            native_window_set_frame_timeline_info(mNativeSurface->getNativeWindow(), vsyncId,
                                                  inputEventId);
        }
    }

    bool requireSwap = false;
    int error = OK;
    bool didSwap =
            mRenderPipeline->swapBuffers(frame, drew, windowDirty, mCurrentFrameInfo, &requireSwap);

    mIsDirty = false;

    if (requireSwap) {
        bool didDraw = true;
        // Handle any swapchain errors
        error = mNativeSurface->getAndClearError();
        if (error == TIMED_OUT) {
            // Try again
            mRenderThread.postFrameCallback(this);
            // But since this frame didn't happen, we need to mark full damage in the swap
            // history
            didDraw = false;
        } else if (error != OK || !didSwap) {
            // Unknown error, abandon the surface
            setSurface(nullptr);
            didDraw = false;
        }

        SwapHistory& swap = mSwapHistory.next();
        if (didDraw) {
            swap.damage = windowDirty;
        } else {
            float max = static_cast<float>(INT_MAX);
            swap.damage = SkRect::MakeWH(max, max);
        }
        swap.swapCompletedTime = systemTime(SYSTEM_TIME_MONOTONIC);
        swap.vsyncTime = mRenderThread.timeLord().latestVsync();
        if (didDraw) {
            nsecs_t dequeueStart =
                    ANativeWindow_getLastDequeueStartTime(mNativeSurface->getNativeWindow());
            if (dequeueStart < mCurrentFrameInfo->get(FrameInfoIndex::SyncStart)) {
                // Ignoring dequeue duration as it happened prior to frame render start
                // and thus is not part of the frame.
                swap.dequeueDuration = 0;
            } else {
                swap.dequeueDuration =
                        ANativeWindow_getLastDequeueDuration(mNativeSurface->getNativeWindow());
            }
            swap.queueDuration =
                    ANativeWindow_getLastQueueDuration(mNativeSurface->getNativeWindow());
        } else {
            swap.dequeueDuration = 0;
            swap.queueDuration = 0;
        }
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = swap.dequeueDuration;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = swap.queueDuration;
        mHaveNewSurface = false;
        mFrameNumber = -1;
    } else {
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = 0;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = 0;
    }

    mCurrentFrameInfo->markSwapBuffersCompleted();

#if LOG_FRAMETIME_MMA
    float thisFrame = mCurrentFrameInfo->duration(FrameInfoIndex::IssueDrawCommandsStart,
                                                  FrameInfoIndex::FrameCompleted) /
                      NANOS_PER_MILLIS_F;
    if (sFrameCount) {
        sBenchMma = ((9 * sBenchMma) + thisFrame) / 10;
    } else {
        sBenchMma = thisFrame;
    }
    if (++sFrameCount == 10) {
        sFrameCount = 1;
        ALOGD("Average frame time: %.4f", sBenchMma);
    }
#endif

    if (didSwap) {
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, frameCompleteNr);
        }
        mFrameCompleteCallbacks.clear();
    }

    if (requireSwap) {
        if (mExpectSurfaceStats) {
            reportMetricsWithPresentTime();
            std::lock_guard lock(mLast4FrameInfosMutex);
            std::pair<FrameInfo*, int64_t>& next = mLast4FrameInfos.next();
            next.first = mCurrentFrameInfo;
            next.second = frameCompleteNr;
        } else {
            mCurrentFrameInfo->markFrameCompleted();
            mCurrentFrameInfo->set(FrameInfoIndex::GpuCompleted) =
                    mCurrentFrameInfo->get(FrameInfoIndex::FrameCompleted);
            mJankTracker.finishFrame(*mCurrentFrameInfo, mFrameMetricsReporter);
        }
    }

    mRenderThread.cacheManager().onFrameCompleted();
    return mCurrentFrameInfo->get(FrameInfoIndex::DequeueBufferDuration);
}

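// Looks up the oldest entry in mLast4FrameInfos, queries the window for that frame's present
// time, and forwards the completed FrameInfo to the frame metrics reporter.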
void CanvasContext::reportMetricsWithPresentTime() {
    if (mFrameMetricsReporter == nullptr) {
        return;
    }
    if (mNativeSurface == nullptr) {
        return;
    }
    FrameInfo* forthBehind;
    int64_t frameNumber;
    {  // acquire lock
        std::scoped_lock lock(mLast4FrameInfosMutex);
        if (mLast4FrameInfos.size() != mLast4FrameInfos.capacity()) {
            // Not enough frames yet
            return;
        }
        // Surface object keeps stats for the last 8 frames.
        std::tie(forthBehind, frameNumber) = mLast4FrameInfos.front();
    }  // release lock

    nsecs_t presentTime = 0;
    native_window_get_frame_timestamps(
            mNativeSurface->getNativeWindow(), frameNumber, nullptr /*outRequestedPresentTime*/,
            nullptr /*outAcquireTime*/, nullptr /*outLatchTime*/,
            nullptr /*outFirstRefreshStartTime*/, nullptr /*outLastRefreshStartTime*/,
            nullptr /*outGpuCompositionDoneTime*/, &presentTime, nullptr /*outDequeueReadyTime*/,
            nullptr /*outReleaseTime*/);

    forthBehind->set(FrameInfoIndex::DisplayPresentTime) = presentTime;
    mFrameMetricsReporter->reportFrameMetrics(forthBehind->data(), true /*hasPresentTime*/);
}

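// Static callback registered with the ASurfaceControl stats listener; it matches the reported
// frame number against mLast4FrameInfos and, if found, finalizes that frame's timing for the
// jank tracker using the acquire (GPU complete) time from the surface stats.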
void CanvasContext::onSurfaceStatsAvailable(void* context, ASurfaceControl* control,
                                            ASurfaceControlStats* stats) {
    CanvasContext* instance = static_cast<CanvasContext*>(context);

    const ASurfaceControlFunctions& functions =
            instance->mRenderThread.getASurfaceControlFunctions();

    nsecs_t gpuCompleteTime = functions.getAcquireTimeFunc(stats);
    uint64_t frameNumber = functions.getFrameNumberFunc(stats);

    FrameInfo* frameInfo = nullptr;
    {
        // The guard must be named so the mutex stays held while we search the ring buffer
        // (an unnamed temporary would unlock immediately).
        std::lock_guard lock(instance->mLast4FrameInfosMutex);
        for (size_t i = 0; i < instance->mLast4FrameInfos.size(); i++) {
            if (instance->mLast4FrameInfos[i].second == frameNumber) {
                frameInfo = instance->mLast4FrameInfos[i].first;
                break;
            }
        }
    }

    if (frameInfo != nullptr) {
        frameInfo->set(FrameInfoIndex::FrameCompleted) = std::max(gpuCompleteTime,
                frameInfo->get(FrameInfoIndex::SwapBuffersCompleted));
        frameInfo->set(FrameInfoIndex::GpuCompleted) = gpuCompleteTime;
        // Hold the reporter mutex for the remainder of this scope while finishing the frame.
        std::lock_guard lock(instance->mFrameMetricsReporterMutex);
        instance->mJankTracker.finishFrame(*frameInfo, instance->mFrameMetricsReporter);
    }
}

// Called by choreographer to do an RT-driven animation
void CanvasContext::doFrame() {
    if (!mRenderPipeline->isSurfaceReady()) return;
    prepareAndDraw(nullptr);
}

SkISize CanvasContext::getNextFrameSize() const {
    static constexpr SkISize defaultFrameSize = {INT32_MAX, INT32_MAX};
    if (mNativeSurface == nullptr) {
        return defaultFrameSize;
    }
    ANativeWindow* anw = mNativeSurface->getNativeWindow();

    SkISize size;
    size.fWidth = ANativeWindow_getWidth(anw);
    size.fHeight = ANativeWindow_getHeight(anw);
    return size;
}

void CanvasContext::prepareAndDraw(RenderNode* node) {
    ATRACE_CALL();

    nsecs_t vsync = mRenderThread.timeLord().computeFrameTimeNanos();
    int64_t vsyncId = mRenderThread.timeLord().lastVsyncId();
    int64_t frameDeadline = mRenderThread.timeLord().lastFrameDeadline();
    int64_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    int64_t frameInfo[UI_THREAD_FRAME_INFO_SIZE];
    UiFrameInfoBuilder(frameInfo)
            .addFlag(FrameInfoFlags::RTAnimation)
            .setVsync(vsync, vsync, vsyncId, frameDeadline, frameInterval);

    TreeInfo info(TreeInfo::MODE_RT_ONLY, *this);
    prepareTree(info, frameInfo, systemTime(SYSTEM_TIME_MONOTONIC), node);
    if (info.out.canDrawThisFrame) {
        draw();
    } else {
        // wait on fences so tasks don't overlap next frame
        waitOnFences();
    }
}

void CanvasContext::markLayerInUse(RenderNode* node) {
    if (mPrefetchedLayers.erase(node)) {
        node->decStrong(nullptr);
    }
}

void CanvasContext::freePrefetchedLayers() {
    if (mPrefetchedLayers.size()) {
        for (auto& node : mPrefetchedLayers) {
            ALOGW("Incorrectly called buildLayer on View: %s, destroying layer...",
                  node->getName());
            node->destroyLayers();
            node->decStrong(nullptr);
        }
        mPrefetchedLayers.clear();
    }
}

void CanvasContext::buildLayer(RenderNode* node) {
    ATRACE_CALL();
    if (!mRenderPipeline->isContextReady()) return;

    // buildLayer() will leave the tree in an unknown state, so we must stop drawing
    stopDrawing();

    TreeInfo info(TreeInfo::MODE_FULL, *this);
    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.runAnimations = false;
    node->prepareTree(info);
    SkRect ignore;
    mDamageAccumulator.finish(&ignore);
    // Tickle the GENERIC property on node to mark it as dirty for damaging
    // purposes when the frame is actually drawn
    node->setPropertyFieldsDirty(RenderNode::GENERIC);

    mRenderPipeline->renderLayers(mLightGeometry, &mLayerUpdateQueue, mOpaque, mLightInfo);

    node->incStrong(nullptr);
    mPrefetchedLayers.insert(node);
}

void CanvasContext::destroyHardwareResources() {
    stopDrawing();
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyHardwareResources();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

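// Responds to ComponentCallbacks2-style trim levels (the TRIM_MEMORY_* values defined above):
// a hidden UI drops caches, while a complete trim also tears down the rendering context.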
void CanvasContext::trimMemory(RenderThread& thread, int level) {
    ATRACE_CALL();
    if (!thread.getGrContext()) return;
    if (level >= TRIM_MEMORY_COMPLETE) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::Complete);
        thread.destroyRenderingContext();
    } else if (level >= TRIM_MEMORY_UI_HIDDEN) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::UiHidden);
    }
}

DeferredLayerUpdater* CanvasContext::createTextureLayer() {
    return mRenderPipeline->createTextureLayer();
}

void CanvasContext::dumpFrames(int fd) {
    mJankTracker.dumpStats(fd);
    mJankTracker.dumpFrames(fd);
}

void CanvasContext::resetFrameStats() {
    mJankTracker.reset();
}

void CanvasContext::setName(const std::string&& name) {
    mJankTracker.setDescription(JankTrackerType::Window, std::move(name));
}

void CanvasContext::waitOnFences() {
    if (mFrameFences.size()) {
        ATRACE_CALL();
        for (auto& fence : mFrameFences) {
            fence.get();
        }
        mFrameFences.clear();
    }
}

void CanvasContext::enqueueFrameWork(std::function<void()>&& func) {
    mFrameFences.push_back(CommonPool::async(std::move(func)));
}

int64_t CanvasContext::getFrameNumber() {
    // mFrameNumber is reset to -1 when the surface changes or we swap buffers
    if (mFrameNumber == -1 && mNativeSurface.get()) {
        mFrameNumber = ANativeWindow_getNextFrameId(mNativeSurface->getNativeWindow());
    }
    return mFrameNumber;
}

bool CanvasContext::surfaceRequiresRedraw() {
    if (!mNativeSurface) return false;
    if (mHaveNewSurface) return true;

    ANativeWindow* anw = mNativeSurface->getNativeWindow();
    const int width = ANativeWindow_getWidth(anw);
    const int height = ANativeWindow_getHeight(anw);

    return width != mLastFrameWidth || height != mLastFrameHeight;
}

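// Derives this frame's dirty region from the buffer age reported by the frame: a resized or
// brand-new surface forces a full redraw, otherwise the damage from the previous frames still
// present in the buffer is unioned in from mSwapHistory. Returns the window-space dirty rect.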
SkRect CanvasContext::computeDirtyRect(const Frame& frame, SkRect* dirty) {
    if (frame.width() != mLastFrameWidth || frame.height() != mLastFrameHeight) {
        // can't rely on prior content of window if viewport size changes
        dirty->setEmpty();
        mLastFrameWidth = frame.width();
        mLastFrameHeight = frame.height();
    } else if (mHaveNewSurface || frame.bufferAge() == 0) {
        // New surface needs a full draw
        dirty->setEmpty();
    } else {
        if (!dirty->isEmpty() &&
            !dirty->intersect(SkRect::MakeIWH(frame.width(), frame.height()))) {
            ALOGW("Dirty " RECT_STRING " doesn't intersect with 0 0 %d %d ?", SK_RECT_ARGS(*dirty),
                  frame.width(), frame.height());
            dirty->setEmpty();
        }
        profiler().unionDirty(dirty);
    }

    if (dirty->isEmpty()) {
        dirty->setIWH(frame.width(), frame.height());
    }

    // At this point dirty is the area of the window to update. However,
    // the area of the frame we need to repaint is potentially different, so
    // stash the screen area for later
    SkRect windowDirty(*dirty);

    // If the buffer age is 0 we do a full-screen repaint (handled above)
    // If the buffer age is 1 the buffer contents are the same as they were
    // last frame so there's nothing to union() against
    // Therefore we only care about the > 1 case.
    if (frame.bufferAge() > 1) {
        if (frame.bufferAge() > (int)mSwapHistory.size()) {
            // We don't have enough history to handle this old of a buffer
            // Just do a full-draw
            dirty->setIWH(frame.width(), frame.height());
        } else {
            // At this point we haven't yet added the latest frame
            // to the damage history (happens below)
            // So we need to damage
            for (int i = mSwapHistory.size() - 1;
                 i > ((int)mSwapHistory.size()) - frame.bufferAge(); i--) {
                dirty->join(mSwapHistory[i].damage);
            }
        }
    }

    return windowDirty;
}

CanvasContext* CanvasContext::getActiveContext() {
    return ScopedActiveContext::getActiveContext();
}

bool CanvasContext::mergeTransaction(ASurfaceTransaction* transaction, ASurfaceControl* control) {
    if (!mASurfaceTransactionCallback) return false;
    std::invoke(mASurfaceTransactionCallback, reinterpret_cast<int64_t>(transaction),
                reinterpret_cast<int64_t>(control), getFrameNumber());
    return true;
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */