/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CanvasContext.h"

#include <apex/window.h>
#include <fcntl.h>
#include <strings.h>
#include <sys/stat.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include "../Properties.h"
#include "AnimationContext.h"
#include "Frame.h"
#include "LayerUpdateQueue.h"
#include "Properties.h"
#include "RenderThread.h"
#include "hwui/Canvas.h"
#include "pipeline/skia/SkiaOpenGLPipeline.h"
#include "pipeline/skia/SkiaPipeline.h"
#include "pipeline/skia/SkiaVulkanPipeline.h"
#include "thread/CommonPool.h"
#include "utils/GLUtils.h"
#include "utils/TimeUtils.h"
#include "utils/TraceUtils.h"

#define TRIM_MEMORY_COMPLETE 80
#define TRIM_MEMORY_UI_HIDDEN 20

#define LOG_FRAMETIME_MMA 0

#if LOG_FRAMETIME_MMA
static float sBenchMma = 0;
static int sFrameCount = 0;
static const float NANOS_PER_MILLIS_F = 1000000.0f;
#endif

namespace android {
namespace uirenderer {
namespace renderthread {

CanvasContext* CanvasContext::create(RenderThread& thread, bool translucent,
                                     RenderNode* rootRenderNode, IContextFactory* contextFactory) {
    auto renderType = Properties::getRenderPipelineType();

    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaOpenGLPipeline>(thread));
        case RenderPipelineType::SkiaVulkan:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaVulkanPipeline>(thread));
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
    return nullptr;
}

void CanvasContext::invokeFunctor(const RenderThread& thread, Functor* functor) {
    ATRACE_CALL();
    auto renderType = Properties::getRenderPipelineType();
    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            skiapipeline::SkiaOpenGLPipeline::invokeFunctor(thread, functor);
            break;
        case RenderPipelineType::SkiaVulkan:
            skiapipeline::SkiaVulkanPipeline::invokeFunctor(thread, functor);
            break;
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
}

void CanvasContext::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    skiapipeline::SkiaPipeline::prepareToDraw(thread, bitmap);
}

CanvasContext::CanvasContext(RenderThread& thread, bool translucent, RenderNode* rootRenderNode,
                             IContextFactory* contextFactory,
                             std::unique_ptr<IRenderPipeline> renderPipeline)
        : mRenderThread(thread)
        , mGenerationID(0)
        , mOpaque(!translucent)
        , mAnimationContext(contextFactory->createAnimationContext(mRenderThread.timeLord()))
        , mJankTracker(&thread.globalProfileData())
        , mProfiler(mJankTracker.frames(), thread.timeLord().frameIntervalNanos())
        , mContentDrawBounds(0, 0, 0, 0)
        , mRenderPipeline(std::move(renderPipeline)) {
    rootRenderNode->makeRoot();
    mRenderNodes.emplace_back(rootRenderNode);
    mProfiler.setDensity(DeviceInfo::getDensity());
}

CanvasContext::~CanvasContext() {
    destroy();
    for (auto& node : mRenderNodes) {
        node->clearRoot();
    }
    mRenderNodes.clear();
}

void CanvasContext::addRenderNode(RenderNode* node, bool placeFront) {
    int pos = placeFront ? 0 : static_cast<int>(mRenderNodes.size());
    node->makeRoot();
    mRenderNodes.emplace(mRenderNodes.begin() + pos, node);
}

void CanvasContext::removeRenderNode(RenderNode* node) {
    node->clearRoot();
    mRenderNodes.erase(std::remove(mRenderNodes.begin(), mRenderNodes.end(), node),
                       mRenderNodes.end());
}

void CanvasContext::destroy() {
    stopDrawing();
    setSurface(nullptr);
    setSurfaceControl(nullptr);
    freePrefetchedLayers();
    destroyHardwareResources();
    mAnimationContext->destroy();
}

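// Sizes the window's buffer queue to the minimum undequeued buffer count reported by the
// window plus two.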
static void setBufferCount(ANativeWindow* window) {
    int query_value;
    int err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &query_value);
    if (err != 0 || query_value < 0) {
        ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err, query_value);
        return;
    }
    auto min_undequeued_buffers = static_cast<uint32_t>(query_value);

    // We only need to set min_undequeued + 2 because the renderahead amount was already
    // factored into the query for min_undequeued
    int bufferCount = min_undequeued_buffers + 2;
    native_window_set_buffer_count(window, bufferCount);
}

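// Wraps the incoming ANativeWindow in a ReliableSurface, applies the dequeue timeout and any
// extra buffer count requested for the window, then rebinds the render pipeline to the new
// surface (or detaches it when window is null).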
void CanvasContext::setSurface(ANativeWindow* window, bool enableTimeout) {
    ATRACE_CALL();

    if (window) {
        int extraBuffers = 0;
        native_window_get_extra_buffer_count(window, &extraBuffers);

        mNativeSurface = std::make_unique<ReliableSurface>(window);
        mNativeSurface->init();
        if (enableTimeout) {
            // TODO: Fix error handling & re-shorten timeout
            ANativeWindow_setDequeueTimeout(window, 4000_ms);
        }
        mNativeSurface->setExtraBufferCount(extraBuffers);
    } else {
        mNativeSurface = nullptr;
    }
    setupPipelineSurface();
}

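// Swaps the tracked ASurfaceControl, releasing the reference on the previous control and
// acquiring one on the new control through the functions provided by the render thread.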
void CanvasContext::setSurfaceControl(ASurfaceControl* surfaceControl) {
    if (surfaceControl == mSurfaceControl) return;

    auto funcs = mRenderThread.getASurfaceControlFunctions();
    if (mSurfaceControl != nullptr) {
        funcs.releaseFunc(mSurfaceControl);
    }
    mSurfaceControl = surfaceControl;
    if (mSurfaceControl != nullptr) {
        funcs.acquireFunc(mSurfaceControl);
    }
}

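// Binds the current native surface (if any) to the render pipeline and ensures the buffer
// count is set. On success the swap history is cleared and frame timestamps are enabled for
// the surface; otherwise the frame callback is removed and the generation ID is bumped.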
void CanvasContext::setupPipelineSurface() {
    bool hasSurface = mRenderPipeline->setSurface(
            mNativeSurface ? mNativeSurface->getNativeWindow() : nullptr, mSwapBehavior);

    if (mNativeSurface && !mNativeSurface->didSetExtraBuffers()) {
        setBufferCount(mNativeSurface->getNativeWindow());
    }

    mFrameNumber = -1;

    if (mNativeSurface != nullptr && hasSurface) {
        mHaveNewSurface = true;
        mSwapHistory.clear();
        // Enable frame stats after the surface has been bound to the appropriate graphics API.
        // Order is important when new and old surfaces are the same, because old surface has
        // its frame stats disabled automatically.
        native_window_enable_frame_timestamps(mNativeSurface->getNativeWindow(), true);
    } else {
        mRenderThread.removeFrameCallback(this);
        mGenerationID++;
    }
}

void CanvasContext::setSwapBehavior(SwapBehavior swapBehavior) {
    mSwapBehavior = swapBehavior;
}

bool CanvasContext::pauseSurface() {
    mGenerationID++;
    return mRenderThread.removeFrameCallback(this);
}

void CanvasContext::setStopped(bool stopped) {
    if (mStopped != stopped) {
        mStopped = stopped;
        if (mStopped) {
            mGenerationID++;
            mRenderThread.removeFrameCallback(this);
            mRenderPipeline->onStop();
        } else if (mIsDirty && hasSurface()) {
            mRenderThread.postFrameCallback(this);
        }
    }
}

void CanvasContext::allocateBuffers() {
    if (mNativeSurface) {
        ANativeWindow_tryAllocateBuffers(mNativeSurface->getNativeWindow());
    }
}

void CanvasContext::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mLightInfo.ambientShadowAlpha = ambientShadowAlpha;
    mLightInfo.spotShadowAlpha = spotShadowAlpha;
}

void CanvasContext::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mLightGeometry.center = lightCenter;
    mLightGeometry.radius = lightRadius;
}

void CanvasContext::setOpaque(bool opaque) {
    mOpaque = opaque;
}

void CanvasContext::setColorMode(ColorMode mode) {
    mRenderPipeline->setSurfaceColorProperties(mode);
    setupPipelineSurface();
}

bool CanvasContext::makeCurrent() {
    if (mStopped) return false;

    auto result = mRenderPipeline->makeCurrent();
    switch (result) {
        case MakeCurrentResult::AlreadyCurrent:
            return true;
        case MakeCurrentResult::Failed:
            mHaveNewSurface = true;
            setSurface(nullptr);
            return false;
        case MakeCurrentResult::Succeeded:
            mHaveNewSurface = true;
            return true;
        default:
            LOG_ALWAYS_FATAL("unexpected result %d from IRenderPipeline::makeCurrent",
                             (int32_t)result);
    }

    return true;
}

static bool wasSkipped(FrameInfo* info) {
    return info && ((*info)[FrameInfoIndex::Flags] & FrameInfoFlags::SkippedFrame);
}

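// Heuristic for detecting a stuffed swap chain: with a full swap history, if every recent
// swap saw a slow dequeue or queue time and the swaps were not separated by multi-frame
// gaps, the consumer is assumed to be backed up.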
bool CanvasContext::isSwapChainStuffed() {
    static const auto SLOW_THRESHOLD = 6_ms;

    if (mSwapHistory.size() != mSwapHistory.capacity()) {
        // We want at least 3 frames of history before attempting to
        // guess if the queue is stuffed
        return false;
    }
    nsecs_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    auto& swapA = mSwapHistory[0];

    // Was there a happy queue & dequeue time? If so, don't
    // consider it stuffed
    if (swapA.dequeueDuration < SLOW_THRESHOLD && swapA.queueDuration < SLOW_THRESHOLD) {
        return false;
    }

    for (size_t i = 1; i < mSwapHistory.size(); i++) {
        auto& swapB = mSwapHistory[i];

        // If there's a multi-frameInterval gap we effectively already dropped a frame,
        // so consider the queue healthy.
        if (std::abs(swapA.swapCompletedTime - swapB.swapCompletedTime) > frameInterval * 3) {
            return false;
        }

        // Was there a happy queue & dequeue time? If so, don't
        // consider it stuffed
        if (swapB.dequeueDuration < SLOW_THRESHOLD && swapB.queueDuration < SLOW_THRESHOLD) {
            return false;
        }

        swapA = swapB;
    }

    // All signs point to a stuffed swap chain
    ATRACE_NAME("swap chain stuffed");
    return true;
}

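// Syncs UI-thread frame state into the render thread: starts frame bookkeeping, runs
// prepareTree on every root node plus render-thread animations, and decides whether this
// frame can be drawn or must be skipped (no surface, already drew for this vsync pulse, a
// backdrop node that isn't renderable yet, or a failed buffer reservation).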
void CanvasContext::prepareTree(TreeInfo& info, int64_t* uiFrameInfo, int64_t syncQueued,
                                RenderNode* target) {
    mRenderThread.removeFrameCallback(this);

    // If the previous frame was dropped we don't need to hold onto it, so
    // just keep using the previous frame's structure instead
    if (!wasSkipped(mCurrentFrameInfo)) {
        mCurrentFrameInfo = mJankTracker.startFrame();
        mLast4FrameInfos.next().first = mCurrentFrameInfo;
    }
    mCurrentFrameInfo->importUiThreadInfo(uiFrameInfo);
    mCurrentFrameInfo->set(FrameInfoIndex::SyncQueued) = syncQueued;
    mCurrentFrameInfo->markSyncStart();

    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.damageGenerationId = mDamageId++;
    info.out.canDrawThisFrame = true;

    mAnimationContext->startFrame(info.mode);
    for (const sp<RenderNode>& node : mRenderNodes) {
        // Only the primary target node will be drawn full - all other nodes would get drawn in
        // real time mode. In case of a window, the primary node is the window content and the
        // other node(s) are non client / filler nodes.
        info.mode = (node.get() == target ? TreeInfo::MODE_FULL : TreeInfo::MODE_RT_ONLY);
        node->prepareTree(info);
        GL_CHECKPOINT(MODERATE);
    }
    mAnimationContext->runRemainingAnimations(info);
    GL_CHECKPOINT(MODERATE);

    freePrefetchedLayers();
    GL_CHECKPOINT(MODERATE);

    mIsDirty = true;

    if (CC_UNLIKELY(!hasSurface())) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        info.out.canDrawThisFrame = false;
        return;
    }

    if (CC_LIKELY(mSwapHistory.size() && !Properties::forceDrawFrame)) {
        nsecs_t latestVsync = mRenderThread.timeLord().latestVsync();
        SwapHistory& lastSwap = mSwapHistory.back();
        nsecs_t vsyncDelta = std::abs(lastSwap.vsyncTime - latestVsync);
        // The slight fudge-factor is to deal with cases where
        // the vsync was estimated due to being slow handling the signal.
        // See the logic in TimeLord#computeFrameTimeNanos or in
        // Choreographer.java for details on when this happens
        if (vsyncDelta < 2_ms) {
            // Already drew for this vsync pulse, UI draw request missed
            // the deadline for RT animations
            info.out.canDrawThisFrame = false;
        }
    } else {
        info.out.canDrawThisFrame = true;
    }

    // TODO: Do we need to abort out if the backdrop is added but not ready? Should that even
    // be an allowable combination?
    if (mRenderNodes.size() > 2 && !mRenderNodes[1]->isRenderable()) {
        info.out.canDrawThisFrame = false;
    }

    if (info.out.canDrawThisFrame) {
        int err = mNativeSurface->reserveNext();
        if (err != OK) {
            mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
            info.out.canDrawThisFrame = false;
            ALOGW("reserveNext failed, error = %d (%s)", err, strerror(-err));
            if (err != TIMED_OUT) {
                // A timed out surface can still recover, but assume others are permanently dead.
                setSurface(nullptr);
                return;
            }
        }
    } else {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
    }

    bool postedFrameCallback = false;
    if (info.out.hasAnimations || !info.out.canDrawThisFrame) {
        if (CC_UNLIKELY(!Properties::enableRTAnimations)) {
            info.out.requiresUiRedraw = true;
        }
        if (!info.out.requiresUiRedraw) {
            // If animationsNeedsRedraw is set don't bother posting for an RT anim
            // as we will just end up fighting the UI thread.
            mRenderThread.postFrameCallback(this);
            postedFrameCallback = true;
        }
    }

    if (!postedFrameCallback &&
        info.out.animatedImageDelay != TreeInfo::Out::kNoAnimatedImageDelay) {
        // Subtract the time of one frame so it can be displayed on time.
        const nsecs_t kFrameTime = mRenderThread.timeLord().frameIntervalNanos();
        if (info.out.animatedImageDelay <= kFrameTime) {
            mRenderThread.postFrameCallback(this);
        } else {
            const auto delay = info.out.animatedImageDelay - kFrameTime;
            int genId = mGenerationID;
            mRenderThread.queue().postDelayed(delay, [this, genId]() {
                if (mGenerationID == genId) {
                    mRenderThread.postFrameCallback(this);
                }
            });
        }
    }
}

void CanvasContext::stopDrawing() {
    mRenderThread.removeFrameCallback(this);
    mAnimationContext->pauseAnimators();
    mGenerationID++;
}

void CanvasContext::notifyFramePending() {
    ATRACE_CALL();
    mRenderThread.pushBackFrameCallback(this);
}

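// Renders the current frame: resolves accumulated damage, draws through the render pipeline,
// swaps buffers, records swap/dequeue timings into the swap history, and reports frame
// metrics and jank statistics once the frame completes.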
void CanvasContext::draw() {
    SkRect dirty;
    mDamageAccumulator.finish(&dirty);

    if (dirty.isEmpty() && Properties::skipEmptyFrames && !surfaceRequiresRedraw()) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        // Notify the callbacks, even if there's nothing to draw so they aren't waiting
        // indefinitely
        waitOnFences();
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, mFrameNumber);
        }
        mFrameCompleteCallbacks.clear();
        return;
    }

    mCurrentFrameInfo->markIssueDrawCommandsStart();

    Frame frame = mRenderPipeline->getFrame();
    SkRect windowDirty = computeDirtyRect(frame, &dirty);

    bool drew = mRenderPipeline->draw(frame, windowDirty, dirty, mLightGeometry, &mLayerUpdateQueue,
                                      mContentDrawBounds, mOpaque, mLightInfo, mRenderNodes,
                                      &(profiler()));

    int64_t frameCompleteNr = getFrameNumber();

    waitOnFences();

    if (mNativeSurface) {
        // TODO(b/165985262): measure performance impact
        const auto vsyncId = mCurrentFrameInfo->get(FrameInfoIndex::FrameTimelineVsyncId);
        if (vsyncId != UiFrameInfoBuilder::INVALID_VSYNC_ID) {
            const auto inputEventId = mCurrentFrameInfo->get(FrameInfoIndex::NewestInputEvent);
            native_window_set_frame_timeline_info(mNativeSurface->getNativeWindow(), vsyncId,
                                                  inputEventId);
        }
    }

    bool requireSwap = false;
    int error = OK;
    bool didSwap =
            mRenderPipeline->swapBuffers(frame, drew, windowDirty, mCurrentFrameInfo, &requireSwap);

    mIsDirty = false;

    if (requireSwap) {
        bool didDraw = true;
        // Handle any swapchain errors
        error = mNativeSurface->getAndClearError();
        if (error == TIMED_OUT) {
            // Try again
            mRenderThread.postFrameCallback(this);
            // But since this frame didn't happen, we need to mark full damage in the swap
            // history
            didDraw = false;
        } else if (error != OK || !didSwap) {
            // Unknown error, abandon the surface
            setSurface(nullptr);
            didDraw = false;
        }

        SwapHistory& swap = mSwapHistory.next();
        if (didDraw) {
            swap.damage = windowDirty;
        } else {
            float max = static_cast<float>(INT_MAX);
            swap.damage = SkRect::MakeWH(max, max);
        }
        swap.swapCompletedTime = systemTime(SYSTEM_TIME_MONOTONIC);
        swap.vsyncTime = mRenderThread.timeLord().latestVsync();
        if (didDraw) {
            nsecs_t dequeueStart =
                    ANativeWindow_getLastDequeueStartTime(mNativeSurface->getNativeWindow());
            if (dequeueStart < mCurrentFrameInfo->get(FrameInfoIndex::SyncStart)) {
                // Ignoring dequeue duration as it happened prior to frame render start
                // and thus is not part of the frame.
                swap.dequeueDuration = 0;
            } else {
                swap.dequeueDuration =
                        ANativeWindow_getLastDequeueDuration(mNativeSurface->getNativeWindow());
            }
            swap.queueDuration =
                    ANativeWindow_getLastQueueDuration(mNativeSurface->getNativeWindow());
        } else {
            swap.dequeueDuration = 0;
            swap.queueDuration = 0;
        }
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = swap.dequeueDuration;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = swap.queueDuration;
        mLast4FrameInfos[-1].second = frameCompleteNr;
        mHaveNewSurface = false;
        mFrameNumber = -1;
    } else {
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = 0;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = 0;
        mLast4FrameInfos[-1].second = -1;
    }

    // TODO: Use a fence for real completion?
    mCurrentFrameInfo->markFrameCompleted();

#if LOG_FRAMETIME_MMA
    float thisFrame = mCurrentFrameInfo->duration(FrameInfoIndex::IssueDrawCommandsStart,
                                                  FrameInfoIndex::FrameCompleted) /
                      NANOS_PER_MILLIS_F;
    if (sFrameCount) {
        sBenchMma = ((9 * sBenchMma) + thisFrame) / 10;
    } else {
        sBenchMma = thisFrame;
    }
    if (++sFrameCount == 10) {
        sFrameCount = 1;
        ALOGD("Average frame time: %.4f", sBenchMma);
    }
#endif

    if (didSwap) {
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, frameCompleteNr);
        }
        mFrameCompleteCallbacks.clear();
    }

    mJankTracker.finishFrame(*mCurrentFrameInfo);
    if (CC_UNLIKELY(mFrameMetricsReporter.get() != nullptr)) {
        mFrameMetricsReporter->reportFrameMetrics(mCurrentFrameInfo->data());
    }

    if (mLast4FrameInfos.size() == mLast4FrameInfos.capacity()) {
        // By looking 4 frames back, we guarantee all SF stats are available. There are at
        // most 3 buffers in BufferQueue. Surface object keeps stats for the last 8 frames.
        FrameInfo* forthBehind = mLast4FrameInfos.front().first;
        int64_t composedFrameId = mLast4FrameInfos.front().second;
        nsecs_t acquireTime = -1;
        if (mNativeSurface) {
            native_window_get_frame_timestamps(mNativeSurface->getNativeWindow(), composedFrameId,
                                               nullptr, &acquireTime, nullptr, nullptr, nullptr,
                                               nullptr, nullptr, nullptr, nullptr);
        }
        // Ignore default -1, NATIVE_WINDOW_TIMESTAMP_INVALID and NATIVE_WINDOW_TIMESTAMP_PENDING
        forthBehind->set(FrameInfoIndex::GpuCompleted) = acquireTime > 0 ? acquireTime : -1;
        mJankTracker.finishGpuDraw(*forthBehind);
    }

    mRenderThread.cacheManager().onFrameCompleted();
}

// Called by choreographer to do an RT-driven animation
void CanvasContext::doFrame() {
    if (!mRenderPipeline->isSurfaceReady()) return;
    prepareAndDraw(nullptr);
}

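// Returns the size of the next frame's buffer, or INT32_MAX x INT32_MAX when no surface is set.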
SkISize CanvasContext::getNextFrameSize() const {
    static constexpr SkISize defaultFrameSize = {INT32_MAX, INT32_MAX};
    if (mNativeSurface == nullptr) {
        return defaultFrameSize;
    }
    ANativeWindow* anw = mNativeSurface->getNativeWindow();

    SkISize size;
    size.fWidth = ANativeWindow_getWidth(anw);
    size.fHeight = ANativeWindow_getHeight(anw);
    return size;
}

void CanvasContext::prepareAndDraw(RenderNode* node) {
    ATRACE_CALL();

    nsecs_t vsync = mRenderThread.timeLord().computeFrameTimeNanos();
    int64_t vsyncId = mRenderThread.timeLord().lastVsyncId();
    int64_t frameDeadline = mRenderThread.timeLord().lastFrameDeadline();
    int64_t frameInfo[UI_THREAD_FRAME_INFO_SIZE];
    UiFrameInfoBuilder(frameInfo)
            .addFlag(FrameInfoFlags::RTAnimation)
            .setVsync(vsync, vsync, vsyncId, frameDeadline);

    TreeInfo info(TreeInfo::MODE_RT_ONLY, *this);
    prepareTree(info, frameInfo, systemTime(SYSTEM_TIME_MONOTONIC), node);
    if (info.out.canDrawThisFrame) {
        draw();
    } else {
        // wait on fences so tasks don't overlap next frame
        waitOnFences();
    }
}

void CanvasContext::markLayerInUse(RenderNode* node) {
    if (mPrefetchedLayers.erase(node)) {
        node->decStrong(nullptr);
    }
}

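// Destroys layers that were prefetched via buildLayer() but never claimed by a draw,
// releasing the strong references taken when they were built.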
void CanvasContext::freePrefetchedLayers() {
    if (mPrefetchedLayers.size()) {
        for (auto& node : mPrefetchedLayers) {
            ALOGW("Incorrectly called buildLayer on View: %s, destroying layer...",
                  node->getName());
            node->destroyLayers();
            node->decStrong(nullptr);
        }
        mPrefetchedLayers.clear();
    }
}

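// Eagerly builds a layer for the given node outside the normal frame loop: stops drawing,
// prepares the node's tree without running animations, renders its layer, and tracks it as
// prefetched until a frame claims it via markLayerInUse().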
void CanvasContext::buildLayer(RenderNode* node) {
    ATRACE_CALL();
    if (!mRenderPipeline->isContextReady()) return;

    // buildLayer() will leave the tree in an unknown state, so we must stop drawing
    stopDrawing();

    TreeInfo info(TreeInfo::MODE_FULL, *this);
    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.runAnimations = false;
    node->prepareTree(info);
    SkRect ignore;
    mDamageAccumulator.finish(&ignore);
    // Tickle the GENERIC property on node to mark it as dirty for damaging
    // purposes when the frame is actually drawn
    node->setPropertyFieldsDirty(RenderNode::GENERIC);

    mRenderPipeline->renderLayers(mLightGeometry, &mLayerUpdateQueue, mOpaque, mLightInfo);

    node->incStrong(nullptr);
    mPrefetchedLayers.insert(node);
}

void CanvasContext::destroyHardwareResources() {
    stopDrawing();
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyHardwareResources();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

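// Responds to memory-trim signals: at TRIM_MEMORY_UI_HIDDEN the caches are trimmed, and at
// TRIM_MEMORY_COMPLETE the entire rendering context is torn down as well.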
void CanvasContext::trimMemory(RenderThread& thread, int level) {
    ATRACE_CALL();
    if (!thread.getGrContext()) return;
    if (level >= TRIM_MEMORY_COMPLETE) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::Complete);
        thread.destroyRenderingContext();
    } else if (level >= TRIM_MEMORY_UI_HIDDEN) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::UiHidden);
    }
}

DeferredLayerUpdater* CanvasContext::createTextureLayer() {
    return mRenderPipeline->createTextureLayer();
}

void CanvasContext::dumpFrames(int fd) {
    mJankTracker.dumpStats(fd);
    mJankTracker.dumpFrames(fd);
}

void CanvasContext::resetFrameStats() {
    mJankTracker.reset();
}

void CanvasContext::setName(const std::string&& name) {
    mJankTracker.setDescription(JankTrackerType::Window, std::move(name));
}

void CanvasContext::waitOnFences() {
    if (mFrameFences.size()) {
        ATRACE_CALL();
        for (auto& fence : mFrameFences) {
            fence.get();
        }
        mFrameFences.clear();
    }
}

void CanvasContext::enqueueFrameWork(std::function<void()>&& func) {
    mFrameFences.push_back(CommonPool::async(std::move(func)));
}

int64_t CanvasContext::getFrameNumber() {
    // mFrameNumber is reset to -1 when the surface changes or we swap buffers
    if (mFrameNumber == -1 && mNativeSurface.get()) {
        mFrameNumber = ANativeWindow_getNextFrameId(mNativeSurface->getNativeWindow());
    }
    return mFrameNumber;
}

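// A redraw is required when a new surface was just attached or when the window's size no
// longer matches the size of the last frame we rendered.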
bool CanvasContext::surfaceRequiresRedraw() {
    if (!mNativeSurface) return false;
    if (mHaveNewSurface) return true;

    ANativeWindow* anw = mNativeSurface->getNativeWindow();
    const int width = ANativeWindow_getWidth(anw);
    const int height = ANativeWindow_getHeight(anw);

    return width != mLastFrameWidth || height != mLastFrameHeight;
}

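// Computes the region to repaint this frame. The dirty rect is expanded to the full frame
// when the surface is new, the viewport size changed, or the buffer age is unusable; for
// older buffers the damage of the intervening frames from the swap history is unioned in.
// Returns the window-space dirty rect for swapBuffers.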
SkRect CanvasContext::computeDirtyRect(const Frame& frame, SkRect* dirty) {
    if (frame.width() != mLastFrameWidth || frame.height() != mLastFrameHeight) {
        // can't rely on prior content of window if viewport size changes
        dirty->setEmpty();
        mLastFrameWidth = frame.width();
        mLastFrameHeight = frame.height();
    } else if (mHaveNewSurface || frame.bufferAge() == 0) {
        // New surface needs a full draw
        dirty->setEmpty();
    } else {
        if (!dirty->isEmpty() &&
            !dirty->intersect(SkRect::MakeIWH(frame.width(), frame.height()))) {
            ALOGW("Dirty " RECT_STRING " doesn't intersect with 0 0 %d %d ?", SK_RECT_ARGS(*dirty),
                  frame.width(), frame.height());
            dirty->setEmpty();
        }
        profiler().unionDirty(dirty);
    }

    if (dirty->isEmpty()) {
        dirty->setIWH(frame.width(), frame.height());
    }

    // At this point dirty is the area of the window to update. However,
    // the area of the frame we need to repaint is potentially different, so
    // stash the screen area for later
    SkRect windowDirty(*dirty);

    // If the buffer age is 0 we do a full-screen repaint (handled above)
    // If the buffer age is 1 the buffer contents are the same as they were
    // last frame so there's nothing to union() against
    // Therefore we only care about the > 1 case.
    if (frame.bufferAge() > 1) {
        if (frame.bufferAge() > (int)mSwapHistory.size()) {
            // We don't have enough history to handle this old of a buffer
            // Just do a full-draw
            dirty->setIWH(frame.width(), frame.height());
        } else {
            // At this point we haven't yet added the latest frame
            // to the damage history (happens below)
            // So we need to damage
            for (int i = mSwapHistory.size() - 1;
                 i > ((int)mSwapHistory.size()) - frame.bufferAge(); i--) {
                dirty->join(mSwapHistory[i].damage);
            }
        }
    }

    return windowDirty;
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */