/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CanvasContext.h"

#include <apex/window.h>
#include <fcntl.h>
#include <strings.h>
#include <sys/stat.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include "../Properties.h"
#include "AnimationContext.h"
#include "Frame.h"
#include "LayerUpdateQueue.h"
#include "Properties.h"
#include "RenderThread.h"
#include "hwui/Canvas.h"
#include "pipeline/skia/SkiaOpenGLPipeline.h"
#include "pipeline/skia/SkiaPipeline.h"
#include "pipeline/skia/SkiaVulkanPipeline.h"
#include "thread/CommonPool.h"
#include "utils/GLUtils.h"
#include "utils/TimeUtils.h"
#include "utils/TraceUtils.h"

#define TRIM_MEMORY_COMPLETE 80
#define TRIM_MEMORY_UI_HIDDEN 20

#define LOG_FRAMETIME_MMA 0

#if LOG_FRAMETIME_MMA
static float sBenchMma = 0;
static int sFrameCount = 0;
static const float NANOS_PER_MILLIS_F = 1000000.0f;
#endif

namespace android {
namespace uirenderer {
namespace renderthread {

CanvasContext* CanvasContext::create(RenderThread& thread, bool translucent,
                                     RenderNode* rootRenderNode, IContextFactory* contextFactory) {
    auto renderType = Properties::getRenderPipelineType();

    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaOpenGLPipeline>(thread));
        case RenderPipelineType::SkiaVulkan:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaVulkanPipeline>(thread));
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
    return nullptr;
}

void CanvasContext::invokeFunctor(const RenderThread& thread, Functor* functor) {
    ATRACE_CALL();
    auto renderType = Properties::getRenderPipelineType();
    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            skiapipeline::SkiaOpenGLPipeline::invokeFunctor(thread, functor);
            break;
        case RenderPipelineType::SkiaVulkan:
            skiapipeline::SkiaVulkanPipeline::invokeFunctor(thread, functor);
            break;
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
}

void CanvasContext::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    skiapipeline::SkiaPipeline::prepareToDraw(thread, bitmap);
}

CanvasContext::CanvasContext(RenderThread& thread, bool translucent, RenderNode* rootRenderNode,
                             IContextFactory* contextFactory,
                             std::unique_ptr<IRenderPipeline> renderPipeline)
        : mRenderThread(thread)
        , mGenerationID(0)
        , mOpaque(!translucent)
        , mAnimationContext(contextFactory->createAnimationContext(mRenderThread.timeLord()))
        , mJankTracker(&thread.globalProfileData())
        , mProfiler(mJankTracker.frames(), thread.timeLord().frameIntervalNanos())
        , mContentDrawBounds(0, 0, 0, 0)
        , mRenderPipeline(std::move(renderPipeline)) {
    rootRenderNode->makeRoot();
    mRenderNodes.emplace_back(rootRenderNode);
    mProfiler.setDensity(DeviceInfo::getDensity());
}

CanvasContext::~CanvasContext() {
    destroy();
    for (auto& node : mRenderNodes) {
        node->clearRoot();
    }
    mRenderNodes.clear();
}

void CanvasContext::addRenderNode(RenderNode* node, bool placeFront) {
    int pos = placeFront ? 0 : static_cast<int>(mRenderNodes.size());
    node->makeRoot();
    mRenderNodes.emplace(mRenderNodes.begin() + pos, node);
}

void CanvasContext::removeRenderNode(RenderNode* node) {
    node->clearRoot();
    mRenderNodes.erase(std::remove(mRenderNodes.begin(), mRenderNodes.end(), node),
                       mRenderNodes.end());
}

void CanvasContext::destroy() {
    stopDrawing();
    setSurface(nullptr);
    freePrefetchedLayers();
    destroyHardwareResources();
    mAnimationContext->destroy();
}

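// Sizes the window's buffer queue based on its minimum undequeued buffer count.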
static void setBufferCount(ANativeWindow* window) {
    int query_value;
    int err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &query_value);
    if (err != 0 || query_value < 0) {
        ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err, query_value);
        return;
    }
    auto min_undequeued_buffers = static_cast<uint32_t>(query_value);

    // We only need to set min_undequeued + 2 because the renderahead amount was already
    // factored into the query for min_undequeued
    int bufferCount = min_undequeued_buffers + 2;
    native_window_set_buffer_count(window, bufferCount);
}

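// Wraps the incoming window in a ReliableSurface (or clears it when null) and
// rebinds the render pipeline via setupPipelineSurface().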
void CanvasContext::setSurface(ANativeWindow* window, bool enableTimeout) {
    ATRACE_CALL();

    if (window) {
        int extraBuffers = 0;
        native_window_get_extra_buffer_count(window, &extraBuffers);

        mNativeSurface = std::make_unique<ReliableSurface>(window);
        mNativeSurface->init();
        if (enableTimeout) {
            // TODO: Fix error handling & re-shorten timeout
            ANativeWindow_setDequeueTimeout(window, 4000_ms);
        }
        mNativeSurface->setExtraBufferCount(extraBuffers);
    } else {
        mNativeSurface = nullptr;
    }
    setupPipelineSurface();
}

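// Hands the current native surface (if any) to the render pipeline, sets the
// buffer count when needed, and enables frame timestamps on success.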
void CanvasContext::setupPipelineSurface() {
    bool hasSurface = mRenderPipeline->setSurface(
            mNativeSurface ? mNativeSurface->getNativeWindow() : nullptr, mSwapBehavior);

    if (mNativeSurface && !mNativeSurface->didSetExtraBuffers()) {
        setBufferCount(mNativeSurface->getNativeWindow());
    }

    mFrameNumber = -1;

    if (mNativeSurface != nullptr && hasSurface) {
        mHaveNewSurface = true;
        mSwapHistory.clear();
        // Enable frame stats after the surface has been bound to the appropriate graphics API.
        // Order is important when new and old surfaces are the same, because old surface has
        // its frame stats disabled automatically.
        native_window_enable_frame_timestamps(mNativeSurface->getNativeWindow(), true);
    } else {
        mRenderThread.removeFrameCallback(this);
        mGenerationID++;
    }
}

void CanvasContext::setSwapBehavior(SwapBehavior swapBehavior) {
    mSwapBehavior = swapBehavior;
}

bool CanvasContext::pauseSurface() {
    mGenerationID++;
    return mRenderThread.removeFrameCallback(this);
}

void CanvasContext::setStopped(bool stopped) {
    if (mStopped != stopped) {
        mStopped = stopped;
        if (mStopped) {
            mGenerationID++;
            mRenderThread.removeFrameCallback(this);
            mRenderPipeline->onStop();
        } else if (mIsDirty && hasSurface()) {
            mRenderThread.postFrameCallback(this);
        }
    }
}

void CanvasContext::allocateBuffers() {
    if (mNativeSurface) {
        ANativeWindow_tryAllocateBuffers(mNativeSurface->getNativeWindow());
    }
}

void CanvasContext::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mLightInfo.ambientShadowAlpha = ambientShadowAlpha;
    mLightInfo.spotShadowAlpha = spotShadowAlpha;
}

void CanvasContext::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mLightGeometry.center = lightCenter;
    mLightGeometry.radius = lightRadius;
}

void CanvasContext::setOpaque(bool opaque) {
    mOpaque = opaque;
}

void CanvasContext::setColorMode(ColorMode mode) {
    mRenderPipeline->setSurfaceColorProperties(mode);
    setupPipelineSurface();
}

bool CanvasContext::makeCurrent() {
    if (mStopped) return false;

    auto result = mRenderPipeline->makeCurrent();
    switch (result) {
        case MakeCurrentResult::AlreadyCurrent:
            return true;
        case MakeCurrentResult::Failed:
            mHaveNewSurface = true;
            setSurface(nullptr);
            return false;
        case MakeCurrentResult::Succeeded:
            mHaveNewSurface = true;
            return true;
        default:
            LOG_ALWAYS_FATAL("unexpected result %d from IRenderPipeline::makeCurrent",
                             (int32_t)result);
    }

    return true;
}

static bool wasSkipped(FrameInfo* info) {
    return info && ((*info)[FrameInfoIndex::Flags] & FrameInfoFlags::SkippedFrame);
}

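// Heuristic: reports the swap chain as stuffed only when every recorded swap
// saw a slow dequeue or queue time and consecutive swaps completed within a
// few frame intervals of each other.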
bool CanvasContext::isSwapChainStuffed() {
    static const auto SLOW_THRESHOLD = 6_ms;

    if (mSwapHistory.size() != mSwapHistory.capacity()) {
        // We want at least 3 frames of history before attempting to
        // guess if the queue is stuffed
        return false;
    }
    nsecs_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    auto& swapA = mSwapHistory[0];

    // Was there a happy queue & dequeue time? If so, don't
    // consider it stuffed
    if (swapA.dequeueDuration < SLOW_THRESHOLD && swapA.queueDuration < SLOW_THRESHOLD) {
        return false;
    }

    for (size_t i = 1; i < mSwapHistory.size(); i++) {
        auto& swapB = mSwapHistory[i];

        // If there's a multi-frameInterval gap we effectively already dropped a frame,
        // so consider the queue healthy.
        if (std::abs(swapA.swapCompletedTime - swapB.swapCompletedTime) > frameInterval * 3) {
            return false;
        }

        // Was there a happy queue & dequeue time? If so, don't
        // consider it stuffed
        if (swapB.dequeueDuration < SLOW_THRESHOLD && swapB.queueDuration < SLOW_THRESHOLD) {
            return false;
        }

        swapA = swapB;
    }

    // All signs point to a stuffed swap chain
    ATRACE_NAME("swap chain stuffed");
    return true;
}

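// Syncs frame state from the UI thread, runs animators and prepares the render
// node tree, then decides whether this frame can be drawn (or must be skipped)
// and schedules any follow-up frame callbacks.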
void CanvasContext::prepareTree(TreeInfo& info, int64_t* uiFrameInfo, int64_t syncQueued,
                                RenderNode* target) {
    mRenderThread.removeFrameCallback(this);

    // If the previous frame was dropped we don't need to hold onto it, so
    // just keep using the previous frame's structure instead
    if (!wasSkipped(mCurrentFrameInfo)) {
        mCurrentFrameInfo = mJankTracker.startFrame();
        mLast4FrameInfos.next().first = mCurrentFrameInfo;
    }
    mCurrentFrameInfo->importUiThreadInfo(uiFrameInfo);
    mCurrentFrameInfo->set(FrameInfoIndex::SyncQueued) = syncQueued;
    mCurrentFrameInfo->markSyncStart();

    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.damageGenerationId = mDamageId++;
    info.out.canDrawThisFrame = true;

    mAnimationContext->startFrame(info.mode);
    for (const sp<RenderNode>& node : mRenderNodes) {
        // Only the primary target node will be drawn in full - all other nodes are drawn in
        // real time mode. In case of a window, the primary node is the window content and the
        // other node(s) are non client / filler nodes.
        info.mode = (node.get() == target ? TreeInfo::MODE_FULL : TreeInfo::MODE_RT_ONLY);
        node->prepareTree(info);
        GL_CHECKPOINT(MODERATE);
    }
    mAnimationContext->runRemainingAnimations(info);
    GL_CHECKPOINT(MODERATE);

    freePrefetchedLayers();
    GL_CHECKPOINT(MODERATE);

    mIsDirty = true;

    if (CC_UNLIKELY(!hasSurface())) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        info.out.canDrawThisFrame = false;
        return;
    }

    if (CC_LIKELY(mSwapHistory.size() && !Properties::forceDrawFrame)) {
        nsecs_t latestVsync = mRenderThread.timeLord().latestVsync();
        SwapHistory& lastSwap = mSwapHistory.back();
        nsecs_t vsyncDelta = std::abs(lastSwap.vsyncTime - latestVsync);
        // The slight fudge-factor is to deal with cases where
        // the vsync was estimated due to being slow handling the signal.
        // See the logic in TimeLord#computeFrameTimeNanos or in
        // Choreographer.java for details on when this happens
        if (vsyncDelta < 2_ms) {
            // Already drew for this vsync pulse, UI draw request missed
            // the deadline for RT animations
            info.out.canDrawThisFrame = false;
        }
    } else {
        info.out.canDrawThisFrame = true;
    }

    // TODO: Do we need to abort out if the backdrop is added but not ready? Should that even
    // be an allowable combination?
    if (mRenderNodes.size() > 2 && !mRenderNodes[1]->isRenderable()) {
        info.out.canDrawThisFrame = false;
    }

    if (info.out.canDrawThisFrame) {
        int err = mNativeSurface->reserveNext();
        if (err != OK) {
            mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
            info.out.canDrawThisFrame = false;
            ALOGW("reserveNext failed, error = %d (%s)", err, strerror(-err));
            if (err != TIMED_OUT) {
                // A timed out surface can still recover, but assume others are permanently dead.
                setSurface(nullptr);
                return;
            }
        }
    } else {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
    }

    bool postedFrameCallback = false;
    if (info.out.hasAnimations || !info.out.canDrawThisFrame) {
        if (CC_UNLIKELY(!Properties::enableRTAnimations)) {
            info.out.requiresUiRedraw = true;
        }
        if (!info.out.requiresUiRedraw) {
            // If animationsNeedsRedraw is set don't bother posting for an RT anim
            // as we will just end up fighting the UI thread.
            mRenderThread.postFrameCallback(this);
            postedFrameCallback = true;
        }
    }

    if (!postedFrameCallback &&
        info.out.animatedImageDelay != TreeInfo::Out::kNoAnimatedImageDelay) {
        // Subtract the time of one frame so it can be displayed on time.
        const nsecs_t kFrameTime = mRenderThread.timeLord().frameIntervalNanos();
        if (info.out.animatedImageDelay <= kFrameTime) {
            mRenderThread.postFrameCallback(this);
        } else {
            const auto delay = info.out.animatedImageDelay - kFrameTime;
            int genId = mGenerationID;
            mRenderThread.queue().postDelayed(delay, [this, genId]() {
                if (mGenerationID == genId) {
                    mRenderThread.postFrameCallback(this);
                }
            });
        }
    }
}

void CanvasContext::stopDrawing() {
    mRenderThread.removeFrameCallback(this);
    mAnimationContext->pauseAnimators();
    mGenerationID++;
}

void CanvasContext::notifyFramePending() {
    ATRACE_CALL();
    mRenderThread.pushBackFrameCallback(this);
}

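// Issues the frame: computes the dirty region, draws through the pipeline,
// swaps buffers, and records swap history plus jank/frame metrics.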
void CanvasContext::draw() {
    SkRect dirty;
    mDamageAccumulator.finish(&dirty);

    if (dirty.isEmpty() && Properties::skipEmptyFrames && !surfaceRequiresRedraw()) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        // Notify the callbacks, even if there's nothing to draw so they aren't waiting
        // indefinitely
        waitOnFences();
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, mFrameNumber);
        }
        mFrameCompleteCallbacks.clear();
        return;
    }

    mCurrentFrameInfo->markIssueDrawCommandsStart();

    Frame frame = mRenderPipeline->getFrame();
    SkRect windowDirty = computeDirtyRect(frame, &dirty);

    bool drew = mRenderPipeline->draw(frame, windowDirty, dirty, mLightGeometry, &mLayerUpdateQueue,
                                      mContentDrawBounds, mOpaque, mLightInfo, mRenderNodes,
                                      &(profiler()));

    int64_t frameCompleteNr = getFrameNumber();

    waitOnFences();

    if (mNativeSurface) {
        // TODO(b/165985262): measure performance impact
        const auto vsyncId = mCurrentFrameInfo->get(FrameInfoIndex::FrameTimelineVsyncId);
        if (vsyncId != UiFrameInfoBuilder::INVALID_VSYNC_ID) {
            const auto inputEventId = mCurrentFrameInfo->get(FrameInfoIndex::NewestInputEvent);
            native_window_set_frame_timeline_info(mNativeSurface->getNativeWindow(), vsyncId,
                                                  inputEventId);
        }
    }

    bool requireSwap = false;
    int error = OK;
    bool didSwap =
            mRenderPipeline->swapBuffers(frame, drew, windowDirty, mCurrentFrameInfo, &requireSwap);

    mIsDirty = false;

    if (requireSwap) {
        bool didDraw = true;
        // Handle any swapchain errors
        error = mNativeSurface->getAndClearError();
        if (error == TIMED_OUT) {
            // Try again
            mRenderThread.postFrameCallback(this);
            // But since this frame didn't happen, we need to mark full damage in the swap
            // history
            didDraw = false;
        } else if (error != OK || !didSwap) {
            // Unknown error, abandon the surface
            setSurface(nullptr);
            didDraw = false;
        }

        SwapHistory& swap = mSwapHistory.next();
        if (didDraw) {
            swap.damage = windowDirty;
        } else {
            float max = static_cast<float>(INT_MAX);
            swap.damage = SkRect::MakeWH(max, max);
        }
        swap.swapCompletedTime = systemTime(SYSTEM_TIME_MONOTONIC);
        swap.vsyncTime = mRenderThread.timeLord().latestVsync();
        if (didDraw) {
            nsecs_t dequeueStart =
                    ANativeWindow_getLastDequeueStartTime(mNativeSurface->getNativeWindow());
            if (dequeueStart < mCurrentFrameInfo->get(FrameInfoIndex::SyncStart)) {
                // Ignoring dequeue duration as it happened prior to frame render start
                // and thus is not part of the frame.
                swap.dequeueDuration = 0;
            } else {
                swap.dequeueDuration =
                        ANativeWindow_getLastDequeueDuration(mNativeSurface->getNativeWindow());
            }
            swap.queueDuration =
                    ANativeWindow_getLastQueueDuration(mNativeSurface->getNativeWindow());
        } else {
            swap.dequeueDuration = 0;
            swap.queueDuration = 0;
        }
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = swap.dequeueDuration;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = swap.queueDuration;
        mLast4FrameInfos[-1].second = frameCompleteNr;
        mHaveNewSurface = false;
        mFrameNumber = -1;
    } else {
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = 0;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = 0;
        mLast4FrameInfos[-1].second = -1;
    }

    // TODO: Use a fence for real completion?
    mCurrentFrameInfo->markFrameCompleted();

#if LOG_FRAMETIME_MMA
    float thisFrame = mCurrentFrameInfo->duration(FrameInfoIndex::IssueDrawCommandsStart,
                                                  FrameInfoIndex::FrameCompleted) /
                      NANOS_PER_MILLIS_F;
    if (sFrameCount) {
        sBenchMma = ((9 * sBenchMma) + thisFrame) / 10;
    } else {
        sBenchMma = thisFrame;
    }
    if (++sFrameCount == 10) {
        sFrameCount = 1;
        ALOGD("Average frame time: %.4f", sBenchMma);
    }
#endif

    if (didSwap) {
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, frameCompleteNr);
        }
        mFrameCompleteCallbacks.clear();
    }

    mJankTracker.finishFrame(*mCurrentFrameInfo);
    if (CC_UNLIKELY(mFrameMetricsReporter.get() != nullptr)) {
        mFrameMetricsReporter->reportFrameMetrics(mCurrentFrameInfo->data());
    }

    if (mLast4FrameInfos.size() == mLast4FrameInfos.capacity()) {
        // By looking 4 frames back, we guarantee all SF stats are available. There are at
        // most 3 buffers in BufferQueue. Surface object keeps stats for the last 8 frames.
        FrameInfo* forthBehind = mLast4FrameInfos.front().first;
        int64_t composedFrameId = mLast4FrameInfos.front().second;
        nsecs_t acquireTime = -1;
        if (mNativeSurface) {
            native_window_get_frame_timestamps(mNativeSurface->getNativeWindow(), composedFrameId,
                                               nullptr, &acquireTime, nullptr, nullptr, nullptr,
                                               nullptr, nullptr, nullptr, nullptr);
        }
        // Ignore default -1, NATIVE_WINDOW_TIMESTAMP_INVALID and NATIVE_WINDOW_TIMESTAMP_PENDING
        forthBehind->set(FrameInfoIndex::GpuCompleted) = acquireTime > 0 ? acquireTime : -1;
        mJankTracker.finishGpuDraw(*forthBehind);
    }

    mRenderThread.cacheManager().onFrameCompleted();
}

// Called by choreographer to do an RT-driven animation
void CanvasContext::doFrame() {
    if (!mRenderPipeline->isSurfaceReady()) return;
    prepareAndDraw(nullptr);
}

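// Returns the size the next frame will be rendered at, or an INT32_MAX
// sentinel when no surface is attached.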
SkISize CanvasContext::getNextFrameSize() const {
    static constexpr SkISize defaultFrameSize = {INT32_MAX, INT32_MAX};
    if (mNativeSurface == nullptr) {
        return defaultFrameSize;
    }
    ANativeWindow* anw = mNativeSurface->getNativeWindow();

    SkISize size;
    size.fWidth = ANativeWindow_getWidth(anw);
    size.fHeight = ANativeWindow_getHeight(anw);
    return size;
}

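// Runs a full RT frame: builds frame info from the latest vsync, prepares the
// tree, and draws if the frame wasn't dropped.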
void CanvasContext::prepareAndDraw(RenderNode* node) {
    ATRACE_CALL();

    nsecs_t vsync = mRenderThread.timeLord().computeFrameTimeNanos();
    int64_t vsyncId = mRenderThread.timeLord().lastVsyncId();
    int64_t frameDeadline = mRenderThread.timeLord().lastFrameDeadline();
    int64_t frameInfo[UI_THREAD_FRAME_INFO_SIZE];
    UiFrameInfoBuilder(frameInfo)
            .addFlag(FrameInfoFlags::RTAnimation)
            .setVsync(vsync, vsync, vsyncId, frameDeadline);

    TreeInfo info(TreeInfo::MODE_RT_ONLY, *this);
    prepareTree(info, frameInfo, systemTime(SYSTEM_TIME_MONOTONIC), node);
    if (info.out.canDrawThisFrame) {
        draw();
    } else {
        // wait on fences so tasks don't overlap next frame
        waitOnFences();
    }
}

void CanvasContext::markLayerInUse(RenderNode* node) {
    if (mPrefetchedLayers.erase(node)) {
        node->decStrong(nullptr);
    }
}

void CanvasContext::freePrefetchedLayers() {
    if (mPrefetchedLayers.size()) {
        for (auto& node : mPrefetchedLayers) {
            ALOGW("Incorrectly called buildLayer on View: %s, destroying layer...",
                  node->getName());
            node->destroyLayers();
            node->decStrong(nullptr);
        }
        mPrefetchedLayers.clear();
    }
}

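// Renders the node into its layer ahead of time. The node is held in
// mPrefetchedLayers until the UI thread adopts it via markLayerInUse().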
void CanvasContext::buildLayer(RenderNode* node) {
    ATRACE_CALL();
    if (!mRenderPipeline->isContextReady()) return;

    // buildLayer() will leave the tree in an unknown state, so we must stop drawing
    stopDrawing();

    TreeInfo info(TreeInfo::MODE_FULL, *this);
    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.runAnimations = false;
    node->prepareTree(info);
    SkRect ignore;
    mDamageAccumulator.finish(&ignore);
    // Tickle the GENERIC property on node to mark it as dirty for damaging
    // purposes when the frame is actually drawn
    node->setPropertyFieldsDirty(RenderNode::GENERIC);

    mRenderPipeline->renderLayers(mLightGeometry, &mLayerUpdateQueue, mOpaque, mLightInfo);

    node->incStrong(nullptr);
    mPrefetchedLayers.insert(node);
}

void CanvasContext::destroyHardwareResources() {
    stopDrawing();
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyHardwareResources();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

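// Responds to TRIM_MEMORY_* levels: purges caches on UI_HIDDEN and also tears
// down the rendering context on COMPLETE.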
void CanvasContext::trimMemory(RenderThread& thread, int level) {
    ATRACE_CALL();
    if (!thread.getGrContext()) return;
    if (level >= TRIM_MEMORY_COMPLETE) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::Complete);
        thread.destroyRenderingContext();
    } else if (level >= TRIM_MEMORY_UI_HIDDEN) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::UiHidden);
    }
}

DeferredLayerUpdater* CanvasContext::createTextureLayer() {
    return mRenderPipeline->createTextureLayer();
}

void CanvasContext::dumpFrames(int fd) {
    mJankTracker.dumpStats(fd);
    mJankTracker.dumpFrames(fd);
}

void CanvasContext::resetFrameStats() {
    mJankTracker.reset();
}

void CanvasContext::setName(const std::string&& name) {
    mJankTracker.setDescription(JankTrackerType::Window, std::move(name));
}

void CanvasContext::waitOnFences() {
    if (mFrameFences.size()) {
        ATRACE_CALL();
        for (auto& fence : mFrameFences) {
            fence.get();
        }
        mFrameFences.clear();
    }
}

void CanvasContext::enqueueFrameWork(std::function<void()>&& func) {
    mFrameFences.push_back(CommonPool::async(std::move(func)));
}

int64_t CanvasContext::getFrameNumber() {
    // mFrameNumber is reset to -1 when the surface changes or we swap buffers
    if (mFrameNumber == -1 && mNativeSurface.get()) {
        mFrameNumber = ANativeWindow_getNextFrameId(mNativeSurface->getNativeWindow());
    }
    return mFrameNumber;
}

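// True when a new surface was attached or the window size changed since the
// last frame, so the previous content can't simply be reused.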
bool CanvasContext::surfaceRequiresRedraw() {
    if (!mNativeSurface) return false;
    if (mHaveNewSurface) return true;

    ANativeWindow* anw = mNativeSurface->getNativeWindow();
    const int width = ANativeWindow_getWidth(anw);
    const int height = ANativeWindow_getHeight(anw);

    return width != mLastFrameWidth || height != mLastFrameHeight;
}

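// Clamps *dirty to the frame, expands it with prior damage based on the
// buffer's age, and returns the screen-space dirty rect to report at swap time.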
SkRect CanvasContext::computeDirtyRect(const Frame& frame, SkRect* dirty) {
    if (frame.width() != mLastFrameWidth || frame.height() != mLastFrameHeight) {
        // can't rely on prior content of window if viewport size changes
        dirty->setEmpty();
        mLastFrameWidth = frame.width();
        mLastFrameHeight = frame.height();
    } else if (mHaveNewSurface || frame.bufferAge() == 0) {
        // New surface needs a full draw
        dirty->setEmpty();
    } else {
        if (!dirty->isEmpty() && !dirty->intersect(SkRect::MakeIWH(frame.width(), frame.height()))) {
            ALOGW("Dirty " RECT_STRING " doesn't intersect with 0 0 %d %d ?", SK_RECT_ARGS(*dirty),
                  frame.width(), frame.height());
            dirty->setEmpty();
        }
        profiler().unionDirty(dirty);
    }

    if (dirty->isEmpty()) {
        dirty->setIWH(frame.width(), frame.height());
    }

    // At this point dirty is the area of the window to update. However,
    // the area of the frame we need to repaint is potentially different, so
    // stash the screen area for later
    SkRect windowDirty(*dirty);

    // If the buffer age is 0 we do a full-screen repaint (handled above)
    // If the buffer age is 1 the buffer contents are the same as they were
    // last frame so there's nothing to union() against
    // Therefore we only care about the > 1 case.
    if (frame.bufferAge() > 1) {
        if (frame.bufferAge() > (int)mSwapHistory.size()) {
            // We don't have enough history to handle this old of a buffer
            // Just do a full-draw
            dirty->setIWH(frame.width(), frame.height());
        } else {
            // At this point we haven't yet added the latest frame
            // to the damage history (happens below)
            // So we need to damage
            for (int i = mSwapHistory.size() - 1;
                 i > ((int)mSwapHistory.size()) - frame.bufferAge(); i--) {
                dirty->join(mSwapHistory[i].damage);
            }
        }
    }

    return windowDirty;
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */