/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CanvasContext.h"

#include <apex/window.h>
#include <fcntl.h>
#include <strings.h>
#include <sys/stat.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include "../Properties.h"
#include "AnimationContext.h"
#include "Frame.h"
#include "LayerUpdateQueue.h"
#include "Properties.h"
#include "RenderThread.h"
#include "hwui/Canvas.h"
#include "pipeline/skia/SkiaOpenGLPipeline.h"
#include "pipeline/skia/SkiaPipeline.h"
#include "pipeline/skia/SkiaVulkanPipeline.h"
#include "thread/CommonPool.h"
#include "utils/GLUtils.h"
#include "utils/TimeUtils.h"
#include "utils/TraceUtils.h"

#define TRIM_MEMORY_COMPLETE 80
#define TRIM_MEMORY_UI_HIDDEN 20

#define LOG_FRAMETIME_MMA 0

#if LOG_FRAMETIME_MMA
static float sBenchMma = 0;
static int sFrameCount = 0;
static const float NANOS_PER_MILLIS_F = 1000000.0f;
#endif

namespace android {
namespace uirenderer {
namespace renderthread {

CanvasContext* CanvasContext::create(RenderThread& thread, bool translucent,
                                     RenderNode* rootRenderNode, IContextFactory* contextFactory) {
    auto renderType = Properties::getRenderPipelineType();

    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaOpenGLPipeline>(thread));
        case RenderPipelineType::SkiaVulkan:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaVulkanPipeline>(thread));
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
    return nullptr;
}

void CanvasContext::invokeFunctor(const RenderThread& thread, Functor* functor) {
    ATRACE_CALL();
    auto renderType = Properties::getRenderPipelineType();
    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            skiapipeline::SkiaOpenGLPipeline::invokeFunctor(thread, functor);
            break;
        case RenderPipelineType::SkiaVulkan:
            skiapipeline::SkiaVulkanPipeline::invokeFunctor(thread, functor);
            break;
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
}

void CanvasContext::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    skiapipeline::SkiaPipeline::prepareToDraw(thread, bitmap);
}

CanvasContext::CanvasContext(RenderThread& thread, bool translucent, RenderNode* rootRenderNode,
                             IContextFactory* contextFactory,
                             std::unique_ptr<IRenderPipeline> renderPipeline)
        : mRenderThread(thread)
        , mGenerationID(0)
        , mOpaque(!translucent)
        , mAnimationContext(contextFactory->createAnimationContext(mRenderThread.timeLord()))
        , mJankTracker(&thread.globalProfileData())
        , mProfiler(mJankTracker.frames(), thread.timeLord().frameIntervalNanos())
        , mContentDrawBounds(0, 0, 0, 0)
        , mRenderPipeline(std::move(renderPipeline)) {
    rootRenderNode->makeRoot();
    mRenderNodes.emplace_back(rootRenderNode);
    mProfiler.setDensity(DeviceInfo::getDensity());
    setRenderAheadDepth(Properties::defaultRenderAhead);
}

CanvasContext::~CanvasContext() {
    destroy();
    for (auto& node : mRenderNodes) {
        node->clearRoot();
    }
    mRenderNodes.clear();
}

void CanvasContext::addRenderNode(RenderNode* node, bool placeFront) {
    int pos = placeFront ? 0 : static_cast<int>(mRenderNodes.size());
    node->makeRoot();
    mRenderNodes.emplace(mRenderNodes.begin() + pos, node);
}

void CanvasContext::removeRenderNode(RenderNode* node) {
    node->clearRoot();
    mRenderNodes.erase(std::remove(mRenderNodes.begin(), mRenderNodes.end(), node),
                       mRenderNodes.end());
}

void CanvasContext::destroy() {
    stopDrawing();
    setSurface(nullptr);
    freePrefetchedLayers();
    destroyHardwareResources();
    mAnimationContext->destroy();
}

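// Sizes the window's buffer queue: queries NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS and requests
// two buffers beyond that minimum, relying on any render-ahead already being folded into the
// query result.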
static void setBufferCount(ANativeWindow* window) {
    int query_value;
    int err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &query_value);
    if (err != 0 || query_value < 0) {
        ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err, query_value);
        return;
    }
    auto min_undequeued_buffers = static_cast<uint32_t>(query_value);

    // We only need to set min_undequeued + 2 because the renderahead amount was already
    // factored into the query for min_undequeued
    int bufferCount = min_undequeued_buffers + 2;
    native_window_set_buffer_count(window, bufferCount);
}

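// Attaches (or detaches, when window is null) the backing ANativeWindow. Picks the render-ahead
// capacity from the configured depth and the display's max refresh rate, wraps the window in a
// ReliableSurface, and then rebinds the pipeline surface.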
void CanvasContext::setSurface(ANativeWindow* window, bool enableTimeout) {
    ATRACE_CALL();

    if (mRenderAheadDepth == 0 && DeviceInfo::get()->getMaxRefreshRate() > 66.6f) {
        mFixedRenderAhead = false;
        mRenderAheadCapacity = 1;
    } else {
        mFixedRenderAhead = true;
        mRenderAheadCapacity = mRenderAheadDepth;
    }

    if (window) {
        mNativeSurface = std::make_unique<ReliableSurface>(window);
        mNativeSurface->init();
        if (enableTimeout) {
            // TODO: Fix error handling & re-shorten timeout
            ANativeWindow_setDequeueTimeout(window, 4000_ms);
        }
        mNativeSurface->setExtraBufferCount(mRenderAheadCapacity);
    } else {
        mNativeSurface = nullptr;
    }
    setupPipelineSurface();
}

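// Hands the current native window (possibly null) to the render pipeline, sets the window's
// buffer count if it hasn't been set via extra buffers, and enables frame timestamps on success.
// On failure the frame callback is removed and the generation ID is bumped, invalidating any
// delayed frame work.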
void CanvasContext::setupPipelineSurface() {
    bool hasSurface = mRenderPipeline->setSurface(
            mNativeSurface ? mNativeSurface->getNativeWindow() : nullptr, mSwapBehavior);

    if (mNativeSurface && !mNativeSurface->didSetExtraBuffers()) {
        setBufferCount(mNativeSurface->getNativeWindow());
    }

    mFrameNumber = -1;

    if (mNativeSurface != nullptr && hasSurface) {
        mHaveNewSurface = true;
        mSwapHistory.clear();
        // Enable frame stats after the surface has been bound to the appropriate graphics API.
        // Order is important when new and old surfaces are the same, because old surface has
        // its frame stats disabled automatically.
        native_window_enable_frame_timestamps(mNativeSurface->getNativeWindow(), true);
    } else {
        mRenderThread.removeFrameCallback(this);
        mGenerationID++;
    }
}

void CanvasContext::setSwapBehavior(SwapBehavior swapBehavior) {
    mSwapBehavior = swapBehavior;
}

bool CanvasContext::pauseSurface() {
    mGenerationID++;
    return mRenderThread.removeFrameCallback(this);
}

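// Pauses or resumes rendering for this context. Stopping bumps the generation ID, drops the
// pending frame callback, and notifies the pipeline; resuming re-posts a frame callback if
// there is unconsumed damage and a surface to draw into.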
void CanvasContext::setStopped(bool stopped) {
    if (mStopped != stopped) {
        mStopped = stopped;
        if (mStopped) {
            mGenerationID++;
            mRenderThread.removeFrameCallback(this);
            mRenderPipeline->onStop();
        } else if (mIsDirty && hasSurface()) {
            mRenderThread.postFrameCallback(this);
        }
    }
}

void CanvasContext::allocateBuffers() {
    if (mNativeSurface) {
        ANativeWindow_tryAllocateBuffers(mNativeSurface->getNativeWindow());
    }
}

void CanvasContext::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mLightInfo.ambientShadowAlpha = ambientShadowAlpha;
    mLightInfo.spotShadowAlpha = spotShadowAlpha;
}

void CanvasContext::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mLightGeometry.center = lightCenter;
    mLightGeometry.radius = lightRadius;
}

void CanvasContext::setOpaque(bool opaque) {
    mOpaque = opaque;
}

void CanvasContext::setColorMode(ColorMode mode) {
    mRenderPipeline->setSurfaceColorProperties(mode);
    setupPipelineSurface();
}

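// Makes the pipeline's rendering context current for this surface. A failed attempt abandons
// the surface; both the Failed and Succeeded cases mark the surface as new so the next frame
// is drawn in full.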
bool CanvasContext::makeCurrent() {
    if (mStopped) return false;

    auto result = mRenderPipeline->makeCurrent();
    switch (result) {
        case MakeCurrentResult::AlreadyCurrent:
            return true;
        case MakeCurrentResult::Failed:
            mHaveNewSurface = true;
            setSurface(nullptr);
            return false;
        case MakeCurrentResult::Succeeded:
            mHaveNewSurface = true;
            return true;
        default:
            LOG_ALWAYS_FATAL("unexpected result %d from IRenderPipeline::makeCurrent",
                             (int32_t)result);
    }

    return true;
}

static bool wasSkipped(FrameInfo* info) {
    return info && ((*info)[FrameInfoIndex::Flags] & FrameInfoFlags::SkippedFrame);
}

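// Heuristic for detecting a backed-up swap chain: returns true only when the swap history is
// full, every recorded swap shows a slow dequeue or queue duration, and consecutive swaps
// completed close enough together that a frame wasn't already dropped.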
bool CanvasContext::isSwapChainStuffed() {
    static const auto SLOW_THRESHOLD = 6_ms;

    if (mSwapHistory.size() != mSwapHistory.capacity()) {
        // We want at least 3 frames of history before attempting to
        // guess if the queue is stuffed
        return false;
    }
    nsecs_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    auto& swapA = mSwapHistory[0];

    // Was there a happy queue & dequeue time? If so, don't
    // consider it stuffed
    if (swapA.dequeueDuration < SLOW_THRESHOLD && swapA.queueDuration < SLOW_THRESHOLD) {
        return false;
    }

    for (size_t i = 1; i < mSwapHistory.size(); i++) {
        auto& swapB = mSwapHistory[i];

        // If there's a multi-frameInterval gap we effectively already dropped a frame,
        // so consider the queue healthy.
        if (std::abs(swapA.swapCompletedTime - swapB.swapCompletedTime) > frameInterval * 3) {
            return false;
        }

        // Was there a happy queue & dequeue time? If so, don't
        // consider it stuffed
        if (swapB.dequeueDuration < SLOW_THRESHOLD && swapB.queueDuration < SLOW_THRESHOLD) {
            return false;
        }

        swapA = swapB;
    }

    // All signs point to a stuffed swap chain
    ATRACE_NAME("swap chain stuffed");
    return true;
}

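// Sync stage: imports the frame info handed over by the UI thread, runs prepareTree on every
// render node and the animation context, and decides whether this frame can be drawn on the
// RenderThread or must be skipped (no surface, already drew for this vsync pulse, or the next
// buffer could not be reserved).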
void CanvasContext::prepareTree(TreeInfo& info, int64_t* uiFrameInfo, int64_t syncQueued,
                                RenderNode* target) {
    mRenderThread.removeFrameCallback(this);

    // If the previous frame was dropped we don't need to hold onto it, so
    // just keep using the previous frame's structure instead
    if (!wasSkipped(mCurrentFrameInfo)) {
        mCurrentFrameInfo = mJankTracker.startFrame();
        mLast4FrameInfos.next().first = mCurrentFrameInfo;
    }
    mCurrentFrameInfo->importUiThreadInfo(uiFrameInfo);
    mCurrentFrameInfo->set(FrameInfoIndex::SyncQueued) = syncQueued;
    mCurrentFrameInfo->markSyncStart();

    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.damageGenerationId = mDamageId++;
    info.out.canDrawThisFrame = true;

    mAnimationContext->startFrame(info.mode);
    for (const sp<RenderNode>& node : mRenderNodes) {
        // Only the primary target node will be drawn full - all other nodes would get drawn in
        // real time mode. In case of a window, the primary node is the window content and the
        // other node(s) are non client / filler nodes.
        info.mode = (node.get() == target ? TreeInfo::MODE_FULL : TreeInfo::MODE_RT_ONLY);
        node->prepareTree(info);
        GL_CHECKPOINT(MODERATE);
    }
    mAnimationContext->runRemainingAnimations(info);
    GL_CHECKPOINT(MODERATE);

    freePrefetchedLayers();
    GL_CHECKPOINT(MODERATE);

    mIsDirty = true;

    if (CC_UNLIKELY(!hasSurface())) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        info.out.canDrawThisFrame = false;
        return;
    }

    if (CC_LIKELY(mSwapHistory.size() && !Properties::forceDrawFrame)) {
        nsecs_t latestVsync = mRenderThread.timeLord().latestVsync();
        SwapHistory& lastSwap = mSwapHistory.back();
        nsecs_t vsyncDelta = std::abs(lastSwap.vsyncTime - latestVsync);
        // The slight fudge-factor is to deal with cases where
        // the vsync was estimated due to being slow handling the signal.
        // See the logic in TimeLord#computeFrameTimeNanos or in
        // Choreographer.java for details on when this happens
        if (vsyncDelta < 2_ms) {
            // Already drew for this vsync pulse, UI draw request missed
            // the deadline for RT animations
            info.out.canDrawThisFrame = false;
        }
    } else {
        info.out.canDrawThisFrame = true;
    }

    // TODO: Do we need to abort out if the backdrop is added but not ready? Should that even
    // be an allowable combination?
    if (mRenderNodes.size() > 2 && !mRenderNodes[1]->isRenderable()) {
        info.out.canDrawThisFrame = false;
    }

    if (info.out.canDrawThisFrame) {
        int err = mNativeSurface->reserveNext();
        if (err != OK) {
            mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
            info.out.canDrawThisFrame = false;
            ALOGW("reserveNext failed, error = %d (%s)", err, strerror(-err));
            if (err != TIMED_OUT) {
                // A timed out surface can still recover, but assume others are permanently dead.
                setSurface(nullptr);
                return;
            }
        }
    } else {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
    }

    bool postedFrameCallback = false;
    if (info.out.hasAnimations || !info.out.canDrawThisFrame) {
        if (CC_UNLIKELY(!Properties::enableRTAnimations)) {
            info.out.requiresUiRedraw = true;
        }
        if (!info.out.requiresUiRedraw) {
            // If animationsNeedsRedraw is set don't bother posting for an RT anim
            // as we will just end up fighting the UI thread.
            mRenderThread.postFrameCallback(this);
            postedFrameCallback = true;
        }
    }

    if (!postedFrameCallback &&
        info.out.animatedImageDelay != TreeInfo::Out::kNoAnimatedImageDelay) {
        // Subtract the time of one frame so it can be displayed on time.
        const nsecs_t kFrameTime = mRenderThread.timeLord().frameIntervalNanos();
        if (info.out.animatedImageDelay <= kFrameTime) {
            mRenderThread.postFrameCallback(this);
        } else {
            const auto delay = info.out.animatedImageDelay - kFrameTime;
            int genId = mGenerationID;
            mRenderThread.queue().postDelayed(delay, [this, genId]() {
                if (mGenerationID == genId) {
                    mRenderThread.postFrameCallback(this);
                }
            });
        }
    }
}

void CanvasContext::stopDrawing() {
    mRenderThread.removeFrameCallback(this);
    mAnimationContext->pauseAnimators();
    mGenerationID++;
}

void CanvasContext::notifyFramePending() {
    ATRACE_CALL();
    mRenderThread.pushBackFrameCallback(this);
}

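// Computes the desired present time for the upcoming buffer. With render-ahead in effect the
// timestamp is set to the frame's vsync plus (renderAhead + 1) frame intervals, adjusted by
// half a frame and the app offset; otherwise the window chooses the timestamp automatically.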
void CanvasContext::setPresentTime() {
    int64_t presentTime = NATIVE_WINDOW_TIMESTAMP_AUTO;
    int renderAhead = 0;
    const auto frameIntervalNanos = mRenderThread.timeLord().frameIntervalNanos();
    if (mFixedRenderAhead) {
        renderAhead = std::min(mRenderAheadDepth, mRenderAheadCapacity);
    } else if (frameIntervalNanos < 15_ms) {
        renderAhead = std::min(1, static_cast<int>(mRenderAheadCapacity));
    }

    if (renderAhead) {
        presentTime = mCurrentFrameInfo->get(FrameInfoIndex::Vsync) +
                      (frameIntervalNanos * (renderAhead + 1)) - DeviceInfo::get()->getAppOffset() +
                      (frameIntervalNanos / 2);
    }
    native_window_set_buffers_timestamp(mNativeSurface->getNativeWindow(), presentTime);
}

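// Draw stage: computes the dirty region, renders through the pipeline, swaps buffers, records
// swap timing into the history used by isSwapChainStuffed(), fires frame-complete callbacks,
// and reports jank / frame metrics. Empty frames are skipped outright unless the surface
// itself requires a redraw.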
void CanvasContext::draw() {
    SkRect dirty;
    mDamageAccumulator.finish(&dirty);

    if (dirty.isEmpty() && Properties::skipEmptyFrames && !surfaceRequiresRedraw()) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        // Notify the callbacks, even if there's nothing to draw so they aren't waiting
        // indefinitely
        waitOnFences();
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, mFrameNumber);
        }
        mFrameCompleteCallbacks.clear();
        return;
    }

    mCurrentFrameInfo->markIssueDrawCommandsStart();

    Frame frame = mRenderPipeline->getFrame();
    setPresentTime();

    SkRect windowDirty = computeDirtyRect(frame, &dirty);

    bool drew = mRenderPipeline->draw(frame, windowDirty, dirty, mLightGeometry, &mLayerUpdateQueue,
                                      mContentDrawBounds, mOpaque, mLightInfo, mRenderNodes,
                                      &(profiler()));

    int64_t frameCompleteNr = getFrameNumber();

    waitOnFences();

    if (mNativeSurface) {
        // TODO(b/165985262): measure performance impact
        if (const auto vsyncId = mCurrentFrameInfo->get(FrameInfoIndex::FrameTimelineVsyncId);
            vsyncId != UiFrameInfoBuilder::INVALID_VSYNC_ID) {
            native_window_set_frame_timeline_vsync(mNativeSurface->getNativeWindow(), vsyncId);
        }
    }

    bool requireSwap = false;
    int error = OK;
    bool didSwap =
            mRenderPipeline->swapBuffers(frame, drew, windowDirty, mCurrentFrameInfo, &requireSwap);

    mIsDirty = false;

    if (requireSwap) {
        bool didDraw = true;
        // Handle any swapchain errors
        error = mNativeSurface->getAndClearError();
        if (error == TIMED_OUT) {
            // Try again
            mRenderThread.postFrameCallback(this);
            // But since this frame didn't happen, we need to mark full damage in the swap
            // history
            didDraw = false;

        } else if (error != OK || !didSwap) {
            // Unknown error, abandon the surface
            setSurface(nullptr);
            didDraw = false;
        }

        SwapHistory& swap = mSwapHistory.next();
        if (didDraw) {
            swap.damage = windowDirty;
        } else {
            float max = static_cast<float>(INT_MAX);
            swap.damage = SkRect::MakeWH(max, max);
        }
        swap.swapCompletedTime = systemTime(SYSTEM_TIME_MONOTONIC);
        swap.vsyncTime = mRenderThread.timeLord().latestVsync();
        if (didDraw) {
            nsecs_t dequeueStart =
                    ANativeWindow_getLastDequeueStartTime(mNativeSurface->getNativeWindow());
            if (dequeueStart < mCurrentFrameInfo->get(FrameInfoIndex::SyncStart)) {
                // Ignoring dequeue duration as it happened prior to frame render start
                // and thus is not part of the frame.
                swap.dequeueDuration = 0;
            } else {
                swap.dequeueDuration =
                        ANativeWindow_getLastDequeueDuration(mNativeSurface->getNativeWindow());
            }
            swap.queueDuration =
                    ANativeWindow_getLastQueueDuration(mNativeSurface->getNativeWindow());
        } else {
            swap.dequeueDuration = 0;
            swap.queueDuration = 0;
        }
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = swap.dequeueDuration;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = swap.queueDuration;
        mLast4FrameInfos[-1].second = frameCompleteNr;
        mHaveNewSurface = false;
        mFrameNumber = -1;
    } else {
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = 0;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = 0;
        mLast4FrameInfos[-1].second = -1;
    }

    // TODO: Use a fence for real completion?
    mCurrentFrameInfo->markFrameCompleted();

#if LOG_FRAMETIME_MMA
    float thisFrame = mCurrentFrameInfo->duration(FrameInfoIndex::IssueDrawCommandsStart,
                                                  FrameInfoIndex::FrameCompleted) /
                      NANOS_PER_MILLIS_F;
    if (sFrameCount) {
        sBenchMma = ((9 * sBenchMma) + thisFrame) / 10;
    } else {
        sBenchMma = thisFrame;
    }
    if (++sFrameCount == 10) {
        sFrameCount = 1;
        ALOGD("Average frame time: %.4f", sBenchMma);
    }
#endif

    if (didSwap) {
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, frameCompleteNr);
        }
        mFrameCompleteCallbacks.clear();
    }

    mJankTracker.finishFrame(*mCurrentFrameInfo);
    if (CC_UNLIKELY(mFrameMetricsReporter.get() != nullptr)) {
        mFrameMetricsReporter->reportFrameMetrics(mCurrentFrameInfo->data());
    }

    if (mLast4FrameInfos.size() == mLast4FrameInfos.capacity()) {
        // By looking 4 frames back, we guarantee all SF stats are available. There are at
        // most 3 buffers in BufferQueue. Surface object keeps stats for the last 8 frames.
        FrameInfo* forthBehind = mLast4FrameInfos.front().first;
        int64_t composedFrameId = mLast4FrameInfos.front().second;
        nsecs_t acquireTime = -1;
        if (mNativeSurface) {
            native_window_get_frame_timestamps(mNativeSurface->getNativeWindow(), composedFrameId,
                                               nullptr, &acquireTime, nullptr, nullptr, nullptr,
                                               nullptr, nullptr, nullptr, nullptr);
        }
        // Ignore default -1, NATIVE_WINDOW_TIMESTAMP_INVALID and NATIVE_WINDOW_TIMESTAMP_PENDING
        forthBehind->set(FrameInfoIndex::GpuCompleted) = acquireTime > 0 ? acquireTime : -1;
        mJankTracker.finishGpuDraw(*forthBehind);
    }

    mRenderThread.cacheManager().onFrameCompleted();
}

// Called by choreographer to do an RT-driven animation
void CanvasContext::doFrame() {
    if (!mRenderPipeline->isSurfaceReady()) return;
    prepareAndDraw(nullptr);
}

SkISize CanvasContext::getNextFrameSize() const {
    static constexpr SkISize defaultFrameSize = {INT32_MAX, INT32_MAX};
    if (mNativeSurface == nullptr) {
        return defaultFrameSize;
    }
    ANativeWindow* anw = mNativeSurface->getNativeWindow();

    SkISize size;
    size.fWidth = ANativeWindow_getWidth(anw);
    size.fHeight = ANativeWindow_getHeight(anw);
    return size;
}

void CanvasContext::prepareAndDraw(RenderNode* node) {
    ATRACE_CALL();

    nsecs_t vsync = mRenderThread.timeLord().computeFrameTimeNanos();
    int64_t vsyncId = mRenderThread.timeLord().lastVsyncId();
    int64_t frameDeadline = mRenderThread.timeLord().lastFrameDeadline();
    int64_t frameInfo[UI_THREAD_FRAME_INFO_SIZE];
    UiFrameInfoBuilder(frameInfo)
            .addFlag(FrameInfoFlags::RTAnimation)
            .setVsync(vsync, vsync, vsyncId, frameDeadline);

    TreeInfo info(TreeInfo::MODE_RT_ONLY, *this);
    prepareTree(info, frameInfo, systemTime(SYSTEM_TIME_MONOTONIC), node);
    if (info.out.canDrawThisFrame) {
        draw();
    } else {
        // wait on fences so tasks don't overlap next frame
        waitOnFences();
    }
}

void CanvasContext::markLayerInUse(RenderNode* node) {
    if (mPrefetchedLayers.erase(node)) {
        node->decStrong(nullptr);
    }
}

void CanvasContext::freePrefetchedLayers() {
    if (mPrefetchedLayers.size()) {
        for (auto& node : mPrefetchedLayers) {
            ALOGW("Incorrectly called buildLayer on View: %s, destroying layer...",
                  node->getName());
            node->destroyLayers();
            node->decStrong(nullptr);
        }
        mPrefetchedLayers.clear();
    }
}

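// Renders a node into its off-screen layer ahead of time (the buildLayer path). The node is
// prepared in MODE_FULL with animations disabled, its layer content is rendered via
// renderLayers(), and the node is tracked in mPrefetchedLayers until markLayerInUse() or
// freePrefetchedLayers() releases it.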
void CanvasContext::buildLayer(RenderNode* node) {
    ATRACE_CALL();
    if (!mRenderPipeline->isContextReady()) return;

    // buildLayer() will leave the tree in an unknown state, so we must stop drawing
    stopDrawing();

    TreeInfo info(TreeInfo::MODE_FULL, *this);
    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.runAnimations = false;
    node->prepareTree(info);
    SkRect ignore;
    mDamageAccumulator.finish(&ignore);
    // Tickle the GENERIC property on node to mark it as dirty for damaging
    // purposes when the frame is actually drawn
    node->setPropertyFieldsDirty(RenderNode::GENERIC);

    mRenderPipeline->renderLayers(mLightGeometry, &mLayerUpdateQueue, mOpaque, mLightInfo);

    node->incStrong(nullptr);
    mPrefetchedLayers.insert(node);
}

void CanvasContext::destroyHardwareResources() {
    stopDrawing();
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyHardwareResources();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

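// Handles TRIM_MEMORY signals for the given RenderThread. UI_HIDDEN trims caches; COMPLETE
// additionally tears down the rendering context. No-op if the GrContext hasn't been created.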
void CanvasContext::trimMemory(RenderThread& thread, int level) {
    ATRACE_CALL();
    if (!thread.getGrContext()) return;
    if (level >= TRIM_MEMORY_COMPLETE) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::Complete);
        thread.destroyRenderingContext();
    } else if (level >= TRIM_MEMORY_UI_HIDDEN) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::UiHidden);
    }
}

DeferredLayerUpdater* CanvasContext::createTextureLayer() {
    return mRenderPipeline->createTextureLayer();
}

void CanvasContext::dumpFrames(int fd) {
    mJankTracker.dumpStats(fd);
    mJankTracker.dumpFrames(fd);
}

void CanvasContext::resetFrameStats() {
    mJankTracker.reset();
}

void CanvasContext::setName(const std::string&& name) {
    mJankTracker.setDescription(JankTrackerType::Window, std::move(name));
}

void CanvasContext::waitOnFences() {
    if (mFrameFences.size()) {
        ATRACE_CALL();
        for (auto& fence : mFrameFences) {
            fence.get();
        }
        mFrameFences.clear();
    }
}

void CanvasContext::enqueueFrameWork(std::function<void()>&& func) {
    mFrameFences.push_back(CommonPool::async(std::move(func)));
}

int64_t CanvasContext::getFrameNumber() {
    // mFrameNumber is reset to -1 when the surface changes or we swap buffers
    if (mFrameNumber == -1 && mNativeSurface.get()) {
        mFrameNumber = ANativeWindow_getNextFrameId(mNativeSurface->getNativeWindow());
    }
    return mFrameNumber;
}

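// True when the next frame must be drawn even if nothing is damaged: either the surface is
// brand new or its size no longer matches the last frame rendered.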
bool CanvasContext::surfaceRequiresRedraw() {
    if (!mNativeSurface) return false;
    if (mHaveNewSurface) return true;

    ANativeWindow* anw = mNativeSurface->getNativeWindow();
    const int width = ANativeWindow_getWidth(anw);
    const int height = ANativeWindow_getHeight(anw);

    return width != mLastFrameWidth || height != mLastFrameHeight;
}

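// Sets a fixed render-ahead depth. Only values 0-2 are accepted, and only before a surface
// has been attached; later changes are ignored.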
void CanvasContext::setRenderAheadDepth(int renderAhead) {
    if (renderAhead > 2 || renderAhead < 0 || mNativeSurface) {
        return;
    }
    mFixedRenderAhead = true;
    mRenderAheadDepth = static_cast<uint32_t>(renderAhead);
}

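// Clamps the accumulated damage to the frame, then uses the buffer age reported by the window
// to grow *dirty with damage from previous frames still present in this buffer. Returns the
// window-space dirty rect (stashed before the buffer-age extension) for the swap.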
SkRect CanvasContext::computeDirtyRect(const Frame& frame, SkRect* dirty) {
    if (frame.width() != mLastFrameWidth || frame.height() != mLastFrameHeight) {
        // can't rely on prior content of window if viewport size changes
        dirty->setEmpty();
        mLastFrameWidth = frame.width();
        mLastFrameHeight = frame.height();
    } else if (mHaveNewSurface || frame.bufferAge() == 0) {
        // New surface needs a full draw
        dirty->setEmpty();
    } else {
        if (!dirty->isEmpty() && !dirty->intersect(SkRect::MakeIWH(frame.width(), frame.height()))) {
            ALOGW("Dirty " RECT_STRING " doesn't intersect with 0 0 %d %d ?", SK_RECT_ARGS(*dirty),
                  frame.width(), frame.height());
            dirty->setEmpty();
        }
        profiler().unionDirty(dirty);
    }

    if (dirty->isEmpty()) {
        dirty->setIWH(frame.width(), frame.height());
    }

    // At this point dirty is the area of the window to update. However,
    // the area of the frame we need to repaint is potentially different, so
    // stash the screen area for later
    SkRect windowDirty(*dirty);

    // If the buffer age is 0 we do a full-screen repaint (handled above)
    // If the buffer age is 1 the buffer contents are the same as they were
    // last frame so there's nothing to union() against
    // Therefore we only care about the > 1 case.
    if (frame.bufferAge() > 1) {
        if (frame.bufferAge() > (int)mSwapHistory.size()) {
            // We don't have enough history to handle this old of a buffer
            // Just do a full-draw
            dirty->setIWH(frame.width(), frame.height());
        } else {
            // At this point we haven't yet added the latest frame
            // to the damage history (happens below)
            // So we need to damage
            for (int i = mSwapHistory.size() - 1;
                 i > ((int)mSwapHistory.size()) - frame.bufferAge(); i--) {
                dirty->join(mSwapHistory[i].damage);
            }
        }
    }

    return windowDirty;
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */