/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CanvasContext.h"

#include <apex/window.h>
#include <fcntl.h>
#include <gui/TraceUtils.h>
#include <strings.h>
#include <sys/stat.h>
#include <ui/Fence.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include "../Properties.h"
#include "AnimationContext.h"
#include "Frame.h"
#include "LayerUpdateQueue.h"
#include "Properties.h"
#include "RenderThread.h"
#include "hwui/Canvas.h"
#include "pipeline/skia/SkiaOpenGLPipeline.h"
#include "pipeline/skia/SkiaPipeline.h"
#include "pipeline/skia/SkiaVulkanPipeline.h"
#include "thread/CommonPool.h"
#include "utils/GLUtils.h"
#include "utils/TimeUtils.h"

#define LOG_FRAMETIME_MMA 0

#if LOG_FRAMETIME_MMA
static float sBenchMma = 0;
static int sFrameCount = 0;
static const float NANOS_PER_MILLIS_F = 1000000.0f;
#endif

namespace android {
namespace uirenderer {
namespace renderthread {

namespace {
class ScopedActiveContext {
public:
    ScopedActiveContext(CanvasContext* context) { sActiveContext = context; }

    ~ScopedActiveContext() { sActiveContext = nullptr; }

    static CanvasContext* getActiveContext() { return sActiveContext; }

private:
    static CanvasContext* sActiveContext;
};

CanvasContext* ScopedActiveContext::sActiveContext = nullptr;
} /* namespace */

CanvasContext* CanvasContext::create(RenderThread& thread, bool translucent,
                                     RenderNode* rootRenderNode, IContextFactory* contextFactory) {
    auto renderType = Properties::getRenderPipelineType();

    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaOpenGLPipeline>(thread));
        case RenderPipelineType::SkiaVulkan:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaVulkanPipeline>(thread));
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
    return nullptr;
}

void CanvasContext::invokeFunctor(const RenderThread& thread, Functor* functor) {
    ATRACE_CALL();
    auto renderType = Properties::getRenderPipelineType();
    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            skiapipeline::SkiaOpenGLPipeline::invokeFunctor(thread, functor);
            break;
        case RenderPipelineType::SkiaVulkan:
            skiapipeline::SkiaVulkanPipeline::invokeFunctor(thread, functor);
            break;
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
}

void CanvasContext::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    skiapipeline::SkiaPipeline::prepareToDraw(thread, bitmap);
}

CanvasContext::CanvasContext(RenderThread& thread, bool translucent, RenderNode* rootRenderNode,
                             IContextFactory* contextFactory,
                             std::unique_ptr<IRenderPipeline> renderPipeline)
        : mRenderThread(thread)
        , mGenerationID(0)
        , mOpaque(!translucent)
        , mAnimationContext(contextFactory->createAnimationContext(mRenderThread.timeLord()))
        , mJankTracker(&thread.globalProfileData())
        , mProfiler(mJankTracker.frames(), thread.timeLord().frameIntervalNanos())
        , mContentDrawBounds(0, 0, 0, 0)
        , mRenderPipeline(std::move(renderPipeline)) {
    mRenderThread.cacheManager().registerCanvasContext(this);
    rootRenderNode->makeRoot();
    mRenderNodes.emplace_back(rootRenderNode);
    mProfiler.setDensity(DeviceInfo::getDensity());
}

CanvasContext::~CanvasContext() {
    destroy();
    for (auto& node : mRenderNodes) {
        node->clearRoot();
    }
    mRenderNodes.clear();
    mRenderThread.cacheManager().unregisterCanvasContext(this);
}

void CanvasContext::addRenderNode(RenderNode* node, bool placeFront) {
    int pos = placeFront ? 0 : static_cast<int>(mRenderNodes.size());
    node->makeRoot();
    mRenderNodes.emplace(mRenderNodes.begin() + pos, node);
}

void CanvasContext::removeRenderNode(RenderNode* node) {
    node->clearRoot();
    mRenderNodes.erase(std::remove(mRenderNodes.begin(), mRenderNodes.end(), node),
                       mRenderNodes.end());
}

void CanvasContext::destroy() {
    stopDrawing();
    setSurface(nullptr);
    setSurfaceControl(nullptr);
    freePrefetchedLayers();
    destroyHardwareResources();
    mAnimationContext->destroy();
    mRenderThread.cacheManager().onContextStopped(this);
}

static void setBufferCount(ANativeWindow* window) {
    int query_value;
    int err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &query_value);
    if (err != 0 || query_value < 0) {
        ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err, query_value);
        return;
    }
    auto min_undequeued_buffers = static_cast<uint32_t>(query_value);

    // We only need to set min_undequeued + 2 because the renderahead amount was already
    // factored into the query for min_undequeued
    int bufferCount = min_undequeued_buffers + 2;
    native_window_set_buffer_count(window, bufferCount);
}

void CanvasContext::setSurface(ANativeWindow* window, bool enableTimeout) {
    ATRACE_CALL();

    if (window) {
        mNativeSurface = std::make_unique<ReliableSurface>(window);
        mNativeSurface->init();
        if (enableTimeout) {
            // TODO: Fix error handling & re-shorten timeout
            ANativeWindow_setDequeueTimeout(window, 4000_ms);
        }
    } else {
        mNativeSurface = nullptr;
    }
    setupPipelineSurface();
}

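// Swaps out the ASurfaceControl this context reports stats against. The old control's
// surface-stats listener is unregistered and the control released; a non-null new control
// is acquired and re-registered, and the generation id is bumped so callbacks for the old
// surface can be distinguished from the current one.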
void CanvasContext::setSurfaceControl(ASurfaceControl* surfaceControl) {
    if (surfaceControl == mSurfaceControl) return;

    auto funcs = mRenderThread.getASurfaceControlFunctions();

    if (surfaceControl == nullptr) {
        setASurfaceTransactionCallback(nullptr);
        setPrepareSurfaceControlForWebviewCallback(nullptr);
    }

    if (mSurfaceControl != nullptr) {
        funcs.unregisterListenerFunc(this, &onSurfaceStatsAvailable);
        funcs.releaseFunc(mSurfaceControl);
    }
    mSurfaceControl = surfaceControl;
    mSurfaceControlGenerationId++;
    mExpectSurfaceStats = surfaceControl != nullptr;
    if (mExpectSurfaceStats) {
        funcs.acquireFunc(mSurfaceControl);
        funcs.registerListenerFunc(surfaceControl, mSurfaceControlGenerationId, this,
                                   &onSurfaceStatsAvailable);
    }
}

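// (Re)binds the current native window to the render pipeline. On success the next frame is
// treated as targeting a brand-new surface (full redraw, cleared swap history); on failure
// the pending frame callback is dropped and the generation id is bumped.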
void CanvasContext::setupPipelineSurface() {
    bool hasSurface = mRenderPipeline->setSurface(
            mNativeSurface ? mNativeSurface->getNativeWindow() : nullptr, mSwapBehavior);

    if (mNativeSurface && !mNativeSurface->didSetExtraBuffers()) {
        setBufferCount(mNativeSurface->getNativeWindow());
    }

    mFrameNumber = 0;

    if (mNativeSurface != nullptr && hasSurface) {
        mHaveNewSurface = true;
        mSwapHistory.clear();
        // Enable frame stats after the surface has been bound to the appropriate graphics API.
        // Order is important when new and old surfaces are the same, because old surface has
        // its frame stats disabled automatically.
        native_window_enable_frame_timestamps(mNativeSurface->getNativeWindow(), true);
    } else {
        mRenderThread.removeFrameCallback(this);
        mGenerationID++;
    }
}

void CanvasContext::setSwapBehavior(SwapBehavior swapBehavior) {
    mSwapBehavior = swapBehavior;
}

bool CanvasContext::pauseSurface() {
    mGenerationID++;
    return mRenderThread.removeFrameCallback(this);
}

void CanvasContext::setStopped(bool stopped) {
    if (mStopped != stopped) {
        mStopped = stopped;
        if (mStopped) {
            mGenerationID++;
            mRenderThread.removeFrameCallback(this);
            mRenderPipeline->onStop();
            mRenderThread.cacheManager().onContextStopped(this);
        } else if (mIsDirty && hasSurface()) {
            mRenderThread.postFrameCallback(this);
        }
    }
}

void CanvasContext::allocateBuffers() {
    if (mNativeSurface && Properties::isDrawingEnabled()) {
        ANativeWindow_tryAllocateBuffers(mNativeSurface->getNativeWindow());
    }
}

void CanvasContext::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mLightInfo.ambientShadowAlpha = ambientShadowAlpha;
    mLightInfo.spotShadowAlpha = spotShadowAlpha;
}

void CanvasContext::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mLightGeometry.center = lightCenter;
    mLightGeometry.radius = lightRadius;
}

void CanvasContext::setOpaque(bool opaque) {
    mOpaque = opaque;
}

void CanvasContext::setColorMode(ColorMode mode) {
    mRenderPipeline->setSurfaceColorProperties(mode);
    setupPipelineSurface();
}

bool CanvasContext::makeCurrent() {
    if (mStopped) return false;

    auto result = mRenderPipeline->makeCurrent();
    switch (result) {
        case MakeCurrentResult::AlreadyCurrent:
            return true;
        case MakeCurrentResult::Failed:
            mHaveNewSurface = true;
            setSurface(nullptr);
            return false;
        case MakeCurrentResult::Succeeded:
            mHaveNewSurface = true;
            return true;
        default:
            LOG_ALWAYS_FATAL("unexpected result %d from IRenderPipeline::makeCurrent",
                             (int32_t)result);
    }

    return true;
}

static bool wasSkipped(FrameInfo* info) {
    return info && ((*info)[FrameInfoIndex::Flags] & FrameInfoFlags::SkippedFrame);
}

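// Heuristic: the swap chain is only considered "stuffed" when the history ring is full,
// every recorded swap shows a slow (>= 6ms) dequeue or queue time, and there is no
// multi-vsync gap between consecutive swaps (a gap means a frame was already dropped,
// which relieves the pressure).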
bool CanvasContext::isSwapChainStuffed() {
    static const auto SLOW_THRESHOLD = 6_ms;

    if (mSwapHistory.size() != mSwapHistory.capacity()) {
        // We want at least 3 frames of history before attempting to
        // guess if the queue is stuffed
        return false;
    }
    nsecs_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    auto& swapA = mSwapHistory[0];

    // Was there a happy queue & dequeue time? If so, don't
    // consider it stuffed
    if (swapA.dequeueDuration < SLOW_THRESHOLD && swapA.queueDuration < SLOW_THRESHOLD) {
        return false;
    }

    for (size_t i = 1; i < mSwapHistory.size(); i++) {
        auto& swapB = mSwapHistory[i];

        // If there's a multi-frameInterval gap we effectively already dropped a frame,
        // so consider the queue healthy.
        if (std::abs(swapA.swapCompletedTime - swapB.swapCompletedTime) > frameInterval * 3) {
            return false;
        }

        // Was there a happy queue & dequeue time? If so, don't
        // consider it stuffed
        if (swapB.dequeueDuration < SLOW_THRESHOLD && swapB.queueDuration < SLOW_THRESHOLD) {
            return false;
        }

        swapA = swapB;
    }

    // All signs point to a stuffed swap chain
    ATRACE_NAME("swap chain stuffed");
    return true;
}

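// Runs the sync stage on the render thread: imports the UI-thread frame info and prepares
// every render node tree. It also decides whether this frame can be drawn at all (no
// surface, a vsync pulse we already drew for, a backdrop node that isn't renderable yet, or
// a failed buffer reservation all mark the frame as skipped) and schedules follow-up frame
// callbacks for render-thread animations and animated images.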
void CanvasContext::prepareTree(TreeInfo& info, int64_t* uiFrameInfo, int64_t syncQueued,
                                RenderNode* target) {
    mRenderThread.removeFrameCallback(this);

    // If the previous frame was dropped we don't need to hold onto it, so
    // just keep using the previous frame's structure instead
    if (!wasSkipped(mCurrentFrameInfo)) {
        mCurrentFrameInfo = mJankTracker.startFrame();
    }

    mCurrentFrameInfo->importUiThreadInfo(uiFrameInfo);
    mCurrentFrameInfo->set(FrameInfoIndex::SyncQueued) = syncQueued;
    mCurrentFrameInfo->markSyncStart();

    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.damageGenerationId = mDamageId++;
    info.out.canDrawThisFrame = true;

    mAnimationContext->startFrame(info.mode);
    for (const sp<RenderNode>& node : mRenderNodes) {
        // Only the primary target node will be drawn full - all other nodes would get drawn in
        // real time mode. In case of a window, the primary node is the window content and the
        // other node(s) are non client / filler nodes.
        info.mode = (node.get() == target ? TreeInfo::MODE_FULL : TreeInfo::MODE_RT_ONLY);
        node->prepareTree(info);
        GL_CHECKPOINT(MODERATE);
    }
    mAnimationContext->runRemainingAnimations(info);
    GL_CHECKPOINT(MODERATE);

    freePrefetchedLayers();
    GL_CHECKPOINT(MODERATE);

    mIsDirty = true;

    if (CC_UNLIKELY(!hasSurface())) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        info.out.canDrawThisFrame = false;
        return;
    }

    if (CC_LIKELY(mSwapHistory.size() && !info.forceDrawFrame)) {
        nsecs_t latestVsync = mRenderThread.timeLord().latestVsync();
        SwapHistory& lastSwap = mSwapHistory.back();
        nsecs_t vsyncDelta = std::abs(lastSwap.vsyncTime - latestVsync);
        // The slight fudge-factor is to deal with cases where
        // the vsync was estimated due to being slow handling the signal.
        // See the logic in TimeLord#computeFrameTimeNanos or in
        // Choreographer.java for details on when this happens
        if (vsyncDelta < 2_ms) {
            // Already drew for this vsync pulse, UI draw request missed
            // the deadline for RT animations
            info.out.canDrawThisFrame = false;
        }
    } else {
        info.out.canDrawThisFrame = true;
    }

    // TODO: Do we need to abort out if the backdrop is added but not ready? Should that even
    // be an allowable combination?
    if (mRenderNodes.size() > 2 && !mRenderNodes[1]->isRenderable()) {
        info.out.canDrawThisFrame = false;
    }

    if (info.out.canDrawThisFrame) {
        int err = mNativeSurface->reserveNext();
        if (err != OK) {
            mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
            info.out.canDrawThisFrame = false;
            ALOGW("reserveNext failed, error = %d (%s)", err, strerror(-err));
            if (err != TIMED_OUT) {
                // A timed out surface can still recover, but assume others are permanently dead.
                setSurface(nullptr);
                return;
            }
        }
    } else {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
    }

    bool postedFrameCallback = false;
    if (info.out.hasAnimations || !info.out.canDrawThisFrame) {
        if (CC_UNLIKELY(!Properties::enableRTAnimations)) {
            info.out.requiresUiRedraw = true;
        }
        if (!info.out.requiresUiRedraw) {
            // If animationsNeedsRedraw is set don't bother posting for an RT anim
            // as we will just end up fighting the UI thread.
            mRenderThread.postFrameCallback(this);
            postedFrameCallback = true;
        }
    }

    if (!postedFrameCallback &&
        info.out.animatedImageDelay != TreeInfo::Out::kNoAnimatedImageDelay) {
        // Subtract the time of one frame so it can be displayed on time.
        const nsecs_t kFrameTime = mRenderThread.timeLord().frameIntervalNanos();
        if (info.out.animatedImageDelay <= kFrameTime) {
            mRenderThread.postFrameCallback(this);
        } else {
            const auto delay = info.out.animatedImageDelay - kFrameTime;
            int genId = mGenerationID;
            mRenderThread.queue().postDelayed(delay, [this, genId]() {
                if (mGenerationID == genId) {
                    mRenderThread.postFrameCallback(this);
                }
            });
        }
    }
}

void CanvasContext::stopDrawing() {
    mRenderThread.removeFrameCallback(this);
    mAnimationContext->pauseAnimators();
    mGenerationID++;
}

void CanvasContext::notifyFramePending() {
    ATRACE_CALL();
    mRenderThread.pushBackFrameCallback(this);
}

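// Draws the current frame and swaps buffers. Returns the dequeue-buffer duration when a
// buffer was actually produced, or std::nullopt when the frame was skipped (drawing
// disabled, nothing dirty, an abandoned GrContext, or a failed swap).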
std::optional<nsecs_t> CanvasContext::draw() {
    if (auto grContext = getGrContext()) {
        if (grContext->abandoned()) {
            LOG_ALWAYS_FATAL("GrContext is abandoned/device lost at start of CanvasContext::draw");
            return std::nullopt;
        }
    }
    SkRect dirty;
    mDamageAccumulator.finish(&dirty);

    if (!Properties::isDrawingEnabled() ||
        (dirty.isEmpty() && Properties::skipEmptyFrames && !surfaceRequiresRedraw())) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        if (auto grContext = getGrContext()) {
            // Submit to ensure that any texture uploads complete and Skia can
            // free its staging buffers.
            grContext->flushAndSubmit();
        }

        // Notify the callbacks, even if there's nothing to draw so they aren't waiting
        // indefinitely
        waitOnFences();
        for (auto& func : mFrameCommitCallbacks) {
            std::invoke(func, false /* didProduceBuffer */);
        }
        mFrameCommitCallbacks.clear();
        return std::nullopt;
    }

    ScopedActiveContext activeContext(this);
    mCurrentFrameInfo->set(FrameInfoIndex::FrameInterval) =
            mRenderThread.timeLord().frameIntervalNanos();

    mCurrentFrameInfo->markIssueDrawCommandsStart();

    Frame frame = mRenderPipeline->getFrame();
    SkRect windowDirty = computeDirtyRect(frame, &dirty);

    ATRACE_FORMAT("Drawing " RECT_STRING, SK_RECT_ARGS(dirty));

    IRenderPipeline::DrawResult drawResult;
    {
        // FrameInfoVisualizer accesses the frame events, which cannot be mutated mid-draw
        // or it can lead to memory corruption.
        // This lock is overly broad, but it's the quickest fix since this mutex is otherwise
        // not visible to IRenderPipeline much less FrameInfoVisualizer. And since this is
        // the thread we're primarily concerned about being responsive, this being too broad
        // shouldn't pose a performance issue.
        std::scoped_lock lock(mFrameMetricsReporterMutex);
        drawResult = mRenderPipeline->draw(frame, windowDirty, dirty, mLightGeometry,
                                           &mLayerUpdateQueue, mContentDrawBounds, mOpaque,
                                           mLightInfo, mRenderNodes, &(profiler()));
    }

    uint64_t frameCompleteNr = getFrameNumber();

    waitOnFences();

    if (mNativeSurface) {
        // TODO(b/165985262): measure performance impact
        const auto vsyncId = mCurrentFrameInfo->get(FrameInfoIndex::FrameTimelineVsyncId);
        if (vsyncId != UiFrameInfoBuilder::INVALID_VSYNC_ID) {
            const auto inputEventId =
                    static_cast<int32_t>(mCurrentFrameInfo->get(FrameInfoIndex::InputEventId));
            native_window_set_frame_timeline_info(
                    mNativeSurface->getNativeWindow(), vsyncId, inputEventId,
                    mCurrentFrameInfo->get(FrameInfoIndex::FrameStartTime));
        }
    }

    bool requireSwap = false;
    bool didDraw = false;

    int error = OK;
    bool didSwap = mRenderPipeline->swapBuffers(frame, drawResult.success, windowDirty,
                                                mCurrentFrameInfo, &requireSwap);

    mCurrentFrameInfo->set(FrameInfoIndex::CommandSubmissionCompleted) = std::max(
            drawResult.commandSubmissionTime, mCurrentFrameInfo->get(FrameInfoIndex::SwapBuffers));

    mIsDirty = false;

    if (requireSwap) {
        didDraw = true;
        // Handle any swapchain errors
        error = mNativeSurface->getAndClearError();
        if (error == TIMED_OUT) {
            // Try again
            mRenderThread.postFrameCallback(this);
            // But since this frame didn't happen, we need to mark full damage in the swap
            // history
            didDraw = false;
        } else if (error != OK || !didSwap) {
            // Unknown error, abandon the surface
            setSurface(nullptr);
            didDraw = false;
        }

        SwapHistory& swap = mSwapHistory.next();
        if (didDraw) {
            swap.damage = windowDirty;
        } else {
            float max = static_cast<float>(INT_MAX);
            swap.damage = SkRect::MakeWH(max, max);
        }
        swap.swapCompletedTime = systemTime(SYSTEM_TIME_MONOTONIC);
        swap.vsyncTime = mRenderThread.timeLord().latestVsync();
        if (didDraw) {
            nsecs_t dequeueStart =
                    ANativeWindow_getLastDequeueStartTime(mNativeSurface->getNativeWindow());
            if (dequeueStart < mCurrentFrameInfo->get(FrameInfoIndex::SyncStart)) {
                // Ignoring dequeue duration as it happened prior to frame render start
                // and thus is not part of the frame.
                swap.dequeueDuration = 0;
            } else {
                swap.dequeueDuration =
                        ANativeWindow_getLastDequeueDuration(mNativeSurface->getNativeWindow());
            }
            swap.queueDuration =
                    ANativeWindow_getLastQueueDuration(mNativeSurface->getNativeWindow());
        } else {
            swap.dequeueDuration = 0;
            swap.queueDuration = 0;
        }
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = swap.dequeueDuration;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = swap.queueDuration;
        mHaveNewSurface = false;
        mFrameNumber = 0;
    } else {
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = 0;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = 0;
    }

    mCurrentFrameInfo->markSwapBuffersCompleted();

#if LOG_FRAMETIME_MMA
    float thisFrame = mCurrentFrameInfo->duration(FrameInfoIndex::IssueDrawCommandsStart,
                                                  FrameInfoIndex::FrameCompleted) /
                      NANOS_PER_MILLIS_F;
    if (sFrameCount) {
        sBenchMma = ((9 * sBenchMma) + thisFrame) / 10;
    } else {
        sBenchMma = thisFrame;
    }
    if (++sFrameCount == 10) {
        sFrameCount = 1;
        ALOGD("Average frame time: %.4f", sBenchMma);
    }
#endif

    if (didSwap) {
        for (auto& func : mFrameCommitCallbacks) {
            std::invoke(func, true /* didProduceBuffer */);
        }
        mFrameCommitCallbacks.clear();
    }

    if (requireSwap) {
        if (mExpectSurfaceStats) {
            reportMetricsWithPresentTime();
            {  // acquire lock
                std::lock_guard lock(mLast4FrameMetricsInfosMutex);
                FrameMetricsInfo& next = mLast4FrameMetricsInfos.next();
                next.frameInfo = mCurrentFrameInfo;
                next.frameNumber = frameCompleteNr;
                next.surfaceId = mSurfaceControlGenerationId;
            }  // release lock
        } else {
            mCurrentFrameInfo->markFrameCompleted();
            mCurrentFrameInfo->set(FrameInfoIndex::GpuCompleted) =
                    mCurrentFrameInfo->get(FrameInfoIndex::FrameCompleted);
            std::scoped_lock lock(mFrameMetricsReporterMutex);
            mJankTracker.finishFrame(*mCurrentFrameInfo, mFrameMetricsReporter, frameCompleteNr,
                                     mSurfaceControlGenerationId);
        }
    }

    mRenderThread.cacheManager().onFrameCompleted();
    return didDraw ? std::make_optional(
                             mCurrentFrameInfo->get(FrameInfoIndex::DequeueBufferDuration))
                   : std::nullopt;
}

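// Reports metrics for the oldest frame in the 4-entry history ring, attaching the display
// present time read back from the window's frame timestamps; skipped when there is no
// reporter, no surface, or not enough frame history yet.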
void CanvasContext::reportMetricsWithPresentTime() {
    {  // acquire lock
        std::scoped_lock lock(mFrameMetricsReporterMutex);
        if (mFrameMetricsReporter == nullptr) {
            return;
        }
    }  // release lock
    if (mNativeSurface == nullptr) {
        return;
    }
    ATRACE_CALL();
    FrameInfo* forthBehind;
    int64_t frameNumber;
    int32_t surfaceControlId;

    {  // acquire lock
        std::scoped_lock lock(mLast4FrameMetricsInfosMutex);
        if (mLast4FrameMetricsInfos.size() != mLast4FrameMetricsInfos.capacity()) {
            // Not enough frames yet
            return;
        }
        auto frameMetricsInfo = mLast4FrameMetricsInfos.front();
        forthBehind = frameMetricsInfo.frameInfo;
        frameNumber = frameMetricsInfo.frameNumber;
        surfaceControlId = frameMetricsInfo.surfaceId;
    }  // release lock

    nsecs_t presentTime = 0;
    native_window_get_frame_timestamps(
            mNativeSurface->getNativeWindow(), frameNumber, nullptr /*outRequestedPresentTime*/,
            nullptr /*outAcquireTime*/, nullptr /*outLatchTime*/,
            nullptr /*outFirstRefreshStartTime*/, nullptr /*outLastRefreshStartTime*/,
            nullptr /*outGpuCompositionDoneTime*/, &presentTime, nullptr /*outDequeueReadyTime*/,
            nullptr /*outReleaseTime*/);

    forthBehind->set(FrameInfoIndex::DisplayPresentTime) = presentTime;
    {  // acquire lock
        std::scoped_lock lock(mFrameMetricsReporterMutex);
        if (mFrameMetricsReporter != nullptr) {
            mFrameMetricsReporter->reportFrameMetrics(forthBehind->data(), true /*hasPresentTime*/,
                                                      frameNumber, surfaceControlId);
        }
    }  // release lock
}

void CanvasContext::addFrameMetricsObserver(FrameMetricsObserver* observer) {
    std::scoped_lock lock(mFrameMetricsReporterMutex);
    if (mFrameMetricsReporter.get() == nullptr) {
        mFrameMetricsReporter.reset(new FrameMetricsReporter());
    }

    // We want to make sure we aren't reporting frames that have already been queued by the
    // BufferQueueProducer on the render thread but are still pending the callback to report
    // their frame metrics.
    uint64_t nextFrameNumber = getFrameNumber();
    observer->reportMetricsFrom(nextFrameNumber, mSurfaceControlGenerationId);
    mFrameMetricsReporter->addObserver(observer);
}

void CanvasContext::removeFrameMetricsObserver(FrameMetricsObserver* observer) {
    std::scoped_lock lock(mFrameMetricsReporterMutex);
    if (mFrameMetricsReporter.get() != nullptr) {
        mFrameMetricsReporter->removeObserver(observer);
        if (!mFrameMetricsReporter->hasObservers()) {
            mFrameMetricsReporter.reset(nullptr);
        }
    }
}

FrameInfo* CanvasContext::getFrameInfoFromLast4(uint64_t frameNumber, uint32_t surfaceControlId) {
    std::scoped_lock lock(mLast4FrameMetricsInfosMutex);
    for (size_t i = 0; i < mLast4FrameMetricsInfos.size(); i++) {
        if (mLast4FrameMetricsInfos[i].frameNumber == frameNumber &&
            mLast4FrameMetricsInfos[i].surfaceId == surfaceControlId) {
            return mLast4FrameMetricsInfos[i].frameInfo;
        }
    }

    return nullptr;
}

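// Listener registered in setSurfaceControl(); called with per-frame ASurfaceControl stats.
// Matches the stats to a recorded FrameInfo by frame number and surface id, fills in the
// GPU completion time, and finishes jank tracking for that frame.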
void CanvasContext::onSurfaceStatsAvailable(void* context, int32_t surfaceControlId,
                                            ASurfaceControlStats* stats) {
    auto* instance = static_cast<CanvasContext*>(context);

    const ASurfaceControlFunctions& functions =
            instance->mRenderThread.getASurfaceControlFunctions();

    nsecs_t gpuCompleteTime = functions.getAcquireTimeFunc(stats);
    if (gpuCompleteTime == Fence::SIGNAL_TIME_PENDING) {
        gpuCompleteTime = -1;
    }
    uint64_t frameNumber = functions.getFrameNumberFunc(stats);

    FrameInfo* frameInfo = instance->getFrameInfoFromLast4(frameNumber, surfaceControlId);

    if (frameInfo != nullptr) {
        std::scoped_lock lock(instance->mFrameMetricsReporterMutex);
        frameInfo->set(FrameInfoIndex::FrameCompleted) =
                std::max(gpuCompleteTime, frameInfo->get(FrameInfoIndex::SwapBuffersCompleted));
        frameInfo->set(FrameInfoIndex::GpuCompleted) = std::max(
                gpuCompleteTime, frameInfo->get(FrameInfoIndex::CommandSubmissionCompleted));
        instance->mJankTracker.finishFrame(*frameInfo, instance->mFrameMetricsReporter,
                                           frameNumber, surfaceControlId);
    }
}

// Called by choreographer to do an RT-driven animation
void CanvasContext::doFrame() {
    if (!mRenderPipeline->isSurfaceReady()) return;
    prepareAndDraw(nullptr);
}

SkISize CanvasContext::getNextFrameSize() const {
    static constexpr SkISize defaultFrameSize = {INT32_MAX, INT32_MAX};
    if (mNativeSurface == nullptr) {
        return defaultFrameSize;
    }
    ANativeWindow* anw = mNativeSurface->getNativeWindow();

    SkISize size;
    size.fWidth = ANativeWindow_getWidth(anw);
    size.fHeight = ANativeWindow_getHeight(anw);
    mRenderThread.cacheManager().notifyNextFrameSize(size.fWidth, size.fHeight);
    return size;
}

void CanvasContext::prepareAndDraw(RenderNode* node) {
    ATRACE_CALL();

    nsecs_t vsync = mRenderThread.timeLord().computeFrameTimeNanos();
    int64_t vsyncId = mRenderThread.timeLord().lastVsyncId();
    int64_t frameDeadline = mRenderThread.timeLord().lastFrameDeadline();
    int64_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    int64_t frameInfo[UI_THREAD_FRAME_INFO_SIZE];
    UiFrameInfoBuilder(frameInfo)
            .addFlag(FrameInfoFlags::RTAnimation)
            .setVsync(vsync, vsync, vsyncId, frameDeadline, frameInterval);

    TreeInfo info(TreeInfo::MODE_RT_ONLY, *this);
    prepareTree(info, frameInfo, systemTime(SYSTEM_TIME_MONOTONIC), node);
    if (info.out.canDrawThisFrame) {
        draw();
    } else {
        // wait on fences so tasks don't overlap next frame
        waitOnFences();
    }
}

void CanvasContext::markLayerInUse(RenderNode* node) {
    if (mPrefetchedLayers.erase(node)) {
        node->decStrong(nullptr);
    }
}

void CanvasContext::freePrefetchedLayers() {
    if (mPrefetchedLayers.size()) {
        for (auto& node : mPrefetchedLayers) {
            ALOGW("Incorrectly called buildLayer on View: %s, destroying layer...",
                  node->getName());
            node->destroyLayers();
            node->decStrong(nullptr);
        }
        mPrefetchedLayers.clear();
    }
}

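// Renders a node's layer ahead of time (the View.buildLayer() path). The node is kept
// strongly referenced in mPrefetchedLayers until it is drawn (markLayerInUse) or the
// prefetched layers are freed.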
void CanvasContext::buildLayer(RenderNode* node) {
    ATRACE_CALL();
    if (!mRenderPipeline->isContextReady()) return;

    // buildLayer() will leave the tree in an unknown state, so we must stop drawing
    stopDrawing();

    TreeInfo info(TreeInfo::MODE_FULL, *this);
    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.runAnimations = false;
    node->prepareTree(info);
    SkRect ignore;
    mDamageAccumulator.finish(&ignore);
    // Tickle the GENERIC property on node to mark it as dirty for damaging
    // purposes when the frame is actually drawn
    node->setPropertyFieldsDirty(RenderNode::GENERIC);

    mRenderPipeline->renderLayers(mLightGeometry, &mLayerUpdateQueue, mOpaque, mLightInfo);

    node->incStrong(nullptr);
    mPrefetchedLayers.insert(node);
}

void CanvasContext::destroyHardwareResources() {
    stopDrawing();
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyHardwareResources();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

DeferredLayerUpdater* CanvasContext::createTextureLayer() {
    return mRenderPipeline->createTextureLayer();
}

void CanvasContext::dumpFrames(int fd) {
    mJankTracker.dumpStats(fd);
    mJankTracker.dumpFrames(fd);
}

void CanvasContext::resetFrameStats() {
    mJankTracker.reset();
}

void CanvasContext::setName(const std::string&& name) {
    mJankTracker.setDescription(JankTrackerType::Window, std::move(name));
}

void CanvasContext::waitOnFences() {
    if (mFrameFences.size()) {
        ATRACE_CALL();
        for (auto& fence : mFrameFences) {
            fence.get();
        }
        mFrameFences.clear();
    }
}

void CanvasContext::enqueueFrameWork(std::function<void()>&& func) {
    mFrameFences.push_back(CommonPool::async(std::move(func)));
}

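// Lazily queries the window's next frame id and caches it for the rest of the frame; the
// cached value is what surface stats and frame-metrics observers are matched against.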
uint64_t CanvasContext::getFrameNumber() {
    // mFrameNumber is reset to 0 when the surface changes or we swap buffers
    if (mFrameNumber == 0 && mNativeSurface.get()) {
        mFrameNumber = ANativeWindow_getNextFrameId(mNativeSurface->getNativeWindow());
    }
    return mFrameNumber;
}

bool CanvasContext::surfaceRequiresRedraw() {
    if (!mNativeSurface) return false;
    if (mHaveNewSurface) return true;

    ANativeWindow* anw = mNativeSurface->getNativeWindow();
    const int width = ANativeWindow_getWidth(anw);
    const int height = ANativeWindow_getHeight(anw);

    return width != mLastFrameWidth || height != mLastFrameHeight;
}

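// Combines the accumulated damage with the frame's buffer age: a resized or brand-new
// surface forces a full redraw, otherwise the damage from the frames this buffer missed is
// unioned in from the swap history. Returns the dirty area of the window itself, while
// *dirty is expanded to the area of the buffer that must be repainted.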
SkRect CanvasContext::computeDirtyRect(const Frame& frame, SkRect* dirty) {
    if (frame.width() != mLastFrameWidth || frame.height() != mLastFrameHeight) {
        // can't rely on prior content of window if viewport size changes
        dirty->setEmpty();
        mLastFrameWidth = frame.width();
        mLastFrameHeight = frame.height();
    } else if (mHaveNewSurface || frame.bufferAge() == 0) {
        // New surface needs a full draw
        dirty->setEmpty();
    } else {
        if (!dirty->isEmpty() &&
            !dirty->intersect(SkRect::MakeIWH(frame.width(), frame.height()))) {
            ALOGW("Dirty " RECT_STRING " doesn't intersect with 0 0 %d %d ?", SK_RECT_ARGS(*dirty),
                  frame.width(), frame.height());
            dirty->setEmpty();
        }
        profiler().unionDirty(dirty);
    }

    if (dirty->isEmpty()) {
        dirty->setIWH(frame.width(), frame.height());
    }

    // At this point dirty is the area of the window to update. However,
    // the area of the frame we need to repaint is potentially different, so
    // stash the screen area for later
    SkRect windowDirty(*dirty);

    // If the buffer age is 0 we do a full-screen repaint (handled above)
    // If the buffer age is 1 the buffer contents are the same as they were
    // last frame so there's nothing to union() against
    // Therefore we only care about the > 1 case.
    if (frame.bufferAge() > 1) {
        if (frame.bufferAge() > (int)mSwapHistory.size()) {
            // We don't have enough history to handle this old of a buffer
            // Just do a full-draw
            dirty->setIWH(frame.width(), frame.height());
        } else {
            // At this point we haven't yet added the latest frame to the damage history
            // (that happens below), so union in the damage from the frames this buffer missed.
            for (int i = mSwapHistory.size() - 1;
                 i > ((int)mSwapHistory.size()) - frame.bufferAge(); i--) {
                dirty->join(mSwapHistory[i].damage);
            }
        }
    }

    return windowDirty;
}

CanvasContext* CanvasContext::getActiveContext() {
    return ScopedActiveContext::getActiveContext();
}

bool CanvasContext::mergeTransaction(ASurfaceTransaction* transaction, ASurfaceControl* control) {
    if (!mASurfaceTransactionCallback) return false;
    return std::invoke(mASurfaceTransactionCallback, reinterpret_cast<int64_t>(transaction),
                       reinterpret_cast<int64_t>(control), getFrameNumber());
}

void CanvasContext::prepareSurfaceControlForWebview() {
    if (mPrepareSurfaceControlForWebviewCallback) {
        std::invoke(mPrepareSurfaceControlForWebviewCallback);
    }
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */