/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CanvasContext.h"

#include <apex/window.h>
#include <fcntl.h>
#include <gui/TraceUtils.h>
#include <strings.h>
#include <sys/stat.h>
#include <ui/Fence.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include "../Properties.h"
#include "AnimationContext.h"
#include "Frame.h"
#include "LayerUpdateQueue.h"
#include "Properties.h"
#include "RenderThread.h"
#include "hwui/Canvas.h"
#include "pipeline/skia/SkiaCpuPipeline.h"
#include "pipeline/skia/SkiaGpuPipeline.h"
#include "pipeline/skia/SkiaOpenGLPipeline.h"
#include "pipeline/skia/SkiaVulkanPipeline.h"
#include "thread/CommonPool.h"
#include "utils/GLUtils.h"
#include "utils/TimeUtils.h"

#define LOG_FRAMETIME_MMA 0

#if LOG_FRAMETIME_MMA
static float sBenchMma = 0;
static int sFrameCount = 0;
static const float NANOS_PER_MILLIS_F = 1000000.0f;
#endif

namespace android {
namespace uirenderer {
namespace renderthread {

namespace {
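// RAII helper that records which CanvasContext is currently executing on the RenderThread for
// the duration of draw()/buildLayer(), so static entry points such as
// CanvasContext::getActiveContext() and CanvasContext::shouldDither() can find it.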
class ScopedActiveContext {
public:
    ScopedActiveContext(CanvasContext* context) { sActiveContext = context; }

    ~ScopedActiveContext() { sActiveContext = nullptr; }

    static CanvasContext* getActiveContext() { return sActiveContext; }

private:
    static CanvasContext* sActiveContext;
};

CanvasContext* ScopedActiveContext::sActiveContext = nullptr;
} /* namespace */

CanvasContext* CanvasContext::create(RenderThread& thread, bool translucent,
                                     RenderNode* rootRenderNode, IContextFactory* contextFactory,
                                     pid_t uiThreadId, pid_t renderThreadId) {
    auto renderType = Properties::getRenderPipelineType();

    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaOpenGLPipeline>(thread),
                                     uiThreadId, renderThreadId);
        case RenderPipelineType::SkiaVulkan:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaVulkanPipeline>(thread),
                                     uiThreadId, renderThreadId);
#ifndef __ANDROID__
        case RenderPipelineType::SkiaCpu:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaCpuPipeline>(thread),
                                     uiThreadId, renderThreadId);
#endif
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
    return nullptr;
}

void CanvasContext::invokeFunctor(const RenderThread& thread, Functor* functor) {
    ATRACE_CALL();
    auto renderType = Properties::getRenderPipelineType();
    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            skiapipeline::SkiaOpenGLPipeline::invokeFunctor(thread, functor);
            break;
        case RenderPipelineType::SkiaVulkan:
            skiapipeline::SkiaVulkanPipeline::invokeFunctor(thread, functor);
            break;
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
}

void CanvasContext::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    skiapipeline::SkiaGpuPipeline::prepareToDraw(thread, bitmap);
}

CanvasContext::CanvasContext(RenderThread& thread, bool translucent, RenderNode* rootRenderNode,
                             IContextFactory* contextFactory,
                             std::unique_ptr<IRenderPipeline> renderPipeline, pid_t uiThreadId,
                             pid_t renderThreadId)
        : mRenderThread(thread)
        , mGenerationID(0)
        , mOpaque(!translucent)
        , mAnimationContext(contextFactory->createAnimationContext(mRenderThread.timeLord()))
        , mJankTracker(&thread.globalProfileData())
        , mProfiler(mJankTracker.frames(), thread.timeLord().frameIntervalNanos())
        , mContentDrawBounds(0, 0, 0, 0)
        , mRenderPipeline(std::move(renderPipeline))
        , mHintSessionWrapper(std::make_shared<HintSessionWrapper>(uiThreadId, renderThreadId)) {
    mRenderThread.cacheManager().registerCanvasContext(this);
    mRenderThread.renderState().registerContextCallback(this);
    rootRenderNode->makeRoot();
    mRenderNodes.emplace_back(rootRenderNode);
    mProfiler.setDensity(DeviceInfo::getDensity());
}

CanvasContext::~CanvasContext() {
    destroy();
    for (auto& node : mRenderNodes) {
        node->clearRoot();
    }
    mRenderNodes.clear();
    mRenderThread.cacheManager().unregisterCanvasContext(this);
    mRenderThread.renderState().removeContextCallback(this);
    mHintSessionWrapper->destroy();
}

void CanvasContext::addRenderNode(RenderNode* node, bool placeFront) {
    int pos = placeFront ? 0 : static_cast<int>(mRenderNodes.size());
    node->makeRoot();
    mRenderNodes.emplace(mRenderNodes.begin() + pos, node);
}

void CanvasContext::removeRenderNode(RenderNode* node) {
    node->clearRoot();
    mRenderNodes.erase(std::remove(mRenderNodes.begin(), mRenderNodes.end(), node),
                       mRenderNodes.end());
}

void CanvasContext::destroy() {
    stopDrawing();
    setHardwareBuffer(nullptr);
    setSurface(nullptr);
    setSurfaceControl(nullptr);
    freePrefetchedLayers();
    destroyHardwareResources();
    mAnimationContext->destroy();
    mRenderThread.cacheManager().onContextStopped(this);
    mHintSessionWrapper->delayedDestroy(mRenderThread, 2_s, mHintSessionWrapper);
}

static void setBufferCount(ANativeWindow* window) {
    int query_value;
    int err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &query_value);
    if (err != 0 || query_value < 0) {
        ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err, query_value);
        return;
    }
    auto min_undequeued_buffers = static_cast<uint32_t>(query_value);

    // We only need to set min_undequeued + 2 because the renderahead amount was already
    // factored into the query for min_undequeued
    int bufferCount = min_undequeued_buffers + 2;
    native_window_set_buffer_count(window, bufferCount);
}

void CanvasContext::setHardwareBuffer(AHardwareBuffer* buffer) {
#ifdef __ANDROID__
    if (mHardwareBuffer) {
        AHardwareBuffer_release(mHardwareBuffer);
        mHardwareBuffer = nullptr;
    }

    if (buffer) {
        AHardwareBuffer_acquire(buffer);
        mHardwareBuffer = buffer;
    }
    mRenderPipeline->setHardwareBuffer(mHardwareBuffer);
#endif
}

void CanvasContext::setSurface(ANativeWindow* window, bool enableTimeout) {
    ATRACE_CALL();

    startHintSession();
    if (window) {
        mNativeSurface = std::make_unique<ReliableSurface>(window);
        mNativeSurface->init();
        if (enableTimeout) {
            // TODO: Fix error handling & re-shorten timeout
            ANativeWindow_setDequeueTimeout(window, 4000_ms);
        }
    } else {
        mNativeSurface = nullptr;
    }
    setupPipelineSurface();
}

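// Swaps in the ASurfaceControl backing this window. A non-null control is acquired and a
// surface-stats listener is registered (see onSurfaceStatsAvailable) so present/GPU completion
// times can be folded back into frame metrics; passing null releases the previous control and
// clears the WebView/transaction callbacks.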
void CanvasContext::setSurfaceControl(ASurfaceControl* surfaceControl) {
    if (surfaceControl == mSurfaceControl) return;

    auto funcs = mRenderThread.getASurfaceControlFunctions();

    if (surfaceControl == nullptr) {
        setASurfaceTransactionCallback(nullptr);
        setPrepareSurfaceControlForWebviewCallback(nullptr);
    }

    if (mSurfaceControl != nullptr) {
        funcs.unregisterListenerFunc(this, &onSurfaceStatsAvailable);
        funcs.releaseFunc(mSurfaceControl);
    }
    mSurfaceControl = surfaceControl;
    mSurfaceControlGenerationId++;
    mExpectSurfaceStats = surfaceControl != nullptr;
    if (mExpectSurfaceStats) {
        funcs.acquireFunc(mSurfaceControl);
        funcs.registerListenerFunc(surfaceControl, mSurfaceControlGenerationId, this,
                                   &onSurfaceStatsAvailable);
    }
}

void CanvasContext::setupPipelineSurface() {
    bool hasSurface = mRenderPipeline->setSurface(
            mNativeSurface ? mNativeSurface->getNativeWindow() : nullptr, mSwapBehavior);

    if (mNativeSurface && !mNativeSurface->didSetExtraBuffers()) {
        setBufferCount(mNativeSurface->getNativeWindow());
    }

    mFrameNumber = 0;

    if (mNativeSurface != nullptr && hasSurface) {
        mHaveNewSurface = true;
        mSwapHistory.clear();
        // Enable frame stats after the surface has been bound to the appropriate graphics API.
        // Order is important when new and old surfaces are the same, because old surface has
        // its frame stats disabled automatically.
        native_window_enable_frame_timestamps(mNativeSurface->getNativeWindow(), true);
        native_window_set_scaling_mode(mNativeSurface->getNativeWindow(),
                                       NATIVE_WINDOW_SCALING_MODE_FREEZE);
    } else {
        mRenderThread.removeFrameCallback(this);
        mGenerationID++;
    }
}

void CanvasContext::setSwapBehavior(SwapBehavior swapBehavior) {
    mSwapBehavior = swapBehavior;
}

bool CanvasContext::pauseSurface() {
    mGenerationID++;
    return mRenderThread.removeFrameCallback(this);
}

void CanvasContext::setStopped(bool stopped) {
    if (mStopped != stopped) {
        mStopped = stopped;
        if (mStopped) {
            mGenerationID++;
            mRenderThread.removeFrameCallback(this);
            mRenderPipeline->onStop();
            mRenderThread.cacheManager().onContextStopped(this);
        } else if (mIsDirty && hasOutputTarget()) {
            mRenderThread.postFrameCallback(this);
        }
    }
}

void CanvasContext::allocateBuffers() {
    if (mNativeSurface && Properties::isDrawingEnabled()) {
        ANativeWindow_tryAllocateBuffers(mNativeSurface->getNativeWindow());
    }
}

void CanvasContext::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mLightInfo.ambientShadowAlpha = ambientShadowAlpha;
    mLightInfo.spotShadowAlpha = spotShadowAlpha;
}

void CanvasContext::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mLightGeometry.center = lightCenter;
    mLightGeometry.radius = lightRadius;
}

void CanvasContext::setOpaque(bool opaque) {
    mOpaque = opaque;
}

float CanvasContext::setColorMode(ColorMode mode) {
    if (mode != mColorMode) {
        mColorMode = mode;
        mRenderPipeline->setSurfaceColorProperties(mode);
        setupPipelineSurface();
    }
    switch (mColorMode) {
        case ColorMode::Hdr:
            return Properties::maxHdrHeadroomOn8bit;
        case ColorMode::Hdr10:
            return 10.f;
        default:
            return 1.f;
    }
}

float CanvasContext::targetSdrHdrRatio() const {
    if (mColorMode == ColorMode::Hdr || mColorMode == ColorMode::Hdr10) {
        return mTargetSdrHdrRatio;
    } else {
        return 1.f;
    }
}

void CanvasContext::setTargetSdrHdrRatio(float ratio) {
    if (mTargetSdrHdrRatio == ratio) return;

    mTargetSdrHdrRatio = ratio;
    mRenderPipeline->setTargetSdrHdrRatio(ratio);
    // We don't actually have a new surface, but we need to behave as if we do. Specifically we
    // need to ensure all buffers in the swapchain are fully re-rendered, as any partial updates
    // to them would result in mixed target white points, which looks really bad & flickery.
    mHaveNewSurface = true;
}

bool CanvasContext::makeCurrent() {
    if (mStopped) return false;

    auto result = mRenderPipeline->makeCurrent();
    switch (result) {
        case MakeCurrentResult::AlreadyCurrent:
            return true;
        case MakeCurrentResult::Failed:
            mHaveNewSurface = true;
            setSurface(nullptr);
            return false;
        case MakeCurrentResult::Succeeded:
            mHaveNewSurface = true;
            return true;
        default:
            LOG_ALWAYS_FATAL("unexpected result %d from IRenderPipeline::makeCurrent",
                             (int32_t)result);
    }

    return true;
}

static std::optional<SkippedFrameReason> wasSkipped(FrameInfo* info) {
    if (info) return info->getSkippedFrameReason();
    return std::nullopt;
}

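// Heuristic for detecting a "stuffed" swapchain: with a full window of swap history, the chain
// is only considered stuffed if every recorded frame spent a long time (>= 6ms) in dequeue or
// queue and consecutive swaps completed within a few frame intervals of each other. A large gap
// means a frame was effectively already dropped, so the queue is treated as healthy.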
bool CanvasContext::isSwapChainStuffed() {
    static const auto SLOW_THRESHOLD = 6_ms;

    if (mSwapHistory.size() != mSwapHistory.capacity()) {
        // We want at least 3 frames of history before attempting to
        // guess if the queue is stuffed
        return false;
    }
    nsecs_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    auto& swapA = mSwapHistory[0];

    // Was there a happy queue & dequeue time? If so, don't
    // consider it stuffed
    if (swapA.dequeueDuration < SLOW_THRESHOLD && swapA.queueDuration < SLOW_THRESHOLD) {
        return false;
    }

    for (size_t i = 1; i < mSwapHistory.size(); i++) {
        auto& swapB = mSwapHistory[i];

        // If there's a multi-frameInterval gap we effectively already dropped a frame,
        // so consider the queue healthy.
        if (std::abs(swapA.swapCompletedTime - swapB.swapCompletedTime) > frameInterval * 3) {
            return false;
        }

        // Was there a happy queue & dequeue time? If so, don't
        // consider it stuffed
        if (swapB.dequeueDuration < SLOW_THRESHOLD && swapB.queueDuration < SLOW_THRESHOLD) {
            return false;
        }

        swapA = swapB;
    }

    // All signs point to a stuffed swap chain
    ATRACE_NAME("swap chain stuffed");
    return true;
}

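// Syncs the UI thread's recorded tree into the RenderThread copy: imports the UI-side frame
// info, runs animators, prepares each render node (MODE_FULL for the primary target, RT-only
// for the others) and decides up front whether this frame must be skipped (no output target,
// no buffer available, or this vsync pulse was already drawn).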
void CanvasContext::prepareTree(TreeInfo& info, int64_t* uiFrameInfo, int64_t syncQueued,
                                RenderNode* target) {
    mRenderThread.removeFrameCallback(this);

    // Make sure we have a valid device info
    if (!DeviceInfo::get()->hasMaxTextureSize()) {
        (void)mRenderThread.requireGrContext();
    }

    // If the previous frame was dropped we don't need to hold onto it, so
    // just keep using the previous frame's structure instead
    const auto reason = wasSkipped(mCurrentFrameInfo);
    if (reason.has_value()) {
        // Use the oldest skipped frame in case we skip more than a single frame
        if (!mSkippedFrameInfo) {
            switch (*reason) {
                case SkippedFrameReason::AlreadyDrawn:
                case SkippedFrameReason::NoBuffer:
                case SkippedFrameReason::NoOutputTarget:
                    mSkippedFrameInfo.emplace();
                    mSkippedFrameInfo->vsyncId =
                            mCurrentFrameInfo->get(FrameInfoIndex::FrameTimelineVsyncId);
                    mSkippedFrameInfo->startTime =
                            mCurrentFrameInfo->get(FrameInfoIndex::FrameStartTime);
                    break;
                case SkippedFrameReason::DrawingOff:
                case SkippedFrameReason::ContextIsStopped:
                case SkippedFrameReason::NothingToDraw:
                    // Do not report those as skipped frames as there was no frame expected to
                    // be drawn
                    break;
            }
        }
    } else {
        mCurrentFrameInfo = mJankTracker.startFrame();
        mSkippedFrameInfo.reset();
    }

    mCurrentFrameInfo->importUiThreadInfo(uiFrameInfo);
    mCurrentFrameInfo->set(FrameInfoIndex::SyncQueued) = syncQueued;
    mCurrentFrameInfo->markSyncStart();

    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.damageGenerationId = mDamageId++;
    info.out.skippedFrameReason = std::nullopt;

    mAnimationContext->startFrame(info.mode);
    for (const sp<RenderNode>& node : mRenderNodes) {
        // Only the primary target node will be drawn full - all other nodes would get drawn in
        // real time mode. In case of a window, the primary node is the window content and the
        // other node(s) are non client / filler nodes.
        info.mode = (node.get() == target ? TreeInfo::MODE_FULL : TreeInfo::MODE_RT_ONLY);
        node->prepareTree(info);
        GL_CHECKPOINT(MODERATE);
    }
    mAnimationContext->runRemainingAnimations(info);
    GL_CHECKPOINT(MODERATE);

    freePrefetchedLayers();
    GL_CHECKPOINT(MODERATE);

    mIsDirty = true;

    if (CC_UNLIKELY(!hasOutputTarget())) {
        info.out.skippedFrameReason = SkippedFrameReason::NoOutputTarget;
        mCurrentFrameInfo->setSkippedFrameReason(*info.out.skippedFrameReason);
        return;
    }

    if (CC_LIKELY(mSwapHistory.size() && !info.forceDrawFrame)) {
        nsecs_t latestVsync = mRenderThread.timeLord().latestVsync();
        SwapHistory& lastSwap = mSwapHistory.back();
        nsecs_t vsyncDelta = std::abs(lastSwap.vsyncTime - latestVsync);
        // The slight fudge-factor is to deal with cases where
        // the vsync was estimated due to being slow handling the signal.
        // See the logic in TimeLord#computeFrameTimeNanos or in
        // Choreographer.java for details on when this happens
        if (vsyncDelta < 2_ms) {
            // Already drew for this vsync pulse, UI draw request missed
            // the deadline for RT animations
            info.out.skippedFrameReason = SkippedFrameReason::AlreadyDrawn;
        }
    } else {
        info.out.skippedFrameReason = std::nullopt;
    }

    // TODO: Do we need to abort out if the backdrop is added but not ready? Should that even
    // be an allowable combination?
    if (mRenderNodes.size() > 2 && !mRenderNodes[1]->isRenderable()) {
        info.out.skippedFrameReason = SkippedFrameReason::NothingToDraw;
    }

    if (!info.out.skippedFrameReason) {
        int err = mNativeSurface->reserveNext();
        if (err != OK) {
            info.out.skippedFrameReason = SkippedFrameReason::NoBuffer;
            mCurrentFrameInfo->setSkippedFrameReason(*info.out.skippedFrameReason);
            ALOGW("reserveNext failed, error = %d (%s)", err, strerror(-err));
            if (err != TIMED_OUT) {
                // A timed out surface can still recover, but assume others are permanently dead.
                setSurface(nullptr);
                return;
            }
        }
    } else {
        mCurrentFrameInfo->setSkippedFrameReason(*info.out.skippedFrameReason);
    }

    bool postedFrameCallback = false;
    if (info.out.hasAnimations || info.out.skippedFrameReason) {
        if (CC_UNLIKELY(!Properties::enableRTAnimations)) {
            info.out.requiresUiRedraw = true;
        }
        if (!info.out.requiresUiRedraw) {
            // If animationsNeedsRedraw is set don't bother posting for an RT anim
            // as we will just end up fighting the UI thread.
            mRenderThread.postFrameCallback(this);
            postedFrameCallback = true;
        }
    }

    if (!postedFrameCallback &&
        info.out.animatedImageDelay != TreeInfo::Out::kNoAnimatedImageDelay) {
        // Subtract the time of one frame so it can be displayed on time.
        const nsecs_t kFrameTime = mRenderThread.timeLord().frameIntervalNanos();
        if (info.out.animatedImageDelay <= kFrameTime) {
            mRenderThread.postFrameCallback(this);
        } else {
            const auto delay = info.out.animatedImageDelay - kFrameTime;
            int genId = mGenerationID;
            mRenderThread.queue().postDelayed(delay, [this, genId]() {
                if (mGenerationID == genId) {
                    mRenderThread.postFrameCallback(this);
                }
            });
        }
    }
}

void CanvasContext::stopDrawing() {
    mRenderThread.removeFrameCallback(this);
    mAnimationContext->pauseAnimators();
    mGenerationID++;
}

void CanvasContext::notifyFramePending() {
    ATRACE_CALL();
    mRenderThread.pushBackFrameCallback(this);
    sendLoadResetHint();
}

Frame CanvasContext::getFrame() {
    if (mHardwareBuffer != nullptr) {
        return {mBufferParams.getLogicalWidth(), mBufferParams.getLogicalHeight(), 0};
    } else {
        return mRenderPipeline->getFrame();
    }
}

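// Renders one frame: resolves the accumulated damage, draws through the render pipeline, swaps
// buffers, records swap history and frame metrics, and reports the frame's work duration to the
// performance hint session. Frames that end up with nothing to draw still flush pending texture
// uploads and invoke the frame-commit callbacks so waiters aren't left hanging.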
void CanvasContext::draw(bool solelyTextureViewUpdates) {
#ifdef __ANDROID__
    if (auto grContext = getGrContext()) {
        if (grContext->abandoned()) {
            if (grContext->isDeviceLost()) {
                LOG_ALWAYS_FATAL("Lost GPU device unexpectedly");
                return;
            }
            LOG_ALWAYS_FATAL("GrContext is abandoned at start of CanvasContext::draw");
            return;
        }
    }
#endif
    SkRect dirty;
    mDamageAccumulator.finish(&dirty);

    // reset syncDelayDuration each time we draw
    nsecs_t syncDelayDuration = mSyncDelayDuration;
    nsecs_t idleDuration = mIdleDuration;
    mSyncDelayDuration = 0;
    mIdleDuration = 0;

    const auto skippedFrameReason = [&]() -> std::optional<SkippedFrameReason> {
        if (!Properties::isDrawingEnabled()) {
            return SkippedFrameReason::DrawingOff;
        }

        if (dirty.isEmpty() && Properties::skipEmptyFrames && !surfaceRequiresRedraw()) {
            return SkippedFrameReason::NothingToDraw;
        }

        return std::nullopt;
    }();
    if (skippedFrameReason) {
        mCurrentFrameInfo->setSkippedFrameReason(*skippedFrameReason);

#ifdef __ANDROID__
        if (auto grContext = getGrContext()) {
            // Submit to ensure that any texture uploads complete and Skia can
            // free its staging buffers.
            grContext->flushAndSubmit();
        }
#endif

        // Notify the callbacks, even if there's nothing to draw so they aren't waiting
        // indefinitely
        waitOnFences();
        for (auto& func : mFrameCommitCallbacks) {
            std::invoke(func, false /* didProduceBuffer */);
        }
        mFrameCommitCallbacks.clear();
        return;
    }

    ScopedActiveContext activeContext(this);
    mCurrentFrameInfo->set(FrameInfoIndex::FrameInterval) =
            mRenderThread.timeLord().frameIntervalNanos();

    mCurrentFrameInfo->markIssueDrawCommandsStart();

    Frame frame = getFrame();

    SkRect windowDirty = computeDirtyRect(frame, &dirty);

    ATRACE_FORMAT("Drawing " RECT_STRING, SK_RECT_ARGS(dirty));

    IRenderPipeline::DrawResult drawResult;
    {
        // FrameInfoVisualizer accesses the frame events, which cannot be mutated mid-draw
        // or it can lead to memory corruption.
        drawResult = mRenderPipeline->draw(
                frame, windowDirty, dirty, mLightGeometry, &mLayerUpdateQueue, mContentDrawBounds,
                mOpaque, mLightInfo, mRenderNodes, &(profiler()), mBufferParams, profilerLock());
    }

    uint64_t frameCompleteNr = getFrameNumber();

    waitOnFences();

    if (mNativeSurface) {
        // TODO(b/165985262): measure performance impact
        const auto vsyncId = mCurrentFrameInfo->get(FrameInfoIndex::FrameTimelineVsyncId);
        if (vsyncId != UiFrameInfoBuilder::INVALID_VSYNC_ID) {
            const auto inputEventId =
                    static_cast<int32_t>(mCurrentFrameInfo->get(FrameInfoIndex::InputEventId));
            ATRACE_FORMAT(
                    "frameTimelineInfo(frameNumber=%llu, vsyncId=%lld, inputEventId=0x%" PRIx32 ")",
                    frameCompleteNr, vsyncId, inputEventId);
            const ANativeWindowFrameTimelineInfo ftl = {
                    .frameNumber = frameCompleteNr,
                    .frameTimelineVsyncId = vsyncId,
                    .inputEventId = inputEventId,
                    .startTimeNanos = mCurrentFrameInfo->get(FrameInfoIndex::FrameStartTime),
                    .useForRefreshRateSelection = solelyTextureViewUpdates,
                    .skippedFrameVsyncId = mSkippedFrameInfo ? mSkippedFrameInfo->vsyncId
                                                             : UiFrameInfoBuilder::INVALID_VSYNC_ID,
                    .skippedFrameStartTimeNanos =
                            mSkippedFrameInfo ? mSkippedFrameInfo->startTime : 0,
            };
            native_window_set_frame_timeline_info(mNativeSurface->getNativeWindow(), ftl);
        }
    }

    bool requireSwap = false;
    bool didDraw = false;

    int error = OK;
    bool didSwap = mRenderPipeline->swapBuffers(frame, drawResult, windowDirty, mCurrentFrameInfo,
                                                &requireSwap);

    mCurrentFrameInfo->set(FrameInfoIndex::CommandSubmissionCompleted) = std::max(
            drawResult.commandSubmissionTime, mCurrentFrameInfo->get(FrameInfoIndex::SwapBuffers));

    mIsDirty = false;

    if (requireSwap) {
        didDraw = true;
        // Handle any swapchain errors
        error = mNativeSurface->getAndClearError();
        if (error == TIMED_OUT) {
            // Try again
            mRenderThread.postFrameCallback(this);
            // But since this frame didn't happen, we need to mark full damage in the swap
            // history
            didDraw = false;

        } else if (error != OK || !didSwap) {
            // Unknown error, abandon the surface
            setSurface(nullptr);
            didDraw = false;
        }

        SwapHistory& swap = mSwapHistory.next();
        if (didDraw) {
            swap.damage = windowDirty;
        } else {
            float max = static_cast<float>(INT_MAX);
            swap.damage = SkRect::MakeWH(max, max);
        }
        swap.swapCompletedTime = systemTime(SYSTEM_TIME_MONOTONIC);
        swap.vsyncTime = mRenderThread.timeLord().latestVsync();
        if (didDraw) {
            nsecs_t dequeueStart =
                    ANativeWindow_getLastDequeueStartTime(mNativeSurface->getNativeWindow());
            if (dequeueStart < mCurrentFrameInfo->get(FrameInfoIndex::SyncStart)) {
                // Ignoring dequeue duration as it happened prior to frame render start
                // and thus is not part of the frame.
                swap.dequeueDuration = 0;
            } else {
                swap.dequeueDuration =
                        ANativeWindow_getLastDequeueDuration(mNativeSurface->getNativeWindow());
            }
            swap.queueDuration =
                    ANativeWindow_getLastQueueDuration(mNativeSurface->getNativeWindow());
        } else {
            swap.dequeueDuration = 0;
            swap.queueDuration = 0;
        }
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = swap.dequeueDuration;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = swap.queueDuration;
        mHaveNewSurface = false;
        mFrameNumber = 0;
    } else {
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = 0;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = 0;
    }

    mCurrentFrameInfo->markSwapBuffersCompleted();

#if LOG_FRAMETIME_MMA
    float thisFrame = mCurrentFrameInfo->duration(FrameInfoIndex::IssueDrawCommandsStart,
                                                  FrameInfoIndex::FrameCompleted) /
                      NANOS_PER_MILLIS_F;
    if (sFrameCount) {
        sBenchMma = ((9 * sBenchMma) + thisFrame) / 10;
    } else {
        sBenchMma = thisFrame;
    }
    if (++sFrameCount == 10) {
        sFrameCount = 1;
        ALOGD("Average frame time: %.4f", sBenchMma);
    }
#endif

    if (didSwap) {
        for (auto& func : mFrameCommitCallbacks) {
            std::invoke(func, true /* didProduceBuffer */);
        }
        mFrameCommitCallbacks.clear();
    }

    if (requireSwap) {
        if (mExpectSurfaceStats) {
            reportMetricsWithPresentTime();
            {  // acquire lock
                std::lock_guard lock(mLastFrameMetricsInfosMutex);
                FrameMetricsInfo& next = mLastFrameMetricsInfos.next();
                next.frameInfo = mCurrentFrameInfo;
                next.frameNumber = frameCompleteNr;
                next.surfaceId = mSurfaceControlGenerationId;
            }  // release lock
        } else {
            mCurrentFrameInfo->markFrameCompleted();
            mCurrentFrameInfo->set(FrameInfoIndex::GpuCompleted)
                    = mCurrentFrameInfo->get(FrameInfoIndex::FrameCompleted);
            std::scoped_lock lock(mFrameInfoMutex);
            mJankTracker.finishFrame(*mCurrentFrameInfo, mFrameMetricsReporter, frameCompleteNr,
                                     mSurfaceControlGenerationId);
        }
    }

    int64_t intendedVsync = mCurrentFrameInfo->get(FrameInfoIndex::IntendedVsync);
    int64_t frameDeadline = mCurrentFrameInfo->get(FrameInfoIndex::FrameDeadline);
    int64_t dequeueBufferDuration = mCurrentFrameInfo->get(FrameInfoIndex::DequeueBufferDuration);

    mHintSessionWrapper->updateTargetWorkDuration(frameDeadline - intendedVsync);

    if (didDraw) {
        int64_t frameStartTime = mCurrentFrameInfo->get(FrameInfoIndex::FrameStartTime);
        int64_t frameDuration = systemTime(SYSTEM_TIME_MONOTONIC) - frameStartTime;
        int64_t actualDuration = frameDuration -
                                 (std::min(syncDelayDuration, mLastDequeueBufferDuration)) -
                                 dequeueBufferDuration - idleDuration;
        mHintSessionWrapper->reportActualWorkDuration(actualDuration);
        mHintSessionWrapper->setActiveFunctorThreads(
                WebViewFunctorManager::instance().getRenderingThreadsForActiveFunctors());
    }

    mLastDequeueBufferDuration = dequeueBufferDuration;

    mRenderThread.cacheManager().onFrameCompleted();
    return;
}

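// Once mLastFrameMetricsInfos is full, reports metrics for its oldest entry, attaching the
// display present time queried from the native window so observers see a complete timeline a
// few frames after the fact.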
void CanvasContext::reportMetricsWithPresentTime() {
    {  // acquire lock
        std::scoped_lock lock(mFrameInfoMutex);
        if (mFrameMetricsReporter == nullptr) {
            return;
        }
    }  // release lock
    if (mNativeSurface == nullptr) {
        return;
    }
    ATRACE_CALL();
    FrameInfo* forthBehind;
    int64_t frameNumber;
    int32_t surfaceControlId;

    {  // acquire lock
        std::scoped_lock lock(mLastFrameMetricsInfosMutex);
        if (mLastFrameMetricsInfos.size() != mLastFrameMetricsInfos.capacity()) {
            // Not enough frames yet
            return;
        }
        auto frameMetricsInfo = mLastFrameMetricsInfos.front();
        forthBehind = frameMetricsInfo.frameInfo;
        frameNumber = frameMetricsInfo.frameNumber;
        surfaceControlId = frameMetricsInfo.surfaceId;
    }  // release lock

    nsecs_t presentTime = 0;
    native_window_get_frame_timestamps(
            mNativeSurface->getNativeWindow(), frameNumber, nullptr /*outRequestedPresentTime*/,
            nullptr /*outAcquireTime*/, nullptr /*outLatchTime*/,
            nullptr /*outFirstRefreshStartTime*/, nullptr /*outLastRefreshStartTime*/,
            nullptr /*outGpuCompositionDoneTime*/, &presentTime, nullptr /*outDequeueReadyTime*/,
            nullptr /*outReleaseTime*/);

    forthBehind->set(FrameInfoIndex::DisplayPresentTime) = presentTime;
    {  // acquire lock
        std::scoped_lock lock(mFrameInfoMutex);
        if (mFrameMetricsReporter != nullptr) {
            mFrameMetricsReporter->reportFrameMetrics(forthBehind->data(), true /*hasPresentTime*/,
                                                      frameNumber, surfaceControlId);
        }
    }  // release lock
}

void CanvasContext::addFrameMetricsObserver(FrameMetricsObserver* observer) {
    std::scoped_lock lock(mFrameInfoMutex);
    if (mFrameMetricsReporter.get() == nullptr) {
        mFrameMetricsReporter.reset(new FrameMetricsReporter());
    }

863 // BufferQueueProducer on the rendner thread but are still pending the callback to report their
864 // their frame metrics.
Pablo Gamito35b80cd2021-08-24 11:03:51 +0200865 uint64_t nextFrameNumber = getFrameNumber();
Pablo Gamito88660d72021-08-09 14:37:56 +0000866 observer->reportMetricsFrom(nextFrameNumber, mSurfaceControlGenerationId);
867 mFrameMetricsReporter->addObserver(observer);
868}
869
870void CanvasContext::removeFrameMetricsObserver(FrameMetricsObserver* observer) {
Jiang Tian8e6a8462024-01-09 11:52:05 +0800871 std::scoped_lock lock(mFrameInfoMutex);
Pablo Gamito88660d72021-08-09 14:37:56 +0000872 if (mFrameMetricsReporter.get() != nullptr) {
873 mFrameMetricsReporter->removeObserver(observer);
874 if (!mFrameMetricsReporter->hasObservers()) {
875 mFrameMetricsReporter.reset(nullptr);
Siarhei Vishniakou07d35cb2021-07-03 02:22:12 +0000876 }
877 }
Pablo Gamito88660d72021-08-09 14:37:56 +0000878}
879
Tang Lee6ba51252024-08-13 15:06:28 +0800880FrameInfo* CanvasContext::getFrameInfoFromLastFew(uint64_t frameNumber, uint32_t surfaceControlId) {
881 std::scoped_lock lock(mLastFrameMetricsInfosMutex);
882 for (size_t i = 0; i < mLastFrameMetricsInfos.size(); i++) {
883 if (mLastFrameMetricsInfos[i].frameNumber == frameNumber &&
884 mLastFrameMetricsInfos[i].surfaceId == surfaceControlId) {
885 return mLastFrameMetricsInfos[i].frameInfo;
Pablo Gamito88660d72021-08-09 14:37:56 +0000886 }
887 }
888
Siarhei Vishniakou07d35cb2021-07-03 02:22:12 +0000889 return nullptr;
Siarhei Vishniakouf0cf18d2021-02-26 00:15:04 +0000890}
891
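// Static callback registered on the ASurfaceControl (see setSurfaceControl): when surface stats
// for a frame become available, the acquire/GPU-complete time is back-filled into the matching
// FrameInfo from mLastFrameMetricsInfos and jank tracking for that frame is finished.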
void CanvasContext::onSurfaceStatsAvailable(void* context, int32_t surfaceControlId,
                                            ASurfaceControlStats* stats) {
    auto* instance = static_cast<CanvasContext*>(context);

    const ASurfaceControlFunctions& functions =
            instance->mRenderThread.getASurfaceControlFunctions();

    nsecs_t gpuCompleteTime = functions.getAcquireTimeFunc(stats);
    if (gpuCompleteTime == Fence::SIGNAL_TIME_PENDING) {
        gpuCompleteTime = -1;
    }
    uint64_t frameNumber = functions.getFrameNumberFunc(stats);

    FrameInfo* frameInfo = instance->getFrameInfoFromLastFew(frameNumber, surfaceControlId);

    if (frameInfo != nullptr) {
        std::scoped_lock lock(instance->mFrameInfoMutex);
        frameInfo->set(FrameInfoIndex::FrameCompleted) = std::max(gpuCompleteTime,
                frameInfo->get(FrameInfoIndex::SwapBuffersCompleted));
        frameInfo->set(FrameInfoIndex::GpuCompleted) = std::max(
                gpuCompleteTime, frameInfo->get(FrameInfoIndex::CommandSubmissionCompleted));
        instance->mJankTracker.finishFrame(*frameInfo, instance->mFrameMetricsReporter, frameNumber,
                                           surfaceControlId);
    }
}

// Called by choreographer to do an RT-driven animation
void CanvasContext::doFrame() {
    if (!mRenderPipeline->isSurfaceReady()) return;
    mIdleDuration =
            systemTime(SYSTEM_TIME_MONOTONIC) - mRenderThread.timeLord().computeFrameTimeNanos();
    prepareAndDraw(nullptr);
}

SkISize CanvasContext::getNextFrameSize() const {
    static constexpr SkISize defaultFrameSize = {INT32_MAX, INT32_MAX};
    if (mNativeSurface == nullptr) {
        return defaultFrameSize;
    }
    ANativeWindow* anw = mNativeSurface->getNativeWindow();

    SkISize size;
    size.fWidth = ANativeWindow_getWidth(anw);
    size.fHeight = ANativeWindow_getHeight(anw);
    mRenderThread.cacheManager().notifyNextFrameSize(size.fWidth, size.fHeight);
    return size;
}

const SkM44& CanvasContext::getPixelSnapMatrix() const {
    return mRenderPipeline->getPixelSnapMatrix();
}

void CanvasContext::prepareAndDraw(RenderNode* node) {
    int64_t vsyncId = mRenderThread.timeLord().lastVsyncId();
    ATRACE_FORMAT("%s %" PRId64, __func__, vsyncId);

    nsecs_t vsync = mRenderThread.timeLord().computeFrameTimeNanos();
    int64_t frameDeadline = mRenderThread.timeLord().lastFrameDeadline();
    int64_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    int64_t frameInfo[UI_THREAD_FRAME_INFO_SIZE];
    UiFrameInfoBuilder(frameInfo)
            .addFlag(FrameInfoFlags::RTAnimation)
            .setVsync(vsync, vsync, vsyncId, frameDeadline, frameInterval);

    TreeInfo info(TreeInfo::MODE_RT_ONLY, *this);
    prepareTree(info, frameInfo, systemTime(SYSTEM_TIME_MONOTONIC), node);
    if (!info.out.skippedFrameReason) {
        draw(info.out.solelyTextureViewUpdates);
    } else {
        // wait on fences so tasks don't overlap next frame
        waitOnFences();
    }
}

void CanvasContext::markLayerInUse(RenderNode* node) {
    if (mPrefetchedLayers.erase(node)) {
        node->decStrong(nullptr);
    }
}

void CanvasContext::freePrefetchedLayers() {
    if (mPrefetchedLayers.size()) {
        for (auto& node : mPrefetchedLayers) {
            ALOGW("Incorrectly called buildLayer on View: %s, destroying layer...",
                  node->getName());
            node->destroyLayers();
            node->decStrong(nullptr);
        }
        mPrefetchedLayers.clear();
    }
}

void CanvasContext::buildLayer(RenderNode* node) {
    ATRACE_CALL();
    if (!mRenderPipeline->isContextReady()) return;

    // buildLayer() will leave the tree in an unknown state, so we must stop drawing
    stopDrawing();

    ScopedActiveContext activeContext(this);
    TreeInfo info(TreeInfo::MODE_FULL, *this);
    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.runAnimations = false;
    node->prepareTree(info);
    SkRect ignore;
    mDamageAccumulator.finish(&ignore);
    // Tickle the GENERIC property on node to mark it as dirty for damaging
    // purposes when the frame is actually drawn
    node->setPropertyFieldsDirty(RenderNode::GENERIC);

    mRenderPipeline->renderLayers(mLightGeometry, &mLayerUpdateQueue, mOpaque, mLightInfo);

    node->incStrong(nullptr);
    mPrefetchedLayers.insert(node);
}

void CanvasContext::destroyHardwareResources() {
    stopDrawing();
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyHardwareResources();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

void CanvasContext::onContextDestroyed() {
    // We don't want to destroyHardwareResources as that will invalidate display lists which
    // the client may not be expecting. Instead just purge all scratch resources
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyLayers();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

DeferredLayerUpdater* CanvasContext::createTextureLayer() {
    return mRenderPipeline->createTextureLayer();
}

void CanvasContext::dumpFrames(int fd) {
    mJankTracker.dumpStats(fd);
    mJankTracker.dumpFrames(fd);
}

void CanvasContext::resetFrameStats() {
    mJankTracker.reset();
}

void CanvasContext::setName(const std::string&& name) {
    mJankTracker.setDescription(JankTrackerType::Window, std::move(name));
}

void CanvasContext::waitOnFences() {
    if (mFrameFences.size()) {
        ATRACE_CALL();
        for (auto& fence : mFrameFences) {
            fence.get();
        }
        mFrameFences.clear();
    }
}

void CanvasContext::enqueueFrameWork(std::function<void()>&& func) {
    mFrameFences.push_back(CommonPool::async(std::move(func)));
}

uint64_t CanvasContext::getFrameNumber() {
    // mFrameNumber is reset to 0 when the surface changes or we swap buffers
    if (mFrameNumber == 0 && mNativeSurface.get()) {
        mFrameNumber = ANativeWindow_getNextFrameId(mNativeSurface->getNativeWindow());
    }
    return mFrameNumber;
}

bool CanvasContext::surfaceRequiresRedraw() {
    if (!mNativeSurface) return false;
    if (mHaveNewSurface) return true;

    ANativeWindow* anw = mNativeSurface->getNativeWindow();
    const int width = ANativeWindow_getWidth(anw);
    const int height = ANativeWindow_getHeight(anw);

    return width != mLastFrameWidth || height != mLastFrameHeight;
}

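// Computes how much of the frame must be repainted based on the buffer age reported by the
// swapchain: a new or resized surface forces a full redraw; otherwise *dirty is unioned with the
// damage recorded for however many frames back the reused buffer is. The returned rect is the
// window-space dirty area, while *dirty becomes the region that must be re-rendered.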
SkRect CanvasContext::computeDirtyRect(const Frame& frame, SkRect* dirty) {
    if (frame.width() != mLastFrameWidth || frame.height() != mLastFrameHeight) {
        // can't rely on prior content of window if viewport size changes
        dirty->setEmpty();
        mLastFrameWidth = frame.width();
        mLastFrameHeight = frame.height();
    } else if (mHaveNewSurface || frame.bufferAge() == 0) {
        // New surface needs a full draw
        dirty->setEmpty();
    } else {
        if (!dirty->isEmpty() && !dirty->intersect(SkRect::MakeIWH(frame.width(), frame.height()))) {
            ALOGW("Dirty " RECT_STRING " doesn't intersect with 0 0 %d %d ?", SK_RECT_ARGS(*dirty),
                  frame.width(), frame.height());
            dirty->setEmpty();
        }
        profiler().unionDirty(dirty);
    }

    if (dirty->isEmpty()) {
        dirty->setIWH(frame.width(), frame.height());
        return *dirty;
    }

    // At this point dirty is the area of the window to update. However,
    // the area of the frame we need to repaint is potentially different, so
    // stash the screen area for later
    SkRect windowDirty(*dirty);

    // If the buffer age is 0 we do a full-screen repaint (handled above)
    // If the buffer age is 1 the buffer contents are the same as they were
    // last frame so there's nothing to union() against
    // Therefore we only care about the > 1 case.
    if (frame.bufferAge() > 1) {
        if (frame.bufferAge() > (int)mSwapHistory.size()) {
            // We don't have enough history to handle this old of a buffer
            // Just do a full-draw
            dirty->setIWH(frame.width(), frame.height());
        } else {
            // At this point we haven't yet added the latest frame
            // to the damage history (happens below)
            // So we need to damage
            for (int i = mSwapHistory.size() - 1;
                 i > ((int)mSwapHistory.size()) - frame.bufferAge(); i--) {
                dirty->join(mSwapHistory[i].damage);
            }
        }
    }

    return windowDirty;
}

CanvasContext* CanvasContext::getActiveContext() {
    return ScopedActiveContext::getActiveContext();
}

bool CanvasContext::mergeTransaction(ASurfaceTransaction* transaction, ASurfaceControl* control) {
    if (!mASurfaceTransactionCallback) return false;
    return std::invoke(mASurfaceTransactionCallback, reinterpret_cast<int64_t>(transaction),
                       reinterpret_cast<int64_t>(control), getFrameNumber());
}

void CanvasContext::prepareSurfaceControlForWebview() {
    if (mPrepareSurfaceControlForWebviewCallback) {
        std::invoke(mPrepareSurfaceControlForWebviewCallback);
    }
}

void CanvasContext::sendLoadResetHint() {
    mHintSessionWrapper->sendLoadResetHint();
}

void CanvasContext::sendLoadIncreaseHint() {
    mHintSessionWrapper->sendLoadIncreaseHint();
}

void CanvasContext::setSyncDelayDuration(nsecs_t duration) {
    mSyncDelayDuration = duration;
}

void CanvasContext::startHintSession() {
    mHintSessionWrapper->init();
}

bool CanvasContext::shouldDither() {
    CanvasContext* self = getActiveContext();
    if (!self) return false;
    return self->mColorMode != ColorMode::Default;
}

void CanvasContext::visitAllRenderNodes(std::function<void(const RenderNode&)> func) const {
    for (auto node : mRenderNodes) {
        node->visit(func);
    }
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */