/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CanvasContext.h"

#include <apex/window.h>
#include <fcntl.h>
#include <strings.h>
#include <sys/stat.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include "../Properties.h"
#include "AnimationContext.h"
#include "Frame.h"
#include "LayerUpdateQueue.h"
#include "Properties.h"
#include "RenderThread.h"
#include "hwui/Canvas.h"
#include "pipeline/skia/SkiaOpenGLPipeline.h"
#include "pipeline/skia/SkiaPipeline.h"
#include "pipeline/skia/SkiaVulkanPipeline.h"
#include "thread/CommonPool.h"
#include "utils/GLUtils.h"
#include "utils/TimeUtils.h"
#include "utils/TraceUtils.h"

#define TRIM_MEMORY_COMPLETE 80
#define TRIM_MEMORY_UI_HIDDEN 20

#define LOG_FRAMETIME_MMA 0

#if LOG_FRAMETIME_MMA
static float sBenchMma = 0;
static int sFrameCount = 0;
static const float NANOS_PER_MILLIS_F = 1000000.0f;
#endif

namespace android {
namespace uirenderer {
namespace renderthread {

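// Factory: instantiates a CanvasContext backed by either the Skia GL or the
// Skia Vulkan pipeline, depending on the configured render pipeline type.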
CanvasContext* CanvasContext::create(RenderThread& thread, bool translucent,
                                     RenderNode* rootRenderNode, IContextFactory* contextFactory) {
    auto renderType = Properties::getRenderPipelineType();

    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaOpenGLPipeline>(thread));
        case RenderPipelineType::SkiaVulkan:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaVulkanPipeline>(thread));
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
    return nullptr;
}

void CanvasContext::invokeFunctor(const RenderThread& thread, Functor* functor) {
    ATRACE_CALL();
    auto renderType = Properties::getRenderPipelineType();
    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            skiapipeline::SkiaOpenGLPipeline::invokeFunctor(thread, functor);
            break;
        case RenderPipelineType::SkiaVulkan:
            skiapipeline::SkiaVulkanPipeline::invokeFunctor(thread, functor);
            break;
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
}

void CanvasContext::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    skiapipeline::SkiaPipeline::prepareToDraw(thread, bitmap);
}

CanvasContext::CanvasContext(RenderThread& thread, bool translucent, RenderNode* rootRenderNode,
                             IContextFactory* contextFactory,
                             std::unique_ptr<IRenderPipeline> renderPipeline)
        : mRenderThread(thread)
        , mGenerationID(0)
        , mOpaque(!translucent)
        , mAnimationContext(contextFactory->createAnimationContext(mRenderThread.timeLord()))
        , mJankTracker(&thread.globalProfileData())
        , mProfiler(mJankTracker.frames(), thread.timeLord().frameIntervalNanos())
        , mContentDrawBounds(0, 0, 0, 0)
        , mRenderPipeline(std::move(renderPipeline)) {
    rootRenderNode->makeRoot();
    mRenderNodes.emplace_back(rootRenderNode);
    mProfiler.setDensity(DeviceInfo::getDensity());
    setRenderAheadDepth(Properties::defaultRenderAhead);
}

CanvasContext::~CanvasContext() {
    destroy();
    for (auto& node : mRenderNodes) {
        node->clearRoot();
    }
    mRenderNodes.clear();
}

void CanvasContext::addRenderNode(RenderNode* node, bool placeFront) {
    int pos = placeFront ? 0 : static_cast<int>(mRenderNodes.size());
    node->makeRoot();
    mRenderNodes.emplace(mRenderNodes.begin() + pos, node);
}

void CanvasContext::removeRenderNode(RenderNode* node) {
    node->clearRoot();
    mRenderNodes.erase(std::remove(mRenderNodes.begin(), mRenderNodes.end(), node),
                       mRenderNodes.end());
}

void CanvasContext::destroy() {
    stopDrawing();
    setSurface(nullptr);
    freePrefetchedLayers();
    destroyHardwareResources();
    mAnimationContext->destroy();
}

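// Sizes the window's buffer queue based on NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS.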
static void setBufferCount(ANativeWindow* window) {
    int query_value;
    int err = window->query(window, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &query_value);
    if (err != 0 || query_value < 0) {
        ALOGE("window->query failed: %s (%d) value=%d", strerror(-err), err, query_value);
        return;
    }
    auto min_undequeued_buffers = static_cast<uint32_t>(query_value);

    // We only need to set min_undequeued + 2 because the renderahead amount
    // was already factored into the query for min_undequeued
    int bufferCount = min_undequeued_buffers + 2;
    native_window_set_buffer_count(window, bufferCount);
}

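// Binds (or clears) the backing ANativeWindow. The window is wrapped in a
// ReliableSurface, a dequeue timeout is optionally installed, and the
// render-ahead capacity is chosen from the fixed depth and display refresh rate.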
void CanvasContext::setSurface(ANativeWindow* window, bool enableTimeout) {
    ATRACE_CALL();

    if (mRenderAheadDepth == 0 && DeviceInfo::get()->getMaxRefreshRate() > 66.6f) {
        mFixedRenderAhead = false;
        mRenderAheadCapacity = 1;
    } else {
        mFixedRenderAhead = true;
        mRenderAheadCapacity = mRenderAheadDepth;
    }

    if (window) {
        mNativeSurface = std::make_unique<ReliableSurface>(window);
        mNativeSurface->init();
        if (enableTimeout) {
            // TODO: Fix error handling & re-shorten timeout
            ANativeWindow_setDequeueTimeout(window, 4000_ms);
        }
        mNativeSurface->setExtraBufferCount(mRenderAheadCapacity);
    } else {
        mNativeSurface = nullptr;
    }
    setupPipelineSurface();
}

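// Hands the current native window to the render pipeline, adjusts the buffer
// count if needed, and resets per-surface state (frame number, swap history,
// frame timestamp collection).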
void CanvasContext::setupPipelineSurface() {
    bool hasSurface = mRenderPipeline->setSurface(
            mNativeSurface ? mNativeSurface->getNativeWindow() : nullptr, mSwapBehavior);

    if (mNativeSurface && !mNativeSurface->didSetExtraBuffers()) {
        setBufferCount(mNativeSurface->getNativeWindow());
    }

    mFrameNumber = -1;

    if (mNativeSurface != nullptr && hasSurface) {
        mHaveNewSurface = true;
        mSwapHistory.clear();
        // Enable frame stats after the surface has been bound to the appropriate graphics API.
        // Order is important when new and old surfaces are the same, because old surface has
        // its frame stats disabled automatically.
        native_window_enable_frame_timestamps(mNativeSurface->getNativeWindow(), true);
    } else {
        mRenderThread.removeFrameCallback(this);
        mGenerationID++;
    }
}

void CanvasContext::setSwapBehavior(SwapBehavior swapBehavior) {
    mSwapBehavior = swapBehavior;
}

bool CanvasContext::pauseSurface() {
    mGenerationID++;
    return mRenderThread.removeFrameCallback(this);
}

void CanvasContext::setStopped(bool stopped) {
    if (mStopped != stopped) {
        mStopped = stopped;
        if (mStopped) {
            mGenerationID++;
            mRenderThread.removeFrameCallback(this);
            mRenderPipeline->onStop();
        } else if (mIsDirty && hasSurface()) {
            mRenderThread.postFrameCallback(this);
        }
    }
}

void CanvasContext::allocateBuffers() {
    if (mNativeSurface) {
        ANativeWindow_tryAllocateBuffers(mNativeSurface->getNativeWindow());
    }
}

void CanvasContext::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mLightInfo.ambientShadowAlpha = ambientShadowAlpha;
    mLightInfo.spotShadowAlpha = spotShadowAlpha;
}

void CanvasContext::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mLightGeometry.center = lightCenter;
    mLightGeometry.radius = lightRadius;
}

void CanvasContext::setOpaque(bool opaque) {
    mOpaque = opaque;
}

void CanvasContext::setColorMode(ColorMode mode) {
    mRenderPipeline->setSurfaceColorProperties(mode);
    setupPipelineSurface();
}

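// Makes the pipeline's rendering context current for this surface; a failed
// makeCurrent() abandons the surface so the caller can skip the frame.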
bool CanvasContext::makeCurrent() {
    if (mStopped) return false;

    auto result = mRenderPipeline->makeCurrent();
    switch (result) {
        case MakeCurrentResult::AlreadyCurrent:
            return true;
        case MakeCurrentResult::Failed:
            mHaveNewSurface = true;
            setSurface(nullptr);
            return false;
        case MakeCurrentResult::Succeeded:
            mHaveNewSurface = true;
            return true;
        default:
            LOG_ALWAYS_FATAL("unexpected result %d from IRenderPipeline::makeCurrent",
                             (int32_t)result);
    }

    return true;
}

static bool wasSkipped(FrameInfo* info) {
    return info && ((*info)[FrameInfoIndex::Flags] & FrameInfoFlags::SkippedFrame);
}

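// Heuristic that inspects the recent swap history and reports whether the
// swap chain looks stuffed: every recorded frame had a slow dequeue or queue
// time and there were no multi-frame gaps between swaps.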
bool CanvasContext::isSwapChainStuffed() {
    static const auto SLOW_THRESHOLD = 6_ms;

    if (mSwapHistory.size() != mSwapHistory.capacity()) {
        // We want at least 3 frames of history before attempting to
        // guess if the queue is stuffed
        return false;
    }
    nsecs_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    auto& swapA = mSwapHistory[0];

    // Was there a happy queue & dequeue time? If so, don't
    // consider it stuffed
    if (swapA.dequeueDuration < SLOW_THRESHOLD && swapA.queueDuration < SLOW_THRESHOLD) {
        return false;
    }

    for (size_t i = 1; i < mSwapHistory.size(); i++) {
        auto& swapB = mSwapHistory[i];

        // If there's a multi-frameInterval gap we effectively already dropped a frame,
        // so consider the queue healthy.
        if (std::abs(swapA.swapCompletedTime - swapB.swapCompletedTime) > frameInterval * 3) {
            return false;
        }

        // Was there a happy queue & dequeue time? If so, don't
        // consider it stuffed
        if (swapB.dequeueDuration < SLOW_THRESHOLD && swapB.queueDuration < SLOW_THRESHOLD) {
            return false;
        }

        swapA = swapB;
    }

    // All signs point to a stuffed swap chain
    ATRACE_NAME("swap chain stuffed");
    return true;
}

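// Sync stage: pulls the latest UI-thread state into the render tree, runs
// render-thread animations, and decides whether this frame can be drawn or
// must be skipped (no surface, already drew for this vsync, or a buffer
// could not be reserved).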
void CanvasContext::prepareTree(TreeInfo& info, int64_t* uiFrameInfo, int64_t syncQueued,
                                RenderNode* target) {
    mRenderThread.removeFrameCallback(this);

    // If the previous frame was dropped we don't need to hold onto it, so
    // just keep using the previous frame's structure instead
    if (!wasSkipped(mCurrentFrameInfo)) {
        mCurrentFrameInfo = mJankTracker.startFrame();
        mLast4FrameInfos.next().first = mCurrentFrameInfo;
    }
    mCurrentFrameInfo->importUiThreadInfo(uiFrameInfo);
    mCurrentFrameInfo->set(FrameInfoIndex::SyncQueued) = syncQueued;
    mCurrentFrameInfo->markSyncStart();

    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.damageGenerationId = mDamageId++;
    info.out.canDrawThisFrame = true;

    mAnimationContext->startFrame(info.mode);
    for (const sp<RenderNode>& node : mRenderNodes) {
        // Only the primary target node will be drawn in full; all other nodes are drawn in
        // real-time mode. In the case of a window, the primary node is the window content and
        // the other node(s) are non-client / filler nodes.
        info.mode = (node.get() == target ? TreeInfo::MODE_FULL : TreeInfo::MODE_RT_ONLY);
        node->prepareTree(info);
        GL_CHECKPOINT(MODERATE);
    }
    mAnimationContext->runRemainingAnimations(info);
    GL_CHECKPOINT(MODERATE);

    freePrefetchedLayers();
    GL_CHECKPOINT(MODERATE);

    mIsDirty = true;

    if (CC_UNLIKELY(!hasSurface())) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        info.out.canDrawThisFrame = false;
        return;
    }

    if (CC_LIKELY(mSwapHistory.size() && !Properties::forceDrawFrame)) {
        nsecs_t latestVsync = mRenderThread.timeLord().latestVsync();
        SwapHistory& lastSwap = mSwapHistory.back();
        nsecs_t vsyncDelta = std::abs(lastSwap.vsyncTime - latestVsync);
        // The slight fudge-factor is to deal with cases where
        // the vsync was estimated due to being slow handling the signal.
        // See the logic in TimeLord#computeFrameTimeNanos or in
        // Choreographer.java for details on when this happens.
        if (vsyncDelta < 2_ms) {
            // Already drew for this vsync pulse, UI draw request missed
            // the deadline for RT animations
            info.out.canDrawThisFrame = false;
        }
    } else {
        info.out.canDrawThisFrame = true;
    }

    // TODO: Do we need to abort out if the backdrop is added but not ready? Should that even
    // be an allowable combination?
    if (mRenderNodes.size() > 2 && !mRenderNodes[1]->isRenderable()) {
        info.out.canDrawThisFrame = false;
    }

    if (info.out.canDrawThisFrame) {
        int err = mNativeSurface->reserveNext();
        if (err != OK) {
            mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
            info.out.canDrawThisFrame = false;
            ALOGW("reserveNext failed, error = %d (%s)", err, strerror(-err));
            if (err != TIMED_OUT) {
                // A timed-out surface can still recover, but assume others are permanently dead.
                setSurface(nullptr);
                return;
            }
        }
    } else {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
    }

    bool postedFrameCallback = false;
    if (info.out.hasAnimations || !info.out.canDrawThisFrame) {
        if (CC_UNLIKELY(!Properties::enableRTAnimations)) {
            info.out.requiresUiRedraw = true;
        }
        if (!info.out.requiresUiRedraw) {
            // If animationsNeedsRedraw is set don't bother posting for an RT anim
            // as we will just end up fighting the UI thread.
            mRenderThread.postFrameCallback(this);
            postedFrameCallback = true;
        }
    }

    if (!postedFrameCallback &&
        info.out.animatedImageDelay != TreeInfo::Out::kNoAnimatedImageDelay) {
        // Subtract the time of one frame so it can be displayed on time.
        const nsecs_t kFrameTime = mRenderThread.timeLord().frameIntervalNanos();
        if (info.out.animatedImageDelay <= kFrameTime) {
            mRenderThread.postFrameCallback(this);
        } else {
            const auto delay = info.out.animatedImageDelay - kFrameTime;
            int genId = mGenerationID;
            mRenderThread.queue().postDelayed(delay, [this, genId]() {
                if (mGenerationID == genId) {
                    mRenderThread.postFrameCallback(this);
                }
            });
        }
    }
}

void CanvasContext::stopDrawing() {
    mRenderThread.removeFrameCallback(this);
    mAnimationContext->pauseAnimators();
    mGenerationID++;
}

void CanvasContext::notifyFramePending() {
    ATRACE_CALL();
    mRenderThread.pushBackFrameCallback(this);
}

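// When render-ahead is in effect, computes an explicit present timestamp so
// queued frames are not displayed earlier than intended; otherwise the
// timestamp is left on NATIVE_WINDOW_TIMESTAMP_AUTO.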
void CanvasContext::setPresentTime() {
    int64_t presentTime = NATIVE_WINDOW_TIMESTAMP_AUTO;
    int renderAhead = 0;
    const auto frameIntervalNanos = mRenderThread.timeLord().frameIntervalNanos();
    if (mFixedRenderAhead) {
        renderAhead = std::min(mRenderAheadDepth, mRenderAheadCapacity);
    } else if (frameIntervalNanos < 15_ms) {
        renderAhead = std::min(1, static_cast<int>(mRenderAheadCapacity));
    }

    if (renderAhead) {
        presentTime = mCurrentFrameInfo->get(FrameInfoIndex::Vsync) +
                      (frameIntervalNanos * (renderAhead + 1)) - DeviceInfo::get()->getAppOffset() +
                      (frameIntervalNanos / 2);
    }
    native_window_set_buffers_timestamp(mNativeSurface->getNativeWindow(), presentTime);
}

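// Draw stage: resolves accumulated damage, renders through the pipeline,
// swaps buffers, records swap timing for jank tracking, and fires
// frame-complete callbacks.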
void CanvasContext::draw() {
    SkRect dirty;
    mDamageAccumulator.finish(&dirty);

    if (dirty.isEmpty() && Properties::skipEmptyFrames && !surfaceRequiresRedraw()) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        // Notify the callbacks even if there's nothing to draw, so they aren't waiting
        // indefinitely
        waitOnFences();
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, mFrameNumber);
        }
        mFrameCompleteCallbacks.clear();
        return;
    }

    mCurrentFrameInfo->markIssueDrawCommandsStart();

    Frame frame = mRenderPipeline->getFrame();
    setPresentTime();

    SkRect windowDirty = computeDirtyRect(frame, &dirty);

    bool drew = mRenderPipeline->draw(frame, windowDirty, dirty, mLightGeometry, &mLayerUpdateQueue,
                                      mContentDrawBounds, mOpaque, mLightInfo, mRenderNodes,
                                      &(profiler()));

    int64_t frameCompleteNr = getFrameNumber();

    waitOnFences();

    if (mNativeSurface) {
        // TODO(b/165985262): measure performance impact
        const auto vsyncId = mCurrentFrameInfo->get(FrameInfoIndex::FrameTimelineVsyncId);
        if (vsyncId != UiFrameInfoBuilder::INVALID_VSYNC_ID) {
            const auto inputEventId = mCurrentFrameInfo->get(FrameInfoIndex::NewestInputEvent);
            native_window_set_frame_timeline_info(mNativeSurface->getNativeWindow(), vsyncId,
                                                  inputEventId);
        }
    }

    bool requireSwap = false;
    int error = OK;
    bool didSwap =
            mRenderPipeline->swapBuffers(frame, drew, windowDirty, mCurrentFrameInfo, &requireSwap);

    mIsDirty = false;

    if (requireSwap) {
        bool didDraw = true;
        // Handle any swapchain errors
        error = mNativeSurface->getAndClearError();
        if (error == TIMED_OUT) {
            // Try again
            mRenderThread.postFrameCallback(this);
            // But since this frame didn't happen, we need to mark full damage in the swap
            // history
            didDraw = false;
        } else if (error != OK || !didSwap) {
            // Unknown error, abandon the surface
            setSurface(nullptr);
            didDraw = false;
        }

        SwapHistory& swap = mSwapHistory.next();
        if (didDraw) {
            swap.damage = windowDirty;
        } else {
            float max = static_cast<float>(INT_MAX);
            swap.damage = SkRect::MakeWH(max, max);
        }
        swap.swapCompletedTime = systemTime(SYSTEM_TIME_MONOTONIC);
        swap.vsyncTime = mRenderThread.timeLord().latestVsync();
        if (didDraw) {
            nsecs_t dequeueStart =
                    ANativeWindow_getLastDequeueStartTime(mNativeSurface->getNativeWindow());
            if (dequeueStart < mCurrentFrameInfo->get(FrameInfoIndex::SyncStart)) {
                // Ignoring dequeue duration as it happened prior to frame render start
                // and thus is not part of the frame.
                swap.dequeueDuration = 0;
            } else {
                swap.dequeueDuration =
                        ANativeWindow_getLastDequeueDuration(mNativeSurface->getNativeWindow());
            }
            swap.queueDuration =
                    ANativeWindow_getLastQueueDuration(mNativeSurface->getNativeWindow());
        } else {
            swap.dequeueDuration = 0;
            swap.queueDuration = 0;
        }
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = swap.dequeueDuration;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = swap.queueDuration;
        mLast4FrameInfos[-1].second = frameCompleteNr;
        mHaveNewSurface = false;
        mFrameNumber = -1;
    } else {
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = 0;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = 0;
        mLast4FrameInfos[-1].second = -1;
    }

    // TODO: Use a fence for real completion?
    mCurrentFrameInfo->markFrameCompleted();

#if LOG_FRAMETIME_MMA
    float thisFrame = mCurrentFrameInfo->duration(FrameInfoIndex::IssueDrawCommandsStart,
                                                  FrameInfoIndex::FrameCompleted) /
                      NANOS_PER_MILLIS_F;
    if (sFrameCount) {
        sBenchMma = ((9 * sBenchMma) + thisFrame) / 10;
    } else {
        sBenchMma = thisFrame;
    }
    if (++sFrameCount == 10) {
        sFrameCount = 1;
        ALOGD("Average frame time: %.4f", sBenchMma);
    }
#endif

    if (didSwap) {
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, frameCompleteNr);
        }
        mFrameCompleteCallbacks.clear();
    }

    mJankTracker.finishFrame(*mCurrentFrameInfo);
    if (CC_UNLIKELY(mFrameMetricsReporter.get() != nullptr)) {
        mFrameMetricsReporter->reportFrameMetrics(mCurrentFrameInfo->data());
    }

    if (mLast4FrameInfos.size() == mLast4FrameInfos.capacity()) {
        // By looking 4 frames back, we guarantee all SF stats are available. There are at
        // most 3 buffers in the BufferQueue, and the Surface object keeps stats for the
        // last 8 frames.
        FrameInfo* forthBehind = mLast4FrameInfos.front().first;
        int64_t composedFrameId = mLast4FrameInfos.front().second;
        nsecs_t acquireTime = -1;
        if (mNativeSurface) {
            native_window_get_frame_timestamps(mNativeSurface->getNativeWindow(), composedFrameId,
                                               nullptr, &acquireTime, nullptr, nullptr, nullptr,
                                               nullptr, nullptr, nullptr, nullptr);
        }
        // Ignore default -1, NATIVE_WINDOW_TIMESTAMP_INVALID and NATIVE_WINDOW_TIMESTAMP_PENDING
        forthBehind->set(FrameInfoIndex::GpuCompleted) = acquireTime > 0 ? acquireTime : -1;
        mJankTracker.finishGpuDraw(*forthBehind);
    }

    mRenderThread.cacheManager().onFrameCompleted();
}

// Called by choreographer to do an RT-driven animation
void CanvasContext::doFrame() {
    if (!mRenderPipeline->isSurfaceReady()) return;
    prepareAndDraw(nullptr);
}

SkISize CanvasContext::getNextFrameSize() const {
    static constexpr SkISize defaultFrameSize = {INT32_MAX, INT32_MAX};
    if (mNativeSurface == nullptr) {
        return defaultFrameSize;
    }
    ANativeWindow* anw = mNativeSurface->getNativeWindow();

    SkISize size;
    size.fWidth = ANativeWindow_getWidth(anw);
    size.fHeight = ANativeWindow_getHeight(anw);
    return size;
}

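// Runs a full render-thread-driven frame: builds the frame info from the
// latest vsync, syncs the tree, then draws if the frame is drawable
// (otherwise just waits on outstanding fences).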
void CanvasContext::prepareAndDraw(RenderNode* node) {
    ATRACE_CALL();

    nsecs_t vsync = mRenderThread.timeLord().computeFrameTimeNanos();
    int64_t vsyncId = mRenderThread.timeLord().lastVsyncId();
    int64_t frameDeadline = mRenderThread.timeLord().lastFrameDeadline();
    int64_t frameInfo[UI_THREAD_FRAME_INFO_SIZE];
    UiFrameInfoBuilder(frameInfo)
            .addFlag(FrameInfoFlags::RTAnimation)
            .setVsync(vsync, vsync, vsyncId, frameDeadline);

    TreeInfo info(TreeInfo::MODE_RT_ONLY, *this);
    prepareTree(info, frameInfo, systemTime(SYSTEM_TIME_MONOTONIC), node);
    if (info.out.canDrawThisFrame) {
        draw();
    } else {
        // wait on fences so tasks don't overlap next frame
        waitOnFences();
    }
}

void CanvasContext::markLayerInUse(RenderNode* node) {
    if (mPrefetchedLayers.erase(node)) {
        node->decStrong(nullptr);
    }
}

void CanvasContext::freePrefetchedLayers() {
    if (mPrefetchedLayers.size()) {
        for (auto& node : mPrefetchedLayers) {
            ALOGW("Incorrectly called buildLayer on View: %s, destroying layer...",
                  node->getName());
            node->destroyLayers();
            node->decStrong(nullptr);
        }
        mPrefetchedLayers.clear();
    }
}

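// Eagerly renders a node into an offscreen layer outside of a normal frame.
// The node is kept strongly referenced in mPrefetchedLayers until it is
// either drawn (markLayerInUse) or reclaimed by freePrefetchedLayers().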
void CanvasContext::buildLayer(RenderNode* node) {
    ATRACE_CALL();
    if (!mRenderPipeline->isContextReady()) return;

    // buildLayer() will leave the tree in an unknown state, so we must stop drawing
    stopDrawing();

    TreeInfo info(TreeInfo::MODE_FULL, *this);
    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.runAnimations = false;
    node->prepareTree(info);
    SkRect ignore;
    mDamageAccumulator.finish(&ignore);
    // Tickle the GENERIC property on node to mark it as dirty for damaging
    // purposes when the frame is actually drawn
    node->setPropertyFieldsDirty(RenderNode::GENERIC);

    mRenderPipeline->renderLayers(mLightGeometry, &mLayerUpdateQueue, mOpaque, mLightInfo);

    node->incStrong(nullptr);
    mPrefetchedLayers.insert(node);
}

void CanvasContext::destroyHardwareResources() {
    stopDrawing();
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyHardwareResources();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

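// Responds to system trim-memory signals: UI_HIDDEN trims caches, COMPLETE
// additionally tears down the rendering context.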
void CanvasContext::trimMemory(RenderThread& thread, int level) {
    ATRACE_CALL();
    if (!thread.getGrContext()) return;
    if (level >= TRIM_MEMORY_COMPLETE) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::Complete);
        thread.destroyRenderingContext();
    } else if (level >= TRIM_MEMORY_UI_HIDDEN) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::UiHidden);
    }
}

DeferredLayerUpdater* CanvasContext::createTextureLayer() {
    return mRenderPipeline->createTextureLayer();
}

void CanvasContext::dumpFrames(int fd) {
    mJankTracker.dumpStats(fd);
    mJankTracker.dumpFrames(fd);
}

void CanvasContext::resetFrameStats() {
    mJankTracker.reset();
}

void CanvasContext::setName(const std::string&& name) {
    mJankTracker.setDescription(JankTrackerType::Window, std::move(name));
}

void CanvasContext::waitOnFences() {
    if (mFrameFences.size()) {
        ATRACE_CALL();
        for (auto& fence : mFrameFences) {
            fence.get();
        }
        mFrameFences.clear();
    }
}

void CanvasContext::enqueueFrameWork(std::function<void()>&& func) {
    mFrameFences.push_back(CommonPool::async(std::move(func)));
}

int64_t CanvasContext::getFrameNumber() {
    // mFrameNumber is reset to -1 when the surface changes or we swap buffers
    if (mFrameNumber == -1 && mNativeSurface.get()) {
        mFrameNumber = ANativeWindow_getNextFrameId(mNativeSurface->getNativeWindow());
    }
    return mFrameNumber;
}

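// Returns true when the surface must be redrawn even if nothing was damaged:
// either the surface is new, or its size no longer matches the last frame.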
bool CanvasContext::surfaceRequiresRedraw() {
    if (!mNativeSurface) return false;
    if (mHaveNewSurface) return true;

    ANativeWindow* anw = mNativeSurface->getNativeWindow();
    const int width = ANativeWindow_getWidth(anw);
    const int height = ANativeWindow_getHeight(anw);

    return width != mLastFrameWidth || height != mLastFrameHeight;
}

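// Sets a fixed render-ahead depth. Values outside [0, 2] are ignored, and the
// depth can only be changed before a surface has been attached.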
void CanvasContext::setRenderAheadDepth(int renderAhead) {
    if (renderAhead > 2 || renderAhead < 0 || mNativeSurface) {
        return;
    }
    mFixedRenderAhead = true;
    mRenderAheadDepth = static_cast<uint32_t>(renderAhead);
}

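// Computes the region that must be repainted this frame: combines the
// app-reported damage with prior frames' damage based on the buffer age,
// falling back to a full repaint when the surface is new or the history is
// insufficient. Returns the window-space dirty rect used when swapping.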
SkRect CanvasContext::computeDirtyRect(const Frame& frame, SkRect* dirty) {
    if (frame.width() != mLastFrameWidth || frame.height() != mLastFrameHeight) {
        // can't rely on prior content of window if viewport size changes
        dirty->setEmpty();
        mLastFrameWidth = frame.width();
        mLastFrameHeight = frame.height();
    } else if (mHaveNewSurface || frame.bufferAge() == 0) {
        // New surface needs a full draw
        dirty->setEmpty();
    } else {
        if (!dirty->isEmpty() &&
            !dirty->intersect(SkRect::MakeIWH(frame.width(), frame.height()))) {
            ALOGW("Dirty " RECT_STRING " doesn't intersect with 0 0 %d %d ?", SK_RECT_ARGS(*dirty),
                  frame.width(), frame.height());
            dirty->setEmpty();
        }
        profiler().unionDirty(dirty);
    }

    if (dirty->isEmpty()) {
        dirty->setIWH(frame.width(), frame.height());
    }

    // At this point dirty is the area of the window to update. However,
    // the area of the frame we need to repaint is potentially different, so
    // stash the screen area for later
    SkRect windowDirty(*dirty);

    // If the buffer age is 0 we do a full-screen repaint (handled above)
    // If the buffer age is 1 the buffer contents are the same as they were
    // last frame so there's nothing to union() against
    // Therefore we only care about the > 1 case.
    if (frame.bufferAge() > 1) {
        if (frame.bufferAge() > (int)mSwapHistory.size()) {
            // We don't have enough history to handle this old of a buffer
            // Just do a full-draw
            dirty->setIWH(frame.width(), frame.height());
        } else {
            // At this point we haven't yet added the latest frame to the damage
            // history (that happens later, in draw()), so we need to union in
            // the damage from the preceding frames.
            for (int i = mSwapHistory.size() - 1;
                 i > ((int)mSwapHistory.size()) - frame.bufferAge(); i--) {
                dirty->join(mSwapHistory[i].damage);
            }
        }
    }

    return windowDirty;
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */