blob: 1e011c231343c79cd19beb836e2574b63a043a4e [file] [log] [blame]
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
#include "RenderProxy.h"

#include <gui/TraceUtils.h>

#include "DeferredLayerUpdater.h"
#include "DisplayList.h"
#include "Properties.h"
#include "Readback.h"
#include "Rect.h"
#include "WebViewFunctorManager.h"
#include "renderthread/CanvasContext.h"
#include "renderthread/RenderTask.h"
#include "renderthread/RenderThread.h"
#include "utils/Macros.h"
#include "utils/TimeUtils.h"

#include <SkBitmap.h>
#include <SkImage.h>
#include <SkPicture.h>

#include <pthread.h>
37
John Reck4f02bf42014-01-03 18:09:17 -080038namespace android {
39namespace uirenderer {
40namespace renderthread {
41
John Reck1bcacfd2017-11-03 10:12:19 -070042RenderProxy::RenderProxy(bool translucent, RenderNode* rootRenderNode,
43 IContextFactory* contextFactory)
44 : mRenderThread(RenderThread::getInstance()), mContext(nullptr) {
Matt Buckleye9023cf2022-11-23 22:39:25 +000045 pid_t uiThreadId = pthread_gettid_np(pthread_self());
46 pid_t renderThreadId = getRenderThreadTid();
47 mContext = mRenderThread.queue().runSync([=, this]() -> CanvasContext* {
48 return CanvasContext::create(mRenderThread, translucent, rootRenderNode, contextFactory,
49 uiThreadId, renderThreadId);
John Reckf8441e62017-10-23 13:10:41 -070050 });
Matt Buckleye9023cf2022-11-23 22:39:25 +000051 mDrawFrameTask.setContext(&mRenderThread, mContext, rootRenderNode);
John Reck4f02bf42014-01-03 18:09:17 -080052}
53
54RenderProxy::~RenderProxy() {
55 destroyContext();
56}
57
John Reck4f02bf42014-01-03 18:09:17 -080058void RenderProxy::destroyContext() {
59 if (mContext) {
Matt Buckleye9023cf2022-11-23 22:39:25 +000060 mDrawFrameTask.setContext(nullptr, nullptr, nullptr);
John Reck668f0e32014-03-26 15:10:40 -070061 // This is also a fence as we need to be certain that there are no
62 // outstanding mDrawFrame tasks posted before it is destroyed
John Reck1bcacfd2017-11-03 10:12:19 -070063 mRenderThread.queue().runSync([this]() { delete mContext; });
John Reckf8441e62017-10-23 13:10:41 -070064 mContext = nullptr;
John Reck4f02bf42014-01-03 18:09:17 -080065 }
66}
67
John Reck1125d1f2014-10-23 11:02:19 -070068void RenderProxy::setSwapBehavior(SwapBehavior swapBehavior) {
John Reck1bcacfd2017-11-03 10:12:19 -070069 mRenderThread.queue().post([this, swapBehavior]() { mContext->setSwapBehavior(swapBehavior); });
John Recke4280ba2014-05-05 16:39:37 -070070}
71
72bool RenderProxy::loadSystemProperties() {
John Reckf8441e62017-10-23 13:10:41 -070073 return mRenderThread.queue().runSync([this]() -> bool {
John Reckd9d7f122018-05-03 14:40:56 -070074 bool needsRedraw = Properties::load();
John Reckf8441e62017-10-23 13:10:41 -070075 if (mContext->profiler().consumeProperties()) {
76 needsRedraw = true;
77 }
78 return needsRedraw;
79 });
John Reckb36016c2015-03-11 08:50:53 -070080}
81
82void RenderProxy::setName(const char* name) {
John Reckf8441e62017-10-23 13:10:41 -070083 // block since name/value pointers owned by caller
84 // TODO: Support move arguments
John Reck1bcacfd2017-11-03 10:12:19 -070085 mRenderThread.queue().runSync([this, name]() { mContext->setName(std::string(name)); });
John Reck4f02bf42014-01-03 18:09:17 -080086}
87
Nader Jawada3521852023-01-30 20:23:46 -080088void RenderProxy::setHardwareBuffer(AHardwareBuffer* buffer) {
89 if (buffer) {
90 AHardwareBuffer_acquire(buffer);
91 }
92 mRenderThread.queue().post([this, hardwareBuffer = buffer]() mutable {
93 mContext->setHardwareBuffer(hardwareBuffer);
94 if (hardwareBuffer) {
95 AHardwareBuffer_release(hardwareBuffer);
96 }
97 });
98}
99
Alec Mouri43fe6fc2019-12-23 07:46:19 -0800100void RenderProxy::setSurface(ANativeWindow* window, bool enableTimeout) {
John Recke95c62d2020-08-18 12:37:43 -0700101 if (window) { ANativeWindow_acquire(window); }
Alec Mouri43fe6fc2019-12-23 07:46:19 -0800102 mRenderThread.queue().post([this, win = window, enableTimeout]() mutable {
103 mContext->setSurface(win, enableTimeout);
John Recke95c62d2020-08-18 12:37:43 -0700104 if (win) { ANativeWindow_release(win); }
John Reckcd18c222019-11-21 14:40:53 -0800105 });
John Reck4f02bf42014-01-03 18:09:17 -0800106}
107
Huihong Luo5fdf7b82021-01-15 14:27:06 -0800108void RenderProxy::setSurfaceControl(ASurfaceControl* surfaceControl) {
109 auto funcs = mRenderThread.getASurfaceControlFunctions();
110 if (surfaceControl) {
111 funcs.acquireFunc(surfaceControl);
112 }
113 mRenderThread.queue().post([this, control = surfaceControl, funcs]() mutable {
114 mContext->setSurfaceControl(control);
115 if (control) {
116 funcs.releaseFunc(control);
117 }
118 });
119}
120
John Reck8785ceb2018-10-29 16:45:58 -0700121void RenderProxy::allocateBuffers() {
122 mRenderThread.queue().post([=]() { mContext->allocateBuffers(); });
Jorim Jaggi7823ee72018-07-17 15:24:16 +0200123}
124
John Reck8785ceb2018-10-29 16:45:58 -0700125bool RenderProxy::pause() {
John Reck1bcacfd2017-11-03 10:12:19 -0700126 return mRenderThread.queue().runSync([this]() -> bool { return mContext->pauseSurface(); });
John Reck8afcc762016-04-13 10:24:06 -0700127}
128
129void RenderProxy::setStopped(bool stopped) {
John Reck1bcacfd2017-11-03 10:12:19 -0700130 mRenderThread.queue().runSync([this, stopped]() { mContext->setStopped(stopped); });
John Reck8afcc762016-04-13 10:24:06 -0700131}
132
John Reck8785ceb2018-10-29 16:45:58 -0700133void RenderProxy::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
John Reck1bcacfd2017-11-03 10:12:19 -0700134 mRenderThread.queue().post(
John Reck8785ceb2018-10-29 16:45:58 -0700135 [=]() { mContext->setLightAlpha(ambientShadowAlpha, spotShadowAlpha); });
Alan Viverette50210d92015-05-14 18:05:36 -0700136}
137
John Reck8785ceb2018-10-29 16:45:58 -0700138void RenderProxy::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
139 mRenderThread.queue().post([=]() { mContext->setLightGeometry(lightCenter, lightRadius); });
John Reck63a06672014-05-07 13:45:54 -0700140}
141
142void RenderProxy::setOpaque(bool opaque) {
John Reck1bcacfd2017-11-03 10:12:19 -0700143 mRenderThread.queue().post([=]() { mContext->setOpaque(opaque); });
Romain Guy26a2b972017-04-17 09:39:51 -0700144}
145
John Reck55887762023-01-25 16:51:18 -0500146float RenderProxy::setColorMode(ColorMode mode) {
147 // We only need to figure out what the renderer supports for HDR, otherwise this can stay
148 // an async call since we already know the return value
149 if (mode == ColorMode::Hdr) {
150 return mRenderThread.queue().runSync(
151 [=]() -> float { return mContext->setColorMode(mode); });
152 } else {
153 mRenderThread.queue().post([=]() { mContext->setColorMode(mode); });
154 return 1.f;
155 }
156}
157
158void RenderProxy::setRenderSdrHdrRatio(float ratio) {
159 mDrawFrameTask.setRenderSdrHdrRatio(ratio);
Romain Guy26a2b972017-04-17 09:39:51 -0700160}
161
John Reckba6adf62015-02-19 14:36:50 -0800162int64_t* RenderProxy::frameInfo() {
163 return mDrawFrameTask.frameInfo();
164}
165
chaviwadba0b12022-03-18 17:42:15 -0500166void RenderProxy::forceDrawNextFrame() {
167 mDrawFrameTask.forceDrawNextFrame();
168}
169
John Reck2de950d2017-01-25 10:58:30 -0800170int RenderProxy::syncAndDrawFrame() {
171 return mDrawFrameTask.drawFrame();
John Reck4f02bf42014-01-03 18:09:17 -0800172}
173
John Reck2de950d2017-01-25 10:58:30 -0800174void RenderProxy::destroy() {
John Reckfae904d2014-04-14 11:01:57 -0700175 // destroyCanvasAndSurface() needs a fence as when it returns the
176 // underlying BufferQueue is going to be released from under
177 // the render thread.
John Reck1bcacfd2017-11-03 10:12:19 -0700178 mRenderThread.queue().runSync([=]() { mContext->destroy(); });
John Reck0d1f6342014-03-28 20:30:27 -0700179}
180
John Reck283bb462018-12-13 16:40:14 -0800181void RenderProxy::destroyFunctor(int functor) {
182 ATRACE_CALL();
183 RenderThread& thread = RenderThread::getInstance();
John Reck5cca8f22018-12-10 17:06:22 -0800184 thread.queue().post([=]() { WebViewFunctorManager::instance().destroyFunctor(functor); });
John Reck283bb462018-12-13 16:40:14 -0800185}
186
John Reck19b6bcf2014-02-14 20:03:38 -0800187DeferredLayerUpdater* RenderProxy::createTextureLayer() {
John Reckf8441e62017-10-23 13:10:41 -0700188 return mRenderThread.queue().runSync([this]() -> auto {
189 return mContext->createTextureLayer();
190 });
John Reck3e824952014-08-20 10:08:39 -0700191}
192
John Reck2de950d2017-01-25 10:58:30 -0800193void RenderProxy::buildLayer(RenderNode* node) {
John Reck1bcacfd2017-11-03 10:12:19 -0700194 mRenderThread.queue().runSync([&]() { mContext->buildLayer(node); });
John Reck19b6bcf2014-02-14 20:03:38 -0800195}
196
John Reck3731dc22015-04-13 15:20:29 -0700197bool RenderProxy::copyLayerInto(DeferredLayerUpdater* layer, SkBitmap& bitmap) {
John Reckfbeac3c2019-03-29 11:24:56 -0700198 ATRACE_NAME("TextureView#getBitmap");
Stan Iliev1a025a72018-09-05 16:35:11 -0400199 auto& thread = RenderThread::getInstance();
John Reck5cca8f22018-12-10 17:06:22 -0800200 return thread.queue().runSync([&]() -> bool {
201 return thread.readback().copyLayerInto(layer, &bitmap) == CopyResult::Success;
202 });
John Reck19b6bcf2014-02-14 20:03:38 -0800203}
204
John Reckd72e0a32014-05-29 18:56:11 -0700205void RenderProxy::pushLayerUpdate(DeferredLayerUpdater* layer) {
206 mDrawFrameTask.pushLayerUpdate(layer);
207}
208
209void RenderProxy::cancelLayerUpdate(DeferredLayerUpdater* layer) {
210 mDrawFrameTask.removeLayerUpdate(layer);
John Reck19b6bcf2014-02-14 20:03:38 -0800211}
212
John Reck918ad522014-06-27 14:45:25 -0700213void RenderProxy::detachSurfaceTexture(DeferredLayerUpdater* layer) {
John Reck1bcacfd2017-11-03 10:12:19 -0700214 return mRenderThread.queue().runSync([&]() { layer->detachSurfaceTexture(); });
John Recke1628b72014-05-23 15:11:19 -0700215}
216
John Reck2de950d2017-01-25 10:58:30 -0800217void RenderProxy::destroyHardwareResources() {
John Reck1bcacfd2017-11-03 10:12:19 -0700218 return mRenderThread.queue().runSync([&]() { mContext->destroyHardwareResources(); });
John Reckf47a5942014-06-30 16:20:04 -0700219}
220
221void RenderProxy::trimMemory(int level) {
John Reckcd3a22c2014-08-06 13:33:59 -0700222 // Avoid creating a RenderThread to do a trimMemory.
223 if (RenderThread::hasInstance()) {
224 RenderThread& thread = RenderThread::getInstance();
John Reck5f66fb82022-09-23 17:49:23 -0400225 const auto trimLevel = static_cast<TrimLevel>(level);
226 thread.queue().post([&thread, trimLevel]() { thread.trimMemory(trimLevel); });
John Reckcd3a22c2014-08-06 13:33:59 -0700227 }
John Reckf47a5942014-06-30 16:20:04 -0700228}
229
John Reck39207682021-05-12 19:10:47 -0400230void RenderProxy::purgeCaches() {
231 if (RenderThread::hasInstance()) {
232 RenderThread& thread = RenderThread::getInstance();
233 thread.queue().post([&thread]() {
234 if (thread.getGrContext()) {
John Reck5f66fb82022-09-23 17:49:23 -0400235 thread.cacheManager().trimMemory(TrimLevel::COMPLETE);
John Reck39207682021-05-12 19:10:47 -0400236 }
237 });
238 }
239}
240
Chris Craik2507c342015-05-04 14:36:49 -0700241void RenderProxy::overrideProperty(const char* name, const char* value) {
John Reckf8441e62017-10-23 13:10:41 -0700242 // expensive, but block here since name/value pointers owned by caller
John Reck1bcacfd2017-11-03 10:12:19 -0700243 RenderThread::getInstance().queue().runSync(
244 [&]() { Properties::overrideProperty(name, value); });
Chris Craik2507c342015-05-04 14:36:49 -0700245}
246
John Reck28ad7b52014-04-07 16:59:25 -0700247void RenderProxy::fence() {
John Reck1bcacfd2017-11-03 10:12:19 -0700248 mRenderThread.queue().runSync([]() {});
John Reck28ad7b52014-04-07 16:59:25 -0700249}
250
John Recke4c1e6c2018-05-24 16:27:35 -0700251int RenderProxy::maxTextureSize() {
John Reck5cca8f22018-12-10 17:06:22 -0800252 static int maxTextureSize = RenderThread::getInstance().queue().runSync(
253 []() { return DeviceInfo::get()->maxTextureSize(); });
John Recke4c1e6c2018-05-24 16:27:35 -0700254 return maxTextureSize;
John Reckf47a5942014-06-30 16:20:04 -0700255}
256
257void RenderProxy::stopDrawing() {
John Reck1bcacfd2017-11-03 10:12:19 -0700258 mRenderThread.queue().runSync([this]() { mContext->stopDrawing(); });
John Recka5dda642014-05-22 15:43:54 -0700259}
260
261void RenderProxy::notifyFramePending() {
John Reck1bcacfd2017-11-03 10:12:19 -0700262 mRenderThread.queue().post([this]() { mContext->notifyFramePending(); });
John Reckfe5e7b72014-05-23 17:42:28 -0700263}
264
Matt Buckleyd98e8052022-10-21 22:13:23 +0000265void RenderProxy::notifyCallbackPending() {
Matt Buckleye9023cf2022-11-23 22:39:25 +0000266 mRenderThread.queue().post([this]() { mContext->sendLoadResetHint(); });
Matt Buckleyd98e8052022-10-21 22:13:23 +0000267}
268
Matt Buckleyac5f7552022-12-19 22:03:27 +0000269void RenderProxy::notifyExpensiveFrame() {
270 mRenderThread.queue().post([this]() { mContext->sendLoadIncreaseHint(); });
271}
272
John Reckba6adf62015-02-19 14:36:50 -0800273void RenderProxy::dumpProfileInfo(int fd, int dumpFlags) {
John Reckf8441e62017-10-23 13:10:41 -0700274 mRenderThread.queue().runSync([&]() {
Jorim Jaggi71db8892021-02-03 23:19:29 +0100275 std::lock_guard lock(mRenderThread.getJankDataMutex());
John Reckf8441e62017-10-23 13:10:41 -0700276 mContext->profiler().dumpData(fd);
277 if (dumpFlags & DumpFlags::FrameStats) {
278 mContext->dumpFrames(fd);
279 }
280 if (dumpFlags & DumpFlags::JankStats) {
281 mRenderThread.globalProfileData()->dump(fd);
282 }
283 if (dumpFlags & DumpFlags::Reset) {
284 mContext->resetFrameStats();
285 }
286 });
John Reck7f2e5e32015-05-05 11:00:53 -0700287}
288
289void RenderProxy::resetProfileInfo() {
Jorim Jaggi33adb572021-02-22 14:27:53 +0100290 mRenderThread.queue().runSync([=]() {
291 std::lock_guard lock(mRenderThread.getJankDataMutex());
292 mContext->resetFrameStats();
293 });
John Reck7f2e5e32015-05-05 11:00:53 -0700294}
295
John Reckf8441e62017-10-23 13:10:41 -0700296uint32_t RenderProxy::frameTimePercentile(int percentile) {
297 return mRenderThread.queue().runSync([&]() -> auto {
Jorim Jaggi71db8892021-02-03 23:19:29 +0100298 std::lock_guard lock(mRenderThread.globalProfileData().getDataMutex());
John Reckf8441e62017-10-23 13:10:41 -0700299 return mRenderThread.globalProfileData()->findPercentile(percentile);
300 });
John Reck0e89e2b2014-10-31 14:49:06 -0700301}
302
John Reck712eae02021-10-01 15:24:27 -0400303void RenderProxy::dumpGraphicsMemory(int fd, bool includeProfileData, bool resetProfile) {
John Reckba7e9652019-01-23 10:33:41 -0800304 if (RenderThread::hasInstance()) {
305 auto& thread = RenderThread::getInstance();
John Reck712eae02021-10-01 15:24:27 -0400306 thread.queue().runSync([&]() {
307 thread.dumpGraphicsMemory(fd, includeProfileData);
308 if (resetProfile) {
309 thread.globalProfileData()->reset();
310 }
311 });
John Reckba7e9652019-01-23 10:33:41 -0800312 }
John Reckedc524c2015-03-18 15:24:33 -0700313}
314
John Reck39207682021-05-12 19:10:47 -0400315void RenderProxy::getMemoryUsage(size_t* cpuUsage, size_t* gpuUsage) {
316 if (RenderThread::hasInstance()) {
317 auto& thread = RenderThread::getInstance();
318 thread.queue().runSync([&]() { thread.getMemoryUsage(cpuUsage, gpuUsage); });
319 }
320}
321
John Reckedc524c2015-03-18 15:24:33 -0700322void RenderProxy::setProcessStatsBuffer(int fd) {
John Reckdf1742e2017-01-19 15:56:21 -0800323 auto& rt = RenderThread::getInstance();
John Reck0fa0cbc2019-04-05 16:57:46 -0700324 rt.queue().post([&rt, fd = dup(fd)]() {
John Reckf8441e62017-10-23 13:10:41 -0700325 rt.globalProfileData().switchStorageToAshmem(fd);
326 close(fd);
327 });
John Reckdf1742e2017-01-19 15:56:21 -0800328}
329
330void RenderProxy::rotateProcessStatsBuffer() {
John Reckdf1742e2017-01-19 15:56:21 -0800331 auto& rt = RenderThread::getInstance();
John Reck1bcacfd2017-11-03 10:12:19 -0700332 rt.queue().post([&rt]() { rt.globalProfileData().rotateStorage(); });
John Reckedc524c2015-03-18 15:24:33 -0700333}
334
Tim Murray33eb07f2016-06-10 10:03:20 -0700335int RenderProxy::getRenderThreadTid() {
336 return mRenderThread.getTid();
337}
338
Skuhneea7a7fb2015-08-28 07:10:31 -0700339void RenderProxy::addRenderNode(RenderNode* node, bool placeFront) {
John Reck1bcacfd2017-11-03 10:12:19 -0700340 mRenderThread.queue().post([=]() { mContext->addRenderNode(node, placeFront); });
Skuhneea7a7fb2015-08-28 07:10:31 -0700341}
342
343void RenderProxy::removeRenderNode(RenderNode* node) {
John Reck1bcacfd2017-11-03 10:12:19 -0700344 mRenderThread.queue().post([=]() { mContext->removeRenderNode(node); });
Skuhneea7a7fb2015-08-28 07:10:31 -0700345}
346
347void RenderProxy::drawRenderNode(RenderNode* node) {
John Reck1bcacfd2017-11-03 10:12:19 -0700348 mRenderThread.queue().runSync([=]() { mContext->prepareAndDraw(node); });
Skuhneea7a7fb2015-08-28 07:10:31 -0700349}
350
Skuhneb8160872015-09-22 09:51:39 -0700351void RenderProxy::setContentDrawBounds(int left, int top, int right, int bottom) {
John Reckf138b172017-09-08 11:00:42 -0700352 mDrawFrameTask.setContentDrawBounds(left, top, right, bottom);
Skuhneea7a7fb2015-08-28 07:10:31 -0700353}
354
Nader Jawada3521852023-01-30 20:23:46 -0800355void RenderProxy::setHardwareBufferRenderParams(const HardwareBufferRenderParams& params) {
356 mDrawFrameTask.setHardwareBufferRenderParams(params);
357}
358
John Reck5cca8f22018-12-10 17:06:22 -0800359void RenderProxy::setPictureCapturedCallback(
360 const std::function<void(sk_sp<SkPicture>&&)>& callback) {
361 mRenderThread.queue().post(
John Reck0fa0cbc2019-04-05 16:57:46 -0700362 [this, cb = callback]() { mContext->setPictureCapturedCallback(cb); });
John Reck5cca8f22018-12-10 17:06:22 -0800363}
364
Huihong Luo054b8d32021-02-24 18:48:12 -0800365void RenderProxy::setASurfaceTransactionCallback(
Huihong Luo4df41512021-06-24 10:04:32 -0700366 const std::function<bool(int64_t, int64_t, int64_t)>& callback) {
Huihong Luo054b8d32021-02-24 18:48:12 -0800367 mRenderThread.queue().post(
368 [this, cb = callback]() { mContext->setASurfaceTransactionCallback(cb); });
369}
370
Huihong Luo34f42fd2021-05-03 14:47:36 -0700371void RenderProxy::setPrepareSurfaceControlForWebviewCallback(
372 const std::function<void()>& callback) {
373 mRenderThread.queue().post(
374 [this, cb = callback]() { mContext->setPrepareSurfaceControlForWebviewCallback(cb); });
375}
376
chaviwb6803712021-12-13 15:46:29 -0600377void RenderProxy::setFrameCallback(
378 std::function<std::function<void(bool)>(int32_t, int64_t)>&& callback) {
Mihai Popa95688002018-02-23 16:10:11 +0000379 mDrawFrameTask.setFrameCallback(std::move(callback));
380}
381
chaviw9c137532021-08-20 12:15:48 -0500382void RenderProxy::setFrameCommitCallback(std::function<void(bool)>&& callback) {
383 mDrawFrameTask.setFrameCommitCallback(std::move(callback));
384}
385
386void RenderProxy::setFrameCompleteCallback(std::function<void()>&& callback) {
John Reckcc2eee82018-05-17 10:44:00 -0700387 mDrawFrameTask.setFrameCompleteCallback(std::move(callback));
388}
389
John Reckf8441e62017-10-23 13:10:41 -0700390void RenderProxy::addFrameMetricsObserver(FrameMetricsObserver* observerPtr) {
John Reck0fa0cbc2019-04-05 16:57:46 -0700391 mRenderThread.queue().post([this, observer = sp{observerPtr}]() {
John Reckf8441e62017-10-23 13:10:41 -0700392 mContext->addFrameMetricsObserver(observer.get());
393 });
Andres Morales06f5bc72015-12-15 15:21:31 -0800394}
395
John Reckf8441e62017-10-23 13:10:41 -0700396void RenderProxy::removeFrameMetricsObserver(FrameMetricsObserver* observerPtr) {
John Reck0fa0cbc2019-04-05 16:57:46 -0700397 mRenderThread.queue().post([this, observer = sp{observerPtr}]() {
John Reckf8441e62017-10-23 13:10:41 -0700398 mContext->removeFrameMetricsObserver(observer.get());
399 });
John Reck10dd0582016-03-31 16:36:16 -0700400}
401
John Reckbb3a3582018-09-26 11:21:08 -0700402void RenderProxy::setForceDark(bool enable) {
John Reck5cca8f22018-12-10 17:06:22 -0800403 mRenderThread.queue().post([this, enable]() { mContext->setForceDark(enable); });
John Reckbb3a3582018-09-26 11:21:08 -0700404}
405
John Reck4d73cb12022-07-27 10:32:52 -0400406void RenderProxy::copySurfaceInto(ANativeWindow* window, std::shared_ptr<CopyRequest>&& request) {
John Reckf8441e62017-10-23 13:10:41 -0700407 auto& thread = RenderThread::getInstance();
John Reck4d73cb12022-07-27 10:32:52 -0400408 ANativeWindow_acquire(window);
409 thread.queue().post([&thread, window, request = std::move(request)] {
410 thread.readback().copySurfaceInto(window, request);
411 ANativeWindow_release(window);
412 });
John Reck43871902016-08-01 14:39:24 -0700413}
414
sergeyvec4a4b12016-10-20 18:39:04 -0700415void RenderProxy::prepareToDraw(Bitmap& bitmap) {
John Reck43871902016-08-01 14:39:24 -0700416 // If we haven't spun up a hardware accelerated window yet, there's no
417 // point in precaching these bitmaps as it can't impact jank.
418 // We also don't know if we even will spin up a hardware-accelerated
419 // window or not.
420 if (!RenderThread::hasInstance()) return;
421 RenderThread* renderThread = &RenderThread::getInstance();
sergeyvec4a4b12016-10-20 18:39:04 -0700422 bitmap.ref();
John Reckf8441e62017-10-23 13:10:41 -0700423 auto task = [renderThread, &bitmap]() {
424 CanvasContext::prepareToDraw(*renderThread, &bitmap);
425 bitmap.unref();
426 };
John Reck43871902016-08-01 14:39:24 -0700427 nsecs_t lastVsync = renderThread->timeLord().latestVsync();
428 nsecs_t estimatedNextVsync = lastVsync + renderThread->timeLord().frameIntervalNanos();
Jerome Gaillarde218c692019-06-14 12:58:57 +0100429 nsecs_t timeToNextVsync = estimatedNextVsync - systemTime(SYSTEM_TIME_MONOTONIC);
John Reck43871902016-08-01 14:39:24 -0700430 // We expect the UI thread to take 4ms and for RT to be active from VSYNC+4ms to
431 // VSYNC+12ms or so, so aim for the gap during which RT is expected to
432 // be idle
433 // TODO: Make this concept a first-class supported thing? RT could use
434 // knowledge of pending draws to better schedule this task
435 if (timeToNextVsync > -6_ms && timeToNextVsync < 1_ms) {
John Reckf8441e62017-10-23 13:10:41 -0700436 renderThread->queue().postAt(estimatedNextVsync + 8_ms, task);
John Reck43871902016-08-01 14:39:24 -0700437 } else {
John Reckf8441e62017-10-23 13:10:41 -0700438 renderThread->queue().post(task);
John Reck43871902016-08-01 14:39:24 -0700439 }
440}
441
Stan Iliev1a025a72018-09-05 16:35:11 -0400442int RenderProxy::copyHWBitmapInto(Bitmap* hwBitmap, SkBitmap* bitmap) {
John Reckfbeac3c2019-03-29 11:24:56 -0700443 ATRACE_NAME("HardwareBitmap readback");
Stan Iliev6983bc42017-02-02 14:11:53 -0500444 RenderThread& thread = RenderThread::getInstance();
John Reck1072fff2018-04-12 15:20:09 -0700445 if (gettid() == thread.getTid()) {
John Reck1bcacfd2017-11-03 10:12:19 -0700446 // TODO: fix everything that hits this. We should never be triggering a readback ourselves.
Stan Iliev1a025a72018-09-05 16:35:11 -0400447 return (int)thread.readback().copyHWBitmapInto(hwBitmap, bitmap);
Stan Iliev6983bc42017-02-02 14:11:53 -0500448 } else {
John Reck5cca8f22018-12-10 17:06:22 -0800449 return thread.queue().runSync(
450 [&]() -> int { return (int)thread.readback().copyHWBitmapInto(hwBitmap, bitmap); });
Stan Iliev6983bc42017-02-02 14:11:53 -0500451 }
sergeyv59eecb522016-11-17 17:54:57 -0800452}
453
John Reck76005182021-06-09 22:43:05 -0400454int RenderProxy::copyImageInto(const sk_sp<SkImage>& image, SkBitmap* bitmap) {
455 RenderThread& thread = RenderThread::getInstance();
456 if (gettid() == thread.getTid()) {
457 // TODO: fix everything that hits this. We should never be triggering a readback ourselves.
458 return (int)thread.readback().copyImageInto(image, bitmap);
459 } else {
460 return thread.queue().runSync(
461 [&]() -> int { return (int)thread.readback().copyImageInto(image, bitmap); });
462 }
463}
464
John Recka8963062017-06-14 10:47:50 -0700465void RenderProxy::disableVsync() {
466 Properties::disableVsync = true;
467}
468
Stan Iliev898123b2019-02-14 14:57:44 -0500469void RenderProxy::preload() {
470 // Create RenderThread object and start the thread. Then preload Vulkan/EGL driver.
471 auto& thread = RenderThread::getInstance();
John Reck0fa0cbc2019-04-05 16:57:46 -0700472 thread.queue().post([&thread]() { thread.preload(); });
Stan Iliev898123b2019-02-14 14:57:44 -0500473}
474
chaviw01053d432022-03-18 17:54:00 -0500475void RenderProxy::setRtAnimationsEnabled(bool enabled) {
476 if (RenderThread::hasInstance()) {
477 RenderThread::getInstance().queue().post(
478 [enabled]() { Properties::enableRTAnimations = enabled; });
479 } else {
480 Properties::enableRTAnimations = enabled;
481 }
482}
483
John Reck4f02bf42014-01-03 18:09:17 -0800484} /* namespace renderthread */
485} /* namespace uirenderer */
486} /* namespace android */