/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#undef LOG_TAG
#define LOG_TAG "BLASTBufferQueue"

#define ATRACE_TAG ATRACE_TAG_GRAPHICS
//#define LOG_NDEBUG 0

#include <gui/BLASTBufferQueue.h>
#include <gui/BufferItemConsumer.h>
#include <gui/BufferQueueConsumer.h>
#include <gui/BufferQueueCore.h>
#include <gui/BufferQueueProducer.h>
#include <gui/GLConsumer.h>
#include <gui/IProducerListener.h>
#include <gui/Surface.h>
#include <utils/Singleton.h>
#include <utils/Trace.h>

#include <private/gui/ComposerService.h>

#include <chrono>

using namespace std::chrono_literals;

namespace {
inline const char* toString(bool b) {
    return b ? "true" : "false";
}
} // namespace

namespace android {

// Macros to include adapter info in log messages
#define BQA_LOGV(x, ...) \
    ALOGV("[%s](f:%u,a:%u) " x, mName.c_str(), mNumFrameAvailable, mNumAcquired, ##__VA_ARGS__)
// enable logs for a single layer
//#define BQA_LOGV(x, ...) \
//      ALOGV_IF((strstr(mName.c_str(), "SurfaceView") != nullptr), "[%s](f:%u,a:%u) " x, \
//              mName.c_str(), mNumFrameAvailable, mNumAcquired, ##__VA_ARGS__)
#define BQA_LOGE(x, ...) \
    ALOGE("[%s](f:%u,a:%u) " x, mName.c_str(), mNumFrameAvailable, mNumAcquired, ##__VA_ARGS__)

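// BLASTBufferItemConsumer sits on the consumer side of the adapter's BufferQueue. Beyond plain
// buffer acquisition it tracks producer connect/disconnect events and maintains a
// FrameEventHistory so that queue/latch/present timestamps can be reported back to the producer's
// frame timestamp queries (descriptive note added for readability, not part of the API contract).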
void BLASTBufferItemConsumer::onDisconnect() {
    Mutex::Autolock lock(mMutex);
    mPreviouslyConnected = mCurrentlyConnected;
    mCurrentlyConnected = false;
    if (mPreviouslyConnected) {
        mDisconnectEvents.push(mCurrentFrameNumber);
    }
    mFrameEventHistory.onDisconnect();
}

void BLASTBufferItemConsumer::addAndGetFrameTimestamps(const NewFrameEventsEntry* newTimestamps,
                                                       FrameEventHistoryDelta* outDelta) {
    Mutex::Autolock lock(mMutex);
    if (newTimestamps) {
        // BufferQueueProducer only adds a new timestamp on queueBuffer.
        mCurrentFrameNumber = newTimestamps->frameNumber;
        mFrameEventHistory.addQueue(*newTimestamps);
    }
    if (outDelta) {
        // Frame event histories are processed only after the producer connects and requests
        // deltas for the first time. Forward this intent to the SF side to turn event
        // processing back on.
        mPreviouslyConnected = mCurrentlyConnected;
        mCurrentlyConnected = true;
        mFrameEventHistory.getAndResetDelta(outDelta);
    }
}

void BLASTBufferItemConsumer::updateFrameTimestamps(uint64_t frameNumber, nsecs_t refreshStartTime,
                                                    const sp<Fence>& glDoneFence,
                                                    const sp<Fence>& presentFence,
                                                    const sp<Fence>& prevReleaseFence,
                                                    CompositorTiming compositorTiming,
                                                    nsecs_t latchTime, nsecs_t dequeueReadyTime) {
    Mutex::Autolock lock(mMutex);

    // If the producer is not connected, don't bother updating; the next producer that connects
    // won't access this frame event.
    if (!mCurrentlyConnected) return;
    std::shared_ptr<FenceTime> glDoneFenceTime = std::make_shared<FenceTime>(glDoneFence);
    std::shared_ptr<FenceTime> presentFenceTime = std::make_shared<FenceTime>(presentFence);
    std::shared_ptr<FenceTime> releaseFenceTime = std::make_shared<FenceTime>(prevReleaseFence);

    mFrameEventHistory.addLatch(frameNumber, latchTime);
    mFrameEventHistory.addRelease(frameNumber, dequeueReadyTime, std::move(releaseFenceTime));
    mFrameEventHistory.addPreComposition(frameNumber, refreshStartTime);
    mFrameEventHistory.addPostComposition(frameNumber, glDoneFenceTime, presentFenceTime,
                                          compositorTiming);
}

void BLASTBufferItemConsumer::getConnectionEvents(uint64_t frameNumber, bool* needsDisconnect) {
    bool disconnect = false;
    Mutex::Autolock lock(mMutex);
    while (!mDisconnectEvents.empty() && mDisconnectEvents.front() <= frameNumber) {
        disconnect = true;
        mDisconnectEvents.pop();
    }
    if (needsDisconnect != nullptr) *needsDisconnect = disconnect;
}

void BLASTBufferItemConsumer::setBlastBufferQueue(BLASTBufferQueue* blastbufferqueue) {
    Mutex::Autolock lock(mMutex);
    mBLASTBufferQueue = blastbufferqueue;
}

void BLASTBufferItemConsumer::onSidebandStreamChanged() {
    Mutex::Autolock lock(mMutex);
    if (mBLASTBufferQueue != nullptr) {
        sp<NativeHandle> stream = getSidebandStream();
        mBLASTBufferQueue->setSidebandStream(stream);
    }
}

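// BLASTBufferQueue glues a client-side BufferQueue to a SurfaceControl: buffers queued by the
// producer are acquired here and forwarded to SurfaceFlinger as transactions carrying the buffer,
// its fences, and geometry. Rough usage sketch (illustrative only; the caller-side variable names
// are hypothetical):
//
//     sp<BLASTBufferQueue> bbq = new BLASTBufferQueue("MyLayer", surfaceControl, w, h, format);
//     sp<Surface> surface = bbq->getSurface(false /* includeSurfaceControlHandle */);
//     // Render into `surface` via EGL/ANativeWindow; each queued buffer becomes a transaction.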
BLASTBufferQueue::BLASTBufferQueue(const std::string& name, const sp<SurfaceControl>& surface,
                                   int width, int height, int32_t format)
      : mName(name),
        mSurfaceControl(surface),
        mSize(width, height),
        mRequestedSize(mSize),
        mFormat(format),
        mNextTransaction(nullptr) {
    createBufferQueue(&mProducer, &mConsumer);
    // Since the adapter is in the client process, set the dequeue timeout explicitly so that
    // dequeueBuffer will block.
    mProducer->setDequeueTimeout(std::numeric_limits<int64_t>::max());

    // Safe default; most producers are expected to override this.
    mProducer->setMaxDequeuedBufferCount(2);
    mBufferItemConsumer = new BLASTBufferItemConsumer(mConsumer,
                                                      GraphicBuffer::USAGE_HW_COMPOSER |
                                                              GraphicBuffer::USAGE_HW_TEXTURE,
                                                      1, false);
    static int32_t id = 0;
    auto consumerName = mName + "(BLAST Consumer)" + std::to_string(id);
    mQueuedBufferTrace = "QueuedBuffer - " + mName + "BLAST#" + std::to_string(id);
    id++;
    mBufferItemConsumer->setName(String8(consumerName.c_str()));
    mBufferItemConsumer->setFrameAvailableListener(this);
    mBufferItemConsumer->setBufferFreedListener(this);
    mBufferItemConsumer->setDefaultBufferSize(mSize.width, mSize.height);
    mBufferItemConsumer->setDefaultBufferFormat(convertBufferFormat(format));
    mBufferItemConsumer->setBlastBufferQueue(this);

    ComposerService::getComposerService()->getMaxAcquiredBufferCount(&mMaxAcquiredBuffers);
    mBufferItemConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBuffers);

    mTransformHint = mSurfaceControl->getTransformHint();
    mBufferItemConsumer->setTransformHint(mTransformHint);
    SurfaceComposerClient::Transaction()
            .setFlags(surface, layer_state_t::eEnableBackpressure,
                      layer_state_t::eEnableBackpressure)
            .apply();
    mNumAcquired = 0;
    mNumFrameAvailable = 0;
}

BLASTBufferQueue::~BLASTBufferQueue() {
    mBufferItemConsumer->setBlastBufferQueue(nullptr);
    if (mPendingTransactions.empty()) {
        return;
    }
    BQA_LOGE("Applying pending transactions on dtor %d",
             static_cast<uint32_t>(mPendingTransactions.size()));
    SurfaceComposerClient::Transaction t;
    for (auto& [targetFrameNumber, transaction] : mPendingTransactions) {
        t.merge(std::move(transaction));
    }
    t.apply();
}

void BLASTBufferQueue::update(const sp<SurfaceControl>& surface, uint32_t width, uint32_t height,
                              int32_t format) {
    std::unique_lock _lock{mMutex};
    BQA_LOGV("update width=%d height=%d format=%d", width, height, format);
    if (mFormat != format) {
        mFormat = format;
        mBufferItemConsumer->setDefaultBufferFormat(convertBufferFormat(format));
    }

    SurfaceComposerClient::Transaction t;
    bool applyTransaction = false;
    if (!SurfaceControl::isSameSurface(mSurfaceControl, surface)) {
        mSurfaceControl = surface;
        t.setFlags(mSurfaceControl, layer_state_t::eEnableBackpressure,
                   layer_state_t::eEnableBackpressure);
        applyTransaction = true;
    }

    if (mSurfaceControl != nullptr) {
        mTransformHint = mSurfaceControl->getTransformHint();
        mBufferItemConsumer->setTransformHint(mTransformHint);
    }

    ui::Size newSize(width, height);
    if (mRequestedSize != newSize) {
        mRequestedSize.set(newSize);
        mBufferItemConsumer->setDefaultBufferSize(mRequestedSize.width, mRequestedSize.height);
        if (mLastBufferInfo.scalingMode != NATIVE_WINDOW_SCALING_MODE_FREEZE) {
            // If the buffer supports scaling, update the frame immediately since the client may
            // want to scale the existing buffer to the new size.
            mSize = mRequestedSize;
            // We only need to update the scale if we've received at least one buffer. The reason
            // for this is the scale is calculated based on the requested size and buffer size.
            // If there's no buffer, the scale will always be 1.
            if (mLastBufferInfo.hasBuffer) {
                t.setDestinationFrame(mSurfaceControl,
                                      Rect(0, 0, newSize.getWidth(), newSize.getHeight()));
            }
            applyTransaction = true;
        }
    }
    if (applyTransaction) {
        t.apply();
    }
}

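// SurfaceComposerClient transaction-complete callbacks take a raw context pointer; this thunk
// casts it back to the BLASTBufferQueue that registered it (kept alive via the incStrong/decStrong
// pair around the callback) and forwards the stats.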
static void transactionCallbackThunk(void* context, nsecs_t latchTime,
                                     const sp<Fence>& presentFence,
                                     const std::vector<SurfaceControlStats>& stats) {
    if (context == nullptr) {
        return;
    }
    sp<BLASTBufferQueue> bq = static_cast<BLASTBufferQueue*>(context);
    bq->transactionCallback(latchTime, presentFence, stats);
}

void BLASTBufferQueue::transactionCallback(nsecs_t /*latchTime*/, const sp<Fence>& /*presentFence*/,
                                           const std::vector<SurfaceControlStats>& stats) {
    std::function<void(int64_t)> transactionCompleteCallback = nullptr;
    uint64_t currFrameNumber = 0;

    {
        std::unique_lock _lock{mMutex};
        ATRACE_CALL();
        BQA_LOGV("transactionCallback");

        if (!mSurfaceControlsWithPendingCallback.empty()) {
            sp<SurfaceControl> pendingSC = mSurfaceControlsWithPendingCallback.front();
            mSurfaceControlsWithPendingCallback.pop();
            bool found = false;
            for (auto stat : stats) {
                if (!SurfaceControl::isSameSurface(pendingSC, stat.surfaceControl)) {
                    continue;
                }

                mTransformHint = stat.transformHint;
                mBufferItemConsumer->setTransformHint(mTransformHint);
                // Update frame timestamps if the frame was latched and presented, indicated by a
                // valid latch time.
                if (stat.latchTime > 0) {
                    mBufferItemConsumer
                            ->updateFrameTimestamps(stat.frameEventStats.frameNumber,
                                                    stat.frameEventStats.refreshStartTime,
                                                    stat.frameEventStats.gpuCompositionDoneFence,
                                                    stat.presentFence, stat.previousReleaseFence,
                                                    stat.frameEventStats.compositorTiming,
                                                    stat.latchTime,
                                                    stat.frameEventStats.dequeueReadyTime);
                }
                currFrameNumber = stat.frameEventStats.frameNumber;

                if (mTransactionCompleteCallback &&
                    currFrameNumber >= mTransactionCompleteFrameNumber) {
                    if (currFrameNumber > mTransactionCompleteFrameNumber) {
                        BQA_LOGE("transactionCallback received for a newer framenumber=%" PRIu64
                                 " than expected=%" PRIu64,
                                 currFrameNumber, mTransactionCompleteFrameNumber);
                    }
                    transactionCompleteCallback = std::move(mTransactionCompleteCallback);
                    mTransactionCompleteFrameNumber = 0;
                }

                found = true;
                break;
            }

            if (!found) {
                BQA_LOGE("Failed to find matching SurfaceControl in transaction callback");
            }
        } else {
            BQA_LOGE("No matching SurfaceControls found: mSurfaceControlsWithPendingCallback was "
                     "empty.");
        }

        decStrong((void*)transactionCallbackThunk);
    }

    if (transactionCompleteCallback) {
        transactionCompleteCallback(currFrameNumber);
    }
}

// Unlike transactionCallbackThunk, the release buffer callback does not extend the life of the
// BBQ. This is because if the BBQ is destroyed, the buffers will be released by the client. So we
// pass in a weak pointer to the BBQ and, if it is still alive, release the buffer. Otherwise, this
// is a no-op.
static void releaseBufferCallbackThunk(wp<BLASTBufferQueue> context, uint64_t graphicBufferId,
                                       const sp<Fence>& releaseFence, uint32_t transformHint,
                                       uint32_t currentMaxAcquiredBufferCount) {
    sp<BLASTBufferQueue> blastBufferQueue = context.promote();
    ALOGV("releaseBufferCallbackThunk graphicBufferId=%" PRIu64 " blastBufferQueue=%s",
          graphicBufferId, blastBufferQueue ? "alive" : "dead");
    if (blastBufferQueue) {
        blastBufferQueue->releaseBufferCallback(graphicBufferId, releaseFence, transformHint,
                                                currentMaxAcquiredBufferCount);
    }
}

void BLASTBufferQueue::releaseBufferCallback(uint64_t graphicBufferId,
                                             const sp<Fence>& releaseFence, uint32_t transformHint,
                                             uint32_t currentMaxAcquiredBufferCount) {
    ATRACE_CALL();
    std::unique_lock _lock{mMutex};
    BQA_LOGV("releaseBufferCallback graphicBufferId=%" PRIu64, graphicBufferId);

    if (mSurfaceControl != nullptr) {
        mTransformHint = transformHint;
        mSurfaceControl->setTransformHint(transformHint);
        mBufferItemConsumer->setTransformHint(mTransformHint);
    }

    // Calculate how many buffers we need to hold before releasing them back to the buffer queue.
    // This prevents extra latency when we are running at a lower refresh rate than the max
    // supported. We only do this for EGL clients, as others don't care about latency.
    const bool isEGL = [&] {
        const auto it = mSubmitted.find(graphicBufferId);
        return it != mSubmitted.end() && it->second.mApi == NATIVE_WINDOW_API_EGL;
    }();

    const auto numPendingBuffersToHold =
            isEGL ? std::max(0u, mMaxAcquiredBuffers - currentMaxAcquiredBufferCount) : 0;
    mPendingRelease.emplace_back(ReleasedBuffer{graphicBufferId, releaseFence});

    // Release all buffers that are beyond the ones that we need to hold.
    while (mPendingRelease.size() > numPendingBuffersToHold) {
        const auto releaseBuffer = mPendingRelease.front();
        mPendingRelease.pop_front();
        auto it = mSubmitted.find(releaseBuffer.bufferId);
        if (it == mSubmitted.end()) {
            BQA_LOGE("ERROR: releaseBufferCallback without corresponding submitted buffer %" PRIu64,
                     graphicBufferId);
            return;
        }
        mNumAcquired--;
        mBufferItemConsumer->releaseBuffer(it->second, releaseBuffer.releaseFence);
        mSubmitted.erase(it);
        processNextBufferLocked(false /* useNextTransaction */);
    }

    ATRACE_INT("PendingRelease", mPendingRelease.size());
    ATRACE_INT(mQueuedBufferTrace.c_str(),
               mNumFrameAvailable + mNumAcquired - mPendingRelease.size());
    mCallbackCV.notify_all();
}

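// Core of the adapter: acquires the next available buffer from the BufferQueue and stages it on
// mSurfaceControl via setBuffer() along with its metadata (fences, crop, transform, dataspace).
// The transaction is applied here unless the caller supplied mNextTransaction, in which case the
// buffer rides along with the caller's transaction instead.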
void BLASTBufferQueue::processNextBufferLocked(bool useNextTransaction) {
    ATRACE_CALL();
    // If the next transaction is set, we want to guarantee that our acquire will not fail, so
    // don't include the extra buffer when checking if we can acquire the next buffer.
    const bool includeExtraAcquire = !useNextTransaction;
    if (mNumFrameAvailable == 0 || maxBuffersAcquired(includeExtraAcquire)) {
        mCallbackCV.notify_all();
        return;
    }

    if (mSurfaceControl == nullptr) {
        BQA_LOGE("ERROR : surface control is null");
        return;
    }

    SurfaceComposerClient::Transaction localTransaction;
    bool applyTransaction = true;
    SurfaceComposerClient::Transaction* t = &localTransaction;
    if (mNextTransaction != nullptr && useNextTransaction) {
        t = mNextTransaction;
        mNextTransaction = nullptr;
        applyTransaction = false;
    }

    BufferItem bufferItem;

    status_t status =
            mBufferItemConsumer->acquireBuffer(&bufferItem, 0 /* expectedPresent */, false);
    if (status == BufferQueue::NO_BUFFER_AVAILABLE) {
        BQA_LOGV("Failed to acquire a buffer, err=NO_BUFFER_AVAILABLE");
        return;
    } else if (status != OK) {
        BQA_LOGE("Failed to acquire a buffer, err=%s", statusToString(status).c_str());
        return;
    }
    auto buffer = bufferItem.mGraphicBuffer;
    mNumFrameAvailable--;

    if (buffer == nullptr) {
        mBufferItemConsumer->releaseBuffer(bufferItem, Fence::NO_FENCE);
        BQA_LOGE("Buffer was empty");
        return;
    }

    if (rejectBuffer(bufferItem)) {
        BQA_LOGE("rejecting buffer:active_size=%dx%d, requested_size=%dx%d "
                 "buffer{size=%dx%d transform=%d}",
                 mSize.width, mSize.height, mRequestedSize.width, mRequestedSize.height,
                 buffer->getWidth(), buffer->getHeight(), bufferItem.mTransform);
        mBufferItemConsumer->releaseBuffer(bufferItem, Fence::NO_FENCE);
        processNextBufferLocked(useNextTransaction);
        return;
    }

    mNumAcquired++;
    mSubmitted[buffer->getId()] = bufferItem;

    bool needsDisconnect = false;
    mBufferItemConsumer->getConnectionEvents(bufferItem.mFrameNumber, &needsDisconnect);

    // if producer disconnected before, notify SurfaceFlinger
    if (needsDisconnect) {
        t->notifyProducerDisconnect(mSurfaceControl);
    }

    // Ensure BLASTBufferQueue stays alive until we receive the transaction complete callback.
    incStrong((void*)transactionCallbackThunk);

    Rect crop = computeCrop(bufferItem);
    mLastAcquiredFrameNumber = bufferItem.mFrameNumber;
    mLastBufferInfo.update(true /* hasBuffer */, bufferItem.mGraphicBuffer->getWidth(),
                           bufferItem.mGraphicBuffer->getHeight(), bufferItem.mTransform,
                           bufferItem.mScalingMode, crop);

    auto releaseBufferCallback =
            std::bind(releaseBufferCallbackThunk, wp<BLASTBufferQueue>(this) /* callbackContext */,
                      std::placeholders::_1, std::placeholders::_2, std::placeholders::_3,
                      std::placeholders::_4);
    t->setBuffer(mSurfaceControl, buffer, releaseBufferCallback);
    t->setDataspace(mSurfaceControl, static_cast<ui::Dataspace>(bufferItem.mDataSpace));
    t->setHdrMetadata(mSurfaceControl, bufferItem.mHdrMetadata);
    t->setSurfaceDamageRegion(mSurfaceControl, bufferItem.mSurfaceDamage);
    t->setAcquireFence(mSurfaceControl,
                       bufferItem.mFence ? new Fence(bufferItem.mFence->dup()) : Fence::NO_FENCE);
    t->addTransactionCompletedCallback(transactionCallbackThunk, static_cast<void*>(this));
    mSurfaceControlsWithPendingCallback.push(mSurfaceControl);

    t->setDestinationFrame(mSurfaceControl, Rect(0, 0, mSize.getWidth(), mSize.getHeight()));
    t->setBufferCrop(mSurfaceControl, crop);
    t->setTransform(mSurfaceControl, bufferItem.mTransform);
    t->setTransformToDisplayInverse(mSurfaceControl, bufferItem.mTransformToDisplayInverse);
    if (!bufferItem.mIsAutoTimestamp) {
        t->setDesiredPresentTime(bufferItem.mTimestamp);
    }
    t->setFrameNumber(mSurfaceControl, bufferItem.mFrameNumber);

    if (!mNextFrameTimelineInfoQueue.empty()) {
        t->setFrameTimelineInfo(mNextFrameTimelineInfoQueue.front());
        mNextFrameTimelineInfoQueue.pop();
    }

    if (mAutoRefresh != bufferItem.mAutoRefresh) {
        t->setAutoRefresh(mSurfaceControl, bufferItem.mAutoRefresh);
        mAutoRefresh = bufferItem.mAutoRefresh;
    }
    {
        std::unique_lock _lock{mTimestampMutex};
        auto dequeueTime = mDequeueTimestamps.find(buffer->getId());
        if (dequeueTime != mDequeueTimestamps.end()) {
            Parcel p;
            p.writeInt64(dequeueTime->second);
            t->setMetadata(mSurfaceControl, METADATA_DEQUEUE_TIME, p);
            mDequeueTimestamps.erase(dequeueTime);
        }
    }

    auto mergeTransaction =
            [&t, currentFrameNumber = bufferItem.mFrameNumber](
                    std::tuple<uint64_t, SurfaceComposerClient::Transaction> pendingTransaction) {
                auto& [targetFrameNumber, transaction] = pendingTransaction;
                if (currentFrameNumber < targetFrameNumber) {
                    return false;
                }
                t->merge(std::move(transaction));
                return true;
            };

    mPendingTransactions.erase(std::remove_if(mPendingTransactions.begin(),
                                              mPendingTransactions.end(), mergeTransaction),
                               mPendingTransactions.end());

    if (applyTransaction) {
        t->setApplyToken(mApplyToken).apply();
    }

    BQA_LOGV("processNextBufferLocked size=%dx%d mFrameNumber=%" PRIu64
             " applyTransaction=%s mTimestamp=%" PRId64 "%s mPendingTransactions.size=%d"
             " graphicBufferId=%" PRIu64,
             mSize.width, mSize.height, bufferItem.mFrameNumber, toString(applyTransaction),
             bufferItem.mTimestamp, bufferItem.mIsAutoTimestamp ? "(auto)" : "",
             static_cast<uint32_t>(mPendingTransactions.size()),
             bufferItem.mGraphicBuffer->getId());
}

Rect BLASTBufferQueue::computeCrop(const BufferItem& item) {
    if (item.mScalingMode == NATIVE_WINDOW_SCALING_MODE_SCALE_CROP) {
        return GLConsumer::scaleDownCrop(item.mCrop, mSize.width, mSize.height);
    }
    return item.mCrop;
}

void BLASTBufferQueue::onFrameAvailable(const BufferItem& item) {
    ATRACE_CALL();
    std::unique_lock _lock{mMutex};

    const bool nextTransactionSet = mNextTransaction != nullptr;
    if (nextTransactionSet) {
        while (mNumFrameAvailable > 0 || maxBuffersAcquired(false /* includeExtraAcquire */)) {
            BQA_LOGV("waiting in onFrameAvailable...");
            mCallbackCV.wait(_lock);
        }
    }
    // add to shadow queue
    mNumFrameAvailable++;
    ATRACE_INT(mQueuedBufferTrace.c_str(),
               mNumFrameAvailable + mNumAcquired - mPendingRelease.size());

    BQA_LOGV("onFrameAvailable framenumber=%" PRIu64 " nextTransactionSet=%s", item.mFrameNumber,
             toString(nextTransactionSet));
    processNextBufferLocked(nextTransactionSet /* useNextTransaction */);
}

void BLASTBufferQueue::onFrameReplaced(const BufferItem& item) {
    BQA_LOGV("onFrameReplaced framenumber=%" PRIu64, item.mFrameNumber);
    // Do nothing since we are not storing unacquired buffer items locally.
}

void BLASTBufferQueue::onFrameDequeued(const uint64_t bufferId) {
    std::unique_lock _lock{mTimestampMutex};
    mDequeueTimestamps[bufferId] = systemTime();
};

void BLASTBufferQueue::onFrameCancelled(const uint64_t bufferId) {
    std::unique_lock _lock{mTimestampMutex};
    mDequeueTimestamps.erase(bufferId);
};

void BLASTBufferQueue::setNextTransaction(SurfaceComposerClient::Transaction* t) {
    std::lock_guard _lock{mMutex};
    mNextTransaction = t;
}

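// A buffer is rejected (and immediately released) when its dimensions no longer match the layer's
// active size and the scaling mode is FREEZE; for other scaling modes the compositor can stretch
// the buffer, so a size mismatch is tolerated and the active size is simply updated.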
bool BLASTBufferQueue::rejectBuffer(const BufferItem& item) {
    if (item.mScalingMode != NATIVE_WINDOW_SCALING_MODE_FREEZE) {
        mSize = mRequestedSize;
        // Only reject buffers if scaling mode is freeze.
        return false;
    }

    uint32_t bufWidth = item.mGraphicBuffer->getWidth();
    uint32_t bufHeight = item.mGraphicBuffer->getHeight();

    // Take the buffer's orientation into account.
    if (item.mTransform & ui::Transform::ROT_90) {
        std::swap(bufWidth, bufHeight);
    }
    ui::Size bufferSize(bufWidth, bufHeight);
    if (mRequestedSize != mSize && mRequestedSize == bufferSize) {
        mSize = mRequestedSize;
        return false;
    }

    // Reject buffers if the buffer size doesn't match.
    return mSize != bufferSize;
}

void BLASTBufferQueue::setTransactionCompleteCallback(
        uint64_t frameNumber, std::function<void(int64_t)>&& transactionCompleteCallback) {
    std::lock_guard _lock{mMutex};
    if (transactionCompleteCallback == nullptr) {
        mTransactionCompleteCallback = nullptr;
    } else {
        mTransactionCompleteCallback = std::move(transactionCompleteCallback);
        mTransactionCompleteFrameNumber = frameNumber;
    }
}

// Check if we have acquired the maximum number of buffers.
// The consumer can acquire an additional buffer if that buffer is not droppable. Set
// includeExtraAcquire to true to include this buffer in the count. Since this depends on the state
// of the buffer, the next acquire may still return NO_BUFFER_AVAILABLE.
bool BLASTBufferQueue::maxBuffersAcquired(bool includeExtraAcquire) const {
    int maxAcquiredBuffers = mMaxAcquiredBuffers + (includeExtraAcquire ? 2 : 1);
    return mNumAcquired == maxAcquiredBuffers;
}

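// Surface subclass handed out by getSurface(). It overrides the entry points that need to go
// through the adapter rather than straight to the BufferQueue: buffer pre-allocation runs on a
// detached worker thread, while frame rate, frame timeline info, and the transform hint are routed
// to the owning BLASTBufferQueue.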
class BBQSurface : public Surface {
private:
    sp<BLASTBufferQueue> mBbq;

public:
    BBQSurface(const sp<IGraphicBufferProducer>& igbp, bool controlledByApp,
               const sp<IBinder>& scHandle, const sp<BLASTBufferQueue>& bbq)
          : Surface(igbp, controlledByApp, scHandle), mBbq(bbq) {}

    void allocateBuffers() override {
        uint32_t reqWidth = mReqWidth ? mReqWidth : mUserWidth;
        uint32_t reqHeight = mReqHeight ? mReqHeight : mUserHeight;
        auto gbp = getIGraphicBufferProducer();
        std::thread([reqWidth, reqHeight, gbp = getIGraphicBufferProducer(),
                     reqFormat = mReqFormat, reqUsage = mReqUsage]() {
            gbp->allocateBuffers(reqWidth, reqHeight, reqFormat, reqUsage);
        }).detach();
    }

    status_t setFrameRate(float frameRate, int8_t compatibility,
                          int8_t changeFrameRateStrategy) override {
        if (!ValidateFrameRate(frameRate, compatibility, changeFrameRateStrategy,
                               "BBQSurface::setFrameRate")) {
            return BAD_VALUE;
        }
        return mBbq->setFrameRate(frameRate, compatibility, changeFrameRateStrategy);
    }

    status_t setFrameTimelineInfo(const FrameTimelineInfo& frameTimelineInfo) override {
        return mBbq->setFrameTimelineInfo(frameTimelineInfo);
    }

protected:
    uint32_t getTransformHint() const override {
        if (mStickyTransform == 0 && !transformToDisplayInverse()) {
            return mBbq->getLastTransformHint();
        } else {
            return 0;
        }
    }
};

// TODO: Can we coalesce this with frame updates? Need to confirm
// no timing issues.
status_t BLASTBufferQueue::setFrameRate(float frameRate, int8_t compatibility,
                                        bool shouldBeSeamless) {
    std::unique_lock _lock{mMutex};
    SurfaceComposerClient::Transaction t;

    return t.setFrameRate(mSurfaceControl, frameRate, compatibility, shouldBeSeamless).apply();
}

status_t BLASTBufferQueue::setFrameTimelineInfo(const FrameTimelineInfo& frameTimelineInfo) {
    std::unique_lock _lock{mMutex};
    mNextFrameTimelineInfoQueue.push(frameTimelineInfo);
    return OK;
}

void BLASTBufferQueue::setSidebandStream(const sp<NativeHandle>& stream) {
    std::unique_lock _lock{mMutex};
    SurfaceComposerClient::Transaction t;

    t.setSidebandStream(mSurfaceControl, stream).apply();
}

sp<Surface> BLASTBufferQueue::getSurface(bool includeSurfaceControlHandle) {
    std::unique_lock _lock{mMutex};
    sp<IBinder> scHandle = nullptr;
    if (includeSurfaceControlHandle && mSurfaceControl) {
        scHandle = mSurfaceControl->getHandle();
    }
    return new BBQSurface(mProducer, true, scHandle, this);
}

void BLASTBufferQueue::mergeWithNextTransaction(SurfaceComposerClient::Transaction* t,
                                                uint64_t frameNumber) {
    std::lock_guard _lock{mMutex};
    if (mLastAcquiredFrameNumber >= frameNumber) {
        // Apply the transaction since we have already acquired the desired frame.
        t->apply();
    } else {
        mPendingTransactions.emplace_back(frameNumber, *t);
        // Clear the transaction so it can't be applied elsewhere.
        t->clear();
    }
}

// Maintains a single worker thread per process that services a list of runnables.
class AsyncWorker : public Singleton<AsyncWorker> {
private:
    std::thread mThread;
    bool mDone = false;
    std::deque<std::function<void()>> mRunnables;
    std::mutex mMutex;
    std::condition_variable mCv;
    void run() {
        std::unique_lock<std::mutex> lock(mMutex);
        while (!mDone) {
            while (!mRunnables.empty()) {
                std::function<void()> runnable = mRunnables.front();
                mRunnables.pop_front();
                runnable();
            }
            mCv.wait(lock);
        }
    }

public:
    AsyncWorker() : Singleton<AsyncWorker>() { mThread = std::thread(&AsyncWorker::run, this); }

    ~AsyncWorker() {
        mDone = true;
        mCv.notify_all();
        if (mThread.joinable()) {
            mThread.join();
        }
    }

    void post(std::function<void()> runnable) {
        std::unique_lock<std::mutex> lock(mMutex);
        mRunnables.emplace_back(std::move(runnable));
        mCv.notify_one();
    }
};
ANDROID_SINGLETON_STATIC_INSTANCE(AsyncWorker);

// Asynchronously calls ProducerListener functions so we can emulate one-way binder calls.
class AsyncProducerListener : public BnProducerListener {
private:
    const sp<IProducerListener> mListener;

public:
    AsyncProducerListener(const sp<IProducerListener>& listener) : mListener(listener) {}

    void onBufferReleased() override {
        AsyncWorker::getInstance().post([listener = mListener]() { listener->onBufferReleased(); });
    }

    void onBuffersDiscarded(const std::vector<int32_t>& slots) override {
        AsyncWorker::getInstance().post(
                [listener = mListener, slots = slots]() { listener->onBuffersDiscarded(slots); });
    }
};

// Extends the BufferQueueProducer to create a wrapper around the listener so the listener calls
// can be non-blocking when the producer is in the client process.
class BBQBufferQueueProducer : public BufferQueueProducer {
public:
    BBQBufferQueueProducer(const sp<BufferQueueCore>& core)
          : BufferQueueProducer(core, false /* consumerIsSurfaceFlinger */) {}

    status_t connect(const sp<IProducerListener>& listener, int api, bool producerControlledByApp,
                     QueueBufferOutput* output) override {
        if (!listener) {
            return BufferQueueProducer::connect(listener, api, producerControlledByApp, output);
        }

        return BufferQueueProducer::connect(new AsyncProducerListener(listener), api,
                                            producerControlledByApp, output);
    }

    int query(int what, int* value) override {
        if (what == NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER) {
            *value = 1;
            return NO_ERROR;
        }
        return BufferQueueProducer::query(what, value);
    }
};

// Similar to BufferQueue::createBufferQueue but creates an adapter-specific BufferQueue producer.
// This BQP allows invoking client-specified ProducerListeners and invokes them asynchronously,
// emulating one-way binder call behavior. Without this, if the listener calls back into the queue,
// we can deadlock.
void BLASTBufferQueue::createBufferQueue(sp<IGraphicBufferProducer>* outProducer,
                                         sp<IGraphicBufferConsumer>* outConsumer) {
    LOG_ALWAYS_FATAL_IF(outProducer == nullptr, "BLASTBufferQueue: outProducer must not be NULL");
    LOG_ALWAYS_FATAL_IF(outConsumer == nullptr, "BLASTBufferQueue: outConsumer must not be NULL");

    sp<BufferQueueCore> core(new BufferQueueCore());
    LOG_ALWAYS_FATAL_IF(core == nullptr, "BLASTBufferQueue: failed to create BufferQueueCore");

    sp<IGraphicBufferProducer> producer(new BBQBufferQueueProducer(core));
    LOG_ALWAYS_FATAL_IF(producer == nullptr,
                        "BLASTBufferQueue: failed to create BBQBufferQueueProducer");

    sp<BufferQueueConsumer> consumer(new BufferQueueConsumer(core));
    consumer->setAllowExtraAcquire(true);
    LOG_ALWAYS_FATAL_IF(consumer == nullptr,
                        "BLASTBufferQueue: failed to create BufferQueueConsumer");

    *outProducer = producer;
    *outConsumer = consumer;
}

PixelFormat BLASTBufferQueue::convertBufferFormat(PixelFormat& format) {
    PixelFormat convertedFormat = format;
    switch (format) {
        case PIXEL_FORMAT_TRANSPARENT:
        case PIXEL_FORMAT_TRANSLUCENT:
            convertedFormat = PIXEL_FORMAT_RGBA_8888;
            break;
        case PIXEL_FORMAT_OPAQUE:
            convertedFormat = PIXEL_FORMAT_RGBX_8888;
            break;
    }
    return convertedFormat;
}

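// Latest transform hint as reported by the SurfaceControl, surfaced through BBQSurface so
// producers can pre-rotate their buffers; returns 0 (no hint) once the layer is gone.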
uint32_t BLASTBufferQueue::getLastTransformHint() const {
    if (mSurfaceControl != nullptr) {
        return mSurfaceControl->getTransformHint();
    } else {
        return 0;
    }
}

} // namespace android