/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#undef LOG_TAG
#define LOG_TAG "BufferLayer"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS

#include "BufferLayer.h"

#include <compositionengine/CompositionEngine.h>
#include <compositionengine/Layer.h>
#include <compositionengine/LayerCreationArgs.h>
#include <compositionengine/LayerFECompositionState.h>
#include <compositionengine/OutputLayer.h>
#include <compositionengine/impl/OutputLayerCompositionState.h>
#include <cutils/compiler.h>
#include <cutils/native_handle.h>
#include <cutils/properties.h>
#include <gui/BufferItem.h>
#include <gui/BufferQueue.h>
#include <gui/GLConsumer.h>
#include <gui/LayerDebugInfo.h>
#include <gui/Surface.h>
#include <renderengine/RenderEngine.h>
#include <ui/DebugUtils.h>
#include <utils/Errors.h>
#include <utils/Log.h>
#include <utils/NativeHandle.h>
#include <utils/StopWatch.h>
#include <utils/Trace.h>

#include <cmath>
#include <cstdlib>
#include <mutex>
#include <sstream>

#include "Colorizer.h"
#include "DisplayDevice.h"
#include "FrameTracer/FrameTracer.h"
#include "LayerRejecter.h"
#include "TimeStats/TimeStats.h"

namespace android {

BufferLayer::BufferLayer(const LayerCreationArgs& args)
      : Layer(args),
        mTextureName(args.textureName),
        mCompositionLayer{mFlinger->getCompositionEngine().createLayer(
                compositionengine::LayerCreationArgs{this})} {
    ALOGV("Creating Layer %s", getDebugName());

    mPremultipliedAlpha = !(args.flags & ISurfaceComposerClient::eNonPremultiplied);

    mPotentialCursor = args.flags & ISurfaceComposerClient::eCursorWindow;
    mProtectedByApp = args.flags & ISurfaceComposerClient::eProtectedByApp;
}

BufferLayer::~BufferLayer() {
    if (!isClone()) {
        // The original layer and the clone layer share the same texture. Therefore, only one of
        // the layers, in this case the original layer, needs to handle the deletion. The original
        // layer and the clone should be removed at the same time so there shouldn't be any issue
        // with the clone layer trying to use the deleted texture.
        mFlinger->deleteTextureAsync(mTextureName);
    }
    const int32_t layerId = getSequence();
    mFlinger->mTimeStats->onDestroy(layerId);
    mFlinger->mFrameTracer->onDestroy(layerId);
}

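// Adopt the surface damage that arrived with the current buffer, or force full damage when the
// debug flag mFlinger->mForceFullDamage is set.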
void BufferLayer::useSurfaceDamage() {
    if (mFlinger->mForceFullDamage) {
        surfaceDamageRegion = Region::INVALID_REGION;
    } else {
        surfaceDamageRegion = mBufferInfo.mSurfaceDamage;
    }
}

void BufferLayer::useEmptyDamage() {
    surfaceDamageRegion.clear();
}

bool BufferLayer::isOpaque(const Layer::State& s) const {
    // if we don't have a buffer or sidebandStream yet, we're translucent regardless of the
    // layer's opaque flag.
    if ((mSidebandStream == nullptr) && (mBufferInfo.mBuffer == nullptr)) {
        return false;
    }

    // if the layer has the opaque flag, then we're always opaque,
    // otherwise we use the current buffer's format.
    return ((s.flags & layer_state_t::eLayerOpaque) != 0) || getOpacityForFormat(getPixelFormat());
}

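// A buffer layer is visible only when it is not hidden by policy, has a non-zero alpha, and has
// either a buffer or a sideband stream attached.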
bool BufferLayer::isVisible() const {
    return !isHiddenByPolicy() && getAlpha() > 0.0f &&
            (mBufferInfo.mBuffer != nullptr || mSidebandStream != nullptr);
}

bool BufferLayer::isFixedSize() const {
    return getEffectiveScalingMode() != NATIVE_WINDOW_SCALING_MODE_FREEZE;
}

bool BufferLayer::usesSourceCrop() const {
    return true;
}

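// Builds the transform matrix described by the NATIVE_WINDOW_TRANSFORM_* flags (90-degree
// rotation and horizontal/vertical flips) and returns its inverse.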
static constexpr mat4 inverseOrientation(uint32_t transform) {
    const mat4 flipH(-1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1);
    const mat4 flipV(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1);
    const mat4 rot90(0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1);
    mat4 tr;

    if (transform & NATIVE_WINDOW_TRANSFORM_ROT_90) {
        tr = tr * rot90;
    }
    if (transform & NATIVE_WINDOW_TRANSFORM_FLIP_H) {
        tr = tr * flipH;
    }
    if (transform & NATIVE_WINDOW_TRANSFORM_FLIP_V) {
        tr = tr * flipV;
    }
    return inverse(tr);
}

std::optional<renderengine::LayerSettings> BufferLayer::prepareClientComposition(
        compositionengine::LayerFE::ClientCompositionTargetSettings& targetSettings) {
    ATRACE_CALL();

    auto result = Layer::prepareClientComposition(targetSettings);
    if (!result) {
        return result;
    }

    if (CC_UNLIKELY(mBufferInfo.mBuffer == 0)) {
        // the texture has not been created yet, this Layer has
        // in fact never been drawn into. This happens frequently with
        // SurfaceView because the WindowManager can't know when the client
        // has drawn the first time.

        // If there is nothing under us, we paint the screen in black, otherwise
        // we just skip this update.

        // figure out if there is something below us
        Region under;
        bool finished = false;
        mFlinger->mDrawingState.traverseInZOrder([&](Layer* layer) {
            if (finished || layer == static_cast<BufferLayer const*>(this)) {
                finished = true;
                return;
            }

            under.orSelf(layer->getScreenBounds());
        });
        // if not everything below us is covered, we plug the holes!
        Region holes(targetSettings.clip.subtract(under));
        if (!holes.isEmpty()) {
            targetSettings.clearRegion.orSelf(holes);
        }
        return std::nullopt;
    }
    bool blackOutLayer = (isProtected() && !targetSettings.supportsProtectedContent) ||
            (isSecure() && !targetSettings.isSecure);
    const State& s(getDrawingState());
    auto& layer = *result;
    if (!blackOutLayer) {
        layer.source.buffer.buffer = mBufferInfo.mBuffer;
        layer.source.buffer.isOpaque = isOpaque(s);
        layer.source.buffer.fence = mBufferInfo.mFence;
        layer.source.buffer.textureName = mTextureName;
        layer.source.buffer.usePremultipliedAlpha = getPremultipledAlpha();
        layer.source.buffer.isY410BT2020 = isHdrY410();
        // TODO: we could be more subtle with isFixedSize()
        const bool useFiltering = targetSettings.needsFiltering || mNeedsFiltering || isFixedSize();

        // Query the texture matrix given our current filtering mode.
        float textureMatrix[16];
        getDrawingTransformMatrix(useFiltering, textureMatrix);

        if (getTransformToDisplayInverse()) {
            /*
             * the code below applies the primary display's inverse transform to
             * the texture transform
             */
            uint32_t transform = DisplayDevice::getPrimaryDisplayOrientationTransform();
            mat4 tr = inverseOrientation(transform);

            /**
             * TODO(b/36727915): This is basically a hack.
             *
             * Ensure that regardless of the parent transformation,
             * this buffer is always transformed from native display
             * orientation to display orientation. For example, in the case
             * of a camera where the buffer remains in native orientation,
             * we want the pixels to always be upright.
             */
            sp<Layer> p = mDrawingParent.promote();
            if (p != nullptr) {
                const auto parentTransform = p->getTransform();
                tr = tr * inverseOrientation(parentTransform.getOrientation());
            }

            // and finally apply it to the original texture matrix
            const mat4 texTransform(mat4(static_cast<const float*>(textureMatrix)) * tr);
            memcpy(textureMatrix, texTransform.asArray(), sizeof(textureMatrix));
        }

        const Rect win{getBounds()};
        float bufferWidth = getBufferSize(s).getWidth();
        float bufferHeight = getBufferSize(s).getHeight();

        // BufferStateLayers can have a "buffer size" of [0, 0, -1, -1] when no display frame has
        // been set and there is no parent layer bounds. In that case, the scale is meaningless so
        // ignore them.
        if (!getBufferSize(s).isValid()) {
            bufferWidth = float(win.right) - float(win.left);
            bufferHeight = float(win.bottom) - float(win.top);
        }

        const float scaleHeight = (float(win.bottom) - float(win.top)) / bufferHeight;
        const float scaleWidth = (float(win.right) - float(win.left)) / bufferWidth;
        const float translateY = float(win.top) / bufferHeight;
        const float translateX = float(win.left) / bufferWidth;

        // Flip y-coordinates because GLConsumer expects OpenGL convention.
        mat4 tr = mat4::translate(vec4(.5, .5, 0, 1)) * mat4::scale(vec4(1, -1, 1, 1)) *
                mat4::translate(vec4(-.5, -.5, 0, 1)) *
                mat4::translate(vec4(translateX, translateY, 0, 1)) *
                mat4::scale(vec4(scaleWidth, scaleHeight, 1.0, 1.0));

        layer.source.buffer.useTextureFiltering = useFiltering;
        layer.source.buffer.textureTransform = mat4(static_cast<const float*>(textureMatrix)) * tr;
    } else {
        // If layer is blacked out, force alpha to 1 so that we draw a black color
        // layer.
        layer.source.buffer.buffer = nullptr;
        layer.alpha = 1.0;
    }

    return result;
}

bool BufferLayer::isHdrY410() const {
    // pixel format is HDR Y410 masquerading as RGBA_1010102
    return (mBufferInfo.mDataspace == ui::Dataspace::BT2020_ITU_PQ &&
            mBufferInfo.mApi == NATIVE_WINDOW_API_MEDIA &&
            mBufferInfo.mBuffer->getPixelFormat() == HAL_PIXEL_FORMAT_RGBA_1010102);
}

void BufferLayer::latchPerFrameState(
        compositionengine::LayerFECompositionState& compositionState) const {
    Layer::latchPerFrameState(compositionState);

    // Sideband layers
    if (compositionState.sidebandStream.get()) {
        compositionState.compositionType = Hwc2::IComposerClient::Composition::SIDEBAND;
    } else {
        // Normal buffer layers
        compositionState.hdrMetadata = mBufferInfo.mHdrMetadata;
        compositionState.compositionType = mPotentialCursor
                ? Hwc2::IComposerClient::Composition::CURSOR
                : Hwc2::IComposerClient::Composition::DEVICE;
    }
}

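// Called at the start of a composition pass: records the refresh start time in the frame event
// history, clears the pending-refresh flag, and reports whether a frame is ready to latch.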
bool BufferLayer::onPreComposition(nsecs_t refreshStartTime) {
    if (mBufferInfo.mBuffer != nullptr) {
        Mutex::Autolock lock(mFrameEventHistoryMutex);
        mFrameEventHistory.addPreComposition(mCurrentFrameNumber, refreshStartTime);
    }
    mRefreshPending = false;
    return hasReadyFrame();
}

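// Called after a composition pass: publishes the GL-done/present fences and compositor timing to
// the frame event history, FrameTracker, TimeStats and FrameTracer. Returns false when no new
// frame was latched for this composition.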
bool BufferLayer::onPostComposition(sp<const DisplayDevice> displayDevice,
                                    const std::shared_ptr<FenceTime>& glDoneFence,
                                    const std::shared_ptr<FenceTime>& presentFence,
                                    const CompositorTiming& compositorTiming) {
    // mFrameLatencyNeeded is true when a new frame was latched for the
    // composition.
    if (!mBufferInfo.mFrameLatencyNeeded) return false;

    // Update mFrameEventHistory.
    {
        Mutex::Autolock lock(mFrameEventHistoryMutex);
        mFrameEventHistory.addPostComposition(mCurrentFrameNumber, glDoneFence, presentFence,
                                              compositorTiming);
    }

    // Update mFrameTracker.
    nsecs_t desiredPresentTime = mBufferInfo.mDesiredPresentTime;
    mFrameTracker.setDesiredPresentTime(desiredPresentTime);

    const int32_t layerId = getSequence();
    mFlinger->mTimeStats->setDesiredTime(layerId, mCurrentFrameNumber, desiredPresentTime);

    const auto outputLayer = findOutputLayerForDisplay(displayDevice);
    if (outputLayer && outputLayer->requiresClientComposition()) {
        nsecs_t clientCompositionTimestamp = outputLayer->getState().clientCompositionTimestamp;
        mFlinger->mFrameTracer->traceTimestamp(layerId, getCurrentBufferId(), mCurrentFrameNumber,
                                               clientCompositionTimestamp,
                                               FrameTracer::FrameEvent::FALLBACK_COMPOSITION);
    }

    std::shared_ptr<FenceTime> frameReadyFence = mBufferInfo.mFenceTime;
    if (frameReadyFence->isValid()) {
        mFrameTracker.setFrameReadyFence(std::move(frameReadyFence));
    } else {
        // There was no fence for this frame, so assume that it was ready
        // to be presented at the desired present time.
        mFrameTracker.setFrameReadyTime(desiredPresentTime);
    }

    const auto displayId = displayDevice->getId();
    if (presentFence->isValid()) {
        mFlinger->mTimeStats->setPresentFence(layerId, mCurrentFrameNumber, presentFence);
        mFlinger->mFrameTracer->traceFence(layerId, getCurrentBufferId(), mCurrentFrameNumber,
                                           presentFence, FrameTracer::FrameEvent::PRESENT_FENCE);
        mFrameTracker.setActualPresentFence(std::shared_ptr<FenceTime>(presentFence));
    } else if (displayId && mFlinger->getHwComposer().isConnected(*displayId)) {
        // The HWC doesn't support present fences, so use the refresh
        // timestamp instead.
        const nsecs_t actualPresentTime = mFlinger->getHwComposer().getRefreshTimestamp(*displayId);
        mFlinger->mTimeStats->setPresentTime(layerId, mCurrentFrameNumber, actualPresentTime);
        mFlinger->mFrameTracer->traceTimestamp(layerId, getCurrentBufferId(), mCurrentFrameNumber,
                                               actualPresentTime,
                                               FrameTracer::FrameEvent::PRESENT_FENCE);
        mFrameTracker.setActualPresentTime(actualPresentTime);
    }

    mFrameTracker.advanceFrame();
    mBufferInfo.mFrameLatencyNeeded = false;
    return true;
}

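// Latches the next ready buffer (or sideband stream) for composition. Returns true when new
// content was latched; recomputeVisibleRegions is set when the new buffer changes geometry
// (first buffer, size, crop, transform, or opacity).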
bool BufferLayer::latchBuffer(bool& recomputeVisibleRegions, nsecs_t latchTime,
                              nsecs_t expectedPresentTime) {
    ATRACE_CALL();

    bool refreshRequired = latchSidebandStream(recomputeVisibleRegions);

    if (refreshRequired) {
        return refreshRequired;
    }

    if (!hasReadyFrame()) {
        return false;
    }

    // if we've already called updateTexImage() without going through
    // a composition step, we have to skip this layer at this point
    // because we cannot call updateTexImage() without a corresponding
    // compositionComplete() call.
    // we'll trigger an update in onPreComposition().
    if (mRefreshPending) {
        return false;
    }

    // If the head buffer's acquire fence hasn't signaled yet, return and
    // try again later
    if (!fenceHasSignaled()) {
        ATRACE_NAME("!fenceHasSignaled()");
        mFlinger->signalLayerUpdate();
        return false;
    }

    // Capture the old state of the layer for comparisons later
    const State& s(getDrawingState());
    const bool oldOpacity = isOpaque(s);

    BufferInfo oldBufferInfo = mBufferInfo;

    if (!allTransactionsSignaled(expectedPresentTime)) {
        mFlinger->setTransactionFlags(eTraversalNeeded);
        return false;
    }

    status_t err = updateTexImage(recomputeVisibleRegions, latchTime, expectedPresentTime);
    if (err != NO_ERROR) {
        return false;
    }

    err = updateActiveBuffer();
    if (err != NO_ERROR) {
        return false;
    }

    err = updateFrameNumber(latchTime);
    if (err != NO_ERROR) {
        return false;
    }

    gatherBufferInfo();

    mRefreshPending = true;
    mBufferInfo.mFrameLatencyNeeded = true;
    if (oldBufferInfo.mBuffer == nullptr) {
        // the first time we receive a buffer, we need to trigger a
        // geometry invalidation.
        recomputeVisibleRegions = true;
    }

    if ((mBufferInfo.mCrop != oldBufferInfo.mCrop) ||
        (mBufferInfo.mTransform != oldBufferInfo.mTransform) ||
        (mBufferInfo.mScaleMode != oldBufferInfo.mScaleMode) ||
        (mBufferInfo.mTransformToDisplayInverse != oldBufferInfo.mTransformToDisplayInverse)) {
        recomputeVisibleRegions = true;
    }

    if (oldBufferInfo.mBuffer != nullptr) {
        uint32_t bufWidth = mBufferInfo.mBuffer->getWidth();
        uint32_t bufHeight = mBufferInfo.mBuffer->getHeight();
        if (bufWidth != uint32_t(oldBufferInfo.mBuffer->width) ||
            bufHeight != uint32_t(oldBufferInfo.mBuffer->height)) {
            recomputeVisibleRegions = true;
        }
    }

    if (oldOpacity != isOpaque(s)) {
        recomputeVisibleRegions = true;
    }

    // Remove any sync points corresponding to the buffer which was just
    // latched
    {
        Mutex::Autolock lock(mLocalSyncPointMutex);
        auto point = mLocalSyncPoints.begin();
        while (point != mLocalSyncPoints.end()) {
            if (!(*point)->frameIsAvailable() || !(*point)->transactionIsApplied()) {
                // This sync point must have been added since we started
                // latching. Don't drop it yet.
                ++point;
                continue;
            }

            if ((*point)->getFrameNumber() <= mCurrentFrameNumber) {
                std::stringstream ss;
                ss << "Dropping sync point " << (*point)->getFrameNumber();
                ATRACE_NAME(ss.str().c_str());
                point = mLocalSyncPoints.erase(point);
            } else {
                ++point;
            }
        }
    }

    return true;
}

// transaction
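// Marks local sync points whose frames have arrived (and whose fences have signaled) as
// available, so that transactions waiting on this layer can be applied.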
void BufferLayer::notifyAvailableFrames(nsecs_t expectedPresentTime) {
    const auto headFrameNumber = getHeadFrameNumber(expectedPresentTime);
    const bool headFenceSignaled = fenceHasSignaled();
    const bool presentTimeIsCurrent = framePresentTimeIsCurrent(expectedPresentTime);
    Mutex::Autolock lock(mLocalSyncPointMutex);
    for (auto& point : mLocalSyncPoints) {
        if (headFrameNumber >= point->getFrameNumber() && headFenceSignaled &&
            presentTimeIsCurrent) {
            point->setFrameAvailable();
            sp<Layer> requestedSyncLayer = point->getRequestedSyncLayer();
            if (requestedSyncLayer) {
                // Need to update the transaction flag to ensure the layer's pending transaction
                // gets applied.
                requestedSyncLayer->setTransactionFlags(eTransactionNeeded);
            }
        }
    }
}

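// True when there is something new to present: a pending buffer update, a sideband stream
// change, or auto-refresh.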
bool BufferLayer::hasReadyFrame() const {
    return hasFrameUpdate() || getSidebandStreamChanged() || getAutoRefresh();
}

uint32_t BufferLayer::getEffectiveScalingMode() const {
    if (mOverrideScalingMode >= 0) {
        return mOverrideScalingMode;
    }

    return mBufferInfo.mScaleMode;
}

bool BufferLayer::isProtected() const {
    const sp<GraphicBuffer>& buffer(mBufferInfo.mBuffer);
    return (buffer != 0) && (buffer->getUsage() & GRALLOC_USAGE_PROTECTED);
}

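// Debug switch: reads the debug.sf.latch_unsignaled system property once and caches the result.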
bool BufferLayer::latchUnsignaledBuffers() {
    static bool propertyLoaded = false;
    static bool latch = false;
    static std::mutex mutex;
    std::lock_guard<std::mutex> lock(mutex);
    if (!propertyLoaded) {
        char value[PROPERTY_VALUE_MAX] = {};
        property_get("debug.sf.latch_unsignaled", value, "0");
        latch = atoi(value);
        propertyLoaded = true;
    }
    return latch;
}

// h/w composer set-up
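// Returns true when every local sync point at or before the head frame number has had its
// transaction applied (or when no matching frames were found).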
bool BufferLayer::allTransactionsSignaled(nsecs_t expectedPresentTime) {
    const auto headFrameNumber = getHeadFrameNumber(expectedPresentTime);
    bool matchingFramesFound = false;
    bool allTransactionsApplied = true;
    Mutex::Autolock lock(mLocalSyncPointMutex);

    for (auto& point : mLocalSyncPoints) {
        if (point->getFrameNumber() > headFrameNumber) {
            break;
        }
        matchingFramesFound = true;

        if (!point->frameIsAvailable()) {
            // We haven't notified the remote layer that the frame for
            // this point is available yet. Notify it now, and then
            // abort this attempt to latch.
            point->setFrameAvailable();
            allTransactionsApplied = false;
            break;
        }

        allTransactionsApplied = allTransactionsApplied && point->transactionIsApplied();
    }
    return !matchingFramesFound || allTransactionsApplied;
}

// As documented in libhardware header, formats in the range
// 0x100 - 0x1FF are specific to the HAL implementation, and
// are known to have no alpha channel
// TODO: move definition for device-specific range into
// hardware.h, instead of using hard-coded values here.
#define HARDWARE_IS_DEVICE_FORMAT(f) ((f) >= 0x100 && (f) <= 0x1FF)

bool BufferLayer::getOpacityForFormat(uint32_t format) {
    if (HARDWARE_IS_DEVICE_FORMAT(format)) {
        return true;
    }
    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
        case HAL_PIXEL_FORMAT_RGBA_FP16:
        case HAL_PIXEL_FORMAT_RGBA_1010102:
            return false;
    }
    // in all other cases, we have no blending (also for unknown formats)
    return true;
}

bool BufferLayer::needsFiltering(const sp<const DisplayDevice>& displayDevice) const {
    // If we are not capturing based on the state of a known display device,
    // just return false.
    if (displayDevice == nullptr) {
        return false;
    }

    const auto outputLayer = findOutputLayerForDisplay(displayDevice);
    if (outputLayer == nullptr) {
        return false;
    }

    // We need filtering if the sourceCrop rectangle size does not match the
    // displayFrame rectangle size (not a 1:1 render)
    const auto& compositionState = outputLayer->getState();
    const auto displayFrame = compositionState.displayFrame;
    const auto sourceCrop = compositionState.sourceCrop;
    return sourceCrop.getHeight() != displayFrame.getHeight() ||
            sourceCrop.getWidth() != displayFrame.getWidth();
}

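// Frame number of the buffer at the head of the queue when an update is pending, otherwise the
// frame number of the buffer that is currently latched.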
uint64_t BufferLayer::getHeadFrameNumber(nsecs_t expectedPresentTime) const {
    if (hasFrameUpdate()) {
        return getFrameNumber(expectedPresentTime);
    } else {
        return mCurrentFrameNumber;
    }
}

Rect BufferLayer::getBufferSize(const State& s) const {
    // If we have a sideband stream, or we are scaling the buffer then return the layer size since
    // we cannot determine the buffer size.
    if ((s.sidebandStream != nullptr) ||
        (getEffectiveScalingMode() != NATIVE_WINDOW_SCALING_MODE_FREEZE)) {
        return Rect(getActiveWidth(s), getActiveHeight(s));
    }

    if (mBufferInfo.mBuffer == nullptr) {
        return Rect::INVALID_RECT;
    }

    uint32_t bufWidth = mBufferInfo.mBuffer->getWidth();
    uint32_t bufHeight = mBufferInfo.mBuffer->getHeight();

    // Undo any transformations on the buffer and return the result.
    if (mBufferInfo.mTransform & ui::Transform::ROT_90) {
        std::swap(bufWidth, bufHeight);
    }

    if (getTransformToDisplayInverse()) {
        uint32_t invTransform = DisplayDevice::getPrimaryDisplayOrientationTransform();
        if (invTransform & ui::Transform::ROT_90) {
            std::swap(bufWidth, bufHeight);
        }
    }

    return Rect(bufWidth, bufHeight);
}

std::shared_ptr<compositionengine::Layer> BufferLayer::getCompositionLayer() const {
    return mCompositionLayer;
}

FloatRect BufferLayer::computeSourceBounds(const FloatRect& parentBounds) const {
    const State& s(getDrawingState());

    // If we have a sideband stream, or we are scaling the buffer then return the layer size since
    // we cannot determine the buffer size.
    if ((s.sidebandStream != nullptr) ||
        (getEffectiveScalingMode() != NATIVE_WINDOW_SCALING_MODE_FREEZE)) {
        return FloatRect(0, 0, getActiveWidth(s), getActiveHeight(s));
    }

    if (mBufferInfo.mBuffer == nullptr) {
        return parentBounds;
    }

    uint32_t bufWidth = mBufferInfo.mBuffer->getWidth();
    uint32_t bufHeight = mBufferInfo.mBuffer->getHeight();

    // Undo any transformations on the buffer and return the result.
    if (mBufferInfo.mTransform & ui::Transform::ROT_90) {
        std::swap(bufWidth, bufHeight);
    }

    if (getTransformToDisplayInverse()) {
        uint32_t invTransform = DisplayDevice::getPrimaryDisplayOrientationTransform();
        if (invTransform & ui::Transform::ROT_90) {
            std::swap(bufWidth, bufHeight);
        }
    }

    return FloatRect(0, 0, bufWidth, bufHeight);
}

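// Latches whatever frame is ready, if any, and then releases the pending buffer.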
void BufferLayer::latchAndReleaseBuffer() {
    mRefreshPending = false;
    if (hasReadyFrame()) {
        bool ignored = false;
        latchBuffer(ignored, systemTime(), 0 /* expectedPresentTime */);
    }
    releasePendingBuffer(systemTime());
}

PixelFormat BufferLayer::getPixelFormat() const {
    return mBufferInfo.mPixelFormat;
}

bool BufferLayer::getTransformToDisplayInverse() const {
    return mBufferInfo.mTransformToDisplayInverse;
}

Rect BufferLayer::getBufferCrop() const {
    // this is the crop rectangle that applies to the buffer
    // itself (as opposed to the window)
    if (!mBufferInfo.mCrop.isEmpty()) {
        // if the buffer crop is defined, we use that
        return mBufferInfo.mCrop;
    } else if (mBufferInfo.mBuffer != nullptr) {
        // otherwise we use the whole buffer
        return mBufferInfo.mBuffer->getBounds();
    } else {
        // if we don't have a buffer yet, we use an empty/invalid crop
        return Rect();
    }
}

uint32_t BufferLayer::getBufferTransform() const {
    return mBufferInfo.mTransform;
}

ui::Dataspace BufferLayer::getDataSpace() const {
    return mBufferInfo.mDataspace;
}

ui::Dataspace BufferLayer::translateDataspace(ui::Dataspace dataspace) {
    ui::Dataspace updatedDataspace = dataspace;
    // translate legacy dataspaces to modern dataspaces
    switch (dataspace) {
        case ui::Dataspace::SRGB:
            updatedDataspace = ui::Dataspace::V0_SRGB;
            break;
        case ui::Dataspace::SRGB_LINEAR:
            updatedDataspace = ui::Dataspace::V0_SRGB_LINEAR;
            break;
        case ui::Dataspace::JFIF:
            updatedDataspace = ui::Dataspace::V0_JFIF;
            break;
        case ui::Dataspace::BT601_625:
            updatedDataspace = ui::Dataspace::V0_BT601_625;
            break;
        case ui::Dataspace::BT601_525:
            updatedDataspace = ui::Dataspace::V0_BT601_525;
            break;
        case ui::Dataspace::BT709:
            updatedDataspace = ui::Dataspace::V0_BT709;
            break;
        default:
            break;
    }

    return updatedDataspace;
}

sp<GraphicBuffer> BufferLayer::getBuffer() const {
    return mBufferInfo.mBuffer;
}

void BufferLayer::getDrawingTransformMatrix(bool filteringEnabled, float outMatrix[16]) {
    GLConsumer::computeTransformMatrix(outMatrix, mBufferInfo.mBuffer, mBufferInfo.mCrop,
                                       mBufferInfo.mTransform, filteringEnabled);
}

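// Copies the state a clone layer needs from the layer it was cloned from (premultiplied alpha,
// cursor/protected flags) and pulls in that layer's current buffer info.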
void BufferLayer::setInitialValuesForClone(const sp<Layer>& clonedFrom) {
    Layer::setInitialValuesForClone(clonedFrom);

    sp<BufferLayer> bufferClonedFrom = static_cast<BufferLayer*>(clonedFrom.get());
    mPremultipliedAlpha = bufferClonedFrom->mPremultipliedAlpha;
    mPotentialCursor = bufferClonedFrom->mPotentialCursor;
    mProtectedByApp = bufferClonedFrom->mProtectedByApp;

    updateCloneBufferInfo();
}

void BufferLayer::updateCloneBufferInfo() {
    if (!isClone() || !isClonedFromAlive()) {
        return;
    }

    sp<BufferLayer> clonedFrom = static_cast<BufferLayer*>(getClonedFrom().get());
    mBufferInfo = clonedFrom->mBufferInfo;
    mSidebandStream = clonedFrom->mSidebandStream;
    surfaceDamageRegion = clonedFrom->surfaceDamageRegion;
    mCurrentFrameNumber = clonedFrom->mCurrentFrameNumber.load();
    mPreviousFrameNumber = clonedFrom->mPreviousFrameNumber;

    // After buffer info is updated, the drawingState from the real layer needs to be copied into
    // the clone. This is because some properties of drawingState can change when latchBuffer is
    // called. However, copying the drawingState would also overwrite the cloned layer's relatives.
    // Therefore, temporarily store the relatives so they can be set in the cloned drawingState
    // again.
    wp<Layer> tmpZOrderRelativeOf = mDrawingState.zOrderRelativeOf;
    SortedVector<wp<Layer>> tmpZOrderRelatives = mDrawingState.zOrderRelatives;
    mDrawingState = clonedFrom->mDrawingState;
    // TODO: (b/140756730) Ignore input for now since InputDispatcher doesn't support multiple
    // InputWindows per client token yet.
    mDrawingState.inputInfo.token = nullptr;
    mDrawingState.zOrderRelativeOf = tmpZOrderRelativeOf;
    mDrawingState.zOrderRelatives = tmpZOrderRelatives;
}

} // namespace android

#if defined(__gl_h_)
#error "don't include gl/gl.h in this file"
#endif

#if defined(__gl2_h_)
#error "don't include gl2/gl2.h in this file"
#endif