| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 1 | /* | 
 | 2 |  * Copyright (C) 2017 The Android Open Source Project | 
 | 3 |  * | 
 | 4 |  * Licensed under the Apache License, Version 2.0 (the "License"); | 
 | 5 |  * you may not use this file except in compliance with the License. | 
 | 6 |  * You may obtain a copy of the License at | 
 | 7 |  * | 
 | 8 |  *      http://www.apache.org/licenses/LICENSE-2.0 | 
 | 9 |  * | 
 | 10 |  * Unless required by applicable law or agreed to in writing, software | 
 | 11 |  * distributed under the License is distributed on an "AS IS" BASIS, | 
 | 12 |  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | 
 | 13 |  * See the License for the specific language governing permissions and | 
 | 14 |  * limitations under the License. | 
 | 15 |  */ | 
 | 16 |  | 
 | 17 | //#define LOG_NDEBUG 0 | 
 | 18 | #undef LOG_TAG | 
 | 19 | #define LOG_TAG "BufferLayer" | 
 | 20 | #define ATRACE_TAG ATRACE_TAG_GRAPHICS | 
 | 21 |  | 
 | 22 | #include "BufferLayer.h" | 
 | 23 | #include "Colorizer.h" | 
 | 24 | #include "DisplayDevice.h" | 
 | 25 | #include "LayerRejecter.h" | 
 | 26 | #include "clz.h" | 
 | 27 |  | 
 | 28 | #include "RenderEngine/RenderEngine.h" | 
 | 29 |  | 
 | 30 | #include <gui/BufferItem.h> | 
 | 31 | #include <gui/BufferQueue.h> | 
 | 32 | #include <gui/LayerDebugInfo.h> | 
 | 33 | #include <gui/Surface.h> | 
 | 34 |  | 
 | 35 | #include <ui/DebugUtils.h> | 
 | 36 |  | 
 | 37 | #include <utils/Errors.h> | 
 | 38 | #include <utils/Log.h> | 
 | 39 | #include <utils/NativeHandle.h> | 
 | 40 | #include <utils/StopWatch.h> | 
 | 41 | #include <utils/Trace.h> | 
 | 42 |  | 
 | 43 | #include <cutils/compiler.h> | 
 | 44 | #include <cutils/native_handle.h> | 
 | 45 | #include <cutils/properties.h> | 
 | 46 |  | 
 | 47 | #include <math.h> | 
 | 48 | #include <stdlib.h> | 
 | 49 | #include <mutex> | 
 | 50 |  | 
 | 51 | namespace android { | 
 | 52 |  | 
// Constructs a BufferLayer: a Layer whose content comes from a buffer queue
// (via mConsumer). Allocates the GL texture the consumer's buffers will be
// bound to and seeds the drawing state from the current state.
BufferLayer::BufferLayer(SurfaceFlinger* flinger, const sp<Client>& client, const String8& name,
                         uint32_t w, uint32_t h, uint32_t flags)
      : Layer(flinger, client, name, w, h, flags),
        mConsumer(nullptr),
        mTextureName(UINT32_MAX),  // sentinel until genTextures() fills in a real name
        mFormat(PIXEL_FORMAT_NONE),
        mCurrentScalingMode(NATIVE_WINDOW_SCALING_MODE_FREEZE),
        mBufferLatched(false),
        mPreviousFrameNumber(0),
        mUpdateTexImageFailed(false),
        mRefreshPending(false) {
    ALOGV("Creating Layer %s", name.string());

    // Reserve a texture name from the render engine and wrap it as an
    // EXTERNAL texture (required for buffers coming from a BufferQueue).
    mFlinger->getRenderEngine().genTextures(1, &mTextureName);
    mTexture.init(Texture::TEXTURE_EXTERNAL, mTextureName);

    if (flags & ISurfaceComposerClient::eNonPremultiplied) mPremultipliedAlpha = false;

    mCurrentState.requested = mCurrentState.active;

    // drawing state & current state are identical
    mDrawingState = mCurrentState;
}
 | 76 |  | 
 | 77 | BufferLayer::~BufferLayer() { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 78 |     mFlinger->deleteTextureAsync(mTextureName); | 
 | 79 |  | 
| David Sodman | 6f65f3e | 2017-11-03 14:28:09 -0700 | [diff] [blame] | 80 |     if (!getBE().mHwcLayers.empty()) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 81 |         ALOGE("Found stale hardware composer layers when destroying " | 
 | 82 |               "surface flinger layer %s", | 
 | 83 |               mName.string()); | 
 | 84 |         destroyAllHwcLayers(); | 
 | 85 |     } | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 86 | } | 
 | 87 |  | 
| David Sodman | eb085e0 | 2017-10-05 18:49:04 -0700 | [diff] [blame] | 88 | void BufferLayer::useSurfaceDamage() { | 
 | 89 |     if (mFlinger->mForceFullDamage) { | 
 | 90 |         surfaceDamageRegion = Region::INVALID_REGION; | 
 | 91 |     } else { | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 92 |         surfaceDamageRegion = mConsumer->getSurfaceDamage(); | 
| David Sodman | eb085e0 | 2017-10-05 18:49:04 -0700 | [diff] [blame] | 93 |     } | 
 | 94 | } | 
 | 95 |  | 
// Clears the published damage region (used when no new content was latched).
void BufferLayer::useEmptyDamage() {
    surfaceDamageRegion.clear();
}
 | 99 |  | 
| David Sodman | 41fdfc9 | 2017-11-06 16:09:56 -0800 | [diff] [blame] | 100 | bool BufferLayer::isProtected() const { | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 101 |     const sp<GraphicBuffer>& buffer(mActiveBuffer); | 
 | 102 |     return (buffer != 0) && (buffer->getUsage() & GRALLOC_USAGE_PROTECTED); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 103 | } | 
 | 104 |  | 
 | 105 | bool BufferLayer::isVisible() const { | 
 | 106 |     return !(isHiddenByPolicy()) && getAlpha() > 0.0f && | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 107 |             (mActiveBuffer != nullptr || getBE().compositionInfo.hwc.sidebandStream != nullptr); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 108 | } | 
 | 109 |  | 
// A layer is "fixed size" when its buffers are scaled to the layer bounds;
// only SCALING_MODE_FREEZE pins the buffer size to the layer size.
bool BufferLayer::isFixedSize() const {
    return getEffectiveScalingMode() != NATIVE_WINDOW_SCALING_MODE_FREEZE;
}
 | 113 |  | 
 | 114 | status_t BufferLayer::setBuffers(uint32_t w, uint32_t h, PixelFormat format, uint32_t flags) { | 
 | 115 |     uint32_t const maxSurfaceDims = | 
 | 116 |             min(mFlinger->getMaxTextureSize(), mFlinger->getMaxViewportDims()); | 
 | 117 |  | 
 | 118 |     // never allow a surface larger than what our underlying GL implementation | 
 | 119 |     // can handle. | 
 | 120 |     if ((uint32_t(w) > maxSurfaceDims) || (uint32_t(h) > maxSurfaceDims)) { | 
 | 121 |         ALOGE("dimensions too large %u x %u", uint32_t(w), uint32_t(h)); | 
 | 122 |         return BAD_VALUE; | 
 | 123 |     } | 
 | 124 |  | 
 | 125 |     mFormat = format; | 
 | 126 |  | 
 | 127 |     mPotentialCursor = (flags & ISurfaceComposerClient::eCursorWindow) ? true : false; | 
 | 128 |     mProtectedByApp = (flags & ISurfaceComposerClient::eProtectedByApp) ? true : false; | 
 | 129 |     mCurrentOpacity = getOpacityForFormat(format); | 
 | 130 |  | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 131 |     mConsumer->setDefaultBufferSize(w, h); | 
 | 132 |     mConsumer->setDefaultBufferFormat(format); | 
 | 133 |     mConsumer->setConsumerUsageBits(getEffectiveUsage(0)); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 134 |  | 
 | 135 |     return NO_ERROR; | 
 | 136 | } | 
 | 137 |  | 
 | 138 | static constexpr mat4 inverseOrientation(uint32_t transform) { | 
| David Sodman | 41fdfc9 | 2017-11-06 16:09:56 -0800 | [diff] [blame] | 139 |     const mat4 flipH(-1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1); | 
 | 140 |     const mat4 flipV(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1); | 
 | 141 |     const mat4 rot90(0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 142 |     mat4 tr; | 
 | 143 |  | 
 | 144 |     if (transform & NATIVE_WINDOW_TRANSFORM_ROT_90) { | 
 | 145 |         tr = tr * rot90; | 
 | 146 |     } | 
 | 147 |     if (transform & NATIVE_WINDOW_TRANSFORM_FLIP_H) { | 
 | 148 |         tr = tr * flipH; | 
 | 149 |     } | 
 | 150 |     if (transform & NATIVE_WINDOW_TRANSFORM_FLIP_V) { | 
 | 151 |         tr = tr * flipV; | 
 | 152 |     } | 
 | 153 |     return inverse(tr); | 
 | 154 | } | 
 | 155 |  | 
/*
 * onDraw will draw the current layer onto the presentable buffer
 */
void BufferLayer::onDraw(const RenderArea& renderArea, const Region& clip,
                         bool useIdentityTransform) const {
    ATRACE_CALL();

    CompositionInfo& compositionInfo = getBE().compositionInfo;

    if (CC_UNLIKELY(mActiveBuffer == 0)) {
        // the texture has not been created yet, this Layer has
        // in fact never been drawn into. This happens frequently with
        // SurfaceView because the WindowManager can't know when the client
        // has drawn the first time.

        // If there is nothing under us, we paint the screen in black, otherwise
        // we just skip this update.

        // figure out if there is something below us
        Region under;
        bool finished = false;
        // Traverse in Z order up to (but not including) this layer,
        // accumulating the screen-space visible regions below it.
        mFlinger->mDrawingState.traverseInZOrder([&](Layer* layer) {
            if (finished || layer == static_cast<BufferLayer const*>(this)) {
                finished = true;
                return;
            }
            under.orSelf(renderArea.getTransform().transform(layer->visibleRegion));
        });
        // if not everything below us is covered, we plug the holes!
        Region holes(clip.subtract(under));
        if (!holes.isEmpty()) {
            clearWithOpenGL(renderArea, 0, 0, 0, 1);
        }
        return;
    }

    // Bind the current buffer to the GL texture, and wait for it to be
    // ready for us to draw into.
    status_t err = mConsumer->bindTextureImage();
    if (err != NO_ERROR) {
        ALOGW("onDraw: bindTextureImage failed (err=%d)", err);
        // Go ahead and draw the buffer anyway; no matter what we do the screen
        // is probably going to have something visibly wrong.
    }

    // Protected or secure content must not be rendered into a non-secure
    // target; draw it blacked out instead.
    bool blackOutLayer = isProtected() || (isSecure() && !renderArea.isSecure());

    auto& engine(mFlinger->getRenderEngine());

    if (!blackOutLayer) {
        // TODO: we could be more subtle with isFixedSize()
        const bool useFiltering = getFiltering() || needsFiltering(renderArea) || isFixedSize();

        // Query the texture matrix given our current filtering mode.
        float textureMatrix[16];
        mConsumer->setFilteringEnabled(useFiltering);
        mConsumer->getTransformMatrix(textureMatrix);

        if (getTransformToDisplayInverse()) {
            /*
             * the code below applies the primary display's inverse transform to
             * the texture transform
             */
            uint32_t transform = DisplayDevice::getPrimaryDisplayOrientationTransform();
            mat4 tr = inverseOrientation(transform);

            /**
             * TODO(b/36727915): This is basically a hack.
             *
             * Ensure that regardless of the parent transformation,
             * this buffer is always transformed from native display
             * orientation to display orientation. For example, in the case
             * of a camera where the buffer remains in native orientation,
             * we want the pixels to always be upright.
             */
            sp<Layer> p = mDrawingParent.promote();
            if (p != nullptr) {
                const auto parentTransform = p->getTransform();
                tr = tr * inverseOrientation(parentTransform.getOrientation());
            }

            // and finally apply it to the original texture matrix
            const mat4 texTransform(mat4(static_cast<const float*>(textureMatrix)) * tr);
            memcpy(textureMatrix, texTransform.asArray(), sizeof(textureMatrix));
        }

        // Set things up for texturing.
        mTexture.setDimensions(mActiveBuffer->getWidth(), mActiveBuffer->getHeight());
        mTexture.setFiltering(useFiltering);
        mTexture.setMatrix(textureMatrix);
        // Mirror the texture state into the backend composition info so the
        // BE draw path (drawNow) can reproduce this setup.
        compositionInfo.re.texture = mTexture;

        engine.setupLayerTexturing(mTexture);
    } else {
        engine.setupLayerBlackedOut();
    }
    drawWithOpenGL(renderArea, useIdentityTransform);
    engine.disableTexturing();
}
 | 255 |  | 
// Draws the layer immediately using the state captured in the backend
// CompositionInfo: first runs the regular draw() path (which fills in
// compositionInfo), then replays texture/blending/dataspace state into the
// render engine and draws the layer mesh. Engine state is reset afterwards.
void BufferLayer::drawNow(const RenderArea& renderArea, bool useIdentityTransform) const {
    CompositionInfo& compositionInfo = getBE().compositionInfo;
    auto& engine(mFlinger->getRenderEngine());

    draw(renderArea, useIdentityTransform);

    engine.setupLayerTexturing(compositionInfo.re.texture);
    engine.setupLayerBlending(compositionInfo.re.preMultipliedAlpha, compositionInfo.re.opaque,
            false, compositionInfo.re.color);
    engine.setSourceDataSpace(compositionInfo.hwc.dataspace);
    engine.setSourceY410BT2020(compositionInfo.re.Y410BT2020);
    engine.drawMesh(getBE().getMesh());
    // Restore engine defaults so this layer's state doesn't leak into the
    // next layer drawn.
    engine.disableBlending();
    engine.disableTexturing();
    engine.setSourceY410BT2020(false);
}
 | 272 |  | 
// Called after composition; hands the display's release fence to the
// consumer so the buffer producer knows when the buffer is free for reuse.
void BufferLayer::onLayerDisplayed(const sp<Fence>& releaseFence) {
    mConsumer->setReleaseFence(releaseFence);
}
| David Sodman | eb085e0 | 2017-10-05 18:49:04 -0700 | [diff] [blame] | 276 |  | 
// Abandons the underlying consumer; no further buffers will be acquired.
void BufferLayer::abandon() {
    mConsumer->abandon();
}
 | 280 |  | 
// Decides whether the head queued buffer should be presented on the upcoming
// vsync, based on its requested presentation timestamp versus the expected
// present time derived from DispSync.
bool BufferLayer::shouldPresentNow(const DispSync& dispSync) const {
    // Sideband changes and auto-refresh always want immediate presentation.
    if (mSidebandStreamChanged || mAutoRefresh) {
        return true;
    }

    Mutex::Autolock lock(mQueueItemLock);
    if (mQueueItems.empty()) {
        return false;
    }
    auto timestamp = mQueueItems[0].mTimestamp;
    nsecs_t expectedPresent = mConsumer->computeExpectedPresent(dispSync);

    // Ignore timestamps more than a second in the future
    bool isPlausible = timestamp < (expectedPresent + s2ns(1));
    ALOGW_IF(!isPlausible,
             "[%s] Timestamp %" PRId64 " seems implausible "
             "relative to expectedPresent %" PRId64,
             mName.string(), timestamp, expectedPresent);

    // Present if the buffer is due, or if its timestamp is garbage (in which
    // case waiting for it would stall the layer indefinitely).
    bool isDue = timestamp < expectedPresent;
    return isDue || !isPlausible;
}
 | 303 |  | 
// Forwards the display orientation hint to the consumer so producers can
// pre-rotate buffers and avoid a GPU composition pass.
void BufferLayer::setTransformHint(uint32_t orientation) const {
    mConsumer->setTransformHint(orientation);
}
 | 307 |  | 
// Called at the start of a composition cycle. Records pre-composition timing
// for the latched frame and reports whether this layer has more work pending
// (queued frames, a sideband change, or auto-refresh), which triggers
// another invalidate.
bool BufferLayer::onPreComposition(nsecs_t refreshStartTime) {
    if (mBufferLatched) {
        Mutex::Autolock lock(mFrameEventHistoryMutex);
        mFrameEventHistory.addPreComposition(mCurrentFrameNumber, refreshStartTime);
    }
    // A new composition cycle has started, so the previously latched buffer
    // is no longer "pending refresh"; latchBuffer may proceed again.
    mRefreshPending = false;
    return mQueuedFrames > 0 || mSidebandStreamChanged || mAutoRefresh;
}
// Called after composition completes. Feeds frame-timing data (GL done
// fence, present fence, compositor timing) into the frame event history and
// the frame tracker. Returns true if latency data was recorded for this
// frame, false when no new frame was latched.
bool BufferLayer::onPostComposition(const std::shared_ptr<FenceTime>& glDoneFence,
                                    const std::shared_ptr<FenceTime>& presentFence,
                                    const CompositorTiming& compositorTiming) {
    // mFrameLatencyNeeded is true when a new frame was latched for the
    // composition.
    if (!mFrameLatencyNeeded) return false;

    // Update mFrameEventHistory.
    {
        Mutex::Autolock lock(mFrameEventHistoryMutex);
        mFrameEventHistory.addPostComposition(mCurrentFrameNumber, glDoneFence, presentFence,
                                              compositorTiming);
    }

    // Update mFrameTracker.
    nsecs_t desiredPresentTime = mConsumer->getTimestamp();
    mFrameTracker.setDesiredPresentTime(desiredPresentTime);

    std::shared_ptr<FenceTime> frameReadyFence = mConsumer->getCurrentFenceTime();
    if (frameReadyFence->isValid()) {
        mFrameTracker.setFrameReadyFence(std::move(frameReadyFence));
    } else {
        // There was no fence for this frame, so assume that it was ready
        // to be presented at the desired present time.
        mFrameTracker.setFrameReadyTime(desiredPresentTime);
    }

    if (presentFence->isValid()) {
        mFrameTracker.setActualPresentFence(std::shared_ptr<FenceTime>(presentFence));
    } else {
        // The HWC doesn't support present fences, so use the refresh
        // timestamp instead.
        mFrameTracker.setActualPresentTime(
                mFlinger->getHwComposer().getRefreshTimestamp(HWC_DISPLAY_PRIMARY));
    }

    mFrameTracker.advanceFrame();
    mFrameLatencyNeeded = false;
    return true;
}
 | 356 |  | 
 | 357 | std::vector<OccupancyTracker::Segment> BufferLayer::getOccupancyHistory(bool forceFlush) { | 
 | 358 |     std::vector<OccupancyTracker::Segment> history; | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 359 |     status_t result = mConsumer->getOccupancyHistory(forceFlush, &history); | 
| David Sodman | eb085e0 | 2017-10-05 18:49:04 -0700 | [diff] [blame] | 360 |     if (result != NO_ERROR) { | 
 | 361 |         ALOGW("[%s] Failed to obtain occupancy history (%d)", mName.string(), result); | 
 | 362 |         return {}; | 
 | 363 |     } | 
 | 364 |     return history; | 
 | 365 | } | 
 | 366 |  | 
// Whether the buffer producer requested the inverse of the display transform
// to be applied (see onDraw's texture-matrix handling).
bool BufferLayer::getTransformToDisplayInverse() const {
    return mConsumer->getTransformToDisplayInverse();
}
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 370 |  | 
// Releases the consumer's pending buffer (if any) back to the queue and
// records the release fence against the previous frame in the frame event
// history, so producers can observe when their buffer was released.
void BufferLayer::releasePendingBuffer(nsecs_t dequeueReadyTime) {
    if (!mConsumer->releasePendingBuffer()) {
        // Nothing was pending release; no timeline bookkeeping needed.
        return;
    }

    auto releaseFenceTime = std::make_shared<FenceTime>(mConsumer->getPrevFinalReleaseFence());
    mReleaseTimeline.updateSignalTimes();
    mReleaseTimeline.push(releaseFenceTime);

    Mutex::Autolock lock(mFrameEventHistoryMutex);
    // Frame number 0 means no previous frame exists to attribute the release to.
    if (mPreviousFrameNumber != 0) {
        mFrameEventHistory.addRelease(mPreviousFrameNumber, dequeueReadyTime,
                                      std::move(releaseFenceTime));
    }
}
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 386 |  | 
 | 387 | Region BufferLayer::latchBuffer(bool& recomputeVisibleRegions, nsecs_t latchTime) { | 
 | 388 |     ATRACE_CALL(); | 
 | 389 |  | 
 | 390 |     if (android_atomic_acquire_cas(true, false, &mSidebandStreamChanged) == 0) { | 
 | 391 |         // mSidebandStreamChanged was true | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 392 |         mSidebandStream = mConsumer->getSidebandStream(); | 
| David Sodman | 386c22e | 2017-11-09 16:34:46 -0800 | [diff] [blame] | 393 |         // replicated in LayerBE until FE/BE is ready to be synchronized | 
| David Sodman | 0cc6918 | 2017-11-17 12:12:07 -0800 | [diff] [blame] | 394 |         getBE().compositionInfo.hwc.sidebandStream = mSidebandStream; | 
| Peiyong Lin | 566a3b4 | 2018-01-09 18:22:43 -0800 | [diff] [blame] | 395 |         if (getBE().compositionInfo.hwc.sidebandStream != nullptr) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 396 |             setTransactionFlags(eTransactionNeeded); | 
 | 397 |             mFlinger->setTransactionFlags(eTraversalNeeded); | 
 | 398 |         } | 
 | 399 |         recomputeVisibleRegions = true; | 
 | 400 |  | 
 | 401 |         const State& s(getDrawingState()); | 
 | 402 |         return getTransform().transform(Region(Rect(s.active.w, s.active.h))); | 
 | 403 |     } | 
 | 404 |  | 
 | 405 |     Region outDirtyRegion; | 
 | 406 |     if (mQueuedFrames <= 0 && !mAutoRefresh) { | 
 | 407 |         return outDirtyRegion; | 
 | 408 |     } | 
 | 409 |  | 
 | 410 |     // if we've already called updateTexImage() without going through | 
 | 411 |     // a composition step, we have to skip this layer at this point | 
 | 412 |     // because we cannot call updateTeximage() without a corresponding | 
 | 413 |     // compositionComplete() call. | 
 | 414 |     // we'll trigger an update in onPreComposition(). | 
 | 415 |     if (mRefreshPending) { | 
 | 416 |         return outDirtyRegion; | 
 | 417 |     } | 
 | 418 |  | 
 | 419 |     // If the head buffer's acquire fence hasn't signaled yet, return and | 
 | 420 |     // try again later | 
 | 421 |     if (!headFenceHasSignaled()) { | 
 | 422 |         mFlinger->signalLayerUpdate(); | 
 | 423 |         return outDirtyRegion; | 
 | 424 |     } | 
 | 425 |  | 
 | 426 |     // Capture the old state of the layer for comparisons later | 
 | 427 |     const State& s(getDrawingState()); | 
 | 428 |     const bool oldOpacity = isOpaque(s); | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 429 |     sp<GraphicBuffer> oldBuffer = mActiveBuffer; | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 430 |  | 
 | 431 |     if (!allTransactionsSignaled()) { | 
 | 432 |         mFlinger->signalLayerUpdate(); | 
 | 433 |         return outDirtyRegion; | 
 | 434 |     } | 
 | 435 |  | 
 | 436 |     // This boolean is used to make sure that SurfaceFlinger's shadow copy | 
 | 437 |     // of the buffer queue isn't modified when the buffer queue is returning | 
 | 438 |     // BufferItem's that weren't actually queued. This can happen in shared | 
 | 439 |     // buffer mode. | 
 | 440 |     bool queuedBuffer = false; | 
 | 441 |     LayerRejecter r(mDrawingState, getCurrentState(), recomputeVisibleRegions, | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 442 |                     getProducerStickyTransform() != 0, mName.string(), mOverrideScalingMode, | 
 | 443 |                     mFreezeGeometryUpdates); | 
 | 444 |     status_t updateResult = mConsumer->updateTexImage(&r, mFlinger->mPrimaryDispSync, &mAutoRefresh, | 
 | 445 |                                                       &queuedBuffer, mLastFrameNumberReceived); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 446 |     if (updateResult == BufferQueue::PRESENT_LATER) { | 
 | 447 |         // Producer doesn't want buffer to be displayed yet.  Signal a | 
 | 448 |         // layer update so we check again at the next opportunity. | 
 | 449 |         mFlinger->signalLayerUpdate(); | 
 | 450 |         return outDirtyRegion; | 
| Chia-I Wu | 0cb75ac | 2017-11-27 15:56:04 -0800 | [diff] [blame] | 451 |     } else if (updateResult == BufferLayerConsumer::BUFFER_REJECTED) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 452 |         // If the buffer has been rejected, remove it from the shadow queue | 
 | 453 |         // and return early | 
 | 454 |         if (queuedBuffer) { | 
 | 455 |             Mutex::Autolock lock(mQueueItemLock); | 
 | 456 |             mQueueItems.removeAt(0); | 
 | 457 |             android_atomic_dec(&mQueuedFrames); | 
 | 458 |         } | 
 | 459 |         return outDirtyRegion; | 
 | 460 |     } else if (updateResult != NO_ERROR || mUpdateTexImageFailed) { | 
 | 461 |         // This can occur if something goes wrong when trying to create the | 
 | 462 |         // EGLImage for this buffer. If this happens, the buffer has already | 
 | 463 |         // been released, so we need to clean up the queue and bug out | 
 | 464 |         // early. | 
 | 465 |         if (queuedBuffer) { | 
 | 466 |             Mutex::Autolock lock(mQueueItemLock); | 
 | 467 |             mQueueItems.clear(); | 
 | 468 |             android_atomic_and(0, &mQueuedFrames); | 
 | 469 |         } | 
 | 470 |  | 
 | 471 |         // Once we have hit this state, the shadow queue may no longer | 
 | 472 |         // correctly reflect the incoming BufferQueue's contents, so even if | 
 | 473 |         // updateTexImage starts working, the only safe course of action is | 
 | 474 |         // to continue to ignore updates. | 
 | 475 |         mUpdateTexImageFailed = true; | 
 | 476 |  | 
 | 477 |         return outDirtyRegion; | 
 | 478 |     } | 
 | 479 |  | 
 | 480 |     if (queuedBuffer) { | 
 | 481 |         // Autolock scope | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 482 |         auto currentFrameNumber = mConsumer->getFrameNumber(); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 483 |  | 
 | 484 |         Mutex::Autolock lock(mQueueItemLock); | 
 | 485 |  | 
 | 486 |         // Remove any stale buffers that have been dropped during | 
 | 487 |         // updateTexImage | 
 | 488 |         while (mQueueItems[0].mFrameNumber != currentFrameNumber) { | 
 | 489 |             mQueueItems.removeAt(0); | 
 | 490 |             android_atomic_dec(&mQueuedFrames); | 
 | 491 |         } | 
 | 492 |  | 
 | 493 |         mQueueItems.removeAt(0); | 
 | 494 |     } | 
 | 495 |  | 
 | 496 |     // Decrement the queued-frames count.  Signal another event if we | 
 | 497 |     // have more frames pending. | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 498 |     if ((queuedBuffer && android_atomic_dec(&mQueuedFrames) > 1) || mAutoRefresh) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 499 |         mFlinger->signalLayerUpdate(); | 
 | 500 |     } | 
 | 501 |  | 
 | 502 |     // update the active buffer | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 503 |     mActiveBuffer = mConsumer->getCurrentBuffer(&mActiveBufferSlot); | 
 | 504 |     getBE().compositionInfo.mBuffer = mActiveBuffer; | 
 | 505 |     getBE().compositionInfo.mBufferSlot = mActiveBufferSlot; | 
 | 506 |  | 
 | 507 |     if (mActiveBuffer == nullptr) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 508 |         // this can only happen if the very first buffer was rejected. | 
 | 509 |         return outDirtyRegion; | 
 | 510 |     } | 
 | 511 |  | 
 | 512 |     mBufferLatched = true; | 
 | 513 |     mPreviousFrameNumber = mCurrentFrameNumber; | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 514 |     mCurrentFrameNumber = mConsumer->getFrameNumber(); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 515 |  | 
 | 516 |     { | 
 | 517 |         Mutex::Autolock lock(mFrameEventHistoryMutex); | 
 | 518 |         mFrameEventHistory.addLatch(mCurrentFrameNumber, latchTime); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 519 |     } | 
 | 520 |  | 
 | 521 |     mRefreshPending = true; | 
 | 522 |     mFrameLatencyNeeded = true; | 
| Peiyong Lin | 566a3b4 | 2018-01-09 18:22:43 -0800 | [diff] [blame] | 523 |     if (oldBuffer == nullptr) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 524 |         // the first time we receive a buffer, we need to trigger a | 
 | 525 |         // geometry invalidation. | 
 | 526 |         recomputeVisibleRegions = true; | 
 | 527 |     } | 
 | 528 |  | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 529 |     setDataSpace(mConsumer->getCurrentDataSpace()); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 530 |  | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 531 |     Rect crop(mConsumer->getCurrentCrop()); | 
 | 532 |     const uint32_t transform(mConsumer->getCurrentTransform()); | 
 | 533 |     const uint32_t scalingMode(mConsumer->getCurrentScalingMode()); | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 534 |     if ((crop != mCurrentCrop) || (transform != mCurrentTransform) || | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 535 |         (scalingMode != mCurrentScalingMode)) { | 
 | 536 |         mCurrentCrop = crop; | 
 | 537 |         mCurrentTransform = transform; | 
 | 538 |         mCurrentScalingMode = scalingMode; | 
 | 539 |         recomputeVisibleRegions = true; | 
 | 540 |     } | 
 | 541 |  | 
| Peiyong Lin | 566a3b4 | 2018-01-09 18:22:43 -0800 | [diff] [blame] | 542 |     if (oldBuffer != nullptr) { | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 543 |         uint32_t bufWidth = mActiveBuffer->getWidth(); | 
 | 544 |         uint32_t bufHeight = mActiveBuffer->getHeight(); | 
 | 545 |         if (bufWidth != uint32_t(oldBuffer->width) || bufHeight != uint32_t(oldBuffer->height)) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 546 |             recomputeVisibleRegions = true; | 
 | 547 |         } | 
 | 548 |     } | 
 | 549 |  | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 550 |     mCurrentOpacity = getOpacityForFormat(mActiveBuffer->format); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 551 |     if (oldOpacity != isOpaque(s)) { | 
 | 552 |         recomputeVisibleRegions = true; | 
 | 553 |     } | 
 | 554 |  | 
 | 555 |     // Remove any sync points corresponding to the buffer which was just | 
 | 556 |     // latched | 
 | 557 |     { | 
 | 558 |         Mutex::Autolock lock(mLocalSyncPointMutex); | 
 | 559 |         auto point = mLocalSyncPoints.begin(); | 
 | 560 |         while (point != mLocalSyncPoints.end()) { | 
 | 561 |             if (!(*point)->frameIsAvailable() || !(*point)->transactionIsApplied()) { | 
 | 562 |                 // This sync point must have been added since we started | 
 | 563 |                 // latching. Don't drop it yet. | 
 | 564 |                 ++point; | 
 | 565 |                 continue; | 
 | 566 |             } | 
 | 567 |  | 
 | 568 |             if ((*point)->getFrameNumber() <= mCurrentFrameNumber) { | 
 | 569 |                 point = mLocalSyncPoints.erase(point); | 
 | 570 |             } else { | 
 | 571 |                 ++point; | 
 | 572 |             } | 
 | 573 |         } | 
 | 574 |     } | 
 | 575 |  | 
 | 576 |     // FIXME: postedRegion should be dirty & bounds | 
 | 577 |     Region dirtyRegion(Rect(s.active.w, s.active.h)); | 
 | 578 |  | 
 | 579 |     // transform the dirty region to window-manager space | 
 | 580 |     outDirtyRegion = (getTransform().transform(dirtyRegion)); | 
 | 581 |  | 
 | 582 |     return outDirtyRegion; | 
 | 583 | } | 
 | 584 |  | 
| David Sodman | eb085e0 | 2017-10-05 18:49:04 -0700 | [diff] [blame] | 585 | void BufferLayer::setDefaultBufferSize(uint32_t w, uint32_t h) { | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 586 |     mConsumer->setDefaultBufferSize(w, h); | 
| David Sodman | eb085e0 | 2017-10-05 18:49:04 -0700 | [diff] [blame] | 587 | } | 
 | 588 |  | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 589 | void BufferLayer::setPerFrameData(const sp<const DisplayDevice>& displayDevice) { | 
 | 590 |     // Apply this display's projection's viewport to the visible region | 
 | 591 |     // before giving it to the HWC HAL. | 
 | 592 |     const Transform& tr = displayDevice->getTransform(); | 
 | 593 |     const auto& viewport = displayDevice->getViewport(); | 
 | 594 |     Region visible = tr.transform(visibleRegion.intersect(viewport)); | 
 | 595 |     auto hwcId = displayDevice->getHwcDisplayId(); | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 596 |     auto& hwcInfo = getBE().mHwcLayers[hwcId]; | 
 | 597 |     auto& hwcLayer = hwcInfo.layer; | 
 | 598 |     auto error = (*hwcLayer)->setVisibleRegion(visible); | 
 | 599 |     if (error != HWC2::Error::None) { | 
 | 600 |         ALOGE("[%s] Failed to set visible region: %s (%d)", mName.string(), | 
 | 601 |               to_string(error).c_str(), static_cast<int32_t>(error)); | 
 | 602 |         visible.dump(LOG_TAG); | 
 | 603 |     } | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 604 |  | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 605 |     error = (*hwcLayer)->setSurfaceDamage(surfaceDamageRegion); | 
 | 606 |     if (error != HWC2::Error::None) { | 
 | 607 |         ALOGE("[%s] Failed to set surface damage: %s (%d)", mName.string(), | 
 | 608 |               to_string(error).c_str(), static_cast<int32_t>(error)); | 
 | 609 |         surfaceDamageRegion.dump(LOG_TAG); | 
 | 610 |     } | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 611 |  | 
 | 612 |     // Sideband layers | 
| David Sodman | 0cc6918 | 2017-11-17 12:12:07 -0800 | [diff] [blame] | 613 |     if (getBE().compositionInfo.hwc.sidebandStream.get()) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 614 |         setCompositionType(hwcId, HWC2::Composition::Sideband); | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 615 |         ALOGV("[%s] Requesting Sideband composition", mName.string()); | 
 | 616 |         error = (*hwcLayer)->setSidebandStream(getBE().compositionInfo.hwc.sidebandStream->handle()); | 
 | 617 |         if (error != HWC2::Error::None) { | 
 | 618 |             ALOGE("[%s] Failed to set sideband stream %p: %s (%d)", mName.string(), | 
 | 619 |                   getBE().compositionInfo.hwc.sidebandStream->handle(), to_string(error).c_str(), | 
 | 620 |                   static_cast<int32_t>(error)); | 
 | 621 |         } | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 622 |         return; | 
 | 623 |     } | 
 | 624 |  | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 625 |     // Device or Cursor layers | 
 | 626 |     if (mPotentialCursor) { | 
 | 627 |         ALOGV("[%s] Requesting Cursor composition", mName.string()); | 
 | 628 |         setCompositionType(hwcId, HWC2::Composition::Cursor); | 
 | 629 |     } else { | 
 | 630 |         ALOGV("[%s] Requesting Device composition", mName.string()); | 
 | 631 |         setCompositionType(hwcId, HWC2::Composition::Device); | 
 | 632 |     } | 
 | 633 |  | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 634 |     ALOGV("setPerFrameData: dataspace = %d", mDrawingState.dataSpace); | 
 | 635 |     error = (*hwcLayer)->setDataspace(mDrawingState.dataSpace); | 
 | 636 |     if (error != HWC2::Error::None) { | 
 | 637 |         ALOGE("[%s] Failed to set dataspace %d: %s (%d)", mName.string(), mDrawingState.dataSpace, | 
 | 638 |               to_string(error).c_str(), static_cast<int32_t>(error)); | 
 | 639 |     } | 
 | 640 |  | 
 | 641 |     const HdrMetadata& metadata = mConsumer->getCurrentHdrMetadata(); | 
 | 642 |     error = (*hwcLayer)->setHdrMetadata(metadata); | 
 | 643 |     if (error != HWC2::Error::None && error != HWC2::Error::Unsupported) { | 
 | 644 |         ALOGE("[%s] Failed to set hdrMetadata: %s (%d)", mName.string(), | 
 | 645 |               to_string(error).c_str(), static_cast<int32_t>(error)); | 
 | 646 |     } | 
 | 647 |  | 
 | 648 |     uint32_t hwcSlot = 0; | 
 | 649 |     sp<GraphicBuffer> hwcBuffer; | 
 | 650 |     getBE().mHwcLayers[hwcId].bufferCache.getHwcBuffer(mActiveBufferSlot, mActiveBuffer, &hwcSlot, | 
 | 651 |                                                        &hwcBuffer); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 652 |  | 
| Chia-I Wu | b28c674 | 2017-12-27 10:59:54 -0800 | [diff] [blame] | 653 |     auto acquireFence = mConsumer->getCurrentFence(); | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 654 |     error = (*hwcLayer)->setBuffer(hwcSlot, hwcBuffer, acquireFence); | 
 | 655 |     if (error != HWC2::Error::None) { | 
 | 656 |         ALOGE("[%s] Failed to set buffer %p: %s (%d)", mName.string(), | 
 | 657 |               getBE().compositionInfo.mBuffer->handle, to_string(error).c_str(), | 
 | 658 |               static_cast<int32_t>(error)); | 
 | 659 |     } | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 660 | } | 
 | 661 |  | 
| David Sodman | 41fdfc9 | 2017-11-06 16:09:56 -0800 | [diff] [blame] | 662 | bool BufferLayer::isOpaque(const Layer::State& s) const { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 663 |     // if we don't have a buffer or sidebandStream yet, we're translucent regardless of the | 
 | 664 |     // layer's opaque flag. | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 665 |     if ((getBE().compositionInfo.hwc.sidebandStream == nullptr) && (mActiveBuffer == nullptr)) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 666 |         return false; | 
 | 667 |     } | 
 | 668 |  | 
 | 669 |     // if the layer has the opaque flag, then we're always opaque, | 
 | 670 |     // otherwise we use the current buffer's format. | 
 | 671 |     return ((s.flags & layer_state_t::eLayerOpaque) != 0) || mCurrentOpacity; | 
 | 672 | } | 
 | 673 |  | 
void BufferLayer::onFirstRef() {
    // One-time setup: build this layer's BufferQueue endpoints, wire up the
    // consumer, and pick up the default display's transform hint.

    // Creates a custom BufferQueue for SurfaceFlingerConsumer to use
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    // NOTE(review): the third argument presumably flags the consumer as
    // SurfaceFlinger itself -- confirm against BufferQueue::createBufferQueue.
    BufferQueue::createBufferQueue(&producer, &consumer, true);
    // Wrap the producer so SurfaceFlinger can monitor calls made through it.
    mProducer = new MonitoredProducer(producer, mFlinger, this);
    mConsumer = new BufferLayerConsumer(consumer, mFlinger->getRenderEngine(), mTextureName, this);
    mConsumer->setConsumerUsageBits(getEffectiveUsage(0));
    // Registers this layer for onFrameAvailable / onFrameReplaced /
    // onSidebandStreamChanged callbacks (implemented below).
    mConsumer->setContentsChangedListener(this);
    mConsumer->setName(mName);

    // When triple buffering is disabled system-wide, cap the producer at two
    // dequeued buffers (double buffering).
    if (mFlinger->isLayerTripleBufferingDisabled()) {
        mProducer->setMaxDequeuedBufferCount(2);
    }

    const sp<const DisplayDevice> hw(mFlinger->getDefaultDisplayDevice());
    updateTransformHint(hw);
}
 | 692 |  | 
 | 693 | // --------------------------------------------------------------------------- | 
 | 694 | // Interface implementation for SurfaceFlingerConsumer::ContentsChangedListener | 
 | 695 | // --------------------------------------------------------------------------- | 
 | 696 |  | 
void BufferLayer::onFrameAvailable(const BufferItem& item) {
    // ContentsChangedListener callback: a new buffer has been queued by the
    // producer. Record it, keep frame-number bookkeeping in order, and ask
    // SurfaceFlinger to schedule another update.

    // Add this buffer from our internal queue tracker
    { // Autolock scope
        Mutex::Autolock lock(mQueueItemLock);
        mFlinger->mInterceptor->saveBufferUpdate(this, item.mGraphicBuffer->getWidth(),
                                                 item.mGraphicBuffer->getHeight(),
                                                 item.mFrameNumber);
        // Reset the frame number tracker when we receive the first buffer after
        // a frame number reset
        if (item.mFrameNumber == 1) {
            mLastFrameNumberReceived = 0;
        }

        // Ensure that callbacks are handled in order
        while (item.mFrameNumber != mLastFrameNumberReceived + 1) {
            // Block until the callback for the preceding frame has run,
            // logging (but not bailing) every 500ms while waiting.
            status_t result = mQueueItemCondition.waitRelative(mQueueItemLock, ms2ns(500));
            if (result != NO_ERROR) {
                ALOGE("[%s] Timed out waiting on callback", mName.string());
            }
        }

        mQueueItems.push_back(item);
        // Atomic: mQueuedFrames is read/decremented elsewhere without this lock.
        android_atomic_inc(&mQueuedFrames);

        // Wake up any pending callbacks
        mLastFrameNumberReceived = item.mFrameNumber;
        mQueueItemCondition.broadcast();
    }

    mFlinger->signalLayerUpdate();
}
 | 728 |  | 
void BufferLayer::onFrameReplaced(const BufferItem& item) {
    // ContentsChangedListener callback: the producer replaced the most
    // recently queued (not yet acquired) buffer. Overwrite the tail of our
    // queue tracker with the new item.
    { // Autolock scope
        Mutex::Autolock lock(mQueueItemLock);

        // Ensure that callbacks are handled in order
        while (item.mFrameNumber != mLastFrameNumberReceived + 1) {
            // Block until the callback for the preceding frame has run,
            // logging (but not bailing) every 500ms while waiting.
            status_t result = mQueueItemCondition.waitRelative(mQueueItemLock, ms2ns(500));
            if (result != NO_ERROR) {
                ALOGE("[%s] Timed out waiting on callback", mName.string());
            }
        }

        if (mQueueItems.empty()) {
            ALOGE("Can't replace a frame on an empty queue");
            return;
        }
        mQueueItems.editItemAt(mQueueItems.size() - 1) = item;

        // Wake up any pending callbacks
        mLastFrameNumberReceived = item.mFrameNumber;
        mQueueItemCondition.broadcast();
    }
}
 | 752 |  | 
 | 753 | void BufferLayer::onSidebandStreamChanged() { | 
 | 754 |     if (android_atomic_release_cas(false, true, &mSidebandStreamChanged) == 0) { | 
 | 755 |         // mSidebandStreamChanged was false | 
 | 756 |         mFlinger->signalLayerUpdate(); | 
 | 757 |     } | 
 | 758 | } | 
 | 759 |  | 
 | 760 | bool BufferLayer::needsFiltering(const RenderArea& renderArea) const { | 
 | 761 |     return mNeedsFiltering || renderArea.needsFiltering(); | 
 | 762 | } | 
 | 763 |  | 
 | 764 | // As documented in libhardware header, formats in the range | 
 | 765 | // 0x100 - 0x1FF are specific to the HAL implementation, and | 
 | 766 | // are known to have no alpha channel | 
 | 767 | // TODO: move definition for device-specific range into | 
 | 768 | // hardware.h, instead of using hard-coded values here. | 
 | 769 | #define HARDWARE_IS_DEVICE_FORMAT(f) ((f) >= 0x100 && (f) <= 0x1FF) | 
 | 770 |  | 
 | 771 | bool BufferLayer::getOpacityForFormat(uint32_t format) { | 
 | 772 |     if (HARDWARE_IS_DEVICE_FORMAT(format)) { | 
 | 773 |         return true; | 
 | 774 |     } | 
 | 775 |     switch (format) { | 
 | 776 |         case HAL_PIXEL_FORMAT_RGBA_8888: | 
 | 777 |         case HAL_PIXEL_FORMAT_BGRA_8888: | 
 | 778 |         case HAL_PIXEL_FORMAT_RGBA_FP16: | 
 | 779 |         case HAL_PIXEL_FORMAT_RGBA_1010102: | 
 | 780 |             return false; | 
 | 781 |     } | 
 | 782 |     // in all other case, we have no blending (also for unknown formats) | 
 | 783 |     return true; | 
 | 784 | } | 
 | 785 |  | 
| David Sodman | 41fdfc9 | 2017-11-06 16:09:56 -0800 | [diff] [blame] | 786 | void BufferLayer::drawWithOpenGL(const RenderArea& renderArea, bool useIdentityTransform) const { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 787 |     const State& s(getDrawingState()); | 
 | 788 |  | 
| David Sodman | 9eeae69 | 2017-11-02 10:53:32 -0700 | [diff] [blame] | 789 |     computeGeometry(renderArea, getBE().mMesh, useIdentityTransform); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 790 |  | 
 | 791 |     /* | 
 | 792 |      * NOTE: the way we compute the texture coordinates here produces | 
 | 793 |      * different results than when we take the HWC path -- in the later case | 
 | 794 |      * the "source crop" is rounded to texel boundaries. | 
 | 795 |      * This can produce significantly different results when the texture | 
 | 796 |      * is scaled by a large amount. | 
 | 797 |      * | 
 | 798 |      * The GL code below is more logical (imho), and the difference with | 
 | 799 |      * HWC is due to a limitation of the HWC API to integers -- a question | 
 | 800 |      * is suspend is whether we should ignore this problem or revert to | 
 | 801 |      * GL composition when a buffer scaling is applied (maybe with some | 
 | 802 |      * minimal value)? Or, we could make GL behave like HWC -- but this feel | 
 | 803 |      * like more of a hack. | 
 | 804 |      */ | 
| Dan Stoza | 80d6116 | 2017-12-20 15:57:52 -0800 | [diff] [blame] | 805 |     const Rect bounds{computeBounds()}; // Rounds from FloatRect | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 806 |  | 
 | 807 |     Transform t = getTransform(); | 
| Dan Stoza | 80d6116 | 2017-12-20 15:57:52 -0800 | [diff] [blame] | 808 |     Rect win = bounds; | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 809 |     if (!s.finalCrop.isEmpty()) { | 
 | 810 |         win = t.transform(win); | 
 | 811 |         if (!win.intersect(s.finalCrop, &win)) { | 
 | 812 |             win.clear(); | 
 | 813 |         } | 
 | 814 |         win = t.inverse().transform(win); | 
| Dan Stoza | 80d6116 | 2017-12-20 15:57:52 -0800 | [diff] [blame] | 815 |         if (!win.intersect(bounds, &win)) { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 816 |             win.clear(); | 
 | 817 |         } | 
 | 818 |     } | 
 | 819 |  | 
 | 820 |     float left = float(win.left) / float(s.active.w); | 
 | 821 |     float top = float(win.top) / float(s.active.h); | 
 | 822 |     float right = float(win.right) / float(s.active.w); | 
 | 823 |     float bottom = float(win.bottom) / float(s.active.h); | 
 | 824 |  | 
 | 825 |     // TODO: we probably want to generate the texture coords with the mesh | 
 | 826 |     // here we assume that we only have 4 vertices | 
| David Sodman | 9eeae69 | 2017-11-02 10:53:32 -0700 | [diff] [blame] | 827 |     Mesh::VertexArray<vec2> texCoords(getBE().mMesh.getTexCoordArray<vec2>()); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 828 |     texCoords[0] = vec2(left, 1.0f - top); | 
 | 829 |     texCoords[1] = vec2(left, 1.0f - bottom); | 
 | 830 |     texCoords[2] = vec2(right, 1.0f - bottom); | 
 | 831 |     texCoords[3] = vec2(right, 1.0f - top); | 
 | 832 |  | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 833 |     //getBE().compositionInfo.re.preMultipliedAlpha = mPremultipliedAlpha; | 
 | 834 |     //getBE().compositionInfo.re.opaque = isOpaque(s); | 
 | 835 |     //getBE().compositionInfo.re.disableTexture = false; | 
 | 836 |     //getBE().compositionInfo.re.color = getColor(); | 
 | 837 |     //getBE().compositionInfo.hwc.dataspace = mCurrentState.dataSpace; | 
| Chia-I Wu | 5c6e463 | 2018-01-11 08:54:38 -0800 | [diff] [blame] | 838 |  | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 839 |   auto& engine(mFlinger->getRenderEngine()); | 
 | 840 |     engine.setupLayerBlending(mPremultipliedAlpha, isOpaque(s), false /* disableTexture */, | 
 | 841 |                               getColor()); | 
 | 842 |     engine.setSourceDataSpace(mCurrentState.dataSpace); | 
 | 843 |      | 
| Chia-I Wu | 8d2651e | 2018-01-24 12:18:49 -0800 | [diff] [blame] | 844 |     if (mCurrentState.dataSpace == HAL_DATASPACE_BT2020_ITU_PQ && | 
| Chia-I Wu | 5c6e463 | 2018-01-11 08:54:38 -0800 | [diff] [blame] | 845 |         mConsumer->getCurrentApi() == NATIVE_WINDOW_API_MEDIA && | 
 | 846 |         getBE().compositionInfo.mBuffer->getPixelFormat() == HAL_PIXEL_FORMAT_RGBA_1010102) { | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 847 |         engine.setSourceY410BT2020(true); | 
| Chia-I Wu | 5c6e463 | 2018-01-11 08:54:38 -0800 | [diff] [blame] | 848 |     } | 
| Chia-I Wu | 30505fb | 2018-03-26 16:20:31 -0700 | [diff] [blame] | 849 |  | 
 | 850 |     engine.drawMesh(getBE().mMesh); | 
 | 851 |     engine.disableBlending(); | 
 | 852 |  | 
 | 853 |     engine.setSourceY410BT2020(false); | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 854 | } | 
 | 855 |  | 
 | 856 | uint32_t BufferLayer::getProducerStickyTransform() const { | 
 | 857 |     int producerStickyTransform = 0; | 
 | 858 |     int ret = mProducer->query(NATIVE_WINDOW_STICKY_TRANSFORM, &producerStickyTransform); | 
 | 859 |     if (ret != OK) { | 
 | 860 |         ALOGW("%s: Error %s (%d) while querying window sticky transform.", __FUNCTION__, | 
 | 861 |               strerror(-ret), ret); | 
 | 862 |         return 0; | 
 | 863 |     } | 
 | 864 |     return static_cast<uint32_t>(producerStickyTransform); | 
 | 865 | } | 
 | 866 |  | 
 | 867 | bool BufferLayer::latchUnsignaledBuffers() { | 
 | 868 |     static bool propertyLoaded = false; | 
 | 869 |     static bool latch = false; | 
 | 870 |     static std::mutex mutex; | 
 | 871 |     std::lock_guard<std::mutex> lock(mutex); | 
 | 872 |     if (!propertyLoaded) { | 
 | 873 |         char value[PROPERTY_VALUE_MAX] = {}; | 
 | 874 |         property_get("debug.sf.latch_unsignaled", value, "0"); | 
 | 875 |         latch = atoi(value); | 
 | 876 |         propertyLoaded = true; | 
 | 877 |     } | 
 | 878 |     return latch; | 
 | 879 | } | 
 | 880 |  | 
 | 881 | uint64_t BufferLayer::getHeadFrameNumber() const { | 
 | 882 |     Mutex::Autolock lock(mQueueItemLock); | 
 | 883 |     if (!mQueueItems.empty()) { | 
 | 884 |         return mQueueItems[0].mFrameNumber; | 
 | 885 |     } else { | 
 | 886 |         return mCurrentFrameNumber; | 
 | 887 |     } | 
 | 888 | } | 
 | 889 |  | 
 | 890 | bool BufferLayer::headFenceHasSignaled() const { | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 891 |     if (latchUnsignaledBuffers()) { | 
 | 892 |         return true; | 
 | 893 |     } | 
 | 894 |  | 
 | 895 |     Mutex::Autolock lock(mQueueItemLock); | 
 | 896 |     if (mQueueItems.empty()) { | 
 | 897 |         return true; | 
 | 898 |     } | 
 | 899 |     if (mQueueItems[0].mIsDroppable) { | 
 | 900 |         // Even though this buffer's fence may not have signaled yet, it could | 
 | 901 |         // be replaced by another buffer before it has a chance to, which means | 
 | 902 |         // that it's possible to get into a situation where a buffer is never | 
 | 903 |         // able to be latched. To avoid this, grab this buffer anyway. | 
 | 904 |         return true; | 
 | 905 |     } | 
| David Sodman | 0cf8f8d | 2017-12-20 18:19:45 -0800 | [diff] [blame] | 906 |     return mQueueItems[0].mFenceTime->getSignalTime() != Fence::SIGNAL_TIME_PENDING; | 
| David Sodman | 0c69cad | 2017-08-21 12:12:51 -0700 | [diff] [blame] | 907 | } | 
 | 908 |  | 
 | 909 | uint32_t BufferLayer::getEffectiveScalingMode() const { | 
 | 910 |     if (mOverrideScalingMode >= 0) { | 
 | 911 |         return mOverrideScalingMode; | 
 | 912 |     } | 
 | 913 |     return mCurrentScalingMode; | 
 | 914 | } | 
 | 915 |  | 
 | 916 | // ---------------------------------------------------------------------------- | 
 | 917 | // transaction | 
 | 918 | // ---------------------------------------------------------------------------- | 
 | 919 |  | 
 | 920 | void BufferLayer::notifyAvailableFrames() { | 
 | 921 |     auto headFrameNumber = getHeadFrameNumber(); | 
 | 922 |     bool headFenceSignaled = headFenceHasSignaled(); | 
 | 923 |     Mutex::Autolock lock(mLocalSyncPointMutex); | 
 | 924 |     for (auto& point : mLocalSyncPoints) { | 
 | 925 |         if (headFrameNumber >= point->getFrameNumber() && headFenceSignaled) { | 
 | 926 |             point->setFrameAvailable(); | 
 | 927 |         } | 
 | 928 |     } | 
 | 929 | } | 
 | 930 |  | 
sp<IGraphicBufferProducer> BufferLayer::getProducer() const {
    // Producer end of this layer's BufferQueue (created in onFirstRef()).
    return mProducer;
}
 | 934 |  | 
 | 935 | // --------------------------------------------------------------------------- | 
 | 936 | // h/w composer set-up | 
 | 937 | // --------------------------------------------------------------------------- | 
 | 938 |  | 
bool BufferLayer::allTransactionsSignaled() {
    // Returns true when every local sync point at or before the head frame
    // has had its transaction applied, or when no sync point matches the
    // head frame at all.
    auto headFrameNumber = getHeadFrameNumber();
    bool matchingFramesFound = false;
    bool allTransactionsApplied = true;
    Mutex::Autolock lock(mLocalSyncPointMutex);

    for (auto& point : mLocalSyncPoints) {
        // NOTE(review): the early break assumes mLocalSyncPoints is ordered
        // by frame number -- confirm where points are inserted.
        if (point->getFrameNumber() > headFrameNumber) {
            break;
        }
        matchingFramesFound = true;

        if (!point->frameIsAvailable()) {
            // We haven't notified the remote layer that the frame for
            // this point is available yet. Notify it now, and then
            // abort this attempt to latch.
            point->setFrameAvailable();
            allTransactionsApplied = false;
            break;
        }

        allTransactionsApplied = allTransactionsApplied && point->transactionIsApplied();
    }
    return !matchingFramesFound || allTransactionsApplied;
}
 | 964 |  | 
 | 965 | } // namespace android | 
 | 966 |  | 
 | 967 | #if defined(__gl_h_) | 
 | 968 | #error "don't include gl/gl.h in this file" | 
 | 969 | #endif | 
 | 970 |  | 
 | 971 | #if defined(__gl2_h_) | 
 | 972 | #error "don't include gl2/gl2.h in this file" | 
 | 973 | #endif |