/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// #define LOG_NDEBUG 0
#undef LOG_TAG
#define LOG_TAG "SurfaceFlinger"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS

#include <gui/GLConsumer.h>
#include <gui/TraceUtils.h>
#include <math/vec3.h>
#include <system/window.h>
#include <utils/Log.h>

#include <common/FlagManager.h> // FlagManager::getInstance() is used below for flag checks

#include "LayerFE.h"
#include "SurfaceFlinger.h"

namespace android {

namespace {
constexpr float defaultMaxLuminance = 1000.0;

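// Builds the transform described by the NATIVE_WINDOW_TRANSFORM_* flags (a combination of
// horizontal/vertical flips and a 90-degree rotation over the unit square) and returns its
// inverse. It is used below to undo the display and parent orientation on the texture transform.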
constexpr mat4 inverseOrientation(uint32_t transform) {
    const mat4 flipH(-1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1);
    const mat4 flipV(1, 0, 0, 0, 0, -1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1);
    const mat4 rot90(0, 1, 0, 0, -1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 1);
    mat4 tr;

    if (transform & NATIVE_WINDOW_TRANSFORM_ROT_90) {
        tr = tr * rot90;
    }
    if (transform & NATIVE_WINDOW_TRANSFORM_FLIP_H) {
        tr = tr * flipH;
    }
    if (transform & NATIVE_WINDOW_TRANSFORM_FLIP_V) {
        tr = tr * flipV;
    }
    return inverse(tr);
}

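// Returns the bounds of |win| with the |exclude| region subtracted.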
FloatRect reduce(const FloatRect& win, const Region& exclude) {
    if (CC_LIKELY(exclude.isEmpty())) {
        return win;
    }
    // Convert through Rect (by rounding) for lack of FloatRegion
    return Region(Rect{win}).subtract(exclude).getBounds().toFloatRect();
}

// Computes the buffer's drawing transform matrix, using filteringEnabled to determine whether
// the matrix should be computed for use with bilinear filtering.
void getDrawingTransformMatrix(const std::shared_ptr<renderengine::ExternalTexture>& buffer,
                               Rect bufferCrop, uint32_t bufferTransform, bool filteringEnabled,
                               float outMatrix[16]) {
    if (!buffer) {
        ALOGE("Buffer should not be null!");
        return;
    }
    GLConsumer::computeTransformMatrix(outMatrix, static_cast<float>(buffer->getWidth()),
                                       static_cast<float>(buffer->getHeight()),
                                       buffer->getPixelFormat(), bufferCrop, bufferTransform,
                                       filteringEnabled);
}

} // namespace

LayerFE::LayerFE(const std::string& name) : mName(name) {}

const compositionengine::LayerFECompositionState* LayerFE::getCompositionState() const {
    return mSnapshot.get();
}

bool LayerFE::onPreComposition(nsecs_t refreshStartTime, bool) {
    mCompositionResult.refreshStartTime = refreshStartTime;
    return mSnapshot->hasReadyFrame;
}

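// Entry point for client (GPU) composition: returns the LayerSettings to render for this layer,
// or nullopt if there is nothing to render. Handles the HWC request to clear the layer and the
// shadow setup on top of the settings prepared by prepareClientCompositionInternal().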
std::optional<compositionengine::LayerFE::LayerSettings> LayerFE::prepareClientComposition(
        compositionengine::LayerFE::ClientCompositionTargetSettings& targetSettings) const {
    std::optional<compositionengine::LayerFE::LayerSettings> layerSettings =
            prepareClientCompositionInternal(targetSettings);
    // Nothing to render.
    if (!layerSettings) {
        return {};
    }

    // HWC requests to clear this layer.
    if (targetSettings.clearContent) {
        prepareClearClientComposition(*layerSettings, false /* blackout */);
        return layerSettings;
    }

    // set the shadow for the layer if needed
    prepareShadowClientComposition(*layerSettings, targetSettings.viewport);

    return layerSettings;
}

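// Fills in the settings common to all client composition for this layer (geometry, color
// transform, rounded corners, alpha, dataspace, blur, and stretch effect), then delegates to the
// effects path or the buffer path to populate the content source.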
std::optional<compositionengine::LayerFE::LayerSettings> LayerFE::prepareClientCompositionInternal(
        compositionengine::LayerFE::ClientCompositionTargetSettings& targetSettings) const {
    ATRACE_CALL();
    compositionengine::LayerFE::LayerSettings layerSettings;
    layerSettings.geometry.boundaries =
            reduce(mSnapshot->geomLayerBounds, mSnapshot->transparentRegionHint);
    layerSettings.geometry.positionTransform = mSnapshot->geomLayerTransform.asMatrix4();

    // skip drawing content if the targetSettings indicate the content will be occluded
    const bool drawContent = targetSettings.realContentIsVisible || targetSettings.clearContent;
    layerSettings.skipContentDraw = !drawContent;

    if (!mSnapshot->colorTransformIsIdentity) {
        layerSettings.colorTransform = mSnapshot->colorTransform;
    }

    const auto& roundedCornerState = mSnapshot->roundedCorner;
    layerSettings.geometry.roundedCornersRadius = roundedCornerState.radius;
    layerSettings.geometry.roundedCornersCrop = roundedCornerState.cropRect;

    layerSettings.alpha = mSnapshot->alpha;
    layerSettings.sourceDataspace = mSnapshot->dataspace;

    // Override the dataspace transfer from 170M to sRGB if the device configuration requests this.
    // We do this here instead of in buffer info so that dumpsys can still report layers that are
    // using the 170M transfer.
    if (targetSettings.treat170mAsSrgb &&
        (layerSettings.sourceDataspace & HAL_DATASPACE_TRANSFER_MASK) ==
                HAL_DATASPACE_TRANSFER_SMPTE_170M) {
        layerSettings.sourceDataspace = static_cast<ui::Dataspace>(
                (layerSettings.sourceDataspace & HAL_DATASPACE_STANDARD_MASK) |
                (layerSettings.sourceDataspace & HAL_DATASPACE_RANGE_MASK) |
                HAL_DATASPACE_TRANSFER_SRGB);
    }

    layerSettings.whitePointNits = targetSettings.whitePointNits;
    switch (targetSettings.blurSetting) {
        case LayerFE::ClientCompositionTargetSettings::BlurSetting::Enabled:
            layerSettings.backgroundBlurRadius = mSnapshot->backgroundBlurRadius;
            layerSettings.blurRegions = mSnapshot->blurRegions;
            layerSettings.blurRegionTransform = mSnapshot->localTransformInverse.asMatrix4();
            break;
        case LayerFE::ClientCompositionTargetSettings::BlurSetting::BackgroundBlurOnly:
            layerSettings.backgroundBlurRadius = mSnapshot->backgroundBlurRadius;
            break;
        case LayerFE::ClientCompositionTargetSettings::BlurSetting::BlurRegionsOnly:
            layerSettings.blurRegions = mSnapshot->blurRegions;
            layerSettings.blurRegionTransform = mSnapshot->localTransformInverse.asMatrix4();
            break;
        case LayerFE::ClientCompositionTargetSettings::BlurSetting::Disabled:
        default:
            break;
    }
    layerSettings.stretchEffect = mSnapshot->stretchEffect;
    // Record the name of the layer for debugging further down the stack.
    layerSettings.name = mSnapshot->name;

    if (hasEffect() && !hasBufferOrSidebandStream()) {
        prepareEffectsClientComposition(layerSettings, targetSettings);
        return layerSettings;
    }

    prepareBufferStateClientComposition(layerSettings, targetSettings);
    return layerSettings;
}

void LayerFE::prepareClearClientComposition(LayerFE::LayerSettings& layerSettings,
                                            bool blackout) const {
    layerSettings.source.buffer.buffer = nullptr;
    layerSettings.source.solidColor = half3(0.0f, 0.0f, 0.0f);
    layerSettings.disableBlending = true;
    layerSettings.bufferId = 0;
    layerSettings.frameNumber = 0;

    // If layer is blacked out, force alpha to 1 so that we draw a black color layer.
    layerSettings.alpha = blackout ? 1.0f : 0.0f;
    layerSettings.name = mSnapshot->name;
}

void LayerFE::prepareEffectsClientComposition(
        compositionengine::LayerFE::LayerSettings& layerSettings,
        compositionengine::LayerFE::ClientCompositionTargetSettings& targetSettings) const {
    // If the fill bounds are occluded or the fill color is invalid, skip the fill settings.
    if (targetSettings.realContentIsVisible && fillsColor()) {
        // Set color for color fill settings.
        layerSettings.source.solidColor = mSnapshot->color.rgb;
    } else if (hasBlur() || drawShadows()) {
        layerSettings.skipContentDraw = true;
    }
}

void LayerFE::prepareBufferStateClientComposition(
        compositionengine::LayerFE::LayerSettings& layerSettings,
        compositionengine::LayerFE::ClientCompositionTargetSettings& targetSettings) const {
    ATRACE_CALL();
    if (CC_UNLIKELY(!mSnapshot->externalTexture)) {
        // If there is no buffer for the layer (or only a sideband stream without an active
        // buffer), return the LayerSettings without buffer content.
        return;
    }
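    // Decide whether the layer must be rendered as opaque black. With the display_protected flag
    // enabled, protected content is blacked out only on non-protected outputs and secure content
    // only on non-secure outputs; without the flag, protected content is additionally blacked out
    // on non-secure outputs.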
    bool blackOutLayer;
    if (FlagManager::getInstance().display_protected()) {
        blackOutLayer = (mSnapshot->hasProtectedContent && !targetSettings.isProtected) ||
                (mSnapshot->isSecure && !targetSettings.isSecure);
    } else {
        blackOutLayer = (mSnapshot->hasProtectedContent && !targetSettings.isProtected) ||
                ((mSnapshot->isSecure || mSnapshot->hasProtectedContent) &&
                 !targetSettings.isSecure);
    }
    const bool bufferCanBeUsedAsHwTexture =
            mSnapshot->externalTexture->getUsage() & GraphicBuffer::USAGE_HW_TEXTURE;
    if (blackOutLayer || !bufferCanBeUsedAsHwTexture) {
        ALOGE_IF(!bufferCanBeUsedAsHwTexture, "%s is blacked out as buffer is not gpu readable",
                 mSnapshot->name.c_str());
        prepareClearClientComposition(layerSettings, true /* blackout */);
        return;
    }

    layerSettings.source.buffer.buffer = mSnapshot->externalTexture;
    layerSettings.source.buffer.isOpaque = mSnapshot->contentOpaque;
    layerSettings.source.buffer.fence = mSnapshot->acquireFence;
    layerSettings.source.buffer.usePremultipliedAlpha = mSnapshot->premultipliedAlpha;
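    // Derive the content's max luminance from the SMPTE 2086 / CTA-861.3 HDR metadata when
    // present; otherwise fall back to a default value for HDR transfer functions.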
    bool hasSmpte2086 = mSnapshot->hdrMetadata.validTypes & HdrMetadata::SMPTE2086;
    bool hasCta861_3 = mSnapshot->hdrMetadata.validTypes & HdrMetadata::CTA861_3;
    float maxLuminance = 0.f;
    if (hasSmpte2086 && hasCta861_3) {
        maxLuminance = std::min(mSnapshot->hdrMetadata.smpte2086.maxLuminance,
                                mSnapshot->hdrMetadata.cta8613.maxContentLightLevel);
    } else if (hasSmpte2086) {
        maxLuminance = mSnapshot->hdrMetadata.smpte2086.maxLuminance;
    } else if (hasCta861_3) {
        maxLuminance = mSnapshot->hdrMetadata.cta8613.maxContentLightLevel;
    } else {
        switch (layerSettings.sourceDataspace & HAL_DATASPACE_TRANSFER_MASK) {
            case HAL_DATASPACE_TRANSFER_ST2084:
            case HAL_DATASPACE_TRANSFER_HLG:
                // Behavior-match previous releases for HDR content
                maxLuminance = defaultMaxLuminance;
                break;
        }
    }
    layerSettings.source.buffer.maxLuminanceNits = maxLuminance;
    layerSettings.frameNumber = mSnapshot->frameNumber;
    layerSettings.bufferId = mSnapshot->externalTexture->getId();

    const bool useFiltering = targetSettings.needsFiltering ||
            mSnapshot->geomLayerTransform.needsBilinearFiltering();

    // Query the texture matrix given our current filtering mode.
    float textureMatrix[16];
    getDrawingTransformMatrix(layerSettings.source.buffer.buffer, mSnapshot->geomContentCrop,
                              mSnapshot->geomBufferTransform, useFiltering, textureMatrix);

    if (mSnapshot->geomBufferUsesDisplayInverseTransform) {
        /*
         * the code below applies the primary display's inverse transform to
         * the texture transform
         */
        uint32_t transform = SurfaceFlinger::getActiveDisplayRotationFlags();
        mat4 tr = inverseOrientation(transform);

        /**
         * TODO(b/36727915): This is basically a hack.
         *
         * Ensure that regardless of the parent transformation,
         * this buffer is always transformed from native display
         * orientation to display orientation. For example, in the case
         * of a camera where the buffer remains in native orientation,
         * we want the pixels to always be upright.
         */
        const auto parentTransform = mSnapshot->parentTransform;
        tr = tr * inverseOrientation(parentTransform.getOrientation());

        // and finally apply it to the original texture matrix
        const mat4 texTransform(mat4(static_cast<const float*>(textureMatrix)) * tr);
        memcpy(textureMatrix, texTransform.asArray(), sizeof(textureMatrix));
    }

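    // Compute a scale and translation from the layer bounds relative to the buffer size; this is
    // composed with the texture matrix below to produce the final texture transform.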
    const Rect win{layerSettings.geometry.boundaries};
    float bufferWidth = static_cast<float>(mSnapshot->bufferSize.getWidth());
    float bufferHeight = static_cast<float>(mSnapshot->bufferSize.getHeight());

    // Layers can have a "buffer size" of [0, 0, -1, -1] when no display frame has
    // been set and there is no parent layer bounds. In that case, the scale is meaningless, so
    // ignore the buffer size and use the layer bounds instead.
    if (!mSnapshot->bufferSize.isValid()) {
        bufferWidth = float(win.right) - float(win.left);
        bufferHeight = float(win.bottom) - float(win.top);
    }

    const float scaleHeight = (float(win.bottom) - float(win.top)) / bufferHeight;
    const float scaleWidth = (float(win.right) - float(win.left)) / bufferWidth;
    const float translateY = float(win.top) / bufferHeight;
    const float translateX = float(win.left) / bufferWidth;

    // Flip y-coordinates because GLConsumer expects OpenGL convention.
    mat4 tr = mat4::translate(vec4(.5f, .5f, 0.f, 1.f)) * mat4::scale(vec4(1.f, -1.f, 1.f, 1.f)) *
            mat4::translate(vec4(-.5f, -.5f, 0.f, 1.f)) *
            mat4::translate(vec4(translateX, translateY, 0.f, 1.f)) *
            mat4::scale(vec4(scaleWidth, scaleHeight, 1.0f, 1.0f));

    layerSettings.source.buffer.useTextureFiltering = useFiltering;
    layerSettings.source.buffer.textureTransform =
            mat4(static_cast<const float*>(textureMatrix)) * tr;

    return;
}

void LayerFE::prepareShadowClientComposition(LayerFE::LayerSettings& caster,
                                             const Rect& layerStackRect) const {
    ShadowSettings state = mSnapshot->shadowSettings;
    if (state.length <= 0.f || (state.ambientColor.a <= 0.f && state.spotColor.a <= 0.f)) {
        return;
    }

    // Shift the spot light x-position to the middle of the display and then
    // offset it by casting layer's screen pos.
    state.lightPos.x =
            (static_cast<float>(layerStackRect.width()) / 2.f) - mSnapshot->transformedBounds.left;
    state.lightPos.y -= mSnapshot->transformedBounds.top;
    caster.shadow = state;
}

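// Records the release fence future for this layer and layer stack in the composition result so
// it can be forwarded after composition.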
void LayerFE::onLayerDisplayed(ftl::SharedFuture<FenceResult> futureFenceResult,
                               ui::LayerStack layerStack) {
    mCompositionResult.releaseFences.emplace_back(std::move(futureFenceResult), layerStack);
}

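// Hands the accumulated composition result (release fences, refresh start time, and the last
// client composition fence) to the caller, which is expected to move from it.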
CompositionResult&& LayerFE::stealCompositionResult() {
    return std::move(mCompositionResult);
}

const char* LayerFE::getDebugName() const {
    return mName.c_str();
}

const LayerMetadata* LayerFE::getMetadata() const {
    return &mSnapshot->layerMetadata;
}

const LayerMetadata* LayerFE::getRelativeMetadata() const {
    return &mSnapshot->relativeLayerMetadata;
}

int32_t LayerFE::getSequence() const {
    return static_cast<int32_t>(mSnapshot->uniqueSequence);
}

bool LayerFE::hasRoundedCorners() const {
    return mSnapshot->roundedCorner.hasRoundedCorners();
}

void LayerFE::setWasClientComposed(const sp<Fence>& fence) {
    mCompositionResult.lastClientCompositionFence = fence;
}

bool LayerFE::hasBufferOrSidebandStream() const {
    return mSnapshot->externalTexture || mSnapshot->sidebandStream;
}

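// A negative component marks the fill color as invalid, so the layer fills with a solid color
// only when all RGB components are non-negative.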
bool LayerFE::fillsColor() const {
    return mSnapshot->color.r >= 0.0_hf && mSnapshot->color.g >= 0.0_hf &&
            mSnapshot->color.b >= 0.0_hf;
}

bool LayerFE::hasBlur() const {
    return mSnapshot->backgroundBlurRadius > 0 || mSnapshot->blurRegions.size() > 0;
}

bool LayerFE::drawShadows() const {
    return mSnapshot->shadowSettings.length > 0.f &&
            (mSnapshot->shadowSettings.ambientColor.a > 0 ||
             mSnapshot->shadowSettings.spotColor.a > 0);
}

const sp<GraphicBuffer> LayerFE::getBuffer() const {
    return mSnapshot->externalTexture ? mSnapshot->externalTexture->getBuffer() : nullptr;
}

} // namespace android