/*
 * Copyright 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// TODO(b/129481165): remove the #pragma below and fix conversion issues
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wconversion"

//#define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
#undef LOG_TAG
#define LOG_TAG "RegionSamplingThread"

#include "RegionSamplingThread.h"

#include <compositionengine/Display.h>
#include <compositionengine/impl/OutputCompositionState.h>
#include <cutils/properties.h>
#include <gui/IRegionSamplingListener.h>
#include <ui/DisplayStatInfo.h>
#include <utils/Trace.h>

#include <string>

#include "DisplayDevice.h"
#include "DisplayRenderArea.h"
#include "Layer.h"
#include "Promise.h"
#include "Scheduler/DispSync.h"
#include "SurfaceFlinger.h"

namespace android {
using namespace std::chrono_literals;

template <typename T>
struct SpHash {
    size_t operator()(const sp<T>& p) const { return std::hash<T*>()(p.get()); }
};

constexpr auto lumaSamplingStepTag = "LumaSamplingStep";
enum class samplingStep {
    noWorkNeeded,
    idleTimerWaiting,
    waitForQuietFrame,
    waitForZeroPhase,
    waitForSamplePhase,
    sample
};

constexpr auto timeForRegionSampling = 5000000ns;
constexpr auto maxRegionSamplingSkips = 10;
constexpr auto defaultRegionSamplingOffset = -3ms;
constexpr auto defaultRegionSamplingPeriod = 100ms;
constexpr auto defaultRegionSamplingTimerTimeout = 100ms;
// TODO: (b/127403193) duration to string conversion could probably be constexpr
template <typename Rep, typename Per>
inline std::string toNsString(std::chrono::duration<Rep, Per> t) {
    return std::to_string(std::chrono::duration_cast<std::chrono::nanoseconds>(t).count());
}

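// EnvironmentTimingTunables reads the sampling tunables from the debug.sf.region_sampling_*
// system properties, falling back to the defaults above when the parsed period or timer
// timeout is negative.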
RegionSamplingThread::EnvironmentTimingTunables::EnvironmentTimingTunables() {
    char value[PROPERTY_VALUE_MAX] = {};

    property_get("debug.sf.region_sampling_offset_ns", value,
                 toNsString(defaultRegionSamplingOffset).c_str());
    int const samplingOffsetNsRaw = atoi(value);

    property_get("debug.sf.region_sampling_period_ns", value,
                 toNsString(defaultRegionSamplingPeriod).c_str());
    int const samplingPeriodNsRaw = atoi(value);

    property_get("debug.sf.region_sampling_timer_timeout_ns", value,
                 toNsString(defaultRegionSamplingTimerTimeout).c_str());
    int const samplingTimerTimeoutNsRaw = atoi(value);

    if ((samplingPeriodNsRaw < 0) || (samplingTimerTimeoutNsRaw < 0)) {
        ALOGW("User-specified sampling tuning options nonsensical. Using defaults");
        mSamplingOffset = defaultRegionSamplingOffset;
        mSamplingPeriod = defaultRegionSamplingPeriod;
        mSamplingTimerTimeout = defaultRegionSamplingTimerTimeout;
    } else {
        mSamplingOffset = std::chrono::nanoseconds(samplingOffsetNsRaw);
        mSamplingPeriod = std::chrono::nanoseconds(samplingPeriodNsRaw);
        mSamplingTimerTimeout = std::chrono::nanoseconds(samplingTimerTimeoutNsRaw);
    }
}

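// SamplingOffsetCallback registers as a vsync listener on the primary DispSync and alternates
// between two phase offsets: on the first callback (Phase::ZERO) it moves its offset to
// mTargetSamplingOffset; on the next callback (Phase::SAMPLING) it restores the zero offset,
// unregisters itself, and kicks off a sample via notifySamplingOffset().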
struct SamplingOffsetCallback : DispSync::Callback {
    SamplingOffsetCallback(RegionSamplingThread& samplingThread, Scheduler& scheduler,
                           std::chrono::nanoseconds targetSamplingOffset)
          : mRegionSamplingThread(samplingThread),
            mScheduler(scheduler),
            mTargetSamplingOffset(targetSamplingOffset) {}

    ~SamplingOffsetCallback() { stopVsyncListener(); }

    SamplingOffsetCallback(const SamplingOffsetCallback&) = delete;
    SamplingOffsetCallback& operator=(const SamplingOffsetCallback&) = delete;

    void startVsyncListener() {
        std::lock_guard lock(mMutex);
        if (mVsyncListening) return;

        mPhaseIntervalSetting = Phase::ZERO;
        mScheduler.getPrimaryDispSync().addEventListener("SamplingThreadDispSyncListener", 0, this,
                                                         mLastCallbackTime);
        mVsyncListening = true;
    }

    void stopVsyncListener() {
        std::lock_guard lock(mMutex);
        stopVsyncListenerLocked();
    }

private:
    void stopVsyncListenerLocked() /*REQUIRES(mMutex)*/ {
        if (!mVsyncListening) return;

        mScheduler.getPrimaryDispSync().removeEventListener(this, &mLastCallbackTime);
        mVsyncListening = false;
    }

    void onDispSyncEvent(nsecs_t /*when*/, nsecs_t /*expectedVSyncTimestamp*/) final {
        std::unique_lock<decltype(mMutex)> lock(mMutex);

        if (mPhaseIntervalSetting == Phase::ZERO) {
            ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForSamplePhase));
            mPhaseIntervalSetting = Phase::SAMPLING;
            mScheduler.getPrimaryDispSync().changePhaseOffset(this, mTargetSamplingOffset.count());
            return;
        }

        if (mPhaseIntervalSetting == Phase::SAMPLING) {
            mPhaseIntervalSetting = Phase::ZERO;
            mScheduler.getPrimaryDispSync().changePhaseOffset(this, 0);
            stopVsyncListenerLocked();
            lock.unlock();
            mRegionSamplingThread.notifySamplingOffset();
            return;
        }
    }

    RegionSamplingThread& mRegionSamplingThread;
    Scheduler& mScheduler;
    const std::chrono::nanoseconds mTargetSamplingOffset;
    mutable std::mutex mMutex;
    nsecs_t mLastCallbackTime = 0;
    enum class Phase {
        ZERO,
        SAMPLING
    } mPhaseIntervalSetting /*GUARDED_BY(mMutex) macro doesn't work with unique_lock?*/
            = Phase::ZERO;
    bool mVsyncListening /*GUARDED_BY(mMutex)*/ = false;
};

RegionSamplingThread::RegionSamplingThread(SurfaceFlinger& flinger, Scheduler& scheduler,
                                           const TimingTunables& tunables)
      : mFlinger(flinger),
        mScheduler(scheduler),
        mTunables(tunables),
        mIdleTimer(std::chrono::duration_cast<std::chrono::milliseconds>(
                           mTunables.mSamplingTimerTimeout),
                   [] {}, [this] { checkForStaleLuma(); }),
        mPhaseCallback(std::make_unique<SamplingOffsetCallback>(*this, mScheduler,
                                                                tunables.mSamplingOffset)),
        lastSampleTime(0ns) {
    mThread = std::thread([this]() { threadMain(); });
    pthread_setname_np(mThread.native_handle(), "RegionSamplingThread");
    mIdleTimer.start();
}

RegionSamplingThread::RegionSamplingThread(SurfaceFlinger& flinger, Scheduler& scheduler)
      : RegionSamplingThread(flinger, scheduler,
                             TimingTunables{defaultRegionSamplingOffset,
                                            defaultRegionSamplingPeriod,
                                            defaultRegionSamplingTimerTimeout}) {}

RegionSamplingThread::~RegionSamplingThread() {
    mIdleTimer.stop();

    {
        std::lock_guard lock(mThreadControlMutex);
        mRunning = false;
        mCondition.notify_one();
    }

    if (mThread.joinable()) {
        mThread.join();
    }
}

void RegionSamplingThread::addListener(const Rect& samplingArea, const wp<Layer>& stopLayer,
                                       const sp<IRegionSamplingListener>& listener) {
    sp<IBinder> asBinder = IInterface::asBinder(listener);
    asBinder->linkToDeath(this);
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.emplace(wp<IBinder>(asBinder), Descriptor{samplingArea, stopLayer, listener});
}

void RegionSamplingThread::removeListener(const sp<IRegionSamplingListener>& listener) {
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.erase(wp<IBinder>(IInterface::asBinder(listener)));
}

void RegionSamplingThread::checkForStaleLuma() {
    std::lock_guard lock(mThreadControlMutex);

    if (mDiscardedFrames > 0) {
        ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForZeroPhase));
        mDiscardedFrames = 0;
        mPhaseCallback->startVsyncListener();
    }
}

void RegionSamplingThread::notifyNewContent() {
    doSample();
}

void RegionSamplingThread::notifySamplingOffset() {
    doSample();
}

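// doSample() rate-limits sampling to once per mSamplingPeriod and, for up to
// maxRegionSamplingSkips frames, defers the work when less than timeForRegionSampling remains
// before the next expected vsync, so the capture does not compete with regular composition.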
void RegionSamplingThread::doSample() {
    std::lock_guard lock(mThreadControlMutex);
    auto now = std::chrono::nanoseconds(systemTime(SYSTEM_TIME_MONOTONIC));
    if (lastSampleTime + mTunables.mSamplingPeriod > now) {
        ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::idleTimerWaiting));
        if (mDiscardedFrames == 0) mDiscardedFrames++;
        return;
    }
    if (mDiscardedFrames < maxRegionSamplingSkips) {
        // If there is relatively little time left for surfaceflinger
        // until the next vsync deadline, defer this sampling work
        // to a later frame, when hopefully there will be more time.
        DisplayStatInfo stats;
        mScheduler.getDisplayStatInfo(&stats);
        if (std::chrono::nanoseconds(stats.vsyncTime) - now < timeForRegionSampling) {
            ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForQuietFrame));
            mDiscardedFrames++;
            return;
        }
    }

    ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::sample));

    mDiscardedFrames = 0;
    lastSampleTime = now;

    mIdleTimer.reset();
    mPhaseCallback->stopVsyncListener();

    mSampleRequested = true;
    mCondition.notify_one();
}

void RegionSamplingThread::binderDied(const wp<IBinder>& who) {
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.erase(who);
}

float sampleArea(const uint32_t* data, int32_t width, int32_t height, int32_t stride,
                 uint32_t orientation, const Rect& sample_area) {
    if (!sample_area.isValid() || (sample_area.getWidth() > width) ||
        (sample_area.getHeight() > height)) {
        ALOGE("invalid sampling region requested");
        return 0.0f;
    }

    // (b/133849373) ROT_90 screencap images produced upside down
    auto area = sample_area;
    if (orientation & ui::Transform::ROT_90) {
        area.top = height - area.top;
        area.bottom = height - area.bottom;
        std::swap(area.top, area.bottom);

        area.left = width - area.left;
        area.right = width - area.right;
        std::swap(area.left, area.right);
    }

    const uint32_t pixelCount = (area.bottom - area.top) * (area.right - area.left);
    uint32_t accumulatedLuma = 0;

    // Calculates luma with approximation of Rec. 709 primaries
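    // The integer weights below compute (7*R + 23*G + 2*B) / 32, i.e. roughly
    // 0.219*R + 0.719*G + 0.063*B, close to the Rec. 709 luma coefficients (0.2126, 0.7152,
    // 0.0722).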
    for (int32_t row = area.top; row < area.bottom; ++row) {
        const uint32_t* rowBase = data + row * stride;
        for (int32_t column = area.left; column < area.right; ++column) {
            uint32_t pixel = rowBase[column];
            const uint32_t r = pixel & 0xFF;
            const uint32_t g = (pixel >> 8) & 0xFF;
            const uint32_t b = (pixel >> 16) & 0xFF;
            const uint32_t luma = (r * 7 + b * 2 + g * 23) >> 5;
            accumulatedLuma += luma;
        }
    }

    return accumulatedLuma / (255.0f * pixelCount);
}

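// Locks the screenshot buffer for CPU reads and computes one luma value per descriptor by
// running sampleArea() over that descriptor's area, translated into buffer coordinates via
// leftTop.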
std::vector<float> RegionSamplingThread::sampleBuffer(
        const sp<GraphicBuffer>& buffer, const Point& leftTop,
        const std::vector<RegionSamplingThread::Descriptor>& descriptors, uint32_t orientation) {
    void* data_raw = nullptr;
    buffer->lock(GRALLOC_USAGE_SW_READ_OFTEN, &data_raw);
    std::shared_ptr<uint32_t> data(reinterpret_cast<uint32_t*>(data_raw),
                                   [&buffer](auto) { buffer->unlock(); });
    if (!data) return {};

    const int32_t width = buffer->getWidth();
    const int32_t height = buffer->getHeight();
    const int32_t stride = buffer->getStride();
    std::vector<float> lumas(descriptors.size());
    std::transform(descriptors.begin(), descriptors.end(), lumas.begin(),
                   [&](auto const& descriptor) {
                       return sampleArea(data.get(), width, height, stride, orientation,
                                         descriptor.area - leftTop);
                   });
    return lumas;
}

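// Takes the union of all registered sampling areas, screenshots that region of the default
// display into a (possibly cached) RGBA_8888 buffer, computes the average luma over each
// listener's area, and delivers the results through onSampleCollected().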
void RegionSamplingThread::captureSample() {
    ATRACE_CALL();
    std::lock_guard lock(mSamplingMutex);

    if (mDescriptors.empty()) {
        return;
    }

    wp<const DisplayDevice> displayWeak;

    ui::LayerStack layerStack;
    ui::Transform::RotationFlags orientation;
    ui::Size displaySize;

    {
        // TODO(b/159112860): Don't keep sp<DisplayDevice> outside of SF main thread
        const sp<const DisplayDevice> display = mFlinger.getDefaultDisplayDevice();
        displayWeak = display;
        layerStack = display->getLayerStack();
        orientation = ui::Transform::toRotationFlags(display->getOrientation());
        displaySize = display->getSize();
    }

    std::vector<RegionSamplingThread::Descriptor> descriptors;
    Region sampleRegion;
    for (const auto& [listener, descriptor] : mDescriptors) {
        sampleRegion.orSelf(descriptor.area);
        descriptors.emplace_back(descriptor);
    }

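    // The screenshot is rendered in the display's current orientation, so rotate the sampling
    // region the same way and translate it back into the screencap's coordinate space.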
    auto dx = 0;
    auto dy = 0;
    switch (orientation) {
        case ui::Transform::ROT_90:
            dx = displaySize.getWidth();
            break;
        case ui::Transform::ROT_180:
            dx = displaySize.getWidth();
            dy = displaySize.getHeight();
            break;
        case ui::Transform::ROT_270:
            dy = displaySize.getHeight();
            break;
        default:
            break;
    }

    ui::Transform t(orientation);
    auto screencapRegion = t.transform(sampleRegion);
    screencapRegion = screencapRegion.translate(dx, dy);

    const Rect sampledBounds = sampleRegion.bounds();

    SurfaceFlinger::RenderAreaFuture renderAreaFuture = promise::defer([=] {
        return DisplayRenderArea::create(displayWeak, screencapRegion.bounds(),
                                         sampledBounds.getSize(), ui::Dataspace::V0_SRGB,
                                         orientation);
    });

    std::unordered_set<sp<IRegionSamplingListener>, SpHash<IRegionSamplingListener>> listeners;

    auto traverseLayers = [&](const LayerVector::Visitor& visitor) {
        bool stopLayerFound = false;
        auto filterVisitor = [&](Layer* layer) {
            // We don't want to capture any layers beyond the stop layer
            if (stopLayerFound) return;

            // Likewise if we just found a stop layer, set the flag and abort
            for (const auto& [area, stopLayer, listener] : descriptors) {
                if (layer == stopLayer.promote().get()) {
                    stopLayerFound = true;
                    return;
                }
            }

            // Compute the layer's position on the screen
            const Rect bounds = Rect(layer->getBounds());
            const ui::Transform transform = layer->getTransform();
            constexpr bool roundOutwards = true;
            Rect transformed = transform.transform(bounds, roundOutwards);

            // If this layer doesn't intersect with the larger sampledBounds, skip capturing it
            Rect ignore;
            if (!transformed.intersect(sampledBounds, &ignore)) return;

            // If the layer doesn't intersect a sampling area, skip capturing it
            bool intersectsAnyArea = false;
            for (const auto& [area, stopLayer, listener] : descriptors) {
                if (transformed.intersect(area, &ignore)) {
                    intersectsAnyArea = true;
                    listeners.insert(listener);
                }
            }
            if (!intersectsAnyArea) return;

            ALOGV("Traversing [%s] [%d, %d, %d, %d]", layer->getDebugName(), bounds.left,
                  bounds.top, bounds.right, bounds.bottom);
            visitor(layer);
        };
        mFlinger.traverseLayersInLayerStack(layerStack, filterVisitor);
    };

    sp<GraphicBuffer> buffer = nullptr;
    if (mCachedBuffer && mCachedBuffer->getWidth() == sampledBounds.getWidth() &&
        mCachedBuffer->getHeight() == sampledBounds.getHeight()) {
        buffer = mCachedBuffer;
    } else {
        const uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_HW_RENDER;
        buffer = new GraphicBuffer(sampledBounds.getWidth(), sampledBounds.getHeight(),
                                   PIXEL_FORMAT_RGBA_8888, 1, usage, "RegionSamplingThread");
    }

    bool ignored;
    mFlinger.captureScreenCommon(std::move(renderAreaFuture), traverseLayers, buffer,
                                 false /* identityTransform */, true /* regionSampling */, ignored);

    std::vector<Descriptor> activeDescriptors;
    for (const auto& descriptor : descriptors) {
        if (listeners.count(descriptor.listener) != 0) {
            activeDescriptors.emplace_back(descriptor);
        }
    }

    ALOGV("Sampling %zu descriptors", activeDescriptors.size());
    std::vector<float> lumas =
            sampleBuffer(buffer, sampledBounds.leftTop(), activeDescriptors, orientation);
    if (lumas.size() != activeDescriptors.size()) {
        ALOGW("collected %zu median luma values for %zu descriptors", lumas.size(),
              activeDescriptors.size());
        return;
    }

    for (size_t d = 0; d < activeDescriptors.size(); ++d) {
        activeDescriptors[d].listener->onSampleCollected(lumas[d]);
    }

    // Extend the lifetime of mCachedBuffer from the previous frame to here to ensure that:
    // 1) The region sampling thread is the last owner of the buffer, and the freeing of the buffer
    //    happens in this thread, as opposed to the main thread.
    // 2) The listener(s) receive their notifications prior to freeing the buffer.
    mCachedBuffer = buffer;
    ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::noWorkNeeded));
}

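// Worker loop: blocks on mCondition until doSample() requests a capture, then runs the
// (potentially slow) captureSample() off SurfaceFlinger's main thread.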
// NO_THREAD_SAFETY_ANALYSIS is because std::unique_lock presently lacks thread safety annotations.
void RegionSamplingThread::threadMain() NO_THREAD_SAFETY_ANALYSIS {
    std::unique_lock<std::mutex> lock(mThreadControlMutex);
    while (mRunning) {
        if (mSampleRequested) {
            mSampleRequested = false;
            lock.unlock();
            captureSample();
            lock.lock();
        }
        mCondition.wait(lock, [this]() REQUIRES(mThreadControlMutex) {
            return mSampleRequested || !mRunning;
        });
    }
}

} // namespace android

// TODO(b/129481165): remove the #pragma below and fix conversion issues
#pragma clang diagnostic pop // ignored "-Wconversion"