/*
 * Copyright 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
#undef LOG_TAG
#define LOG_TAG "RegionSamplingThread"

#include "RegionSamplingThread.h"

#include <cutils/properties.h>
#include <gui/IRegionSamplingListener.h>
#include <utils/Trace.h>
#include <string>

#include <compositionengine/Display.h>
#include <compositionengine/impl/OutputCompositionState.h>
#include "DisplayDevice.h"
#include "Layer.h"
#include "SurfaceFlinger.h"

namespace android {
using namespace std::chrono_literals;

template <typename T>
struct SpHash {
    size_t operator()(const sp<T>& p) const { return std::hash<T*>()(p.get()); }
};

constexpr auto lumaSamplingStepTag = "LumaSamplingStep";
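// Each stage of the sampling flow below is written to the "LumaSamplingStep" systrace
// counter (lumaSamplingStepTag above), so the state machine can be followed in a trace.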
enum class samplingStep {
    noWorkNeeded,
    idleTimerWaiting,
    waitForQuietFrame,
    waitForZeroPhase,
    waitForSamplePhase,
    sample
};

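// Sampling timing knobs: timeForRegionSampling is the headroom required before the next
// vsync for sampling to proceed, and after maxRegionSamplingSkips deferrals a sample is
// taken regardless. The default* values seed the debug.sf.region_sampling_* properties
// read by EnvironmentTimingTunables below.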
constexpr auto timeForRegionSampling = 5000000ns;
constexpr auto maxRegionSamplingSkips = 10;
constexpr auto defaultRegionSamplingOffset = -3ms;
constexpr auto defaultRegionSamplingPeriod = 100ms;
constexpr auto defaultRegionSamplingTimerTimeout = 100ms;
// TODO: (b/127403193) duration to string conversion could probably be constexpr
template <typename Rep, typename Per>
inline std::string toNsString(std::chrono::duration<Rep, Per> t) {
    return std::to_string(std::chrono::duration_cast<std::chrono::nanoseconds>(t).count());
}

RegionSamplingThread::EnvironmentTimingTunables::EnvironmentTimingTunables() {
    char value[PROPERTY_VALUE_MAX] = {};

    property_get("debug.sf.region_sampling_offset_ns", value,
                 toNsString(defaultRegionSamplingOffset).c_str());
    int const samplingOffsetNsRaw = atoi(value);

    property_get("debug.sf.region_sampling_period_ns", value,
                 toNsString(defaultRegionSamplingPeriod).c_str());
    int const samplingPeriodNsRaw = atoi(value);

    property_get("debug.sf.region_sampling_timer_timeout_ns", value,
                 toNsString(defaultRegionSamplingTimerTimeout).c_str());
    int const samplingTimerTimeoutNsRaw = atoi(value);

    if ((samplingPeriodNsRaw < 0) || (samplingTimerTimeoutNsRaw < 0)) {
        ALOGW("User-specified sampling tuning options nonsensical. Using defaults");
        mSamplingOffset = defaultRegionSamplingOffset;
        mSamplingPeriod = defaultRegionSamplingPeriod;
        mSamplingTimerTimeout = defaultRegionSamplingTimerTimeout;
    } else {
        mSamplingOffset = std::chrono::nanoseconds(samplingOffsetNsRaw);
        mSamplingPeriod = std::chrono::nanoseconds(samplingPeriodNsRaw);
        mSamplingTimerTimeout = std::chrono::nanoseconds(samplingTimerTimeoutNsRaw);
    }
}

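// Listens to the primary DispSync and toggles between two phase offsets: it registers at
// phase zero, shifts its offset to the target sampling offset on the first vsync callback,
// then triggers the sample and deregisters on the following callback.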
struct SamplingOffsetCallback : DispSync::Callback {
    SamplingOffsetCallback(RegionSamplingThread& samplingThread, Scheduler& scheduler,
                           std::chrono::nanoseconds targetSamplingOffset)
          : mRegionSamplingThread(samplingThread),
            mScheduler(scheduler),
            mTargetSamplingOffset(targetSamplingOffset) {}

    ~SamplingOffsetCallback() { stopVsyncListener(); }

    SamplingOffsetCallback(const SamplingOffsetCallback&) = delete;
    SamplingOffsetCallback& operator=(const SamplingOffsetCallback&) = delete;

    void startVsyncListener() {
        std::lock_guard lock(mMutex);
        if (mVsyncListening) return;

        mPhaseIntervalSetting = Phase::ZERO;
        mScheduler.withPrimaryDispSync([this](android::DispSync& sync) {
            sync.addEventListener("SamplingThreadDispSyncListener", 0, this, mLastCallbackTime);
        });
        mVsyncListening = true;
    }

    void stopVsyncListener() {
        std::lock_guard lock(mMutex);
        stopVsyncListenerLocked();
    }

private:
    void stopVsyncListenerLocked() /*REQUIRES(mMutex)*/ {
        if (!mVsyncListening) return;

        mScheduler.withPrimaryDispSync([this](android::DispSync& sync) {
            sync.removeEventListener(this, &mLastCallbackTime);
        });
        mVsyncListening = false;
    }

    void onDispSyncEvent(nsecs_t /* when */) final {
        std::unique_lock<decltype(mMutex)> lock(mMutex);

        if (mPhaseIntervalSetting == Phase::ZERO) {
            ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForSamplePhase));
            mPhaseIntervalSetting = Phase::SAMPLING;
            mScheduler.withPrimaryDispSync([this](android::DispSync& sync) {
                sync.changePhaseOffset(this, mTargetSamplingOffset.count());
            });
            return;
        }

        if (mPhaseIntervalSetting == Phase::SAMPLING) {
            mPhaseIntervalSetting = Phase::ZERO;
            mScheduler.withPrimaryDispSync(
                    [this](android::DispSync& sync) { sync.changePhaseOffset(this, 0); });
            stopVsyncListenerLocked();
            lock.unlock();
            mRegionSamplingThread.notifySamplingOffset();
            return;
        }
    }

    RegionSamplingThread& mRegionSamplingThread;
    Scheduler& mScheduler;
    const std::chrono::nanoseconds mTargetSamplingOffset;
    mutable std::mutex mMutex;
    nsecs_t mLastCallbackTime = 0;
    enum class Phase {
        ZERO,
        SAMPLING
    } mPhaseIntervalSetting /*GUARDED_BY(mMutex) macro doesn't work with unique_lock?*/
            = Phase::ZERO;
    bool mVsyncListening /*GUARDED_BY(mMutex)*/ = false;
};

RegionSamplingThread::RegionSamplingThread(SurfaceFlinger& flinger, Scheduler& scheduler,
                                           const TimingTunables& tunables)
      : mFlinger(flinger),
        mScheduler(scheduler),
        mTunables(tunables),
        mIdleTimer(std::chrono::duration_cast<std::chrono::milliseconds>(
                           mTunables.mSamplingTimerTimeout),
                   [] {}, [this] { checkForStaleLuma(); }),
        mPhaseCallback(std::make_unique<SamplingOffsetCallback>(*this, mScheduler,
                                                                tunables.mSamplingOffset)),
        lastSampleTime(0ns) {
    mThread = std::thread([this]() { threadMain(); });
    pthread_setname_np(mThread.native_handle(), "RegionSamplingThread");
    mIdleTimer.start();
}

RegionSamplingThread::RegionSamplingThread(SurfaceFlinger& flinger, Scheduler& scheduler)
      : RegionSamplingThread(flinger, scheduler,
                             TimingTunables{defaultRegionSamplingOffset,
                                            defaultRegionSamplingPeriod,
                                            defaultRegionSamplingTimerTimeout}) {}

RegionSamplingThread::~RegionSamplingThread() {
    mIdleTimer.stop();

    {
        std::lock_guard lock(mThreadControlMutex);
        mRunning = false;
        mCondition.notify_one();
    }

    if (mThread.joinable()) {
        mThread.join();
    }
}

void RegionSamplingThread::addListener(const Rect& samplingArea, const sp<IBinder>& stopLayerHandle,
                                       const sp<IRegionSamplingListener>& listener) {
    wp<Layer> stopLayer;
    if (stopLayerHandle != nullptr && stopLayerHandle->localBinder() != nullptr) {
        stopLayer = static_cast<Layer::Handle*>(stopLayerHandle.get())->owner;
    }

    sp<IBinder> asBinder = IInterface::asBinder(listener);
    asBinder->linkToDeath(this);
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.emplace(wp<IBinder>(asBinder), Descriptor{samplingArea, stopLayer, listener});
}

void RegionSamplingThread::removeListener(const sp<IRegionSamplingListener>& listener) {
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.erase(wp<IBinder>(IInterface::asBinder(listener)));
}

void RegionSamplingThread::checkForStaleLuma() {
    std::lock_guard lock(mThreadControlMutex);

    if (mDiscardedFrames > 0) {
        ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForZeroPhase));
        mDiscardedFrames = 0;
        mPhaseCallback->startVsyncListener();
    }
}

void RegionSamplingThread::notifyNewContent() {
    doSample();
}

void RegionSamplingThread::notifySamplingOffset() {
    doSample();
}

void RegionSamplingThread::doSample() {
    std::lock_guard lock(mThreadControlMutex);
    auto now = std::chrono::nanoseconds(systemTime(SYSTEM_TIME_MONOTONIC));
    if (lastSampleTime + mTunables.mSamplingPeriod > now) {
        ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::idleTimerWaiting));
        if (mDiscardedFrames == 0) mDiscardedFrames++;
        return;
    }
    if (mDiscardedFrames < maxRegionSamplingSkips) {
        // If there is relatively little time left for surfaceflinger
        // until the next vsync deadline, defer this sampling work
        // to a later frame, when hopefully there will be more time.
        DisplayStatInfo stats;
        mScheduler.getDisplayStatInfo(&stats);
        if (std::chrono::nanoseconds(stats.vsyncTime) - now < timeForRegionSampling) {
            ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForQuietFrame));
            mDiscardedFrames++;
            return;
        }
    }

    ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::sample));

    mDiscardedFrames = 0;
    lastSampleTime = now;

    mIdleTimer.reset();
    mPhaseCallback->stopVsyncListener();

    mSampleRequested = true;
    mCondition.notify_one();
}

void RegionSamplingThread::binderDied(const wp<IBinder>& who) {
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.erase(who);
}

namespace {
// Using Rec. 709 primaries
inline float getLuma(float r, float g, float b) {
    constexpr auto rec709_red_primary = 0.2126f;
    constexpr auto rec709_green_primary = 0.7152f;
    constexpr auto rec709_blue_primary = 0.0722f;
    return rec709_red_primary * r + rec709_green_primary * g + rec709_blue_primary * b;
}
} // anonymous namespace

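// Returns the approximate median luma of sample_area within the pixel data: pixels are
// binned into 256 luma buckets, the scan returns early if any single bucket holds an
// outright majority, and otherwise the bucket at which the cumulative count crosses half
// the samples is reported.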
float sampleArea(const uint32_t* data, int32_t width, int32_t height, int32_t stride,
                 uint32_t orientation, const Rect& sample_area) {
    if (!sample_area.isValid() || (sample_area.getWidth() > width) ||
        (sample_area.getHeight() > height)) {
        ALOGE("invalid sampling region requested");
        return 0.0f;
    }

    // (b/133849373) ROT_90 screencap images produced upside down
    auto area = sample_area;
    if (orientation & ui::Transform::ROT_90) {
        area.top = height - area.top;
        area.bottom = height - area.bottom;
        std::swap(area.top, area.bottom);

        area.left = width - area.left;
        area.right = width - area.right;
        std::swap(area.left, area.right);
    }

    std::array<int32_t, 256> brightnessBuckets = {};
    const int32_t majoritySampleNum = area.getWidth() * area.getHeight() / 2;

    for (int32_t row = area.top; row < area.bottom; ++row) {
        const uint32_t* rowBase = data + row * stride;
        for (int32_t column = area.left; column < area.right; ++column) {
            uint32_t pixel = rowBase[column];
            const float r = pixel & 0xFF;
            const float g = (pixel >> 8) & 0xFF;
            const float b = (pixel >> 16) & 0xFF;
            const uint8_t luma = std::round(getLuma(r, g, b));
            ++brightnessBuckets[luma];
            if (brightnessBuckets[luma] > majoritySampleNum) return luma / 255.0f;
        }
    }

    int32_t accumulated = 0;
    size_t bucket = 0;
    for (; bucket < brightnessBuckets.size(); bucket++) {
        accumulated += brightnessBuckets[bucket];
        if (accumulated > majoritySampleNum) break;
    }

    return bucket / 255.0f;
}

std::vector<float> RegionSamplingThread::sampleBuffer(
        const sp<GraphicBuffer>& buffer, const Point& leftTop,
        const std::vector<RegionSamplingThread::Descriptor>& descriptors, uint32_t orientation) {
    void* data_raw = nullptr;
    buffer->lock(GRALLOC_USAGE_SW_READ_OFTEN, &data_raw);
    std::shared_ptr<uint32_t> data(reinterpret_cast<uint32_t*>(data_raw),
                                   [&buffer](auto) { buffer->unlock(); });
    if (!data) return {};

    const int32_t width = buffer->getWidth();
    const int32_t height = buffer->getHeight();
    const int32_t stride = buffer->getStride();
    std::vector<float> lumas(descriptors.size());
    std::transform(descriptors.begin(), descriptors.end(), lumas.begin(),
                   [&](auto const& descriptor) {
                       return sampleArea(data.get(), width, height, stride, orientation,
                                         descriptor.area - leftTop);
                   });
    return lumas;
}

void RegionSamplingThread::captureSample() {
    ATRACE_CALL();
    std::lock_guard lock(mSamplingMutex);

    if (mDescriptors.empty()) {
        return;
    }

    const auto device = mFlinger.getDefaultDisplayDevice();
    const auto orientation = [](uint32_t orientation) {
        switch (orientation) {
            default:
            case DisplayState::eOrientationDefault:
                return ui::Transform::ROT_0;
            case DisplayState::eOrientation90:
                return ui::Transform::ROT_90;
            case DisplayState::eOrientation180:
                return ui::Transform::ROT_180;
            case DisplayState::eOrientation270:
                return ui::Transform::ROT_270;
        }
    }(device->getOrientation());

    std::vector<RegionSamplingThread::Descriptor> descriptors;
    Region sampleRegion;
    for (const auto& [listener, descriptor] : mDescriptors) {
        sampleRegion.orSelf(descriptor.area);
        descriptors.emplace_back(descriptor);
    }

    const Rect sampledArea = sampleRegion.bounds();

    auto dx = 0;
    auto dy = 0;
    switch (orientation) {
        case ui::Transform::ROT_90:
            dx = device->getWidth();
            break;
        case ui::Transform::ROT_180:
            dx = device->getWidth();
            dy = device->getHeight();
            break;
        case ui::Transform::ROT_270:
            dy = device->getHeight();
            break;
        default:
            break;
    }

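    // Note: transforming the sample region by the bare orientation transform can place it at
    // negative coordinates; the dx/dy offsets computed above translate it back into the rotated
    // screenshot's coordinate space before its bounds are handed to the render area.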
    ui::Transform t(orientation);
    auto screencapRegion = t.transform(sampleRegion);
    screencapRegion = screencapRegion.translate(dx, dy);
    DisplayRenderArea renderArea(device, screencapRegion.bounds(), sampledArea.getWidth(),
                                 sampledArea.getHeight(), ui::Dataspace::V0_SRGB, orientation);

    std::unordered_set<sp<IRegionSamplingListener>, SpHash<IRegionSamplingListener>> listeners;

    auto traverseLayers = [&](const LayerVector::Visitor& visitor) {
        bool stopLayerFound = false;
        auto filterVisitor = [&](Layer* layer) {
            // We don't want to capture any layers beyond the stop layer
            if (stopLayerFound) return;

            // Likewise if we just found a stop layer, set the flag and abort
            for (const auto& [area, stopLayer, listener] : descriptors) {
                if (layer == stopLayer.promote().get()) {
                    stopLayerFound = true;
                    return;
                }
            }

            // Compute the layer's position on the screen
            const Rect bounds = Rect(layer->getBounds());
            const ui::Transform transform = layer->getTransform();
            constexpr bool roundOutwards = true;
            Rect transformed = transform.transform(bounds, roundOutwards);

            // If this layer doesn't intersect with the larger sampledArea, skip capturing it
            Rect ignore;
            if (!transformed.intersect(sampledArea, &ignore)) return;

            // If the layer doesn't intersect a sampling area, skip capturing it
            bool intersectsAnyArea = false;
            for (const auto& [area, stopLayer, listener] : descriptors) {
                if (transformed.intersect(area, &ignore)) {
                    intersectsAnyArea = true;
                    listeners.insert(listener);
                }
            }
            if (!intersectsAnyArea) return;

            ALOGV("Traversing [%s] [%d, %d, %d, %d]", layer->getName().string(), bounds.left,
                  bounds.top, bounds.right, bounds.bottom);
            visitor(layer);
        };
        mFlinger.traverseLayersInDisplay(device, filterVisitor);
    };

    sp<GraphicBuffer> buffer = nullptr;
    if (mCachedBuffer && mCachedBuffer->getWidth() == sampledArea.getWidth() &&
        mCachedBuffer->getHeight() == sampledArea.getHeight()) {
        buffer = mCachedBuffer;
    } else {
        const uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_HW_RENDER;
        buffer = new GraphicBuffer(sampledArea.getWidth(), sampledArea.getHeight(),
                                   PIXEL_FORMAT_RGBA_8888, 1, usage, "RegionSamplingThread");
    }

    bool ignored;
    mFlinger.captureScreenCommon(renderArea, traverseLayers, buffer, false, ignored);

    std::vector<Descriptor> activeDescriptors;
    for (const auto& descriptor : descriptors) {
        if (listeners.count(descriptor.listener) != 0) {
            activeDescriptors.emplace_back(descriptor);
        }
    }

    ALOGV("Sampling %zu descriptors", activeDescriptors.size());
    std::vector<float> lumas =
            sampleBuffer(buffer, sampledArea.leftTop(), activeDescriptors, orientation);
    if (lumas.size() != activeDescriptors.size()) {
        ALOGW("collected %zu median luma values for %zu descriptors", lumas.size(),
              activeDescriptors.size());
        return;
    }

    for (size_t d = 0; d < activeDescriptors.size(); ++d) {
        activeDescriptors[d].listener->onSampleCollected(lumas[d]);
    }

    // Extend the lifetime of mCachedBuffer from the previous frame to here to ensure that:
    // 1) The region sampling thread is the last owner of the buffer, and the freeing of the buffer
    //    happens in this thread, as opposed to the main thread.
    // 2) The listener(s) receive their notifications prior to freeing the buffer.
    mCachedBuffer = buffer;
    ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::noWorkNeeded));
}

// NO_THREAD_SAFETY_ANALYSIS is because std::unique_lock presently lacks thread safety annotations.
void RegionSamplingThread::threadMain() NO_THREAD_SAFETY_ANALYSIS {
    std::unique_lock<std::mutex> lock(mThreadControlMutex);
    while (mRunning) {
        if (mSampleRequested) {
            mSampleRequested = false;
            lock.unlock();
            captureSample();
            lock.lock();
        }
        mCondition.wait(lock, [this]() REQUIRES(mThreadControlMutex) {
            return mSampleRequested || !mRunning;
        });
    }
}

} // namespace android