/*
 * Copyright 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
#undef LOG_TAG
#define LOG_TAG "RegionSamplingThread"

#include "RegionSamplingThread.h"

#include <cutils/properties.h>
#include <gui/IRegionSamplingListener.h>
#include <utils/Trace.h>
#include <string>

#include <compositionengine/Display.h>
#include <compositionengine/impl/OutputCompositionState.h>
#include "DisplayDevice.h"
#include "Layer.h"
#include "SurfaceFlinger.h"

namespace android {
using namespace std::chrono_literals;

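// Hash functor so that sp<T> can be used as a key in std::unordered_* containers;
// hashes the underlying raw pointer value.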
template <typename T>
struct SpHash {
    size_t operator()(const sp<T>& p) const { return std::hash<T*>()(p.get()); }
};

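// Steps of the sampling state machine, emitted as an ATRACE counter under the
// "LumaSamplingStep" tag so the current state is visible in systrace.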
constexpr auto lumaSamplingStepTag = "LumaSamplingStep";
enum class samplingStep {
    noWorkNeeded,
    idleTimerWaiting,
    waitForQuietFrame,
    waitForZeroPhase,
    waitForSamplePhase,
    sample
};

constexpr auto timeForRegionSampling = 5000000ns;
constexpr auto maxRegionSamplingSkips = 10;
constexpr auto defaultRegionSamplingOffset = -3ms;
constexpr auto defaultRegionSamplingPeriod = 100ms;
constexpr auto defaultRegionSamplingTimerTimeout = 100ms;
// TODO: (b/127403193) duration to string conversion could probably be constexpr
template <typename Rep, typename Per>
inline std::string toNsString(std::chrono::duration<Rep, Per> t) {
    return std::to_string(std::chrono::duration_cast<std::chrono::nanoseconds>(t).count());
}

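// Reads the sampling offset, period, and idle-timer timeout from debug.sf.* system
// properties, falling back to the compile-time defaults. Only the period and timeout
// are validated; the offset may legitimately be negative (the default is -3ms).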
RegionSamplingThread::EnvironmentTimingTunables::EnvironmentTimingTunables() {
    char value[PROPERTY_VALUE_MAX] = {};

    property_get("debug.sf.region_sampling_offset_ns", value,
                 toNsString(defaultRegionSamplingOffset).c_str());
    int const samplingOffsetNsRaw = atoi(value);

    property_get("debug.sf.region_sampling_period_ns", value,
                 toNsString(defaultRegionSamplingPeriod).c_str());
    int const samplingPeriodNsRaw = atoi(value);

    property_get("debug.sf.region_sampling_timer_timeout_ns", value,
                 toNsString(defaultRegionSamplingTimerTimeout).c_str());
    int const samplingTimerTimeoutNsRaw = atoi(value);

    if ((samplingPeriodNsRaw < 0) || (samplingTimerTimeoutNsRaw < 0)) {
        ALOGW("User-specified sampling tuning options nonsensical. Using defaults");
        mSamplingOffset = defaultRegionSamplingOffset;
        mSamplingPeriod = defaultRegionSamplingPeriod;
        mSamplingTimerTimeout = defaultRegionSamplingTimerTimeout;
    } else {
        mSamplingOffset = std::chrono::nanoseconds(samplingOffsetNsRaw);
        mSamplingPeriod = std::chrono::nanoseconds(samplingPeriodNsRaw);
        mSamplingTimerTimeout = std::chrono::nanoseconds(samplingTimerTimeoutNsRaw);
    }
}

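// DispSync listener that alternates its phase offset between zero and the target
// sampling offset. On the vsync that fires at the sampling offset it detaches itself
// and notifies the RegionSamplingThread, so a sample is taken at that phase.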
struct SamplingOffsetCallback : DispSync::Callback {
    SamplingOffsetCallback(RegionSamplingThread& samplingThread, Scheduler& scheduler,
                           std::chrono::nanoseconds targetSamplingOffset)
          : mRegionSamplingThread(samplingThread),
            mScheduler(scheduler),
            mTargetSamplingOffset(targetSamplingOffset) {}

    ~SamplingOffsetCallback() { stopVsyncListener(); }

    SamplingOffsetCallback(const SamplingOffsetCallback&) = delete;
    SamplingOffsetCallback& operator=(const SamplingOffsetCallback&) = delete;

    void startVsyncListener() {
        std::lock_guard lock(mMutex);
        if (mVsyncListening) return;

        mPhaseIntervalSetting = Phase::ZERO;
        mScheduler.withPrimaryDispSync([this](android::DispSync& sync) {
            sync.addEventListener("SamplingThreadDispSyncListener", 0, this, mLastCallbackTime);
        });
        mVsyncListening = true;
    }

    void stopVsyncListener() {
        std::lock_guard lock(mMutex);
        stopVsyncListenerLocked();
    }

private:
    void stopVsyncListenerLocked() /*REQUIRES(mMutex)*/ {
        if (!mVsyncListening) return;

        mScheduler.withPrimaryDispSync([this](android::DispSync& sync) {
            sync.removeEventListener(this, &mLastCallbackTime);
        });
        mVsyncListening = false;
    }

    void onDispSyncEvent(nsecs_t /* when */) final {
        std::unique_lock<decltype(mMutex)> lock(mMutex);

        if (mPhaseIntervalSetting == Phase::ZERO) {
            ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForSamplePhase));
            mPhaseIntervalSetting = Phase::SAMPLING;
            mScheduler.withPrimaryDispSync([this](android::DispSync& sync) {
                sync.changePhaseOffset(this, mTargetSamplingOffset.count());
            });
            return;
        }

        if (mPhaseIntervalSetting == Phase::SAMPLING) {
            mPhaseIntervalSetting = Phase::ZERO;
            mScheduler.withPrimaryDispSync(
                    [this](android::DispSync& sync) { sync.changePhaseOffset(this, 0); });
            stopVsyncListenerLocked();
            lock.unlock();
            mRegionSamplingThread.notifySamplingOffset();
            return;
        }
    }

    RegionSamplingThread& mRegionSamplingThread;
    Scheduler& mScheduler;
    const std::chrono::nanoseconds mTargetSamplingOffset;
    mutable std::mutex mMutex;
    nsecs_t mLastCallbackTime = 0;
    enum class Phase {
        ZERO,
        SAMPLING
    } mPhaseIntervalSetting /*GUARDED_BY(mMutex) macro doesn't work with unique_lock?*/
            = Phase::ZERO;
    bool mVsyncListening /*GUARDED_BY(mMutex)*/ = false;
};

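// The idle timer is wired to checkForStaleLuma() and is reset on every sample; the
// dedicated worker thread runs threadMain() and is joined in the destructor.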
RegionSamplingThread::RegionSamplingThread(SurfaceFlinger& flinger, Scheduler& scheduler,
                                           const TimingTunables& tunables)
      : mFlinger(flinger),
        mScheduler(scheduler),
        mTunables(tunables),
        mIdleTimer(std::chrono::duration_cast<std::chrono::milliseconds>(
                           mTunables.mSamplingTimerTimeout),
                   [] {}, [this] { checkForStaleLuma(); }),
        mPhaseCallback(std::make_unique<SamplingOffsetCallback>(*this, mScheduler,
                                                                tunables.mSamplingOffset)),
        lastSampleTime(0ns) {
    mThread = std::thread([this]() { threadMain(); });
    pthread_setname_np(mThread.native_handle(), "RegionSamplingThread");
    mIdleTimer.start();
}

RegionSamplingThread::RegionSamplingThread(SurfaceFlinger& flinger, Scheduler& scheduler)
      : RegionSamplingThread(flinger, scheduler,
                             TimingTunables{defaultRegionSamplingOffset,
                                            defaultRegionSamplingPeriod,
                                            defaultRegionSamplingTimerTimeout}) {}

RegionSamplingThread::~RegionSamplingThread() {
    mIdleTimer.stop();

    {
        std::lock_guard lock(mThreadControlMutex);
        mRunning = false;
        mCondition.notify_one();
    }

    if (mThread.joinable()) {
        mThread.join();
    }
}

void RegionSamplingThread::addListener(const Rect& samplingArea, const sp<IBinder>& stopLayerHandle,
                                       const sp<IRegionSamplingListener>& listener) {
    wp<Layer> stopLayer = stopLayerHandle != nullptr
            ? static_cast<Layer::Handle*>(stopLayerHandle.get())->owner
            : nullptr;

    sp<IBinder> asBinder = IInterface::asBinder(listener);
    asBinder->linkToDeath(this);
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.emplace(wp<IBinder>(asBinder), Descriptor{samplingArea, stopLayer, listener});
}

void RegionSamplingThread::removeListener(const sp<IRegionSamplingListener>& listener) {
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.erase(wp<IBinder>(IInterface::asBinder(listener)));
}

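// Invoked when the idle timer fires: if sampling work was deferred while content was
// still changing, arm the vsync listener so the deferred sample is taken at the next
// sampling phase.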
void RegionSamplingThread::checkForStaleLuma() {
    std::lock_guard lock(mThreadControlMutex);

    if (mDiscardedFrames > 0) {
        ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForZeroPhase));
        mDiscardedFrames = 0;
        mPhaseCallback->startVsyncListener();
    }
}

void RegionSamplingThread::notifyNewContent() {
    doSample();
}

void RegionSamplingThread::notifySamplingOffset() {
    doSample();
}

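// Requests a sample: rate-limits to one capture per mSamplingPeriod, defers the work
// (for up to maxRegionSamplingSkips frames) when less than timeForRegionSampling
// remains before the next expected vsync, and otherwise wakes the sampling thread.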
void RegionSamplingThread::doSample() {
    std::lock_guard lock(mThreadControlMutex);
    auto now = std::chrono::nanoseconds(systemTime(SYSTEM_TIME_MONOTONIC));
    if (lastSampleTime + mTunables.mSamplingPeriod > now) {
        ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::idleTimerWaiting));
        if (mDiscardedFrames == 0) mDiscardedFrames++;
        return;
    }
    if (mDiscardedFrames < maxRegionSamplingSkips) {
        // If there is relatively little time left for surfaceflinger
        // until the next vsync deadline, defer this sampling work
        // to a later frame, when hopefully there will be more time.
        DisplayStatInfo stats;
        mScheduler.getDisplayStatInfo(&stats);
        if (std::chrono::nanoseconds(stats.vsyncTime) - now < timeForRegionSampling) {
            ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::waitForQuietFrame));
            mDiscardedFrames++;
            return;
        }
    }

    ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::sample));

    mDiscardedFrames = 0;
    lastSampleTime = now;

    mIdleTimer.reset();
    mPhaseCallback->stopVsyncListener();

    mSampleRequested = true;
    mCondition.notify_one();
}

void RegionSamplingThread::binderDied(const wp<IBinder>& who) {
    std::lock_guard lock(mSamplingMutex);
    mDescriptors.erase(who);
}

namespace {
// Using Rec. 709 primaries
float getLuma(float r, float g, float b) {
    constexpr auto rec709_red_primary = 0.2126f;
    constexpr auto rec709_green_primary = 0.7152f;
    constexpr auto rec709_blue_primary = 0.0722f;
    return rec709_red_primary * r + rec709_green_primary * g + rec709_blue_primary * b;
}
} // anonymous namespace

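// Computes the approximate median luma of sample_area within an RGBA8888 pixel buffer:
// each pixel's Rec. 709 luma is bucketed into one of 256 bins, and the result is the
// bin at which the running count crosses half of the sampled pixels (returning early
// as soon as a single bin already holds a majority).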
float sampleArea(const uint32_t* data, int32_t width, int32_t height, int32_t stride,
                 uint32_t orientation, const Rect& sample_area) {
    if (!sample_area.isValid() || (sample_area.getWidth() > width) ||
        (sample_area.getHeight() > height)) {
        ALOGE("invalid sampling region requested");
        return 0.0f;
    }

    // (b/133849373) ROT_90 screencap images produced upside down
    auto area = sample_area;
    if (orientation & ui::Transform::ROT_90) {
        area.top = height - area.top;
        area.bottom = height - area.bottom;
        std::swap(area.top, area.bottom);

        area.left = width - area.left;
        area.right = width - area.right;
        std::swap(area.left, area.right);
    }

    std::array<int32_t, 256> brightnessBuckets = {};
    const int32_t majoritySampleNum = area.getWidth() * area.getHeight() / 2;

    for (int32_t row = area.top; row < area.bottom; ++row) {
        const uint32_t* rowBase = data + row * stride;
        for (int32_t column = area.left; column < area.right; ++column) {
            uint32_t pixel = rowBase[column];
            const float r = (pixel & 0xFF) / 255.0f;
            const float g = ((pixel >> 8) & 0xFF) / 255.0f;
            const float b = ((pixel >> 16) & 0xFF) / 255.0f;
            const uint8_t luma = std::round(getLuma(r, g, b) * 255.0f);
            ++brightnessBuckets[luma];
            if (brightnessBuckets[luma] > majoritySampleNum) return luma / 255.0f;
        }
    }

    int32_t accumulated = 0;
    size_t bucket = 0;
    for (; bucket < brightnessBuckets.size(); bucket++) {
        accumulated += brightnessBuckets[bucket];
        if (accumulated > majoritySampleNum) break;
    }

    return bucket / 255.0f;
}

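// Locks the screenshot buffer for CPU reads and computes one luma value per descriptor,
// translating each sampling area from screen coordinates into buffer coordinates. The
// shared_ptr deleter unlocks the buffer once every area has been sampled.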
std::vector<float> RegionSamplingThread::sampleBuffer(
        const sp<GraphicBuffer>& buffer, const Point& leftTop,
        const std::vector<RegionSamplingThread::Descriptor>& descriptors, uint32_t orientation) {
    void* data_raw = nullptr;
    buffer->lock(GRALLOC_USAGE_SW_READ_OFTEN, &data_raw);
    std::shared_ptr<uint32_t> data(reinterpret_cast<uint32_t*>(data_raw),
                                   [&buffer](auto) { buffer->unlock(); });
    if (!data) return {};

    const int32_t width = buffer->getWidth();
    const int32_t height = buffer->getHeight();
    const int32_t stride = buffer->getStride();
    std::vector<float> lumas(descriptors.size());
    std::transform(descriptors.begin(), descriptors.end(), lumas.begin(),
                   [&](auto const& descriptor) {
                       return sampleArea(data.get(), width, height, stride, orientation,
                                         descriptor.area - leftTop);
                   });
    return lumas;
}

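// Screenshots the union of all registered sampling areas (stopping layer traversal at
// any registered stop layer), samples the result, and reports one luma value to each
// listener whose area intersected a captured layer.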
void RegionSamplingThread::captureSample() {
    ATRACE_CALL();
    std::lock_guard lock(mSamplingMutex);

    if (mDescriptors.empty()) {
        return;
    }

    const auto device = mFlinger.getDefaultDisplayDevice();
    const auto display = device->getCompositionDisplay();
    const auto state = display->getState();
    const auto orientation = static_cast<ui::Transform::orientation_flags>(state.orientation);

    std::vector<RegionSamplingThread::Descriptor> descriptors;
    Region sampleRegion;
    for (const auto& [listener, descriptor] : mDescriptors) {
        sampleRegion.orSelf(descriptor.area);
        descriptors.emplace_back(descriptor);
    }

    const Rect sampledArea = sampleRegion.bounds();

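    // The capture is taken in the display's natural orientation, so rotate the sampling
    // region to match and shift it by the display dimensions where the rotation would
    // otherwise push the coordinates off-screen.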
    auto dx = 0;
    auto dy = 0;
    switch (orientation) {
        case ui::Transform::ROT_90:
            dx = device->getWidth();
            break;
        case ui::Transform::ROT_180:
            dx = device->getWidth();
            dy = device->getHeight();
            break;
        case ui::Transform::ROT_270:
            dy = device->getHeight();
            break;
        default:
            break;
    }

    ui::Transform t(orientation);
    auto screencapRegion = t.transform(sampleRegion);
    screencapRegion = screencapRegion.translate(dx, dy);
    DisplayRenderArea renderArea(device, screencapRegion.bounds(), sampledArea.getWidth(),
                                 sampledArea.getHeight(), ui::Dataspace::V0_SRGB, orientation);

    std::unordered_set<sp<IRegionSamplingListener>, SpHash<IRegionSamplingListener>> listeners;

    auto traverseLayers = [&](const LayerVector::Visitor& visitor) {
        bool stopLayerFound = false;
        auto filterVisitor = [&](Layer* layer) {
            // We don't want to capture any layers beyond the stop layer
            if (stopLayerFound) return;

            // Likewise if we just found a stop layer, set the flag and abort
            for (const auto& [area, stopLayer, listener] : descriptors) {
                if (layer == stopLayer.promote().get()) {
                    stopLayerFound = true;
                    return;
                }
            }

            // Compute the layer's position on the screen
            const Rect bounds = Rect(layer->getBounds());
            const ui::Transform transform = layer->getTransform();
            constexpr bool roundOutwards = true;
            Rect transformed = transform.transform(bounds, roundOutwards);

            // If this layer doesn't intersect with the larger sampledArea, skip capturing it
            Rect ignore;
            if (!transformed.intersect(sampledArea, &ignore)) return;

            // If the layer doesn't intersect a sampling area, skip capturing it
            bool intersectsAnyArea = false;
            for (const auto& [area, stopLayer, listener] : descriptors) {
                if (transformed.intersect(area, &ignore)) {
                    intersectsAnyArea = true;
                    listeners.insert(listener);
                }
            }
            if (!intersectsAnyArea) return;

            ALOGV("Traversing [%s] [%d, %d, %d, %d]", layer->getName().string(), bounds.left,
                  bounds.top, bounds.right, bounds.bottom);
            visitor(layer);
        };
        mFlinger.traverseLayersInDisplay(device, filterVisitor);
    };

    sp<GraphicBuffer> buffer = nullptr;
    if (mCachedBuffer && mCachedBuffer->getWidth() == sampledArea.getWidth() &&
        mCachedBuffer->getHeight() == sampledArea.getHeight()) {
        buffer = mCachedBuffer;
    } else {
        const uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_HW_RENDER;
        buffer = new GraphicBuffer(sampledArea.getWidth(), sampledArea.getHeight(),
                                   PIXEL_FORMAT_RGBA_8888, 1, usage, "RegionSamplingThread");
    }

    bool ignored;
    mFlinger.captureScreenCommon(renderArea, traverseLayers, buffer, false, ignored);

    std::vector<Descriptor> activeDescriptors;
    for (const auto& descriptor : descriptors) {
        if (listeners.count(descriptor.listener) != 0) {
            activeDescriptors.emplace_back(descriptor);
        }
    }

    ALOGV("Sampling %zu descriptors", activeDescriptors.size());
    std::vector<float> lumas =
            sampleBuffer(buffer, sampledArea.leftTop(), activeDescriptors, orientation);
    if (lumas.size() != activeDescriptors.size()) {
        ALOGW("collected %zu median luma values for %zu descriptors", lumas.size(),
              activeDescriptors.size());
        return;
    }

    for (size_t d = 0; d < activeDescriptors.size(); ++d) {
        activeDescriptors[d].listener->onSampleCollected(lumas[d]);
    }

    // Extend the lifetime of mCachedBuffer from the previous frame to here to ensure that:
    // 1) The region sampling thread is the last owner of the buffer, and the freeing of the buffer
    // happens in this thread, as opposed to the main thread.
    // 2) The listener(s) receive their notifications prior to freeing the buffer.
    mCachedBuffer = buffer;
    ATRACE_INT(lumaSamplingStepTag, static_cast<int>(samplingStep::noWorkNeeded));
}

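// Worker loop: sleeps on mCondition until doSample() requests a capture or the
// destructor clears mRunning, and runs captureSample() with the control mutex released.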
// NO_THREAD_SAFETY_ANALYSIS is because std::unique_lock presently lacks thread safety annotations.
void RegionSamplingThread::threadMain() NO_THREAD_SAFETY_ANALYSIS {
    std::unique_lock<std::mutex> lock(mThreadControlMutex);
    while (mRunning) {
        if (mSampleRequested) {
            mSampleRequested = false;
            lock.unlock();
            captureSample();
            lock.lock();
        }
        mCondition.wait(lock, [this]() REQUIRES(mThreadControlMutex) {
            return mSampleRequested || !mRunning;
        });
    }
}

} // namespace android