/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>

#define LOG_TAG "CameraBurstTest"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Timers.h>

#include <cmath>

#include "CameraStreamFixture.h"
#include "TestExtensions.h"

#define CAMERA_FRAME_TIMEOUT    1000000000LL //nsecs (1 sec)
#define CAMERA_HEAP_COUNT       2 //HALBUG: 1 means registerBuffers fails
#define CAMERA_BURST_DEBUGGING  0
#define CAMERA_FRAME_BURST_COUNT 10

/* constants for the exposure test */
#define CAMERA_EXPOSURE_DOUBLE  2
#define CAMERA_EXPOSURE_DOUBLING_THRESHOLD 1.0f
#define CAMERA_EXPOSURE_DOUBLING_COUNT 4
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
#define CAMERA_EXPOSURE_STARTING 100000 // 1/10ms, up to 51.2ms with 10 steps

#define USEC 1000LL        // in ns
#define MSEC 1000000LL     // in ns
#define SEC  1000000000LL  // in ns

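// When CAMERA_BURST_DEBUGGING is 0, 'dout' expands to a dead 'if (0)' branch,
// so the debug stream statements below compile but are never executed.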
#if CAMERA_BURST_DEBUGGING
#define dout std::cout
#else
#define dout if (0) std::cout
#endif

using namespace android;
using namespace android::camera2;

namespace android {
namespace camera2 {
namespace tests {

static CameraStreamParams STREAM_PARAMETERS = {
    /*mFormat*/    CAMERA_EXPOSURE_FORMAT,
    /*mHeapCount*/ CAMERA_HEAP_COUNT
};

class CameraBurstTest
    : public ::testing::Test,
      public CameraStreamFixture {

public:
    CameraBurstTest() : CameraStreamFixture(STREAM_PARAMETERS) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        if (HasFatalFailure()) {
            return;
        }

        CreateStream();
    }

    ~CameraBurstTest() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;

        if (mDevice.get()) {
            mDevice->waitUntilDrained();
        }
        DeleteStream();
    }

    virtual void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;
    }
    virtual void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;
    }

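    // Sums the Y-plane luma of a locked buffer, skipping pixels that are fully
    // black (0) or fully white (255); the skipped counts are returned through
    // *underexposed and *overexposed so callers can average over valid pixels.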
    /* this assumes the format is YUV420sp or flexible YUV */
    long long TotalBrightness(const CpuConsumer::LockedBuffer& imgBuffer,
                              int *underexposed,
                              int *overexposed) const {

        const uint8_t* buf = imgBuffer.data;
        size_t stride = imgBuffer.stride;

        /* iterate over the Y plane only */
        long long acc = 0;

        *underexposed = 0;
        *overexposed = 0;

        for (size_t y = 0; y < imgBuffer.height; ++y) {
            for (size_t x = 0; x < imgBuffer.width; ++x) {
                const uint8_t p = buf[y * stride + x];

                if (p == 0) {
                    if (underexposed) {
                        ++*underexposed;
                    }
                    continue;
                } else if (p == 255) {
                    if (overexposed) {
                        ++*overexposed;
                    }
                    continue;
                }

                acc += p;
            }
        }

        return acc;
    }

    // Parses a comma-separated string list into a Vector
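    // (e.g. parsing "100,200,400" into a Vector<int32_t> yields 100, 200, 400)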
    template<typename T>
    void ParseList(const char *src, Vector<T> &list) {
        std::istringstream s(src);
        while (!s.eof()) {
            char c = s.peek();
            if (c == ',' || c == ' ') {
                s.ignore(1, EOF);
                continue;
            }
            T val;
            s >> val;
            list.push_back(val);
        }
    }

};

TEST_F(CameraBurstTest, ManualExposureControl) {

    TEST_EXTENSION_FORKING_INIT;

    // Range of valid exposure times, in nanoseconds
    int64_t minExp, maxExp;
    {
        camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);

        ASSERT_EQ(2u, exposureTimeRange.count);
        minExp = exposureTimeRange.data.i64[0];
        maxExp = exposureTimeRange.data.i64[1];
    }

    dout << "Min exposure is " << minExp;
    dout << " max exposure is " << maxExp << std::endl;

    // Calculate some set of valid exposure times for each request
    int64_t exposures[CAMERA_FRAME_BURST_COUNT];
    exposures[0] = CAMERA_EXPOSURE_STARTING;
    for (int i = 1; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        exposures[i] = exposures[i-1] * CAMERA_EXPOSURE_DOUBLE;
    }
    // Our calculated exposure times should be in [minExp, maxExp]
    EXPECT_LE(minExp, exposures[0])
        << "Minimum exposure range is too high, wanted at most "
        << exposures[0] << "ns";
    EXPECT_GE(maxExp, exposures[CAMERA_FRAME_BURST_COUNT-1])
        << "Maximum exposure range is too low, wanted at least "
        << exposures[CAMERA_FRAME_BURST_COUNT-1] << "ns";

    // Create a preview request, turning off all 3A
    CameraMetadata previewRequest;
    ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                &previewRequest));
    {
        Vector<uint8_t> outputStreamIds;
        outputStreamIds.push(mStreamId);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                                            outputStreamIds));

        // Disable all 3A routines
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_CONTROL_MODE,
                                            &cmOff, 1));

        int requestId = 1;
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                                            &requestId, 1));

        if (CAMERA_BURST_DEBUGGING) {
            int frameCount = 0;
            ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_FRAME_COUNT,
                                                &frameCount, 1));
        }
    }

    if (CAMERA_BURST_DEBUGGING) {
        previewRequest.dump(STDOUT_FILENO);
    }

    // Submit capture requests
    for (int i = 0; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        CameraMetadata tmpRequest = previewRequest;
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_EXPOSURE_TIME,
                                        &exposures[i], 1));
        ALOGV("Submitting capture request %d with exposure %lld", i,
              exposures[i]);
        dout << "Capture request " << i << " exposure is "
             << (exposures[i]/1e6f) << std::endl;
        ASSERT_EQ(OK, mDevice->capture(tmpRequest));
    }

    dout << "Buffer dimensions " << mWidth << "x" << mHeight << std::endl;

    float brightnesses[CAMERA_FRAME_BURST_COUNT];
    // Get each frame (metadata) and then the buffer. Calculate brightness.
    for (int i = 0; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        ALOGV("Reading capture request %d with exposure %lld", i, exposures[i]);
        ASSERT_EQ(OK, mDevice->waitForNextFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("Reading capture request-1 %d", i);
        CameraMetadata frameMetadata;
        ASSERT_EQ(OK, mDevice->getNextFrame(&frameMetadata));
        ALOGV("Reading capture request-2 %d", i);

        ASSERT_EQ(OK, mFrameListener->waitForFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("We got the frame now");

        CpuConsumer::LockedBuffer imgBuffer;
        ASSERT_EQ(OK, mCpuConsumer->lockNextBuffer(&imgBuffer));

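        // Average brightness over non-clipped pixels only, so fully black or
        // saturated regions do not skew the exposure-doubling comparison.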
        int underexposed, overexposed;
        long long brightness = TotalBrightness(imgBuffer, &underexposed,
                                               &overexposed);
        float avgBrightness = brightness * 1.0f /
                              (mWidth * mHeight - (underexposed + overexposed));
        ALOGV("Total brightness for frame %d was %lld (underexposed %d, "
              "overexposed %d), avg %f", i, brightness, underexposed,
              overexposed, avgBrightness);
        dout << "Average brightness (frame " << i << ") was " << avgBrightness
             << " (underexposed " << underexposed << ", overexposed "
             << overexposed << ")" << std::endl;

        ASSERT_EQ(OK, mCpuConsumer->unlockBuffer(imgBuffer));

        brightnesses[i] = avgBrightness;
    }

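    // With 3A disabled, doubling the exposure time should roughly double the
    // average brightness of the non-clipped pixels; look for the longest run
    // of consecutive frames where that holds.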
    // Calculate max consecutive frame exposure doubling
    float prev = brightnesses[0];
    int doubling_count = 1;
    int max_doubling_count = 0;
    for (int i = 1; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        if (fabs(brightnesses[i] - prev*CAMERA_EXPOSURE_DOUBLE)
            <= CAMERA_EXPOSURE_DOUBLING_THRESHOLD) {
            doubling_count++;
        }
        else {
            max_doubling_count = std::max(max_doubling_count, doubling_count);
            doubling_count = 1;
        }
        prev = brightnesses[i];
    }

    dout << "max doubling count: " << max_doubling_count << std::endl;

    EXPECT_LE(CAMERA_EXPOSURE_DOUBLING_COUNT, max_doubling_count)
        << "average brightness should double at least "
        << CAMERA_EXPOSURE_DOUBLING_COUNT
        << " times over each consecutive frame as the exposure is doubled";
}

/**
 * This test varies exposure time, frame duration, and sensitivity for a
 * burst of captures. It picks values by default, but the selection can be
 * overridden with the environment variables
 * CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES
 * CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS
 * CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES
 * which must all be a list of comma-separated values, and each list must be
 * the same length. In addition, if the environment variable
 * CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES
 * is set to 1, then the YUV buffers are dumped into files named
 * "camera2_test_variable_burst_frame_NNN.yuv"
 *
 * For example:
 * $ setenv CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES 10000000,20000000
 * $ setenv CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS 40000000,40000000
 * $ setenv CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES 200,100
 * $ setenv CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES 1
 * $ /data/nativetest/camera2_test/camera2_test --gtest_filter="*VariableBurst"
 */
TEST_F(CameraBurstTest, VariableBurst) {

    TEST_EXTENSION_FORKING_INIT;

    // Bounds for checking frame duration is within range
    const nsecs_t DURATION_UPPER_BOUND = 10 * MSEC;
    const nsecs_t DURATION_LOWER_BOUND = 20 * MSEC;

    // Threshold for considering two captures to have equivalent exposure value,
    // as a ratio of the smaller EV to the larger EV.
    const float EV_MATCH_BOUND = 0.95;
    // Bound for two captures with equivalent exp values to have the same
    // measured brightness, in 0-255 luminance.
    const float BRIGHTNESS_MATCH_BOUND = 5;

    // Environment variables to look for to override test settings
    const char *expEnv = "CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES";
    const char *durationEnv = "CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS";
    const char *sensitivityEnv = "CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES";
    const char *dumpFrameEnv = "CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES";

    // Range of valid exposure times, in nanoseconds
    int64_t minExp = 0, maxExp = 0;
    // List of valid sensor sensitivities
    Vector<int32_t> sensitivities;
    // Range of valid frame durations, in nanoseconds
    int64_t minDuration = 0, maxDuration = 0;

    {
        camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);

        EXPECT_EQ(2u, exposureTimeRange.count) << "Bad exposure time range tag. "
            "Using default values";
        if (exposureTimeRange.count == 2) {
            minExp = exposureTimeRange.data.i64[0];
            maxExp = exposureTimeRange.data.i64[1];
        }

        EXPECT_LT(0, minExp) << "Minimum exposure time is 0";
        EXPECT_LT(0, maxExp) << "Maximum exposure time is 0";
        EXPECT_LE(minExp, maxExp) << "Minimum exposure is greater than maximum";

        if (minExp == 0) {
            minExp = 1 * MSEC; // Fallback minimum exposure time
        }

        if (maxExp == 0) {
            maxExp = 10 * SEC; // Fallback maximum exposure time
        }
    }

    dout << "Stream size is " << mWidth << " x " << mHeight << std::endl;
    dout << "Valid exposure range is: " <<
        minExp << " - " << maxExp << " ns " << std::endl;

    {
        camera_metadata_ro_entry availableSensitivities =
            GetStaticEntry(ANDROID_SENSOR_INFO_AVAILABLE_SENSITIVITIES);

        EXPECT_LT(0u, availableSensitivities.count) << "No sensitivities listed. "
            "Falling back to default set.";
        sensitivities.appendArray(availableSensitivities.data.i32,
                                  availableSensitivities.count);
        if (availableSensitivities.count == 0) {
            sensitivities.push_back(100);
            sensitivities.push_back(200);
            sensitivities.push_back(400);
            sensitivities.push_back(800);
        }
    }

    dout << "Available sensitivities: ";
    for (size_t i = 0; i < sensitivities.size(); i++) {
        dout << sensitivities[i] << " ";
    }
    dout << std::endl;

    {
        camera_metadata_ro_entry availableProcessedSizes =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);

        camera_metadata_ro_entry availableProcessedMinFrameDurations =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);

        EXPECT_EQ(availableProcessedSizes.count,
                  availableProcessedMinFrameDurations.count * 2) <<
            "The number of minimum frame durations doesn't match the number of "
            "available sizes. Using fallback values";

        if (availableProcessedSizes.count ==
            availableProcessedMinFrameDurations.count * 2) {
            bool gotSize = false;
            for (size_t i = 0; i < availableProcessedSizes.count; i += 2) {
                if (availableProcessedSizes.data.i32[i] == mWidth &&
                    availableProcessedSizes.data.i32[i+1] == mHeight) {
                    gotSize = true;
                    minDuration = availableProcessedMinFrameDurations.data.i64[i/2];
                }
            }
            EXPECT_TRUE(gotSize) << "Can't find stream size in list of "
                "available sizes: " << mWidth << ", " << mHeight;
        }
        if (minDuration == 0) {
            minDuration = 1 * SEC / 30; // Fall back to 30 fps as minimum duration
        }

        ASSERT_LT(0, minDuration);

        camera_metadata_ro_entry maxFrameDuration =
            GetStaticEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION);

        EXPECT_EQ(1u, maxFrameDuration.count) << "No valid maximum frame duration";

        if (maxFrameDuration.count == 1) {
            maxDuration = maxFrameDuration.data.i64[0];
        }

        EXPECT_LT(0, maxDuration) << "Max duration is 0 or not given, using fallback";

        if (maxDuration == 0) {
            maxDuration = 10 * SEC; // Fall back to 10 seconds as max duration
        }

    }
    dout << "Available frame duration range for configured stream size: "
         << minDuration << " - " << maxDuration << " ns" << std::endl;

    // Get environment variables if set
    const char *expVal = getenv(expEnv);
    const char *durationVal = getenv(durationEnv);
    const char *sensitivityVal = getenv(sensitivityEnv);

    bool gotExp = (expVal != NULL);
    bool gotDuration = (durationVal != NULL);
    bool gotSensitivity = (sensitivityVal != NULL);

    // All or none must be provided if using override envs
    ASSERT_TRUE( (gotDuration && gotExp && gotSensitivity) ||
                 (!gotDuration && !gotExp && !gotSensitivity) ) <<
        "Incomplete set of environment variable overrides provided";

    Vector<int64_t> expList, durationList;
    Vector<int32_t> sensitivityList;
    if (gotExp) {
        ParseList(expVal, expList);
        ParseList(durationVal, durationList);
        ParseList(sensitivityVal, sensitivityList);

        ASSERT_TRUE(
            (expList.size() == durationList.size()) &&
            (durationList.size() == sensitivityList.size())) <<
            "Mismatched sizes in env lists, or parse error";

        dout << "Using burst list from environment with " << expList.size() <<
            " captures" << std::endl;
    } else {
        // Create a default set of controls based on the available ranges

        int64_t e;
        int64_t d;
        int32_t s;

        // Exposure ramp

        e = minExp;
        d = minDuration;
        s = sensitivities[0];
        while (e < maxExp) {
            expList.push_back(e);
            durationList.push_back(d);
            sensitivityList.push_back(s);
            e = e * 2;
        }
        e = maxExp;
        expList.push_back(e);
        durationList.push_back(d);
        sensitivityList.push_back(s);

        // Duration ramp

        e = 30 * MSEC;
        d = minDuration;
        s = sensitivities[0];
        while (d < maxDuration) {
            // make sure exposure <= frame duration
            expList.push_back(e > d ? d : e);
            durationList.push_back(d);
            sensitivityList.push_back(s);
            d = d * 2;
        }

        // Sensitivity ramp

        e = 30 * MSEC;
        d = 30 * MSEC;
        d = d > minDuration ? d : minDuration;
        for (size_t i = 0; i < sensitivities.size(); i++) {
            expList.push_back(e);
            durationList.push_back(d);
            sensitivityList.push_back(sensitivities[i]);
        }

        // Constant-EV ramp, duration == exposure

        e = 30 * MSEC; // at ISO 100
        for (size_t i = 0; i < sensitivities.size(); i++) {
            int64_t e_adj = e * 100 / sensitivities[i];
            expList.push_back(e_adj);
            durationList.push_back(e_adj > minDuration ? e_adj : minDuration);
            sensitivityList.push_back(sensitivities[i]);
        }

        dout << "Default burst sequence created with " << expList.size() <<
            " entries" << std::endl;
    }

    // Validate the list, but warn only
    for (size_t i = 0; i < expList.size(); i++) {
        EXPECT_GE(maxExp, expList[i])
            << "Capture " << i << " exposure too long: " << expList[i];
        EXPECT_LE(minExp, expList[i])
            << "Capture " << i << " exposure too short: " << expList[i];
        EXPECT_GE(maxDuration, durationList[i])
            << "Capture " << i << " duration too long: " << durationList[i];
        EXPECT_LE(minDuration, durationList[i])
            << "Capture " << i << " duration too short: " << durationList[i];
        bool validSensitivity = false;
        for (size_t j = 0; j < sensitivities.size(); j++) {
            if (sensitivityList[i] == sensitivities[j]) {
                validSensitivity = true;
                break;
            }
        }
        EXPECT_TRUE(validSensitivity)
            << "Capture " << i << " sensitivity not in list: " << sensitivityList[i];
    }

    // Check if debug yuv dumps are requested

    bool dumpFrames = false;
    {
        const char *frameDumpVal = getenv(dumpFrameEnv);
        if (frameDumpVal != NULL) {
            if (frameDumpVal[0] == '1') dumpFrames = true;
        }
    }

    dout << "Dumping YUV frames " <<
        (dumpFrames ? "enabled, not checking timing" : "disabled") << std::endl;

    // Create a base preview request, turning off all 3A
    CameraMetadata previewRequest;
    ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                &previewRequest));
    {
        Vector<uint8_t> outputStreamIds;
        outputStreamIds.push(mStreamId);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                                            outputStreamIds));

        // Disable all 3A routines
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_CONTROL_MODE,
                                            &cmOff, 1));

        int requestId = 1;
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                                            &requestId, 1));
    }

    // Submit capture requests

    for (size_t i = 0; i < expList.size(); ++i) {
        CameraMetadata tmpRequest = previewRequest;
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_EXPOSURE_TIME,
                                        &expList[i], 1));
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_FRAME_DURATION,
                                        &durationList[i], 1));
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_SENSITIVITY,
                                        &sensitivityList[i], 1));
        ALOGV("Submitting capture %d with exposure %lld, frame duration %lld, sensitivity %d",
              i, expList[i], durationList[i], sensitivityList[i]);
        dout << "Capture request " << i <<
            ": exposure is " << (expList[i]/1e6f) << " ms" <<
            ", frame duration is " << (durationList[i]/1e6f) << " ms" <<
            ", sensitivity is " << sensitivityList[i] <<
            std::endl;
        ASSERT_EQ(OK, mDevice->capture(tmpRequest));
    }

    Vector<float> brightnesses;
    Vector<nsecs_t> captureTimes;
    brightnesses.setCapacity(expList.size());
    captureTimes.setCapacity(expList.size());

    // Get each frame (metadata) and then the buffer. Calculate brightness.
    for (size_t i = 0; i < expList.size(); ++i) {

        ALOGV("Reading request %d", i);
        dout << "Waiting for capture " << i << ": " <<
            " exposure " << (expList[i]/1e6f) << " ms," <<
            " frame duration " << (durationList[i]/1e6f) << " ms," <<
            " sensitivity " << sensitivityList[i] <<
            std::endl;

        // Set wait limit based on expected frame duration, or minimum timeout
        int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
        if (expList[i] * 2 > waitLimit) waitLimit = expList[i] * 2;
        if (durationList[i] * 2 > waitLimit) waitLimit = durationList[i] * 2;

        ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
        ALOGV("Reading capture request-1 %d", i);
        CameraMetadata frameMetadata;
        ASSERT_EQ(OK, mDevice->getNextFrame(&frameMetadata));
        ALOGV("Reading capture request-2 %d", i);

        ASSERT_EQ(OK, mFrameListener->waitForFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("We got the frame now");

        captureTimes.push_back(systemTime());

        CpuConsumer::LockedBuffer imgBuffer;
        ASSERT_EQ(OK, mCpuConsumer->lockNextBuffer(&imgBuffer));

        int underexposed, overexposed;
        float avgBrightness = 0;
        long long brightness = TotalBrightness(imgBuffer, &underexposed,
                                               &overexposed);
        int numValidPixels = mWidth * mHeight - (underexposed + overexposed);
        if (numValidPixels != 0) {
            avgBrightness = brightness * 1.0f / numValidPixels;
        } else if (underexposed < overexposed) {
            avgBrightness = 255;
        }

        ALOGV("Total brightness for frame %d was %lld (underexposed %d, "
              "overexposed %d), avg %f", i, brightness, underexposed,
              overexposed, avgBrightness);
        dout << "Average brightness (frame " << i << ") was " << avgBrightness
             << " (underexposed " << underexposed << ", overexposed "
             << overexposed << ")" << std::endl;
        brightnesses.push_back(avgBrightness);

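        // Exposure value here is exposure_time * sensitivity; captures whose
        // EVs match within EV_MATCH_BOUND should produce brightness values
        // within BRIGHTNESS_MATCH_BOUND of each other.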
        if (i != 0) {
            float prevEv = static_cast<float>(expList[i - 1]) * sensitivityList[i - 1];
            float currentEv = static_cast<float>(expList[i]) * sensitivityList[i];
            float evRatio = (prevEv > currentEv) ? (currentEv / prevEv) :
                (prevEv / currentEv);
            if ( evRatio > EV_MATCH_BOUND ) {
                EXPECT_LT( fabs(brightnesses[i] - brightnesses[i - 1]),
                           BRIGHTNESS_MATCH_BOUND) <<
                    "Capture brightness different from previous, even though "
                    "they have the same EV value. Ev now: " << currentEv <<
                    ", previous: " << prevEv << ". Brightness now: " <<
                    brightnesses[i] << ", previous: " << brightnesses[i-1];
            }
            // Only check timing if not saving to disk, since that slows things
            // down substantially
            if (!dumpFrames) {
                nsecs_t timeDelta = captureTimes[i] - captureTimes[i-1];
                nsecs_t expectedDelta = expList[i] > durationList[i] ?
                    expList[i] : durationList[i];
                EXPECT_LT(timeDelta, expectedDelta + DURATION_UPPER_BOUND) <<
                    "Capture took " << timeDelta << " ns to receive, but expected"
                    " frame duration was " << expectedDelta << " ns.";
                EXPECT_GT(timeDelta, expectedDelta - DURATION_LOWER_BOUND) <<
                    "Capture took " << timeDelta << " ns to receive, but expected"
                    " frame duration was " << expectedDelta << " ns.";
                dout << "Time delta from previous frame: " << timeDelta / 1e6 <<
                    " ms. Expected " << expectedDelta / 1e6 << " ms" << std::endl;
            }
        }

        if (dumpFrames) {
            String8 dumpName =
                String8::format("/data/local/tmp/camera2_test_variable_burst_frame_%03d.yuv", i);
            dout << " Writing YUV dump to " << dumpName << std::endl;
            DumpYuvToFile(dumpName, imgBuffer);
        }

        ASSERT_EQ(OK, mCpuConsumer->unlockBuffer(imgBuffer));
    }

}

}
}
}