/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "CameraMultiStreamTest"
//#define LOG_NDEBUG 0
#include "CameraStreamFixture.h"
#include "TestExtensions.h"

#include <gtest/gtest.h>
#include <utils/Log.h>
#include <utils/StrongPointer.h>
#include <common/CameraDeviceBase.h>
#include <hardware/hardware.h>
#include <hardware/camera2.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/Surface.h>

#define DEFAULT_FRAME_DURATION 33000000LL // 33 ms
#define CAMERA_HEAP_COUNT 1
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
#define CAMERA_DISPLAY_FORMAT HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
#define CAMERA_MULTI_STREAM_DEBUGGING 0
#define CAMERA_FRAME_TIMEOUT 1000000000LL // nsecs (1 sec)
#define PREVIEW_RENDERING_TIME_INTERVAL 200000 // in microseconds (200 ms)
/* Constants for display */
#define DISPLAY_BUFFER_HEIGHT 1024
#define DISPLAY_BUFFER_WIDTH 1024
#define DISPLAY_BUFFER_FORMAT PIXEL_FORMAT_RGB_888

// This test intends to use large preview sizes, but no bigger than 1080p.
#define PREVIEW_WIDTH_CAP 1920
#define PREVIEW_HEIGHT_CAP 1080
// This test intends to use metering burst sizes no bigger than VGA (640x480).
#define METERING_WIDTH_CAP 640
#define METERING_HEIGHT_CAP 480

#define EXP_WAIT_MULTIPLIER 2

namespace android {
namespace camera2 {
namespace tests {

static const CameraStreamParams DEFAULT_STREAM_PARAMETERS = {
    /*mFormat*/    CAMERA_EXPOSURE_FORMAT,
    /*mHeapCount*/ CAMERA_HEAP_COUNT
};

static const CameraStreamParams DISPLAY_STREAM_PARAMETERS = {
    /*mFormat*/    CAMERA_DISPLAY_FORMAT,
    /*mHeapCount*/ CAMERA_HEAP_COUNT
};

class CameraMultiStreamTest
    : public ::testing::Test,
      public CameraStreamFixture {

public:
    CameraMultiStreamTest() : CameraStreamFixture(DEFAULT_STREAM_PARAMETERS) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        if (HasFatalFailure()) {
            return;
        }
        /**
         * Don't create a default stream here; each test is in charge of
         * creating its own streams.
         */
    }

    ~CameraMultiStreamTest() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;
    }

    sp<SurfaceComposerClient> mComposerClient;
    sp<SurfaceControl> mSurfaceControl;

    void CreateOnScreenSurface(sp<ANativeWindow>& surface) {
        mComposerClient = new SurfaceComposerClient;
        ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());

        mSurfaceControl = mComposerClient->createSurface(
                String8("CameraMultiStreamTest StreamingImage Surface"),
                DISPLAY_BUFFER_WIDTH, DISPLAY_BUFFER_HEIGHT,
                DISPLAY_BUFFER_FORMAT, 0);

        ASSERT_NE((void*)NULL, mSurfaceControl.get());
        ASSERT_TRUE(mSurfaceControl->isValid());

        SurfaceComposerClient::openGlobalTransaction();
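        // Max out the z-order so the test surface is composited on top.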
        ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
        ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
        SurfaceComposerClient::closeGlobalTransaction();

        surface = mSurfaceControl->getSurface();

        ASSERT_NE((void*)NULL, surface.get());
    }

    struct Size {
        int32_t width;
        int32_t height;
    };

    // Select minimal size by number of pixels.
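    // The data array is a flattened list of (width, height) pairs:
    // {w0, h0, w1, h1, ...}, as reported in the static metadata.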
    void GetMinSize(const int32_t* data, size_t count,
            Size* min, int32_t* idx) {
        ASSERT_NE((int32_t*)NULL, data);
        int32_t minIdx = 0;
        int32_t minSize = INT_MAX, tempSize;
        for (size_t i = 0; i < count; i += 2) {
            tempSize = data[i] * data[i+1];
            if (minSize > tempSize) {
                minSize = tempSize;
                minIdx = i;
            }
        }
        min->width = data[minIdx];
        min->height = data[minIdx + 1];
        *idx = minIdx;
    }

    // Select maximal size by number of pixels.
    void GetMaxSize(const int32_t* data, size_t count,
            Size* max, int32_t* idx) {
        ASSERT_NE((int32_t*)NULL, data);
        int32_t maxIdx = 0;
        int32_t maxSize = INT_MIN, tempSize;
        for (size_t i = 0; i < count; i += 2) {
            tempSize = data[i] * data[i+1];
            if (maxSize < tempSize) {
                maxSize = tempSize;
                maxIdx = i;
            }
        }
        max->width = data[maxIdx];
        max->height = data[maxIdx + 1];
        *idx = maxIdx;
    }

    // Cap size by number of pixels.
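    // For example, with a {1920, 1080} cap, a {2592, 1944} input is capped
    // to {1920, 1080}, while a {1280, 720} input is passed through.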
    Size CapSize(Size cap, Size input) {
        if (input.width * input.height > cap.width * cap.height) {
            return cap;
        }
        return input;
    }

    struct CameraStream : public RefBase {

    public:
        /**
         * Only initialize the member variables here; do the ASSERT checks
         * in SetUp. To make this stream useful, SetUp must be called
         * before the stream is used.
         */
        CameraStream(
                int width,
                int height,
                const sp<CameraDeviceBase>& device,
                CameraStreamParams param, sp<ANativeWindow> surface,
                bool useCpuConsumer)
            : mDevice(device),
              mWidth(width),
              mHeight(height) {
            mFormat = param.mFormat;
            if (useCpuConsumer) {
                sp<BufferQueue> bq = new BufferQueue();
                mCpuConsumer = new CpuConsumer(bq, param.mHeapCount);
                mCpuConsumer->setName(String8(
                        "CameraMultiStreamTest::mCpuConsumer"));
                mNativeWindow = new Surface(bq);
            } else {
                // Render the stream to the screen.
                mCpuConsumer = NULL;
                mNativeWindow = surface;
            }

            mFrameListener = new FrameListener();
            if (mCpuConsumer != 0) {
                mCpuConsumer->setFrameAvailableListener(mFrameListener);
            }
        }

        /**
         * Actually create the camera stream and do the ASSERT checks,
         * since we cannot do them in the constructor.
         */
        void SetUp() {
            ASSERT_EQ(OK,
                    mDevice->createStream(mNativeWindow,
                        mWidth, mHeight, mFormat, /*size (for jpegs)*/0,
                        &mStreamId));

            ASSERT_NE(-1, mStreamId);
        }

        int GetStreamId() { return mStreamId; }
        sp<CpuConsumer> GetConsumer() { return mCpuConsumer; }
        sp<FrameListener> GetFrameListener() { return mFrameListener; }

    protected:
        ~CameraStream() {
            if (mDevice.get()) {
                mDevice->waitUntilDrained();
                mDevice->deleteStream(mStreamId);
            }
            // Clear the producer before the consumer.
            mNativeWindow.clear();
            mCpuConsumer.clear();
        }

    private:
        sp<FrameListener> mFrameListener;
        sp<CpuConsumer> mCpuConsumer;
        sp<ANativeWindow> mNativeWindow;
        sp<CameraDeviceBase> mDevice;
        int mStreamId;
        int mWidth;
        int mHeight;
        int mFormat;
    };
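    /**
     * Typical CameraStream usage (see the MultiBurst test below):
     *   sp<CameraStream> stream = CreateStream(width, height, mDevice);
     *   ASSERT_NE((void*)NULL, stream.get());
     *   ASSERT_NO_FATAL_FAILURE(stream->SetUp());
     */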
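    // Capture-result accessors: each returns the single metadata value,
    // or -1 (with an EXPECT failure) when the entry is missing.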
    int64_t GetExposureValue(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_EXPOSURE_TIME);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i64[0];
        }
        return -1;
    }

    int32_t GetSensitivity(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_SENSITIVITY);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i32[0];
        }
        return -1;
    }

    int64_t GetFrameDuration(const CameraMetadata& metaData) {
        camera_metadata_ro_entry_t entry =
                metaData.find(ANDROID_SENSOR_FRAME_DURATION);
        EXPECT_EQ(1u, entry.count);
        if (entry.count == 1) {
            return entry.data.i64[0];
        }
        return -1;
    }

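    /**
     * Build the three requests used by the test: a preview request with
     * default (auto) settings, plus metering and capture requests that
     * disable 3A, noise reduction, and edge enhancement for full manual
     * sensor control.
     */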
    void CreateRequests(CameraMetadata& previewRequest,
            CameraMetadata& meteringRequest,
            CameraMetadata& captureRequest,
            int previewStreamId,
            int meteringStreamId,
            int captureStreamId) {
        int32_t requestId = 1;
        Vector<uint8_t> previewStreamIds;
        previewStreamIds.push(previewStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                &previewRequest));
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                previewStreamIds));
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                &requestId, 1));

        // Create the metering request with manual settings.
        // Manual control: disable 3A, noise reduction, and edge sharpening.
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        uint8_t nrOff = static_cast<uint8_t>(ANDROID_NOISE_REDUCTION_MODE_OFF);
        uint8_t sharpOff = static_cast<uint8_t>(ANDROID_EDGE_MODE_OFF);
        Vector<uint8_t> meteringStreamIds;
        meteringStreamIds.push(meteringStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(
                CAMERA2_TEMPLATE_PREVIEW,
                &meteringRequest));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_REQUEST_OUTPUT_STREAMS,
                meteringStreamIds));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_REQUEST_ID,
                &requestId, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_NOISE_REDUCTION_MODE,
                &nrOff, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_EDGE_MODE,
                &sharpOff, 1));

        // Create the capture request with manual settings.
        requestId++;
        Vector<uint8_t> captureStreamIds;
        captureStreamIds.push(captureStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(
                CAMERA2_TEMPLATE_PREVIEW,
                &captureRequest));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_REQUEST_OUTPUT_STREAMS,
                captureStreamIds));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_REQUEST_ID,
                &requestId, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_NOISE_REDUCTION_MODE,
                &nrOff, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_EDGE_MODE,
                &sharpOff, 1));
    }

    sp<CameraStream> CreateStream(
            int width,
            int height,
            const sp<CameraDeviceBase>& device,
            CameraStreamParams param = DEFAULT_STREAM_PARAMETERS,
            sp<ANativeWindow> surface = NULL,
            bool useCpuConsumer = true) {
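        // Resolve any CAMERA_STREAM_AUTO_* placeholder format via the
        // fixture's MapAutoFormat helper before creating the stream.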
        param.mFormat = MapAutoFormat(param.mFormat);
        return new CameraStream(width, height, device,
                param, surface, useCpuConsumer);
    }

    void CaptureBurst(CameraMetadata& request, size_t requestCount,
            const Vector<int64_t>& exposures,
            const Vector<int32_t>& sensitivities,
            const sp<CameraStream>& stream,
            int64_t minFrameDuration) {
        ASSERT_EQ(OK, request.update(ANDROID_SENSOR_FRAME_DURATION,
                &minFrameDuration, 1));
        // Submit a series of requests with the specified exposure/gain values.
        for (size_t i = 0; i < requestCount; i++) {
            ASSERT_EQ(OK, request.update(ANDROID_SENSOR_EXPOSURE_TIME,
                    &exposures[i], 1));
            ASSERT_EQ(OK, request.update(ANDROID_SENSOR_SENSITIVITY,
                    &sensitivities[i], 1));
            ASSERT_EQ(OK, mDevice->capture(request));
            ALOGV("Submitting capture %zu with exposure %lld, sensitivity %d",
                    i, exposures[i], sensitivities[i]);
            if (CAMERA_MULTI_STREAM_DEBUGGING) {
                request.dump(STDOUT_FILENO);
            }
        }
        // Get the capture burst results.
        Vector<nsecs_t> captureBurstTimes;
        sp<CpuConsumer> consumer = stream->GetConsumer();
        sp<FrameListener> listener = stream->GetFrameListener();

        // Set the wait limit based on the expected frame duration.
        int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
        for (size_t i = 0; i < requestCount; i++) {
            ALOGV("Reading request result %zu", i);

            /**
             * Raise the timeout to be at least twice as long as the
             * exposure time, to avoid a false positive when the timeout
             * is too short.
             */
            if ((exposures[i] * EXP_WAIT_MULTIPLIER) > waitLimit) {
                waitLimit = exposures[i] * EXP_WAIT_MULTIPLIER;
            }

            ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
            CameraMetadata frameMetadata;
            ASSERT_EQ(OK, mDevice->getNextFrame(&frameMetadata));
            ALOGV("Got capture burst result for request %zu", i);
            // Validate the capture result.
            if (CAMERA_MULTI_STREAM_DEBUGGING) {
                frameMetadata.dump(STDOUT_FILENO);
            }

            // TODO: Revisit this to figure out an accurate margin.
            EXPECT_EQ(sensitivities[i], GetSensitivity(frameMetadata));
            EXPECT_EQ(exposures[i], GetExposureValue(frameMetadata));

            ASSERT_EQ(OK, listener->waitForFrame(waitLimit));
            captureBurstTimes.push_back(systemTime());
            CpuConsumer::LockedBuffer imgBuffer;
            ASSERT_EQ(OK, consumer->lockNextBuffer(&imgBuffer));
            ALOGV("Got capture buffer for request %zu", i);

            /**
             * TODO: Validate the capture buffer. The current brightness
             * calculation is too slow, and it doesn't account for
             * saturation effects, which are quite common since we sweep
             * over a significant range of EVs. We need to figure out a
             * reliable way to validate the buffer data.
             */

            ASSERT_EQ(OK, consumer->unlockBuffer(imgBuffer));
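            // Consecutive buffers cannot arrive faster than the exposure
            // time of the later capture.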
            if (i > 0) {
                nsecs_t timeDelta =
                        captureBurstTimes[i] - captureBurstTimes[i-1];
                EXPECT_GE(timeDelta, exposures[i]);
            }
        }
    }

    /**
     * Intentionally shadow the default CreateStream function from the
     * base class, because we don't want any test in this class to use
     * the default stream creation function.
     */
    void CreateStream() {
    }
};

/**
 * This test exercises the multiple-stream use case. It tests three
 * streams:
 *
 * 1. Preview stream, with a large size that is no bigger than 1080p.
 *    We render this stream to the display and vary the exposure time
 *    for a certain amount of time, for visualization purposes.
 *
 * 2. Metering stream, with a small size that is no bigger than VGA.
 *    A burst is issued for different exposure times and analog gains
 *    (or analog-gain-implemented sensitivities), then the capture
 *    result metadata is checked against the request.
 *
 * 3. Capture stream, which is basically similar to the metering stream,
 *    but has a large size, which is the largest supported JPEG capture
 *    size.
 *
 * This multiple-stream test checks whether the HAL supports:
 *
 * 1. Multiple concurrent streams like the above; the HAL should support
 *    at least 3 streams concurrently: one preview stream and two other
 *    YUV streams.
 *
 * 2. Manual control (gain/exposure) of a multiple-request burst capture.
 */
TEST_F(CameraMultiStreamTest, MultiBurst) {

    TEST_EXTENSION_FORKING_INIT;

    camera_metadata_ro_entry availableProcessedSizes =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
    ASSERT_EQ(0u, availableProcessedSizes.count % 2);
    ASSERT_GE(availableProcessedSizes.count, 2u);
    camera_metadata_ro_entry availableProcessedMinFrameDurations =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
    EXPECT_EQ(availableProcessedSizes.count,
            availableProcessedMinFrameDurations.count * 2);

    camera_metadata_ro_entry availableJpegSizes =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
    ASSERT_EQ(0u, availableJpegSizes.count % 2);
    ASSERT_GE(availableJpegSizes.count, 2u);

    // Find the right sizes for the preview, metering, and capture streams.
    // This assumes at least 2 entries in availableProcessedSizes.
    int64_t minFrameDuration = DEFAULT_FRAME_DURATION;
    Size processedMinSize, processedMaxSize, jpegMaxSize;
    const int32_t* data = availableProcessedSizes.data.i32;
    size_t count = availableProcessedSizes.count;

    int32_t minIdx, maxIdx;
    GetMinSize(data, count, &processedMinSize, &minIdx);
    GetMaxSize(data, count, &processedMaxSize, &maxIdx);
    ALOGV("Found processed max size: %dx%d, min size = %dx%d",
            processedMaxSize.width, processedMaxSize.height,
            processedMinSize.width, processedMinSize.height);

    if (availableProcessedSizes.count ==
            availableProcessedMinFrameDurations.count * 2) {
        minFrameDuration =
                availableProcessedMinFrameDurations.data.i64[maxIdx / 2];
    }

    EXPECT_GT(minFrameDuration, 0);

    if (minFrameDuration <= 0) {
        minFrameDuration = DEFAULT_FRAME_DURATION;
    }

    ALOGV("targeted minimal frame duration is: %lld ns", minFrameDuration);

    data = &(availableJpegSizes.data.i32[0]);
    count = availableJpegSizes.count;
    GetMaxSize(data, count, &jpegMaxSize, &maxIdx);
    ALOGV("Found Jpeg size max idx = %d", maxIdx);

    // The max JPEG size should also be available among the processed
    // sizes. Use it for the YUV capture stream anyway.
    EXPECT_EQ(processedMaxSize.width, jpegMaxSize.width);
    EXPECT_EQ(processedMaxSize.height, jpegMaxSize.height);

    // Cap the preview size.
    Size previewLimit = { PREVIEW_WIDTH_CAP, PREVIEW_HEIGHT_CAP };
    // FIXME: need to make sure previewLimit is supported by the HAL.
    Size previewSize = CapSize(previewLimit, processedMaxSize);
    // Cap the metering size to VGA (VGA is mandated by the CDD).
    Size meteringLimit = { METERING_WIDTH_CAP, METERING_HEIGHT_CAP };
    Size meteringSize = CapSize(meteringLimit, processedMinSize);
    // The capture stream uses the largest supported JPEG size.
    ALOGV("preview size: %dx%d, metering size: %dx%d, capture size: %dx%d",
            previewSize.width, previewSize.height,
            meteringSize.width, meteringSize.height,
            jpegMaxSize.width, jpegMaxSize.height);

    // Create streams.
    // Preview stream: large resolution (capped at 1080p), rendered on screen.
    sp<CameraStream> previewStream;
    {
        sp<ANativeWindow> surface;
        ASSERT_NO_FATAL_FAILURE(CreateOnScreenSurface(/*out*/surface));
        previewStream = CreateStream(
                previewSize.width,
                previewSize.height,
                mDevice,
                DISPLAY_STREAM_PARAMETERS,
                surface,
                false);
        ASSERT_NE((void*)NULL, previewStream.get());
        ASSERT_NO_FATAL_FAILURE(previewStream->SetUp());
    }
    // Metering burst stream: small-resolution YUV stream.
    sp<CameraStream> meteringStream =
            CreateStream(
                meteringSize.width,
                meteringSize.height,
                mDevice);
    ASSERT_NE((void*)NULL, meteringStream.get());
    ASSERT_NO_FATAL_FAILURE(meteringStream->SetUp());
    // Capture burst stream: full-resolution YUV stream.
    sp<CameraStream> captureStream =
            CreateStream(
                jpegMaxSize.width,
                jpegMaxSize.height,
                mDevice);
    ASSERT_NE((void*)NULL, captureStream.get());
    ASSERT_NO_FATAL_FAILURE(captureStream->SetUp());

    // Create the preview, metering, and capture requests.
    CameraMetadata previewRequest, meteringRequest, captureRequest;
    ASSERT_NO_FATAL_FAILURE(CreateRequests(previewRequest, meteringRequest,
            captureRequest, previewStream->GetStreamId(),
            meteringStream->GetStreamId(), captureStream->GetStreamId()));

    // Start preview.
    if (CAMERA_MULTI_STREAM_DEBUGGING) {
        previewRequest.dump(STDOUT_FILENO);
    }

    // Generate the exposure and sensitivity lists.
    camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);
    ASSERT_EQ(2u, exposureTimeRange.count);
    int64_t minExp = exposureTimeRange.data.i64[0];
    int64_t maxExp = exposureTimeRange.data.i64[1];
    ASSERT_GT(maxExp, minExp);

    camera_metadata_ro_entry sensitivityRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
    ASSERT_EQ(2u, sensitivityRange.count);
    int32_t minSensitivity = sensitivityRange.data.i32[0];
    int32_t maxSensitivity = sensitivityRange.data.i32[1];
    camera_metadata_ro_entry maxAnalogSenEntry =
            GetStaticEntry(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY);
    EXPECT_EQ(1u, maxAnalogSenEntry.count);
    int32_t maxAnalogSensitivity = maxAnalogSenEntry.data.i32[0];
    EXPECT_LE(maxAnalogSensitivity, maxSensitivity);
    // Only test the sensitivities implemented by analog gain.
    if (maxAnalogSensitivity > maxSensitivity) {
        // Fall back to maxSensitivity.
        maxAnalogSensitivity = maxSensitivity;
    }

    // Build the sensitivity list; only include sensitivities that are
    // implemented purely by analog gain, if possible.
    Vector<int32_t> sensitivities;
    Vector<int64_t> exposures;
    count = (maxAnalogSensitivity - minSensitivity + 99) / 100;
    sensitivities.push_back(minSensitivity);
    for (size_t i = 1; i < count; i++) {
        sensitivities.push_back(minSensitivity + i * 100);
    }
    sensitivities.push_back(maxAnalogSensitivity);
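    // For example, minSensitivity = 100 and maxAnalogSensitivity = 800 give
    // count = (800 - 100 + 99) / 100 = 7 and the list
    // {100, 200, 300, 400, 500, 600, 700, 800}.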
    ALOGV("Sensitivity Range: min=%d, max=%d", minSensitivity,
            maxAnalogSensitivity);
    int64_t exp = minExp;
    while (exp < maxExp) {
        exposures.push_back(exp);
        exp *= 2;
    }
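    // Exposure doubles from minExp up to (but not including) maxExp;
    // e.g. a 1 ms to 30 ms range yields {1, 2, 4, 8, 16} ms.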
    // Sweep the exposure value for preview, just for visual inspection.
    uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
    for (size_t i = 0; i < exposures.size(); i++) {
        ASSERT_EQ(OK, previewRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, previewRequest.update(
                ANDROID_SENSOR_EXPOSURE_TIME,
                &exposures[i], 1));
        ALOGV("Submitting preview request %zu with exposure %lld",
                i, exposures[i]);

        ASSERT_EQ(OK, mDevice->setStreamingRequest(previewRequest));

        // Let the preview run 200 ms on screen for each exposure time.
        usleep(PREVIEW_RENDERING_TIME_INTERVAL);
    }

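    // Pair exposures and sensitivities one-to-one; the burst length is the
    // shorter of the two lists.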
    size_t requestCount = sensitivities.size();
    if (requestCount > exposures.size()) {
        requestCount = exposures.size();
    }

    /**
     * Submit the metering burst, with the frame duration set to the
     * minimal possible value so the captures run as fast as possible.
     * The HAL should adjust the frame duration to the minimal necessary
     * value to support the requested exposure when the exposure is
     * longer than the frame duration.
     */
    CaptureBurst(meteringRequest, requestCount, exposures, sensitivities,
            meteringStream, minFrameDuration);

    /**
     * Submit the capture burst, with the frame duration set to the
     * minimal possible value so the captures run as fast as possible.
     * The HAL should adjust the frame duration to the minimal necessary
     * value to support the requested exposure when the exposure is
     * longer than the frame duration.
     */
    CaptureBurst(captureRequest, requestCount, exposures, sensitivities,
            captureStream, minFrameDuration);

    ASSERT_EQ(OK, mDevice->clearStreamingRequest());
}

} // namespace tests
} // namespace camera2
} // namespace android