blob: c7db6799fee85bf2091ba83a46def2faf47b213d [file] [log] [blame]
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#define LOG_TAG "CameraMultiStreamTest"
18//#define LOG_NDEBUG 0
19#include "CameraStreamFixture.h"
20#include "TestExtensions.h"
21
22#include <gtest/gtest.h>
23#include <utils/Log.h>
24#include <utils/StrongPointer.h>
25#include <common/CameraDeviceBase.h>
26#include <hardware/hardware.h>
27#include <hardware/camera2.h>
28#include <gui/SurfaceComposerClient.h>
29#include <gui/Surface.h>
30
#define DEFAULT_FRAME_DURATION 33000000LL // 33ms
#define CAMERA_HEAP_COUNT 1
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
#define CAMERA_DISPLAY_FORMAT HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
#define CAMERA_MULTI_STREAM_DEBUGGING 0
#define CAMERA_FRAME_TIMEOUT 1000000000LL // nsecs (1 secs)
#define PREVIEW_RENDERING_TIME_INTERVAL 200000 // in unit of us, 200ms
// 1% tolerance margin for exposure sanity check against metadata
#define TOLERANCE_MARGIN_METADATA 0.01
// 5% tolerance margin for exposure sanity check against capture times
#define TOLERANCE_MARGIN_CAPTURE 0.05
/* constants for display */
#define DISPLAY_BUFFER_HEIGHT 1024
#define DISPLAY_BUFFER_WIDTH 1024
#define DISPLAY_BUFFER_FORMAT PIXEL_FORMAT_RGB_888

// This test intends to test large preview size but less than 1080p.
#define PREVIEW_WIDTH_CAP 1920
#define PREVIEW_HEIGHT_CAP 1080
// This test intends to test small metering burst size that is less than 640x480
#define METERING_WIDTH_CAP 640
#define METERING_HEIGHT_CAP 480

// Multiplier applied to the requested exposure time when computing the
// per-frame wait timeout in CaptureBurst (timeout >= 2x exposure).
#define EXP_WAIT_MULTIPLIER 2
56namespace android {
57namespace camera2 {
58namespace tests {
59
// Parameters for the CPU-consumed streams (metering/capture bursts):
// the fixture maps CAMERA_STREAM_AUTO_CPU_FORMAT to a CPU-readable format.
static const CameraStreamParams DEFAULT_STREAM_PARAMETERS = {
    /*mFormat*/ CAMERA_EXPOSURE_FORMAT,
    /*mHeapCount*/ CAMERA_HEAP_COUNT
};

// Parameters for the on-screen preview stream: implementation-defined
// pixel format so gralloc/HAL can pick the best layout for display.
static const CameraStreamParams DISPLAY_STREAM_PARAMETERS = {
    /*mFormat*/ CAMERA_DISPLAY_FORMAT,
    /*mHeapCount*/ CAMERA_HEAP_COUNT
};
69
70class CameraMultiStreamTest
71 : public ::testing::Test,
72 public CameraStreamFixture {
73
74public:
    // Sets up the base fixture only; deliberately does NOT create the
    // fixture's default stream (see comment below).
    CameraMultiStreamTest() : CameraStreamFixture(DEFAULT_STREAM_PARAMETERS) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        if (HasFatalFailure()) {
            return;
        }
        /**
         * Don't create default stream, each test is in charge of creating
         * its own streams.
         */
    }
86
    // Test-extension hook only; stream teardown is handled by the
    // CameraStream RefBase destructors.
    ~CameraMultiStreamTest() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;
    }
90
91 sp<SurfaceComposerClient> mComposerClient;
92 sp<SurfaceControl> mSurfaceControl;
93
    // Creates a SurfaceFlinger-backed on-screen surface (topmost layer) and
    // returns its ANativeWindow via |surface|. Must be wrapped in
    // ASSERT_NO_FATAL_FAILURE by callers since it uses ASSERT_* internally.
    void CreateOnScreenSurface(sp<ANativeWindow>& surface) {
        mComposerClient = new SurfaceComposerClient;
        ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());

        // NOTE(review): createSurface takes (w, h); HEIGHT is passed in the
        // width slot here. Harmless since both constants are 1024 — confirm
        // the intended order before changing either constant.
        mSurfaceControl = mComposerClient->createSurface(
                String8("CameraMultiStreamTest StreamingImage Surface"),
                DISPLAY_BUFFER_HEIGHT, DISPLAY_BUFFER_WIDTH,
                DISPLAY_BUFFER_FORMAT, 0);

        ASSERT_NE((void*)NULL, mSurfaceControl.get());
        ASSERT_TRUE(mSurfaceControl->isValid());

        // Put the layer on top (max Z) and make it visible.
        SurfaceComposerClient::openGlobalTransaction();
        ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
        ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
        SurfaceComposerClient::closeGlobalTransaction();

        surface = mSurfaceControl->getSurface();

        ASSERT_NE((void*)NULL, surface.get());
    }
115
    // Simple width/height pair used for stream-size selection below.
    struct Size {
        int32_t width;
        int32_t height;
    };
120
121 // Select minimal size by number of pixels.
122 void GetMinSize(const int32_t* data, size_t count,
123 Size* min, int32_t* idx) {
124 ASSERT_NE((int32_t*)NULL, data);
125 int32_t minIdx = 0;
126 int32_t minSize = INT_MAX, tempSize;
127 for (size_t i = 0; i < count; i+=2) {
128 tempSize = data[i] * data[i+1];
129 if (minSize > tempSize) {
130 minSize = tempSize;
131 minIdx = i;
132 }
133 }
134 min->width = data[minIdx];
135 min->height = data[minIdx + 1];
136 *idx = minIdx;
137 }
138
139 // Select maximal size by number of pixels.
140 void GetMaxSize(const int32_t* data, size_t count,
141 Size* max, int32_t* idx) {
142 ASSERT_NE((int32_t*)NULL, data);
143 int32_t maxIdx = 0;
144 int32_t maxSize = INT_MIN, tempSize;
145 for (size_t i = 0; i < count; i+=2) {
146 tempSize = data[i] * data[i+1];
147 if (maxSize < tempSize) {
148 maxSize = tempSize;
149 maxIdx = i;
150 }
151 }
152 max->width = data[maxIdx];
153 max->height = data[maxIdx + 1];
154 *idx = maxIdx;
155 }
156
157 // Cap size by number of pixels.
158 Size CapSize(Size cap, Size input) {
159 if (input.width * input.height > cap.width * cap.height) {
160 return cap;
161 }
162 return input;
163 }
164
    /**
     * Wraps one camera output stream together with its consumer endpoint:
     * either an in-process CpuConsumer (for buffer inspection) or an
     * externally supplied surface (for on-screen rendering).
     */
    struct CameraStream : public RefBase {

    public:
        /**
         * Only initialize the variables here, do the ASSERT check in
         * SetUp function. To make this stream useful, the SetUp must
         * be called before using it.
         */
        CameraStream(
                int width,
                int height,
                const sp<CameraDeviceBase>& device,
                CameraStreamParams param, sp<ANativeWindow> surface,
                bool useCpuConsumer)
            : mDevice(device),
              mWidth(width),
              mHeight(height) {
            mFormat = param.mFormat;
            if (useCpuConsumer) {
                // Build a producer/consumer BufferQueue pair; the camera
                // writes into the producer side via mNativeWindow.
                sp<IGraphicBufferProducer> producer;
                sp<IGraphicBufferConsumer> consumer;
                BufferQueue::createBufferQueue(&producer, &consumer);
                mCpuConsumer = new CpuConsumer(consumer, param.mHeapCount);
                mCpuConsumer->setName(String8(
                        "CameraMultiStreamTest::mCpuConsumer"));
                mNativeWindow = new Surface(producer);
            } else {
                // Render the stream to screen.
                mCpuConsumer = NULL;
                mNativeWindow = surface;
            }

            // Listener fires on every frame arrival at the CPU consumer.
            mFrameListener = new FrameListener();
            if (mCpuConsumer != 0) {
                mCpuConsumer->setFrameAvailableListener(mFrameListener);
            }
        }

        /**
         * Finally create camera stream, and do the ASSERT check, since we
         * can not do it in ctor.
         */
        void SetUp() {
            ASSERT_EQ(OK,
                    mDevice->createStream(mNativeWindow,
                        mWidth, mHeight, mFormat, /*size (for jpegs)*/0,
                        &mStreamId));

            ASSERT_NE(-1, mStreamId);
        }

        int GetStreamId() { return mStreamId; }
        sp<CpuConsumer> GetConsumer() { return mCpuConsumer; }
        sp<FrameListener> GetFrameListener() { return mFrameListener; }

    protected:
        ~CameraStream() {
            // Drain outstanding requests before deleting the stream so the
            // HAL is not holding buffers from it.
            if (mDevice.get()) {
                mDevice->waitUntilDrained();
                mDevice->deleteStream(mStreamId);
            }
            // Clear producer before consumer.
            mNativeWindow.clear();
            mCpuConsumer.clear();
        }

    private:
        sp<FrameListener> mFrameListener;
        sp<CpuConsumer> mCpuConsumer;
        sp<ANativeWindow> mNativeWindow;
        sp<CameraDeviceBase> mDevice;
        int mStreamId;
        int mWidth;
        int mHeight;
        int mFormat;
    };
241
242 int64_t GetExposureValue(const CameraMetadata& metaData) {
243 camera_metadata_ro_entry_t entry =
244 metaData.find(ANDROID_SENSOR_EXPOSURE_TIME);
245 EXPECT_EQ(1u, entry.count);
246 if (entry.count == 1) {
247 return entry.data.i64[0];
248 }
249 return -1;
250 }
251
252 int32_t GetSensitivity(const CameraMetadata& metaData) {
253 camera_metadata_ro_entry_t entry =
254 metaData.find(ANDROID_SENSOR_SENSITIVITY);
255 EXPECT_EQ(1u, entry.count);
256 if (entry.count == 1) {
257 return entry.data.i32[0];
258 }
259 return -1;
260 }
261
262 int64_t GetFrameDuration(const CameraMetadata& metaData) {
263 camera_metadata_ro_entry_t entry =
264 metaData.find(ANDROID_SENSOR_FRAME_DURATION);
265 EXPECT_EQ(1u, entry.count);
266 if (entry.count == 1) {
267 return entry.data.i64[0];
268 }
269 return -1;
270 }
271
    /**
     * Builds the three capture requests used by the MultiBurst test, all from
     * the PREVIEW template:
     *  - previewRequest: auto (3A) settings, request id 0, targets
     *    |previewStreamId|.
     *  - meteringRequest: fully manual (3A/NR/edge off), targets
     *    |meteringStreamId|.
     *  - captureRequest: same manual settings, targets |captureStreamId|.
     * Request ids for the two burst requests are assigned later by
     * CaptureBurst. Callers must wrap this in ASSERT_NO_FATAL_FAILURE.
     */
    void CreateRequests(CameraMetadata& previewRequest,
            CameraMetadata& meteringRequest,
            CameraMetadata& captureRequest,
            int previewStreamId,
            int meteringStreamId,
            int captureStreamId) {
        int32_t requestId = 0;
        Vector<int32_t> previewStreamIds;
        previewStreamIds.push(previewStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                &previewRequest));
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                previewStreamIds));
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                &requestId, 1));

        // Create metering request, manual settings
        // Manual control: Disable 3A, noise reduction, edge sharping
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        uint8_t nrOff = static_cast<uint8_t>(ANDROID_NOISE_REDUCTION_MODE_OFF);
        uint8_t sharpOff = static_cast<uint8_t>(ANDROID_EDGE_MODE_OFF);
        Vector<int32_t> meteringStreamIds;
        meteringStreamIds.push(meteringStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(
                CAMERA2_TEMPLATE_PREVIEW,
                &meteringRequest));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_REQUEST_OUTPUT_STREAMS,
                meteringStreamIds));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_NOISE_REDUCTION_MODE,
                &nrOff, 1));
        ASSERT_EQ(OK, meteringRequest.update(
                ANDROID_EDGE_MODE,
                &sharpOff, 1));

        // Create capture request, manual settings
        Vector<int32_t> captureStreamIds;
        captureStreamIds.push(captureStreamId);
        ASSERT_EQ(OK, mDevice->createDefaultRequest(
                CAMERA2_TEMPLATE_PREVIEW,
                &captureRequest));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_REQUEST_OUTPUT_STREAMS,
                captureStreamIds));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_CONTROL_MODE,
                &cmOff, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_NOISE_REDUCTION_MODE,
                &nrOff, 1));
        ASSERT_EQ(OK, captureRequest.update(
                ANDROID_EDGE_MODE,
                &sharpOff, 1));
    }
330
    /**
     * Factory for CameraStream wrappers. Resolves the AUTO CPU format to a
     * concrete pixel format before constructing the stream. The returned
     * stream is not usable until its SetUp() succeeds.
     */
    sp<CameraStream> CreateStream(
            int width,
            int height,
            const sp<CameraDeviceBase>& device,
            CameraStreamParams param = DEFAULT_STREAM_PARAMETERS,
            sp<ANativeWindow> surface = NULL,
            bool useCpuConsumer = true) {
        param.mFormat = MapAutoFormat(param.mFormat);
        return new CameraStream(width, height, device,
                param, surface, useCpuConsumer);
    }
342
343 void CaptureBurst(CameraMetadata& request, size_t requestCount,
344 const Vector<int64_t>& exposures,
345 const Vector<int32_t>& sensitivities,
346 const sp<CameraStream>& stream,
Zhijun He914226c2013-09-08 10:56:24 -0700347 int64_t minFrameDuration,
348 int32_t* requestIdStart) {
Zhijun He8ef01442013-08-13 17:36:17 -0700349 ASSERT_EQ(OK, request.update(ANDROID_SENSOR_FRAME_DURATION,
350 &minFrameDuration, 1));
351 // Submit a series of requests with the specified exposure/gain values.
Zhijun He914226c2013-09-08 10:56:24 -0700352 int32_t targetRequestId = *requestIdStart;
Zhijun He8ef01442013-08-13 17:36:17 -0700353 for (size_t i = 0; i < requestCount; i++) {
Zhijun He914226c2013-09-08 10:56:24 -0700354 ASSERT_EQ(OK, request.update(ANDROID_REQUEST_ID, requestIdStart, 1));
355 ASSERT_EQ(OK, request.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposures[i], 1));
356 ASSERT_EQ(OK, request.update(ANDROID_SENSOR_SENSITIVITY, &sensitivities[i], 1));
Zhijun He8ef01442013-08-13 17:36:17 -0700357 ASSERT_EQ(OK, mDevice->capture(request));
Zhijun He914226c2013-09-08 10:56:24 -0700358 ALOGV("Submitting request with: id %d with exposure %lld, sensitivity %d",
359 *requestIdStart, exposures[i], sensitivities[i]);
Zhijun He8ef01442013-08-13 17:36:17 -0700360 if (CAMERA_MULTI_STREAM_DEBUGGING) {
361 request.dump(STDOUT_FILENO);
362 }
Zhijun He914226c2013-09-08 10:56:24 -0700363 (*requestIdStart)++;
Zhijun He8ef01442013-08-13 17:36:17 -0700364 }
365 // Get capture burst results.
366 Vector<nsecs_t> captureBurstTimes;
367 sp<CpuConsumer> consumer = stream->GetConsumer();
368 sp<FrameListener> listener = stream->GetFrameListener();
369
370 // Set wait limit based on expected frame duration.
371 int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
372 for (size_t i = 0; i < requestCount; i++) {
373 ALOGV("Reading request result %d", i);
374
375 /**
376 * Raise the timeout to be at least twice as long as the exposure
377 * time. to avoid a false positive when the timeout is too short.
378 */
379 if ((exposures[i] * EXP_WAIT_MULTIPLIER) > waitLimit) {
380 waitLimit = exposures[i] * EXP_WAIT_MULTIPLIER;
381 }
382
Jianing Weif816eea2014-04-10 14:17:57 -0700383 CaptureResult result;
Zhijun He8ef01442013-08-13 17:36:17 -0700384 CameraMetadata frameMetadata;
Zhijun He914226c2013-09-08 10:56:24 -0700385 int32_t resultRequestId;
386 do {
387 ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
Jianing Weif816eea2014-04-10 14:17:57 -0700388 ASSERT_EQ(OK, mDevice->getNextResult(&result));
389 frameMetadata = result.mMetadata;
Zhijun He914226c2013-09-08 10:56:24 -0700390
391 camera_metadata_entry_t resultEntry = frameMetadata.find(ANDROID_REQUEST_ID);
392 ASSERT_EQ(1u, resultEntry.count);
393 resultRequestId = resultEntry.data.i32[0];
394 if (CAMERA_MULTI_STREAM_DEBUGGING) {
395 std::cout << "capture result req id: " << resultRequestId << std::endl;
396 }
397 } while (resultRequestId != targetRequestId);
398 targetRequestId++;
Zhijun He8ef01442013-08-13 17:36:17 -0700399 ALOGV("Got capture burst result for request %d", i);
Zhijun He914226c2013-09-08 10:56:24 -0700400
Zhijun He8ef01442013-08-13 17:36:17 -0700401 // Validate capture result
402 if (CAMERA_MULTI_STREAM_DEBUGGING) {
403 frameMetadata.dump(STDOUT_FILENO);
404 }
405
406 // TODO: Need revisit it to figure out an accurate margin.
Zhijun He914226c2013-09-08 10:56:24 -0700407 int64_t resultExposure = GetExposureValue(frameMetadata);
408 int32_t resultSensitivity = GetSensitivity(frameMetadata);
Gaurav Batraa1bef2b2014-03-28 17:03:36 -0700409 EXPECT_LE(sensitivities[i] * (1.0 - TOLERANCE_MARGIN_METADATA), resultSensitivity);
410 EXPECT_GE(sensitivities[i] * (1.0 + TOLERANCE_MARGIN_METADATA), resultSensitivity);
411 EXPECT_LE(exposures[i] * (1.0 - TOLERANCE_MARGIN_METADATA), resultExposure);
412 EXPECT_GE(exposures[i] * (1.0 + TOLERANCE_MARGIN_METADATA), resultExposure);
Zhijun He8ef01442013-08-13 17:36:17 -0700413
414 ASSERT_EQ(OK, listener->waitForFrame(waitLimit));
415 captureBurstTimes.push_back(systemTime());
416 CpuConsumer::LockedBuffer imgBuffer;
417 ASSERT_EQ(OK, consumer->lockNextBuffer(&imgBuffer));
418 ALOGV("Got capture buffer for request %d", i);
419
420 /**
421 * TODO: Validate capture buffer. Current brightness calculation
422 * is too slow, it also doesn't account for saturation effects,
423 * which is quite common since we are going over a significant
424 * range of EVs. we need figure out some reliable way to validate
425 * buffer data.
426 */
427
428 ASSERT_EQ(OK, consumer->unlockBuffer(imgBuffer));
429 if (i > 0) {
430 nsecs_t timeDelta =
431 captureBurstTimes[i] - captureBurstTimes[i-1];
Gaurav Batraa1bef2b2014-03-28 17:03:36 -0700432 EXPECT_GE(timeDelta * ( 1 + TOLERANCE_MARGIN_CAPTURE), exposures[i]);
Zhijun He8ef01442013-08-13 17:36:17 -0700433 }
434 }
435 }
436
    /**
     * Intentionally shadow default CreateStream function from base class,
     * because we don't want any test in this class to use the default
     * stream creation function.
     */
    void CreateStream() {
    }
444};
445
446/**
447 * This test adds multiple stream use case test, basically, test 3
448 * streams:
449 *
450 * 1. Preview stream, with large size that is no bigger than 1080p
451 * we render this stream to display and vary the exposure time for
452 * for certain amount of time for visualization purpose.
453 *
454 * 2. Metering stream, with small size that is no bigger than VGA size.
455 * a burst is issued for different exposure times and analog gains
456 * (or analog gain implemented sensitivities) then check if the capture
457 * result metadata matches the request.
458 *
459 * 3. Capture stream, this is basically similar as meterting stream, but
460 * has large size, which is the largest supported JPEG capture size.
461 *
462 * This multiple stream test is to test if HAL supports:
463 *
464 * 1. Multiple streams like above, HAL should support at least 3 streams
465 * concurrently: one preview stream, 2 other YUV stream.
466 *
467 * 2. Manual control(gain/exposure) of mutiple burst capture.
468 */
469TEST_F(CameraMultiStreamTest, MultiBurst) {
470
471 TEST_EXTENSION_FORKING_INIT;
472
473 camera_metadata_ro_entry availableProcessedSizes =
474 GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
475 ASSERT_EQ(0u, availableProcessedSizes.count % 2);
476 ASSERT_GE(availableProcessedSizes.count, 2u);
477 camera_metadata_ro_entry availableProcessedMinFrameDurations =
478 GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
479 EXPECT_EQ(availableProcessedSizes.count,
480 availableProcessedMinFrameDurations.count * 2);
481
482 camera_metadata_ro_entry availableJpegSizes =
483 GetStaticEntry(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
484 ASSERT_EQ(0u, availableJpegSizes.count % 2);
485 ASSERT_GE(availableJpegSizes.count, 2u);
486
Zhijun He3bf3b452013-09-18 23:42:12 -0700487 camera_metadata_ro_entry hardwareLevel =
488 GetStaticEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
489 ASSERT_EQ(1u, hardwareLevel.count);
490 uint8_t level = hardwareLevel.data.u8[0];
491 ASSERT_GE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED);
492 ASSERT_LE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
493 if (level == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) {
494 const ::testing::TestInfo* const test_info =
495 ::testing::UnitTest::GetInstance()->current_test_info();
496 std::cerr << "Skipping test "
497 << test_info->test_case_name() << "."
498 << test_info->name()
499 << " because HAL hardware supported level is limited "
500 << std::endl;
501 return;
502 }
503
Zhijun He8ef01442013-08-13 17:36:17 -0700504 // Find the right sizes for preview, metering, and capture streams
505 // assumes at least 2 entries in availableProcessedSizes.
506 int64_t minFrameDuration = DEFAULT_FRAME_DURATION;
507 Size processedMinSize, processedMaxSize, jpegMaxSize;
508 const int32_t* data = availableProcessedSizes.data.i32;
509 size_t count = availableProcessedSizes.count;
510
511 int32_t minIdx, maxIdx;
512 GetMinSize(data, count, &processedMinSize, &minIdx);
513 GetMaxSize(data, count, &processedMaxSize, &maxIdx);
514 ALOGV("Found processed max size: %dx%d, min size = %dx%d",
515 processedMaxSize.width, processedMaxSize.height,
516 processedMinSize.width, processedMinSize.height);
517
518 if (availableProcessedSizes.count ==
519 availableProcessedMinFrameDurations.count * 2) {
520 minFrameDuration =
521 availableProcessedMinFrameDurations.data.i64[maxIdx / 2];
522 }
523
524 EXPECT_GT(minFrameDuration, 0);
525
526 if (minFrameDuration <= 0) {
527 minFrameDuration = DEFAULT_FRAME_DURATION;
528 }
529
530 ALOGV("targeted minimal frame duration is: %lldns", minFrameDuration);
531
532 data = &(availableJpegSizes.data.i32[0]);
533 count = availableJpegSizes.count;
534 GetMaxSize(data, count, &jpegMaxSize, &maxIdx);
535 ALOGV("Found Jpeg size max idx = %d", maxIdx);
536
537 // Max Jpeg size should be available in processed sizes. Use it for
538 // YUV capture anyway.
539 EXPECT_EQ(processedMaxSize.width, jpegMaxSize.width);
540 EXPECT_EQ(processedMaxSize.height, jpegMaxSize.height);
541
542 // Cap preview size.
543 Size previewLimit = { PREVIEW_WIDTH_CAP, PREVIEW_HEIGHT_CAP };
544 // FIXME: need make sure the previewLimit is supported by HAL.
545 Size previewSize = CapSize(previewLimit, processedMaxSize);
546 // Cap Metering size.
547 Size meteringLimit = { METERING_WIDTH_CAP, METERING_HEIGHT_CAP };
548 // Cap metering size to VGA (VGA is mandatory by CDD)
549 Size meteringSize = CapSize(meteringLimit, processedMinSize);
550 // Capture stream should be the max size of jpeg sizes.
551 ALOGV("preview size: %dx%d, metering size: %dx%d, capture size: %dx%d",
552 previewSize.width, previewSize.height,
553 meteringSize.width, meteringSize.height,
554 jpegMaxSize.width, jpegMaxSize.height);
555
556 // Create streams
557 // Preview stream: small resolution, render on the screen.
558 sp<CameraStream> previewStream;
559 {
560 sp<ANativeWindow> surface;
561 ASSERT_NO_FATAL_FAILURE(CreateOnScreenSurface(/*out*/surface));
562 previewStream = CreateStream(
563 previewSize.width,
564 previewSize.height,
565 mDevice,
566 DISPLAY_STREAM_PARAMETERS,
567 surface,
568 false);
569 ASSERT_NE((void*)NULL, previewStream.get());
570 ASSERT_NO_FATAL_FAILURE(previewStream->SetUp());
571 }
572 // Metering burst stream: small resolution yuv stream
573 sp<CameraStream> meteringStream =
574 CreateStream(
575 meteringSize.width,
576 meteringSize.height,
577 mDevice);
578 ASSERT_NE((void*)NULL, meteringStream.get());
579 ASSERT_NO_FATAL_FAILURE(meteringStream->SetUp());
580 // Capture burst stream: full resolution yuv stream
581 sp<CameraStream> captureStream =
582 CreateStream(
583 jpegMaxSize.width,
584 jpegMaxSize.height,
585 mDevice);
586 ASSERT_NE((void*)NULL, captureStream.get());
587 ASSERT_NO_FATAL_FAILURE(captureStream->SetUp());
588
589 // Create Preview request.
590 CameraMetadata previewRequest, meteringRequest, captureRequest;
591 ASSERT_NO_FATAL_FAILURE(CreateRequests(previewRequest, meteringRequest,
592 captureRequest, previewStream->GetStreamId(),
593 meteringStream->GetStreamId(), captureStream->GetStreamId()));
594
595 // Start preview
596 if (CAMERA_MULTI_STREAM_DEBUGGING) {
597 previewRequest.dump(STDOUT_FILENO);
598 }
599
600 // Generate exposure and sensitivity lists
601 camera_metadata_ro_entry exposureTimeRange =
602 GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);
603 ASSERT_EQ(exposureTimeRange.count, 2u);
604 int64_t minExp = exposureTimeRange.data.i64[0];
605 int64_t maxExp = exposureTimeRange.data.i64[1];
606 ASSERT_GT(maxExp, minExp);
607
608 camera_metadata_ro_entry sensivityRange =
609 GetStaticEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
610 ASSERT_EQ(2u, sensivityRange.count);
611 int32_t minSensitivity = sensivityRange.data.i32[0];
612 int32_t maxSensitivity = sensivityRange.data.i32[1];
613 camera_metadata_ro_entry maxAnalogSenEntry =
614 GetStaticEntry(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY);
615 EXPECT_EQ(1u, maxAnalogSenEntry.count);
616 int32_t maxAnalogSensitivity = maxAnalogSenEntry.data.i32[0];
617 EXPECT_LE(maxAnalogSensitivity, maxSensitivity);
618 // Only test the sensitivity implemented by analog gain.
619 if (maxAnalogSensitivity > maxSensitivity) {
620 // Fallback to maxSensitity
621 maxAnalogSensitivity = maxSensitivity;
622 }
623
624 // sensitivity list, only include the sensitivities that are implemented
625 // purely by analog gain if possible.
626 Vector<int32_t> sensitivities;
627 Vector<int64_t> exposures;
628 count = (maxAnalogSensitivity - minSensitivity + 99) / 100;
629 sensitivities.push_back(minSensitivity);
630 for (size_t i = 1; i < count; i++) {
631 sensitivities.push_back(minSensitivity + i * 100);
632 }
633 sensitivities.push_back(maxAnalogSensitivity);
634 ALOGV("Sensitivity Range: min=%d, max=%d", minSensitivity,
635 maxAnalogSensitivity);
636 int64_t exp = minExp;
637 while (exp < maxExp) {
638 exposures.push_back(exp);
639 exp *= 2;
640 }
641 // Sweep the exposure value for preview, just for visual inspection purpose.
642 uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
643 for (size_t i = 0; i < exposures.size(); i++) {
644 ASSERT_EQ(OK, previewRequest.update(
645 ANDROID_CONTROL_MODE,
646 &cmOff, 1));
647 ASSERT_EQ(OK, previewRequest.update(
648 ANDROID_SENSOR_EXPOSURE_TIME,
649 &exposures[i], 1));
650 ALOGV("Submitting preview request %d with exposure %lld",
651 i, exposures[i]);
652
653 ASSERT_EQ(OK, mDevice->setStreamingRequest(previewRequest));
654
655 // Let preview run 200ms on screen for each exposure time.
656 usleep(PREVIEW_RENDERING_TIME_INTERVAL);
657 }
658
659 size_t requestCount = sensitivities.size();
660 if (requestCount > exposures.size()) {
661 requestCount = exposures.size();
662 }
663
Zhijun He914226c2013-09-08 10:56:24 -0700664 // To maintain the request id uniqueness (preview request id is 0), make burst capture start
665 // request id 1 here.
666 int32_t requestIdStart = 1;
Zhijun He8ef01442013-08-13 17:36:17 -0700667 /**
668 * Submit metering request, set default frame duration to minimal possible
669 * value, we want the capture to run as fast as possible. HAL should adjust
670 * the frame duration to minimal necessary value to support the requested
671 * exposure value if exposure is larger than frame duration.
672 */
673 CaptureBurst(meteringRequest, requestCount, exposures, sensitivities,
Zhijun He914226c2013-09-08 10:56:24 -0700674 meteringStream, minFrameDuration, &requestIdStart);
Zhijun He8ef01442013-08-13 17:36:17 -0700675
676 /**
677 * Submit capture request, set default frame duration to minimal possible
678 * value, we want the capture to run as fast as possible. HAL should adjust
679 * the frame duration to minimal necessary value to support the requested
680 * exposure value if exposure is larger than frame duration.
681 */
682 CaptureBurst(captureRequest, requestCount, exposures, sensitivities,
Zhijun He914226c2013-09-08 10:56:24 -0700683 captureStream, minFrameDuration, &requestIdStart);
Zhijun He8ef01442013-08-13 17:36:17 -0700684
685 ASSERT_EQ(OK, mDevice->clearStreamingRequest());
686}
687
688}
689}
690}