/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#define LOG_TAG "CameraMultiStreamTest"
18//#define LOG_NDEBUG 0
19#include "CameraStreamFixture.h"
20#include "TestExtensions.h"
21
22#include <gtest/gtest.h>
23#include <utils/Log.h>
24#include <utils/StrongPointer.h>
25#include <common/CameraDeviceBase.h>
26#include <hardware/hardware.h>
27#include <hardware/camera2.h>
28#include <gui/SurfaceComposerClient.h>
29#include <gui/Surface.h>
30
// Default sensor frame duration used when the static metadata provides none.
#define DEFAULT_FRAME_DURATION 33000000LL // 33ms
// Number of buffers for each CpuConsumer-backed stream.
#define CAMERA_HEAP_COUNT 1
// Format for the metering/capture streams (auto-selected CPU-readable format).
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
// Format for the on-screen preview stream (HAL picks the best gralloc format).
#define CAMERA_DISPLAY_FORMAT HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
// Set to 1 to dump requests/results to stdout while the test runs.
#define CAMERA_MULTI_STREAM_DEBUGGING 0
// Baseline wait for a capture result before timing out.
#define CAMERA_FRAME_TIMEOUT 1000000000LL // nsecs (1 secs)
#define PREVIEW_RENDERING_TIME_INTERVAL 200000 // in unit of us, 200ms
// 1% tolerance margin for exposure sanity check against metadata
#define TOLERANCE_MARGIN_METADATA 0.01
// 5% tolerance margin for exposure sanity check against capture times
#define TOLERANCE_MARGIN_CAPTURE 0.05
/* constants for display */
#define DISPLAY_BUFFER_HEIGHT 1024
#define DISPLAY_BUFFER_WIDTH 1024
#define DISPLAY_BUFFER_FORMAT PIXEL_FORMAT_RGB_888

// This test intends to test large preview size but less than 1080p.
#define PREVIEW_WIDTH_CAP  1920
#define PREVIEW_HEIGHT_CAP 1080
// This test intends to test small metering burst size that is less than 640x480
#define METERING_WIDTH_CAP  640
#define METERING_HEIGHT_CAP 480

// Wait at least this multiple of the exposure time for a frame to arrive.
#define EXP_WAIT_MULTIPLIER 2
55
56namespace android {
57namespace camera2 {
58namespace tests {
59
60static const CameraStreamParams DEFAULT_STREAM_PARAMETERS = {
61 /*mFormat*/ CAMERA_EXPOSURE_FORMAT,
62 /*mHeapCount*/ CAMERA_HEAP_COUNT
63};
64
65static const CameraStreamParams DISPLAY_STREAM_PARAMETERS = {
66 /*mFormat*/ CAMERA_DISPLAY_FORMAT,
67 /*mHeapCount*/ CAMERA_HEAP_COUNT
68};
69
70class CameraMultiStreamTest
71 : public ::testing::Test,
72 public CameraStreamFixture {
73
74public:
75 CameraMultiStreamTest() : CameraStreamFixture(DEFAULT_STREAM_PARAMETERS) {
76 TEST_EXTENSION_FORKING_CONSTRUCTOR;
77
78 if (HasFatalFailure()) {
79 return;
80 }
81 /**
82 * Don't create default stream, each test is in charge of creating
83 * its own streams.
84 */
85 }
86
87 ~CameraMultiStreamTest() {
88 TEST_EXTENSION_FORKING_DESTRUCTOR;
89 }
90
91 sp<SurfaceComposerClient> mComposerClient;
92 sp<SurfaceControl> mSurfaceControl;
93
94 void CreateOnScreenSurface(sp<ANativeWindow>& surface) {
95 mComposerClient = new SurfaceComposerClient;
96 ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());
97
98 mSurfaceControl = mComposerClient->createSurface(
99 String8("CameraMultiStreamTest StreamingImage Surface"),
100 DISPLAY_BUFFER_HEIGHT, DISPLAY_BUFFER_WIDTH,
101 DISPLAY_BUFFER_FORMAT, 0);
102
103 ASSERT_NE((void*)NULL, mSurfaceControl.get());
104 ASSERT_TRUE(mSurfaceControl->isValid());
105
106 SurfaceComposerClient::openGlobalTransaction();
107 ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
108 ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
109 SurfaceComposerClient::closeGlobalTransaction();
110
111 surface = mSurfaceControl->getSurface();
112
113 ASSERT_NE((void*)NULL, surface.get());
114 }
115
116 struct Size {
117 int32_t width;
118 int32_t height;
119 };
120
121 // Select minimal size by number of pixels.
122 void GetMinSize(const int32_t* data, size_t count,
123 Size* min, int32_t* idx) {
124 ASSERT_NE((int32_t*)NULL, data);
125 int32_t minIdx = 0;
126 int32_t minSize = INT_MAX, tempSize;
127 for (size_t i = 0; i < count; i+=2) {
128 tempSize = data[i] * data[i+1];
129 if (minSize > tempSize) {
130 minSize = tempSize;
131 minIdx = i;
132 }
133 }
134 min->width = data[minIdx];
135 min->height = data[minIdx + 1];
136 *idx = minIdx;
137 }
138
139 // Select maximal size by number of pixels.
140 void GetMaxSize(const int32_t* data, size_t count,
141 Size* max, int32_t* idx) {
142 ASSERT_NE((int32_t*)NULL, data);
143 int32_t maxIdx = 0;
144 int32_t maxSize = INT_MIN, tempSize;
145 for (size_t i = 0; i < count; i+=2) {
146 tempSize = data[i] * data[i+1];
147 if (maxSize < tempSize) {
148 maxSize = tempSize;
149 maxIdx = i;
150 }
151 }
152 max->width = data[maxIdx];
153 max->height = data[maxIdx + 1];
154 *idx = maxIdx;
155 }
156
157 // Cap size by number of pixels.
158 Size CapSize(Size cap, Size input) {
159 if (input.width * input.height > cap.width * cap.height) {
160 return cap;
161 }
162 return input;
163 }
164
165 struct CameraStream : public RefBase {
166
167 public:
168 /**
169 * Only initialize the variables here, do the ASSERT check in
170 * SetUp function. To make this stream useful, the SetUp must
171 * be called before using it.
172 */
173 CameraStream(
174 int width,
175 int height,
176 const sp<CameraDeviceBase>& device,
177 CameraStreamParams param, sp<ANativeWindow> surface,
178 bool useCpuConsumer)
179 : mDevice(device),
180 mWidth(width),
181 mHeight(height) {
182 mFormat = param.mFormat;
183 if (useCpuConsumer) {
184 sp<BufferQueue> bq = new BufferQueue();
185 mCpuConsumer = new CpuConsumer(bq, param.mHeapCount);
186 mCpuConsumer->setName(String8(
187 "CameraMultiStreamTest::mCpuConsumer"));
188 mNativeWindow = new Surface(bq);
189 } else {
190 // Render the stream to screen.
191 mCpuConsumer = NULL;
192 mNativeWindow = surface;
193 }
194
195 mFrameListener = new FrameListener();
196 if (mCpuConsumer != 0) {
197 mCpuConsumer->setFrameAvailableListener(mFrameListener);
198 }
199 }
200
201 /**
202 * Finally create camera stream, and do the ASSERT check, since we
203 * can not do it in ctor.
204 */
205 void SetUp() {
206 ASSERT_EQ(OK,
207 mDevice->createStream(mNativeWindow,
208 mWidth, mHeight, mFormat, /*size (for jpegs)*/0,
209 &mStreamId));
210
211 ASSERT_NE(-1, mStreamId);
212 }
213
214 int GetStreamId() { return mStreamId; }
215 sp<CpuConsumer> GetConsumer() { return mCpuConsumer; }
216 sp<FrameListener> GetFrameListener() { return mFrameListener; }
217
218 protected:
219 ~CameraStream() {
220 if (mDevice.get()) {
221 mDevice->waitUntilDrained();
222 mDevice->deleteStream(mStreamId);
223 }
224 // Clear producer before consumer.
225 mNativeWindow.clear();
226 mCpuConsumer.clear();
227 }
228
229 private:
230 sp<FrameListener> mFrameListener;
231 sp<CpuConsumer> mCpuConsumer;
232 sp<ANativeWindow> mNativeWindow;
233 sp<CameraDeviceBase> mDevice;
234 int mStreamId;
235 int mWidth;
236 int mHeight;
237 int mFormat;
238 };
239
240 int64_t GetExposureValue(const CameraMetadata& metaData) {
241 camera_metadata_ro_entry_t entry =
242 metaData.find(ANDROID_SENSOR_EXPOSURE_TIME);
243 EXPECT_EQ(1u, entry.count);
244 if (entry.count == 1) {
245 return entry.data.i64[0];
246 }
247 return -1;
248 }
249
250 int32_t GetSensitivity(const CameraMetadata& metaData) {
251 camera_metadata_ro_entry_t entry =
252 metaData.find(ANDROID_SENSOR_SENSITIVITY);
253 EXPECT_EQ(1u, entry.count);
254 if (entry.count == 1) {
255 return entry.data.i32[0];
256 }
257 return -1;
258 }
259
260 int64_t GetFrameDuration(const CameraMetadata& metaData) {
261 camera_metadata_ro_entry_t entry =
262 metaData.find(ANDROID_SENSOR_FRAME_DURATION);
263 EXPECT_EQ(1u, entry.count);
264 if (entry.count == 1) {
265 return entry.data.i64[0];
266 }
267 return -1;
268 }
269
270 void CreateRequests(CameraMetadata& previewRequest,
271 CameraMetadata& meteringRequest,
272 CameraMetadata& captureRequest,
273 int previewStreamId,
274 int meteringStreamId,
275 int captureStreamId) {
Zhijun He914226c2013-09-08 10:56:24 -0700276 int32_t requestId = 0;
277 Vector<int32_t> previewStreamIds;
278 previewStreamIds.push(previewStreamId);
279 ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
280 &previewRequest));
281 ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
282 previewStreamIds));
283 ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
284 &requestId, 1));
Zhijun He8ef01442013-08-13 17:36:17 -0700285
Zhijun He914226c2013-09-08 10:56:24 -0700286 // Create metering request, manual settings
287 // Manual control: Disable 3A, noise reduction, edge sharping
288 uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
289 uint8_t nrOff = static_cast<uint8_t>(ANDROID_NOISE_REDUCTION_MODE_OFF);
290 uint8_t sharpOff = static_cast<uint8_t>(ANDROID_EDGE_MODE_OFF);
291 Vector<int32_t> meteringStreamIds;
292 meteringStreamIds.push(meteringStreamId);
293 ASSERT_EQ(OK, mDevice->createDefaultRequest(
294 CAMERA2_TEMPLATE_PREVIEW,
295 &meteringRequest));
296 ASSERT_EQ(OK, meteringRequest.update(
297 ANDROID_REQUEST_OUTPUT_STREAMS,
298 meteringStreamIds));
299 ASSERT_EQ(OK, meteringRequest.update(
300 ANDROID_CONTROL_MODE,
301 &cmOff, 1));
302 ASSERT_EQ(OK, meteringRequest.update(
303 ANDROID_NOISE_REDUCTION_MODE,
304 &nrOff, 1));
305 ASSERT_EQ(OK, meteringRequest.update(
306 ANDROID_EDGE_MODE,
307 &sharpOff, 1));
Zhijun He8ef01442013-08-13 17:36:17 -0700308
Zhijun He914226c2013-09-08 10:56:24 -0700309 // Create capture request, manual settings
310 Vector<int32_t> captureStreamIds;
311 captureStreamIds.push(captureStreamId);
312 ASSERT_EQ(OK, mDevice->createDefaultRequest(
313 CAMERA2_TEMPLATE_PREVIEW,
314 &captureRequest));
315 ASSERT_EQ(OK, captureRequest.update(
316 ANDROID_REQUEST_OUTPUT_STREAMS,
317 captureStreamIds));
318 ASSERT_EQ(OK, captureRequest.update(
319 ANDROID_CONTROL_MODE,
320 &cmOff, 1));
321 ASSERT_EQ(OK, captureRequest.update(
322 ANDROID_NOISE_REDUCTION_MODE,
323 &nrOff, 1));
324 ASSERT_EQ(OK, captureRequest.update(
325 ANDROID_EDGE_MODE,
326 &sharpOff, 1));
Zhijun He8ef01442013-08-13 17:36:17 -0700327 }
328
329 sp<CameraStream> CreateStream(
330 int width,
331 int height,
332 const sp<CameraDeviceBase>& device,
333 CameraStreamParams param = DEFAULT_STREAM_PARAMETERS,
334 sp<ANativeWindow> surface = NULL,
335 bool useCpuConsumer = true) {
336 param.mFormat = MapAutoFormat(param.mFormat);
337 return new CameraStream(width, height, device,
338 param, surface, useCpuConsumer);
339 }
340
341 void CaptureBurst(CameraMetadata& request, size_t requestCount,
342 const Vector<int64_t>& exposures,
343 const Vector<int32_t>& sensitivities,
344 const sp<CameraStream>& stream,
Zhijun He914226c2013-09-08 10:56:24 -0700345 int64_t minFrameDuration,
346 int32_t* requestIdStart) {
Zhijun He8ef01442013-08-13 17:36:17 -0700347 ASSERT_EQ(OK, request.update(ANDROID_SENSOR_FRAME_DURATION,
348 &minFrameDuration, 1));
349 // Submit a series of requests with the specified exposure/gain values.
Zhijun He914226c2013-09-08 10:56:24 -0700350 int32_t targetRequestId = *requestIdStart;
Zhijun He8ef01442013-08-13 17:36:17 -0700351 for (size_t i = 0; i < requestCount; i++) {
Zhijun He914226c2013-09-08 10:56:24 -0700352 ASSERT_EQ(OK, request.update(ANDROID_REQUEST_ID, requestIdStart, 1));
353 ASSERT_EQ(OK, request.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposures[i], 1));
354 ASSERT_EQ(OK, request.update(ANDROID_SENSOR_SENSITIVITY, &sensitivities[i], 1));
Zhijun He8ef01442013-08-13 17:36:17 -0700355 ASSERT_EQ(OK, mDevice->capture(request));
Zhijun He914226c2013-09-08 10:56:24 -0700356 ALOGV("Submitting request with: id %d with exposure %lld, sensitivity %d",
357 *requestIdStart, exposures[i], sensitivities[i]);
Zhijun He8ef01442013-08-13 17:36:17 -0700358 if (CAMERA_MULTI_STREAM_DEBUGGING) {
359 request.dump(STDOUT_FILENO);
360 }
Zhijun He914226c2013-09-08 10:56:24 -0700361 (*requestIdStart)++;
Zhijun He8ef01442013-08-13 17:36:17 -0700362 }
363 // Get capture burst results.
364 Vector<nsecs_t> captureBurstTimes;
365 sp<CpuConsumer> consumer = stream->GetConsumer();
366 sp<FrameListener> listener = stream->GetFrameListener();
367
368 // Set wait limit based on expected frame duration.
369 int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
370 for (size_t i = 0; i < requestCount; i++) {
371 ALOGV("Reading request result %d", i);
372
373 /**
374 * Raise the timeout to be at least twice as long as the exposure
375 * time. to avoid a false positive when the timeout is too short.
376 */
377 if ((exposures[i] * EXP_WAIT_MULTIPLIER) > waitLimit) {
378 waitLimit = exposures[i] * EXP_WAIT_MULTIPLIER;
379 }
380
Zhijun He8ef01442013-08-13 17:36:17 -0700381 CameraMetadata frameMetadata;
Zhijun He914226c2013-09-08 10:56:24 -0700382 int32_t resultRequestId;
383 do {
384 ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
385 ASSERT_EQ(OK, mDevice->getNextFrame(&frameMetadata));
386
387 camera_metadata_entry_t resultEntry = frameMetadata.find(ANDROID_REQUEST_ID);
388 ASSERT_EQ(1u, resultEntry.count);
389 resultRequestId = resultEntry.data.i32[0];
390 if (CAMERA_MULTI_STREAM_DEBUGGING) {
391 std::cout << "capture result req id: " << resultRequestId << std::endl;
392 }
393 } while (resultRequestId != targetRequestId);
394 targetRequestId++;
Zhijun He8ef01442013-08-13 17:36:17 -0700395 ALOGV("Got capture burst result for request %d", i);
Zhijun He914226c2013-09-08 10:56:24 -0700396
Zhijun He8ef01442013-08-13 17:36:17 -0700397 // Validate capture result
398 if (CAMERA_MULTI_STREAM_DEBUGGING) {
399 frameMetadata.dump(STDOUT_FILENO);
400 }
401
402 // TODO: Need revisit it to figure out an accurate margin.
Zhijun He914226c2013-09-08 10:56:24 -0700403 int64_t resultExposure = GetExposureValue(frameMetadata);
404 int32_t resultSensitivity = GetSensitivity(frameMetadata);
Gaurav Batraa1bef2b2014-03-28 17:03:36 -0700405 EXPECT_LE(sensitivities[i] * (1.0 - TOLERANCE_MARGIN_METADATA), resultSensitivity);
406 EXPECT_GE(sensitivities[i] * (1.0 + TOLERANCE_MARGIN_METADATA), resultSensitivity);
407 EXPECT_LE(exposures[i] * (1.0 - TOLERANCE_MARGIN_METADATA), resultExposure);
408 EXPECT_GE(exposures[i] * (1.0 + TOLERANCE_MARGIN_METADATA), resultExposure);
Zhijun He8ef01442013-08-13 17:36:17 -0700409
410 ASSERT_EQ(OK, listener->waitForFrame(waitLimit));
411 captureBurstTimes.push_back(systemTime());
412 CpuConsumer::LockedBuffer imgBuffer;
413 ASSERT_EQ(OK, consumer->lockNextBuffer(&imgBuffer));
414 ALOGV("Got capture buffer for request %d", i);
415
416 /**
417 * TODO: Validate capture buffer. Current brightness calculation
418 * is too slow, it also doesn't account for saturation effects,
419 * which is quite common since we are going over a significant
420 * range of EVs. we need figure out some reliable way to validate
421 * buffer data.
422 */
423
424 ASSERT_EQ(OK, consumer->unlockBuffer(imgBuffer));
425 if (i > 0) {
426 nsecs_t timeDelta =
427 captureBurstTimes[i] - captureBurstTimes[i-1];
Gaurav Batraa1bef2b2014-03-28 17:03:36 -0700428 EXPECT_GE(timeDelta * ( 1 + TOLERANCE_MARGIN_CAPTURE), exposures[i]);
Zhijun He8ef01442013-08-13 17:36:17 -0700429 }
430 }
431 }
432
433 /**
434 * Intentionally shadow default CreateStream function from base class,
435 * because we don't want any test in this class to use the default
436 * stream creation function.
437 */
438 void CreateStream() {
439 }
440};
441
442/**
443 * This test adds multiple stream use case test, basically, test 3
444 * streams:
445 *
446 * 1. Preview stream, with large size that is no bigger than 1080p
447 * we render this stream to display and vary the exposure time for
448 * for certain amount of time for visualization purpose.
449 *
450 * 2. Metering stream, with small size that is no bigger than VGA size.
451 * a burst is issued for different exposure times and analog gains
452 * (or analog gain implemented sensitivities) then check if the capture
453 * result metadata matches the request.
454 *
455 * 3. Capture stream, this is basically similar as meterting stream, but
456 * has large size, which is the largest supported JPEG capture size.
457 *
458 * This multiple stream test is to test if HAL supports:
459 *
460 * 1. Multiple streams like above, HAL should support at least 3 streams
461 * concurrently: one preview stream, 2 other YUV stream.
462 *
463 * 2. Manual control(gain/exposure) of mutiple burst capture.
464 */
465TEST_F(CameraMultiStreamTest, MultiBurst) {
466
467 TEST_EXTENSION_FORKING_INIT;
468
469 camera_metadata_ro_entry availableProcessedSizes =
470 GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
471 ASSERT_EQ(0u, availableProcessedSizes.count % 2);
472 ASSERT_GE(availableProcessedSizes.count, 2u);
473 camera_metadata_ro_entry availableProcessedMinFrameDurations =
474 GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
475 EXPECT_EQ(availableProcessedSizes.count,
476 availableProcessedMinFrameDurations.count * 2);
477
478 camera_metadata_ro_entry availableJpegSizes =
479 GetStaticEntry(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
480 ASSERT_EQ(0u, availableJpegSizes.count % 2);
481 ASSERT_GE(availableJpegSizes.count, 2u);
482
Zhijun He3bf3b452013-09-18 23:42:12 -0700483 camera_metadata_ro_entry hardwareLevel =
484 GetStaticEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
485 ASSERT_EQ(1u, hardwareLevel.count);
486 uint8_t level = hardwareLevel.data.u8[0];
487 ASSERT_GE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED);
488 ASSERT_LE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
489 if (level == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) {
490 const ::testing::TestInfo* const test_info =
491 ::testing::UnitTest::GetInstance()->current_test_info();
492 std::cerr << "Skipping test "
493 << test_info->test_case_name() << "."
494 << test_info->name()
495 << " because HAL hardware supported level is limited "
496 << std::endl;
497 return;
498 }
499
Zhijun He8ef01442013-08-13 17:36:17 -0700500 // Find the right sizes for preview, metering, and capture streams
501 // assumes at least 2 entries in availableProcessedSizes.
502 int64_t minFrameDuration = DEFAULT_FRAME_DURATION;
503 Size processedMinSize, processedMaxSize, jpegMaxSize;
504 const int32_t* data = availableProcessedSizes.data.i32;
505 size_t count = availableProcessedSizes.count;
506
507 int32_t minIdx, maxIdx;
508 GetMinSize(data, count, &processedMinSize, &minIdx);
509 GetMaxSize(data, count, &processedMaxSize, &maxIdx);
510 ALOGV("Found processed max size: %dx%d, min size = %dx%d",
511 processedMaxSize.width, processedMaxSize.height,
512 processedMinSize.width, processedMinSize.height);
513
514 if (availableProcessedSizes.count ==
515 availableProcessedMinFrameDurations.count * 2) {
516 minFrameDuration =
517 availableProcessedMinFrameDurations.data.i64[maxIdx / 2];
518 }
519
520 EXPECT_GT(minFrameDuration, 0);
521
522 if (minFrameDuration <= 0) {
523 minFrameDuration = DEFAULT_FRAME_DURATION;
524 }
525
526 ALOGV("targeted minimal frame duration is: %lldns", minFrameDuration);
527
528 data = &(availableJpegSizes.data.i32[0]);
529 count = availableJpegSizes.count;
530 GetMaxSize(data, count, &jpegMaxSize, &maxIdx);
531 ALOGV("Found Jpeg size max idx = %d", maxIdx);
532
533 // Max Jpeg size should be available in processed sizes. Use it for
534 // YUV capture anyway.
535 EXPECT_EQ(processedMaxSize.width, jpegMaxSize.width);
536 EXPECT_EQ(processedMaxSize.height, jpegMaxSize.height);
537
538 // Cap preview size.
539 Size previewLimit = { PREVIEW_WIDTH_CAP, PREVIEW_HEIGHT_CAP };
540 // FIXME: need make sure the previewLimit is supported by HAL.
541 Size previewSize = CapSize(previewLimit, processedMaxSize);
542 // Cap Metering size.
543 Size meteringLimit = { METERING_WIDTH_CAP, METERING_HEIGHT_CAP };
544 // Cap metering size to VGA (VGA is mandatory by CDD)
545 Size meteringSize = CapSize(meteringLimit, processedMinSize);
546 // Capture stream should be the max size of jpeg sizes.
547 ALOGV("preview size: %dx%d, metering size: %dx%d, capture size: %dx%d",
548 previewSize.width, previewSize.height,
549 meteringSize.width, meteringSize.height,
550 jpegMaxSize.width, jpegMaxSize.height);
551
552 // Create streams
553 // Preview stream: small resolution, render on the screen.
554 sp<CameraStream> previewStream;
555 {
556 sp<ANativeWindow> surface;
557 ASSERT_NO_FATAL_FAILURE(CreateOnScreenSurface(/*out*/surface));
558 previewStream = CreateStream(
559 previewSize.width,
560 previewSize.height,
561 mDevice,
562 DISPLAY_STREAM_PARAMETERS,
563 surface,
564 false);
565 ASSERT_NE((void*)NULL, previewStream.get());
566 ASSERT_NO_FATAL_FAILURE(previewStream->SetUp());
567 }
568 // Metering burst stream: small resolution yuv stream
569 sp<CameraStream> meteringStream =
570 CreateStream(
571 meteringSize.width,
572 meteringSize.height,
573 mDevice);
574 ASSERT_NE((void*)NULL, meteringStream.get());
575 ASSERT_NO_FATAL_FAILURE(meteringStream->SetUp());
576 // Capture burst stream: full resolution yuv stream
577 sp<CameraStream> captureStream =
578 CreateStream(
579 jpegMaxSize.width,
580 jpegMaxSize.height,
581 mDevice);
582 ASSERT_NE((void*)NULL, captureStream.get());
583 ASSERT_NO_FATAL_FAILURE(captureStream->SetUp());
584
585 // Create Preview request.
586 CameraMetadata previewRequest, meteringRequest, captureRequest;
587 ASSERT_NO_FATAL_FAILURE(CreateRequests(previewRequest, meteringRequest,
588 captureRequest, previewStream->GetStreamId(),
589 meteringStream->GetStreamId(), captureStream->GetStreamId()));
590
591 // Start preview
592 if (CAMERA_MULTI_STREAM_DEBUGGING) {
593 previewRequest.dump(STDOUT_FILENO);
594 }
595
596 // Generate exposure and sensitivity lists
597 camera_metadata_ro_entry exposureTimeRange =
598 GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);
599 ASSERT_EQ(exposureTimeRange.count, 2u);
600 int64_t minExp = exposureTimeRange.data.i64[0];
601 int64_t maxExp = exposureTimeRange.data.i64[1];
602 ASSERT_GT(maxExp, minExp);
603
604 camera_metadata_ro_entry sensivityRange =
605 GetStaticEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
606 ASSERT_EQ(2u, sensivityRange.count);
607 int32_t minSensitivity = sensivityRange.data.i32[0];
608 int32_t maxSensitivity = sensivityRange.data.i32[1];
609 camera_metadata_ro_entry maxAnalogSenEntry =
610 GetStaticEntry(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY);
611 EXPECT_EQ(1u, maxAnalogSenEntry.count);
612 int32_t maxAnalogSensitivity = maxAnalogSenEntry.data.i32[0];
613 EXPECT_LE(maxAnalogSensitivity, maxSensitivity);
614 // Only test the sensitivity implemented by analog gain.
615 if (maxAnalogSensitivity > maxSensitivity) {
616 // Fallback to maxSensitity
617 maxAnalogSensitivity = maxSensitivity;
618 }
619
620 // sensitivity list, only include the sensitivities that are implemented
621 // purely by analog gain if possible.
622 Vector<int32_t> sensitivities;
623 Vector<int64_t> exposures;
624 count = (maxAnalogSensitivity - minSensitivity + 99) / 100;
625 sensitivities.push_back(minSensitivity);
626 for (size_t i = 1; i < count; i++) {
627 sensitivities.push_back(minSensitivity + i * 100);
628 }
629 sensitivities.push_back(maxAnalogSensitivity);
630 ALOGV("Sensitivity Range: min=%d, max=%d", minSensitivity,
631 maxAnalogSensitivity);
632 int64_t exp = minExp;
633 while (exp < maxExp) {
634 exposures.push_back(exp);
635 exp *= 2;
636 }
637 // Sweep the exposure value for preview, just for visual inspection purpose.
638 uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
639 for (size_t i = 0; i < exposures.size(); i++) {
640 ASSERT_EQ(OK, previewRequest.update(
641 ANDROID_CONTROL_MODE,
642 &cmOff, 1));
643 ASSERT_EQ(OK, previewRequest.update(
644 ANDROID_SENSOR_EXPOSURE_TIME,
645 &exposures[i], 1));
646 ALOGV("Submitting preview request %d with exposure %lld",
647 i, exposures[i]);
648
649 ASSERT_EQ(OK, mDevice->setStreamingRequest(previewRequest));
650
651 // Let preview run 200ms on screen for each exposure time.
652 usleep(PREVIEW_RENDERING_TIME_INTERVAL);
653 }
654
655 size_t requestCount = sensitivities.size();
656 if (requestCount > exposures.size()) {
657 requestCount = exposures.size();
658 }
659
Zhijun He914226c2013-09-08 10:56:24 -0700660 // To maintain the request id uniqueness (preview request id is 0), make burst capture start
661 // request id 1 here.
662 int32_t requestIdStart = 1;
Zhijun He8ef01442013-08-13 17:36:17 -0700663 /**
664 * Submit metering request, set default frame duration to minimal possible
665 * value, we want the capture to run as fast as possible. HAL should adjust
666 * the frame duration to minimal necessary value to support the requested
667 * exposure value if exposure is larger than frame duration.
668 */
669 CaptureBurst(meteringRequest, requestCount, exposures, sensitivities,
Zhijun He914226c2013-09-08 10:56:24 -0700670 meteringStream, minFrameDuration, &requestIdStart);
Zhijun He8ef01442013-08-13 17:36:17 -0700671
672 /**
673 * Submit capture request, set default frame duration to minimal possible
674 * value, we want the capture to run as fast as possible. HAL should adjust
675 * the frame duration to minimal necessary value to support the requested
676 * exposure value if exposure is larger than frame duration.
677 */
678 CaptureBurst(captureRequest, requestCount, exposures, sensitivities,
Zhijun He914226c2013-09-08 10:56:24 -0700679 captureStream, minFrameDuration, &requestIdStart);
Zhijun He8ef01442013-08-13 17:36:17 -0700680
681 ASSERT_EQ(OK, mDevice->clearStreamingRequest());
682}
683
684}
685}
686}