Extend EVS interfaces and data types

This change extends the definition of CameraDesc with an additional
camera metadata field that stores various camera module information;
a sketch of parsing it follows.
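
A client can parse this metadata with libcamera_metadata. Below is a
minimal sketch, assuming the five-field stream configuration packing
used by the VTS tests in this change (cam and kStreamCfgSz come from
that test code and are placeholders here):

    camera_metadata_entry_t streamCfgs;
    if (!find_camera_metadata_entry(
            reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            &streamCfgs)) {
        // Each entry holds width, height, format, direction, frame rate.
        const int32_t *cfg = streamCfgs.data.i32;
        for (size_t idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
            ALOGI("Supported stream: %dx%d, format 0x%x",
                  cfg[0], cfg[1], cfg[2]);
            cfg += kStreamCfgSz;
        }
    }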

IEvsEnumerator is extended with the following new methods (client
usage is sketched below):
    - getCameraList_1_1() returns a list of new camera descriptors.
    - openCamera_1_1() tries to open a camera device with a given
      stream configuration.
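
A rough usage sketch (pEnumerator stands for an IEvsEnumerator 1.1
proxy; error handling is omitted):

    std::vector<CameraDesc> cameras;
    pEnumerator->getCameraList_1_1(
        [&cameras](const hidl_vec<CameraDesc>& list) {
            cameras.assign(list.begin(), list.end());
        });

    Stream nullCfg = {};  // empty config => default resolution and format
    for (auto&& cam : cameras) {
        sp<IEvsCamera_1_1> pCam =
            IEvsCamera_1_1::castFrom(
                pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
            .withDefault(nullptr);
        // ... start a video stream, then pEnumerator->closeCamera(pCam)
    }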

IEvsCameraStream has the following changes (a callback skeleton is
sketched below):
    - deliverFrame_1_1() is added to handle frames in the new
      BufferDesc format.
    - notifyEvent() is renamed to notify().
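
A stream receiver now implements both the V1_0 and V1_1 entry points;
a minimal skeleton, modeled on the FrameHandler changes in this patch
(the class name is illustrative):

    class StreamHandler : public IEvsCameraStream {  // V1_1 interface
      public:
        explicit StreamHandler(sp<IEvsCamera_1_1> camera) : mCamera(camera) {}

        // V1_0 path; unused when the service delivers V1_1 buffers.
        Return<void> deliverFrame(const BufferDesc_1_0&) override {
            return Void();
        }

        Return<void> deliverFrame_1_1(const BufferDesc_1_1& bufDesc) override {
            // Consume the frame, then hand the buffer back to the camera.
            mCamera->doneWithFrame_1_1(bufDesc);
            return Void();
        }

        Return<void> notify(const EvsEvent& event) override {
            if (event.aType == EvsEventType::STREAM_STOPPED) {
                // No more frames will arrive.
            }
            return Void();
        }

      private:
        sp<IEvsCamera_1_1> mCamera;
    };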

IEvsCamera also has the following new methods (a usage sketch follows
this list):
    - getCameraInfo_1_1() returns a new camera descriptor.
    - getParameterList() returns a list of camera control parameters
      supported by the camera device.
    - getIntParameterRange() returns the valid range of parameter
      values.
    - Parameter setter and getter methods are renamed to
      setIntParameter() and getIntParameter().
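
For instance, a client holding the master role could program the
first supported parameter roughly as follows (pCam stands for an
opened IEvsCamera_1_1 proxy; error handling is omitted):

    std::vector<CameraParam> cmds;
    pCam->getParameterList([&cmds](const hidl_vec<CameraParam>& list) {
        cmds.assign(list.begin(), list.end());
    });

    if (!cmds.empty()) {
        EvsResult result = pCam->setMaster();

        int32_t minVal = 0, maxVal = 0, step = 1;
        pCam->getIntParameterRange(cmds[0],
            [&](int32_t min, int32_t max, int32_t s) {
                minVal = min; maxVal = max; step = s;
            });

        int32_t effective = 0;
        pCam->setIntParameter(cmds[0], minVal,
            [&result, &effective](auto status, auto value) {
                result = status;
                effective = value;
            });
        // On success, result == EvsResult::OK and effective == minVal.
    }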

VTS test cases are updated to use the new methods, and the following
test cases are added to validate their behavior:
    - CameraUseStreamConfigToDisplay verifies the end-to-end video
      stream with a stream configuration found in CameraDesc's
      metadata.
    - MultiCameraStreamUseConfig verifies that two clients can start
      and stop video streams on the same underlying camera with the
      same stream configuration.

Bug: 128851019
Test: VtsHalEvsV1_1Target
Change-Id: Ia6b0b94aff869129cb400d0a4c4df91e72682784
Signed-off-by: Changyeon Jo <changyeon@google.com>
diff --git a/automotive/evs/1.1/vts/functional/Android.bp b/automotive/evs/1.1/vts/functional/Android.bp
index 55c50a4..4753933 100644
--- a/automotive/evs/1.1/vts/functional/Android.bp
+++ b/automotive/evs/1.1/vts/functional/Android.bp
@@ -23,6 +23,7 @@
     defaults: ["VtsHalTargetTestDefaults"],
     shared_libs: [
         "libui",
+        "libcamera_metadata",
     ],
     static_libs: [
         "android.hardware.automotive.evs@1.0",
@@ -31,6 +32,7 @@
         "android.hardware.graphics.common@1.0",
         "android.hardware.graphics.common@1.1",
         "android.hardware.graphics.common@1.2",
+        "android.hardware.camera.device@3.2",
     ],
     test_suites: ["general-tests"],
     cflags: [
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.cpp b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
index 1627689..6d53652 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.cpp
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
@@ -138,93 +138,93 @@
 }
 
 
-Return<void> FrameHandler::notifyEvent(const EvsEvent& event) {
-    // Local flag we use to keep track of when the stream is stopping
-    auto type = event.getDiscriminator();
-    if (type == EvsEvent::hidl_discriminator::info) {
-        mLock.lock();
-        mLatestEventDesc = event.info();
-        if (mLatestEventDesc.aType == InfoEventType::STREAM_STOPPED) {
-            // Signal that the last frame has been received and the stream is stopped
-            mRunning = false;
-        } else if (mLatestEventDesc.aType == InfoEventType::PARAMETER_CHANGED) {
-            ALOGD("Camera parameter 0x%X is changed to 0x%X",
-                  mLatestEventDesc.payload[0], mLatestEventDesc.payload[1]);
+Return<void> FrameHandler::deliverFrame_1_1(const BufferDesc_1_1& bufDesc) {
+    const AHardwareBuffer_Desc* pDesc =
+        reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
+    ALOGD("Received a frame from the camera (%p)",
+          bufDesc.buffer.nativeHandle.getNativeHandle());
+
+    // Store the dimensions of the received frame.
+    mFrameWidth = pDesc->width;
+    mFrameHeight = pDesc->height;
+
+    // If we were given an opened display at construction time, then send the received
+    // image back down the camera.
+    if (mDisplay.get()) {
+        // Get the output buffer we'll use to display the imagery
+        BufferDesc_1_0 tgtBuffer = {};
+        mDisplay->getTargetBuffer([&tgtBuffer](const BufferDesc_1_0& buff) {
+                                      tgtBuffer = buff;
+                                  }
+        );
+
+        if (tgtBuffer.memHandle == nullptr) {
+            printf("Didn't get target buffer - frame lost\n");
+            ALOGE("Didn't get requested output buffer -- skipping this frame.");
         } else {
-            ALOGD("Received an event %s", eventToString(mLatestEventDesc.aType));
-        }
-        mLock.unlock();
-        mEventSignal.notify_all();
-    } else {
-        auto bufDesc = event.buffer();
-        const AHardwareBuffer_Desc* pDesc =
-            reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
-        ALOGD("Received a frame from the camera (%p)",
-              bufDesc.buffer.nativeHandle.getNativeHandle());
+            // Copy the contents of bufDesc into tgtBuffer
+            copyBufferContents(tgtBuffer, bufDesc);
 
-        // Store a dimension of a received frame.
-        mFrameWidth = pDesc->width;
-        mFrameHeight = pDesc->height;
-
-        // If we were given an opened display at construction time, then send the received
-        // image back down the camera.
-        if (mDisplay.get()) {
-            // Get the output buffer we'll use to display the imagery
-            BufferDesc_1_0 tgtBuffer = {};
-            mDisplay->getTargetBuffer([&tgtBuffer](const BufferDesc_1_0& buff) {
-                                          tgtBuffer = buff;
-                                      }
-            );
-
-            if (tgtBuffer.memHandle == nullptr) {
-                printf("Didn't get target buffer - frame lost\n");
-                ALOGE("Didn't get requested output buffer -- skipping this frame.");
+            // Send the target buffer back for display
+            Return<EvsResult> result = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
+            if (!result.isOk()) {
+                printf("HIDL error on display buffer (%s)- frame lost\n",
+                       result.description().c_str());
+                ALOGE("Error making the remote function call.  HIDL said %s",
+                      result.description().c_str());
+            } else if (result != EvsResult::OK) {
+                printf("Display reported error - frame lost\n");
+                ALOGE("We encountered error %d when returning a buffer to the display!",
+                      (EvsResult) result);
             } else {
-                // Copy the contents of the of buffer.memHandle into tgtBuffer
-                copyBufferContents(tgtBuffer, bufDesc);
-
-                // Send the target buffer back for display
-                Return<EvsResult> result = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
-                if (!result.isOk()) {
-                    printf("HIDL error on display buffer (%s)- frame lost\n",
-                           result.description().c_str());
-                    ALOGE("Error making the remote function call.  HIDL said %s",
-                          result.description().c_str());
-                } else if (result != EvsResult::OK) {
-                    printf("Display reported error - frame lost\n");
-                    ALOGE("We encountered error %d when returning a buffer to the display!",
-                          (EvsResult) result);
-                } else {
-                    // Everything looks good!
-                    // Keep track so tests or watch dogs can monitor progress
-                    mLock.lock();
-                    mFramesDisplayed++;
-                    mLock.unlock();
-                }
+                // Everything looks good!
+                // Keep track so tests or watch dogs can monitor progress
+                mLock.lock();
+                mFramesDisplayed++;
+                mLock.unlock();
             }
         }
-
-
-        switch (mReturnMode) {
-        case eAutoReturn:
-            // Send the camera buffer back now that the client has seen it
-            ALOGD("Calling doneWithFrame");
-            // TODO:  Why is it that we get a HIDL crash if we pass back the cloned buffer?
-            mCamera->doneWithFrame_1_1(bufDesc);
-            break;
-        case eNoAutoReturn:
-            // Hang onto the buffer handle for now -- the client will return it explicitly later
-            mHeldBuffers.push(bufDesc);
-        }
-
-        mLock.lock();
-        ++mFramesReceived;
-        mLock.unlock();
-        mFrameSignal.notify_all();
-
-        ALOGD("Frame handling complete");
     }
 
+
+    switch (mReturnMode) {
+    case eAutoReturn:
+        // Send the camera buffer back now that the client has seen it
+        ALOGD("Calling doneWithFrame");
+        mCamera->doneWithFrame_1_1(bufDesc);
+        break;
+    case eNoAutoReturn:
+        // Hang onto the buffer handle for now -- the client will return it explicitly later
+        mHeldBuffers.push(bufDesc);
+    }
+
+    mLock.lock();
+    ++mFramesReceived;
+    mLock.unlock();
+    mFrameSignal.notify_all();
+
+    ALOGD("Frame handling complete");
+
+    return Void();
+}
+
+
+Return<void> FrameHandler::notify(const EvsEvent& event) {
+    // Local flag we use to keep track of when the stream is stopping
+    mLock.lock();
+    mLatestEventDesc = event;
+    if (mLatestEventDesc.aType == EvsEventType::STREAM_STOPPED) {
+        // Signal that the last frame has been received and the stream is stopped
+        mRunning = false;
+    } else if (mLatestEventDesc.aType == EvsEventType::PARAMETER_CHANGED) {
+        ALOGD("Camera parameter 0x%X is changed to 0x%X",
+              mLatestEventDesc.payload[0], mLatestEventDesc.payload[1]);
+    } else {
+        ALOGD("Received an event %s", eventToString(mLatestEventDesc.aType));
+    }
+    mLock.unlock();
+    mEventSignal.notify_all();
+
     return Void();
 }
 
@@ -342,18 +342,18 @@
     }
 }
 
-bool FrameHandler::waitForEvent(const InfoEventType aTargetEvent,
-                                InfoEventDesc &eventDesc) {
+bool FrameHandler::waitForEvent(const EvsEventType aTargetEvent,
+                                EvsEvent &event) {
     // Wait until we get an expected parameter change event.
     std::unique_lock<std::mutex> lock(mLock);
     auto now = std::chrono::system_clock::now();
     bool result = mEventSignal.wait_until(lock, now + 5s,
-        [this, aTargetEvent, &eventDesc](){
+        [this, aTargetEvent, &event](){
             bool flag = mLatestEventDesc.aType == aTargetEvent;
             if (flag) {
-                eventDesc.aType = mLatestEventDesc.aType;
-                eventDesc.payload[0] = mLatestEventDesc.payload[0];
-                eventDesc.payload[1] = mLatestEventDesc.payload[1];
+                event.aType = mLatestEventDesc.aType;
+                event.payload[0] = mLatestEventDesc.payload[0];
+                event.payload[1] = mLatestEventDesc.payload[1];
             }
 
             return flag;
@@ -363,21 +363,22 @@
     return !result;
 }
 
-const char *FrameHandler::eventToString(const InfoEventType aType) {
+const char *FrameHandler::eventToString(const EvsEventType aType) {
     switch (aType) {
-        case InfoEventType::STREAM_STARTED:
+        case EvsEventType::STREAM_STARTED:
             return "STREAM_STARTED";
-        case InfoEventType::STREAM_STOPPED:
+        case EvsEventType::STREAM_STOPPED:
             return "STREAM_STOPPED";
-        case InfoEventType::FRAME_DROPPED:
+        case EvsEventType::FRAME_DROPPED:
             return "FRAME_DROPPED";
-        case InfoEventType::TIMEOUT:
+        case EvsEventType::TIMEOUT:
             return "TIMEOUT";
-        case InfoEventType::PARAMETER_CHANGED:
+        case EvsEventType::PARAMETER_CHANGED:
             return "PARAMETER_CHANGED";
-        case InfoEventType::MASTER_RELEASED:
+        case EvsEventType::MASTER_RELEASED:
             return "MASTER_RELEASED";
         default:
             return "Unknown";
     }
 }
+
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.h b/automotive/evs/1.1/vts/functional/FrameHandler.h
index 7f87cb4..e5f1b8f 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.h
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.h
@@ -33,7 +33,6 @@
 using ::android::sp;
 using ::android::hardware::automotive::evs::V1_0::IEvsDisplay;
 using ::android::hardware::automotive::evs::V1_0::EvsResult;
-using ::android::hardware::automotive::evs::V1_0::CameraDesc;
 using BufferDesc_1_0 = ::android::hardware::automotive::evs::V1_0::BufferDesc;
 using BufferDesc_1_1 = ::android::hardware::automotive::evs::V1_1::BufferDesc;
 
@@ -56,6 +55,13 @@
     FrameHandler(android::sp <IEvsCamera> pCamera, CameraDesc cameraInfo,
                  android::sp <IEvsDisplay> pDisplay = nullptr,
                  BufferControlFlag mode = eAutoReturn);
+    virtual ~FrameHandler() {
+        if (mCamera != nullptr) {
+            /* Shut down the camera explicitly */
+            shutdown();
+        }
+    }
+
     void shutdown();
 
     bool startStream();
@@ -67,19 +73,22 @@
     bool isRunning();
 
     void waitForFrameCount(unsigned frameCount);
-    bool waitForEvent(const InfoEventType aTargetEvent,
-                            InfoEventDesc &eventDesc);
+    bool waitForEvent(const EvsEventType aTargetEvent,
+                            EvsEvent &eventDesc);
     void getFramesCounters(unsigned* received, unsigned* displayed);
     void getFrameDimension(unsigned* width, unsigned* height);
 
 private:
-    // Implementation for ::android::hardware::automotive::evs::V1_1::IEvsCameraStream
+    // Implementation for ::android::hardware::automotive::evs::V1_0::IEvsCameraStream
     Return<void> deliverFrame(const BufferDesc_1_0& buffer) override;
-    Return<void> notifyEvent(const EvsEvent& event) override;
+
+    // Implementation for ::android::hardware::automotive::evs::V1_1::IEvsCameraStream
+    Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer) override;
+    Return<void> notify(const EvsEvent& event) override;
 
     // Local implementation details
     bool copyBufferContents(const BufferDesc_1_0& tgtBuffer, const BufferDesc_1_1& srcBuffer);
-    const char *eventToString(const InfoEventType aType);
+    const char *eventToString(const EvsEventType aType);
 
     // Values initialized as startup
     android::sp <IEvsCamera>    mCamera;
@@ -100,7 +109,7 @@
     unsigned                    mFramesDisplayed = 0;   // Simple counter -- rolls over eventually!
     unsigned                    mFrameWidth = 0;
     unsigned                    mFrameHeight = 0;
-    InfoEventDesc               mLatestEventDesc;
+    EvsEvent                    mLatestEventDesc;
 };
 
 
diff --git a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
index a6e4881..1d3fd87 100644
--- a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
+++ b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
@@ -38,8 +38,9 @@
 
 #include "FrameHandler.h"
 
-#include <stdio.h>
-#include <string.h>
+#include <cstdio>
+#include <cstring>
+#include <cstdlib>
 
 #include <hidl/HidlTransportSupport.h>
 #include <hwbinder/ProcessState.h>
@@ -50,8 +51,10 @@
 #include <android/log.h>
 #include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
 #include <android/hardware/automotive/evs/1.1/IEvsCameraStream.h>
-#include <android/hardware/automotive/evs/1.0/IEvsEnumerator.h>
+#include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>
 #include <android/hardware/automotive/evs/1.0/IEvsDisplay.h>
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+#include <system/camera_metadata.h>
 
 #include <VtsHalHidlTargetTestBase.h>
 #include <VtsHalHidlTargetTestEnvBase.h>
@@ -64,13 +67,28 @@
 using ::android::hardware::hidl_handle;
 using ::android::hardware::hidl_string;
 using ::android::sp;
-using ::android::hardware::automotive::evs::V1_0::CameraDesc;
+using ::android::hardware::camera::device::V3_2::Stream;
 using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
 using ::android::hardware::automotive::evs::V1_0::DisplayState;
-using ::android::hardware::automotive::evs::V1_0::IEvsEnumerator;
+using ::android::hardware::graphics::common::V1_0::PixelFormat;
 using IEvsCamera_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsCamera;
 using IEvsCamera_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsCamera;
 
+/*
+ * Plese note that this is different from what is defined in
+ * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
+ * field to store a framerate.
+ */
+const size_t kStreamCfgSz = 5;
+typedef struct {
+    int32_t width;
+    int32_t height;
+    int32_t format;
+    int32_t direction;
+    int32_t framerate;
+} RawStreamConfig;
+
+
 // Test environment for Evs HIDL HAL.
 class EvsHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase {
    public:
@@ -107,15 +125,16 @@
         assert(pEnumerator != nullptr);
 
         // Get the camera list
-        pEnumerator->getCameraList([this](hidl_vec <CameraDesc> cameraList) {
-                                       ALOGI("Camera list callback received %zu cameras",
-                                             cameraList.size());
-                                       cameraInfo.reserve(cameraList.size());
-                                       for (auto&& cam: cameraList) {
-                                           ALOGI("Found camera %s", cam.cameraId.c_str());
-                                           cameraInfo.push_back(cam);
-                                       }
-                                   }
+        pEnumerator->getCameraList_1_1(
+            [this](hidl_vec <CameraDesc> cameraList) {
+                ALOGI("Camera list callback received %zu cameras",
+                      cameraList.size());
+                cameraInfo.reserve(cameraList.size());
+                for (auto&& cam: cameraList) {
+                    ALOGI("Found camera %s", cam.v1.cameraId.c_str());
+                    cameraInfo.push_back(cam);
+                }
+            }
         );
 
         // We insist on at least one camera for EVS to pass any camera tests
@@ -143,19 +162,23 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Open and close each camera twice
     for (auto&& cam: cameraInfo) {
         for (int pass = 0; pass < 2; pass++) {
             sp<IEvsCamera_1_1> pCam =
-                IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+                IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
                 .withDefault(nullptr);
             ASSERT_NE(pCam, nullptr);
 
             // Verify that this camera self-identifies correctly
-            pCam->getCameraInfo([&cam](CameraDesc desc) {
-                                    ALOGD("Found camera %s", desc.cameraId.c_str());
-                                    EXPECT_EQ(cam.cameraId, desc.cameraId);
-                                }
+            pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
+                                        ALOGD("Found camera %s", desc.v1.cameraId.c_str());
+                                        EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
+                                    }
             );
 
             // Explicitly close the camera so resources are released right away
@@ -177,22 +200,26 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Open and close each camera twice
     for (auto&& cam: cameraInfo) {
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
         // Verify that this camera self-identifies correctly
-        pCam->getCameraInfo([&cam](CameraDesc desc) {
-                                ALOGD("Found camera %s", desc.cameraId.c_str());
-                                EXPECT_EQ(cam.cameraId, desc.cameraId);
-                            }
+        pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
+                                    ALOGD("Found camera %s", desc.v1.cameraId.c_str());
+                                    EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
+                                }
         );
 
         sp<IEvsCamera_1_1> pCam2 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, pCam2);
         ASSERT_NE(pCam2, nullptr);
@@ -210,10 +237,10 @@
         pEnumerator->closeCamera(pCam);
 
         // Verify that the second camera instance self-identifies correctly
-        pCam2->getCameraInfo([&cam](CameraDesc desc) {
-                                 ALOGD("Found camera %s", desc.cameraId.c_str());
-                                 EXPECT_EQ(cam.cameraId, desc.cameraId);
-                             }
+        pCam2->getCameraInfo_1_1([&cam](CameraDesc desc) {
+                                     ALOGD("Found camera %s", desc.v1.cameraId.c_str());
+                                     EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
+                                 }
         );
 
         // Close the second camera instance
@@ -235,10 +262,14 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
@@ -303,11 +334,15 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
 
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
@@ -371,6 +406,10 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Request exclusive access to the EVS display
     sp<IEvsDisplay> pDisplay = pEnumerator->openDisplay();
     ASSERT_NE(pDisplay, nullptr);
@@ -378,7 +417,7 @@
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
@@ -439,16 +478,20 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCam0 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
         sp<IEvsCamera_1_1> pCam1 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam1, nullptr);
 
@@ -486,7 +529,6 @@
         nsecs_t runTime = end - firstFrame;
         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
-        printf("Measured camera rate %3.2f fps and %3.2f fps\n", framesPerSecond0, framesPerSecond1);
         ALOGI("Measured camera rate %3.2f fps and %3.2f fps", framesPerSecond0, framesPerSecond1);
         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
@@ -526,14 +568,33 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
+    Return<EvsResult> result = EvsResult::OK;
     for (auto&& cam: cameraInfo) {
         // Create a camera client
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Get the parameter list
+        std::vector<CameraParam> cmds;
+        pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
+                cmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    cmds.push_back(cmd);
+                }
+            }
+        );
+
+        if (cmds.size() < 1) {
+            continue;
+        }
+
         // Set up per-client frame receiver objects which will fire up its own thread
         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                          nullptr,
@@ -547,83 +608,70 @@
         // Ensure the stream starts
         frameHandler->waitForFrameCount(1);
 
-        // Try to program few parameters
-        EvsResult result = EvsResult::OK;
-        int32_t val0 = 100;
-        int32_t val1 = 0;
-
         result = pCam->setMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
-        pCam->setParameter(CameraParam::BRIGHTNESS, val0,
-                           [&result, &val1](auto status, auto effectiveValue) {
-                               result = status;
-                               val1 = effectiveValue;
-                           });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
+        for (auto &cmd : cmds) {
+            // Get a valid parameter value range
+            int32_t minVal, maxVal, step;
+            pCam->getIntParameterRange(
+                cmd,
+                [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
+                    minVal = val0;
+                    maxVal = val1;
+                    step   = val2;
+                }
+            );
 
-        if (result == EvsResult::OK) {
-            pCam->getParameter(CameraParam::BRIGHTNESS,
+            EvsResult result = EvsResult::OK;
+            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
+                // Try to turn off auto-focus
+                int32_t val1 = 0;
+                pCam->getIntParameter(CameraParam::AUTO_FOCUS,
+                                   [&result, &val1](auto status, auto value) {
+                                       result = status;
+                                       if (status == EvsResult::OK) {
+                                          val1 = value;
+                                       }
+                                   });
+                if (val1 != 0) {
+                    pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                       [&result, &val1](auto status, auto effectiveValue) {
+                                           result = status;
+                                           val1 = effectiveValue;
+                                       });
+                    ASSERT_EQ(EvsResult::OK, result);
+                    ASSERT_EQ(val1, 0);
+                }
+            }
+
+            // Try to program a parameter with a random value [minVal, maxVal]
+            int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
+            int32_t val1 = 0;
+
+            // Rounding down
+            val0 = val0 - (val0 % step);
+            pCam->setIntParameter(cmd, val0,
+                               [&result, &val1](auto status, auto effectiveValue) {
+                                   result = status;
+                                   val1 = effectiveValue;
+                               });
+
+            ASSERT_EQ(EvsResult::OK, result);
+
+            pCam->getIntParameter(cmd,
                                [&result, &val1](auto status, auto value) {
                                    result = status;
                                    if (status == EvsResult::OK) {
                                       val1 = value;
                                    }
                                });
-            ASSERT_TRUE(result == EvsResult::OK ||
-                        result == EvsResult::INVALID_ARG);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
-        }
-
-        val0 = 80;
-        val1 = 0;
-        pCam->setParameter(CameraParam::CONTRAST, val0,
-                           [&result, &val1](auto status, auto effectiveValue) {
-                               result = status;
-                               val1 = effectiveValue;
-                           });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
-
-        if (result == EvsResult::OK) {
-            pCam->getParameter(CameraParam::CONTRAST,
-                               [&result, &val1](auto status, auto value) {
-                                   result = status;
-                                   if (status == EvsResult::OK) {
-                                      val1 = value;
-                                   }
-                               });
-            ASSERT_TRUE(result == EvsResult::OK ||
-                        result == EvsResult::INVALID_ARG);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
-        }
-
-        val0 = 300;
-        val1 = 0;
-        pCam->setParameter(CameraParam::ABSOLUTE_ZOOM, val0,
-                           [&result, &val1](auto status, auto effectiveValue) {
-                               result = status;
-                               val1 = effectiveValue;
-                           });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
-
-        if (result == EvsResult::OK) {
-            pCam->getParameter(CameraParam::ABSOLUTE_ZOOM,
-                               [&result, &val1](auto status, auto value) {
-                                   result = status;
-                                   if (status == EvsResult::OK) {
-                                      val1 = value;
-                                   }
-                               });
-            ASSERT_TRUE(result == EvsResult::OK ||
-                        result == EvsResult::INVALID_ARG);
+            ASSERT_EQ(EvsResult::OK, result);
             ASSERT_EQ(val0, val1) << "Values are not matched.";
         }
 
         result = pCam->unsetMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Shutdown
         frameHandler->shutdown();
@@ -650,15 +698,19 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCamMaster =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamMaster, nullptr);
         sp<IEvsCamera_1_1> pCamNonMaster =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamNonMaster, nullptr);
 
@@ -698,15 +750,15 @@
 
         // Non-master client expects to receive a master role relesed
         // notification.
-        InfoEventDesc aNotification = {};
+        EvsEvent aNotification = {};
 
         // Release a master role.
         pCamMaster->unsetMaster();
 
         // Verify a change notification.
-        frameHandlerNonMaster->waitForEvent(InfoEventType::MASTER_RELEASED, aNotification);
-        ASSERT_EQ(InfoEventType::MASTER_RELEASED,
-                  static_cast<InfoEventType>(aNotification.aType));
+        frameHandlerNonMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        ASSERT_EQ(EvsEventType::MASTER_RELEASED,
+                  static_cast<EvsEventType>(aNotification.aType));
 
         // Non-master becomes a master.
         result = pCamNonMaster->setMaster();
@@ -720,9 +772,9 @@
         frameHandlerNonMaster->shutdown();
 
         // Verify a change notification.
-        frameHandlerMaster->waitForEvent(InfoEventType::MASTER_RELEASED, aNotification);
-        ASSERT_EQ(InfoEventType::MASTER_RELEASED,
-                  static_cast<InfoEventType>(aNotification.aType));
+        frameHandlerMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        ASSERT_EQ(EvsEventType::MASTER_RELEASED,
+                  static_cast<EvsEventType>(aNotification.aType));
 
         // Closing another stream.
         frameHandlerMaster->shutdown();
@@ -752,18 +804,46 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCamMaster =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamMaster, nullptr);
         sp<IEvsCamera_1_1> pCamNonMaster =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamNonMaster, nullptr);
 
+        // Get the parameter list
+        std::vector<CameraParam> camMasterCmds, camNonMasterCmds;
+        pCamMaster->getParameterList([&camMasterCmds](hidl_vec<CameraParam> cmdList) {
+                camMasterCmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    camMasterCmds.push_back(cmd);
+                }
+            }
+        );
+
+        pCamNonMaster->getParameterList([&camNonMasterCmds](hidl_vec<CameraParam> cmdList) {
+                camNonMasterCmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    camNonMasterCmds.push_back(cmd);
+                }
+            }
+        );
+
+        if (camMasterCmds.size() < 1 ||
+            camNonMasterCmds.size() < 1) {
+            // Skip a camera device if it does not support any parameter.
+            continue;
+        }
+
         // Set up per-client frame receiver objects which will fire up its own thread
         sp<FrameHandler> frameHandlerMaster =
             new FrameHandler(pCamMaster, cam,
@@ -778,11 +858,11 @@
 
         // Set one client as the master
         EvsResult result = pCamMaster->setMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Try to set another client as the master.
         result = pCamNonMaster->setMaster();
-        ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
+        ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
 
         // Start the camera's video stream via a master client.
         bool startResult = frameHandlerMaster->startStream();
@@ -798,131 +878,168 @@
         // Ensure the stream starts
         frameHandlerNonMaster->waitForFrameCount(1);
 
-        // Try to program CameraParam::BRIGHTNESS
-        int32_t val0 = 100;
+        int32_t val0 = 0;
         int32_t val1 = 0;
+        for (auto &cmd : camMasterCmds) {
+            // Get a valid parameter value range
+            int32_t minVal, maxVal, step;
+            pCamMaster->getIntParameterRange(
+                cmd,
+                [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
+                    minVal = val0;
+                    maxVal = val1;
+                    step   = val2;
+                }
+            );
 
-        pCamMaster->setParameter(CameraParam::BRIGHTNESS, val0,
-                                 [&result, &val1](auto status, auto effectiveValue) {
-                                     result = status;
-                                     val1 = effectiveValue;
-                                 });
-        ASSERT_TRUE(result == EvsResult::OK ||            // Succeeded to program
-                    result == EvsResult::INVALID_ARG);    // Camera parameter is not supported
+            EvsResult result = EvsResult::OK;
+            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
+                // Try to turn off auto-focus
+                int32_t val1 = 1;
+                pCamMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &val1](auto status, auto effectiveValue) {
+                                       result = status;
+                                       val1 = effectiveValue;
+                                   });
+                ASSERT_EQ(EvsResult::OK, result);
+                ASSERT_EQ(val1, 0);
+            }
 
-        // Non-master client expects to receive a parameter change notification
-        // whenever a master client adjusts it.
-        InfoEventDesc aNotification = {};
+            // Try to program a parameter
+            val0 = minVal + (std::rand() % (maxVal - minVal));
 
-        pCamMaster->getParameter(CameraParam::BRIGHTNESS,
-                                 [&result, &val1](auto status, auto value) {
-                                     result = status;
-                                     if (status == EvsResult::OK) {
-                                        val1 = value;
-                                     }
-                                 });
-        ASSERT_TRUE(result == EvsResult::OK ||            // Succeeded to program
-                    result == EvsResult::INVALID_ARG);    // Camera parameter is not supported
-        if (result == EvsResult::OK) {
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
+            // Rounding down
+            val0 = val0 - (val0 % step);
+            pCamMaster->setIntParameter(cmd, val0,
+                                     [&result, &val1](auto status, auto effectiveValue) {
+                                         result = status;
+                                         val1 = effectiveValue;
+                                     });
+            ASSERT_EQ(EvsResult::OK, result);
 
-            // Verify a change notification
-            frameHandlerNonMaster->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_EQ(InfoEventType::PARAMETER_CHANGED,
-                      static_cast<InfoEventType>(aNotification.aType));
-            ASSERT_EQ(CameraParam::BRIGHTNESS,
-                      static_cast<CameraParam>(aNotification.payload[0]));
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
-        }
+            // Wait a moment
+            sleep(1);
 
-        // Try to program CameraParam::CONTRAST
-        val0 = 80;
-        val1 = 0;
-        pCamMaster->setParameter(CameraParam::CONTRAST, val0,
-                                 [&result, &val1](auto status, auto effectiveValue) {
-                                     result = status;
-                                     val1 = effectiveValue;
-                                 });
-        ASSERT_TRUE(result == EvsResult::OK ||            // Succeeded to program
-                    result == EvsResult::INVALID_ARG);    // Camera parameter is not supported
+            // Non-master client expects to receive a parameter change notification
+            // whenever a master client adjusts it.
+            EvsEvent aNotification = {};
 
-        if (result == EvsResult::OK) {
-            pCamMaster->getParameter(CameraParam::CONTRAST,
+            pCamMaster->getIntParameter(cmd,
                                      [&result, &val1](auto status, auto value) {
                                          result = status;
                                          if (status == EvsResult::OK) {
                                             val1 = value;
                                          }
                                      });
-            ASSERT_TRUE(result == EvsResult::OK);
+            ASSERT_EQ(EvsResult::OK, result);
             ASSERT_EQ(val0, val1) << "Values are not matched.";
 
-
             // Verify a change notification
-            frameHandlerNonMaster->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_EQ(InfoEventType::PARAMETER_CHANGED,
-                      static_cast<InfoEventType>(aNotification.aType));
-            ASSERT_EQ(CameraParam::CONTRAST,
+            frameHandlerNonMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+                      static_cast<EvsEventType>(aNotification.aType));
+            ASSERT_EQ(cmd,
                       static_cast<CameraParam>(aNotification.payload[0]));
             ASSERT_EQ(val1,
                       static_cast<int32_t>(aNotification.payload[1]));
         }
 
         // Try to adjust a parameter via non-master client
-        pCamNonMaster->setParameter(CameraParam::CONTRAST, val0,
+        pCamNonMaster->setIntParameter(camNonMasterCmds[0], val0,
                                     [&result, &val1](auto status, auto effectiveValue) {
                                         result = status;
                                         val1 = effectiveValue;
                                     });
-        ASSERT_TRUE(result == EvsResult::INVALID_ARG);
+        ASSERT_EQ(EvsResult::INVALID_ARG, result);
 
         // Non-master client attemps to be a master
         result = pCamNonMaster->setMaster();
-        ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
+        ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
 
         // Master client retires from a master role
         result = pCamMaster->unsetMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Try to adjust a parameter after being retired
-        pCamMaster->setParameter(CameraParam::BRIGHTNESS, val0,
+        pCamMaster->setIntParameter(camMasterCmds[0], val0,
                                  [&result, &val1](auto status, auto effectiveValue) {
                                      result = status;
                                      val1 = effectiveValue;
                                  });
-        ASSERT_TRUE(result == EvsResult::INVALID_ARG);
+        ASSERT_EQ(EvsResult::INVALID_ARG, result);
 
         // Non-master client becomes a master
         result = pCamNonMaster->setMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Try to adjust a parameter via new master client
-        pCamNonMaster->setParameter(CameraParam::BRIGHTNESS, val0,
-                                    [&result, &val1](auto status, auto effectiveValue) {
-                                        result = status;
-                                        val1 = effectiveValue;
-                                    });
-        ASSERT_TRUE(result == EvsResult::OK ||            // Succeeded to program
-                    result == EvsResult::INVALID_ARG);    // Camera parameter is not supported
+        for (auto &cmd : camNonMasterCmds) {
+            // Get a valid parameter value range
+            int32_t minVal, maxVal, step;
+            pCamNonMaster->getIntParameterRange(
+                cmd,
+                [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
+                    minVal = val0;
+                    maxVal = val1;
+                    step   = val2;
+                }
+            );
 
-        // Wait a moment
-        sleep(1);
+            EvsResult result = EvsResult::OK;
+            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
+                // Try to turn off auto-focus
+                int32_t val1 = 1;
+                pCamNonMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &val1](auto status, auto effectiveValue) {
+                                       result = status;
+                                       val1 = effectiveValue;
+                                   });
+                ASSERT_EQ(EvsResult::OK, result);
+                ASSERT_EQ(val1, 0);
+            }
 
-        // Verify a change notification
-        if (result == EvsResult::OK) {
-            frameHandlerMaster->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_EQ(static_cast<InfoEventType>(aNotification.aType),
-                      InfoEventType::PARAMETER_CHANGED);
-            ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
-                      CameraParam::BRIGHTNESS);
+            // Try to program a parameter
+            val0 = minVal + (std::rand() % (maxVal - minVal));
+
+            // Rounding down
+            val0 = val0 - (val0 % step);
+            pCamNonMaster->setIntParameter(cmd, val0,
+                                        [&result, &val1](auto status, auto effectiveValue) {
+                                            result = status;
+                                            val1 = effectiveValue;
+                                        });
+            ASSERT_EQ(EvsResult::OK, result);
+
+            // Wait a moment
+            sleep(1);
+
+            // Non-master client expects to receive a parameter change notification
+            // whenever a master client adjusts it.
+            EvsEvent aNotification = {};
+
+            pCamNonMaster->getIntParameter(cmd,
+                                        [&result, &val1](auto status, auto value) {
+                                            result = status;
+                                            if (status == EvsResult::OK) {
+                                               val1 = value;
+                                            }
+                                        });
+            ASSERT_EQ(EvsResult::OK, result);
+            ASSERT_EQ(val0, val1) << "Values are not matched.";
+
+            // Verify a change notification
+            frameHandlerMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+                      static_cast<EvsEventType>(aNotification.aType));
+            ASSERT_EQ(cmd,
+                      static_cast<CameraParam>(aNotification.payload[0]));
             ASSERT_EQ(val1,
                       static_cast<int32_t>(aNotification.payload[1]));
         }
 
         // New master retires from a master role
         result = pCamNonMaster->unsetMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Shutdown
         frameHandlerMaster->shutdown();
@@ -943,9 +1060,18 @@
 TEST_F(EvsHidlTest, HighPriorityCameraClient) {
     ALOGI("Starting HighPriorityCameraClient test");
 
+    if (mIsHwModule) {
+        // This test is not for HW module implementation.
+        return;
+    }
+
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Request exclusive access to the EVS display
     sp<IEvsDisplay> pDisplay = pEnumerator->openDisplay();
     ASSERT_NE(pDisplay, nullptr);
@@ -954,15 +1080,38 @@
     for (auto&& cam: cameraInfo) {
         // Create two clients
         sp<IEvsCamera_1_1> pCam0 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
         sp<IEvsCamera_1_1> pCam1 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam1, nullptr);
 
+        // Get the parameter list; this test will use the first command in both
+        // lists.
+        std::vector<CameraParam> cam0Cmds, cam1Cmds;
+        pCam0->getParameterList([&cam0Cmds](hidl_vec<CameraParam> cmdList) {
+                cam0Cmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    cam0Cmds.push_back(cmd);
+                }
+            }
+        );
+
+        pCam1->getParameterList([&cam1Cmds](hidl_vec<CameraParam> cmdList) {
+                cam1Cmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    cam1Cmds.push_back(cmd);
+                }
+            }
+        );
+        if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
+            // Cannot execute this test.
+            return;
+        }
+
         // Set up a frame receiver object which will fire up its own thread.
         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
                                                           pDisplay,
@@ -982,67 +1131,121 @@
         frameHandler0->waitForFrameCount(1);
         frameHandler1->waitForFrameCount(1);
 
-        // Client 1 becomes a master and programs a brightness.
+        // Client 1 becomes a master and programs a parameter.
         EvsResult result = EvsResult::OK;
-        int32_t val0 = 100;
+        // Get a valid parameter value range
+        int32_t minVal, maxVal, step;
+        pCam1->getIntParameterRange(
+            cam1Cmds[0],
+            [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
+                minVal = val0;
+                maxVal = val1;
+                step   = val2;
+            }
+        );
+
+        if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
+            // Try to turn off auto-focus
+            int32_t val1 = 0;
+            pCam1->getIntParameter(CameraParam::AUTO_FOCUS,
+                               [&result, &val1](auto status, auto value) {
+                                   result = status;
+                                   if (status == EvsResult::OK) {
+                                      val1 = value;
+                                   }
+                               });
+            if (val1 != 0) {
+                pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &val1](auto status, auto effectiveValue) {
+                                       result = status;
+                                       val1 = effectiveValue;
+                                   });
+                ASSERT_EQ(EvsResult::OK, result);
+                ASSERT_EQ(val1, 0);
+            }
+        }
+
+        // Try to program a parameter with a random value [minVal, maxVal]
+        int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
         int32_t val1 = 0;
 
-        result = pCam1->setMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        // Rounding down
+        val0 = val0 - (val0 % step);
 
-        pCam1->setParameter(CameraParam::BRIGHTNESS, val0,
+        result = pCam1->setMaster();
+        ASSERT_EQ(EvsResult::OK, result);
+
+        pCam1->setIntParameter(cam1Cmds[0], val0,
                             [&result, &val1](auto status, auto effectiveValue) {
                                 result = status;
                                 val1 = effectiveValue;
                             });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
-
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Verify a change notification
-        InfoEventDesc aNotification = {};
-        if (result == EvsResult::OK) {
-            bool timeout =
-                frameHandler0->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_FALSE(timeout) << "Expected event does not arrive";
-            ASSERT_EQ(static_cast<InfoEventType>(aNotification.aType),
-                      InfoEventType::PARAMETER_CHANGED);
-            ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
-                      CameraParam::BRIGHTNESS);
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
-        }
+        EvsEvent aNotification = {};
+        bool timeout =
+            frameHandler0->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+        ASSERT_FALSE(timeout) << "Expected event does not arrive";
+        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+                  EvsEventType::PARAMETER_CHANGED);
+        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
+                  cam1Cmds[0]);
+        ASSERT_EQ(val1,
+                  static_cast<int32_t>(aNotification.payload[1]));
 
         // Client 0 steals a master role
         ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
 
-        frameHandler1->waitForEvent(InfoEventType::MASTER_RELEASED, aNotification);
-        ASSERT_EQ(static_cast<InfoEventType>(aNotification.aType),
-                  InfoEventType::MASTER_RELEASED);
+        frameHandler1->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+                  EvsEventType::MASTER_RELEASED);
 
-        // Client 0 programs a brightness
-        val0 = 50;
+        // Client 0 programs a parameter
+        val0 = minVal + (std::rand() % (maxVal - minVal));
         val1 = 0;
-        pCam0->setParameter(CameraParam::BRIGHTNESS, val0,
+
+        // Rounding down
+        val0 = val0 - (val0 % step);
+
+        if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
+            // Try to turn off auto-focus
+            int32_t val1 = 0;
+            pCam0->getIntParameter(CameraParam::AUTO_FOCUS,
+                               [&result, &val1](auto status, auto value) {
+                                   result = status;
+                                   if (status == EvsResult::OK) {
+                                      val1 = value;
+                                   }
+                               });
+            if (val1 != 0) {
+                pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &val1](auto status, auto effectiveValue) {
+                                       result = status;
+                                       val1 = effectiveValue;
+                                   });
+                ASSERT_EQ(EvsResult::OK, result);
+                ASSERT_EQ(val1, 0);
+            }
+        }
+
+        pCam0->setIntParameter(cam0Cmds[0], val0,
                             [&result, &val1](auto status, auto effectiveValue) {
                                 result = status;
                                 val1 = effectiveValue;
                             });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Verify a change notification
-        if (result == EvsResult::OK) {
-            bool timeout =
-                frameHandler1->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_FALSE(timeout) << "Expected event does not arrive";
-            ASSERT_EQ(static_cast<InfoEventType>(aNotification.aType),
-                      InfoEventType::PARAMETER_CHANGED);
-            ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
-                      CameraParam::BRIGHTNESS);
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
-        }
+        timeout =
+            frameHandler1->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+        ASSERT_FALSE(timeout) << "Expected event did not arrive";
+        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+                  EvsEventType::PARAMETER_CHANGED);
+        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
+                  cam0Cmds[0]);
+        ASSERT_EQ(val1,
+                  static_cast<int32_t>(aNotification.payload[1]));
 
         // Turn off the display (yes, before the stream stops -- it should be handled)
         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
@@ -1061,6 +1264,248 @@
 }
 
 
+/*
+ * CameraUseStreamConfigToDisplay:
+ * End-to-end test of data flowing from the camera to the display.  Similar to
+ * the CameraToDisplayRoundTrip test case, but this case retrieves the available
+ * stream configurations from EVS and uses one of them to start a video stream.
+ */
+TEST_F(EvsHidlTest, CameraUseStreamConfigToDisplay) {
+    ALOGI("Starting CameraUseStreamConfigToDisplay test");
+
+    // Get the camera list
+    loadCameraList();
+
+    // Request exclusive access to the EVS display
+    sp<IEvsDisplay> pDisplay = pEnumerator->openDisplay();
+    ASSERT_NE(pDisplay, nullptr);
+
+    // Test each reported camera
+    for (auto&& cam: cameraInfo) {
+        // Choose a configuration with a frame rate of at least minReqFps
+        // (a factored-out sketch of this scan follows this test).
+        Stream targetCfg = {};
+        const int32_t minReqFps = 15;
+        int32_t maxArea = 0;
+        camera_metadata_entry_t streamCfgs;
+        bool foundCfg = false;
+        if (!find_camera_metadata_entry(
+                 reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
+                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                 &streamCfgs)) {
+            // Stream configurations are found in metadata
+            RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
+            for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
+                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+                    ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
+
+                    if (ptr->width * ptr->height > maxArea &&
+                        ptr->framerate >= minReqFps) {
+                        targetCfg.width = ptr->width;
+                        targetCfg.height = ptr->height;
+
+                        maxArea = ptr->width * ptr->height;
+                        foundCfg = true;
+                    }
+                }
+                ++ptr;
+            }
+        }
+        targetCfg.format =
+            static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
+
+        if (!foundCfg) {
+            // Current EVS camera does not provide stream configurations in the
+            // metadata.
+            continue;
+        }
+
+        sp<IEvsCamera_1_1> pCam =
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
+            .withDefault(nullptr);
+        ASSERT_NE(pCam, nullptr);
+
+        // Set up a frame receiver object which will fire up its own thread.
+        sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
+                                                         pDisplay,
+                                                         FrameHandler::eAutoReturn);
+
+        // Activate the display
+        pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
+
+        // Start the camera's video stream
+        bool startResult = frameHandler->startStream();
+        ASSERT_TRUE(startResult);
+
+        // Wait a while to let the data flow
+        static const int kSecondsToWait = 5;
+        const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
+                                 kMaxStreamStartMilliseconds;
+        const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
+                                               kSecondsToMilliseconds;
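+        // For illustration (the constant values here are assumptions, not
+        // taken from this change): with kSecondsToMilliseconds = 1000,
+        // kMaxStreamStartMilliseconds = 500 and kMinimumFramesPerSecond = 10,
+        // streamTimeMs = 4500 and minimumFramesExpected = 45.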
+        sleep(kSecondsToWait);
+        unsigned framesReceived = 0;
+        unsigned framesDisplayed = 0;
+        frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
+        EXPECT_EQ(framesReceived, framesDisplayed);
+        EXPECT_GE(framesDisplayed, minimumFramesExpected);
+
+        // Turn off the display (yes, before the stream stops -- it should be handled)
+        pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
+
+        // Shut down the streamer
+        frameHandler->shutdown();
+
+        // Explicitly release the camera
+        pEnumerator->closeCamera(pCam);
+    }
+
+    // Explicitly release the display
+    pEnumerator->closeDisplay(pDisplay);
+}
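+
+/*
+ * Editorial sketch (not exercised by any test): the stream-configuration scan
+ * used by CameraUseStreamConfigToDisplay above and MultiCameraStreamUseConfig
+ * below could be factored into a helper along these lines.  The helper name,
+ * its parameter types, and the assumption that RawStreamConfig, kStreamCfgSz
+ * and the CameraDesc alias are visible at this point in the file are
+ * assumptions made for illustration, not part of this change.
+ *
+ *   static bool selectLargestRgbaConfig(CameraDesc& cam,
+ *                                       int32_t minReqFps,
+ *                                       Stream* targetCfg) {
+ *       camera_metadata_entry_t streamCfgs;
+ *       if (find_camera_metadata_entry(
+ *                reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
+ *                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ *                &streamCfgs) != 0) {
+ *           // No stream configurations are advertised in the metadata.
+ *           return false;
+ *       }
+ *
+ *       bool found = false;
+ *       int32_t maxArea = 0;
+ *       RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
+ *       for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz, ++ptr) {
+ *           if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+ *               ptr->format == HAL_PIXEL_FORMAT_RGBA_8888 &&
+ *               ptr->framerate >= minReqFps &&
+ *               ptr->width * ptr->height > maxArea) {
+ *               targetCfg->width  = ptr->width;
+ *               targetCfg->height = ptr->height;
+ *               maxArea = ptr->width * ptr->height;
+ *               found = true;
+ *           }
+ *       }
+ *       targetCfg->format = static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
+ *       return found;
+ *   }
+ */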
+
+
+/*
+ * MultiCameraStreamUseConfig:
+ * Verify that each client can start and stop video streams on the same
+ * underlying camera with the same stream configuration.
+ */
+TEST_F(EvsHidlTest, MultiCameraStreamUseConfig) {
+    ALOGI("Starting MultiCameraStream test");
+
+    if (mIsHwModule) {
+        // This test does not apply to HW module implementations.
+        return;
+    }
+
+    // Get the camera list
+    loadCameraList();
+
+    // Test each reported camera
+    for (auto&& cam: cameraInfo) {
+        // Choose a configuration with a frame rate of at least minReqFps.
+        Stream targetCfg = {};
+        const int32_t minReqFps = 15;
+        int32_t maxArea = 0;
+        camera_metadata_entry_t streamCfgs;
+        bool foundCfg = false;
+        if (!find_camera_metadata_entry(
+                 reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
+                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                 &streamCfgs)) {
+            // Stream configurations are found in metadata
+            RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
+            for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
+                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+                    ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
+
+                    if (ptr->width * ptr->height > maxArea &&
+                        ptr->framerate >= minReqFps) {
+                        targetCfg.width = ptr->width;
+                        targetCfg.height = ptr->height;
+
+                        maxArea = ptr->width * ptr->height;
+                        foundCfg = true;
+                    }
+                }
+                ++ptr;
+            }
+        }
+        targetCfg.format =
+            static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
+
+        if (!foundCfg) {
+            ALOGI("Device %s does not provide a list of supported stream configurations, skipped",
+                  cam.v1.cameraId.c_str());
+
+            continue;
+        }
+
+        // Create the first camera client with a selected stream configuration.
+        sp<IEvsCamera_1_1> pCam0 =
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
+            .withDefault(nullptr);
+        ASSERT_NE(pCam0, nullptr);
+
+        // Try to create the second camera client with a different stream
+        // configuration.
+        int32_t id = targetCfg.id;
+        targetCfg.id += 1;  // EVS manager sees only the stream id.
+        sp<IEvsCamera_1_1> pCam1 =
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
+            .withDefault(nullptr);
+        ASSERT_EQ(pCam1, nullptr);
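+        // The underlying camera is already open with targetCfg; a request that
+        // names a different stream id is treated as a different configuration
+        // and is expected to be rejected, hence the null check above.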
+
+        // Try again with the same stream configuration.
+        targetCfg.id = id;
+        pCam1 =
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
+            .withDefault(nullptr);
+        ASSERT_NE(pCam1, nullptr);
+
+        // Set up per-client frame receiver objects, each of which will fire up its own thread
+        sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
+                                                          nullptr,
+                                                          FrameHandler::eAutoReturn);
+        ASSERT_NE(frameHandler0, nullptr);
+
+        sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
+                                                          nullptr,
+                                                          FrameHandler::eAutoReturn);
+        ASSERT_NE(frameHandler1, nullptr);
+
+        // Start the camera's video stream via both clients
+        bool startResult = false;
+        startResult = frameHandler0->startStream() &&
+                      frameHandler1->startStream();
+        ASSERT_TRUE(startResult);
+
+        // Ensure the stream starts
+        frameHandler0->waitForFrameCount(1);
+        frameHandler1->waitForFrameCount(1);
+
+        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
+
+        // Wait a bit, then ensure both clients get at least the required minimum number of frames
+        sleep(5);
+        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
+        unsigned framesReceived0 = 0, framesReceived1 = 0;
+        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
+        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
+        framesReceived0 = framesReceived0 - 1;    // Back out the first frame we already waited for
+        framesReceived1 = framesReceived1 - 1;    // Back out the first frame we already waited for
+        nsecs_t runTime = end - firstFrame;
+        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
+        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
+        ALOGI("Measured camera rate %3.2f fps and %3.2f fps", framesPerSecond0, framesPerSecond1);
+        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
+        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
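+        // Worked example of the rate check above (numbers are hypothetical):
+        // if a client received 76 frames over a 5.0 s window, the first frame
+        // is backed out and, with kNanoToSeconds presumably being 1e-9
+        // (nanoseconds to seconds), framesPerSecond = 75 / (5e9 * 1e-9) = 15,
+        // which satisfies any kMinimumFramesPerSecond at or below 15.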
+
+        // Shut down one client
+        frameHandler0->shutdown();
+
+        // Read frame counters again
+        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
+        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
+
+        // Wait a bit again
+        sleep(5);
+        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
+        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
+        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
+        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
+        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
+
+        // Shut down the other client
+        frameHandler1->shutdown();
+
+        // Explicitly release the cameras
+        pEnumerator->closeCamera(pCam0);
+        pEnumerator->closeCamera(pCam1);
+    }
+}
+
+
 int main(int argc, char** argv) {
     ::testing::AddGlobalTestEnvironment(EvsHidlEnvironment::Instance());
     ::testing::InitGoogleTest(&argc, argv);