Prerequisite changes for EVS multi-camera support

This change modifies existing methods and data types for upcoming EVS
multi-camera support.
- deliverFrame_1_1() and doneWithFrame_1_1() are modified to take
  multiple buffer descriptors.
- setIntParameter() and getIntParameter() are changed to return multiple
  parameter values.
- Device ID and timestamp fields are added to BufferDesc.
- EvsEvent is renamed to EvsEventDesc, and a device ID field is added.
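
For illustration, a 1.1 client is expected to handle the vectorized calls
roughly as follows (a minimal sketch, not part of this change; FrameClient,
consume(), and the mCamera/pCam handles are hypothetical names):

    // Frames arrive as a vector with one BufferDesc_1_1 per backing
    // physical camera; deviceId and timestamp identify each buffer.
    Return<void> FrameClient::deliverFrame_1_1(
            const hidl_vec<BufferDesc_1_1>& buffers) {
        for (const auto& buf : buffers) {
            consume(buf);  // hypothetical per-frame handling
        }
        // Buffers are returned together, mirroring how they were delivered.
        mCamera->doneWithFrame_1_1(buffers);
        return Void();
    }

    // Parameter calls now yield one effective value per backing device.
    pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
        [](EvsResult result, const hidl_vec<int32_t>& effectiveValues) {
            // effectiveValues.size() equals the number of backing cameras.
        });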

Bug: 142275664
Test: VtsHalEvsV1_1TargetTest
Change-Id: I0415b2cb0642d1377f4d23a4e154080a66c81187
Signed-off-by: Changyeon Jo <changyeon@google.com>
diff --git a/automotive/evs/1.1/IEvsCamera.hal b/automotive/evs/1.1/IEvsCamera.hal
index 975b6c6..acc2eec 100644
--- a/automotive/evs/1.1/IEvsCamera.hal
+++ b/automotive/evs/1.1/IEvsCamera.hal
@@ -51,7 +51,7 @@
     resumeVideoStream() generates (EvsResult result);
 
     /**
-     * Returns a frame that was delivered by to the IEvsCameraStream.
+     * Returns frames that were delivered to the IEvsCameraStream.
      *
      * When done consuming a frame delivered to the IEvsCameraStream
      * interface, it must be returned to the IEvsCamera for reuse.
@@ -59,10 +59,10 @@
      * as one), and if the supply is exhausted, no further frames may be
      * delivered until a buffer is returned.
      *
-     * @param  buffer A buffer to be returned.
+     * @param  buffer Buffers to be returned.
      * @return result Return EvsResult::OK if this call is successful.
      */
-    doneWithFrame_1_1(BufferDesc buffer) generates (EvsResult result);
+    doneWithFrame_1_1(vec<BufferDesc> buffer) generates (EvsResult result);
 
     /**
      * Requests to be a master client.
@@ -127,8 +127,13 @@
         generates (int32_t min, int32_t max, int32_t step);
 
     /**
-     * Requests to set a camera parameter.  Only a request from the master
-     * client will be processed successfully.
+     * Requests to set a camera parameter.
+     *
+     * Only a request from the master client will be processed successfully.
+     * When this method is called on a logical camera device, the request is
+     * forwarded to each backing physical device.  If programming any physical
+     * device fails, an error code is returned along with as many effective
+     * values as there are backing camera devices.
      *
      * @param  id             The identifier of camera parameter, CameraParam enum.
      *         value          A desired parameter value.
@@ -138,21 +143,22 @@
      *                        parameter is not supported.
      *                        EvsResult::UNDERLYING_SERVICE_ERROR if it fails to
      *                        program a value by any other reason.
-     *         effectiveValue A programmed parameter value.  This may differ
+     *         effectiveValue Programmed parameter values.  These may differ
      *                        from what the client gives if, for example, the
      *                        driver does not support a target parameter.
      */
     setIntParameter(CameraParam id, int32_t value)
-        generates (EvsResult result, int32_t effectiveValue);
+        generates (EvsResult result, vec<int32_t> effectiveValue);
 
     /**
-     * Retrieves a value of given camera parameter.
+     * Retrieves values of a given camera parameter.
      *
      * @param  id     The identifier of camera parameter, CameraParam enum.
      * @return result EvsResult::OK if it succeeds to read a parameter.
      *                EvsResult::INVALID_ARG if either a requested parameter is
      *                not supported.
-     *         value  A value of requested camera parameter.
+     *         value  Values of the requested camera parameter; as many values
+     *                as there are backing camera devices.
      */
-    getIntParameter(CameraParam id) generates(EvsResult result, int32_t value);
+    getIntParameter(CameraParam id) generates(EvsResult result, vec<int32_t> value);
 };
diff --git a/automotive/evs/1.1/IEvsCameraStream.hal b/automotive/evs/1.1/IEvsCameraStream.hal
index 9e4ea19..aa35c62 100644
--- a/automotive/evs/1.1/IEvsCameraStream.hal
+++ b/automotive/evs/1.1/IEvsCameraStream.hal
@@ -18,7 +18,7 @@
 
 import @1.0::IEvsCameraStream;
 import @1.1::BufferDesc;
-import @1.1::EvsEvent;
+import @1.1::EvsEventDesc;
 
 /**
  * Implemented on client side to receive asynchronous streaming event deliveries.
@@ -26,7 +26,7 @@
 interface IEvsCameraStream extends @1.0::IEvsCameraStream {
 
     /**
-     * Receives calls from the HAL each time a video frame is ready for inspection.
+     * Receives calls from the HAL each time video frames are ready for inspection.
      * Buffer handles received by this method must be returned via calls to
      * IEvsCamera::doneWithFrame_1_1(). When the video stream is stopped via a call
      * to IEvsCamera::stopVideoStream(), this callback may continue to happen for
@@ -35,14 +35,19 @@
      * event must be delivered.  No further frame deliveries may happen
      * thereafter.
      *
-     * @param buffer a buffer descriptor of a delivered image frame.
+     * A camera device delivers the same number of frames as the number of
+     * backing physical camera devices; that is, a physical camera device
+     * always sends a single frame, while a logical camera device sends as
+     * many frames as it has backing physical camera devices.
+     *
+     * @param buffer Buffer descriptors of delivered image frames.
      */
-    oneway deliverFrame_1_1(BufferDesc buffer);
+    oneway deliverFrame_1_1(vec<BufferDesc> buffer);
 
     /**
      * Receives calls from the HAL each time an event happens.
      *
      * @param  event EVS event with possible event information.
      */
-    oneway notify(EvsEvent event);
+    oneway notify(EvsEventDesc event);
 };
diff --git a/automotive/evs/1.1/default/Android.bp b/automotive/evs/1.1/default/Android.bp
index 41cb426..88fd657 100644
--- a/automotive/evs/1.1/default/Android.bp
+++ b/automotive/evs/1.1/default/Android.bp
@@ -16,7 +16,7 @@
     shared_libs: [
         "android.hardware.automotive.evs@1.0",
         "android.hardware.automotive.evs@1.1",
-        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.3",
         "libbase",
         "libbinder",
         "liblog",
diff --git a/automotive/evs/1.1/default/ConfigManager.cpp b/automotive/evs/1.1/default/ConfigManager.cpp
index 96a2f98..986793e 100644
--- a/automotive/evs/1.1/default/ConfigManager.cpp
+++ b/automotive/evs/1.1/default/ConfigManager.cpp
@@ -42,55 +42,32 @@
     while (curElem != nullptr) {
         if (!strcmp(curElem->Name(), "group")) {
             /* camera group identifier */
-            const char *group_id = curElem->FindAttribute("group_id")->Value();
+            const char *id = curElem->FindAttribute("id")->Value();
 
-            /* create CameraGroup */
-            unique_ptr<ConfigManager::CameraGroup> aCameraGroup(new ConfigManager::CameraGroup());
+            /* create a camera group to be filled */
+            CameraGroupInfo *aCamera = new CameraGroupInfo();
 
-            /* add a camera device to its group */
-            addCameraDevices(curElem->FindAttribute("device_id")->Value(), aCameraGroup);
-
-            /* a list of camera stream configurations */
-            const XMLElement *childElem =
-                curElem->FirstChildElement("caps")->FirstChildElement("stream");
-            while (childElem != nullptr) {
-                /* read 5 attributes */
-                const XMLAttribute *idAttr     = childElem->FindAttribute("id");
-                const XMLAttribute *widthAttr  = childElem->FindAttribute("width");
-                const XMLAttribute *heightAttr = childElem->FindAttribute("height");
-                const XMLAttribute *fmtAttr    = childElem->FindAttribute("format");
-                const XMLAttribute *fpsAttr    = childElem->FindAttribute("framerate");
-
-                const int32_t id = stoi(idAttr->Value());
-                int32_t framerate = 0;
-                if (fpsAttr != nullptr) {
-                    framerate = stoi(fpsAttr->Value());
-                }
-
-                int32_t pixFormat;
-                if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(),
-                                                            pixFormat)) {
-                    RawStreamConfiguration cfg = {
-                        id,
-                        stoi(widthAttr->Value()),
-                        stoi(heightAttr->Value()),
-                        pixFormat,
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
-                        framerate
-                    };
-                    aCameraGroup->streamConfigurations[id] = cfg;
-                }
-
-                childElem = childElem->NextSiblingElement("stream");
+            /* read camera device information */
+            if (!readCameraDeviceInfo(aCamera, curElem)) {
+                ALOGW("Failed to read camera information of %s", id);
+                delete aCamera;
+                continue;
             }
 
             /* camera group synchronization */
             const char *sync = curElem->FindAttribute("synchronized")->Value();
-            aCameraGroup->synchronized =
-                static_cast<bool>(strcmp(sync, "false"));
+            if (!strcmp(sync, "CALIBRATED")) {
+                aCamera->synchronized =
+                    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED;
+            } else if (!strcmp(sync, "APPROXIMATE")) {
+                aCamera->synchronized =
+                    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE;
+            } else {
+                aCamera->synchronized = 0; // Not synchronized
+            }
 
             /* add a group to hash map */
-            mCameraGroups[group_id] = std::move(aCameraGroup);
+            mCameraGroupInfos.insert_or_assign(id, unique_ptr<CameraGroupInfo>(aCamera));
         } else if (!strcmp(curElem->Name(), "device")) {
             /* camera unique identifier */
             const char *id = curElem->FindAttribute("id")->Value();
@@ -98,8 +75,18 @@
             /* camera mount location */
             const char *pos = curElem->FindAttribute("position")->Value();
 
+            /* create a camera device to be filled */
+            CameraInfo *aCamera = new CameraInfo();
+
+            /* read camera device information */
+            if (!readCameraDeviceInfo(aCamera, curElem)) {
+                ALOGW("Failed to read camera information of %s", id);
+                delete aCamera;
+                continue;
+            }
+
             /* store read camera module information */
-            mCameraInfo[id] = readCameraDeviceInfo(curElem);
+            mCameraInfo.insert_or_assign(id, unique_ptr<CameraInfo>(aCamera));
 
             /* assign a camera device to a position group */
             mCameraPosition[pos].emplace(id);
@@ -113,15 +100,13 @@
 }
 
 
-unique_ptr<ConfigManager::CameraInfo>
-ConfigManager::readCameraDeviceInfo(const XMLElement *aDeviceElem) {
-    if (aDeviceElem == nullptr) {
-        return nullptr;
+bool
+ConfigManager::readCameraDeviceInfo(CameraInfo *aCamera,
+                                    const XMLElement *aDeviceElem) {
+    if (aCamera == nullptr || aDeviceElem == nullptr) {
+        return false;
     }
 
-    /* create a CameraInfo to be filled */
-    unique_ptr<ConfigManager::CameraInfo> aCamera(new ConfigManager::CameraInfo());
-
     /* size information to allocate camera_metadata_t */
     size_t totalEntries = 0;
     size_t totalDataSize = 0;
@@ -145,14 +130,15 @@
               "allocated memory was not large enough");
     }
 
-    return aCamera;
+    return true;
 }
 
 
-size_t ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
-                                             unique_ptr<ConfigManager::CameraInfo> &aCamera,
-                                             size_t &dataSize) {
-    if (aCapElem == nullptr) {
+size_t
+ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
+                                      CameraInfo *aCamera,
+                                      size_t &dataSize) {
+    if (aCapElem == nullptr || aCamera == nullptr) {
         return 0;
     }
 
@@ -214,7 +200,7 @@
                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                 framerate
             };
-            aCamera->streamConfigurations[id] = cfg;
+            aCamera->streamConfigurations.insert_or_assign(id, cfg);
         }
 
         curElem = curElem->NextSiblingElement("stream");
@@ -232,10 +218,11 @@
 }
 
 
-size_t ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
-                                       unique_ptr<ConfigManager::CameraInfo> &aCamera,
-                                       size_t &dataSize) {
-    if (aParamElem == nullptr) {
+size_t
+ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
+                                  CameraInfo *aCamera,
+                                  size_t &dataSize) {
+    if (aParamElem == nullptr || aCamera == nullptr) {
         return 0;
     }
 
@@ -258,8 +245,9 @@
                                         count
                                    );
 
-                    aCamera->cameraMetadata[tag] =
-                        make_pair(make_unique<void *>(data), count);
+                    aCamera->cameraMetadata.insert_or_assign(
+                        tag, make_pair(make_unique<void *>(data), count)
+                    );
 
                     ++numEntries;
                     dataSize += calculate_camera_metadata_entry_data_size(
@@ -269,6 +257,52 @@
                     break;
                 }
 
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: {
+                    camera_metadata_enum_android_request_available_capabilities_t *data =
+                        new camera_metadata_enum_android_request_available_capabilities_t[1];
+                    if (ConfigManagerUtil::convertToCameraCapability(
+                            curElem->FindAttribute("value")->Value(), *data)) {
+                        aCamera->cameraMetadata.insert_or_assign(
+                            tag, make_pair(make_unique<void *>(data), 1)
+                        );
+
+                        ++numEntries;
+                        dataSize += calculate_camera_metadata_entry_data_size(
+                                        get_camera_metadata_tag_type(tag), 1
+                                    );
+                    }
+                    break;
+                }
+
+                case ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: {
+                    /* a comma-separated list of physical camera devices */
+                    size_t len = strlen(curElem->FindAttribute("value")->Value());
+                    char *data = new char[len + 1];
+                    memcpy(data,
+                           curElem->FindAttribute("value")->Value(),
+                           len * sizeof(char));
+                    data[len] = '\0';  // null-terminate before scanning for commas
+
+                    /* replace commas with null char */
+                    char *p = data;
+                    while (*p != '\0') {
+                        if (*p == ',') {
+                            *p = '\0';
+                        }
+                        ++p;
+                    }
+
+                    aCamera->cameraMetadata.insert_or_assign(
+                        tag, make_pair(make_unique<void *>(data), len)
+                    );
+
+                    ++numEntries;
+                    dataSize += calculate_camera_metadata_entry_data_size(
+                                    get_camera_metadata_tag_type(tag), len
+                                );
+                    break;
+                }
+
                 default:
                     ALOGW("Parameter %s is not supported",
                           curElem->FindAttribute("name")->Value());
@@ -283,10 +317,11 @@
 }
 
 
-bool ConfigManager::constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
-                                            const size_t totalEntries,
-                                            const size_t totalDataSize) {
-    if (!aCamera->allocate(totalEntries, totalDataSize)) {
+bool
+ConfigManager::constructCameraMetadata(CameraInfo *aCamera,
+                                       const size_t totalEntries,
+                                       const size_t totalDataSize) {
+    if (aCamera == nullptr || !aCamera->allocate(totalEntries, totalDataSize)) {
         ALOGE("Failed to allocate memory for camera metadata");
         return false;
     }
@@ -401,14 +436,14 @@
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
                         0   // unused
                     };
-                    dpy->streamConfigurations[id] = cfg;
+                    dpy->streamConfigurations.insert_or_assign(id, cfg);
                 }
 
                 curStream = curStream->NextSiblingElement("stream");
             }
         }
 
-        mDisplayInfo[id] = std::move(dpy);
+        mDisplayInfo.insert_or_assign(id, std::move(dpy));
         curDev = curDev->NextSiblingElement("device");
     }
 
@@ -457,16 +492,6 @@
 }
 
 
-void ConfigManager::addCameraDevices(const char *devices,
-                                     unique_ptr<CameraGroup> &aGroup) {
-    stringstream device_list(devices);
-    string token;
-    while (getline(device_list, token, ',')) {
-        aGroup->devices.emplace(token);
-    }
-}
-
-
 std::unique_ptr<ConfigManager> ConfigManager::Create(const char *path) {
     unique_ptr<ConfigManager> cfgMgr(new ConfigManager(path));
 
diff --git a/automotive/evs/1.1/default/ConfigManager.h b/automotive/evs/1.1/default/ConfigManager.h
index 0275f90..870af1c 100644
--- a/automotive/evs/1.1/default/ConfigManager.h
+++ b/automotive/evs/1.1/default/ConfigManager.h
@@ -82,9 +82,6 @@
         unordered_map<CameraParam,
                       tuple<int32_t, int32_t, int32_t>> controls;
 
-        /* List of supported frame rates */
-        unordered_set<int32_t> frameRates;
-
         /*
          * List of supported output stream configurations; each array stores
          * format, width, height, and direction values in the order.
@@ -102,21 +99,15 @@
         camera_metadata_t *characteristics;
     };
 
-    class CameraGroup {
+    class CameraGroupInfo : public CameraInfo {
     public:
-        CameraGroup() {}
+        CameraGroupInfo() {}
 
         /* ID of member camera devices */
         unordered_set<string> devices;
 
         /* The capture operation of member camera devices are synchronized */
         bool synchronized = false;
-
-        /*
-         * List of stream configurations that are supposed by all camera devices
-         * in this group.
-         */
-        unordered_map<int32_t, RawStreamConfiguration> streamConfigurations;
     };
 
     class SystemInfo {
@@ -165,11 +156,11 @@
     /*
      * Return a list of cameras
      *
-     * @return CameraGroup
+     * @return CameraGroupInfo
      *         A pointer to a camera group identified by a given id.
      */
-    unique_ptr<CameraGroup>& getCameraGroup(const string& gid) {
-        return mCameraGroups[gid];
+    unique_ptr<CameraGroupInfo>& getCameraGroupInfo(const string& gid) {
+        return mCameraGroupInfos[gid];
     }
 
 
@@ -203,8 +194,8 @@
     /* Internal data structure for camera device information */
     unordered_map<string, unique_ptr<DisplayInfo>> mDisplayInfo;
 
-    /* Camera groups are stored in <groud id, CameraGroup> hash map */
-    unordered_map<string, unique_ptr<CameraGroup>> mCameraGroups;
+    /* Camera groups are stored in <group id, CameraGroupInfo> hash map */
+    unordered_map<string, unique_ptr<CameraGroupInfo>> mCameraGroupInfos;
 
     /*
      * Camera positions are stored in <position, camera id set> hash map.
@@ -253,16 +244,19 @@
     /*
      * read camera device information
      *
-     * @param  aDeviceElem
+     * @param  aCamera
+     *         A pointer to CameraInfo that will be completed by this
+     *         method.
+     *         aDeviceElem
      *         A pointer to "device" XML element that contains camera module
      *         capability info and its characteristics.
      *
-     * @return unique_ptr<CameraInfo>
-     *         A pointer to CameraInfo class that contains camera module
-     *         capability and characteristics.  Please note that this transfers
-     *         the ownership of created CameraInfo to the caller.
+     * @return bool
+     *         Return false upon any failure in reading and processing camera
+     *         device information.
      */
-    unique_ptr<CameraInfo> readCameraDeviceInfo(const XMLElement *aDeviceElem);
+    bool readCameraDeviceInfo(CameraInfo *aCamera,
+                              const XMLElement *aDeviceElem);
 
     /*
      * read camera metadata
@@ -280,7 +274,7 @@
      *         Number of camera metadata entries
      */
     size_t readCameraCapabilities(const XMLElement * const aCapElem,
-                                  unique_ptr<CameraInfo> &aCamera,
+                                  CameraInfo *aCamera,
                                   size_t &dataSize);
 
     /*
@@ -298,7 +292,7 @@
      *         Number of camera metadata entries
      */
     size_t readCameraMetadata(const XMLElement * const aParamElem,
-                              unique_ptr<CameraInfo> &aCamera,
+                              CameraInfo *aCamera,
                               size_t &dataSize);
 
     /*
@@ -316,21 +310,9 @@
      *         or its size is not large enough to add all found camera metadata
      *         entries.
      */
-    bool constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
+    bool constructCameraMetadata(CameraInfo *aCamera,
                                  const size_t totalEntries,
                                  const size_t totalDataSize);
-
-    /*
-     * parse a comma-separated list of camera devices and add them to
-     * CameraGroup.
-     *
-     * @param  devices
-     *         A comma-separated list of camera device identifiers.
-     * @param  aGroup
-     *         Camera group which cameras will be added to.
-     */
-    void addCameraDevices(const char *devices,
-                          unique_ptr<CameraGroup> &aGroup);
 };
 #endif // CONFIG_MANAGER_H
 
diff --git a/automotive/evs/1.1/default/ConfigManagerUtil.cpp b/automotive/evs/1.1/default/ConfigManagerUtil.cpp
index 8206daa..d10f236 100644
--- a/automotive/evs/1.1/default/ConfigManagerUtil.cpp
+++ b/automotive/evs/1.1/default/ConfigManagerUtil.cpp
@@ -90,6 +90,30 @@
         aTag =  ANDROID_LENS_POSE_ROTATION;
     } else if (!strcmp(name, "LENS_POSE_TRANSLATION")) {
         aTag =  ANDROID_LENS_POSE_TRANSLATION;
+    } else if (!strcmp(name, "REQUEST_AVAILABLE_CAPABILITIES")) {
+        aTag =  ANDROID_REQUEST_AVAILABLE_CAPABILITIES;
+    } else if (!strcmp(name, "LOGICAL_MULTI_CAMERA_PHYSICAL_IDS")) {
+        aTag =  ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS;
+    } else {
+        return false;
+    }
+
+    return true;
+}
+
+
+bool ConfigManagerUtil::convertToCameraCapability(
+    const char *name,
+    camera_metadata_enum_android_request_available_capabilities_t &cap) {
+
+    if (!strcmp(name, "DEPTH_OUTPUT")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT;
+    } else if (!strcmp(name, "LOGICAL_MULTI_CAMERA")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA;
+    } else if (!strcmp(name, "MONOCHROME")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME;
+    } else if (!strcmp(name, "SECURE_IMAGE_DATA")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA;
     } else {
         return false;
     }
diff --git a/automotive/evs/1.1/default/ConfigManagerUtil.h b/automotive/evs/1.1/default/ConfigManagerUtil.h
index 8c89ae7..1710cac 100644
--- a/automotive/evs/1.1/default/ConfigManagerUtil.h
+++ b/automotive/evs/1.1/default/ConfigManagerUtil.h
@@ -55,6 +55,14 @@
      */
     static string trimString(const string &src,
                              const string &ws = " \n\r\t\f\v");
+
+    /**
+     * Convert a given string to corresponding camera capabilities
+     */
+    static bool convertToCameraCapability(
+        const char *name,
+        camera_metadata_enum_android_request_available_capabilities_t &cap);
+
 };
 
 #endif // CONFIG_MANAGER_UTIL_H
diff --git a/automotive/evs/1.1/default/EvsCamera.cpp b/automotive/evs/1.1/default/EvsCamera.cpp
index 5ba753d..e200b53 100644
--- a/automotive/evs/1.1/default/EvsCamera.cpp
+++ b/automotive/evs/1.1/default/EvsCamera.cpp
@@ -21,7 +21,7 @@
 
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
-
+#include <utils/SystemClock.h>
 
 namespace android {
 namespace hardware {
@@ -240,9 +240,12 @@
 }
 
 
-Return<EvsResult> EvsCamera::doneWithFrame_1_1(const BufferDesc_1_1& bufDesc)  {
+Return<EvsResult> EvsCamera::doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers)  {
     std::lock_guard <std::mutex> lock(mAccessLock);
-    returnBuffer(bufDesc.bufferId, bufDesc.buffer.nativeHandle);
+
+    for (auto&& buffer : buffers) {
+        returnBuffer(buffer.bufferId, buffer.buffer.nativeHandle);
+    }
 
     return EvsResult::OK;
 }
@@ -490,12 +493,17 @@
             newBuffer.buffer.nativeHandle = mBuffers[idx].handle;
             newBuffer.pixelSize = sizeof(uint32_t);
             newBuffer.bufferId = idx;
+            newBuffer.deviceId = mDescription.v1.cameraId;
+            newBuffer.timestamp = elapsedRealtimeNano();
 
             // Write test data into the image buffer
             fillTestFrame(newBuffer);
 
             // Issue the (asynchronous) callback to the client -- can't be holding the lock
-            auto result = mStream->deliverFrame_1_1(newBuffer);
+            hidl_vec<BufferDesc_1_1> frames;
+            frames.resize(1);
+            frames[0] = newBuffer;
+            auto result = mStream->deliverFrame_1_1(frames);
             if (result.isOk()) {
                 ALOGD("Delivered %p as id %d",
                       newBuffer.buffer.nativeHandle.getNativeHandle(), newBuffer.bufferId);
@@ -527,7 +535,7 @@
     }
 
     // If we've been asked to stop, send an event to signal the actual end of stream
-    EvsEvent event;
+    EvsEventDesc event;
     event.aType = EvsEventType::STREAM_STOPPED;
     auto result = mStream->notify(event);
     if (!result.isOk()) {
diff --git a/automotive/evs/1.1/default/EvsCamera.h b/automotive/evs/1.1/default/EvsCamera.h
index c15b4b1..a32fa75 100644
--- a/automotive/evs/1.1/default/EvsCamera.h
+++ b/automotive/evs/1.1/default/EvsCamera.h
@@ -64,7 +64,7 @@
     Return<void>      getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb)  override;
     Return<EvsResult> pauseVideoStream() override;
     Return<EvsResult> resumeVideoStream() override;
-    Return<EvsResult> doneWithFrame_1_1(const BufferDesc_1_1& buffer) override;
+    Return<EvsResult> doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffer) override;
     Return<EvsResult> setMaster() override;
     Return<EvsResult> forceMaster(const sp<IEvsDisplay>& display) override;
     Return<EvsResult> unsetMaster() override;
diff --git a/automotive/evs/1.1/default/resources/evs_default_configuration.xml b/automotive/evs/1.1/default/resources/evs_default_configuration.xml
index 692102e..a79e7c2 100644
--- a/automotive/evs/1.1/default/resources/evs_default_configuration.xml
+++ b/automotive/evs/1.1/default/resources/evs_default_configuration.xml
@@ -28,8 +28,31 @@
         <num_cameras value='1'/>
     </system>
 
-    <!-- camera device information -->
+    <!-- camera information -->
     <camera>
+        <!-- camera group starts -->
+        <group id='group1' synchronized='APPROXIMATE'>
+            <caps>
+                <stream id='0' width='640'  height='360'  format='RGBA_8888' framerate='30'/>
+            </caps>
+
+            <!-- list of parameters -->
+            <characteristics>
+                <parameter
+                    name='REQUEST_AVAILABLE_CAPABILITIES'
+                    type='enum'
+                    size='1'
+                    value='LOGICAL_MULTI_CAMERA'
+                />
+                <parameter
+                    name='LOGICAL_MULTI_CAMERA_PHYSICAL_IDS'
+                    type='byte[]'
+                    size='1'
+                    value='/dev/video1'
+                />
+            </characteristics>
+        </group>
+
         <!-- camera device starts -->
         <device id='/dev/video1' position='rear'>
             <caps>
diff --git a/automotive/evs/1.1/types.hal b/automotive/evs/1.1/types.hal
index dcb2abb..f88d223 100644
--- a/automotive/evs/1.1/types.hal
+++ b/automotive/evs/1.1/types.hal
@@ -61,6 +61,14 @@
      * Opaque value from driver
      */
     uint32_t bufferId;
+    /**
+     * Unique identifier of the physical camera device that produces this buffer.
+     */
+    string deviceId;
+    /**
+     * Time that this buffer is being filled.
+     */
+    int64_t timestamp;
 };
 
 /**
@@ -97,12 +105,16 @@
 /**
  * Structure that describes informative events occurred during EVS is streaming
  */
-struct EvsEvent {
+struct EvsEventDesc {
     /**
      * Type of an informative event
      */
     EvsEventType aType;
     /**
+     * Device identifier
+     */
+    string deviceId;
+    /**
      * Possible additional information
      */
     uint32_t[4] payload;
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.cpp b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
index 6d53652..38c709f 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.cpp
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
@@ -80,7 +80,7 @@
     asyncStopStream();
 
     // Wait until the stream has actually stopped
-    std::unique_lock<std::mutex> lock(mLock);
+    std::unique_lock<std::mutex> lock(mEventLock);
     if (mRunning) {
         mEventSignal.wait(lock, [this]() { return !mRunning; });
     }
@@ -96,9 +96,9 @@
         return false;
     }
 
-    BufferDesc_1_1 buffer = mHeldBuffers.front();
+    hidl_vec<BufferDesc_1_1> buffers = mHeldBuffers.front();
     mHeldBuffers.pop();
-    mCamera->doneWithFrame_1_1(buffer);
+    mCamera->doneWithFrame_1_1(buffers);
 
     return true;
 }
@@ -138,50 +138,52 @@
 }
 
 
-Return<void> FrameHandler::deliverFrame_1_1(const BufferDesc_1_1& bufDesc) {
-    const AHardwareBuffer_Desc* pDesc =
-        reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
-    ALOGD("Received a frame from the camera (%p)",
-          bufDesc.buffer.nativeHandle.getNativeHandle());
+Return<void> FrameHandler::deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
+    for (auto&& buffer : buffers) {
+        const AHardwareBuffer_Desc* pDesc =
+            reinterpret_cast<const AHardwareBuffer_Desc *>(&buffer.buffer.description);
+        ALOGD("Received a frame from the camera (%p)",
+              buffer.buffer.nativeHandle.getNativeHandle());
 
-    // Store a dimension of a received frame.
-    mFrameWidth = pDesc->width;
-    mFrameHeight = pDesc->height;
+        // Store a dimension of a received frame.
+        mFrameWidth = pDesc->width;
+        mFrameHeight = pDesc->height;
 
-    // If we were given an opened display at construction time, then send the received
-    // image back down the camera.
-    if (mDisplay.get()) {
-        // Get the output buffer we'll use to display the imagery
-        BufferDesc_1_0 tgtBuffer = {};
-        mDisplay->getTargetBuffer([&tgtBuffer](const BufferDesc_1_0& buff) {
-                                      tgtBuffer = buff;
-                                  }
-        );
+        // If we were given an opened display at construction time, then send the received
+        // image back down the camera.
+        if (mDisplay.get()) {
+            // Get the output buffer we'll use to display the imagery
+            BufferDesc_1_0 tgtBuffer = {};
+            mDisplay->getTargetBuffer([&tgtBuffer](const BufferDesc_1_0& buff) {
+                                          tgtBuffer = buff;
+                                      }
+            );
 
-        if (tgtBuffer.memHandle == nullptr) {
-            printf("Didn't get target buffer - frame lost\n");
-            ALOGE("Didn't get requested output buffer -- skipping this frame.");
-        } else {
-            // Copy the contents of the of buffer.memHandle into tgtBuffer
-            copyBufferContents(tgtBuffer, bufDesc);
-
-            // Send the target buffer back for display
-            Return<EvsResult> result = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
-            if (!result.isOk()) {
-                printf("HIDL error on display buffer (%s)- frame lost\n",
-                       result.description().c_str());
-                ALOGE("Error making the remote function call.  HIDL said %s",
-                      result.description().c_str());
-            } else if (result != EvsResult::OK) {
-                printf("Display reported error - frame lost\n");
-                ALOGE("We encountered error %d when returning a buffer to the display!",
-                      (EvsResult) result);
+            if (tgtBuffer.memHandle == nullptr) {
+                printf("Didn't get target buffer - frame lost\n");
+                ALOGE("Didn't get requested output buffer -- skipping this frame.");
             } else {
-                // Everything looks good!
-                // Keep track so tests or watch dogs can monitor progress
-                mLock.lock();
-                mFramesDisplayed++;
-                mLock.unlock();
+                // Copy the contents of buffer.memHandle into tgtBuffer
+                copyBufferContents(tgtBuffer, buffer);
+
+                // Send the target buffer back for display
+                Return<EvsResult> result = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
+                if (!result.isOk()) {
+                    printf("HIDL error on display buffer (%s)- frame lost\n",
+                           result.description().c_str());
+                    ALOGE("Error making the remote function call.  HIDL said %s",
+                          result.description().c_str());
+                } else if (result != EvsResult::OK) {
+                    printf("Display reported error - frame lost\n");
+                    ALOGE("We encountered error %d when returning a buffer to the display!",
+                          (EvsResult) result);
+                } else {
+                    // Everything looks good!
+                    // Keep track so tests or watch dogs can monitor progress
+                    mLock.lock();
+                    mFramesDisplayed++;
+                    mLock.unlock();
+                }
             }
         }
     }
@@ -191,11 +193,11 @@
     case eAutoReturn:
         // Send the camera buffer back now that the client has seen it
         ALOGD("Calling doneWithFrame");
-        mCamera->doneWithFrame_1_1(bufDesc);
+        mCamera->doneWithFrame_1_1(buffers);
         break;
     case eNoAutoReturn:
-        // Hang onto the buffer handle for now -- the client will return it explicitly later
-        mHeldBuffers.push(bufDesc);
+        // Hang onto the buffer handles for now -- the client will return them explicitly later
+        mHeldBuffers.push(buffers);
     }
 
     mLock.lock();
@@ -209,7 +211,7 @@
 }
 
 
-Return<void> FrameHandler::notify(const EvsEvent& event) {
+Return<void> FrameHandler::notify(const EvsEventDesc& event) {
     // Local flag we use to keep track of when the stream is stopping
     mLock.lock();
     mLatestEventDesc = event;
@@ -223,7 +225,7 @@
         ALOGD("Received an event %s", eventToString(mLatestEventDesc.aType));
     }
     mLock.unlock();
-    mEventSignal.notify_all();
+    mEventSignal.notify_one();
 
     return Void();
 }
@@ -342,19 +344,20 @@
     }
 }
 
-bool FrameHandler::waitForEvent(const EvsEventType aTargetEvent,
-                                EvsEvent &event) {
+bool FrameHandler::waitForEvent(const EvsEventDesc& aTargetEvent,
+                                EvsEventDesc& aReceivedEvent) {
     // Wait until we get an expected parameter change event.
-    std::unique_lock<std::mutex> lock(mLock);
+    std::unique_lock<std::mutex> lock(mEventLock);
     auto now = std::chrono::system_clock::now();
     bool result = mEventSignal.wait_until(lock, now + 5s,
-        [this, aTargetEvent, &event](){
-            bool flag = mLatestEventDesc.aType == aTargetEvent;
-            if (flag) {
-                event.aType = mLatestEventDesc.aType;
-                event.payload[0] = mLatestEventDesc.payload[0];
-                event.payload[1] = mLatestEventDesc.payload[1];
-            }
+        [this, aTargetEvent, &aReceivedEvent](){
+            bool flag = (mLatestEventDesc.aType == aTargetEvent.aType) &&
+                        (mLatestEventDesc.payload[0] == aTargetEvent.payload[0]) &&
+                        (mLatestEventDesc.payload[1] == aTargetEvent.payload[1]);
+
+            aReceivedEvent.aType = mLatestEventDesc.aType;
+            aReceivedEvent.payload[0] = mLatestEventDesc.payload[0];
+            aReceivedEvent.payload[1] = mLatestEventDesc.payload[1];
 
             return flag;
         }
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.h b/automotive/evs/1.1/vts/functional/FrameHandler.h
index e5f1b8f..51e5a86 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.h
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.h
@@ -73,8 +73,8 @@
     bool isRunning();
 
     void waitForFrameCount(unsigned frameCount);
-    bool waitForEvent(const EvsEventType aTargetEvent,
-                            EvsEvent &eventDesc);
+    bool waitForEvent(const EvsEventDesc& aTargetEvent,
+                            EvsEventDesc& aReceivedEvent);
     void getFramesCounters(unsigned* received, unsigned* displayed);
     void getFrameDimension(unsigned* width, unsigned* height);
 
@@ -83,8 +83,8 @@
     Return<void> deliverFrame(const BufferDesc_1_0& buffer) override;
 
     // Implementation for ::android::hardware::automotive::evs::V1_1::IEvsCameraStream
-    Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer) override;
-    Return<void> notify(const EvsEvent& event) override;
+    Return<void> deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffer) override;
+    Return<void> notify(const EvsEventDesc& event) override;
 
     // Local implementation details
     bool copyBufferContents(const BufferDesc_1_0& tgtBuffer, const BufferDesc_1_1& srcBuffer);
@@ -99,17 +99,18 @@
     // Since we get frames delivered to us asynchronously via the IEvsCameraStream interface,
     // we need to protect all member variables that may be modified while we're streaming
     // (ie: those below)
-    std::mutex                  mLock;
-    std::condition_variable     mEventSignal;
-    std::condition_variable     mFrameSignal;
+    std::mutex                            mLock;
+    std::mutex                            mEventLock;
+    std::condition_variable               mEventSignal;
+    std::condition_variable               mFrameSignal;
+    std::queue<hidl_vec<BufferDesc_1_1>>  mHeldBuffers;
 
-    std::queue<BufferDesc_1_1>  mHeldBuffers;
     bool                        mRunning = false;
     unsigned                    mFramesReceived = 0;    // Simple counter -- rolls over eventually!
     unsigned                    mFramesDisplayed = 0;   // Simple counter -- rolls over eventually!
     unsigned                    mFrameWidth = 0;
     unsigned                    mFrameHeight = 0;
-    EvsEvent                    mLatestEventDesc;
+    EvsEventDesc                mLatestEventDesc;
 };
 
 
diff --git a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
index 1d3fd87..8847a95 100644
--- a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
+++ b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
@@ -67,6 +67,7 @@
 using ::android::hardware::hidl_handle;
 using ::android::hardware::hidl_string;
 using ::android::sp;
+using ::android::wp;
 using ::android::hardware::camera::device::V3_2::Stream;
 using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
 using ::android::hardware::automotive::evs::V1_0::DisplayState;
@@ -117,7 +118,15 @@
         mIsHwModule = !service_name.compare(kEnumeratorName);
     }
 
-    virtual void TearDown() override {}
+    virtual void TearDown() override {
+        // Attempt to close any active camera
+        for (auto &&c : activeCameras) {
+            sp<IEvsCamera_1_1> cam = c.promote();
+            if (cam != nullptr) {
+                pEnumerator->closeCamera(cam);
+            }
+        }
+    }
 
 protected:
     void loadCameraList() {
@@ -141,10 +150,12 @@
         ASSERT_GE(cameraInfo.size(), 1u);
     }
 
-    sp<IEvsEnumerator>        pEnumerator;    // Every test needs access to the service
-    std::vector <CameraDesc>  cameraInfo;     // Empty unless/until loadCameraList() is called
-    bool                      mIsHwModule;    // boolean to tell current module under testing
-                                              // is HW module implementation.
+    sp<IEvsEnumerator>              pEnumerator;   // Every test needs access to the service
+    std::vector<CameraDesc>         cameraInfo;    // Empty unless/until loadCameraList() is called
+    bool                            mIsHwModule;   // boolean to tell current module under testing
+                                                   // is HW module implementation.
+    std::deque<wp<IEvsCamera_1_1>>  activeCameras; // A list of active camera handles that
+                                                   // need to be cleaned up.
 };
 
 
@@ -169,11 +180,15 @@
     // Open and close each camera twice
     for (auto&& cam: cameraInfo) {
         for (int pass = 0; pass < 2; pass++) {
+            activeCameras.clear();
             sp<IEvsCamera_1_1> pCam =
                 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
                 .withDefault(nullptr);
             ASSERT_NE(pCam, nullptr);
 
+            // Store a camera handle for a clean-up
+            activeCameras.push_back(pCam);
+
             // Verify that this camera self-identifies correctly
             pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
                                         ALOGD("Found camera %s", desc.v1.cameraId.c_str());
@@ -206,11 +221,15 @@
 
     // Open and close each camera twice
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Verify that this camera self-identifies correctly
         pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
                                     ALOGD("Found camera %s", desc.v1.cameraId.c_str());
@@ -221,9 +240,13 @@
         sp<IEvsCamera_1_1> pCam2 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
-        ASSERT_NE(pCam, pCam2);
         ASSERT_NE(pCam2, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam2);
+
+        ASSERT_NE(pCam, pCam2);
+
         Return<EvsResult> result = pCam->setMaxFramesInFlight(2);
         if (mIsHwModule) {
             // Verify that the old camera rejects calls via HW module.
@@ -268,11 +291,15 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Set up a frame receiver object which will fire up its own thread
         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                          nullptr,
@@ -340,12 +367,15 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
-
+        activeCameras.clear();
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Ask for a crazy number of buffers in flight to ensure it errors correctly
         Return<EvsResult> badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
         EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);
@@ -416,11 +446,15 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Set up a frame receiver object which will fire up its own thread.
         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                          pDisplay,
@@ -484,17 +518,24 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCam0 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam0);
+
         sp<IEvsCamera_1_1> pCam1 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam1, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam1);
+
         // Set up per-client frame receiver objects which will fire up its own thread
         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
                                                           nullptr,
@@ -575,12 +616,16 @@
     // Test each reported camera
     Return<EvsResult> result = EvsResult::OK;
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // Create a camera client
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Get the parameter list
         std::vector<CameraParam> cmds;
         pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
@@ -626,48 +671,54 @@
             EvsResult result = EvsResult::OK;
             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                 // Try to turn off auto-focus
-                int32_t val1 = 0;
-                pCam->getIntParameter(CameraParam::AUTO_FOCUS,
-                                   [&result, &val1](auto status, auto value) {
+                std::vector<int32_t> values;
+                pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &values](auto status, auto effectiveValues) {
                                        result = status;
                                        if (status == EvsResult::OK) {
-                                          val1 = value;
+                                          for (auto &&v : effectiveValues) {
+                                              values.push_back(v);
+                                          }
                                        }
                                    });
-                if (val1 != 0) {
-                    pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                       [&result, &val1](auto status, auto effectiveValue) {
-                                           result = status;
-                                           val1 = effectiveValue;
-                                       });
-                    ASSERT_EQ(EvsResult::OK, result);
-                    ASSERT_EQ(val1, 0);
+                ASSERT_EQ(EvsResult::OK, result);
+                for (auto &&v : values) {
+                    ASSERT_EQ(v, 0);
                 }
             }
 
             // Try to program a parameter with a random value [minVal, maxVal]
             int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
-            int32_t val1 = 0;
+            std::vector<int32_t> values;
 
             // Rounding down
             val0 = val0 - (val0 % step);
             pCam->setIntParameter(cmd, val0,
-                               [&result, &val1](auto status, auto effectiveValue) {
+                               [&result, &values](auto status, auto effectiveValues) {
                                    result = status;
-                                   val1 = effectiveValue;
+                                   if (status == EvsResult::OK) {
+                                      for (auto &&v : effectiveValues) {
+                                          values.push_back(v);
+                                      }
+                                   }
                                });
 
             ASSERT_EQ(EvsResult::OK, result);
 
+            values.clear();
             pCam->getIntParameter(cmd,
-                               [&result, &val1](auto status, auto value) {
+                               [&result, &values](auto status, auto readValues) {
                                    result = status;
                                    if (status == EvsResult::OK) {
-                                      val1 = value;
+                                      for (auto &&v : readValues) {
+                                          values.push_back(v);
+                                      }
                                    }
                                });
             ASSERT_EQ(EvsResult::OK, result);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
+            for (auto &&v : values) {
+                ASSERT_EQ(val0, v) << "Values are not matched.";
+            }
         }
 
         result = pCam->unsetMaster();
@@ -704,16 +755,24 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCamMaster =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamMaster, nullptr);
+
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCamMaster);
+
         sp<IEvsCamera_1_1> pCamNonMaster =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamNonMaster, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCamNonMaster);
+
         // Set up per-client frame receiver objects which will fire up its own thread
         sp<FrameHandler> frameHandlerMaster =
             new FrameHandler(pCamMaster, cam,
@@ -750,13 +809,15 @@
 
         // Non-master client expects to receive a master role relesed
         // notification.
-        EvsEvent aNotification = {};
+        EvsEventDesc aTargetEvent  = {};
+        EvsEventDesc aNotification = {};
 
         // Release a master role.
         pCamMaster->unsetMaster();
 
         // Verify a change notification.
-        frameHandlerNonMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+        frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification);
         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
                   static_cast<EvsEventType>(aNotification.aType));
 
@@ -772,7 +833,8 @@
         frameHandlerNonMaster->shutdown();
 
         // Verify a change notification.
-        frameHandlerMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+        frameHandlerMaster->waitForEvent(aTargetEvent, aNotification);
         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
                   static_cast<EvsEventType>(aNotification.aType));
 
@@ -810,16 +872,24 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCamMaster =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamMaster, nullptr);
+
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCamMaster);
+
         sp<IEvsCamera_1_1> pCamNonMaster =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamNonMaster, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCamNonMaster);
+
         // Get the parameter list
         std::vector<CameraParam> camMasterCmds, camNonMasterCmds;
         pCamMaster->getParameterList([&camMasterCmds](hidl_vec<CameraParam> cmdList) {
@@ -879,7 +949,7 @@
         frameHandlerNonMaster->waitForFrameCount(1);
 
         int32_t val0 = 0;
-        int32_t val1 = 0;
+        std::vector<int32_t> values;
         for (auto &cmd : camMasterCmds) {
             // Get a valid parameter value range
             int32_t minVal, maxVal, step;
@@ -895,14 +965,19 @@
             EvsResult result = EvsResult::OK;
             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                 // Try to turn off auto-focus
-                int32_t val1 = 1;
                 pCamMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                   [&result, &val1](auto status, auto effectiveValue) {
+                                   [&result, &values](auto status, auto effectiveValues) {
                                        result = status;
-                                       val1 = effectiveValue;
+                                       if (status == EvsResult::OK) {
+                                          for (auto &&v : effectiveValues) {
+                                              values.push_back(v);
+                                          }
+                                       }
                                    });
                 ASSERT_EQ(EvsResult::OK, result);
-                ASSERT_EQ(val1, 0);
+                for (auto &&v : values) {
+                    ASSERT_EQ(v, 0);
+                }
             }
 
             // Try to program a parameter
@@ -910,45 +985,63 @@
 
             // Rounding down
             val0 = val0 - (val0 % step);
+            values.clear();
             pCamMaster->setIntParameter(cmd, val0,
-                                     [&result, &val1](auto status, auto effectiveValue) {
-                                         result = status;
-                                         val1 = effectiveValue;
-                                     });
-            ASSERT_EQ(EvsResult::OK, result);
-
-            // Wait a moment
-            sleep(1);
-
-            // Non-master client expects to receive a parameter change notification
-            // whenever a master client adjusts it.
-            EvsEvent aNotification = {};
-
-            pCamMaster->getIntParameter(cmd,
-                                     [&result, &val1](auto status, auto value) {
+                                     [&result, &values](auto status, auto effectiveValues) {
                                          result = status;
                                          if (status == EvsResult::OK) {
-                                            val1 = value;
+                                            for (auto &&v : effectiveValues) {
+                                                values.push_back(v);
+                                            }
                                          }
                                      });
             ASSERT_EQ(EvsResult::OK, result);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
+
+            // Non-master client expects to receive a parameter change notification
+            // whenever a master client adjusts it.
+            EvsEventDesc aTargetEvent  = {};
+            EvsEventDesc aNotification = {};
+
+            values.clear();
+            pCamMaster->getIntParameter(cmd,
+                                     [&result, &values](auto status, auto readValues) {
+                                         result = status;
+                                         if (status == EvsResult::OK) {
+                                            for (auto &&v : readValues) {
+                                                values.push_back(v);
+                                            }
+                                         }
+                                     });
+            ASSERT_EQ(EvsResult::OK, result);
+            for (auto &&v : values) {
+                ASSERT_EQ(val0, v) << "Values do not match.";
+            }
 
             // Verify a change notification
-            frameHandlerNonMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+            aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+            aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+            aTargetEvent.payload[1] = static_cast<uint32_t>(val0);
+            frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification);
             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                       static_cast<EvsEventType>(aNotification.aType));
             ASSERT_EQ(cmd,
                       static_cast<CameraParam>(aNotification.payload[0]));
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
+            for (auto &&v : values) {
+                ASSERT_EQ(v,
+                          static_cast<int32_t>(aNotification.payload[1]));
+            }
         }
 
         // Try to adjust a parameter via non-master client
+        values.clear();
         pCamNonMaster->setIntParameter(camNonMasterCmds[0], val0,
-                                    [&result, &val1](auto status, auto effectiveValue) {
+                                    [&result, &values](auto status, auto effectiveValues) {
                                         result = status;
-                                        val1 = effectiveValue;
+                                        if (status == EvsResult::OK) {
+                                            for (auto &&v : effectiveValues) {
+                                                values.push_back(v);
+                                            }
+                                        }
                                     });
         ASSERT_EQ(EvsResult::INVALID_ARG, result);
 
@@ -961,10 +1054,15 @@
         ASSERT_EQ(EvsResult::OK, result);
 
         // Try to adjust a parameter after being retired
+        values.clear();
         pCamMaster->setIntParameter(camMasterCmds[0], val0,
-                                 [&result, &val1](auto status, auto effectiveValue) {
+                                 [&result, &values](auto status, auto effectiveValues) {
                                      result = status;
-                                     val1 = effectiveValue;
+                                     if (status == EvsResult::OK) {
+                                        for (auto &&v : effectiveValues) {
+                                            values.push_back(v);
+                                        }
+                                     }
                                  });
         ASSERT_EQ(EvsResult::INVALID_ARG, result);
 
@@ -986,16 +1084,22 @@
             );
 
             EvsResult result = EvsResult::OK;
+            values.clear();
             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                 // Try to turn off auto-focus
-                int32_t val1 = 1;
                 pCamNonMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                   [&result, &val1](auto status, auto effectiveValue) {
+                                   [&result, &values](auto status, auto effectiveValues) {
                                        result = status;
-                                       val1 = effectiveValue;
+                                       if (status == EvsResult::OK) {
+                                          for (auto &&v : effectiveValues) {
+                                              values.push_back(v);
+                                          }
+                                       }
                                    });
                 ASSERT_EQ(EvsResult::OK, result);
-                ASSERT_EQ(val1, 0);
+                for (auto &&v : values) {
+                    ASSERT_EQ(v, 0);
+                }
             }
 
             // Try to program a parameter
@@ -1003,38 +1107,51 @@
 
             // Rounding down
             val0 = val0 - (val0 % step);
+            values.clear();
             pCamNonMaster->setIntParameter(cmd, val0,
-                                        [&result, &val1](auto status, auto effectiveValue) {
-                                            result = status;
-                                            val1 = effectiveValue;
-                                        });
-            ASSERT_EQ(EvsResult::OK, result);
-
-            // Wait a moment
-            sleep(1);
-
-            // Non-master client expects to receive a parameter change notification
-            // whenever a master client adjusts it.
-            EvsEvent aNotification = {};
-
-            pCamNonMaster->getIntParameter(cmd,
-                                        [&result, &val1](auto status, auto value) {
+                                        [&result, &values](auto status, auto effectiveValues) {
                                             result = status;
                                             if (status == EvsResult::OK) {
-                                               val1 = value;
+                                                for (auto &&v : effectiveValues) {
+                                                    values.push_back(v);
+                                                }
                                             }
                                         });
             ASSERT_EQ(EvsResult::OK, result);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
+
+            // Non-master client expects to receive a parameter change notification
+            // whenever a master client adjusts it.
+            EvsEventDesc aTargetEvent  = {};
+            EvsEventDesc aNotification = {};
+
+            values.clear();
+            pCamNonMaster->getIntParameter(cmd,
+                                        [&result, &values](auto status, auto readValues) {
+                                            result = status;
+                                            if (status == EvsResult::OK) {
+                                                for (auto &&v : readValues) {
+                                                    values.push_back(v);
+                                                }
+                                            }
+                                        });
+            ASSERT_EQ(EvsResult::OK, result);
+            for (auto &&v : values) {
+                ASSERT_EQ(val0, v) << "Values do not match.";
+            }
 
             // Verify a change notification
-            frameHandlerMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+            aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+            aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+            aTargetEvent.payload[1] = static_cast<uint32_t>(val0);
+            frameHandlerMaster->waitForEvent(aTargetEvent, aNotification);
             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
                       static_cast<EvsEventType>(aNotification.aType));
             ASSERT_EQ(cmd,
                       static_cast<CameraParam>(aNotification.payload[0]));
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
+            for (auto &&v : values) {
+                ASSERT_EQ(v,
+                          static_cast<int32_t>(aNotification.payload[1]));
+            }
         }
 
         // New master retires from a master role
@@ -1078,17 +1195,25 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
+
         // Create two clients
         sp<IEvsCamera_1_1> pCam0 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam0);
+
         sp<IEvsCamera_1_1> pCam1 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam1, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam1);
+
         // Get the parameter list; this test will use the first command in both
         // lists.
         std::vector<CameraParam> cam0Cmds, cam1Cmds;
@@ -1144,108 +1269,144 @@
             }
         );
 
+        // Client 1 becomes a master
+        result = pCam1->setMaster();
+        ASSERT_EQ(EvsResult::OK, result);
+
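+        // aTargetEvent describes the notification to wait for, and
+        // aNotification receives the event that actually arrives.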
+        std::vector<int32_t> values;
+        EvsEventDesc aTargetEvent  = {};
+        EvsEventDesc aNotification = {};
         if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
             // Try to turn off auto-focus
-            int32_t val1 = 0;
-            pCam1->getIntParameter(CameraParam::AUTO_FOCUS,
-                               [&result, &val1](auto status, auto value) {
+            pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                               [&result, &values](auto status, auto effectiveValues) {
                                    result = status;
                                    if (status == EvsResult::OK) {
-                                      val1 = value;
+                                      for (auto &&v : effectiveValues) {
+                                          values.push_back(v);
+                                      }
                                    }
                                });
-            if (val1 != 0) {
-                pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                   [&result, &val1](auto status, auto effectiveValue) {
-                                       result = status;
-                                       val1 = effectiveValue;
-                                   });
-                ASSERT_EQ(EvsResult::OK, result);
-                ASSERT_EQ(val1, 0);
+            ASSERT_EQ(EvsResult::OK, result);
+            for (auto &&v : values) {
+                ASSERT_EQ(v, 0);
             }
+
+            // Make sure AUTO_FOCUS is off.
+            aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+            aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
+            aTargetEvent.payload[1] = 0;
+            bool timeout =
+                frameHandler0->waitForEvent(aTargetEvent, aNotification);
+            ASSERT_FALSE(timeout) << "Expected event does not arrive";
         }
 
         // Try to program a parameter with a random value [minVal, maxVal]
         int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
-        int32_t val1 = 0;
 
         // Rounding down
         val0 = val0 - (val0 % step);
-
-        result = pCam1->setMaster();
-        ASSERT_EQ(EvsResult::OK, result);
-
+        values.clear();
         pCam1->setIntParameter(cam1Cmds[0], val0,
-                            [&result, &val1](auto status, auto effectiveValue) {
+                            [&result, &values](auto status, auto effectiveValues) {
                                 result = status;
-                                val1 = effectiveValue;
+                                if (status == EvsResult::OK) {
+                                    for (auto &&v : effectiveValues) {
+                                        values.push_back(v);
+                                    }
+                                }
                             });
         ASSERT_EQ(EvsResult::OK, result);
+        for (auto &&v : values) {
+            ASSERT_EQ(val0, v);
+        }
 
         // Verify a change notification
-        EvsEvent aNotification = {};
+        aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+        aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
+        aTargetEvent.payload[1] = static_cast<uint32_t>(val0);
         bool timeout =
-            frameHandler0->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+            frameHandler0->waitForEvent(aTargetEvent, aNotification);
         ASSERT_FALSE(timeout) << "Expected event does not arrive";
         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                   EvsEventType::PARAMETER_CHANGED);
         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
                   cam1Cmds[0]);
-        ASSERT_EQ(val1,
-                  static_cast<int32_t>(aNotification.payload[1]));
+        for (auto &&v : values) {
+            ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
+        }
 
         // Client 0 steals a master role
         ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
 
-        frameHandler1->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
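+        // The client that lost the master role is expected to be notified.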
+        aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+        aTargetEvent.payload[0] = 0;
+        aTargetEvent.payload[1] = 0;
+        frameHandler1->waitForEvent(aTargetEvent, aNotification);
         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                   EvsEventType::MASTER_RELEASED);
 
         // Client 0 programs a parameter
         val0 = minVal + (std::rand() % (maxVal - minVal));
-        val1 = 0;
 
         // Rounding down
         val0 = val0 - (val0 % step);
 
         if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
             // Try to turn off auto-focus
-            int32_t val1 = 0;
-            pCam0->getIntParameter(CameraParam::AUTO_FOCUS,
-                               [&result, &val1](auto status, auto value) {
+            values.clear();
+            pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                               [&result, &values](auto status, auto effectiveValues) {
                                    result = status;
                                    if (status == EvsResult::OK) {
-                                      val1 = value;
+                                      for (auto &&v : effectiveValues) {
+                                          values.push_back(v);
+                                      }
                                    }
                                });
-            if (val1 != 0) {
-                pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                   [&result, &val1](auto status, auto effectiveValue) {
-                                       result = status;
-                                       val1 = effectiveValue;
-                                   });
-                ASSERT_EQ(EvsResult::OK, result);
-                ASSERT_EQ(val1, 0);
+            ASSERT_EQ(EvsResult::OK, result);
+            for (auto &&v : values) {
+                ASSERT_EQ(v, 0);
             }
+
+            // Make sure AUTO_FOCUS is off.
+            aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+            aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
+            aTargetEvent.payload[1] = 0;
+            bool timeout =
+                frameHandler1->waitForEvent(aTargetEvent, aNotification);
+            ASSERT_FALSE(timeout) << "Expected event does not arrive";
         }
 
+        values.clear();
         pCam0->setIntParameter(cam0Cmds[0], val0,
-                            [&result, &val1](auto status, auto effectiveValue) {
+                            [&result, &values](auto status, auto effectiveValues) {
                                 result = status;
-                                val1 = effectiveValue;
+                                if (status == EvsResult::OK) {
+                                    for (auto &&v : effectiveValues) {
+                                        values.push_back(v);
+                                    }
+                                }
                             });
         ASSERT_EQ(EvsResult::OK, result);
 
         // Verify a change notification
+        aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+        aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
+        aTargetEvent.payload[1] = static_cast<uint32_t>(val0);
         timeout =
-            frameHandler1->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+            frameHandler1->waitForEvent(aTargetEvent, aNotification);
         ASSERT_FALSE(timeout) << "Expected event does not arrive";
         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                   EvsEventType::PARAMETER_CHANGED);
         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
                   cam0Cmds[0]);
-        ASSERT_EQ(val1,
-                  static_cast<int32_t>(aNotification.payload[1]));
+        for (auto &&v : values) {
+            ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
+        }
 
         // Turn off the display (yes, before the stream stops -- it should be handled)
         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
@@ -1282,6 +1440,7 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // choose a configuration that has a frame rate faster than minReqFps.
         Stream targetCfg = {};
         const int32_t minReqFps = 15;
@@ -1324,6 +1483,9 @@
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Set up a frame receiver object which will fire up its own thread.
         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                          pDisplay,
@@ -1383,6 +1545,7 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // choose a configuration that has a frame rate faster than minReqFps.
         Stream targetCfg = {};
         const int32_t minReqFps = 15;
@@ -1427,6 +1590,9 @@
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam0);
+
         // Try to create the second camera client with different stream
         // configuration.
         int32_t id = targetCfg.id;
@@ -1436,6 +1602,9 @@
             .withDefault(nullptr);
         ASSERT_EQ(pCam1, nullptr);
 
+        // pCam1 is expected to be nullptr here and pCam0 is already stored
+        // above, so there is no new camera handle to keep for a clean-up.
+
         // Try again with same stream configuration.
         targetCfg.id = id;
         pCam1 =