Revert "Revert "Revert "Revert "Extend EVS interfaces and data types""""

This reverts commit 468cc1d476ae4a78787e0730cac8cf53e1806ed1.

Change-Id: If7e80111213ba02bf2c49f17f9880cd9cd14429e
diff --git a/automotive/evs/1.1/Android.bp b/automotive/evs/1.1/Android.bp
index d2e85f1..c850c91 100644
--- a/automotive/evs/1.1/Android.bp
+++ b/automotive/evs/1.1/Android.bp
@@ -10,13 +10,15 @@
         "types.hal",
         "IEvsCamera.hal",
         "IEvsCameraStream.hal",
+        "IEvsEnumerator.hal",
     ],
     interfaces: [
         "android.hardware.automotive.evs@1.0",
+        "android.hardware.camera.device@3.2",
         "android.hardware.graphics.common@1.0",
         "android.hardware.graphics.common@1.1",
         "android.hardware.graphics.common@1.2",
         "android.hidl.base@1.0",
     ],
-    gen_java: true,
+    gen_java: false,
 }
diff --git a/automotive/evs/1.1/IEvsCamera.hal b/automotive/evs/1.1/IEvsCamera.hal
index 21ca79e..975b6c6 100644
--- a/automotive/evs/1.1/IEvsCamera.hal
+++ b/automotive/evs/1.1/IEvsCamera.hal
@@ -26,6 +26,14 @@
  */
 interface IEvsCamera extends @1.0::IEvsCamera {
     /**
+     * Returns the description of this camera.
+     *
+     * @return info The description of this camera.  This must be the same value as
+     *              reported by EvsEnumerator::getCameraList_1_1().
+     */
+    getCameraInfo_1_1() generates (CameraDesc info);
+
+    /**
      * Requests to pause EVS camera stream events.
      *
      * Like stopVideoStream(), events may continue to arrive for some time
@@ -100,7 +108,27 @@
     unsetMaster() generates (EvsResult result);
 
     /**
-     * Requests to set a camera parameter.
+     * Retrieves a list of parameters this camera supports.
+     *
+     * @return params A list of CameraParam that this camera supports.
+     */
+    getParameterList() generates (vec<CameraParam> params);
+
+    /**
+     * Requests the valid value range of a camera parameter.
+     *
+     * @param  id    The identifier of the camera parameter, a CameraParam enum value.
+     *
+     * @return min   The lower bound of the valid parameter value range.
+     * @return max   The upper bound of the valid parameter value range.
+     * @return step  The resolution of values in the valid range.
+     */
+    getIntParameterRange(CameraParam id)
+        generates (int32_t min, int32_t max, int32_t step);
+
+    /**
+     * Requests to set a camera parameter.  Only a request from the master
+     * client will be processed successfully.
      *
      * @param  id             The identifier of camera parameter, CameraParam enum.
      *         value          A desired parameter value.
@@ -114,7 +142,7 @@
      *                        from what the client gives if, for example, the
      *                        driver does not support a target parameter.
      */
-    setParameter(CameraParam id, int32_t value)
+    setIntParameter(CameraParam id, int32_t value)
         generates (EvsResult result, int32_t effectiveValue);
 
     /**
@@ -126,5 +154,5 @@
      *                not supported.
      *         value  A value of requested camera parameter.
      */
-    getParameter(CameraParam id) generates(EvsResult result, int32_t value);
+    getIntParameter(CameraParam id) generates(EvsResult result, int32_t value);
 };
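
For illustration only, not part of this change: a minimal client-side C++ sketch exercising the new parameter methods, assuming a valid IEvsCamera 1.1 proxy obtained from the enumerator; the adjustBrightness() helper and its "middle of the range" policy are made-up examples.

#include <android/hardware/automotive/evs/1.1/IEvsCamera.h>

using ::android::sp;
using ::android::hardware::hidl_vec;
using ::android::hardware::automotive::evs::V1_0::EvsResult;
using ::android::hardware::automotive::evs::V1_1::CameraParam;
using ::android::hardware::automotive::evs::V1_1::IEvsCamera;

// Query the supported controls and program BRIGHTNESS to the middle of its range.
void adjustBrightness(const sp<IEvsCamera>& camera) {
    hidl_vec<CameraParam> supported;
    camera->getParameterList([&](const auto& v) { supported = v; });

    for (auto id : supported) {
        if (id != CameraParam::BRIGHTNESS) continue;

        // Ask for the valid range before programming the control.
        int32_t lo = 0, hi = 0, step = 1;
        camera->getIntParameterRange(id,
            [&](int32_t min, int32_t max, int32_t s) { lo = min; hi = max; step = s; });
        (void)step;  // granularity of accepted values

        // Only a master client's request is processed successfully.
        camera->setMaster();
        camera->setIntParameter(id, (lo + hi) / 2,
            [](EvsResult result, int32_t effective) {
                (void)result; (void)effective;  // e.g. verify the value actually applied
            });
    }
}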
diff --git a/automotive/evs/1.1/IEvsCameraStream.hal b/automotive/evs/1.1/IEvsCameraStream.hal
index 7c7f832..9e4ea19 100644
--- a/automotive/evs/1.1/IEvsCameraStream.hal
+++ b/automotive/evs/1.1/IEvsCameraStream.hal
@@ -17,15 +17,32 @@
 package android.hardware.automotive.evs@1.1;
 
 import @1.0::IEvsCameraStream;
+import @1.1::BufferDesc;
+import @1.1::EvsEvent;
 
 /**
  * Implemented on client side to receive asynchronous streaming event deliveries.
  */
 interface IEvsCameraStream extends @1.0::IEvsCameraStream {
+
+    /**
+     * Receives calls from the HAL each time a video frame is ready for inspection.
+     * Buffer handles received by this method must be returned via calls to
+     * IEvsCamera::doneWithFrame_1_1(). When the video stream is stopped via a call
+     * to IEvsCamera::stopVideoStream(), this callback may continue to happen for
+     * some time as the pipeline drains. Each frame must still be returned.
+     * When the last frame in the stream has been delivered, a STREAM_STOPPED
+     * event must be delivered.  No further frame deliveries may happen
+     * thereafter.
+     *
+     * @param  buffer A buffer descriptor of the delivered image frame.
+     */
+    oneway deliverFrame_1_1(BufferDesc buffer);
+
     /**
      * Receives calls from the HAL each time an event happens.
      *
      * @param  event EVS event with possible event information.
      */
-    oneway notifyEvent(EvsEvent event);
+    oneway notify(EvsEvent event);
 };
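
For illustration only, not part of this change: a minimal receiver implementing this 1.1 stream interface. The FrameHandler class and its rendering hook are hypothetical; it assumes the EvsEvent layout used elsewhere in this patch (an aType field) and returns every buffer via IEvsCamera::doneWithFrame_1_1() as required above.

#include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
#include <android/hardware/automotive/evs/1.1/IEvsCameraStream.h>

using ::android::sp;
using ::android::hardware::Return;
using ::android::hardware::Void;
using BufferDesc_1_0 = ::android::hardware::automotive::evs::V1_0::BufferDesc;
using BufferDesc_1_1 = ::android::hardware::automotive::evs::V1_1::BufferDesc;
using ::android::hardware::automotive::evs::V1_1::EvsEvent;
using ::android::hardware::automotive::evs::V1_1::EvsEventType;
using ::android::hardware::automotive::evs::V1_1::IEvsCamera;
using ::android::hardware::automotive::evs::V1_1::IEvsCameraStream;

class FrameHandler : public IEvsCameraStream {
public:
    explicit FrameHandler(const sp<IEvsCamera>& camera) : mCamera(camera) {}

    // v1.1 frame path: every delivered buffer must go back via doneWithFrame_1_1().
    Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer) override {
        // ... inspect or render the frame here ...
        mCamera->doneWithFrame_1_1(buffer);
        return Void();
    }

    // v1.1 event path: STREAM_STOPPED marks the end of the drained pipeline.
    Return<void> notify(const EvsEvent& event) override {
        if (event.aType == EvsEventType::STREAM_STOPPED) {
            // No further frames will arrive after this event.
        }
        return Void();
    }

    // v1.0 frame path still has to be handled for legacy buffers.
    Return<void> deliverFrame(const BufferDesc_1_0& buffer) override {
        mCamera->doneWithFrame(buffer);
        return Void();
    }

private:
    sp<IEvsCamera> mCamera;
};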
diff --git a/automotive/evs/1.1/IEvsEnumerator.hal b/automotive/evs/1.1/IEvsEnumerator.hal
new file mode 100644
index 0000000..1695821
--- /dev/null
+++ b/automotive/evs/1.1/IEvsEnumerator.hal
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.automotive.evs@1.1;
+
+import IEvsCamera;
+import @1.0::IEvsEnumerator;
+import @1.0::EvsResult;
+import android.hardware.camera.device@3.2::Stream;
+
+/**
+ * Provides the mechanism for EVS camera discovery.
+ */
+interface IEvsEnumerator extends @1.0::IEvsEnumerator {
+    /**
+     * Returns a list of all EVS cameras available to the system.
+     *
+     * @return cameras A list of cameras available for EVS service.
+     */
+    getCameraList_1_1() generates (vec<CameraDesc> cameras);
+
+    /**
+     * Gets the IEvsCamera associated with a cameraId from a CameraDesc
+     *
+     * Given a camera's unique cameraId from CameraDesc, returns the
+     * IEvsCamera interface associated with the specified camera. When
+     * done using the camera, the caller may release it by calling closeCamera().
+     *
+     * @param  cameraId  A unique identifier of the camera.
+     * @param  streamCfg A stream configuration the client wants to use.
+     * @return evsCamera The EvsCamera object associated with the given cameraId.
+     *                   The returned object is null if the camera device does
+     *                   not support the given stream configuration or is already
+     *                   configured differently by another client.
+     */
+    openCamera_1_1(string cameraId, Stream streamCfg) generates (IEvsCamera evsCamera);
+};
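
For illustration only, not part of this change: a sketch of how a client might enumerate and open a camera through this extended interface. The service instance name, the 640x480 configuration, and the "first camera" selection are illustrative assumptions; a real client would choose a configuration advertised in CameraDesc::metadata.

#include <vector>

#include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
#include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>

using ::android::sp;
using ::android::hardware::camera::device::V3_2::Stream;
using ::android::hardware::automotive::evs::V1_1::CameraDesc;
using ::android::hardware::automotive::evs::V1_1::IEvsCamera;
using ::android::hardware::automotive::evs::V1_1::IEvsEnumerator;

sp<IEvsCamera> openFirstCamera() {
    sp<IEvsEnumerator> enumerator = IEvsEnumerator::getService("default");
    if (enumerator == nullptr) return nullptr;

    // Enumerate cameras together with their camera_metadata-backed descriptions.
    std::vector<CameraDesc> cameras;
    enumerator->getCameraList_1_1([&](const auto& list) {
        cameras.assign(list.begin(), list.end());
    });
    if (cameras.empty()) return nullptr;

    // Illustrative configuration; pick one advertised by the camera in practice.
    Stream cfg = {};
    cfg.width  = 640;
    cfg.height = 480;

    // Null is returned if this configuration cannot be satisfied or the camera
    // is already configured differently by another client.
    sp<IEvsCamera> camera = enumerator->openCamera_1_1(cameras[0].v1.cameraId, cfg);
    return camera;
}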
diff --git a/automotive/evs/1.1/default/Android.bp b/automotive/evs/1.1/default/Android.bp
index a463471..41cb426 100644
--- a/automotive/evs/1.1/default/Android.bp
+++ b/automotive/evs/1.1/default/Android.bp
@@ -7,25 +7,41 @@
         "service.cpp",
         "EvsCamera.cpp",
         "EvsEnumerator.cpp",
-        "EvsDisplay.cpp"
+        "EvsDisplay.cpp",
+        "ConfigManager.cpp",
+        "ConfigManagerUtil.cpp",
     ],
     init_rc: ["android.hardware.automotive.evs@1.1-service.rc"],
 
     shared_libs: [
         "android.hardware.automotive.evs@1.0",
         "android.hardware.automotive.evs@1.1",
+        "android.hardware.camera.device@3.2",
         "libbase",
         "libbinder",
-        "libcutils",
+        "liblog",
         "libhardware",
         "libhidlbase",
         "liblog",
         "libui",
         "libutils",
+        "libcamera_metadata",
+        "libtinyxml2",
     ],
 
     cflags: [
         "-O0",
         "-g",
     ],
+
+    required: [
+        "evs_default_configuration.xml",
+    ],
+}
+
+prebuilt_etc {
+    name: "evs_default_configuration.xml",
+
+    src: "resources/evs_default_configuration.xml",
+    sub_dir: "automotive/evs",
 }
diff --git a/automotive/evs/1.1/default/ConfigManager.cpp b/automotive/evs/1.1/default/ConfigManager.cpp
new file mode 100644
index 0000000..4f46f9d
--- /dev/null
+++ b/automotive/evs/1.1/default/ConfigManager.cpp
@@ -0,0 +1,487 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sstream>
+#include <fstream>
+#include <thread>
+
+#include <hardware/gralloc.h>
+#include <utils/SystemClock.h>
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+
+#include "ConfigManager.h"
+
+using ::android::hardware::camera::device::V3_2::StreamRotation;
+
+
+ConfigManager::~ConfigManager() {
+    /* Nothing to do */
+}
+
+
+void ConfigManager::readCameraInfo(const XMLElement * const aCameraElem) {
+    if (aCameraElem == nullptr) {
+        ALOGW("XML file does not have required camera element");
+        return;
+    }
+
+    const XMLElement *curElem = aCameraElem->FirstChildElement();
+    while (curElem != nullptr) {
+        if (!strcmp(curElem->Name(), "group")) {
+            /* camera group identifier */
+            const char *group_id = curElem->FindAttribute("group_id")->Value();
+
+            /* create CameraGroup */
+            unique_ptr<ConfigManager::CameraGroup> aCameraGroup(new ConfigManager::CameraGroup());
+
+            /* add a camera device to its group */
+            addCameraDevices(curElem->FindAttribute("device_id")->Value(), aCameraGroup);
+
+            /* a list of camera stream configurations */
+            const XMLElement *childElem =
+                curElem->FirstChildElement("caps")->FirstChildElement("stream");
+            while (childElem != nullptr) {
+                /* read 5 attributes */
+                const XMLAttribute *idAttr     = childElem->FindAttribute("id");
+                const XMLAttribute *widthAttr  = childElem->FindAttribute("width");
+                const XMLAttribute *heightAttr = childElem->FindAttribute("height");
+                const XMLAttribute *fmtAttr    = childElem->FindAttribute("format");
+                const XMLAttribute *fpsAttr    = childElem->FindAttribute("framerate");
+
+                const int32_t id = stoi(idAttr->Value());
+                int32_t framerate = 0;
+                if (fpsAttr != nullptr) {
+                    framerate = stoi(fpsAttr->Value());
+                }
+
+                int32_t pixFormat;
+                if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(),
+                                                            pixFormat)) {
+                    RawStreamConfiguration cfg = {
+                        id,
+                        stoi(widthAttr->Value()),
+                        stoi(heightAttr->Value()),
+                        pixFormat,
+                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                        framerate
+                    };
+                    aCameraGroup->streamConfigurations[id] = cfg;
+                }
+
+                childElem = childElem->NextSiblingElement("stream");
+            }
+
+            /* camera group synchronization */
+            const char *sync = curElem->FindAttribute("synchronized")->Value();
+            aCameraGroup->synchronized =
+                static_cast<bool>(strcmp(sync, "false"));
+
+            /* add a group to hash map */
+            mCameraGroups[group_id] = std::move(aCameraGroup);
+        } else if (!strcmp(curElem->Name(), "device")) {
+            /* camera unique identifier */
+            const char *id = curElem->FindAttribute("id")->Value();
+
+            /* camera mount location */
+            const char *pos = curElem->FindAttribute("position")->Value();
+
+            /* store read camera module information */
+            mCameraInfo[id] = readCameraDeviceInfo(curElem);
+
+            /* assign a camera device to a position group */
+            mCameraPosition[pos].emplace(id);
+        } else {
+            /* ignore other device types */
+            ALOGD("Unknown element %s is ignored", curElem->Name());
+        }
+
+        curElem = curElem->NextSiblingElement();
+    }
+}
+
+
+unique_ptr<ConfigManager::CameraInfo>
+ConfigManager::readCameraDeviceInfo(const XMLElement *aDeviceElem) {
+    if (aDeviceElem == nullptr) {
+        return nullptr;
+    }
+
+    /* create a CameraInfo to be filled */
+    unique_ptr<ConfigManager::CameraInfo> aCamera(new ConfigManager::CameraInfo());
+
+    /* size information to allocate camera_metadata_t */
+    size_t totalEntries = 0;
+    size_t totalDataSize = 0;
+
+    /* read device capabilities */
+    totalEntries +=
+        readCameraCapabilities(aDeviceElem->FirstChildElement("caps"),
+                               aCamera,
+                               totalDataSize);
+
+
+    /* read camera metadata */
+    totalEntries +=
+        readCameraMetadata(aDeviceElem->FirstChildElement("characteristics"),
+                           aCamera,
+                           totalDataSize);
+
+    /* construct camera_metadata_t */
+    if (!constructCameraMetadata(aCamera, totalEntries, totalDataSize)) {
+        ALOGW("Either failed to allocate memory or "
+              "allocated memory was not large enough");
+    }
+
+    return aCamera;
+}
+
+
+size_t ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
+                                             unique_ptr<ConfigManager::CameraInfo> &aCamera,
+                                             size_t &dataSize) {
+    if (aCapElem == nullptr) {
+        return 0;
+    }
+
+    string token;
+    const XMLElement *curElem = nullptr;
+
+    /* a list of supported camera parameters/controls */
+    curElem = aCapElem->FirstChildElement("supported_controls");
+    if (curElem != nullptr) {
+        const XMLElement *ctrlElem = curElem->FirstChildElement("control");
+        while (ctrlElem != nullptr) {
+            const char *nameAttr = ctrlElem->FindAttribute("name")->Value();
+            const int32_t minVal = stoi(ctrlElem->FindAttribute("min")->Value());
+            const int32_t maxVal = stoi(ctrlElem->FindAttribute("max")->Value());
+
+            int32_t stepVal = 1;
+            const XMLAttribute *stepAttr = ctrlElem->FindAttribute("step");
+            if (stepAttr != nullptr) {
+                stepVal = stoi(stepAttr->Value());
+            }
+
+            CameraParam aParam;
+            if (ConfigManagerUtil::convertToEvsCameraParam(nameAttr,
+                                                           aParam)) {
+                aCamera->controls.emplace(
+                    aParam,
+                    make_tuple(minVal, maxVal, stepVal)
+                );
+            }
+
+            ctrlElem = ctrlElem->NextSiblingElement("control");
+        }
+    }
+
+    /* a list of camera stream configurations */
+    curElem = aCapElem->FirstChildElement("stream");
+    while (curElem != nullptr) {
+        /* read 5 attributes */
+        const XMLAttribute *idAttr     = curElem->FindAttribute("id");
+        const XMLAttribute *widthAttr  = curElem->FindAttribute("width");
+        const XMLAttribute *heightAttr = curElem->FindAttribute("height");
+        const XMLAttribute *fmtAttr    = curElem->FindAttribute("format");
+        const XMLAttribute *fpsAttr    = curElem->FindAttribute("framerate");
+
+        const int32_t id = stoi(idAttr->Value());
+        int32_t framerate = 0;
+        if (fpsAttr != nullptr) {
+            framerate = stoi(fpsAttr->Value());
+        }
+
+        int32_t pixFormat;
+        if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(),
+                                                    pixFormat)) {
+            RawStreamConfiguration cfg = {
+                id,
+                stoi(widthAttr->Value()),
+                stoi(heightAttr->Value()),
+                pixFormat,
+                ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
+                framerate
+            };
+            aCamera->streamConfigurations[id] = cfg;
+        }
+
+        curElem = curElem->NextSiblingElement("stream");
+    }
+
+    dataSize = calculate_camera_metadata_entry_data_size(
+                   get_camera_metadata_tag_type(
+                       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+                   ),
+                   aCamera->streamConfigurations.size() * kStreamCfgSz
+               );
+
+    /* a single camera metadata entry contains multiple stream configurations */
+    return dataSize > 0 ? 1 : 0;
+}
+
+
+size_t ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
+                                       unique_ptr<ConfigManager::CameraInfo> &aCamera,
+                                       size_t &dataSize) {
+    if (aParamElem == nullptr) {
+        return 0;
+    }
+
+    const XMLElement *curElem = aParamElem->FirstChildElement("parameter");
+    size_t numEntries = 0;
+    camera_metadata_tag_t tag;
+    while (curElem != nullptr) {
+        /* process the parameter only if its name maps to a known metadata tag */
+        if (ConfigManagerUtil::convertToMetadataTag(
+                curElem->FindAttribute("name")->Value(), tag)) {
+            switch(tag) {
+                case ANDROID_LENS_DISTORTION:
+                case ANDROID_LENS_POSE_ROTATION:
+                case ANDROID_LENS_POSE_TRANSLATION:
+                case ANDROID_LENS_INTRINSIC_CALIBRATION: {
+                    /* float[] */
+                    size_t count = 0;
+                    void   *data = ConfigManagerUtil::convertFloatArray(
+                                        curElem->FindAttribute("size")->Value(),
+                                        curElem->FindAttribute("value")->Value(),
+                                        count
+                                   );
+
+                    aCamera->cameraMetadata[tag] =
+                        make_pair(make_unique<void *>(data), count);
+
+                    ++numEntries;
+                    dataSize += calculate_camera_metadata_entry_data_size(
+                                    get_camera_metadata_tag_type(tag), count
+                                );
+
+                    break;
+                }
+
+                default:
+                    ALOGW("Parameter %s is not supported",
+                          curElem->FindAttribute("name")->Value());
+                    break;
+            }
+        }
+
+        curElem = curElem->NextSiblingElement("parameter");
+    }
+
+    return numEntries;
+}
+
+
+bool ConfigManager::constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
+                                            const size_t totalEntries,
+                                            const size_t totalDataSize) {
+    if (!aCamera->allocate(totalEntries, totalDataSize)) {
+        ALOGE("Failed to allocate memory for camera metadata");
+        return false;
+    }
+
+    const size_t numStreamConfigs = aCamera->streamConfigurations.size();
+    unique_ptr<int32_t[]> data(new int32_t[kStreamCfgSz * numStreamConfigs]);
+    int32_t *ptr = data.get();
+    for (auto &cfg : aCamera->streamConfigurations) {
+        for (auto i = 0; i < kStreamCfgSz; ++i) {
+          *ptr++ = cfg.second[i];
+        }
+    }
+    int32_t err = add_camera_metadata_entry(aCamera->characteristics,
+                                            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                                            data.get(),
+                                            numStreamConfigs * kStreamCfgSz);
+
+    if (err) {
+        ALOGE("Failed to add stream configurations to metadata, ignored");
+        return false;
+    }
+
+    bool success = true;
+    for (auto &[tag, entry] : aCamera->cameraMetadata) {
+        /* try to add new camera metadata entry */
+        int32_t err = add_camera_metadata_entry(aCamera->characteristics,
+                                                tag,
+                                                entry.first.get(),
+                                                entry.second);
+        if (err) {
+            ALOGE("Failed to add an entry with a tag 0x%X", tag);
+
+            /* may exceed preallocated capacity */
+            ALOGE("Camera metadata has %ld / %ld entries and %ld / %ld bytes are filled",
+                  get_camera_metadata_entry_count(aCamera->characteristics),
+                  get_camera_metadata_entry_capacity(aCamera->characteristics),
+                  get_camera_metadata_data_count(aCamera->characteristics),
+                  get_camera_metadata_data_capacity(aCamera->characteristics));
+            ALOGE("\tCurrent metadata entry requires %ld bytes",
+                  calculate_camera_metadata_entry_data_size(tag, entry.second));
+
+            success = false;
+        }
+    }
+
+    ALOGV("Camera metadata has %ld / %ld entries and %ld / %ld bytes are filled",
+          get_camera_metadata_entry_count(aCamera->characteristics),
+          get_camera_metadata_entry_capacity(aCamera->characteristics),
+          get_camera_metadata_data_count(aCamera->characteristics),
+          get_camera_metadata_data_capacity(aCamera->characteristics));
+
+    return success;
+}
+
+
+void ConfigManager::readSystemInfo(const XMLElement * const aSysElem) {
+    if (aSysElem == nullptr) {
+        return;
+    }
+
+    /*
+     * Please note that this function assumes that a given system XML element
+     * and its child elements follow the DTD.  If they do not, this will cause
+     * a segmentation fault because an expected attribute cannot be found.
+     */
+
+    /* read number of cameras available in the system */
+    const XMLElement *xmlElem = aSysElem->FirstChildElement("num_cameras");
+    if (xmlElem != nullptr) {
+        mSystemInfo.numCameras =
+            stoi(xmlElem->FindAttribute("value")->Value());
+    }
+}
+
+
+void ConfigManager::readDisplayInfo(const XMLElement * const aDisplayElem) {
+    if (aDisplayElem == nullptr) {
+        ALOGW("XML file does not have required camera element");
+        return;
+    }
+
+    const XMLElement *curDev = aDisplayElem->FirstChildElement("device");
+    while (curDev != nullptr) {
+        const char *id = curDev->FindAttribute("id")->Value();
+        //const char *pos = curDev->FirstAttribute("position")->Value();
+
+        unique_ptr<DisplayInfo> dpy(new DisplayInfo());
+        if (dpy == nullptr) {
+            ALOGE("Failed to allocate memory for DisplayInfo");
+            return;
+        }
+
+        const XMLElement *cap = curDev->FirstChildElement("caps");
+        if (cap != nullptr) {
+            const XMLElement *curStream = cap->FirstChildElement("stream");
+            while (curStream != nullptr) {
+                /* read 4 attributes */
+                const XMLAttribute *idAttr     = curStream->FindAttribute("id");
+                const XMLAttribute *widthAttr  = curStream->FindAttribute("width");
+                const XMLAttribute *heightAttr = curStream->FindAttribute("height");
+                const XMLAttribute *fmtAttr    = curStream->FindAttribute("format");
+
+                const int32_t id = stoi(idAttr->Value());
+                int32_t pixFormat;
+                if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(),
+                                                            pixFormat)) {
+                    RawStreamConfiguration cfg = {
+                        id,
+                        stoi(widthAttr->Value()),
+                        stoi(heightAttr->Value()),
+                        pixFormat,
+                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
+                        0   // unused
+                    };
+                    dpy->streamConfigurations[id] = cfg;
+                }
+
+                curStream = curStream->NextSiblingElement("stream");
+            }
+        }
+
+        mDisplayInfo[id] = std::move(dpy);
+        curDev = curDev->NextSiblingElement("device");
+    }
+
+    return;
+}
+
+
+bool ConfigManager::readConfigDataFromXML() noexcept {
+    XMLDocument xmlDoc;
+
+    const int64_t parsingStart = android::elapsedRealtimeNano();
+
+    /* load and parse a configuration file */
+    xmlDoc.LoadFile(mConfigFilePath);
+    if (xmlDoc.ErrorID() != XML_SUCCESS) {
+        ALOGE("Failed to load and/or parse a configuration file, %s", xmlDoc.ErrorStr());
+        return false;
+    }
+
+    /* retrieve the root element */
+    const XMLElement *rootElem = xmlDoc.RootElement();
+    if (strcmp(rootElem->Name(), "configuration")) {
+        ALOGE("A configuration file is not in the required format.  "
+              "See /etc/automotive/evs/evs_configuration.dtd");
+        return false;
+    }
+
+    /*
+     * parse camera information; this needs to be done before reading system
+     * information
+     */
+    readCameraInfo(rootElem->FirstChildElement("camera"));
+
+    /* parse system information */
+    readSystemInfo(rootElem->FirstChildElement("system"));
+
+    /* parse display information */
+    readDisplayInfo(rootElem->FirstChildElement("display"));
+
+    const int64_t parsingEnd = android::elapsedRealtimeNano();
+    ALOGI("Parsing configuration file takes %lf (ms)",
+          (double)(parsingEnd - parsingStart) / 1000000.0);
+
+
+    return true;
+}
+
+
+void ConfigManager::addCameraDevices(const char *devices,
+                                     unique_ptr<CameraGroup> &aGroup) {
+    stringstream device_list(devices);
+    string token;
+    while (getline(device_list, token, ',')) {
+        aGroup->devices.emplace(token);
+    }
+}
+
+
+std::unique_ptr<ConfigManager> ConfigManager::Create(const char *path) {
+    unique_ptr<ConfigManager> cfgMgr(new ConfigManager(path));
+
+    /*
+     * Read a configuration from XML file
+     *
+     * If this is too slow, ConfigManager::readConfigDataFromBinary() and
+     * ConfigManager::writeConfigDataToBinary() can serialize CameraInfo
+     * objects to the filesystem and reconstruct them from there instead;
+     * this was measured to be roughly 10x faster.
+     */
+    if (!cfgMgr->readConfigDataFromXML()) {
+        return nullptr;
+    } else {
+        return cfgMgr;
+    }
+}
+
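
For illustration only, not part of this change: a fragment of the kind of configuration file this parser consumes. Element and attribute names follow readSystemInfo(), readCameraInfo(), readCameraCapabilities(), readCameraMetadata(), and readDisplayInfo() above; every identifier and value is a made-up example, not the shipped evs_default_configuration.xml.

<?xml version="1.0" encoding="utf-8"?>
<configuration>
    <system>
        <num_cameras value="1"/>
    </system>
    <camera>
        <device id="/dev/video0" position="rear">
            <caps>
                <supported_controls>
                    <control name="BRIGHTNESS" min="0" max="255" step="1"/>
                </supported_controls>
                <stream id="0" width="640" height="480" format="RGBA_8888" framerate="30"/>
            </caps>
            <characteristics>
                <parameter name="LENS_POSE_TRANSLATION" size="3" value="0.0,0.0,0.0"/>
            </characteristics>
        </device>
    </camera>
    <display>
        <device id="display0">
            <caps>
                <stream id="0" width="1280" height="720" format="RGBA_8888"/>
            </caps>
        </device>
    </display>
</configuration>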
diff --git a/automotive/evs/1.1/default/ConfigManager.h b/automotive/evs/1.1/default/ConfigManager.h
new file mode 100644
index 0000000..0275f90
--- /dev/null
+++ b/automotive/evs/1.1/default/ConfigManager.h
@@ -0,0 +1,336 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef CONFIG_MANAGER_H
+#define CONFIG_MANAGER_H
+
+#include <vector>
+#include <string>
+#include <unordered_map>
+#include <unordered_set>
+
+#include <tinyxml2.h>
+
+#include <system/camera_metadata.h>
+#include <log/log.h>
+#include <android/hardware/automotive/evs/1.1/types.h>
+
+#include "ConfigManagerUtil.h"
+
+using namespace std;
+using namespace tinyxml2;
+
+using ::android::hardware::hidl_vec;
+using ::android::hardware::camera::device::V3_2::Stream;
+using ::android::hardware::automotive::evs::V1_1::CameraParam;
+
+/*
+ * Please note that this is different from what is defined in
+ * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
+ * field to store a framerate.
+ */
+const size_t kStreamCfgSz = 6;
+typedef std::array<int32_t, kStreamCfgSz> RawStreamConfiguration;
+
+class ConfigManager {
+public:
+    static std::unique_ptr<ConfigManager> Create(const char *path = "");
+    ConfigManager(const ConfigManager&) = delete;
+    ConfigManager& operator=(const ConfigManager&) = delete;
+
+    virtual ~ConfigManager();
+
+    /* Camera device's capabilities and metadata */
+    class CameraInfo {
+    public:
+        CameraInfo() :
+            characteristics(nullptr) {
+            /* Nothing to do */
+        }
+
+        virtual ~CameraInfo() {
+            free_camera_metadata(characteristics);
+        }
+
+        /* Allocate memory for camera_metadata_t */
+        bool allocate(size_t entry_cap, size_t data_cap) {
+            if (characteristics != nullptr) {
+                ALOGE("Camera metadata is already allocated");
+                return false;
+            }
+
+            characteristics = allocate_camera_metadata(entry_cap, data_cap);
+            return characteristics != nullptr;
+        }
+
+        /*
+         * List of supported controls that the master client can program.
+         * Parameters are stored with their valid ranges.
+         */
+        unordered_map<CameraParam,
+                      tuple<int32_t, int32_t, int32_t>> controls;
+
+        /* List of supported frame rates */
+        unordered_set<int32_t> frameRates;
+
+        /*
+         * List of supported output stream configurations; each array stores
+         * the id, width, height, format, direction, and framerate values, in
+         * that order.
+         */
+        unordered_map<int32_t, RawStreamConfiguration> streamConfigurations;
+
+        /*
+         * Internal storage for camera metadata.  Each entry holds a pointer to
+         * data and number of elements
+         */
+        unordered_map<camera_metadata_tag_t,
+                      pair<unique_ptr<void *>, size_t>> cameraMetadata;
+
+        /* Camera module characteristics */
+        camera_metadata_t *characteristics;
+    };
+
+    class CameraGroup {
+    public:
+        CameraGroup() {}
+
+        /* ID of member camera devices */
+        unordered_set<string> devices;
+
+        /* True if the capture operations of member camera devices are synchronized */
+        bool synchronized = false;
+
+        /*
+         * List of stream configurations that are supported by all camera devices
+         * in this group.
+         */
+        unordered_map<int32_t, RawStreamConfiguration> streamConfigurations;
+    };
+
+    class SystemInfo {
+    public:
+        /* number of available cameras */
+        int32_t numCameras = 0;
+    };
+
+    class DisplayInfo {
+    public:
+        /*
+         * List of supported input stream configurations; each array stores
+         * the id, width, height, format, direction, and framerate values, in
+         * that order.
+         */
+        unordered_map<int32_t, RawStreamConfiguration> streamConfigurations;
+    };
+
+    /*
+     * Return system information
+     *
+     * @return SystemInfo
+     *         Constant reference of SystemInfo.
+     */
+    const SystemInfo &getSystemInfo() {
+        return mSystemInfo;
+    }
+
+    /*
+     * Return a list of cameras
+     *
+     * This function assumes that it is not being called frequently.
+     *
+     * @return vector<string>
+     *         A vector that contains unique camera device identifiers.
+     */
+    vector<string> getCameraList() {
+        vector<string> aList;
+        for (auto &v : mCameraInfo) {
+            aList.emplace_back(v.first);
+        }
+
+        return aList;
+    }
+
+
+    /*
+     * Return the camera group identified by a given group id
+     *
+     * @return CameraGroup
+     *         A pointer to a camera group identified by a given id.
+     */
+    unique_ptr<CameraGroup>& getCameraGroup(const string& gid) {
+        return mCameraGroups[gid];
+    }
+
+
+    /*
+     * Return the camera information associated with a given camera id
+     *
+     * @param  cameraId
+     *         Unique camera node identifier in string
+     *
+     * @return unique_ptr<CameraInfo>
+     *         A pointer to CameraInfo that is associated with a given camera
+     *         ID.  This returns a null pointer if it does not recognize the
+     *         given camera identifier.
+     */
+    unique_ptr<CameraInfo>& getCameraInfo(const string cameraId) noexcept {
+        return mCameraInfo[cameraId];
+    }
+
+private:
+    /* Constructors */
+    ConfigManager(const char *xmlPath) :
+        mConfigFilePath(xmlPath) {
+    }
+
+    /* System configuration */
+    SystemInfo mSystemInfo;
+
+    /* Internal data structure for camera device information */
+    unordered_map<string, unique_ptr<CameraInfo>> mCameraInfo;
+
+    /* Internal data structure for camera device information */
+    unordered_map<string, unique_ptr<DisplayInfo>> mDisplayInfo;
+
+    /* Camera groups are stored in <group id, CameraGroup> hash map */
+    unordered_map<string, unique_ptr<CameraGroup>> mCameraGroups;
+
+    /*
+     * Camera positions are stored in <position, camera id set> hash map.
+     * The position must be one of front, rear, left, and right.
+     */
+    unordered_map<string, unordered_set<string>>  mCameraPosition;
+
+    /* A path to XML configuration file */
+    const char *mConfigFilePath;
+
+    /*
+     * Parse a given EVS configuration file and store the information
+     * internally.
+     *
+     * @return bool
+     *         True if it completes parsing a file successfully.
+     */
+    bool readConfigDataFromXML() noexcept;
+
+    /*
+     * read system information
+     *
+     * @param  aSysElem
+     *         A pointer to "system" XML element.
+     */
+    void readSystemInfo(const XMLElement * const aSysElem);
+
+    /*
+     * read the information of camera devices
+     *
+     * @param  aCameraElem
+     *         A pointer to "camera" XML element that may contain multiple
+     *         "device" elements.
+     */
+    void readCameraInfo(const XMLElement * const aCameraElem);
+
+    /*
+     * read display device information
+     *
+     * @param  aDisplayElem
+     *         A pointer to "display" XML element that may contain multiple
+     *         "device" elements.
+     */
+    void readDisplayInfo(const XMLElement * const aDisplayElem);
+
+    /*
+     * read camera device information
+     *
+     * @param  aDeviceElem
+     *         A pointer to "device" XML element that contains camera module
+     *         capability info and its characteristics.
+     *
+     * @return unique_ptr<CameraInfo>
+     *         A pointer to CameraInfo class that contains camera module
+     *         capability and characteristics.  Please note that this transfers
+     *         the ownership of created CameraInfo to the caller.
+     */
+    unique_ptr<CameraInfo> readCameraDeviceInfo(const XMLElement *aDeviceElem);
+
+    /*
+     * read camera device capabilities
+     *
+     * @param  aCapElem
+     *         A pointer to the "caps" XML element.
+     * @param  aCamera
+     *         A pointer to CameraInfo that is being filled by this method.
+     * @param  dataSize
+     *         Required size of memory to store camera metadata found in this
+     *         method.  This is calculated in this method and returned to the
+     *         caller for camera_metadata allocation.
+     *
+     * @return size_t
+     *         Number of camera metadata entries
+     */
+    size_t readCameraCapabilities(const XMLElement * const aCapElem,
+                                  unique_ptr<CameraInfo> &aCamera,
+                                  size_t &dataSize);
+
+    /*
+     * read camera metadata
+     *
+     * @param  aParamElem
+     *         A pointer to "characteristics" XML element.
+     * @param  aCamera
+     *         A pointer to CameraInfo that is being filled by this method.
+     * @param  dataSize
+     *         Required size of memory to store camera metadata found in this
+     *         method.
+     *
+     * @return size_t
+     *         Number of camera metadata entries
+     */
+    size_t readCameraMetadata(const XMLElement * const aParamElem,
+                              unique_ptr<CameraInfo> &aCamera,
+                              size_t &dataSize);
+
+    /*
+     * construct camera_metadata_t from camera capabilities and metadata
+     *
+     * @param  aCamera
+     *         A pointer to CameraInfo that is being filled by this method.
+     * @param  totalEntries
+     *         Number of camera metadata entries to be added.
+     * @param  totalDataSize
+     *         Sum of sizes of camera metadata entries to be added.
+     *
+     * @return bool
+     *         False if either it fails to allocate memory for camera metadata
+     *         or its size is not large enough to add all found camera metadata
+     *         entries.
+     */
+    bool constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
+                                 const size_t totalEntries,
+                                 const size_t totalDataSize);
+
+    /*
+     * parse a comma-separated list of camera devices and add them to
+     * CameraGroup.
+     *
+     * @param  devices
+     *         A comma-separated list of camera device identifiers.
+     * @param  aGroup
+     *         The camera group to which the cameras will be added.
+     */
+    void addCameraDevices(const char *devices,
+                          unique_ptr<CameraGroup> &aGroup);
+};
+#endif // CONFIG_MANAGER_H
+
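
For illustration only, not part of this change: a minimal consumer of this class, mirroring how the default EvsEnumerator uses it further below. The initFromConfig() helper and its return policy are hypothetical.

#include <memory>

#include "ConfigManager.h"

bool initFromConfig() {
    // Same path the default EvsEnumerator passes to Create() below.
    std::unique_ptr<ConfigManager> cfgMgr =
        ConfigManager::Create("/etc/automotive/evs/evs_sample_configuration.xml");
    if (cfgMgr == nullptr) {
        return false;   // failed to load or parse the configuration file
    }

    for (const auto& id : cfgMgr->getCameraList()) {
        // characteristics holds the camera_metadata_t assembled by
        // constructCameraMetadata(); an unknown id yields a null entry.
        auto& info = cfgMgr->getCameraInfo(id);
        (void)info;
    }

    return cfgMgr->getSystemInfo().numCameras > 0;
}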
diff --git a/automotive/evs/1.1/default/ConfigManagerUtil.cpp b/automotive/evs/1.1/default/ConfigManagerUtil.cpp
new file mode 100644
index 0000000..8206daa
--- /dev/null
+++ b/automotive/evs/1.1/default/ConfigManagerUtil.cpp
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ConfigManagerUtil.h"
+
+#include <string>
+#include <sstream>
+#include <linux/videodev2.h>
+
+#include <log/log.h>
+#include <system/graphics-base-v1.0.h>
+
+
+bool ConfigManagerUtil::convertToEvsCameraParam(const string &id,
+                                                CameraParam &camParam) {
+    string trimmed = ConfigManagerUtil::trimString(id);
+    bool success = true;
+
+    if (!trimmed.compare("BRIGHTNESS")) {
+        camParam =  CameraParam::BRIGHTNESS;
+    } else if (!trimmed.compare("CONTRAST")) {
+        camParam =  CameraParam::CONTRAST;
+    } else if (!trimmed.compare("AUTOGAIN")) {
+        camParam =  CameraParam::AUTOGAIN;
+    } else if (!trimmed.compare("GAIN")) {
+        camParam =  CameraParam::GAIN;
+    } else if (!trimmed.compare("AUTO_WHITE_BALANCE")) {
+        camParam =  CameraParam::AUTO_WHITE_BALANCE;
+    } else if (!trimmed.compare("WHITE_BALANCE_TEMPERATURE")) {
+        camParam =  CameraParam::WHITE_BALANCE_TEMPERATURE;
+    } else if (!trimmed.compare("SHARPNESS")) {
+        camParam =  CameraParam::SHARPNESS;
+    } else if (!trimmed.compare("AUTO_EXPOSURE")) {
+        camParam =  CameraParam::AUTO_EXPOSURE;
+    } else if (!trimmed.compare("ABSOLUTE_EXPOSURE")) {
+        camParam =  CameraParam::ABSOLUTE_EXPOSURE;
+    } else if (!trimmed.compare("ABSOLUTE_FOCUS")) {
+        camParam =  CameraParam::ABSOLUTE_FOCUS;
+    } else if (!trimmed.compare("AUTO_FOCUS")) {
+        camParam =  CameraParam::AUTO_FOCUS;
+    } else if (!trimmed.compare("ABSOLUTE_ZOOM")) {
+        camParam =  CameraParam::ABSOLUTE_ZOOM;
+    } else {
+        success = false;
+    }
+
+    return success;
+}
+
+
+bool ConfigManagerUtil::convertToPixelFormat(const string &format,
+                                             int32_t &pixFormat) {
+    string trimmed = ConfigManagerUtil::trimString(format);
+    bool success = true;
+
+    if (!trimmed.compare("RGBA_8888")) {
+        pixFormat =  HAL_PIXEL_FORMAT_RGBA_8888;
+    } else if (!trimmed.compare("YCRCB_420_SP")) {
+        pixFormat =  HAL_PIXEL_FORMAT_YCRCB_420_SP;
+    } else if (!trimmed.compare("YCBCR_422_I")) {
+        pixFormat =  HAL_PIXEL_FORMAT_YCBCR_422_I;
+    } else {
+        success = false;
+    }
+
+    return success;
+}
+
+
+bool ConfigManagerUtil::convertToMetadataTag(const char *name,
+                                             camera_metadata_tag &aTag) {
+    if (!strcmp(name, "LENS_DISTORTION")) {
+        aTag =  ANDROID_LENS_DISTORTION;
+    } else if (!strcmp(name, "LENS_INTRINSIC_CALIBRATION")) {
+        aTag =  ANDROID_LENS_INTRINSIC_CALIBRATION;
+    } else if (!strcmp(name, "LENS_POSE_ROTATION")) {
+        aTag =  ANDROID_LENS_POSE_ROTATION;
+    } else if (!strcmp(name, "LENS_POSE_TRANSLATION")) {
+        aTag =  ANDROID_LENS_POSE_TRANSLATION;
+    } else {
+        return false;
+    }
+
+    return true;
+}
+
+
+float *ConfigManagerUtil::convertFloatArray(const char *sz, const char *vals,
+                                            size_t &count, const char delimiter) {
+    string size_string(sz);
+    string value_string(vals);
+
+    count = stoi(size_string);
+    float *result = new float[count];
+    stringstream values(value_string);
+
+    int32_t idx = 0;
+    string token;
+    while (getline(values, token, delimiter)) {
+        result[idx++] = stof(token);
+    }
+
+    return result;
+}
+
+
+string ConfigManagerUtil::trimString(const string &src, const string &ws) {
+    const auto s = src.find_first_not_of(ws);
+    if (s == string::npos) {
+        return "";
+    }
+
+    const auto e = src.find_last_not_of(ws);
+    const auto r = e - s + 1;
+
+    return src.substr(s, r);
+}
+
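
For illustration only, not part of this change: a small usage example of these helpers with made-up attribute values. Ownership of the array returned by convertFloatArray() stays with the caller.

#include <memory>

#include "ConfigManagerUtil.h"

void convertExamples() {
    // "  RGBA_8888 " -> HAL_PIXEL_FORMAT_RGBA_8888
    int32_t pixFormat = 0;
    bool ok = ConfigManagerUtil::convertToPixelFormat(
                  ConfigManagerUtil::trimString("  RGBA_8888 "), pixFormat);

    // Parse a 3-element translation vector from the XML "size"/"value" attributes.
    size_t count = 0;
    std::unique_ptr<float[]> vals(
        ConfigManagerUtil::convertFloatArray("3", "0.0,0.0,0.0", count));

    (void)ok; (void)count;
}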
diff --git a/automotive/evs/1.1/default/ConfigManagerUtil.h b/automotive/evs/1.1/default/ConfigManagerUtil.h
new file mode 100644
index 0000000..8c89ae7
--- /dev/null
+++ b/automotive/evs/1.1/default/ConfigManagerUtil.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef CONFIG_MANAGER_UTIL_H
+#define CONFIG_MANAGER_UTIL_H
+
+#include <utility>
+#include <string>
+#include <system/camera_metadata.h>
+#include <android/hardware/automotive/evs/1.1/types.h>
+
+using namespace std;
+using ::android::hardware::automotive::evs::V1_1::CameraParam;
+
+
+class ConfigManagerUtil {
+public:
+    /**
+     * Convert a given string into the corresponding CameraParam enum value
+     */
+    static bool convertToEvsCameraParam(const string &id,
+                                        CameraParam &camParam);
+    /**
+     * Convert a given string into android.hardware.graphics.common.PixelFormat
+     */
+    static bool convertToPixelFormat(const string &format,
+                                     int32_t &pixelFormat);
+    /**
+     * Convert a given string into the corresponding camera metadata tag defined in
+     * system/media/camera/include/system/camera_metadata_tags.h
+     */
+    static bool convertToMetadataTag(const char *name,
+                                     camera_metadata_tag &aTag);
+    /**
+     * Convert a given string into a floating-point value array
+     */
+    static float *convertFloatArray(const char *sz,
+                                    const char *vals,
+                                    size_t &count,
+                                    const char delimiter = ',');
+    /**
+     * Trim a string
+     */
+    static string trimString(const string &src,
+                             const string &ws = " \n\r\t\f\v");
+};
+
+#endif // CONFIG_MANAGER_UTIL_H
+
diff --git a/automotive/evs/1.1/default/EvsCamera.cpp b/automotive/evs/1.1/default/EvsCamera.cpp
index 2d55566..5ba753d 100644
--- a/automotive/evs/1.1/default/EvsCamera.cpp
+++ b/automotive/evs/1.1/default/EvsCamera.cpp
@@ -40,28 +40,21 @@
 const unsigned MAX_BUFFERS_IN_FLIGHT = 100;
 
 
-EvsCamera::EvsCamera(const char *id) :
+EvsCamera::EvsCamera(const char *id,
+                     unique_ptr<ConfigManager::CameraInfo> &camInfo) :
         mFramesAllowed(0),
         mFramesInUse(0),
-        mStreamState(STOPPED) {
+        mStreamState(STOPPED),
+        mCameraInfo(camInfo) {
 
     ALOGD("EvsCamera instantiated");
 
-    mDescription.cameraId = id;
+    /* set a camera id */
+    mDescription.v1.cameraId = id;
 
-    // Set up dummy data for testing
-    if (mDescription.cameraId == kCameraName_Backup) {
-        mWidth  = 640;          // full NTSC/VGA
-        mHeight = 480;          // full NTSC/VGA
-        mDescription.vendorFlags = 0xFFFFFFFF;   // Arbitrary value
-    } else {
-        mWidth  = 320;          // 1/2 NTSC/VGA
-        mHeight = 240;          // 1/2 NTSC/VGA
-    }
-
-    mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
-    mUsage  = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_CAMERA_WRITE |
-              GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_RARELY;
+    /* set camera metadata */
+    mDescription.metadata.setToExternal((uint8_t *)camInfo->characteristics,
+                                        get_camera_metadata_size(camInfo->characteristics));
 }
 
 
@@ -109,7 +102,7 @@
     ALOGD("getCameraInfo");
 
     // Send back our self description
-    _hidl_cb(mDescription);
+    _hidl_cb(mDescription.v1);
     return Void();
 }
 
@@ -194,7 +187,7 @@
 
         // Block outside the mutex until the "stop" flag has been acknowledged
         // We won't send any more frames, but the client might still get some already in flight
-        ALOGD("Waiting for stream thread to end..");
+        ALOGD("Waiting for stream thread to end...");
         lock.unlock();
         mCaptureThread.join();
         lock.lock();
@@ -238,6 +231,15 @@
 
 
 // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
+Return<void> EvsCamera::getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb) {
+    ALOGD("getCameraInfo_1_1");
+
+    // Send back our self description
+    _hidl_cb(mDescription);
+    return Void();
+}
+
+
 Return<EvsResult> EvsCamera::doneWithFrame_1_1(const BufferDesc_1_1& bufDesc)  {
     std::lock_guard <std::mutex> lock(mAccessLock);
     returnBuffer(bufDesc.bufferId, bufDesc.buffer.nativeHandle);
@@ -278,8 +280,29 @@
 }
 
 
-Return<void> EvsCamera::setParameter(CameraParam id, int32_t value,
-                                     setParameter_cb _hidl_cb) {
+Return<void> EvsCamera::getParameterList(getParameterList_cb _hidl_cb) {
+    hidl_vec<CameraParam> hidlCtrls;
+    hidlCtrls.resize(mCameraInfo->controls.size());
+    unsigned idx = 0;
+    for (auto& [cid, cfg] : mCameraInfo->controls) {
+        hidlCtrls[idx++] = cid;
+    }
+
+    _hidl_cb(hidlCtrls);
+    return Void();
+}
+
+
+Return<void> EvsCamera::getIntParameterRange(CameraParam id,
+                                             getIntParameterRange_cb _hidl_cb) {
+    auto range = mCameraInfo->controls[id];
+    _hidl_cb(get<0>(range), get<1>(range), get<2>(range));
+    return Void();
+}
+
+
+Return<void> EvsCamera::setIntParameter(CameraParam id, int32_t value,
+                                        setIntParameter_cb _hidl_cb) {
     // Default implementation does not support this.
     (void)id;
     (void)value;
@@ -288,7 +311,8 @@
 }
 
 
-Return<void> EvsCamera::getParameter(CameraParam id, getParameter_cb _hidl_cb) {
+Return<void> EvsCamera::getIntParameter(CameraParam id,
+                                        getIntParameter_cb _hidl_cb) {
     // Default implementation does not support this.
     (void)id;
     _hidl_cb(EvsResult::INVALID_ARG, 0);
@@ -471,9 +495,7 @@
             fillTestFrame(newBuffer);
 
             // Issue the (asynchronous) callback to the client -- can't be holding the lock
-            EvsEvent event;
-            event.buffer(newBuffer);
-            auto result = mStream->notifyEvent(event);
+            auto result = mStream->deliverFrame_1_1(newBuffer);
             if (result.isOk()) {
                 ALOGD("Delivered %p as id %d",
                       newBuffer.buffer.nativeHandle.getNativeHandle(), newBuffer.bufferId);
@@ -506,10 +528,8 @@
 
     // If we've been asked to stop, send an event to signal the actual end of stream
     EvsEvent event;
-    InfoEventDesc desc = {};
-    desc.aType = InfoEventType::STREAM_STOPPED;
-    event.info(desc);
-    auto result = mStream->notifyEvent(event);
+    event.aType = EvsEventType::STREAM_STOPPED;
+    auto result = mStream->notify(event);
     if (!result.isOk()) {
         ALOGE("Error delivering end of stream marker");
     }
@@ -616,6 +636,38 @@
 }
 
 
+sp<EvsCamera> EvsCamera::Create(const char *deviceName) {
+    unique_ptr<ConfigManager::CameraInfo> nullCamInfo = nullptr;
+
+    return Create(deviceName, nullCamInfo);
+}
+
+
+sp<EvsCamera> EvsCamera::Create(const char *deviceName,
+                                unique_ptr<ConfigManager::CameraInfo> &camInfo,
+                                const Stream *streamCfg) {
+    sp<EvsCamera> evsCamera = new EvsCamera(deviceName, camInfo);
+    if (evsCamera == nullptr) {
+        return nullptr;
+    }
+
+    /* default implementation does not use a given configuration */
+    (void)streamCfg;
+
+    /* Use the first resolution from the list for testing */
+    auto it = camInfo->streamConfigurations.begin();
+    evsCamera->mWidth = it->second[1];
+    evsCamera->mHeight = it->second[2];
+    evsCamera->mDescription.v1.vendorFlags = 0xFFFFFFFF; // Arbitrary test value
+
+    evsCamera->mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
+    evsCamera->mUsage  = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_CAMERA_WRITE |
+                         GRALLOC_USAGE_SW_READ_RARELY | GRALLOC_USAGE_SW_WRITE_RARELY;
+
+    return evsCamera;
+}
+
+
 } // namespace implementation
 } // namespace V1_0
 } // namespace evs
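
For illustration only, not part of this change: the default setIntParameter()/getIntParameter() above simply reject requests. A device-specific subclass (the hypothetical EvsCameraForVendor below) might instead validate the request against the range stored in ConfigManager::CameraInfo::controls before programming the hardware.

Return<void> EvsCameraForVendor::setIntParameter(CameraParam id, int32_t value,
                                                 setIntParameter_cb _hidl_cb) {
    auto it = mCameraInfo->controls.find(id);
    if (it == mCameraInfo->controls.end()) {
        _hidl_cb(EvsResult::INVALID_ARG, value);
        return Void();
    }

    // Clamp the request to the advertised [min, max] range and snap it to the step.
    const auto& [minVal, maxVal, step] = it->second;
    int32_t effective = value < minVal ? minVal : (value > maxVal ? maxVal : value);
    if (step > 0) {
        effective = minVal + ((effective - minVal) / step) * step;
    }

    // ... program the sensor/ISP with 'effective' here (device-specific) ...

    _hidl_cb(EvsResult::OK, effective);
    return Void();
}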
diff --git a/automotive/evs/1.1/default/EvsCamera.h b/automotive/evs/1.1/default/EvsCamera.h
index 47a3164..c15b4b1 100644
--- a/automotive/evs/1.1/default/EvsCamera.h
+++ b/automotive/evs/1.1/default/EvsCamera.h
@@ -25,6 +25,8 @@
 
 #include <thread>
 
+#include "ConfigManager.h"
+
 using BufferDesc_1_0 = ::android::hardware::automotive::evs::V1_0::BufferDesc;
 using BufferDesc_1_1 = ::android::hardware::automotive::evs::V1_1::BufferDesc;
 using IEvsCameraStream_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsCameraStream;
@@ -59,18 +61,28 @@
     Return<EvsResult> setExtendedInfo(uint32_t opaqueIdentifier, int32_t opaqueValue) override;
 
     // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
+    Return<void>      getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb)  override;
     Return<EvsResult> pauseVideoStream() override;
     Return<EvsResult> resumeVideoStream() override;
     Return<EvsResult> doneWithFrame_1_1(const BufferDesc_1_1& buffer) override;
     Return<EvsResult> setMaster() override;
     Return<EvsResult> forceMaster(const sp<IEvsDisplay>& display) override;
     Return<EvsResult> unsetMaster() override;
-    Return<void>      setParameter(CameraParam id, int32_t value,
-                                   setParameter_cb _hidl_cb) override;
-    Return<void>      getParameter(CameraParam id, getParameter_cb _hidl_cb) override;
+    Return<void>      getParameterList(getParameterList_cb _hidl_cb) override;
+    Return<void>      getIntParameterRange(CameraParam id,
+                                           getIntParameterRange_cb _hidl_cb) override;
+    Return<void>      setIntParameter(CameraParam id, int32_t value,
+                                      setIntParameter_cb _hidl_cb) override;
+    Return<void>      getIntParameter(CameraParam id,
+                                      getIntParameter_cb _hidl_cb) override;
 
-    // Implementation details
-    EvsCamera(const char *id);
+    static sp<EvsCamera> Create(const char *deviceName);
+    static sp<EvsCamera> Create(const char *deviceName,
+                                unique_ptr<ConfigManager::CameraInfo> &camInfo,
+                                const Stream *streamCfg = nullptr);
+    EvsCamera(const EvsCamera&) = delete;
+    EvsCamera& operator=(const EvsCamera&) = delete;
+
     virtual ~EvsCamera() override;
     void forceShutdown();   // This gets called if another caller "steals" ownership of the camera
 
@@ -79,7 +91,10 @@
     static const char kCameraName_Backup[];
 
 private:
+    EvsCamera(const char *id,
+              unique_ptr<ConfigManager::CameraInfo> &camInfo);
     // These three functions are expected to be called while mAccessLock is held
+    //
     bool setAvailableFrames_Locked(unsigned bufferCount);
     unsigned increaseAvailableFrames_Locked(unsigned numToAdd);
     unsigned decreaseAvailableFrames_Locked(unsigned numToRemove);
@@ -124,6 +139,9 @@
 
     // Synchronization necessary to deconflict mCaptureThread from the main service thread
     std::mutex mAccessLock;
+
+    // Static camera module information
+    unique_ptr<ConfigManager::CameraInfo> &mCameraInfo;
 };
 
 } // namespace implementation
diff --git a/automotive/evs/1.1/default/EvsEnumerator.cpp b/automotive/evs/1.1/default/EvsEnumerator.cpp
index b324907..a010729 100644
--- a/automotive/evs/1.1/default/EvsEnumerator.cpp
+++ b/automotive/evs/1.1/default/EvsEnumerator.cpp
@@ -33,6 +33,7 @@
 //        constructs a new instance for each client.
 std::list<EvsEnumerator::CameraRecord>   EvsEnumerator::sCameraList;
 wp<EvsDisplay>                           EvsEnumerator::sActiveDisplay;
+unique_ptr<ConfigManager>                EvsEnumerator::sConfigManager;
 
 
 EvsEnumerator::EvsEnumerator() {
@@ -40,9 +41,11 @@
 
     // Add sample camera data to our list of cameras
     // In a real driver, this would be expected to scan the available hardware
-    sCameraList.emplace_back(EvsCamera::kCameraName_Backup);
-    sCameraList.emplace_back("LaneView");
-    sCameraList.emplace_back("right turn");
+    sConfigManager =
+        ConfigManager::Create("/etc/automotive/evs/evs_sample_configuration.xml");
+    for (auto v : sConfigManager->getCameraList()) {
+        sCameraList.emplace_back(v.c_str());
+    }
 }
 
 
@@ -57,7 +60,7 @@
     std::vector<CameraDesc_1_0> descriptions;
     descriptions.reserve(numCameras);
     for (const auto& cam : sCameraList) {
-        descriptions.push_back( cam.desc );
+        descriptions.push_back( cam.desc.v1 );
     }
 
     // Encapsulate our camera descriptions in the HIDL vec type
@@ -78,7 +81,7 @@
     // Find the named camera
     CameraRecord *pRecord = nullptr;
     for (auto &&cam : sCameraList) {
-        if (cam.desc.cameraId == cameraId) {
+        if (cam.desc.v1.cameraId == cameraId) {
             // Found a match!
             pRecord = &cam;
             break;
@@ -99,7 +102,12 @@
     }
 
     // Construct a camera instance for the caller
-    pActiveCamera = new EvsCamera(cameraId.c_str());
+    if (sConfigManager == nullptr) {
+        pActiveCamera = EvsCamera::Create(cameraId.c_str());
+    } else {
+        pActiveCamera = EvsCamera::Create(cameraId.c_str(),
+                                          sConfigManager->getCameraInfo(cameraId));
+    }
     pRecord->activeInstance = pActiveCamera;
     if (pActiveCamera == nullptr) {
         ALOGE("Failed to allocate new EvsCamera object for %s\n", cameraId.c_str());
@@ -120,15 +128,15 @@
 
     // Get the camera id so we can find it in our list
     std::string cameraId;
-    pCamera_1_1->getCameraInfo([&cameraId](CameraDesc desc) {
-                               cameraId = desc.cameraId;
+    pCamera_1_1->getCameraInfo_1_1([&cameraId](CameraDesc desc) {
+                               cameraId = desc.v1.cameraId;
                            }
     );
 
     // Find the named camera
     CameraRecord *pRecord = nullptr;
     for (auto &&cam : sCameraList) {
-        if (cam.desc.cameraId == cameraId) {
+        if (cam.desc.v1.cameraId == cameraId) {
             // Found a match!
             pRecord = &cam;
             break;
@@ -209,6 +217,89 @@
 }
 
 
+// Methods from ::android::hardware::automotive::evs::V1_1::IEvsEnumerator follow.
+Return<void> EvsEnumerator::getCameraList_1_1(getCameraList_1_1_cb _hidl_cb)  {
+    ALOGD("getCameraList");
+
+    const unsigned numCameras = sCameraList.size();
+
+    // Build up a packed array of CameraDesc for return
+    // NOTE:  Only has to live until the callback returns
+    std::vector<CameraDesc_1_1> descriptions;
+    descriptions.reserve(numCameras);
+    for (const auto& cam : sCameraList) {
+        descriptions.push_back( cam.desc );
+    }
+
+    // Encapsulate our camera descriptions in the HIDL vec type
+    hidl_vec<CameraDesc_1_1> hidlCameras(descriptions);
+
+    // Send back the results
+    ALOGD("reporting %zu cameras available", hidlCameras.size());
+    _hidl_cb(hidlCameras);
+
+    // HIDL convention says we return Void if we sent our result back via callback
+    return Void();
+}
+
+Return<sp<IEvsCamera_1_1>>
+EvsEnumerator::openCamera_1_1(const hidl_string& cameraId,
+                              const Stream& streamCfg) {
+    // Find the named camera
+    CameraRecord *pRecord = nullptr;
+    for (auto &&cam : sCameraList) {
+        if (cam.desc.v1.cameraId == cameraId) {
+            // Found a match!
+            pRecord = &cam;
+            break;
+        }
+    }
+
+    // Is this a recognized camera id?
+    if (!pRecord) {
+        ALOGE("Requested camera %s not found", cameraId.c_str());
+        return nullptr;
+    }
+
+    // Has this camera already been instantiated by another caller?
+    sp<EvsCamera> pActiveCamera = pRecord->activeInstance.promote();
+    if (pActiveCamera != nullptr) {
+        ALOGW("Killing previous camera because of new caller");
+        closeCamera(pActiveCamera);
+    }
+
+    // Construct a camera instance for the caller
+    if (sConfigManager == nullptr) {
+        pActiveCamera = EvsCamera::Create(cameraId.c_str());
+    } else {
+        pActiveCamera = EvsCamera::Create(cameraId.c_str(),
+                                          sConfigManager->getCameraInfo(cameraId),
+                                          &streamCfg);
+    }
+
+    pRecord->activeInstance = pActiveCamera;
+    if (pActiveCamera == nullptr) {
+        ALOGE("Failed to allocate new EvsCamera object for %s\n", cameraId.c_str());
+    }
+
+    return pActiveCamera;
+}
+
+
+EvsEnumerator::CameraRecord* EvsEnumerator::findCameraById(const std::string& cameraId) {
+    // Find the named camera
+    CameraRecord *pRecord = nullptr;
+    for (auto &&cam : sCameraList) {
+        if (cam.desc.v1.cameraId == cameraId) {
+            // Found a match!
+            pRecord = &cam;
+            break;
+        }
+    }
+
+    return pRecord;
+}
+
 } // namespace implementation
 } // namespace V1_1
 } // namespace evs
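
For reference, a minimal client-side sketch of the enumeration flow implemented above:
list cameras with getCameraList_1_1() and open one with openCamera_1_1().  The service
instance name "default", the helper name, and the empty Stream configuration are
illustrative assumptions, not part of this patch.

    #include <vector>

    #include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
    #include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>

    using ::android::sp;
    using ::android::hardware::hidl_vec;
    using ::android::hardware::automotive::evs::V1_1::CameraDesc;
    using ::android::hardware::automotive::evs::V1_1::IEvsCamera;
    using ::android::hardware::automotive::evs::V1_1::IEvsEnumerator;
    using ::android::hardware::camera::device::V3_2::Stream;

    // Illustrative helper: open the first enumerated camera with default settings.
    void openFirstCamera() {
        sp<IEvsEnumerator> enumerator = IEvsEnumerator::getService("default");
        if (enumerator == nullptr) return;

        std::vector<CameraDesc> cameras;
        enumerator->getCameraList_1_1([&cameras](const hidl_vec<CameraDesc>& list) {
            cameras.assign(list.begin(), list.end());
        });
        if (cameras.empty()) return;

        Stream cfg = {};  // an empty config asks the HAL for its default stream
        sp<IEvsCamera> camera =
            enumerator->openCamera_1_1(cameras[0].v1.cameraId, cfg)
                .withDefault(nullptr);
        if (camera != nullptr) {
            enumerator->closeCamera(camera);
        }
    }
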
diff --git a/automotive/evs/1.1/default/EvsEnumerator.h b/automotive/evs/1.1/default/EvsEnumerator.h
index 11c2170..475ec76 100644
--- a/automotive/evs/1.1/default/EvsEnumerator.h
+++ b/automotive/evs/1.1/default/EvsEnumerator.h
@@ -17,18 +17,20 @@
 #ifndef ANDROID_HARDWARE_AUTOMOTIVE_EVS_V1_1_EVSCAMERAENUMERATOR_H
 #define ANDROID_HARDWARE_AUTOMOTIVE_EVS_V1_1_EVSCAMERAENUMERATOR_H
 
-#include <android/hardware/automotive/evs/1.0/IEvsEnumerator.h>
+#include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>
 #include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
 
 #include <list>
 
+#include "ConfigManager.h"
+
 using ::android::hardware::automotive::evs::V1_0::EvsResult;
 using ::android::hardware::automotive::evs::V1_0::IEvsDisplay;
 using ::android::hardware::automotive::evs::V1_0::DisplayState;
-using ::android::hardware::automotive::evs::V1_0::IEvsEnumerator;
 using IEvsCamera_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsCamera;
 using IEvsCamera_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsCamera;
 using CameraDesc_1_0 = ::android::hardware::automotive::evs::V1_0::CameraDesc;
+using CameraDesc_1_1 = ::android::hardware::automotive::evs::V1_1::CameraDesc;
 
 
 namespace android {
@@ -53,6 +55,11 @@
     Return<void>                closeDisplay(const ::android::sp<IEvsDisplay>& display)  override;
     Return<DisplayState>        getDisplayState()  override;
 
+    // Methods from ::android::hardware::automotive::evs::V1_1::IEvsEnumerator follow.
+    Return<void> getCameraList_1_1(getCameraList_1_1_cb _hidl_cb)  override;
+    Return<sp<IEvsCamera_1_1>>  openCamera_1_1(const hidl_string& cameraId,
+                                               const Stream& streamCfg) override;
+
     // Implementation details
     EvsEnumerator();
 
@@ -61,14 +68,20 @@
     //        That is to say, this is effectively a singleton despite the fact that HIDL
     //        constructs a new instance for each client.
     struct CameraRecord {
-        CameraDesc_1_0      desc;
+        CameraDesc_1_1      desc;
         wp<EvsCamera>       activeInstance;
 
-        CameraRecord(const char *cameraId) : desc() { desc.cameraId = cameraId; }
+        CameraRecord(const char *cameraId) : desc() { desc.v1.cameraId = cameraId; }
     };
-    static std::list<CameraRecord> sCameraList;
 
-    static wp<EvsDisplay>          sActiveDisplay; // Weak pointer. Object destructs if client dies.
+    static CameraRecord* findCameraById(const std::string& cameraId);
+
+    static std::list<CameraRecord>   sCameraList;
+
+    // Weak pointer. Object destructs if client dies.
+    static wp<EvsDisplay>            sActiveDisplay;
+
+    static unique_ptr<ConfigManager> sConfigManager;
 };
 
 } // namespace implementation
diff --git a/automotive/evs/1.1/default/resources/evs_default_configuration.xml b/automotive/evs/1.1/default/resources/evs_default_configuration.xml
new file mode 100644
index 0000000..692102e
--- /dev/null
+++ b/automotive/evs/1.1/default/resources/evs_default_configuration.xml
@@ -0,0 +1,68 @@
+<?xml version='1.0' encoding='utf-8'?>
+<!-- Copyright (C) 2019 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<!-- Exterior View System Example Configuration
+
+     Android Automotive axes are used to define coordinates.
+     See https://source.android.com/devices/sensors/sensor-types#auto_axes
+
+     Use evs_configuration.dtd with the xmllint tool to validate this XML configuration file
+-->
+
+<configuration>
+    <!-- system configuration -->
+    <system>
+        <!-- number of cameras available to EVS -->
+        <num_cameras value='1'/>
+    </system>
+
+    <!-- camera device information -->
+    <camera>
+        <!-- camera device starts -->
+        <device id='/dev/video1' position='rear'>
+            <caps>
+                <!-- list of supported controls -->
+                <supported_controls>
+                    <control name='BRIGHTNESS' min='0' max='255'/>
+                    <control name='CONTRAST' min='0' max='255'/>
+                </supported_controls>
+
+                <stream id='0' width='640'  height='360'  format='RGBA_8888' framerate='30'/>
+            </caps>
+
+            <!-- list of parameters -->
+            <characteristics>
+                <!-- Camera intrinsic calibration matrix. See
+                     https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#LENS_INTRINSIC_CALIBRATION
+                -->
+                <parameter
+                    name='LENS_INTRINSIC_CALIBRATION'
+                    type='float'
+                    size='5'
+                    value='0.0,0.0,0.0,0.0,0.0'
+                />
+            </characteristics>
+        </device>
+    </camera>
+    <display>
+        <device id='display0' position='driver'>
+            <caps>
+                <!-- list of supported input stream configurations -->
+                <stream id='0' width='1280' height='720' format='RGBA_8888' framerate='30'/>
+            </caps>
+        </device>
+    </display>
+</configuration>
+
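
The <stream> entry above corresponds roughly to the camera.device@3.2 Stream a client
may pass to openCamera_1_1() when it wants a specific configuration.  A sketch of that
mapping follows; Stream carries no framerate field, so the framerate attribute is
assumed to be consumed by the HAL's configuration parser instead.

    #include <android/hardware/camera/device/3.2/types.h>

    using ::android::hardware::camera::device::V3_2::Stream;
    using ::android::hardware::camera::device::V3_2::StreamType;
    using ::android::hardware::graphics::common::V1_0::PixelFormat;

    // Mirrors <stream id='0' width='640' height='360' format='RGBA_8888'/>.
    Stream streamCfg = {};
    streamCfg.id         = 0;
    streamCfg.streamType = StreamType::OUTPUT;
    streamCfg.width      = 640;
    streamCfg.height     = 360;
    streamCfg.format     = PixelFormat::RGBA_8888;
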
diff --git a/automotive/evs/1.1/default/service.cpp b/automotive/evs/1.1/default/service.cpp
index 128a14a..5135864 100644
--- a/automotive/evs/1.1/default/service.cpp
+++ b/automotive/evs/1.1/default/service.cpp
@@ -33,7 +33,7 @@
 using android::hardware::joinRpcThreadpool;
 
 // Generated HIDL files
-using android::hardware::automotive::evs::V1_0::IEvsEnumerator;
+using android::hardware::automotive::evs::V1_1::IEvsEnumerator;
 using android::hardware::automotive::evs::V1_0::IEvsDisplay;
 
 // The namespace in which all our implementation code lives
diff --git a/automotive/evs/1.1/types.hal b/automotive/evs/1.1/types.hal
index 2c6b2ed..dcb2abb 100644
--- a/automotive/evs/1.1/types.hal
+++ b/automotive/evs/1.1/types.hal
@@ -21,6 +21,22 @@
 import @1.0::DisplayState;
 import @1.0::EvsResult;
 import android.hardware.graphics.common@1.2::HardwareBuffer;
+import android.hardware.camera.device@3.2::CameraMetadata;
+
+/**
+ * Structure describing the basic properties of an EVS camera, extended from its
+ * v1.0 declaration.
+ *
+ * The HAL is responsible for filling out this structure for each
+ * EVS camera in the system.
+ */
+struct CameraDesc {
+    @1.0::CameraDesc v1;
+    /**
+     * Store camera metadata such as lens characteristics.
+     */
+    CameraMetadata metadata;
+};
 
 /**
  * Structure representing an image buffer through our APIs
@@ -50,7 +66,7 @@
 /**
  * Types of informative streaming events
  */
-enum InfoEventType : uint32_t {
+enum EvsEventType : uint32_t {
     /**
      * Video stream is started
      */
@@ -81,11 +97,11 @@
 /**
  * Structure that describes informative events that occur while EVS is streaming
  */
-struct InfoEventDesc {
+struct EvsEvent {
     /**
      * Type of an informative event
      */
-    InfoEventType aType;
+    EvsEventType aType;
     /**
      * Possible additional information
      */
@@ -93,20 +109,6 @@
 };
 
 /**
- * EVS event definition
- */
-safe_union EvsEvent {
-    /**
-     * A buffer descriptor of an image frame
-     */
-    BufferDesc buffer;
-    /**
-     * General streaming events
-     */
-    InfoEventDesc info;
-};
-
-/**
  * EVS Camera Parameter
  */
 enum CameraParam : uint32_t {
@@ -127,14 +129,6 @@
      */
     GAIN,
     /**
-     * Mirror the image horizontally
-     */
-    HFLIP,
-    /**
-     * Mirror the image vertically
-     */
-    VFLIP,
-    /**
      * Automatic Whitebalance
      */
     AUTO_WHITE_BALANCE,
@@ -156,11 +150,6 @@
      */
     ABSOLUTE_EXPOSURE,
     /**
-     * When AEC is running in either auto or aperture priority, this parameter
-     * sets whether a frame rate varies.
-     */
-    AUTO_EXPOSURE_PRIORITY,
-    /**
      * Set the focal point of the camera to the specified position.  This
      * parameter may not be effective when auto focus is enabled.
      */
diff --git a/automotive/evs/1.1/vts/functional/Android.bp b/automotive/evs/1.1/vts/functional/Android.bp
index 55c50a4..4753933 100644
--- a/automotive/evs/1.1/vts/functional/Android.bp
+++ b/automotive/evs/1.1/vts/functional/Android.bp
@@ -23,6 +23,7 @@
     defaults: ["VtsHalTargetTestDefaults"],
     shared_libs: [
         "libui",
+        "libcamera_metadata",
     ],
     static_libs: [
         "android.hardware.automotive.evs@1.0",
@@ -31,6 +32,7 @@
         "android.hardware.graphics.common@1.0",
         "android.hardware.graphics.common@1.1",
         "android.hardware.graphics.common@1.2",
+        "android.hardware.camera.device@3.2",
     ],
     test_suites: ["general-tests"],
     cflags: [
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.cpp b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
index 1627689..6d53652 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.cpp
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
@@ -138,93 +138,93 @@
 }
 
 
-Return<void> FrameHandler::notifyEvent(const EvsEvent& event) {
-    // Local flag we use to keep track of when the stream is stopping
-    auto type = event.getDiscriminator();
-    if (type == EvsEvent::hidl_discriminator::info) {
-        mLock.lock();
-        mLatestEventDesc = event.info();
-        if (mLatestEventDesc.aType == InfoEventType::STREAM_STOPPED) {
-            // Signal that the last frame has been received and the stream is stopped
-            mRunning = false;
-        } else if (mLatestEventDesc.aType == InfoEventType::PARAMETER_CHANGED) {
-            ALOGD("Camera parameter 0x%X is changed to 0x%X",
-                  mLatestEventDesc.payload[0], mLatestEventDesc.payload[1]);
+Return<void> FrameHandler::deliverFrame_1_1(const BufferDesc_1_1& bufDesc) {
+    const AHardwareBuffer_Desc* pDesc =
+        reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
+    ALOGD("Received a frame from the camera (%p)",
+          bufDesc.buffer.nativeHandle.getNativeHandle());
+
+    // Store a dimension of a received frame.
+    mFrameWidth = pDesc->width;
+    mFrameHeight = pDesc->height;
+
+    // If we were given an opened display at construction time, then send the received
+    // image back down the camera.
+    if (mDisplay.get()) {
+        // Get the output buffer we'll use to display the imagery
+        BufferDesc_1_0 tgtBuffer = {};
+        mDisplay->getTargetBuffer([&tgtBuffer](const BufferDesc_1_0& buff) {
+                                      tgtBuffer = buff;
+                                  }
+        );
+
+        if (tgtBuffer.memHandle == nullptr) {
+            printf("Didn't get target buffer - frame lost\n");
+            ALOGE("Didn't get requested output buffer -- skipping this frame.");
         } else {
-            ALOGD("Received an event %s", eventToString(mLatestEventDesc.aType));
-        }
-        mLock.unlock();
-        mEventSignal.notify_all();
-    } else {
-        auto bufDesc = event.buffer();
-        const AHardwareBuffer_Desc* pDesc =
-            reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
-        ALOGD("Received a frame from the camera (%p)",
-              bufDesc.buffer.nativeHandle.getNativeHandle());
+            // Copy the contents of the buffer.memHandle into tgtBuffer
+            copyBufferContents(tgtBuffer, bufDesc);
 
-        // Store a dimension of a received frame.
-        mFrameWidth = pDesc->width;
-        mFrameHeight = pDesc->height;
-
-        // If we were given an opened display at construction time, then send the received
-        // image back down the camera.
-        if (mDisplay.get()) {
-            // Get the output buffer we'll use to display the imagery
-            BufferDesc_1_0 tgtBuffer = {};
-            mDisplay->getTargetBuffer([&tgtBuffer](const BufferDesc_1_0& buff) {
-                                          tgtBuffer = buff;
-                                      }
-            );
-
-            if (tgtBuffer.memHandle == nullptr) {
-                printf("Didn't get target buffer - frame lost\n");
-                ALOGE("Didn't get requested output buffer -- skipping this frame.");
+            // Send the target buffer back for display
+            Return<EvsResult> result = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
+            if (!result.isOk()) {
+                printf("HIDL error on display buffer (%s)- frame lost\n",
+                       result.description().c_str());
+                ALOGE("Error making the remote function call.  HIDL said %s",
+                      result.description().c_str());
+            } else if (result != EvsResult::OK) {
+                printf("Display reported error - frame lost\n");
+                ALOGE("We encountered error %d when returning a buffer to the display!",
+                      (EvsResult) result);
             } else {
-                // Copy the contents of the of buffer.memHandle into tgtBuffer
-                copyBufferContents(tgtBuffer, bufDesc);
-
-                // Send the target buffer back for display
-                Return<EvsResult> result = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
-                if (!result.isOk()) {
-                    printf("HIDL error on display buffer (%s)- frame lost\n",
-                           result.description().c_str());
-                    ALOGE("Error making the remote function call.  HIDL said %s",
-                          result.description().c_str());
-                } else if (result != EvsResult::OK) {
-                    printf("Display reported error - frame lost\n");
-                    ALOGE("We encountered error %d when returning a buffer to the display!",
-                          (EvsResult) result);
-                } else {
-                    // Everything looks good!
-                    // Keep track so tests or watch dogs can monitor progress
-                    mLock.lock();
-                    mFramesDisplayed++;
-                    mLock.unlock();
-                }
+                // Everything looks good!
+                // Keep track so tests or watch dogs can monitor progress
+                mLock.lock();
+                mFramesDisplayed++;
+                mLock.unlock();
             }
         }
-
-
-        switch (mReturnMode) {
-        case eAutoReturn:
-            // Send the camera buffer back now that the client has seen it
-            ALOGD("Calling doneWithFrame");
-            // TODO:  Why is it that we get a HIDL crash if we pass back the cloned buffer?
-            mCamera->doneWithFrame_1_1(bufDesc);
-            break;
-        case eNoAutoReturn:
-            // Hang onto the buffer handle for now -- the client will return it explicitly later
-            mHeldBuffers.push(bufDesc);
-        }
-
-        mLock.lock();
-        ++mFramesReceived;
-        mLock.unlock();
-        mFrameSignal.notify_all();
-
-        ALOGD("Frame handling complete");
     }
 
+
+    switch (mReturnMode) {
+    case eAutoReturn:
+        // Send the camera buffer back now that the client has seen it
+        ALOGD("Calling doneWithFrame");
+        mCamera->doneWithFrame_1_1(bufDesc);
+        break;
+    case eNoAutoReturn:
+        // Hang onto the buffer handle for now -- the client will return it explicitly later
+        mHeldBuffers.push(bufDesc);
+    }
+
+    mLock.lock();
+    ++mFramesReceived;
+    mLock.unlock();
+    mFrameSignal.notify_all();
+
+    ALOGD("Frame handling complete");
+
+    return Void();
+}
+
+
+Return<void> FrameHandler::notify(const EvsEvent& event) {
+    // Local flag we use to keep track of when the stream is stopping
+    mLock.lock();
+    mLatestEventDesc = event;
+    if (mLatestEventDesc.aType == EvsEventType::STREAM_STOPPED) {
+        // Signal that the last frame has been received and the stream is stopped
+        mRunning = false;
+    } else if (mLatestEventDesc.aType == EvsEventType::PARAMETER_CHANGED) {
+        ALOGD("Camera parameter 0x%X is changed to 0x%X",
+              mLatestEventDesc.payload[0], mLatestEventDesc.payload[1]);
+    } else {
+        ALOGD("Received an event %s", eventToString(mLatestEventDesc.aType));
+    }
+    mLock.unlock();
+    mEventSignal.notify_all();
+
     return Void();
 }
 
@@ -342,18 +342,18 @@
     }
 }
 
-bool FrameHandler::waitForEvent(const InfoEventType aTargetEvent,
-                                InfoEventDesc &eventDesc) {
+bool FrameHandler::waitForEvent(const EvsEventType aTargetEvent,
+                                EvsEvent &event) {
     // Wait until we get an expected parameter change event.
     std::unique_lock<std::mutex> lock(mLock);
     auto now = std::chrono::system_clock::now();
     bool result = mEventSignal.wait_until(lock, now + 5s,
-        [this, aTargetEvent, &eventDesc](){
+        [this, aTargetEvent, &event](){
             bool flag = mLatestEventDesc.aType == aTargetEvent;
             if (flag) {
-                eventDesc.aType = mLatestEventDesc.aType;
-                eventDesc.payload[0] = mLatestEventDesc.payload[0];
-                eventDesc.payload[1] = mLatestEventDesc.payload[1];
+                event.aType = mLatestEventDesc.aType;
+                event.payload[0] = mLatestEventDesc.payload[0];
+                event.payload[1] = mLatestEventDesc.payload[1];
             }
 
             return flag;
@@ -363,21 +363,22 @@
     return !result;
 }
 
-const char *FrameHandler::eventToString(const InfoEventType aType) {
+const char *FrameHandler::eventToString(const EvsEventType aType) {
     switch (aType) {
-        case InfoEventType::STREAM_STARTED:
+        case EvsEventType::STREAM_STARTED:
             return "STREAM_STARTED";
-        case InfoEventType::STREAM_STOPPED:
+        case EvsEventType::STREAM_STOPPED:
             return "STREAM_STOPPED";
-        case InfoEventType::FRAME_DROPPED:
+        case EvsEventType::FRAME_DROPPED:
             return "FRAME_DROPPED";
-        case InfoEventType::TIMEOUT:
+        case EvsEventType::TIMEOUT:
             return "TIMEOUT";
-        case InfoEventType::PARAMETER_CHANGED:
+        case EvsEventType::PARAMETER_CHANGED:
             return "PARAMETER_CHANGED";
-        case InfoEventType::MASTER_RELEASED:
+        case EvsEventType::MASTER_RELEASED:
             return "MASTER_RELEASED";
         default:
             return "Unknown";
     }
 }
+
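
The rewritten handler above implements the split 1.1 stream callbacks.  A stripped-down
receiver is sketched below to show the minimum an IEvsCameraStream 1.1 implementation
provides; the class name and teardown comments are illustrative, not part of this patch.

    #include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
    #include <android/hardware/automotive/evs/1.1/IEvsCameraStream.h>

    using ::android::sp;
    using ::android::hardware::Return;
    using ::android::hardware::Void;
    using ::android::hardware::automotive::evs::V1_1::EvsEvent;
    using ::android::hardware::automotive::evs::V1_1::EvsEventType;
    using ::android::hardware::automotive::evs::V1_1::IEvsCameraStream;
    using IEvsCamera_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsCamera;
    using BufferDesc_1_0 = ::android::hardware::automotive::evs::V1_0::BufferDesc;
    using BufferDesc_1_1 = ::android::hardware::automotive::evs::V1_1::BufferDesc;

    class MinimalStream : public IEvsCameraStream {
    public:
        explicit MinimalStream(sp<IEvsCamera_1_1> camera) : mCamera(camera) {}

        Return<void> deliverFrame(const BufferDesc_1_0&) override {
            return Void();  // legacy 1.0 entry point, unused on the 1.1 path
        }

        Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer) override {
            // Consume the frame, then hand the buffer back to the camera.
            mCamera->doneWithFrame_1_1(buffer);
            return Void();
        }

        Return<void> notify(const EvsEvent& event) override {
            if (event.aType == EvsEventType::STREAM_STOPPED) {
                // Last event of the stream; safe to tear down.
            }
            return Void();
        }

    private:
        sp<IEvsCamera_1_1> mCamera;
    };
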
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.h b/automotive/evs/1.1/vts/functional/FrameHandler.h
index 7f87cb4..e5f1b8f 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.h
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.h
@@ -33,7 +33,6 @@
 using ::android::sp;
 using ::android::hardware::automotive::evs::V1_0::IEvsDisplay;
 using ::android::hardware::automotive::evs::V1_0::EvsResult;
-using ::android::hardware::automotive::evs::V1_0::CameraDesc;
 using BufferDesc_1_0 = ::android::hardware::automotive::evs::V1_0::BufferDesc;
 using BufferDesc_1_1 = ::android::hardware::automotive::evs::V1_1::BufferDesc;
 
@@ -56,6 +55,13 @@
     FrameHandler(android::sp <IEvsCamera> pCamera, CameraDesc cameraInfo,
                  android::sp <IEvsDisplay> pDisplay = nullptr,
                  BufferControlFlag mode = eAutoReturn);
+    virtual ~FrameHandler() {
+        if (mCamera != nullptr) {
+            /* shutdown a camera explicitly */
+            shutdown();
+        }
+    }
+
     void shutdown();
 
     bool startStream();
@@ -67,19 +73,22 @@
     bool isRunning();
 
     void waitForFrameCount(unsigned frameCount);
-    bool waitForEvent(const InfoEventType aTargetEvent,
-                            InfoEventDesc &eventDesc);
+    bool waitForEvent(const EvsEventType aTargetEvent,
+                            EvsEvent &eventDesc);
     void getFramesCounters(unsigned* received, unsigned* displayed);
     void getFrameDimension(unsigned* width, unsigned* height);
 
 private:
-    // Implementation for ::android::hardware::automotive::evs::V1_1::IEvsCameraStream
+    // Implementation for ::android::hardware::automotive::evs::V1_0::IEvsCameraStream
     Return<void> deliverFrame(const BufferDesc_1_0& buffer) override;
-    Return<void> notifyEvent(const EvsEvent& event) override;
+
+    // Implementation for ::android::hardware::automotive::evs::V1_1::IEvsCameraStream
+    Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer) override;
+    Return<void> notify(const EvsEvent& event) override;
 
     // Local implementation details
     bool copyBufferContents(const BufferDesc_1_0& tgtBuffer, const BufferDesc_1_1& srcBuffer);
-    const char *eventToString(const InfoEventType aType);
+    const char *eventToString(const EvsEventType aType);
 
     // Values initialized at startup
     android::sp <IEvsCamera>    mCamera;
@@ -100,7 +109,7 @@
     unsigned                    mFramesDisplayed = 0;   // Simple counter -- rolls over eventually!
     unsigned                    mFrameWidth = 0;
     unsigned                    mFrameHeight = 0;
-    InfoEventDesc               mLatestEventDesc;
+    EvsEvent                    mLatestEventDesc;
 };
 
 
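
The parameter-control surface exercised by the tests below (setMaster, getParameterList,
getIntParameterRange, setIntParameter) composes as sketched here; the helper name and
the choice to program the minimum value are illustrative assumptions.

    #include <vector>

    // Assumes the usings/aliases already present in the test file
    // (IEvsCamera_1_1, CameraParam, EvsResult, hidl_vec).
    void setFirstParameterToMinimum(const android::sp<IEvsCamera_1_1>& camera) {
        if (camera->setMaster() != EvsResult::OK) {
            return;  // another client currently owns parameter control
        }

        std::vector<CameraParam> params;
        camera->getParameterList([&params](const hidl_vec<CameraParam>& list) {
            params.assign(list.begin(), list.end());
        });

        if (!params.empty()) {
            int32_t minVal = 0;
            camera->getIntParameterRange(params[0],
                [&minVal](int32_t lo, int32_t, int32_t) {
                    minVal = lo;
                });

            camera->setIntParameter(params[0], minVal,
                [](EvsResult result, int32_t effectiveValue) {
                    // effectiveValue may differ if the driver clamps the request.
                    (void) result; (void) effectiveValue;
                });
        }

        camera->unsetMaster();
    }
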
diff --git a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
index a6e4881..1d3fd87 100644
--- a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
+++ b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
@@ -38,8 +38,9 @@
 
 #include "FrameHandler.h"
 
-#include <stdio.h>
-#include <string.h>
+#include <cstdio>
+#include <cstring>
+#include <cstdlib>
 
 #include <hidl/HidlTransportSupport.h>
 #include <hwbinder/ProcessState.h>
@@ -50,8 +51,10 @@
 #include <android/log.h>
 #include <android/hardware/automotive/evs/1.1/IEvsCamera.h>
 #include <android/hardware/automotive/evs/1.1/IEvsCameraStream.h>
-#include <android/hardware/automotive/evs/1.0/IEvsEnumerator.h>
+#include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>
 #include <android/hardware/automotive/evs/1.0/IEvsDisplay.h>
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+#include <system/camera_metadata.h>
 
 #include <VtsHalHidlTargetTestBase.h>
 #include <VtsHalHidlTargetTestEnvBase.h>
@@ -64,13 +67,28 @@
 using ::android::hardware::hidl_handle;
 using ::android::hardware::hidl_string;
 using ::android::sp;
-using ::android::hardware::automotive::evs::V1_0::CameraDesc;
+using ::android::hardware::camera::device::V3_2::Stream;
 using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
 using ::android::hardware::automotive::evs::V1_0::DisplayState;
-using ::android::hardware::automotive::evs::V1_0::IEvsEnumerator;
+using ::android::hardware::graphics::common::V1_0::PixelFormat;
 using IEvsCamera_1_0 = ::android::hardware::automotive::evs::V1_0::IEvsCamera;
 using IEvsCamera_1_1 = ::android::hardware::automotive::evs::V1_1::IEvsCamera;
 
+/*
+ * Please note that this is different from what is defined in
+ * libhardware/modules/camera/3_4/metadata/types.h; this has one additional
+ * field to store a framerate.
+ */
+const size_t kStreamCfgSz = 5;
+typedef struct {
+    int32_t width;
+    int32_t height;
+    int32_t format;
+    int32_t direction;
+    int32_t framerate;
+} RawStreamConfig;
+
+
 // Test environment for Evs HIDL HAL.
 class EvsHidlEnvironment : public ::testing::VtsHalHidlTargetTestEnvBase {
    public:
@@ -107,15 +125,16 @@
         assert(pEnumerator != nullptr);
 
         // Get the camera list
-        pEnumerator->getCameraList([this](hidl_vec <CameraDesc> cameraList) {
-                                       ALOGI("Camera list callback received %zu cameras",
-                                             cameraList.size());
-                                       cameraInfo.reserve(cameraList.size());
-                                       for (auto&& cam: cameraList) {
-                                           ALOGI("Found camera %s", cam.cameraId.c_str());
-                                           cameraInfo.push_back(cam);
-                                       }
-                                   }
+        pEnumerator->getCameraList_1_1(
+            [this](hidl_vec <CameraDesc> cameraList) {
+                ALOGI("Camera list callback received %zu cameras",
+                      cameraList.size());
+                cameraInfo.reserve(cameraList.size());
+                for (auto&& cam: cameraList) {
+                    ALOGI("Found camera %s", cam.v1.cameraId.c_str());
+                    cameraInfo.push_back(cam);
+                }
+            }
         );
 
         // We insist on at least one camera for EVS to pass any camera tests
@@ -143,19 +162,23 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Open and close each camera twice
     for (auto&& cam: cameraInfo) {
         for (int pass = 0; pass < 2; pass++) {
             sp<IEvsCamera_1_1> pCam =
-                IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+                IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
                 .withDefault(nullptr);
             ASSERT_NE(pCam, nullptr);
 
             // Verify that this camera self-identifies correctly
-            pCam->getCameraInfo([&cam](CameraDesc desc) {
-                                    ALOGD("Found camera %s", desc.cameraId.c_str());
-                                    EXPECT_EQ(cam.cameraId, desc.cameraId);
-                                }
+            pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
+                                        ALOGD("Found camera %s", desc.v1.cameraId.c_str());
+                                        EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
+                                    }
             );
 
             // Explicitly close the camera so resources are released right away
@@ -177,22 +200,26 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Open and close each camera twice
     for (auto&& cam: cameraInfo) {
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
         // Verify that this camera self-identifies correctly
-        pCam->getCameraInfo([&cam](CameraDesc desc) {
-                                ALOGD("Found camera %s", desc.cameraId.c_str());
-                                EXPECT_EQ(cam.cameraId, desc.cameraId);
-                            }
+        pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
+                                    ALOGD("Found camera %s", desc.v1.cameraId.c_str());
+                                    EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
+                                }
         );
 
         sp<IEvsCamera_1_1> pCam2 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, pCam2);
         ASSERT_NE(pCam2, nullptr);
@@ -210,10 +237,10 @@
         pEnumerator->closeCamera(pCam);
 
         // Verify that the second camera instance self-identifies correctly
-        pCam2->getCameraInfo([&cam](CameraDesc desc) {
-                                 ALOGD("Found camera %s", desc.cameraId.c_str());
-                                 EXPECT_EQ(cam.cameraId, desc.cameraId);
-                             }
+        pCam2->getCameraInfo_1_1([&cam](CameraDesc desc) {
+                                     ALOGD("Found camera %s", desc.v1.cameraId.c_str());
+                                     EXPECT_EQ(cam.v1.cameraId, desc.v1.cameraId);
+                                 }
         );
 
         // Close the second camera instance
@@ -235,10 +262,14 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
@@ -303,11 +334,15 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
 
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
@@ -371,6 +406,10 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Request exclusive access to the EVS display
     sp<IEvsDisplay> pDisplay = pEnumerator->openDisplay();
     ASSERT_NE(pDisplay, nullptr);
@@ -378,7 +417,7 @@
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
@@ -439,16 +478,20 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCam0 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
         sp<IEvsCamera_1_1> pCam1 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam1, nullptr);
 
@@ -486,7 +529,6 @@
         nsecs_t runTime = end - firstFrame;
         float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
         float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
-        printf("Measured camera rate %3.2f fps and %3.2f fps\n", framesPerSecond0, framesPerSecond1);
         ALOGI("Measured camera rate %3.2f fps and %3.2f fps", framesPerSecond0, framesPerSecond1);
         EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
         EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
@@ -526,14 +568,33 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
+    Return<EvsResult> result = EvsResult::OK;
     for (auto&& cam: cameraInfo) {
         // Create a camera client
         sp<IEvsCamera_1_1> pCam =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Get the parameter list
+        std::vector<CameraParam> cmds;
+        pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
+                cmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    cmds.push_back(cmd);
+                }
+            }
+        );
+
+        if (cmds.size() < 1) {
+            continue;
+        }
+
         // Set up per-client frame receiver objects, each of which will fire up its own thread
         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                          nullptr,
@@ -547,83 +608,70 @@
         // Ensure the stream starts
         frameHandler->waitForFrameCount(1);
 
-        // Try to program few parameters
-        EvsResult result = EvsResult::OK;
-        int32_t val0 = 100;
-        int32_t val1 = 0;
-
         result = pCam->setMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
-        pCam->setParameter(CameraParam::BRIGHTNESS, val0,
-                           [&result, &val1](auto status, auto effectiveValue) {
-                               result = status;
-                               val1 = effectiveValue;
-                           });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
+        for (auto &cmd : cmds) {
+            // Get a valid parameter value range
+            int32_t minVal, maxVal, step;
+            pCam->getIntParameterRange(
+                cmd,
+                [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
+                    minVal = val0;
+                    maxVal = val1;
+                    step   = val2;
+                }
+            );
 
-        if (result == EvsResult::OK) {
-            pCam->getParameter(CameraParam::BRIGHTNESS,
+            EvsResult result = EvsResult::OK;
+            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
+                // Try to turn off auto-focus
+                int32_t val1 = 0;
+                pCam->getIntParameter(CameraParam::AUTO_FOCUS,
+                                   [&result, &val1](auto status, auto value) {
+                                       result = status;
+                                       if (status == EvsResult::OK) {
+                                          val1 = value;
+                                       }
+                                   });
+                if (val1 != 0) {
+                    pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                       [&result, &val1](auto status, auto effectiveValue) {
+                                           result = status;
+                                           val1 = effectiveValue;
+                                       });
+                    ASSERT_EQ(EvsResult::OK, result);
+                    ASSERT_EQ(val1, 0);
+                }
+            }
+
+            // Try to program a parameter with a random value in [minVal, maxVal)
+            int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
+            int32_t val1 = 0;
+
+            // Rounding down
+            val0 = val0 - (val0 % step);
+            pCam->setIntParameter(cmd, val0,
+                               [&result, &val1](auto status, auto effectiveValue) {
+                                   result = status;
+                                   val1 = effectiveValue;
+                               });
+
+            ASSERT_EQ(EvsResult::OK, result);
+
+            pCam->getIntParameter(cmd,
                                [&result, &val1](auto status, auto value) {
                                    result = status;
                                    if (status == EvsResult::OK) {
                                       val1 = value;
                                    }
                                });
-            ASSERT_TRUE(result == EvsResult::OK ||
-                        result == EvsResult::INVALID_ARG);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
-        }
-
-        val0 = 80;
-        val1 = 0;
-        pCam->setParameter(CameraParam::CONTRAST, val0,
-                           [&result, &val1](auto status, auto effectiveValue) {
-                               result = status;
-                               val1 = effectiveValue;
-                           });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
-
-        if (result == EvsResult::OK) {
-            pCam->getParameter(CameraParam::CONTRAST,
-                               [&result, &val1](auto status, auto value) {
-                                   result = status;
-                                   if (status == EvsResult::OK) {
-                                      val1 = value;
-                                   }
-                               });
-            ASSERT_TRUE(result == EvsResult::OK ||
-                        result == EvsResult::INVALID_ARG);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
-        }
-
-        val0 = 300;
-        val1 = 0;
-        pCam->setParameter(CameraParam::ABSOLUTE_ZOOM, val0,
-                           [&result, &val1](auto status, auto effectiveValue) {
-                               result = status;
-                               val1 = effectiveValue;
-                           });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
-
-        if (result == EvsResult::OK) {
-            pCam->getParameter(CameraParam::ABSOLUTE_ZOOM,
-                               [&result, &val1](auto status, auto value) {
-                                   result = status;
-                                   if (status == EvsResult::OK) {
-                                      val1 = value;
-                                   }
-                               });
-            ASSERT_TRUE(result == EvsResult::OK ||
-                        result == EvsResult::INVALID_ARG);
+            ASSERT_EQ(EvsResult::OK, result);
             ASSERT_EQ(val0, val1) << "Values are not matched.";
         }
 
         result = pCam->unsetMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Shutdown
         frameHandler->shutdown();
@@ -650,15 +698,19 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCamMaster =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamMaster, nullptr);
         sp<IEvsCamera_1_1> pCamNonMaster =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamNonMaster, nullptr);
 
@@ -698,15 +750,15 @@
 
         // Non-master client expects to receive a master role released
         // notification.
-        InfoEventDesc aNotification = {};
+        EvsEvent aNotification = {};
 
         // Release a master role.
         pCamMaster->unsetMaster();
 
         // Verify a change notification.
-        frameHandlerNonMaster->waitForEvent(InfoEventType::MASTER_RELEASED, aNotification);
-        ASSERT_EQ(InfoEventType::MASTER_RELEASED,
-                  static_cast<InfoEventType>(aNotification.aType));
+        frameHandlerNonMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        ASSERT_EQ(EvsEventType::MASTER_RELEASED,
+                  static_cast<EvsEventType>(aNotification.aType));
 
         // Non-master becomes a master.
         result = pCamNonMaster->setMaster();
@@ -720,9 +772,9 @@
         frameHandlerNonMaster->shutdown();
 
         // Verify a change notification.
-        frameHandlerMaster->waitForEvent(InfoEventType::MASTER_RELEASED, aNotification);
-        ASSERT_EQ(InfoEventType::MASTER_RELEASED,
-                  static_cast<InfoEventType>(aNotification.aType));
+        frameHandlerMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        ASSERT_EQ(EvsEventType::MASTER_RELEASED,
+                  static_cast<EvsEventType>(aNotification.aType));
 
         // Closing another stream.
         frameHandlerMaster->shutdown();
@@ -752,18 +804,46 @@
     // Get the camera list
     loadCameraList();
 
+    // Using a null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCamMaster =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamMaster, nullptr);
         sp<IEvsCamera_1_1> pCamNonMaster =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamNonMaster, nullptr);
 
+        // Get the parameter list
+        std::vector<CameraParam> camMasterCmds, camNonMasterCmds;
+        pCamMaster->getParameterList([&camMasterCmds](hidl_vec<CameraParam> cmdList) {
+                camMasterCmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    camMasterCmds.push_back(cmd);
+                }
+            }
+        );
+
+        pCamNonMaster->getParameterList([&camNonMasterCmds](hidl_vec<CameraParam> cmdList) {
+                camNonMasterCmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    camNonMasterCmds.push_back(cmd);
+                }
+            }
+        );
+
+        if (camMasterCmds.size() < 1 ||
+            camNonMasterCmds.size() < 1) {
+            // Skip a camera device if it does not support any parameter.
+            continue;
+        }
+
         // Set up per-client frame receiver objects, each of which will fire up its own thread
         sp<FrameHandler> frameHandlerMaster =
             new FrameHandler(pCamMaster, cam,
@@ -778,11 +858,11 @@
 
         // Set one client as the master
         EvsResult result = pCamMaster->setMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Try to set another client as the master.
         result = pCamNonMaster->setMaster();
-        ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
+        ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
 
         // Start the camera's video stream via a master client.
         bool startResult = frameHandlerMaster->startStream();
@@ -798,131 +878,168 @@
         // Ensure the stream starts
         frameHandlerNonMaster->waitForFrameCount(1);
 
-        // Try to program CameraParam::BRIGHTNESS
-        int32_t val0 = 100;
+        int32_t val0 = 0;
         int32_t val1 = 0;
+        for (auto &cmd : camMasterCmds) {
+            // Get a valid parameter value range
+            int32_t minVal, maxVal, step;
+            pCamMaster->getIntParameterRange(
+                cmd,
+                [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
+                    minVal = val0;
+                    maxVal = val1;
+                    step   = val2;
+                }
+            );
 
-        pCamMaster->setParameter(CameraParam::BRIGHTNESS, val0,
-                                 [&result, &val1](auto status, auto effectiveValue) {
-                                     result = status;
-                                     val1 = effectiveValue;
-                                 });
-        ASSERT_TRUE(result == EvsResult::OK ||            // Succeeded to program
-                    result == EvsResult::INVALID_ARG);    // Camera parameter is not supported
+            EvsResult result = EvsResult::OK;
+            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
+                // Try to turn off auto-focus
+                int32_t val1 = 1;
+                pCamMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &val1](auto status, auto effectiveValue) {
+                                       result = status;
+                                       val1 = effectiveValue;
+                                   });
+                ASSERT_EQ(EvsResult::OK, result);
+                ASSERT_EQ(val1, 0);
+            }
 
-        // Non-master client expects to receive a parameter change notification
-        // whenever a master client adjusts it.
-        InfoEventDesc aNotification = {};
+            // Try to program a parameter
+            val0 = minVal + (std::rand() % (maxVal - minVal));
 
-        pCamMaster->getParameter(CameraParam::BRIGHTNESS,
-                                 [&result, &val1](auto status, auto value) {
-                                     result = status;
-                                     if (status == EvsResult::OK) {
-                                        val1 = value;
-                                     }
-                                 });
-        ASSERT_TRUE(result == EvsResult::OK ||            // Succeeded to program
-                    result == EvsResult::INVALID_ARG);    // Camera parameter is not supported
-        if (result == EvsResult::OK) {
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
+            // Rounding down
+            val0 = val0 - (val0 % step);
+            pCamMaster->setIntParameter(cmd, val0,
+                                     [&result, &val1](auto status, auto effectiveValue) {
+                                         result = status;
+                                         val1 = effectiveValue;
+                                     });
+            ASSERT_EQ(EvsResult::OK, result);
 
-            // Verify a change notification
-            frameHandlerNonMaster->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_EQ(InfoEventType::PARAMETER_CHANGED,
-                      static_cast<InfoEventType>(aNotification.aType));
-            ASSERT_EQ(CameraParam::BRIGHTNESS,
-                      static_cast<CameraParam>(aNotification.payload[0]));
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
-        }
+            // Wait a moment
+            sleep(1);
 
-        // Try to program CameraParam::CONTRAST
-        val0 = 80;
-        val1 = 0;
-        pCamMaster->setParameter(CameraParam::CONTRAST, val0,
-                                 [&result, &val1](auto status, auto effectiveValue) {
-                                     result = status;
-                                     val1 = effectiveValue;
-                                 });
-        ASSERT_TRUE(result == EvsResult::OK ||            // Succeeded to program
-                    result == EvsResult::INVALID_ARG);    // Camera parameter is not supported
+            // Non-master client expects to receive a parameter change notification
+            // whenever a master client adjusts it.
+            EvsEvent aNotification = {};
 
-        if (result == EvsResult::OK) {
-            pCamMaster->getParameter(CameraParam::CONTRAST,
+            pCamMaster->getIntParameter(cmd,
                                      [&result, &val1](auto status, auto value) {
                                          result = status;
                                          if (status == EvsResult::OK) {
                                             val1 = value;
                                          }
                                      });
-            ASSERT_TRUE(result == EvsResult::OK);
+            ASSERT_EQ(EvsResult::OK, result);
             ASSERT_EQ(val0, val1) << "Values are not matched.";
 
-
             // Verify a change notification
-            frameHandlerNonMaster->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_EQ(InfoEventType::PARAMETER_CHANGED,
-                      static_cast<InfoEventType>(aNotification.aType));
-            ASSERT_EQ(CameraParam::CONTRAST,
+            frameHandlerNonMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+                      static_cast<EvsEventType>(aNotification.aType));
+            ASSERT_EQ(cmd,
                       static_cast<CameraParam>(aNotification.payload[0]));
             ASSERT_EQ(val1,
                       static_cast<int32_t>(aNotification.payload[1]));
         }
 
         // Try to adjust a parameter via non-master client
-        pCamNonMaster->setParameter(CameraParam::CONTRAST, val0,
+        pCamNonMaster->setIntParameter(camNonMasterCmds[0], val0,
                                     [&result, &val1](auto status, auto effectiveValue) {
                                         result = status;
                                         val1 = effectiveValue;
                                     });
-        ASSERT_TRUE(result == EvsResult::INVALID_ARG);
+        ASSERT_EQ(EvsResult::INVALID_ARG, result);
 
         // Non-master client attempts to be a master
         result = pCamNonMaster->setMaster();
-        ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
+        ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
 
         // Master client retires from a master role
         result = pCamMaster->unsetMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Try to adjust a parameter after being retired
-        pCamMaster->setParameter(CameraParam::BRIGHTNESS, val0,
+        pCamMaster->setIntParameter(camMasterCmds[0], val0,
                                  [&result, &val1](auto status, auto effectiveValue) {
                                      result = status;
                                      val1 = effectiveValue;
                                  });
-        ASSERT_TRUE(result == EvsResult::INVALID_ARG);
+        ASSERT_EQ(EvsResult::INVALID_ARG, result);
 
         // Non-master client becomes a master
         result = pCamNonMaster->setMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Try to adjust a parameter via new master client
-        pCamNonMaster->setParameter(CameraParam::BRIGHTNESS, val0,
-                                    [&result, &val1](auto status, auto effectiveValue) {
-                                        result = status;
-                                        val1 = effectiveValue;
-                                    });
-        ASSERT_TRUE(result == EvsResult::OK ||            // Succeeded to program
-                    result == EvsResult::INVALID_ARG);    // Camera parameter is not supported
+        for (auto &cmd : camNonMasterCmds) {
+            // Get a valid parameter value range
+            int32_t minVal, maxVal, step;
+            pCamNonMaster->getIntParameterRange(
+                cmd,
+                [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
+                    minVal = val0;
+                    maxVal = val1;
+                    step   = val2;
+                }
+            );
 
-        // Wait a moment
-        sleep(1);
+            EvsResult result = EvsResult::OK;
+            if (cmd == CameraParam::ABSOLUTE_FOCUS) {
+                // Try to turn off auto-focus
+                int32_t val1 = 1;
+                pCamNonMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &val1](auto status, auto effectiveValue) {
+                                       result = status;
+                                       val1 = effectiveValue;
+                                   });
+                ASSERT_EQ(EvsResult::OK, result);
+                ASSERT_EQ(val1, 0);
+            }
 
-        // Verify a change notification
-        if (result == EvsResult::OK) {
-            frameHandlerMaster->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_EQ(static_cast<InfoEventType>(aNotification.aType),
-                      InfoEventType::PARAMETER_CHANGED);
-            ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
-                      CameraParam::BRIGHTNESS);
+            // Try to program a parameter
+            val0 = minVal + (std::rand() % (maxVal - minVal));
+
+            // Round down to the nearest multiple of the step size
+            val0 = val0 - (val0 % step);
+            pCamNonMaster->setIntParameter(cmd, val0,
+                                        [&result, &val1](auto status, auto effectiveValue) {
+                                            result = status;
+                                            val1 = effectiveValue;
+                                        });
+            ASSERT_EQ(EvsResult::OK, result);
+
+            // Wait a moment
+            sleep(1);
+
+            // Clients other than the current master must receive a parameter change
+            // notification whenever the master adjusts a parameter.
+            EvsEvent aNotification = {};
+
+            pCamNonMaster->getIntParameter(cmd,
+                                        [&result, &val1](auto status, auto value) {
+                                            result = status;
+                                            if (status == EvsResult::OK) {
+                                               val1 = value;
+                                            }
+                                        });
+            ASSERT_EQ(EvsResult::OK, result);
+            ASSERT_EQ(val0, val1) << "Values do not match.";
+
+            // Verify a change notification
+            frameHandlerMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+                      static_cast<EvsEventType>(aNotification.aType));
+            ASSERT_EQ(cmd,
+                      static_cast<CameraParam>(aNotification.payload[0]));
             ASSERT_EQ(val1,
                       static_cast<int32_t>(aNotification.payload[1]));
         }
 
         // New master retires from a master role
         result = pCamNonMaster->unsetMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Shutdown
         frameHandlerMaster->shutdown();
@@ -943,9 +1060,18 @@
 TEST_F(EvsHidlTest, HighPriorityCameraClient) {
     ALOGI("Starting HighPriorityCameraClient test");
 
+    if (mIsHwModule) {
+        // This test does not apply to HW module implementations.
+        return;
+    }
+
     // Get the camera list
     loadCameraList();
 
+    // A null stream configuration makes EVS use the default resolution and
+    // output format.
+    Stream nullCfg = {};
+
     // Request exclusive access to the EVS display
     sp<IEvsDisplay> pDisplay = pEnumerator->openDisplay();
     ASSERT_NE(pDisplay, nullptr);
@@ -954,15 +1080,38 @@
     for (auto&& cam: cameraInfo) {
         // Create two clients
         sp<IEvsCamera_1_1> pCam0 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
         sp<IEvsCamera_1_1> pCam1 =
-            IEvsCamera_1_1::castFrom(pEnumerator->openCamera(cam.cameraId))
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam1, nullptr);
 
+        // Get the parameter list; this test will use the first command in both
+        // lists.
+        std::vector<CameraParam> cam0Cmds, cam1Cmds;
+        pCam0->getParameterList([&cam0Cmds](hidl_vec<CameraParam> cmdList) {
+                cam0Cmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    cam0Cmds.push_back(cmd);
+                }
+            }
+        );
+
+        pCam1->getParameterList([&cam1Cmds](hidl_vec<CameraParam> cmdList) {
+                cam1Cmds.reserve(cmdList.size());
+                for (auto &&cmd : cmdList) {
+                    cam1Cmds.push_back(cmd);
+                }
+            }
+        );
+        if (cam0Cmds.size() < 1 || cam1Cmds.size() < 1) {
+            // Cannot run this test if either client reports no supported parameters.
+            return;
+        }
+
         // Set up a frame receiver object which will fire up its own thread.
         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
                                                           pDisplay,
@@ -982,67 +1131,121 @@
         frameHandler0->waitForFrameCount(1);
         frameHandler1->waitForFrameCount(1);
 
-        // Client 1 becomes a master and programs a brightness.
+        // Client 1 becomes a master and programs a parameter.
         EvsResult result = EvsResult::OK;
-        int32_t val0 = 100;
+        // Get a valid parameter value range
+        int32_t minVal, maxVal, step;
+        pCam1->getIntParameterRange(
+            cam1Cmds[0],
+            [&minVal, &maxVal, &step](int32_t val0, int32_t val1, int32_t val2) {
+                minVal = val0;
+                maxVal = val1;
+                step   = val2;
+            }
+        );
+
+        if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
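+            // ABSOLUTE_FOCUS is expected to take effect only while auto-focus is
+            // disabled, so check and disable it first.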
+            // Try to turn off auto-focus
+            int32_t val1 = 0;
+            pCam1->getIntParameter(CameraParam::AUTO_FOCUS,
+                               [&result, &val1](auto status, auto value) {
+                                   result = status;
+                                   if (status == EvsResult::OK) {
+                                      val1 = value;
+                                   }
+                               });
+            if (val1 != 0) {
+                pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &val1](auto status, auto effectiveValue) {
+                                       result = status;
+                                       val1 = effectiveValue;
+                                   });
+                ASSERT_EQ(EvsResult::OK, result);
+                ASSERT_EQ(val1, 0);
+            }
+        }
+
+        // Try to program a parameter with a random value within [minVal, maxVal)
+        int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
         int32_t val1 = 0;
 
-        result = pCam1->setMaster();
-        ASSERT_TRUE(result == EvsResult::OK);
+        // Round down to the nearest multiple of the step size
+        val0 = val0 - (val0 % step);
 
-        pCam1->setParameter(CameraParam::BRIGHTNESS, val0,
+        result = pCam1->setMaster();
+        ASSERT_EQ(EvsResult::OK, result);
+
+        pCam1->setIntParameter(cam1Cmds[0], val0,
                             [&result, &val1](auto status, auto effectiveValue) {
                                 result = status;
                                 val1 = effectiveValue;
                             });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
-
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Verify a change notification
-        InfoEventDesc aNotification = {};
-        if (result == EvsResult::OK) {
-            bool timeout =
-                frameHandler0->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_FALSE(timeout) << "Expected event does not arrive";
-            ASSERT_EQ(static_cast<InfoEventType>(aNotification.aType),
-                      InfoEventType::PARAMETER_CHANGED);
-            ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
-                      CameraParam::BRIGHTNESS);
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
-        }
+        EvsEvent aNotification = {};
+        bool timeout =
+            frameHandler0->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+        ASSERT_FALSE(timeout) << "Expected event does not arrive";
+        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+                  EvsEventType::PARAMETER_CHANGED);
+        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
+                  cam1Cmds[0]);
+        ASSERT_EQ(val1,
+                  static_cast<int32_t>(aNotification.payload[1]));
 
         // Client 0 steals a master role
         ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
 
-        frameHandler1->waitForEvent(InfoEventType::MASTER_RELEASED, aNotification);
-        ASSERT_EQ(static_cast<InfoEventType>(aNotification.aType),
-                  InfoEventType::MASTER_RELEASED);
+        frameHandler1->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+                  EvsEventType::MASTER_RELEASED);
 
-        // Client 0 programs a brightness
-        val0 = 50;
+        // Client 0 programs a parameter
+        val0 = minVal + (std::rand() % (maxVal - minVal));
         val1 = 0;
-        pCam0->setParameter(CameraParam::BRIGHTNESS, val0,
+
+        // Round down to the nearest multiple of the step size
+        val0 = val0 - (val0 % step);
+
+        if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
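+            // As for client 1, make sure auto-focus is off so that the manual
+            // focus value can take effect.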
+            // Try to turn off auto-focus
+            int32_t val1 = 0;
+            pCam0->getIntParameter(CameraParam::AUTO_FOCUS,
+                               [&result, &val1](auto status, auto value) {
+                                   result = status;
+                                   if (status == EvsResult::OK) {
+                                      val1 = value;
+                                   }
+                               });
+            if (val1 != 0) {
+                pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &val1](auto status, auto effectiveValue) {
+                                       result = status;
+                                       val1 = effectiveValue;
+                                   });
+                ASSERT_EQ(EvsResult::OK, result);
+                ASSERT_EQ(val1, 0);
+            }
+        }
+
+        pCam0->setIntParameter(cam0Cmds[0], val0,
                             [&result, &val1](auto status, auto effectiveValue) {
                                 result = status;
                                 val1 = effectiveValue;
                             });
-        ASSERT_TRUE(result == EvsResult::OK ||
-                    result == EvsResult::INVALID_ARG);
+        ASSERT_EQ(EvsResult::OK, result);
 
         // Verify a change notification
-        if (result == EvsResult::OK) {
-            bool timeout =
-                frameHandler1->waitForEvent(InfoEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_FALSE(timeout) << "Expected event does not arrive";
-            ASSERT_EQ(static_cast<InfoEventType>(aNotification.aType),
-                      InfoEventType::PARAMETER_CHANGED);
-            ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
-                      CameraParam::BRIGHTNESS);
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
-        }
+        timeout =
+            frameHandler1->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
+        ASSERT_FALSE(timeout) << "Expected event does not arrive";
+        ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+                  EvsEventType::PARAMETER_CHANGED);
+        ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
+                  cam0Cmds[0]);
+        ASSERT_EQ(val1,
+                  static_cast<int32_t>(aNotification.payload[1]));
 
         // Turn off the display (yes, before the stream stops -- it should be handled)
         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
@@ -1061,6 +1264,248 @@
 }
 
 
+/*
+ * CameraUseStreamConfigToDisplay:
+ * End-to-end test of data flowing from the camera to the display.  Similar to
+ * the CameraToDisplayRoundTrip test case, but this one retrieves the available
+ * stream configurations from EVS and uses one of them to start a video stream.
+ */
+TEST_F(EvsHidlTest, CameraUseStreamConfigToDisplay) {
+    ALOGI("Starting CameraUseStreamConfigToDisplay test");
+
+    // Get the camera list
+    loadCameraList();
+
+    // Request exclusive access to the EVS display
+    sp<IEvsDisplay> pDisplay = pEnumerator->openDisplay();
+    ASSERT_NE(pDisplay, nullptr);
+
+    // Test each reported camera
+    for (auto&& cam: cameraInfo) {
+        // Choose the largest RGBA_8888 output configuration whose frame rate
+        // is at least minReqFps.
+        Stream targetCfg = {};
+        const int32_t minReqFps = 15;
+        int32_t maxArea = 0;
+        camera_metadata_entry_t streamCfgs;
+        bool foundCfg = false;
+        if (!find_camera_metadata_entry(
+                 reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
+                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                 &streamCfgs)) {
+            // Stream configurations are found in metadata
+            RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
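+            // Each configuration entry occupies kStreamCfgSz consecutive int32
+            // values, reinterpreted here as one RawStreamConfig record.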
+            for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
+                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+                    ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
+
+                    if (ptr->width * ptr->height > maxArea &&
+                        ptr->framerate >= minReqFps) {
+                        targetCfg.width = ptr->width;
+                        targetCfg.height = ptr->height;
+
+                        maxArea = ptr->width * ptr->height;
+                        foundCfg = true;
+                    }
+                }
+                ++ptr;
+            }
+        }
+        targetCfg.format =
+            static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
+
+        if (!foundCfg) {
+            // This camera does not report a usable stream configuration in its
+            // metadata; skip it.
+            continue;
+        }
+
+        sp<IEvsCamera_1_1> pCam =
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
+            .withDefault(nullptr);
+        ASSERT_NE(pCam, nullptr);
+
+        // Set up a frame receiver object which will fire up its own thread.
+        sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
+                                                         pDisplay,
+                                                         FrameHandler::eAutoReturn);
+
+
+        // Activate the display
+        pDisplay->setDisplayState(DisplayState::VISIBLE_ON_NEXT_FRAME);
+
+        // Start the camera's video stream
+        bool startResult = frameHandler->startStream();
+        ASSERT_TRUE(startResult);
+
+        // Wait a while to let the data flow
+        static const int kSecondsToWait = 5;
+        const int streamTimeMs = kSecondsToWait * kSecondsToMilliseconds -
+                                 kMaxStreamStartMilliseconds;
+        const unsigned minimumFramesExpected = streamTimeMs * kMinimumFramesPerSecond /
+                                               kSecondsToMilliseconds;
+        sleep(kSecondsToWait);
+        unsigned framesReceived = 0;
+        unsigned framesDisplayed = 0;
+        frameHandler->getFramesCounters(&framesReceived, &framesDisplayed);
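+        // Every received frame should also have been forwarded to the display.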
+        EXPECT_EQ(framesReceived, framesDisplayed);
+        EXPECT_GE(framesDisplayed, minimumFramesExpected);
+
+        // Turn off the display (yes, before the stream stops -- it should be handled)
+        pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
+
+        // Shut down the streamer
+        frameHandler->shutdown();
+
+        // Explicitly release the camera
+        pEnumerator->closeCamera(pCam);
+    }
+
+    // Explicitly release the display
+    pEnumerator->closeDisplay(pDisplay);
+}
+
+
+/*
+ * MultiCameraStreamUseConfig:
+ * Verify that each client can start and stop video streams on the same
+ * underlying camera with the same stream configuration.
+ */
+TEST_F(EvsHidlTest, MultiCameraStreamUseConfig) {
+    ALOGI("Starting MultiCameraStreamUseConfig test");
+
+    if (mIsHwModule) {
+        // This test does not apply to HW module implementations.
+        return;
+    }
+
+    // Get the camera list
+    loadCameraList();
+
+    // Test each reported camera
+    for (auto&& cam: cameraInfo) {
+        // Choose the largest RGBA_8888 output configuration whose frame rate
+        // is at least minReqFps.
+        Stream targetCfg = {};
+        const int32_t minReqFps = 15;
+        int32_t maxArea = 0;
+        camera_metadata_entry_t streamCfgs;
+        bool foundCfg = false;
+        if (!find_camera_metadata_entry(
+                 reinterpret_cast<camera_metadata_t *>(cam.metadata.data()),
+                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                 &streamCfgs)) {
+            // Stream configurations are found in metadata
+            RawStreamConfig *ptr = reinterpret_cast<RawStreamConfig *>(streamCfgs.data.i32);
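+            // Walk the configuration entries the same way as in the
+            // CameraUseStreamConfigToDisplay test above.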
+            for (unsigned idx = 0; idx < streamCfgs.count; idx += kStreamCfgSz) {
+                if (ptr->direction == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT &&
+                    ptr->format == HAL_PIXEL_FORMAT_RGBA_8888) {
+
+                    if (ptr->width * ptr->height > maxArea &&
+                        ptr->framerate >= minReqFps) {
+                        targetCfg.width = ptr->width;
+                        targetCfg.height = ptr->height;
+
+                        maxArea = ptr->width * ptr->height;
+                        foundCfg = true;
+                    }
+                }
+                ++ptr;
+            }
+        }
+        targetCfg.format =
+            static_cast<PixelFormat>(HAL_PIXEL_FORMAT_RGBA_8888);
+
+        if (!foundCfg) {
+            ALOGI("Device %s does not provide a list of supported stream configurations, skipped",
+                  cam.v1.cameraId.c_str());
+
+            continue;
+        }
+
+        // Create the first camera client with a selected stream configuration.
+        sp<IEvsCamera_1_1> pCam0 =
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
+            .withDefault(nullptr);
+        ASSERT_NE(pCam0, nullptr);
+
+        // Try to create the second camera client with a different stream
+        // configuration.
+        int32_t id = targetCfg.id;
+        targetCfg.id += 1;  // EVS manager sees only the stream id.
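+        // Opening the same camera with a mismatched stream id is expected to fail.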
+        sp<IEvsCamera_1_1> pCam1 =
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
+            .withDefault(nullptr);
+        ASSERT_EQ(pCam1, nullptr);
+
+        // Try again with the same stream configuration.
+        targetCfg.id = id;
+        pCam1 =
+            IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, targetCfg))
+            .withDefault(nullptr);
+        ASSERT_NE(pCam1, nullptr);
+
+        // Set up per-client frame receiver objects, each of which fires up its own thread
+        sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
+                                                          nullptr,
+                                                          FrameHandler::eAutoReturn);
+        ASSERT_NE(frameHandler0, nullptr);
+
+        sp<FrameHandler> frameHandler1 = new FrameHandler(pCam1, cam,
+                                                          nullptr,
+                                                          FrameHandler::eAutoReturn);
+        ASSERT_NE(frameHandler1, nullptr);
+
+        // Start the camera's video stream via both clients
+        bool startResult = false;
+        startResult = frameHandler0->startStream() &&
+                      frameHandler1->startStream();
+        ASSERT_TRUE(startResult);
+
+        // Ensure the stream starts
+        frameHandler0->waitForFrameCount(1);
+        frameHandler1->waitForFrameCount(1);
+
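+        // Record the time after the first frames arrived; used below to compute frame rates.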
+        nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
+
+        // Wait a bit, then ensure both clients get at least the required minimum number of frames
+        sleep(5);
+        nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
+        unsigned framesReceived0 = 0, framesReceived1 = 0;
+        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
+        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
+        framesReceived0 = framesReceived0 - 1;    // Back out the first frame we already waited for
+        framesReceived1 = framesReceived1 - 1;    // Back out the first frame we already waited for
+        nsecs_t runTime = end - firstFrame;
+        float framesPerSecond0 = framesReceived0 / (runTime * kNanoToSeconds);
+        float framesPerSecond1 = framesReceived1 / (runTime * kNanoToSeconds);
+        ALOGI("Measured camera rate %3.2f fps and %3.2f fps", framesPerSecond0, framesPerSecond1);
+        EXPECT_GE(framesPerSecond0, kMinimumFramesPerSecond);
+        EXPECT_GE(framesPerSecond1, kMinimumFramesPerSecond);
+
+        // Shut down one client
+        frameHandler0->shutdown();
+
+        // Read frame counters again
+        frameHandler0->getFramesCounters(&framesReceived0, nullptr);
+        frameHandler1->getFramesCounters(&framesReceived1, nullptr);
+
+        // Wait a bit again
+        sleep(5);
+        unsigned framesReceivedAfterStop0 = 0, framesReceivedAfterStop1 = 0;
+        frameHandler0->getFramesCounters(&framesReceivedAfterStop0, nullptr);
+        frameHandler1->getFramesCounters(&framesReceivedAfterStop1, nullptr);
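+        // Client 0 must not receive any more frames after its shutdown, while
+        // client 1 is expected to keep streaming.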
+        EXPECT_EQ(framesReceived0, framesReceivedAfterStop0);
+        EXPECT_LT(framesReceived1, framesReceivedAfterStop1);
+
+        // Shut down the other client
+        frameHandler1->shutdown();
+
+        // Explicitly release the camera
+        pEnumerator->closeCamera(pCam0);
+        pEnumerator->closeCamera(pCam1);
+    }
+}
+
+
 int main(int argc, char** argv) {
     ::testing::AddGlobalTestEnvironment(EvsHidlEnvironment::Instance());
     ::testing::InitGoogleTest(&argc, argv);