Merge "Add owners file for rebootescrow"
diff --git a/automotive/can/1.0/default/CanBus.cpp b/automotive/can/1.0/default/CanBus.cpp
index 454ab00..8fb09eb 100644
--- a/automotive/can/1.0/default/CanBus.cpp
+++ b/automotive/can/1.0/default/CanBus.cpp
@@ -42,6 +42,8 @@
 
     struct canfd_frame frame = {};
     frame.can_id = message.id;
+    if (message.isExtendedId) frame.can_id |= CAN_EFF_FLAG;
+    if (message.remoteTransmissionRequest) frame.can_id |= CAN_RTR_FLAG;
     frame.len = message.payload.size();
     memcpy(frame.data, message.payload.data(), message.payload.size());
 
@@ -226,8 +228,8 @@
 static bool satisfiesFilterFlag(FilterFlag filterFlag, bool flag) {
     // TODO(b/144458917) add testing for this to VTS tests
     if (filterFlag == FilterFlag::DONT_CARE) return true;
-    if (filterFlag == FilterFlag::REQUIRE) return flag;
-    if (filterFlag == FilterFlag::EXCLUDE) return !flag;
+    if (filterFlag == FilterFlag::SET) return flag;
+    if (filterFlag == FilterFlag::NOT_SET) return !flag;
     return false;
 }
 
@@ -241,25 +243,26 @@
  * \param id Message id to filter
  * \return true if the message id matches the filter, false otherwise
  */
-static bool match(const hidl_vec<CanMessageFilter>& filter, CanMessageId id, bool isExtendedId,
-                  bool isRtr) {
+static bool match(const hidl_vec<CanMessageFilter>& filter, CanMessageId id, bool isRtr,
+                  bool isExtendedId) {
     if (filter.size() == 0) return true;
 
-    bool anyNonInvertedPresent = false;
-    bool anyNonInvertedSatisfied = false;
+    bool anyNonExcludeRulePresent = false;
+    bool anyNonExcludeRuleSatisfied = false;
     for (auto& rule : filter) {
-        const bool satisfied = ((id & rule.mask) == rule.id) == !rule.inverted &&
+        const bool satisfied = ((id & rule.mask) == rule.id) &&
                                satisfiesFilterFlag(rule.rtr, isRtr) &&
                                satisfiesFilterFlag(rule.extendedFormat, isExtendedId);
-        if (rule.inverted) {
-            // Any inverted (blacklist) rule not being satisfied invalidates the whole filter set.
-            if (!satisfied) return false;
+
+        if (rule.exclude) {
+            // Any exclude (blacklist) rule that is satisfied invalidates the whole filter set.
+            if (satisfied) return false;
         } else {
-            anyNonInvertedPresent = true;
-            if (satisfied) anyNonInvertedSatisfied = true;
+            anyNonExcludeRulePresent = true;
+            if (satisfied) anyNonExcludeRuleSatisfied = true;
         }
     }
-    return !anyNonInvertedPresent || anyNonInvertedSatisfied;
+    return !anyNonExcludeRulePresent || anyNonExcludeRuleSatisfied;
 }
 
 void CanBus::notifyErrorListeners(ErrorEvent err, bool isFatal) {
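For reference, the filter semantics implemented by match() above boil down to: a message passes when no exclude rule matches it and, if at least one non-exclude rule exists, at least one non-exclude rule matches it. A minimal standalone sketch of that logic (hypothetical helper names, not part of this change):

#include <cstdint>
#include <vector>

enum class FilterFlag { DONT_CARE, SET, NOT_SET };

struct Rule {
    uint32_t id;
    uint32_t mask;
    FilterFlag rtr;
    FilterFlag extendedFormat;
    bool exclude;
};

static bool flagOk(FilterFlag f, bool v) {
    if (f == FilterFlag::DONT_CARE) return true;
    return (f == FilterFlag::SET) ? v : !v;
}

static bool passes(const std::vector<Rule>& rules, uint32_t id, bool isRtr, bool isExtended) {
    if (rules.empty()) return true;  // an empty filter set accepts everything
    bool anyInclude = false;
    bool anyIncludeHit = false;
    for (const auto& r : rules) {
        const bool hit = ((id & r.mask) == r.id) && flagOk(r.rtr, isRtr) &&
                         flagOk(r.extendedFormat, isExtended);
        if (r.exclude) {
            if (hit) return false;  // a matching exclude rule rejects the message
        } else {
            anyInclude = true;
            if (hit) anyIncludeHit = true;
        }
    }
    return !anyInclude || anyIncludeHit;  // no include rules, or at least one matched
}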
diff --git a/automotive/can/1.0/types.hal b/automotive/can/1.0/types.hal
index f09c940..5eeed53 100644
--- a/automotive/can/1.0/types.hal
+++ b/automotive/can/1.0/types.hal
@@ -73,23 +73,22 @@
  * Single filter rule for CAN messages.
  *
  * A filter is satisfied if:
- * ((receivedId & mask) == (id & mask)) == !inverted
+ * ((receivedId & mask) == (id & mask)) == !exclude
  *
- * In order for set of filters to match, at least one non-inverted filters must match (if there is
- * one) and all inverted filters must match. In other words:
- *  - a single matching non-inverted filter makes the whole set matching;
- *  - a single non-matching inverted filter makes the whole set non-matching.
- *
- * Additional less common options for filtering include:
- * rtr - Remote Transmission Request; another ECU requests DLC bytes of data on this message ID
- * extendedFormat - 29 bit message ID is used instead of 11 bits
+ * In order for a set of filters to match, at least one non-exclude filter must match (if there
+ * is one) and all exclude filters must match. In other words:
+ *  - a single matching non-exclude filter makes the whole set matching;
+ *  - a single non-matching exclude filter makes the whole set non-matching.
  */
 struct CanMessageFilter {
     CanMessageId id;
     uint32_t mask;
-    bool inverted;
+    /** Remote Transmission Request; another ECU requests <DLC> bytes of data on this message ID */
     FilterFlag rtr;
+    /** 29 bit message ID is used instead of 11 bits */
     FilterFlag extendedFormat;
+    /** If true, the rule excludes matching messages; 'exclude' *DOES* apply to rtr and extendedFormat! */
+    bool exclude;
 };
 
 
@@ -100,9 +99,9 @@
     /** Default; this FilterFlag doesn't affect which messages are filtered */
     DONT_CARE = 0,
     /** This FilterFlag MUST be present in received messages to pass through the filter */
-    REQUIRE,
+    SET,
     /** This FilterFlag must NOT be present in received messages to pass through the filter */
-    EXCLUDE,
+    NOT_SET,
 };
 
 enum Result : uint8_t {
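As an illustration of the new field order {id, mask, rtr, extendedFormat, exclude}, a hypothetical client-side filter set (not part of this change) that accepts standard-frame IDs 0x100..0x1FF but drops remote-transmission-request frames could look like:

hidl_vec<CanMessageFilter> filters = {
        // include rule: 11-bit IDs 0x100..0x1FF, any RTR state
        {0x100, 0x700, FilterFlag::DONT_CARE, FilterFlag::NOT_SET, false},
        // exclude rule: mask 0x000 matches every ID, so this drops all RTR frames
        {0x000, 0x000, FilterFlag::SET, FilterFlag::DONT_CARE, true},
};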
diff --git a/automotive/can/1.0/vts/functional/VtsHalCanBusV1_0TargetTest.cpp b/automotive/can/1.0/vts/functional/VtsHalCanBusV1_0TargetTest.cpp
index 8deaed6..cdea8b6 100644
--- a/automotive/can/1.0/vts/functional/VtsHalCanBusV1_0TargetTest.cpp
+++ b/automotive/can/1.0/vts/functional/VtsHalCanBusV1_0TargetTest.cpp
@@ -78,7 +78,7 @@
 TEST_F(CanBusHalTest, SendNoPayload) {
     CanMessage msg = {};
     msg.id = 0x123;
-
+    ASSERT_NE(mCanBus, nullptr);
     const auto result = mCanBus->send(msg);
     ASSERT_EQ(Result::OK, result);
 }
@@ -118,9 +118,9 @@
 
 TEST_F(CanBusHalTest, ListenSomeFilter) {
     hidl_vec<CanMessageFilter> filters = {
-            {0x123, 0x1FF, false, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE},
-            {0x001, 0x00F, true, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE},
-            {0x200, 0x100, false, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE},
+            {0x123, 0x1FF, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+            {0x001, 0x00F, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x200, 0x100, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
     };
 
     const auto [result, closeHandle] = listen(filters, new CanMessageListener());
@@ -171,14 +171,20 @@
 }  // namespace android::hardware::automotive::can::V1_0::vts
 
 /**
+ * This test requires that you bring up a valid bus first.
+ *
+ * Before running:
+ * mma -j && adb root && adb remount && adb sync
+ *
  * Example manual invocation:
  * adb shell /data/nativetest64/VtsHalCanBusV1_0TargetTest/VtsHalCanBusV1_0TargetTest \
- *     --hal_service_instance=android.hardware.automotive.can@1.0::ICanBus/test
+ *     --hal_service_instance=android.hardware.automotive.can@1.0::ICanBus/<NAME_OF_VALID_BUS>
  */
 int main(int argc, char** argv) {
     using android::hardware::automotive::can::V1_0::ICanBus;
     using android::hardware::automotive::can::V1_0::vts::gEnv;
     using android::hardware::automotive::can::V1_0::vts::utils::SimpleHidlEnvironment;
+    setenv("TREBLE_TESTING_OVERRIDE", "true", true);
     android::base::SetDefaultTag("CanBusVts");
     android::base::SetMinimumLogSeverity(android::base::VERBOSE);
     gEnv = new SimpleHidlEnvironment<ICanBus>;
diff --git a/automotive/can/1.0/vts/functional/VtsHalCanBusVirtualV1_0TargetTest.cpp b/automotive/can/1.0/vts/functional/VtsHalCanBusVirtualV1_0TargetTest.cpp
index ca661fe..efaad53 100644
--- a/automotive/can/1.0/vts/functional/VtsHalCanBusVirtualV1_0TargetTest.cpp
+++ b/automotive/can/1.0/vts/functional/VtsHalCanBusVirtualV1_0TargetTest.cpp
@@ -121,6 +121,7 @@
     }
 
     void send(const CanMessage& msg) {
+        EXPECT_NE(mBus, nullptr);
         const auto result = mBus->send(msg);
         EXPECT_EQ(Result::OK, result);
     }
@@ -155,9 +156,11 @@
 hidl_vec<hidl_string> CanBusVirtualHalTest::mBusNames;
 bool CanBusVirtualHalTest::mTestCaseInitialized = false;
 
-static CanMessage makeMessage(CanMessageId id) {
+static CanMessage makeMessage(CanMessageId id, bool rtr, bool extended) {
     CanMessage msg = {};
     msg.id = id;
+    msg.remoteTransmissionRequest = rtr;
+    msg.isExtendedId = extended;
     return msg;
 }
 
@@ -251,56 +254,621 @@
     bus1.send({});
 }
 
-TEST_F(CanBusVirtualHalTest, Filter) {
+TEST_F(CanBusVirtualHalTest, FilterPositive) {
     if (mBusNames.size() < 2u) GTEST_SKIP() << "Not testable with less than two CAN buses.";
     auto bus1 = makeBus();
     auto bus2 = makeBus();
 
+    /* clang-format off */
+    /*        id,            mask,           rtr,                   eff,          exclude */
     hidl_vec<CanMessageFilter> filterPositive = {
-            {0x101, 0x100, false, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE},
-            {0x010, 0x0F0, false, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE},
+            {0x334,           0x73F, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+            {0x49D,           0x700, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+            {0x325,           0x7FC, FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   false},
+            {0x246,           0x7FF, FilterFlag::SET,       FilterFlag::DONT_CARE, false},
+            {0x1A2,           0x7FB, FilterFlag::SET,       FilterFlag::NOT_SET,   false},
+            {0x607,           0x7C9, FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, false},
+            {0x7F4,           0x777, FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   false},
+            {0x1BF19EAF, 0x10F0F0F0, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+            {0x12E99200, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       false},
+            {0x06B70270, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::DONT_CARE, false},
+            {0x096CFD2B, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       false},
+            {0x1BDCB008, 0x0F0F0F0F, FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, false},
+            {0x08318B46, 0x10F0F0F0, FilterFlag::NOT_SET,   FilterFlag::SET,       false},
+            {0x06B,           0x70F, FilterFlag::DONT_CARE, FilterFlag::SET,       false},
+            {0x750,           0x70F, FilterFlag::SET,       FilterFlag::SET,       false},
+            {0x5CF,           0x70F, FilterFlag::NOT_SET,   FilterFlag::SET,       false},
     };
+    /* clang-format on */
     auto listenerPositive = bus2.listen(filterPositive);
 
-    hidl_vec<CanMessageFilter> filterNegative = {
-            {0x123, 0x0FF, true, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE},
-            {0x004, 0x00F, true, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE},
-    };
-    auto listenerNegative = bus2.listen(filterNegative);
+    // 334:73F, DNC, DNC
+    bus1.send(makeMessage(0x3F4, false, false));
+    bus1.send(makeMessage(0x334, false, true));
+    bus1.send(makeMessage(0x374, true, false));
+    bus1.send(makeMessage(0x3F4, true, true));
 
-    bus1.send(makeMessage(0));
-    bus1.send(makeMessage(0x1A0));
-    bus1.send(makeMessage(0x1A1));
-    bus1.send(makeMessage(0x2A0));
-    bus1.send(makeMessage(0x3A0));
-    bus1.send(makeMessage(0x010));
-    bus1.send(makeMessage(0x123));
-    bus1.send(makeMessage(0x023));
-    bus1.send(makeMessage(0x124));
+    // 49D:700, DNC, DNC
+    bus1.send(makeMessage(0x404, false, false));
+    bus1.send(makeMessage(0x4A5, false, true));
+    bus1.send(makeMessage(0x4FF, true, false));
+    bus1.send(makeMessage(0x46B, true, true));
+
+    // 325:7FC, DNC, NS
+    bus1.send(makeMessage(0x324, false, false));
+    bus1.send(makeMessage(0x325, false, true));  // filtered out
+    bus1.send(makeMessage(0x326, true, false));
+    bus1.send(makeMessage(0x327, true, true));  // filtered out
+
+    // 246:7FF, SET, DNC
+    bus1.send(makeMessage(0x246, false, false));  // filtered out
+    bus1.send(makeMessage(0x246, false, true));   // filtered out
+    bus1.send(makeMessage(0x246, true, false));
+    bus1.send(makeMessage(0x246, true, true));
+
+    // 1A2:7FB, SET, NS
+    bus1.send(makeMessage(0x1A2, false, false));  // filtered out
+    bus1.send(makeMessage(0x1A6, false, true));   // filtered out
+    bus1.send(makeMessage(0x1A2, true, false));
+    bus1.send(makeMessage(0x1A6, true, true));  // filtered out
+
+    // 607:7C9, NS, DNC
+    bus1.send(makeMessage(0x607, false, false));
+    bus1.send(makeMessage(0x613, false, true));
+    bus1.send(makeMessage(0x625, true, false));  // filtered out
+    bus1.send(makeMessage(0x631, true, true));   // filtered out
+
+    // 7F4:777, NS, NS
+    bus1.send(makeMessage(0x774, false, false));
+    bus1.send(makeMessage(0x7F4, false, true));  // filtered out
+    bus1.send(makeMessage(0x77C, true, false));  // filtered out
+    bus1.send(makeMessage(0x7FC, true, false));  // filtered out
+
+    // 1BF19EAF:10F0F0F0, DNC, DNC
+    bus1.send(makeMessage(0x11F293A4, false, false));
+    bus1.send(makeMessage(0x15F697A8, false, true));
+    bus1.send(makeMessage(0x19FA9BAC, true, false));
+    bus1.send(makeMessage(0x1DFE9FA0, true, true));
+
+    // 12E99200:1FFFFFFF, DNC, SET
+    bus1.send(makeMessage(0x12E99200, false, false));  // filtered out
+    bus1.send(makeMessage(0x12E99200, false, true));
+    bus1.send(makeMessage(0x12E99200, true, false));  // filtered out
+    bus1.send(makeMessage(0x12E99200, true, true));
+
+    // 06B70270:1FFFFFFF, SET, DNC
+    bus1.send(makeMessage(0x06B70270, false, false));  // filtered out
+    bus1.send(makeMessage(0x06B70270, false, true));   // filtered out
+    bus1.send(makeMessage(0x06B70270, true, false));
+    bus1.send(makeMessage(0x06B70270, true, true));
+
+    // 096CFD2B:1FFFFFFF, SET, SET
+    bus1.send(makeMessage(0x096CFD2B, false, false));  // filtered out
+    bus1.send(makeMessage(0x096CFD2B, false, true));   // filtered out
+    bus1.send(makeMessage(0x096CFD2B, true, false));   // filtered out
+    bus1.send(makeMessage(0x096CFD2B, true, true));
+
+    // 1BDCB008:0F0F0F0F, NS, DNC
+    bus1.send(makeMessage(0x1B2C3048, false, false));
+    bus1.send(makeMessage(0x0B5C6078, false, true));
+    bus1.send(makeMessage(0x1B8C90A8, true, false));  // filtered out
+    bus1.send(makeMessage(0x0BBCC0D8, true, true));   // filtered out
+
+    // 08318B46:10F0F0F0, NS, SET
+    bus1.send(makeMessage(0x0F3E8D4C, false, false));  // filtered out
+    bus1.send(makeMessage(0x0B3A8948, false, true));
+    bus1.send(makeMessage(0x07368544, true, false));  // filtered out
+    bus1.send(makeMessage(0x03328140, true, true));   // filtered out
+
+    // 06B:70F, DNC, SET
+    bus1.send(makeMessage(0x00B, false, false));  // filtered out
+    bus1.send(makeMessage(0x04B, false, true));
+    bus1.send(makeMessage(0x08B, true, false));  // filtered out
+    bus1.send(makeMessage(0x0FB, true, true));
+
+    // 750:70F, SET, SET
+    bus1.send(makeMessage(0x7F0, false, false));  // filtered out
+    bus1.send(makeMessage(0x780, false, true));   // filtered out
+    bus1.send(makeMessage(0x740, true, false));   // filtered out
+    bus1.send(makeMessage(0x700, true, true));
+
+    // 5CF:70F, NS, SET
+    bus1.send(makeMessage(0x51F, false, false));  // filtered out
+    bus1.send(makeMessage(0x53F, false, true));
+    bus1.send(makeMessage(0x57F, true, false));  // filtered out
+    bus1.send(makeMessage(0x5FF, true, true));   // filtered out
 
     std::vector<can::V1_0::CanMessage> expectedPositive{
-            makeMessage(0x1A0),  //
-            makeMessage(0x1A1),  //
-            makeMessage(0x3A0),  //
-            makeMessage(0x010),  //
-            makeMessage(0x123),  //
-            makeMessage(0x124),  //
+            makeMessage(0x3F4, false, false),       // 334:73F, DNC, DNC
+            makeMessage(0x334, false, true),        // 334:73F, DNC, DNC
+            makeMessage(0x374, true, false),        // 334:73F, DNC, DNC
+            makeMessage(0x3F4, true, true),         // 334:73F, DNC, DNC
+            makeMessage(0x404, false, false),       // 49D:700, DNC, DNC
+            makeMessage(0x4A5, false, true),        // 49D:700, DNC, DNC
+            makeMessage(0x4FF, true, false),        // 49D:700, DNC, DNC
+            makeMessage(0x46B, true, true),         // 49D:700, DNC, DNC
+            makeMessage(0x324, false, false),       // 325:7FC, DNC, NS
+            makeMessage(0x326, true, false),        // 325:7FC, DNC, NS
+            makeMessage(0x246, true, false),        // 246:7FF, SET, DNC
+            makeMessage(0x246, true, true),         // 246:7FF, SET, DNC
+            makeMessage(0x1A2, true, false),        // 1A2:7FB, SET, NS
+            makeMessage(0x607, false, false),       // 607:7C9, NS, DNC
+            makeMessage(0x613, false, true),        // 607:7C9, NS, DNC
+            makeMessage(0x774, false, false),       // 7F4:777, NS, NS
+            makeMessage(0x11F293A4, false, false),  // 1BF19EAF:10F0F0F0, DNC, DNC
+            makeMessage(0x15F697A8, false, true),   // 1BF19EAF:10F0F0F0, DNC, DNC
+            makeMessage(0x19FA9BAC, true, false),   // 1BF19EAF:10F0F0F0, DNC, DNC
+            makeMessage(0x1DFE9FA0, true, true),    // 1BF19EAF:10F0F0F0, DNC, DNC
+            makeMessage(0x12E99200, false, true),   // 12E99200:1FFFFFFF, DNC, SET
+            makeMessage(0x12E99200, true, true),    // 12E99200:1FFFFFFF, DNC, SET
+            makeMessage(0x06B70270, true, false),   // 06B70270:1FFFFFFF, SET, DNC
+            makeMessage(0x06B70270, true, true),    // 06B70270:1FFFFFFF, SET, DNC
+            makeMessage(0x096CFD2B, true, true),    // 096CFD2B:1FFFFFFF, SET, SET
+            makeMessage(0x1B2C3048, false, false),  // 1BDCB008:0F0F0F0F, NS, DNC
+            makeMessage(0x0B5C6078, false, true),   // 1BDCB008:0F0F0F0F, NS, DNC
+            makeMessage(0x0B3A8948, false, true),   // 08318B46:10F0F0F0, NS, SET
+            makeMessage(0x04B, false, true),        // 06B:70F, DNC, SET
+            makeMessage(0x0FB, true, true),         // 06B:70F, DNC, SET
+            makeMessage(0x700, true, true),         // 750:70F, SET, SET
+            makeMessage(0x53F, false, true),        // 5CF:70F, NS, SET
     };
+
+    auto messagesPositive = listenerPositive->fetchMessages(100ms, expectedPositive.size());
+    clearTimestamps(messagesPositive);
+    ASSERT_EQ(expectedPositive, messagesPositive);
+}
+
+TEST_F(CanBusVirtualHalTest, FilterNegative) {
+    if (mBusNames.size() < 2u) GTEST_SKIP() << "Not testable with less than two CAN buses.";
+    auto bus1 = makeBus();
+    auto bus2 = makeBus();
+
+    /* clang-format off */
+    /*        id,             mask,           rtr,                   eff,         exclude */
+    hidl_vec<CanMessageFilter> filterNegative = {
+            {0x063,           0x7F3, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x0A1,           0x78F, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x18B,           0x7E3, FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x1EE,           0x7EC, FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x23F,           0x7A5, FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x31F,           0x77F, FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x341,           0x77F, FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x196573DB, 0x1FFFFF7F, FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x1CFCB417, 0x1FFFFFEC, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x17CCC433, 0x1FFFFFEC, FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x0BC2F508, 0x1FFFFFC3, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x1179B5D2, 0x1FFFFFC3, FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x082AF63D, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x66D,           0x76F, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x748,           0x7CC, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x784,           0x7CC, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+    };
+    /* clang-format on */
+
+    auto listenerNegative = bus2.listen(filterNegative);
+
+    // 063:7F3, DNC, DNC: ~06[3,7,B,F]
+    bus1.send(makeMessage(0x063, false, false));  // filtered out
+    bus1.send(makeMessage(0x060, false, true));
+    bus1.send(makeMessage(0x05B, true, false));
+    bus1.send(makeMessage(0x06F, true, true));  // filtered out
+
+    // 0A1:78F, DNC, DNC: ~0[8-F]1
+    bus1.send(makeMessage(0x081, false, false));  // filtered out
+    bus1.send(makeMessage(0x031, false, true));
+    bus1.send(makeMessage(0x061, true, false));
+    bus1.send(makeMessage(0x071, true, true));
+
+    // 18B:7E3, DNC, NS: ~1[8-9][7,B,F]
+    bus1.send(makeMessage(0x18B, false, false));  // filtered out
+    bus1.send(makeMessage(0x188, false, true));
+    bus1.send(makeMessage(0x123, true, false));
+    bus1.send(makeMessage(0x1D5, true, true));
+
+    // 1EE:7EC, SET, DNC: ~1[E-F][C-F]
+    bus1.send(makeMessage(0x17E, false, false));
+    bus1.send(makeMessage(0x138, false, true));
+    bus1.send(makeMessage(0x123, true, false));
+    bus1.send(makeMessage(0x1EC, true, true));  // filtered out
+
+    // 23F:7A5, SET, NS: ~2[2,3,6,7][5,7,D,F]
+    bus1.send(makeMessage(0x222, false, false));
+    bus1.send(makeMessage(0x275, false, true));
+    bus1.send(makeMessage(0x23f, true, false));  // filtered out
+    bus1.send(makeMessage(0x241, true, false));
+    bus1.send(makeMessage(0x2FF, true, true));
+
+    // 31F:77F, NS, DNC: ~3[1,9]F
+    bus1.send(makeMessage(0x32F, false, false));
+    bus1.send(makeMessage(0x31F, false, true));  // filtered out
+    bus1.send(makeMessage(0x36F, false, true));
+    bus1.send(makeMessage(0x31F, true, false));
+    bus1.send(makeMessage(0x3F3, true, true));
+
+    // 341:77F, NS, NS: ~3[4,C]1
+    bus1.send(makeMessage(0x341, false, false));  // filtered out
+    bus1.send(makeMessage(0x352, false, false));
+    bus1.send(makeMessage(0x3AA, false, true));
+    bus1.send(makeMessage(0x3BC, true, false));
+    bus1.send(makeMessage(0x3FF, true, true));
+
+    // 196573DB:1FFFFF7F, DNC, DNC: ~196573[5,D]B
+    bus1.send(makeMessage(0x1965733B, false, false));
+    bus1.send(makeMessage(0x1965734B, false, true));
+    bus1.send(makeMessage(0x1965735B, true, false));  // filtered out
+    bus1.send(makeMessage(0x1965736B, true, true));
+
+    // 1CFCB417:1FFFFFEC, DNC, SET: ~1CFCB4[0-1][4-7]
+    bus1.send(makeMessage(0x1CFCB407, false, false));
+    bus1.send(makeMessage(0x1CFCB4FF, false, true));
+    bus1.send(makeMessage(0x1CFCB414, true, false));
+    bus1.send(makeMessage(0x1CFCB407, true, true));  // filtered out
+
+    // 17CCC433:1FFFFFEC, SET, DNC: ~17CCC4[2-3][0-3]
+    bus1.send(makeMessage(0x17CCC430, false, false));
+    bus1.send(makeMessage(0x17CCC423, false, true));
+    bus1.send(makeMessage(0x17CCC420, true, false));  // filtered out
+    bus1.send(makeMessage(0x17CCC444, true, true));
+
+    // 0BC2F508:1FFFFFC3, SET, SET: ~5[0-3][0,4,8,C]
+    bus1.send(makeMessage(0x0BC2F504, false, false));
+    bus1.send(makeMessage(0x0BC2F518, false, true));
+    bus1.send(makeMessage(0x0BC2F52C, true, false));
+    bus1.send(makeMessage(0x0BC2F500, true, true));  // filtered out
+    bus1.send(makeMessage(0x0BC2F543, true, true));
+
+    // 1179B5D2:1FFFFFC3, NS, DNC: ~5[C-F][2,6,A,E]
+    bus1.send(makeMessage(0x1179B5BB, false, false));
+    bus1.send(makeMessage(0x1179B5EA, false, true));  // filtered out
+    bus1.send(makeMessage(0x1179B5C2, true, false));
+    bus1.send(makeMessage(0x1179B5DA, true, true));
+
+    // 082AF63D:1FFFFF6F, NS, SET: ~6[2,3,A,B]D
+    bus1.send(makeMessage(0x082AF62D, false, false));
+    bus1.send(makeMessage(0x082AF63D, false, true));  // filtered out
+    bus1.send(makeMessage(0x082AF60D, false, true));
+    bus1.send(makeMessage(0x082AF6AD, true, false));
+    bus1.send(makeMessage(0x082AF6BD, true, true));
+
+    // 66D:76F, DNC, SET: ~6[6,7,E,F]D
+    bus1.send(makeMessage(0x66D, false, false));
+    bus1.send(makeMessage(0x68D, false, true));
+    bus1.send(makeMessage(0x67D, true, false));
+    bus1.send(makeMessage(0x6ED, true, true));  // filtered out
+
+    // 748:7CC, SET, SET: ~0x7[4-7][8-F]
+    bus1.send(makeMessage(0x749, false, false));
+    bus1.send(makeMessage(0x75A, false, true));
+    bus1.send(makeMessage(0x76B, true, false));
+    bus1.send(makeMessage(0x748, true, true));  // filtered out
+    bus1.send(makeMessage(0x788, true, true));
+
+    // 784:7CC, NS, SET: ~0x7[8-F][4-7]
+    bus1.send(makeMessage(0x795, false, false));
+    bus1.send(makeMessage(0x784, false, true));  // filtered out
+    bus1.send(makeMessage(0x71B, false, true));
+    bus1.send(makeMessage(0x769, true, false));
+    bus1.send(makeMessage(0x784, true, true));
+
     std::vector<can::V1_0::CanMessage> expectedNegative{
-            makeMessage(0),      //
-            makeMessage(0x1A0),  //
-            makeMessage(0x1A1),  //
-            makeMessage(0x2A0),  //
-            makeMessage(0x3A0),  //
-            makeMessage(0x010),  //
+            makeMessage(0x060, false, true),        // 063:7F3, DNC, DNC
+            makeMessage(0x05B, true, false),        // 063:7F3, DNC, DNC
+            makeMessage(0x031, false, true),        // 0A1:78F, DNC, DNC
+            makeMessage(0x061, true, false),        // 0A1:78F, DNC, DNC
+            makeMessage(0x071, true, true),         // 0A1:78F, DNC, DNC
+            makeMessage(0x188, false, true),        // 18B:7E3, DNC, NS
+            makeMessage(0x123, true, false),        // 18B:7E3, DNC, NS
+            makeMessage(0x1D5, true, true),         // 18B:7E3, DNC, NS
+            makeMessage(0x17E, false, false),       // 1EE:7EC, SET, DNC
+            makeMessage(0x138, false, true),        // 1EE:7EC, SET, DNC
+            makeMessage(0x123, true, false),        // 1EE:7EC, SET, DNC
+            makeMessage(0x222, false, false),       // 23F:7A5, SET, NS
+            makeMessage(0x275, false, true),        // 23F:7A5, SET, NS
+            makeMessage(0x241, true, false),        // 23F:7A5, SET, NS
+            makeMessage(0x2FF, true, true),         // 23F:7A5, SET, NS
+            makeMessage(0x32F, false, false),       // 31F:77F, NS, DNC
+            makeMessage(0x36F, false, true),        // 31F:77F, NS, DNC
+            makeMessage(0x31F, true, false),        // 31F:77F, NS, DNC
+            makeMessage(0x3F3, true, true),         // 31F:77F, NS, DNC
+            makeMessage(0x352, false, false),       // 341:77F, NS, NS
+            makeMessage(0x3AA, false, true),        // 341:77F, NS, NS
+            makeMessage(0x3BC, true, false),        // 341:77F, NS, NS
+            makeMessage(0x3FF, true, true),         // 341:77F, NS, NS
+            makeMessage(0x1965733B, false, false),  // 196573DB:1FFFFF7F, DNC, DNC
+            makeMessage(0x1965734B, false, true),   // 196573DB:1FFFFF7F, DNC, DNC
+            makeMessage(0x1965736B, true, true),    // 196573DB:1FFFFF7F, DNC, DNC
+            makeMessage(0x1CFCB407, false, false),  // 1CFCB417:1FFFFFEC, DNC, SET
+            makeMessage(0x1CFCB4FF, false, true),   // 1CFCB417:1FFFFFEC, DNC, SET
+            makeMessage(0x1CFCB414, true, false),   // 1CFCB417:1FFFFFEC, DNC, SET
+            makeMessage(0x17CCC430, false, false),  // 17CCC433:1FFFFFEC, SET, DNC
+            makeMessage(0x17CCC423, false, true),   // 17CCC433:1FFFFFEC, SET, DNC
+            makeMessage(0x17CCC444, true, true),    // 17CCC433:1FFFFFEC, SET, DNC
+            makeMessage(0x0BC2F504, false, false),  // 0BC2F508:1FFFFFC3, SET, SET
+            makeMessage(0x0BC2F518, false, true),   // 0BC2F508:1FFFFFC3, SET, SET
+            makeMessage(0x0BC2F52C, true, false),   // 0BC2F508:1FFFFFC3, SET, SET
+            makeMessage(0x0BC2F543, true, true),    // 0BC2F508:1FFFFFC3, SET, SET
+            makeMessage(0x1179B5BB, false, false),  // 1179B5D2:1FFFFFC3, NS, DNC
+            makeMessage(0x1179B5C2, true, false),   // 1179B5D2:1FFFFFC3, NS, DNC
+            makeMessage(0x1179B5DA, true, true),    // 1179B5D2:1FFFFFC3, NS, DNC
+            makeMessage(0x082AF62D, false, false),  // 082AF63D:1FFFFF6F, NS, SET
+            makeMessage(0x082AF60D, false, true),   // 082AF63D:1FFFFF6F, NS, SET
+            makeMessage(0x082AF6AD, true, false),   // 082AF63D:1FFFFF6F, NS, SET
+            makeMessage(0x082AF6BD, true, true),    // 082AF63D:1FFFFF6F, NS, SET
+            makeMessage(0x66D, false, false),       // 66D:76F, DNC, SET
+            makeMessage(0x68D, false, true),        // 66D:76F, DNC, SET
+            makeMessage(0x67D, true, false),        // 66D:76F, DNC, SET
+            makeMessage(0x749, false, false),       // 748:7CC, SET, SET
+            makeMessage(0x75A, false, true),        // 748:7CC, SET, SET
+            makeMessage(0x76B, true, false),        // 748:7CC, SET, SET
+            makeMessage(0x788, true, true),         // 748:7CC, SET, SET
+            makeMessage(0x795, false, false),       // 784:7CC, NS, SET
+            makeMessage(0x71B, false, true),        // 784:7CC, NS, SET
+            makeMessage(0x769, true, false),        // 784:7CC, NS, SET
+            makeMessage(0x784, true, true),         // 784:7CC, NS, SET
     };
 
     auto messagesNegative = listenerNegative->fetchMessages(100ms, expectedNegative.size());
-    auto messagesPositive = listenerPositive->fetchMessages(100ms, expectedPositive.size());
     clearTimestamps(messagesNegative);
-    clearTimestamps(messagesPositive);
     ASSERT_EQ(expectedNegative, messagesNegative);
-    ASSERT_EQ(expectedPositive, messagesPositive);
+}
+
+TEST_F(CanBusVirtualHalTest, FilterMixed) {
+    if (mBusNames.size() < 2u) GTEST_SKIP() << "Not testable with less than two CAN buses.";
+    auto bus1 = makeBus();
+    auto bus2 = makeBus();
+
+    /* clang-format off */
+    /*        id,           mask,             rtr,                   eff,         exclude */
+    hidl_vec<CanMessageFilter> filterMixed = {
+            {0x000,      0x700,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+            {0x0D5,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x046,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x11D89097, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x0AB,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x00D,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x0F82400E, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x08F,      0x7FF,      FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x0BE,      0x7FF,      FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x0A271011, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x0BE,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+
+            {0x100,      0x700,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   false},
+            {0x138,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x1BF,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x13AB6165, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x17A,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x13C,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x102C5197, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x19B,      0x7FF,      FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x1B8,      0x7FF,      FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x0D6D5185, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x1B8,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+
+            {0x096A2200, 0x1FFFFF00, FilterFlag::DONT_CARE, FilterFlag::SET,       false},
+            {0x201,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x22A,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x1D1C3238, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x2C0,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x23C,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x016182C6, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x27B,      0x7FF,      FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x2A5,      0x7FF,      FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x160EB24B, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x2A5,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+
+            {0x300,      0x700,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, false},
+            {0x339,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x3D4,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x182263BE, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x327,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x36B,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x1A1D8374, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x319,      0x7FF,      FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x39E,      0x7FF,      FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x1B657332, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x39E,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+
+            {0x06C5D400, 0x1FFFFF00, FilterFlag::NOT_SET,   FilterFlag::SET,       false},
+            {0x492,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x4EE,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x07725454, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x4D5,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x402,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x139714A7, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x464,      0x7FF,      FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x454,      0x7FF,      FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x0EF4B46F, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x454,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+
+            {0x500,      0x700,      FilterFlag::SET,       FilterFlag::DONT_CARE, false},
+            {0x503,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x566,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x137605E7, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x564,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x58E,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x05F9052D, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x595,      0x7FF,      FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x563,      0x7FF,      FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x13358537, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x563,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+
+            {0x600,      0x700,      FilterFlag::SET,       FilterFlag::NOT_SET,   false},
+            {0x64D,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x620,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x1069A676, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x62D,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x6C4,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x14C76629, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x689,      0x7FF,      FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x6A4,      0x7FF,      FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x0BCCA6C2, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x6A4,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+
+            {0x04BB1700, 0x1FFFFF00, FilterFlag::SET,       FilterFlag::SET,       false},
+            {0x784,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, true},
+            {0x7F9,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::NOT_SET,   true},
+            {0x0200F77D, 0x1FFFFFFF, FilterFlag::DONT_CARE, FilterFlag::SET,       true},
+            {0x783,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::DONT_CARE, true},
+            {0x770,      0x7FF,      FilterFlag::NOT_SET,   FilterFlag::NOT_SET,   true},
+            {0x06602719, 0x1FFFFFFF, FilterFlag::NOT_SET,   FilterFlag::SET,       true},
+            {0x76B,      0x7FF,      FilterFlag::SET,       FilterFlag::DONT_CARE, true},
+            {0x7DF,      0x7FF,      FilterFlag::SET,       FilterFlag::NOT_SET,   true},
+            {0x1939E736, 0x1FFFFFFF, FilterFlag::SET,       FilterFlag::SET,       true},
+            {0x7DF,      0x7FF,      FilterFlag::DONT_CARE, FilterFlag::DONT_CARE, false},
+    };
+    /* clang-format on */
+
+    auto listenerMixed = bus2.listen(filterMixed);
+
+    bus1.send(makeMessage(0x000, true, true));  // positive filter
+    bus1.send(makeMessage(0x0D5, false, false));
+    bus1.send(makeMessage(0x046, true, false));
+    bus1.send(makeMessage(0x046, false, false));
+    bus1.send(makeMessage(0x11D89097, true, true));
+    bus1.send(makeMessage(0x11D89097, false, true));
+    bus1.send(makeMessage(0x0AB, false, false));
+    bus1.send(makeMessage(0x0AB, false, true));
+    bus1.send(makeMessage(0x00D, false, false));
+    bus1.send(makeMessage(0x0F82400E, false, true));
+    bus1.send(makeMessage(0x08F, true, false));
+    bus1.send(makeMessage(0x08F, true, true));
+    bus1.send(makeMessage(0x0BE, true, false));
+    bus1.send(makeMessage(0x0A271011, true, true));
+    bus1.send(makeMessage(0x0BE, false, true));   // not filtered
+    bus1.send(makeMessage(0x100, false, false));  // positive filter
+    bus1.send(makeMessage(0x138, false, true));
+    bus1.send(makeMessage(0x138, true, false));
+    bus1.send(makeMessage(0x1BF, false, false));
+    bus1.send(makeMessage(0x1BF, true, false));
+    bus1.send(makeMessage(0x13AB6165, false, true));
+    bus1.send(makeMessage(0x13AB6165, true, true));
+    bus1.send(makeMessage(0x17A, false, false));
+    bus1.send(makeMessage(0x17A, false, true));
+    bus1.send(makeMessage(0x13C, false, false));
+    bus1.send(makeMessage(0x102C5197, false, true));
+    bus1.send(makeMessage(0x19B, true, false));
+    bus1.send(makeMessage(0x19B, true, true));
+    bus1.send(makeMessage(0x1B8, true, false));
+    bus1.send(makeMessage(0x0D6D5185, true, true));
+    bus1.send(makeMessage(0x1B8, false, true));       // not filtered
+    bus1.send(makeMessage(0x096A2200, false, true));  // positive filter
+    bus1.send(makeMessage(0x201, false, true));
+    bus1.send(makeMessage(0x201, true, false));
+    bus1.send(makeMessage(0x22A, false, false));
+    bus1.send(makeMessage(0x22A, true, false));
+    bus1.send(makeMessage(0x1D1C3238, false, true));
+    bus1.send(makeMessage(0x1D1C3238, true, true));
+    bus1.send(makeMessage(0x2C0, false, false));
+    bus1.send(makeMessage(0x2C0, false, true));
+    bus1.send(makeMessage(0x23C, false, false));
+    bus1.send(makeMessage(0x016182C6, false, true));
+    bus1.send(makeMessage(0x27B, true, false));
+    bus1.send(makeMessage(0x27B, true, true));
+    bus1.send(makeMessage(0x2A5, true, false));
+    bus1.send(makeMessage(0x160EB24B, true, true));
+    bus1.send(makeMessage(0x2A5, false, true));   // not filtered
+    bus1.send(makeMessage(0x300, false, false));  // positive filter
+    bus1.send(makeMessage(0x339, false, true));
+    bus1.send(makeMessage(0x339, false, false));
+    bus1.send(makeMessage(0x3D4, true, false));
+    bus1.send(makeMessage(0x182263BE, false, true));
+    bus1.send(makeMessage(0x182263BE, true, true));
+    bus1.send(makeMessage(0x327, false, false));
+    bus1.send(makeMessage(0x327, false, true));
+    bus1.send(makeMessage(0x36B, false, false));
+    bus1.send(makeMessage(0x1A1D8374, false, true));
+    bus1.send(makeMessage(0x319, true, false));
+    bus1.send(makeMessage(0x319, true, true));
+    bus1.send(makeMessage(0x39E, true, false));
+    bus1.send(makeMessage(0x1B657332, true, true));
+    bus1.send(makeMessage(0x39E, false, true));       // not filtered
+    bus1.send(makeMessage(0x06C5D400, false, true));  // positive filter
+    bus1.send(makeMessage(0x492, false, true));
+    bus1.send(makeMessage(0x492, true, false));
+    bus1.send(makeMessage(0x4EE, false, false));
+    bus1.send(makeMessage(0x4EE, true, false));
+    bus1.send(makeMessage(0x07725454, false, true));
+    bus1.send(makeMessage(0x07725454, true, true));
+    bus1.send(makeMessage(0x4D5, false, false));
+    bus1.send(makeMessage(0x4D5, false, true));
+    bus1.send(makeMessage(0x402, false, false));
+    bus1.send(makeMessage(0x139714A7, false, true));
+    bus1.send(makeMessage(0x464, true, false));
+    bus1.send(makeMessage(0x464, true, true));
+    bus1.send(makeMessage(0x454, true, false));
+    bus1.send(makeMessage(0x0EF4B46F, true, true));
+    bus1.send(makeMessage(0x454, false, true));  // not filtered
+    bus1.send(makeMessage(0x500, true, false));  // positive filter
+    bus1.send(makeMessage(0x503, false, true));
+    bus1.send(makeMessage(0x503, true, false));
+    bus1.send(makeMessage(0x566, false, false));
+    bus1.send(makeMessage(0x566, true, false));
+    bus1.send(makeMessage(0x137605E7, false, true));
+    bus1.send(makeMessage(0x137605E7, true, true));
+    bus1.send(makeMessage(0x564, false, false));
+    bus1.send(makeMessage(0x564, false, true));
+    bus1.send(makeMessage(0x58E, false, false));
+    bus1.send(makeMessage(0x05F9052D, false, true));
+    bus1.send(makeMessage(0x595, true, false));
+    bus1.send(makeMessage(0x595, true, true));
+    bus1.send(makeMessage(0x563, true, false));
+    bus1.send(makeMessage(0x13358537, true, true));
+    bus1.send(makeMessage(0x563, false, true));  // not filtered
+    bus1.send(makeMessage(0x600, true, false));  // positive filter
+    bus1.send(makeMessage(0x64D, false, true));
+    bus1.send(makeMessage(0x64D, true, false));
+    bus1.send(makeMessage(0x620, false, false));
+    bus1.send(makeMessage(0x620, true, false));
+    bus1.send(makeMessage(0x1069A676, false, true));
+    bus1.send(makeMessage(0x1069A676, true, true));
+    bus1.send(makeMessage(0x62D, false, false));
+    bus1.send(makeMessage(0x62D, false, true));
+    bus1.send(makeMessage(0x6C4, false, false));
+    bus1.send(makeMessage(0x14C76629, false, true));
+    bus1.send(makeMessage(0x689, true, false));
+    bus1.send(makeMessage(0x689, true, true));
+    bus1.send(makeMessage(0x6A4, true, false));
+    bus1.send(makeMessage(0x0BCCA6C2, true, true));
+    bus1.send(makeMessage(0x6A4, false, true));      // not filtered
+    bus1.send(makeMessage(0x04BB1700, true, true));  // positive filter
+    bus1.send(makeMessage(0x784, false, true));
+    bus1.send(makeMessage(0x784, true, false));
+    bus1.send(makeMessage(0x7F9, false, false));
+    bus1.send(makeMessage(0x7F9, true, false));
+    bus1.send(makeMessage(0x0200F77D, false, true));
+    bus1.send(makeMessage(0x0200F77D, true, true));
+    bus1.send(makeMessage(0x783, false, false));
+    bus1.send(makeMessage(0x783, false, true));
+    bus1.send(makeMessage(0x770, false, false));
+    bus1.send(makeMessage(0x06602719, false, true));
+    bus1.send(makeMessage(0x76B, true, false));
+    bus1.send(makeMessage(0x76B, true, true));
+    bus1.send(makeMessage(0x7DF, true, false));
+    bus1.send(makeMessage(0x1939E736, true, true));
+    bus1.send(makeMessage(0x7DF, false, true));  // not filtered
+
+    std::vector<can::V1_0::CanMessage> expectedMixed{
+            makeMessage(0x000, true, true),  // 0x000:0x700, DONT_CARE, DONT_CARE
+            makeMessage(0x0BE, false, true),
+            makeMessage(0x100, false, false),  // 0x100:0x700, DONT_CARE, NOT_SET
+            makeMessage(0x1B8, false, true),
+            makeMessage(0x096A2200, false, true),  // 0x096A2200:0x1FFFFF00, DONT_CARE, SET
+            makeMessage(0x2A5, false, true),
+            makeMessage(0x300, false, false),  // 0x300:0x700, NOT_SET, DONT_CARE
+            makeMessage(0x39E, false, true),
+            makeMessage(0x06C5D400, false, true),  // 0x06C5D400:0x1FFFFF00, NOT_SET, SET
+            makeMessage(0x454, false, true),
+            makeMessage(0x500, true, false),  // 0x500:0x700, SET, DONT_CARE
+            makeMessage(0x563, false, true),
+            makeMessage(0x600, true, false),  // 0x600:0x700, SET, NOT_SET
+            makeMessage(0x6A4, false, true),
+            makeMessage(0x04BB1700, true, true),  // 0x04BB1700:0x1FFFFF00, SET, SET
+            makeMessage(0x7DF, false, true),
+    };
+
+    auto messagesMixed = listenerMixed->fetchMessages(100ms, expectedMixed.size());
+    clearTimestamps(messagesMixed);
+    ASSERT_EQ(expectedMixed, messagesMixed);
 }
 
 }  // namespace android::hardware::automotive::can::V1_0::vts
diff --git a/automotive/evs/1.1/IEvsCamera.hal b/automotive/evs/1.1/IEvsCamera.hal
index 975b6c6..fc68e60 100644
--- a/automotive/evs/1.1/IEvsCamera.hal
+++ b/automotive/evs/1.1/IEvsCamera.hal
@@ -34,6 +34,23 @@
     getCameraInfo_1_1() generates (CameraDesc info);
 
     /**
+     * Returns the description of the physical camera device that backs this
+     * logical camera.
+     *
+     * If the requested device does not exist or does not back this logical
+     * device, this method returns a null camera descriptor.  If this is called
+     * on a physical camera device and the given device ID matches its own,
+     * this method behaves the same as getCameraInfo_1_1(); otherwise, it
+     * returns a null camera descriptor.
+     *
+     * @param  deviceId Physical camera device identifier string.
+     * @return info     The description of a member physical camera device.
+     *                  This must be the same value as reported by
+     *                  EvsEnumerator::getCameraList_1_1().
+     */
+    getPhysicalCameraInfo(string deviceId) generates (CameraDesc info);
+
+    /**
      * Requests to pause EVS camera stream events.
      *
      * Like stopVideoStream(), events may continue to arrive for some time
@@ -51,7 +68,7 @@
     resumeVideoStream() generates (EvsResult result);
 
     /**
-     * Returns a frame that was delivered by to the IEvsCameraStream.
+     * Returns frames that were delivered to the IEvsCameraStream.
      *
      * When done consuming a frame delivered to the IEvsCameraStream
      * interface, it must be returned to the IEvsCamera for reuse.
@@ -59,10 +76,10 @@
      * as one), and if the supply is exhausted, no further frames may be
      * delivered until a buffer is returned.
      *
-     * @param  buffer A buffer to be returned.
+     * @param  buffer Buffers to be returned.
      * @return result Return EvsResult::OK if this call is successful.
      */
-    doneWithFrame_1_1(BufferDesc buffer) generates (EvsResult result);
+    doneWithFrame_1_1(vec<BufferDesc> buffer) generates (EvsResult result);
 
     /**
      * Requests to be a master client.
@@ -127,8 +144,13 @@
         generates (int32_t min, int32_t max, int32_t step);
 
     /**
-     * Requests to set a camera parameter.  Only a request from the master
-     * client will be processed successfully.
+     * Requests to set a camera parameter.
+     *
+     * Only a request from the master client will be processed successfully.
+     * When this method is called on a logical camera device, the request is
+     * forwarded to each backing physical device.  If programming any physical
+     * device fails, an error code is returned together with the effective
+     * values, one per backing camera device.
      *
      * @param  id             The identifier of camera parameter, CameraParam enum.
      *         value          A desired parameter value.
@@ -138,21 +160,22 @@
      *                        parameter is not supported.
      *                        EvsResult::UNDERLYING_SERVICE_ERROR if it fails to
      *                        program a value by any other reason.
-     *         effectiveValue A programmed parameter value.  This may differ
+     *         effectiveValue Programmed parameter values.  These may differ
      *                        from what the client gives if, for example, the
      *                        driver does not support a target parameter.
      */
     setIntParameter(CameraParam id, int32_t value)
-        generates (EvsResult result, int32_t effectiveValue);
+        generates (EvsResult result, vec<int32_t> effectiveValue);
 
     /**
-     * Retrieves a value of given camera parameter.
+     * Retrieves the values of a given camera parameter.
      *
      * @param  id     The identifier of camera parameter, CameraParam enum.
      * @return result EvsResult::OK if it succeeds to read a parameter.
      *                EvsResult::INVALID_ARG if either a requested parameter is
      *                not supported.
-     *         value  A value of requested camera parameter.
+     *         value  Values of the requested camera parameter, one value per
+     *                backing camera device.
      */
-    getIntParameter(CameraParam id) generates(EvsResult result, int32_t value);
+    getIntParameter(CameraParam id) generates(EvsResult result, vec<int32_t> value);
 };
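A rough client-side sketch of consuming the vectorized parameter results; the helper function and the use of CameraParam::BRIGHTNESS are assumptions for illustration only and are not part of this change (assumes using-declarations for the @1.1 types):

// Program a parameter on a (possibly logical) camera and collect the effective
// values, one per backing physical camera device.
std::vector<int32_t> setBrightness(const android::sp<IEvsCamera>& camera, int32_t value) {
    std::vector<int32_t> programmed;
    camera->setIntParameter(CameraParam::BRIGHTNESS, value,
            [&](EvsResult result, const android::hardware::hidl_vec<int32_t>& effective) {
                if (result != EvsResult::OK) return;
                programmed.assign(effective.begin(), effective.end());
            });
    return programmed;
}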
diff --git a/automotive/evs/1.1/IEvsCameraStream.hal b/automotive/evs/1.1/IEvsCameraStream.hal
index 9e4ea19..aa35c62 100644
--- a/automotive/evs/1.1/IEvsCameraStream.hal
+++ b/automotive/evs/1.1/IEvsCameraStream.hal
@@ -18,7 +18,7 @@
 
 import @1.0::IEvsCameraStream;
 import @1.1::BufferDesc;
-import @1.1::EvsEvent;
+import @1.1::EvsEventDesc;
 
 /**
  * Implemented on client side to receive asynchronous streaming event deliveries.
@@ -26,7 +26,7 @@
 interface IEvsCameraStream extends @1.0::IEvsCameraStream {
 
     /**
-     * Receives calls from the HAL each time a video frame is ready for inspection.
+     * Receives calls from the HAL each time video frames are ready for inspection.
      * Buffer handles received by this method must be returned via calls to
      * IEvsCamera::doneWithFrame_1_1(). When the video stream is stopped via a call
      * to IEvsCamera::stopVideoStream(), this callback may continue to happen for
@@ -35,14 +35,19 @@
      * event must be delivered.  No further frame deliveries may happen
      * thereafter.
      *
-     * @param buffer a buffer descriptor of a delivered image frame.
+     * A camera device will deliver the same number of frames as the number of
+     * backing physical camera devices; that is, a physical camera device always
+     * sends a single frame, while a logical camera device sends as many frames
+     * as it has backing physical camera devices.
+     *
+     * @param buffer Buffer descriptors of delivered image frames.
      */
-    oneway deliverFrame_1_1(BufferDesc buffer);
+    oneway deliverFrame_1_1(vec<BufferDesc> buffer);
 
     /**
      * Receives calls from the HAL each time an event happens.
      *
      * @param  event EVS event with possible event information.
      */
-    oneway notify(EvsEvent event);
+    oneway notify(EvsEventDesc event);
 };
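A rough sketch of a stream client under the batched delivery model above; the class and member names are hypothetical, and EvsEventType::STREAM_STOPPED plus the aType field are assumed from the @1.1 types rather than shown in this change:

using namespace android::hardware::automotive::evs::V1_1;
namespace V1_0 = android::hardware::automotive::evs::V1_0;

struct FrameSink : public IEvsCameraStream {
    android::sp<IEvsCamera> mCamera;  // 1.1 camera that owns the delivered buffers

    android::hardware::Return<void> deliverFrame_1_1(
            const android::hardware::hidl_vec<BufferDesc>& buffers) override {
        // A logical camera delivers one BufferDesc per backing physical device.
        // ... consume the buffers ...
        mCamera->doneWithFrame_1_1(buffers);  // return the whole batch for reuse
        return android::hardware::Void();
    }

    android::hardware::Return<void> notify(const EvsEventDesc& event) override {
        if (event.aType == EvsEventType::STREAM_STOPPED) {
            // no further frame deliveries will happen after this event
        }
        return android::hardware::Void();
    }

    // The @1.0 entry point still has to be implemented for older producers.
    android::hardware::Return<void> deliverFrame(
            const V1_0::BufferDesc& buffer) override {
        return android::hardware::Void();
    }
};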
diff --git a/automotive/evs/1.1/default/Android.bp b/automotive/evs/1.1/default/Android.bp
index 41cb426..88fd657 100644
--- a/automotive/evs/1.1/default/Android.bp
+++ b/automotive/evs/1.1/default/Android.bp
@@ -16,7 +16,7 @@
     shared_libs: [
         "android.hardware.automotive.evs@1.0",
         "android.hardware.automotive.evs@1.1",
-        "android.hardware.camera.device@3.2",
+        "android.hardware.camera.device@3.3",
         "libbase",
         "libbinder",
         "liblog",
diff --git a/automotive/evs/1.1/default/ConfigManager.cpp b/automotive/evs/1.1/default/ConfigManager.cpp
index 96a2f98..986793e 100644
--- a/automotive/evs/1.1/default/ConfigManager.cpp
+++ b/automotive/evs/1.1/default/ConfigManager.cpp
@@ -42,55 +42,32 @@
     while (curElem != nullptr) {
         if (!strcmp(curElem->Name(), "group")) {
             /* camera group identifier */
-            const char *group_id = curElem->FindAttribute("group_id")->Value();
+            const char *id = curElem->FindAttribute("id")->Value();
 
-            /* create CameraGroup */
-            unique_ptr<ConfigManager::CameraGroup> aCameraGroup(new ConfigManager::CameraGroup());
+            /* create a camera group to be filled */
+            CameraGroupInfo *aCamera = new CameraGroupInfo();
 
-            /* add a camera device to its group */
-            addCameraDevices(curElem->FindAttribute("device_id")->Value(), aCameraGroup);
-
-            /* a list of camera stream configurations */
-            const XMLElement *childElem =
-                curElem->FirstChildElement("caps")->FirstChildElement("stream");
-            while (childElem != nullptr) {
-                /* read 5 attributes */
-                const XMLAttribute *idAttr     = childElem->FindAttribute("id");
-                const XMLAttribute *widthAttr  = childElem->FindAttribute("width");
-                const XMLAttribute *heightAttr = childElem->FindAttribute("height");
-                const XMLAttribute *fmtAttr    = childElem->FindAttribute("format");
-                const XMLAttribute *fpsAttr    = childElem->FindAttribute("framerate");
-
-                const int32_t id = stoi(idAttr->Value());
-                int32_t framerate = 0;
-                if (fpsAttr != nullptr) {
-                    framerate = stoi(fpsAttr->Value());
-                }
-
-                int32_t pixFormat;
-                if (ConfigManagerUtil::convertToPixelFormat(fmtAttr->Value(),
-                                                            pixFormat)) {
-                    RawStreamConfiguration cfg = {
-                        id,
-                        stoi(widthAttr->Value()),
-                        stoi(heightAttr->Value()),
-                        pixFormat,
-                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
-                        framerate
-                    };
-                    aCameraGroup->streamConfigurations[id] = cfg;
-                }
-
-                childElem = childElem->NextSiblingElement("stream");
+            /* read camera device information */
+            if (!readCameraDeviceInfo(aCamera, curElem)) {
+                ALOGW("Failed to read camera information for %s", id);
+                delete aCamera;
+                continue;
             }
 
             /* camera group synchronization */
             const char *sync = curElem->FindAttribute("synchronized")->Value();
-            aCameraGroup->synchronized =
-                static_cast<bool>(strcmp(sync, "false"));
+            if (!strcmp(sync, "CALIBRATED")) {
+                aCamera->synchronized =
+                    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED;
+            } else if (!strcmp(sync, "APPROXIMATE")) {
+                aCamera->synchronized =
+                    ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE;
+            } else {
+                aCamera->synchronized = 0; // Not synchronized
+            }
 
             /* add a group to hash map */
-            mCameraGroups[group_id] = std::move(aCameraGroup);
+            mCameraGroupInfos.insert_or_assign(id, unique_ptr<CameraGroupInfo>(aCamera));
         } else if (!strcmp(curElem->Name(), "device")) {
             /* camera unique identifier */
             const char *id = curElem->FindAttribute("id")->Value();
@@ -98,8 +75,18 @@
             /* camera mount location */
             const char *pos = curElem->FindAttribute("position")->Value();
 
+            /* create a camera device to be filled */
+            CameraInfo *aCamera = new CameraInfo();
+
+            /* read camera device information */
+            if (!readCameraDeviceInfo(aCamera, curElem)) {
+                ALOGW("Failed to read a camera information of %s", id);
+                delete aCamera;
+                continue;
+            }
+
             /* store read camera module information */
-            mCameraInfo[id] = readCameraDeviceInfo(curElem);
+            mCameraInfo.insert_or_assign(id, unique_ptr<CameraInfo>(aCamera));
 
             /* assign a camera device to a position group */
             mCameraPosition[pos].emplace(id);
@@ -113,15 +100,13 @@
 }
 
 
-unique_ptr<ConfigManager::CameraInfo>
-ConfigManager::readCameraDeviceInfo(const XMLElement *aDeviceElem) {
-    if (aDeviceElem == nullptr) {
-        return nullptr;
+bool
+ConfigManager::readCameraDeviceInfo(CameraInfo *aCamera,
+                                    const XMLElement *aDeviceElem) {
+    if (aCamera == nullptr || aDeviceElem == nullptr) {
+        return false;
     }
 
-    /* create a CameraInfo to be filled */
-    unique_ptr<ConfigManager::CameraInfo> aCamera(new ConfigManager::CameraInfo());
-
     /* size information to allocate camera_metadata_t */
     size_t totalEntries = 0;
     size_t totalDataSize = 0;
@@ -145,14 +130,15 @@
               "allocated memory was not large enough");
     }
 
-    return aCamera;
+    return true;
 }
 
 
-size_t ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
-                                             unique_ptr<ConfigManager::CameraInfo> &aCamera,
-                                             size_t &dataSize) {
-    if (aCapElem == nullptr) {
+size_t
+ConfigManager::readCameraCapabilities(const XMLElement * const aCapElem,
+                                      CameraInfo *aCamera,
+                                      size_t &dataSize) {
+    if (aCapElem == nullptr || aCamera == nullptr) {
         return 0;
     }
 
@@ -214,7 +200,7 @@
                 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
                 framerate
             };
-            aCamera->streamConfigurations[id] = cfg;
+            aCamera->streamConfigurations.insert_or_assign(id, cfg);
         }
 
         curElem = curElem->NextSiblingElement("stream");
@@ -232,10 +218,11 @@
 }
 
 
-size_t ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
-                                       unique_ptr<ConfigManager::CameraInfo> &aCamera,
-                                       size_t &dataSize) {
-    if (aParamElem == nullptr) {
+size_t
+ConfigManager::readCameraMetadata(const XMLElement * const aParamElem,
+                                  CameraInfo *aCamera,
+                                  size_t &dataSize) {
+    if (aParamElem == nullptr || aCamera == nullptr) {
         return 0;
     }
 
@@ -258,8 +245,9 @@
                                         count
                                    );
 
-                    aCamera->cameraMetadata[tag] =
-                        make_pair(make_unique<void *>(data), count);
+                    aCamera->cameraMetadata.insert_or_assign(
+                        tag, make_pair(make_unique<void *>(data), count)
+                    );
 
                     ++numEntries;
                     dataSize += calculate_camera_metadata_entry_data_size(
@@ -269,6 +257,52 @@
                     break;
                 }
 
+                case ANDROID_REQUEST_AVAILABLE_CAPABILITIES: {
+                    camera_metadata_enum_android_request_available_capabilities_t *data =
+                        new camera_metadata_enum_android_request_available_capabilities_t[1];
+                    if (ConfigManagerUtil::convertToCameraCapability(
+                            curElem->FindAttribute("value")->Value(), *data)) {
+                        aCamera->cameraMetadata.insert_or_assign(
+                            tag, make_pair(make_unique<void *>(data), 1)
+                        );
+
+                        ++numEntries;
+                        dataSize += calculate_camera_metadata_entry_data_size(
+                                        get_camera_metadata_tag_type(tag), 1
+                                    );
+                    }
+                    break;
+                }
+
+                case ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS: {
+                    /* a comma-separated list of physical camera devices */
+                    size_t len = strlen(curElem->FindAttribute("value")->Value());
+                    char *data = new char[len + 1];
+                    memcpy(data,
+                           curElem->FindAttribute("value")->Value(),
+                           len * sizeof(char));
+                    data[len] = '\0';  // terminate the buffer before it is scanned below
+
+                    /* replace commas with null char */
+                    char *p = data;
+                    while (*p != '\0') {
+                        if (*p == ',') {
+                            *p = '\0';
+                        }
+                        ++p;
+                    }
+
+                    aCamera->cameraMetadata.insert_or_assign(
+                        tag, make_pair(make_unique<void *>(data), len)
+                    );
+
+                    ++numEntries;
+                    dataSize += calculate_camera_metadata_entry_data_size(
+                                    get_camera_metadata_tag_type(tag), len
+                                );
+                    break;
+                }
+
                 default:
                     ALOGW("Parameter %s is not supported",
                           curElem->FindAttribute("name")->Value());
@@ -283,10 +317,11 @@
 }
 
 
-bool ConfigManager::constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
-                                            const size_t totalEntries,
-                                            const size_t totalDataSize) {
-    if (!aCamera->allocate(totalEntries, totalDataSize)) {
+bool
+ConfigManager::constructCameraMetadata(CameraInfo *aCamera,
+                                       const size_t totalEntries,
+                                       const size_t totalDataSize) {
+    if (aCamera == nullptr || !aCamera->allocate(totalEntries, totalDataSize)) {
         ALOGE("Failed to allocate memory for camera metadata");
         return false;
     }
@@ -401,14 +436,14 @@
                         ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT,
                         0   // unused
                     };
-                    dpy->streamConfigurations[id] = cfg;
+                    dpy->streamConfigurations.insert_or_assign(id, cfg);
                 }
 
                 curStream = curStream->NextSiblingElement("stream");
             }
         }
 
-        mDisplayInfo[id] = std::move(dpy);
+        mDisplayInfo.insert_or_assign(id, std::move(dpy));
         curDev = curDev->NextSiblingElement("device");
     }
 
@@ -457,16 +492,6 @@
 }
 
 
-void ConfigManager::addCameraDevices(const char *devices,
-                                     unique_ptr<CameraGroup> &aGroup) {
-    stringstream device_list(devices);
-    string token;
-    while (getline(device_list, token, ',')) {
-        aGroup->devices.emplace(token);
-    }
-}
-
-
 std::unique_ptr<ConfigManager> ConfigManager::Create(const char *path) {
     unique_ptr<ConfigManager> cfgMgr(new ConfigManager(path));
 
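Note on the ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS handling above: the XML attribute value is stored as a single byte array in which every comma is replaced by '\0', i.e. the null-separated layout that consumers of this metadata tag split on. A minimal standalone sketch of that conversion; the function name and the use of std::vector are illustrative only and not part of this patch:

    #include <string>
    #include <vector>

    // Turn "/dev/video1,/dev/video2" into "/dev/video1\0/dev/video2\0".
    static std::vector<char> toPhysicalIdBlob(const std::string &commaSeparated) {
        std::vector<char> blob(commaSeparated.begin(), commaSeparated.end());
        for (char &c : blob) {
            if (c == ',') c = '\0';   // separate individual device identifiers
        }
        blob.push_back('\0');         // terminate the last identifier as well
        return blob;
    }

Keeping a terminating '\0' after the last identifier lets readers that split purely on '\0' (such as the VTS helper later in this patch) recover every identifier, including the final one.
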
diff --git a/automotive/evs/1.1/default/ConfigManager.h b/automotive/evs/1.1/default/ConfigManager.h
index 0275f90..870af1c 100644
--- a/automotive/evs/1.1/default/ConfigManager.h
+++ b/automotive/evs/1.1/default/ConfigManager.h
@@ -82,9 +82,6 @@
         unordered_map<CameraParam,
                       tuple<int32_t, int32_t, int32_t>> controls;
 
-        /* List of supported frame rates */
-        unordered_set<int32_t> frameRates;
-
         /*
          * List of supported output stream configurations; each array stores
          * format, width, height, and direction values in the order.
@@ -102,21 +99,15 @@
         camera_metadata_t *characteristics;
     };
 
-    class CameraGroup {
+    class CameraGroupInfo : public CameraInfo {
     public:
-        CameraGroup() {}
+        CameraGroupInfo() {}
 
         /* ID of member camera devices */
         unordered_set<string> devices;
 
         /* The capture operation of member camera devices are synchronized */
         bool synchronized = false;
-
-        /*
-         * List of stream configurations that are supposed by all camera devices
-         * in this group.
-         */
-        unordered_map<int32_t, RawStreamConfiguration> streamConfigurations;
     };
 
     class SystemInfo {
@@ -165,11 +156,11 @@
     /*
      * Return a list of cameras
      *
-     * @return CameraGroup
+     * @return CameraGroupInfo
      *         A pointer to a camera group identified by a given id.
      */
-    unique_ptr<CameraGroup>& getCameraGroup(const string& gid) {
-        return mCameraGroups[gid];
+    unique_ptr<CameraGroupInfo>& getCameraGroupInfo(const string& gid) {
+        return mCameraGroupInfos[gid];
     }
 
 
@@ -203,8 +194,8 @@
     /* Internal data structure for camera device information */
     unordered_map<string, unique_ptr<DisplayInfo>> mDisplayInfo;
 
-    /* Camera groups are stored in <groud id, CameraGroup> hash map */
-    unordered_map<string, unique_ptr<CameraGroup>> mCameraGroups;
+    /* Camera groups are stored in <group id, CameraGroupInfo> hash map */
+    unordered_map<string, unique_ptr<CameraGroupInfo>> mCameraGroupInfos;
 
     /*
      * Camera positions are stored in <position, camera id set> hash map.
@@ -253,16 +244,19 @@
     /*
      * read camera device information
      *
-     * @param  aDeviceElem
+     * @param  aCamera
+     *         A pointer to CameraInfo that will be completed by this
+     *         method.
+     * @param  aDeviceElem
      *         A pointer to "device" XML element that contains camera module
      *         capability info and its characteristics.
      *
-     * @return unique_ptr<CameraInfo>
-     *         A pointer to CameraInfo class that contains camera module
-     *         capability and characteristics.  Please note that this transfers
-     *         the ownership of created CameraInfo to the caller.
+     * @return bool
+     *         Return false upon any failure in reading and processing camera
+     *         device information.
      */
-    unique_ptr<CameraInfo> readCameraDeviceInfo(const XMLElement *aDeviceElem);
+    bool readCameraDeviceInfo(CameraInfo *aCamera,
+                              const XMLElement *aDeviceElem);
 
     /*
      * read camera metadata
@@ -280,7 +274,7 @@
      *         Number of camera metadata entries
      */
     size_t readCameraCapabilities(const XMLElement * const aCapElem,
-                                  unique_ptr<CameraInfo> &aCamera,
+                                  CameraInfo *aCamera,
                                   size_t &dataSize);
 
     /*
@@ -298,7 +292,7 @@
      *         Number of camera metadata entries
      */
     size_t readCameraMetadata(const XMLElement * const aParamElem,
-                              unique_ptr<CameraInfo> &aCamera,
+                              CameraInfo *aCamera,
                               size_t &dataSize);
 
     /*
@@ -316,21 +310,9 @@
      *         or its size is not large enough to add all found camera metadata
      *         entries.
      */
-    bool constructCameraMetadata(unique_ptr<CameraInfo> &aCamera,
+    bool constructCameraMetadata(CameraInfo *aCamera,
                                  const size_t totalEntries,
                                  const size_t totalDataSize);
-
-    /*
-     * parse a comma-separated list of camera devices and add them to
-     * CameraGroup.
-     *
-     * @param  devices
-     *         A comma-separated list of camera device identifiers.
-     * @param  aGroup
-     *         Camera group which cameras will be added to.
-     */
-    void addCameraDevices(const char *devices,
-                          unique_ptr<CameraGroup> &aGroup);
 };
 #endif // CONFIG_MANAGER_H
 
diff --git a/automotive/evs/1.1/default/ConfigManagerUtil.cpp b/automotive/evs/1.1/default/ConfigManagerUtil.cpp
index 8206daa..d10f236 100644
--- a/automotive/evs/1.1/default/ConfigManagerUtil.cpp
+++ b/automotive/evs/1.1/default/ConfigManagerUtil.cpp
@@ -90,6 +90,30 @@
         aTag =  ANDROID_LENS_POSE_ROTATION;
     } else if (!strcmp(name, "LENS_POSE_TRANSLATION")) {
         aTag =  ANDROID_LENS_POSE_TRANSLATION;
+    } else if (!strcmp(name, "REQUEST_AVAILABLE_CAPABILITIES")) {
+        aTag =  ANDROID_REQUEST_AVAILABLE_CAPABILITIES;
+    } else if (!strcmp(name, "LOGICAL_MULTI_CAMERA_PHYSICAL_IDS")) {
+        aTag =  ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS;
+    } else {
+        return false;
+    }
+
+    return true;
+}
+
+
+bool ConfigManagerUtil::convertToCameraCapability(
+    const char *name,
+    camera_metadata_enum_android_request_available_capabilities_t &cap) {
+
+    if (!strcmp(name, "DEPTH_OUTPUT")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT;
+    } else if (!strcmp(name, "LOGICAL_MULTI_CAMERA")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA;
+    } else if (!strcmp(name, "MONOCHROME")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME;
+    } else if (!strcmp(name, "SECURE_IMAGE_DATA")) {
+        cap = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA;
     } else {
         return false;
     }
diff --git a/automotive/evs/1.1/default/ConfigManagerUtil.h b/automotive/evs/1.1/default/ConfigManagerUtil.h
index 8c89ae7..1710cac 100644
--- a/automotive/evs/1.1/default/ConfigManagerUtil.h
+++ b/automotive/evs/1.1/default/ConfigManagerUtil.h
@@ -55,6 +55,14 @@
      */
     static string trimString(const string &src,
                              const string &ws = " \n\r\t\f\v");
+
+    /**
+     * Convert a given string to the corresponding camera capability
+     */
+    static bool convertToCameraCapability(
+        const char *name,
+        camera_metadata_enum_android_request_available_capabilities_t &cap);
+
 };
 
 #endif // CONFIG_MANAGER_UTIL_H
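
Note on the helper declared above: convertToCameraCapability() maps the capability names accepted in the XML (DEPTH_OUTPUT, LOGICAL_MULTI_CAMERA, MONOCHROME, SECURE_IMAGE_DATA) onto the camera metadata enum. A hedged usage fragment, assuming ConfigManagerUtil.h and the camera metadata headers are already included:

    camera_metadata_enum_android_request_available_capabilities_t cap;
    if (ConfigManagerUtil::convertToCameraCapability("LOGICAL_MULTI_CAMERA", cap)) {
        // cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA
    } else {
        // Any name outside the four recognized capabilities returns false,
        // so malformed XML values are simply skipped by the parser.
    }
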
diff --git a/automotive/evs/1.1/default/EvsCamera.cpp b/automotive/evs/1.1/default/EvsCamera.cpp
index 5ba753d..b7e4efa 100644
--- a/automotive/evs/1.1/default/EvsCamera.cpp
+++ b/automotive/evs/1.1/default/EvsCamera.cpp
@@ -21,7 +21,7 @@
 
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
-
+#include <utils/SystemClock.h>
 
 namespace android {
 namespace hardware {
@@ -240,9 +240,23 @@
 }
 
 
-Return<EvsResult> EvsCamera::doneWithFrame_1_1(const BufferDesc_1_1& bufDesc)  {
+Return<void> EvsCamera::getPhysicalCameraInfo(const hidl_string& id,
+                                              getCameraInfo_1_1_cb _hidl_cb) {
+    ALOGD("%s", __FUNCTION__);
+
+    // This works exactly the same as getCameraInfo_1_1() in the default implementation.
+    (void)id;
+    _hidl_cb(mDescription);
+    return Void();
+}
+
+
+Return<EvsResult> EvsCamera::doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers)  {
     std::lock_guard <std::mutex> lock(mAccessLock);
-    returnBuffer(bufDesc.bufferId, bufDesc.buffer.nativeHandle);
+
+    for (auto&& buffer : buffers) {
+        returnBuffer(buffer.bufferId, buffer.buffer.nativeHandle);
+    }
 
     return EvsResult::OK;
 }
@@ -490,12 +504,17 @@
             newBuffer.buffer.nativeHandle = mBuffers[idx].handle;
             newBuffer.pixelSize = sizeof(uint32_t);
             newBuffer.bufferId = idx;
+            newBuffer.deviceId = mDescription.v1.cameraId;
+            newBuffer.timestamp = elapsedRealtimeNano();
 
             // Write test data into the image buffer
             fillTestFrame(newBuffer);
 
             // Issue the (asynchronous) callback to the client -- can't be holding the lock
-            auto result = mStream->deliverFrame_1_1(newBuffer);
+            hidl_vec<BufferDesc_1_1> frames;
+            frames.resize(1);
+            frames[0] = newBuffer;
+            auto result = mStream->deliverFrame_1_1(frames);
             if (result.isOk()) {
                 ALOGD("Delivered %p as id %d",
                       newBuffer.buffer.nativeHandle.getNativeHandle(), newBuffer.bufferId);
@@ -527,7 +546,7 @@
     }
 
     // If we've been asked to stop, send an event to signal the actual end of stream
-    EvsEvent event;
+    EvsEventDesc event;
     event.aType = EvsEventType::STREAM_STOPPED;
     auto result = mStream->notify(event);
     if (!result.isOk()) {
diff --git a/automotive/evs/1.1/default/EvsCamera.h b/automotive/evs/1.1/default/EvsCamera.h
index c15b4b1..72a1b57 100644
--- a/automotive/evs/1.1/default/EvsCamera.h
+++ b/automotive/evs/1.1/default/EvsCamera.h
@@ -62,9 +62,11 @@
 
     // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
     Return<void>      getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb)  override;
+    Return<void>      getPhysicalCameraInfo(const hidl_string& id,
+                                            getPhysicalCameraInfo_cb _hidl_cb)  override;
     Return<EvsResult> pauseVideoStream() override;
     Return<EvsResult> resumeVideoStream() override;
-    Return<EvsResult> doneWithFrame_1_1(const BufferDesc_1_1& buffer) override;
+    Return<EvsResult> doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffer) override;
     Return<EvsResult> setMaster() override;
     Return<EvsResult> forceMaster(const sp<IEvsDisplay>& display) override;
     Return<EvsResult> unsetMaster() override;
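
Note on the vectorized interface above: with deliverFrame_1_1() and doneWithFrame_1_1() now moving hidl_vec<BufferDesc_1_1>, a logical camera implementation can hand over one frame per physical device in a single callback. A hedged HAL-side fragment; fillDescFor() and the two device names are hypothetical helpers, not part of this patch:

    hidl_vec<BufferDesc_1_1> frames;
    frames.resize(2);
    frames[0] = fillDescFor("/dev/video1");     // hypothetical helper that sets the
    frames[1] = fillDescFor("/dev/video2");     // buffer, bufferId and deviceId fields
    for (auto& f : frames) {
        f.timestamp = elapsedRealtimeNano();    // same clock the default HAL uses
    }
    if (!mStream->deliverFrame_1_1(frames).isOk()) {
        ALOGE("Failed to deliver the batched frames");
    }
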
diff --git a/automotive/evs/1.1/default/resources/evs_default_configuration.xml b/automotive/evs/1.1/default/resources/evs_default_configuration.xml
index 692102e..a79e7c2 100644
--- a/automotive/evs/1.1/default/resources/evs_default_configuration.xml
+++ b/automotive/evs/1.1/default/resources/evs_default_configuration.xml
@@ -28,8 +28,31 @@
         <num_cameras value='1'/>
     </system>
 
-    <!-- camera device information -->
+    <!-- camera information -->
     <camera>
+        <!-- camera group starts -->
+        <group id='group1' synchronized='APPROXIMATE'>
+            <caps>
+                <stream id='0' width='640'  height='360'  format='RGBA_8888' framerate='30'/>
+            </caps>
+
+            <!-- list of parameters -->
+            <characteristics>
+                <parameter
+                    name='REQUEST_AVAILABLE_CAPABILITIES'
+                    type='enum'
+                    size='1'
+                    value='LOGICAL_MULTI_CAMERA'
+                />
+                <parameter
+                    name='LOGICAL_MULTI_CAMERA_PHYSICAL_IDS'
+                    type='byte[]'
+                    size='1'
+                    value='/dev/video1'
+                />
+            </characteristics>
+        </group>
+
         <!-- camera device starts -->
         <device id='/dev/video1' position='rear'>
             <caps>
diff --git a/automotive/evs/1.1/types.hal b/automotive/evs/1.1/types.hal
index dcb2abb..f88d223 100644
--- a/automotive/evs/1.1/types.hal
+++ b/automotive/evs/1.1/types.hal
@@ -61,6 +61,14 @@
      * Opaque value from driver
      */
     uint32_t bufferId;
+    /**
+     * Unique identifier of the physical camera device that produces this buffer.
+     */
+    string deviceId;
+    /**
+     * Time that this buffer is being filled.
+     */
+    int64_t timestamp;
 };
 
 /**
@@ -97,12 +105,16 @@
 /**
  * Structure that describes informative events occurred during EVS is streaming
  */
-struct EvsEvent {
+struct EvsEventDesc {
     /**
      * Type of an informative event
      */
     EvsEventType aType;
     /**
+     * Device identifier
+     */
+    string deviceId;
+    /**
      * Possible additional information
      */
     uint32_t[4] payload;
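
Note on the new BufferDesc_1_1 fields above: deviceId and timestamp let a client tell which physical camera produced each frame in a batch and when. A minimal, hypothetical IEvsCameraStream callback fragment (MyStream and mCamera are placeholders, not names from this patch):

    Return<void> MyStream::deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
        for (const auto& buffer : buffers) {
            ALOGD("Frame %u from %s at %lld ns",
                  buffer.bufferId, buffer.deviceId.c_str(),
                  static_cast<long long>(buffer.timestamp));
        }
        mCamera->doneWithFrame_1_1(buffers);    // return the whole batch at once
        return Void();
    }
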
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.cpp b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
index 6d53652..ebf488a 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.cpp
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.cpp
@@ -80,7 +80,7 @@
     asyncStopStream();
 
     // Wait until the stream has actually stopped
-    std::unique_lock<std::mutex> lock(mLock);
+    std::unique_lock<std::mutex> lock(mEventLock);
     if (mRunning) {
         mEventSignal.wait(lock, [this]() { return !mRunning; });
     }
@@ -88,7 +88,7 @@
 
 
 bool FrameHandler::returnHeldBuffer() {
-    std::unique_lock<std::mutex> lock(mLock);
+    std::lock_guard<std::mutex> lock(mLock);
 
     // Return the oldest buffer we're holding
     if (mHeldBuffers.empty()) {
@@ -96,16 +96,16 @@
         return false;
     }
 
-    BufferDesc_1_1 buffer = mHeldBuffers.front();
+    hidl_vec<BufferDesc_1_1> buffers = mHeldBuffers.front();
     mHeldBuffers.pop();
-    mCamera->doneWithFrame_1_1(buffer);
+    mCamera->doneWithFrame_1_1(buffers);
 
     return true;
 }
 
 
 bool FrameHandler::isRunning() {
-    std::unique_lock<std::mutex> lock(mLock);
+    std::lock_guard<std::mutex> lock(mLock);
     return mRunning;
 }
 
@@ -120,7 +120,7 @@
 
 
 void FrameHandler::getFramesCounters(unsigned* received, unsigned* displayed) {
-    std::unique_lock<std::mutex> lock(mLock);
+    std::lock_guard<std::mutex> lock(mLock);
 
     if (received) {
         *received = mFramesReceived;
@@ -138,11 +138,17 @@
 }
 
 
-Return<void> FrameHandler::deliverFrame_1_1(const BufferDesc_1_1& bufDesc) {
+Return<void> FrameHandler::deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
+    mLock.lock();
+    // For VTS tests, FrameHandler uses only a single frame among the delivered frames.
+    auto bufferIdx = mFramesDisplayed % buffers.size();
+    auto buffer = buffers[bufferIdx];
+    mLock.unlock();
+
     const AHardwareBuffer_Desc* pDesc =
-        reinterpret_cast<const AHardwareBuffer_Desc *>(&bufDesc.buffer.description);
+        reinterpret_cast<const AHardwareBuffer_Desc *>(&buffer.buffer.description);
     ALOGD("Received a frame from the camera (%p)",
-          bufDesc.buffer.nativeHandle.getNativeHandle());
+          buffer.buffer.nativeHandle.getNativeHandle());
 
     // Store a dimension of a received frame.
     mFrameWidth = pDesc->width;
@@ -150,6 +156,7 @@
 
     // If we were given an opened display at construction time, then send the received
     // image back down the camera.
+    bool displayed = false;
     if (mDisplay.get()) {
         // Get the output buffer we'll use to display the imagery
         BufferDesc_1_0 tgtBuffer = {};
@@ -163,7 +170,7 @@
             ALOGE("Didn't get requested output buffer -- skipping this frame.");
         } else {
             // Copy the contents of the of buffer.memHandle into tgtBuffer
-            copyBufferContents(tgtBuffer, bufDesc);
+            copyBufferContents(tgtBuffer, buffer);
 
             // Send the target buffer back for display
             Return<EvsResult> result = mDisplay->returnTargetBufferForDisplay(tgtBuffer);
@@ -179,40 +186,42 @@
             } else {
                 // Everything looks good!
                 // Keep track so tests or watch dogs can monitor progress
-                mLock.lock();
-                mFramesDisplayed++;
-                mLock.unlock();
+                displayed = true;
             }
         }
     }
 
+    mLock.lock();
+    // increases counters
+    ++mFramesReceived;
+    mFramesDisplayed += (int)displayed;
+    mLock.unlock();
+    mFrameSignal.notify_all();
 
     switch (mReturnMode) {
     case eAutoReturn:
         // Send the camera buffer back now that the client has seen it
         ALOGD("Calling doneWithFrame");
-        mCamera->doneWithFrame_1_1(bufDesc);
+        mCamera->doneWithFrame_1_1(buffers);
         break;
     case eNoAutoReturn:
-        // Hang onto the buffer handle for now -- the client will return it explicitly later
-        mHeldBuffers.push(bufDesc);
+        // Hang onto the buffer handles for now -- the client will return them explicitly later
+        mHeldBuffers.push(buffers);
+        break;
     }
 
-    mLock.lock();
-    ++mFramesReceived;
-    mLock.unlock();
-    mFrameSignal.notify_all();
-
     ALOGD("Frame handling complete");
 
     return Void();
 }
 
 
-Return<void> FrameHandler::notify(const EvsEvent& event) {
+Return<void> FrameHandler::notify(const EvsEventDesc& event) {
     // Local flag we use to keep track of when the stream is stopping
-    mLock.lock();
-    mLatestEventDesc = event;
+    std::unique_lock<std::mutex> lock(mEventLock);
+    mLatestEventDesc.aType = event.aType;
+    mLatestEventDesc.payload[0] = event.payload[0];
+    mLatestEventDesc.payload[1] = event.payload[1];
     if (mLatestEventDesc.aType == EvsEventType::STREAM_STOPPED) {
         // Signal that the last frame has been received and the stream is stopped
         mRunning = false;
@@ -222,8 +231,8 @@
     } else {
         ALOGD("Received an event %s", eventToString(mLatestEventDesc.aType));
     }
-    mLock.unlock();
-    mEventSignal.notify_all();
+    lock.unlock();
+    mEventSignal.notify_one();
 
     return Void();
 }
@@ -342,25 +351,34 @@
     }
 }
 
-bool FrameHandler::waitForEvent(const EvsEventType aTargetEvent,
-                                EvsEvent &event) {
+bool FrameHandler::waitForEvent(const EvsEventDesc& aTargetEvent,
+                                      EvsEventDesc& aReceivedEvent,
+                                      bool ignorePayload) {
     // Wait until we get an expected parameter change event.
-    std::unique_lock<std::mutex> lock(mLock);
+    std::unique_lock<std::mutex> lock(mEventLock);
     auto now = std::chrono::system_clock::now();
-    bool result = mEventSignal.wait_until(lock, now + 5s,
-        [this, aTargetEvent, &event](){
-            bool flag = mLatestEventDesc.aType == aTargetEvent;
-            if (flag) {
-                event.aType = mLatestEventDesc.aType;
-                event.payload[0] = mLatestEventDesc.payload[0];
-                event.payload[1] = mLatestEventDesc.payload[1];
+    bool found = false;
+    while (!found) {
+        bool result = mEventSignal.wait_until(lock, now + 5s,
+            [this, aTargetEvent, ignorePayload, &aReceivedEvent, &found](){
+                found = (mLatestEventDesc.aType == aTargetEvent.aType) &&
+                        (ignorePayload || (mLatestEventDesc.payload[0] == aTargetEvent.payload[0] &&
+                                           mLatestEventDesc.payload[1] == aTargetEvent.payload[1]));
+
+                aReceivedEvent.aType = mLatestEventDesc.aType;
+                aReceivedEvent.payload[0] = mLatestEventDesc.payload[0];
+                aReceivedEvent.payload[1] = mLatestEventDesc.payload[1];
+                return found;
             }
+        );
 
-            return flag;
+        if (!result) {
+            ALOGW("A timer is expired before a target event has happened.");
+            break;
         }
-    );
+    }
 
-    return !result;
+    return found;
 }
 
 const char *FrameHandler::eventToString(const EvsEventType aType) {
diff --git a/automotive/evs/1.1/vts/functional/FrameHandler.h b/automotive/evs/1.1/vts/functional/FrameHandler.h
index e5f1b8f..21e85fe 100644
--- a/automotive/evs/1.1/vts/functional/FrameHandler.h
+++ b/automotive/evs/1.1/vts/functional/FrameHandler.h
@@ -73,8 +73,9 @@
     bool isRunning();
 
     void waitForFrameCount(unsigned frameCount);
-    bool waitForEvent(const EvsEventType aTargetEvent,
-                            EvsEvent &eventDesc);
+    bool waitForEvent(const EvsEventDesc& aTargetEvent,
+                            EvsEventDesc& aReceivedEvent,
+                            bool ignorePayload = false);
     void getFramesCounters(unsigned* received, unsigned* displayed);
     void getFrameDimension(unsigned* width, unsigned* height);
 
@@ -83,8 +84,8 @@
     Return<void> deliverFrame(const BufferDesc_1_0& buffer) override;
 
     // Implementation for ::android::hardware::automotive::evs::V1_1::IEvsCameraStream
-    Return<void> deliverFrame_1_1(const BufferDesc_1_1& buffer) override;
-    Return<void> notify(const EvsEvent& event) override;
+    Return<void> deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffer) override;
+    Return<void> notify(const EvsEventDesc& event) override;
 
     // Local implementation details
     bool copyBufferContents(const BufferDesc_1_0& tgtBuffer, const BufferDesc_1_1& srcBuffer);
@@ -99,17 +100,18 @@
     // Since we get frames delivered to us asynchronously via the IEvsCameraStream interface,
     // we need to protect all member variables that may be modified while we're streaming
     // (ie: those below)
-    std::mutex                  mLock;
-    std::condition_variable     mEventSignal;
-    std::condition_variable     mFrameSignal;
+    std::mutex                            mLock;
+    std::mutex                            mEventLock;
+    std::condition_variable               mEventSignal;
+    std::condition_variable               mFrameSignal;
+    std::queue<hidl_vec<BufferDesc_1_1>>  mHeldBuffers;
 
-    std::queue<BufferDesc_1_1>  mHeldBuffers;
     bool                        mRunning = false;
     unsigned                    mFramesReceived = 0;    // Simple counter -- rolls over eventually!
     unsigned                    mFramesDisplayed = 0;   // Simple counter -- rolls over eventually!
     unsigned                    mFrameWidth = 0;
     unsigned                    mFrameHeight = 0;
-    EvsEvent                    mLatestEventDesc;
+    EvsEventDesc                mLatestEventDesc;
 };
 
 
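Note on the reworked waitForEvent() declared above: it now matches against a full EvsEventDesc, so a caller can either require specific payload values or pass ignorePayload = true to match on the event type alone. A hedged usage fragment mirroring how the VTS tests below drive it; frameHandler stands in for an sp<FrameHandler> owned by the test:

    // Wait for AUTO_FOCUS to be reported as switched off.
    EvsEventDesc target = {};
    target.aType = EvsEventType::PARAMETER_CHANGED;
    target.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
    target.payload[1] = 0;

    EvsEventDesc received = {};
    if (!frameHandler->waitForEvent(target, received)) {
        ALOGW("Timed out before the expected PARAMETER_CHANGED event arrived");
    }

    // Match on the event type only, ignoring the payload.
    target.aType = EvsEventType::MASTER_RELEASED;
    bool released = frameHandler->waitForEvent(target, received, /* ignorePayload = */ true);
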
diff --git a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
index 1d3fd87..4fc4e4c 100644
--- a/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
+++ b/automotive/evs/1.1/vts/functional/VtsHalEvsV1_1TargetTest.cpp
@@ -41,6 +41,8 @@
 #include <cstdio>
 #include <cstring>
 #include <cstdlib>
+#include <thread>
+#include <unordered_set>
 
 #include <hidl/HidlTransportSupport.h>
 #include <hwbinder/ProcessState.h>
@@ -67,6 +69,7 @@
 using ::android::hardware::hidl_handle;
 using ::android::hardware::hidl_string;
 using ::android::sp;
+using ::android::wp;
 using ::android::hardware::camera::device::V3_2::Stream;
 using ::android::hardware::automotive::evs::V1_0::DisplayDesc;
 using ::android::hardware::automotive::evs::V1_0::DisplayState;
@@ -117,7 +120,15 @@
         mIsHwModule = !service_name.compare(kEnumeratorName);
     }
 
-    virtual void TearDown() override {}
+    virtual void TearDown() override {
+        // Attempt to close any active camera
+        for (auto &&c : activeCameras) {
+            sp<IEvsCamera_1_1> cam = c.promote();
+            if (cam != nullptr) {
+                pEnumerator->closeCamera(cam);
+            }
+        }
+    }
 
 protected:
     void loadCameraList() {
@@ -141,10 +152,90 @@
         ASSERT_GE(cameraInfo.size(), 1u);
     }
 
-    sp<IEvsEnumerator>        pEnumerator;    // Every test needs access to the service
-    std::vector <CameraDesc>  cameraInfo;     // Empty unless/until loadCameraList() is called
-    bool                      mIsHwModule;    // boolean to tell current module under testing
-                                              // is HW module implementation.
+    bool isLogicalCamera(const camera_metadata_t *metadata) {
+        if (metadata == nullptr) {
+            // A logical camera device must have a valid camera metadata.
+            return false;
+        }
+
+        // Looking for LOGICAL_MULTI_CAMERA capability from metadata.
+        camera_metadata_ro_entry_t entry;
+        int rc = find_camera_metadata_ro_entry(metadata,
+                                               ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+                                               &entry);
+        if (0 != rc) {
+            // No capabilities are found.
+            return false;
+        }
+
+        for (size_t i = 0; i < entry.count; ++i) {
+            uint8_t cap = entry.data.u8[i];
+            if (cap == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA) {
+                return true;
+            }
+        }
+
+        return false;
+    }
+
+    std::unordered_set<std::string> getPhysicalCameraIds(const std::string& id,
+                                                         bool& flag) {
+        std::unordered_set<std::string> physicalCameras;
+
+        auto it = cameraInfo.begin();
+        while (it != cameraInfo.end()) {
+            if (it->v1.cameraId == id) {
+                break;
+            }
+            ++it;
+        }
+
+        if (it == cameraInfo.end()) {
+            // Unknown camera is requested.  Return an empty list.
+            return physicalCameras;
+        }
+
+        const camera_metadata_t *metadata =
+            reinterpret_cast<camera_metadata_t *>(&it->metadata[0]);
+        flag = isLogicalCamera(metadata);
+        if (!flag) {
+            // EVS assumes that a device without valid metadata is a physical
+            // device.
+            ALOGI("%s is not a logical camera device.", id.c_str());
+            physicalCameras.emplace(id);
+            return physicalCameras;
+        }
+
+        // Look for physical camera identifiers
+        camera_metadata_ro_entry entry;
+        int rc = find_camera_metadata_ro_entry(metadata,
+                                               ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
+                                               &entry);
+        ALOGE_IF(rc, "No physical camera ID is found for a logical camera device");
+
+        const uint8_t *ids = entry.data.u8;
+        size_t start = 0;
+        for (size_t i = 0; i < entry.count; ++i) {
+            if (ids[i] == '\0') {
+                if (start != i) {
+                    std::string id(reinterpret_cast<const char *>(ids + start));
+                    physicalCameras.emplace(id);
+                }
+                start = i + 1;
+            }
+        }
+
+        ALOGI("%s consists of %d physical camera devices.", id.c_str(), (int)physicalCameras.size());
+        return physicalCameras;
+    }
+
+
+    sp<IEvsEnumerator>              pEnumerator;   // Every test needs access to the service
+    std::vector<CameraDesc>         cameraInfo;    // Empty unless/until loadCameraList() is called
+    bool                            mIsHwModule;   // True if the module under test is a
+                                                   // HW module implementation.
+    std::deque<wp<IEvsCamera_1_1>>  activeCameras; // A list of active camera handles that
+                                                   // need to be cleaned up.
 };
 
 
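Note on getPhysicalCameraIds() above: it recovers the member devices of a logical camera by splitting the LOGICAL_MULTI_CAMERA_PHYSICAL_IDS entry on '\0' separators. A self-contained illustration of that split on a literal byte array; the device names are examples only:

    #include <cstdio>
    #include <string>
    #include <unordered_set>

    int main() {
        // Layout written by ConfigManager: identifiers separated by '\0';
        // the string literal supplies a trailing '\0' for the last one.
        const char ids[] = "/dev/video1\0/dev/video2";
        const size_t count = sizeof(ids);   // 24 bytes

        std::unordered_set<std::string> physicalCameras;
        size_t start = 0;
        for (size_t i = 0; i < count; ++i) {
            if (ids[i] == '\0') {
                if (start != i) {
                    physicalCameras.emplace(ids + start);
                }
                start = i + 1;
            }
        }

        printf("%zu physical camera devices\n", physicalCameras.size());   // prints 2
        return 0;
    }
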
@@ -168,12 +259,32 @@
 
     // Open and close each camera twice
     for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (mIsHwModule && isLogicalCam) {
+            ALOGI("Skip a logical device %s for HW module", cam.v1.cameraId.c_str());
+            continue;
+        }
+
         for (int pass = 0; pass < 2; pass++) {
+            activeCameras.clear();
             sp<IEvsCamera_1_1> pCam =
                 IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
                 .withDefault(nullptr);
             ASSERT_NE(pCam, nullptr);
 
+            for (auto&& devName : devices) {
+                bool matched = false;
+                pCam->getPhysicalCameraInfo(devName,
+                                            [&devName, &matched](const CameraDesc& info) {
+                                                matched = devName == info.v1.cameraId;
+                                            });
+                ASSERT_TRUE(matched);
+            }
+
+            // Store a camera handle for a clean-up
+            activeCameras.push_back(pCam);
+
             // Verify that this camera self-identifies correctly
             pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
                                         ALOGD("Found camera %s", desc.v1.cameraId.c_str());
@@ -206,11 +317,22 @@
 
     // Open and close each camera twice
     for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (mIsHwModule && isLogicalCam) {
+            ALOGI("Skip a logical device %s for HW module", cam.v1.cameraId.c_str());
+            continue;
+        }
+
+        activeCameras.clear();
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Verify that this camera self-identifies correctly
         pCam->getCameraInfo_1_1([&cam](CameraDesc desc) {
                                     ALOGD("Found camera %s", desc.v1.cameraId.c_str());
@@ -221,9 +343,13 @@
         sp<IEvsCamera_1_1> pCam2 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
-        ASSERT_NE(pCam, pCam2);
         ASSERT_NE(pCam2, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam2);
+
+        ASSERT_NE(pCam, pCam2);
+
         Return<EvsResult> result = pCam->setMaxFramesInFlight(2);
         if (mIsHwModule) {
             // Verify that the old camera rejects calls via HW module.
@@ -268,11 +394,22 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (mIsHwModule && isLogicalCam) {
+            ALOGI("Skip a logical device %s", cam.v1.cameraId.c_str());
+            continue;
+        }
+
+        activeCameras.clear();
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Set up a frame receiver object which will fire up its own thread
         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                          nullptr,
@@ -280,6 +417,7 @@
 
         // Start the camera's video stream
         nsecs_t start = systemTime(SYSTEM_TIME_MONOTONIC);
+
         bool startResult = frameHandler->startStream();
         ASSERT_TRUE(startResult);
 
@@ -287,9 +425,17 @@
         frameHandler->waitForFrameCount(1);
         nsecs_t firstFrame = systemTime(SYSTEM_TIME_MONOTONIC);
         nsecs_t timeToFirstFrame = systemTime(SYSTEM_TIME_MONOTONIC) - start;
-        EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame), kMaxStreamStartMilliseconds);
-        printf("Measured time to first frame %0.2f ms\n", timeToFirstFrame * kNanoToMilliseconds);
-        ALOGI("Measured time to first frame %0.2f ms", timeToFirstFrame * kNanoToMilliseconds);
+
+        // Extra delays are expected when we attempt to start a video stream on
+        // a logical camera device.  The delay is expected to be at most
+        // kMaxStreamStartMilliseconds multiplied by the number of physical
+        // camera devices.
+        EXPECT_LE(nanoseconds_to_milliseconds(timeToFirstFrame),
+                  kMaxStreamStartMilliseconds * devices.size());
+        printf("%s: Measured time to first frame %0.2f ms\n",
+               cam.v1.cameraId.c_str(), timeToFirstFrame * kNanoToMilliseconds);
+        ALOGI("%s: Measured time to first frame %0.2f ms",
+              cam.v1.cameraId.c_str(), timeToFirstFrame * kNanoToMilliseconds);
 
         // Check aspect ratio
         unsigned width = 0, height = 0;
@@ -299,6 +445,13 @@
         // Wait a bit, then ensure we get at least the required minimum number of frames
         sleep(5);
         nsecs_t end = systemTime(SYSTEM_TIME_MONOTONIC);
+
+        // Even when the camera pointer goes out of scope, the FrameHandler object will
+        // keep the stream alive unless we tell it to shut down.
+        // Also note that the FrameHandler and the Camera have a mutual circular reference, so
+        // we have to break that cycle in order for either of them to get cleaned up.
+        frameHandler->shutdown();
+
         unsigned framesReceived = 0;
         frameHandler->getFramesCounters(&framesReceived, nullptr);
         framesReceived = framesReceived - 1;    // Back out the first frame we already waited for
@@ -308,12 +461,6 @@
         ALOGI("Measured camera rate %3.2f fps", framesPerSecond);
         EXPECT_GE(framesPerSecond, kMinimumFramesPerSecond);
 
-        // Even when the camera pointer goes out of scope, the FrameHandler object will
-        // keep the stream alive unless we tell it to shutdown.
-        // Also note that the FrameHandle and the Camera have a mutual circular reference, so
-        // we have to break that cycle in order for either of them to get cleaned up.
-        frameHandler->shutdown();
-
         // Explicitly release the camera
         pEnumerator->closeCamera(pCam);
     }
@@ -340,12 +487,22 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (mIsHwModule && isLogicalCam) {
+            ALOGI("Skip a logical device %s for HW module", cam.v1.cameraId.c_str());
+            continue;
+        }
 
+        activeCameras.clear();
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Ask for a crazy number of buffers in flight to ensure it errors correctly
         Return<EvsResult> badResult = pCam->setMaxFramesInFlight(0xFFFFFFFF);
         EXPECT_EQ(EvsResult::BUFFER_NOT_AVAILABLE, badResult);
@@ -366,7 +523,7 @@
 
         // Check that the video stream stalls once we've gotten exactly the number of buffers
         // we requested since we told the frameHandler not to return them.
-        sleep(2);   // 1 second should be enough for at least 5 frames to be delivered worst case
+        sleep(1);   // 1 second should be enough for at least 5 frames to be delivered worst case
         unsigned framesReceived = 0;
         frameHandler->getFramesCounters(&framesReceived, nullptr);
         ASSERT_EQ(kBuffersToHold, framesReceived) << "Stream didn't stall at expected buffer limit";
@@ -416,11 +573,22 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (mIsHwModule && isLogicalCam) {
+            ALOGI("Skip a logical device %s for HW module", cam.v1.cameraId.c_str());
+            continue;
+        }
+
+        activeCameras.clear();
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Set up a frame receiver object which will fire up its own thread.
         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                          pDisplay,
@@ -484,17 +652,24 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCam0 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam0);
+
         sp<IEvsCamera_1_1> pCam1 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam1, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam1);
+
         // Set up per-client frame receiver objects which will fire up its own thread
         sp<FrameHandler> frameHandler0 = new FrameHandler(pCam0, cam,
                                                           nullptr,
@@ -554,6 +729,11 @@
         // Explicitly release the camera
         pEnumerator->closeCamera(pCam0);
         pEnumerator->closeCamera(pCam1);
+
+        // TODO(b/145459970, b/145457727): the sleep() below is added to ensure
+        // that active camera objects are destroyed; this may be related to
+        // these two issues.
+        sleep(1);
     }
 }
 
@@ -575,12 +755,25 @@
     // Test each reported camera
     Return<EvsResult> result = EvsResult::OK;
     for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (isLogicalCam) {
+            // TODO(b/145465724): Support camera parameter programming on
+            // logical devices.
+            ALOGI("Skip a logical device %s", cam.v1.cameraId.c_str());
+            continue;
+        }
+
+        activeCameras.clear();
         // Create a camera client
         sp<IEvsCamera_1_1> pCam =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Get the parameter list
         std::vector<CameraParam> cmds;
         pCam->getParameterList([&cmds](hidl_vec<CameraParam> cmdList) {
@@ -626,48 +819,54 @@
             EvsResult result = EvsResult::OK;
             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                 // Try to turn off auto-focus
-                int32_t val1 = 0;
-                pCam->getIntParameter(CameraParam::AUTO_FOCUS,
-                                   [&result, &val1](auto status, auto value) {
+                std::vector<int32_t> values;
+                pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                                   [&result, &values](auto status, auto effectiveValues) {
                                        result = status;
                                        if (status == EvsResult::OK) {
-                                          val1 = value;
+                                          for (auto &&v : effectiveValues) {
+                                              values.push_back(v);
+                                          }
                                        }
                                    });
-                if (val1 != 0) {
-                    pCam->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                       [&result, &val1](auto status, auto effectiveValue) {
-                                           result = status;
-                                           val1 = effectiveValue;
-                                       });
-                    ASSERT_EQ(EvsResult::OK, result);
-                    ASSERT_EQ(val1, 0);
+                ASSERT_EQ(EvsResult::OK, result);
+                for (auto &&v : values) {
+                    ASSERT_EQ(v, 0);
                 }
             }
 
             // Try to program a parameter with a random value [minVal, maxVal]
             int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
-            int32_t val1 = 0;
+            std::vector<int32_t> values;
 
             // Rounding down
             val0 = val0 - (val0 % step);
             pCam->setIntParameter(cmd, val0,
-                               [&result, &val1](auto status, auto effectiveValue) {
+                               [&result, &values](auto status, auto effectiveValues) {
                                    result = status;
-                                   val1 = effectiveValue;
+                                   if (status == EvsResult::OK) {
+                                      for (auto &&v : effectiveValues) {
+                                          values.push_back(v);
+                                      }
+                                   }
                                });
 
             ASSERT_EQ(EvsResult::OK, result);
 
+            values.clear();
             pCam->getIntParameter(cmd,
-                               [&result, &val1](auto status, auto value) {
+                               [&result, &values](auto status, auto readValues) {
                                    result = status;
                                    if (status == EvsResult::OK) {
-                                      val1 = value;
+                                      for (auto &&v : readValues) {
+                                          values.push_back(v);
+                                      }
                                    }
                                });
             ASSERT_EQ(EvsResult::OK, result);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
+            for (auto &&v : values) {
+                ASSERT_EQ(val0, v) << "Values are not matched.";
+            }
         }
 
         result = pCam->unsetMaster();
@@ -704,16 +903,33 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (isLogicalCam) {
+            // TODO(b/145465724): Support camera parameter programming on
+            // logical devices.
+            ALOGI("Skip a logical device %s", cam.v1.cameraId.c_str());
+            continue;
+        }
+
+        activeCameras.clear();
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCamMaster =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamMaster, nullptr);
+
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCamMaster);
+
         sp<IEvsCamera_1_1> pCamNonMaster =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamNonMaster, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCamNonMaster);
+
         // Set up per-client frame receiver objects which will fire up its own thread
         sp<FrameHandler> frameHandlerMaster =
             new FrameHandler(pCamMaster, cam,
@@ -750,13 +966,45 @@
 
         // Non-master client expects to receive a master role released
         // notification.
-        EvsEvent aNotification = {};
+        EvsEventDesc aTargetEvent  = {};
+        EvsEventDesc aNotification = {};
+
+        bool listening = false;
+        std::mutex eventLock;
+        std::condition_variable eventCond;
+        std::thread listener = std::thread(
+            [&aNotification, &frameHandlerNonMaster, &listening, &eventCond]() {
+                // Notify that a listening thread is running.
+                listening = true;
+                eventCond.notify_all();
+
+                EvsEventDesc aTargetEvent;
+                aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+                if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification, true)) {
+                    ALOGW("A timer is expired before a target event is fired.");
+                }
+
+            }
+        );
+
+        // Wait until a listening thread starts.
+        std::unique_lock<std::mutex> lock(eventLock);
+        auto timer = std::chrono::system_clock::now();
+        while (!listening) {
+            timer += 1s;
+            eventCond.wait_until(lock, timer);
+        }
+        lock.unlock();
 
         // Release a master role.
         pCamMaster->unsetMaster();
 
-        // Verify a change notification.
-        frameHandlerNonMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        // Join a listening thread.
+        if (listener.joinable()) {
+            listener.join();
+        }
+
+        // Verify change notifications.
         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
                   static_cast<EvsEventType>(aNotification.aType));
 
@@ -768,23 +1016,49 @@
         result = pCamMaster->setMaster();
         ASSERT_TRUE(result == EvsResult::OWNERSHIP_LOST);
 
+        listening = false;
+        listener = std::thread(
+            [&aNotification, &frameHandlerMaster, &listening, &eventCond]() {
+                // Notify that a listening thread is running.
+                listening = true;
+                eventCond.notify_all();
+
+                EvsEventDesc aTargetEvent;
+                aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+                if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification, true)) {
+                    ALOGW("A timer is expired before a target event is fired.");
+                }
+
+            }
+        );
+
+        // Wait until a listening thread starts.
+        timer = std::chrono::system_clock::now();
+        lock.lock();
+        while (!listening) {
+            eventCond.wait_until(lock, timer + 1s);
+        }
+        lock.unlock();
+
         // Closing current master client.
         frameHandlerNonMaster->shutdown();
 
-        // Verify a change notification.
-        frameHandlerMaster->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        // Join a listening thread.
+        if (listener.joinable()) {
+            listener.join();
+        }
+
+        // Verify change notifications.
         ASSERT_EQ(EvsEventType::MASTER_RELEASED,
                   static_cast<EvsEventType>(aNotification.aType));
 
-        // Closing another stream.
+        // Closing streams.
         frameHandlerMaster->shutdown();
 
         // Explicitly release the camera
         pEnumerator->closeCamera(pCamMaster);
         pEnumerator->closeCamera(pCamNonMaster);
     }
-
-
 }
 
 
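Note on the listener threads used above: the test spawns a thread that blocks in waitForEvent() and only issues the trigger (unsetMaster() or shutdown()) once the listener has announced it is running. A generic, self-contained sketch of that handshake; the real tests poll with wait_until() and a one-second timeout rather than a predicate wait:

    #include <condition_variable>
    #include <mutex>
    #include <thread>

    int main() {
        std::mutex lock;
        std::condition_variable cond;
        bool listening = false;

        std::thread listener([&]() {
            {
                std::lock_guard<std::mutex> guard(lock);
                listening = true;               // announce that the listener is up
            }
            cond.notify_all();
            // ... block here waiting for the notification under test ...
        });

        {
            std::unique_lock<std::mutex> guard(lock);
            cond.wait(guard, [&]() { return listening; });
        }
        // ... safe to trigger the event now, e.g. release the master role ...

        listener.join();
        return 0;
    }
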
@@ -810,16 +1084,33 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (isLogicalCam) {
+            // TODO(b/145465724): Support camera parameter programming on
+            // logical devices.
+            ALOGI("Skip a logical device %s", cam.v1.cameraId.c_str());
+            continue;
+        }
+
+        activeCameras.clear();
         // Create two camera clients.
         sp<IEvsCamera_1_1> pCamMaster =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamMaster, nullptr);
+
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCamMaster);
+
         sp<IEvsCamera_1_1> pCamNonMaster =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCamNonMaster, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCamNonMaster);
+
         // Get the parameter list
         std::vector<CameraParam> camMasterCmds, camNonMasterCmds;
         pCamMaster->getParameterList([&camMasterCmds](hidl_vec<CameraParam> cmdList) {
@@ -879,7 +1170,9 @@
         frameHandlerNonMaster->waitForFrameCount(1);
 
         int32_t val0 = 0;
-        int32_t val1 = 0;
+        std::vector<int32_t> values;
+        EvsEventDesc aNotification0 = {};
+        EvsEventDesc aNotification1 = {};
         for (auto &cmd : camMasterCmds) {
             // Get a valid parameter value range
             int32_t minVal, maxVal, step;
@@ -895,60 +1188,143 @@
             EvsResult result = EvsResult::OK;
             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                 // Try to turn off auto-focus
-                int32_t val1 = 1;
+                values.clear();
                 pCamMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                   [&result, &val1](auto status, auto effectiveValue) {
+                                   [&result, &values](auto status, auto effectiveValues) {
                                        result = status;
-                                       val1 = effectiveValue;
+                                       if (status == EvsResult::OK) {
+                                          for (auto &&v : effectiveValues) {
+                                              values.push_back(v);
+                                          }
+                                       }
                                    });
                 ASSERT_EQ(EvsResult::OK, result);
-                ASSERT_EQ(val1, 0);
+                for (auto &&v : values) {
+                    ASSERT_EQ(v, 0);
+                }
             }
 
-            // Try to program a parameter
+            // Calculate a parameter value to program.
             val0 = minVal + (std::rand() % (maxVal - minVal));
-
-            // Rounding down
             val0 = val0 - (val0 % step);
+
+            // Prepare and start event listeners.
+            bool listening0 = false;
+            bool listening1 = false;
+            std::condition_variable eventCond;
+            std::thread listener0 = std::thread(
+                [cmd, val0,
+                 &aNotification0, &frameHandlerMaster, &listening0, &listening1, &eventCond]() {
+                    listening0 = true;
+                    if (listening1) {
+                        eventCond.notify_all();
+                    }
+
+                    EvsEventDesc aTargetEvent;
+                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+                    aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+                    aTargetEvent.payload[1] = val0;
+                    if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification0)) {
+                        ALOGW("A timer is expired before a target event is fired.");
+                    }
+                }
+            );
+            std::thread listener1 = std::thread(
+                [cmd, val0,
+                 &aNotification1, &frameHandlerNonMaster, &listening0, &listening1, &eventCond]() {
+                    listening1 = true;
+                    if (listening0) {
+                        eventCond.notify_all();
+                    }
+
+                    EvsEventDesc aTargetEvent;
+                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+                    aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+                    aTargetEvent.payload[1] = val0;
+                    if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification1)) {
+                        ALOGW("A timer is expired before a target event is fired.");
+                    }
+                }
+            );
+
+            // Wait until both listening threads start.
+            std::mutex eventLock;
+            std::unique_lock<std::mutex> lock(eventLock);
+            auto timer = std::chrono::system_clock::now();
+            while (!listening0 || !listening1) {
+                eventCond.wait_until(lock, timer + 1s);
+            }
+            lock.unlock();
+
+            // Try to program a parameter
+            values.clear();
             pCamMaster->setIntParameter(cmd, val0,
-                                     [&result, &val1](auto status, auto effectiveValue) {
-                                         result = status;
-                                         val1 = effectiveValue;
-                                     });
-            ASSERT_EQ(EvsResult::OK, result);
-
-            // Wait a moment
-            sleep(1);
-
-            // Non-master client expects to receive a parameter change notification
-            // whenever a master client adjusts it.
-            EvsEvent aNotification = {};
-
-            pCamMaster->getIntParameter(cmd,
-                                     [&result, &val1](auto status, auto value) {
+                                     [&result, &values](auto status, auto effectiveValues) {
                                          result = status;
                                          if (status == EvsResult::OK) {
-                                            val1 = value;
+                                            for (auto &&v : effectiveValues) {
+                                                values.push_back(v);
+                                            }
+                                         }
+                                     });
+
+            ASSERT_EQ(EvsResult::OK, result);
+            for (auto &&v : values) {
+                ASSERT_EQ(val0, v) << "Values are not matched.";
+            }
+
+            // Join the listening threads.
+            if (listener0.joinable()) {
+                listener0.join();
+            }
+            if (listener1.joinable()) {
+                listener1.join();
+            }
+
+            // Verify the change notifications
+            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+                      static_cast<EvsEventType>(aNotification0.aType));
+            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+                      static_cast<EvsEventType>(aNotification1.aType));
+            ASSERT_EQ(cmd,
+                      static_cast<CameraParam>(aNotification0.payload[0]));
+            ASSERT_EQ(cmd,
+                      static_cast<CameraParam>(aNotification1.payload[0]));
+            for (auto &&v : values) {
+                ASSERT_EQ(v,
+                          static_cast<int32_t>(aNotification0.payload[1]));
+                ASSERT_EQ(v,
+                          static_cast<int32_t>(aNotification1.payload[1]));
+            }
+
+            // Clients expect to receive a parameter change notification
+            // whenever a master client adjusts a parameter.
+            values.clear();
+            pCamMaster->getIntParameter(cmd,
+                                     [&result, &values](auto status, auto readValues) {
+                                         result = status;
+                                         if (status == EvsResult::OK) {
+                                            for (auto &&v : readValues) {
+                                                values.push_back(v);
+                                            }
                                          }
                                      });
             ASSERT_EQ(EvsResult::OK, result);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
-
-            // Verify a change notification
-            frameHandlerNonMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
-            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
-                      static_cast<EvsEventType>(aNotification.aType));
-            ASSERT_EQ(cmd,
-                      static_cast<CameraParam>(aNotification.payload[0]));
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
+            for (auto &&v : values) {
+                ASSERT_EQ(val0, v) << "Values are not matched.";
+            }
         }
 
         // Try to adjust a parameter via non-master client
+        values.clear();
         pCamNonMaster->setIntParameter(camNonMasterCmds[0], val0,
-                                    [&result, &val1](auto status, auto effectiveValue) {
+                                    [&result, &values](auto status, auto effectiveValues) {
                                         result = status;
-                                        val1 = effectiveValue;
+                                        if (status == EvsResult::OK) {
+                                            for (auto &&v : effectiveValues) {
+                                                values.push_back(v);
+                                            }
+                                        }
                                     });
         ASSERT_EQ(EvsResult::INVALID_ARG, result);
 
@@ -957,14 +1333,48 @@
         ASSERT_EQ(EvsResult::OWNERSHIP_LOST, result);
 
         // Master client retires from a master role
+        bool listening = false;
+        std::condition_variable eventCond;
+        std::thread listener = std::thread(
+            [&aNotification0, &frameHandlerNonMaster, &listening, &eventCond]() {
+                listening = true;
+                eventCond.notify_all();
+
+                EvsEventDesc aTargetEvent;
+                aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+                if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification0, true)) {
+                    ALOGW("A timer is expired before a target event is fired.");
+                }
+            }
+        );
+
+        std::mutex eventLock;
+        auto timer = std::chrono::system_clock::now();
+        std::unique_lock<std::mutex> lock(eventLock);
+        while (!listening) {
+            eventCond.wait_until(lock, timer + 1s);
+        }
+        lock.unlock();
+
         result = pCamMaster->unsetMaster();
         ASSERT_EQ(EvsResult::OK, result);
 
+        if (listener.joinable()) {
+            listener.join();
+        }
+        ASSERT_EQ(EvsEventType::MASTER_RELEASED,
+                  static_cast<EvsEventType>(aNotification0.aType));
+
         // Try to adjust a parameter after being retired
+        values.clear();
         pCamMaster->setIntParameter(camMasterCmds[0], val0,
-                                 [&result, &val1](auto status, auto effectiveValue) {
+                                 [&result, &values](auto status, auto effectiveValues) {
                                      result = status;
-                                     val1 = effectiveValue;
+                                     if (status == EvsResult::OK) {
+                                        for (auto &&v : effectiveValues) {
+                                            values.push_back(v);
+                                        }
+                                     }
                                  });
         ASSERT_EQ(EvsResult::INVALID_ARG, result);
 
@@ -986,55 +1396,128 @@
             );
 
             EvsResult result = EvsResult::OK;
+            values.clear();
             if (cmd == CameraParam::ABSOLUTE_FOCUS) {
                 // Try to turn off auto-focus
-                int32_t val1 = 1;
+                values.clear();
                 pCamNonMaster->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                   [&result, &val1](auto status, auto effectiveValue) {
+                                   [&result, &values](auto status, auto effectiveValues) {
                                        result = status;
-                                       val1 = effectiveValue;
+                                       if (status == EvsResult::OK) {
+                                          for (auto &&v : effectiveValues) {
+                                              values.push_back(v);
+                                          }
+                                       }
                                    });
                 ASSERT_EQ(EvsResult::OK, result);
-                ASSERT_EQ(val1, 0);
+                for (auto &&v : values) {
+                    ASSERT_EQ(v, 0);
+                }
             }
 
-            // Try to program a parameter
+            // Calculate a parameter value to program, rounded down to a multiple of the step.
             val0 = minVal + (std::rand() % (maxVal - minVal));
-
-            // Rounding down
             val0 = val0 - (val0 % step);
+
+            // Prepare and start event listeners.
+            bool listening0 = false;
+            bool listening1 = false;
+            std::condition_variable eventCond;
+            std::thread listener0 = std::thread(
+                [&cmd, &val0, &aNotification0, &frameHandlerMaster, &listening0, &listening1, &eventCond]() {
+                    listening0 = true;
+                    if (listening1) {
+                        eventCond.notify_all();
+                    }
+
+                    EvsEventDesc aTargetEvent;
+                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+                    aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+                    aTargetEvent.payload[1] = val0;
+                    if (!frameHandlerMaster->waitForEvent(aTargetEvent, aNotification0)) {
+                        ALOGW("A timer is expired before a target event is fired.");
+                    }
+                }
+            );
+            std::thread listener1 = std::thread(
+                [&cmd, &val0, &aNotification1, &frameHandlerNonMaster, &listening0, &listening1, &eventCond]() {
+                    listening1 = true;
+                    if (listening0) {
+                        eventCond.notify_all();
+                    }
+
+                    EvsEventDesc aTargetEvent;
+                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+                    aTargetEvent.payload[0] = static_cast<uint32_t>(cmd);
+                    aTargetEvent.payload[1] = val0;
+                    if (!frameHandlerNonMaster->waitForEvent(aTargetEvent, aNotification1)) {
+                        ALOGW("A timer is expired before a target event is fired.");
+                    }
+                }
+            );
+
+            // Wait until both listening threads start.
+            std::mutex eventLock;
+            std::unique_lock<std::mutex> lock(eventLock);
+            auto timer = std::chrono::system_clock::now();
+            while (!listening0 || !listening1) {
+                eventCond.wait_until(lock, timer + 1s);
+            }
+            lock.unlock();
+
+            // Try to program a parameter
+            values.clear();
             pCamNonMaster->setIntParameter(cmd, val0,
-                                        [&result, &val1](auto status, auto effectiveValue) {
-                                            result = status;
-                                            val1 = effectiveValue;
-                                        });
-            ASSERT_EQ(EvsResult::OK, result);
-
-            // Wait a moment
-            sleep(1);
-
-            // Non-master client expects to receive a parameter change notification
-            // whenever a master client adjusts it.
-            EvsEvent aNotification = {};
-
-            pCamNonMaster->getIntParameter(cmd,
-                                        [&result, &val1](auto status, auto value) {
+                                        [&result, &values](auto status, auto effectiveValues) {
                                             result = status;
                                             if (status == EvsResult::OK) {
-                                               val1 = value;
+                                                for (auto &&v : effectiveValues) {
+                                                    values.push_back(v);
+                                                }
                                             }
                                         });
             ASSERT_EQ(EvsResult::OK, result);
-            ASSERT_EQ(val0, val1) << "Values are not matched.";
+
+            // Clients expect to receive a parameter change notification
+            // whenever a master client adjusts a parameter.
+            values.clear();
+            pCamNonMaster->getIntParameter(cmd,
+                                        [&result, &values](auto status, auto readValues) {
+                                            result = status;
+                                            if (status == EvsResult::OK) {
+                                                for (auto &&v : readValues) {
+                                                    values.push_back(v);
+                                                }
+                                            }
+                                        });
+            ASSERT_EQ(EvsResult::OK, result);
+            for (auto &&v : values) {
+                ASSERT_EQ(val0, v) << "Values are not matched.";
+            }
+
+            // Join the listening threads.
+            if (listener0.joinable()) {
+                listener0.join();
+            }
+            if (listener1.joinable()) {
+                listener1.join();
+            }
 
             // Verify a change notification
-            frameHandlerMaster->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
             ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
-                      static_cast<EvsEventType>(aNotification.aType));
+                      static_cast<EvsEventType>(aNotification0.aType));
+            ASSERT_EQ(EvsEventType::PARAMETER_CHANGED,
+                      static_cast<EvsEventType>(aNotification1.aType));
             ASSERT_EQ(cmd,
-                      static_cast<CameraParam>(aNotification.payload[0]));
-            ASSERT_EQ(val1,
-                      static_cast<int32_t>(aNotification.payload[1]));
+                      static_cast<CameraParam>(aNotification0.payload[0]));
+            ASSERT_EQ(cmd,
+                      static_cast<CameraParam>(aNotification1.payload[0]));
+            for (auto &&v : values) {
+                ASSERT_EQ(v,
+                          static_cast<int32_t>(aNotification0.payload[1]));
+                ASSERT_EQ(v,
+                          static_cast<int32_t>(aNotification1.payload[1]));
+            }
         }
 
         // New master retires from a master role
@@ -1078,17 +1561,25 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
+
         // Create two clients
         sp<IEvsCamera_1_1> pCam0 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam0);
+
         sp<IEvsCamera_1_1> pCam1 =
             IEvsCamera_1_1::castFrom(pEnumerator->openCamera_1_1(cam.v1.cameraId, nullCfg))
             .withDefault(nullptr);
         ASSERT_NE(pCam1, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam1);
+
         // Get the parameter list; this test will use the first command in both
         // lists.
         std::vector<CameraParam> cam0Cmds, cam1Cmds;
@@ -1144,108 +1635,260 @@
             }
         );
 
-        if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
-            // Try to turn off auto-focus
-            int32_t val1 = 0;
-            pCam1->getIntParameter(CameraParam::AUTO_FOCUS,
-                               [&result, &val1](auto status, auto value) {
-                                   result = status;
-                                   if (status == EvsResult::OK) {
-                                      val1 = value;
-                                   }
-                               });
-            if (val1 != 0) {
-                pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                   [&result, &val1](auto status, auto effectiveValue) {
-                                       result = status;
-                                       val1 = effectiveValue;
-                                   });
-                ASSERT_EQ(EvsResult::OK, result);
-                ASSERT_EQ(val1, 0);
-            }
-        }
-
-        // Try to program a parameter with a random value [minVal, maxVal]
-        int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
-        int32_t val1 = 0;
-
-        // Rounding down
-        val0 = val0 - (val0 % step);
-
+        // Client1 becomes a master
         result = pCam1->setMaster();
         ASSERT_EQ(EvsResult::OK, result);
 
+        std::vector<int32_t> values;
+        EvsEventDesc aTargetEvent  = {};
+        EvsEventDesc aNotification = {};
+        bool listening = false;
+        std::mutex eventLock;
+        std::condition_variable eventCond;
+        if (cam1Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
+            std::thread listener = std::thread(
+                [&frameHandler0, &aNotification, &listening, &eventCond] {
+                    listening = true;
+                    eventCond.notify_all();
+
+                    EvsEventDesc aTargetEvent;
+                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+                    aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
+                    aTargetEvent.payload[1] = 0;
+                    if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
+                        ALOGW("A timer is expired before a target event is fired.");
+                    }
+                }
+            );
+
+            // Wait until a listener starts.
+            std::unique_lock<std::mutex> lock(eventLock);
+            auto timer = std::chrono::system_clock::now();
+            while (!listening) {
+                eventCond.wait_until(lock, timer + 1s);
+            }
+            lock.unlock();
+
+            // Try to turn off auto-focus
+            pCam1->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                               [&result, &values](auto status, auto effectiveValues) {
+                                   result = status;
+                                   if (status == EvsResult::OK) {
+                                      for (auto &&v : effectiveValues) {
+                                          values.push_back(v);
+                                      }
+                                   }
+                               });
+            ASSERT_EQ(EvsResult::OK, result);
+            for (auto &&v : values) {
+                ASSERT_EQ(v, 0);
+            }
+
+            // Join a listener
+            if (listener.joinable()) {
+                listener.join();
+            }
+
+            // Make sure AUTO_FOCUS is off.
+            ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+                      EvsEventType::PARAMETER_CHANGED);
+        }
+
+        // Try to program a parameter with a random value [minVal, maxVal] after
+        // rounding it down to a multiple of the step.
+        int32_t val0 = minVal + (std::rand() % (maxVal - minVal));
+        val0 = val0 - (val0 % step);
+
+        std::thread listener = std::thread(
+            [&frameHandler1, &aNotification, &listening, &eventCond, &cam1Cmds, val0] {
+                listening = true;
+                eventCond.notify_all();
+
+                EvsEventDesc aTargetEvent;
+                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+                aTargetEvent.payload[0] = static_cast<uint32_t>(cam1Cmds[0]);
+                aTargetEvent.payload[1] = val0;
+                if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
+                    ALOGW("A timer is expired before a target event is fired.");
+                }
+            }
+        );
+
+        // Wait until a listener starts.
+        listening = false;
+        std::unique_lock<std::mutex> lock(eventLock);
+        auto timer = std::chrono::system_clock::now();
+        while (!listening) {
+            eventCond.wait_until(lock, timer + 1s);
+        }
+        lock.unlock();
+
+        values.clear();
         pCam1->setIntParameter(cam1Cmds[0], val0,
-                            [&result, &val1](auto status, auto effectiveValue) {
+                            [&result, &values](auto status, auto effectiveValues) {
                                 result = status;
-                                val1 = effectiveValue;
+                                if (status == EvsResult::OK) {
+                                    for (auto &&v : effectiveValues) {
+                                        values.push_back(v);
+                                    }
+                                }
                             });
         ASSERT_EQ(EvsResult::OK, result);
+        for (auto &&v : values) {
+            ASSERT_EQ(val0, v);
+        }
+
+        // Join a listener
+        if (listener.joinable()) {
+            listener.join();
+        }
 
         // Verify a change notification
-        EvsEvent aNotification = {};
-        bool timeout =
-            frameHandler0->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
-        ASSERT_FALSE(timeout) << "Expected event does not arrive";
         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                   EvsEventType::PARAMETER_CHANGED);
         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
                   cam1Cmds[0]);
-        ASSERT_EQ(val1,
-                  static_cast<int32_t>(aNotification.payload[1]));
+        for (auto &&v : values) {
+            ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
+        }
+
+        listener = std::thread(
+            [&frameHandler1, &aNotification, &listening, &eventCond] {
+                listening = true;
+                eventCond.notify_all();
+
+                EvsEventDesc aTargetEvent;
+                aTargetEvent.aType = EvsEventType::MASTER_RELEASED;
+                if (!frameHandler1->waitForEvent(aTargetEvent, aNotification, true)) {
+                    ALOGW("A timer is expired before a target event is fired.");
+                }
+            }
+        );
+
+        // Wait until a listener starts.
+        listening = false;
+        lock.lock();
+        timer = std::chrono::system_clock::now();
+        while (!listening) {
+            eventCond.wait_until(lock, timer + 1s);
+        }
+        lock.unlock();
 
         // Client 0 steals a master role
         ASSERT_EQ(EvsResult::OK, pCam0->forceMaster(pDisplay));
 
-        frameHandler1->waitForEvent(EvsEventType::MASTER_RELEASED, aNotification);
+        // Join a listener
+        if (listener.joinable()) {
+            listener.join();
+        }
+
         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                   EvsEventType::MASTER_RELEASED);
 
         // Client 0 programs a parameter
         val0 = minVal + (std::rand() % (maxVal - minVal));
-        val1 = 0;
 
         // Rounding down
         val0 = val0 - (val0 % step);
 
         if (cam0Cmds[0] == CameraParam::ABSOLUTE_FOCUS) {
+            std::thread listener = std::thread(
+                [&frameHandler1, &aNotification, &listening, &eventCond] {
+                    listening = true;
+                    eventCond.notify_all();
+
+                    EvsEventDesc aTargetEvent;
+                    aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+                    aTargetEvent.payload[0] = static_cast<uint32_t>(CameraParam::AUTO_FOCUS);
+                    aTargetEvent.payload[1] = 0;
+                    if (!frameHandler1->waitForEvent(aTargetEvent, aNotification)) {
+                        ALOGW("A timer is expired before a target event is fired.");
+                    }
+                }
+            );
+
+            // Wait until a listener starts.
+            std::unique_lock<std::mutex> lock(eventLock);
+            auto timer = std::chrono::system_clock::now();
+            while (!listening) {
+                eventCond.wait_until(lock, timer + 1s);
+            }
+            lock.unlock();
+
             // Try to turn off auto-focus
-            int32_t val1 = 0;
-            pCam0->getIntParameter(CameraParam::AUTO_FOCUS,
-                               [&result, &val1](auto status, auto value) {
+            values.clear();
+            pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
+                               [&result, &values](auto status, auto effectiveValues) {
                                    result = status;
                                    if (status == EvsResult::OK) {
-                                      val1 = value;
+                                      for (auto &&v : effectiveValues) {
+                                          values.push_back(v);
+                                      }
                                    }
                                });
-            if (val1 != 0) {
-                pCam0->setIntParameter(CameraParam::AUTO_FOCUS, 0,
-                                   [&result, &val1](auto status, auto effectiveValue) {
-                                       result = status;
-                                       val1 = effectiveValue;
-                                   });
-                ASSERT_EQ(EvsResult::OK, result);
-                ASSERT_EQ(val1, 0);
+            ASSERT_EQ(EvsResult::OK, result);
+            for (auto &&v : values) {
+                ASSERT_EQ(v, 0);
             }
+
+            // Join a listener
+            if (listener.joinable()) {
+                listener.join();
+            }
+
+            // Make sure AUTO_FOCUS is off.
+            ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
+                      EvsEventType::PARAMETER_CHANGED);
         }
 
+        listener = std::thread(
+            [&frameHandler0, &aNotification, &listening, &eventCond, &cam0Cmds, val0] {
+                listening = true;
+                eventCond.notify_all();
+
+                EvsEventDesc aTargetEvent;
+                aTargetEvent.aType = EvsEventType::PARAMETER_CHANGED;
+                aTargetEvent.payload[0] = static_cast<uint32_t>(cam0Cmds[0]);
+                aTargetEvent.payload[1] = val0;
+                if (!frameHandler0->waitForEvent(aTargetEvent, aNotification)) {
+                    ALOGW("A timer is expired before a target event is fired.");
+                }
+            }
+        );
+
+        // Wait until a listener starts.
+        listening = false;
+        timer = std::chrono::system_clock::now();
+        lock.lock();
+        while (!listening) {
+            eventCond.wait_until(lock, timer + 1s);
+        }
+        lock.unlock();
+
+        values.clear();
         pCam0->setIntParameter(cam0Cmds[0], val0,
-                            [&result, &val1](auto status, auto effectiveValue) {
+                            [&result, &values](auto status, auto effectiveValues) {
                                 result = status;
-                                val1 = effectiveValue;
+                                if (status == EvsResult::OK) {
+                                    for (auto &&v : effectiveValues) {
+                                        values.push_back(v);
+                                    }
+                                }
                             });
         ASSERT_EQ(EvsResult::OK, result);
 
+        // Join a listener
+        if (listener.joinable()) {
+            listener.join();
+        }
         // Verify a change notification
-        timeout =
-            frameHandler1->waitForEvent(EvsEventType::PARAMETER_CHANGED, aNotification);
-        ASSERT_FALSE(timeout) << "Expected event does not arrive";
         ASSERT_EQ(static_cast<EvsEventType>(aNotification.aType),
                   EvsEventType::PARAMETER_CHANGED);
         ASSERT_EQ(static_cast<CameraParam>(aNotification.payload[0]),
                   cam0Cmds[0]);
-        ASSERT_EQ(val1,
-                  static_cast<int32_t>(aNotification.payload[1]));
+        for (auto &&v : values) {
+            ASSERT_EQ(v, static_cast<int32_t>(aNotification.payload[1]));
+        }
 
         // Turn off the display (yes, before the stream stops -- it should be handled)
         pDisplay->setDisplayState(DisplayState::NOT_VISIBLE);
@@ -1282,6 +1925,7 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // choose a configuration that has a frame rate faster than minReqFps.
         Stream targetCfg = {};
         const int32_t minReqFps = 15;
@@ -1324,6 +1968,9 @@
             .withDefault(nullptr);
         ASSERT_NE(pCam, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam);
+
         // Set up a frame receiver object which will fire up its own thread.
         sp<FrameHandler> frameHandler = new FrameHandler(pCam, cam,
                                                          pDisplay,
@@ -1383,6 +2030,7 @@
 
     // Test each reported camera
     for (auto&& cam: cameraInfo) {
+        activeCameras.clear();
         // choose a configuration that has a frame rate faster than minReqFps.
         Stream targetCfg = {};
         const int32_t minReqFps = 15;
@@ -1427,6 +2075,9 @@
             .withDefault(nullptr);
         ASSERT_NE(pCam0, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam0);
+
         // Try to create the second camera client with different stream
         // configuration.
         int32_t id = targetCfg.id;
@@ -1436,6 +2087,9 @@
             .withDefault(nullptr);
         ASSERT_EQ(pCam1, nullptr);
 
+        // Store a camera handle for a clean-up
+        activeCameras.push_back(pCam0);
+
         // Try again with same stream configuration.
         targetCfg.id = id;
         pCam1 =
@@ -1506,6 +2160,30 @@
 }
 
 
+/*
+ * LogicalCameraMetadata:
+ * Verifies the metadata of each logical camera reported by the enumerator by
+ * checking its capabilities and locating the physical camera device
+ * identifiers that back it.
+ */
+TEST_F(EvsHidlTest, LogicalCameraMetadata) {
+    ALOGI("Starting LogicalCameraMetadata test");
+
+    // Get the camera list
+    loadCameraList();
+
+    // Validate the metadata of each reported camera
+    for (auto&& cam: cameraInfo) {
+        bool isLogicalCam = false;
+        auto devices = getPhysicalCameraIds(cam.v1.cameraId, isLogicalCam);
+        if (isLogicalCam) {
+            ASSERT_GE(devices.size(), 1) <<
+                "Logical camera device must have at least one physical camera device ID in its metadata.";
+        }
+    }
+}
+
+
 int main(int argc, char** argv) {
     ::testing::AddGlobalTestEnvironment(EvsHidlEnvironment::Instance());
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/keymaster/4.0/vts/functional/keymaster_hidl_hal_test.cpp b/keymaster/4.0/vts/functional/keymaster_hidl_hal_test.cpp
index 194c438..66132ad 100644
--- a/keymaster/4.0/vts/functional/keymaster_hidl_hal_test.cpp
+++ b/keymaster/4.0/vts/functional/keymaster_hidl_hal_test.cpp
@@ -2607,8 +2607,10 @@
                                    .Padding(PaddingMode::NONE));
     ASSERT_EQ(ErrorCode::OK, err) << "Got " << err;
 
-    err = Begin(KeyPurpose::DECRYPT,
-                AuthorizationSetBuilder().BlockMode(BlockMode::GCM).Padding(PaddingMode::NONE));
+    err = Begin(KeyPurpose::DECRYPT, AuthorizationSetBuilder()
+                                             .BlockMode(BlockMode::GCM)
+                                             .Padding(PaddingMode::NONE)
+                                             .Authorization(TAG_MAC_LENGTH, 128));
     EXPECT_EQ(ErrorCode::INCOMPATIBLE_PURPOSE, err) << "Got " << err;
 
     CheckedDeleteKey();
@@ -2621,8 +2623,10 @@
                                                  .Authorization(TAG_MIN_MAC_LENGTH, 128)
                                                  .Padding(PaddingMode::NONE)));
 
-    err = Begin(KeyPurpose::ENCRYPT,
-                AuthorizationSetBuilder().BlockMode(BlockMode::GCM).Padding(PaddingMode::NONE));
+    err = Begin(KeyPurpose::ENCRYPT, AuthorizationSetBuilder()
+                                             .BlockMode(BlockMode::GCM)
+                                             .Padding(PaddingMode::NONE)
+                                             .Authorization(TAG_MAC_LENGTH, 128));
     EXPECT_EQ(ErrorCode::INCOMPATIBLE_PURPOSE, err) << "Got " << err;
 }
 
diff --git a/neuralnetworks/1.3/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/1.3/vts/functional/GeneratedTestHarness.cpp
index be894f2..e3c5376 100644
--- a/neuralnetworks/1.3/vts/functional/GeneratedTestHarness.cpp
+++ b/neuralnetworks/1.3/vts/functional/GeneratedTestHarness.cpp
@@ -452,7 +452,7 @@
             EvaluatePreparedModel(preparedModel, testModel, TestKind::DYNAMIC_SHAPE);
         } break;
         case TestKind::QUANTIZATION_COUPLING: {
-            ASSERT_TRUE(testModel.hasQuant8AsymmOperands());
+            ASSERT_TRUE(testModel.hasQuant8CoupledOperands());
             createPreparedModel(device, model, &preparedModel, /*reportSkipping*/ false);
             TestModel signedQuantizedModel = convertQuant8AsymmOperandsToSigned(testModel);
             sp<IPreparedModel> preparedCoupledModel;
@@ -521,7 +521,7 @@
                            [](const TestModel& testModel) { return !testModel.expectFailure; });
 
 INSTANTIATE_GENERATED_TEST(DISABLED_QuantizationCouplingTest, [](const TestModel& testModel) {
-    return testModel.hasQuant8AsymmOperands() && testModel.operations.size() == 1;
+    return testModel.hasQuant8CoupledOperands() && testModel.operations.size() == 1;
 });
 
 }  // namespace android::hardware::neuralnetworks::V1_3::vts::functional
diff --git a/neuralnetworks/1.3/vts/functional/ValidateModel.cpp b/neuralnetworks/1.3/vts/functional/ValidateModel.cpp
index 65880b7..14ab897 100644
--- a/neuralnetworks/1.3/vts/functional/ValidateModel.cpp
+++ b/neuralnetworks/1.3/vts/functional/ValidateModel.cpp
@@ -330,6 +330,8 @@
         // - DEPTHWISE_CONV_2D filter type (arg 1) can be QUANT8_ASYMM or QUANT8_SYMM_PER_CHANNEL
         // - GROUPED_CONV_2D filter type (arg 1) can be QUANT8_ASYMM or QUANT8_SYMM_PER_CHANNEL
         // - TRANSPOSE_CONV_2D filter type (arg 1) can be QUANT8_ASYMM or QUANT8_SYMM_PER_CHANNEL
+        // - AXIS_ALIGNED_BBOX_TRANSFORM bounding boxes (arg 1) can be of
+        //     TENSOR_QUANT8_ASYMM or TENSOR_QUANT8_ASYMM_SIGNED.
         switch (operation.type) {
             case OperationType::LSH_PROJECTION: {
                 if (operand == operation.inputs[1]) {
@@ -385,6 +387,13 @@
                     return true;
                 }
             } break;
+            case OperationType::AXIS_ALIGNED_BBOX_TRANSFORM: {
+                if (operand == operation.inputs[1] &&
+                    (type == OperandType::TENSOR_QUANT8_ASYMM ||
+                     type == OperandType::TENSOR_QUANT8_ASYMM_SIGNED)) {
+                    return true;
+                }
+            } break;
             default:
                 break;
         }
diff --git a/rebootescrow/aidl/default/HadamardUtils.cpp b/rebootescrow/aidl/default/HadamardUtils.cpp
index 8ee77e1..d2422b9 100644
--- a/rebootescrow/aidl/default/HadamardUtils.cpp
+++ b/rebootescrow/aidl/default/HadamardUtils.cpp
@@ -26,14 +26,18 @@
 namespace rebootescrow {
 namespace hadamard {
 
-static inline void or_bit(std::vector<uint8_t>* input, size_t bit, uint8_t val) {
-    (*input)[bit >> 3] |= (val & 1u) << (bit & 7);
-}
-
 static inline uint8_t read_bit(const std::vector<uint8_t>& input, size_t bit) {
     return (input[bit >> 3] >> (bit & 7)) & 1u;
 }
 
+// Use a simple LCG which is easy to run in reverse.
+// https://www.johndcook.com/blog/2017/07/05/simple-random-number-generator/
+constexpr uint64_t RNG_MODULUS = 0x7fffffff;
+constexpr uint64_t RNG_MUL = 742938285;
+constexpr uint64_t RNG_SEED = 20170705;
+constexpr uint64_t RNG_INV_MUL = 1413043504;   // (mul * inv_mul) % modulus == 1
+constexpr uint64_t RNG_INV_SEED = 1173538311;  // (seed * mul**65534) % modulus
+
 // Apply an error correcting encoding.
 //
 // The error correcting code used is an augmented Hadamard code with
@@ -45,21 +49,45 @@
 // codewords. Thus if a single 512-byte DRAM line is lost, instead of losing
 // 2^11 bits from the encoding of a single code word, we lose 2^7 bits
 // from the encoding of each of the 16 codewords.
+// In addition we apply a Fisher-Yates shuffle to the bytes of the encoding;
+// Hadamard encoding recovers much better from random errors than systematic
+// ones, and this ensures that errors will be random.
 std::vector<uint8_t> EncodeKey(const std::vector<uint8_t>& input) {
     CHECK_EQ(input.size(), KEY_SIZE_IN_BYTES);
     std::vector<uint8_t> result(OUTPUT_SIZE_BYTES, 0);
     static_assert(OUTPUT_SIZE_BYTES == 64 * 1024);
-    for (size_t i = 0; i < KEY_CODEWORDS; i++) {
-        uint16_t word = input[i * 2 + 1] << 8 | input[i * 2];
-        for (size_t j = 0; j < ENCODE_LENGTH; j++) {
-            uint16_t wi = word & (j + ENCODE_LENGTH);
-            // Sum all the bits in the word and check its parity.
-            wi ^= wi >> 8u;
-            wi ^= wi >> 4u;
-            wi ^= wi >> 2u;
-            wi ^= wi >> 1u;
-            or_bit(&result, (j * KEY_CODEWORDS) + i, wi & 1);
+    // Transpose the key so that each row contains one bit from each codeword
+    uint16_t wordmatrix[CODEWORD_BITS];
+    for (size_t i = 0; i < CODEWORD_BITS; i++) {
+        uint16_t word = 0;
+        for (size_t j = 0; j < KEY_CODEWORDS; j++) {
+            word |= read_bit(input, i + j * CODEWORD_BITS) << j;
         }
+        wordmatrix[i] = word;
+    }
+    // Fill in the encodings in Gray code order for speed.
+    uint16_t val = wordmatrix[CODEWORD_BITS - 1];
+    size_t ix = 0;
+    for (size_t i = 0; i < ENCODE_LENGTH; i++) {
+        for (size_t b = 0; b < CODEWORD_BITS; b++) {
+            if (i & (1 << b)) {
+                ix ^= (1 << b);
+                val ^= wordmatrix[b];
+                break;
+            }
+        }
+        result[ix * KEY_CODEWORD_BYTES] = val & 0xffu;
+        result[ix * KEY_CODEWORD_BYTES + 1] = val >> 8u;
+    }
+    // Apply the inverse shuffle here; we apply the forward shuffle in decoding.
+    uint64_t rng_state = RNG_INV_SEED;
+    for (size_t i = OUTPUT_SIZE_BYTES - 1; i > 0; i--) {
+        auto j = rng_state % (i + 1);
+        auto t = result[i];
+        result[i] = result[j];
+        result[j] = t;
+        rng_state *= RNG_INV_MUL;
+        rng_state %= RNG_MODULUS;
     }
     return result;
 }
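
The Gray-code walk in EncodeKey() is easiest to check against the removed per-bit loop for a single codeword: toggling the row that corresponds to the lowest set bit of i keeps val equal to parity(word & (ix | 1 << CODE_K)), the same augmented-Hadamard bit as the direct definition, while visiting every index exactly once. A standalone sketch under that reading (not part of the patch; the 0xBEEF codeword and the reduced single-word setup are illustrative):

    #include <bitset>
    #include <cassert>
    #include <cstdint>

    int main() {
        constexpr unsigned K = 15;            // CODE_K
        constexpr unsigned LEN = 1u << K;     // ENCODE_LENGTH
        const uint16_t word = 0xBEEF;         // arbitrary 16-bit test codeword

        // Direct augmented-Hadamard definition: bit(ix) = parity(word & (ix | 1<<K)).
        auto direct = [&](uint32_t ix) -> unsigned {
            return std::bitset<16>(word & (ix | (1u << K))).count() & 1u;
        };

        // Incremental Gray-code walk, mirroring the inner loop of EncodeKey():
        // toggling the lowest set bit of i toggles one row's contribution to val.
        uint32_t ix = 0;
        unsigned val = (word >> K) & 1u;      // the 1<<K row is always included
        for (uint32_t i = 0; i < LEN; ++i) {
            for (unsigned b = 0; b < K; ++b) {
                if (i & (1u << b)) {
                    ix ^= 1u << b;
                    val ^= (word >> b) & 1u;
                    break;
                }
            }
            assert(val == direct(ix));        // same bit as the direct definition
        }
        return 0;
    }
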
@@ -106,8 +134,19 @@
     return winner;
 }
 
-std::vector<uint8_t> DecodeKey(const std::vector<uint8_t>& encoded) {
-    CHECK_EQ(OUTPUT_SIZE_BYTES, encoded.size());
+std::vector<uint8_t> DecodeKey(const std::vector<uint8_t>& shuffled) {
+    CHECK_EQ(OUTPUT_SIZE_BYTES, shuffled.size());
+    // Apply the forward Fisher-Yates shuffle.
+    std::vector<uint8_t> encoded(OUTPUT_SIZE_BYTES, 0);
+    encoded[0] = shuffled[0];
+    uint64_t rng_state = RNG_SEED;
+    for (size_t i = 1; i < OUTPUT_SIZE_BYTES; i++) {
+        auto j = rng_state % (i + 1);
+        encoded[i] = encoded[j];
+        encoded[j] = shuffled[i];
+        rng_state *= RNG_MUL;
+        rng_state %= RNG_MODULUS;
+    }
     std::vector<uint8_t> result(KEY_SIZE_IN_BYTES, 0);
     for (size_t i = 0; i < KEY_CODEWORDS; i++) {
         uint16_t val = DecodeWord(i, encoded);
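
The two shuffles are intended to be exact inverses: EncodeKey() swaps from the top down while stepping the LCG with RNG_INV_MUL from RNG_INV_SEED, and DecodeKey() rebuilds the buffer from the bottom up while stepping with RNG_MUL from RNG_SEED, so both loops draw the same index j at each position i. A standalone round-trip sketch of that relationship (not part of the patch; N is shrunk from OUTPUT_SIZE_BYTES and the inverse seed is recomputed for that N):

    #include <cassert>
    #include <cstdint>
    #include <numeric>
    #include <utility>
    #include <vector>

    int main() {
        constexpr uint64_t MOD = 0x7fffffff;       // RNG_MODULUS
        constexpr uint64_t MUL = 742938285;        // RNG_MUL
        constexpr uint64_t INV_MUL = 1413043504;   // RNG_INV_MUL
        constexpr size_t N = 4096;                 // stand-in for OUTPUT_SIZE_BYTES

        // For a buffer of N bytes the seeds must satisfy
        // inv_seed == seed * MUL^(N-2) % MOD, so the encode-side loop walks the
        // decode-side RNG sequence backwards.
        const uint64_t seed = 20170705;            // RNG_SEED
        uint64_t inv_seed = seed;
        for (size_t i = 0; i + 2 < N; ++i) inv_seed = inv_seed * MUL % MOD;

        std::vector<uint8_t> original(N);
        std::iota(original.begin(), original.end(), uint8_t{0});

        // Encode side: inverse Fisher-Yates shuffle, as in EncodeKey().
        std::vector<uint8_t> shuffled = original;
        uint64_t state = inv_seed;
        for (size_t i = N - 1; i > 0; --i) {
            std::swap(shuffled[i], shuffled[state % (i + 1)]);
            state = state * INV_MUL % MOD;
        }

        // Decode side: forward ("inside-out") Fisher-Yates, as in DecodeKey().
        std::vector<uint8_t> decoded(N);
        decoded[0] = shuffled[0];
        state = seed;
        for (size_t i = 1; i < N; ++i) {
            const size_t j = state % (i + 1);
            decoded[i] = decoded[j];
            decoded[j] = shuffled[i];
            state = state * MUL % MOD;
        }

        // The same per-index RNG draws are used in both loops, so the forward
        // pass reconstructs the original buffer exactly.
        assert(decoded == original);
        return 0;
    }
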
diff --git a/rebootescrow/aidl/default/HadamardUtils.h b/rebootescrow/aidl/default/HadamardUtils.h
index 85e635f..e04f7d5 100644
--- a/rebootescrow/aidl/default/HadamardUtils.h
+++ b/rebootescrow/aidl/default/HadamardUtils.h
@@ -31,9 +31,10 @@
 constexpr auto CODEWORD_BITS = CODEWORD_BYTES * BYTE_LENGTH;
 constexpr uint32_t CODE_K = CODEWORD_BITS - 1;
 constexpr uint32_t ENCODE_LENGTH = 1u << CODE_K;
-constexpr auto KEY_CODEWORDS = 16u;
+constexpr auto KEY_CODEWORD_BYTES = 2u;  // uint16_t (after transpose)
+constexpr auto KEY_CODEWORDS = KEY_CODEWORD_BYTES * BYTE_LENGTH;
 constexpr auto KEY_SIZE_IN_BYTES = KEY_CODEWORDS * CODEWORD_BYTES;
-constexpr auto OUTPUT_SIZE_BYTES = KEY_CODEWORDS * ENCODE_LENGTH / BYTE_LENGTH;
+constexpr auto OUTPUT_SIZE_BYTES = ENCODE_LENGTH * KEY_CODEWORD_BYTES;
 
 // Encodes a key that has a size of KEY_SIZE_IN_BYTES. Returns a byte array representation of the
 // encoded bitset. So a 32 bytes key will expand to 16*(2^15) bits = 64KiB.
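
A quick standalone check of the size arithmetic in that comment (not part of the patch; BYTE_LENGTH == 8 and CODEWORD_BYTES == 2 are assumed here, as implied by the 2^15 and 64KiB figures):

    constexpr unsigned BYTE_LENGTH = 8;
    constexpr unsigned CODEWORD_BYTES = 2;
    constexpr unsigned CODEWORD_BITS = CODEWORD_BYTES * BYTE_LENGTH;        // 16
    constexpr unsigned CODE_K = CODEWORD_BITS - 1;                          // 15
    constexpr unsigned ENCODE_LENGTH = 1u << CODE_K;                        // 32768
    constexpr unsigned KEY_CODEWORD_BYTES = 2;
    constexpr unsigned KEY_CODEWORDS = KEY_CODEWORD_BYTES * BYTE_LENGTH;    // 16
    static_assert(KEY_CODEWORDS * CODEWORD_BYTES == 32, "32-byte key");
    static_assert(ENCODE_LENGTH * KEY_CODEWORD_BYTES == 64 * 1024, "64 KiB encoding");
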