Merge "Change NNAPI VTS to use TEST_P to iterate across all service instances"
diff --git a/neuralnetworks/1.0/vts/functional/Android.bp b/neuralnetworks/1.0/vts/functional/Android.bp
index 0af7f79..3e9d5f7 100644
--- a/neuralnetworks/1.0/vts/functional/Android.bp
+++ b/neuralnetworks/1.0/vts/functional/Android.bp
@@ -40,10 +40,11 @@
     ],
 }
 
-cc_defaults {
-    name: "VtsHalNeuralNetworksV1_0TargetTestDefaults",
+cc_test {
+    name: "VtsHalNeuralnetworksV1_0TargetTest",
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
+        "BasicTests.cpp",
         "TestAssertions.cpp",
         "ValidateModel.cpp",
         "ValidateRequest.cpp",
@@ -64,33 +65,11 @@
         "libneuralnetworks_utils",
         "VtsHalNeuralNetworksV1_0_utils",
     ],
+    whole_static_libs: [
+        "neuralnetworks_generated_V1_0_example",
+    ],
     header_libs: [
         "libneuralnetworks_headers",
     ],
     test_suites: ["general-tests"],
 }
-
-cc_test {
-    name: "VtsHalNeuralnetworksV1_0TargetTest",
-    defaults: ["VtsHalNeuralNetworksV1_0TargetTestDefaults"],
-    srcs: [
-        "BasicTests.cpp",
-    ],
-    whole_static_libs: [
-        "neuralnetworks_generated_V1_0_example",
-    ],
-}
-
-cc_test {
-    name: "PresubmitHalNeuralnetworksV1_0TargetTest",
-    defaults: ["VtsHalNeuralNetworksV1_0TargetTestDefaults"],
-    srcs: [
-        "BasicTests.cpp",
-    ],
-    whole_static_libs: [
-        "neuralnetworks_generated_V1_0_example",
-    ],
-    cflags: [
-        "-DPRESUBMIT_NOT_VTS",
-    ],
-}
diff --git a/neuralnetworks/1.0/vts/functional/BasicTests.cpp b/neuralnetworks/1.0/vts/functional/BasicTests.cpp
index 551ea67..cc44c9e 100644
--- a/neuralnetworks/1.0/vts/functional/BasicTests.cpp
+++ b/neuralnetworks/1.0/vts/functional/BasicTests.cpp
@@ -21,17 +21,17 @@
 namespace android::hardware::neuralnetworks::V1_0::vts::functional {
 
 // create device test
-TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
+TEST_P(NeuralnetworksHidlTest, CreateDevice) {}
 
 // status test
-TEST_F(NeuralnetworksHidlTest, StatusTest) {
+TEST_P(NeuralnetworksHidlTest, StatusTest) {
     Return<DeviceStatus> status = kDevice->getStatus();
     ASSERT_TRUE(status.isOk());
     EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status));
 }
 
 // initialization
-TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
+TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) {
     Return<void> ret =
             kDevice->getCapabilities([](ErrorStatus status, const Capabilities& capabilities) {
                 EXPECT_EQ(ErrorStatus::NONE, status);
diff --git a/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.cpp
index 1948c05..595ad85 100644
--- a/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.cpp
+++ b/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.cpp
@@ -148,6 +148,20 @@
     checkResults(testModel, outputs);
 }
 
+void GeneratedTestBase::SetUp() {
+    testing::TestWithParam<GeneratedTestParam>::SetUp();
+    ASSERT_NE(kDevice, nullptr);
+}
+
+std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
+    return TestModelManager::get().getTestModels(filter);
+}
+
+std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
+    const auto& [namedDevice, namedModel] = info.param;
+    return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
+}
+
 // Tag for the generated tests
 class GeneratedTest : public GeneratedTestBase {};
 
diff --git a/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.h b/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.h
index 10e46b7..f230a02 100644
--- a/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.h
+++ b/neuralnetworks/1.0/vts/functional/GeneratedTestHarness.h
@@ -18,29 +18,38 @@
 #define ANDROID_HARDWARE_NEURALNETWORKS_V1_0_GENERATED_TEST_HARNESS_H
 
 #include <android/hardware/neuralnetworks/1.0/IDevice.h>
+#include <functional>
 #include "TestHarness.h"
 #include "VtsHalNeuralnetworks.h"
 
 namespace android::hardware::neuralnetworks::V1_0::vts::functional {
 
-class GeneratedTestBase
-    : public NeuralnetworksHidlTest,
-      public testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
+using NamedModel = Named<const test_helper::TestModel*>;
+using GeneratedTestParam = std::tuple<NamedDevice, NamedModel>;
+
+class GeneratedTestBase : public testing::TestWithParam<GeneratedTestParam> {
   protected:
-    const test_helper::TestModel& kTestModel = *GetParam().second;
+    void SetUp() override;
+    const sp<IDevice> kDevice = getData(std::get<NamedDevice>(GetParam()));
+    const test_helper::TestModel& kTestModel = *getData(std::get<NamedModel>(GetParam()));
 };
 
-#define INSTANTIATE_GENERATED_TEST(TestSuite, filter)                                        \
-    INSTANTIATE_TEST_SUITE_P(                                                                \
-            TestGenerated, TestSuite,                                                        \
-            testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
-            [](const auto& info) { return info.param.first; })
+using FilterFn = std::function<bool(const test_helper::TestModel&)>;
+std::vector<NamedModel> getNamedModels(const FilterFn& filter);
+
+std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info);
+
+#define INSTANTIATE_GENERATED_TEST(TestSuite, filter)                                     \
+    INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite,                                    \
+                             testing::Combine(testing::ValuesIn(getNamedDevices()),       \
+                                              testing::ValuesIn(getNamedModels(filter))), \
+                             printGeneratedTest)
 
 // Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
 // TODO: Clean up the hierarchy for ValidationTest.
 class ValidationTest : public GeneratedTestBase {};
 
-Model createModel(const ::test_helper::TestModel& testModel);
+Model createModel(const test_helper::TestModel& testModel);
 
 }  // namespace android::hardware::neuralnetworks::V1_0::vts::functional
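
For context, a hypothetical sketch (not taken from this change) of how a test file is expected to use the reworked harness: a fixture derives from GeneratedTestBase, tests are written with TEST_P, and INSTANTIATE_GENERATED_TEST expands the suite to the cross product of every registered IDevice instance and every TestModel accepted by the filter.

    // Hypothetical usage sketch; the trivial filter accepts every model, and a
    // real suite would pass a predicate selecting the relevant subset.
    class ExampleGeneratedTest : public GeneratedTestBase {};

    TEST_P(ExampleGeneratedTest, DeviceAndModelAreAvailable) {
        // kDevice and kTestModel are populated by GeneratedTestBase from GetParam().
        ASSERT_NE(kDevice, nullptr);
    }

    INSTANTIATE_GENERATED_TEST(ExampleGeneratedTest,
                               [](const test_helper::TestModel&) { return true; });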
 
diff --git a/neuralnetworks/1.0/vts/functional/Utils.cpp b/neuralnetworks/1.0/vts/functional/Utils.cpp
index 307003c..5b630fd 100644
--- a/neuralnetworks/1.0/vts/functional/Utils.cpp
+++ b/neuralnetworks/1.0/vts/functional/Utils.cpp
@@ -117,6 +117,13 @@
     return outputBuffers;
 }
 
+std::string gtestCompliantName(std::string name) {
+    // gtest test names must only contain alphanumeric characters
+    std::replace_if(
+            name.begin(), name.end(), [](char c) { return !std::isalnum(c); }, '_');
+    return name;
+}
+
 }  // namespace android::hardware::neuralnetworks
 
 namespace android::hardware::neuralnetworks::V1_0 {
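
The gtestCompliantName() helper added above exists because HAL instance names such as "sample-all" contain characters gtest does not allow in test names; the parameterized-test name printers in this change route every device and model name through it. A minimal standalone sketch of the same replacement logic, using hypothetical inputs:

    // Standalone sketch of the sanitization: every non-alphanumeric character
    // becomes '_', so the "sample-all" instance shows up in test names as
    // "sample_all" (which the --gtest_filter patterns in TEST_MAPPING match).
    #include <algorithm>
    #include <cctype>
    #include <iostream>
    #include <string>

    static std::string sanitize(std::string name) {
        std::replace_if(
                name.begin(), name.end(),
                [](unsigned char c) { return !std::isalnum(c); }, '_');
        return name;
    }

    int main() {
        std::cout << sanitize("sample-all") << "\n";         // prints "sample_all"
        std::cout << sanitize("sample-float-fast") << "\n";  // prints "sample_float_fast"
        return 0;
    }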
diff --git a/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.cpp
index 20b4565..cb22250 100644
--- a/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.cpp
+++ b/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.cpp
@@ -18,11 +18,13 @@
 
 #include "VtsHalNeuralnetworks.h"
 #include "1.0/Callbacks.h"
-#include "1.0/Utils.h"
 #include "GeneratedTestHarness.h"
 #include "TestHarness.h"
 
 #include <android-base/logging.h>
+#include <hidl/ServiceManagement.h>
+#include <string>
+#include <utility>
 
 namespace android::hardware::neuralnetworks::V1_0::vts::functional {
 
@@ -76,34 +78,39 @@
     ASSERT_NE(nullptr, preparedModel->get());
 }
 
-// A class for test environment setup
-NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() {
-    // This has to return a "new" object because it is freed inside
-    // testing::AddGlobalTestEnvironment when the gtest is being torn down
-    static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment();
-    return instance;
-}
-
-void NeuralnetworksHidlEnvironment::registerTestServices() {
-    registerTestService<IDevice>();
-}
-
-// The main test class for NEURALNETWORK HIDL HAL.
 void NeuralnetworksHidlTest::SetUp() {
-    testing::VtsHalHidlTargetTestBase::SetUp();
-
-#ifdef PRESUBMIT_NOT_VTS
-    const std::string name =
-            NeuralnetworksHidlEnvironment::getInstance()->getServiceName<IDevice>();
-    const std::string sampleDriver = "sample-";
-    if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) {
-        GTEST_SKIP();
-    }
-#endif  // PRESUBMIT_NOT_VTS
-
-    ASSERT_NE(nullptr, kDevice.get());
+    testing::TestWithParam<NeuralnetworksHidlTestParam>::SetUp();
+    ASSERT_NE(kDevice, nullptr);
 }
 
+static NamedDevice makeNamedDevice(const std::string& name) {
+    return {name, IDevice::getService(name)};
+}
+
+static std::vector<NamedDevice> getNamedDevicesImpl() {
+    // Retrieves the names of all service instances that implement IDevice,
+    // including any Lazy HAL instances.
+    const std::vector<std::string> names = hardware::getAllHalInstanceNames(IDevice::descriptor);
+
+    // Get a handle to each device and pair it with its name.
+    std::vector<NamedDevice> namedDevices;
+    namedDevices.reserve(names.size());
+    std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice);
+    return namedDevices;
+}
+
+const std::vector<NamedDevice>& getNamedDevices() {
+    const static std::vector<NamedDevice> devices = getNamedDevicesImpl();
+    return devices;
+}
+
+std::string printNeuralnetworksHidlTest(
+        const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info) {
+    return gtestCompliantName(getName(info.param));
+}
+
+INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest);
+
 // Forward declaration from ValidateModel.cpp
 void validateModel(const sp<IDevice>& device, const Model& model);
 // Forward declaration from ValidateRequest.cpp
@@ -130,14 +137,3 @@
 INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; });
 
 }  // namespace android::hardware::neuralnetworks::V1_0::vts::functional
-
-using android::hardware::neuralnetworks::V1_0::vts::functional::NeuralnetworksHidlEnvironment;
-
-int main(int argc, char** argv) {
-    testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance());
-    testing::InitGoogleTest(&argc, argv);
-    NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv);
-
-    int status = RUN_ALL_TESTS();
-    return status;
-}
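
With the custom NeuralnetworksHidlEnvironment and main() removed, the binary relies on the stock gtest main() that the cc_test rule links in, and INSTANTIATE_DEVICE_TEST registers the fixture once per IDevice instance returned by getAllHalInstanceNames(). The resulting test names follow gtest's usual InstantiationName/Fixture.Test/Param pattern, e.g. PerInstance/NeuralnetworksHidlTest.StatusTest/sample_all, which is what the --gtest_filter entries in TEST_MAPPING below match against. As a hypothetical illustration (not part of this change), any further per-instance test just adds a TEST_P against the existing fixture:

    // Hypothetical sketch: no extra registration is needed because
    // INSTANTIATE_DEVICE_TEST already instantiates NeuralnetworksHidlTest
    // for every registered IDevice instance.
    TEST_P(NeuralnetworksHidlTest, DeviceHandleIsValid) {
        ASSERT_NE(kDevice, nullptr);  // kDevice comes from GetParam() via the fixture
    }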
diff --git a/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.h b/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.h
index 48dc237..17f4613 100644
--- a/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.h
+++ b/neuralnetworks/1.0/vts/functional/VtsHalNeuralnetworks.h
@@ -17,40 +17,34 @@
 #ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_0_VTS_HAL_NEURALNETWORKS_H
 #define ANDROID_HARDWARE_NEURALNETWORKS_V1_0_VTS_HAL_NEURALNETWORKS_H
 
+#include "1.0/Utils.h"
+
 #include <android/hardware/neuralnetworks/1.0/IDevice.h>
 #include <android/hardware/neuralnetworks/1.0/types.h>
-
-#include <VtsHalHidlTargetTestBase.h>
-#include <VtsHalHidlTargetTestEnvBase.h>
-
-#include <android-base/macros.h>
 #include <gtest/gtest.h>
 
+#include <vector>
+
 namespace android::hardware::neuralnetworks::V1_0::vts::functional {
 
-// A class for test environment setup
-class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase {
-    DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment);
-    NeuralnetworksHidlEnvironment() = default;
+using NamedDevice = Named<sp<IDevice>>;
+using NeuralnetworksHidlTestParam = NamedDevice;
 
-  public:
-    static NeuralnetworksHidlEnvironment* getInstance();
-    void registerTestServices() override;
-};
-
-// The main test class for NEURALNETWORKS HIDL HAL.
-class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase {
-    DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest);
-
-  public:
-    NeuralnetworksHidlTest() = default;
-    void SetUp() override;
-
+class NeuralnetworksHidlTest : public testing::TestWithParam<NeuralnetworksHidlTestParam> {
   protected:
-    const sp<IDevice> kDevice = testing::VtsHalHidlTargetTestBase::getService<IDevice>(
-            NeuralnetworksHidlEnvironment::getInstance());
+    void SetUp() override;
+    const sp<IDevice> kDevice = getData(GetParam());
 };
 
+const std::vector<NamedDevice>& getNamedDevices();
+
+std::string printNeuralnetworksHidlTest(
+        const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info);
+
+#define INSTANTIATE_DEVICE_TEST(TestSuite)                                                 \
+    INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \
+                             printNeuralnetworksHidlTest)
+
 // Create an IPreparedModel object. If the model cannot be prepared,
 // "preparedModel" will be nullptr instead.
 void createPreparedModel(const sp<IDevice>& device, const Model& model,
diff --git a/neuralnetworks/1.0/vts/functional/include/1.0/Utils.h b/neuralnetworks/1.0/vts/functional/include/1.0/Utils.h
index 1ce751c..6d4534c 100644
--- a/neuralnetworks/1.0/vts/functional/include/1.0/Utils.h
+++ b/neuralnetworks/1.0/vts/functional/include/1.0/Utils.h
@@ -21,13 +21,15 @@
 #include <android/hardware/neuralnetworks/1.0/types.h>
 #include <algorithm>
 #include <iosfwd>
+#include <string>
+#include <utility>
 #include <vector>
 #include "TestHarness.h"
 
 namespace android::hardware::neuralnetworks {
 
 // Create HIDL Request from the TestModel struct.
-V1_0::Request createRequest(const ::test_helper::TestModel& testModel);
+V1_0::Request createRequest(const test_helper::TestModel& testModel);
 
 // After execution, copy out output results from the output memory pool.
 std::vector<::test_helper::TestBuffer> getOutputBuffers(const V1_0::Request& request);
@@ -51,6 +53,21 @@
     return index;
 }
 
+template <typename Type>
+using Named = std::pair<std::string, Type>;
+
+template <typename Type>
+const std::string& getName(const Named<Type>& namedData) {
+    return namedData.first;
+}
+
+template <typename Type>
+const Type& getData(const Named<Type>& namedData) {
+    return namedData.second;
+}
+
+std::string gtestCompliantName(std::string name);
+
 }  // namespace android::hardware::neuralnetworks
 
 namespace android::hardware::neuralnetworks::V1_0 {
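
The Named<Type> alias and its getName()/getData() accessors introduced here let a single gtest parameter carry both a human-readable label (used when printing test names) and the payload, be it an IDevice handle or a TestModel pointer. A minimal standalone sketch with a hypothetical payload type:

    // Standalone sketch of the Named<T> pattern: .first names the parameter
    // for gtest output, .second carries the actual test data.
    #include <iostream>
    #include <string>
    #include <utility>

    template <typename Type>
    using Named = std::pair<std::string, Type>;

    template <typename Type>
    const std::string& getName(const Named<Type>& namedData) { return namedData.first; }

    template <typename Type>
    const Type& getData(const Named<Type>& namedData) { return namedData.second; }

    int main() {
        const Named<int> example = {"sample-all", 42};  // hypothetical payload
        std::cout << getName(example) << " -> " << getData(example) << "\n";
        return 0;
    }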
diff --git a/neuralnetworks/1.1/vts/functional/Android.bp b/neuralnetworks/1.1/vts/functional/Android.bp
index c197e6d..4e85355 100644
--- a/neuralnetworks/1.1/vts/functional/Android.bp
+++ b/neuralnetworks/1.1/vts/functional/Android.bp
@@ -14,10 +14,11 @@
 // limitations under the License.
 //
 
-cc_defaults {
-    name: "VtsHalNeuralNetworksV1_1TargetTestDefaults",
+cc_test {
+    name: "VtsHalNeuralnetworksV1_1TargetTest",
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
+        "BasicTests.cpp",
         "TestAssertions.cpp",
         "ValidateModel.cpp",
         "ValidateRequest.cpp",
@@ -39,35 +40,12 @@
         "libneuralnetworks_utils",
         "VtsHalNeuralNetworksV1_0_utils",
     ],
+    whole_static_libs: [
+        "neuralnetworks_generated_V1_0_example",
+        "neuralnetworks_generated_V1_1_example",
+    ],
     header_libs: [
         "libneuralnetworks_headers",
     ],
     test_suites: ["general-tests"],
 }
-
-cc_test {
-    name: "VtsHalNeuralnetworksV1_1TargetTest",
-    defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"],
-    srcs: [
-        "BasicTests.cpp",
-    ],
-    whole_static_libs: [
-        "neuralnetworks_generated_V1_0_example",
-        "neuralnetworks_generated_V1_1_example",
-    ],
-}
-
-cc_test {
-    name: "PresubmitHalNeuralnetworksV1_1TargetTest",
-    defaults: ["VtsHalNeuralNetworksV1_1TargetTestDefaults"],
-    srcs: [
-        "BasicTests.cpp",
-    ],
-    whole_static_libs: [
-        "neuralnetworks_generated_V1_0_example",
-        "neuralnetworks_generated_V1_1_example",
-    ],
-    cflags: [
-        "-DPRESUBMIT_NOT_VTS",
-    ],
-}
diff --git a/neuralnetworks/1.1/vts/functional/BasicTests.cpp b/neuralnetworks/1.1/vts/functional/BasicTests.cpp
index 2791e80..44836f0 100644
--- a/neuralnetworks/1.1/vts/functional/BasicTests.cpp
+++ b/neuralnetworks/1.1/vts/functional/BasicTests.cpp
@@ -24,17 +24,17 @@
 using V1_0::ErrorStatus;
 
 // create device test
-TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
+TEST_P(NeuralnetworksHidlTest, CreateDevice) {}
 
 // status test
-TEST_F(NeuralnetworksHidlTest, StatusTest) {
+TEST_P(NeuralnetworksHidlTest, StatusTest) {
     Return<DeviceStatus> status = kDevice->getStatus();
     ASSERT_TRUE(status.isOk());
     EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status));
 }
 
 // initialization
-TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
+TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) {
     Return<void> ret =
             kDevice->getCapabilities_1_1([](ErrorStatus status, const Capabilities& capabilities) {
                 EXPECT_EQ(ErrorStatus::NONE, status);
diff --git a/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.cpp
index fddfc2b..7a929d6 100644
--- a/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.cpp
+++ b/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.cpp
@@ -156,6 +156,20 @@
     checkResults(testModel, outputs);
 }
 
+void GeneratedTestBase::SetUp() {
+    testing::TestWithParam<GeneratedTestParam>::SetUp();
+    ASSERT_NE(kDevice, nullptr);
+}
+
+std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
+    return TestModelManager::get().getTestModels(filter);
+}
+
+std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
+    const auto& [namedDevice, namedModel] = info.param;
+    return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
+}
+
 // Tag for the generated tests
 class GeneratedTest : public GeneratedTestBase {};
 
diff --git a/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.h b/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.h
index 273d1ec..cf449ea 100644
--- a/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.h
+++ b/neuralnetworks/1.1/vts/functional/GeneratedTestHarness.h
@@ -18,29 +18,38 @@
 #define ANDROID_HARDWARE_NEURALNETWORKS_V1_1_GENERATED_TEST_HARNESS_H
 
 #include <android/hardware/neuralnetworks/1.1/IDevice.h>
+#include "1.0/Utils.h"
 #include "TestHarness.h"
 #include "VtsHalNeuralnetworks.h"
 
 namespace android::hardware::neuralnetworks::V1_1::vts::functional {
 
-class GeneratedTestBase
-    : public NeuralnetworksHidlTest,
-      public testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
+using NamedModel = Named<const test_helper::TestModel*>;
+using GeneratedTestParam = std::tuple<NamedDevice, NamedModel>;
+
+class GeneratedTestBase : public testing::TestWithParam<GeneratedTestParam> {
   protected:
-    const test_helper::TestModel& kTestModel = *GetParam().second;
+    void SetUp() override;
+    const sp<IDevice> kDevice = getData(std::get<NamedDevice>(GetParam()));
+    const test_helper::TestModel& kTestModel = *getData(std::get<NamedModel>(GetParam()));
 };
 
-#define INSTANTIATE_GENERATED_TEST(TestSuite, filter)                                        \
-    INSTANTIATE_TEST_SUITE_P(                                                                \
-            TestGenerated, TestSuite,                                                        \
-            testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
-            [](const auto& info) { return info.param.first; })
+using FilterFn = std::function<bool(const test_helper::TestModel&)>;
+std::vector<NamedModel> getNamedModels(const FilterFn& filter);
+
+std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info);
+
+#define INSTANTIATE_GENERATED_TEST(TestSuite, filter)                                     \
+    INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite,                                    \
+                             testing::Combine(testing::ValuesIn(getNamedDevices()),       \
+                                              testing::ValuesIn(getNamedModels(filter))), \
+                             printGeneratedTest)
 
 // Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
 // TODO: Clean up the hierarchy for ValidationTest.
 class ValidationTest : public GeneratedTestBase {};
 
-Model createModel(const ::test_helper::TestModel& testModel);
+Model createModel(const test_helper::TestModel& testModel);
 
 }  // namespace android::hardware::neuralnetworks::V1_1::vts::functional
 
diff --git a/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.cpp
index d53d43e..d56d40b 100644
--- a/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.cpp
+++ b/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.cpp
@@ -17,13 +17,15 @@
 #define LOG_TAG "neuralnetworks_hidl_hal_test"
 
 #include "VtsHalNeuralnetworks.h"
+#include <android-base/logging.h>
+#include <hidl/ServiceManagement.h>
+#include <string>
+#include <utility>
 #include "1.0/Callbacks.h"
 #include "1.0/Utils.h"
 #include "GeneratedTestHarness.h"
 #include "TestHarness.h"
 
-#include <android-base/logging.h>
-
 namespace android::hardware::neuralnetworks::V1_1::vts::functional {
 
 using V1_0::ErrorStatus;
@@ -79,34 +81,39 @@
     ASSERT_NE(nullptr, preparedModel->get());
 }
 
-// A class for test environment setup
-NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() {
-    // This has to return a "new" object because it is freed inside
-    // testing::AddGlobalTestEnvironment when the gtest is being torn down
-    static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment();
-    return instance;
-}
-
-void NeuralnetworksHidlEnvironment::registerTestServices() {
-    registerTestService<IDevice>();
-}
-
-// The main test class for NEURALNETWORK HIDL HAL.
 void NeuralnetworksHidlTest::SetUp() {
-    testing::VtsHalHidlTargetTestBase::SetUp();
-
-#ifdef PRESUBMIT_NOT_VTS
-    const std::string name =
-            NeuralnetworksHidlEnvironment::getInstance()->getServiceName<IDevice>();
-    const std::string sampleDriver = "sample-";
-    if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) {
-        GTEST_SKIP();
-    }
-#endif  // PRESUBMIT_NOT_VTS
-
-    ASSERT_NE(nullptr, kDevice.get());
+    testing::TestWithParam<NeuralnetworksHidlTestParam>::SetUp();
+    ASSERT_NE(kDevice, nullptr);
 }
 
+static NamedDevice makeNamedDevice(const std::string& name) {
+    return {name, IDevice::getService(name)};
+}
+
+static std::vector<NamedDevice> getNamedDevicesImpl() {
+    // Retrieves the names of all service instances that implement IDevice,
+    // including any Lazy HAL instances.
+    const std::vector<std::string> names = hardware::getAllHalInstanceNames(IDevice::descriptor);
+
+    // Get a handle to each device and pair it with its name.
+    std::vector<NamedDevice> namedDevices;
+    namedDevices.reserve(names.size());
+    std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice);
+    return namedDevices;
+}
+
+const std::vector<NamedDevice>& getNamedDevices() {
+    const static std::vector<NamedDevice> devices = getNamedDevicesImpl();
+    return devices;
+}
+
+std::string printNeuralnetworksHidlTest(
+        const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info) {
+    return gtestCompliantName(getName(info.param));
+}
+
+INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest);
+
 // Forward declaration from ValidateModel.cpp
 void validateModel(const sp<IDevice>& device, const Model& model);
 // Forward declaration from ValidateRequest.cpp
@@ -133,14 +140,3 @@
 INSTANTIATE_GENERATED_TEST(ValidationTest, [](const test_helper::TestModel&) { return true; });
 
 }  // namespace android::hardware::neuralnetworks::V1_1::vts::functional
-
-using android::hardware::neuralnetworks::V1_1::vts::functional::NeuralnetworksHidlEnvironment;
-
-int main(int argc, char** argv) {
-    testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance());
-    testing::InitGoogleTest(&argc, argv);
-    NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv);
-
-    int status = RUN_ALL_TESTS();
-    return status;
-}
diff --git a/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.h b/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.h
index 9d6194a..e879d84 100644
--- a/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.h
+++ b/neuralnetworks/1.1/vts/functional/VtsHalNeuralnetworks.h
@@ -17,41 +17,33 @@
 #ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_1_VTS_HAL_NEURALNETWORKS_H
 #define ANDROID_HARDWARE_NEURALNETWORKS_V1_1_VTS_HAL_NEURALNETWORKS_H
 
-#include <android/hardware/neuralnetworks/1.0/types.h>
+#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
 #include <android/hardware/neuralnetworks/1.1/IDevice.h>
 #include <android/hardware/neuralnetworks/1.1/types.h>
-
-#include <VtsHalHidlTargetTestBase.h>
-#include <VtsHalHidlTargetTestEnvBase.h>
-
-#include <android-base/macros.h>
 #include <gtest/gtest.h>
+#include <vector>
+#include "1.0/Utils.h"
 
 namespace android::hardware::neuralnetworks::V1_1::vts::functional {
 
-// A class for test environment setup
-class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase {
-    DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment);
-    NeuralnetworksHidlEnvironment() = default;
+using NamedDevice = Named<sp<IDevice>>;
+using NeuralnetworksHidlTestParam = NamedDevice;
 
-  public:
-    static NeuralnetworksHidlEnvironment* getInstance();
-    void registerTestServices() override;
-};
-
-// The main test class for NEURALNETWORKS HIDL HAL.
-class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase {
-    DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest);
-
-  public:
-    NeuralnetworksHidlTest() = default;
-    void SetUp() override;
-
+class NeuralnetworksHidlTest : public testing::TestWithParam<NeuralnetworksHidlTestParam> {
   protected:
-    const sp<IDevice> kDevice = testing::VtsHalHidlTargetTestBase::getService<IDevice>(
-            NeuralnetworksHidlEnvironment::getInstance());
+    void SetUp() override;
+    const sp<IDevice> kDevice = getData(GetParam());
 };
 
+const std::vector<NamedDevice>& getNamedDevices();
+
+std::string printNeuralnetworksHidlTest(
+        const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info);
+
+#define INSTANTIATE_DEVICE_TEST(TestSuite)                                                 \
+    INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \
+                             printNeuralnetworksHidlTest)
+
 // Create an IPreparedModel object. If the model cannot be prepared,
 // "preparedModel" will be nullptr instead.
 void createPreparedModel(const sp<IDevice>& device, const Model& model,
diff --git a/neuralnetworks/1.2/vts/functional/Android.bp b/neuralnetworks/1.2/vts/functional/Android.bp
index 40ca809..3ba8879 100644
--- a/neuralnetworks/1.2/vts/functional/Android.bp
+++ b/neuralnetworks/1.2/vts/functional/Android.bp
@@ -14,16 +14,19 @@
 // limitations under the License.
 //
 
-cc_defaults {
-    name: "VtsHalNeuralNetworksV1_2TargetTestDefaults",
+cc_test {
+    name: "VtsHalNeuralnetworksV1_2TargetTest",
     defaults: ["VtsHalTargetTestDefaults"],
     srcs: [
+        "BasicTests.cpp",
+        "Callbacks.cpp",
+        "CompilationCachingTests.cpp",
+        "GeneratedTestHarness.cpp",
         "TestAssertions.cpp",
         "ValidateModel.cpp",
         "ValidateRequest.cpp",
+        "ValidateBurst.cpp",
         "VtsHalNeuralnetworks.cpp",
-        "Callbacks.cpp",
-        "GeneratedTestHarness.cpp",
     ],
     local_include_dirs: ["include"],
     shared_libs: [
@@ -42,41 +45,13 @@
         "libneuralnetworks_utils",
         "VtsHalNeuralNetworksV1_0_utils",
     ],
+    whole_static_libs: [
+        "neuralnetworks_generated_V1_0_example",
+        "neuralnetworks_generated_V1_1_example",
+        "neuralnetworks_generated_V1_2_example",
+    ],
     header_libs: [
         "libneuralnetworks_headers",
     ],
     test_suites: ["general-tests"],
 }
-
-cc_test {
-    name: "VtsHalNeuralnetworksV1_2TargetTest",
-    defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"],
-    srcs: [
-        "BasicTests.cpp",
-        "CompilationCachingTests.cpp",
-        "ValidateBurst.cpp",
-    ],
-    whole_static_libs: [
-        "neuralnetworks_generated_V1_0_example",
-        "neuralnetworks_generated_V1_1_example",
-        "neuralnetworks_generated_V1_2_example",
-    ],
-}
-
-cc_test {
-    name: "PresubmitHalNeuralnetworksV1_2TargetTest",
-    defaults: ["VtsHalNeuralNetworksV1_2TargetTestDefaults"],
-    srcs: [
-        "BasicTests.cpp",
-        "CompilationCachingTests.cpp",
-        "ValidateBurst.cpp",
-    ],
-    whole_static_libs: [
-        "neuralnetworks_generated_V1_0_example",
-        "neuralnetworks_generated_V1_1_example",
-        "neuralnetworks_generated_V1_2_example",
-    ],
-    cflags: [
-        "-DPRESUBMIT_NOT_VTS",
-    ],
-}
diff --git a/neuralnetworks/1.2/vts/functional/BasicTests.cpp b/neuralnetworks/1.2/vts/functional/BasicTests.cpp
index 8f95b96..8e82c53 100644
--- a/neuralnetworks/1.2/vts/functional/BasicTests.cpp
+++ b/neuralnetworks/1.2/vts/functional/BasicTests.cpp
@@ -25,17 +25,17 @@
 using V1_0::PerformanceInfo;
 
 // create device test
-TEST_F(NeuralnetworksHidlTest, CreateDevice) {}
+TEST_P(NeuralnetworksHidlTest, CreateDevice) {}
 
 // status test
-TEST_F(NeuralnetworksHidlTest, StatusTest) {
+TEST_P(NeuralnetworksHidlTest, StatusTest) {
     Return<DeviceStatus> status = kDevice->getStatus();
     ASSERT_TRUE(status.isOk());
     EXPECT_EQ(DeviceStatus::AVAILABLE, static_cast<DeviceStatus>(status));
 }
 
 // initialization
-TEST_F(NeuralnetworksHidlTest, GetCapabilitiesTest) {
+TEST_P(NeuralnetworksHidlTest, GetCapabilitiesTest) {
     using OperandPerformance = Capabilities::OperandPerformance;
     Return<void> ret = kDevice->getCapabilities_1_2([](ErrorStatus status,
                                                        const Capabilities& capabilities) {
@@ -60,7 +60,7 @@
 }
 
 // device version test
-TEST_F(NeuralnetworksHidlTest, GetDeviceVersionStringTest) {
+TEST_P(NeuralnetworksHidlTest, GetDeviceVersionStringTest) {
     Return<void> ret =
             kDevice->getVersionString([](ErrorStatus status, const hidl_string& version) {
                 EXPECT_EQ(ErrorStatus::NONE, status);
@@ -70,7 +70,7 @@
 }
 
 // device type test
-TEST_F(NeuralnetworksHidlTest, GetDeviceTypeTest) {
+TEST_P(NeuralnetworksHidlTest, GetDeviceTypeTest) {
     Return<void> ret = kDevice->getType([](ErrorStatus status, DeviceType type) {
         EXPECT_EQ(ErrorStatus::NONE, status);
         EXPECT_TRUE(type == DeviceType::OTHER || type == DeviceType::CPU ||
@@ -80,7 +80,7 @@
 }
 
 // device supported extensions test
-TEST_F(NeuralnetworksHidlTest, GetDeviceSupportedExtensionsTest) {
+TEST_P(NeuralnetworksHidlTest, GetDeviceSupportedExtensionsTest) {
     Return<void> ret = kDevice->getSupportedExtensions(
             [](ErrorStatus status, const hidl_vec<Extension>& extensions) {
                 EXPECT_EQ(ErrorStatus::NONE, status);
@@ -101,7 +101,7 @@
 }
 
 // getNumberOfCacheFilesNeeded test
-TEST_F(NeuralnetworksHidlTest, getNumberOfCacheFilesNeeded) {
+TEST_P(NeuralnetworksHidlTest, getNumberOfCacheFilesNeeded) {
     Return<void> ret = kDevice->getNumberOfCacheFilesNeeded(
             [](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
                 EXPECT_EQ(ErrorStatus::NONE, status);
diff --git a/neuralnetworks/1.2/vts/functional/CompilationCachingTests.cpp b/neuralnetworks/1.2/vts/functional/CompilationCachingTests.cpp
index bb46e06..2130a76 100644
--- a/neuralnetworks/1.2/vts/functional/CompilationCachingTests.cpp
+++ b/neuralnetworks/1.2/vts/functional/CompilationCachingTests.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "neuralnetworks_hidl_hal_test"
 
 #include <android-base/logging.h>
+#include <fcntl.h>
 #include <ftw.h>
 #include <gtest/gtest.h>
 #include <hidlmemory/mapping.h>
@@ -37,11 +38,11 @@
 // Forward declaration of the mobilenet generated test models in
 // frameworks/ml/nn/runtime/test/generated/.
 namespace generated_tests::mobilenet_224_gender_basic_fixed {
-const ::test_helper::TestModel& get_test_model();
+const test_helper::TestModel& get_test_model();
 }  // namespace generated_tests::mobilenet_224_gender_basic_fixed
 
 namespace generated_tests::mobilenet_quantized {
-const ::test_helper::TestModel& get_test_model();
+const test_helper::TestModel& get_test_model();
 }  // namespace generated_tests::mobilenet_quantized
 
 namespace android::hardware::neuralnetworks::V1_2::vts::functional {
@@ -53,13 +54,13 @@
 
 namespace float32_model {
 
-constexpr auto get_test_model = ::generated_tests::mobilenet_224_gender_basic_fixed::get_test_model;
+constexpr auto get_test_model = generated_tests::mobilenet_224_gender_basic_fixed::get_test_model;
 
 }  // namespace float32_model
 
 namespace quant8_model {
 
-constexpr auto get_test_model = ::generated_tests::mobilenet_quantized::get_test_model;
+constexpr auto get_test_model = generated_tests::mobilenet_quantized::get_test_model;
 
 }  // namespace quant8_model
 
@@ -217,12 +218,13 @@
 }  // namespace
 
 // Tag for the compilation caching tests.
-class CompilationCachingTestBase : public NeuralnetworksHidlTest {
+class CompilationCachingTestBase : public testing::Test {
   protected:
-    CompilationCachingTestBase(OperandType type) : kOperandType(type) {}
+    CompilationCachingTestBase(sp<IDevice> device, OperandType type)
+        : kDevice(std::move(device)), kOperandType(type) {}
 
     void SetUp() override {
-        NeuralnetworksHidlTest::SetUp();
+        testing::Test::SetUp();
         ASSERT_NE(kDevice.get(), nullptr);
 
         // Create cache directory. The cache directory and a temporary cache file is always created
@@ -274,7 +276,7 @@
             };
             nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
         }
-        NeuralnetworksHidlTest::TearDown();
+        testing::Test::TearDown();
     }
 
     // Model and examples creators. According to kOperandType, the following methods will return
@@ -398,16 +400,21 @@
     uint32_t mNumDataCache;
     uint32_t mIsCachingSupported;
 
+    const sp<IDevice> kDevice;
     // The primary data type of the testModel.
     const OperandType kOperandType;
 };
 
+using CompilationCachingTestParam = std::tuple<NamedDevice, OperandType>;
+
 // A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
 // pass running with float32 models and the second pass running with quant8 models.
 class CompilationCachingTest : public CompilationCachingTestBase,
-                               public testing::WithParamInterface<OperandType> {
+                               public testing::WithParamInterface<CompilationCachingTestParam> {
   protected:
-    CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
+    CompilationCachingTest()
+        : CompilationCachingTestBase(getData(std::get<NamedDevice>(GetParam())),
+                                     std::get<OperandType>(GetParam())) {}
 };
 
 TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
@@ -1192,16 +1199,30 @@
     }
 }
 
+static const auto kNamedDeviceChoices = testing::ValuesIn(getNamedDevices());
 static const auto kOperandTypeChoices =
         testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);
 
-INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);
+std::string printCompilationCachingTest(
+        const testing::TestParamInfo<CompilationCachingTestParam>& info) {
+    const auto& [namedDevice, operandType] = info.param;
+    const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8");
+    return gtestCompliantName(getName(namedDevice) + "_" + type);
+}
+
+INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest,
+                        testing::Combine(kNamedDeviceChoices, kOperandTypeChoices),
+                        printCompilationCachingTest);
+
+using CompilationCachingSecurityTestParam = std::tuple<NamedDevice, OperandType, uint32_t>;
 
 class CompilationCachingSecurityTest
     : public CompilationCachingTestBase,
-      public testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
+      public testing::WithParamInterface<CompilationCachingSecurityTestParam> {
   protected:
-    CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}
+    CompilationCachingSecurityTest()
+        : CompilationCachingTestBase(getData(std::get<NamedDevice>(GetParam())),
+                                     std::get<OperandType>(GetParam())) {}
 
     void SetUp() {
         CompilationCachingTestBase::SetUp();
@@ -1291,7 +1312,7 @@
         }
     }
 
-    const uint32_t kSeed = std::get<1>(GetParam());
+    const uint32_t kSeed = std::get<uint32_t>(GetParam());
     std::mt19937 generator;
 };
 
@@ -1338,7 +1359,16 @@
     });
 }
 
+std::string printCompilationCachingSecurityTest(
+        const testing::TestParamInfo<CompilationCachingSecurityTestParam>& info) {
+    const auto& [namedDevice, operandType, seed] = info.param;
+    const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8");
+    return gtestCompliantName(getName(namedDevice) + "_" + type + "_" + std::to_string(seed));
+}
+
 INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
-                        testing::Combine(kOperandTypeChoices, testing::Range(0U, 10U)));
+                        testing::Combine(kNamedDeviceChoices, kOperandTypeChoices,
+                                         testing::Range(0U, 10U)),
+                        printCompilationCachingSecurityTest);
 
 }  // namespace android::hardware::neuralnetworks::V1_2::vts::functional
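
One detail worth noting in the reworked caching tests: the parameter tuples are now accessed by element type (std::get<NamedDevice>, std::get<OperandType>, std::get<uint32_t>) instead of by index, which stays correct if the tuple is ever reordered. A standalone illustration with placeholder element types:

    // Standalone sketch of std::get by type, as used on the test parameter
    // tuples above; it only compiles when the requested type appears exactly
    // once in the tuple, so each element must keep a distinct type.
    #include <cstdint>
    #include <iostream>
    #include <string>
    #include <tuple>

    int main() {
        const std::tuple<std::string, uint32_t> param{"sample-all", 7u};  // hypothetical values
        std::cout << std::get<std::string>(param) << " seed=" << std::get<uint32_t>(param) << "\n";
        return 0;
    }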
diff --git a/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.cpp
index a2d3792..2beec98 100644
--- a/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.cpp
+++ b/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.cpp
@@ -190,7 +190,7 @@
 }
 static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
         const sp<IPreparedModel>& preparedModel) {
-    return ::android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
+    return android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
 }
 enum class Executor { ASYNC, SYNC, BURST };
 
@@ -371,6 +371,20 @@
     EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
 }
 
+void GeneratedTestBase::SetUp() {
+    testing::TestWithParam<GeneratedTestParam>::SetUp();
+    ASSERT_NE(kDevice, nullptr);
+}
+
+std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
+    return TestModelManager::get().getTestModels(filter);
+}
+
+std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
+    const auto& [namedDevice, namedModel] = info.param;
+    return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
+}
+
 // Tag for the generated tests
 class GeneratedTest : public GeneratedTestBase {};
 
diff --git a/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.h b/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.h
index 0b8b917..dfc980c 100644
--- a/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.h
+++ b/neuralnetworks/1.2/vts/functional/GeneratedTestHarness.h
@@ -22,34 +22,43 @@
 #include <android/hardware/neuralnetworks/1.2/types.h>
 #include <functional>
 #include <vector>
+#include "1.0/Utils.h"
 #include "TestHarness.h"
 #include "VtsHalNeuralnetworks.h"
 
 namespace android::hardware::neuralnetworks::V1_2::vts::functional {
 
-class GeneratedTestBase
-    : public NeuralnetworksHidlTest,
-      public testing::WithParamInterface<test_helper::TestModelManager::TestParam> {
+using NamedModel = Named<const test_helper::TestModel*>;
+using GeneratedTestParam = std::tuple<NamedDevice, NamedModel>;
+
+class GeneratedTestBase : public testing::TestWithParam<GeneratedTestParam> {
   protected:
-    const test_helper::TestModel& kTestModel = *GetParam().second;
+    void SetUp() override;
+    const sp<IDevice> kDevice = getData(std::get<NamedDevice>(GetParam()));
+    const test_helper::TestModel& kTestModel = *getData(std::get<NamedModel>(GetParam()));
 };
 
-#define INSTANTIATE_GENERATED_TEST(TestSuite, filter)                                        \
-    INSTANTIATE_TEST_SUITE_P(                                                                \
-            TestGenerated, TestSuite,                                                        \
-            testing::ValuesIn(::test_helper::TestModelManager::get().getTestModels(filter)), \
-            [](const auto& info) { return info.param.first; })
+using FilterFn = std::function<bool(const test_helper::TestModel&)>;
+std::vector<NamedModel> getNamedModels(const FilterFn& filter);
+
+std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info);
+
+#define INSTANTIATE_GENERATED_TEST(TestSuite, filter)                                     \
+    INSTANTIATE_TEST_SUITE_P(TestGenerated, TestSuite,                                    \
+                             testing::Combine(testing::ValuesIn(getNamedDevices()),       \
+                                              testing::ValuesIn(getNamedModels(filter))), \
+                             printGeneratedTest)
 
 // Tag for the validation tests, instantiated in VtsHalNeuralnetworks.cpp.
 // TODO: Clean up the hierarchy for ValidationTest.
 class ValidationTest : public GeneratedTestBase {};
 
-Model createModel(const ::test_helper::TestModel& testModel);
+Model createModel(const test_helper::TestModel& testModel);
 
 void PrepareModel(const sp<IDevice>& device, const Model& model, sp<IPreparedModel>* preparedModel);
 
 void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel,
-                           const ::test_helper::TestModel& testModel, bool testDynamicOutputShape);
+                           const test_helper::TestModel& testModel, bool testDynamicOutputShape);
 
 }  // namespace android::hardware::neuralnetworks::V1_2::vts::functional
 
diff --git a/neuralnetworks/1.2/vts/functional/ValidateBurst.cpp b/neuralnetworks/1.2/vts/functional/ValidateBurst.cpp
index c02d020..1d4493d 100644
--- a/neuralnetworks/1.2/vts/functional/ValidateBurst.cpp
+++ b/neuralnetworks/1.2/vts/functional/ValidateBurst.cpp
@@ -262,7 +262,7 @@
     }));
 
     // serialize the request
-    const auto serialized = ::android::nn::serialize(request, MeasureTiming::YES, slots);
+    const auto serialized = android::nn::serialize(request, MeasureTiming::YES, slots);
 
     // validations
     removeDatumTest(sender.get(), receiver.get(), serialized);
@@ -299,7 +299,7 @@
     // skip test if regular burst output isn't useful for testing a failure
     // caused by having too small of a length for the result FMQ
     const std::vector<FmqResultDatum> serialized =
-            ::android::nn::serialize(statusRegular, outputShapesRegular, timingRegular);
+            android::nn::serialize(statusRegular, outputShapesRegular, timingRegular);
     if (statusRegular != ErrorStatus::NONE ||
         serialized.size() <= kExecutionBurstChannelSmallLength) {
         return;
diff --git a/neuralnetworks/1.2/vts/functional/ValidateRequest.cpp b/neuralnetworks/1.2/vts/functional/ValidateRequest.cpp
index 5c52de5..f25ee62 100644
--- a/neuralnetworks/1.2/vts/functional/ValidateRequest.cpp
+++ b/neuralnetworks/1.2/vts/functional/ValidateRequest.cpp
@@ -94,7 +94,7 @@
 
         // create burst
         std::shared_ptr<::android::nn::ExecutionBurstController> burst =
-                ::android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
+                android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
         ASSERT_NE(nullptr, burst.get());
 
         // create memory keys
diff --git a/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.cpp
index aa4f1f2..4fbd0e2 100644
--- a/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.cpp
+++ b/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.cpp
@@ -17,13 +17,15 @@
 #define LOG_TAG "neuralnetworks_hidl_hal_test"
 
 #include "VtsHalNeuralnetworks.h"
+#include <android-base/logging.h>
+#include <hidl/ServiceManagement.h>
+#include <string>
+#include <utility>
 #include "1.0/Callbacks.h"
 #include "1.0/Utils.h"
 #include "GeneratedTestHarness.h"
 #include "TestHarness.h"
 
-#include <android-base/logging.h>
-
 namespace android::hardware::neuralnetworks::V1_2::vts::functional {
 
 using implementation::PreparedModelCallback;
@@ -82,34 +84,39 @@
     ASSERT_NE(nullptr, preparedModel->get());
 }
 
-// A class for test environment setup
-NeuralnetworksHidlEnvironment* NeuralnetworksHidlEnvironment::getInstance() {
-    // This has to return a "new" object because it is freed inside
-    // testing::AddGlobalTestEnvironment when the gtest is being torn down
-    static NeuralnetworksHidlEnvironment* instance = new NeuralnetworksHidlEnvironment();
-    return instance;
-}
-
-void NeuralnetworksHidlEnvironment::registerTestServices() {
-    registerTestService<IDevice>();
-}
-
-// The main test class for NEURALNETWORK HIDL HAL.
 void NeuralnetworksHidlTest::SetUp() {
-    testing::VtsHalHidlTargetTestBase::SetUp();
-
-#ifdef PRESUBMIT_NOT_VTS
-    const std::string name =
-            NeuralnetworksHidlEnvironment::getInstance()->getServiceName<IDevice>();
-    const std::string sampleDriver = "sample-";
-    if (kDevice == nullptr && name.substr(0, sampleDriver.size()) == sampleDriver) {
-        GTEST_SKIP();
-    }
-#endif  // PRESUBMIT_NOT_VTS
-
-    ASSERT_NE(nullptr, kDevice.get());
+    testing::TestWithParam<NeuralnetworksHidlTestParam>::SetUp();
+    ASSERT_NE(kDevice, nullptr);
 }
 
+static NamedDevice makeNamedDevice(const std::string& name) {
+    return {name, IDevice::getService(name)};
+}
+
+static std::vector<NamedDevice> getNamedDevicesImpl() {
+    // Retrieves the names of all service instances that implement IDevice,
+    // including any Lazy HAL instances.
+    const std::vector<std::string> names = hardware::getAllHalInstanceNames(IDevice::descriptor);
+
+    // Get a handle to each device and pair it with its name.
+    std::vector<NamedDevice> namedDevices;
+    namedDevices.reserve(names.size());
+    std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice);
+    return namedDevices;
+}
+
+const std::vector<NamedDevice>& getNamedDevices() {
+    const static std::vector<NamedDevice> devices = getNamedDevicesImpl();
+    return devices;
+}
+
+std::string printNeuralnetworksHidlTest(
+        const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info) {
+    return gtestCompliantName(getName(info.param));
+}
+
+INSTANTIATE_DEVICE_TEST(NeuralnetworksHidlTest);
+
 // Forward declaration from ValidateModel.cpp
 void validateModel(const sp<IDevice>& device, const Model& model);
 // Forward declaration from ValidateRequest.cpp
@@ -162,14 +169,3 @@
 }
 
 }  // namespace android::hardware::neuralnetworks::V1_2::vts::functional
-
-using android::hardware::neuralnetworks::V1_2::vts::functional::NeuralnetworksHidlEnvironment;
-
-int main(int argc, char** argv) {
-    testing::AddGlobalTestEnvironment(NeuralnetworksHidlEnvironment::getInstance());
-    testing::InitGoogleTest(&argc, argv);
-    NeuralnetworksHidlEnvironment::getInstance()->init(&argc, argv);
-
-    int status = RUN_ALL_TESTS();
-    return status;
-}
diff --git a/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.h b/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.h
index 9981696..d01336e 100644
--- a/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.h
+++ b/neuralnetworks/1.2/vts/functional/VtsHalNeuralnetworks.h
@@ -17,42 +17,33 @@
 #ifndef ANDROID_HARDWARE_NEURALNETWORKS_V1_2_VTS_HAL_NEURALNETWORKS_H
 #define ANDROID_HARDWARE_NEURALNETWORKS_V1_2_VTS_HAL_NEURALNETWORKS_H
 
-#include <VtsHalHidlTargetTestBase.h>
-#include <VtsHalHidlTargetTestEnvBase.h>
-#include <android-base/macros.h>
-#include <android/hardware/neuralnetworks/1.0/types.h>
-#include <android/hardware/neuralnetworks/1.1/types.h>
 #include <android/hardware/neuralnetworks/1.2/IDevice.h>
+#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
 #include <android/hardware/neuralnetworks/1.2/types.h>
 #include <gtest/gtest.h>
-
+#include "1.0/Utils.h"
 #include "1.2/Callbacks.h"
 
 namespace android::hardware::neuralnetworks::V1_2::vts::functional {
 
-// A class for test environment setup
-class NeuralnetworksHidlEnvironment : public testing::VtsHalHidlTargetTestEnvBase {
-    DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlEnvironment);
-    NeuralnetworksHidlEnvironment() = default;
+using NamedDevice = Named<sp<IDevice>>;
+using NeuralnetworksHidlTestParam = NamedDevice;
 
-  public:
-    static NeuralnetworksHidlEnvironment* getInstance();
-    void registerTestServices() override;
-};
-
-// The main test class for NEURALNETWORKS HIDL HAL.
-class NeuralnetworksHidlTest : public testing::VtsHalHidlTargetTestBase {
-    DISALLOW_COPY_AND_ASSIGN(NeuralnetworksHidlTest);
-
-  public:
-    NeuralnetworksHidlTest() = default;
-    void SetUp() override;
-
+class NeuralnetworksHidlTest : public testing::TestWithParam<NeuralnetworksHidlTestParam> {
   protected:
-    const sp<IDevice> kDevice = testing::VtsHalHidlTargetTestBase::getService<IDevice>(
-            NeuralnetworksHidlEnvironment::getInstance());
+    void SetUp() override;
+    const sp<IDevice> kDevice = getData(GetParam());
 };
 
+const std::vector<NamedDevice>& getNamedDevices();
+
+std::string printNeuralnetworksHidlTest(
+        const testing::TestParamInfo<NeuralnetworksHidlTestParam>& info);
+
+#define INSTANTIATE_DEVICE_TEST(TestSuite)                                                 \
+    INSTANTIATE_TEST_SUITE_P(PerInstance, TestSuite, testing::ValuesIn(getNamedDevices()), \
+                             printNeuralnetworksHidlTest)
+
 // Create an IPreparedModel object. If the model cannot be prepared,
 // "preparedModel" will be nullptr instead.
 void createPreparedModel(const sp<IDevice>& device, const Model& model,
diff --git a/neuralnetworks/TEST_MAPPING b/neuralnetworks/TEST_MAPPING
index 50b6c19..421922a 100644
--- a/neuralnetworks/TEST_MAPPING
+++ b/neuralnetworks/TEST_MAPPING
@@ -1,26 +1,35 @@
 {
   "presubmit": [
     {
-      "name": "PresubmitHalNeuralnetworksV1_0TargetTest",
+      "name": "VtsHalNeuralnetworksV1_0TargetTest",
       "options": [
         {
-          "native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.0::IDevice/sample-all"
+          // Just use sample-all driver for presubmit tests for faster results.
+          // The other sample drivers (fast-float, quant, etc.) are subsets of
+          // sample-all.
+          "native-test-flag": "--gtest_filter=*sample_all*"
         }
       ]
     },
     {
-      "name": "PresubmitHalNeuralnetworksV1_1TargetTest",
+      "name": "VtsHalNeuralnetworksV1_1TargetTest",
       "options": [
         {
-          "native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.1::IDevice/sample-all"
+          // Just use sample-all driver for presubmit tests for faster results.
+          // The other sample drivers (fast-float, quant, etc.) are subsets of
+          // sample-all.
+          "native-test-flag": "--gtest_filter=*sample_all*"
         }
       ]
     },
     {
-      "name": "PresubmitHalNeuralnetworksV1_2TargetTest",
+      "name": "VtsHalNeuralnetworksV1_2TargetTest",
       "options": [
         {
-          "native-test-flag": "--hal_service_instance=android.hardware.neuralnetworks@1.2::IDevice/sample-all"
+          // Just use sample-all driver for presubmit tests for faster results.
+          // The other sample drivers (fast-float, quant, etc.) are subsets of
+          // sample-all.
+          "native-test-flag": "--gtest_filter=*sample_all*"
         }
       ]
     }