Reusable execution at HAL level -- HAL.

This CL modifies the canonical/AIDL adapter to use the IExecution object
when it is available.
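
For context, the reuse pattern this enables can be sketched with
hypothetical stand-in types; this is a hedged illustration only, not the
adapter's real classes (which are not part of this diff):

    // Illustrative sketch: FakePreparedModel/FakeExecution are stand-ins.
    // The idea is "create once, compute many times" when the driver exposes
    // a reusable execution object (FEATURE_LEVEL_8+), with a per-call
    // fallback otherwise.
    #include <memory>

    struct FakeExecution {
        bool computeSynchronously() { return true; }  // request already validated/converted
    };

    struct FakePreparedModel {
        bool supportsReusableExecution = true;  // e.g. a FEATURE_LEVEL_8+ driver
        std::shared_ptr<FakeExecution> createReusableExecution() {
            return supportsReusableExecution ? std::make_shared<FakeExecution>() : nullptr;
        }
        bool executeOnce() { return true; }  // legacy one-shot execution path
    };

    int main() {
        FakePreparedModel model;
        if (auto execution = model.createReusableExecution()) {
            execution->computeSynchronously();  // reused across computations
            execution->computeSynchronously();
        } else {
            model.executeOnce();  // fallback: one-shot execute per computation
            model.executeOnce();
        }
    }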

Bug: 202405342
Bug: 202431255
Test: NNT_static
Test: CtsNNAPITestCases
Test: VtsHalNeuralnetworksTargetTest
Change-Id: I6aac3c57f97ac87a5ba3f78cfd843fcc403decff
diff --git a/neuralnetworks/aidl/utils/test/DeviceTest.cpp b/neuralnetworks/aidl/utils/test/DeviceTest.cpp
index 0366e7d..fb13af8 100644
--- a/neuralnetworks/aidl/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/aidl/utils/test/DeviceTest.cpp
@@ -17,6 +17,7 @@
 #include "MockBuffer.h"
 #include "MockDevice.h"
 #include "MockPreparedModel.h"
+#include "TestUtils.h"
 
 #include <aidl/android/hardware/neuralnetworks/BnDevice.h>
 #include <android/binder_auto_utils.h>
@@ -146,26 +147,7 @@
     return ndk::ScopedAStatus::fromStatus(STATUS_DEAD_OBJECT);
 };
 
-class DeviceTest : public ::testing::TestWithParam<nn::Version> {
-  protected:
-    const nn::Version kVersion = GetParam();
-};
-
-std::string printDeviceTest(const testing::TestParamInfo<nn::Version>& info) {
-    const nn::Version version = info.param;
-    CHECK(!version.runtimeOnlyFeatures);
-    switch (version.level) {
-        case nn::Version::Level::FEATURE_LEVEL_5:
-            return "v1";
-        case nn::Version::Level::FEATURE_LEVEL_6:
-            return "v2";
-        case nn::Version::Level::FEATURE_LEVEL_7:
-            return "v3";
-        default:
-            LOG(FATAL) << "Invalid AIDL version: " << version;
-            return "invalid";
-    }
-}
+class DeviceTest : public VersionedAidlUtilsTestBase {};
 
 }  // namespace
 
@@ -894,9 +876,6 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-INSTANTIATE_TEST_SUITE_P(TestDevice, DeviceTest,
-                         ::testing::Values(nn::kVersionFeatureLevel5, nn::kVersionFeatureLevel6,
-                                           nn::kVersionFeatureLevel7),
-                         printDeviceTest);
+INSTANTIATE_VERSIONED_AIDL_UTILS_TEST(DeviceTest, kAllAidlVersions);
 
 }  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/ExecutionTest.cpp b/neuralnetworks/aidl/utils/test/ExecutionTest.cpp
new file mode 100644
index 0000000..8519290
--- /dev/null
+++ b/neuralnetworks/aidl/utils/test/ExecutionTest.cpp
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MockExecution.h"
+#include "MockFencedExecutionCallback.h"
+
+#include <aidl/android/hardware/neuralnetworks/IFencedExecutionCallback.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <nnapi/IExecution.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/aidl/Execution.h>
+
+#include <functional>
+#include <memory>
+
+namespace aidl::android::hardware::neuralnetworks::utils {
+namespace {
+
+using ::testing::_;
+using ::testing::DoAll;
+using ::testing::Invoke;
+using ::testing::InvokeWithoutArgs;
+using ::testing::SetArgPointee;
+
+const std::shared_ptr<IExecution> kInvalidExecution;
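+// A Timing value of -1 indicates that the corresponding duration is not available.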
+constexpr auto kNoTiming = Timing{.timeOnDeviceNs = -1, .timeInDriverNs = -1};
+
+constexpr auto makeStatusOk = [] { return ndk::ScopedAStatus::ok(); };
+
+constexpr auto makeGeneralFailure = [] {
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+            static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+};
+constexpr auto makeGeneralTransportFailure = [] {
+    return ndk::ScopedAStatus::fromStatus(STATUS_NO_MEMORY);
+};
+constexpr auto makeDeadObjectFailure = [] {
+    return ndk::ScopedAStatus::fromStatus(STATUS_DEAD_OBJECT);
+};
+
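+// Builds an executeFenced action that reports success through the supplied callback and
+// returns an empty (-1) sync fence, i.e. nothing for the caller to wait on.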
+auto makeFencedExecutionResult(const std::shared_ptr<MockFencedExecutionCallback>& callback) {
+    return [callback](const std::vector<ndk::ScopedFileDescriptor>& /*waitFor*/,
+                      int64_t /*deadline*/, int64_t /*duration*/,
+                      FencedExecutionResult* fencedExecutionResult) {
+        *fencedExecutionResult = FencedExecutionResult{.callback = callback,
+                                                       .syncFence = ndk::ScopedFileDescriptor(-1)};
+        return ndk::ScopedAStatus::ok();
+    };
+}
+
+}  // namespace
+
+TEST(ExecutionTest, invalidExecution) {
+    // run test
+    const auto result = Execution::create(kInvalidExecution, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeSync) {
+    // setup call
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    const auto mockExecutionResult = ExecutionResult{
+            .outputSufficientSize = true,
+            .outputShapes = {},
+            .timing = kNoTiming,
+    };
+    EXPECT_CALL(*mockExecution, executeSynchronously(_, _))
+            .Times(1)
+            .WillOnce(
+                    DoAll(SetArgPointee<1>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
+
+    // run test
+    const auto result = execution->compute({});
+
+    // verify result
+    EXPECT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ExecutionTest, executeSyncError) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeSynchronously(_, _))
+            .Times(1)
+            .WillOnce(Invoke(makeGeneralFailure));
+
+    // run test
+    const auto result = execution->compute({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeSyncTransportFailure) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeSynchronously(_, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result = execution->compute({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeSyncDeadObject) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeSynchronously(_, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result = execution->compute({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ExecutionTest, executeFenced) {
+    // setup call
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    const auto mockCallback = MockFencedExecutionCallback::create();
+    EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+                            SetArgPointee<2>(ErrorStatus::NONE), Invoke(makeStatusOk)));
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    const auto& [syncFence, callback] = result.value();
+    EXPECT_EQ(syncFence.syncWait({}), nn::SyncFence::FenceState::SIGNALED);
+    ASSERT_NE(callback, nullptr);
+
+    // get results from callback
+    const auto callbackResult = callback();
+    ASSERT_TRUE(callbackResult.has_value()) << "Failed with " << callbackResult.error().code << ": "
+                                            << callbackResult.error().message;
+}
+
+TEST(ExecutionTest, executeFencedCallbackError) {
+    // setup call
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    const auto mockCallback = MockFencedExecutionCallback::create();
+    EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+                            SetArgPointee<2>(ErrorStatus::GENERAL_FAILURE),
+                            Invoke(makeStatusOk)));
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    const auto& [syncFence, callback] = result.value();
+    EXPECT_NE(syncFence.syncWait({}), nn::SyncFence::FenceState::ACTIVE);
+    ASSERT_NE(callback, nullptr);
+
+    // verify callback failure
+    const auto callbackResult = callback();
+    ASSERT_FALSE(callbackResult.has_value());
+    EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeFencedError) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeFencedTransportFailure) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ExecutionTest, executeFencedDeadObject) {
+    // setup test
+    const auto mockExecution = MockExecution::create();
+    const auto execution = Execution::create(mockExecution, {}).value();
+    EXPECT_CALL(*mockExecution, executeFenced(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result = execution->computeFenced({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+}  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/MockExecution.h b/neuralnetworks/aidl/utils/test/MockExecution.h
new file mode 100644
index 0000000..216f569
--- /dev/null
+++ b/neuralnetworks/aidl/utils/test/MockExecution.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_EXECUTION_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_EXECUTION_H
+
+#include <aidl/android/hardware/neuralnetworks/BnExecution.h>
+#include <android/binder_interface_utils.h>
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <hidl/HidlSupport.h>
+#include <hidl/Status.h>
+
+namespace aidl::android::hardware::neuralnetworks::utils {
+
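+// GMock implementation of the AIDL IExecution interface.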
+class MockExecution final : public BnExecution {
+  public:
+    static std::shared_ptr<MockExecution> create();
+
+    MOCK_METHOD(ndk::ScopedAStatus, executeSynchronously,
+                (int64_t deadline, ExecutionResult* executionResult), (override));
+    MOCK_METHOD(ndk::ScopedAStatus, executeFenced,
+                (const std::vector<ndk::ScopedFileDescriptor>& waitFor, int64_t deadline,
+                 int64_t duration, FencedExecutionResult* fencedExecutionResult),
+                (override));
+};
+
+inline std::shared_ptr<MockExecution> MockExecution::create() {
+    return ndk::SharedRefBase::make<MockExecution>();
+}
+
+}  // namespace aidl::android::hardware::neuralnetworks::utils
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_EXECUTION_H
diff --git a/neuralnetworks/aidl/utils/test/MockPreparedModel.h b/neuralnetworks/aidl/utils/test/MockPreparedModel.h
index a4ae2b7..0ed9af9 100644
--- a/neuralnetworks/aidl/utils/test/MockPreparedModel.h
+++ b/neuralnetworks/aidl/utils/test/MockPreparedModel.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL_H
 #define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_MOCK_PREPARED_MODEL_H
 
+#include <aidl/android/hardware/neuralnetworks/BnExecution.h>
 #include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
@@ -41,6 +42,10 @@
                 (override));
     MOCK_METHOD(ndk::ScopedAStatus, configureExecutionBurst, (std::shared_ptr<IBurst> * burst),
                 (override));
+    MOCK_METHOD(ndk::ScopedAStatus, createReusableExecution,
+                (const Request& request, bool measureTiming, int64_t loopTimeoutDuration,
+                 std::shared_ptr<IExecution>* execution),
+                (override));
 };
 
 inline std::shared_ptr<MockPreparedModel> MockPreparedModel::create() {
diff --git a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
index 8bb5c90..8cfb7c1 100644
--- a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
@@ -15,8 +15,10 @@
  */
 
 #include "MockBurst.h"
+#include "MockExecution.h"
 #include "MockFencedExecutionCallback.h"
 #include "MockPreparedModel.h"
+#include "TestUtils.h"
 
 #include <aidl/android/hardware/neuralnetworks/IFencedExecutionCallback.h>
 #include <gmock/gmock.h>
@@ -66,21 +68,23 @@
     };
 }
 
+class PreparedModelTest : public VersionedAidlUtilsTestBase {};
+
 }  // namespace
 
-TEST(PreparedModelTest, invalidPreparedModel) {
+TEST_P(PreparedModelTest, invalidPreparedModel) {
     // run test
-    const auto result = PreparedModel::create(kInvalidPreparedModel);
+    const auto result = PreparedModel::create(kInvalidPreparedModel, kVersion);
 
     // verify result
     ASSERT_FALSE(result.has_value());
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeSync) {
+TEST_P(PreparedModelTest, executeSync) {
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockExecutionResult = ExecutionResult{
             .outputSufficientSize = true,
             .outputShapes = {},
@@ -99,10 +103,10 @@
             << "Failed with " << result.error().code << ": " << result.error().message;
 }
 
-TEST(PreparedModelTest, executeSyncError) {
+TEST_P(PreparedModelTest, executeSyncError) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(Invoke(makeGeneralFailure));
@@ -115,10 +119,10 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeSyncTransportFailure) {
+TEST_P(PreparedModelTest, executeSyncTransportFailure) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
@@ -131,10 +135,10 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeSyncDeadObject) {
+TEST_P(PreparedModelTest, executeSyncDeadObject) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
@@ -147,10 +151,10 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, executeFenced) {
+TEST_P(PreparedModelTest, executeFenced) {
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockCallback = MockFencedExecutionCallback::create();
     EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
             .Times(1)
@@ -176,10 +180,10 @@
                                             << callbackResult.error().message;
 }
 
-TEST(PreparedModelTest, executeFencedCallbackError) {
+TEST_P(PreparedModelTest, executeFencedCallbackError) {
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockCallback = MockFencedExecutionCallback::create();
     EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
             .Times(1)
@@ -206,10 +210,10 @@
     EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeFencedError) {
+TEST_P(PreparedModelTest, executeFencedError) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
@@ -222,10 +226,10 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeFencedTransportFailure) {
+TEST_P(PreparedModelTest, executeFencedTransportFailure) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
@@ -238,10 +242,10 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, executeFencedDeadObject) {
+TEST_P(PreparedModelTest, executeFencedDeadObject) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
@@ -254,11 +258,13 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, reusableExecuteSync) {
+TEST_P(PreparedModelTest, reusableExecuteSync) {
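+    // FEATURE_LEVEL_8+ drivers expose reusable executions through createReusableExecution
+    // (covered below); this and the other reusable* tests exercise the adapter's fallback
+    // path, which re-issues executeSynchronously/executeFenced for each computation.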
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const uint32_t kNumberOfComputations = 2;
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockExecutionResult = ExecutionResult{
             .outputSufficientSize = true,
             .outputShapes = {},
@@ -283,10 +289,12 @@
     }
 }
 
-TEST(PreparedModelTest, reusableExecuteSyncError) {
+TEST_P(PreparedModelTest, reusableExecuteSyncError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(Invoke(makeGeneralFailure));
@@ -303,10 +311,12 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteSyncTransportFailure) {
+TEST_P(PreparedModelTest, reusableExecuteSyncTransportFailure) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
@@ -323,10 +333,12 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteSyncDeadObject) {
+TEST_P(PreparedModelTest, reusableExecuteSyncDeadObject) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
@@ -343,11 +355,13 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, reusableExecuteFenced) {
+TEST_P(PreparedModelTest, reusableExecuteFenced) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const uint32_t kNumberOfComputations = 2;
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockCallback = MockFencedExecutionCallback::create();
     EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
             .Times(kNumberOfComputations)
@@ -379,10 +393,12 @@
     }
 }
 
-TEST(PreparedModelTest, reusableExecuteFencedCallbackError) {
+TEST_P(PreparedModelTest, reusableExecuteFencedCallbackError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     const auto mockCallback = MockFencedExecutionCallback::create();
     EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
             .Times(1)
@@ -413,10 +429,12 @@
     EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteFencedError) {
+TEST_P(PreparedModelTest, reusableExecuteFencedError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
@@ -433,10 +451,12 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteFencedTransportFailure) {
+TEST_P(PreparedModelTest, reusableExecuteFencedTransportFailure) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
@@ -453,10 +473,12 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, reusableExecuteFencedDeadObject) {
+TEST_P(PreparedModelTest, reusableExecuteFencedDeadObject) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
     EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
@@ -473,14 +495,14 @@
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, configureExecutionBurst) {
+TEST_P(PreparedModelTest, configureExecutionBurst) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto mockBurst = ndk::SharedRefBase::make<MockBurst>();
     EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_))
             .Times(1)
             .WillOnce(DoAll(SetArgPointee<0>(mockBurst), Invoke(makeStatusOk)));
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto result = preparedModel->configureExecutionBurst();
@@ -491,13 +513,13 @@
     EXPECT_NE(result.value(), nullptr);
 }
 
-TEST(PreparedModelTest, configureExecutionBurstError) {
+TEST_P(PreparedModelTest, configureExecutionBurstError) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto result = preparedModel->configureExecutionBurst();
@@ -507,13 +529,13 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, configureExecutionBurstTransportFailure) {
+TEST_P(PreparedModelTest, configureExecutionBurstTransportFailure) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto result = preparedModel->configureExecutionBurst();
@@ -523,13 +545,13 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
 }
 
-TEST(PreparedModelTest, configureExecutionBurstDeadObject) {
+TEST_P(PreparedModelTest, configureExecutionBurstDeadObject) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     EXPECT_CALL(*mockPreparedModel, configureExecutionBurst(_))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto result = preparedModel->configureExecutionBurst();
@@ -539,10 +561,84 @@
     EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
-TEST(PreparedModelTest, getUnderlyingResource) {
+TEST_P(PreparedModelTest, createReusableExecution) {
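+    // The adapter forwards to IPreparedModel::createReusableExecution only for
+    // FEATURE_LEVEL_8+ drivers.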
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
+    const auto mockExecution = ndk::SharedRefBase::make<MockExecution>();
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<3>(mockExecution), Invoke(makeStatusOk)));
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+
+    // run test
+    const auto result = preparedModel->createReusableExecution({}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_NE(result.value(), nullptr);
+}
+
+TEST_P(PreparedModelTest, createReusableExecutionError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+
+    // run test
+    const auto result = preparedModel->createReusableExecution({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, createReusableExecutionTransportFailure) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+
+    // run test
+    const auto result = preparedModel->createReusableExecution({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, createReusableExecutionDeadObject) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+
+    // run test
+    const auto result = preparedModel->createReusableExecution({}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(PreparedModelTest, getUnderlyingResource) {
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
     const auto resource = preparedModel->getUnderlyingResource();
@@ -554,4 +650,6 @@
     EXPECT_EQ(maybeMock->get(), mockPreparedModel.get());
 }
 
+INSTANTIATE_VERSIONED_AIDL_UTILS_TEST(PreparedModelTest, kAllAidlVersions);
+
 }  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/TestUtils.cpp b/neuralnetworks/aidl/utils/test/TestUtils.cpp
new file mode 100644
index 0000000..9abec88
--- /dev/null
+++ b/neuralnetworks/aidl/utils/test/TestUtils.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "TestUtils.h"
+
+#include <android-base/logging.h>
+#include <gtest/gtest.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/CommonUtils.h>
+#include <string>
+
+namespace aidl::android::hardware::neuralnetworks::utils {
+
+std::string printTestVersion(const testing::TestParamInfo<nn::Version>& info) {
+    switch (info.param.level) {
+        case nn::Version::Level::FEATURE_LEVEL_5:
+            return "v1";
+        case nn::Version::Level::FEATURE_LEVEL_6:
+            return "v2";
+        case nn::Version::Level::FEATURE_LEVEL_7:
+            return "v3";
+        case nn::Version::Level::FEATURE_LEVEL_8:
+            return "v4";
+        default:
+            LOG(FATAL) << "Invalid AIDL version: " << info.param;
+            return "invalid";
+    }
+}
+
+}  // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/test/TestUtils.h b/neuralnetworks/aidl/utils/test/TestUtils.h
new file mode 100644
index 0000000..23f734a
--- /dev/null
+++ b/neuralnetworks/aidl/utils/test/TestUtils.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_TEST_UTILS_H
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_TEST_UTILS_H
+
+#include <gtest/gtest.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/CommonUtils.h>
+#include <string>
+
+namespace aidl::android::hardware::neuralnetworks::utils {
+
+class VersionedAidlUtilsTestBase : public ::testing::TestWithParam<nn::Version> {
+  protected:
+    const nn::Version kVersion = GetParam();
+};
+
+std::string printTestVersion(const testing::TestParamInfo<nn::Version>& info);
+
+inline const auto kAllAidlVersions =
+        ::testing::Values(nn::kVersionFeatureLevel5, nn::kVersionFeatureLevel6,
+                          nn::kVersionFeatureLevel7, nn::kVersionFeatureLevel8);
+
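+// Instantiates a versioned test suite over the given AIDL versions, labelling each
+// instantiation via printTestVersion (v1..v4).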
+#define INSTANTIATE_VERSIONED_AIDL_UTILS_TEST(TestSuite, versions) \
+    INSTANTIATE_TEST_SUITE_P(Versioned, TestSuite, versions, printTestVersion)
+
+}  // namespace aidl::android::hardware::neuralnetworks::utils
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_AIDL_UTILS_TEST_TEST_UTILS_H