Create unit tests for NN interface utility code

This CL introduces unit tests to validate the V1_X::utils::Device,
*PreparedModel, and *Buffer adapter classes. It does so by mocking the
underlying HIDL interface in order to simulate a driver returning bad
data, HIDL transport failures, and service crashes.
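
For illustration, the failure-injection pattern used throughout these
tests looks roughly like the following sketch (names taken from the
ResilientDeviceTest helpers added later in this diff):

    // Force the mocked driver call to fail and verify that the adapter
    // surfaces the error to the caller.
    const auto [mockDevice, mockDeviceFactory, device] = setup();
    EXPECT_CALL(*mockDevice, getSupportedOperations(_))
            .Times(1)
            .WillOnce(kReturnGeneralFailure);
    const auto result = device->getSupportedOperations({});
    ASSERT_FALSE(result.has_value());
    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);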

Note that the purpose of these new tests is to validate the adapter
classes themselves, not the HIDL interfaces they use. For example,
because nn::IPreparedModel does not currently define a method for
configuring a burst execution, V1_[23]::utils::PreparedModel similarly
does not use hardware::neuralnetworks::V1_[23]::IPreparedModel's
configureExecutionBurst method.

This CL also introduces unit tests to validate the utils::Resilient*
adapter classes, and mocks DEAD_OBJECT failures to ensure that the
underlying object can be recovered appropriately.
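
A rough sketch of the recovery pattern exercised by these tests (names
taken from the ResilientBufferTest code added below): the first call
reports DEAD_OBJECT, the factory is invoked to obtain a replacement
object, and the call is retried against the recovered object.

    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
    EXPECT_CALL(*mockBuffer, copyTo(_)).Times(1).WillOnce(kReturnDeadObject);
    const auto recoveredMockBuffer = std::make_shared<const nn::MockBuffer>();
    EXPECT_CALL(*recoveredMockBuffer, copyTo(_)).Times(1).WillOnce(Return(kNoError));
    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(Return(recoveredMockBuffer));
    EXPECT_TRUE(buffer->copyTo({}).has_value());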

Bug: 163801800
Test: mma
Test: atest neuralnetworks_utils_hal_common_test
Test: atest neuralnetworks_utils_hal_1_[0-3]_test
Change-Id: I2c79865bf666d3f4bf53061ff5090746403583e9
Merged-In: I2c79865bf666d3f4bf53061ff5090746403583e9
(cherry picked from commit afc4d7cfe753669b08562eba8f58cbceefed334f)
diff --git a/neuralnetworks/utils/common/test/MockBuffer.h b/neuralnetworks/utils/common/test/MockBuffer.h
new file mode 100644
index 0000000..c5405fb
--- /dev/null
+++ b/neuralnetworks/utils/common/test/MockBuffer.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <nnapi/IBuffer.h>
+#include <nnapi/Types.h>
+
+namespace android::nn {
+
+class MockBuffer final : public IBuffer {
+  public:
+    MOCK_METHOD(Request::MemoryDomainToken, getToken, (), (const, override));
+    MOCK_METHOD(GeneralResult<void>, copyTo, (const Memory& dst), (const, override));
+    MOCK_METHOD(GeneralResult<void>, copyFrom, (const Memory& src, const Dimensions& dimensions),
+                (const, override));
+};
+
+}  // namespace android::nn
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_BUFFER
diff --git a/neuralnetworks/utils/common/test/MockDevice.h b/neuralnetworks/utils/common/test/MockDevice.h
new file mode 100644
index 0000000..08cd5c5
--- /dev/null
+++ b/neuralnetworks/utils/common/test/MockDevice.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <nnapi/IDevice.h>
+
+namespace android::nn {
+
+class MockDevice final : public IDevice {
+  public:
+    MOCK_METHOD(const std::string&, getName, (), (const, override));
+    MOCK_METHOD(const std::string&, getVersionString, (), (const, override));
+    MOCK_METHOD(Version, getFeatureLevel, (), (const, override));
+    MOCK_METHOD(DeviceType, getType, (), (const, override));
+    MOCK_METHOD(const std::vector<Extension>&, getSupportedExtensions, (), (const, override));
+    MOCK_METHOD(const Capabilities&, getCapabilities, (), (const, override));
+    MOCK_METHOD((std::pair<uint32_t, uint32_t>), getNumberOfCacheFilesNeeded, (),
+                (const, override));
+    MOCK_METHOD(GeneralResult<void>, wait, (), (const, override));
+    MOCK_METHOD(GeneralResult<std::vector<bool>>, getSupportedOperations, (const Model& model),
+                (const, override));
+    MOCK_METHOD(GeneralResult<SharedPreparedModel>, prepareModel,
+                (const Model& model, ExecutionPreference preference, Priority priority,
+                 OptionalTimePoint deadline, const std::vector<SharedHandle>& modelCache,
+                 const std::vector<SharedHandle>& dataCache, const CacheToken& token),
+                (const, override));
+    MOCK_METHOD(GeneralResult<SharedPreparedModel>, prepareModelFromCache,
+                (OptionalTimePoint deadline, const std::vector<SharedHandle>& modelCache,
+                 const std::vector<SharedHandle>& dataCache, const CacheToken& token),
+                (const, override));
+    MOCK_METHOD(GeneralResult<SharedBuffer>, allocate,
+                (const BufferDesc& desc, const std::vector<SharedPreparedModel>& preparedModels,
+                 const std::vector<BufferRole>& inputRoles,
+                 const std::vector<BufferRole>& outputRoles),
+                (const, override));
+};
+
+}  // namespace android::nn
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_DEVICE
diff --git a/neuralnetworks/utils/common/test/MockPreparedModel.h b/neuralnetworks/utils/common/test/MockPreparedModel.h
new file mode 100644
index 0000000..928508e
--- /dev/null
+++ b/neuralnetworks/utils/common/test/MockPreparedModel.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL
+#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <nnapi/IPreparedModel.h>
+
+namespace android::nn {
+
+class MockPreparedModel final : public IPreparedModel {
+  public:
+    MOCK_METHOD((ExecutionResult<std::pair<std::vector<OutputShape>, Timing>>), execute,
+                (const Request& request, MeasureTiming measure, const OptionalTimePoint& deadline,
+                 const OptionalDuration& loopTimeoutDuration),
+                (const, override));
+    MOCK_METHOD((GeneralResult<std::pair<SyncFence, ExecuteFencedInfoCallback>>), executeFenced,
+                (const Request& request, const std::vector<SyncFence>& waitFor,
+                 MeasureTiming measure, const OptionalTimePoint& deadline,
+                 const OptionalDuration& loopTimeoutDuration,
+                 const OptionalDuration& timeoutDurationAfterFence),
+                (const, override));
+    MOCK_METHOD(std::any, getUnderlyingResource, (), (const, override));
+};
+
+}  // namespace android::nn
+
+#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_COMMON_TEST_MOCK_PREPARED_MODEL
diff --git a/neuralnetworks/utils/common/test/ResilientBufferTest.cpp b/neuralnetworks/utils/common/test/ResilientBufferTest.cpp
new file mode 100644
index 0000000..deb9b7c
--- /dev/null
+++ b/neuralnetworks/utils/common/test/ResilientBufferTest.cpp
@@ -0,0 +1,266 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gmock/gmock.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/ResilientBuffer.h>
+#include <tuple>
+#include <utility>
+#include "MockBuffer.h"
+
+namespace android::hardware::neuralnetworks::utils {
+namespace {
+
+using ::testing::_;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+
+constexpr auto kToken = nn::Request::MemoryDomainToken{1};
+
+using SharedMockBuffer = std::shared_ptr<const nn::MockBuffer>;
+using MockBufferFactory = ::testing::MockFunction<nn::GeneralResult<nn::SharedBuffer>()>;
+
+SharedMockBuffer createConfiguredMockBuffer() {
+    return std::make_shared<const nn::MockBuffer>();
+}
+
+std::tuple<SharedMockBuffer, std::unique_ptr<MockBufferFactory>,
+           std::shared_ptr<const ResilientBuffer>>
+setup() {
+    auto mockBuffer = std::make_shared<const nn::MockBuffer>();
+
+    auto mockBufferFactory = std::make_unique<MockBufferFactory>();
+    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(Return(mockBuffer));
+
+    auto buffer = ResilientBuffer::create(mockBufferFactory->AsStdFunction()).value();
+    return std::make_tuple(std::move(mockBuffer), std::move(mockBufferFactory), std::move(buffer));
+}
+
+constexpr auto makeError = [](nn::ErrorStatus status) {
+    return [status](const auto&... /*args*/) { return nn::error(status); };
+};
+const auto kReturnGeneralFailure = makeError(nn::ErrorStatus::GENERAL_FAILURE);
+const auto kReturnDeadObject = makeError(nn::ErrorStatus::DEAD_OBJECT);
+
+const auto kNoError = nn::GeneralResult<void>{};
+
+}  // namespace
+
+TEST(ResilientBufferTest, invalidBufferFactory) {
+    // setup call
+    const auto invalidBufferFactory = ResilientBuffer::Factory{};
+
+    // run test
+    const auto result = ResilientBuffer::create(invalidBufferFactory);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::INVALID_ARGUMENT);
+}
+
+TEST(ResilientBufferTest, bufferFactoryFailure) {
+    // setup call
+    const auto invalidBufferFactory = kReturnGeneralFailure;
+
+    // run test
+    const auto result = ResilientBuffer::create(invalidBufferFactory);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientBufferTest, getBuffer) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+
+    // run test
+    const auto result = buffer->getBuffer();
+
+    // verify result
+    EXPECT_TRUE(result == mockBuffer);
+}
+
+TEST(ResilientBufferTest, getToken) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, getToken()).Times(1).WillOnce(Return(kToken));
+
+    // run test
+    const auto token = buffer->getToken();
+
+    // verify result
+    EXPECT_EQ(token, kToken);
+}
+
+TEST(ResilientBufferTest, copyTo) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, copyTo(_)).Times(1).WillOnce(Return(kNoError));
+
+    // run test
+    const auto result = buffer->copyTo({});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientBufferTest, copyToError) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, copyTo(_)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = buffer->copyTo({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientBufferTest, copyToDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, copyTo(_)).Times(1).WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = buffer->copyTo({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientBufferTest, copyToDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, copyTo(_)).Times(1).WillOnce(kReturnDeadObject);
+    const auto recoveredMockBuffer = createConfiguredMockBuffer();
+    EXPECT_CALL(*recoveredMockBuffer, copyTo(_)).Times(1).WillOnce(Return(kNoError));
+    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(Return(recoveredMockBuffer));
+
+    // run test
+    const auto result = buffer->copyTo({});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientBufferTest, copyFrom) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, copyFrom(_, _)).Times(1).WillOnce(Return(kNoError));
+
+    // run test
+    const auto result = buffer->copyFrom({}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientBufferTest, copyFromError) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, copyFrom(_, _)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = buffer->copyFrom({}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientBufferTest, copyFromDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, copyFrom(_, _)).Times(1).WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = buffer->copyFrom({}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientBufferTest, copyFromDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    EXPECT_CALL(*mockBuffer, copyFrom(_, _)).Times(1).WillOnce(kReturnDeadObject);
+    const auto recoveredMockBuffer = createConfiguredMockBuffer();
+    EXPECT_CALL(*recoveredMockBuffer, copyFrom(_, _)).Times(1).WillOnce(Return(kNoError));
+    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(Return(recoveredMockBuffer));
+
+    // run test
+    const auto result = buffer->copyFrom({}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientBufferTest, recover) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    const auto recoveredMockBuffer = createConfiguredMockBuffer();
+    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(Return(recoveredMockBuffer));
+
+    // run test
+    const auto result = buffer->recover(mockBuffer.get());
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() == recoveredMockBuffer);
+}
+
+TEST(ResilientBufferTest, recoverFailure) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    const auto recoveredMockBuffer = createConfiguredMockBuffer();
+    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = buffer->recover(mockBuffer.get());
+
+    // verify result
+    EXPECT_FALSE(result.has_value());
+}
+
+TEST(ResilientBufferTest, someoneElseRecovered) {
+    // setup call
+    const auto [mockBuffer, mockBufferFactory, buffer] = setup();
+    const auto recoveredMockBuffer = createConfiguredMockBuffer();
+    EXPECT_CALL(*mockBufferFactory, Call()).Times(1).WillOnce(Return(recoveredMockBuffer));
+    buffer->recover(mockBuffer.get());
+
+    // run test
+    const auto result = buffer->recover(mockBuffer.get());
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() == recoveredMockBuffer);
+}
+
+}  // namespace android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp b/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp
new file mode 100644
index 0000000..3abd724
--- /dev/null
+++ b/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp
@@ -0,0 +1,725 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gmock/gmock.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/ResilientDevice.h>
+#include <tuple>
+#include <utility>
+#include "MockBuffer.h"
+#include "MockDevice.h"
+#include "MockPreparedModel.h"
+
+namespace android::hardware::neuralnetworks::utils {
+namespace {
+
+using ::testing::_;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+
+using SharedMockDevice = std::shared_ptr<const nn::MockDevice>;
+using MockDeviceFactory = ::testing::MockFunction<nn::GeneralResult<nn::SharedDevice>(bool)>;
+
+const std::string kName = "Google-MockV1";
+const std::string kVersionString = "version1";
+const auto kExtensions = std::vector<nn::Extension>{};
+constexpr auto kNoInfo = std::numeric_limits<float>::max();
+constexpr auto kNoPerformanceInfo =
+        nn::Capabilities::PerformanceInfo{.execTime = kNoInfo, .powerUsage = kNoInfo};
+const auto kCapabilities = nn::Capabilities{
+        .relaxedFloat32toFloat16PerformanceScalar = kNoPerformanceInfo,
+        .relaxedFloat32toFloat16PerformanceTensor = kNoPerformanceInfo,
+        .operandPerformance = nn::Capabilities::OperandPerformanceTable::create({}).value(),
+        .ifPerformance = kNoPerformanceInfo,
+        .whilePerformance = kNoPerformanceInfo};
+constexpr auto kNumberOfCacheFilesNeeded = std::pair<uint32_t, uint32_t>(5, 3);
+
+SharedMockDevice createConfiguredMockDevice() {
+    auto mockDevice = std::make_shared<const nn::MockDevice>();
+
+    // Create the default return values for each relevant call.
+    constexpr auto getName_ret = []() -> const std::string& { return kName; };
+    constexpr auto getVersionString_ret = []() -> const std::string& { return kVersionString; };
+    constexpr auto kFeatureLevel = nn::Version::ANDROID_OC_MR1;
+    constexpr auto kDeviceType = nn::DeviceType::ACCELERATOR;
+    constexpr auto getSupportedExtensions_ret = []() -> const std::vector<nn::Extension>& {
+        return kExtensions;
+    };
+    constexpr auto getCapabilities_ret = []() -> const nn::Capabilities& { return kCapabilities; };
+
+    // Setup default actions for each relevant call.
+    ON_CALL(*mockDevice, getName()).WillByDefault(getName_ret);
+    ON_CALL(*mockDevice, getVersionString()).WillByDefault(getVersionString_ret);
+    ON_CALL(*mockDevice, getFeatureLevel()).WillByDefault(Return(kFeatureLevel));
+    ON_CALL(*mockDevice, getType()).WillByDefault(Return(kDeviceType));
+    ON_CALL(*mockDevice, getSupportedExtensions()).WillByDefault(getSupportedExtensions_ret);
+    ON_CALL(*mockDevice, getCapabilities()).WillByDefault(getCapabilities_ret);
+    ON_CALL(*mockDevice, getNumberOfCacheFilesNeeded())
+            .WillByDefault(Return(kNumberOfCacheFilesNeeded));
+
+    // These EXPECT_CALL(...).Times(testing::AnyNumber()) calls are to suppress warnings on the
+    // uninteresting method calls.
+    EXPECT_CALL(*mockDevice, getName()).Times(testing::AnyNumber());
+    EXPECT_CALL(*mockDevice, getVersionString()).Times(testing::AnyNumber());
+    EXPECT_CALL(*mockDevice, getFeatureLevel()).Times(testing::AnyNumber());
+    EXPECT_CALL(*mockDevice, getType()).Times(testing::AnyNumber());
+    EXPECT_CALL(*mockDevice, getSupportedExtensions()).Times(testing::AnyNumber());
+    EXPECT_CALL(*mockDevice, getCapabilities()).Times(testing::AnyNumber());
+    EXPECT_CALL(*mockDevice, getNumberOfCacheFilesNeeded()).Times(testing::AnyNumber());
+
+    return mockDevice;
+}
+
+std::tuple<SharedMockDevice, std::unique_ptr<MockDeviceFactory>,
+           std::shared_ptr<const ResilientDevice>>
+setup() {
+    auto mockDevice = createConfiguredMockDevice();
+
+    auto mockDeviceFactory = std::make_unique<MockDeviceFactory>();
+    EXPECT_CALL(*mockDeviceFactory, Call(true)).Times(1).WillOnce(Return(mockDevice));
+
+    auto device = ResilientDevice::create(mockDeviceFactory->AsStdFunction()).value();
+    return std::make_tuple(std::move(mockDevice), std::move(mockDeviceFactory), std::move(device));
+}
+
+constexpr auto makeError = [](nn::ErrorStatus status) {
+    return [status](const auto&... /*args*/) { return nn::error(status); };
+};
+const auto kReturnGeneralFailure = makeError(nn::ErrorStatus::GENERAL_FAILURE);
+const auto kReturnDeadObject = makeError(nn::ErrorStatus::DEAD_OBJECT);
+
+}  // namespace
+
+TEST(ResilientDeviceTest, invalidDeviceFactory) {
+    // setup call
+    const auto invalidDeviceFactory = ResilientDevice::Factory{};
+
+    // run test
+    const auto result = ResilientDevice::create(invalidDeviceFactory);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::INVALID_ARGUMENT);
+}
+
+TEST(ResilientDeviceTest, deviceFactoryFailure) {
+    // setup call
+    const auto invalidDeviceFactory = kReturnGeneralFailure;
+
+    // run test
+    const auto result = ResilientDevice::create(invalidDeviceFactory);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientDeviceTest, cachedData) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+
+    // run test and verify results
+    EXPECT_EQ(device->getName(), kName);
+    EXPECT_EQ(device->getVersionString(), kVersionString);
+    EXPECT_EQ(device->getSupportedExtensions(), kExtensions);
+    EXPECT_EQ(device->getCapabilities(), kCapabilities);
+}
+
+TEST(ResilientDeviceTest, getFeatureLevel) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    constexpr auto kFeatureLevel = nn::Version::ANDROID_OC_MR1;
+    EXPECT_CALL(*mockDevice, getFeatureLevel()).Times(1).WillOnce(Return(kFeatureLevel));
+
+    // run test
+    const auto featureLevel = device->getFeatureLevel();
+
+    // verify results
+    EXPECT_EQ(featureLevel, kFeatureLevel);
+}
+
+TEST(ResilientDeviceTest, getType) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    constexpr auto kDeviceType = nn::DeviceType::ACCELERATOR;
+    EXPECT_CALL(*mockDevice, getType()).Times(1).WillOnce(Return(kDeviceType));
+
+    // run test
+    const auto type = device->getType();
+
+    // verify results
+    EXPECT_EQ(type, kDeviceType);
+}
+
+TEST(ResilientDeviceTest, getNumberOfCacheFilesNeeded) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, getNumberOfCacheFilesNeeded())
+            .Times(1)
+            .WillOnce(Return(kNumberOfCacheFilesNeeded));
+
+    // run test
+    const auto numberOfCacheFilesNeeded = device->getNumberOfCacheFilesNeeded();
+
+    // verify results
+    EXPECT_EQ(numberOfCacheFilesNeeded, kNumberOfCacheFilesNeeded);
+}
+
+TEST(ResilientDeviceTest, getDevice) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+
+    // run test
+    const auto result = device->getDevice();
+
+    // verify result
+    EXPECT_TRUE(result == mockDevice);
+}
+
+TEST(ResilientDeviceTest, wait) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, wait()).Times(1).WillOnce(Return(nn::GeneralResult<void>{}));
+
+    // run test
+    const auto result = device->wait();
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, waitError) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, wait()).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->wait();
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientDeviceTest, waitDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, wait()).Times(1).WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockDeviceFactory, Call(true)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->wait();
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientDeviceTest, waitDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, wait()).Times(1).WillOnce(kReturnDeadObject);
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*recoveredMockDevice, wait()).Times(1).WillOnce(Return(nn::GeneralResult<void>{}));
+    EXPECT_CALL(*mockDeviceFactory, Call(true)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->wait();
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, getSupportedOperations) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, getSupportedOperations(_))
+            .Times(1)
+            .WillOnce(Return(nn::GeneralResult<std::vector<bool>>{}));
+
+    // run test
+    const auto result = device->getSupportedOperations({});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, getSupportedOperationsError) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, getSupportedOperations(_)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->getSupportedOperations({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientDeviceTest, getSupportedOperationsDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, getSupportedOperations(_)).Times(1).WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->getSupportedOperations({});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientDeviceTest, getSupportedOperationsDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, getSupportedOperations(_)).Times(1).WillOnce(kReturnDeadObject);
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*recoveredMockDevice, getSupportedOperations(_))
+            .Times(1)
+            .WillOnce(Return(nn::GeneralResult<std::vector<bool>>{}));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->getSupportedOperations({});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, prepareModel) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
+    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Return(mockPreparedModel));
+
+    // run test
+    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, prepareModelError) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientDeviceTest, prepareModelDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientDeviceTest, prepareModelDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnDeadObject);
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    const auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
+    EXPECT_CALL(*recoveredMockDevice, prepareModel(_, _, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Return(mockPreparedModel));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, prepareModelFromCache) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
+    EXPECT_CALL(*mockDevice, prepareModelFromCache(_, _, _, _))
+            .Times(1)
+            .WillOnce(Return(mockPreparedModel));
+
+    // run test
+    const auto result = device->prepareModelFromCache({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, prepareModelFromCacheError) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, prepareModelFromCache(_, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->prepareModelFromCache({}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientDeviceTest, prepareModelFromCacheDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, prepareModelFromCache(_, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->prepareModelFromCache({}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientDeviceTest, prepareModelFromCacheDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, prepareModelFromCache(_, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnDeadObject);
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    const auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
+    EXPECT_CALL(*recoveredMockDevice, prepareModelFromCache(_, _, _, _))
+            .Times(1)
+            .WillOnce(Return(mockPreparedModel));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->prepareModelFromCache({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, allocate) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto mockBuffer = std::make_shared<const nn::MockBuffer>();
+    EXPECT_CALL(*mockDevice, allocate(_, _, _, _)).Times(1).WillOnce(Return(mockBuffer));
+
+    // run test
+    const auto result = device->allocate({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, allocateError) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, allocate(_, _, _, _)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->allocate({}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientDeviceTest, allocateDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, allocate(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->allocate({}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientDeviceTest, allocateDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    EXPECT_CALL(*mockDevice, allocate(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    const auto mockBuffer = std::make_shared<const nn::MockBuffer>();
+    EXPECT_CALL(*recoveredMockDevice, allocate(_, _, _, _)).Times(1).WillOnce(Return(mockBuffer));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->allocate({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientDeviceTest, recover) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() == recoveredMockDevice);
+}
+
+TEST(ResilientDeviceTest, recoverFailure) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*mockDeviceFactory, Call(_)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    EXPECT_FALSE(result.has_value());
+}
+
+TEST(ResilientDeviceTest, someoneElseRecovered) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+    device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() == recoveredMockDevice);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchGetName) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    const std::string kDifferentName = "Google-DifferentName";
+    const auto ret = [&kDifferentName]() -> const std::string& { return kDifferentName; };
+    EXPECT_CALL(*recoveredMockDevice, getName()).Times(1).WillOnce(ret);
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+    EXPECT_TRUE(result.value() != mockDevice);
+    EXPECT_TRUE(result.value() != recoveredMockDevice);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchGetVersionString) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    const std::string kDifferentVersionString = "differentversion";
+    const auto ret = [&kDifferentVersionString]() -> const std::string& {
+        return kDifferentVersionString;
+    };
+    EXPECT_CALL(*recoveredMockDevice, getVersionString()).Times(1).WillOnce(ret);
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+    EXPECT_TRUE(result.value() != mockDevice);
+    EXPECT_TRUE(result.value() != recoveredMockDevice);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchGetFeatureLevel) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*recoveredMockDevice, getFeatureLevel())
+            .Times(1)
+            .WillOnce(Return(nn::Version::ANDROID_P));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+    EXPECT_TRUE(result.value() != mockDevice);
+    EXPECT_TRUE(result.value() != recoveredMockDevice);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchGetType) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*recoveredMockDevice, getType()).Times(1).WillOnce(Return(nn::DeviceType::GPU));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+    EXPECT_TRUE(result.value() != mockDevice);
+    EXPECT_TRUE(result.value() != recoveredMockDevice);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchGetSupportedExtensions) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    const auto kDifferentExtensions =
+            std::vector<nn::Extension>{nn::Extension{.name = "", .operandTypes = {}}};
+    const auto ret = [&kDifferentExtensions]() -> const std::vector<nn::Extension>& {
+        return kDifferentExtensions;
+    };
+    EXPECT_CALL(*recoveredMockDevice, getSupportedExtensions()).Times(1).WillOnce(ret);
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+    EXPECT_TRUE(result.value() != mockDevice);
+    EXPECT_TRUE(result.value() != recoveredMockDevice);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchGetCapabilities) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    const auto kDifferentCapabilities = nn::Capabilities{
+            .relaxedFloat32toFloat16PerformanceTensor = {.execTime = 0.5f, .powerUsage = 0.5f},
+            .operandPerformance = nn::Capabilities::OperandPerformanceTable::create({}).value()};
+    const auto ret = [&kDifferentCapabilities]() -> const nn::Capabilities& {
+        return kDifferentCapabilities;
+    };
+    EXPECT_CALL(*recoveredMockDevice, getCapabilities()).Times(1).WillOnce(ret);
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+
+    // run test
+    const auto result = device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+    EXPECT_TRUE(result.value() != mockDevice);
+    EXPECT_TRUE(result.value() != recoveredMockDevice);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchInvalidPrepareModel) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*recoveredMockDevice, getType()).Times(1).WillOnce(Return(nn::DeviceType::GPU));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+    device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // run test
+    auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchInvalidPrepareModelFromCache) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*recoveredMockDevice, getType()).Times(1).WillOnce(Return(nn::DeviceType::GPU));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+    device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // run test
+    auto result = device->prepareModelFromCache({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+}
+
+TEST(ResilientDeviceTest, recoverCacheMismatchInvalidAllocate) {
+    // setup call
+    const auto [mockDevice, mockDeviceFactory, device] = setup();
+    const auto recoveredMockDevice = createConfiguredMockDevice();
+    EXPECT_CALL(*recoveredMockDevice, getType()).Times(1).WillOnce(Return(nn::DeviceType::GPU));
+    EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
+    device->recover(mockDevice.get(), /*blocking=*/false);
+
+    // run test
+    auto result = device->allocate({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() != nullptr);
+}
+
+}  // namespace android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp b/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
new file mode 100644
index 0000000..6d86e10
--- /dev/null
+++ b/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
@@ -0,0 +1,297 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gmock/gmock.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/ResilientPreparedModel.h>
+#include <utility>
+#include "MockPreparedModel.h"
+
+namespace android::hardware::neuralnetworks::utils {
+namespace {
+
+using ::testing::_;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+
+using SharedMockPreparedModel = std::shared_ptr<const nn::MockPreparedModel>;
+using MockPreparedModelFactory =
+        ::testing::MockFunction<nn::GeneralResult<nn::SharedPreparedModel>()>;
+
+SharedMockPreparedModel createConfiguredMockPreparedModel() {
+    return std::make_shared<const nn::MockPreparedModel>();
+}
+
+std::tuple<std::shared_ptr<const nn::MockPreparedModel>, std::unique_ptr<MockPreparedModelFactory>,
+           std::shared_ptr<const ResilientPreparedModel>>
+setup() {
+    auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
+
+    auto mockPreparedModelFactory = std::make_unique<MockPreparedModelFactory>();
+    EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(Return(mockPreparedModel));
+
+    auto buffer = ResilientPreparedModel::create(mockPreparedModelFactory->AsStdFunction()).value();
+    return std::make_tuple(std::move(mockPreparedModel), std::move(mockPreparedModelFactory),
+                           std::move(buffer));
+}
+
+constexpr auto makeError = [](nn::ErrorStatus status) {
+    return [status](const auto&... /*args*/) { return nn::error(status); };
+};
+const auto kReturnGeneralFailure = makeError(nn::ErrorStatus::GENERAL_FAILURE);
+const auto kReturnDeadObject = makeError(nn::ErrorStatus::DEAD_OBJECT);
+
+const auto kNoExecutionError =
+        nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>{};
+const auto kNoFencedExecutionError =
+        nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>(
+                std::make_pair(nn::SyncFence::createAsSignaled(), nullptr));
+
+struct FakeResource {};
+
+}  // namespace
+
+TEST(ResilientPreparedModelTest, invalidPreparedModelFactory) {
+    // setup call
+    const auto invalidPreparedModelFactory = ResilientPreparedModel::Factory{};
+
+    // run test
+    const auto result = ResilientPreparedModel::create(invalidPreparedModelFactory);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::INVALID_ARGUMENT);
+}
+
+TEST(ResilientPreparedModelTest, preparedModelFactoryFailure) {
+    // setup call
+    const auto invalidPreparedModelFactory = kReturnGeneralFailure;
+
+    // run test
+    const auto result = ResilientPreparedModel::create(invalidPreparedModelFactory);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientPreparedModelTest, getPreparedModel) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+
+    // run test
+    const auto result = preparedModel->getPreparedModel();
+
+    // verify result
+    EXPECT_TRUE(result == mockPreparedModel);
+}
+
+TEST(ResilientPreparedModelTest, execute) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _))
+            .Times(1)
+            .WillOnce(Return(kNoExecutionError));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientPreparedModelTest, executeError) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientPreparedModelTest, executeDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+    constexpr auto ret = [] { return nn::error(nn::ErrorStatus::GENERAL_FAILURE); };
+    EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(ret);
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientPreparedModelTest, executeDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*recoveredMockPreparedModel, execute(_, _, _, _))
+            .Times(1)
+            .WillOnce(Return(kNoExecutionError));
+    EXPECT_CALL(*mockPreparedModelFactory, Call())
+            .Times(1)
+            .WillOnce(Return(recoveredMockPreparedModel));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientPreparedModelTest, executeFenced) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Return(kNoFencedExecutionError));
+
+    // run test
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientPreparedModelTest, executeFencedError) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientPreparedModelTest, executeFencedDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientPreparedModelTest, executeFencedDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnDeadObject);
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*recoveredMockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Return(kNoFencedExecutionError));
+    EXPECT_CALL(*mockPreparedModelFactory, Call())
+            .Times(1)
+            .WillOnce(Return(recoveredMockPreparedModel));
+
+    // run test
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientPreparedModelTest, getUnderlyingResource) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, getUnderlyingResource())
+            .Times(1)
+            .WillOnce(Return(FakeResource{}));
+
+    // run test
+    const auto resource = preparedModel->getUnderlyingResource();
+
+    // verify resource
+    const FakeResource* maybeFakeResource = std::any_cast<FakeResource>(&resource);
+    EXPECT_NE(maybeFakeResource, nullptr);
+}
+
+TEST(ResilientPreparedModelTest, recover) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*mockPreparedModelFactory, Call())
+            .Times(1)
+            .WillOnce(Return(recoveredMockPreparedModel));
+
+    // run test
+    const auto result = preparedModel->recover(mockPreparedModel.get());
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() == recoveredMockPreparedModel);
+}
+
+TEST(ResilientPreparedModelTest, recoverFailure) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = preparedModel->recover(mockPreparedModel.get());
+
+    // verify result
+    EXPECT_FALSE(result.has_value());
+}
+
+TEST(ResilientPreparedModelTest, someoneElseRecovered) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*mockPreparedModelFactory, Call())
+            .Times(1)
+            .WillOnce(Return(recoveredMockPreparedModel));
+    preparedModel->recover(mockPreparedModel.get());
+
+    // run test
+    const auto result = preparedModel->recover(mockPreparedModel.get());
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() == recoveredMockPreparedModel);
+}
+
+}  // namespace android::hardware::neuralnetworks::utils