Invalidate NN interface objects on cache mismatch

Currently, if a call on an IDevice object fails with DEAD_OBJECT, the
runtime attempts to re-retrieve a handle to the restarted IDevice
service. If the driver was updated after the IDevice was originally
created, the restarted IDevice object may have different metadata and
behavior. This is problematic because the original metadata is cached
in the runtime, and an application might already have made decisions
based on that cached metadata and behavior. (Note that even a driver
service that is functionally the same but has a different underlying
implementation, e.g. more optimized code, will report different
`getVersionString` metadata.) Instead, this CL invalidates the IDevice
object on a cache mismatch, so that any subsequent use of it returns
an error.
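
For illustration, the invalid/"null" object follows the usual
always-failing pattern. The sketch below is not the actual
InvalidDevice implementation: the method signature simply mirrors the
prepareModelInternal parameters in this diff, and the NN_ERROR helper
is assumed from the canonical NN utils. InvalidPreparedModel and
InvalidBuffer follow the same pattern.

    class InvalidDevice final : public nn::IDevice {
      public:
        // Fail immediately instead of forwarding to a driver whose
        // metadata no longer matches what the runtime has cached.
        nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
                const nn::Model& /*model*/, nn::ExecutionPreference /*preference*/,
                nn::Priority /*priority*/, nn::OptionalTimePoint /*deadline*/,
                const std::vector<nn::SharedHandle>& /*modelCache*/,
                const std::vector<nn::SharedHandle>& /*dataCache*/,
                const nn::CacheToken& /*token*/) const override {
            return NN_ERROR() << "InvalidDevice";
        }
        // ...the remaining IDevice methods fail the same way...
    };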

Bug: 173081926
Test: mma
Change-Id: I805987361c627c32d45e1b7c7aed230376fc66ad
Merged-In: I805987361c627c32d45e1b7c7aed230376fc66ad
(cherry picked from commit 5a74c0fb0f23474a89471c49111e5ab526735392)
diff --git a/neuralnetworks/utils/common/src/ResilientDevice.cpp b/neuralnetworks/utils/common/src/ResilientDevice.cpp
index 26025a5..2f83c5c 100644
--- a/neuralnetworks/utils/common/src/ResilientDevice.cpp
+++ b/neuralnetworks/utils/common/src/ResilientDevice.cpp
@@ -16,6 +16,9 @@
 
 #include "ResilientDevice.h"
 
+#include "InvalidBuffer.h"
+#include "InvalidDevice.h"
+#include "InvalidPreparedModel.h"
 #include "ResilientBuffer.h"
 #include "ResilientPreparedModel.h"
 
@@ -107,12 +110,21 @@
     }
     auto device = std::move(maybeDevice).value();
 
-    // TODO(b/173081926): Instead of CHECKing to ensure the cache has not been changed, return an
-    // invalid/"null" IDevice object that always fails.
-    CHECK_EQ(kName, device->getName());
-    CHECK_EQ(kVersionString, device->getVersionString());
-    CHECK(kExtensions == device->getSupportedExtensions());
-    CHECK_EQ(kCapabilities, device->getCapabilities());
+    // If the recovered device has different metadata than what is cached (e.g., because it was
+    // updated), mark the device as invalid and preserve the cached data.
+    auto compare = [this, &device](auto fn) REQUIRES(mMutex) {
+        return std::invoke(fn, mDevice) != std::invoke(fn, device);
+    };
+    if (compare(&IDevice::getName) || compare(&IDevice::getVersionString) ||
+        compare(&IDevice::getFeatureLevel) || compare(&IDevice::getType) ||
+        compare(&IDevice::getSupportedExtensions) || compare(&IDevice::getCapabilities)) {
+        LOG(ERROR) << "Recovered device has different metadata than what is cached. Marking "
+                      "IDevice object as invalid.";
+        device = std::make_shared<const InvalidDevice>(
+                kName, kVersionString, mDevice->getFeatureLevel(), mDevice->getType(), kExtensions,
+                kCapabilities, mDevice->getNumberOfCacheFilesNeeded());
+        mIsValid = false;
+    }
 
     mDevice = std::move(device);
     return mDevice;
@@ -199,11 +211,19 @@
     return ResilientBuffer::create(std::move(makeBuffer));
 }
 
+bool ResilientDevice::isValidInternal() const {
+    std::lock_guard hold(mMutex);
+    return mIsValid;
+}
+
 nn::GeneralResult<nn::SharedPreparedModel> ResilientDevice::prepareModelInternal(
         bool blocking, const nn::Model& model, nn::ExecutionPreference preference,
         nn::Priority priority, nn::OptionalTimePoint deadline,
         const std::vector<nn::SharedHandle>& modelCache,
         const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+    if (!isValidInternal()) {
+        return std::make_shared<const InvalidPreparedModel>();
+    }
     const auto fn = [&model, preference, priority, deadline, &modelCache, &dataCache,
                      token](const nn::IDevice& device) {
         return device.prepareModel(model, preference, priority, deadline, modelCache, dataCache,
@@ -216,6 +236,9 @@
         bool blocking, nn::OptionalTimePoint deadline,
         const std::vector<nn::SharedHandle>& modelCache,
         const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+    if (!isValidInternal()) {
+        return std::make_shared<const InvalidPreparedModel>();
+    }
     const auto fn = [deadline, &modelCache, &dataCache, token](const nn::IDevice& device) {
         return device.prepareModelFromCache(deadline, modelCache, dataCache, token);
     };
@@ -227,6 +250,9 @@
         const std::vector<nn::SharedPreparedModel>& preparedModels,
         const std::vector<nn::BufferRole>& inputRoles,
         const std::vector<nn::BufferRole>& outputRoles) const {
+    if (!isValidInternal()) {
+        return std::make_shared<const InvalidBuffer>();
+    }
     const auto fn = [&desc, &preparedModels, &inputRoles, &outputRoles](const nn::IDevice& device) {
         return device.allocate(desc, preparedModels, inputRoles, outputRoles);
     };