/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "neuralnetworks_hidl_hal_test"

#include <android-base/logging.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <fcntl.h>
#include <ftw.h>
#include <gtest/gtest.h>
#include <hidlmemory/mapping.h>
#include <unistd.h>

#include <algorithm>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <random>
#include <thread>

#include "Callbacks.h"
#include "GeneratedTestHarness.h"
#include "TestHarness.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"

namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_2 {
namespace vts {
namespace functional {

using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
using ::android::nn::allocateSharedMemory;
using ::test_helper::MixedTypedExample;

namespace {

// Creates a HIDL model of mobilenet from the generated test files in
// frameworks/ml/nn/runtime/test/generated/.
#include "examples/mobilenet_224_gender_basic_fixed.example.cpp"
#include "vts_models/mobilenet_224_gender_basic_fixed.model.cpp"

// Prevent the compiler from complaining about an otherwise unused function.
[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;

enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };

// Creates cache handles based on provided file groups.
// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
                        const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
    handles->resize(fileGroups.size());
    for (uint32_t i = 0; i < fileGroups.size(); i++) {
        std::vector<int> fds;
        for (const auto& file : fileGroups[i]) {
            int fd;
            if (mode[i] == AccessMode::READ_ONLY) {
                fd = open(file.c_str(), O_RDONLY);
            } else if (mode[i] == AccessMode::WRITE_ONLY) {
                fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
            } else if (mode[i] == AccessMode::READ_WRITE) {
                fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
            } else {
                FAIL();
            }
            ASSERT_GE(fd, 0);
            fds.push_back(fd);
        }
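        // Package the fds into a native_handle with no ints; the hidl_handle below takes
        // ownership, so the fds are closed when the handle is destroyed.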
        native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
        ASSERT_NE(cacheNativeHandle, nullptr);
        std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
        (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
    }
}

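// Convenience overload: applies the same access mode to every file group.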
void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
                        hidl_vec<hidl_handle>* handles) {
    createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
}

// Creates a chain of broadcast operations. The second input of each operation is the constant
// tensor [1]. For simplicity, the activation scalar is shared across all operations. The constant
// [1] operands are deliberately not shared, so the driver has to maintain a non-trivial amount of
// constant data and the corresponding data locations in the cache.
//
//               --------- activation --------
//                ↓      ↓      ↓             ↓
// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
//                ↑      ↑      ↑             ↑
//               [1]    [1]    [1]           [1]
//
Model createLargeTestModel(OperationType op, uint32_t len) {
    // Model operations and operands.
    std::vector<Operation> operations(len);
    std::vector<Operand> operands(len * 2 + 2);

    // The constant buffer pool. This contains the activation scalar, followed by the
    // per-operation constant operands.
    std::vector<uint8_t> operandValues(sizeof(int32_t) + len * sizeof(float));

    // The activation scalar, value = 0.
    operands[0] = {
            .type = OperandType::INT32,
            .dimensions = {},
            .numberOfConsumers = len,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = OperandLifeTime::CONSTANT_COPY,
            .location = {.poolIndex = 0, .offset = 0, .length = sizeof(int32_t)},
    };
    memset(operandValues.data(), 0, sizeof(int32_t));

    const float floatBufferValue = 1.0f;
    for (uint32_t i = 0; i < len; i++) {
        const uint32_t firstInputIndex = i * 2 + 1;
        const uint32_t secondInputIndex = firstInputIndex + 1;
        const uint32_t outputIndex = secondInputIndex + 1;

        // The first operation input.
        operands[firstInputIndex] = {
                .type = OperandType::TENSOR_FLOAT32,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = 0.0f,
                .zeroPoint = 0,
                .lifetime = (i == 0 ? OperandLifeTime::MODEL_INPUT
                                    : OperandLifeTime::TEMPORARY_VARIABLE),
                .location = {},
        };

        // The second operation input, value = 1.
        operands[secondInputIndex] = {
                .type = OperandType::TENSOR_FLOAT32,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = 0.0f,
                .zeroPoint = 0,
                .lifetime = OperandLifeTime::CONSTANT_COPY,
                .location = {.poolIndex = 0,
                             .offset = static_cast<uint32_t>(i * sizeof(float) + sizeof(int32_t)),
                             .length = sizeof(float)},
        };
        memcpy(operandValues.data() + sizeof(int32_t) + i * sizeof(float), &floatBufferValue,
               sizeof(float));

        // The operation. All operations share the same activation scalar.
        // The output operand is created as an input in the next iteration of the loop, in the case
        // of all but the last member of the chain; and after the loop as a model output, in the
        // case of the last member of the chain.
        operations[i] = {
                .type = op,
                .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
                .outputs = {outputIndex},
        };
    }

    // The model output.
    operands.back() = {
            .type = OperandType::TENSOR_FLOAT32,
            .dimensions = {1},
            .numberOfConsumers = 0,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = OperandLifeTime::MODEL_OUTPUT,
            .location = {},
    };

    const std::vector<uint32_t> inputIndexes = {1};
    const std::vector<uint32_t> outputIndexes = {len * 2 + 1};
    const std::vector<hidl_memory> pools = {};

    return {
            .operands = operands,
            .operations = operations,
            .inputIndexes = inputIndexes,
            .outputIndexes = outputIndexes,
            .operandValues = operandValues,
            .pools = pools,
    };
}

// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
// This function assumes the operation is always ADD.
std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
    float outputValue = 1.0f + static_cast<float>(len);
    return {{.operands = {
                     // Input
                     {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {1.0f}}}},
                     // Output
                     {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {outputValue}}}}}}};
}

}  // namespace

// Tag for the compilation caching tests.
class CompilationCachingTest : public NeuralnetworksHidlTest {
  protected:
    void SetUp() override {
        NeuralnetworksHidlTest::SetUp();
        ASSERT_NE(device.get(), nullptr);

        // Create the cache directory. The cache directory and a temporary cache file are always
        // created to test the behavior of prepareModelFromCache, even when caching is not
        // supported.
        char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
        char* cacheDir = mkdtemp(cacheDirTemp);
        ASSERT_NE(cacheDir, nullptr);
        mCacheDir = cacheDir;
        mCacheDir.push_back('/');

        Return<void> ret = device->getNumberOfCacheFilesNeeded(
                [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
                    EXPECT_EQ(ErrorStatus::NONE, status);
                    mNumModelCache = numModelCache;
                    mNumDataCache = numDataCache;
                });
        EXPECT_TRUE(ret.isOk());
        mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;

        // Create empty cache files.
        mTmpCache = mCacheDir + "tmp";
        for (uint32_t i = 0; i < mNumModelCache; i++) {
            mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
        }
        for (uint32_t i = 0; i < mNumDataCache; i++) {
            mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
        }
        // Create dummy handles; AccessMode::WRITE_ONLY makes createCacheHandles create the files.
        hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
        createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
        createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
        createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);

        if (!mIsCachingSupported) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
                         "support compilation caching.";
            std::cout << "[          ] Early termination of test because vendor service does not "
                         "support compilation caching."
                      << std::endl;
        }
    }

    void TearDown() override {
        // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
        if (!::testing::Test::HasFailure()) {
            // Recursively remove the cache directory specified by mCacheDir.
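            // FTW_DEPTH visits directory contents before the directory itself, FTW_PHYS avoids
            // following symlinks, and FTW_MOUNT keeps the walk within a single filesystem.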
            auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
                return remove(entry);
            };
            nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
        }
        NeuralnetworksHidlTest::TearDown();
    }

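    // Compiles the given model with prepareModel_1_2, forwarding the provided cache handles and
    // mToken. Sets *supported to false and returns early if the service does not fully support
    // the model; otherwise optionally returns the result through *preparedModel.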
    void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
                          const hidl_vec<hidl_handle>& dataCache, bool* supported,
                          sp<IPreparedModel>* preparedModel = nullptr) {
        if (preparedModel != nullptr) *preparedModel = nullptr;

        // See if service can handle model.
        bool fullySupportsModel = false;
        Return<void> supportedCall = device->getSupportedOperations_1_2(
                model,
                [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
                    ASSERT_EQ(ErrorStatus::NONE, status);
                    ASSERT_EQ(supported.size(), model.operations.size());
                    fullySupportsModel = std::all_of(supported.begin(), supported.end(),
                                                     [](bool valid) { return valid; });
                });
        ASSERT_TRUE(supportedCall.isOk());
        *supported = fullySupportsModel;
        if (!fullySupportsModel) return;

        // Launch prepare model.
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        ASSERT_NE(nullptr, preparedModelCallback.get());
        hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
        Return<ErrorStatus> prepareLaunchStatus =
                device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
                                         dataCache, cacheToken, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);

        // Retrieve prepared model.
        preparedModelCallback->wait();
        ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
        if (preparedModel != nullptr) {
            *preparedModel =
                    V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
                            .withDefault(nullptr);
        }
    }

    bool checkEarlyTermination(ErrorStatus status) {
        if (status == ErrorStatus::GENERAL_FAILURE) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "save the prepared model that it does not support.";
            std::cout << "[          ] Early termination of test because vendor service cannot "
                         "save the prepared model that it does not support."
                      << std::endl;
            return true;
        }
        return false;
    }

    bool checkEarlyTermination(bool supported) {
        if (!supported) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "prepare model that it does not support.";
            std::cout << "[          ] Early termination of test because vendor service cannot "
                         "prepare model that it does not support."
                      << std::endl;
            return true;
        }
        return false;
    }

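    // Calls prepareModelFromCache with the provided cache handles and mToken, reporting the
    // resulting status and (possibly null) prepared model through the output parameters.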
    void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
                               const hidl_vec<hidl_handle>& dataCache,
                               sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
        // Launch prepare model from cache.
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        ASSERT_NE(nullptr, preparedModelCallback.get());
        hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
        Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
                modelCache, dataCache, cacheToken, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
            *preparedModel = nullptr;
            *status = static_cast<ErrorStatus>(prepareLaunchStatus);
            return;
        }

        // Retrieve prepared model.
        preparedModelCallback->wait();
        *status = preparedModelCallback->getStatus();
        *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
                                 .withDefault(nullptr);
    }

    // Absolute path to the temporary cache directory.
    std::string mCacheDir;

    // Groups of file paths for model and data cache in the tmp cache directory, initialized with
    // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
    // and the inner vector is for fds held by each handle.
    std::vector<std::vector<std::string>> mModelCache;
    std::vector<std::vector<std::string>> mDataCache;

    // A separate temporary file path in the tmp cache directory.
    std::string mTmpCache;

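    // Cache token identifying the compiled model, initialized to all zeros.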
    uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
    uint32_t mNumModelCache;
    uint32_t mNumDataCache;
    bool mIsCachingSupported;
};
| 369 | |
| 370 | TEST_F(CompilationCachingTest, CacheSavingAndRetrieval) { |
| 371 | // Create test HIDL model and compile. |
| 372 | Model testModel = createTestModel(); |
| 373 | sp<IPreparedModel> preparedModel = nullptr; |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 374 | |
| 375 | // Save the compilation to cache. |
| 376 | { |
Xusong Wang | ed0822b | 2019-02-25 16:58:58 -0800 | [diff] [blame] | 377 | bool supported; |
| 378 | hidl_vec<hidl_handle> modelCache, dataCache; |
| 379 | createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache); |
| 380 | createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache); |
| 381 | saveModelToCache(testModel, modelCache, dataCache, &supported); |
| 382 | if (checkEarlyTermination(supported)) return; |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 383 | } |
| 384 | |
| 385 | // Retrieve preparedModel from cache. |
| 386 | { |
| 387 | preparedModel = nullptr; |
| 388 | ErrorStatus status; |
Xusong Wang | ed0822b | 2019-02-25 16:58:58 -0800 | [diff] [blame] | 389 | hidl_vec<hidl_handle> modelCache, dataCache; |
| 390 | createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache); |
| 391 | createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache); |
| 392 | prepareModelFromCache(modelCache, dataCache, &preparedModel, &status); |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 393 | if (!mIsCachingSupported) { |
| 394 | ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE); |
| 395 | ASSERT_EQ(preparedModel, nullptr); |
| 396 | return; |
Xusong Wang | ed0822b | 2019-02-25 16:58:58 -0800 | [diff] [blame] | 397 | } else if (checkEarlyTermination(status)) { |
| 398 | ASSERT_EQ(preparedModel, nullptr); |
| 399 | return; |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 400 | } else { |
| 401 | ASSERT_EQ(status, ErrorStatus::NONE); |
| 402 | ASSERT_NE(preparedModel, nullptr); |
| 403 | } |
| 404 | } |
| 405 | |
| 406 | // Execute and verify results. |
| 407 | generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(), |
| 408 | testModel.relaxComputationFloat32toFloat16, |
| 409 | /*testDynamicOutputShape=*/false); |
| 410 | } |
| 411 | |
| 412 | TEST_F(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) { |
| 413 | // Create test HIDL model and compile. |
| 414 | Model testModel = createTestModel(); |
| 415 | sp<IPreparedModel> preparedModel = nullptr; |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 416 | |
| 417 | // Save the compilation to cache. |
| 418 | { |
Xusong Wang | ed0822b | 2019-02-25 16:58:58 -0800 | [diff] [blame] | 419 | bool supported; |
| 420 | hidl_vec<hidl_handle> modelCache, dataCache; |
| 421 | createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache); |
| 422 | createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache); |
| 423 | uint8_t dummyBytes[] = {0, 0}; |
| 424 | // Write a dummy integer to the cache. |
| 425 | // The driver should be able to handle non-empty cache and non-zero fd offset. |
| 426 | for (uint32_t i = 0; i < modelCache.size(); i++) { |
| 427 | ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes, |
| 428 | sizeof(dummyBytes)), |
| 429 | sizeof(dummyBytes)); |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 430 | } |
Xusong Wang | ed0822b | 2019-02-25 16:58:58 -0800 | [diff] [blame] | 431 | for (uint32_t i = 0; i < dataCache.size(); i++) { |
| 432 | ASSERT_EQ( |
| 433 | write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)), |
| 434 | sizeof(dummyBytes)); |
| 435 | } |
| 436 | saveModelToCache(testModel, modelCache, dataCache, &supported); |
| 437 | if (checkEarlyTermination(supported)) return; |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 438 | } |
| 439 | |
| 440 | // Retrieve preparedModel from cache. |
| 441 | { |
| 442 | preparedModel = nullptr; |
| 443 | ErrorStatus status; |
Xusong Wang | ed0822b | 2019-02-25 16:58:58 -0800 | [diff] [blame] | 444 | hidl_vec<hidl_handle> modelCache, dataCache; |
| 445 | createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache); |
| 446 | createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache); |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 447 | uint8_t dummyByte = 0; |
Xusong Wang | ed0822b | 2019-02-25 16:58:58 -0800 | [diff] [blame] | 448 | // Advance the offset of each handle by one byte. |
| 449 | // The driver should be able to handle non-zero fd offset. |
| 450 | for (uint32_t i = 0; i < modelCache.size(); i++) { |
| 451 | ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0); |
| 452 | } |
| 453 | for (uint32_t i = 0; i < dataCache.size(); i++) { |
| 454 | ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0); |
| 455 | } |
| 456 | prepareModelFromCache(modelCache, dataCache, &preparedModel, &status); |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 457 | if (!mIsCachingSupported) { |
| 458 | ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE); |
| 459 | ASSERT_EQ(preparedModel, nullptr); |
| 460 | return; |
Xusong Wang | ed0822b | 2019-02-25 16:58:58 -0800 | [diff] [blame] | 461 | } else if (checkEarlyTermination(status)) { |
| 462 | ASSERT_EQ(preparedModel, nullptr); |
| 463 | return; |
Xusong Wang | 96e68dc | 2019-01-18 17:28:26 -0800 | [diff] [blame] | 464 | } else { |
| 465 | ASSERT_EQ(status, ErrorStatus::NONE); |
| 466 | ASSERT_NE(preparedModel, nullptr); |
| 467 | } |
| 468 | } |
| 469 | |
| 470 | // Execute and verify results. |
| 471 | generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(), |
| 472 | testModel.relaxComputationFloat32toFloat16, |
| 473 | /*testDynamicOutputShape=*/false); |
| 474 | } |
| 475 | |
TEST_F(CompilationCachingTest, SaveToCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();

    // Test with number of model cache files greater than mNumModelCache.
    {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for model cache.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for data cache.
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();

    // Save the compilation to cache.
    {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Test with number of model cache files greater than mNumModelCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_F(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();

    // Save the compilation to cache.
    {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_F(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Save the compilation to cache.
    {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}

// Copy file contents between file groups.
// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
// The outer vector sizes must match and the inner vectors must have size = 1.
static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
                           const std::vector<std::vector<std::string>>& to) {
    constexpr size_t kBufferSize = 1000000;
    uint8_t buffer[kBufferSize];

    ASSERT_EQ(from.size(), to.size());
    for (uint32_t i = 0; i < from.size(); i++) {
        ASSERT_EQ(from[i].size(), 1u);
        ASSERT_EQ(to[i].size(), 1u);
        int fromFd = open(from[i][0].c_str(), O_RDONLY);
        int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
        ASSERT_GE(fromFd, 0);
        ASSERT_GE(toFd, 0);

        ssize_t readBytes;
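        // Stream the file contents in chunks; read() returns 0 at EOF and -1 on error.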
        while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
            ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
        }
        ASSERT_GE(readBytes, 0);

        close(fromFd);
        close(toFd);
    }
}

// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
constexpr uint32_t kNumIterationsTOCTOU = 100;

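// TOCTOU (time-of-check to time-of-use) tests: the cache files are overwritten with content from
// another compilation while the driver saves to or prepares from the cache. The driver may reject
// the tampered cache, but it must neither crash nor produce incorrect execution results.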
TEST_F(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Save the testModelMul compilation to cache.
    Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        bool supported;
        saveModelToCache(testModelMul, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            bool supported;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(testModelAdd, modelCache, dataCache, &supported);
            thread.join();
            if (checkEarlyTermination(supported)) return;
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but it must not crash. If it succeeds, the
            // prepared model must execute with correct results and must not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}

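// TOCTOU test for preparing from cache: the cache files are concurrently overwritten with
// testModelMul's cache while the driver prepares testModelAdd from cache. As above, preparation
// may fail or succeed, but it must not crash, and a successfully prepared model must execute
// with correct results.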
TEST_F(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Save the testModelMul compilation to cache.
    Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        bool supported;
        saveModelToCache(testModelMul, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            bool supported;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(testModelAdd, modelCache, dataCache, &supported);
            if (checkEarlyTermination(supported)) return;
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but it must not crash. If it succeeds, the
            // prepared model must execute with correct results and must not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}

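// Saves two different compilations under different tokens, then replaces the model cache files
// of testModelAdd with those of testModelMul. Preparing from the tampered cache must fail with
// GENERAL_FAILURE rather than silently serving the wrong model.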
TEST_F(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
    if (!mIsCachingSupported) return;

    // Save the testModelMul compilation to cache.
    Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        bool supported;
        saveModelToCache(testModelMul, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // Save the testModelAdd compilation to cache.
    Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelAdd, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Replace the model cache of testModelAdd with testModelMul.
    copyCacheFiles(modelCacheMul, mModelCache);

    // Retrieve the preparedModel from cache, expect failure.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}

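// Parameterized security tests. The test parameter is used as the seed of the random number
// generator, so each instantiation corrupts the cache (or token) in a different random way.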
class CompilationCachingSecurityTest : public CompilationCachingTest,
                                       public ::testing::WithParamInterface<uint32_t> {
  protected:
    void SetUp() {
        CompilationCachingTest::SetUp();
        generator.seed(kSeed);
    }

    // Get a random integer within a closed range [lower, upper].
    template <typename T>
    T getRandomInt(T lower, T upper) {
        std::uniform_int_distribution<T> dis(lower, upper);
        return dis(generator);
    }

    // Randomly flips a single bit of the cache entry. Sets *skip to true if the file is empty,
    // since there is then nothing to corrupt.
    void flipOneBitOfCache(const std::string& filename, bool* skip) {
        FILE* pFile = fopen(filename.c_str(), "r+");
        ASSERT_NE(pFile, nullptr);
        ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
        long int fileSize = ftell(pFile);
        if (fileSize == 0) {
            fclose(pFile);
            *skip = true;
            return;
        }
        ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
        int readByte = fgetc(pFile);
        ASSERT_NE(readByte, EOF);
        ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
        ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
        fclose(pFile);
        *skip = false;
    }

    // Randomly appends between 1 and 256 bytes to the cache entry.
    void appendBytesToCache(const std::string& filename, bool* skip) {
        FILE* pFile = fopen(filename.c_str(), "a");
        ASSERT_NE(pFile, nullptr);
        uint32_t appendLength = getRandomInt(1, 256);
        for (uint32_t i = 0; i < appendLength; i++) {
            // Draw the random byte from a wider integer type: std::uniform_int_distribution is
            // not specified for char-sized types such as uint8_t.
            ASSERT_NE(fputc(getRandomInt<uint32_t>(0, 255), pFile), EOF);
        }
        fclose(pFile);
        *skip = false;
    }

    enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };

    // Tests whether the driver behaves as expected when given a corrupted cache or token.
    // The modifier is invoked after the model is saved to cache and before it is prepared
    // from cache. It takes a single output parameter "skip", which it must set to indicate
    // whether the rest of the test should be skipped.
    void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
        Model testModel = createTestModel();

        // Save the compilation to cache.
        {
            bool supported;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(testModel, modelCache, dataCache, &supported);
            if (checkEarlyTermination(supported)) return;
        }

        bool skip = false;
        modifier(&skip);
        if (skip) return;

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            switch (expected) {
                case ExpectedResult::GENERAL_FAILURE:
                    ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
                    ASSERT_EQ(preparedModel, nullptr);
                    break;
                case ExpectedResult::NOT_CRASH:
                    ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
                    break;
                default:
                    FAIL();
            }
        }
    }

    const uint32_t kSeed = GetParam();
    std::mt19937 generator;
};

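// The model cache is treated as security sensitive: flipping a bit in, or appending bytes to, a
// model cache file must be detected and must cause preparation from cache to fail with
// GENERAL_FAILURE. For the data cache, the requirement is weaker: the driver must not crash and
// must return a consistent (status, preparedModel) pair, as checked by ExpectedResult::NOT_CRASH.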
TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
    if (!mIsCachingSupported) return;
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
                           [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
    }
}

TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
    if (!mIsCachingSupported) return;
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
                           [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
    }
}

TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
    if (!mIsCachingSupported) return;
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        testCorruptedCache(ExpectedResult::NOT_CRASH,
                           [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
    }
}

TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
    if (!mIsCachingSupported) return;
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        testCorruptedCache(ExpectedResult::NOT_CRASH,
                           [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
    }
}

TEST_P(CompilationCachingSecurityTest, WrongToken) {
    if (!mIsCachingSupported) return;
    testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
        // Randomly flip a single bit in mToken.
        uint32_t ind =
                getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
        mToken[ind] ^= (1U << getRandomInt(0, 7));
        *skip = false;
    });
}

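// Run every security test with seeds 0 through 9 so that a variety of random corruptions is
// covered.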
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        ::testing::Range(0U, 10U));

}  // namespace functional
}  // namespace vts
}  // namespace V1_2
}  // namespace neuralnetworks
}  // namespace hardware
}  // namespace android