blob: bf915603bc7aa3a3157aa7df21fb2b7b1c9b3b59 [file] [log] [blame]
Xusong Wang96e68dc2019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070019#include <android-base/logging.h>
20#include <android/hidl/memory/1.0/IMemory.h>
21#include <ftw.h>
22#include <gtest/gtest.h>
23#include <hidlmemory/mapping.h>
24#include <unistd.h>
25
26#include <cstdio>
27#include <cstdlib>
28#include <random>
Xusong Wang96e68dc2019-01-18 17:28:26 -080029
30#include "Callbacks.h"
31#include "GeneratedTestHarness.h"
32#include "TestHarness.h"
33#include "Utils.h"
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070034#include "VtsHalNeuralnetworks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080035
36namespace android {
37namespace hardware {
38namespace neuralnetworks {
39namespace V1_2 {
40namespace vts {
41namespace functional {
42
43using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
44using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
45using ::android::nn::allocateSharedMemory;
46using ::test_helper::MixedTypedExample;
47
48namespace {
49
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070050// In frameworks/ml/nn/runtime/test/generated/, creates a hidl model of mobilenet.
Xusong Wang96e68dc2019-01-18 17:28:26 -080051#include "examples/mobilenet_224_gender_basic_fixed.example.cpp"
52#include "vts_models/mobilenet_224_gender_basic_fixed.model.cpp"
53
54// Prevent the compiler from complaining about an otherwise unused function.
55[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
56[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;
57
Xusong Wanged0822b2019-02-25 16:58:58 -080058enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };
Xusong Wang96e68dc2019-01-18 17:28:26 -080059
Xusong Wanged0822b2019-02-25 16:58:58 -080060// Creates cache handles based on provided file groups.
61// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
62void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
63 const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
64 handles->resize(fileGroups.size());
65 for (uint32_t i = 0; i < fileGroups.size(); i++) {
66 std::vector<int> fds;
67 for (const auto& file : fileGroups[i]) {
68 int fd;
69 if (mode[i] == AccessMode::READ_ONLY) {
70 fd = open(file.c_str(), O_RDONLY);
71 } else if (mode[i] == AccessMode::WRITE_ONLY) {
72 fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
73 } else if (mode[i] == AccessMode::READ_WRITE) {
74 fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
75 } else {
76 FAIL();
77 }
78 ASSERT_GE(fd, 0);
79 fds.push_back(fd);
Xusong Wang96e68dc2019-01-18 17:28:26 -080080 }
Xusong Wanged0822b2019-02-25 16:58:58 -080081 native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
82 ASSERT_NE(cacheNativeHandle, nullptr);
83 std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
84 (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
Xusong Wang96e68dc2019-01-18 17:28:26 -080085 }
Xusong Wanged0822b2019-02-25 16:58:58 -080086}
87
88void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
89 hidl_vec<hidl_handle>* handles) {
90 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang96e68dc2019-01-18 17:28:26 -080091}
92
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070093// Create a chain of broadcast operations. The second operand is always constant tensor [1].
94// For simplicity, activation scalar is shared. The second operand is not shared
95// in the model to let driver maintain a non-trivial size of constant data and the corresponding
96// data locations in cache.
97//
98// --------- activation --------
99// ↓ ↓ ↓ ↓
100// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
101// ↑ ↑ ↑ ↑
102// [1] [1] [1] [1]
103//
104Model createLargeTestModel(OperationType op, uint32_t len) {
105 // Model operations and operands.
106 std::vector<Operation> operations(len);
107 std::vector<Operand> operands(len * 2 + 2);
108
109 // The constant buffer pool. This contains the activation scalar, followed by the
110 // per-operation constant operands.
111 std::vector<uint8_t> operandValues(sizeof(int32_t) + len * sizeof(float));
112
113 // The activation scalar, value = 0.
114 operands[0] = {
115 .type = OperandType::INT32,
116 .dimensions = {},
117 .numberOfConsumers = len,
118 .scale = 0.0f,
119 .zeroPoint = 0,
120 .lifetime = OperandLifeTime::CONSTANT_COPY,
121 .location = {.poolIndex = 0, .offset = 0, .length = sizeof(int32_t)},
122 };
123 memset(operandValues.data(), 0, sizeof(int32_t));
124
125 const float floatBufferValue = 1.0f;
126 for (uint32_t i = 0; i < len; i++) {
127 const uint32_t firstInputIndex = i * 2 + 1;
128 const uint32_t secondInputIndex = firstInputIndex + 1;
129 const uint32_t outputIndex = secondInputIndex + 1;
130
131 // The first operation input.
132 operands[firstInputIndex] = {
133 .type = OperandType::TENSOR_FLOAT32,
134 .dimensions = {1},
135 .numberOfConsumers = 1,
136 .scale = 0.0f,
137 .zeroPoint = 0,
138 .lifetime = (i == 0 ? OperandLifeTime::MODEL_INPUT
139 : OperandLifeTime::TEMPORARY_VARIABLE),
140 .location = {},
141 };
142
143 // The second operation input, value = 1.
144 operands[secondInputIndex] = {
145 .type = OperandType::TENSOR_FLOAT32,
146 .dimensions = {1},
147 .numberOfConsumers = 1,
148 .scale = 0.0f,
149 .zeroPoint = 0,
150 .lifetime = OperandLifeTime::CONSTANT_COPY,
151 .location = {.poolIndex = 0,
152 .offset = static_cast<uint32_t>(i * sizeof(float) + sizeof(int32_t)),
153 .length = sizeof(float)},
154 };
155 memcpy(operandValues.data() + sizeof(int32_t) + i * sizeof(float), &floatBufferValue,
156 sizeof(float));
157
158 // The operation. All operations share the same activation scalar.
159 // The output operand is created as an input in the next iteration of the loop, in the case
160 // of all but the last member of the chain; and after the loop as a model output, in the
161 // case of the last member of the chain.
162 operations[i] = {
163 .type = op,
164 .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
165 .outputs = {outputIndex},
166 };
167 }
168
169 // The model output.
170 operands.back() = {
171 .type = OperandType::TENSOR_FLOAT32,
172 .dimensions = {1},
173 .numberOfConsumers = 0,
174 .scale = 0.0f,
175 .zeroPoint = 0,
176 .lifetime = OperandLifeTime::MODEL_OUTPUT,
177 .location = {},
178 };
179
180 const std::vector<uint32_t> inputIndexes = {1};
181 const std::vector<uint32_t> outputIndexes = {len * 2 + 1};
182 const std::vector<hidl_memory> pools = {};
183
184 return {
185 .operands = operands,
186 .operations = operations,
187 .inputIndexes = inputIndexes,
188 .outputIndexes = outputIndexes,
189 .operandValues = operandValues,
190 .pools = pools,
191 };
192}
193
194// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
195// This function assumes the operation is always ADD.
196std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
197 float outputValue = 1.0f + static_cast<float>(len);
198 return {{.operands = {
199 // Input
200 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {1.0f}}}},
201 // Output
202 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {outputValue}}}}}}};
203};
204
Xusong Wang96e68dc2019-01-18 17:28:26 -0800205} // namespace
206
207// Tag for the compilation caching tests.
208class CompilationCachingTest : public NeuralnetworksHidlTest {
209 protected:
210 void SetUp() override {
211 NeuralnetworksHidlTest::SetUp();
Hervé Guihotac7ac522019-02-12 16:22:44 -0800212 ASSERT_NE(device.get(), nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800213
Xusong Wanged0822b2019-02-25 16:58:58 -0800214 // Create cache directory. The cache directory and a temporary cache file is always created
215 // to test the behavior of prepareModelFromCache, even when caching is not supported.
Xusong Wang96e68dc2019-01-18 17:28:26 -0800216 char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
217 char* cacheDir = mkdtemp(cacheDirTemp);
218 ASSERT_NE(cacheDir, nullptr);
Xusong Wang6824cc12019-02-12 18:00:37 -0800219 mCacheDir = cacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800220 mCacheDir.push_back('/');
Xusong Wang6824cc12019-02-12 18:00:37 -0800221
Xusong Wanged0822b2019-02-25 16:58:58 -0800222 Return<void> ret = device->getNumberOfCacheFilesNeeded(
223 [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800224 EXPECT_EQ(ErrorStatus::NONE, status);
Xusong Wanged0822b2019-02-25 16:58:58 -0800225 mNumModelCache = numModelCache;
226 mNumDataCache = numDataCache;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800227 });
228 EXPECT_TRUE(ret.isOk());
Xusong Wanged0822b2019-02-25 16:58:58 -0800229 mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;
230
231 // Create empty cache files.
232 mTmpCache = mCacheDir + "tmp";
233 for (uint32_t i = 0; i < mNumModelCache; i++) {
234 mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
235 }
236 for (uint32_t i = 0; i < mNumDataCache; i++) {
237 mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
238 }
239 // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
240 hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
241 createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
242 createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
243 createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);
244
245 if (!mIsCachingSupported) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800246 LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
247 "support compilation caching.";
248 std::cout << "[ ] Early termination of test because vendor service does not "
249 "support compilation caching."
250 << std::endl;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800251 }
Xusong Wang6824cc12019-02-12 18:00:37 -0800252 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800253
Xusong Wang6824cc12019-02-12 18:00:37 -0800254 void TearDown() override {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700255 // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
256 if (!::testing::Test::HasFailure()) {
257 // Recursively remove the cache directory specified by mCacheDir.
258 auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
259 return remove(entry);
260 };
261 nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
Xusong Wang6824cc12019-02-12 18:00:37 -0800262 }
263 NeuralnetworksHidlTest::TearDown();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800264 }
265
Xusong Wang4f71afc2019-04-26 15:33:38 -0700266 // See if the service can handle the model.
267 bool isModelFullySupported(const V1_2::Model& model) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800268 bool fullySupportsModel = false;
269 Return<void> supportedCall = device->getSupportedOperations_1_2(
270 model,
271 [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
272 ASSERT_EQ(ErrorStatus::NONE, status);
273 ASSERT_EQ(supported.size(), model.operations.size());
274 fullySupportsModel = std::all_of(supported.begin(), supported.end(),
275 [](bool valid) { return valid; });
276 });
Xusong Wang4f71afc2019-04-26 15:33:38 -0700277 EXPECT_TRUE(supportedCall.isOk());
278 return fullySupportsModel;
279 }
280
281 void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
282 const hidl_vec<hidl_handle>& dataCache,
283 sp<IPreparedModel>* preparedModel = nullptr) {
284 if (preparedModel != nullptr) *preparedModel = nullptr;
Xusong Wanged0822b2019-02-25 16:58:58 -0800285
286 // Launch prepare model.
287 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
288 ASSERT_NE(nullptr, preparedModelCallback.get());
Xusong Wang96e68dc2019-01-18 17:28:26 -0800289 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800290 Return<ErrorStatus> prepareLaunchStatus =
291 device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
292 dataCache, cacheToken, preparedModelCallback);
293 ASSERT_TRUE(prepareLaunchStatus.isOk());
294 ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);
295
296 // Retrieve prepared model.
297 preparedModelCallback->wait();
298 ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
299 if (preparedModel != nullptr) {
300 *preparedModel =
301 V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
302 .withDefault(nullptr);
303 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800304 }
305
306 bool checkEarlyTermination(ErrorStatus status) {
307 if (status == ErrorStatus::GENERAL_FAILURE) {
308 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
309 "save the prepared model that it does not support.";
310 std::cout << "[ ] Early termination of test because vendor service cannot "
311 "save the prepared model that it does not support."
312 << std::endl;
313 return true;
314 }
315 return false;
316 }
317
Xusong Wang4f71afc2019-04-26 15:33:38 -0700318 bool checkEarlyTermination(const V1_2::Model& model) {
319 if (!isModelFullySupported(model)) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800320 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
321 "prepare model that it does not support.";
322 std::cout << "[ ] Early termination of test because vendor service cannot "
323 "prepare model that it does not support."
324 << std::endl;
325 return true;
326 }
327 return false;
328 }
329
330 void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
331 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang96e68dc2019-01-18 17:28:26 -0800332 sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
333 // Launch prepare model from cache.
334 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
335 ASSERT_NE(nullptr, preparedModelCallback.get());
336 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800337 Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
338 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800339 ASSERT_TRUE(prepareLaunchStatus.isOk());
340 if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
341 *preparedModel = nullptr;
342 *status = static_cast<ErrorStatus>(prepareLaunchStatus);
343 return;
344 }
345
346 // Retrieve prepared model.
347 preparedModelCallback->wait();
348 *status = preparedModelCallback->getStatus();
349 *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
350 .withDefault(nullptr);
351 }
352
Xusong Wanged0822b2019-02-25 16:58:58 -0800353 // Absolute path to the temporary cache directory.
Xusong Wang6824cc12019-02-12 18:00:37 -0800354 std::string mCacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800355
356 // Groups of file paths for model and data cache in the tmp cache directory, initialized with
357 // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
358 // and the inner vector is for fds held by each handle.
359 std::vector<std::vector<std::string>> mModelCache;
360 std::vector<std::vector<std::string>> mDataCache;
361
362 // A separate temporary file path in the tmp cache directory.
363 std::string mTmpCache;
364
Xusong Wang96e68dc2019-01-18 17:28:26 -0800365 uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
Xusong Wanged0822b2019-02-25 16:58:58 -0800366 uint32_t mNumModelCache;
367 uint32_t mNumDataCache;
368 uint32_t mIsCachingSupported;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800369};
370
371TEST_F(CompilationCachingTest, CacheSavingAndRetrieval) {
372 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700373 const Model testModel = createTestModel();
374 if (checkEarlyTermination(testModel)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800375 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800376
377 // Save the compilation to cache.
378 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800379 hidl_vec<hidl_handle> modelCache, dataCache;
380 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
381 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -0700382 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800383 }
384
385 // Retrieve preparedModel from cache.
386 {
387 preparedModel = nullptr;
388 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800389 hidl_vec<hidl_handle> modelCache, dataCache;
390 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
391 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
392 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800393 if (!mIsCachingSupported) {
394 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
395 ASSERT_EQ(preparedModel, nullptr);
396 return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800397 } else if (checkEarlyTermination(status)) {
398 ASSERT_EQ(preparedModel, nullptr);
399 return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800400 } else {
401 ASSERT_EQ(status, ErrorStatus::NONE);
402 ASSERT_NE(preparedModel, nullptr);
403 }
404 }
405
406 // Execute and verify results.
407 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
408 testModel.relaxComputationFloat32toFloat16,
409 /*testDynamicOutputShape=*/false);
410}
411
412TEST_F(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
413 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700414 const Model testModel = createTestModel();
415 if (checkEarlyTermination(testModel)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800416 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800417
418 // Save the compilation to cache.
419 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800420 hidl_vec<hidl_handle> modelCache, dataCache;
421 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
422 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
423 uint8_t dummyBytes[] = {0, 0};
424 // Write a dummy integer to the cache.
425 // The driver should be able to handle non-empty cache and non-zero fd offset.
426 for (uint32_t i = 0; i < modelCache.size(); i++) {
427 ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
428 sizeof(dummyBytes)),
429 sizeof(dummyBytes));
Xusong Wang96e68dc2019-01-18 17:28:26 -0800430 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800431 for (uint32_t i = 0; i < dataCache.size(); i++) {
432 ASSERT_EQ(
433 write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
434 sizeof(dummyBytes));
435 }
Xusong Wang4f71afc2019-04-26 15:33:38 -0700436 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800437 }
438
439 // Retrieve preparedModel from cache.
440 {
441 preparedModel = nullptr;
442 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800443 hidl_vec<hidl_handle> modelCache, dataCache;
444 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
445 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800446 uint8_t dummyByte = 0;
Xusong Wanged0822b2019-02-25 16:58:58 -0800447 // Advance the offset of each handle by one byte.
448 // The driver should be able to handle non-zero fd offset.
449 for (uint32_t i = 0; i < modelCache.size(); i++) {
450 ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
451 }
452 for (uint32_t i = 0; i < dataCache.size(); i++) {
453 ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
454 }
455 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800456 if (!mIsCachingSupported) {
457 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
458 ASSERT_EQ(preparedModel, nullptr);
459 return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800460 } else if (checkEarlyTermination(status)) {
461 ASSERT_EQ(preparedModel, nullptr);
462 return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800463 } else {
464 ASSERT_EQ(status, ErrorStatus::NONE);
465 ASSERT_NE(preparedModel, nullptr);
466 }
467 }
468
469 // Execute and verify results.
470 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
471 testModel.relaxComputationFloat32toFloat16,
472 /*testDynamicOutputShape=*/false);
473}
474
Xusong Wanged0822b2019-02-25 16:58:58 -0800475TEST_F(CompilationCachingTest, SaveToCacheInvalidNumCache) {
476 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700477 const Model testModel = createTestModel();
478 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800479
480 // Test with number of model cache files greater than mNumModelCache.
481 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800482 hidl_vec<hidl_handle> modelCache, dataCache;
483 // Pass an additional cache file for model cache.
484 mModelCache.push_back({mTmpCache});
485 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
486 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
487 mModelCache.pop_back();
488 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700489 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800490 ASSERT_NE(preparedModel, nullptr);
491 // Execute and verify results.
492 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
493 get_examples(),
494 testModel.relaxComputationFloat32toFloat16,
495 /*testDynamicOutputShape=*/false);
496 // Check if prepareModelFromCache fails.
497 preparedModel = nullptr;
498 ErrorStatus status;
499 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
500 if (status != ErrorStatus::INVALID_ARGUMENT) {
501 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
502 }
503 ASSERT_EQ(preparedModel, nullptr);
504 }
505
506 // Test with number of model cache files smaller than mNumModelCache.
507 if (mModelCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800508 hidl_vec<hidl_handle> modelCache, dataCache;
509 // Pop out the last cache file.
510 auto tmp = mModelCache.back();
511 mModelCache.pop_back();
512 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
513 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
514 mModelCache.push_back(tmp);
515 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700516 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800517 ASSERT_NE(preparedModel, nullptr);
518 // Execute and verify results.
519 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
520 get_examples(),
521 testModel.relaxComputationFloat32toFloat16,
522 /*testDynamicOutputShape=*/false);
523 // Check if prepareModelFromCache fails.
524 preparedModel = nullptr;
525 ErrorStatus status;
526 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
527 if (status != ErrorStatus::INVALID_ARGUMENT) {
528 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
529 }
530 ASSERT_EQ(preparedModel, nullptr);
531 }
532
533 // Test with number of data cache files greater than mNumDataCache.
534 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800535 hidl_vec<hidl_handle> modelCache, dataCache;
536 // Pass an additional cache file for data cache.
537 mDataCache.push_back({mTmpCache});
538 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
539 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
540 mDataCache.pop_back();
541 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700542 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800543 ASSERT_NE(preparedModel, nullptr);
544 // Execute and verify results.
545 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
546 get_examples(),
547 testModel.relaxComputationFloat32toFloat16,
548 /*testDynamicOutputShape=*/false);
549 // Check if prepareModelFromCache fails.
550 preparedModel = nullptr;
551 ErrorStatus status;
552 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
553 if (status != ErrorStatus::INVALID_ARGUMENT) {
554 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
555 }
556 ASSERT_EQ(preparedModel, nullptr);
557 }
558
559 // Test with number of data cache files smaller than mNumDataCache.
560 if (mDataCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800561 hidl_vec<hidl_handle> modelCache, dataCache;
562 // Pop out the last cache file.
563 auto tmp = mDataCache.back();
564 mDataCache.pop_back();
565 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
566 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
567 mDataCache.push_back(tmp);
568 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700569 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800570 ASSERT_NE(preparedModel, nullptr);
571 // Execute and verify results.
572 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
573 get_examples(),
574 testModel.relaxComputationFloat32toFloat16,
575 /*testDynamicOutputShape=*/false);
576 // Check if prepareModelFromCache fails.
577 preparedModel = nullptr;
578 ErrorStatus status;
579 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
580 if (status != ErrorStatus::INVALID_ARGUMENT) {
581 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
582 }
583 ASSERT_EQ(preparedModel, nullptr);
584 }
585}
586
587TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
588 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700589 const Model testModel = createTestModel();
590 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800591
592 // Save the compilation to cache.
593 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800594 hidl_vec<hidl_handle> modelCache, dataCache;
595 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
596 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -0700597 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -0800598 }
599
600 // Test with number of model cache files greater than mNumModelCache.
601 {
602 sp<IPreparedModel> preparedModel = nullptr;
603 ErrorStatus status;
604 hidl_vec<hidl_handle> modelCache, dataCache;
605 mModelCache.push_back({mTmpCache});
606 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
607 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
608 mModelCache.pop_back();
609 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
610 if (status != ErrorStatus::GENERAL_FAILURE) {
611 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
612 }
613 ASSERT_EQ(preparedModel, nullptr);
614 }
615
616 // Test with number of model cache files smaller than mNumModelCache.
617 if (mModelCache.size() > 0) {
618 sp<IPreparedModel> preparedModel = nullptr;
619 ErrorStatus status;
620 hidl_vec<hidl_handle> modelCache, dataCache;
621 auto tmp = mModelCache.back();
622 mModelCache.pop_back();
623 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
624 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
625 mModelCache.push_back(tmp);
626 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
627 if (status != ErrorStatus::GENERAL_FAILURE) {
628 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
629 }
630 ASSERT_EQ(preparedModel, nullptr);
631 }
632
633 // Test with number of data cache files greater than mNumDataCache.
634 {
635 sp<IPreparedModel> preparedModel = nullptr;
636 ErrorStatus status;
637 hidl_vec<hidl_handle> modelCache, dataCache;
638 mDataCache.push_back({mTmpCache});
639 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
640 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
641 mDataCache.pop_back();
642 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
643 if (status != ErrorStatus::GENERAL_FAILURE) {
644 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
645 }
646 ASSERT_EQ(preparedModel, nullptr);
647 }
648
649 // Test with number of data cache files smaller than mNumDataCache.
650 if (mDataCache.size() > 0) {
651 sp<IPreparedModel> preparedModel = nullptr;
652 ErrorStatus status;
653 hidl_vec<hidl_handle> modelCache, dataCache;
654 auto tmp = mDataCache.back();
655 mDataCache.pop_back();
656 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
657 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
658 mDataCache.push_back(tmp);
659 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
660 if (status != ErrorStatus::GENERAL_FAILURE) {
661 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
662 }
663 ASSERT_EQ(preparedModel, nullptr);
664 }
665}
666
// Saving to cache with a malformed handle (more than one fd, or zero fds, for
// a single cache entry) must not break compilation: saveModelToCache must
// still return a working prepared model, and a subsequent
// prepareModelFromCache with valid handles must fail, since nothing valid was
// cached.
TEST_F(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        // (Temporarily append an extra file to entry i, build the handles,
        // then restore the fixture state for the next iteration.)
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either INVALID_ARGUMENT or GENERAL_FAILURE is an acceptable error code.
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        // (Temporarily remove the file backing entry i, then restore it.)
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
778
// After a valid compilation has been saved to cache, prepareModelFromCache
// must fail (GENERAL_FAILURE or INVALID_ARGUMENT, no prepared model returned)
// when any single cache handle carries an invalid number of fds — either more
// than one fd, or zero fds.
TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily append an extra file to entry i, build the handles, then
        // restore the fixture state for the next iteration.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable error code.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily remove the file backing entry i, then restore it.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
858
// Saving to cache when one cache file was opened READ_ONLY (the driver needs
// write access to save) must not break compilation: the returned prepared
// model must still work, and a subsequent prepareModelFromCache must fail.
TEST_F(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    // Per-entry access modes; one entry at a time is flipped to READ_ONLY.
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either INVALID_ARGUMENT or GENERAL_FAILURE is an acceptable error code.
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
916
// After a valid compilation has been saved to cache, prepareModelFromCache
// must fail with GENERAL_FAILURE when any cache file is opened WRITE_ONLY
// (the driver needs read access to load the cache).
TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    // Per-entry access modes; one entry at a time is flipped to WRITE_ONLY.
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}
960
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700961// Copy file contents between file groups.
962// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
963// The outer vector sizes must match and the inner vectors must have size = 1.
964static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
965 const std::vector<std::vector<std::string>>& to) {
966 constexpr size_t kBufferSize = 1000000;
967 uint8_t buffer[kBufferSize];
968
969 ASSERT_EQ(from.size(), to.size());
970 for (uint32_t i = 0; i < from.size(); i++) {
971 ASSERT_EQ(from[i].size(), 1u);
972 ASSERT_EQ(to[i].size(), 1u);
973 int fromFd = open(from[i][0].c_str(), O_RDONLY);
974 int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
975 ASSERT_GE(fromFd, 0);
976 ASSERT_GE(toFd, 0);
977
978 ssize_t readBytes;
979 while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
980 ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
981 }
982 ASSERT_GE(readBytes, 0);
983
984 close(fromFd);
985 close(toFd);
986 }
987}
988
// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
// Number of iterations for each probabilistic TOCTOU test below.
constexpr uint32_t kNumIterationsTOCTOU = 100;
992
// TOCTOU-style test: while the driver is saving the compilation of one model
// to cache, a concurrent thread overwrites the cache files with those of a
// different model. The driver must not crash; if a model is later prepared
// from the (possibly corrupted) cache, it must compute correct results.
TEST_F(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache.
    // Use separate "_mul"-suffixed file names so the two models' caches coexist.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(testModelAdd, modelCache, dataCache);
            thread.join();
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1055
// TOCTOU-style test: while the driver is preparing a model from cache, a
// concurrent thread overwrites the cache files with those of a different
// model. The driver must not crash; if preparation succeeds, the prepared
// model must compute correct results.
TEST_F(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache.
    // Use separate "_mul"-suffixed file names so the two models' caches coexist.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(testModelAdd, modelCache, dataCache);
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1118
// Security test: after saving testModelAdd's compilation, its model cache
// files are wholesale replaced by those of a different model (testModelMul,
// saved under a different token). The driver must detect the mismatch and
// fail prepareModelFromCache with GENERAL_FAILURE rather than serve the
// wrong model.
TEST_F(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache.
    // Use separate "_mul"-suffixed file names so the two models' caches coexist.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // Save the testModelAdd compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelAdd, modelCache, dataCache);
    }

    // Replace the model cache of testModelAdd with testModelMul.
    copyCacheFiles(modelCacheMul, mModelCache);

    // Retrieve the preparedModel from cache, expect failure.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}
1166
Xusong Wang96e68dc2019-01-18 17:28:26 -08001167class CompilationCachingSecurityTest : public CompilationCachingTest,
1168 public ::testing::WithParamInterface<uint32_t> {
1169 protected:
1170 void SetUp() {
1171 CompilationCachingTest::SetUp();
1172 generator.seed(kSeed);
1173 }
1174
1175 // Get a random integer within a closed range [lower, upper].
1176 template <typename T>
1177 T getRandomInt(T lower, T upper) {
1178 std::uniform_int_distribution<T> dis(lower, upper);
1179 return dis(generator);
1180 }
1181
Xusong Wange371f6f2019-04-23 14:51:50 -07001182 // Randomly flip one single bit of the cache entry.
1183 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1184 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001185 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1186 long int fileSize = ftell(pFile);
1187 if (fileSize == 0) {
1188 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001189 *skip = true;
1190 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001191 }
1192 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1193 int readByte = fgetc(pFile);
1194 ASSERT_NE(readByte, EOF);
1195 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1196 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1197 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001198 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001199 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001200
Xusong Wange371f6f2019-04-23 14:51:50 -07001201 // Randomly append bytes to the cache entry.
1202 void appendBytesToCache(const std::string& filename, bool* skip) {
1203 FILE* pFile = fopen(filename.c_str(), "a");
1204 uint32_t appendLength = getRandomInt(1, 256);
1205 for (uint32_t i = 0; i < appendLength; i++) {
1206 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1207 }
1208 fclose(pFile);
1209 *skip = false;
1210 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001211
Xusong Wange371f6f2019-04-23 14:51:50 -07001212 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001213
Xusong Wange371f6f2019-04-23 14:51:50 -07001214 // Test if the driver behaves as expected when given corrupted cache or token.
1215 // The modifier will be invoked after save to cache but before prepare from cache.
1216 // The modifier accepts one pointer argument "skip" as the returning value, indicating
1217 // whether the test should be skipped or not.
1218 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wang4f71afc2019-04-26 15:33:38 -07001219 const Model testModel = createTestModel();
1220 if (checkEarlyTermination(testModel)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001221
Xusong Wanged0822b2019-02-25 16:58:58 -08001222 // Save the compilation to cache.
1223 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001224 hidl_vec<hidl_handle> modelCache, dataCache;
1225 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1226 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001227 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001228 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001229
Xusong Wange371f6f2019-04-23 14:51:50 -07001230 bool skip = false;
1231 modifier(&skip);
1232 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001233
Xusong Wange371f6f2019-04-23 14:51:50 -07001234 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001235 {
1236 sp<IPreparedModel> preparedModel = nullptr;
1237 ErrorStatus status;
1238 hidl_vec<hidl_handle> modelCache, dataCache;
1239 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1240 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1241 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001242
1243 switch (expected) {
1244 case ExpectedResult::GENERAL_FAILURE:
1245 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1246 ASSERT_EQ(preparedModel, nullptr);
1247 break;
1248 case ExpectedResult::NOT_CRASH:
1249 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1250 break;
1251 default:
1252 FAIL();
1253 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001254 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001255 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001256
1257 const uint32_t kSeed = GetParam();
1258 std::mt19937 generator;
1259};
1260
1261TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1262 if (!mIsCachingSupported) return;
1263 for (uint32_t i = 0; i < mNumModelCache; i++) {
1264 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1265 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1266 }
1267}
1268
1269TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1270 if (!mIsCachingSupported) return;
1271 for (uint32_t i = 0; i < mNumModelCache; i++) {
1272 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1273 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1274 }
1275}
1276
1277TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1278 if (!mIsCachingSupported) return;
1279 for (uint32_t i = 0; i < mNumDataCache; i++) {
1280 testCorruptedCache(ExpectedResult::NOT_CRASH,
1281 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1282 }
1283}
1284
1285TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1286 if (!mIsCachingSupported) return;
1287 for (uint32_t i = 0; i < mNumDataCache; i++) {
1288 testCorruptedCache(ExpectedResult::NOT_CRASH,
1289 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1290 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001291}
1292
1293TEST_P(CompilationCachingSecurityTest, WrongToken) {
1294 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001295 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1296 // Randomly flip one single bit in mToken.
1297 uint32_t ind =
1298 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1299 mToken[ind] ^= (1U << getRandomInt(0, 7));
1300 *skip = false;
1301 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001302}
1303
// Run every security test with RNG seeds 0 through 9.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        ::testing::Range(0U, 10U));
1306
1307} // namespace functional
1308} // namespace vts
1309} // namespace V1_2
1310} // namespace neuralnetworks
1311} // namespace hardware
1312} // namespace android