blob: 167fc096ceae8a1db1bf343f8d75ba1308b1f818 [file] [log] [blame]
Xusong Wang34058782019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
19#include "VtsHalNeuralnetworks.h"
20
21#include "Callbacks.h"
22#include "GeneratedTestHarness.h"
23#include "TestHarness.h"
24#include "Utils.h"
25
#include <android-base/logging.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <fcntl.h>
#include <gtest/gtest.h>
#include <hidlmemory/mapping.h>
#include <sys/stat.h>
#include <unistd.h>

#include <cstdio>
#include <cstdlib>
#include <random>
34
35namespace android {
36namespace hardware {
37namespace neuralnetworks {
38namespace V1_2 {
39namespace vts {
40namespace functional {
41
42using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
43using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
44using ::android::nn::allocateSharedMemory;
45using ::test_helper::MixedTypedExample;
46
47namespace {
48
// In frameworks/ml/nn/runtime/tests/generated/, creates a hidl model of mobilenet.
#include "examples/mobilenet_224_gender_basic_fixed.example.cpp"
#include "vts_models/mobilenet_224_gender_basic_fixed.model.cpp"

// Prevent the compiler from complaining about an otherwise unused function.
// The generated files above also define *_dynamic_output_shape variants that this
// test never calls directly; aliasing them here keeps -Wunused-* quiet.
[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;
56
Xusong Wangb61ba1e2019-02-25 16:58:58 -080057enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };
Xusong Wang34058782019-01-18 17:28:26 -080058
Xusong Wangb61ba1e2019-02-25 16:58:58 -080059// Creates cache handles based on provided file groups.
60// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
61void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
62 const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
63 handles->resize(fileGroups.size());
64 for (uint32_t i = 0; i < fileGroups.size(); i++) {
65 std::vector<int> fds;
66 for (const auto& file : fileGroups[i]) {
67 int fd;
68 if (mode[i] == AccessMode::READ_ONLY) {
69 fd = open(file.c_str(), O_RDONLY);
70 } else if (mode[i] == AccessMode::WRITE_ONLY) {
71 fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
72 } else if (mode[i] == AccessMode::READ_WRITE) {
73 fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
74 } else {
75 FAIL();
76 }
77 ASSERT_GE(fd, 0);
78 fds.push_back(fd);
Xusong Wang34058782019-01-18 17:28:26 -080079 }
Xusong Wangb61ba1e2019-02-25 16:58:58 -080080 native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
81 ASSERT_NE(cacheNativeHandle, nullptr);
82 std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
83 (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
Xusong Wang34058782019-01-18 17:28:26 -080084 }
Xusong Wangb61ba1e2019-02-25 16:58:58 -080085}
86
87void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
88 hidl_vec<hidl_handle>* handles) {
89 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang34058782019-01-18 17:28:26 -080090}
91
92} // namespace
93
94// Tag for the compilation caching tests.
95class CompilationCachingTest : public NeuralnetworksHidlTest {
96 protected:
97 void SetUp() override {
98 NeuralnetworksHidlTest::SetUp();
Hervé Guihot70d25b82019-02-12 16:22:44 -080099 ASSERT_NE(device.get(), nullptr);
Xusong Wang34058782019-01-18 17:28:26 -0800100
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800101 // Create cache directory. The cache directory and a temporary cache file is always created
102 // to test the behavior of prepareModelFromCache, even when caching is not supported.
Xusong Wang34058782019-01-18 17:28:26 -0800103 char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
104 char* cacheDir = mkdtemp(cacheDirTemp);
105 ASSERT_NE(cacheDir, nullptr);
Xusong Wang350d91b2019-02-12 18:00:37 -0800106 mCacheDir = cacheDir;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800107 mCacheDir.push_back('/');
Xusong Wang350d91b2019-02-12 18:00:37 -0800108
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800109 Return<void> ret = device->getNumberOfCacheFilesNeeded(
110 [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
Xusong Wang34058782019-01-18 17:28:26 -0800111 EXPECT_EQ(ErrorStatus::NONE, status);
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800112 mNumModelCache = numModelCache;
113 mNumDataCache = numDataCache;
Xusong Wang34058782019-01-18 17:28:26 -0800114 });
115 EXPECT_TRUE(ret.isOk());
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800116 mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;
117
118 // Create empty cache files.
119 mTmpCache = mCacheDir + "tmp";
120 for (uint32_t i = 0; i < mNumModelCache; i++) {
121 mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
122 }
123 for (uint32_t i = 0; i < mNumDataCache; i++) {
124 mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
125 }
126 // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
127 hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
128 createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
129 createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
130 createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);
131
132 if (!mIsCachingSupported) {
Xusong Wang34058782019-01-18 17:28:26 -0800133 LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
134 "support compilation caching.";
135 std::cout << "[ ] Early termination of test because vendor service does not "
136 "support compilation caching."
137 << std::endl;
Xusong Wang34058782019-01-18 17:28:26 -0800138 }
Xusong Wang350d91b2019-02-12 18:00:37 -0800139 }
Xusong Wang34058782019-01-18 17:28:26 -0800140
Xusong Wang350d91b2019-02-12 18:00:37 -0800141 void TearDown() override {
142 // The tmp directory is only removed when the driver reports caching not supported,
143 // otherwise it is kept for debugging purpose.
144 if (!mIsCachingSupported) {
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800145 remove(mTmpCache.c_str());
Xusong Wang350d91b2019-02-12 18:00:37 -0800146 rmdir(mCacheDir.c_str());
147 }
148 NeuralnetworksHidlTest::TearDown();
Xusong Wang34058782019-01-18 17:28:26 -0800149 }
150
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800151 void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
152 const hidl_vec<hidl_handle>& dataCache, bool* supported,
153 sp<IPreparedModel>* preparedModel = nullptr) {
154 if (preparedModel != nullptr) *preparedModel = nullptr;
155
156 // See if service can handle model.
157 bool fullySupportsModel = false;
158 Return<void> supportedCall = device->getSupportedOperations_1_2(
159 model,
160 [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
161 ASSERT_EQ(ErrorStatus::NONE, status);
162 ASSERT_EQ(supported.size(), model.operations.size());
163 fullySupportsModel = std::all_of(supported.begin(), supported.end(),
164 [](bool valid) { return valid; });
165 });
166 ASSERT_TRUE(supportedCall.isOk());
167 *supported = fullySupportsModel;
168 if (!fullySupportsModel) return;
169
170 // Launch prepare model.
171 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
172 ASSERT_NE(nullptr, preparedModelCallback.get());
Xusong Wang34058782019-01-18 17:28:26 -0800173 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800174 Return<ErrorStatus> prepareLaunchStatus =
175 device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
176 dataCache, cacheToken, preparedModelCallback);
177 ASSERT_TRUE(prepareLaunchStatus.isOk());
178 ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);
179
180 // Retrieve prepared model.
181 preparedModelCallback->wait();
182 ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
183 if (preparedModel != nullptr) {
184 *preparedModel =
185 V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
186 .withDefault(nullptr);
187 }
Xusong Wang34058782019-01-18 17:28:26 -0800188 }
189
190 bool checkEarlyTermination(ErrorStatus status) {
191 if (status == ErrorStatus::GENERAL_FAILURE) {
192 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
193 "save the prepared model that it does not support.";
194 std::cout << "[ ] Early termination of test because vendor service cannot "
195 "save the prepared model that it does not support."
196 << std::endl;
197 return true;
198 }
199 return false;
200 }
201
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800202 bool checkEarlyTermination(bool supported) {
203 if (!supported) {
204 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
205 "prepare model that it does not support.";
206 std::cout << "[ ] Early termination of test because vendor service cannot "
207 "prepare model that it does not support."
208 << std::endl;
209 return true;
210 }
211 return false;
212 }
213
214 void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
215 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang34058782019-01-18 17:28:26 -0800216 sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
217 // Launch prepare model from cache.
218 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
219 ASSERT_NE(nullptr, preparedModelCallback.get());
220 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800221 Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
222 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wang34058782019-01-18 17:28:26 -0800223 ASSERT_TRUE(prepareLaunchStatus.isOk());
224 if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
225 *preparedModel = nullptr;
226 *status = static_cast<ErrorStatus>(prepareLaunchStatus);
227 return;
228 }
229
230 // Retrieve prepared model.
231 preparedModelCallback->wait();
232 *status = preparedModelCallback->getStatus();
233 *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
234 .withDefault(nullptr);
235 }
236
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800237 // Absolute path to the temporary cache directory.
Xusong Wang350d91b2019-02-12 18:00:37 -0800238 std::string mCacheDir;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800239
240 // Groups of file paths for model and data cache in the tmp cache directory, initialized with
241 // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
242 // and the inner vector is for fds held by each handle.
243 std::vector<std::vector<std::string>> mModelCache;
244 std::vector<std::vector<std::string>> mDataCache;
245
246 // A separate temporary file path in the tmp cache directory.
247 std::string mTmpCache;
248
Xusong Wang34058782019-01-18 17:28:26 -0800249 uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800250 uint32_t mNumModelCache;
251 uint32_t mNumDataCache;
252 uint32_t mIsCachingSupported;
Xusong Wang34058782019-01-18 17:28:26 -0800253};
254
255TEST_F(CompilationCachingTest, CacheSavingAndRetrieval) {
256 // Create test HIDL model and compile.
257 Model testModel = createTestModel();
258 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang34058782019-01-18 17:28:26 -0800259
260 // Save the compilation to cache.
261 {
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800262 bool supported;
263 hidl_vec<hidl_handle> modelCache, dataCache;
264 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
265 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
266 saveModelToCache(testModel, modelCache, dataCache, &supported);
267 if (checkEarlyTermination(supported)) return;
Xusong Wang34058782019-01-18 17:28:26 -0800268 }
269
270 // Retrieve preparedModel from cache.
271 {
272 preparedModel = nullptr;
273 ErrorStatus status;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800274 hidl_vec<hidl_handle> modelCache, dataCache;
275 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
276 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
277 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang34058782019-01-18 17:28:26 -0800278 if (!mIsCachingSupported) {
279 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
280 ASSERT_EQ(preparedModel, nullptr);
281 return;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800282 } else if (checkEarlyTermination(status)) {
283 ASSERT_EQ(preparedModel, nullptr);
284 return;
Xusong Wang34058782019-01-18 17:28:26 -0800285 } else {
286 ASSERT_EQ(status, ErrorStatus::NONE);
287 ASSERT_NE(preparedModel, nullptr);
288 }
289 }
290
291 // Execute and verify results.
292 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
293 testModel.relaxComputationFloat32toFloat16,
294 /*testDynamicOutputShape=*/false);
295}
296
297TEST_F(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
298 // Create test HIDL model and compile.
299 Model testModel = createTestModel();
300 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang34058782019-01-18 17:28:26 -0800301
302 // Save the compilation to cache.
303 {
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800304 bool supported;
305 hidl_vec<hidl_handle> modelCache, dataCache;
306 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
307 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
308 uint8_t dummyBytes[] = {0, 0};
309 // Write a dummy integer to the cache.
310 // The driver should be able to handle non-empty cache and non-zero fd offset.
311 for (uint32_t i = 0; i < modelCache.size(); i++) {
312 ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
313 sizeof(dummyBytes)),
314 sizeof(dummyBytes));
Xusong Wang34058782019-01-18 17:28:26 -0800315 }
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800316 for (uint32_t i = 0; i < dataCache.size(); i++) {
317 ASSERT_EQ(
318 write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
319 sizeof(dummyBytes));
320 }
321 saveModelToCache(testModel, modelCache, dataCache, &supported);
322 if (checkEarlyTermination(supported)) return;
Xusong Wang34058782019-01-18 17:28:26 -0800323 }
324
325 // Retrieve preparedModel from cache.
326 {
327 preparedModel = nullptr;
328 ErrorStatus status;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800329 hidl_vec<hidl_handle> modelCache, dataCache;
330 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
331 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang34058782019-01-18 17:28:26 -0800332 uint8_t dummyByte = 0;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800333 // Advance the offset of each handle by one byte.
334 // The driver should be able to handle non-zero fd offset.
335 for (uint32_t i = 0; i < modelCache.size(); i++) {
336 ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
337 }
338 for (uint32_t i = 0; i < dataCache.size(); i++) {
339 ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
340 }
341 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang34058782019-01-18 17:28:26 -0800342 if (!mIsCachingSupported) {
343 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
344 ASSERT_EQ(preparedModel, nullptr);
345 return;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800346 } else if (checkEarlyTermination(status)) {
347 ASSERT_EQ(preparedModel, nullptr);
348 return;
Xusong Wang34058782019-01-18 17:28:26 -0800349 } else {
350 ASSERT_EQ(status, ErrorStatus::NONE);
351 ASSERT_NE(preparedModel, nullptr);
352 }
353 }
354
355 // Execute and verify results.
356 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
357 testModel.relaxComputationFloat32toFloat16,
358 /*testDynamicOutputShape=*/false);
359}
360
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800361TEST_F(CompilationCachingTest, SaveToCacheInvalidNumCache) {
362 // Create test HIDL model and compile.
363 Model testModel = createTestModel();
364
365 // Test with number of model cache files greater than mNumModelCache.
366 {
367 bool supported;
368 hidl_vec<hidl_handle> modelCache, dataCache;
369 // Pass an additional cache file for model cache.
370 mModelCache.push_back({mTmpCache});
371 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
372 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
373 mModelCache.pop_back();
374 sp<IPreparedModel> preparedModel = nullptr;
375 saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
376 if (checkEarlyTermination(supported)) return;
377 ASSERT_NE(preparedModel, nullptr);
378 // Execute and verify results.
379 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
380 get_examples(),
381 testModel.relaxComputationFloat32toFloat16,
382 /*testDynamicOutputShape=*/false);
383 // Check if prepareModelFromCache fails.
384 preparedModel = nullptr;
385 ErrorStatus status;
386 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
387 if (status != ErrorStatus::INVALID_ARGUMENT) {
388 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
389 }
390 ASSERT_EQ(preparedModel, nullptr);
391 }
392
393 // Test with number of model cache files smaller than mNumModelCache.
394 if (mModelCache.size() > 0) {
395 bool supported;
396 hidl_vec<hidl_handle> modelCache, dataCache;
397 // Pop out the last cache file.
398 auto tmp = mModelCache.back();
399 mModelCache.pop_back();
400 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
401 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
402 mModelCache.push_back(tmp);
403 sp<IPreparedModel> preparedModel = nullptr;
404 saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
405 if (checkEarlyTermination(supported)) return;
406 ASSERT_NE(preparedModel, nullptr);
407 // Execute and verify results.
408 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
409 get_examples(),
410 testModel.relaxComputationFloat32toFloat16,
411 /*testDynamicOutputShape=*/false);
412 // Check if prepareModelFromCache fails.
413 preparedModel = nullptr;
414 ErrorStatus status;
415 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
416 if (status != ErrorStatus::INVALID_ARGUMENT) {
417 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
418 }
419 ASSERT_EQ(preparedModel, nullptr);
420 }
421
422 // Test with number of data cache files greater than mNumDataCache.
423 {
424 bool supported;
425 hidl_vec<hidl_handle> modelCache, dataCache;
426 // Pass an additional cache file for data cache.
427 mDataCache.push_back({mTmpCache});
428 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
429 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
430 mDataCache.pop_back();
431 sp<IPreparedModel> preparedModel = nullptr;
432 saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
433 if (checkEarlyTermination(supported)) return;
434 ASSERT_NE(preparedModel, nullptr);
435 // Execute and verify results.
436 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
437 get_examples(),
438 testModel.relaxComputationFloat32toFloat16,
439 /*testDynamicOutputShape=*/false);
440 // Check if prepareModelFromCache fails.
441 preparedModel = nullptr;
442 ErrorStatus status;
443 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
444 if (status != ErrorStatus::INVALID_ARGUMENT) {
445 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
446 }
447 ASSERT_EQ(preparedModel, nullptr);
448 }
449
450 // Test with number of data cache files smaller than mNumDataCache.
451 if (mDataCache.size() > 0) {
452 bool supported;
453 hidl_vec<hidl_handle> modelCache, dataCache;
454 // Pop out the last cache file.
455 auto tmp = mDataCache.back();
456 mDataCache.pop_back();
457 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
458 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
459 mDataCache.push_back(tmp);
460 sp<IPreparedModel> preparedModel = nullptr;
461 saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
462 if (checkEarlyTermination(supported)) return;
463 ASSERT_NE(preparedModel, nullptr);
464 // Execute and verify results.
465 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
466 get_examples(),
467 testModel.relaxComputationFloat32toFloat16,
468 /*testDynamicOutputShape=*/false);
469 // Check if prepareModelFromCache fails.
470 preparedModel = nullptr;
471 ErrorStatus status;
472 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
473 if (status != ErrorStatus::INVALID_ARGUMENT) {
474 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
475 }
476 ASSERT_EQ(preparedModel, nullptr);
477 }
478}
479
480TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
481 // Create test HIDL model and compile.
482 Model testModel = createTestModel();
483
484 // Save the compilation to cache.
485 {
486 bool supported;
487 hidl_vec<hidl_handle> modelCache, dataCache;
488 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
489 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
490 saveModelToCache(testModel, modelCache, dataCache, &supported);
491 if (checkEarlyTermination(supported)) return;
492 }
493
494 // Test with number of model cache files greater than mNumModelCache.
495 {
496 sp<IPreparedModel> preparedModel = nullptr;
497 ErrorStatus status;
498 hidl_vec<hidl_handle> modelCache, dataCache;
499 mModelCache.push_back({mTmpCache});
500 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
501 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
502 mModelCache.pop_back();
503 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
504 if (status != ErrorStatus::GENERAL_FAILURE) {
505 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
506 }
507 ASSERT_EQ(preparedModel, nullptr);
508 }
509
510 // Test with number of model cache files smaller than mNumModelCache.
511 if (mModelCache.size() > 0) {
512 sp<IPreparedModel> preparedModel = nullptr;
513 ErrorStatus status;
514 hidl_vec<hidl_handle> modelCache, dataCache;
515 auto tmp = mModelCache.back();
516 mModelCache.pop_back();
517 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
518 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
519 mModelCache.push_back(tmp);
520 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
521 if (status != ErrorStatus::GENERAL_FAILURE) {
522 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
523 }
524 ASSERT_EQ(preparedModel, nullptr);
525 }
526
527 // Test with number of data cache files greater than mNumDataCache.
528 {
529 sp<IPreparedModel> preparedModel = nullptr;
530 ErrorStatus status;
531 hidl_vec<hidl_handle> modelCache, dataCache;
532 mDataCache.push_back({mTmpCache});
533 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
534 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
535 mDataCache.pop_back();
536 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
537 if (status != ErrorStatus::GENERAL_FAILURE) {
538 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
539 }
540 ASSERT_EQ(preparedModel, nullptr);
541 }
542
543 // Test with number of data cache files smaller than mNumDataCache.
544 if (mDataCache.size() > 0) {
545 sp<IPreparedModel> preparedModel = nullptr;
546 ErrorStatus status;
547 hidl_vec<hidl_handle> modelCache, dataCache;
548 auto tmp = mDataCache.back();
549 mDataCache.pop_back();
550 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
551 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
552 mDataCache.push_back(tmp);
553 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
554 if (status != ErrorStatus::GENERAL_FAILURE) {
555 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
556 }
557 ASSERT_EQ(preparedModel, nullptr);
558 }
559}
560
Xusong Wang34058782019-01-18 17:28:26 -0800561TEST_F(CompilationCachingTest, SaveToCacheInvalidNumFd) {
562 // Create test HIDL model and compile.
563 Model testModel = createTestModel();
Xusong Wang34058782019-01-18 17:28:26 -0800564
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800565 // Go through each handle in model cache, test with NumFd greater than 1.
566 for (uint32_t i = 0; i < mNumModelCache; i++) {
567 bool supported;
568 hidl_vec<hidl_handle> modelCache, dataCache;
569 // Pass an invalid number of fds for handle i.
570 mModelCache[i].push_back(mTmpCache);
571 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
572 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
573 mModelCache[i].pop_back();
574 sp<IPreparedModel> preparedModel = nullptr;
575 saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
576 if (checkEarlyTermination(supported)) return;
577 ASSERT_NE(preparedModel, nullptr);
578 // Execute and verify results.
579 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
580 get_examples(),
581 testModel.relaxComputationFloat32toFloat16,
582 /*testDynamicOutputShape=*/false);
583 // Check if prepareModelFromCache fails.
584 preparedModel = nullptr;
Xusong Wang34058782019-01-18 17:28:26 -0800585 ErrorStatus status;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800586 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
587 if (status != ErrorStatus::INVALID_ARGUMENT) {
588 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
Xusong Wang34058782019-01-18 17:28:26 -0800589 }
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800590 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang34058782019-01-18 17:28:26 -0800591 }
592
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800593 // Go through each handle in model cache, test with NumFd equal to 0.
594 for (uint32_t i = 0; i < mNumModelCache; i++) {
595 bool supported;
596 hidl_vec<hidl_handle> modelCache, dataCache;
597 // Pass an invalid number of fds for handle i.
598 auto tmp = mModelCache[i].back();
599 mModelCache[i].pop_back();
600 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
601 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
602 mModelCache[i].push_back(tmp);
603 sp<IPreparedModel> preparedModel = nullptr;
604 saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
605 if (checkEarlyTermination(supported)) return;
606 ASSERT_NE(preparedModel, nullptr);
607 // Execute and verify results.
608 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
609 get_examples(),
610 testModel.relaxComputationFloat32toFloat16,
611 /*testDynamicOutputShape=*/false);
612 // Check if prepareModelFromCache fails.
613 preparedModel = nullptr;
Xusong Wang34058782019-01-18 17:28:26 -0800614 ErrorStatus status;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800615 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
616 if (status != ErrorStatus::INVALID_ARGUMENT) {
617 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
Xusong Wang34058782019-01-18 17:28:26 -0800618 }
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800619 ASSERT_EQ(preparedModel, nullptr);
620 }
621
622 // Go through each handle in data cache, test with NumFd greater than 1.
623 for (uint32_t i = 0; i < mNumDataCache; i++) {
624 bool supported;
625 hidl_vec<hidl_handle> modelCache, dataCache;
626 // Pass an invalid number of fds for handle i.
627 mDataCache[i].push_back(mTmpCache);
628 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
629 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
630 mDataCache[i].pop_back();
631 sp<IPreparedModel> preparedModel = nullptr;
632 saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
633 if (checkEarlyTermination(supported)) return;
634 ASSERT_NE(preparedModel, nullptr);
635 // Execute and verify results.
636 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
637 get_examples(),
638 testModel.relaxComputationFloat32toFloat16,
639 /*testDynamicOutputShape=*/false);
640 // Check if prepareModelFromCache fails.
641 preparedModel = nullptr;
642 ErrorStatus status;
643 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
644 if (status != ErrorStatus::INVALID_ARGUMENT) {
645 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
646 }
647 ASSERT_EQ(preparedModel, nullptr);
648 }
649
650 // Go through each handle in data cache, test with NumFd equal to 0.
651 for (uint32_t i = 0; i < mNumDataCache; i++) {
652 bool supported;
653 hidl_vec<hidl_handle> modelCache, dataCache;
654 // Pass an invalid number of fds for handle i.
655 auto tmp = mDataCache[i].back();
656 mDataCache[i].pop_back();
657 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
658 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
659 mDataCache[i].push_back(tmp);
660 sp<IPreparedModel> preparedModel = nullptr;
661 saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
662 if (checkEarlyTermination(supported)) return;
663 ASSERT_NE(preparedModel, nullptr);
664 // Execute and verify results.
665 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
666 get_examples(),
667 testModel.relaxComputationFloat32toFloat16,
668 /*testDynamicOutputShape=*/false);
669 // Check if prepareModelFromCache fails.
670 preparedModel = nullptr;
671 ErrorStatus status;
672 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
673 if (status != ErrorStatus::INVALID_ARGUMENT) {
674 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
675 }
676 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang34058782019-01-18 17:28:26 -0800677 }
678}
679
// Verifies that prepareModelFromCache rejects requests where a single cache
// entry carries the wrong number of file descriptors. Each model-cache and
// data-cache entry is tested twice: once with one extra fd (NumFd > 1) and
// once with its fd removed (NumFd == 0). The driver may report either
// GENERAL_FAILURE or INVALID_ARGUMENT, but must never return a prepared model.
TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();

    // Save the compilation to cache so the cache files exist and are valid
    // before the malformed retrieval attempts below.
    {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily give entry i a second backing file so the generated
        // handle has two fds; restore the fixture state right after the
        // handles are created.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either error code is acceptable; anything else fails the test.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily remove entry i's backing file so the generated handle
        // has no fd; restore the fixture state right after.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either error code is acceptable; anything else fails the test.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Same extra-fd trick as above, applied to the data cache entry.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Same missing-fd trick as above, applied to the data cache entry.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
760
// Verifies driver behavior when saveModelToCache is handed one handle opened
// with the wrong access mode (READ_ONLY instead of READ_WRITE). The save call
// must still yield a working prepared model (whose outputs are validated by
// execution), but a subsequent prepareModelFromCache on those same handles is
// expected to fail with INVALID_ARGUMENT or GENERAL_FAILURE.
TEST_F(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();
    // Per-entry access modes; one entry at a time is flipped to READ_ONLY.
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Open entry i read-only (invalid for saving), then restore the mode
        // vector for the next iteration.
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        // Even with a bad cache handle, compilation itself must succeed.
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails; either error code is accepted.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Same as above, but the read-only handle is in the data cache.
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &supported, &preparedModel);
        if (checkEarlyTermination(supported)) return;
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails; either error code is accepted.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
821
// Verifies that prepareModelFromCache fails with GENERAL_FAILURE when any one
// cache handle is opened WRITE_ONLY (a mode under which the driver cannot read
// the cached artifact back), and that no prepared model is returned.
TEST_F(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    Model testModel = createTestModel();
    // Per-entry access modes; one entry at a time is flipped to WRITE_ONLY.
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Save the compilation to cache so the cache files hold a valid artifact.
    {
        bool supported;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache, &supported);
        if (checkEarlyTermination(supported)) return;
    }

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Open entry i write-only (invalid for loading), then restore the
        // mode vector for the next iteration.
        modelCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Same as above, but the write-only handle is in the data cache.
        dataCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}
866
Xusong Wang34058782019-01-18 17:28:26 -0800867class CompilationCachingSecurityTest : public CompilationCachingTest,
868 public ::testing::WithParamInterface<uint32_t> {
869 protected:
870 void SetUp() {
871 CompilationCachingTest::SetUp();
872 generator.seed(kSeed);
873 }
874
875 // Get a random integer within a closed range [lower, upper].
876 template <typename T>
877 T getRandomInt(T lower, T upper) {
878 std::uniform_int_distribution<T> dis(lower, upper);
879 return dis(generator);
880 }
881
882 const uint32_t kSeed = GetParam();
883 std::mt19937 generator;
884};
885
886TEST_P(CompilationCachingSecurityTest, CorruptedSecuritySensitiveCache) {
887 if (!mIsCachingSupported) return;
888
889 // Create test HIDL model and compile.
890 Model testModel = createTestModel();
Xusong Wang34058782019-01-18 17:28:26 -0800891
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800892 for (uint32_t i = 0; i < mNumModelCache; i++) {
893 // Save the compilation to cache.
894 {
895 bool supported;
896 hidl_vec<hidl_handle> modelCache, dataCache;
897 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
898 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
899 saveModelToCache(testModel, modelCache, dataCache, &supported);
900 if (checkEarlyTermination(supported)) return;
901 }
Xusong Wang34058782019-01-18 17:28:26 -0800902
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800903 // Randomly flip one single bit of the cache entry.
904 FILE* pFile = fopen(mModelCache[i][0].c_str(), "r+");
905 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
906 long int fileSize = ftell(pFile);
907 if (fileSize == 0) {
908 fclose(pFile);
909 continue;
910 }
911 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
912 int readByte = fgetc(pFile);
913 ASSERT_NE(readByte, EOF);
914 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
915 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
916 fclose(pFile);
Xusong Wang34058782019-01-18 17:28:26 -0800917
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800918 // Retrieve preparedModel from cache, expect failure.
919 {
920 sp<IPreparedModel> preparedModel = nullptr;
921 ErrorStatus status;
922 hidl_vec<hidl_handle> modelCache, dataCache;
923 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
924 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
925 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
926 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
927 ASSERT_EQ(preparedModel, nullptr);
928 }
Xusong Wang34058782019-01-18 17:28:26 -0800929 }
930}
931
932TEST_P(CompilationCachingSecurityTest, WrongLengthSecuritySensitiveCache) {
933 if (!mIsCachingSupported) return;
934
935 // Create test HIDL model and compile.
936 Model testModel = createTestModel();
Xusong Wang34058782019-01-18 17:28:26 -0800937
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800938 for (uint32_t i = 0; i < mNumModelCache; i++) {
939 // Save the compilation to cache.
940 {
941 bool supported;
942 hidl_vec<hidl_handle> modelCache, dataCache;
943 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
944 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
945 saveModelToCache(testModel, modelCache, dataCache, &supported);
946 if (checkEarlyTermination(supported)) return;
947 }
Xusong Wang34058782019-01-18 17:28:26 -0800948
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800949 // Randomly append bytes to the cache entry.
950 FILE* pFile = fopen(mModelCache[i][0].c_str(), "a");
951 uint32_t appendLength = getRandomInt(1, 256);
952 for (uint32_t i = 0; i < appendLength; i++) {
953 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
954 }
955 fclose(pFile);
Xusong Wang34058782019-01-18 17:28:26 -0800956
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800957 // Retrieve preparedModel from cache, expect failure.
958 {
959 sp<IPreparedModel> preparedModel = nullptr;
960 ErrorStatus status;
961 hidl_vec<hidl_handle> modelCache, dataCache;
962 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
963 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
964 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
965 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
966 ASSERT_EQ(preparedModel, nullptr);
967 }
Xusong Wang34058782019-01-18 17:28:26 -0800968 }
969}
970
971TEST_P(CompilationCachingSecurityTest, WrongToken) {
972 if (!mIsCachingSupported) return;
973
974 // Create test HIDL model and compile.
975 Model testModel = createTestModel();
Xusong Wang34058782019-01-18 17:28:26 -0800976
977 // Save the compilation to cache.
978 {
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800979 bool supported;
980 hidl_vec<hidl_handle> modelCache, dataCache;
981 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
982 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
983 saveModelToCache(testModel, modelCache, dataCache, &supported);
984 if (checkEarlyTermination(supported)) return;
Xusong Wang34058782019-01-18 17:28:26 -0800985 }
986
987 // Randomly flip one single bit in mToken.
988 uint32_t ind = getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
989 mToken[ind] ^= (1U << getRandomInt(0, 7));
990
991 // Retrieve the preparedModel from cache, expect failure.
992 {
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800993 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang34058782019-01-18 17:28:26 -0800994 ErrorStatus status;
Xusong Wangb61ba1e2019-02-25 16:58:58 -0800995 hidl_vec<hidl_handle> modelCache, dataCache;
996 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
997 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
998 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang34058782019-01-18 17:28:26 -0800999 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1000 ASSERT_EQ(preparedModel, nullptr);
1001 }
1002}
1003
// Run every CompilationCachingSecurityTest with ten different RNG seeds
// (0 through 9) so the random corruption patterns vary across instances.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        ::testing::Range(0U, 10U));
1006
1007} // namespace functional
1008} // namespace vts
1009} // namespace V1_2
1010} // namespace neuralnetworks
1011} // namespace hardware
1012} // namespace android