/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "neuralnetworks_hidl_hal_test"

#include <android-base/logging.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <ftw.h>
#include <gtest/gtest.h>
#include <hidlmemory/mapping.h>
#include <unistd.h>

#include <cstdio>
#include <cstdlib>
#include <random>

#include "1.2/Callbacks.h"
#include "GeneratedTestHarness.h"
#include "MemoryUtils.h"
#include "TestHarness.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"

namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_2 {
namespace vts {
namespace functional {

using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
using ::android::nn::allocateSharedMemory;
using ::test_helper::MixedTypedExample;

namespace float32_model {

// In frameworks/ml/nn/runtime/test/generated/, creates a hidl model of float32 mobilenet.
#include "examples/mobilenet_224_gender_basic_fixed.example.cpp"
#include "vts_models/mobilenet_224_gender_basic_fixed.model.cpp"

// Prevent the compiler from complaining about an otherwise unused function.
[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;

// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
// This function assumes the operation is always ADD.
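// For a chain of `len` such ADD operations, each adding a constant 1.0f to the running value,
// the expected output below is 1.0f + len.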
std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
    float outputValue = 1.0f + static_cast<float>(len);
    return {{.operands = {
                     // Input
                     {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {1.0f}}}},
                     // Output
                     {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {outputValue}}}}}}};
}

}  // namespace float32_model

namespace quant8_model {

// In frameworks/ml/nn/runtime/test/generated/, creates a hidl model of quant8 mobilenet.
#include "examples/mobilenet_quantized.example.cpp"
#include "vts_models/mobilenet_quantized.model.cpp"

// Prevent the compiler from complaining about an otherwise unused function.
[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;

// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
// This function assumes the operation is always ADD.
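// The expected quant8 output below is 1 + len cast to uint8_t, so len must stay well below 255 to
// avoid wrap-around (the tests below use kLargeModelSize = 100).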
std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
    uint8_t outputValue = 1 + static_cast<uint8_t>(len);
    return {{.operands = {// Input
                          {.operandDimensions = {{0, {1}}}, .quant8AsymmOperands = {{0, {1}}}},
                          // Output
                          {.operandDimensions = {{0, {1}}},
                           .quant8AsymmOperands = {{0, {outputValue}}}}}}};
}

}  // namespace quant8_model

namespace {

enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };

// Creates cache handles based on provided file groups.
// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
                        const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
    handles->resize(fileGroups.size());
    for (uint32_t i = 0; i < fileGroups.size(); i++) {
        std::vector<int> fds;
        for (const auto& file : fileGroups[i]) {
            int fd;
            if (mode[i] == AccessMode::READ_ONLY) {
                fd = open(file.c_str(), O_RDONLY);
            } else if (mode[i] == AccessMode::WRITE_ONLY) {
                fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
            } else if (mode[i] == AccessMode::READ_WRITE) {
                fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
            } else {
                FAIL();
            }
            ASSERT_GE(fd, 0);
            fds.push_back(fd);
        }
        native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
        ASSERT_NE(cacheNativeHandle, nullptr);
        std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
        (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
    }
}

void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
                        hidl_vec<hidl_handle>* handles) {
    createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
}
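// Illustrative usage (the path below is hypothetical): create one handle holding a single fd
// opened with O_RDWR | O_CREAT for a single cache file:
//     hidl_vec<hidl_handle> handles;
//     createCacheHandles({{"/data/local/tmp/cache/model0"}}, AccessMode::READ_WRITE, &handles);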

// Create a chain of broadcast operations. The second operand is always constant tensor [1].
// For simplicity, activation scalar is shared. The second operand is not shared
// in the model to let driver maintain a non-trivial size of constant data and the corresponding
// data locations in cache.
//
//                ---------- activation ----------
//                ↓      ↓      ↓             ↓
// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
//                ↑      ↑      ↑             ↑
//               [1]    [1]    [1]           [1]
//
// This function assumes the operation is either ADD or MUL.
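// Operand layout produced below: operand 0 is the shared activation scalar; for operation i,
// operand (2 * i + 1) is its first input and operand (2 * i + 2) is its constant second input;
// the last operand (2 * len + 1) is the model output.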
template <typename CppType, OperandType operandType>
Model createLargeTestModelImpl(OperationType op, uint32_t len) {
    EXPECT_TRUE(op == OperationType::ADD || op == OperationType::MUL);

    // Model operations and operands.
    std::vector<Operation> operations(len);
    std::vector<Operand> operands(len * 2 + 2);

    // The constant buffer pool. This contains the activation scalar, followed by the
    // per-operation constant operands.
    std::vector<uint8_t> operandValues(sizeof(int32_t) + len * sizeof(CppType));

    // The activation scalar, value = 0.
    operands[0] = {
            .type = OperandType::INT32,
            .dimensions = {},
            .numberOfConsumers = len,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = OperandLifeTime::CONSTANT_COPY,
            .location = {.poolIndex = 0, .offset = 0, .length = sizeof(int32_t)},
    };
    memset(operandValues.data(), 0, sizeof(int32_t));

    // The buffer value of the constant second operand. The logical value is always 1.0f.
    CppType bufferValue;
    // The scale of the first and second operand.
    float scale1, scale2;
    if (operandType == OperandType::TENSOR_FLOAT32) {
        bufferValue = 1.0f;
        scale1 = 0.0f;
        scale2 = 0.0f;
    } else if (op == OperationType::ADD) {
        bufferValue = 1;
        scale1 = 1.0f;
        scale2 = 1.0f;
    } else {
        // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
        // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
        bufferValue = 2;
        scale1 = 1.0f;
        scale2 = 0.5f;
    }

    for (uint32_t i = 0; i < len; i++) {
        const uint32_t firstInputIndex = i * 2 + 1;
        const uint32_t secondInputIndex = firstInputIndex + 1;
        const uint32_t outputIndex = secondInputIndex + 1;

        // The first operation input.
        operands[firstInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale1,
                .zeroPoint = 0,
                .lifetime = (i == 0 ? OperandLifeTime::MODEL_INPUT
                                    : OperandLifeTime::TEMPORARY_VARIABLE),
                .location = {},
        };

        // The second operation input, value = 1.
        operands[secondInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale2,
                .zeroPoint = 0,
                .lifetime = OperandLifeTime::CONSTANT_COPY,
                .location = {.poolIndex = 0,
                             .offset = static_cast<uint32_t>(i * sizeof(CppType) + sizeof(int32_t)),
                             .length = sizeof(CppType)},
        };
        memcpy(operandValues.data() + sizeof(int32_t) + i * sizeof(CppType), &bufferValue,
               sizeof(CppType));

        // The operation. All operations share the same activation scalar.
        // The output operand is created as an input in the next iteration of the loop, in the case
        // of all but the last member of the chain; and after the loop as a model output, in the
        // case of the last member of the chain.
        operations[i] = {
                .type = op,
                .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
                .outputs = {outputIndex},
        };
    }

    // The model output.
    operands.back() = {
            .type = operandType,
            .dimensions = {1},
            .numberOfConsumers = 0,
            .scale = scale1,
            .zeroPoint = 0,
            .lifetime = OperandLifeTime::MODEL_OUTPUT,
            .location = {},
    };

    const std::vector<uint32_t> inputIndexes = {1};
    const std::vector<uint32_t> outputIndexes = {len * 2 + 1};
    const std::vector<hidl_memory> pools = {};

    return {
            .operands = operands,
            .operations = operations,
            .inputIndexes = inputIndexes,
            .outputIndexes = outputIndexes,
            .operandValues = operandValues,
            .pools = pools,
    };
}

}  // namespace

// Tag for the compilation caching tests.
class CompilationCachingTestBase : public NeuralnetworksHidlTest {
  protected:
    CompilationCachingTestBase(OperandType type) : kOperandType(type) {}

    void SetUp() override {
        NeuralnetworksHidlTest::SetUp();
        ASSERT_NE(device.get(), nullptr);

        // Create cache directory. The cache directory and a temporary cache file are always created
        // to test the behavior of prepareModelFromCache, even when caching is not supported.
        char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
        char* cacheDir = mkdtemp(cacheDirTemp);
        ASSERT_NE(cacheDir, nullptr);
        mCacheDir = cacheDir;
        mCacheDir.push_back('/');

        Return<void> ret = device->getNumberOfCacheFilesNeeded(
                [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
                    EXPECT_EQ(ErrorStatus::NONE, status);
                    mNumModelCache = numModelCache;
                    mNumDataCache = numDataCache;
                });
        EXPECT_TRUE(ret.isOk());
        mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;

        // Create empty cache files.
        mTmpCache = mCacheDir + "tmp";
        for (uint32_t i = 0; i < mNumModelCache; i++) {
            mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
        }
        for (uint32_t i = 0; i < mNumDataCache; i++) {
            mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
        }
        // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
        hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
        createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
        createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
        createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);

        if (!mIsCachingSupported) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
                         "support compilation caching.";
            std::cout << "[          ] Early termination of test because vendor service does not "
                         "support compilation caching."
                      << std::endl;
        }
    }

    void TearDown() override {
        // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
        if (!::testing::Test::HasFailure()) {
            // Recursively remove the cache directory specified by mCacheDir.
            auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
                return remove(entry);
            };
            nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
        }
        NeuralnetworksHidlTest::TearDown();
    }

    // Model and examples creators. According to kOperandType, the following methods will return
    // either float32 model/examples or the quant8 variant.
    Model createTestModel() {
        if (kOperandType == OperandType::TENSOR_FLOAT32) {
            return float32_model::createTestModel();
        } else {
            return quant8_model::createTestModel();
        }
    }

    std::vector<MixedTypedExample> get_examples() {
        if (kOperandType == OperandType::TENSOR_FLOAT32) {
            return float32_model::get_examples();
        } else {
            return quant8_model::get_examples();
        }
    }

    Model createLargeTestModel(OperationType op, uint32_t len) {
        if (kOperandType == OperandType::TENSOR_FLOAT32) {
            return createLargeTestModelImpl<float, OperandType::TENSOR_FLOAT32>(op, len);
        } else {
            return createLargeTestModelImpl<uint8_t, OperandType::TENSOR_QUANT8_ASYMM>(op, len);
        }
    }

    std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
        if (kOperandType == OperandType::TENSOR_FLOAT32) {
            return float32_model::getLargeModelExamples(len);
        } else {
            return quant8_model::getLargeModelExamples(len);
        }
    }

    // See if the service can handle the model.
    bool isModelFullySupported(const V1_2::Model& model) {
        bool fullySupportsModel = false;
        Return<void> supportedCall = device->getSupportedOperations_1_2(
                model,
                [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
                    ASSERT_EQ(ErrorStatus::NONE, status);
                    ASSERT_EQ(supported.size(), model.operations.size());
                    fullySupportsModel = std::all_of(supported.begin(), supported.end(),
                                                     [](bool valid) { return valid; });
                });
        EXPECT_TRUE(supportedCall.isOk());
        return fullySupportsModel;
    }

    void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
                          const hidl_vec<hidl_handle>& dataCache,
                          sp<IPreparedModel>* preparedModel = nullptr) {
        if (preparedModel != nullptr) *preparedModel = nullptr;

        // Launch prepare model.
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        ASSERT_NE(nullptr, preparedModelCallback.get());
        hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
        Return<ErrorStatus> prepareLaunchStatus =
                device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
                                         dataCache, cacheToken, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);

        // Retrieve prepared model.
        preparedModelCallback->wait();
        ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
        if (preparedModel != nullptr) {
            *preparedModel =
                    V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
                            .withDefault(nullptr);
        }
    }

    bool checkEarlyTermination(ErrorStatus status) {
        if (status == ErrorStatus::GENERAL_FAILURE) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "save the prepared model that it does not support.";
            std::cout << "[          ] Early termination of test because vendor service cannot "
                         "save the prepared model that it does not support."
                      << std::endl;
            return true;
        }
        return false;
    }

    bool checkEarlyTermination(const V1_2::Model& model) {
        if (!isModelFullySupported(model)) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "prepare model that it does not support.";
            std::cout << "[          ] Early termination of test because vendor service cannot "
                         "prepare model that it does not support."
                      << std::endl;
            return true;
        }
        return false;
    }

    void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
                               const hidl_vec<hidl_handle>& dataCache,
                               sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
        // Launch prepare model from cache.
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        ASSERT_NE(nullptr, preparedModelCallback.get());
        hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
        Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
                modelCache, dataCache, cacheToken, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
            *preparedModel = nullptr;
            *status = static_cast<ErrorStatus>(prepareLaunchStatus);
            return;
        }

        // Retrieve prepared model.
        preparedModelCallback->wait();
        *status = preparedModelCallback->getStatus();
        *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
                                 .withDefault(nullptr);
    }

    // Absolute path to the temporary cache directory.
    std::string mCacheDir;

    // Groups of file paths for model and data cache in the tmp cache directory, initialized with
    // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
    // and the inner vector is for fds held by each handle.
    std::vector<std::vector<std::string>> mModelCache;
    std::vector<std::vector<std::string>> mDataCache;

    // A separate temporary file path in the tmp cache directory.
    std::string mTmpCache;

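    // Cache token passed to prepareModel_1_2 and prepareModelFromCache; all zeros by default, and
    // bumped (e.g. mToken[0]++) by tests that need to distinguish two different compilations.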
    uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
    uint32_t mNumModelCache;
    uint32_t mNumDataCache;
    bool mIsCachingSupported;

    // The primary data type of the testModel.
    const OperandType kOperandType;
};

// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
// pass running with float32 models and the second pass running with quant8 models.
class CompilationCachingTest : public CompilationCachingTestBase,
                               public ::testing::WithParamInterface<OperandType> {
  protected:
    CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
};

TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
                                           testModel.relaxComputationFloat32toFloat16,
                                           /*testDynamicOutputShape=*/false);
}

TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyBytes[] = {0, 0};
        // Write two dummy bytes to the cache files.
        // The driver should be able to handle non-empty cache and non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
                            sizeof(dummyBytes)),
                      sizeof(dummyBytes));
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_EQ(
                    write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
                    sizeof(dummyBytes));
        }
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyByte = 0;
        // Advance the offset of each handle by one byte.
        // The driver should be able to handle non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
                                           testModel.relaxComputationFloat32toFloat16,
                                           /*testDynamicOutputShape=*/false);
}

TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Test with number of model cache files greater than mNumModelCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for model cache.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for data cache.
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Test with number of model cache files greater than mNumModelCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}

// Copy file contents between file groups.
// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
// The outer vector sizes must match and the inner vectors must have size = 1.
static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
                           const std::vector<std::vector<std::string>>& to) {
    constexpr size_t kBufferSize = 1000000;
    uint8_t buffer[kBufferSize];

    ASSERT_EQ(from.size(), to.size());
    for (uint32_t i = 0; i < from.size(); i++) {
        ASSERT_EQ(from[i].size(), 1u);
        ASSERT_EQ(to[i].size(), 1u);
        int fromFd = open(from[i][0].c_str(), O_RDONLY);
        int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
        ASSERT_GE(fromFd, 0);
        ASSERT_GE(toFd, 0);

        ssize_t readBytes;
        while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
            ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
        }
        ASSERT_GE(readBytes, 0);

        close(fromFd);
        close(toFd);
    }
}

// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
constexpr uint32_t kNumIterationsTOCTOU = 100;

TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(testModelAdd, modelCache, dataCache);
            thread.join();
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1153
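// Same TOCTOU scenario as SaveToCache_TOCTOU above, except that the concurrent file copy races
// against prepareModelFromCache instead of saveModelToCache.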
Xusong Wang0e0721f2019-05-07 12:57:49 -07001154TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001155 if (!mIsCachingSupported) return;
1156
Xusong Wang4f71afc2019-04-26 15:33:38 -07001157 // Create test models and check if fully supported by the service.
1158 const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
1159 if (checkEarlyTermination(testModelMul)) return;
1160 const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
1161 if (checkEarlyTermination(testModelAdd)) return;
1162
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001163 // Save the testModelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001164 auto modelCacheMul = mModelCache;
1165 for (auto& cache : modelCacheMul) {
1166 cache[0].append("_mul");
1167 }
1168 {
1169 hidl_vec<hidl_handle> modelCache, dataCache;
1170 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1171 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001172 saveModelToCache(testModelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001173 }
1174
1175 // Use a different token for testModelAdd.
1176 mToken[0]++;
1177
1178 // This test is probabilistic, so we run it multiple times.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001179 for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
1180 // Save the testModelAdd compilation to cache.
1181 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001182 hidl_vec<hidl_handle> modelCache, dataCache;
1183 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1184 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001185 saveModelToCache(testModelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001186 }
1187
1188 // Retrieve preparedModel from cache.
1189 {
1190 sp<IPreparedModel> preparedModel = nullptr;
1191 ErrorStatus status;
1192 hidl_vec<hidl_handle> modelCache, dataCache;
1193 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1194 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1195
1196 // Spawn a thread to copy the cache content concurrently while preparing from cache.
1197 std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
1198 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1199 thread.join();
1200
1201            // Preparation may either succeed or fail, but it must never crash. If it succeeds,
1202            // the prepared model must execute without crashing and produce the correct results.
1203 if (status != ErrorStatus::NONE) {
1204 ASSERT_EQ(preparedModel, nullptr);
1205 } else {
1206 ASSERT_NE(preparedModel, nullptr);
1207 generated_tests::EvaluatePreparedModel(
1208 preparedModel, [](int) { return false; },
1209 getLargeModelExamples(kLargeModelSize),
1210 testModelAdd.relaxComputationFloat32toFloat16,
1211 /*testDynamicOutputShape=*/false);
1212 }
1213 }
1214 }
1215}
1216
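// Unlike the TOCTOU tests above, here the model cache of testModelAdd is replaced only after
// saving has fully completed, so the driver must deterministically detect the mismatch and
// fail the preparation with GENERAL_FAILURE.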
Xusong Wang0e0721f2019-05-07 12:57:49 -07001217TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001218 if (!mIsCachingSupported) return;
1219
Xusong Wang4f71afc2019-04-26 15:33:38 -07001220 // Create test models and check if fully supported by the service.
1221 const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
1222 if (checkEarlyTermination(testModelMul)) return;
1223 const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
1224 if (checkEarlyTermination(testModelAdd)) return;
1225
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001226 // Save the testModelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001227 auto modelCacheMul = mModelCache;
1228 for (auto& cache : modelCacheMul) {
1229 cache[0].append("_mul");
1230 }
1231 {
1232 hidl_vec<hidl_handle> modelCache, dataCache;
1233 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1234 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001235 saveModelToCache(testModelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001236 }
1237
1238 // Use a different token for testModelAdd.
1239 mToken[0]++;
1240
1241 // Save the testModelAdd compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001242 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001243 hidl_vec<hidl_handle> modelCache, dataCache;
1244 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1245 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001246 saveModelToCache(testModelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001247 }
1248
1249 // Replace the model cache of testModelAdd with testModelMul.
1250 copyCacheFiles(modelCacheMul, mModelCache);
1251
1252 // Retrieve the preparedModel from cache, expect failure.
1253 {
1254 sp<IPreparedModel> preparedModel = nullptr;
1255 ErrorStatus status;
1256 hidl_vec<hidl_handle> modelCache, dataCache;
1257 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1258 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1259 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1260 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1261 ASSERT_EQ(preparedModel, nullptr);
1262 }
1263}
1264
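// The caching tests are parameterized over the primary operand type of the test model, so both
// the float32 and the quant8 variants exercise the driver's caching path.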
Xusong Wang0e0721f2019-05-07 12:57:49 -07001265static const auto kOperandTypeChoices =
1266 ::testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);
1267
1268INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);
1269
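// Security-oriented variants of the caching tests. Each test is parameterized over the operand
// type and an RNG seed, so the random corruption applied below is reproducible.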
1270class CompilationCachingSecurityTest
1271 : public CompilationCachingTestBase,
1272 public ::testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001273 protected:
Xusong Wang0e0721f2019-05-07 12:57:49 -07001274 CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}
1275
Xusong Wang96e68dc2019-01-18 17:28:26 -08001276 void SetUp() {
Xusong Wang0e0721f2019-05-07 12:57:49 -07001277 CompilationCachingTestBase::SetUp();
Xusong Wang96e68dc2019-01-18 17:28:26 -08001278 generator.seed(kSeed);
1279 }
1280
1281 // Get a random integer within a closed range [lower, upper].
1282 template <typename T>
1283 T getRandomInt(T lower, T upper) {
1284 std::uniform_int_distribution<T> dis(lower, upper);
1285 return dis(generator);
1286 }
1287
Xusong Wange371f6f2019-04-23 14:51:50 -07001288 // Randomly flip one single bit of the cache entry.
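    // Sets *skip to true and leaves the file untouched when the cache entry is empty, since
    // there is no bit to flip.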
1289 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1290 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001291 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1292 long int fileSize = ftell(pFile);
1293 if (fileSize == 0) {
1294 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001295 *skip = true;
1296 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001297 }
1298 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1299 int readByte = fgetc(pFile);
1300 ASSERT_NE(readByte, EOF);
1301 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1302 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1303 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001304 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001305 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001306
Xusong Wange371f6f2019-04-23 14:51:50 -07001307 // Randomly append bytes to the cache entry.
1308 void appendBytesToCache(const std::string& filename, bool* skip) {
1309        FILE* pFile = fopen(filename.c_str(), "a");
        ASSERT_NE(pFile, nullptr);
1310 uint32_t appendLength = getRandomInt(1, 256);
1311 for (uint32_t i = 0; i < appendLength; i++) {
1312 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1313 }
1314 fclose(pFile);
1315 *skip = false;
1316 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001317
Xusong Wange371f6f2019-04-23 14:51:50 -07001318 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001319
Xusong Wange371f6f2019-04-23 14:51:50 -07001320 // Test if the driver behaves as expected when given corrupted cache or token.
1321 // The modifier will be invoked after save to cache but before prepare from cache.
1322    // The modifier reports through its output parameter "skip" whether the test should be
1323    // skipped, e.g. because the cache entry to corrupt is empty.
1324 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wang4f71afc2019-04-26 15:33:38 -07001325 const Model testModel = createTestModel();
1326 if (checkEarlyTermination(testModel)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001327
Xusong Wanged0822b2019-02-25 16:58:58 -08001328 // Save the compilation to cache.
1329 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001330 hidl_vec<hidl_handle> modelCache, dataCache;
1331 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1332 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001333 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001334 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001335
Xusong Wange371f6f2019-04-23 14:51:50 -07001336 bool skip = false;
1337 modifier(&skip);
1338 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001339
Xusong Wange371f6f2019-04-23 14:51:50 -07001340 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001341 {
1342 sp<IPreparedModel> preparedModel = nullptr;
1343 ErrorStatus status;
1344 hidl_vec<hidl_handle> modelCache, dataCache;
1345 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1346 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1347 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001348
1349 switch (expected) {
1350 case ExpectedResult::GENERAL_FAILURE:
1351 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1352 ASSERT_EQ(preparedModel, nullptr);
1353 break;
1354 case ExpectedResult::NOT_CRASH:
1355 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1356 break;
1357 default:
1358 FAIL();
1359 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001360 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001361 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001362
Xusong Wang0e0721f2019-05-07 12:57:49 -07001363 const uint32_t kSeed = std::get<1>(GetParam());
Xusong Wange371f6f2019-04-23 14:51:50 -07001364 std::mt19937 generator;
1365};
1366
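// The four tests below corrupt one cache entry at a time and then attempt to prepare from
// cache. Corrupting the model cache (the security-sensitive artifact, see
// ReplaceSecuritySensitiveCache above) must be detected and reported as GENERAL_FAILURE; for a
// corrupted data cache the driver is only required not to crash, and may either reject the
// cache or prepare the model anyway.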
1367TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1368 if (!mIsCachingSupported) return;
1369 for (uint32_t i = 0; i < mNumModelCache; i++) {
1370 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1371 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1372 }
1373}
1374
1375TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1376 if (!mIsCachingSupported) return;
1377 for (uint32_t i = 0; i < mNumModelCache; i++) {
1378 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1379 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1380 }
1381}
1382
1383TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1384 if (!mIsCachingSupported) return;
1385 for (uint32_t i = 0; i < mNumDataCache; i++) {
1386 testCorruptedCache(ExpectedResult::NOT_CRASH,
1387 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1388 }
1389}
1390
1391TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1392 if (!mIsCachingSupported) return;
1393 for (uint32_t i = 0; i < mNumDataCache; i++) {
1394 testCorruptedCache(ExpectedResult::NOT_CRASH,
1395 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1396 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001397}
1398
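// A token that does not match the one used when saving must be rejected in the same way as a
// corrupted model cache.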
1399TEST_P(CompilationCachingSecurityTest, WrongToken) {
1400 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001401 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1402 // Randomly flip one single bit in mToken.
1403 uint32_t ind =
1404 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1405 mToken[ind] ^= (1U << getRandomInt(0, 7));
1406 *skip = false;
1407 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001408}
1409
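// Run every security test with both operand types and ten different RNG seeds.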
1410INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
Xusong Wang0e0721f2019-05-07 12:57:49 -07001411 ::testing::Combine(kOperandTypeChoices, ::testing::Range(0U, 10U)));
Xusong Wang96e68dc2019-01-18 17:28:26 -08001412
1413} // namespace functional
1414} // namespace vts
1415} // namespace V1_2
1416} // namespace neuralnetworks
1417} // namespace hardware
1418} // namespace android