blob: ac92a5b0a2dff8009399fb683ff5e32f1dfbdfa2 [file] [log] [blame]
Xusong Wang96e68dc2019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070019#include <android-base/logging.h>
20#include <android/hidl/memory/1.0/IMemory.h>
21#include <ftw.h>
22#include <gtest/gtest.h>
23#include <hidlmemory/mapping.h>
24#include <unistd.h>
25
26#include <cstdio>
27#include <cstdlib>
28#include <random>
Michael Butler051cf392019-07-16 16:52:06 -070029#include <thread>
Xusong Wang96e68dc2019-01-18 17:28:26 -080030
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010031#include "1.2/Callbacks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080032#include "GeneratedTestHarness.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010033#include "MemoryUtils.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080034#include "TestHarness.h"
35#include "Utils.h"
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070036#include "VtsHalNeuralnetworks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080037
38namespace android {
39namespace hardware {
40namespace neuralnetworks {
41namespace V1_2 {
42namespace vts {
43namespace functional {
44
45using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
46using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
47using ::android::nn::allocateSharedMemory;
48using ::test_helper::MixedTypedExample;
49
Xusong Wang0e0721f2019-05-07 12:57:49 -070050namespace float32_model {
Xusong Wang96e68dc2019-01-18 17:28:26 -080051
Xusong Wang0e0721f2019-05-07 12:57:49 -070052// In frameworks/ml/nn/runtime/test/generated/, creates a hidl model of float32 mobilenet.
Xusong Wang96e68dc2019-01-18 17:28:26 -080053#include "examples/mobilenet_224_gender_basic_fixed.example.cpp"
Slava Shklyaevcac83ee2019-05-29 22:21:53 +010054#include "vts/V1_2/models/mobilenet_224_gender_basic_fixed.model.cpp"
Xusong Wang96e68dc2019-01-18 17:28:26 -080055
56// Prevent the compiler from complaining about an otherwise unused function.
57[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
58[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;
59
Xusong Wang0e0721f2019-05-07 12:57:49 -070060// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
61// This function assumes the operation is always ADD.
62std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
63 float outputValue = 1.0f + static_cast<float>(len);
64 return {{.operands = {
65 // Input
66 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {1.0f}}}},
67 // Output
68 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {outputValue}}}}}}};
69}
70
71} // namespace float32_model
72
73namespace quant8_model {
74
75// In frameworks/ml/nn/runtime/test/generated/, creates a hidl model of quant8 mobilenet.
76#include "examples/mobilenet_quantized.example.cpp"
Slava Shklyaevcac83ee2019-05-29 22:21:53 +010077#include "vts/V1_2/models/mobilenet_quantized.model.cpp"
Xusong Wang0e0721f2019-05-07 12:57:49 -070078
79// Prevent the compiler from complaining about an otherwise unused function.
80[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
81[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;
82
83// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
84// This function assumes the operation is always ADD.
85std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
86 uint8_t outputValue = 1 + static_cast<uint8_t>(len);
87 return {{.operands = {// Input
88 {.operandDimensions = {{0, {1}}}, .quant8AsymmOperands = {{0, {1}}}},
89 // Output
90 {.operandDimensions = {{0, {1}}},
91 .quant8AsymmOperands = {{0, {outputValue}}}}}}};
92}
93
94} // namespace quant8_model
95
96namespace {
97
// Access mode used when opening the files that back a cache handle.
enum class AccessMode {
    READ_WRITE,  // open(O_RDWR | O_CREAT)
    READ_ONLY,   // open(O_RDONLY)
    WRITE_ONLY,  // open(O_WRONLY | O_CREAT)
};
Xusong Wang96e68dc2019-01-18 17:28:26 -080099
Xusong Wanged0822b2019-02-25 16:58:58 -0800100// Creates cache handles based on provided file groups.
101// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
102void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
103 const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
104 handles->resize(fileGroups.size());
105 for (uint32_t i = 0; i < fileGroups.size(); i++) {
106 std::vector<int> fds;
107 for (const auto& file : fileGroups[i]) {
108 int fd;
109 if (mode[i] == AccessMode::READ_ONLY) {
110 fd = open(file.c_str(), O_RDONLY);
111 } else if (mode[i] == AccessMode::WRITE_ONLY) {
112 fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
113 } else if (mode[i] == AccessMode::READ_WRITE) {
114 fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
115 } else {
116 FAIL();
117 }
118 ASSERT_GE(fd, 0);
119 fds.push_back(fd);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800120 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800121 native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
122 ASSERT_NE(cacheNativeHandle, nullptr);
123 std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
124 (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800125 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800126}
127
128void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
129 hidl_vec<hidl_handle>* handles) {
130 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800131}
132
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700133// Create a chain of broadcast operations. The second operand is always constant tensor [1].
134// For simplicity, activation scalar is shared. The second operand is not shared
135// in the model to let driver maintain a non-trivial size of constant data and the corresponding
136// data locations in cache.
137//
138// --------- activation --------
139// ↓ ↓ ↓ ↓
140// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
141// ↑ ↑ ↑ ↑
142// [1] [1] [1] [1]
143//
Xusong Wang0e0721f2019-05-07 12:57:49 -0700144// This function assumes the operation is either ADD or MUL.
145template <typename CppType, OperandType operandType>
146Model createLargeTestModelImpl(OperationType op, uint32_t len) {
147 EXPECT_TRUE(op == OperationType::ADD || op == OperationType::MUL);
148
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700149 // Model operations and operands.
150 std::vector<Operation> operations(len);
151 std::vector<Operand> operands(len * 2 + 2);
152
153 // The constant buffer pool. This contains the activation scalar, followed by the
154 // per-operation constant operands.
Xusong Wang0e0721f2019-05-07 12:57:49 -0700155 std::vector<uint8_t> operandValues(sizeof(int32_t) + len * sizeof(CppType));
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700156
157 // The activation scalar, value = 0.
158 operands[0] = {
159 .type = OperandType::INT32,
160 .dimensions = {},
161 .numberOfConsumers = len,
162 .scale = 0.0f,
163 .zeroPoint = 0,
164 .lifetime = OperandLifeTime::CONSTANT_COPY,
165 .location = {.poolIndex = 0, .offset = 0, .length = sizeof(int32_t)},
166 };
167 memset(operandValues.data(), 0, sizeof(int32_t));
168
Xusong Wang0e0721f2019-05-07 12:57:49 -0700169 // The buffer value of the constant second operand. The logical value is always 1.0f.
170 CppType bufferValue;
171 // The scale of the first and second operand.
172 float scale1, scale2;
173 if (operandType == OperandType::TENSOR_FLOAT32) {
174 bufferValue = 1.0f;
175 scale1 = 0.0f;
176 scale2 = 0.0f;
177 } else if (op == OperationType::ADD) {
178 bufferValue = 1;
179 scale1 = 1.0f;
180 scale2 = 1.0f;
181 } else {
182 // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
183 // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
184 bufferValue = 2;
185 scale1 = 1.0f;
186 scale2 = 0.5f;
187 }
188
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700189 for (uint32_t i = 0; i < len; i++) {
190 const uint32_t firstInputIndex = i * 2 + 1;
191 const uint32_t secondInputIndex = firstInputIndex + 1;
192 const uint32_t outputIndex = secondInputIndex + 1;
193
194 // The first operation input.
195 operands[firstInputIndex] = {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700196 .type = operandType,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700197 .dimensions = {1},
198 .numberOfConsumers = 1,
Xusong Wang0e0721f2019-05-07 12:57:49 -0700199 .scale = scale1,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700200 .zeroPoint = 0,
201 .lifetime = (i == 0 ? OperandLifeTime::MODEL_INPUT
202 : OperandLifeTime::TEMPORARY_VARIABLE),
203 .location = {},
204 };
205
206 // The second operation input, value = 1.
207 operands[secondInputIndex] = {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700208 .type = operandType,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700209 .dimensions = {1},
210 .numberOfConsumers = 1,
Xusong Wang0e0721f2019-05-07 12:57:49 -0700211 .scale = scale2,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700212 .zeroPoint = 0,
213 .lifetime = OperandLifeTime::CONSTANT_COPY,
214 .location = {.poolIndex = 0,
Xusong Wang0e0721f2019-05-07 12:57:49 -0700215 .offset = static_cast<uint32_t>(i * sizeof(CppType) + sizeof(int32_t)),
216 .length = sizeof(CppType)},
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700217 };
Xusong Wang0e0721f2019-05-07 12:57:49 -0700218 memcpy(operandValues.data() + sizeof(int32_t) + i * sizeof(CppType), &bufferValue,
219 sizeof(CppType));
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700220
221 // The operation. All operations share the same activation scalar.
222 // The output operand is created as an input in the next iteration of the loop, in the case
223 // of all but the last member of the chain; and after the loop as a model output, in the
224 // case of the last member of the chain.
225 operations[i] = {
226 .type = op,
227 .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
228 .outputs = {outputIndex},
229 };
230 }
231
232 // The model output.
233 operands.back() = {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700234 .type = operandType,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700235 .dimensions = {1},
236 .numberOfConsumers = 0,
Xusong Wang0e0721f2019-05-07 12:57:49 -0700237 .scale = scale1,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700238 .zeroPoint = 0,
239 .lifetime = OperandLifeTime::MODEL_OUTPUT,
240 .location = {},
241 };
242
243 const std::vector<uint32_t> inputIndexes = {1};
244 const std::vector<uint32_t> outputIndexes = {len * 2 + 1};
245 const std::vector<hidl_memory> pools = {};
246
247 return {
248 .operands = operands,
249 .operations = operations,
250 .inputIndexes = inputIndexes,
251 .outputIndexes = outputIndexes,
252 .operandValues = operandValues,
253 .pools = pools,
254 };
255}
256
Xusong Wang96e68dc2019-01-18 17:28:26 -0800257} // namespace
258
259// Tag for the compilation caching tests.
Xusong Wang0e0721f2019-05-07 12:57:49 -0700260class CompilationCachingTestBase : public NeuralnetworksHidlTest {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800261 protected:
Xusong Wang0e0721f2019-05-07 12:57:49 -0700262 CompilationCachingTestBase(OperandType type) : kOperandType(type) {}
263
Xusong Wang96e68dc2019-01-18 17:28:26 -0800264 void SetUp() override {
265 NeuralnetworksHidlTest::SetUp();
Hervé Guihotac7ac522019-02-12 16:22:44 -0800266 ASSERT_NE(device.get(), nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800267
Xusong Wanged0822b2019-02-25 16:58:58 -0800268 // Create cache directory. The cache directory and a temporary cache file is always created
269 // to test the behavior of prepareModelFromCache, even when caching is not supported.
Xusong Wang96e68dc2019-01-18 17:28:26 -0800270 char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
271 char* cacheDir = mkdtemp(cacheDirTemp);
272 ASSERT_NE(cacheDir, nullptr);
Xusong Wang6824cc12019-02-12 18:00:37 -0800273 mCacheDir = cacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800274 mCacheDir.push_back('/');
Xusong Wang6824cc12019-02-12 18:00:37 -0800275
Xusong Wanged0822b2019-02-25 16:58:58 -0800276 Return<void> ret = device->getNumberOfCacheFilesNeeded(
277 [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800278 EXPECT_EQ(ErrorStatus::NONE, status);
Xusong Wanged0822b2019-02-25 16:58:58 -0800279 mNumModelCache = numModelCache;
280 mNumDataCache = numDataCache;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800281 });
282 EXPECT_TRUE(ret.isOk());
Xusong Wanged0822b2019-02-25 16:58:58 -0800283 mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;
284
285 // Create empty cache files.
286 mTmpCache = mCacheDir + "tmp";
287 for (uint32_t i = 0; i < mNumModelCache; i++) {
288 mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
289 }
290 for (uint32_t i = 0; i < mNumDataCache; i++) {
291 mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
292 }
293 // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
294 hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
295 createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
296 createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
297 createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);
298
299 if (!mIsCachingSupported) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800300 LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
301 "support compilation caching.";
302 std::cout << "[ ] Early termination of test because vendor service does not "
303 "support compilation caching."
304 << std::endl;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800305 }
Xusong Wang6824cc12019-02-12 18:00:37 -0800306 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800307
Xusong Wang6824cc12019-02-12 18:00:37 -0800308 void TearDown() override {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700309 // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
310 if (!::testing::Test::HasFailure()) {
311 // Recursively remove the cache directory specified by mCacheDir.
312 auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
313 return remove(entry);
314 };
315 nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
Xusong Wang6824cc12019-02-12 18:00:37 -0800316 }
317 NeuralnetworksHidlTest::TearDown();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800318 }
319
Xusong Wang0e0721f2019-05-07 12:57:49 -0700320 // Model and examples creators. According to kOperandType, the following methods will return
321 // either float32 model/examples or the quant8 variant.
322 Model createTestModel() {
323 if (kOperandType == OperandType::TENSOR_FLOAT32) {
324 return float32_model::createTestModel();
325 } else {
326 return quant8_model::createTestModel();
327 }
328 }
329
330 std::vector<MixedTypedExample> get_examples() {
331 if (kOperandType == OperandType::TENSOR_FLOAT32) {
332 return float32_model::get_examples();
333 } else {
334 return quant8_model::get_examples();
335 }
336 }
337
338 Model createLargeTestModel(OperationType op, uint32_t len) {
339 if (kOperandType == OperandType::TENSOR_FLOAT32) {
340 return createLargeTestModelImpl<float, OperandType::TENSOR_FLOAT32>(op, len);
341 } else {
342 return createLargeTestModelImpl<uint8_t, OperandType::TENSOR_QUANT8_ASYMM>(op, len);
343 }
344 }
345
346 std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
347 if (kOperandType == OperandType::TENSOR_FLOAT32) {
348 return float32_model::getLargeModelExamples(len);
349 } else {
350 return quant8_model::getLargeModelExamples(len);
351 }
352 }
353
Xusong Wang4f71afc2019-04-26 15:33:38 -0700354 // See if the service can handle the model.
355 bool isModelFullySupported(const V1_2::Model& model) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800356 bool fullySupportsModel = false;
357 Return<void> supportedCall = device->getSupportedOperations_1_2(
358 model,
359 [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
360 ASSERT_EQ(ErrorStatus::NONE, status);
361 ASSERT_EQ(supported.size(), model.operations.size());
362 fullySupportsModel = std::all_of(supported.begin(), supported.end(),
363 [](bool valid) { return valid; });
364 });
Xusong Wang4f71afc2019-04-26 15:33:38 -0700365 EXPECT_TRUE(supportedCall.isOk());
366 return fullySupportsModel;
367 }
368
369 void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
370 const hidl_vec<hidl_handle>& dataCache,
371 sp<IPreparedModel>* preparedModel = nullptr) {
372 if (preparedModel != nullptr) *preparedModel = nullptr;
Xusong Wanged0822b2019-02-25 16:58:58 -0800373
374 // Launch prepare model.
375 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
376 ASSERT_NE(nullptr, preparedModelCallback.get());
Xusong Wang96e68dc2019-01-18 17:28:26 -0800377 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800378 Return<ErrorStatus> prepareLaunchStatus =
379 device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
380 dataCache, cacheToken, preparedModelCallback);
381 ASSERT_TRUE(prepareLaunchStatus.isOk());
382 ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);
383
384 // Retrieve prepared model.
385 preparedModelCallback->wait();
386 ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
387 if (preparedModel != nullptr) {
388 *preparedModel =
389 V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
390 .withDefault(nullptr);
391 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800392 }
393
394 bool checkEarlyTermination(ErrorStatus status) {
395 if (status == ErrorStatus::GENERAL_FAILURE) {
396 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
397 "save the prepared model that it does not support.";
398 std::cout << "[ ] Early termination of test because vendor service cannot "
399 "save the prepared model that it does not support."
400 << std::endl;
401 return true;
402 }
403 return false;
404 }
405
Xusong Wang4f71afc2019-04-26 15:33:38 -0700406 bool checkEarlyTermination(const V1_2::Model& model) {
407 if (!isModelFullySupported(model)) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800408 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
409 "prepare model that it does not support.";
410 std::cout << "[ ] Early termination of test because vendor service cannot "
411 "prepare model that it does not support."
412 << std::endl;
413 return true;
414 }
415 return false;
416 }
417
418 void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
419 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang96e68dc2019-01-18 17:28:26 -0800420 sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
421 // Launch prepare model from cache.
422 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
423 ASSERT_NE(nullptr, preparedModelCallback.get());
424 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800425 Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
426 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800427 ASSERT_TRUE(prepareLaunchStatus.isOk());
428 if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
429 *preparedModel = nullptr;
430 *status = static_cast<ErrorStatus>(prepareLaunchStatus);
431 return;
432 }
433
434 // Retrieve prepared model.
435 preparedModelCallback->wait();
436 *status = preparedModelCallback->getStatus();
437 *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
438 .withDefault(nullptr);
439 }
440
Xusong Wanged0822b2019-02-25 16:58:58 -0800441 // Absolute path to the temporary cache directory.
Xusong Wang6824cc12019-02-12 18:00:37 -0800442 std::string mCacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800443
444 // Groups of file paths for model and data cache in the tmp cache directory, initialized with
445 // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
446 // and the inner vector is for fds held by each handle.
447 std::vector<std::vector<std::string>> mModelCache;
448 std::vector<std::vector<std::string>> mDataCache;
449
450 // A separate temporary file path in the tmp cache directory.
451 std::string mTmpCache;
452
Xusong Wang96e68dc2019-01-18 17:28:26 -0800453 uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
Xusong Wanged0822b2019-02-25 16:58:58 -0800454 uint32_t mNumModelCache;
455 uint32_t mNumDataCache;
456 uint32_t mIsCachingSupported;
Xusong Wang0e0721f2019-05-07 12:57:49 -0700457
458 // The primary data type of the testModel.
459 const OperandType kOperandType;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800460};
461
Xusong Wang0e0721f2019-05-07 12:57:49 -0700462// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
463// pass running with float32 models and the second pass running with quant8 models.
464class CompilationCachingTest : public CompilationCachingTestBase,
465 public ::testing::WithParamInterface<OperandType> {
466 protected:
467 CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
468};
469
470TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800471 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700472 const Model testModel = createTestModel();
473 if (checkEarlyTermination(testModel)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800474 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800475
476 // Save the compilation to cache.
477 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800478 hidl_vec<hidl_handle> modelCache, dataCache;
479 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
480 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -0700481 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800482 }
483
484 // Retrieve preparedModel from cache.
485 {
486 preparedModel = nullptr;
487 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800488 hidl_vec<hidl_handle> modelCache, dataCache;
489 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
490 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
491 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800492 if (!mIsCachingSupported) {
493 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
494 ASSERT_EQ(preparedModel, nullptr);
495 return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800496 } else if (checkEarlyTermination(status)) {
497 ASSERT_EQ(preparedModel, nullptr);
498 return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800499 } else {
500 ASSERT_EQ(status, ErrorStatus::NONE);
501 ASSERT_NE(preparedModel, nullptr);
502 }
503 }
504
505 // Execute and verify results.
506 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
507 testModel.relaxComputationFloat32toFloat16,
508 /*testDynamicOutputShape=*/false);
509}
510
Xusong Wang0e0721f2019-05-07 12:57:49 -0700511TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800512 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700513 const Model testModel = createTestModel();
514 if (checkEarlyTermination(testModel)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800515 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800516
517 // Save the compilation to cache.
518 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800519 hidl_vec<hidl_handle> modelCache, dataCache;
520 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
521 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
522 uint8_t dummyBytes[] = {0, 0};
523 // Write a dummy integer to the cache.
524 // The driver should be able to handle non-empty cache and non-zero fd offset.
525 for (uint32_t i = 0; i < modelCache.size(); i++) {
526 ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
527 sizeof(dummyBytes)),
528 sizeof(dummyBytes));
Xusong Wang96e68dc2019-01-18 17:28:26 -0800529 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800530 for (uint32_t i = 0; i < dataCache.size(); i++) {
531 ASSERT_EQ(
532 write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
533 sizeof(dummyBytes));
534 }
Xusong Wang4f71afc2019-04-26 15:33:38 -0700535 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800536 }
537
538 // Retrieve preparedModel from cache.
539 {
540 preparedModel = nullptr;
541 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800542 hidl_vec<hidl_handle> modelCache, dataCache;
543 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
544 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800545 uint8_t dummyByte = 0;
Xusong Wanged0822b2019-02-25 16:58:58 -0800546 // Advance the offset of each handle by one byte.
547 // The driver should be able to handle non-zero fd offset.
548 for (uint32_t i = 0; i < modelCache.size(); i++) {
549 ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
550 }
551 for (uint32_t i = 0; i < dataCache.size(); i++) {
552 ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
553 }
554 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800555 if (!mIsCachingSupported) {
556 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
557 ASSERT_EQ(preparedModel, nullptr);
558 return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800559 } else if (checkEarlyTermination(status)) {
560 ASSERT_EQ(preparedModel, nullptr);
561 return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800562 } else {
563 ASSERT_EQ(status, ErrorStatus::NONE);
564 ASSERT_NE(preparedModel, nullptr);
565 }
566 }
567
568 // Execute and verify results.
569 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
570 testModel.relaxComputationFloat32toFloat16,
571 /*testDynamicOutputShape=*/false);
572}
573
Xusong Wang0e0721f2019-05-07 12:57:49 -0700574TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800575 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700576 const Model testModel = createTestModel();
577 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800578
579 // Test with number of model cache files greater than mNumModelCache.
580 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800581 hidl_vec<hidl_handle> modelCache, dataCache;
582 // Pass an additional cache file for model cache.
583 mModelCache.push_back({mTmpCache});
584 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
585 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
586 mModelCache.pop_back();
587 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700588 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800589 ASSERT_NE(preparedModel, nullptr);
590 // Execute and verify results.
591 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
592 get_examples(),
593 testModel.relaxComputationFloat32toFloat16,
594 /*testDynamicOutputShape=*/false);
595 // Check if prepareModelFromCache fails.
596 preparedModel = nullptr;
597 ErrorStatus status;
598 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
599 if (status != ErrorStatus::INVALID_ARGUMENT) {
600 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
601 }
602 ASSERT_EQ(preparedModel, nullptr);
603 }
604
605 // Test with number of model cache files smaller than mNumModelCache.
606 if (mModelCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800607 hidl_vec<hidl_handle> modelCache, dataCache;
608 // Pop out the last cache file.
609 auto tmp = mModelCache.back();
610 mModelCache.pop_back();
611 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
612 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
613 mModelCache.push_back(tmp);
614 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700615 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800616 ASSERT_NE(preparedModel, nullptr);
617 // Execute and verify results.
618 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
619 get_examples(),
620 testModel.relaxComputationFloat32toFloat16,
621 /*testDynamicOutputShape=*/false);
622 // Check if prepareModelFromCache fails.
623 preparedModel = nullptr;
624 ErrorStatus status;
625 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
626 if (status != ErrorStatus::INVALID_ARGUMENT) {
627 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
628 }
629 ASSERT_EQ(preparedModel, nullptr);
630 }
631
632 // Test with number of data cache files greater than mNumDataCache.
633 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800634 hidl_vec<hidl_handle> modelCache, dataCache;
635 // Pass an additional cache file for data cache.
636 mDataCache.push_back({mTmpCache});
637 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
638 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
639 mDataCache.pop_back();
640 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700641 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800642 ASSERT_NE(preparedModel, nullptr);
643 // Execute and verify results.
644 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
645 get_examples(),
646 testModel.relaxComputationFloat32toFloat16,
647 /*testDynamicOutputShape=*/false);
648 // Check if prepareModelFromCache fails.
649 preparedModel = nullptr;
650 ErrorStatus status;
651 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
652 if (status != ErrorStatus::INVALID_ARGUMENT) {
653 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
654 }
655 ASSERT_EQ(preparedModel, nullptr);
656 }
657
658 // Test with number of data cache files smaller than mNumDataCache.
659 if (mDataCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800660 hidl_vec<hidl_handle> modelCache, dataCache;
661 // Pop out the last cache file.
662 auto tmp = mDataCache.back();
663 mDataCache.pop_back();
664 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
665 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
666 mDataCache.push_back(tmp);
667 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700668 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800669 ASSERT_NE(preparedModel, nullptr);
670 // Execute and verify results.
671 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
672 get_examples(),
673 testModel.relaxComputationFloat32toFloat16,
674 /*testDynamicOutputShape=*/false);
675 // Check if prepareModelFromCache fails.
676 preparedModel = nullptr;
677 ErrorStatus status;
678 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
679 if (status != ErrorStatus::INVALID_ARGUMENT) {
680 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
681 }
682 ASSERT_EQ(preparedModel, nullptr);
683 }
684}
685
// Validates the driver behavior of prepareModelFromCache when the number of
// cache files does not match the (mNumModelCache, mNumDataCache) counts the
// driver reported. Preparation must fail with GENERAL_FAILURE or
// INVALID_ARGUMENT, and no prepared model may be returned.
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Test with number of model cache files greater than mNumModelCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily add an extra cache file so one handle too many is passed.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either error status is acceptable for a wrong file count.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily drop the last cache file so one handle too few is passed.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
765
// Validates the driver behavior of prepareModel_1_2 when a cache handle holds
// an invalid number of fds (0, or more than 1). Compilation must still succeed
// (the driver simply skips caching and the returned model must execute
// correctly), while a subsequent prepareModelFromCache must fail because
// nothing was written to the cache.
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        // Compilation itself must still succeed.
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
877
// Validates the driver behavior of prepareModelFromCache when a cache handle
// holds an invalid number of fds (0, or more than 1). Preparation must fail
// with GENERAL_FAILURE or INVALID_ARGUMENT, and no prepared model may be
// returned.
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily give handle i an extra fd.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either error status is acceptable for an invalid fd count.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily remove the fd of handle i.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
957
// Validates the driver behavior of prepareModel_1_2 when a cache handle was
// opened without write permission (READ_ONLY). Compilation must still succeed
// (caching is simply skipped and the returned model must execute correctly),
// while a subsequent prepareModelFromCache must fail because nothing was
// written to the cache.
TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Make handle i read-only for this iteration only.
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
1015
Xusong Wang0e0721f2019-05-07 12:57:49 -07001016TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001017 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -07001018 const Model testModel = createTestModel();
1019 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001020 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
1021 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001022
1023 // Save the compilation to cache.
1024 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001025 hidl_vec<hidl_handle> modelCache, dataCache;
1026 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1027 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001028 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001029 }
1030
Xusong Wanged0822b2019-02-25 16:58:58 -08001031 // Go through each handle in model cache, test with invalid access mode.
1032 for (uint32_t i = 0; i < mNumModelCache; i++) {
1033 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001034 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -08001035 hidl_vec<hidl_handle> modelCache, dataCache;
1036 modelCacheMode[i] = AccessMode::WRITE_ONLY;
1037 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
1038 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
1039 modelCacheMode[i] = AccessMode::READ_WRITE;
1040 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001041 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1042 ASSERT_EQ(preparedModel, nullptr);
1043 }
1044
Xusong Wanged0822b2019-02-25 16:58:58 -08001045 // Go through each handle in data cache, test with invalid access mode.
1046 for (uint32_t i = 0; i < mNumDataCache; i++) {
1047 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001048 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -08001049 hidl_vec<hidl_handle> modelCache, dataCache;
1050 dataCacheMode[i] = AccessMode::WRITE_ONLY;
1051 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
1052 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
1053 dataCacheMode[i] = AccessMode::READ_WRITE;
1054 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001055 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1056 ASSERT_EQ(preparedModel, nullptr);
1057 }
1058}
1059
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001060// Copy file contents between file groups.
1061// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
1062// The outer vector sizes must match and the inner vectors must have size = 1.
1063static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
1064 const std::vector<std::vector<std::string>>& to) {
1065 constexpr size_t kBufferSize = 1000000;
1066 uint8_t buffer[kBufferSize];
1067
1068 ASSERT_EQ(from.size(), to.size());
1069 for (uint32_t i = 0; i < from.size(); i++) {
1070 ASSERT_EQ(from[i].size(), 1u);
1071 ASSERT_EQ(to[i].size(), 1u);
1072 int fromFd = open(from[i][0].c_str(), O_RDONLY);
1073 int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
1074 ASSERT_GE(fromFd, 0);
1075 ASSERT_GE(toFd, 0);
1076
1077 ssize_t readBytes;
1078 while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
1079 ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
1080 }
1081 ASSERT_GE(readBytes, 0);
1082
1083 close(fromFd);
1084 close(toFd);
1085 }
1086}
1087
// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
// Number of repetitions of the probabilistic TOCTOU tests below.
constexpr uint32_t kNumIterationsTOCTOU = 100;
1091
// Time-of-check to time-of-use test: while the driver saves the testModelAdd
// compilation to cache, a second thread concurrently overwrites the cache
// files with the contents of testModelMul's cache. The driver must not crash;
// if preparing from the (possibly corrupted) cache later succeeds, execution
// must still produce correct results.
TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache, under filenames suffixed
    // with "_mul" so they are distinct from testModelAdd's cache files.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(testModelAdd, modelCache, dataCache);
            thread.join();
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1154
// Time-of-check to time-of-use test: while the driver prepares a model from
// cache, a second thread concurrently overwrites the cache files with the
// contents of a different model's cache. The driver must not crash; if the
// preparation succeeds anyway, execution must still produce correct results.
TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache, under filenames suffixed
    // with "_mul" so they are distinct from testModelAdd's cache files.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(testModelAdd, modelCache, dataCache);
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1217
Xusong Wang0e0721f2019-05-07 12:57:49 -07001218TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001219 if (!mIsCachingSupported) return;
1220
Xusong Wang4f71afc2019-04-26 15:33:38 -07001221 // Create test models and check if fully supported by the service.
1222 const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
1223 if (checkEarlyTermination(testModelMul)) return;
1224 const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
1225 if (checkEarlyTermination(testModelAdd)) return;
1226
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001227 // Save the testModelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001228 auto modelCacheMul = mModelCache;
1229 for (auto& cache : modelCacheMul) {
1230 cache[0].append("_mul");
1231 }
1232 {
1233 hidl_vec<hidl_handle> modelCache, dataCache;
1234 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1235 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001236 saveModelToCache(testModelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001237 }
1238
1239 // Use a different token for testModelAdd.
1240 mToken[0]++;
1241
1242 // Save the testModelAdd compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001243 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001244 hidl_vec<hidl_handle> modelCache, dataCache;
1245 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1246 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001247 saveModelToCache(testModelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001248 }
1249
1250 // Replace the model cache of testModelAdd with testModelMul.
1251 copyCacheFiles(modelCacheMul, mModelCache);
1252
1253 // Retrieve the preparedModel from cache, expect failure.
1254 {
1255 sp<IPreparedModel> preparedModel = nullptr;
1256 ErrorStatus status;
1257 hidl_vec<hidl_handle> modelCache, dataCache;
1258 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1259 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1260 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1261 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1262 ASSERT_EQ(preparedModel, nullptr);
1263 }
1264}
1265
// Operand types over which the compilation caching tests are parameterized.
static const auto kOperandTypeChoices =
        ::testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);

INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);
1270
1271class CompilationCachingSecurityTest
1272 : public CompilationCachingTestBase,
1273 public ::testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001274 protected:
    // Forward the operand type from the (OperandType, uint32_t) tuple
    // parameter to the base fixture.
    CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}
1276
    // Run the base fixture setup, then seed the PRNG with kSeed so each
    // randomized cache-corruption run is deterministic and reproducible.
    void SetUp() {
        CompilationCachingTestBase::SetUp();
        generator.seed(kSeed);
    }
1281
1282 // Get a random integer within a closed range [lower, upper].
1283 template <typename T>
1284 T getRandomInt(T lower, T upper) {
1285 std::uniform_int_distribution<T> dis(lower, upper);
1286 return dis(generator);
1287 }
1288
Xusong Wange371f6f2019-04-23 14:51:50 -07001289 // Randomly flip one single bit of the cache entry.
1290 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1291 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001292 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1293 long int fileSize = ftell(pFile);
1294 if (fileSize == 0) {
1295 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001296 *skip = true;
1297 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001298 }
1299 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1300 int readByte = fgetc(pFile);
1301 ASSERT_NE(readByte, EOF);
1302 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1303 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1304 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001305 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001306 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001307
Xusong Wange371f6f2019-04-23 14:51:50 -07001308 // Randomly append bytes to the cache entry.
1309 void appendBytesToCache(const std::string& filename, bool* skip) {
1310 FILE* pFile = fopen(filename.c_str(), "a");
1311 uint32_t appendLength = getRandomInt(1, 256);
1312 for (uint32_t i = 0; i < appendLength; i++) {
1313 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1314 }
1315 fclose(pFile);
1316 *skip = false;
1317 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001318
Xusong Wange371f6f2019-04-23 14:51:50 -07001319 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001320
Xusong Wange371f6f2019-04-23 14:51:50 -07001321 // Test if the driver behaves as expected when given corrupted cache or token.
1322 // The modifier will be invoked after save to cache but before prepare from cache.
1323 // The modifier accepts one pointer argument "skip" as the returning value, indicating
1324 // whether the test should be skipped or not.
1325 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wang4f71afc2019-04-26 15:33:38 -07001326 const Model testModel = createTestModel();
1327 if (checkEarlyTermination(testModel)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001328
Xusong Wanged0822b2019-02-25 16:58:58 -08001329 // Save the compilation to cache.
1330 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001331 hidl_vec<hidl_handle> modelCache, dataCache;
1332 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1333 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001334 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001335 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001336
Xusong Wange371f6f2019-04-23 14:51:50 -07001337 bool skip = false;
1338 modifier(&skip);
1339 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001340
Xusong Wange371f6f2019-04-23 14:51:50 -07001341 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001342 {
1343 sp<IPreparedModel> preparedModel = nullptr;
1344 ErrorStatus status;
1345 hidl_vec<hidl_handle> modelCache, dataCache;
1346 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1347 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1348 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001349
1350 switch (expected) {
1351 case ExpectedResult::GENERAL_FAILURE:
1352 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1353 ASSERT_EQ(preparedModel, nullptr);
1354 break;
1355 case ExpectedResult::NOT_CRASH:
1356 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1357 break;
1358 default:
1359 FAIL();
1360 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001361 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001362 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001363
Xusong Wang0e0721f2019-05-07 12:57:49 -07001364 const uint32_t kSeed = std::get<1>(GetParam());
Xusong Wange371f6f2019-04-23 14:51:50 -07001365 std::mt19937 generator;
1366};
1367
1368TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1369 if (!mIsCachingSupported) return;
1370 for (uint32_t i = 0; i < mNumModelCache; i++) {
1371 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1372 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1373 }
1374}
1375
1376TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1377 if (!mIsCachingSupported) return;
1378 for (uint32_t i = 0; i < mNumModelCache; i++) {
1379 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1380 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1381 }
1382}
1383
1384TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1385 if (!mIsCachingSupported) return;
1386 for (uint32_t i = 0; i < mNumDataCache; i++) {
1387 testCorruptedCache(ExpectedResult::NOT_CRASH,
1388 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1389 }
1390}
1391
1392TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1393 if (!mIsCachingSupported) return;
1394 for (uint32_t i = 0; i < mNumDataCache; i++) {
1395 testCorruptedCache(ExpectedResult::NOT_CRASH,
1396 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1397 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001398}
1399
1400TEST_P(CompilationCachingSecurityTest, WrongToken) {
1401 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001402 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1403 // Randomly flip one single bit in mToken.
1404 uint32_t ind =
1405 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1406 mToken[ind] ^= (1U << getRandomInt(0, 7));
1407 *skip = false;
1408 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001409}
1410
// Instantiate the security tests over every operand type and ten RNG seeds,
// so each corruption scenario is exercised at multiple random positions.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        ::testing::Combine(kOperandTypeChoices, ::testing::Range(0U, 10U)));
Xusong Wang96e68dc2019-01-18 17:28:26 -08001413
1414} // namespace functional
1415} // namespace vts
1416} // namespace V1_2
1417} // namespace neuralnetworks
1418} // namespace hardware
1419} // namespace android