blob: 8711f479fed2d759ff66814f82c3919c2e7ceaf7 [file] [log] [blame]
Xusong Wang96e68dc2019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070019#include <android-base/logging.h>
20#include <android/hidl/memory/1.0/IMemory.h>
21#include <ftw.h>
22#include <gtest/gtest.h>
23#include <hidlmemory/mapping.h>
24#include <unistd.h>
25
26#include <cstdio>
27#include <cstdlib>
28#include <random>
Michael Butler051cf392019-07-16 16:52:06 -070029#include <thread>
Xusong Wang96e68dc2019-01-18 17:28:26 -080030
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010031#include "1.2/Callbacks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080032#include "GeneratedTestHarness.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010033#include "MemoryUtils.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080034#include "TestHarness.h"
35#include "Utils.h"
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070036#include "VtsHalNeuralnetworks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080037
38namespace android {
39namespace hardware {
40namespace neuralnetworks {
41namespace V1_2 {
42namespace vts {
43namespace functional {
44
Michael Butler3835f612019-07-11 15:43:22 -070045using ::android::hardware::neuralnetworks::V1_0::OperandLifeTime;
46using ::android::hardware::neuralnetworks::V1_1::ExecutionPreference;
Xusong Wang96e68dc2019-01-18 17:28:26 -080047using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
48using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
Michael Butler3835f612019-07-11 15:43:22 -070049using ::android::hidl::memory::V1_0::IMemory;
Xusong Wang96e68dc2019-01-18 17:28:26 -080050using ::android::nn::allocateSharedMemory;
51using ::test_helper::MixedTypedExample;
52
Xusong Wang0e0721f2019-05-07 12:57:49 -070053namespace float32_model {
Xusong Wang96e68dc2019-01-18 17:28:26 -080054
Xusong Wang0e0721f2019-05-07 12:57:49 -070055// In frameworks/ml/nn/runtime/test/generated/, creates a hidl model of float32 mobilenet.
Xusong Wang96e68dc2019-01-18 17:28:26 -080056#include "examples/mobilenet_224_gender_basic_fixed.example.cpp"
Slava Shklyaevcac83ee2019-05-29 22:21:53 +010057#include "vts/V1_2/models/mobilenet_224_gender_basic_fixed.model.cpp"
Xusong Wang96e68dc2019-01-18 17:28:26 -080058
59// Prevent the compiler from complaining about an otherwise unused function.
60[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
61[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;
62
Xusong Wang0e0721f2019-05-07 12:57:49 -070063// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
64// This function assumes the operation is always ADD.
65std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
66 float outputValue = 1.0f + static_cast<float>(len);
67 return {{.operands = {
68 // Input
69 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {1.0f}}}},
70 // Output
71 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {outputValue}}}}}}};
72}
73
74} // namespace float32_model
75
76namespace quant8_model {
77
78// In frameworks/ml/nn/runtime/test/generated/, creates a hidl model of quant8 mobilenet.
79#include "examples/mobilenet_quantized.example.cpp"
Slava Shklyaevcac83ee2019-05-29 22:21:53 +010080#include "vts/V1_2/models/mobilenet_quantized.model.cpp"
Xusong Wang0e0721f2019-05-07 12:57:49 -070081
82// Prevent the compiler from complaining about an otherwise unused function.
83[[maybe_unused]] auto dummy_createTestModel = createTestModel_dynamic_output_shape;
84[[maybe_unused]] auto dummy_get_examples = get_examples_dynamic_output_shape;
85
86// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
87// This function assumes the operation is always ADD.
88std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
89 uint8_t outputValue = 1 + static_cast<uint8_t>(len);
90 return {{.operands = {// Input
91 {.operandDimensions = {{0, {1}}}, .quant8AsymmOperands = {{0, {1}}}},
92 // Output
93 {.operandDimensions = {{0, {1}}},
94 .quant8AsymmOperands = {{0, {outputValue}}}}}}};
95}
96
97} // namespace quant8_model
98
99namespace {
100
Xusong Wanged0822b2019-02-25 16:58:58 -0800101enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };
Xusong Wang96e68dc2019-01-18 17:28:26 -0800102
Xusong Wanged0822b2019-02-25 16:58:58 -0800103// Creates cache handles based on provided file groups.
104// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
105void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
106 const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
107 handles->resize(fileGroups.size());
108 for (uint32_t i = 0; i < fileGroups.size(); i++) {
109 std::vector<int> fds;
110 for (const auto& file : fileGroups[i]) {
111 int fd;
112 if (mode[i] == AccessMode::READ_ONLY) {
113 fd = open(file.c_str(), O_RDONLY);
114 } else if (mode[i] == AccessMode::WRITE_ONLY) {
115 fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
116 } else if (mode[i] == AccessMode::READ_WRITE) {
117 fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
118 } else {
119 FAIL();
120 }
121 ASSERT_GE(fd, 0);
122 fds.push_back(fd);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800123 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800124 native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
125 ASSERT_NE(cacheNativeHandle, nullptr);
126 std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
127 (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800128 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800129}
130
131void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
132 hidl_vec<hidl_handle>* handles) {
133 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800134}
135
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700136// Create a chain of broadcast operations. The second operand is always constant tensor [1].
137// For simplicity, activation scalar is shared. The second operand is not shared
138// in the model to let driver maintain a non-trivial size of constant data and the corresponding
139// data locations in cache.
140//
141// --------- activation --------
142// ↓ ↓ ↓ ↓
143// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
144// ↑ ↑ ↑ ↑
145// [1] [1] [1] [1]
146//
Xusong Wang0e0721f2019-05-07 12:57:49 -0700147// This function assumes the operation is either ADD or MUL.
// Builds the chained-broadcast model described above.
//
// @tparam CppType     C++ storage type of the tensor elements (float or uint8_t).
// @tparam operandType Corresponding HIDL OperandType (TENSOR_FLOAT32 or TENSOR_QUANT8_ASYMM).
// @param op  Operation used for every link of the chain; must be ADD or MUL.
// @param len Number of chained operations; also determines operand count (2*len + 2)
//            and the size of the constant value pool.
// @return A Model with operand layout: [0] = shared activation scalar,
//         [2i+1]/[2i+2] = first/second input of operation i, [2*len+1] = model output.
template <typename CppType, OperandType operandType>
Model createLargeTestModelImpl(OperationType op, uint32_t len) {
    EXPECT_TRUE(op == OperationType::ADD || op == OperationType::MUL);

    // Model operations and operands.
    std::vector<Operation> operations(len);
    std::vector<Operand> operands(len * 2 + 2);

    // The constant buffer pool. This contains the activation scalar, followed by the
    // per-operation constant operands.
    std::vector<uint8_t> operandValues(sizeof(int32_t) + len * sizeof(CppType));

    // The activation scalar, value = 0 (no fused activation), shared by all operations.
    operands[0] = {
            .type = OperandType::INT32,
            .dimensions = {},
            .numberOfConsumers = len,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = OperandLifeTime::CONSTANT_COPY,
            .location = {.poolIndex = 0, .offset = 0, .length = sizeof(int32_t)},
    };
    memset(operandValues.data(), 0, sizeof(int32_t));

    // The buffer value of the constant second operand. The logical value is always 1.0f.
    CppType bufferValue;
    // The scale of the first and second operand.
    float scale1, scale2;
    if (operandType == OperandType::TENSOR_FLOAT32) {
        bufferValue = 1.0f;
        scale1 = 0.0f;
        scale2 = 0.0f;
    } else if (op == OperationType::ADD) {
        bufferValue = 1;
        scale1 = 1.0f;
        scale2 = 1.0f;
    } else {
        // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
        // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
        bufferValue = 2;
        scale1 = 1.0f;
        scale2 = 0.5f;
    }

    for (uint32_t i = 0; i < len; i++) {
        // Operand indices for operation i: inputs at 2i+1 and 2i+2, output at 2i+3.
        const uint32_t firstInputIndex = i * 2 + 1;
        const uint32_t secondInputIndex = firstInputIndex + 1;
        const uint32_t outputIndex = secondInputIndex + 1;

        // The first operation input: the model input for i == 0, otherwise the previous
        // operation's output (a temporary).
        operands[firstInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale1,
                .zeroPoint = 0,
                .lifetime = (i == 0 ? OperandLifeTime::MODEL_INPUT
                                    : OperandLifeTime::TEMPORARY_VARIABLE),
                .location = {},
        };

        // The second operation input, value = 1. Each operation gets its own slot in the
        // constant pool (offset skips the leading activation scalar) so that drivers see a
        // non-trivial amount of constant data to cache.
        operands[secondInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale2,
                .zeroPoint = 0,
                .lifetime = OperandLifeTime::CONSTANT_COPY,
                .location = {.poolIndex = 0,
                             .offset = static_cast<uint32_t>(i * sizeof(CppType) + sizeof(int32_t)),
                             .length = sizeof(CppType)},
        };
        memcpy(operandValues.data() + sizeof(int32_t) + i * sizeof(CppType), &bufferValue,
               sizeof(CppType));

        // The operation. All operations share the same activation scalar.
        // The output operand is created as an input in the next iteration of the loop, in the case
        // of all but the last member of the chain; and after the loop as a model output, in the
        // case of the last member of the chain.
        operations[i] = {
                .type = op,
                .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
                .outputs = {outputIndex},
        };
    }

    // The model output (output of the final operation in the chain).
    operands.back() = {
            .type = operandType,
            .dimensions = {1},
            .numberOfConsumers = 0,
            .scale = scale1,
            .zeroPoint = 0,
            .lifetime = OperandLifeTime::MODEL_OUTPUT,
            .location = {},
    };

    const std::vector<uint32_t> inputIndexes = {1};
    const std::vector<uint32_t> outputIndexes = {len * 2 + 1};
    const std::vector<hidl_memory> pools = {};

    return {
            .operands = operands,
            .operations = operations,
            .inputIndexes = inputIndexes,
            .outputIndexes = outputIndexes,
            .operandValues = operandValues,
            .pools = pools,
    };
}
259
Xusong Wang96e68dc2019-01-18 17:28:26 -0800260} // namespace
261
262// Tag for the compilation caching tests.
Xusong Wang0e0721f2019-05-07 12:57:49 -0700263class CompilationCachingTestBase : public NeuralnetworksHidlTest {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800264 protected:
Xusong Wang0e0721f2019-05-07 12:57:49 -0700265 CompilationCachingTestBase(OperandType type) : kOperandType(type) {}
266
Xusong Wang96e68dc2019-01-18 17:28:26 -0800267 void SetUp() override {
268 NeuralnetworksHidlTest::SetUp();
Hervé Guihotac7ac522019-02-12 16:22:44 -0800269 ASSERT_NE(device.get(), nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800270
Xusong Wanged0822b2019-02-25 16:58:58 -0800271 // Create cache directory. The cache directory and a temporary cache file is always created
272 // to test the behavior of prepareModelFromCache, even when caching is not supported.
Xusong Wang96e68dc2019-01-18 17:28:26 -0800273 char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
274 char* cacheDir = mkdtemp(cacheDirTemp);
275 ASSERT_NE(cacheDir, nullptr);
Xusong Wang6824cc12019-02-12 18:00:37 -0800276 mCacheDir = cacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800277 mCacheDir.push_back('/');
Xusong Wang6824cc12019-02-12 18:00:37 -0800278
Xusong Wanged0822b2019-02-25 16:58:58 -0800279 Return<void> ret = device->getNumberOfCacheFilesNeeded(
280 [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800281 EXPECT_EQ(ErrorStatus::NONE, status);
Xusong Wanged0822b2019-02-25 16:58:58 -0800282 mNumModelCache = numModelCache;
283 mNumDataCache = numDataCache;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800284 });
285 EXPECT_TRUE(ret.isOk());
Xusong Wanged0822b2019-02-25 16:58:58 -0800286 mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;
287
288 // Create empty cache files.
289 mTmpCache = mCacheDir + "tmp";
290 for (uint32_t i = 0; i < mNumModelCache; i++) {
291 mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
292 }
293 for (uint32_t i = 0; i < mNumDataCache; i++) {
294 mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
295 }
296 // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
297 hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
298 createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
299 createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
300 createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);
301
302 if (!mIsCachingSupported) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800303 LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
304 "support compilation caching.";
305 std::cout << "[ ] Early termination of test because vendor service does not "
306 "support compilation caching."
307 << std::endl;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800308 }
Xusong Wang6824cc12019-02-12 18:00:37 -0800309 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800310
Xusong Wang6824cc12019-02-12 18:00:37 -0800311 void TearDown() override {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700312 // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
313 if (!::testing::Test::HasFailure()) {
314 // Recursively remove the cache directory specified by mCacheDir.
315 auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
316 return remove(entry);
317 };
318 nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
Xusong Wang6824cc12019-02-12 18:00:37 -0800319 }
320 NeuralnetworksHidlTest::TearDown();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800321 }
322
Xusong Wang0e0721f2019-05-07 12:57:49 -0700323 // Model and examples creators. According to kOperandType, the following methods will return
324 // either float32 model/examples or the quant8 variant.
325 Model createTestModel() {
326 if (kOperandType == OperandType::TENSOR_FLOAT32) {
327 return float32_model::createTestModel();
328 } else {
329 return quant8_model::createTestModel();
330 }
331 }
332
333 std::vector<MixedTypedExample> get_examples() {
334 if (kOperandType == OperandType::TENSOR_FLOAT32) {
335 return float32_model::get_examples();
336 } else {
337 return quant8_model::get_examples();
338 }
339 }
340
341 Model createLargeTestModel(OperationType op, uint32_t len) {
342 if (kOperandType == OperandType::TENSOR_FLOAT32) {
343 return createLargeTestModelImpl<float, OperandType::TENSOR_FLOAT32>(op, len);
344 } else {
345 return createLargeTestModelImpl<uint8_t, OperandType::TENSOR_QUANT8_ASYMM>(op, len);
346 }
347 }
348
349 std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
350 if (kOperandType == OperandType::TENSOR_FLOAT32) {
351 return float32_model::getLargeModelExamples(len);
352 } else {
353 return quant8_model::getLargeModelExamples(len);
354 }
355 }
356
Xusong Wang4f71afc2019-04-26 15:33:38 -0700357 // See if the service can handle the model.
358 bool isModelFullySupported(const V1_2::Model& model) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800359 bool fullySupportsModel = false;
360 Return<void> supportedCall = device->getSupportedOperations_1_2(
361 model,
362 [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
363 ASSERT_EQ(ErrorStatus::NONE, status);
364 ASSERT_EQ(supported.size(), model.operations.size());
365 fullySupportsModel = std::all_of(supported.begin(), supported.end(),
366 [](bool valid) { return valid; });
367 });
Xusong Wang4f71afc2019-04-26 15:33:38 -0700368 EXPECT_TRUE(supportedCall.isOk());
369 return fullySupportsModel;
370 }
371
372 void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
373 const hidl_vec<hidl_handle>& dataCache,
374 sp<IPreparedModel>* preparedModel = nullptr) {
375 if (preparedModel != nullptr) *preparedModel = nullptr;
Xusong Wanged0822b2019-02-25 16:58:58 -0800376
377 // Launch prepare model.
378 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
379 ASSERT_NE(nullptr, preparedModelCallback.get());
Xusong Wang96e68dc2019-01-18 17:28:26 -0800380 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800381 Return<ErrorStatus> prepareLaunchStatus =
382 device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
383 dataCache, cacheToken, preparedModelCallback);
384 ASSERT_TRUE(prepareLaunchStatus.isOk());
385 ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);
386
387 // Retrieve prepared model.
388 preparedModelCallback->wait();
389 ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
390 if (preparedModel != nullptr) {
391 *preparedModel =
392 V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
393 .withDefault(nullptr);
394 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800395 }
396
397 bool checkEarlyTermination(ErrorStatus status) {
398 if (status == ErrorStatus::GENERAL_FAILURE) {
399 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
400 "save the prepared model that it does not support.";
401 std::cout << "[ ] Early termination of test because vendor service cannot "
402 "save the prepared model that it does not support."
403 << std::endl;
404 return true;
405 }
406 return false;
407 }
408
Xusong Wang4f71afc2019-04-26 15:33:38 -0700409 bool checkEarlyTermination(const V1_2::Model& model) {
410 if (!isModelFullySupported(model)) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800411 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
412 "prepare model that it does not support.";
413 std::cout << "[ ] Early termination of test because vendor service cannot "
414 "prepare model that it does not support."
415 << std::endl;
416 return true;
417 }
418 return false;
419 }
420
421 void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
422 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang96e68dc2019-01-18 17:28:26 -0800423 sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
424 // Launch prepare model from cache.
425 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
426 ASSERT_NE(nullptr, preparedModelCallback.get());
427 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800428 Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
429 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800430 ASSERT_TRUE(prepareLaunchStatus.isOk());
431 if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
432 *preparedModel = nullptr;
433 *status = static_cast<ErrorStatus>(prepareLaunchStatus);
434 return;
435 }
436
437 // Retrieve prepared model.
438 preparedModelCallback->wait();
439 *status = preparedModelCallback->getStatus();
440 *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
441 .withDefault(nullptr);
442 }
443
Xusong Wanged0822b2019-02-25 16:58:58 -0800444 // Absolute path to the temporary cache directory.
Xusong Wang6824cc12019-02-12 18:00:37 -0800445 std::string mCacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800446
447 // Groups of file paths for model and data cache in the tmp cache directory, initialized with
448 // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
449 // and the inner vector is for fds held by each handle.
450 std::vector<std::vector<std::string>> mModelCache;
451 std::vector<std::vector<std::string>> mDataCache;
452
453 // A separate temporary file path in the tmp cache directory.
454 std::string mTmpCache;
455
Xusong Wang96e68dc2019-01-18 17:28:26 -0800456 uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
Xusong Wanged0822b2019-02-25 16:58:58 -0800457 uint32_t mNumModelCache;
458 uint32_t mNumDataCache;
459 uint32_t mIsCachingSupported;
Xusong Wang0e0721f2019-05-07 12:57:49 -0700460
461 // The primary data type of the testModel.
462 const OperandType kOperandType;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800463};
464
Xusong Wang0e0721f2019-05-07 12:57:49 -0700465// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
466// pass running with float32 models and the second pass running with quant8 models.
class CompilationCachingTest : public CompilationCachingTestBase,
                               public ::testing::WithParamInterface<OperandType> {
  protected:
    // Forward the gtest parameter (float32 or quant8 operand type) to the base fixture.
    CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
};
472
// Round-trip test: compile a model while handing the driver writable cache handles, then
// re-prepare the same model purely from the cache files and verify it still computes correctly.
TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            // A driver without caching support must reject prepareModelFromCache outright.
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
                                           testModel.relaxComputationFloat32toFloat16,
                                           /*testDynamicOutputShape=*/false);
}
513
// Same round trip as CacheSavingAndRetrieval, but with cache file handles whose contents are
// non-empty and whose fd read/write offsets are non-zero at hand-off; drivers must cope with
// both rather than assuming fresh, rewound fds.
TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyBytes[] = {0, 0};
        // Write a dummy integer to the cache.
        // The driver should be able to handle non-empty cache and non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
                            sizeof(dummyBytes)),
                      sizeof(dummyBytes));
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_EQ(
                    write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
                    sizeof(dummyBytes));
        }
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyByte = 0;
        // Advance the offset of each handle by one byte.
        // The driver should be able to handle non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            // A driver without caching support must reject prepareModelFromCache outright.
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
                                           testModel.relaxComputationFloat32toFloat16,
                                           /*testDynamicOutputShape=*/false);
}
576
// Negative test: hand the driver the wrong NUMBER of model/data cache files when saving.
// The save itself must still succeed (drivers fall back to compiling without caching), the
// resulting prepared model must work, but a subsequent prepareModelFromCache with that same
// mismatched set must fail with INVALID_ARGUMENT or GENERAL_FAILURE.
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Test with number of model cache files greater than mNumModelCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for model cache.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for data cache.
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
688
Xusong Wang0e0721f2019-05-07 12:57:49 -0700689TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800690 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700691 const Model testModel = createTestModel();
692 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800693
694 // Save the compilation to cache.
695 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800696 hidl_vec<hidl_handle> modelCache, dataCache;
697 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
698 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -0700699 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -0800700 }
701
702 // Test with number of model cache files greater than mNumModelCache.
703 {
704 sp<IPreparedModel> preparedModel = nullptr;
705 ErrorStatus status;
706 hidl_vec<hidl_handle> modelCache, dataCache;
707 mModelCache.push_back({mTmpCache});
708 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
709 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
710 mModelCache.pop_back();
711 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
712 if (status != ErrorStatus::GENERAL_FAILURE) {
713 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
714 }
715 ASSERT_EQ(preparedModel, nullptr);
716 }
717
718 // Test with number of model cache files smaller than mNumModelCache.
719 if (mModelCache.size() > 0) {
720 sp<IPreparedModel> preparedModel = nullptr;
721 ErrorStatus status;
722 hidl_vec<hidl_handle> modelCache, dataCache;
723 auto tmp = mModelCache.back();
724 mModelCache.pop_back();
725 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
726 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
727 mModelCache.push_back(tmp);
728 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
729 if (status != ErrorStatus::GENERAL_FAILURE) {
730 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
731 }
732 ASSERT_EQ(preparedModel, nullptr);
733 }
734
735 // Test with number of data cache files greater than mNumDataCache.
736 {
737 sp<IPreparedModel> preparedModel = nullptr;
738 ErrorStatus status;
739 hidl_vec<hidl_handle> modelCache, dataCache;
740 mDataCache.push_back({mTmpCache});
741 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
742 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
743 mDataCache.pop_back();
744 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
745 if (status != ErrorStatus::GENERAL_FAILURE) {
746 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
747 }
748 ASSERT_EQ(preparedModel, nullptr);
749 }
750
751 // Test with number of data cache files smaller than mNumDataCache.
752 if (mDataCache.size() > 0) {
753 sp<IPreparedModel> preparedModel = nullptr;
754 ErrorStatus status;
755 hidl_vec<hidl_handle> modelCache, dataCache;
756 auto tmp = mDataCache.back();
757 mDataCache.pop_back();
758 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
759 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
760 mDataCache.push_back(tmp);
761 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
762 if (status != ErrorStatus::GENERAL_FAILURE) {
763 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
764 }
765 ASSERT_EQ(preparedModel, nullptr);
766 }
767}
768
// Saving to cache while exactly one cache handle holds an invalid number of
// fds (2 instead of 1, or 0) must still return a working prepared model (the
// driver falls back to normal compilation), but a subsequent
// prepareModelFromCache with the same malformed handle sets must fail with
// INVALID_ARGUMENT or GENERAL_FAILURE and return no model.
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i: temporarily add a second
        // file to entry i so the created handle carries two fds.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        // Saving must still yield a usable prepared model.
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails. INVALID_ARGUMENT is preferred,
        // GENERAL_FAILURE is tolerated; no model may be returned either way.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i: temporarily remove the
        // only file of entry i so the created handle carries zero fds.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
880
Xusong Wang0e0721f2019-05-07 12:57:49 -0700881TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800882 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700883 const Model testModel = createTestModel();
884 if (checkEarlyTermination(testModel)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800885
886 // Save the compilation to cache.
887 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800888 hidl_vec<hidl_handle> modelCache, dataCache;
889 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
890 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -0700891 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800892 }
893
Xusong Wanged0822b2019-02-25 16:58:58 -0800894 // Go through each handle in model cache, test with NumFd greater than 1.
895 for (uint32_t i = 0; i < mNumModelCache; i++) {
896 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800897 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800898 hidl_vec<hidl_handle> modelCache, dataCache;
899 mModelCache[i].push_back(mTmpCache);
900 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
901 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
902 mModelCache[i].pop_back();
903 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800904 if (status != ErrorStatus::GENERAL_FAILURE) {
905 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800906 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800907 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800908 }
909
Xusong Wanged0822b2019-02-25 16:58:58 -0800910 // Go through each handle in model cache, test with NumFd equal to 0.
911 for (uint32_t i = 0; i < mNumModelCache; i++) {
912 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800913 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800914 hidl_vec<hidl_handle> modelCache, dataCache;
915 auto tmp = mModelCache[i].back();
916 mModelCache[i].pop_back();
917 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
918 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
919 mModelCache[i].push_back(tmp);
920 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800921 if (status != ErrorStatus::GENERAL_FAILURE) {
922 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800923 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800924 ASSERT_EQ(preparedModel, nullptr);
925 }
926
927 // Go through each handle in data cache, test with NumFd greater than 1.
928 for (uint32_t i = 0; i < mNumDataCache; i++) {
929 sp<IPreparedModel> preparedModel = nullptr;
930 ErrorStatus status;
931 hidl_vec<hidl_handle> modelCache, dataCache;
932 mDataCache[i].push_back(mTmpCache);
933 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
934 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
935 mDataCache[i].pop_back();
936 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
937 if (status != ErrorStatus::GENERAL_FAILURE) {
938 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
939 }
940 ASSERT_EQ(preparedModel, nullptr);
941 }
942
943 // Go through each handle in data cache, test with NumFd equal to 0.
944 for (uint32_t i = 0; i < mNumDataCache; i++) {
945 sp<IPreparedModel> preparedModel = nullptr;
946 ErrorStatus status;
947 hidl_vec<hidl_handle> modelCache, dataCache;
948 auto tmp = mDataCache[i].back();
949 mDataCache[i].pop_back();
950 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
951 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
952 mDataCache[i].push_back(tmp);
953 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
954 if (status != ErrorStatus::GENERAL_FAILURE) {
955 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
956 }
957 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800958 }
959}
960
// Saving to cache while exactly one cache handle is opened read-only (not
// writable) must still return a working prepared model, but a subsequent
// prepareModelFromCache with the same handle sets must fail with
// INVALID_ARGUMENT or GENERAL_FAILURE and return no model.
TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    // Per-entry access modes; one entry at a time is flipped to READ_ONLY below.
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;  // restore for next iteration
        sp<IPreparedModel> preparedModel = nullptr;
        // Saving must still yield a usable prepared model.
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails. INVALID_ARGUMENT is preferred,
        // GENERAL_FAILURE is tolerated; no model may be returned either way.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;  // restore for next iteration
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
1018
Xusong Wang0e0721f2019-05-07 12:57:49 -07001019TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001020 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -07001021 const Model testModel = createTestModel();
1022 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001023 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
1024 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001025
1026 // Save the compilation to cache.
1027 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001028 hidl_vec<hidl_handle> modelCache, dataCache;
1029 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1030 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001031 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001032 }
1033
Xusong Wanged0822b2019-02-25 16:58:58 -08001034 // Go through each handle in model cache, test with invalid access mode.
1035 for (uint32_t i = 0; i < mNumModelCache; i++) {
1036 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001037 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -08001038 hidl_vec<hidl_handle> modelCache, dataCache;
1039 modelCacheMode[i] = AccessMode::WRITE_ONLY;
1040 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
1041 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
1042 modelCacheMode[i] = AccessMode::READ_WRITE;
1043 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001044 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1045 ASSERT_EQ(preparedModel, nullptr);
1046 }
1047
Xusong Wanged0822b2019-02-25 16:58:58 -08001048 // Go through each handle in data cache, test with invalid access mode.
1049 for (uint32_t i = 0; i < mNumDataCache; i++) {
1050 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001051 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -08001052 hidl_vec<hidl_handle> modelCache, dataCache;
1053 dataCacheMode[i] = AccessMode::WRITE_ONLY;
1054 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
1055 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
1056 dataCacheMode[i] = AccessMode::READ_WRITE;
1057 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001058 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1059 ASSERT_EQ(preparedModel, nullptr);
1060 }
1061}
1062
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001063// Copy file contents between file groups.
1064// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
1065// The outer vector sizes must match and the inner vectors must have size = 1.
1066static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
1067 const std::vector<std::vector<std::string>>& to) {
1068 constexpr size_t kBufferSize = 1000000;
1069 uint8_t buffer[kBufferSize];
1070
1071 ASSERT_EQ(from.size(), to.size());
1072 for (uint32_t i = 0; i < from.size(); i++) {
1073 ASSERT_EQ(from[i].size(), 1u);
1074 ASSERT_EQ(to[i].size(), 1u);
1075 int fromFd = open(from[i][0].c_str(), O_RDONLY);
1076 int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
1077 ASSERT_GE(fromFd, 0);
1078 ASSERT_GE(toFd, 0);
1079
1080 ssize_t readBytes;
1081 while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
1082 ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
1083 }
1084 ASSERT_GE(readBytes, 0);
1085
1086 close(fromFd);
1087 close(toFd);
1088 }
1089}
1090
// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
// Number of repetitions of each probabilistic TOCTOU race test below.
constexpr uint32_t kNumIterationsTOCTOU = 100;
1094
// Time-of-check/time-of-use race on saveModelToCache: while the driver writes
// testModelAdd's compilation to the cache files, a second thread concurrently
// overwrites those files with testModelMul's cache content. A later
// prepareModelFromCache may fail or succeed, but must never crash or return a
// model that computes wrong results.
TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache, under "_mul"-suffixed file
    // names so it does not collide with testModelAdd's cache files.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(testModelAdd, modelCache, dataCache);
            thread.join();
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1157
// Time-of-check/time-of-use race on prepareModelFromCache: while the driver
// reads testModelAdd's cache files, a second thread concurrently overwrites
// them with testModelMul's cache content. The preparation may fail or succeed,
// but must never crash or return a model that computes wrong results.
TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache, under "_mul"-suffixed file
    // names so it does not collide with testModelAdd's cache files.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(testModelAdd, modelCache, dataCache);
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1220
Xusong Wang0e0721f2019-05-07 12:57:49 -07001221TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001222 if (!mIsCachingSupported) return;
1223
Xusong Wang4f71afc2019-04-26 15:33:38 -07001224 // Create test models and check if fully supported by the service.
1225 const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
1226 if (checkEarlyTermination(testModelMul)) return;
1227 const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
1228 if (checkEarlyTermination(testModelAdd)) return;
1229
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001230 // Save the testModelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001231 auto modelCacheMul = mModelCache;
1232 for (auto& cache : modelCacheMul) {
1233 cache[0].append("_mul");
1234 }
1235 {
1236 hidl_vec<hidl_handle> modelCache, dataCache;
1237 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1238 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001239 saveModelToCache(testModelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001240 }
1241
1242 // Use a different token for testModelAdd.
1243 mToken[0]++;
1244
1245 // Save the testModelAdd compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001246 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001247 hidl_vec<hidl_handle> modelCache, dataCache;
1248 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1249 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001250 saveModelToCache(testModelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001251 }
1252
1253 // Replace the model cache of testModelAdd with testModelMul.
1254 copyCacheFiles(modelCacheMul, mModelCache);
1255
1256 // Retrieve the preparedModel from cache, expect failure.
1257 {
1258 sp<IPreparedModel> preparedModel = nullptr;
1259 ErrorStatus status;
1260 hidl_vec<hidl_handle> modelCache, dataCache;
1261 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1262 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1263 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1264 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1265 ASSERT_EQ(preparedModel, nullptr);
1266 }
1267}
1268
// Operand data types each CompilationCachingTest is instantiated with.
static const auto kOperandTypeChoices =
        ::testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);

// NOTE: INSTANTIATE_TEST_CASE_P is the pre-gtest-1.10 spelling of
// INSTANTIATE_TEST_SUITE_P; kept for the gtest version this file builds with.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);
1273
1274class CompilationCachingSecurityTest
1275 : public CompilationCachingTestBase,
1276 public ::testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001277 protected:
    // Forwards the OperandType element (index 0) of the test parameter tuple
    // to the base fixture; the uint32_t element is consumed elsewhere in the
    // fixture (presumably as the RNG seed used in SetUp — confirm via kSeed).
    CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}
1279
Xusong Wang96e68dc2019-01-18 17:28:26 -08001280 void SetUp() {
Xusong Wang0e0721f2019-05-07 12:57:49 -07001281 CompilationCachingTestBase::SetUp();
Xusong Wang96e68dc2019-01-18 17:28:26 -08001282 generator.seed(kSeed);
1283 }
1284
1285 // Get a random integer within a closed range [lower, upper].
1286 template <typename T>
1287 T getRandomInt(T lower, T upper) {
1288 std::uniform_int_distribution<T> dis(lower, upper);
1289 return dis(generator);
1290 }
1291
Xusong Wange371f6f2019-04-23 14:51:50 -07001292 // Randomly flip one single bit of the cache entry.
1293 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1294 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001295 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1296 long int fileSize = ftell(pFile);
1297 if (fileSize == 0) {
1298 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001299 *skip = true;
1300 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001301 }
1302 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1303 int readByte = fgetc(pFile);
1304 ASSERT_NE(readByte, EOF);
1305 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1306 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1307 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001308 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001309 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001310
Xusong Wange371f6f2019-04-23 14:51:50 -07001311 // Randomly append bytes to the cache entry.
1312 void appendBytesToCache(const std::string& filename, bool* skip) {
1313 FILE* pFile = fopen(filename.c_str(), "a");
1314 uint32_t appendLength = getRandomInt(1, 256);
1315 for (uint32_t i = 0; i < appendLength; i++) {
1316 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1317 }
1318 fclose(pFile);
1319 *skip = false;
1320 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001321
Xusong Wange371f6f2019-04-23 14:51:50 -07001322 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001323
Xusong Wange371f6f2019-04-23 14:51:50 -07001324 // Test if the driver behaves as expected when given corrupted cache or token.
1325 // The modifier will be invoked after save to cache but before prepare from cache.
1326 // The modifier accepts one pointer argument "skip" as the returning value, indicating
1327 // whether the test should be skipped or not.
1328 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wang4f71afc2019-04-26 15:33:38 -07001329 const Model testModel = createTestModel();
1330 if (checkEarlyTermination(testModel)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001331
Xusong Wanged0822b2019-02-25 16:58:58 -08001332 // Save the compilation to cache.
1333 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001334 hidl_vec<hidl_handle> modelCache, dataCache;
1335 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1336 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001337 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001338 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001339
Xusong Wange371f6f2019-04-23 14:51:50 -07001340 bool skip = false;
1341 modifier(&skip);
1342 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001343
Xusong Wange371f6f2019-04-23 14:51:50 -07001344 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001345 {
1346 sp<IPreparedModel> preparedModel = nullptr;
1347 ErrorStatus status;
1348 hidl_vec<hidl_handle> modelCache, dataCache;
1349 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1350 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1351 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001352
1353 switch (expected) {
1354 case ExpectedResult::GENERAL_FAILURE:
1355 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1356 ASSERT_EQ(preparedModel, nullptr);
1357 break;
1358 case ExpectedResult::NOT_CRASH:
1359 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1360 break;
1361 default:
1362 FAIL();
1363 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001364 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001365 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001366
Xusong Wang0e0721f2019-05-07 12:57:49 -07001367 const uint32_t kSeed = std::get<1>(GetParam());
Xusong Wange371f6f2019-04-23 14:51:50 -07001368 std::mt19937 generator;
1369};
1370
1371TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1372 if (!mIsCachingSupported) return;
1373 for (uint32_t i = 0; i < mNumModelCache; i++) {
1374 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1375 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1376 }
1377}
1378
1379TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1380 if (!mIsCachingSupported) return;
1381 for (uint32_t i = 0; i < mNumModelCache; i++) {
1382 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1383 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1384 }
1385}
1386
1387TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1388 if (!mIsCachingSupported) return;
1389 for (uint32_t i = 0; i < mNumDataCache; i++) {
1390 testCorruptedCache(ExpectedResult::NOT_CRASH,
1391 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1392 }
1393}
1394
1395TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1396 if (!mIsCachingSupported) return;
1397 for (uint32_t i = 0; i < mNumDataCache; i++) {
1398 testCorruptedCache(ExpectedResult::NOT_CRASH,
1399 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1400 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001401}
1402
1403TEST_P(CompilationCachingSecurityTest, WrongToken) {
1404 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001405 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1406 // Randomly flip one single bit in mToken.
1407 uint32_t ind =
1408 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1409 mToken[ind] ^= (1U << getRandomInt(0, 7));
1410 *skip = false;
1411 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001412}
1413
// Run every security test over the cross product of operand types and ten
// distinct random seeds, so each corruption strategy is exercised at several
// different byte/bit positions.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        ::testing::Combine(kOperandTypeChoices, ::testing::Range(0U, 10U)));
Xusong Wang96e68dc2019-01-18 17:28:26 -08001416
1417} // namespace functional
1418} // namespace vts
1419} // namespace V1_2
1420} // namespace neuralnetworks
1421} // namespace hardware
1422} // namespace android