blob: 590764635ed4c26ba66e21752ae1a53a5f2d4085 [file] [log] [blame]
Xusong Wang96e68dc2019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070019#include <android-base/logging.h>
20#include <android/hidl/memory/1.0/IMemory.h>
21#include <ftw.h>
22#include <gtest/gtest.h>
23#include <hidlmemory/mapping.h>
24#include <unistd.h>
25
26#include <cstdio>
27#include <cstdlib>
28#include <random>
Michael Butler051cf392019-07-16 16:52:06 -070029#include <thread>
Xusong Wang96e68dc2019-01-18 17:28:26 -080030
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010031#include "1.2/Callbacks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080032#include "GeneratedTestHarness.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010033#include "MemoryUtils.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080034#include "TestHarness.h"
35#include "Utils.h"
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070036#include "VtsHalNeuralnetworks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080037
Slava Shklyaev0da5c342019-07-17 15:50:57 +010038namespace android::hardware::neuralnetworks::V1_2 {
39namespace generated_tests::mobilenet_224_gender_basic_fixed {
Slava Shklyaeve8b24462019-07-17 15:50:57 +010040Model createTestModel();
Slava Shklyaev0da5c342019-07-17 15:50:57 +010041} // namespace generated_tests::mobilenet_224_gender_basic_fixed
42} // namespace android::hardware::neuralnetworks::V1_2
Slava Shklyaeve8b24462019-07-17 15:50:57 +010043
44namespace generated_tests::mobilenet_224_gender_basic_fixed {
45std::vector<test_helper::MixedTypedExample>& get_examples();
46} // namespace generated_tests::mobilenet_224_gender_basic_fixed
47
48namespace android::hardware::neuralnetworks::V1_2::generated_tests::mobilenet_quantized {
49Model createTestModel();
50} // namespace android::hardware::neuralnetworks::V1_2::generated_tests::mobilenet_quantized
51
52namespace generated_tests::mobilenet_quantized {
53std::vector<test_helper::MixedTypedExample>& get_examples();
54} // namespace generated_tests::mobilenet_quantized
55
Xusong Wang96e68dc2019-01-18 17:28:26 -080056namespace android {
57namespace hardware {
58namespace neuralnetworks {
59namespace V1_2 {
60namespace vts {
61namespace functional {
62
Michael Butler3835f612019-07-11 15:43:22 -070063using ::android::hardware::neuralnetworks::V1_0::OperandLifeTime;
64using ::android::hardware::neuralnetworks::V1_1::ExecutionPreference;
Xusong Wang96e68dc2019-01-18 17:28:26 -080065using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
66using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
Michael Butler3835f612019-07-11 15:43:22 -070067using ::android::hidl::memory::V1_0::IMemory;
Xusong Wang96e68dc2019-01-18 17:28:26 -080068using ::android::nn::allocateSharedMemory;
69using ::test_helper::MixedTypedExample;
70
Xusong Wang0e0721f2019-05-07 12:57:49 -070071namespace float32_model {
Xusong Wang96e68dc2019-01-18 17:28:26 -080072
Slava Shklyaeve8b24462019-07-17 15:50:57 +010073constexpr auto createTestModel = ::android::hardware::neuralnetworks::V1_2::generated_tests::
74 mobilenet_224_gender_basic_fixed::createTestModel;
75constexpr auto get_examples = ::generated_tests::mobilenet_224_gender_basic_fixed::get_examples;
Xusong Wang96e68dc2019-01-18 17:28:26 -080076
Xusong Wang0e0721f2019-05-07 12:57:49 -070077// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
78// This function assumes the operation is always ADD.
79std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
80 float outputValue = 1.0f + static_cast<float>(len);
81 return {{.operands = {
82 // Input
83 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {1.0f}}}},
84 // Output
85 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {outputValue}}}}}}};
86}
87
88} // namespace float32_model
89
90namespace quant8_model {
91
Slava Shklyaeve8b24462019-07-17 15:50:57 +010092constexpr auto createTestModel = ::android::hardware::neuralnetworks::V1_2::generated_tests::
93 mobilenet_quantized::createTestModel;
94constexpr auto get_examples = ::generated_tests::mobilenet_quantized::get_examples;
Xusong Wang0e0721f2019-05-07 12:57:49 -070095
96// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
97// This function assumes the operation is always ADD.
98std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
99 uint8_t outputValue = 1 + static_cast<uint8_t>(len);
100 return {{.operands = {// Input
101 {.operandDimensions = {{0, {1}}}, .quant8AsymmOperands = {{0, {1}}}},
102 // Output
103 {.operandDimensions = {{0, {1}}},
104 .quant8AsymmOperands = {{0, {outputValue}}}}}}};
105}
106
107} // namespace quant8_model
108
109namespace {
110
Xusong Wanged0822b2019-02-25 16:58:58 -0800111enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };
Xusong Wang96e68dc2019-01-18 17:28:26 -0800112
Xusong Wanged0822b2019-02-25 16:58:58 -0800113// Creates cache handles based on provided file groups.
114// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
115void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
116 const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
117 handles->resize(fileGroups.size());
118 for (uint32_t i = 0; i < fileGroups.size(); i++) {
119 std::vector<int> fds;
120 for (const auto& file : fileGroups[i]) {
121 int fd;
122 if (mode[i] == AccessMode::READ_ONLY) {
123 fd = open(file.c_str(), O_RDONLY);
124 } else if (mode[i] == AccessMode::WRITE_ONLY) {
125 fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
126 } else if (mode[i] == AccessMode::READ_WRITE) {
127 fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
128 } else {
129 FAIL();
130 }
131 ASSERT_GE(fd, 0);
132 fds.push_back(fd);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800133 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800134 native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
135 ASSERT_NE(cacheNativeHandle, nullptr);
136 std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
137 (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800138 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800139}
140
141void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
142 hidl_vec<hidl_handle>* handles) {
143 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800144}
145
// Create a chain of broadcast operations. The second operand is always constant tensor [1].
// For simplicity, activation scalar is shared. The second operand is not shared
// in the model to let driver maintain a non-trivial size of constant data and the corresponding
// data locations in cache.
//
//                --------- activation ---------
//                ↓      ↓      ↓             ↓
// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
//                ↑      ↑      ↑             ↑
//               [1]    [1]    [1]           [1]
//
// This function assumes the operation is either ADD or MUL.
//
// Operand index layout produced below:
//   operands[0]         - the shared activation scalar (INT32, CONSTANT_COPY, value 0).
//   operands[2*i + 1]   - first input of operation i (MODEL_INPUT for i == 0, otherwise the
//                         TEMPORARY_VARIABLE output of operation i - 1).
//   operands[2*i + 2]   - constant second input of operation i, logical value 1.
//   operands[2*len + 1] - the model output (operands.back()).
// The constant pool layout matches: int32 activation at offset 0, then one CppType value
// per operation.
//
// CppType/operandType are expected to be <float, TENSOR_FLOAT32> or
// <uint8_t, TENSOR_QUANT8_ASYMM> (see createLargeTestModel below).
template <typename CppType, OperandType operandType>
Model createLargeTestModelImpl(OperationType op, uint32_t len) {
    EXPECT_TRUE(op == OperationType::ADD || op == OperationType::MUL);

    // Model operations and operands.
    std::vector<Operation> operations(len);
    std::vector<Operand> operands(len * 2 + 2);

    // The constant buffer pool. This contains the activation scalar, followed by the
    // per-operation constant operands.
    std::vector<uint8_t> operandValues(sizeof(int32_t) + len * sizeof(CppType));

    // The activation scalar, value = 0.
    operands[0] = {
            .type = OperandType::INT32,
            .dimensions = {},
            .numberOfConsumers = len,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = OperandLifeTime::CONSTANT_COPY,
            .location = {.poolIndex = 0, .offset = 0, .length = sizeof(int32_t)},
    };
    memset(operandValues.data(), 0, sizeof(int32_t));

    // The buffer value of the constant second operand. The logical value is always 1.0f.
    CppType bufferValue;
    // The scale of the first and second operand.
    float scale1, scale2;
    if (operandType == OperandType::TENSOR_FLOAT32) {
        // Float tensors carry no quantization parameters.
        bufferValue = 1.0f;
        scale1 = 0.0f;
        scale2 = 0.0f;
    } else if (op == OperationType::ADD) {
        bufferValue = 1;
        scale1 = 1.0f;
        scale2 = 1.0f;
    } else {
        // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
        // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
        bufferValue = 2;
        scale1 = 1.0f;
        scale2 = 0.5f;
    }

    for (uint32_t i = 0; i < len; i++) {
        const uint32_t firstInputIndex = i * 2 + 1;
        const uint32_t secondInputIndex = firstInputIndex + 1;
        const uint32_t outputIndex = secondInputIndex + 1;

        // The first operation input.
        operands[firstInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale1,
                .zeroPoint = 0,
                .lifetime = (i == 0 ? OperandLifeTime::MODEL_INPUT
                                    : OperandLifeTime::TEMPORARY_VARIABLE),
                .location = {},
        };

        // The second operation input, value = 1.
        operands[secondInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale2,
                .zeroPoint = 0,
                .lifetime = OperandLifeTime::CONSTANT_COPY,
                .location = {.poolIndex = 0,
                             .offset = static_cast<uint32_t>(i * sizeof(CppType) + sizeof(int32_t)),
                             .length = sizeof(CppType)},
        };
        // Write this operation's constant into its slot of the pool (after the activation int32).
        memcpy(operandValues.data() + sizeof(int32_t) + i * sizeof(CppType), &bufferValue,
               sizeof(CppType));

        // The operation. All operations share the same activation scalar.
        // The output operand is created as an input in the next iteration of the loop, in the case
        // of all but the last member of the chain; and after the loop as a model output, in the
        // case of the last member of the chain.
        operations[i] = {
                .type = op,
                .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
                .outputs = {outputIndex},
        };
    }

    // The model output.
    operands.back() = {
            .type = operandType,
            .dimensions = {1},
            .numberOfConsumers = 0,
            .scale = scale1,
            .zeroPoint = 0,
            .lifetime = OperandLifeTime::MODEL_OUTPUT,
            .location = {},
    };

    const std::vector<uint32_t> inputIndexes = {1};
    const std::vector<uint32_t> outputIndexes = {len * 2 + 1};
    const std::vector<hidl_memory> pools = {};

    return {
            .operands = operands,
            .operations = operations,
            .inputIndexes = inputIndexes,
            .outputIndexes = outputIndexes,
            .operandValues = operandValues,
            .pools = pools,
    };
}
269
Xusong Wang96e68dc2019-01-18 17:28:26 -0800270} // namespace
271
272// Tag for the compilation caching tests.
Xusong Wang0e0721f2019-05-07 12:57:49 -0700273class CompilationCachingTestBase : public NeuralnetworksHidlTest {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800274 protected:
Xusong Wang0e0721f2019-05-07 12:57:49 -0700275 CompilationCachingTestBase(OperandType type) : kOperandType(type) {}
276
Xusong Wang96e68dc2019-01-18 17:28:26 -0800277 void SetUp() override {
278 NeuralnetworksHidlTest::SetUp();
Hervé Guihotac7ac522019-02-12 16:22:44 -0800279 ASSERT_NE(device.get(), nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800280
Xusong Wanged0822b2019-02-25 16:58:58 -0800281 // Create cache directory. The cache directory and a temporary cache file is always created
282 // to test the behavior of prepareModelFromCache, even when caching is not supported.
Xusong Wang96e68dc2019-01-18 17:28:26 -0800283 char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
284 char* cacheDir = mkdtemp(cacheDirTemp);
285 ASSERT_NE(cacheDir, nullptr);
Xusong Wang6824cc12019-02-12 18:00:37 -0800286 mCacheDir = cacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800287 mCacheDir.push_back('/');
Xusong Wang6824cc12019-02-12 18:00:37 -0800288
Xusong Wanged0822b2019-02-25 16:58:58 -0800289 Return<void> ret = device->getNumberOfCacheFilesNeeded(
290 [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800291 EXPECT_EQ(ErrorStatus::NONE, status);
Xusong Wanged0822b2019-02-25 16:58:58 -0800292 mNumModelCache = numModelCache;
293 mNumDataCache = numDataCache;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800294 });
295 EXPECT_TRUE(ret.isOk());
Xusong Wanged0822b2019-02-25 16:58:58 -0800296 mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;
297
298 // Create empty cache files.
299 mTmpCache = mCacheDir + "tmp";
300 for (uint32_t i = 0; i < mNumModelCache; i++) {
301 mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
302 }
303 for (uint32_t i = 0; i < mNumDataCache; i++) {
304 mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
305 }
306 // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
307 hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
308 createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
309 createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
310 createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);
311
312 if (!mIsCachingSupported) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800313 LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
314 "support compilation caching.";
315 std::cout << "[ ] Early termination of test because vendor service does not "
316 "support compilation caching."
317 << std::endl;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800318 }
Xusong Wang6824cc12019-02-12 18:00:37 -0800319 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800320
Xusong Wang6824cc12019-02-12 18:00:37 -0800321 void TearDown() override {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700322 // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
323 if (!::testing::Test::HasFailure()) {
324 // Recursively remove the cache directory specified by mCacheDir.
325 auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
326 return remove(entry);
327 };
328 nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
Xusong Wang6824cc12019-02-12 18:00:37 -0800329 }
330 NeuralnetworksHidlTest::TearDown();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800331 }
332
Xusong Wang0e0721f2019-05-07 12:57:49 -0700333 // Model and examples creators. According to kOperandType, the following methods will return
334 // either float32 model/examples or the quant8 variant.
335 Model createTestModel() {
336 if (kOperandType == OperandType::TENSOR_FLOAT32) {
337 return float32_model::createTestModel();
338 } else {
339 return quant8_model::createTestModel();
340 }
341 }
342
343 std::vector<MixedTypedExample> get_examples() {
344 if (kOperandType == OperandType::TENSOR_FLOAT32) {
345 return float32_model::get_examples();
346 } else {
347 return quant8_model::get_examples();
348 }
349 }
350
351 Model createLargeTestModel(OperationType op, uint32_t len) {
352 if (kOperandType == OperandType::TENSOR_FLOAT32) {
353 return createLargeTestModelImpl<float, OperandType::TENSOR_FLOAT32>(op, len);
354 } else {
355 return createLargeTestModelImpl<uint8_t, OperandType::TENSOR_QUANT8_ASYMM>(op, len);
356 }
357 }
358
359 std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
360 if (kOperandType == OperandType::TENSOR_FLOAT32) {
361 return float32_model::getLargeModelExamples(len);
362 } else {
363 return quant8_model::getLargeModelExamples(len);
364 }
365 }
366
Xusong Wang4f71afc2019-04-26 15:33:38 -0700367 // See if the service can handle the model.
368 bool isModelFullySupported(const V1_2::Model& model) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800369 bool fullySupportsModel = false;
370 Return<void> supportedCall = device->getSupportedOperations_1_2(
371 model,
372 [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
373 ASSERT_EQ(ErrorStatus::NONE, status);
374 ASSERT_EQ(supported.size(), model.operations.size());
375 fullySupportsModel = std::all_of(supported.begin(), supported.end(),
376 [](bool valid) { return valid; });
377 });
Xusong Wang4f71afc2019-04-26 15:33:38 -0700378 EXPECT_TRUE(supportedCall.isOk());
379 return fullySupportsModel;
380 }
381
382 void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
383 const hidl_vec<hidl_handle>& dataCache,
384 sp<IPreparedModel>* preparedModel = nullptr) {
385 if (preparedModel != nullptr) *preparedModel = nullptr;
Xusong Wanged0822b2019-02-25 16:58:58 -0800386
387 // Launch prepare model.
388 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
389 ASSERT_NE(nullptr, preparedModelCallback.get());
Xusong Wang96e68dc2019-01-18 17:28:26 -0800390 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800391 Return<ErrorStatus> prepareLaunchStatus =
392 device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
393 dataCache, cacheToken, preparedModelCallback);
394 ASSERT_TRUE(prepareLaunchStatus.isOk());
395 ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);
396
397 // Retrieve prepared model.
398 preparedModelCallback->wait();
399 ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
400 if (preparedModel != nullptr) {
401 *preparedModel =
402 V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
403 .withDefault(nullptr);
404 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800405 }
406
407 bool checkEarlyTermination(ErrorStatus status) {
408 if (status == ErrorStatus::GENERAL_FAILURE) {
409 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
410 "save the prepared model that it does not support.";
411 std::cout << "[ ] Early termination of test because vendor service cannot "
412 "save the prepared model that it does not support."
413 << std::endl;
414 return true;
415 }
416 return false;
417 }
418
Xusong Wang4f71afc2019-04-26 15:33:38 -0700419 bool checkEarlyTermination(const V1_2::Model& model) {
420 if (!isModelFullySupported(model)) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800421 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
422 "prepare model that it does not support.";
423 std::cout << "[ ] Early termination of test because vendor service cannot "
424 "prepare model that it does not support."
425 << std::endl;
426 return true;
427 }
428 return false;
429 }
430
431 void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
432 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang96e68dc2019-01-18 17:28:26 -0800433 sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
434 // Launch prepare model from cache.
435 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
436 ASSERT_NE(nullptr, preparedModelCallback.get());
437 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800438 Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
439 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800440 ASSERT_TRUE(prepareLaunchStatus.isOk());
441 if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
442 *preparedModel = nullptr;
443 *status = static_cast<ErrorStatus>(prepareLaunchStatus);
444 return;
445 }
446
447 // Retrieve prepared model.
448 preparedModelCallback->wait();
449 *status = preparedModelCallback->getStatus();
450 *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
451 .withDefault(nullptr);
452 }
453
Xusong Wanged0822b2019-02-25 16:58:58 -0800454 // Absolute path to the temporary cache directory.
Xusong Wang6824cc12019-02-12 18:00:37 -0800455 std::string mCacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800456
457 // Groups of file paths for model and data cache in the tmp cache directory, initialized with
458 // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
459 // and the inner vector is for fds held by each handle.
460 std::vector<std::vector<std::string>> mModelCache;
461 std::vector<std::vector<std::string>> mDataCache;
462
463 // A separate temporary file path in the tmp cache directory.
464 std::string mTmpCache;
465
Xusong Wang96e68dc2019-01-18 17:28:26 -0800466 uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
Xusong Wanged0822b2019-02-25 16:58:58 -0800467 uint32_t mNumModelCache;
468 uint32_t mNumDataCache;
469 uint32_t mIsCachingSupported;
Xusong Wang0e0721f2019-05-07 12:57:49 -0700470
471 // The primary data type of the testModel.
472 const OperandType kOperandType;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800473};
474
// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
// pass running with float32 models and the second pass running with quant8 models.
// The OperandType parameter is supplied by the INSTANTIATE_TEST_* registration and is
// forwarded to the base class, which selects the model/example variant from it.
class CompilationCachingTest : public CompilationCachingTestBase,
                               public ::testing::WithParamInterface<OperandType> {
  protected:
    CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
};
482
// Basic round trip: compile a model while saving it to cache, then prepare the same
// model from the cache files alone and verify that execution results still match the
// reference examples. If the driver does not support caching, prepareModelFromCache
// must fail with GENERAL_FAILURE and produce no model.
TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            // Without caching support, retrieval must fail cleanly.
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
                                           testModel.relaxComputationFloat32toFloat16,
                                           /*testDynamicOutputShape=*/false);
}
523
// Same round trip as CacheSavingAndRetrieval, but with non-empty cache files and
// non-zero fd read/write offsets on both the save and the retrieval path. Drivers
// must honor the fd offset rather than assuming position 0.
TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyBytes[] = {0, 0};
        // Write a dummy integer to the cache.
        // The driver should be able to handle non-empty cache and non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
                            sizeof(dummyBytes)),
                      sizeof(dummyBytes));
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_EQ(
                    write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
                    sizeof(dummyBytes));
        }
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyByte = 0;
        // Advance the offset of each handle by one byte.
        // The driver should be able to handle non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            // Without caching support, retrieval must fail cleanly.
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
                                           testModel.relaxComputationFloat32toFloat16,
                                           /*testDynamicOutputShape=*/false);
}
586
// Passes the wrong number of model/data cache files (one too many, then one too few, for
// each of the two lists) to saveModelToCache. The compilation itself must still succeed
// and execute correctly (the driver simply must not cache), and a subsequent
// prepareModelFromCache with the same mismatched handles must fail with INVALID_ARGUMENT
// or GENERAL_FAILURE and return no model.
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Test with number of model cache files greater than mNumModelCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for model cache.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for data cache.
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
698
// Validates that prepareModelFromCache rejects cache-handle vectors whose sizes
// do not match the counts the driver reported via getNumberOfCacheFilesNeeded.
// For every mismatch the driver must fail (GENERAL_FAILURE preferred,
// INVALID_ARGUMENT tolerated) and must not return a prepared model.
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Test with number of model cache files greater than mNumModelCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily append an extra file so the handle vector is one too long;
        // mModelCache is restored immediately so later sections see the original state.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // GENERAL_FAILURE is the expected status; INVALID_ARGUMENT is also accepted.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily drop the last file so the handle vector is one too short.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
778
// Validates saveModelToCache when a single cache entry holds the wrong number
// of file descriptors (expected: exactly one fd per handle). The save must
// still yield a working prepared model (the driver falls back to compiling
// without caching — asserted by ASSERT_NE(preparedModel, nullptr) plus an
// execution check), and a subsequent prepareModelFromCache must fail because
// nothing valid was cached.
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        // mModelCache[i] is restored right after the handles are created.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
890
// Validates that prepareModelFromCache rejects cache entries that carry the
// wrong number of file descriptors (expected: exactly one fd per handle).
// A valid cache is written first; each loop then corrupts a single entry's fd
// count and expects preparation to fail (GENERAL_FAILURE preferred,
// INVALID_ARGUMENT tolerated) with no prepared model returned.
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // mModelCache[i] is restored right after the handles are created.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
970
// Validates saveModelToCache when one cache fd is opened READ_ONLY (the driver
// needs write access to save). The save must still yield a working prepared
// model (fallback compilation, verified by execution), and the subsequent
// prepareModelFromCache must fail because nothing valid was written.
TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Flip mode i to READ_ONLY just for handle creation, then restore it.
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
1028
Xusong Wang0e0721f2019-05-07 12:57:49 -07001029TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001030 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -07001031 const Model testModel = createTestModel();
1032 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001033 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
1034 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001035
1036 // Save the compilation to cache.
1037 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001038 hidl_vec<hidl_handle> modelCache, dataCache;
1039 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1040 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001041 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001042 }
1043
Xusong Wanged0822b2019-02-25 16:58:58 -08001044 // Go through each handle in model cache, test with invalid access mode.
1045 for (uint32_t i = 0; i < mNumModelCache; i++) {
1046 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001047 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -08001048 hidl_vec<hidl_handle> modelCache, dataCache;
1049 modelCacheMode[i] = AccessMode::WRITE_ONLY;
1050 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
1051 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
1052 modelCacheMode[i] = AccessMode::READ_WRITE;
1053 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001054 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1055 ASSERT_EQ(preparedModel, nullptr);
1056 }
1057
Xusong Wanged0822b2019-02-25 16:58:58 -08001058 // Go through each handle in data cache, test with invalid access mode.
1059 for (uint32_t i = 0; i < mNumDataCache; i++) {
1060 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001061 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -08001062 hidl_vec<hidl_handle> modelCache, dataCache;
1063 dataCacheMode[i] = AccessMode::WRITE_ONLY;
1064 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
1065 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
1066 dataCacheMode[i] = AccessMode::READ_WRITE;
1067 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001068 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1069 ASSERT_EQ(preparedModel, nullptr);
1070 }
1071}
1072
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001073// Copy file contents between file groups.
1074// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
1075// The outer vector sizes must match and the inner vectors must have size = 1.
1076static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
1077 const std::vector<std::vector<std::string>>& to) {
1078 constexpr size_t kBufferSize = 1000000;
1079 uint8_t buffer[kBufferSize];
1080
1081 ASSERT_EQ(from.size(), to.size());
1082 for (uint32_t i = 0; i < from.size(); i++) {
1083 ASSERT_EQ(from[i].size(), 1u);
1084 ASSERT_EQ(to[i].size(), 1u);
1085 int fromFd = open(from[i][0].c_str(), O_RDONLY);
1086 int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
1087 ASSERT_GE(fromFd, 0);
1088 ASSERT_GE(toFd, 0);
1089
1090 ssize_t readBytes;
1091 while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
1092 ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
1093 }
1094 ASSERT_GE(readBytes, 0);
1095
1096 close(fromFd);
1097 close(toFd);
1098 }
1099}
1100
// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
// Number of iterations for the probabilistic TOCTOU tests below.
constexpr uint32_t kNumIterationsTOCTOU = 100;
1104
// TOCTOU (time-of-check to time-of-use) test for saving: while the driver is
// writing the compilation of testModelAdd to cache, a second thread
// concurrently overwrites the same cache files with testModelMul's cache.
// Preparing from the possibly-corrupted cache afterwards may fail or succeed,
// but must never crash, and a successful preparation must still compute
// testModelAdd's results.
TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache.
    // The "_mul" suffix keeps the mul cache in separate files so it can be
    // copied over the add cache while the add cache is being written.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(testModelAdd, modelCache, dataCache);
            thread.join();
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1167
// TOCTOU (time-of-check to time-of-use) test for loading: while the driver is
// preparing a model from the cache of testModelAdd, a second thread
// concurrently overwrites the cache files with testModelMul's cache. The
// preparation may fail or succeed, but must never crash, and a successful
// preparation must still compute testModelAdd's results.
TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache.
    // The "_mul" suffix keeps the mul cache in separate files for the copier thread.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(testModelAdd, modelCache, dataCache);
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1230
// Security test: after caching two different models under two different
// tokens, the model cache of testModelAdd is wholesale replaced with
// testModelMul's cache files. Preparing testModelAdd from cache must then be
// rejected with GENERAL_FAILURE — the driver must detect that the cache
// content does not belong to the requested token/model.
TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // Save the testModelAdd compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelAdd, modelCache, dataCache);
    }

    // Replace the model cache of testModelAdd with testModelMul.
    copyCacheFiles(modelCacheMul, mModelCache);

    // Retrieve the preparedModel from cache, expect failure.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}
1278
// Operand types the parameterized caching tests are instantiated for, so the
// suite exercises both float32 and quantized asymmetric models.
static const auto kOperandTypeChoices =
        ::testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);

INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);
1283
1284class CompilationCachingSecurityTest
1285 : public CompilationCachingTestBase,
1286 public ::testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001287 protected:
Xusong Wang0e0721f2019-05-07 12:57:49 -07001288 CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}
1289
Xusong Wang96e68dc2019-01-18 17:28:26 -08001290 void SetUp() {
Xusong Wang0e0721f2019-05-07 12:57:49 -07001291 CompilationCachingTestBase::SetUp();
Xusong Wang96e68dc2019-01-18 17:28:26 -08001292 generator.seed(kSeed);
1293 }
1294
1295 // Get a random integer within a closed range [lower, upper].
1296 template <typename T>
1297 T getRandomInt(T lower, T upper) {
1298 std::uniform_int_distribution<T> dis(lower, upper);
1299 return dis(generator);
1300 }
1301
Xusong Wange371f6f2019-04-23 14:51:50 -07001302 // Randomly flip one single bit of the cache entry.
1303 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1304 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001305 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1306 long int fileSize = ftell(pFile);
1307 if (fileSize == 0) {
1308 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001309 *skip = true;
1310 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001311 }
1312 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1313 int readByte = fgetc(pFile);
1314 ASSERT_NE(readByte, EOF);
1315 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1316 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1317 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001318 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001319 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001320
Xusong Wange371f6f2019-04-23 14:51:50 -07001321 // Randomly append bytes to the cache entry.
1322 void appendBytesToCache(const std::string& filename, bool* skip) {
1323 FILE* pFile = fopen(filename.c_str(), "a");
1324 uint32_t appendLength = getRandomInt(1, 256);
1325 for (uint32_t i = 0; i < appendLength; i++) {
1326 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1327 }
1328 fclose(pFile);
1329 *skip = false;
1330 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001331
Xusong Wange371f6f2019-04-23 14:51:50 -07001332 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001333
Xusong Wange371f6f2019-04-23 14:51:50 -07001334 // Test if the driver behaves as expected when given corrupted cache or token.
1335 // The modifier will be invoked after save to cache but before prepare from cache.
1336 // The modifier accepts one pointer argument "skip" as the returning value, indicating
1337 // whether the test should be skipped or not.
1338 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wang4f71afc2019-04-26 15:33:38 -07001339 const Model testModel = createTestModel();
1340 if (checkEarlyTermination(testModel)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001341
Xusong Wanged0822b2019-02-25 16:58:58 -08001342 // Save the compilation to cache.
1343 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001344 hidl_vec<hidl_handle> modelCache, dataCache;
1345 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1346 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001347 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001348 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001349
Xusong Wange371f6f2019-04-23 14:51:50 -07001350 bool skip = false;
1351 modifier(&skip);
1352 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001353
Xusong Wange371f6f2019-04-23 14:51:50 -07001354 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001355 {
1356 sp<IPreparedModel> preparedModel = nullptr;
1357 ErrorStatus status;
1358 hidl_vec<hidl_handle> modelCache, dataCache;
1359 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1360 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1361 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001362
1363 switch (expected) {
1364 case ExpectedResult::GENERAL_FAILURE:
1365 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1366 ASSERT_EQ(preparedModel, nullptr);
1367 break;
1368 case ExpectedResult::NOT_CRASH:
1369 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1370 break;
1371 default:
1372 FAIL();
1373 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001374 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001375 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001376
Xusong Wang0e0721f2019-05-07 12:57:49 -07001377 const uint32_t kSeed = std::get<1>(GetParam());
Xusong Wange371f6f2019-04-23 14:51:50 -07001378 std::mt19937 generator;
1379};
1380
1381TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1382 if (!mIsCachingSupported) return;
1383 for (uint32_t i = 0; i < mNumModelCache; i++) {
1384 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1385 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1386 }
1387}
1388
1389TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1390 if (!mIsCachingSupported) return;
1391 for (uint32_t i = 0; i < mNumModelCache; i++) {
1392 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1393 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1394 }
1395}
1396
1397TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1398 if (!mIsCachingSupported) return;
1399 for (uint32_t i = 0; i < mNumDataCache; i++) {
1400 testCorruptedCache(ExpectedResult::NOT_CRASH,
1401 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1402 }
1403}
1404
1405TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1406 if (!mIsCachingSupported) return;
1407 for (uint32_t i = 0; i < mNumDataCache; i++) {
1408 testCorruptedCache(ExpectedResult::NOT_CRASH,
1409 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1410 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001411}
1412
1413TEST_P(CompilationCachingSecurityTest, WrongToken) {
1414 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001415 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1416 // Randomly flip one single bit in mToken.
1417 uint32_t ind =
1418 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1419 mToken[ind] ^= (1U << getRandomInt(0, 7));
1420 *skip = false;
1421 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001422}
1423
// Instantiate the security tests once per operand type, each with ten different RNG
// seeds (0-9) so repeated runs corrupt different bytes/bits of the cache and token.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        ::testing::Combine(kOperandTypeChoices, ::testing::Range(0U, 10U)));
Xusong Wang96e68dc2019-01-18 17:28:26 -08001426
1427} // namespace functional
1428} // namespace vts
1429} // namespace V1_2
1430} // namespace neuralnetworks
1431} // namespace hardware
1432} // namespace android