blob: 082d7583a3baf2f611a10c8518d82b1303a8acf7 [file] [log] [blame]
Xusong Wang96e68dc2019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070019#include <android-base/logging.h>
20#include <android/hidl/memory/1.0/IMemory.h>
21#include <ftw.h>
22#include <gtest/gtest.h>
23#include <hidlmemory/mapping.h>
24#include <unistd.h>
25
26#include <cstdio>
27#include <cstdlib>
28#include <random>
Michael Butler051cf392019-07-16 16:52:06 -070029#include <thread>
Xusong Wang96e68dc2019-01-18 17:28:26 -080030
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010031#include "1.2/Callbacks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080032#include "GeneratedTestHarness.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010033#include "MemoryUtils.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080034#include "TestHarness.h"
35#include "Utils.h"
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070036#include "VtsHalNeuralnetworks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080037
Slava Shklyaeve8b24462019-07-17 15:50:57 +010038namespace android::hardware::neuralnetworks::V1_2::generated_tests::
39 mobilenet_224_gender_basic_fixed {
40Model createTestModel();
41} // namespace android::hardware::neuralnetworks::V1_2::generated_tests::mobilenet_224_gender_basic_fixed
42
43namespace generated_tests::mobilenet_224_gender_basic_fixed {
44std::vector<test_helper::MixedTypedExample>& get_examples();
45} // namespace generated_tests::mobilenet_224_gender_basic_fixed
46
47namespace android::hardware::neuralnetworks::V1_2::generated_tests::mobilenet_quantized {
48Model createTestModel();
49} // namespace android::hardware::neuralnetworks::V1_2::generated_tests::mobilenet_quantized
50
51namespace generated_tests::mobilenet_quantized {
52std::vector<test_helper::MixedTypedExample>& get_examples();
53} // namespace generated_tests::mobilenet_quantized
54
Xusong Wang96e68dc2019-01-18 17:28:26 -080055namespace android {
56namespace hardware {
57namespace neuralnetworks {
58namespace V1_2 {
59namespace vts {
60namespace functional {
61
Michael Butler3835f612019-07-11 15:43:22 -070062using ::android::hardware::neuralnetworks::V1_0::OperandLifeTime;
63using ::android::hardware::neuralnetworks::V1_1::ExecutionPreference;
Xusong Wang96e68dc2019-01-18 17:28:26 -080064using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
65using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
Michael Butler3835f612019-07-11 15:43:22 -070066using ::android::hidl::memory::V1_0::IMemory;
Xusong Wang96e68dc2019-01-18 17:28:26 -080067using ::android::nn::allocateSharedMemory;
68using ::test_helper::MixedTypedExample;
69
Xusong Wang0e0721f2019-05-07 12:57:49 -070070namespace float32_model {
Xusong Wang96e68dc2019-01-18 17:28:26 -080071
Slava Shklyaeve8b24462019-07-17 15:50:57 +010072constexpr auto createTestModel = ::android::hardware::neuralnetworks::V1_2::generated_tests::
73 mobilenet_224_gender_basic_fixed::createTestModel;
74constexpr auto get_examples = ::generated_tests::mobilenet_224_gender_basic_fixed::get_examples;
Xusong Wang96e68dc2019-01-18 17:28:26 -080075
Xusong Wang0e0721f2019-05-07 12:57:49 -070076// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
77// This function assumes the operation is always ADD.
78std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
79 float outputValue = 1.0f + static_cast<float>(len);
80 return {{.operands = {
81 // Input
82 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {1.0f}}}},
83 // Output
84 {.operandDimensions = {{0, {1}}}, .float32Operands = {{0, {outputValue}}}}}}};
85}
86
87} // namespace float32_model
88
89namespace quant8_model {
90
Slava Shklyaeve8b24462019-07-17 15:50:57 +010091constexpr auto createTestModel = ::android::hardware::neuralnetworks::V1_2::generated_tests::
92 mobilenet_quantized::createTestModel;
93constexpr auto get_examples = ::generated_tests::mobilenet_quantized::get_examples;
Xusong Wang0e0721f2019-05-07 12:57:49 -070094
95// MixedTypedExample is defined in frameworks/ml/nn/tools/test_generator/include/TestHarness.h.
96// This function assumes the operation is always ADD.
97std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
98 uint8_t outputValue = 1 + static_cast<uint8_t>(len);
99 return {{.operands = {// Input
100 {.operandDimensions = {{0, {1}}}, .quant8AsymmOperands = {{0, {1}}}},
101 // Output
102 {.operandDimensions = {{0, {1}}},
103 .quant8AsymmOperands = {{0, {outputValue}}}}}}};
104}
105
106} // namespace quant8_model
107
108namespace {
109
// File access mode used when opening cache file descriptors for a test.
enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };
Xusong Wang96e68dc2019-01-18 17:28:26 -0800111
Xusong Wanged0822b2019-02-25 16:58:58 -0800112// Creates cache handles based on provided file groups.
113// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
114void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
115 const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
116 handles->resize(fileGroups.size());
117 for (uint32_t i = 0; i < fileGroups.size(); i++) {
118 std::vector<int> fds;
119 for (const auto& file : fileGroups[i]) {
120 int fd;
121 if (mode[i] == AccessMode::READ_ONLY) {
122 fd = open(file.c_str(), O_RDONLY);
123 } else if (mode[i] == AccessMode::WRITE_ONLY) {
124 fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
125 } else if (mode[i] == AccessMode::READ_WRITE) {
126 fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
127 } else {
128 FAIL();
129 }
130 ASSERT_GE(fd, 0);
131 fds.push_back(fd);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800132 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800133 native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
134 ASSERT_NE(cacheNativeHandle, nullptr);
135 std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
136 (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800137 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800138}
139
140void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
141 hidl_vec<hidl_handle>* handles) {
142 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800143}
144
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700145// Create a chain of broadcast operations. The second operand is always constant tensor [1].
146// For simplicity, activation scalar is shared. The second operand is not shared
147// in the model to let driver maintain a non-trivial size of constant data and the corresponding
148// data locations in cache.
149//
150// --------- activation --------
151// ↓ ↓ ↓ ↓
152// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
153// ↑ ↑ ↑ ↑
154// [1] [1] [1] [1]
155//
Xusong Wang0e0721f2019-05-07 12:57:49 -0700156// This function assumes the operation is either ADD or MUL.
157template <typename CppType, OperandType operandType>
158Model createLargeTestModelImpl(OperationType op, uint32_t len) {
159 EXPECT_TRUE(op == OperationType::ADD || op == OperationType::MUL);
160
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700161 // Model operations and operands.
162 std::vector<Operation> operations(len);
163 std::vector<Operand> operands(len * 2 + 2);
164
165 // The constant buffer pool. This contains the activation scalar, followed by the
166 // per-operation constant operands.
Xusong Wang0e0721f2019-05-07 12:57:49 -0700167 std::vector<uint8_t> operandValues(sizeof(int32_t) + len * sizeof(CppType));
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700168
169 // The activation scalar, value = 0.
170 operands[0] = {
171 .type = OperandType::INT32,
172 .dimensions = {},
173 .numberOfConsumers = len,
174 .scale = 0.0f,
175 .zeroPoint = 0,
176 .lifetime = OperandLifeTime::CONSTANT_COPY,
177 .location = {.poolIndex = 0, .offset = 0, .length = sizeof(int32_t)},
178 };
179 memset(operandValues.data(), 0, sizeof(int32_t));
180
Xusong Wang0e0721f2019-05-07 12:57:49 -0700181 // The buffer value of the constant second operand. The logical value is always 1.0f.
182 CppType bufferValue;
183 // The scale of the first and second operand.
184 float scale1, scale2;
185 if (operandType == OperandType::TENSOR_FLOAT32) {
186 bufferValue = 1.0f;
187 scale1 = 0.0f;
188 scale2 = 0.0f;
189 } else if (op == OperationType::ADD) {
190 bufferValue = 1;
191 scale1 = 1.0f;
192 scale2 = 1.0f;
193 } else {
194 // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
195 // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
196 bufferValue = 2;
197 scale1 = 1.0f;
198 scale2 = 0.5f;
199 }
200
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700201 for (uint32_t i = 0; i < len; i++) {
202 const uint32_t firstInputIndex = i * 2 + 1;
203 const uint32_t secondInputIndex = firstInputIndex + 1;
204 const uint32_t outputIndex = secondInputIndex + 1;
205
206 // The first operation input.
207 operands[firstInputIndex] = {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700208 .type = operandType,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700209 .dimensions = {1},
210 .numberOfConsumers = 1,
Xusong Wang0e0721f2019-05-07 12:57:49 -0700211 .scale = scale1,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700212 .zeroPoint = 0,
213 .lifetime = (i == 0 ? OperandLifeTime::MODEL_INPUT
214 : OperandLifeTime::TEMPORARY_VARIABLE),
215 .location = {},
216 };
217
218 // The second operation input, value = 1.
219 operands[secondInputIndex] = {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700220 .type = operandType,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700221 .dimensions = {1},
222 .numberOfConsumers = 1,
Xusong Wang0e0721f2019-05-07 12:57:49 -0700223 .scale = scale2,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700224 .zeroPoint = 0,
225 .lifetime = OperandLifeTime::CONSTANT_COPY,
226 .location = {.poolIndex = 0,
Xusong Wang0e0721f2019-05-07 12:57:49 -0700227 .offset = static_cast<uint32_t>(i * sizeof(CppType) + sizeof(int32_t)),
228 .length = sizeof(CppType)},
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700229 };
Xusong Wang0e0721f2019-05-07 12:57:49 -0700230 memcpy(operandValues.data() + sizeof(int32_t) + i * sizeof(CppType), &bufferValue,
231 sizeof(CppType));
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700232
233 // The operation. All operations share the same activation scalar.
234 // The output operand is created as an input in the next iteration of the loop, in the case
235 // of all but the last member of the chain; and after the loop as a model output, in the
236 // case of the last member of the chain.
237 operations[i] = {
238 .type = op,
239 .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
240 .outputs = {outputIndex},
241 };
242 }
243
244 // The model output.
245 operands.back() = {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700246 .type = operandType,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700247 .dimensions = {1},
248 .numberOfConsumers = 0,
Xusong Wang0e0721f2019-05-07 12:57:49 -0700249 .scale = scale1,
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700250 .zeroPoint = 0,
251 .lifetime = OperandLifeTime::MODEL_OUTPUT,
252 .location = {},
253 };
254
255 const std::vector<uint32_t> inputIndexes = {1};
256 const std::vector<uint32_t> outputIndexes = {len * 2 + 1};
257 const std::vector<hidl_memory> pools = {};
258
259 return {
260 .operands = operands,
261 .operations = operations,
262 .inputIndexes = inputIndexes,
263 .outputIndexes = outputIndexes,
264 .operandValues = operandValues,
265 .pools = pools,
266 };
267}
268
Xusong Wang96e68dc2019-01-18 17:28:26 -0800269} // namespace
270
271// Tag for the compilation caching tests.
Xusong Wang0e0721f2019-05-07 12:57:49 -0700272class CompilationCachingTestBase : public NeuralnetworksHidlTest {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800273 protected:
Xusong Wang0e0721f2019-05-07 12:57:49 -0700274 CompilationCachingTestBase(OperandType type) : kOperandType(type) {}
275
Xusong Wang96e68dc2019-01-18 17:28:26 -0800276 void SetUp() override {
277 NeuralnetworksHidlTest::SetUp();
Hervé Guihotac7ac522019-02-12 16:22:44 -0800278 ASSERT_NE(device.get(), nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800279
Xusong Wanged0822b2019-02-25 16:58:58 -0800280 // Create cache directory. The cache directory and a temporary cache file is always created
281 // to test the behavior of prepareModelFromCache, even when caching is not supported.
Xusong Wang96e68dc2019-01-18 17:28:26 -0800282 char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
283 char* cacheDir = mkdtemp(cacheDirTemp);
284 ASSERT_NE(cacheDir, nullptr);
Xusong Wang6824cc12019-02-12 18:00:37 -0800285 mCacheDir = cacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800286 mCacheDir.push_back('/');
Xusong Wang6824cc12019-02-12 18:00:37 -0800287
Xusong Wanged0822b2019-02-25 16:58:58 -0800288 Return<void> ret = device->getNumberOfCacheFilesNeeded(
289 [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800290 EXPECT_EQ(ErrorStatus::NONE, status);
Xusong Wanged0822b2019-02-25 16:58:58 -0800291 mNumModelCache = numModelCache;
292 mNumDataCache = numDataCache;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800293 });
294 EXPECT_TRUE(ret.isOk());
Xusong Wanged0822b2019-02-25 16:58:58 -0800295 mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;
296
297 // Create empty cache files.
298 mTmpCache = mCacheDir + "tmp";
299 for (uint32_t i = 0; i < mNumModelCache; i++) {
300 mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
301 }
302 for (uint32_t i = 0; i < mNumDataCache; i++) {
303 mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
304 }
305 // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
306 hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
307 createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
308 createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
309 createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);
310
311 if (!mIsCachingSupported) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800312 LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
313 "support compilation caching.";
314 std::cout << "[ ] Early termination of test because vendor service does not "
315 "support compilation caching."
316 << std::endl;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800317 }
Xusong Wang6824cc12019-02-12 18:00:37 -0800318 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800319
Xusong Wang6824cc12019-02-12 18:00:37 -0800320 void TearDown() override {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700321 // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
322 if (!::testing::Test::HasFailure()) {
323 // Recursively remove the cache directory specified by mCacheDir.
324 auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
325 return remove(entry);
326 };
327 nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
Xusong Wang6824cc12019-02-12 18:00:37 -0800328 }
329 NeuralnetworksHidlTest::TearDown();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800330 }
331
Xusong Wang0e0721f2019-05-07 12:57:49 -0700332 // Model and examples creators. According to kOperandType, the following methods will return
333 // either float32 model/examples or the quant8 variant.
334 Model createTestModel() {
335 if (kOperandType == OperandType::TENSOR_FLOAT32) {
336 return float32_model::createTestModel();
337 } else {
338 return quant8_model::createTestModel();
339 }
340 }
341
342 std::vector<MixedTypedExample> get_examples() {
343 if (kOperandType == OperandType::TENSOR_FLOAT32) {
344 return float32_model::get_examples();
345 } else {
346 return quant8_model::get_examples();
347 }
348 }
349
350 Model createLargeTestModel(OperationType op, uint32_t len) {
351 if (kOperandType == OperandType::TENSOR_FLOAT32) {
352 return createLargeTestModelImpl<float, OperandType::TENSOR_FLOAT32>(op, len);
353 } else {
354 return createLargeTestModelImpl<uint8_t, OperandType::TENSOR_QUANT8_ASYMM>(op, len);
355 }
356 }
357
358 std::vector<MixedTypedExample> getLargeModelExamples(uint32_t len) {
359 if (kOperandType == OperandType::TENSOR_FLOAT32) {
360 return float32_model::getLargeModelExamples(len);
361 } else {
362 return quant8_model::getLargeModelExamples(len);
363 }
364 }
365
Xusong Wang4f71afc2019-04-26 15:33:38 -0700366 // See if the service can handle the model.
367 bool isModelFullySupported(const V1_2::Model& model) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800368 bool fullySupportsModel = false;
369 Return<void> supportedCall = device->getSupportedOperations_1_2(
370 model,
371 [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
372 ASSERT_EQ(ErrorStatus::NONE, status);
373 ASSERT_EQ(supported.size(), model.operations.size());
374 fullySupportsModel = std::all_of(supported.begin(), supported.end(),
375 [](bool valid) { return valid; });
376 });
Xusong Wang4f71afc2019-04-26 15:33:38 -0700377 EXPECT_TRUE(supportedCall.isOk());
378 return fullySupportsModel;
379 }
380
381 void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
382 const hidl_vec<hidl_handle>& dataCache,
383 sp<IPreparedModel>* preparedModel = nullptr) {
384 if (preparedModel != nullptr) *preparedModel = nullptr;
Xusong Wanged0822b2019-02-25 16:58:58 -0800385
386 // Launch prepare model.
387 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
388 ASSERT_NE(nullptr, preparedModelCallback.get());
Xusong Wang96e68dc2019-01-18 17:28:26 -0800389 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800390 Return<ErrorStatus> prepareLaunchStatus =
391 device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
392 dataCache, cacheToken, preparedModelCallback);
393 ASSERT_TRUE(prepareLaunchStatus.isOk());
394 ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);
395
396 // Retrieve prepared model.
397 preparedModelCallback->wait();
398 ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
399 if (preparedModel != nullptr) {
400 *preparedModel =
401 V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
402 .withDefault(nullptr);
403 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800404 }
405
406 bool checkEarlyTermination(ErrorStatus status) {
407 if (status == ErrorStatus::GENERAL_FAILURE) {
408 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
409 "save the prepared model that it does not support.";
410 std::cout << "[ ] Early termination of test because vendor service cannot "
411 "save the prepared model that it does not support."
412 << std::endl;
413 return true;
414 }
415 return false;
416 }
417
Xusong Wang4f71afc2019-04-26 15:33:38 -0700418 bool checkEarlyTermination(const V1_2::Model& model) {
419 if (!isModelFullySupported(model)) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800420 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
421 "prepare model that it does not support.";
422 std::cout << "[ ] Early termination of test because vendor service cannot "
423 "prepare model that it does not support."
424 << std::endl;
425 return true;
426 }
427 return false;
428 }
429
430 void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
431 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang96e68dc2019-01-18 17:28:26 -0800432 sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
433 // Launch prepare model from cache.
434 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
435 ASSERT_NE(nullptr, preparedModelCallback.get());
436 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800437 Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
438 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800439 ASSERT_TRUE(prepareLaunchStatus.isOk());
440 if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
441 *preparedModel = nullptr;
442 *status = static_cast<ErrorStatus>(prepareLaunchStatus);
443 return;
444 }
445
446 // Retrieve prepared model.
447 preparedModelCallback->wait();
448 *status = preparedModelCallback->getStatus();
449 *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
450 .withDefault(nullptr);
451 }
452
Xusong Wanged0822b2019-02-25 16:58:58 -0800453 // Absolute path to the temporary cache directory.
Xusong Wang6824cc12019-02-12 18:00:37 -0800454 std::string mCacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800455
456 // Groups of file paths for model and data cache in the tmp cache directory, initialized with
457 // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
458 // and the inner vector is for fds held by each handle.
459 std::vector<std::vector<std::string>> mModelCache;
460 std::vector<std::vector<std::string>> mDataCache;
461
462 // A separate temporary file path in the tmp cache directory.
463 std::string mTmpCache;
464
Xusong Wang96e68dc2019-01-18 17:28:26 -0800465 uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
Xusong Wanged0822b2019-02-25 16:58:58 -0800466 uint32_t mNumModelCache;
467 uint32_t mNumDataCache;
468 uint32_t mIsCachingSupported;
Xusong Wang0e0721f2019-05-07 12:57:49 -0700469
470 // The primary data type of the testModel.
471 const OperandType kOperandType;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800472};
473
Xusong Wang0e0721f2019-05-07 12:57:49 -0700474// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
475// pass running with float32 models and the second pass running with quant8 models.
476class CompilationCachingTest : public CompilationCachingTestBase,
477 public ::testing::WithParamInterface<OperandType> {
478 protected:
479 CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
480};
481
482TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800483 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700484 const Model testModel = createTestModel();
485 if (checkEarlyTermination(testModel)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800486 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800487
488 // Save the compilation to cache.
489 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800490 hidl_vec<hidl_handle> modelCache, dataCache;
491 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
492 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -0700493 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800494 }
495
496 // Retrieve preparedModel from cache.
497 {
498 preparedModel = nullptr;
499 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800500 hidl_vec<hidl_handle> modelCache, dataCache;
501 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
502 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
503 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800504 if (!mIsCachingSupported) {
505 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
506 ASSERT_EQ(preparedModel, nullptr);
507 return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800508 } else if (checkEarlyTermination(status)) {
509 ASSERT_EQ(preparedModel, nullptr);
510 return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800511 } else {
512 ASSERT_EQ(status, ErrorStatus::NONE);
513 ASSERT_NE(preparedModel, nullptr);
514 }
515 }
516
517 // Execute and verify results.
518 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
519 testModel.relaxComputationFloat32toFloat16,
520 /*testDynamicOutputShape=*/false);
521}
522
Xusong Wang0e0721f2019-05-07 12:57:49 -0700523TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800524 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700525 const Model testModel = createTestModel();
526 if (checkEarlyTermination(testModel)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800527 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800528
529 // Save the compilation to cache.
530 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800531 hidl_vec<hidl_handle> modelCache, dataCache;
532 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
533 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
534 uint8_t dummyBytes[] = {0, 0};
535 // Write a dummy integer to the cache.
536 // The driver should be able to handle non-empty cache and non-zero fd offset.
537 for (uint32_t i = 0; i < modelCache.size(); i++) {
538 ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
539 sizeof(dummyBytes)),
540 sizeof(dummyBytes));
Xusong Wang96e68dc2019-01-18 17:28:26 -0800541 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800542 for (uint32_t i = 0; i < dataCache.size(); i++) {
543 ASSERT_EQ(
544 write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
545 sizeof(dummyBytes));
546 }
Xusong Wang4f71afc2019-04-26 15:33:38 -0700547 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800548 }
549
550 // Retrieve preparedModel from cache.
551 {
552 preparedModel = nullptr;
553 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800554 hidl_vec<hidl_handle> modelCache, dataCache;
555 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
556 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800557 uint8_t dummyByte = 0;
Xusong Wanged0822b2019-02-25 16:58:58 -0800558 // Advance the offset of each handle by one byte.
559 // The driver should be able to handle non-zero fd offset.
560 for (uint32_t i = 0; i < modelCache.size(); i++) {
561 ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
562 }
563 for (uint32_t i = 0; i < dataCache.size(); i++) {
564 ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
565 }
566 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800567 if (!mIsCachingSupported) {
568 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
569 ASSERT_EQ(preparedModel, nullptr);
570 return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800571 } else if (checkEarlyTermination(status)) {
572 ASSERT_EQ(preparedModel, nullptr);
573 return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800574 } else {
575 ASSERT_EQ(status, ErrorStatus::NONE);
576 ASSERT_NE(preparedModel, nullptr);
577 }
578 }
579
580 // Execute and verify results.
581 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; }, get_examples(),
582 testModel.relaxComputationFloat32toFloat16,
583 /*testDynamicOutputShape=*/false);
584}
585
Xusong Wang0e0721f2019-05-07 12:57:49 -0700586TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800587 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -0700588 const Model testModel = createTestModel();
589 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800590
591 // Test with number of model cache files greater than mNumModelCache.
592 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800593 hidl_vec<hidl_handle> modelCache, dataCache;
594 // Pass an additional cache file for model cache.
595 mModelCache.push_back({mTmpCache});
596 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
597 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
598 mModelCache.pop_back();
599 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700600 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800601 ASSERT_NE(preparedModel, nullptr);
602 // Execute and verify results.
603 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
604 get_examples(),
605 testModel.relaxComputationFloat32toFloat16,
606 /*testDynamicOutputShape=*/false);
607 // Check if prepareModelFromCache fails.
608 preparedModel = nullptr;
609 ErrorStatus status;
610 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
611 if (status != ErrorStatus::INVALID_ARGUMENT) {
612 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
613 }
614 ASSERT_EQ(preparedModel, nullptr);
615 }
616
617 // Test with number of model cache files smaller than mNumModelCache.
618 if (mModelCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800619 hidl_vec<hidl_handle> modelCache, dataCache;
620 // Pop out the last cache file.
621 auto tmp = mModelCache.back();
622 mModelCache.pop_back();
623 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
624 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
625 mModelCache.push_back(tmp);
626 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700627 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800628 ASSERT_NE(preparedModel, nullptr);
629 // Execute and verify results.
630 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
631 get_examples(),
632 testModel.relaxComputationFloat32toFloat16,
633 /*testDynamicOutputShape=*/false);
634 // Check if prepareModelFromCache fails.
635 preparedModel = nullptr;
636 ErrorStatus status;
637 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
638 if (status != ErrorStatus::INVALID_ARGUMENT) {
639 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
640 }
641 ASSERT_EQ(preparedModel, nullptr);
642 }
643
644 // Test with number of data cache files greater than mNumDataCache.
645 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800646 hidl_vec<hidl_handle> modelCache, dataCache;
647 // Pass an additional cache file for data cache.
648 mDataCache.push_back({mTmpCache});
649 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
650 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
651 mDataCache.pop_back();
652 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700653 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800654 ASSERT_NE(preparedModel, nullptr);
655 // Execute and verify results.
656 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
657 get_examples(),
658 testModel.relaxComputationFloat32toFloat16,
659 /*testDynamicOutputShape=*/false);
660 // Check if prepareModelFromCache fails.
661 preparedModel = nullptr;
662 ErrorStatus status;
663 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
664 if (status != ErrorStatus::INVALID_ARGUMENT) {
665 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
666 }
667 ASSERT_EQ(preparedModel, nullptr);
668 }
669
670 // Test with number of data cache files smaller than mNumDataCache.
671 if (mDataCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800672 hidl_vec<hidl_handle> modelCache, dataCache;
673 // Pop out the last cache file.
674 auto tmp = mDataCache.back();
675 mDataCache.pop_back();
676 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
677 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
678 mDataCache.push_back(tmp);
679 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang4f71afc2019-04-26 15:33:38 -0700680 saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800681 ASSERT_NE(preparedModel, nullptr);
682 // Execute and verify results.
683 generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
684 get_examples(),
685 testModel.relaxComputationFloat32toFloat16,
686 /*testDynamicOutputShape=*/false);
687 // Check if prepareModelFromCache fails.
688 preparedModel = nullptr;
689 ErrorStatus status;
690 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
691 if (status != ErrorStatus::INVALID_ARGUMENT) {
692 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
693 }
694 ASSERT_EQ(preparedModel, nullptr);
695 }
696}
697
// Verifies that prepareModelFromCache rejects a request whose number of model or data
// cache files differs from the counts the fixture expects (mNumModelCache / mNumDataCache).
// The driver must report GENERAL_FAILURE or INVALID_ARGUMENT and must not return a
// prepared model.
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache so a valid cache exists before each invalid retrieval.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Test with number of model cache files greater than mNumModelCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily add an extra file name so an extra handle is created; pop_back
        // restores the fixture's state for the following scenarios.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is acceptable.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily drop the last file name; push_back restores it afterwards.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
777
// Verifies driver behavior when a cache handle passed to prepareModel_1_2 holds an
// invalid number of fds (2 instead of 1, or 0). Compilation itself must still succeed
// (the driver may simply skip caching), the prepared model must produce correct results,
// and a subsequent prepareModelFromCache must fail with INVALID_ARGUMENT or
// GENERAL_FAILURE without returning a prepared model.
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i: temporarily give entry i a second
        // file so its handle carries two fds; pop_back restores the fixture state.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i: temporarily empty entry i so its
        // handle carries zero fds; push_back restores the fixture state.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
889
// Verifies that prepareModelFromCache rejects a request in which any single cache handle
// holds an invalid number of fds (2 instead of 1, or 0). The driver must report
// GENERAL_FAILURE or INVALID_ARGUMENT and must not return a prepared model.
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;

    // Save the compilation to cache so a valid cache exists before each invalid retrieval.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModel, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily give entry i a second file so its handle carries two fds;
        // pop_back restores the fixture state.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is acceptable.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily empty entry i so its handle carries zero fds; push_back restores it.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
969
// Verifies driver behavior when one cache handle passed to prepareModel_1_2 is opened
// read-only (the driver needs write access to save the compilation). Compilation itself
// must still succeed and produce correct results, but a subsequent prepareModelFromCache
// must fail with INVALID_ARGUMENT or GENERAL_FAILURE and no prepared model.
TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const Model testModel = createTestModel();
    if (checkEarlyTermination(testModel)) return;
    // Per-entry access modes; one entry at a time is flipped to READ_ONLY below.
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Open entry i read-only for handle creation only; restore READ_WRITE afterwards.
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(testModel, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, [](int) { return false; },
                                               get_examples(),
                                               testModel.relaxComputationFloat32toFloat16,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
1027
Xusong Wang0e0721f2019-05-07 12:57:49 -07001028TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001029 // Create test HIDL model and compile.
Xusong Wang4f71afc2019-04-26 15:33:38 -07001030 const Model testModel = createTestModel();
1031 if (checkEarlyTermination(testModel)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001032 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
1033 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001034
1035 // Save the compilation to cache.
1036 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001037 hidl_vec<hidl_handle> modelCache, dataCache;
1038 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1039 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001040 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001041 }
1042
Xusong Wanged0822b2019-02-25 16:58:58 -08001043 // Go through each handle in model cache, test with invalid access mode.
1044 for (uint32_t i = 0; i < mNumModelCache; i++) {
1045 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001046 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -08001047 hidl_vec<hidl_handle> modelCache, dataCache;
1048 modelCacheMode[i] = AccessMode::WRITE_ONLY;
1049 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
1050 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
1051 modelCacheMode[i] = AccessMode::READ_WRITE;
1052 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001053 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1054 ASSERT_EQ(preparedModel, nullptr);
1055 }
1056
Xusong Wanged0822b2019-02-25 16:58:58 -08001057 // Go through each handle in data cache, test with invalid access mode.
1058 for (uint32_t i = 0; i < mNumDataCache; i++) {
1059 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001060 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -08001061 hidl_vec<hidl_handle> modelCache, dataCache;
1062 dataCacheMode[i] = AccessMode::WRITE_ONLY;
1063 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
1064 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
1065 dataCacheMode[i] = AccessMode::READ_WRITE;
1066 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001067 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1068 ASSERT_EQ(preparedModel, nullptr);
1069 }
1070}
1071
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001072// Copy file contents between file groups.
1073// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
1074// The outer vector sizes must match and the inner vectors must have size = 1.
1075static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
1076 const std::vector<std::vector<std::string>>& to) {
1077 constexpr size_t kBufferSize = 1000000;
1078 uint8_t buffer[kBufferSize];
1079
1080 ASSERT_EQ(from.size(), to.size());
1081 for (uint32_t i = 0; i < from.size(); i++) {
1082 ASSERT_EQ(from[i].size(), 1u);
1083 ASSERT_EQ(to[i].size(), 1u);
1084 int fromFd = open(from[i][0].c_str(), O_RDONLY);
1085 int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
1086 ASSERT_GE(fromFd, 0);
1087 ASSERT_GE(toFd, 0);
1088
1089 ssize_t readBytes;
1090 while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
1091 ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
1092 }
1093 ASSERT_GE(readBytes, 0);
1094
1095 close(fromFd);
1096 close(toFd);
1097 }
1098}
1099
// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
// Number of repetitions for each TOCTOU test; the race is probabilistic, so more
// iterations improve the chance of exposing a vulnerable driver.
constexpr uint32_t kNumIterationsTOCTOU = 100;
1103
// Time-of-check/time-of-use test for saving to cache: while the driver writes
// testModelAdd's cache files, a second thread concurrently overwrites the same files
// with testModelMul's cache content. The driver must never crash; if a model is later
// prepared from the (possibly corrupted) cache, it must still compute correct ADD results.
TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache, under separate "_mul" file names,
    // to serve as the replacement content for the race below.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(testModelAdd, modelCache, dataCache);
            thread.join();
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1166
// Time-of-check/time-of-use test for preparing from cache: while the driver reads
// testModelAdd's cache files, a second thread concurrently overwrites the same files
// with testModelMul's cache content. The driver must never crash; if a model is
// prepared anyway, it must still compute correct ADD results.
TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    if (checkEarlyTermination(testModelMul)) return;
    const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    if (checkEarlyTermination(testModelAdd)) return;

    // Save the testModelMul compilation to cache, under separate "_mul" file names,
    // to serve as the replacement content for the race below.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(testModelMul, modelCache, dataCache);
    }

    // Use a different token for testModelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the testModelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(testModelAdd, modelCache, dataCache);
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(
                        preparedModel, [](int) { return false; },
                        getLargeModelExamples(kLargeModelSize),
                        testModelAdd.relaxComputationFloat32toFloat16,
                        /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1229
Xusong Wang0e0721f2019-05-07 12:57:49 -07001230TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001231 if (!mIsCachingSupported) return;
1232
Xusong Wang4f71afc2019-04-26 15:33:38 -07001233 // Create test models and check if fully supported by the service.
1234 const Model testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
1235 if (checkEarlyTermination(testModelMul)) return;
1236 const Model testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
1237 if (checkEarlyTermination(testModelAdd)) return;
1238
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001239 // Save the testModelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001240 auto modelCacheMul = mModelCache;
1241 for (auto& cache : modelCacheMul) {
1242 cache[0].append("_mul");
1243 }
1244 {
1245 hidl_vec<hidl_handle> modelCache, dataCache;
1246 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1247 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001248 saveModelToCache(testModelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001249 }
1250
1251 // Use a different token for testModelAdd.
1252 mToken[0]++;
1253
1254 // Save the testModelAdd compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001255 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001256 hidl_vec<hidl_handle> modelCache, dataCache;
1257 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1258 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001259 saveModelToCache(testModelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001260 }
1261
1262 // Replace the model cache of testModelAdd with testModelMul.
1263 copyCacheFiles(modelCacheMul, mModelCache);
1264
1265 // Retrieve the preparedModel from cache, expect failure.
1266 {
1267 sp<IPreparedModel> preparedModel = nullptr;
1268 ErrorStatus status;
1269 hidl_vec<hidl_handle> modelCache, dataCache;
1270 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1271 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1272 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1273 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1274 ASSERT_EQ(preparedModel, nullptr);
1275 }
1276}
1277
// Operand data types the parameterized compilation caching tests are run with.
static const auto kOperandTypeChoices =
        ::testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);

// Instantiate CompilationCachingTest once per operand type in kOperandTypeChoices.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);
1282
1283class CompilationCachingSecurityTest
1284 : public CompilationCachingTestBase,
1285 public ::testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001286 protected:
Xusong Wang0e0721f2019-05-07 12:57:49 -07001287 CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}
1288
Xusong Wang96e68dc2019-01-18 17:28:26 -08001289 void SetUp() {
Xusong Wang0e0721f2019-05-07 12:57:49 -07001290 CompilationCachingTestBase::SetUp();
Xusong Wang96e68dc2019-01-18 17:28:26 -08001291 generator.seed(kSeed);
1292 }
1293
1294 // Get a random integer within a closed range [lower, upper].
1295 template <typename T>
1296 T getRandomInt(T lower, T upper) {
1297 std::uniform_int_distribution<T> dis(lower, upper);
1298 return dis(generator);
1299 }
1300
Xusong Wange371f6f2019-04-23 14:51:50 -07001301 // Randomly flip one single bit of the cache entry.
1302 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1303 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001304 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1305 long int fileSize = ftell(pFile);
1306 if (fileSize == 0) {
1307 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001308 *skip = true;
1309 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001310 }
1311 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1312 int readByte = fgetc(pFile);
1313 ASSERT_NE(readByte, EOF);
1314 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1315 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1316 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001317 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001318 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001319
Xusong Wange371f6f2019-04-23 14:51:50 -07001320 // Randomly append bytes to the cache entry.
1321 void appendBytesToCache(const std::string& filename, bool* skip) {
1322 FILE* pFile = fopen(filename.c_str(), "a");
1323 uint32_t appendLength = getRandomInt(1, 256);
1324 for (uint32_t i = 0; i < appendLength; i++) {
1325 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1326 }
1327 fclose(pFile);
1328 *skip = false;
1329 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001330
Xusong Wange371f6f2019-04-23 14:51:50 -07001331 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001332
Xusong Wange371f6f2019-04-23 14:51:50 -07001333 // Test if the driver behaves as expected when given corrupted cache or token.
1334 // The modifier will be invoked after save to cache but before prepare from cache.
1335 // The modifier accepts one pointer argument "skip" as the returning value, indicating
1336 // whether the test should be skipped or not.
1337 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wang4f71afc2019-04-26 15:33:38 -07001338 const Model testModel = createTestModel();
1339 if (checkEarlyTermination(testModel)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001340
Xusong Wanged0822b2019-02-25 16:58:58 -08001341 // Save the compilation to cache.
1342 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001343 hidl_vec<hidl_handle> modelCache, dataCache;
1344 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1345 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wang4f71afc2019-04-26 15:33:38 -07001346 saveModelToCache(testModel, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001347 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001348
Xusong Wange371f6f2019-04-23 14:51:50 -07001349 bool skip = false;
1350 modifier(&skip);
1351 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001352
Xusong Wange371f6f2019-04-23 14:51:50 -07001353 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001354 {
1355 sp<IPreparedModel> preparedModel = nullptr;
1356 ErrorStatus status;
1357 hidl_vec<hidl_handle> modelCache, dataCache;
1358 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1359 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1360 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001361
1362 switch (expected) {
1363 case ExpectedResult::GENERAL_FAILURE:
1364 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1365 ASSERT_EQ(preparedModel, nullptr);
1366 break;
1367 case ExpectedResult::NOT_CRASH:
1368 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1369 break;
1370 default:
1371 FAIL();
1372 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001373 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001374 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001375
Xusong Wang0e0721f2019-05-07 12:57:49 -07001376 const uint32_t kSeed = std::get<1>(GetParam());
Xusong Wange371f6f2019-04-23 14:51:50 -07001377 std::mt19937 generator;
1378};
1379
1380TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1381 if (!mIsCachingSupported) return;
1382 for (uint32_t i = 0; i < mNumModelCache; i++) {
1383 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1384 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1385 }
1386}
1387
1388TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1389 if (!mIsCachingSupported) return;
1390 for (uint32_t i = 0; i < mNumModelCache; i++) {
1391 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1392 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1393 }
1394}
1395
1396TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1397 if (!mIsCachingSupported) return;
1398 for (uint32_t i = 0; i < mNumDataCache; i++) {
1399 testCorruptedCache(ExpectedResult::NOT_CRASH,
1400 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1401 }
1402}
1403
1404TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1405 if (!mIsCachingSupported) return;
1406 for (uint32_t i = 0; i < mNumDataCache; i++) {
1407 testCorruptedCache(ExpectedResult::NOT_CRASH,
1408 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1409 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001410}
1411
1412TEST_P(CompilationCachingSecurityTest, WrongToken) {
1413 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001414 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1415 // Randomly flip one single bit in mToken.
1416 uint32_t ind =
1417 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1418 mToken[ind] ^= (1U << getRandomInt(0, 7));
1419 *skip = false;
1420 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001421}
1422
// Run each security test over both operand types and ten different RNG seeds.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        ::testing::Combine(kOperandTypeChoices, ::testing::Range(0U, 10U)));
Xusong Wang96e68dc2019-01-18 17:28:26 -08001425
1426} // namespace functional
1427} // namespace vts
1428} // namespace V1_2
1429} // namespace neuralnetworks
1430} // namespace hardware
1431} // namespace android