/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "neuralnetworks_hidl_hal_test"

#include <android-base/logging.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <ftw.h>
#include <gtest/gtest.h>
#include <hidlmemory/mapping.h>
#include <unistd.h>

#include <cstdio>
#include <cstdlib>
#include <random>
#include <thread>

#include "1.2/Callbacks.h"
#include "GeneratedTestHarness.h"
#include "MemoryUtils.h"
#include "TestHarness.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"

// Forward declarations of the generated MobileNet test models in
// frameworks/ml/nn/runtime/test/generated/.
namespace generated_tests::mobilenet_224_gender_basic_fixed {
const ::test_helper::TestModel& get_test_model();
}  // namespace generated_tests::mobilenet_224_gender_basic_fixed

namespace generated_tests::mobilenet_quantized {
const ::test_helper::TestModel& get_test_model();
}  // namespace generated_tests::mobilenet_quantized

namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_2 {
namespace vts {
namespace functional {

using namespace test_helper;
using ::android::hardware::neuralnetworks::V1_0::OperandLifeTime;
using ::android::hardware::neuralnetworks::V1_1::ExecutionPreference;
using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
using ::android::hidl::memory::V1_0::IMemory;
using ::android::nn::allocateSharedMemory;

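// Float32 and quant8 variants of the MobileNet test model; CompilationCachingTestBase below
// selects between them based on kOperandType.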
namespace float32_model {

constexpr auto get_test_model = ::generated_tests::mobilenet_224_gender_basic_fixed::get_test_model;

}  // namespace float32_model

namespace quant8_model {

constexpr auto get_test_model = ::generated_tests::mobilenet_quantized::get_test_model;

}  // namespace quant8_model

namespace {

enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };

// Creates cache handles based on provided file groups.
// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
                        const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
    handles->resize(fileGroups.size());
    for (uint32_t i = 0; i < fileGroups.size(); i++) {
        std::vector<int> fds;
        for (const auto& file : fileGroups[i]) {
            int fd;
            if (mode[i] == AccessMode::READ_ONLY) {
                fd = open(file.c_str(), O_RDONLY);
            } else if (mode[i] == AccessMode::WRITE_ONLY) {
                fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
            } else if (mode[i] == AccessMode::READ_WRITE) {
                fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
            } else {
                FAIL();
            }
            ASSERT_GE(fd, 0);
            fds.push_back(fd);
        }
        native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
        ASSERT_NE(cacheNativeHandle, nullptr);
        std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
        (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
    }
}

void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
                        hidl_vec<hidl_handle>* handles) {
    createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
}

// Create a chain of broadcast operations. The second operand is always the constant tensor [1].
// For simplicity, the activation scalar is shared. The second operand is not shared in the model,
// so that the driver has to maintain a non-trivial amount of constant data and the corresponding
// data locations in the cache.
//
//                --------- activation ---------
//                ↓      ↓      ↓             ↓
// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
//                ↑      ↑      ↑             ↑
//               [1]    [1]    [1]           [1]
//
// This function assumes the operation is either ADD or MUL.
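// Operand layout: operand 0 is the shared activation scalar; for operation i, operands 2*i+1 and
// 2*i+2 are its first and second inputs, and operand 2*i+3 is its output. Operand 1 is the model
// input and operand 2*len+1 (the last operand) is the model output.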
template <typename CppType, TestOperandType operandType>
TestModel createLargeTestModelImpl(TestOperationType op, uint32_t len) {
    EXPECT_TRUE(op == TestOperationType::ADD || op == TestOperationType::MUL);

    // Model operations and operands.
    std::vector<TestOperation> operations(len);
    std::vector<TestOperand> operands(len * 2 + 2);

    // The activation scalar, value = 0.
    operands[0] = {
            .type = TestOperandType::INT32,
            .dimensions = {},
            .numberOfConsumers = len,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = TestOperandLifeTime::CONSTANT_COPY,
            .data = TestBuffer::createFromVector<int32_t>({0}),
    };

    // The buffer value of the constant second operand. The logical value is always 1.0f.
    CppType bufferValue;
    // The scale of the first and second operand.
    float scale1, scale2;
    if (operandType == TestOperandType::TENSOR_FLOAT32) {
        bufferValue = 1.0f;
        scale1 = 0.0f;
        scale2 = 0.0f;
    } else if (op == TestOperationType::ADD) {
        bufferValue = 1;
        scale1 = 1.0f;
        scale2 = 1.0f;
    } else {
        // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
        // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
        bufferValue = 2;
        scale1 = 1.0f;
        scale2 = 0.5f;
    }

    for (uint32_t i = 0; i < len; i++) {
        const uint32_t firstInputIndex = i * 2 + 1;
        const uint32_t secondInputIndex = firstInputIndex + 1;
        const uint32_t outputIndex = secondInputIndex + 1;

        // The first operation input.
        operands[firstInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale1,
                .zeroPoint = 0,
                .lifetime = (i == 0 ? TestOperandLifeTime::MODEL_INPUT
                                    : TestOperandLifeTime::TEMPORARY_VARIABLE),
                .data = (i == 0 ? TestBuffer::createFromVector<CppType>({1}) : TestBuffer()),
        };

        // The second operation input, value = 1.
        operands[secondInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale2,
                .zeroPoint = 0,
                .lifetime = TestOperandLifeTime::CONSTANT_COPY,
                .data = TestBuffer::createFromVector<CppType>({bufferValue}),
        };

        // The operation. All operations share the same activation scalar.
        // For all but the last operation in the chain, the output operand is created as an input
        // in the next loop iteration; for the last operation, it is created after the loop as the
        // model output.
        operations[i] = {
                .type = op,
                .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
                .outputs = {outputIndex},
        };
    }

    // For TestOperationType::ADD, output = 1 + 1 * len = len + 1
    // For TestOperationType::MUL, output = 1 * 1 ^ len = 1
    CppType outputResult = static_cast<CppType>(op == TestOperationType::ADD ? len + 1u : 1u);

    // The model output.
    operands.back() = {
            .type = operandType,
            .dimensions = {1},
            .numberOfConsumers = 0,
            .scale = scale1,
            .zeroPoint = 0,
            .lifetime = TestOperandLifeTime::MODEL_OUTPUT,
            .data = TestBuffer::createFromVector<CppType>({outputResult}),
    };

    return {
            .operands = std::move(operands),
            .operations = std::move(operations),
            .inputIndexes = {1},
            .outputIndexes = {len * 2 + 1},
            .isRelaxed = false,
    };
}

}  // namespace

// Base class for the compilation caching tests.
class CompilationCachingTestBase : public NeuralnetworksHidlTest {
  protected:
    CompilationCachingTestBase(OperandType type) : kOperandType(type) {}

    void SetUp() override {
        NeuralnetworksHidlTest::SetUp();
        ASSERT_NE(device.get(), nullptr);

        // Create the cache directory. The cache directory and a temporary cache file are always
        // created to test the behavior of prepareModelFromCache, even when caching is not
        // supported.
        char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
        char* cacheDir = mkdtemp(cacheDirTemp);
        ASSERT_NE(cacheDir, nullptr);
        mCacheDir = cacheDir;
        mCacheDir.push_back('/');

        Return<void> ret = device->getNumberOfCacheFilesNeeded(
                [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
                    EXPECT_EQ(ErrorStatus::NONE, status);
                    mNumModelCache = numModelCache;
                    mNumDataCache = numDataCache;
                });
        EXPECT_TRUE(ret.isOk());
        mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;

        // Create empty cache files.
        mTmpCache = mCacheDir + "tmp";
        for (uint32_t i = 0; i < mNumModelCache; i++) {
            mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
        }
        for (uint32_t i = 0; i < mNumDataCache; i++) {
            mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
        }
        // Dummy handles; use AccessMode::WRITE_ONLY so that createCacheHandles creates the files.
        hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
        createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
        createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
        createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);

        if (!mIsCachingSupported) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
                         "support compilation caching.";
            std::cout << "[          ]   Early termination of test because vendor service does not "
                         "support compilation caching."
                      << std::endl;
        }
    }

    void TearDown() override {
        // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
        if (!::testing::Test::HasFailure()) {
            // Recursively remove the cache directory specified by mCacheDir.
            auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
                return remove(entry);
            };
            nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
        }
        NeuralnetworksHidlTest::TearDown();
    }

    // Test model creators. Depending on kOperandType, the following methods return either the
    // float32 variant or the quant8 variant of the test model.
    TestModel createTestModel() {
        if (kOperandType == OperandType::TENSOR_FLOAT32) {
            return float32_model::get_test_model();
        } else {
            return quant8_model::get_test_model();
        }
    }

    TestModel createLargeTestModel(OperationType op, uint32_t len) {
        if (kOperandType == OperandType::TENSOR_FLOAT32) {
            return createLargeTestModelImpl<float, TestOperandType::TENSOR_FLOAT32>(
                    static_cast<TestOperationType>(op), len);
        } else {
            return createLargeTestModelImpl<uint8_t, TestOperandType::TENSOR_QUANT8_ASYMM>(
                    static_cast<TestOperationType>(op), len);
        }
    }

    // See if the service can handle the model.
    bool isModelFullySupported(const V1_2::Model& model) {
        bool fullySupportsModel = false;
        Return<void> supportedCall = device->getSupportedOperations_1_2(
                model,
                [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
                    ASSERT_EQ(ErrorStatus::NONE, status);
                    ASSERT_EQ(supported.size(), model.operations.size());
                    fullySupportsModel = std::all_of(supported.begin(), supported.end(),
                                                     [](bool valid) { return valid; });
                });
        EXPECT_TRUE(supportedCall.isOk());
        return fullySupportsModel;
    }

    void saveModelToCache(const V1_2::Model& model, const hidl_vec<hidl_handle>& modelCache,
                          const hidl_vec<hidl_handle>& dataCache,
                          sp<IPreparedModel>* preparedModel = nullptr) {
        if (preparedModel != nullptr) *preparedModel = nullptr;

        // Launch prepare model.
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        ASSERT_NE(nullptr, preparedModelCallback.get());
        hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
        Return<ErrorStatus> prepareLaunchStatus =
                device->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER, modelCache,
                                         dataCache, cacheToken, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);

        // Retrieve prepared model.
        preparedModelCallback->wait();
        ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
        if (preparedModel != nullptr) {
            *preparedModel =
                    V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
                            .withDefault(nullptr);
        }
    }

    bool checkEarlyTermination(ErrorStatus status) {
        if (status == ErrorStatus::GENERAL_FAILURE) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "save the prepared model that it does not support.";
            std::cout << "[          ]   Early termination of test because vendor service cannot "
                         "save the prepared model that it does not support."
                      << std::endl;
            return true;
        }
        return false;
    }

    bool checkEarlyTermination(const V1_2::Model& model) {
        if (!isModelFullySupported(model)) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "prepare model that it does not support.";
            std::cout << "[          ]   Early termination of test because vendor service cannot "
                         "prepare model that it does not support."
                      << std::endl;
            return true;
        }
        return false;
    }

    void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
                               const hidl_vec<hidl_handle>& dataCache,
                               sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
        // Launch prepare model from cache.
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        ASSERT_NE(nullptr, preparedModelCallback.get());
        hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
        Return<ErrorStatus> prepareLaunchStatus = device->prepareModelFromCache(
                modelCache, dataCache, cacheToken, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
            *preparedModel = nullptr;
            *status = static_cast<ErrorStatus>(prepareLaunchStatus);
            return;
        }

        // Retrieve prepared model.
        preparedModelCallback->wait();
        *status = preparedModelCallback->getStatus();
        *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
                                 .withDefault(nullptr);
    }

    // Absolute path to the temporary cache directory.
    std::string mCacheDir;

    // Groups of file paths for model and data cache in the tmp cache directory, initialized with
    // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
    // and the inner vector is for fds held by each handle.
    std::vector<std::vector<std::string>> mModelCache;
    std::vector<std::vector<std::string>> mDataCache;

    // A separate temporary file path in the tmp cache directory.
    std::string mTmpCache;

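    // The cache token passed to prepareModel_1_2 and prepareModelFromCache. It is zero-initialized
    // here; individual tests may modify it (e.g. the TOCTOU tests increment mToken[0]) so that
    // different models use different tokens.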
    uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
    uint32_t mNumModelCache;
    uint32_t mNumDataCache;
    uint32_t mIsCachingSupported;

    // The primary data type of the testModel.
    const OperandType kOperandType;
};

// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
// pass running with float32 models and the second pass running with quant8 models.
class CompilationCachingTest : public CompilationCachingTestBase,
                               public ::testing::WithParamInterface<OperandType> {
  protected:
    CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
};

TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = generated_tests::createModel(testModel);
    if (checkEarlyTermination(model)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(model, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                           /*testDynamicOutputShape=*/false);
}

TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = generated_tests::createModel(testModel);
    if (checkEarlyTermination(model)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyBytes[] = {0, 0};
        // Write two dummy bytes to each cache file.
        // The driver should be able to handle non-empty cache and non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
                            sizeof(dummyBytes)),
                      sizeof(dummyBytes));
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_EQ(
                    write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
                    sizeof(dummyBytes));
        }
        saveModelToCache(model, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyByte = 0;
        // Advance the offset of each handle by one byte.
        // The driver should be able to handle non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                           /*testDynamicOutputShape=*/false);
}

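// The tests below pass malformed cache handle vectors: a wrong number of cache files, a wrong
// number of fds per handle, or a wrong access mode. In every case prepareModel_1_2 must still
// return a usable prepared model despite the bad handles, while prepareModelFromCache must fail
// and must not return a prepared model.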
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = generated_tests::createModel(testModel);
    if (checkEarlyTermination(model)) return;

    // Test with number of model cache files greater than mNumModelCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for model cache.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for data cache.
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = generated_tests::createModel(testModel);
    if (checkEarlyTermination(model)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(model, modelCache, dataCache);
    }

    // Test with number of model cache files greater than mNumModelCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = generated_tests::createModel(testModel);
    if (checkEarlyTermination(model)) return;

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = generated_tests::createModel(testModel);
    if (checkEarlyTermination(model)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(model, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = generated_tests::createModel(testModel);
    if (checkEarlyTermination(model)) return;
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::READ_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        generated_tests::EvaluatePreparedModel(preparedModel, testModel,
                                               /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}

TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = generated_tests::createModel(testModel);
    if (checkEarlyTermination(model)) return;
    std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
    std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(model, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        modelCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        modelCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with invalid access mode.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        dataCacheMode[i] = AccessMode::WRITE_ONLY;
        createCacheHandles(mModelCache, modelCacheMode, &modelCache);
        createCacheHandles(mDataCache, dataCacheMode, &dataCache);
        dataCacheMode[i] = AccessMode::READ_WRITE;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}

// Copy file contents between file groups.
// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
// The outer vector sizes must match and the inner vectors must have size = 1.
static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
                           const std::vector<std::vector<std::string>>& to) {
    constexpr size_t kBufferSize = 1000000;
    uint8_t buffer[kBufferSize];

    ASSERT_EQ(from.size(), to.size());
    for (uint32_t i = 0; i < from.size(); i++) {
        ASSERT_EQ(from[i].size(), 1u);
        ASSERT_EQ(to[i].size(), 1u);
        int fromFd = open(from[i][0].c_str(), O_RDONLY);
        int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
        ASSERT_GE(fromFd, 0);
        ASSERT_GE(toFd, 0);

        ssize_t readBytes;
        while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
            ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
        }
        ASSERT_GE(readBytes, 0);

        close(fromFd);
        close(toFd);
    }
}

// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
constexpr uint32_t kNumIterationsTOCTOU = 100;

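// The TOCTOU (time-of-check to time-of-use) tests below overwrite the cache files concurrently
// with the save and prepare calls. The driver may reject the tampered cache, but it must not
// crash; if it does return a prepared model, that model must still produce correct results.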
TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    const Model modelMul = generated_tests::createModel(testModelMul);
    if (checkEarlyTermination(modelMul)) return;
    const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    const Model modelAdd = generated_tests::createModel(testModelAdd);
    if (checkEarlyTermination(modelAdd)) return;

    // Save the modelMul compilation to cache.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelMul, modelCache, dataCache);
    }

    // Use a different token for modelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the modelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(modelAdd, modelCache, dataCache);
            thread.join();
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(preparedModel, testModelAdd,
                                                       /*testDynamicOutputShape=*/false);
            }
        }
    }
}

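// Same setup as SaveToCache_TOCTOU, except that the concurrent file copy races with the
// prepare-from-cache step instead of the save-to-cache step.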
TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    const Model modelMul = generated_tests::createModel(testModelMul);
    if (checkEarlyTermination(modelMul)) return;
    const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    const Model modelAdd = generated_tests::createModel(testModelAdd);
    if (checkEarlyTermination(modelAdd)) return;

    // Save the modelMul compilation to cache.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelMul, modelCache, dataCache);
    }

    // Use a different token for modelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the modelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(modelAdd, modelCache, dataCache);
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                generated_tests::EvaluatePreparedModel(preparedModel, testModelAdd,
                                                       /*testDynamicOutputShape=*/false);
            }
        }
    }
}

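// Unlike the TOCTOU tests above, the cache files are swapped only after saveModelToCache has
// returned, so the mismatch is guaranteed to be visible to prepareModelFromCache. Because the
// model cache is security sensitive, the test expects the driver to detect the mismatch and
// report GENERAL_FAILURE.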
TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    const Model modelMul = generated_tests::createModel(testModelMul);
    if (checkEarlyTermination(modelMul)) return;
    const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    const Model modelAdd = generated_tests::createModel(testModelAdd);
    if (checkEarlyTermination(modelAdd)) return;

    // Save the modelMul compilation to cache.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelMul, modelCache, dataCache);
    }

    // Use a different token for modelAdd.
    mToken[0]++;

    // Save the modelAdd compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelAdd, modelCache, dataCache);
    }

    // Replace the model cache of modelAdd with modelMul.
    copyCacheFiles(modelCacheMul, mModelCache);

    // Retrieve the preparedModel from cache, expect failure.
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}

static const auto kOperandTypeChoices =
        ::testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);

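// Run the compilation caching tests above once per operand data type.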
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);

class CompilationCachingSecurityTest
    : public CompilationCachingTestBase,
      public ::testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
  protected:
    CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}

    void SetUp() {
        CompilationCachingTestBase::SetUp();
        generator.seed(kSeed);
    }

    // Get a random integer within a closed range [lower, upper].
    template <typename T>
    T getRandomInt(T lower, T upper) {
        std::uniform_int_distribution<T> dis(lower, upper);
        return dis(generator);
    }

    // Randomly flip a single bit of the cache entry.
    void flipOneBitOfCache(const std::string& filename, bool* skip) {
        FILE* pFile = fopen(filename.c_str(), "r+");
        ASSERT_NE(pFile, nullptr);
        ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
        long int fileSize = ftell(pFile);
        if (fileSize == 0) {
            fclose(pFile);
            *skip = true;
            return;
        }
        ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
        int readByte = fgetc(pFile);
        ASSERT_NE(readByte, EOF);
        ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
        ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
        fclose(pFile);
        *skip = false;
    }

    // Randomly append bytes to the cache entry.
    void appendBytesToCache(const std::string& filename, bool* skip) {
        FILE* pFile = fopen(filename.c_str(), "a");
        ASSERT_NE(pFile, nullptr);
        uint32_t appendLength = getRandomInt(1, 256);
        for (uint32_t i = 0; i < appendLength; i++) {
            // fputc takes an int, and std::uniform_int_distribution is not specified for 8-bit
            // types, so draw a plain int in [0, 255].
            ASSERT_NE(fputc(getRandomInt(0, 255), pFile), EOF);
        }
        fclose(pFile);
        *skip = false;
    }

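    // Expected outcome when preparing from a corrupted cache: corrupting the model cache, which
    // is security sensitive, must result in GENERAL_FAILURE, while corrupting the data cache
    // only requires that the driver does not crash (the preparation may still succeed or fail).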
    enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };

    // Tests whether the driver behaves as expected when given a corrupted cache or token.
    // The modifier is invoked after saving to the cache but before preparing from the cache.
    // The modifier reports back through its single output parameter "skip", which is set to true
    // when the test should be skipped (e.g., because the cache entry is empty).
    void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
        const TestModel& testModel = createTestModel();
        const Model model = generated_tests::createModel(testModel);
        if (checkEarlyTermination(model)) return;

        // Save the compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(model, modelCache, dataCache);
        }

        bool skip = false;
        modifier(&skip);
        if (skip) return;

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            switch (expected) {
                case ExpectedResult::GENERAL_FAILURE:
                    ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
                    ASSERT_EQ(preparedModel, nullptr);
                    break;
                case ExpectedResult::NOT_CRASH:
                    ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
                    break;
                default:
                    FAIL();
            }
        }
    }

    const uint32_t kSeed = std::get<1>(GetParam());
    std::mt19937 generator;
};

TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
    if (!mIsCachingSupported) return;
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
                           [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
    }
}

TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
    if (!mIsCachingSupported) return;
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
                           [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
    }
}

TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
    if (!mIsCachingSupported) return;
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        testCorruptedCache(ExpectedResult::NOT_CRASH,
                           [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
    }
}

TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
    if (!mIsCachingSupported) return;
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        testCorruptedCache(ExpectedResult::NOT_CRASH,
                           [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
    }
}

TEST_P(CompilationCachingSecurityTest, WrongToken) {
    if (!mIsCachingSupported) return;
    testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
        // Randomly flip a single bit of mToken.
        uint32_t ind =
                getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
        mToken[ind] ^= (1U << getRandomInt(0, 7));
        *skip = false;
    });
}

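// Run each security test above for every combination of operand data type and RNG seed
// (seeds 0 through 9).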
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        ::testing::Combine(kOperandTypeChoices, ::testing::Range(0U, 10U)));

}  // namespace functional
}  // namespace vts
}  // namespace V1_2
}  // namespace neuralnetworks
}  // namespace hardware
}  // namespace android