blob: bb46e06d6fa58e6c3e8fdb3a0cab2d46462f6358 [file] [log] [blame]
Xusong Wang96e68dc2019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070019#include <android-base/logging.h>
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070020#include <ftw.h>
21#include <gtest/gtest.h>
22#include <hidlmemory/mapping.h>
23#include <unistd.h>
24
25#include <cstdio>
26#include <cstdlib>
27#include <random>
Michael Butler051cf392019-07-16 16:52:06 -070028#include <thread>
Xusong Wang96e68dc2019-01-18 17:28:26 -080029
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010030#include "1.2/Callbacks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080031#include "GeneratedTestHarness.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010032#include "MemoryUtils.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080033#include "TestHarness.h"
34#include "Utils.h"
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070035#include "VtsHalNeuralnetworks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080036
Xusong Wangead950d2019-08-09 16:45:24 -070037// Forward declaration of the mobilenet generated test models in
38// frameworks/ml/nn/runtime/test/generated/.
Slava Shklyaev0da5c342019-07-17 15:50:57 +010039namespace generated_tests::mobilenet_224_gender_basic_fixed {
Xusong Wangead950d2019-08-09 16:45:24 -070040const ::test_helper::TestModel& get_test_model();
Slava Shklyaev0da5c342019-07-17 15:50:57 +010041} // namespace generated_tests::mobilenet_224_gender_basic_fixed
Slava Shklyaeve8b24462019-07-17 15:50:57 +010042
43namespace generated_tests::mobilenet_quantized {
Xusong Wangead950d2019-08-09 16:45:24 -070044const ::test_helper::TestModel& get_test_model();
Slava Shklyaeve8b24462019-07-17 15:50:57 +010045} // namespace generated_tests::mobilenet_quantized
46
Michael Butler62749b92019-08-26 23:55:47 -070047namespace android::hardware::neuralnetworks::V1_2::vts::functional {
Xusong Wang96e68dc2019-01-18 17:28:26 -080048
Xusong Wangead950d2019-08-09 16:45:24 -070049using namespace test_helper;
Michael Butler62749b92019-08-26 23:55:47 -070050using implementation::PreparedModelCallback;
51using V1_0::ErrorStatus;
52using V1_1::ExecutionPreference;
Xusong Wang96e68dc2019-01-18 17:28:26 -080053
namespace float32_model {

// Alias to the float32 mobilenet generated model; used as the "standard" test model when the
// fixture is parameterized with OperandType::TENSOR_FLOAT32.
constexpr auto get_test_model = ::generated_tests::mobilenet_224_gender_basic_fixed::get_test_model;

}  // namespace float32_model
59
namespace quant8_model {

// Alias to the quantized mobilenet generated model; used when the fixture is parameterized with
// a quant8 operand type.
constexpr auto get_test_model = ::generated_tests::mobilenet_quantized::get_test_model;

}  // namespace quant8_model
65
66namespace {
67
Xusong Wanged0822b2019-02-25 16:58:58 -080068enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };
Xusong Wang96e68dc2019-01-18 17:28:26 -080069
// Creates cache handles based on provided file groups.
// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
//
// Precondition: mode.size() must equal fileGroups.size(); mode[i] selects how every file in
// group i is opened (READ_ONLY, WRITE_ONLY + create, or READ_WRITE + create).
// On success, (*handles)[i] owns one native_handle_t holding one fd per file in group i; the
// hidl_handle takes ownership (shouldOwn=true), so the fds are closed when the handle is
// destroyed. Uses gtest ASSERT/FAIL, so it must be called from a void test context.
void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
                        const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
    handles->resize(fileGroups.size());
    for (uint32_t i = 0; i < fileGroups.size(); i++) {
        std::vector<int> fds;
        for (const auto& file : fileGroups[i]) {
            int fd;
            if (mode[i] == AccessMode::READ_ONLY) {
                fd = open(file.c_str(), O_RDONLY);
            } else if (mode[i] == AccessMode::WRITE_ONLY) {
                // Create the file (rw for owner) if it does not exist yet.
                fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
            } else if (mode[i] == AccessMode::READ_WRITE) {
                fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
            } else {
                FAIL();
            }
            ASSERT_GE(fd, 0);
            fds.push_back(fd);
        }
        // One native handle per group, carrying all of the group's fds and no ints.
        native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
        ASSERT_NE(cacheNativeHandle, nullptr);
        std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
        // shouldOwn=true: the hidl_handle closes the fds and frees the native handle.
        (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
    }
}
97
98void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
99 hidl_vec<hidl_handle>* handles) {
100 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800101}
102
// Create a chain of broadcast operations. The second operand is always constant tensor [1].
// For simplicity, activation scalar is shared. The second operand is not shared
// in the model to let driver maintain a non-trivial size of constant data and the corresponding
// data locations in cache.
//
//                --------- activation --------
//                 ↓        ↓        ↓        ↓
// E.g. input -> ADD  ->  ADD  ->  ADD -> ... -> ADD -> output
//                 ↑        ↑        ↑        ↑
//                [1]      [1]      [1]      [1]
//
// This function assumes the operation is either ADD or MUL.
//
// Operand layout: operands[0] is the shared activation scalar; for operation i,
// operands[2*i + 1] is its first input, operands[2*i + 2] is its constant second input, and
// operands[2*i + 3] is its output — which is also operation i+1's first input. The model input
// is operand 1 and the model output is operand 2*len + 1 (the last element of `operands`).
template <typename CppType, TestOperandType operandType>
TestModel createLargeTestModelImpl(TestOperationType op, uint32_t len) {
    EXPECT_TRUE(op == TestOperationType::ADD || op == TestOperationType::MUL);

    // Model operations and operands.
    std::vector<TestOperation> operations(len);
    std::vector<TestOperand> operands(len * 2 + 2);

    // The activation scalar, value = 0 (no activation), consumed by every operation.
    operands[0] = {
            .type = TestOperandType::INT32,
            .dimensions = {},
            .numberOfConsumers = len,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = TestOperandLifeTime::CONSTANT_COPY,
            .data = TestBuffer::createFromVector<int32_t>({0}),
    };

    // The buffer value of the constant second operand. The logical value is always 1.0f.
    CppType bufferValue;
    // The scale of the first and second operand.
    float scale1, scale2;
    if (operandType == TestOperandType::TENSOR_FLOAT32) {
        bufferValue = 1.0f;
        scale1 = 0.0f;
        scale2 = 0.0f;
    } else if (op == TestOperationType::ADD) {
        bufferValue = 1;
        scale1 = 1.0f;
        scale2 = 1.0f;
    } else {
        // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
        // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
        bufferValue = 2;
        scale1 = 1.0f;
        scale2 = 0.5f;
    }

    for (uint32_t i = 0; i < len; i++) {
        const uint32_t firstInputIndex = i * 2 + 1;
        const uint32_t secondInputIndex = firstInputIndex + 1;
        const uint32_t outputIndex = secondInputIndex + 1;

        // The first operation input: the model input for i == 0, otherwise the previous
        // operation's output (a temporary).
        operands[firstInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale1,
                .zeroPoint = 0,
                .lifetime = (i == 0 ? TestOperandLifeTime::MODEL_INPUT
                                    : TestOperandLifeTime::TEMPORARY_VARIABLE),
                .data = (i == 0 ? TestBuffer::createFromVector<CppType>({1}) : TestBuffer()),
        };

        // The second operation input, value = 1. Each operation gets its own constant copy so
        // the driver accumulates non-trivial constant data (see comment at top).
        operands[secondInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale2,
                .zeroPoint = 0,
                .lifetime = TestOperandLifeTime::CONSTANT_COPY,
                .data = TestBuffer::createFromVector<CppType>({bufferValue}),
        };

        // The operation. All operations share the same activation scalar.
        // The output operand is created as an input in the next iteration of the loop, in the case
        // of all but the last member of the chain; and after the loop as a model output, in the
        // case of the last member of the chain.
        operations[i] = {
                .type = op,
                .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
                .outputs = {outputIndex},
        };
    }

    // Expected result (stored as the output operand's reference data):
    // For TestOperationType::ADD, output = 1 + 1 * len = len + 1
    // For TestOperationType::MUL, output = 1 * 1 ^ len = 1
    CppType outputResult = static_cast<CppType>(op == TestOperationType::ADD ? len + 1u : 1u);

    // The model output.
    operands.back() = {
            .type = operandType,
            .dimensions = {1},
            .numberOfConsumers = 0,
            .scale = scale1,
            .zeroPoint = 0,
            .lifetime = TestOperandLifeTime::MODEL_OUTPUT,
            .data = TestBuffer::createFromVector<CppType>({outputResult}),
    };

    return {
            .operands = std::move(operands),
            .operations = std::move(operations),
            .inputIndexes = {1},
            .outputIndexes = {len * 2 + 1},
            .isRelaxed = false,
    };
}
216
Xusong Wang96e68dc2019-01-18 17:28:26 -0800217} // namespace
218
// Tag for the compilation caching tests.
//
// Base fixture for all compilation caching tests. SetUp creates a fresh temporary cache
// directory plus empty model/data cache files sized according to the driver's reported
// getNumberOfCacheFilesNeeded; TearDown recursively removes the directory when the test passed
// (and keeps it for debugging when it failed). Helper methods wrap the prepareModel_1_2 /
// prepareModelFromCache HIDL round trips so individual tests only manipulate cache handles.
class CompilationCachingTestBase : public NeuralnetworksHidlTest {
  protected:
    // `type` selects whether the float32 or quant8 model variant is used (see kOperandType).
    CompilationCachingTestBase(OperandType type) : kOperandType(type) {}

    void SetUp() override {
        NeuralnetworksHidlTest::SetUp();
        ASSERT_NE(kDevice.get(), nullptr);

        // Create cache directory. The cache directory and a temporary cache file is always created
        // to test the behavior of prepareModelFromCache, even when caching is not supported.
        char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
        char* cacheDir = mkdtemp(cacheDirTemp);
        ASSERT_NE(cacheDir, nullptr);
        mCacheDir = cacheDir;
        mCacheDir.push_back('/');

        // Query how many model/data cache files this driver needs.
        Return<void> ret = kDevice->getNumberOfCacheFilesNeeded(
                [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
                    EXPECT_EQ(ErrorStatus::NONE, status);
                    mNumModelCache = numModelCache;
                    mNumDataCache = numDataCache;
                });
        EXPECT_TRUE(ret.isOk());
        mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;

        // Create empty cache files.
        mTmpCache = mCacheDir + "tmp";
        for (uint32_t i = 0; i < mNumModelCache; i++) {
            mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
        }
        for (uint32_t i = 0; i < mNumDataCache; i++) {
            mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
        }
        // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
        hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
        createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
        createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
        createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);

        if (!mIsCachingSupported) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
                         "support compilation caching.";
            std::cout << "[ ] Early termination of test because vendor service does not "
                         "support compilation caching."
                      << std::endl;
        }
    }

    void TearDown() override {
        // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
        if (!testing::Test::HasFailure()) {
            // Recursively remove the cache directory specified by mCacheDir.
            auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
                return remove(entry);
            };
            // FTW_DEPTH visits children before their directory so remove() succeeds bottom-up.
            nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
        }
        NeuralnetworksHidlTest::TearDown();
    }

    // Model and examples creators. According to kOperandType, the following methods will return
    // either float32 model/examples or the quant8 variant.
    TestModel createTestModel() {
        if (kOperandType == OperandType::TENSOR_FLOAT32) {
            return float32_model::get_test_model();
        } else {
            return quant8_model::get_test_model();
        }
    }

    // Builds the chained ADD/MUL model (see createLargeTestModelImpl) with `len` operations,
    // using the element type that matches kOperandType.
    TestModel createLargeTestModel(OperationType op, uint32_t len) {
        if (kOperandType == OperandType::TENSOR_FLOAT32) {
            return createLargeTestModelImpl<float, TestOperandType::TENSOR_FLOAT32>(
                    static_cast<TestOperationType>(op), len);
        } else {
            return createLargeTestModelImpl<uint8_t, TestOperandType::TENSOR_QUANT8_ASYMM>(
                    static_cast<TestOperationType>(op), len);
        }
    }

    // See if the service can handle the model. Returns true only when every operation in the
    // model is reported supported by getSupportedOperations_1_2.
    bool isModelFullySupported(const Model& model) {
        bool fullySupportsModel = false;
        Return<void> supportedCall = kDevice->getSupportedOperations_1_2(
                model,
                [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
                    ASSERT_EQ(ErrorStatus::NONE, status);
                    ASSERT_EQ(supported.size(), model.operations.size());
                    fullySupportsModel = std::all_of(supported.begin(), supported.end(),
                                                     [](bool valid) { return valid; });
                });
        EXPECT_TRUE(supportedCall.isOk());
        return fullySupportsModel;
    }

    // Compiles `model` with prepareModel_1_2, passing the given cache handles and mToken so the
    // driver may write its compilation cache. Blocks until the async callback completes and
    // asserts success. If `preparedModel` is non-null it receives the resulting prepared model
    // (or nullptr until success).
    void saveModelToCache(const Model& model, const hidl_vec<hidl_handle>& modelCache,
                          const hidl_vec<hidl_handle>& dataCache,
                          sp<IPreparedModel>* preparedModel = nullptr) {
        if (preparedModel != nullptr) *preparedModel = nullptr;

        // Launch prepare model.
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
        Return<ErrorStatus> prepareLaunchStatus =
                kDevice->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER,
                                          modelCache, dataCache, cacheToken, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);

        // Retrieve prepared model.
        preparedModelCallback->wait();
        ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
        if (preparedModel != nullptr) {
            *preparedModel = IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
                                     .withDefault(nullptr);
        }
    }

    // Returns true (and logs) when `status` indicates the driver cannot save a model it does
    // not support — callers should end the test early in that case.
    bool checkEarlyTermination(ErrorStatus status) {
        if (status == ErrorStatus::GENERAL_FAILURE) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "save the prepared model that it does not support.";
            std::cout << "[ ] Early termination of test because vendor service cannot "
                         "save the prepared model that it does not support."
                      << std::endl;
            return true;
        }
        return false;
    }

    // Returns true (and logs) when the driver does not fully support `model` — callers should
    // end the test early in that case.
    bool checkEarlyTermination(const Model& model) {
        if (!isModelFullySupported(model)) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "prepare model that it does not support.";
            std::cout << "[ ] Early termination of test because vendor service cannot "
                         "prepare model that it does not support."
                      << std::endl;
            return true;
        }
        return false;
    }

    // Attempts to re-create a prepared model from the given cache handles and mToken via
    // prepareModelFromCache. On return, *status holds the launch or callback error status and
    // *preparedModel holds the prepared model (nullptr on failure).
    void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
                               const hidl_vec<hidl_handle>& dataCache,
                               sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
        // Launch prepare model from cache.
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
        Return<ErrorStatus> prepareLaunchStatus = kDevice->prepareModelFromCache(
                modelCache, dataCache, cacheToken, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
            *preparedModel = nullptr;
            *status = static_cast<ErrorStatus>(prepareLaunchStatus);
            return;
        }

        // Retrieve prepared model.
        preparedModelCallback->wait();
        *status = preparedModelCallback->getStatus();
        *preparedModel = IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
                                 .withDefault(nullptr);
    }

    // Absolute path to the temporary cache directory.
    std::string mCacheDir;

    // Groups of file paths for model and data cache in the tmp cache directory, initialized with
    // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
    // and the inner vector is for fds held by each handle.
    std::vector<std::vector<std::string>> mModelCache;
    std::vector<std::vector<std::string>> mDataCache;

    // A separate temporary file path in the tmp cache directory.
    std::string mTmpCache;

    // Cache token passed to prepareModel_1_2 / prepareModelFromCache; all zeros.
    uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
    // Driver-reported number of model/data cache files (from getNumberOfCacheFilesNeeded).
    uint32_t mNumModelCache;
    uint32_t mNumDataCache;
    // NOTE(review): assigned from a boolean expression in SetUp and used as a bool everywhere;
    // the uint32_t type looks accidental — consider changing to bool.
    uint32_t mIsCachingSupported;

    // The primary data type of the testModel.
    const OperandType kOperandType;
};
404
// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
// pass running with float32 models and the second pass running with quant8 models.
class CompilationCachingTest : public CompilationCachingTestBase,
                               public testing::WithParamInterface<OperandType> {
  protected:
    // Forwards the gtest parameter (the operand type) to the base fixture.
    CompilationCachingTest() : CompilationCachingTestBase(GetParam()) {}
};
412
// Round-trip test: compile the standard test model while letting the driver write its cache,
// then re-create the prepared model purely from the cache files and verify it still computes
// correct results. When caching is unsupported, prepareModelFromCache must fail with
// GENERAL_FAILURE.
TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = createModel(testModel);
    if (checkEarlyTermination(model)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(model, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            // Drivers without caching support must reject prepareModelFromCache.
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    EvaluatePreparedModel(preparedModel, testModel,
                          /*testDynamicOutputShape=*/false);
}
453
// Same round trip as CacheSavingAndRetrieval, but the cache fds are handed to the driver with
// non-empty content and non-zero read/write offsets: two dummy bytes are written before saving,
// and one byte is read (advancing the offset) before retrieving. The driver must tolerate both.
TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = createModel(testModel);
    if (checkEarlyTermination(model)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyBytes[] = {0, 0};
        // Write a dummy integer to the cache.
        // The driver should be able to handle non-empty cache and non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
                            sizeof(dummyBytes)),
                      sizeof(dummyBytes));
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_EQ(
                    write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
                    sizeof(dummyBytes));
        }
        saveModelToCache(model, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyByte = 0;
        // Advance the offset of each handle by one byte.
        // The driver should be able to handle non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    EvaluatePreparedModel(preparedModel, testModel,
                          /*testDynamicOutputShape=*/false);
}
516
// Passes the wrong number of cache files to prepareModel_1_2 (one extra / one missing, for both
// model cache and data cache). Saving must still succeed — the driver simply must not use the
// invalid cache arguments — and the resulting prepared model must compute correctly, but a
// subsequent prepareModelFromCache (with the correct file count) must fail because nothing
// valid was cached.
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = createModel(testModel);
    if (checkEarlyTermination(model)) return;

    // Test with number of model cache files greater than mNumModelCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for model cache.
        mModelCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either INVALID_ARGUMENT or GENERAL_FAILURE is acceptable here.
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of model cache files smaller than mNumModelCache.
    if (mModelCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mModelCache.back();
        mModelCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files greater than mNumDataCache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an additional cache file for data cache.
        mDataCache.push_back({mTmpCache});
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Test with number of data cache files smaller than mNumDataCache.
    if (mDataCache.size() > 0) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pop out the last cache file.
        auto tmp = mDataCache.back();
        mDataCache.pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache.push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
621
Xusong Wang0e0721f2019-05-07 12:57:49 -0700622TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800623 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700624 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700625 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700626 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800627
628 // Save the compilation to cache.
629 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800630 hidl_vec<hidl_handle> modelCache, dataCache;
631 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
632 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700633 saveModelToCache(model, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -0800634 }
635
636 // Test with number of model cache files greater than mNumModelCache.
637 {
638 sp<IPreparedModel> preparedModel = nullptr;
639 ErrorStatus status;
640 hidl_vec<hidl_handle> modelCache, dataCache;
641 mModelCache.push_back({mTmpCache});
642 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
643 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
644 mModelCache.pop_back();
645 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
646 if (status != ErrorStatus::GENERAL_FAILURE) {
647 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
648 }
649 ASSERT_EQ(preparedModel, nullptr);
650 }
651
652 // Test with number of model cache files smaller than mNumModelCache.
653 if (mModelCache.size() > 0) {
654 sp<IPreparedModel> preparedModel = nullptr;
655 ErrorStatus status;
656 hidl_vec<hidl_handle> modelCache, dataCache;
657 auto tmp = mModelCache.back();
658 mModelCache.pop_back();
659 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
660 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
661 mModelCache.push_back(tmp);
662 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
663 if (status != ErrorStatus::GENERAL_FAILURE) {
664 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
665 }
666 ASSERT_EQ(preparedModel, nullptr);
667 }
668
669 // Test with number of data cache files greater than mNumDataCache.
670 {
671 sp<IPreparedModel> preparedModel = nullptr;
672 ErrorStatus status;
673 hidl_vec<hidl_handle> modelCache, dataCache;
674 mDataCache.push_back({mTmpCache});
675 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
676 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
677 mDataCache.pop_back();
678 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
679 if (status != ErrorStatus::GENERAL_FAILURE) {
680 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
681 }
682 ASSERT_EQ(preparedModel, nullptr);
683 }
684
685 // Test with number of data cache files smaller than mNumDataCache.
686 if (mDataCache.size() > 0) {
687 sp<IPreparedModel> preparedModel = nullptr;
688 ErrorStatus status;
689 hidl_vec<hidl_handle> modelCache, dataCache;
690 auto tmp = mDataCache.back();
691 mDataCache.pop_back();
692 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
693 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
694 mDataCache.push_back(tmp);
695 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
696 if (status != ErrorStatus::GENERAL_FAILURE) {
697 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
698 }
699 ASSERT_EQ(preparedModel, nullptr);
700 }
701}
702
Xusong Wang0e0721f2019-05-07 12:57:49 -0700703TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800704 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700705 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700706 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700707 if (checkEarlyTermination(model)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800708
Xusong Wanged0822b2019-02-25 16:58:58 -0800709 // Go through each handle in model cache, test with NumFd greater than 1.
710 for (uint32_t i = 0; i < mNumModelCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800711 hidl_vec<hidl_handle> modelCache, dataCache;
712 // Pass an invalid number of fds for handle i.
713 mModelCache[i].push_back(mTmpCache);
714 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
715 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
716 mModelCache[i].pop_back();
717 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700718 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800719 ASSERT_NE(preparedModel, nullptr);
720 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700721 EvaluatePreparedModel(preparedModel, testModel,
722 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800723 // Check if prepareModelFromCache fails.
724 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800725 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800726 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
727 if (status != ErrorStatus::INVALID_ARGUMENT) {
728 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800729 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800730 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800731 }
732
Xusong Wanged0822b2019-02-25 16:58:58 -0800733 // Go through each handle in model cache, test with NumFd equal to 0.
734 for (uint32_t i = 0; i < mNumModelCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800735 hidl_vec<hidl_handle> modelCache, dataCache;
736 // Pass an invalid number of fds for handle i.
737 auto tmp = mModelCache[i].back();
738 mModelCache[i].pop_back();
739 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
740 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
741 mModelCache[i].push_back(tmp);
742 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700743 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800744 ASSERT_NE(preparedModel, nullptr);
745 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700746 EvaluatePreparedModel(preparedModel, testModel,
747 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800748 // Check if prepareModelFromCache fails.
749 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800750 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800751 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
752 if (status != ErrorStatus::INVALID_ARGUMENT) {
753 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800754 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800755 ASSERT_EQ(preparedModel, nullptr);
756 }
757
758 // Go through each handle in data cache, test with NumFd greater than 1.
759 for (uint32_t i = 0; i < mNumDataCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800760 hidl_vec<hidl_handle> modelCache, dataCache;
761 // Pass an invalid number of fds for handle i.
762 mDataCache[i].push_back(mTmpCache);
763 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
764 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
765 mDataCache[i].pop_back();
766 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700767 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800768 ASSERT_NE(preparedModel, nullptr);
769 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700770 EvaluatePreparedModel(preparedModel, testModel,
771 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800772 // Check if prepareModelFromCache fails.
773 preparedModel = nullptr;
774 ErrorStatus status;
775 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
776 if (status != ErrorStatus::INVALID_ARGUMENT) {
777 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
778 }
779 ASSERT_EQ(preparedModel, nullptr);
780 }
781
782 // Go through each handle in data cache, test with NumFd equal to 0.
783 for (uint32_t i = 0; i < mNumDataCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800784 hidl_vec<hidl_handle> modelCache, dataCache;
785 // Pass an invalid number of fds for handle i.
786 auto tmp = mDataCache[i].back();
787 mDataCache[i].pop_back();
788 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
789 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
790 mDataCache[i].push_back(tmp);
791 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700792 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800793 ASSERT_NE(preparedModel, nullptr);
794 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700795 EvaluatePreparedModel(preparedModel, testModel,
796 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800797 // Check if prepareModelFromCache fails.
798 preparedModel = nullptr;
799 ErrorStatus status;
800 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
801 if (status != ErrorStatus::INVALID_ARGUMENT) {
802 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
803 }
804 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800805 }
806}
807
Xusong Wang0e0721f2019-05-07 12:57:49 -0700808TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800809 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700810 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700811 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700812 if (checkEarlyTermination(model)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800813
814 // Save the compilation to cache.
815 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800816 hidl_vec<hidl_handle> modelCache, dataCache;
817 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
818 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700819 saveModelToCache(model, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800820 }
821
Xusong Wanged0822b2019-02-25 16:58:58 -0800822 // Go through each handle in model cache, test with NumFd greater than 1.
823 for (uint32_t i = 0; i < mNumModelCache; i++) {
824 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800825 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800826 hidl_vec<hidl_handle> modelCache, dataCache;
827 mModelCache[i].push_back(mTmpCache);
828 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
829 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
830 mModelCache[i].pop_back();
831 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800832 if (status != ErrorStatus::GENERAL_FAILURE) {
833 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800834 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800835 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800836 }
837
Xusong Wanged0822b2019-02-25 16:58:58 -0800838 // Go through each handle in model cache, test with NumFd equal to 0.
839 for (uint32_t i = 0; i < mNumModelCache; i++) {
840 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800841 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800842 hidl_vec<hidl_handle> modelCache, dataCache;
843 auto tmp = mModelCache[i].back();
844 mModelCache[i].pop_back();
845 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
846 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
847 mModelCache[i].push_back(tmp);
848 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800849 if (status != ErrorStatus::GENERAL_FAILURE) {
850 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800851 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800852 ASSERT_EQ(preparedModel, nullptr);
853 }
854
855 // Go through each handle in data cache, test with NumFd greater than 1.
856 for (uint32_t i = 0; i < mNumDataCache; i++) {
857 sp<IPreparedModel> preparedModel = nullptr;
858 ErrorStatus status;
859 hidl_vec<hidl_handle> modelCache, dataCache;
860 mDataCache[i].push_back(mTmpCache);
861 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
862 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
863 mDataCache[i].pop_back();
864 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
865 if (status != ErrorStatus::GENERAL_FAILURE) {
866 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
867 }
868 ASSERT_EQ(preparedModel, nullptr);
869 }
870
871 // Go through each handle in data cache, test with NumFd equal to 0.
872 for (uint32_t i = 0; i < mNumDataCache; i++) {
873 sp<IPreparedModel> preparedModel = nullptr;
874 ErrorStatus status;
875 hidl_vec<hidl_handle> modelCache, dataCache;
876 auto tmp = mDataCache[i].back();
877 mDataCache[i].pop_back();
878 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
879 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
880 mDataCache[i].push_back(tmp);
881 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
882 if (status != ErrorStatus::GENERAL_FAILURE) {
883 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
884 }
885 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800886 }
887}
888
Xusong Wang0e0721f2019-05-07 12:57:49 -0700889TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800890 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700891 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700892 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700893 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800894 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
895 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800896
Xusong Wanged0822b2019-02-25 16:58:58 -0800897 // Go through each handle in model cache, test with invalid access mode.
898 for (uint32_t i = 0; i < mNumModelCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800899 hidl_vec<hidl_handle> modelCache, dataCache;
900 modelCacheMode[i] = AccessMode::READ_ONLY;
901 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
902 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
903 modelCacheMode[i] = AccessMode::READ_WRITE;
904 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700905 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800906 ASSERT_NE(preparedModel, nullptr);
907 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700908 EvaluatePreparedModel(preparedModel, testModel,
909 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800910 // Check if prepareModelFromCache fails.
911 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800912 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800913 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
914 if (status != ErrorStatus::INVALID_ARGUMENT) {
915 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
916 }
917 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800918 }
919
Xusong Wanged0822b2019-02-25 16:58:58 -0800920 // Go through each handle in data cache, test with invalid access mode.
921 for (uint32_t i = 0; i < mNumDataCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800922 hidl_vec<hidl_handle> modelCache, dataCache;
923 dataCacheMode[i] = AccessMode::READ_ONLY;
924 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
925 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
926 dataCacheMode[i] = AccessMode::READ_WRITE;
927 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700928 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800929 ASSERT_NE(preparedModel, nullptr);
930 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700931 EvaluatePreparedModel(preparedModel, testModel,
932 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800933 // Check if prepareModelFromCache fails.
934 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800935 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800936 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
937 if (status != ErrorStatus::INVALID_ARGUMENT) {
938 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
939 }
940 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800941 }
942}
943
Xusong Wang0e0721f2019-05-07 12:57:49 -0700944TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800945 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700946 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700947 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700948 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800949 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
950 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800951
952 // Save the compilation to cache.
953 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800954 hidl_vec<hidl_handle> modelCache, dataCache;
955 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
956 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700957 saveModelToCache(model, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800958 }
959
Xusong Wanged0822b2019-02-25 16:58:58 -0800960 // Go through each handle in model cache, test with invalid access mode.
961 for (uint32_t i = 0; i < mNumModelCache; i++) {
962 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800963 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800964 hidl_vec<hidl_handle> modelCache, dataCache;
965 modelCacheMode[i] = AccessMode::WRITE_ONLY;
966 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
967 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
968 modelCacheMode[i] = AccessMode::READ_WRITE;
969 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800970 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
971 ASSERT_EQ(preparedModel, nullptr);
972 }
973
Xusong Wanged0822b2019-02-25 16:58:58 -0800974 // Go through each handle in data cache, test with invalid access mode.
975 for (uint32_t i = 0; i < mNumDataCache; i++) {
976 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800977 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800978 hidl_vec<hidl_handle> modelCache, dataCache;
979 dataCacheMode[i] = AccessMode::WRITE_ONLY;
980 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
981 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
982 dataCacheMode[i] = AccessMode::READ_WRITE;
983 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800984 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
985 ASSERT_EQ(preparedModel, nullptr);
986 }
987}
988
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700989// Copy file contents between file groups.
990// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
991// The outer vector sizes must match and the inner vectors must have size = 1.
992static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
993 const std::vector<std::vector<std::string>>& to) {
994 constexpr size_t kBufferSize = 1000000;
995 uint8_t buffer[kBufferSize];
996
997 ASSERT_EQ(from.size(), to.size());
998 for (uint32_t i = 0; i < from.size(); i++) {
999 ASSERT_EQ(from[i].size(), 1u);
1000 ASSERT_EQ(to[i].size(), 1u);
1001 int fromFd = open(from[i][0].c_str(), O_RDONLY);
1002 int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
1003 ASSERT_GE(fromFd, 0);
1004 ASSERT_GE(toFd, 0);
1005
1006 ssize_t readBytes;
1007 while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
1008 ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
1009 }
1010 ASSERT_GE(readBytes, 0);
1011
1012 close(fromFd);
1013 close(toFd);
1014 }
1015}
1016
// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
// Number of times each probabilistic TOCTOU race test is repeated to raise
// the chance of actually hitting the race window.
constexpr uint32_t kNumIterationsTOCTOU = 100;
1020
Xusong Wang0e0721f2019-05-07 12:57:49 -07001021TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001022 if (!mIsCachingSupported) return;
1023
Xusong Wang4f71afc2019-04-26 15:33:38 -07001024 // Create test models and check if fully supported by the service.
Xusong Wangead950d2019-08-09 16:45:24 -07001025 const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001026 const Model modelMul = createModel(testModelMul);
Xusong Wangead950d2019-08-09 16:45:24 -07001027 if (checkEarlyTermination(modelMul)) return;
1028 const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001029 const Model modelAdd = createModel(testModelAdd);
Xusong Wangead950d2019-08-09 16:45:24 -07001030 if (checkEarlyTermination(modelAdd)) return;
Xusong Wang4f71afc2019-04-26 15:33:38 -07001031
Xusong Wangead950d2019-08-09 16:45:24 -07001032 // Save the modelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001033 auto modelCacheMul = mModelCache;
1034 for (auto& cache : modelCacheMul) {
1035 cache[0].append("_mul");
1036 }
1037 {
1038 hidl_vec<hidl_handle> modelCache, dataCache;
1039 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1040 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001041 saveModelToCache(modelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001042 }
1043
Xusong Wangead950d2019-08-09 16:45:24 -07001044 // Use a different token for modelAdd.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001045 mToken[0]++;
1046
1047 // This test is probabilistic, so we run it multiple times.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001048 for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
Xusong Wangead950d2019-08-09 16:45:24 -07001049 // Save the modelAdd compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001050 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001051 hidl_vec<hidl_handle> modelCache, dataCache;
1052 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1053 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1054
1055 // Spawn a thread to copy the cache content concurrently while saving to cache.
1056 std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
Xusong Wangead950d2019-08-09 16:45:24 -07001057 saveModelToCache(modelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001058 thread.join();
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001059 }
1060
1061 // Retrieve preparedModel from cache.
1062 {
1063 sp<IPreparedModel> preparedModel = nullptr;
1064 ErrorStatus status;
1065 hidl_vec<hidl_handle> modelCache, dataCache;
1066 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1067 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1068 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1069
1070 // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
1071 // the prepared model must be executed with the correct result and not crash.
1072 if (status != ErrorStatus::NONE) {
1073 ASSERT_EQ(preparedModel, nullptr);
1074 } else {
1075 ASSERT_NE(preparedModel, nullptr);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001076 EvaluatePreparedModel(preparedModel, testModelAdd,
1077 /*testDynamicOutputShape=*/false);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001078 }
1079 }
1080 }
1081}
1082
Xusong Wang0e0721f2019-05-07 12:57:49 -07001083TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001084 if (!mIsCachingSupported) return;
1085
Xusong Wang4f71afc2019-04-26 15:33:38 -07001086 // Create test models and check if fully supported by the service.
Xusong Wangead950d2019-08-09 16:45:24 -07001087 const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001088 const Model modelMul = createModel(testModelMul);
Xusong Wangead950d2019-08-09 16:45:24 -07001089 if (checkEarlyTermination(modelMul)) return;
1090 const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001091 const Model modelAdd = createModel(testModelAdd);
Xusong Wangead950d2019-08-09 16:45:24 -07001092 if (checkEarlyTermination(modelAdd)) return;
Xusong Wang4f71afc2019-04-26 15:33:38 -07001093
Xusong Wangead950d2019-08-09 16:45:24 -07001094 // Save the modelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001095 auto modelCacheMul = mModelCache;
1096 for (auto& cache : modelCacheMul) {
1097 cache[0].append("_mul");
1098 }
1099 {
1100 hidl_vec<hidl_handle> modelCache, dataCache;
1101 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1102 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001103 saveModelToCache(modelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001104 }
1105
Xusong Wangead950d2019-08-09 16:45:24 -07001106 // Use a different token for modelAdd.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001107 mToken[0]++;
1108
1109 // This test is probabilistic, so we run it multiple times.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001110 for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
Xusong Wangead950d2019-08-09 16:45:24 -07001111 // Save the modelAdd compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001112 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001113 hidl_vec<hidl_handle> modelCache, dataCache;
1114 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1115 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001116 saveModelToCache(modelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001117 }
1118
1119 // Retrieve preparedModel from cache.
1120 {
1121 sp<IPreparedModel> preparedModel = nullptr;
1122 ErrorStatus status;
1123 hidl_vec<hidl_handle> modelCache, dataCache;
1124 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1125 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1126
1127 // Spawn a thread to copy the cache content concurrently while preparing from cache.
1128 std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
1129 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1130 thread.join();
1131
1132 // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
1133 // the prepared model must be executed with the correct result and not crash.
1134 if (status != ErrorStatus::NONE) {
1135 ASSERT_EQ(preparedModel, nullptr);
1136 } else {
1137 ASSERT_NE(preparedModel, nullptr);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001138 EvaluatePreparedModel(preparedModel, testModelAdd,
1139 /*testDynamicOutputShape=*/false);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001140 }
1141 }
1142 }
1143}
1144
Xusong Wang0e0721f2019-05-07 12:57:49 -07001145TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001146 if (!mIsCachingSupported) return;
1147
Xusong Wang4f71afc2019-04-26 15:33:38 -07001148 // Create test models and check if fully supported by the service.
Xusong Wangead950d2019-08-09 16:45:24 -07001149 const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001150 const Model modelMul = createModel(testModelMul);
Xusong Wangead950d2019-08-09 16:45:24 -07001151 if (checkEarlyTermination(modelMul)) return;
1152 const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001153 const Model modelAdd = createModel(testModelAdd);
Xusong Wangead950d2019-08-09 16:45:24 -07001154 if (checkEarlyTermination(modelAdd)) return;
Xusong Wang4f71afc2019-04-26 15:33:38 -07001155
Xusong Wangead950d2019-08-09 16:45:24 -07001156 // Save the modelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001157 auto modelCacheMul = mModelCache;
1158 for (auto& cache : modelCacheMul) {
1159 cache[0].append("_mul");
1160 }
1161 {
1162 hidl_vec<hidl_handle> modelCache, dataCache;
1163 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1164 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001165 saveModelToCache(modelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001166 }
1167
Xusong Wangead950d2019-08-09 16:45:24 -07001168 // Use a different token for modelAdd.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001169 mToken[0]++;
1170
Xusong Wangead950d2019-08-09 16:45:24 -07001171 // Save the modelAdd compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001172 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001173 hidl_vec<hidl_handle> modelCache, dataCache;
1174 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1175 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001176 saveModelToCache(modelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001177 }
1178
Xusong Wangead950d2019-08-09 16:45:24 -07001179 // Replace the model cache of modelAdd with modelMul.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001180 copyCacheFiles(modelCacheMul, mModelCache);
1181
1182 // Retrieve the preparedModel from cache, expect failure.
1183 {
1184 sp<IPreparedModel> preparedModel = nullptr;
1185 ErrorStatus status;
1186 hidl_vec<hidl_handle> modelCache, dataCache;
1187 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1188 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1189 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1190 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1191 ASSERT_EQ(preparedModel, nullptr);
1192 }
1193}
1194
Xusong Wang0e0721f2019-05-07 12:57:49 -07001195static const auto kOperandTypeChoices =
Michael Butler13b05162019-08-29 22:17:24 -07001196 testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);
Xusong Wang0e0721f2019-05-07 12:57:49 -07001197
1198INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest, kOperandTypeChoices);
1199
1200class CompilationCachingSecurityTest
1201 : public CompilationCachingTestBase,
Michael Butler13b05162019-08-29 22:17:24 -07001202 public testing::WithParamInterface<std::tuple<OperandType, uint32_t>> {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001203 protected:
Xusong Wang0e0721f2019-05-07 12:57:49 -07001204 CompilationCachingSecurityTest() : CompilationCachingTestBase(std::get<0>(GetParam())) {}
1205
Xusong Wang96e68dc2019-01-18 17:28:26 -08001206 void SetUp() {
Xusong Wang0e0721f2019-05-07 12:57:49 -07001207 CompilationCachingTestBase::SetUp();
Xusong Wang96e68dc2019-01-18 17:28:26 -08001208 generator.seed(kSeed);
1209 }
1210
1211 // Get a random integer within a closed range [lower, upper].
1212 template <typename T>
1213 T getRandomInt(T lower, T upper) {
1214 std::uniform_int_distribution<T> dis(lower, upper);
1215 return dis(generator);
1216 }
1217
Xusong Wange371f6f2019-04-23 14:51:50 -07001218 // Randomly flip one single bit of the cache entry.
1219 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1220 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001221 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1222 long int fileSize = ftell(pFile);
1223 if (fileSize == 0) {
1224 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001225 *skip = true;
1226 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001227 }
1228 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1229 int readByte = fgetc(pFile);
1230 ASSERT_NE(readByte, EOF);
1231 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1232 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1233 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001234 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001235 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001236
Xusong Wange371f6f2019-04-23 14:51:50 -07001237 // Randomly append bytes to the cache entry.
1238 void appendBytesToCache(const std::string& filename, bool* skip) {
1239 FILE* pFile = fopen(filename.c_str(), "a");
1240 uint32_t appendLength = getRandomInt(1, 256);
1241 for (uint32_t i = 0; i < appendLength; i++) {
1242 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1243 }
1244 fclose(pFile);
1245 *skip = false;
1246 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001247
Xusong Wange371f6f2019-04-23 14:51:50 -07001248 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001249
Xusong Wange371f6f2019-04-23 14:51:50 -07001250 // Test if the driver behaves as expected when given corrupted cache or token.
1251 // The modifier will be invoked after save to cache but before prepare from cache.
1252 // The modifier accepts one pointer argument "skip" as the returning value, indicating
1253 // whether the test should be skipped or not.
1254 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wangead950d2019-08-09 16:45:24 -07001255 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -07001256 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -07001257 if (checkEarlyTermination(model)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001258
Xusong Wanged0822b2019-02-25 16:58:58 -08001259 // Save the compilation to cache.
1260 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001261 hidl_vec<hidl_handle> modelCache, dataCache;
1262 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1263 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001264 saveModelToCache(model, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001265 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001266
Xusong Wange371f6f2019-04-23 14:51:50 -07001267 bool skip = false;
1268 modifier(&skip);
1269 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001270
Xusong Wange371f6f2019-04-23 14:51:50 -07001271 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001272 {
1273 sp<IPreparedModel> preparedModel = nullptr;
1274 ErrorStatus status;
1275 hidl_vec<hidl_handle> modelCache, dataCache;
1276 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1277 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1278 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001279
1280 switch (expected) {
1281 case ExpectedResult::GENERAL_FAILURE:
1282 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1283 ASSERT_EQ(preparedModel, nullptr);
1284 break;
1285 case ExpectedResult::NOT_CRASH:
1286 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1287 break;
1288 default:
1289 FAIL();
1290 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001291 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001292 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001293
Xusong Wang0e0721f2019-05-07 12:57:49 -07001294 const uint32_t kSeed = std::get<1>(GetParam());
Xusong Wange371f6f2019-04-23 14:51:50 -07001295 std::mt19937 generator;
1296};
1297
1298TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1299 if (!mIsCachingSupported) return;
1300 for (uint32_t i = 0; i < mNumModelCache; i++) {
1301 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1302 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1303 }
1304}
1305
1306TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1307 if (!mIsCachingSupported) return;
1308 for (uint32_t i = 0; i < mNumModelCache; i++) {
1309 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1310 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1311 }
1312}
1313
1314TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1315 if (!mIsCachingSupported) return;
1316 for (uint32_t i = 0; i < mNumDataCache; i++) {
1317 testCorruptedCache(ExpectedResult::NOT_CRASH,
1318 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1319 }
1320}
1321
1322TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1323 if (!mIsCachingSupported) return;
1324 for (uint32_t i = 0; i < mNumDataCache; i++) {
1325 testCorruptedCache(ExpectedResult::NOT_CRASH,
1326 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1327 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001328}
1329
1330TEST_P(CompilationCachingSecurityTest, WrongToken) {
1331 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001332 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1333 // Randomly flip one single bit in mToken.
1334 uint32_t ind =
1335 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1336 mToken[ind] ^= (1U << getRandomInt(0, 7));
1337 *skip = false;
1338 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001339}
1340
// Run the security tests for every operand type choice crossed with ten
// different corruption-RNG seeds.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        testing::Combine(kOperandTypeChoices, testing::Range(0U, 10U)));
Xusong Wang96e68dc2019-01-18 17:28:26 -08001343
Michael Butler62749b92019-08-26 23:55:47 -07001344} // namespace android::hardware::neuralnetworks::V1_2::vts::functional