blob: 10dec791cfe137d6c6675fc16c1c942db1bdcd99 [file] [log] [blame]
Xusong Wang96e68dc2019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070019#include <android-base/logging.h>
Michael Butler07633282019-08-29 11:08:25 -070020#include <fcntl.h>
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070021#include <ftw.h>
22#include <gtest/gtest.h>
23#include <hidlmemory/mapping.h>
24#include <unistd.h>
25
26#include <cstdio>
27#include <cstdlib>
28#include <random>
Michael Butler051cf392019-07-16 16:52:06 -070029#include <thread>
Xusong Wang96e68dc2019-01-18 17:28:26 -080030
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010031#include "1.2/Callbacks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080032#include "GeneratedTestHarness.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010033#include "MemoryUtils.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080034#include "TestHarness.h"
35#include "Utils.h"
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070036#include "VtsHalNeuralnetworks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080037
// Forward declaration of the mobilenet generated test models in
// frameworks/ml/nn/runtime/test/generated/.
// The definitions are provided by the generated test sources linked into this test binary;
// only the accessor signatures are needed here.
namespace generated_tests::mobilenet_224_gender_basic_fixed {
const test_helper::TestModel& get_test_model();
}  // namespace generated_tests::mobilenet_224_gender_basic_fixed

namespace generated_tests::mobilenet_quantized {
const test_helper::TestModel& get_test_model();
}  // namespace generated_tests::mobilenet_quantized
47
Michael Butler62749b92019-08-26 23:55:47 -070048namespace android::hardware::neuralnetworks::V1_2::vts::functional {
Xusong Wang96e68dc2019-01-18 17:28:26 -080049
Xusong Wangead950d2019-08-09 16:45:24 -070050using namespace test_helper;
Michael Butler62749b92019-08-26 23:55:47 -070051using implementation::PreparedModelCallback;
52using V1_0::ErrorStatus;
53using V1_1::ExecutionPreference;
Xusong Wang96e68dc2019-01-18 17:28:26 -080054
// Aliases that let the test fixture select a float32 or quant8 mobilenet model through a
// uniform name (<namespace>::get_test_model), based on the primary operand type under test.
namespace float32_model {

constexpr auto get_test_model = generated_tests::mobilenet_224_gender_basic_fixed::get_test_model;

}  // namespace float32_model

namespace quant8_model {

constexpr auto get_test_model = generated_tests::mobilenet_quantized::get_test_model;

}  // namespace quant8_model
66
67namespace {
68
// File access mode used by createCacheHandles when opening the cache file descriptors.
enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };
Xusong Wang96e68dc2019-01-18 17:28:26 -080070
Xusong Wanged0822b2019-02-25 16:58:58 -080071// Creates cache handles based on provided file groups.
72// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
73void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
74 const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
75 handles->resize(fileGroups.size());
76 for (uint32_t i = 0; i < fileGroups.size(); i++) {
77 std::vector<int> fds;
78 for (const auto& file : fileGroups[i]) {
79 int fd;
80 if (mode[i] == AccessMode::READ_ONLY) {
81 fd = open(file.c_str(), O_RDONLY);
82 } else if (mode[i] == AccessMode::WRITE_ONLY) {
83 fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
84 } else if (mode[i] == AccessMode::READ_WRITE) {
85 fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
86 } else {
87 FAIL();
88 }
89 ASSERT_GE(fd, 0);
90 fds.push_back(fd);
Xusong Wang96e68dc2019-01-18 17:28:26 -080091 }
Xusong Wanged0822b2019-02-25 16:58:58 -080092 native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
93 ASSERT_NE(cacheNativeHandle, nullptr);
94 std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
95 (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
Xusong Wang96e68dc2019-01-18 17:28:26 -080096 }
Xusong Wanged0822b2019-02-25 16:58:58 -080097}
98
99void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
100 hidl_vec<hidl_handle>* handles) {
101 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800102}
103
// Create a chain of broadcast operations. The second operand is always constant tensor [1].
// For simplicity, activation scalar is shared. The second operand is not shared
// in the model to let driver maintain a non-trivial size of constant data and the corresponding
// data locations in cache.
//
//                   --------- activation ---------
//                   ↓      ↓      ↓            ↓
// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
//                ↑      ↑      ↑             ↑
//               [1]    [1]    [1]           [1]
//
// This function assumes the operation is either ADD or MUL.
//
// Operand layout: index 0 is the shared activation scalar; for chain element i, indices
// (i*2+1, i*2+2) are the first and second inputs and (i*2+3) is the output, so the model
// input is operand 1 and the model output is operand len*2+1 (the last operand).
//
// CppType/operandType select float32 or quant8 variants; `len` is the chain length.
template <typename CppType, TestOperandType operandType>
TestModel createLargeTestModelImpl(TestOperationType op, uint32_t len) {
    EXPECT_TRUE(op == TestOperationType::ADD || op == TestOperationType::MUL);

    // Model operations and operands.
    std::vector<TestOperation> operations(len);
    std::vector<TestOperand> operands(len * 2 + 2);

    // The activation scalar, value = 0 (i.e. no fused activation), shared by every operation.
    operands[0] = {
            .type = TestOperandType::INT32,
            .dimensions = {},
            .numberOfConsumers = len,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = TestOperandLifeTime::CONSTANT_COPY,
            .data = TestBuffer::createFromVector<int32_t>({0}),
    };

    // The buffer value of the constant second operand. The logical value is always 1.0f.
    CppType bufferValue;
    // The scale of the first and second operand.
    float scale1, scale2;
    if (operandType == TestOperandType::TENSOR_FLOAT32) {
        // Float tensors do not use quantization parameters.
        bufferValue = 1.0f;
        scale1 = 0.0f;
        scale2 = 0.0f;
    } else if (op == TestOperationType::ADD) {
        bufferValue = 1;
        scale1 = 1.0f;
        scale2 = 1.0f;
    } else {
        // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
        // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
        bufferValue = 2;
        scale1 = 1.0f;
        scale2 = 0.5f;
    }

    for (uint32_t i = 0; i < len; i++) {
        const uint32_t firstInputIndex = i * 2 + 1;
        const uint32_t secondInputIndex = firstInputIndex + 1;
        const uint32_t outputIndex = secondInputIndex + 1;

        // The first operation input: the model input for the first link of the chain,
        // otherwise the previous link's output (a temporary).
        operands[firstInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale1,
                .zeroPoint = 0,
                .lifetime = (i == 0 ? TestOperandLifeTime::MODEL_INPUT
                                    : TestOperandLifeTime::TEMPORARY_VARIABLE),
                .data = (i == 0 ? TestBuffer::createFromVector<CppType>({1}) : TestBuffer()),
        };

        // The second operation input, value = 1 (deliberately not shared across links; see the
        // function comment about keeping non-trivial constant data in the cache).
        operands[secondInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale2,
                .zeroPoint = 0,
                .lifetime = TestOperandLifeTime::CONSTANT_COPY,
                .data = TestBuffer::createFromVector<CppType>({bufferValue}),
        };

        // The operation. All operations share the same activation scalar.
        // The output operand is created as an input in the next iteration of the loop, in the case
        // of all but the last member of the chain; and after the loop as a model output, in the
        // case of the last member of the chain.
        operations[i] = {
                .type = op,
                .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
                .outputs = {outputIndex},
        };
    }

    // For TestOperationType::ADD, output = 1 + 1 * len = len + 1
    // For TestOperationType::MUL, output = 1 * 1 ^ len = 1
    CppType outputResult = static_cast<CppType>(op == TestOperationType::ADD ? len + 1u : 1u);

    // The model output (the last operand), with its expected value for result checking.
    operands.back() = {
            .type = operandType,
            .dimensions = {1},
            .numberOfConsumers = 0,
            .scale = scale1,
            .zeroPoint = 0,
            .lifetime = TestOperandLifeTime::MODEL_OUTPUT,
            .data = TestBuffer::createFromVector<CppType>({outputResult}),
    };

    return {
            .main = {.operands = std::move(operands),
                     .operations = std::move(operations),
                     .inputIndexes = {1},
                     .outputIndexes = {len * 2 + 1}},
            .isRelaxed = false,
    };
}
217
Xusong Wang96e68dc2019-01-18 17:28:26 -0800218} // namespace
219
220// Tag for the compilation caching tests.
Michael Butler07633282019-08-29 11:08:25 -0700221class CompilationCachingTestBase : public testing::Test {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800222 protected:
Michael Butler07633282019-08-29 11:08:25 -0700223 CompilationCachingTestBase(sp<IDevice> device, OperandType type)
224 : kDevice(std::move(device)), kOperandType(type) {}
Xusong Wang0e0721f2019-05-07 12:57:49 -0700225
Xusong Wang96e68dc2019-01-18 17:28:26 -0800226 void SetUp() override {
Michael Butler07633282019-08-29 11:08:25 -0700227 testing::Test::SetUp();
Michael Butler13b05162019-08-29 22:17:24 -0700228 ASSERT_NE(kDevice.get(), nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800229
Xusong Wanged0822b2019-02-25 16:58:58 -0800230 // Create cache directory. The cache directory and a temporary cache file is always created
231 // to test the behavior of prepareModelFromCache, even when caching is not supported.
Xusong Wang96e68dc2019-01-18 17:28:26 -0800232 char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
233 char* cacheDir = mkdtemp(cacheDirTemp);
234 ASSERT_NE(cacheDir, nullptr);
Xusong Wang6824cc12019-02-12 18:00:37 -0800235 mCacheDir = cacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800236 mCacheDir.push_back('/');
Xusong Wang6824cc12019-02-12 18:00:37 -0800237
Michael Butler13b05162019-08-29 22:17:24 -0700238 Return<void> ret = kDevice->getNumberOfCacheFilesNeeded(
Xusong Wanged0822b2019-02-25 16:58:58 -0800239 [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800240 EXPECT_EQ(ErrorStatus::NONE, status);
Xusong Wanged0822b2019-02-25 16:58:58 -0800241 mNumModelCache = numModelCache;
242 mNumDataCache = numDataCache;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800243 });
244 EXPECT_TRUE(ret.isOk());
Xusong Wanged0822b2019-02-25 16:58:58 -0800245 mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;
246
247 // Create empty cache files.
248 mTmpCache = mCacheDir + "tmp";
249 for (uint32_t i = 0; i < mNumModelCache; i++) {
250 mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
251 }
252 for (uint32_t i = 0; i < mNumDataCache; i++) {
253 mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
254 }
255 // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
256 hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
257 createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
258 createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
259 createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);
260
261 if (!mIsCachingSupported) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800262 LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
263 "support compilation caching.";
264 std::cout << "[ ] Early termination of test because vendor service does not "
265 "support compilation caching."
266 << std::endl;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800267 }
Xusong Wang6824cc12019-02-12 18:00:37 -0800268 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800269
Xusong Wang6824cc12019-02-12 18:00:37 -0800270 void TearDown() override {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700271 // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
Michael Butler13b05162019-08-29 22:17:24 -0700272 if (!testing::Test::HasFailure()) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700273 // Recursively remove the cache directory specified by mCacheDir.
274 auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
275 return remove(entry);
276 };
277 nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
Xusong Wang6824cc12019-02-12 18:00:37 -0800278 }
Michael Butler07633282019-08-29 11:08:25 -0700279 testing::Test::TearDown();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800280 }
281
Xusong Wang0e0721f2019-05-07 12:57:49 -0700282 // Model and examples creators. According to kOperandType, the following methods will return
283 // either float32 model/examples or the quant8 variant.
Xusong Wangead950d2019-08-09 16:45:24 -0700284 TestModel createTestModel() {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700285 if (kOperandType == OperandType::TENSOR_FLOAT32) {
Xusong Wangead950d2019-08-09 16:45:24 -0700286 return float32_model::get_test_model();
Xusong Wang0e0721f2019-05-07 12:57:49 -0700287 } else {
Xusong Wangead950d2019-08-09 16:45:24 -0700288 return quant8_model::get_test_model();
Xusong Wang0e0721f2019-05-07 12:57:49 -0700289 }
290 }
291
Xusong Wangead950d2019-08-09 16:45:24 -0700292 TestModel createLargeTestModel(OperationType op, uint32_t len) {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700293 if (kOperandType == OperandType::TENSOR_FLOAT32) {
Xusong Wangead950d2019-08-09 16:45:24 -0700294 return createLargeTestModelImpl<float, TestOperandType::TENSOR_FLOAT32>(
295 static_cast<TestOperationType>(op), len);
Xusong Wang0e0721f2019-05-07 12:57:49 -0700296 } else {
Xusong Wangead950d2019-08-09 16:45:24 -0700297 return createLargeTestModelImpl<uint8_t, TestOperandType::TENSOR_QUANT8_ASYMM>(
298 static_cast<TestOperationType>(op), len);
Xusong Wang0e0721f2019-05-07 12:57:49 -0700299 }
300 }
301
Xusong Wang4f71afc2019-04-26 15:33:38 -0700302 // See if the service can handle the model.
Michael Butler62749b92019-08-26 23:55:47 -0700303 bool isModelFullySupported(const Model& model) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800304 bool fullySupportsModel = false;
Michael Butler13b05162019-08-29 22:17:24 -0700305 Return<void> supportedCall = kDevice->getSupportedOperations_1_2(
Xusong Wanged0822b2019-02-25 16:58:58 -0800306 model,
307 [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
308 ASSERT_EQ(ErrorStatus::NONE, status);
309 ASSERT_EQ(supported.size(), model.operations.size());
310 fullySupportsModel = std::all_of(supported.begin(), supported.end(),
311 [](bool valid) { return valid; });
312 });
Xusong Wang4f71afc2019-04-26 15:33:38 -0700313 EXPECT_TRUE(supportedCall.isOk());
314 return fullySupportsModel;
315 }
316
Michael Butler62749b92019-08-26 23:55:47 -0700317 void saveModelToCache(const Model& model, const hidl_vec<hidl_handle>& modelCache,
Xusong Wang4f71afc2019-04-26 15:33:38 -0700318 const hidl_vec<hidl_handle>& dataCache,
319 sp<IPreparedModel>* preparedModel = nullptr) {
320 if (preparedModel != nullptr) *preparedModel = nullptr;
Xusong Wanged0822b2019-02-25 16:58:58 -0800321
322 // Launch prepare model.
323 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800324 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800325 Return<ErrorStatus> prepareLaunchStatus =
Michael Butler13b05162019-08-29 22:17:24 -0700326 kDevice->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER,
327 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wanged0822b2019-02-25 16:58:58 -0800328 ASSERT_TRUE(prepareLaunchStatus.isOk());
329 ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);
330
331 // Retrieve prepared model.
332 preparedModelCallback->wait();
333 ASSERT_EQ(preparedModelCallback->getStatus(), ErrorStatus::NONE);
334 if (preparedModel != nullptr) {
Michael Butler62749b92019-08-26 23:55:47 -0700335 *preparedModel = IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
336 .withDefault(nullptr);
Xusong Wanged0822b2019-02-25 16:58:58 -0800337 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800338 }
339
340 bool checkEarlyTermination(ErrorStatus status) {
341 if (status == ErrorStatus::GENERAL_FAILURE) {
342 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
343 "save the prepared model that it does not support.";
344 std::cout << "[ ] Early termination of test because vendor service cannot "
345 "save the prepared model that it does not support."
346 << std::endl;
347 return true;
348 }
349 return false;
350 }
351
Michael Butler62749b92019-08-26 23:55:47 -0700352 bool checkEarlyTermination(const Model& model) {
Xusong Wang4f71afc2019-04-26 15:33:38 -0700353 if (!isModelFullySupported(model)) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800354 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
355 "prepare model that it does not support.";
356 std::cout << "[ ] Early termination of test because vendor service cannot "
357 "prepare model that it does not support."
358 << std::endl;
359 return true;
360 }
361 return false;
362 }
363
364 void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
365 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang96e68dc2019-01-18 17:28:26 -0800366 sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
367 // Launch prepare model from cache.
368 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800369 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Michael Butler13b05162019-08-29 22:17:24 -0700370 Return<ErrorStatus> prepareLaunchStatus = kDevice->prepareModelFromCache(
Xusong Wanged0822b2019-02-25 16:58:58 -0800371 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800372 ASSERT_TRUE(prepareLaunchStatus.isOk());
373 if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
374 *preparedModel = nullptr;
375 *status = static_cast<ErrorStatus>(prepareLaunchStatus);
376 return;
377 }
378
379 // Retrieve prepared model.
380 preparedModelCallback->wait();
381 *status = preparedModelCallback->getStatus();
Michael Butler62749b92019-08-26 23:55:47 -0700382 *preparedModel = IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
Xusong Wang96e68dc2019-01-18 17:28:26 -0800383 .withDefault(nullptr);
384 }
385
Xusong Wanged0822b2019-02-25 16:58:58 -0800386 // Absolute path to the temporary cache directory.
Xusong Wang6824cc12019-02-12 18:00:37 -0800387 std::string mCacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800388
389 // Groups of file paths for model and data cache in the tmp cache directory, initialized with
390 // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
391 // and the inner vector is for fds held by each handle.
392 std::vector<std::vector<std::string>> mModelCache;
393 std::vector<std::vector<std::string>> mDataCache;
394
395 // A separate temporary file path in the tmp cache directory.
396 std::string mTmpCache;
397
Xusong Wang96e68dc2019-01-18 17:28:26 -0800398 uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
Xusong Wanged0822b2019-02-25 16:58:58 -0800399 uint32_t mNumModelCache;
400 uint32_t mNumDataCache;
401 uint32_t mIsCachingSupported;
Xusong Wang0e0721f2019-05-07 12:57:49 -0700402
Michael Butler07633282019-08-29 11:08:25 -0700403 const sp<IDevice> kDevice;
Xusong Wang0e0721f2019-05-07 12:57:49 -0700404 // The primary data type of the testModel.
405 const OperandType kOperandType;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800406};
407
// Test parameter tuple: the named device under test and the primary operand type of the models.
using CompilationCachingTestParam = std::tuple<NamedDevice, OperandType>;
409
// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
// pass running with float32 models and the second pass running with quant8 models.
class CompilationCachingTest : public CompilationCachingTestBase,
                               public testing::WithParamInterface<CompilationCachingTestParam> {
  protected:
    // Unpacks the gtest parameter tuple into the device and operand type for the base fixture.
    CompilationCachingTest()
        : CompilationCachingTestBase(getData(std::get<NamedDevice>(GetParam())),
                                     std::get<OperandType>(GetParam())) {}
};
419
420TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800421 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700422 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700423 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700424 if (checkEarlyTermination(model)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800425 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800426
427 // Save the compilation to cache.
428 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800429 hidl_vec<hidl_handle> modelCache, dataCache;
430 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
431 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700432 saveModelToCache(model, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800433 }
434
435 // Retrieve preparedModel from cache.
436 {
437 preparedModel = nullptr;
438 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800439 hidl_vec<hidl_handle> modelCache, dataCache;
440 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
441 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
442 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800443 if (!mIsCachingSupported) {
444 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
445 ASSERT_EQ(preparedModel, nullptr);
446 return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800447 } else if (checkEarlyTermination(status)) {
448 ASSERT_EQ(preparedModel, nullptr);
449 return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800450 } else {
451 ASSERT_EQ(status, ErrorStatus::NONE);
452 ASSERT_NE(preparedModel, nullptr);
453 }
454 }
455
456 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700457 EvaluatePreparedModel(preparedModel, testModel,
458 /*testDynamicOutputShape=*/false);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800459}
460
// Same round-trip as CacheSavingAndRetrieval, but with cache fds whose files are non-empty and
// whose read/write offsets are non-zero, to verify drivers do not assume fresh fds at offset 0.
TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = createModel(testModel);
    if (checkEarlyTermination(model)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyBytes[] = {0, 0};
        // Write a dummy integer to the cache.
        // The driver should be able to handle non-empty cache and non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
                            sizeof(dummyBytes)),
                      sizeof(dummyBytes));
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_EQ(
                    write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
                    sizeof(dummyBytes));
        }
        saveModelToCache(model, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyByte = 0;
        // Advance the offset of each handle by one byte.
        // The driver should be able to handle non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            // Drivers without caching support must reject prepareModelFromCache.
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    EvaluatePreparedModel(preparedModel, testModel,
                          /*testDynamicOutputShape=*/false);
}
523
Xusong Wang0e0721f2019-05-07 12:57:49 -0700524TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800525 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700526 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700527 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700528 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800529
530 // Test with number of model cache files greater than mNumModelCache.
531 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800532 hidl_vec<hidl_handle> modelCache, dataCache;
533 // Pass an additional cache file for model cache.
534 mModelCache.push_back({mTmpCache});
535 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
536 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
537 mModelCache.pop_back();
538 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700539 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800540 ASSERT_NE(preparedModel, nullptr);
541 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700542 EvaluatePreparedModel(preparedModel, testModel,
543 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800544 // Check if prepareModelFromCache fails.
545 preparedModel = nullptr;
546 ErrorStatus status;
547 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
548 if (status != ErrorStatus::INVALID_ARGUMENT) {
549 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
550 }
551 ASSERT_EQ(preparedModel, nullptr);
552 }
553
554 // Test with number of model cache files smaller than mNumModelCache.
555 if (mModelCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800556 hidl_vec<hidl_handle> modelCache, dataCache;
557 // Pop out the last cache file.
558 auto tmp = mModelCache.back();
559 mModelCache.pop_back();
560 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
561 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
562 mModelCache.push_back(tmp);
563 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700564 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800565 ASSERT_NE(preparedModel, nullptr);
566 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700567 EvaluatePreparedModel(preparedModel, testModel,
568 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800569 // Check if prepareModelFromCache fails.
570 preparedModel = nullptr;
571 ErrorStatus status;
572 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
573 if (status != ErrorStatus::INVALID_ARGUMENT) {
574 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
575 }
576 ASSERT_EQ(preparedModel, nullptr);
577 }
578
579 // Test with number of data cache files greater than mNumDataCache.
580 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800581 hidl_vec<hidl_handle> modelCache, dataCache;
582 // Pass an additional cache file for data cache.
583 mDataCache.push_back({mTmpCache});
584 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
585 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
586 mDataCache.pop_back();
587 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700588 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800589 ASSERT_NE(preparedModel, nullptr);
590 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700591 EvaluatePreparedModel(preparedModel, testModel,
592 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800593 // Check if prepareModelFromCache fails.
594 preparedModel = nullptr;
595 ErrorStatus status;
596 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
597 if (status != ErrorStatus::INVALID_ARGUMENT) {
598 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
599 }
600 ASSERT_EQ(preparedModel, nullptr);
601 }
602
603 // Test with number of data cache files smaller than mNumDataCache.
604 if (mDataCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800605 hidl_vec<hidl_handle> modelCache, dataCache;
606 // Pop out the last cache file.
607 auto tmp = mDataCache.back();
608 mDataCache.pop_back();
609 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
610 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
611 mDataCache.push_back(tmp);
612 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700613 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800614 ASSERT_NE(preparedModel, nullptr);
615 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700616 EvaluatePreparedModel(preparedModel, testModel,
617 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800618 // Check if prepareModelFromCache fails.
619 preparedModel = nullptr;
620 ErrorStatus status;
621 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
622 if (status != ErrorStatus::INVALID_ARGUMENT) {
623 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
624 }
625 ASSERT_EQ(preparedModel, nullptr);
626 }
627}
628
Xusong Wang0e0721f2019-05-07 12:57:49 -0700629TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800630 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700631 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700632 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700633 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800634
635 // Save the compilation to cache.
636 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800637 hidl_vec<hidl_handle> modelCache, dataCache;
638 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
639 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700640 saveModelToCache(model, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -0800641 }
642
643 // Test with number of model cache files greater than mNumModelCache.
644 {
645 sp<IPreparedModel> preparedModel = nullptr;
646 ErrorStatus status;
647 hidl_vec<hidl_handle> modelCache, dataCache;
648 mModelCache.push_back({mTmpCache});
649 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
650 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
651 mModelCache.pop_back();
652 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
653 if (status != ErrorStatus::GENERAL_FAILURE) {
654 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
655 }
656 ASSERT_EQ(preparedModel, nullptr);
657 }
658
659 // Test with number of model cache files smaller than mNumModelCache.
660 if (mModelCache.size() > 0) {
661 sp<IPreparedModel> preparedModel = nullptr;
662 ErrorStatus status;
663 hidl_vec<hidl_handle> modelCache, dataCache;
664 auto tmp = mModelCache.back();
665 mModelCache.pop_back();
666 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
667 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
668 mModelCache.push_back(tmp);
669 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
670 if (status != ErrorStatus::GENERAL_FAILURE) {
671 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
672 }
673 ASSERT_EQ(preparedModel, nullptr);
674 }
675
676 // Test with number of data cache files greater than mNumDataCache.
677 {
678 sp<IPreparedModel> preparedModel = nullptr;
679 ErrorStatus status;
680 hidl_vec<hidl_handle> modelCache, dataCache;
681 mDataCache.push_back({mTmpCache});
682 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
683 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
684 mDataCache.pop_back();
685 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
686 if (status != ErrorStatus::GENERAL_FAILURE) {
687 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
688 }
689 ASSERT_EQ(preparedModel, nullptr);
690 }
691
692 // Test with number of data cache files smaller than mNumDataCache.
693 if (mDataCache.size() > 0) {
694 sp<IPreparedModel> preparedModel = nullptr;
695 ErrorStatus status;
696 hidl_vec<hidl_handle> modelCache, dataCache;
697 auto tmp = mDataCache.back();
698 mDataCache.pop_back();
699 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
700 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
701 mDataCache.push_back(tmp);
702 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
703 if (status != ErrorStatus::GENERAL_FAILURE) {
704 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
705 }
706 ASSERT_EQ(preparedModel, nullptr);
707 }
708}
709
// Verify driver behavior when a cache entry holds the wrong number of fds while
// saving to cache. Compilation must still succeed and yield a working prepared
// model (the invalid cache handles may simply be ignored — TODO confirm intended
// driver behavior against the HAL spec), but no usable cache entry may be
// produced: a subsequent prepareModelFromCache must fail.
TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = createModel(testModel);
    if (checkEarlyTermination(model)) return;

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        // Temporarily give entry i a second fd; restored via pop_back after the
        // handles have been created.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable rejection.
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        // Temporarily empty entry i; the removed fd path is restored afterwards.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable rejection.
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable rejection.
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Pass an invalid number of fds for handle i.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        sp<IPreparedModel> preparedModel = nullptr;
        saveModelToCache(model, modelCache, dataCache, &preparedModel);
        ASSERT_NE(preparedModel, nullptr);
        // Execute and verify results.
        EvaluatePreparedModel(preparedModel, testModel,
                              /*testDynamicOutputShape=*/false);
        // Check if prepareModelFromCache fails.
        preparedModel = nullptr;
        ErrorStatus status;
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable rejection.
        if (status != ErrorStatus::INVALID_ARGUMENT) {
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
814
// Verify that prepareModelFromCache is rejected when a cache entry holds the
// wrong number of fds (two fds, or zero fds, where exactly one is expected).
// A valid cache entry is written first so that only the fd count is at fault.
TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = createModel(testModel);
    if (checkEarlyTermination(model)) return;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(model, modelCache, dataCache);
    }

    // Go through each handle in model cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily give entry i a second fd while the handles are created.
        mModelCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable rejection.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in model cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumModelCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily empty entry i; the removed fd path is restored afterwards.
        auto tmp = mModelCache[i].back();
        mModelCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mModelCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable rejection.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd greater than 1.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily give entry i a second fd while the handles are created.
        mDataCache[i].push_back(mTmpCache);
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].pop_back();
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable rejection.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }

    // Go through each handle in data cache, test with NumFd equal to 0.
    for (uint32_t i = 0; i < mNumDataCache; i++) {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        // Temporarily empty entry i; the removed fd path is restored afterwards.
        auto tmp = mDataCache[i].back();
        mDataCache[i].pop_back();
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        mDataCache[i].push_back(tmp);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        // Either GENERAL_FAILURE or INVALID_ARGUMENT is an acceptable rejection.
        if (status != ErrorStatus::GENERAL_FAILURE) {
            ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
        }
        ASSERT_EQ(preparedModel, nullptr);
    }
}
895
Xusong Wang0e0721f2019-05-07 12:57:49 -0700896TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800897 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700898 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700899 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700900 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800901 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
902 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800903
Xusong Wanged0822b2019-02-25 16:58:58 -0800904 // Go through each handle in model cache, test with invalid access mode.
905 for (uint32_t i = 0; i < mNumModelCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800906 hidl_vec<hidl_handle> modelCache, dataCache;
907 modelCacheMode[i] = AccessMode::READ_ONLY;
908 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
909 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
910 modelCacheMode[i] = AccessMode::READ_WRITE;
911 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700912 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800913 ASSERT_NE(preparedModel, nullptr);
914 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700915 EvaluatePreparedModel(preparedModel, testModel,
916 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800917 // Check if prepareModelFromCache fails.
918 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800919 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800920 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
921 if (status != ErrorStatus::INVALID_ARGUMENT) {
922 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
923 }
924 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800925 }
926
Xusong Wanged0822b2019-02-25 16:58:58 -0800927 // Go through each handle in data cache, test with invalid access mode.
928 for (uint32_t i = 0; i < mNumDataCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800929 hidl_vec<hidl_handle> modelCache, dataCache;
930 dataCacheMode[i] = AccessMode::READ_ONLY;
931 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
932 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
933 dataCacheMode[i] = AccessMode::READ_WRITE;
934 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700935 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800936 ASSERT_NE(preparedModel, nullptr);
937 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700938 EvaluatePreparedModel(preparedModel, testModel,
939 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800940 // Check if prepareModelFromCache fails.
941 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800942 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800943 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
944 if (status != ErrorStatus::INVALID_ARGUMENT) {
945 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
946 }
947 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800948 }
949}
950
Xusong Wang0e0721f2019-05-07 12:57:49 -0700951TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800952 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700953 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700954 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700955 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800956 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
957 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800958
959 // Save the compilation to cache.
960 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800961 hidl_vec<hidl_handle> modelCache, dataCache;
962 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
963 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700964 saveModelToCache(model, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800965 }
966
Xusong Wanged0822b2019-02-25 16:58:58 -0800967 // Go through each handle in model cache, test with invalid access mode.
968 for (uint32_t i = 0; i < mNumModelCache; i++) {
969 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800970 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800971 hidl_vec<hidl_handle> modelCache, dataCache;
972 modelCacheMode[i] = AccessMode::WRITE_ONLY;
973 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
974 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
975 modelCacheMode[i] = AccessMode::READ_WRITE;
976 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800977 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
978 ASSERT_EQ(preparedModel, nullptr);
979 }
980
Xusong Wanged0822b2019-02-25 16:58:58 -0800981 // Go through each handle in data cache, test with invalid access mode.
982 for (uint32_t i = 0; i < mNumDataCache; i++) {
983 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800984 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800985 hidl_vec<hidl_handle> modelCache, dataCache;
986 dataCacheMode[i] = AccessMode::WRITE_ONLY;
987 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
988 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
989 dataCacheMode[i] = AccessMode::READ_WRITE;
990 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800991 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
992 ASSERT_EQ(preparedModel, nullptr);
993 }
994}
995
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700996// Copy file contents between file groups.
997// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
998// The outer vector sizes must match and the inner vectors must have size = 1.
999static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
1000 const std::vector<std::vector<std::string>>& to) {
1001 constexpr size_t kBufferSize = 1000000;
1002 uint8_t buffer[kBufferSize];
1003
1004 ASSERT_EQ(from.size(), to.size());
1005 for (uint32_t i = 0; i < from.size(); i++) {
1006 ASSERT_EQ(from[i].size(), 1u);
1007 ASSERT_EQ(to[i].size(), 1u);
1008 int fromFd = open(from[i][0].c_str(), O_RDONLY);
1009 int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
1010 ASSERT_GE(fromFd, 0);
1011 ASSERT_GE(toFd, 0);
1012
1013 ssize_t readBytes;
1014 while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
1015 ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
1016 }
1017 ASSERT_GE(readBytes, 0);
1018
1019 close(fromFd);
1020 close(toFd);
1021 }
1022}
1023
// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
// Number of times each probabilistic TOCTOU test below is repeated, to raise
// the chance of actually hitting the race between the concurrent cache-file
// copy and the driver's cache access.
constexpr uint32_t kNumIterationsTOCTOU = 100;
1027
// Time-of-check/time-of-use test for saveModelToCache: while the driver writes
// the compilation of modelAdd to the cache files, a second thread concurrently
// overwrites those files with modelMul's cached artifacts. The driver must not
// crash; a later prepareModelFromCache may fail or succeed, but if it succeeds
// the resulting model must still compute modelAdd's correct results.
TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    const Model modelMul = createModel(testModelMul);
    if (checkEarlyTermination(modelMul)) return;
    const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    const Model modelAdd = createModel(testModelAdd);
    if (checkEarlyTermination(modelAdd)) return;

    // Save the modelMul compilation to cache.
    // Use a separate set of model cache files (suffixed "_mul") so modelMul's
    // artifacts survive as the source of the concurrent copy below.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelMul, modelCache, dataCache);
    }

    // Use a different token for modelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the modelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(modelAdd, modelCache, dataCache);
            thread.join();
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                EvaluatePreparedModel(preparedModel, testModelAdd,
                                      /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1089
// Time-of-check/time-of-use test for prepareModelFromCache: while the driver
// reads modelAdd's cache files, a second thread concurrently overwrites them
// with modelMul's cached artifacts. The driver must not crash; preparation may
// fail or succeed, but if it succeeds the resulting model must still compute
// modelAdd's correct results.
TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    const Model modelMul = createModel(testModelMul);
    if (checkEarlyTermination(modelMul)) return;
    const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    const Model modelAdd = createModel(testModelAdd);
    if (checkEarlyTermination(modelAdd)) return;

    // Save the modelMul compilation to cache.
    // Use a separate set of model cache files (suffixed "_mul") so modelMul's
    // artifacts survive as the source of the concurrent copy below.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelMul, modelCache, dataCache);
    }

    // Use a different token for modelAdd.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times.
    for (uint32_t i = 0; i < kNumIterationsTOCTOU; i++) {
        // Save the modelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            saveModelToCache(modelAdd, modelCache, dataCache);
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                EvaluatePreparedModel(preparedModel, testModelAdd,
                                      /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1151
Xusong Wang0e0721f2019-05-07 12:57:49 -07001152TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001153 if (!mIsCachingSupported) return;
1154
Xusong Wang4f71afc2019-04-26 15:33:38 -07001155 // Create test models and check if fully supported by the service.
Xusong Wangead950d2019-08-09 16:45:24 -07001156 const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001157 const Model modelMul = createModel(testModelMul);
Xusong Wangead950d2019-08-09 16:45:24 -07001158 if (checkEarlyTermination(modelMul)) return;
1159 const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
Xusong Wangbcaa7822019-08-23 16:10:54 -07001160 const Model modelAdd = createModel(testModelAdd);
Xusong Wangead950d2019-08-09 16:45:24 -07001161 if (checkEarlyTermination(modelAdd)) return;
Xusong Wang4f71afc2019-04-26 15:33:38 -07001162
Xusong Wangead950d2019-08-09 16:45:24 -07001163 // Save the modelMul compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001164 auto modelCacheMul = mModelCache;
1165 for (auto& cache : modelCacheMul) {
1166 cache[0].append("_mul");
1167 }
1168 {
1169 hidl_vec<hidl_handle> modelCache, dataCache;
1170 createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
1171 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001172 saveModelToCache(modelMul, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001173 }
1174
Xusong Wangead950d2019-08-09 16:45:24 -07001175 // Use a different token for modelAdd.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001176 mToken[0]++;
1177
Xusong Wangead950d2019-08-09 16:45:24 -07001178 // Save the modelAdd compilation to cache.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001179 {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001180 hidl_vec<hidl_handle> modelCache, dataCache;
1181 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1182 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001183 saveModelToCache(modelAdd, modelCache, dataCache);
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001184 }
1185
Xusong Wangead950d2019-08-09 16:45:24 -07001186 // Replace the model cache of modelAdd with modelMul.
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001187 copyCacheFiles(modelCacheMul, mModelCache);
1188
1189 // Retrieve the preparedModel from cache, expect failure.
1190 {
1191 sp<IPreparedModel> preparedModel = nullptr;
1192 ErrorStatus status;
1193 hidl_vec<hidl_handle> modelCache, dataCache;
1194 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1195 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1196 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
1197 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1198 ASSERT_EQ(preparedModel, nullptr);
1199 }
1200}
1201
Michael Butler07633282019-08-29 11:08:25 -07001202static const auto kNamedDeviceChoices = testing::ValuesIn(getNamedDevices());
Xusong Wang0e0721f2019-05-07 12:57:49 -07001203static const auto kOperandTypeChoices =
Michael Butler13b05162019-08-29 22:17:24 -07001204 testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);
Xusong Wang0e0721f2019-05-07 12:57:49 -07001205
Michael Butler07633282019-08-29 11:08:25 -07001206std::string printCompilationCachingTest(
1207 const testing::TestParamInfo<CompilationCachingTestParam>& info) {
1208 const auto& [namedDevice, operandType] = info.param;
1209 const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8");
1210 return gtestCompliantName(getName(namedDevice) + "_" + type);
1211}
1212
// Instantiate CompilationCachingTest for every (device, operand type) combination.
// NOTE(review): INSTANTIATE_TEST_CASE_P is deprecated in newer gtest in favor of
// INSTANTIATE_TEST_SUITE_P — migrate when the in-tree gtest supports it.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest,
                        testing::Combine(kNamedDeviceChoices, kOperandTypeChoices),
                        printCompilationCachingTest);
1216
// Parameters of CompilationCachingSecurityTest: (device, operand type, RNG seed for the
// randomized cache corruption).
using CompilationCachingSecurityTestParam = std::tuple<NamedDevice, OperandType, uint32_t>;
Xusong Wang0e0721f2019-05-07 12:57:49 -07001218
1219class CompilationCachingSecurityTest
1220 : public CompilationCachingTestBase,
Michael Butler07633282019-08-29 11:08:25 -07001221 public testing::WithParamInterface<CompilationCachingSecurityTestParam> {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001222 protected:
Michael Butler07633282019-08-29 11:08:25 -07001223 CompilationCachingSecurityTest()
1224 : CompilationCachingTestBase(getData(std::get<NamedDevice>(GetParam())),
1225 std::get<OperandType>(GetParam())) {}
Xusong Wang0e0721f2019-05-07 12:57:49 -07001226
Xusong Wang96e68dc2019-01-18 17:28:26 -08001227 void SetUp() {
Xusong Wang0e0721f2019-05-07 12:57:49 -07001228 CompilationCachingTestBase::SetUp();
Xusong Wang96e68dc2019-01-18 17:28:26 -08001229 generator.seed(kSeed);
1230 }
1231
1232 // Get a random integer within a closed range [lower, upper].
1233 template <typename T>
1234 T getRandomInt(T lower, T upper) {
1235 std::uniform_int_distribution<T> dis(lower, upper);
1236 return dis(generator);
1237 }
1238
Xusong Wange371f6f2019-04-23 14:51:50 -07001239 // Randomly flip one single bit of the cache entry.
1240 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1241 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001242 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1243 long int fileSize = ftell(pFile);
1244 if (fileSize == 0) {
1245 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001246 *skip = true;
1247 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001248 }
1249 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1250 int readByte = fgetc(pFile);
1251 ASSERT_NE(readByte, EOF);
1252 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1253 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1254 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001255 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001256 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001257
Xusong Wange371f6f2019-04-23 14:51:50 -07001258 // Randomly append bytes to the cache entry.
1259 void appendBytesToCache(const std::string& filename, bool* skip) {
1260 FILE* pFile = fopen(filename.c_str(), "a");
1261 uint32_t appendLength = getRandomInt(1, 256);
1262 for (uint32_t i = 0; i < appendLength; i++) {
1263 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1264 }
1265 fclose(pFile);
1266 *skip = false;
1267 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001268
Xusong Wange371f6f2019-04-23 14:51:50 -07001269 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001270
Xusong Wange371f6f2019-04-23 14:51:50 -07001271 // Test if the driver behaves as expected when given corrupted cache or token.
1272 // The modifier will be invoked after save to cache but before prepare from cache.
1273 // The modifier accepts one pointer argument "skip" as the returning value, indicating
1274 // whether the test should be skipped or not.
1275 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wangead950d2019-08-09 16:45:24 -07001276 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -07001277 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -07001278 if (checkEarlyTermination(model)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001279
Xusong Wanged0822b2019-02-25 16:58:58 -08001280 // Save the compilation to cache.
1281 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001282 hidl_vec<hidl_handle> modelCache, dataCache;
1283 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1284 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001285 saveModelToCache(model, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001286 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001287
Xusong Wange371f6f2019-04-23 14:51:50 -07001288 bool skip = false;
1289 modifier(&skip);
1290 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001291
Xusong Wange371f6f2019-04-23 14:51:50 -07001292 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001293 {
1294 sp<IPreparedModel> preparedModel = nullptr;
1295 ErrorStatus status;
1296 hidl_vec<hidl_handle> modelCache, dataCache;
1297 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1298 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1299 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001300
1301 switch (expected) {
1302 case ExpectedResult::GENERAL_FAILURE:
1303 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1304 ASSERT_EQ(preparedModel, nullptr);
1305 break;
1306 case ExpectedResult::NOT_CRASH:
1307 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1308 break;
1309 default:
1310 FAIL();
1311 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001312 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001313 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001314
Michael Butler07633282019-08-29 11:08:25 -07001315 const uint32_t kSeed = std::get<uint32_t>(GetParam());
Xusong Wange371f6f2019-04-23 14:51:50 -07001316 std::mt19937 generator;
1317};
1318
1319TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1320 if (!mIsCachingSupported) return;
1321 for (uint32_t i = 0; i < mNumModelCache; i++) {
1322 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1323 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1324 }
1325}
1326
1327TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1328 if (!mIsCachingSupported) return;
1329 for (uint32_t i = 0; i < mNumModelCache; i++) {
1330 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1331 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1332 }
1333}
1334
1335TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1336 if (!mIsCachingSupported) return;
1337 for (uint32_t i = 0; i < mNumDataCache; i++) {
1338 testCorruptedCache(ExpectedResult::NOT_CRASH,
1339 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1340 }
1341}
1342
1343TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1344 if (!mIsCachingSupported) return;
1345 for (uint32_t i = 0; i < mNumDataCache; i++) {
1346 testCorruptedCache(ExpectedResult::NOT_CRASH,
1347 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1348 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001349}
1350
1351TEST_P(CompilationCachingSecurityTest, WrongToken) {
1352 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001353 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1354 // Randomly flip one single bit in mToken.
1355 uint32_t ind =
1356 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1357 mToken[ind] ^= (1U << getRandomInt(0, 7));
1358 *skip = false;
1359 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001360}
1361
Michael Butler07633282019-08-29 11:08:25 -07001362std::string printCompilationCachingSecurityTest(
1363 const testing::TestParamInfo<CompilationCachingSecurityTestParam>& info) {
1364 const auto& [namedDevice, operandType, seed] = info.param;
1365 const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8");
1366 return gtestCompliantName(getName(namedDevice) + "_" + type + "_" + std::to_string(seed));
1367}
1368
Xusong Wang96e68dc2019-01-18 17:28:26 -08001369INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
Michael Butler07633282019-08-29 11:08:25 -07001370 testing::Combine(kNamedDeviceChoices, kOperandTypeChoices,
1371 testing::Range(0U, 10U)),
1372 printCompilationCachingSecurityTest);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001373
Michael Butler62749b92019-08-26 23:55:47 -07001374} // namespace android::hardware::neuralnetworks::V1_2::vts::functional