blob: 16b313a855ef4c63a3408a2586b283f03d12d42a [file] [log] [blame]
Xusong Wang96e68dc2019-01-18 17:28:26 -08001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "neuralnetworks_hidl_hal_test"
18
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070019#include <android-base/logging.h>
Michael Butler07633282019-08-29 11:08:25 -070020#include <fcntl.h>
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070021#include <ftw.h>
22#include <gtest/gtest.h>
23#include <hidlmemory/mapping.h>
24#include <unistd.h>
25
26#include <cstdio>
27#include <cstdlib>
28#include <random>
Michael Butler051cf392019-07-16 16:52:06 -070029#include <thread>
Xusong Wang96e68dc2019-01-18 17:28:26 -080030
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010031#include "1.2/Callbacks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080032#include "GeneratedTestHarness.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010033#include "MemoryUtils.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080034#include "TestHarness.h"
Xusong Wang7cc0ccc2019-04-23 14:28:17 -070035#include "VtsHalNeuralnetworks.h"
Xusong Wang96e68dc2019-01-18 17:28:26 -080036
// Forward declaration of the mobilenet generated test models in
// frameworks/ml/nn/runtime/test/generated/.
// Declared here (instead of including the generated headers) to keep this test's
// build dependencies minimal; the definitions are linked in from the generated tests.
namespace generated_tests::mobilenet_224_gender_basic_fixed {
const test_helper::TestModel& get_test_model();
}  // namespace generated_tests::mobilenet_224_gender_basic_fixed

namespace generated_tests::mobilenet_quantized {
const test_helper::TestModel& get_test_model();
}  // namespace generated_tests::mobilenet_quantized
46
Michael Butler62749b92019-08-26 23:55:47 -070047namespace android::hardware::neuralnetworks::V1_2::vts::functional {
Xusong Wang96e68dc2019-01-18 17:28:26 -080048
Xusong Wangead950d2019-08-09 16:45:24 -070049using namespace test_helper;
Michael Butler62749b92019-08-26 23:55:47 -070050using implementation::PreparedModelCallback;
51using V1_0::ErrorStatus;
52using V1_1::ExecutionPreference;
Xusong Wang96e68dc2019-01-18 17:28:26 -080053
// Aliases that let the test fixture pick a "large realistic" model by operand type:
// the float32 pass uses the fixed-point-free mobilenet, the quant8 pass the quantized one.
namespace float32_model {

constexpr auto get_test_model = generated_tests::mobilenet_224_gender_basic_fixed::get_test_model;

}  // namespace float32_model

namespace quant8_model {

constexpr auto get_test_model = generated_tests::mobilenet_quantized::get_test_model;

}  // namespace quant8_model
65
66namespace {
67
// File access mode used when opening the cache file descriptors in createCacheHandles().
enum class AccessMode { READ_WRITE, READ_ONLY, WRITE_ONLY };
Xusong Wang96e68dc2019-01-18 17:28:26 -080069
Xusong Wanged0822b2019-02-25 16:58:58 -080070// Creates cache handles based on provided file groups.
71// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
72void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups,
73 const std::vector<AccessMode>& mode, hidl_vec<hidl_handle>* handles) {
74 handles->resize(fileGroups.size());
75 for (uint32_t i = 0; i < fileGroups.size(); i++) {
76 std::vector<int> fds;
77 for (const auto& file : fileGroups[i]) {
78 int fd;
79 if (mode[i] == AccessMode::READ_ONLY) {
80 fd = open(file.c_str(), O_RDONLY);
81 } else if (mode[i] == AccessMode::WRITE_ONLY) {
82 fd = open(file.c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
83 } else if (mode[i] == AccessMode::READ_WRITE) {
84 fd = open(file.c_str(), O_RDWR | O_CREAT, S_IRUSR | S_IWUSR);
85 } else {
86 FAIL();
87 }
88 ASSERT_GE(fd, 0);
89 fds.push_back(fd);
Xusong Wang96e68dc2019-01-18 17:28:26 -080090 }
Xusong Wanged0822b2019-02-25 16:58:58 -080091 native_handle_t* cacheNativeHandle = native_handle_create(fds.size(), 0);
92 ASSERT_NE(cacheNativeHandle, nullptr);
93 std::copy(fds.begin(), fds.end(), &cacheNativeHandle->data[0]);
94 (*handles)[i].setTo(cacheNativeHandle, /*shouldOwn=*/true);
Xusong Wang96e68dc2019-01-18 17:28:26 -080095 }
Xusong Wanged0822b2019-02-25 16:58:58 -080096}
97
98void createCacheHandles(const std::vector<std::vector<std::string>>& fileGroups, AccessMode mode,
99 hidl_vec<hidl_handle>* handles) {
100 createCacheHandles(fileGroups, std::vector<AccessMode>(fileGroups.size(), mode), handles);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800101}
102
// Create a chain of broadcast operations. The second operand is always constant tensor [1].
// For simplicity, activation scalar is shared. The second operand is not shared
// in the model to let driver maintain a non-trivial size of constant data and the corresponding
// data locations in cache.
//
//                    --------- activation ---------
//                    ↓      ↓      ↓             ↓
// E.g. input -> ADD -> ADD -> ADD -> ... -> ADD -> output
//                    ↑      ↑      ↑             ↑
//                   [1]    [1]    [1]           [1]
//
// This function assumes the operation is either ADD or MUL.
//
// Operand layout: index 0 is the shared activation scalar; each link i of the chain
// uses operands {i*2+1, i*2+2} as inputs and i*2+3 as output, so the final model
// output lands at index len*2+1 (the last element of `operands`).
template <typename CppType, TestOperandType operandType>
TestModel createLargeTestModelImpl(TestOperationType op, uint32_t len) {
    EXPECT_TRUE(op == TestOperationType::ADD || op == TestOperationType::MUL);

    // Model operations and operands.
    std::vector<TestOperation> operations(len);
    std::vector<TestOperand> operands(len * 2 + 2);

    // The activation scalar, value = 0 (i.e. no fused activation).
    operands[0] = {
            .type = TestOperandType::INT32,
            .dimensions = {},
            .numberOfConsumers = len,
            .scale = 0.0f,
            .zeroPoint = 0,
            .lifetime = TestOperandLifeTime::CONSTANT_COPY,
            .data = TestBuffer::createFromVector<int32_t>({0}),
    };

    // The buffer value of the constant second operand. The logical value is always 1.0f.
    CppType bufferValue;
    // The scale of the first and second operand.
    float scale1, scale2;
    if (operandType == TestOperandType::TENSOR_FLOAT32) {
        bufferValue = 1.0f;
        scale1 = 0.0f;
        scale2 = 0.0f;
    } else if (op == TestOperationType::ADD) {
        bufferValue = 1;
        scale1 = 1.0f;
        scale2 = 1.0f;
    } else {
        // To satisfy the constraint on quant8 MUL: input0.scale * input1.scale < output.scale,
        // set input1 to have scale = 0.5f and bufferValue = 2, i.e. 1.0f in floating point.
        bufferValue = 2;
        scale1 = 1.0f;
        scale2 = 0.5f;
    }

    for (uint32_t i = 0; i < len; i++) {
        const uint32_t firstInputIndex = i * 2 + 1;
        const uint32_t secondInputIndex = firstInputIndex + 1;
        const uint32_t outputIndex = secondInputIndex + 1;

        // The first operation input. Only the head of the chain (i == 0) is a model
        // input and carries data; intermediate links are temporaries with empty data.
        operands[firstInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale1,
                .zeroPoint = 0,
                .lifetime = (i == 0 ? TestOperandLifeTime::MODEL_INPUT
                                    : TestOperandLifeTime::TEMPORARY_VARIABLE),
                .data = (i == 0 ? TestBuffer::createFromVector<CppType>({1}) : TestBuffer()),
        };

        // The second operation input, value = 1. Deliberately a fresh CONSTANT_COPY per
        // link (not shared) so the driver accumulates non-trivial constant data to cache.
        operands[secondInputIndex] = {
                .type = operandType,
                .dimensions = {1},
                .numberOfConsumers = 1,
                .scale = scale2,
                .zeroPoint = 0,
                .lifetime = TestOperandLifeTime::CONSTANT_COPY,
                .data = TestBuffer::createFromVector<CppType>({bufferValue}),
        };

        // The operation. All operations share the same activation scalar.
        // The output operand is created as an input in the next iteration of the loop, in the case
        // of all but the last member of the chain; and after the loop as a model output, in the
        // case of the last member of the chain.
        operations[i] = {
                .type = op,
                .inputs = {firstInputIndex, secondInputIndex, /*activation scalar*/ 0},
                .outputs = {outputIndex},
        };
    }

    // For TestOperationType::ADD, output = 1 + 1 * len = len + 1
    // For TestOperationType::MUL, output = 1 * 1 ^ len = 1
    CppType outputResult = static_cast<CppType>(op == TestOperationType::ADD ? len + 1u : 1u);

    // The model output (last element of `operands`), with the expected result as
    // reference data for EvaluatePreparedModel.
    operands.back() = {
            .type = operandType,
            .dimensions = {1},
            .numberOfConsumers = 0,
            .scale = scale1,
            .zeroPoint = 0,
            .lifetime = TestOperandLifeTime::MODEL_OUTPUT,
            .data = TestBuffer::createFromVector<CppType>({outputResult}),
    };

    return {
            .main = {.operands = std::move(operands),
                     .operations = std::move(operations),
                     .inputIndexes = {1},
                     .outputIndexes = {len * 2 + 1}},
            .isRelaxed = false,
    };
}
216
Xusong Wang96e68dc2019-01-18 17:28:26 -0800217} // namespace
218
219// Tag for the compilation caching tests.
Michael Butler07633282019-08-29 11:08:25 -0700220class CompilationCachingTestBase : public testing::Test {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800221 protected:
Michael Butler07633282019-08-29 11:08:25 -0700222 CompilationCachingTestBase(sp<IDevice> device, OperandType type)
223 : kDevice(std::move(device)), kOperandType(type) {}
Xusong Wang0e0721f2019-05-07 12:57:49 -0700224
Xusong Wang96e68dc2019-01-18 17:28:26 -0800225 void SetUp() override {
Michael Butler07633282019-08-29 11:08:25 -0700226 testing::Test::SetUp();
Michael Butler13b05162019-08-29 22:17:24 -0700227 ASSERT_NE(kDevice.get(), nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800228
Xusong Wanged0822b2019-02-25 16:58:58 -0800229 // Create cache directory. The cache directory and a temporary cache file is always created
230 // to test the behavior of prepareModelFromCache, even when caching is not supported.
Xusong Wang96e68dc2019-01-18 17:28:26 -0800231 char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
232 char* cacheDir = mkdtemp(cacheDirTemp);
233 ASSERT_NE(cacheDir, nullptr);
Xusong Wang6824cc12019-02-12 18:00:37 -0800234 mCacheDir = cacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800235 mCacheDir.push_back('/');
Xusong Wang6824cc12019-02-12 18:00:37 -0800236
Michael Butler13b05162019-08-29 22:17:24 -0700237 Return<void> ret = kDevice->getNumberOfCacheFilesNeeded(
Xusong Wanged0822b2019-02-25 16:58:58 -0800238 [this](ErrorStatus status, uint32_t numModelCache, uint32_t numDataCache) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800239 EXPECT_EQ(ErrorStatus::NONE, status);
Xusong Wanged0822b2019-02-25 16:58:58 -0800240 mNumModelCache = numModelCache;
241 mNumDataCache = numDataCache;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800242 });
243 EXPECT_TRUE(ret.isOk());
Xusong Wanged0822b2019-02-25 16:58:58 -0800244 mIsCachingSupported = mNumModelCache > 0 || mNumDataCache > 0;
245
246 // Create empty cache files.
247 mTmpCache = mCacheDir + "tmp";
248 for (uint32_t i = 0; i < mNumModelCache; i++) {
249 mModelCache.push_back({mCacheDir + "model" + std::to_string(i)});
250 }
251 for (uint32_t i = 0; i < mNumDataCache; i++) {
252 mDataCache.push_back({mCacheDir + "data" + std::to_string(i)});
253 }
254 // Dummy handles, use AccessMode::WRITE_ONLY for createCacheHandles to create files.
255 hidl_vec<hidl_handle> modelHandle, dataHandle, tmpHandle;
256 createCacheHandles(mModelCache, AccessMode::WRITE_ONLY, &modelHandle);
257 createCacheHandles(mDataCache, AccessMode::WRITE_ONLY, &dataHandle);
258 createCacheHandles({{mTmpCache}}, AccessMode::WRITE_ONLY, &tmpHandle);
259
260 if (!mIsCachingSupported) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800261 LOG(INFO) << "NN VTS: Early termination of test because vendor service does not "
262 "support compilation caching.";
263 std::cout << "[ ] Early termination of test because vendor service does not "
264 "support compilation caching."
265 << std::endl;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800266 }
Xusong Wang6824cc12019-02-12 18:00:37 -0800267 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800268
Xusong Wang6824cc12019-02-12 18:00:37 -0800269 void TearDown() override {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700270 // If the test passes, remove the tmp directory. Otherwise, keep it for debugging purposes.
Michael Butler13b05162019-08-29 22:17:24 -0700271 if (!testing::Test::HasFailure()) {
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700272 // Recursively remove the cache directory specified by mCacheDir.
273 auto callback = [](const char* entry, const struct stat*, int, struct FTW*) {
274 return remove(entry);
275 };
276 nftw(mCacheDir.c_str(), callback, 128, FTW_DEPTH | FTW_MOUNT | FTW_PHYS);
Xusong Wang6824cc12019-02-12 18:00:37 -0800277 }
Michael Butler07633282019-08-29 11:08:25 -0700278 testing::Test::TearDown();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800279 }
280
Xusong Wang0e0721f2019-05-07 12:57:49 -0700281 // Model and examples creators. According to kOperandType, the following methods will return
282 // either float32 model/examples or the quant8 variant.
Xusong Wangead950d2019-08-09 16:45:24 -0700283 TestModel createTestModel() {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700284 if (kOperandType == OperandType::TENSOR_FLOAT32) {
Xusong Wangead950d2019-08-09 16:45:24 -0700285 return float32_model::get_test_model();
Xusong Wang0e0721f2019-05-07 12:57:49 -0700286 } else {
Xusong Wangead950d2019-08-09 16:45:24 -0700287 return quant8_model::get_test_model();
Xusong Wang0e0721f2019-05-07 12:57:49 -0700288 }
289 }
290
Xusong Wangead950d2019-08-09 16:45:24 -0700291 TestModel createLargeTestModel(OperationType op, uint32_t len) {
Xusong Wang0e0721f2019-05-07 12:57:49 -0700292 if (kOperandType == OperandType::TENSOR_FLOAT32) {
Xusong Wangead950d2019-08-09 16:45:24 -0700293 return createLargeTestModelImpl<float, TestOperandType::TENSOR_FLOAT32>(
294 static_cast<TestOperationType>(op), len);
Xusong Wang0e0721f2019-05-07 12:57:49 -0700295 } else {
Xusong Wangead950d2019-08-09 16:45:24 -0700296 return createLargeTestModelImpl<uint8_t, TestOperandType::TENSOR_QUANT8_ASYMM>(
297 static_cast<TestOperationType>(op), len);
Xusong Wang0e0721f2019-05-07 12:57:49 -0700298 }
299 }
300
Xusong Wang4f71afc2019-04-26 15:33:38 -0700301 // See if the service can handle the model.
Michael Butler62749b92019-08-26 23:55:47 -0700302 bool isModelFullySupported(const Model& model) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800303 bool fullySupportsModel = false;
Michael Butler13b05162019-08-29 22:17:24 -0700304 Return<void> supportedCall = kDevice->getSupportedOperations_1_2(
Xusong Wanged0822b2019-02-25 16:58:58 -0800305 model,
306 [&fullySupportsModel, &model](ErrorStatus status, const hidl_vec<bool>& supported) {
307 ASSERT_EQ(ErrorStatus::NONE, status);
308 ASSERT_EQ(supported.size(), model.operations.size());
309 fullySupportsModel = std::all_of(supported.begin(), supported.end(),
310 [](bool valid) { return valid; });
311 });
Xusong Wang4f71afc2019-04-26 15:33:38 -0700312 EXPECT_TRUE(supportedCall.isOk());
313 return fullySupportsModel;
314 }
315
Michael Butler62749b92019-08-26 23:55:47 -0700316 void saveModelToCache(const Model& model, const hidl_vec<hidl_handle>& modelCache,
Xusong Wang4f71afc2019-04-26 15:33:38 -0700317 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang0b617ae2020-06-09 10:42:12 -0700318 sp<IPreparedModel>* preparedModel = nullptr,
319 bool allowGeneralFailure = false) {
Xusong Wang4f71afc2019-04-26 15:33:38 -0700320 if (preparedModel != nullptr) *preparedModel = nullptr;
Xusong Wanged0822b2019-02-25 16:58:58 -0800321
322 // Launch prepare model.
323 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800324 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Xusong Wanged0822b2019-02-25 16:58:58 -0800325 Return<ErrorStatus> prepareLaunchStatus =
Michael Butler13b05162019-08-29 22:17:24 -0700326 kDevice->prepareModel_1_2(model, ExecutionPreference::FAST_SINGLE_ANSWER,
327 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wanged0822b2019-02-25 16:58:58 -0800328 ASSERT_TRUE(prepareLaunchStatus.isOk());
329 ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus), ErrorStatus::NONE);
330
331 // Retrieve prepared model.
332 preparedModelCallback->wait();
Xusong Wang0b617ae2020-06-09 10:42:12 -0700333 const auto prepareCallbackStatus = preparedModelCallback->getStatus();
334 if (!allowGeneralFailure || prepareCallbackStatus != ErrorStatus::GENERAL_FAILURE) {
335 ASSERT_EQ(prepareCallbackStatus, ErrorStatus::NONE);
336 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800337 if (preparedModel != nullptr) {
Michael Butler62749b92019-08-26 23:55:47 -0700338 *preparedModel = IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
339 .withDefault(nullptr);
Xusong Wanged0822b2019-02-25 16:58:58 -0800340 }
Xusong Wang96e68dc2019-01-18 17:28:26 -0800341 }
342
343 bool checkEarlyTermination(ErrorStatus status) {
344 if (status == ErrorStatus::GENERAL_FAILURE) {
345 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
346 "save the prepared model that it does not support.";
347 std::cout << "[ ] Early termination of test because vendor service cannot "
348 "save the prepared model that it does not support."
349 << std::endl;
350 return true;
351 }
352 return false;
353 }
354
Michael Butler62749b92019-08-26 23:55:47 -0700355 bool checkEarlyTermination(const Model& model) {
Xusong Wang4f71afc2019-04-26 15:33:38 -0700356 if (!isModelFullySupported(model)) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800357 LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
358 "prepare model that it does not support.";
359 std::cout << "[ ] Early termination of test because vendor service cannot "
360 "prepare model that it does not support."
361 << std::endl;
362 return true;
363 }
364 return false;
365 }
366
367 void prepareModelFromCache(const hidl_vec<hidl_handle>& modelCache,
368 const hidl_vec<hidl_handle>& dataCache,
Xusong Wang96e68dc2019-01-18 17:28:26 -0800369 sp<IPreparedModel>* preparedModel, ErrorStatus* status) {
370 // Launch prepare model from cache.
371 sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
Xusong Wang96e68dc2019-01-18 17:28:26 -0800372 hidl_array<uint8_t, sizeof(mToken)> cacheToken(mToken);
Michael Butler13b05162019-08-29 22:17:24 -0700373 Return<ErrorStatus> prepareLaunchStatus = kDevice->prepareModelFromCache(
Xusong Wanged0822b2019-02-25 16:58:58 -0800374 modelCache, dataCache, cacheToken, preparedModelCallback);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800375 ASSERT_TRUE(prepareLaunchStatus.isOk());
376 if (static_cast<ErrorStatus>(prepareLaunchStatus) != ErrorStatus::NONE) {
377 *preparedModel = nullptr;
378 *status = static_cast<ErrorStatus>(prepareLaunchStatus);
379 return;
380 }
381
382 // Retrieve prepared model.
383 preparedModelCallback->wait();
384 *status = preparedModelCallback->getStatus();
Michael Butler62749b92019-08-26 23:55:47 -0700385 *preparedModel = IPreparedModel::castFrom(preparedModelCallback->getPreparedModel())
Xusong Wang96e68dc2019-01-18 17:28:26 -0800386 .withDefault(nullptr);
387 }
388
Xusong Wanged0822b2019-02-25 16:58:58 -0800389 // Absolute path to the temporary cache directory.
Xusong Wang6824cc12019-02-12 18:00:37 -0800390 std::string mCacheDir;
Xusong Wanged0822b2019-02-25 16:58:58 -0800391
392 // Groups of file paths for model and data cache in the tmp cache directory, initialized with
393 // outer_size = mNum{Model|Data}Cache, inner_size = 1. The outer vector corresponds to handles
394 // and the inner vector is for fds held by each handle.
395 std::vector<std::vector<std::string>> mModelCache;
396 std::vector<std::vector<std::string>> mDataCache;
397
398 // A separate temporary file path in the tmp cache directory.
399 std::string mTmpCache;
400
Xusong Wang96e68dc2019-01-18 17:28:26 -0800401 uint8_t mToken[static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)] = {};
Xusong Wanged0822b2019-02-25 16:58:58 -0800402 uint32_t mNumModelCache;
403 uint32_t mNumDataCache;
404 uint32_t mIsCachingSupported;
Xusong Wang0e0721f2019-05-07 12:57:49 -0700405
Michael Butler07633282019-08-29 11:08:25 -0700406 const sp<IDevice> kDevice;
Xusong Wang0e0721f2019-05-07 12:57:49 -0700407 // The primary data type of the testModel.
408 const OperandType kOperandType;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800409};
410
// Test parameter: (driver under test, primary operand type for the models).
using CompilationCachingTestParam = std::tuple<NamedDevice, OperandType>;

// A parameterized fixture of CompilationCachingTestBase. Every test will run twice, with the first
// pass running with float32 models and the second pass running with quant8 models.
class CompilationCachingTest : public CompilationCachingTestBase,
                               public testing::WithParamInterface<CompilationCachingTestParam> {
  protected:
    // Resolves the named device from the test parameter and forwards it to the base fixture.
    CompilationCachingTest()
        : CompilationCachingTestBase(getData(std::get<NamedDevice>(GetParam())),
                                     std::get<OperandType>(GetParam())) {}
};
422
423TEST_P(CompilationCachingTest, CacheSavingAndRetrieval) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800424 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700425 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700426 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700427 if (checkEarlyTermination(model)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800428 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800429
430 // Save the compilation to cache.
431 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800432 hidl_vec<hidl_handle> modelCache, dataCache;
433 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
434 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700435 saveModelToCache(model, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800436 }
437
438 // Retrieve preparedModel from cache.
439 {
440 preparedModel = nullptr;
441 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800442 hidl_vec<hidl_handle> modelCache, dataCache;
443 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
444 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
445 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800446 if (!mIsCachingSupported) {
447 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
448 ASSERT_EQ(preparedModel, nullptr);
449 return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800450 } else if (checkEarlyTermination(status)) {
451 ASSERT_EQ(preparedModel, nullptr);
452 return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800453 } else {
454 ASSERT_EQ(status, ErrorStatus::NONE);
455 ASSERT_NE(preparedModel, nullptr);
456 }
457 }
458
459 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700460 EvaluatePreparedModel(preparedModel, testModel,
461 /*testDynamicOutputShape=*/false);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800462}
463
// Same round trip as CacheSavingAndRetrieval, but exercises drivers' handling of cache fds
// whose files are non-empty and whose read/write offsets are not at the start of the file.
TEST_P(CompilationCachingTest, CacheSavingAndRetrievalNonZeroOffset) {
    // Create test HIDL model and compile.
    const TestModel& testModel = createTestModel();
    const Model model = createModel(testModel);
    if (checkEarlyTermination(model)) return;
    sp<IPreparedModel> preparedModel = nullptr;

    // Save the compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyBytes[] = {0, 0};
        // Write a dummy integer to the cache.
        // The driver should be able to handle non-empty cache and non-zero fd offset.
        // (write() advances each fd's offset past the dummy bytes before the driver sees it.)
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_EQ(write(modelCache[i].getNativeHandle()->data[0], &dummyBytes,
                            sizeof(dummyBytes)),
                      sizeof(dummyBytes));
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_EQ(
                    write(dataCache[i].getNativeHandle()->data[0], &dummyBytes, sizeof(dummyBytes)),
                    sizeof(dummyBytes));
        }
        saveModelToCache(model, modelCache, dataCache);
    }

    // Retrieve preparedModel from cache.
    {
        preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        uint8_t dummyByte = 0;
        // Advance the offset of each handle by one byte.
        // The driver should be able to handle non-zero fd offset.
        for (uint32_t i = 0; i < modelCache.size(); i++) {
            ASSERT_GE(read(modelCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        for (uint32_t i = 0; i < dataCache.size(); i++) {
            ASSERT_GE(read(dataCache[i].getNativeHandle()->data[0], &dummyByte, 1), 0);
        }
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        if (!mIsCachingSupported) {
            // A driver without caching support must reject prepareModelFromCache.
            ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else if (checkEarlyTermination(status)) {
            ASSERT_EQ(preparedModel, nullptr);
            return;
        } else {
            ASSERT_EQ(status, ErrorStatus::NONE);
            ASSERT_NE(preparedModel, nullptr);
        }
    }

    // Execute and verify results.
    EvaluatePreparedModel(preparedModel, testModel,
                          /*testDynamicOutputShape=*/false);
}
526
Xusong Wang0e0721f2019-05-07 12:57:49 -0700527TEST_P(CompilationCachingTest, SaveToCacheInvalidNumCache) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800528 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700529 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700530 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700531 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800532
533 // Test with number of model cache files greater than mNumModelCache.
534 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800535 hidl_vec<hidl_handle> modelCache, dataCache;
536 // Pass an additional cache file for model cache.
537 mModelCache.push_back({mTmpCache});
538 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
539 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
540 mModelCache.pop_back();
541 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700542 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800543 ASSERT_NE(preparedModel, nullptr);
544 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700545 EvaluatePreparedModel(preparedModel, testModel,
546 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800547 // Check if prepareModelFromCache fails.
548 preparedModel = nullptr;
549 ErrorStatus status;
550 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
551 if (status != ErrorStatus::INVALID_ARGUMENT) {
552 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
553 }
554 ASSERT_EQ(preparedModel, nullptr);
555 }
556
557 // Test with number of model cache files smaller than mNumModelCache.
558 if (mModelCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800559 hidl_vec<hidl_handle> modelCache, dataCache;
560 // Pop out the last cache file.
561 auto tmp = mModelCache.back();
562 mModelCache.pop_back();
563 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
564 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
565 mModelCache.push_back(tmp);
566 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700567 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800568 ASSERT_NE(preparedModel, nullptr);
569 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700570 EvaluatePreparedModel(preparedModel, testModel,
571 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800572 // Check if prepareModelFromCache fails.
573 preparedModel = nullptr;
574 ErrorStatus status;
575 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
576 if (status != ErrorStatus::INVALID_ARGUMENT) {
577 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
578 }
579 ASSERT_EQ(preparedModel, nullptr);
580 }
581
582 // Test with number of data cache files greater than mNumDataCache.
583 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800584 hidl_vec<hidl_handle> modelCache, dataCache;
585 // Pass an additional cache file for data cache.
586 mDataCache.push_back({mTmpCache});
587 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
588 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
589 mDataCache.pop_back();
590 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700591 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800592 ASSERT_NE(preparedModel, nullptr);
593 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700594 EvaluatePreparedModel(preparedModel, testModel,
595 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800596 // Check if prepareModelFromCache fails.
597 preparedModel = nullptr;
598 ErrorStatus status;
599 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
600 if (status != ErrorStatus::INVALID_ARGUMENT) {
601 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
602 }
603 ASSERT_EQ(preparedModel, nullptr);
604 }
605
606 // Test with number of data cache files smaller than mNumDataCache.
607 if (mDataCache.size() > 0) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800608 hidl_vec<hidl_handle> modelCache, dataCache;
609 // Pop out the last cache file.
610 auto tmp = mDataCache.back();
611 mDataCache.pop_back();
612 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
613 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
614 mDataCache.push_back(tmp);
615 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700616 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800617 ASSERT_NE(preparedModel, nullptr);
618 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700619 EvaluatePreparedModel(preparedModel, testModel,
620 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800621 // Check if prepareModelFromCache fails.
622 preparedModel = nullptr;
623 ErrorStatus status;
624 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
625 if (status != ErrorStatus::INVALID_ARGUMENT) {
626 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
627 }
628 ASSERT_EQ(preparedModel, nullptr);
629 }
630}
631
Xusong Wang0e0721f2019-05-07 12:57:49 -0700632TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumCache) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800633 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700634 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700635 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700636 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800637
638 // Save the compilation to cache.
639 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800640 hidl_vec<hidl_handle> modelCache, dataCache;
641 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
642 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700643 saveModelToCache(model, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -0800644 }
645
646 // Test with number of model cache files greater than mNumModelCache.
647 {
648 sp<IPreparedModel> preparedModel = nullptr;
649 ErrorStatus status;
650 hidl_vec<hidl_handle> modelCache, dataCache;
651 mModelCache.push_back({mTmpCache});
652 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
653 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
654 mModelCache.pop_back();
655 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
656 if (status != ErrorStatus::GENERAL_FAILURE) {
657 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
658 }
659 ASSERT_EQ(preparedModel, nullptr);
660 }
661
662 // Test with number of model cache files smaller than mNumModelCache.
663 if (mModelCache.size() > 0) {
664 sp<IPreparedModel> preparedModel = nullptr;
665 ErrorStatus status;
666 hidl_vec<hidl_handle> modelCache, dataCache;
667 auto tmp = mModelCache.back();
668 mModelCache.pop_back();
669 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
670 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
671 mModelCache.push_back(tmp);
672 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
673 if (status != ErrorStatus::GENERAL_FAILURE) {
674 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
675 }
676 ASSERT_EQ(preparedModel, nullptr);
677 }
678
679 // Test with number of data cache files greater than mNumDataCache.
680 {
681 sp<IPreparedModel> preparedModel = nullptr;
682 ErrorStatus status;
683 hidl_vec<hidl_handle> modelCache, dataCache;
684 mDataCache.push_back({mTmpCache});
685 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
686 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
687 mDataCache.pop_back();
688 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
689 if (status != ErrorStatus::GENERAL_FAILURE) {
690 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
691 }
692 ASSERT_EQ(preparedModel, nullptr);
693 }
694
695 // Test with number of data cache files smaller than mNumDataCache.
696 if (mDataCache.size() > 0) {
697 sp<IPreparedModel> preparedModel = nullptr;
698 ErrorStatus status;
699 hidl_vec<hidl_handle> modelCache, dataCache;
700 auto tmp = mDataCache.back();
701 mDataCache.pop_back();
702 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
703 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
704 mDataCache.push_back(tmp);
705 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
706 if (status != ErrorStatus::GENERAL_FAILURE) {
707 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
708 }
709 ASSERT_EQ(preparedModel, nullptr);
710 }
711}
712
Xusong Wang0e0721f2019-05-07 12:57:49 -0700713TEST_P(CompilationCachingTest, SaveToCacheInvalidNumFd) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800714 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700715 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700716 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700717 if (checkEarlyTermination(model)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800718
Xusong Wanged0822b2019-02-25 16:58:58 -0800719 // Go through each handle in model cache, test with NumFd greater than 1.
720 for (uint32_t i = 0; i < mNumModelCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800721 hidl_vec<hidl_handle> modelCache, dataCache;
722 // Pass an invalid number of fds for handle i.
723 mModelCache[i].push_back(mTmpCache);
724 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
725 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
726 mModelCache[i].pop_back();
727 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700728 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800729 ASSERT_NE(preparedModel, nullptr);
730 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700731 EvaluatePreparedModel(preparedModel, testModel,
732 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800733 // Check if prepareModelFromCache fails.
734 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800735 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800736 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
737 if (status != ErrorStatus::INVALID_ARGUMENT) {
738 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800739 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800740 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800741 }
742
Xusong Wanged0822b2019-02-25 16:58:58 -0800743 // Go through each handle in model cache, test with NumFd equal to 0.
744 for (uint32_t i = 0; i < mNumModelCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800745 hidl_vec<hidl_handle> modelCache, dataCache;
746 // Pass an invalid number of fds for handle i.
747 auto tmp = mModelCache[i].back();
748 mModelCache[i].pop_back();
749 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
750 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
751 mModelCache[i].push_back(tmp);
752 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700753 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800754 ASSERT_NE(preparedModel, nullptr);
755 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700756 EvaluatePreparedModel(preparedModel, testModel,
757 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800758 // Check if prepareModelFromCache fails.
759 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800760 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800761 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
762 if (status != ErrorStatus::INVALID_ARGUMENT) {
763 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800764 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800765 ASSERT_EQ(preparedModel, nullptr);
766 }
767
768 // Go through each handle in data cache, test with NumFd greater than 1.
769 for (uint32_t i = 0; i < mNumDataCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800770 hidl_vec<hidl_handle> modelCache, dataCache;
771 // Pass an invalid number of fds for handle i.
772 mDataCache[i].push_back(mTmpCache);
773 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
774 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
775 mDataCache[i].pop_back();
776 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700777 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800778 ASSERT_NE(preparedModel, nullptr);
779 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700780 EvaluatePreparedModel(preparedModel, testModel,
781 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800782 // Check if prepareModelFromCache fails.
783 preparedModel = nullptr;
784 ErrorStatus status;
785 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
786 if (status != ErrorStatus::INVALID_ARGUMENT) {
787 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
788 }
789 ASSERT_EQ(preparedModel, nullptr);
790 }
791
792 // Go through each handle in data cache, test with NumFd equal to 0.
793 for (uint32_t i = 0; i < mNumDataCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800794 hidl_vec<hidl_handle> modelCache, dataCache;
795 // Pass an invalid number of fds for handle i.
796 auto tmp = mDataCache[i].back();
797 mDataCache[i].pop_back();
798 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
799 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
800 mDataCache[i].push_back(tmp);
801 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700802 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800803 ASSERT_NE(preparedModel, nullptr);
804 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700805 EvaluatePreparedModel(preparedModel, testModel,
806 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800807 // Check if prepareModelFromCache fails.
808 preparedModel = nullptr;
809 ErrorStatus status;
810 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
811 if (status != ErrorStatus::INVALID_ARGUMENT) {
812 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
813 }
814 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800815 }
816}
817
Xusong Wang0e0721f2019-05-07 12:57:49 -0700818TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidNumFd) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800819 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700820 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700821 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700822 if (checkEarlyTermination(model)) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800823
824 // Save the compilation to cache.
825 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800826 hidl_vec<hidl_handle> modelCache, dataCache;
827 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
828 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700829 saveModelToCache(model, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800830 }
831
Xusong Wanged0822b2019-02-25 16:58:58 -0800832 // Go through each handle in model cache, test with NumFd greater than 1.
833 for (uint32_t i = 0; i < mNumModelCache; i++) {
834 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800835 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800836 hidl_vec<hidl_handle> modelCache, dataCache;
837 mModelCache[i].push_back(mTmpCache);
838 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
839 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
840 mModelCache[i].pop_back();
841 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800842 if (status != ErrorStatus::GENERAL_FAILURE) {
843 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800844 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800845 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800846 }
847
Xusong Wanged0822b2019-02-25 16:58:58 -0800848 // Go through each handle in model cache, test with NumFd equal to 0.
849 for (uint32_t i = 0; i < mNumModelCache; i++) {
850 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800851 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800852 hidl_vec<hidl_handle> modelCache, dataCache;
853 auto tmp = mModelCache[i].back();
854 mModelCache[i].pop_back();
855 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
856 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
857 mModelCache[i].push_back(tmp);
858 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800859 if (status != ErrorStatus::GENERAL_FAILURE) {
860 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800861 }
Xusong Wanged0822b2019-02-25 16:58:58 -0800862 ASSERT_EQ(preparedModel, nullptr);
863 }
864
865 // Go through each handle in data cache, test with NumFd greater than 1.
866 for (uint32_t i = 0; i < mNumDataCache; i++) {
867 sp<IPreparedModel> preparedModel = nullptr;
868 ErrorStatus status;
869 hidl_vec<hidl_handle> modelCache, dataCache;
870 mDataCache[i].push_back(mTmpCache);
871 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
872 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
873 mDataCache[i].pop_back();
874 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
875 if (status != ErrorStatus::GENERAL_FAILURE) {
876 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
877 }
878 ASSERT_EQ(preparedModel, nullptr);
879 }
880
881 // Go through each handle in data cache, test with NumFd equal to 0.
882 for (uint32_t i = 0; i < mNumDataCache; i++) {
883 sp<IPreparedModel> preparedModel = nullptr;
884 ErrorStatus status;
885 hidl_vec<hidl_handle> modelCache, dataCache;
886 auto tmp = mDataCache[i].back();
887 mDataCache[i].pop_back();
888 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
889 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
890 mDataCache[i].push_back(tmp);
891 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
892 if (status != ErrorStatus::GENERAL_FAILURE) {
893 ASSERT_EQ(status, ErrorStatus::INVALID_ARGUMENT);
894 }
895 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800896 }
897}
898
Xusong Wang0e0721f2019-05-07 12:57:49 -0700899TEST_P(CompilationCachingTest, SaveToCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800900 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700901 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700902 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700903 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800904 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
905 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800906
Xusong Wanged0822b2019-02-25 16:58:58 -0800907 // Go through each handle in model cache, test with invalid access mode.
908 for (uint32_t i = 0; i < mNumModelCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800909 hidl_vec<hidl_handle> modelCache, dataCache;
910 modelCacheMode[i] = AccessMode::READ_ONLY;
911 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
912 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
913 modelCacheMode[i] = AccessMode::READ_WRITE;
914 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700915 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800916 ASSERT_NE(preparedModel, nullptr);
917 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700918 EvaluatePreparedModel(preparedModel, testModel,
919 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800920 // Check if prepareModelFromCache fails.
921 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800922 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800923 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
924 if (status != ErrorStatus::INVALID_ARGUMENT) {
925 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
926 }
927 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800928 }
929
Xusong Wanged0822b2019-02-25 16:58:58 -0800930 // Go through each handle in data cache, test with invalid access mode.
931 for (uint32_t i = 0; i < mNumDataCache; i++) {
Xusong Wanged0822b2019-02-25 16:58:58 -0800932 hidl_vec<hidl_handle> modelCache, dataCache;
933 dataCacheMode[i] = AccessMode::READ_ONLY;
934 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
935 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
936 dataCacheMode[i] = AccessMode::READ_WRITE;
937 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wangead950d2019-08-09 16:45:24 -0700938 saveModelToCache(model, modelCache, dataCache, &preparedModel);
Xusong Wanged0822b2019-02-25 16:58:58 -0800939 ASSERT_NE(preparedModel, nullptr);
940 // Execute and verify results.
Xusong Wangbcaa7822019-08-23 16:10:54 -0700941 EvaluatePreparedModel(preparedModel, testModel,
942 /*testDynamicOutputShape=*/false);
Xusong Wanged0822b2019-02-25 16:58:58 -0800943 // Check if prepareModelFromCache fails.
944 preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800945 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800946 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
947 if (status != ErrorStatus::INVALID_ARGUMENT) {
948 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
949 }
950 ASSERT_EQ(preparedModel, nullptr);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800951 }
952}
953
Xusong Wang0e0721f2019-05-07 12:57:49 -0700954TEST_P(CompilationCachingTest, PrepareModelFromCacheInvalidAccessMode) {
Xusong Wang96e68dc2019-01-18 17:28:26 -0800955 // Create test HIDL model and compile.
Xusong Wangead950d2019-08-09 16:45:24 -0700956 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -0700957 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -0700958 if (checkEarlyTermination(model)) return;
Xusong Wanged0822b2019-02-25 16:58:58 -0800959 std::vector<AccessMode> modelCacheMode(mNumModelCache, AccessMode::READ_WRITE);
960 std::vector<AccessMode> dataCacheMode(mNumDataCache, AccessMode::READ_WRITE);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800961
962 // Save the compilation to cache.
963 {
Xusong Wanged0822b2019-02-25 16:58:58 -0800964 hidl_vec<hidl_handle> modelCache, dataCache;
965 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
966 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -0700967 saveModelToCache(model, modelCache, dataCache);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800968 }
969
Xusong Wanged0822b2019-02-25 16:58:58 -0800970 // Go through each handle in model cache, test with invalid access mode.
971 for (uint32_t i = 0; i < mNumModelCache; i++) {
972 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800973 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800974 hidl_vec<hidl_handle> modelCache, dataCache;
975 modelCacheMode[i] = AccessMode::WRITE_ONLY;
976 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
977 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
978 modelCacheMode[i] = AccessMode::READ_WRITE;
979 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800980 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
981 ASSERT_EQ(preparedModel, nullptr);
982 }
983
Xusong Wanged0822b2019-02-25 16:58:58 -0800984 // Go through each handle in data cache, test with invalid access mode.
985 for (uint32_t i = 0; i < mNumDataCache; i++) {
986 sp<IPreparedModel> preparedModel = nullptr;
Xusong Wang96e68dc2019-01-18 17:28:26 -0800987 ErrorStatus status;
Xusong Wanged0822b2019-02-25 16:58:58 -0800988 hidl_vec<hidl_handle> modelCache, dataCache;
989 dataCacheMode[i] = AccessMode::WRITE_ONLY;
990 createCacheHandles(mModelCache, modelCacheMode, &modelCache);
991 createCacheHandles(mDataCache, dataCacheMode, &dataCache);
992 dataCacheMode[i] = AccessMode::READ_WRITE;
993 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wang96e68dc2019-01-18 17:28:26 -0800994 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
995 ASSERT_EQ(preparedModel, nullptr);
996 }
997}
998
Xusong Wang7cc0ccc2019-04-23 14:28:17 -0700999// Copy file contents between file groups.
1000// The outer vector corresponds to handles and the inner vector is for fds held by each handle.
1001// The outer vector sizes must match and the inner vectors must have size = 1.
1002static void copyCacheFiles(const std::vector<std::vector<std::string>>& from,
1003 const std::vector<std::vector<std::string>>& to) {
1004 constexpr size_t kBufferSize = 1000000;
1005 uint8_t buffer[kBufferSize];
1006
1007 ASSERT_EQ(from.size(), to.size());
1008 for (uint32_t i = 0; i < from.size(); i++) {
1009 ASSERT_EQ(from[i].size(), 1u);
1010 ASSERT_EQ(to[i].size(), 1u);
1011 int fromFd = open(from[i][0].c_str(), O_RDONLY);
1012 int toFd = open(to[i][0].c_str(), O_WRONLY | O_CREAT, S_IRUSR | S_IWUSR);
1013 ASSERT_GE(fromFd, 0);
1014 ASSERT_GE(toFd, 0);
1015
1016 ssize_t readBytes;
1017 while ((readBytes = read(fromFd, &buffer, kBufferSize)) > 0) {
1018 ASSERT_EQ(write(toFd, &buffer, readBytes), readBytes);
1019 }
1020 ASSERT_GE(readBytes, 0);
1021
1022 close(fromFd);
1023 close(toFd);
1024 }
1025}
1026
// Number of operations in the large test model.
constexpr uint32_t kLargeModelSize = 100;
// The TOCTOU tests below loop until this many iterations have produced a
// usable prepared model, to guarantee enough coverage of the race window.
constexpr uint32_t kNumSuccessfulIterationsTOCTOU = 100;
// Upper bound on iterations whose compilation fails outright; exceeding it
// fails the TOCTOU test (see the ASSERT_LE in the loops below).
constexpr uint32_t kMaxNumFailedIterationsTOCTOU = 100;
Xusong Wang7cc0ccc2019-04-23 14:28:17 -07001031
// Time-of-check/time-of-use (TOCTOU) test on the save path: while the driver
// is writing the modelAdd compilation to the cache files, a second thread
// concurrently overwrites those same files with modelMul's cached content.
// A later prepareModelFromCache must either fail cleanly or yield a prepared
// model that still computes modelAdd's results -- it must never crash or
// silently execute the wrong (modelMul) compilation.
TEST_P(CompilationCachingTest, SaveToCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    const Model modelMul = createModel(testModelMul);
    if (checkEarlyTermination(modelMul)) return;
    const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    const Model modelAdd = createModel(testModelAdd);
    if (checkEarlyTermination(modelAdd)) return;

    // Save the modelMul compilation to cache.
    // Use distinct "_mul"-suffixed filenames so modelMul's cached files survive
    // and can be copied over modelAdd's cache during the race below.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelMul, modelCache, dataCache);
    }

    // Use a different token for modelAdd.
    // NOTE(review): assumes the driver keys cache entries on mToken -- confirm
    // against the fixture's saveModelToCache implementation.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times. We allow the compilation to fail
    // because it is not related to the security aspect of the TOCTOU test. However, we need to have
    // enough successful iterations to ensure the test coverage.
    uint32_t numSuccessfulIterations = 0, numFailedIterations = 0;
    while (numSuccessfulIterations < kNumSuccessfulIterationsTOCTOU) {
        // Save the modelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            sp<IPreparedModel> preparedModel = nullptr;
            // Spawn a thread to copy the cache content concurrently while saving to cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            saveModelToCache(modelAdd, modelCache, dataCache, &preparedModel,
                             /*allowGeneralFailure=*/true);
            thread.join();

            // A null preparedModel means this iteration's compilation failed,
            // which is tolerated up to kMaxNumFailedIterationsTOCTOU times.
            if (preparedModel == nullptr) {
                numFailedIterations++;
                ASSERT_LE(numFailedIterations, kMaxNumFailedIterationsTOCTOU);
            } else {
                numSuccessfulIterations++;
            }
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                EvaluatePreparedModel(preparedModel, testModelAdd,
                                      /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1105
// Time-of-check/time-of-use (TOCTOU) test on the load path: while the driver
// is reading the cache files inside prepareModelFromCache, a second thread
// concurrently overwrites those files with modelMul's cached content. The
// preparation must either fail cleanly or yield a prepared model that still
// computes modelAdd's results -- it must never crash or silently execute the
// wrong (modelMul) compilation.
TEST_P(CompilationCachingTest, PrepareFromCache_TOCTOU) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    const Model modelMul = createModel(testModelMul);
    if (checkEarlyTermination(modelMul)) return;
    const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    const Model modelAdd = createModel(testModelAdd);
    if (checkEarlyTermination(modelAdd)) return;

    // Save the modelMul compilation to cache.
    // Use distinct "_mul"-suffixed filenames so modelMul's cached files survive
    // and can be copied over modelAdd's cache during the race below.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelMul, modelCache, dataCache);
    }

    // Use a different token for modelAdd.
    // NOTE(review): assumes the driver keys cache entries on mToken -- confirm
    // against the fixture's saveModelToCache implementation.
    mToken[0]++;

    // This test is probabilistic, so we run it multiple times. We allow the compilation to fail
    // because it is not related to the security aspect of the TOCTOU test. However, we need to have
    // enough successful iterations to ensure the test coverage.
    uint32_t numSuccessfulIterations = 0, numFailedIterations = 0;
    while (numSuccessfulIterations < kNumSuccessfulIterationsTOCTOU) {
        // Save the modelAdd compilation to cache.
        {
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
            sp<IPreparedModel> preparedModel = nullptr;
            saveModelToCache(modelAdd, modelCache, dataCache, &preparedModel,
                             /*allowGeneralFailure=*/true);

            // A null preparedModel means this iteration's compilation failed,
            // which is tolerated up to kMaxNumFailedIterationsTOCTOU times.
            if (preparedModel == nullptr) {
                numFailedIterations++;
                ASSERT_LE(numFailedIterations, kMaxNumFailedIterationsTOCTOU);
            } else {
                numSuccessfulIterations++;
            }
        }

        // Retrieve preparedModel from cache.
        {
            sp<IPreparedModel> preparedModel = nullptr;
            ErrorStatus status;
            hidl_vec<hidl_handle> modelCache, dataCache;
            createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
            createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);

            // Spawn a thread to copy the cache content concurrently while preparing from cache.
            std::thread thread(copyCacheFiles, std::cref(modelCacheMul), std::cref(mModelCache));
            prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
            thread.join();

            // The preparation may fail or succeed, but must not crash. If the preparation succeeds,
            // the prepared model must be executed with the correct result and not crash.
            if (status != ErrorStatus::NONE) {
                ASSERT_EQ(preparedModel, nullptr);
            } else {
                ASSERT_NE(preparedModel, nullptr);
                EvaluatePreparedModel(preparedModel, testModelAdd,
                                      /*testDynamicOutputShape=*/false);
            }
        }
    }
}
1179
// Saves two different compilations (MUL and ADD models) under two different
// cache tokens, then overwrites the ADD model's security-sensitive model cache
// files with the MUL model's. Because the cache content no longer matches the
// token it is retrieved under, the driver must reject the preparation with
// GENERAL_FAILURE rather than silently prepare the wrong model.
TEST_P(CompilationCachingTest, ReplaceSecuritySensitiveCache) {
    if (!mIsCachingSupported) return;

    // Create test models and check if fully supported by the service.
    const TestModel testModelMul = createLargeTestModel(OperationType::MUL, kLargeModelSize);
    const Model modelMul = createModel(testModelMul);
    if (checkEarlyTermination(modelMul)) return;
    const TestModel testModelAdd = createLargeTestModel(OperationType::ADD, kLargeModelSize);
    const Model modelAdd = createModel(testModelAdd);
    if (checkEarlyTermination(modelAdd)) return;

    // Save the modelMul compilation to cache.
    // Use distinct file names (suffix "_mul") so the MUL cache files survive
    // while the ADD model is saved below under the default names.
    auto modelCacheMul = mModelCache;
    for (auto& cache : modelCacheMul) {
        cache[0].append("_mul");
    }
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(modelCacheMul, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelMul, modelCache, dataCache);
    }

    // Use a different token for modelAdd.
    mToken[0]++;

    // Save the modelAdd compilation to cache.
    {
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        saveModelToCache(modelAdd, modelCache, dataCache);
    }

    // Replace the model cache of modelAdd with modelMul.
    copyCacheFiles(modelCacheMul, mModelCache);

    // Retrieve the preparedModel from cache, expect failure: the cache
    // content (MUL) does not correspond to the token (ADD).
    {
        sp<IPreparedModel> preparedModel = nullptr;
        ErrorStatus status;
        hidl_vec<hidl_handle> modelCache, dataCache;
        createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
        createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
        prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
        ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
        ASSERT_EQ(preparedModel, nullptr);
    }
}
1229
// Parameter axes shared by the caching test instantiations below: every
// registered NN device is crossed with the two operand types used to build
// the generated test models.
static const auto kNamedDeviceChoices = testing::ValuesIn(getNamedDevices());
static const auto kOperandTypeChoices =
        testing::Values(OperandType::TENSOR_FLOAT32, OperandType::TENSOR_QUANT8_ASYMM);
Xusong Wang0e0721f2019-05-07 12:57:49 -07001233
Michael Butler07633282019-08-29 11:08:25 -07001234std::string printCompilationCachingTest(
1235 const testing::TestParamInfo<CompilationCachingTestParam>& info) {
1236 const auto& [namedDevice, operandType] = info.param;
1237 const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8");
1238 return gtestCompliantName(getName(namedDevice) + "_" + type);
1239}
1240
// Instantiate the caching tests for each (device, operand type) combination;
// test names are produced by printCompilationCachingTest above.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingTest,
                        testing::Combine(kNamedDeviceChoices, kOperandTypeChoices),
                        printCompilationCachingTest);
1244
1245using CompilationCachingSecurityTestParam = std::tuple<NamedDevice, OperandType, uint32_t>;
Xusong Wang0e0721f2019-05-07 12:57:49 -07001246
1247class CompilationCachingSecurityTest
1248 : public CompilationCachingTestBase,
Michael Butler07633282019-08-29 11:08:25 -07001249 public testing::WithParamInterface<CompilationCachingSecurityTestParam> {
Xusong Wang96e68dc2019-01-18 17:28:26 -08001250 protected:
Michael Butler07633282019-08-29 11:08:25 -07001251 CompilationCachingSecurityTest()
1252 : CompilationCachingTestBase(getData(std::get<NamedDevice>(GetParam())),
1253 std::get<OperandType>(GetParam())) {}
Xusong Wang0e0721f2019-05-07 12:57:49 -07001254
Xusong Wang96e68dc2019-01-18 17:28:26 -08001255 void SetUp() {
Xusong Wang0e0721f2019-05-07 12:57:49 -07001256 CompilationCachingTestBase::SetUp();
Xusong Wang96e68dc2019-01-18 17:28:26 -08001257 generator.seed(kSeed);
1258 }
1259
1260 // Get a random integer within a closed range [lower, upper].
1261 template <typename T>
1262 T getRandomInt(T lower, T upper) {
1263 std::uniform_int_distribution<T> dis(lower, upper);
1264 return dis(generator);
1265 }
1266
Xusong Wange371f6f2019-04-23 14:51:50 -07001267 // Randomly flip one single bit of the cache entry.
1268 void flipOneBitOfCache(const std::string& filename, bool* skip) {
1269 FILE* pFile = fopen(filename.c_str(), "r+");
Xusong Wanged0822b2019-02-25 16:58:58 -08001270 ASSERT_EQ(fseek(pFile, 0, SEEK_END), 0);
1271 long int fileSize = ftell(pFile);
1272 if (fileSize == 0) {
1273 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001274 *skip = true;
1275 return;
Xusong Wanged0822b2019-02-25 16:58:58 -08001276 }
1277 ASSERT_EQ(fseek(pFile, getRandomInt(0l, fileSize - 1), SEEK_SET), 0);
1278 int readByte = fgetc(pFile);
1279 ASSERT_NE(readByte, EOF);
1280 ASSERT_EQ(fseek(pFile, -1, SEEK_CUR), 0);
1281 ASSERT_NE(fputc(static_cast<uint8_t>(readByte) ^ (1U << getRandomInt(0, 7)), pFile), EOF);
1282 fclose(pFile);
Xusong Wange371f6f2019-04-23 14:51:50 -07001283 *skip = false;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001284 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001285
Xusong Wange371f6f2019-04-23 14:51:50 -07001286 // Randomly append bytes to the cache entry.
1287 void appendBytesToCache(const std::string& filename, bool* skip) {
1288 FILE* pFile = fopen(filename.c_str(), "a");
1289 uint32_t appendLength = getRandomInt(1, 256);
1290 for (uint32_t i = 0; i < appendLength; i++) {
1291 ASSERT_NE(fputc(getRandomInt<uint8_t>(0, 255), pFile), EOF);
1292 }
1293 fclose(pFile);
1294 *skip = false;
1295 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001296
Xusong Wange371f6f2019-04-23 14:51:50 -07001297 enum class ExpectedResult { GENERAL_FAILURE, NOT_CRASH };
Xusong Wang96e68dc2019-01-18 17:28:26 -08001298
Xusong Wange371f6f2019-04-23 14:51:50 -07001299 // Test if the driver behaves as expected when given corrupted cache or token.
1300 // The modifier will be invoked after save to cache but before prepare from cache.
1301 // The modifier accepts one pointer argument "skip" as the returning value, indicating
1302 // whether the test should be skipped or not.
1303 void testCorruptedCache(ExpectedResult expected, std::function<void(bool*)> modifier) {
Xusong Wangead950d2019-08-09 16:45:24 -07001304 const TestModel& testModel = createTestModel();
Xusong Wangbcaa7822019-08-23 16:10:54 -07001305 const Model model = createModel(testModel);
Xusong Wangead950d2019-08-09 16:45:24 -07001306 if (checkEarlyTermination(model)) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001307
Xusong Wanged0822b2019-02-25 16:58:58 -08001308 // Save the compilation to cache.
1309 {
Xusong Wanged0822b2019-02-25 16:58:58 -08001310 hidl_vec<hidl_handle> modelCache, dataCache;
1311 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1312 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
Xusong Wangead950d2019-08-09 16:45:24 -07001313 saveModelToCache(model, modelCache, dataCache);
Xusong Wanged0822b2019-02-25 16:58:58 -08001314 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001315
Xusong Wange371f6f2019-04-23 14:51:50 -07001316 bool skip = false;
1317 modifier(&skip);
1318 if (skip) return;
Xusong Wang96e68dc2019-01-18 17:28:26 -08001319
Xusong Wange371f6f2019-04-23 14:51:50 -07001320 // Retrieve preparedModel from cache.
Xusong Wanged0822b2019-02-25 16:58:58 -08001321 {
1322 sp<IPreparedModel> preparedModel = nullptr;
1323 ErrorStatus status;
1324 hidl_vec<hidl_handle> modelCache, dataCache;
1325 createCacheHandles(mModelCache, AccessMode::READ_WRITE, &modelCache);
1326 createCacheHandles(mDataCache, AccessMode::READ_WRITE, &dataCache);
1327 prepareModelFromCache(modelCache, dataCache, &preparedModel, &status);
Xusong Wange371f6f2019-04-23 14:51:50 -07001328
1329 switch (expected) {
1330 case ExpectedResult::GENERAL_FAILURE:
1331 ASSERT_EQ(status, ErrorStatus::GENERAL_FAILURE);
1332 ASSERT_EQ(preparedModel, nullptr);
1333 break;
1334 case ExpectedResult::NOT_CRASH:
1335 ASSERT_EQ(preparedModel == nullptr, status != ErrorStatus::NONE);
1336 break;
1337 default:
1338 FAIL();
1339 }
Xusong Wanged0822b2019-02-25 16:58:58 -08001340 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001341 }
Xusong Wange371f6f2019-04-23 14:51:50 -07001342
Michael Butler07633282019-08-29 11:08:25 -07001343 const uint32_t kSeed = std::get<uint32_t>(GetParam());
Xusong Wange371f6f2019-04-23 14:51:50 -07001344 std::mt19937 generator;
1345};
1346
1347TEST_P(CompilationCachingSecurityTest, CorruptedModelCache) {
1348 if (!mIsCachingSupported) return;
1349 for (uint32_t i = 0; i < mNumModelCache; i++) {
1350 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1351 [this, i](bool* skip) { flipOneBitOfCache(mModelCache[i][0], skip); });
1352 }
1353}
1354
1355TEST_P(CompilationCachingSecurityTest, WrongLengthModelCache) {
1356 if (!mIsCachingSupported) return;
1357 for (uint32_t i = 0; i < mNumModelCache; i++) {
1358 testCorruptedCache(ExpectedResult::GENERAL_FAILURE,
1359 [this, i](bool* skip) { appendBytesToCache(mModelCache[i][0], skip); });
1360 }
1361}
1362
1363TEST_P(CompilationCachingSecurityTest, CorruptedDataCache) {
1364 if (!mIsCachingSupported) return;
1365 for (uint32_t i = 0; i < mNumDataCache; i++) {
1366 testCorruptedCache(ExpectedResult::NOT_CRASH,
1367 [this, i](bool* skip) { flipOneBitOfCache(mDataCache[i][0], skip); });
1368 }
1369}
1370
1371TEST_P(CompilationCachingSecurityTest, WrongLengthDataCache) {
1372 if (!mIsCachingSupported) return;
1373 for (uint32_t i = 0; i < mNumDataCache; i++) {
1374 testCorruptedCache(ExpectedResult::NOT_CRASH,
1375 [this, i](bool* skip) { appendBytesToCache(mDataCache[i][0], skip); });
1376 }
Xusong Wang96e68dc2019-01-18 17:28:26 -08001377}
1378
1379TEST_P(CompilationCachingSecurityTest, WrongToken) {
1380 if (!mIsCachingSupported) return;
Xusong Wange371f6f2019-04-23 14:51:50 -07001381 testCorruptedCache(ExpectedResult::GENERAL_FAILURE, [this](bool* skip) {
1382 // Randomly flip one single bit in mToken.
1383 uint32_t ind =
1384 getRandomInt(0u, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN) - 1);
1385 mToken[ind] ^= (1U << getRandomInt(0, 7));
1386 *skip = false;
1387 });
Xusong Wang96e68dc2019-01-18 17:28:26 -08001388}
1389
Michael Butler07633282019-08-29 11:08:25 -07001390std::string printCompilationCachingSecurityTest(
1391 const testing::TestParamInfo<CompilationCachingSecurityTestParam>& info) {
1392 const auto& [namedDevice, operandType, seed] = info.param;
1393 const std::string type = (operandType == OperandType::TENSOR_FLOAT32 ? "float32" : "quant8");
1394 return gtestCompliantName(getName(namedDevice) + "_" + type + "_" + std::to_string(seed));
1395}
1396
// Instantiate the security tests for each (device, operand type) pair with
// ten RNG seeds [0, 10), so different cache bits/bytes get corrupted across
// instantiations.
INSTANTIATE_TEST_CASE_P(TestCompilationCaching, CompilationCachingSecurityTest,
                        testing::Combine(kNamedDeviceChoices, kOperandTypeChoices,
                                         testing::Range(0U, 10U)),
                        printCompilationCachingSecurityTest);
Xusong Wang96e68dc2019-01-18 17:28:26 -08001401
Michael Butler62749b92019-08-26 23:55:47 -07001402} // namespace android::hardware::neuralnetworks::V1_2::vts::functional