blob: aacb38500bd55b72622258abd58a913f6d970c47 [file] [log] [blame]
Slava Shklyaev73ee79d2019-05-14 14:15:14 +01001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "GeneratedTestHarness.h"
18
19#include <android-base/logging.h>
20#include <android/hardware/neuralnetworks/1.0/IDevice.h>
21#include <android/hardware/neuralnetworks/1.0/IExecutionCallback.h>
22#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
23#include <android/hardware/neuralnetworks/1.0/IPreparedModelCallback.h>
24#include <android/hardware/neuralnetworks/1.0/types.h>
25#include <android/hardware/neuralnetworks/1.1/IDevice.h>
26#include <android/hardware/neuralnetworks/1.2/IDevice.h>
27#include <android/hardware/neuralnetworks/1.2/IExecutionCallback.h>
28#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
29#include <android/hardware/neuralnetworks/1.2/IPreparedModelCallback.h>
30#include <android/hidl/allocator/1.0/IAllocator.h>
31#include <android/hidl/memory/1.0/IMemory.h>
32#include <hidlmemory/mapping.h>
33
Xusong Wangead950d2019-08-09 16:45:24 -070034#include <gtest/gtest.h>
35#include <algorithm>
Michael Butler648ada52019-07-25 17:22:11 -070036#include <chrono>
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010037#include <iostream>
Xusong Wangead950d2019-08-09 16:45:24 -070038#include <numeric>
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010039
40#include "1.0/Utils.h"
41#include "1.2/Callbacks.h"
42#include "ExecutionBurstController.h"
43#include "MemoryUtils.h"
44#include "TestHarness.h"
45#include "Utils.h"
Xusong Wangbcaa7822019-08-23 16:10:54 -070046#include "VtsHalNeuralnetworks.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010047
Michael Butler62749b92019-08-26 23:55:47 -070048namespace android::hardware::neuralnetworks::V1_2::vts::functional {
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010049
Xusong Wangead950d2019-08-09 16:45:24 -070050using namespace test_helper;
Michael Butler62749b92019-08-26 23:55:47 -070051using hidl::memory::V1_0::IMemory;
52using implementation::ExecutionCallback;
53using implementation::PreparedModelCallback;
54using V1_0::DataLocation;
55using V1_0::ErrorStatus;
56using V1_0::OperandLifeTime;
57using V1_0::Request;
58using V1_1::ExecutionPreference;
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010059using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;
60
Xusong Wangead950d2019-08-09 16:45:24 -070061enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };
62
// Converts a test_helper::TestModel into the HIDL V1_2 Model representation.
// Constant operand data is packed into two pools: CONSTANT_COPY data goes into
// the inline `operandValues` blob, while CONSTANT_REFERENCE data goes into a
// single shared-memory pool (pools[0]).
Model createModel(const TestModel& testModel) {
    // Model operands.
    hidl_vec<Operand> operands(testModel.operands.size());
    // Running byte offsets for the two constant pools; each operand's location
    // is assigned before the counter is advanced by its aligned size.
    size_t constCopySize = 0, constRefSize = 0;
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];

        DataLocation loc = {};
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            // Offset is into operandValues; poolIndex is unused for copies.
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constCopySize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constCopySize += op.data.alignedSize();
        } else if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
            // Offset is into the shared-memory pool created below (pools[0]).
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constRefSize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constRefSize += op.data.alignedSize();
        }

        // Per-channel quantization parameters are the only extra params used here.
        Operand::ExtraParams extraParams;
        if (op.type == TestOperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL) {
            extraParams.channelQuant(SymmPerChannelQuantParams{
                    .scales = op.channelQuant.scales, .channelDim = op.channelQuant.channelDim});
        }

        operands[i] = {.type = static_cast<OperandType>(op.type),
                       .dimensions = op.dimensions,
                       .numberOfConsumers = op.numberOfConsumers,
                       .scale = op.scale,
                       .zeroPoint = op.zeroPoint,
                       .lifetime = static_cast<OperandLifeTime>(op.lifetime),
                       .location = loc,
                       .extraParams = std::move(extraParams)};
    }

    // Model operations.
    hidl_vec<Operation> operations(testModel.operations.size());
    std::transform(testModel.operations.begin(), testModel.operations.end(), operations.begin(),
                   [](const TestOperation& op) -> Operation {
                       return {.type = static_cast<OperationType>(op.type),
                               .inputs = op.inputs,
                               .outputs = op.outputs};
                   });

    // Constant copies: pack CONSTANT_COPY operand data at the offsets assigned above.
    hidl_vec<uint8_t> operandValues(constCopySize);
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            const uint8_t* begin = op.data.get<uint8_t>();
            const uint8_t* end = begin + op.data.size();
            std::copy(begin, end, operandValues.data() + operands[i].location.offset);
        }
    }

    // Shared memory: only allocate a pool if some operand is CONSTANT_REFERENCE.
    hidl_vec<hidl_memory> pools = {};
    if (constRefSize > 0) {
        hidl_vec_push_back(&pools, nn::allocateSharedMemory(constRefSize));
        CHECK_NE(pools[0].size(), 0u);

        // load data
        sp<IMemory> mappedMemory = mapMemory(pools[0]);
        CHECK(mappedMemory.get() != nullptr);
        uint8_t* mappedPtr =
                reinterpret_cast<uint8_t*>(static_cast<void*>(mappedMemory->getPointer()));
        CHECK(mappedPtr != nullptr);

        for (uint32_t i = 0; i < testModel.operands.size(); i++) {
            const auto& op = testModel.operands[i];
            if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
                const uint8_t* begin = op.data.get<uint8_t>();
                const uint8_t* end = begin + op.data.size();
                std::copy(begin, end, mappedPtr + operands[i].location.offset);
            }
        }
    }

    return {.operands = std::move(operands),
            .operations = std::move(operations),
            .inputIndexes = testModel.inputIndexes,
            .outputIndexes = testModel.outputIndexes,
            .operandValues = std::move(operandValues),
            .pools = std::move(pools),
            .relaxComputationFloat32toFloat16 = testModel.isRelaxed};
}
150
Xusong Wangead950d2019-08-09 16:45:24 -0700151static bool isOutputSizeGreaterThanOne(const TestModel& testModel, uint32_t index) {
152 const auto byteSize = testModel.operands[testModel.outputIndexes[index]].data.size();
153 return byteSize > 1u;
154}
155
156static void makeOutputInsufficientSize(uint32_t outputIndex, Request* request) {
157 auto& length = request->outputs[outputIndex].location.length;
158 ASSERT_GT(length, 1u);
159 length -= 1u;
160}
161
162static void makeOutputDimensionsUnspecified(Model* model) {
163 for (auto i : model->outputIndexes) {
164 auto& dims = model->operands[i].dimensions;
165 std::fill(dims.begin(), dims.end(), 0);
166 }
167}
168
// Asynchronous execution path: launches the request via execute_1_2; completion
// status, output shapes and timing are later retrieved through `callback`.
static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                sp<ExecutionCallback>& callback) {
    return preparedModel->execute_1_2(request, measure, callback);
}
Xusong Wangead950d2019-08-09 16:45:24 -0700174static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100175 const Request& request, MeasureTiming measure,
176 hidl_vec<OutputShape>* outputShapes,
177 Timing* timing) {
178 ErrorStatus result;
179 Return<void> ret = preparedModel->executeSynchronously(
180 request, measure,
181 [&result, outputShapes, timing](ErrorStatus error, const hidl_vec<OutputShape>& shapes,
182 const Timing& time) {
183 result = error;
184 *outputShapes = shapes;
185 *timing = time;
186 });
187 if (!ret.isOk()) {
188 return ErrorStatus::GENERAL_FAILURE;
189 }
190 return result;
191}
192static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
193 const sp<IPreparedModel>& preparedModel) {
Michael Butler648ada52019-07-25 17:22:11 -0700194 return android::nn::ExecutionBurstController::create(preparedModel,
195 std::chrono::microseconds{0});
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100196}
197enum class Executor { ASYNC, SYNC, BURST };
Xusong Wangead950d2019-08-09 16:45:24 -0700198
// Runs one execution of `testModel` on `preparedModel` using the chosen
// executor path (async / sync / burst), timing mode, and output-buffer scenario
// (fully specified / unspecified / deliberately insufficient), then validates
// the execution status, returned output shapes, timing, and output contents.
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           Executor executor, MeasureTiming measure, OutputType outputType) {
    // If output0 does not have size larger than one byte, we can not test with insufficient buffer.
    if (outputType == OutputType::INSUFFICIENT && !isOutputSizeGreaterThanOne(testModel, 0)) {
        return;
    }

    Request request = createRequest(testModel);
    if (outputType == OutputType::INSUFFICIENT) {
        // Shrink output0 by one byte to force OUTPUT_INSUFFICIENT_SIZE.
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
    }

    // Filled in by whichever executor path runs below.
    ErrorStatus executionStatus;
    hidl_vec<OutputShape> outputShapes;
    Timing timing;
    switch (executor) {
        case Executor::ASYNC: {
            SCOPED_TRACE("asynchronous");

            // launch execution
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            Return<ErrorStatus> executionLaunchStatus =
                    ExecutePreparedModel(preparedModel, request, measure, executionCallback);
            ASSERT_TRUE(executionLaunchStatus.isOk());
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));

            // retrieve execution status: block until the callback fires, then
            // read status/shapes/timing out of it.
            executionCallback->wait();
            executionStatus = executionCallback->getStatus();
            outputShapes = executionCallback->getOutputShapes();
            timing = executionCallback->getTiming();

            break;
        }
        case Executor::SYNC: {
            SCOPED_TRACE("synchronous");

            // execute: shapes and timing are returned directly via out-params.
            Return<ErrorStatus> executionReturnStatus =
                    ExecutePreparedModel(preparedModel, request, measure, &outputShapes, &timing);
            ASSERT_TRUE(executionReturnStatus.isOk());
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);

            break;
        }
        case Executor::BURST: {
            SCOPED_TRACE("burst");

            // create burst
            const std::shared_ptr<::android::nn::ExecutionBurstController> controller =
                    CreateBurst(preparedModel);
            ASSERT_NE(nullptr, controller.get());

            // create memory keys: one opaque key per request pool, derived from
            // the pool's address (identity for the burst's memory cache).
            std::vector<intptr_t> keys(request.pools.size());
            for (size_t i = 0; i < keys.size(); ++i) {
                keys[i] = reinterpret_cast<intptr_t>(&request.pools[i]);
            }

            // execute burst; compute returns a result code that must be mapped
            // back to an ErrorStatus.
            int n;
            std::tie(n, outputShapes, timing, std::ignore) =
                    controller->compute(request, measure, keys);
            executionStatus = nn::convertResultCodeToErrorStatus(n);

            break;
        }
    }

    // A driver may legitimately not support models whose outputs are not fully
    // specified; treat GENERAL_FAILURE in that case as "unsupported" and skip.
    if (outputType != OutputType::FULLY_SPECIFIED &&
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "execute model that it does not support.";
        std::cout << "[          ]   Early termination of test because vendor service cannot "
                     "execute model that it does not support."
                  << std::endl;
        GTEST_SKIP();
    }
    if (measure == MeasureTiming::NO) {
        // No measurement requested: both timing fields must be UINT64_MAX.
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
    } else {
        // When both timings are reported, device time cannot exceed driver time.
        if (timing.timeOnDevice != UINT64_MAX && timing.timeInDriver != UINT64_MAX) {
            EXPECT_LE(timing.timeOnDevice, timing.timeInDriver);
        }
    }

    switch (outputType) {
        case OutputType::FULLY_SPECIFIED:
            // If the model output operands are fully specified, outputShapes must be either
            // either empty, or have the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_TRUE(outputShapes.size() == 0 ||
                        outputShapes.size() == testModel.outputIndexes.size());
            break;
        case OutputType::UNSPECIFIED:
            // If the model output operands are not fully specified, outputShapes must have
            // the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.outputIndexes.size());
            break;
        case OutputType::INSUFFICIENT:
            // The shrunken output must be flagged insufficient; output contents
            // are not checked in this scenario.
            ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.outputIndexes.size());
            ASSERT_FALSE(outputShapes[0].isSufficient);
            return;
    }

    // Go through all outputs, check returned output shapes.
    for (uint32_t i = 0; i < outputShapes.size(); i++) {
        EXPECT_TRUE(outputShapes[i].isSufficient);
        const auto& expect = testModel.operands[testModel.outputIndexes[i]].dimensions;
        const std::vector<uint32_t> actual = outputShapes[i].dimensions;
        EXPECT_EQ(expect, actual);
    }

    // Retrieve execution results.
    const std::vector<TestBuffer> outputs = getOutputBuffers(request);

    // We want "close-enough" results.
    checkResults(testModel, outputs);
}
321
Xusong Wangead950d2019-08-09 16:45:24 -0700322void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
323 bool testDynamicOutputShape) {
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100324 if (testDynamicOutputShape) {
Xusong Wangead950d2019-08-09 16:45:24 -0700325 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
326 OutputType::UNSPECIFIED);
327 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
328 OutputType::UNSPECIFIED);
329 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
330 OutputType::UNSPECIFIED);
331 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
332 OutputType::UNSPECIFIED);
333 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
334 OutputType::UNSPECIFIED);
335 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
336 OutputType::UNSPECIFIED);
337 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
338 OutputType::INSUFFICIENT);
339 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
340 OutputType::INSUFFICIENT);
341 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
342 OutputType::INSUFFICIENT);
343 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
344 OutputType::INSUFFICIENT);
345 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
346 OutputType::INSUFFICIENT);
347 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
348 OutputType::INSUFFICIENT);
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100349 } else {
Xusong Wangead950d2019-08-09 16:45:24 -0700350 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
351 OutputType::FULLY_SPECIFIED);
352 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
353 OutputType::FULLY_SPECIFIED);
354 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
355 OutputType::FULLY_SPECIFIED);
356 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
357 OutputType::FULLY_SPECIFIED);
358 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
359 OutputType::FULLY_SPECIFIED);
360 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
361 OutputType::FULLY_SPECIFIED);
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100362 }
363}
364
Michael Butler13b05162019-08-29 22:17:24 -0700365void Execute(const sp<IDevice>& device, const TestModel& testModel, bool testDynamicOutputShape) {
366 Model model = createModel(testModel);
367 if (testDynamicOutputShape) {
368 makeOutputDimensionsUnspecified(&model);
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100369 }
Michael Butler13b05162019-08-29 22:17:24 -0700370
371 sp<IPreparedModel> preparedModel;
372 createPreparedModel(device, model, &preparedModel);
373 if (preparedModel == nullptr) return;
374
375 EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100376}
377
// Per-test setup: runs the parameterized base fixture's SetUp, then requires
// that a device handle was provided for this test instance.
void GeneratedTestBase::SetUp() {
    testing::TestWithParam<GeneratedTestParam>::SetUp();
    ASSERT_NE(kDevice, nullptr);
}
382
// Returns the registered test models that pass `filter`, paired with their names.
std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
    return TestModelManager::get().getTestModels(filter);
}
386
387std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
388 const auto& [namedDevice, namedModel] = info.param;
389 return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
390}
391
// Tag for the generated tests (fully specified output shapes).
class GeneratedTest : public GeneratedTestBase {};

// Tag for the dynamic output shape tests (output dimensions stripped).
class DynamicOutputShapeTest : public GeneratedTest {};
397
// Runs the generated test with the model's output dimensions fully specified.
TEST_P(GeneratedTest, Test) {
    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/false);
}
401
// Runs the generated test with output dimensions made unspecified to exercise
// dynamic output shape support.
TEST_P(DynamicOutputShapeTest, Test) {
    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/true);
}
405
// Instantiate both suites over every test model that is expected to succeed;
// models marked expectFailure are excluded.
INSTANTIATE_GENERATED_TEST(GeneratedTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });

INSTANTIATE_GENERATED_TEST(DynamicOutputShapeTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });
411
Michael Butler62749b92019-08-26 23:55:47 -0700412} // namespace android::hardware::neuralnetworks::V1_2::vts::functional