blob: a2d37920d521391b0d025e1cbb6da5954dbd5c4f [file] [log] [blame]
Slava Shklyaev1d6b4652019-05-14 14:15:14 +01001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "GeneratedTestHarness.h"
18
19#include <android-base/logging.h>
20#include <android/hardware/neuralnetworks/1.0/IDevice.h>
21#include <android/hardware/neuralnetworks/1.0/IExecutionCallback.h>
22#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
23#include <android/hardware/neuralnetworks/1.0/IPreparedModelCallback.h>
24#include <android/hardware/neuralnetworks/1.0/types.h>
25#include <android/hardware/neuralnetworks/1.1/IDevice.h>
26#include <android/hardware/neuralnetworks/1.2/IDevice.h>
27#include <android/hardware/neuralnetworks/1.2/IExecutionCallback.h>
28#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
29#include <android/hardware/neuralnetworks/1.2/IPreparedModelCallback.h>
30#include <android/hidl/allocator/1.0/IAllocator.h>
31#include <android/hidl/memory/1.0/IMemory.h>
32#include <hidlmemory/mapping.h>
33
Xusong Wang491b0a82019-08-09 16:45:24 -070034#include <gtest/gtest.h>
35#include <algorithm>
Slava Shklyaev1d6b4652019-05-14 14:15:14 +010036#include <iostream>
Xusong Wang491b0a82019-08-09 16:45:24 -070037#include <numeric>
Slava Shklyaev1d6b4652019-05-14 14:15:14 +010038
39#include "1.0/Utils.h"
40#include "1.2/Callbacks.h"
41#include "ExecutionBurstController.h"
42#include "MemoryUtils.h"
43#include "TestHarness.h"
44#include "Utils.h"
Xusong Wang9e2b97b2019-08-23 16:10:54 -070045#include "VtsHalNeuralnetworks.h"
Slava Shklyaev1d6b4652019-05-14 14:15:14 +010046
Michael Butlerbbe5dad2019-08-26 23:55:47 -070047namespace android::hardware::neuralnetworks::V1_2::vts::functional {
Slava Shklyaev1d6b4652019-05-14 14:15:14 +010048
Xusong Wang491b0a82019-08-09 16:45:24 -070049using namespace test_helper;
Michael Butlerbbe5dad2019-08-26 23:55:47 -070050using hidl::memory::V1_0::IMemory;
51using implementation::ExecutionCallback;
52using implementation::PreparedModelCallback;
53using V1_0::DataLocation;
54using V1_0::ErrorStatus;
55using V1_0::OperandLifeTime;
56using V1_0::Request;
57using V1_1::ExecutionPreference;
Slava Shklyaev1d6b4652019-05-14 14:15:14 +010058using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;
59
// How the model's output operands are presented to the driver under test:
// FULLY_SPECIFIED - output dimensions are left intact in the model;
// UNSPECIFIED    - output dimensions are zeroed so the driver must deduce them;
// INSUFFICIENT   - output buffer 0 is deliberately shrunk to force
//                  ErrorStatus::OUTPUT_INSUFFICIENT_SIZE.
enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };
61
// Converts a test_helper::TestModel into the HIDL Model consumed by a V1.2
// driver. Constant operand data is packed into two regions: CONSTANT_COPY
// operands go into Model::operandValues, and CONSTANT_REFERENCE operands go
// into a single shared-memory pool (pools[0]). Offsets within each region are
// advanced by alignedSize() so every operand starts at an aligned boundary.
Model createModel(const TestModel& testModel) {
    // Model operands.
    hidl_vec<Operand> operands(testModel.operands.size());
    // Running byte offsets for the operandValues blob and the shared-memory
    // pool respectively; this first pass only computes locations and sizes.
    size_t constCopySize = 0, constRefSize = 0;
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];

        // Zero-initialized location for operands that carry no constant data.
        DataLocation loc = {};
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            // Offset indexes into Model::operandValues (filled in below).
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constCopySize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constCopySize += op.data.alignedSize();
        } else if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
            // Offset indexes into shared-memory pool 0 (allocated below).
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constRefSize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constRefSize += op.data.alignedSize();
        }

        // Per-channel quantization parameters, only set for the
        // TENSOR_QUANT8_SYMM_PER_CHANNEL operand type.
        Operand::ExtraParams extraParams;
        if (op.type == TestOperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL) {
            extraParams.channelQuant(SymmPerChannelQuantParams{
                    .scales = op.channelQuant.scales, .channelDim = op.channelQuant.channelDim});
        }

        operands[i] = {.type = static_cast<OperandType>(op.type),
                       .dimensions = op.dimensions,
                       .numberOfConsumers = op.numberOfConsumers,
                       .scale = op.scale,
                       .zeroPoint = op.zeroPoint,
                       .lifetime = static_cast<OperandLifeTime>(op.lifetime),
                       .location = loc,
                       .extraParams = std::move(extraParams)};
    }

    // Model operations.
    hidl_vec<Operation> operations(testModel.operations.size());
    std::transform(testModel.operations.begin(), testModel.operations.end(), operations.begin(),
                   [](const TestOperation& op) -> Operation {
                       return {.type = static_cast<OperationType>(op.type),
                               .inputs = op.inputs,
                               .outputs = op.outputs};
                   });

    // Constant copies: second pass writes CONSTANT_COPY data at the offsets
    // computed above.
    hidl_vec<uint8_t> operandValues(constCopySize);
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            const uint8_t* begin = op.data.get<uint8_t>();
            const uint8_t* end = begin + op.data.size();
            std::copy(begin, end, operandValues.data() + operands[i].location.offset);
        }
    }

    // Shared memory: allocate a single pool for all CONSTANT_REFERENCE
    // operands and copy their data in through a CPU mapping.
    hidl_vec<hidl_memory> pools = {};
    if (constRefSize > 0) {
        hidl_vec_push_back(&pools, nn::allocateSharedMemory(constRefSize));
        CHECK_NE(pools[0].size(), 0u);

        // load data
        sp<IMemory> mappedMemory = mapMemory(pools[0]);
        CHECK(mappedMemory.get() != nullptr);
        uint8_t* mappedPtr =
                reinterpret_cast<uint8_t*>(static_cast<void*>(mappedMemory->getPointer()));
        CHECK(mappedPtr != nullptr);

        for (uint32_t i = 0; i < testModel.operands.size(); i++) {
            const auto& op = testModel.operands[i];
            if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
                const uint8_t* begin = op.data.get<uint8_t>();
                const uint8_t* end = begin + op.data.size();
                std::copy(begin, end, mappedPtr + operands[i].location.offset);
            }
        }
    }

    return {.operands = std::move(operands),
            .operations = std::move(operations),
            .inputIndexes = testModel.inputIndexes,
            .outputIndexes = testModel.outputIndexes,
            .operandValues = std::move(operandValues),
            .pools = std::move(pools),
            .relaxComputationFloat32toFloat16 = testModel.isRelaxed};
}
149
Xusong Wang491b0a82019-08-09 16:45:24 -0700150static bool isOutputSizeGreaterThanOne(const TestModel& testModel, uint32_t index) {
151 const auto byteSize = testModel.operands[testModel.outputIndexes[index]].data.size();
152 return byteSize > 1u;
153}
154
155static void makeOutputInsufficientSize(uint32_t outputIndex, Request* request) {
156 auto& length = request->outputs[outputIndex].location.length;
157 ASSERT_GT(length, 1u);
158 length -= 1u;
159}
160
161static void makeOutputDimensionsUnspecified(Model* model) {
162 for (auto i : model->outputIndexes) {
163 auto& dims = model->operands[i].dimensions;
164 std::fill(dims.begin(), dims.end(), 0);
165 }
166}
167
// Launches an asynchronous execution via IPreparedModel::execute_1_2; the
// result (status, output shapes, timing) is later delivered through
// |callback|. Returns only the launch status.
static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                sp<ExecutionCallback>& callback) {
    return preparedModel->execute_1_2(request, measure, callback);
}
Xusong Wang491b0a82019-08-09 16:45:24 -0700173static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
Slava Shklyaev1d6b4652019-05-14 14:15:14 +0100174 const Request& request, MeasureTiming measure,
175 hidl_vec<OutputShape>* outputShapes,
176 Timing* timing) {
177 ErrorStatus result;
178 Return<void> ret = preparedModel->executeSynchronously(
179 request, measure,
180 [&result, outputShapes, timing](ErrorStatus error, const hidl_vec<OutputShape>& shapes,
181 const Timing& time) {
182 result = error;
183 *outputShapes = shapes;
184 *timing = time;
185 });
186 if (!ret.isOk()) {
187 return ErrorStatus::GENERAL_FAILURE;
188 }
189 return result;
190}
// Creates an ExecutionBurstController for |preparedModel| using blocking
// (futex-style) FMQ waits; returns nullptr on failure (checked by callers).
static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
        const sp<IPreparedModel>& preparedModel) {
    return ::android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
}
// The execution path exercised by a test: execute_1_2 with a callback (ASYNC),
// executeSynchronously (SYNC), or the burst/FMQ controller (BURST).
enum class Executor { ASYNC, SYNC, BURST };
Xusong Wang491b0a82019-08-09 16:45:24 -0700196
// Runs a single execution of |testModel| on |preparedModel| through the given
// |executor| path, optionally requesting timing, then validates the execution
// status, the returned output shapes, the timing fields, and (for successful
// fully-written outputs) the output values against the reference results.
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           Executor executor, MeasureTiming measure, OutputType outputType) {
    // If output0 does not have size larger than one byte, we can not test with insufficient buffer.
    if (outputType == OutputType::INSUFFICIENT && !isOutputSizeGreaterThanOne(testModel, 0)) {
        return;
    }

    Request request = createRequest(testModel);
    if (outputType == OutputType::INSUFFICIENT) {
        // Shrink output0's buffer by one byte to provoke OUTPUT_INSUFFICIENT_SIZE.
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
    }

    // Filled in by whichever executor branch runs below.
    ErrorStatus executionStatus;
    hidl_vec<OutputShape> outputShapes;
    Timing timing;
    switch (executor) {
        case Executor::ASYNC: {
            SCOPED_TRACE("asynchronous");

            // launch execution
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            Return<ErrorStatus> executionLaunchStatus =
                    ExecutePreparedModel(preparedModel, request, measure, executionCallback);
            ASSERT_TRUE(executionLaunchStatus.isOk());
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));

            // retrieve execution status
            executionCallback->wait();
            executionStatus = executionCallback->getStatus();
            outputShapes = executionCallback->getOutputShapes();
            timing = executionCallback->getTiming();

            break;
        }
        case Executor::SYNC: {
            SCOPED_TRACE("synchronous");

            // execute
            Return<ErrorStatus> executionReturnStatus =
                    ExecutePreparedModel(preparedModel, request, measure, &outputShapes, &timing);
            ASSERT_TRUE(executionReturnStatus.isOk());
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);

            break;
        }
        case Executor::BURST: {
            SCOPED_TRACE("burst");

            // create burst
            const std::shared_ptr<::android::nn::ExecutionBurstController> controller =
                    CreateBurst(preparedModel);
            ASSERT_NE(nullptr, controller.get());

            // create memory keys: one cache key per request pool, derived from
            // the pool's address (stable for the lifetime of |request|).
            std::vector<intptr_t> keys(request.pools.size());
            for (size_t i = 0; i < keys.size(); ++i) {
                keys[i] = reinterpret_cast<intptr_t>(&request.pools[i]);
            }

            // execute burst
            std::tie(executionStatus, outputShapes, timing) =
                    controller->compute(request, measure, keys);

            break;
        }
    }

    // A driver may legitimately fail a dynamic-shape model it cannot handle;
    // skip (rather than fail) the test in that case.
    if (outputType != OutputType::FULLY_SPECIFIED &&
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "execute model that it does not support.";
        std::cout << "[ ] Early termination of test because vendor service cannot "
                     "execute model that it does not support."
                  << std::endl;
        GTEST_SKIP();
    }
    // UINT64_MAX is the sentinel for "no measurement" in Timing; when timing
    // was not requested both fields must carry it, and when both fields are
    // measured, device time must not exceed driver time.
    if (measure == MeasureTiming::NO) {
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
    } else {
        if (timing.timeOnDevice != UINT64_MAX && timing.timeInDriver != UINT64_MAX) {
            EXPECT_LE(timing.timeOnDevice, timing.timeInDriver);
        }
    }

    switch (outputType) {
        case OutputType::FULLY_SPECIFIED:
            // If the model output operands are fully specified, outputShapes must be
            // either empty, or have the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_TRUE(outputShapes.size() == 0 ||
                        outputShapes.size() == testModel.outputIndexes.size());
            break;
        case OutputType::UNSPECIFIED:
            // If the model output operands are not fully specified, outputShapes must have
            // the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.outputIndexes.size());
            break;
        case OutputType::INSUFFICIENT:
            ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.outputIndexes.size());
            ASSERT_FALSE(outputShapes[0].isSufficient);
            // Output buffers were undersized, so there are no values to check.
            return;
    }

    // Go through all outputs, check returned output shapes.
    for (uint32_t i = 0; i < outputShapes.size(); i++) {
        EXPECT_TRUE(outputShapes[i].isSufficient);
        const auto& expect = testModel.operands[testModel.outputIndexes[i]].dimensions;
        const std::vector<uint32_t> actual = outputShapes[i].dimensions;
        EXPECT_EQ(expect, actual);
    }

    // Retrieve execution results.
    const std::vector<TestBuffer> outputs = getOutputBuffers(request);

    // We want "close-enough" results.
    checkResults(testModel, outputs);
}
317
Xusong Wang491b0a82019-08-09 16:45:24 -0700318void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
319 bool testDynamicOutputShape) {
Slava Shklyaev1d6b4652019-05-14 14:15:14 +0100320 if (testDynamicOutputShape) {
Xusong Wang491b0a82019-08-09 16:45:24 -0700321 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
322 OutputType::UNSPECIFIED);
323 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
324 OutputType::UNSPECIFIED);
325 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
326 OutputType::UNSPECIFIED);
327 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
328 OutputType::UNSPECIFIED);
329 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
330 OutputType::UNSPECIFIED);
331 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
332 OutputType::UNSPECIFIED);
333 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
334 OutputType::INSUFFICIENT);
335 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
336 OutputType::INSUFFICIENT);
337 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
338 OutputType::INSUFFICIENT);
339 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
340 OutputType::INSUFFICIENT);
341 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
342 OutputType::INSUFFICIENT);
343 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
344 OutputType::INSUFFICIENT);
Slava Shklyaev1d6b4652019-05-14 14:15:14 +0100345 } else {
Xusong Wang491b0a82019-08-09 16:45:24 -0700346 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
347 OutputType::FULLY_SPECIFIED);
348 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
349 OutputType::FULLY_SPECIFIED);
350 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
351 OutputType::FULLY_SPECIFIED);
352 EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
353 OutputType::FULLY_SPECIFIED);
354 EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
355 OutputType::FULLY_SPECIFIED);
356 EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
357 OutputType::FULLY_SPECIFIED);
Slava Shklyaev1d6b4652019-05-14 14:15:14 +0100358 }
359}
360
Michael Butlere16af0a2019-08-29 22:17:24 -0700361void Execute(const sp<IDevice>& device, const TestModel& testModel, bool testDynamicOutputShape) {
362 Model model = createModel(testModel);
363 if (testDynamicOutputShape) {
364 makeOutputDimensionsUnspecified(&model);
Slava Shklyaev1d6b4652019-05-14 14:15:14 +0100365 }
Michael Butlere16af0a2019-08-29 22:17:24 -0700366
367 sp<IPreparedModel> preparedModel;
368 createPreparedModel(device, model, &preparedModel);
369 if (preparedModel == nullptr) return;
370
371 EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
Slava Shklyaev1d6b4652019-05-14 14:15:14 +0100372}
373
// Tag for the generated tests: parameterized fixture instantiated once per
// generated TestModel (see INSTANTIATE_GENERATED_TEST below).
class GeneratedTest : public GeneratedTestBase {};
Xusong Wang9e2b97b2019-08-23 16:10:54 -0700376
// Tag for the dynamic output shape tests: same generated cases, run with the
// model's output dimensions removed.
class DynamicOutputShapeTest : public GeneratedTest {};
379
TEST_P(GeneratedTest, Test) {
    // Outputs keep their fully specified (static) shapes.
    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/false);
}
383
TEST_P(DynamicOutputShapeTest, Test) {
    // Output dimensions are zeroed so the driver must deduce them at runtime.
    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/true);
}
387
// Instantiate both suites over every generated model that is expected to
// prepare and execute successfully (expected-failure models are filtered out).
INSTANTIATE_GENERATED_TEST(GeneratedTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });

INSTANTIATE_GENERATED_TEST(DynamicOutputShapeTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });
393
Michael Butlerbbe5dad2019-08-26 23:55:47 -0700394} // namespace android::hardware::neuralnetworks::V1_2::vts::functional