/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "GeneratedTestHarness.h"

#include <android-base/logging.h>
#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <android/hardware/neuralnetworks/1.0/IExecutionCallback.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModelCallback.h>
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include <android/hardware/neuralnetworks/1.2/IDevice.h>
#include <android/hardware/neuralnetworks/1.2/IExecutionCallback.h>
#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.2/IPreparedModelCallback.h>
#include <android/hardware/neuralnetworks/1.2/types.h>
#include <android/hardware/neuralnetworks/1.3/IDevice.h>
#include <android/hardware/neuralnetworks/1.3/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.3/IPreparedModelCallback.h>
#include <android/hardware/neuralnetworks/1.3/types.h>
#include <android/hidl/allocator/1.0/IAllocator.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <gtest/gtest.h>
#include <hidlmemory/mapping.h>

#include <algorithm>
#include <chrono>
#include <iostream>
#include <numeric>
#include <vector>

#include "1.0/Utils.h"
#include "1.2/Callbacks.h"
#include "1.3/Callbacks.h"
#include "ExecutionBurstController.h"
#include "MemoryUtils.h"
#include "TestHarness.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"

namespace android::hardware::neuralnetworks::V1_3::vts::functional {

using namespace test_helper;
using hidl::memory::V1_0::IMemory;
using implementation::PreparedModelCallback;
using V1_0::DataLocation;
using V1_0::ErrorStatus;
using V1_0::OperandLifeTime;
using V1_0::Request;
using V1_1::ExecutionPreference;
using V1_2::Constant;
using V1_2::MeasureTiming;
using V1_2::OutputShape;
using V1_2::SymmPerChannelQuantParams;
using V1_2::Timing;
using V1_2::implementation::ExecutionCallback;
using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;

namespace {

enum class Executor { ASYNC, SYNC, BURST };

enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };

struct TestConfig {
    Executor executor;
    MeasureTiming measureTiming;
    OutputType outputType;
    // `reportSkipping` indicates whether a test should print an info message and call
    // GTEST_SKIP when it is skipped. It defaults to true and is set to false in
    // quantization coupling tests, where the decision to skip is left to the caller.
    bool reportSkipping;
    TestConfig(Executor executor, MeasureTiming measureTiming, OutputType outputType)
        : executor(executor),
          measureTiming(measureTiming),
          outputType(outputType),
          reportSkipping(true) {}
    TestConfig(Executor executor, MeasureTiming measureTiming, OutputType outputType,
               bool reportSkipping)
        : executor(executor),
          measureTiming(measureTiming),
          outputType(outputType),
          reportSkipping(reportSkipping) {}
};

}  // namespace

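// Converts a test_helper::TestModel into a HIDL Model. Data of CONSTANT_COPY operands is
// packed into Model::operandValues, while data of CONSTANT_REFERENCE operands is written
// into a single shared memory pool.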
Model createModel(const TestModel& testModel) {
    // Model operands.
    hidl_vec<Operand> operands(testModel.operands.size());
    size_t constCopySize = 0, constRefSize = 0;
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];

        DataLocation loc = {};
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constCopySize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constCopySize += op.data.alignedSize();
        } else if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constRefSize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constRefSize += op.data.alignedSize();
        }

        Operand::ExtraParams extraParams;
        if (op.type == TestOperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL) {
            extraParams.channelQuant(SymmPerChannelQuantParams{
                    .scales = op.channelQuant.scales, .channelDim = op.channelQuant.channelDim});
        }

        operands[i] = {.type = static_cast<OperandType>(op.type),
                       .dimensions = op.dimensions,
                       .numberOfConsumers = op.numberOfConsumers,
                       .scale = op.scale,
                       .zeroPoint = op.zeroPoint,
                       .lifetime = static_cast<OperandLifeTime>(op.lifetime),
                       .location = loc,
                       .extraParams = std::move(extraParams)};
    }

    // Model operations.
    hidl_vec<Operation> operations(testModel.operations.size());
    std::transform(testModel.operations.begin(), testModel.operations.end(), operations.begin(),
                   [](const TestOperation& op) -> Operation {
                       return {.type = static_cast<OperationType>(op.type),
                               .inputs = op.inputs,
                               .outputs = op.outputs};
                   });

    // Constant copies.
    hidl_vec<uint8_t> operandValues(constCopySize);
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            const uint8_t* begin = op.data.get<uint8_t>();
            const uint8_t* end = begin + op.data.size();
            std::copy(begin, end, operandValues.data() + operands[i].location.offset);
        }
    }

    // Shared memory.
    hidl_vec<hidl_memory> pools = {};
    if (constRefSize > 0) {
        hidl_vec_push_back(&pools, nn::allocateSharedMemory(constRefSize));
        CHECK_NE(pools[0].size(), 0u);

        // load data
        sp<IMemory> mappedMemory = mapMemory(pools[0]);
        CHECK(mappedMemory.get() != nullptr);
        uint8_t* mappedPtr =
                reinterpret_cast<uint8_t*>(static_cast<void*>(mappedMemory->getPointer()));
        CHECK(mappedPtr != nullptr);

        for (uint32_t i = 0; i < testModel.operands.size(); i++) {
            const auto& op = testModel.operands[i];
            if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
                const uint8_t* begin = op.data.get<uint8_t>();
                const uint8_t* end = begin + op.data.size();
                std::copy(begin, end, mappedPtr + operands[i].location.offset);
            }
        }
    }

    return {.operands = std::move(operands),
            .operations = std::move(operations),
            .inputIndexes = testModel.inputIndexes,
            .outputIndexes = testModel.outputIndexes,
            .operandValues = std::move(operandValues),
            .pools = std::move(pools),
            .relaxComputationFloat32toFloat16 = testModel.isRelaxed};
}

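// Returns true if the output operand at `index` occupies more than one byte, which is
// required for the insufficient-output-size tests.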
static bool isOutputSizeGreaterThanOne(const TestModel& testModel, uint32_t index) {
    const auto byteSize = testModel.operands[testModel.outputIndexes[index]].data.size();
    return byteSize > 1u;
}

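// Shrinks the length of the requested output buffer by one byte so that the execution is
// expected to report OUTPUT_INSUFFICIENT_SIZE.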
static void makeOutputInsufficientSize(uint32_t outputIndex, Request* request) {
    auto& length = request->outputs[outputIndex].location.length;
    ASSERT_GT(length, 1u);
    length -= 1u;
}

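// Sets every output dimension of the model to 0 (unknown) to exercise dynamic output shapes.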
static void makeOutputDimensionsUnspecified(Model* model) {
    for (auto i : model->outputIndexes) {
        auto& dims = model->operands[i].dimensions;
        std::fill(dims.begin(), dims.end(), 0);
    }
}

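// Asynchronous execution: launches execute_1_3 and delivers results through `callback`.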
static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                sp<ExecutionCallback>& callback) {
    return preparedModel->execute_1_3(request, measure, callback);
}
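// Synchronous execution: calls executeSynchronously_1_3 and returns the status, reporting
// the output shapes and timing through the out-parameters.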
static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                hidl_vec<OutputShape>* outputShapes,
                                                Timing* timing) {
    ErrorStatus result;
    Return<void> ret = preparedModel->executeSynchronously_1_3(
            request, measure,
            [&result, outputShapes, timing](ErrorStatus error, const hidl_vec<OutputShape>& shapes,
                                            const Timing& time) {
                result = error;
                *outputShapes = shapes;
                *timing = time;
            });
    if (!ret.isOk()) {
        return ErrorStatus::GENERAL_FAILURE;
    }
    return result;
}
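// Creates an ExecutionBurstController for running burst executions on the prepared model.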
static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
        const sp<IPreparedModel>& preparedModel) {
    return android::nn::ExecutionBurstController::create(preparedModel,
                                                         std::chrono::microseconds{0});
}

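// Executes the prepared model once according to `testConfig` and validates the returned
// status, output shapes, timing, and output values. When the driver reports GENERAL_FAILURE
// for a model whose outputs are not fully specified, the test is skipped; if
// testConfig.reportSkipping is false, the skip is only recorded via `skipped` and left to
// the caller.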
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           const TestConfig& testConfig, bool* skipped = nullptr) {
    if (skipped != nullptr) {
        *skipped = false;
    }
    // If output0 is not larger than one byte, we cannot test with an insufficient output buffer.
    if (testConfig.outputType == OutputType::INSUFFICIENT &&
        !isOutputSizeGreaterThanOne(testModel, 0)) {
        return;
    }

    Request request = createRequest(testModel);
    if (testConfig.outputType == OutputType::INSUFFICIENT) {
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
    }

    ErrorStatus executionStatus;
    hidl_vec<OutputShape> outputShapes;
    Timing timing;
    switch (testConfig.executor) {
        case Executor::ASYNC: {
            SCOPED_TRACE("asynchronous");

            // launch execution
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            Return<ErrorStatus> executionLaunchStatus = ExecutePreparedModel(
                    preparedModel, request, testConfig.measureTiming, executionCallback);
            ASSERT_TRUE(executionLaunchStatus.isOk());
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));

            // retrieve execution status
            executionCallback->wait();
            executionStatus = executionCallback->getStatus();
            outputShapes = executionCallback->getOutputShapes();
            timing = executionCallback->getTiming();

            break;
        }
        case Executor::SYNC: {
            SCOPED_TRACE("synchronous");

            // execute
            Return<ErrorStatus> executionReturnStatus = ExecutePreparedModel(
                    preparedModel, request, testConfig.measureTiming, &outputShapes, &timing);
            ASSERT_TRUE(executionReturnStatus.isOk());
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);

            break;
        }
        case Executor::BURST: {
            SCOPED_TRACE("burst");

            // create burst
            const std::shared_ptr<::android::nn::ExecutionBurstController> controller =
                    CreateBurst(preparedModel);
            ASSERT_NE(nullptr, controller.get());

            // create memory keys
            std::vector<intptr_t> keys(request.pools.size());
            for (size_t i = 0; i < keys.size(); ++i) {
                keys[i] = reinterpret_cast<intptr_t>(&request.pools[i]);
            }

            // execute burst
            int n;
            std::tie(n, outputShapes, timing, std::ignore) =
                    controller->compute(request, testConfig.measureTiming, keys);
            executionStatus = nn::convertResultCodeToErrorStatus(n);

            break;
        }
    }

    if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        if (skipped != nullptr) {
            *skipped = true;
        }
        if (!testConfig.reportSkipping) {
            return;
        }
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "execute model that it does not support.";
        std::cout << "[ ] Early termination of test because vendor service cannot "
                     "execute model that it does not support."
                  << std::endl;
        GTEST_SKIP();
    }
    if (testConfig.measureTiming == MeasureTiming::NO) {
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
    } else {
        if (timing.timeOnDevice != UINT64_MAX && timing.timeInDriver != UINT64_MAX) {
            EXPECT_LE(timing.timeOnDevice, timing.timeInDriver);
        }
    }

    switch (testConfig.outputType) {
        case OutputType::FULLY_SPECIFIED:
            // If the model output operands are fully specified, outputShapes must be
            // either empty or have the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_TRUE(outputShapes.size() == 0 ||
                        outputShapes.size() == testModel.outputIndexes.size());
            break;
        case OutputType::UNSPECIFIED:
            // If the model output operands are not fully specified, outputShapes must have
            // the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.outputIndexes.size());
            break;
        case OutputType::INSUFFICIENT:
            ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.outputIndexes.size());
            ASSERT_FALSE(outputShapes[0].isSufficient);
            return;
    }

    // Go through all outputs, check returned output shapes.
    for (uint32_t i = 0; i < outputShapes.size(); i++) {
        EXPECT_TRUE(outputShapes[i].isSufficient);
        const auto& expect = testModel.operands[testModel.outputIndexes[i]].dimensions;
        const std::vector<uint32_t> actual = outputShapes[i].dimensions;
        EXPECT_EQ(expect, actual);
    }

    // Retrieve execution results.
    const std::vector<TestBuffer> outputs = getOutputBuffers(request);

    // We want "close-enough" results.
    checkResults(testModel, outputs);
}

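// Runs EvaluatePreparedModel over every combination of output type, timing measurement, and
// executor that applies to the given test kind.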
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           TestKind testKind) {
    std::vector<OutputType> outputTypesList;
    std::vector<MeasureTiming> measureTimingList;
    std::vector<Executor> executorList;

    switch (testKind) {
        case TestKind::GENERAL: {
            outputTypesList = {OutputType::FULLY_SPECIFIED};
            measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
            executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
        } break;
        case TestKind::DYNAMIC_SHAPE: {
            outputTypesList = {OutputType::UNSPECIFIED, OutputType::INSUFFICIENT};
            measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
            executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
        } break;
        case TestKind::QUANTIZATION_COUPLING: {
            LOG(FATAL) << "Wrong TestKind for EvaluatePreparedModel";
            return;
        } break;
    }

    for (const OutputType outputType : outputTypesList) {
        for (const MeasureTiming measureTiming : measureTimingList) {
            for (const Executor executor : executorList) {
                const TestConfig testConfig(executor, measureTiming, outputType);
                EvaluatePreparedModel(preparedModel, testModel, testConfig);
            }
        }
    }
}

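// Evaluates a model that uses unsigned quantized operands together with its signed
// counterpart, requiring that the driver either executes both or skips both.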
void EvaluatePreparedCoupledModels(const sp<IPreparedModel>& preparedModel,
                                   const TestModel& testModel,
                                   const sp<IPreparedModel>& preparedCoupledModel,
                                   const TestModel& coupledModel) {
    const std::vector<OutputType> outputTypesList = {OutputType::FULLY_SPECIFIED};
    const std::vector<MeasureTiming> measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
    const std::vector<Executor> executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};

    for (const OutputType outputType : outputTypesList) {
        for (const MeasureTiming measureTiming : measureTimingList) {
            for (const Executor executor : executorList) {
                const TestConfig testConfig(executor, measureTiming, outputType,
                                            /*reportSkipping=*/false);
                bool baseSkipped = false;
                EvaluatePreparedModel(preparedModel, testModel, testConfig, &baseSkipped);
                bool coupledSkipped = false;
                EvaluatePreparedModel(preparedCoupledModel, coupledModel, testConfig,
                                      &coupledSkipped);
                ASSERT_EQ(baseSkipped, coupledSkipped);
                if (baseSkipped) {
                    LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                                 "execute model that it does not support.";
                    std::cout << "[ ] Early termination of test because vendor service cannot "
                                 "execute model that it does not support."
                              << std::endl;
                    GTEST_SKIP();
                }
            }
        }
    }
}

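// Entry point for the generated tests: builds the HIDL model from the test description,
// prepares it on the device, and evaluates it according to `testKind`.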
void Execute(const sp<IDevice>& device, const TestModel& testModel, TestKind testKind) {
    Model model = createModel(testModel);
    if (testKind == TestKind::DYNAMIC_SHAPE) {
        makeOutputDimensionsUnspecified(&model);
    }

    sp<IPreparedModel> preparedModel;
    switch (testKind) {
        case TestKind::GENERAL: {
            createPreparedModel(device, model, &preparedModel);
            if (preparedModel == nullptr) return;
            EvaluatePreparedModel(preparedModel, testModel, TestKind::GENERAL);
        } break;
        case TestKind::DYNAMIC_SHAPE: {
            createPreparedModel(device, model, &preparedModel);
            if (preparedModel == nullptr) return;
            EvaluatePreparedModel(preparedModel, testModel, TestKind::DYNAMIC_SHAPE);
        } break;
        case TestKind::QUANTIZATION_COUPLING: {
            ASSERT_TRUE(testModel.hasQuant8CoupledOperands());
            createPreparedModel(device, model, &preparedModel, /*reportSkipping*/ false);
            TestModel signedQuantizedModel = convertQuant8AsymmOperandsToSigned(testModel);
            sp<IPreparedModel> preparedCoupledModel;
            createPreparedModel(device, createModel(signedQuantizedModel), &preparedCoupledModel,
                                /*reportSkipping*/ false);
            // If we couldn't prepare a model with unsigned quantization, we must
            // fail to prepare a model with signed quantization as well.
            if (preparedModel == nullptr) {
                ASSERT_EQ(preparedCoupledModel, nullptr);
                // If we failed to prepare both of the models, we can safely skip
                // the test.
                LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                             "prepare model that it does not support.";
                std::cout << "[ ] Early termination of test because vendor service cannot "
                             "prepare model that it does not support."
                          << std::endl;
                GTEST_SKIP();
            }
            ASSERT_NE(preparedCoupledModel, nullptr);
            EvaluatePreparedCoupledModels(preparedModel, testModel, preparedCoupledModel,
                                          signedQuantizedModel);
        } break;
    }
}

void GeneratedTestBase::SetUp() {
    testing::TestWithParam<GeneratedTestParam>::SetUp();
    ASSERT_NE(kDevice, nullptr);
}

std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
    return TestModelManager::get().getTestModels(filter);
}

std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
    const auto& [namedDevice, namedModel] = info.param;
    return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
}

// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {};

// Tag for the dynamic output shape tests
class DynamicOutputShapeTest : public GeneratedTest {};

// Tag for the quantization coupling tests
class QuantizationCouplingTest : public GeneratedTest {};

TEST_P(GeneratedTest, Test) {
    Execute(kDevice, kTestModel, /*testKind=*/TestKind::GENERAL);
}

TEST_P(DynamicOutputShapeTest, Test) {
    Execute(kDevice, kTestModel, /*testKind=*/TestKind::DYNAMIC_SHAPE);
}

TEST_P(QuantizationCouplingTest, Test) {
    Execute(kDevice, kTestModel, /*testKind=*/TestKind::QUANTIZATION_COUPLING);
}

INSTANTIATE_GENERATED_TEST(GeneratedTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });

INSTANTIATE_GENERATED_TEST(DynamicOutputShapeTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });

INSTANTIATE_GENERATED_TEST(QuantizationCouplingTest, [](const TestModel& testModel) {
    return testModel.hasQuant8CoupledOperands() && testModel.operations.size() == 1;
});

}  // namespace android::hardware::neuralnetworks::V1_3::vts::functional