blob: 573545a049a35eaa01069373e031681ec2851680 [file] [log] [blame]
Slava Shklyaev73ee79d2019-05-14 14:15:14 +01001/*
2 * Copyright (C) 2019 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "GeneratedTestHarness.h"
18
19#include <android-base/logging.h>
20#include <android/hardware/neuralnetworks/1.0/IDevice.h>
21#include <android/hardware/neuralnetworks/1.0/IExecutionCallback.h>
22#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
23#include <android/hardware/neuralnetworks/1.0/IPreparedModelCallback.h>
24#include <android/hardware/neuralnetworks/1.0/types.h>
25#include <android/hardware/neuralnetworks/1.1/IDevice.h>
26#include <android/hardware/neuralnetworks/1.2/IDevice.h>
27#include <android/hardware/neuralnetworks/1.2/IExecutionCallback.h>
28#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
29#include <android/hardware/neuralnetworks/1.2/IPreparedModelCallback.h>
30#include <android/hidl/allocator/1.0/IAllocator.h>
31#include <android/hidl/memory/1.0/IMemory.h>
Lev Proleev56cda832019-12-05 14:49:47 +000032#include <gtest/gtest.h>
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010033#include <hidlmemory/mapping.h>
34
Xusong Wangead950d2019-08-09 16:45:24 -070035#include <algorithm>
Michael Butler648ada52019-07-25 17:22:11 -070036#include <chrono>
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010037#include <iostream>
Xusong Wangead950d2019-08-09 16:45:24 -070038#include <numeric>
Lev Proleev56cda832019-12-05 14:49:47 +000039#include <vector>
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010040
41#include "1.0/Utils.h"
42#include "1.2/Callbacks.h"
43#include "ExecutionBurstController.h"
44#include "MemoryUtils.h"
45#include "TestHarness.h"
Xusong Wangbcaa7822019-08-23 16:10:54 -070046#include "VtsHalNeuralnetworks.h"
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010047
Michael Butler62749b92019-08-26 23:55:47 -070048namespace android::hardware::neuralnetworks::V1_2::vts::functional {
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010049
Xusong Wangead950d2019-08-09 16:45:24 -070050using namespace test_helper;
Michael Butler62749b92019-08-26 23:55:47 -070051using hidl::memory::V1_0::IMemory;
52using implementation::ExecutionCallback;
53using implementation::PreparedModelCallback;
54using V1_0::DataLocation;
55using V1_0::ErrorStatus;
56using V1_0::OperandLifeTime;
57using V1_0::Request;
58using V1_1::ExecutionPreference;
Slava Shklyaev73ee79d2019-05-14 14:15:14 +010059using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;
60
namespace {

// Which execution path of IPreparedModel to exercise.
enum class Executor { ASYNC, SYNC, BURST };

// How the output operands of the model under test are specified:
// fully-specified dimensions, dimensions erased (dynamic output shape), or a
// deliberately too-small output buffer.
enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };

// One cell of the test matrix evaluated by EvaluatePreparedModel.
struct TestConfig {
    Executor executor;
    MeasureTiming measureTiming;
    OutputType outputType;
};

}  // namespace
74
// Builds a HIDL V1_2 Model from a test_helper::TestModel description.
// CONSTANT_COPY operand data is packed into Model::operandValues;
// CONSTANT_REFERENCE data is packed into a single shared-memory pool.
Model createModel(const TestModel& testModel) {
    // Model operands.
    CHECK_EQ(testModel.referenced.size(), 0u);  // Referenced subgraphs are not
                                                // supported by this HAL version.
    hidl_vec<Operand> operands(testModel.main.operands.size());
    // Running totals double as the next write offset within each pool.
    size_t constCopySize = 0, constRefSize = 0;
    for (uint32_t i = 0; i < testModel.main.operands.size(); i++) {
        const auto& op = testModel.main.operands[i];

        // First pass: compute each constant operand's location; the actual
        // bytes are copied in later, once the total sizes are known.
        DataLocation loc = {};
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constCopySize),
                   .length = static_cast<uint32_t>(op.data.size())};
            // Advance by the aligned size so the next operand starts aligned.
            constCopySize += op.data.alignedSize();
        } else if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constRefSize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constRefSize += op.data.alignedSize();
        }

        // Per-channel quantization parameters are the only ExtraParams
        // variant used by the generated tests.
        Operand::ExtraParams extraParams;
        if (op.type == TestOperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL) {
            extraParams.channelQuant(SymmPerChannelQuantParams{
                    .scales = op.channelQuant.scales, .channelDim = op.channelQuant.channelDim});
        }

        operands[i] = {.type = static_cast<OperandType>(op.type),
                       .dimensions = op.dimensions,
                       .numberOfConsumers = op.numberOfConsumers,
                       .scale = op.scale,
                       .zeroPoint = op.zeroPoint,
                       .lifetime = static_cast<OperandLifeTime>(op.lifetime),
                       .location = loc,
                       .extraParams = std::move(extraParams)};
    }

    // Model operations.
    hidl_vec<Operation> operations(testModel.main.operations.size());
    std::transform(testModel.main.operations.begin(), testModel.main.operations.end(),
                   operations.begin(), [](const TestOperation& op) -> Operation {
                       return {.type = static_cast<OperationType>(op.type),
                               .inputs = op.inputs,
                               .outputs = op.outputs};
                   });

    // Constant copies: second pass writes the bytes at the offsets chosen above.
    hidl_vec<uint8_t> operandValues(constCopySize);
    for (uint32_t i = 0; i < testModel.main.operands.size(); i++) {
        const auto& op = testModel.main.operands[i];
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            const uint8_t* begin = op.data.get<uint8_t>();
            const uint8_t* end = begin + op.data.size();
            std::copy(begin, end, operandValues.data() + operands[i].location.offset);
        }
    }

    // Shared memory: only allocated when at least one CONSTANT_REFERENCE
    // operand exists; all such operands share pool 0.
    hidl_vec<hidl_memory> pools = {};
    if (constRefSize > 0) {
        hidl_vec_push_back(&pools, nn::allocateSharedMemory(constRefSize));
        CHECK_NE(pools[0].size(), 0u);

        // load data
        sp<IMemory> mappedMemory = mapMemory(pools[0]);
        CHECK(mappedMemory.get() != nullptr);
        uint8_t* mappedPtr =
                reinterpret_cast<uint8_t*>(static_cast<void*>(mappedMemory->getPointer()));
        CHECK(mappedPtr != nullptr);

        for (uint32_t i = 0; i < testModel.main.operands.size(); i++) {
            const auto& op = testModel.main.operands[i];
            if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
                const uint8_t* begin = op.data.get<uint8_t>();
                const uint8_t* end = begin + op.data.size();
                std::copy(begin, end, mappedPtr + operands[i].location.offset);
            }
        }
    }

    return {.operands = std::move(operands),
            .operations = std::move(operations),
            .inputIndexes = testModel.main.inputIndexes,
            .outputIndexes = testModel.main.outputIndexes,
            .operandValues = std::move(operandValues),
            .pools = std::move(pools),
            .relaxComputationFloat32toFloat16 = testModel.isRelaxed};
}
163
Xusong Wangead950d2019-08-09 16:45:24 -0700164static bool isOutputSizeGreaterThanOne(const TestModel& testModel, uint32_t index) {
Slava Shklyaev1f98e2e2020-01-31 15:14:24 +0000165 const auto byteSize = testModel.main.operands[testModel.main.outputIndexes[index]].data.size();
Xusong Wangead950d2019-08-09 16:45:24 -0700166 return byteSize > 1u;
167}
168
169static void makeOutputInsufficientSize(uint32_t outputIndex, Request* request) {
170 auto& length = request->outputs[outputIndex].location.length;
171 ASSERT_GT(length, 1u);
172 length -= 1u;
173}
174
175static void makeOutputDimensionsUnspecified(Model* model) {
176 for (auto i : model->outputIndexes) {
177 auto& dims = model->operands[i].dimensions;
178 std::fill(dims.begin(), dims.end(), 0);
179 }
180}
181
// Asynchronous execution path: launches execute_1_2() with the supplied
// callback; results are retrieved from the callback by the caller.
static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                sp<ExecutionCallback>& callback) {
    return preparedModel->execute_1_2(request, measure, callback);
}
// Synchronous execution path: runs executeSynchronously() and copies the
// values delivered to the result callback into the caller's out-parameters.
// A dead transport (!ret.isOk()) is mapped to GENERAL_FAILURE.
static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                hidl_vec<OutputShape>* outputShapes,
                                                Timing* timing) {
    ErrorStatus result;
    Return<void> ret = preparedModel->executeSynchronously(
            request, measure,
            [&result, outputShapes, timing](ErrorStatus error, const hidl_vec<OutputShape>& shapes,
                                            const Timing& time) {
                // The callback runs before executeSynchronously() returns, so
                // writing through the captured pointers here is safe.
                result = error;
                *outputShapes = shapes;
                *timing = time;
            });
    if (!ret.isOk()) {
        return ErrorStatus::GENERAL_FAILURE;
    }
    return result;
}
// Creates a burst controller for the prepared model with a zero-microsecond
// polling duration (presumably: block on the FMQ rather than spin-poll —
// see ExecutionBurstController::create for the exact semantics).
static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
        const sp<IPreparedModel>& preparedModel) {
    return android::nn::ExecutionBurstController::create(preparedModel,
                                                         std::chrono::microseconds{0});
}
Xusong Wangead950d2019-08-09 16:45:24 -0700210
// Runs one cell of the test matrix (executor x timing x output type) against
// the prepared model and validates status, output shapes, timing, and the
// computed results.
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           const TestConfig& testConfig) {
    // If output0 does not have size larger than one byte, we can not test with insufficient buffer.
    if (testConfig.outputType == OutputType::INSUFFICIENT &&
        !isOutputSizeGreaterThanOne(testModel, 0)) {
        return;
    }

    Request request = createRequest(testModel);
    if (testConfig.outputType == OutputType::INSUFFICIENT) {
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
    }

    // Populated by whichever execution path runs below.
    ErrorStatus executionStatus;
    hidl_vec<OutputShape> outputShapes;
    Timing timing;
    switch (testConfig.executor) {
        case Executor::ASYNC: {
            SCOPED_TRACE("asynchronous");

            // launch execution
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            Return<ErrorStatus> executionLaunchStatus = ExecutePreparedModel(
                    preparedModel, request, testConfig.measureTiming, executionCallback);
            ASSERT_TRUE(executionLaunchStatus.isOk());
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));

            // retrieve execution status
            executionCallback->wait();
            executionStatus = executionCallback->getStatus();
            outputShapes = executionCallback->getOutputShapes();
            timing = executionCallback->getTiming();

            break;
        }
        case Executor::SYNC: {
            SCOPED_TRACE("synchronous");

            // execute
            Return<ErrorStatus> executionReturnStatus = ExecutePreparedModel(
                    preparedModel, request, testConfig.measureTiming, &outputShapes, &timing);
            ASSERT_TRUE(executionReturnStatus.isOk());
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);

            break;
        }
        case Executor::BURST: {
            SCOPED_TRACE("burst");

            // create burst
            const std::shared_ptr<::android::nn::ExecutionBurstController> controller =
                    CreateBurst(preparedModel);
            ASSERT_NE(nullptr, controller.get());

            // create memory keys: the address of each pool identifies it in
            // the burst's memory cache.
            std::vector<intptr_t> keys(request.pools.size());
            for (size_t i = 0; i < keys.size(); ++i) {
                keys[i] = reinterpret_cast<intptr_t>(&request.pools[i]);
            }

            // execute burst; compute() reports a result code that must be
            // converted back to an ErrorStatus.
            int n;
            std::tie(n, outputShapes, timing, std::ignore) =
                    controller->compute(request, testConfig.measureTiming, keys);
            executionStatus = nn::legacyConvertResultCodeToErrorStatus(n);

            break;
        }
    }

    // A driver may legitimately refuse dynamic-output-shape models with
    // GENERAL_FAILURE; skip rather than fail in that case.
    if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "execute model that it does not support.";
        std::cout << "[ ] Early termination of test because vendor service cannot "
                     "execute model that it does not support."
                  << std::endl;
        GTEST_SKIP();
    }
    // Timing must be UINT64_MAX (unmeasured) when not requested; when
    // measured, device time cannot exceed driver time.
    if (testConfig.measureTiming == MeasureTiming::NO) {
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
    } else {
        if (timing.timeOnDevice != UINT64_MAX && timing.timeInDriver != UINT64_MAX) {
            EXPECT_LE(timing.timeOnDevice, timing.timeInDriver);
        }
    }

    switch (testConfig.outputType) {
        case OutputType::FULLY_SPECIFIED:
            // If the model output operands are fully specified, outputShapes must be
            // either empty, or have the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_TRUE(outputShapes.size() == 0 ||
                        outputShapes.size() == testModel.main.outputIndexes.size());
            break;
        case OutputType::UNSPECIFIED:
            // If the model output operands are not fully specified, outputShapes must have
            // the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
            break;
        case OutputType::INSUFFICIENT:
            ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
            ASSERT_FALSE(outputShapes[0].isSufficient);
            // No result data to validate when the buffer was too small.
            return;
    }

    // Go through all outputs, check returned output shapes.
    for (uint32_t i = 0; i < outputShapes.size(); i++) {
        EXPECT_TRUE(outputShapes[i].isSufficient);
        const auto& expect = testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
        const std::vector<uint32_t> actual = outputShapes[i].dimensions;
        EXPECT_EQ(expect, actual);
    }

    // Retrieve execution results.
    const std::vector<TestBuffer> outputs = getOutputBuffers(request);

    // We want "close-enough" results.
    checkResults(testModel, outputs);
}
334
Xusong Wangead950d2019-08-09 16:45:24 -0700335void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
336 bool testDynamicOutputShape) {
Lev Proleev56cda832019-12-05 14:49:47 +0000337 std::vector<OutputType> outputTypesList;
338 std::vector<MeasureTiming> measureTimingList;
339 std::vector<Executor> executorList;
Lev Proleev0d4ba3f2019-10-02 17:32:06 +0100340
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100341 if (testDynamicOutputShape) {
Lev Proleev0d4ba3f2019-10-02 17:32:06 +0100342 outputTypesList = {OutputType::UNSPECIFIED, OutputType::INSUFFICIENT};
343 measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
344 executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100345 } else {
Lev Proleev0d4ba3f2019-10-02 17:32:06 +0100346 outputTypesList = {OutputType::FULLY_SPECIFIED};
347 measureTimingList = {MeasureTiming::NO, MeasureTiming::YES};
348 executorList = {Executor::ASYNC, Executor::SYNC, Executor::BURST};
349 }
350
351 for (const OutputType outputType : outputTypesList) {
352 for (const MeasureTiming measureTiming : measureTimingList) {
353 for (const Executor executor : executorList) {
354 const TestConfig testConfig = {.executor = executor,
355 .measureTiming = measureTiming,
356 .outputType = outputType};
357 EvaluatePreparedModel(preparedModel, testModel, testConfig);
358 }
359 }
Slava Shklyaev73ee79d2019-05-14 14:15:14 +0100360 }
361}
362
// Entry point for one generated test: builds the HIDL model (optionally
// erasing output dimensions for the dynamic-shape variant), prepares it on
// the device, and runs the full evaluation matrix.
void Execute(const sp<IDevice>& device, const TestModel& testModel, bool testDynamicOutputShape) {
    Model model = createModel(testModel);
    if (testDynamicOutputShape) {
        makeOutputDimensionsUnspecified(&model);
    }

    sp<IPreparedModel> preparedModel;
    createPreparedModel(device, model, &preparedModel);
    // A null preparedModel means preparation did not produce a usable model
    // (presumably unsupported by the driver); nothing further to evaluate.
    if (preparedModel == nullptr) return;

    EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
}
375
// Per-test setup: runs the base fixture's SetUp and verifies the device
// handle supplied by the test parameter is valid before any test body runs.
void GeneratedTestBase::SetUp() {
    testing::TestWithParam<GeneratedTestParam>::SetUp();
    ASSERT_NE(kDevice, nullptr);
}
380
// Returns the generated test models accepted by `filter` (filtering on the
// TestModel itself).
std::vector<NamedModel> getNamedModels(const FilterFn& filter) {
    return TestModelManager::get().getTestModels(filter);
}
384
// Returns the generated test models accepted by `filter` (filtering on the
// model's name).
std::vector<NamedModel> getNamedModels(const FilterNameFn& filter) {
    return TestModelManager::get().getTestModels(filter);
}
388
Michael Butler07633282019-08-29 11:08:25 -0700389std::string printGeneratedTest(const testing::TestParamInfo<GeneratedTestParam>& info) {
390 const auto& [namedDevice, namedModel] = info.param;
391 return gtestCompliantName(getName(namedDevice) + "_" + getName(namedModel));
392}
393
// Tag for the generated tests (fully-specified output shapes).
class GeneratedTest : public GeneratedTestBase {};

// Tag for the dynamic output shape tests (output dimensions erased).
class DynamicOutputShapeTest : public GeneratedTest {};
399
// Runs the full evaluation matrix with fully-specified output shapes.
TEST_P(GeneratedTest, Test) {
    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/false);
}
403
// Runs the full evaluation matrix with output dimensions left unspecified.
TEST_P(DynamicOutputShapeTest, Test) {
    Execute(kDevice, kTestModel, /*testDynamicOutputShape=*/true);
}
407
// Instantiate both suites over every generated model that is expected to
// succeed; expect-failure models are covered by the validation tests instead.
INSTANTIATE_GENERATED_TEST(GeneratedTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });

INSTANTIATE_GENERATED_TEST(DynamicOutputShapeTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });
413
Michael Butler62749b92019-08-26 23:55:47 -0700414} // namespace android::hardware::neuralnetworks::V1_2::vts::functional