/*
 * Copyright (C) 2019 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "GeneratedTestHarness.h"

#include <android-base/logging.h>
#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <android/hardware/neuralnetworks/1.0/IExecutionCallback.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModelCallback.h>
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include <android/hardware/neuralnetworks/1.2/IDevice.h>
#include <android/hardware/neuralnetworks/1.2/IExecutionCallback.h>
#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.2/IPreparedModelCallback.h>
#include <android/hidl/allocator/1.0/IAllocator.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>

#include <gtest/gtest.h>
#include <algorithm>
#include <iostream>
#include <numeric>

#include "1.0/Utils.h"
#include "1.2/Callbacks.h"
#include "ExecutionBurstController.h"
#include "MemoryUtils.h"
#include "TestHarness.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"

namespace android {
namespace hardware {
namespace neuralnetworks {
namespace V1_2 {
namespace vts {
namespace functional {

using namespace test_helper;
using ::android::hardware::neuralnetworks::V1_0::DataLocation;
using ::android::hardware::neuralnetworks::V1_0::ErrorStatus;
using ::android::hardware::neuralnetworks::V1_0::OperandLifeTime;
using ::android::hardware::neuralnetworks::V1_0::Request;
using ::android::hardware::neuralnetworks::V1_0::RequestArgument;
using ::android::hardware::neuralnetworks::V1_1::ExecutionPreference;
using ::android::hardware::neuralnetworks::V1_2::Constant;
using ::android::hardware::neuralnetworks::V1_2::IDevice;
using ::android::hardware::neuralnetworks::V1_2::IPreparedModel;
using ::android::hardware::neuralnetworks::V1_2::MeasureTiming;
using ::android::hardware::neuralnetworks::V1_2::Model;
using ::android::hardware::neuralnetworks::V1_2::OutputShape;
using ::android::hardware::neuralnetworks::V1_2::Timing;
using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
using ::android::hidl::memory::V1_0::IMemory;
using HidlToken = hidl_array<uint8_t, static_cast<uint32_t>(Constant::BYTE_SIZE_OF_CACHE_TOKEN)>;

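// How each test run treats the model's output operands:
// - FULLY_SPECIFIED: output dimensions are fully known ahead of execution.
// - UNSPECIFIED: output dimensions are zeroed out, so the driver must deduce them.
// - INSUFFICIENT: output0's buffer is shrunk by one byte, so the driver must report
//   OUTPUT_INSUFFICIENT_SIZE along with the actual output shapes.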
enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };

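// Converts a test_helper::TestModel into a V1_2 HIDL Model. CONSTANT_COPY operand
// data is packed into Model::operandValues, while CONSTANT_REFERENCE data is written
// into a single shared-memory pool appended to Model::pools; both layouts are
// computed with aligned offsets in the first pass over the operands.
//
// A minimal usage sketch (assuming a TestModel from the generated corpus):
//     Model model = createModel(testModel);
//     makeOutputDimensionsUnspecified(&model);  // only for the dynamic-shape tests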
Model createModel(const TestModel& testModel) {
    // Model operands.
    hidl_vec<Operand> operands(testModel.operands.size());
    size_t constCopySize = 0, constRefSize = 0;
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];

        DataLocation loc = {};
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constCopySize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constCopySize += op.data.alignedSize();
        } else if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constRefSize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constRefSize += op.data.alignedSize();
        }

        Operand::ExtraParams extraParams;
        if (op.type == TestOperandType::TENSOR_QUANT8_SYMM_PER_CHANNEL) {
            extraParams.channelQuant(SymmPerChannelQuantParams{
                    .scales = op.channelQuant.scales, .channelDim = op.channelQuant.channelDim});
        }

        operands[i] = {.type = static_cast<OperandType>(op.type),
                       .dimensions = op.dimensions,
                       .numberOfConsumers = op.numberOfConsumers,
                       .scale = op.scale,
                       .zeroPoint = op.zeroPoint,
                       .lifetime = static_cast<OperandLifeTime>(op.lifetime),
                       .location = loc,
                       .extraParams = std::move(extraParams)};
    }

    // Model operations.
    hidl_vec<Operation> operations(testModel.operations.size());
    std::transform(testModel.operations.begin(), testModel.operations.end(), operations.begin(),
                   [](const TestOperation& op) -> Operation {
                       return {.type = static_cast<OperationType>(op.type),
                               .inputs = op.inputs,
                               .outputs = op.outputs};
                   });

    // Constant copies.
    hidl_vec<uint8_t> operandValues(constCopySize);
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            const uint8_t* begin = op.data.get<uint8_t>();
            const uint8_t* end = begin + op.data.size();
            std::copy(begin, end, operandValues.data() + operands[i].location.offset);
        }
    }

    // Shared memory.
    hidl_vec<hidl_memory> pools = {};
    if (constRefSize > 0) {
        hidl_vec_push_back(&pools, nn::allocateSharedMemory(constRefSize));
        CHECK_NE(pools[0].size(), 0u);

        // Load the constant-reference data into the shared memory pool.
        sp<IMemory> mappedMemory = mapMemory(pools[0]);
        CHECK(mappedMemory.get() != nullptr);
        uint8_t* mappedPtr =
                reinterpret_cast<uint8_t*>(static_cast<void*>(mappedMemory->getPointer()));
        CHECK(mappedPtr != nullptr);

        for (uint32_t i = 0; i < testModel.operands.size(); i++) {
            const auto& op = testModel.operands[i];
            if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
                const uint8_t* begin = op.data.get<uint8_t>();
                const uint8_t* end = begin + op.data.size();
                std::copy(begin, end, mappedPtr + operands[i].location.offset);
            }
        }
    }

    return {.operands = std::move(operands),
            .operations = std::move(operations),
            .inputIndexes = testModel.inputIndexes,
            .outputIndexes = testModel.outputIndexes,
            .operandValues = std::move(operandValues),
            .pools = std::move(pools),
            .relaxComputationFloat32toFloat16 = testModel.isRelaxed};
}

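// Returns true if the indexed output is larger than one byte, i.e. it can be
// truncated for the INSUFFICIENT variant.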
static bool isOutputSizeGreaterThanOne(const TestModel& testModel, uint32_t index) {
    const auto byteSize = testModel.operands[testModel.outputIndexes[index]].data.size();
    return byteSize > 1u;
}

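// Shrinks the indexed output buffer by one byte so that execution must fail with
// OUTPUT_INSUFFICIENT_SIZE.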
static void makeOutputInsufficientSize(uint32_t outputIndex, Request* request) {
    auto& length = request->outputs[outputIndex].location.length;
    ASSERT_GT(length, 1u);
    length -= 1u;
}

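// Marks every dimension of every model output as unknown (0), forcing the driver to
// deduce the output shapes at execution time.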
static void makeOutputDimensionsUnspecified(Model* model) {
    for (auto i : model->outputIndexes) {
        auto& dims = model->operands[i].dimensions;
        std::fill(dims.begin(), dims.end(), 0);
    }
}

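// Overloads dispatching one execution through the asynchronous path (execute_1_2
// plus a callback) and the synchronous path (executeSynchronously), respectively.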
static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                sp<ExecutionCallback>& callback) {
    return preparedModel->execute_1_2(request, measure, callback);
}
static Return<ErrorStatus> ExecutePreparedModel(const sp<IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                hidl_vec<OutputShape>* outputShapes,
                                                Timing* timing) {
    ErrorStatus result;
    Return<void> ret = preparedModel->executeSynchronously(
            request, measure,
            [&result, outputShapes, timing](ErrorStatus error, const hidl_vec<OutputShape>& shapes,
                                            const Timing& time) {
                result = error;
                *outputShapes = shapes;
                *timing = time;
            });
    if (!ret.isOk()) {
        return ErrorStatus::GENERAL_FAILURE;
    }
    return result;
}
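// Creates the controller used by the burst path; /*blocking=*/true is assumed here
// to select blocking waits on the controller's underlying fast message queues.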
static std::shared_ptr<::android::nn::ExecutionBurstController> CreateBurst(
        const sp<IPreparedModel>& preparedModel) {
    return ::android::nn::ExecutionBurstController::create(preparedModel, /*blocking=*/true);
}
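// The three execution paths exercised by every generated test.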
enum class Executor { ASYNC, SYNC, BURST };

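// Runs one execution of preparedModel over the golden inputs in testModel using the
// given execution path, timing mode, and output mode, then checks the reported
// status, output shapes, timing, and output data.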
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           Executor executor, MeasureTiming measure, OutputType outputType) {
    // If output0 is not larger than one byte, we cannot test with an insufficient buffer.
    if (outputType == OutputType::INSUFFICIENT && !isOutputSizeGreaterThanOne(testModel, 0)) {
        return;
    }

    Request request = createRequest(testModel);
    if (outputType == OutputType::INSUFFICIENT) {
        makeOutputInsufficientSize(/*outputIndex=*/0, &request);
    }

    ErrorStatus executionStatus;
    hidl_vec<OutputShape> outputShapes;
    Timing timing;
    switch (executor) {
        case Executor::ASYNC: {
            SCOPED_TRACE("asynchronous");

            // launch execution
            sp<ExecutionCallback> executionCallback = new ExecutionCallback();
            Return<ErrorStatus> executionLaunchStatus =
                    ExecutePreparedModel(preparedModel, request, measure, executionCallback);
            ASSERT_TRUE(executionLaunchStatus.isOk());
            EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));

            // retrieve execution status
            executionCallback->wait();
            executionStatus = executionCallback->getStatus();
            outputShapes = executionCallback->getOutputShapes();
            timing = executionCallback->getTiming();

            break;
        }
        case Executor::SYNC: {
            SCOPED_TRACE("synchronous");

            // execute
            Return<ErrorStatus> executionReturnStatus =
                    ExecutePreparedModel(preparedModel, request, measure, &outputShapes, &timing);
            ASSERT_TRUE(executionReturnStatus.isOk());
            executionStatus = static_cast<ErrorStatus>(executionReturnStatus);

            break;
        }
        case Executor::BURST: {
            SCOPED_TRACE("burst");

            // create burst
            const std::shared_ptr<::android::nn::ExecutionBurstController> controller =
                    CreateBurst(preparedModel);
            ASSERT_NE(nullptr, controller.get());

            // create memory keys
            std::vector<intptr_t> keys(request.pools.size());
            for (size_t i = 0; i < keys.size(); ++i) {
                keys[i] = reinterpret_cast<intptr_t>(&request.pools[i]);
            }

            // execute burst
            std::tie(executionStatus, outputShapes, timing) =
                    controller->compute(request, measure, keys);

            break;
        }
    }

    if (outputType != OutputType::FULLY_SPECIFIED &&
        executionStatus == ErrorStatus::GENERAL_FAILURE) {
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "execute model that it does not support.";
        std::cout << "[          ] Early termination of test because vendor service cannot "
                     "execute model that it does not support."
                  << std::endl;
        GTEST_SKIP();
    }
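    // Per the HAL, UINT64_MAX in Timing means "no measurement available": both
    // fields must be UINT64_MAX when timing was not requested, and when both are
    // reported, time spent on the device cannot exceed time spent in the driver.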
    if (measure == MeasureTiming::NO) {
        EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
        EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
    } else {
        if (timing.timeOnDevice != UINT64_MAX && timing.timeInDriver != UINT64_MAX) {
            EXPECT_LE(timing.timeOnDevice, timing.timeInDriver);
        }
    }

    switch (outputType) {
        case OutputType::FULLY_SPECIFIED:
            // If the model output operands are fully specified, outputShapes must be
            // either empty, or have the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_TRUE(outputShapes.size() == 0 ||
                        outputShapes.size() == testModel.outputIndexes.size());
            break;
        case OutputType::UNSPECIFIED:
            // If the model output operands are not fully specified, outputShapes must have
            // the same number of elements as the number of outputs.
            ASSERT_EQ(ErrorStatus::NONE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.outputIndexes.size());
            break;
        case OutputType::INSUFFICIENT:
            ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
            ASSERT_EQ(outputShapes.size(), testModel.outputIndexes.size());
            ASSERT_FALSE(outputShapes[0].isSufficient);
            return;
    }

    // Go through all outputs, check returned output shapes.
    for (uint32_t i = 0; i < outputShapes.size(); i++) {
        EXPECT_TRUE(outputShapes[i].isSufficient);
        const auto& expect = testModel.operands[testModel.outputIndexes[i]].dimensions;
        const std::vector<uint32_t> actual = outputShapes[i].dimensions;
        EXPECT_EQ(expect, actual);
    }

    // Retrieve execution results.
    const std::vector<TestBuffer> outputs = getOutputBuffers(request);

    // We want "close-enough" results.
    checkResults(testModel, outputs);
}

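// Exercises the cross product of execution paths and timing modes. With dynamic
// output shapes, each combination runs twice: once with unspecified output
// dimensions and once with an insufficiently sized output buffer; otherwise each
// combination runs once with fully specified outputs.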
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel,
                           bool testDynamicOutputShape) {
    if (testDynamicOutputShape) {
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                              OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                              OutputType::INSUFFICIENT);
    } else {
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::NO,
                              OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::NO,
                              OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::NO,
                              OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::ASYNC, MeasureTiming::YES,
                              OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::SYNC, MeasureTiming::YES,
                              OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, testModel, Executor::BURST, MeasureTiming::YES,
                              OutputType::FULLY_SPECIFIED);
    }
}

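// Compiles the model on the device. If the service reports that the model is not
// fully supported and preparation then fails, *preparedModel is left null so the
// caller can skip the test instead of failing it.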
void PrepareModel(const sp<IDevice>& device, const Model& model,
                  sp<IPreparedModel>* preparedModel) {
    // see if service can handle model
    bool fullySupportsModel = false;
    Return<void> supportedCall = device->getSupportedOperations_1_2(
            model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
                ASSERT_EQ(ErrorStatus::NONE, status);
                ASSERT_NE(0ul, supported.size());
                fullySupportsModel = std::all_of(supported.begin(), supported.end(),
                                                 [](bool valid) { return valid; });
            });
    ASSERT_TRUE(supportedCall.isOk());

    // launch prepare model
    sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_2(
            model, ExecutionPreference::FAST_SINGLE_ANSWER, hidl_vec<hidl_handle>(),
            hidl_vec<hidl_handle>(), HidlToken(), preparedModelCallback);
    ASSERT_TRUE(prepareLaunchStatus.isOk());
    ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

    // retrieve prepared model
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    sp<V1_0::IPreparedModel> preparedModelV1_0 = preparedModelCallback->getPreparedModel();
    *preparedModel = IPreparedModel::castFrom(preparedModelV1_0).withDefault(nullptr);

    // early termination if vendor service cannot fully prepare model
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel->get());
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "prepare model that it does not support.";
        std::cout << "[          ] Early termination of test because vendor service cannot "
                     "prepare model that it does not support."
                  << std::endl;
        return;
    }
    EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    ASSERT_NE(nullptr, preparedModel->get());
}

// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {
  protected:
    void Execute(const TestModel& testModel, bool testDynamicOutputShape) {
        Model model = createModel(testModel);
        if (testDynamicOutputShape) {
            makeOutputDimensionsUnspecified(&model);
        }

        sp<IPreparedModel> preparedModel = nullptr;
        PrepareModel(device, model, &preparedModel);
        if (preparedModel == nullptr) {
            GTEST_SKIP();
        }
        EvaluatePreparedModel(preparedModel, testModel, testDynamicOutputShape);
    }
};

// Tag for the dynamic output shape tests
class DynamicOutputShapeTest : public GeneratedTest {};

TEST_P(GeneratedTest, Test) {
    Execute(*mTestModel, /*testDynamicOutputShape=*/false);
}

TEST_P(DynamicOutputShapeTest, Test) {
    Execute(*mTestModel, /*testDynamicOutputShape=*/true);
}

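// Instantiate both suites over every generated model that is expected to succeed.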
INSTANTIATE_GENERATED_TEST(GeneratedTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });

INSTANTIATE_GENERATED_TEST(DynamicOutputShapeTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });

}  // namespace functional
}  // namespace vts
}  // namespace V1_2
}  // namespace neuralnetworks
}  // namespace hardware
}  // namespace android