/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "GeneratedTestHarness.h"
#include "Callbacks.h"
#include "ExecutionBurstController.h"
#include "TestHarness.h"
#include "Utils.h"

#include <android-base/logging.h>
#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <android/hardware/neuralnetworks/1.0/IExecutionCallback.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModelCallback.h>
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include <android/hardware/neuralnetworks/1.2/IDevice.h>
#include <android/hardware/neuralnetworks/1.2/IExecutionCallback.h>
#include <android/hardware/neuralnetworks/1.2/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.2/IPreparedModelCallback.h>
#include <android/hidl/allocator/1.0/IAllocator.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>
#include <iostream>

namespace android {
namespace hardware {
namespace neuralnetworks {

namespace generated_tests {
using ::android::hardware::neuralnetworks::V1_2::implementation::ExecutionCallback;
using ::android::hardware::neuralnetworks::V1_2::implementation::PreparedModelCallback;
using ::test_helper::bool8;
using ::test_helper::compare;
using ::test_helper::expectMultinomialDistributionWithinTolerance;
using ::test_helper::filter;
using ::test_helper::for_all;
using ::test_helper::for_each;
using ::test_helper::MixedTyped;
using ::test_helper::MixedTypedExample;
using ::test_helper::resize_accordingly;

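// Copies the output bytes of one data type out of the mapped output memory (src) back into the
// corresponding vectors of dst, using each RequestArgument's location to find offset and length.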
template <typename T>
void copy_back_(std::map<int, std::vector<T>>* dst, const std::vector<RequestArgument>& ra,
                char* src) {
    for_each<T>(*dst, [&ra, src](int index, std::vector<T>& m) {
        ASSERT_EQ(m.size(), ra[index].location.length / sizeof(T));
        char* begin = src + ra[index].location.offset;
        memcpy(m.data(), begin, ra[index].location.length);
    });
}

void copy_back(MixedTyped* dst, const std::vector<RequestArgument>& ra, char* src) {
    copy_back_(&dst->float32Operands, ra, src);
    copy_back_(&dst->int32Operands, ra, src);
    copy_back_(&dst->quant8AsymmOperands, ra, src);
    copy_back_(&dst->quant16SymmOperands, ra, src);
    copy_back_(&dst->float16Operands, ra, src);
    copy_back_(&dst->bool8Operands, ra, src);
    copy_back_(&dst->quant8ChannelOperands, ra, src);
    copy_back_(&dst->quant16AsymmOperands, ra, src);
    copy_back_(&dst->quant8SymmOperands, ra, src);
    static_assert(9 == MixedTyped::kNumTypes,
                  "Number of types in MixedTyped changed, but copy_back function wasn't updated");
}

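// Returns true if any dimension of the operand at |index| in |example| is zero, i.e. the operand
// is zero-sized and its buffer cannot be shrunk for the insufficient-output-size case below.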
static bool isZeroSized(const MixedTyped& example, uint32_t index) {
    for (auto i : example.operandDimensions.at(index)) {
        if (i == 0) return true;
    }
    return false;
}

// Top level driver for models and examples generated by test_generator.py
// Test driver for those generated from ml/nn/runtime/test/spec
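//
// ExecutePreparedModel is overloaded on both the prepared model version and the execution style:
// the callback overloads launch an asynchronous execution, while the overloads taking OutputShape
// and Timing out-parameters execute synchronously. Synchronous execution only exists from V1_2
// onward, so the V1_0 synchronous overload reports a test failure.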
static Return<ErrorStatus> ExecutePreparedModel(sp<V1_0::IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming,
                                                sp<ExecutionCallback>& callback) {
    return preparedModel->execute(request, callback);
}
static Return<ErrorStatus> ExecutePreparedModel(sp<V1_2::IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                sp<ExecutionCallback>& callback) {
    return preparedModel->execute_1_2(request, measure, callback);
}
static Return<ErrorStatus> ExecutePreparedModel(sp<V1_0::IPreparedModel>&, const Request&,
                                                MeasureTiming, hidl_vec<OutputShape>*, Timing*) {
    ADD_FAILURE() << "asking for synchronous execution at V1_0";
    return ErrorStatus::GENERAL_FAILURE;
}
static Return<ErrorStatus> ExecutePreparedModel(sp<V1_2::IPreparedModel>& preparedModel,
                                                const Request& request, MeasureTiming measure,
                                                hidl_vec<OutputShape>* outputShapes,
                                                Timing* timing) {
    ErrorStatus result;
    Return<void> ret = preparedModel->executeSynchronously(
            request, measure,
            [&result, outputShapes, timing](ErrorStatus error, const hidl_vec<OutputShape>& shapes,
                                            const Timing& time) {
                result = error;
                *outputShapes = shapes;
                *timing = time;
            });
    if (!ret.isOk()) {
        return ErrorStatus::GENERAL_FAILURE;
    }
    return result;
}
static std::unique_ptr<::android::nn::ExecutionBurstController> CreateBurst(
        const sp<V1_0::IPreparedModel>&) {
    ADD_FAILURE() << "asking for burst execution at V1_0";
    return nullptr;
}
static std::unique_ptr<::android::nn::ExecutionBurstController> CreateBurst(
        const sp<V1_2::IPreparedModel>& preparedModel) {
    return ::android::nn::createExecutionBurstController(preparedModel, /*blocking=*/true);
}
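// Executor selects which execution path EvaluatePreparedModel exercises (asynchronous callback,
// synchronous call, or burst). OutputType selects how the output buffers are sized: fully
// specified, unspecified (dynamic output shape), or deliberately one byte too small.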
enum class Executor { ASYNC, SYNC, BURST };
enum class OutputType { FULLY_SPECIFIED, UNSPECIFIED, INSUFFICIENT };
const float kDefaultAtol = 1e-5f;
const float kDefaultRtol = 1e-5f;
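// Runs every MixedTypedExample in |examples| against |preparedModel| using the requested executor,
// timing mode, and output-shape mode, then checks the execution status, output shapes, timing
// information, and output values (within the given floating-point tolerances).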
template <typename T_IPreparedModel>
void EvaluatePreparedModel(sp<T_IPreparedModel>& preparedModel, std::function<bool(int)> is_ignored,
                           const std::vector<MixedTypedExample>& examples,
                           bool hasRelaxedFloat32Model, float fpAtol, float fpRtol,
                           Executor executor, MeasureTiming measure, OutputType outputType) {
    const uint32_t INPUT = 0;
    const uint32_t OUTPUT = 1;

    int example_no = 1;
    for (auto& example : examples) {
        SCOPED_TRACE(example_no++);
        const MixedTyped& inputs = example.operands.first;
        const MixedTyped& golden = example.operands.second;

        const bool hasFloat16Inputs = !inputs.float16Operands.empty();
        if (hasRelaxedFloat32Model || hasFloat16Inputs) {
            // TODO: Adjust the error limit based on testing.
            // If in relaxed mode, set the absolute tolerance to be 5ULP of FP16.
            fpAtol = 5.0f * 0.0009765625f;
            // Set the relative tolerance to be 5ULP of the corresponding FP precision.
            fpRtol = 5.0f * 0.0009765625f;
        }

        std::vector<RequestArgument> inputs_info, outputs_info;
        uint32_t inputSize = 0, outputSize = 0;
        // This function only partially specifies the metadata (vector of RequestArguments).
        // The contents are copied over below.
        for_all(inputs, [&inputs_info, &inputSize](int index, auto, auto s) {
            if (inputs_info.size() <= static_cast<size_t>(index)) inputs_info.resize(index + 1);
            RequestArgument arg = {
                    .location = {.poolIndex = INPUT, .offset = 0, .length = static_cast<uint32_t>(s)},
                    .dimensions = {},
            };
            RequestArgument arg_empty = {
                    .hasNoValue = true,
            };
            inputs_info[index] = s ? arg : arg_empty;
            inputSize += s;
        });
        // Compute offset for inputs 1 and so on
        {
            size_t offset = 0;
            for (auto& i : inputs_info) {
                if (!i.hasNoValue) i.location.offset = offset;
                offset += i.location.length;
            }
        }

        MixedTyped test;  // holding test results

        // Go through all outputs, initialize RequestArgument descriptors
        resize_accordingly(golden, test);
        bool sizeLargerThanOne = true;
        for_all(golden, [&golden, &outputs_info, &outputSize, &outputType, &sizeLargerThanOne](
                                int index, auto, auto s) {
            if (outputs_info.size() <= static_cast<size_t>(index)) outputs_info.resize(index + 1);
            if (index == 0) {
                // On OutputType::INSUFFICIENT, make the buffer for output operand 0 one byte
                // smaller than needed.
                if (outputType == OutputType::INSUFFICIENT) {
                    if (s > 1 && !isZeroSized(golden, index)) {
                        s -= 1;
                    } else {
                        sizeLargerThanOne = false;
                    }
                }
            }
            RequestArgument arg = {
                    .location = {.poolIndex = OUTPUT, .offset = 0, .length = static_cast<uint32_t>(s)},
                    .dimensions = {},
            };
            outputs_info[index] = arg;
            outputSize += s;
        });
        // If output0 is not larger than one byte, we cannot provide an insufficient buffer.
        if (!sizeLargerThanOne && outputType == OutputType::INSUFFICIENT) return;
        // Compute offset for outputs 1 and so on
        {
            size_t offset = 0;
            for (auto& i : outputs_info) {
                i.location.offset = offset;
                offset += i.location.length;
            }
        }
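        // Inputs and outputs are passed through shared memory: pool INPUT (0) holds the
        // concatenated input operands and pool OUTPUT (1) receives the output operands.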
        std::vector<hidl_memory> pools = {nn::allocateSharedMemory(inputSize),
                                          nn::allocateSharedMemory(outputSize)};
        ASSERT_NE(0ull, pools[INPUT].size());
        ASSERT_NE(0ull, pools[OUTPUT].size());

        // load data
        sp<IMemory> inputMemory = mapMemory(pools[INPUT]);
        sp<IMemory> outputMemory = mapMemory(pools[OUTPUT]);
        ASSERT_NE(nullptr, inputMemory.get());
        ASSERT_NE(nullptr, outputMemory.get());
        char* inputPtr = reinterpret_cast<char*>(static_cast<void*>(inputMemory->getPointer()));
        char* outputPtr = reinterpret_cast<char*>(static_cast<void*>(outputMemory->getPointer()));
        ASSERT_NE(nullptr, inputPtr);
        ASSERT_NE(nullptr, outputPtr);
        inputMemory->update();
        outputMemory->update();

        // Go through all inputs, copy the values
        for_all(inputs, [&inputs_info, inputPtr](int index, auto p, auto s) {
            char* begin = (char*)p;
            char* end = begin + s;
            // TODO: handle more than one input
            std::copy(begin, end, inputPtr + inputs_info[index].location.offset);
        });

        inputMemory->commit();
        outputMemory->commit();

        const Request request = {.inputs = inputs_info, .outputs = outputs_info, .pools = pools};

        ErrorStatus executionStatus;
        hidl_vec<OutputShape> outputShapes;
        Timing timing;
        switch (executor) {
            case Executor::ASYNC: {
                SCOPED_TRACE("asynchronous");

                // launch execution
                sp<ExecutionCallback> executionCallback = new ExecutionCallback();
                ASSERT_NE(nullptr, executionCallback.get());
                Return<ErrorStatus> executionLaunchStatus =
                        ExecutePreparedModel(preparedModel, request, measure, executionCallback);
                ASSERT_TRUE(executionLaunchStatus.isOk());
                EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));

                // retrieve execution status
                executionCallback->wait();
                executionStatus = executionCallback->getStatus();
                outputShapes = executionCallback->getOutputShapes();
                timing = executionCallback->getTiming();

                break;
            }
            case Executor::SYNC: {
                SCOPED_TRACE("synchronous");

                // execute
                Return<ErrorStatus> executionReturnStatus = ExecutePreparedModel(
                        preparedModel, request, measure, &outputShapes, &timing);
                ASSERT_TRUE(executionReturnStatus.isOk());
                executionStatus = static_cast<ErrorStatus>(executionReturnStatus);

                break;
            }
            case Executor::BURST: {
                SCOPED_TRACE("burst");

                // create burst
                const std::unique_ptr<::android::nn::ExecutionBurstController> controller =
                        CreateBurst(preparedModel);
                ASSERT_NE(nullptr, controller.get());

                // create memory keys
                std::vector<intptr_t> keys(request.pools.size());
                for (size_t i = 0; i < keys.size(); ++i) {
                    keys[i] = reinterpret_cast<intptr_t>(&request.pools[i]);
                }

                // execute burst
                std::tie(executionStatus, outputShapes, timing) =
                        controller->compute(request, measure, keys);

                break;
            }
        }

        if (outputType != OutputType::FULLY_SPECIFIED &&
            executionStatus == ErrorStatus::GENERAL_FAILURE) {
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "execute model that it does not support.";
            std::cout << "[          ] Early termination of test because vendor service cannot "
                         "execute model that it does not support."
                      << std::endl;
            GTEST_SKIP();
        }
        if (measure == MeasureTiming::NO) {
            EXPECT_EQ(UINT64_MAX, timing.timeOnDevice);
            EXPECT_EQ(UINT64_MAX, timing.timeInDriver);
        } else {
            if (timing.timeOnDevice != UINT64_MAX && timing.timeInDriver != UINT64_MAX) {
                EXPECT_LE(timing.timeOnDevice, timing.timeInDriver);
            }
        }

        switch (outputType) {
            case OutputType::FULLY_SPECIFIED:
                // If the model output operands are fully specified, outputShapes must be either
                // empty or have the same number of elements as the number of outputs.
                ASSERT_EQ(ErrorStatus::NONE, executionStatus);
                ASSERT_TRUE(outputShapes.size() == 0 ||
                            outputShapes.size() == test.operandDimensions.size());
                break;
            case OutputType::UNSPECIFIED:
                // If the model output operands are not fully specified, outputShapes must have
                // the same number of elements as the number of outputs.
                ASSERT_EQ(ErrorStatus::NONE, executionStatus);
                ASSERT_EQ(outputShapes.size(), test.operandDimensions.size());
                break;
            case OutputType::INSUFFICIENT:
                ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
                ASSERT_EQ(outputShapes.size(), test.operandDimensions.size());
                ASSERT_FALSE(outputShapes[0].isSufficient);
                return;
        }
        // Go through all outputs, overwrite output dimensions with returned output shapes
        if (outputShapes.size() > 0) {
            for_each<uint32_t>(test.operandDimensions,
                               [&outputShapes](int idx, std::vector<uint32_t>& dim) {
                                   dim = outputShapes[idx].dimensions;
                               });
        }

        // validate results
        outputMemory->read();
        copy_back(&test, outputs_info, outputPtr);
        outputMemory->commit();
        // Filter out don't cares
        MixedTyped filtered_golden = filter(golden, is_ignored);
        MixedTyped filtered_test = filter(test, is_ignored);

        // We want "close-enough" results for float
        compare(filtered_golden, filtered_test, fpAtol, fpRtol);

        if (example.expectedMultinomialDistributionTolerance > 0) {
            expectMultinomialDistributionWithinTolerance(test, example);
        }
    }
}
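// Convenience overload that uses the default absolute and relative tolerances.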
template <typename T_IPreparedModel>
void EvaluatePreparedModel(sp<T_IPreparedModel>& preparedModel, std::function<bool(int)> is_ignored,
                           const std::vector<MixedTypedExample>& examples,
                           bool hasRelaxedFloat32Model, Executor executor, MeasureTiming measure,
                           OutputType outputType) {
    EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model, kDefaultAtol,
                          kDefaultRtol, executor, measure, outputType);
}

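// Runs the full executor/timing matrix on a V1_2 prepared model. With dynamic output shapes the
// UNSPECIFIED and INSUFFICIENT output modes are exercised; otherwise only FULLY_SPECIFIED is.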
void EvaluatePreparedModel(sp<V1_2::IPreparedModel>& preparedModel,
                           std::function<bool(int)> is_ignored,
                           const std::vector<MixedTypedExample>& examples,
                           bool hasRelaxedFloat32Model, bool testDynamicOutputShape) {
    if (testDynamicOutputShape) {
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::ASYNC, MeasureTiming::NO, OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::SYNC, MeasureTiming::NO, OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::BURST, MeasureTiming::NO, OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::ASYNC, MeasureTiming::YES, OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::SYNC, MeasureTiming::YES, OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::BURST, MeasureTiming::YES, OutputType::UNSPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::ASYNC, MeasureTiming::NO, OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::SYNC, MeasureTiming::NO, OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::BURST, MeasureTiming::NO, OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::ASYNC, MeasureTiming::YES, OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::SYNC, MeasureTiming::YES, OutputType::INSUFFICIENT);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::BURST, MeasureTiming::YES, OutputType::INSUFFICIENT);
    } else {
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::ASYNC, MeasureTiming::NO, OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::SYNC, MeasureTiming::NO, OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::BURST, MeasureTiming::NO, OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::ASYNC, MeasureTiming::YES, OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::SYNC, MeasureTiming::YES, OutputType::FULLY_SPECIFIED);
        EvaluatePreparedModel(preparedModel, is_ignored, examples, hasRelaxedFloat32Model,
                              Executor::BURST, MeasureTiming::YES, OutputType::FULLY_SPECIFIED);
    }
}

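// Retrieves the prepared model from the callback. The V1_2 variant downcasts the result to
// V1_2::IPreparedModel and yields nullptr if the service does not implement that interface.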
static void getPreparedModel(sp<PreparedModelCallback> callback,
                             sp<V1_0::IPreparedModel>* preparedModel) {
    *preparedModel = callback->getPreparedModel();
}
static void getPreparedModel(sp<PreparedModelCallback> callback,
                             sp<V1_2::IPreparedModel>* preparedModel) {
    sp<V1_0::IPreparedModel> preparedModelV1_0 = callback->getPreparedModel();
    *preparedModel = V1_2::IPreparedModel::castFrom(preparedModelV1_0).withDefault(nullptr);
}

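// Entry point for tests generated against a V1_0 driver: checks which operations the service
// supports, prepares the model, and (unless the service legitimately cannot prepare a model it
// does not support) runs the examples asynchronously with fully specified outputs.
//
// A generated test would typically invoke it roughly like this (illustrative sketch only; the
// actual fixture, model factory, and example accessor names live in the generated test sources):
//
//     TEST_F(NeuralnetworksHidlTest, some_generated_test) {
//         generated_tests::Execute(device, createTestModel, is_ignored, get_examples());
//     }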
void Execute(const sp<V1_0::IDevice>& device, std::function<V1_0::Model(void)> create_model,
             std::function<bool(int)> is_ignored, const std::vector<MixedTypedExample>& examples) {
    V1_0::Model model = create_model();

    // see if service can handle model
    bool fullySupportsModel = false;
    Return<void> supportedCall = device->getSupportedOperations(
            model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
                ASSERT_EQ(ErrorStatus::NONE, status);
                ASSERT_NE(0ul, supported.size());
                fullySupportsModel =
                        std::all_of(supported.begin(), supported.end(), [](bool valid) { return valid; });
            });
    ASSERT_TRUE(supportedCall.isOk());

    // launch prepare model
    sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    ASSERT_NE(nullptr, preparedModelCallback.get());
    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel(model, preparedModelCallback);
    ASSERT_TRUE(prepareLaunchStatus.isOk());
    ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

    // retrieve prepared model
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    sp<V1_0::IPreparedModel> preparedModel;
    getPreparedModel(preparedModelCallback, &preparedModel);

    // early termination if vendor service cannot fully prepare model
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel.get());
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "prepare model that it does not support.";
        std::cout << "[          ] Early termination of test because vendor service cannot "
                     "prepare model that it does not support."
                  << std::endl;
        GTEST_SKIP();
    }
    EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    ASSERT_NE(nullptr, preparedModel.get());

    float fpAtol = 1e-5f, fpRtol = 5.0f * 1.1920928955078125e-7f;
    EvaluatePreparedModel(preparedModel, is_ignored, examples,
                          /*hasRelaxedFloat32Model=*/false, fpAtol, fpRtol, Executor::ASYNC,
                          MeasureTiming::NO, OutputType::FULLY_SPECIFIED);
}

void Execute(const sp<V1_1::IDevice>& device, std::function<V1_1::Model(void)> create_model,
             std::function<bool(int)> is_ignored, const std::vector<MixedTypedExample>& examples) {
    V1_1::Model model = create_model();

    // see if service can handle model
    bool fullySupportsModel = false;
    Return<void> supportedCall = device->getSupportedOperations_1_1(
            model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
                ASSERT_EQ(ErrorStatus::NONE, status);
                ASSERT_NE(0ul, supported.size());
                fullySupportsModel =
                        std::all_of(supported.begin(), supported.end(), [](bool valid) { return valid; });
            });
    ASSERT_TRUE(supportedCall.isOk());

    // launch prepare model
    sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    ASSERT_NE(nullptr, preparedModelCallback.get());
    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_1(
            model, ExecutionPreference::FAST_SINGLE_ANSWER, preparedModelCallback);
    ASSERT_TRUE(prepareLaunchStatus.isOk());
    ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

    // retrieve prepared model
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    sp<V1_0::IPreparedModel> preparedModel;
    getPreparedModel(preparedModelCallback, &preparedModel);

    // early termination if vendor service cannot fully prepare model
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel.get());
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "prepare model that it does not support.";
        std::cout << "[          ] Early termination of test because vendor service cannot "
                     "prepare model that it does not support."
                  << std::endl;
        GTEST_SKIP();
    }
    EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    ASSERT_NE(nullptr, preparedModel.get());

    EvaluatePreparedModel(preparedModel, is_ignored, examples,
                          model.relaxComputationFloat32toFloat16, 1e-5f, 1e-5f, Executor::ASYNC,
                          MeasureTiming::NO, OutputType::FULLY_SPECIFIED);
}

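// Compiles |model| on a V1_2 device and returns the result through |preparedModel|. If the
// service does not fully support the model and preparation fails, *preparedModel is left null.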
void PrepareModel(const sp<V1_2::IDevice>& device, const V1_2::Model& model,
                  sp<V1_2::IPreparedModel>* preparedModel) {
    // see if service can handle model
    bool fullySupportsModel = false;
    Return<void> supportedCall = device->getSupportedOperations_1_2(
            model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
                ASSERT_EQ(ErrorStatus::NONE, status);
                ASSERT_NE(0ul, supported.size());
                fullySupportsModel =
                        std::all_of(supported.begin(), supported.end(), [](bool valid) { return valid; });
            });
    ASSERT_TRUE(supportedCall.isOk());

    // launch prepare model
    sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    ASSERT_NE(nullptr, preparedModelCallback.get());
    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_2(
            model, ExecutionPreference::FAST_SINGLE_ANSWER, preparedModelCallback);
    ASSERT_TRUE(prepareLaunchStatus.isOk());
    ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

    // retrieve prepared model
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    getPreparedModel(preparedModelCallback, preparedModel);

    // early termination if vendor service cannot fully prepare model
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel->get());
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "prepare model that it does not support.";
        std::cout << "[          ] Early termination of test because vendor service cannot "
                     "prepare model that it does not support."
                  << std::endl;
        return;
    }
    EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    ASSERT_NE(nullptr, preparedModel->get());
}

// TODO: Reduce code duplication.
void Execute(const sp<V1_2::IDevice>& device, std::function<V1_2::Model(void)> create_model,
             std::function<bool(int)> is_ignored, const std::vector<MixedTypedExample>& examples,
             bool testDynamicOutputShape) {
    V1_2::Model model = create_model();
    sp<V1_2::IPreparedModel> preparedModel = nullptr;
    PrepareModel(device, model, &preparedModel);
    if (preparedModel == nullptr) {
        GTEST_SKIP();
    }
    EvaluatePreparedModel(preparedModel, is_ignored, examples,
                          model.relaxComputationFloat32toFloat16, testDynamicOutputShape);
}

}  // namespace generated_tests

}  // namespace neuralnetworks
}  // namespace hardware
}  // namespace android