/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Callbacks.h"
#include "TestHarness.h"
#include "Utils.h"

#include <android-base/logging.h>
#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <android/hardware/neuralnetworks/1.0/IExecutionCallback.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModelCallback.h>
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hidl/allocator/1.0/IAllocator.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>
#include <iostream>

namespace android {
namespace hardware {
namespace neuralnetworks {

namespace generated_tests {
using ::android::hardware::neuralnetworks::V1_0::implementation::ExecutionCallback;
using ::android::hardware::neuralnetworks::V1_0::implementation::PreparedModelCallback;
using ::test_helper::compare;
using ::test_helper::expectMultinomialDistributionWithinTolerance;
using ::test_helper::filter;
using ::test_helper::Float32Operands;
using ::test_helper::for_all;
using ::test_helper::for_each;
using ::test_helper::Int32Operands;
using ::test_helper::MixedTyped;
using ::test_helper::MixedTypedExample;
using ::test_helper::Quant8Operands;
using ::test_helper::resize_accordingly;

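// Copies the bytes of each output operand from the output memory pool (src) back into
// the MixedTyped result structure, one element type at a time, using the offset and
// length recorded in the corresponding RequestArgument.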
template <typename T>
void copy_back_(MixedTyped* dst, const std::vector<RequestArgument>& ra, char* src) {
    MixedTyped& test = *dst;
    for_each<T>(test, [&ra, src](int index, std::vector<T>& m) {
        ASSERT_EQ(m.size(), ra[index].location.length / sizeof(T));
        char* begin = src + ra[index].location.offset;
        memcpy(m.data(), begin, ra[index].location.length);
    });
}

void copy_back(MixedTyped* dst, const std::vector<RequestArgument>& ra, char* src) {
    copy_back_<float>(dst, ra, src);
    copy_back_<int32_t>(dst, ra, src);
    copy_back_<uint8_t>(dst, ra, src);
}

// Top level driver for models and examples generated by test_generator.py
// Test driver for those generated from ml/nn/runtime/test/spec
void EvaluatePreparedModel(sp<IPreparedModel>& preparedModel, std::function<bool(int)> is_ignored,
                           const std::vector<MixedTypedExample>& examples, float fpAtol = 1e-5f,
                           float fpRtol = 1e-5f) {
    const uint32_t INPUT = 0;
    const uint32_t OUTPUT = 1;

    int example_no = 1;
    for (auto& example : examples) {
        SCOPED_TRACE(example_no++);

        const MixedTyped& inputs = example.operands.first;
        const MixedTyped& golden = example.operands.second;

        std::vector<RequestArgument> inputs_info, outputs_info;
        uint32_t inputSize = 0, outputSize = 0;

        // This function only partially specifies the metadata (vector of RequestArguments).
        // The contents are copied over below.
        for_all(inputs, [&inputs_info, &inputSize](int index, auto, auto s) {
            if (inputs_info.size() <= static_cast<size_t>(index)) inputs_info.resize(index + 1);
            RequestArgument arg = {
                .location = {.poolIndex = INPUT, .offset = 0, .length = static_cast<uint32_t>(s)},
                .dimensions = {},
            };
            RequestArgument arg_empty = {
                .hasNoValue = true,
            };
            inputs_info[index] = s ? arg : arg_empty;
            inputSize += s;
        });
        // Compute offset for inputs 1 and so on
        {
            size_t offset = 0;
            for (auto& i : inputs_info) {
                if (!i.hasNoValue) i.location.offset = offset;
                offset += i.location.length;
            }
        }

        MixedTyped test;  // holding test results

        // Go through all outputs, initialize RequestArgument descriptors
        resize_accordingly(golden, test);
        for_all(golden, [&outputs_info, &outputSize](int index, auto, auto s) {
            if (outputs_info.size() <= static_cast<size_t>(index)) outputs_info.resize(index + 1);
            RequestArgument arg = {
                .location = {.poolIndex = OUTPUT, .offset = 0, .length = static_cast<uint32_t>(s)},
                .dimensions = {},
            };
            outputs_info[index] = arg;
            outputSize += s;
        });
        // Compute offset for outputs 1 and so on
        {
            size_t offset = 0;
            for (auto& i : outputs_info) {
                i.location.offset = offset;
                offset += i.location.length;
            }
        }
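        // Allocate one shared memory pool for all inputs and one for all outputs.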
        std::vector<hidl_memory> pools = {nn::allocateSharedMemory(inputSize),
                                          nn::allocateSharedMemory(outputSize)};
        ASSERT_NE(0ull, pools[INPUT].size());
        ASSERT_NE(0ull, pools[OUTPUT].size());

        // load data
        sp<IMemory> inputMemory = mapMemory(pools[INPUT]);
        sp<IMemory> outputMemory = mapMemory(pools[OUTPUT]);
        ASSERT_NE(nullptr, inputMemory.get());
        ASSERT_NE(nullptr, outputMemory.get());
        char* inputPtr = reinterpret_cast<char*>(static_cast<void*>(inputMemory->getPointer()));
        char* outputPtr = reinterpret_cast<char*>(static_cast<void*>(outputMemory->getPointer()));
        ASSERT_NE(nullptr, inputPtr);
        ASSERT_NE(nullptr, outputPtr);
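        // update() marks the start of direct CPU writes to the mapped HIDL memory;
        // commit() below marks the end.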
        inputMemory->update();
        outputMemory->update();

        // Go through all inputs, copy the values
        for_all(inputs, [&inputs_info, inputPtr](int index, auto p, auto s) {
            char* begin = (char*)p;
            char* end = begin + s;
            // TODO: handle more than one input
            std::copy(begin, end, inputPtr + inputs_info[index].location.offset);
        });

        inputMemory->commit();
        outputMemory->commit();

        // launch execution
        sp<ExecutionCallback> executionCallback = new ExecutionCallback();
        ASSERT_NE(nullptr, executionCallback.get());
        Return<ErrorStatus> executionLaunchStatus = preparedModel->execute(
            {.inputs = inputs_info, .outputs = outputs_info, .pools = pools}, executionCallback);
        ASSERT_TRUE(executionLaunchStatus.isOk());
        EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));

        // retrieve execution status
        executionCallback->wait();
        ErrorStatus executionReturnStatus = executionCallback->getStatus();
        EXPECT_EQ(ErrorStatus::NONE, executionReturnStatus);

        // validate results
        outputMemory->read();
        copy_back(&test, outputs_info, outputPtr);
        outputMemory->commit();
        // Filter out don't cares
        MixedTyped filtered_golden = filter(golden, is_ignored);
        MixedTyped filtered_test = filter(test, is_ignored);

        // We want "close-enough" results for float
        compare(filtered_golden, filtered_test, fpAtol, fpRtol);

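        // Examples that declare a multinomial distribution tolerance additionally get a
        // statistical check on the output distribution.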
        if (example.expectedMultinomialDistributionTolerance > 0) {
            expectMultinomialDistributionWithinTolerance(test, example);
        }
    }
}

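// The Execute() overloads below differ only in the HAL version (V1_0, V1_1, V1_2) used to
// query operation support and prepare the model, and in the float tolerances passed to
// EvaluatePreparedModel.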
void Execute(const sp<V1_0::IDevice>& device, std::function<V1_0::Model(void)> create_model,
             std::function<bool(int)> is_ignored, const std::vector<MixedTypedExample>& examples) {
    V1_0::Model model = create_model();

    // see if service can handle model
    bool fullySupportsModel = false;
    Return<void> supportedCall = device->getSupportedOperations(
        model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
            ASSERT_EQ(ErrorStatus::NONE, status);
            ASSERT_NE(0ul, supported.size());
            fullySupportsModel =
                std::all_of(supported.begin(), supported.end(), [](bool valid) { return valid; });
        });
    ASSERT_TRUE(supportedCall.isOk());

    // launch prepare model
    sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    ASSERT_NE(nullptr, preparedModelCallback.get());
    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel(model, preparedModelCallback);
    ASSERT_TRUE(prepareLaunchStatus.isOk());
    ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

    // retrieve prepared model
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();

    // early termination if vendor service cannot fully prepare model
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel.get());
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "prepare model that it does not support.";
        std::cout << "[          ] Early termination of test because vendor service cannot "
                     "prepare model that it does not support."
                  << std::endl;
        return;
    }
    EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    ASSERT_NE(nullptr, preparedModel.get());

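    // Absolute tolerance of 1e-5; relative tolerance of 5 ULP of float32
    // (one ULP at 1.0 is 2^-23 = 1.1920928955078125e-7).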
    float fpAtol = 1e-5f, fpRtol = 5.0f * 1.1920928955078125e-7f;
    EvaluatePreparedModel(preparedModel, is_ignored, examples, fpAtol, fpRtol);
}

void Execute(const sp<V1_1::IDevice>& device, std::function<V1_1::Model(void)> create_model,
             std::function<bool(int)> is_ignored, const std::vector<MixedTypedExample>& examples) {
    V1_1::Model model = create_model();

    // see if service can handle model
    bool fullySupportsModel = false;
    Return<void> supportedCall = device->getSupportedOperations_1_1(
        model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
            ASSERT_EQ(ErrorStatus::NONE, status);
            ASSERT_NE(0ul, supported.size());
            fullySupportsModel =
                std::all_of(supported.begin(), supported.end(), [](bool valid) { return valid; });
        });
    ASSERT_TRUE(supportedCall.isOk());

    // launch prepare model
    sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    ASSERT_NE(nullptr, preparedModelCallback.get());
    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_1(
        model, ExecutionPreference::FAST_SINGLE_ANSWER, preparedModelCallback);
    ASSERT_TRUE(prepareLaunchStatus.isOk());
    ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

    // retrieve prepared model
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();

    // early termination if vendor service cannot fully prepare model
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel.get());
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "prepare model that it does not support.";
        std::cout << "[          ] Early termination of test because vendor service cannot "
                     "prepare model that it does not support."
                  << std::endl;
        return;
    }
    EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    ASSERT_NE(nullptr, preparedModel.get());

    // TODO: Adjust the error limit based on testing.
    // If in relaxed mode, set the absolute tolerance to be 5ULP of FP16.
    float fpAtol = !model.relaxComputationFloat32toFloat16 ? 1e-5f : 5.0f * 0.0009765625f;
    // Set the relative tolerance to be 5ULP of the corresponding FP precision.
    float fpRtol = !model.relaxComputationFloat32toFloat16 ? 5.0f * 1.1920928955078125e-7f
                                                           : 5.0f * 0.0009765625f;
    EvaluatePreparedModel(preparedModel, is_ignored, examples, fpAtol, fpRtol);
}

// TODO: Reduce code duplication.
void Execute(const sp<V1_2::IDevice>& device, std::function<V1_2::Model(void)> create_model,
             std::function<bool(int)> is_ignored, const std::vector<MixedTypedExample>& examples) {
    V1_2::Model model = create_model();

    // see if service can handle model
    bool fullySupportsModel = false;
    Return<void> supportedCall = device->getSupportedOperations_1_2(
        model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
            ASSERT_EQ(ErrorStatus::NONE, status);
            ASSERT_NE(0ul, supported.size());
            fullySupportsModel =
                std::all_of(supported.begin(), supported.end(), [](bool valid) { return valid; });
        });
    ASSERT_TRUE(supportedCall.isOk());

    // launch prepare model
    sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
    ASSERT_NE(nullptr, preparedModelCallback.get());
    Return<ErrorStatus> prepareLaunchStatus = device->prepareModel_1_2(
        model, ExecutionPreference::FAST_SINGLE_ANSWER, preparedModelCallback);
    ASSERT_TRUE(prepareLaunchStatus.isOk());
    ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

    // retrieve prepared model
    preparedModelCallback->wait();
    ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
    sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();

    // early termination if vendor service cannot fully prepare model
    if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
        ASSERT_EQ(nullptr, preparedModel.get());
        LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                     "prepare model that it does not support.";
        std::cout << "[          ] Early termination of test because vendor service cannot "
                     "prepare model that it does not support."
                  << std::endl;
        return;
    }
    EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
    ASSERT_NE(nullptr, preparedModel.get());

    // TODO: Adjust the error limit based on testing.
    // If in relaxed mode, set the absolute tolerance to be 5ULP of FP16.
    float fpAtol = !model.relaxComputationFloat32toFloat16 ? 1e-5f : 5.0f * 0.0009765625f;
    // Set the relative tolerance to be 5ULP of the corresponding FP precision.
    float fpRtol = !model.relaxComputationFloat32toFloat16 ? 5.0f * 1.1920928955078125e-7f
                                                           : 5.0f * 0.0009765625f;
    EvaluatePreparedModel(preparedModel, is_ignored, examples, fpAtol, fpRtol);
}

}  // namespace generated_tests

}  // namespace neuralnetworks
}  // namespace hardware
}  // namespace android