/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "GeneratedTestHarness.h"

#include "1.0/Callbacks.h"
#include "1.0/Utils.h"
#include "MemoryUtils.h"
#include "TestHarness.h"
#include "VtsHalNeuralnetworks.h"

#include <android-base/logging.h>
#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <android/hardware/neuralnetworks/1.0/IPreparedModel.h>
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hidl/allocator/1.0/IAllocator.h>
#include <android/hidl/memory/1.0/IMemory.h>
#include <hidlmemory/mapping.h>

#include <gtest/gtest.h>
#include <iostream>

namespace android::hardware::neuralnetworks::V1_0::vts::functional {

using namespace test_helper;
using hidl::memory::V1_0::IMemory;
using implementation::ExecutionCallback;
using implementation::PreparedModelCallback;

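// Converts a test_helper::TestModel into a HIDL Model: operands, operations, constant data
// copied inline into operandValues, and constant data referenced through shared memory pools.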
Model createModel(const TestModel& testModel) {
    // Model operands.
    hidl_vec<Operand> operands(testModel.operands.size());
    size_t constCopySize = 0, constRefSize = 0;
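    // Compute running offsets for CONSTANT_COPY and CONSTANT_REFERENCE data. The offsets
    // advance by alignedSize() so each constant starts at an aligned offset, while
    // location.length records the exact (unpadded) data size.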
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];

        DataLocation loc = {};
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constCopySize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constCopySize += op.data.alignedSize();
        } else if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
            loc = {.poolIndex = 0,
                   .offset = static_cast<uint32_t>(constRefSize),
                   .length = static_cast<uint32_t>(op.data.size())};
            constRefSize += op.data.alignedSize();
        }

        operands[i] = {.type = static_cast<OperandType>(op.type),
                       .dimensions = op.dimensions,
                       .numberOfConsumers = op.numberOfConsumers,
                       .scale = op.scale,
                       .zeroPoint = op.zeroPoint,
                       .lifetime = static_cast<OperandLifeTime>(op.lifetime),
                       .location = loc};
    }

    // Model operations.
    hidl_vec<Operation> operations(testModel.operations.size());
    std::transform(testModel.operations.begin(), testModel.operations.end(), operations.begin(),
                   [](const TestOperation& op) -> Operation {
                       return {.type = static_cast<OperationType>(op.type),
                               .inputs = op.inputs,
                               .outputs = op.outputs};
                   });

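    // Constant data lives in two places: a CONSTANT_COPY location indexes into
    // Model::operandValues, while a CONSTANT_REFERENCE location indexes into the shared
    // memory pool allocated below.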
    // Constant copies.
    hidl_vec<uint8_t> operandValues(constCopySize);
    for (uint32_t i = 0; i < testModel.operands.size(); i++) {
        const auto& op = testModel.operands[i];
        if (op.lifetime == TestOperandLifeTime::CONSTANT_COPY) {
            const uint8_t* begin = op.data.get<uint8_t>();
            const uint8_t* end = begin + op.data.size();
            std::copy(begin, end, operandValues.data() + operands[i].location.offset);
        }
    }

    // Shared memory.
    hidl_vec<hidl_memory> pools;
    if (constRefSize > 0) {
        hidl_vec_push_back(&pools, nn::allocateSharedMemory(constRefSize));
        CHECK_NE(pools[0].size(), 0u);

        // load data
        sp<IMemory> mappedMemory = mapMemory(pools[0]);
        CHECK(mappedMemory.get() != nullptr);
        uint8_t* mappedPtr =
                reinterpret_cast<uint8_t*>(static_cast<void*>(mappedMemory->getPointer()));
        CHECK(mappedPtr != nullptr);

        for (uint32_t i = 0; i < testModel.operands.size(); i++) {
            const auto& op = testModel.operands[i];
            if (op.lifetime == TestOperandLifeTime::CONSTANT_REFERENCE) {
                const uint8_t* begin = op.data.get<uint8_t>();
                const uint8_t* end = begin + op.data.size();
                std::copy(begin, end, mappedPtr + operands[i].location.offset);
            }
        }
    }

    return {.operands = std::move(operands),
            .operations = std::move(operations),
            .inputIndexes = testModel.inputIndexes,
            .outputIndexes = testModel.outputIndexes,
            .operandValues = std::move(operandValues),
            .pools = std::move(pools)};
}

// Top level driver for models and examples generated by test_generator.py
// Test driver for those generated from ml/nn/runtime/test/spec
void EvaluatePreparedModel(const sp<IPreparedModel>& preparedModel, const TestModel& testModel) {
    const Request request = createRequest(testModel);
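    // createRequest() places the test inputs and outputs into shared memory pools that the
    // driver can map; getOutputBuffers() below reads the results back out of those pools.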

    // Launch execution.
    sp<ExecutionCallback> executionCallback = new ExecutionCallback();
    Return<ErrorStatus> executionLaunchStatus = preparedModel->execute(request, executionCallback);
    ASSERT_TRUE(executionLaunchStatus.isOk());
    EXPECT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(executionLaunchStatus));

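    // execute() is asynchronous: the launch status above only reports whether the request was
    // accepted; the actual execution result is delivered through the callback.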
    // Retrieve execution status.
    executionCallback->wait();
    ASSERT_EQ(ErrorStatus::NONE, executionCallback->getStatus());

    // Retrieve execution results.
    const std::vector<TestBuffer> outputs = getOutputBuffers(request);

    // We want "close-enough" results.
    checkResults(testModel, outputs);
}

// Tag for the generated tests
class GeneratedTest : public GeneratedTestBase {
  protected:
    void Execute(const TestModel& testModel) {
        Model model = createModel(testModel);

        // see if service can handle model
        bool fullySupportsModel = false;
        Return<void> supportedCall = device->getSupportedOperations(
                model, [&fullySupportsModel](ErrorStatus status, const hidl_vec<bool>& supported) {
                    ASSERT_EQ(ErrorStatus::NONE, status);
                    ASSERT_NE(0ul, supported.size());
                    fullySupportsModel = std::all_of(supported.begin(), supported.end(),
                                                     [](bool valid) { return valid; });
                });
        ASSERT_TRUE(supportedCall.isOk());
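        // fullySupportsModel is consulted below: when the driver reports only partial support,
        // a prepareModel failure is treated as a skip rather than a test failure.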

        // launch prepare model
        sp<PreparedModelCallback> preparedModelCallback = new PreparedModelCallback();
        Return<ErrorStatus> prepareLaunchStatus =
                device->prepareModel(model, preparedModelCallback);
        ASSERT_TRUE(prepareLaunchStatus.isOk());
        ASSERT_EQ(ErrorStatus::NONE, static_cast<ErrorStatus>(prepareLaunchStatus));

        // retrieve prepared model
        preparedModelCallback->wait();
        ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
        sp<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();

        // early termination if vendor service cannot fully prepare model
        if (!fullySupportsModel && prepareReturnStatus != ErrorStatus::NONE) {
            ASSERT_EQ(nullptr, preparedModel.get());
            LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
                         "prepare model that it does not support.";
            std::cout << "[          ]   Early termination of test because vendor service cannot "
                         "prepare model that it does not support."
                      << std::endl;
            GTEST_SKIP();
        }
        EXPECT_EQ(ErrorStatus::NONE, prepareReturnStatus);
        ASSERT_NE(nullptr, preparedModel.get());

        EvaluatePreparedModel(preparedModel, testModel);
    }
};

TEST_P(GeneratedTest, Test) {
    Execute(*mTestModel);
}

INSTANTIATE_GENERATED_TEST(GeneratedTest,
                           [](const TestModel& testModel) { return !testModel.expectFailure; });
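// Models with expectFailure set are filtered out by the predicate above; this harness only runs
// test models that are expected to prepare and execute successfully.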

}  // namespace android::hardware::neuralnetworks::V1_0::vts::functional