blob: 0fa244d891c5080e9cd236c849e5fa70b86c738d [file] [log] [blame]
Michael Butler4b276a72020-08-06 23:22:35 -07001/*
2 * Copyright (C) 2020 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "Device.h"
18
19#include "Buffer.h"
20#include "Callbacks.h"
21#include "Conversions.h"
22#include "PreparedModel.h"
23#include "Utils.h"
24
25#include <android/hardware/neuralnetworks/1.0/types.h>
26#include <android/hardware/neuralnetworks/1.1/types.h>
27#include <android/hardware/neuralnetworks/1.2/types.h>
28#include <android/hardware/neuralnetworks/1.3/IDevice.h>
29#include <android/hardware/neuralnetworks/1.3/types.h>
30#include <nnapi/IBuffer.h>
31#include <nnapi/IDevice.h>
32#include <nnapi/IPreparedModel.h>
33#include <nnapi/OperandTypes.h>
34#include <nnapi/Result.h>
35#include <nnapi/Types.h>
36#include <nnapi/hal/1.1/Conversions.h>
37#include <nnapi/hal/1.2/Conversions.h>
38#include <nnapi/hal/1.2/Device.h>
39#include <nnapi/hal/CommonUtils.h>
40#include <nnapi/hal/HandleError.h>
41#include <nnapi/hal/ProtectCallback.h>
42
43#include <any>
44#include <functional>
45#include <memory>
46#include <optional>
47#include <string>
48#include <vector>
49
50namespace android::hardware::neuralnetworks::V1_3::utils {
51namespace {
52
53nn::GeneralResult<hidl_vec<sp<IPreparedModel>>> convert(
54 const std::vector<nn::SharedPreparedModel>& preparedModels) {
55 hidl_vec<sp<IPreparedModel>> hidlPreparedModels(preparedModels.size());
56 for (size_t i = 0; i < preparedModels.size(); ++i) {
57 std::any underlyingResource = preparedModels[i]->getUnderlyingResource();
58 if (const auto* hidlPreparedModel =
59 std::any_cast<sp<IPreparedModel>>(&underlyingResource)) {
60 hidlPreparedModels[i] = *hidlPreparedModel;
61 } else {
62 return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
63 << "Unable to convert from nn::IPreparedModel to V1_3::IPreparedModel";
64 }
65 }
66 return hidlPreparedModels;
67}
68
// Upcasts a creation result from the concrete V1_3 utils::Buffer type to the
// canonical nn::SharedBuffer interface, propagating any error via NN_TRY.
nn::GeneralResult<nn::SharedBuffer> convert(
        nn::GeneralResult<std::shared_ptr<const Buffer>> result) {
    return NN_TRY(std::move(result));
}
73
Slava Shklyaev77e06d82020-11-30 15:33:17 +000074nn::GeneralResult<nn::Capabilities> initCapabilities(V1_3::IDevice* device) {
75 CHECK(device != nullptr);
76
77 nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
78 << "uninitialized";
79 const auto cb = [&result](ErrorStatus status, const Capabilities& capabilities) {
80 if (status != ErrorStatus::NONE) {
81 const auto canonical =
82 validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
83 result = NN_ERROR(canonical) << "getCapabilities_1_3 failed with " << toString(status);
84 } else {
85 result = validatedConvertToCanonical(capabilities);
86 }
87 };
88
89 const auto ret = device->getCapabilities_1_3(cb);
90 NN_TRY(hal::utils::handleTransportError(ret));
91
92 return result;
93}
94
Michael Butler4b276a72020-08-06 23:22:35 -070095} // namespace
96
// Factory for Device. Validates the arguments, then eagerly fetches the
// driver's static properties over IPC (version string, device type,
// extensions, capabilities, and number of cache files needed) and registers a
// death handler. Any failure aborts creation and is returned to the caller.
nn::GeneralResult<std::shared_ptr<const Device>> Device::create(std::string name,
                                                                sp<V1_3::IDevice> device) {
    if (name.empty()) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_3::utils::Device::create must have non-empty name";
    }
    if (device == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_3::utils::Device::create must have non-null device";
    }

    // Version string, device type, extensions, and cache-file counts are
    // unchanged since V1.2, so the V1_2 initializers are reused; capabilities
    // are V1.3-specific and use the local helper above.
    auto versionString = NN_TRY(V1_2::utils::initVersionString(device.get()));
    const auto deviceType = NN_TRY(V1_2::utils::initDeviceType(device.get()));
    auto extensions = NN_TRY(V1_2::utils::initExtensions(device.get()));
    auto capabilities = NN_TRY(initCapabilities(device.get()));
    const auto numberOfCacheFilesNeeded =
            NN_TRY(V1_2::utils::initNumberOfCacheFilesNeeded(device.get()));

    auto deathHandler = NN_TRY(hal::utils::DeathHandler::create(device));
    return std::make_shared<const Device>(
            PrivateConstructorTag{}, std::move(name), std::move(versionString), deviceType,
            std::move(extensions), std::move(capabilities), numberOfCacheFilesNeeded,
            std::move(device), std::move(deathHandler));
}
121
// Private constructor; reachable only through Device::create via
// PrivateConstructorTag. All members are set once here and are immutable for
// the lifetime of the object.
Device::Device(PrivateConstructorTag /*tag*/, std::string name, std::string versionString,
               nn::DeviceType deviceType, std::vector<nn::Extension> extensions,
               nn::Capabilities capabilities,
               std::pair<uint32_t, uint32_t> numberOfCacheFilesNeeded, sp<V1_3::IDevice> device,
               hal::utils::DeathHandler deathHandler)
    : kName(std::move(name)),
      kVersionString(std::move(versionString)),
      kDeviceType(deviceType),
      kExtensions(std::move(extensions)),
      kCapabilities(std::move(capabilities)),
      kNumberOfCacheFilesNeeded(numberOfCacheFilesNeeded),
      kDevice(std::move(device)),
      kDeathHandler(std::move(deathHandler)) {}
135
// Returns the driver name supplied at creation time.
const std::string& Device::getName() const {
    return kName;
}
139
// Returns the version string cached from the driver at creation time.
const std::string& Device::getVersionString() const {
    return kVersionString;
}
143
// The V1.3 NN HAL corresponds to the Android R feature level.
nn::Version Device::getFeatureLevel() const {
    return nn::Version::ANDROID_R;
}
147
// Returns the device type (e.g. CPU/GPU/accelerator) cached at creation time.
nn::DeviceType Device::getType() const {
    return kDeviceType;
}
151
// Returns the vendor extensions cached from the driver at creation time.
const std::vector<nn::Extension>& Device::getSupportedExtensions() const {
    return kExtensions;
}
155
// Returns the performance capabilities cached from the driver at creation time.
const nn::Capabilities& Device::getCapabilities() const {
    return kCapabilities;
}
159
// Returns the (model cache, data cache) file counts the driver needs for
// compilation caching, cached at creation time.
std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
    return kNumberOfCacheFilesNeeded;
}
163
164nn::GeneralResult<void> Device::wait() const {
165 const auto ret = kDevice->ping();
166 return hal::utils::handleTransportError(ret);
167}
168
169nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {
170 // Ensure that model is ready for IPC.
171 std::optional<nn::Model> maybeModelInShared;
172 const nn::Model& modelInShared =
173 NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));
174
175 const auto hidlModel = NN_TRY(convert(modelInShared));
176
177 nn::GeneralResult<std::vector<bool>> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
178 << "uninitialized";
179 auto cb = [&result, &model](ErrorStatus status, const hidl_vec<bool>& supportedOperations) {
180 if (status != ErrorStatus::NONE) {
181 const auto canonical =
182 validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
183 result = NN_ERROR(canonical)
184 << "IDevice::getSupportedOperations_1_3 failed with " << toString(status);
185 } else if (supportedOperations.size() != model.main.operations.size()) {
186 result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
187 << "IDevice::getSupportedOperations_1_3 returned vector of size "
188 << supportedOperations.size() << " but expected "
189 << model.main.operations.size();
190 } else {
191 result = supportedOperations;
192 }
193 };
194
195 const auto ret = kDevice->getSupportedOperations_1_3(hidlModel, cb);
196 NN_TRY(hal::utils::handleTransportError(ret));
197
198 return result;
199}
200
// Compiles the given model on the driver via prepareModel_1_3, blocking until
// the asynchronous preparation completes. Returns the resulting prepared
// model, or an error if conversion, transport, the driver's launch status, or
// the callback result fails.
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
        const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
        nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    // Convert all canonical arguments to their HIDL counterparts up front so
    // any conversion failure is reported before contacting the driver.
    const auto hidlModel = NN_TRY(convert(modelInShared));
    const auto hidlPreference = NN_TRY(V1_1::utils::convert(preference));
    const auto hidlPriority = NN_TRY(convert(priority));
    const auto hidlDeadline = NN_TRY(convert(deadline));
    const auto hidlModelCache = NN_TRY(V1_2::utils::convert(modelCache));
    const auto hidlDataCache = NN_TRY(V1_2::utils::convert(dataCache));
    const auto hidlToken = token;

    // Protect the callback with the death handler so a driver crash unblocks
    // the cb->get() wait below instead of hanging forever.
    const auto cb = sp<PreparedModelCallback>::make();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret =
            kDevice->prepareModel_1_3(hidlModel, hidlPreference, hidlPriority, hidlDeadline,
                                      hidlModelCache, hidlDataCache, hidlToken, cb);
    // `status` is only the launch status; the actual preparation result is
    // delivered through the callback.
    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
    if (status != ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        return NN_ERROR(canonical) << "prepareModel_1_3 failed with " << toString(status);
    }

    return cb->get();
}
233
// Recreates a prepared model from previously written cache files via
// prepareModelFromCache_1_3, blocking until the asynchronous preparation
// completes. Mirrors prepareModel() but skips model conversion entirely.
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
        nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
    const auto hidlDeadline = NN_TRY(convert(deadline));
    const auto hidlModelCache = NN_TRY(V1_2::utils::convert(modelCache));
    const auto hidlDataCache = NN_TRY(V1_2::utils::convert(dataCache));
    const auto hidlToken = token;

    // Protect the callback with the death handler so a driver crash unblocks
    // the cb->get() wait below instead of hanging forever.
    const auto cb = sp<PreparedModelCallback>::make();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModelFromCache_1_3(hidlDeadline, hidlModelCache, hidlDataCache,
                                                        hidlToken, cb);
    // `status` is only the launch status; the actual preparation result is
    // delivered through the callback.
    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
    if (status != ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        return NN_ERROR(canonical) << "prepareModelFromCache_1_3 failed with " << toString(status);
    }

    return cb->get();
}
256
257nn::GeneralResult<nn::SharedBuffer> Device::allocate(
258 const nn::BufferDesc& desc, const std::vector<nn::SharedPreparedModel>& preparedModels,
259 const std::vector<nn::BufferRole>& inputRoles,
260 const std::vector<nn::BufferRole>& outputRoles) const {
261 const auto hidlDesc = NN_TRY(convert(desc));
262 const auto hidlPreparedModels = NN_TRY(convert(preparedModels));
263 const auto hidlInputRoles = NN_TRY(convert(inputRoles));
264 const auto hidlOutputRoles = NN_TRY(convert(outputRoles));
265
266 nn::GeneralResult<nn::SharedBuffer> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
267 << "uninitialized";
268 auto cb = [&result](ErrorStatus status, const sp<IBuffer>& buffer, uint32_t token) {
269 if (status != ErrorStatus::NONE) {
270 const auto canonical =
271 validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
272 result = NN_ERROR(canonical) << "IDevice::allocate failed with " << toString(status);
273 } else if (buffer == nullptr) {
274 result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "Returned buffer is nullptr";
275 } else if (token == 0) {
276 result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "Returned token is invalid (0)";
277 } else {
278 result = convert(
279 Buffer::create(buffer, static_cast<nn::Request::MemoryDomainToken>(token)));
280 }
281 };
282
283 const auto ret =
284 kDevice->allocate(hidlDesc, hidlPreparedModels, hidlInputRoles, hidlOutputRoles, cb);
285 NN_TRY(hal::utils::handleTransportError(ret));
286
287 return result;
288}
289
290} // namespace android::hardware::neuralnetworks::V1_3::utils