/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Device.h"

#include "Callbacks.h"
#include "Conversions.h"
#include "Utils.h"

#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.1/types.h>
#include <android/hardware/neuralnetworks/1.2/IDevice.h>
#include <android/hardware/neuralnetworks/1.2/types.h>
#include <nnapi/IBuffer.h>
#include <nnapi/IDevice.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/OperandTypes.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <nnapi/hal/1.1/Conversions.h>
#include <nnapi/hal/CommonUtils.h>
#include <nnapi/hal/HandleError.h>
#include <nnapi/hal/ProtectCallback.h>

#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <vector>

namespace android::hardware::neuralnetworks::V1_2::utils {
namespace {

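// Queries the driver's performance capabilities via IDevice::getCapabilities_1_2 and converts
// the result to the canonical (version-independent) representation.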
nn::GeneralResult<nn::Capabilities> initCapabilities(V1_2::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                 << "uninitialized";
    const auto cb = [&result](V1_0::ErrorStatus status, const Capabilities& capabilities) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical) << "getCapabilities_1_2 failed with " << toString(status);
        } else {
            result = validatedConvertToCanonical(capabilities);
        }
    };

    const auto ret = device->getCapabilities_1_2(cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

}  // namespace

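// Retrieves the driver's version string via IDevice::getVersionString.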
nn::GeneralResult<std::string> initVersionString(V1_2::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<std::string> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                            << "uninitialized";
    const auto cb = [&result](V1_0::ErrorStatus status, const hidl_string& versionString) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical) << "getVersionString failed with " << toString(status);
        } else {
            result = versionString;
        }
    };

    const auto ret = device->getVersionString(cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

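// Retrieves the device category (e.g. CPU, GPU, or accelerator) via IDevice::getType and
// converts it to the canonical nn::DeviceType.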
nn::GeneralResult<nn::DeviceType> initDeviceType(V1_2::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<nn::DeviceType> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                               << "uninitialized";
    const auto cb = [&result](V1_0::ErrorStatus status, DeviceType deviceType) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical) << "getDeviceType failed with " << toString(status);
        } else {
            result = nn::convert(deviceType);
        }
    };

    const auto ret = device->getType(cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

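// Retrieves the vendor extensions supported by the driver via IDevice::getSupportedExtensions.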
nn::GeneralResult<std::vector<nn::Extension>> initExtensions(V1_2::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<std::vector<nn::Extension>> result =
            NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "uninitialized";
    const auto cb = [&result](V1_0::ErrorStatus status, const hidl_vec<Extension>& extensions) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical) << "getExtensions failed with " << toString(status);
        } else {
            result = nn::convert(extensions);
        }
    };

    const auto ret = device->getSupportedExtensions(cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

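// Queries how many model cache and data cache files the driver requires for compilation caching
// via IDevice::getNumberOfCacheFilesNeeded.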
nn::GeneralResult<std::pair<uint32_t, uint32_t>> initNumberOfCacheFilesNeeded(
        V1_2::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<std::pair<uint32_t, uint32_t>> result =
            NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "uninitialized";
    const auto cb = [&result](V1_0::ErrorStatus status, uint32_t numModelCache,
                              uint32_t numDataCache) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical)
                     << "getNumberOfCacheFilesNeeded failed with " << toString(status);
        } else {
            result = std::make_pair(numModelCache, numDataCache);
        }
    };

    const auto ret = device->getNumberOfCacheFilesNeeded(cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

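// Creates a Device object wrapping the given V1_2::IDevice handle. All static device properties
// (version string, device type, extensions, capabilities, and cache file counts) are queried up
// front so that later accessors do not require additional IPC, and a DeathHandler is registered
// to detect a crashed or unresponsive driver.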
nn::GeneralResult<std::shared_ptr<const Device>> Device::create(std::string name,
                                                                sp<V1_2::IDevice> device) {
    if (name.empty()) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_2::utils::Device::create must have non-empty name";
    }
    if (device == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_2::utils::Device::create must have non-null device";
    }

    auto versionString = NN_TRY(initVersionString(device.get()));
    const auto deviceType = NN_TRY(initDeviceType(device.get()));
    auto extensions = NN_TRY(initExtensions(device.get()));
    auto capabilities = NN_TRY(initCapabilities(device.get()));
    const auto numberOfCacheFilesNeeded = NN_TRY(initNumberOfCacheFilesNeeded(device.get()));

    auto deathHandler = NN_TRY(hal::utils::DeathHandler::create(device));
    return std::make_shared<const Device>(
            PrivateConstructorTag{}, std::move(name), std::move(versionString), deviceType,
            std::move(extensions), std::move(capabilities), numberOfCacheFilesNeeded,
            std::move(device), std::move(deathHandler));
}

Device::Device(PrivateConstructorTag /*tag*/, std::string name, std::string versionString,
               nn::DeviceType deviceType, std::vector<nn::Extension> extensions,
               nn::Capabilities capabilities,
               std::pair<uint32_t, uint32_t> numberOfCacheFilesNeeded, sp<V1_2::IDevice> device,
               hal::utils::DeathHandler deathHandler)
    : kName(std::move(name)),
      kVersionString(std::move(versionString)),
      kDeviceType(deviceType),
      kExtensions(std::move(extensions)),
      kCapabilities(std::move(capabilities)),
      kNumberOfCacheFilesNeeded(numberOfCacheFilesNeeded),
      kDevice(std::move(device)),
      kDeathHandler(std::move(deathHandler)) {}

const std::string& Device::getName() const {
    return kName;
}

const std::string& Device::getVersionString() const {
    return kVersionString;
}

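// The 1.2 NN HAL was introduced in Android 10 (Q), so every 1.2 driver reports that feature
// level.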
nn::Version Device::getFeatureLevel() const {
    return nn::Version::ANDROID_Q;
}

nn::DeviceType Device::getType() const {
    return kDeviceType;
}

const std::vector<nn::Extension>& Device::getSupportedExtensions() const {
    return kExtensions;
}

const nn::Capabilities& Device::getCapabilities() const {
    return kCapabilities;
}

std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
    return kNumberOfCacheFilesNeeded;
}

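// Verifies that the driver process is alive and responsive by issuing a synchronous
// IDevice::ping call; any transport error is converted into a failed nn::GeneralResult.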
nn::GeneralResult<void> Device::wait() const {
    const auto ret = kDevice->ping();
    return hal::utils::handleTransportError(ret);
}

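// Asks the driver which operations of the given model it can execute. The model is first copied
// into shared memory if needed (so pointer-backed operand data can cross the HIDL boundary) and
// converted to the V1_2 HIDL representation. The driver must return exactly one boolean per
// operation in the model's main subgraph.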
nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));

    nn::GeneralResult<std::vector<bool>> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                  << "uninitialized";
    auto cb = [&result, &model](V1_0::ErrorStatus status,
                                const hidl_vec<bool>& supportedOperations) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical)
                     << "getSupportedOperations_1_2 failed with " << toString(status);
        } else if (supportedOperations.size() != model.main.operations.size()) {
            result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                     << "getSupportedOperations_1_2 returned vector of size "
                     << supportedOperations.size() << " but expected "
                     << model.main.operations.size();
        } else {
            result = supportedOperations;
        }
    };

    const auto ret = kDevice->getSupportedOperations_1_2(hidlModel, cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

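// Compiles the model on the driver via IDevice::prepareModel_1_2. The model is flushed to shared
// memory if necessary and converted to the HIDL format together with the execution preference and
// the cache handles; the prepared model is returned asynchronously through a PreparedModelCallback
// protected against driver death. The priority and deadline arguments are ignored because the 1.2
// HAL has no corresponding fields.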
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
        const nn::Model& model, nn::ExecutionPreference preference, nn::Priority /*priority*/,
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& modelCache,
        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));
    const auto hidlPreference = NN_TRY(V1_1::utils::convert(preference));
    const auto hidlModelCache = NN_TRY(convert(modelCache));
    const auto hidlDataCache = NN_TRY(convert(dataCache));
    const auto hidlToken = token;

    const auto cb = sp<PreparedModelCallback>::make();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModel_1_2(hidlModel, hidlPreference, hidlModelCache,
                                               hidlDataCache, hidlToken, cb);
    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
    if (status != V1_0::ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        return NN_ERROR(canonical) << "prepareModel_1_2 failed with " << toString(status);
    }

    return cb->get();
}

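// Restores a previously compiled model from the driver's compilation cache via
// IDevice::prepareModelFromCache, using the supplied cache handles and token. The deadline
// argument is ignored because the 1.2 HAL does not support it.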
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& modelCache,
        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
    const auto hidlModelCache = NN_TRY(convert(modelCache));
    const auto hidlDataCache = NN_TRY(convert(dataCache));
    const auto hidlToken = token;

    const auto cb = sp<PreparedModelCallback>::make();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModelFromCache(hidlModelCache, hidlDataCache, hidlToken, cb);
    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
    if (status != V1_0::ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        return NN_ERROR(canonical) << "prepareModelFromCache failed with " << toString(status);
    }

    return cb->get();
}

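// IDevice::allocate (driver-managed device memory) was introduced in the 1.3 HAL, so a 1.2
// driver cannot support it; always report GENERAL_FAILURE.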
nn::GeneralResult<nn::SharedBuffer> Device::allocate(
        const nn::BufferDesc& /*desc*/,
        const std::vector<nn::SharedPreparedModel>& /*preparedModels*/,
        const std::vector<nn::BufferRole>& /*inputRoles*/,
        const std::vector<nn::BufferRole>& /*outputRoles*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::allocate not supported on 1.2 HAL service";
}

}  // namespace android::hardware::neuralnetworks::V1_2::utils