/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Device.h"

#include "Conversions.h"
#include "Utils.h"

#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include <android/hardware/neuralnetworks/1.1/types.h>
#include <nnapi/IBuffer.h>
#include <nnapi/IDevice.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/OperandTypes.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <nnapi/hal/1.0/Callbacks.h>
#include <nnapi/hal/CommonUtils.h>
#include <nnapi/hal/HandleError.h>
#include <nnapi/hal/ProtectCallback.h>

#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <vector>

// See hardware/interfaces/neuralnetworks/utils/README.md for more information on HIDL interface
// lifetimes across processes and for protecting asynchronous calls across HIDL.

namespace android::hardware::neuralnetworks::V1_1::utils {
namespace {

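// Synchronously queries the driver's capabilities through getCapabilities_1_1 and converts them
// to the canonical nn::Capabilities representation, mapping any transport failure or driver error
// into a GeneralResult error.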
nn::GeneralResult<nn::Capabilities> initCapabilities(V1_1::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                 << "uninitialized";
    const auto cb = [&result](V1_0::ErrorStatus status, const Capabilities& capabilities) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical) << "getCapabilities_1_1 failed with " << toString(status);
        } else {
            result = nn::convert(capabilities);
        }
    };

    const auto ret = device->getCapabilities_1_1(cb);
    HANDLE_TRANSPORT_FAILURE(ret);

    return result;
}

}  // namespace

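// Creates a validated Device object: the name must be non-empty and the driver handle non-null,
// the driver's capabilities are fetched eagerly, and a DeathHandler is registered so that
// asynchronous callbacks can be protected against driver death.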
nn::GeneralResult<std::shared_ptr<const Device>> Device::create(std::string name,
                                                                sp<V1_1::IDevice> device) {
    if (name.empty()) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_1::utils::Device::create must have non-empty name";
    }
    if (device == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_1::utils::Device::create must have non-null device";
    }

    auto capabilities = NN_TRY(initCapabilities(device.get()));

    auto deathHandler = NN_TRY(hal::utils::DeathHandler::create(device));
    return std::make_shared<const Device>(PrivateConstructorTag{}, std::move(name),
                                          std::move(capabilities), std::move(device),
                                          std::move(deathHandler));
}

Device::Device(PrivateConstructorTag /*tag*/, std::string name, nn::Capabilities capabilities,
               sp<V1_1::IDevice> device, hal::utils::DeathHandler deathHandler)
    : kName(std::move(name)),
      kCapabilities(std::move(capabilities)),
      kDevice(std::move(device)),
      kDeathHandler(std::move(deathHandler)) {}

const std::string& Device::getName() const {
    return kName;
}

const std::string& Device::getVersionString() const {
    return kVersionString;
}

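// The 1.1 HAL interface was introduced in Android P, so that is the feature level reported for
// these drivers.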
nn::Version Device::getFeatureLevel() const {
    return nn::Version::ANDROID_P;
}

nn::DeviceType Device::getType() const {
    return nn::DeviceType::UNKNOWN;
}

const std::vector<nn::Extension>& Device::getSupportedExtensions() const {
    return kExtensions;
}

const nn::Capabilities& Device::getCapabilities() const {
    return kCapabilities;
}

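// Compilation caching is not part of the 1.1 interface (it was introduced in 1.2), so no cache
// files are requested from the runtime.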
std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
    return std::make_pair(/*numModelCache=*/0, /*numDataCache=*/0);
}

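// Blocks until the driver responds to a ping, surfacing transport errors (e.g. a dead service) as
// a GeneralResult failure.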
nn::GeneralResult<void> Device::wait() const {
    const auto ret = kDevice->ping();
    HANDLE_TRANSPORT_FAILURE(ret);
    return {};
}

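// Copies any pointer-backed model data into shared memory, converts the canonical model to the
// 1.1 HIDL representation, and asks the driver which operations it can execute. The result is
// validated to contain exactly one entry per operation in the model's main subgraph.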
nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));

    nn::GeneralResult<std::vector<bool>> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                  << "uninitialized";
    auto cb = [&result, &model](V1_0::ErrorStatus status,
                                const hidl_vec<bool>& supportedOperations) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical)
                     << "getSupportedOperations_1_1 failed with " << toString(status);
        } else if (supportedOperations.size() != model.main.operations.size()) {
            result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                     << "getSupportedOperations_1_1 returned vector of size "
                     << supportedOperations.size() << " but expected "
                     << model.main.operations.size();
        } else {
            result = supportedOperations;
        }
    };

    const auto ret = kDevice->getSupportedOperations_1_1(hidlModel, cb);
    HANDLE_TRANSPORT_FAILURE(ret);

    return result;
}

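// Prepares the model on the driver. The 1.1 interface does not support priority, deadlines, or
// compilation caching, so those arguments are ignored. The callback is protected by the death
// handler so that a driver crash unblocks the pending cb->get().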
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
        const nn::Model& model, nn::ExecutionPreference preference, nn::Priority /*priority*/,
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& /*modelCache*/,
        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));
    const auto hidlPreference = NN_TRY(convert(preference));

    const auto cb = sp<V1_0::utils::PreparedModelCallback>::make();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModel_1_1(hidlModel, hidlPreference, cb);
    const auto status = HANDLE_TRANSPORT_FAILURE(ret);
    if (status != V1_0::ErrorStatus::NONE) {
        const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        return NN_ERROR(canonical) << "prepareModel failed with " << toString(status);
    }

    return cb->get();
}

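// Preparing a model from cache requires the 1.2 interface or later, so this always fails on a
// 1.1 driver.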
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& /*modelCache*/,
        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::prepareModelFromCache not supported on 1.1 HAL service";
}

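// Driver-managed buffer allocation requires the 1.3 interface or later, so this always fails on a
// 1.1 driver.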
nn::GeneralResult<nn::SharedBuffer> Device::allocate(
        const nn::BufferDesc& /*desc*/,
        const std::vector<nn::SharedPreparedModel>& /*preparedModels*/,
        const std::vector<nn::BufferRole>& /*inputRoles*/,
        const std::vector<nn::BufferRole>& /*outputRoles*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::allocate not supported on 1.1 HAL service";
}

}  // namespace android::hardware::neuralnetworks::V1_1::utils