/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Device.h"

#include "Callbacks.h"
#include "Conversions.h"
#include "Utils.h"

#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <nnapi/IBuffer.h>
#include <nnapi/IDevice.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/OperandTypes.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <nnapi/hal/CommonUtils.h>
#include <nnapi/hal/HandleError.h>
#include <nnapi/hal/ProtectCallback.h>

#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <vector>

namespace android::hardware::neuralnetworks::V1_0::utils {
namespace {

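// Synchronously queries the driver's capabilities over HIDL and converts them to the canonical
// nn::Capabilities representation, mapping any driver or transport error to a GeneralError.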
nn::GeneralResult<nn::Capabilities> initCapabilities(V1_0::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                 << "uninitialized";
    const auto cb = [&result](ErrorStatus status, const Capabilities& capabilities) {
        if (status != ErrorStatus::NONE) {
            const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical) << "getCapabilities failed with " << toString(status);
        } else {
            result = nn::convert(capabilities);
        }
    };

    const auto ret = device->getCapabilities(cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

}  // namespace

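// Validates the arguments, eagerly fetches the driver's capabilities, and registers a death
// handler before constructing the shared Device object.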
nn::GeneralResult<std::shared_ptr<const Device>> Device::create(std::string name,
                                                                sp<V1_0::IDevice> device) {
    if (name.empty()) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_0::utils::Device::create must have non-empty name";
    }
    if (device == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_0::utils::Device::create must have non-null device";
    }

    auto capabilities = NN_TRY(initCapabilities(device.get()));

    auto deathHandler = NN_TRY(hal::utils::DeathHandler::create(device));
    return std::make_shared<const Device>(PrivateConstructorTag{}, std::move(name),
                                          std::move(capabilities), std::move(device),
                                          std::move(deathHandler));
}

Device::Device(PrivateConstructorTag /*tag*/, std::string name, nn::Capabilities capabilities,
               sp<V1_0::IDevice> device, hal::utils::DeathHandler deathHandler)
    : kName(std::move(name)),
      kCapabilities(std::move(capabilities)),
      kDevice(std::move(device)),
      kDeathHandler(std::move(deathHandler)) {}

const std::string& Device::getName() const {
    return kName;
}

const std::string& Device::getVersionString() const {
    return kVersionString;
}

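// The 1.0 NN HAL corresponds to the NNAPI introduced in Android 8.1 (O MR1), so that is the
// feature level reported for any driver accessed through this adapter.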
nn::Version Device::getFeatureLevel() const {
    return nn::Version::ANDROID_OC_MR1;
}

nn::DeviceType Device::getType() const {
    return nn::DeviceType::OTHER;
}

const std::vector<nn::Extension>& Device::getSupportedExtensions() const {
    return kExtensions;
}

const nn::Capabilities& Device::getCapabilities() const {
    return kCapabilities;
}

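// Compilation caching is not available in the 1.0 HAL, so no model or data cache files are
// requested.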
std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
    return std::make_pair(/*numModelCache=*/0, /*numDataCache=*/0);
}

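// Issues a lightweight HIDL ping to confirm that the underlying service is still reachable,
// converting any transport failure into a GeneralError.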
nn::GeneralResult<void> Device::wait() const {
    const auto ret = kDevice->ping();
    return hal::utils::handleTransportError(ret);
}

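// Copies the model's pointer-based data into shared memory, converts it to the 1.0 HIDL format,
// and asks the driver which of the model's operations it can execute. The callback also checks
// that the returned vector has one entry per operation in the model.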
nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));

    nn::GeneralResult<std::vector<bool>> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                  << "uninitialized";
    auto cb = [&result, &model](ErrorStatus status, const hidl_vec<bool>& supportedOperations) {
        if (status != ErrorStatus::NONE) {
            const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical)
                     << "getSupportedOperations failed with " << toString(status);
        } else if (supportedOperations.size() != model.main.operations.size()) {
            result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                     << "getSupportedOperations returned vector of size "
                     << supportedOperations.size() << " but expected "
                     << model.main.operations.size();
        } else {
            result = supportedOperations;
        }
    };

    const auto ret = kDevice->getSupportedOperations(hidlModel, cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

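// The 1.0 HAL does not support execution preference, priority, deadlines, or compilation
// caching, so those arguments are ignored. The model is flushed to shared memory, converted to
// the HIDL format, and prepared asynchronously; the callback is guarded by the death handler and
// cb->get() blocks until preparation completes or fails.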
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
        const nn::Model& model, nn::ExecutionPreference /*preference*/, nn::Priority /*priority*/,
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& /*modelCache*/,
        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));

    const auto cb = sp<PreparedModelCallback>::make();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModel(hidlModel, cb);
    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
    if (status != ErrorStatus::NONE) {
        const auto canonical = nn::convert(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        return NN_ERROR(canonical) << "prepareModel failed with " << toString(status);
    }

    return cb->get();
}

nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& /*modelCache*/,
        const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::prepareModelFromCache not supported on 1.0 HAL service";
}

nn::GeneralResult<nn::SharedBuffer> Device::allocate(
        const nn::BufferDesc& /*desc*/,
        const std::vector<nn::SharedPreparedModel>& /*preparedModels*/,
        const std::vector<nn::BufferRole>& /*inputRoles*/,
        const std::vector<nn::BufferRole>& /*outputRoles*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::allocate not supported on 1.0 HAL service";
}

}  // namespace android::hardware::neuralnetworks::V1_0::utils