/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Device.h"

#include "Conversions.h"
#include "Utils.h"

#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.1/IDevice.h>
#include <android/hardware/neuralnetworks/1.1/types.h>
#include <nnapi/IBuffer.h>
#include <nnapi/IDevice.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/OperandTypes.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <nnapi/hal/1.0/Callbacks.h>
#include <nnapi/hal/CommonUtils.h>
#include <nnapi/hal/HandleError.h>
#include <nnapi/hal/ProtectCallback.h>

#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <vector>

namespace android::hardware::neuralnetworks::V1_1::utils {
namespace {

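// Queries the driver's capabilities through the synchronous HIDL callback and converts them to
// the canonical NNAPI representation. Transport failures and driver-reported errors are surfaced
// as nn::GeneralResult errors.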
nn::GeneralResult<nn::Capabilities> initCapabilities(V1_1::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                 << "uninitialized";
    const auto cb = [&result](V1_0::ErrorStatus status, const Capabilities& capabilities) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical) << "getCapabilities_1_1 failed with " << toString(status);
        } else {
            result = validatedConvertToCanonical(capabilities);
        }
    };

    const auto ret = device->getCapabilities_1_1(cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

}  // namespace

nn::GeneralResult<std::shared_ptr<const Device>> Device::create(std::string name,
                                                                 sp<V1_1::IDevice> device) {
    if (name.empty()) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_1::utils::Device::create must have non-empty name";
    }
    if (device == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_1::utils::Device::create must have non-null device";
    }

    auto capabilities = NN_TRY(initCapabilities(device.get()));

    auto deathHandler = NN_TRY(hal::utils::DeathHandler::create(device));
    return std::make_shared<const Device>(PrivateConstructorTag{}, std::move(name),
                                          std::move(capabilities), std::move(device),
                                          std::move(deathHandler));
}

Device::Device(PrivateConstructorTag /*tag*/, std::string name, nn::Capabilities capabilities,
               sp<V1_1::IDevice> device, hal::utils::DeathHandler deathHandler)
    : kName(std::move(name)),
      kCapabilities(std::move(capabilities)),
      kDevice(std::move(device)),
      kDeathHandler(std::move(deathHandler)) {}

const std::string& Device::getName() const {
    return kName;
}

const std::string& Device::getVersionString() const {
    return kVersionString;
}

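// A 1.1 driver implements the NN HAL introduced in Android P, so that is reported as its
// feature level.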
nn::Version Device::getFeatureLevel() const {
    return nn::Version::ANDROID_P;
}

nn::DeviceType Device::getType() const {
    return nn::DeviceType::UNKNOWN;
}

const std::vector<nn::Extension>& Device::getSupportedExtensions() const {
    return kExtensions;
}

const nn::Capabilities& Device::getCapabilities() const {
    return kCapabilities;
}

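// Compilation caching was introduced in the 1.2 HAL, so a 1.1 driver never requests model or
// data cache files.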
std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
    return std::make_pair(/*numModelCache=*/0, /*numDataCache=*/0);
}

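// ping() is a lightweight synchronous transaction; a successful return indicates the remote
// service is up and reachable.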
nn::GeneralResult<void> Device::wait() const {
    const auto ret = kDevice->ping();
    return hal::utils::handleTransportError(ret);
}

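// Asks the driver which of the model's operations it can execute. The driver replies through a
// synchronous HIDL callback with one bool per operation in the model's main subgraph.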
nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));

    nn::GeneralResult<std::vector<bool>> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                  << "uninitialized";
    auto cb = [&result, &model](V1_0::ErrorStatus status,
                                const hidl_vec<bool>& supportedOperations) {
        if (status != V1_0::ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical)
                     << "getSupportedOperations_1_1 failed with " << toString(status);
        } else if (supportedOperations.size() != model.main.operations.size()) {
            result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                     << "getSupportedOperations_1_1 returned vector of size "
                     << supportedOperations.size() << " but expected "
                     << model.main.operations.size();
        } else {
            result = supportedOperations;
        }
    };

    const auto ret = kDevice->getSupportedOperations_1_1(hidlModel, cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
        const nn::Model& model, nn::ExecutionPreference preference, nn::Priority /*priority*/,
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::NativeHandle>& /*modelCache*/,
        const std::vector<nn::NativeHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));
    const auto hidlPreference = NN_TRY(convert(preference));

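    // The result of the asynchronous preparation is delivered through a callback object.
    // Registering it with the death handler unblocks the callback if the driver process dies
    // before responding.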
    const auto cb = sp<V1_0::utils::PreparedModelCallback>::make();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModel_1_1(hidlModel, hidlPreference, cb);
    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
    if (status != V1_0::ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        return NN_ERROR(canonical) << "prepareModel failed with " << toString(status);
    }

    return cb->get();
}

nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::NativeHandle>& /*modelCache*/,
        const std::vector<nn::NativeHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::prepareModelFromCache not supported on 1.1 HAL service";
}

nn::GeneralResult<nn::SharedBuffer> Device::allocate(
        const nn::BufferDesc& /*desc*/,
        const std::vector<nn::SharedPreparedModel>& /*preparedModels*/,
        const std::vector<nn::BufferRole>& /*inputRoles*/,
        const std::vector<nn::BufferRole>& /*outputRoles*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::allocate not supported on 1.1 HAL service";
}

}  // namespace android::hardware::neuralnetworks::V1_1::utils