/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Device.h"

#include "Callbacks.h"
#include "Conversions.h"
#include "Utils.h"

#include <android/hardware/neuralnetworks/1.0/IDevice.h>
#include <android/hardware/neuralnetworks/1.0/types.h>
#include <nnapi/IBuffer.h>
#include <nnapi/IDevice.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/OperandTypes.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <nnapi/hal/CommonUtils.h>
#include <nnapi/hal/HandleError.h>
#include <nnapi/hal/ProtectCallback.h>

#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <vector>

namespace android::hardware::neuralnetworks::V1_0::utils {
namespace {

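// Queries the driver's performance capabilities once, at Device construction time. The 1.0 HAL
// reports capabilities only through a synchronous HIDL callback, so the result is captured by
// reference inside the callback and converted to the canonical NNAPI representation.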
nn::GeneralResult<nn::Capabilities> initCapabilities(V1_0::IDevice* device) {
    CHECK(device != nullptr);

    nn::GeneralResult<nn::Capabilities> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                 << "uninitialized";
    const auto cb = [&result](ErrorStatus status, const Capabilities& capabilities) {
        if (status != ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical) << "getCapabilities failed with " << toString(status);
        } else {
            result = validatedConvertToCanonical(capabilities);
        }
    };

    const auto ret = device->getCapabilities(cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

}  // namespace

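// Factory method: validates the arguments, eagerly fetches the driver capabilities, and registers
// a death handler so pending callbacks are notified if the HAL service dies.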
nn::GeneralResult<std::shared_ptr<const Device>> Device::create(std::string name,
                                                                sp<V1_0::IDevice> device) {
    if (name.empty()) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_0::utils::Device::create must have non-empty name";
    }
    if (device == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "V1_0::utils::Device::create must have non-null device";
    }

    auto capabilities = NN_TRY(initCapabilities(device.get()));

    auto deathHandler = NN_TRY(hal::utils::DeathHandler::create(device));
    return std::make_shared<const Device>(PrivateConstructorTag{}, std::move(name),
                                          std::move(capabilities), std::move(device),
                                          std::move(deathHandler));
}

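// PrivateConstructorTag restricts construction to Device::create, which performs the validation
// above before this constructor runs.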
Device::Device(PrivateConstructorTag /*tag*/, std::string name, nn::Capabilities capabilities,
               sp<V1_0::IDevice> device, hal::utils::DeathHandler deathHandler)
    : kName(std::move(name)),
      kCapabilities(std::move(capabilities)),
      kDevice(std::move(device)),
      kDeathHandler(std::move(deathHandler)) {}

const std::string& Device::getName() const {
    return kName;
}

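// The 1.0 HAL does not expose a version string, so a fixed placeholder constant is reported.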
const std::string& Device::getVersionString() const {
    return kVersionString;
}

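// A 1.0 driver corresponds to the NNAPI feature level introduced in Android 8.1 (O MR1).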
nn::Version Device::getFeatureLevel() const {
    return nn::Version::ANDROID_OC_MR1;
}

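// The device type cannot be queried through the 1.0 interface, so it is reported as OTHER.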
nn::DeviceType Device::getType() const {
    return nn::DeviceType::OTHER;
}

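// Vendor extensions are not part of the 1.0 interface, so the kExtensions list is empty.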
const std::vector<nn::Extension>& Device::getSupportedExtensions() const {
    return kExtensions;
}

const nn::Capabilities& Device::getCapabilities() const {
    return kCapabilities;
}

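// Compilation caching is not supported by the 1.0 HAL, so no cache files are ever requested.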
std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
    return std::make_pair(/*numModelCache=*/0, /*numDataCache=*/0);
}

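// Verifies that the service is responsive by issuing a synchronous ping; any transport failure is
// converted into an error result.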
nn::GeneralResult<void> Device::wait() const {
    const auto ret = kDevice->ping();
    return hal::utils::handleTransportError(ret);
}

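// Asks the driver which operations of the given model it can execute. The model is first flushed
// to shared memory (if it is not already there) so it can cross the HIDL boundary, then converted
// to the 1.0 wire format. The callback checks that the driver returned one entry per operation.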
nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));

    nn::GeneralResult<std::vector<bool>> result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                                                  << "uninitialized";
    auto cb = [&result, &model](ErrorStatus status, const hidl_vec<bool>& supportedOperations) {
        if (status != ErrorStatus::NONE) {
            const auto canonical =
                    validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
            result = NN_ERROR(canonical)
                     << "getSupportedOperations failed with " << toString(status);
        } else if (supportedOperations.size() != model.main.operations.size()) {
            result = NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                     << "getSupportedOperations returned vector of size "
                     << supportedOperations.size() << " but expected "
                     << model.main.operations.size();
        } else {
            result = supportedOperations;
        }
    };

    const auto ret = kDevice->getSupportedOperations(hidlModel, cb);
    NN_TRY(hal::utils::handleTransportError(ret));

    return result;
}

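// Compiles the model on the driver. The 1.0 HAL has no notion of execution preference, priority,
// deadlines, or caching, so those parameters are ignored. Preparation completes asynchronously:
// the result is delivered through PreparedModelCallback, which the death handler protects while
// the call is outstanding, and cb->get() blocks until the driver reports completion.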
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
        const nn::Model& model, nn::ExecutionPreference /*preference*/, nn::Priority /*priority*/,
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::NativeHandle>& /*modelCache*/,
        const std::vector<nn::NativeHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto hidlModel = NN_TRY(convert(modelInShared));

    const auto cb = sp<PreparedModelCallback>::make();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModel(hidlModel, cb);
    const auto status = NN_TRY(hal::utils::handleTransportError(ret));
    if (status != ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        return NN_ERROR(canonical) << "prepareModel failed with " << toString(status);
    }

    return cb->get();
}

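// Preparing a model from cache is not part of the 1.0 interface, so it is always reported as
// unsupported.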
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
        nn::OptionalTimePoint /*deadline*/, const std::vector<nn::NativeHandle>& /*modelCache*/,
        const std::vector<nn::NativeHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::prepareModelFromCache not supported on 1.0 HAL service";
}

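// Driver-managed buffer allocation is likewise not part of the 1.0 interface.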
nn::GeneralResult<nn::SharedBuffer> Device::allocate(
        const nn::BufferDesc& /*desc*/,
        const std::vector<nn::SharedPreparedModel>& /*preparedModels*/,
        const std::vector<nn::BufferRole>& /*inputRoles*/,
        const std::vector<nn::BufferRole>& /*outputRoles*/) const {
    return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
           << "IDevice::allocate not supported on 1.0 HAL service";
}

}  // namespace android::hardware::neuralnetworks::V1_0::utils