/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Device.h"

#include "Buffer.h"
#include "Callbacks.h"
#include "Conversions.h"
#include "PreparedModel.h"
#include "ProtectCallback.h"
#include "Utils.h"

#include <aidl/android/hardware/neuralnetworks/IDevice.h>
#include <android/binder_auto_utils.h>
#include <android/binder_interface_utils.h>
#include <nnapi/IBuffer.h>
#include <nnapi/IDevice.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/OperandTypes.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <nnapi/hal/CommonUtils.h>

#include <any>
#include <functional>
#include <memory>
#include <optional>
#include <string>
#include <vector>

// See hardware/interfaces/neuralnetworks/utils/README.md for more information on AIDL interface
// lifetimes across processes and for protecting asynchronous calls across AIDL.

namespace aidl::android::hardware::neuralnetworks::utils {

namespace {

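// Unwraps the type-erased underlying resource of each canonical nn::IPreparedModel to recover the
// aidl_hal::IPreparedModel it wraps. Fails if any prepared model did not originate from this AIDL
// utils layer.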
nn::GeneralResult<std::vector<std::shared_ptr<IPreparedModel>>> convert(
        const std::vector<nn::SharedPreparedModel>& preparedModels) {
    std::vector<std::shared_ptr<IPreparedModel>> aidlPreparedModels(preparedModels.size());
    for (size_t i = 0; i < preparedModels.size(); ++i) {
        std::any underlyingResource = preparedModels[i]->getUnderlyingResource();
        if (const auto* aidlPreparedModel =
                    std::any_cast<std::shared_ptr<aidl_hal::IPreparedModel>>(&underlyingResource)) {
            aidlPreparedModels[i] = *aidlPreparedModel;
        } else {
            return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
                   << "Unable to convert from nn::IPreparedModel to aidl_hal::IPreparedModel";
        }
    }
    return aidlPreparedModels;
}

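// Helpers that query the driver's static properties. Device::create calls each of them once and
// caches the results in the Device object.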
nn::GeneralResult<nn::Capabilities> getCapabilitiesFrom(IDevice* device) {
    CHECK(device != nullptr);
    Capabilities capabilities;
    const auto ret = device->getCapabilities(&capabilities);
    HANDLE_ASTATUS(ret) << "getCapabilities failed";
    return nn::convert(capabilities);
}

nn::GeneralResult<std::string> getVersionStringFrom(aidl_hal::IDevice* device) {
    CHECK(device != nullptr);
    std::string version;
    const auto ret = device->getVersionString(&version);
    HANDLE_ASTATUS(ret) << "getVersionString failed";
    return version;
}

nn::GeneralResult<nn::DeviceType> getDeviceTypeFrom(aidl_hal::IDevice* device) {
    CHECK(device != nullptr);
    DeviceType deviceType;
    const auto ret = device->getType(&deviceType);
    HANDLE_ASTATUS(ret) << "getDeviceType failed";
    return nn::convert(deviceType);
}

nn::GeneralResult<std::vector<nn::Extension>> getSupportedExtensionsFrom(
        aidl_hal::IDevice* device) {
    CHECK(device != nullptr);
    std::vector<Extension> supportedExtensions;
    const auto ret = device->getSupportedExtensions(&supportedExtensions);
    HANDLE_ASTATUS(ret) << "getSupportedExtensions failed";
    return nn::convert(supportedExtensions);
}

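// Retrieves the driver's compilation-cache requirements and checks that the reported counts are
// non-negative and do not exceed nn::kMaxNumberOfCacheFiles.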
nn::GeneralResult<std::pair<uint32_t, uint32_t>> getNumberOfCacheFilesNeededFrom(
        aidl_hal::IDevice* device) {
    CHECK(device != nullptr);
    NumberOfCacheFiles numberOfCacheFiles;
    const auto ret = device->getNumberOfCacheFilesNeeded(&numberOfCacheFiles);
    HANDLE_ASTATUS(ret) << "getNumberOfCacheFilesNeeded failed";

    if (numberOfCacheFiles.numDataCache < 0 || numberOfCacheFiles.numModelCache < 0) {
        return NN_ERROR() << "Driver reported negative number of cache files needed";
    }
    if (static_cast<uint32_t>(numberOfCacheFiles.numModelCache) > nn::kMaxNumberOfCacheFiles) {
        return NN_ERROR() << "getNumberOfCacheFilesNeeded returned numModelCache files greater "
                             "than allowed max ("
                          << numberOfCacheFiles.numModelCache << " vs "
                          << nn::kMaxNumberOfCacheFiles << ")";
    }
    if (static_cast<uint32_t>(numberOfCacheFiles.numDataCache) > nn::kMaxNumberOfCacheFiles) {
        return NN_ERROR() << "getNumberOfCacheFilesNeeded returned numDataCache files greater "
                             "than allowed max ("
                          << numberOfCacheFiles.numDataCache << " vs " << nn::kMaxNumberOfCacheFiles
                          << ")";
    }
    return std::make_pair(numberOfCacheFiles.numDataCache, numberOfCacheFiles.numModelCache);
}

}  // namespace

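// Validates the arguments, eagerly queries and caches the driver's static properties, and creates
// a DeathHandler so asynchronous calls can be protected if the driver process dies (see the
// README.md referenced above).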
nn::GeneralResult<std::shared_ptr<const Device>> Device::create(
        std::string name, std::shared_ptr<aidl_hal::IDevice> device) {
    if (name.empty()) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "aidl_hal::utils::Device::create must have non-empty name";
    }
    if (device == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "aidl_hal::utils::Device::create must have non-null device";
    }

    auto versionString = NN_TRY(getVersionStringFrom(device.get()));
    const auto deviceType = NN_TRY(getDeviceTypeFrom(device.get()));
    auto extensions = NN_TRY(getSupportedExtensionsFrom(device.get()));
    auto capabilities = NN_TRY(getCapabilitiesFrom(device.get()));
    const auto numberOfCacheFilesNeeded = NN_TRY(getNumberOfCacheFilesNeededFrom(device.get()));

    auto deathHandler = NN_TRY(DeathHandler::create(device));
    return std::make_shared<const Device>(
            PrivateConstructorTag{}, std::move(name), std::move(versionString), deviceType,
            std::move(extensions), std::move(capabilities), numberOfCacheFilesNeeded,
            std::move(device), std::move(deathHandler));
}

Device::Device(PrivateConstructorTag /*tag*/, std::string name, std::string versionString,
               nn::DeviceType deviceType, std::vector<nn::Extension> extensions,
               nn::Capabilities capabilities,
               std::pair<uint32_t, uint32_t> numberOfCacheFilesNeeded,
               std::shared_ptr<aidl_hal::IDevice> device, DeathHandler deathHandler)
    : kName(std::move(name)),
      kVersionString(std::move(versionString)),
      kDeviceType(deviceType),
      kExtensions(std::move(extensions)),
      kCapabilities(std::move(capabilities)),
      kNumberOfCacheFilesNeeded(numberOfCacheFilesNeeded),
      kDevice(std::move(device)),
      kDeathHandler(std::move(deathHandler)) {}

const std::string& Device::getName() const {
    return kName;
}

const std::string& Device::getVersionString() const {
    return kVersionString;
}

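// The initial AIDL version of the NN HAL corresponds to the Android S feature level.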
nn::Version Device::getFeatureLevel() const {
    return nn::Version::ANDROID_S;
}

nn::DeviceType Device::getType() const {
    return kDeviceType;
}

const std::vector<nn::Extension>& Device::getSupportedExtensions() const {
    return kExtensions;
}

const nn::Capabilities& Device::getCapabilities() const {
    return kCapabilities;
}

std::pair<uint32_t, uint32_t> Device::getNumberOfCacheFilesNeeded() const {
    return kNumberOfCacheFilesNeeded;
}

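// A successful synchronous binder ping indicates that the remote service is alive and able to
// respond.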
nn::GeneralResult<void> Device::wait() const {
    const auto ret = ndk::ScopedAStatus::fromStatus(AIBinder_ping(kDevice->asBinder().get()));
    HANDLE_ASTATUS(ret) << "ping failed";
    return {};
}

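// The returned vector contains one entry per operation in the model, indicating whether the
// driver can execute that operation.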
nn::GeneralResult<std::vector<bool>> Device::getSupportedOperations(const nn::Model& model) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto aidlModel = NN_TRY(convert(modelInShared));

    std::vector<bool> supportedOperations;
    const auto ret = kDevice->getSupportedOperations(aidlModel, &supportedOperations);
    HANDLE_ASTATUS(ret) << "getSupportedOperations failed";

    return supportedOperations;
}

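// prepareModel is asynchronous at the HAL boundary: the driver reports its result through the
// provided callback. The DeathHandler guard unblocks the callback if the driver dies before
// responding, and cb->get() blocks until a result (or error) arrives.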
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
        const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
        nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
    // Ensure that model is ready for IPC.
    std::optional<nn::Model> maybeModelInShared;
    const nn::Model& modelInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&model, &maybeModelInShared));

    const auto aidlModel = NN_TRY(convert(modelInShared));
    const auto aidlPreference = NN_TRY(convert(preference));
    const auto aidlPriority = NN_TRY(convert(priority));
    const auto aidlDeadline = NN_TRY(convert(deadline));
    const auto aidlModelCache = NN_TRY(convert(modelCache));
    const auto aidlDataCache = NN_TRY(convert(dataCache));
    const auto aidlToken = NN_TRY(convert(token));

    const auto cb = ndk::SharedRefBase::make<PreparedModelCallback>();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModel(aidlModel, aidlPreference, aidlPriority, aidlDeadline,
                                           aidlModelCache, aidlDataCache, aidlToken, cb);
    HANDLE_ASTATUS(ret) << "prepareModel failed";

    return cb->get();
}

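// Same callback pattern as prepareModel above, except the driver reconstructs the prepared model
// solely from the supplied cache files, so no model structure is sent over IPC.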
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModelFromCache(
        nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
        const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
    const auto aidlDeadline = NN_TRY(convert(deadline));
    const auto aidlModelCache = NN_TRY(convert(modelCache));
    const auto aidlDataCache = NN_TRY(convert(dataCache));
    const auto aidlToken = NN_TRY(convert(token));

    const auto cb = ndk::SharedRefBase::make<PreparedModelCallback>();
    const auto scoped = kDeathHandler.protectCallback(cb.get());

    const auto ret = kDevice->prepareModelFromCache(aidlDeadline, aidlModelCache, aidlDataCache,
                                                    aidlToken, cb);
    HANDLE_ASTATUS(ret) << "prepareModelFromCache failed";

    return cb->get();
}

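// Allocates a driver-managed buffer for memory domains. Each AIDL prepared model handle is
// wrapped in an IPreparedModelParcel as expected by IDevice::allocate, and the returned token is
// validated before constructing the canonical Buffer.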
nn::GeneralResult<nn::SharedBuffer> Device::allocate(
        const nn::BufferDesc& desc, const std::vector<nn::SharedPreparedModel>& preparedModels,
        const std::vector<nn::BufferRole>& inputRoles,
        const std::vector<nn::BufferRole>& outputRoles) const {
    const auto aidlDesc = NN_TRY(convert(desc));
    const auto aidlPreparedModels = NN_TRY(convert(preparedModels));
    const auto aidlInputRoles = NN_TRY(convert(inputRoles));
    const auto aidlOutputRoles = NN_TRY(convert(outputRoles));

    std::vector<IPreparedModelParcel> aidlPreparedModelParcels;
    aidlPreparedModelParcels.reserve(aidlPreparedModels.size());
    for (const auto& preparedModel : aidlPreparedModels) {
        aidlPreparedModelParcels.push_back({.preparedModel = preparedModel});
    }

    DeviceBuffer buffer;
    const auto ret = kDevice->allocate(aidlDesc, aidlPreparedModelParcels, aidlInputRoles,
                                       aidlOutputRoles, &buffer);
    HANDLE_ASTATUS(ret) << "IDevice::allocate failed";

    if (buffer.token < 0) {
        return NN_ERROR() << "IDevice::allocate returned negative token";
    }

    return Buffer::create(buffer.buffer,
                          static_cast<nn::Request::MemoryDomainToken>(buffer.token));
}

DeathMonitor* Device::getDeathMonitor() const {
    return kDeathHandler.getDeathMonitor().get();
}

}  // namespace aidl::android::hardware::neuralnetworks::utils