Lev Proleev | 900c28a | 2021-01-26 19:40:20 +0000 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2021 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "PreparedModel.h" |
| 18 | |
Michael Butler | 7a9d609 | 2021-03-10 21:57:13 -0800 | [diff] [blame^] | 19 | #include "Burst.h" |
Lev Proleev | 900c28a | 2021-01-26 19:40:20 +0000 | [diff] [blame] | 20 | #include "Callbacks.h" |
| 21 | #include "Conversions.h" |
Lev Proleev | 900c28a | 2021-01-26 19:40:20 +0000 | [diff] [blame] | 22 | #include "Utils.h" |
| 23 | |
| 24 | #include <android/binder_auto_utils.h> |
| 25 | #include <nnapi/IPreparedModel.h> |
| 26 | #include <nnapi/Result.h> |
| 27 | #include <nnapi/TypeUtils.h> |
| 28 | #include <nnapi/Types.h> |
Lev Proleev | 900c28a | 2021-01-26 19:40:20 +0000 | [diff] [blame] | 29 | #include <nnapi/hal/CommonUtils.h> |
| 30 | #include <nnapi/hal/HandleError.h> |
| 31 | |
| 32 | #include <memory> |
| 33 | #include <tuple> |
| 34 | #include <utility> |
| 35 | #include <vector> |
| 36 | |
| 37 | // See hardware/interfaces/neuralnetworks/utils/README.md for more information on AIDL interface |
| 38 | // lifetimes across processes and for protecting asynchronous calls across AIDL. |
| 39 | |
| 40 | namespace aidl::android::hardware::neuralnetworks::utils { |
| 41 | namespace { |
| 42 | |
| 43 | nn::GeneralResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> convertExecutionResults( |
| 44 | const std::vector<OutputShape>& outputShapes, const Timing& timing) { |
| 45 | return std::make_pair(NN_TRY(nn::convert(outputShapes)), NN_TRY(nn::convert(timing))); |
| 46 | } |
| 47 | |
| 48 | nn::GeneralResult<std::pair<nn::Timing, nn::Timing>> convertFencedExecutionResults( |
| 49 | ErrorStatus status, const aidl_hal::Timing& timingLaunched, |
| 50 | const aidl_hal::Timing& timingFenced) { |
| 51 | HANDLE_HAL_STATUS(status) << "fenced execution callback info failed with " << toString(status); |
| 52 | return std::make_pair(NN_TRY(nn::convert(timingLaunched)), NN_TRY(nn::convert(timingFenced))); |
| 53 | } |
| 54 | |
| 55 | } // namespace |
| 56 | |
| 57 | nn::GeneralResult<std::shared_ptr<const PreparedModel>> PreparedModel::create( |
| 58 | std::shared_ptr<aidl_hal::IPreparedModel> preparedModel) { |
| 59 | if (preparedModel == nullptr) { |
| 60 | return NN_ERROR() |
| 61 | << "aidl_hal::utils::PreparedModel::create must have non-null preparedModel"; |
| 62 | } |
| 63 | |
| 64 | return std::make_shared<const PreparedModel>(PrivateConstructorTag{}, std::move(preparedModel)); |
| 65 | } |
| 66 | |
// The tag parameter restricts construction to PreparedModel::create, which has already
// verified that preparedModel is non-null.
PreparedModel::PreparedModel(PrivateConstructorTag /*tag*/,
                             std::shared_ptr<aidl_hal::IPreparedModel> preparedModel)
    : kPreparedModel(std::move(preparedModel)) {}
| 70 | |
// Runs a synchronous execution on the driver's IPreparedModel and returns the (possibly
// dynamic) output shapes together with timing information. Conversion and IPC failures are
// wrapped via makeExecutionFailure so callers receive an ExecutionResult error.
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
        const nn::Request& request, nn::MeasureTiming measure,
        const nn::OptionalTimePoint& deadline,
        const nn::OptionalDuration& loopTimeoutDuration) const {
    // Ensure that request is ready for IPC. If the request uses pointer-based memory, a copy
    // backed by shared memory is created in maybeRequestInShared; otherwise requestInShared
    // refers to the original request.
    std::optional<nn::Request> maybeRequestInShared;
    const nn::Request& requestInShared = NN_TRY(hal::utils::makeExecutionFailure(
            hal::utils::flushDataFromPointerToShared(&request, &maybeRequestInShared)));

    // Convert each canonical argument to its AIDL counterpart before the binder call.
    const auto aidlRequest = NN_TRY(hal::utils::makeExecutionFailure(convert(requestInShared)));
    const auto aidlMeasure = NN_TRY(hal::utils::makeExecutionFailure(convert(measure)));
    const auto aidlDeadline = NN_TRY(hal::utils::makeExecutionFailure(convert(deadline)));
    const auto aidlLoopTimeoutDuration =
            NN_TRY(hal::utils::makeExecutionFailure(convert(loopTimeoutDuration)));

    ExecutionResult executionResult;
    const auto ret = kPreparedModel->executeSynchronously(
            aidlRequest, aidlMeasure, aidlDeadline, aidlLoopTimeoutDuration, &executionResult);
    HANDLE_ASTATUS(ret) << "executeSynchronously failed";
    // Insufficient output size is reported out-of-band by the driver: surface it as
    // OUTPUT_INSUFFICIENT_SIZE and attach whatever output shapes were returned (best effort --
    // an empty vector if their conversion fails).
    if (!executionResult.outputSufficientSize) {
        auto canonicalOutputShapes =
                nn::convert(executionResult.outputShapes).value_or(std::vector<nn::OutputShape>{});
        return NN_ERROR(nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, std::move(canonicalOutputShapes))
               << "execution failed with " << nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
    }
    auto [outputShapes, timing] = NN_TRY(hal::utils::makeExecutionFailure(
            convertExecutionResults(executionResult.outputShapes, executionResult.timing)));

    // If the request was copied into shared memory above, copy the results back into the
    // caller's pointer-based memory.
    NN_TRY(hal::utils::makeExecutionFailure(
            hal::utils::unflushDataFromSharedToPointer(request, maybeRequestInShared)));

    return std::make_pair(std::move(outputShapes), timing);
}
| 104 | |
// Launches a fenced execution on the driver's IPreparedModel. Returns a sync fence that
// signals when execution completes, plus a callback for retrieving the execution's error
// status and timing information after the fence has signaled.
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
PreparedModel::executeFenced(const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
                             nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
                             const nn::OptionalDuration& loopTimeoutDuration,
                             const nn::OptionalDuration& timeoutDurationAfterFence) const {
    // Ensure that request is ready for IPC. If the request uses pointer-based memory, a copy
    // backed by shared memory is created in maybeRequestInShared.
    std::optional<nn::Request> maybeRequestInShared;
    const nn::Request& requestInShared =
            NN_TRY(hal::utils::flushDataFromPointerToShared(&request, &maybeRequestInShared));

    // Convert each canonical argument to its AIDL counterpart before the binder call.
    const auto aidlRequest = NN_TRY(convert(requestInShared));
    const auto aidlWaitFor = NN_TRY(convert(waitFor));
    const auto aidlMeasure = NN_TRY(convert(measure));
    const auto aidlDeadline = NN_TRY(convert(deadline));
    const auto aidlLoopTimeoutDuration = NN_TRY(convert(loopTimeoutDuration));
    const auto aidlTimeoutDurationAfterFence = NN_TRY(convert(timeoutDurationAfterFence));

    FencedExecutionResult result;
    const auto ret = kPreparedModel->executeFenced(aidlRequest, aidlWaitFor, aidlMeasure,
                                                   aidlDeadline, aidlLoopTimeoutDuration,
                                                   aidlTimeoutDurationAfterFence, &result);
    HANDLE_ASTATUS(ret) << "executeFenced failed";

    // When the driver returns no sync fence (fd == -1), substitute an already-signaled fence so
    // callers can uniformly wait on the result.
    auto resultSyncFence = nn::SyncFence::createAsSignaled();
    if (result.syncFence.get() != -1) {
        resultSyncFence = NN_TRY(nn::convert(result.syncFence));
    }

    auto callback = result.callback;
    if (callback == nullptr) {
        return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE) << "callback is null";
    }

    // If executeFenced required the request memory to be moved into shared memory, block here until
    // the fenced execution has completed and flush the memory back.
    if (maybeRequestInShared.has_value()) {
        const auto state = resultSyncFence.syncWait({});
        if (state != nn::SyncFence::FenceState::SIGNALED) {
            return NN_ERROR() << "syncWait failed with " << state;
        }
        NN_TRY(hal::utils::unflushDataFromSharedToPointer(request, maybeRequestInShared));
    }

    // Create callback which can be used to retrieve the execution error status and timings.
    // The AIDL callback's shared_ptr is captured by value, keeping it alive for as long as the
    // returned nn::ExecuteFencedInfoCallback exists.
    nn::ExecuteFencedInfoCallback resultCallback =
            [callback]() -> nn::GeneralResult<std::pair<nn::Timing, nn::Timing>> {
        ErrorStatus errorStatus;
        Timing timingLaunched;
        Timing timingFenced;
        const auto ret = callback->getExecutionInfo(&timingLaunched, &timingFenced, &errorStatus);
        HANDLE_ASTATUS(ret) << "fenced execution callback getExecutionInfo failed";
        return convertFencedExecutionResults(errorStatus, timingLaunched, timingFenced);
    };

    return std::make_pair(std::move(resultSyncFence), std::move(resultCallback));
}
| 161 | |
| 162 | nn::GeneralResult<nn::SharedBurst> PreparedModel::configureExecutionBurst() const { |
Michael Butler | 7a9d609 | 2021-03-10 21:57:13 -0800 | [diff] [blame^] | 163 | std::shared_ptr<IBurst> burst; |
| 164 | const auto ret = kPreparedModel->configureExecutionBurst(&burst); |
| 165 | HANDLE_ASTATUS(ret) << "configureExecutionBurst failed"; |
| 166 | return Burst::create(std::move(burst)); |
Lev Proleev | 900c28a | 2021-01-26 19:40:20 +0000 | [diff] [blame] | 167 | } |
| 168 | |
| 169 | std::any PreparedModel::getUnderlyingResource() const { |
| 170 | std::shared_ptr<aidl_hal::IPreparedModel> resource = kPreparedModel; |
| 171 | return resource; |
| 172 | } |
| 173 | |
| 174 | } // namespace aidl::android::hardware::neuralnetworks::utils |