/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "Callbacks.h"

#include "Conversions.h"
#include "PreparedModel.h"
#include "Utils.h"

#include <android/hardware/neuralnetworks/1.0/types.h>
#include <android/hardware/neuralnetworks/1.2/IExecutionCallback.h>
#include <android/hardware/neuralnetworks/1.2/IPreparedModelCallback.h>
#include <android/hardware/neuralnetworks/1.2/types.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
#include <nnapi/hal/1.0/Conversions.h>
#include <nnapi/hal/1.0/PreparedModel.h>
#include <nnapi/hal/CommonUtils.h>
#include <nnapi/hal/HandleError.h>
#include <nnapi/hal/ProtectCallback.h>
#include <nnapi/hal/TransferValue.h>

#include <utility>

namespace android::hardware::neuralnetworks::V1_2::utils {
namespace {

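// Wraps the prepared model handle returned by the driver in an adapter that implements the
// canonical nn::IPreparedModel interface. The V1_0 and V1_2 overloads select the corresponding
// utils::PreparedModel implementation.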
nn::GeneralResult<nn::SharedPreparedModel> convertPreparedModel(
        const sp<V1_0::IPreparedModel>& preparedModel) {
    return NN_TRY(V1_0::utils::PreparedModel::create(preparedModel));
}

nn::GeneralResult<nn::SharedPreparedModel> convertPreparedModel(
        const sp<IPreparedModel>& preparedModel) {
    return NN_TRY(utils::PreparedModel::create(preparedModel));
}

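// Validates and converts the output shapes and timing information returned by the driver to
// their canonical counterparts.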
nn::GeneralResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
convertExecutionGeneralResultsHelper(const hidl_vec<OutputShape>& outputShapes,
                                     const Timing& timing) {
    return std::make_pair(NN_TRY(validatedConvertToCanonical(outputShapes)),
                          NN_TRY(validatedConvertToCanonical(timing)));
}

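// Same conversion as above, but reported as an nn::ExecutionResult so that a conversion failure
// is surfaced as an execution failure to the caller.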
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
convertExecutionGeneralResults(const hidl_vec<OutputShape>& outputShapes, const Timing& timing) {
    return hal::utils::makeExecutionFailure(
            convertExecutionGeneralResultsHelper(outputShapes, timing));
}

}  // namespace

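// Called by the driver to report the result of a prepareModel request made through the V1_0
// method. On success the prepared model is wrapped in its canonical adapter; on failure the
// canonical error is recorded instead.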
Return<void> PreparedModelCallback::notify(V1_0::ErrorStatus status,
                                           const sp<V1_0::IPreparedModel>& preparedModel) {
    if (status != V1_0::ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        notifyInternal(NN_ERROR(canonical) << "preparedModel failed with " << toString(status));
    } else if (preparedModel == nullptr) {
        notifyInternal(NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                       << "Returned preparedModel is nullptr");
    } else {
        notifyInternal(convertPreparedModel(preparedModel));
    }
    return Void();
}

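// Called by the driver to report the result of a prepareModel_1_2 request. Mirrors notify()
// above, but wraps a V1_2 prepared model.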
Return<void> PreparedModelCallback::notify_1_2(V1_0::ErrorStatus status,
                                               const sp<IPreparedModel>& preparedModel) {
    if (status != V1_0::ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        notifyInternal(NN_ERROR(canonical) << "preparedModel failed with " << toString(status));
    } else if (preparedModel == nullptr) {
        notifyInternal(NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
                       << "Returned preparedModel is nullptr");
    } else {
        notifyInternal(convertPreparedModel(preparedModel));
    }
    return Void();
}

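// Invoked (e.g., by hal::utils::DeathHandler) when the driver dies before a result is delivered,
// so that any waiter is released with a DEAD_OBJECT error.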
void PreparedModelCallback::notifyAsDeadObject() {
    notifyInternal(NN_ERROR(nn::ErrorStatus::DEAD_OBJECT) << "Dead object");
}

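// Retrieves the result supplied by one of the notify* methods or notifyAsDeadObject, blocking
// until it is available.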
PreparedModelCallback::Data PreparedModelCallback::get() {
    return mData.take();
}

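// Stores the result in mData for later retrieval via get().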
void PreparedModelCallback::notifyInternal(PreparedModelCallback::Data result) {
    mData.put(std::move(result));
}

// ExecutionCallback methods begin here

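// Called by the driver to report the result of an execute request made through the V1_0 method,
// which carries only an error status.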
Return<void> ExecutionCallback::notify(V1_0::ErrorStatus status) {
    if (status != V1_0::ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        notifyInternal(NN_ERROR(canonical) << "execute failed with " << toString(status));
    } else {
        notifyInternal({});
    }
    return Void();
}

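// Called by the driver to report the result of an execute_1_2 request, which additionally
// carries the output shapes and timing information.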
Return<void> ExecutionCallback::notify_1_2(V1_0::ErrorStatus status,
                                           const hidl_vec<OutputShape>& outputShapes,
                                           const Timing& timing) {
    if (status != V1_0::ErrorStatus::NONE) {
        const auto canonical =
                validatedConvertToCanonical(status).value_or(nn::ErrorStatus::GENERAL_FAILURE);
        notifyInternal(NN_ERROR(canonical) << "execute failed with " << toString(status));
    } else {
        notifyInternal(convertExecutionGeneralResults(outputShapes, timing));
    }
    return Void();
}

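// Invoked when the driver dies before an execution result is delivered, releasing any waiter
// with a DEAD_OBJECT error.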
void ExecutionCallback::notifyAsDeadObject() {
    notifyInternal(NN_ERROR(nn::ErrorStatus::DEAD_OBJECT) << "Dead object");
}

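// Retrieves the execution result, blocking until it is available.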
ExecutionCallback::Data ExecutionCallback::get() {
    return mData.take();
}

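// Stores the execution result in mData for later retrieval via get().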
void ExecutionCallback::notifyInternal(ExecutionCallback::Data result) {
    mData.put(std::move(result));
}

}  // namespace android::hardware::neuralnetworks::V1_2::utils