/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ResilientPreparedModel.h"

#include <android-base/logging.h>
#include <android-base/thread_annotations.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/Result.h>
#include <nnapi/TypeUtils.h>
#include <nnapi/Types.h>

#include <functional>
#include <memory>
#include <mutex>
#include <sstream>
#include <utility>
#include <vector>

namespace android::hardware::neuralnetworks::utils {
namespace {

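// Calls `fn` on the currently cached prepared model. If the call fails with
// DEAD_OBJECT, attempts to recover the prepared model and, if recovery
// succeeds, retries the call once on the new instance.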
template <typename FnType>
auto protect(const ResilientPreparedModel& resilientPreparedModel, const FnType& fn)
        -> decltype(fn(*resilientPreparedModel.getPreparedModel())) {
    auto preparedModel = resilientPreparedModel.getPreparedModel();
    auto result = fn(*preparedModel);

    // Immediately return if prepared model is not dead.
    if (result.has_value() || result.error().code != nn::ErrorStatus::DEAD_OBJECT) {
        return result;
    }

    // Attempt recovery and return if it fails.
    auto maybePreparedModel = resilientPreparedModel.recover(preparedModel.get());
    if (!maybePreparedModel.has_value()) {
        const auto& [message, code] = maybePreparedModel.error();
        std::ostringstream oss;
        oss << ", and failed to recover dead prepared model with error " << code << ": " << message;
        result.error().message += oss.str();
        return result;
    }
    preparedModel = std::move(maybePreparedModel).value();

    return fn(*preparedModel);
}

}  // namespace

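// Creates a ResilientPreparedModel, caching both the factory (used later for
// recovery) and the prepared model produced by invoking that factory once.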
nn::GeneralResult<std::shared_ptr<const ResilientPreparedModel>> ResilientPreparedModel::create(
        Factory makePreparedModel) {
    if (makePreparedModel == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "utils::ResilientPreparedModel::create must have non-empty makePreparedModel";
    }
    auto preparedModel = NN_TRY(makePreparedModel());
    CHECK(preparedModel != nullptr);
    return std::make_shared<ResilientPreparedModel>(
            PrivateConstructorTag{}, std::move(makePreparedModel), std::move(preparedModel));
}

ResilientPreparedModel::ResilientPreparedModel(PrivateConstructorTag /*tag*/,
                                               Factory makePreparedModel,
                                               nn::SharedPreparedModel preparedModel)
    : kMakePreparedModel(std::move(makePreparedModel)), mPreparedModel(std::move(preparedModel)) {
    CHECK(kMakePreparedModel != nullptr);
    CHECK(mPreparedModel != nullptr);
}

nn::SharedPreparedModel ResilientPreparedModel::getPreparedModel() const {
    std::lock_guard guard(mMutex);
    return mPreparedModel;
}

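// Replaces the cached prepared model by invoking the factory again. If another
// caller has already replaced it (the cached pointer no longer matches
// failingPreparedModel), the existing replacement is returned instead.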
nn::GeneralResult<nn::SharedPreparedModel> ResilientPreparedModel::recover(
        const nn::IPreparedModel* failingPreparedModel) const {
    std::lock_guard guard(mMutex);

    // Another caller updated the failing prepared model.
    if (mPreparedModel.get() != failingPreparedModel) {
        return mPreparedModel;
    }

    mPreparedModel = NN_TRY(kMakePreparedModel());
    return mPreparedModel;
}

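// The execution methods below are routed through protect() so that a
// DEAD_OBJECT failure triggers a single recovery attempt and retry.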
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
ResilientPreparedModel::execute(const nn::Request& request, nn::MeasureTiming measure,
                                const nn::OptionalTimePoint& deadline,
                                const nn::OptionalDuration& loopTimeoutDuration) const {
    const auto fn = [&request, measure, &deadline,
                     &loopTimeoutDuration](const nn::IPreparedModel& preparedModel) {
        return preparedModel.execute(request, measure, deadline, loopTimeoutDuration);
    };
    return protect(*this, fn);
}

nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
ResilientPreparedModel::executeFenced(const nn::Request& request,
                                      const std::vector<nn::SyncFence>& waitFor,
                                      nn::MeasureTiming measure,
                                      const nn::OptionalTimePoint& deadline,
                                      const nn::OptionalDuration& loopTimeoutDuration,
                                      const nn::OptionalDuration& timeoutDurationAfterFence) const {
    const auto fn = [&request, &waitFor, measure, &deadline, &loopTimeoutDuration,
                     &timeoutDurationAfterFence](const nn::IPreparedModel& preparedModel) {
        return preparedModel.executeFenced(request, waitFor, measure, deadline, loopTimeoutDuration,
                                           timeoutDurationAfterFence);
    };
    return protect(*this, fn);
}

nn::GeneralResult<nn::SharedBurst> ResilientPreparedModel::configureExecutionBurst() const {
    const auto fn = [](const nn::IPreparedModel& preparedModel) {
        return preparedModel.configureExecutionBurst();
    };
    return protect(*this, fn);
}

std::any ResilientPreparedModel::getUnderlyingResource() const {
    return getPreparedModel()->getUnderlyingResource();
}

}  // namespace android::hardware::neuralnetworks::utils