/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ResilientPreparedModel.h"

#include "InvalidBurst.h"
#include "ResilientBurst.h"

#include <android-base/logging.h>
#include <android-base/thread_annotations.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/Result.h>
#include <nnapi/TypeUtils.h>
#include <nnapi/Types.h>

#include <functional>
#include <memory>
#include <mutex>
#include <sstream>
#include <utility>
#include <vector>

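// ResilientPreparedModel wraps an nn::IPreparedModel obtained from a caller-supplied factory and
// transparently recreates it when a call fails with DEAD_OBJECT (e.g. because the remote object
// backing the prepared model has died).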
namespace android::hardware::neuralnetworks::utils {
namespace {

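// Calls `fn` on the currently held prepared model. If the call fails with DEAD_OBJECT, attempts to
// recover by recreating the prepared model through the stored factory and retries `fn` exactly once
// on the replacement. Any other error, or a failed recovery, is returned to the caller as-is.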
template <typename FnType>
auto protect(const ResilientPreparedModel& resilientPreparedModel, const FnType& fn)
        -> decltype(fn(*resilientPreparedModel.getPreparedModel())) {
    auto preparedModel = resilientPreparedModel.getPreparedModel();
    auto result = fn(*preparedModel);

    // Immediately return if prepared model is not dead.
    if (result.has_value() || result.error().code != nn::ErrorStatus::DEAD_OBJECT) {
        return result;
    }

    // Attempt recovery and return if it fails.
    auto maybePreparedModel = resilientPreparedModel.recover(preparedModel.get());
    if (!maybePreparedModel.has_value()) {
        const auto& [message, code] = maybePreparedModel.error();
        std::ostringstream oss;
        oss << ", and failed to recover dead prepared model with error " << code << ": " << message;
        result.error().message += oss.str();
        return result;
    }
    preparedModel = std::move(maybePreparedModel).value();

    return fn(*preparedModel);
}

}  // namespace

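// Factory entry point: validates that a prepared-model factory was supplied, eagerly creates the
// first prepared model through it, and wraps both in a ResilientPreparedModel.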
nn::GeneralResult<std::shared_ptr<const ResilientPreparedModel>> ResilientPreparedModel::create(
        Factory makePreparedModel) {
    if (makePreparedModel == nullptr) {
        return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
               << "utils::ResilientPreparedModel::create must have non-empty makePreparedModel";
    }
    auto preparedModel = NN_TRY(makePreparedModel());
    CHECK(preparedModel != nullptr);
    return std::make_shared<ResilientPreparedModel>(
            PrivateConstructorTag{}, std::move(makePreparedModel), std::move(preparedModel));
}

ResilientPreparedModel::ResilientPreparedModel(PrivateConstructorTag /*tag*/,
                                               Factory makePreparedModel,
                                               nn::SharedPreparedModel preparedModel)
    : kMakePreparedModel(std::move(makePreparedModel)), mPreparedModel(std::move(preparedModel)) {
    CHECK(kMakePreparedModel != nullptr);
    CHECK(mPreparedModel != nullptr);
}

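// Returns the currently held prepared model. The shared_ptr is copied under the lock, so the caller
// keeps a stable reference even if another thread swaps in a recovered prepared model afterwards.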
nn::SharedPreparedModel ResilientPreparedModel::getPreparedModel() const {
    std::lock_guard guard(mMutex);
    return mPreparedModel;
}

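// Replaces the held prepared model with a freshly created one, but only if the caller's failing
// prepared model is still the one currently held; otherwise another thread has already recovered,
// and the existing replacement is returned instead.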
nn::GeneralResult<nn::SharedPreparedModel> ResilientPreparedModel::recover(
        const nn::IPreparedModel* failingPreparedModel) const {
    std::lock_guard guard(mMutex);

    // Another caller updated the failing prepared model.
    if (mPreparedModel.get() != failingPreparedModel) {
        return mPreparedModel;
    }

    mPreparedModel = NN_TRY(kMakePreparedModel());
    return mPreparedModel;
}

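// Forwards a synchronous execution to the underlying prepared model, retrying once through
// protect() if the underlying object has died.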
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
ResilientPreparedModel::execute(const nn::Request& request, nn::MeasureTiming measure,
                                const nn::OptionalTimePoint& deadline,
                                const nn::OptionalDuration& loopTimeoutDuration) const {
    const auto fn = [&request, measure, &deadline,
                     &loopTimeoutDuration](const nn::IPreparedModel& preparedModel) {
        return preparedModel.execute(request, measure, deadline, loopTimeoutDuration);
    };
    return protect(*this, fn);
}

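// Forwards a fenced execution to the underlying prepared model with the same DEAD_OBJECT recovery
// behavior as execute().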
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
ResilientPreparedModel::executeFenced(const nn::Request& request,
                                      const std::vector<nn::SyncFence>& waitFor,
                                      nn::MeasureTiming measure,
                                      const nn::OptionalTimePoint& deadline,
                                      const nn::OptionalDuration& loopTimeoutDuration,
                                      const nn::OptionalDuration& timeoutDurationAfterFence) const {
    const auto fn = [&request, &waitFor, measure, &deadline, &loopTimeoutDuration,
                     &timeoutDurationAfterFence](const nn::IPreparedModel& preparedModel) {
        return preparedModel.executeFenced(request, waitFor, measure, deadline, loopTimeoutDuration,
                                           timeoutDurationAfterFence);
    };
    return protect(*this, fn);
}

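// The #if 0 branch below is a disabled path that would wrap the burst in a ResilientBurst created
// from this prepared model; the active path simply delegates to configureExecutionBurstInternal().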
nn::GeneralResult<nn::SharedBurst> ResilientPreparedModel::configureExecutionBurst() const {
#if 0
    auto self = shared_from_this();
    ResilientBurst::Factory makeBurst =
            [preparedModel = std::move(self)]() -> nn::GeneralResult<nn::SharedBurst> {
        return preparedModel->configureExecutionBurst();
    };
    return ResilientBurst::create(std::move(makeBurst));
#else
    return configureExecutionBurstInternal();
#endif
}

std::any ResilientPreparedModel::getUnderlyingResource() const {
    return getPreparedModel()->getUnderlyingResource();
}

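// Validity hook consulted by configureExecutionBurstInternal(); this class currently reports
// itself as always valid.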
bool ResilientPreparedModel::isValidInternal() const {
    return true;
}

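// Creates a burst from the underlying prepared model, returning an InvalidBurst placeholder when
// the object is not in a valid state, and otherwise routing the call through protect() so a dead
// prepared model can be recovered.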
nn::GeneralResult<nn::SharedBurst> ResilientPreparedModel::configureExecutionBurstInternal() const {
    if (!isValidInternal()) {
        return std::make_shared<const InvalidBurst>();
    }
    const auto fn = [](const nn::IPreparedModel& preparedModel) {
        return preparedModel.configureExecutionBurst();
    };
    return protect(*this, fn);
}

}  // namespace android::hardware::neuralnetworks::utils