/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "ResilientPreparedModel.h"

#include <android-base/logging.h>
#include <android-base/thread_annotations.h>
#include <nnapi/IPreparedModel.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>

#include <any>
#include <functional>
#include <memory>
#include <mutex>
#include <utility>
#include <vector>
30
31namespace android::hardware::neuralnetworks::utils {
32
33nn::GeneralResult<std::shared_ptr<const ResilientPreparedModel>> ResilientPreparedModel::create(
34 Factory makePreparedModel) {
35 if (makePreparedModel == nullptr) {
36 return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
37 << "utils::ResilientPreparedModel::create must have non-empty makePreparedModel";
38 }
39 auto preparedModel = NN_TRY(makePreparedModel(/*blocking=*/true));
40 CHECK(preparedModel != nullptr);
41 return std::make_shared<ResilientPreparedModel>(
42 PrivateConstructorTag{}, std::move(makePreparedModel), std::move(preparedModel));
43}
44
45ResilientPreparedModel::ResilientPreparedModel(PrivateConstructorTag /*tag*/,
46 Factory makePreparedModel,
47 nn::SharedPreparedModel preparedModel)
48 : kMakePreparedModel(std::move(makePreparedModel)), mPreparedModel(std::move(preparedModel)) {
49 CHECK(kMakePreparedModel != nullptr);
50 CHECK(mPreparedModel != nullptr);
51}
52
53nn::SharedPreparedModel ResilientPreparedModel::getPreparedModel() const {
54 std::lock_guard guard(mMutex);
55 return mPreparedModel;
56}
57
// NOTE(review): recovery is not actually implemented here -- both the failing prepared model
// and the blocking flag are ignored, and the current (possibly still-failing) prepared model is
// returned unchanged. kMakePreparedModel is never invoked after construction.
// TODO: when failingPreparedModel matches mPreparedModel, re-create the underlying model via
// kMakePreparedModel(blocking) and swap it in (requires mPreparedModel to be mutable -- confirm
// against the header before implementing).
nn::SharedPreparedModel ResilientPreparedModel::recover(
        const nn::IPreparedModel* /*failingPreparedModel*/, bool /*blocking*/) const {
    std::lock_guard guard(mMutex);
    return mPreparedModel;
}
63
64nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
65ResilientPreparedModel::execute(const nn::Request& request, nn::MeasureTiming measure,
66 const nn::OptionalTimePoint& deadline,
67 const nn::OptionalTimeoutDuration& loopTimeoutDuration) const {
68 return getPreparedModel()->execute(request, measure, deadline, loopTimeoutDuration);
69}
70
71nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
72ResilientPreparedModel::executeFenced(
73 const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
74 nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
75 const nn::OptionalTimeoutDuration& loopTimeoutDuration,
76 const nn::OptionalTimeoutDuration& timeoutDurationAfterFence) const {
77 return getPreparedModel()->executeFenced(request, waitFor, measure, deadline,
78 loopTimeoutDuration, timeoutDurationAfterFence);
79}
80
81std::any ResilientPreparedModel::getUnderlyingResource() const {
82 return getPreparedModel()->getUnderlyingResource();
83}
84
85} // namespace android::hardware::neuralnetworks::utils