Merge changes from topic "revert-1956689-add rkp to identity-default-ENFHZTRTBV"
* changes:
Revert "Fix formatting of identity credential aidl"
Revert "Log to logd in the default identity service"
Revert "Refactor IC support for RKP"
Revert "Add remote key provisioning to the IC HAL"
diff --git a/audio/core/all-versions/vts/functional/7.0/AudioPrimaryHidlHalTest.cpp b/audio/core/all-versions/vts/functional/7.0/AudioPrimaryHidlHalTest.cpp
index 2759801..222fad7 100644
--- a/audio/core/all-versions/vts/functional/7.0/AudioPrimaryHidlHalTest.cpp
+++ b/audio/core/all-versions/vts/functional/7.0/AudioPrimaryHidlHalTest.cpp
@@ -53,9 +53,9 @@
TEST_P(AudioHidlDeviceTest, SetConnectedStateInvalidDeviceAddress) {
doc::test("Check that invalid device address is rejected by IDevice::setConnectedState");
- EXPECT_RESULT(Result::INVALID_ARGUMENTS,
+ EXPECT_RESULT(invalidArgsOrNotSupported,
getDevice()->setConnectedState(getInvalidDeviceAddress(), true));
- EXPECT_RESULT(Result::INVALID_ARGUMENTS,
+ EXPECT_RESULT(invalidArgsOrNotSupported,
getDevice()->setConnectedState(getInvalidDeviceAddress(), false));
}
diff --git a/audio/core/all-versions/vts/functional/VtsHalAudioV6_0TargetTest.xml b/audio/core/all-versions/vts/functional/VtsHalAudioV6_0TargetTest.xml
index f035baf..ae57125 100644
--- a/audio/core/all-versions/vts/functional/VtsHalAudioV6_0TargetTest.xml
+++ b/audio/core/all-versions/vts/functional/VtsHalAudioV6_0TargetTest.xml
@@ -34,5 +34,6 @@
<test class="com.android.tradefed.testtype.GTest" >
<option name="native-test-device-path" value="/data/local/tmp" />
<option name="module-name" value="VtsHalAudioV6_0TargetTest" />
+ <option name="native-test-timeout" value="5m" />
</test>
</configuration>
diff --git a/audio/core/all-versions/vts/functional/VtsHalAudioV7_0TargetTest.xml b/audio/core/all-versions/vts/functional/VtsHalAudioV7_0TargetTest.xml
index 6635f31..55dbaf1 100644
--- a/audio/core/all-versions/vts/functional/VtsHalAudioV7_0TargetTest.xml
+++ b/audio/core/all-versions/vts/functional/VtsHalAudioV7_0TargetTest.xml
@@ -34,5 +34,6 @@
<test class="com.android.tradefed.testtype.GTest" >
<option name="native-test-device-path" value="/data/local/tmp" />
<option name="module-name" value="VtsHalAudioV7_0TargetTest" />
+ <option name="native-test-timeout" value="5m" />
</test>
</configuration>
diff --git a/automotive/vehicle/OWNERS b/automotive/vehicle/OWNERS
new file mode 100644
index 0000000..429ec39
--- /dev/null
+++ b/automotive/vehicle/OWNERS
@@ -0,0 +1,3 @@
+ericjeong@google.com
+keunyoung@google.com
+shanyu@google.com
diff --git a/bluetooth/audio/2.2/default/OWNERS b/bluetooth/audio/2.2/default/OWNERS
new file mode 100644
index 0000000..17ea464
--- /dev/null
+++ b/bluetooth/audio/2.2/default/OWNERS
@@ -0,0 +1,4 @@
+include platform/packages/modules/Bluetooth:/OWNERS
+
+cheneyni@google.com
+aliceypkuo@google.com
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/Android.bp b/bluetooth/audio/aidl/Android.bp
index 12eed55..5107240 100644
--- a/bluetooth/audio/aidl/Android.bp
+++ b/bluetooth/audio/aidl/Android.bp
@@ -43,6 +43,10 @@
vndk: {
enabled: true,
},
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.bluetooth",
+ ],
},
},
}
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecCapabilities.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecCapabilities.aidl
index e2a08a0..948c04a 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecCapabilities.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecCapabilities.aidl
@@ -37,11 +37,16 @@
android.hardware.bluetooth.audio.CodecType codecType;
android.hardware.bluetooth.audio.CodecCapabilities.Capabilities capabilities;
@VintfStability
+ parcelable VendorCapabilities {
+ ParcelableHolder extension;
+ }
+ @VintfStability
union Capabilities {
android.hardware.bluetooth.audio.SbcCapabilities sbcCapabilities;
android.hardware.bluetooth.audio.AacCapabilities aacCapabilities;
android.hardware.bluetooth.audio.LdacCapabilities ldacCapabilities;
android.hardware.bluetooth.audio.AptxCapabilities aptxCapabilities;
android.hardware.bluetooth.audio.Lc3Capabilities lc3Capabilities;
+ android.hardware.bluetooth.audio.CodecCapabilities.VendorCapabilities vendorCapabilities;
}
}
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecConfiguration.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecConfiguration.aidl
index 34ebd60..32bccd8 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecConfiguration.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecConfiguration.aidl
@@ -40,11 +40,18 @@
boolean isScmstEnabled;
android.hardware.bluetooth.audio.CodecConfiguration.CodecSpecific config;
@VintfStability
+ parcelable VendorConfiguration {
+ int vendorId;
+ char codecId;
+ ParcelableHolder codecConfig;
+ }
+ @VintfStability
union CodecSpecific {
android.hardware.bluetooth.audio.SbcConfiguration sbcConfig;
android.hardware.bluetooth.audio.AacConfiguration aacConfig;
android.hardware.bluetooth.audio.LdacConfiguration ldacConfig;
android.hardware.bluetooth.audio.AptxConfiguration aptxConfig;
android.hardware.bluetooth.audio.Lc3Configuration lc3Config;
+ android.hardware.bluetooth.audio.CodecConfiguration.VendorConfiguration vendorConfig;
}
}
diff --git a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecType.aidl b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecType.aidl
index 44b434b..3a5f951 100644
--- a/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecType.aidl
+++ b/bluetooth/audio/aidl/aidl_api/android.hardware.bluetooth.audio/current/android/hardware/bluetooth/audio/CodecType.aidl
@@ -41,4 +41,5 @@
APTX_HD = 4,
LDAC = 5,
LC3 = 6,
+ VENDOR = 7,
}
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecCapabilities.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecCapabilities.aidl
index 5bf0252..41e2431 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecCapabilities.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecCapabilities.aidl
@@ -30,12 +30,17 @@
@VintfStability
parcelable CodecCapabilities {
@VintfStability
+ parcelable VendorCapabilities {
+ ParcelableHolder extension;
+ }
+ @VintfStability
union Capabilities {
SbcCapabilities sbcCapabilities;
AacCapabilities aacCapabilities;
LdacCapabilities ldacCapabilities;
AptxCapabilities aptxCapabilities;
Lc3Capabilities lc3Capabilities;
+ VendorCapabilities vendorCapabilities;
}
CodecType codecType;
Capabilities capabilities;
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecConfiguration.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecConfiguration.aidl
index 9e43f22..3679537 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecConfiguration.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecConfiguration.aidl
@@ -30,12 +30,19 @@
@VintfStability
parcelable CodecConfiguration {
@VintfStability
+ parcelable VendorConfiguration {
+ int vendorId;
+ char codecId;
+ ParcelableHolder codecConfig;
+ }
+ @VintfStability
union CodecSpecific {
SbcConfiguration sbcConfig;
AacConfiguration aacConfig;
LdacConfiguration ldacConfig;
AptxConfiguration aptxConfig;
Lc3Configuration lc3Config;
+ VendorConfiguration vendorConfig;
}
CodecType codecType;
/**
diff --git a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecType.aidl b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecType.aidl
index 68c60f5..9c33081 100644
--- a/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecType.aidl
+++ b/bluetooth/audio/aidl/android/hardware/bluetooth/audio/CodecType.aidl
@@ -26,4 +26,5 @@
APTX_HD,
LDAC,
LC3,
+ VENDOR,
}
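The new VENDOR codec type works together with the VendorCapabilities and VendorConfiguration parcelables above: their ParcelableHolder fields let a vendor carry its own @VintfStability parcelable through the stable interface without extending the enum further. A minimal sketch of how an implementation might report such an entry, following the union set<>/get<> pattern used elsewhere in this change (MyVendorCodecInfo and my_vendor_codec_info are hypothetical and not part of the patch):

    CodecCapabilities vendor_entry;
    vendor_entry.codecType = CodecType::VENDOR;
    CodecCapabilities::VendorCapabilities vendor_caps;
    // vendor_caps.extension.setParcelable(my_vendor_codec_info);  // vendor payload
    vendor_entry.capabilities
        .set<CodecCapabilities::Capabilities::vendorCapabilities>(vendor_caps);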
diff --git a/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.cpp b/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.cpp
new file mode 100644
index 0000000..fc8a911
--- /dev/null
+++ b/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderA2dpHW"
+
+#include "A2dpOffloadAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+A2dpOffloadAudioProvider::A2dpOffloadAudioProvider() {
+ session_type_ = SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH;
+}
+
+bool A2dpOffloadAudioProvider::isValid(const SessionType& session_type) {
+ return (session_type == session_type_);
+}
+
+ndk::ScopedAStatus A2dpOffloadAudioProvider::startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+ if (audio_config.getTag() != AudioConfiguration::a2dpConfig) {
+ LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+ << audio_config.toString();
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ if (!BluetoothAudioCodecs::IsOffloadCodecConfigurationValid(
+ session_type_, audio_config.get<AudioConfiguration::a2dpConfig>())) {
+ LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+ << audio_config.toString();
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ return BluetoothAudioProvider::startSession(host_if, audio_config,
+ _aidl_return);
+}
+
+ndk::ScopedAStatus A2dpOffloadAudioProvider::onSessionReady(
+ DataMQDesc* _aidl_return) {
+ *_aidl_return = DataMQDesc();
+ BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+ nullptr, *audio_config_);
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.h b/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.h
new file mode 100644
index 0000000..5934f5b
--- /dev/null
+++ b/bluetooth/audio/aidl/default/A2dpOffloadAudioProvider.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class A2dpOffloadAudioProvider : public BluetoothAudioProvider {
+ public:
+ A2dpOffloadAudioProvider();
+
+ bool isValid(const SessionType& session_type) override;
+
+ ndk::ScopedAStatus startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+ ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.cpp b/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.cpp
new file mode 100644
index 0000000..5a413e0
--- /dev/null
+++ b/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderA2dpSW"
+
+#include "A2dpSoftwareAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+// Here the buffer size is based on SBC
+static constexpr uint32_t kPcmFrameSize = 4; // 16 bits per sample / stereo
+// SBC uses a PCM frame count of 128; here we pick 96, the LCM of 16, 24, and 32
+static constexpr uint32_t kPcmFrameCount = 96;
+static constexpr uint32_t kRtpFrameSize = kPcmFrameSize * kPcmFrameCount;
+// For SBC, at most about 7 RTP frames fit in one tick (20ms). Since we use 96
+// instead of 128 for the PCM frame count, we pick a slightly larger RTP count
+static constexpr uint32_t kRtpFrameCount = 10;
+static constexpr uint32_t kBufferSize = kRtpFrameSize * kRtpFrameCount;
+static constexpr uint32_t kBufferCount = 2; // double buffer
+static constexpr uint32_t kDataMqSize = kBufferSize * kBufferCount;
+
+A2dpSoftwareAudioProvider::A2dpSoftwareAudioProvider()
+ : BluetoothAudioProvider(), data_mq_(nullptr) {
+ LOG(INFO) << __func__ << " - size of audio buffer " << kDataMqSize
+ << " byte(s)";
+ std::unique_ptr<DataMQ> data_mq(
+ new DataMQ(kDataMqSize, /* EventFlag */ true));
+ if (data_mq && data_mq->isValid()) {
+ data_mq_ = std::move(data_mq);
+ session_type_ = SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH;
+ } else {
+ ALOGE_IF(!data_mq, "failed to allocate data MQ");
+ ALOGE_IF(data_mq && !data_mq->isValid(), "data MQ is invalid");
+ }
+}
+
+bool A2dpSoftwareAudioProvider::isValid(const SessionType& sessionType) {
+ return (sessionType == session_type_ && data_mq_ && data_mq_->isValid());
+}
+
+ndk::ScopedAStatus A2dpSoftwareAudioProvider::startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+ if (audio_config.getTag() != AudioConfiguration::pcmConfig) {
+ LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+ << audio_config.toString();
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ const PcmConfiguration& pcm_config =
+ audio_config.get<AudioConfiguration::pcmConfig>();
+ if (!BluetoothAudioCodecs::IsSoftwarePcmConfigurationValid(pcm_config)) {
+ LOG(WARNING) << __func__ << " - Unsupported PCM Configuration="
+ << pcm_config.toString();
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+
+ return BluetoothAudioProvider::startSession(host_if, audio_config,
+ _aidl_return);
+}
+
+ndk::ScopedAStatus A2dpSoftwareAudioProvider::onSessionReady(
+ DataMQDesc* _aidl_return) {
+ if (data_mq_ == nullptr || !data_mq_->isValid()) {
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *_aidl_return = data_mq_->dupeDesc();
+ auto desc = data_mq_->dupeDesc();
+ BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+ &desc, *audio_config_);
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
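For reference, the SBC-based constants above multiply out as follows (simple arithmetic, not part of the patch):

    // kRtpFrameSize = kPcmFrameSize * kPcmFrameCount = 4 * 96   = 384 bytes
    // kBufferSize   = kRtpFrameSize * kRtpFrameCount = 384 * 10 = 3840 bytes
    // kDataMqSize   = kBufferSize   * kBufferCount   = 3840 * 2 = 7680 bytes
    static_assert(4 * 96 * 10 * 2 == 7680, "A2DP software data MQ is 7680 bytes");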
diff --git a/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.h b/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.h
new file mode 100644
index 0000000..3bc0a13
--- /dev/null
+++ b/bluetooth/audio/aidl/default/A2dpSoftwareAudioProvider.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class A2dpSoftwareAudioProvider : public BluetoothAudioProvider {
+ public:
+ A2dpSoftwareAudioProvider();
+
+ bool isValid(const SessionType& sessionType) override;
+
+ ndk::ScopedAStatus startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+ // audio data queue for software encoding
+ std::unique_ptr<DataMQ> data_mq_;
+
+ ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/aidl/default/Android.bp b/bluetooth/audio/aidl/default/Android.bp
new file mode 100644
index 0000000..fc882d4
--- /dev/null
+++ b/bluetooth/audio/aidl/default/Android.bp
@@ -0,0 +1,34 @@
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "hardware_interfaces_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["hardware_interfaces_license"],
+}
+
+cc_library_shared {
+ name: "android.hardware.bluetooth.audio-impl",
+ vendor: true,
+ vintf_fragments: ["bluetooth_audio.xml"],
+ srcs: [
+ "BluetoothAudioProvider.cpp",
+ "BluetoothAudioProviderFactory.cpp",
+ "A2dpOffloadAudioProvider.cpp",
+ "A2dpSoftwareAudioProvider.cpp",
+ "HearingAidAudioProvider.cpp",
+ "LeAudioOffloadAudioProvider.cpp",
+ "LeAudioSoftwareAudioProvider.cpp",
+ ],
+ export_include_dirs: ["."],
+ header_libs: ["libhardware_headers"],
+ shared_libs: [
+ "libbase",
+ "libbinder_ndk",
+ "libcutils",
+ "libfmq",
+ "liblog",
+ "android.hardware.bluetooth.audio-V1-ndk",
+ "libbluetooth_audio_session_aidl",
+ ],
+}
diff --git a/bluetooth/audio/aidl/default/BluetoothAudioProvider.cpp b/bluetooth/audio/aidl/default/BluetoothAudioProvider.cpp
new file mode 100644
index 0000000..c2ffa2e
--- /dev/null
+++ b/bluetooth/audio/aidl/default/BluetoothAudioProvider.cpp
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderStub"
+
+#include "BluetoothAudioProvider.h"
+
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+BluetoothAudioProvider::BluetoothAudioProvider() {
+ death_recipient_ = ::ndk::ScopedAIBinder_DeathRecipient(
+ AIBinder_DeathRecipient_new(binderDiedCallbackAidl));
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+ if (host_if == nullptr) {
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ audio_config_ = std::make_unique<AudioConfiguration>(audio_config);
+ stack_iface_ = host_if;
+
+ AIBinder_linkToDeath(stack_iface_->asBinder().get(), death_recipient_.get(),
+ this);
+
+ onSessionReady(_aidl_return);
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::endSession() {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_);
+
+ if (stack_iface_ != nullptr) {
+ BluetoothAudioSessionReport::OnSessionEnded(session_type_);
+
+ AIBinder_unlinkToDeath(stack_iface_->asBinder().get(),
+ death_recipient_.get(), this);
+ } else {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO session";
+ }
+
+ stack_iface_ = nullptr;
+ audio_config_ = nullptr;
+
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::streamStarted(
+ BluetoothAudioStatus status) {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", status=" << toString(status);
+
+ if (stack_iface_ != nullptr) {
+ BluetoothAudioSessionReport::ReportControlStatus(session_type_, true,
+ status);
+ } else {
+ LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", status=" << toString(status) << " has NO session";
+ }
+
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::streamSuspended(
+ BluetoothAudioStatus status) {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", status=" << toString(status);
+
+ if (stack_iface_ != nullptr) {
+ BluetoothAudioSessionReport::ReportControlStatus(session_type_, false,
+ status);
+ } else {
+ LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", status=" << toString(status) << " has NO session";
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProvider::updateAudioConfiguration(
+ const AudioConfiguration& audio_config) {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_);
+
+ if (stack_iface_ == nullptr || audio_config_ == nullptr) {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO session";
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+
+ if (audio_config.getTag() != audio_config_->getTag()) {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+              << " audio config type does not match";
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+
+ audio_config_ = std::make_unique<AudioConfiguration>(audio_config);
+ BluetoothAudioSessionReport::ReportAudioConfigChanged(session_type_,
+ *audio_config_);
+ return ndk::ScopedAStatus::ok();
+}
+
+void BluetoothAudioProvider::binderDiedCallbackAidl(void* ptr) {
+ LOG(ERROR) << __func__ << " - BluetoothAudio Service died";
+ auto provider = static_cast<BluetoothAudioProvider*>(ptr);
+ if (provider == nullptr) {
+ LOG(ERROR) << __func__ << ": Null AudioProvider HAL died";
+ return;
+ }
+ provider->endSession();
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/BluetoothAudioProvider.h b/bluetooth/audio/aidl/default/BluetoothAudioProvider.h
new file mode 100644
index 0000000..f7acbdf
--- /dev/null
+++ b/bluetooth/audio/aidl/default/BluetoothAudioProvider.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <aidl/android/hardware/bluetooth/audio/BnBluetoothAudioProvider.h>
+#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
+#include <fmq/AidlMessageQueue.h>
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+
+using MqDataType = int8_t;
+using MqDataMode = SynchronizedReadWrite;
+using DataMQ = AidlMessageQueue<MqDataType, MqDataMode>;
+using DataMQDesc = MQDescriptor<MqDataType, MqDataMode>;
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioProvider : public BnBluetoothAudioProvider {
+ public:
+ BluetoothAudioProvider();
+
+ ndk::ScopedAStatus startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+ ndk::ScopedAStatus endSession();
+ ndk::ScopedAStatus streamStarted(BluetoothAudioStatus status);
+ ndk::ScopedAStatus streamSuspended(BluetoothAudioStatus status);
+ ndk::ScopedAStatus updateAudioConfiguration(
+ const AudioConfiguration& audio_config);
+
+ virtual bool isValid(const SessionType& sessionType) = 0;
+
+ protected:
+ virtual ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) = 0;
+ static void binderDiedCallbackAidl(void* cookie_ptr);
+
+ ::ndk::ScopedAIBinder_DeathRecipient death_recipient_;
+
+ std::shared_ptr<IBluetoothAudioPort> stack_iface_;
+ std::unique_ptr<AudioConfiguration> audio_config_ = nullptr;
+ SessionType session_type_;
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
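The DataMQ/DataMQDesc aliases above form the shared-memory path for the software datapaths: startSession() hands the queue descriptor back to the caller, which rebuilds the queue from it. A rough consumer-side sketch, assuming provider, port and audio_config already exist (illustrative only, not code from the patch):

    DataMQDesc desc;
    provider->startSession(port, audio_config, &desc);
    DataMQ data_mq(desc);  // AidlMessageQueue<int8_t, SynchronizedReadWrite>
    std::vector<MqDataType> pcm(512);
    if (data_mq.isValid() && data_mq.availableToRead() >= pcm.size()) {
      data_mq.read(pcm.data(), pcm.size());  // PCM bytes on an encoding datapath
    }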
diff --git a/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.cpp b/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.cpp
new file mode 100644
index 0000000..1e55a0b
--- /dev/null
+++ b/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.cpp
@@ -0,0 +1,124 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderFactoryAIDL"
+
+#include "BluetoothAudioProviderFactory.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <android-base/logging.h>
+
+#include "A2dpOffloadAudioProvider.h"
+#include "A2dpSoftwareAudioProvider.h"
+#include "BluetoothAudioProvider.h"
+#include "HearingAidAudioProvider.h"
+#include "LeAudioOffloadAudioProvider.h"
+#include "LeAudioSoftwareAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+BluetoothAudioProviderFactory::BluetoothAudioProviderFactory() {}
+
+ndk::ScopedAStatus BluetoothAudioProviderFactory::openProvider(
+ const SessionType session_type,
+ std::shared_ptr<IBluetoothAudioProvider>* _aidl_return) {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type);
+ std::shared_ptr<BluetoothAudioProvider> provider = nullptr;
+
+ switch (session_type) {
+ case SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH:
+ provider = ndk::SharedRefBase::make<A2dpSoftwareAudioProvider>();
+ break;
+ case SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+ provider = ndk::SharedRefBase::make<A2dpOffloadAudioProvider>();
+ break;
+ case SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH:
+ provider = ndk::SharedRefBase::make<HearingAidAudioProvider>();
+ break;
+ case SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH:
+ provider = ndk::SharedRefBase::make<LeAudioSoftwareOutputAudioProvider>();
+ break;
+ case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+ provider = ndk::SharedRefBase::make<LeAudioOffloadOutputAudioProvider>();
+ break;
+ case SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH:
+ provider = ndk::SharedRefBase::make<LeAudioSoftwareInputAudioProvider>();
+ break;
+ case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH:
+ provider = ndk::SharedRefBase::make<LeAudioOffloadInputAudioProvider>();
+ break;
+ default:
+ provider = nullptr;
+ break;
+ }
+
+ if (provider == nullptr || !provider->isValid(session_type)) {
+ provider = nullptr;
+ LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type);
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *_aidl_return = provider;
+
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus BluetoothAudioProviderFactory::getProviderCapabilities(
+ const SessionType session_type,
+ std::vector<AudioCapabilities>* _aidl_return) {
+ if (session_type == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+ auto codec_capabilities =
+ BluetoothAudioCodecs::GetA2dpOffloadCodecCapabilities(session_type);
+ _aidl_return->resize(codec_capabilities.size());
+ for (int i = 0; i < codec_capabilities.size(); i++) {
+ _aidl_return->at(i).set<AudioCapabilities::a2dpCapabilities>(
+ codec_capabilities[i]);
+ }
+ } else if (session_type ==
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH ||
+ session_type ==
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH) {
+ std::vector<LeAudioCodecCapabilitiesSetting> db_codec_capabilities =
+ BluetoothAudioCodecs::GetLeAudioOffloadCodecCapabilities(session_type);
+ if (db_codec_capabilities.size()) {
+ _aidl_return->resize(db_codec_capabilities.size());
+ for (int i = 0; i < db_codec_capabilities.size(); ++i) {
+ _aidl_return->at(i).set<AudioCapabilities::leAudioCapabilities>(
+ db_codec_capabilities[i]);
+ }
+ }
+ } else if (session_type != SessionType::UNKNOWN) {
+ auto pcm_capabilities = BluetoothAudioCodecs::GetSoftwarePcmCapabilities();
+ _aidl_return->resize(pcm_capabilities.size());
+ for (int i = 0; i < pcm_capabilities.size(); i++) {
+ _aidl_return->at(i).set<AudioCapabilities::pcmCapabilities>(
+ pcm_capabilities[i]);
+ }
+ }
+
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type)
+ << " supports " << _aidl_return->size() << " codecs";
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
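A client-side sketch of reaching this factory over NDK binder; the instance name follows from the vintf fragment added in this change, while the surrounding calls are the usual generated-AIDL conventions rather than code from the patch:

    #include <aidl/android/hardware/bluetooth/audio/IBluetoothAudioProviderFactory.h>
    #include <android/binder_manager.h>

    using aidl::android::hardware::bluetooth::audio::AudioCapabilities;
    using aidl::android::hardware::bluetooth::audio::IBluetoothAudioProvider;
    using aidl::android::hardware::bluetooth::audio::IBluetoothAudioProviderFactory;
    using aidl::android::hardware::bluetooth::audio::SessionType;

    const std::string instance =
        std::string(IBluetoothAudioProviderFactory::descriptor) + "/default";
    std::shared_ptr<IBluetoothAudioProviderFactory> factory =
        IBluetoothAudioProviderFactory::fromBinder(
            ndk::SpAIBinder(AServiceManager_waitForService(instance.c_str())));

    std::vector<AudioCapabilities> capabilities;
    factory->getProviderCapabilities(SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH,
                                     &capabilities);

    std::shared_ptr<IBluetoothAudioProvider> provider;
    factory->openProvider(SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH, &provider);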
diff --git a/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.h b/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.h
new file mode 100644
index 0000000..b38cfd2
--- /dev/null
+++ b/bluetooth/audio/aidl/default/BluetoothAudioProviderFactory.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/bluetooth/audio/BnBluetoothAudioProviderFactory.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioProviderFactory : public BnBluetoothAudioProviderFactory {
+ public:
+ BluetoothAudioProviderFactory();
+
+ ndk::ScopedAStatus openProvider(
+ const SessionType session_type,
+ std::shared_ptr<IBluetoothAudioProvider>* _aidl_return) override;
+
+ ndk::ScopedAStatus getProviderCapabilities(
+ const SessionType session_type,
+ std::vector<AudioCapabilities>* _aidl_return) override;
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/aidl/default/HearingAidAudioProvider.cpp b/bluetooth/audio/aidl/default/HearingAidAudioProvider.cpp
new file mode 100644
index 0000000..66ce93b
--- /dev/null
+++ b/bluetooth/audio/aidl/default/HearingAidAudioProvider.cpp
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderHearingAid"
+
+#include "HearingAidAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+static constexpr uint32_t kPcmFrameSize = 4; // 16 bits per sample / stereo
+static constexpr uint32_t kPcmFrameCount = 128;
+static constexpr uint32_t kRtpFrameSize = kPcmFrameSize * kPcmFrameCount;
+static constexpr uint32_t kRtpFrameCount = 7;  // max RTP frames per 20ms tick
+static constexpr uint32_t kBufferSize = kRtpFrameSize * kRtpFrameCount;
+static constexpr uint32_t kBufferCount = 1; // single buffer
+static constexpr uint32_t kDataMqSize = kBufferSize * kBufferCount;
+
+HearingAidAudioProvider::HearingAidAudioProvider()
+ : BluetoothAudioProvider(), data_mq_(nullptr) {
+ LOG(INFO) << __func__ << " - size of audio buffer " << kDataMqSize
+ << " byte(s)";
+ std::unique_ptr<DataMQ> data_mq(
+ new DataMQ(kDataMqSize, /* EventFlag */ true));
+ if (data_mq && data_mq->isValid()) {
+ data_mq_ = std::move(data_mq);
+ session_type_ = SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH;
+ } else {
+ ALOGE_IF(!data_mq, "failed to allocate data MQ");
+ ALOGE_IF(data_mq && !data_mq->isValid(), "data MQ is invalid");
+ }
+}
+bool HearingAidAudioProvider::isValid(const SessionType& sessionType) {
+ return (sessionType == session_type_ && data_mq_ && data_mq_->isValid());
+}
+
+ndk::ScopedAStatus HearingAidAudioProvider::startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+ if (audio_config.getTag() != AudioConfiguration::pcmConfig) {
+ LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+ << audio_config.toString();
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ const auto& pcm_config = audio_config.get<AudioConfiguration::pcmConfig>();
+ if (!BluetoothAudioCodecs::IsSoftwarePcmConfigurationValid(pcm_config)) {
+ LOG(WARNING) << __func__ << " - Unsupported PCM Configuration="
+ << pcm_config.toString();
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+
+ return BluetoothAudioProvider::startSession(host_if, audio_config,
+ _aidl_return);
+}
+
+ndk::ScopedAStatus HearingAidAudioProvider::onSessionReady(
+ DataMQDesc* _aidl_return) {
+ if (data_mq_ == nullptr || !data_mq_->isValid()) {
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *_aidl_return = data_mq_->dupeDesc();
+ auto desc = data_mq_->dupeDesc();
+ BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+ &desc, *audio_config_);
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/HearingAidAudioProvider.h b/bluetooth/audio/aidl/default/HearingAidAudioProvider.h
new file mode 100644
index 0000000..a7e19e9
--- /dev/null
+++ b/bluetooth/audio/aidl/default/HearingAidAudioProvider.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class HearingAidAudioProvider : public BluetoothAudioProvider {
+ public:
+ HearingAidAudioProvider();
+
+ bool isValid(const SessionType& sessionType) override;
+
+ ndk::ScopedAStatus startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+ // audio data queue for software encoding
+ std::unique_ptr<DataMQ> data_mq_;
+
+ ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.cpp b/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.cpp
new file mode 100644
index 0000000..72ac9bd
--- /dev/null
+++ b/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderLeAudioHW"
+
+#include "LeAudioOffloadAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+LeAudioOffloadOutputAudioProvider::LeAudioOffloadOutputAudioProvider()
+ : LeAudioOffloadAudioProvider() {
+ session_type_ = SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH;
+}
+
+LeAudioOffloadInputAudioProvider::LeAudioOffloadInputAudioProvider()
+ : LeAudioOffloadAudioProvider() {
+ session_type_ = SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH;
+}
+
+LeAudioOffloadAudioProvider::LeAudioOffloadAudioProvider()
+ : BluetoothAudioProvider() {}
+
+bool LeAudioOffloadAudioProvider::isValid(const SessionType& sessionType) {
+ return (sessionType == session_type_);
+}
+
+ndk::ScopedAStatus LeAudioOffloadAudioProvider::startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+ if (audio_config.getTag() != AudioConfiguration::leAudioConfig) {
+ LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+ << audio_config.toString();
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ const auto& le_audio_config =
+ audio_config.get<AudioConfiguration::leAudioConfig>();
+ if (!BluetoothAudioCodecs::IsOffloadLeAudioConfigurationValid(
+ session_type_, le_audio_config)) {
+ LOG(WARNING) << __func__ << " - Unsupported LC3 Offloaded Configuration="
+ << le_audio_config.toString();
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+
+ return BluetoothAudioProvider::startSession(host_if, audio_config,
+ _aidl_return);
+}
+
+ndk::ScopedAStatus LeAudioOffloadAudioProvider::onSessionReady(
+ DataMQDesc* _aidl_return) {
+ BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+ nullptr, *audio_config_);
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.h b/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.h
new file mode 100644
index 0000000..a27a2e7
--- /dev/null
+++ b/bluetooth/audio/aidl/default/LeAudioOffloadAudioProvider.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class LeAudioOffloadAudioProvider : public BluetoothAudioProvider {
+ public:
+ LeAudioOffloadAudioProvider();
+
+ bool isValid(const SessionType& sessionType) override;
+
+ ndk::ScopedAStatus startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+ ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+class LeAudioOffloadOutputAudioProvider : public LeAudioOffloadAudioProvider {
+ public:
+ LeAudioOffloadOutputAudioProvider();
+};
+
+class LeAudioOffloadInputAudioProvider : public LeAudioOffloadAudioProvider {
+ public:
+ LeAudioOffloadInputAudioProvider();
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.cpp b/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.cpp
new file mode 100644
index 0000000..67b7d60
--- /dev/null
+++ b/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.cpp
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioProviderLeAudioSW"
+
+#include "LeAudioSoftwareAudioProvider.h"
+
+#include <BluetoothAudioCodecs.h>
+#include <BluetoothAudioSessionReport.h>
+#include <android-base/logging.h>
+
+#include <cstdint>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+static constexpr uint32_t kBufferOutCount = 2; // two frame buffer
+static constexpr uint32_t kBufferInCount = 2; // two frame buffer
+
+inline uint32_t channel_mode_to_channel_count(ChannelMode channel_mode) {
+ switch (channel_mode) {
+ case ChannelMode::MONO:
+ return 1;
+ case ChannelMode::STEREO:
+ return 2;
+ default:
+ return 0;
+ }
+ return 0;
+}
+
+LeAudioSoftwareOutputAudioProvider::LeAudioSoftwareOutputAudioProvider()
+ : LeAudioSoftwareAudioProvider() {
+ session_type_ = SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH;
+}
+
+LeAudioSoftwareInputAudioProvider::LeAudioSoftwareInputAudioProvider()
+ : LeAudioSoftwareAudioProvider() {
+ session_type_ = SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH;
+}
+
+LeAudioSoftwareAudioProvider::LeAudioSoftwareAudioProvider()
+ : BluetoothAudioProvider(), data_mq_(nullptr) {}
+
+bool LeAudioSoftwareAudioProvider::isValid(const SessionType& sessionType) {
+ return (sessionType == session_type_);
+}
+
+ndk::ScopedAStatus LeAudioSoftwareAudioProvider::startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return) {
+ if (audio_config.getTag() != AudioConfiguration::pcmConfig) {
+ LOG(WARNING) << __func__ << " - Invalid Audio Configuration="
+ << audio_config.toString();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ const auto& pcm_config = audio_config.get<AudioConfiguration::pcmConfig>();
+ if (!BluetoothAudioCodecs::IsSoftwarePcmConfigurationValid(pcm_config)) {
+ LOG(WARNING) << __func__ << " - Unsupported PCM Configuration="
+ << pcm_config.toString();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+
+ uint32_t buffer_modifier = 0;
+ if (session_type_ == SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH)
+ buffer_modifier = kBufferOutCount;
+ else if (session_type_ == SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH)
+ buffer_modifier = kBufferInCount;
+
+ uint32_t data_mq_size =
+ (ceil(pcm_config.sampleRateHz) / 1000) *
+ channel_mode_to_channel_count(pcm_config.channelMode) *
+ (pcm_config.bitsPerSample / 8) * (pcm_config.dataIntervalUs / 1000) *
+ buffer_modifier;
+ if (data_mq_size <= 0) {
+    LOG(ERROR) << __func__ << ": Unexpected audio buffer size: " << data_mq_size
+ << ", SampleRateHz: " << pcm_config.sampleRateHz
+ << ", ChannelMode: " << toString(pcm_config.channelMode)
+ << ", BitsPerSample: "
+ << static_cast<int>(pcm_config.bitsPerSample)
+ << ", DataIntervalUs: " << pcm_config.dataIntervalUs
+ << ", SessionType: " << toString(session_type_);
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+
+ LOG(INFO) << __func__ << " - size of audio buffer " << data_mq_size
+ << " byte(s)";
+
+ std::unique_ptr<DataMQ> temp_data_mq(
+ new DataMQ(data_mq_size, /* EventFlag */ true));
+ if (temp_data_mq == nullptr || !temp_data_mq->isValid()) {
+ ALOGE_IF(!temp_data_mq, "failed to allocate data MQ");
+ ALOGE_IF(temp_data_mq && !temp_data_mq->isValid(), "data MQ is invalid");
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ data_mq_ = std::move(temp_data_mq);
+
+ return BluetoothAudioProvider::startSession(host_if, audio_config,
+ _aidl_return);
+}
+
+ndk::ScopedAStatus LeAudioSoftwareAudioProvider::onSessionReady(
+ DataMQDesc* _aidl_return) {
+ if (data_mq_ == nullptr || !data_mq_->isValid()) {
+ *_aidl_return = DataMQDesc();
+ return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_ARGUMENT);
+ }
+ *_aidl_return = data_mq_->dupeDesc();
+ auto desc = data_mq_->dupeDesc();
+ BluetoothAudioSessionReport::OnSessionStarted(session_type_, stack_iface_,
+ &desc, *audio_config_);
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
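To make the sizing formula above concrete: a 48 kHz, stereo, 16-bit stream with a 10 ms data interval on the encoding path (buffer_modifier = kBufferOutCount = 2) works out to a 3840-byte queue (illustrative numbers only, not from the patch):

    // (sampleRateHz / 1000) * channel_count * (bitsPerSample / 8)
    //     * (dataIntervalUs / 1000) * buffer_modifier
    static_assert((48000 / 1000) * 2 * (16 / 8) * (10000 / 1000) * 2 == 3840,
                  "LE audio software MQ: 48 kHz / stereo / 16-bit / 10 ms");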
diff --git a/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.h b/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.h
new file mode 100644
index 0000000..fa58182
--- /dev/null
+++ b/bluetooth/audio/aidl/default/LeAudioSoftwareAudioProvider.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioProvider.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class LeAudioSoftwareAudioProvider : public BluetoothAudioProvider {
+ public:
+ LeAudioSoftwareAudioProvider();
+
+ bool isValid(const SessionType& sessionType) override;
+
+ ndk::ScopedAStatus startSession(
+ const std::shared_ptr<IBluetoothAudioPort>& host_if,
+ const AudioConfiguration& audio_config, DataMQDesc* _aidl_return);
+
+ private:
+ // audio data queue for software encoding
+ std::unique_ptr<DataMQ> data_mq_;
+
+ ndk::ScopedAStatus onSessionReady(DataMQDesc* _aidl_return) override;
+};
+
+class LeAudioSoftwareOutputAudioProvider : public LeAudioSoftwareAudioProvider {
+ public:
+ LeAudioSoftwareOutputAudioProvider();
+};
+
+class LeAudioSoftwareInputAudioProvider : public LeAudioSoftwareAudioProvider {
+ public:
+ LeAudioSoftwareInputAudioProvider();
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/OWNERS b/bluetooth/audio/aidl/default/OWNERS
new file mode 100644
index 0000000..17ea464
--- /dev/null
+++ b/bluetooth/audio/aidl/default/OWNERS
@@ -0,0 +1,4 @@
+include platform/packages/modules/Bluetooth:/OWNERS
+
+cheneyni@google.com
+aliceypkuo@google.com
\ No newline at end of file
diff --git a/bluetooth/audio/aidl/default/bluetooth_audio.xml b/bluetooth/audio/aidl/default/bluetooth_audio.xml
new file mode 100644
index 0000000..1859a1a
--- /dev/null
+++ b/bluetooth/audio/aidl/default/bluetooth_audio.xml
@@ -0,0 +1,6 @@
+<manifest version="1.0" type="device">
+ <hal format="aidl">
+ <name>android.hardware.bluetooth.audio</name>
+ <fqname>IBluetoothAudioProviderFactory/default</fqname>
+ </hal>
+</manifest>
\ No newline at end of file
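The fragment above only declares the instance; a vendor service binary (not included in this change) still has to register the factory, roughly along these lines:

    #include <android/binder_manager.h>
    #include <android/binder_process.h>

    #include <string>

    #include "BluetoothAudioProviderFactory.h"

    using aidl::android::hardware::bluetooth::audio::BluetoothAudioProviderFactory;

    int main() {
      auto factory = ndk::SharedRefBase::make<BluetoothAudioProviderFactory>();
      const std::string instance =
          std::string(BluetoothAudioProviderFactory::descriptor) + "/default";
      if (AServiceManager_addService(factory->asBinder().get(), instance.c_str()) !=
          STATUS_OK) {
        return 1;
      }
      ABinderProcess_joinThreadPool();
      return 0;  // not reached
    }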
diff --git a/bluetooth/audio/aidl/vts/OWNERS b/bluetooth/audio/aidl/vts/OWNERS
new file mode 100644
index 0000000..17ea464
--- /dev/null
+++ b/bluetooth/audio/aidl/vts/OWNERS
@@ -0,0 +1,4 @@
+include platform/packages/modules/Bluetooth:/OWNERS
+
+cheneyni@google.com
+aliceypkuo@google.com
\ No newline at end of file
diff --git a/bluetooth/audio/utils/Android.bp b/bluetooth/audio/utils/Android.bp
index 4f712bf..974357e 100644
--- a/bluetooth/audio/utils/Android.bp
+++ b/bluetooth/audio/utils/Android.bp
@@ -32,5 +32,30 @@
"libhidlbase",
"liblog",
"libutils",
+ "libbluetooth_audio_session_aidl",
+ ],
+}
+
+cc_library_shared {
+ name: "libbluetooth_audio_session_aidl",
+ vendor: true,
+ srcs: [
+ "aidl_session/BluetoothAudioCodecs.cpp",
+ "aidl_session/BluetoothAudioSession.cpp",
+ "aidl_session/HidlToAidlMiddleware.cpp",
+ ],
+ export_include_dirs: ["aidl_session/"],
+ header_libs: ["libhardware_headers"],
+ shared_libs: [
+ "android.hardware.bluetooth.audio@2.0",
+ "android.hardware.bluetooth.audio@2.1",
+ "android.hardware.bluetooth.audio@2.2",
+ "libbase",
+ "libcutils",
+ "libbinder_ndk",
+ "libfmq",
+ "liblog",
+ "android.hardware.bluetooth.audio-V1-ndk",
+ "libhidlbase",
],
}
diff --git a/bluetooth/audio/utils/OWNERS b/bluetooth/audio/utils/OWNERS
index ed92847..17ea464 100644
--- a/bluetooth/audio/utils/OWNERS
+++ b/bluetooth/audio/utils/OWNERS
@@ -1,3 +1,4 @@
include platform/packages/modules/Bluetooth:/OWNERS
-cheneyni@google.com
\ No newline at end of file
+cheneyni@google.com
+aliceypkuo@google.com
\ No newline at end of file
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.cpp b/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.cpp
new file mode 100644
index 0000000..516ebe8
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.cpp
@@ -0,0 +1,490 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BTAudioCodecsAidl"
+
+#include "BluetoothAudioCodecs.h"
+
+#include <aidl/android/hardware/bluetooth/audio/AacCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/AacObjectType.h>
+#include <aidl/android/hardware/bluetooth/audio/AptxCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/ChannelMode.h>
+#include <aidl/android/hardware/bluetooth/audio/LdacCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/LdacChannelMode.h>
+#include <aidl/android/hardware/bluetooth/audio/LdacQualityIndex.h>
+#include <aidl/android/hardware/bluetooth/audio/LeAudioConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/SbcCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/SbcChannelMode.h>
+#include <android-base/logging.h>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+static const PcmCapabilities kDefaultSoftwarePcmCapabilities = {
+ .sampleRateHz = {16000, 24000, 44100, 48000, 88200, 96000},
+ .channelMode = {ChannelMode::MONO, ChannelMode::STEREO},
+ .bitsPerSample = {16, 24, 32},
+ .dataIntervalUs = {},
+};
+
+static const SbcCapabilities kDefaultOffloadSbcCapability = {
+ .sampleRateHz = {44100},
+ .channelMode = {SbcChannelMode::MONO, SbcChannelMode::JOINT_STEREO},
+ .blockLength = {4, 8, 12, 16},
+ .numSubbands = {8},
+ .allocMethod = {SbcAllocMethod::ALLOC_MD_L},
+ .bitsPerSample = {16},
+ .minBitpool = 2,
+ .maxBitpool = 53};
+
+static const AacCapabilities kDefaultOffloadAacCapability = {
+ .objectType = {AacObjectType::MPEG2_LC},
+ .sampleRateHz = {44100},
+ .channelMode = {ChannelMode::STEREO},
+ .variableBitRateSupported = true,
+ .bitsPerSample = {16}};
+
+static const LdacCapabilities kDefaultOffloadLdacCapability = {
+ .sampleRateHz = {44100, 48000, 88200, 96000},
+ .channelMode = {LdacChannelMode::DUAL, LdacChannelMode::STEREO},
+ .qualityIndex = {LdacQualityIndex::HIGH},
+ .bitsPerSample = {16, 24, 32}};
+
+static const AptxCapabilities kDefaultOffloadAptxCapability = {
+ .sampleRateHz = {44100, 48000},
+ .channelMode = {ChannelMode::STEREO},
+ .bitsPerSample = {16},
+};
+
+static const AptxCapabilities kDefaultOffloadAptxHdCapability = {
+ .sampleRateHz = {44100, 48000},
+ .channelMode = {ChannelMode::STEREO},
+ .bitsPerSample = {24},
+};
+
+static const Lc3Capabilities kDefaultA2dpOffloadLc3Capability = {
+ .samplingFrequencyHz = {44100, 48000},
+ .frameDurationUs = {7500, 10000},
+ .channelMode = {ChannelMode::MONO, ChannelMode::STEREO},
+};
+
+const std::vector<CodecCapabilities> kDefaultOffloadA2dpCodecCapabilities = {
+ {.codecType = CodecType::SBC, .capabilities = {}},
+ {.codecType = CodecType::AAC, .capabilities = {}},
+ {.codecType = CodecType::LDAC, .capabilities = {}},
+ {.codecType = CodecType::APTX, .capabilities = {}},
+ {.codecType = CodecType::APTX_HD, .capabilities = {}},
+ {.codecType = CodecType::LC3, .capabilities = {}}};
+
+std::vector<LeAudioCodecCapabilitiesSetting> kDefaultOffloadLeAudioCapabilities;
+
+static const UnicastCapability kInvalidUnicastCapability = {
+ .codecType = CodecType::UNKNOWN};
+
+static const BroadcastCapability kInvalidBroadcastCapability = {
+ .codecType = CodecType::UNKNOWN};
+
+// Default Supported Codecs
+// LC3 16_1: sample rate: 16 kHz, frame duration: 7.5 ms, octets per frame: 30
+static const Lc3Capabilities kLc3Capability_16_1 = {
+ .samplingFrequencyHz = {16000},
+ .frameDurationUs = {7500},
+ .octetsPerFrame = {30}};
+
+// Default Supported Codecs
+// LC3 16_2: sample rate: 16 kHz, frame duration: 10 ms, octets per frame: 40
+static const Lc3Capabilities kLc3Capability_16_2 = {
+ .samplingFrequencyHz = {16000},
+ .frameDurationUs = {10000},
+ .octetsPerFrame = {40}};
+
+// Default Supported Codecs
+// LC3 48_4: sample rate: 48 kHz, frame duration: 10 ms, octets per frame: 120
+static const Lc3Capabilities kLc3Capability_48_4 = {
+ .samplingFrequencyHz = {48000},
+ .frameDurationUs = {10000},
+ .octetsPerFrame = {120}};
+
+static const std::vector<Lc3Capabilities> supportedLc3CapabilityList = {
+ kLc3Capability_48_4, kLc3Capability_16_2, kLc3Capability_16_1};
+
+static AudioLocation stereoAudio = static_cast<AudioLocation>(
+ static_cast<uint8_t>(AudioLocation::FRONT_LEFT) |
+ static_cast<uint8_t>(AudioLocation::FRONT_RIGHT));
+static AudioLocation monoAudio = AudioLocation::UNKNOWN;
+
+// Stores the supported setting of audio location, connected device, and the
+// channel count for each device
+std::vector<std::tuple<AudioLocation, uint8_t, uint8_t>>
+ supportedDeviceSetting = {std::make_tuple(stereoAudio, 2, 1),
+ std::make_tuple(monoAudio, 1, 2),
+ std::make_tuple(monoAudio, 1, 1)};
+
+template <class T>
+bool BluetoothAudioCodecs::ContainedInVector(
+ const std::vector<T>& vector, const typename identity<T>::type& target) {
+ return std::find(vector.begin(), vector.end(), target) != vector.end();
+}
+
+bool BluetoothAudioCodecs::IsOffloadSbcConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific) {
+ if (codec_specific.getTag() != CodecConfiguration::CodecSpecific::sbcConfig) {
+ LOG(WARNING) << __func__
+ << ": Invalid CodecSpecific=" << codec_specific.toString();
+ return false;
+ }
+ const SbcConfiguration sbc_data =
+ codec_specific.get<CodecConfiguration::CodecSpecific::sbcConfig>();
+
+ if (ContainedInVector(kDefaultOffloadSbcCapability.sampleRateHz,
+ sbc_data.sampleRateHz) &&
+ ContainedInVector(kDefaultOffloadSbcCapability.blockLength,
+ sbc_data.blockLength) &&
+ ContainedInVector(kDefaultOffloadSbcCapability.numSubbands,
+ sbc_data.numSubbands) &&
+ ContainedInVector(kDefaultOffloadSbcCapability.bitsPerSample,
+ sbc_data.bitsPerSample) &&
+ ContainedInVector(kDefaultOffloadSbcCapability.channelMode,
+ sbc_data.channelMode) &&
+ ContainedInVector(kDefaultOffloadSbcCapability.allocMethod,
+ sbc_data.allocMethod) &&
+ sbc_data.minBitpool <= sbc_data.maxBitpool &&
+ kDefaultOffloadSbcCapability.minBitpool <= sbc_data.minBitpool &&
+ kDefaultOffloadSbcCapability.maxBitpool >= sbc_data.maxBitpool) {
+ return true;
+ }
+ LOG(WARNING) << __func__
+ << ": Unsupported CodecSpecific=" << codec_specific.toString();
+ return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadAacConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific) {
+ if (codec_specific.getTag() != CodecConfiguration::CodecSpecific::aacConfig) {
+ LOG(WARNING) << __func__
+ << ": Invalid CodecSpecific=" << codec_specific.toString();
+ return false;
+ }
+ const AacConfiguration aac_data =
+ codec_specific.get<CodecConfiguration::CodecSpecific::aacConfig>();
+
+ if (ContainedInVector(kDefaultOffloadAacCapability.sampleRateHz,
+ aac_data.sampleRateHz) &&
+ ContainedInVector(kDefaultOffloadAacCapability.bitsPerSample,
+ aac_data.bitsPerSample) &&
+ ContainedInVector(kDefaultOffloadAacCapability.channelMode,
+ aac_data.channelMode) &&
+ ContainedInVector(kDefaultOffloadAacCapability.objectType,
+ aac_data.objectType) &&
+ (!aac_data.variableBitRateEnabled ||
+ kDefaultOffloadAacCapability.variableBitRateSupported)) {
+ return true;
+ }
+ LOG(WARNING) << __func__
+ << ": Unsupported CodecSpecific=" << codec_specific.toString();
+ return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadLdacConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific) {
+ if (codec_specific.getTag() !=
+ CodecConfiguration::CodecSpecific::ldacConfig) {
+ LOG(WARNING) << __func__
+ << ": Invalid CodecSpecific=" << codec_specific.toString();
+ return false;
+ }
+ const LdacConfiguration ldac_data =
+ codec_specific.get<CodecConfiguration::CodecSpecific::ldacConfig>();
+
+ if (ContainedInVector(kDefaultOffloadLdacCapability.sampleRateHz,
+ ldac_data.sampleRateHz) &&
+ ContainedInVector(kDefaultOffloadLdacCapability.bitsPerSample,
+ ldac_data.bitsPerSample) &&
+ ContainedInVector(kDefaultOffloadLdacCapability.channelMode,
+ ldac_data.channelMode) &&
+ ContainedInVector(kDefaultOffloadLdacCapability.qualityIndex,
+ ldac_data.qualityIndex)) {
+ return true;
+ }
+ LOG(WARNING) << __func__
+ << ": Unsupported CodecSpecific=" << codec_specific.toString();
+ return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadAptxConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific) {
+ if (codec_specific.getTag() !=
+ CodecConfiguration::CodecSpecific::aptxConfig) {
+ LOG(WARNING) << __func__
+ << ": Invalid CodecSpecific=" << codec_specific.toString();
+ return false;
+ }
+ const AptxConfiguration aptx_data =
+ codec_specific.get<CodecConfiguration::CodecSpecific::aptxConfig>();
+
+ if (ContainedInVector(kDefaultOffloadAptxCapability.sampleRateHz,
+ aptx_data.sampleRateHz) &&
+ ContainedInVector(kDefaultOffloadAptxCapability.bitsPerSample,
+ aptx_data.bitsPerSample) &&
+ ContainedInVector(kDefaultOffloadAptxCapability.channelMode,
+ aptx_data.channelMode)) {
+ return true;
+ }
+ LOG(WARNING) << __func__
+ << ": Unsupported CodecSpecific=" << codec_specific.toString();
+ return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadAptxHdConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific) {
+ if (codec_specific.getTag() !=
+ CodecConfiguration::CodecSpecific::aptxConfig) {
+ LOG(WARNING) << __func__
+ << ": Invalid CodecSpecific=" << codec_specific.toString();
+ return false;
+ }
+ const AptxConfiguration aptx_data =
+ codec_specific.get<CodecConfiguration::CodecSpecific::aptxConfig>();
+
+ if (ContainedInVector(kDefaultOffloadAptxHdCapability.sampleRateHz,
+ aptx_data.sampleRateHz) &&
+ ContainedInVector(kDefaultOffloadAptxHdCapability.bitsPerSample,
+ aptx_data.bitsPerSample) &&
+ ContainedInVector(kDefaultOffloadAptxHdCapability.channelMode,
+ aptx_data.channelMode)) {
+ return true;
+ }
+ LOG(WARNING) << __func__
+ << ": Unsupported CodecSpecific=" << codec_specific.toString();
+ return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadLc3ConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific) {
+ if (codec_specific.getTag() != CodecConfiguration::CodecSpecific::lc3Config) {
+ LOG(WARNING) << __func__
+ << ": Invalid CodecSpecific=" << codec_specific.toString();
+ return false;
+ }
+ const Lc3Configuration lc3_data =
+ codec_specific.get<CodecConfiguration::CodecSpecific::lc3Config>();
+
+ if (ContainedInVector(kDefaultA2dpOffloadLc3Capability.samplingFrequencyHz,
+ lc3_data.samplingFrequencyHz) &&
+ ContainedInVector(kDefaultA2dpOffloadLc3Capability.frameDurationUs,
+ lc3_data.frameDurationUs) &&
+ ContainedInVector(kDefaultA2dpOffloadLc3Capability.channelMode,
+ lc3_data.channelMode)) {
+ return true;
+ }
+ LOG(WARNING) << __func__
+ << ": Unsupported CodecSpecific=" << codec_specific.toString();
+ return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadLeAudioConfigurationValid(
+ const SessionType& session_type, const LeAudioConfiguration&) {
+ if (session_type !=
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
+ session_type !=
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH) {
+ return false;
+ }
+ return true;
+}
+
+std::vector<PcmCapabilities>
+BluetoothAudioCodecs::GetSoftwarePcmCapabilities() {
+ return {kDefaultSoftwarePcmCapabilities};
+}
+
+std::vector<CodecCapabilities>
+BluetoothAudioCodecs::GetA2dpOffloadCodecCapabilities(
+ const SessionType& session_type) {
+ if (session_type != SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+ return {};
+ }
+ std::vector<CodecCapabilities> offload_a2dp_codec_capabilities =
+ kDefaultOffloadA2dpCodecCapabilities;
+ for (auto& codec_capability : offload_a2dp_codec_capabilities) {
+ switch (codec_capability.codecType) {
+ case CodecType::SBC:
+ codec_capability.capabilities
+ .set<CodecCapabilities::Capabilities::sbcCapabilities>(
+ kDefaultOffloadSbcCapability);
+ break;
+ case CodecType::AAC:
+ codec_capability.capabilities
+ .set<CodecCapabilities::Capabilities::aacCapabilities>(
+ kDefaultOffloadAacCapability);
+ break;
+ case CodecType::LDAC:
+ codec_capability.capabilities
+ .set<CodecCapabilities::Capabilities::ldacCapabilities>(
+ kDefaultOffloadLdacCapability);
+ break;
+ case CodecType::APTX:
+ codec_capability.capabilities
+ .set<CodecCapabilities::Capabilities::aptxCapabilities>(
+ kDefaultOffloadAptxCapability);
+ break;
+ case CodecType::APTX_HD:
+ codec_capability.capabilities
+ .set<CodecCapabilities::Capabilities::aptxCapabilities>(
+ kDefaultOffloadAptxHdCapability);
+ break;
+ case CodecType::LC3:
+ codec_capability.capabilities
+ .set<CodecCapabilities::Capabilities::lc3Capabilities>(
+ kDefaultA2dpOffloadLc3Capability);
+ break;
+ case CodecType::UNKNOWN:
+ case CodecType::VENDOR:
+ break;
+ }
+ }
+ return offload_a2dp_codec_capabilities;
+}
+
+bool BluetoothAudioCodecs::IsSoftwarePcmConfigurationValid(
+ const PcmConfiguration& pcm_config) {
+ if (ContainedInVector(kDefaultSoftwarePcmCapabilities.sampleRateHz,
+ pcm_config.sampleRateHz) &&
+ ContainedInVector(kDefaultSoftwarePcmCapabilities.bitsPerSample,
+ pcm_config.bitsPerSample) &&
+ ContainedInVector(kDefaultSoftwarePcmCapabilities.channelMode,
+ pcm_config.channelMode)
+ // data interval is not checked for now
+ // && pcm_config.dataIntervalUs != 0
+ ) {
+ return true;
+ }
+ LOG(WARNING) << __func__
+ << ": Unsupported CodecSpecific=" << pcm_config.toString();
+ return false;
+}
+
+bool BluetoothAudioCodecs::IsOffloadCodecConfigurationValid(
+ const SessionType& session_type, const CodecConfiguration& codec_config) {
+ if (session_type != SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+ LOG(ERROR) << __func__
+ << ": Invalid SessionType=" << toString(session_type);
+ return false;
+ }
+ const CodecConfiguration::CodecSpecific& codec_specific = codec_config.config;
+ switch (codec_config.codecType) {
+ case CodecType::SBC:
+ if (IsOffloadSbcConfigurationValid(codec_specific)) {
+ return true;
+ }
+ break;
+ case CodecType::AAC:
+ if (IsOffloadAacConfigurationValid(codec_specific)) {
+ return true;
+ }
+ break;
+ case CodecType::LDAC:
+ if (IsOffloadLdacConfigurationValid(codec_specific)) {
+ return true;
+ }
+ break;
+ case CodecType::APTX:
+ if (IsOffloadAptxConfigurationValid(codec_specific)) {
+ return true;
+ }
+ break;
+ case CodecType::APTX_HD:
+ if (IsOffloadAptxHdConfigurationValid(codec_specific)) {
+ return true;
+ }
+ break;
+ case CodecType::LC3:
+ if (IsOffloadLc3ConfigurationValid(codec_specific)) {
+ return true;
+ }
+ break;
+ case CodecType::UNKNOWN:
+ case CodecType::VENDOR:
+ break;
+ }
+ return false;
+}
+
+UnicastCapability composeUnicastLc3Capability(
+ AudioLocation audioLocation, uint8_t deviceCnt, uint8_t channelCount,
+ const Lc3Capabilities& capability) {
+ return {
+ .codecType = CodecType::LC3,
+ .supportedChannel = audioLocation,
+ .deviceCount = deviceCnt,
+ .channelCountPerDevice = channelCount,
+ .leAudioCodecCapabilities =
+ UnicastCapability::LeAudioCodecCapabilities(capability),
+ };
+}
+
+std::vector<LeAudioCodecCapabilitiesSetting>
+BluetoothAudioCodecs::GetLeAudioOffloadCodecCapabilities(
+ const SessionType& session_type) {
+ if (session_type !=
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
+ session_type !=
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH) {
+ return std::vector<LeAudioCodecCapabilitiesSetting>(0);
+ }
+
+ if (kDefaultOffloadLeAudioCapabilities.empty()) {
+ for (auto [audioLocation, deviceCnt, channelCount] :
+ supportedDeviceSetting) {
+ for (auto capability : supportedLc3CapabilityList) {
+ UnicastCapability lc3Capability = composeUnicastLc3Capability(
+ audioLocation, deviceCnt, channelCount, capability);
+ UnicastCapability lc3MonoDecodeCapability =
+ composeUnicastLc3Capability(monoAudio, 1, 1, capability);
+
+ // Adds the capability for encode only
+ kDefaultOffloadLeAudioCapabilities.push_back(
+ {.unicastEncodeCapability = lc3Capability,
+ .unicastDecodeCapability = kInvalidUnicastCapability,
+ .broadcastCapability = kInvalidBroadcastCapability});
+
+ // Adds the capability for decode only
+ kDefaultOffloadLeAudioCapabilities.push_back(
+ {.unicastEncodeCapability = kInvalidUnicastCapability,
+ .unicastDecodeCapability = lc3Capability,
+ .broadcastCapability = kInvalidBroadcastCapability});
+
+ // Adds the capability for the case that encode and decode exist at the
+ // same time
+ kDefaultOffloadLeAudioCapabilities.push_back(
+ {.unicastEncodeCapability = lc3Capability,
+ .unicastDecodeCapability = lc3MonoDecodeCapability,
+ .broadcastCapability = kInvalidBroadcastCapability});
+ }
+ }
+ }
+
+ return kDefaultOffloadLeAudioCapabilities;
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
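
The IsOffload*ConfigurationValid checks above all reduce to the same membership test: a requested configuration is accepted only if every field value appears in the corresponding default capability list (plus the bitpool range check for SBC). A minimal standalone sketch of that pattern, using simplified SbcCaps / SbcConfig stand-ins rather than the AIDL types from this change:

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

template <class T>
static bool ContainedInVector(const std::vector<T>& vector, const T& target) {
  return std::find(vector.begin(), vector.end(), target) != vector.end();
}

// Simplified stand-ins for the AIDL SbcCapabilities / SbcConfiguration.
struct SbcCaps {
  std::vector<int32_t> sampleRateHz;
  std::vector<int8_t> bitsPerSample;
  int8_t minBitpool;
  int8_t maxBitpool;
};
struct SbcConfig {
  int32_t sampleRateHz;
  int8_t bitsPerSample;
  int8_t minBitpool;
  int8_t maxBitpool;
};

// Accept the configuration only if every field lies inside the capability.
static bool IsValid(const SbcCaps& caps, const SbcConfig& cfg) {
  return ContainedInVector(caps.sampleRateHz, cfg.sampleRateHz) &&
         ContainedInVector(caps.bitsPerSample, cfg.bitsPerSample) &&
         cfg.minBitpool <= cfg.maxBitpool &&
         caps.minBitpool <= cfg.minBitpool && caps.maxBitpool >= cfg.maxBitpool;
}

int main() {
  SbcCaps caps{{44100}, {16}, 2, 53};
  std::cout << IsValid(caps, {44100, 16, 2, 53}) << "\n";  // 1: accepted
  std::cout << IsValid(caps, {48000, 16, 2, 53}) << "\n";  // 0: rate unsupported
}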
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.h b/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.h
new file mode 100644
index 0000000..0259a7e
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioCodecs.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/bluetooth/audio/CodecCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/CodecConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/Lc3Configuration.h>
+#include <aidl/android/hardware/bluetooth/audio/LeAudioCodecCapabilitiesSetting.h>
+#include <aidl/android/hardware/bluetooth/audio/LeAudioConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/PcmCapabilities.h>
+#include <aidl/android/hardware/bluetooth/audio/PcmConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
+
+#include <vector>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioCodecs {
+ public:
+ static std::vector<PcmCapabilities> GetSoftwarePcmCapabilities();
+ static std::vector<CodecCapabilities> GetA2dpOffloadCodecCapabilities(
+ const SessionType& session_type);
+
+ static bool IsSoftwarePcmConfigurationValid(
+ const PcmConfiguration& pcm_config);
+ static bool IsOffloadCodecConfigurationValid(
+ const SessionType& session_type, const CodecConfiguration& codec_config);
+
+ static bool IsOffloadLeAudioConfigurationValid(
+ const SessionType& session_type, const LeAudioConfiguration&);
+
+ static std::vector<LeAudioCodecCapabilitiesSetting>
+ GetLeAudioOffloadCodecCapabilities(const SessionType& session_type);
+
+ private:
+ template <typename T>
+ struct identity {
+ typedef T type;
+ };
+ template <class T>
+ static bool ContainedInVector(const std::vector<T>& vector,
+ const typename identity<T>::type& target);
+ template <class T>
+ static bool ContainedInBitmask(const T& bitmask, const T& target);
+ static bool IsSingleBit(uint32_t bitmasks, uint32_t bitfield);
+ static bool IsOffloadSbcConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific);
+ static bool IsOffloadAacConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific);
+ static bool IsOffloadLdacConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific);
+ static bool IsOffloadAptxConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific);
+ static bool IsOffloadAptxHdConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific);
+ static bool IsOffloadLc3ConfigurationValid(
+ const CodecConfiguration::CodecSpecific& codec_specific);
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
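
The identity<T> helper in the private section above keeps the second argument of ContainedInVector out of template deduction, so callers can pass values that merely convert to the vector's element type (an int literal against a std::vector<int8_t>, for example) without a deduction conflict. A standalone illustration of that C++ idiom, independent of the header itself:

#include <cstdint>
#include <vector>

template <typename T>
struct identity {
  typedef T type;
};

// T is deduced from the vector only; `target` sits in a non-deduced context.
template <class T>
bool ContainedInVector(const std::vector<T>& v,
                       const typename identity<T>::type& target) {
  for (const auto& element : v) {
    if (element == target) return true;
  }
  return false;
}

int main() {
  std::vector<int8_t> block_lengths = {4, 8, 12, 16};
  // With a plain `const T&` second parameter this call would not compile:
  // the vector deduces T = int8_t while the literal 16 deduces T = int.
  return ContainedInVector(block_lengths, 16) ? 0 : 1;
}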
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.cpp b/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.cpp
new file mode 100644
index 0000000..f626db8
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.cpp
@@ -0,0 +1,550 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <sys/types.h>
+#define LOG_TAG "BTAudioSessionAidl"
+
+#include <android-base/logging.h>
+#include <android-base/stringprintf.h>
+#include <android/binder_manager.h>
+
+#include "BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+static constexpr int kFmqSendTimeoutMs = 1000; // 1000 ms timeout for sending
+static constexpr int kFmqReceiveTimeoutMs =
+ 1000; // 1000 ms timeout for receiving
+static constexpr int kWritePollMs = 1; // polled non-blocking interval
+static constexpr int kReadPollMs = 1; // polled non-blocking interval
+
+BluetoothAudioSession::BluetoothAudioSession(const SessionType& session_type)
+ : session_type_(session_type), stack_iface_(nullptr), data_mq_(nullptr) {}
+
+/***
+ *
+ * Callback methods
+ *
+ ***/
+
+void BluetoothAudioSession::OnSessionStarted(
+ const std::shared_ptr<IBluetoothAudioPort> stack_iface,
+ const DataMQDesc* mq_desc, const AudioConfiguration& audio_config) {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (stack_iface == nullptr) {
+ LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", IBluetoothAudioPort Invalid";
+ } else if (!UpdateAudioConfig(audio_config)) {
+ LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", AudioConfiguration=" << audio_config.toString()
+ << " Invalid";
+ } else if (!UpdateDataPath(mq_desc)) {
+ LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type_)
+ << " MqDescriptor Invalid";
+ audio_config_ = nullptr;
+ } else {
+ stack_iface_ = stack_iface;
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", AudioConfiguration=" << audio_config.toString();
+ ReportSessionStatus();
+ }
+}
+
+void BluetoothAudioSession::OnSessionEnded() {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ bool toggled = IsSessionReady();
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_);
+ audio_config_ = nullptr;
+ stack_iface_ = nullptr;
+ UpdateDataPath(nullptr);
+ if (toggled) {
+ ReportSessionStatus();
+ }
+}
+
+/***
+ *
+ * Util methods
+ *
+ ***/
+
+const AudioConfiguration BluetoothAudioSession::GetAudioConfig() {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (!IsSessionReady()) {
+ switch (session_type_) {
+ case SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+ return AudioConfiguration(CodecConfiguration{});
+ case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+ case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH:
+ return AudioConfiguration(LeAudioConfiguration{});
+ default:
+ return AudioConfiguration(PcmConfiguration{});
+ }
+ }
+ return *audio_config_;
+}
+
+void BluetoothAudioSession::ReportAudioConfigChanged(
+ const AudioConfiguration& audio_config) {
+ if (session_type_ !=
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
+ session_type_ !=
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH) {
+ return;
+ }
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ audio_config_ = std::make_unique<AudioConfiguration>(audio_config);
+ if (observers_.empty()) {
+ LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO port state observer";
+ return;
+ }
+ for (auto& observer : observers_) {
+ uint16_t cookie = observer.first;
+ std::shared_ptr<struct PortStatusCallbacks> cb = observer.second;
+ LOG(INFO) << __func__ << " for SessionType=" << toString(session_type_)
+ << ", bluetooth_audio=0x"
+ << ::android::base::StringPrintf("%04x", cookie);
+ if (cb->audio_configuration_changed_cb_ != nullptr) {
+ cb->audio_configuration_changed_cb_(cookie);
+ }
+ }
+}
+
+bool BluetoothAudioSession::IsSessionReady() {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+
+ bool is_mq_valid =
+ (session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH ||
+ session_type_ ==
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH ||
+ session_type_ ==
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH ||
+ (data_mq_ != nullptr && data_mq_->isValid()));
+ return stack_iface_ != nullptr && is_mq_valid && audio_config_ != nullptr;
+}
+
+/***
+ *
+ * Status callback methods
+ *
+ ***/
+
+uint16_t BluetoothAudioSession::RegisterStatusCback(
+ const PortStatusCallbacks& callbacks) {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ uint16_t cookie = ObserversCookieGetInitValue(session_type_);
+ uint16_t cookie_upper_bound = ObserversCookieGetUpperBound(session_type_);
+
+ while (cookie < cookie_upper_bound) {
+ if (observers_.find(cookie) == observers_.end()) {
+ break;
+ }
+ ++cookie;
+ }
+ if (cookie >= cookie_upper_bound) {
+ LOG(ERROR) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has " << observers_.size()
+ << " observers already (No Resource)";
+ return kObserversCookieUndefined;
+ }
+ std::shared_ptr<PortStatusCallbacks> cb =
+ std::make_shared<PortStatusCallbacks>();
+ *cb = callbacks;
+ observers_[cookie] = cb;
+ return cookie;
+}
+
+void BluetoothAudioSession::UnregisterStatusCback(uint16_t cookie) {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (observers_.erase(cookie) != 1) {
+ LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+ << " no such provider=0x"
+ << ::android::base::StringPrintf("%04x", cookie);
+ }
+}
+
+/***
+ *
+ * Stream methods
+ *
+ ***/
+
+bool BluetoothAudioSession::StartStream() {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (!IsSessionReady()) {
+ LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO session";
+ return false;
+ }
+ auto hal_retval = stack_iface_->startStream();
+ if (!hal_retval.isOk()) {
+ LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+ << toString(session_type_) << " failed";
+ return false;
+ }
+ return true;
+}
+
+bool BluetoothAudioSession::SuspendStream() {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (!IsSessionReady()) {
+ LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO session";
+ return false;
+ }
+ auto hal_retval = stack_iface_->suspendStream();
+ if (!hal_retval.isOk()) {
+ LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+ << toString(session_type_) << " failed";
+ return false;
+ }
+ return true;
+}
+
+void BluetoothAudioSession::StopStream() {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (!IsSessionReady()) {
+ return;
+ }
+ auto hal_retval = stack_iface_->stopStream();
+ if (!hal_retval.isOk()) {
+ LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+ << toString(session_type_) << " failed";
+ }
+}
+
+/***
+ *
+ * Private methods
+ *
+ ***/
+
+bool BluetoothAudioSession::UpdateDataPath(const DataMQDesc* mq_desc) {
+ if (mq_desc == nullptr) {
+    // use case: reset the data path with a null descriptor
+ data_mq_ = nullptr;
+ return true;
+ }
+ std::unique_ptr<DataMQ> temp_mq;
+ temp_mq.reset(new DataMQ(*mq_desc));
+ if (!temp_mq || !temp_mq->isValid()) {
+ data_mq_ = nullptr;
+ return false;
+ }
+ data_mq_ = std::move(temp_mq);
+ return true;
+}
+
+bool BluetoothAudioSession::UpdateAudioConfig(
+ const AudioConfiguration& audio_config) {
+ bool is_software_session =
+ (session_type_ == SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH ||
+ session_type_ == SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH ||
+ session_type_ == SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH ||
+ session_type_ == SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH);
+ bool is_offload_a2dp_session =
+ (session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH);
+ bool is_offload_le_audio_session =
+ (session_type_ ==
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH ||
+ session_type_ ==
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH);
+ auto audio_config_tag = audio_config.getTag();
+ bool is_software_audio_config =
+ (is_software_session &&
+ audio_config_tag == AudioConfiguration::pcmConfig);
+ bool is_a2dp_offload_audio_config =
+ (is_offload_a2dp_session &&
+ audio_config_tag == AudioConfiguration::a2dpConfig);
+ bool is_le_audio_offload_audio_config =
+ (is_offload_le_audio_session &&
+ audio_config_tag == AudioConfiguration::leAudioConfig);
+ if (!is_software_audio_config && !is_a2dp_offload_audio_config &&
+ !is_le_audio_offload_audio_config) {
+ return false;
+ }
+ audio_config_ = std::make_unique<AudioConfiguration>(audio_config);
+ return true;
+}
+
+void BluetoothAudioSession::ReportSessionStatus() {
+ // This is locked already by OnSessionStarted / OnSessionEnded
+ if (observers_.empty()) {
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO port state observer";
+ return;
+ }
+ for (auto& observer : observers_) {
+ uint16_t cookie = observer.first;
+ std::shared_ptr<PortStatusCallbacks> callback = observer.second;
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+ << " notify to bluetooth_audio=0x"
+ << ::android::base::StringPrintf("%04x", cookie);
+ callback->session_changed_cb_(cookie);
+ }
+}
+
+/***
+ *
+ * PCM methods
+ *
+ ***/
+
+size_t BluetoothAudioSession::OutWritePcmData(const void* buffer,
+ size_t bytes) {
+ if (buffer == nullptr || bytes <= 0) {
+ return 0;
+ }
+ size_t total_written = 0;
+ int timeout_ms = kFmqSendTimeoutMs;
+ do {
+ std::unique_lock<std::recursive_mutex> lock(mutex_);
+ if (!IsSessionReady()) {
+ break;
+ }
+ size_t num_bytes_to_write = data_mq_->availableToWrite();
+ if (num_bytes_to_write) {
+ if (num_bytes_to_write > (bytes - total_written)) {
+ num_bytes_to_write = bytes - total_written;
+ }
+
+ if (!data_mq_->write(
+ static_cast<const MQDataType*>(buffer) + total_written,
+ num_bytes_to_write)) {
+ LOG(ERROR) << "FMQ datapath writing " << total_written << "/" << bytes
+ << " failed";
+ return total_written;
+ }
+ total_written += num_bytes_to_write;
+ } else if (timeout_ms >= kWritePollMs) {
+ lock.unlock();
+ usleep(kWritePollMs * 1000);
+ timeout_ms -= kWritePollMs;
+ } else {
+ LOG(DEBUG) << "Data " << total_written << "/" << bytes << " overflow "
+ << (kFmqSendTimeoutMs - timeout_ms) << " ms";
+ return total_written;
+ }
+ } while (total_written < bytes);
+ return total_written;
+}
+
+size_t BluetoothAudioSession::InReadPcmData(void* buffer, size_t bytes) {
+ if (buffer == nullptr || bytes <= 0) {
+ return 0;
+ }
+ size_t total_read = 0;
+ int timeout_ms = kFmqReceiveTimeoutMs;
+ do {
+ std::unique_lock<std::recursive_mutex> lock(mutex_);
+ if (!IsSessionReady()) {
+ break;
+ }
+ size_t num_bytes_to_read = data_mq_->availableToRead();
+ if (num_bytes_to_read) {
+ if (num_bytes_to_read > (bytes - total_read)) {
+ num_bytes_to_read = bytes - total_read;
+ }
+ if (!data_mq_->read(static_cast<MQDataType*>(buffer) + total_read,
+ num_bytes_to_read)) {
+ LOG(ERROR) << "FMQ datapath reading " << total_read << "/" << bytes
+ << " failed";
+ return total_read;
+ }
+ total_read += num_bytes_to_read;
+ } else if (timeout_ms >= kReadPollMs) {
+ lock.unlock();
+ usleep(kReadPollMs * 1000);
+ timeout_ms -= kReadPollMs;
+ continue;
+ } else {
+ LOG(DEBUG) << "Data " << total_read << "/" << bytes << " overflow "
+ << (kFmqReceiveTimeoutMs - timeout_ms) << " ms";
+ return total_read;
+ }
+ } while (total_read < bytes);
+ return total_read;
+}
+
+/***
+ *
+ * Other methods
+ *
+ ***/
+
+void BluetoothAudioSession::ReportControlStatus(bool start_resp,
+ BluetoothAudioStatus status) {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (observers_.empty()) {
+ LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO port state observer";
+ return;
+ }
+ for (auto& observer : observers_) {
+ uint16_t cookie = observer.first;
+ std::shared_ptr<PortStatusCallbacks> callback = observer.second;
+ LOG(INFO) << __func__ << " - status=" << toString(status)
+ << " for SessionType=" << toString(session_type_)
+ << ", bluetooth_audio=0x"
+ << ::android::base::StringPrintf("%04x", cookie)
+ << (start_resp ? " started" : " suspended");
+ callback->control_result_cb_(cookie, start_resp, status);
+ }
+}
+
+bool BluetoothAudioSession::GetPresentationPosition(
+ PresentationPosition& presentation_position) {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (!IsSessionReady()) {
+ LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO session";
+ return false;
+ }
+  if (!stack_iface_->getPresentationPosition(&presentation_position).isOk()) {
+    LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+                 << toString(session_type_) << " failed";
+    return false;
+  }
+
+  // stack_iface_ filled presentation_position successfully.
+  return true;
+}
+
+void BluetoothAudioSession::UpdateSourceMetadata(
+ const struct source_metadata& source_metadata) {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (!IsSessionReady()) {
+ LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO session";
+ return;
+ }
+
+ ssize_t track_count = source_metadata.track_count;
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_) << ","
+ << track_count << " track(s)";
+ if (session_type_ == SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH ||
+ session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+ return;
+ }
+
+ SourceMetadata hal_source_metadata;
+ hal_source_metadata.tracks.resize(track_count);
+ for (int i = 0; i < track_count; i++) {
+ hal_source_metadata.tracks[i].usage =
+ static_cast<media::audio::common::AudioUsage>(
+ source_metadata.tracks[i].usage);
+ hal_source_metadata.tracks[i].contentType =
+ static_cast<media::audio::common::AudioContentType>(
+ source_metadata.tracks[i].content_type);
+ hal_source_metadata.tracks[i].gain = source_metadata.tracks[i].gain;
+ LOG(VERBOSE) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", usage=" << toString(hal_source_metadata.tracks[i].usage)
+ << ", content="
+ << toString(hal_source_metadata.tracks[i].contentType)
+ << ", gain=" << hal_source_metadata.tracks[i].gain;
+ }
+
+ auto hal_retval = stack_iface_->updateSourceMetadata(hal_source_metadata);
+ if (!hal_retval.isOk()) {
+ LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+ << toString(session_type_) << " failed";
+ }
+}
+
+void BluetoothAudioSession::UpdateSinkMetadata(
+ const struct sink_metadata& sink_metadata) {
+ std::lock_guard<std::recursive_mutex> guard(mutex_);
+ if (!IsSessionReady()) {
+ LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
+ << " has NO session";
+ return;
+ }
+
+ ssize_t track_count = sink_metadata.track_count;
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_) << ","
+ << track_count << " track(s)";
+ if (session_type_ == SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH ||
+ session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH) {
+ return;
+ }
+
+ SinkMetadata hal_sink_metadata;
+ hal_sink_metadata.tracks.resize(track_count);
+ for (int i = 0; i < track_count; i++) {
+ hal_sink_metadata.tracks[i].source =
+ static_cast<media::audio::common::AudioSource>(
+ sink_metadata.tracks[i].source);
+ hal_sink_metadata.tracks[i].gain = sink_metadata.tracks[i].gain;
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_)
+ << ", source=" << sink_metadata.tracks[i].source
+ << ", dest_device=" << sink_metadata.tracks[i].dest_device
+ << ", gain=" << sink_metadata.tracks[i].gain
+ << ", dest_device_address="
+ << sink_metadata.tracks[i].dest_device_address;
+ }
+
+ auto hal_retval = stack_iface_->updateSinkMetadata(hal_sink_metadata);
+ if (!hal_retval.isOk()) {
+ LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+ << toString(session_type_) << " failed";
+ }
+}
+
+bool BluetoothAudioSession::IsAidlAvailable() {
+ if (is_aidl_checked) return is_aidl_available;
+ is_aidl_available =
+ (AServiceManager_checkService(
+ kDefaultAudioProviderFactoryInterface.c_str()) != nullptr);
+ is_aidl_checked = true;
+ return is_aidl_available;
+}
+
+/***
+ *
+ * BluetoothAudioSessionInstance
+ *
+ ***/
+std::mutex BluetoothAudioSessionInstance::mutex_;
+std::unordered_map<SessionType, std::shared_ptr<BluetoothAudioSession>>
+ BluetoothAudioSessionInstance::sessions_map_;
+
+std::shared_ptr<BluetoothAudioSession>
+BluetoothAudioSessionInstance::GetSessionInstance(
+ const SessionType& session_type) {
+ std::lock_guard<std::mutex> guard(mutex_);
+
+ if (!sessions_map_.empty()) {
+ auto entry = sessions_map_.find(session_type);
+ if (entry != sessions_map_.end()) {
+ return entry->second;
+ }
+ }
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ std::make_shared<BluetoothAudioSession>(session_type);
+ sessions_map_[session_type] = session_ptr;
+ return session_ptr;
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
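
OutWritePcmData and InReadPcmData above both implement a polled, time-budgeted loop over the FMQ: move as much as the queue currently allows, otherwise sleep one poll interval and retry until the 1000 ms budget is spent, then return the short count. A standalone sketch of the write side under those constants, with a fixed-capacity buffer standing in for the FMQ so it builds without libfmq:

#include <algorithm>
#include <chrono>
#include <cstdint>
#include <deque>
#include <iostream>
#include <thread>
#include <vector>

// Fixed-capacity stand-in for the AIDL message queue.
struct FakeQueue {
  size_t capacity = 64;
  std::deque<uint8_t> data;
  size_t availableToWrite() const { return capacity - data.size(); }
  bool write(const uint8_t* src, size_t n) {
    data.insert(data.end(), src, src + n);
    return true;
  }
};

// Write `bytes` bytes, sleeping kWritePollMs between attempts until the
// kFmqSendTimeoutMs budget is spent; returns how much was actually written.
size_t OutWrite(FakeQueue& queue, const uint8_t* buffer, size_t bytes) {
  constexpr int kFmqSendTimeoutMs = 1000;
  constexpr int kWritePollMs = 1;
  size_t total_written = 0;
  int timeout_ms = kFmqSendTimeoutMs;
  do {
    size_t chunk = std::min(queue.availableToWrite(), bytes - total_written);
    if (chunk > 0) {
      if (!queue.write(buffer + total_written, chunk)) return total_written;
      total_written += chunk;
    } else if (timeout_ms >= kWritePollMs) {
      std::this_thread::sleep_for(std::chrono::milliseconds(kWritePollMs));
      timeout_ms -= kWritePollMs;
    } else {
      break;  // budget exhausted: report the short write (overflow case)
    }
  } while (total_written < bytes);
  return total_written;
}

int main() {
  FakeQueue queue;
  std::vector<uint8_t> pcm(48, 0);
  std::cout << OutWrite(queue, pcm.data(), pcm.size()) << " bytes written\n";
}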
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.h b/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.h
new file mode 100644
index 0000000..73bc0f8
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioSession.h
@@ -0,0 +1,219 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/audio/common/SinkMetadata.h>
+#include <aidl/android/hardware/audio/common/SourceMetadata.h>
+#include <aidl/android/hardware/bluetooth/audio/IBluetoothAudioProvider.h>
+#include <aidl/android/hardware/bluetooth/audio/IBluetoothAudioProviderFactory.h>
+#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
+#include <fmq/AidlMessageQueue.h>
+#include <hardware/audio.h>
+
+#include <mutex>
+#include <unordered_map>
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using ::aidl::android::hardware::common::fmq::MQDescriptor;
+using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
+using ::android::AidlMessageQueue;
+
+using ::aidl::android::hardware::audio::common::SinkMetadata;
+using ::aidl::android::hardware::audio::common::SourceMetadata;
+
+using MQDataType = int8_t;
+using MQDataMode = SynchronizedReadWrite;
+using DataMQ = AidlMessageQueue<MQDataType, MQDataMode>;
+using DataMQDesc =
+ ::aidl::android::hardware::common::fmq::MQDescriptor<MQDataType,
+ MQDataMode>;
+
+static constexpr uint16_t kObserversCookieSize = 0x0010; // 0x0000 ~ 0x000f
+static constexpr uint16_t kObserversCookieUndefined =
+ (static_cast<uint16_t>(SessionType::UNKNOWN) << 8 & 0xff00);
+inline SessionType ObserversCookieGetSessionType(uint16_t cookie) {
+ return static_cast<SessionType>(cookie >> 8 & 0x00ff);
+}
+inline uint16_t ObserversCookieGetInitValue(SessionType session_type) {
+ return (static_cast<uint16_t>(session_type) << 8 & 0xff00);
+}
+inline uint16_t ObserversCookieGetUpperBound(SessionType session_type) {
+ return (static_cast<uint16_t>(session_type) << 8 & 0xff00) +
+ kObserversCookieSize;
+}
+
+/***
+ * This presents the started / suspended and session-changed callbacks that
+ * the bluetooth_audio module uses to receive status notifications
+ ***/
+struct PortStatusCallbacks {
+ /***
+ * control_result_cb_ - when the Bluetooth stack reports results of
+ * streamStarted or streamSuspended, the BluetoothAudioProvider will invoke
+ * this callback to report to the bluetooth_audio module.
+ * @param: cookie - indicates which bluetooth_audio output should handle
+   * @param: start_resp - whether this report is for startStream
+ * @param: status - the result of startStream
+ ***/
+ std::function<void(uint16_t cookie, bool start_resp,
+ BluetoothAudioStatus status)>
+ control_result_cb_;
+ /***
+   * session_changed_cb_ - when the Bluetooth stack starts / ends a session,
+   * the BluetoothAudioProvider will invoke this callback to notify the
+   * bluetooth_audio module.
+ * @param: cookie - indicates which bluetooth_audio output should handle
+ ***/
+ std::function<void(uint16_t cookie)> session_changed_cb_;
+ /***
+   * audio_configuration_changed_cb_ - when the Bluetooth stack changes the
+   * audio configuration, the BluetoothAudioProvider will invoke this callback
+   * to notify the bluetooth_audio module.
+ * @param: cookie - indicates which bluetooth_audio output should handle
+ ***/
+ std::function<void(uint16_t cookie)> audio_configuration_changed_cb_;
+};
+
+class BluetoothAudioSession {
+ public:
+ BluetoothAudioSession(const SessionType& session_type);
+
+ /***
+ * The function helps to check if this session is ready or not
+ * @return: true if the Bluetooth stack has started the specified session
+ ***/
+ bool IsSessionReady();
+
+ /***
+ * The report function is used to report that the Bluetooth stack has started
+ * this session without any failure, and will invoke session_changed_cb_ to
+ * notify those registered bluetooth_audio outputs
+ ***/
+ void OnSessionStarted(const std::shared_ptr<IBluetoothAudioPort> stack_iface,
+ const DataMQDesc* mq_desc,
+ const AudioConfiguration& audio_config);
+
+ /***
+ * The report function is used to report that the Bluetooth stack has ended
+ * the session, and will invoke session_changed_cb_ to notify registered
+ * bluetooth_audio outputs
+ ***/
+ void OnSessionEnded();
+
+ /***
+ * The report function is used to report that the Bluetooth stack has notified
+ * the result of startStream or suspendStream, and will invoke
+ * control_result_cb_ to notify registered bluetooth_audio outputs
+ ***/
+ void ReportControlStatus(bool start_resp, BluetoothAudioStatus status);
+
+ /***
+ * The control function helps the bluetooth_audio module to register
+ * PortStatusCallbacks
+ * @return: cookie - the assigned number to this bluetooth_audio output
+ ***/
+ uint16_t RegisterStatusCback(const PortStatusCallbacks& cbacks);
+
+ /***
+ * The control function helps the bluetooth_audio module to unregister
+ * PortStatusCallbacks
+   * @param: cookie - indicates which bluetooth_audio output is unregistering
+ ***/
+ void UnregisterStatusCback(uint16_t cookie);
+
+ /***
+ * The control function is for the bluetooth_audio module to get the current
+ * AudioConfiguration
+ ***/
+ const AudioConfiguration GetAudioConfig();
+
+ /***
+   * The report function is used to report that the Bluetooth stack has
+   * notified that the audio configuration changed, and will invoke
+ * audio_configuration_changed_cb_ to notify registered bluetooth_audio
+ * outputs
+ ***/
+ void ReportAudioConfigChanged(const AudioConfiguration& audio_config);
+
+ /***
+ * Those control functions are for the bluetooth_audio module to start,
+ * suspend, stop stream, to check position, and to update metadata.
+ ***/
+ bool StartStream();
+ bool SuspendStream();
+ void StopStream();
+ bool GetPresentationPosition(PresentationPosition& presentation_position);
+ void UpdateSourceMetadata(const struct source_metadata& source_metadata);
+ void UpdateSinkMetadata(const struct sink_metadata& sink_metadata);
+
+ // The control function writes stream to FMQ
+ size_t OutWritePcmData(const void* buffer, size_t bytes);
+  // The control function reads stream from FMQ
+ size_t InReadPcmData(void* buffer, size_t bytes);
+
+  // Returns whether an IBluetoothAudioProviderFactory implementation exists
+ static bool IsAidlAvailable();
+
+ private:
+ // using recursive_mutex to allow hwbinder to re-enter again.
+ std::recursive_mutex mutex_;
+ SessionType session_type_;
+
+ // audio control path to use for both software and offloading
+ std::shared_ptr<IBluetoothAudioPort> stack_iface_;
+ // audio data path (FMQ) for software encoding
+ std::unique_ptr<DataMQ> data_mq_;
+ // audio data configuration for both software and offloading
+ std::unique_ptr<AudioConfiguration> audio_config_;
+
+ // saving those registered bluetooth_audio's callbacks
+ std::unordered_map<uint16_t, std::shared_ptr<struct PortStatusCallbacks>>
+ observers_;
+
+ bool UpdateDataPath(const DataMQDesc* mq_desc);
+ bool UpdateAudioConfig(const AudioConfiguration& audio_config);
+ // invoking the registered session_changed_cb_
+ void ReportSessionStatus();
+
+ static inline std::atomic<bool> is_aidl_checked = false;
+ static inline std::atomic<bool> is_aidl_available = false;
+ static inline const std::string kDefaultAudioProviderFactoryInterface =
+ std::string() + IBluetoothAudioProviderFactory::descriptor + "/default";
+};
+
+class BluetoothAudioSessionInstance {
+ public:
+ // The API is to fetch the specified session of A2DP / Hearing Aid
+ static std::shared_ptr<BluetoothAudioSession> GetSessionInstance(
+ const SessionType& session_type);
+
+ private:
+ static std::mutex mutex_;
+ static std::unordered_map<SessionType, std::shared_ptr<BluetoothAudioSession>>
+ sessions_map_;
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
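
The observer cookie helpers above pack the session type into the upper byte of a uint16_t and a per-session slot index (up to kObserversCookieSize observers) into the low byte, which is why RegisterStatusCback only scans the [init, upper-bound) window for a free slot. A standalone sketch of that encoding, with a plain integer standing in for the SessionType enum:

#include <cstdint>
#include <iostream>

constexpr uint16_t kObserversCookieSize = 0x0010;  // 16 slots per session type

constexpr uint16_t CookieInit(uint16_t session_type) {
  return static_cast<uint16_t>((session_type << 8) & 0xff00);
}
constexpr uint16_t CookieUpperBound(uint16_t session_type) {
  return CookieInit(session_type) + kObserversCookieSize;
}
constexpr uint16_t CookieSessionType(uint16_t cookie) {
  return static_cast<uint16_t>((cookie >> 8) & 0x00ff);
}

int main() {
  uint16_t session_type = 3;  // hypothetical enum value
  std::cout << std::hex << CookieInit(session_type) << " .. "
            << CookieUpperBound(session_type) - 1 << "\n";  // 300 .. 30f
  std::cout << CookieSessionType(0x030a) << "\n";           // 3
}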
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionControl.h b/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionControl.h
new file mode 100644
index 0000000..aff01e5
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionControl.h
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioSessionControl {
+ public:
+ /***
+   * The control API helps to check if the session is ready or not
+   * @return: true if the Bluetooth stack has started the specified session
+ ***/
+ static bool IsSessionReady(const SessionType& session_type) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ return session_ptr->IsSessionReady();
+ }
+
+ return false;
+ }
+
+ /***
+ * The control API helps the bluetooth_audio module to register
+ * PortStatusCallbacks
+ * @return: cookie - the assigned number to this bluetooth_audio output
+ ***/
+ static uint16_t RegisterControlResultCback(
+ const SessionType& session_type, const PortStatusCallbacks& cbacks) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ return session_ptr->RegisterStatusCback(cbacks);
+ }
+ return kObserversCookieUndefined;
+ }
+
+ /***
+ * The control API helps the bluetooth_audio module to unregister
+ * PortStatusCallbacks
+   * @param: cookie - indicates which bluetooth_audio output is unregistering
+ ***/
+ static void UnregisterControlResultCback(const SessionType& session_type,
+ uint16_t cookie) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ session_ptr->UnregisterStatusCback(cookie);
+ }
+ }
+
+ /***
+   * The control API for the bluetooth_audio module to get the current
+ * AudioConfiguration
+ ***/
+ static const AudioConfiguration GetAudioConfig(
+ const SessionType& session_type) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ return session_ptr->GetAudioConfig();
+ }
+ switch (session_type) {
+ case SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+ return AudioConfiguration(CodecConfiguration{});
+ case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH:
+ case SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH:
+ return AudioConfiguration(LeAudioConfiguration{});
+ default:
+ return AudioConfiguration(PcmConfiguration{});
+ }
+ }
+
+  /***
+   * Those control APIs are for the bluetooth_audio module to start /
+   * suspend / stop stream, to check the presentation position, and to
+   * update metadata.
+   ***/
+ static bool StartStream(const SessionType& session_type) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ return session_ptr->StartStream();
+ }
+ return false;
+ }
+
+ static bool SuspendStream(const SessionType& session_type) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ return session_ptr->SuspendStream();
+ }
+ return false;
+ }
+
+ static void StopStream(const SessionType& session_type) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ session_ptr->StopStream();
+ }
+ }
+
+ static bool GetPresentationPosition(
+ const SessionType& session_type,
+ PresentationPosition& presentation_position) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ return session_ptr->GetPresentationPosition(presentation_position);
+ }
+ return false;
+ }
+
+ static void UpdateSourceMetadata(
+ const SessionType& session_type,
+ const struct source_metadata& source_metadata) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ session_ptr->UpdateSourceMetadata(source_metadata);
+ }
+ }
+
+ static void UpdateSinkMetadata(const SessionType& session_type,
+ const struct sink_metadata& sink_metadata) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ session_ptr->UpdateSinkMetadata(sink_metadata);
+ }
+ }
+
+ /***
+ * The control API writes stream to FMQ
+ ***/
+ static size_t OutWritePcmData(const SessionType& session_type,
+ const void* buffer, size_t bytes) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ return session_ptr->OutWritePcmData(buffer, bytes);
+ }
+ return 0;
+ }
+
+ /***
+ * The control API reads stream from FMQ
+ ***/
+ static size_t InReadPcmData(const SessionType& session_type, void* buffer,
+ size_t bytes) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ return session_ptr->InReadPcmData(buffer, bytes);
+ }
+ return 0;
+ }
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
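
Every static method in BluetoothAudioSessionControl above follows the same shape: fetch the per-session-type singleton from BluetoothAudioSessionInstance, forward the call if it exists, and otherwise fall back to a harmless default. A standalone sketch of the lazy per-key singleton map behind that pattern, with an int key standing in for SessionType:

#include <iostream>
#include <memory>
#include <mutex>
#include <unordered_map>

struct Session {
  explicit Session(int type) : type_(type) {}
  int type_;
};

class SessionInstance {
 public:
  // One shared session object per key, created on first use under a mutex.
  static std::shared_ptr<Session> Get(int session_type) {
    std::lock_guard<std::mutex> guard(mutex_);
    auto entry = sessions_.find(session_type);
    if (entry != sessions_.end()) return entry->second;
    auto session = std::make_shared<Session>(session_type);
    sessions_[session_type] = session;
    return session;
  }

 private:
  static inline std::mutex mutex_;
  static inline std::unordered_map<int, std::shared_ptr<Session>> sessions_;
};

int main() {
  // Repeated lookups for the same key return the same object.
  std::cout << (SessionInstance::Get(1) == SessionInstance::Get(1)) << "\n";  // 1
  std::cout << (SessionInstance::Get(1) == SessionInstance::Get(2)) << "\n";  // 0
}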
diff --git a/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionReport.h b/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionReport.h
new file mode 100644
index 0000000..18569c3
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/BluetoothAudioSessionReport.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+class BluetoothAudioSessionReport {
+ public:
+ /***
+ * The API reports the Bluetooth stack has started the session, and will
+ * inform registered bluetooth_audio outputs
+ ***/
+ static void OnSessionStarted(
+ const SessionType& session_type,
+ const std::shared_ptr<IBluetoothAudioPort> host_iface,
+ const DataMQDesc* data_mq, const AudioConfiguration& audio_config) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ session_ptr->OnSessionStarted(host_iface, data_mq, audio_config);
+ }
+ }
+
+ /***
+ * The API reports the Bluetooth stack has ended the session, and will
+ * inform registered bluetooth_audio outputs
+ ***/
+ static void OnSessionEnded(const SessionType& session_type) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ session_ptr->OnSessionEnded();
+ }
+ }
+
+ /***
+   * The API reports the Bluetooth stack has replied with the result of startStream
+ * or suspendStream, and will inform registered bluetooth_audio outputs
+ ***/
+ static void ReportControlStatus(const SessionType& session_type,
+ const bool& start_resp,
+ BluetoothAudioStatus status) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ session_ptr->ReportControlStatus(start_resp, status);
+ }
+ }
+ /***
+   * The API reports that the Bluetooth stack has changed the audio
+ * configuration, and will inform registered bluetooth_audio outputs
+ ***/
+ static void ReportAudioConfigChanged(const SessionType& session_type,
+ const AudioConfiguration& audio_config) {
+ std::shared_ptr<BluetoothAudioSession> session_ptr =
+ BluetoothAudioSessionInstance::GetSessionInstance(session_type);
+ if (session_ptr != nullptr) {
+ session_ptr->ReportAudioConfigChanged(audio_config);
+ }
+ }
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
\ No newline at end of file
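
Taken together, the report and control facades implement a simple observer flow: the audio HAL side registers PortStatusCallbacks and gets a cookie back, the Bluetooth stack side reports session start / end, and every registered callback is invoked with its cookie. A much-simplified standalone sketch of that flow; the MiniSession class here is illustrative only, not the AIDL session:

#include <cstdint>
#include <functional>
#include <iostream>
#include <unordered_map>

struct PortStatusCallbacks {
  std::function<void(uint16_t)> session_changed_cb_;
};

class MiniSession {
 public:
  uint16_t Register(PortStatusCallbacks cb) {
    uint16_t cookie = next_cookie_++;
    observers_[cookie] = std::move(cb);
    return cookie;
  }
  void OnSessionStarted() {
    started_ = true;
    for (auto& [cookie, cb] : observers_) cb.session_changed_cb_(cookie);
  }
  bool started_ = false;

 private:
  uint16_t next_cookie_ = 0x0100;
  std::unordered_map<uint16_t, PortStatusCallbacks> observers_;
};

int main() {
  MiniSession session;
  session.Register({[&](uint16_t cookie) {
    std::cout << "session changed, cookie=0x" << std::hex << cookie
              << " ready=" << session.started_ << "\n";
  }});
  session.OnSessionStarted();  // prints: session changed, cookie=0x100 ready=1
}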
diff --git a/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware.cpp b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware.cpp
new file mode 100644
index 0000000..632a389
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware.cpp
@@ -0,0 +1,787 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "BtAudioNakahara"
+
+#include <aidl/android/hardware/bluetooth/audio/AudioConfiguration.h>
+#include <aidl/android/hardware/bluetooth/audio/BluetoothAudioStatus.h>
+#include <aidl/android/hardware/bluetooth/audio/SessionType.h>
+#include <android-base/logging.h>
+
+#include <functional>
+#include <unordered_map>
+
+#include "../aidl_session/BluetoothAudioSession.h"
+#include "../aidl_session/BluetoothAudioSessionControl.h"
+#include "HidlToAidlMiddleware_2_0.h"
+#include "HidlToAidlMiddleware_2_1.h"
+#include "HidlToAidlMiddleware_2_2.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using HidlStatus = ::android::hardware::bluetooth::audio::V2_0::Status;
+using PcmConfig_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::PcmParameters;
+using SampleRate_2_0 = ::android::hardware::bluetooth::audio::V2_0::SampleRate;
+using ChannelMode_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::ChannelMode;
+using BitsPerSample_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::BitsPerSample;
+using CodecConfig_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::CodecConfiguration;
+using CodecType_2_0 = ::android::hardware::bluetooth::audio::V2_0::CodecType;
+using SbcConfig_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::SbcParameters;
+using AacConfig_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::AacParameters;
+using LdacConfig_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::LdacParameters;
+using AptxConfig_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::AptxParameters;
+using SbcAllocMethod_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::SbcAllocMethod;
+using SbcBlockLength_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::SbcBlockLength;
+using SbcChannelMode_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::SbcChannelMode;
+using SbcNumSubbands_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::SbcNumSubbands;
+using AacObjectType_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::AacObjectType;
+using AacVarBitRate_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::AacVariableBitRate;
+using LdacChannelMode_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::LdacChannelMode;
+using LdacQualityIndex_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::LdacQualityIndex;
+
+using PcmConfig_2_1 =
+ ::android::hardware::bluetooth::audio::V2_1::PcmParameters;
+using SampleRate_2_1 = ::android::hardware::bluetooth::audio::V2_1::SampleRate;
+using Lc3CodecConfig_2_1 =
+ ::android::hardware::bluetooth::audio::V2_1::Lc3CodecConfiguration;
+using Lc3Config_2_1 =
+ ::android::hardware::bluetooth::audio::V2_1::Lc3Parameters;
+using Lc3FrameDuration_2_1 =
+ ::android::hardware::bluetooth::audio::V2_1::Lc3FrameDuration;
+
+using LeAudioConfig_2_2 =
+ ::android::hardware::bluetooth::audio::V2_2::LeAudioConfiguration;
+using LeAudioMode_2_2 =
+ ::android::hardware::bluetooth::audio::V2_2::LeAudioMode;
+
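+// Tracks the HIDL 2.2 callbacks that were re-registered through the AIDL
+// session control: AIDL session type -> (cookie -> legacy callbacks).
+// legacy_callback_lock guards insertion and removal of the cookie entries.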
+std::mutex legacy_callback_lock;
+std::unordered_map<
+ SessionType,
+ std::unordered_map<uint16_t, std::shared_ptr<PortStatusCallbacks_2_2>>>
+ legacy_callback_table;
+
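+// Lookup tables for translating between AIDL values and their HIDL 2.0/2.1
+// counterparts; values without a HIDL equivalent fall back to UNKNOWN.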
+const static std::unordered_map<SessionType_2_1, SessionType>
+ session_type_2_1_to_aidl_map{
+ {SessionType_2_1::A2DP_SOFTWARE_ENCODING_DATAPATH,
+ SessionType::A2DP_SOFTWARE_ENCODING_DATAPATH},
+ {SessionType_2_1::A2DP_HARDWARE_OFFLOAD_DATAPATH,
+ SessionType::A2DP_HARDWARE_OFFLOAD_ENCODING_DATAPATH},
+ {SessionType_2_1::HEARING_AID_SOFTWARE_ENCODING_DATAPATH,
+ SessionType::HEARING_AID_SOFTWARE_ENCODING_DATAPATH},
+ {SessionType_2_1::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH,
+ SessionType::LE_AUDIO_SOFTWARE_ENCODING_DATAPATH},
+ {SessionType_2_1::LE_AUDIO_SOFTWARE_DECODED_DATAPATH,
+ SessionType::LE_AUDIO_SOFTWARE_DECODING_DATAPATH},
+ {SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH,
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH},
+ {SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH,
+ SessionType::LE_AUDIO_HARDWARE_OFFLOAD_DECODING_DATAPATH},
+ };
+
+const static std::unordered_map<int32_t, SampleRate_2_1>
+ sample_rate_to_hidl_2_1_map{
+ {44100, SampleRate_2_1::RATE_44100},
+ {48000, SampleRate_2_1::RATE_48000},
+ {88200, SampleRate_2_1::RATE_88200},
+ {96000, SampleRate_2_1::RATE_96000},
+ {176400, SampleRate_2_1::RATE_176400},
+ {192000, SampleRate_2_1::RATE_192000},
+ {16000, SampleRate_2_1::RATE_16000},
+ {24000, SampleRate_2_1::RATE_24000},
+ {8000, SampleRate_2_1::RATE_8000},
+ {32000, SampleRate_2_1::RATE_32000},
+ };
+
+const static std::unordered_map<CodecType, CodecType_2_0>
+ codec_type_to_hidl_2_0_map{
+ {CodecType::UNKNOWN, CodecType_2_0::UNKNOWN},
+ {CodecType::SBC, CodecType_2_0::SBC},
+ {CodecType::AAC, CodecType_2_0::AAC},
+ {CodecType::APTX, CodecType_2_0::APTX},
+ {CodecType::APTX_HD, CodecType_2_0::APTX_HD},
+ {CodecType::LDAC, CodecType_2_0::LDAC},
+ {CodecType::LC3, CodecType_2_0::UNKNOWN},
+ };
+
+const static std::unordered_map<SbcChannelMode, SbcChannelMode_2_0>
+ sbc_channel_mode_to_hidl_2_0_map{
+ {SbcChannelMode::UNKNOWN, SbcChannelMode_2_0::UNKNOWN},
+ {SbcChannelMode::JOINT_STEREO, SbcChannelMode_2_0::JOINT_STEREO},
+ {SbcChannelMode::STEREO, SbcChannelMode_2_0::STEREO},
+ {SbcChannelMode::DUAL, SbcChannelMode_2_0::DUAL},
+ {SbcChannelMode::MONO, SbcChannelMode_2_0::MONO},
+ };
+
+const static std::unordered_map<int8_t, SbcBlockLength_2_0>
+ sbc_block_length_to_hidl_map{
+ {4, SbcBlockLength_2_0::BLOCKS_4},
+ {8, SbcBlockLength_2_0::BLOCKS_8},
+ {12, SbcBlockLength_2_0::BLOCKS_12},
+ {16, SbcBlockLength_2_0::BLOCKS_16},
+ };
+
+const static std::unordered_map<int8_t, SbcNumSubbands_2_0>
+ sbc_subbands_to_hidl_map{
+ {4, SbcNumSubbands_2_0::SUBBAND_4},
+ {8, SbcNumSubbands_2_0::SUBBAND_8},
+ };
+
+const static std::unordered_map<SbcAllocMethod, SbcAllocMethod_2_0>
+ sbc_alloc_method_to_hidl_map{
+ {SbcAllocMethod::ALLOC_MD_S, SbcAllocMethod_2_0::ALLOC_MD_S},
+ {SbcAllocMethod::ALLOC_MD_L, SbcAllocMethod_2_0::ALLOC_MD_L},
+ };
+
+const static std::unordered_map<AacObjectType, AacObjectType_2_0>
+ aac_object_type_to_hidl_map{
+ {AacObjectType::MPEG2_LC, AacObjectType_2_0::MPEG2_LC},
+ {AacObjectType::MPEG4_LC, AacObjectType_2_0::MPEG4_LC},
+ {AacObjectType::MPEG4_LTP, AacObjectType_2_0::MPEG4_LTP},
+ {AacObjectType::MPEG4_SCALABLE, AacObjectType_2_0::MPEG4_SCALABLE},
+ };
+
+const static std::unordered_map<LdacChannelMode, LdacChannelMode_2_0>
+ ldac_channel_mode_to_hidl_map{
+ {LdacChannelMode::UNKNOWN, LdacChannelMode_2_0::UNKNOWN},
+ {LdacChannelMode::STEREO, LdacChannelMode_2_0::STEREO},
+ {LdacChannelMode::DUAL, LdacChannelMode_2_0::DUAL},
+ {LdacChannelMode::MONO, LdacChannelMode_2_0::MONO},
+ };
+
+const static std::unordered_map<LdacQualityIndex, LdacQualityIndex_2_0>
+ ldac_qindex_to_hidl_map{
+ {LdacQualityIndex::HIGH, LdacQualityIndex_2_0::QUALITY_HIGH},
+ {LdacQualityIndex::MID, LdacQualityIndex_2_0::QUALITY_MID},
+ {LdacQualityIndex::LOW, LdacQualityIndex_2_0::QUALITY_LOW},
+ {LdacQualityIndex::ABR, LdacQualityIndex_2_0::QUALITY_ABR},
+ };
+
+inline SessionType from_session_type_2_1(
+ const SessionType_2_1& session_type_hidl) {
+ auto it = session_type_2_1_to_aidl_map.find(session_type_hidl);
+ if (it != session_type_2_1_to_aidl_map.end()) return it->second;
+ return SessionType::UNKNOWN;
+}
+
+inline SessionType from_session_type_2_0(
+ const SessionType_2_0& session_type_hidl) {
+ return from_session_type_2_1(static_cast<SessionType_2_1>(session_type_hidl));
+}
+
+inline HidlStatus to_hidl_status(const BluetoothAudioStatus& status) {
+ switch (status) {
+ case BluetoothAudioStatus::SUCCESS:
+ return HidlStatus::SUCCESS;
+ case BluetoothAudioStatus::UNSUPPORTED_CODEC_CONFIGURATION:
+ return HidlStatus::UNSUPPORTED_CODEC_CONFIGURATION;
+ default:
+ return HidlStatus::FAILURE;
+ }
+}
+
+inline SampleRate_2_1 to_hidl_sample_rate_2_1(const int32_t sample_rate_hz) {
+ auto it = sample_rate_to_hidl_2_1_map.find(sample_rate_hz);
+ if (it != sample_rate_to_hidl_2_1_map.end()) return it->second;
+ return SampleRate_2_1::RATE_UNKNOWN;
+}
+
+inline SampleRate_2_0 to_hidl_sample_rate_2_0(const int32_t sample_rate_hz) {
+ auto it = sample_rate_to_hidl_2_1_map.find(sample_rate_hz);
+ if (it != sample_rate_to_hidl_2_1_map.end())
+ return static_cast<SampleRate_2_0>(it->second);
+ return SampleRate_2_0::RATE_UNKNOWN;
+}
+
+inline BitsPerSample_2_0 to_hidl_bits_per_sample(const int8_t bit_per_sample) {
+ switch (bit_per_sample) {
+ case 16:
+ return BitsPerSample_2_0::BITS_16;
+ case 24:
+ return BitsPerSample_2_0::BITS_24;
+ case 32:
+ return BitsPerSample_2_0::BITS_32;
+ default:
+ return BitsPerSample_2_0::BITS_UNKNOWN;
+ }
+}
+
+inline ChannelMode_2_0 to_hidl_channel_mode(const ChannelMode channel_mode) {
+ switch (channel_mode) {
+ case ChannelMode::MONO:
+ return ChannelMode_2_0::MONO;
+ case ChannelMode::STEREO:
+ return ChannelMode_2_0::STEREO;
+ default:
+ return ChannelMode_2_0::UNKNOWN;
+ }
+}
+
+inline PcmConfig_2_0 to_hidl_pcm_config_2_0(
+ const PcmConfiguration& pcm_config) {
+ PcmConfig_2_0 hidl_pcm_config;
+ hidl_pcm_config.sampleRate = to_hidl_sample_rate_2_0(pcm_config.sampleRateHz);
+ hidl_pcm_config.channelMode = to_hidl_channel_mode(pcm_config.channelMode);
+ hidl_pcm_config.bitsPerSample =
+ to_hidl_bits_per_sample(pcm_config.bitsPerSample);
+ return hidl_pcm_config;
+}
+
+inline CodecType_2_0 to_hidl_codec_type_2_0(const CodecType codec_type) {
+ auto it = codec_type_to_hidl_2_0_map.find(codec_type);
+ if (it != codec_type_to_hidl_2_0_map.end()) return it->second;
+ return CodecType_2_0::UNKNOWN;
+}
+
+inline SbcConfig_2_0 to_hidl_sbc_config(const SbcConfiguration sbc_config) {
+ SbcConfig_2_0 hidl_sbc_config;
+ hidl_sbc_config.minBitpool = sbc_config.minBitpool;
+ hidl_sbc_config.maxBitpool = sbc_config.maxBitpool;
+ hidl_sbc_config.sampleRate = to_hidl_sample_rate_2_0(sbc_config.sampleRateHz);
+ hidl_sbc_config.bitsPerSample =
+ to_hidl_bits_per_sample(sbc_config.bitsPerSample);
+ if (sbc_channel_mode_to_hidl_2_0_map.find(sbc_config.channelMode) !=
+ sbc_channel_mode_to_hidl_2_0_map.end()) {
+ hidl_sbc_config.channelMode =
+ sbc_channel_mode_to_hidl_2_0_map.at(sbc_config.channelMode);
+ }
+ if (sbc_block_length_to_hidl_map.find(sbc_config.blockLength) !=
+ sbc_block_length_to_hidl_map.end()) {
+ hidl_sbc_config.blockLength =
+ sbc_block_length_to_hidl_map.at(sbc_config.blockLength);
+ }
+ if (sbc_subbands_to_hidl_map.find(sbc_config.numSubbands) !=
+ sbc_subbands_to_hidl_map.end()) {
+ hidl_sbc_config.numSubbands =
+ sbc_subbands_to_hidl_map.at(sbc_config.numSubbands);
+ }
+ if (sbc_alloc_method_to_hidl_map.find(sbc_config.allocMethod) !=
+ sbc_alloc_method_to_hidl_map.end()) {
+ hidl_sbc_config.allocMethod =
+ sbc_alloc_method_to_hidl_map.at(sbc_config.allocMethod);
+ }
+ return hidl_sbc_config;
+}
+
+inline AacConfig_2_0 to_hidl_aac_config(const AacConfiguration aac_config) {
+ AacConfig_2_0 hidl_aac_config;
+ hidl_aac_config.sampleRate = to_hidl_sample_rate_2_0(aac_config.sampleRateHz);
+ hidl_aac_config.bitsPerSample =
+ to_hidl_bits_per_sample(aac_config.bitsPerSample);
+ hidl_aac_config.channelMode = to_hidl_channel_mode(aac_config.channelMode);
+ if (aac_object_type_to_hidl_map.find(aac_config.objectType) !=
+ aac_object_type_to_hidl_map.end()) {
+ hidl_aac_config.objectType =
+ aac_object_type_to_hidl_map.at(aac_config.objectType);
+ }
+ hidl_aac_config.variableBitRateEnabled = aac_config.variableBitRateEnabled
+ ? AacVarBitRate_2_0::ENABLED
+ : AacVarBitRate_2_0::DISABLED;
+ return hidl_aac_config;
+}
+
+inline LdacConfig_2_0 to_hidl_ldac_config(const LdacConfiguration ldac_config) {
+ LdacConfig_2_0 hidl_ldac_config;
+ hidl_ldac_config.sampleRate =
+ to_hidl_sample_rate_2_0(ldac_config.sampleRateHz);
+ hidl_ldac_config.bitsPerSample =
+ to_hidl_bits_per_sample(ldac_config.bitsPerSample);
+ if (ldac_channel_mode_to_hidl_map.find(ldac_config.channelMode) !=
+ ldac_channel_mode_to_hidl_map.end()) {
+ hidl_ldac_config.channelMode =
+ ldac_channel_mode_to_hidl_map.at(ldac_config.channelMode);
+ }
+ if (ldac_qindex_to_hidl_map.find(ldac_config.qualityIndex) !=
+ ldac_qindex_to_hidl_map.end()) {
+ hidl_ldac_config.qualityIndex =
+ ldac_qindex_to_hidl_map.at(ldac_config.qualityIndex);
+ }
+ return hidl_ldac_config;
+}
+
+inline AptxConfig_2_0 to_hidl_aptx_config(const AptxConfiguration aptx_config) {
+ AptxConfig_2_0 hidl_aptx_config;
+ hidl_aptx_config.sampleRate =
+ to_hidl_sample_rate_2_0(aptx_config.sampleRateHz);
+ hidl_aptx_config.bitsPerSample =
+ to_hidl_bits_per_sample(aptx_config.bitsPerSample);
+ hidl_aptx_config.channelMode = to_hidl_channel_mode(aptx_config.channelMode);
+ return hidl_aptx_config;
+}
+
+inline CodecConfig_2_0 to_hidl_codec_config_2_0(
+ const CodecConfiguration& codec_config) {
+ CodecConfig_2_0 hidl_codec_config;
+ hidl_codec_config.codecType = to_hidl_codec_type_2_0(codec_config.codecType);
+ hidl_codec_config.encodedAudioBitrate =
+ static_cast<uint32_t>(codec_config.encodedAudioBitrate);
+ hidl_codec_config.peerMtu = static_cast<uint32_t>(codec_config.peerMtu);
+ hidl_codec_config.isScmstEnabled = codec_config.isScmstEnabled;
+ switch (codec_config.config.getTag()) {
+ case CodecConfiguration::CodecSpecific::sbcConfig:
+ hidl_codec_config.config.sbcConfig(to_hidl_sbc_config(
+ codec_config.config
+ .get<CodecConfiguration::CodecSpecific::sbcConfig>()));
+ break;
+ case CodecConfiguration::CodecSpecific::aacConfig:
+ hidl_codec_config.config.aacConfig(to_hidl_aac_config(
+ codec_config.config
+ .get<CodecConfiguration::CodecSpecific::aacConfig>()));
+ break;
+ case CodecConfiguration::CodecSpecific::ldacConfig:
+ hidl_codec_config.config.ldacConfig(to_hidl_ldac_config(
+ codec_config.config
+ .get<CodecConfiguration::CodecSpecific::ldacConfig>()));
+ break;
+ case CodecConfiguration::CodecSpecific::aptxConfig:
+ hidl_codec_config.config.aptxConfig(to_hidl_aptx_config(
+ codec_config.config
+ .get<CodecConfiguration::CodecSpecific::aptxConfig>()));
+ break;
+ default:
+ break;
+ }
+ return hidl_codec_config;
+}
+
+inline AudioConfig_2_0 to_hidl_audio_config_2_0(
+ const AudioConfiguration& audio_config) {
+ AudioConfig_2_0 hidl_audio_config;
+ if (audio_config.getTag() == AudioConfiguration::pcmConfig) {
+ hidl_audio_config.pcmConfig(to_hidl_pcm_config_2_0(
+ audio_config.get<AudioConfiguration::pcmConfig>()));
+ } else if (audio_config.getTag() == AudioConfiguration::a2dpConfig) {
+ hidl_audio_config.codecConfig(to_hidl_codec_config_2_0(
+ audio_config.get<AudioConfiguration::a2dpConfig>()));
+ }
+ return hidl_audio_config;
+}
+
+inline PcmConfig_2_1 to_hidl_pcm_config_2_1(
+ const PcmConfiguration& pcm_config) {
+ PcmConfig_2_1 hidl_pcm_config;
+ hidl_pcm_config.sampleRate = to_hidl_sample_rate_2_1(pcm_config.sampleRateHz);
+ hidl_pcm_config.channelMode = to_hidl_channel_mode(pcm_config.channelMode);
+ hidl_pcm_config.bitsPerSample =
+ to_hidl_bits_per_sample(pcm_config.bitsPerSample);
+ hidl_pcm_config.dataIntervalUs =
+ static_cast<uint32_t>(pcm_config.dataIntervalUs);
+ return hidl_pcm_config;
+}
+
+inline Lc3Config_2_1 to_hidl_lc3_config_2_1(
+ const Lc3Configuration& lc3_config) {
+ Lc3Config_2_1 hidl_lc3_config;
+ hidl_lc3_config.pcmBitDepth = to_hidl_bits_per_sample(lc3_config.pcmBitDepth);
+ hidl_lc3_config.samplingFrequency =
+ to_hidl_sample_rate_2_1(lc3_config.samplingFrequencyHz);
+ if (lc3_config.frameDurationUs == 10000)
+ hidl_lc3_config.frameDuration = Lc3FrameDuration_2_1::DURATION_10000US;
+ else if (lc3_config.frameDurationUs == 7500)
+ hidl_lc3_config.frameDuration = Lc3FrameDuration_2_1::DURATION_7500US;
+ hidl_lc3_config.octetsPerFrame =
+ static_cast<uint32_t>(lc3_config.octetsPerFrame);
+ hidl_lc3_config.blocksPerSdu = static_cast<uint32_t>(lc3_config.blocksPerSdu);
+ return hidl_lc3_config;
+}
+
+inline Lc3CodecConfig_2_1 to_hidl_leaudio_config_2_1(
+ const LeAudioConfiguration& leaudio_config) {
+ Lc3CodecConfig_2_1 hidl_lc3_codec_config = {
+ .audioChannelAllocation = 0,
+ };
+ if (leaudio_config.modeConfig.getTag() ==
+ LeAudioConfiguration::LeAudioModeConfig::unicastConfig) {
+ auto& unicast_config =
+ leaudio_config.modeConfig
+ .get<LeAudioConfiguration::LeAudioModeConfig::unicastConfig>();
+ if (unicast_config.leAudioCodecConfig.getTag() !=
+ LeAudioCodecConfiguration::lc3Config) {
+ LOG(FATAL) << __func__ << ": unexpected codec type(vendor?)";
+ }
+ auto& le_codec_config = unicast_config.leAudioCodecConfig
+ .get<LeAudioCodecConfiguration::lc3Config>();
+
+ hidl_lc3_codec_config.lc3Config = to_hidl_lc3_config_2_1(le_codec_config);
+
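+ // HIDL 2.1 carries a single audioChannelAllocation bitmask, so merge the
+ // per-stream allocations into one value.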
+ for (const auto& map : unicast_config.streamMap) {
+ hidl_lc3_codec_config.audioChannelAllocation |=
+ map.audioChannelAllocation;
+ }
+ } else {
+ // NOTE: Broadcast is not officially supported in HIDL
+ auto& bcast_config =
+ leaudio_config.modeConfig
+ .get<LeAudioConfiguration::LeAudioModeConfig::broadcastConfig>();
+ if (bcast_config.streamMap.empty()) {
+ return hidl_lc3_codec_config;
+ }
+ if (bcast_config.streamMap[0].leAudioCodecConfig.getTag() !=
+ LeAudioCodecConfiguration::lc3Config) {
+ LOG(FATAL) << __func__ << ": unexpected codec type(vendor?)";
+ }
+ auto& le_codec_config =
+ bcast_config.streamMap[0]
+ .leAudioCodecConfig.get<LeAudioCodecConfiguration::lc3Config>();
+ hidl_lc3_codec_config.lc3Config = to_hidl_lc3_config_2_1(le_codec_config);
+
+ for (const auto& map : bcast_config.streamMap) {
+ hidl_lc3_codec_config.audioChannelAllocation |=
+ map.audioChannelAllocation;
+ }
+ }
+
+ return hidl_lc3_codec_config;
+}
+
+inline LeAudioConfig_2_2 to_hidl_leaudio_config_2_2(
+ const LeAudioConfiguration& leaudio_config) {
+ LeAudioConfig_2_2 hidl_leaudio_config;
+
+ if (leaudio_config.modeConfig.getTag() ==
+ LeAudioConfiguration::LeAudioModeConfig::unicastConfig) {
+ hidl_leaudio_config.mode = LeAudioMode_2_2::UNICAST;
+ auto& unicast_config =
+ leaudio_config.modeConfig
+ .get<LeAudioConfiguration::LeAudioModeConfig::unicastConfig>();
+ ::android::hardware::bluetooth::audio::V2_2::UnicastConfig
+ hidl_unicast_config;
+ hidl_unicast_config.peerDelay =
+ static_cast<uint32_t>(unicast_config.peerDelay);
+
+ auto& lc3_config = unicast_config.leAudioCodecConfig
+ .get<LeAudioCodecConfiguration::lc3Config>();
+ hidl_unicast_config.lc3Config = to_hidl_lc3_config_2_1(lc3_config);
+
+ hidl_unicast_config.streamMap.resize(unicast_config.streamMap.size());
+ for (int i = 0; i < unicast_config.streamMap.size(); i++) {
+ hidl_unicast_config.streamMap[i].audioChannelAllocation =
+ static_cast<uint32_t>(
+ unicast_config.streamMap[i].audioChannelAllocation);
+ hidl_unicast_config.streamMap[i].streamHandle =
+ static_cast<uint16_t>(unicast_config.streamMap[i].streamHandle);
+ }
+ } else if (leaudio_config.modeConfig.getTag() ==
+ LeAudioConfiguration::LeAudioModeConfig::broadcastConfig) {
+ hidl_leaudio_config.mode = LeAudioMode_2_2::BROADCAST;
+ auto bcast_config =
+ leaudio_config.modeConfig
+ .get<LeAudioConfiguration::LeAudioModeConfig::broadcastConfig>();
+ ::android::hardware::bluetooth::audio::V2_2::BroadcastConfig
+ hidl_bcast_config;
+ hidl_bcast_config.streamMap.resize(bcast_config.streamMap.size());
+ for (int i = 0; i < bcast_config.streamMap.size(); i++) {
+ hidl_bcast_config.streamMap[i].audioChannelAllocation =
+ static_cast<uint32_t>(
+ bcast_config.streamMap[i].audioChannelAllocation);
+ hidl_bcast_config.streamMap[i].streamHandle =
+ static_cast<uint16_t>(bcast_config.streamMap[i].streamHandle);
+ hidl_bcast_config.streamMap[i].lc3Config = to_hidl_lc3_config_2_1(
+ bcast_config.streamMap[i]
+ .leAudioCodecConfig.get<LeAudioCodecConfiguration::lc3Config>());
+ }
+ }
+ return hidl_leaudio_config;
+}
+
+inline AudioConfig_2_1 to_hidl_audio_config_2_1(
+ const AudioConfiguration& audio_config) {
+ AudioConfig_2_1 hidl_audio_config;
+ switch (audio_config.getTag()) {
+ case AudioConfiguration::pcmConfig:
+ hidl_audio_config.pcmConfig(to_hidl_pcm_config_2_1(
+ audio_config.get<AudioConfiguration::pcmConfig>()));
+ break;
+ case AudioConfiguration::a2dpConfig:
+ hidl_audio_config.codecConfig(to_hidl_codec_config_2_0(
+ audio_config.get<AudioConfiguration::a2dpConfig>()));
+ break;
+ case AudioConfiguration::leAudioConfig:
+ hidl_audio_config.leAudioCodecConfig(to_hidl_leaudio_config_2_1(
+ audio_config.get<AudioConfiguration::leAudioConfig>()));
+ break;
+ }
+ return hidl_audio_config;
+}
+
+inline AudioConfig_2_2 to_hidl_audio_config_2_2(
+ const AudioConfiguration& audio_config) {
+ AudioConfig_2_2 hidl_audio_config;
+ switch (audio_config.getTag()) {
+ case AudioConfiguration::pcmConfig:
+ hidl_audio_config.pcmConfig(to_hidl_pcm_config_2_1(
+ audio_config.get<AudioConfiguration::pcmConfig>()));
+ break;
+ case AudioConfiguration::a2dpConfig:
+ hidl_audio_config.codecConfig(to_hidl_codec_config_2_0(
+ audio_config.get<AudioConfiguration::a2dpConfig>()));
+ break;
+ case AudioConfiguration::leAudioConfig:
+ hidl_audio_config.leAudioConfig(to_hidl_leaudio_config_2_2(
+ audio_config.get<AudioConfiguration::leAudioConfig>()));
+ break;
+ }
+ return hidl_audio_config;
+}
+
+/***
+ *
+ * 2.0
+ *
+ ***/
+
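+// The 2.0 entry points below convert the HIDL session type and forward each
+// call to the AIDL BluetoothAudioSessionControl, translating the results back
+// into HIDL 2.0 types where necessary.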
+bool HidlToAidlMiddleware_2_0::IsSessionReady(
+ const SessionType_2_0& session_type) {
+ return BluetoothAudioSessionControl::IsSessionReady(
+ from_session_type_2_0(session_type));
+}
+
+uint16_t HidlToAidlMiddleware_2_0::RegisterControlResultCback(
+ const SessionType_2_0& session_type,
+ const PortStatusCallbacks_2_0& cbacks) {
+ PortStatusCallbacks_2_2 callback_2_2{
+ .control_result_cb_ = cbacks.control_result_cb_,
+ .session_changed_cb_ = cbacks.session_changed_cb_,
+ };
+ return HidlToAidlMiddleware_2_2::RegisterControlResultCback(
+ static_cast<SessionType_2_1>(session_type), callback_2_2);
+}
+
+void HidlToAidlMiddleware_2_0::UnregisterControlResultCback(
+ const SessionType_2_0& session_type, uint16_t cookie) {
+ HidlToAidlMiddleware_2_2::UnregisterControlResultCback(
+ static_cast<SessionType_2_1>(session_type), cookie);
+}
+
+const AudioConfig_2_0 HidlToAidlMiddleware_2_0::GetAudioConfig(
+ const SessionType_2_0& session_type) {
+ return to_hidl_audio_config_2_0(BluetoothAudioSessionControl::GetAudioConfig(
+ from_session_type_2_0(session_type)));
+}
+
+bool HidlToAidlMiddleware_2_0::StartStream(
+ const SessionType_2_0& session_type) {
+ return BluetoothAudioSessionControl::StartStream(
+ from_session_type_2_0(session_type));
+}
+
+void HidlToAidlMiddleware_2_0::StopStream(const SessionType_2_0& session_type) {
+ return BluetoothAudioSessionControl::StopStream(
+ from_session_type_2_0(session_type));
+}
+
+bool HidlToAidlMiddleware_2_0::SuspendStream(
+ const SessionType_2_0& session_type) {
+ return BluetoothAudioSessionControl::SuspendStream(
+ from_session_type_2_0(session_type));
+}
+
+bool HidlToAidlMiddleware_2_0::GetPresentationPosition(
+ const SessionType_2_0& session_type, uint64_t* remote_delay_report_ns,
+ uint64_t* total_bytes_readed, timespec* data_position) {
+ PresentationPosition presentation_position;
+ auto ret_val = BluetoothAudioSessionControl::GetPresentationPosition(
+ from_session_type_2_0(session_type), presentation_position);
+ if (remote_delay_report_ns)
+ *remote_delay_report_ns = presentation_position.remoteDeviceAudioDelayNanos;
+ if (total_bytes_readed)
+ *total_bytes_readed = presentation_position.transmittedOctets;
+ if (data_position)
+ *data_position = {
+ .tv_sec = static_cast<__kernel_old_time_t>(
+ presentation_position.transmittedOctetsTimestamp.tvSec),
+ .tv_nsec = static_cast<long>(
+ presentation_position.transmittedOctetsTimestamp.tvNSec)};
+ return ret_val;
+}
+
+void HidlToAidlMiddleware_2_0::UpdateTracksMetadata(
+ const SessionType_2_0& session_type,
+ const struct source_metadata* source_metadata) {
+ return BluetoothAudioSessionControl::UpdateSourceMetadata(
+ from_session_type_2_0(session_type), *source_metadata);
+}
+
+size_t HidlToAidlMiddleware_2_0::OutWritePcmData(
+ const SessionType_2_0& session_type, const void* buffer, size_t bytes) {
+ return BluetoothAudioSessionControl::OutWritePcmData(
+ from_session_type_2_0(session_type), buffer, bytes);
+}
+
+size_t HidlToAidlMiddleware_2_0::InReadPcmData(
+ const SessionType_2_0& session_type, void* buffer, size_t bytes) {
+ return BluetoothAudioSessionControl::InReadPcmData(
+ from_session_type_2_0(session_type), buffer, bytes);
+}
+
+bool HidlToAidlMiddleware_2_0::IsAidlAvailable() {
+ return BluetoothAudioSession::IsAidlAvailable();
+}
+
+/***
+ *
+ * 2.1
+ *
+ ***/
+
+const AudioConfig_2_1 HidlToAidlMiddleware_2_1::GetAudioConfig(
+ const SessionType_2_1& session_type) {
+ return to_hidl_audio_config_2_1(BluetoothAudioSessionControl::GetAudioConfig(
+ from_session_type_2_1(session_type)));
+}
+
+/***
+ *
+ * 2.2
+ *
+ ***/
+
+bool HidlToAidlMiddleware_2_2::IsSessionReady(
+ const SessionType_2_1& session_type) {
+ return BluetoothAudioSessionControl::IsSessionReady(
+ from_session_type_2_1(session_type));
+}
+
+uint16_t HidlToAidlMiddleware_2_2::RegisterControlResultCback(
+ const SessionType_2_1& session_type,
+ const PortStatusCallbacks_2_2& cbacks) {
+ LOG(INFO) << __func__ << ": " << toString(session_type);
+ auto aidl_session_type = from_session_type_2_1(session_type);
+ // Capture a reference to this session's callback table for the lambdas below
+ auto& session_legacy_callback_table =
+ legacy_callback_table[aidl_session_type];
+ PortStatusCallbacks aidl_callbacks{};
+ if (cbacks.control_result_cb_) {
+ aidl_callbacks.control_result_cb_ =
+ [&session_legacy_callback_table](uint16_t cookie, bool start_resp,
+ const BluetoothAudioStatus& status) {
+ if (session_legacy_callback_table.find(cookie) ==
+ session_legacy_callback_table.end()) {
+ LOG(ERROR) << __func__ << ": Unknown callback invoked!";
+ return;
+ }
+ auto& cback = session_legacy_callback_table[cookie];
+ cback->control_result_cb_(cookie, start_resp, to_hidl_status(status));
+ };
+ }
+ if (cbacks.session_changed_cb_) {
+ aidl_callbacks.session_changed_cb_ =
+ [&session_legacy_callback_table](uint16_t cookie) {
+ if (session_legacy_callback_table.find(cookie) ==
+ session_legacy_callback_table.end()) {
+ LOG(ERROR) << __func__ << ": Unknown callback invoked!";
+ return;
+ }
+ auto& cback = session_legacy_callback_table[cookie];
+ cback->session_changed_cb_(cookie);
+ };
+ }
+ if (cbacks.audio_configuration_changed_cb_) {
+ aidl_callbacks.audio_configuration_changed_cb_ =
+ [&session_legacy_callback_table](uint16_t cookie) {
+ if (session_legacy_callback_table.find(cookie) ==
+ session_legacy_callback_table.end()) {
+ LOG(ERROR) << __func__ << ": Unknown callback invoked!";
+ return;
+ }
+ auto& cback = session_legacy_callback_table[cookie];
+ cback->audio_configuration_changed_cb_(cookie);
+ };
+ }
+ auto cookie = BluetoothAudioSessionControl::RegisterControlResultCback(
+ aidl_session_type, aidl_callbacks);
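+ // Store the legacy callbacks under the assigned cookie so the AIDL-side
+ // lambdas above can dispatch back to them.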
+ {
+ std::lock_guard<std::mutex> guard(legacy_callback_lock);
+ session_legacy_callback_table[cookie] =
+ std::make_shared<PortStatusCallbacks_2_2>(cbacks);
+ }
+ return cookie;
+}
+
+void HidlToAidlMiddleware_2_2::UnregisterControlResultCback(
+ const SessionType_2_1& session_type, uint16_t cookie) {
+ LOG(INFO) << __func__ << ": " << toString(session_type);
+ auto aidl_session_type = from_session_type_2_1(session_type);
+ BluetoothAudioSessionControl::UnregisterControlResultCback(aidl_session_type,
+ cookie);
+ auto& session_callback_table = legacy_callback_table[aidl_session_type];
+ if (session_callback_table.find(cookie) != session_callback_table.end()) {
+ std::lock_guard<std::mutex> guard(legacy_callback_lock);
+ session_callback_table.erase(cookie);
+ }
+}
+
+const AudioConfig_2_2 HidlToAidlMiddleware_2_2::GetAudioConfig(
+ const SessionType_2_1& session_type) {
+ return to_hidl_audio_config_2_2(BluetoothAudioSessionControl::GetAudioConfig(
+ from_session_type_2_1(session_type)));
+}
+
+bool HidlToAidlMiddleware_2_2::StartStream(
+ const SessionType_2_1& session_type) {
+ return BluetoothAudioSessionControl::StartStream(
+ from_session_type_2_1(session_type));
+}
+
+bool HidlToAidlMiddleware_2_2::SuspendStream(
+ const SessionType_2_1& session_type) {
+ return BluetoothAudioSessionControl::SuspendStream(
+ from_session_type_2_1(session_type));
+}
+
+void HidlToAidlMiddleware_2_2::StopStream(const SessionType_2_1& session_type) {
+ return BluetoothAudioSessionControl::StopStream(
+ from_session_type_2_1(session_type));
+}
+
+void HidlToAidlMiddleware_2_2::UpdateTracksMetadata(
+ const SessionType_2_1& session_type,
+ const struct source_metadata* source_metadata) {
+ return BluetoothAudioSessionControl::UpdateSourceMetadata(
+ from_session_type_2_1(session_type), *source_metadata);
+}
+
+void HidlToAidlMiddleware_2_2::UpdateSinkMetadata(
+ const SessionType_2_1& session_type,
+ const struct sink_metadata* sink_metadata) {
+ return BluetoothAudioSessionControl::UpdateSinkMetadata(
+ from_session_type_2_1(session_type), *sink_metadata);
+}
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_0.h b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_0.h
new file mode 100644
index 0000000..b124d8f
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_0.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/hardware/bluetooth/audio/2.0/types.h>
+
+#include "../session/BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using SessionType_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::SessionType;
+using PortStatusCallbacks_2_0 =
+ ::android::bluetooth::audio::PortStatusCallbacks;
+using AudioConfig_2_0 =
+ ::android::hardware::bluetooth::audio::V2_0::AudioConfiguration;
+
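+// Bridges the HIDL 2.0 session control surface onto the AIDL
+// BluetoothAudioSessionControl; callers are expected to check
+// IsAidlAvailable() before routing through this class.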
+class HidlToAidlMiddleware_2_0 {
+ public:
+ static bool IsAidlAvailable();
+
+ static bool IsSessionReady(const SessionType_2_0& session_type);
+
+ static uint16_t RegisterControlResultCback(
+ const SessionType_2_0& session_type,
+ const PortStatusCallbacks_2_0& cbacks);
+
+ static void UnregisterControlResultCback(const SessionType_2_0& session_type,
+ uint16_t cookie);
+
+ static const AudioConfig_2_0 GetAudioConfig(
+ const SessionType_2_0& session_type);
+
+ static bool StartStream(const SessionType_2_0& session_type);
+
+ static void StopStream(const SessionType_2_0& session_type);
+
+ static bool SuspendStream(const SessionType_2_0& session_type);
+
+ static bool GetPresentationPosition(const SessionType_2_0& session_type,
+ uint64_t* remote_delay_report_ns,
+ uint64_t* total_bytes_readed,
+ timespec* data_position);
+
+ static void UpdateTracksMetadata(
+ const SessionType_2_0& session_type,
+ const struct source_metadata* source_metadata);
+
+ static size_t OutWritePcmData(const SessionType_2_0& session_type,
+ const void* buffer, size_t bytes);
+
+ static size_t InReadPcmData(const SessionType_2_0& session_type, void* buffer,
+ size_t bytes);
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_1.h b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_1.h
new file mode 100644
index 0000000..82dce96
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_1.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/hardware/bluetooth/audio/2.1/types.h>
+
+#include "../session/BluetoothAudioSession.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using SessionType_2_1 =
+ ::android::hardware::bluetooth::audio::V2_1::SessionType;
+using AudioConfig_2_1 =
+ ::android::hardware::bluetooth::audio::V2_1::AudioConfiguration;
+
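+// Bridges the HIDL 2.1 audio configuration query onto the AIDL
+// BluetoothAudioSessionControl.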
+class HidlToAidlMiddleware_2_1 {
+ public:
+ static const AudioConfig_2_1 GetAudioConfig(
+ const SessionType_2_1& session_type);
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_2.h b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_2.h
new file mode 100644
index 0000000..f6c3e5c
--- /dev/null
+++ b/bluetooth/audio/utils/aidl_session/HidlToAidlMiddleware_2_2.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/hardware/bluetooth/audio/2.2/types.h>
+
+#include "../session/BluetoothAudioSession.h"
+#include "../session/BluetoothAudioSession_2_2.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace bluetooth {
+namespace audio {
+
+using SessionType_2_1 =
+ ::android::hardware::bluetooth::audio::V2_1::SessionType;
+using PortStatusCallbacks_2_0 =
+ ::android::bluetooth::audio::PortStatusCallbacks;
+using PortStatusCallbacks_2_2 =
+ ::android::bluetooth::audio::PortStatusCallbacks_2_2;
+using AudioConfig_2_2 =
+ ::android::hardware::bluetooth::audio::V2_2::AudioConfiguration;
+
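+// Bridges the HIDL 2.2 session control surface, including the 2.2 callback and
+// metadata extensions, onto the AIDL BluetoothAudioSessionControl.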
+class HidlToAidlMiddleware_2_2 {
+ public:
+ static bool IsSessionReady(const SessionType_2_1& session_type);
+
+ static uint16_t RegisterControlResultCback(
+ const SessionType_2_1& session_type,
+ const PortStatusCallbacks_2_2& cbacks);
+
+ static void UnregisterControlResultCback(const SessionType_2_1& session_type,
+ uint16_t cookie);
+
+ static const AudioConfig_2_2 GetAudioConfig(
+ const SessionType_2_1& session_type);
+
+ static bool StartStream(const SessionType_2_1& session_type);
+
+ static bool SuspendStream(const SessionType_2_1& session_type);
+
+ static void StopStream(const SessionType_2_1& session_type);
+
+ static void UpdateTracksMetadata(
+ const SessionType_2_1& session_type,
+ const struct source_metadata* source_metadata);
+
+ static void UpdateSinkMetadata(const SessionType_2_1& session_type,
+ const struct sink_metadata* sink_metadata);
+};
+
+} // namespace audio
+} // namespace bluetooth
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession.cpp b/bluetooth/audio/utils/session/BluetoothAudioSession.cpp
index 2f3ddaf..283952e 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession.cpp
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession.cpp
@@ -21,10 +21,13 @@
#include <android-base/logging.h>
#include <android-base/stringprintf.h>
+#include "../aidl_session/HidlToAidlMiddleware_2_0.h"
+
namespace android {
namespace bluetooth {
namespace audio {
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_0;
using ::android::hardware::audio::common::V5_0::AudioContentType;
using ::android::hardware::audio::common::V5_0::AudioUsage;
using ::android::hardware::audio::common::V5_0::PlaybackTrackMetadata;
@@ -149,6 +152,8 @@
// The function helps to check if this session is ready or not
// @return: true if the Bluetooth stack has started the specified session
bool BluetoothAudioSession::IsSessionReady() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::IsSessionReady(session_type_);
std::lock_guard<std::recursive_mutex> guard(mutex_);
bool dataMQ_valid =
(session_type_ == SessionType::A2DP_HARDWARE_OFFLOAD_DATAPATH ||
@@ -200,6 +205,9 @@
// @return: cookie - the assigned number to this bluetooth_audio output
uint16_t BluetoothAudioSession::RegisterStatusCback(
const PortStatusCallbacks& cbacks) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::RegisterControlResultCback(session_type_,
+ cbacks);
std::lock_guard<std::recursive_mutex> guard(mutex_);
uint16_t cookie = ObserversCookieGetInitValue(session_type_);
uint16_t cookie_upper_bound = ObserversCookieGetUpperBound(session_type_);
@@ -227,6 +235,9 @@
// PortStatusCallbacks
// @param: cookie - indicates which bluetooth_audio output is
void BluetoothAudioSession::UnregisterStatusCback(uint16_t cookie) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::UnregisterControlResultCback(session_type_,
+ cookie);
std::lock_guard<std::recursive_mutex> guard(mutex_);
if (observers_.erase(cookie) != 1) {
LOG(WARNING) << __func__ << " - SessionType=" << toString(session_type_)
@@ -238,6 +249,9 @@
// The control function is for the bluetooth_audio module to get the current
// AudioConfiguration
const AudioConfiguration& BluetoothAudioSession::GetAudioConfig() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return (audio_config_ =
+ HidlToAidlMiddleware_2_0::GetAudioConfig(session_type_));
std::lock_guard<std::recursive_mutex> guard(mutex_);
if (IsSessionReady()) {
return audio_config_;
@@ -251,6 +265,8 @@
// Those control functions are for the bluetooth_audio module to start, suspend,
// stop stream, to check position, and to update metadata.
bool BluetoothAudioSession::StartStream() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::StartStream(session_type_);
std::lock_guard<std::recursive_mutex> guard(mutex_);
if (!IsSessionReady()) {
LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
@@ -267,6 +283,8 @@
}
bool BluetoothAudioSession::SuspendStream() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::SuspendStream(session_type_);
std::lock_guard<std::recursive_mutex> guard(mutex_);
if (!IsSessionReady()) {
LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
@@ -283,6 +301,8 @@
}
void BluetoothAudioSession::StopStream() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::StopStream(session_type_);
std::lock_guard<std::recursive_mutex> guard(mutex_);
if (!IsSessionReady()) {
return;
@@ -297,6 +317,10 @@
bool BluetoothAudioSession::GetPresentationPosition(
uint64_t* remote_delay_report_ns, uint64_t* total_bytes_readed,
timespec* data_position) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::GetPresentationPosition(
+ session_type_, remote_delay_report_ns, total_bytes_readed,
+ data_position);
std::lock_guard<std::recursive_mutex> guard(mutex_);
if (!IsSessionReady()) {
LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
@@ -330,6 +354,9 @@
void BluetoothAudioSession::UpdateTracksMetadata(
const struct source_metadata* source_metadata) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::UpdateTracksMetadata(session_type_,
+ source_metadata);
std::lock_guard<std::recursive_mutex> guard(mutex_);
if (!IsSessionReady()) {
LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_)
@@ -374,6 +401,9 @@
// The control function writes stream to FMQ
size_t BluetoothAudioSession::OutWritePcmData(const void* buffer,
size_t bytes) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::OutWritePcmData(session_type_, buffer,
+ bytes);
if (buffer == nullptr || !bytes) return 0;
size_t totalWritten = 0;
int ms_timeout = kFmqSendTimeoutMs;
@@ -407,6 +437,9 @@
// The control function reads stream from FMQ
size_t BluetoothAudioSession::InReadPcmData(void* buffer, size_t bytes) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_0::InReadPcmData(session_type_, buffer,
+ bytes);
if (buffer == nullptr || !bytes) return 0;
size_t totalRead = 0;
int ms_timeout = kFmqReceiveTimeoutMs;
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSessionControl_2_2.h b/bluetooth/audio/utils/session/BluetoothAudioSessionControl_2_2.h
index 368939e..c270ef0 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSessionControl_2_2.h
+++ b/bluetooth/audio/utils/session/BluetoothAudioSessionControl_2_2.h
@@ -152,7 +152,7 @@
std::shared_ptr<BluetoothAudioSession_2_2> session_ptr =
BluetoothAudioSessionInstance_2_2::GetSessionInstance(session_type);
if (session_ptr != nullptr) {
- session_ptr->GetAudioSession()->UpdateTracksMetadata(source_metadata);
+ session_ptr->UpdateTracksMetadata(source_metadata);
}
}
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.cpp b/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.cpp
index bf1f9b5..276a291 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.cpp
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.cpp
@@ -21,9 +21,14 @@
#include <android-base/logging.h>
#include <android-base/stringprintf.h>
+#include "../aidl_session/HidlToAidlMiddleware_2_0.h"
+#include "../aidl_session/HidlToAidlMiddleware_2_1.h"
+
namespace android {
namespace bluetooth {
namespace audio {
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_0;
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_1;
using SessionType_2_1 =
::android::hardware::bluetooth::audio::V2_1::SessionType;
using SessionType_2_0 =
@@ -72,6 +77,7 @@
} else {
session_type_2_1_ = (session_type);
}
+ raw_session_type_ = session_type;
}
std::shared_ptr<BluetoothAudioSession>
@@ -83,6 +89,8 @@
// AudioConfiguration
const ::android::hardware::bluetooth::audio::V2_1::AudioConfiguration
BluetoothAudioSession_2_1::GetAudioConfig() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_1::GetAudioConfig(raw_session_type_);
std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
if (audio_session->IsSessionReady()) {
// If session is unknown it means it should be 2.0 type
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.h b/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.h
index 5a35153..e634064 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.h
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession_2_1.h
@@ -31,6 +31,7 @@
std::shared_ptr<BluetoothAudioSession> audio_session;
::android::hardware::bluetooth::audio::V2_1::SessionType session_type_2_1_;
+ ::android::hardware::bluetooth::audio::V2_1::SessionType raw_session_type_;
// audio data configuration for both software and offloading
::android::hardware::bluetooth::audio::V2_1::AudioConfiguration
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.cpp b/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.cpp
index 60ac4ec..ceb0662 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.cpp
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.cpp
@@ -22,13 +22,22 @@
#include <android-base/stringprintf.h>
#include <android/hardware/bluetooth/audio/2.2/IBluetoothAudioPort.h>
+#include "../aidl_session/HidlToAidlMiddleware_2_0.h"
+#include "../aidl_session/HidlToAidlMiddleware_2_2.h"
+
namespace android {
namespace bluetooth {
namespace audio {
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_0;
+using ::aidl::android::hardware::bluetooth::audio::HidlToAidlMiddleware_2_2;
+using ::android::hardware::audio::common::V5_0::AudioContentType;
using ::android::hardware::audio::common::V5_0::AudioSource;
+using ::android::hardware::audio::common::V5_0::AudioUsage;
+using ::android::hardware::audio::common::V5_0::PlaybackTrackMetadata;
using ::android::hardware::audio::common::V5_0::RecordTrackMetadata;
using ::android::hardware::audio::common::V5_0::SinkMetadata;
+using ::android::hardware::audio::common::V5_0::SourceMetadata;
using ::android::hardware::bluetooth::audio::V2_0::BitsPerSample;
using ::android::hardware::bluetooth::audio::V2_0::ChannelMode;
using ::android::hardware::bluetooth::audio::V2_2::LeAudioConfiguration;
@@ -93,6 +102,7 @@
} else {
session_type_2_1_ = (session_type);
}
+ raw_session_type_ = session_type;
invalidSoftwareAudioConfiguration.pcmConfig(kInvalidPcmParameters);
invalidOffloadAudioConfiguration.codecConfig(
audio_session->kInvalidCodecConfiguration);
@@ -100,6 +110,8 @@
}
bool BluetoothAudioSession_2_2::IsSessionReady() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::IsSessionReady(raw_session_type_);
if (session_type_2_1_ !=
SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
session_type_2_1_ !=
@@ -120,8 +132,58 @@
return audio_session_2_1;
}
+void BluetoothAudioSession_2_2::UpdateTracksMetadata(
+ const struct source_metadata* source_metadata) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::UpdateTracksMetadata(raw_session_type_,
+ source_metadata);
+ std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
+ if (!IsSessionReady()) {
+ LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_2_1_)
+ << " has NO session";
+ return;
+ }
+
+ ssize_t track_count = source_metadata->track_count;
+ LOG(INFO) << __func__ << " - SessionType=" << toString(session_type_2_1_)
+ << ", " << track_count << " track(s)";
+
+ if (session_type_2_1_ == SessionType_2_1::UNKNOWN) {
+ audio_session->UpdateTracksMetadata(source_metadata);
+ return;
+ }
+
+ struct playback_track_metadata* track = source_metadata->tracks;
+ SourceMetadata sourceMetadata;
+ PlaybackTrackMetadata* halMetadata;
+
+ sourceMetadata.tracks.resize(track_count);
+ halMetadata = sourceMetadata.tracks.data();
+ while (track_count && track) {
+ halMetadata->usage = static_cast<AudioUsage>(track->usage);
+ halMetadata->contentType =
+ static_cast<AudioContentType>(track->content_type);
+ halMetadata->gain = track->gain;
+ LOG(VERBOSE) << __func__ << " - SessionType=" << toString(session_type_2_1_)
+ << ", usage=" << toString(halMetadata->usage)
+ << ", content=" << toString(halMetadata->contentType)
+ << ", gain=" << halMetadata->gain;
+ --track_count;
+ ++track;
+ ++halMetadata;
+ }
+ auto hal_retval = audio_session->stack_iface_->updateMetadata(sourceMetadata);
+ if (!hal_retval.isOk()) {
+ LOG(WARNING) << __func__ << " - IBluetoothAudioPort SessionType="
+ << toString(session_type_2_1_) << " failed";
+ }
+}
+
void BluetoothAudioSession_2_2::UpdateSinkMetadata(
const struct sink_metadata* sink_metadata) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::UpdateSinkMetadata(raw_session_type_,
+ sink_metadata);
std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
if (!IsSessionReady()) {
LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_2_1_)
@@ -172,6 +234,8 @@
// AudioConfiguration
const ::android::hardware::bluetooth::audio::V2_2::AudioConfiguration
BluetoothAudioSession_2_2::GetAudioConfig() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::GetAudioConfig(raw_session_type_);
std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
if (IsSessionReady()) {
auto audio_config_discriminator = audio_config_2_2_.getDiscriminator();
@@ -226,6 +290,8 @@
// Those control functions are for the bluetooth_audio module to start, suspend,
// stop stream, to check position, and to update metadata.
bool BluetoothAudioSession_2_2::StartStream() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::StartStream(raw_session_type_);
std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
if (!IsSessionReady()) {
LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_2_1_)
@@ -242,6 +308,8 @@
}
bool BluetoothAudioSession_2_2::SuspendStream() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::SuspendStream(raw_session_type_);
std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
if (!IsSessionReady()) {
LOG(DEBUG) << __func__ << " - SessionType=" << toString(session_type_2_1_)
@@ -258,6 +326,8 @@
}
void BluetoothAudioSession_2_2::StopStream() {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::StopStream(raw_session_type_);
std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
if (!IsSessionReady()) {
return;
@@ -395,6 +465,9 @@
// @return: cookie - the assigned number to this bluetooth_audio output
uint16_t BluetoothAudioSession_2_2::RegisterStatusCback(
const PortStatusCallbacks_2_2& cbacks) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::RegisterControlResultCback(
+ raw_session_type_, cbacks);
if (session_type_2_1_ !=
SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
session_type_2_1_ !=
@@ -432,6 +505,9 @@
// PortStatusCallbacks_2_2
// @param: cookie - indicates which bluetooth_audio output is
void BluetoothAudioSession_2_2::UnregisterStatusCback(uint16_t cookie) {
+ if (HidlToAidlMiddleware_2_0::IsAidlAvailable())
+ return HidlToAidlMiddleware_2_2::UnregisterControlResultCback(
+ raw_session_type_, cookie);
std::lock_guard<std::recursive_mutex> guard(audio_session->mutex_);
if (session_type_2_1_ !=
SessionType_2_1::LE_AUDIO_HARDWARE_OFFLOAD_ENCODING_DATAPATH &&
diff --git a/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.h b/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.h
index 3673fd8..e04ad80 100644
--- a/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.h
+++ b/bluetooth/audio/utils/session/BluetoothAudioSession_2_2.h
@@ -68,6 +68,7 @@
std::shared_ptr<BluetoothAudioSession_2_1> audio_session_2_1;
::android::hardware::bluetooth::audio::V2_1::SessionType session_type_2_1_;
+ ::android::hardware::bluetooth::audio::V2_1::SessionType raw_session_type_;
// audio data configuration for both software and offloading
::android::hardware::bluetooth::audio::V2_2::AudioConfiguration
@@ -151,6 +152,7 @@
const ::android::hardware::bluetooth::audio::V2_2::AudioConfiguration
GetAudioConfig();
+ void UpdateTracksMetadata(const struct source_metadata* source_metadata);
void UpdateSinkMetadata(const struct sink_metadata* sink_metadata);
static constexpr ::android::hardware::bluetooth::audio::V2_2::
diff --git a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
index c89d983..37d2973 100644
--- a/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
+++ b/camera/provider/2.4/vts/functional/VtsHalCameraProviderV2_4TargetTest.cpp
@@ -935,7 +935,6 @@
camera_metadata_ro_entry* streamConfigs,
camera_metadata_ro_entry* maxResolutionStreamConfigs,
const camera_metadata_t* staticMetadata);
- static bool isColorCamera(const camera_metadata_t *metadata);
static V3_2::DataspaceFlags getDataspace(PixelFormat format);
@@ -6187,141 +6186,6 @@
}
}
-// Test the multi-camera API requirement for Google Requirement Freeze S
-// Note that this requirement can only be partially tested. If a vendor
-// device doesn't expose a physical camera in any shape or form, there is no way
-// the test can catch it.
-TEST_P(CameraHidlTest, grfSMultiCameraTest) {
- const int socGrfApi = property_get_int32("ro.board.first_api_level", /*default*/ -1);
- if (socGrfApi < 31 /*S*/) {
- // Non-GRF devices, or version < 31 Skip
- ALOGI("%s: socGrfApi level is %d. Skipping", __FUNCTION__, socGrfApi);
- return;
- }
-
- // Test that if more than one rear-facing color camera is
- // supported, there must be at least one rear-facing logical camera.
- hidl_vec<hidl_string> cameraDeviceNames = getCameraDeviceNames(mProvider);
- // Back facing non-logical color cameras
- std::set<std::string> rearColorCameras;
- // Back facing logical cameras' physical camera Id sets
- std::set<std::set<std::string>> rearPhysicalIds;
- for (const auto& name : cameraDeviceNames) {
- std::string cameraId;
- int deviceVersion = getCameraDeviceVersionAndId(name, mProviderType, &cameraId);
- switch (deviceVersion) {
- case CAMERA_DEVICE_API_VERSION_3_7:
- case CAMERA_DEVICE_API_VERSION_3_6:
- case CAMERA_DEVICE_API_VERSION_3_5:
- case CAMERA_DEVICE_API_VERSION_3_4:
- case CAMERA_DEVICE_API_VERSION_3_3:
- case CAMERA_DEVICE_API_VERSION_3_2: {
- ::android::sp<::android::hardware::camera::device::V3_2::ICameraDevice> device3_x;
- ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str());
- Return<void> ret;
- ret = mProvider->getCameraDeviceInterface_V3_x(
- name, [&](auto status, const auto& device) {
- ALOGI("getCameraDeviceInterface_V3_x returns status:%d", (int)status);
- ASSERT_EQ(Status::OK, status);
- ASSERT_NE(device, nullptr);
- device3_x = device;
- });
- ASSERT_TRUE(ret.isOk());
-
- ret = device3_x->getCameraCharacteristics([&](auto status, const auto& chars) {
- ASSERT_EQ(Status::OK, status);
- const camera_metadata_t* metadata = (camera_metadata_t*)chars.data();
-
- // Skip if this is not a color camera.
- if (!CameraHidlTest::isColorCamera(metadata)) {
- return;
- }
-
- // Check camera facing. Skip if facing is not BACK.
- // If this is not a logical camera, only note down
- // the camera ID, and skip.
- camera_metadata_ro_entry entry;
- int retcode = find_camera_metadata_ro_entry(
- metadata, ANDROID_LENS_FACING, &entry);
- ASSERT_EQ(retcode, 0);
- ASSERT_GT(entry.count, 0);
- uint8_t facing = entry.data.u8[0];
- bool isLogicalCamera = (isLogicalMultiCamera(metadata) == Status::OK);
- if (facing != ANDROID_LENS_FACING_BACK) {
- // Not BACK facing. Skip.
- return;
- }
- if (!isLogicalCamera) {
- rearColorCameras.insert(cameraId);
- return;
- }
-
- // Check logical camera's physical camera IDs for color
- // cameras.
- std::unordered_set<std::string> physicalCameraIds;
- Status s = getPhysicalCameraIds(metadata, &physicalCameraIds);
- ASSERT_EQ(Status::OK, s);
- rearPhysicalIds.emplace(physicalCameraIds.begin(), physicalCameraIds.end());
- for (const auto& physicalId : physicalCameraIds) {
- // Skip if the physicalId is publicly available
- for (auto& deviceName : cameraDeviceNames) {
- std::string publicVersion, publicId;
- ASSERT_TRUE(::matchDeviceName(deviceName, mProviderType,
- &publicVersion, &publicId));
- if (physicalId == publicId) {
- // Skip because public Ids will be iterated in outer loop.
- return;
- }
- }
-
- auto castResult = device::V3_5::ICameraDevice::castFrom(device3_x);
- ASSERT_TRUE(castResult.isOk());
- ::android::sp<::android::hardware::camera::device::V3_5::ICameraDevice>
- device3_5 = castResult;
- ASSERT_NE(device3_5, nullptr);
-
- // Check camera characteristics for hidden camera id
- Return<void> ret = device3_5->getPhysicalCameraCharacteristics(
- physicalId, [&](auto status, const auto& chars) {
- ASSERT_EQ(Status::OK, status);
- const camera_metadata_t* physicalMetadata =
- (camera_metadata_t*)chars.data();
-
- if (CameraHidlTest::isColorCamera(physicalMetadata)) {
- rearColorCameras.insert(physicalId);
- }
- });
- ASSERT_TRUE(ret.isOk());
- }
- });
- ASSERT_TRUE(ret.isOk());
- } break;
- case CAMERA_DEVICE_API_VERSION_1_0: {
- // Not applicable
- } break;
- default: {
- ALOGE("%s: Unsupported device version %d", __func__, deviceVersion);
- ADD_FAILURE();
- } break;
- }
- }
-
- // If there are more than one rear-facing color camera, a logical
- // multi-camera must be defined consisting of all rear-facing color
- // cameras.
- if (rearColorCameras.size() > 1) {
- bool hasRearLogical = false;
- for (const auto& physicalIds : rearPhysicalIds) {
- if (std::includes(physicalIds.begin(), physicalIds.end(),
- rearColorCameras.begin(), rearColorCameras.end())) {
- hasRearLogical = true;
- break;
- }
- }
- ASSERT_TRUE(hasRearLogical);
- }
-}
-
// Retrieve all valid output stream resolutions from the camera
// static characteristics.
Status CameraHidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta,
@@ -6794,23 +6658,6 @@
return ret;
}
-bool CameraHidlTest::isColorCamera(const camera_metadata_t *metadata) {
- camera_metadata_ro_entry entry;
- int retcode = find_camera_metadata_ro_entry(
- metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry);
- if ((0 == retcode) && (entry.count > 0)) {
- bool isBackwardCompatible = (std::find(entry.data.u8, entry.data.u8 + entry.count,
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) !=
- entry.data.u8 + entry.count);
- bool isMonochrome = (std::find(entry.data.u8, entry.data.u8 + entry.count,
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME) !=
- entry.data.u8 + entry.count);
- bool isColor = isBackwardCompatible && !isMonochrome;
- return isColor;
- }
- return false;
-}
-
// Retrieve the reprocess input-output format map from the static
// camera characteristics.
Status CameraHidlTest::getZSLInputOutputMap(camera_metadata_t *staticMeta,
diff --git a/compatibility_matrices/compatibility_matrix.current.xml b/compatibility_matrices/compatibility_matrix.current.xml
index db99423..f751efc 100644
--- a/compatibility_matrices/compatibility_matrix.current.xml
+++ b/compatibility_matrices/compatibility_matrix.current.xml
@@ -425,6 +425,13 @@
<instance>default</instance>
</interface>
</hal>
+ <hal format="aidl" optional="true">
+ <name>android.hardware.nfc</name>
+ <interface>
+ <name>INfc</name>
+ <instance>default</instance>
+ </interface>
+ </hal>
<hal format="hidl" optional="true">
<name>android.hardware.oemlock</name>
<version>1.0</version>
@@ -628,7 +635,7 @@
<instance>default</instance>
</interface>
</hal>
- <hal format="hidl" optional="true">
+ <hal format="hidl" optional="false">
<name>android.hardware.thermal</name>
<version>2.0</version>
<interface>
diff --git a/graphics/common/OWNERS b/graphics/common/OWNERS
new file mode 100644
index 0000000..94999ea
--- /dev/null
+++ b/graphics/common/OWNERS
@@ -0,0 +1,5 @@
+# Bug component: 1075130
+adyabr@google.com
+alecmouri@google.com
+jreck@google.com
+scroggo@google.com
diff --git a/graphics/composer/2.4/vts/functional/VtsHalGraphicsComposerV2_4TargetTest.cpp b/graphics/composer/2.4/vts/functional/VtsHalGraphicsComposerV2_4TargetTest.cpp
index b071f71..fa294ff 100644
--- a/graphics/composer/2.4/vts/functional/VtsHalGraphicsComposerV2_4TargetTest.cpp
+++ b/graphics/composer/2.4/vts/functional/VtsHalGraphicsComposerV2_4TargetTest.cpp
@@ -161,7 +161,8 @@
return mGralloc->allocate(
width, height, /*layerCount*/ 1,
static_cast<common::V1_1::PixelFormat>(PixelFormat::RGBA_8888),
- static_cast<uint64_t>(BufferUsage::CPU_WRITE_OFTEN | BufferUsage::CPU_READ_OFTEN));
+ static_cast<uint64_t>(BufferUsage::CPU_WRITE_OFTEN | BufferUsage::CPU_READ_OFTEN |
+ BufferUsage::COMPOSER_OVERLAY));
}
struct TestParameters {
diff --git a/health/aidl/aidl_api/android.hardware.health/current/android/hardware/health/HealthInfo.aidl b/health/aidl/aidl_api/android.hardware.health/current/android/hardware/health/HealthInfo.aidl
index 34a87a6..97d9e84 100644
--- a/health/aidl/aidl_api/android.hardware.health/current/android/hardware/health/HealthInfo.aidl
+++ b/health/aidl/aidl_api/android.hardware.health/current/android/hardware/health/HealthInfo.aidl
@@ -37,6 +37,7 @@
boolean chargerAcOnline;
boolean chargerUsbOnline;
boolean chargerWirelessOnline;
+ boolean chargerDockOnline;
int maxChargingCurrentMicroamps;
int maxChargingVoltageMicrovolts;
android.hardware.health.BatteryStatus batteryStatus;
diff --git a/health/aidl/android/hardware/health/HealthInfo.aidl b/health/aidl/android/hardware/health/HealthInfo.aidl
index 504e218..5b98baf 100644
--- a/health/aidl/android/hardware/health/HealthInfo.aidl
+++ b/health/aidl/android/hardware/health/HealthInfo.aidl
@@ -40,6 +40,10 @@
*/
boolean chargerWirelessOnline;
/**
+ * Dock charger state - 'true' if online
+ */
+ boolean chargerDockOnline;
+ /**
* Maximum charging current supported by charger in µA
*/
int maxChargingCurrentMicroamps;
diff --git a/health/aidl/default/HalHealthLoop.cpp b/health/aidl/default/HalHealthLoop.cpp
index c9a081e..ec23c10 100644
--- a/health/aidl/default/HalHealthLoop.cpp
+++ b/health/aidl/default/HalHealthLoop.cpp
@@ -61,7 +61,7 @@
void HalHealthLoop::set_charger_online(const HealthInfo& health_info) {
charger_online_ = health_info.chargerAcOnline || health_info.chargerUsbOnline ||
- health_info.chargerWirelessOnline;
+ health_info.chargerWirelessOnline || health_info.chargerDockOnline;
}
} // namespace aidl::android::hardware::health
diff --git a/health/aidl/default/health-convert.cpp b/health/aidl/default/health-convert.cpp
index b5251f4..6118865 100644
--- a/health/aidl/default/health-convert.cpp
+++ b/health/aidl/default/health-convert.cpp
@@ -22,6 +22,7 @@
p->chargerAcOnline = info.chargerAcOnline;
p->chargerUsbOnline = info.chargerUsbOnline;
p->chargerWirelessOnline = info.chargerWirelessOnline;
+ p->chargerDockOnline = info.chargerDockOnline;
p->maxChargingCurrent = info.maxChargingCurrentMicroamps;
p->maxChargingVoltage = info.maxChargingVoltageMicrovolts;
p->batteryStatus = static_cast<int>(info.batteryStatus);
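For orientation only, a minimal caller-side sketch of the new field, assuming the AIDL HealthInfo type shown above; the aggregation mirrors the updated HalHealthLoop::set_charger_online, and nothing here is part of the change itself.

    // Sketch only: HealthInfo comes from the health AIDL above.
    #include <aidl/android/hardware/health/HealthInfo.h>

    using aidl::android::hardware::health::HealthInfo;

    bool anyChargerOnline(const HealthInfo& info) {
        // Mirrors HalHealthLoop::set_charger_online: a dock charger now also counts.
        return info.chargerAcOnline || info.chargerUsbOnline || info.chargerWirelessOnline ||
               info.chargerDockOnline;
    }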
diff --git a/keymaster/3.0/vts/functional/Android.bp b/keymaster/3.0/vts/functional/Android.bp
index e2ae803..39bec3f 100644
--- a/keymaster/3.0/vts/functional/Android.bp
+++ b/keymaster/3.0/vts/functional/Android.bp
@@ -38,5 +38,11 @@
"libcrypto_static",
"libsoftkeymasterdevice",
],
- test_suites: ["general-tests", "vts"],
+ test_suites: [
+ "general-tests",
+ "vts",
+ ],
+ sanitize: {
+ cfi: false,
+ },
}
diff --git a/keymaster/4.1/vts/functional/Android.bp b/keymaster/4.1/vts/functional/Android.bp
index c650bec..547ce38 100644
--- a/keymaster/4.1/vts/functional/Android.bp
+++ b/keymaster/4.1/vts/functional/Android.bp
@@ -48,4 +48,7 @@
"general-tests",
"vts",
],
+ sanitize: {
+ cfi: false,
+ },
}
diff --git a/neuralnetworks/1.0/utils/Android.bp b/neuralnetworks/1.0/utils/Android.bp
index 31cdded..ad30e30 100644
--- a/neuralnetworks/1.0/utils/Android.bp
+++ b/neuralnetworks/1.0/utils/Android.bp
@@ -31,16 +31,11 @@
export_include_dirs: ["include"],
cflags: ["-Wthread-safety"],
static_libs: [
+ "android.hardware.neuralnetworks@1.0",
"libarect",
"neuralnetworks_types",
"neuralnetworks_utils_hal_common",
],
- shared_libs: [
- "android.hardware.neuralnetworks@1.0",
- ],
- export_static_lib_headers: [
- "neuralnetworks_utils_hal_common",
- ],
target: {
android: {
shared_libs: ["libnativewindow"],
@@ -55,19 +50,14 @@
static_libs: [
"android.hardware.neuralnetworks@1.0",
"libgmock",
- "libneuralnetworks_common",
"neuralnetworks_types",
"neuralnetworks_utils_hal_common",
"neuralnetworks_utils_hal_1_0",
],
shared_libs: [
- "android.hidl.allocator@1.0",
- "android.hidl.memory@1.0",
"libbase",
"libcutils",
- "libfmq",
"libhidlbase",
- "libhidlmemory",
"liblog",
"libutils",
],
diff --git a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h
index 8bd2fbe..cef76c6 100644
--- a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h
+++ b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Burst.h
@@ -45,12 +45,15 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
private:
const nn::SharedPreparedModel kPreparedModel;
diff --git a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Device.h b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Device.h
index 0a6ca3e..d7c43ef 100644
--- a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Device.h
+++ b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/Device.h
@@ -65,8 +65,9 @@
nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache,
- const nn::CacheToken& token) const override;
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
index bdb5b54..337c132 100644
--- a/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
+++ b/neuralnetworks/1.0/utils/include/nnapi/hal/1.0/PreparedModel.h
@@ -49,18 +49,23 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
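Across the 1.x utils wrappers in this change, execute(), executeFenced(), and createReusableExecution() gain two trailing parameters, hints and extensionNameToPrefix, which the pre-AIDL adapters accept and ignore. A minimal caller-side sketch, assuming an nn::SharedPreparedModel obtained elsewhere; passing empty vectors keeps the old behavior:

    // Sketch only: empty hint/extension vectors make the call equivalent to the old
    // four-argument execute(); the 1.x wrappers in this change simply drop them.
    nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> runOnce(
            const nn::SharedPreparedModel& preparedModel, const nn::Request& request) {
        const std::vector<nn::TokenValuePair> hints;                          // no execution hints
        const std::vector<nn::ExtensionNameAndPrefix> extensionNameToPrefix;  // no vendor extensions
        return preparedModel->execute(request, nn::MeasureTiming::NO, /*deadline=*/{},
                                      /*loopTimeoutDuration=*/{}, hints, extensionNameToPrefix);
    }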
diff --git a/neuralnetworks/1.0/utils/src/Burst.cpp b/neuralnetworks/1.0/utils/src/Burst.cpp
index 1284721..3642bc6 100644
--- a/neuralnetworks/1.0/utils/src/Burst.cpp
+++ b/neuralnetworks/1.0/utils/src/Burst.cpp
@@ -50,15 +50,20 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const {
- return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration);
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+ return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
}
nn::GeneralResult<nn::SharedExecution> Burst::createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
- return kPreparedModel->createReusableExecution(request, measure, loopTimeoutDuration);
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+ return kPreparedModel->createReusableExecution(request, measure, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
}
} // namespace android::hardware::neuralnetworks::V1_0::utils
diff --git a/neuralnetworks/1.0/utils/src/Device.cpp b/neuralnetworks/1.0/utils/src/Device.cpp
index b0c236e..620d040 100644
--- a/neuralnetworks/1.0/utils/src/Device.cpp
+++ b/neuralnetworks/1.0/utils/src/Device.cpp
@@ -143,7 +143,9 @@
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
const nn::Model& model, nn::ExecutionPreference /*preference*/, nn::Priority /*priority*/,
nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& /*modelCache*/,
- const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
+ const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that model is ready for IPC.
std::optional<nn::Model> maybeModelInShared;
const nn::Model& modelInShared =
diff --git a/neuralnetworks/1.0/utils/src/PreparedModel.cpp b/neuralnetworks/1.0/utils/src/PreparedModel.cpp
index 00e7d22..b8055fc 100644
--- a/neuralnetworks/1.0/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.0/utils/src/PreparedModel.cpp
@@ -59,7 +59,9 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
const nn::Request& request, nn::MeasureTiming /*measure*/,
const nn::OptionalTimePoint& /*deadline*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -94,19 +96,22 @@
}
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFenced(const nn::Request& /*request*/,
- const std::vector<nn::SyncFence>& /*waitFor*/,
- nn::MeasureTiming /*measure*/,
- const nn::OptionalTimePoint& /*deadline*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/,
- const nn::OptionalDuration& /*timeoutDurationAfterFence*/) const {
+PreparedModel::executeFenced(
+ const nn::Request& /*request*/, const std::vector<nn::SyncFence>& /*waitFor*/,
+ nn::MeasureTiming /*measure*/, const nn::OptionalTimePoint& /*deadline*/,
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const nn::OptionalDuration& /*timeoutDurationAfterFence*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
<< "IPreparedModel::executeFenced is not supported on 1.0 HAL service";
}
nn::GeneralResult<nn::SharedExecution> PreparedModel::createReusableExecution(
const nn::Request& request, nn::MeasureTiming /*measure*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
diff --git a/neuralnetworks/1.0/utils/test/DeviceTest.cpp b/neuralnetworks/1.0/utils/test/DeviceTest.cpp
index 83e555f..9e9db16 100644
--- a/neuralnetworks/1.0/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/1.0/utils/test/DeviceTest.cpp
@@ -380,7 +380,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -399,7 +399,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -417,7 +417,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -435,7 +435,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -452,7 +452,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -469,7 +469,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -488,7 +488,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/1.0/utils/test/PreparedModelTest.cpp b/neuralnetworks/1.0/utils/test/PreparedModelTest.cpp
index 7820c06..e03a98d 100644
--- a/neuralnetworks/1.0/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/1.0/utils/test/PreparedModelTest.cpp
@@ -121,7 +121,7 @@
.WillOnce(Invoke(makeExecute(V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::NONE)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
EXPECT_TRUE(result.has_value())
@@ -138,7 +138,7 @@
V1_0::ErrorStatus::GENERAL_FAILURE)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -155,7 +155,7 @@
makeExecute(V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::GENERAL_FAILURE)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -171,7 +171,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -187,7 +187,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -205,7 +205,7 @@
EXPECT_CALL(*mockPreparedModel, execute(_, _)).Times(1).WillOnce(InvokeWithoutArgs(ret));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -218,7 +218,7 @@
const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -235,7 +235,7 @@
.WillRepeatedly(Invoke(makeExecute(V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::NONE)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -258,7 +258,7 @@
V1_0::ErrorStatus::GENERAL_FAILURE)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -279,7 +279,7 @@
makeExecute(V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::GENERAL_FAILURE)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -299,7 +299,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -319,7 +319,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -341,7 +341,7 @@
EXPECT_CALL(*mockPreparedModel, execute(_, _)).Times(1).WillOnce(InvokeWithoutArgs(ret));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -358,7 +358,7 @@
const auto preparedModel = PreparedModel::create(mockPreparedModel).value();
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
diff --git a/neuralnetworks/1.1/utils/Android.bp b/neuralnetworks/1.1/utils/Android.bp
index 737ff58..4b8999f 100644
--- a/neuralnetworks/1.1/utils/Android.bp
+++ b/neuralnetworks/1.1/utils/Android.bp
@@ -31,17 +31,12 @@
export_include_dirs: ["include"],
cflags: ["-Wthread-safety"],
static_libs: [
+ "android.hardware.neuralnetworks@1.0",
+ "android.hardware.neuralnetworks@1.1",
"neuralnetworks_types",
"neuralnetworks_utils_hal_common",
"neuralnetworks_utils_hal_1_0",
],
- shared_libs: [
- "android.hardware.neuralnetworks@1.0",
- "android.hardware.neuralnetworks@1.1",
- ],
- export_static_lib_headers: [
- "neuralnetworks_utils_hal_common",
- ],
}
cc_test {
@@ -52,20 +47,15 @@
"android.hardware.neuralnetworks@1.0",
"android.hardware.neuralnetworks@1.1",
"libgmock",
- "libneuralnetworks_common",
"neuralnetworks_types",
"neuralnetworks_utils_hal_common",
"neuralnetworks_utils_hal_1_0",
"neuralnetworks_utils_hal_1_1",
],
shared_libs: [
- "android.hidl.allocator@1.0",
- "android.hidl.memory@1.0",
"libbase",
"libcutils",
- "libfmq",
"libhidlbase",
- "libhidlmemory",
"liblog",
"libutils",
],
diff --git a/neuralnetworks/1.1/utils/include/nnapi/hal/1.1/Device.h b/neuralnetworks/1.1/utils/include/nnapi/hal/1.1/Device.h
index d6bd36a..38ca138 100644
--- a/neuralnetworks/1.1/utils/include/nnapi/hal/1.1/Device.h
+++ b/neuralnetworks/1.1/utils/include/nnapi/hal/1.1/Device.h
@@ -64,8 +64,9 @@
nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache,
- const nn::CacheToken& token) const override;
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/1.1/utils/src/Device.cpp b/neuralnetworks/1.1/utils/src/Device.cpp
index 3effa84..28f3276 100644
--- a/neuralnetworks/1.1/utils/src/Device.cpp
+++ b/neuralnetworks/1.1/utils/src/Device.cpp
@@ -143,7 +143,9 @@
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority /*priority*/,
nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& /*modelCache*/,
- const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
+ const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that model is ready for IPC.
std::optional<nn::Model> maybeModelInShared;
const nn::Model& modelInShared =
diff --git a/neuralnetworks/1.1/utils/test/DeviceTest.cpp b/neuralnetworks/1.1/utils/test/DeviceTest.cpp
index 2248da6..8ab87bc 100644
--- a/neuralnetworks/1.1/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/1.1/utils/test/DeviceTest.cpp
@@ -390,7 +390,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -409,7 +409,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -427,7 +427,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -445,7 +445,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -462,7 +462,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -479,7 +479,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -498,7 +498,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/1.2/utils/Android.bp b/neuralnetworks/1.2/utils/Android.bp
index 4eefb0f..4c5f065 100644
--- a/neuralnetworks/1.2/utils/Android.bp
+++ b/neuralnetworks/1.2/utils/Android.bp
@@ -31,19 +31,14 @@
export_include_dirs: ["include"],
cflags: ["-Wthread-safety"],
static_libs: [
- "neuralnetworks_types",
- "neuralnetworks_utils_hal_common",
- "neuralnetworks_utils_hal_1_0",
- "neuralnetworks_utils_hal_1_1",
- ],
- shared_libs: [
"android.hardware.neuralnetworks@1.0",
"android.hardware.neuralnetworks@1.1",
"android.hardware.neuralnetworks@1.2",
"libfmq",
- ],
- export_static_lib_headers: [
+ "neuralnetworks_types",
"neuralnetworks_utils_hal_common",
+ "neuralnetworks_utils_hal_1_0",
+ "neuralnetworks_utils_hal_1_1",
],
product_variables: {
debuggable: { // eng and userdebug builds
@@ -71,7 +66,6 @@
"android.hardware.neuralnetworks@1.1",
"android.hardware.neuralnetworks@1.2",
"libgmock",
- "libneuralnetworks_common",
"neuralnetworks_types",
"neuralnetworks_utils_hal_common",
"neuralnetworks_utils_hal_1_0",
@@ -79,13 +73,10 @@
"neuralnetworks_utils_hal_1_2",
],
shared_libs: [
- "android.hidl.allocator@1.0",
- "android.hidl.memory@1.0",
"libbase",
"libcutils",
"libfmq",
"libhidlbase",
- "libhidlmemory",
"liblog",
"libutils",
],
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Burst.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Burst.h
index ac9411c..1b28476 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Burst.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Burst.h
@@ -170,13 +170,16 @@
// See IBurst::execute for information on this method.
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
// See IBurst::createReusableExecution for information on this method.
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
// If fallback is not nullptr, this method will invoke the fallback function to try another
// execution path if the packet could not be sent. Otherwise, failing to send the packet will
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Conversions.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Conversions.h
index c3348aa..4f13adc 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Conversions.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Conversions.h
@@ -37,7 +37,7 @@
GeneralResult<Operand::ExtraParams> unvalidatedConvert(
const hal::V1_2::Operand::ExtraParams& extraParams);
GeneralResult<Model> unvalidatedConvert(const hal::V1_2::Model& model);
-GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
+GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
const hal::V1_2::Model::ExtensionNameAndPrefix& extensionNameAndPrefix);
GeneralResult<OutputShape> unvalidatedConvert(const hal::V1_2::OutputShape& outputShape);
GeneralResult<MeasureTiming> unvalidatedConvert(const hal::V1_2::MeasureTiming& measureTiming);
@@ -78,7 +78,7 @@
const nn::Operand::ExtraParams& extraParams);
nn::GeneralResult<Model> unvalidatedConvert(const nn::Model& model);
nn::GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
- const nn::Model::ExtensionNameAndPrefix& extensionNameAndPrefix);
+ const nn::ExtensionNameAndPrefix& extensionNameAndPrefix);
nn::GeneralResult<OutputShape> unvalidatedConvert(const nn::OutputShape& outputShape);
nn::GeneralResult<MeasureTiming> unvalidatedConvert(const nn::MeasureTiming& measureTiming);
nn::GeneralResult<Timing> unvalidatedConvert(const nn::Timing& timing);
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h
index e7ac172..d92cf50 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/Device.h
@@ -83,8 +83,9 @@
nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache,
- const nn::CacheToken& token) const override;
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
index 1150e5e..72a5b2f 100644
--- a/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
+++ b/neuralnetworks/1.2/utils/include/nnapi/hal/1.2/PreparedModel.h
@@ -49,18 +49,23 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
diff --git a/neuralnetworks/1.2/utils/src/Burst.cpp b/neuralnetworks/1.2/utils/src/Burst.cpp
index 911fbfa..23e8070 100644
--- a/neuralnetworks/1.2/utils/src/Burst.cpp
+++ b/neuralnetworks/1.2/utils/src/Burst.cpp
@@ -305,8 +305,9 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// This is the first point when we know an execution is occurring, so begin to collect
// systraces. Note that the first point we can begin collecting systraces in
// ExecutionBurstServer is when the RequestChannelReceiver realizes there is data in the FMQ, so
@@ -317,7 +318,7 @@
// fall back to another execution path
if (!compliantVersion(request).ok()) {
// fallback to another execution path if the packet could not be sent
- return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration);
+ return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration, {}, {});
}
// ensure that request is ready for IPC
@@ -346,7 +347,7 @@
// send request packet
const auto requestPacket = serialize(hidlRequest, hidlMeasure, slots);
const auto fallback = [this, &request, measure, &deadline, &loopTimeoutDuration] {
- return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration);
+ return kPreparedModel->execute(request, measure, deadline, loopTimeoutDuration, {}, {});
};
return executeInternal(requestPacket, relocation, fallback);
}
@@ -354,14 +355,17 @@
// See IBurst::createReusableExecution for information on this method.
nn::GeneralResult<nn::SharedExecution> Burst::createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
NNTRACE_RT(NNTRACE_PHASE_EXECUTION, "Burst::createReusableExecution");
// if the request is valid but of a higher version than what's supported in burst execution,
// fall back to another execution path
if (!compliantVersion(request).ok()) {
// fallback to another execution path if the packet could not be sent
- return kPreparedModel->createReusableExecution(request, measure, loopTimeoutDuration);
+ return kPreparedModel->createReusableExecution(request, measure, loopTimeoutDuration, {},
+ {});
}
// ensure that request is ready for IPC
diff --git a/neuralnetworks/1.2/utils/src/Conversions.cpp b/neuralnetworks/1.2/utils/src/Conversions.cpp
index 838d9c4..62ec2ed 100644
--- a/neuralnetworks/1.2/utils/src/Conversions.cpp
+++ b/neuralnetworks/1.2/utils/src/Conversions.cpp
@@ -212,9 +212,9 @@
};
}
-GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
+GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
const hal::V1_2::Model::ExtensionNameAndPrefix& extensionNameAndPrefix) {
- return Model::ExtensionNameAndPrefix{
+ return ExtensionNameAndPrefix{
.name = extensionNameAndPrefix.name,
.prefix = extensionNameAndPrefix.prefix,
};
@@ -495,7 +495,7 @@
}
nn::GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
- const nn::Model::ExtensionNameAndPrefix& extensionNameAndPrefix) {
+ const nn::ExtensionNameAndPrefix& extensionNameAndPrefix) {
return Model::ExtensionNameAndPrefix{
.name = extensionNameAndPrefix.name,
.prefix = extensionNameAndPrefix.prefix,
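The canonical type is now the top-level nn::ExtensionNameAndPrefix rather than the nested nn::Model::ExtensionNameAndPrefix; only the HAL 1.2 struct keeps the nested name. A rough sketch of the two forms, with made-up extension name and prefix values:

    // Illustrative only: canonical (top-level) form used by the updated conversions.
    const nn::ExtensionNameAndPrefix canonical{
            .name = "com.example.test_extension",  // hypothetical extension name
            .prefix = 0x1234,                      // hypothetical 16-bit prefix
    };
    // The HAL 1.2 side still nests the type under Model.
    const V1_2::Model::ExtensionNameAndPrefix hal{.name = canonical.name,
                                                  .prefix = canonical.prefix};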
diff --git a/neuralnetworks/1.2/utils/src/Device.cpp b/neuralnetworks/1.2/utils/src/Device.cpp
index e7acecd..3a58d2c 100644
--- a/neuralnetworks/1.2/utils/src/Device.cpp
+++ b/neuralnetworks/1.2/utils/src/Device.cpp
@@ -236,7 +236,9 @@
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority /*priority*/,
nn::OptionalTimePoint /*deadline*/, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that model is ready for IPC.
std::optional<nn::Model> maybeModelInShared;
const nn::Model& modelInShared =
diff --git a/neuralnetworks/1.2/utils/src/PreparedModel.cpp b/neuralnetworks/1.2/utils/src/PreparedModel.cpp
index 6df3df3..feb3951 100644
--- a/neuralnetworks/1.2/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.2/utils/src/PreparedModel.cpp
@@ -91,7 +91,9 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
const nn::Request& request, nn::MeasureTiming measure,
const nn::OptionalTimePoint& /*deadline*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -123,19 +125,22 @@
}
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFenced(const nn::Request& /*request*/,
- const std::vector<nn::SyncFence>& /*waitFor*/,
- nn::MeasureTiming /*measure*/,
- const nn::OptionalTimePoint& /*deadline*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/,
- const nn::OptionalDuration& /*timeoutDurationAfterFence*/) const {
+PreparedModel::executeFenced(
+ const nn::Request& /*request*/, const std::vector<nn::SyncFence>& /*waitFor*/,
+ nn::MeasureTiming /*measure*/, const nn::OptionalTimePoint& /*deadline*/,
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const nn::OptionalDuration& /*timeoutDurationAfterFence*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
<< "IPreparedModel::executeFenced is not supported on 1.2 HAL service";
}
nn::GeneralResult<nn::SharedExecution> PreparedModel::createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
diff --git a/neuralnetworks/1.2/utils/test/DeviceTest.cpp b/neuralnetworks/1.2/utils/test/DeviceTest.cpp
index 1dc6285..0d8c141 100644
--- a/neuralnetworks/1.2/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/1.2/utils/test/DeviceTest.cpp
@@ -636,7 +636,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -655,7 +655,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -673,7 +673,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -691,7 +691,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -708,7 +708,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -725,7 +725,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -746,7 +746,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/1.2/utils/test/PreparedModelTest.cpp b/neuralnetworks/1.2/utils/test/PreparedModelTest.cpp
index 5e2ad79..a5ec9d3 100644
--- a/neuralnetworks/1.2/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/1.2/utils/test/PreparedModelTest.cpp
@@ -154,7 +154,7 @@
.WillOnce(Invoke(makeExecuteSynchronously(V1_0::ErrorStatus::NONE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
EXPECT_TRUE(result.has_value())
@@ -172,7 +172,7 @@
makeExecuteSynchronously(V1_0::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -189,7 +189,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -206,7 +206,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -224,7 +224,7 @@
V1_0::ErrorStatus::NONE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
EXPECT_TRUE(result.has_value())
@@ -243,7 +243,7 @@
kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -261,7 +261,7 @@
V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -278,7 +278,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -295,7 +295,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -314,7 +314,7 @@
EXPECT_CALL(*mockPreparedModel, execute_1_2(_, _, _)).Times(1).WillOnce(InvokeWithoutArgs(ret));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -328,7 +328,7 @@
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -347,7 +347,7 @@
Invoke(makeExecuteSynchronously(V1_0::ErrorStatus::NONE, {}, kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -371,7 +371,7 @@
makeExecuteSynchronously(V1_0::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -392,7 +392,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -413,7 +413,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -436,7 +436,7 @@
V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::NONE, {}, kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -461,7 +461,7 @@
kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -483,7 +483,7 @@
V1_0::ErrorStatus::NONE, V1_0::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -504,7 +504,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -525,7 +525,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -548,7 +548,7 @@
EXPECT_CALL(*mockPreparedModel, execute_1_2(_, _, _)).Times(1).WillOnce(InvokeWithoutArgs(ret));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -566,7 +566,7 @@
PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value();
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
diff --git a/neuralnetworks/1.3/utils/Android.bp b/neuralnetworks/1.3/utils/Android.bp
index 7acb4fc..c512dda 100644
--- a/neuralnetworks/1.3/utils/Android.bp
+++ b/neuralnetworks/1.3/utils/Android.bp
@@ -31,21 +31,16 @@
export_include_dirs: ["include"],
cflags: ["-Wthread-safety"],
static_libs: [
- "neuralnetworks_types",
- "neuralnetworks_utils_hal_common",
- "neuralnetworks_utils_hal_1_0",
- "neuralnetworks_utils_hal_1_1",
- "neuralnetworks_utils_hal_1_2",
- ],
- shared_libs: [
"android.hardware.neuralnetworks@1.0",
"android.hardware.neuralnetworks@1.1",
"android.hardware.neuralnetworks@1.2",
"android.hardware.neuralnetworks@1.3",
"libfmq",
- ],
- export_static_lib_headers: [
+ "neuralnetworks_types",
"neuralnetworks_utils_hal_common",
+ "neuralnetworks_utils_hal_1_0",
+ "neuralnetworks_utils_hal_1_1",
+ "neuralnetworks_utils_hal_1_2",
],
target: {
host: {
@@ -69,7 +64,6 @@
"android.hardware.neuralnetworks@1.2",
"android.hardware.neuralnetworks@1.3",
"libgmock",
- "libneuralnetworks_common",
"neuralnetworks_types",
"neuralnetworks_utils_hal_common",
"neuralnetworks_utils_hal_1_0",
@@ -78,13 +72,10 @@
"neuralnetworks_utils_hal_1_3",
],
shared_libs: [
- "android.hidl.allocator@1.0",
- "android.hidl.memory@1.0",
"libbase",
"libcutils",
"libfmq",
"libhidlbase",
- "libhidlmemory",
"liblog",
"libutils",
],
diff --git a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/Device.h b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/Device.h
index c3c6fc4..cf5e5ea0 100644
--- a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/Device.h
+++ b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/Device.h
@@ -66,8 +66,9 @@
nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache,
- const nn::CacheToken& token) const override;
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
index 480438d..124cc43 100644
--- a/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
+++ b/neuralnetworks/1.3/utils/include/nnapi/hal/1.3/PreparedModel.h
@@ -48,18 +48,23 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
diff --git a/neuralnetworks/1.3/utils/src/Conversions.cpp b/neuralnetworks/1.3/utils/src/Conversions.cpp
index a1d414c..09e9d80 100644
--- a/neuralnetworks/1.3/utils/src/Conversions.cpp
+++ b/neuralnetworks/1.3/utils/src/Conversions.cpp
@@ -396,7 +396,7 @@
}
nn::GeneralResult<V1_2::Model::ExtensionNameAndPrefix> unvalidatedConvert(
- const nn::Model::ExtensionNameAndPrefix& extensionNameAndPrefix) {
+ const nn::ExtensionNameAndPrefix& extensionNameAndPrefix) {
return V1_2::utils::unvalidatedConvert(extensionNameAndPrefix);
}
diff --git a/neuralnetworks/1.3/utils/src/Device.cpp b/neuralnetworks/1.3/utils/src/Device.cpp
index 9517fda..824cec6 100644
--- a/neuralnetworks/1.3/utils/src/Device.cpp
+++ b/neuralnetworks/1.3/utils/src/Device.cpp
@@ -187,7 +187,9 @@
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that model is ready for IPC.
std::optional<nn::Model> maybeModelInShared;
const nn::Model& modelInShared =
diff --git a/neuralnetworks/1.3/utils/src/PreparedModel.cpp b/neuralnetworks/1.3/utils/src/PreparedModel.cpp
index ce977e5..b92f877 100644
--- a/neuralnetworks/1.3/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/1.3/utils/src/PreparedModel.cpp
@@ -135,8 +135,9 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -174,10 +175,13 @@
}
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFenced(const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
- nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const {
+PreparedModel::executeFenced(
+ const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
+ nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -230,7 +234,9 @@
nn::GeneralResult<nn::SharedExecution> PreparedModel::createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
diff --git a/neuralnetworks/1.3/utils/test/DeviceTest.cpp b/neuralnetworks/1.3/utils/test/DeviceTest.cpp
index 7eba4bc..6f48837 100644
--- a/neuralnetworks/1.3/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/1.3/utils/test/DeviceTest.cpp
@@ -658,7 +658,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -677,7 +677,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -695,7 +695,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -713,7 +713,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -730,7 +730,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -747,7 +747,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -768,7 +768,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp b/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp
index 6dbbd6b..51b5d29 100644
--- a/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp
@@ -182,7 +182,7 @@
.WillOnce(Invoke(makeExecuteSynchronously(V1_3::ErrorStatus::NONE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
EXPECT_TRUE(result.has_value())
@@ -200,7 +200,7 @@
makeExecuteSynchronously(V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -217,7 +217,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -234,7 +234,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -252,7 +252,7 @@
V1_3::ErrorStatus::NONE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
EXPECT_TRUE(result.has_value())
@@ -271,7 +271,7 @@
kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -289,7 +289,7 @@
V1_3::ErrorStatus::NONE, V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -306,7 +306,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -323,7 +323,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -344,7 +344,7 @@
.WillOnce(InvokeWithoutArgs(ret));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -366,7 +366,7 @@
.WillOnce(Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback)));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -396,7 +396,7 @@
.WillOnce(Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback)));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -422,7 +422,7 @@
makeExecuteFencedReturn(V1_3::ErrorStatus::GENERAL_FAILURE, {}, nullptr)));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -439,7 +439,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -456,7 +456,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -475,7 +475,7 @@
Invoke(makeExecuteSynchronously(V1_3::ErrorStatus::NONE, {}, kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -499,7 +499,7 @@
makeExecuteSynchronously(V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -520,7 +520,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -541,7 +541,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -564,7 +564,7 @@
V1_3::ErrorStatus::NONE, V1_3::ErrorStatus::NONE, {}, kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -589,7 +589,7 @@
kNoTiming)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -611,7 +611,7 @@
V1_3::ErrorStatus::NONE, V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -628,7 +628,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -649,7 +649,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -674,7 +674,7 @@
.WillOnce(InvokeWithoutArgs(ret));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -702,7 +702,7 @@
Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -738,7 +738,7 @@
.WillOnce(Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -768,7 +768,7 @@
makeExecuteFencedReturn(V1_3::ErrorStatus::GENERAL_FAILURE, {}, nullptr)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -789,7 +789,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -810,7 +810,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/ExecutionConfig.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/ExecutionConfig.aidl
new file mode 100644
index 0000000..cb85743
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/ExecutionConfig.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable ExecutionConfig {
+ boolean measureTiming;
+ long loopTimeoutDurationNs;
+ android.hardware.neuralnetworks.TokenValuePair[] executionHints;
+ android.hardware.neuralnetworks.ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IBurst.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IBurst.aidl
index eb3d0b0..461fdfa 100644
--- a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IBurst.aidl
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IBurst.aidl
@@ -36,4 +36,5 @@
interface IBurst {
android.hardware.neuralnetworks.ExecutionResult executeSynchronously(in android.hardware.neuralnetworks.Request request, in long[] memoryIdentifierTokens, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs);
void releaseMemoryResource(in long memoryIdentifierToken);
+ android.hardware.neuralnetworks.ExecutionResult executeSynchronouslyWithConfig(in android.hardware.neuralnetworks.Request request, in long[] memoryIdentifierTokens, in android.hardware.neuralnetworks.ExecutionConfig config, in long deadlineNs);
}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IDevice.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IDevice.aidl
index c9c67f2..c0fba47 100644
--- a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IDevice.aidl
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IDevice.aidl
@@ -43,6 +43,7 @@
String getVersionString();
void prepareModel(in android.hardware.neuralnetworks.Model model, in android.hardware.neuralnetworks.ExecutionPreference preference, in android.hardware.neuralnetworks.Priority priority, in long deadlineNs, in ParcelFileDescriptor[] modelCache, in ParcelFileDescriptor[] dataCache, in byte[] token, in android.hardware.neuralnetworks.IPreparedModelCallback callback);
void prepareModelFromCache(in long deadlineNs, in ParcelFileDescriptor[] modelCache, in ParcelFileDescriptor[] dataCache, in byte[] token, in android.hardware.neuralnetworks.IPreparedModelCallback callback);
+ void prepareModelWithConfig(in android.hardware.neuralnetworks.Model model, in android.hardware.neuralnetworks.PrepareModelConfig config, in android.hardware.neuralnetworks.IPreparedModelCallback callback);
const int BYTE_SIZE_OF_CACHE_TOKEN = 32;
const int MAX_NUMBER_OF_CACHE_FILES = 32;
const int EXTENSION_TYPE_HIGH_BITS_PREFIX = 15;
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
index f899567..fb0c372 100644
--- a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/IPreparedModel.aidl
@@ -37,7 +37,9 @@
android.hardware.neuralnetworks.ExecutionResult executeSynchronously(in android.hardware.neuralnetworks.Request request, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs);
android.hardware.neuralnetworks.FencedExecutionResult executeFenced(in android.hardware.neuralnetworks.Request request, in ParcelFileDescriptor[] waitFor, in boolean measureTiming, in long deadlineNs, in long loopTimeoutDurationNs, in long durationNs);
android.hardware.neuralnetworks.IBurst configureExecutionBurst();
- android.hardware.neuralnetworks.IExecution createReusableExecution(in android.hardware.neuralnetworks.Request request, in boolean measureTiming, in long loopTimeoutDurationNs);
+ android.hardware.neuralnetworks.IExecution createReusableExecution(in android.hardware.neuralnetworks.Request request, in android.hardware.neuralnetworks.ExecutionConfig config);
+ android.hardware.neuralnetworks.ExecutionResult executeSynchronouslyWithConfig(in android.hardware.neuralnetworks.Request request, in android.hardware.neuralnetworks.ExecutionConfig config, in long deadlineNs);
+ android.hardware.neuralnetworks.FencedExecutionResult executeFencedWithConfig(in android.hardware.neuralnetworks.Request request, in ParcelFileDescriptor[] waitFor, in android.hardware.neuralnetworks.ExecutionConfig config, in long deadlineNs, in long durationNs);
const long DEFAULT_LOOP_TIMEOUT_DURATION_NS = 2000000000;
const long MAXIMUM_LOOP_TIMEOUT_DURATION_NS = 15000000000;
}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/PrepareModelConfig.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/PrepareModelConfig.aidl
new file mode 100644
index 0000000..85c924f
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/PrepareModelConfig.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable PrepareModelConfig {
+ android.hardware.neuralnetworks.ExecutionPreference preference;
+ android.hardware.neuralnetworks.Priority priority;
+ long deadlineNs;
+ ParcelFileDescriptor[] modelCache;
+ ParcelFileDescriptor[] dataCache;
+ byte[] cacheToken;
+ android.hardware.neuralnetworks.TokenValuePair[] compilationHints;
+ android.hardware.neuralnetworks.ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
diff --git a/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/TokenValuePair.aidl b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/TokenValuePair.aidl
new file mode 100644
index 0000000..e477d6e
--- /dev/null
+++ b/neuralnetworks/aidl/aidl_api/android.hardware.neuralnetworks/current/android/hardware/neuralnetworks/TokenValuePair.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.neuralnetworks;
+@VintfStability
+parcelable TokenValuePair {
+ int token;
+ byte[] value;
+}
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/ExecutionConfig.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/ExecutionConfig.aidl
new file mode 100644
index 0000000..00f1e11
--- /dev/null
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/ExecutionConfig.aidl
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.neuralnetworks;
+
+import android.hardware.neuralnetworks.ExtensionNameAndPrefix;
+import android.hardware.neuralnetworks.TokenValuePair;
+
+/**
+ * A type that is used to represent all configuration related to
+ * an Execution.
+ */
+@VintfStability
+parcelable ExecutionConfig {
+ /**
+ * Specifies whether or not to measure duration of the execution.
+ * For {@link IPreparedModel::executeSynchronouslyWithConfig}, the duration runs from the time
+ * the driver sees the corresponding call to the execute function to the time the driver returns
+ * from the function. For {@link IPreparedModel::executeFencedWithConfig}, please refer to
+ * {@link IPreparedModelCallback} for details.
+ */
+ boolean measureTiming;
+ /**
+ * The maximum amount of time in nanoseconds that should be spent
+ * executing a {@link OperationType::WHILE} operation. If a loop
+ * condition model does not output false within this duration,
+ * the execution must be aborted. If -1 is provided, the maximum
+ * amount of time is {@link DEFAULT_LOOP_TIMEOUT_DURATION_NS}.
+ * Other negative values are invalid. When provided, the duration
+ * must not exceed {@link MAXIMUM_LOOP_TIMEOUT_DURATION_NS}.
+ */
+ long loopTimeoutDurationNs;
+ /**
+     * A vector of token / value pairs representing vendor specific
+ * execution hints or metadata. The provided TokenValuePairs must not
+ * contain the same token twice. The driver must validate the
+ * data and ignore invalid hints. It is up to the driver to
+ * decide whether to respect the provided hints or not.
+ */
+ TokenValuePair[] executionHints;
+ /**
+ * The mapping between extension names and prefixes of token values.
+ * The driver must ignore the corresponding execution hint, if
+ * the extension is not supported.
+ */
+ ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
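
A minimal client-side sketch of populating this parcelable, assuming the standard NDK AIDL backend bindings; the helper name and the hint/prefix values are illustrative only (they mirror the example in TokenValuePair.aidl further below) and are not part of this change.

#include <aidl/android/hardware/neuralnetworks/ExecutionConfig.h>
#include <aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.h>
#include <aidl/android/hardware/neuralnetworks/TokenValuePair.h>

namespace nnhal = aidl::android::hardware::neuralnetworks;

// Sketch only: builds an ExecutionConfig with timing enabled, the default loop
// timeout, and one hypothetical vendor execution hint.
nnhal::ExecutionConfig makeExecutionConfig() {
    nnhal::ExecutionConfig config;
    config.measureTiming = true;
    config.loopTimeoutDurationNs = -1;  // -1 selects DEFAULT_LOOP_TIMEOUT_DURATION_NS
    nnhal::TokenValuePair hint;
    hint.token = 0x7AAA000B;  // prefix 0x7AAA, hint 0x000B (see the TokenValuePair.aidl example)
    hint.value = {0x01};      // raw vendor data; contents are illustrative
    config.executionHints = {hint};
    nnhal::ExtensionNameAndPrefix mapping;
    mapping.name = "vendor.test.test_extension";
    mapping.prefix = 0x7AAA;
    config.extensionNameToPrefix = {mapping};
    return config;
}
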
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/Extension.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/Extension.aidl
index 20109bd..9f70a53 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/Extension.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/Extension.aidl
@@ -20,6 +20,10 @@
/**
* Information about an extension.
+ *
+ * The extension can provide zero or more operation types (which are not enumerated), zero or more
+ * operand types (which are enumerated in {@link Extension::operandTypes}), and compilation and
+ * execution hints (which are not enumerated).
*/
@VintfStability
parcelable Extension {
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl
index 29be93f..6c296e0 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/ExtensionNameAndPrefix.aidl
@@ -17,7 +17,8 @@
package android.hardware.neuralnetworks;
/**
- * The mapping between extension names and prefixes of operand and operation type values.
+ * The mapping between extension names and prefixes of values like operand and operation type, and
+ * token in {@link TokenValuePair}.
*
* An operand or operation whose numeric type value is above {@link IDevice::OPERAND_TYPE_BASE_MAX}
* or {@link IDevice::OPERATION_TYPE_BASE_MAX} respectively should be interpreted as an extension
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/IBurst.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/IBurst.aidl
index b089c49..a05a7fb 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/IBurst.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/IBurst.aidl
@@ -17,6 +17,7 @@
package android.hardware.neuralnetworks;
import android.hardware.neuralnetworks.ErrorStatus;
+import android.hardware.neuralnetworks.ExecutionConfig;
import android.hardware.neuralnetworks.ExecutionResult;
import android.hardware.neuralnetworks.Request;
@@ -68,6 +69,8 @@
*
* Only a single execution on a given burst object may be active at any time.
*
+ * Also see {@link IBurst::executeSynchronouslyWithConfig}.
+ *
* @param request The input and output information on which the prepared model is to be
* executed.
* @param memoryIdentifierTokens A list of tokens where each token is a non-negative number
@@ -117,4 +120,13 @@
* - INVALID_ARGUMENT if one of the input arguments is invalid
*/
void releaseMemoryResource(in long memoryIdentifierToken);
+
+ /**
+ * For detailed specification, please refer to {@link IBurst::executeSynchronously}. The
+ * difference between the two methods is that executeSynchronouslyWithConfig takes {@link
+ * ExecutionConfig} instead of a list of configuration parameters, and ExecutionConfig contains
+ * more configuration parameters than are passed to executeSynchronously.
+ */
+ ExecutionResult executeSynchronouslyWithConfig(in Request request,
+ in long[] memoryIdentifierTokens, in ExecutionConfig config, in long deadlineNs);
}
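
A hedged sketch of invoking the new burst entry point through the NDK backend bindings; burst, request, and memoryIdentifierTokens stand for objects obtained elsewhere, and the helper name is illustrative.

#include <aidl/android/hardware/neuralnetworks/IBurst.h>

#include <memory>
#include <vector>

namespace nnhal = aidl::android::hardware::neuralnetworks;

// Sketch only: runs one synchronous burst execution with an ExecutionConfig.
// Passing deadlineNs = -1 omits the deadline, matching the convention documented
// for the existing executeSynchronously method.
ndk::ScopedAStatus runBurstOnce(const std::shared_ptr<nnhal::IBurst>& burst,
                                const nnhal::Request& request,
                                const std::vector<int64_t>& memoryIdentifierTokens,
                                const nnhal::ExecutionConfig& config,
                                nnhal::ExecutionResult* result) {
    return burst->executeSynchronouslyWithConfig(request, memoryIdentifierTokens, config,
                                                 /*deadlineNs=*/-1, result);
}
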
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/IDevice.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/IDevice.aidl
index 72e2623..821b9fe 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/IDevice.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/IDevice.aidl
@@ -28,6 +28,7 @@
import android.hardware.neuralnetworks.IPreparedModelParcel;
import android.hardware.neuralnetworks.Model;
import android.hardware.neuralnetworks.NumberOfCacheFiles;
+import android.hardware.neuralnetworks.PrepareModelConfig;
import android.hardware.neuralnetworks.Priority;
/**
@@ -148,7 +149,7 @@
*
* If the device reports that caching is not supported, the user may avoid calling
* IDevice::prepareModelFromCache or providing cache file descriptors to
- * IDevice::prepareModel.
+ * IDevice::prepareModel or IDevice::prepareModelWithConfig.
*
* @return NumberOfCacheFiles structure indicating how many files for model and data cache the
* driver needs to cache a single prepared model. It must be less than or equal to
@@ -302,6 +303,8 @@
*
* Multiple threads may call prepareModel on the same model concurrently.
*
+ * Also see {@link IDevice::prepareModelWithConfig}.
+ *
* @param model The model to be prepared for execution.
* @param preference Indicates the intended execution behavior of a prepared model.
* @param priority The priority of the prepared model relative to other prepared models owned by
@@ -403,17 +406,17 @@
* @param modelCache A vector of file descriptors for the security-sensitive cache. The length
* of the vector must match the numModelCache returned from
* getNumberOfCacheFilesNeeded. The cache file descriptors will be provided in
- * the same order as with prepareModel.
+ * the same order as with prepareModel or prepareModelWithConfig.
* @param dataCache A vector of file descriptors for the constants' cache. The length of the
* vector must match the numDataCache returned from
* getNumberOfCacheFilesNeeded. The cache file descriptors will be provided in
- * the same order as with prepareModel.
+ * the same order as with prepareModel or prepareModelWithConfig.
* @param token A caching token of length BYTE_SIZE_OF_CACHE_TOKEN identifying the prepared
* model. It is the same token provided when saving the cache files with
- * prepareModel. Tokens should be chosen to have a low rate of collision for a
- * particular application. The driver cannot detect a collision; a collision will
- * result in a failed execution or in a successful execution that produces
- * incorrect output values.
+ * prepareModel or prepareModelWithConfig. Tokens should be chosen to have a low
+ * rate of collision for a particular application. The driver cannot detect a
+ * collision; a collision will result in a failed execution or in a successful
+ * execution that produces incorrect output values.
* @param callback A callback object used to return the error status of preparing the model for
* execution and the prepared model if successful, nullptr otherwise. The
* callback object's notify function must be called exactly once, even if the
@@ -429,4 +432,28 @@
void prepareModelFromCache(in long deadlineNs, in ParcelFileDescriptor[] modelCache,
in ParcelFileDescriptor[] dataCache, in byte[] token,
in IPreparedModelCallback callback);
+
+ /**
+ * For detailed specification, please refer to {@link IDevice::prepareModel}. The only
+ * difference between the two methods is that prepareModelWithConfig takes {@link
+ * PrepareModelConfig} instead of standalone configuration parameters, which allows vendor
+ * specific compilation metadata to be passed.
+ *
+ * @param model The model to be prepared for execution.
+ * @param config Configuration parameters to prepare the model.
+ * @param callback A callback object used to return the error status of preparing the model for
+ * execution and the prepared model if successful, nullptr otherwise. The
+ * callback object's notify function must be called exactly once, even if the
+ * model could not be prepared.
+ * @throws ServiceSpecificException with one of the following ErrorStatus values:
+ * - DEVICE_UNAVAILABLE if driver is offline or busy
+ * - GENERAL_FAILURE if there is an unspecified error
+ * - INVALID_ARGUMENT if one of the input arguments related to preparing the model is
+ * invalid
+ * - MISSED_DEADLINE_* if the preparation is aborted because the model cannot be prepared by
+ * the deadline
+ * - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+ */
+ void prepareModelWithConfig(
+ in Model model, in PrepareModelConfig config, in IPreparedModelCallback callback);
}
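
A hedged sketch of a caller using prepareModelWithConfig via the NDK backend bindings; the preference/priority choices, helper name, and callback are placeholders, not part of this change.

#include <aidl/android/hardware/neuralnetworks/IDevice.h>

#include <memory>
#include <vector>

namespace nnhal = aidl::android::hardware::neuralnetworks;

// Sketch only: prepares a model with a PrepareModelConfig carrying no caching
// information and no compilation hints. With modelCache/dataCache left empty,
// the driver must ignore cacheToken (see PrepareModelConfig.aidl below).
ndk::ScopedAStatus prepareWithConfig(
        const std::shared_ptr<nnhal::IDevice>& device, const nnhal::Model& model,
        const std::shared_ptr<nnhal::IPreparedModelCallback>& callback) {
    nnhal::PrepareModelConfig config;
    config.preference = nnhal::ExecutionPreference::SUSTAINED_SPEED;
    config.priority = nnhal::Priority::MEDIUM;
    config.deadlineNs = -1;  // deadline omitted
    config.cacheToken = std::vector<uint8_t>(nnhal::IDevice::BYTE_SIZE_OF_CACHE_TOKEN, 0);
    return device->prepareModelWithConfig(model, config, callback);
}
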
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
index 79053e5..949804e 100644
--- a/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/IPreparedModel.aidl
@@ -18,6 +18,7 @@
import android.hardware.common.NativeHandle;
import android.hardware.neuralnetworks.ErrorStatus;
+import android.hardware.neuralnetworks.ExecutionConfig;
import android.hardware.neuralnetworks.ExecutionResult;
import android.hardware.neuralnetworks.FencedExecutionResult;
import android.hardware.neuralnetworks.IBurst;
@@ -68,6 +69,8 @@
* Any number of calls to the execute* functions, in any combination, may be made concurrently,
* even on the same IPreparedModel object.
*
+ * Also see {@link IPreparedModel::executeSynchronouslyWithConfig}.
+ *
* @param request The input and output information on which the prepared model is to be
* executed.
* @param measure Specifies whether or not to measure duration of the execution. The duration
@@ -134,6 +137,8 @@
* Any number of calls to the execute* functions, in any combination, may be made concurrently,
* even on the same IPreparedModel object.
*
+ * Also see {@link IPreparedModel::executeFencedWithConfig}.
+ *
* @param request The input and output information on which the prepared model is to be
* executed. The outputs in the request must have fully specified dimensions.
* @param waitFor A vector of sync fence file descriptors. Execution must not start until all
@@ -201,15 +206,7 @@
*
* @param request The input and output information on which the prepared model is to be
* executed.
- * @param measure Specifies whether or not to measure duration of the execution.
- * @param loopTimeoutDurationNs The maximum amount of time in nanoseconds that should be spent
- * executing a {@link OperationType::WHILE} operation. If a loop
- * condition model does not output false within this duration, the
- * computation performed on the returned reusable execution object
- * must be aborted. If -1 is provided, the maximum amount
- * of time is {@link DEFAULT_LOOP_TIMEOUT_DURATION_NS}. Other
- * negative values are invalid. When provided, the duration must
- * not exceed {@link MAXIMUM_LOOP_TIMEOUT_DURATION_NS}.
+ * @param config Specifies the execution configuration parameters.
* @return execution An IExecution object representing a reusable execution that has been
* specialized for a fixed request.
* @throws ServiceSpecificException with one of the following ErrorStatus values:
@@ -218,6 +215,64 @@
* - INVALID_ARGUMENT if one of the input arguments is invalid
* - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
*/
- IExecution createReusableExecution(
- in Request request, in boolean measureTiming, in long loopTimeoutDurationNs);
+ IExecution createReusableExecution(in Request request, in ExecutionConfig config);
+
+ /**
+ * For detailed specification, please refer to {@link IPreparedModel::executeSynchronously}. The
+ * difference between the two methods is that executeSynchronouslyWithConfig takes {@link
+ * ExecutionConfig} instead of a list of configuration parameters, and ExecutionConfig contains
+ * more configuration parameters than are passed to executeSynchronously.
+ *
+ * @param request The input and output information on which the prepared model is to be
+ * executed.
+ * @param config Specifies the execution configuration parameters.
+ * @param deadlineNs The time by which the execution is expected to complete. The time is
+ * measured in nanoseconds since boot (as from clock_gettime(CLOCK_BOOTTIME,
+ * &ts) or ::android::base::boot_clock). If the execution cannot be finished
+ * by the deadline, the execution may be aborted. Passing -1 means the
+ *                   deadline is omitted. Other negative values are invalid.
+ * @return ExecutionResult parcelable, containing the status of the execution, output shapes and
+ * timing information.
+ * - MISSED_DEADLINE_* if the execution is aborted because it cannot be completed by the
+ * deadline
+ * - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+ */
+ ExecutionResult executeSynchronouslyWithConfig(
+ in Request request, in ExecutionConfig config, in long deadlineNs);
+
+ /**
+ * For detailed specification, please refer to {@link IPreparedModel::executeFenced}. The
+ * difference between the two methods is that executeFencedWithConfig takes {@link
+ * ExecutionConfig} instead of a list of configuration parameters, and ExecutionConfig contains
+ * more configuration parameters than are passed to executeFenced.
+ *
+ * @param request The input and output information on which the prepared model is to be
+ * executed. The outputs in the request must have fully specified dimensions.
+ * @param waitFor A vector of sync fence file descriptors. Execution must not start until all
+ * sync fences have been signaled.
+ * @param config Specifies the execution configuration parameters.
+ * @param deadlineNs The time by which the execution is expected to complete. The time is
+ * measured in nanoseconds since boot (as from clock_gettime(CLOCK_BOOTTIME,
+ * &ts) or ::android::base::boot_clock). If the execution cannot be finished
+ * by the deadline, the execution may be aborted. Passing -1 means the
+ * deadline is omitted. Other negative values are invalid.
+ * @param durationNs The length of time in nanoseconds within which the execution is expected to
+ * complete after all sync fences in waitFor are signaled. If the execution
+ * cannot be finished within the duration, the execution may be aborted.
+ * Passing -1 means the duration is omitted. Other negative values are
+ * invalid.
+ * @return The FencedExecutionResult parcelable, containing IFencedExecutionCallback and the
+ * sync fence.
+ * @throws ServiceSpecificException with one of the following ErrorStatus values:
+ * - DEVICE_UNAVAILABLE if driver is offline or busy
+ * - GENERAL_FAILURE if there is an unspecified error
+ * - INVALID_ARGUMENT if one of the input arguments is invalid, including fences in error
+ * states.
+ * - MISSED_DEADLINE_* if the execution is aborted because it cannot be completed by the
+ * deadline
+ * - RESOURCE_EXHAUSTED_* if the task was aborted by the driver
+ */
+ FencedExecutionResult executeFencedWithConfig(in Request request,
+ in ParcelFileDescriptor[] waitFor, in ExecutionConfig config, in long deadlineNs,
+ in long durationNs);
}
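
A hedged sketch of the new prepared-model entry points as seen from a client of the NDK backend bindings; object and helper names are placeholders, not part of this change.

#include <aidl/android/hardware/neuralnetworks/IPreparedModel.h>

#include <memory>

namespace nnhal = aidl::android::hardware::neuralnetworks;

// Sketch only: one-shot synchronous execution with an ExecutionConfig, deadline omitted.
ndk::ScopedAStatus runOnce(const std::shared_ptr<nnhal::IPreparedModel>& preparedModel,
                           const nnhal::Request& request, const nnhal::ExecutionConfig& config,
                           nnhal::ExecutionResult* result) {
    return preparedModel->executeSynchronouslyWithConfig(request, config, /*deadlineNs=*/-1,
                                                         result);
}

// Sketch only: createReusableExecution now takes the same ExecutionConfig parcelable.
ndk::ScopedAStatus makeReusable(const std::shared_ptr<nnhal::IPreparedModel>& preparedModel,
                                const nnhal::Request& request,
                                const nnhal::ExecutionConfig& config,
                                std::shared_ptr<nnhal::IExecution>* execution) {
    return preparedModel->createReusableExecution(request, config, execution);
}
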
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/PrepareModelConfig.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/PrepareModelConfig.aidl
new file mode 100644
index 0000000..96df968
--- /dev/null
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/PrepareModelConfig.aidl
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.neuralnetworks;
+
+import android.hardware.neuralnetworks.ExecutionPreference;
+import android.hardware.neuralnetworks.ExtensionNameAndPrefix;
+import android.hardware.neuralnetworks.Priority;
+import android.hardware.neuralnetworks.TokenValuePair;
+
+/**
+ * A type that is used to represent all configuration needed to
+ * prepare a model.
+ */
+@VintfStability
+parcelable PrepareModelConfig {
+ /**
+ * Indicates the intended execution behavior of a prepared model.
+ */
+ ExecutionPreference preference;
+ /**
+ * The priority of the prepared model relative to other prepared
+ * models owned by the client.
+ */
+ Priority priority;
+ /**
+ * The time by which the model is expected to be prepared. The
+ * time is measured in nanoseconds since boot (as from
+ * clock_gettime(CLOCK_BOOTTIME, &ts) or
+ * ::android::base::boot_clock). If the model cannot be prepared
+ * by the deadline, the preparation may be aborted. Passing -1
+ * means the deadline is omitted. Other negative values are
+ * invalid.
+ */
+ long deadlineNs;
+ /**
+ * A vector of file descriptors for the security-sensitive cache.
+ * The length of the vector must either be 0 indicating that
+ * caching information is not provided, or match the
+ * numModelCache returned from IDevice::getNumberOfCacheFilesNeeded. The
+ * cache file descriptors will be provided in the same order when
+ * retrieving the preparedModel from cache files with
+ * IDevice::prepareModelFromCache.
+ */
+ ParcelFileDescriptor[] modelCache;
+ /**
+ * A vector of file descriptors for the constants' cache. The
+ * length of the vector must either be 0 indicating that caching
+ * information is not provided, or match the numDataCache
+ * returned from IDevice::getNumberOfCacheFilesNeeded. The cache file
+ * descriptors will be provided in the same order when retrieving
+ * the preparedModel from cache files with IDevice::prepareModelFromCache.
+ */
+ ParcelFileDescriptor[] dataCache;
+ /**
+ * A caching token of length IDevice::BYTE_SIZE_OF_CACHE_TOKEN identifying
+ * the prepared model. The same token will be provided when
+ * retrieving the prepared model from the cache files with
+ * IDevice::prepareModelFromCache. Tokens should be chosen to have a low
+ * rate of collision for a particular application. The driver
+ * cannot detect a collision; a collision will result in a failed
+ * execution or in a successful execution that produces incorrect
+ * output values. If both modelCache and dataCache are empty
+ * indicating that caching information is not provided, this
+ * token must be ignored.
+ */
+ byte[] cacheToken;
+ /**
+     * A vector of token / value pairs representing vendor specific
+ * compilation hints or metadata. The provided TokenValuePairs must not
+ * contain the same token twice. The driver must validate the
+ * data and ignore invalid hints. It is up to the driver to
+ * decide whether to respect the provided hints or not.
+ */
+ TokenValuePair[] compilationHints;
+ /**
+ * The mapping between extension names and prefixes of token values.
+ * The driver must ignore the corresponding compilation hint, if
+ * the extension is not supported.
+ */
+ ExtensionNameAndPrefix[] extensionNameToPrefix;
+}
diff --git a/neuralnetworks/aidl/android/hardware/neuralnetworks/TokenValuePair.aidl b/neuralnetworks/aidl/android/hardware/neuralnetworks/TokenValuePair.aidl
new file mode 100644
index 0000000..ec665b4
--- /dev/null
+++ b/neuralnetworks/aidl/android/hardware/neuralnetworks/TokenValuePair.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.neuralnetworks;
+
+/**
+ * A type that is used to represent a token / byte array data pair.
+ */
+@VintfStability
+parcelable TokenValuePair {
+ /**
+ * A 32bit integer token. The token is created by combining the
+ * extension prefix and enum defined within the extension.
+ * The low {@link IDevice::EXTENSION_TYPE_LOW_BITS_TYPE} bits of the value
+ * correspond to the hint within the extension and the high
+ * {@link IDevice::EXTENSION_TYPE_HIGH_BITS_PREFIX} bits encode the "prefix", which maps
+ * uniquely to the extension name. The sign bit is always 0.
+ *
+ * For example, if a token value is 0x7AAA000B and the corresponding
+ * {@link ExtensionNameAndPrefix} contains an entry with prefix=0x7AAA and
+ * name="vendor.test.test_extension", then the token should be interpreted as the hint
+ * 0x000B of the extension named vendor.test.test_extension.
+ */
+ int token;
+ /**
+ * A byte array containing the raw data.
+ */
+ byte[] value;
+}
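
The prefix/hint packing described above can be made concrete with a tiny helper; this is a sketch assuming a 16-bit low field, which is consistent with the 0x7AAA000B example in the comment (0x7AAA in the high bits, 0x000B in the low bits).

#include <cstdint>

// Sketch only: packs an extension prefix and an extension-defined hint code into
// a TokenValuePair token. The prefix must fit in 15 bits so the sign bit stays 0.
constexpr int32_t makeExtensionToken(uint16_t prefix, uint16_t hintCode) {
    return static_cast<int32_t>((static_cast<uint32_t>(prefix) << 16) | hintCode);
}

static_assert(makeExtensionToken(0x7AAA, 0x000B) == 0x7AAA000B,
              "matches the documented example");
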
diff --git a/neuralnetworks/aidl/utils/Android.bp b/neuralnetworks/aidl/utils/Android.bp
index 9437d5c..41eae85 100644
--- a/neuralnetworks/aidl/utils/Android.bp
+++ b/neuralnetworks/aidl/utils/Android.bp
@@ -111,19 +111,13 @@
static_libs: [
"libaidlcommonsupport",
"libgmock",
- "libneuralnetworks_common",
"neuralnetworks_types",
"neuralnetworks_utils_hal_common",
],
shared_libs: [
- "android.hidl.allocator@1.0",
"libbase",
"libbinder_ndk",
"libcutils",
- "libhidlbase",
- "libhidlmemory",
- "liblog",
- "libutils",
],
target: {
android: {
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Burst.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Burst.h
index 0cc78d4..f2e6e75 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Burst.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Burst.h
@@ -86,10 +86,12 @@
GUARDED_BY(mMutex);
};
+ // featureLevel is for testing purposes.
static nn::GeneralResult<std::shared_ptr<const Burst>> create(
- std::shared_ptr<aidl_hal::IBurst> burst);
+ std::shared_ptr<aidl_hal::IBurst> burst, nn::Version featureLevel);
- Burst(PrivateConstructorTag tag, std::shared_ptr<aidl_hal::IBurst> burst);
+ Burst(PrivateConstructorTag tag, std::shared_ptr<aidl_hal::IBurst> burst,
+ nn::Version featureLevel);
// See IBurst::cacheMemory for information.
OptionalCacheHold cacheMemory(const nn::SharedMemory& memory) const override;
@@ -97,23 +99,29 @@
// See IBurst::execute for information.
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
// See IBurst::createReusableExecution for information.
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> executeInternal(
const aidl_hal::Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
bool measure, int64_t deadline, int64_t loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
const hal::utils::RequestRelocation& relocation) const;
private:
mutable std::atomic_flag mExecutionInFlight = ATOMIC_FLAG_INIT;
const std::shared_ptr<aidl_hal::IBurst> kBurst;
const std::shared_ptr<MemoryCache> kMemoryCache;
+ const nn::Version kFeatureLevel;
};
} // namespace aidl::android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Conversions.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Conversions.h
index 477b311..af58715 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Conversions.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Conversions.h
@@ -46,6 +46,10 @@
#include <aidl/android/hardware/neuralnetworks/SymmPerChannelQuantParams.h>
#include <aidl/android/hardware/neuralnetworks/Timing.h>
+#ifdef NN_AIDL_V4_OR_ABOVE
+#include <aidl/android/hardware/neuralnetworks/TokenValuePair.h>
+#endif // NN_AIDL_V4_OR_ABOVE
+
#include <android/binder_auto_utils.h>
#include <nnapi/Result.h>
#include <nnapi/Types.h>
@@ -74,7 +78,7 @@
const aidl_hal::SymmPerChannelQuantParams& symmPerChannelQuantParams);
GeneralResult<Operation> unvalidatedConvert(const aidl_hal::Operation& operation);
GeneralResult<Model> unvalidatedConvert(const aidl_hal::Model& model);
-GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
+GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
const aidl_hal::ExtensionNameAndPrefix& extensionNameAndPrefix);
GeneralResult<Model::OperandValues> unvalidatedConvert(const std::vector<uint8_t>& operandValues);
GeneralResult<Model::Subgraph> unvalidatedConvert(const aidl_hal::Subgraph& subgraph);
@@ -97,6 +101,10 @@
const aidl_hal::ExtensionOperandTypeInformation& operandTypeInformation);
GeneralResult<SharedHandle> unvalidatedConvert(const ndk::ScopedFileDescriptor& handle);
+#ifdef NN_AIDL_V4_OR_ABOVE
+GeneralResult<TokenValuePair> unvalidatedConvert(const aidl_hal::TokenValuePair& tokenValuePair);
+#endif // NN_AIDL_V4_OR_ABOVE
+
GeneralResult<std::vector<Operation>> unvalidatedConvert(
const std::vector<aidl_hal::Operation>& operations);
@@ -116,6 +124,14 @@
GeneralResult<std::vector<Extension>> convert(const std::vector<aidl_hal::Extension>& extension);
GeneralResult<std::vector<SharedMemory>> convert(const std::vector<aidl_hal::Memory>& memories);
+GeneralResult<std::vector<ExtensionNameAndPrefix>> convert(
+ const std::vector<aidl_hal::ExtensionNameAndPrefix>& extensionNameAndPrefix);
+
+#ifdef NN_AIDL_V4_OR_ABOVE
+GeneralResult<std::vector<TokenValuePair>> convert(
+ const std::vector<aidl_hal::TokenValuePair>& metaData);
+#endif // NN_AIDL_V4_OR_ABOVE
+
GeneralResult<std::vector<OutputShape>> convert(
const std::vector<aidl_hal::OutputShape>& outputShapes);
GeneralResult<std::vector<SharedHandle>> convert(
@@ -152,7 +168,7 @@
nn::GeneralResult<std::vector<uint8_t>> unvalidatedConvert(
const nn::Model::OperandValues& operandValues);
nn::GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
- const nn::Model::ExtensionNameAndPrefix& extensionNameToPrefix);
+ const nn::ExtensionNameAndPrefix& extensionNameToPrefix);
nn::GeneralResult<Model> unvalidatedConvert(const nn::Model& model);
nn::GeneralResult<Priority> unvalidatedConvert(const nn::Priority& priority);
nn::GeneralResult<Request> unvalidatedConvert(const nn::Request& request);
@@ -166,6 +182,10 @@
nn::GeneralResult<Capabilities> unvalidatedConvert(const nn::Capabilities& capabilities);
nn::GeneralResult<Extension> unvalidatedConvert(const nn::Extension& extension);
+#ifdef NN_AIDL_V4_OR_ABOVE
+nn::GeneralResult<TokenValuePair> unvalidatedConvert(const nn::TokenValuePair& tokenValuePair);
+#endif // NN_AIDL_V4_OR_ABOVE
+
nn::GeneralResult<std::vector<uint8_t>> convert(const nn::CacheToken& cacheToken);
nn::GeneralResult<BufferDesc> convert(const nn::BufferDesc& bufferDesc);
nn::GeneralResult<DeviceType> convert(const nn::DeviceType& deviceType);
@@ -190,6 +210,13 @@
nn::GeneralResult<std::vector<ndk::ScopedFileDescriptor>> convert(
const std::vector<nn::SyncFence>& syncFences);
nn::GeneralResult<std::vector<Extension>> convert(const std::vector<nn::Extension>& extensions);
+nn::GeneralResult<std::vector<ExtensionNameAndPrefix>> convert(
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix);
+
+#ifdef NN_AIDL_V4_OR_ABOVE
+nn::GeneralResult<std::vector<TokenValuePair>> convert(
+ const std::vector<nn::TokenValuePair>& metaData);
+#endif // NN_AIDL_V4_OR_ABOVE
nn::GeneralResult<std::vector<int32_t>> toSigned(const std::vector<uint32_t>& vec);
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Device.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Device.h
index d558f66..615c6de 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Device.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/Device.h
@@ -42,6 +42,7 @@
struct PrivateConstructorTag {};
public:
+ // featureLevel is for testing purposes.
static nn::GeneralResult<std::shared_ptr<const Device>> create(
std::string name, std::shared_ptr<aidl_hal::IDevice> device, nn::Version featureLevel);
@@ -67,8 +68,9 @@
nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache,
- const nn::CacheToken& token) const override;
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/HalInterfaces.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/HalInterfaces.h
index 205d428..cacdc26 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/HalInterfaces.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/HalInterfaces.h
@@ -63,7 +63,9 @@
#ifdef NN_AIDL_V4_OR_ABOVE
#include <aidl/android/hardware/neuralnetworks/BnExecution.h>
+#include <aidl/android/hardware/neuralnetworks/ExecutionConfig.h>
#include <aidl/android/hardware/neuralnetworks/IExecution.h>
+#include <aidl/android/hardware/neuralnetworks/PrepareModelConfig.h>
#endif // NN_AIDL_V4_OR_ABOVE
namespace android::nn {
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h
index e66507a..9375c1d 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/InvalidDevice.h
@@ -53,6 +53,9 @@
const std::vector<ndk::ScopedFileDescriptor>& dataCache,
const std::vector<uint8_t>& token,
const std::shared_ptr<IPreparedModelCallback>& callback) override;
+ ndk::ScopedAStatus prepareModelWithConfig(
+ const Model& model, const PrepareModelConfig& config,
+ const std::shared_ptr<IPreparedModelCallback>& callback) override;
ndk::ScopedAStatus prepareModelFromCache(
int64_t deadline, const std::vector<ndk::ScopedFileDescriptor>& modelCache,
const std::vector<ndk::ScopedFileDescriptor>& dataCache,
diff --git a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/PreparedModel.h b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/PreparedModel.h
index 24cd681..cb6a85b 100644
--- a/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/PreparedModel.h
+++ b/neuralnetworks/aidl/utils/include/nnapi/hal/aidl/PreparedModel.h
@@ -40,6 +40,7 @@
struct PrivateConstructorTag {};
public:
+ // featureLevel is for testing purposes.
static nn::GeneralResult<std::shared_ptr<const PreparedModel>> create(
std::shared_ptr<aidl_hal::IPreparedModel> preparedModel, nn::Version featureLevel);
@@ -49,18 +50,23 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
@@ -68,6 +74,8 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> executeInternal(
const Request& request, bool measure, int64_t deadline, int64_t loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
const hal::utils::RequestRelocation& relocation) const;
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
@@ -75,6 +83,8 @@
const std::vector<ndk::ScopedFileDescriptor>& waitFor, bool measure,
int64_t deadline, int64_t loopTimeoutDuration,
int64_t timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
const hal::utils::RequestRelocation& relocation) const;
private:
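Note (not part of the patch): every canonical execution entry point now takes the hint and extension-prefix vectors explicitly; callers with nothing to pass supply empty vectors, as Execution.cpp does below. A minimal sketch under that assumption -- `runOnce` is a hypothetical helper:

// Illustrative sketch only; assumes the canonical NNAPI headers used by this file.
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> runOnce(
        const nn::SharedPreparedModel& preparedModel, const nn::Request& request) {
    // No vendor hints: pass empty vectors for the two new trailing parameters.
    return preparedModel->execute(request, nn::MeasureTiming::NO,
                                  /*deadline=*/{}, /*loopTimeoutDuration=*/{},
                                  /*hints=*/{}, /*extensionNameToPrefix=*/{});
}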
diff --git a/neuralnetworks/aidl/utils/src/Burst.cpp b/neuralnetworks/aidl/utils/src/Burst.cpp
index fb00b26..6c7aa88 100644
--- a/neuralnetworks/aidl/utils/src/Burst.cpp
+++ b/neuralnetworks/aidl/utils/src/Burst.cpp
@@ -43,12 +43,16 @@
static nn::GeneralResult<std::shared_ptr<const BurstExecution>> create(
std::shared_ptr<const Burst> burst, Request request,
std::vector<int64_t> memoryIdentifierTokens, bool measure, int64_t loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
hal::utils::RequestRelocation relocation,
std::vector<Burst::OptionalCacheHold> cacheHolds);
BurstExecution(PrivateConstructorTag tag, std::shared_ptr<const Burst> burst, Request request,
std::vector<int64_t> memoryIdentifierTokens, bool measure,
- int64_t loopTimeoutDuration, hal::utils::RequestRelocation relocation,
+ int64_t loopTimeoutDuration, const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
+ hal::utils::RequestRelocation relocation,
std::vector<Burst::OptionalCacheHold> cacheHolds);
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> compute(
@@ -64,6 +68,8 @@
const std::vector<int64_t> kMemoryIdentifierTokens;
const bool kMeasure;
const int64_t kLoopTimeoutDuration;
+ const std::vector<nn::TokenValuePair> kHints;
+ const std::vector<nn::ExtensionNameAndPrefix> kExtensionNameToPrefix;
const hal::utils::RequestRelocation kRelocation;
const std::vector<Burst::OptionalCacheHold> kCacheHolds;
};
@@ -149,17 +155,20 @@
}
nn::GeneralResult<std::shared_ptr<const Burst>> Burst::create(
- std::shared_ptr<aidl_hal::IBurst> burst) {
+ std::shared_ptr<aidl_hal::IBurst> burst, nn::Version featureLevel) {
if (burst == nullptr) {
return NN_ERROR(nn::ErrorStatus::GENERAL_FAILURE)
<< "aidl_hal::utils::Burst::create must have non-null burst";
}
- return std::make_shared<const Burst>(PrivateConstructorTag{}, std::move(burst));
+ return std::make_shared<const Burst>(PrivateConstructorTag{}, std::move(burst), featureLevel);
}
-Burst::Burst(PrivateConstructorTag /*tag*/, std::shared_ptr<aidl_hal::IBurst> burst)
- : kBurst(std::move(burst)), kMemoryCache(std::make_shared<MemoryCache>(kBurst)) {
+Burst::Burst(PrivateConstructorTag /*tag*/, std::shared_ptr<aidl_hal::IBurst> burst,
+ nn::Version featureLevel)
+ : kBurst(std::move(burst)),
+ kMemoryCache(std::make_shared<MemoryCache>(kBurst)),
+ kFeatureLevel(featureLevel) {
CHECK(kBurst != nullptr);
}
@@ -170,8 +179,9 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -200,14 +210,14 @@
memoryIdentifierTokens.push_back(-1);
}
CHECK_EQ(requestInShared.pools.size(), memoryIdentifierTokens.size());
-
return executeInternal(aidlRequest, memoryIdentifierTokens, aidlMeasure, aidlDeadline,
- aidlLoopTimeoutDuration, relocation);
+ aidlLoopTimeoutDuration, hints, extensionNameToPrefix, relocation);
}
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> Burst::executeInternal(
const Request& request, const std::vector<int64_t>& memoryIdentifierTokens, bool measure,
- int64_t deadline, int64_t loopTimeoutDuration,
+ int64_t deadline, int64_t loopTimeoutDuration, const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
const hal::utils::RequestRelocation& relocation) const {
// Ensure that at most one execution is in flight at any given time.
const bool alreadyInFlight = mExecutionInFlight.test_and_set();
@@ -221,9 +231,21 @@
}
ExecutionResult executionResult;
- const auto ret = kBurst->executeSynchronously(request, memoryIdentifierTokens, measure,
- deadline, loopTimeoutDuration, &executionResult);
- HANDLE_ASTATUS(ret) << "execute failed";
+ if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
+ auto aidlHints = NN_TRY(convert(hints));
+ auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+ const auto ret = kBurst->executeSynchronouslyWithConfig(
+ request, memoryIdentifierTokens,
+ {measure, loopTimeoutDuration, std::move(aidlHints),
+ std::move(aidlExtensionPrefix)},
+ deadline, &executionResult);
+ HANDLE_ASTATUS(ret) << "execute failed";
+ } else {
+ const auto ret =
+ kBurst->executeSynchronously(request, memoryIdentifierTokens, measure, deadline,
+ loopTimeoutDuration, &executionResult);
+ HANDLE_ASTATUS(ret) << "execute failed";
+ }
if (!executionResult.outputSufficientSize) {
auto canonicalOutputShapes =
nn::convert(executionResult.outputShapes).value_or(std::vector<nn::OutputShape>{});
@@ -241,7 +263,9 @@
nn::GeneralResult<nn::SharedExecution> Burst::createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -272,12 +296,15 @@
return BurstExecution::create(shared_from_this(), std::move(aidlRequest),
std::move(memoryIdentifierTokens), aidlMeasure,
- aidlLoopTimeoutDuration, std::move(relocation), std::move(holds));
+ aidlLoopTimeoutDuration, hints, extensionNameToPrefix,
+ std::move(relocation), std::move(holds));
}
nn::GeneralResult<std::shared_ptr<const BurstExecution>> BurstExecution::create(
std::shared_ptr<const Burst> burst, Request request,
std::vector<int64_t> memoryIdentifierTokens, bool measure, int64_t loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
hal::utils::RequestRelocation relocation,
std::vector<Burst::OptionalCacheHold> cacheHolds) {
if (burst == nullptr) {
@@ -286,13 +313,15 @@
return std::make_shared<const BurstExecution>(
PrivateConstructorTag{}, std::move(burst), std::move(request),
- std::move(memoryIdentifierTokens), measure, loopTimeoutDuration, std::move(relocation),
- std::move(cacheHolds));
+ std::move(memoryIdentifierTokens), measure, loopTimeoutDuration, hints,
+ extensionNameToPrefix, std::move(relocation), std::move(cacheHolds));
}
BurstExecution::BurstExecution(PrivateConstructorTag /*tag*/, std::shared_ptr<const Burst> burst,
Request request, std::vector<int64_t> memoryIdentifierTokens,
bool measure, int64_t loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
hal::utils::RequestRelocation relocation,
std::vector<Burst::OptionalCacheHold> cacheHolds)
: kBurst(std::move(burst)),
@@ -300,6 +329,8 @@
kMemoryIdentifierTokens(std::move(memoryIdentifierTokens)),
kMeasure(measure),
kLoopTimeoutDuration(loopTimeoutDuration),
+ kHints(hints),
+ kExtensionNameToPrefix(extensionNameToPrefix),
kRelocation(std::move(relocation)),
kCacheHolds(std::move(cacheHolds)) {}
@@ -307,7 +338,8 @@
const nn::OptionalTimePoint& deadline) const {
const auto aidlDeadline = NN_TRY(convert(deadline));
return kBurst->executeInternal(kRequest, kMemoryIdentifierTokens, kMeasure, aidlDeadline,
- kLoopTimeoutDuration, kRelocation);
+ kLoopTimeoutDuration, kHints, kExtensionNameToPrefix,
+ kRelocation);
}
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
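Note (not part of the patch): Burst::create now carries the driver's feature level so that executeInternal above can pick between executeSynchronously and executeSynchronouslyWithConfig. A hypothetical helper in the same aidl utils namespace, mirroring how PreparedModel::configureExecutionBurst (below) obtains a Burst:

// Illustrative sketch only; HANDLE_ASTATUS is the macro already used throughout this file.
nn::GeneralResult<nn::SharedBurst> makeBurst(
        const std::shared_ptr<aidl_hal::IPreparedModel>& preparedModel,
        nn::Version featureLevel) {
    std::shared_ptr<aidl_hal::IBurst> burst;
    const auto ret = preparedModel->configureExecutionBurst(&burst);
    HANDLE_ASTATUS(ret) << "configureExecutionBurst failed";
    // Forward the feature level so the Burst can choose the *WithConfig path at
    // FEATURE_LEVEL_8 and above.
    return Burst::create(std::move(burst), featureLevel);
}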
diff --git a/neuralnetworks/aidl/utils/src/Conversions.cpp b/neuralnetworks/aidl/utils/src/Conversions.cpp
index 113d2da..eb28db7 100644
--- a/neuralnetworks/aidl/utils/src/Conversions.cpp
+++ b/neuralnetworks/aidl/utils/src/Conversions.cpp
@@ -302,9 +302,9 @@
};
}
-GeneralResult<Model::ExtensionNameAndPrefix> unvalidatedConvert(
+GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
const aidl_hal::ExtensionNameAndPrefix& extensionNameAndPrefix) {
- return Model::ExtensionNameAndPrefix{
+ return ExtensionNameAndPrefix{
.name = extensionNameAndPrefix.name,
.prefix = extensionNameAndPrefix.prefix,
};
@@ -506,6 +506,12 @@
return std::make_shared<const Handle>(std::move(duplicatedFd));
}
+#ifdef NN_AIDL_V4_OR_ABOVE
+GeneralResult<TokenValuePair> unvalidatedConvert(const aidl_hal::TokenValuePair& tokenValuePair) {
+ return TokenValuePair{.token = tokenValuePair.token, .value = tokenValuePair.value};
+}
+#endif // NN_AIDL_V4_OR_ABOVE
+
GeneralResult<Capabilities> convert(const aidl_hal::Capabilities& capabilities) {
return validatedConvert(capabilities);
}
@@ -562,6 +568,17 @@
GeneralResult<std::vector<SharedMemory>> convert(const std::vector<aidl_hal::Memory>& memories) {
return validatedConvert(memories);
}
+GeneralResult<std::vector<ExtensionNameAndPrefix>> convert(
+ const std::vector<aidl_hal::ExtensionNameAndPrefix>& extensionNameAndPrefix) {
+ return unvalidatedConvert(extensionNameAndPrefix);
+}
+
+#ifdef NN_AIDL_V4_OR_ABOVE
+GeneralResult<std::vector<TokenValuePair>> convert(
+ const std::vector<aidl_hal::TokenValuePair>& metaData) {
+ return validatedConvert(metaData);
+}
+#endif // NN_AIDL_V4_OR_ABOVE
GeneralResult<std::vector<OutputShape>> convert(
const std::vector<aidl_hal::OutputShape>& outputShapes) {
@@ -942,7 +959,7 @@
}
nn::GeneralResult<ExtensionNameAndPrefix> unvalidatedConvert(
- const nn::Model::ExtensionNameAndPrefix& extensionNameToPrefix) {
+ const nn::ExtensionNameAndPrefix& extensionNameToPrefix) {
return ExtensionNameAndPrefix{
.name = extensionNameToPrefix.name,
.prefix = extensionNameToPrefix.prefix,
@@ -1055,6 +1072,11 @@
return Extension{.name = extension.name,
.operandTypes = NN_TRY(unvalidatedConvert(extension.operandTypes))};
}
+#ifdef NN_AIDL_V4_OR_ABOVE
+nn::GeneralResult<TokenValuePair> unvalidatedConvert(const nn::TokenValuePair& tokenValuePair) {
+ return TokenValuePair{.token = tokenValuePair.token, .value = tokenValuePair.value};
+}
+#endif // NN_AIDL_V4_OR_ABOVE
nn::GeneralResult<std::vector<uint8_t>> convert(const nn::CacheToken& cacheToken) {
return validatedConvert(cacheToken);
@@ -1134,6 +1156,17 @@
const std::vector<nn::SyncFence>& syncFences) {
return validatedConvert(syncFences);
}
+nn::GeneralResult<std::vector<ExtensionNameAndPrefix>> convert(
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) {
+ return unvalidatedConvert(extensionNameToPrefix);
+}
+
+#ifdef NN_AIDL_V4_OR_ABOVE
+nn::GeneralResult<std::vector<TokenValuePair>> convert(
+ const std::vector<nn::TokenValuePair>& metaData) {
+ return validatedConvert(metaData);
+}
+#endif // NN_AIDL_V4_OR_ABOVE
nn::GeneralResult<std::vector<Extension>> convert(const std::vector<nn::Extension>& extensions) {
return validatedConvert(extensions);
diff --git a/neuralnetworks/aidl/utils/src/Device.cpp b/neuralnetworks/aidl/utils/src/Device.cpp
index bad10ed..f3f4fdb 100644
--- a/neuralnetworks/aidl/utils/src/Device.cpp
+++ b/neuralnetworks/aidl/utils/src/Device.cpp
@@ -215,7 +215,9 @@
nn::GeneralResult<nn::SharedPreparedModel> Device::prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
// Ensure that model is ready for IPC.
std::optional<nn::Model> maybeModelInShared;
const nn::Model& modelInShared =
@@ -225,17 +227,28 @@
const auto aidlPreference = NN_TRY(convert(preference));
const auto aidlPriority = NN_TRY(convert(priority));
const auto aidlDeadline = NN_TRY(convert(deadline));
- const auto aidlModelCache = NN_TRY(convert(modelCache));
- const auto aidlDataCache = NN_TRY(convert(dataCache));
+ auto aidlModelCache = NN_TRY(convert(modelCache));
+ auto aidlDataCache = NN_TRY(convert(dataCache));
const auto aidlToken = NN_TRY(convert(token));
const auto cb = ndk::SharedRefBase::make<PreparedModelCallback>(kFeatureLevel);
const auto scoped = kDeathHandler.protectCallback(cb.get());
+ if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
+ auto aidlHints = NN_TRY(convert(hints));
+ auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+ const auto ret = kDevice->prepareModelWithConfig(
+ aidlModel,
+ {aidlPreference, aidlPriority, aidlDeadline, std::move(aidlModelCache),
+ std::move(aidlDataCache), aidlToken, std::move(aidlHints),
+ std::move(aidlExtensionPrefix)},
+ cb);
+ HANDLE_ASTATUS(ret) << "prepareModel failed";
+ return cb->get();
+ }
const auto ret = kDevice->prepareModel(aidlModel, aidlPreference, aidlPriority, aidlDeadline,
aidlModelCache, aidlDataCache, aidlToken, cb);
HANDLE_ASTATUS(ret) << "prepareModel failed";
-
return cb->get();
}
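Note (not part of the patch): from the canonical interface, compilation hints ride along as two extra arguments; on a FEATURE_LEVEL_8 driver the code above forwards them via prepareModelWithConfig, while older drivers fall back to the legacy prepareModel call, which does not carry them. A sketch of a hypothetical caller, mirroring the calls in DeviceTest.cpp below (the extension name and token values are arbitrary examples):

// Illustrative sketch only; assumes the canonical NNAPI headers used by this file.
nn::GeneralResult<nn::SharedPreparedModel> prepareWithHints(const nn::SharedDevice& device,
                                                            const nn::Model& model) {
    const std::vector<nn::TokenValuePair> hints = {nn::TokenValuePair{.token = 0, .value = {1}}};
    const std::vector<nn::ExtensionNameAndPrefix> prefixes = {
            nn::ExtensionNameAndPrefix{.name = "com.example.sample_extension", .prefix = 1}};
    return device->prepareModel(model, nn::ExecutionPreference::DEFAULT, nn::Priority::DEFAULT,
                                /*deadline=*/{}, /*modelCache=*/{}, /*dataCache=*/{},
                                /*token=*/{}, hints, prefixes);
}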
diff --git a/neuralnetworks/aidl/utils/src/Execution.cpp b/neuralnetworks/aidl/utils/src/Execution.cpp
index c4add63..2fd88af 100644
--- a/neuralnetworks/aidl/utils/src/Execution.cpp
+++ b/neuralnetworks/aidl/utils/src/Execution.cpp
@@ -63,7 +63,7 @@
ExecutionWithCachedRequest::compute(const nn::OptionalTimePoint& deadline) const {
const auto aidlDeadline = NN_TRY(convert(deadline));
return kPreparedModel->executeInternal(kRequest, kMeasure, aidlDeadline, kLoopTimeoutDuration,
- kRelocation);
+ {}, {}, kRelocation);
}
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
@@ -73,9 +73,9 @@
const auto aidlWaitFor = NN_TRY(convert(waitFor));
const auto aidlDeadline = NN_TRY(convert(deadline));
const auto aidlTimeoutDurationAfterFence = NN_TRY(convert(timeoutDurationAfterFence));
- return kPreparedModel->executeFencedInternal(kRequest, aidlWaitFor, kMeasure, aidlDeadline,
- kLoopTimeoutDuration,
- aidlTimeoutDurationAfterFence, kRelocation);
+ return kPreparedModel->executeFencedInternal(
+ kRequest, aidlWaitFor, kMeasure, aidlDeadline, kLoopTimeoutDuration,
+ aidlTimeoutDurationAfterFence, {}, {}, kRelocation);
}
nn::GeneralResult<std::shared_ptr<const Execution>> Execution::create(
diff --git a/neuralnetworks/aidl/utils/src/InvalidDevice.cpp b/neuralnetworks/aidl/utils/src/InvalidDevice.cpp
index c9d9955..33270ff 100644
--- a/neuralnetworks/aidl/utils/src/InvalidDevice.cpp
+++ b/neuralnetworks/aidl/utils/src/InvalidDevice.cpp
@@ -167,6 +167,31 @@
return ndk::ScopedAStatus::ok();
}
+ndk::ScopedAStatus InvalidDevice::prepareModelWithConfig(
+ const Model& model, const PrepareModelConfig& config,
+ const std::shared_ptr<IPreparedModelCallback>& callback) {
+ if (!utils::valid(config.extensionNameToPrefix)) {
+ callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+ return toAStatus(ErrorStatus::INVALID_ARGUMENT, "Invalid extensionNameToPrefix");
+ }
+ for (const auto& hint : config.compilationHints) {
+ auto result = std::find_if(config.extensionNameToPrefix.begin(),
+ config.extensionNameToPrefix.end(),
+ [&hint](const ExtensionNameAndPrefix& extension) {
+ uint16_t prefix = static_cast<uint32_t>(hint.token) >>
+ IDevice::EXTENSION_TYPE_LOW_BITS_TYPE;
+ return prefix == extension.prefix;
+ });
+ if (result == config.extensionNameToPrefix.end()) {
+ callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
+ return toAStatus(ErrorStatus::INVALID_ARGUMENT,
+ "Invalid token for compilation hints: " + std::to_string(hint.token));
+ }
+ }
+ return prepareModel(model, config.preference, config.priority, config.deadlineNs,
+ config.modelCache, config.dataCache, config.cacheToken, callback);
+}
+
ndk::ScopedAStatus InvalidDevice::prepareModelFromCache(
int64_t /*deadline*/, const std::vector<ndk::ScopedFileDescriptor>& /*modelCache*/,
const std::vector<ndk::ScopedFileDescriptor>& /*dataCache*/,
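Note (not part of the patch): the validation added above requires every compilation hint's token to carry an extension prefix that appears in config.extensionNameToPrefix; the prefix is what remains after shifting out the low EXTENSION_TYPE_LOW_BITS_TYPE bits of the 32-bit token. A small sketch of that extraction, assuming the same namespace and includes as InvalidDevice.cpp:

// Illustrative sketch only; makes the narrowing in the loop above explicit.
uint16_t extensionPrefixOf(int32_t hintToken) {
    // High bits identify the extension (its prefix); the low bits address the
    // type within that extension, per IDevice::EXTENSION_TYPE_LOW_BITS_TYPE.
    return static_cast<uint16_t>(static_cast<uint32_t>(hintToken) >>
                                 IDevice::EXTENSION_TYPE_LOW_BITS_TYPE);
}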
diff --git a/neuralnetworks/aidl/utils/src/PreparedModel.cpp b/neuralnetworks/aidl/utils/src/PreparedModel.cpp
index 6d1de56..7e3a31c 100644
--- a/neuralnetworks/aidl/utils/src/PreparedModel.cpp
+++ b/neuralnetworks/aidl/utils/src/PreparedModel.cpp
@@ -128,8 +128,9 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> PreparedModel::execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -141,30 +142,46 @@
const auto aidlMeasure = NN_TRY(convert(measure));
const auto aidlDeadline = NN_TRY(convert(deadline));
const auto aidlLoopTimeoutDuration = NN_TRY(convert(loopTimeoutDuration));
- return executeInternal(aidlRequest, aidlMeasure, aidlDeadline, aidlLoopTimeoutDuration,
- relocation);
+ return executeInternal(aidlRequest, aidlMeasure, aidlDeadline, aidlLoopTimeoutDuration, hints,
+ extensionNameToPrefix, relocation);
}
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
PreparedModel::executeInternal(const Request& request, bool measure, int64_t deadline,
int64_t loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
const hal::utils::RequestRelocation& relocation) const {
if (relocation.input) {
relocation.input->flush();
}
ExecutionResult executionResult;
- const auto ret = kPreparedModel->executeSynchronously(request, measure, deadline,
- loopTimeoutDuration, &executionResult);
- HANDLE_ASTATUS(ret) << "executeSynchronously failed";
+ if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
+ auto aidlHints = NN_TRY(convert(hints));
+ auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+ const auto ret = kPreparedModel->executeSynchronouslyWithConfig(
+ request,
+ {measure, loopTimeoutDuration, std::move(aidlHints),
+ std::move(aidlExtensionPrefix)},
+ deadline, &executionResult);
+ HANDLE_ASTATUS(ret) << "executeSynchronouslyWithConfig failed";
+ } else {
+ const auto ret = kPreparedModel->executeSynchronously(
+ request, measure, deadline, loopTimeoutDuration, &executionResult);
+ HANDLE_ASTATUS(ret) << "executeSynchronously failed";
+ }
return handleExecutionResult(executionResult, relocation);
}
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFenced(const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
- nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const {
+PreparedModel::executeFenced(
+ const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
+ nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -179,31 +196,45 @@
const auto aidlLoopTimeoutDuration = NN_TRY(convert(loopTimeoutDuration));
const auto aidlTimeoutDurationAfterFence = NN_TRY(convert(timeoutDurationAfterFence));
return executeFencedInternal(aidlRequest, aidlWaitFor, aidlMeasure, aidlDeadline,
- aidlLoopTimeoutDuration, aidlTimeoutDurationAfterFence,
- relocation);
+ aidlLoopTimeoutDuration, aidlTimeoutDurationAfterFence, hints,
+ extensionNameToPrefix, relocation);
}
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-PreparedModel::executeFencedInternal(const Request& request,
- const std::vector<ndk::ScopedFileDescriptor>& waitFor,
- bool measure, int64_t deadline, int64_t loopTimeoutDuration,
- int64_t timeoutDurationAfterFence,
- const hal::utils::RequestRelocation& relocation) const {
+PreparedModel::executeFencedInternal(
+ const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor, bool measure,
+ int64_t deadline, int64_t loopTimeoutDuration, int64_t timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix,
+ const hal::utils::RequestRelocation& relocation) const {
if (relocation.input) {
relocation.input->flush();
}
FencedExecutionResult result;
- const auto ret =
- kPreparedModel->executeFenced(request, waitFor, measure, deadline, loopTimeoutDuration,
- timeoutDurationAfterFence, &result);
- HANDLE_ASTATUS(ret) << "executeFenced failed";
+ if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
+ auto aidlHints = NN_TRY(convert(hints));
+ auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+ const auto ret = kPreparedModel->executeFencedWithConfig(
+ request, waitFor,
+ {measure, loopTimeoutDuration, std::move(aidlHints),
+ std::move(aidlExtensionPrefix)},
+ deadline, timeoutDurationAfterFence, &result);
+ HANDLE_ASTATUS(ret) << "executeFencedWithConfig failed";
+ } else {
+ const auto ret = kPreparedModel->executeFenced(request, waitFor, measure, deadline,
+ loopTimeoutDuration,
+ timeoutDurationAfterFence, &result);
+ HANDLE_ASTATUS(ret) << "executeFenced failed";
+ }
return handleFencedExecutionResult(result, relocation);
}
nn::GeneralResult<nn::SharedExecution> PreparedModel::createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
// Ensure that request is ready for IPC.
std::optional<nn::Request> maybeRequestInShared;
hal::utils::RequestRelocation relocation;
@@ -217,8 +248,14 @@
if (kFeatureLevel.level >= nn::Version::Level::FEATURE_LEVEL_8) {
std::shared_ptr<IExecution> execution;
+ auto aidlHints = NN_TRY(convert(hints));
+ auto aidlExtensionPrefix = NN_TRY(convert(extensionNameToPrefix));
+
const auto ret = kPreparedModel->createReusableExecution(
- aidlRequest, aidlMeasure, aidlLoopTimeoutDuration, &execution);
+ aidlRequest,
+ {aidlMeasure, aidlLoopTimeoutDuration, std::move(aidlHints),
+ std::move(aidlExtensionPrefix)},
+ &execution);
HANDLE_ASTATUS(ret) << "createReusableExecution failed";
return Execution::create(std::move(execution), std::move(relocation));
}
@@ -232,7 +269,7 @@
std::shared_ptr<IBurst> burst;
const auto ret = kPreparedModel->configureExecutionBurst(&burst);
HANDLE_ASTATUS(ret) << "configureExecutionBurst failed";
- return Burst::create(std::move(burst));
+ return Burst::create(std::move(burst), kFeatureLevel);
}
std::any PreparedModel::getUnderlyingResource() const {
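Note (not part of the patch): on a FEATURE_LEVEL_8 driver, measure, loop timeout, hints, and extension prefixes travel together as the brace-initialized config passed to the *WithConfig and createReusableExecution calls above. A sketch of the canonical-side call that feeds this path -- `makeExecution` is a hypothetical helper:

// Illustrative sketch only; assumes the canonical NNAPI headers used by this file.
nn::GeneralResult<nn::SharedExecution> makeExecution(
        const nn::SharedPreparedModel& preparedModel, const nn::Request& request,
        const std::vector<nn::TokenValuePair>& hints,
        const std::vector<nn::ExtensionNameAndPrefix>& prefixes) {
    // On FEATURE_LEVEL_8+ the hints end up in the ExecutionConfig above; on older
    // drivers the utils layer serves the execution through the legacy calls.
    return preparedModel->createReusableExecution(request, nn::MeasureTiming::NO,
                                                  /*loopTimeoutDuration=*/{}, hints, prefixes);
}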
diff --git a/neuralnetworks/aidl/utils/test/DeviceTest.cpp b/neuralnetworks/aidl/utils/test/DeviceTest.cpp
index fb13af8..73727b3 100644
--- a/neuralnetworks/aidl/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/aidl/utils/test/DeviceTest.cpp
@@ -61,7 +61,6 @@
.powerUsage = std::numeric_limits<float>::max()};
constexpr NumberOfCacheFiles kNumberOfCacheFiles = {.numModelCache = nn::kMaxNumberOfCacheFiles - 1,
.numDataCache = nn::kMaxNumberOfCacheFiles};
-
constexpr auto makeStatusOk = [] { return ndk::ScopedAStatus::ok(); };
std::shared_ptr<MockDevice> createMockDevice() {
@@ -124,6 +123,18 @@
};
}
+const std::vector<nn::TokenValuePair> kHints = {nn::TokenValuePair{.token = 0, .value = {1}}};
+const std::vector<nn::ExtensionNameAndPrefix> kExtensionNameToPrefix = {
+ nn::ExtensionNameAndPrefix{.name = "com.android.nn_test", .prefix = 1}};
+auto makePreparedModelWithConfigReturn(ErrorStatus launchStatus, ErrorStatus returnStatus,
+ const std::shared_ptr<MockPreparedModel>& preparedModel) {
+ return [launchStatus, returnStatus, preparedModel](
+ const Model& /*model*/, const PrepareModelConfig& /*config*/,
+ const std::shared_ptr<IPreparedModelCallback>& cb) -> ndk::ScopedAStatus {
+ return makePreparedModelReturnImpl(launchStatus, returnStatus, preparedModel, cb);
+ };
+}
+
auto makePreparedModelFromCacheReturn(ErrorStatus launchStatus, ErrorStatus returnStatus,
const std::shared_ptr<MockPreparedModel>& preparedModel) {
return [launchStatus, returnStatus, preparedModel](
@@ -560,6 +571,8 @@
}
TEST_P(DeviceTest, prepareModel) {
+ if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
// setup call
const auto mockDevice = createMockDevice();
const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -571,7 +584,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -580,6 +593,8 @@
}
TEST_P(DeviceTest, prepareModelLaunchError) {
+ if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
// setup call
const auto mockDevice = createMockDevice();
const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -590,7 +605,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -598,6 +613,8 @@
}
TEST_P(DeviceTest, prepareModelReturnError) {
+ if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
// setup call
const auto mockDevice = createMockDevice();
const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -608,7 +625,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -616,6 +633,8 @@
}
TEST_P(DeviceTest, prepareModelNullptrError) {
+ if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
// setup call
const auto mockDevice = createMockDevice();
const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -626,7 +645,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -634,6 +653,8 @@
}
TEST_P(DeviceTest, prepareModelTransportFailure) {
+ if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
// setup call
const auto mockDevice = createMockDevice();
const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -643,7 +664,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -651,6 +672,8 @@
}
TEST_P(DeviceTest, prepareModelDeadObject) {
+ if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
// setup call
const auto mockDevice = createMockDevice();
const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -660,7 +683,7 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -668,6 +691,8 @@
}
TEST_P(DeviceTest, prepareModelAsyncCrash) {
+ if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
// setup test
const auto mockDevice = createMockDevice();
const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -681,7 +706,157 @@
// run test
const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfig) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockDevice = createMockDevice();
+ const auto device = Device::create(kName, mockDevice, kVersion).value();
+ const auto mockPreparedModel = MockPreparedModel::create();
+ EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+ .Times(1)
+ .WillOnce(Invoke(makePreparedModelWithConfigReturn(ErrorStatus::NONE, ErrorStatus::NONE,
+ mockPreparedModel)));
+
+ // run test
+ const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+ nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+ kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_TRUE(result.has_value())
+ << "Failed with " << result.error().code << ": " << result.error().message;
+ EXPECT_NE(result.value(), nullptr);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigLaunchError) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockDevice = createMockDevice();
+ const auto device = Device::create(kName, mockDevice, kVersion).value();
+ EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+ .Times(1)
+ .WillOnce(Invoke(makePreparedModelWithConfigReturn(
+ ErrorStatus::GENERAL_FAILURE, ErrorStatus::GENERAL_FAILURE, nullptr)));
+
+ // run test
+ const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+ nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+ kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigReturnError) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockDevice = createMockDevice();
+ const auto device = Device::create(kName, mockDevice, kVersion).value();
+ EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+ .Times(1)
+ .WillOnce(Invoke(makePreparedModelWithConfigReturn(
+ ErrorStatus::NONE, ErrorStatus::GENERAL_FAILURE, nullptr)));
+
+ // run test
+ const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+ nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+ kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigNullptrError) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockDevice = createMockDevice();
+ const auto device = Device::create(kName, mockDevice, kVersion).value();
+ EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+ .Times(1)
+ .WillOnce(Invoke(makePreparedModelWithConfigReturn(ErrorStatus::NONE, ErrorStatus::NONE,
+ nullptr)));
+
+ // run test
+ const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+ nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+ kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigTransportFailure) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockDevice = createMockDevice();
+ const auto device = Device::create(kName, mockDevice, kVersion).value();
+ EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+ // run test
+ const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+ nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+ kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigDeadObject) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockDevice = createMockDevice();
+ const auto device = Device::create(kName, mockDevice, kVersion).value();
+ EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+ // run test
+ const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+ nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+ kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigAsyncCrash) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup test
+ const auto mockDevice = createMockDevice();
+ const auto device = Device::create(kName, mockDevice, kVersion).value();
+ const auto ret = [&device]() {
+ DeathMonitor::serviceDied(device->getDeathMonitor());
+ return ndk::ScopedAStatus::ok();
+ };
+ EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs(ret));
+
+ // run test
+ const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+ nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+ kExtensionNameToPrefix);
// verify result
ASSERT_FALSE(result.has_value());
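Note (not part of the patch): the guards added to these parameterized tests partition the versions so that each kVersion exercises exactly one mocked binder method -- the legacy prepareModel tests run at FEATURE_LEVEL_7 and below, the prepareModelWithConfig tests at FEATURE_LEVEL_8 and above, matching the dispatch in Device::prepareModel. A hypothetical helper expressing that split:

// Illustrative sketch only.
bool usesPrepareModelWithConfig(nn::Version version) {
    return version.level >= nn::Version::Level::FEATURE_LEVEL_8;
}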
diff --git a/neuralnetworks/aidl/utils/test/MockBuffer.h b/neuralnetworks/aidl/utils/test/MockBuffer.h
index f77fa86..7a05a0f 100644
--- a/neuralnetworks/aidl/utils/test/MockBuffer.h
+++ b/neuralnetworks/aidl/utils/test/MockBuffer.h
@@ -21,7 +21,6 @@
#include <android/binder_interface_utils.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
-#include <hidl/Status.h>
namespace aidl::android::hardware::neuralnetworks::utils {
diff --git a/neuralnetworks/aidl/utils/test/MockBurst.h b/neuralnetworks/aidl/utils/test/MockBurst.h
index 5083bbd..609bd30 100644
--- a/neuralnetworks/aidl/utils/test/MockBurst.h
+++ b/neuralnetworks/aidl/utils/test/MockBurst.h
@@ -21,7 +21,6 @@
#include <android/binder_interface_utils.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
-#include <hidl/Status.h>
namespace aidl::android::hardware::neuralnetworks::utils {
@@ -32,6 +31,10 @@
bool measureTiming, int64_t deadline, int64_t loopTimeoutDuration,
ExecutionResult* executionResult),
(override));
+ MOCK_METHOD(ndk::ScopedAStatus, executeSynchronouslyWithConfig,
+ (const Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
+ const ExecutionConfig& config, int64_t deadline, ExecutionResult* executionResult),
+ (override));
MOCK_METHOD(ndk::ScopedAStatus, releaseMemoryResource, (int64_t memoryIdentifierToken),
(override));
};
diff --git a/neuralnetworks/aidl/utils/test/MockDevice.h b/neuralnetworks/aidl/utils/test/MockDevice.h
index 3a28d55..47b8346 100644
--- a/neuralnetworks/aidl/utils/test/MockDevice.h
+++ b/neuralnetworks/aidl/utils/test/MockDevice.h
@@ -50,6 +50,10 @@
const std::vector<uint8_t>& token,
const std::shared_ptr<IPreparedModelCallback>& callback),
(override));
+ MOCK_METHOD(ndk::ScopedAStatus, prepareModelWithConfig,
+ (const Model& model, const PrepareModelConfig& config,
+ const std::shared_ptr<IPreparedModelCallback>& callback),
+ (override));
MOCK_METHOD(ndk::ScopedAStatus, prepareModelFromCache,
(int64_t deadline, const std::vector<ndk::ScopedFileDescriptor>& modelCache,
const std::vector<ndk::ScopedFileDescriptor>& dataCache,
diff --git a/neuralnetworks/aidl/utils/test/MockExecution.h b/neuralnetworks/aidl/utils/test/MockExecution.h
index 216f569..782e54f 100644
--- a/neuralnetworks/aidl/utils/test/MockExecution.h
+++ b/neuralnetworks/aidl/utils/test/MockExecution.h
@@ -21,8 +21,6 @@
#include <android/binder_interface_utils.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
-#include <hidl/HidlSupport.h>
-#include <hidl/Status.h>
namespace aidl::android::hardware::neuralnetworks::utils {
diff --git a/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h b/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
index 06f9ea2..29449bb 100644
--- a/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
+++ b/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
@@ -22,7 +22,6 @@
#include <android/binder_interface_utils.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
-#include <hidl/Status.h>
namespace aidl::android::hardware::neuralnetworks::utils {
diff --git a/neuralnetworks/aidl/utils/test/MockPreparedModel.h b/neuralnetworks/aidl/utils/test/MockPreparedModel.h
index 0ed9af9..a5b3b66 100644
--- a/neuralnetworks/aidl/utils/test/MockPreparedModel.h
+++ b/neuralnetworks/aidl/utils/test/MockPreparedModel.h
@@ -22,8 +22,6 @@
#include <android/binder_interface_utils.h>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
-#include <hidl/HidlSupport.h>
-#include <hidl/Status.h>
namespace aidl::android::hardware::neuralnetworks::utils {
@@ -40,10 +38,19 @@
bool measureTiming, int64_t deadline, int64_t loopTimeoutDuration,
int64_t duration, FencedExecutionResult* fencedExecutionResult),
(override));
+ MOCK_METHOD(ndk::ScopedAStatus, executeSynchronouslyWithConfig,
+ (const Request& request, const ExecutionConfig& config, int64_t deadline,
+ ExecutionResult* executionResult),
+ (override));
+ MOCK_METHOD(ndk::ScopedAStatus, executeFencedWithConfig,
+ (const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+ const ExecutionConfig& config, int64_t deadline, int64_t duration,
+ FencedExecutionResult* fencedExecutionResult),
+ (override));
MOCK_METHOD(ndk::ScopedAStatus, configureExecutionBurst, (std::shared_ptr<IBurst> * burst),
(override));
MOCK_METHOD(ndk::ScopedAStatus, createReusableExecution,
- (const Request& request, bool measureTiming, int64_t loopTimeoutDuration,
+ (const Request& request, const ExecutionConfig& config,
std::shared_ptr<IExecution>* execution),
(override));
};
diff --git a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
index 8cfb7c1..bf6136d 100644
--- a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
@@ -70,6 +70,21 @@
class PreparedModelTest : public VersionedAidlUtilsTestBase {};
+const std::vector<nn::TokenValuePair> kHints = {nn::TokenValuePair{.token = 0, .value = {1}}};
+const std::vector<nn::ExtensionNameAndPrefix> kExtensionNameToPrefix = {
+ nn::ExtensionNameAndPrefix{.name = "com.android.nn_test", .prefix = 1}};
+auto makeFencedExecutionWithConfigResult(
+ const std::shared_ptr<MockFencedExecutionCallback>& callback) {
+ return [callback](const Request& /*request*/,
+ const std::vector<ndk::ScopedFileDescriptor>& /*waitFor*/,
+ const ExecutionConfig& /*config*/, int64_t /*deadline*/, int64_t /*duration*/,
+ FencedExecutionResult* fencedExecutionResult) {
+ *fencedExecutionResult = FencedExecutionResult{.callback = callback,
+ .syncFence = ndk::ScopedFileDescriptor(-1)};
+ return ndk::ScopedAStatus::ok();
+ };
+}
+
} // namespace
TEST_P(PreparedModelTest, invalidPreparedModel) {
@@ -82,6 +97,8 @@
}
TEST_P(PreparedModelTest, executeSync) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup call
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -96,7 +113,7 @@
DoAll(SetArgPointee<4>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
EXPECT_TRUE(result.has_value())
@@ -104,6 +121,8 @@
}
TEST_P(PreparedModelTest, executeSyncError) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -112,7 +131,7 @@
.WillOnce(Invoke(makeGeneralFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -120,6 +139,8 @@
}
TEST_P(PreparedModelTest, executeSyncTransportFailure) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -128,7 +149,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -136,6 +157,8 @@
}
TEST_P(PreparedModelTest, executeSyncDeadObject) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -144,7 +167,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -152,6 +175,8 @@
}
TEST_P(PreparedModelTest, executeFenced) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup call
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -165,7 +190,7 @@
.WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -181,6 +206,8 @@
}
TEST_P(PreparedModelTest, executeFencedCallbackError) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup call
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -195,7 +222,7 @@
.WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -211,6 +238,8 @@
}
TEST_P(PreparedModelTest, executeFencedError) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -219,7 +248,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralFailure));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -227,6 +256,8 @@
}
TEST_P(PreparedModelTest, executeFencedTransportFailure) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -235,7 +266,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -243,6 +274,8 @@
}
TEST_P(PreparedModelTest, executeFencedDeadObject) {
+ if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -251,7 +284,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -276,7 +309,7 @@
DoAll(SetArgPointee<4>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -300,7 +333,7 @@
.WillOnce(Invoke(makeGeneralFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -322,7 +355,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -344,7 +377,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -372,7 +405,7 @@
.WillRepeatedly(Invoke(makeFencedExecutionResult(mockCallback)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -410,7 +443,7 @@
.WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -440,7 +473,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -462,7 +495,7 @@
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -484,7 +517,7 @@
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
// create execution
- const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+ const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
ASSERT_TRUE(createResult.has_value())
<< "Failed with " << createResult.error().code << ": " << createResult.error().message;
ASSERT_NE(createResult.value(), nullptr);
@@ -495,6 +528,206 @@
EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT);
}
+TEST_P(PreparedModelTest, executeSyncWithConfig) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ const auto mockExecutionResult = ExecutionResult{
+ .outputSufficientSize = true,
+ .outputShapes = {},
+ .timing = kNoTiming,
+ };
+ EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+ .Times(1)
+ .WillOnce(
+ DoAll(SetArgPointee<3>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
+
+ // run test
+ const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ EXPECT_TRUE(result.has_value())
+ << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigError) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup test
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+ .Times(1)
+ .WillOnce(Invoke(makeGeneralFailure));
+
+ // run test
+ const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigTransportFailure) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup test
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+ // run test
+ const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigDeadObject) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup test
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+ // run test
+ const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfig) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ const auto mockCallback = MockFencedExecutionCallback::create();
+ EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+ .Times(1)
+ .WillOnce(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+ SetArgPointee<2>(ErrorStatus::NONE), Invoke(makeStatusOk)));
+ EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+ .Times(1)
+ .WillOnce(Invoke(makeFencedExecutionWithConfigResult(mockCallback)));
+
+ // run test
+ const auto result =
+ preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_TRUE(result.has_value())
+ << "Failed with " << result.error().code << ": " << result.error().message;
+ const auto& [syncFence, callback] = result.value();
+ EXPECT_EQ(syncFence.syncWait({}), nn::SyncFence::FenceState::SIGNALED);
+ ASSERT_NE(callback, nullptr);
+
+ // get results from callback
+ const auto callbackResult = callback();
+ ASSERT_TRUE(callbackResult.has_value()) << "Failed with " << callbackResult.error().code << ": "
+ << callbackResult.error().message;
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigCallbackError) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup call
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ const auto mockCallback = MockFencedExecutionCallback::create();
+ EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+ .Times(1)
+ .WillOnce(Invoke(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+ SetArgPointee<2>(ErrorStatus::GENERAL_FAILURE),
+ Invoke(makeStatusOk))));
+ EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+ .Times(1)
+ .WillOnce(Invoke(makeFencedExecutionWithConfigResult(mockCallback)));
+
+ // run test
+ const auto result =
+ preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_TRUE(result.has_value())
+ << "Failed with " << result.error().code << ": " << result.error().message;
+ const auto& [syncFence, callback] = result.value();
+ EXPECT_NE(syncFence.syncWait({}), nn::SyncFence::FenceState::ACTIVE);
+ ASSERT_NE(callback, nullptr);
+
+ // verify callback failure
+ const auto callbackResult = callback();
+ ASSERT_FALSE(callbackResult.has_value());
+ EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigError) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup test
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
+
+ // run test
+ const auto result =
+ preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigTransportFailure) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup test
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+ // run test
+ const auto result =
+ preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigDeadObject) {
+ if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+ // setup test
+ const auto mockPreparedModel = MockPreparedModel::create();
+ const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+ EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+ .Times(1)
+ .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+ // run test
+ const auto result =
+ preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+ // verify result
+ ASSERT_FALSE(result.has_value());
+ EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
TEST_P(PreparedModelTest, configureExecutionBurst) {
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
@@ -567,13 +800,13 @@
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
const auto mockExecution = ndk::SharedRefBase::make<MockExecution>();
- EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
.Times(1)
- .WillOnce(DoAll(SetArgPointee<3>(mockExecution), Invoke(makeStatusOk)));
+ .WillOnce(DoAll(SetArgPointee<2>(mockExecution), Invoke(makeStatusOk)));
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
// run test
- const auto result = preparedModel->createReusableExecution({}, {}, {});
+ const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -586,13 +819,13 @@
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
- EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
.Times(1)
.WillOnce(InvokeWithoutArgs(makeGeneralFailure));
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
// run test
- const auto result = preparedModel->createReusableExecution({}, {}, {});
+ const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -604,13 +837,13 @@
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
- EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
.Times(1)
.WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
// run test
- const auto result = preparedModel->createReusableExecution({}, {}, {});
+ const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -622,13 +855,13 @@
// setup test
const auto mockPreparedModel = MockPreparedModel::create();
- EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
.Times(1)
.WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
// run test
- const auto result = preparedModel->createReusableExecution({}, {}, {});
+ const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
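
The executeFencedWithConfig tests above rely on a makeFencedExecutionWithConfigResult helper that is defined elsewhere in this test file. A minimal sketch of what such a helper could look like, assuming the executeFencedWithConfig parameter list declared in the adapter headers later in this change (Request, wait-for fds, ExecutionConfig, deadline, duration, out-parameter):

    // Sketch only: a gMock-compatible callable that fills in the FencedExecutionResult with the
    // provided mock callback and an empty (-1) sync fence, then reports success.
    auto makeFencedExecutionWithConfigResult(
            const std::shared_ptr<MockFencedExecutionCallback>& callback) {
        return [callback](const Request& /*request*/,
                          const std::vector<ndk::ScopedFileDescriptor>& /*waitFor*/,
                          const ExecutionConfig& /*config*/, int64_t /*deadline*/,
                          int64_t /*duration*/, FencedExecutionResult* fencedExecutionResult) {
            FencedExecutionResult result;
            result.callback = callback;
            result.syncFence = ndk::ScopedFileDescriptor(-1);
            *fencedExecutionResult = std::move(result);
            return ndk::ScopedAStatus::ok();
        };
    }
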
diff --git a/neuralnetworks/aidl/vts/functional/Android.bp b/neuralnetworks/aidl/vts/functional/Android.bp
index 1ed15b8..356cdb0 100644
--- a/neuralnetworks/aidl/vts/functional/Android.bp
+++ b/neuralnetworks/aidl/vts/functional/Android.bp
@@ -30,6 +30,7 @@
"neuralnetworks_vts_functional_defaults",
"use_libaidlvintf_gtest_helper_static",
],
+ host_supported: true,
srcs: [
"BasicTests.cpp",
"Callbacks.cpp",
@@ -46,18 +47,11 @@
],
shared_libs: [
"libbinder_ndk",
- "libnativewindow",
- "libvndksupport",
],
static_libs: [
- "android.hidl.allocator@1.0",
- "android.hidl.memory@1.0",
"libaidlcommonsupport",
- "libgmock",
- "libhidlmemory",
"libneuralnetworks_common",
"libneuralnetworks_generated_test_harness",
- "libsync",
],
whole_static_libs: [
"neuralnetworks_generated_AIDL_V3_example",
@@ -73,6 +67,34 @@
],
test_suites: [
"general-tests",
- "vts",
],
+ target: {
+ android: {
+ shared_libs: [
+ "libnativewindow",
+ "libvndksupport",
+ ],
+ static_libs: [
+ "libsync",
+ ],
+ test_suites: [
+ "vts",
+ ],
+ test_config: "AndroidTestDevice.xml",
+ },
+ host: {
+ shared_libs: [
+ "libtextclassifier_hash",
+ ],
+ static_libs: [
+ "neuralnetworks_canonical_sample_driver",
+ "neuralnetworks_utils_hal_adapter_aidl",
+ ],
+ exclude_static_libs: [
+ "VtsHalHidlTestUtils",
+ "libaidlvintf_gtest_helper",
+ ],
+ test_config: "AndroidTestHost.xml",
+ },
+ },
}
diff --git a/neuralnetworks/aidl/vts/functional/AndroidTest.xml b/neuralnetworks/aidl/vts/functional/AndroidTestDevice.xml
similarity index 100%
rename from neuralnetworks/aidl/vts/functional/AndroidTest.xml
rename to neuralnetworks/aidl/vts/functional/AndroidTestDevice.xml
diff --git a/neuralnetworks/aidl/vts/functional/AndroidTestHost.xml b/neuralnetworks/aidl/vts/functional/AndroidTestHost.xml
new file mode 100644
index 0000000..7372a31
--- /dev/null
+++ b/neuralnetworks/aidl/vts/functional/AndroidTestHost.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2022 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<configuration description="Runs VtsHalNeuralnetworksTargetTest.">
+ <test class="com.android.tradefed.testtype.HostGTest" >
+ <option name="module-name" value="VtsHalNeuralnetworksTargetTest" />
+ <option name="native-test-timeout" value="15m" />
+ </test>
+</configuration>
+
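
With host_supported enabled in Android.bp and this HostGTest config in place, the same gtest module can be built and run on the host in addition to a device. Under a standard AOSP atest setup, the host variant would typically be exercised with something like the following (shown for illustration only):

    atest VtsHalNeuralnetworksTargetTest --host
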
diff --git a/neuralnetworks/aidl/vts/functional/CompilationCachingTests.cpp b/neuralnetworks/aidl/vts/functional/CompilationCachingTests.cpp
index 77208aa..7451f7e 100644
--- a/neuralnetworks/aidl/vts/functional/CompilationCachingTests.cpp
+++ b/neuralnetworks/aidl/vts/functional/CompilationCachingTests.cpp
@@ -23,7 +23,6 @@
#include <fcntl.h>
#include <ftw.h>
#include <gtest/gtest.h>
-#include <hidlmemory/mapping.h>
#include <unistd.h>
#include <cstdio>
@@ -34,7 +33,6 @@
#include "Callbacks.h"
#include "GeneratedTestHarness.h"
-#include "MemoryUtils.h"
#include "TestHarness.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"
@@ -229,7 +227,11 @@
// Create cache directory. The cache directory and a temporary cache file are always created
// to test the behavior of prepareModelFromCache, even when caching is not supported.
+#ifdef __ANDROID__
char cacheDirTemp[] = "/data/local/tmp/TestCompilationCachingXXXXXX";
+#else // __ANDROID__
+ char cacheDirTemp[] = "/tmp/TestCompilationCachingXXXXXX";
+#endif // __ANDROID__
char* cacheDir = mkdtemp(cacheDirTemp);
ASSERT_NE(cacheDir, nullptr);
mCacheDir = cacheDir;
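
The #ifdef above simply swaps the mkdtemp template between the device and host filesystems. As an illustration only (a hypothetical helper, not part of this patch), the same selection could be factored out like this:

    // Sketch: choose the cache-dir template in one place; mkdtemp() mutates its argument,
    // so the template is kept in a writable std::string buffer.
    static std::string makeCacheDirTemplate() {
    #ifdef __ANDROID__
        return "/data/local/tmp/TestCompilationCachingXXXXXX";
    #else   // __ANDROID__
        return "/tmp/TestCompilationCachingXXXXXX";
    #endif  // __ANDROID__
    }

    std::string cacheDirTemp = makeCacheDirTemplate();
    char* cacheDir = mkdtemp(cacheDirTemp.data());  // non-const data() requires C++17
    ASSERT_NE(cacheDir, nullptr);
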
diff --git a/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp b/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
index f67fd34..40f6cd1 100644
--- a/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
+++ b/neuralnetworks/aidl/vts/functional/GeneratedTestHarness.cpp
@@ -20,7 +20,6 @@
#include <aidl/android/hardware/neuralnetworks/RequestMemoryPool.h>
#include <android-base/logging.h>
#include <android/binder_auto_utils.h>
-#include <android/sync.h>
#include <gtest/gtest.h>
#include <algorithm>
@@ -30,7 +29,6 @@
#include <numeric>
#include <vector>
-#include <MemoryUtils.h>
#include <android/binder_status.h>
#include <nnapi/Result.h>
#include <nnapi/SharedMemory.h>
@@ -43,6 +41,10 @@
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"
+#ifdef __ANDROID__
+#include <android/sync.h>
+#endif // __ANDROID__
+
namespace aidl::android::hardware::neuralnetworks::vts::functional {
namespace nn = ::android::nn;
@@ -58,25 +60,66 @@
bool measureTiming;
OutputType outputType;
MemoryType memoryType;
+ bool reusable;
// `reportSkipping` indicates if a test should print an info message in case
// it is skipped. The field is set to true by default and is set to false in
// quantization coupling tests to suppress skipping a test
bool reportSkipping;
- TestConfig(Executor executor, bool measureTiming, OutputType outputType, MemoryType memoryType)
- : executor(executor),
- measureTiming(measureTiming),
- outputType(outputType),
- memoryType(memoryType),
- reportSkipping(true) {}
+ // `useConfig` indicates if a test should use execute*WithConfig functions for the execution.
+ bool useConfig;
TestConfig(Executor executor, bool measureTiming, OutputType outputType, MemoryType memoryType,
- bool reportSkipping)
+ bool reusable)
: executor(executor),
measureTiming(measureTiming),
outputType(outputType),
memoryType(memoryType),
- reportSkipping(reportSkipping) {}
+ reusable(reusable),
+ reportSkipping(true),
+ useConfig(false) {}
+ TestConfig(Executor executor, bool measureTiming, OutputType outputType, MemoryType memoryType,
+ bool reusable, bool reportSkipping)
+ : executor(executor),
+ measureTiming(measureTiming),
+ outputType(outputType),
+ memoryType(memoryType),
+ reusable(reusable),
+ reportSkipping(reportSkipping),
+ useConfig(false) {}
+ TestConfig(Executor executor, bool measureTiming, OutputType outputType, MemoryType memoryType,
+ bool reusable, bool reportSkipping, bool useConfig)
+ : executor(executor),
+ measureTiming(measureTiming),
+ outputType(outputType),
+ memoryType(memoryType),
+ reusable(reusable),
+ reportSkipping(reportSkipping),
+ useConfig(useConfig) {}
};
+std::string toString(OutputType type) {
+ switch (type) {
+ case OutputType::FULLY_SPECIFIED:
+ return "FULLY_SPECIFIED";
+ case OutputType::UNSPECIFIED:
+ return "UNSPECIFIED";
+ case OutputType::INSUFFICIENT:
+ return "INSUFFICIENT";
+ case OutputType::MISSED_DEADLINE:
+ return "MISSED_DEADLINE";
+ }
+}
+
+std::string toString(const TestConfig& config) {
+ std::stringstream ss;
+ ss << "TestConfig{.executor=" << toString(config.executor)
+ << ", .measureTiming=" << (config.measureTiming ? "true" : "false")
+ << ", .outputType=" << toString(config.outputType)
+ << ", .memoryType=" << toString(config.memoryType)
+ << ", .reusable=" << (config.reusable ? "true" : "false")
+ << ", .useConfig=" << (config.useConfig ? "true" : "false") << "}";
+ return ss.str();
+}
+
enum class IOType { INPUT, OUTPUT };
class DeviceMemoryAllocator {
@@ -240,10 +283,14 @@
} // namespace
void waitForSyncFence(int syncFd) {
- constexpr int kInfiniteTimeout = -1;
ASSERT_GT(syncFd, 0);
+#ifdef __ANDROID__
+ constexpr int kInfiniteTimeout = -1;
int r = sync_wait(syncFd, kInfiniteTimeout);
ASSERT_GE(r, 0);
+#else // __ANDROID__
+ LOG(FATAL) << "waitForSyncFence not supported on host";
+#endif // __ANDROID__
}
Model createModel(const TestModel& testModel) {
@@ -558,209 +605,266 @@
loopTimeoutDurationNs = 1 * kMillisecond;
}
- ErrorStatus executionStatus;
- std::vector<OutputShape> outputShapes;
- Timing timing = kNoTiming;
- switch (testConfig.executor) {
- case Executor::SYNC: {
- SCOPED_TRACE("synchronous");
+ std::shared_ptr<IExecution> execution;
+ if (testConfig.reusable) {
+ const auto ret = preparedModel->createReusableExecution(
+ request, {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}}, &execution);
+ ASSERT_TRUE(ret.isOk()) << static_cast<nn::ErrorStatus>(ret.getServiceSpecificError());
+ ASSERT_NE(nullptr, execution.get());
+ }
- ExecutionResult executionResult;
- // execute
- const auto ret = preparedModel->executeSynchronously(request, testConfig.measureTiming,
- kNoDeadline, loopTimeoutDurationNs,
- &executionResult);
- ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
- << ret.getDescription();
- if (ret.isOk()) {
- executionStatus = executionResult.outputSufficientSize
- ? ErrorStatus::NONE
- : ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
- outputShapes = std::move(executionResult.outputShapes);
- timing = executionResult.timing;
- } else {
- executionStatus = static_cast<ErrorStatus>(ret.getServiceSpecificError());
- }
- break;
- }
- case Executor::BURST: {
- SCOPED_TRACE("burst");
+ const auto executeAndCheckResults = [&preparedModel, &execution, &testConfig, &testModel,
+ &context, &request, loopTimeoutDurationNs, skipped]() {
+ ErrorStatus executionStatus;
+ std::vector<OutputShape> outputShapes;
+ Timing timing = kNoTiming;
+ switch (testConfig.executor) {
+ case Executor::SYNC: {
+ SCOPED_TRACE("synchronous");
- // create burst
- std::shared_ptr<IBurst> burst;
- auto ret = preparedModel->configureExecutionBurst(&burst);
- ASSERT_TRUE(ret.isOk()) << ret.getDescription();
- ASSERT_NE(nullptr, burst.get());
-
- // associate a unique slot with each memory pool
- int64_t currentSlot = 0;
- std::vector<int64_t> slots;
- slots.reserve(request.pools.size());
- for (const auto& pool : request.pools) {
- if (pool.getTag() == RequestMemoryPool::Tag::pool) {
- slots.push_back(currentSlot++);
+ ExecutionResult executionResult;
+ // execute
+ ::ndk::ScopedAStatus ret;
+ if (testConfig.reusable) {
+ ret = execution->executeSynchronously(kNoDeadline, &executionResult);
+ } else if (testConfig.useConfig) {
+ ret = preparedModel->executeSynchronouslyWithConfig(
+ request, {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}},
+ kNoDeadline, &executionResult);
} else {
- EXPECT_EQ(pool.getTag(), RequestMemoryPool::Tag::token);
- slots.push_back(-1);
+ ret = preparedModel->executeSynchronously(request, testConfig.measureTiming,
+ kNoDeadline, loopTimeoutDurationNs,
+ &executionResult);
}
+ ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
+ << ret.getDescription();
+ if (ret.isOk()) {
+ executionStatus = executionResult.outputSufficientSize
+ ? ErrorStatus::NONE
+ : ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
+ outputShapes = std::move(executionResult.outputShapes);
+ timing = executionResult.timing;
+ } else {
+ executionStatus = static_cast<ErrorStatus>(ret.getServiceSpecificError());
+ }
+ break;
}
+ case Executor::BURST: {
+ SCOPED_TRACE("burst");
- ExecutionResult executionResult;
- // execute
- ret = burst->executeSynchronously(request, slots, testConfig.measureTiming, kNoDeadline,
- loopTimeoutDurationNs, &executionResult);
- ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
- << ret.getDescription();
- if (ret.isOk()) {
- executionStatus = executionResult.outputSufficientSize
- ? ErrorStatus::NONE
- : ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
- outputShapes = std::move(executionResult.outputShapes);
- timing = executionResult.timing;
- } else {
- executionStatus = static_cast<ErrorStatus>(ret.getServiceSpecificError());
- }
-
- // Mark each slot as unused after the execution. This is unnecessary because the burst
- // is freed after this scope ends, but this is here to test the functionality.
- for (int64_t slot : slots) {
- ret = burst->releaseMemoryResource(slot);
+ // create burst
+ std::shared_ptr<IBurst> burst;
+ auto ret = preparedModel->configureExecutionBurst(&burst);
ASSERT_TRUE(ret.isOk()) << ret.getDescription();
- }
+ ASSERT_NE(nullptr, burst.get());
- break;
- }
- case Executor::FENCED: {
- SCOPED_TRACE("fenced");
- ErrorStatus result = ErrorStatus::NONE;
- FencedExecutionResult executionResult;
- auto ret = preparedModel->executeFenced(request, {}, testConfig.measureTiming,
- kNoDeadline, loopTimeoutDurationNs, kNoDuration,
- &executionResult);
- ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
- << ret.getDescription();
- if (!ret.isOk()) {
- result = static_cast<ErrorStatus>(ret.getServiceSpecificError());
- executionStatus = result;
- } else if (executionResult.syncFence.get() != -1) {
- std::vector<ndk::ScopedFileDescriptor> waitFor;
- auto dupFd = dup(executionResult.syncFence.get());
- ASSERT_NE(dupFd, -1);
- waitFor.emplace_back(dupFd);
- // If a sync fence is returned, try start another run waiting for the sync fence.
- ret = preparedModel->executeFenced(request, waitFor, testConfig.measureTiming,
- kNoDeadline, loopTimeoutDurationNs, kNoDuration,
- &executionResult);
- ASSERT_TRUE(ret.isOk());
- waitForSyncFence(executionResult.syncFence.get());
- }
- if (result == ErrorStatus::NONE) {
- ASSERT_NE(executionResult.callback, nullptr);
- Timing timingFenced;
- auto ret = executionResult.callback->getExecutionInfo(&timing, &timingFenced,
- &executionStatus);
- ASSERT_TRUE(ret.isOk());
- }
- break;
- }
- default: {
- FAIL() << "Unsupported execution mode for AIDL interface.";
- }
- }
-
- if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
- executionStatus == ErrorStatus::GENERAL_FAILURE) {
- if (skipped != nullptr) {
- *skipped = true;
- }
- if (!testConfig.reportSkipping) {
- return;
- }
- LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
- "execute model that it does not support.";
- std::cout << "[ ] Early termination of test because vendor service cannot "
- "execute model that it does not support."
- << std::endl;
- GTEST_SKIP();
- }
- if (!testConfig.measureTiming) {
- EXPECT_EQ(timing, kNoTiming);
- } else {
- if (timing.timeOnDeviceNs != -1 && timing.timeInDriverNs != -1) {
- EXPECT_LE(timing.timeOnDeviceNs, timing.timeInDriverNs);
- }
- }
-
- switch (testConfig.outputType) {
- case OutputType::FULLY_SPECIFIED:
- if (testConfig.executor == Executor::FENCED && hasZeroSizedOutput(testModel)) {
- // Executor::FENCED does not support zero-sized output.
- ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
- return;
- }
- // If the model output operands are fully specified, outputShapes must be either
- // either empty, or have the same number of elements as the number of outputs.
- ASSERT_EQ(ErrorStatus::NONE, executionStatus);
- ASSERT_TRUE(outputShapes.size() == 0 ||
- outputShapes.size() == testModel.main.outputIndexes.size());
- break;
- case OutputType::UNSPECIFIED:
- if (testConfig.executor == Executor::FENCED) {
- // For Executor::FENCED, the output shape must be fully specified.
- ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
- return;
- }
- // If the model output operands are not fully specified, outputShapes must have
- // the same number of elements as the number of outputs.
- ASSERT_EQ(ErrorStatus::NONE, executionStatus);
- ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
- break;
- case OutputType::INSUFFICIENT:
- if (testConfig.executor == Executor::FENCED) {
- // For Executor::FENCED, the output shape must be fully specified.
- ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
- return;
- }
- ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
- ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
- // Check that all returned output dimensions are at least as fully specified as the
- // union of the information about the corresponding operand in the model and in the
- // request. In this test, all model outputs have known rank with all dimensions
- // unspecified, and no dimensional information is provided in the request.
- for (uint32_t i = 0; i < outputShapes.size(); i++) {
- ASSERT_EQ(outputShapes[i].isSufficient, i != kInsufficientOutputIndex);
- const auto& actual = outputShapes[i].dimensions;
- const auto& golden =
- testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
- ASSERT_EQ(actual.size(), golden.size());
- for (uint32_t j = 0; j < actual.size(); j++) {
- if (actual[j] == 0) continue;
- EXPECT_EQ(actual[j], golden[j]) << "index: " << j;
+ // associate a unique slot with each memory pool
+ int64_t currentSlot = 0;
+ std::vector<int64_t> slots;
+ slots.reserve(request.pools.size());
+ for (const auto& pool : request.pools) {
+ if (pool.getTag() == RequestMemoryPool::Tag::pool) {
+ slots.push_back(currentSlot++);
+ } else {
+ EXPECT_EQ(pool.getTag(), RequestMemoryPool::Tag::token);
+ slots.push_back(-1);
+ }
}
+
+ ExecutionResult executionResult;
+ // execute
+ if (testConfig.useConfig) {
+ ret = burst->executeSynchronouslyWithConfig(
+ request, slots,
+ {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}}, kNoDeadline,
+ &executionResult);
+ } else {
+ ret = burst->executeSynchronously(request, slots, testConfig.measureTiming,
+ kNoDeadline, loopTimeoutDurationNs,
+ &executionResult);
+ }
+ ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
+ << ret.getDescription();
+ if (ret.isOk()) {
+ executionStatus = executionResult.outputSufficientSize
+ ? ErrorStatus::NONE
+ : ErrorStatus::OUTPUT_INSUFFICIENT_SIZE;
+ outputShapes = std::move(executionResult.outputShapes);
+ timing = executionResult.timing;
+ } else {
+ executionStatus = static_cast<ErrorStatus>(ret.getServiceSpecificError());
+ }
+
+ // Mark each slot as unused after the execution. This is unnecessary because the
+ // burst is freed after this scope ends, but this is here to test the functionality.
+ for (int64_t slot : slots) {
+ ret = burst->releaseMemoryResource(slot);
+ ASSERT_TRUE(ret.isOk()) << ret.getDescription();
+ }
+
+ break;
}
- return;
- case OutputType::MISSED_DEADLINE:
- ASSERT_TRUE(executionStatus == ErrorStatus::MISSED_DEADLINE_TRANSIENT ||
- executionStatus == ErrorStatus::MISSED_DEADLINE_PERSISTENT)
- << "executionStatus = " << executionStatus;
- return;
+ case Executor::FENCED: {
+ SCOPED_TRACE("fenced");
+ ErrorStatus result = ErrorStatus::NONE;
+ FencedExecutionResult executionResult;
+ ::ndk::ScopedAStatus ret;
+ if (testConfig.reusable) {
+ ret = execution->executeFenced({}, kNoDeadline, kNoDuration, &executionResult);
+ } else if (testConfig.useConfig) {
+ ret = preparedModel->executeFencedWithConfig(
+ request, {}, {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}},
+ kNoDeadline, kNoDuration, &executionResult);
+ } else {
+ ret = preparedModel->executeFenced(request, {}, testConfig.measureTiming,
+ kNoDeadline, loopTimeoutDurationNs,
+ kNoDuration, &executionResult);
+ }
+ ASSERT_TRUE(ret.isOk() || ret.getExceptionCode() == EX_SERVICE_SPECIFIC)
+ << ret.getDescription();
+ if (!ret.isOk()) {
+ result = static_cast<ErrorStatus>(ret.getServiceSpecificError());
+ executionStatus = result;
+ } else if (executionResult.syncFence.get() != -1) {
+ std::vector<ndk::ScopedFileDescriptor> waitFor;
+ auto dupFd = dup(executionResult.syncFence.get());
+ ASSERT_NE(dupFd, -1);
+ waitFor.emplace_back(dupFd);
+ // If a sync fence is returned, try to start another run waiting for the
+ // sync fence.
+ if (testConfig.reusable) {
+ ret = execution->executeFenced(waitFor, kNoDeadline, kNoDuration,
+ &executionResult);
+ } else if (testConfig.useConfig) {
+ ret = preparedModel->executeFencedWithConfig(
+ request, waitFor,
+ {testConfig.measureTiming, loopTimeoutDurationNs, {}, {}},
+ kNoDeadline, kNoDuration, &executionResult);
+ } else {
+ ret = preparedModel->executeFenced(
+ request, waitFor, testConfig.measureTiming, kNoDeadline,
+ loopTimeoutDurationNs, kNoDuration, &executionResult);
+ }
+ ASSERT_TRUE(ret.isOk());
+ waitForSyncFence(executionResult.syncFence.get());
+ }
+ if (result == ErrorStatus::NONE) {
+ ASSERT_NE(executionResult.callback, nullptr);
+ Timing timingFenced;
+ auto ret = executionResult.callback->getExecutionInfo(&timing, &timingFenced,
+ &executionStatus);
+ ASSERT_TRUE(ret.isOk());
+ }
+ break;
+ }
+ default: {
+ FAIL() << "Unsupported execution mode for AIDL interface.";
+ }
+ }
+
+ if (testConfig.outputType != OutputType::FULLY_SPECIFIED &&
+ executionStatus == ErrorStatus::GENERAL_FAILURE) {
+ if (skipped != nullptr) {
+ *skipped = true;
+ }
+ if (!testConfig.reportSkipping) {
+ return;
+ }
+ LOG(INFO) << "NN VTS: Early termination of test because vendor service cannot "
+ "execute model that it does not support.";
+ std::cout << "[ ] Early termination of test because vendor service cannot "
+ "execute model that it does not support."
+ << std::endl;
+ GTEST_SKIP();
+ }
+ if (!testConfig.measureTiming) {
+ EXPECT_EQ(timing, kNoTiming);
+ } else {
+ if (timing.timeOnDeviceNs != -1 && timing.timeInDriverNs != -1) {
+ EXPECT_LE(timing.timeOnDeviceNs, timing.timeInDriverNs);
+ }
+ }
+
+ switch (testConfig.outputType) {
+ case OutputType::FULLY_SPECIFIED:
+ if (testConfig.executor == Executor::FENCED && hasZeroSizedOutput(testModel)) {
+ // Executor::FENCED does not support zero-sized output.
+ ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
+ return;
+ }
+ // If the model output operands are fully specified, outputShapes must be
+ // either empty, or have the same number of elements as the number of outputs.
+ ASSERT_EQ(ErrorStatus::NONE, executionStatus);
+ ASSERT_TRUE(outputShapes.size() == 0 ||
+ outputShapes.size() == testModel.main.outputIndexes.size());
+ break;
+ case OutputType::UNSPECIFIED:
+ if (testConfig.executor == Executor::FENCED) {
+ // For Executor::FENCED, the output shape must be fully specified.
+ ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
+ return;
+ }
+ // If the model output operands are not fully specified, outputShapes must have
+ // the same number of elements as the number of outputs.
+ ASSERT_EQ(ErrorStatus::NONE, executionStatus);
+ ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
+ break;
+ case OutputType::INSUFFICIENT:
+ if (testConfig.executor == Executor::FENCED) {
+ // For Executor::FENCED, the output shape must be fully specified.
+ ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, executionStatus);
+ return;
+ }
+ ASSERT_EQ(ErrorStatus::OUTPUT_INSUFFICIENT_SIZE, executionStatus);
+ ASSERT_EQ(outputShapes.size(), testModel.main.outputIndexes.size());
+ // Check that all returned output dimensions are at least as fully specified as the
+ // union of the information about the corresponding operand in the model and in the
+ // request. In this test, all model outputs have known rank with all dimensions
+ // unspecified, and no dimensional information is provided in the request.
+ for (uint32_t i = 0; i < outputShapes.size(); i++) {
+ ASSERT_EQ(outputShapes[i].isSufficient, i != kInsufficientOutputIndex);
+ const auto& actual = outputShapes[i].dimensions;
+ const auto& golden =
+ testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
+ ASSERT_EQ(actual.size(), golden.size());
+ for (uint32_t j = 0; j < actual.size(); j++) {
+ if (actual[j] == 0) continue;
+ EXPECT_EQ(actual[j], golden[j]) << "index: " << j;
+ }
+ }
+ return;
+ case OutputType::MISSED_DEADLINE:
+ ASSERT_TRUE(executionStatus == ErrorStatus::MISSED_DEADLINE_TRANSIENT ||
+ executionStatus == ErrorStatus::MISSED_DEADLINE_PERSISTENT)
+ << "executionStatus = " << executionStatus;
+ return;
+ }
+
+ // Go through all outputs, check returned output shapes.
+ for (uint32_t i = 0; i < outputShapes.size(); i++) {
+ EXPECT_TRUE(outputShapes[i].isSufficient);
+ const auto& expect =
+ testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
+ const auto unsignedActual = nn::toUnsigned(outputShapes[i].dimensions);
+ ASSERT_TRUE(unsignedActual.has_value());
+ const std::vector<uint32_t>& actual = unsignedActual.value();
+ EXPECT_EQ(expect, actual);
+ }
+
+ // Retrieve execution results.
+ const std::vector<TestBuffer> outputs = context.getOutputBuffers(testModel, request);
+
+ // We want "close-enough" results.
+ checkResults(testModel, outputs);
+ };
+
+ executeAndCheckResults();
+
+ // For reusable execution tests, run the execution twice.
+ if (testConfig.reusable) {
+ SCOPED_TRACE("Second execution");
+ executeAndCheckResults();
}
-
- // Go through all outputs, check returned output shapes.
- for (uint32_t i = 0; i < outputShapes.size(); i++) {
- EXPECT_TRUE(outputShapes[i].isSufficient);
- const auto& expect = testModel.main.operands[testModel.main.outputIndexes[i]].dimensions;
- const auto unsignedActual = nn::toUnsigned(outputShapes[i].dimensions);
- ASSERT_TRUE(unsignedActual.has_value());
- const std::vector<uint32_t>& actual = unsignedActual.value();
- EXPECT_EQ(expect, actual);
- }
-
- // Retrieve execution results.
- const std::vector<TestBuffer> outputs = context.getOutputBuffers(testModel, request);
-
- // We want "close-enough" results.
- checkResults(testModel, outputs);
}
void EvaluatePreparedModel(const std::shared_ptr<IDevice>& device,
@@ -770,6 +874,15 @@
std::vector<bool> measureTimingList;
std::vector<Executor> executorList;
std::vector<MemoryType> memoryTypeList;
+ std::vector<bool> reusableList = {false};
+ std::vector<bool> useConfigList = {false};
+
+ int32_t deviceVersion;
+ ASSERT_TRUE(device->getInterfaceVersion(&deviceVersion).isOk());
+ if (deviceVersion >= kMinAidlLevelForFL8) {
+ reusableList.push_back(true);
+ useConfigList.push_back(true);
+ }
switch (testKind) {
case TestKind::GENERAL: {
@@ -788,7 +901,11 @@
outputTypesList = {OutputType::FULLY_SPECIFIED};
measureTimingList = {false};
executorList = {Executor::SYNC, Executor::BURST, Executor::FENCED};
+#ifdef __ANDROID__
memoryTypeList = {MemoryType::BLOB_AHWB, MemoryType::DEVICE};
+#else // __ANDROID__
+ memoryTypeList = {MemoryType::DEVICE}; // BLOB_AHWB is not supported on the host.
+#endif // __ANDROID__
} break;
case TestKind::FENCED_COMPUTE: {
outputTypesList = {OutputType::FULLY_SPECIFIED};
@@ -812,8 +929,16 @@
for (const bool measureTiming : measureTimingList) {
for (const Executor executor : executorList) {
for (const MemoryType memoryType : memoryTypeList) {
- const TestConfig testConfig(executor, measureTiming, outputType, memoryType);
- EvaluatePreparedModel(device, preparedModel, testModel, testConfig);
+ for (const bool reusable : reusableList) {
+ for (const bool useConfig : useConfigList) {
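+ // Skip combinations that pair reusable executions with the
+ // *WithConfig execution path or with the burst executor.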
+ if ((useConfig || executor == Executor::BURST) && reusable) continue;
+ const TestConfig testConfig(executor, measureTiming, outputType,
+ memoryType, reusable,
+ /*reportSkipping=*/true, useConfig);
+ SCOPED_TRACE(toString(testConfig));
+ EvaluatePreparedModel(device, preparedModel, testModel, testConfig);
+ }
+ }
}
}
}
@@ -833,7 +958,7 @@
for (const bool measureTiming : measureTimingList) {
for (const Executor executor : executorList) {
const TestConfig testConfig(executor, measureTiming, outputType, MemoryType::ASHMEM,
- /*reportSkipping=*/false);
+ /*reusable=*/false, /*reportSkipping=*/false);
bool baseSkipped = false;
EvaluatePreparedModel(device, preparedModel, testModel, testConfig, &baseSkipped);
bool coupledSkipped = false;
@@ -871,6 +996,13 @@
createPreparedModel(device, model, &preparedModel);
if (preparedModel == nullptr) return;
EvaluatePreparedModel(device, preparedModel, testModel, testKind);
+ int32_t deviceVersion;
+ ASSERT_TRUE(device->getInterfaceVersion(&deviceVersion).isOk());
+ if (deviceVersion >= kMinAidlLevelForFL8) {
+ createPreparedModel(device, model, &preparedModel, /*reportSkipping*/ true,
+ /*useConfig*/ true);
+ EvaluatePreparedModel(device, preparedModel, testModel, testKind);
+ }
} break;
case TestKind::QUANTIZATION_COUPLING: {
ASSERT_TRUE(testModel.hasQuant8CoupledOperands());
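
The reusable-execution path added above amounts to creating the execution object once and then invoking it repeatedly against the request it was bound to. A condensed sketch of that sequence, using the ExecutionConfig aggregate shape the test passes ({measureTiming, loopTimeoutDurationNs, executionHints, extensionNameToPrefix}) and constants analogous to the ones used above (kNoDeadline, kOmittedTimeoutDuration); `preparedModel` and `request` are assumed to be set up as in the test:

    // Sketch only: create the reusable execution once, then run it more than once.
    std::shared_ptr<IExecution> execution;
    const auto createStatus = preparedModel->createReusableExecution(
            request,
            {/*measureTiming=*/false, /*loopTimeoutDurationNs=*/kOmittedTimeoutDuration,
             /*executionHints=*/{}, /*extensionNameToPrefix=*/{}},
            &execution);
    ASSERT_TRUE(createStatus.isOk()) << createStatus.getDescription();
    ASSERT_NE(nullptr, execution.get());
    // The test above achieves the same effect by calling executeAndCheckResults() a second
    // time whenever testConfig.reusable is set.
    for (int i = 0; i < 2; ++i) {
        ExecutionResult executionResult;
        const auto status = execution->executeSynchronously(kNoDeadline, &executionResult);
        ASSERT_TRUE(status.isOk()) << status.getDescription();
    }
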
diff --git a/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp b/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
index b3e9c63..f8341b1 100644
--- a/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
+++ b/neuralnetworks/aidl/vts/functional/MemoryDomainTests.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "neuralnetworks_aidl_hal_test"
#include <aidl/android/hardware/graphics/common/PixelFormat.h>
+#include <aidl/android/hardware/neuralnetworks/IPreparedModel.h>
#include <android-base/logging.h>
#include <android/binder_auto_utils.h>
#include <android/binder_interface_utils.h>
@@ -33,7 +34,6 @@
#include "Callbacks.h"
#include "GeneratedTestHarness.h"
-#include "MemoryUtils.h"
#include "Utils.h"
#include "VtsHalNeuralnetworks.h"
@@ -191,7 +191,7 @@
}
// A placeholder invalid IPreparedModel class for MemoryDomainAllocateTest.InvalidPreparedModel
-class InvalidPreparedModel : public BnPreparedModel {
+class InvalidPreparedModel final : public IPreparedModel {
public:
ndk::ScopedAStatus executeSynchronously(const Request&, bool, int64_t, int64_t,
ExecutionResult*) override {
@@ -204,15 +204,37 @@
return ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
}
+ ndk::ScopedAStatus executeSynchronouslyWithConfig(const Request&, const ExecutionConfig&,
+ int64_t, ExecutionResult*) override {
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+ }
+ ndk::ScopedAStatus executeFencedWithConfig(const Request&,
+ const std::vector<ndk::ScopedFileDescriptor>&,
+ const ExecutionConfig&, int64_t, int64_t,
+ FencedExecutionResult*) override {
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+ }
ndk::ScopedAStatus configureExecutionBurst(std::shared_ptr<IBurst>*) override {
return ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
}
- ndk::ScopedAStatus createReusableExecution(const aidl_hal::Request&, bool, int64_t,
+ ndk::ScopedAStatus createReusableExecution(const aidl_hal::Request&, const ExecutionConfig&,
std::shared_ptr<aidl_hal::IExecution>*) override {
return ndk::ScopedAStatus::fromServiceSpecificError(
static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
}
+ ndk::ScopedAStatus getInterfaceVersion(int32_t* /*interfaceVersion*/) {
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+ }
+ ndk::ScopedAStatus getInterfaceHash(std::string* /*interfaceHash*/) {
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(ErrorStatus::GENERAL_FAILURE));
+ }
+ ndk::SpAIBinder asBinder() override { return ::ndk::SpAIBinder{}; }
+ bool isRemote() override { return true; }
};
template <typename... Args>
diff --git a/neuralnetworks/aidl/vts/functional/Utils.cpp b/neuralnetworks/aidl/vts/functional/Utils.cpp
index 325a436..1bc76f2 100644
--- a/neuralnetworks/aidl/vts/functional/Utils.cpp
+++ b/neuralnetworks/aidl/vts/functional/Utils.cpp
@@ -21,18 +21,20 @@
#include <aidl/android/hardware/neuralnetworks/OperandType.h>
#include <android-base/logging.h>
#include <android/binder_status.h>
-#include <android/hardware_buffer.h>
#include <sys/mman.h>
#include <iostream>
#include <limits>
#include <numeric>
-#include <MemoryUtils.h>
#include <nnapi/SharedMemory.h>
#include <nnapi/hal/aidl/Conversions.h>
#include <nnapi/hal/aidl/Utils.h>
+#ifdef __ANDROID__
+#include <android/hardware_buffer.h>
+#endif // __ANDROID__
+
namespace aidl::android::hardware::neuralnetworks {
using test_helper::TestBuffer;
@@ -140,7 +142,8 @@
return ahwb->mIsValid ? std::move(ahwb) : nullptr;
}
-void TestBlobAHWB::initialize(uint32_t size) {
+void TestBlobAHWB::initialize([[maybe_unused]] uint32_t size) {
+#ifdef __ANDROID__
mIsValid = false;
ASSERT_GT(size, 0);
const auto usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
@@ -164,6 +167,9 @@
mAidlMemory = utils::convert(mMemory).value();
mIsValid = true;
+#else // __ANDROID__
+ LOG(FATAL) << "TestBlobAHWB::initialize not supported on host";
+#endif // __ANDROID__
}
std::string gtestCompliantName(std::string name) {
@@ -177,6 +183,17 @@
return os << toString(errorStatus);
}
+std::string toString(MemoryType type) {
+ switch (type) {
+ case MemoryType::ASHMEM:
+ return "ASHMEM";
+ case MemoryType::BLOB_AHWB:
+ return "BLOB_AHWB";
+ case MemoryType::DEVICE:
+ return "DEVICE";
+ }
+}
+
Request ExecutionContext::createRequest(const TestModel& testModel, MemoryType memoryType) {
CHECK(memoryType == MemoryType::ASHMEM || memoryType == MemoryType::BLOB_AHWB);
diff --git a/neuralnetworks/aidl/vts/functional/Utils.h b/neuralnetworks/aidl/vts/functional/Utils.h
index ca81418..4e0a4aa 100644
--- a/neuralnetworks/aidl/vts/functional/Utils.h
+++ b/neuralnetworks/aidl/vts/functional/Utils.h
@@ -18,7 +18,6 @@
#define ANDROID_HARDWARE_NEURALNETWORKS_AIDL_UTILS_H
#include <android-base/logging.h>
-#include <android/hardware_buffer.h>
#include <gtest/gtest.h>
#include <algorithm>
@@ -111,6 +110,8 @@
enum class MemoryType { ASHMEM, BLOB_AHWB, DEVICE };
+std::string toString(MemoryType type);
+
// Manages the lifetime of memory resources used in an execution.
class ExecutionContext {
DISALLOW_COPY_AND_ASSIGN(ExecutionContext);
diff --git a/neuralnetworks/aidl/vts/functional/ValidateModel.cpp b/neuralnetworks/aidl/vts/functional/ValidateModel.cpp
index fdc7eff..931ba25 100644
--- a/neuralnetworks/aidl/vts/functional/ValidateModel.cpp
+++ b/neuralnetworks/aidl/vts/functional/ValidateModel.cpp
@@ -77,6 +77,28 @@
ASSERT_EQ(nullptr, preparedModel.get());
}
+static void validatePrepareModelWithConfig(const std::shared_ptr<IDevice>& device,
+ const std::string& message, const Model& model,
+ ExecutionPreference preference, Priority priority) {
+ SCOPED_TRACE(message + " [prepareModelWithConfig]");
+
+ std::shared_ptr<PreparedModelCallback> preparedModelCallback =
+ ndk::SharedRefBase::make<PreparedModelCallback>();
+ const auto prepareLaunchStatus = device->prepareModelWithConfig(
+ model, {preference, priority, kNoDeadline, {}, {}, kEmptyCacheToken, {}, {}},
+ preparedModelCallback);
+ ASSERT_FALSE(prepareLaunchStatus.isOk());
+ ASSERT_EQ(prepareLaunchStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+ ASSERT_EQ(static_cast<ErrorStatus>(prepareLaunchStatus.getServiceSpecificError()),
+ ErrorStatus::INVALID_ARGUMENT);
+
+ preparedModelCallback->wait();
+ ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
+ ASSERT_EQ(ErrorStatus::INVALID_ARGUMENT, prepareReturnStatus);
+ std::shared_ptr<IPreparedModel> preparedModel = preparedModelCallback->getPreparedModel();
+ ASSERT_EQ(nullptr, preparedModel.get());
+}
+
static bool validExecutionPreference(ExecutionPreference preference) {
return preference == ExecutionPreference::LOW_POWER ||
preference == ExecutionPreference::FAST_SINGLE_ANSWER ||
@@ -103,6 +125,13 @@
}
validatePrepareModel(device, message, model, preference, priority);
+
+ int32_t aidlVersion;
+ ASSERT_TRUE(device->getInterfaceVersion(&aidlVersion).isOk());
+ if (aidlVersion >= kMinAidlLevelForFL8) {
+ // prepareModelWithConfig must satisfy all requirements enforced by prepareModel.
+ validatePrepareModelWithConfig(device, message, model, preference, priority);
+ }
}
static uint32_t addOperand(Model* model) {
diff --git a/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp b/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
index 29e2471..d749841 100644
--- a/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
+++ b/neuralnetworks/aidl/vts/functional/ValidateRequest.cpp
@@ -36,6 +36,51 @@
///////////////////////// UTILITY FUNCTIONS /////////////////////////
+// Test request validation with reusable execution.
+static void validateReusableExecution(const std::shared_ptr<IPreparedModel>& preparedModel,
+ const std::string& message, const Request& request,
+ bool measure) {
+ // createReusableExecution
+ std::shared_ptr<IExecution> execution;
+ {
+ SCOPED_TRACE(message + " [createReusableExecution]");
+ const auto createStatus = preparedModel->createReusableExecution(
+ request, {measure, kOmittedTimeoutDuration, {}, {}}, &execution);
+ if (!createStatus.isOk()) {
+ ASSERT_EQ(createStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+ ASSERT_EQ(static_cast<ErrorStatus>(createStatus.getServiceSpecificError()),
+ ErrorStatus::INVALID_ARGUMENT);
+ ASSERT_EQ(nullptr, execution);
+ return;
+ } else {
+ ASSERT_NE(nullptr, execution);
+ }
+ }
+
+ // synchronous
+ {
+ SCOPED_TRACE(message + " [executeSynchronously]");
+ ExecutionResult executionResult;
+ const auto executeStatus = execution->executeSynchronously(kNoDeadline, &executionResult);
+ ASSERT_FALSE(executeStatus.isOk());
+ ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+ ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+ ErrorStatus::INVALID_ARGUMENT);
+ }
+
+ // fenced
+ {
+ SCOPED_TRACE(message + " [executeFenced]");
+ FencedExecutionResult executionResult;
+ const auto executeStatus =
+ execution->executeFenced({}, kNoDeadline, kNoDuration, &executionResult);
+ ASSERT_FALSE(executeStatus.isOk());
+ ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+ ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+ ErrorStatus::INVALID_ARGUMENT);
+ }
+}
+
// Primary validation function. This function will take a valid request, apply a
// mutation to it to invalidate the request, then pass it to interface calls
// that use the request.
@@ -101,6 +146,63 @@
ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
ErrorStatus::INVALID_ARGUMENT);
}
+
+ int32_t aidlVersion;
+ ASSERT_TRUE(preparedModel->getInterfaceVersion(&aidlVersion).isOk());
+ if (aidlVersion < kMinAidlLevelForFL8) {
+ return;
+ }
+
+ // validate reusable execution
+ validateReusableExecution(preparedModel, message, request, measure);
+
+ // synchronous with empty hints
+ {
+ SCOPED_TRACE(message + " [executeSynchronouslyWithConfig]");
+ ExecutionResult executionResult;
+ const auto executeStatus = preparedModel->executeSynchronouslyWithConfig(
+ request, {measure, kOmittedTimeoutDuration, {}, {}}, kNoDeadline, &executionResult);
+ ASSERT_FALSE(executeStatus.isOk());
+ ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+ ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+ ErrorStatus::INVALID_ARGUMENT);
+ }
+
+ // fenced with empty hints
+ {
+ SCOPED_TRACE(message + " [executeFencedWithConfig]");
+ FencedExecutionResult executionResult;
+ const auto executeStatus = preparedModel->executeFencedWithConfig(
+ request, {}, {false, kOmittedTimeoutDuration, {}, {}}, kNoDeadline, kNoDuration,
+ &executionResult);
+ ASSERT_FALSE(executeStatus.isOk());
+ ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+ ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+ ErrorStatus::INVALID_ARGUMENT);
+ }
+
+ // burst with empty hints
+ {
+ SCOPED_TRACE(message + " [burst executeSynchronouslyWithConfig]");
+
+ // create burst
+ std::shared_ptr<IBurst> burst;
+ auto ret = preparedModel->configureExecutionBurst(&burst);
+ ASSERT_TRUE(ret.isOk()) << ret.getDescription();
+ ASSERT_NE(nullptr, burst.get());
+
+ // use -1 for all memory identifier tokens
+ const std::vector<int64_t> slots(request.pools.size(), -1);
+
+ ExecutionResult executionResult;
+ const auto executeStatus = burst->executeSynchronouslyWithConfig(
+ request, slots, {measure, kOmittedTimeoutDuration, {}, {}}, kNoDeadline,
+ &executionResult);
+ ASSERT_FALSE(executeStatus.isOk());
+ ASSERT_EQ(executeStatus.getExceptionCode(), EX_SERVICE_SPECIFIC);
+ ASSERT_EQ(static_cast<ErrorStatus>(executeStatus.getServiceSpecificError()),
+ ErrorStatus::INVALID_ARGUMENT);
+ }
}
std::shared_ptr<IBurst> createBurst(const std::shared_ptr<IPreparedModel>& preparedModel) {
diff --git a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
index c417356..51b4805 100644
--- a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
+++ b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.cpp
@@ -15,6 +15,7 @@
*/
#define LOG_TAG "neuralnetworks_aidl_hal_test"
+
#include "VtsHalNeuralnetworks.h"
#include <android-base/logging.h>
@@ -28,20 +29,27 @@
#include <utility>
#include <TestHarness.h>
-#include <aidl/Vintf.h>
#include <nnapi/hal/aidl/Conversions.h>
#include "Callbacks.h"
#include "GeneratedTestHarness.h"
#include "Utils.h"
+#ifdef __ANDROID__
+#include <aidl/Vintf.h>
+#else // __ANDROID__
+#include <CanonicalDevice.h>
+#include <nnapi/hal/aidl/Adapter.h>
+#endif // __ANDROID__
+
namespace aidl::android::hardware::neuralnetworks::vts::functional {
using implementation::PreparedModelCallback;
// internal helper function
void createPreparedModel(const std::shared_ptr<IDevice>& device, const Model& model,
- std::shared_ptr<IPreparedModel>* preparedModel, bool reportSkipping) {
+ std::shared_ptr<IPreparedModel>* preparedModel, bool reportSkipping,
+ bool useConfig) {
ASSERT_NE(nullptr, preparedModel);
*preparedModel = nullptr;
@@ -56,11 +64,25 @@
// launch prepare model
const std::shared_ptr<PreparedModelCallback> preparedModelCallback =
ndk::SharedRefBase::make<PreparedModelCallback>();
- const auto prepareLaunchStatus =
- device->prepareModel(model, ExecutionPreference::FAST_SINGLE_ANSWER, kDefaultPriority,
- kNoDeadline, {}, {}, kEmptyCacheToken, preparedModelCallback);
- ASSERT_TRUE(prepareLaunchStatus.isOk()) << prepareLaunchStatus.getDescription();
-
+ if (useConfig) {
+ const auto prepareLaunchStatus =
+ device->prepareModelWithConfig(model,
+ {ExecutionPreference::FAST_SINGLE_ANSWER,
+ kDefaultPriority,
+ kNoDeadline,
+ {},
+ {},
+ kEmptyCacheToken,
+ {},
+ {}},
+ preparedModelCallback);
+ ASSERT_TRUE(prepareLaunchStatus.isOk()) << prepareLaunchStatus.getDescription();
+ } else {
+ const auto prepareLaunchStatus = device->prepareModel(
+ model, ExecutionPreference::FAST_SINGLE_ANSWER, kDefaultPriority, kNoDeadline, {},
+ {}, kEmptyCacheToken, preparedModelCallback);
+ ASSERT_TRUE(prepareLaunchStatus.isOk()) << prepareLaunchStatus.getDescription();
+ }
// retrieve prepared model
preparedModelCallback->wait();
const ErrorStatus prepareReturnStatus = preparedModelCallback->getStatus();
@@ -96,6 +118,7 @@
ASSERT_TRUE(deviceIsResponsive);
}
+#ifdef __ANDROID__
static NamedDevice makeNamedDevice(const std::string& name) {
ndk::SpAIBinder binder(AServiceManager_waitForService(name.c_str()));
return {name, IDevice::fromBinder(binder)};
@@ -112,6 +135,14 @@
std::transform(names.begin(), names.end(), std::back_inserter(namedDevices), makeNamedDevice);
return namedDevices;
}
+#else // __ANDROID__
+static std::vector<NamedDevice> getNamedDevicesImpl() {
+ const std::string name = "nnapi-sample";
+ auto device = std::make_shared<const ::android::nn::sample::Device>(name);
+ auto aidlDevice = adapter::adapt(device);
+ return {{name, aidlDevice}};
+}
+#endif // __ANDROID__
const std::vector<NamedDevice>& getNamedDevices() {
const static std::vector<NamedDevice> devices = getNamedDevicesImpl();
diff --git a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.h b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.h
index 4312d3a..00d705c 100644
--- a/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.h
+++ b/neuralnetworks/aidl/vts/functional/VtsHalNeuralnetworks.h
@@ -30,6 +30,8 @@
using NamedDevice = Named<std::shared_ptr<IDevice>>;
using NeuralNetworksAidlTestParam = NamedDevice;
+constexpr int kMinAidlLevelForFL8 = 4;
+
class NeuralNetworksAidlTest : public testing::TestWithParam<NeuralNetworksAidlTestParam> {
protected:
void SetUp() override;
@@ -49,8 +51,8 @@
// Create an IPreparedModel object. If the model cannot be prepared,
// "preparedModel" will be nullptr instead.
void createPreparedModel(const std::shared_ptr<IDevice>& device, const Model& model,
- std::shared_ptr<IPreparedModel>* preparedModel,
- bool reportSkipping = true);
+ std::shared_ptr<IPreparedModel>* preparedModel, bool reportSkipping = true,
+ bool useConfig = false);
enum class Executor { SYNC, BURST, FENCED };
diff --git a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Burst.h b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Burst.h
index f2687c4..8d42e2f 100644
--- a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Burst.h
+++ b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Burst.h
@@ -46,6 +46,11 @@
bool measureTiming, int64_t deadlineNs,
int64_t loopTimeoutDurationNs,
ExecutionResult* executionResult) override;
+ ndk::ScopedAStatus executeSynchronouslyWithConfig(
+ const Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
+ const ExecutionConfig& config, int64_t deadlineNs,
+ ExecutionResult* executionResult) override;
+
ndk::ScopedAStatus releaseMemoryResource(int64_t memoryIdentifierToken) override;
class ThreadSafeMemoryCache {
diff --git a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Device.h b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Device.h
index aa29d63..c94f270 100644
--- a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Device.h
+++ b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/Device.h
@@ -31,6 +31,7 @@
#include <aidl/android/hardware/neuralnetworks/IPreparedModelParcel.h>
#include <aidl/android/hardware/neuralnetworks/Model.h>
#include <aidl/android/hardware/neuralnetworks/NumberOfCacheFiles.h>
+#include <aidl/android/hardware/neuralnetworks/PrepareModelConfig.h>
#include <aidl/android/hardware/neuralnetworks/Priority.h>
#include <android/binder_auto_utils.h>
#include <nnapi/IDevice.h>
@@ -72,6 +73,9 @@
const std::vector<ndk::ScopedFileDescriptor>& dataCache,
const std::vector<uint8_t>& token,
const std::shared_ptr<IPreparedModelCallback>& callback) override;
+ ndk::ScopedAStatus prepareModelWithConfig(
+ const Model& model, const PrepareModelConfig& config,
+ const std::shared_ptr<IPreparedModelCallback>& callback) override;
protected:
const ::android::nn::SharedDevice kDevice;
diff --git a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/PreparedModel.h b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/PreparedModel.h
index f92b0bc..d1359d6 100644
--- a/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/PreparedModel.h
+++ b/neuralnetworks/utils/adapter/aidl/include/nnapi/hal/aidl/PreparedModel.h
@@ -51,9 +51,17 @@
int64_t loopTimeoutDurationNs, int64_t durationNs,
FencedExecutionResult* executionResult) override;
ndk::ScopedAStatus configureExecutionBurst(std::shared_ptr<IBurst>* burst) override;
- ndk::ScopedAStatus createReusableExecution(const Request& request, bool measureTiming,
- int64_t loopTimeoutDurationNs,
+ ndk::ScopedAStatus createReusableExecution(const Request& request,
+ const ExecutionConfig& config,
std::shared_ptr<IExecution>* execution) override;
+ ndk::ScopedAStatus executeSynchronouslyWithConfig(const Request& request,
+ const ExecutionConfig& config,
+ int64_t deadlineNs,
+ ExecutionResult* executionResult) override;
+ ndk::ScopedAStatus executeFencedWithConfig(
+ const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+ const ExecutionConfig& config, int64_t deadlineNs, int64_t durationNs,
+ FencedExecutionResult* executionResult) override;
::android::nn::SharedPreparedModel getUnderlyingPreparedModel() const;
diff --git a/neuralnetworks/utils/adapter/aidl/src/Burst.cpp b/neuralnetworks/utils/adapter/aidl/src/Burst.cpp
index 4fabb20..a4a80fa 100644
--- a/neuralnetworks/utils/adapter/aidl/src/Burst.cpp
+++ b/neuralnetworks/utils/adapter/aidl/src/Burst.cpp
@@ -93,7 +93,8 @@
nn::ExecutionResult<ExecutionResult> executeSynchronously(
const nn::IBurst& burst, const Burst::ThreadSafeMemoryCache& cache, const Request& request,
const std::vector<int64_t>& memoryIdentifierTokens, bool measureTiming, int64_t deadlineNs,
- int64_t loopTimeoutDurationNs) {
+ int64_t loopTimeoutDurationNs, const std::vector<TokenValuePair>& hints,
+ const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix) {
if (request.pools.size() != memoryIdentifierTokens.size()) {
return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT)
<< "request.pools.size() != memoryIdentifierTokens.size()";
@@ -107,11 +108,13 @@
const auto nnMeasureTiming = measureTiming ? nn::MeasureTiming::YES : nn::MeasureTiming::NO;
const auto nnDeadline = NN_TRY(makeOptionalTimePoint(deadlineNs));
const auto nnLoopTimeoutDuration = NN_TRY(makeOptionalDuration(loopTimeoutDurationNs));
+ auto nnHints = NN_TRY(convertInput(hints));
+ auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
const auto hold = ensureAllMemoriesAreCached(&nnRequest, memoryIdentifierTokens, burst, cache);
- const auto result =
- burst.execute(nnRequest, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration);
+ const auto result = burst.execute(nnRequest, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration,
+ nnHints, nnExtensionNameToPrefix);
if (!result.ok() && result.error().code == nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE) {
const auto& [message, code, outputShapes] = result.error();
@@ -155,7 +158,24 @@
ExecutionResult* executionResult) {
auto result =
adapter::executeSynchronously(*kBurst, kMemoryCache, request, memoryIdentifierTokens,
- measureTiming, deadlineNs, loopTimeoutDurationNs);
+ measureTiming, deadlineNs, loopTimeoutDurationNs, {}, {});
+ if (!result.has_value()) {
+ auto [message, code, _] = std::move(result).error();
+ const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+ return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+ static_cast<int32_t>(aidlCode), message.c_str());
+ }
+ *executionResult = std::move(result).value();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus Burst::executeSynchronouslyWithConfig(
+ const Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
+ const ExecutionConfig& config, int64_t deadlineNs, ExecutionResult* executionResult) {
+ auto result = adapter::executeSynchronously(
+ *kBurst, kMemoryCache, request, memoryIdentifierTokens, config.measureTiming,
+ deadlineNs, config.loopTimeoutDurationNs, config.executionHints,
+ config.extensionNameToPrefix);
if (!result.has_value()) {
auto [message, code, _] = std::move(result).error();
const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
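
A matching sketch for the burst path (again illustrative, not part of the patch): one memory identifier token per request pool, with -1 used here as an assumed "no caching" sentinel, so the pools/tokens size check in executeSynchronously above passes. burst and request stand in for an IBurst binder and a populated Request.

    // Sketch: burst execution via the new config-based entry point.
    std::vector<int64_t> memoryIdentifierTokens(request.pools.size(), -1);
    ExecutionConfig config;  // empty hints/prefixes => same behaviour as the legacy call
    ExecutionResult result;
    const ndk::ScopedAStatus status = burst->executeSynchronouslyWithConfig(
            request, memoryIdentifierTokens, config, /*deadlineNs=*/-1, &result);
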
diff --git a/neuralnetworks/utils/adapter/aidl/src/Device.cpp b/neuralnetworks/utils/adapter/aidl/src/Device.cpp
index 763be7f..84aaddb 100644
--- a/neuralnetworks/utils/adapter/aidl/src/Device.cpp
+++ b/neuralnetworks/utils/adapter/aidl/src/Device.cpp
@@ -148,13 +148,14 @@
}
}
-nn::GeneralResult<void> prepareModel(const nn::SharedDevice& device, const Executor& executor,
- const Model& model, ExecutionPreference preference,
- Priority priority, int64_t deadlineNs,
- const std::vector<ndk::ScopedFileDescriptor>& modelCache,
- const std::vector<ndk::ScopedFileDescriptor>& dataCache,
- const std::vector<uint8_t>& token,
- const std::shared_ptr<IPreparedModelCallback>& callback) {
+nn::GeneralResult<void> prepareModel(
+ const nn::SharedDevice& device, const Executor& executor, const Model& model,
+ ExecutionPreference preference, Priority priority, int64_t deadlineNs,
+ const std::vector<ndk::ScopedFileDescriptor>& modelCache,
+ const std::vector<ndk::ScopedFileDescriptor>& dataCache, const std::vector<uint8_t>& token,
+ const std::vector<TokenValuePair>& hints,
+ const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix,
+ const std::shared_ptr<IPreparedModelCallback>& callback) {
if (callback.get() == nullptr) {
return NN_ERROR(nn::ErrorStatus::INVALID_ARGUMENT) << "Invalid callback";
}
@@ -166,12 +167,16 @@
auto nnModelCache = NN_TRY(convertInput(modelCache));
auto nnDataCache = NN_TRY(convertInput(dataCache));
const auto nnToken = NN_TRY(convertCacheToken(token));
+ auto nnHints = NN_TRY(convertInput(hints));
+ auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
Task task = [device, nnModel = std::move(nnModel), nnPreference, nnPriority, nnDeadline,
nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),
- nnToken, callback] {
- auto result = device->prepareModel(nnModel, nnPreference, nnPriority, nnDeadline,
- nnModelCache, nnDataCache, nnToken);
+ nnToken, nnHints = std::move(nnHints),
+ nnExtensionNameToPrefix = std::move(nnExtensionNameToPrefix), callback] {
+ auto result =
+ device->prepareModel(nnModel, nnPreference, nnPriority, nnDeadline, nnModelCache,
+ nnDataCache, nnToken, nnHints, nnExtensionNameToPrefix);
notify(callback.get(), std::move(result));
};
executor(std::move(task), nnDeadline);
@@ -273,8 +278,9 @@
const std::vector<ndk::ScopedFileDescriptor>& dataCache,
const std::vector<uint8_t>& token,
const std::shared_ptr<IPreparedModelCallback>& callback) {
- const auto result = adapter::prepareModel(kDevice, kExecutor, model, preference, priority,
- deadlineNs, modelCache, dataCache, token, callback);
+ const auto result =
+ adapter::prepareModel(kDevice, kExecutor, model, preference, priority, deadlineNs,
+ modelCache, dataCache, token, {}, {}, callback);
if (!result.has_value()) {
const auto& [message, code] = result.error();
const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
@@ -301,4 +307,21 @@
return ndk::ScopedAStatus::ok();
}
+ndk::ScopedAStatus Device::prepareModelWithConfig(
+ const Model& model, const PrepareModelConfig& config,
+ const std::shared_ptr<IPreparedModelCallback>& callback) {
+ const auto result = adapter::prepareModel(
+ kDevice, kExecutor, model, config.preference, config.priority, config.deadlineNs,
+ config.modelCache, config.dataCache, config.cacheToken, config.compilationHints,
+ config.extensionNameToPrefix, callback);
+ if (!result.has_value()) {
+ const auto& [message, code] = result.error();
+ const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+ callback->notify(aidlCode, nullptr);
+ return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+ static_cast<int32_t>(aidlCode), message.c_str());
+ }
+ return ndk::ScopedAStatus::ok();
+}
+
} // namespace aidl::android::hardware::neuralnetworks::adapter
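
For context, a hedged sketch of driving the new prepareModelWithConfig entry point. The field names are the ones Device::prepareModelWithConfig reads above, the enum values come from the existing AIDL, and device, model, and callback stand in for an IDevice binder, a Model, and an IPreparedModelCallback implementation.

    // Sketch only -- not part of the patch.
    PrepareModelConfig config;
    config.preference = ExecutionPreference::FAST_SINGLE_ANSWER;
    config.priority = Priority::MEDIUM;
    config.deadlineNs = -1;  // no deadline
    // modelCache / dataCache / cacheToken left empty: no compilation caching.
    // compilationHints / extensionNameToPrefix left empty: no vendor-specific hints.
    const ndk::ScopedAStatus status =
            device->prepareModelWithConfig(model, config, callback);
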
diff --git a/neuralnetworks/utils/adapter/aidl/src/PreparedModel.cpp b/neuralnetworks/utils/adapter/aidl/src/PreparedModel.cpp
index 5cab62c..790558f 100644
--- a/neuralnetworks/utils/adapter/aidl/src/PreparedModel.cpp
+++ b/neuralnetworks/utils/adapter/aidl/src/PreparedModel.cpp
@@ -118,17 +118,20 @@
return durationNs < 0 ? nn::OptionalTimePoint{} : nn::TimePoint(makeDuration(durationNs));
}
-nn::ExecutionResult<ExecutionResult> executeSynchronously(const nn::IPreparedModel& preparedModel,
- const Request& request,
- bool measureTiming, int64_t deadlineNs,
- int64_t loopTimeoutDurationNs) {
+nn::ExecutionResult<ExecutionResult> executeSynchronously(
+ const nn::IPreparedModel& preparedModel, const Request& request, bool measureTiming,
+ int64_t deadlineNs, int64_t loopTimeoutDurationNs, const std::vector<TokenValuePair>& hints,
+ const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix) {
const auto nnRequest = NN_TRY(convertInput(request));
const auto nnMeasureTiming = measureTiming ? nn::MeasureTiming::YES : nn::MeasureTiming::NO;
const auto nnDeadline = NN_TRY(makeOptionalTimePoint(deadlineNs));
const auto nnLoopTimeoutDuration = NN_TRY(makeOptionalDuration(loopTimeoutDurationNs));
+ auto nnHints = NN_TRY(convertInput(hints));
+ auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
const auto result =
- preparedModel.execute(nnRequest, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration);
+ preparedModel.execute(nnRequest, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration,
+ nnHints, nnExtensionNameToPrefix);
if (!result.ok() && result.error().code == nn::ErrorStatus::OUTPUT_INSUFFICIENT_SIZE) {
const auto& [message, code, outputShapes] = result.error();
@@ -147,16 +150,21 @@
nn::GeneralResult<FencedExecutionResult> executeFenced(
const nn::IPreparedModel& preparedModel, const Request& request,
const std::vector<ndk::ScopedFileDescriptor>& waitFor, bool measureTiming,
- int64_t deadlineNs, int64_t loopTimeoutDurationNs, int64_t durationNs) {
+ int64_t deadlineNs, int64_t loopTimeoutDurationNs, int64_t durationNs,
+ const std::vector<TokenValuePair>& hints,
+ const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix) {
const auto nnRequest = NN_TRY(convertInput(request));
const auto nnWaitFor = NN_TRY(convertSyncFences(waitFor));
const auto nnMeasureTiming = measureTiming ? nn::MeasureTiming::YES : nn::MeasureTiming::NO;
const auto nnDeadline = NN_TRY(makeOptionalTimePoint(deadlineNs));
const auto nnLoopTimeoutDuration = NN_TRY(makeOptionalDuration(loopTimeoutDurationNs));
const auto nnDuration = NN_TRY(makeOptionalDuration(durationNs));
+ auto nnHints = NN_TRY(convertInput(hints));
+ auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
auto [syncFence, executeFencedInfoCallback] = NN_TRY(preparedModel.executeFenced(
- nnRequest, nnWaitFor, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration, nnDuration));
+ nnRequest, nnWaitFor, nnMeasureTiming, nnDeadline, nnLoopTimeoutDuration, nnDuration,
+ nnHints, nnExtensionNameToPrefix));
ndk::ScopedFileDescriptor fileDescriptor;
if (syncFence.hasFd()) {
@@ -171,11 +179,16 @@
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::IPreparedModel& preparedModel, const Request& request, bool measureTiming,
- int64_t loopTimeoutDurationNs) {
+ int64_t loopTimeoutDurationNs, const std::vector<TokenValuePair>& hints,
+ const std::vector<ExtensionNameAndPrefix>& extensionNameToPrefix) {
const auto nnRequest = NN_TRY(convertInput(request));
const auto nnMeasureTiming = measureTiming ? nn::MeasureTiming::YES : nn::MeasureTiming::NO;
const auto nnLoopTimeoutDuration = NN_TRY(makeOptionalDuration(loopTimeoutDurationNs));
- return preparedModel.createReusableExecution(nnRequest, nnMeasureTiming, nnLoopTimeoutDuration);
+ auto nnHints = NN_TRY(convertInput(hints));
+ auto nnExtensionNameToPrefix = NN_TRY(convertInput(extensionNameToPrefix));
+
+ return preparedModel.createReusableExecution(nnRequest, nnMeasureTiming, nnLoopTimeoutDuration,
+ nnHints, nnExtensionNameToPrefix);
}
nn::ExecutionResult<ExecutionResult> executeSynchronously(const nn::IExecution& execution,
@@ -231,7 +244,7 @@
int64_t loopTimeoutDurationNs,
ExecutionResult* executionResult) {
auto result = adapter::executeSynchronously(*kPreparedModel, request, measureTiming, deadlineNs,
- loopTimeoutDurationNs);
+ loopTimeoutDurationNs, {}, {});
if (!result.has_value()) {
const auto& [message, code, _] = result.error();
const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
@@ -247,7 +260,41 @@
bool measureTiming, int64_t deadlineNs, int64_t loopTimeoutDurationNs, int64_t durationNs,
FencedExecutionResult* executionResult) {
auto result = adapter::executeFenced(*kPreparedModel, request, waitFor, measureTiming,
- deadlineNs, loopTimeoutDurationNs, durationNs);
+ deadlineNs, loopTimeoutDurationNs, durationNs, {}, {});
+ if (!result.has_value()) {
+ const auto& [message, code] = result.error();
+ const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+ return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+ static_cast<int32_t>(aidlCode), message.c_str());
+ }
+ *executionResult = std::move(result).value();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus PreparedModel::executeSynchronouslyWithConfig(const Request& request,
+ const ExecutionConfig& config,
+ int64_t deadlineNs,
+ ExecutionResult* executionResult) {
+ auto result = adapter::executeSynchronously(
+ *kPreparedModel, request, config.measureTiming, deadlineNs,
+ config.loopTimeoutDurationNs, config.executionHints, config.extensionNameToPrefix);
+ if (!result.has_value()) {
+ const auto& [message, code, _] = result.error();
+ const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
+ return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
+ static_cast<int32_t>(aidlCode), message.c_str());
+ }
+ *executionResult = std::move(result).value();
+ return ndk::ScopedAStatus::ok();
+}
+
+ndk::ScopedAStatus PreparedModel::executeFencedWithConfig(
+ const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+ const ExecutionConfig& config, int64_t deadlineNs, int64_t durationNs,
+ FencedExecutionResult* executionResult) {
+ auto result = adapter::executeFenced(*kPreparedModel, request, waitFor, config.measureTiming,
+ deadlineNs, config.loopTimeoutDurationNs, durationNs,
+ config.executionHints, config.extensionNameToPrefix);
if (!result.has_value()) {
const auto& [message, code] = result.error();
const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
@@ -275,11 +322,11 @@
}
ndk::ScopedAStatus PreparedModel::createReusableExecution(const Request& request,
- bool measureTiming,
- int64_t loopTimeoutDurationNs,
+ const ExecutionConfig& config,
std::shared_ptr<IExecution>* execution) {
- auto result = adapter::createReusableExecution(*kPreparedModel, request, measureTiming,
- loopTimeoutDurationNs);
+ auto result = adapter::createReusableExecution(
+ *kPreparedModel, request, config.measureTiming, config.loopTimeoutDurationNs,
+ config.executionHints, config.extensionNameToPrefix);
if (!result.has_value()) {
const auto& [message, code] = result.error();
const auto aidlCode = utils::convert(code).value_or(ErrorStatus::GENERAL_FAILURE);
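
At the canonical layer the compatibility story is the same: every legacy adapter entry point in this file now forwards empty hint/prefix vectors, so drivers that ignore the new arguments see exactly the old call. A trivial sketch, assuming local preparedModel (nn::SharedPreparedModel) and request variables:

    const std::vector<nn::TokenValuePair> noHints;
    const std::vector<nn::ExtensionNameAndPrefix> noPrefixes;
    const auto result = preparedModel->execute(request, nn::MeasureTiming::NO,
                                               /*deadline=*/{}, /*loopTimeoutDuration=*/{},
                                               noHints, noPrefixes);
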
diff --git a/neuralnetworks/utils/adapter/hidl/Android.bp b/neuralnetworks/utils/adapter/hidl/Android.bp
index d073106..6875daa 100644
--- a/neuralnetworks/utils/adapter/hidl/Android.bp
+++ b/neuralnetworks/utils/adapter/hidl/Android.bp
@@ -30,17 +30,16 @@
local_include_dirs: ["include/nnapi/hal"],
export_include_dirs: ["include"],
static_libs: [
- "neuralnetworks_types",
- "neuralnetworks_utils_hal_1_0",
- "neuralnetworks_utils_hal_1_1",
- "neuralnetworks_utils_hal_1_2",
- "neuralnetworks_utils_hal_1_3",
- ],
- shared_libs: [
"android.hardware.neuralnetworks@1.0",
"android.hardware.neuralnetworks@1.1",
"android.hardware.neuralnetworks@1.2",
"android.hardware.neuralnetworks@1.3",
"libfmq",
+ "neuralnetworks_types",
+ "neuralnetworks_utils_hal_1_0",
+ "neuralnetworks_utils_hal_1_1",
+ "neuralnetworks_utils_hal_1_2",
+ "neuralnetworks_utils_hal_1_3",
+ "neuralnetworks_utils_hal_common",
],
}
diff --git a/neuralnetworks/utils/adapter/hidl/src/Burst.cpp b/neuralnetworks/utils/adapter/hidl/src/Burst.cpp
index 8b2e1dd..e3b165b 100644
--- a/neuralnetworks/utils/adapter/hidl/src/Burst.cpp
+++ b/neuralnetworks/utils/adapter/hidl/src/Burst.cpp
@@ -250,7 +250,7 @@
nn::MeasureTiming canonicalMeasure = NN_TRY(nn::convert(measure));
const auto [outputShapes, timing] =
- NN_TRY(mBurstExecutor->execute(canonicalRequest, canonicalMeasure, {}, {}));
+ NN_TRY(mBurstExecutor->execute(canonicalRequest, canonicalMeasure, {}, {}, {}, {}));
return std::make_pair(NN_TRY(V1_2::utils::convert(outputShapes)),
NN_TRY(V1_2::utils::convert(timing)));
diff --git a/neuralnetworks/utils/adapter/hidl/src/Device.cpp b/neuralnetworks/utils/adapter/hidl/src/Device.cpp
index 4993a80..0f44638 100644
--- a/neuralnetworks/utils/adapter/hidl/src/Device.cpp
+++ b/neuralnetworks/utils/adapter/hidl/src/Device.cpp
@@ -135,7 +135,7 @@
Task task = [device, nnModel = std::move(nnModel), executor, callback] {
auto result = device->prepareModel(nnModel, nn::ExecutionPreference::DEFAULT,
- nn::Priority::DEFAULT, {}, {}, {}, {});
+ nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
notify(callback.get(), std::move(result), executor);
};
executor(std::move(task), {});
@@ -155,8 +155,8 @@
const auto nnPreference = NN_TRY(convertInput(preference));
Task task = [device, nnModel = std::move(nnModel), nnPreference, executor, callback] {
- auto result =
- device->prepareModel(nnModel, nnPreference, nn::Priority::DEFAULT, {}, {}, {}, {});
+ auto result = device->prepareModel(nnModel, nnPreference, nn::Priority::DEFAULT, {}, {}, {},
+ {}, {}, {});
notify(callback.get(), std::move(result), executor);
};
executor(std::move(task), {});
@@ -185,7 +185,7 @@
nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),
nnToken, executor, callback] {
auto result = device->prepareModel(nnModel, nnPreference, nn::Priority::DEFAULT, {},
- nnModelCache, nnDataCache, nnToken);
+ nnModelCache, nnDataCache, nnToken, {}, {});
notify(callback.get(), std::move(result), executor);
};
executor(std::move(task), {});
@@ -215,7 +215,7 @@
nnModelCache = std::move(nnModelCache), nnDataCache = std::move(nnDataCache),
nnToken, executor, callback] {
auto result = device->prepareModel(nnModel, nnPreference, nnPriority, nnDeadline,
- nnModelCache, nnDataCache, nnToken);
+ nnModelCache, nnDataCache, nnToken, {}, {});
notify(callback.get(), std::move(result), executor);
};
executor(std::move(task), nnDeadline);
diff --git a/neuralnetworks/utils/adapter/hidl/src/PreparedModel.cpp b/neuralnetworks/utils/adapter/hidl/src/PreparedModel.cpp
index 71060d5..c6055a6 100644
--- a/neuralnetworks/utils/adapter/hidl/src/PreparedModel.cpp
+++ b/neuralnetworks/utils/adapter/hidl/src/PreparedModel.cpp
@@ -159,7 +159,7 @@
}
Task task = [preparedModel, nnRequest = std::move(nnRequest), callback] {
- auto result = preparedModel->execute(nnRequest, nn::MeasureTiming::NO, {}, {});
+ auto result = preparedModel->execute(nnRequest, nn::MeasureTiming::NO, {}, {}, {}, {});
notify(callback.get(), std::move(result));
};
executor(std::move(task), {});
@@ -185,7 +185,7 @@
}
Task task = [preparedModel, nnRequest = std::move(nnRequest), nnMeasure, callback] {
- auto result = preparedModel->execute(nnRequest, nnMeasure, {}, {});
+ auto result = preparedModel->execute(nnRequest, nnMeasure, {}, {}, {}, {});
notify(callback.get(), std::move(result));
};
executor(std::move(task), {});
@@ -216,8 +216,8 @@
Task task = [preparedModel, nnRequest = std::move(nnRequest), nnMeasure, nnDeadline,
nnLoopTimeoutDuration, callback] {
- auto result =
- preparedModel->execute(nnRequest, nnMeasure, nnDeadline, nnLoopTimeoutDuration);
+ auto result = preparedModel->execute(nnRequest, nnMeasure, nnDeadline,
+ nnLoopTimeoutDuration, {}, {});
notify(callback.get(), std::move(result));
};
executor(std::move(task), nnDeadline);
@@ -232,7 +232,7 @@
const auto nnMeasure = NN_TRY(convertInput(measure));
const auto [outputShapes, timing] =
- NN_TRY(preparedModel->execute(nnRequest, nnMeasure, {}, {}));
+ NN_TRY(preparedModel->execute(nnRequest, nnMeasure, {}, {}, {}, {}));
auto hidlOutputShapes = NN_TRY(V1_2::utils::convert(outputShapes));
const auto hidlTiming = NN_TRY(V1_2::utils::convert(timing));
@@ -248,8 +248,8 @@
const auto nnDeadline = NN_TRY(convertInput(deadline));
const auto nnLoopTimeoutDuration = NN_TRY(convertInput(loopTimeoutDuration));
- const auto [outputShapes, timing] =
- NN_TRY(preparedModel->execute(nnRequest, nnMeasure, nnDeadline, nnLoopTimeoutDuration));
+ const auto [outputShapes, timing] = NN_TRY(preparedModel->execute(
+ nnRequest, nnMeasure, nnDeadline, nnLoopTimeoutDuration, {}, {}));
auto hidlOutputShapes = NN_TRY(V1_3::utils::convert(outputShapes));
const auto hidlTiming = NN_TRY(V1_3::utils::convert(timing));
@@ -293,8 +293,9 @@
const auto nnLoopTimeoutDuration = NN_TRY(convertInput(loopTimeoutDuration));
const auto nnDuration = NN_TRY(convertInput(duration));
- auto [syncFence, executeFencedCallback] = NN_TRY(preparedModel->executeFenced(
- nnRequest, nnWaitFor, nnMeasure, nnDeadline, nnLoopTimeoutDuration, nnDuration));
+ auto [syncFence, executeFencedCallback] =
+ NN_TRY(preparedModel->executeFenced(nnRequest, nnWaitFor, nnMeasure, nnDeadline,
+ nnLoopTimeoutDuration, nnDuration, {}, {}));
auto hidlSyncFence = NN_TRY(V1_3::utils::convert(syncFence.getSharedHandle()));
auto hidlExecuteFencedCallback = sp<FencedExecutionCallback>::make(executeFencedCallback);
diff --git a/neuralnetworks/utils/common/Android.bp b/neuralnetworks/utils/common/Android.bp
index 39927a3..bfba24f 100644
--- a/neuralnetworks/utils/common/Android.bp
+++ b/neuralnetworks/utils/common/Android.bp
@@ -39,20 +39,12 @@
srcs: ["test/*.cpp"],
static_libs: [
"libgmock",
- "libneuralnetworks_common",
"neuralnetworks_types",
"neuralnetworks_utils_hal_common",
],
shared_libs: [
- "android.hidl.allocator@1.0",
- "android.hidl.memory@1.0",
"libbase",
"libcutils",
- "libfmq",
- "libhidlbase",
- "libhidlmemory",
- "liblog",
- "libutils",
],
target: {
android: {
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h b/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h
index e86edda..1f1245f 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/InvalidBurst.h
@@ -33,12 +33,15 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
};
} // namespace android::hardware::neuralnetworks::utils
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/InvalidDevice.h b/neuralnetworks/utils/common/include/nnapi/hal/InvalidDevice.h
index 5e62b9a..9582873 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/InvalidDevice.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/InvalidDevice.h
@@ -52,8 +52,9 @@
nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache,
- const nn::CacheToken& token) const override;
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/InvalidPreparedModel.h b/neuralnetworks/utils/common/include/nnapi/hal/InvalidPreparedModel.h
index de30aae..3f1f290 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/InvalidPreparedModel.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/InvalidPreparedModel.h
@@ -31,18 +31,23 @@
public:
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h b/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h
index fde2486..129431f 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/ResilientBurst.h
@@ -48,18 +48,23 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
private:
bool isValidInternal() const EXCLUDES(mMutex);
nn::GeneralResult<nn::SharedExecution> createReusableExecutionInternal(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const;
const Factory kMakeBurst;
mutable std::mutex mMutex;
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/ResilientDevice.h b/neuralnetworks/utils/common/include/nnapi/hal/ResilientDevice.h
index 84ae799..267d634 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/ResilientDevice.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/ResilientDevice.h
@@ -65,8 +65,9 @@
nn::GeneralResult<nn::SharedPreparedModel> prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache,
- const nn::CacheToken& token) const override;
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCache(
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
@@ -83,7 +84,9 @@
nn::GeneralResult<nn::SharedPreparedModel> prepareModelInternal(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const;
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const;
nn::GeneralResult<nn::SharedPreparedModel> prepareModelFromCacheInternal(
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const;
diff --git a/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h b/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
index 86533ed..bbfc220 100644
--- a/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
+++ b/neuralnetworks/utils/common/include/nnapi/hal/ResilientPreparedModel.h
@@ -49,18 +49,23 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>> executeFenced(
const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const override;
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedExecution> createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const override;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const override;
nn::GeneralResult<nn::SharedBurst> configureExecutionBurst() const override;
@@ -70,7 +75,9 @@
bool isValidInternal() const EXCLUDES(mMutex);
nn::GeneralResult<nn::SharedExecution> createReusableExecutionInternal(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const;
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const;
nn::GeneralResult<nn::SharedBurst> configureExecutionBurstInternal() const;
const Factory kMakePreparedModel;
diff --git a/neuralnetworks/utils/common/src/InvalidBurst.cpp b/neuralnetworks/utils/common/src/InvalidBurst.cpp
index 0191533..3fdfb5c 100644
--- a/neuralnetworks/utils/common/src/InvalidBurst.cpp
+++ b/neuralnetworks/utils/common/src/InvalidBurst.cpp
@@ -34,13 +34,17 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> InvalidBurst::execute(
const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
const nn::OptionalTimePoint& /*deadline*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
return NN_ERROR() << "InvalidBurst";
}
nn::GeneralResult<nn::SharedExecution> InvalidBurst::createReusableExecution(
const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
return NN_ERROR() << "InvalidBurst";
}
diff --git a/neuralnetworks/utils/common/src/InvalidDevice.cpp b/neuralnetworks/utils/common/src/InvalidDevice.cpp
index 535ccb4..c8cc287 100644
--- a/neuralnetworks/utils/common/src/InvalidDevice.cpp
+++ b/neuralnetworks/utils/common/src/InvalidDevice.cpp
@@ -84,7 +84,9 @@
const nn::Model& /*model*/, nn::ExecutionPreference /*preference*/,
nn::Priority /*priority*/, nn::OptionalTimePoint /*deadline*/,
const std::vector<nn::SharedHandle>& /*modelCache*/,
- const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/) const {
+ const std::vector<nn::SharedHandle>& /*dataCache*/, const nn::CacheToken& /*token*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
return NN_ERROR() << "InvalidDevice";
}
diff --git a/neuralnetworks/utils/common/src/InvalidPreparedModel.cpp b/neuralnetworks/utils/common/src/InvalidPreparedModel.cpp
index 8195462..f6f978d 100644
--- a/neuralnetworks/utils/common/src/InvalidPreparedModel.cpp
+++ b/neuralnetworks/utils/common/src/InvalidPreparedModel.cpp
@@ -27,9 +27,12 @@
namespace android::hardware::neuralnetworks::utils {
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
-InvalidPreparedModel::execute(const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
- const nn::OptionalTimePoint& /*deadline*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+InvalidPreparedModel::execute(
+ const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
+ const nn::OptionalTimePoint& /*deadline*/,
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
return NN_ERROR() << "InvalidPreparedModel";
}
@@ -38,13 +41,17 @@
const nn::Request& /*request*/, const std::vector<nn::SyncFence>& /*waitFor*/,
nn::MeasureTiming /*measure*/, const nn::OptionalTimePoint& /*deadline*/,
const nn::OptionalDuration& /*loopTimeoutDuration*/,
- const nn::OptionalDuration& /*timeoutDurationAfterFence*/) const {
+ const nn::OptionalDuration& /*timeoutDurationAfterFence*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
return NN_ERROR() << "InvalidPreparedModel";
}
nn::GeneralResult<nn::SharedExecution> InvalidPreparedModel::createReusableExecution(
const nn::Request& /*request*/, nn::MeasureTiming /*measure*/,
- const nn::OptionalDuration& /*loopTimeoutDuration*/) const {
+ const nn::OptionalDuration& /*loopTimeoutDuration*/,
+ const std::vector<nn::TokenValuePair>& /*hints*/,
+ const std::vector<nn::ExtensionNameAndPrefix>& /*extensionNameToPrefix*/) const {
return NN_ERROR() << "InvalidPreparedModel";
}
diff --git a/neuralnetworks/utils/common/src/ResilientBurst.cpp b/neuralnetworks/utils/common/src/ResilientBurst.cpp
index 79cbe39..bf7a8ea 100644
--- a/neuralnetworks/utils/common/src/ResilientBurst.cpp
+++ b/neuralnetworks/utils/common/src/ResilientBurst.cpp
@@ -105,37 +105,49 @@
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>> ResilientBurst::execute(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const {
- const auto fn = [&request, measure, deadline, loopTimeoutDuration](const nn::IBurst& burst) {
- return burst.execute(request, measure, deadline, loopTimeoutDuration);
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+ const auto fn = [&request, measure, deadline, loopTimeoutDuration, &hints,
+ &extensionNameToPrefix](const nn::IBurst& burst) {
+ return burst.execute(request, measure, deadline, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
};
return protect(*this, fn);
}
nn::GeneralResult<nn::SharedExecution> ResilientBurst::createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
#if 0
auto self = shared_from_this();
- ResilientExecution::Factory makeExecution =
- [burst = std::move(self), request, measure, loopTimeoutDuration] {
- return burst->createReusableExecutionInternal(request, measure, loopTimeoutDuration);
+ ResilientExecution::Factory makeExecution = [burst = std::move(self), request, measure,
+ loopTimeoutDuration, &hints,
+ &extensionNameToPrefix] {
+ return burst->createReusableExecutionInternal(request, measure, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
};
return ResilientExecution::create(std::move(makeExecution));
#else
- return createReusableExecutionInternal(request, measure, loopTimeoutDuration);
+ return createReusableExecutionInternal(request, measure, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
#endif
}
nn::GeneralResult<nn::SharedExecution> ResilientBurst::createReusableExecutionInternal(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
if (!isValidInternal()) {
return std::make_shared<const InvalidExecution>();
}
- const auto fn = [&request, measure, &loopTimeoutDuration](const nn::IBurst& burst) {
- return burst.createReusableExecution(request, measure, loopTimeoutDuration);
+ const auto fn = [&request, measure, &loopTimeoutDuration, &hints,
+ &extensionNameToPrefix](const nn::IBurst& burst) {
+ return burst.createReusableExecution(request, measure, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
};
return protect(*this, fn);
}
diff --git a/neuralnetworks/utils/common/src/ResilientDevice.cpp b/neuralnetworks/utils/common/src/ResilientDevice.cpp
index 2023c9a..a5c2640 100644
--- a/neuralnetworks/utils/common/src/ResilientDevice.cpp
+++ b/neuralnetworks/utils/common/src/ResilientDevice.cpp
@@ -179,19 +179,21 @@
nn::GeneralResult<nn::SharedPreparedModel> ResilientDevice::prepareModel(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
#if 0
auto self = shared_from_this();
ResilientPreparedModel::Factory makePreparedModel = [device = std::move(self), model,
preference, priority, deadline, modelCache,
- dataCache, token] {
+ dataCache, token, hints, extensionNameToPrefix] {
return device->prepareModelInternal(model, preference, priority, deadline, modelCache,
- dataCache, token);
+ dataCache, token, hints, extensionNameToPrefix);
};
return ResilientPreparedModel::create(std::move(makePreparedModel));
#else
- return prepareModelInternal(model, preference, priority, deadline, modelCache, dataCache,
- token);
+ return prepareModelInternal(model, preference, priority, deadline, modelCache, dataCache, token,
+ hints, extensionNameToPrefix);
#endif
}
@@ -234,14 +236,16 @@
nn::GeneralResult<nn::SharedPreparedModel> ResilientDevice::prepareModelInternal(
const nn::Model& model, nn::ExecutionPreference preference, nn::Priority priority,
nn::OptionalTimePoint deadline, const std::vector<nn::SharedHandle>& modelCache,
- const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token) const {
+ const std::vector<nn::SharedHandle>& dataCache, const nn::CacheToken& token,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
if (!isValidInternal()) {
return std::make_shared<const InvalidPreparedModel>();
}
- const auto fn = [&model, preference, priority, &deadline, &modelCache, &dataCache,
- &token](const nn::IDevice& device) {
+ const auto fn = [&model, preference, priority, &deadline, &modelCache, &dataCache, &token,
+ &hints, &extensionNameToPrefix](const nn::IDevice& device) {
return device.prepareModel(model, preference, priority, deadline, modelCache, dataCache,
- token);
+ token, hints, extensionNameToPrefix);
};
return protect(*this, fn, /*blocking=*/false);
}
diff --git a/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp b/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
index 1ae19bc..b5843c0 100644
--- a/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
+++ b/neuralnetworks/utils/common/src/ResilientPreparedModel.cpp
@@ -104,43 +104,53 @@
}
nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>
-ResilientPreparedModel::execute(const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration) const {
- const auto fn = [&request, measure, &deadline,
- &loopTimeoutDuration](const nn::IPreparedModel& preparedModel) {
- return preparedModel.execute(request, measure, deadline, loopTimeoutDuration);
+ResilientPreparedModel::execute(
+ const nn::Request& request, nn::MeasureTiming measure,
+ const nn::OptionalTimePoint& deadline, const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
+ const auto fn = [&request, measure, &deadline, &loopTimeoutDuration, &hints,
+ &extensionNameToPrefix](const nn::IPreparedModel& preparedModel) {
+ return preparedModel.execute(request, measure, deadline, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
};
return protect(*this, fn);
}
nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>
-ResilientPreparedModel::executeFenced(const nn::Request& request,
- const std::vector<nn::SyncFence>& waitFor,
- nn::MeasureTiming measure,
- const nn::OptionalTimePoint& deadline,
- const nn::OptionalDuration& loopTimeoutDuration,
- const nn::OptionalDuration& timeoutDurationAfterFence) const {
+ResilientPreparedModel::executeFenced(
+ const nn::Request& request, const std::vector<nn::SyncFence>& waitFor,
+ nn::MeasureTiming measure, const nn::OptionalTimePoint& deadline,
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const nn::OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
const auto fn = [&request, &waitFor, measure, &deadline, &loopTimeoutDuration,
- &timeoutDurationAfterFence](const nn::IPreparedModel& preparedModel) {
+ &timeoutDurationAfterFence, &hints,
+ &extensionNameToPrefix](const nn::IPreparedModel& preparedModel) {
return preparedModel.executeFenced(request, waitFor, measure, deadline, loopTimeoutDuration,
- timeoutDurationAfterFence);
+ timeoutDurationAfterFence, hints, extensionNameToPrefix);
};
return protect(*this, fn);
}
nn::GeneralResult<nn::SharedExecution> ResilientPreparedModel::createReusableExecution(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
#if 0
auto self = shared_from_this();
- ResilientExecution::Factory makeExecution =
- [preparedModel = std::move(self), request, measure, loopTimeoutDuration] {
- return preparedModel->createReusableExecutionInternal(request, measure, loopTimeoutDuration);
+ ResilientExecution::Factory makeExecution = [preparedModel = std::move(self), request, measure,
+ loopTimeoutDuration, hints,
+ extensionNameToPrefix] {
+ return preparedModel->createReusableExecutionInternal(request, measure, loopTimeoutDuration,
+ hints, extensionNameToPrefix);
};
return ResilientExecution::create(std::move(makeExecution));
#else
- return createReusableExecutionInternal(request, measure, loopTimeoutDuration);
+ return createReusableExecutionInternal(request, measure, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
#endif
}
@@ -159,13 +169,16 @@
nn::GeneralResult<nn::SharedExecution> ResilientPreparedModel::createReusableExecutionInternal(
const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration) const {
+ const nn::OptionalDuration& loopTimeoutDuration,
+ const std::vector<nn::TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix) const {
if (!isValidInternal()) {
return std::make_shared<const InvalidExecution>();
}
- const auto fn = [&request, measure,
- &loopTimeoutDuration](const nn::IPreparedModel& preparedModel) {
- return preparedModel.createReusableExecution(request, measure, loopTimeoutDuration);
+ const auto fn = [&request, measure, &loopTimeoutDuration, &hints,
+ &extensionNameToPrefix](const nn::IPreparedModel& preparedModel) {
+ return preparedModel.createReusableExecution(request, measure, loopTimeoutDuration, hints,
+ extensionNameToPrefix);
};
return protect(*this, fn);
}
diff --git a/neuralnetworks/utils/common/test/MockDevice.h b/neuralnetworks/utils/common/test/MockDevice.h
index a9428bc..a0fc5c3 100644
--- a/neuralnetworks/utils/common/test/MockDevice.h
+++ b/neuralnetworks/utils/common/test/MockDevice.h
@@ -39,7 +39,9 @@
MOCK_METHOD(GeneralResult<SharedPreparedModel>, prepareModel,
(const Model& model, ExecutionPreference preference, Priority priority,
OptionalTimePoint deadline, const std::vector<SharedHandle>& modelCache,
- const std::vector<SharedHandle>& dataCache, const CacheToken& token),
+ const std::vector<SharedHandle>& dataCache, const CacheToken& token,
+ const std::vector<TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix),
(const, override));
MOCK_METHOD(GeneralResult<SharedPreparedModel>, prepareModelFromCache,
(OptionalTimePoint deadline, const std::vector<SharedHandle>& modelCache,
diff --git a/neuralnetworks/utils/common/test/MockPreparedModel.h b/neuralnetworks/utils/common/test/MockPreparedModel.h
index c8ce006..b8613b2 100644
--- a/neuralnetworks/utils/common/test/MockPreparedModel.h
+++ b/neuralnetworks/utils/common/test/MockPreparedModel.h
@@ -27,17 +27,23 @@
public:
MOCK_METHOD((ExecutionResult<std::pair<std::vector<OutputShape>, Timing>>), execute,
(const Request& request, MeasureTiming measure, const OptionalTimePoint& deadline,
- const OptionalDuration& loopTimeoutDuration),
+ const OptionalDuration& loopTimeoutDuration,
+ const std::vector<TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix),
(const, override));
MOCK_METHOD((GeneralResult<std::pair<SyncFence, ExecuteFencedInfoCallback>>), executeFenced,
(const Request& request, const std::vector<SyncFence>& waitFor,
MeasureTiming measure, const OptionalTimePoint& deadline,
const OptionalDuration& loopTimeoutDuration,
- const OptionalDuration& timeoutDurationAfterFence),
+ const OptionalDuration& timeoutDurationAfterFence,
+ const std::vector<TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix),
(const, override));
MOCK_METHOD((GeneralResult<SharedExecution>), createReusableExecution,
- (const nn::Request& request, nn::MeasureTiming measure,
- const nn::OptionalDuration& loopTimeoutDuration),
+ (const Request& request, MeasureTiming measure,
+ const OptionalDuration& loopTimeoutDuration,
+ const std::vector<TokenValuePair>& hints,
+ const std::vector<nn::ExtensionNameAndPrefix>& extensionNameToPrefix),
(const, override));
MOCK_METHOD(GeneralResult<SharedBurst>, configureExecutionBurst, (), (const, override));
MOCK_METHOD(std::any, getUnderlyingResource, (), (const, override));
diff --git a/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp b/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp
index 0488b63..d9b8505 100644
--- a/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp
+++ b/neuralnetworks/utils/common/test/ResilientDeviceTest.cpp
@@ -309,12 +309,12 @@
// setup call
const auto [mockDevice, mockDeviceFactory, device] = setup();
const auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
- EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+ EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(Return(mockPreparedModel));
// run test
- const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+ const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -324,12 +324,12 @@
TEST(ResilientDeviceTest, prepareModelError) {
// setup call
const auto [mockDevice, mockDeviceFactory, device] = setup();
- EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+ EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(kReturnGeneralFailure);
// run test
- const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+ const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -339,13 +339,13 @@
TEST(ResilientDeviceTest, prepareModelDeadObjectFailedRecovery) {
// setup call
const auto [mockDevice, mockDeviceFactory, device] = setup();
- EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+ EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(kReturnDeadObject);
EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(kReturnGeneralFailure);
// run test
- const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+ const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -355,18 +355,18 @@
TEST(ResilientDeviceTest, prepareModelDeadObjectSuccessfulRecovery) {
// setup call
const auto [mockDevice, mockDeviceFactory, device] = setup();
- EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _))
+ EXPECT_CALL(*mockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(kReturnDeadObject);
const auto recoveredMockDevice = createConfiguredMockDevice();
const auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
- EXPECT_CALL(*recoveredMockDevice, prepareModel(_, _, _, _, _, _, _))
+ EXPECT_CALL(*recoveredMockDevice, prepareModel(_, _, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(Return(mockPreparedModel));
EXPECT_CALL(*mockDeviceFactory, Call(false)).Times(1).WillOnce(Return(recoveredMockDevice));
// run test
- const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+ const auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -679,7 +679,7 @@
device->recover(mockDevice.get(), /*blocking=*/false);
// run test
- auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {});
+ auto result = device->prepareModel({}, {}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
diff --git a/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp b/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
index d396ca8..276bfba 100644
--- a/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
+++ b/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
@@ -104,12 +104,12 @@
TEST(ResilientPreparedModelTest, execute) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _, _, _))
.Times(1)
.WillOnce(Return(kNoExecutionError));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -119,10 +119,12 @@
TEST(ResilientPreparedModelTest, executeError) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnGeneralFailure);
+ EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _, _, _))
+ .Times(1)
+ .WillOnce(kReturnGeneralFailure);
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -132,12 +134,12 @@
TEST(ResilientPreparedModelTest, executeDeadObjectFailedRecovery) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+ EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
constexpr auto ret = [] { return nn::error(nn::ErrorStatus::GENERAL_FAILURE); };
EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(ret);
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -147,9 +149,9 @@
TEST(ResilientPreparedModelTest, executeDeadObjectSuccessfulRecovery) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+ EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
- EXPECT_CALL(*recoveredMockPreparedModel, execute(_, _, _, _))
+ EXPECT_CALL(*recoveredMockPreparedModel, execute(_, _, _, _, _, _))
.Times(1)
.WillOnce(Return(kNoExecutionError));
EXPECT_CALL(*mockPreparedModelFactory, Call())
@@ -157,7 +159,7 @@
.WillOnce(Return(recoveredMockPreparedModel));
// run test
- const auto result = preparedModel->execute({}, {}, {}, {});
+ const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -167,12 +169,12 @@
TEST(ResilientPreparedModelTest, executeFenced) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(Return(kNoFencedExecutionError));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -182,12 +184,12 @@
TEST(ResilientPreparedModelTest, executeFencedError) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(kReturnGeneralFailure);
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -197,13 +199,13 @@
TEST(ResilientPreparedModelTest, executeFencedDeadObjectFailedRecovery) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(kReturnDeadObject);
EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
@@ -213,11 +215,11 @@
TEST(ResilientPreparedModelTest, executeFencedDeadObjectSuccessfulRecovery) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+ EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(kReturnDeadObject);
const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
- EXPECT_CALL(*recoveredMockPreparedModel, executeFenced(_, _, _, _, _, _))
+ EXPECT_CALL(*recoveredMockPreparedModel, executeFenced(_, _, _, _, _, _, _, _))
.Times(1)
.WillOnce(Return(kNoFencedExecutionError));
EXPECT_CALL(*mockPreparedModelFactory, Call())
@@ -225,7 +227,7 @@
.WillOnce(Return(recoveredMockPreparedModel));
// run test
- const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+ const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -235,12 +237,12 @@
TEST(ResilientPreparedModelTest, createReusableExecution) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
+ EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _, _))
.Times(1)
.WillOnce(Return(kNoCreateReusableExecutionError));
// run test
- const auto result = preparedModel->createReusableExecution({}, {}, {});
+ const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
// verify result
ASSERT_TRUE(result.has_value())
@@ -250,12 +252,12 @@
TEST(ResilientPreparedModelTest, createReusableExecutionError) {
// setup call
const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
- EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
+ EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _, _))
.Times(1)
.WillOnce(kReturnGeneralFailure);
// run test
- const auto result = preparedModel->createReusableExecution({}, {}, {});
+ const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
// verify result
ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/utils/service/Android.bp b/neuralnetworks/utils/service/Android.bp
index c3272ae..452078b 100644
--- a/neuralnetworks/utils/service/Android.bp
+++ b/neuralnetworks/utils/service/Android.bp
@@ -33,6 +33,10 @@
local_include_dirs: ["include/nnapi/hal"],
export_include_dirs: ["include"],
static_libs: [
+ "android.hardware.neuralnetworks@1.0",
+ "android.hardware.neuralnetworks@1.1",
+ "android.hardware.neuralnetworks@1.2",
+ "android.hardware.neuralnetworks@1.3",
"neuralnetworks_types",
"neuralnetworks_utils_hal_1_0",
"neuralnetworks_utils_hal_1_1",
@@ -40,10 +44,4 @@
"neuralnetworks_utils_hal_1_3",
"neuralnetworks_utils_hal_common",
],
- shared_libs: [
- "android.hardware.neuralnetworks@1.0",
- "android.hardware.neuralnetworks@1.1",
- "android.hardware.neuralnetworks@1.2",
- "android.hardware.neuralnetworks@1.3",
- ],
}
diff --git a/nfc/OWNERS b/nfc/OWNERS
new file mode 100644
index 0000000..7867204
--- /dev/null
+++ b/nfc/OWNERS
@@ -0,0 +1,6 @@
+# Bug component: 48448
+alisher@google.com
+georgekgchang@google.com
+jackcwyu@google.com
+
+
diff --git a/nfc/aidl/Android.bp b/nfc/aidl/Android.bp
new file mode 100644
index 0000000..d390c7e
--- /dev/null
+++ b/nfc/aidl/Android.bp
@@ -0,0 +1,34 @@
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+aidl_interface {
+ name: "android.hardware.nfc",
+ vendor_available: true,
+ srcs: ["android/hardware/nfc/*.aidl"],
+ stability: "vintf",
+ backend: {
+ cpp: {
+ enabled: false,
+ },
+ java: {
+ sdk_version: "module_current",
+ enabled: false,
+ },
+ ndk: {
+ vndk: {
+ enabled: true,
+ },
+ },
+ },
+}
diff --git a/nfc/aidl/TEST_MAPPING b/nfc/aidl/TEST_MAPPING
new file mode 100644
index 0000000..3a10084
--- /dev/null
+++ b/nfc/aidl/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+ "presubmit": [
+ {
+ "name": "VtsAidlHalNfcTargetTest"
+ }
+ ]
+}
diff --git a/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/INfc.aidl b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/INfc.aidl
new file mode 100644
index 0000000..7a0ae54
--- /dev/null
+++ b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/INfc.aidl
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.nfc;
+@VintfStability
+interface INfc {
+ void open(in android.hardware.nfc.INfcClientCallback clientCallback);
+ void close(in android.hardware.nfc.NfcCloseType type);
+ void coreInitialized();
+ void factoryReset();
+ android.hardware.nfc.NfcConfig getConfig();
+ void powerCycle();
+ void preDiscover();
+ int write(in byte[] data);
+ void setEnableVerboseLogging(in boolean enable);
+ boolean isVerboseLoggingEnabled();
+}
diff --git a/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/INfcClientCallback.aidl b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/INfcClientCallback.aidl
new file mode 100644
index 0000000..8150e81
--- /dev/null
+++ b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/INfcClientCallback.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.nfc;
+@VintfStability
+interface INfcClientCallback {
+ void sendData(in byte[] data);
+ void sendEvent(in android.hardware.nfc.NfcEvent event, in android.hardware.nfc.NfcStatus status);
+}
diff --git a/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcCloseType.aidl b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcCloseType.aidl
new file mode 100644
index 0000000..7d44d48
--- /dev/null
+++ b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcCloseType.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.nfc;
+@Backing(type="int") @VintfStability
+enum NfcCloseType {
+ DISABLE = 0,
+ HOST_SWITCHED_OFF = 1,
+}
diff --git a/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcConfig.aidl b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcConfig.aidl
new file mode 100644
index 0000000..92e0a9a
--- /dev/null
+++ b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcConfig.aidl
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.nfc;
+@VintfStability
+parcelable NfcConfig {
+ boolean nfaPollBailOutMode;
+ android.hardware.nfc.PresenceCheckAlgorithm presenceCheckAlgorithm;
+ android.hardware.nfc.ProtocolDiscoveryConfig nfaProprietaryCfg;
+ byte defaultOffHostRoute;
+ byte defaultOffHostRouteFelica;
+ byte defaultSystemCodeRoute;
+ byte defaultSystemCodePowerState;
+ byte defaultRoute;
+ byte offHostESEPipeId;
+ byte offHostSIMPipeId;
+ int maxIsoDepTransceiveLength;
+ byte[] hostAllowlist;
+ byte[] offHostRouteUicc;
+ byte[] offHostRouteEse;
+ byte defaultIsoDepRoute;
+}
diff --git a/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcEvent.aidl b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcEvent.aidl
new file mode 100644
index 0000000..dda258e
--- /dev/null
+++ b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcEvent.aidl
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.nfc;
+@Backing(type="int") @VintfStability
+enum NfcEvent {
+ OPEN_CPLT = 0,
+ CLOSE_CPLT = 1,
+ POST_INIT_CPLT = 2,
+ PRE_DISCOVER_CPLT = 3,
+ HCI_NETWORK_RESET = 4,
+ ERROR = 5,
+}
diff --git a/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcStatus.aidl b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcStatus.aidl
new file mode 100644
index 0000000..2632480
--- /dev/null
+++ b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/NfcStatus.aidl
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.nfc;
+@Backing(type="int") @VintfStability
+enum NfcStatus {
+ OK = 0,
+ FAILED = 1,
+ ERR_TRANSPORT = 2,
+ ERR_CMD_TIMEOUT = 3,
+ REFUSED = 4,
+}
diff --git a/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/PresenceCheckAlgorithm.aidl b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/PresenceCheckAlgorithm.aidl
new file mode 100644
index 0000000..9a9be21
--- /dev/null
+++ b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/PresenceCheckAlgorithm.aidl
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.nfc;
+@Backing(type="byte") @VintfStability
+enum PresenceCheckAlgorithm {
+ DEFAULT = 0,
+ I_BLOCK = 1,
+ ISO_DEP_NAK = 2,
+}
diff --git a/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/ProtocolDiscoveryConfig.aidl b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/ProtocolDiscoveryConfig.aidl
new file mode 100644
index 0000000..021dfe2
--- /dev/null
+++ b/nfc/aidl/aidl_api/android.hardware.nfc/current/android/hardware/nfc/ProtocolDiscoveryConfig.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+///////////////////////////////////////////////////////////////////////////////
+// THIS FILE IS IMMUTABLE. DO NOT EDIT IN ANY CASE. //
+///////////////////////////////////////////////////////////////////////////////
+
+// This file is a snapshot of an AIDL file. Do not edit it manually. There are
+// two cases:
+// 1). this is a frozen version file - do not edit this in any case.
+// 2). this is a 'current' file. If you make a backwards compatible change to
+// the interface (from the latest frozen version), the build system will
+// prompt you to update this file with `m <name>-update-api`.
+//
+// You must not make a backward incompatible change to any AIDL file built
+// with the aidl_interface module type with versions property set. The module
+// type is used to build AIDL files in a way that they can be used across
+// independently updatable components of the system. If a device is shipped
+// with such a backward incompatible change, it has a high risk of breaking
+// later when a module using the interface is updated, e.g., Mainline modules.
+
+package android.hardware.nfc;
+@VintfStability
+parcelable ProtocolDiscoveryConfig {
+ byte protocol18092Active;
+ byte protocolBPrime;
+ byte protocolDual;
+ byte protocol15693;
+ byte protocolKovio;
+ byte protocolMifare;
+ byte discoveryPollKovio;
+ byte discoveryPollBPrime;
+ byte discoveryListenBPrime;
+}
diff --git a/nfc/aidl/android/hardware/nfc/INfc.aidl b/nfc/aidl/android/hardware/nfc/INfc.aidl
new file mode 100644
index 0000000..662f8d4
--- /dev/null
+++ b/nfc/aidl/android/hardware/nfc/INfc.aidl
@@ -0,0 +1,143 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.nfc;
+
+import android.hardware.nfc.INfcClientCallback;
+import android.hardware.nfc.NfcCloseType;
+import android.hardware.nfc.NfcConfig;
+import android.hardware.nfc.NfcStatus;
+
+@VintfStability
+interface INfc {
+ /**
+ * Opens the NFC controller device and performs initialization.
+ * This may include patch download and other vendor-specific initialization.
+ *
+ * If open completes successfully, the controller must be ready to perform NCI
+ * initialization - i.e., accept CORE_RESET and subsequent commands through the write()
+ * call.
+ *
+ * Returns ok() if initialization starts successfully.
+ * If open() returns ok(), the NCI stack must wait for a NfcEvent.OPEN_CPLT
+ * callback before continuing.
+ * If a NfcEvent.OPEN_CPLT callback is received with status NfcStatus::OK, the controller
+ * must be ready to perform NCI initialization - i.e., accept CORE_RESET and subsequent
+ * commands through the write() call.
+ */
+ void open(in INfcClientCallback clientCallback);
+
+ /**
+ * Close the NFC HAL and set up the NFC controller close type.
+ * Associated resources such as clientCallback must be released.
+ * The clientCallback reference from open() must be invalid after close().
+ * If close() returns ok(), the NCI stack must wait for a NfcEvent.CLOSE_CPLT
+ * callback before continuing.
+ * Returns an error if close() is called more than once.
+ * Calls to any other method which expect a callback after this must return
+ * a service-specific error NfcStatus::FAILED.
+ * The HAL closes automatically if the client drops the reference to the HAL or
+ * crashes.
+ */
+ void close(in NfcCloseType type);
+
+ /**
+ * coreInitialized() is called after the CORE_INIT_RSP is received from the
+ * NFCC. At this time, the HAL can do any chip-specific configuration.
+ *
+ * If coreInitialized() returns ok(), the NCI stack must wait for a
+ * NfcEvent.POST_INIT_CPLT before continuing.
+ * If coreInitialized() returns an error, the NCI stack must continue immediately.
+ *
+ * coreInitialized() must be called only after open() has registered the clientCallback;
+ * otherwise it must return a service-specific error NfcStatus::FAILED directly.
+ *
+ */
+ void coreInitialized();
+
+ /**
+ * Clears the NFC chip.
+ *
+ * Must be called during factory reset and/or before the first time the HAL is
+ * initialized after a factory reset.
+ */
+ void factoryReset();
+
+ /**
+ * Fetches vendor specific configurations.
+ * @return an NfcConfig that indicates support for certain features and
+ * carries the vendor-specific configuration.
+ */
+ NfcConfig getConfig();
+
+ /**
+ * Restart the controller by power cycle.
+ * It's similar to open() but only resets the controller without initializing all the
+ * resources.
+ *
+ * If powerCycle() returns ok(), the NCI stack must wait for a NfcEvent.OPEN_CPLT
+ * before continuing.
+ *
+ * powerCycle() must be called only after open() has registered the clientCallback;
+ * otherwise it must return a service-specific error NfcStatus::FAILED directly.
+ */
+ void powerCycle();
+
+ /**
+ * preDiscover is called every time before starting RF discovery.
+ * It is a good place to do vendor-specific configuration that must be
+ * performed every time RF discovery is about to be started.
+ *
+ * If preDiscover() returns ok(), the NCI stack must wait for a
+ * NfcEvent.PRE_DISCOVER_CPLT before continuing.
+ *
+ * preDiscover() must be called only after open() has registered the clientCallback;
+ * otherwise it must return a service-specific error NfcStatus::FAILED directly.
+ *
+ * If preDiscover() reports an error, the NCI stack must start RF discovery immediately.
+ */
+ void preDiscover();
+
+ /**
+ * Performs an NCI write.
+ *
+ * This method may queue writes and return immediately. The only
+ * requirement is that the writes are executed in order.
+ *
+ * @param data
+ * Data packet to transmit NCI Commands and Data Messages over write.
+ * Detailed format is defined in NFC Controller Interface (NCI) Technical Specification.
+ * https://nfc-forum.org/our-work/specification-releases/
+ *
+ * @return number of bytes written to the NFCC.
+ */
+ int write(in byte[] data);
+
+ /**
+ * Set the logging flag for the NFC HAL to enable its verbose logging.
+ * If verbose logging is not supported, the call must not have any effect on logging verbosity.
+ * However, isVerboseLoggingEnabled() must still return the value set by the last call to
+ * setEnableVerboseLogging().
+ * @param enable true to enable verbose logging in the HAL, false to disable it
+ */
+ void setEnableVerboseLogging(in boolean enable);
+
+ /**
+ * Get the verbose logging flag value from NFC HAL.
+ * @return true if verbose logging flag value is enabled, false if disabled.
+ */
+ boolean isVerboseLoggingEnabled();
+}
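For readers of the interface documentation above, the sketch below shows how an NDK-backend client could check the service-specific NfcStatus::FAILED error that the methods are documented to return when invoked before open() has registered a callback. This is an illustrative, hypothetical snippet, not part of this change; it only relies on the generated android.hardware.nfc NDK bindings and the standard ndk::ScopedAStatus accessors.

// Hypothetical client-side check of the NfcStatus::FAILED contract described
// in the INfc documentation above (illustrative only, not part of this change).
#include <aidl/android/hardware/nfc/INfc.h>
#include <aidl/android/hardware/nfc/NfcStatus.h>
#include <android/binder_status.h>
#include <memory>

using aidl::android::hardware::nfc::INfc;
using aidl::android::hardware::nfc::NfcStatus;

bool coreInitializedOrLogged(const std::shared_ptr<INfc>& nfc) {
    ::ndk::ScopedAStatus status = nfc->coreInitialized();
    if (status.isOk()) return true;
    // Per the documentation, calling this before open() has registered a
    // callback must fail with the service-specific error NfcStatus::FAILED.
    if (status.getExceptionCode() == EX_SERVICE_SPECIFIC &&
        status.getServiceSpecificError() == static_cast<int32_t>(NfcStatus::FAILED)) {
        // Caller should open() first and wait for NfcEvent::OPEN_CPLT.
    }
    return false;
}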
diff --git a/nfc/aidl/android/hardware/nfc/INfcClientCallback.aidl b/nfc/aidl/android/hardware/nfc/INfcClientCallback.aidl
new file mode 100644
index 0000000..43d4f5c
--- /dev/null
+++ b/nfc/aidl/android/hardware/nfc/INfcClientCallback.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.nfc;
+
+import android.hardware.nfc.NfcEvent;
+import android.hardware.nfc.NfcStatus;
+
+@VintfStability
+interface INfcClientCallback {
+ /**
+ * The callback passed in from the NFC stack that the HAL
+ * can use to pass incoming response data to the stack.
+ *
+ * @param data
+ * Data packet to transmit NCI Responses, Notifications, and Data
+ * Messages over sendData.
+ * Detailed format is defined in NFC Controller Interface (NCI) Technical Specification.
+ * https://nfc-forum.org/our-work/specification-releases/
+ */
+ void sendData(in byte[] data);
+
+ /**
+ * The callback passed in from the NFC stack that the HAL
+ * can use to pass events back to the stack.
+ */
+ void sendEvent(in NfcEvent event, in NfcStatus status);
+}
diff --git a/nfc/aidl/android/hardware/nfc/NfcCloseType.aidl b/nfc/aidl/android/hardware/nfc/NfcCloseType.aidl
new file mode 100644
index 0000000..5160532
--- /dev/null
+++ b/nfc/aidl/android/hardware/nfc/NfcCloseType.aidl
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.nfc;
+
+/**
+ * Different closing types for NFC HAL.
+ */
+@VintfStability
+@Backing(type="int")
+enum NfcCloseType {
+ /**
+ * Close the NFC controller and free NFC HAL resources.
+ */
+ DISABLE = 0,
+
+ /**
+ * Switch the NFC controller operation mode and free NFC HAL resources.
+ * Enable NFC functionality for off-host card emulation use cases in the
+ * device (host) power-off (switched off) state, if the device
+ * supports power-off use cases. If the device doesn't support power-off
+ * use cases, this call should behave the same as DISABLE.
+ */
+ HOST_SWITCHED_OFF = 1,
+}
diff --git a/nfc/aidl/android/hardware/nfc/NfcConfig.aidl b/nfc/aidl/android/hardware/nfc/NfcConfig.aidl
new file mode 100644
index 0000000..1b4fcfb
--- /dev/null
+++ b/nfc/aidl/android/hardware/nfc/NfcConfig.aidl
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.nfc;
+
+import android.hardware.nfc.PresenceCheckAlgorithm;
+import android.hardware.nfc.ProtocolDiscoveryConfig;
+
+/**
+ * Define Nfc related configurations based on:
+ * NFC Controller Interface (NCI) Technical Specification
+ * https://nfc-forum.org/our-work/specification-releases/
+ */
+@VintfStability
+parcelable NfcConfig {
+ /**
+ * If true, NFCC is using bail out mode for either Type A or Type B poll
+ * based on: Nfc-Forum Activity Technical Specification
+ * https://nfc-forum.org/our-work/specification-releases/
+ */
+ boolean nfaPollBailOutMode;
+ PresenceCheckAlgorithm presenceCheckAlgorithm;
+ ProtocolDiscoveryConfig nfaProprietaryCfg;
+ /**
+ * Default off-host route. 0x00 if there aren't any. Refer to NCI spec.
+ */
+ byte defaultOffHostRoute;
+ /**
+ * Default off-host route for Felica. 0x00 if there aren't any. Refer to
+ * NCI spec.
+ */
+ byte defaultOffHostRouteFelica;
+ /**
+ * Default system code route. 0x00 if there aren't any. Refer to NCI spec.
+ */
+ byte defaultSystemCodeRoute;
+ /**
+ * Default power state for system code route. 0x00 if there aren't any.
+ * Refer to NCI spec.
+ */
+ byte defaultSystemCodePowerState;
+ /**
+ * Default route for all remaining protocols and technologies which haven't
+ * been configured.
+ * Device Host(0x00) is the default. Refer to NCI spec.
+ *
+ */
+ byte defaultRoute;
+ /**
+ * Pipe ID for eSE. 0x00 if there aren't any.
+ */
+ byte offHostESEPipeId;
+ /**
+ * Pipe ID for UICC. 0x00 if there aren't any.
+ */
+ byte offHostSIMPipeId;
+ /**
+ * Extended APDU length for ISO_DEP. If not supported, the default length is 261.
+ */
+ int maxIsoDepTransceiveLength;
+ /**
+ * List of allowed host IDs, as per ETSI TS 102 622
+ * https://www.etsi.org/
+ */
+ byte[] hostAllowlist;
+ /**
+ * NFCEE ID for offhost UICC & eSE secure element.
+ * 0x00 if there aren't any. Refer to NCI spec.
+ */
+ byte[] offHostRouteUicc;
+ byte[] offHostRouteEse;
+ /**
+ * Default IsoDep route. 0x00 if there aren't any. Refer to NCI spec.
+ */
+ byte defaultIsoDepRoute;
+}
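To make the field comments above concrete, here is a hypothetical example of how a vendor HAL might fill in NfcConfig. It is a sketch only; the specific values are illustrative placeholders, not requirements of this change.

// Hypothetical NfcConfig population; values mirror the defaults described in
// the field comments above (e.g. the 261-byte minimum ISO-DEP transceive length).
#include <aidl/android/hardware/nfc/NfcConfig.h>
#include <aidl/android/hardware/nfc/PresenceCheckAlgorithm.h>

using aidl::android::hardware::nfc::NfcConfig;
using aidl::android::hardware::nfc::PresenceCheckAlgorithm;

NfcConfig makeExampleConfig() {
    NfcConfig config;
    config.nfaPollBailOutMode = false;
    config.presenceCheckAlgorithm = PresenceCheckAlgorithm::ISO_DEP_NAK;
    config.defaultRoute = 0x00;              // route to the Device Host by default
    config.defaultOffHostRoute = 0x00;       // no off-host route configured
    config.defaultIsoDepRoute = 0x00;        // no dedicated ISO-DEP route
    config.maxIsoDepTransceiveLength = 261;  // default/minimum per the comment above
    config.hostAllowlist = {0x00, 0x01};     // example ETSI TS 102 622 host IDs
    return config;
}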
diff --git a/nfc/aidl/android/hardware/nfc/NfcEvent.aidl b/nfc/aidl/android/hardware/nfc/NfcEvent.aidl
new file mode 100644
index 0000000..a78b1cd
--- /dev/null
+++ b/nfc/aidl/android/hardware/nfc/NfcEvent.aidl
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.nfc;
+
+/**
+ * Nfc event to notify nfc status change.
+ */
+@VintfStability
+@Backing(type="int")
+enum NfcEvent {
+ /**
+ * Open complete event to notify upper layer when NFC HAL and NFCC
+ * initialization is complete.
+ */
+ OPEN_CPLT = 0,
+ /**
+ * Close complete event to notify upper layer when HAL close is done.
+ */
+ CLOSE_CPLT = 1,
+ /**
+ * Post init complete event to notify upper layer when post init operations
+ * are done.
+ */
+ POST_INIT_CPLT = 2,
+ /**
+ * Pre-discover complete event to notify upper layer when pre-discover
+ * operations are done.
+ */
+ PRE_DISCOVER_CPLT = 3,
+ /**
+ * HCI network reset event to notify the upper layer when the HCI network needs to
+ * be re-initialized in case of an error.
+ */
+ HCI_NETWORK_RESET = 4,
+ /**
+ * Error event to notify upper layer when there's an unknown error.
+ */
+ ERROR = 5,
+}
diff --git a/nfc/aidl/android/hardware/nfc/NfcStatus.aidl b/nfc/aidl/android/hardware/nfc/NfcStatus.aidl
new file mode 100644
index 0000000..a38d370
--- /dev/null
+++ b/nfc/aidl/android/hardware/nfc/NfcStatus.aidl
@@ -0,0 +1,45 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.nfc;
+
+/**
+ * Used to specify the status of the NfcEvent
+ */
+@VintfStability
+@Backing(type="int")
+enum NfcStatus {
+ /**
+ * Default status, indicating the NfcEvent completed with no error.
+ */
+ OK = 0,
+ /**
+ * Generic error.
+ */
+ FAILED = 1,
+ /**
+ * Transport error.
+ */
+ ERR_TRANSPORT = 2,
+ /**
+ * Command timeout error.
+ */
+ ERR_CMD_TIMEOUT = 3,
+ /**
+ * Refused error when command is rejected.
+ */
+ REFUSED = 4,
+}
diff --git a/nfc/aidl/android/hardware/nfc/PresenceCheckAlgorithm.aidl b/nfc/aidl/android/hardware/nfc/PresenceCheckAlgorithm.aidl
new file mode 100644
index 0000000..20e7bff
--- /dev/null
+++ b/nfc/aidl/android/hardware/nfc/PresenceCheckAlgorithm.aidl
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.nfc;
+
+/*
+ * Presence Check Algorithm as defined in ISO/IEC 14443-4:
+ * https://www.iso.org/standard/73599.html
+ */
+@VintfStability
+@Backing(type="byte")
+enum PresenceCheckAlgorithm {
+ /**
+ * Let the stack select an algorithm
+ */
+ DEFAULT = 0,
+ /**
+ * ISO-DEP protocol's empty I-block
+ */
+ I_BLOCK = 1,
+ /**
+ * Type 4 tag protocol ISO-DEP NAK presence check command is sent, waiting for
+ * the response and notification.
+ */
+ ISO_DEP_NAK = 2,
+}
diff --git a/nfc/aidl/android/hardware/nfc/ProtocolDiscoveryConfig.aidl b/nfc/aidl/android/hardware/nfc/ProtocolDiscoveryConfig.aidl
new file mode 100644
index 0000000..f8e3228
--- /dev/null
+++ b/nfc/aidl/android/hardware/nfc/ProtocolDiscoveryConfig.aidl
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.nfc;
+
+/**
+ * Vendor Specific Proprietary Protocol & Discovery Configuration.
+ * Set to 0xFF if not supported.
+ * discovery* fields map to "RF Technology and Mode" in NCI Spec
+ * protocol* fields map to "RF protocols" in NCI Spec
+ */
+@VintfStability
+parcelable ProtocolDiscoveryConfig {
+ byte protocol18092Active;
+ byte protocolBPrime;
+ byte protocolDual;
+ byte protocol15693;
+ byte protocolKovio;
+ byte protocolMifare;
+ byte discoveryPollKovio;
+ byte discoveryPollBPrime;
+ byte discoveryListenBPrime;
+}
diff --git a/nfc/aidl/default/Android.bp b/nfc/aidl/default/Android.bp
new file mode 100644
index 0000000..907d23d
--- /dev/null
+++ b/nfc/aidl/default/Android.bp
@@ -0,0 +1,23 @@
+cc_binary {
+ name: "android.hardware.nfc-service.example",
+ relative_install_path: "hw",
+ init_rc: ["nfc-service-example.rc"],
+ vintf_fragments: ["nfc-service-example.xml"],
+ vendor: true,
+ cflags: [
+ "-Wall",
+ "-Wextra",
+ ],
+ shared_libs: [
+ "libbase",
+ "liblog",
+ "libutils",
+ "libbinder_ndk",
+ "android.hardware.nfc-V1-ndk",
+ ],
+ srcs: [
+ "main.cpp",
+ "Nfc.cpp",
+ "Vendor_hal_api.cpp",
+ ],
+}
diff --git a/nfc/aidl/default/Nfc.cpp b/nfc/aidl/default/Nfc.cpp
new file mode 100644
index 0000000..4685b59
--- /dev/null
+++ b/nfc/aidl/default/Nfc.cpp
@@ -0,0 +1,160 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Nfc.h"
+
+#include <android-base/logging.h>
+
+#include "Vendor_hal_api.h"
+
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace nfc {
+
+std::shared_ptr<INfcClientCallback> Nfc::mCallback = nullptr;
+AIBinder_DeathRecipient* clientDeathRecipient = nullptr;
+
+void OnDeath(void* cookie) {
+ if (Nfc::mCallback != nullptr && !AIBinder_isAlive(Nfc::mCallback->asBinder().get())) {
+ LOG(INFO) << __func__ << " Nfc service has died";
+ Nfc* nfc = static_cast<Nfc*>(cookie);
+ nfc->close(NfcCloseType::DISABLE);
+ }
+}
+
+::ndk::ScopedAStatus Nfc::open(const std::shared_ptr<INfcClientCallback>& clientCallback) {
+ LOG(INFO) << "open";
+ if (clientCallback == nullptr) {
+ LOG(INFO) << "Nfc::open null callback";
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+ }
+ Nfc::mCallback = clientCallback;
+
+ clientDeathRecipient = AIBinder_DeathRecipient_new(OnDeath);
+ auto linkRet = AIBinder_linkToDeath(clientCallback->asBinder().get(), clientDeathRecipient,
+ this /* cookie */);
+ if (linkRet != STATUS_OK) {
+ LOG(ERROR) << __func__ << ": linkToDeath failed: " << linkRet;
+ // Just ignore the error.
+ }
+
+ int ret = Vendor_hal_open(eventCallback, dataCallback);
+ return ret == 0 ? ndk::ScopedAStatus::ok()
+ : ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+}
+
+::ndk::ScopedAStatus Nfc::close(NfcCloseType type) {
+ LOG(INFO) << "close";
+ if (Nfc::mCallback == nullptr) {
+ LOG(ERROR) << __func__ << ": mCallback null";
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+ }
+ int ret = 0;
+ if (type == NfcCloseType::HOST_SWITCHED_OFF) {
+ ret = Vendor_hal_close_off();
+ } else {
+ ret = Vendor_hal_close();
+ }
+ Nfc::mCallback = nullptr;
+ AIBinder_DeathRecipient_delete(clientDeathRecipient);
+ clientDeathRecipient = nullptr;
+ return ret == 0 ? ndk::ScopedAStatus::ok()
+ : ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+}
+
+::ndk::ScopedAStatus Nfc::coreInitialized() {
+ LOG(INFO) << "coreInitialized";
+ if (Nfc::mCallback == nullptr) {
+ LOG(ERROR) << __func__ << ": mCallback null";
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+ }
+ int ret = Vendor_hal_core_initialized();
+
+ return ret == 0 ? ndk::ScopedAStatus::ok()
+ : ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+}
+
+::ndk::ScopedAStatus Nfc::factoryReset() {
+ LOG(INFO) << "factoryReset";
+ Vendor_hal_factoryReset();
+ return ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus Nfc::getConfig(NfcConfig* _aidl_return) {
+ LOG(INFO) << "getConfig";
+ NfcConfig nfcVendorConfig;
+ Vendor_hal_getConfig(nfcVendorConfig);
+
+ *_aidl_return = nfcVendorConfig;
+ return ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus Nfc::powerCycle() {
+ LOG(INFO) << "powerCycle";
+ if (Nfc::mCallback == nullptr) {
+ LOG(ERROR) << __func__ << ": mCallback null";
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+ }
+ return Vendor_hal_power_cycle() ? ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED))
+ : ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus Nfc::preDiscover() {
+ LOG(INFO) << "preDiscover";
+ if (Nfc::mCallback == nullptr) {
+ LOG(ERROR) << __func__ << ": mCallback null";
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+ }
+ return Vendor_hal_pre_discover() ? ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED))
+ : ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus Nfc::write(const std::vector<uint8_t>& data, int32_t* _aidl_return) {
+ LOG(INFO) << "write";
+ if (Nfc::mCallback == nullptr) {
+ LOG(ERROR) << __func__ << ": mCallback null";
+ return ndk::ScopedAStatus::fromServiceSpecificError(
+ static_cast<int32_t>(NfcStatus::FAILED));
+ }
+ *_aidl_return = Vendor_hal_write(data.size(), data.data());
+ return ndk::ScopedAStatus::ok();
+}
+::ndk::ScopedAStatus Nfc::setEnableVerboseLogging(bool enable) {
+ LOG(INFO) << "setVerboseLogging";
+ Vendor_hal_setVerboseLogging(enable);
+ return ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus Nfc::isVerboseLoggingEnabled(bool* _aidl_return) {
+ *_aidl_return = Vendor_hal_getVerboseLogging();
+ return ndk::ScopedAStatus::ok();
+}
+
+} // namespace nfc
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/nfc/aidl/default/Nfc.h b/nfc/aidl/default/Nfc.h
new file mode 100644
index 0000000..1b14534
--- /dev/null
+++ b/nfc/aidl/default/Nfc.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/hardware/nfc/BnNfc.h>
+#include <aidl/android/hardware/nfc/INfcClientCallback.h>
+#include <android-base/logging.h>
+namespace aidl {
+namespace android {
+namespace hardware {
+namespace nfc {
+
+using ::aidl::android::hardware::nfc::NfcCloseType;
+using ::aidl::android::hardware::nfc::NfcConfig;
+using ::aidl::android::hardware::nfc::NfcStatus;
+
+// Default implementation that reports no NFC support.
+struct Nfc : public BnNfc {
+ public:
+ Nfc() = default;
+
+ ::ndk::ScopedAStatus open(const std::shared_ptr<INfcClientCallback>& clientCallback) override;
+ ::ndk::ScopedAStatus close(NfcCloseType type) override;
+ ::ndk::ScopedAStatus coreInitialized() override;
+ ::ndk::ScopedAStatus factoryReset() override;
+ ::ndk::ScopedAStatus getConfig(NfcConfig* _aidl_return) override;
+ ::ndk::ScopedAStatus powerCycle() override;
+ ::ndk::ScopedAStatus preDiscover() override;
+ ::ndk::ScopedAStatus write(const std::vector<uint8_t>& data, int32_t* _aidl_return) override;
+ ::ndk::ScopedAStatus setEnableVerboseLogging(bool enable) override;
+ ::ndk::ScopedAStatus isVerboseLoggingEnabled(bool* _aidl_return) override;
+
+ static void eventCallback(uint8_t event, uint8_t status) {
+ if (mCallback != nullptr) {
+ auto ret = mCallback->sendEvent((NfcEvent)event, (NfcStatus)status);
+ if (!ret.isOk()) {
+ LOG(ERROR) << "Failed to send event!";
+ }
+ }
+ }
+
+ static void dataCallback(uint16_t data_len, uint8_t* p_data) {
+ std::vector<uint8_t> data(p_data, p_data + data_len);
+ if (mCallback != nullptr) {
+ auto ret = mCallback->sendData(data);
+ if (!ret.isOk()) {
+ LOG(ERROR) << "Failed to send data!";
+ }
+ }
+ }
+
+ static std::shared_ptr<INfcClientCallback> mCallback;
+};
+
+} // namespace nfc
+} // namespace hardware
+} // namespace android
+} // namespace aidl
diff --git a/nfc/aidl/default/Vendor_hal_api.cpp b/nfc/aidl/default/Vendor_hal_api.cpp
new file mode 100644
index 0000000..66a2ebc
--- /dev/null
+++ b/nfc/aidl/default/Vendor_hal_api.cpp
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/properties.h>
+#include <dlfcn.h>
+#include <errno.h>
+#include <string.h>
+
+#include "Vendor_hal_api.h"
+
+bool logging = false;
+
+int Vendor_hal_open(nfc_stack_callback_t* p_cback, nfc_stack_data_callback_t* p_data_cback) {
+ (void)p_cback;
+ (void)p_data_cback;
+ // nothing to open in this example
+ return -1;
+}
+
+int Vendor_hal_write(uint16_t data_len, const uint8_t* p_data) {
+ (void)data_len;
+ (void)p_data;
+ return -1;
+}
+
+int Vendor_hal_core_initialized() {
+ return -1;
+}
+
+int Vendor_hal_pre_discover() {
+ return -1;
+}
+
+int Vendor_hal_close() {
+ return -1;
+}
+
+int Vendor_hal_close_off() {
+ return -1;
+}
+
+int Vendor_hal_power_cycle() {
+ return -1;
+}
+
+void Vendor_hal_factoryReset() {}
+
+void Vendor_hal_getConfig(NfcConfig& config) {
+ (void)config;
+}
+
+void Vendor_hal_setVerboseLogging(bool enable) {
+ logging = enable;
+}
+
+bool Vendor_hal_getVerboseLogging() {
+ return logging;
+}
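The stub above intentionally returns -1 everywhere because the example service has no real controller. As a hedged sketch of what a functional vendor shim might do with these entry points, the hypothetical variant below stores the callbacks handed over by Nfc::open() and reports OPEN_CPLT back through the event callback. The bring-up step is a placeholder, and the raw values 0 and 1 correspond to NfcEvent::OPEN_CPLT, NfcStatus::OK, and NfcStatus::FAILED as defined in the AIDL files above.

// Hypothetical functional variant of Vendor_hal_open(), for illustration only.
static nfc_stack_callback_t* sEventCallback = nullptr;
static nfc_stack_data_callback_t* sDataCallback = nullptr;

int Vendor_hal_open_example(nfc_stack_callback_t* p_cback,
                            nfc_stack_data_callback_t* p_data_cback) {
    sEventCallback = p_cback;       // later used to signal events to Nfc::eventCallback
    sDataCallback = p_data_cback;   // later used to forward NCI packets to Nfc::dataCallback
    bool controller_ready = true;   // placeholder for real firmware/transport bring-up
    if (sEventCallback != nullptr) {
        // 0 == NfcEvent::OPEN_CPLT; status 0 == NfcStatus::OK, 1 == NfcStatus::FAILED
        sEventCallback(/*event=*/0, /*event_status=*/controller_ready ? 0 : 1);
    }
    return controller_ready ? 0 : -1;
}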
diff --git a/nfc/aidl/default/Vendor_hal_api.h b/nfc/aidl/default/Vendor_hal_api.h
new file mode 100644
index 0000000..595c2dd
--- /dev/null
+++ b/nfc/aidl/default/Vendor_hal_api.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <aidl/android/hardware/nfc/INfc.h>
+#include <aidl/android/hardware/nfc/NfcConfig.h>
+#include <aidl/android/hardware/nfc/NfcEvent.h>
+#include <aidl/android/hardware/nfc/NfcStatus.h>
+#include <aidl/android/hardware/nfc/PresenceCheckAlgorithm.h>
+#include <aidl/android/hardware/nfc/ProtocolDiscoveryConfig.h>
+#include "hardware_nfc.h"
+
+using aidl::android::hardware::nfc::NfcConfig;
+using aidl::android::hardware::nfc::NfcEvent;
+using aidl::android::hardware::nfc::NfcStatus;
+using aidl::android::hardware::nfc::PresenceCheckAlgorithm;
+using aidl::android::hardware::nfc::ProtocolDiscoveryConfig;
+
+int Vendor_hal_open(nfc_stack_callback_t* p_cback, nfc_stack_data_callback_t* p_data_cback);
+int Vendor_hal_write(uint16_t data_len, const uint8_t* p_data);
+
+int Vendor_hal_core_initialized();
+
+int Vendor_hal_pre_discover();
+
+int Vendor_hal_close();
+
+int Vendor_hal_close_off();
+
+int Vendor_hal_power_cycle();
+
+void Vendor_hal_factoryReset();
+
+void Vendor_hal_getConfig(NfcConfig& config);
+
+void Vendor_hal_setVerboseLogging(bool enable);
+
+bool Vendor_hal_getVerboseLogging();
diff --git a/nfc/aidl/default/hardware_nfc.h b/nfc/aidl/default/hardware_nfc.h
new file mode 100644
index 0000000..0f856c5
--- /dev/null
+++ b/nfc/aidl/default/hardware_nfc.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+typedef uint8_t nfc_event_t;
+typedef uint8_t nfc_status_t;
+
+/*
+ * The callback passed in from the NFC stack that the HAL
+ * can use to pass events back to the stack.
+ */
+typedef void(nfc_stack_callback_t)(nfc_event_t event, nfc_status_t event_status);
+
+/*
+ * The callback passed in from the NFC stack that the HAL
+ * can use to pass incoming data to the stack.
+ */
+typedef void(nfc_stack_data_callback_t)(uint16_t data_len, uint8_t* p_data);
diff --git a/nfc/aidl/default/main.cpp b/nfc/aidl/default/main.cpp
new file mode 100644
index 0000000..0cc51e7
--- /dev/null
+++ b/nfc/aidl/default/main.cpp
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+#include "Nfc.h"
+using ::aidl::android::hardware::nfc::Nfc;
+
+int main() {
+ LOG(INFO) << "NFC HAL starting up";
+ if (!ABinderProcess_setThreadPoolMaxThreadCount(1)) {
+ LOG(INFO) << "failed to set thread pool max thread count";
+ return 1;
+ }
+ std::shared_ptr<Nfc> nfc_service = ndk::SharedRefBase::make<Nfc>();
+
+ const std::string instance = std::string() + Nfc::descriptor + "/default";
+ binder_status_t status =
+ AServiceManager_addService(nfc_service->asBinder().get(), instance.c_str());
+ CHECK(status == STATUS_OK);
+ ABinderProcess_joinThreadPool();
+ return 0;
+}
diff --git a/nfc/aidl/default/nfc-service-example.rc b/nfc/aidl/default/nfc-service-example.rc
new file mode 100644
index 0000000..7d7052e
--- /dev/null
+++ b/nfc/aidl/default/nfc-service-example.rc
@@ -0,0 +1,4 @@
+service nfc_hal_service /vendor/bin/hw/android.hardware.nfc-service.example
+ class hal
+ user nfc
+ group nfc
diff --git a/nfc/aidl/default/nfc-service-example.xml b/nfc/aidl/default/nfc-service-example.xml
new file mode 100644
index 0000000..70fed20
--- /dev/null
+++ b/nfc/aidl/default/nfc-service-example.xml
@@ -0,0 +1,6 @@
+<manifest version="1.0" type="device">
+ <hal format="aidl">
+ <name>android.hardware.nfc</name>
+ <fqname>INfc/default</fqname>
+ </hal>
+</manifest>
diff --git a/nfc/aidl/vts/functional/Android.bp b/nfc/aidl/vts/functional/Android.bp
new file mode 100644
index 0000000..99eecd0
--- /dev/null
+++ b/nfc/aidl/vts/functional/Android.bp
@@ -0,0 +1,46 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "hardware_interfaces_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["hardware_interfaces_license"],
+}
+
+cc_test {
+ name: "VtsAidlHalNfcTargetTest",
+ defaults: [
+ "VtsHalTargetTestDefaults",
+ "use_libaidlvintf_gtest_helper_static",
+ ],
+ srcs: [
+ "VtsAidlHalNfcTargetTest.cpp",
+ ],
+ shared_libs: [
+ "libbinder",
+ "libbinder_ndk",
+ ],
+ static_libs: [
+ "android.hardware.nfc-V1-ndk",
+ ],
+ test_suites: [
+ "general-tests",
+ "vts",
+ ],
+}
diff --git a/nfc/aidl/vts/functional/VtsAidlHalNfcTargetTest.cpp b/nfc/aidl/vts/functional/VtsAidlHalNfcTargetTest.cpp
new file mode 100644
index 0000000..977b25c
--- /dev/null
+++ b/nfc/aidl/vts/functional/VtsAidlHalNfcTargetTest.cpp
@@ -0,0 +1,458 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "nfc_aidl_hal_test"
+
+#include <aidl/Gtest.h>
+#include <aidl/Vintf.h>
+#include <aidl/android/hardware/nfc/BnNfc.h>
+#include <aidl/android/hardware/nfc/BnNfcClientCallback.h>
+#include <aidl/android/hardware/nfc/INfc.h>
+#include <android-base/logging.h>
+#include <android-base/stringprintf.h>
+#include <android/binder_auto_utils.h>
+#include <android/binder_enums.h>
+#include <android/binder_interface_utils.h>
+#include <android/binder_manager.h>
+#include <android/binder_process.h>
+
+#include <chrono>
+#include <future>
+
+using aidl::android::hardware::nfc::INfc;
+using aidl::android::hardware::nfc::INfcClientCallback;
+using aidl::android::hardware::nfc::NfcCloseType;
+using aidl::android::hardware::nfc::NfcConfig;
+using aidl::android::hardware::nfc::NfcEvent;
+using aidl::android::hardware::nfc::NfcStatus;
+using aidl::android::hardware::nfc::PresenceCheckAlgorithm;
+
+using android::getAidlHalInstanceNames;
+using android::PrintInstanceNameToString;
+using android::base::StringPrintf;
+using ndk::enum_range;
+using ndk::ScopedAStatus;
+using ndk::SharedRefBase;
+using ndk::SpAIBinder;
+
+constexpr static int kCallbackTimeoutMs = 10000;
+
+// 261 bytes is the default and minimum transceive length
+constexpr unsigned int MIN_ISO_DEP_TRANSCEIVE_LENGTH = 261;
+
+// Range of valid off host route ids
+constexpr uint8_t MIN_OFFHOST_ROUTE_ID = 0x01;
+constexpr uint8_t MAX_OFFHOST_ROUTE_ID = 0xFE;
+
+class NfcClientCallback : public aidl::android::hardware::nfc::BnNfcClientCallback {
+ public:
+ NfcClientCallback(const std::function<void(NfcEvent, NfcStatus)>& on_hal_event_cb,
+ const std::function<void(const std::vector<uint8_t>&)>& on_nci_data_cb)
+ : on_nci_data_cb_(on_nci_data_cb), on_hal_event_cb_(on_hal_event_cb) {}
+ virtual ~NfcClientCallback() = default;
+
+ ::ndk::ScopedAStatus sendEvent(NfcEvent event, NfcStatus event_status) override {
+ on_hal_event_cb_(event, event_status);
+ return ::ndk::ScopedAStatus::ok();
+ };
+ ::ndk::ScopedAStatus sendData(const std::vector<uint8_t>& data) override {
+ on_nci_data_cb_(data);
+ return ::ndk::ScopedAStatus::ok();
+ };
+
+ private:
+ std::function<void(const std::vector<uint8_t>&)> on_nci_data_cb_;
+ std::function<void(NfcEvent, NfcStatus)> on_hal_event_cb_;
+};
+
+class NfcAidl : public testing::TestWithParam<std::string> {
+ public:
+ void SetUp() override {
+ SpAIBinder binder(AServiceManager_waitForService(GetParam().c_str()));
+ infc_ = INfc::fromBinder(binder);
+ ASSERT_NE(infc_, nullptr);
+ }
+ std::shared_ptr<INfc> infc_;
+};
+
+/*
+ * OpenAndCloseForDisable:
+ * Makes an open call, waits for NfcEvent::OPEN_CPLT
+ * Immediately calls close(NfcCloseType::DISABLE) and
+ * waits for NfcEvent::CLOSE_CPLT
+ *
+ */
+TEST_P(NfcAidl, OpenAndCloseForDisable) {
+ std::promise<void> open_cb_promise;
+ std::promise<void> close_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto close_cb_future = close_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &close_cb_promise](auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ LOG(INFO) << StringPrintf("%s,%d ", __func__, event);
+ if (event == NfcEvent::OPEN_CPLT) open_cb_promise.set_value();
+ if (event == NfcEvent::CLOSE_CPLT) close_cb_promise.set_value();
+ },
+ [](auto) {});
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+ // Close and wait for CLOSE_CPLT
+ LOG(INFO) << "close DISABLE";
+ EXPECT_TRUE(infc_->close(NfcCloseType::DISABLE).isOk());
+ LOG(INFO) << "wait for close";
+ EXPECT_EQ(close_cb_future.wait_for(timeout), std::future_status::ready);
+}
+
+/*
+ * OpenAndCloseForHostSwitchedOff:
+ * Makes an open call, waits for NfcEvent::OPEN_CPLT
+ * Immediately calls close(NfcCloseType::HOST_SWITCHED_OFF) and
+ * waits for NfcEvent::CLOSE_CPLT
+ *
+ */
+TEST_P(NfcAidl, OpenAndCloseForHostSwitchedOff) {
+ std::promise<void> open_cb_promise;
+ std::promise<void> close_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto close_cb_future = close_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &close_cb_promise](auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ if (event == NfcEvent::OPEN_CPLT) open_cb_promise.set_value();
+ if (event == NfcEvent::CLOSE_CPLT) close_cb_promise.set_value();
+ },
+ [](auto) {});
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // Close and wait for CLOSE_CPLT
+ LOG(INFO) << "close HOST_SWITCHED_OFF";
+ EXPECT_TRUE(infc_->close(NfcCloseType::HOST_SWITCHED_OFF).isOk());
+ EXPECT_EQ(close_cb_future.wait_for(timeout), std::future_status::ready);
+}
+
+/*
+ * OpenAfterOpen:
+ * Calls open() multiple times
+ * Checks status
+ */
+TEST_P(NfcAidl, OpenAfterOpen) {
+ int open_count = 0;
+ std::promise<void> open_cb_promise;
+ std::promise<void> open2_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto open2_cb_future = open2_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &open2_cb_promise, &open_count](auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ if (event == NfcEvent::OPEN_CPLT) {
+ open_count == 0 ? open_cb_promise.set_value() : open2_cb_promise.set_value();
+ open_count++;
+ }
+ },
+ [](auto) {});
+
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // Open again and wait for OPEN_CPLT
+ LOG(INFO) << "open again";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open2_cb_future.wait_for(timeout), std::future_status::ready);
+}
+
+/*
+ * CloseAfterClose:
+ * Calls close() multiple times
+ * Checks status
+ */
+TEST_P(NfcAidl, CloseAfterClose) {
+ std::promise<void> open_cb_promise;
+ std::promise<void> close_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto close_cb_future = close_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &close_cb_promise](auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ if (event == NfcEvent::OPEN_CPLT) open_cb_promise.set_value();
+ if (event == NfcEvent::CLOSE_CPLT) close_cb_promise.set_value();
+ },
+ [](auto) {});
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // Close and wait for CLOSE_CPLT
+ LOG(INFO) << "close";
+ EXPECT_TRUE(infc_->close(NfcCloseType::DISABLE).isOk());
+ EXPECT_EQ(close_cb_future.wait_for(timeout), std::future_status::ready);
+ // Close again should fail.
+ LOG(INFO) << "close again";
+ EXPECT_TRUE(!(infc_->close(NfcCloseType::DISABLE).isOk()));
+}
+
+/*
+ * PowerCycleAfterOpen:
+ * Calls powerCycle() after open
+ * Waits for NfcEvent.OPEN_CPLT
+ * Checks status
+ */
+TEST_P(NfcAidl, PowerCycleAfterOpen) {
+ int open_cplt_count = 0;
+ std::promise<void> open_cb_promise;
+ std::promise<void> power_cycle_cb_promise;
+ std::promise<void> close_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto power_cycle_cb_future = power_cycle_cb_promise.get_future();
+ auto close_cb_future = close_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &close_cb_promise, &power_cycle_cb_promise, &open_cplt_count](
+ auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ if (event == NfcEvent::OPEN_CPLT) {
+ if (open_cplt_count == 0) {
+ open_cplt_count++;
+ open_cb_promise.set_value();
+ } else {
+ power_cycle_cb_promise.set_value();
+ }
+ }
+ if (event == NfcEvent::CLOSE_CPLT) close_cb_promise.set_value();
+ },
+ [](auto) {});
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // PowerCycle and wait for OPEN_CPLT
+ LOG(INFO) << "PowerCycle";
+ EXPECT_TRUE(infc_->powerCycle().isOk());
+ EXPECT_EQ(power_cycle_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // Close and wait for CLOSE_CPLT
+ LOG(INFO) << "close";
+ EXPECT_TRUE(infc_->close(NfcCloseType::DISABLE).isOk());
+ EXPECT_EQ(close_cb_future.wait_for(timeout), std::future_status::ready);
+}
+
+/*
+ * PowerCycleAfterClose:
+ * Calls powerCycle() after close
+ * PowerCycle should fail immediately
+ */
+TEST_P(NfcAidl, PowerCycleAfterClose) {
+ std::promise<void> open_cb_promise;
+ std::promise<void> close_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto close_cb_future = close_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &close_cb_promise](auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ if (event == NfcEvent::OPEN_CPLT) open_cb_promise.set_value();
+ if (event == NfcEvent::CLOSE_CPLT) close_cb_promise.set_value();
+ },
+ [](auto) {});
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // Close and wait for CLOSE_CPLT
+ LOG(INFO) << "close";
+ EXPECT_TRUE(infc_->close(NfcCloseType::DISABLE).isOk());
+ EXPECT_EQ(close_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // PowerCycle should fail
+ LOG(INFO) << "PowerCycle";
+ EXPECT_TRUE(!(infc_->powerCycle().isOk()));
+}
+
+/*
+ * CoreInitializedAfterOpen:
+ * Calls coreInitialized() after open
+ * Waits for NfcEvent.POST_INIT_CPLT
+ */
+TEST_P(NfcAidl, CoreInitializedAfterOpen) {
+ std::promise<void> open_cb_promise;
+ std::promise<void> core_init_cb_promise;
+ std::promise<void> close_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto core_init_cb_future = core_init_cb_promise.get_future();
+ auto close_cb_future = close_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &close_cb_promise, &core_init_cb_promise](auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ if (event == NfcEvent::OPEN_CPLT) open_cb_promise.set_value();
+ if (event == NfcEvent::POST_INIT_CPLT) core_init_cb_promise.set_value();
+ if (event == NfcEvent::CLOSE_CPLT) close_cb_promise.set_value();
+ },
+ [](auto) {});
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // CoreInitialized and wait for POST_INIT_CPLT
+ LOG(INFO) << "coreInitialized";
+ EXPECT_TRUE(infc_->coreInitialized().isOk());
+ EXPECT_EQ(core_init_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // Close and wait for CLOSE_CPLT
+ LOG(INFO) << "close";
+ EXPECT_TRUE(infc_->close(NfcCloseType::DISABLE).isOk());
+ EXPECT_EQ(close_cb_future.wait_for(timeout), std::future_status::ready);
+}
+
+/*
+ * CoreInitializedAfterClose:
+ * Calls coreInitialized() after close
+ * coreInitialized() should fail immediately
+ */
+TEST_P(NfcAidl, CoreInitializedAfterClose) {
+ std::promise<void> open_cb_promise;
+ std::promise<void> close_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto close_cb_future = close_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &close_cb_promise](auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ if (event == NfcEvent::OPEN_CPLT) open_cb_promise.set_value();
+ if (event == NfcEvent::CLOSE_CPLT) close_cb_promise.set_value();
+ },
+ [](auto) {});
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // Close and wait for CLOSE_CPLT
+ LOG(INFO) << "close";
+ EXPECT_TRUE(infc_->close(NfcCloseType::DISABLE).isOk());
+ EXPECT_EQ(close_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // coreInitialized should fail
+ LOG(INFO) << "CoreInitialized";
+ EXPECT_TRUE(!(infc_->coreInitialized().isOk()));
+}
+
+/*
+ * PreDiscoverAfterClose:
+ * Call preDiscover() after close
+ * preDiscover() should fail immediately
+ */
+TEST_P(NfcAidl, PreDiscoverAfterClose) {
+ std::promise<void> open_cb_promise;
+ std::promise<void> close_cb_promise;
+ auto open_cb_future = open_cb_promise.get_future();
+ auto close_cb_future = close_cb_promise.get_future();
+ std::shared_ptr<INfcClientCallback> mCallback = ::ndk::SharedRefBase::make<NfcClientCallback>(
+ [&open_cb_promise, &close_cb_promise](auto event, auto status) {
+ EXPECT_EQ(status, NfcStatus::OK);
+ if (event == NfcEvent::OPEN_CPLT) open_cb_promise.set_value();
+ if (event == NfcEvent::CLOSE_CPLT) close_cb_promise.set_value();
+ },
+ [](auto) {});
+ std::chrono::milliseconds timeout{kCallbackTimeoutMs};
+ // Open and wait for OPEN_CPLT
+ LOG(INFO) << "open";
+ EXPECT_TRUE(infc_->open(mCallback).isOk());
+ EXPECT_EQ(open_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // Close and wait for CLOSE_CPLT
+ LOG(INFO) << "close";
+ EXPECT_TRUE(infc_->close(NfcCloseType::DISABLE).isOk());
+ EXPECT_EQ(close_cb_future.wait_for(timeout), std::future_status::ready);
+
+ // preDiscover should fail
+ LOG(INFO) << "preDiscover";
+ EXPECT_TRUE(!(infc_->preDiscover().isOk()));
+}
+
+/*
+ * CheckGetConfigValues:
+ * Calls getConfig()
+ * Checks that fields in NfcConfig are populated correctly
+ */
+TEST_P(NfcAidl, CheckGetConfigValues) {
+ NfcConfig configValue;
+ EXPECT_TRUE(infc_->getConfig(&configValue).isOk());
+ EXPECT_GE(configValue.maxIsoDepTransceiveLength, MIN_ISO_DEP_TRANSCEIVE_LENGTH);
+ LOG(INFO) << StringPrintf("configValue.maxIsoDepTransceiveLength = %x",
+ configValue.maxIsoDepTransceiveLength);
+ for (auto uicc : configValue.offHostRouteUicc) {
+ LOG(INFO) << StringPrintf("offHostRouteUicc = %x", uicc);
+ EXPECT_GE(uicc, MIN_OFFHOST_ROUTE_ID);
+ EXPECT_LE(uicc, MAX_OFFHOST_ROUTE_ID);
+ }
+ for (auto ese : configValue.offHostRouteEse) {
+ LOG(INFO) << StringPrintf("offHostRouteEse = %x", ese);
+ EXPECT_GE(ese, MIN_OFFHOST_ROUTE_ID);
+ EXPECT_LE(ese, MAX_OFFHOST_ROUTE_ID);
+ }
+ if (configValue.defaultIsoDepRoute != 0) {
+ EXPECT_GE((uint8_t)configValue.defaultIsoDepRoute, MIN_OFFHOST_ROUTE_ID);
+ EXPECT_LE((uint8_t)configValue.defaultIsoDepRoute, MAX_OFFHOST_ROUTE_ID);
+ }
+}
+
+/*
+ * CheckisVerboseLoggingEnabledAfterSetEnableVerboseLogging:
+ * Calls setEnableVerboseLogging()
+ * Checks the return value of isVerboseLoggingEnabled()
+ */
+TEST_P(NfcAidl, CheckisVerboseLoggingEnabledAfterSetEnableVerboseLogging) {
+ bool enabled = false;
+ EXPECT_TRUE(infc_->setEnableVerboseLogging(true).isOk());
+ EXPECT_TRUE(infc_->isVerboseLoggingEnabled(&enabled).isOk());
+ EXPECT_TRUE(enabled);
+ EXPECT_TRUE(infc_->setEnableVerboseLogging(false).isOk());
+ EXPECT_TRUE(infc_->isVerboseLoggingEnabled(&enabled).isOk());
+ EXPECT_TRUE(!enabled);
+}
+
+GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(NfcAidl);
+INSTANTIATE_TEST_SUITE_P(Nfc, NfcAidl,
+ testing::ValuesIn(::android::getAidlHalInstanceNames(INfc::descriptor)),
+ ::android::PrintInstanceNameToString);
+
+int main(int argc, char** argv) {
+ ::testing::InitGoogleTest(&argc, argv);
+ ABinderProcess_startThreadPool();
+ std::system("/system/bin/svc nfc disable"); /* Turn off NFC */
+ sleep(5);
+ int status = RUN_ALL_TESTS();
+ LOG(INFO) << "Test result = " << status;
+ std::system("/system/bin/svc nfc enable"); /* Turn on NFC */
+ sleep(5);
+ return status;
+}
diff --git a/security/keymint/aidl/android/hardware/security/keymint/IKeyMintDevice.aidl b/security/keymint/aidl/android/hardware/security/keymint/IKeyMintDevice.aidl
index 9846ee9..4b63799 100644
--- a/security/keymint/aidl/android/hardware/security/keymint/IKeyMintDevice.aidl
+++ b/security/keymint/aidl/android/hardware/security/keymint/IKeyMintDevice.aidl
@@ -96,7 +96,9 @@
 * - TRUSTED_ENVIRONMENT IKeyMintDevices must support curve 25519 for Purpose::SIGN (Ed25519,
* as specified in RFC 8032), Purpose::ATTEST_KEY (Ed25519) or for KeyPurpose::AGREE_KEY
* (X25519, as specified in RFC 7748). However, a key must have exactly one of these
- * purpose values; the same key cannot be used for multiple purposes.
+ * purpose values; the same key cannot be used for multiple purposes. Signing operations
+ * (Purpose::SIGN) have a message size limit of 16 KiB; operations on messages longer than
+ * this limit must fail with ErrorCode::INVALID_INPUT_LENGTH.
* STRONGBOX IKeyMintDevices do not support curve 25519.
*
* o AES
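The 16 KiB limit applies to the whole message fed to an Ed25519 SIGN operation across begin()/update()/finish(); pure Ed25519 signs the complete message, so it cannot be streamed in digested chunks. A minimal caller-side sketch of the expected behaviour, assuming a hypothetical signEd25519() wrapper around the KeyMint operation:

    #include <cstddef>
    #include <cstdint>
    #include <optional>
    #include <vector>

    // Mirrors the limit documented above; this is not an AIDL constant.
    constexpr size_t kMaxEd25519MessageSize = 16 * 1024;

    // signEd25519() stands in for whatever wrapper the client uses around
    // begin(KeyPurpose::SIGN)/finish() on an Ed25519 key.
    std::optional<std::vector<uint8_t>> signEd25519(const std::vector<uint8_t>& message);

    std::optional<std::vector<uint8_t>> signIfWithinLimit(const std::vector<uint8_t>& message) {
        if (message.size() > kMaxEd25519MessageSize) {
            // The device must reject this with ErrorCode::INVALID_INPUT_LENGTH, so fail fast.
            return std::nullopt;
        }
        return signEd25519(message);
    }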
diff --git a/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.cpp b/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.cpp
index 374f2da..146a527 100644
--- a/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.cpp
+++ b/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.cpp
@@ -25,6 +25,7 @@
#include <cppbor_parse.h>
#include <cutils/properties.h>
#include <gmock/gmock.h>
+#include <openssl/evp.h>
#include <openssl/mem.h>
#include <remote_prov/remote_prov_utils.h>
@@ -206,6 +207,21 @@
return boot_patch_level(key_characteristics_);
}
+bool KeyMintAidlTestBase::Curve25519Supported() {
+ // Strongbox never supports curve 25519.
+ if (SecLevel() == SecurityLevel::STRONGBOX) {
+ return false;
+ }
+
+ // Curve 25519 was included in version 2 of the KeyMint interface.
+ int32_t version = 0;
+ auto status = keymint_->getInterfaceVersion(&version);
+ if (!status.isOk()) {
+ ADD_FAILURE() << "Failed to determine interface version";
+ }
+ return version >= 2;
+}
+
ErrorCode KeyMintAidlTestBase::GetReturnErrorCode(const Status& result) {
if (result.isOk()) return ErrorCode::OK;
@@ -543,7 +559,12 @@
std::vector<uint8_t> o_put;
result = op_->update(vector<uint8_t>(input.begin(), input.end()), {}, {}, &o_put);
- if (result.isOk()) output->append(o_put.begin(), o_put.end());
+ if (result.isOk()) {
+ output->append(o_put.begin(), o_put.end());
+ } else {
+ // Failure always terminates the operation.
+ op_ = {};
+ }
return GetReturnErrorCode(result);
}
@@ -740,6 +761,19 @@
if (digest == Digest::NONE) {
switch (EVP_PKEY_id(pub_key.get())) {
+ case EVP_PKEY_ED25519: {
+ ASSERT_EQ(64, signature.size());
+ uint8_t pub_keydata[32];
+ size_t pub_len = sizeof(pub_keydata);
+ ASSERT_EQ(1, EVP_PKEY_get_raw_public_key(pub_key.get(), pub_keydata, &pub_len));
+ ASSERT_EQ(sizeof(pub_keydata), pub_len);
+ ASSERT_EQ(1, ED25519_verify(reinterpret_cast<const uint8_t*>(message.data()),
+ message.size(),
+ reinterpret_cast<const uint8_t*>(signature.data()),
+ pub_keydata));
+ break;
+ }
+
case EVP_PKEY_EC: {
vector<uint8_t> data((EVP_PKEY_bits(pub_key.get()) + 7) / 8);
size_t data_size = std::min(data.size(), message.size());
@@ -1166,16 +1200,31 @@
vector<EcCurve> KeyMintAidlTestBase::ValidCurves() {
if (securityLevel_ == SecurityLevel::STRONGBOX) {
return {EcCurve::P_256};
+ } else if (Curve25519Supported()) {
+ return {EcCurve::P_224, EcCurve::P_256, EcCurve::P_384, EcCurve::P_521,
+ EcCurve::CURVE_25519};
} else {
- return {EcCurve::P_224, EcCurve::P_256, EcCurve::P_384, EcCurve::P_521};
+ return {
+ EcCurve::P_224,
+ EcCurve::P_256,
+ EcCurve::P_384,
+ EcCurve::P_521,
+ };
}
}
vector<EcCurve> KeyMintAidlTestBase::InvalidCurves() {
if (SecLevel() == SecurityLevel::STRONGBOX) {
- return {EcCurve::P_224, EcCurve::P_384, EcCurve::P_521};
+ // Curve 25519 is not supported, either because:
+ // - KeyMint v1: it's an unknown enum value
+ // - KeyMint v2+: it's not supported by StrongBox.
+ return {EcCurve::P_224, EcCurve::P_384, EcCurve::P_521, EcCurve::CURVE_25519};
} else {
- return {};
+ if (Curve25519Supported()) {
+ return {};
+ } else {
+ return {EcCurve::CURVE_25519};
+ }
}
}
diff --git a/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.h b/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.h
index 61f9d4d..27cb99c 100644
--- a/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.h
+++ b/security/keymint/aidl/vts/functional/KeyMintAidlTestBase.h
@@ -80,6 +80,8 @@
uint32_t boot_patch_level(const vector<KeyCharacteristics>& key_characteristics);
uint32_t boot_patch_level();
+ bool Curve25519Supported();
+
ErrorCode GetReturnErrorCode(const Status& result);
ErrorCode GenerateKey(const AuthorizationSet& key_desc, vector<uint8_t>* key_blob,
diff --git a/security/keymint/aidl/vts/functional/KeyMintTest.cpp b/security/keymint/aidl/vts/functional/KeyMintTest.cpp
index d109058..62f22bb 100644
--- a/security/keymint/aidl/vts/functional/KeyMintTest.cpp
+++ b/security/keymint/aidl/vts/functional/KeyMintTest.cpp
@@ -22,6 +22,7 @@
#include <algorithm>
#include <iostream>
+#include <openssl/curve25519.h>
#include <openssl/ec.h>
#include <openssl/evp.h>
#include <openssl/mem.h>
@@ -69,6 +70,9 @@
namespace {
+// Maximum supported Ed25519 message size.
+const size_t MAX_ED25519_MSG_SIZE = 16 * 1024;
+
// Whether to check that BOOT_PATCHLEVEL is populated.
bool check_boot_pl = true;
@@ -415,6 +419,126 @@
// } end SEQUENCE (PrivateKeyInfo)
);
+/**
+ * Ed25519 key pair generated as follows:
+ * ```
+ * % openssl req -x509 -newkey ED25519 -days 700 -nodes \
+ *     -keyout ed25519_priv.key -out ed25519.pem -subj "/CN=fake.ed25519.com"
+ * Generating a ED25519 private key
+ * writing new private key to 'ed25519_priv.key'
+ * -----
+ * % cat ed25519_priv.key
+ * -----BEGIN PRIVATE KEY-----
+ * MC4CAQAwBQYDK2VwBCIEIKl3A5quNywcj1P+0XI9SBalFPIvO52NxceMLRH6dVmR
+ * -----END PRIVATE KEY-----
+ * % der2ascii -pem -i ed25519_priv.key
+ * SEQUENCE {
+ * INTEGER { 0 }
+ * SEQUENCE {
+ * # ed25519
+ * OBJECT_IDENTIFIER { 1.3.101.112 }
+ * }
+ * OCTET_STRING {
+ * OCTET_STRING { `a977039aae372c1c8f53fed1723d4816a514f22f3b9d8dc5c78c2d11fa755991` }
+ * }
+ * }
+ * % cat ed25519.pem
+ * -----BEGIN CERTIFICATE-----
+ * MIIBSjCB/aADAgECAhR0Jron3eKcdgqyecv/eEfGWAzn8DAFBgMrZXAwGzEZMBcG
+ * A1UEAwwQZmFrZS5lZDI1NTE5LmNvbTAeFw0yMTEwMjAwODI3NDJaFw0yMzA5MjAw
+ * ODI3NDJaMBsxGTAXBgNVBAMMEGZha2UuZWQyNTUxOS5jb20wKjAFBgMrZXADIQDv
+ * uwHz+3TaQ69D2digxlz0fFfsZg0rPqgQae3jBPRWkaNTMFEwHQYDVR0OBBYEFN9O
+ * od30SY4JTs66ZR403UPya+iXMB8GA1UdIwQYMBaAFN9Ood30SY4JTs66ZR403UPy
+ * a+iXMA8GA1UdEwEB/wQFMAMBAf8wBQYDK2VwA0EAKjVrYQjuE/gEL2j/ABpDbFjV
+ * Ilg5tJ6MN/P3psAv3Cs7f0X1lFqdlt15nJ/6aj2cmGCwNRXt5wcyYDKNu+v2Dw==
+ * -----END CERTIFICATE-----
+ * % openssl x509 -in ed25519.pem -text -noout
+ * Certificate:
+ * Data:
+ * Version: 3 (0x2)
+ * Serial Number:
+ * 74:26:ba:27:dd:e2:9c:76:0a:b2:79:cb:ff:78:47:c6:58:0c:e7:f0
+ * Signature Algorithm: ED25519
+ * Issuer: CN = fake.ed25519.com
+ * Validity
+ * Not Before: Oct 20 08:27:42 2021 GMT
+ * Not After : Sep 20 08:27:42 2023 GMT
+ * Subject: CN = fake.ed25519.com
+ * Subject Public Key Info:
+ * Public Key Algorithm: ED25519
+ * ED25519 Public-Key:
+ * pub:
+ * ef:bb:01:f3:fb:74:da:43:af:43:d9:d8:a0:c6:5c:
+ * f4:7c:57:ec:66:0d:2b:3e:a8:10:69:ed:e3:04:f4:
+ * 56:91
+ * X509v3 extensions:
+ * X509v3 Subject Key Identifier:
+ * DF:4E:A1:DD:F4:49:8E:09:4E:CE:BA:65:1E:34:DD:43:F2:6B:E8:97
+ * X509v3 Authority Key Identifier:
+ * keyid:DF:4E:A1:DD:F4:49:8E:09:4E:CE:BA:65:1E:34:DD:43:F2:6B:E8:97
+ *
+ * X509v3 Basic Constraints: critical
+ * CA:TRUE
+ * Signature Algorithm: ED25519
+ * 2a:35:6b:61:08:ee:13:f8:04:2f:68:ff:00:1a:43:6c:58:d5:
+ * 22:58:39:b4:9e:8c:37:f3:f7:a6:c0:2f:dc:2b:3b:7f:45:f5:
+ * 94:5a:9d:96:dd:79:9c:9f:fa:6a:3d:9c:98:60:b0:35:15:ed:
+ * e7:07:32:60:32:8d:bb:eb:f6:0f
+ * ```
+ */
+string ed25519_key = hex2str("a977039aae372c1c8f53fed1723d4816a514f22f3b9d8dc5c78c2d11fa755991");
+string ed25519_pkcs8_key = hex2str(
+ // RFC 5208 s5
+ "302e" // SEQUENCE length 0x2e (PrivateKeyInfo) {
+ "0201" // INTEGER length 1 (Version)
+ "00" // version 0
+ "3005" // SEQUENCE length 05 (AlgorithmIdentifier) {
+ "0603" // OBJECT IDENTIFIER length 3 (algorithm)
+ "2b6570" // 1.3.101.112 (id-Ed125519 RFC 8410 s3)
+ // } end SEQUENCE (AlgorithmIdentifier)
+ "0422" // OCTET STRING length 0x22 (PrivateKey)
+ "0420" // OCTET STRING length 0x20 (RFC 8410 s7)
+ "a977039aae372c1c8f53fed1723d4816a514f22f3b9d8dc5c78c2d11fa755991"
+ // } end SEQUENCE (PrivateKeyInfo)
+);
+string ed25519_pubkey = hex2str("efbb01f3fb74da43af43d9d8a0c65cf47c57ec660d2b3ea81069ede304f45691");
+
+/**
+ * X25519 key pair generated as follows:
+ * ```
+ * % openssl genpkey -algorithm X25519 > x25519_priv.key
+ * % cat x25519_priv.key
+ * -----BEGIN PRIVATE KEY-----
+ * MC4CAQAwBQYDK2VuBCIEIGgPwF3NLwQx/Sfwr2nfJvXitwlDNh3Skzh+TISN/y1C
+ * -----END PRIVATE KEY-----
+ * % der2ascii -pem -i x25519_priv.key
+ * SEQUENCE {
+ * INTEGER { 0 }
+ * SEQUENCE {
+ * # x25519
+ * OBJECT_IDENTIFIER { 1.3.101.110 }
+ * }
+ * OCTET_STRING {
+ * OCTET_STRING { `680fc05dcd2f0431fd27f0af69df26f5e2b70943361dd293387e4c848dff2d42` }
+ * }
+ * }
+ * ```
+ */
+
+string x25519_key = hex2str("680fc05dcd2f0431fd27f0af69df26f5e2b70943361dd293387e4c848dff2d42");
+string x25519_pkcs8_key = hex2str(
+ // RFC 5208 s5
+ "302e" // SEQUENCE length 0x2e (PrivateKeyInfo) {
+ "0201" // INTEGER length 1 (Version)
+ "00" // version 0
+ "3005" // SEQUENCE length 05 (AlgorithmIdentifier) {
+ "0603" // OBJECT IDENTIFIER length 3 (algorithm)
+ "2b656e" // 1.3.101.110 (id-X125519 RFC 8410 s3)
+ "0422" // OCTET STRING length 0x22 (PrivateKey)
+ "0420" // OCTET STRING length 0x20 (RFC 8410 s7)
+ "680fc05dcd2f0431fd27f0af69df26f5e2b70943361dd293387e4c848dff2d42");
+string x25519_pubkey = hex2str("be46925a857f17831d6d454b9d3d36a4a30166edf80eb82b684661c3e258f768");
+
struct RSA_Delete {
void operator()(RSA* p) { RSA_free(p); }
};
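The hard-coded key material above can be cross-checked locally, since BoringSSL can re-derive both public keys from the 32-byte private values embedded in the PKCS#8 blobs. A small sketch (assuming the hex2str()-decoded strings defined above) using <openssl/curve25519.h>:

    #include <openssl/curve25519.h>

    #include <cassert>
    #include <cstdint>
    #include <cstring>
    #include <string>

    // ed_seed/ed_pub and x_priv/x_pub correspond to ed25519_key/ed25519_pubkey and
    // x25519_key/x25519_pubkey above.
    void CheckHardCodedCurve25519Keys(const std::string& ed_seed, const std::string& ed_pub,
                                      const std::string& x_priv, const std::string& x_pub) {
        uint8_t pub[32];
        uint8_t priv[64];
        // Ed25519: the 32-byte OCTET STRING in the PKCS#8 blob is the seed (RFC 8410 s7).
        ED25519_keypair_from_seed(pub, priv, reinterpret_cast<const uint8_t*>(ed_seed.data()));
        assert(memcmp(pub, ed_pub.data(), 32) == 0);
        // X25519: the public value follows directly from the 32-byte private scalar.
        X25519_public_from_private(pub, reinterpret_cast<const uint8_t*>(x_priv.data()));
        assert(memcmp(pub, x_pub.data(), 32) == 0);
    }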
@@ -1374,7 +1498,7 @@
/*
* NewKeyGenerationTest.Ecdsa
*
- * Verifies that keymint can generate all required EC key sizes, and that the resulting keys
+ * Verifies that keymint can generate all required EC curves, and that the resulting keys
* have correct characteristics.
*/
TEST_P(NewKeyGenerationTest, Ecdsa) {
@@ -1400,6 +1524,65 @@
}
/*
+ * NewKeyGenerationTest.EcdsaCurve25519
+ *
+ * Verifies that keymint can generate a curve25519 key, and that the resulting key
+ * has correct characteristics.
+ */
+TEST_P(NewKeyGenerationTest, EcdsaCurve25519) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ EcCurve curve = EcCurve::CURVE_25519;
+ vector<uint8_t> key_blob;
+ vector<KeyCharacteristics> key_characteristics;
+ ErrorCode result = GenerateKey(AuthorizationSetBuilder()
+ .EcdsaSigningKey(curve)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ &key_blob, &key_characteristics);
+ ASSERT_EQ(result, ErrorCode::OK);
+ ASSERT_GT(key_blob.size(), 0U);
+
+ EXPECT_TRUE(ChainSignaturesAreValid(cert_chain_));
+ ASSERT_GT(cert_chain_.size(), 0);
+
+ CheckBaseParams(key_characteristics);
+ CheckCharacteristics(key_blob, key_characteristics);
+
+ AuthorizationSet crypto_params = SecLevelAuthorizations(key_characteristics);
+
+ EXPECT_TRUE(crypto_params.Contains(TAG_ALGORITHM, Algorithm::EC));
+ EXPECT_TRUE(crypto_params.Contains(TAG_EC_CURVE, curve)) << "Curve " << curve << " missing";
+
+ CheckedDeleteKey(&key_blob);
+}
+
+/*
+ * NewKeyGenerationTest.EcdsaCurve25519MultiPurposeFail
+ *
+ * Verifies that KeyMint rejects an attempt to generate a curve 25519 key for both
+ * SIGN and AGREE_KEY.
+ */
+TEST_P(NewKeyGenerationTest, EcdsaCurve25519MultiPurposeFail) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ EcCurve curve = EcCurve::CURVE_25519;
+ vector<uint8_t> key_blob;
+ vector<KeyCharacteristics> key_characteristics;
+ ErrorCode result = GenerateKey(AuthorizationSetBuilder()
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .EcdsaSigningKey(curve)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ &key_blob, &key_characteristics);
+ ASSERT_EQ(result, ErrorCode::INCOMPATIBLE_PURPOSE);
+}
+
+/*
* NewKeyGenerationTest.EcdsaAttestation
*
* Verifies that for all Ecdsa key sizes, if challenge and app id is provided,
@@ -1453,6 +1636,62 @@
}
/*
+ * NewKeyGenerationTest.EcdsaAttestationCurve25519
+ *
+ * Verifies that for a curve 25519 key, if a challenge and app id are provided,
+ * an attestation will be generated.
+ */
+TEST_P(NewKeyGenerationTest, EcdsaAttestationCurve25519) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ EcCurve curve = EcCurve::CURVE_25519;
+ auto challenge = "hello";
+ auto app_id = "foo";
+
+ auto subject = "cert subj 2";
+ vector<uint8_t> subject_der(make_name_from_str(subject));
+
+ uint64_t serial_int = 0xFFFFFFFFFFFFFFFF;
+ vector<uint8_t> serial_blob(build_serial_blob(serial_int));
+
+ vector<uint8_t> key_blob;
+ vector<KeyCharacteristics> key_characteristics;
+ ErrorCode result = GenerateKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaSigningKey(curve)
+ .Digest(Digest::NONE)
+ .AttestationChallenge(challenge)
+ .AttestationApplicationId(app_id)
+ .Authorization(TAG_CERTIFICATE_SERIAL, serial_blob)
+ .Authorization(TAG_CERTIFICATE_SUBJECT, subject_der)
+ .SetDefaultValidity(),
+ &key_blob, &key_characteristics);
+ ASSERT_EQ(ErrorCode::OK, result);
+ ASSERT_GT(key_blob.size(), 0U);
+ CheckBaseParams(key_characteristics);
+ CheckCharacteristics(key_blob, key_characteristics);
+
+ AuthorizationSet crypto_params = SecLevelAuthorizations(key_characteristics);
+
+ EXPECT_TRUE(crypto_params.Contains(TAG_ALGORITHM, Algorithm::EC));
+ EXPECT_TRUE(crypto_params.Contains(TAG_EC_CURVE, curve)) << "Curve " << curve << " missing";
+
+ EXPECT_TRUE(ChainSignaturesAreValid(cert_chain_));
+ ASSERT_GT(cert_chain_.size(), 0);
+ verify_subject_and_serial(cert_chain_[0], serial_int, subject, false);
+
+ AuthorizationSet hw_enforced = HwEnforcedAuthorizations(key_characteristics);
+ AuthorizationSet sw_enforced = SwEnforcedAuthorizations(key_characteristics);
+ EXPECT_TRUE(verify_attestation_record(AidlVersion(), challenge, app_id, //
+ sw_enforced, hw_enforced, SecLevel(),
+ cert_chain_[0].encodedCertificate));
+
+ CheckedDeleteKey(&key_blob);
+}
+
+/*
* NewKeyGenerationTest.EcdsaAttestationTags
*
* Verifies that creation of an attested ECDSA key includes various tags in the
@@ -1984,20 +2223,22 @@
}
/*
- * NewKeyGenerationTest.EcdsaInvalidSize
+ * NewKeyGenerationTest.EcdsaInvalidCurve
*
- * Verifies that specifying an invalid key size for EC key generation returns
+ * Verifies that specifying an invalid curve for EC key generation returns
* UNSUPPORTED_KEY_SIZE.
*/
-TEST_P(NewKeyGenerationTest, EcdsaInvalidSize) {
+TEST_P(NewKeyGenerationTest, EcdsaInvalidCurve) {
for (auto curve : InvalidCurves()) {
vector<uint8_t> key_blob;
vector<KeyCharacteristics> key_characteristics;
- ASSERT_EQ(ErrorCode::UNSUPPORTED_KEY_SIZE, GenerateKey(AuthorizationSetBuilder()
- .EcdsaSigningKey(curve)
- .Digest(Digest::NONE)
- .SetDefaultValidity(),
- &key_blob, &key_characteristics));
+ auto result = GenerateKey(AuthorizationSetBuilder()
+ .EcdsaSigningKey(curve)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ &key_blob, &key_characteristics);
+ ASSERT_TRUE(result == ErrorCode::UNSUPPORTED_KEY_SIZE ||
+ result == ErrorCode::UNSUPPORTED_EC_CURVE);
}
ASSERT_EQ(ErrorCode::UNSUPPORTED_KEY_SIZE,
@@ -2808,15 +3049,19 @@
/*
* SigningOperationsTest.EcdsaAllDigestsAndCurves
*
- * Verifies ECDSA signature/verification for all digests and curves.
+ * Verifies ECDSA signature/verification for all digests and required curves.
*/
TEST_P(SigningOperationsTest, EcdsaAllDigestsAndCurves) {
- auto digests = ValidDigests(true /* withNone */, false /* withMD5 */);
string message = "1234567890";
string corrupt_message = "2234567890";
for (auto curve : ValidCurves()) {
SCOPED_TRACE(testing::Message() << "Curve::" << curve);
+ // Ed25519 only allows Digest::NONE.
+ auto digests = (curve == EcCurve::CURVE_25519)
+ ? std::vector<Digest>(1, Digest::NONE)
+ : ValidDigests(true /* withNone */, false /* withMD5 */);
+
ErrorCode error = GenerateKey(AuthorizationSetBuilder()
.Authorization(TAG_NO_AUTH_REQUIRED)
.EcdsaSigningKey(curve)
@@ -2841,25 +3086,141 @@
/*
* SigningOperationsTest.EcdsaAllCurves
*
- * Verifies that ECDSA operations succeed with all possible curves.
+ * Verifies that ECDSA operations succeed with all required curves.
*/
TEST_P(SigningOperationsTest, EcdsaAllCurves) {
for (auto curve : ValidCurves()) {
+ Digest digest = (curve == EcCurve::CURVE_25519 ? Digest::NONE : Digest::SHA_2_256);
+ SCOPED_TRACE(testing::Message() << "Curve::" << curve);
ErrorCode error = GenerateKey(AuthorizationSetBuilder()
.Authorization(TAG_NO_AUTH_REQUIRED)
.EcdsaSigningKey(curve)
- .Digest(Digest::SHA_2_256)
+ .Digest(digest)
.SetDefaultValidity());
EXPECT_EQ(ErrorCode::OK, error) << "Failed to generate ECDSA key with curve " << curve;
if (error != ErrorCode::OK) continue;
string message(1024, 'a');
- SignMessage(message, AuthorizationSetBuilder().Digest(Digest::SHA_2_256));
+ SignMessage(message, AuthorizationSetBuilder().Digest(digest));
CheckedDeleteKey();
}
}
/*
+ * SigningOperationsTest.EcdsaCurve25519
+ *
+ * Verifies that ECDSA operations succeed with curve25519.
+ */
+TEST_P(SigningOperationsTest, EcdsaCurve25519) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ EcCurve curve = EcCurve::CURVE_25519;
+ ErrorCode error = GenerateKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaSigningKey(curve)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity());
+ ASSERT_EQ(ErrorCode::OK, error) << "Failed to generate ECDSA key with curve " << curve;
+
+ string message(1024, 'a');
+ SignMessage(message, AuthorizationSetBuilder().Digest(Digest::NONE));
+ CheckedDeleteKey();
+}
+
+/*
+ * SigningOperationsTest.EcdsaCurve25519MaxSize
+ *
+ * Verifies that EdDSA operations with curve25519 at or below the maximum message size succeed.
+ */
+TEST_P(SigningOperationsTest, EcdsaCurve25519MaxSize) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ EcCurve curve = EcCurve::CURVE_25519;
+ ErrorCode error = GenerateKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaSigningKey(curve)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity());
+ ASSERT_EQ(ErrorCode::OK, error) << "Failed to generate ECDSA key with curve " << curve;
+
+ auto params = AuthorizationSetBuilder().Digest(Digest::NONE);
+
+ for (size_t msg_size : {MAX_ED25519_MSG_SIZE - 1, MAX_ED25519_MSG_SIZE}) {
+ SCOPED_TRACE(testing::Message() << "-msg-size=" << msg_size);
+ string message(msg_size, 'a');
+
+ // Attempt to sign via Begin+Finish.
+ AuthorizationSet out_params;
+ ASSERT_EQ(ErrorCode::OK, Begin(KeyPurpose::SIGN, key_blob_, params, &out_params));
+ EXPECT_TRUE(out_params.empty());
+ string signature;
+ auto result = Finish(message, &signature);
+ EXPECT_EQ(result, ErrorCode::OK);
+ LocalVerifyMessage(message, signature, params);
+
+ // Attempt to sign via Begin+Update+Finish
+ ASSERT_EQ(ErrorCode::OK, Begin(KeyPurpose::SIGN, key_blob_, params, &out_params));
+ EXPECT_TRUE(out_params.empty());
+ string output;
+ result = Update(message, &output);
+ EXPECT_EQ(result, ErrorCode::OK);
+ EXPECT_EQ(output.size(), 0);
+ string signature2;
+ EXPECT_EQ(ErrorCode::OK, Finish({}, &signature2));
+ LocalVerifyMessage(message, signature2, params);
+ }
+
+ CheckedDeleteKey();
+}
+
+/*
+ * SigningOperationsTest.EcdsaCurve25519MaxSizeFail
+ *
+ * Verifies that EdDSA operations with curve25519 fail when the message size is too large.
+ */
+TEST_P(SigningOperationsTest, EcdsaCurve25519MaxSizeFail) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ EcCurve curve = EcCurve::CURVE_25519;
+ ErrorCode error = GenerateKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaSigningKey(curve)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity());
+ ASSERT_EQ(ErrorCode::OK, error) << "Failed to generate ECDSA key with curve " << curve;
+
+ auto params = AuthorizationSetBuilder().Digest(Digest::NONE);
+
+ for (size_t msg_size : {MAX_ED25519_MSG_SIZE + 1, MAX_ED25519_MSG_SIZE * 2}) {
+ SCOPED_TRACE(testing::Message() << "-msg-size=" << msg_size);
+ string message(msg_size, 'a');
+
+ // Attempt to sign via Begin+Finish.
+ AuthorizationSet out_params;
+ ASSERT_EQ(ErrorCode::OK, Begin(KeyPurpose::SIGN, key_blob_, params, &out_params));
+ EXPECT_TRUE(out_params.empty());
+ string signature;
+ auto result = Finish(message, &signature);
+ EXPECT_EQ(result, ErrorCode::INVALID_INPUT_LENGTH);
+
+ // Attempt to sign via Begin+Update (but never get to Finish)
+ ASSERT_EQ(ErrorCode::OK, Begin(KeyPurpose::SIGN, key_blob_, params, &out_params));
+ EXPECT_TRUE(out_params.empty());
+ string output;
+ result = Update(message, &output);
+ EXPECT_EQ(result, ErrorCode::INVALID_INPUT_LENGTH);
+ }
+
+ CheckedDeleteKey();
+}
+
+/*
* SigningOperationsTest.EcdsaNoDigestHugeData
*
* Verifies that ECDSA operations support very large messages, even without digesting. This
@@ -3509,6 +3870,255 @@
}
/*
+ * ImportKeyTest.Ed25519RawSuccess
+ *
+ * Verifies that importing and using a raw Ed25519 private key works correctly.
+ */
+TEST_P(ImportKeyTest, Ed25519RawSuccess) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_EQ(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaSigningKey(EcCurve::CURVE_25519)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ KeyFormat::RAW, ed25519_key));
+ CheckCryptoParam(TAG_ALGORITHM, Algorithm::EC);
+ CheckCryptoParam(TAG_EC_CURVE, EcCurve::CURVE_25519);
+ CheckOrigin();
+
+ // The returned cert should hold the correct public key.
+ ASSERT_GT(cert_chain_.size(), 0);
+ X509_Ptr kmKeyCert(parse_cert_blob(cert_chain_[0].encodedCertificate));
+ ASSERT_NE(kmKeyCert, nullptr);
+ EVP_PKEY_Ptr kmPubKey(X509_get_pubkey(kmKeyCert.get()));
+ ASSERT_NE(kmPubKey.get(), nullptr);
+ size_t kmPubKeySize = 32;
+ uint8_t kmPubKeyData[32];
+ ASSERT_EQ(1, EVP_PKEY_get_raw_public_key(kmPubKey.get(), kmPubKeyData, &kmPubKeySize));
+ ASSERT_EQ(kmPubKeySize, 32);
+ EXPECT_EQ(string(kmPubKeyData, kmPubKeyData + 32), ed25519_pubkey);
+
+ string message(32, 'a');
+ auto params = AuthorizationSetBuilder().Digest(Digest::NONE);
+ string signature = SignMessage(message, params);
+ LocalVerifyMessage(message, signature, params);
+}
+
+/*
+ * ImportKeyTest.Ed25519Pkcs8Success
+ *
+ * Verifies that importing and using a PKCS#8-encoded Ed25519 private key works correctly.
+ */
+TEST_P(ImportKeyTest, Ed25519Pkcs8Success) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_EQ(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaSigningKey(EcCurve::CURVE_25519)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ KeyFormat::PKCS8, ed25519_pkcs8_key));
+ CheckCryptoParam(TAG_ALGORITHM, Algorithm::EC);
+ CheckCryptoParam(TAG_EC_CURVE, EcCurve::CURVE_25519);
+ CheckOrigin();
+
+ // The returned cert should hold the correct public key.
+ ASSERT_GT(cert_chain_.size(), 0);
+ X509_Ptr kmKeyCert(parse_cert_blob(cert_chain_[0].encodedCertificate));
+ ASSERT_NE(kmKeyCert, nullptr);
+ EVP_PKEY_Ptr kmPubKey(X509_get_pubkey(kmKeyCert.get()));
+ ASSERT_NE(kmPubKey.get(), nullptr);
+ size_t kmPubKeySize = 32;
+ uint8_t kmPubKeyData[32];
+ ASSERT_EQ(1, EVP_PKEY_get_raw_public_key(kmPubKey.get(), kmPubKeyData, &kmPubKeySize));
+ ASSERT_EQ(kmPubKeySize, 32);
+ EXPECT_EQ(string(kmPubKeyData, kmPubKeyData + 32), ed25519_pubkey);
+
+ string message(32, 'a');
+ auto params = AuthorizationSetBuilder().Digest(Digest::NONE);
+ string signature = SignMessage(message, params);
+ LocalVerifyMessage(message, signature, params);
+}
+
+/*
+ * ImportKeyTest.Ed25519CurveMismatch
+ *
+ * Verifies that importing an Ed25519 key pair with a curve that doesn't match the key fails in
+ * the correct way.
+ */
+TEST_P(ImportKeyTest, Ed25519CurveMismatch) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_NE(ErrorCode::OK,
+ ImportKey(AuthorizationSetBuilder()
+ .EcdsaSigningKey(EcCurve::P_224 /* Doesn't match key */)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ KeyFormat::RAW, ed25519_key));
+}
+
+/*
+ * ImportKeyTest.Ed25519FormatMismatch
+ *
+ * Verifies that importing an Ed25519 key pair with an invalid format fails.
+ */
+TEST_P(ImportKeyTest, Ed25519FormatMismatch) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaSigningKey(EcCurve::CURVE_25519)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ KeyFormat::PKCS8, ed25519_key));
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaSigningKey(EcCurve::CURVE_25519)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ KeyFormat::RAW, ed25519_pkcs8_key));
+}
+
+/*
+ * ImportKeyTest.Ed25519PurposeMismatch
+ *
+ * Verifies that importing an Ed25519 key pair with an invalid purpose fails.
+ */
+TEST_P(ImportKeyTest, Ed25519PurposeMismatch) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ // Can't have both SIGN and ATTEST_KEY
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaSigningKey(EcCurve::CURVE_25519)
+ .Authorization(TAG_PURPOSE, KeyPurpose::ATTEST_KEY)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ KeyFormat::RAW, ed25519_key));
+ // AGREE_KEY is for X25519 (but the difference can only be detected if the imported key
+ // is in PKCS#8 format and so includes an OID).
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaKey(EcCurve::CURVE_25519)
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .Digest(Digest::NONE)
+ .SetDefaultValidity(),
+ KeyFormat::PKCS8, ed25519_pkcs8_key));
+}
+
+/*
+ * ImportKeyTest.X25519RawSuccess
+ *
+ * Verifies that importing and using a raw X25519 private key works correctly.
+ */
+TEST_P(ImportKeyTest, X25519RawSuccess) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_EQ(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaKey(EcCurve::CURVE_25519)
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .SetDefaultValidity(),
+ KeyFormat::RAW, x25519_key));
+
+ CheckCryptoParam(TAG_ALGORITHM, Algorithm::EC);
+ CheckCryptoParam(TAG_EC_CURVE, EcCurve::CURVE_25519);
+ CheckOrigin();
+}
+
+/*
+ * ImportKeyTest.X25519Pkcs8Success
+ *
+ * Verifies that importing and using a PKCS#8-encoded X25519 private key works correctly.
+ */
+TEST_P(ImportKeyTest, X25519Pkcs8Success) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_EQ(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaKey(EcCurve::CURVE_25519)
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .SetDefaultValidity(),
+ KeyFormat::PKCS8, x25519_pkcs8_key));
+
+ CheckCryptoParam(TAG_ALGORITHM, Algorithm::EC);
+ CheckCryptoParam(TAG_EC_CURVE, EcCurve::CURVE_25519);
+ CheckOrigin();
+}
+
+/*
+ * ImportKeyTest.X25519CurveMismatch
+ *
+ * Verifies that importing an X25519 key with a curve that doesn't match the key fails in
+ * the correct way.
+ */
+TEST_P(ImportKeyTest, X25519CurveMismatch) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaKey(EcCurve::P_224 /* Doesn't match key */)
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .SetDefaultValidity(),
+ KeyFormat::RAW, x25519_key));
+}
+
+/*
+ * ImportKeyTest.X25519FormatMismatch
+ *
+ * Verifies that importing an X25519 key with an invalid format fails.
+ */
+TEST_P(ImportKeyTest, X25519FormatMismatch) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaKey(EcCurve::CURVE_25519)
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .SetDefaultValidity(),
+ KeyFormat::PKCS8, x25519_key));
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaKey(EcCurve::CURVE_25519)
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .SetDefaultValidity(),
+ KeyFormat::RAW, x25519_pkcs8_key));
+}
+
+/*
+ * ImportKeyTest.X25519PurposeMismatch
+ *
+ * Verifies that importing an X25519 key pair with an invalid purpose fails.
+ */
+TEST_P(ImportKeyTest, X25519PurposeMismatch) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaKey(EcCurve::CURVE_25519)
+ .Authorization(TAG_PURPOSE, KeyPurpose::ATTEST_KEY)
+ .SetDefaultValidity(),
+ KeyFormat::PKCS8, x25519_pkcs8_key));
+ ASSERT_NE(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .EcdsaSigningKey(EcCurve::CURVE_25519)
+ .SetDefaultValidity(),
+ KeyFormat::PKCS8, x25519_pkcs8_key));
+}
+
+/*
* ImportKeyTest.AesSuccess
*
* Verifies that importing and using an AES key works.
@@ -6660,8 +7270,6 @@
INSTANTIATE_KEYMINT_AIDL_TEST(TransportLimitTest);
-typedef KeyMintAidlTestBase KeyAgreementTest;
-
static int EcdhCurveToOpenSslCurveName(EcCurve curve) {
switch (curve) {
case EcCurve::P_224:
@@ -6677,10 +7285,108 @@
}
}
+class KeyAgreementTest : public KeyMintAidlTestBase {
+ protected:
+ void GenerateLocalEcKey(EcCurve localCurve, EVP_PKEY_Ptr* localPrivKey,
+ std::vector<uint8_t>* localPublicKey) {
+ // Generate EC key locally (with access to private key material)
+ if (localCurve == EcCurve::CURVE_25519) {
+ uint8_t privKeyData[32];
+ uint8_t pubKeyData[32];
+ X25519_keypair(pubKeyData, privKeyData);
+ *localPublicKey = vector<uint8_t>(pubKeyData, pubKeyData + 32);
+ *localPrivKey = EVP_PKEY_Ptr(EVP_PKEY_new_raw_private_key(
+ EVP_PKEY_X25519, nullptr, privKeyData, sizeof(privKeyData)));
+ } else {
+ auto ecKey = EC_KEY_Ptr(EC_KEY_new());
+ int curveName = EcdhCurveToOpenSslCurveName(localCurve);
+ auto group = EC_GROUP_Ptr(EC_GROUP_new_by_curve_name(curveName));
+ ASSERT_NE(group, nullptr);
+ ASSERT_EQ(EC_KEY_set_group(ecKey.get(), group.get()), 1);
+ ASSERT_EQ(EC_KEY_generate_key(ecKey.get()), 1);
+ *localPrivKey = EVP_PKEY_Ptr(EVP_PKEY_new());
+ ASSERT_EQ(EVP_PKEY_set1_EC_KEY(localPrivKey->get(), ecKey.get()), 1);
+
+ // Get encoded form of the public part of the locally generated key...
+ unsigned char* p = nullptr;
+ int localPublicKeySize = i2d_PUBKEY(localPrivKey->get(), &p);
+ ASSERT_GT(localPublicKeySize, 0);
+ *localPublicKey =
+ vector<uint8_t>(reinterpret_cast<const uint8_t*>(p),
+ reinterpret_cast<const uint8_t*>(p + localPublicKeySize));
+ OPENSSL_free(p);
+ }
+ }
+
+ void GenerateKeyMintEcKey(EcCurve curve, EVP_PKEY_Ptr* kmPubKey) {
+ vector<uint8_t> challenge = {0x41, 0x42};
+ ErrorCode result =
+ GenerateKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .Authorization(TAG_EC_CURVE, curve)
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .Authorization(TAG_ALGORITHM, Algorithm::EC)
+ .Authorization(TAG_ATTESTATION_APPLICATION_ID, {0x61, 0x62})
+ .Authorization(TAG_ATTESTATION_CHALLENGE, challenge)
+ .SetDefaultValidity());
+ ASSERT_EQ(ErrorCode::OK, result) << "Failed to generate key";
+ ASSERT_GT(cert_chain_.size(), 0);
+ X509_Ptr kmKeyCert(parse_cert_blob(cert_chain_[0].encodedCertificate));
+ ASSERT_NE(kmKeyCert, nullptr);
+ // Check that keyAgreement (bit 4) is set in KeyUsage
+ EXPECT_TRUE((X509_get_key_usage(kmKeyCert.get()) & X509v3_KU_KEY_AGREEMENT) != 0);
+ *kmPubKey = EVP_PKEY_Ptr(X509_get_pubkey(kmKeyCert.get()));
+ ASSERT_NE(*kmPubKey, nullptr);
+ if (dump_Attestations) {
+ for (size_t n = 0; n < cert_chain_.size(); n++) {
+ std::cout << bin2hex(cert_chain_[n].encodedCertificate) << std::endl;
+ }
+ }
+ }
+
+ void CheckAgreement(EVP_PKEY_Ptr kmPubKey, EVP_PKEY_Ptr localPrivKey,
+ const std::vector<uint8_t>& localPublicKey) {
+ ASSERT_EQ(ErrorCode::OK, Begin(KeyPurpose::AGREE_KEY, AuthorizationSetBuilder()));
+ string ZabFromKeyMintStr;
+ ASSERT_EQ(ErrorCode::OK,
+ Finish(string(localPublicKey.begin(), localPublicKey.end()), &ZabFromKeyMintStr));
+ vector<uint8_t> ZabFromKeyMint(ZabFromKeyMintStr.begin(), ZabFromKeyMintStr.end());
+ vector<uint8_t> ZabFromTest;
+
+ if (EVP_PKEY_id(kmPubKey.get()) == EVP_PKEY_X25519) {
+ size_t kmPubKeySize = 32;
+ uint8_t kmPubKeyData[32];
+ ASSERT_EQ(1, EVP_PKEY_get_raw_public_key(kmPubKey.get(), kmPubKeyData, &kmPubKeySize));
+ ASSERT_EQ(kmPubKeySize, 32);
+
+ uint8_t localPrivKeyData[32];
+ size_t localPrivKeySize = 32;
+ ASSERT_EQ(1, EVP_PKEY_get_raw_private_key(localPrivKey.get(), localPrivKeyData,
+ &localPrivKeySize));
+ ASSERT_EQ(localPrivKeySize, 32);
+
+ uint8_t sharedKey[32];
+ ASSERT_EQ(1, X25519(sharedKey, localPrivKeyData, kmPubKeyData));
+ ZabFromTest = std::vector<uint8_t>(sharedKey, sharedKey + 32);
+ } else {
+ // Perform local ECDH between the two keys so we can check if we get the same Zab..
+ auto ctx = EVP_PKEY_CTX_Ptr(EVP_PKEY_CTX_new(localPrivKey.get(), nullptr));
+ ASSERT_NE(ctx, nullptr);
+ ASSERT_EQ(EVP_PKEY_derive_init(ctx.get()), 1);
+ ASSERT_EQ(EVP_PKEY_derive_set_peer(ctx.get(), kmPubKey.get()), 1);
+ size_t ZabFromTestLen = 0;
+ ASSERT_EQ(EVP_PKEY_derive(ctx.get(), nullptr, &ZabFromTestLen), 1);
+ ZabFromTest.resize(ZabFromTestLen);
+ ASSERT_EQ(EVP_PKEY_derive(ctx.get(), ZabFromTest.data(), &ZabFromTestLen), 1);
+ }
+ EXPECT_EQ(ZabFromKeyMint, ZabFromTest);
+ }
+};
+
/*
* KeyAgreementTest.Ecdh
*
- * Verifies that ECDH works for all curves
+ * Verifies that ECDH works for all required curves
*/
TEST_P(KeyAgreementTest, Ecdh) {
// Because it's possible to use this API with keys on different curves, we
@@ -6694,49 +7400,13 @@
for (auto curve : ValidCurves()) {
for (auto localCurve : ValidCurves()) {
// Generate EC key locally (with access to private key material)
- auto ecKey = EC_KEY_Ptr(EC_KEY_new());
- int curveName = EcdhCurveToOpenSslCurveName(localCurve);
- auto group = EC_GROUP_Ptr(EC_GROUP_new_by_curve_name(curveName));
- ASSERT_NE(group, nullptr);
- ASSERT_EQ(EC_KEY_set_group(ecKey.get(), group.get()), 1);
- ASSERT_EQ(EC_KEY_generate_key(ecKey.get()), 1);
- auto pkey = EVP_PKEY_Ptr(EVP_PKEY_new());
- ASSERT_EQ(EVP_PKEY_set1_EC_KEY(pkey.get(), ecKey.get()), 1);
-
- // Get encoded form of the public part of the locally generated key...
- unsigned char* p = nullptr;
- int encodedPublicKeySize = i2d_PUBKEY(pkey.get(), &p);
- ASSERT_GT(encodedPublicKeySize, 0);
- vector<uint8_t> encodedPublicKey(
- reinterpret_cast<const uint8_t*>(p),
- reinterpret_cast<const uint8_t*>(p + encodedPublicKeySize));
- OPENSSL_free(p);
+ EVP_PKEY_Ptr localPrivKey;
+ vector<uint8_t> localPublicKey;
+ GenerateLocalEcKey(localCurve, &localPrivKey, &localPublicKey);
// Generate EC key in KeyMint (only access to public key material)
- vector<uint8_t> challenge = {0x41, 0x42};
- EXPECT_EQ(
- ErrorCode::OK,
- GenerateKey(AuthorizationSetBuilder()
- .Authorization(TAG_NO_AUTH_REQUIRED)
- .Authorization(TAG_EC_CURVE, curve)
- .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
- .Authorization(TAG_ALGORITHM, Algorithm::EC)
- .Authorization(TAG_ATTESTATION_APPLICATION_ID, {0x61, 0x62})
- .Authorization(TAG_ATTESTATION_CHALLENGE, challenge)
- .SetDefaultValidity()))
- << "Failed to generate key";
- ASSERT_GT(cert_chain_.size(), 0);
- X509_Ptr kmKeyCert(parse_cert_blob(cert_chain_[0].encodedCertificate));
- ASSERT_NE(kmKeyCert, nullptr);
- // Check that keyAgreement (bit 4) is set in KeyUsage
- EXPECT_TRUE((X509_get_key_usage(kmKeyCert.get()) & X509v3_KU_KEY_AGREEMENT) != 0);
- auto kmPkey = EVP_PKEY_Ptr(X509_get_pubkey(kmKeyCert.get()));
- ASSERT_NE(kmPkey, nullptr);
- if (dump_Attestations) {
- for (size_t n = 0; n < cert_chain_.size(); n++) {
- std::cout << bin2hex(cert_chain_[n].encodedCertificate) << std::endl;
- }
- }
+ EVP_PKEY_Ptr kmPubKey;
+ GenerateKeyMintEcKey(curve, &kmPubKey);
// Now that we have the two keys, we ask KeyMint to perform ECDH...
if (curve != localCurve) {
@@ -6745,30 +7415,12 @@
EXPECT_EQ(ErrorCode::OK, Begin(KeyPurpose::AGREE_KEY, AuthorizationSetBuilder()));
string ZabFromKeyMintStr;
EXPECT_EQ(ErrorCode::INVALID_ARGUMENT,
- Finish(string(encodedPublicKey.begin(), encodedPublicKey.end()),
+ Finish(string(localPublicKey.begin(), localPublicKey.end()),
&ZabFromKeyMintStr));
} else {
// Otherwise if the keys are using the same curve, it should work.
- EXPECT_EQ(ErrorCode::OK, Begin(KeyPurpose::AGREE_KEY, AuthorizationSetBuilder()));
- string ZabFromKeyMintStr;
- EXPECT_EQ(ErrorCode::OK,
- Finish(string(encodedPublicKey.begin(), encodedPublicKey.end()),
- &ZabFromKeyMintStr));
- vector<uint8_t> ZabFromKeyMint(ZabFromKeyMintStr.begin(), ZabFromKeyMintStr.end());
-
- // Perform local ECDH between the two keys so we can check if we get the same Zab..
- auto ctx = EVP_PKEY_CTX_Ptr(EVP_PKEY_CTX_new(pkey.get(), nullptr));
- ASSERT_NE(ctx, nullptr);
- ASSERT_EQ(EVP_PKEY_derive_init(ctx.get()), 1);
- ASSERT_EQ(EVP_PKEY_derive_set_peer(ctx.get(), kmPkey.get()), 1);
- size_t ZabFromTestLen = 0;
- ASSERT_EQ(EVP_PKEY_derive(ctx.get(), nullptr, &ZabFromTestLen), 1);
- vector<uint8_t> ZabFromTest;
- ZabFromTest.resize(ZabFromTestLen);
- ASSERT_EQ(EVP_PKEY_derive(ctx.get(), ZabFromTest.data(), &ZabFromTestLen), 1);
-
- EXPECT_EQ(ZabFromKeyMint, ZabFromTest);
+ CheckAgreement(std::move(kmPubKey), std::move(localPrivKey), localPublicKey);
}
CheckedDeleteKey();
@@ -6776,6 +7428,140 @@
}
}
+/*
+ * KeyAgreementTest.EcdhCurve25519
+ *
+ * Verifies that ECDH works for curve25519. This is also covered by the general
+ * KeyAgreementTest.Ecdh case, but is pulled out separately here because this curve was added after
+ * KeyMint 1.0.
+ */
+TEST_P(KeyAgreementTest, EcdhCurve25519) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ // Generate EC key in KeyMint (only access to public key material)
+ EcCurve curve = EcCurve::CURVE_25519;
+ EVP_PKEY_Ptr kmPubKey = nullptr;
+ GenerateKeyMintEcKey(curve, &kmPubKey);
+
+ // Generate EC key on same curve locally (with access to private key material).
+ EVP_PKEY_Ptr privKey;
+ vector<uint8_t> encodedPublicKey;
+ GenerateLocalEcKey(curve, &privKey, &encodedPublicKey);
+
+ // Agree on a key between local and KeyMint and check it.
+ CheckAgreement(std::move(kmPubKey), std::move(privKey), encodedPublicKey);
+
+ CheckedDeleteKey();
+}
+
+/*
+ * KeyAgreementTest.EcdhCurve25519Imported
+ *
+ * Verifies that ECDH works for an imported curve25519 key.
+ */
+TEST_P(KeyAgreementTest, EcdhCurve25519Imported) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ // Import x25519 key into KeyMint.
+ EcCurve curve = EcCurve::CURVE_25519;
+ ASSERT_EQ(ErrorCode::OK, ImportKey(AuthorizationSetBuilder()
+ .Authorization(TAG_NO_AUTH_REQUIRED)
+ .EcdsaKey(EcCurve::CURVE_25519)
+ .Authorization(TAG_PURPOSE, KeyPurpose::AGREE_KEY)
+ .SetDefaultValidity(),
+ KeyFormat::PKCS8, x25519_pkcs8_key));
+ ASSERT_GT(cert_chain_.size(), 0);
+ X509_Ptr kmKeyCert(parse_cert_blob(cert_chain_[0].encodedCertificate));
+ ASSERT_NE(kmKeyCert, nullptr);
+ EVP_PKEY_Ptr kmPubKey(X509_get_pubkey(kmKeyCert.get()));
+ ASSERT_NE(kmPubKey.get(), nullptr);
+
+ // Expect the import to emit corresponding public key data.
+ size_t kmPubKeySize = 32;
+ uint8_t kmPubKeyData[32];
+ ASSERT_EQ(1, EVP_PKEY_get_raw_public_key(kmPubKey.get(), kmPubKeyData, &kmPubKeySize));
+ ASSERT_EQ(kmPubKeySize, 32);
+ EXPECT_EQ(bin2hex(std::vector<uint8_t>(kmPubKeyData, kmPubKeyData + 32)),
+ bin2hex(std::vector<uint8_t>(x25519_pubkey.begin(), x25519_pubkey.end())));
+
+ // Generate EC key on same curve locally (with access to private key material).
+ EVP_PKEY_Ptr privKey;
+ vector<uint8_t> encodedPublicKey;
+ GenerateLocalEcKey(curve, &privKey, &encodedPublicKey);
+
+ // Agree on a key between local and KeyMint and check it.
+ CheckAgreement(std::move(kmPubKey), std::move(privKey), encodedPublicKey);
+
+ CheckedDeleteKey();
+}
+
+/*
+ * KeyAgreementTest.EcdhCurve25519InvalidSize
+ *
+ * Verifies that ECDH fails for curve25519 if the wrong size of public key is provided.
+ */
+TEST_P(KeyAgreementTest, EcdhCurve25519InvalidSize) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ // Generate EC key in KeyMint (only access to public key material)
+ EcCurve curve = EcCurve::CURVE_25519;
+ EVP_PKEY_Ptr kmPubKey = nullptr;
+ GenerateKeyMintEcKey(curve, &kmPubKey);
+
+ // Generate EC key on same curve locally (with access to private key material).
+ EVP_PKEY_Ptr privKey;
+ vector<uint8_t> encodedPublicKey;
+ GenerateLocalEcKey(curve, &privKey, &encodedPublicKey);
+
+ ASSERT_EQ(ErrorCode::OK, Begin(KeyPurpose::AGREE_KEY, AuthorizationSetBuilder()));
+ string ZabFromKeyMintStr;
+ // Send in an incomplete public key.
+ ASSERT_NE(ErrorCode::OK, Finish(string(encodedPublicKey.begin(), encodedPublicKey.end() - 1),
+ &ZabFromKeyMintStr));
+
+ CheckedDeleteKey();
+}
+
+/*
+ * KeyAgreementTest.EcdhCurve25519Mismatch
+ *
+ * Verifies that ECDH fails between curve25519 and other curves.
+ */
+TEST_P(KeyAgreementTest, EcdhCurve25519Mismatch) {
+ if (!Curve25519Supported()) {
+ GTEST_SKIP() << "Test not applicable to device that is not expected to support curve 25519";
+ }
+
+ // Generate EC key in KeyMint (only access to public key material)
+ EcCurve curve = EcCurve::CURVE_25519;
+ EVP_PKEY_Ptr kmPubKey = nullptr;
+ GenerateKeyMintEcKey(curve, &kmPubKey);
+
+ for (auto localCurve : ValidCurves()) {
+ if (localCurve == curve) {
+ continue;
+ }
+ // Generate EC key on a different curve locally (with access to private key material).
+ EVP_PKEY_Ptr privKey;
+ vector<uint8_t> encodedPublicKey;
+ GenerateLocalEcKey(localCurve, &privKey, &encodedPublicKey);
+
+ EXPECT_EQ(ErrorCode::OK, Begin(KeyPurpose::AGREE_KEY, AuthorizationSetBuilder()));
+ string ZabFromKeyMintStr;
+ EXPECT_EQ(ErrorCode::INVALID_ARGUMENT,
+ Finish(string(encodedPublicKey.begin(), encodedPublicKey.end()),
+ &ZabFromKeyMintStr));
+ }
+
+ CheckedDeleteKey();
+}
+
INSTANTIATE_KEYMINT_AIDL_TEST(KeyAgreementTest);
using DestroyAttestationIdsTest = KeyMintAidlTestBase;