Merge "Properly handle memory layout of input buffer for test camera" into main
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 1df5727..02367dc 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -53,6 +53,15 @@
}
flag {
+ name: "set_stream_volume_order"
+ namespace: "media_audio"
+ description:
+      "Fix race condition by adjusting the order when "
+      "setStreamVolume calls into the BT stack"
+ bug: "329202581"
+}
+
+flag {
name: "spatializer_offload"
namespace: "media_audio"
description: "Enable spatializer offload"
diff --git a/media/codec2/components/aom/C2SoftAomEnc.cpp b/media/codec2/components/aom/C2SoftAomEnc.cpp
index 128e16e..256bcb8 100644
--- a/media/codec2/components/aom/C2SoftAomEnc.cpp
+++ b/media/codec2/components/aom/C2SoftAomEnc.cpp
@@ -50,11 +50,13 @@
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
+ // Odd dimension support in encoders requires Android V and above
+ size_t stepSize = isAtLeastV() ? 1 : 2;
addParameter(DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 2048, stepSize),
+ C2F(mSize, height).inRange(2, 2048, stepSize),
})
.withSetter(SizeSetter)
.build());
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
index 76680a3..4ec26d6 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.cpp
@@ -243,10 +243,17 @@
.build());
addParameter(
+ DefineParam(mLowLatencyMode, C2_PARAMKEY_LOW_LATENCY_MODE)
+ .withDefault(new C2GlobalLowLatencyModeTuning(0))
+ .withFields({C2F(mLowLatencyMode, value).oneOf({0,1})})
+ .withSetter(Setter<decltype(*mLowLatencyMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
.withDefault(new C2PortActualDelayTuning::output(kOutputDelay))
.withFields({C2F(mActualOutputDelay, value).inRange(0, kOutputDelay)})
- .withSetter(Setter<decltype(*mActualOutputDelay)>::StrictValueWithNoDeps)
+ .withSetter(ActualOutputDelaySetter, mLowLatencyMode)
.build());
}
@@ -365,6 +372,10 @@
return mPixelFormat;
}
+ std::shared_ptr<C2PortActualDelayTuning::output> getActualOutputDelay_l() const {
+ return mActualOutputDelay;
+ }
+
static C2R HdrStaticInfoSetter(bool mayBlock, C2P<C2StreamHdrStaticInfo::output>& me) {
(void)mayBlock;
if (me.v.mastering.red.x > 1) {
@@ -406,6 +417,13 @@
return C2R::Ok();
}
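+    // When low-latency mode is enabled the output delay collapses to a single frame;
+    // otherwise the default kOutputDelay is used.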
+ static C2R ActualOutputDelaySetter(bool mayBlock, C2P<C2PortActualDelayTuning::output>& me,
+ const C2P<C2GlobalLowLatencyModeTuning>& lowLatencyMode) {
+ (void)mayBlock;
+ me.set().value = lowLatencyMode.v.value ? 1 : kOutputDelay;
+ return C2R::Ok();
+ }
+
private:
std::shared_ptr<C2StreamProfileLevelInfo::input> mProfileLevel;
std::shared_ptr<C2StreamPictureSizeInfo::output> mSize;
@@ -419,6 +437,7 @@
std::shared_ptr<C2StreamHdr10PlusInfo::input> mHdr10PlusInfoInput;
std::shared_ptr<C2StreamHdr10PlusInfo::output> mHdr10PlusInfoOutput;
std::shared_ptr<C2StreamHdrStaticInfo::output> mHdrStaticInfo;
+ std::shared_ptr<C2GlobalLowLatencyModeTuning> mLowLatencyMode;
};
C2SoftDav1dDec::C2SoftDav1dDec(const char* name, c2_node_id_t id,
@@ -516,6 +535,7 @@
{
IntfImpl::Lock lock = mIntf->lock();
mPixelFormatInfo = mIntf->getPixelFormat_l();
+ mActualOutputDelayInfo = mIntf->getActualOutputDelay_l();
}
const char* version = dav1d_version();
@@ -529,7 +549,7 @@
android::base::GetIntProperty(NUM_THREADS_DAV1D_PROPERTY, NUM_THREADS_DAV1D_DEFAULT);
if (numThreads > 0) lib_settings.n_threads = numThreads;
- lib_settings.max_frame_delay = kOutputDelay;
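+    // The actual output delay already reflects low-latency mode (a single frame) or the
+    // default kOutputDelay, so use it to cap dav1d's internal frame delay.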
+ lib_settings.max_frame_delay = mActualOutputDelayInfo->value;
int res = 0;
if ((res = dav1d_open(&mDav1dCtx, &lib_settings))) {
diff --git a/media/codec2/components/dav1d/C2SoftDav1dDec.h b/media/codec2/components/dav1d/C2SoftDav1dDec.h
index 5d2a725..6008325 100644
--- a/media/codec2/components/dav1d/C2SoftDav1dDec.h
+++ b/media/codec2/components/dav1d/C2SoftDav1dDec.h
@@ -62,6 +62,7 @@
// configurations used by component in process
// (TODO: keep this in intf but make them internal only)
std::shared_ptr<C2StreamPixelFormatInfo::output> mPixelFormatInfo;
+ std::shared_ptr<C2PortActualDelayTuning::output> mActualOutputDelayInfo;
uint32_t mHalPixelFormat;
uint32_t mWidth;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 0384e2e..2409894 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -22,6 +22,7 @@
#include <media/hardware/VideoAPI.h>
#include <Codec2BufferUtils.h>
+#include <Codec2CommonUtils.h>
#include <C2Debug.h>
#include "C2SoftVpxEnc.h"
@@ -63,12 +64,14 @@
0u, (uint64_t)C2MemoryUsage::CPU_READ))
.build());
+ // Odd dimension support in encoders requires Android V and above
+ size_t stepSize = isAtLeastV() ? 1 : 2;
addParameter(
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 2048, stepSize),
+ C2F(mSize, height).inRange(2, 2048, stepSize),
})
.withSetter(SizeSetter)
.build());
diff --git a/media/codec2/hal/aidl/ComponentInterface.cpp b/media/codec2/hal/aidl/ComponentInterface.cpp
index 8ae9fa8..8c7a986 100644
--- a/media/codec2/hal/aidl/ComponentInterface.cpp
+++ b/media/codec2/hal/aidl/ComponentInterface.cpp
@@ -79,6 +79,26 @@
}
c2_status_t err2 = C2_OK;
if (paramsToLargeFrameIntf.size() > 0) {
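+            // For encoders, raise the configured large-frame output maxSize to at least the
+            // component's maximum input buffer size.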
+ C2ComponentKindSetting kind;
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ c2_status_t err = mIntf->query_vb(
+ {&kind, &maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if ((err == C2_OK) && (kind.value == C2Component::KIND_ENCODER)) {
+ for (int i = 0 ; i < paramsToLargeFrameIntf.size(); i++) {
+ if (paramsToLargeFrameIntf[i]->index() ==
+ C2LargeFrame::output::PARAM_TYPE) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(
+ paramsToLargeFrameIntf[i]);
+                    // This assumes a worst-case compression ratio of 1:1.
+                    // The encoder should never produce more output than the
+                    // input provided to it in a single call.
+ if (lfp && (lfp->maxSize < maxInputSize.value)) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ break;
+ }
+ }
+ }
err2 = mMultiAccessUnitIntf->config(
paramsToLargeFrameIntf, mayBlock, failures);
}
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index b3ae514..1d2794e 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -1868,6 +1868,10 @@
return nullptr;
}
+bool Codec2Client::IsAidlSelected() {
+ return c2_aidl::utils::IsSelected();
+}
+
// Codec2Client::Interface
Codec2Client::Interface::Interface(const sp<HidlBase>& base)
: Configurable{
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 3b7f7a6..5c75a47 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -270,6 +270,9 @@
static std::shared_ptr<InputSurface> CreateInputSurface(
char const* serviceName = nullptr);
+ // Whether AIDL is selected.
+ static bool IsAidlSelected();
+
// base and/or configurable cannot be null.
Codec2Client(
sp<HidlBase> const& base,
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
index 41a8904..08f1ae2 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentInterface.cpp
@@ -78,6 +78,26 @@
}
c2_status_t err2 = C2_OK;
if (paramsToLargeFrameIntf.size() > 0) {
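+            // For encoders, raise the configured large-frame output maxSize to at least the
+            // component's maximum input buffer size.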
+ C2ComponentKindSetting kind;
+ C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+ c2_status_t err = mIntf->query_vb(
+ {&kind, &maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+ if ((err == C2_OK) && (kind.value == C2Component::KIND_ENCODER)) {
+ for (int i = 0 ; i < paramsToLargeFrameIntf.size(); i++) {
+ if (paramsToLargeFrameIntf[i]->index() ==
+ C2LargeFrame::output::PARAM_TYPE) {
+ C2LargeFrame::output *lfp = C2LargeFrame::output::From(
+ paramsToLargeFrameIntf[i]);
+                    // This assumes a worst-case compression ratio of 1:1.
+                    // The encoder should never produce more output than the
+                    // input provided to it in a single call.
+ if (lfp && (lfp->maxSize < maxInputSize.value)) {
+ lfp->maxSize = maxInputSize.value;
+ }
+ break;
+ }
+ }
+ }
err2 = mMultiAccessUnitIntf->config(
paramsToLargeFrameIntf, mayBlock, failures);
}
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 4de2347..362373e 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -19,7 +19,9 @@
export_include_dirs: ["include"],
srcs: [
+ "C2AidlNode.cpp",
"C2OMXNode.cpp",
+ "C2NodeImpl.cpp",
"CCodec.cpp",
"CCodecBufferChannel.cpp",
"CCodecBuffers.cpp",
@@ -54,8 +56,11 @@
"android.hardware.drm@1.0",
"android.hardware.media.c2@1.0",
"android.hardware.media.omx@1.0",
+ "android.hardware.graphics.common-V5-ndk",
+ "graphicbuffersource-aidl-ndk",
"libbase",
"libbinder",
+ "libbinder_ndk",
"libcodec2",
"libcodec2_client",
"libcodec2_vndk",
@@ -67,9 +72,11 @@
"liblog",
"libmedia_codeclist",
"libmedia_omx",
+ "libnativewindow",
"libsfplugin_ccodec_utils",
"libstagefright_bufferqueue_helper",
"libstagefright_codecbase",
+ "libstagefright_graphicbuffersource_aidl",
"libstagefright_foundation",
"libstagefright_omx",
"libstagefright_surface_utils",
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
new file mode 100644
index 0000000..c20f50d
--- /dev/null
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2AidlNode"
+#include <log/log.h>
+#include <private/android/AHardwareBufferHelpers.h>
+
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+
+#include "C2NodeImpl.h"
+#include "C2AidlNode.h"
+
+namespace android {
+
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+
+using ::android::media::CommandStateSet;
+using ::android::media::NodeStatusLoaded;
+
+// Conversion
+using ::android::media::aidl_conversion::toAidlStatus;
+
+C2AidlNode::C2AidlNode(const std::shared_ptr<Codec2Client::Component> &comp)
+ : mImpl(new C2NodeImpl(comp, true)) {}
+
+// aidl ndk interfaces
+::ndk::ScopedAStatus C2AidlNode::freeNode() {
+ return toAidlStatus(mImpl->freeNode());
+}
+
+::ndk::ScopedAStatus C2AidlNode::sendCommand(int32_t cmd, int32_t param) {
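+    // Mirror C2OMXNode::sendCommand: reset the first input frame on the transition to the
+    // Loaded state so that, if this node is recycled, the adjusted timestamp does not become
+    // negative.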
+ if (cmd == CommandStateSet && param == NodeStatusLoaded) {
+ mImpl->onFirstInputFrame();
+ }
+ return toAidlStatus(ERROR_UNSUPPORTED);
+}
+
+::ndk::ScopedAStatus C2AidlNode::getConsumerUsage(int64_t* _aidl_return) {
+ uint64_t usage;
+ mImpl->getConsumerUsageBits(&usage);
+ *_aidl_return = usage;
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::getInputBufferParams(IAidlNode::InputBufferParams* _aidl_return) {
+ mImpl->getInputBufferParams(_aidl_return);
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setConsumerUsage(int64_t usage) {
+ mImpl->setConsumerUsageBits(static_cast<uint64_t>(usage));
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setAdjustTimestampGapUs(int32_t gapUs) {
+ mImpl->setAdjustTimestampGapUs(gapUs);
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus C2AidlNode::setInputSurface(
+ const std::shared_ptr<IAidlBufferSource>& bufferSource) {
+ return toAidlStatus(mImpl->setAidlInputSurface(bufferSource));
+}
+
+::ndk::ScopedAStatus C2AidlNode::submitBuffer(
+ int32_t buffer, const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+ int32_t flags, int64_t timestamp, const ::ndk::ScopedFileDescriptor& fence) {
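+    // Convert the AIDL HardwareBuffer to a GraphicBuffer; the duped fence fd is handed off to
+    // C2NodeImpl::submitBuffer, which takes ownership of it.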
+ sp<GraphicBuffer> gBuf;
+ AHardwareBuffer *ahwb = hBuffer.get();
+ if (ahwb) {
+ gBuf = AHardwareBuffer_to_GraphicBuffer(ahwb);
+ }
+ return toAidlStatus(mImpl->submitBuffer(
+ buffer, gBuf, flags, timestamp, ::dup(fence.get())));
+}
+
+::ndk::ScopedAStatus C2AidlNode::onDataSpaceChanged(
+ int32_t dataSpace,
+ int32_t aspects,
+ int32_t pixelFormat) {
+    // NOTE: legacy code passed aspects, but they were never used.
+ (void)aspects;
+
+ return toAidlStatus(mImpl->onDataspaceChanged(
+ static_cast<uint32_t>(dataSpace),
+ static_cast<uint32_t>(pixelFormat)));
+}
+
+// cpp interface
+
+std::shared_ptr<IAidlBufferSource> C2AidlNode::getSource() {
+ return mImpl->getAidlSource();
+}
+
+void C2AidlNode::setFrameSize(uint32_t width, uint32_t height) {
+ return mImpl->setFrameSize(width, height);
+}
+
+void C2AidlNode::onInputBufferDone(c2_cntr64_t index) {
+ return mImpl->onInputBufferDone(index);
+}
+
+android_dataspace C2AidlNode::getDataspace() {
+ return mImpl->getDataspace();
+}
+
+uint32_t C2AidlNode::getPixelFormat() {
+ return mImpl->getPixelFormat();
+}
+
+void C2AidlNode::setPriority(int priority) {
+ return mImpl->setPriority(priority);
+}
+
+} // namespace android
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
new file mode 100644
index 0000000..a7c328e
--- /dev/null
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <aidl/android/media/BnAidlNode.h>
+#include <codec2/hidl/client.h>
+
+namespace android {
+
+struct C2NodeImpl;
+
+/**
+ * Thin Codec2 AIDL encoder HAL wrapper for InputSurface
+ */
+class C2AidlNode : public ::aidl::android::media::BnAidlNode {
+public:
+ explicit C2AidlNode(const std::shared_ptr<Codec2Client::Component> &comp);
+ ~C2AidlNode() override = default;
+
+ // IAidlNode
+ ::ndk::ScopedAStatus freeNode() override;
+
+ ::ndk::ScopedAStatus sendCommand(int32_t cmd, int32_t param) override;
+
+ ::ndk::ScopedAStatus getConsumerUsage(int64_t *_aidl_return) override;
+
+ ::ndk::ScopedAStatus getInputBufferParams(
+ ::aidl::android::media::IAidlNode::InputBufferParams *_aidl_return) override;
+
+ ::ndk::ScopedAStatus setConsumerUsage(int64_t usage) override;
+
+ ::ndk::ScopedAStatus setAdjustTimestampGapUs(int32_t gapUs) override;
+
+ ::ndk::ScopedAStatus setInputSurface(
+ const std::shared_ptr<::aidl::android::media::IAidlBufferSource>&
+ bufferSource) override;
+
+ ::ndk::ScopedAStatus submitBuffer(
+ int32_t buffer,
+ const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+ int32_t flags,
+ int64_t timestampUs,
+ const ::ndk::ScopedFileDescriptor& fence) override;
+
+ ::ndk::ScopedAStatus onDataSpaceChanged(
+ int dataSpace, int aspects, int pixelFormat) override;
+
+ /**
+ * Returns underlying IAidlBufferSource object.
+ */
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> getSource();
+
+ /**
+ * Configure the frame size.
+ */
+ void setFrameSize(uint32_t width, uint32_t height);
+
+ /**
+ * Clean up work item reference.
+ *
+ * \param index input work index
+ */
+ void onInputBufferDone(c2_cntr64_t index);
+
+ /**
+ * Returns dataspace information from GraphicBufferSource.
+ */
+ android_dataspace getDataspace();
+
+ /**
+     * Returns pixel format information from GraphicBufferSource.
+ */
+ uint32_t getPixelFormat();
+
+ /**
+ * Sets priority of the queue thread.
+ */
+ void setPriority(int priority);
+
+private:
+ std::shared_ptr<C2NodeImpl> mImpl;
+};
+
+} // namespace android
+
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
new file mode 100644
index 0000000..6f53e0f
--- /dev/null
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -0,0 +1,451 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "C2NodeImpl"
+#include <log/log.h>
+
+#include <C2AllocatorGralloc.h>
+#include <C2BlockInternal.h>
+#include <C2Component.h>
+#include <C2Config.h>
+#include <C2Debug.h>
+#include <C2PlatformSupport.h>
+
+#include <android/fdsan.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/Errors.h>
+#include <utils/Thread.h>
+
+#include "utils/Codec2Mapper.h"
+#include "C2NodeImpl.h"
+#include "Codec2Buffer.h"
+
+namespace android {
+
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+
+using ::android::media::BUFFERFLAG_EOS;
+
+namespace {
+
+class Buffer2D : public C2Buffer {
+public:
+ explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
+};
+
+} // namespace
+
+class C2NodeImpl::QueueThread : public Thread {
+public:
+ QueueThread() : Thread(false) {}
+ ~QueueThread() override = default;
+ void queue(
+ const std::shared_ptr<Codec2Client::Component> &comp,
+ int fenceFd,
+ std::unique_ptr<C2Work> &&work,
+ android::base::unique_fd &&fd0,
+ android::base::unique_fd &&fd1) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ auto it = jobs->queues.try_emplace(comp, comp).first;
+ it->second.workList.emplace_back(
+ std::move(work), fenceFd, std::move(fd0), std::move(fd1));
+ jobs->cond.broadcast();
+ }
+
+ void setDataspace(android_dataspace dataspace) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ ColorUtils::convertDataSpaceToV0(dataspace);
+ jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
+ int32_t standard;
+ int32_t transfer;
+ int32_t range;
+ ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
+ std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
+ std::make_unique<C2StreamColorAspectsInfo::input>(0u);
+ if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
+ && C2Mapper::map(transfer, &colorAspects->transfer)
+ && C2Mapper::map(range, &colorAspects->range)) {
+ jobs->configUpdate.push_back(std::move(colorAspects));
+ }
+ }
+
+ void setPriority(int priority) {
+ androidSetThreadPriority(getTid(), priority);
+ }
+
+protected:
+ bool threadLoop() override {
+ constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000; // 10ms
+ constexpr nsecs_t kWaitNs = kIntervalNs * 2;
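+        // Two passes: queue any pending work immediately; if nothing was queued, wait up to
+        // kWaitNs for new work and try once more. Work for each component is batched and
+        // queued at most once per kIntervalNs, after waiting on its input fences.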
+ for (int i = 0; i < 2; ++i) {
+ Mutexed<Jobs>::Locked jobs(mJobs);
+ nsecs_t nowNs = systemTime();
+ bool queued = false;
+ for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
+ Queue &queue = it->second;
+ if (queue.workList.empty()
+ || (queue.lastQueuedTimestampNs != 0 &&
+ nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
+ ++it;
+ continue;
+ }
+ std::shared_ptr<Codec2Client::Component> comp = queue.component.lock();
+ if (!comp) {
+ it = jobs->queues.erase(it);
+ continue;
+ }
+ std::list<std::unique_ptr<C2Work>> items;
+ std::vector<int> fenceFds;
+ std::vector<android::base::unique_fd> uniqueFds;
+ while (!queue.workList.empty()) {
+ items.push_back(std::move(queue.workList.front().work));
+ fenceFds.push_back(queue.workList.front().fenceFd);
+ uniqueFds.push_back(std::move(queue.workList.front().fd0));
+ uniqueFds.push_back(std::move(queue.workList.front().fd1));
+ queue.workList.pop_front();
+ }
+ for (const std::unique_ptr<C2Param> ¶m : jobs->configUpdate) {
+ items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
+ }
+
+ jobs.unlock();
+ for (int fenceFd : fenceFds) {
+ sp<Fence> fence(new Fence(fenceFd));
+ fence->waitForever(LOG_TAG);
+ }
+ queue.lastQueuedTimestampNs = nowNs;
+ comp->queue(&items);
+ for (android::base::unique_fd &ufd : uniqueFds) {
+ (void)ufd.release();
+ }
+ jobs.lock();
+
+ it = jobs->queues.upper_bound(comp);
+ queued = true;
+ }
+ if (queued) {
+ jobs->configUpdate.clear();
+ return true;
+ }
+ if (i == 0) {
+ jobs.waitForConditionRelative(jobs->cond, kWaitNs);
+ }
+ }
+ return true;
+ }
+
+private:
+ struct WorkFence {
+ WorkFence(std::unique_ptr<C2Work> &&w, int fd) : work(std::move(w)), fenceFd(fd) {}
+
+ WorkFence(
+ std::unique_ptr<C2Work> &&w,
+ int fd,
+ android::base::unique_fd &&uniqueFd0,
+ android::base::unique_fd &&uniqueFd1)
+ : work(std::move(w)),
+ fenceFd(fd),
+ fd0(std::move(uniqueFd0)),
+ fd1(std::move(uniqueFd1)) {}
+
+ std::unique_ptr<C2Work> work;
+ int fenceFd;
+ android::base::unique_fd fd0;
+ android::base::unique_fd fd1;
+ };
+ struct Queue {
+ Queue(const std::shared_ptr<Codec2Client::Component> &comp)
+ : component(comp), lastQueuedTimestampNs(0) {}
+ Queue(const Queue &) = delete;
+ Queue &operator =(const Queue &) = delete;
+
+ std::weak_ptr<Codec2Client::Component> component;
+ std::list<WorkFence> workList;
+ nsecs_t lastQueuedTimestampNs;
+ };
+ struct Jobs {
+ std::map<std::weak_ptr<Codec2Client::Component>,
+ Queue,
+ std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
+ std::vector<std::unique_ptr<C2Param>> configUpdate;
+ Condition cond;
+ };
+ Mutexed<Jobs> mJobs;
+};
+
+C2NodeImpl::C2NodeImpl(const std::shared_ptr<Codec2Client::Component> &comp, bool aidl)
+ : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
+ mAdjustTimestampGapUs(0), mFirstInputFrame(true),
+ mQueueThread(new QueueThread), mAidlHal(aidl) {
+ android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
+ mQueueThread->run("C2NodeImpl", PRIORITY_AUDIO);
+
+ android_dataspace ds = HAL_DATASPACE_UNKNOWN;
+ mDataspace.lock().set(ds);
+ uint32_t pf = PIXEL_FORMAT_UNKNOWN;
+ mPixelFormat.lock().set(pf);
+}
+
+C2NodeImpl::~C2NodeImpl() {
+}
+
+status_t C2NodeImpl::freeNode() {
+ mComp.reset();
+ android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE);
+ return mQueueThread->requestExitAndWait();
+}
+
+void C2NodeImpl::onFirstInputFrame() {
+ mFirstInputFrame = true;
+}
+
+void C2NodeImpl::getConsumerUsageBits(uint64_t *usage) {
+ *usage = mUsage;
+}
+
+void C2NodeImpl::getInputBufferParams(IAidlNode::InputBufferParams *params) {
+ params->bufferCountActual = 16;
+
+    // WORKAROUND: having more slots improves performance while consuming
+    // more memory. This is a temporary workaround to reduce memory for
+    // the larger-than-4K scenario.
+ if (mWidth * mHeight > 4096 * 2340) {
+ std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+ C2PortActualDelayTuning::input inputDelay(0);
+ C2ActualPipelineDelayTuning pipelineDelay(0);
+ c2_status_t c2err = C2_NOT_FOUND;
+ if (comp) {
+ c2err = comp->query(
+ {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
+ }
+ if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
+ params->bufferCountActual = 4;
+ params->bufferCountActual += (inputDelay ? inputDelay.value : 0u);
+ params->bufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
+ }
+ }
+
+ params->frameWidth = mWidth;
+ params->frameHeight = mHeight;
+}
+
+void C2NodeImpl::setConsumerUsageBits(uint64_t usage) {
+ mUsage = usage;
+}
+
+void C2NodeImpl::setAdjustTimestampGapUs(int32_t gapUs) {
+ mAdjustTimestampGapUs = gapUs;
+}
+
+status_t C2NodeImpl::setInputSurface(const sp<IOMXBufferSource> &bufferSource) {
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2PlatformAllocatorStore::GRALLOC,
+ &mAllocator);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ CHECK(!mAidlHal);
+ mBufferSource = bufferSource;
+ return OK;
+}
+
+status_t C2NodeImpl::setAidlInputSurface(
+ const std::shared_ptr<IAidlBufferSource> &aidlBufferSource) {
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2PlatformAllocatorStore::GRALLOC,
+ &mAllocator);
+ if (err != OK) {
+ return UNKNOWN_ERROR;
+ }
+ CHECK(mAidlHal);
+ mAidlBufferSource = aidlBufferSource;
+ return OK;
+}
+
+status_t C2NodeImpl::submitBuffer(
+ uint32_t buffer, const sp<GraphicBuffer> &graphicBuffer,
+ uint32_t flags, int64_t timestamp, int fenceFd) {
+ std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
+ if (!comp) {
+ return NO_INIT;
+ }
+
+ uint32_t c2Flags = (flags & BUFFERFLAG_EOS)
+ ? C2FrameData::FLAG_END_OF_STREAM : 0;
+ std::shared_ptr<C2GraphicBlock> block;
+
+ android::base::unique_fd fd0, fd1;
+ C2Handle *handle = nullptr;
+ if (graphicBuffer) {
+ std::shared_ptr<C2GraphicAllocation> alloc;
+ handle = WrapNativeCodec2GrallocHandle(
+ graphicBuffer->handle,
+ graphicBuffer->width,
+ graphicBuffer->height,
+ graphicBuffer->format,
+ graphicBuffer->usage,
+ graphicBuffer->stride);
+ if (handle != nullptr) {
+            // unique_fd takes ownership of the fds; we'll get a warning if these
+            // fds get closed by somebody else. Ownership will be released before
+            // we return, so that the fds get closed as usual when this function
+            // goes out of scope (when both items and block are gone).
+ native_handle_t *nativeHandle = reinterpret_cast<native_handle_t*>(handle);
+ fd0.reset(nativeHandle->numFds > 0 ? nativeHandle->data[0] : -1);
+ fd1.reset(nativeHandle->numFds > 1 ? nativeHandle->data[1] : -1);
+ }
+ c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
+ if (err != OK) {
+ (void)fd0.release();
+ (void)fd1.release();
+ native_handle_close(handle);
+ native_handle_delete(handle);
+ return UNKNOWN_ERROR;
+ }
+ block = _C2BlockFactory::CreateGraphicBlock(alloc);
+ } else if (!(flags & BUFFERFLAG_EOS)) {
+ return BAD_VALUE;
+ }
+
+ std::unique_ptr<C2Work> work(new C2Work);
+ work->input.flags = (C2FrameData::flags_t)c2Flags;
+ work->input.ordinal.timestamp = timestamp;
+
+ // WORKAROUND: adjust timestamp based on gapUs
+ {
+ work->input.ordinal.customOrdinal = timestamp; // save input timestamp
+ if (mFirstInputFrame) {
+ // grab timestamps on first frame
+ mPrevInputTimestamp = timestamp;
+ mPrevCodecTimestamp = timestamp;
+ mFirstInputFrame = false;
+ } else if (mAdjustTimestampGapUs > 0) {
+ work->input.ordinal.timestamp =
+ mPrevCodecTimestamp
+ + c2_min((timestamp - mPrevInputTimestamp).peek(), mAdjustTimestampGapUs);
+ } else if (mAdjustTimestampGapUs < 0) {
+ work->input.ordinal.timestamp = mPrevCodecTimestamp - mAdjustTimestampGapUs;
+ }
+ mPrevInputTimestamp = work->input.ordinal.customOrdinal;
+ mPrevCodecTimestamp = work->input.ordinal.timestamp;
+ ALOGV("adjusting %lld to %lld (gap=%lld)",
+ work->input.ordinal.customOrdinal.peekll(),
+ work->input.ordinal.timestamp.peekll(),
+ (long long)mAdjustTimestampGapUs);
+ }
+
+ work->input.ordinal.frameIndex = mFrameIndex++;
+ work->input.buffers.clear();
+ if (block) {
+ std::shared_ptr<C2Buffer> c2Buffer(
+ new Buffer2D(block->share(
+ C2Rect(block->width(), block->height()), ::C2Fence())));
+ work->input.buffers.push_back(c2Buffer);
+ std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
+ std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
+ GetHdrMetadataFromGralloc4Handle(
+ block->handle(),
+ &staticInfo,
+ &dynamicInfo);
+ if (staticInfo && *staticInfo) {
+ c2Buffer->setInfo(staticInfo);
+ }
+ if (dynamicInfo && *dynamicInfo) {
+ c2Buffer->setInfo(dynamicInfo);
+ }
+ }
+ work->worklets.clear();
+ work->worklets.emplace_back(new C2Worklet);
+ mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+ mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
+
+ return OK;
+}
+
+status_t C2NodeImpl::onDataspaceChanged(uint32_t dataSpace, uint32_t pixelFormat) {
+ ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
+ android_dataspace d = (android_dataspace)dataSpace;
+ mQueueThread->setDataspace(d);
+
+ mDataspace.lock().set(d);
+ mPixelFormat.lock().set(pixelFormat);
+ return OK;
+}
+
+sp<IOMXBufferSource> C2NodeImpl::getSource() {
+ CHECK(!mAidlHal);
+ return mBufferSource;
+}
+
+std::shared_ptr<IAidlBufferSource> C2NodeImpl::getAidlSource() {
+ CHECK(mAidlHal);
+ return mAidlBufferSource;
+}
+
+void C2NodeImpl::setFrameSize(uint32_t width, uint32_t height) {
+ mWidth = width;
+ mHeight = height;
+}
+
+void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
+ if (mAidlHal) {
+ if (!mAidlBufferSource) {
+ ALOGD("Buffer source not set (index=%llu)", index.peekull());
+ return;
+ }
+ } else {
+ if (!mBufferSource) {
+ ALOGD("Buffer source not set (index=%llu)", index.peekull());
+ return;
+ }
+ }
+
+ int32_t bufferId = 0;
+ {
+ decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
+ auto it = bufferIds->find(index.peeku());
+ if (it == bufferIds->end()) {
+ ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+ return;
+ }
+ bufferId = it->second;
+ (void)bufferIds->erase(it);
+ }
+ if (mAidlHal) {
+ ::ndk::ScopedFileDescriptor nullFence;
+ (void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
+ } else {
+ (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
+ }
+}
+
+android_dataspace C2NodeImpl::getDataspace() {
+ return *mDataspace.lock();
+}
+
+uint32_t C2NodeImpl::getPixelFormat() {
+ return *mPixelFormat.lock();
+}
+
+void C2NodeImpl::setPriority(int priority) {
+ mQueueThread->setPriority(priority);
+}
+
+} // namespace android
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
new file mode 100644
index 0000000..e060fd8
--- /dev/null
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+
+#include <android/IOMXBufferSource.h>
+#include <aidl/android/media/IAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+#include <codec2/hidl/client.h>
+#include <media/stagefright/foundation/Mutexed.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+
+namespace android {
+
+/**
+ * Common node implementation around a codec 2.0 component, shared by C2OMXNode and
+ * C2AidlNode and only to be used in IGraphicBufferSource::configure (or
+ * IAidlGraphicBufferSource::configure when the AIDL HAL is used). Only a subset of
+ * the node API is implemented, so this node cannot be expected to work in any other usage.
+ */
+struct C2NodeImpl {
+ explicit C2NodeImpl(const std::shared_ptr<Codec2Client::Component> &comp, bool aidl);
+ ~C2NodeImpl();
+
+ // IOMXNode and/or IAidlNode
+ status_t freeNode();
+
+ void onFirstInputFrame();
+ void getConsumerUsageBits(uint64_t *usage /* nonnull */);
+ void getInputBufferParams(
+ ::aidl::android::media::IAidlNode::InputBufferParams *params /* nonnull */);
+ void setConsumerUsageBits(uint64_t usage);
+ void setAdjustTimestampGapUs(int32_t gapUs);
+
+ status_t setInputSurface(
+ const sp<IOMXBufferSource> &bufferSource);
+ status_t setAidlInputSurface(
+ const std::shared_ptr<::aidl::android::media::IAidlBufferSource> &aidlBufferSource);
+
+ status_t submitBuffer(
+ uint32_t buffer, const sp<GraphicBuffer> &graphicBuffer,
+ uint32_t flags, int64_t timestamp, int fenceFd);
+ status_t onDataspaceChanged(uint32_t dataSpace, uint32_t pixelFormat);
+
+ /**
+ * Returns underlying IOMXBufferSource object.
+ */
+ sp<IOMXBufferSource> getSource();
+
+ /**
+ * Returns underlying IAidlBufferSource object.
+ */
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> getAidlSource();
+
+ /**
+ * Configure the frame size.
+ */
+ void setFrameSize(uint32_t width, uint32_t height);
+
+ /**
+ * Clean up work item reference.
+ *
+ * \param index input work index
+ */
+ void onInputBufferDone(c2_cntr64_t index);
+
+ /**
+ * Returns dataspace information from GraphicBufferSource.
+ */
+ android_dataspace getDataspace();
+
+ /**
+     * Returns pixel format information from GraphicBufferSource.
+ */
+ uint32_t getPixelFormat();
+
+ /**
+ * Sets priority of the queue thread.
+ */
+ void setPriority(int priority);
+
+private:
+ std::weak_ptr<Codec2Client::Component> mComp;
+
+ sp<IOMXBufferSource> mBufferSource;
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> mAidlBufferSource;
+
+ std::shared_ptr<C2Allocator> mAllocator;
+ std::atomic_uint64_t mFrameIndex;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ uint64_t mUsage;
+ Mutexed<android_dataspace> mDataspace;
+ Mutexed<uint32_t> mPixelFormat;
+
+ // WORKAROUND: timestamp adjustment
+
+ // if >0: this is the max timestamp gap, if <0: this is -1 times the fixed timestamp gap
+ // if 0: no timestamp adjustment is made
+    // note that the node can be recycled between encoding sessions.
+ int32_t mAdjustTimestampGapUs;
+ bool mFirstInputFrame; // true for first input
+ c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
+ c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
+
+ Mutexed<std::map<uint64_t, uint32_t>> mBufferIdsInUse;
+
+ class QueueThread;
+ sp<QueueThread> mQueueThread;
+
+ bool mAidlHal;
+};
+
+} // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index bba022b..ce02c88 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright 2018, The Android Open Source Project
+ * Copyright 2024, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -19,30 +19,17 @@
#endif
//#define LOG_NDEBUG 0
 #define LOG_TAG "C2OMXNode"
#include <log/log.h>
-#include <C2AllocatorGralloc.h>
-#include <C2BlockInternal.h>
-#include <C2Component.h>
-#include <C2Config.h>
-#include <C2PlatformSupport.h>
-
#include <OMX_Component.h>
#include <OMX_Index.h>
#include <OMX_IndexExt.h>
-#include <android/fdsan.h>
-#include <media/stagefright/foundation/ColorUtils.h>
-#include <media/stagefright/omx/OMXUtils.h>
#include <media/stagefright/MediaErrors.h>
-#include <ui/Fence.h>
-#include <ui/GraphicBuffer.h>
-#include <utils/Thread.h>
-#include "utils/Codec2Mapper.h"
#include "C2OMXNode.h"
-#include "Codec2Buffer.h"
+#include "C2NodeImpl.h"
namespace android {
@@ -50,175 +37,25 @@
constexpr OMX_U32 kPortIndexInput = 0;
-class Buffer2D : public C2Buffer {
-public:
- explicit Buffer2D(C2ConstGraphicBlock block) : C2Buffer({ block }) {}
-};
+} // anonymous namespace
-} // namespace
+using ::android::media::BUFFERFLAG_ENDOFFRAME;
+using ::android::media::BUFFERFLAG_EOS;
-class C2OMXNode::QueueThread : public Thread {
-public:
- QueueThread() : Thread(false) {}
- ~QueueThread() override = default;
- void queue(
- const std::shared_ptr<Codec2Client::Component> &comp,
- int fenceFd,
- std::unique_ptr<C2Work> &&work,
- android::base::unique_fd &&fd0,
- android::base::unique_fd &&fd1) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- auto it = jobs->queues.try_emplace(comp, comp).first;
- it->second.workList.emplace_back(
- std::move(work), fenceFd, std::move(fd0), std::move(fd1));
- jobs->cond.broadcast();
- }
-
- void setDataspace(android_dataspace dataspace) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- ColorUtils::convertDataSpaceToV0(dataspace);
- jobs->configUpdate.emplace_back(new C2StreamDataSpaceInfo::input(0u, dataspace));
- int32_t standard;
- int32_t transfer;
- int32_t range;
- ColorUtils::getColorConfigFromDataSpace(dataspace, &range, &standard, &transfer);
- std::unique_ptr<C2StreamColorAspectsInfo::input> colorAspects =
- std::make_unique<C2StreamColorAspectsInfo::input>(0u);
- if (C2Mapper::map(standard, &colorAspects->primaries, &colorAspects->matrix)
- && C2Mapper::map(transfer, &colorAspects->transfer)
- && C2Mapper::map(range, &colorAspects->range)) {
- jobs->configUpdate.push_back(std::move(colorAspects));
- }
- }
-
- void setPriority(int priority) {
- androidSetThreadPriority(getTid(), priority);
- }
-
-protected:
- bool threadLoop() override {
- constexpr nsecs_t kIntervalNs = nsecs_t(10) * 1000 * 1000; // 10ms
- constexpr nsecs_t kWaitNs = kIntervalNs * 2;
- for (int i = 0; i < 2; ++i) {
- Mutexed<Jobs>::Locked jobs(mJobs);
- nsecs_t nowNs = systemTime();
- bool queued = false;
- for (auto it = jobs->queues.begin(); it != jobs->queues.end(); ) {
- Queue &queue = it->second;
- if (queue.workList.empty()
- || (queue.lastQueuedTimestampNs != 0 &&
- nowNs - queue.lastQueuedTimestampNs < kIntervalNs)) {
- ++it;
- continue;
- }
- std::shared_ptr<Codec2Client::Component> comp = queue.component.lock();
- if (!comp) {
- it = jobs->queues.erase(it);
- continue;
- }
- std::list<std::unique_ptr<C2Work>> items;
- std::vector<int> fenceFds;
- std::vector<android::base::unique_fd> uniqueFds;
- while (!queue.workList.empty()) {
- items.push_back(std::move(queue.workList.front().work));
- fenceFds.push_back(queue.workList.front().fenceFd);
- uniqueFds.push_back(std::move(queue.workList.front().fd0));
- uniqueFds.push_back(std::move(queue.workList.front().fd1));
- queue.workList.pop_front();
- }
- for (const std::unique_ptr<C2Param> ¶m : jobs->configUpdate) {
- items.front()->input.configUpdate.emplace_back(C2Param::Copy(*param));
- }
-
- jobs.unlock();
- for (int fenceFd : fenceFds) {
- sp<Fence> fence(new Fence(fenceFd));
- fence->waitForever(LOG_TAG);
- }
- queue.lastQueuedTimestampNs = nowNs;
- comp->queue(&items);
- for (android::base::unique_fd &ufd : uniqueFds) {
- (void)ufd.release();
- }
- jobs.lock();
-
- it = jobs->queues.upper_bound(comp);
- queued = true;
- }
- if (queued) {
- jobs->configUpdate.clear();
- return true;
- }
- if (i == 0) {
- jobs.waitForConditionRelative(jobs->cond, kWaitNs);
- }
- }
- return true;
- }
-
-private:
- struct WorkFence {
- WorkFence(std::unique_ptr<C2Work> &&w, int fd) : work(std::move(w)), fenceFd(fd) {}
-
- WorkFence(
- std::unique_ptr<C2Work> &&w,
- int fd,
- android::base::unique_fd &&uniqueFd0,
- android::base::unique_fd &&uniqueFd1)
- : work(std::move(w)),
- fenceFd(fd),
- fd0(std::move(uniqueFd0)),
- fd1(std::move(uniqueFd1)) {}
-
- std::unique_ptr<C2Work> work;
- int fenceFd;
- android::base::unique_fd fd0;
- android::base::unique_fd fd1;
- };
- struct Queue {
- Queue(const std::shared_ptr<Codec2Client::Component> &comp)
- : component(comp), lastQueuedTimestampNs(0) {}
- Queue(const Queue &) = delete;
- Queue &operator =(const Queue &) = delete;
-
- std::weak_ptr<Codec2Client::Component> component;
- std::list<WorkFence> workList;
- nsecs_t lastQueuedTimestampNs;
- };
- struct Jobs {
- std::map<std::weak_ptr<Codec2Client::Component>,
- Queue,
- std::owner_less<std::weak_ptr<Codec2Client::Component>>> queues;
- std::vector<std::unique_ptr<C2Param>> configUpdate;
- Condition cond;
- };
- Mutexed<Jobs> mJobs;
-};
+using ::aidl::android::media::IAidlNode;
C2OMXNode::C2OMXNode(const std::shared_ptr<Codec2Client::Component> &comp)
- : mComp(comp), mFrameIndex(0), mWidth(0), mHeight(0), mUsage(0),
- mAdjustTimestampGapUs(0), mFirstInputFrame(true),
- mQueueThread(new QueueThread) {
- android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ALWAYS);
- mQueueThread->run("C2OMXNode", PRIORITY_AUDIO);
-
- android_dataspace ds = HAL_DATASPACE_UNKNOWN;
- mDataspace.lock().set(ds);
- uint32_t pf = PIXEL_FORMAT_UNKNOWN;
- mPixelFormat.lock().set(pf);
-}
+ : mImpl(new C2NodeImpl(comp, false)) {}
status_t C2OMXNode::freeNode() {
- mComp.reset();
- android_fdsan_set_error_level(ANDROID_FDSAN_ERROR_LEVEL_WARN_ONCE);
- return mQueueThread->requestExitAndWait();
+ return mImpl->freeNode();
}
status_t C2OMXNode::sendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
if (cmd == OMX_CommandStateSet && param == OMX_StateLoaded) {
// Reset first input frame so if C2OMXNode is recycled, the timestamp does not become
// negative. This is a workaround for HW codecs that do not handle timestamp rollover.
- mFirstInputFrame = true;
+ mImpl->onFirstInputFrame();
}
return ERROR_UNSUPPORTED;
}
@@ -228,13 +65,19 @@
switch ((uint32_t)index) {
case OMX_IndexParamConsumerUsageBits: {
OMX_U32 *usage = (OMX_U32 *)params;
- *usage = mUsage;
+ uint64_t val;
+ mImpl->getConsumerUsageBits(&val);
+ *usage = static_cast<uint32_t>(val & 0xFFFFFFFF);
+            ALOGW("retrieving 64-bit consumer usage bits as 32 bits: %llu -> %u",
+ (unsigned long long)val, (unsigned int)*usage);
err = OK;
break;
}
case OMX_IndexParamConsumerUsageBits64: {
OMX_U64 *usage = (OMX_U64 *)params;
- *usage = mUsage;
+ uint64_t val;
+ mImpl->getConsumerUsageBits(&val);
+ *usage = val;
err = OK;
break;
}
@@ -246,31 +89,12 @@
if (pDef->nPortIndex != kPortIndexInput) {
break;
}
-
- pDef->nBufferCountActual = 16;
-
- // WORKAROUND: having more slots improve performance while consuming
- // more memory. This is a temporary workaround to reduce memory for
- // larger-than-4K scenario.
- if (mWidth * mHeight > 4096 * 2340) {
- std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
- C2PortActualDelayTuning::input inputDelay(0);
- C2ActualPipelineDelayTuning pipelineDelay(0);
- c2_status_t c2err = C2_NOT_FOUND;
- if (comp) {
- c2err = comp->query(
- {&inputDelay, &pipelineDelay}, {}, C2_DONT_BLOCK, nullptr);
- }
- if (c2err == C2_OK || c2err == C2_BAD_INDEX) {
- pDef->nBufferCountActual = 4;
- pDef->nBufferCountActual += (inputDelay ? inputDelay.value : 0u);
- pDef->nBufferCountActual += (pipelineDelay ? pipelineDelay.value : 0u);
- }
- }
-
+ IAidlNode::InputBufferParams bufferParams;
+ mImpl->getInputBufferParams(&bufferParams);
+ pDef->nBufferCountActual = bufferParams.bufferCountActual;
pDef->eDomain = OMX_PortDomainVideo;
- pDef->format.video.nFrameWidth = mWidth;
- pDef->format.video.nFrameHeight = mHeight;
+ pDef->format.video.nFrameWidth = bufferParams.frameWidth;
+ pDef->format.video.nFrameHeight = bufferParams.frameHeight;
pDef->format.video.eColorFormat = OMX_COLOR_FormatAndroidOpaque;
err = OK;
break;
@@ -286,28 +110,34 @@
return BAD_VALUE;
}
switch ((uint32_t)index) {
- case OMX_IndexParamMaxFrameDurationForBitrateControl:
+ case OMX_IndexParamMaxFrameDurationForBitrateControl: {
// handle max/fixed frame duration control
if (size != sizeof(OMX_PARAM_U32TYPE)) {
return BAD_VALUE;
}
// The incoming number is an int32_t contained in OMX_U32.
- mAdjustTimestampGapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+ int32_t gapUs = (int32_t)((OMX_PARAM_U32TYPE*)params)->nU32;
+ mImpl->setAdjustTimestampGapUs(gapUs);
return OK;
-
- case OMX_IndexParamConsumerUsageBits:
+ }
+ case OMX_IndexParamConsumerUsageBits: {
if (size != sizeof(OMX_U32)) {
return BAD_VALUE;
}
- mUsage = *((OMX_U32 *)params);
+ uint32_t usage = *((OMX_U32 *)params);
+ mImpl->setConsumerUsageBits(static_cast<uint64_t>(usage));
return OK;
-
- case OMX_IndexParamConsumerUsageBits64:
+ }
+ case OMX_IndexParamConsumerUsageBits64: {
if (size != sizeof(OMX_U64)) {
return BAD_VALUE;
}
- mUsage = *((OMX_U64 *)params);
+ uint64_t usagell = *((OMX_U64 *)params);
+ mImpl->setConsumerUsageBits(usagell);
return OK;
+ }
+ default:
+ break;
}
return ERROR_UNSUPPORTED;
}
@@ -359,14 +189,7 @@
}
status_t C2OMXNode::setInputSurface(const sp<IOMXBufferSource> &bufferSource) {
- c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
- C2PlatformAllocatorStore::GRALLOC,
- &mAllocator);
- if (err != OK) {
- return UNKNOWN_ERROR;
- }
- mBufferSource = bufferSource;
- return OK;
+ return mImpl->setInputSurface(bufferSource);
}
status_t C2OMXNode::allocateSecureBuffer(
@@ -402,105 +225,39 @@
return ERROR_UNSUPPORTED;
}
+namespace {
+ uint32_t toNodeFlags(OMX_U32 flags) {
+ uint32_t retFlags = 0;
+ if (flags & OMX_BUFFERFLAG_ENDOFFRAME) {
+ retFlags |= BUFFERFLAG_ENDOFFRAME;
+ }
+ if (flags & OMX_BUFFERFLAG_EOS) {
+ retFlags |= BUFFERFLAG_EOS;
+ }
+ return retFlags;
+ }
+ int64_t toNodeTimestamp(OMX_TICKS ticks) {
+ int64_t timestamp = 0;
+#ifndef OMX_SKIP64BIT
+ timestamp = ticks;
+#else
+ timestamp = ((ticks.nHighPart << 32) | ticks.nLowPart);
+#endif
+ return timestamp;
+ }
+} // anonymous namespace
+
status_t C2OMXNode::emptyBuffer(
buffer_id buffer, const OMXBuffer &omxBuf,
OMX_U32 flags, OMX_TICKS timestamp, int fenceFd) {
- std::shared_ptr<Codec2Client::Component> comp = mComp.lock();
- if (!comp) {
- return NO_INIT;
- }
-
- uint32_t c2Flags = (flags & OMX_BUFFERFLAG_EOS)
- ? C2FrameData::FLAG_END_OF_STREAM : 0;
- std::shared_ptr<C2GraphicBlock> block;
-
- android::base::unique_fd fd0, fd1;
- C2Handle *handle = nullptr;
if (omxBuf.mBufferType == OMXBuffer::kBufferTypeANWBuffer
&& omxBuf.mGraphicBuffer != nullptr) {
- std::shared_ptr<C2GraphicAllocation> alloc;
- handle = WrapNativeCodec2GrallocHandle(
- omxBuf.mGraphicBuffer->handle,
- omxBuf.mGraphicBuffer->width,
- omxBuf.mGraphicBuffer->height,
- omxBuf.mGraphicBuffer->format,
- omxBuf.mGraphicBuffer->usage,
- omxBuf.mGraphicBuffer->stride);
- if (handle != nullptr) {
- // unique_fd takes ownership of the fds, we'll get warning if these
- // fds get closed by somebody else. Onwership will be released before
- // we return, so that the fds get closed as usually when this function
- // goes out of scope (when both items and block are gone).
- native_handle_t *nativeHandle = reinterpret_cast<native_handle_t*>(handle);
- fd0.reset(nativeHandle->numFds > 0 ? nativeHandle->data[0] : -1);
- fd1.reset(nativeHandle->numFds > 1 ? nativeHandle->data[1] : -1);
- }
- c2_status_t err = mAllocator->priorGraphicAllocation(handle, &alloc);
- if (err != OK) {
- (void)fd0.release();
- (void)fd1.release();
- native_handle_close(handle);
- native_handle_delete(handle);
- return UNKNOWN_ERROR;
- }
- block = _C2BlockFactory::CreateGraphicBlock(alloc);
- } else if (!(flags & OMX_BUFFERFLAG_EOS)) {
- return BAD_VALUE;
+ return mImpl->submitBuffer(buffer, omxBuf.mGraphicBuffer, toNodeFlags(flags),
+ toNodeTimestamp(timestamp), fenceFd);
}
-
- std::unique_ptr<C2Work> work(new C2Work);
- work->input.flags = (C2FrameData::flags_t)c2Flags;
- work->input.ordinal.timestamp = timestamp;
-
- // WORKAROUND: adjust timestamp based on gapUs
- {
- work->input.ordinal.customOrdinal = timestamp; // save input timestamp
- if (mFirstInputFrame) {
- // grab timestamps on first frame
- mPrevInputTimestamp = timestamp;
- mPrevCodecTimestamp = timestamp;
- mFirstInputFrame = false;
- } else if (mAdjustTimestampGapUs > 0) {
- work->input.ordinal.timestamp =
- mPrevCodecTimestamp
- + c2_min((timestamp - mPrevInputTimestamp).peek(), mAdjustTimestampGapUs);
- } else if (mAdjustTimestampGapUs < 0) {
- work->input.ordinal.timestamp = mPrevCodecTimestamp - mAdjustTimestampGapUs;
- }
- mPrevInputTimestamp = work->input.ordinal.customOrdinal;
- mPrevCodecTimestamp = work->input.ordinal.timestamp;
- ALOGV("adjusting %lld to %lld (gap=%lld)",
- work->input.ordinal.customOrdinal.peekll(),
- work->input.ordinal.timestamp.peekll(),
- (long long)mAdjustTimestampGapUs);
- }
-
- work->input.ordinal.frameIndex = mFrameIndex++;
- work->input.buffers.clear();
- if (block) {
- std::shared_ptr<C2Buffer> c2Buffer(
- new Buffer2D(block->share(
- C2Rect(block->width(), block->height()), ::C2Fence())));
- work->input.buffers.push_back(c2Buffer);
- std::shared_ptr<C2StreamHdrStaticInfo::input> staticInfo;
- std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> dynamicInfo;
- GetHdrMetadataFromGralloc4Handle(
- block->handle(),
- &staticInfo,
- &dynamicInfo);
- if (staticInfo && *staticInfo) {
- c2Buffer->setInfo(staticInfo);
- }
- if (dynamicInfo && *dynamicInfo) {
- c2Buffer->setInfo(dynamicInfo);
- }
- }
- work->worklets.clear();
- work->worklets.emplace_back(new C2Worklet);
- mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
- mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
-
- return OK;
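+    // No ANW graphic buffer attached: submit with a null buffer, which C2NodeImpl accepts
+    // only when the EOS flag is set.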
+ sp<GraphicBuffer> gBuf;
+ return mImpl->submitBuffer(buffer, gBuf, toNodeFlags(flags),
+ toNodeTimestamp(timestamp), fenceFd);
}
status_t C2OMXNode::getExtensionIndex(
@@ -517,56 +274,33 @@
if (msg.u.event_data.event != OMX_EventDataSpaceChanged) {
return ERROR_UNSUPPORTED;
}
- android_dataspace dataSpace = (android_dataspace)msg.u.event_data.data1;
- uint32_t pixelFormat = msg.u.event_data.data3;
-
- ALOGD("dataspace changed to %#x pixel format: %#x", dataSpace, pixelFormat);
- mQueueThread->setDataspace(dataSpace);
-
- mDataspace.lock().set(dataSpace);
- mPixelFormat.lock().set(pixelFormat);
- return OK;
+ return mImpl->onDataspaceChanged(
+ msg.u.event_data.data1,
+ msg.u.event_data.data3);
}
sp<IOMXBufferSource> C2OMXNode::getSource() {
- return mBufferSource;
+ return mImpl->getSource();
}
void C2OMXNode::setFrameSize(uint32_t width, uint32_t height) {
- mWidth = width;
- mHeight = height;
+ return mImpl->setFrameSize(width, height);
}
void C2OMXNode::onInputBufferDone(c2_cntr64_t index) {
- if (!mBufferSource) {
- ALOGD("Buffer source not set (index=%llu)", index.peekull());
- return;
- }
-
- int32_t bufferId = 0;
- {
- decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
- auto it = bufferIds->find(index.peeku());
- if (it == bufferIds->end()) {
- ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
- return;
- }
- bufferId = it->second;
- (void)bufferIds->erase(it);
- }
- (void)mBufferSource->onInputBufferEmptied(bufferId, -1);
+ return mImpl->onInputBufferDone(index);
}
android_dataspace C2OMXNode::getDataspace() {
- return *mDataspace.lock();
+ return mImpl->getDataspace();
}
uint32_t C2OMXNode::getPixelFormat() {
- return *mPixelFormat.lock();
+ return mImpl->getPixelFormat();
}
void C2OMXNode::setPriority(int priority) {
- mQueueThread->setPriority(priority);
+ return mImpl->setPriority(priority);
}
} // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index c8ce336..d077202 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2018, The Android Open Source Project
+ * Copyright 2024, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -17,16 +17,15 @@
#ifndef C2_OMX_NODE_H_
#define C2_OMX_NODE_H_
-#include <atomic>
-
#include <android/IOMXBufferSource.h>
#include <codec2/hidl/client.h>
-#include <media/stagefright/foundation/Mutexed.h>
#include <media/IOMX.h>
#include <media/OMXBuffer.h>
namespace android {
+struct C2NodeImpl;
+
/**
* IOmxNode implementation around codec 2.0 component, only to be used in
* IGraphicBufferSource::configure. Only subset of IOmxNode API is implemented
@@ -109,30 +108,7 @@
void setPriority(int priority);
private:
- std::weak_ptr<Codec2Client::Component> mComp;
- sp<IOMXBufferSource> mBufferSource;
- std::shared_ptr<C2Allocator> mAllocator;
- std::atomic_uint64_t mFrameIndex;
- uint32_t mWidth;
- uint32_t mHeight;
- uint64_t mUsage;
- Mutexed<android_dataspace> mDataspace;
- Mutexed<uint32_t> mPixelFormat;
-
- // WORKAROUND: timestamp adjustment
-
- // if >0: this is the max timestamp gap, if <0: this is -1 times the fixed timestamp gap
- // if 0: no timestamp adjustment is made
- // note that C2OMXNode can be recycled between encoding sessions.
- int32_t mAdjustTimestampGapUs;
- bool mFirstInputFrame; // true for first input
- c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
- c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
-
- Mutexed<std::map<uint64_t, buffer_id>> mBufferIdsInUse;
-
- class QueueThread;
- sp<QueueThread> mQueueThread;
+ std::shared_ptr<C2NodeImpl> mImpl;
};
} // namespace android
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 9c264af..463b63f 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -21,11 +21,16 @@
#include <sstream>
#include <thread>
+#include <android_media_codec.h>
+
#include <C2Config.h>
#include <C2Debug.h>
#include <C2ParamInternal.h>
#include <C2PlatformSupport.h>
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/media/IAidlGraphicBufferSource.h>
+#include <aidl/android/media/IAidlBufferSource.h>
#include <android/IOMXBufferSource.h>
#include <android/hardware/media/c2/1.0/IInputSurface.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -40,6 +45,11 @@
#include <media/openmax/OMX_Core.h>
#include <media/openmax/OMX_IndexExt.h>
#include <media/stagefright/foundation/avc_utils.h>
+#include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>
#include <media/stagefright/omx/1.0/WGraphicBufferSource.h>
#include <media/stagefright/omx/OmxGraphicBufferSource.h>
#include <media/stagefright/CCodec.h>
@@ -50,6 +60,7 @@
#include <media/stagefright/RenderedFrameInfo.h>
#include <utils/NativeHandle.h>
+#include "C2AidlNode.h"
#include "C2OMXNode.h"
#include "CCodecBufferChannel.h"
#include "CCodecConfig.h"
@@ -64,8 +75,14 @@
using ::android::hardware::graphics::bufferqueue::V1_0::utils::H2BGraphicBufferProducer;
using android::base::StringPrintf;
using ::android::hardware::media::c2::V1_0::IInputSurface;
+using ::aidl::android::media::IAidlBufferSource;
+using ::aidl::android::media::IAidlNode;
+using ::android::media::AidlGraphicBufferSource;
+using ::android::media::WAidlGraphicBufferSource;
+using ::android::media::aidl_conversion::fromAidlStatus;
typedef hardware::media::omx::V1_0::IGraphicBufferSource HGraphicBufferSource;
+typedef aidl::android::media::IAidlGraphicBufferSource AGraphicBufferSource;
typedef CCodecConfig Config;
namespace {
@@ -189,11 +206,11 @@
std::shared_ptr<Codec2Client::InputSurfaceConnection> mConnection;
};
-class GraphicBufferSourceWrapper : public InputSurfaceWrapper {
+class HGraphicBufferSourceWrapper : public InputSurfaceWrapper {
public:
typedef hardware::media::omx::V1_0::Status OmxStatus;
- GraphicBufferSourceWrapper(
+ HGraphicBufferSourceWrapper(
const sp<HGraphicBufferSource> &source,
uint32_t width,
uint32_t height,
@@ -202,7 +219,7 @@
mDataSpace = HAL_DATASPACE_BT709;
mConfig.mUsage = usage;
}
- ~GraphicBufferSourceWrapper() override = default;
+ ~HGraphicBufferSourceWrapper() override = default;
status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
mNode = new C2OMXNode(comp);
@@ -444,6 +461,224 @@
Config mConfig;
};
+class AGraphicBufferSourceWrapper : public InputSurfaceWrapper {
+public:
+ AGraphicBufferSourceWrapper(
+ const std::shared_ptr<AGraphicBufferSource> &source,
+ uint32_t width,
+ uint32_t height,
+ uint64_t usage)
+ : mSource(source), mWidth(width), mHeight(height) {
+ mDataSpace = HAL_DATASPACE_BT709;
+ mConfig.mUsage = usage;
+ }
+ ~AGraphicBufferSourceWrapper() override = default;
+
+ status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
+ mNode = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
+ mNode->setFrameSize(mWidth, mHeight);
+        // Usage is queried during configure(), so set it beforehand.
+ uint64_t usage = mConfig.mUsage;
+ (void)mNode->setConsumerUsage((int64_t)usage);
+
+ return fromAidlStatus(mSource->configure(
+ mNode, static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
+ mDataSpace)));
+ }
+
+ void disconnect() override {
+ if (mNode == nullptr) {
+ return;
+ }
+ std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+ if (source == nullptr) {
+ ALOGD("GBSWrapper::disconnect: node is not configured with OMXBufferSource.");
+ return;
+ }
+ (void)source->onStop();
+ (void)source->onRelease();
+ mNode.reset();
+ }
+
+ status_t start() override {
+ std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+ if (source == nullptr) {
+ return NO_INIT;
+ }
+
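+ // Default to 16 input slots; overridden below when the node reports its actual buffer count.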
+ size_t numSlots = 16;
+
+ IAidlNode::InputBufferParams param;
+ status_t err = fromAidlStatus(mNode->getInputBufferParams(&param));
+ if (err == OK) {
+ numSlots = param.bufferCountActual;
+ }
+
+ for (size_t i = 0; i < numSlots; ++i) {
+ (void)source->onInputBufferAdded(i);
+ }
+
+ (void)source->onStart();
+ return OK;
+ }
+
+ status_t signalEndOfInputStream() override {
+ return fromAidlStatus(mSource->signalEndOfInputStream());
+ }
+
+ status_t configure(Config &config) {
+ std::stringstream status;
+ status_t err = OK;
+
+ // handle each configuration granularly, in case we need to handle part of the configuration
+ // elsewhere
+
+ // TRICKY: we do not unset frame delay repeating
+ if (config.mMinFps > 0 && config.mMinFps != mConfig.mMinFps) {
+ int64_t us = 1e6 / config.mMinFps + 0.5;
+ status_t res = fromAidlStatus(mSource->setRepeatPreviousFrameDelayUs(us));
+ status << " minFps=" << config.mMinFps << " => repeatDelayUs=" << us;
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mMinFps = config.mMinFps;
+ }
+
+ // pts gap
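+ // A positive gap requests a minimum timestamp spacing between frames; a negative gap requests
+ // a fixed spacing (assumed to mirror the OMX wrapper's setAdjustTimestampGapUs semantics).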
+ if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
+ if (mNode != nullptr) {
+ float gap = (config.mMinAdjustedFps > 0)
+ ? c2_min(INT32_MAX + 0., 1e6 / config.mMinAdjustedFps + 0.5)
+ : c2_max(0. - INT32_MAX, -1e6 / config.mFixedAdjustedFps - 0.5);
+ // float -> uint32_t is undefined if the value is negative.
+ // First convert to int32_t to ensure the expected behavior.
+ int32_t gapUs = int32_t(gap);
+ (void)mNode->setAdjustTimestampGapUs(gapUs);
+ }
+ }
+
+ // max fps
+ // TRICKY: we do not unset max fps to 0 unless using fixed fps
+ if ((config.mMaxFps > 0 || (config.mFixedAdjustedFps > 0 && config.mMaxFps == -1))
+ && config.mMaxFps != mConfig.mMaxFps) {
+ status_t res = fromAidlStatus(mSource->setMaxFps(config.mMaxFps));
+ status << " maxFps=" << config.mMaxFps;
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mMaxFps = config.mMaxFps;
+ }
+
+ if (config.mTimeOffsetUs != mConfig.mTimeOffsetUs) {
+ status_t res = fromAidlStatus(mSource->setTimeOffsetUs(config.mTimeOffsetUs));
+ status << " timeOffset " << config.mTimeOffsetUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mTimeOffsetUs = config.mTimeOffsetUs;
+ }
+
+ if (config.mCaptureFps != mConfig.mCaptureFps || config.mCodedFps != mConfig.mCodedFps) {
+ status_t res =
+ fromAidlStatus(mSource->setTimeLapseConfig(config.mCodedFps, config.mCaptureFps));
+ status << " timeLapse " << config.mCaptureFps << "fps as " << config.mCodedFps << "fps";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mCaptureFps = config.mCaptureFps;
+ mConfig.mCodedFps = config.mCodedFps;
+ }
+
+ if (config.mStartAtUs != mConfig.mStartAtUs
+ || (config.mStopped != mConfig.mStopped && !config.mStopped)) {
+ status_t res = fromAidlStatus(mSource->setStartTimeUs(config.mStartAtUs));
+ status << " start at " << config.mStartAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mStartAtUs = config.mStartAtUs;
+ mConfig.mStopped = config.mStopped;
+ }
+
+ // suspend-resume
+ if (config.mSuspended != mConfig.mSuspended) {
+ status_t res = fromAidlStatus(mSource->setSuspend(
+ config.mSuspended, config.mSuspendAtUs));
+ status << " " << (config.mSuspended ? "suspend" : "resume")
+ << " at " << config.mSuspendAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ }
+ mConfig.mSuspended = config.mSuspended;
+ mConfig.mSuspendAtUs = config.mSuspendAtUs;
+ }
+
+ if (config.mStopped != mConfig.mStopped && config.mStopped) {
+ status_t res = fromAidlStatus(mSource->setStopTimeUs(config.mStopAtUs));
+ status << " stop at " << config.mStopAtUs << "us";
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ err = res;
+ } else {
+ status << " delayUs";
+ res = fromAidlStatus(mSource->getStopTimeOffsetUs(&config.mInputDelayUs));
+ if (res != OK) {
+ status << " (=> " << asString(res) << ")";
+ } else {
+ status << "=" << config.mInputDelayUs << "us";
+ }
+ mConfig.mInputDelayUs = config.mInputDelayUs;
+ }
+ mConfig.mStopAtUs = config.mStopAtUs;
+ mConfig.mStopped = config.mStopped;
+ }
+
+ // color aspects (android._color-aspects)
+
+ // consumer usage is queried earlier.
+
+ // priority
+ if (mConfig.mPriority != config.mPriority) {
+ if (config.mPriority != INT_MAX) {
+ mNode->setPriority(config.mPriority);
+ }
+ mConfig.mPriority = config.mPriority;
+ }
+
+ if (status.str().empty()) {
+ ALOGD("ISConfig not changed");
+ } else {
+ ALOGD("ISConfig%s", status.str().c_str());
+ }
+ return err;
+ }
+
+ void onInputBufferDone(c2_cntr64_t index) override {
+ mNode->onInputBufferDone(index);
+ }
+
+ android_dataspace getDataspace() override {
+ return mNode->getDataspace();
+ }
+
+ uint32_t getPixelFormat() override {
+ return mNode->getPixelFormat();
+ }
+
+private:
+ std::shared_ptr<AGraphicBufferSource> mSource;
+ std::shared_ptr<C2AidlNode> mNode;
+ uint32_t mWidth;
+ uint32_t mHeight;
+ Config mConfig;
+};
+
class Codec2ClientInterfaceWrapper : public C2ComponentStore {
std::shared_ptr<Codec2Client> mClient;
@@ -1178,6 +1413,23 @@
}
}
+ /*
+ * configure mock region of interest if Feature_Roi is enabled
+ */
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ if ((config->mDomain & Config::IS_ENCODER) && (config->mDomain & Config::IS_VIDEO)) {
+ int32_t enableRoi;
+ if (msg->findInt32("feature-region-of-interest", &enableRoi) && enableRoi != 0) {
+ if (!msg->contains(PARAMETER_KEY_QP_OFFSET_MAP) &&
+ !msg->contains(PARAMETER_KEY_QP_OFFSET_RECTS)) {
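+ // ROI requested but no QP map/rects supplied: install a zero-offset, full-frame rect as a default.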
+ msg->setString(PARAMETER_KEY_QP_OFFSET_RECTS,
+ AStringPrintf("%d,%d-%d,%d=%d;", 0, 0, height, width, 0));
+ }
+ }
+ }
+ }
+
std::vector<std::unique_ptr<C2Param>> configUpdate;
// NOTE: We used to ignore "video-bitrate" at configure; replicate
// the behavior here.
@@ -1458,7 +1710,8 @@
int64_t blockUsage =
usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
- width, height, componentColorFormat, blockUsage, {comp->getName()});
+ align(width, 2), align(height, 2), componentColorFormat, blockUsage,
+ {comp->getName()});
sp<GraphicBlockBuffer> buffer;
if (block) {
buffer = GraphicBlockBuffer::Allocate(
@@ -1627,28 +1880,46 @@
}
sp<PersistentSurface> persistentSurface = CreateCompatibleInputSurface();
- sp<hidl::base::V1_0::IBase> hidlTarget = persistentSurface->getHidlTarget();
- sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(hidlTarget);
- sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
-
- if (hidlInputSurface) {
- std::shared_ptr<Codec2Client::InputSurface> inputSurface =
- std::make_shared<Codec2Client::InputSurface>(hidlInputSurface);
- err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
- inputSurface));
- bufferProducer = inputSurface->getGraphicBufferProducer();
- } else if (gbs) {
- int32_t width = 0;
- (void)outputFormat->findInt32("width", &width);
- int32_t height = 0;
- (void)outputFormat->findInt32("height", &height);
- err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
- gbs, width, height, usage));
- bufferProducer = persistentSurface->getBufferProducer();
+ if (persistentSurface->isTargetAidl()) {
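+ // AIDL path: the persistent surface carries an NDK binder rather than a HIDL interface.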
+ ::ndk::SpAIBinder aidlTarget = persistentSurface->getAidlTarget();
+ std::shared_ptr<AGraphicBufferSource> gbs = AGraphicBufferSource::fromBinder(aidlTarget);
+ if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ err = setupInputSurface(std::make_shared<AGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ bufferProducer = persistentSurface->getBufferProducer();
+ } else {
+ ALOGE("Corrupted input surface(aidl)");
+ mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
+ return;
+ }
} else {
- ALOGE("Corrupted input surface");
- mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
- return;
+ sp<hidl::base::V1_0::IBase> hidlTarget = persistentSurface->getHidlTarget();
+ sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(hidlTarget);
+ sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
+
+ if (hidlInputSurface) {
+ std::shared_ptr<Codec2Client::InputSurface> inputSurface =
+ std::make_shared<Codec2Client::InputSurface>(hidlInputSurface);
+ err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
+ inputSurface));
+ bufferProducer = inputSurface->getGraphicBufferProducer();
+ } else if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ err = setupInputSurface(std::make_shared<HGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ bufferProducer = persistentSurface->getBufferProducer();
+ } else {
+ ALOGE("Corrupted input surface");
+ mCallback->onInputSurfaceCreationFailed(UNKNOWN_ERROR);
+ return;
+ }
}
if (err != OK) {
@@ -1743,33 +2014,56 @@
outputFormat = config->mOutputFormat;
usage = config->mISConfig ? config->mISConfig->mUsage : 0;
}
- sp<hidl::base::V1_0::IBase> hidlTarget = surface->getHidlTarget();
- sp<IInputSurface> inputSurface = IInputSurface::castFrom(hidlTarget);
- sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
- if (inputSurface) {
- status_t err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
- std::make_shared<Codec2Client::InputSurface>(inputSurface)));
- if (err != OK) {
- ALOGE("Failed to set up input surface: %d", err);
- mCallback->onInputSurfaceDeclined(err);
- return;
- }
- } else if (gbs) {
- int32_t width = 0;
- (void)outputFormat->findInt32("width", &width);
- int32_t height = 0;
- (void)outputFormat->findInt32("height", &height);
- status_t err = setupInputSurface(std::make_shared<GraphicBufferSourceWrapper>(
- gbs, width, height, usage));
- if (err != OK) {
- ALOGE("Failed to set up input surface: %d", err);
- mCallback->onInputSurfaceDeclined(err);
+ if (surface->isTargetAidl()) {
+ ::ndk::SpAIBinder aidlTarget = surface->getAidlTarget();
+ std::shared_ptr<AGraphicBufferSource> gbs = AGraphicBufferSource::fromBinder(aidlTarget);
+ if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+
+ status_t err = setupInputSurface(std::make_shared<AGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface(aidl): %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else {
+ ALOGE("Failed to set input surface(aidl): Corrupted surface.");
+ mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
return;
}
} else {
- ALOGE("Failed to set input surface: Corrupted surface.");
- mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
- return;
+ sp<hidl::base::V1_0::IBase> hidlTarget = surface->getHidlTarget();
+ sp<IInputSurface> inputSurface = IInputSurface::castFrom(hidlTarget);
+ sp<HGraphicBufferSource> gbs = HGraphicBufferSource::castFrom(hidlTarget);
+ if (inputSurface) {
+ status_t err = setupInputSurface(std::make_shared<C2InputSurfaceWrapper>(
+ std::make_shared<Codec2Client::InputSurface>(inputSurface)));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface: %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else if (gbs) {
+ int32_t width = 0;
+ (void)outputFormat->findInt32("width", &width);
+ int32_t height = 0;
+ (void)outputFormat->findInt32("height", &height);
+ status_t err = setupInputSurface(std::make_shared<HGraphicBufferSourceWrapper>(
+ gbs, width, height, usage));
+ if (err != OK) {
+ ALOGE("Failed to set up input surface: %d", err);
+ mCallback->onInputSurfaceDeclined(err);
+ return;
+ }
+ } else {
+ ALOGE("Failed to set input surface: Corrupted surface.");
+ mCallback->onInputSurfaceDeclined(UNKNOWN_ERROR);
+ return;
+ }
}
// Formats can change after setupInputSurface
sp<AMessage> inputFormat;
@@ -2259,6 +2553,40 @@
}
}
+ /**
+ * Handle ROI QP map configuration. Recover the QP map from the AMessage as an ABuffer
+ * and pass it to CCodecBufferChannel as a C2InfoBuffer.
+ */
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ sp<ABuffer> qpOffsetMap;
+ if ((config->mDomain & (Config::IS_VIDEO | Config::IS_IMAGE))
+ && (config->mDomain & Config::IS_ENCODER)
+ && params->findBuffer(PARAMETER_KEY_QP_OFFSET_MAP, &qpOffsetMap)) {
+ std::shared_ptr<C2BlockPool> pool;
+ // TODO(b/331443865) Use pooled block pool to improve efficiency
+ c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool);
+
+ if (status == C2_OK) {
+ size_t mapSize = qpOffsetMap->size();
+ std::shared_ptr<C2LinearBlock> block;
+ status = pool->fetchLinearBlock(mapSize,
+ C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
+ if (status == C2_OK && !block->map().get().error()) {
+ C2WriteView wView = block->map().get();
+ uint8_t* outData = wView.data();
+ memcpy(outData, qpOffsetMap->data(), mapSize);
+ C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+ kParamIndexQpOffsetMapBuffer,
+ block->share(0, mapSize, C2Fence()));
+ mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
+ }
+ }
+ params->removeEntryByName(PARAMETER_KEY_QP_OFFSET_MAP);
+ }
+ }
+
+
std::vector<std::unique_ptr<C2Param>> configUpdate;
(void)config->getConfigUpdateFromSdkParams(
comp, params, Config::IS_PARAM, C2_MAY_BLOCK, &configUpdate);
@@ -2661,15 +2989,32 @@
Codec2Client::CreateInputSurface();
if (!inputSurface) {
if (property_get_int32("debug.stagefright.c2inputsurface", 0) == -1) {
- sp<IGraphicBufferProducer> gbp;
- sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
- status_t err = gbs->initCheck();
- if (err != OK) {
- ALOGE("Failed to create persistent input surface: error %d", err);
- return nullptr;
+ if (Codec2Client::IsAidlSelected()) {
+ sp<IGraphicBufferProducer> gbp;
+ sp<AidlGraphicBufferSource> gbs = new AidlGraphicBufferSource();
+ status_t err = gbs->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to create persistent input surface: error %d", err);
+ return nullptr;
+ }
+ ALOGD("aidl based PersistentSurface created");
+ std::shared_ptr<WAidlGraphicBufferSource> wrapper =
+ ::ndk::SharedRefBase::make<WAidlGraphicBufferSource>(gbs);
+
+ return new PersistentSurface(
+ gbs->getIGraphicBufferProducer(), wrapper->asBinder());
+ } else {
+ sp<IGraphicBufferProducer> gbp;
+ sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
+ status_t err = gbs->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to create persistent input surface: error %d", err);
+ return nullptr;
+ }
+ ALOGD("hidl based PersistentSurface created");
+ return new PersistentSurface(
+ gbs->getIGraphicBufferProducer(), new TWGraphicBufferSource(gbs));
}
- return new PersistentSurface(
- gbs->getIGraphicBufferProducer(), new TWGraphicBufferSource(gbs));
} else {
return nullptr;
}
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index f58dc65..3984b83 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -28,6 +28,8 @@
#include <thread>
#include <chrono>
+#include <android_media_codec.h>
+
#include <C2AllocatorGralloc.h>
#include <C2PlatformSupport.h>
#include <C2BlockInternal.h>
@@ -370,7 +372,17 @@
}
} else {
work->input.flags = (C2FrameData::flags_t)flags;
+
// TODO: fill info's
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
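+ // Attach any queued info buffers (e.g. ROI QP offset maps) to this work item and clear the queue.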
+ if (mInfoBuffers.size()) {
+ for (auto infoBuffer : mInfoBuffers) {
+ work->input.infoBuffers.emplace_back(*infoBuffer);
+ }
+ mInfoBuffers.clear();
+ }
+ }
work->input.configUpdate = std::move(mParamsToBeSet);
if (tunnelFirstFrame) {
@@ -2058,6 +2070,7 @@
void CCodecBufferChannel::stop() {
mSync.stop();
mFirstValidFrameIndex = mFrameIndex.load(std::memory_order_relaxed);
+ mInfoBuffers.clear();
}
void CCodecBufferChannel::stopUseOutputSurface(bool pushBlankBuffer) {
@@ -2099,6 +2112,7 @@
}
void CCodecBufferChannel::release() {
+ mInfoBuffers.clear();
mComponent.reset();
mInputAllocator.reset();
mOutputSurface.lock()->surface.clear();
@@ -2164,6 +2178,7 @@
output->buffers->flushStash();
}
}
+ mInfoBuffers.clear();
}
void CCodecBufferChannel::onWorkDone(
@@ -2768,6 +2783,10 @@
}
}
+void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
+ mInfoBuffers.push_back(buffer);
+}
+
status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
// C2_OK is always translated to OK.
if (c2s == C2_OK) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index b470655..94a5998 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -227,6 +227,14 @@
void resetBuffersPixelFormat(bool isEncoder);
+ /**
+ * Queue a C2 info buffer that will be sent to the codec in the subsequent
+ * queueInputBuffer call.
+ *
+ * @param buffer C2 info buffer
+ */
+ void setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer);
+
private:
uint32_t getInputBuffersPixelFormat();
@@ -400,6 +408,8 @@
std::atomic_bool mSendEncryptedInfoBuffer;
std::atomic_bool mTunneled;
+
+ std::vector<std::shared_ptr<C2InfoBuffer>> mInfoBuffers;
};
// Conversion of a c2_status_t value to a status_t value may depend on the
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index d313f33..2fa89e7 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -24,6 +24,7 @@
#include <C2PlatformSupport.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/CodecBase.h>
#include <media/stagefright/MediaCodecConstants.h>
@@ -57,7 +58,7 @@
std::shared_ptr<C2GraphicBlock> block;
c2_status_t err = pool->fetchGraphicBlock(
- width, height, pixelFormat, fullUsage, &block);
+ align(width, 2), align(height, 2), pixelFormat, fullUsage, &block);
if (err != C2_OK) {
ALOGD("fetch graphic block failed: %d", err);
return nullptr;
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index c22deca..db59227 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -19,6 +19,8 @@
#include <initializer_list>
+#include <android_media_codec.h>
+
#include <cutils/properties.h>
#include <log/log.h>
#include <utils/NativeHandle.h>
@@ -591,6 +593,13 @@
}
return C2Value();
}));
+
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ add(ConfigMapper(C2_PARAMKEY_QP_OFFSET_RECTS, C2_PARAMKEY_QP_OFFSET_RECTS, "")
+ .limitTo(D::VIDEO & (D::CONFIG | D::PARAM) & D::ENCODER & D::INPUT));
+ }
+
deprecated(ConfigMapper(PARAMETER_KEY_REQUEST_SYNC_FRAME,
"coding.request-sync", "value")
.limitTo(D::PARAM & D::ENCODER)
@@ -1121,6 +1130,11 @@
mParamUpdater->clear();
mParamUpdater->supportWholeParam(
C2_PARAMKEY_TEMPORAL_LAYERING, C2StreamTemporalLayeringTuning::CORE_INDEX);
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ mParamUpdater->supportWholeParam(
+ C2_PARAMKEY_QP_OFFSET_RECTS, C2StreamQpOffsetRects::CORE_INDEX);
+ }
mParamUpdater->addParamDesc(mReflector, mParamDescs);
// TEMP: add some standard fields even if not reflected
@@ -1871,6 +1885,39 @@
}
}
+ if (android::media::codec::provider_->region_of_interest()
+ && android::media::codec::provider_->region_of_interest_support()) {
+ if (mDomain == (IS_VIDEO | IS_ENCODER)) {
+ AString qpOffsetRects;
+ if (params->findString(PARAMETER_KEY_QP_OFFSET_RECTS, &qpOffsetRects)) {
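+ // Each region is encoded as "top,left-bottom,right=qpOffset;"; convert it to a
+ // C2Rect (width = right - left, height = bottom - top) with the parsed offset.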
+ std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
+ char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
+ strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
+ char* box = strtok(mutableStrQpOffsetRects, ";");
+ while (box != nullptr) {
+ int top, left, bottom, right, offset;
+ if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
+ left = c2_max(0, left);
+ top = c2_max(0, top);
+ if (right > left && bottom > top) {
+ C2Rect rect(right - left, bottom - top);
+ rect.at(left, top);
+ c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
+ }
+ }
+ box = strtok(nullptr, ";");
+ }
+ if (c2QpOffsetRects.size() != 0) {
+ const std::unique_ptr<C2StreamQpOffsetRects::output> regions =
+ C2StreamQpOffsetRects::output::AllocUnique(
+ c2QpOffsetRects.size(), 0u, c2QpOffsetRects);
+ params->setBuffer(C2_PARAMKEY_QP_OFFSET_RECTS,
+ ABuffer::CreateAsCopy(regions.get(), regions->size()));
+ }
+ }
+ }
+ }
+
// this is to verify that we set proper signedness for standard parameters
bool beVeryStrict = property_get_bool("debug.stagefright.ccodec_strict_type", false);
// this is to allow vendors to use the wrong signedness for standard parameters
diff --git a/media/codec2/sfplugin/tests/Android.bp b/media/codec2/sfplugin/tests/Android.bp
index 246e563..2739f44 100644
--- a/media/codec2/sfplugin/tests/Android.bp
+++ b/media/codec2/sfplugin/tests/Android.bp
@@ -42,6 +42,7 @@
],
static_libs: [
+ "android.media.codec-aconfig-cc",
"libcodec2_hidl@1.0",
"libstagefright_bufferpool@2.0",
],
diff --git a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
index 3615289..508bec2 100644
--- a/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecConfig_test.cpp
@@ -20,6 +20,8 @@
#include <gtest/gtest.h>
+#include <android_media_codec.h>
+
#include <codec2/hidl/1.0/Configurable.h>
#include <codec2/hidl/client.h>
#include <util/C2InterfaceHelper.h>
@@ -235,6 +237,22 @@
})
.withSetter(Setter<C2StreamProfileLevelInfo::output>)
.build());
+
+ std::vector<C2QpOffsetRectStruct> c2QpOffsetRectsInfo;
+ addParameter(
+ DefineParam(mInputQpOffsetRects, C2_PARAMKEY_QP_OFFSET_RECTS)
+ .withDefault(C2StreamQpOffsetRects::output::AllocShared(
+ c2QpOffsetRectsInfo.size(), 0, c2QpOffsetRectsInfo))
+ .withFields({
+ C2F(mInputQpOffsetRects, m.values[0].qpOffset)
+ .inRange(-128, 127),
+ C2F(mInputQpOffsetRects, m.values[0].left).any(),
+ C2F(mInputQpOffsetRects, m.values[0].top).any(),
+ C2F(mInputQpOffsetRects, m.values[0].width).any(),
+ C2F(mInputQpOffsetRects, m.values[0].height).any(),
+ })
+ .withSetter(Setter<C2StreamQpOffsetRects::output>)
+ .build());
}
// TODO: more SDK params
@@ -254,6 +272,7 @@
std::shared_ptr<C2StreamBitrateInfo::output> mOutputBitrate;
std::shared_ptr<C2StreamProfileLevelInfo::input> mInputProfileLevel;
std::shared_ptr<C2StreamProfileLevelInfo::output> mOutputProfileLevel;
+ std::shared_ptr<C2StreamQpOffsetRects::output> mInputQpOffsetRects;
template<typename T>
static C2R Setter(bool, C2P<T> &) {
@@ -636,4 +655,56 @@
HdrProfilesTest,
::testing::ValuesIn(kHdrProfilesParams));
+TEST_F(CCodecConfigTest, SetRegionOfInterestParams) {
+ if (!android::media::codec::provider_->region_of_interest()
+ || !android::media::codec::provider_->region_of_interest_support()) {
+ GTEST_SKIP() << "Skipping the test as region_of_interest flags are not enabled.\n";
+ }
+
+ init(C2Component::DOMAIN_VIDEO, C2Component::KIND_ENCODER, MIMETYPE_VIDEO_VP9);
+
+ ASSERT_EQ(OK, mConfig.initialize(mReflector, mConfigurable));
+
+ const int kWidth = 32;
+ const int kHeight = 32;
+ const int kNumBlocks = ((kWidth + 15) / 16) * ((kHeight + 15) / 16);
+ int8_t mapInfo[kNumBlocks] = {-1, 0, 1, 1};
+ int top[kNumBlocks] = {0, 0, 16, 16};
+ int left[kNumBlocks] = {0, 16, 0, 16};
+ int bottom[kNumBlocks] = {16, 16, 32, 32};
+ int right[kNumBlocks] = {16, 32, 16, 32};
+ sp<AMessage> format{new AMessage};
+ format->setInt32(KEY_WIDTH, kWidth);
+ format->setInt32(KEY_HEIGHT, kHeight);
+ AString val;
+ for (int i = 0; i < kNumBlocks; i++) {
+ val.append(AStringPrintf("%d,%d-%d,%d=%d;", top[i], left[i], bottom[i],
+ right[i], mapInfo[i]));
+ }
+ format->setString(PARAMETER_KEY_QP_OFFSET_RECTS, val);
+
+ std::vector<std::unique_ptr<C2Param>> configUpdate;
+ ASSERT_EQ(OK, mConfig.getConfigUpdateFromSdkParams(mConfigurable, format, D::CONFIG,
+ C2_MAY_BLOCK, &configUpdate));
+
+ EXPECT_EQ(1u, configUpdate.size());
+
+ C2StreamQpOffsetRects::output* qpRectParam =
+ FindParam<std::remove_pointer<decltype(qpRectParam)>::type>(configUpdate);
+ ASSERT_NE(nullptr, qpRectParam);
+ ASSERT_EQ(kNumBlocks, qpRectParam->flexCount());
+ for (auto i = 0; i < kNumBlocks; i++) {
+ EXPECT_EQ(mapInfo[i], (int8_t)qpRectParam->m.values[i].qpOffset)
+ << "qp offset for index " << i << " is not as expected ";
+ EXPECT_EQ(left[i], qpRectParam->m.values[i].left)
+ << "left for index " << i << " is not as expected ";
+ EXPECT_EQ(top[i], qpRectParam->m.values[i].top)
+ << "top for index " << i << " is not as expected ";
+ EXPECT_EQ(right[i] - left[i], qpRectParam->m.values[i].width)
+ << "width for index " << i << " is not as expected ";
+ EXPECT_EQ(bottom[i] - top[i], qpRectParam->m.values[i].height)
+ << "height for index " << i << " is not as expected ";
+ }
+}
+
} // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 261fd05..75e9bbc 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -593,8 +593,6 @@
uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
const C2GraphicView &src, C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
CHECK(dstY != nullptr);
- CHECK((src.width() & 1) == 0);
- CHECK((src.height() & 1) == 0);
if (dstStride * dstVStride * 3 / 2 > bufferSize) {
ALOGD("conversion buffer is too small for converting from RGB to YUV");
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index ff72b1f..77a76e8 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -46,6 +46,10 @@
return isAtLeast(__ANDROID_API_U__, "UpsideDownCake");
}
+bool isAtLeastV() {
+ return isAtLeast(__ANDROID_API_V__, "VanillaIceCream");
+}
+
static bool isP010Allowed() {
// The Vendor API level which is min(ro.product.first_api_level, ro.board.[first_]api_level).
// This is the api level to which VSR requirement the device conform.
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.h b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
index 9bb52bd..693b3db 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.h
@@ -25,6 +25,8 @@
bool isAtLeastU();
+bool isAtLeastV();
+
bool isVendorApiOrFirstApiAtLeastT();
/**
diff --git a/media/libaudiohal/impl/AidlUtils.cpp b/media/libaudiohal/impl/AidlUtils.cpp
new file mode 100644
index 0000000..a916802
--- /dev/null
+++ b/media/libaudiohal/impl/AidlUtils.cpp
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AidlUtils.h"
+
+#define LOG_TAG "AIDLUtils"
+#include <utils/Log.h>
+
+namespace android {
+
+//static
+HalDeathHandler& HalDeathHandler::getInstance() {
+ // never-delete singleton
+ static HalDeathHandler* instance = new HalDeathHandler;
+ return *instance;
+}
+
+//static
+void HalDeathHandler::OnBinderDied(void*) {
+ ALOGE("HAL instance died, audio server is restarting");
+ _exit(1); // Avoid calling atexit handlers, as this code runs on a thread from the RPC threadpool.
+}
+
+HalDeathHandler::HalDeathHandler()
+ : mDeathRecipient(AIBinder_DeathRecipient_new(OnBinderDied)) {}
+
+bool HalDeathHandler::registerHandler(AIBinder* binder) {
+ binder_status_t status = AIBinder_linkToDeath(binder, mDeathRecipient.get(), nullptr);
+ if (status == STATUS_OK) return true;
+ ALOGE("%s: linkToDeath failed: %d", __func__, status);
+ return false;
+}
+
+} // namespace android
diff --git a/media/libaudiohal/impl/AidlUtils.h b/media/libaudiohal/impl/AidlUtils.h
index 3d42b53..97a5bba 100644
--- a/media/libaudiohal/impl/AidlUtils.h
+++ b/media/libaudiohal/impl/AidlUtils.h
@@ -20,10 +20,24 @@
#include <string>
#include <android/binder_auto_utils.h>
+#include <android/binder_ibinder.h>
#include <android/binder_manager.h>
namespace android {
+class HalDeathHandler {
+ public:
+ static HalDeathHandler& getInstance();
+
+ bool registerHandler(AIBinder* binder);
+ private:
+ static void OnBinderDied(void*);
+
+ HalDeathHandler();
+
+ ::ndk::ScopedAIBinder_DeathRecipient mDeathRecipient;
+};
+
template<class Intf>
std::shared_ptr<Intf> getServiceInstance(const std::string& instanceName) {
const std::string serviceName =
@@ -37,6 +51,9 @@
}
// `fromBinder` may fail and return a nullptr if the service has died in the meantime.
service = Intf::fromBinder(ndk::SpAIBinder(serviceBinder));
+ if (service != nullptr) {
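+ // Register a death handler so audioserver restarts if this HAL instance dies.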
+ HalDeathHandler::getInstance().registerHandler(serviceBinder);
+ }
}
return service;
}
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index c7fa96e..dd8f021 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -262,6 +262,7 @@
srcs: [
":audio_effect_hal_aidl_src_files",
":core_audio_hal_aidl_src_files",
+ "AidlUtils.cpp",
"DevicesFactoryHalEntry.cpp",
"EffectsFactoryHalEntry.cpp",
],
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 2a8ebc6..ce56d87 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -569,7 +569,19 @@
status_t StreamOutHalAidl::setVolume(float left, float right) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- return statusTFromBinderStatus(mStream->setHwVolume({left, right}));
+ size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
+ if (channelCount == 0) channelCount = 2;
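+ // Map the stereo (left, right) request onto the stream's channel count: average for mono
+ // and for any channels beyond the first two.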
+ std::vector<float> volumes(channelCount);
+ if (channelCount == 1) {
+ volumes[0] = (left + right) / 2;
+ } else {
+ volumes[0] = left;
+ volumes[1] = right;
+ for (size_t i = 2; i < channelCount; ++i) {
+ volumes[i] = (left + right) / 2;
+ }
+ }
+ return statusTFromBinderStatus(mStream->setHwVolume(volumes));
}
status_t StreamOutHalAidl::selectPresentation(int presentationId, int programId) {
@@ -866,7 +878,9 @@
status_t StreamInHalAidl::setGain(float gain) {
TIME_CHECK();
if (!mStream) return NO_INIT;
- return statusTFromBinderStatus(mStream->setHwGain({gain}));
+ const size_t channelCount = audio_channel_count_from_in_mask(mConfig.channel_mask);
+ std::vector<float> gains(channelCount != 0 ? channelCount : 1, gain);
+ return statusTFromBinderStatus(mStream->setHwGain(gains));
}
status_t StreamInHalAidl::read(void *buffer, size_t bytes, size_t *read) {
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index dd6da15..b7efbce 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -20,7 +20,6 @@
{
"exclude-annotation": "android.platform.test.annotations.RequiresDevice"
},
- // TODO: b/149314419
{
"exclude-filter": "android.media.audio.cts.AudioPlaybackCaptureTest"
},
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index dc7d787..0e89521 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -335,7 +335,7 @@
</MediaCodec>
<MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
<Alias name="OMX.google.vp8.encoder" />
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<Variant name="!slow-cpu">
<Limit name="size" min="2x2" max="2048x2048" />
@@ -371,7 +371,7 @@
<Alias name="OMX.google.vp9.encoder" />
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="2048x2048" />
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<!-- 2016 devices can encode at about 8fps at this block count -->
<Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
@@ -380,7 +380,7 @@
<Attribute name="software-codec" />
</MediaCodec>
<MediaCodec name="c2.android.av1.encoder" type="video/av01" enabled="false" minsdk="34" variant="slow-cpu,!slow-cpu">
- <Limit name="alignment" value="2x2" />
+ <Limit name="alignment" value="1x1" />
<Limit name="block-size" value="16x16" />
<Variant name="!slow-cpu">
<Limit name="size" min="2x2" max="1920x1920" />
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 24ac2e8..72785d5 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -893,6 +893,8 @@
inline constexpr char PARAMETER_KEY_SUSPEND_TIME[] = "drop-start-time-us";
inline constexpr char PARAMETER_KEY_TUNNEL_PEEK[] = "tunnel-peek";
inline constexpr char PARAMETER_KEY_VIDEO_BITRATE[] = "video-bitrate";
+inline constexpr char PARAMETER_KEY_QP_OFFSET_MAP[] = "qp-offset-map";
+inline constexpr char PARAMETER_KEY_QP_OFFSET_RECTS[] = "qp-offset-rects";
}
diff --git a/media/libstagefright/include/media/stagefright/PersistentSurface.h b/media/libstagefright/include/media/stagefright/PersistentSurface.h
index f4943c3..554ee43 100644
--- a/media/libstagefright/include/media/stagefright/PersistentSurface.h
+++ b/media/libstagefright/include/media/stagefright/PersistentSurface.h
@@ -18,6 +18,8 @@
#define PERSISTENT_SURFACE_H_
+#include <android/binder_auto_utils.h>
+#include <android/binder_libbinder.h>
#include <binder/Parcel.h>
#include <hidl/HidlSupport.h>
#include <hidl/HybridInterface.h>
@@ -29,24 +31,43 @@
struct PersistentSurface : public RefBase {
PersistentSurface() {}
- // create a persistent surface
+ // create a persistent surface in HIDL
PersistentSurface(
const sp<IGraphicBufferProducer>& bufferProducer,
const sp<hidl::base::V1_0::IBase>& hidlTarget) :
mBufferProducer(bufferProducer),
- mHidlTarget(hidlTarget) { }
+ mHidlTarget(hidlTarget),
+ mAidlTarget(nullptr),
+ mAidl(false) { }
+
+ // create a persistent surface in AIDL
+ PersistentSurface(
+ const sp<IGraphicBufferProducer>& bufferProducer,
+ const ::ndk::SpAIBinder& aidlTarget) :
+ mBufferProducer(bufferProducer),
+ mHidlTarget(nullptr),
+ mAidlTarget(aidlTarget),
+ mAidl(true) { }
sp<IGraphicBufferProducer> getBufferProducer() const {
return mBufferProducer;
}
+ bool isTargetAidl() const {
+ return mAidl;
+ }
+
sp<hidl::base::V1_0::IBase> getHidlTarget() const {
- return mHidlTarget;
+ return mAidl ? nullptr : mHidlTarget;
+ }
+
+ ::ndk::SpAIBinder getAidlTarget() const {
+ return mAidl ? mAidlTarget : nullptr;
}
status_t writeToParcel(Parcel *parcel) const {
parcel->writeStrongBinder(IInterface::asBinder(mBufferProducer));
- // write hidl target
+ // write hidl target if available
if (mHidlTarget != nullptr) {
HalToken token;
bool result = createHalToken(mHidlTarget, &token);
@@ -57,6 +78,22 @@
} else {
parcel->writeBool(false);
}
+ // write aidl target if available
+ if (mAidl) {
+ AIBinder *binder = mAidlTarget.get();
+ if (binder != nullptr) {
+ ::android::sp<::android::IBinder> intf =
+ AIBinder_toPlatformBinder(binder);
+ if (intf) {
+ parcel->writeBool(true);
+ parcel->writeStrongBinder(intf);
+ } else {
+ parcel->writeBool(false);
+ }
+ } else {
+ parcel->writeBool(false);
+ }
+ }
return NO_ERROR;
}
@@ -65,21 +102,43 @@
parcel->readStrongBinder());
// read hidl target
bool haveHidlTarget = parcel->readBool();
+ mAidl = false;
if (haveHidlTarget) {
std::vector<uint8_t> tokenVector;
parcel->readByteVector(&tokenVector);
HalToken token = HalToken(tokenVector);
mHidlTarget = retrieveHalInterface(token);
deleteHalToken(token);
+ return NO_ERROR;
} else {
mHidlTarget.clear();
}
+
+ // read aidl target
+ bool haveAidlTarget = false;
+ if (parcel->readBool(&haveAidlTarget) != NO_ERROR) {
+ return NO_ERROR;
+ }
+ mAidl = true;
+ if (haveAidlTarget) {
+ ::android::sp<::android::IBinder> intf = parcel->readStrongBinder();
+ AIBinder *ndkBinder = AIBinder_fromPlatformBinder(intf);
+ if (ndkBinder) {
+ mAidlTarget.set(ndkBinder);
+ } else {
+ mAidlTarget.set(nullptr);
+ }
+ } else {
+ mAidlTarget.set(nullptr);
+ }
return NO_ERROR;
}
private:
sp<IGraphicBufferProducer> mBufferProducer;
sp<hidl::base::V1_0::IBase> mHidlTarget;
+ ::ndk::SpAIBinder mAidlTarget;
+ bool mAidl;
DISALLOW_EVIL_CONSTRUCTORS(PersistentSurface);
};
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 3598e8d..22b13f6 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -118,3 +118,11 @@
local_include_dirs: ["include"],
export_include_dirs: ["include"],
}
+
+cc_library_headers {
+ name: "librenderfright_gl_headers",
+ export_include_dirs: ["gl"],
+ visibility: [
+ "//frameworks/av/media/libstagefright/renderfright/fuzzer:__subpackages__",
+ ],
+}
diff --git a/media/libstagefright/renderfright/fuzzer/Android.bp b/media/libstagefright/renderfright/fuzzer/Android.bp
new file mode 100644
index 0000000..574e49f
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/Android.bp
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+ default_team: "trendy_team_android_media_codec_framework",
+ // See: http://go/android-license-faq
+ // A large-scale-change added 'default_applicable_licenses' to import
+ // all of the 'license_kinds' from "frameworks_av_media_libstagefright_license"
+ // to get the below license kinds:
+ // SPDX-license-identifier-Apache-2.0
+ default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+}
+
+cc_fuzz {
+ name: "libstagefright_renderfright_fuzzer",
+ srcs: [
+ "libstagefright_renderfright_fuzzer.cpp",
+ ],
+ static_libs: [
+ "librenderfright",
+ ],
+ header_libs: [
+ "librenderfright_gl_headers",
+ ],
+ shared_libs: [
+ "libcutils",
+ "libgui",
+ "liblog",
+ "libutils",
+ "libEGL",
+ "libGLESv1_CM",
+ "libGLESv2",
+ "libGLESv3",
+ "libui",
+ "libbase",
+ "libprocessgroup",
+ "libsync",
+ ],
+ fuzz_config: {
+ cc: [
+ "android-media-fuzzing-reports@google.com",
+ ],
+ componentid: 155276,
+ hotlists: ["4593311"],
+ description: "The fuzzer targets the APIs of librenderfright",
+ vector: "local_no_privileges_required",
+ service_privilege: "constrained",
+ users: "multi_user",
+ fuzzed_code_usage: "shipped",
+ },
+}
diff --git a/media/libstagefright/renderfright/fuzzer/README.md b/media/libstagefright/renderfright/fuzzer/README.md
new file mode 100644
index 0000000..742bfdc
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/README.md
@@ -0,0 +1,33 @@
+# Fuzzer for libstagefright_renderfright
+
+RenderFright supports the following parameters:
+1. SetContextPriority (parameter name: "kSetContextPriority")
+2. SetRenderEngineType (parameter name: "kSetRenderEngineType")
+3. CleanupMode (parameter name: "kCleanupMode")
+4. DataSpace (parameter name: "kDataSpace")
+5. ReadBufferUsage (parameter name: "kReadBufferUsage")
+6. WriteBufferUsage (parameter name: "kWriteBufferUsage")
+7. RenderBufferUsage (parameter name: "kRenderBufferUsage")
+
+| Parameter| Valid Values| Configured Value|
+|------------- |-------------| ----- |
+|`kSetContextPriority`| 0. `RenderEngine::ContextPriority::LOW`<br/>1. `RenderEngine::ContextPriority::MEDIUM`<br/>2. `RenderEngine::ContextPriority::HIGH` |Value obtained from FuzzedDataProvider|
+|`kSetRenderEngineType`| 0. `RenderEngine::RenderEngineType::GLES`<br/>1. `RenderEngine::RenderEngineType::THREADED`|Value obtained from FuzzedDataProvider|
+|`kCleanupMode`| 0. `RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES`<br/>1. `RenderEngine::CleanupMode::CLEAN_ALL`|Value obtained from FuzzedDataProvider|
+|`kDataSpace`| 0. `ui::Dataspace::UNKNOWN`<br/>1. `ui::Dataspace::ARBITRARY`<br/>2. `ui::Dataspace::STANDARD_SHIFT`<br/>3. `ui::Dataspace::STANDARD_MASK`<br/>4. `ui::Dataspace::STANDARD_UNSPECIFIED`<br/>5. `ui::Dataspace::STANDARD_BT709`<br/>6. `ui::Dataspace::STANDARD_BT601_625`<br/>7. `ui::Dataspace::STANDARD_BT601_625_UNADJUSTED`<br/>8. `ui::Dataspace::STANDARD_BT601_525`<br/>9. `ui::Dataspace::STANDARD_BT601_525_UNADJUSTED`<br/>10. `ui::Dataspace::STANDARD_BT2020`<br/>11. `ui::Dataspace::STANDARD_BT2020_CONSTANT_LUMINANCE`<br/>12. `ui::Dataspace::STANDARD_BT470M`<br/>13. `ui::Dataspace::STANDARD_FILM`<br/>14. `ui::Dataspace::STANDARD_DCI_P3`<br/>15. `ui::Dataspace::STANDARD_ADOBE_RGB`<br/>16. `ui::Dataspace::TRANSFER_SHIFT`<br/>17. `ui::Dataspace::TRANSFER_MASK`<br/>18. `ui::Dataspace::TRANSFER_UNSPECIFIED`<br/>19. `ui::Dataspace::TRANSFER_LINEAR`<br/>20. `ui::Dataspace::TRANSFER_SRGB`<br/>21. `ui::Dataspace::TRANSFER_SMPTE_170M`<br/>22. `ui::Dataspace::TRANSFER_GAMMA2_2`<br/>23. `ui::Dataspace::TRANSFER_GAMMA2_6`<br/>24. `ui::Dataspace::TRANSFER_GAMMA2_8`<br/>25. `ui::Dataspace::TRANSFER_ST2084`<br/>26. `ui::Dataspace::TRANSFER_HLG`<br/>27. `ui::Dataspace::RANGE_SHIFT`<br/>28. `ui::Dataspace::RANGE_MASK`<br/>29. `ui::Dataspace::RANGE_UNSPECIFIED`<br/>30. `ui::Dataspace::RANGE_FULL`<br/>31. `ui::Dataspace::RANGE_LIMITED`<br/>32. `ui::Dataspace::RANGE_EXTENDED`<br/>33. `ui::Dataspace::SRGB_LINEAR`<br/>34. `ui::Dataspace::V0_SRGB_LINEAR`<br/>35. `ui::Dataspace::V0_SCRGB_LINEAR`<br/>36. `ui::Dataspace::SRGB`<br/>37. `ui::Dataspace::V0_SRGB`<br/>38. `ui::Dataspace::V0_SCRGB`<br/>39. `ui::Dataspace::JFIF`<br/>40. `ui::Dataspace::V0_JFIF`<br/>41. `ui::Dataspace::BT601_625`<br/>42. `ui::Dataspace::V0_BT601_625`<br/>43. `ui::Dataspace::BT601_525`<br/>44. `ui::Dataspace::V0_BT601_525`<br/>45. `ui::Dataspace::BT709`<br/>46. `ui::Dataspace::V0_BT709`<br/>47. `ui::Dataspace::DCI_P3_LINEAR`<br/>48. `ui::Dataspace::DCI_P3`<br/>49. `ui::Dataspace::DISPLAY_P3_LINEAR`<br/>50. `ui::Dataspace::DISPLAY_P3`<br/>51. `ui::Dataspace::ADOBE_RGB`<br/>52. `ui::Dataspace::BT2020_LINEAR`<br/>53. `ui::Dataspace::BT2020`<br/>54. `ui::Dataspace::BT2020_PQ`<br/>55. `ui::Dataspace::DEPTH`<br/>56. `ui::Dataspace::SENSOR`<br/>57. `ui::Dataspace::BT2020_ITU`<br/>58. `ui::Dataspace::BT2020_ITU_PQ`<br/>59. `ui::Dataspace::BT2020_ITU_HLG`<br/>60. `ui::Dataspace::BT2020_HLG`<br/>61. `ui::Dataspace::DISPLAY_BT2020`<br/>62. `ui::Dataspace::DYNAMIC_DEPTH`<br/>63. `ui::Dataspace::JPEG_APP_SEGMENTS`<br/>64. `ui::Dataspace::HEIF`|Value obtained from FuzzedDataProvider|
+|`kReadBufferUsage`| 0. `GRALLOC_USAGE_SW_READ_NEVER`<br/>1. `GRALLOC_USAGE_SW_READ_RARELY`<br/>2. `GRALLOC_USAGE_SW_READ_OFTEN`<br/>3. `GRALLOC_USAGE_SW_READ_MASK`|Value obtained from FuzzedDataProvider|
+|`kWriteBufferUsage`| 0. `GRALLOC_USAGE_SW_WRITE_NEVER`<br/>1. `GRALLOC_USAGE_SW_WRITE_RARELY`<br/>2. `GRALLOC_USAGE_SW_WRITE_OFTEN`<br/>3. `GRALLOC_USAGE_SW_WRITE_MASK`|Value obtained from FuzzedDataProvider|
+|`kRenderBufferUsage`| 0. `GRALLOC_USAGE_HW_TEXTURE`<br/>1. `GRALLOC_USAGE_HW_RENDER`<br/>2. `GRALLOC_USAGE_HW_2D`<br/>3. `GRALLOC_USAGE_HW_COMPOSER`<br/>4. `GRALLOC_USAGE_HW_FB`<br/>5. `GRALLOC_USAGE_EXTERNAL_DISP`<br/>6. `GRALLOC_USAGE_PROTECTED`<br/>7. `GRALLOC_USAGE_CURSOR`<br/>8. `GRALLOC_USAGE_HW_VIDEO_ENCODER`<br/>9. `GRALLOC_USAGE_HW_CAMERA_WRITE`<br/>10. `GRALLOC_USAGE_HW_CAMERA_READ`<br/>11. `GRALLOC_USAGE_HW_CAMERA_ZSL`<br/>12. `GRALLOC_USAGE_HW_CAMERA_MASK`<br/>13. `GRALLOC_USAGE_HW_MASK`<br/>14. `GRALLOC_USAGE_RENDERSCRIPT`<br/>15. `GRALLOC_USAGE_FOREIGN_BUFFERS`<br/>16. `GRALLOC_USAGE_HW_IMAGE_ENCODER`|Value obtained from FuzzedDataProvider|
+
+
+
+#### Steps to run
+1. Build the fuzzer
+```
+ $ mm -j$(nproc) libstagefright_renderfright_fuzzer
+```
+2. Run on device
+```
+ $ adb sync data
+ $ adb shell /data/fuzz/arm64/libstagefright_renderfright_fuzzer/libstagefright_renderfright_fuzzer
+```
diff --git a/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp b/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp
new file mode 100644
index 0000000..b0721e0
--- /dev/null
+++ b/media/libstagefright/renderfright/fuzzer/libstagefright_renderfright_fuzzer.cpp
@@ -0,0 +1,297 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <GLESRenderEngine.h>
+#include <GLFramebuffer.h>
+#include <GLImage.h>
+#include <Program.h>
+#include <ProgramCache.h>
+#include <fuzzer/FuzzedDataProvider.h>
+#include <renderengine/RenderEngine.h>
+
+using namespace android::renderengine;
+using namespace android;
+
+static constexpr int32_t kMinRenderAPI = 0;
+static constexpr int32_t kMaxRenderAPI = 8;
+static constexpr int32_t kMaxTextureCount = 100;
+static constexpr int32_t KMaxDisplayWidth = 3840;
+static constexpr int32_t KMaxDisplayHeight = 2160;
+static constexpr int32_t kMinPixelFormat = 1;
+static constexpr int32_t kMaxPixelFormat = 55;
+static constexpr int32_t kMaxRenderLayer = 5;
+
+static constexpr ui::Dataspace kDataSpace[] = {
+ ui::Dataspace::UNKNOWN,
+ ui::Dataspace::ARBITRARY,
+ ui::Dataspace::STANDARD_SHIFT,
+ ui::Dataspace::STANDARD_MASK,
+ ui::Dataspace::STANDARD_UNSPECIFIED,
+ ui::Dataspace::STANDARD_BT709,
+ ui::Dataspace::STANDARD_BT601_625,
+ ui::Dataspace::STANDARD_BT601_625_UNADJUSTED,
+ ui::Dataspace::STANDARD_BT601_525,
+ ui::Dataspace::STANDARD_BT601_525_UNADJUSTED,
+ ui::Dataspace::STANDARD_BT2020,
+ ui::Dataspace::STANDARD_BT2020_CONSTANT_LUMINANCE,
+ ui::Dataspace::STANDARD_BT470M,
+ ui::Dataspace::STANDARD_FILM,
+ ui::Dataspace::STANDARD_DCI_P3,
+ ui::Dataspace::STANDARD_ADOBE_RGB,
+ ui::Dataspace::TRANSFER_SHIFT,
+ ui::Dataspace::TRANSFER_MASK,
+ ui::Dataspace::TRANSFER_UNSPECIFIED,
+ ui::Dataspace::TRANSFER_LINEAR,
+ ui::Dataspace::TRANSFER_SRGB,
+ ui::Dataspace::TRANSFER_SMPTE_170M,
+ ui::Dataspace::TRANSFER_GAMMA2_2,
+ ui::Dataspace::TRANSFER_GAMMA2_6,
+ ui::Dataspace::TRANSFER_GAMMA2_8,
+ ui::Dataspace::TRANSFER_ST2084,
+ ui::Dataspace::TRANSFER_HLG,
+ ui::Dataspace::RANGE_SHIFT,
+ ui::Dataspace::RANGE_MASK,
+ ui::Dataspace::RANGE_UNSPECIFIED,
+ ui::Dataspace::RANGE_FULL,
+ ui::Dataspace::RANGE_LIMITED,
+ ui::Dataspace::RANGE_EXTENDED,
+ ui::Dataspace::SRGB_LINEAR,
+ ui::Dataspace::V0_SRGB_LINEAR,
+ ui::Dataspace::V0_SCRGB_LINEAR,
+ ui::Dataspace::SRGB,
+ ui::Dataspace::V0_SRGB,
+ ui::Dataspace::V0_SCRGB,
+ ui::Dataspace::JFIF,
+ ui::Dataspace::V0_JFIF,
+ ui::Dataspace::BT601_625,
+ ui::Dataspace::V0_BT601_625,
+ ui::Dataspace::BT601_525,
+ ui::Dataspace::V0_BT601_525,
+ ui::Dataspace::BT709,
+ ui::Dataspace::V0_BT709,
+ ui::Dataspace::DCI_P3_LINEAR,
+ ui::Dataspace::DCI_P3,
+ ui::Dataspace::DISPLAY_P3_LINEAR,
+ ui::Dataspace::DISPLAY_P3,
+ ui::Dataspace::ADOBE_RGB,
+ ui::Dataspace::BT2020_LINEAR,
+ ui::Dataspace::BT2020,
+ ui::Dataspace::BT2020_PQ,
+ ui::Dataspace::DEPTH,
+ ui::Dataspace::SENSOR,
+ ui::Dataspace::BT2020_ITU,
+ ui::Dataspace::BT2020_ITU_PQ,
+ ui::Dataspace::BT2020_ITU_HLG,
+ ui::Dataspace::BT2020_HLG,
+ ui::Dataspace::DISPLAY_BT2020,
+ ui::Dataspace::DYNAMIC_DEPTH,
+ ui::Dataspace::JPEG_APP_SEGMENTS,
+ ui::Dataspace::HEIF,
+};
+
+static constexpr int32_t kReadBufferUsage[] = {
+ GRALLOC_USAGE_SW_READ_NEVER, GRALLOC_USAGE_SW_READ_RARELY, GRALLOC_USAGE_SW_READ_OFTEN,
+ GRALLOC_USAGE_SW_READ_MASK};
+
+static constexpr int32_t kWriteBufferUsage[] = {
+ GRALLOC_USAGE_SW_WRITE_NEVER, GRALLOC_USAGE_SW_WRITE_RARELY, GRALLOC_USAGE_SW_WRITE_OFTEN,
+ GRALLOC_USAGE_SW_WRITE_MASK};
+
+static constexpr int32_t kRenderBufferUsage[] = {
+ GRALLOC_USAGE_HW_TEXTURE,
+ GRALLOC_USAGE_HW_RENDER,
+ GRALLOC_USAGE_HW_2D,
+ GRALLOC_USAGE_HW_COMPOSER,
+ GRALLOC_USAGE_HW_FB,
+ GRALLOC_USAGE_EXTERNAL_DISP,
+ GRALLOC_USAGE_PROTECTED,
+ GRALLOC_USAGE_CURSOR,
+ GRALLOC_USAGE_HW_VIDEO_ENCODER,
+ GRALLOC_USAGE_HW_CAMERA_WRITE,
+ GRALLOC_USAGE_HW_CAMERA_READ,
+ GRALLOC_USAGE_HW_CAMERA_ZSL,
+ GRALLOC_USAGE_HW_CAMERA_MASK,
+ GRALLOC_USAGE_HW_MASK,
+ GRALLOC_USAGE_RENDERSCRIPT,
+ GRALLOC_USAGE_FOREIGN_BUFFERS,
+ GRALLOC_USAGE_HW_IMAGE_ENCODER,
+};
+
+static constexpr RenderEngine::ContextPriority kSetContextPriority[] = {
+ RenderEngine::ContextPriority::LOW, RenderEngine::ContextPriority::MEDIUM,
+ RenderEngine::ContextPriority::HIGH};
+
+static constexpr RenderEngine::RenderEngineType kSetRenderEngineType[] = {
+ RenderEngine::RenderEngineType::GLES, RenderEngine::RenderEngineType::THREADED};
+
+static constexpr RenderEngine::CleanupMode kCleanupMode[] = {
+ RenderEngine::CleanupMode::CLEAN_OUTPUT_RESOURCES, RenderEngine::CleanupMode::CLEAN_ALL};
+
+class RenderFrightFuzzer {
+ public:
+ RenderFrightFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+ void process();
+
+ private:
+ FuzzedDataProvider mFdp;
+ void getLayerSetting(renderengine::LayerSettings& layerSetting, sp<GraphicBuffer> buffer,
+ const Rect& sourceCrop, uint32_t textureName);
+};
+
+void RenderFrightFuzzer::getLayerSetting(renderengine::LayerSettings& layerSetting,
+ sp<GraphicBuffer> buffer, const Rect& sourceCrop,
+ uint32_t textureName) {
+ layerSetting.geometry.boundaries = sourceCrop.toFloatRect();
+ layerSetting.geometry.roundedCornersRadius = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.geometry.roundedCornersCrop = sourceCrop.toFloatRect();
+
+ layerSetting.alpha = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.sourceDataspace = mFdp.PickValueInArray(kDataSpace);
+ layerSetting.backgroundBlurRadius = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.source.buffer.buffer = buffer;
+ layerSetting.source.buffer.isOpaque = mFdp.ConsumeBool();
+ layerSetting.source.buffer.fence = Fence::NO_FENCE;
+ layerSetting.source.buffer.textureName = textureName;
+ layerSetting.source.buffer.usePremultipliedAlpha = mFdp.ConsumeBool();
+ layerSetting.source.buffer.isY410BT2020 =
+ (layerSetting.sourceDataspace == ui::Dataspace::BT2020_ITU_PQ ||
+ layerSetting.sourceDataspace == ui::Dataspace::BT2020_ITU_HLG);
+ layerSetting.source.buffer.maxMasteringLuminance = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.source.buffer.maxContentLuminance = mFdp.ConsumeFloatingPoint<float>();
+
+ layerSetting.shadow.lightPos =
+ vec3(mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(), 0);
+ layerSetting.shadow.ambientColor = {
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(),
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>()};
+ layerSetting.shadow.spotColor = {
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>(),
+ mFdp.ConsumeFloatingPoint<float>(), mFdp.ConsumeFloatingPoint<float>()};
+ layerSetting.shadow.length = mFdp.ConsumeFloatingPoint<float>();
+ layerSetting.shadow.casterIsTranslucent = mFdp.ConsumeBool();
+}
+
+void RenderFrightFuzzer::process() {
+ auto args = RenderEngineCreationArgs::Builder()
+ .setPixelFormat(mFdp.ConsumeIntegralInRange<int32_t>(kMinPixelFormat,
+ kMaxPixelFormat))
+ .setImageCacheSize(mFdp.ConsumeIntegral<uint32_t>())
+ .setUseColorManagerment(mFdp.ConsumeBool())
+ .setEnableProtectedContext(mFdp.ConsumeBool())
+ .setPrecacheToneMapperShaderOnly(mFdp.ConsumeBool())
+ .setSupportsBackgroundBlur(mFdp.ConsumeBool())
+ .setContextPriority(mFdp.PickValueInArray(kSetContextPriority))
+ .setRenderEngineType(mFdp.PickValueInArray(kSetRenderEngineType))
+ .build();
+ std::unique_ptr<RenderEngine> renderEngine = RenderEngine::create(args);
+
+ std::vector<uint32_t> textures;
+ int32_t maxCount = mFdp.ConsumeIntegralInRange<size_t>(0, kMaxTextureCount);
+ for (size_t i = 0; i < maxCount; ++i) {
+ textures.push_back(mFdp.ConsumeIntegral<uint32_t>());
+ }
+
+ while (mFdp.remaining_bytes()) {
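+ // Pick a random RenderEngine API to exercise on each iteration until the fuzz input is exhausted.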
+ int32_t renderFrightAPIs =
+ mFdp.ConsumeIntegralInRange<int32_t>(kMinRenderAPI, kMaxRenderAPI);
+ switch (renderFrightAPIs) {
+ case 0: {
+ renderEngine->genTextures(textures.size(), textures.data());
+ break;
+ }
+ case 1: {
+ renderEngine->deleteTextures(textures.size(), textures.data());
+ break;
+ }
+ case 2: {
+ renderEngine->useProtectedContext(mFdp.ConsumeBool());
+ break;
+ }
+ case 3: {
+ renderEngine->cleanupPostRender(mFdp.PickValueInArray(kCleanupMode));
+ break;
+ }
+ case 4: {
+ renderEngine->unbindExternalTextureBuffer(mFdp.ConsumeIntegral<uint64_t>());
+ break;
+ }
+ case 5: {
+ renderEngine->primeCache();
+ break;
+ }
+ case 6: {
+ sp<Fence> fence = sp<Fence>::make();
+ sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
+ renderEngine->bindExternalTextureBuffer(mFdp.ConsumeIntegral<uint32_t>(), buffer,
+ fence);
+ break;
+ }
+ case 7: {
+ sp<GraphicBuffer> buffer = sp<GraphicBuffer>::make();
+ renderEngine->cacheExternalTextureBuffer(buffer);
+ break;
+ }
+ case 8: {
+ std::vector<const renderengine::LayerSettings*> layers;
+ renderengine::LayerSettings layerSetting;
+ int32_t width = mFdp.ConsumeIntegralInRange<int32_t>(0, KMaxDisplayWidth);
+ int32_t height = mFdp.ConsumeIntegralInRange<int32_t>(0, KMaxDisplayHeight);
+ Rect sourceCrop(mFdp.ConsumeIntegralInRange<int32_t>(0, width),
+ mFdp.ConsumeIntegralInRange<int32_t>(0, height));
+ uint32_t textureName = 0;
+ /* Get a single texture name to pass to layers */
+ renderEngine->genTextures(1 /*numTextures*/, &textureName);
+ sp<GraphicBuffer> buffer;
+ const uint32_t usage = (mFdp.PickValueInArray(kReadBufferUsage) |
+ mFdp.PickValueInArray(kWriteBufferUsage) |
+ mFdp.PickValueInArray(kRenderBufferUsage));
+
+ for (int i = 0; i < kMaxRenderLayer; ++i) {
+ buffer = new GraphicBuffer(
+ width, height,
+ mFdp.ConsumeIntegralInRange<int32_t>(PIXEL_FORMAT_RGBA_8888,
+ PIXEL_FORMAT_RGBA_4444),
+ usage, "input");
+ getLayerSetting(layerSetting, buffer, sourceCrop, textureName);
+ layers.push_back(&layerSetting);
+ }
+
+ DisplaySettings settings;
+ settings.physicalDisplay = sourceCrop;
+ settings.clip = sourceCrop;
+ settings.outputDataspace = mFdp.PickValueInArray(kDataSpace);
+ settings.maxLuminance = mFdp.ConsumeFloatingPoint<float>();
+
+ sp<GraphicBuffer> dstBuffer =
+ new GraphicBuffer(width, height,
+ mFdp.ConsumeIntegralInRange<int32_t>(
+ PIXEL_FORMAT_RGBA_8888, PIXEL_FORMAT_RGBA_4444),
+ usage, "output");
+ base::unique_fd bufferFence;
+ base::unique_fd drawFence;
+
+ renderEngine->drawLayers(settings, layers, dstBuffer, mFdp.ConsumeBool(),
+ std::move(bufferFence),
+ (mFdp.ConsumeBool() ? nullptr : &drawFence));
+ }
+ }
+ }
+}
+
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+ RenderFrightFuzzer renderFrightFuzzer(data, size);
+ renderFrightFuzzer.process();
+ return 0;
+}
diff --git a/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp
new file mode 100644
index 0000000..c208666
--- /dev/null
+++ b/media/module/aidlpersistentsurface/AidlGraphicBufferSource.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#define LOG_TAG "AidlGraphicBufferSource"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <media/stagefright/bqhelper/ComponentWrapper.h>
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+
+namespace android::media {
+
+namespace {
+
+class AidlComponentWrapper : public ComponentWrapper {
+public:
+ explicit AidlComponentWrapper(const sp<IAidlNodeWrapper> &node)
+ : mAidlNode(node) {}
+ virtual ~AidlComponentWrapper() = default;
+
+ status_t submitBuffer(
+ int32_t bufferId, const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ return mAidlNode->submitBuffer(
+ bufferId, BUFFERFLAG_ENDOFFRAME, buffer, timestamp, fenceFd);
+ }
+
+ status_t submitEos(int32_t bufferId) override {
+ return mAidlNode->submitBuffer(
+ bufferId, BUFFERFLAG_ENDOFFRAME | BUFFERFLAG_EOS);
+ }
+
+ void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ mAidlNode->dispatchDataSpaceChanged(dataSpace, aspects, pixelFormat);
+ }
+
+private:
+ sp<IAidlNodeWrapper> mAidlNode;
+
+ DISALLOW_EVIL_CONSTRUCTORS(AidlComponentWrapper);
+};
+
+} // namespace
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onStart() {
+ status_t err = start();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onStop() {
+ status_t err = stop();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+::ndk::ScopedAStatus AidlGraphicBufferSource::onRelease() {
+ status_t err = release();
+ return (OK == err) ? ::ndk::ScopedAStatus::ok() :
+ ::ndk::ScopedAStatus::fromServiceSpecificError(err);
+}
+
+status_t AidlGraphicBufferSource::configure(
+ const sp<IAidlNodeWrapper>& aidlNode,
+ int32_t dataSpace,
+ int32_t bufferCount,
+ uint32_t frameWidth,
+ uint32_t frameHeight,
+ uint64_t consumerUsage) {
+ if (aidlNode == NULL) {
+ return BAD_VALUE;
+ }
+
+ return GraphicBufferSource::configure(
+ new AidlComponentWrapper(aidlNode), dataSpace, bufferCount,
+ frameWidth, frameHeight, consumerUsage);
+}
+
+} // namespace android::media
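AidlComponentWrapper funnels both frames and the end-of-stream marker through a single IAidlNodeWrapper::submitBuffer() call; submitEos() simply passes no GraphicBuffer and sets BUFFERFLAG_ENDOFFRAME | BUFFERFLAG_EOS (see C2NodeDef.h below). A hedged sketch of a test double that records what it receives; RecordingNodeWrapper is hypothetical and not part of this change:

    #include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
    #include <media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h>
    #include <utils/Errors.h>

    namespace android::media {

    // Records the last submitBuffer() call so a unit test could assert on it.
    struct RecordingNodeWrapper : public IAidlNodeWrapper {
        int32_t lastBufferId = -1;
        uint32_t lastFlags = 0;
        bool sawBuffer = false;

        status_t submitBuffer(int32_t bufferId, uint32_t flags,
                              const sp<GraphicBuffer>& buffer,
                              int64_t /*timestamp*/, int /*fenceFd*/) override {
            lastBufferId = bufferId;
            lastFlags = flags;  // submitEos() yields BUFFERFLAG_ENDOFFRAME | BUFFERFLAG_EOS here
            sawBuffer = (buffer.get() != nullptr);
            return OK;
        }
        void dispatchDataSpaceChanged(int32_t, int32_t, int32_t) override {}
    };

    }  // namespace android::media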
diff --git a/media/module/aidlpersistentsurface/Android.bp b/media/module/aidlpersistentsurface/Android.bp
new file mode 100644
index 0000000..5c1a010
--- /dev/null
+++ b/media/module/aidlpersistentsurface/Android.bp
@@ -0,0 +1,69 @@
+aidl_interface {
+ name: "graphicbuffersource-aidl",
+ unstable: true,
+ local_include_dir: "aidl",
+ min_sdk_version: "29",
+ srcs: [
+ "aidl/android/media/AidlColorAspects.aidl",
+ "aidl/android/media/IAidlGraphicBufferSource.aidl",
+ "aidl/android/media/IAidlBufferSource.aidl",
+ "aidl/android/media/IAidlNode.aidl",
+ ],
+ headers: [
+ "HardwareBuffer_aidl",
+ ],
+ imports: [
+ "android.hardware.graphics.common-V5",
+ ],
+ include_dirs: [
+ "frameworks/native/aidl/gui",
+ ],
+ backend: {
+ cpp: {
+ enabled: false,
+ },
+ java: {
+ enabled: false,
+ },
+ ndk: {
+ enabled: true,
+ additional_shared_libraries: [
+ "libnativewindow",
+ ],
+ },
+ rust: {
+ // No users, and no rust implementation of android.os.Surface yet
+ enabled: false,
+ },
+ },
+}
+
+cc_library_shared {
+ name: "libstagefright_graphicbuffersource_aidl",
+ min_sdk_version: "29",
+ srcs: [
+ "AidlGraphicBufferSource.cpp",
+ "wrapper/WAidlGraphicBufferSource.cpp",
+ ],
+ export_include_dirs: [
+ "include",
+ ],
+ header_libs: [
+ "media_plugin_headers",
+ ],
+
+ export_header_lib_headers: [
+ "media_plugin_headers",
+ ],
+ shared_libs: [
+ "android.hardware.graphics.common-V5-ndk",
+ "graphicbuffersource-aidl-ndk",
+ "libbinder_ndk",
+ "libcutils",
+ "libgui",
+ "liblog",
+ "libnativewindow",
+ "libstagefright_bufferqueue_helper",
+ "libutils",
+ ],
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl b/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl
new file mode 100644
index 0000000..4edd6ce
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/AidlColorAspects.aidl
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Ref: frameworks/native/include/media/hardware/VideoAPI.h
+ *
+ * Framework defined color aspects. These are based mainly on ISO 23001-8 spec. As this standard
+ * continues to evolve, new values may be defined in the future. Use OTHER for these future values
+ * as well as for values not listed here, as those are not supported by the framework.
+ */
+parcelable AidlColorAspects {
+ @Backing(type="int")
+ enum Range {
+ UNSPECIFIED, // Unspecified
+ FULL, // Full range
+ LIMITED, // Limited range (if defined), or not full range
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // Color primaries
+ @Backing(type="int")
+ enum Primaries {
+ UNSPECIFIED, // Unspecified
+ BT709_5, // Rec.ITU-R BT.709-5 or equivalent
+ BT470_6M, // Rec.ITU-R BT.470-6 System M or equivalent
+ BT601_6_625, // Rec.ITU-R BT.601-6 625 or equivalent
+ BT601_6_525, // Rec.ITU-R BT.601-6 525 or equivalent
+ GENERIC_FILM, // Generic Film
+ BT2020, // Rec.ITU-R BT.2020 or equivalent
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // Transfer characteristics
+ @Backing(type="int")
+ enum Transfer {
+ UNSPECIFIED, // Unspecified
+ LINEAR, // Linear transfer characteristics
+ SRGB, // sRGB or equivalent
+ SMPTE170M, // SMPTE 170M or equivalent (e.g. BT.601/709/2020)
+ GAMMA22, // Assumed display gamma 2.2
+ GAMMA28, // Assumed display gamma 2.8
+ ST2084, // SMPTE ST 2084 for 10/12/14/16 bit systems
+ HLG, // ARIB STD-B67 hybrid-log-gamma
+
+ // values unlikely to be required by Android follow here
+ SMPTE240M = 0x40, // SMPTE 240M
+ XVYCC, // IEC 61966-2-4
+ BT1361, // Rec.ITU-R BT.1361 extended gamut
+ ST428, // SMPTE ST 428-1
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ // YUV <-> RGB conversion
+ @Backing(type="int")
+ enum MatrixCoeffs {
+ UNSPECIFIED, // Unspecified
+ BT709_5, // Rec.ITU-R BT.709-5 or equivalent
+ BT470_6M, // KR=0.30, KB=0.11 or equivalent
+ BT601_6, // Rec.ITU-R BT.601-6 625 or equivalent
+ SMPTE240M, // SMPTE 240M or equivalent
+ BT2020, // Rec.ITU-R BT.2020 non-constant luminance
+ BT2020CONSTANT, // Rec.ITU-R BT.2020 constant luminance
+
+ OTHER = 0xff, // Not one of the above values
+ }
+
+ Range range;
+ Primaries primaries;
+ Transfer transfer;
+ MatrixCoeffs matrixCoeffs;
+}
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
new file mode 100644
index 0000000..0679e41
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlBufferSource.aidl
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.ParcelFileDescriptor;
+
+/**
+ * Binder interface for controlling and handling IAidlGraphicBufferSource
+ * from the process which owns IAidlNode.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder
+ */
+interface IAidlBufferSource {
+ /**
+ * This is called when IAidlGraphicBufferSource can start handing buffers.
+ * If we already have buffers of data sitting in the BufferQueue,
+ * this will send them to the codec.
+ */
+ void onStart();
+
+ /**
+ * This is called when IAidlGraphicBufferSource indicates that
+ * the codec is meant to return all buffers back to the client for them
+ * to be freed. Do NOT submit any more buffers to the component.
+ */
+ void onStop();
+
+ /**
+ * This is called when IAidlGraphicBufferSource indicates that
+ * we are shutting down.
+ */
+ void onRelease();
+
+ /**
+ * A "codec buffer", i.e. a buffer that can be used to pass data into
+ * the encoder, has been allocated.
+ */
+ void onInputBufferAdded(int bufferID);
+
+ /**
+ * Called from OnEmptyBufferDone. If we have a BQ buffer available,
+ * fill it with a new frame of data; otherwise, just mark it as available.
+ *
+ * fence contains the fence's fd that the callee should wait on before
+ * using the buffer (or pass on to the user of the buffer, if the user supports
+ * fences). Callee takes ownership of the fence fd even if it fails.
+ */
+ void onInputBufferEmptied(int bufferID, in @nullable ParcelFileDescriptor fence);
+}
+
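Regarding the fence contract on onInputBufferEmptied(): in the NDK backend the @nullable ParcelFileDescriptor arrives as an ::ndk::ScopedFileDescriptor that keeps ownership of the fd, so an implementation that forwards the fence to code taking ownership should dup() it first, as the wrapper later in this change does. A hedged sketch; MyBufferSource and consumeBuffer() are hypothetical:

    #include <aidl/android/media/BnAidlBufferSource.h>
    #include <unistd.h>

    class MyBufferSource : public ::aidl::android::media::BnAidlBufferSource {
        ::ndk::ScopedAStatus onStart() override { return ::ndk::ScopedAStatus::ok(); }
        ::ndk::ScopedAStatus onStop() override { return ::ndk::ScopedAStatus::ok(); }
        ::ndk::ScopedAStatus onRelease() override { return ::ndk::ScopedAStatus::ok(); }
        ::ndk::ScopedAStatus onInputBufferAdded(int32_t /*bufferId*/) override {
            return ::ndk::ScopedAStatus::ok();
        }
        ::ndk::ScopedAStatus onInputBufferEmptied(
                int32_t bufferId, const ::ndk::ScopedFileDescriptor& fence) override {
            // The ScopedFileDescriptor still owns fence.get(); keep our own duplicate.
            int ownedFd = fence.get() >= 0 ? ::dup(fence.get()) : -1;
            consumeBuffer(bufferId, ownedFd);  // hypothetical sink that takes fd ownership
            return ::ndk::ScopedAStatus::ok();
        }
        void consumeBuffer(int32_t, int) {}  // placeholder
    };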
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl
new file mode 100644
index 0000000..6642e89
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlGraphicBufferSource.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.graphics.common.Dataspace;
+import android.media.AidlColorAspects;
+import android.media.IAidlNode;
+
+/**
+ * Binder interface for configuring/controlling a Codec2 AIDL encoder instance
+ * on behalf of a Surface which will produce input buffers.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder.
+ */
+interface IAidlGraphicBufferSource {
+ void configure(IAidlNode node, Dataspace dataSpace);
+ void setSuspend(boolean suspend, long suspendTimeUs);
+ void setRepeatPreviousFrameDelayUs(long repeatAfterUs);
+ void setMaxFps(float maxFps);
+ void setTimeLapseConfig(double fps, double captureFps);
+ void setStartTimeUs(long startTimeUs);
+ void setStopTimeUs(long stopTimeUs);
+ long getStopTimeOffsetUs();
+ void setColorAspects(in AidlColorAspects aspects);
+ void setTimeOffsetUs(long timeOffsetsUs);
+ void signalEndOfInputStream();
+}
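A hedged sketch of the expected client-side call order on this interface: configure() first, optional tuning next, signalEndOfInputStream() last. How the binder and the IAidlNode are obtained is outside this change, and Dataspace::UNKNOWN plus the parameter values are illustrative only:

    #include <memory>

    #include <aidl/android/hardware/graphics/common/Dataspace.h>
    #include <aidl/android/media/IAidlGraphicBufferSource.h>
    #include <aidl/android/media/IAidlNode.h>

    using ::aidl::android::hardware::graphics::common::Dataspace;
    using ::aidl::android::media::IAidlGraphicBufferSource;
    using ::aidl::android::media::IAidlNode;

    void driveInputSurface(const std::shared_ptr<IAidlGraphicBufferSource>& source,
                           const std::shared_ptr<IAidlNode>& node) {
        source->configure(node, Dataspace::UNKNOWN);
        source->setSuspend(false, -1 /* no suspend/resume time specified */);
        source->setMaxFps(30.0f);
        // ... frames are produced through the Surface / BufferQueue ...
        source->signalEndOfInputStream();
    }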
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
new file mode 100644
index 0000000..6cac19e
--- /dev/null
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.HardwareBuffer;
+import android.media.IAidlBufferSource;
+import android.os.ParcelFileDescriptor;
+
+/**
+ * Binder interface abstraction for a Codec2 encoder instance.
+ *
+ * In order to support Persistent InputSurface and/or MediaRecorder.
+ */
+interface IAidlNode {
+
+ /**
+ * Input buffer parameters reported by the node.
+ */
+ parcelable InputBufferParams {
+ int bufferCountActual;
+ int frameWidth;
+ int frameHeight;
+ }
+
+ void freeNode();
+ void sendCommand(int cmd, int param);
+ long getConsumerUsage();
+ InputBufferParams getInputBufferParams();
+ void setConsumerUsage(long usage);
+ void setAdjustTimestampGapUs(int gapUs);
+ void setInputSurface(IAidlBufferSource bufferSource);
+ void submitBuffer(
+ int buffer,
+ in HardwareBuffer hBuffer,
+ int flags,
+ long timestampUs,
+ in @nullable ParcelFileDescriptor fence);
+ void onDataSpaceChanged(int dataSpace, int aspects, int pixelFormat);
+}
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
new file mode 100644
index 0000000..4b38294
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/stagefright/bqhelper/GraphicBufferSource.h>
+#include <media/stagefright/foundation/ABase.h>
+
+#include <media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h>
+
+#include <utils/Errors.h>
+
+#include <aidl/android/media/BnAidlBufferSource.h>
+
+namespace android::media {
+
+/*
+ * This class is used to feed codecs from a Surface via BufferQueue or a HW
+ * producer, using AIDL binder interfaces (emulating what is done for OMX codecs).
+ *
+ * See media/stagefright/bqhelper/GraphicBufferSource.h for documentation.
+ */
+class AidlGraphicBufferSource : public GraphicBufferSource {
+public:
+ AidlGraphicBufferSource() = default;
+ virtual ~AidlGraphicBufferSource() = default;
+
+ // For IAidlBufferSource interface
+ // ------------------------------
+
+ // When we can start handling buffers. If we already have buffers of data
+ // sitting in the BufferQueue, this will send them to the codec.
+ ::ndk::ScopedAStatus onStart();
+
+ // When the codec is meant to return all buffers back to the client for
+ // them to be freed. Do NOT submit any more buffers to the component.
+ ::ndk::ScopedAStatus onStop();
+
+ // When we are shutting down.
+ ::ndk::ScopedAStatus onRelease();
+
+ // Rest of the interface in GraphicBufferSource.
+
+ // IAidlGraphicBufferSource interface
+ // ------------------------------
+
+ // Configure the buffer source to be used with a Codec2 AIDL node, using the
+ // given parameters.
+ status_t configure(
+ const sp<IAidlNodeWrapper> &aidlNode,
+ int32_t dataSpace,
+ int32_t bufferCount,
+ uint32_t frameWidth,
+ uint32_t frameHeight,
+ uint64_t consumerUsage);
+
+ // Rest of the interface in GraphicBufferSource.
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(AidlGraphicBufferSource);
+};
+
+} // namespace android::media
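A hedged sketch of how the two classes introduced here are expected to be paired: the RefBase-style AidlGraphicBufferSource is held through sp<>, while the binder-facing WAidlGraphicBufferSource (declared under wrapper/) is created through ndk::SharedRefBase and keeps that sp<> alive. makeInputSurfaceBinder() is a hypothetical helper, not part of this change:

    #include <memory>

    #include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
    #include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>

    using ::android::sp;
    using ::android::media::AidlGraphicBufferSource;
    using ::android::media::WAidlGraphicBufferSource;

    std::shared_ptr<WAidlGraphicBufferSource> makeInputSurfaceBinder() {
        sp<AidlGraphicBufferSource> source = sp<AidlGraphicBufferSource>::make();
        // The wrapper exposes IAidlGraphicBufferSource over binder and holds the sp<>.
        return ::ndk::SharedRefBase::make<WAidlGraphicBufferSource>(source);
    }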
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
new file mode 100644
index 0000000..64de7ae
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/C2NodeDef.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+namespace android::media {
+
+// Node definitions for aidl input surface.
+//
+// Copied from the non-AIDL implementation. Definitions that are not
+// needed by the input surface implementation are omitted.
+
+enum C2NodeBufferFlag : uint32_t {
+ BUFFERFLAG_EOS = 1,
+ BUFFERFLAG_ENDOFFRAME = (1 << 4)
+};
+
+enum C2NodeCommand : int32_t {
+ CommandStateSet = 1
+};
+
+enum C2NodeStatus : int32_t {
+ NodeStatusLoaded = 1
+};
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h
new file mode 100644
index 0000000..f23b5e4
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/IAidlNodeWrapper.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/RefBase.h>
+#include <utils/StrongPointer.h>
+#include <ui/GraphicBuffer.h>
+
+#include <stdint.h>
+
+namespace android::media {
+
+struct IAidlNodeWrapper : public RefBase {
+ virtual status_t submitBuffer(
+ int32_t bufferId, uint32_t flags,
+ const sp<GraphicBuffer> &buffer = nullptr,
+ int64_t timestamp = 0, int fenceFd = -1) = 0;
+ virtual void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) = 0;
+};
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h
new file mode 100644
index 0000000..dcb83f6
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/Conversion.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <ui/GraphicBuffer.h>
+#include <utils/Errors.h>
+
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/hardware/graphics/common/PixelFormat.h>
+#include <aidl/android/media/AidlColorAspects.h>
+
+namespace android::media::aidl_conversion {
+
+inline status_t fromAidlStatus(const ::ndk::ScopedAStatus &status) {
+ if (!status.isOk()) {
+ if (status.getExceptionCode() == EX_SERVICE_SPECIFIC) {
+ return static_cast<status_t>(status.getServiceSpecificError());
+ } else {
+ return static_cast<status_t>(FAILED_TRANSACTION);
+ }
+ }
+ return NO_ERROR;
+}
+
+inline ::ndk::ScopedAStatus toAidlStatus(status_t status) {
+ if (status == NO_ERROR) {
+ return ::ndk::ScopedAStatus::ok();
+ }
+ return ::ndk::ScopedAStatus::fromServiceSpecificError(status);
+}
+
+inline int32_t compactFromAidlColorAspects(::aidl::android::media::AidlColorAspects const& s) {
+ return static_cast<int32_t>(
+ (static_cast<uint32_t>(s.range) << 24) |
+ (static_cast<uint32_t>(s.primaries) << 16) |
+ (static_cast<uint32_t>(s.transfer)) |
+ (static_cast<uint32_t>(s.matrixCoeffs) << 8));
+}
+
+inline int32_t rawFromAidlDataspace(
+ ::aidl::android::hardware::graphics::common::Dataspace const& s) {
+ return static_cast<int32_t>(s);
+}
+
+} // namespace android::media::aidl_conversion
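For reference, compactFromAidlColorAspects() packs the four enums into one int32_t as [31:24] range, [23:16] primaries, [15:8] matrixCoeffs, [7:0] transfer. A small worked example, with values taken from the AidlColorAspects enums defined above:

    #include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>

    using ::aidl::android::media::AidlColorAspects;
    using ::android::media::aidl_conversion::compactFromAidlColorAspects;

    int32_t packBt709FullRange() {
        AidlColorAspects aspects;
        aspects.range = AidlColorAspects::Range::FULL;                  // 1
        aspects.primaries = AidlColorAspects::Primaries::BT709_5;       // 1
        aspects.transfer = AidlColorAspects::Transfer::SRGB;            // 2
        aspects.matrixCoeffs = AidlColorAspects::MatrixCoeffs::BT709_5; // 1
        // (1 << 24) | (1 << 16) | (1 << 8) | 2 == 0x01010102
        return compactFromAidlColorAspects(aspects);
    }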
diff --git a/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h
new file mode 100644
index 0000000..f4d7fe8
--- /dev/null
+++ b/media/module/aidlpersistentsurface/include/media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <utils/RefBase.h>
+#include <aidl/android/hardware/graphics/common/Dataspace.h>
+#include <aidl/android/media/IAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+#include <aidl/android/media/BnAidlGraphicBufferSource.h>
+
+namespace android::media {
+
+class AidlGraphicBufferSource;
+
+using ::android::sp;
+
+/**
+ * Aidl wrapper implementation for IAidlGraphicBufferSource
+ */
+class WAidlGraphicBufferSource : public ::aidl::android::media::BnAidlGraphicBufferSource {
+public:
+
+ struct WAidlNodeWrapper;
+ class WAidlBufferSource;
+
+ sp<AidlGraphicBufferSource> mBase;
+ std::shared_ptr<::aidl::android::media::IAidlBufferSource> mBufferSource;
+
+ WAidlGraphicBufferSource(sp<AidlGraphicBufferSource> const& base);
+ ::ndk::ScopedAStatus configure(
+ const std::shared_ptr<::aidl::android::media::IAidlNode>& node,
+ aidl::android::hardware::graphics::common::Dataspace dataspace) override;
+ ::ndk::ScopedAStatus setSuspend(bool suspend, int64_t timeUs) override;
+ ::ndk::ScopedAStatus setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
+ ::ndk::ScopedAStatus setMaxFps(float maxFps) override;
+ ::ndk::ScopedAStatus setTimeLapseConfig(double fps, double captureFps) override;
+ ::ndk::ScopedAStatus setStartTimeUs(int64_t startTimeUs) override;
+ ::ndk::ScopedAStatus setStopTimeUs(int64_t stopTimeUs) override;
+ ::ndk::ScopedAStatus getStopTimeOffsetUs(int64_t *_aidl_return) override;
+ ::ndk::ScopedAStatus setColorAspects(
+ const ::aidl::android::media::AidlColorAspects& aspects) override;
+ ::ndk::ScopedAStatus setTimeOffsetUs(int64_t timeOffsetUs) override;
+ ::ndk::ScopedAStatus signalEndOfInputStream() override;
+};
+
+} // namespace android::media
diff --git a/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
new file mode 100644
index 0000000..5526b10
--- /dev/null
+++ b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
@@ -0,0 +1,221 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WAidlGraphicBufferSource"
+#include <android/hardware_buffer_aidl.h>
+#include <private/android/AHardwareBufferHelpers.h>
+#include <utils/Log.h>
+
+#include <aidl/android/media/BnAidlBufferSource.h>
+#include <aidl/android/media/IAidlNode.h>
+
+#include <media/stagefright/aidlpersistentsurface/AidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/C2NodeDef.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.h>
+#include <media/stagefright/aidlpersistentsurface/wrapper/Conversion.h>
+
+namespace android::media {
+using ::android::binder::unique_fd;
+using ::aidl::android::hardware::graphics::common::PixelFormat;
+using ::aidl::android::hardware::graphics::common::Dataspace;
+using ::aidl::android::media::AidlColorAspects;
+using ::aidl::android::media::IAidlNode;
+using ::aidl::android::media::BnAidlBufferSource;
+
+// Conversion
+using ::android::media::aidl_conversion::fromAidlStatus;
+using ::android::media::aidl_conversion::toAidlStatus;
+using ::android::media::aidl_conversion::compactFromAidlColorAspects;
+using ::android::media::aidl_conversion::rawFromAidlDataspace;
+
+struct WAidlGraphicBufferSource::WAidlNodeWrapper : public IAidlNodeWrapper {
+ std::shared_ptr<IAidlNode> mNode;
+
+ WAidlNodeWrapper(const std::shared_ptr<IAidlNode> &node): mNode(node) {
+ }
+
+ virtual status_t submitBuffer(
+ int32_t bufferId, uint32_t flags,
+ const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ AHardwareBuffer *ahwBuffer = nullptr;
+ ::aidl::android::hardware::HardwareBuffer hBuffer;
+ if (buffer.get()) {
+ ahwBuffer = AHardwareBuffer_from_GraphicBuffer(buffer.get());
+ AHardwareBuffer_acquire(ahwBuffer);
+ hBuffer.reset(ahwBuffer);
+ }
+
+ ::ndk::ScopedFileDescriptor fence(fenceFd);
+
+ return fromAidlStatus(mNode->submitBuffer(
+ bufferId,
+ hBuffer,
+ flags,
+ timestamp,
+ fence));
+ }
+
+ virtual void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ ::ndk::ScopedAStatus err = mNode->onDataSpaceChanged(
+ dataSpace, aspects, pixelFormat);
+ status_t status = fromAidlStatus(err);
+ if (status != NO_ERROR) {
+ ALOGE("WAidlNodeWrapper failed to change dataspace (%d): "
+ "dataSpace = %ld, aspects = %ld, pixelFormat = %ld",
+ static_cast<int>(status),
+ static_cast<long>(dataSpace),
+ static_cast<long>(aspects),
+ static_cast<long>(pixelFormat));
+ }
+ }
+};
+
+class WAidlGraphicBufferSource::WAidlBufferSource : public BnAidlBufferSource {
+ sp<AidlGraphicBufferSource> mSource;
+
+public:
+ WAidlBufferSource(const sp<AidlGraphicBufferSource> &source): mSource(source) {
+ }
+
+ ::ndk::ScopedAStatus onStart() override {
+ mSource->onStart();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onStop() override {
+ mSource->onStop();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onRelease() override {
+ mSource->onRelease();
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onInputBufferAdded(int32_t bufferId) override {
+ mSource->onInputBufferAdded(bufferId);
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ ::ndk::ScopedAStatus onInputBufferEmptied(
+ int32_t bufferId, const ::ndk::ScopedFileDescriptor& fence) override {
+ mSource->onInputBufferEmptied(bufferId, ::dup(fence.get()));
+ return ::ndk::ScopedAStatus::ok();
+ }
+};
+
+// WAidlGraphicBufferSource
+WAidlGraphicBufferSource::WAidlGraphicBufferSource(
+ sp<AidlGraphicBufferSource> const& base) :
+ mBase(base),
+ mBufferSource(::ndk::SharedRefBase::make<WAidlBufferSource>(base)) {
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::configure(
+ const std::shared_ptr<IAidlNode>& node, Dataspace dataspace) {
+ if (node == NULL) {
+ return toAidlStatus(BAD_VALUE);
+ }
+
+ // Do setInputSurface() first, the node will try to enable metadata
+ // mode on input, and does necessary error checking. If this fails,
+ // we can't use this input surface on the node.
+ ::ndk::ScopedAStatus err = node->setInputSurface(mBufferSource);
+ status_t fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ ALOGE("Unable to set input surface: %d", fnStatus);
+ return err;
+ }
+
+ // use consumer usage bits queried from encoder, but always add
+ // HW_VIDEO_ENCODER for backward compatibility.
+ int64_t consumerUsage;
+ fnStatus = OK;
+ err = node->getConsumerUsage(&consumerUsage);
+ fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ if (fnStatus == FAILED_TRANSACTION) {
+ return err;
+ }
+ consumerUsage = 0;
+ }
+
+ IAidlNode::InputBufferParams rDef;
+ err = node->getInputBufferParams(&rDef);
+ fnStatus = fromAidlStatus(err);
+ if (fnStatus != NO_ERROR) {
+ ALOGE("Failed to get port definition: %d", fnStatus);
+ return toAidlStatus(fnStatus);
+ }
+
+ return toAidlStatus(mBase->configure(
+ new WAidlNodeWrapper(node),
+ rawFromAidlDataspace(dataspace),
+ rDef.bufferCountActual,
+ rDef.frameWidth,
+ rDef.frameHeight,
+ static_cast<uint64_t>(consumerUsage)));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setSuspend(
+ bool suspend, int64_t timeUs) {
+ return toAidlStatus(mBase->setSuspend(suspend, timeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setRepeatPreviousFrameDelayUs(
+ int64_t repeatAfterUs) {
+ return toAidlStatus(mBase->setRepeatPreviousFrameDelayUs(repeatAfterUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setMaxFps(float maxFps) {
+ return toAidlStatus(mBase->setMaxFps(maxFps));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setTimeLapseConfig(
+ double fps, double captureFps) {
+ return toAidlStatus(mBase->setTimeLapseConfig(fps, captureFps));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setStartTimeUs(int64_t startTimeUs) {
+ return toAidlStatus(mBase->setStartTimeUs(startTimeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setStopTimeUs(int64_t stopTimeUs) {
+ return toAidlStatus(mBase->setStopTimeUs(stopTimeUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::getStopTimeOffsetUs(int64_t* _aidl_return) {
+ status_t status = mBase->getStopTimeOffsetUs(_aidl_return);
+ return toAidlStatus(status);
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setColorAspects(
+ const AidlColorAspects& aspects) {
+ return toAidlStatus(mBase->setColorAspects(compactFromAidlColorAspects(aspects)));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::setTimeOffsetUs(int64_t timeOffsetUs) {
+ return toAidlStatus(mBase->setTimeOffsetUs(timeOffsetUs));
+}
+
+::ndk::ScopedAStatus WAidlGraphicBufferSource::signalEndOfInputStream() {
+ return toAidlStatus(mBase->signalEndOfInputStream());
+}
+
+} // namespace android::media
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index 4fbfab1..6df9dc8 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -49,6 +49,7 @@
using ::android::hardware::Return;
using ::android::sp;
using ::ndk::ScopedAStatus;
+namespace c2_hidl_V1_0 = ::android::hardware::media::c2::V1_0;
namespace c2_hidl = ::android::hardware::media::c2::V1_2;
namespace c2_aidl = ::aidl::android::hardware::media::c2;
@@ -734,6 +735,46 @@
} // unnamed namespace
+static android::sp<c2_hidl_V1_0::IComponentStore> getDeclaredHidlSwcodec(
+ const std::shared_ptr<C2ComponentStore>& store) {
+ using ::android::hidl::manager::V1_2::IServiceManager;
+ using namespace ::android::hardware::media::c2;
+
+ int platformVersion = android_get_device_api_level();
+ // STOPSHIP: Remove code name checking once platform version bumps up to 35.
+ std::string codeName = android::base::GetProperty("ro.build.version.codename", "");
+
+ if (codeName == "VanillaIceCream") {
+ platformVersion = __ANDROID_API_V__;
+ }
+ IServiceManager::Transport transport =
+ android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_2::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion < __ANDROID_API_S__) {
+ LOG(ERROR) << "We don't expect V1.2::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_2::utils::ComponentStore>::make(store);
+ }
+ transport = android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_1::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion != __ANDROID_API_R__) {
+ LOG(ERROR) << "We don't expect V1.1::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_1::utils::ComponentStore>::make(store);
+ }
+ transport = android::hardware::defaultServiceManager1_2()->getTransport(
+ V1_0::IComponentStore::descriptor, "software");
+ if (transport == IServiceManager::Transport::HWBINDER) {
+ if (platformVersion != __ANDROID_API_Q__) {
+ LOG(ERROR) << "We don't expect V1.0::IComponentStore to be declared on this device";
+ }
+ return ::android::sp<V1_0::utils::ComponentStore>::make(store);
+ }
+ return nullptr;
+}
+
extern "C" void RegisterCodecServices() {
const bool aidlSelected = c2_aidl::utils::IsSelected();
constexpr int kThreadCount = 64;
@@ -751,33 +792,6 @@
using namespace ::android::hardware::media::c2;
- int platformVersion = android_get_device_api_level();
- // STOPSHIP: Remove code name checking once platform version bumps up to 35.
- std::string codeName =
- android::base::GetProperty("ro.build.version.codename", "");
- if (codeName == "VanillaIceCream") {
- platformVersion = __ANDROID_API_V__;
- }
-
- android::sp<V1_0::IComponentStore> hidlStore;
- std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
- const char *hidlVer = "(unknown)";
- if (aidlSelected) {
- aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
- } else if (platformVersion >= __ANDROID_API_S__) {
- hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(store);
- hidlVer = "1.2";
- } else if (platformVersion == __ANDROID_API_R__) {
- hidlStore = ::android::sp<V1_1::utils::ComponentStore>::make(store);
- hidlVer = "1.1";
- } else if (platformVersion == __ANDROID_API_Q__) {
- hidlStore = ::android::sp<V1_0::utils::ComponentStore>::make(store);
- hidlVer = "1.0";
- } else { // platformVersion < __ANDROID_API_Q__
- LOG(ERROR) << "The platform version " << platformVersion <<
- " is not supported.";
- return;
- }
if (!ionPropertiesDefined()) {
using IComponentStore =
::android::hardware::media::c2::V1_0::IComponentStore;
@@ -823,7 +837,10 @@
std::string(c2_aidl::IComponentStore::descriptor) + "/software";
if (__builtin_available(android __ANDROID_API_S__, *)) {
if (AServiceManager_isDeclared(aidlServiceName.c_str())) {
- if (!aidlStore) {
+ std::shared_ptr<c2_aidl::IComponentStore> aidlStore;
+ if (aidlSelected) {
+ aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(store);
+ } else {
aidlStore = ::ndk::SharedRefBase::make<c2_aidl::utils::ComponentStore>(
std::make_shared<H2C2ComponentStore>(nullptr));
}
@@ -837,22 +854,23 @@
}
}
+ android::sp<V1_0::IComponentStore> hidlStore = getDeclaredHidlSwcodec(store);
// If the software component store isn't declared in the manifest, we don't
// need to create the service and register it.
- using ::android::hidl::manager::V1_2::IServiceManager;
- IServiceManager::Transport transport =
- android::hardware::defaultServiceManager1_2()->getTransport(
- V1_2::utils::ComponentStore::descriptor, "software");
- if (transport == IServiceManager::Transport::HWBINDER) {
- if (!hidlStore) {
+ if (hidlStore) {
+ if (registered && aidlSelected) {
+ LOG(INFO) << "Both HIDL and AIDL software codecs are declared in the vintf "
+ << "manifest, but AIDL was selected. "
+ << "Creating a null HIDL service so it's not accidentally "
+ << "used. The AIDL software codec is already registered.";
hidlStore = ::android::sp<V1_2::utils::ComponentStore>::make(
std::make_shared<H2C2ComponentStore>(nullptr));
- hidlVer = "1.2";
}
if (hidlStore->registerAsService("software") == android::OK) {
registered = true;
} else {
- LOG(ERROR) << "Cannot register software Codec2 v" << hidlVer << " service.";
+ LOG(ERROR) << "Cannot register software Codec2 " << hidlStore->descriptor
+ << " service.";
}
} else {
LOG(INFO) << "The HIDL software Codec2 service is deprecated"
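The refactor above moves the HIDL version choice into getDeclaredHidlSwcodec(), which registers whichever V1.0/V1.1/V1.2 IComponentStore instance the device's vintf manifest actually declares, instead of keying only off the platform API level. The AIDL side follows the same "declared only" rule; a hedged sketch of that check, with the descriptor string written out literally as it resolves from c2_aidl::IComponentStore in this file:

    #include <android/binder_manager.h>
    #include <string>

    bool isSoftwareC2AidlStoreDeclared() {
        // c2_aidl::IComponentStore::descriptor + "/software" in the code above.
        const std::string name =
                std::string("android.hardware.media.c2.IComponentStore") + "/software";
        return AServiceManager_isDeclared(name.c_str());
    }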
diff --git a/media/module/extractors/fuzzers/Android.bp b/media/module/extractors/fuzzers/Android.bp
index d096d63..7a49d8e 100644
--- a/media/module/extractors/fuzzers/Android.bp
+++ b/media/module/extractors/fuzzers/Android.bp
@@ -134,6 +134,8 @@
],
dictionary: "mp4_extractor_fuzzer.dict",
+
+ corpus: ["corpus_mp4/*"],
}
cc_fuzz {
@@ -202,7 +204,6 @@
"ogg_extractor_fuzzer.cpp",
],
-
static_libs: [
"libstagefright_metadatautils",
"libvorbisidec",
diff --git a/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0 b/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0
new file mode 100644
index 0000000..c17251b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/164a5bad5340b262316f93932c4160813657e1e0
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb b/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb
new file mode 100644
index 0000000..16907fd
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/24f242f3b30fd5c2ff0f9aebed4375a3ab5cdceb
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b b/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b
new file mode 100644
index 0000000..2ec7881
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/2a017927fdab79a8cc3b0bb75224cb44f4c1b35b
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f b/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f
new file mode 100644
index 0000000..cd1fdcc
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/58b3155e64ac16e4e6c68b68257871bcd769b92f
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5 b/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5
new file mode 100644
index 0000000..f1ea812
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/64093d4da00ba406310c7679cd8b37562e6344b5
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a b/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a
new file mode 100644
index 0000000..c5d3eb2
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/7355066d4975de07e9b6d0e9907c896eeb90577a
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2 b/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2
new file mode 100644
index 0000000..1f6c29d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/7a48b0237581c794097a15add08517b3c6dc0aa2
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc b/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc
new file mode 100644
index 0000000..40d639d
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/8706f07041a0cf828a7b40d727533d6c732b5ebc
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615 b/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615
new file mode 100644
index 0000000..2056348
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/911a46d40d60b9a806dbdc70799048df2f546615
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271 b/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271
new file mode 100644
index 0000000..f50d4f4
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/97b1d0e78525c793574cce3e66f86564c2a10271
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e b/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e
new file mode 100644
index 0000000..25ea55b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/d52e7095534fdf1f040b10a80df4cbc069a97a4e
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456 b/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456
new file mode 100644
index 0000000..62d259b
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/ec6bd6069f74a2f6e92442f88efb29288ad6f456
Binary files differ
diff --git a/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec b/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec
new file mode 100644
index 0000000..d649632
--- /dev/null
+++ b/media/module/extractors/fuzzers/corpus_mp4/faa22bcb745206340d5d411b498a3868d2b1feec
Binary files differ
diff --git a/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
index 3683649..b48c854 100644
--- a/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
+++ b/media/module/extractors/fuzzers/mp4_extractor_fuzzer.dict
@@ -246,3 +246,4 @@
kw245="iso5"
kw246="resv"
kw247="iso6"
+kw248="clap"
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 01f55dd..d5d778f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1079,6 +1079,7 @@
client = registerPid(clientPid);
IAfPlaybackThread* effectThread = nullptr;
+ sp<IAfEffectChain> effectChain = nullptr;
// check if an effect chain with the same session ID is present on another
// output thread and move it here.
for (size_t i = 0; i < mPlaybackThreads.size(); i++) {
@@ -1091,6 +1092,10 @@
}
}
}
+ // Check if an orphan effect chain exists for this session
+ if (effectThread == nullptr) {
+ effectChain = getOrphanEffectChain_l(sessionId);
+ }
ALOGV("createTrack() sessionId: %d", sessionId);
output.sampleRate = input.config.sample_rate;
@@ -1135,6 +1140,13 @@
effectIds = thread->getEffectIds_l(sessionId);
}
}
+ if (effectChain != nullptr) {
+ if (moveEffectChain_ll(sessionId, nullptr, thread, effectChain.get())
+ == NO_ERROR) {
+ effectThreadId = thread->id();
+ effectIds = thread->getEffectIds_l(sessionId);
+ }
+ }
}
// Look for sync events awaiting for a session to be used.
@@ -4222,8 +4234,9 @@
// before creating the AudioEffect or the io handle must be specified.
//
// Detect if the effect is created after an AudioRecord is destroyed.
- if ((sessionId != AUDIO_SESSION_OUTPUT_MIX) &&
- getOrphanEffectChain_l(sessionId).get() != nullptr) {
+ if (sessionId != AUDIO_SESSION_OUTPUT_MIX
+ && ((descOut.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_PRE_PROC)
+ && getOrphanEffectChain_l(sessionId).get() != nullptr) {
ALOGE("%s: effect %s with no specified io handle is denied because the AudioRecord"
" for session %d no longer exists",
__func__, descOut.name, sessionId);
@@ -4234,11 +4247,27 @@
// Legacy handling of creating an effect on an expired or made-up
// session id. We think that it is a Playback effect.
//
- // If no output thread contains the requested session ID, default to
- // first output. The effect chain will be moved to the correct output
- // thread when a track with the same session ID is created
- if (io == AUDIO_IO_HANDLE_NONE && mPlaybackThreads.size() > 0) {
- io = mPlaybackThreads.keyAt(0);
+ // If no output thread contains the requested session ID, park the effect in
+ // the orphan chains. The effect chain will be moved to the correct output
+ // thread when a track with the same session ID is created.
+ if (io == AUDIO_IO_HANDLE_NONE) {
+ if (probe) {
+ // In probe mode, since no compatible thread was found, exit with an error.
+ lStatus = BAD_VALUE;
+ goto Exit;
+ }
+ ALOGV("%s() got io %d for effect %s", __func__, io, descOut.name);
+ sp<Client> client = registerPid(currentPid);
+ bool pinned = !audio_is_global_session(sessionId) && isSessionAcquired_l(sessionId);
+ handle = createOrphanEffect_l(client, effectClient, priority, sessionId,
+ &descOut, &enabledOut, &lStatus, pinned,
+ request.notifyFramesProcessed);
+ if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+ // remove local strong reference to Client with clientMutex() held
+ audio_utils::lock_guard _cl(clientMutex());
+ client.clear();
+ }
+ goto Register;
}
ALOGV("createEffect() got io %d for effect %s", io, descOut.name);
} else if (checkPlaybackThread_l(io) != nullptr
@@ -4356,6 +4385,85 @@
return lStatus;
}
+sp<IAfEffectHandle> AudioFlinger::createOrphanEffect_l(
+ const sp<Client>& client,
+ const sp<IEffectClient>& effectClient,
+ int32_t priority,
+ audio_session_t sessionId,
+ effect_descriptor_t *desc,
+ int *enabled,
+ status_t *status,
+ bool pinned,
+ bool notifyFramesProcessed)
+{
+ ALOGV("%s effectClient %p, priority %d, sessionId %d, factory %p",
+ __func__, effectClient.get(), priority, sessionId, mEffectsFactoryHal.get());
+
+ // Check if an orphan effect chain exists for this session, or create a new chain for it
+ sp<IAfEffectModule> effect;
+ sp<IAfEffectChain> chain = getOrphanEffectChain_l(sessionId);
+ bool chainCreated = false;
+ if (chain == nullptr) {
+ chain = IAfEffectChain::create(/* ThreadBase= */ nullptr, sessionId, this);
+ chainCreated = true;
+ } else {
+ effect = chain->getEffectFromDesc(desc);
+ }
+ bool effectCreated = false;
+ if (effect == nullptr) {
+ audio_unique_id_t effectId = nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
+ // create a new effect module if none present in the chain
+ status_t llStatus =
+ chain->createEffect(effect, desc, effectId, sessionId, pinned);
+ if (llStatus != NO_ERROR) {
+ *status = llStatus;
+ // if the effect chain was not created here, put it back
+ if (!chainCreated) {
+ putOrphanEffectChain_l(chain);
+ }
+ return nullptr;
+ }
+ effect->setMode(getMode());
+
+ if (effect->isHapticGenerator()) {
+ // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
+ // for the HapticGenerator.
+ const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
+ std::move(getDefaultVibratorInfo_l());
+ if (defaultVibratorInfo) {
+ // Only set the vibrator info when it is a valid one.
+ audio_utils::lock_guard _cl(chain->mutex());
+ effect->setVibratorInfo_l(*defaultVibratorInfo);
+ }
+ }
+ effectCreated = true;
+ }
+ // create effect handle and connect it to effect module
+ sp<IAfEffectHandle> handle =
+ IAfEffectHandle::create(effect, client, effectClient, priority, notifyFramesProcessed);
+ status_t lStatus = handle->initCheck();
+ if (lStatus == OK) {
+ lStatus = effect->addHandle(handle.get());
+ }
+ // on lStatus error the EffectHandle is still returned; the caller is responsible for clearing it
+ if (lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
+ if (effectCreated) {
+ chain->removeEffect(effect);
+ }
+ // if the effect chain was not created here, put it back
+ if (!chainCreated) {
+ putOrphanEffectChain_l(chain);
+ }
+ } else {
+ if (enabled != NULL) {
+ *enabled = (int)effect->isEnabled();
+ }
+ putOrphanEffectChain_l(chain);
+ }
+ *status = lStatus;
+ return handle;
+}
+
status_t AudioFlinger::moveEffects(audio_session_t sessionId, audio_io_handle_t srcIo,
audio_io_handle_t dstIo)
NO_THREAD_SAFETY_ANALYSIS
@@ -4487,7 +4595,7 @@
if (srcThread != nullptr) {
srcThread->removeEffect_l(effect);
} else {
- chain->removeEffect_l(effect);
+ chain->removeEffect(effect);
}
removed.add(effect);
status = dstThread->addEffect_ll(effect);
@@ -4730,7 +4838,7 @@
ALOGV("updateOrphanEffectChains session %d index %zd", session, index);
if (index >= 0) {
sp<IAfEffectChain> chain = mOrphanEffectChains.valueAt(index);
- if (chain->removeEffect_l(effect, true) == 0) {
+ if (chain->removeEffect(effect, true) == 0) {
ALOGV("updateOrphanEffectChains removing effect chain at index %zd", index);
mOrphanEffectChains.removeItemsAt(index);
}
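Taken together, the AudioFlinger changes above let an effect created with io == AUDIO_IO_HANDLE_NONE be "parked" in an orphan chain keyed by session ID, and adopted by whichever output thread later creates a track for that session (via moveEffectChain_ll). A deliberately simplified sketch of that flow, using stand-in types rather than the real AudioFlinger classes:

    #include <map>
    #include <memory>

    struct Chain { int sessionId; };                              // stand-in for IAfEffectChain
    struct OutputThread { std::map<int, std::shared_ptr<Chain>> chains; };

    static std::map<int, std::shared_ptr<Chain>> gOrphanChains;   // mOrphanEffectChains analogue

    // createEffect() path with no io handle: park (or reuse) the orphan chain.
    std::shared_ptr<Chain> parkOrphanEffect(int sessionId) {
        auto& chain = gOrphanChains[sessionId];
        if (!chain) {
            chain = std::make_shared<Chain>(Chain{sessionId});
        }
        return chain;
    }

    // createTrack() path: adopt the parked chain into the chosen output thread.
    void adoptOrphanChain(OutputThread& thread, int sessionId) {
        auto it = gOrphanChains.find(sessionId);
        if (it == gOrphanChains.end()) {
            return;  // nothing parked for this session
        }
        thread.chains[sessionId] = it->second;  // moveEffectChain_ll() analogue
        gOrphanChains.erase(it);
    }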
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 7c26f72..4e46bea 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -703,6 +703,16 @@
sp<Client> registerPid(pid_t pid) EXCLUDES_AudioFlinger_ClientMutex; // always returns non-0
+ sp<IAfEffectHandle> createOrphanEffect_l(const sp<Client>& client,
+ const sp<media::IEffectClient>& effectClient,
+ int32_t priority,
+ audio_session_t sessionId,
+ effect_descriptor_t *desc,
+ int *enabled,
+ status_t *status /*non-NULL*/,
+ bool pinned,
+ bool notifyFramesProcessed) REQUIRES(mutex());
+
// for use from destructor
status_t closeOutput_nonvirtual(audio_io_handle_t output) EXCLUDES_AudioFlinger_Mutex;
status_t closeInput_nonvirtual(audio_io_handle_t input) EXCLUDES_AudioFlinger_Mutex;
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 10a8883..ab098de 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -1357,7 +1357,7 @@
}
}
-status_t EffectModule::setVolume(uint32_t* left, uint32_t* right, bool controller) {
+status_t EffectModule::setVolume(uint32_t* left, uint32_t* right, bool controller, bool force) {
AutoLockReentrant _l(mutex(), mSetVolumeReentrantTid);
if (mStatus != NO_ERROR) {
return mStatus;
@@ -1365,7 +1365,7 @@
status_t status = NO_ERROR;
// Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume
// if controller flag is set (Note that controller == TRUE => EFFECT_FLAG_VOLUME_CTRL set)
- if (isProcessEnabled() &&
+ if ((isProcessEnabled() || force) &&
((mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL ||
(mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_IND ||
(mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_MONITOR)) {
@@ -1376,6 +1376,9 @@
status_t EffectModule::setVolumeInternal(
uint32_t *left, uint32_t *right, bool controller) {
+ if (mVolume.has_value() && *left == mVolume.value()[0] && *right == mVolume.value()[1]) {
+ return NO_ERROR;
+ }
uint32_t volume[2] = {*left, *right};
uint32_t *pVolume = controller ? volume : nullptr;
uint32_t size = sizeof(volume);
@@ -1387,6 +1390,7 @@
if (controller && status == NO_ERROR && size == sizeof(volume)) {
*left = volume[0];
*right = volume[1];
+ mVolume = {*left, *right};
}
return status;
}
@@ -2157,27 +2161,31 @@
/* static */
sp<IAfEffectChain> IAfEffectChain::create(
const sp<IAfThreadBase>& thread,
- audio_session_t sessionId)
+ audio_session_t sessionId,
+ const sp<IAfThreadCallback>& afThreadCallback)
{
- return sp<EffectChain>::make(thread, sessionId);
+ return sp<EffectChain>::make(thread, sessionId, afThreadCallback);
}
-EffectChain::EffectChain(const sp<IAfThreadBase>& thread,
- audio_session_t sessionId)
+EffectChain::EffectChain(const sp<IAfThreadBase>& thread, audio_session_t sessionId,
+ const sp<IAfThreadCallback>& afThreadCallback)
: mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX),
- mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread))
+ mEffectCallback(new EffectCallback(wp<EffectChain>(this), thread, afThreadCallback))
{
- mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
- mMaxTailBuffers = ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
- thread->frameCount();
+ if (thread != nullptr) {
+ mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
+ mMaxTailBuffers =
+ ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
+ thread->frameCount();
+ }
}
-// getEffectFromDesc_l() must be called with IAfThreadBase::mutex() held
-sp<IAfEffectModule> EffectChain::getEffectFromDesc_l(
+sp<IAfEffectModule> EffectChain::getEffectFromDesc(
effect_descriptor_t *descriptor) const
{
+ audio_utils::lock_guard _l(mutex());
size_t size = mEffects.size();
for (size_t i = 0; i < size; i++) {
@@ -2191,6 +2199,7 @@
// getEffectFromId_l() must be called with IAfThreadBase::mutex() held
sp<IAfEffectModule> EffectChain::getEffectFromId_l(int id) const
{
+ audio_utils::lock_guard _l(mutex());
size_t size = mEffects.size();
for (size_t i = 0; i < size; i++) {
@@ -2206,6 +2215,7 @@
sp<IAfEffectModule> EffectChain::getEffectFromType_l(
const effect_uuid_t *type) const
{
+ audio_utils::lock_guard _l(mutex());
size_t size = mEffects.size();
for (size_t i = 0; i < size; i++) {
@@ -2296,8 +2306,7 @@
}
}
-// createEffect_l() must be called with IAfThreadBase::mutex() held
-status_t EffectChain::createEffect_l(sp<IAfEffectModule>& effect,
+status_t EffectChain::createEffect(sp<IAfEffectModule>& effect,
effect_descriptor_t *desc,
int id,
audio_session_t sessionId,
@@ -2307,7 +2316,7 @@
effect = new EffectModule(mEffectCallback, desc, id, sessionId, pinned, AUDIO_PORT_HANDLE_NONE);
status_t lStatus = effect->status();
if (lStatus == NO_ERROR) {
- lStatus = addEffect_ll(effect);
+ lStatus = addEffect_l(effect);
}
if (lStatus != NO_ERROR) {
effect.clear();
@@ -2315,22 +2324,22 @@
return lStatus;
}
-// addEffect_l() must be called with IAfThreadBase::mutex() held
-status_t EffectChain::addEffect_l(const sp<IAfEffectModule>& effect)
+status_t EffectChain::addEffect(const sp<IAfEffectModule>& effect)
{
audio_utils::lock_guard _l(mutex());
- return addEffect_ll(effect);
+ return addEffect_l(effect);
}
-// addEffect_l() must be called with IAfThreadBase::mutex() and EffectChain::mutex() held
-status_t EffectChain::addEffect_ll(const sp<IAfEffectModule>& effect)
+// addEffect_l() must be called with EffectChain::mutex() held
+status_t EffectChain::addEffect_l(const sp<IAfEffectModule>& effect)
{
effect->setCallback(mEffectCallback);
effect_descriptor_t desc = effect->desc();
+ ssize_t idx_insert = 0;
if ((desc.flags & EFFECT_FLAG_TYPE_MASK) == EFFECT_FLAG_TYPE_AUXILIARY) {
// Auxiliary effects are inserted at the beginning of mEffects vector as
// they are processed first and accumulated in chain input buffer
- mEffects.insertAt(effect, 0);
+ mEffects.insertAt(effect, idx_insert);
// the input buffer for auxiliary effect contains mono samples in
// 32 bit format. This is to avoid saturation in AudoMixer
@@ -2350,7 +2359,7 @@
// by insert effects
effect->setOutBuffer(mInBuffer);
} else {
- ssize_t idx_insert = getInsertIndex_ll(desc);
+ idx_insert = getInsertIndex_l(desc);
if (idx_insert < 0) {
return INVALID_OPERATION;
}
@@ -2399,6 +2408,18 @@
}
effect->configure_l();
+ if (effect->isVolumeControl()) {
+ const auto volumeControlIndex = findVolumeControl_l(0, mEffects.size());
+ if (!volumeControlIndex.has_value() || (ssize_t)volumeControlIndex.value() < idx_insert) {
+ // If this effect will become the volume controller once it is enabled, force-initialize
+ // its volume to 0 for safer ramping. The actual
+ // volume will be set from setVolume_l.
+ uint32_t left = 0;
+ uint32_t right = 0;
+ effect->setVolume(&left, &right, true /*controller*/, true /*force*/);
+ }
+ }
+
return NO_ERROR;
}
@@ -2411,7 +2432,7 @@
return std::nullopt;
}
-ssize_t EffectChain::getInsertIndex_ll(const effect_descriptor_t& desc) {
+ssize_t EffectChain::getInsertIndex_l(const effect_descriptor_t& desc) {
// Insert effects are inserted at the end of mEffects vector as they are processed
// after track and auxiliary effects.
// Insert effect order as a function of indicated preference:
@@ -2484,8 +2505,7 @@
return idx_insert;
}
-// removeEffect_l() must be called with IAfThreadBase::mutex() held
-size_t EffectChain::removeEffect_l(const sp<IAfEffectModule>& effect,
+size_t EffectChain::removeEffect(const sp<IAfEffectModule>& effect,
bool release)
{
audio_utils::lock_guard _l(mutex());
@@ -2536,6 +2556,7 @@
// setDevices_l() must be called with IAfThreadBase::mutex() held
void EffectChain::setDevices_l(const AudioDeviceTypeAddrVector &devices)
{
+ audio_utils::lock_guard _l(mutex());
size_t size = mEffects.size();
for (size_t i = 0; i < size; i++) {
mEffects[i]->setDevices(devices);
@@ -2545,6 +2566,7 @@
// setInputDevice_l() must be called with IAfThreadBase::mutex() held
void EffectChain::setInputDevice_l(const AudioDeviceTypeAddr &device)
{
+ audio_utils::lock_guard _l(mutex());
size_t size = mEffects.size();
for (size_t i = 0; i < size; i++) {
mEffects[i]->setInputDevice(device);
@@ -2554,6 +2576,7 @@
// setMode_l() must be called with IAfThreadBase::mutex() held
void EffectChain::setMode_l(audio_mode_t mode)
{
+ audio_utils::lock_guard _l(mutex());
size_t size = mEffects.size();
for (size_t i = 0; i < size; i++) {
mEffects[i]->setMode(mode);
@@ -2563,6 +2586,7 @@
// setAudioSource_l() must be called with IAfThreadBase::mutex() held
void EffectChain::setAudioSource_l(audio_source_t source)
{
+ audio_utils::lock_guard _l(mutex());
size_t size = mEffects.size();
for (size_t i = 0; i < size; i++) {
mEffects[i]->setAudioSource(source);
@@ -2604,21 +2628,16 @@
return volumeControlIndex.has_value();
}
- if (volumeControlEffect != cachedVolumeControlEffect) {
- // The volume control effect is a new one. Set the old one as full volume. Set the new onw
- // as zero for safe ramping.
- if (cachedVolumeControlEffect != nullptr) {
+ for (int i = 0; i < ctrlIdx; ++i) {
+ // For all volume control effects before the effect that controls volume, set the volume
+ // to maximum to avoid double attenuation.
+ if (mEffects[i]->isVolumeControl()) {
uint32_t leftMax = 1 << 24;
uint32_t rightMax = 1 << 24;
- cachedVolumeControlEffect->setVolume(&leftMax, &rightMax, true /*controller*/);
+ mEffects[i]->setVolume(&leftMax, &rightMax, true /*controller*/, true /*force*/);
}
- if (volumeControlEffect != nullptr) {
- uint32_t leftZero = 0;
- uint32_t rightZero = 0;
- volumeControlEffect->setVolume(&leftZero, &rightZero, true /*controller*/);
- }
- mVolumeControlEffect = volumeControlEffect;
}
+
mLeftVolume = newLeft;
mRightVolume = newRight;
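The 1 << 24 literals in the loop above are unity gain in the 8.24 fixed-point format AudioFlinger uses for effect volumes (an existing convention, not something introduced by this patch). A tiny conversion sketch:

#include <cstdint>

constexpr uint32_t kUnityGain8_24 = 1u << 24;   // 1.0 in unsigned 8.24 fixed point

constexpr uint32_t floatToFixed8_24(float gain) {
    return static_cast<uint32_t>(gain * kUnityGain8_24);
}

// floatToFixed8_24(1.0f) == 0x01000000 (full volume), floatToFixed8_24(0.5f) == 0x00800000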
@@ -2668,8 +2687,12 @@
}
}
-// containsHapticGeneratingEffect_l must be called with
-// IAfThreadBase::mutex() or EffectChain::mutex() held
+bool EffectChain::containsHapticGeneratingEffect()
+{
+ audio_utils::lock_guard _l(mutex());
+ return containsHapticGeneratingEffect_l();
+}
+// containsHapticGeneratingEffect_l must be called with EffectChain::mutex() held
bool EffectChain::containsHapticGeneratingEffect_l()
{
for (size_t i = 0; i < mEffects.size(); ++i) {
@@ -2810,7 +2833,7 @@
}
if (desc->mRefCount++ == 0) {
Vector< sp<IAfEffectModule> > effects;
- getSuspendEligibleEffects_l(effects);
+ getSuspendEligibleEffects(effects);
for (size_t i = 0; i < effects.size(); i++) {
setEffectSuspended_l(&effects[i]->desc().type, true);
}
@@ -2860,7 +2883,7 @@
return false;
}
-bool EffectChain::isEffectEligibleForSuspend_l(const effect_descriptor_t& desc)
+bool EffectChain::isEffectEligibleForSuspend(const effect_descriptor_t& desc)
{
// auxiliary effects and visualizer are never suspended on output mix
if ((mSessionId == AUDIO_SESSION_OUTPUT_MIX) &&
@@ -2873,12 +2896,13 @@
return true;
}
-void EffectChain::getSuspendEligibleEffects_l(
+void EffectChain::getSuspendEligibleEffects(
Vector< sp<IAfEffectModule> > &effects)
{
effects.clear();
+ audio_utils::lock_guard _l(mutex());
for (size_t i = 0; i < mEffects.size(); i++) {
- if (isEffectEligibleForSuspend_l(mEffects[i]->desc())) {
+ if (isEffectEligibleForSuspend(mEffects[i]->desc())) {
effects.add(mEffects[i]);
}
}
@@ -2899,7 +2923,7 @@
if (index < 0) {
return;
}
- if (!isEffectEligibleForSuspend_l(effect->desc())) {
+ if (!isEffectEligibleForSuspend(effect->desc())) {
return;
}
setEffectSuspended_l(&effect->desc().type, enabled);
@@ -2947,6 +2971,12 @@
void EffectChain::setThread(const sp<IAfThreadBase>& thread)
{
+ if (thread != nullptr) {
+ mStrategy = thread->getStrategyForStream(AUDIO_STREAM_MUSIC);
+ mMaxTailBuffers =
+ ((kProcessTailDurationMs * thread->sampleRate()) / 1000) /
+ thread->frameCount();
+ }
audio_utils::lock_guard _l(mutex());
mEffectCallback->setThread(thread);
}
@@ -3136,7 +3166,7 @@
uint32_t EffectChain::EffectCallback::sampleRate() const {
const sp<IAfThreadBase> t = thread().promote();
if (t == nullptr) {
- return 0;
+ return DEFAULT_OUTPUT_SAMPLE_RATE;
}
return t->sampleRate();
}
@@ -3144,6 +3174,7 @@
audio_channel_mask_t EffectChain::EffectCallback::inChannelMask(int id) const
NO_THREAD_SAFETY_ANALYSIS
// calling function 'hasAudioSession_l' requires holding mutex 'ThreadBase_Mutex' exclusively
+// calling function 'isFirstEffect_l' requires holding mutex 'EffectChain_Mutex' exclusively
{
const sp<IAfThreadBase> t = thread().promote();
if (t == nullptr) {
@@ -3156,7 +3187,7 @@
if (mThreadType == IAfThreadBase::SPATIALIZER) {
if (c->sessionId() == AUDIO_SESSION_OUTPUT_STAGE) {
- if (c->isFirstEffect(id)) {
+ if (c->isFirstEffect_l(id)) {
return t->mixerChannelMask();
} else {
return t->channelMask();
@@ -3224,7 +3255,8 @@
size_t EffectChain::EffectCallback::frameCount() const {
const sp<IAfThreadBase> t = thread().promote();
if (t == nullptr) {
- return 0;
+ // frameCount cannot be zero.
+ return 1;
}
return t->frameCount();
}
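Stepping back from the individual hunks: the recurring shape of this Effects.cpp change is that public entry points now take EffectChain_Mutex themselves and delegate to a _l variant that assumes the caller already holds it, instead of relying on the thread mutex. A minimal standalone sketch of that wrapper-plus-_l convention (plain std::mutex here, not the audio_utils::mutex the code uses):

#include <mutex>
#include <vector>

class Chain {
public:
    // Public wrapper: acquires the chain lock, then calls the _l variant.
    bool containsHapticGeneratingEffect() {
        std::lock_guard<std::mutex> _l(mMutex);
        return containsHapticGeneratingEffect_l();
    }
private:
    // _l variant: the caller must already hold mMutex.
    bool containsHapticGeneratingEffect_l() const {
        for (const auto& e : mEffects) {
            if (e.isHapticGenerator) return true;
        }
        return false;
    }
    struct Effect { bool isHapticGenerator = false; };
    std::mutex mMutex;
    std::vector<Effect> mEffects;
};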
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index 5e527d3..0fa1b83 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -25,6 +25,8 @@
#include <private/media/AudioEffectShared.h>
#include <map> // avoid transitive dependency
+#include <optional>
+#include <vector>
namespace android {
@@ -215,7 +217,7 @@
}
status_t setDevices(const AudioDeviceTypeAddrVector& devices) final EXCLUDES_EffectBase_Mutex;
status_t setInputDevice(const AudioDeviceTypeAddr& device) final EXCLUDES_EffectBase_Mutex;
- status_t setVolume(uint32_t *left, uint32_t *right, bool controller) final;
+ status_t setVolume(uint32_t *left, uint32_t *right, bool controller, bool force) final;
status_t setMode(audio_mode_t mode) final EXCLUDES_EffectBase_Mutex;
status_t setAudioSource(audio_source_t source) final EXCLUDES_EffectBase_Mutex;
status_t start_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
@@ -234,9 +236,9 @@
bool isSpatializer() const final;
status_t setHapticScale_l(int id, os::HapticScale hapticScale) final
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex;
+ REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo) final
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex;
+ REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata) final
REQUIRES(audio_utils::ThreadBase_Mutex,
audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
@@ -304,6 +306,8 @@
static constexpr pid_t INVALID_PID = (pid_t)-1;
// this tid is allowed to call setVolume() without acquiring the mutex.
pid_t mSetVolumeReentrantTid = INVALID_PID;
+
+ std::optional<std::vector<uint32_t>> mVolume;
};
// The EffectHandle class implements the IEffect interface. It provides resources
@@ -412,7 +416,9 @@
// it also provides its own input buffer used by the track as an accumulation buffer.
class EffectChain : public IAfEffectChain {
public:
- EffectChain(const sp<IAfThreadBase>& thread, audio_session_t sessionId);
+ EffectChain(const sp<IAfThreadBase>& thread,
+ audio_session_t sessionId,
+ const sp<IAfThreadCallback>& afThreadCallback);
void process_l() final REQUIRES(audio_utils::EffectChain_Mutex);
@@ -420,25 +426,25 @@
return mMutex;
}
- status_t createEffect_l(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
+ status_t createEffect(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
audio_session_t sessionId, bool pinned) final
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+ EXCLUDES_EffectChain_Mutex;
+ status_t addEffect(const sp<IAfEffectModule>& handle) final
+ EXCLUDES_EffectChain_Mutex;
status_t addEffect_l(const sp<IAfEffectModule>& handle) final
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
- status_t addEffect_ll(const sp<IAfEffectModule>& handle) final
- REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex);
- size_t removeEffect_l(const sp<IAfEffectModule>& handle, bool release = false) final
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+ REQUIRES(audio_utils::EffectChain_Mutex);
+ size_t removeEffect(const sp<IAfEffectModule>& handle, bool release = false) final
+ EXCLUDES_EffectChain_Mutex;
audio_session_t sessionId() const final { return mSessionId; }
void setSessionId(audio_session_t sessionId) final { mSessionId = sessionId; }
- sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t* descriptor) const final
- REQUIRES(audio_utils::ThreadBase_Mutex);
+ sp<IAfEffectModule> getEffectFromDesc(effect_descriptor_t* descriptor) const final
+ EXCLUDES_EffectChain_Mutex;
sp<IAfEffectModule> getEffectFromId_l(int id) const final
- REQUIRES(audio_utils::ThreadBase_Mutex);
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t* type) const final
- REQUIRES(audio_utils::ThreadBase_Mutex);
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
std::vector<int> getEffectIds_l() const final REQUIRES(audio_utils::ThreadBase_Mutex);
// FIXME use float to improve the dynamic range
@@ -446,11 +452,13 @@
bool force = false) final EXCLUDES_EffectChain_Mutex;
void resetVolume_l() final REQUIRES(audio_utils::EffectChain_Mutex);
void setDevices_l(const AudioDeviceTypeAddrVector& devices) final
- REQUIRES(audio_utils::ThreadBase_Mutex);
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
void setInputDevice_l(const AudioDeviceTypeAddr& device) final
- REQUIRES(audio_utils::ThreadBase_Mutex);
- void setMode_l(audio_mode_t mode) final REQUIRES(audio_utils::ThreadBase_Mutex);
- void setAudioSource_l(audio_source_t source) final REQUIRES(audio_utils::ThreadBase_Mutex);
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+ void setMode_l(audio_mode_t mode) final
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
+ void setAudioSource_l(audio_source_t source) final
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
void setInBuffer(const sp<EffectBufferHalInterface>& buffer) final {
mInBuffer = buffer;
@@ -517,8 +525,11 @@
bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const final
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
- // Requires either IAfThreadBase::mutex() or EffectChain::mutex() held
- bool containsHapticGeneratingEffect_l() final;
+ bool containsHapticGeneratingEffect() final
+ EXCLUDES_EffectChain_Mutex;
+
+ bool containsHapticGeneratingEffect_l() final
+ REQUIRES(audio_utils::EffectChain_Mutex);
void setHapticScale_l(int id, os::HapticScale hapticScale) final
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex;
@@ -527,15 +538,19 @@
wp<IAfThreadBase> thread() const final { return mEffectCallback->thread(); }
- bool isFirstEffect(int id) const final {
+ bool isFirstEffect_l(int id) const final REQUIRES(audio_utils::EffectChain_Mutex) {
return !mEffects.isEmpty() && id == mEffects[0]->id();
}
void dump(int fd, const Vector<String16>& args) const final;
- size_t numberOfEffects() const final { return mEffects.size(); }
+ size_t numberOfEffects() const final {
+ audio_utils::lock_guard _l(mutex());
+ return mEffects.size();
+ }
sp<IAfEffectModule> getEffectModule(size_t index) const final {
+ audio_utils::lock_guard _l(mutex());
return mEffects[index];
}
@@ -560,11 +575,13 @@
// Note: ctors taking a weak pointer to their owner must not promote it
// during construction (but may keep a reference for later promotion).
EffectCallback(const wp<EffectChain>& owner,
- const sp<IAfThreadBase>& thread) // we take a sp<> but store a wp<>.
+ const sp<IAfThreadBase>& thread,
+ const sp<IAfThreadCallback>& afThreadCallback) // we take a sp<> but store a wp<>.
: mChain(owner)
- , mThread(thread) {
- mThreadType = thread->type();
- mAfThreadCallback = thread->afThreadCallback();
+ , mThread(thread), mAfThreadCallback(afThreadCallback) {
+ if (thread != nullptr) {
+ mThreadType = thread->type();
+ }
}
status_t createEffectHal(const effect_uuid_t *pEffectUuid,
@@ -606,6 +623,9 @@
wp<IAfEffectChain> chain() const final { return mChain; }
bool isAudioPolicyReady() const final {
+ if (mAfThreadCallback == nullptr) {
+ return false;
+ }
return mAfThreadCallback->isAudioPolicyReady();
}
@@ -613,8 +633,10 @@
void setThread(const sp<IAfThreadBase>& thread) {
mThread = thread;
- mThreadType = thread->type();
- mAfThreadCallback = thread->afThreadCallback();
+ if (thread != nullptr) {
+ mThreadType = thread->type();
+ mAfThreadCallback = thread->afThreadCallback();
+ }
}
bool hasThreadAttached() const {
return thread().promote() != nullptr;
@@ -623,7 +645,7 @@
const wp<IAfEffectChain> mChain;
mediautils::atomic_wp<IAfThreadBase> mThread;
sp<IAfThreadCallback> mAfThreadCallback;
- IAfThreadBase::type_t mThreadType;
+ IAfThreadBase::type_t mThreadType = IAfThreadBase::MIXER;
};
DISALLOW_COPY_AND_ASSIGN(EffectChain);
@@ -639,8 +661,8 @@
// get a list of effect modules to suspend when an effect of the type
// passed is enabled.
- void getSuspendEligibleEffects_l(Vector<sp<IAfEffectModule>>& effects)
- REQUIRES(audio_utils::ThreadBase_Mutex);
+ void getSuspendEligibleEffects(Vector<sp<IAfEffectModule>>& effects)
+ EXCLUDES_EffectChain_Mutex;
// get an effect module if it is currently enable
sp<IAfEffectModule> getEffectIfEnabled_l(const effect_uuid_t* type)
@@ -648,8 +670,7 @@
// true if the effect whose descriptor is passed can be suspended
// OEMs can modify the rules implemented in this method to exclude specific effect
// types or implementations from the suspend/restore mechanism.
- bool isEffectEligibleForSuspend_l(const effect_descriptor_t& desc)
- REQUIRES(audio_utils::ThreadBase_Mutex);
+ bool isEffectEligibleForSuspend(const effect_descriptor_t& desc);
static bool isEffectEligibleForBtNrecSuspend_l(const effect_uuid_t* type)
REQUIRES(audio_utils::ThreadBase_Mutex);
@@ -662,15 +683,15 @@
void setVolumeForOutput_l(uint32_t left, uint32_t right)
REQUIRES(audio_utils::EffectChain_Mutex);
- ssize_t getInsertIndex_ll(const effect_descriptor_t& desc)
- REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex);
+ ssize_t getInsertIndex_l(const effect_descriptor_t& desc)
+ REQUIRES(audio_utils::EffectChain_Mutex);
std::optional<size_t> findVolumeControl_l(size_t from, size_t to) const
REQUIRES(audio_utils::EffectChain_Mutex);
// mutex protecting effect list
mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectChain_Mutex};
- Vector<sp<IAfEffectModule>> mEffects; // list of effect modules
+ Vector<sp<IAfEffectModule>> mEffects GUARDED_BY(mutex()); // list of effect modules
audio_session_t mSessionId; // audio session ID
sp<EffectBufferHalInterface> mInBuffer; // chain input buffer
sp<EffectBufferHalInterface> mOutBuffer; // chain output buffer
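The annotation churn in this header (REQUIRES(...), EXCLUDES_EffectChain_Mutex, the new GUARDED_BY on mEffects) is clang thread-safety analysis; a minimal self-contained sketch of what those markers mean, using simplified stand-in macros rather than the real audio_utils/android-base ones:

#include <cstddef>

#define CAPABILITY(x)   __attribute__((capability(x)))
#define ACQUIRE()       __attribute__((acquire_capability()))
#define RELEASE()       __attribute__((release_capability()))
#define REQUIRES_CAP(x) __attribute__((requires_capability(x)))
#define EXCLUDES_CAP(x) __attribute__((locks_excluded(x)))
#define GUARDED(x)      __attribute__((guarded_by(x)))

class CAPABILITY("mutex") Mutex {
public:
    void lock() ACQUIRE() {}
    void unlock() RELEASE() {}
};

class Chain {
public:
    size_t numberOfEffects() EXCLUDES_CAP(mMutex) {   // takes the lock itself
        mMutex.lock();
        const size_t n = numberOfEffects_l();
        mMutex.unlock();
        return n;
    }
    size_t numberOfEffects_l() REQUIRES_CAP(mMutex) { return mCount; }  // caller holds the lock
private:
    Mutex mMutex;
    size_t mCount GUARDED(mMutex) = 0;   // clang warns on any unguarded access
};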
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index fd4dd62..d5adeb4 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -163,7 +163,8 @@
virtual int16_t *inBuffer() const = 0;
virtual status_t setDevices(const AudioDeviceTypeAddrVector &devices) = 0;
virtual status_t setInputDevice(const AudioDeviceTypeAddr &device) = 0;
- virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller) = 0;
+ virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller,
+ bool force = false) = 0;
virtual status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) = 0;
virtual bool isOffloaded_l() const = 0;
@@ -181,9 +182,9 @@
virtual bool isSpatializer() const = 0;
virtual status_t setHapticScale_l(int id, os::HapticScale hapticScale)
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+ REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
virtual status_t setVibratorInfo_l(const media::AudioVibratorInfo& vibratorInfo)
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+ REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
virtual status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata)
REQUIRES(audio_utils::ThreadBase_Mutex,
audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
@@ -218,7 +219,8 @@
public:
static sp<IAfEffectChain> create(
const sp<IAfThreadBase>& thread,
- audio_session_t sessionId);
+ audio_session_t sessionId,
+ const sp<IAfThreadCallback>& afThreadCallback);
// special key used for an entry in mSuspendedEffects keyed vector
// corresponding to a suspend all request.
@@ -232,35 +234,36 @@
virtual audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::EffectChain_Mutex) = 0;
- virtual status_t createEffect_l(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
+ virtual status_t createEffect(sp<IAfEffectModule>& effect, effect_descriptor_t* desc, int id,
audio_session_t sessionId, bool pinned)
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+ EXCLUDES_EffectChain_Mutex = 0;
+ virtual status_t addEffect(const sp<IAfEffectModule>& handle)
+ EXCLUDES_EffectChain_Mutex = 0;
virtual status_t addEffect_l(const sp<IAfEffectModule>& handle)
- REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
- virtual status_t addEffect_ll(const sp<IAfEffectModule>& handle)
- REQUIRES(audio_utils::ThreadBase_Mutex, audio_utils::EffectChain_Mutex) = 0;
- virtual size_t removeEffect_l(const sp<IAfEffectModule>& handle,
+ REQUIRES(audio_utils::EffectChain_Mutex) = 0;
+ virtual size_t removeEffect(const sp<IAfEffectModule>& handle,
bool release = false) EXCLUDES_EffectChain_Mutex = 0;
virtual audio_session_t sessionId() const = 0;
virtual void setSessionId(audio_session_t sessionId) = 0;
- virtual sp<IAfEffectModule> getEffectFromDesc_l(effect_descriptor_t* descriptor) const
- REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+ virtual sp<IAfEffectModule> getEffectFromDesc(effect_descriptor_t* descriptor) const
+ EXCLUDES_EffectChain_Mutex = 0;
virtual sp<IAfEffectModule> getEffectFromId_l(int id) const
- REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
virtual sp<IAfEffectModule> getEffectFromType_l(const effect_uuid_t* type) const
- REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
virtual std::vector<int> getEffectIds_l() const = 0;
virtual bool setVolume(uint32_t* left, uint32_t* right,
bool force = false) EXCLUDES_EffectChain_Mutex = 0;
virtual void resetVolume_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
virtual void setDevices_l(const AudioDeviceTypeAddrVector& devices)
- REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
virtual void setInputDevice_l(const AudioDeviceTypeAddr& device)
- REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
- virtual void setMode_l(audio_mode_t mode) REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
+ virtual void setMode_l(audio_mode_t mode)
+ REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
virtual void setAudioSource_l(audio_source_t source)
REQUIRES(audio_utils::ThreadBase_Mutex) = 0;
@@ -317,7 +320,11 @@
virtual bool isCompatibleWithThread_l(const sp<IAfThreadBase>& thread) const
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
- virtual bool containsHapticGeneratingEffect_l() = 0;
+ virtual bool containsHapticGeneratingEffect()
+ EXCLUDES_EffectChain_Mutex = 0;
+
+ virtual bool containsHapticGeneratingEffect_l()
+ REQUIRES(audio_utils::EffectChain_Mutex) = 0;
virtual void setHapticScale_l(int id, os::HapticScale hapticScale)
REQUIRES(audio_utils::ThreadBase_Mutex) EXCLUDES_EffectChain_Mutex = 0;
@@ -327,7 +334,7 @@
virtual wp<IAfThreadBase> thread() const = 0;
virtual void setThread(const sp<IAfThreadBase>& thread) EXCLUDES_EffectChain_Mutex = 0;
- virtual bool isFirstEffect(int id) const = 0;
+ virtual bool isFirstEffect_l(int id) const REQUIRES(audio_utils::EffectChain_Mutex) = 0;
virtual size_t numberOfEffects() const = 0;
virtual sp<IAfEffectModule> getEffectModule(size_t index) const = 0;
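The widened setVolume(left, right, controller, force) signature pairs with the std::optional<std::vector<uint32_t>> mVolume cache added alongside it in Effects.h. The setVolume body is not shown here, but a plausible cache-unless-forced shape (an assumption for illustration, not the patch's code) looks like:

#include <cstdint>
#include <optional>
#include <vector>

class VolumeCache {
public:
    // Returns true when the new volume still needs to be pushed to the effect.
    bool shouldApply(uint32_t left, uint32_t right, bool force) {
        std::vector<uint32_t> v{left, right};
        if (!force && mVolume.has_value() && *mVolume == v) {
            return false;   // unchanged and not forced: skip the redundant set
        }
        mVolume = std::move(v);
        return true;
    }
private:
    std::optional<std::vector<uint32_t>> mVolume;   // last volume applied, if any
};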
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index b519289..bcd8b99 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -694,6 +694,10 @@
}
// When Thread::requestExitAndWait is made virtual and this method is renamed to
// "virtual status_t requestExitAndWait()", replace by "return Thread::requestExitAndWait();"
+
+ // For TimeCheck: track waiting on the thread join of getTid().
+ audio_utils::mutex::scoped_join_wait_check sjw(getTid());
+
requestExitAndWait();
}
@@ -1662,12 +1666,12 @@
if (chain == 0) {
// create a new chain for this session
ALOGV("createEffect_l() new effect chain for session %d", sessionId);
- chain = IAfEffectChain::create(this, sessionId);
+ chain = IAfEffectChain::create(this, sessionId, mAfThreadCallback);
addEffectChain_l(chain);
chain->setStrategy(getStrategyForSession_l(sessionId));
chainCreated = true;
} else {
- effect = chain->getEffectFromDesc_l(desc);
+ effect = chain->getEffectFromDesc(desc);
}
ALOGV("createEffect_l() got effect %p on chain %p", effect.get(), chain.get());
@@ -1675,7 +1679,7 @@
if (effect == 0) {
effectId = mAfThreadCallback->nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
// create a new effect module if none present in the chain
- lStatus = chain->createEffect_l(effect, desc, effectId, sessionId, pinned);
+ lStatus = chain->createEffect(effect, desc, effectId, sessionId, pinned);
if (lStatus != NO_ERROR) {
goto Exit;
}
@@ -1693,6 +1697,7 @@
const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
if (defaultVibratorInfo) {
+ audio_utils::lock_guard _cl(chain->mutex());
// Only set the vibrator info when it is a valid one.
effect->setVibratorInfo_l(*defaultVibratorInfo);
}
@@ -1714,7 +1719,7 @@
if (!probe && lStatus != NO_ERROR && lStatus != ALREADY_EXISTS) {
audio_utils::lock_guard _l(mutex());
if (effectCreated) {
- chain->removeEffect_l(effect);
+ chain->removeEffect(effect);
}
if (chainCreated) {
removeEffectChain_l(chain);
@@ -1815,7 +1820,7 @@
if (chain == 0) {
// create a new chain for this session
ALOGV("%s: new effect chain for session %d", __func__, sessionId);
- chain = IAfEffectChain::create(this, sessionId);
+ chain = IAfEffectChain::create(this, sessionId, mAfThreadCallback);
addEffectChain_l(chain);
chain->setStrategy(getStrategyForSession_l(sessionId));
chainCreated = true;
@@ -1830,7 +1835,7 @@
effect->setOffloaded_l(mType == OFFLOAD, mId);
- status_t status = chain->addEffect_l(effect);
+ status_t status = chain->addEffect(effect);
if (status != NO_ERROR) {
if (chainCreated) {
removeEffectChain_l(chain);
@@ -1857,7 +1862,7 @@
sp<IAfEffectChain> chain = effect->getCallback()->chain().promote();
if (chain != 0) {
// remove effect chain if removing last effect
- if (chain->removeEffect_l(effect, release) == 0) {
+ if (chain->removeEffect(effect, release) == 0) {
removeEffectChain_l(chain);
}
} else {
@@ -2897,7 +2902,7 @@
sp<IAfEffectChain> chain = getEffectChain_l(track->sessionId());
if (mHapticChannelMask != AUDIO_CHANNEL_NONE
&& ((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
- || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
+ || (chain != nullptr && chain->containsHapticGeneratingEffect()))) {
// Unlock due to VibratorService will lock for this call and will
// call Tracks.mute/unmute which also require thread's lock.
mutex().unlock();
@@ -4800,7 +4805,7 @@
}
if (mHapticChannelCount > 0 &&
((track->channelMask() & AUDIO_CHANNEL_HAPTIC_ALL) != AUDIO_CHANNEL_NONE
- || (chain != nullptr && chain->containsHapticGeneratingEffect_l()))) {
+ || (chain != nullptr && chain->containsHapticGeneratingEffect()))) {
mutex().unlock();
// Unlock due to VibratorService will lock for this call and will
// call Tracks.mute/unmute which also require thread's lock.
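Both haptic hunks above keep the pre-existing pattern of dropping the thread lock before calling into VibratorService, because the service can call back into track mute/unmute, which needs the same lock. A generic sketch of that unlock-around-callback pattern (not the AudioFlinger code):

#include <mutex>

std::mutex gStateLock;

void notifyVibratorService() {
    // Placeholder for a call that may re-enter and take gStateLock itself.
}

void updateHapticMuteState() {
    std::unique_lock<std::mutex> lock(gStateLock);
    // ... compute the new state while holding the lock ...
    lock.unlock();              // drop the lock: the callee may acquire it again
    notifyVibratorService();
    lock.lock();                // re-acquire before touching guarded state
    // ... continue under the lock ...
}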
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index a256a82..2dea379 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -3595,7 +3595,7 @@
int session,
int id)
{
- if (session != AUDIO_SESSION_DEVICE) {
+ if (session != AUDIO_SESSION_DEVICE && io != AUDIO_IO_HANDLE_NONE) {
ssize_t index = mOutputs.indexOfKey(io);
if (index < 0) {
index = mInputs.indexOfKey(io);
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index c0d9fdf..8ec2e4f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1461,6 +1461,11 @@
int triggerId;
{
SharedParameters::Lock l(mParameters);
+ if (l.mParameters.state == Parameters::DISCONNECTED) {
+ ALOGE("%s: Camera %d has been disconnected.", __FUNCTION__, mCameraId);
+ return INVALID_OPERATION;
+ }
+
// Canceling does nothing in FIXED or INFINITY modes
if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED ||
l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) {
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 34964d4..92309c3 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -137,16 +137,17 @@
return NO_INIT;
}
+ // Verify ops permissions
+ res = TClientBase::startCameraOps();
+ if (res != OK) {
+ TClientBase::finishCameraOps();
+ return res;
+ }
+
res = mDevice->initialize(providerPtr, monitorTags);
if (res != OK) {
ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
__FUNCTION__, TClientBase::mCameraIdStr.c_str(), strerror(-res), res);
- return res;
- }
-
- // Verify ops permissions
- res = TClientBase::startCameraOps();
- if (res != OK) {
TClientBase::finishCameraOps();
return res;
}
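The Camera2ClientBase hunk moves the app-ops check ahead of device initialization and keeps finishCameraOps() on every failure path that follows. A generic scope-guard sketch of the same start/finish pairing (a hypothetical helper and names, not what the patch uses):

#include <functional>
#include <utility>

class ScopeGuard {
public:
    explicit ScopeGuard(std::function<void()> onExit) : mOnExit(std::move(onExit)) {}
    ~ScopeGuard() { if (mOnExit) mOnExit(); }
    void release() { mOnExit = nullptr; }   // disarm on success
    ScopeGuard(const ScopeGuard&) = delete;
    ScopeGuard& operator=(const ScopeGuard&) = delete;
private:
    std::function<void()> mOnExit;
};

// Hypothetical usage (the patch instead calls finishCameraOps() by hand on each
// failure path, which is broadly equivalent):
//   if (startCameraOps() != OK) { finishCameraOps(); return error; }
//   ScopeGuard opsGuard([] { finishCameraOps(); });
//   if (initializeDevice() != OK) return error;   // guard runs finishCameraOps()
//   opsGuard.release();                           // success: keep camera ops open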
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index ecebad4..0a9699c 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -198,6 +198,8 @@
return res;
}
+ setCameraMuteLocked(mCameraMuteInitial);
+
mPreparerThread = new PreparerThread();
internalUpdateStatusLocked(STATUS_UNCONFIGURED);
@@ -5606,10 +5608,19 @@
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
+ return setCameraMuteLocked(enabled);
+}
- if (mRequestThread == nullptr || !mSupportCameraMute) {
+status_t Camera3Device::setCameraMuteLocked(bool enabled) {
+ if (mRequestThread == nullptr) {
+ mCameraMuteInitial = enabled;
+ return OK;
+ }
+
+ if (!mSupportCameraMute) {
return INVALID_OPERATION;
}
+
int32_t muteMode =
!enabled ? ANDROID_SENSOR_TEST_PATTERN_MODE_OFF :
mSupportTestPatternSolidColor ? ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR :
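The net effect of the Camera3Device.cpp change: a mute request that arrives before the request thread exists is remembered in mCameraMuteInitial and replayed from initialize(). A simplified standalone sketch of that defer-until-ready pattern (placeholder names, not the Camera3Device members):

class Device {
public:
    void initialize() {
        mReady = true;
        applyMuteToHal(mMuteInitial);   // replay whatever was requested before init
    }
    void setMute(bool enabled) {
        if (!mReady) {                  // request thread not up yet: just remember it
            mMuteInitial = enabled;
            return;
        }
        applyMuteToHal(enabled);
    }
private:
    void applyMuteToHal(bool /*enabled*/) { /* program the test-pattern mode, elided */ }
    bool mReady = false;
    bool mMuteInitial = false;
};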
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 5b37f6c..42fa8f1 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -308,6 +308,15 @@
status_t setCameraMute(bool enabled);
/**
+ * Mute the camera.
+ *
+ * When muted, black image data is output on all output streams.
+ * This method assumes the caller already acquired the 'mInterfaceLock'
+ * and 'mLock' locks.
+ */
+ status_t setCameraMuteLocked(bool enabled);
+
+ /**
* Enables/disables camera service watchdog
*/
status_t setCameraServiceWatchdog(bool enabled);
@@ -1513,6 +1522,10 @@
// Auto framing override value
camera_metadata_enum_android_control_autoframing mAutoframingOverride;
+ // Initial camera mute state stored before the request thread
+ // is active.
+ bool mCameraMuteInitial = false;
+
// Settings override value
int32_t mSettingsOverride; // -1 = use original, otherwise
// the settings override to use.
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index 18961c0..27873e7 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -18,18 +18,25 @@
#define LOG_TAG "VirtualCameraService"
#include "VirtualCameraService.h"
+#include <algorithm>
#include <cinttypes>
#include <cstdint>
#include <cstdio>
+#include <iterator>
#include <memory>
#include <mutex>
+#include <optional>
+#include <regex>
+#include <variant>
#include "VirtualCameraDevice.h"
#include "VirtualCameraProvider.h"
#include "aidl/android/companion/virtualcamera/Format.h"
+#include "aidl/android/companion/virtualcamera/LensFacing.h"
#include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
#include "android/binder_auto_utils.h"
#include "android/binder_libbinder.h"
+#include "android/binder_status.h"
#include "binder/Status.h"
#include "util/Permissions.h"
#include "util/Util.h"
@@ -57,9 +64,15 @@
constexpr int kMaxFps = 60;
constexpr char kEnableTestCameraCmd[] = "enable_test_camera";
constexpr char kDisableTestCameraCmd[] = "disable_test_camera";
+constexpr char kHelp[] = "help";
constexpr char kShellCmdHelp[] = R"(
+Usage:
+ cmd virtual_camera command [--option=value]
Available commands:
* enable_test_camera
+ Options:
+ --camera_id=(ID) - override numerical ID for test camera instance
+ --lens_facing=(front|back|external) - specifies lens facing for test camera instance
* disable_test_camera
)";
constexpr char kCreateVirtualDevicePermission[] =
@@ -102,6 +115,66 @@
return ndk::ScopedAStatus::ok();
}
+enum class Command {
+ ENABLE_TEST_CAMERA,
+ DISABLE_TEST_CAMERA,
+ HELP,
+};
+
+struct CommandWithOptions {
+ Command command;
+ std::map<std::string, std::string> optionToValueMap;
+};
+
+std::optional<int> parseInt(const std::string& s) {
+ if (!std::all_of(s.begin(), s.end(), [](char c) { return std::isdigit(c); })) {
+ return std::nullopt;
+ }
+ int ret = atoi(s.c_str());
+ return ret > 0 ? std::optional(ret) : std::nullopt;
+}
+
+std::optional<LensFacing> parseLensFacing(const std::string& s) {
+ static const std::map<std::string, LensFacing> strToLensFacing{
+ {"front", LensFacing::FRONT},
+ {"back", LensFacing::BACK},
+ {"external", LensFacing::EXTERNAL}};
+ auto it = strToLensFacing.find(s);
+ return it == strToLensFacing.end() ? std::nullopt : std::optional(it->second);
+}
+
+std::variant<CommandWithOptions, std::string> parseCommand(
+ const char** args, const uint32_t numArgs) {
+ static const std::regex optionRegex("^--(\\w+)(?:=(.+))?$");
+ static const std::map<std::string, Command> strToCommand{
+ {kHelp, Command::HELP},
+ {kEnableTestCameraCmd, Command::ENABLE_TEST_CAMERA},
+ {kDisableTestCameraCmd, Command::DISABLE_TEST_CAMERA}};
+
+ if (numArgs < 1) {
+ return CommandWithOptions{.command = Command::HELP};
+ }
+
+ // We interpret the first argument as the command.
+ auto it = strToCommand.find(args[0]);
+ if (it == strToCommand.end()) {
+ return "Unknown command: " + std::string(args[0]);
+ }
+
+ CommandWithOptions cmd{.command = it->second};
+
+ for (int i = 1; i < numArgs; i++) {
+ std::cmatch cm;
+ if (!std::regex_match(args[i], cm, optionRegex)) {
+ return "Not an option: " + std::string(args[i]);
+ }
+
+ cmd.optionToValueMap[cm[1]] = cm[2];
+ }
+
+ return cmd;
+};
+
} // namespace
VirtualCameraService::VirtualCameraService(
@@ -232,8 +305,7 @@
return mVirtualCameraProvider->getCamera(it->second);
}
-binder_status_t VirtualCameraService::handleShellCommand(int in, int out,
- int err,
+binder_status_t VirtualCameraService::handleShellCommand(int, int out, int err,
const char** args,
uint32_t numArgs) {
if (numArgs <= 0) {
@@ -242,36 +314,68 @@
return STATUS_OK;
}
- if (args == nullptr || args[0] == nullptr) {
+ auto isNullptr = [](const char* ptr) { return ptr == nullptr; };
+ if (args == nullptr || std::any_of(args, args + numArgs, isNullptr)) {
return STATUS_BAD_VALUE;
}
- const char* const cmd = args[0];
- if (strcmp(kEnableTestCameraCmd, cmd) == 0) {
- int cameraId = 0;
- if (numArgs > 1 && args[1] != nullptr) {
- cameraId = atoi(args[1]);
- }
- if (cameraId == 0) {
- cameraId = sNextId++;
- }
- enableTestCameraCmd(in, err, cameraId);
- } else if (strcmp(kDisableTestCameraCmd, cmd) == 0) {
- disableTestCameraCmd(in);
- } else {
- dprintf(out, kShellCmdHelp);
+ std::variant<CommandWithOptions, std::string> cmdOrErrorMessage =
+ parseCommand(args, numArgs);
+ if (std::holds_alternative<std::string>(cmdOrErrorMessage)) {
+ dprintf(err, "Error: %s\n",
+ std::get<std::string>(cmdOrErrorMessage).c_str());
+ return STATUS_BAD_VALUE;
}
+ const CommandWithOptions& cmd =
+ std::get<CommandWithOptions>(cmdOrErrorMessage);
+ binder_status_t status = STATUS_OK;
+ switch (cmd.command) {
+ case Command::HELP:
+ dprintf(out, kShellCmdHelp);
+ break;
+ case Command::ENABLE_TEST_CAMERA:
+ status = enableTestCameraCmd(out, err, cmd.optionToValueMap);
+ break;
+ case Command::DISABLE_TEST_CAMERA:
+ disableTestCameraCmd(out);
+ break;
+ }
+
+ fsync(err);
fsync(out);
- return STATUS_OK;
+ return status;
}
-void VirtualCameraService::enableTestCameraCmd(const int out, const int err,
- const int cameraId) {
+binder_status_t VirtualCameraService::enableTestCameraCmd(
+ const int out, const int err,
+ const std::map<std::string, std::string>& options) {
if (mTestCameraToken != nullptr) {
- dprintf(out, "Test camera is already enabled (%s).",
+ dprintf(out, "Test camera is already enabled (%s).\n",
getCamera(mTestCameraToken)->getCameraName().c_str());
- return;
+ return STATUS_OK;
+ }
+
+ std::optional<int> cameraId;
+ auto it = options.find("camera_id");
+ if (it != options.end()) {
+ cameraId = parseInt(it->second);
+ if (!cameraId.has_value()) {
+ dprintf(err, "Invalid camera_id: %s, must be a number > 0\n",
+ it->second.c_str());
+ return STATUS_BAD_VALUE;
+ }
+ }
+
+ std::optional<LensFacing> lensFacing;
+ it = options.find("lens_facing");
+ if (it != options.end()) {
+ lensFacing = parseLensFacing(it->second);
+ if (!lensFacing.has_value()) {
+ dprintf(err, "Invalid lens_facing: %s, must be front|back|external\n",
+ it->second.c_str());
+ return STATUS_BAD_VALUE;
+ }
}
sp<BBinder> token = sp<BBinder>::make();
@@ -283,14 +387,16 @@
.height = kVgaHeight,
Format::YUV_420_888,
.maxFps = kMaxFps});
- configuration.lensFacing = LensFacing::EXTERNAL;
- registerCamera(mTestCameraToken, configuration, cameraId, &ret);
+ configuration.lensFacing = lensFacing.value_or(LensFacing::EXTERNAL);
+ registerCamera(mTestCameraToken, configuration, cameraId.value_or(sNextId++),
+ &ret);
if (ret) {
- dprintf(out, "Successfully registered test camera %s",
+ dprintf(out, "Successfully registered test camera %s\n",
getCamera(mTestCameraToken)->getCameraName().c_str());
} else {
- dprintf(err, "Failed to create test camera");
+ dprintf(err, "Failed to create test camera\n");
}
+ return STATUS_OK;
}
void VirtualCameraService::disableTestCameraCmd(const int out) {
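For orientation, the option-aware shell path now accepts invocations such as `adb shell cmd virtual_camera enable_test_camera --camera_id=42 --lens_facing=front`. A small usage sketch of parseCommand from the anonymous namespace above (illustrative only; the function is file-local):

const char* args[] = {"enable_test_camera", "--camera_id=42", "--lens_facing=front"};
std::variant<CommandWithOptions, std::string> parsed = parseCommand(args, /*numArgs=*/3);
// On success, std::get<CommandWithOptions>(parsed) holds
//   command          == Command::ENABLE_TEST_CAMERA
//   optionToValueMap == {{"camera_id", "42"}, {"lens_facing", "front"}}
// while any token that does not match ^--(\w+)(?:=(.+))?$ makes parseCommand
// return the "Not an option: ..." error string instead.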
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index d447fc7..0b80514 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -71,7 +71,8 @@
private:
// Create and enable test camera instance if there's none.
- void enableTestCameraCmd(int out, int err, int cameraId);
+ binder_status_t enableTestCameraCmd(
+ int out, int err, const std::map<std::string, std::string>& options);
// Disable and destroy test camera instance if there's one.
void disableTestCameraCmd(int out);
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index f729f73..16f40ff 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -14,8 +14,11 @@
* limitations under the License.
*/
+#include <algorithm>
#include <cstdio>
+#include <iterator>
#include <memory>
+#include <regex>
#include "VirtualCameraService.h"
#include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
@@ -29,6 +32,7 @@
#include "binder/Binder.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
+#include "util/MetadataUtil.h"
#include "util/Permissions.h"
#include "utils/Errors.h"
@@ -44,15 +48,18 @@
using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
using ::aidl::android::hardware::camera::common::CameraDeviceStatus;
using ::aidl::android::hardware::camera::common::TorchModeStatus;
+using ::aidl::android::hardware::camera::device::CameraMetadata;
using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback;
using ::aidl::android::hardware::graphics::common::PixelFormat;
using ::aidl::android::view::Surface;
using ::testing::_;
+using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::Ge;
using ::testing::IsEmpty;
using ::testing::IsNull;
using ::testing::Not;
+using ::testing::Optional;
using ::testing::Return;
using ::testing::SizeIs;
@@ -134,20 +141,21 @@
close(mDevNullFd);
}
- void execute_shell_command(const std::string cmd) {
- std::array<const char*, 1> args{cmd.data()};
- ASSERT_THAT(
- mCameraService->handleShellCommand(mDevNullFd, mDevNullFd, mDevNullFd,
- args.data(), args.size()),
- Eq(NO_ERROR));
- }
+ binder_status_t execute_shell_command(const std::string& cmd) {
+ const static std::regex whitespaceRegex("\\s+");
+ std::vector<std::string> tokens;
+ std::copy_if(
+ std::sregex_token_iterator(cmd.begin(), cmd.end(), whitespaceRegex, -1),
+ std::sregex_token_iterator(), std::back_inserter(tokens),
+ [](const std::string& token) { return !token.empty(); });
- void execute_shell_command(const std::string cmd, const std::string cameraId) {
- std::array<const char*, 2> args{cmd.data(), cameraId.data()};
- ASSERT_THAT(
- mCameraService->handleShellCommand(mDevNullFd, mDevNullFd, mDevNullFd,
- args.data(), args.size()),
- Eq(NO_ERROR));
+ std::vector<const char*> argv;
+ argv.reserve(tokens.size());
+ std::transform(tokens.begin(), tokens.end(), std::back_inserter(argv),
+ [](const std::string& str) { return str.c_str(); });
+
+ return mCameraService->handleShellCommand(
+ mDevNullFd, mDevNullFd, mDevNullFd, argv.data(), argv.size());
}
std::vector<std::string> getCameraIds() {
@@ -156,6 +164,17 @@
return cameraIds;
}
+ std::optional<camera_metadata_enum_android_lens_facing> getCameraLensFacing(
+ const std::string& id) {
+ std::shared_ptr<VirtualCameraDevice> camera = mCameraProvider->getCamera(id);
+ if (camera == nullptr) {
+ return std::nullopt;
+ }
+ CameraMetadata metadata;
+ camera->getCameraCharacteristics(&metadata);
+ return getLensFacing(metadata);
+ }
+
protected:
std::shared_ptr<VirtualCameraService> mCameraService;
std::shared_ptr<VirtualCameraProvider> mCameraProvider;
@@ -367,29 +386,61 @@
}
TEST_F(VirtualCameraServiceTest, TestCameraShellCmd) {
- execute_shell_command("enable_test_camera");
+ EXPECT_THAT(execute_shell_command("enable_test_camera"), Eq(NO_ERROR));
std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
EXPECT_THAT(cameraIdsAfterEnable, SizeIs(1));
- execute_shell_command("disable_test_camera");
+ EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
}
TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithId) {
- execute_shell_command("enable_test_camera", "12345");
+ EXPECT_THAT(execute_shell_command("enable_test_camera --camera_id=12345"),
+ Eq(NO_ERROR));
std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
- EXPECT_THAT(cameraIdsAfterEnable, SizeIs(1));
+ EXPECT_THAT(cameraIdsAfterEnable, ElementsAre("device@1.1/virtual/12345"));
- execute_shell_command("disable_test_camera");
+ EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
std::vector<std::string> cameraIdsAfterDisable = getCameraIds();
EXPECT_THAT(cameraIdsAfterDisable, IsEmpty());
}
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidId) {
+ EXPECT_THAT(
+ execute_shell_command("enable_test_camera --camera_id=NotNumericalId"),
+ Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithUnknownCommand) {
+ EXPECT_THAT(execute_shell_command("brew_coffee --flavor=vanilla"),
+ Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithMalformedOption) {
+ EXPECT_THAT(execute_shell_command("enable_test_camera **camera_id=12345"),
+ Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithLensFacing) {
+ EXPECT_THAT(execute_shell_command("enable_test_camera --lens_facing=front"),
+ Eq(NO_ERROR));
+
+ std::vector<std::string> cameraIds = getCameraIds();
+ ASSERT_THAT(cameraIds, SizeIs(1));
+ EXPECT_THAT(getCameraLensFacing(cameraIds[0]),
+ Optional(Eq(ANDROID_LENS_FACING_FRONT)));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidLensFacing) {
+ EXPECT_THAT(execute_shell_command("enable_test_camera --lens_facing=west"),
+ Eq(STATUS_BAD_VALUE));
+}
+
} // namespace
} // namespace virtualcamera
} // namespace companion
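A note on the -1 submatch index in the rewritten execute_shell_command helper: it selects the text between matches of the separator regex, i.e. whitespace-delimited tokens, which is why the helper then filters out empty strings. A standalone sketch:

#include <iostream>
#include <regex>
#include <string>
#include <vector>

int main() {
    const std::regex whitespace("\\s+");
    const std::string cmd = "enable_test_camera --camera_id=42";
    // -1 keeps the pieces *between* separator matches instead of the matches.
    std::vector<std::string> tokens(
        std::sregex_token_iterator(cmd.begin(), cmd.end(), whitespace, -1),
        std::sregex_token_iterator());
    for (const auto& t : tokens) std::cout << t << "\n";
    // Prints "enable_test_camera" then "--camera_id=42"; a leading empty token
    // appears only if the string starts with whitespace.
    return 0;
}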
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
index c7dc80e..822862b 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.cc
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -903,6 +903,20 @@
return coordinates;
}
+std::optional<camera_metadata_enum_android_lens_facing> getLensFacing(
+ const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+ auto metadata =
+ reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+ camera_metadata_ro_entry_t entry;
+ if (find_camera_metadata_ro_entry(metadata, ANDROID_LENS_FACING, &entry) !=
+ OK) {
+ return std::nullopt;
+ }
+
+ return static_cast<camera_metadata_enum_android_lens_facing>(entry.data.u8[0]);
+}
+
} // namespace virtualcamera
} // namespace companion
} // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
index 5d16506..cee867e 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.h
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -469,6 +469,9 @@
std::optional<GpsCoordinates> getGpsCoordinates(
const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+std::optional<camera_metadata_enum_android_lens_facing> getLensFacing(
+ const aidl::android::hardware::camera::device::CameraMetadata& metadata);
+
} // namespace virtualcamera
} // namespace companion
} // namespace android