Merge "fix: Re-namespace st flags" into main
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index cf6b970..1400121 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -576,9 +576,7 @@
*
* @param session the capture session of interest
*
- * @return <ul><li>
- * {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
- * if it is not NULL.</li>
+ * @return <ul><li>{@link ACAMERA_OK} if the method succeeds.</li>
* <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
* <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
* <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
@@ -617,9 +615,7 @@
*
* @param session the capture session of interest
*
- * @return <ul><li>
- * {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
- * if it is not NULL.</li>
+ * @return <ul><li>{@link ACAMERA_OK} if the method succeeds.</li>
* <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
* <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
* <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 39fa187..d1c6239 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -65,6 +65,22 @@
}
flag {
+ name: "portid_volume_management"
+ namespace: "media_audio"
+ description:
+ "Allows to manage volume by port id within audio flinger instead of legacy stream type."
+ bug: "317212590"
+}
+
+flag {
+ name: "power_stats"
+ namespace: "media_audio"
+ description:
+ "Add power stats tracking and management."
+ bug: "350114693"
+}
+
+flag {
name: "use_bt_sco_for_media"
namespace: "media_audio"
description:
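For context, flags declared above are typically read from native code through the aconfig-generated C++ accessors. A minimal sketch, assuming this file's package is com.android.media.audioserver and the standard aconfig C++ codegen (both are assumptions not shown in this hunk):

    // Hypothetical usage; header and namespace follow the usual aconfig codegen
    // for a package named com.android.media.audioserver.
    #include <com_android_media_audioserver.h>

    static bool usePortIdVolumeManagement() {
        namespace audioserver_flags = com::android::media::audioserver;
        return audioserver_flags::portid_volume_management();
    }
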
diff --git a/media/codec2/hal/client/GraphicBufferAllocator.cpp b/media/codec2/hal/client/GraphicBufferAllocator.cpp
index 8f489ec..6a6da0f 100644
--- a/media/codec2/hal/client/GraphicBufferAllocator.cpp
+++ b/media/codec2/hal/client/GraphicBufferAllocator.cpp
@@ -17,7 +17,6 @@
#define LOG_TAG "Codec2-GraphicBufferAllocator"
-#include <gui/IProducerListener.h>
#include <media/stagefright/foundation/ADebug.h>
#include <codec2/aidl/GraphicBufferAllocator.h>
@@ -25,25 +24,6 @@
namespace aidl::android::hardware::media::c2::implementation {
-class OnBufferReleasedListener : public ::android::BnProducerListener {
-private:
- uint32_t mGeneration;
- std::weak_ptr<GraphicBufferAllocator> mAllocator;
-public:
- OnBufferReleasedListener(
- uint32_t generation,
- const std::shared_ptr<GraphicBufferAllocator> &allocator)
- : mGeneration(generation), mAllocator(allocator) {}
- virtual ~OnBufferReleasedListener() = default;
- virtual void onBufferReleased() {
- auto p = mAllocator.lock();
- if (p) {
- p->onBufferReleased(mGeneration);
- }
- }
- virtual bool needsReleaseNotify() { return true; }
-};
-
::ndk::ScopedAStatus GraphicBufferAllocator::allocate(
const IGraphicBufferAllocator::Description& in_desc,
IGraphicBufferAllocator::Allocation* _aidl_return) {
@@ -108,15 +88,14 @@
mGraphicsTracker->stop();
}
-const ::android::sp<::android::IProducerListener> GraphicBufferAllocator::createReleaseListener(
- uint32_t generation) {
- return new OnBufferReleasedListener(generation, ref<GraphicBufferAllocator>());
-}
-
void GraphicBufferAllocator::onBufferReleased(uint32_t generation) {
mGraphicsTracker->onReleased(generation);
}
+void GraphicBufferAllocator::onBufferAttached(uint32_t generation) {
+ mGraphicsTracker->onAttached(generation);
+}
+
c2_status_t GraphicBufferAllocator::allocate(
uint32_t width, uint32_t height, ::android::PixelFormat format, uint64_t usage,
AHardwareBuffer **buf, ::android::sp<::android::Fence> *fence) {
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 95f5a6e..8d9e76e 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -1001,6 +1001,11 @@
{
std::unique_lock<std::mutex> l(mLock);
if (mBufferCache->mGeneration == generation) {
+ if (mBufferCache->mNumAttached > 0) {
+ ALOGV("one onReleased() ignored for each prior onAttached().");
+ mBufferCache->mNumAttached--;
+ return;
+ }
if (!adjustDequeueConfLocked(&updateDequeue)) {
mDequeueable++;
writeIncDequeueableLocked(1);
@@ -1012,4 +1017,12 @@
}
}
+void GraphicsTracker::onAttached(uint32_t generation) {
+ std::unique_lock<std::mutex> l(mLock);
+ if (mBufferCache->mGeneration == generation) {
+ ALOGV("buffer attached");
+ mBufferCache->mNumAttached++;
+ }
+}
+
} // namespace aidl::android::hardware::media::c2::implementation
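The attach/release pairing added above boils down to simple counting: each onAttached() bumps a per-cache counter, and one subsequent onReleased() per prior attach is swallowed instead of being counted as a newly dequeueable buffer. A minimal standalone sketch of that logic (a hypothetical type, not the real GraphicsTracker):

    #include <atomic>

    // Models only the counting in GraphicsTracker::onAttached()/onReleased();
    // generation checks and locking are omitted.
    struct AttachReleaseModel {
        std::atomic<int> numAttached{0};
        int dequeueable{0};

        void onAttached() { numAttached++; }

        void onReleased() {
            if (numAttached > 0) {
                numAttached--;   // one release is ignored per prior attach
                return;
            }
            dequeueable++;       // a genuine release frees a dequeue slot
        }
    };
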
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 1d2794e..a137dbb 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -2556,6 +2556,19 @@
mOutputBufferQueue->onBufferReleased(generation);
}
+void Codec2Client::Component::onBufferAttachedToOutputSurface(
+ uint32_t generation) {
+ if (mAidlBase) {
+ std::shared_ptr<AidlGraphicBufferAllocator> gba =
+ mGraphicBufferAllocators->current();
+ if (gba) {
+ gba->onBufferAttached(generation);
+ }
+ return;
+ }
+ mOutputBufferQueue->onBufferAttached(generation);
+}
+
void Codec2Client::Component::holdIgbaBlocks(
const std::list<std::unique_ptr<C2Work>>& workList) {
if (!mAidlBase) {
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
index 902c53f..a797cb7 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
@@ -71,18 +71,6 @@
void reset();
/**
- * Create a listener for buffer being released.
- *
- * Surface will register this listener and notify whenever the consumer
- * releases a buffer.
- *
- * @param generation generation # for the BufferQueue.
- * @return IProducerListener can be used when connect# to Surface.
- */
- const ::android::sp<::android::IProducerListener> createReleaseListener(
- uint32_t generation);
-
- /**
* Notifies a buffer being released.
*
* @param generation generation # for the BufferQueue.
@@ -90,6 +78,13 @@
void onBufferReleased(uint32_t generation);
/**
+ * Notifies a buffer being attached to the consumer.
+ *
+ * @param generation generation # for the BufferQueue.
+ */
+ void onBufferAttached(uint32_t generation);
+
+ /**
* Allocates a buffer.
*
* @param width width of the requested buffer.
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index 762030b..9a4fa12 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -143,6 +143,18 @@
void onReleased(uint32_t generation);
/**
+ * Notifies when a buffer is attached to Graphics (the consumer side).
+ * If the generation does not match the current one, notifications via this
+ * interface are ignored. (In that case the notifications come from one of the
+ * old surfaces, which is no longer used.)
+ * One onReleased() should be ignored for each onAttached() when both carry
+ * the same generation as a parameter.
+ *
+ * @param[in] generation generation id for specifying Graphics(BQ)
+ */
+ void onAttached(uint32_t generation);
+
+ /**
* Get waitable fd for events.(allocate is ready, end of life cycle)
*
* @param[out] pipeFd a file descriptor created from pipe2()
@@ -217,9 +229,11 @@
BlockedSlot mBlockedSlots[kNumSlots];
- BufferCache() : mBqId{0ULL}, mGeneration{0}, mIgbp{nullptr} {}
+ std::atomic<int> mNumAttached;
+
+ BufferCache() : mBqId{0ULL}, mGeneration{0}, mIgbp{nullptr}, mNumAttached{0} {}
BufferCache(uint64_t bqId, uint32_t generation, const sp<IGraphicBufferProducer>& igbp) :
- mBqId{bqId}, mGeneration{generation}, mIgbp{igbp} {}
+ mBqId{bqId}, mGeneration{generation}, mIgbp{igbp}, mNumAttached{0} {}
~BufferCache();
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 5c75a47..413e92e 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -481,6 +481,10 @@
void onBufferReleasedFromOutputSurface(
uint32_t generation);
+ // Notify that a buffer is attached to the output surface.
+ void onBufferAttachedToOutputSurface(
+ uint32_t generation);
+
// When the client received \p workList and the blocks inside
// \p workList are IGBA based graphic blocks, specify the owner
// as the current IGBA for the future operations.
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index fda34a8..ddb9855 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -69,6 +69,9 @@
// update the number of dequeueable/allocatable buffers.
void onBufferReleased(uint32_t generation);
+ // Notify that a buffer is attached to the output surface.
+ void onBufferAttached(uint32_t generation);
+
// Retrieve frame event history from the output surface.
void pollForRenderedFrames(FrameEventHistoryDelta* delta);
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 36322f5..54d78a0 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -542,6 +542,11 @@
}
}
+void OutputBufferQueue::onBufferAttached(uint32_t generation) {
+ // TODO
+ (void) generation;
+}
+
void OutputBufferQueue::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
if (mIgbp) {
mIgbp->getFrameTimestamps(delta);
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index f0a4180..1348ef0 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1469,6 +1469,17 @@
}
}
+void CCodecBufferChannel::onBufferAttachedToOutputSurface(uint32_t generation) {
+ // Note: This is called asynchronously from IProducerListener, which does not
+ // know the internal state of CCodec/CCodecBufferChannel, so hold a shared
+ // reference locally to prevent mComponent from being destroyed while this
+ // method is being executed.
+ std::shared_ptr<Codec2Client::Component> comp = mComponent;
+ if (comp) {
+ comp->onBufferAttachedToOutputSurface(generation);
+ }
+}
+
status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
ALOGV("[%s] discardBuffer: %p", mName, buffer.get());
bool released = false;
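The local shared_ptr copy in the new method is a common pattern for callbacks that arrive asynchronously: copy the member into a local before dereferencing so the pointee cannot be destroyed mid-call. A reduced sketch under assumed, simplified types (not the real classes):

    #include <cstdint>
    #include <memory>

    struct Component {
        void onBufferAttachedToOutputSurface(uint32_t /*generation*/) {}
    };

    struct Channel {
        std::shared_ptr<Component> mComponent;

        void onBufferAttachedToOutputSurface(uint32_t generation) {
            // The local copy keeps the component alive for the duration of the call.
            std::shared_ptr<Component> comp = mComponent;
            if (comp) {
                comp->onBufferAttachedToOutputSurface(generation);
            }
        }
    };
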
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 94a5998..e62742b 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -102,6 +102,7 @@
const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
void pollForRenderedBuffers() override;
void onBufferReleasedFromOutputSurface(uint32_t generation) override;
+ void onBufferAttachedToOutputSurface(uint32_t generation) override;
status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 3f4fcfd..3602e94 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -322,23 +322,6 @@
return NO_ERROR;
}
-status_t AudioSystem::getStreamVolume(audio_stream_type_t stream, float* volume,
- audio_io_handle_t output) {
- if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
- const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
- *volume = af->streamVolume(stream, output);
- return NO_ERROR;
-}
-
-status_t AudioSystem::getStreamMute(audio_stream_type_t stream, bool* mute) {
- if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
- const sp<IAudioFlinger> af = get_audio_flinger();
- if (af == 0) return PERMISSION_DENIED;
- *mute = af->streamMute(stream);
- return NO_ERROR;
-}
-
status_t AudioSystem::setMode(audio_mode_t mode) {
if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
const sp<IAudioFlinger> af = get_audio_flinger();
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index a707909..e0dca2d 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -350,34 +350,6 @@
return statusTFromBinderStatus(mDelegate->setStreamMute(streamAidl, muted));
}
-float AudioFlingerClientAdapter::streamVolume(audio_stream_type_t stream,
- audio_io_handle_t output) const {
- auto result = [&]() -> ConversionResult<float> {
- AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
- int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
- float aidlRet;
- RETURN_IF_ERROR(statusTFromBinderStatus(
- mDelegate->streamVolume(streamAidl, outputAidl, &aidlRet)));
- return aidlRet;
- }();
- // Failure is ignored.
- return result.value_or(0.f);
-}
-
-bool AudioFlingerClientAdapter::streamMute(audio_stream_type_t stream) const {
- auto result = [&]() -> ConversionResult<bool> {
- AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
- legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
- bool aidlRet;
- RETURN_IF_ERROR(statusTFromBinderStatus(
- mDelegate->streamMute(streamAidl, &aidlRet)));
- return aidlRet;
- }();
- // Failure is ignored.
- return result.value_or(false);
-}
-
status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
AudioMode modeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(mode));
return statusTFromBinderStatus(mDelegate->setMode(modeAidl));
@@ -1040,23 +1012,6 @@
return Status::fromStatusT(mDelegate->setStreamMute(streamLegacy, muted));
}
-Status AudioFlingerServerAdapter::streamVolume(AudioStreamType stream, int32_t output,
- float* _aidl_return) {
- audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
- aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
- audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
- aidl2legacy_int32_t_audio_io_handle_t(output));
- *_aidl_return = mDelegate->streamVolume(streamLegacy, outputLegacy);
- return Status::ok();
-}
-
-Status AudioFlingerServerAdapter::streamMute(AudioStreamType stream, bool* _aidl_return) {
- audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
- aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
- *_aidl_return = mDelegate->streamMute(streamLegacy);
- return Status::ok();
-}
-
Status AudioFlingerServerAdapter::setMode(AudioMode mode) {
audio_mode_t modeLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioMode_audio_mode_t(mode));
return Status::fromStatusT(mDelegate->setMode(modeLegacy));
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 4f00f83..29de9c2 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -94,13 +94,11 @@
float getMasterBalance();
/*
- * Set/gets stream type state. This will probably be used by
+ * Set stream type state. This will probably be used by
* the preference panel, mostly.
*/
void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
void setStreamMute(AudioStreamType stream, boolean muted);
- float streamVolume(AudioStreamType stream, int /* audio_io_handle_t */ output);
- boolean streamMute(AudioStreamType stream);
// set audio mode.
void setMode(AudioMode mode);
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index dfdb4cf..4c94974 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -519,12 +519,6 @@
stream = getValue(&mFdp, kStreamtypes);
AudioSystem::getOutputLatency(&latency, stream);
- stream = getValue(&mFdp, kStreamtypes);
- AudioSystem::getStreamVolume(stream, &volume, mFdp.ConsumeIntegral<int32_t>());
-
- stream = getValue(&mFdp, kStreamtypes);
- AudioSystem::getStreamMute(stream, &state);
-
uint32_t samplingRate;
AudioSystem::getSamplingRate(mFdp.ConsumeIntegral<int32_t>(), &samplingRate);
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 9cfd540..67b3dcd 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -124,15 +124,12 @@
static status_t setMasterMute(bool mute);
static status_t getMasterMute(bool* mute);
- // set/get stream volume on specified output
+ // set stream volume on specified output
static status_t setStreamVolume(audio_stream_type_t stream, float value,
audio_io_handle_t output);
- static status_t getStreamVolume(audio_stream_type_t stream, float* volume,
- audio_io_handle_t output);
// mute/unmute stream
static status_t setStreamMute(audio_stream_type_t stream, bool mute);
- static status_t getStreamMute(audio_stream_type_t stream, bool* mute);
// set audio mode in audio hardware
static status_t setMode(audio_mode_t mode);
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 211fffa..667e9ae 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -229,10 +229,6 @@
audio_io_handle_t output) = 0;
virtual status_t setStreamMute(audio_stream_type_t stream, bool muted) = 0;
- virtual float streamVolume(audio_stream_type_t stream,
- audio_io_handle_t output) const = 0;
- virtual bool streamMute(audio_stream_type_t stream) const = 0;
-
// set audio mode
virtual status_t setMode(audio_mode_t mode) = 0;
@@ -424,9 +420,6 @@
status_t setStreamVolume(audio_stream_type_t stream, float value,
audio_io_handle_t output) override;
status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
- float streamVolume(audio_stream_type_t stream,
- audio_io_handle_t output) const override;
- bool streamMute(audio_stream_type_t stream) const override;
status_t setMode(audio_mode_t mode) override;
status_t setMicMute(bool state) override;
bool getMicMute() const override;
@@ -549,8 +542,6 @@
MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_masterMute,
SET_STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_setStreamVolume,
SET_STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_setStreamMute,
- STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_streamVolume,
- STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_streamMute,
SET_MODE = media::BnAudioFlingerService::TRANSACTION_setMode,
SET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_setMicMute,
GET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_getMicMute,
@@ -673,9 +664,6 @@
Status setStreamVolume(media::audio::common::AudioStreamType stream,
float value, int32_t output) override;
Status setStreamMute(media::audio::common::AudioStreamType stream, bool muted) override;
- Status streamVolume(media::audio::common::AudioStreamType stream,
- int32_t output, float* _aidl_return) override;
- Status streamMute(media::audio::common::AudioStreamType stream, bool* _aidl_return) override;
Status setMode(media::audio::common::AudioMode mode) override;
Status setMicMute(bool state) override;
Status getMicMute(bool* _aidl_return) override;
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index db998cd..742ca48 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -199,20 +199,6 @@
EXPECT_EQ(origBalance, tstBalance);
}
-TEST_F(AudioSystemTest, GetStreamVolume) {
- ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
- float origStreamVol;
- const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevice();
- EXPECT_EQ(NO_ERROR,
- AudioSystem::getStreamVolume(AUDIO_STREAM_MUSIC, &origStreamVol, pbAudioIo));
-}
-
-TEST_F(AudioSystemTest, GetStreamMute) {
- ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
- bool origMuteState;
- EXPECT_EQ(NO_ERROR, AudioSystem::getStreamMute(AUDIO_STREAM_MUSIC, &origMuteState));
-}
-
TEST_F(AudioSystemTest, StartAndStopAudioSource) {
std::vector<struct audio_port_v7> ports;
audio_port_config sourcePortConfig;
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index d4b4b6c..8a962c6 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -255,6 +255,45 @@
}
cc_library_shared {
+ name: "libmedia_codeclist_capabilities",
+
+ srcs: [
+ "AudioCapabilities.cpp",
+ "CodecCapabilities.cpp",
+ "CodecCapabilitiesUtils.cpp",
+ ],
+
+ local_include_dirs: [
+ "include",
+ ],
+
+ shared_libs: [
+ "libbinder",
+ "liblog",
+ "libstagefright_foundation",
+ "libutils",
+ ],
+
+ export_include_dirs: [
+ "include",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wno-error=deprecated-declarations",
+ "-Wall",
+ ],
+
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+}
+
+cc_library_shared {
name: "libmedia_codeclist",
srcs: [
@@ -270,6 +309,7 @@
"android.hardware.media.omx@1.0",
"libbinder",
"liblog",
+ "libmedia_codeclist_capabilities",
"libstagefright_foundation",
"libutils",
],
diff --git a/media/libmedia/AudioCapabilities.cpp b/media/libmedia/AudioCapabilities.cpp
new file mode 100644
index 0000000..e8cf517
--- /dev/null
+++ b/media/libmedia/AudioCapabilities.cpp
@@ -0,0 +1,394 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioCapabilities"
+
+#include <android-base/strings.h>
+#include <android-base/properties.h>
+
+#include <media/AudioCapabilities.h>
+#include <media/CodecCapabilities.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+namespace android {
+
+const Range<int>& AudioCapabilities::getBitrateRange() const {
+ return mBitrateRange;
+}
+
+const std::vector<int>& AudioCapabilities::getSupportedSampleRates() const {
+ return mSampleRates;
+}
+
+const std::vector<Range<int>>&
+ AudioCapabilities::getSupportedSampleRateRanges() const {
+ return mSampleRateRanges;
+}
+
+int AudioCapabilities::getMaxInputChannelCount() const {
+ int overallMax = 0;
+ for (int i = mInputChannelRanges.size() - 1; i >= 0; i--) {
+ int lmax = mInputChannelRanges[i].upper();
+ if (lmax > overallMax) {
+ overallMax = lmax;
+ }
+ }
+ return overallMax;
+}
+
+int AudioCapabilities::getMinInputChannelCount() const {
+ int overallMin = MAX_INPUT_CHANNEL_COUNT;
+ for (int i = mInputChannelRanges.size() - 1; i >= 0; i--) {
+ int lmin = mInputChannelRanges[i].lower();
+ if (lmin < overallMin) {
+ overallMin = lmin;
+ }
+ }
+ return overallMin;
+}
+
+const std::vector<Range<int>>&
+ AudioCapabilities::getInputChannelCountRanges() const {
+ return mInputChannelRanges;
+}
+
+// static
+std::shared_ptr<AudioCapabilities> AudioCapabilities::Create(std::string mediaType,
+ std::vector<ProfileLevel> profLevs, const sp<AMessage> &format) {
+ std::shared_ptr<AudioCapabilities> caps(new AudioCapabilities());
+ caps->init(mediaType, profLevs, format);
+ return caps;
+}
+
+void AudioCapabilities::init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+ const sp<AMessage> &format) {
+ mMediaType = mediaType;
+ mProfileLevels = profLevs;
+ mError = 0;
+
+ initWithPlatformLimits();
+ applyLevelLimits();
+ parseFromInfo(format);
+}
+
+void AudioCapabilities::initWithPlatformLimits() {
+ mBitrateRange = Range<int>(0, INT_MAX);
+ mInputChannelRanges.push_back(Range<int>(1, MAX_INPUT_CHANNEL_COUNT));
+
+ const int minSampleRate = base::GetIntProperty("ro.mediacodec.min_sample_rate", 7350);
+ const int maxSampleRate = base::GetIntProperty("ro.mediacodec.max_sample_rate", 192000);
+ mSampleRateRanges.push_back(Range<int>(minSampleRate, maxSampleRate));
+}
+
+bool AudioCapabilities::supports(int sampleRate, int inputChannels) {
+ // channels and sample rates are checked orthogonally
+ if (inputChannels != 0
+ && !std::any_of(mInputChannelRanges.begin(), mInputChannelRanges.end(),
+ [inputChannels](const Range<int> &a) { return a.contains(inputChannels); })) {
+ return false;
+ }
+ if (sampleRate != 0
+ && !std::any_of(mSampleRateRanges.begin(), mSampleRateRanges.end(),
+ [sampleRate](const Range<int> &a) { return a.contains(sampleRate); })) {
+ return false;
+ }
+ return true;
+}
+
+bool AudioCapabilities::isSampleRateSupported(int sampleRate) {
+ return supports(sampleRate, 0);
+}
+
+void AudioCapabilities::limitSampleRates(std::vector<int> rates) {
+ std::vector<Range<int>> sampleRateRanges;
+ std::sort(rates.begin(), rates.end());
+ for (int rate : rates) {
+ if (supports(rate, 0 /* channels */)) {
+ sampleRateRanges.push_back(Range<int>(rate, rate));
+ }
+ }
+ mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, sampleRateRanges);
+ createDiscreteSampleRates();
+}
+
+void AudioCapabilities::createDiscreteSampleRates() {
+ mSampleRates.clear();
+ for (int i = 0; i < mSampleRateRanges.size(); i++) {
+ mSampleRates.push_back(mSampleRateRanges[i].lower());
+ }
+}
+
+void AudioCapabilities::limitSampleRates(std::vector<Range<int>> rateRanges) {
+ sortDistinctRanges(&rateRanges);
+ mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, rateRanges);
+ // check if all values are discrete
+ for (Range<int> range: mSampleRateRanges) {
+ if (range.lower() != range.upper()) {
+ mSampleRates.clear();
+ return;
+ }
+ }
+ createDiscreteSampleRates();
+}
+
+void AudioCapabilities::applyLevelLimits() {
+ std::vector<int> sampleRates;
+ std::optional<Range<int>> sampleRateRange;
+ std::optional<Range<int>> bitRates;
+ int maxChannels = MAX_INPUT_CHANNEL_COUNT;
+
+ // const char *mediaType = mMediaType.c_str();
+ if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MPEG)) {
+ sampleRates = {
+ 8000, 11025, 12000,
+ 16000, 22050, 24000,
+ 32000, 44100, 48000 };
+ bitRates = Range<int>(8000, 320000);
+ maxChannels = 2;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_NB)) {
+ sampleRates = { 8000 };
+ bitRates = Range<int>(4750, 12200);
+ maxChannels = 1;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_WB)) {
+ sampleRates = { 16000 };
+ bitRates = Range<int>(6600, 23850);
+ maxChannels = 1;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AAC)) {
+ sampleRates = {
+ 7350, 8000,
+ 11025, 12000, 16000,
+ 22050, 24000, 32000,
+ 44100, 48000, 64000,
+ 88200, 96000 };
+ bitRates = Range<int>(8000, 510000);
+ maxChannels = 48;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_VORBIS)) {
+ bitRates = Range<int>(32000, 500000);
+ sampleRateRange = Range<int>(8000, 192000);
+ maxChannels = 255;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_OPUS)) {
+ bitRates = Range<int>(6000, 510000);
+ sampleRates = { 8000, 12000, 16000, 24000, 48000 };
+ maxChannels = 255;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_RAW)) {
+ sampleRateRange = Range<int>(1, 192000);
+ bitRates = Range<int>(1, 10000000);
+ maxChannels = MAX_NUM_CHANNELS;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_FLAC)) {
+ sampleRateRange = Range<int>(1, 655350);
+ // lossless codec, so bitrate is ignored
+ maxChannels = 255;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_ALAW)
+ || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_MLAW)) {
+ sampleRates = { 8000 };
+ bitRates = Range<int>(64000, 64000);
+ // platform allows multiple channels for this format
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MSGSM)) {
+ sampleRates = { 8000 };
+ bitRates = Range<int>(13000, 13000);
+ maxChannels = 1;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AC3)) {
+ maxChannels = 6;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_EAC3)) {
+ maxChannels = 16;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_EAC3_JOC)) {
+ sampleRates = { 48000 };
+ bitRates = Range<int>(32000, 6144000);
+ maxChannels = 16;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AC4)) {
+ sampleRates = { 44100, 48000, 96000, 192000 };
+ bitRates = Range<int>(16000, 2688000);
+ maxChannels = 24;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS)) {
+ sampleRates = { 44100, 48000 };
+ bitRates = Range<int>(96000, 1524000);
+ maxChannels = 6;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_HD)) {
+ for (ProfileLevel profileLevel: mProfileLevels) {
+ switch (profileLevel.mProfile) {
+ case DTS_HDProfileLBR:
+ sampleRates = { 22050, 24000, 44100, 48000 };
+ bitRates = Range<int>(32000, 768000);
+ break;
+ case DTS_HDProfileHRA:
+ case DTS_HDProfileMA:
+ sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+ bitRates = Range<int>(96000, 24500000);
+ break;
+ default:
+ ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile,
+ mMediaType.c_str());
+ mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+ sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+ bitRates = Range<int>(96000, 24500000);
+ }
+ }
+ maxChannels = 8;
+ } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_UHD)) {
+ for (ProfileLevel profileLevel: mProfileLevels) {
+ switch (profileLevel.mProfile) {
+ case DTS_UHDProfileP2:
+ sampleRates = { 48000 };
+ bitRates = Range<int>(96000, 768000);
+ maxChannels = 10;
+ break;
+ case DTS_UHDProfileP1:
+ sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+ bitRates = Range<int>(96000, 24500000);
+ maxChannels = 32;
+ break;
+ default:
+ ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile,
+ mMediaType.c_str());
+ mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+ sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+ bitRates = Range<int>(96000, 24500000);
+ maxChannels = 32;
+ }
+ }
+ } else {
+ ALOGW("Unsupported mediaType %s", mMediaType.c_str());
+ mError |= ERROR_CAPABILITIES_UNSUPPORTED;
+ }
+
+ // restrict ranges
+ if (!sampleRates.empty()) {
+ limitSampleRates(sampleRates);
+ } else if (sampleRateRange) {
+ std::vector<Range<int>> rateRanges = { sampleRateRange.value() };
+ limitSampleRates(rateRanges);
+ }
+
+ Range<int> channelRange = Range<int>(1, maxChannels);
+ std::vector<Range<int>> inputChannels = { channelRange };
+ applyLimits(inputChannels, bitRates);
+}
+
+void AudioCapabilities::applyLimits(
+ const std::vector<Range<int>> &inputChannels,
+ const std::optional<Range<int>> &bitRates) {
+ // clamp & make a local copy
+ std::vector<Range<int>> inputChannelsCopy(inputChannels.size());
+ for (int i = 0; i < inputChannels.size(); i++) {
+ int lower = inputChannels[i].clamp(1);
+ int upper = inputChannels[i].clamp(MAX_INPUT_CHANNEL_COUNT);
+ inputChannelsCopy[i] = Range<int>(lower, upper);
+ }
+
+ // sort, intersect with existing, & save channel list
+ sortDistinctRanges(&inputChannelsCopy);
+ mInputChannelRanges = intersectSortedDistinctRanges(inputChannelsCopy, mInputChannelRanges);
+
+ if (bitRates) {
+ mBitrateRange = mBitrateRange.intersect(bitRates.value());
+ }
+}
+
+void AudioCapabilities::parseFromInfo(const sp<AMessage> &format) {
+ int maxInputChannels = MAX_INPUT_CHANNEL_COUNT;
+ std::vector<Range<int>> channels = { Range<int>(1, maxInputChannels) };
+ std::optional<Range<int>> bitRates = POSITIVE_INTEGERS;
+
+ AString rateAString;
+ if (format->findString("sample-rate-ranges", &rateAString)) {
+ std::vector<std::string> rateStrings = base::Split(std::string(rateAString.c_str()), ",");
+ std::vector<Range<int>> rateRanges;
+ for (std::string rateString : rateStrings) {
+ std::optional<Range<int>> rateRange = ParseIntRange(rateString);
+ if (!rateRange) {
+ continue;
+ }
+ rateRanges.push_back(rateRange.value());
+ }
+ limitSampleRates(rateRanges);
+ }
+
+ // we will prefer channel-ranges over max-channel-count
+ AString valueStr;
+ if (format->findString("channel-ranges", &valueStr)) {
+ std::vector<std::string> channelStrings = base::Split(std::string(valueStr.c_str()), ",");
+ std::vector<Range<int>> channelRanges;
+ for (std::string channelString : channelStrings) {
+ std::optional<Range<int>> channelRange = ParseIntRange(channelString);
+ if (!channelRange) {
+ continue;
+ }
+ channelRanges.push_back(channelRange.value());
+ }
+ channels = channelRanges;
+ } else if (format->findString("channel-range", &valueStr)) {
+ std::optional<Range<int>> oneRange = ParseIntRange(std::string(valueStr.c_str()));
+ if (oneRange) {
+ channels = { oneRange.value() };
+ }
+ } else if (format->findString("max-channel-count", &valueStr)) {
+ maxInputChannels = std::atoi(valueStr.c_str());
+ if (maxInputChannels == 0) {
+ channels = { Range<int>(0, 0) };
+ } else {
+ channels = { Range<int>(1, maxInputChannels) };
+ }
+ } else if ((mError & ERROR_CAPABILITIES_UNSUPPORTED) != 0) {
+ maxInputChannels = 0;
+ channels = { Range<int>(0, 0) };
+ }
+
+ if (format->findString("bitrate-range", &valueStr)) {
+ std::optional<Range<int>> parsedBitrate = ParseIntRange(valueStr.c_str());
+ if (parsedBitrate) {
+ bitRates = bitRates.value().intersect(parsedBitrate.value());
+ }
+ }
+
+ applyLimits(channels, bitRates);
+}
+
+void AudioCapabilities::getDefaultFormat(sp<AMessage> &format) {
+ // report settings that have only a single choice
+ if (mBitrateRange.lower() == mBitrateRange.upper()) {
+ format->setInt32(KEY_BIT_RATE, mBitrateRange.lower());
+ }
+ if (getMaxInputChannelCount() == 1) {
+ // mono-only format
+ format->setInt32(KEY_CHANNEL_COUNT, 1);
+ }
+ if (!mSampleRates.empty() && mSampleRates.size() == 1) {
+ format->setInt32(KEY_SAMPLE_RATE, mSampleRates[0]);
+ }
+}
+
+bool AudioCapabilities::supportsFormat(const sp<AMessage> &format) {
+ int32_t sampleRate;
+ format->findInt32(KEY_SAMPLE_RATE, &sampleRate);
+ int32_t channels;
+ format->findInt32(KEY_CHANNEL_COUNT, &channels);
+
+ if (!supports(sampleRate, channels)) {
+ return false;
+ }
+
+ if (!CodecCapabilities::SupportsBitrate(mBitrateRange, format)) {
+ return false;
+ }
+
+ // nothing to do for:
+ // KEY_CHANNEL_MASK: codecs don't get this
+ // KEY_IS_ADTS: required feature for all AAC decoders
+ return true;
+}
+
+} // namespace android
\ No newline at end of file
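As a usage sketch for the new class (hypothetical caller; the string keys mirror what parseFromInfo() reads, and the CodecCapabilitiesTest added below exercises the same paths):

    #include <memory>
    #include <vector>

    #include <media/AudioCapabilities.h>
    #include <media/stagefright/MediaCodecConstants.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    std::shared_ptr<AudioCapabilities> makeAacCaps() {
        sp<AMessage> details = new AMessage;
        details->setString("bitrate-range", "8000-960000");
        details->setString("max-channel-count", "8");
        details->setString("sample-rate-ranges", "8000,16000,44100,48000");

        std::vector<ProfileLevel> profLevs;  // level limits further restrict the ranges
        std::shared_ptr<AudioCapabilities> caps =
                AudioCapabilities::Create(MIMETYPE_AUDIO_AAC, profLevs, details);

        // e.g. caps->isSampleRateSupported(44100), caps->getBitrateRange(), ...
        return caps;
    }
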
diff --git a/media/libmedia/CodecCapabilities.cpp b/media/libmedia/CodecCapabilities.cpp
new file mode 100644
index 0000000..5bed1c4
--- /dev/null
+++ b/media/libmedia/CodecCapabilities.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilities"
+
+#include <utils/Log.h>
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+bool CodecCapabilities::SupportsBitrate(Range<int> bitrateRange,
+ const sp<AMessage> &format) {
+ // consider max bitrate over average bitrate for support
+ int32_t maxBitrate = 0;
+ format->findInt32(KEY_MAX_BIT_RATE, &maxBitrate);
+ int32_t bitrate = 0;
+ format->findInt32(KEY_BIT_RATE, &bitrate);
+
+ if (bitrate == 0) {
+ bitrate = maxBitrate;
+ } else if (maxBitrate != 0) {
+ bitrate = std::max(bitrate, maxBitrate);
+ }
+
+ if (bitrate > 0) {
+ return bitrateRange.contains(bitrate);
+ }
+
+ return true;
+}
+
+const std::string& CodecCapabilities::getMediaType() {
+ return mMediaType;
+}
+
+const std::vector<ProfileLevel>& CodecCapabilities::getProfileLevels() {
+ return mProfileLevels;
+}
+
+} // namespace android
\ No newline at end of file
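SupportsBitrate() prefers the larger of "max-bitrate" and "bitrate" when both are set, and only then checks containment; with neither key present it returns true. A small hypothetical caller:

    #include <media/CodecCapabilities.h>
    #include <media/CodecCapabilitiesUtils.h>
    #include <media/stagefright/MediaCodecConstants.h>
    #include <media/stagefright/foundation/AMessage.h>

    using namespace android;

    bool mp3BitrateOk() {
        sp<AMessage> format = new AMessage;
        format->setInt32(KEY_BIT_RATE, 128000);  // "bitrate"
        // If KEY_MAX_BIT_RATE ("max-bitrate") were also set and larger, that value
        // would be the one checked against the range.
        return CodecCapabilities::SupportsBitrate(Range<int>(8000, 320000), format);
    }
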
diff --git a/media/libmedia/CodecCapabilitiesUtils.cpp b/media/libmedia/CodecCapabilitiesUtils.cpp
new file mode 100644
index 0000000..edfc9be
--- /dev/null
+++ b/media/libmedia/CodecCapabilitiesUtils.cpp
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilitiesUtils"
+#include <utils/Log.h>
+
+#include <algorithm>
+#include <cmath>
+#include <regex>
+#include <string>
+#include <vector>
+
+#include <media/CodecCapabilitiesUtils.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+std::optional<Range<int>> ParseIntRange(const std::string &str) {
+ if (str.empty()) {
+ ALOGW("could not parse empty integer range");
+ return std::nullopt;
+ }
+ int lower, upper;
+ std::regex regex("([0-9]+)-([0-9]+)");
+ std::smatch match;
+ if (std::regex_match(str, match, regex)) {
+ lower = std::atoi(match[1].str().c_str());
+ upper = std::atoi(match[2].str().c_str());
+ } else if (std::atoi(str.c_str()) != 0) {
+ lower = upper = std::atoi(str.c_str());
+ } else {
+ ALOGW("could not parse integer range: %s", str.c_str());
+ return std::nullopt;
+ }
+ return std::make_optional<Range<int>>(lower, upper);
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/media/libmedia/include/media/AudioCapabilities.h b/media/libmedia/include/media/AudioCapabilities.h
new file mode 100644
index 0000000..2bc3335
--- /dev/null
+++ b/media/libmedia/include/media/AudioCapabilities.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AUDIO_CAPABILITIES_H_
+
+#define AUDIO_CAPABILITIES_H_
+
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <system/audio.h>
+
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct AudioCapabilities {
+ /**
+ * Create AudioCapabilities.
+ */
+ static std::shared_ptr<AudioCapabilities> Create(std::string mediaType,
+ std::vector<ProfileLevel> profLevs, const sp<AMessage> &format);
+
+ /**
+ * Returns the range of supported bitrates in bits/second.
+ */
+ const Range<int>& getBitrateRange() const;
+
+ /**
+ * Returns the array of supported sample rates if the codec
+ * supports only discrete values. Otherwise, it returns an empty array.
+ * The array is sorted in ascending order.
+ */
+ const std::vector<int>& getSupportedSampleRates() const;
+
+ /**
+ * Returns the array of supported sample rate ranges. The
+ * array is sorted in ascending order, and the ranges are
+ * distinct.
+ */
+ const std::vector<Range<int>>& getSupportedSampleRateRanges() const;
+
+ /**
+ * Returns the maximum number of input channels supported.
+ * The returned value should be between 1 and 255.
+ *
+ * Through {@link android.os.Build.VERSION_CODES#R}, this method indicated support
+ * for any number of input channels between 1 and this maximum value.
+ *
+ * As of {@link android.os.Build.VERSION_CODES#S},
+ * the implied lower limit of 1 channel is no longer valid.
+ * As of {@link android.os.Build.VERSION_CODES#S}, {@link #getMaxInputChannelCount} is
+ * superseded by {@link #getInputChannelCountRanges},
+ * which returns an array of ranges of channels.
+ * The {@link #getMaxInputChannelCount} method will return the highest value
+ * in the ranges returned by {@link #getInputChannelCountRanges}
+ */
+ int getMaxInputChannelCount() const;
+
+ /**
+ * Returns the minimum number of input channels supported.
+ * This is often 1, but does vary for certain mime types.
+ *
+ * This returns the lowest channel count in the ranges returned by
+ * {@link #getInputChannelCountRanges}.
+ */
+ int getMinInputChannelCount() const;
+
+ /**
+ * Returns an array of ranges representing the number of input channels supported.
+ * The codec supports any number of input channels within this range.
+ *
+ * This supersedes the {@link #getMaxInputChannelCount} method.
+ *
+ * For many codecs, this will be a single range [1..N], for some N.
+ *
+ * The returned array cannot be empty.
+ */
+ const std::vector<Range<int>>& getInputChannelCountRanges() const;
+
+ /**
+ * Query whether the sample rate is supported by the codec.
+ */
+ bool isSampleRateSupported(int sampleRate);
+
+ /* For internal use only. Not exposed as a public API */
+ void getDefaultFormat(sp<AMessage> &format);
+
+ /* For internal use only. Not exposed as a public API */
+ bool supportsFormat(const sp<AMessage> &format);
+
+private:
+ static constexpr int MAX_INPUT_CHANNEL_COUNT = 30;
+ static constexpr uint32_t MAX_NUM_CHANNELS = FCC_LIMIT;
+
+ int mError;
+ std::string mMediaType;
+ std::vector<ProfileLevel> mProfileLevels;
+
+ Range<int> mBitrateRange;
+
+ std::vector<int> mSampleRates;
+ std::vector<Range<int>> mSampleRateRanges;
+ std::vector<Range<int>> mInputChannelRanges;
+
+ /* no public constructor */
+ AudioCapabilities() {}
+ void init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+ const sp<AMessage> &format);
+ void initWithPlatformLimits();
+ bool supports(int sampleRate, int inputChannels);
+ void limitSampleRates(std::vector<int> rates);
+ void createDiscreteSampleRates();
+ void limitSampleRates(std::vector<Range<int>> rateRanges);
+ void applyLevelLimits();
+ void applyLimits(const std::vector<Range<int>> &inputChannels,
+ const std::optional<Range<int>> &bitRates);
+ void parseFromInfo(const sp<AMessage> &format);
+
+ friend struct CodecCapabilities;
+};
+
+} // namespace android
+
+#endif // AUDIO_CAPABILITIES_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/CodecCapabilities.h b/media/libmedia/include/media/CodecCapabilities.h
new file mode 100644
index 0000000..9d1c4ea
--- /dev/null
+++ b/media/libmedia/include/media/CodecCapabilities.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_CAPABILITIES_H_
+
+#define CODEC_CAPABILITIES_H_
+
+#include <media/AudioCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct CodecCapabilities {
+
+ static bool SupportsBitrate(Range<int> bitrateRange,
+ const sp<AMessage> &format);
+
+ /**
+ * Returns the media type for which this codec-capability object was created.
+ */
+ const std::string& getMediaType();
+
+ /**
+ * Returns the supported profile levels.
+ */
+ const std::vector<ProfileLevel>& getProfileLevels();
+
+private:
+ std::string mMediaType;
+ std::vector<ProfileLevel> mProfileLevels;
+
+ std::shared_ptr<AudioCapabilities> mAudioCaps;
+};
+
+} // namespace android
+
+#endif // CODEC_CAPABILITIES_H_
\ No newline at end of file
diff --git a/media/libmedia/include/media/CodecCapabilitiesUtils.h b/media/libmedia/include/media/CodecCapabilitiesUtils.h
new file mode 100644
index 0000000..2bf822a
--- /dev/null
+++ b/media/libmedia/include/media/CodecCapabilitiesUtils.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_CAPABILITIES_UTILS_H_
+
+#define CODEC_CAPABILITIES_UTILS_H_
+
+#include <algorithm>
+#include <cmath>
+#include <optional>
+#include <string>
+#include <vector>
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+struct ProfileLevel {
+ uint32_t mProfile;
+ uint32_t mLevel;
+ bool operator <(const ProfileLevel &o) const {
+ return mProfile < o.mProfile || (mProfile == o.mProfile && mLevel < o.mLevel);
+ }
+};
+
+/**
+ * Immutable class for describing the range of two numeric values.
+ *
+ * To make it immutable, all data are private and all functions are const.
+ *
+ * From frameworks/base/core/java/android/util/Range.java
+ */
+template<typename T>
+struct Range {
+ Range() : lower_(), upper_() {}
+
+ Range(T l, T u) : lower_(l), upper_(u) {}
+
+ constexpr bool empty() const { return lower_ > upper_; }
+
+ T lower() const { return lower_; }
+
+ T upper() const { return upper_; }
+
+ // Check if a value is in the range.
+ bool contains(T value) const {
+ return lower_ <= value && upper_ >= value;
+ }
+
+ bool contains(Range<T> range) const {
+ return (range.lower_ >= lower_) && (range.upper_ <= upper_);
+ }
+
+ // Clamp a value in the range
+ T clamp(T value) const{
+ if (value < lower_) {
+ return lower_;
+ } else if (value > upper_) {
+ return upper_;
+ } else {
+ return value;
+ }
+ }
+
+ // Return the intersected range
+ Range<T> intersect(Range<T> range) const {
+ if (lower_ >= range.lower() && range.upper() >= upper_) {
+ // range includes this
+ return *this;
+ } else if (range.lower() >= lower_ && range.upper() <= upper_) {
+ // this includes range
+ return range;
+ } else {
+ // if ranges are disjoint returns an empty Range(lower > upper)
+ Range<T> result = Range<T>(std::max(lower_, range.lower_),
+ std::min(upper_, range.upper_));
+ if (result.empty()) {
+ ALOGE("Failed to intersect 2 ranges as they are disjoint");
+ }
+ return result;
+ }
+ }
+
+ /**
+ * Returns the intersection of this range and the inclusive range
+ * specified by {@code [lower, upper]}.
+ * <p>
+ * See {@link #intersect(Range)} for more details.</p>
+ *
+ * @param lower a non-{@code null} {@code T} reference
+ * @param upper a non-{@code null} {@code T} reference
+ * @return the intersection of this range and the other range
+ *
+ * @throws NullPointerException if {@code lower} or {@code upper} was {@code null}
+ * @throws IllegalArgumentException if the ranges are disjoint.
+ */
+ Range<T> intersect(T lower, T upper) {
+ return Range(std::max(lower_, lower), std::min(upper_, upper));
+ }
+
+private:
+ T lower_;
+ T upper_;
+};
+
+static const Range<int> POSITIVE_INTEGERS = Range<int>(1, INT_MAX);
+
+// found stuff that is not supported by framework (=> this should not happen)
+constexpr int ERROR_CAPABILITIES_UNRECOGNIZED = (1 << 0);
+// found profile/level for which we don't have capability estimates
+constexpr int ERROR_CAPABILITIES_UNSUPPORTED = (1 << 1);
+// have not found any profile/level for which we don't have capability estimate
+// constexpr int ERROR_NONE_SUPPORTED = (1 << 2);
+
+/**
+ * Sorts distinct (non-intersecting) range array in ascending order.
+ * From frameworks/base/media/java/android/media/Utils.java
+ */
+template<typename T>
+void sortDistinctRanges(std::vector<Range<T>> *ranges) {
+ std::sort(ranges->begin(), ranges->end(),
+ [](Range<T> r1, Range<T> r2) {
+ if (r1.upper() < r2.lower()) {
+ return true;
+ } else if (r1.lower() > r2.upper()) {
+ return false;
+ } else {
+ ALOGE("sample rate ranges must be distinct.");
+ return false;
+ }
+ });
+}
+
+/**
+ * Returns the intersection of two sets of non-intersecting ranges
+ * From frameworks/base/media/java/android/media/Utils.java
+ * @param one a sorted set of non-intersecting ranges in ascending order
+ * @param another another sorted set of non-intersecting ranges in ascending order
+ * @return the intersection of the two sets, sorted in ascending order
+ */
+template<typename T>
+std::vector<Range<T>> intersectSortedDistinctRanges(
+ const std::vector<Range<T>> &one, const std::vector<Range<T>> &another) {
+ std::vector<Range<T>> result;
+ int ix = 0;
+ for (Range<T> range : another) {
+ while (ix < one.size() && one[ix].upper() < range.lower()) {
+ ++ix;
+ }
+ while (ix < one.size() && one[ix].upper() < range.upper()) {
+ result.push_back(range.intersect(one[ix]));
+ ++ix;
+ }
+ if (ix == one.size()) {
+ break;
+ }
+ if (one[ix].lower() <= range.upper()) {
+ result.push_back(range.intersect(one[ix]));
+ }
+ }
+ return result;
+}
+
+// parse string into int range
+std::optional<Range<int>> ParseIntRange(const std::string &str);
+
+} // namespace android
+
+#endif  // CODEC_CAPABILITIES_UTILS_H_
\ No newline at end of file
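A short sketch of how the helpers above compose (hypothetical caller, using only what this header declares):

    #include <media/CodecCapabilitiesUtils.h>

    using namespace android;

    void rangeHelpersSketch() {
        // "8000-48000" parses to [8000, 48000]; a single value such as "44100"
        // parses to [44100, 44100].
        std::optional<Range<int>> r = ParseIntRange("8000-48000");
        if (!r) return;

        // Intersection clamps to the overlap; disjoint inputs produce an empty
        // range (lower > upper) and log an error.
        Range<int> clamped = r->intersect(Range<int>(16000, 96000));  // [16000, 48000]

        // Intersect two sorted lists of non-overlapping ranges.
        std::vector<Range<int>> a = { {8000, 16000}, {44100, 48000} };
        std::vector<Range<int>> b = { {12000, 44100} };
        std::vector<Range<int>> common = intersectSortedDistinctRanges(a, b);
        // common == { [12000, 16000], [44100, 44100] }
        (void) clamped;
        (void) common;
    }
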
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 88a2dc4..72aca98 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -20,6 +20,8 @@
#include <android-base/macros.h>
#include <binder/Parcel.h>
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AString.h>
@@ -43,13 +45,10 @@
struct MediaCodecListWriter;
struct MediaCodecInfo : public RefBase {
- struct ProfileLevel {
- uint32_t mProfile;
- uint32_t mLevel;
- bool operator <(const ProfileLevel &o) const {
- return mProfile < o.mProfile || (mProfile == o.mProfile && mLevel < o.mLevel);
- }
- };
+
+ // Moved to CodecCapabilitiesUtils.h
+ // Map MediaCodecInfo::ProfileLevel to android::ProfileLevel to maintain compatibility.
+ typedef ::android::ProfileLevel ProfileLevel;
struct CapabilitiesWriter;
diff --git a/media/libmedia/tests/codeccapabilities/Android.bp b/media/libmedia/tests/codeccapabilities/Android.bp
new file mode 100644
index 0000000..79eb71a
--- /dev/null
+++ b/media/libmedia/tests/codeccapabilities/Android.bp
@@ -0,0 +1,36 @@
+cc_test {
+ name: "CodecCapabilitiesTest",
+ team: "trendy_team_media_codec_framework",
+
+ test_suites: [
+ "general-tests",
+ ],
+ gtest: true,
+
+ srcs: [
+ "CodecCapabilitiesTest.cpp",
+ ],
+
+ shared_libs: [
+ "libbinder",
+ "liblog",
+ "libmedia_codeclist", // available >= R
+ "libmedia_codeclist_capabilities",
+ "libstagefright",
+ "libstagefright_foundation",
+ "libutils",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ sanitize: {
+ cfi: true,
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
new file mode 100644
index 0000000..89c9739
--- /dev/null
+++ b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilitiesTest"
+
+#include <utils/Log.h>
+
+#include <memory>
+
+#include <gtest/gtest.h>
+
+#include <binder/Parcel.h>
+
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/MediaCodecInfo.h>
+
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+
+using namespace android;
+
+class AudioCapsAacTest : public testing::Test {
+protected:
+ AudioCapsAacTest() {
+ std::string mediaType = MIMETYPE_AUDIO_AAC;
+
+ sp<AMessage> details = new AMessage;
+ details->setString("bitrate-range", "8000-960000");
+ details->setString("max-channel-count", "8");
+ details->setString("sample-rate-ranges",
+ "7350,8000,11025,12000,16000,22050,24000,32000,44100,48000");
+
+ std::vector<ProfileLevel> profileLevel{
+ ProfileLevel(2, 0),
+ ProfileLevel(5, 0),
+ ProfileLevel(29, 0),
+ ProfileLevel(23, 0),
+ ProfileLevel(39, 0),
+ ProfileLevel(20, 0),
+ ProfileLevel(42, 0),
+ };
+
+ audioCaps = AudioCapabilities::Create(mediaType, profileLevel, details);
+ }
+
+ std::shared_ptr<AudioCapabilities> audioCaps;
+};
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_Bitrate) {
+ const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+ EXPECT_EQ(bitrateRange.lower(), 8000) << "bitrate range1 does not match. lower: "
+ << bitrateRange.lower();
+ EXPECT_EQ(bitrateRange.upper(), 510000) << "bitrate range1 does not match. upper: "
+ << bitrateRange.upper();
+}
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_InputChannelCount) {
+ int maxInputChannelCount = audioCaps->getMaxInputChannelCount();
+ EXPECT_EQ(maxInputChannelCount, 8);
+ int minInputChannelCount = audioCaps->getMinInputChannelCount();
+ EXPECT_EQ(minInputChannelCount, 1);
+}
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_SupportedSampleRates) {
+ const std::vector<int>& sampleRates = audioCaps->getSupportedSampleRates();
+ EXPECT_EQ(sampleRates, std::vector<int>({7350, 8000, 11025, 12000, 16000, 22050,
+ 24000, 32000, 44100, 48000}));
+
+ EXPECT_FALSE(audioCaps->isSampleRateSupported(6000))
+ << "isSampleRateSupported returned true for unsupported sample rate";
+ EXPECT_TRUE(audioCaps->isSampleRateSupported(8000))
+ << "isSampleRateSupported returned false for supported sample rate";
+ EXPECT_TRUE(audioCaps->isSampleRateSupported(12000))
+ << "isSampleRateSupported returned false for supported sample rate";
+ EXPECT_FALSE(audioCaps->isSampleRateSupported(44000))
+ << "isSampleRateSupported returned true for unsupported sample rate";
+ EXPECT_TRUE(audioCaps->isSampleRateSupported(48000))
+ << "isSampleRateSupported returned true for unsupported sample rate";
+}
+
+class AudioCapsRawTest : public testing::Test {
+protected:
+ AudioCapsRawTest() {
+ std::string mediaType = MIMETYPE_AUDIO_RAW;
+
+ sp<AMessage> details = new AMessage;
+ details->setString("bitrate-range", "1-10000000");
+ details->setString("channel-ranges", "1,2,3,4,5,6,7,8,9,10,11,12");
+ details->setString("sample-rate-ranges", "8000-192000");
+
+ std::vector<ProfileLevel> profileLevel;
+
+ audioCaps = AudioCapabilities::Create(mediaType, profileLevel, details);
+ }
+
+ std::shared_ptr<AudioCapabilities> audioCaps;
+};
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_Bitrate) {
+ const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+ EXPECT_EQ(bitrateRange.lower(), 1);
+ EXPECT_EQ(bitrateRange.upper(), 10000000);
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_InputChannelCount) {
+ int maxInputChannelCount = audioCaps->getMaxInputChannelCount();
+ EXPECT_EQ(maxInputChannelCount, 12);
+ int minInputChannelCount = audioCaps->getMinInputChannelCount();
+ EXPECT_EQ(minInputChannelCount, 1);
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_InputChannelCountRanges) {
+ const std::vector<Range<int>>& inputChannelCountRanges
+ = audioCaps->getInputChannelCountRanges();
+ std::vector<Range<int>> expectedOutput({{1,1}, {2,2}, {3,3}, {4,4}, {5,5},
+ {6,6}, {7,7}, {8,8}, {9,9}, {10,10}, {11,11}, {12,12}});
+ ASSERT_EQ(inputChannelCountRanges.size(), expectedOutput.size());
+    for (size_t i = 0; i < inputChannelCountRanges.size(); i++) {
+ EXPECT_EQ(inputChannelCountRanges.at(i).lower(), expectedOutput.at(i).lower());
+ EXPECT_EQ(inputChannelCountRanges.at(i).upper(), expectedOutput.at(i).upper());
+ }
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_SupportedSampleRates) {
+ const std::vector<Range<int>>& sampleRateRanges = audioCaps->getSupportedSampleRateRanges();
+ EXPECT_EQ(sampleRateRanges.size(), 1);
+ EXPECT_EQ(sampleRateRanges.at(0).lower(), 8000);
+ EXPECT_EQ(sampleRateRanges.at(0).upper(), 192000);
+
+ EXPECT_EQ(audioCaps->isSampleRateSupported(7000), false);
+ EXPECT_EQ(audioCaps->isSampleRateSupported(10000), true);
+ EXPECT_EQ(audioCaps->isSampleRateSupported(193000), false);
+}
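
The fixtures above drive AudioCapabilities::Create() with codec "details" strings such as "7350,8000,11025,..." and "8000-192000". As a minimal sketch, assuming Create() parses these strings into [lower, upper] ranges (parseRanges and supports below are illustrative stand-ins, not the library API), the format decomposes like this in standard C++:

// Illustrative sketch only: decompose a "sample-rate-ranges" style string into
// [lower, upper] pairs and test membership. The real parsing is assumed to live
// inside AudioCapabilities::Create().
#include <cstdio>
#include <sstream>
#include <string>
#include <utility>
#include <vector>

static std::vector<std::pair<int, int>> parseRanges(const std::string& s) {
    std::vector<std::pair<int, int>> ranges;
    std::stringstream ss(s);
    std::string token;
    while (std::getline(ss, token, ',')) {
        size_t dash = token.find('-');
        int lo = std::stoi(token.substr(0, dash));
        int hi = (dash == std::string::npos) ? lo : std::stoi(token.substr(dash + 1));
        ranges.emplace_back(lo, hi);
    }
    return ranges;
}

static bool supports(const std::vector<std::pair<int, int>>& ranges, int rate) {
    for (const auto& r : ranges) {
        if (rate >= r.first && rate <= r.second) return true;
    }
    return false;
}

int main() {
    auto discrete = parseRanges("7350,8000,11025,12000,16000");
    auto continuous = parseRanges("8000-192000");
    std::printf("44100 in discrete set: %d\n", supports(discrete, 44100));   // 0
    std::printf("96000 in 8000-192000:  %d\n", supports(continuous, 96000)); // 1
    return 0;
}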
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 3020ead..857b08a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -852,6 +852,13 @@
}
}
+ void notifyBufferAttached() {
+ auto p = mBufferChannel.lock();
+ if (p) {
+ p->onBufferAttachedToOutputSurface(mGeneration);
+ }
+ }
+
public:
explicit OnBufferReleasedListener(
uint32_t generation,
@@ -872,6 +879,14 @@
}
bool needsReleaseNotify() override { return true; }
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(BQ_CONSUMER_ATTACH_CALLBACK)
+ void onBufferAttached() override {
+ notifyBufferAttached();
+ }
+
+ bool needsAttachNotify() override { return true; }
+#endif
};
class BufferCallback : public CodecBase::BufferCallback {
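
The new notifyBufferAttached() above reuses the lock-and-forward idiom already used for release notifications: promote the weak reference to the buffer channel and silently drop the event if the channel is gone. A self-contained sketch of that idiom, assuming stand-in names (Channel here is not the actual BufferChannelBase API):

// Sketch of the weak_ptr lock-and-forward pattern used by the listener above
// (standard C++ only; Channel and its method are illustrative stand-ins).
#include <cstdint>
#include <cstdio>
#include <memory>

struct Channel {
    void onBufferAttachedToOutputSurface(uint32_t generation) {
        std::printf("buffer attached, generation %u\n", generation);
    }
};

class Listener {
public:
    Listener(uint32_t generation, const std::shared_ptr<Channel>& channel)
          : mGeneration(generation), mChannel(channel) {}

    void notifyBufferAttached() {
        // Promote the weak reference; if the channel is already gone the
        // notification is simply dropped.
        if (auto p = mChannel.lock()) {
            p->onBufferAttachedToOutputSurface(mGeneration);
        }
    }

private:
    uint32_t mGeneration;
    std::weak_ptr<Channel> mChannel;
};

int main() {
    auto channel = std::make_shared<Channel>();
    Listener listener(/*generation=*/7, channel);
    listener.notifyBufferAttached();  // forwards to the channel
    channel.reset();
    listener.notifyBufferAttached();  // no-op, channel expired
    return 0;
}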
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index bffb294..c6087b0 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -517,6 +517,15 @@
};
/**
+ * Notify that a buffer has been attached to the output surface.
+ *
+ * @param generation MediaCodec's surface specifier
+ */
+ virtual void onBufferAttachedToOutputSurface(uint32_t /*generation*/) {
+ // default: no-op
+ };
+
+ /**
* Discard a buffer to the underlying CodecBase object.
*
* TODO: remove once this operation can be handled by just clearing the
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 4e4aa75..b1cf665 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -562,6 +562,30 @@
}
}
+inline constexpr int32_t DTS_HDProfileHRA = 0x1;
+inline constexpr int32_t DTS_HDProfileLBR = 0x2;
+inline constexpr int32_t DTS_HDProfileMA = 0x4;
+
+inline static const char *asString_Dts_HDProfile(int32_t i, const char *def = "??") {
+ switch (i) {
+ case DTS_HDProfileHRA: return "HRA";
+ case DTS_HDProfileLBR: return "LBR";
+ case DTS_HDProfileMA: return "MA";
+ default: return def;
+ }
+}
+
+inline constexpr int32_t DTS_UHDProfileP1 = 0x1;
+inline constexpr int32_t DTS_UHDProfileP2 = 0x2;
+
+inline static const char *asString_Dts_UHDProfile(int32_t i, const char *def = "??") {
+ switch (i) {
+ case DTS_UHDProfileP1: return "P1";
+ case DTS_UHDProfileP2: return "P2";
+ default: return def;
+ }
+}
+
inline constexpr int32_t BITRATE_MODE_CBR = 2;
inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
inline constexpr int32_t BITRATE_MODE_CQ = 0;
@@ -729,8 +753,13 @@
inline constexpr char MIMETYPE_AUDIO_FLAC[] = "audio/flac";
inline constexpr char MIMETYPE_AUDIO_MSGSM[] = "audio/gsm";
inline constexpr char MIMETYPE_AUDIO_AC3[] = "audio/ac3";
+inline constexpr char MIMETYPE_AUDIO_AC4[] = "audio/ac4";
inline constexpr char MIMETYPE_AUDIO_EAC3[] = "audio/eac3";
+inline constexpr char MIMETYPE_AUDIO_EAC3_JOC[] = "audio/eac3-joc";
inline constexpr char MIMETYPE_AUDIO_SCRAMBLED[] = "audio/scrambled";
+inline constexpr char MIMETYPE_AUDIO_DTS[] = "audio/vnd.dts";
+inline constexpr char MIMETYPE_AUDIO_DTS_HD[] = "audio/vnd.dts.hd";
+inline constexpr char MIMETYPE_AUDIO_DTS_UHD[] = "audio/vnd.dts.uhd";
inline constexpr char MIMETYPE_IMAGE_ANDROID_HEIC[] = "image/vnd.android.heic";
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index becb98a..56cd8b8 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -55,6 +55,8 @@
"com.android.media.swcodec",
],
+ export_include_dirs: ["include"],
+
srcs: [
"CodecServiceRegistrant.cpp",
],
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index 6df9dc8..42fd94e 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -40,7 +40,7 @@
#include <codec2/aidl/ComponentStore.h>
#include <codec2/aidl/ParamTypes.h>
-#include <media/CodecServiceRegistrant.h>
+#include <codecserviceregistrant/CodecServiceRegistrant.h>
namespace /* unnamed */ {
@@ -775,13 +775,12 @@
return nullptr;
}
-extern "C" void RegisterCodecServices() {
+/**
+ * This function encapsulates the core logic required to register codec services,
+ * separated from threadpool management to avoid timeouts when called by the fuzzer.
+ */
+static void RegisterCodecServicesWithExistingThreadpool() {
const bool aidlSelected = c2_aidl::utils::IsSelected();
- constexpr int kThreadCount = 64;
- ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
- ABinderProcess_startThreadPool();
- ::android::hardware::configureRpcThreadpool(kThreadCount, false);
-
LOG(INFO) << "Creating software Codec2 service...";
std::shared_ptr<C2ComponentStore> store =
android::GetCodec2PlatformComponentStore();
@@ -880,8 +879,16 @@
if (registered) {
LOG(INFO) << "Software Codec2 service created and registered.";
}
+}
+
+extern "C" void RegisterCodecServices() {
+ constexpr int kThreadCount = 64;
+ ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
+ ABinderProcess_startThreadPool();
+ ::android::hardware::configureRpcThreadpool(kThreadCount, false);
+
+ RegisterCodecServicesWithExistingThreadpool();
ABinderProcess_joinThreadPool();
::android::hardware::joinRpcThreadpool();
}
-
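
The split above keeps RegisterCodecServices() as the blocking entry point that owns threadpool setup and join, while the registration body is callable on its own. A standard-C++ sketch of the shape, with registerCore() standing in for RegisterCodecServicesWithExistingThreadpool():

// Sketch (standard C++ only) of the split applied above: a blocking wrapper
// owns thread-pool setup and join around a reusable, non-blocking core.
#include <cstdio>

static void registerCore() {
    std::printf("register services (no thread-pool management here)\n");
}

// Production entry point: sets up the pool, registers, then blocks joining it.
static void registerBlocking() {
    // ... start the thread pool ...
    registerCore();
    // ... join the thread pool (never returns in the service process) ...
}

int main() {
    // A fuzzer or test calls only the core, so it never blocks in the join.
    registerCore();
    (void)registerBlocking;
    return 0;
}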
diff --git a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
index 4868e0c..0baf1ca 100644
--- a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
+++ b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -13,6 +13,9 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
+#include <codecserviceregistrant/CodecServiceRegistrant.h>
+
#include "../CodecServiceRegistrant.cpp"
#include "fuzzer/FuzzedDataProvider.h"
#include <C2Config.h>
@@ -166,9 +169,9 @@
void CodecServiceRegistrantFuzzer::process(const uint8_t *data, size_t size) {
mFDP = new FuzzedDataProvider(data, size);
invokeH2C2ComponentStore();
- /** RegisterCodecServices is called here to improve code coverage */
- /** as currently it is not called by codecServiceRegistrant */
- RegisterCodecServices();
+ /** RegisterCodecServicesWithExistingThreadpool() is called here to improve
+    * code coverage without the threadpool setup and join that would block the fuzzer. */
+ RegisterCodecServicesWithExistingThreadpool();
delete mFDP;
}
diff --git a/media/libmedia/include/media/CodecServiceRegistrant.h b/media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
similarity index 77%
rename from media/libmedia/include/media/CodecServiceRegistrant.h
rename to media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
index e0af781..1c6f71f 100644
--- a/media/libmedia/include/media/CodecServiceRegistrant.h
+++ b/media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
@@ -18,6 +18,13 @@
#define CODEC_SERVICE_REGISTRANT_H_
+/**
+ * This function starts the threadpool, calls the registration logic
+ * encapsulated in RegisterCodecServicesWithExistingThreadpool(), and
+ * then joins the threadpool.
+ */
+extern "C" void RegisterCodecServices();
+
typedef void (*RegisterCodecServicesFunc)();
#endif // CODEC_SERVICE_REGISTRANT_H_
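
The RegisterCodecServicesFunc typedef exists because the registrant is expected to be loaded at runtime and resolved by symbol name. A hedged sketch of that consumption pattern; the library name below is an assumption for illustration:

// Hedged sketch: resolving RegisterCodecServices() from a dynamically loaded
// registrant library. The library name is an assumption, not taken from this change.
#include <dlfcn.h>
#include <cstdio>

typedef void (*RegisterCodecServicesFunc)();

int main() {
    void* lib = dlopen("libmedia_codecserviceregistrant.so", RTLD_NOW | RTLD_LOCAL);
    if (lib == nullptr) {
        std::fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return 1;
    }
    auto registerServices =
            reinterpret_cast<RegisterCodecServicesFunc>(dlsym(lib, "RegisterCodecServices"));
    if (registerServices == nullptr) {
        std::fprintf(stderr, "dlsym failed: %s\n", dlerror());
        return 1;
    }
    registerServices();  // blocks: joins the binder threadpool in the service process
    return 0;
}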
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index b3707c8..f247f8c 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -523,11 +523,10 @@
}
[this, &track] {
- int64_t duration;
+ int64_t duration = track->mMdhdDurationUs;
int32_t samplerate;
// Only for audio track.
- if (track->elst_needs_processing && mHeaderTimescale != 0 &&
- AMediaFormat_getInt64(track->meta, AMEDIAFORMAT_KEY_DURATION, &duration) &&
+ if (track->elst_needs_processing && mHeaderTimescale != 0 && duration != 0 &&
AMediaFormat_getInt32(track->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, &samplerate)) {
// Elst has to be processed only the first time this function is called.
track->elst_needs_processing = false;
@@ -1645,7 +1644,10 @@
(long long) duration, (long long) mLastTrack->timescale);
return ERROR_MALFORMED;
}
- AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+ // Store this track's mdhd duration to calculate the padding.
+ mLastTrack->mMdhdDurationUs = (int64_t)durationUs;
+ } else {
+ mLastTrack->mMdhdDurationUs = 0;
}
uint8_t lang[2];
@@ -3907,17 +3909,18 @@
}
int32_t id;
+ int64_t duration;
if (version == 1) {
// we can get ctime value from U64_AT(&buffer[4])
// we can get mtime value from U64_AT(&buffer[12])
id = U32_AT(&buffer[20]);
- // we can get duration value from U64_AT(&buffer[28])
+ duration = U64_AT(&buffer[28]);
} else if (version == 0) {
// we can get ctime value from U32_AT(&buffer[4])
// we can get mtime value from U32_AT(&buffer[8])
id = U32_AT(&buffer[12]);
- // we can get duration value from U32_AT(&buffer[20])
+ duration = U32_AT(&buffer[20]);
} else {
return ERROR_UNSUPPORTED;
}
@@ -3926,6 +3929,15 @@
return ERROR_MALFORMED;
AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_TRACK_ID, id);
+ if (duration != 0 && mHeaderTimescale != 0) {
+ long double durationUs = ((long double)duration * 1000000) / mHeaderTimescale;
+ if (durationUs < 0 || durationUs > INT64_MAX) {
+ ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
+ (long long) duration, (long long) mHeaderTimescale);
+ return ERROR_MALFORMED;
+ }
+ AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+ }
size_t matrixOffset = dynSize + 16;
int32_t a00 = U32_AT(&buffer[matrixOffset]);
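
The tkhd duration above is expressed in movie-timescale ticks and is converted to microseconds with an overflow guard before being stored as AMEDIAFORMAT_KEY_DURATION. A small worked sketch of the conversion, with illustrative values:

// Worked sketch of the duration conversion above: ticks in the movie timescale
// become microseconds, rejecting values that do not fit in int64_t.
#include <cstdint>
#include <cstdio>

static bool ticksToUs(uint64_t duration, uint32_t timescale, int64_t* outUs) {
    if (duration == 0 || timescale == 0) return false;
    long double us = ((long double)duration * 1000000) / timescale;
    if (us < 0 || us > INT64_MAX) return false;  // would not fit in 64 bits
    *outUs = (int64_t)us;
    return true;
}

int main() {
    int64_t us = 0;
    // 441000 ticks at a 44100 Hz movie timescale -> 10 seconds.
    if (ticksToUs(441000, 44100, &us)) {
        std::printf("duration: %lld us\n", (long long)us);  // 10000000 us
    }
    return 0;
}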
diff --git a/media/module/extractors/mp4/include/MPEG4Extractor.h b/media/module/extractors/mp4/include/MPEG4Extractor.h
index 542a3e6..59626f6 100644
--- a/media/module/extractors/mp4/include/MPEG4Extractor.h
+++ b/media/module/extractors/mp4/include/MPEG4Extractor.h
@@ -96,7 +96,7 @@
uint8_t *mTx3gBuffer;
size_t mTx3gSize, mTx3gFilled;
-
+ int64_t mMdhdDurationUs = 0;
Track() {
next = NULL;
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index e76ece2..20cd40c 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -187,8 +187,6 @@
BINDER_METHOD_ENTRY(masterMute) \
BINDER_METHOD_ENTRY(setStreamVolume) \
BINDER_METHOD_ENTRY(setStreamMute) \
-BINDER_METHOD_ENTRY(streamVolume) \
-BINDER_METHOD_ENTRY(streamMute) \
BINDER_METHOD_ENTRY(setMode) \
BINDER_METHOD_ENTRY(setMicMute) \
BINDER_METHOD_ENTRY(getMicMute) \
@@ -1748,37 +1746,6 @@
return NO_ERROR;
}
-float AudioFlinger::streamVolume(audio_stream_type_t stream, audio_io_handle_t output) const
-{
- status_t status = checkStreamType(stream);
- if (status != NO_ERROR) {
- return 0.0f;
- }
- if (output == AUDIO_IO_HANDLE_NONE) {
- return 0.0f;
- }
-
- audio_utils::lock_guard lock(mutex());
- sp<VolumeInterface> volumeInterface = getVolumeInterface_l(output);
- if (volumeInterface == NULL) {
- return 0.0f;
- }
-
- return volumeInterface->streamVolume(stream);
-}
-
-bool AudioFlinger::streamMute(audio_stream_type_t stream) const
-{
- status_t status = checkStreamType(stream);
- if (status != NO_ERROR) {
- return true;
- }
-
- audio_utils::lock_guard lock(mutex());
- return streamMute_l(stream);
-}
-
-
void AudioFlinger::broadcastParametersToRecordThreads_l(const String8& keyValuePairs)
{
for (size_t i = 0; i < mRecordThreads.size(); i++) {
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 46f4068..adec4aa 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -96,10 +96,6 @@
status_t setStreamMute(audio_stream_type_t stream, bool muted) final
EXCLUDES_AudioFlinger_Mutex;
- float streamVolume(audio_stream_type_t stream,
- audio_io_handle_t output) const final EXCLUDES_AudioFlinger_Mutex;
- bool streamMute(audio_stream_type_t stream) const final EXCLUDES_AudioFlinger_Mutex;
-
status_t setMode(audio_mode_t mode) final EXCLUDES_AudioFlinger_Mutex;
status_t setMicMute(bool state) final EXCLUDES_AudioFlinger_Mutex;
@@ -339,7 +335,9 @@
const String8& address,
audio_output_flags_t flags) final REQUIRES(mutex());
const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
- getAudioHwDevs_l() const final REQUIRES(mutex()) { return mAudioHwDevs; }
+ getAudioHwDevs_l() const final REQUIRES(mutex(), hardwareMutex()) {
+ return mAudioHwDevs;
+ }
void updateDownStreamPatches_l(const struct audio_patch* patch,
const std::set<audio_io_handle_t>& streams) final REQUIRES(mutex());
void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices) final
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 2dcbbce..7c7d812 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -165,6 +165,9 @@
// maximum time to wait in sendConfigEvent_l() for a status to be received
static const nsecs_t kConfigEventTimeoutNs = seconds(2);
+// longer timeout for create audio patch to account for specific scenarios
+// with Bluetooth devices
+static const nsecs_t kCreatePatchEventTimeoutNs = seconds(4);
// minimum sleep time for the mixer thread loop when tracks are active but in underrun
static const uint32_t kMinThreadSleepTimeUs = 5000;
@@ -731,9 +734,11 @@
mutex().unlock();
{
audio_utils::unique_lock _l(event->mutex());
+ nsecs_t timeoutNs = event->mType == CFG_EVENT_CREATE_AUDIO_PATCH ?
+ kCreatePatchEventTimeoutNs : kConfigEventTimeoutNs;
while (event->mWaitStatus) {
if (event->mCondition.wait_for(
- _l, std::chrono::nanoseconds(kConfigEventTimeoutNs), getTid())
+ _l, std::chrono::nanoseconds(timeoutNs), getTid())
== std::cv_status::timeout) {
event->mStatus = TIMED_OUT;
event->mWaitStatus = false;
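
sendConfigEvent_l() now selects the timeout per event type: CREATE_AUDIO_PATCH events wait up to 4 seconds, all other config events keep the 2-second default. A standard-C++ sketch of picking a wait_for timeout by event type (names illustrative; the example deliberately times out):

// Minimal sketch of the per-event timeout selection above.
#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>

using namespace std::chrono_literals;

enum EventType { CFG_EVENT_GENERIC, CFG_EVENT_CREATE_AUDIO_PATCH };

int main() {
    std::mutex m;
    std::condition_variable cv;
    bool done = false;

    EventType type = CFG_EVENT_CREATE_AUDIO_PATCH;
    auto timeout = (type == CFG_EVENT_CREATE_AUDIO_PATCH) ? 4s : 2s;

    std::unique_lock<std::mutex> lock(m);
    // Nothing ever signals `done`, so this exercises the timeout path.
    bool signaled = cv.wait_for(lock, timeout, [&] { return done; });
    std::printf("signaled: %d (waited %lld s)\n", signaled, (long long)timeout.count());
    return 0;
}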
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index d9d8a3d..d21241b 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -22,13 +22,9 @@
#include <utils/Trace.h>
#include <cutils/properties.h>
-#include "camera/CameraMetadata.h"
#include "CameraFlashlight.h"
-#include "gui/IGraphicBufferConsumer.h"
-#include "gui/BufferQueue.h"
+#include "camera/CameraMetadata.h"
#include "camera/camera2/CaptureRequest.h"
-#include "device3/Camera3Device.h"
-
namespace android {
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3cd4543..54d55a1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -555,8 +555,8 @@
// Configure consumer-side ANativeWindow interface. The listener may be used
// to notify buffer manager (if it is used) of the returned buffers.
res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
- /*reportBufferRemoval*/true,
- /*listener*/mBufferProducerListener);
+ /*listener*/mBufferProducerListener,
+ /*reportBufferRemoval*/true);
if (res != OK) {
ALOGE("%s: Unable to connect to native window for stream %d",
__FUNCTION__, mId);
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 8bb5ffa..bbc10dc 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -84,6 +84,7 @@
"libcutils",
"libhidlbase",
"libcamera_client",
+ "libgui",
"libui",
"android.companion.virtualdevice.flags-aconfig-cc",
"android.hardware.camera.common@1.0",
@@ -108,6 +109,7 @@
// Only include sources that can't be run host-side here
srcs: [
+ "Camera3StreamSplitterTest.cpp",
"CameraPermissionsTest.cpp",
"CameraProviderManagerTest.cpp",
],
diff --git a/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
new file mode 100644
index 0000000..acb82d2
--- /dev/null
+++ b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3StreamSplitterTest"
+// #define LOG_NDEBUG 0
+
+#include "../device3/Camera3StreamSplitter.h"
+
+#include <android/hardware_buffer.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/PixelFormat.h>
+
+#include <system/window.h>
+#include <vndk/window.h>
+
+#include <gtest/gtest.h>
+
+using namespace android;
+
+namespace {
+
+uint64_t kConsumerUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;
+uint64_t kProducerUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;
+size_t kHalMaxBuffers = 3;
+uint32_t kWidth = 640;
+uint32_t kHeight = 480;
+PixelFormat kFormat = HAL_PIXEL_FORMAT_YCBCR_420_888;
+int64_t kDynamicRangeProfile = 0;
+
+std::tuple<sp<BufferItemConsumer>, sp<Surface>> createConsumerAndSurface() {
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+
+ return {sp<BufferItemConsumer>::make(consumer, kConsumerUsage), sp<Surface>::make(producer)};
+}
+
+class Camera3StreamSplitterTest : public testing::Test {
+ public:
+ void SetUp() override { mSplitter = sp<Camera3StreamSplitter>::make(); }
+
+ protected:
+ sp<Camera3StreamSplitter> mSplitter;
+};
+
+class TestSurfaceListener : public SurfaceListener {
+ public:
+ virtual void onBufferReleased() override { mNumBuffersReleased++; }
+ virtual bool needsReleaseNotify() { return true; }
+ virtual void onBufferDetached(int) override {}
+ virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>&) override {};
+
+ uint32_t mNumBuffersReleased = 0;
+};
+
+class TestConsumerListener : public BufferItemConsumer::FrameAvailableListener {
+ public:
+ TestConsumerListener(const wp<BufferItemConsumer>& consumer) : mConsumer(consumer) {}
+
+ virtual void onFrameAvailable(const BufferItem&) {
+ sp<BufferItemConsumer> consumer = mConsumer.promote();
+ EXPECT_NE(nullptr, consumer);
+
+ BufferItem item;
+ EXPECT_EQ(OK, consumer->acquireBuffer(&item, 0));
+ mNumBuffersAcquired++;
+ EXPECT_EQ(OK, consumer->releaseBuffer(item, Fence::NO_FENCE));
+ }
+ virtual void onFrameReplaced(const BufferItem&) {}
+ virtual void onFrameDequeued(const uint64_t) {}
+ virtual void onFrameCancelled(const uint64_t) {}
+ virtual void onFrameDetached(const uint64_t) {}
+
+ wp<BufferItemConsumer> mConsumer;
+ uint32_t mNumBuffersAcquired = 0;
+};
+
+} // namespace
+
+TEST_F(Camera3StreamSplitterTest, TestWithoutSurfaces_NoBuffersConsumed) {
+ sp<Surface> consumer;
+ EXPECT_EQ(OK, mSplitter->connect({}, kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
+ kHeight, kFormat, &consumer, kDynamicRangeProfile));
+
+ sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
+ EXPECT_EQ(OK, consumer->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
+
+ sp<GraphicBuffer> buffer = new GraphicBuffer(kWidth, kHeight, kFormat, kProducerUsage);
+ EXPECT_EQ(OK, consumer->attachBuffer(buffer->getNativeBuffer()));
+ // TODO: Do this with the surface itself once the API is available.
+ EXPECT_EQ(OK,
+ ANativeWindow_queueBuffer(consumer.get(), buffer->getNativeBuffer(), /*fenceFd*/ -1));
+
+ EXPECT_EQ(0u, surfaceListener->mNumBuffersReleased);
+}
+
+TEST_F(Camera3StreamSplitterTest, TestProcessSingleBuffer) {
+ //
+ // Set up output consumers:
+ //
+ constexpr auto kSurfaceId1 = 1;
+ auto [bufferItemConsumer1, surface1] = createConsumerAndSurface();
+ sp<TestConsumerListener> consumerListener1 =
+ sp<TestConsumerListener>::make(bufferItemConsumer1);
+ bufferItemConsumer1->setFrameAvailableListener(consumerListener1);
+
+ constexpr auto kSurfaceId2 = 2;
+ auto [bufferItemConsumer2, surface2] = createConsumerAndSurface();
+ sp<TestConsumerListener> consumerListener2 =
+ sp<TestConsumerListener>::make(bufferItemConsumer2);
+ bufferItemConsumer2->setFrameAvailableListener(consumerListener2);
+
+ //
+ // Connect it to the splitter, get the input surface, and set it up:
+ //
+ sp<Surface> inputSurface;
+ EXPECT_EQ(OK, mSplitter->connect({{kSurfaceId1, surface1}, {kSurfaceId2, surface2}},
+ kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
+ kHeight, kFormat, &inputSurface, kDynamicRangeProfile));
+ sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
+ EXPECT_EQ(OK, inputSurface->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
+ // TODO: Do this with the surface itself once the API is available.
+ EXPECT_EQ(OK, inputSurface->getIGraphicBufferProducer()->allowAllocation(false));
+
+ //
+ // Create a buffer to use:
+ //
+ sp<GraphicBuffer> singleBuffer = new GraphicBuffer(kWidth, kHeight, kFormat, kProducerUsage);
+ EXPECT_NE(nullptr, singleBuffer);
+ mSplitter->attachBufferToOutputs(singleBuffer->getNativeBuffer(), {kSurfaceId1, kSurfaceId2});
+
+ //
+ // Verify that when we attach the buffer, it's processed appropriately:
+ //
+ EXPECT_EQ(OK, inputSurface->attachBuffer(singleBuffer->getNativeBuffer()));
+ EXPECT_EQ(OK, mSplitter->getOnFrameAvailableResult());
+ // TODO: Do this with the surface itself once the API is available.
+ EXPECT_EQ(OK, ANativeWindow_queueBuffer(inputSurface.get(), singleBuffer->getNativeBuffer(),
+ /*fenceFd*/ -1));
+
+ EXPECT_EQ(1u, consumerListener1->mNumBuffersAcquired);
+ EXPECT_EQ(1u, consumerListener2->mNumBuffersAcquired);
+ EXPECT_EQ(1u, surfaceListener->mNumBuffersReleased);
+}
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index fc186fb..7ece0cb 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -65,14 +65,7 @@
cc_library_static {
name: "libvirtualcamera",
srcs: [
- "VirtualCameraProvider.cc",
- "VirtualCameraDevice.cc",
- "VirtualCameraSession.cc",
- "VirtualCameraStream.cc",
- "VirtualCameraService.cc",
- "VirtualCameraSessionContext.cc",
- "VirtualCameraTestInstance.cc",
- "VirtualCameraRenderThread.cc",
+ "*.cc",
],
defaults: [
"libvirtualcamera_defaults",
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureRequest.h b/services/camera/virtualcamera/VirtualCameraCaptureRequest.h
new file mode 100644
index 0000000..cf5402e
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureRequest.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
+
+#include "VirtualCameraDevice.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Struct used to pass request settings across the different parts of
+// the virtual camera system.
+struct RequestSettings {
+ // JPEG_QUALITY metadata
+ int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+
+ // JPEG_ORIENTATION metadata
+ int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
+
+ // JPEG_THUMBNAIL_SIZE metadata
+ Resolution thumbnailResolution = Resolution(0, 0);
+
+ // JPEG_THUMBNAIL_QUALITY metadata
+ int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+
+ // ANDROID_CONTROL_AE_TARGET_FPS_RANGE metadata
+ std::optional<FpsRange> fpsRange;
+
+ // CONTROL_CAPTURE_INTENT metadata
+ camera_metadata_enum_android_control_capture_intent_t captureIntent =
+ VirtualCameraDevice::kDefaultCaptureIntent;
+
+ // JPEG_GPS_LOCATION metadata
+ std::optional<GpsCoordinates> gpsCoordinates;
+
+ // CONTROL_AE_PRECAPTURE_TRIGGER metadata
+ std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+ aePrecaptureTrigger;
+};
+
+} // namespace virtualcamera
+} // namespace companion
+} // namespace android
+
+#endif // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
\ No newline at end of file
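
RequestSettings mixes defaulted fields with std::optional fields that are set only when the client supplied them. A minimal sketch of that layout in standard C++; the field names and types below are illustrative, not the virtual camera API:

// Sketch of the defaults-plus-optionals layout used by RequestSettings above.
#include <cstdio>
#include <optional>
#include <utility>

struct Settings {
    int jpegQuality = 95;                         // always has a value
    std::optional<std::pair<int, int>> fpsRange;  // present only if requested
};

int main() {
    Settings s;                            // defaulted request
    s.fpsRange = std::make_pair(30, 30);   // client asked for a fixed frame rate
    std::printf("quality=%d fps=[%d,%d]\n", s.jpegQuality,
                s.fpsRange ? s.fpsRange->first : -1,
                s.fpsRange ? s.fpsRange->second : -1);
    return 0;
}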
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
new file mode 100644
index 0000000..a61f553
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "VirtualCameraCaptureResult.h"
+
+#include <cstdint>
+
+#include "VirtualCameraCaptureRequest.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "util/MetadataUtil.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+using ::aidl::android::hardware::camera::device::CameraMetadata;
+namespace {
+// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+// This roughly corresponds to frame latency; we set it to the
+// documented minimum of 2.
+static constexpr uint8_t kPipelineDepth = 2;
+
+} // namespace
+
+CameraMetadata createCaptureResultMetadata(
+ const std::chrono::nanoseconds timestamp,
+ const RequestSettings& requestSettings,
+ const Resolution reportedSensorSize) {
+ // All of the keys used in the response need to be referenced in
+ // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
+ // in VirtualCameraDevice.cc).
+ MetadataBuilder builder =
+ MetadataBuilder()
+ .setAberrationCorrectionMode(
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+ .setControlAeAvailableAntibandingModes(
+ {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
+ .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+ .setControlAeExposureCompensation(0)
+ .setControlAeLockAvailable(false)
+ .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
+ .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+ .setControlAePrecaptureTrigger(
+ // Limited devices are expected to have precapture ae enabled and
+ // respond to a cancellation request. Since we don't actually support
+ // AE at all, let's just respect the cancellation expectation in
+ // case it's requested.
+ requestSettings.aePrecaptureTrigger ==
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+ ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+ : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+ .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
+ .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+ .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+ .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
+ .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+ .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
+ .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
+ .setControlCaptureIntent(requestSettings.captureIntent)
+ .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+ .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+ .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
+ .setControlVideoStabilizationMode(
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
+ .setCropRegion(0, 0, reportedSensorSize.width,
+ reportedSensorSize.height)
+ .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+ .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+ .setFlashMode(ANDROID_FLASH_MODE_OFF)
+ .setFocalLength(VirtualCameraDevice::kFocalLength)
+ .setJpegQuality(requestSettings.jpegQuality)
+ .setJpegOrientation(requestSettings.jpegOrientation)
+ .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+ requestSettings.thumbnailResolution.height)
+ .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+ .setLensOpticalStabilizationMode(
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
+ .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+ .setPipelineDepth(kPipelineDepth)
+ .setSensorTimestamp(timestamp)
+ .setStatisticsHotPixelMapMode(
+ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
+ .setStatisticsLensShadingMapMode(
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
+ .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
+
+ if (requestSettings.fpsRange.has_value()) {
+ builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
+ }
+
+ if (requestSettings.gpsCoordinates.has_value()) {
+ const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
+ builder.setJpegGpsCoordinates(coordinates);
+ }
+
+ std::unique_ptr<CameraMetadata> metadata = builder.build();
+
+ if (metadata == nullptr) {
+ ALOGE("%s: Failed to build capture result metadata", __func__);
+ return CameraMetadata();
+ }
+ return std::move(*metadata);
+}
+
+} // namespace virtualcamera
+} // namespace companion
+} // namespace android
\ No newline at end of file
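
createCaptureResultMetadata() builds through MetadataBuilder, null-checks the result, and moves the metadata out of the unique_ptr, falling back to an empty CameraMetadata on failure. A standard-C++ sketch of that build, check, and move-out shape; the types below are stand-ins:

// Sketch of the "build, null-check, move out" pattern used above.
#include <cstdio>
#include <memory>
#include <string>
#include <utility>

struct Metadata { std::string payload; };

static std::unique_ptr<Metadata> build(bool succeed) {
    if (!succeed) return nullptr;
    auto m = std::make_unique<Metadata>();
    m->payload = "capture-result";
    return m;
}

static Metadata makeResult(bool succeed) {
    std::unique_ptr<Metadata> metadata = build(succeed);
    if (metadata == nullptr) {
        // Mirrors the ALOGE plus empty CameraMetadata fallback above.
        std::printf("failed to build metadata, returning empty result\n");
        return Metadata();
    }
    return std::move(*metadata);
}

int main() {
    std::printf("ok:   '%s'\n", makeResult(true).payload.c_str());
    std::printf("fail: '%s'\n", makeResult(false).payload.c_str());
    return 0;
}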
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.h b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
new file mode 100644
index 0000000..9e5b4d7
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
+
+#include <chrono>
+#include <cstdint>
+#include <cstring>
+#include <future>
+#include <memory>
+#include <mutex>
+#include <thread>
+#include <utility>
+#include <vector>
+
+#include "Exif.h"
+#include "GLES/gl.h"
+#include "VirtualCameraCaptureRequest.h"
+#include "VirtualCameraDevice.h"
+#include "VirtualCameraRenderThread.h"
+#include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Constructs the metadata for the capture result based on the request
+// settings, timestamp, and reported sensor size.
+::aidl::android::hardware::camera::device::CameraMetadata
+createCaptureResultMetadata(std::chrono::nanoseconds timestamp,
+ const RequestSettings& requestSettings,
+ Resolution reportedSensorSize);
+
+} // namespace virtualcamera
+} // namespace companion
+} // namespace android
+
+#endif // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 633acec..40a96e4 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -29,6 +29,7 @@
#include "Exif.h"
#include "GLES/gl.h"
+#include "VirtualCameraCaptureResult.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/common/Status.h"
@@ -92,95 +93,10 @@
static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
-// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
-// This roughly corresponds to frame latency, we set to
-// documented minimum of 2.
-static constexpr uint8_t kPipelineDepth = 2;
-
static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024; // 32 KiB
static constexpr UpdateTextureTask kUpdateTextureTask;
-CameraMetadata createCaptureResultMetadata(
- const std::chrono::nanoseconds timestamp,
- const RequestSettings& requestSettings,
- const Resolution reportedSensorSize) {
- // All of the keys used in the response needs to be referenced in
- // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
- // in VirtualCameraDevice.cc).
- MetadataBuilder builder =
- MetadataBuilder()
- .setAberrationCorrectionMode(
- ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
- .setControlAeAvailableAntibandingModes(
- {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
- .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
- .setControlAeExposureCompensation(0)
- .setControlAeLockAvailable(false)
- .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
- .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
- .setControlAePrecaptureTrigger(
- // Limited devices are expected to have precapture ae enabled and
- // respond to cancellation request. Since we don't actuall support
- // AE at all, let's just respect the cancellation expectation in
- // case it's requested
- requestSettings.aePrecaptureTrigger ==
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
- ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
- : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
- .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
- .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
- .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
- .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
- .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
- .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
- .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
- .setControlCaptureIntent(requestSettings.captureIntent)
- .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
- .setControlMode(ANDROID_CONTROL_MODE_AUTO)
- .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
- .setControlVideoStabilizationMode(
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
- .setCropRegion(0, 0, reportedSensorSize.width,
- reportedSensorSize.height)
- .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
- .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
- .setFlashMode(ANDROID_FLASH_MODE_OFF)
- .setFocalLength(VirtualCameraDevice::kFocalLength)
- .setJpegQuality(requestSettings.jpegQuality)
- .setJpegOrientation(requestSettings.jpegOrientation)
- .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
- requestSettings.thumbnailResolution.height)
- .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
- .setLensOpticalStabilizationMode(
- ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
- .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
- .setPipelineDepth(kPipelineDepth)
- .setSensorTimestamp(timestamp)
- .setStatisticsHotPixelMapMode(
- ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
- .setStatisticsLensShadingMapMode(
- ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
- .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
-
- if (requestSettings.fpsRange.has_value()) {
- builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
- }
-
- if (requestSettings.gpsCoordinates.has_value()) {
- const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
- builder.setJpegGpsCoordinates(coordinates);
- }
-
- std::unique_ptr<CameraMetadata> metadata = builder.build();
-
- if (metadata == nullptr) {
- ALOGE("%s: Failed to build capture result metadata", __func__);
- return CameraMetadata();
- }
- return std::move(*metadata);
-}
-
NotifyMsg createShutterNotifyMsg(int frameNumber,
std::chrono::nanoseconds timestamp) {
NotifyMsg msg;
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 5a5966b..aafed44 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -26,6 +26,7 @@
#include <variant>
#include <vector>
+#include "VirtualCameraCaptureRequest.h"
#include "VirtualCameraDevice.h"
#include "VirtualCameraSessionContext.h"
#include "aidl/android/hardware/camera/device/CameraMetadata.h"
@@ -56,19 +57,6 @@
const sp<Fence> mFence;
};
-struct RequestSettings {
- int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
- int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
- Resolution thumbnailResolution = Resolution(0, 0);
- int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
- std::optional<FpsRange> fpsRange;
- camera_metadata_enum_android_control_capture_intent_t captureIntent =
- VirtualCameraDevice::kDefaultCaptureIntent;
- std::optional<GpsCoordinates> gpsCoordinates;
- std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
- aePrecaptureTrigger;
-};
-
// Represents single capture request to fill set of buffers.
class ProcessCaptureRequestTask {
public:
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index e1815c7..88929cc 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -72,7 +72,6 @@
namespace companion {
namespace virtualcamera {
-using ::aidl::android::companion::virtualcamera::Format;
using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
using ::aidl::android::hardware::camera::common::Status;
@@ -87,7 +86,6 @@
using ::aidl::android::hardware::camera::device::Stream;
using ::aidl::android::hardware::camera::device::StreamBuffer;
using ::aidl::android::hardware::camera::device::StreamConfiguration;
-using ::aidl::android::hardware::camera::device::StreamRotation;
using ::aidl::android::hardware::common::fmq::MQDescriptor;
using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
using ::aidl::android::hardware::graphics::common::BufferUsage;