Merge "codec2: enc stat: add mapper for picture_type_t" into tm-dev
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 8088d06..d1aa36a 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -264,7 +264,8 @@
mInternalReconfigure(0),
mRequestCount(0),
mResultErrorCount(0),
- mDeviceError(false) {}
+ mDeviceError(false),
+ mVideoStabilizationMode(-1) {}
CameraSessionStats::CameraSessionStats(const String16& cameraId,
int facing, int newCameraState, const String16& clientName,
@@ -281,7 +282,8 @@
mInternalReconfigure(0),
mRequestCount(0),
mResultErrorCount(0),
- mDeviceError(0) {}
+ mDeviceError(0),
+ mVideoStabilizationMode(-1) {}
status_t CameraSessionStats::readFromParcel(const android::Parcel* parcel) {
if (parcel == NULL) {
@@ -381,6 +383,12 @@
return BAD_VALUE;
}
+ int32_t videoStabilizationMode;
+ if ((err = parcel->readInt32(&videoStabilizationMode)) != OK) {
+ ALOGE("%s: Failed to read video stabilization mode from parcel", __FUNCTION__);
+ return err;
+ }
+
mCameraId = id;
mFacing = facing;
mNewCameraState = newCameraState;
@@ -396,6 +404,7 @@
mDeviceError = deviceError;
mStreamStats = std::move(streamStats);
mUserTag = userTag;
+ mVideoStabilizationMode = videoStabilizationMode;
return OK;
}
@@ -482,6 +491,11 @@
ALOGE("%s: Failed to write user tag!", __FUNCTION__);
return err;
}
+
+ if ((err = parcel->writeInt32(mVideoStabilizationMode)) != OK) {
+ ALOGE("%s: Failed to write video stabilization mode!", __FUNCTION__);
+ return err;
+ }
return OK;
}
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index e1ec6cf..aaa88b2 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -137,6 +137,7 @@
bool mDeviceError;
std::vector<CameraStreamStats> mStreamStats;
String16 mUserTag;
+ int mVideoStabilizationMode;
// Constructors
CameraSessionStats();
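
The new mVideoStabilizationMode field is appended at the tail of both readFromParcel() and writeToParcel(), which is what keeps the two sides of the parcel layout in lockstep. Below is a minimal standalone sketch of that contract; FakeParcel and SessionStats are illustrative stand-ins, not the android::Parcel API.

    // Appended fields must be read in exactly the order they were written, so a
    // new field such as the video stabilization mode goes last on both sides.
    #include <cassert>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct FakeParcel {                      // stand-in for android::Parcel
        std::vector<int32_t> data;
        size_t pos = 0;
        void writeInt32(int32_t v) { data.push_back(v); }
        bool readInt32(int32_t* v) {
            if (pos >= data.size()) return false;  // mirrors a BAD_VALUE error
            *v = data[pos++];
            return true;
        }
    };

    struct SessionStats {
        int32_t requestCount = 0;
        int32_t videoStabilizationMode = -1;  // -1 == "not reported", as in the patch

        void writeToParcel(FakeParcel* p) const {
            p->writeInt32(requestCount);
            p->writeInt32(videoStabilizationMode);  // new field stays last
        }
        bool readFromParcel(FakeParcel* p) {
            return p->readInt32(&requestCount)
                    && p->readInt32(&videoStabilizationMode);  // same order
        }
    };

    int main() {
        FakeParcel p;
        SessionStats in{12, 1}, out;
        in.writeToParcel(&p);
        assert(out.readFromParcel(&p) && out.videoStabilizationMode == 1);
    }
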
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index bda664a..8b9d1de 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -219,6 +219,7 @@
}
mPlugin.reset();
+ mInitCheck = NO_INIT;
return OK;
}
@@ -372,6 +373,10 @@
Mutex::Autolock autoLock(mLock);
+ if (mInitCheck != OK) {
+ return -1;
+ }
+
int32_t seqNum = mHeapSeqNum++;
uint32_t bufferId = static_cast<uint32_t>(seqNum);
mHeapSizes.add(seqNum, heap->size());
diff --git a/drm/libmediadrm/CryptoHalHidl.cpp b/drm/libmediadrm/CryptoHalHidl.cpp
index a290704..55364b5 100644
--- a/drm/libmediadrm/CryptoHalHidl.cpp
+++ b/drm/libmediadrm/CryptoHalHidl.cpp
@@ -190,6 +190,7 @@
mPlugin.clear();
mPluginV1_2.clear();
+ mInitCheck = NO_INIT;
return OK;
}
@@ -221,6 +222,10 @@
Mutex::Autolock autoLock(mLock);
+ if (mInitCheck != OK) {
+ return -1;
+ }
+
int32_t seqNum = mHeapSeqNum++;
uint32_t bufferId = static_cast<uint32_t>(seqNum);
mHeapSizes.add(seqNum, heap->size());
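
Both Crypto HAL wrappers pair the same two changes: destroyPlugin() drops mInitCheck back to NO_INIT, and setHeapBase() refuses to hand out a heap sequence number afterwards. A standalone sketch of the guard, with names simplified from the patch:

    #include <cstdint>
    #include <mutex>

    enum Status { OK = 0, NO_INIT = -1 };

    class CryptoHalLike {
    public:
        Status destroyPlugin() {
            std::lock_guard<std::mutex> lock(mLock);
            mInitCheck = NO_INIT;         // invalidate before reporting success
            return OK;
        }
        int32_t setHeapBase() {
            std::lock_guard<std::mutex> lock(mLock);
            if (mInitCheck != OK) {
                return -1;                // reject use after destroyPlugin()
            }
            return mHeapSeqNum++;         // valid heap sequence number
        }
    private:
        std::mutex mLock;
        Status mInitCheck = OK;           // assume plugin creation set this
        int32_t mHeapSeqNum = 0;
    };
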
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 57cdcd0..4e4a9a1 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -221,6 +221,12 @@
.withFields({C2F(mDrcOutputLoudness, value).inRange(-57.75, 0.25)})
.withSetter(Setter<decltype(*mDrcOutputLoudness)>::StrictValueWithNoDeps)
.build());
+
+ addParameter(DefineParam(mChannelMask, C2_PARAMKEY_CHANNEL_MASK)
+ .withDefault(new C2StreamChannelMaskInfo::output(0u, 0))
+ .withFields({C2F(mChannelMask, value).inRange(0, 4294967292)})
+ .withSetter(Setter<decltype(*mChannelMask)>::StrictValueWithNoDeps)
+ .build());
}
bool isAdts() const { return mAacFormat->value == C2Config::AAC_PACKAGING_ADTS; }
@@ -255,6 +261,7 @@
std::shared_ptr<C2StreamDrcAlbumModeTuning::input> mDrcAlbumMode;
std::shared_ptr<C2StreamMaxChannelCountInfo::input> mMaxChannelCount;
std::shared_ptr<C2StreamDrcOutputLoudnessTuning::output> mDrcOutputLoudness;
+ std::shared_ptr<C2StreamChannelMaskInfo::output> mChannelMask;
// TODO Add : C2StreamAacSbrModeTuning
};
@@ -829,9 +836,11 @@
C2StreamSampleRateInfo::output sampleRateInfo(0u, mStreamInfo->sampleRate);
C2StreamChannelCountInfo::output channelCountInfo(0u, mStreamInfo->numChannels);
+ C2StreamChannelMaskInfo::output channelMaskInfo(0u,
+ maskFromCount(mStreamInfo->numChannels));
std::vector<std::unique_ptr<C2SettingResult>> failures;
c2_status_t err = mIntf->config(
- { &sampleRateInfo, &channelCountInfo },
+ { &sampleRateInfo, &channelCountInfo, &channelMaskInfo },
C2_MAY_BLOCK,
&failures);
if (err == OK) {
@@ -840,6 +849,7 @@
C2FrameData &output = work->worklets.front()->output;
output.configUpdate.push_back(C2Param::Copy(sampleRateInfo));
output.configUpdate.push_back(C2Param::Copy(channelCountInfo));
+ output.configUpdate.push_back(C2Param::Copy(channelMaskInfo));
} else {
ALOGE("Config Update failed");
mSignalledError = true;
@@ -1056,6 +1066,47 @@
}
}
+// definitions based on android.media.AudioFormat.CHANNEL_OUT_*
+#define CHANNEL_OUT_FL 0x4
+#define CHANNEL_OUT_FR 0x8
+#define CHANNEL_OUT_FC 0x10
+#define CHANNEL_OUT_LFE 0x20
+#define CHANNEL_OUT_BL 0x40
+#define CHANNEL_OUT_BR 0x80
+#define CHANNEL_OUT_SL 0x800
+#define CHANNEL_OUT_SR 0x1000
+
+uint32_t C2SoftAacDec::maskFromCount(uint32_t channelCount) {
+ // KEY_CHANNEL_MASK expects masks formatted according to Java android.media.AudioFormat
+ // where the two left-most bits are 0 for output channel mask
+ switch (channelCount) {
+ case 1: // mono is front left
+ return (CHANNEL_OUT_FL);
+ case 2: // stereo
+ return (CHANNEL_OUT_FL | CHANNEL_OUT_FR);
+ case 4: // 4.0 = stereo with backs
+ return (CHANNEL_OUT_FL | CHANNEL_OUT_FR
+ | CHANNEL_OUT_BL | CHANNEL_OUT_BR);
+ case 5: // 5.0
+ return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+ | CHANNEL_OUT_BL | CHANNEL_OUT_BR);
+ case 6: // 5.1 = 5.0 + LFE
+ return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+ | CHANNEL_OUT_BL | CHANNEL_OUT_BR
+ | CHANNEL_OUT_LFE);
+ case 7: // 7.0 = 5.0 + Sides
+ return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+ | CHANNEL_OUT_BL | CHANNEL_OUT_BR
+ | CHANNEL_OUT_SL | CHANNEL_OUT_SR);
+ case 8: // 7.1 = 7.0 + LFE
+ return (CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
+ | CHANNEL_OUT_BL | CHANNEL_OUT_BR | CHANNEL_OUT_SL | CHANNEL_OUT_SR
+ | CHANNEL_OUT_LFE);
+ default:
+ return 0;
+ }
+}
+
class C2SoftAacDecFactory : public C2ComponentFactory {
public:
C2SoftAacDecFactory() : mHelper(std::static_pointer_cast<C2ReflectorHelper>(
diff --git a/media/codec2/components/aac/C2SoftAacDec.h b/media/codec2/components/aac/C2SoftAacDec.h
index a03fc70..b45f148 100644
--- a/media/codec2/components/aac/C2SoftAacDec.h
+++ b/media/codec2/components/aac/C2SoftAacDec.h
@@ -101,6 +101,7 @@
int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
int32_t outputDelayRingBufferSamplesAvailable();
int32_t outputDelayRingBufferSpaceLeft();
+ uint32_t maskFromCount(uint32_t channelCount);
C2_DO_NOT_COPY(C2SoftAacDec);
};
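
maskFromCount() reports masks in the Java android.media.AudioFormat convention, so its results can be sanity-checked with plain bit arithmetic. A standalone spot check using the constants defined in the patch:

    #include <cstdint>
    #include <cstdio>

    constexpr uint32_t CHANNEL_OUT_FL = 0x4,  CHANNEL_OUT_FR = 0x8;
    constexpr uint32_t CHANNEL_OUT_FC = 0x10, CHANNEL_OUT_LFE = 0x20;
    constexpr uint32_t CHANNEL_OUT_BL = 0x40, CHANNEL_OUT_BR = 0x80;

    int main() {
        // channelCount == 2 -> stereo
        uint32_t stereo = CHANNEL_OUT_FL | CHANNEL_OUT_FR;
        // channelCount == 6 -> 5.1 (5.0 plus LFE)
        uint32_t fivePointOne = CHANNEL_OUT_FL | CHANNEL_OUT_FC | CHANNEL_OUT_FR
                | CHANNEL_OUT_BL | CHANNEL_OUT_BR | CHANNEL_OUT_LFE;
        printf("stereo=0x%x 5.1=0x%x\n", stereo, fivePointOne);  // 0xc and 0xfc
    }
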
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 70e742c..31840a2 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -60,6 +60,7 @@
enum drc_effect_type_t : int32_t; ///< DRC effect type
enum drc_album_mode_t : int32_t; ///< DRC album mode
enum hdr_dynamic_metadata_type_t : uint32_t; ///< HDR dynamic metadata type
+ enum hdr_format_t : uint32_t; ///< HDR format
enum intra_refresh_mode_t : uint32_t; ///< intra refresh modes
enum level_t : uint32_t; ///< coding level
enum ordinal_key_t : uint32_t; ///< work ordering keys
@@ -192,10 +193,9 @@
kParamIndexPictureType,
// deprecated
kParamIndexHdr10PlusMetadata,
-
kParamIndexPictureQuantization,
-
kParamIndexHdrDynamicMetadata,
+ kParamIndexHdrFormat,
/* ------------------------------------ video components ------------------------------------ */
@@ -277,6 +277,9 @@
// encoding statistics, average block qp of a frame
kParamIndexAverageBlockQuantization, // int32
+
+ // channel mask for decoded audio
+ kParamIndexAndroidChannelMask, // uint32
};
}
@@ -1664,6 +1667,34 @@
constexpr char C2_PARAMKEY_INPUT_HDR_DYNAMIC_INFO[] = "input.hdr-dynamic-info";
constexpr char C2_PARAMKEY_OUTPUT_HDR_DYNAMIC_INFO[] = "output.hdr-dynamic-info";
+/**
+ * HDR Format
+ */
+C2ENUM(C2Config::hdr_format_t, uint32_t,
+ UNKNOWN, ///< HDR format not known (default)
+ SDR, ///< not HDR (SDR)
+ HLG, ///< HLG
+ HDR10, ///< HDR10
+ HDR10_PLUS, ///< HDR10+
+);
+
+/**
+ * HDR Format Info
+ *
+ * This information may be present during configuration to allow encoders to
+ * prepare encoding certain HDR formats. When this information is not present
+ * before start, encoders should determine the HDR format based on the available
+ * HDR metadata on the first input frame.
+ *
+ * While this information is optional, it is not a hint. When present, encoders
+ * that do not support dynamic reconfiguration do not need to switch to the HDR
+ * format based on the metadata on the first input frame.
+ */
+typedef C2StreamParam<C2Info, C2SimpleValueStruct<C2EasyEnum<C2Config::hdr_format_t>>,
+ kParamIndexHdrFormat>
+ C2StreamHdrFormatInfo;
+constexpr char C2_PARAMKEY_HDR_FORMAT[] = "coded.hdr-format";
+
/* ------------------------------------ block-based coding ----------------------------------- */
/**
@@ -1981,6 +2012,14 @@
constexpr char C2_PARAMKEY_MAX_CODED_CHANNEL_COUNT[] = "coded.max-channel-count";
/**
+ * Audio channel mask. Used by decoders to express the audio channel mask of decoded content.
+ * Channel representation is specified according to the Java android.media.AudioFormat
+ * CHANNEL_OUT_* constants.
+ */
+typedef C2StreamParam<C2Info, C2Uint32Value, kParamIndexAndroidChannelMask>
+        C2StreamChannelMaskInfo;
+constexpr char C2_PARAMKEY_CHANNEL_MASK[] = "raw.channel-mask";
+
+/**
* Audio sample format (PCM encoding)
*/
C2ENUM(C2Config::pcm_encoding_t, uint32_t,
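
The C2StreamHdrFormatInfo doc comment says encoders fall back to the HDR metadata on the first input frame when no format was configured before start. A hypothetical sketch of that fallback decision, as a plain function under assumed inputs rather than framework code:

    // mirrors C2Config::hdr_format_t for a standalone build
    enum class HdrFormat { UNKNOWN, SDR, HLG, HDR10, HDR10_PLUS };

    HdrFormat deriveFromFirstFrame(bool hasDynamicMetadata, bool hasStaticMetadata,
                                   bool transferIsHlg) {
        if (hasDynamicMetadata) return HdrFormat::HDR10_PLUS;  // per-frame metadata
        if (hasStaticMetadata)  return HdrFormat::HDR10;       // static MDCV/CLL only
        if (transferIsHlg)      return HdrFormat::HLG;         // signaled by transfer only
        return HdrFormat::SDR;
    }
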
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index 72f7c43..5c24bd7 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -26,6 +26,7 @@
#include <C2BlockInternal.h>
#include <C2Buffer.h>
#include <C2Component.h>
+#include <C2FenceFactory.h>
#include <C2Param.h>
#include <C2ParamInternal.h>
#include <C2PlatformSupport.h>
@@ -759,17 +760,14 @@
// Note: File descriptors are not duplicated. The original file descriptor must
// not be closed before the transaction is complete.
bool objcpy(hidl_handle* d, const C2Fence& s) {
- (void)s; // TODO: implement s.fd()
- int fenceFd = -1;
d->setTo(nullptr);
- if (fenceFd >= 0) {
- native_handle_t *handle = native_handle_create(1, 0);
- if (!handle) {
- LOG(ERROR) << "Failed to create a native handle.";
- return false;
- }
- handle->data[0] = fenceFd;
+ native_handle_t* handle = _C2FenceFactory::CreateNativeHandle(s);
+ if (handle) {
d->setTo(handle, true /* owns */);
+// } else if (!s.ready()) {
+// // TODO: we should wait for unmarshallable fences but this may not be
+// // the best place for it. We can safely ignore here as at this time
+// // all fences used here are marshallable.
}
return true;
}
@@ -1184,9 +1182,8 @@
// Note: File descriptors are not duplicated. The original file descriptor must
// not be closed before the transaction is complete.
bool objcpy(C2Fence* d, const hidl_handle& s) {
- // TODO: Implement.
- (void)s;
- *d = C2Fence();
+ const native_handle_t* handle = s.getNativeHandle();
+ *d = _C2FenceFactory::CreateFromNativeHandle(handle);
return true;
}
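
The marshalling above relies on a fixed native-handle layout: numFds file descriptors first, then the ints, with the fence type stored in the first int (see GetTypeFromNativeHandle() in C2Fence.cpp later in this change). A standalone sketch of that layout, with plain containers standing in for native_handle_t:

    #include <cstdint>
    #include <vector>

    enum FenceType : uint32_t { INVALID_FENCE, NULL_FENCE, SURFACE_FENCE, SYNC_FENCE };

    struct HandleLayout {          // stand-in for native_handle_t's data[] array
        std::vector<int> fds;      // nh->data[0 .. numFds-1]
        std::vector<int> ints;     // nh->data[numFds .. numFds+numInts-1]
    };

    FenceType typeOf(const HandleLayout& h) {
        // the type tag always lives in the first int, right after the fds
        if (h.fds.size() <= 1 && !h.ints.empty()) {
            return static_cast<FenceType>(h.ints[0]);
        }
        return INVALID_FENCE;
    }
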
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 42b3c43..0acf7d7 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -1502,6 +1502,7 @@
igbp = new B2HGraphicBufferProducer2(surface);
}
+ std::scoped_lock lock(mOutputMutex);
std::shared_ptr<SurfaceSyncObj> syncObj;
if (!surface) {
@@ -1586,6 +1587,24 @@
mOutputBufferQueue->updateMaxDequeueBufferCount(maxDequeueCount);
}
+void Codec2Client::Component::stopUsingOutputSurface(
+ C2BlockPool::local_id_t blockPoolId) {
+ std::scoped_lock lock(mOutputMutex);
+ mOutputBufferQueue->stop();
+ Return<Status> transStatus = mBase1_0->setOutputSurface(
+ static_cast<uint64_t>(blockPoolId), nullptr);
+ if (!transStatus.isOk()) {
+ LOG(ERROR) << "setOutputSurface(stopUsingOutputSurface) -- transaction failed.";
+ } else {
+ c2_status_t status =
+ static_cast<c2_status_t>(static_cast<Status>(transStatus));
+ if (status != C2_OK) {
+ LOG(DEBUG) << "setOutputSurface(stopUsingOutputSurface) -- call failed: "
+ << status << ".";
+ }
+ }
+}
+
c2_status_t Codec2Client::Component::connectToInputSurface(
const std::shared_ptr<InputSurface>& inputSurface,
std::shared_ptr<InputSurfaceConnection>* connection) {
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index 347e58a..49d9b28 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -411,6 +411,10 @@
// Set max dequeue count for output surface.
void setOutputSurfaceMaxDequeueCount(int maxDequeueCount);
+ // Stop using the current output surface.
+ void stopUsingOutputSurface(
+ C2BlockPool::local_id_t blockPoolId);
+
// Connect to a given InputSurface.
c2_status_t connectToInputSurface(
const std::shared_ptr<InputSurface>& inputSurface,
@@ -441,6 +445,11 @@
struct OutputBufferQueue;
std::unique_ptr<OutputBufferQueue> mOutputBufferQueue;
+ // (b/202903117) Sometimes MediaCodec::setSurface races between normal
+ // setSurface and setSurface with ReleaseSurface due to timing issues.
+ // In order to prevent the race condition, a mutex is added.
+ std::mutex mOutputMutex;
+
static c2_status_t setDeathListener(
const std::shared_ptr<Component>& component,
const std::shared_ptr<Listener>& listener);
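
The fix is a coarse lock: setOutputSurface() and stopUsingOutputSurface() both take mOutputMutex, so the two MediaCodec::setSurface paths can no longer interleave. A minimal standalone sketch of the locking shape, with types simplified from the patch:

    #include <memory>
    #include <mutex>

    struct OutputQueueLike {
        void configure() { /* swap in the new surface */ }
        void stop()      { /* make pending buffer ops no-ops */ }
    };

    class ComponentLike {
    public:
        void setOutputSurface() {
            std::scoped_lock lock(mOutputMutex);  // same lock as the stop path
            mQueue->configure();
        }
        void stopUsingOutputSurface() {
            std::scoped_lock lock(mOutputMutex);
            mQueue->stop();
        }
    private:
        std::mutex mOutputMutex;                  // serializes the two paths
        std::unique_ptr<OutputQueueLike> mQueue = std::make_unique<OutputQueueLike>();
    };
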
diff --git a/media/codec2/hidl/client/include/codec2/hidl/output.h b/media/codec2/hidl/client/include/codec2/hidl/output.h
index 877148a..a13edf3 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/output.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/output.h
@@ -50,6 +50,10 @@
int maxDequeueBufferCount,
std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj);
+ // Stop using the current output surface. Pending buffer operations will no
+ // longer be performed.
+ void stop();
+
// Render a graphic block to current surface.
status_t outputBuffer(
const C2ConstGraphicBlock& block,
@@ -81,6 +85,7 @@
sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
+ bool mStopped;
bool registerBuffer(const C2ConstGraphicBlock& block);
};
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hidl/client/output.cpp
index de34c24..f789030 100644
--- a/media/codec2/hidl/client/output.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -169,7 +169,7 @@
} // unnamed namespace
OutputBufferQueue::OutputBufferQueue()
- : mGeneration{0}, mBqId{0} {
+ : mGeneration{0}, mBqId{0}, mStopped{false} {
}
OutputBufferQueue::~OutputBufferQueue() {
@@ -219,6 +219,8 @@
poolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
{
std::scoped_lock<std::mutex> l(mMutex);
+ bool stopped = mStopped;
+ mStopped = false;
if (generation == mGeneration) {
// case of old BlockPool destruction
C2SyncVariables *var = mSyncMem ? mSyncMem->mem() : nullptr;
@@ -258,7 +260,7 @@
return false;
}
for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
- if (mBqId == 0 || !mBuffers[i]) {
+ if (mBqId == 0 || !mBuffers[i] || stopped) {
continue;
}
std::shared_ptr<_C2BlockPoolData> data = mPoolDatas[i].lock();
@@ -317,6 +319,12 @@
return true;
}
+void OutputBufferQueue::stop() {
+ std::scoped_lock<std::mutex> l(mMutex);
+ mStopped = true;
+ mOwner.reset(); // destructor of the block will not trigger IGBP::cancel()
+}
+
bool OutputBufferQueue::registerBuffer(const C2ConstGraphicBlock& block) {
std::shared_ptr<_C2BlockPoolData> data =
_C2BlockFactory::GetGraphicBlockPoolData(block);
@@ -325,7 +333,7 @@
}
std::scoped_lock<std::mutex> l(mMutex);
- if (!mIgbp) {
+ if (!mIgbp || mStopped) {
return false;
}
@@ -371,11 +379,17 @@
std::shared_ptr<C2SurfaceSyncMemory> syncMem;
mMutex.lock();
+ bool stopped = mStopped;
sp<IGraphicBufferProducer> outputIgbp = mIgbp;
uint32_t outputGeneration = mGeneration;
syncMem = mSyncMem;
mMutex.unlock();
+ if (stopped) {
+ LOG(INFO) << "outputBuffer -- already stopped.";
+ return DEAD_OBJECT;
+ }
+
status_t status = attachToBufferQueue(
block, outputIgbp, outputGeneration, &bqSlot, syncMem);
@@ -408,12 +422,18 @@
std::shared_ptr<C2SurfaceSyncMemory> syncMem;
mMutex.lock();
+ bool stopped = mStopped;
sp<IGraphicBufferProducer> outputIgbp = mIgbp;
uint32_t outputGeneration = mGeneration;
uint64_t outputBqId = mBqId;
syncMem = mSyncMem;
mMutex.unlock();
+ if (stopped) {
+ LOG(INFO) << "outputBuffer -- already stopped.";
+ return DEAD_OBJECT;
+ }
+
if (!outputIgbp) {
LOG(VERBOSE) << "outputBuffer -- output surface is null.";
return NO_INIT;
@@ -467,7 +487,7 @@
mMutex.lock();
mMaxDequeueBufferCount = maxDequeueBufferCount;
auto syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
- if (syncVar) {
+ if (syncVar && !mStopped) {
syncVar->lock();
syncVar->updateMaxDequeueCountLocked(maxDequeueBufferCount);
syncVar->unlock();
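
The pattern repeats in every output path: snapshot mStopped under the mutex, release the lock, and bail out with DEAD_OBJECT before touching the producer. A standalone sketch of the shape, with the IGBP work elided:

    #include <mutex>

    enum status_t { OK = 0, DEAD_OBJECT = -32 };

    class QueueLike {
    public:
        void stop() {
            std::scoped_lock<std::mutex> l(mMutex);
            mStopped = true;            // later output calls return DEAD_OBJECT
        }
        status_t outputBuffer() {
            bool stopped;
            {
                std::scoped_lock<std::mutex> l(mMutex);
                stopped = mStopped;     // snapshot; do the slow work unlocked
            }
            if (stopped) {
                return DEAD_OBJECT;
            }
            // ... attach and queue the block to the surface here ...
            return OK;
        }
    private:
        std::mutex mMutex;
        bool mStopped = false;
    };
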
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 19bb206..7a1728c 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1435,7 +1435,7 @@
int64_t blockUsage =
usage.value | C2MemoryUsage::CPU_READ | C2MemoryUsage::CPU_WRITE;
std::shared_ptr<C2GraphicBlock> block = FetchGraphicBlock(
- width, height, pixelFormat, blockUsage, {comp->getName()});
+ width, height, componentColorFormat, blockUsage, {comp->getName()});
sp<GraphicBlockBuffer> buffer;
if (block) {
buffer = GraphicBlockBuffer::Allocate(
@@ -1826,13 +1826,20 @@
return;
}
+ // Preparation of input buffers may not succeed due to a lack of memory;
+ // returning the correct error code (NO_MEMORY) allows MediaCodec to try to
+ // reclaim memory and restart the codec gracefully.
+ std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+ err2 = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
+ if (err2 != OK) {
+ ALOGE("Initial preparation for Input Buffers failed");
+ mCallback->onError(err2, ACTION_CODE_FATAL);
+ return;
+ }
+
mCallback->onStartCompleted();
- err2 = mChannel->requestInitialInputBuffers();
- if (err2 != OK) {
- ALOGE("Initial request for Input Buffers failed");
- mCallback->onError(err2, ACTION_CODE_FATAL);
- }
+ mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
}
void CCodec::initiateShutdown(bool keepComponentAllocated) {
@@ -2126,11 +2133,14 @@
state->set(RUNNING);
}
- status_t err = mChannel->requestInitialInputBuffers();
+ std::map<size_t, sp<MediaCodecBuffer>> clientInputBuffers;
+ status_t err = mChannel->prepareInitialInputBuffers(&clientInputBuffers);
if (err != OK) {
ALOGE("Resume request for Input Buffers failed");
mCallback->onError(err, ACTION_CODE_FATAL);
+ return;
}
+ mChannel->requestInitialInputBuffers(std::move(clientInputBuffers));
}
void CCodec::signalSetParameters(const sp<AMessage> &msg) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 62a1d02..709e1a6 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -880,6 +880,19 @@
return UNKNOWN_ERROR;
}
const C2ConstGraphicBlock &block = blocks.front();
+ C2Fence c2fence = block.fence();
+ sp<Fence> fence = Fence::NO_FENCE;
+ // TODO: it's not sufficient to just check isHW() and then construct an android::Fence from it.
+ // Once C2Fence::type() is added, check the exact C2Fence type
+ if (c2fence.isHW()) {
+ int fenceFd = c2fence.fd();
+ fence = sp<Fence>::make(fenceFd);
+ if (!fence) {
+ ALOGE("[%s] Failed to allocate a fence", mName);
+ close(fenceFd);
+ return NO_MEMORY;
+ }
+ }
// TODO: revisit this after C2Fence implementation.
android::IGraphicBufferProducer::QueueBufferInput qbi(
@@ -892,7 +905,7 @@
blocks.front().crop().bottom()),
videoScalingMode,
transform,
- Fence::NO_FENCE, 0);
+ fence, 0);
if (hdrStaticInfo || hdrDynamicInfo) {
HdrMetadata hdr;
if (hdrStaticInfo) {
@@ -1457,54 +1470,47 @@
return OK;
}
-status_t CCodecBufferChannel::requestInitialInputBuffers() {
+status_t CCodecBufferChannel::prepareInitialInputBuffers(
+ std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers) {
if (mInputSurface) {
return OK;
}
+ size_t numInputSlots = mInput.lock()->numSlots;
+
+ {
+ Mutexed<Input>::Locked input(mInput);
+ while (clientInputBuffers->size() < numInputSlots) {
+ size_t index;
+ sp<MediaCodecBuffer> buffer;
+ if (!input->buffers->requestNewBuffer(&index, &buffer)) {
+ break;
+ }
+ clientInputBuffers->emplace(index, buffer);
+ }
+ }
+ if (clientInputBuffers->empty()) {
+ ALOGW("[%s] start: cannot allocate memory at all", mName);
+ return NO_MEMORY;
+ } else if (clientInputBuffers->size() < numInputSlots) {
+ ALOGD("[%s] start: cannot allocate memory for all slots, "
+ "only %zu buffers allocated",
+ mName, clientInputBuffers->size());
+ } else {
+ ALOGV("[%s] %zu initial input buffers available",
+ mName, clientInputBuffers->size());
+ }
+ return OK;
+}
+
+status_t CCodecBufferChannel::requestInitialInputBuffers(
+ std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers) {
C2StreamBufferTypeSetting::output oStreamFormat(0u);
C2PrependHeaderModeSetting prepend(PREPEND_HEADER_TO_NONE);
c2_status_t err = mComponent->query({ &oStreamFormat, &prepend }, {}, C2_DONT_BLOCK, nullptr);
if (err != C2_OK && err != C2_BAD_INDEX) {
return UNKNOWN_ERROR;
}
- size_t numInputSlots = mInput.lock()->numSlots;
-
- struct ClientInputBuffer {
- size_t index;
- sp<MediaCodecBuffer> buffer;
- size_t capacity;
- };
- std::list<ClientInputBuffer> clientInputBuffers;
-
- {
- Mutexed<Input>::Locked input(mInput);
- while (clientInputBuffers.size() < numInputSlots) {
- ClientInputBuffer clientInputBuffer;
- if (!input->buffers->requestNewBuffer(&clientInputBuffer.index,
- &clientInputBuffer.buffer)) {
- break;
- }
- clientInputBuffer.capacity = clientInputBuffer.buffer->capacity();
- clientInputBuffers.emplace_back(std::move(clientInputBuffer));
- }
- }
- if (clientInputBuffers.empty()) {
- ALOGW("[%s] start: cannot allocate memory at all", mName);
- return NO_MEMORY;
- } else if (clientInputBuffers.size() < numInputSlots) {
- ALOGD("[%s] start: cannot allocate memory for all slots, "
- "only %zu buffers allocated",
- mName, clientInputBuffers.size());
- } else {
- ALOGV("[%s] %zu initial input buffers available",
- mName, clientInputBuffers.size());
- }
- // Sort input buffers by their capacities in increasing order.
- clientInputBuffers.sort(
- [](const ClientInputBuffer& a, const ClientInputBuffer& b) {
- return a.capacity < b.capacity;
- });
std::list<std::unique_ptr<C2Work>> flushedConfigs;
mFlushedConfigs.lock()->swap(flushedConfigs);
@@ -1526,25 +1532,31 @@
}
}
if (oStreamFormat.value == C2BufferData::LINEAR &&
- (!prepend || prepend.value == PREPEND_HEADER_TO_NONE)) {
- sp<MediaCodecBuffer> buffer = clientInputBuffers.front().buffer;
+ (!prepend || prepend.value == PREPEND_HEADER_TO_NONE) &&
+ !clientInputBuffers.empty()) {
+ size_t minIndex = clientInputBuffers.begin()->first;
+ sp<MediaCodecBuffer> minBuffer = clientInputBuffers.begin()->second;
+ for (const auto &[index, buffer] : clientInputBuffers) {
+ if (minBuffer->capacity() > buffer->capacity()) {
+ minIndex = index;
+ minBuffer = buffer;
+ }
+ }
// WORKAROUND: Some apps expect CSD available without queueing
// any input. Queue an empty buffer to get the CSD.
- buffer->setRange(0, 0);
- buffer->meta()->clear();
- buffer->meta()->setInt64("timeUs", 0);
- if (queueInputBufferInternal(buffer) != OK) {
+ minBuffer->setRange(0, 0);
+ minBuffer->meta()->clear();
+ minBuffer->meta()->setInt64("timeUs", 0);
+ if (queueInputBufferInternal(minBuffer) != OK) {
ALOGW("[%s] Error while queueing an empty buffer to get CSD",
mName);
return UNKNOWN_ERROR;
}
- clientInputBuffers.pop_front();
+ clientInputBuffers.erase(minIndex);
}
- for (const ClientInputBuffer& clientInputBuffer: clientInputBuffers) {
- mCallback->onInputBufferAvailable(
- clientInputBuffer.index,
- clientInputBuffer.buffer);
+ for (const auto &[index, buffer] : clientInputBuffers) {
+ mCallback->onInputBufferAvailable(index, buffer);
}
return OK;
@@ -1570,6 +1582,14 @@
Mutexed<Output>::Locked output(mOutput);
output->buffers.reset();
}
+ if (mOutputSurface.lock()->surface) {
+ C2BlockPool::local_id_t outputPoolId;
+ {
+ Mutexed<BlockPools>::Locked pools(mBlockPools);
+ outputPoolId = pools->outputPoolId;
+ }
+ mComponent->stopUsingOutputSurface(outputPoolId);
+ }
}
void CCodecBufferChannel::release() {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index b3a5f4b..f29a225 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -130,9 +130,23 @@
bool buffersBoundToCodec);
/**
- * Request initial input buffers to be filled by client.
+ * Prepare initial input buffers to be filled by client.
+ *
+ * \param[out] clientInputBuffers pointer to a slot index -> buffer map.
+ * On success, it contains prepared
+ * initial input buffers.
*/
- status_t requestInitialInputBuffers();
+ status_t prepareInitialInputBuffers(
+ std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers);
+
+ /**
+ * Request initial input buffers as prepared in clientInputBuffers.
+ *
+ * \param[in] clientInputBuffers slot index -> buffer map with prepared
+ * initial input buffers.
+ */
+ status_t requestInitialInputBuffers(
+ std::map<size_t, sp<MediaCodecBuffer>> &&clientInputBuffers);
/**
* Stop queueing buffers to the component. This object should never queue
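
With the list-plus-sort replaced by a map, the CSD workaround in requestInitialInputBuffers() now does a single scan for the smallest-capacity buffer. A standalone sketch of that selection, with types simplified; the caller guarantees the map is non-empty:

    #include <cstddef>
    #include <map>

    struct BufferLike {
        size_t cap;
        size_t capacity() const { return cap; }
    };

    size_t pickCsdSlot(const std::map<size_t, BufferLike>& clientInputBuffers) {
        size_t minIndex = clientInputBuffers.begin()->first;
        const BufferLike* minBuffer = &clientInputBuffers.begin()->second;
        for (const auto& [index, buffer] : clientInputBuffers) {
            if (minBuffer->capacity() > buffer.capacity()) {
                minIndex = index;          // smaller buffer found
                minBuffer = &buffer;
            }
        }
        // the caller queues this buffer with a 0-length range to pull out the
        // codec-specific data, then erases the slot from the map
        return minIndex;
    }
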
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 3cf8c3e..2643290 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -400,10 +400,10 @@
// Rotation
// Note: SDK rotation is clock-wise, while C2 rotation is counter-clock-wise
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_VUI_ROTATION, "value")
- .limitTo(D::VIDEO & D::CODED)
+ .limitTo((D::VIDEO | D::IMAGE) & D::CODED)
.withMappers(negate, negate));
add(ConfigMapper(KEY_ROTATION, C2_PARAMKEY_ROTATION, "value")
- .limitTo(D::VIDEO & D::RAW)
+ .limitTo((D::VIDEO | D::IMAGE) & D::RAW)
.withMappers(negate, negate));
// android 'video-scaling'
@@ -513,6 +513,9 @@
add(ConfigMapper("cta861.max-fall", C2_PARAMKEY_HDR_STATIC_INFO, "max-fall")
.limitTo((D::VIDEO | D::IMAGE) & D::RAW));
+ add(ConfigMapper(C2_PARAMKEY_HDR_FORMAT, C2_PARAMKEY_HDR_FORMAT, "value")
+ .limitTo((D::VIDEO | D::IMAGE) & D::CODED & D::CONFIG));
+
add(ConfigMapper(std::string(KEY_FEATURE_) + FEATURE_SecurePlayback,
C2_PARAMKEY_SECURE_MODE, "value"));
@@ -905,6 +908,9 @@
add(ConfigMapper(KEY_MAX_OUTPUT_CHANNEL_COUNT, C2_PARAMKEY_MAX_CHANNEL_COUNT, "value")
.limitTo(D::AUDIO & (D::CONFIG | D::PARAM | D::READ)));
+ add(ConfigMapper(KEY_CHANNEL_MASK, C2_PARAMKEY_CHANNEL_MASK, "value")
+ .limitTo(D::AUDIO & D::DECODER & D::READ));
+
add(ConfigMapper(KEY_AAC_SBR_MODE, C2_PARAMKEY_AAC_SBR_MODE, "value")
.limitTo(D::AUDIO & D::ENCODER & (D::CONFIG | D::PARAM | D::READ))
.withMapper([](C2Value v) -> C2Value {
@@ -1637,6 +1643,27 @@
params->setFloat(C2_PARAMKEY_INPUT_TIME_STRETCH, captureRate / frameRate);
}
}
+
+ // add HDR format for video encoding
+ if (configDomain == IS_CONFIG) {
+ // don't assume here that transfer is set for HDR, only require it for HLG
+ int transfer = 0;
+ params->findInt32(KEY_COLOR_TRANSFER, &transfer);
+
+ int profile;
+ if (params->findInt32(KEY_PROFILE, &profile)) {
+ std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+ C2Mapper::GetProfileLevelMapper(mCodingMediaType);
+ C2Config::hdr_format_t c2 = C2Config::hdr_format_t::UNKNOWN;
+ if (mapper && mapper->mapHdrFormat(profile, &c2)) {
+ if (c2 == C2Config::hdr_format_t::HLG &&
+ transfer != COLOR_TRANSFER_HLG) {
+ c2 = C2Config::hdr_format_t::UNKNOWN;
+ }
+ params->setInt32(C2_PARAMKEY_HDR_FORMAT, c2);
+ }
+ }
+ }
}
{ // reflect temporal layering into a binary blob
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 3a94016..c606d6f 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -276,6 +276,13 @@
{ C2Config::PROFILE_HEVC_MAIN_10, HEVCProfileMain10HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sHevcHdrFormats = {
+ { C2Config::hdr_format_t::SDR, HEVCProfileMain },
+ { C2Config::hdr_format_t::HLG, HEVCProfileMain10 },
+ { C2Config::hdr_format_t::HDR10, HEVCProfileMain10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, HEVCProfileMain10HDR10Plus },
+};
+
ALookup<C2Config::level_t, int32_t> sMpeg2Levels = {
{ C2Config::LEVEL_MP2V_LOW, MPEG2LevelLL },
{ C2Config::LEVEL_MP2V_MAIN, MPEG2LevelML },
@@ -365,6 +372,17 @@
{ C2Config::PROFILE_VP9_3, VP9Profile3HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sVp9HdrFormats = {
+ { C2Config::hdr_format_t::SDR, VP9Profile0 },
+ { C2Config::hdr_format_t::SDR, VP9Profile1 },
+ { C2Config::hdr_format_t::HLG, VP9Profile2 },
+ { C2Config::hdr_format_t::HLG, VP9Profile3 },
+ { C2Config::hdr_format_t::HDR10, VP9Profile2HDR },
+ { C2Config::hdr_format_t::HDR10, VP9Profile3HDR },
+ { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile2HDR10Plus },
+ { C2Config::hdr_format_t::HDR10_PLUS, VP9Profile3HDR10Plus },
+};
+
ALookup<C2Config::level_t, int32_t> sAv1Levels = {
{ C2Config::LEVEL_AV1_2, AV1Level2 },
{ C2Config::LEVEL_AV1_2_1, AV1Level21 },
@@ -411,6 +429,13 @@
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
};
+ALookup<C2Config::hdr_format_t, int32_t> sAv1HdrFormats = {
+ { C2Config::hdr_format_t::SDR, AV1ProfileMain8 },
+ { C2Config::hdr_format_t::HLG, AV1ProfileMain10 },
+ { C2Config::hdr_format_t::HDR10, AV1ProfileMain10HDR10 },
+ { C2Config::hdr_format_t::HDR10_PLUS, AV1ProfileMain10HDR10Plus },
+};
+
// HAL_PIXEL_FORMAT_* -> COLOR_Format*
ALookup<uint32_t, int32_t> sPixelFormats = {
{ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, COLOR_FormatSurface },
@@ -494,6 +519,10 @@
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
return sAacProfiles.map(from, to);
}
+ // AAC does not have HDR format
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t*) override {
+ return false;
+ }
};
struct AvcProfileLevelMapper : ProfileLevelMapperHelper {
@@ -524,6 +553,12 @@
virtual bool simpleMap(int32_t from, C2Config::profile_t *to) {
return sDolbyVisionProfiles.map(from, to);
}
+ // Dolby Vision is always HDR and the profile is fully expressive, so use the
+ // unknown HDR format
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *to) override {
+ *to = C2Config::hdr_format_t::UNKNOWN;
+ return true;
+ }
};
struct H263ProfileLevelMapper : ProfileLevelMapperHelper {
@@ -562,6 +597,9 @@
mIsHdr ? sHevcHdrProfiles.map(from, to) :
sHevcProfiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sHevcHdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -640,6 +678,9 @@
mIsHdr ? sVp9HdrProfiles.map(from, to) :
sVp9Profiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sVp9HdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -669,6 +710,9 @@
mIsHdr ? sAv1HdrProfiles.map(from, to) :
sAv1Profiles.map(from, to);
}
+ virtual bool mapHdrFormat(int32_t from, C2Config::hdr_format_t *to) override {
+ return sAv1HdrFormats.map(from, to);
+ }
private:
bool mIsHdr;
@@ -678,6 +722,13 @@
} // namespace
+// the default mapper is used for media types that do not support HDR
+bool C2Mapper::ProfileLevelMapper::mapHdrFormat(int32_t, C2Config::hdr_format_t *to) {
+ // by default map all (including vendor) profiles to SDR
+ *to = C2Config::hdr_format_t::SDR;
+ return true;
+}
+
// static
std::shared_ptr<C2Mapper::ProfileLevelMapper>
C2Mapper::GetProfileLevelMapper(std::string mediaType) {
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.h b/media/codec2/sfplugin/utils/Codec2Mapper.h
index 33d305e..c8e9e13 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.h
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.h
@@ -34,6 +34,16 @@
virtual bool mapProfile(int32_t, C2Config::profile_t*) = 0;
virtual bool mapLevel(C2Config::level_t, int32_t*) = 0;
virtual bool mapLevel(int32_t, C2Config::level_t*) = 0;
+
+ /**
+ * Mapper method that maps a MediaCodec profile to the supported
+ * HDR format for that profile. Since 10-bit profiles are used for
+ * HLG, this method will return HLG for all 10-bit profiles, but
+ * the caller should also verify that the transfer function is
+ * indeed HLG.
+ */
+ // not an abstract method as we have a default implementation for SDR
+ virtual bool mapHdrFormat(int32_t, C2Config::hdr_format_t *hdr);
virtual ~ProfileLevelMapper() = default;
};
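
For reference, this is how the new hook is consumed in CCodecConfig.cpp earlier in this change (a sketch; the mediaType and profile values here are placeholders):

    std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
            C2Mapper::GetProfileLevelMapper(mediaType /* e.g. "video/hevc" */);
    C2Config::hdr_format_t fmt = C2Config::hdr_format_t::UNKNOWN;
    if (mapper && mapper->mapHdrFormat(profile, &fmt)) {
        // 10-bit profiles map to HLG, so callers must still confirm the
        // transfer function is COLOR_TRANSFER_HLG before trusting that answer.
    }
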
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 9c5183e..6f98d11 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -16,13 +16,24 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2FenceFactory"
+#include <cutils/native_handle.h>
#include <utils/Log.h>
+#include <ui/Fence.h>
#include <C2FenceFactory.h>
#include <C2SurfaceSyncObj.h>
+#define MAX_FENCE_FDS 1
+
class C2Fence::Impl {
public:
+ enum type_t : uint32_t {
+ INVALID_FENCE,
+ NULL_FENCE,
+ SURFACE_FENCE,
+ SYNC_FENCE,
+ };
+
virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
virtual bool valid() const = 0;
@@ -33,9 +44,26 @@
virtual bool isHW() const = 0;
+ virtual type_t type() const = 0;
+
+ /**
+ * Create a native handle for the fence so it can be marshalled.
+ * The native handle must store fence type in the first integer.
+ *
+ * \return a valid native handle if the fence can be marshalled, otherwise return null.
+ */
+ virtual native_handle_t *createNativeHandle() const = 0;
+
virtual ~Impl() = default;
Impl() = default;
+
+ static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
+ if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
+ return static_cast<type_t>(nh->data[nh->numFds]);
+ }
+ return INVALID_FENCE;
+ }
};
c2_status_t C2Fence::wait(c2_nsecs_t timeoutNs) {
@@ -115,6 +143,15 @@
return false;
}
+ virtual type_t type() const {
+ return SURFACE_FENCE;
+ }
+
+ virtual native_handle_t *createNativeHandle() const {
+ ALOG_ASSERT(false, "Cannot create native handle from surface fence");
+ return nullptr;
+ }
+
virtual ~SurfaceFenceImpl() {};
SurfaceFenceImpl(std::shared_ptr<C2SurfaceSyncMemory> syncMem, uint32_t waitId) :
@@ -143,3 +180,119 @@
}
return C2Fence();
}
+
+using namespace android;
+
+class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
+public:
+ virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
+ c2_nsecs_t timeoutMs = timeoutNs / 1000000; // convert ns to ms for Fence::wait()
+ if (timeoutMs > INT_MAX) {
+ timeoutMs = INT_MAX;
+ }
+
+ switch (mFence->wait((int)timeoutMs)) {
+ case NO_ERROR:
+ return C2_OK;
+ case -ETIME:
+ return C2_TIMED_OUT;
+ default:
+ return C2_CORRUPTED;
+ }
+ }
+
+ virtual bool valid() const {
+ return mFence->getStatus() != Fence::Status::Invalid;
+ }
+
+ virtual bool ready() const {
+ return mFence->getStatus() == Fence::Status::Signaled;
+ }
+
+ virtual int fd() const {
+ return mFence->dup();
+ }
+
+ virtual bool isHW() const {
+ return true;
+ }
+
+ virtual type_t type() const {
+ return SYNC_FENCE;
+ }
+
+ virtual native_handle_t *createNativeHandle() const {
+ native_handle_t* nh = native_handle_create(1, 1);
+ if (!nh) {
+ ALOGE("Failed to allocate native handle for sync fence");
+ return nullptr;
+ }
+ nh->data[0] = fd();
+ nh->data[1] = type();
+ return nh;
+ }
+
+ virtual ~SyncFenceImpl() {};
+
+ SyncFenceImpl(int fenceFd) :
+ mFence(sp<Fence>::make(fenceFd)) {}
+
+ static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
+ if (!nh || nh->numFds != 1 || nh->numInts != 1) {
+ ALOGE("Invalid handle for sync fence");
+ return nullptr;
+ }
+ int fd = dup(nh->data[0]);
+ std::shared_ptr<SyncFenceImpl> p = std::make_shared<SyncFenceImpl>(fd);
+ if (!p) {
+ ALOGE("Failed to allocate sync fence impl");
+ close(fd);
+ }
+ return p;
+ }
+
+private:
+ const sp<Fence> mFence;
+};
+
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+ std::shared_ptr<C2Fence::Impl> p;
+ if (fenceFd >= 0) {
+ p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
+ if (!p) {
+ ALOGE("Failed to allocate sync fence impl");
+ close(fenceFd);
+ } else if (!p->valid()) {
+ p.reset();
+ }
+ } else {
+ ALOGE("Create sync fence from invalid fd");
+ }
+ return C2Fence(p);
+}
+
+native_handle_t* _C2FenceFactory::CreateNativeHandle(const C2Fence& fence) {
+ return fence.mImpl ? fence.mImpl->createNativeHandle() : nullptr;
+}
+
+C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+ if (!handle) {
+ return C2Fence();
+ }
+ C2Fence::Impl::type_t type = C2Fence::Impl::GetTypeFromNativeHandle(handle);
+ std::shared_ptr<C2Fence::Impl> p;
+ switch (type) {
+ case C2Fence::Impl::SYNC_FENCE:
+ p = SyncFenceImpl::CreateFromNativeHandle(handle);
+ break;
+ default:
+ ALOG_ASSERT(false, "Unsupported fence type %d", type);
+ break;
+ }
+ if (p && !p->valid()) {
+ p.reset();
+ }
+ return C2Fence(p);
+}
+
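
Taken together, the factory functions imply the following round trip; a sketch assuming fenceFd is an already-acquired sync-file descriptor:

    // The fence takes ownership of fenceFd and will close it.
    C2Fence fence = _C2FenceFactory::CreateSyncFence(fenceFd);

    // Marshal: the handle gets a dup'ed fd plus the type tag in the first int.
    native_handle_t* nh = _C2FenceFactory::CreateNativeHandle(fence);
    if (nh) {
        // Unmarshal: the fd is duplicated again, so the caller still owns nh.
        C2Fence remote = _C2FenceFactory::CreateFromNativeHandle(nh);
        native_handle_close(nh);
        native_handle_delete(nh);
        remote.wait(1000000000);  // up to 1s, in nanoseconds
    }
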
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index bec978a..29aad5e 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -129,8 +129,9 @@
// Create a local BlockPoolData.
C2BufferQueueBlockPoolData(
uint32_t generation, uint64_t bqId, int32_t bqSlot,
+ const std::shared_ptr<int> &owner,
const android::sp<HGraphicBufferProducer>& producer,
- std::shared_ptr<C2SurfaceSyncMemory>, int noUse);
+ std::shared_ptr<C2SurfaceSyncMemory>);
virtual ~C2BufferQueueBlockPoolData() override;
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index d4bed26..4944115 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -28,6 +28,7 @@
struct _C2FenceFactory {
class SurfaceFenceImpl;
+ class SyncFenceImpl;
/*
* Create C2Fence for BufferQueueBased blockpool.
@@ -38,6 +39,30 @@
static C2Fence CreateSurfaceFence(
std::shared_ptr<C2SurfaceSyncMemory> syncMem,
uint32_t waitId);
+
+ /*
+ * Create C2Fence from a fence file fd.
+ *
+ * \param fenceFd Fence file descriptor.
+ * It will be owned and closed by the returned fence object.
+ */
+ static C2Fence CreateSyncFence(int fenceFd);
+
+ /**
+ * Create a native handle from a fence for marshalling.
+ *
+ * \return a non-null pointer if the fence can be marshalled, otherwise return nullptr
+ */
+ static native_handle_t* CreateNativeHandle(const C2Fence& fence);
+
+ /*
+ * Create C2Fence from a native handle.
+ *
+ * \param handle A native handle representing a fence.
+ * The fd in the native handle will be duplicated, so the caller will
+ * still own the handle and have to close it.
+ */
+ static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
};
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 63b0f39..e67e42f 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -542,7 +542,7 @@
std::make_shared<C2BufferQueueBlockPoolData>(
slotBuffer->getGenerationNumber(),
mProducerId, slot,
- mProducer, mSyncMem, 0);
+ mIgbpValidityToken, mProducer, mSyncMem);
mPoolDatas[slot] = poolData;
*block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
return C2_OK;
@@ -572,10 +572,11 @@
Impl(const std::shared_ptr<C2Allocator> &allocator)
: mInit(C2_OK), mProducerId(0), mGeneration(0),
mConsumerUsage(0), mDqFailure(0), mLastDqTs(0),
- mLastDqLogTs(0), mAllocator(allocator) {
+ mLastDqLogTs(0), mAllocator(allocator), mIgbpValidityToken(std::make_shared<int>(0)) {
}
~Impl() {
+ mIgbpValidityToken.reset();
for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
mBuffers[i].clear();
}
@@ -618,7 +619,7 @@
}
std::shared_ptr<C2BufferQueueBlockPoolData> poolData =
std::make_shared<C2BufferQueueBlockPoolData>(
- 0, (uint64_t)0, ~0, nullptr, nullptr, 0);
+ 0, (uint64_t)0, ~0, nullptr, nullptr, nullptr);
*block = _C2BlockFactory::CreateGraphicBlock(alloc, poolData);
ALOGV("allocated a buffer successfully");
@@ -694,8 +695,7 @@
mProducer = nullptr;
mProducerId = 0;
mGeneration = 0;
- ALOGW("invalid producer producer(%d), generation(%d)",
- (bool)producer, bqInformation);
+ ALOGD("configuring null producer: igbp_information(%d)", bqInformation);
}
oldMem = mSyncMem; // prevent destruction while locked.
mSyncMem = c2SyncMem;
@@ -720,6 +720,10 @@
}
}
}
+ } else {
+ // old buffers should not be cancelled since the associated IGBP
+ // is no longer valid.
+ mIgbpValidityToken = std::make_shared<int>(0);
}
for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
mBuffers[i] = buffers[i];
@@ -761,6 +765,20 @@
std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
+
+ // IGBP invalidation notification token.
+ // The buffers (C2BufferQueueBlockPoolData) hold a reference to the IGBP they
+ // belong to in order to call IGBP::cancelBuffer() when they are no longer in use.
+ //
+ // In certain cases the IGBP is no longer used by this class (in practice, by
+ // MediaCodec) and the situation needs to be addressed quickly. To achieve this,
+ // a std::shared_ptr<> is used as a token for quick IGBP invalidation
+ // notification from the buffers.
+ //
+ // The buffer side holds a reference to the token as a std::weak_ptr<>.
+ // If the token has expired, the buffers will not call IGBP::cancelBuffer()
+ // when they are no longer used.
+ std::shared_ptr<int> mIgbpValidityToken;
};
C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
@@ -776,14 +794,14 @@
C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
uint32_t generation, uint64_t bqId, int32_t bqSlot,
+ const std::shared_ptr<int>& owner,
const android::sp<HGraphicBufferProducer>& producer,
- std::shared_ptr<C2SurfaceSyncMemory> syncMem, int noUse) :
+ std::shared_ptr<C2SurfaceSyncMemory> syncMem) :
mLocal(true), mHeld(true),
mGeneration(generation), mBqId(bqId), mBqSlot(bqSlot),
mCurrentGeneration(generation), mCurrentBqId(bqId),
mTransfer(false), mAttach(false), mDisplay(false),
- mIgbp(producer), mSyncMem(syncMem) {
- (void)noUse;
+ mOwner(owner), mIgbp(producer), mSyncMem(syncMem) {
}
C2BufferQueueBlockPoolData::~C2BufferQueueBlockPoolData() {
@@ -792,7 +810,7 @@
}
if (mLocal) {
- if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId) {
+ if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId && !mOwner.expired()) {
C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
if (syncVar) {
syncVar->lock();
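
The token mechanics reduce to the standard shared_ptr/weak_ptr expiry trick; a runnable standalone sketch:

    #include <cstdio>
    #include <memory>

    struct PoolLike {
        std::shared_ptr<int> token = std::make_shared<int>(0);
        void invalidateIgbp() { token = std::make_shared<int>(0); }  // old token expires
    };

    struct BufferDataLike {
        std::weak_ptr<int> owner;
        ~BufferDataLike() {
            if (!owner.expired()) {
                std::puts("cancelBuffer()");  // IGBP still valid: return the slot
            }                                  // otherwise: IGBP gone, don't touch it
        }
    };

    int main() {
        PoolLike pool;
        { BufferDataLike b{pool.token}; }     // prints cancelBuffer()
        BufferDataLike b2{pool.token};
        pool.invalidateIgbp();                // b2's token expired; destructor is silent
    }
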
diff --git a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
index ce2bc82..5e90588 100644
--- a/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
+++ b/media/libaaudio/src/flowgraph/FlowgraphUtilities.h
@@ -39,9 +39,9 @@
static const float limneg = -1.;
if (f <= limneg) {
- return -0x80000000; /* or 0x80000000 */
+ return INT32_MIN;
} else if (f >= limpos) {
- return 0x7fffffff;
+ return INT32_MAX;
}
f *= scale;
/* integer conversion is through truncation (though int to float is not).
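
The symbolic limits do not change the numeric behavior: values outside [-1, 1] saturate to the int32 extremes, and in-range values round to nearest with ties away from zero. A standalone spot check, condensed from the patched routine:

    #include <climits>
    #include <cstdint>
    #include <cstdio>

    static int32_t clamp32FromFloat(float f) {
        static const float scale = (float)(1UL << 31);
        if (f <= -1.0f) return INT32_MIN;
        if (f >= 1.0f) return INT32_MAX;
        f *= scale;
        return f > 0 ? f + 0.5 : f - 0.5;  // truncate after biasing: ties away from 0
    }

    int main() {
        printf("%d %d %d\n",
               clamp32FromFloat(-2.0f),    // INT32_MIN
               clamp32FromFloat(0.5f),     // 1 << 30
               clamp32FromFloat(2.0f));    // INT32_MAX
    }
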
diff --git a/media/libaaudio/tests/test_flowgraph.cpp b/media/libaaudio/tests/test_flowgraph.cpp
index 913feb0..66b77eb 100644
--- a/media/libaaudio/tests/test_flowgraph.cpp
+++ b/media/libaaudio/tests/test_flowgraph.cpp
@@ -16,6 +16,9 @@
/*
* Test FlowGraph
+ *
+ * This file also tests a few different conversion techniques because
+ * they have sometimes caused compiler bugs.
*/
#include <iostream>
@@ -30,6 +33,7 @@
#include "flowgraph/SinkFloat.h"
#include "flowgraph/SinkI16.h"
#include "flowgraph/SinkI24.h"
+#include "flowgraph/SinkI32.h"
#include "flowgraph/SourceI16.h"
#include "flowgraph/SourceI24.h"
@@ -37,6 +41,22 @@
constexpr int kBytesPerI24Packed = 3;
+constexpr int kNumSamples = 8;
+constexpr std::array<float, kNumSamples> kInputFloat = {
+ 1.0f, 0.5f, -0.25f, -1.0f,
+ 0.0f, 53.9f, -87.2f, -1.02f};
+
+// Corresponding PCM values as integers.
+constexpr std::array<int16_t, kNumSamples> kExpectedI16 = {
+ INT16_MAX, 1 << 14, INT16_MIN / 4, INT16_MIN,
+ 0, INT16_MAX, INT16_MIN, INT16_MIN};
+
+constexpr std::array<int32_t, kNumSamples> kExpectedI32 = {
+ INT32_MAX, 1 << 30, INT32_MIN / 4, INT32_MIN,
+ 0, INT32_MAX, INT32_MIN, INT32_MIN};
+
+// =================================== FLOAT to I16 ==============
+
// Simple test that tries to reproduce a Clang compiler bug.
__attribute__((noinline))
void local_convert_float_to_int16(const float *input,
@@ -49,18 +69,11 @@
}
TEST(test_flowgraph, local_convert_float_int16) {
- static constexpr int kNumSamples = 8;
- static constexpr std::array<float, kNumSamples> input = {
- 1.0f, 0.5f, -0.25f, -1.0f,
- 0.0f, 53.9f, -87.2f, -1.02f};
- static constexpr std::array<int16_t, kNumSamples> expected = {
- 32767, 16384, -8192, -32768,
- 0, 32767, -32768, -32768};
std::array<int16_t, kNumSamples> output;
// Do it inline, which will probably work even with the buggy compiler.
// This validates the expected data.
- const float *in = input.data();
+ const float *in = kInputFloat.data();
int16_t *out = output.data();
output.fill(777);
for (int i = 0; i < kNumSamples; i++) {
@@ -68,38 +81,106 @@
*out++ = std::min(INT16_MAX, std::max(INT16_MIN, n)); // clip
}
for (int i = 0; i < kNumSamples; i++) {
- EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+ EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
}
// Convert audio signal using the function.
output.fill(777);
- local_convert_float_to_int16(input.data(), output.data(), kNumSamples);
+ local_convert_float_to_int16(kInputFloat.data(), output.data(), kNumSamples);
for (int i = 0; i < kNumSamples; i++) {
- EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+ EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
}
}
TEST(test_flowgraph, module_sinki16) {
static constexpr int kNumSamples = 8;
- static constexpr std::array<float, kNumSamples> input = {
- 1.0f, 0.5f, -0.25f, -1.0f,
- 0.0f, 53.9f, -87.2f, -1.02f};
- static constexpr std::array<int16_t, kNumSamples> expected = {
- 32767, 16384, -8192, -32768,
- 0, 32767, -32768, -32768};
std::array<int16_t, kNumSamples + 10> output; // larger than input
SourceFloat sourceFloat{1};
SinkI16 sinkI16{1};
- sourceFloat.setData(input.data(), kNumSamples);
+ sourceFloat.setData(kInputFloat.data(), kNumSamples);
sourceFloat.output.connect(&sinkI16.input);
output.fill(777);
int32_t numRead = sinkI16.read(output.data(), output.size());
ASSERT_EQ(kNumSamples, numRead);
for (int i = 0; i < numRead; i++) {
- EXPECT_EQ(expected.at(i), output.at(i)) << ", i = " << i;
+ EXPECT_EQ(kExpectedI16.at(i), output.at(i)) << ", i = " << i;
+ }
+}
+
+// =================================== FLOAT to I32 ==============
+// Simple test that tries to reproduce a Clang compiler bug.
+__attribute__((noinline))
+static int32_t clamp32FromFloat(float f)
+{
+ static const float scale = (float)(1UL << 31);
+ static const float limpos = 1.;
+ static const float limneg = -1.;
+
+ if (f <= limneg) {
+ return INT32_MIN;
+ } else if (f >= limpos) {
+ return INT32_MAX;
+ }
+ f *= scale;
+ /* integer conversion is through truncation (though int to float is not).
+ * ensure that we round to nearest, ties away from 0.
+ */
+ return f > 0 ? f + 0.5 : f - 0.5;
+}
+
+void local_convert_float_to_int32(const float *input,
+ int32_t *output,
+ int count) {
+ for (int i = 0; i < count; i++) {
+ *output++ = clamp32FromFloat(*input++);
+ }
+}
+
+TEST(test_flowgraph, simple_convert_float_int32) {
+ std::array<int32_t, kNumSamples> output;
+
+ // Do it inline, which will probably work even with a buggy compiler.
+ // This validates the expected data.
+ const float *in = kInputFloat.data();
+ output.fill(777);
+ int32_t *out = output.data();
+ for (int i = 0; i < kNumSamples; i++) {
+ int64_t n = (int64_t) (*in++ * 2147483648.0f);
+ *out++ = (int32_t)std::min((int64_t)INT32_MAX,
+ std::max((int64_t)INT32_MIN, n)); // clip
+ }
+ for (int i = 0; i < kNumSamples; i++) {
+ EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
+ }
+}
+
+TEST(test_flowgraph, local_convert_float_int32) {
+ std::array<int32_t, kNumSamples> output;
+ // Convert audio signal using the function.
+ output.fill(777);
+ local_convert_float_to_int32(kInputFloat.data(), output.data(), kNumSamples);
+ for (int i = 0; i < kNumSamples; i++) {
+ EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
+ }
+}
+
+TEST(test_flowgraph, module_sinki32) {
+ std::array<int32_t, kNumSamples + 10> output; // larger than input
+
+ SourceFloat sourceFloat{1};
+ SinkI32 sinkI32{1};
+
+ sourceFloat.setData(kInputFloat.data(), kNumSamples);
+ sourceFloat.output.connect(&sinkI32.input);
+
+ output.fill(777);
+ int32_t numRead = sinkI32.read(output.data(), output.size());
+ ASSERT_EQ(kNumSamples, numRead);
+ for (int i = 0; i < numRead; i++) {
+ EXPECT_EQ(kExpectedI32.at(i), output.at(i)) << ", i = " << i;
}
}
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 36f8e10..6ab8339 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -728,11 +728,13 @@
// (b) we can support re-creation of offloaded tracks
if (offloadInfo != NULL) {
mOffloadInfoCopy = *offloadInfo;
- mOffloadInfo = &mOffloadInfoCopy;
} else {
- mOffloadInfo = NULL;
memset(&mOffloadInfoCopy, 0, sizeof(audio_offload_info_t));
mOffloadInfoCopy = AUDIO_INFO_INITIALIZER;
+ mOffloadInfoCopy.format = format;
+ mOffloadInfoCopy.sample_rate = sampleRate;
+ mOffloadInfoCopy.channel_mask = channelMask;
+ mOffloadInfoCopy.stream_type = streamType;
}
mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
diff --git a/media/libaudioclient/include/media/AidlConversionUtil.h b/media/libaudioclient/include/media/AidlConversionUtil.h
index 820b7cb..8817c35 100644
--- a/media/libaudioclient/include/media/AidlConversionUtil.h
+++ b/media/libaudioclient/include/media/AidlConversionUtil.h
@@ -20,6 +20,7 @@
#include <type_traits>
#include <utility>
+#include <binder/Enums.h>
#include <binder/Status.h>
#include <error/Result.h>
@@ -270,6 +271,29 @@
namespace aidl_utils {
/**
+ * Return true if the value is valid for the AIDL enumeration.
+ */
+template <typename T>
+bool isValidEnum(T value) {
+ constexpr android::enum_range<T> er{};
+ return std::find(er.begin(), er.end(), value) != er.end();
+}
+
+// T is a "container" of enum binder types with a toString().
+template <typename T>
+std::string enumsToString(const T& t) {
+ std::string s;
+ for (const auto item : t) {
+ if (s.empty()) {
+ s = toString(item);
+ } else {
+ s.append("|").append(toString(item));
+ }
+ }
+ return s;
+}
+
+/**
* Return the equivalent Android status_t from a binder exception code.
*
* Generally one should use statusTFromBinderStatus() instead.
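
enumsToString() builds the '|'-separated strings that the new AMEDIAMETRICS head-tracking properties (later in this change) carry. A standalone sketch with a local toString() standing in for the AIDL-generated one:

    #include <string>
    #include <vector>

    enum class Mode { STATIC, DYNAMIC };
    static std::string toString(Mode m) { return m == Mode::STATIC ? "STATIC" : "DYNAMIC"; }

    template <typename T>
    std::string enumsToString(const T& t) {   // same shape as the patched helper
        std::string s;
        for (const auto item : t) {
            if (s.empty()) {
                s = toString(item);
            } else {
                s.append("|").append(toString(item));
            }
        }
        return s;
    }

    // enumsToString(std::vector<Mode>{Mode::STATIC, Mode::DYNAMIC}) == "STATIC|DYNAMIC"
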
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 1cf6ef9..9f540e6 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1344,7 +1344,6 @@
sp<IMemory> mSharedBuffer;
transfer_type mTransfer;
audio_offload_info_t mOffloadInfoCopy;
- const audio_offload_info_t* mOffloadInfo;
audio_attributes_t mAttributes;
size_t mFrameSize; // frame size in bytes
diff --git a/media/libheadtracking/Android.bp b/media/libheadtracking/Android.bp
index 1d41889..9d63f9b 100644
--- a/media/libheadtracking/Android.bp
+++ b/media/libheadtracking/Android.bp
@@ -46,6 +46,7 @@
],
export_shared_lib_headers: [
"libheadtracking",
+ "libsensor",
],
}
diff --git a/media/libheadtracking/SensorPoseProvider.cpp b/media/libheadtracking/SensorPoseProvider.cpp
index 4884ae4..8ebaf6e 100644
--- a/media/libheadtracking/SensorPoseProvider.cpp
+++ b/media/libheadtracking/SensorPoseProvider.cpp
@@ -26,7 +26,6 @@
#include <android-base/thread_annotations.h>
#include <log/log_main.h>
-#include <sensor/Sensor.h>
#include <sensor/SensorEventQueue.h>
#include <sensor/SensorManager.h>
#include <utils/Looper.h>
@@ -285,7 +284,7 @@
return DataFormat::kUnknown;
}
- std::optional<const Sensor> getSensorByHandle(int32_t handle) {
+ std::optional<const Sensor> getSensorByHandle(int32_t handle) override {
const Sensor* const* list;
ssize_t size;
diff --git a/media/libheadtracking/include/media/SensorPoseProvider.h b/media/libheadtracking/include/media/SensorPoseProvider.h
index d2a6b77..0f42074 100644
--- a/media/libheadtracking/include/media/SensorPoseProvider.h
+++ b/media/libheadtracking/include/media/SensorPoseProvider.h
@@ -20,6 +20,7 @@
#include <optional>
#include <android/sensor.h>
+#include <sensor/Sensor.h>
#include "Pose.h"
#include "Twist.h"
@@ -91,6 +92,14 @@
* @param handle The sensor handle, as provided to startSensor().
*/
virtual void stopSensor(int32_t handle) = 0;
+
+ /**
+ * Returns the sensor or nullopt if it does not exist.
+ *
+ * The Sensor object has const methods that can be used to
+ * discover properties of the sensor.
+ */
+ virtual std::optional<const Sensor> getSensorByHandle(int32_t handle) = 0;
};
} // namespace media
diff --git a/media/libmediametrics/MediaMetricsItem.cpp b/media/libmediametrics/MediaMetricsItem.cpp
index 57fc49d..ecb248d 100644
--- a/media/libmediametrics/MediaMetricsItem.cpp
+++ b/media/libmediametrics/MediaMetricsItem.cpp
@@ -26,6 +26,7 @@
#include <unordered_map>
#include <binder/Parcel.h>
+#include <cutils/multiuser.h>
#include <cutils/properties.h>
#include <utils/Errors.h>
#include <utils/Log.h>
@@ -343,7 +344,8 @@
// now.
// TODO(b/190151205): Either allow the HotwordDetectionService to access MediaMetrics or
// make this disabling specific to that process.
- if (uid >= AID_ISOLATED_START && uid <= AID_ISOLATED_END) {
+ uid_t appid = multiuser_get_app_id(uid);
+ if (appid >= AID_ISOLATED_START && appid <= AID_ISOLATED_END) {
return false;
}
break;
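
The reason the raw-uid comparison was wrong: on a secondary user, uid = userId * 100000 + appId, so an isolated process of user 10 has a uid near 1,099,000 and falls outside AID_ISOLATED_START..AID_ISOLATED_END (99000..99999). A standalone sketch of the arithmetic; multiuser_get_app_id() is the uid modulo the per-user offset:

    #include <cstdio>

    using uid_t_ = unsigned;  // local alias to stay self-contained

    constexpr uid_t_ AID_USER_OFFSET = 100000;  // per-user uid range size
    constexpr uid_t_ AID_ISOLATED_START = 99000, AID_ISOLATED_END = 99999;

    static uid_t_ app_id(uid_t_ uid) { return uid % AID_USER_OFFSET; }

    int main() {
        uid_t_ uid = 10 * AID_USER_OFFSET + 99050;  // isolated process of user 10
        printf("raw check: %d  appid check: %d\n",
               uid >= AID_ISOLATED_START && uid <= AID_ISOLATED_END,   // 0: missed
               app_id(uid) >= AID_ISOLATED_START
                       && app_id(uid) <= AID_ISOLATED_END);            // 1: caught
    }
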
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 90472eb..1c30510 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -51,6 +51,12 @@
// The AudioRecord key appends the "trackId" to the prefix.
#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD AMEDIAMETRICS_KEY_PREFIX_AUDIO "record."
+// The Audio Sensor key appends the sensor handle integer.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_SENSOR AMEDIAMETRICS_KEY_PREFIX_AUDIO "sensor."
+
+// The Audio Spatializer key appends the spatializerId (currently 0)
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER AMEDIAMETRICS_KEY_PREFIX_AUDIO "spatializer."
+
// The AudioStream key appends the "streamId" to the prefix.
#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM AMEDIAMETRICS_KEY_PREFIX_AUDIO "stream."
@@ -135,13 +141,17 @@
#define AMEDIAMETRICS_PROP_FLAGS "flags"
#define AMEDIAMETRICS_PROP_FRAMECOUNT "frameCount" // int32
+#define AMEDIAMETRICS_PROP_HEADTRACKINGMODES "headTrackingModes" // string, '|' separated, like modes
#define AMEDIAMETRICS_PROP_INPUTDEVICES "inputDevices" // string value
#define AMEDIAMETRICS_PROP_INTERNALTRACKID "internalTrackId" // int32
#define AMEDIAMETRICS_PROP_INTERVALCOUNT "intervalCount" // int32
#define AMEDIAMETRICS_PROP_LATENCYMS "latencyMs" // double value
+#define AMEDIAMETRICS_PROP_LEVELS "levels" // string, '|' separated levels
#define AMEDIAMETRICS_PROP_LOGSESSIONID "logSessionId" // hex string, "" none
#define AMEDIAMETRICS_PROP_METHODCODE "methodCode" // int64_t an int indicating method
#define AMEDIAMETRICS_PROP_METHODNAME "methodName" // string method name
+#define AMEDIAMETRICS_PROP_MODE "mode" // string
+#define AMEDIAMETRICS_PROP_MODES "modes" // string, '|' separated modes
#define AMEDIAMETRICS_PROP_NAME "name" // string value
#define AMEDIAMETRICS_PROP_ORIGINALFLAGS "originalFlags" // int32
#define AMEDIAMETRICS_PROP_OUTPUTDEVICES "outputDevices" // string value
@@ -241,6 +251,11 @@
#define AMEDIAMETRICS_PROP_CALLERNAME_VALUE_TONEGENERATOR "tonegenerator" // dial tones
#define AMEDIAMETRICS_PROP_CALLERNAME_VALUE_UNKNOWN "unknown" // callerName not set
+// Many properties are available for the sensor.
+// The mode is how the sensor is being currently used.
+#define AMEDIAMETRICS_PROP_MODE_VALUE_HEAD "head" // used for head tracking
+#define AMEDIAMETRICS_PROP_MODE_VALUE_SCREEN "screen" // used for screen
+
// MediaMetrics errors are expected to cover the following sources:
// https://docs.oracle.com/javase/7/docs/api/java/lang/RuntimeException.html
// https://docs.oracle.com/javase/7/docs/api/java/lang/Exception.html
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index a71631a..8e19d02 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -611,29 +611,42 @@
IPCThreadState::self()->getCallingUid());
result.append(buffer);
} else {
- Mutex::Autolock lock(mLock);
- for (int i = 0, n = mClients.size(); i < n; ++i) {
- sp<Client> c = mClients[i].promote();
- if (c != 0) c->dump(fd, args);
- clients.add(c);
- }
- if (mMediaRecorderClients.size() == 0) {
- result.append(" No media recorder client\n\n");
- } else {
+ {
+ // capture clients under lock
+ Mutex::Autolock lock(mLock);
+ for (int i = 0, n = mClients.size(); i < n; ++i) {
+ sp<Client> c = mClients[i].promote();
+ if (c != nullptr) {
+ clients.add(c);
+ }
+ }
+
for (int i = 0, n = mMediaRecorderClients.size(); i < n; ++i) {
sp<MediaRecorderClient> c = mMediaRecorderClients[i].promote();
- if (c != 0) {
- snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
- c->mAttributionSource.pid);
- result.append(buffer);
- write(fd, result.string(), result.size());
- result = "\n";
- c->dump(fd, args);
+ if (c != nullptr) {
mediaRecorderClients.add(c);
}
}
}
+ // dump clients outside of lock
+ for (const sp<Client> &c : clients) {
+ c->dump(fd, args);
+ }
+ if (mediaRecorderClients.size() == 0) {
+ result.append(" No media recorder client\n\n");
+ } else {
+ for (const sp<MediaRecorderClient> &c : mediaRecorderClients) {
+ snprintf(buffer, 255, " MediaRecorderClient pid(%d)\n",
+ c->mAttributionSource.pid);
+ result.append(buffer);
+ write(fd, result.string(), result.size());
+ result = "\n";
+ c->dump(fd, args);
+ }
+ }
+
result.append(" Files opened and/or mapped:\n");
snprintf(buffer, SIZE - 1, "/proc/%d/maps", getpid());
FILE *f = fopen(buffer, "r");
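
The restructured dump() is an instance of a common pattern: promote weak references into strong ones while holding the lock, then run the slow dump calls after the lock is dropped, so a wedged client dump cannot stall every other user of mLock. A generic sketch of the pattern, with std::weak_ptr standing in for wp<>:

    #include <memory>
    #include <mutex>
    #include <vector>

    struct Client { void dump() const { /* slow I/O, may take other locks */ } };

    std::mutex gLock;
    std::vector<std::weak_ptr<Client>> gClients;

    void dumpAll() {
        std::vector<std::shared_ptr<Client>> alive;
        {
            // Capture clients under lock; promote weak references.
            std::lock_guard<std::mutex> guard(gLock);
            for (const auto& weak : gClients) {
                if (auto c = weak.lock()) alive.push_back(std::move(c));
            }
        }
        // Dump outside the lock so a blocked dump cannot stall other callers.
        for (const auto& c : alive) c->dump();
    }
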
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index c6b22a6..9b4fc8f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2893,6 +2893,38 @@
in.writeInt32(recvTimeUs & 0xFFFFFFFF);
break;
}
+ case ARTPSource::RTCP_RR:
+ {
+ int64_t recvTimeUs;
+ int32_t senderId;
+ int32_t ssrc;
+ int32_t fraction;
+ int32_t lost;
+ int32_t lastSeq;
+ int32_t jitter;
+ int32_t lsr;
+ int32_t dlsr;
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ CHECK(msg->findInt32("rtcp-rr-ssrc", &senderId));
+ CHECK(msg->findInt32("rtcp-rrb-ssrc", &ssrc));
+ CHECK(msg->findInt32("rtcp-rrb-fraction", &fraction));
+ CHECK(msg->findInt32("rtcp-rrb-lost", &lost));
+ CHECK(msg->findInt32("rtcp-rrb-lastSeq", &lastSeq));
+ CHECK(msg->findInt32("rtcp-rrb-jitter", &jitter));
+ CHECK(msg->findInt32("rtcp-rrb-lsr", &lsr));
+ CHECK(msg->findInt32("rtcp-rrb-dlsr", &dlsr));
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ in.writeInt32(senderId);
+ in.writeInt32(ssrc);
+ in.writeInt32(fraction);
+ in.writeInt32(lost);
+ in.writeInt32(lastSeq);
+ in.writeInt32(jitter);
+ in.writeInt32(lsr);
+ in.writeInt32(dlsr);
+ break;
+ }
case ARTPSource::RTCP_TSFB: // RTCP TSFB
case ARTPSource::RTCP_PSFB: // RTCP PSFB
case ARTPSource::RTP_AUTODOWN:
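
The RTCP notification is marshalled as a stream of 32-bit writes, which is why the 64-bit receive timestamp goes out as a high half and a low half. A standalone sketch of the split and the matching reassembly a reader would perform (no Parcel dependency; note the unsigned cast on the low half):

    #include <cassert>
    #include <cstdint>

    // Split an int64 the way the notification code writes it.
    void split(int64_t v, int32_t* hi, int32_t* lo) {
        *hi = static_cast<int32_t>(v >> 32);
        *lo = static_cast<int32_t>(v & 0xFFFFFFFF);
    }

    // The reader reassembles from two int32 reads; the low half must be
    // treated as unsigned to avoid sign extension.
    int64_t join(int32_t hi, int32_t lo) {
        return (static_cast<int64_t>(hi) << 32) |
               static_cast<int64_t>(static_cast<uint32_t>(lo));
    }

    int main() {
        const int64_t recvTimeUs = 1234567890123456LL;
        int32_t hi, lo;
        split(recvTimeUs, &hi, &lo);
        assert(join(hi, lo) == recvTimeUs);
        return 0;
    }
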
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 5a27362..9b0b2a9 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -120,7 +120,9 @@
static const char *kCodecParsedColorStandard = "android.media.mediacodec.parsed-color-standard";
static const char *kCodecParsedColorRange = "android.media.mediacodec.parsed-color-range";
static const char *kCodecParsedColorTransfer = "android.media.mediacodec.parsed-color-transfer";
-static const char *kCodecHDRMetadataFlags = "android.media.mediacodec.hdr-metadata-flags";
+static const char *kCodecHDRStaticInfo = "android.media.mediacodec.hdr-static-info";
+static const char *kCodecHDR10PlusInfo = "android.media.mediacodec.hdr10-plus-info";
+static const char *kCodecHDRFormat = "android.media.mediacodec.hdr-format";
// Min/Max QP before shaping
static const char *kCodecOriginalVideoQPIMin = "android.media.mediacodec.original-video-qp-i-min";
@@ -805,7 +807,9 @@
mWidth(0),
mHeight(0),
mRotationDegrees(0),
- mHDRMetadataFlags(0),
+ mConfigColorTransfer(-1),
+ mHDRStaticInfo(false),
+ mHDR10PlusInfo(false),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
@@ -951,13 +955,71 @@
mIndexOfFirstFrameWhenLowLatencyOn);
}
- mediametrics_setInt32(mMetricsHandle, kCodecHDRMetadataFlags, mHDRMetadataFlags);
+ mediametrics_setInt32(mMetricsHandle, kCodecHDRStaticInfo, mHDRStaticInfo ? 1 : 0);
+ mediametrics_setInt32(mMetricsHandle, kCodecHDR10PlusInfo, mHDR10PlusInfo ? 1 : 0);
#if 0
// enable for short term, only while debugging
updateEphemeralMediametrics(mMetricsHandle);
#endif
}
+void MediaCodec::updateHDRFormatMetric() {
+ int32_t profile = -1;
+ AString mediaType;
+ if (mOutputFormat->findInt32(KEY_PROFILE, &profile)
+ && mOutputFormat->findString("mime", &mediaType)) {
+ hdr_format hdrFormat = getHDRFormat(profile, mConfigColorTransfer, mediaType);
+ mediametrics_setInt32(mMetricsHandle, kCodecHDRFormat, static_cast<int>(hdrFormat));
+ }
+}
+
+hdr_format MediaCodec::getHDRFormat(const int32_t profile, const int32_t transfer,
+ const AString &mediaType) {
+ switch (transfer) {
+ case COLOR_TRANSFER_ST2084:
+ if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_VP9)) {
+ switch (profile) {
+ case VP9Profile2HDR:
+ return HDR_FORMAT_HDR10;
+ case VP9Profile2HDR10Plus:
+ return HDR_FORMAT_HDR10PLUS;
+ default:
+ return HDR_FORMAT_NONE;
+ }
+ } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_AV1)) {
+ switch (profile) {
+ case AV1ProfileMain10HDR10:
+ return HDR_FORMAT_HDR10;
+ case AV1ProfileMain10HDR10Plus:
+ return HDR_FORMAT_HDR10PLUS;
+ default:
+ return HDR_FORMAT_NONE;
+ }
+ } else if (mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ switch (profile) {
+ case HEVCProfileMain10HDR10:
+ return HDR_FORMAT_HDR10;
+ case HEVCProfileMain10HDR10Plus:
+ return HDR_FORMAT_HDR10PLUS;
+ default:
+ return HDR_FORMAT_NONE;
+ }
+ } else {
+ return HDR_FORMAT_NONE;
+ }
+ case COLOR_TRANSFER_HLG:
+ if (!mediaType.equalsIgnoreCase(MEDIA_MIMETYPE_VIDEO_DOLBY_VISION)) {
+ return HDR_FORMAT_HLG;
+ } else {
+ // TODO: DOLBY format
+ return HDR_FORMAT_NONE;
+ }
+ default:
+ return HDR_FORMAT_NONE;
+ }
+}
+
void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
ALOGD("MediaCodec::updateEphemeralMediametrics()");
@@ -1647,12 +1709,13 @@
}
int32_t colorTransfer = -1;
if (format->findInt32(KEY_COLOR_TRANSFER, &colorTransfer)) {
+ mConfigColorTransfer = colorTransfer;
mediametrics_setInt32(mMetricsHandle, kCodecConfigColorTransfer, colorTransfer);
}
HDRStaticInfo info;
if (ColorUtils::getHDRStaticInfoFromFormat(format, &info)
&& ColorUtils::isHDRStaticInfoValid(&info)) {
- mHDRMetadataFlags |= kFlagHDRStaticInfo;
+ mHDRStaticInfo = true;
}
}
@@ -3307,6 +3370,8 @@
CHECK(msg->findMessage("input-format", &mInputFormat));
CHECK(msg->findMessage("output-format", &mOutputFormat));
+ updateHDRFormatMetric();
+
// limit to confirming the opt-in behavior to minimize any behavioral change
if (mSurface != nullptr && !mAllowFrameDroppingBySurface) {
// signal frame dropping mode in the input format as this may also be
@@ -3388,6 +3453,7 @@
mComponentName.c_str(),
mInputFormat->debugString(4).c_str(),
mOutputFormat->debugString(4).c_str());
+ updateHDRFormatMetric();
CHECK(obj != NULL);
response->setObject("input-surface", obj);
mHaveInputSurface = true;
@@ -3412,6 +3478,7 @@
if (!msg->findInt32("err", &err)) {
CHECK(msg->findMessage("input-format", &mInputFormat));
CHECK(msg->findMessage("output-format", &mOutputFormat));
+ updateHDRFormatMetric();
mHaveInputSurface = true;
} else {
response->setInt32("err", err);
@@ -4155,26 +4222,29 @@
break;
}
- if (asyncNotify != nullptr) {
- if (mSurface != NULL) {
- if (!mReleaseSurface) {
- uint64_t usage = 0;
- if (mSurface->getConsumerUsage(&usage) != OK) {
- usage = 0;
- }
- mReleaseSurface.reset(new ReleaseSurface(usage));
+ bool forceSync = false;
+ if (asyncNotify != nullptr && mSurface != NULL) {
+ if (!mReleaseSurface) {
+ uint64_t usage = 0;
+ if (mSurface->getConsumerUsage(&usage) != OK) {
+ usage = 0;
}
- if (mSurface != mReleaseSurface->getSurface()) {
- status_t err = connectToSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
- if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
- err = mCodec->setSurface(mReleaseSurface->getSurface());
- ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
- }
- if (err == OK) {
- (void)disconnectFromSurface();
- mSurface = mReleaseSurface->getSurface();
- }
+ mReleaseSurface.reset(new ReleaseSurface(usage));
+ }
+ if (mSurface != mReleaseSurface->getSurface()) {
+ status_t err = connectToSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error connecting to release surface: err = %d", err);
+ if (err == OK && !(mFlags & kFlagUsesSoftwareRenderer)) {
+ err = mCodec->setSurface(mReleaseSurface->getSurface());
+ ALOGW_IF(err != OK, "error setting release surface: err = %d", err);
+ }
+ if (err == OK) {
+ (void)disconnectFromSurface();
+ mSurface = mReleaseSurface->getSurface();
+ } else {
+ // We were not able to switch the surface, so force
+ // synchronous release.
+ forceSync = true;
}
}
}
@@ -4198,8 +4268,10 @@
}
if (asyncNotify != nullptr) {
- mResourceManagerProxy->markClientForPendingRemoval();
- postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ if (!forceSync) {
+ mResourceManagerProxy->markClientForPendingRemoval();
+ postPendingRepliesAndDeferredMessages("kWhatRelease:async");
+ }
asyncNotifyPost.clear();
mAsyncReleaseCompleteNotification = asyncNotify;
}
@@ -4583,6 +4655,7 @@
buffer->meta()->setObject("changedKeys", changedKeys);
}
mOutputFormat = format;
+ updateHDRFormatMetric();
mapFormat(mComponentName, format, nullptr, true);
ALOGV("[%s] output format changed to: %s",
mComponentName.c_str(), mOutputFormat->debugString(4).c_str());
@@ -4609,7 +4682,7 @@
if (ColorUtils::getHDRStaticInfoFromFormat(mOutputFormat, &info)) {
setNativeWindowHdrMetadata(mSurface.get(), &info);
if (ColorUtils::isHDRStaticInfoValid(&info)) {
- mHDRMetadataFlags |= kFlagHDRStaticInfo;
+ mHDRStaticInfo = true;
}
}
}
@@ -4619,7 +4692,7 @@
&& hdr10PlusInfo != nullptr && hdr10PlusInfo->size() > 0) {
native_window_set_buffers_hdr10_plus_metadata(mSurface.get(),
hdr10PlusInfo->size(), hdr10PlusInfo->data());
- mHDRMetadataFlags |= kFlagHDR10PlusInfo;
+ mHDR10PlusInfo = true;
}
if (mime.startsWithIgnoreCase("video/")) {
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index f5af50d..a00a3e6 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -25,6 +25,7 @@
#include <media/hardware/CryptoAPI.h>
#include <media/MediaCodecInfo.h>
#include <media/MediaMetrics.h>
+#include <media/MediaProfiles.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/FrameRenderTracker.h>
#include <utils/Vector.h>
@@ -451,11 +452,12 @@
int32_t mRotationDegrees;
int32_t mAllowFrameDroppingBySurface;
- uint32_t mHDRMetadataFlags; /* bitmask of kFlagHDR* */
- enum {
- kFlagHDRStaticInfo = 1 << 0,
- kFlagHDR10PlusInfo = 1 << 1,
- };
+ int32_t mConfigColorTransfer;
+ bool mHDRStaticInfo;
+ bool mHDR10PlusInfo;
+ void updateHDRFormatMetric();
+ hdr_format getHDRFormat(const int32_t profile, const int32_t transfer,
+ const AString &mediaType);
// initial create parameters
AString mInitName;
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 7d72510..a61f48f 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "ARTPConnection"
#include <utils/Log.h>
+#include <media/stagefright/rtsp/ARTPAssembler.h>
#include <media/stagefright/rtsp/ARTPConnection.h>
#include <media/stagefright/rtsp/ARTPSource.h>
#include <media/stagefright/rtsp/ASessionDescription.h>
@@ -41,6 +42,10 @@
return data[0] << 8 | data[1];
}
+static uint32_t u24at(const uint8_t *data) {
+ return u16at(data) << 8 | data[2];
+}
+
static uint32_t u32at(const uint8_t *data) {
return u16at(data) << 16 | u16at(&data[2]);
}
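
u24at() builds the big-endian 24-bit cumulative-loss field on top of the existing 16-bit reader, so the 16-bit prefix is shifted by 8 before the final byte is or'ed in. One subtlety worth noting: RFC 3550 defines cumulative packets lost as a signed 24-bit value (duplicates can make it negative), so a consumer that cares about negative counts must sign-extend. A sketch of both reads:

    #include <cassert>
    #include <cstdint>

    static uint32_t u16at(const uint8_t* data) { return data[0] << 8 | data[1]; }
    static uint32_t u24at(const uint8_t* data) { return u16at(data) << 8 | data[2]; }

    // Sign-extend a 24-bit two's-complement value into int32 (per RFC 3550,
    // cumulative number of packets lost is signed).
    static int32_t s24(uint32_t v) {
        return (v & 0x800000) ? static_cast<int32_t>(v | 0xFF000000) : static_cast<int32_t>(v);
    }

    int main() {
        const uint8_t pos[3] = {0x00, 0x01, 0x02}; // 0x000102 = 258
        const uint8_t neg[3] = {0xFF, 0xFF, 0xFE}; // -2 as 24-bit two's complement
        assert(u24at(pos) == 258);
        assert(s24(u24at(neg)) == -2);
        return 0;
    }
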
@@ -877,11 +882,15 @@
switch (data[1]) {
case 200:
{
- parseSR(s, data, headerLength);
+ parseSenderReport(s, data, headerLength);
break;
}
case 201: // RR
+ {
+ parseReceiverReport(s, data, headerLength);
+ break;
+ }
case 202: // SDES
case 204: // APP
break;
@@ -940,18 +949,44 @@
return OK;
}
-status_t ARTPConnection::parseSR(
+status_t ARTPConnection::parseSenderReport(
StreamInfo *s, const uint8_t *data, size_t size) {
- size_t RC = data[0] & 0x1f;
-
- if (size < (7 + RC * 6) * 4) {
- // Packet too short for the minimal SR header.
+ ALOG_ASSERT(size >= 1, "parseSenderReport: invalid packet size.");
+ size_t receptionReportCount = data[0] & 0x1f;
+ if (size < (7 + (receptionReportCount * 6)) * 4) {
+ // Packet too short for the minimal sender report header.
return -1;
}
- uint32_t id = u32at(&data[4]);
+ int64_t recvTimeUs = ALooper::GetNowUs();
+ uint32_t senderId = u32at(&data[4]);
uint64_t ntpTime = u64at(&data[8]);
uint32_t rtpTime = u32at(&data[16]);
+ uint32_t pktCount = u32at(&data[20]);
+ uint32_t octCount = u32at(&data[24]);
+
+ ALOGD("SR received: ssrc=0x%08x, rtpTime%u == ntpTime %llu, pkt=%u, oct=%u",
+ senderId, rtpTime, (unsigned long long)ntpTime, pktCount, octCount);
+
+ sp<ARTPSource> source = findSource(s, senderId);
+ source->timeUpdate(recvTimeUs, rtpTime, ntpTime);
+
+ for (int32_t i = 0; i < receptionReportCount; i++) {
+ int32_t offset = 28 + (i * 24);
+ parseReceptionReportBlock(s, recvTimeUs, senderId, data + offset, size - offset);
+ }
+
+ return 0;
+}
+
+status_t ARTPConnection::parseReceiverReport(
+ StreamInfo *s, const uint8_t *data, size_t size) {
+ ALOG_ASSERT(size >= 1, "parseReceiverReport: invalid packet size.");
+ size_t receptionReportCount = data[0] & 0x1f;
+ if (size < (2 + (receptionReportCount * 6)) * 4) {
+ // Packet too short for the minimal receiver report header.
+ return -1;
+ }
#if 0
ALOGI("XXX timeUpdate: ssrc=0x%08x, rtpTime %u == ntpTime %.3f",
@@ -959,10 +994,40 @@
rtpTime,
(ntpTime >> 32) + (double)(ntpTime & 0xffffffff) / (1ll << 32));
#endif
+ int64_t recvTimeUs = ALooper::GetNowUs();
+ uint32_t senderId = u32at(&data[4]);
- sp<ARTPSource> source = findSource(s, id);
+ for (int i = 0; i < receptionReportCount; i++) {
+ int32_t offset = 8 + (i * 24);
+ parseReceptionReportBlock(s, recvTimeUs, senderId, data + offset, size - offset);
+ }
- source->timeUpdate(rtpTime, ntpTime);
+ return 0;
+}
+
+status_t ARTPConnection::parseReceptionReportBlock(
+ StreamInfo *s, int64_t recvTimeUs, uint32_t senderId, const uint8_t *data, size_t size) {
+ ALOG_ASSERT(size >= 24, "parseReceptionReportBlock: invalid packet size.");
+ if (size < 24) {
+ // remaining size is smaller than reception report block size.
+ return -1;
+ }
+
+ uint32_t rbId = u32at(&data[0]);
+ uint32_t fLost = data[4];
+ int32_t cumLost = u24at(&data[5]);
+ uint32_t ehSeq = u32at(&data[8]);
+ uint32_t jitter = u32at(&data[12]);
+ uint32_t lsr = u32at(&data[16]);
+ uint32_t dlsr = u32at(&data[20]);
+
+ ALOGD("Reception Report Block: t:%llu sid:%u rid:%u fl:%u cl:%u hs:%u jt:%u lsr:%u dlsr:%u",
+ (unsigned long long)recvTimeUs, senderId, rbId, fLost, cumLost,
+ ehSeq, jitter, lsr, dlsr);
+ sp<ARTPSource> source = findSource(s, senderId);
+ sp<ReceptionReportBlock> rrb = new ReceptionReportBlock(
+ rbId, fLost, cumLost, ehSeq, jitter, lsr, dlsr);
+ source->processReceptionReportBlock(recvTimeUs, senderId, rrb);
return 0;
}
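
The lsr/dlsr fields exist so the original SR sender can compute round-trip time: RTT = arrival - LSR - DLSR, all expressed in RTCP's 1/65536-second "middle 32 bits of NTP" format (RFC 3550 section 6.4.1). A worked sketch of that arithmetic; the computation itself is not part of this change, which only surfaces the fields:

    #include <cstdint>
    #include <cstdio>

    // RTCP expresses these in units of 1/65536 seconds (16.16 fixed point).
    uint32_t rttUnitsToMs(uint32_t rttUnits) {
        return static_cast<uint32_t>((static_cast<uint64_t>(rttUnits) * 1000) >> 16);
    }

    int main() {
        uint32_t arrival = 0x00140000; // report block arrived at 20.0 s
        uint32_t lsr     = 0x00130000; // our SR was sent at 19.0 s
        uint32_t dlsr    = 0x00008000; // receiver held it for 0.5 s
        uint32_t rtt     = arrival - lsr - dlsr; // 0.5 s in 16.16 fixed point
        std::printf("rtt = %u ms\n", rttUnitsToMs(rtt)); // prints 500
        return 0;
    }
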
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 5f62b9d..717d8af 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -132,10 +132,10 @@
}
}
-void ARTPSource::timeUpdate(uint32_t rtpTime, uint64_t ntpTime) {
+void ARTPSource::timeUpdate(int64_t recvTimeUs, uint32_t rtpTime, uint64_t ntpTime) {
mLastSrRtpTime = rtpTime;
mLastSrNtpTime = ntpTime;
- mLastSrUpdateTimeUs = ALooper::GetNowUs();
+ mLastSrUpdateTimeUs = recvTimeUs;
sp<AMessage> notify = mNotify->dup();
notify->setInt32("time-update", true);
@@ -143,7 +143,30 @@
notify->setInt64("ntp-time", ntpTime);
notify->setInt32("rtcp-event", 1);
notify->setInt32("payload-type", RTCP_SR);
- notify->setInt64("recv-time-us", mLastSrUpdateTimeUs);
+ notify->setInt64("recv-time-us", recvTimeUs);
+ notify->post();
+}
+
+void ARTPSource::processReceptionReportBlock(
+ int64_t recvTimeUs, uint32_t senderId, sp<ReceptionReportBlock> rrb) {
+ mLastRrUpdateTimeUs = recvTimeUs;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("rtcp-event", 1);
+ // A Reception Report Block (RRB) can be included in both a Sender Report and a
+ // Receiver Report, but in either case it carries packet reception statistics.
+ // We therefore report an RRB as RR, since there is no meaningful difference
+ // between an RRB (Reception Report Block) and an RR (Receiver Report) here.
+ notify->setInt32("payload-type", RTCP_RR);
+ notify->setInt64("recv-time-us", recvTimeUs);
+ notify->setInt32("rtcp-rr-ssrc", senderId);
+ notify->setInt32("rtcp-rrb-ssrc", rrb->ssrc);
+ notify->setInt32("rtcp-rrb-fraction", rrb->fraction);
+ notify->setInt32("rtcp-rrb-lost", rrb->lost);
+ notify->setInt32("rtcp-rrb-lastSeq", rrb->lastSeq);
+ notify->setInt32("rtcp-rrb-jitter", rrb->jitter);
+ notify->setInt32("rtcp-rrb-lsr", rrb->lsr);
+ notify->setInt32("rtcp-rrb-dlsr", rrb->dlsr);
notify->post();
}
@@ -453,7 +476,8 @@
data[18] = (mHighestSeqNumber >> 8) & 0xff;
data[19] = mHighestSeqNumber & 0xff;
- uint32_t jitterTime = 0;
+ uint32_t jitterTimeMs = (uint32_t)getInterArrivalJitterTimeMs();
+ uint32_t jitterTime = jitterTimeMs * mClockRate / 1000;
data[20] = jitterTime >> 24; // Interarrival jitter
data[21] = (jitterTime >> 16) & 0xff;
data[22] = (jitterTime >> 8) & 0xff;
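
RTCP reports carry interarrival jitter in RTP timestamp units rather than wall-clock time, hence the scaling by the stream clock rate above. A one-assert example of the conversion, assuming a 90 kHz clock as is typical for video:

    #include <cassert>
    #include <cstdint>

    uint32_t jitterMsToRtpUnits(uint32_t jitterMs, uint32_t clockRate) {
        return jitterMs * clockRate / 1000;
    }

    int main() {
        // 12 ms of interarrival jitter on a 90 kHz video clock:
        assert(jitterMsToRtpUnits(12, 90000) == 1080);
        return 0;
    }
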
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
index f959c40..39161b6 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPAssembler.h
@@ -105,6 +105,27 @@
(long long)rtp, (long long)play, (long long)exp, isExp);
}
+struct ReceptionReportBlock : public RefBase {
+ uint32_t ssrc; // ssrc of data source being reported
+ uint32_t fraction; // fraction lost since last SR/RR
+ int32_t lost; // cumul. no. pkts lost (signed!)
+ uint32_t lastSeq; // extended last seq. no. received
+ uint32_t jitter; // interarrival jitter
+ uint32_t lsr; // last SR packet from this source
+ uint32_t dlsr; // delay since last SR packet
+
+ ReceptionReportBlock(uint32_t ssrc, uint32_t fraction, int32_t lost, uint32_t lastSeq,
+ uint32_t jitter, uint32_t lsr, uint32_t dlsr) {
+ this->ssrc = ssrc;
+ this->fraction = fraction;
+ this->lost = lost;
+ this->lastSeq = lastSeq;
+ this->jitter = jitter;
+ this->lsr = lsr;
+ this->dlsr = dlsr;
+ }
+};
+
} // namespace android
#endif // A_RTP_ASSEMBLER_H_
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
index 36cca31..73d2866 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPConnection.h
@@ -110,7 +110,10 @@
status_t parseRTP(StreamInfo *info, const sp<ABuffer> &buffer);
status_t parseRTPExt(StreamInfo *s, const uint8_t *extData, size_t extLen, int32_t *cvoDegrees);
status_t parseRTCP(StreamInfo *info, const sp<ABuffer> &buffer);
- status_t parseSR(StreamInfo *info, const uint8_t *data, size_t size);
+ status_t parseSenderReport(StreamInfo *info, const uint8_t *data, size_t size);
+ status_t parseReceiverReport(StreamInfo *info, const uint8_t *data, size_t size);
+ status_t parseReceptionReportBlock(StreamInfo *info,
+ int64_t recvTimeUs, uint32_t senderId, const uint8_t *data, size_t size);
status_t parseTSFB(StreamInfo *info, const uint8_t *data, size_t size);
status_t parsePSFB(StreamInfo *info, const uint8_t *data, size_t size);
status_t parseBYE(StreamInfo *info, const uint8_t *data, size_t size);
diff --git a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
index 4984e91..e9b4942 100644
--- a/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/include/media/stagefright/rtsp/ARTPSource.h
@@ -36,6 +36,7 @@
struct ABuffer;
struct AMessage;
struct ARTPAssembler;
+struct ReceptionReportBlock;
struct ASessionDescription;
struct ARTPSource : public RefBase {
@@ -59,8 +60,10 @@
void processRTPPacket(const sp<ABuffer> &buffer);
void processRTPPacket();
+ void processReceptionReportBlock(
+ int64_t recvTimeUs, uint32_t senderId, sp<ReceptionReportBlock> rrb);
void timeReset();
- void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
+ void timeUpdate(int64_t recvTimeUs, uint32_t rtpTime, uint64_t ntpTime);
void byeReceived();
List<sp<ABuffer> > *queue() { return &mQueue; }
@@ -135,6 +138,8 @@
uint64_t mLastSrNtpTime;
int64_t mLastSrUpdateTimeUs;
+ int64_t mLastRrUpdateTimeUs;
+
bool mIsFirstRtpRtcpGap;
double mAvgRtpRtcpGapMs;
double mAvgUnderlineDelayMs;
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index 75a1b22..0848eb3 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -189,15 +189,18 @@
// Generate audio HAL processes tombstones and allow time to complete
// before forcing restart
std::vector<pid_t> pids = TimeCheck::getAudioHalPids();
+ std::string halPids = "HAL pids [ ";
if (pids.size() != 0) {
for (const auto& pid : pids) {
ALOGI("requesting tombstone for pid: %d", pid);
+ halPids.append(std::to_string(pid)).append(" ");
sigqueue(pid, DEBUGGER_SIGNAL, {.sival_int = 0});
}
sleep(1);
} else {
ALOGI("No HAL process pid available, skipping tombstones");
}
+ halPids.append("]");
LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag.c_str());
@@ -206,6 +209,7 @@
.append(tag)
.append(" scheduled ").append(formatTime(startTime))
.append(" on thread ").append(std::to_string(tid)).append("\n")
+ .append(halPids).append("\n")
.append(summary);
// Note: LOG_ALWAYS_FATAL limits the size of the string - per log/log.h:
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 40a1eaa..8eefe77 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -571,12 +571,6 @@
lHalConfig.offload_info.channel_mask = lHalConfig.channel_mask;
lHalConfig.offload_info.format = lHalConfig.format;
lHalConfig.offload_info.stream_type = stream;
- lHalConfig.offload_info.duration_us = -1;
- lHalConfig.offload_info.has_video = true; // conservative
- lHalConfig.offload_info.is_streaming = true; // likely
- lHalConfig.offload_info.encapsulation_mode = lHalConfig.offload_info.encapsulation_mode;
- lHalConfig.offload_info.content_id = lHalConfig.offload_info.content_id;
- lHalConfig.offload_info.sync_id = lHalConfig.offload_info.sync_id;
}
audio_config_base_t lMixerConfig;
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 95aacb2..d4d514d 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -509,7 +509,7 @@
if (device != nullptr) break;
}
device = availableDevices.getFirstExistingDevice({
- AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+ AUDIO_DEVICE_IN_WIRED_HEADSET,
AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
AUDIO_DEVICE_IN_BLUETOOTH_BLE, AUDIO_DEVICE_IN_BUILTIN_MIC});
break;
@@ -563,7 +563,7 @@
// because sometimes the user wants to do voice search with a BT remote
// even if BUILTIN_MIC is available.
device = availableDevices.getFirstExistingDevice({
- AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+ AUDIO_DEVICE_IN_WIRED_HEADSET,
AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
AUDIO_DEVICE_IN_BLUETOOTH_BLE, AUDIO_DEVICE_IN_BUILTIN_MIC});
@@ -579,7 +579,7 @@
if (device != nullptr) break;
}
device = availableDevices.getFirstExistingDevice({
- AUDIO_DEVICE_IN_BLE_HEADSET, AUDIO_DEVICE_IN_WIRED_HEADSET,
+ AUDIO_DEVICE_IN_WIRED_HEADSET,
AUDIO_DEVICE_IN_USB_HEADSET, AUDIO_DEVICE_IN_USB_DEVICE,
AUDIO_DEVICE_IN_BUILTIN_MIC});
break;
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index ef11072..c199a76 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -31,6 +31,7 @@
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/MediaMetricsItem.h>
#include <media/ShmemCompat.h>
#include <mediautils/ServiceUtilities.h>
#include <utils/Thread.h>
@@ -57,6 +58,19 @@
if (!_tmp.ok()) return aidl_utils::binderStatusFromStatusT(_tmp.error()); \
std::move(_tmp.value()); })
+audio_channel_mask_t getMaxChannelMask(std::vector<audio_channel_mask_t> masks) {
+ uint32_t maxCount = 0;
+ audio_channel_mask_t maxMask = AUDIO_CHANNEL_NONE;
+ for (auto mask : masks) {
+ const size_t count = audio_channel_count_from_out_mask(mask);
+ if (count > maxCount) {
+ maxMask = mask;
+ maxCount = count;
+ }
+ }
+ return maxMask;
+}
+
// ---------------------------------------------------------------------------
class Spatializer::EngineCallbackHandler : public AHandler {
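
Hoisting this helper also fixes the loop it replaces in getConfig() (visible further down): the old version compared each count against maxCount but never updated maxCount, so the last mask with at least one channel won rather than the largest. A standalone sketch of the corrected selection, with plain bitmask integers standing in for audio_channel_mask_t:

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Stand-in for audio_channel_count_from_out_mask(): out masks are bitfields,
    // so the channel count is the number of set bits.
    static uint32_t channelCount(uint32_t mask) {
        uint32_t n = 0;
        for (; mask != 0; mask &= mask - 1) ++n;
        return n;
    }

    static uint32_t getMaxChannelMask(const std::vector<uint32_t>& masks) {
        uint32_t maxCount = 0;
        uint32_t maxMask = 0; // AUDIO_CHANNEL_NONE
        for (uint32_t mask : masks) {
            const uint32_t count = channelCount(mask);
            if (count > maxCount) { // update BOTH fields; the old loop left maxCount at 0
                maxMask = mask;
                maxCount = count;
            }
        }
        return maxMask;
    }

    int main() {
        // 0x3 = stereo, 0x3F = 5.1: the 6-channel mask wins regardless of order.
        assert(getMaxChannelMask({0x3F, 0x3}) == 0x3F);
        assert(getMaxChannelMask({0x3, 0x3F}) == 0x3F);
        return 0;
    }
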
@@ -214,21 +228,94 @@
status_t status = getHalParameter<false>(effect, SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED,
&supportsHeadTracking);
if (status != NO_ERROR) {
+ ALOGW("%s: cannot get SPATIALIZER_PARAM_HEADTRACKING_SUPPORTED", __func__);
return status;
}
mSupportsHeadTracking = supportsHeadTracking[0];
- status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_LEVELS, &mLevels);
+ std::vector<media::SpatializationLevel> spatializationLevels;
+ status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_LEVELS,
+ &spatializationLevels);
if (status != NO_ERROR) {
+ ALOGW("%s: cannot get SPATIALIZER_PARAM_SUPPORTED_LEVELS", __func__);
return status;
}
+ bool noneLevelFound = false;
+ bool activeLevelFound = false;
+ for (const auto spatializationLevel : spatializationLevels) {
+ if (!aidl_utils::isValidEnum(spatializationLevel)) {
+ ALOGW("%s: ignoring spatializationLevel:%d", __func__, (int)spatializationLevel);
+ continue;
+ }
+ if (spatializationLevel == media::SpatializationLevel::NONE) {
+ noneLevelFound = true;
+ } else {
+ activeLevelFound = true;
+ }
+ // we don't detect duplicates.
+ mLevels.emplace_back(spatializationLevel);
+ }
+ if (!noneLevelFound || !activeLevelFound) {
+ ALOGW("%s: SPATIALIZER_PARAM_SUPPORTED_LEVELS must include NONE"
+ " and another valid level", __func__);
+ return BAD_VALUE;
+ }
+
+ std::vector<media::SpatializationMode> spatializationModes;
status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES,
- &mSpatializationModes);
+ &spatializationModes);
if (status != NO_ERROR) {
+ ALOGW("%s: cannot get SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES", __func__);
return status;
}
- return getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
- &mChannelMasks);
+ for (const auto spatializationMode : spatializationModes) {
+ if (!aidl_utils::isValidEnum(spatializationMode)) {
+ ALOGW("%s: ignoring spatializationMode:%d", __func__, (int)spatializationMode);
+ continue;
+ }
+ // we don't detect duplicates.
+ mSpatializationModes.emplace_back(spatializationMode);
+ }
+ if (mSpatializationModes.empty()) {
+ ALOGW("%s: SPATIALIZER_PARAM_SUPPORTED_SPATIALIZATION_MODES reports empty", __func__);
+ return BAD_VALUE;
+ }
+
+ std::vector<audio_channel_mask_t> channelMasks;
+ status = getHalParameter<true>(effect, SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS,
+ &channelMasks);
+ if (status != NO_ERROR) {
+ ALOGW("%s: cannot get SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS", __func__);
+ return status;
+ }
+ for (const auto channelMask : channelMasks) {
+ if (!audio_is_channel_mask_spatialized(channelMask)) {
+ ALOGW("%s: ignoring channelMask:%#x", __func__, channelMask);
+ continue;
+ }
+ // we don't detect duplicates.
+ mChannelMasks.emplace_back(channelMask);
+ }
+ if (mChannelMasks.empty()) {
+ ALOGW("%s: SPATIALIZER_PARAM_SUPPORTED_CHANNEL_MASKS reports empty", __func__);
+ return BAD_VALUE;
+ }
+
+ // Currently we expose only RELATIVE_WORLD.
+ // This is a limitation of the head tracking library based on a UX choice.
+ mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::DISABLED);
+ if (mSupportsHeadTracking) {
+ mHeadTrackingModes.push_back(SpatializerHeadTrackingMode::RELATIVE_WORLD);
+ }
+ mediametrics::LogItem(mMetricsId)
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE)
+ .set(AMEDIAMETRICS_PROP_CHANNELMASK, (int32_t)getMaxChannelMask(mChannelMasks))
+ .set(AMEDIAMETRICS_PROP_LEVELS, aidl_utils::enumsToString(mLevels))
+ .set(AMEDIAMETRICS_PROP_MODES, aidl_utils::enumsToString(mSpatializationModes))
+ .set(AMEDIAMETRICS_PROP_HEADTRACKINGMODES, aidl_utils::enumsToString(mHeadTrackingModes))
+ .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
+ .record();
+ return NO_ERROR;
}
/** Gets the channel mask, sampling rate and format set for the spatializer input. */
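
The initialization now treats HAL-reported capability lists as untrusted input: each value is enum-range checked, invalid entries are skipped, and an unusable result (empty, or missing the mandatory NONE level) fails with BAD_VALUE. A generic sketch of that filter-and-validate shape, using a hypothetical Level enum in place of the AIDL types:

    #include <cassert>
    #include <vector>

    // Hypothetical stand-in for media::SpatializationLevel.
    enum class Level { NONE = 0, MULTICHANNEL = 1, BED_PLUS_OBJECTS = 2 };

    // Stand-in for aidl_utils::isValidEnum(): accept only known enumerators.
    static bool isValidLevel(Level l) {
        switch (l) {
            case Level::NONE:
            case Level::MULTICHANNEL:
            case Level::BED_PLUS_OBJECTS:
                return true;
        }
        return false;
    }

    // Mirrors the BAD_VALUE paths: require NONE plus at least one active level.
    static bool filterLevels(const std::vector<Level>& halLevels, std::vector<Level>* out) {
        bool noneFound = false;
        bool activeFound = false;
        for (Level l : halLevels) {
            if (!isValidLevel(l)) continue; // ignore out-of-range values from the HAL
            if (l == Level::NONE) {
                noneFound = true;
            } else {
                activeFound = true;
            }
            out->push_back(l); // duplicates are not detected, as in the original
        }
        return noneFound && activeFound;
    }

    int main() {
        std::vector<Level> out;
        assert(filterLevels({Level::NONE, Level::MULTICHANNEL, static_cast<Level>(99)}, &out));
        assert(out.size() == 2);                            // the bogus 99 was dropped
        out.clear();
        assert(!filterLevels({Level::MULTICHANNEL}, &out)); // NONE missing -> reject
        return 0;
    }
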
@@ -236,12 +323,7 @@
std::lock_guard lock(mLock);
audio_config_base_t config = AUDIO_CONFIG_BASE_INITIALIZER;
// For now use highest supported channel count
- uint32_t maxCount = 0;
- for ( auto mask : mChannelMasks) {
- if (audio_channel_count_from_out_mask(mask) > maxCount) {
- config.channel_mask = mask;
- }
- }
+ config.channel_mask = getMaxChannelMask(mChannelMasks);
return config;
}
@@ -278,7 +360,7 @@
if (levels == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
- levels->push_back(SpatializationLevel::NONE);
+ // SpatializationLevel::NONE is already required from the effect or we don't load it.
levels->insert(levels->end(), mLevels.begin(), mLevels.end());
return Status::ok();
}
@@ -339,11 +421,7 @@
if (modes == nullptr) {
return binderStatusFromStatusT(BAD_VALUE);
}
-
- modes->push_back(SpatializerHeadTrackingMode::DISABLED);
- if (mSupportsHeadTracking) {
- modes->push_back(SpatializerHeadTrackingMode::RELATIVE_WORLD);
- }
+ modes->insert(modes->end(), mHeadTrackingModes.begin(), mHeadTrackingModes.end());
return Status::ok();
}
@@ -441,9 +519,11 @@
return binderStatusFromStatusT(INVALID_OPERATION);
}
std::lock_guard lock(mLock);
- mHeadSensor = sensorHandle;
- checkPoseController_l();
- checkSensorsState_l();
+ if (mHeadSensor != sensorHandle) {
+ mHeadSensor = sensorHandle;
+ checkPoseController_l();
+ checkSensorsState_l();
+ }
return Status::ok();
}
@@ -453,8 +533,13 @@
return binderStatusFromStatusT(INVALID_OPERATION);
}
std::lock_guard lock(mLock);
- mScreenSensor = sensorHandle;
- checkSensorsState_l();
+ if (mScreenSensor != sensorHandle) {
+ mScreenSensor = sensorHandle;
+ // TODO: consider a new method setHeadAndScreenSensor()
+ // because we generally set both at the same time.
+ // This will avoid duplicated work and recentering.
+ checkSensorsState_l();
+ }
return Status::ok();
}
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index a36ba61..29f4b08 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -298,7 +298,10 @@
/** Effect engine descriptor */
const effect_descriptor_t mEngineDescriptor;
/** Callback interface to parent audio policy service */
- SpatializerPolicyCallback* mPolicyCallback;
+ SpatializerPolicyCallback* const mPolicyCallback;
+
+ /** Currently there is only one version of the spatializer running */
+ const std::string mMetricsId = AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "0";
/** Mutex protecting internal state */
mutable std::mutex mLock;
@@ -339,6 +342,7 @@
float mDisplayOrientation GUARDED_BY(mLock) = kDisplayOrientationInvalid;
std::vector<media::SpatializationLevel> mLevels;
+ std::vector<media::SpatializerHeadTrackingMode> mHeadTrackingModes;
std::vector<media::SpatializationMode> mSpatializationModes;
std::vector<audio_channel_mask_t> mChannelMasks;
bool mSupportsHeadTracking;
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 0a9f4d9..304d44a 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "SpatializerPoseController"
//#define LOG_NDEBUG 0
#include <sensor/Sensor.h>
+#include <media/MediaMetricsItem.h>
#include <utils/Log.h>
#include <utils/SystemClock.h>
@@ -75,6 +76,10 @@
// How many ticks in a second.
constexpr auto kTicksPerSecond = Ticks::period::den;
+std::string getSensorMetricsId(int32_t sensorId) {
+ return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_SENSOR).append(std::to_string(sensorId));
+}
+
} // namespace
SpatializerPoseController::SpatializerPoseController(Listener* listener,
@@ -144,9 +149,16 @@
void SpatializerPoseController::setHeadSensor(int32_t sensor) {
std::lock_guard lock(mMutex);
+ if (sensor == mHeadSensor) return;
+ ALOGV("%s: new sensor:%d mHeadSensor:%d mScreenSensor:%d",
+ __func__, sensor, mHeadSensor, mScreenSensor);
+
// Stop current sensor, if valid and different from the other sensor.
if (mHeadSensor != INVALID_SENSOR && mHeadSensor != mScreenSensor) {
mPoseProvider->stopSensor(mHeadSensor);
+ mediametrics::LogItem(getSensorMetricsId(mHeadSensor))
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
+ .record();
}
if (sensor != INVALID_SENSOR) {
@@ -154,6 +166,15 @@
// Start new sensor.
mHeadSensor =
mPoseProvider->startSensor(sensor, mSensorPeriod) ? sensor : INVALID_SENSOR;
+ if (mHeadSensor != INVALID_SENSOR) {
+ auto sensor = mPoseProvider->getSensorByHandle(mHeadSensor);
+ std::string stringType = sensor ? sensor->getStringType().c_str() : "";
+ mediametrics::LogItem(getSensorMetricsId(mHeadSensor))
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
+ .set(AMEDIAMETRICS_PROP_MODE, AMEDIAMETRICS_PROP_MODE_VALUE_HEAD)
+ .set(AMEDIAMETRICS_PROP_TYPE, stringType)
+ .record();
+ }
} else {
// Sensor is already enabled.
mHeadSensor = mScreenSensor;
@@ -162,14 +183,21 @@
mHeadSensor = INVALID_SENSOR;
}
- mProcessor->recenter(true, false);
+ mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */);
}
void SpatializerPoseController::setScreenSensor(int32_t sensor) {
std::lock_guard lock(mMutex);
+ if (sensor == mScreenSensor) return;
+ ALOGV("%s: new sensor:%d mHeadSensor:%d mScreenSensor:%d",
+ __func__, sensor, mHeadSensor, mScreenSensor);
+
// Stop current sensor, if valid and different from the other sensor.
if (mScreenSensor != INVALID_SENSOR && mScreenSensor != mHeadSensor) {
mPoseProvider->stopSensor(mScreenSensor);
+ mediametrics::LogItem(getSensorMetricsId(mScreenSensor))
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_STOP)
+ .record();
}
if (sensor != INVALID_SENSOR) {
@@ -177,6 +205,13 @@
// Start new sensor.
mScreenSensor =
mPoseProvider->startSensor(sensor, mSensorPeriod) ? sensor : INVALID_SENSOR;
+ if (mScreenSensor != INVALID_SENSOR) {
+ auto sensor = mPoseProvider->getSensorByHandle(mScreenSensor);
+ std::string stringType = sensor ? sensor->getStringType().c_str() : "";
+ mediametrics::LogItem(getSensorMetricsId(mScreenSensor))
+ .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_START)
+ .set(AMEDIAMETRICS_PROP_MODE, AMEDIAMETRICS_PROP_MODE_VALUE_SCREEN)
+ .set(AMEDIAMETRICS_PROP_TYPE, stringType)
+ .record();
+ }
} else {
// Sensor is already enabled.
mScreenSensor = mHeadSensor;
@@ -185,7 +220,7 @@
mScreenSensor = INVALID_SENSOR;
}
- mProcessor->recenter(false, true);
+ mProcessor->recenter(false /* recenterHead */, true /* recenterScreen */);
}
void SpatializerPoseController::setDesiredMode(HeadTrackingMode mode) {
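
Both sensor setters now share the same choreography: return early when the handle is unchanged, stop the old sensor only if the other tracker is not also using it, and reuse the other tracker's sensor instead of starting it twice. A condensed, single-threaded sketch of that state machine for the head handle (pose-provider and metrics calls stubbed; the screen path is symmetric):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    constexpr int32_t INVALID_SENSOR = -1;

    struct TwoSensorTracker {
        int32_t head = INVALID_SENSOR;
        int32_t screen = INVALID_SENSOR;

        void startSensor(int32_t h) { std::printf("start %d\n", h); }
        void stopSensor(int32_t h) { std::printf("stop %d\n", h); }

        void setHeadSensor(int32_t sensor) {
            if (sensor == head) return; // unchanged: avoid restart and spurious recenter
            // Stop the current sensor only if the screen tracker isn't sharing it.
            if (head != INVALID_SENSOR && head != screen) stopSensor(head);
            if (sensor != INVALID_SENSOR) {
                if (sensor != screen) {
                    startSensor(sensor);
                    head = sensor;
                } else {
                    head = screen; // already running for the screen tracker
                }
            } else {
                head = INVALID_SENSOR;
            }
            // real code: mProcessor->recenter(true /* recenterHead */, false /* recenterScreen */);
        }
    };

    int main() {
        TwoSensorTracker t;
        t.setHeadSensor(7);   // start 7
        t.setHeadSensor(7);   // no-op: same handle
        t.screen = 9;
        t.setHeadSensor(9);   // stop 7, reuse the screen tracker's running sensor
        assert(t.head == 9);
        return 0;
    }
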
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index 493696b..e98975e 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -92,9 +92,9 @@
"gui/RingBufferConsumer.cpp",
"hidl/AidlCameraDeviceCallbacks.cpp",
"hidl/AidlCameraServiceListener.cpp",
- "hidl/Convert.cpp",
"hidl/HidlCameraDeviceUser.cpp",
"hidl/HidlCameraService.cpp",
+ "hidl/Utils.cpp",
"utils/CameraServiceProxyWrapper.cpp",
"utils/CameraThreadState.cpp",
"utils/CameraTraces.cpp",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3702812..c4b5a6c 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -3723,10 +3723,21 @@
void CameraService::UidPolicy::onUidStateChanged(uid_t uid, int32_t procState,
int64_t procStateSeq __unused, int32_t capability __unused) {
- Mutex::Autolock _l(mUidLock);
- if (mMonitoredUids.find(uid) != mMonitoredUids.end() &&
- mMonitoredUids[uid].procState != procState) {
- mMonitoredUids[uid].procState = procState;
+ bool procStateChange = false;
+ {
+ Mutex::Autolock _l(mUidLock);
+ if (mMonitoredUids.find(uid) != mMonitoredUids.end() &&
+ mMonitoredUids[uid].procState != procState) {
+ mMonitoredUids[uid].procState = procState;
+ procStateChange = true;
+ }
+ }
+
+ if (procStateChange) {
+ sp<CameraService> service = mService.promote();
+ if (service != nullptr) {
+ service->notifyMonitoredUids();
+ }
}
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 5db3fa6..5e91501 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -168,9 +168,8 @@
__FUNCTION__, mCameraIdStr.c_str(), entry.data.i64[i]);
}
}
- mDynamicProfileMap.emplace(
- ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
- standardBitmap);
+ mDynamicProfileMap[ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD] =
+ standardBitmap;
} else {
ALOGE("%s: Device %s supports 10-bit output but doesn't include a dynamic range"
" profile map!", __FUNCTION__, mCameraIdStr.c_str());
@@ -519,9 +518,16 @@
metadataRequestList.push_back(physicalSettingsList);
surfaceMapList.push_back(surfaceMap);
+ // Save certain CaptureRequest settings
if (!request.mUserTag.empty()) {
mUserTag = request.mUserTag;
}
+ camera_metadata_entry entry =
+ physicalSettingsList.begin()->metadata.find(
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE);
+ if (entry.count == 1) {
+ mVideoStabilizationMode = entry.data.u8[0];
+ }
}
mRequestIdCounter++;
@@ -1971,7 +1977,7 @@
remoteCb->onDeviceIdle();
}
Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
- streamStats, mUserTag);
+ streamStats, mUserTag, mVideoStabilizationMode);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 3af0b80..c5aad6b 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -352,6 +352,8 @@
// The string representation of object passed into CaptureRequest.setTag.
std::string mUserTag;
+ // The last set video stabilization mode
+ int mVideoStabilizationMode = -1;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index a7097fb..49a9760 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -338,7 +338,7 @@
void Camera2ClientBase<TClientBase>::notifyIdleWithUserTag(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
const std::vector<hardware::CameraStreamStats>& streamStats,
- const std::string& userTag) {
+ const std::string& userTag, int videoStabilizationMode) {
if (mDeviceActive) {
status_t res = TClientBase::finishCameraStreamingOps();
if (res != OK) {
@@ -346,7 +346,8 @@
TClientBase::mCameraIdStr.string(), res);
}
CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
- requestCount, resultErrorCount, deviceError, userTag, streamStats);
+ requestCount, resultErrorCount, deviceError, userTag, videoStabilizationMode,
+ streamStats);
}
mDeviceActive = false;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 9cba2f1..ec33f46 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -91,7 +91,7 @@
void notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
bool deviceError,
const std::vector<hardware::CameraStreamStats>& streamStats,
- const std::string& userTag);
+ const std::string& userTag, int videoStabilizationMode);
int getCameraId() const;
const sp<CameraDeviceBase>&
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index b133263..dec439f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -4265,8 +4265,8 @@
if (parent != nullptr) {
parent->mRequestBufferSM.onRequestThreadPaused();
}
- mRequestClearing = false;
}
+ mRequestClearing = false;
// Stop waiting for now and let thread management happen
return NULL;
}
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index b5d0746..add1483 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -89,9 +89,10 @@
if (strlen(camera_stream::physical_camera_id) > 0) {
lines.appendFormat(" Physical camera id: %s\n", camera_stream::physical_camera_id);
}
- lines.appendFormat(" Dynamic Range Profile: 0x%" PRIx64,
+ lines.appendFormat(" Dynamic Range Profile: 0x%" PRIx64 "\n",
camera_stream::dynamic_range_profile);
lines.appendFormat(" Stream use case: %" PRId64 "\n", camera_stream::use_case);
+ lines.appendFormat(" Timestamp base: %d\n", getTimestampBase());
lines.appendFormat(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
mFrameCount, mLastTimestamp);
lines.appendFormat(" Total buffers: %zu, currently dequeued: %zu\n",
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index b21322c..d2167e3 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -460,8 +460,8 @@
}
if (mPreviewFrameSpacer != nullptr) {
- res = mPreviewFrameSpacer->queuePreviewBuffer(timestamp - mTimestampOffset, transform,
- anwBuffer, anwReleaseFence);
+ res = mPreviewFrameSpacer->queuePreviewBuffer(timestamp - mTimestampOffset,
+ readoutTimestamp - mTimestampOffset, transform, anwBuffer, anwReleaseFence);
if (res != OK) {
ALOGE("%s: Stream %d: Error queuing buffer to preview buffer spacer: %s (%d)",
__FUNCTION__, mId, strerror(-res), res);
@@ -680,13 +680,16 @@
bool forceChoreographer = (timestampBase ==
OutputConfiguration::TIMESTAMP_BASE_CHOREOGRAPHER_SYNCED);
bool defaultToChoreographer = (isDefaultTimeBase &&
- isConsumedByHWComposer() &&
- !property_get_bool("camera.disable_preview_scheduler", false));
+ isConsumedByHWComposer());
+ bool defaultToSpacer = (isDefaultTimeBase &&
+ isConsumedByHWTexture() &&
+ !isConsumedByCPU() &&
+ !isVideoStream());
if (forceChoreographer || defaultToChoreographer) {
mSyncToDisplay = true;
mTotalBufferCount += kDisplaySyncExtraBuffer;
- } else if (isConsumedByHWTexture() && !isVideoStream()) {
- mPreviewFrameSpacer = new PreviewFrameSpacer(*this, mConsumer);
+ } else if (defaultToSpacer) {
+ mPreviewFrameSpacer = new PreviewFrameSpacer(this, mConsumer);
mTotalBufferCount ++;
res = mPreviewFrameSpacer->run(String8::format("PreviewSpacer-%d", mId).string());
if (res != OK) {
@@ -967,6 +970,10 @@
returnPrefetchedBuffersLocked();
+ if (mPreviewFrameSpacer != nullptr) {
+ mPreviewFrameSpacer->requestExit();
+ }
+
ALOGV("%s: disconnecting stream %d from native window", __FUNCTION__, getId());
res = native_window_api_disconnect(mConsumer.get(),
@@ -1260,6 +1267,17 @@
return (usage & GRALLOC_USAGE_HW_TEXTURE) != 0;
}
+bool Camera3OutputStream::isConsumedByCPU() const {
+ uint64_t usage = 0;
+ status_t res = getEndpointUsage(&usage);
+ if (res != OK) {
+ ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
+ return false;
+ }
+
+ return (usage & GRALLOC_USAGE_SW_READ_MASK) != 0;
+}
+
void Camera3OutputStream::dumpImageToDisk(nsecs_t timestamp,
ANativeWindowBuffer* anwBuffer, int fence) {
// Deriver output file name
@@ -1388,9 +1406,13 @@
nsecs_t expectedPresentT = mLastPresentTime;
nsecs_t minDiff = INT64_MAX;
// Derive minimum intervals between presentation times based on minimal
- // expected duration.
- size_t minVsyncs = (mMinExpectedDuration + vsyncEventData.frameInterval - 1) /
- vsyncEventData.frameInterval - 1;
+ // expected duration. The minimum number of Vsyncs is:
+ // - 0 if minFrameDuration in (0, 1.5] * vSyncInterval,
+ // - 1 if minFrameDuration in (1.5, 2.5] * vSyncInterval,
+ // - and so on.
+ int minVsyncs = (mMinExpectedDuration - vsyncEventData.frameInterval / 2) /
+ vsyncEventData.frameInterval;
+ if (minVsyncs < 0) minVsyncs = 0;
nsecs_t minInterval = minVsyncs * vsyncEventData.frameInterval + kTimelineThresholdNs;
// Find best timestamp in the vsync timeline:
// - closest to the ideal present time,
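
The new expression rounds the expected frame duration to the nearest whole number of vsyncs; the old ceiling-minus-one form, computed in size_t, underflowed to a huge value whenever the expected duration was zero. A worked check of the bucket comment above, using values strictly inside each interval (~60 Hz vsync assumed):

    #include <cassert>
    #include <cstdint>

    using nsecs_t = int64_t;

    int minVsyncs(nsecs_t minExpectedDuration, nsecs_t frameInterval) {
        int v = static_cast<int>((minExpectedDuration - frameInterval / 2) / frameInterval);
        return v < 0 ? 0 : v; // clamp, as in the patch
    }

    int main() {
        const nsecs_t vsync = 16666667; // ~60 Hz display
        assert(minVsyncs(vsync / 2, vsync) == 0);       // 0.5 * vsync -> 0 extra vsyncs
        assert(minVsyncs(vsync * 14 / 10, vsync) == 0); // 1.4 * vsync -> 0
        assert(minVsyncs(vsync * 16 / 10, vsync) == 1); // 1.6 * vsync -> 1 (e.g. 30 fps content)
        assert(minVsyncs(vsync * 24 / 10, vsync) == 1); // 2.4 * vsync -> 1
        assert(minVsyncs(vsync * 26 / 10, vsync) == 2); // 2.6 * vsync -> 2
        return 0;
    }
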
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 45e995d..5f2831f 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -160,6 +160,11 @@
bool isConsumedByHWTexture() const;
/**
+ * Return if this output stream is consumed by CPU.
+ */
+ bool isConsumedByCPU() const;
+
+ /**
* Return if the consumer configuration of this stream is deferred.
*/
virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index 9112b93..0439501 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -27,21 +27,20 @@
namespace camera3 {
-PreviewFrameSpacer::PreviewFrameSpacer(Camera3OutputStream& parent, sp<Surface> consumer) :
+PreviewFrameSpacer::PreviewFrameSpacer(wp<Camera3OutputStream> parent, sp<Surface> consumer) :
mParent(parent),
mConsumer(consumer) {
}
PreviewFrameSpacer::~PreviewFrameSpacer() {
- Thread::requestExitAndWait();
}
-status_t PreviewFrameSpacer::queuePreviewBuffer(nsecs_t timestamp, int32_t transform,
- ANativeWindowBuffer* anwBuffer, int releaseFence) {
+status_t PreviewFrameSpacer::queuePreviewBuffer(nsecs_t timestamp, nsecs_t readoutTimestamp,
+ int32_t transform, ANativeWindowBuffer* anwBuffer, int releaseFence) {
Mutex::Autolock l(mLock);
- mPendingBuffers.emplace(timestamp, transform, anwBuffer, releaseFence);
- ALOGV("%s: mPendingBuffers size %zu, timestamp %" PRId64, __FUNCTION__,
- mPendingBuffers.size(), timestamp);
+ mPendingBuffers.emplace(timestamp, readoutTimestamp, transform, anwBuffer, releaseFence);
+ ALOGV("%s: mPendingBuffers size %zu, timestamp %" PRId64 ", readoutTime %" PRId64,
+ __FUNCTION__, mPendingBuffers.size(), timestamp, readoutTimestamp);
mBufferCond.signal();
return OK;
@@ -51,32 +50,36 @@
Mutex::Autolock l(mLock);
if (mPendingBuffers.size() == 0) {
mBufferCond.waitRelative(mLock, kWaitDuration);
- return true;
+ if (exitPending()) {
+ return false;
+ } else {
+ return true;
+ }
}
nsecs_t currentTime = systemTime();
auto buffer = mPendingBuffers.front();
- nsecs_t captureInterval = buffer.timestamp - mLastCameraCaptureTime;
- // If the capture interval exceeds threshold, directly queue
+ nsecs_t readoutInterval = buffer.readoutTimestamp - mLastCameraReadoutTime;
+ // If the readout interval exceeds threshold, directly queue
// cached buffer.
- if (captureInterval >= kFrameIntervalThreshold) {
+ if (readoutInterval >= kFrameIntervalThreshold) {
mPendingBuffers.pop();
queueBufferToClientLocked(buffer, currentTime);
return true;
}
- // Cache the frame to match capture time interval, for up to 33ms
- nsecs_t expectedQueueTime = mLastCameraPresentTime + captureInterval;
+ // Cache the frame to match readout time interval, for up to 33ms
+ nsecs_t expectedQueueTime = mLastCameraPresentTime + readoutInterval;
nsecs_t frameWaitTime = std::min(kMaxFrameWaitTime, expectedQueueTime - currentTime);
if (frameWaitTime > 0 && mPendingBuffers.size() < 2) {
mBufferCond.waitRelative(mLock, frameWaitTime);
if (exitPending()) {
- return true;
+ return false;
}
currentTime = systemTime();
}
- ALOGV("%s: captureInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
- ", timestamp %" PRId64, __FUNCTION__, captureInterval,
+ ALOGV("%s: readoutInterval %" PRId64 ", queueInterval %" PRId64 ", waited for %" PRId64
+ ", timestamp %" PRId64, __FUNCTION__, readoutInterval,
currentTime - mLastCameraPresentTime, frameWaitTime, buffer.timestamp);
mPendingBuffers.pop();
queueBufferToClientLocked(buffer, currentTime);
@@ -92,7 +95,13 @@
void PreviewFrameSpacer::queueBufferToClientLocked(
const BufferHolder& bufferHolder, nsecs_t currentTime) {
- mParent.setTransform(bufferHolder.transform, true/*mayChangeMirror*/);
+ sp<Camera3OutputStream> parent = mParent.promote();
+ if (parent == nullptr) {
+ ALOGV("%s: Parent camera3 output stream was destroyed", __FUNCTION__);
+ return;
+ }
+
+ parent->setTransform(bufferHolder.transform, true/*mayChangeMirror*/);
status_t res = native_window_set_buffers_timestamp(mConsumer.get(), bufferHolder.timestamp);
if (res != OK) {
@@ -101,20 +110,20 @@
}
Camera3Stream::queueHDRMetadata(bufferHolder.anwBuffer.get()->handle, mConsumer,
- mParent.getDynamicRangeProfile());
+ parent->getDynamicRangeProfile());
res = mConsumer->queueBuffer(mConsumer.get(), bufferHolder.anwBuffer.get(),
bufferHolder.releaseFence);
if (res != OK) {
close(bufferHolder.releaseFence);
- if (mParent.shouldLogError(res)) {
+ if (parent->shouldLogError(res)) {
ALOGE("%s: Failed to queue buffer to client: %s(%d)", __FUNCTION__,
strerror(-res), res);
}
}
mLastCameraPresentTime = currentTime;
- mLastCameraCaptureTime = bufferHolder.timestamp;
+ mLastCameraReadoutTime = bufferHolder.readoutTimestamp;
}
}; // namespace camera3
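
Holding the parent as wp<> instead of a reference breaks the lifetime coupling: the spacer thread can briefly outlive the stream, so every use must promote the weak pointer and bail out if the stream is gone. The same pattern sketched with std::weak_ptr, which plays the role of wp<> here:

    #include <cstdio>
    #include <memory>

    struct Stream {
        void setTransform(int t) { std::printf("transform %d\n", t); }
    };

    struct FrameSpacer {
        explicit FrameSpacer(std::weak_ptr<Stream> parent) : mParent(std::move(parent)) {}

        void queueToClient(int transform) {
            // Promote the weak reference; the parent may already be destroyed.
            std::shared_ptr<Stream> parent = mParent.lock();
            if (!parent) {
                std::printf("parent stream destroyed, dropping buffer\n");
                return;
            }
            parent->setTransform(transform);
        }

        std::weak_ptr<Stream> mParent;
    };

    int main() {
        auto stream = std::make_shared<Stream>();
        FrameSpacer spacer(stream);
        spacer.queueToClient(1); // parent alive: transform applied
        stream.reset();          // stream destroyed while the spacer thread still runs
        spacer.queueToClient(2); // safely dropped instead of touching a dangling reference
        return 0;
    }
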
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index 5062553..e165768 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -42,19 +42,19 @@
*
* The PreviewFrameSpacer improves the viewfinder user experience by:
* - Cache the frame buffers if the intervals between queueBuffer is shorter
- * than the camera capture intervals.
- * - Queue frame buffers in the same cadence as the camera capture time.
+ * than the camera readout intervals.
+ * - Queue frame buffers in the same cadence as the camera readout time.
* - Maintain at most 1 queue-able buffer. If the 2nd preview buffer becomes
* available, queue the oldest cached buffer to the buffer queue.
*/
class PreviewFrameSpacer : public Thread {
public:
- explicit PreviewFrameSpacer(Camera3OutputStream& parent, sp<Surface> consumer);
+ explicit PreviewFrameSpacer(wp<Camera3OutputStream> parent, sp<Surface> consumer);
virtual ~PreviewFrameSpacer();
// Queue preview buffer locally
- status_t queuePreviewBuffer(nsecs_t timestamp, int32_t transform,
- ANativeWindowBuffer* anwBuffer, int releaseFence);
+ status_t queuePreviewBuffer(nsecs_t timestamp, nsecs_t readoutTimestamp,
+ int32_t transform, ANativeWindowBuffer* anwBuffer, int releaseFence);
bool threadLoop() override;
void requestExit() override;
@@ -63,24 +63,25 @@
// structure holding cached preview buffer info
struct BufferHolder {
nsecs_t timestamp;
+ nsecs_t readoutTimestamp;
int32_t transform;
sp<ANativeWindowBuffer> anwBuffer;
int releaseFence;
- BufferHolder(nsecs_t t, int32_t tr, ANativeWindowBuffer* anwb, int rf) :
- timestamp(t), transform(tr), anwBuffer(anwb), releaseFence(rf) {}
+ BufferHolder(nsecs_t t, nsecs_t readoutT, int32_t tr, ANativeWindowBuffer* anwb, int rf) :
+ timestamp(t), readoutTimestamp(readoutT), transform(tr), anwBuffer(anwb),
+ releaseFence(rf) {}
};
void queueBufferToClientLocked(const BufferHolder& bufferHolder, nsecs_t currentTime);
-
- Camera3OutputStream& mParent;
+ wp<Camera3OutputStream> mParent;
sp<ANativeWindow> mConsumer;
mutable Mutex mLock;
Condition mBufferCond;
std::queue<BufferHolder> mPendingBuffers;
- nsecs_t mLastCameraCaptureTime = 0;
+ nsecs_t mLastCameraReadoutTime = 0;
nsecs_t mLastCameraPresentTime = 0;
static constexpr nsecs_t kWaitDuration = 5000000LL; // 5ms
static constexpr nsecs_t kFrameIntervalThreshold = 80000000LL; // 80ms
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
index 3809f37..02eebd2 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OutputUtils.cpp
@@ -244,6 +244,8 @@
__FUNCTION__, streamId, strerror(-res), res);
if (res == TIMED_OUT || res == NO_MEMORY) {
bufRet.val.set<Tag::error>(StreamBufferRequestError::NO_BUFFER_AVAILABLE);
+ } else if (res == INVALID_OPERATION) {
+ bufRet.val.set<Tag::error>(StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
} else {
bufRet.val.set<Tag::error>(StreamBufferRequestError::UNKNOWN_ERROR);
}
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
index f063506..3392db1 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.cpp
@@ -16,7 +16,7 @@
#include <hardware/camera.h>
#include <hidl/AidlCameraDeviceCallbacks.h>
-#include <hidl/Convert.h>
+#include <hidl/Utils.h>
namespace android {
namespace frameworks {
@@ -34,7 +34,7 @@
H2BCameraDeviceCallbacks::H2BCameraDeviceCallbacks(const sp<HalInterface>& base) : CBase(base) { }
-bool H2BCameraDeviceCallbacks::initializeLooper() {
+bool H2BCameraDeviceCallbacks::initializeLooper(int vndkVersion) {
mCbLooper = new ALooper;
mCbLooper->setName("cs-looper");
status_t err = mCbLooper->start(/*runOnCallingThread*/ false, /*canCallJava*/ false,
@@ -43,7 +43,7 @@
ALOGE("Unable to start camera device callback looper");
return false;
}
- mHandler = new CallbackHandler(this);
+ mHandler = new CallbackHandler(this, vndkVersion);
mCbLooper->registerHandler(mHandler);
return true;
}
@@ -144,6 +144,12 @@
// Convert Metadata into HCameraMetadata;
FmqSizeOrMetadata hResult;
+ using hardware::cameraservice::utils::conversion::filterVndkKeys;
+ if (filterVndkKeys(mVndkVersion, result, /*isStatic*/false) != OK) {
+ ALOGE("%s: filtering vndk keys from result failed, not sending onResultReceived callback",
+ __FUNCTION__);
+ return;
+ }
const camera_metadata_t *rawMetadata = result.getAndLock();
converter->convertResultMetadataToHidl(rawMetadata, &hResult);
result.unlock(rawMetadata);
diff --git a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
index dbf520a..152002b 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
+++ b/services/camera/libcameraservice/hidl/AidlCameraDeviceCallbacks.h
@@ -61,7 +61,7 @@
~H2BCameraDeviceCallbacks();
- bool initializeLooper();
+ bool initializeLooper(int vndkVersion);
virtual binder::Status onDeviceError(int32_t errorCode,
const CaptureResultExtras& resultExtras) override;
@@ -103,10 +103,12 @@
struct CallbackHandler : public AHandler {
public:
void onMessageReceived(const sp<AMessage> &msg) override;
- CallbackHandler(H2BCameraDeviceCallbacks *converter) : mConverter(converter) { }
+ CallbackHandler(H2BCameraDeviceCallbacks *converter, int vndkVersion) :
+ mConverter(converter), mVndkVersion(vndkVersion) { }
private:
void processResultMessage(sp<ResultWrapper> &resultWrapper);
wp<H2BCameraDeviceCallbacks> mConverter = nullptr;
+ int mVndkVersion = -1;
Mutex mMetadataQueueLock;
};
diff --git a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
index cca3f2e..add9121 100644
--- a/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
+++ b/services/camera/libcameraservice/hidl/AidlCameraServiceListener.cpp
@@ -15,7 +15,7 @@
*/
#include <hidl/AidlCameraServiceListener.h>
-#include <hidl/Convert.h>
+#include <hidl/Utils.h>
namespace android {
namespace frameworks {
diff --git a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
index 2509e6c..26e813a 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraDeviceUser.cpp
@@ -20,8 +20,8 @@
#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
#include <hidl/AidlCameraDeviceCallbacks.h>
-#include <hidl/Convert.h>
#include <hidl/HidlCameraDeviceUser.h>
+#include <hidl/Utils.h>
#include <android/hardware/camera/device/3.2/types.h>
namespace android {
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index a812587..65a0300 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -14,13 +14,13 @@
* limitations under the License.
*/
-#include <hidl/Convert.h>
+#include <android-base/properties.h>
-#include <hidl/HidlCameraService.h>
-
-#include <hidl/HidlCameraDeviceUser.h>
#include <hidl/AidlCameraDeviceCallbacks.h>
#include <hidl/AidlCameraServiceListener.h>
+#include <hidl/HidlCameraService.h>
+#include <hidl/HidlCameraDeviceUser.h>
+#include <hidl/Utils.h>
#include <hidl/HidlTransportSupport.h>
@@ -34,6 +34,7 @@
using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
using hardware::hidl_vec;
using hardware::cameraservice::utils::conversion::convertToHidl;
+using hardware::cameraservice::utils::conversion::filterVndkKeys;
using hardware::cameraservice::utils::conversion::B2HStatus;
using hardware::Void;
@@ -53,6 +54,10 @@
return gHidlCameraService;
}
+HidlCameraService::HidlCameraService(android::CameraService *cs) : mAidlICameraService(cs) {
+ mVndkVersion = base::GetIntProperty("ro.vndk.version", __ANDROID_API_FUTURE__);
+}
+
Return<void>
HidlCameraService::getCameraCharacteristics(const hidl_string& cameraId,
getCameraCharacteristics_cb _hidl_cb) {
@@ -77,6 +82,11 @@
_hidl_cb(status, hidlMetadata);
return Void();
}
+ if (filterVndkKeys(mVndkVersion, cameraMetadata) != OK) {
+ ALOGE("%s: Unable to filter vndk metadata keys for version %d", __FUNCTION__, mVndkVersion);
+ _hidl_cb(HStatus::UNKNOWN_ERROR, hidlMetadata);
+ return Void();
+ }
const camera_metadata_t *rawMetadata = cameraMetadata.getAndLock();
convertToHidl(rawMetadata, &hidlMetadata);
_hidl_cb(status, hidlMetadata);
@@ -97,7 +107,7 @@
// Create a hardware::camera2::ICameraDeviceCallback object which internally
// calls callback functions passed through hCallback.
sp<H2BCameraDeviceCallbacks> hybridCallbacks = new H2BCameraDeviceCallbacks(hCallback);
- if (!hybridCallbacks->initializeLooper()) {
+ if (!hybridCallbacks->initializeLooper(mVndkVersion)) {
ALOGE("Unable to handle callbacks on device, cannot connect");
_hidl_cb(HStatus::UNKNOWN_ERROR, nullptr);
return Void();
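
The new out-of-line constructor reads the client-visible VNDK version once at service creation. On a release build ro.vndk.version is a number such as "32"; on an in-development build it is a codename that GetIntProperty cannot parse, so the __ANDROID_API_FUTURE__ default is returned and later treated as "current version, nothing to filter". A minimal sketch of that fallback, assuming <android-base/properties.h> and <android/api-level.h>:

    #include <android-base/properties.h>
    #include <android/api-level.h>

    int readVndkVersion() {
        // Returns the parsed integer, or the default when the property is
        // unset or holds a non-numeric codename (development builds).
        return android::base::GetIntProperty("ro.vndk.version",
                                             __ANDROID_API_FUTURE__);
    }
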
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.h b/services/camera/libcameraservice/hidl/HidlCameraService.h
index 86a7cec..1c8145c 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.h
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.h
@@ -75,7 +75,7 @@
static sp<HidlCameraService> getInstance(android::CameraService *cs);
private:
- HidlCameraService(android::CameraService *cs) : mAidlICameraService(cs) { };
+ HidlCameraService(android::CameraService *cs);
sp<hardware::ICameraServiceListener> searchListenerCacheLocked(
sp<HCameraServiceListener> listener, /*removeIfFound*/ bool shouldRemove = false);
@@ -95,6 +95,7 @@
using HIListeners =
std::pair<sp<HCameraServiceListener>, sp<ICameraServiceListener>>;
std::list<HIListeners> mListeners;
+ int mVndkVersion = -1;
};
} // namespace implementation
diff --git a/services/camera/libcameraservice/hidl/Convert.cpp b/services/camera/libcameraservice/hidl/Utils.cpp
similarity index 92%
rename from services/camera/libcameraservice/hidl/Convert.cpp
rename to services/camera/libcameraservice/hidl/Utils.cpp
index 597147b..057a6e9 100644
--- a/services/camera/libcameraservice/hidl/Convert.cpp
+++ b/services/camera/libcameraservice/hidl/Utils.cpp
@@ -14,7 +14,8 @@
* limitations under the License.
*/
-#include <hidl/Convert.h>
+#include <hidl/Utils.h>
+#include <hidl/VndkVersionMetadataTags.h>
#include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
#include <cutils/native_handle.h>
#include <mediautils/AImageReaderUtils.h>
@@ -297,6 +298,31 @@
return hPhysicalCaptureResultInfos;
}
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic) {
+ if (vndkVersion == __ANDROID_API_FUTURE__) {
+ // ro.vndk.version holds a codename rather than a number on builds of
+ // the current in-development release, so there is nothing newer to filter.
+ return OK;
+ }
+ const auto &apiLevelToKeys =
+ isStatic ? static_api_level_to_keys : dynamic_api_level_to_keys;
+ // Find the vndk versions above the given vndk version. All the vndk
+ // versions above the given one, need to have their keys filtered from the
+ // metadata in order to avoid metadata invalidation.
+ auto it = apiLevelToKeys.upper_bound(vndkVersion);
+ while (it != apiLevelToKeys.end()) {
+ for (const auto &key : it->second) {
+ status_t res = metadata.erase(key);
+ if (res != OK) {
+ ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
+ return res;
+ }
+ }
+ it++;
+ }
+ return OK;
+}
+
} //conversion
} // utils
} //cameraservice
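
filterVndkKeys walks std::map::upper_bound(vndkVersion) to the end of the map, so every key introduced strictly after the client's VNDK version is erased while keys introduced at or below it survive. A self-contained illustration of that upper_bound traversal, with plain ints standing in for camera_metadata_tag values:

    #include <cstdio>
    #include <map>
    #include <vector>

    int main() {
        // Hypothetical tag values keyed by the API level that introduced them.
        std::map<int, std::vector<int>> apiLevelToKeys{
            {30, {1001, 1002}},
            {31, {2001}},
            {33, {3001, 3002}},
        };
        const int vndkVersion = 31;
        // upper_bound(31) lands on the {33, ...} entry: levels 30 and 31 survive.
        for (auto it = apiLevelToKeys.upper_bound(vndkVersion);
             it != apiLevelToKeys.end(); ++it) {
            for (int key : it->second) {
                std::printf("erase key %d (introduced at level %d)\n", key, it->first);
            }
        }
        return 0;
    }
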
diff --git a/services/camera/libcameraservice/hidl/Convert.h b/services/camera/libcameraservice/hidl/Utils.h
similarity index 96%
rename from services/camera/libcameraservice/hidl/Convert.h
rename to services/camera/libcameraservice/hidl/Utils.h
index 82ffc48..e6d4393 100644
--- a/services/camera/libcameraservice/hidl/Convert.h
+++ b/services/camera/libcameraservice/hidl/Utils.h
@@ -29,6 +29,7 @@
#include <android/hardware/camera2/ICameraDeviceUser.h>
#include <android/hardware/graphics/bufferqueue/1.0/IGraphicBufferProducer.h>
#include <android/hardware/ICameraService.h>
+#include <camera/CameraMetadata.h>
#include <fmq/MessageQueue.h>
#include <hardware/camera.h>
#include <hidl/MQDescriptor.h>
@@ -96,6 +97,8 @@
HStatus B2HStatus(const binder::Status &bStatus);
+status_t filterVndkKeys(int vndkVersion, CameraMetadata &metadata, bool isStatic = true);
+
} // conversion
} // utils
} // cameraservice
diff --git a/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
new file mode 100644
index 0000000..d3377f4
--- /dev/null
+++ b/services/camera/libcameraservice/hidl/VndkVersionMetadataTags.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <map>
+#include <vector>
+#pragma once
+/**
+ * ! Do not edit this file directly !
+ *
+ * Generated automatically from vndk_camera_metadata_tags.mako. To be included in libcameraservice
+ * only by hidl/Utils.cpp.
+ */
+
+/**
+ * API level to static keys mapping. To be used for filtering out keys depending on vndk version
+ * used by vendor clients.
+ */
+std::map<int, std::vector<camera_metadata_tag>> static_api_level_to_keys{
+ {30, {
+ ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES,
+ ANDROID_CONTROL_ZOOM_RATIO_RANGE,
+ ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
+ ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
+ } },
+ {31, {
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
+ ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
+ ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
+ ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
+ ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
+ ANDROID_SENSOR_INFO_BINNING_FACTOR,
+ } },
+ {32, {
+ ANDROID_INFO_DEVICE_STATE_ORIENTATIONS,
+ } },
+ {33, {
+ ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
+ ANDROID_AUTOMOTIVE_LENS_FACING,
+ ANDROID_AUTOMOTIVE_LOCATION,
+ ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES,
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
+ ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
+ } },
+};
+
+/**
+ * API level to dynamic keys mapping. To be used for filtering out keys depending on vndk version
+ * used by vendor clients.
+ */
+std::map<int, std::vector<camera_metadata_tag>> dynamic_api_level_to_keys{
+ {30, {
+ ANDROID_CONTROL_ZOOM_RATIO,
+ ANDROID_SCALER_ROTATE_AND_CROP,
+ ANDROID_CONTROL_EXTENDED_SCENE_MODE,
+ } },
+ {31, {
+ ANDROID_SENSOR_PIXEL_MODE,
+ ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
+ } },
+};
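
The two tables map one-to-one onto filterVndkKeys' isStatic parameter: camera characteristics are scrubbed against static_api_level_to_keys (the default), per-frame results against dynamic_api_level_to_keys. In caller terms, using the signature from Utils.h above:

    // Characteristics query: isStatic defaults to true.
    filterVndkKeys(mVndkVersion, characteristicsMetadata);
    // Capture results: dynamic keys, as in the callback handler.
    filterVndkKeys(mVndkVersion, resultMetadata, /*isStatic*/ false);
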
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index f7cede8..69175cc 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -80,7 +80,7 @@
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::string& userTag,
+ const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats) {
Mutex::Autolock l(mLock);
@@ -89,6 +89,7 @@
mSessionStats.mResultErrorCount = resultErrorCount;
mSessionStats.mDeviceError = deviceError;
mSessionStats.mUserTag = String16(userTag.c_str());
+ mSessionStats.mVideoStabilizationMode = videoStabilizationMode;
mSessionStats.mStreamStats = streamStats;
updateProxyDeviceState(mSessionStats);
@@ -179,7 +180,7 @@
void CameraServiceProxyWrapper::logIdle(const String8& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::string& userTag,
+ const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats) {
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
@@ -194,8 +195,8 @@
}
ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
- ", userTag %s", __FUNCTION__, id.c_str(), requestCount, resultErrorCount,
- deviceError, userTag.c_str());
+ ", userTag %s, videoStabilizationMode %d", __FUNCTION__, id.c_str(), requestCount,
+ resultErrorCount, deviceError, userTag.c_str(), videoStabilizationMode);
for (size_t i = 0; i < streamStats.size(); i++) {
ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
PRId64 ", startTimeMs %d" ,
@@ -204,7 +205,8 @@
streamStats[i].mStartLatencyMs);
}
- sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag, streamStats);
+ sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag,
+ videoStabilizationMode, streamStats);
}
void CameraServiceProxyWrapper::logOpen(const String8& id, int facing,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index 61fc915..e34a8f0 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -50,7 +50,7 @@
void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
void onActive(float maxPreviewFps);
void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::string& userTag,
+ const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
};
@@ -87,7 +87,7 @@
// Session state becomes idle
static void logIdle(const String8& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::string& userTag,
+ const std::string& userTag, int32_t videoStabilizationMode,
const std::vector<hardware::CameraStreamStats>& streamStats);
// Ping camera service proxy for user update
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 9f8f457..d5ddf9f 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -620,6 +620,10 @@
stream.bufferSize = 0;
stream.groupId = -1;
stream.sensorPixelModesUsed = defaultSensorPixelModes;
+ using DynamicRangeProfile =
+ aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap;
+ stream.dynamicRangeProfile =
+ DynamicRangeProfile::ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
streamConfiguration.streams[streamIdx++] = stream;
streamConfiguration.multiResolutionInputImage =
sessionConfiguration.inputIsMultiResolution();
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index e322d62..a737ba0 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -390,47 +390,59 @@
}
AStatsEvent_writeInt32(event, qpBMaxOri);
- // int32_t configColorStandard = -1;
- // if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
- // metrics_proto.set_config_color_standard(configColorStandard);
- // }
- // AStatsEvent_writeInt32(event, configColorStandard);
+ int32_t configColorStandard = -1;
+ if (item->getInt32("android.media.mediacodec.config-color-standard", &configColorStandard)) {
+ metrics_proto.set_config_color_standard(configColorStandard);
+ }
+ AStatsEvent_writeInt32(event, configColorStandard);
- // int32_t configColorRange = -1;
- // if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
- // metrics_proto.set_config_color_range(configColorRange);
- // }
- // AStatsEvent_writeInt32(event, configColorRange);
+ int32_t configColorRange = -1;
+ if (item->getInt32("android.media.mediacodec.config-color-range", &configColorRange)) {
+ metrics_proto.set_config_color_range(configColorRange);
+ }
+ AStatsEvent_writeInt32(event, configColorRange);
- // int32_t configColorTransfer = -1;
- // if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
- // metrics_proto.set_config_color_transfer(configColorTransfer);
- // }
- // AStatsEvent_writeInt32(event, configColorTransfer);
+ int32_t configColorTransfer = -1;
+ if (item->getInt32("android.media.mediacodec.config-color-transfer", &configColorTransfer)) {
+ metrics_proto.set_config_color_transfer(configColorTransfer);
+ }
+ AStatsEvent_writeInt32(event, configColorTransfer);
- // int32_t parsedColorStandard = -1;
- // if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
- // metrics_proto.set_parsed_color_standard(parsedColorStandard);
- // }
- // AStatsEvent_writeInt32(event, parsedColorStandard);
+ int32_t parsedColorStandard = -1;
+ if (item->getInt32("android.media.mediacodec.parsed-color-standard", &parsedColorStandard)) {
+ metrics_proto.set_parsed_color_standard(parsedColorStandard);
+ }
+ AStatsEvent_writeInt32(event, parsedColorStandard);
- // int32_t parsedColorRange = -1;
- // if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
- // metrics_proto.set_parsed_color_range(parsedColorRange);
- // }
- // AStatsEvent_writeInt32(event, parsedColorRange);
+ int32_t parsedColorRange = -1;
+ if (item->getInt32("android.media.mediacodec.parsed-color-range", &parsedColorRange)) {
+ metrics_proto.set_parsed_color_range(parsedColorRange);
+ }
+ AStatsEvent_writeInt32(event, parsedColorRange);
- // int32_t parsedColorTransfer = -1;
- // if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
- // metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
- // }
- // AStatsEvent_writeInt32(event, parsedColorTransfer);
+ int32_t parsedColorTransfer = -1;
+ if (item->getInt32("android.media.mediacodec.parsed-color-transfer", &parsedColorTransfer)) {
+ metrics_proto.set_parsed_color_transfer(parsedColorTransfer);
+ }
+ AStatsEvent_writeInt32(event, parsedColorTransfer);
- // int32_t hdrMetadataFlags = -1;
- // if (item->getInt32("android.media.mediacodec.hdr-metadata-flags", &hdrMetadataFlags)) {
- // metrics_proto.set_hdr_metadata_flags(hdrMetadataFlags);
- // }
- // AStatsEvent_writeInt32(event, hdrMetadataFlags);
+ int32_t hdrStaticInfo = -1;
+ if (item->getInt32("android.media.mediacodec.hdr-static-info", &hdrStaticInfo)) {
+ metrics_proto.set_hdr_static_info(hdrStaticInfo);
+ }
+ AStatsEvent_writeInt32(event, hdrStaticInfo);
+
+ int32_t hdr10PlusInfo = -1;
+ if (item->getInt32("android.media.mediacodec.hdr10-plus-info", &hdr10PlusInfo)) {
+ metrics_proto.set_hdr10_plus_info(hdr10PlusInfo);
+ }
+ AStatsEvent_writeInt32(event, hdr10PlusInfo);
+
+ int32_t hdrFormat = -1;
+ if (item->getInt32("android.media.mediacodec.hdr-format", &hdrFormat)) {
+ metrics_proto.set_hdr_format(hdrFormat);
+ }
+ AStatsEvent_writeInt32(event, hdrFormat);
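
Each restored field above follows the same pattern: read the key into a -1 sentinel, mirror it into metrics_proto only when the key is present, then unconditionally write the value to the stats event so the atom's field order and arity stay fixed. A hypothetical helper capturing the pattern (the actual file deliberately spells each field out):

    #include <cstdint>
    // Assumes the libstatssocket stats_event.h header already used by this file.

    // Sketch only: 'setter' wraps the matching metrics_proto.set_* call;
    // 'Item' is templated so the real mediametrics item type is not needed here.
    template <typename Item, typename SetterFn>
    void writeOptionalInt32(AStatsEvent* event, const Item& item,
                            const char* key, SetterFn setter) {
        int32_t value = -1;
        if (item->getInt32(key, &value)) {
            setter(value);  // recorded in the proto only when the key exists
        }
        AStatsEvent_writeInt32(event, value);  // always written (may be -1)
    }

    // e.g.: writeOptionalInt32(event, item, "android.media.mediacodec.hdr-format",
    //           [&](int32_t v) { metrics_proto.set_hdr_format(v); });
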
int err = AStatsEvent_write(event);
if (err < 0) {