Merge "vts: Disable checking md5 checksum and timestamps for tunneled decoders" into android14-tests-dev am: 8d43abd037
Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/3222363
Change-Id: I874c1b141a79a7839d8794f60acaba4db9d84199
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index bd679e5..ebdaee9 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -584,6 +584,19 @@
* <p>Only constrains auto-exposure (AE) algorithm, not
* manual control of ACAMERA_SENSOR_EXPOSURE_TIME and
* ACAMERA_SENSOR_FRAME_DURATION.</p>
+ * <p>Note that the actual achievable max framerate also depends on the minimum frame
+ * duration of the output streams. The max frame rate will be
+ * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations))</code>. For example,
+ * if the application sets this key to <code>{60, 60}</code>, but the maximum minFrameDuration among
+ * all configured streams is 33ms, the maximum framerate won't be 60fps, but will be
+ * 30fps.</p>
+ * <p>To start a CaptureSession with a target FPS range different from the
+ * capture request template's default value, the application
+ * is strongly recommended to call
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }
+ * with the target fps range included in the session parameters. The aeTargetFpsRange is
+ * typically a session parameter. Specifying it at session creation time helps avoid
+ * session reconfiguration delays in cases like 60fps or high speed recording.</p>
*
* @see ACAMERA_SENSOR_EXPOSURE_TIME
* @see ACAMERA_SENSOR_FRAME_DURATION
@@ -1128,6 +1141,12 @@
* ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE field will return
* OFF if the recording output is not stabilized, or if there are no output
* Surface types that can be stabilized.</p>
+ * <p>The application is strongly recommended to call
+ * {@link ACameraDevice_createCaptureSessionWithSessionParameters }
+ * with the desired video stabilization mode included in the session parameters.
+ * Video stabilization mode is a session parameter on many devices. Specifying
+ * it at session creation time helps avoid the reconfiguration delay caused by a
+ * difference between the default value and the first CaptureRequest.</p>
* <p>If a camera device supports both this mode and OIS
* (ACAMERA_LENS_OPTICAL_STABILIZATION_MODE), turning both modes on may
* produce undesirable interaction, so it is recommended not to enable
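
Both session-parameter recommendations above reduce to the same step: pass the settings as session parameters when the session is created. Below is a minimal sketch of what that can look like for an NDK client; the helper name is made up, `device`, `outputs`, and `callbacks` are assumed to be set up elsewhere by the application, and per-call error checking is abbreviated.

#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraMetadataTags.h>

// Hypothetical helper, not part of the NDK: create a recording session with the
// target FPS range and stabilization mode supplied as session parameters.
static camera_status_t createRecordingSession(ACameraDevice* device,
                                              ACaptureSessionOutputContainer* outputs,
                                              ACameraCaptureSession_stateCallbacks* callbacks,
                                              ACameraCaptureSession** sessionOut) {
    ACaptureRequest* sessionParams = nullptr;
    camera_status_t status =
            ACameraDevice_createCaptureRequest(device, TEMPLATE_RECORD, &sessionParams);
    if (status != ACAMERA_OK) {
        return status;
    }

    // aeTargetFpsRange as a session parameter; the effective cap is still
    // min(60, 1 / max(stream min frame durations)) as described above.
    const int32_t fpsRange[2] = {60, 60};
    ACaptureRequest_setEntry_i32(sessionParams, ACAMERA_CONTROL_AE_TARGET_FPS_RANGE,
                                 2, fpsRange);

    // Video stabilization mode as a session parameter.
    const uint8_t stabMode = ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE_ON;
    ACaptureRequest_setEntry_u8(sessionParams, ACAMERA_CONTROL_VIDEO_STABILIZATION_MODE,
                                1, &stabMode);

    status = ACameraDevice_createCaptureSessionWithSessionParameters(
            device, outputs, sessionParams, callbacks, sessionOut);
    ACaptureRequest_free(sessionParams);
    return status;
}
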
@@ -4487,8 +4506,8 @@
* <p>The guaranteed stream combinations related to stream use case for a camera device with
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
* capability is documented in the camera device
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">guideline</a>. The application is strongly recommended to use one of the guaranteed stream
- * combinations.
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice#stream-use-case-capability-additional-guaranteed-configurations">guideline</a>.
+ * The application is strongly recommended to use one of the guaranteed stream combinations.
* If the application creates a session with a stream combination not in the guaranteed
* list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
* the camera device may ignore some stream use cases due to hardware constraints
@@ -5384,7 +5403,7 @@
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_DEFAULT">CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT</a> mode.
* They can be queried through
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#get">CameraCharacteristics#get</a> with
- * <a href="https://developer.android.com/reference/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION)">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION)</a>.
+ * <a href="https://developer.android.com/reference/CameraCharacteristics.html#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION">CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION</a>.
* Unless reported by both
* <a href="https://developer.android.com/reference/android/hardware/camera2/params/StreamConfigurationMap.html">StreamConfigurationMap</a>s, the outputs from
* <code>android.scaler.streamConfigurationMapMaximumResolution</code> and
@@ -5399,13 +5418,12 @@
* <ul>
* <li>
* <p>The mandatory stream combinations listed in
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics/mandatoryMaximumResolutionStreamCombinations.html">mandatoryMaximumResolutionStreamCombinations</a>
- * would not apply.</p>
+ * android.scaler.mandatoryMaximumResolutionStreamCombinations would not apply.</p>
* </li>
* <li>
* <p>The bayer pattern of {@code RAW} streams when
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>
- * is selected will be the one listed in <a href="https://developer.android.com/reference/android/sensor/info/binningFactor.html">binningFactor</a>.</p>
+ * is selected will be the one listed in ACAMERA_SENSOR_INFO_BINNING_FACTOR.</p>
* </li>
* <li>
* <p>The following keys will always be present:</p>
@@ -5419,6 +5437,7 @@
* </ul>
*
* @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
+ * @see ACAMERA_SENSOR_INFO_BINNING_FACTOR
* @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION
* @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION
*/
@@ -9866,8 +9885,8 @@
* </ul>
* <p><a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_AVAILABLE_STREAM_USE_CASES">CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES</a>
* lists all of the supported stream use cases.</p>
- * <p>Refer to
- * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#stream-use-case-capability-additional-guaranteed-configurations">CameraDevice#stream-use-case-capability-additional-guaranteed-configurations</a>
+ * <p>Refer to the
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice#stream-use-case-capability-additional-guaranteed-configurations">guideline</a>
* for the mandatory stream combinations involving stream use cases, which can also be
* queried via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
*/
@@ -10728,9 +10747,9 @@
/**
* <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
* better.</p>
- * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
- * {@link ACameraDevice_createCaptureSession }
- * documentation are guaranteed to be supported.</p>
+ * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice#limited-level-additional-guaranteed-configurations">tables</a>
+ * in the documentation are guaranteed to be supported.</p>
* <p>All <code>LIMITED</code> devices support the <code>BACKWARDS_COMPATIBLE</code> capability, indicating basic
* support for color image capture. The only exception is that the device may
* alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
@@ -10755,9 +10774,9 @@
/**
* <p>This camera device is capable of supporting advanced imaging applications.</p>
- * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
- * {@link ACameraDevice_createCaptureSession }
- * documentation are guaranteed to be supported.</p>
+ * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice#full-level-additional-guaranteed-configurations">tables</a>
+ * in the documentation are guaranteed to be supported.</p>
* <p>A <code>FULL</code> device will support below capabilities:</p>
* <ul>
* <li><code>BURST_CAPTURE</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
@@ -10784,9 +10803,9 @@
/**
* <p>This camera device is running in backward compatibility mode.</p>
- * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the
- * {@link ACameraDevice_createCaptureSession }
- * documentation are supported.</p>
+ * <p>Only the stream configurations listed in the <code>LEGACY</code>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-guaranteed-configurations">table</a>
+ * in the documentation are supported.</p>
* <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
* post-processing, arbitrary cropping regions, and has relaxed performance constraints.
* No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
@@ -10808,9 +10827,9 @@
* <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
* FULL-level capabilities.</p>
* <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
- * <code>LIMITED</code> tables in the
- * {@link ACameraDevice_createCaptureSession }
- * documentation are guaranteed to be supported.</p>
+ * <code>LIMITED</code>
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice#level-3-additional-guaranteed-configurations">tables</a>
+ * in the documentation are guaranteed to be supported.</p>
* <p>The following additional capabilities are guaranteed to be supported:</p>
* <ul>
* <li><code>YUV_REPROCESSING</code> capability (ACAMERA_REQUEST_AVAILABLE_CAPABILITIES contains
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index df30dba..0aeed08 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -222,6 +222,21 @@
return mInit;
}
+void Component::onDeathReceived() {
+ {
+ std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+ mClientDied = true;
+ for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+ if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+ std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+ std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+ bqPool->invalidate();
+ }
+ }
+ }
+ release();
+}
+
// Methods from ::android::hardware::media::c2::V1_0::IComponent
Return<Status> Component::queue(const WorkBundle& workBundle) {
std::list<std::unique_ptr<C2Work>> c2works;
@@ -409,9 +424,19 @@
blockPool = nullptr;
}
if (blockPool) {
- mBlockPoolsMutex.lock();
- mBlockPools.emplace(blockPool->getLocalId(), blockPool);
- mBlockPoolsMutex.unlock();
+ bool emplaced = false;
+ {
+ mBlockPoolsMutex.lock();
+ if (!mClientDied) {
+ mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+ emplaced = true;
+ }
+ mBlockPoolsMutex.unlock();
+ }
+ if (!emplaced) {
+ blockPool.reset();
+ status = C2_BAD_STATE;
+ }
} else if (status == C2_OK) {
status = C2_CORRUPTED;
}
@@ -494,8 +519,8 @@
) override {
auto strongComponent = mComponent.promote();
if (strongComponent) {
- LOG(INFO) << "Client died ! release the component !!";
- strongComponent->release();
+ LOG(INFO) << "Client died ! notify and release the component !!";
+ strongComponent->onDeathReceived();
} else {
LOG(ERROR) << "Client died ! no component to release !!";
}
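
The same change is applied to the 1.0, 1.1, and 1.2 HIDL Component implementations in this patch. Reduced to a standalone sketch (illustrative types, not the Codec2 HAL classes), the pattern is: the death handler marks the client dead and invalidates existing BufferQueue-backed pools under the same mutex that guards pool registration, so a pool that races with client death is either invalidated in the loop or rejected before it is ever tracked.

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>

struct BlockPool {                       // stand-in for C2BufferQueueBlockPool
    virtual ~BlockPool() = default;
    virtual void invalidate() = 0;       // after this, fetches fail with a bad-state error
};

class ComponentLike {                    // stand-in for the HIDL Component
public:
    // Death-recipient path: mark the client dead and invalidate existing pools.
    void onClientDeath() {
        std::lock_guard<std::mutex> lock(mMutex);
        mClientDied = true;
        for (auto& entry : mPools) {
            entry.second->invalidate();
        }
    }

    // createBlockPool() path: refuse to track a new pool once the client is gone,
    // so the caller can drop it and report C2_BAD_STATE instead.
    bool trackPool(uint64_t id, std::shared_ptr<BlockPool> pool) {
        std::lock_guard<std::mutex> lock(mMutex);
        if (mClientDied) {
            return false;
        }
        mPools.emplace(id, std::move(pool));
        return true;
    }

private:
    std::mutex mMutex;                   // plays the role of mBlockPoolsMutex
    bool mClientDied = false;
    std::map<uint64_t, std::shared_ptr<BlockPool>> mPools;
};
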
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
index e343655..3f55618 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/Component.h
@@ -66,6 +66,8 @@
const sp<::android::hardware::media::bufferpool::V2_0::
IClientManager>& clientPoolManager);
c2_status_t status() const;
+ // Receives a death notification of the client.
+ void onDeathReceived();
typedef ::android::hardware::graphics::bufferqueue::V1_0::
IGraphicBufferProducer HGraphicBufferProducer1;
@@ -135,6 +137,7 @@
using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
sp<HwDeathRecipient> mDeathRecipient;
+ bool mClientDied{false};
};
} // namespace utils
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index 2dd922f..d0f4f19 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -222,6 +222,21 @@
return mInit;
}
+void Component::onDeathReceived() {
+ {
+ std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+ mClientDied = true;
+ for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+ if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+ std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+ std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+ bqPool->invalidate();
+ }
+ }
+ }
+ release();
+}
+
// Methods from ::android::hardware::media::c2::V1_1::IComponent
Return<Status> Component::queue(const WorkBundle& workBundle) {
std::list<std::unique_ptr<C2Work>> c2works;
@@ -409,9 +424,19 @@
blockPool = nullptr;
}
if (blockPool) {
- mBlockPoolsMutex.lock();
- mBlockPools.emplace(blockPool->getLocalId(), blockPool);
- mBlockPoolsMutex.unlock();
+ bool emplaced = false;
+ {
+ mBlockPoolsMutex.lock();
+ if (!mClientDied) {
+ mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+ emplaced = true;
+ }
+ mBlockPoolsMutex.unlock();
+ }
+ if (!emplaced) {
+ blockPool.reset();
+ status = C2_BAD_STATE;
+ }
} else if (status == C2_OK) {
status = C2_CORRUPTED;
}
@@ -501,8 +526,8 @@
) override {
auto strongComponent = component.promote();
if (strongComponent) {
- LOG(INFO) << "Client died ! release the component !!";
- strongComponent->release();
+ LOG(INFO) << "Client died ! notify and release the component !!";
+ strongComponent->onDeathReceived();
} else {
LOG(ERROR) << "Client died ! no component to release !!";
}
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
index 1c8c20c..f16de24 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/Component.h
@@ -69,6 +69,8 @@
const sp<::android::hardware::media::bufferpool::V2_0::
IClientManager>& clientPoolManager);
c2_status_t status() const;
+ // Receives a death notification of the client.
+ void onDeathReceived();
typedef ::android::hardware::graphics::bufferqueue::V1_0::
IGraphicBufferProducer HGraphicBufferProducer1;
@@ -140,6 +142,7 @@
using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
sp<HwDeathRecipient> mDeathRecipient;
+ bool mClientDied{false};
};
} // namespace utils
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index 7994d32..036c900 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -222,6 +222,21 @@
return mInit;
}
+void Component::onDeathReceived() {
+ {
+ std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+ mClientDied = true;
+ for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+ if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+ std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+ std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+ bqPool->invalidate();
+ }
+ }
+ }
+ release();
+}
+
// Methods from ::android::hardware::media::c2::V1_1::IComponent
Return<Status> Component::queue(const WorkBundle& workBundle) {
std::list<std::unique_ptr<C2Work>> c2works;
@@ -409,9 +424,19 @@
blockPool = nullptr;
}
if (blockPool) {
- mBlockPoolsMutex.lock();
- mBlockPools.emplace(blockPool->getLocalId(), blockPool);
- mBlockPoolsMutex.unlock();
+ bool emplaced = false;
+ {
+ mBlockPoolsMutex.lock();
+ if (!mClientDied) {
+ mBlockPools.emplace(blockPool->getLocalId(), blockPool);
+ emplaced = true;
+ }
+ mBlockPoolsMutex.unlock();
+ }
+ if (!emplaced) {
+ blockPool.reset();
+ status = C2_BAD_STATE;
+ }
} else if (status == C2_OK) {
status = C2_CORRUPTED;
}
@@ -532,8 +557,8 @@
) override {
auto strongComponent = component.promote();
if (strongComponent) {
- LOG(INFO) << "Client died ! release the component !!";
- strongComponent->release();
+ LOG(INFO) << "Client died ! notify and release the component !!";
+ strongComponent->onDeathReceived();
} else {
LOG(ERROR) << "Client died ! no component to release !!";
}
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
index d0972ee..6a73392 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/Component.h
@@ -69,6 +69,8 @@
const sp<::android::hardware::media::bufferpool::V2_0::
IClientManager>& clientPoolManager);
c2_status_t status() const;
+ // Receives a death notification of the client.
+ void onDeathReceived();
typedef ::android::hardware::graphics::bufferqueue::V1_0::
IGraphicBufferProducer HGraphicBufferProducer1;
@@ -145,7 +147,7 @@
using HwDeathRecipient = ::android::hardware::hidl_death_recipient;
sp<HwDeathRecipient> mDeathRecipient;
-
+ bool mClientDied{false};
};
} // namespace utils
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index d72d228..feca03d 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -67,6 +67,7 @@
using hardware::hidl_vec;
using hardware::fromHeap;
using hardware::HidlMemory;
+using server_configurable_flags::GetServerConfigurableFlag;
using namespace hardware::cas::V1_0;
using namespace hardware::cas::native::V1_0;
@@ -82,6 +83,11 @@
// than making it non-blocking. Do not change this value.
const static size_t kDequeueTimeoutNs = 0;
+static bool areRenderMetricsEnabled() {
+ std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+ return v == "true";
+}
+
} // namespace
CCodecBufferChannel::QueueGuard::QueueGuard(
@@ -148,6 +154,7 @@
mCCodecCallback(callback),
mFrameIndex(0u),
mFirstValidFrameIndex(0u),
+ mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
mIsSurfaceToDisplay(false),
mHasPresentFenceTimes(false),
mRenderingDepth(3u),
@@ -174,8 +181,7 @@
Mutexed<BlockPools>::Locked pools(mBlockPools);
pools->outputPoolId = C2BlockPool::BASIC_LINEAR;
}
- std::string value = server_configurable_flags::GetServerConfigurableFlag(
- "media_native", "ccodec_rendering_depth", "3");
+ std::string value = GetServerConfigurableFlag("media_native", "ccodec_rendering_depth", "3");
android::base::ParseInt(value, &mRenderingDepth);
mOutputSurface.lock()->maxDequeueBuffers = kSmoothnessFactor + mRenderingDepth;
}
@@ -996,7 +1002,7 @@
int64_t mediaTimeUs = 0;
(void)buffer->meta()->findInt64("timeUs", &mediaTimeUs);
- if (mIsSurfaceToDisplay) {
+ if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
trackReleasedFrame(qbo, mediaTimeUs, timestampNs);
processRenderedFrames(qbo.frameTimestamps);
} else {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 2d87aa9..41f5ae2 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -334,6 +334,7 @@
sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
std::deque<TrackedFrame> mTrackedFrames;
+ bool mAreRenderMetricsEnabled;
bool mIsSurfaceToDisplay;
bool mHasPresentFenceTimes;
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 9004bcf..261fd05 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -621,8 +621,8 @@
uint8_t maxLvlChroma = colorRange == C2Color::RANGE_FULL ? 255 : 240;
#define CLIP3(min,v,max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
- for (size_t y = 0; y < src.height(); ++y) {
- for (size_t x = 0; x < src.width(); ++x) {
+ for (size_t y = 0; y < src.crop().height; ++y) {
+ for (size_t x = 0; x < src.crop().width; ++x) {
uint8_t r = *pRed;
uint8_t g = *pGreen;
uint8_t b = *pBlue;
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 332d3ac..bb6c1b8 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -82,6 +82,7 @@
return false;
}
+ // Default scenario --- the consumer is display or GPU
const AHardwareBuffer_Desc desc = {
.width = 320,
.height = 240,
@@ -96,7 +97,40 @@
.rfu1 = 0,
};
- return AHardwareBuffer_isSupported(&desc);
+ // The consumer is a HW encoder
+ const AHardwareBuffer_Desc descHwEncoder = {
+ .width = 320,
+ .height = 240,
+ .format = format,
+ .layers = 1,
+ .usage = AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
+ AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
+ AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
+ AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY |
+ AHARDWAREBUFFER_USAGE_VIDEO_ENCODE,
+ .stride = 0,
+ .rfu0 = 0,
+ .rfu1 = 0,
+ };
+
+ // The consumer is a SW encoder
+ const AHardwareBuffer_Desc descSwEncoder = {
+ .width = 320,
+ .height = 240,
+ .format = format,
+ .layers = 1,
+ .usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
+ AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN |
+ AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
+ AHARDWAREBUFFER_USAGE_COMPOSER_OVERLAY,
+ .stride = 0,
+ .rfu0 = 0,
+ .rfu1 = 0,
+ };
+
+ return AHardwareBuffer_isSupported(&desc)
+ && AHardwareBuffer_isSupported(&descHwEncoder)
+ && AHardwareBuffer_isSupported(&descSwEncoder);
}
} // namespace android
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 29aad5e..320b192 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -103,6 +103,13 @@
virtual void getConsumerUsage(uint64_t *consumerUsage);
+ /**
+ * Invalidate the class.
+ *
+ * After the call, fetchGraphicBlock() will return C2_BAD_STATE.
+ */
+ virtual void invalidate();
+
private:
const std::shared_ptr<C2Allocator> mAllocator;
const local_id_t mLocalId;
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index f2cd585..5fb0c8f 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -601,6 +601,9 @@
static int kMaxIgbpRetryDelayUs = 10000;
std::unique_lock<std::mutex> lock(mMutex);
+ if (mInvalidated) {
+ return C2_BAD_STATE;
+ }
if (mLastDqLogTs == 0) {
mLastDqLogTs = getTimestampNow();
} else {
@@ -746,6 +749,11 @@
*consumeUsage = mConsumerUsage;
}
+ void invalidate() {
+ std::scoped_lock<std::mutex> lock(mMutex);
+ mInvalidated = true;
+ }
+
private:
friend struct C2BufferQueueBlockPoolData;
@@ -783,6 +791,7 @@
// if the token has been expired, the buffers will not call IGBP::cancelBuffer()
// when they are no longer used.
std::shared_ptr<int> mIgbpValidityToken;
+ bool mInvalidated{false};
};
C2BufferQueueBlockPoolData::C2BufferQueueBlockPoolData(
@@ -1103,3 +1112,9 @@
}
}
+void C2BufferQueueBlockPool::invalidate() {
+ if (mImpl) {
+ mImpl->invalidate();
+ }
+}
+
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 0365085..e0449ff 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -2099,6 +2099,11 @@
if (tsLayers > 1) {
uint32_t bLayers = std::min(2u, tsLayers - 1); // use up-to 2 B-layers
+ // TODO(b/341121900): Remove this once B frames are handled correctly in screen recorder
+ // use case when the audio source is mic only
+ if (mAudioSource == AUDIO_SOURCE_MIC && mVideoSource == VIDEO_SOURCE_SURFACE) {
+ bLayers = 0;
+ }
uint32_t pLayers = tsLayers - bLayers;
format->setString(
"ts-schema", AStringPrintf("android.generic.%u+%u", pLayers, bLayers));
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e0ebc11..2970aab 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -92,6 +92,7 @@
using aidl::android::media::IResourceManagerClient;
using aidl::android::media::IResourceManagerService;
using aidl::android::media::ClientInfoParcel;
+using server_configurable_flags::GetServerConfigurableFlag;
using FreezeEvent = VideoRenderQualityTracker::FreezeEvent;
using JudderEvent = VideoRenderQualityTracker::JudderEvent;
@@ -282,6 +283,11 @@
return (err == NO_MEMORY);
}
+static bool areRenderMetricsEnabled() {
+ std::string v = GetServerConfigurableFlag("media_native", "render_metrics_enabled", "false");
+ return v == "true";
+}
+
static const int kMaxRetry = 2;
static const int kMaxReclaimWaitTimeInUs = 500000; // 0.5s
static const int kNumBuffersAlign = 16;
@@ -1025,9 +1031,10 @@
mHavePendingInputBuffers(false),
mCpuBoostRequested(false),
mIsSurfaceToDisplay(false),
+ mAreRenderMetricsEnabled(areRenderMetricsEnabled()),
mVideoRenderQualityTracker(
VideoRenderQualityTracker::Configuration::getFromServerConfigurableFlags(
- server_configurable_flags::GetServerConfigurableFlag)),
+ GetServerConfigurableFlag)),
mLatencyUnknown(0),
mBytesEncoded(0),
mEarliestEncodedPtsUs(INT64_MAX),
@@ -6044,7 +6051,7 @@
// If rendering to the screen, then schedule a time in the future to poll to see if this
// frame was ever rendered to seed onFrameRendered callbacks.
- if (mIsSurfaceToDisplay) {
+ if (mAreRenderMetricsEnabled && mIsSurfaceToDisplay) {
if (mediaTimeUs != INT64_MIN) {
noRenderTime ? mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs)
: mVideoRenderQualityTracker.onFrameReleased(mediaTimeUs,
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index 4f12a37..fbd8577 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -154,7 +154,7 @@
}
VideoRenderQualityTracker::Configuration::Configuration() {
- enabled = true;
+ enabled = false;
// Assume that the app is skipping frames because it's detected that the frame couldn't be
// rendered in time.
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index a4b3e2f..14c3fa0 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -312,8 +312,11 @@
outHeader->nFilledLen = frameSize;
List<BufferInfo *>::iterator it = outQueue.begin();
- while ((*it)->mHeader != outHeader) {
- ++it;
+ while (it != outQueue.end() && (*it)->mHeader != outHeader) {
+ ++it;
+ }
+ if (it == outQueue.end()) {
+ return;
}
BufferInfo *outInfo = *it;
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
index 04737a9..9198b7c 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVP8Encoder.cpp
@@ -120,6 +120,11 @@
OMX_ERRORTYPE SoftVP8Encoder::internalGetVp8Params(
OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
+ if (!isValidOMXParam(vp8Params)) {
+ android_errorWriteLog(0x534e4554, "273936274");
+ return OMX_ErrorBadParameter;
+ }
+
if (vp8Params->nPortIndex != kOutputPortIndex) {
return OMX_ErrorUnsupportedIndex;
}
@@ -133,6 +138,11 @@
OMX_ERRORTYPE SoftVP8Encoder::internalSetVp8Params(
const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
+ if (!isValidOMXParam(vp8Params)) {
+ android_errorWriteLog(0x534e4554, "273937171");
+ return OMX_ErrorBadParameter;
+ }
+
if (vp8Params->nPortIndex != kOutputPortIndex) {
return OMX_ErrorUnsupportedIndex;
}
diff --git a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
index 1ea1c85..f8495c2 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVP9Encoder.cpp
@@ -119,6 +119,11 @@
OMX_ERRORTYPE SoftVP9Encoder::internalGetVp9Params(
OMX_VIDEO_PARAM_VP9TYPE *vp9Params) {
+ if (!isValidOMXParam(vp9Params)) {
+ android_errorWriteLog(0x534e4554, "273936553");
+ return OMX_ErrorBadParameter;
+ }
+
if (vp9Params->nPortIndex != kOutputPortIndex) {
return OMX_ErrorUnsupportedIndex;
}
@@ -133,6 +138,11 @@
OMX_ERRORTYPE SoftVP9Encoder::internalSetVp9Params(
const OMX_VIDEO_PARAM_VP9TYPE *vp9Params) {
+ if (!isValidOMXParam(vp9Params)) {
+ android_errorWriteLog(0x534e4554, "273937136");
+ return OMX_ErrorBadParameter;
+ }
+
if (vp9Params->nPortIndex != kOutputPortIndex) {
return OMX_ErrorUnsupportedIndex;
}
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index e9b4341..cbedb72 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -485,6 +485,11 @@
OMX_ERRORTYPE SoftVPXEncoder::internalGetAndroidVpxParams(
OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams) {
+ if (!isValidOMXParam(vpxAndroidParams)) {
+ android_errorWriteLog(0x534e4554, "273936601");
+ return OMX_ErrorBadParameter;
+ }
+
if (vpxAndroidParams->nPortIndex != kOutputPortIndex) {
return OMX_ErrorUnsupportedIndex;
}
@@ -501,6 +506,10 @@
OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVpxParams(
const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vpxAndroidParams) {
+ if (!isValidOMXParam(vpxAndroidParams)) {
+ android_errorWriteLog(0x534e4554, "273937551");
+ return OMX_ErrorBadParameter;
+ }
if (vpxAndroidParams->nPortIndex != kOutputPortIndex) {
return OMX_ErrorUnsupportedIndex;
}
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index f91a8b2..854cc08 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -1549,7 +1549,8 @@
uint32_t u01, v01, y01, y23, y45, y67, uv0, uv1;
size_t x = 0;
- for (; x < src.cropWidth() - 3; x += 4) {
+ // x % 4 is always 0 so x + 3 will never overflow.
+ for (; x + 3 < src.cropWidth(); x += 4) {
u01 = *((uint32_t*)ptr_u); ptr_u += 2;
v01 = *((uint32_t*)ptr_v); ptr_v += 2;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 52d7d3d..163408d 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -572,6 +572,7 @@
sp<ALooper> mCryptoLooper;
bool mIsSurfaceToDisplay;
+ bool mAreRenderMetricsEnabled;
PlaybackDurationAccumulator mPlaybackDurationAccumulator;
VideoRenderQualityTracker mVideoRenderQualityTracker;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index c82a303..959f43e 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -102,6 +102,21 @@
namespace android {
+static bool isValidOmxParamSize(const void *params, OMX_U32 size) {
+ // expect the parameter buffer to contain at least the size and version, two OMX_U32 entries.
+ if (size < 2 * sizeof(OMX_U32)) {
+ return false;
+ }
+
+ // expect the parameter buffer to be at least as large as its declared size
+ OMX_U32 *buf = (OMX_U32 *)params;
+ OMX_U32 declaredSize = *(OMX_U32*)buf;
+ if (declaredSize > size) {
+ return false;
+ }
+ return true;
+}
+
struct BufferMeta {
explicit BufferMeta(
const sp<IMemory> &mem, const sp<IHidlMemory> &hidlMemory,
@@ -688,6 +703,18 @@
status_t OMXNodeInstance::getParameter(
OMX_INDEXTYPE index, void *params, size_t size) {
+ OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
+ if (extIndex == OMX_IndexParamConsumerUsageBits) {
+ // expect the size to be 4 bytes for OMX_IndexParamConsumerUsageBits
+ if (size != sizeof(OMX_U32)) {
+ return BAD_VALUE;
+ }
+ } else {
+ if (!isValidOmxParamSize(params, size)) {
+ return BAD_VALUE;
+ }
+ }
+
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
@@ -699,7 +726,6 @@
}
OMX_ERRORTYPE err = OMX_GetParameter(mHandle, index, params);
- OMX_INDEXEXTTYPE extIndex = (OMX_INDEXEXTTYPE)index;
// some errors are expected for getParameter
if (err != OMX_ErrorNoMore) {
CLOG_IF_ERROR(getParameter, err, "%s(%#x)", asString(extIndex), index);
@@ -710,6 +736,10 @@
status_t OMXNodeInstance::setParameter(
OMX_INDEXTYPE index, const void *params, size_t size) {
+ if (!isValidOmxParamSize(params, size)) {
+ return BAD_VALUE;
+ }
+
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
@@ -736,6 +766,9 @@
status_t OMXNodeInstance::getConfig(
OMX_INDEXTYPE index, void *params, size_t size) {
+ if (!isValidOmxParamSize(params, size)) {
+ return BAD_VALUE;
+ }
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
@@ -759,6 +792,10 @@
status_t OMXNodeInstance::setConfig(
OMX_INDEXTYPE index, const void *params, size_t size) {
+ if (!isValidOmxParamSize(params, size)) {
+ return BAD_VALUE;
+ }
+
Mutex::Autolock autoLock(mLock);
if (mHandle == NULL) {
return DEAD_OBJECT;
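
For context on the new isValidOmxParamSize() checks above: OMX IL parameter and config structs conventionally start with their own size and version, so the first OMX_U32 of the caller-supplied buffer is the declared struct size. A hypothetical struct with that layout is shown below; the type name and payload field are made up, and the standard OpenMAX IL headers are assumed.

#include <OMX_Types.h>   // OMX_U32, OMX_VERSIONTYPE

// Hypothetical parameter struct following the OMX IL header convention.
typedef struct OMX_PARAM_EXAMPLETYPE {
    OMX_U32 nSize;             // total size of this struct, filled in by the caller
    OMX_VERSIONTYPE nVersion;  // OMX IL spec version
    OMX_U32 nPortIndex;        // port the parameter applies to
    OMX_U32 nPayload;          // made-up payload field
} OMX_PARAM_EXAMPLETYPE;

// isValidOmxParamSize() rejects buffers too small to even hold nSize and nVersion
// (two OMX_U32s), and buffers whose declared nSize exceeds the bytes actually provided.

The OMX_IndexParamConsumerUsageBits special case in getParameter() exists because that parameter is a bare OMX_U32 with no nSize/nVersion header, so it is validated by exact size instead.
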
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index e853da9..4ab5d10 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -616,6 +616,17 @@
DescribeHDR10PlusInfoParams* outParams =
(DescribeHDR10PlusInfoParams *)params;
+ if (!isValidOMXParam(outParams)) {
+ return OMX_ErrorBadParameter;
+ }
+ if (offsetof(DescribeHDR10PlusInfoParams, nValue) + outParams->nParamSize >
+ outParams->nSize) {
+ ALOGE("b/329641908: too large param size; nParamSize=%u nSize=%u",
+ outParams->nParamSize, outParams->nSize);
+ android_errorWriteLog(0x534e4554, "329641908");
+ return OMX_ErrorBadParameter;
+ }
+
outParams->nParamSizeUsed = info->size();
// If the buffer provided by the client does not have enough
@@ -694,6 +705,10 @@
const DescribeHDR10PlusInfoParams* inParams =
(DescribeHDR10PlusInfoParams *)params;
+ if (!isValidOMXParam(inParams)) {
+ return OMX_ErrorBadParameter;
+ }
+
if (*frameConfig) {
// This is a request to append to the current frame config set.
// For now, we only support kDescribeHdr10PlusInfoIndex, which
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 41f2d67..bc83410 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -105,6 +105,7 @@
mRTCPAddr = mRTPAddr;
mRTCPAddr.sin_port = htons(ntohs(mRTPAddr.sin_port) | 1);
+ mVPSBuf = NULL;
mSPSBuf = NULL;
mPPSBuf = NULL;
diff --git a/media/module/foundation/MetaDataBase.cpp b/media/module/foundation/MetaDataBase.cpp
index 3370748..46a600a 100644
--- a/media/module/foundation/MetaDataBase.cpp
+++ b/media/module/foundation/MetaDataBase.cpp
@@ -23,6 +23,8 @@
#include <stdlib.h>
#include <string.h>
+#include <mutex>
+
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/hexdump.h>
@@ -78,6 +80,7 @@
struct MetaDataBase::MetaDataInternal {
+ std::mutex mLock;
KeyedVector<uint32_t, MetaDataBase::typed_data> mItems;
};
@@ -102,10 +105,12 @@
}
void MetaDataBase::clear() {
+ std::lock_guard<std::mutex> guard(mInternalData->mLock);
mInternalData->mItems.clear();
}
bool MetaDataBase::remove(uint32_t key) {
+ std::lock_guard<std::mutex> guard(mInternalData->mLock);
ssize_t i = mInternalData->mItems.indexOfKey(key);
if (i < 0) {
@@ -252,6 +257,7 @@
uint32_t key, uint32_t type, const void *data, size_t size) {
bool overwrote_existing = true;
+ std::lock_guard<std::mutex> guard(mInternalData->mLock);
ssize_t i = mInternalData->mItems.indexOfKey(key);
if (i < 0) {
typed_data item;
@@ -269,6 +275,7 @@
bool MetaDataBase::findData(uint32_t key, uint32_t *type,
const void **data, size_t *size) const {
+ std::lock_guard<std::mutex> guard(mInternalData->mLock);
ssize_t i = mInternalData->mItems.indexOfKey(key);
if (i < 0) {
@@ -283,6 +290,7 @@
}
bool MetaDataBase::hasData(uint32_t key) const {
+ std::lock_guard<std::mutex> guard(mInternalData->mLock);
ssize_t i = mInternalData->mItems.indexOfKey(key);
if (i < 0) {
@@ -429,6 +437,7 @@
String8 MetaDataBase::toString() const {
String8 s;
+ std::lock_guard<std::mutex> guard(mInternalData->mLock);
for (int i = mInternalData->mItems.size(); --i >= 0;) {
int32_t key = mInternalData->mItems.keyAt(i);
char cc[5];
@@ -443,6 +452,7 @@
}
void MetaDataBase::dumpToLog() const {
+ std::lock_guard<std::mutex> guard(mInternalData->mLock);
for (int i = mInternalData->mItems.size(); --i >= 0;) {
int32_t key = mInternalData->mItems.keyAt(i);
char cc[5];
@@ -455,6 +465,7 @@
#if defined(__ANDROID__) && !defined(__ANDROID_VNDK__) && !defined(__ANDROID_APEX__)
status_t MetaDataBase::writeToParcel(Parcel &parcel) {
status_t ret;
+ std::lock_guard<std::mutex> guard(mInternalData->mLock);
size_t numItems = mInternalData->mItems.size();
ret = parcel.writeUint32(uint32_t(numItems));
if (ret) {
diff --git a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
index af53f64..6a0c09a 100644
--- a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
+++ b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
@@ -21,6 +21,7 @@
#include <aidl/android/media/IResourceObserverService.h>
#include <android/binder_manager.h>
#include <android/binder_process.h>
+#include <map>
#include <media/TranscodingResourcePolicy.h>
#include <utils/Log.h>
@@ -66,11 +67,31 @@
TranscodingResourcePolicy* mOwner;
};
+// cookie used for death recipients. The TranscodingResourcePolicy
+// that this cookie is associated with must outlive this cookie. The cookie is
+// deleted either in BinderDiedCallback or in the destructor of
+// TranscodingResourcePolicy, which also calls unregisterSelf.
+class TranscodingResourcePolicyCookie {
+ public:
+ TranscodingResourcePolicyCookie(TranscodingResourcePolicy* policy) : mPolicy(policy) {}
+ TranscodingResourcePolicyCookie() = delete;
+ TranscodingResourcePolicy* mPolicy;
+};
+
+static std::map<uintptr_t, std::unique_ptr<TranscodingResourcePolicyCookie>> sCookies;
+static uintptr_t sCookieKeyCounter;
+static std::mutex sCookiesMutex;
+
// static
void TranscodingResourcePolicy::BinderDiedCallback(void* cookie) {
- TranscodingResourcePolicy* owner = reinterpret_cast<TranscodingResourcePolicy*>(cookie);
- if (owner != nullptr) {
- owner->unregisterSelf();
+ std::lock_guard<std::mutex> guard(sCookiesMutex);
+ if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+ ALOGI("BinderDiedCallback unregistering TranscodingResourcePolicy");
+ auto policy = reinterpret_cast<TranscodingResourcePolicy*>(it->second->mPolicy);
+ if (policy) {
+ policy->unregisterSelf();
+ }
+ sCookies.erase(it);
}
// TODO(chz): retry to connecting to IResourceObserverService after failure.
// Also need to have back-up logic if IResourceObserverService is offline for
@@ -88,6 +109,24 @@
}
TranscodingResourcePolicy::~TranscodingResourcePolicy() {
+ {
+ std::lock_guard<std::mutex> guard(sCookiesMutex);
+
+ // delete all of the cookies associated with this TranscodingResourcePolicy
+ // instance since they are holding pointers to this object that will no
+ // longer be valid.
+ for (auto it = sCookies.begin(); it != sCookies.end();) {
+ const uintptr_t key = it->first;
+ std::lock_guard guard(mCookieKeysLock);
+ if (mCookieKeys.find(key) != mCookieKeys.end()) {
+ // No longer need to track this cookie
+ mCookieKeys.erase(key);
+ it = sCookies.erase(it);
+ } else {
+ it++;
+ }
+ }
+ }
unregisterSelf();
}
@@ -123,7 +162,16 @@
return;
}
- AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+ std::unique_ptr<TranscodingResourcePolicyCookie> cookie =
+ std::make_unique<TranscodingResourcePolicyCookie>(this);
+ uintptr_t cookieKey = sCookieKeyCounter++;
+ sCookies.emplace(cookieKey, std::move(cookie));
+ {
+ std::lock_guard guard(mCookieKeysLock);
+ mCookieKeys.insert(cookieKey);
+ }
+
+ AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(cookieKey));
ALOGD("@@@ registered observer");
mRegistered = true;
@@ -141,7 +189,6 @@
::ndk::SpAIBinder binder = mService->asBinder();
if (binder.get() != nullptr) {
Status status = mService->unregisterObserver(mObserver);
- AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
}
mService = nullptr;
diff --git a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
index ee232e7..4d762b5 100644
--- a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
+++ b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
@@ -22,6 +22,7 @@
#include <utils/Condition.h>
#include <mutex>
+#include <set>
namespace aidl {
namespace android {
namespace media {
@@ -48,6 +49,8 @@
bool mRegistered GUARDED_BY(mRegisteredLock);
std::shared_ptr<IResourceObserverService> mService GUARDED_BY(mRegisteredLock);
std::shared_ptr<ResourceObserver> mObserver;
+ mutable std::mutex mCookieKeysLock;
+ std::set<uintptr_t> mCookieKeys;
mutable std::mutex mCallbackLock;
std::weak_ptr<ResourcePolicyCallbackInterface> mResourcePolicyCallback
@@ -59,6 +62,7 @@
static void BinderDiedCallback(void* cookie);
void registerSelf();
+ // must delete the associated TranscodingResourcePolicyCookie any time this is called
void unregisterSelf();
void onResourceAvailable(pid_t pid);
}; // class TranscodingResourcePolicy
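
Taken together, the TranscodingResourcePolicy changes replace the raw `this` cookie with an integer key into a static map, so a death notification that arrives after the policy is destroyed finds nothing to dereference. A minimal non-binder sketch of that idea follows; all names are illustrative, and the real code wires this into AIBinder_linkToDeath and BinderDiedCallback.

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>
#include <set>

class Policy;

struct Cookie {
    Policy* policy;   // the object the death callback wants to reach
};

static std::mutex sCookiesMutex;
static std::map<uintptr_t, std::unique_ptr<Cookie>> sCookies;
static uintptr_t sCookieKeyCounter = 0;

class Policy {
public:
    // Registration path: store a heap cookie under an integer key and remember the key;
    // the key (not `this`) is what gets handed to the C callback as a void*.
    uintptr_t registerCookie() {
        std::lock_guard<std::mutex> guard(sCookiesMutex);
        const uintptr_t key = sCookieKeyCounter++;
        sCookies.emplace(key, std::unique_ptr<Cookie>(new Cookie{this}));
        mKeys.insert(key);
        return key;
    }

    // Destruction path: scrub this object's cookies so a late callback finds nothing.
    ~Policy() {
        std::lock_guard<std::mutex> guard(sCookiesMutex);
        for (auto it = sCookies.begin(); it != sCookies.end();) {
            if (mKeys.count(it->first)) {
                it = sCookies.erase(it);
            } else {
                ++it;
            }
        }
    }

    // Death-callback path: only touch the Policy if its cookie is still registered.
    static void onBinderDied(void* cookie) {
        std::lock_guard<std::mutex> guard(sCookiesMutex);
        auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie));
        if (it != sCookies.end()) {
            it->second->policy->handleDeath();
            sCookies.erase(it);
        }
    }

    void handleDeath() { /* e.g. unregister from the remote service */ }

private:
    std::set<uintptr_t> mKeys;
};
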
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index 2ffd775..ef8c9aa 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -297,6 +297,10 @@
}
void MtpFfsHandle::close() {
+ auto timeout = std::chrono::seconds(2);
+ std::unique_lock lk(m);
+ cv.wait_for(lk, timeout, [this]{ return child_threads == 0; });
+
io_destroy(mCtx);
closeEndpoints();
closeConfig();
@@ -669,6 +673,11 @@
char *temp = new char[me.length];
memcpy(temp, me.data, me.length);
me.data = temp;
+
+ std::unique_lock lk(m);
+ child_threads++;
+ lk.unlock();
+
std::thread t([this, me]() { return this->doSendEvent(me); });
t.detach();
return 0;
@@ -680,6 +689,11 @@
if (static_cast<unsigned>(ret) != length)
PLOG(ERROR) << "Mtp error sending event thread!";
delete[] reinterpret_cast<char*>(me.data);
+
+ std::unique_lock lk(m);
+ child_threads--;
+ lk.unlock();
+ cv.notify_one();
}
} // namespace android
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index e552e03..51cdef0 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -60,6 +60,10 @@
bool mCanceled;
bool mBatchCancel;
+ std::mutex m;
+ std::condition_variable cv;
+ std::atomic<int> child_threads{0};
+
android::base::unique_fd mControl;
// "in" from the host's perspective => sink for mtp server
android::base::unique_fd mBulkIn;
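
The MtpFfsHandle.cpp changes above, together with the members added here, implement a small drain-before-close pattern for detached event threads. Reduced to a standalone sketch (illustrative class, not the MTP code itself): count a worker before detaching it, decrement and notify when it finishes, and have close() wait, with a timeout, until the count drops to zero.

#include <chrono>
#include <condition_variable>
#include <mutex>
#include <thread>

class DetachedWorkers {
public:
    // Mirrors sendEvent(): count the worker before detaching it.
    void spawn() {
        {
            std::unique_lock<std::mutex> lk(m);
            ++child_threads;
        }
        std::thread([this] {
            // ... do the work (doSendEvent() in the real code) ...
            std::unique_lock<std::mutex> lk(m);
            --child_threads;
            lk.unlock();
            cv.notify_one();
        }).detach();
    }

    // Mirrors close(): wait up to 2 seconds for outstanding workers before tearing down.
    void drain() {
        std::unique_lock<std::mutex> lk(m);
        cv.wait_for(lk, std::chrono::seconds(2), [this] { return child_threads == 0; });
    }

private:
    std::mutex m;
    std::condition_variable cv;
    int child_threads = 0;
};
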
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
index f069a83..634aa46 100644
--- a/media/mtp/MtpPacket.cpp
+++ b/media/mtp/MtpPacket.cpp
@@ -92,24 +92,46 @@
}
uint16_t MtpPacket::getUInt16(int offset) const {
- return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+ if ((unsigned long)(offset+2) <= mBufferSize) {
+ return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+ }
+ else {
+ ALOGE("offset for buffer read is greater than buffer size!");
+ abort();
+ }
}
uint32_t MtpPacket::getUInt32(int offset) const {
- return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
- ((uint32_t)mBuffer[offset + 1] << 8) | (uint32_t)mBuffer[offset];
+ if ((unsigned long)(offset+4) <= mBufferSize) {
+ return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
+ ((uint32_t)mBuffer[offset + 1] << 8) | (uint32_t)mBuffer[offset];
+ }
+ else {
+ ALOGE("offset for buffer read is greater than buffer size!");
+ abort();
+ }
}
void MtpPacket::putUInt16(int offset, uint16_t value) {
- mBuffer[offset++] = (uint8_t)(value & 0xFF);
- mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+ if ((unsigned long)(offset+2) <= mBufferSize) {
+ mBuffer[offset++] = (uint8_t)(value & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+ }
+ else {
+ ALOGE("offset for buffer write is greater than buffer size!");
+ }
}
void MtpPacket::putUInt32(int offset, uint32_t value) {
- mBuffer[offset++] = (uint8_t)(value & 0xFF);
- mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
- mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
- mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+ if ((unsigned long)(offset+4) <= mBufferSize) {
+ mBuffer[offset++] = (uint8_t)(value & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+ }
+ else {
+ ALOGE("offset for buffer write is greater than buffer size!");
+ }
}
uint16_t MtpPacket::getContainerCode() const {
@@ -146,8 +168,10 @@
return;
}
int offset = MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t);
- if (mPacketSize < offset + sizeof(uint32_t))
+ if (mPacketSize < offset + sizeof(uint32_t)) {
mPacketSize = offset + sizeof(uint32_t);
+ allocate(mPacketSize);
+ }
putUInt32(offset, value);
}
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
index 36d7360..2bdbfd3 100644
--- a/media/mtp/MtpProperty.h
+++ b/media/mtp/MtpProperty.h
@@ -26,6 +26,9 @@
class MtpDataPacket;
struct MtpPropertyValue {
+ // Initialize the str pointer to NULL so that free() is not
+ // called on a value that was never assigned
+ MtpPropertyValue() : str (NULL) {}
union {
int8_t i8;
uint8_t u8;
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 9367949..889ae96 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -25,6 +25,7 @@
#include <sys/time.h>
#include <dlfcn.h>
+#include <android/content/pm/IPackageManagerNative.h>
#include <audio_utils/clock.h>
#include <binder/IServiceManager.h>
#include <utils/Log.h>
@@ -202,6 +203,26 @@
{
delete interface;
}
+
+namespace {
+int getTargetSdkForPackageName(std::string_view packageName) {
+ const auto binder = defaultServiceManager()->checkService(String16{"package_native"});
+ int targetSdk = -1;
+ if (binder != nullptr) {
+ const auto pm = interface_cast<content::pm::IPackageManagerNative>(binder);
+ if (pm != nullptr) {
+ const auto status = pm->getTargetSdkVersionForPackage(
+ String16{packageName.data(), packageName.size()}, &targetSdk);
+ return status.isOk() ? targetSdk : -1;
+ }
+ }
+ return targetSdk;
+}
+
+bool doesPackageTargetAtLeastU(std::string_view packageName) {
+ return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
+}
+} // anonymous
// ----------------------------------------------------------------------------
AudioPolicyService::AudioPolicyService()
@@ -1913,11 +1934,14 @@
checkOp();
mOpCallback = new RecordAudioOpCallback(this);
ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+ int flags = doesPackageTargetAtLeastU(
+ mAttributionSource.packageName.value_or("")) ?
+ AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
// TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
// since it controls the mic permission for legacy apps.
mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
mAttributionSource.packageName.value_or(""))),
- AppOpsManager::WATCH_FOREGROUND_CHANGES,
+ flags,
mOpCallback);
}
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index a387064..aaf5d8d 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -486,7 +486,7 @@
bufferDeferred = true;
} else {
nsecs_t presentTime = mSyncToDisplay ?
- syncTimestampToDisplayLocked(captureTime, releaseFence->dup()) : captureTime;
+ syncTimestampToDisplayLocked(captureTime, releaseFence) : captureTime;
setTransform(transform, true/*mayChangeMirror*/);
res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
@@ -1410,7 +1410,7 @@
}
}
-nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, int releaseFence) {
+nsecs_t Camera3OutputStream::syncTimestampToDisplayLocked(nsecs_t t, sp<Fence> releaseFence) {
nsecs_t currentTime = systemTime();
if (!mFixedFps) {
mLastCaptureTime = t;
@@ -1458,8 +1458,8 @@
mRefVsyncData = vsyncEventData;
mReferenceCaptureTime = t;
mReferenceArrivalTime = currentTime;
- if (releaseFence != -1) {
- mReferenceFrameFence = new Fence(releaseFence);
+ if (releaseFence->isValid()) {
+ mReferenceFrameFence = new Fence(releaseFence->dup());
} else {
mFenceSignalOffset = 0;
}
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 1435081..ebd5797 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -446,7 +446,7 @@
static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
static constexpr float kMaxIntervalRatioDeviation = 0.05f;
static constexpr int kMaxTimelines = 2;
- nsecs_t syncTimestampToDisplayLocked(nsecs_t t, int releaseFence);
+ nsecs_t syncTimestampToDisplayLocked(nsecs_t t, sp<Fence> releaseFence);
// In case of fence being used
sp<Fence> mReferenceFrameFence;