Merge "camera: Add crop and metering region correction for max resolution requests." into sc-dev
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
index 8fe48c2..716b550 100644
--- a/PREUPLOAD.cfg
+++ b/PREUPLOAD.cfg
@@ -1,6 +1,8 @@
[Hook Scripts]
mainline_hook = ${REPO_ROOT}/frameworks/av/tools/mainline_hook_partial.sh ${REPO_ROOT} ${PREUPLOAD_FILES}
+hidden_api_txt_checksorted_hook = ${REPO_ROOT}/tools/platform-compat/hiddenapi/checksorted_sha.sh ${PREUPLOAD_COMMIT} ${REPO_ROOT}
+
[Builtin Hooks]
clang_format = true
diff --git a/apex/Android.bp b/apex/Android.bp
index a86d2b9..b9abd12 100644
--- a/apex/Android.bp
+++ b/apex/Android.bp
@@ -98,6 +98,35 @@
name: "com.android.media-bootclasspath-fragment",
contents: ["updatable-media"],
apex_available: ["com.android.media"],
+
+ api: {
+ stub_libs: [
+ // Stubs for the APIs provided by updatable-media. This has to be
+ // specified explicitly because updatable-media is not a
+ // java_sdk_library.
+ "framework-media",
+ ],
+ },
+
+ // The bootclasspath_fragments that provide APIs on which this depends.
+ fragments: [
+ {
+ apex: "com.android.art",
+ module: "art-bootclasspath-fragment",
+ },
+ ],
+
+ // Additional stubs libraries that this fragment's contents use which are
+ // not provided by another bootclasspath_fragment.
+ additional_stubs: [
+ "android-non-updatable",
+ ],
+
+ // Additional hidden API flag files to override the defaults. This must only be
+ // modified by the Soong or platform compat team.
+ hidden_api: {
+ max_target_o_low_priority: ["hiddenapi/hiddenapi-max-target-o-low-priority.txt"],
+ },
}
// Encapsulate the contributions made by the com.android.media to the systemserverclasspath.
diff --git a/apex/hiddenapi/OWNERS b/apex/hiddenapi/OWNERS
new file mode 100644
index 0000000..ac8a2b6
--- /dev/null
+++ b/apex/hiddenapi/OWNERS
@@ -0,0 +1,5 @@
+# soong-team@ as the hiddenapi files are tightly coupled with Soong
+file:platform/build/soong:/OWNERS
+
+# compat-team@ for changes to hiddenapi files
+file:tools/platform-compat:/OWNERS
diff --git a/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt b/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt
new file mode 100644
index 0000000..32bbb10
--- /dev/null
+++ b/apex/hiddenapi/hiddenapi-max-target-o-low-priority.txt
@@ -0,0 +1,6 @@
+Landroid/media/MediaSession2$ControllerInfo;-><init>(Landroid/content/Context;IILjava/lang/String;Landroid/os/IInterface;)V
+Landroid/media/MediaSession2$ControllerInfo;->getPackageName()Ljava/lang/String;
+Landroid/media/MediaSession2$ControllerInfo;->getProvider()Landroid/media/update/MediaSession2Provider$ControllerInfoProvider;
+Landroid/media/MediaSession2$ControllerInfo;->getUid()I
+Landroid/media/MediaSession2$ControllerInfo;->isTrusted()Z
+Landroid/media/MediaSession2$ControllerInfo;->mProvider:Landroid/media/update/MediaSession2Provider$ControllerInfoProvider;
diff --git a/apex/manifest.json b/apex/manifest.json
index c7e56be..5d72031 100644
--- a/apex/manifest.json
+++ b/apex/manifest.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media",
- "version": 309999910,
+ "version": 319999900,
"requireNativeLibs": [
"libandroid.so",
"libbinder_ndk.so",
diff --git a/apex/manifest_codec.json b/apex/manifest_codec.json
index d36e914..b0d962d 100644
--- a/apex/manifest_codec.json
+++ b/apex/manifest_codec.json
@@ -1,6 +1,6 @@
{
"name": "com.android.media.swcodec",
- "version": 309999910,
+ "version": 319999900,
"requireNativeLibs": [
":sphal"
]
diff --git a/apex/mediatranscoding.rc b/apex/mediatranscoding.rc
index 24306a2..ae9f8ba 100644
--- a/apex/mediatranscoding.rc
+++ b/apex/mediatranscoding.rc
@@ -8,4 +8,5 @@
ioprio rt 4
# Restrict to little cores only with system-background cpuset.
writepid /dev/cpuset/system-background/tasks
+ interface aidl media.transcoding
disabled
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 3d93ba5..52cd4b4 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -1988,6 +1988,16 @@
* ACAMERA_CONTROL_ZOOM_RATIO is not 1.0, and ACAMERA_SCALER_CROP_REGION is set to be
* windowboxing, the camera framework will override the ACAMERA_SCALER_CROP_REGION to be
* the active array.</p>
+ * <p>In the capture request, if the application sets ACAMERA_CONTROL_ZOOM_RATIO to a
+ * value != 1.0, the ACAMERA_CONTROL_ZOOM_RATIO tag in the capture result reflects the
+ * effective zoom ratio achieved by the camera device, and the ACAMERA_SCALER_CROP_REGION
+ * adjusts for additional crops that are not zoom related. Otherwise, if the application
+ * sets ACAMERA_CONTROL_ZOOM_RATIO to 1.0, or does not set it at all, the
+ * ACAMERA_CONTROL_ZOOM_RATIO tag in the result metadata will also be 1.0.</p>
+ * <p>When the application requests a physical stream for a logical multi-camera, the
+ * ACAMERA_CONTROL_ZOOM_RATIO in the physical camera result metadata will be 1.0, and
+ * the ACAMERA_SCALER_CROP_REGION tag reflects the amount of zoom and crop done by the
+ * physical camera device.</p>
*
* @see ACAMERA_CONTROL_AE_REGIONS
* @see ACAMERA_CONTROL_ZOOM_RATIO
@@ -8898,13 +8908,27 @@
* camera's crop region is set to maximum size, the FOV of the physical streams for the
* ultrawide lens will be the same as the logical stream, by making the crop region
* smaller than its active array size to compensate for the smaller focal length.</p>
- * <p>Even if the underlying physical cameras have different RAW characteristics (such as
- * size or CFA pattern), a logical camera can still advertise RAW capability. In this
- * case, when the application configures a RAW stream, the camera device will make sure
- * the active physical camera will remain active to ensure consistent RAW output
- * behavior, and not switch to other physical cameras.</p>
+ * <p>There are two ways for the application to capture RAW images from a logical camera
+ * with RAW capability:</p>
+ * <ul>
+ * <li>Because the underlying physical cameras may have different RAW capabilities (such
+ * as resolution or CFA pattern), to maintain backward compatibility, when a RAW stream
+ * is configured, the camera device makes sure the default active physical camera remains
+ * active and does not switch to other physical cameras. (One exception is that, if the
+ * logical camera consists of identical image sensors and advertises multiple focalLength values
+ * due to different lenses, the camera device may generate RAW images from different
+ * physical cameras based on the focalLength being set by the application.) This
+ * backward-compatible approach usually results in loss of optical zoom to the
+ * telephoto or ultrawide lens.</li>
+ * <li>Alternatively, to take advantage of the full zoomRatio range of the logical camera,
+ * the application should use <a href="https://developer.android.com/reference/android/hardware/camera2/MultiResolutionImageReader.html">MultiResolutionImageReader</a>
+ * to capture RAW images from the currently active physical camera. Because different
+ * physical cameras may have different RAW characteristics, the application needs to use
+ * the characteristics and result metadata of the active physical camera for the
+ * relevant RAW metadata.</li>
+ * </ul>
* <p>The capture request and result metadata tags required for backward compatible camera
- * functionalities will be solely based on the logical camera capabiltity. On the other
+ * functionalities will be solely based on the logical camera capability. On the other
* hand, the use of manual capture controls (sensor or post-processing) with a
* logical camera may result in unexpected behavior when the HAL decides to switch
* between physical cameras with different characteristics under the hood. For example,
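A minimal request-side sketch of the zoom-ratio semantics documented above; the helper name is hypothetical, while ACaptureRequest_setEntry_float and the tag are the existing NDK API:

    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Request a 2x zoom: the capture result then reports the effective zoom in
    // ACAMERA_CONTROL_ZOOM_RATIO, while ACAMERA_SCALER_CROP_REGION covers only
    // crops that are not zoom related.
    static camera_status_t setZoomRatio(ACaptureRequest* request) {
        const float zoomRatio = 2.0f;
        return ACaptureRequest_setEntry_float(
                request, ACAMERA_CONTROL_ZOOM_RATIO, 1 /*count*/, &zoomRatio);
    }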
diff --git a/drm/drmserver/drmserver.rc b/drm/drmserver/drmserver.rc
index de46fb9..eb176c1 100644
--- a/drm/drmserver/drmserver.rc
+++ b/drm/drmserver/drmserver.rc
@@ -1,5 +1,12 @@
service drm /system/bin/drmserver
+ disabled
class main
user drm
group drm system inet drmrpc readproc
writepid /dev/cpuset/foreground/tasks
+
+on property:drm.service.enabled=true
+ start drm
+
+on property:drm.service.enabled=1
+ start drm
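With drmserver now disabled by default, a hypothetical sketch of how native code with the appropriate permissions could start it through the triggers above:

    #include <cutils/properties.h>

    static void enableDrmService() {
        // Either "true" or "1" starts the service via the init triggers above.
        property_set("drm.service.enabled", "true");
    }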
diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp
index b0a441b..a2cac3f 100644
--- a/drm/libdrmframework/DrmManagerClientImpl.cpp
+++ b/drm/libdrmframework/DrmManagerClientImpl.cpp
@@ -52,25 +52,13 @@
const sp<IDrmManagerService>& DrmManagerClientImpl::getDrmManagerService() {
Mutex::Autolock lock(sMutex);
if (NULL == sDrmManagerService.get()) {
- char value[PROPERTY_VALUE_MAX];
- if (property_get("drm.service.enabled", value, NULL) == 0) {
- // Drm is undefined for this device
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("drm.drmManager"));
+ if (binder == NULL) {
+ // Do NOT retry; IServiceManager already waits for ~5 seconds
+ // in getService if a service doesn't yet exist.
return sDrmManagerService;
}
-
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder;
- do {
- binder = sm->getService(String16("drm.drmManager"));
- if (binder != 0) {
- break;
- }
- ALOGW("DrmManagerService not published, waiting...");
- struct timespec reqt;
- reqt.tv_sec = 0;
- reqt.tv_nsec = 500000000; //0.5 sec
- nanosleep(&reqt, NULL);
- } while (true);
if (NULL == sDeathNotifier.get()) {
sDeathNotifier = new DeathNotifier();
}
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index 332696d..342d771 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -289,13 +289,14 @@
mOutputDelayRingBufferFilled = 0;
mBuffersInfo.clear();
- // To make the codec behave the same before and after a reset, we need to invalidate the
- // streaminfo struct. This does that:
- mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
-
+ status_t status = UNKNOWN_ERROR;
+ if (mAACDecoder) {
+ aacDecoder_Close(mAACDecoder);
+ status = initDecoder();
+ }
mSignalledError = false;
- return C2_OK;
+ return status == OK ? C2_OK : C2_CORRUPTED;
}
void C2SoftAacDec::onReset() {
@@ -514,8 +515,8 @@
// TODO: error handling, proper usage, etc.
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
- c2_status_t err = pool->fetchLinearBlock(
- numSamples * sizeof(int16_t), usage, &block);
+ size_t bufferSize = numSamples * sizeof(int16_t);
+ c2_status_t err = pool->fetchLinearBlock(bufferSize, usage, &block);
if (err != C2_OK) {
ALOGD("failed to fetch a linear block (%d)", err);
return std::bind(fillEmptyWork, _1, C2_NO_MEMORY);
@@ -529,7 +530,7 @@
mSignalledError = true;
return std::bind(fillEmptyWork, _1, C2_CORRUPTED);
}
- return [buffer = createLinearBuffer(block)](
+ return [buffer = createLinearBuffer(block, 0, bufferSize)](
const std::unique_ptr<C2Work> &work) {
work->result = C2_OK;
C2FrameData &output = work->worklets.front()->output;
diff --git a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
index c08e02b..e92d38d 100644
--- a/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
+++ b/media/codec2/components/amr_nb_wb/C2SoftAmrDec.cpp
@@ -143,7 +143,7 @@
if (!mIsWide) {
Speech_Decode_Frame_reset(mAmrHandle);
} else {
- pvDecoder_AmrWb_Reset(mAmrHandle, 0 /* reset_all */);
+ pvDecoder_AmrWb_Reset(mAmrHandle, 1 /* reset_all */);
}
mSignalledError = false;
mSignalledOutputEos = false;
@@ -361,7 +361,13 @@
work->worklets.front()->output.flags = work->input.flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+ // The decoder wrote ((intptr_t)output - (intptr_t)wView.data()) bytes;
+ // use calOutSize as it holds the expected number of output bytes.
+ ALOGD_IF(calOutSize != ((intptr_t)output - (intptr_t)wView.data()),
+ "Expected %zu output bytes, but filled %lld",
+ calOutSize, (long long)((intptr_t)output - (intptr_t)wView.data()));
+ work->worklets.front()->output.buffers.push_back(
+ createLinearBuffer(block, 0, calOutSize));
work->worklets.front()->output.ordinal = work->input.ordinal;
if (eos) {
mSignalledOutputEos = true;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.cpp b/media/codec2/components/avc/C2SoftAvcEnc.cpp
index 0b121ad..d65ffa5 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.cpp
+++ b/media/codec2/components/avc/C2SoftAvcEnc.cpp
@@ -393,6 +393,61 @@
C2P<C2StreamPictureQuantizationTuning::output> &me) {
(void)mayBlock;
(void)me;
+
+ // TODO: refactor with same algorithm in the SetQp()
+ int32_t iMin = DEFAULT_I_QP_MIN, pMin = DEFAULT_P_QP_MIN, bMin = DEFAULT_B_QP_MIN;
+ int32_t iMax = DEFAULT_I_QP_MAX, pMax = DEFAULT_P_QP_MAX, bMax = DEFAULT_B_QP_MAX;
+
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ iMax = layer.max;
+ iMin = layer.min;
+ ALOGV("iMin %d iMax %d", iMin, iMax);
+ } else if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ pMax = layer.max;
+ pMin = layer.min;
+ ALOGV("pMin %d pMax %d", pMin, pMax);
+ } else if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+ bMax = layer.max;
+ bMin = layer.min;
+ ALOGV("bMin %d bMax %d", bMin, bMax);
+ }
+ }
+
+ ALOGV("PictureQuantizationSetter(entry): i %d-%d p %d-%d b %d-%d",
+ iMin, iMax, pMin, pMax, bMin, bMax);
+
+ // ensure we have legal values
+ iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
+ iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
+ pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
+ pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
+ bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
+ bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
+
+ // put them back into the structure
+ for (size_t i = 0; i < me.v.flexCount(); ++i) {
+ const C2PictureQuantizationStruct &layer = me.v.m.values[i];
+
+ if (layer.type_ == C2Config::picture_type_t(I_FRAME)) {
+ me.set().m.values[i].max = iMax;
+ me.set().m.values[i].min = iMin;
+ }
+ if (layer.type_ == C2Config::picture_type_t(P_FRAME)) {
+ me.set().m.values[i].max = pMax;
+ me.set().m.values[i].min = pMin;
+ }
+ if (layer.type_ == C2Config::picture_type_t(B_FRAME)) {
+ me.set().m.values[i].max = bMax;
+ me.set().m.values[i].min = bMin;
+ }
+ }
+
+ ALOGV("PictureQuantizationSetter(exit): i %d-%d p %d-%d b %d-%d",
+ iMin, iMax, pMin, pMax, bMin, bMax);
+
return C2R::Ok();
}
@@ -765,10 +820,11 @@
s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL;
s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP;
- // these are the ones we're going to set, so want them to default ....
- // to the DEFAULT values for the codec instea dof CODEC_ bounding
- int32_t iMin = INT32_MIN, pMin = INT32_MIN, bMin = INT32_MIN;
- int32_t iMax = INT32_MAX, pMax = INT32_MAX, bMax = INT32_MAX;
+ // TODO: refactor with same algorithm in the PictureQuantizationSetter()
+ int32_t iMin = DEFAULT_I_QP_MIN, pMin = DEFAULT_P_QP_MIN, bMin = DEFAULT_B_QP_MIN;
+ int32_t iMax = DEFAULT_I_QP_MAX, pMax = DEFAULT_P_QP_MAX, bMax = DEFAULT_B_QP_MAX;
+
+ IntfImpl::Lock lock = mIntf->lock();
std::shared_ptr<C2StreamPictureQuantizationTuning::output> qp =
mIntf->getPictureQuantization_l();
@@ -790,22 +846,6 @@
}
}
- // INT32_{MIN,MAX} means unspecified, so use the codec's default
- if (iMax == INT32_MAX) iMax = DEFAULT_I_QP_MAX;
- if (iMin == INT32_MIN) iMin = DEFAULT_I_QP_MIN;
- if (pMax == INT32_MAX) pMax = DEFAULT_P_QP_MAX;
- if (pMin == INT32_MIN) pMin = DEFAULT_P_QP_MIN;
- if (bMax == INT32_MAX) bMax = DEFAULT_B_QP_MAX;
- if (bMin == INT32_MIN) bMin = DEFAULT_B_QP_MIN;
-
- // ensure we have legal values
- iMax = std::clamp(iMax, CODEC_QP_MIN, CODEC_QP_MAX);
- iMin = std::clamp(iMin, CODEC_QP_MIN, CODEC_QP_MAX);
- pMax = std::clamp(pMax, CODEC_QP_MIN, CODEC_QP_MAX);
- pMin = std::clamp(pMin, CODEC_QP_MIN, CODEC_QP_MAX);
- bMax = std::clamp(bMax, CODEC_QP_MIN, CODEC_QP_MAX);
- bMin = std::clamp(bMin, CODEC_QP_MIN, CODEC_QP_MAX);
-
s_qp_ip.u4_i_qp_max = iMax;
s_qp_ip.u4_i_qp_min = iMin;
s_qp_ip.u4_p_qp_max = pMax;
@@ -818,7 +858,7 @@
s_qp_ip.u4_p_qp = std::clamp(DEFAULT_P_QP, pMin, pMax);
s_qp_ip.u4_b_qp = std::clamp(DEFAULT_B_QP, bMin, bMax);
- ALOGV("setting QP: i %d-%d p %d-%d b %d-%d", iMin, iMax, pMin, pMax, bMin, bMax);
+ ALOGV("setQp(): i %d-%d p %d-%d b %d-%d", iMin, iMax, pMin, pMax, bMin, bMax);
s_qp_ip.u4_timestamp_high = -1;
diff --git a/media/codec2/components/avc/C2SoftAvcEnc.h b/media/codec2/components/avc/C2SoftAvcEnc.h
index baf33e2..1fecd9e 100644
--- a/media/codec2/components/avc/C2SoftAvcEnc.h
+++ b/media/codec2/components/avc/C2SoftAvcEnc.h
@@ -99,8 +99,10 @@
#define STRLENGTH 500
#define DEFAULT_CONSTRAINED_INTRA 0
-/** limits as specified by h264 */
-#define CODEC_QP_MIN 0
+/** limits as specified by h264
+ * (QP_MIN==4 is actually a limitation of this SW codec, not the H.264 standard)
+ **/
+#define CODEC_QP_MIN 4
#define CODEC_QP_MAX 51
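An illustrative sketch (hypothetical names) of folding a requested QP range into these bounds, mirroring the std::clamp logic added in C2SoftAvcEnc.cpp; the minimum of 4 comes from this SW codec, not the H.264 spec:

    #include <algorithm>
    #include <cstdint>
    #include <utility>

    constexpr int32_t kQpMin = 4;   // CODEC_QP_MIN
    constexpr int32_t kQpMax = 51;  // CODEC_QP_MAX

    // Returns the {min, max} pair clamped to the encoder's legal QP range.
    static std::pair<int32_t, int32_t> legalizeQpRange(int32_t qpMin, int32_t qpMax) {
        return {std::clamp(qpMin, kQpMin, kQpMax), std::clamp(qpMax, kQpMin, kQpMax)};
    }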
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index dfad226..6c4b7d9 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -592,21 +592,11 @@
}
std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
- const std::shared_ptr<C2LinearBlock> &block) {
- return createLinearBuffer(block, block->offset(), block->size());
-}
-
-std::shared_ptr<C2Buffer> SimpleC2Component::createLinearBuffer(
const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size) {
return C2Buffer::CreateLinearBuffer(block->share(offset, size, ::C2Fence()));
}
std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
- const std::shared_ptr<C2GraphicBlock> &block) {
- return createGraphicBuffer(block, C2Rect(block->width(), block->height()));
-}
-
-std::shared_ptr<C2Buffer> SimpleC2Component::createGraphicBuffer(
const std::shared_ptr<C2GraphicBlock> &block, const C2Rect &crop) {
return C2Buffer::CreateGraphicBuffer(block->share(crop, ::C2Fence()));
}
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 22d5714..e5e16d8 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -140,15 +140,9 @@
std::shared_ptr<C2Buffer> createLinearBuffer(
- const std::shared_ptr<C2LinearBlock> &block);
-
- std::shared_ptr<C2Buffer> createLinearBuffer(
const std::shared_ptr<C2LinearBlock> &block, size_t offset, size_t size);
std::shared_ptr<C2Buffer> createGraphicBuffer(
- const std::shared_ptr<C2GraphicBlock> &block);
-
- std::shared_ptr<C2Buffer> createGraphicBuffer(
const std::shared_ptr<C2GraphicBlock> &block,
const C2Rect &crop);
diff --git a/media/codec2/components/g711/C2SoftG711Dec.cpp b/media/codec2/components/g711/C2SoftG711Dec.cpp
index f9299af..f952f22 100644
--- a/media/codec2/components/g711/C2SoftG711Dec.cpp
+++ b/media/codec2/components/g711/C2SoftG711Dec.cpp
@@ -199,7 +199,7 @@
work->worklets.front()->output.flags = work->input.flags;
work->worklets.front()->output.buffers.clear();
- work->worklets.front()->output.buffers.push_back(createLinearBuffer(block));
+ work->worklets.front()->output.buffers.push_back(createLinearBuffer(block, 0, outSize));
work->worklets.front()->output.ordinal = work->input.ordinal;
if (eos) {
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index 2fa4f25..f857e87 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -26,6 +26,11 @@
#include <media/stagefright/foundation/MediaDefs.h>
namespace android {
+namespace {
+
+constexpr uint8_t NEUTRAL_UV_VALUE = 128;
+
+} // namespace
// codecname set and passed in as a compile flag from Android.bp
constexpr char COMPONENT_NAME[] = CODECNAME;
@@ -51,8 +56,8 @@
DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
.withDefault(new C2StreamPictureSizeInfo::output(0u, 320, 240))
.withFields({
- C2F(mSize, width).inRange(2, 2048, 2),
- C2F(mSize, height).inRange(2, 2048, 2),
+ C2F(mSize, width).inRange(2, 4096, 2),
+ C2F(mSize, height).inRange(2, 4096, 2),
})
.withSetter(SizeSetter)
.build());
@@ -464,7 +469,8 @@
const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
size_t dstYStride, size_t dstUVStride,
- uint32_t width, uint32_t height) {
+ uint32_t width, uint32_t height,
+ bool isMonochrome) {
for (size_t i = 0; i < height; ++i) {
memcpy(dstY, srcY, width);
@@ -472,6 +478,17 @@
dstY += dstYStride;
}
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t i = 0; i < height / 2; ++i) {
+ memset(dstV, NEUTRAL_UV_VALUE, width / 2);
+ memset(dstU, NEUTRAL_UV_VALUE, width / 2);
+ dstV += dstUVStride;
+ dstU += dstUVStride;
+ }
+ return;
+ }
+
for (size_t i = 0; i < height / 2; ++i) {
memcpy(dstV, srcV, width / 2);
srcV += srcVStride;
@@ -557,7 +574,7 @@
const uint16_t *srcY, const uint16_t *srcU, const uint16_t *srcV,
size_t srcYStride, size_t srcUStride, size_t srcVStride,
size_t dstYStride, size_t dstUVStride,
- size_t width, size_t height) {
+ size_t width, size_t height, bool isMonochrome) {
for (size_t y = 0; y < height; ++y) {
for (size_t x = 0; x < width; ++x) {
@@ -568,6 +585,17 @@
dstY += dstYStride;
}
+ if (isMonochrome) {
+ // Fill with neutral U/V values.
+ for (size_t y = 0; y < (height + 1) / 2; ++y) {
+ memset(dstV, NEUTRAL_UV_VALUE, (width + 1) / 2);
+ memset(dstU, NEUTRAL_UV_VALUE, (width + 1) / 2);
+ dstV += dstUVStride;
+ dstU += dstUVStride;
+ }
+ return;
+ }
+
for (size_t y = 0; y < (height + 1) / 2; ++y) {
for (size_t x = 0; x < (width + 1) / 2; ++x) {
dstU[x] = (uint8_t)(srcU[x] >> 2);
@@ -623,8 +651,16 @@
}
}
- // TODO(vigneshv): Add support for monochrome videos since AV1 supports it.
- CHECK(buffer->image_format == libgav1::kImageFormatYuv420);
+ if (!(buffer->image_format == libgav1::kImageFormatYuv420 ||
+ buffer->image_format == libgav1::kImageFormatMonochrome400)) {
+ ALOGE("image_format %d not supported", buffer->image_format);
+ mSignalledError = true;
+ work->workletsProcessed = 1u;
+ work->result = C2_CORRUPTED;
+ return false;
+ }
+ const bool isMonochrome =
+ buffer->image_format == libgav1::kImageFormatMonochrome400;
std::shared_ptr<C2GraphicBlock> block;
uint32_t format = HAL_PIXEL_FORMAT_YV12;
@@ -636,6 +672,13 @@
if (defaultColorAspects->primaries == C2Color::PRIMARIES_BT2020 &&
defaultColorAspects->matrix == C2Color::MATRIX_BT2020 &&
defaultColorAspects->transfer == C2Color::TRANSFER_ST2084) {
+ if (buffer->image_format != libgav1::kImageFormatYuv420) {
+ ALOGE("Only YUV420 output is supported when targeting RGBA_1010102");
+ mSignalledError = true;
+ work->result = C2_OMITTED;
+ work->workletsProcessed = 1u;
+ return false;
+ }
format = HAL_PIXEL_FORMAT_RGBA_1010102;
}
}
@@ -682,21 +725,18 @@
(uint32_t *)dstY, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
srcVStride / 2, dstYStride / sizeof(uint32_t), mWidth, mHeight);
} else {
- convertYUV420Planar16ToYUV420Planar(dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride / 2, srcUStride / 2, srcVStride / 2,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ convertYUV420Planar16ToYUV420Planar(
+ dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2, srcUStride / 2,
+ srcVStride / 2, dstYStride, dstUVStride, mWidth, mHeight,
+ isMonochrome);
}
} else {
const uint8_t *srcY = (const uint8_t *)buffer->plane[0];
const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
- copyOutputBufferToYV12Frame(dstY, dstU, dstV,
- srcY, srcU, srcV,
- srcYStride, srcUStride, srcVStride,
- dstYStride, dstUVStride,
- mWidth, mHeight);
+ copyOutputBufferToYV12Frame(
+ dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride,
+ dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
}
finishWork(buffer->user_private_data, work, std::move(block));
block = nullptr;
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index 7137767..30d7394 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -16,6 +16,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "C2SoftMp3Dec"
+#include <inttypes.h>
#include <log/log.h>
#include <numeric>
@@ -485,10 +486,10 @@
}
}
- uint64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
+ int64_t outTimeStamp = mProcessedSamples * 1000000ll / samplingRate;
mProcessedSamples += ((outSize - outOffset) / (numChannels * sizeof(int16_t)));
- ALOGV("out buffer attr. offset %d size %d timestamp %u", outOffset, outSize - outOffset,
- (uint32_t)(mAnchorTimeStamp + outTimeStamp));
+ ALOGV("out buffer attr. offset %d size %d timestamp %" PRId64 " ", outOffset,
+ outSize - outOffset, mAnchorTimeStamp + outTimeStamp);
decodedSizes.clear();
work->worklets.front()->output.flags = work->input.flags;
work->worklets.front()->output.buffers.clear();
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.h b/media/codec2/components/mp3/C2SoftMp3Dec.h
index 402bdc4..e2dfcf3 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.h
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.h
@@ -63,7 +63,7 @@
bool mSignalledError;
bool mSignalledOutputEos;
bool mGaplessBytes;
- uint64_t mAnchorTimeStamp;
+ int64_t mAnchorTimeStamp;
uint64_t mProcessedSamples;
status_t initDecoder();
diff --git a/media/codec2/components/opus/C2SoftOpusEnc.cpp b/media/codec2/components/opus/C2SoftOpusEnc.cpp
index b47275f..370d33c 100644
--- a/media/codec2/components/opus/C2SoftOpusEnc.cpp
+++ b/media/codec2/components/opus/C2SoftOpusEnc.cpp
@@ -78,6 +78,19 @@
.build());
addParameter(
+ DefineParam(mBitrateMode, C2_PARAMKEY_BITRATE_MODE)
+ .withDefault(new C2StreamBitrateModeTuning::output(
+ 0u, C2Config::BITRATE_VARIABLE))
+ .withFields({
+ C2F(mBitrateMode, value).oneOf({
+ C2Config::BITRATE_CONST,
+ C2Config::BITRATE_VARIABLE})
+ })
+ .withSetter(
+ Setter<decltype(*mBitrateMode)>::StrictValueWithNoDeps)
+ .build());
+
+ addParameter(
DefineParam(mBitrate, C2_PARAMKEY_BITRATE)
.withDefault(new C2StreamBitrateInfo::output(0u, 128000))
.withFields({C2F(mBitrate, value).inRange(500, 512000)})
@@ -100,12 +113,14 @@
uint32_t getSampleRate() const { return mSampleRate->value; }
uint32_t getChannelCount() const { return mChannelCount->value; }
uint32_t getBitrate() const { return mBitrate->value; }
+ uint32_t getBitrateMode() const { return mBitrateMode->value; }
uint32_t getComplexity() const { return mComplexity->value; }
private:
std::shared_ptr<C2StreamSampleRateInfo::input> mSampleRate;
std::shared_ptr<C2StreamChannelCountInfo::input> mChannelCount;
std::shared_ptr<C2StreamBitrateInfo::output> mBitrate;
+ std::shared_ptr<C2StreamBitrateModeTuning::output> mBitrateMode;
std::shared_ptr<C2StreamComplexityTuning::output> mComplexity;
std::shared_ptr<C2StreamMaxBufferSizeInfo::input> mInputMaxBufSize;
};
@@ -135,6 +150,7 @@
mSampleRate = mIntf->getSampleRate();
mChannelCount = mIntf->getChannelCount();
uint32_t bitrate = mIntf->getBitrate();
+ uint32_t bitrateMode = mIntf->getBitrateMode();
int complexity = mIntf->getComplexity();
mNumSamplesPerFrame = mSampleRate / (1000 / mFrameDurationMs);
mNumPcmBytesPerInputFrame =
@@ -189,14 +205,24 @@
return C2_BAD_VALUE;
}
- // Constrained VBR
- if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(1) != OPUS_OK)) {
- ALOGE("failed to set vbr type");
- return C2_BAD_VALUE;
- }
- if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(1) !=
- OPUS_OK)) {
- ALOGE("failed to set vbr constraint");
+ if (bitrateMode == C2Config::BITRATE_VARIABLE) {
+ // Constrained VBR
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(1)) != OPUS_OK) {
+ ALOGE("failed to set vbr type");
+ return C2_BAD_VALUE;
+ }
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR_CONSTRAINT(1)) != OPUS_OK) {
+ ALOGE("failed to set vbr constraint");
+ return C2_BAD_VALUE;
+ }
+ } else if (bitrateMode == C2Config::BITRATE_CONST) {
+ if (opus_multistream_encoder_ctl(mEncoder, OPUS_SET_VBR(0)) != OPUS_OK) {
+ ALOGE("failed to set cbr type");
+ return C2_BAD_VALUE;
+ }
+ } else {
+ ALOGE("unknown bitrate mode");
return C2_BAD_VALUE;
}
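A hedged sketch of how a Codec2 client could select the new constant-bitrate mode; 'component' stands for an assumed std::shared_ptr<android::Codec2Client::Component>:

    // Select CBR; the component then issues OPUS_SET_VBR(0) as shown above.
    C2StreamBitrateModeTuning::output mode(0u /*stream*/, C2Config::BITRATE_CONST);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = component->config({&mode}, C2_MAY_BLOCK, &failures);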
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 7e9090f..7486d27 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -635,7 +635,8 @@
}
work->worklets.front()->output.flags = (C2FrameData::flags_t)flags;
work->worklets.front()->output.buffers.clear();
- std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block);
+ std::shared_ptr<C2Buffer> buffer =
+ createLinearBuffer(block, 0, encoded_packet->data.frame.sz);
if (encoded_packet->data.frame.flags & VPX_FRAME_IS_KEY) {
buffer->setInfo(std::make_shared<C2StreamPictureTypeMaskInfo::output>(
0u /* stream id */, C2Config::SYNC_FRAME));
diff --git a/media/codec2/components/xaac/C2SoftXaacDec.cpp b/media/codec2/components/xaac/C2SoftXaacDec.cpp
index 6deafda..8bf4b72 100644
--- a/media/codec2/components/xaac/C2SoftXaacDec.cpp
+++ b/media/codec2/components/xaac/C2SoftXaacDec.cpp
@@ -361,9 +361,8 @@
C2WriteView wView = block->map().get();
int16_t* outBuffer = reinterpret_cast<int16_t*>(wView.data());
memcpy(outBuffer, mOutputDrainBuffer, mOutputDrainBufferWritePos);
- mOutputDrainBufferWritePos = 0;
- auto fillWork = [buffer = createLinearBuffer(block)](
+ auto fillWork = [buffer = createLinearBuffer(block, 0, mOutputDrainBufferWritePos)](
const std::unique_ptr<C2Work>& work) {
uint32_t flags = 0;
if (work->input.flags & C2FrameData::FLAG_END_OF_STREAM) {
@@ -376,6 +375,9 @@
work->worklets.front()->output.ordinal = work->input.ordinal;
work->workletsProcessed = 1u;
};
+
+ mOutputDrainBufferWritePos = 0;
+
if (work && work->input.ordinal.frameIndex == c2_cntr64_t(mCurFrameIndex)) {
fillWork(work);
} else {
diff --git a/media/codec2/core/include/C2Buffer.h b/media/codec2/core/include/C2Buffer.h
index a5d6fbf..abe343b 100644
--- a/media/codec2/core/include/C2Buffer.h
+++ b/media/codec2/core/include/C2Buffer.h
@@ -898,6 +898,12 @@
* Obtains a linear writeable block of given |capacity| and |usage|. If successful, the
* block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
*
+ * \note The returned buffer may have a larger capacity than requested. In this case the
+ * larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity.
+ *
* \param capacity the size of requested block.
* \param usage the memory usage info for the requested block. Returned blocks will be
* optimized for this usage, but may be used with any usage. One exception:
@@ -926,6 +932,12 @@
* Obtains a circular writeable block of given |capacity| and |usage|. If successful, the
* block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
*
+ * \note The returned buffer may have a larger capacity than requested. In this case the
+ * larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity.
+ *
* \param capacity the size of requested circular block. (note: the size of the obtained
* block could be slightly larger, e.g. to accommodate any system-required
* alignment)
@@ -956,6 +968,12 @@
* Obtains a 2D graphic block of given |width|, |height|, |format| and |usage|. If successful,
* the block is stored in |block|. Otherwise, |block| is set to 'nullptr'.
*
+ * \note The returned buffer may have a larger capacity (width and height) than requested. In
+ * this case the larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity (width and height).
+ *
* \param width the width of requested block (the obtained block could be slightly larger, e.g.
* to accommodate any system-required alignment)
* \param height the height of requested block (the obtained block could be slightly larger,
@@ -1000,6 +1018,12 @@
* fence is signalled when the temporary restriction on fetch is lifted.
* e.g. more memory is available to fetch because some memory or prior blocks were released.
*
+ * \note The returned buffer may have a larger capacity than requested. In this case the
+ * larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity.
+ *
* \param capacity the size of requested block.
* \param usage the memory usage info for the requested block. Returned blocks will be
* optimized for this usage, but may be used with any usage. One exception:
@@ -1039,6 +1063,12 @@
* fence is signalled when the temporary restriction on fetch is lifted.
* e.g. more memory is available to fetch because some memory or prior blocks were released.
*
+ * \note The returned buffer may have a larger capacity (width and height) than requested. In
+ * this case the larger (returned) capacity may be fully used.
+ *
+ * \note There is no guarantee on the alignment of the returned block. The only guarantee is
+ * that its capacity is equal to or larger than the requested capacity (width and height).
+ *
* \param width the width of requested block (the obtained block could be slightly larger, e.g.
* to accommodate any system-required alignment)
* \param height the height of requested block (the obtained block could be slightly larger,
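A minimal sketch of the capacity notes above; 'pool' is an assumed std::shared_ptr<C2BlockPool>:

    std::shared_ptr<C2LinearBlock> block;
    C2MemoryUsage usage = {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE};
    const size_t requested = 4096;
    if (pool->fetchLinearBlock(requested, usage, &block) == C2_OK) {
        // block->capacity() >= requested; the extra capacity may be fully used,
        // but no particular alignment of the block can be assumed.
        std::shared_ptr<C2Buffer> buffer = C2Buffer::CreateLinearBuffer(
                block->share(0 /*offset*/, requested, ::C2Fence()));
    }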
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 7caa457..2cc7ab7 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -574,7 +574,6 @@
PROFILE_MPEGH_HIGH, ///< MPEG-H High
PROFILE_MPEGH_LC, ///< MPEG-H Low-complexity
PROFILE_MPEGH_BASELINE, ///< MPEG-H Baseline
-
};
enum C2Config::level_t : uint32_t {
@@ -2393,22 +2392,24 @@
C2StreamTunnelStartRender;
constexpr char C2_PARAMKEY_TUNNEL_START_RENDER[] = "output.tunnel-start-render";
-C2ENUM(C2PlatformConfig::encoding_quality_level_t, uint32_t,
- NONE,
- S_HANDHELD,
- S_HANDHELD_PC
-);
-
-namespace android {
-
/**
* Encoding quality level signaling.
+ *
+ * Signal the 'minimum encoding quality' introduced in Android 12/S. It indicates
+ * whether the underlying codec is expected to take extra steps to ensure quality meets the
+ * appropriate minimum. A value of NONE indicates that the codec is not to apply
+ * any minimum quality bar requirements. Other values indicate that the codec is to apply
+ * a minimum quality bar, with the exact quality bar being decided by the parameter value.
*/
typedef C2GlobalParam<C2Setting,
C2SimpleValueStruct<C2EasyEnum<C2PlatformConfig::encoding_quality_level_t>>,
kParamIndexEncodingQualityLevel> C2EncodingQualityLevel;
+constexpr char C2_PARAMKEY_ENCODING_QUALITY_LEVEL[] = "algo.encoding-quality-level";
-}
+C2ENUM(C2PlatformConfig::encoding_quality_level_t, uint32_t,
+ NONE = 0,
+ S_HANDHELD = 1 // corresponds to VMAF=70
+);
/// @}
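A hypothetical sketch of signaling the minimum-quality bar to an encoder; the exact construction and configuration plumbing are assumptions:

    // Ask the encoder to apply the S handheld minimum quality bar (VMAF=70).
    C2EncodingQualityLevel level(C2PlatformConfig::S_HANDHELD);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = component->config({&level}, C2_MAY_BLOCK, &failures);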
diff --git a/media/codec2/core/include/C2Work.h b/media/codec2/core/include/C2Work.h
index 67084cc..794402f 100644
--- a/media/codec2/core/include/C2Work.h
+++ b/media/codec2/core/include/C2Work.h
@@ -145,10 +145,35 @@
*/
FLAG_INCOMPLETE = (1 << 3),
/**
+ * This frame has been corrected due to a bitstream error. This is a hint, and in most cases
+ * can be ignored. This flag can be set by components on their output to signal the clients
+ * that errors may be present but the frame should be used nonetheless. It can also be set
+ * by clients to signal that the input frame has been corrected, but nonetheless should be
+ * processed.
+ */
+ FLAG_CORRECTED = (1 << 4),
+ /**
+ * This frame is corrupt due to a bitstream error. This is similar to FLAG_CORRECTED,
+ * with the exception that this is a hint that downstream components should not process this
+ * frame.
+ * <p>
+ * If set on the input by the client, the input is likely non-processable and should be
+ * handled similarly to a detected uncorrectable bitstream error. For components that operate
+ * on whole access units, this flag can be propagated to the output. Other components should
+ * aim to detect access unit boundaries to determine if any part of the input frame can be
+ * processed.
+ * <p>
+ * If set by the component, this signals to the client that the output is non-usable,
+ * possibly including the metadata; however, the component
+ * will try to recover on successive input frames.
+ */
+ FLAG_CORRUPT = (1 << 5),
+
+ /**
* This frame contains only codec-specific configuration data, and no actual access unit.
*
- * \deprecated pass codec configuration with using the \todo codec-specific configuration
- * info together with the access unit.
+ * \deprecated pass codec configuration using the C2InitData info parameter together
+ * with the access unit.
*/
FLAG_CODEC_CONFIG = (1u << 31),
};
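A hedged sketch of a component applying the new flag on its output; the surrounding work bookkeeping is assumed:

    // Mark the produced frame as unusable per FLAG_CORRUPT; the client should
    // not process it, while the component keeps trying to recover on later input.
    static void markOutputCorrupt(const std::unique_ptr<C2Work> &work) {
        C2FrameData &output = work->worklets.front()->output;
        output.flags = (C2FrameData::flags_t)(output.flags | C2FrameData::FLAG_CORRUPT);
    }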
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
index 58a568e..abd8b2d 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioDecTest.cpp
@@ -36,13 +36,13 @@
using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
static std::vector<CsdFlushTestParameters> gCsdFlushTestParameters;
-struct CompToURL {
+struct CompToFiles {
std::string mime;
- std::string mURL;
- std::string info;
+ std::string inputFile;
+ std::string infoFile;
};
-std::vector<CompToURL> gCompToURL = {
+std::vector<CompToFiles> gCompToFiles = {
{"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac", "bbb_aac_stereo_128kbps_48000hz.info"},
{"mp4a-latm", "bbb_aac_stereo_128kbps_48000hz.aac",
"bbb_aac_stereo_128kbps_48000hz_multi_frame.info"},
@@ -110,6 +110,15 @@
mTimestampUs = 0u;
mWorkResult = C2_OK;
mTimestampDevTest = false;
+
+ bool valid = getFileNames(mStreamIndex);
+ if (!valid) {
+ GTEST_SKIP() << "No test file for mime " << mMime << " index: " << mStreamIndex;
+ }
+ ALOGV("mStreamIndex : %zu", mStreamIndex);
+ ALOGV("mInputFile : %s", mInputFile.c_str());
+ ALOGV("mInfoFile : %s", mInfoFile.c_str());
+
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
@@ -126,7 +135,7 @@
virtual void validateTimestampList(int32_t* bitStreamInfo);
- void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+ bool getFileNames(size_t streamIndex = 0);
struct outputMetaData {
uint64_t timestampUs;
@@ -193,6 +202,10 @@
std::shared_ptr<android::Codec2Client::Listener> mListener;
std::shared_ptr<android::Codec2Client::Component> mComponent;
+ std::string mInputFile;
+ std::string mInfoFile;
+ size_t mStreamIndex = 0;
+
protected:
static void description(const std::string& description) {
RecordProperty("description", description);
@@ -204,6 +217,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = 0;
}
};
@@ -285,18 +299,20 @@
}
// LookUpTable of clips and metadata for component testing
-void Codec2AudioDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
+bool Codec2AudioDecHidlTestBase::getFileNames(size_t streamIndex) {
int streamCount = 0;
- for (size_t i = 0; i < gCompToURL.size(); ++i) {
- if (mMime.find(gCompToURL[i].mime) != std::string::npos) {
+
+ for (size_t i = 0; i < gCompToFiles.size(); ++i) {
+ if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
if (streamCount == streamIndex) {
- strcat(mURL, gCompToURL[i].mURL.c_str());
- strcat(info, gCompToURL[i].info.c_str());
- return;
+ mInputFile = sResourceDir + gCompToFiles[i].inputFile;
+ mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
+ return true;
}
streamCount++;
}
}
+ return false;
}
void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -429,6 +445,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = std::get<2>(GetParam());
}
};
@@ -436,22 +453,12 @@
description("Decodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- uint32_t streamIndex = std::get<2>(GetParam());
bool signalEOS = std::get<3>(GetParam());
mTimestampDevTest = true;
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info, streamIndex);
- if (!strcmp(mURL, sResourceDir.c_str())) {
- ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL %s ", sResourceDir.c_str(), mURL);
- return;
- }
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
// Reset total no of frames received
mFramesReceived = 0;
@@ -468,9 +475,8 @@
return;
}
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -507,15 +513,10 @@
description("Test Request for thumbnail");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
int32_t bitStreamInfo[2] = {0};
if (mMime.find("raw") != std::string::npos) {
@@ -529,7 +530,6 @@
return;
}
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
// request EOS for thumbnail
// signal EOS flag with last frame
@@ -542,7 +542,7 @@
} while (!(flags & SYNC_FRAME));
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -599,15 +599,10 @@
TEST_P(Codec2AudioDecHidlTest, FlushTest) {
description("Tests Flush calls");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
int32_t bitStreamInfo[2] = {0};
if (mMime.find("raw") != std::string::npos) {
@@ -629,9 +624,8 @@
verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
- ALOGV("mURL : %s", mURL);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
// Decode 30 frames and flush.
uint32_t numFramesFlushed = FLUSH_INTERVAL;
@@ -684,15 +678,10 @@
description("Decode with multiple empty input frames");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
std::ifstream eleStream, eleInfo;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+ eleInfo.open(mInfoFile);
+ ASSERT_EQ(eleInfo.is_open(), true) << mInfoFile << " - file not found";
android::Vector<FrameInfo> Info;
int bytesCount = 0;
uint32_t frameId = 0;
@@ -730,8 +719,7 @@
return;
}
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -759,6 +747,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = 0;
}
};
@@ -768,19 +757,9 @@
description("Tests codecs for flush at different states");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
- if (!strcmp(mURL, sResourceDir.c_str())) {
- ALOGV("EMPTY INPUT sResourceDir.c_str() %s mURL %s ", sResourceDir.c_str(), mURL);
- return;
- }
- ALOGV("mURL : %s", mURL);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
int32_t bitStreamInfo[2] = {0};
@@ -797,7 +776,7 @@
ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
bool signalEOS = false;
diff --git a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
index 92b53a0..d77b943 100644
--- a/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/audio/VtsHalMediaC2V1_0TargetAudioEncTest.cpp
@@ -84,6 +84,17 @@
mWorkResult = C2_OK;
mOutputSize = 0u;
getInputMaxBufSize();
+
+ c2_status_t status = getChannelCount(&mNumChannels);
+ ASSERT_EQ(status, C2_OK) << "Unable to get supported channel count";
+
+ status = getSampleRate(&mSampleRate);
+ ASSERT_EQ(status, C2_OK) << "Unable to get supported sample rate";
+
+ status = getSamplesPerFrame(mNumChannels, &mSamplesPerFrame);
+ ASSERT_EQ(status, C2_OK) << "Unable to get supported number of samples per frame";
+
+ getFile(mNumChannels, mSampleRate);
}
virtual void TearDown() override {
@@ -97,7 +108,11 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
- void GetURLForComponent(char* mURL, int32_t channelCount, int32_t sampleRate);
+ c2_status_t getChannelCount(int32_t* nChannels);
+ c2_status_t getSampleRate(int32_t* nSampleRate);
+ c2_status_t getSamplesPerFrame(int32_t nChannels, int32_t* samplesPerFrame);
+
+ void getFile(int32_t channelCount, int32_t sampleRate);
// callback function to process onWorkDone received by Listener
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
@@ -145,6 +160,12 @@
std::shared_ptr<android::Codec2Client::Listener> mListener;
std::shared_ptr<android::Codec2Client::Component> mComponent;
+ int32_t mNumChannels;
+ int32_t mSampleRate;
+ int32_t mSamplesPerFrame;
+
+ std::string mInputFile;
+
protected:
static void description(const std::string& description) {
RecordProperty("description", description);
@@ -222,14 +243,13 @@
return false;
}
-c2_status_t getChannelCount(const std::shared_ptr<android::Codec2Client::Component>& component,
- int32_t* nChannels) {
+c2_status_t Codec2AudioEncHidlTestBase::getChannelCount(int32_t* nChannels) {
std::unique_ptr<C2StreamChannelCountInfo::input> channelCount =
std::make_unique<C2StreamChannelCountInfo::input>();
std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
C2FieldSupportedValuesQuery::Current(
C2ParamField(channelCount.get(), &C2StreamChannelCountInfo::value))};
- c2_status_t c2err = component->querySupportedValues(validValueInfos, C2_DONT_BLOCK);
+ c2_status_t c2err = mComponent->querySupportedValues(validValueInfos, C2_DONT_BLOCK);
if (c2err != C2_OK || validValueInfos.size() != 1u) {
ALOGE("querySupportedValues_vb failed for channelCount");
return c2err;
@@ -264,13 +284,11 @@
}
return C2_OK;
}
-
-c2_status_t getSampleRate(const std::shared_ptr<android::Codec2Client::Component>& component,
- int32_t* nSampleRate) {
- // Use the default sample rate for components
+c2_status_t Codec2AudioEncHidlTestBase::getSampleRate(int32_t* nSampleRate) {
+ // Use the default sample rate for components
std::vector<std::unique_ptr<C2Param>> queried;
- c2_status_t c2err = component->query({}, {C2StreamSampleRateInfo::input::PARAM_TYPE},
- C2_DONT_BLOCK, &queried);
+ c2_status_t c2err = mComponent->query({}, {C2StreamSampleRateInfo::input::PARAM_TYPE},
+ C2_DONT_BLOCK, &queried);
if (c2err != C2_OK || queried.size() == 0) return c2err;
size_t offset = sizeof(C2Param);
@@ -280,11 +298,11 @@
return C2_OK;
}
-c2_status_t getSamplesPerFrame(const std::shared_ptr<android::Codec2Client::Component>& component,
- int32_t nChannels, int32_t* samplesPerFrame) {
+c2_status_t Codec2AudioEncHidlTestBase::getSamplesPerFrame(int32_t nChannels,
+ int32_t* samplesPerFrame) {
std::vector<std::unique_ptr<C2Param>> queried;
- c2_status_t c2err = component->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
- C2_DONT_BLOCK, &queried);
+ c2_status_t c2err = mComponent->query({}, {C2StreamMaxBufferSizeInfo::input::PARAM_TYPE},
+ C2_DONT_BLOCK, &queried);
if (c2err != C2_OK || queried.size() == 0) return c2err;
size_t offset = sizeof(C2Param);
@@ -295,24 +313,8 @@
return C2_OK;
}
-// Get config params for a component
-bool getConfigParams(const std::shared_ptr<android::Codec2Client::Component>& component,
- int32_t* nChannels, int32_t* nSampleRate, int32_t* samplesPerFrame) {
- c2_status_t status = getChannelCount(component, nChannels);
- if (status != C2_OK) return false;
-
- status = getSampleRate(component, nSampleRate);
- if (status != C2_OK) return false;
-
- status = getSamplesPerFrame(component, *nChannels, samplesPerFrame);
- if (status != C2_OK) return false;
-
- return true;
-}
-
// LookUpTable of clips and metadata for component testing
-void Codec2AudioEncHidlTestBase::GetURLForComponent(char* mURL, int32_t channelCount,
- int32_t sampleRate) {
+void Codec2AudioEncHidlTestBase::getFile(int32_t channelCount, int32_t sampleRate) {
std::string rawInput = "bbb_raw_1ch_8khz_s16le.raw";
if (channelCount == 1 && sampleRate == 16000) {
rawInput = "bbb_raw_1ch_16khz_s16le.raw";
@@ -320,7 +322,7 @@
rawInput = "bbb_raw_2ch_48khz_s16le.raw";
}
- strcat(mURL, rawInput.c_str());
+ mInputFile = sResourceDir + rawInput;
}
void encodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -440,38 +442,23 @@
bool signalEOS = std::get<2>(GetParam());
// Ratio w.r.t to mInputMaxBufSize
int32_t inputMaxBufRatio = std::get<3>(GetParam());
+ mSamplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (mNumChannels * 2));
- int32_t nChannels;
- int32_t nSampleRate;
- int32_t samplesPerFrame;
+ ALOGV("signalEOS %d mInputMaxBufSize %d mSamplesPerFrame %d", signalEOS, mInputMaxBufSize,
+ mSamplesPerFrame);
- if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mComponentName << "\n";
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
-
- samplesPerFrame = ((mInputMaxBufSize / inputMaxBufRatio) / (nChannels * 2));
- ALOGV("signalEOS %d mInputMaxBufSize %d samplesPerFrame %d", signalEOS, mInputMaxBufSize,
- samplesPerFrame);
-
- if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL, nChannels, nSampleRate);
+ ASSERT_TRUE(setupConfigParam(mComponent, mNumChannels, mSampleRate))
+ << "Unable to configure for channels: " << mNumChannels << " and sampling rate "
+ << mSampleRate;
ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
uint32_t numFrames = 16;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
- ALOGV("mURL : %s", mURL);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(
mComponent, mQueueLock, mQueueCondition, mWorkQueue, mFlushedIndices, mLinearPool,
- eleStream, numFrames, samplesPerFrame, nChannels, nSampleRate, false, signalEOS));
+ eleStream, numFrames, mSamplesPerFrame, mNumChannels, mSampleRate, false, signalEOS));
// If EOS is not sent, sending empty input with EOS flag
if (!signalEOS) {
@@ -545,30 +532,17 @@
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
mFlushedIndices.clear();
- int32_t nChannels;
- int32_t nSampleRate;
- int32_t samplesPerFrame;
- if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mComponentName << "\n";
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
-
- if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL, nChannels, nSampleRate);
+ ASSERT_TRUE(setupConfigParam(mComponent, mNumChannels, mSampleRate))
+ << "Unable to configure for channels: " << mNumChannels << " and sampling rate "
+ << mSampleRate;
ASSERT_EQ(mComponent->start(), C2_OK);
std::ifstream eleStream;
uint32_t numFramesFlushed = 30;
uint32_t numFrames = 128;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
// flush
std::list<std::unique_ptr<C2Work>> flushedWork;
@@ -577,10 +551,9 @@
ASSERT_NO_FATAL_FAILURE(
verifyFlushOutput(flushedWork, mWorkQueue, mFlushedIndices, mQueueLock));
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
- ALOGV("mURL : %s", mURL);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, numFramesFlushed,
- samplesPerFrame, nChannels, nSampleRate));
+ mSamplesPerFrame, mNumChannels, mSampleRate));
err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
waitOnInputConsumption(mQueueLock, mQueueCondition, mWorkQueue,
@@ -590,8 +563,8 @@
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream,
- numFrames - numFramesFlushed, samplesPerFrame, nChannels,
- nSampleRate, true));
+ numFrames - numFramesFlushed, mSamplesPerFrame,
+ mNumChannels, mSampleRate, true));
eleStream.close();
err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
ASSERT_EQ(err, C2_OK);
@@ -609,33 +582,20 @@
description("Encodes input file for different channel count");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- int32_t nSampleRate;
- int32_t samplesPerFrame;
- int32_t nChannels;
int32_t numFrames = 16;
int32_t maxChannelCount = 8;
- if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mComponentName << "\n";
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL, nChannels, nSampleRate);
-
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
+ eleStream.open(mInputFile, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
uint64_t prevOutputSize = 0u;
uint32_t prevChannelCount = 0u;
// Looping through the maximum number of channel count supported by encoder
- for (nChannels = 1; nChannels < maxChannelCount; nChannels++) {
+ for (int32_t nChannels = 1; nChannels < maxChannelCount; nChannels++) {
ALOGV("Configuring encoder %s for channel count = %d", mComponentName.c_str(), nChannels);
- if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+ if (!setupConfigParam(mComponent, nChannels, mSampleRate)) {
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -656,9 +616,9 @@
// Check whether the input stream has enough data to encode at the higher channel count
struct stat buf;
- stat(mURL, &buf);
+ stat(mInputFile.c_str(), &buf);
size_t fileSize = buf.st_size;
- int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
+ int32_t bytesCount = (mSamplesPerFrame * nChannels * 2) * numFrames;
if (fileSize < bytesCount) {
std::cout << "[ WARN ] Test Skipped for ChannelCount " << nChannels
<< " because of insufficient input data\n";
@@ -669,7 +629,7 @@
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, numFrames,
- samplesPerFrame, nChannels, nSampleRate));
+ mSamplesPerFrame, nChannels, mSampleRate));
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when config params are not proper even though config succeeded
@@ -711,24 +671,11 @@
description("Encodes input file for different SampleRate");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- int32_t nSampleRate;
- int32_t samplesPerFrame;
- int32_t nChannels;
int32_t numFrames = 16;
- if (!getConfigParams(mComponent, &nChannels, &nSampleRate, &samplesPerFrame)) {
- std::cout << "Failed to get the config params for " << mComponentName << "\n";
- std::cout << "[ WARN ] Test Skipped \n";
- return;
- }
- char mURL[512];
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL, nChannels, nSampleRate);
-
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
+ eleStream.open(mInputFile, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
int32_t sampleRateValues[] = {1000, 8000, 16000, 24000, 48000, 96000, 192000};
@@ -737,7 +684,7 @@
for (int32_t nSampleRate : sampleRateValues) {
ALOGV("Configuring encoder %s for SampleRate = %d", mComponentName.c_str(), nSampleRate);
- if (!setupConfigParam(mComponent, nChannels, nSampleRate)) {
+ if (!setupConfigParam(mComponent, mNumChannels, nSampleRate)) {
std::cout << "[ WARN ] Test Skipped \n";
return;
}
@@ -759,9 +706,9 @@
// Check whether the input stream has enough data to encode at the higher SampleRate
struct stat buf;
- stat(mURL, &buf);
+ stat(mInputFile.c_str(), &buf);
size_t fileSize = buf.st_size;
- int32_t bytesCount = (samplesPerFrame * nChannels * 2) * numFrames;
+ int32_t bytesCount = (mSamplesPerFrame * mNumChannels * 2) * numFrames;
if (fileSize < bytesCount) {
std::cout << "[ WARN ] Test Skipped for SampleRate " << nSampleRate
<< " because of insufficient input data\n";
@@ -772,7 +719,7 @@
ASSERT_NO_FATAL_FAILURE(encodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, numFrames,
- samplesPerFrame, nChannels, nSampleRate));
+ mSamplesPerFrame, mNumChannels, nSampleRate));
// mDisableTest will be set if buffer was not fetched properly.
// This may happen when config params are not proper even though config succeeded
diff --git a/media/codec2/hidl/1.0/vts/functional/video/Android.bp b/media/codec2/hidl/1.0/vts/functional/video/Android.bp
index f211ecf..ecc4f9d 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/Android.bp
+++ b/media/codec2/hidl/1.0/vts/functional/video/Android.bp
@@ -36,6 +36,8 @@
"libgui",
"libutils",
"libcrypto",
+ "libdatasource",
+ "libui",
],
data: [":media_c2_v1_video_decode_res"],
test_config: "VtsHalMediaC2V1_0TargetVideoDecTest.xml",
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 8d917b3..4c90eee 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -33,23 +33,30 @@
#include <gui/IConsumerListener.h>
#include <gui/IProducerListener.h>
#include <system/window.h>
+#include <gui/GLConsumer.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
#include "media_c2_hidl_test_common.h"
#include "media_c2_video_hidl_test_common.h"
-using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool>;
+constexpr size_t kSmoothnessFactor = 4;
+constexpr size_t kRenderingDepth = 3;
+enum surfaceMode_t { NO_SURFACE, NULL_SURFACE, SURFACE };
+
+using DecodeTestParameters = std::tuple<std::string, std::string, uint32_t, bool, surfaceMode_t>;
static std::vector<DecodeTestParameters> gDecodeTestParameters;
using CsdFlushTestParameters = std::tuple<std::string, std::string, bool>;
static std::vector<CsdFlushTestParameters> gCsdFlushTestParameters;
-struct CompToURL {
+struct CompToFiles {
std::string mime;
- std::string mURL;
- std::string info;
- std::string chksum;
+ std::string inputFile;
+ std::string infoFile;
+ std::string chksumFile;
};
-std::vector<CompToURL> gCompToURL = {
+std::vector<CompToFiles> gCompToFiles = {
{"avc", "bbb_avc_176x144_300kbps_60fps.h264", "bbb_avc_176x144_300kbps_60fps.info",
"bbb_avc_176x144_300kbps_60fps_chksum.md5"},
{"avc", "bbb_avc_640x360_768kbps_30fps.h264", "bbb_avc_640x360_768kbps_30fps.info",
@@ -92,8 +99,8 @@
// google.codec2 Video test setup
virtual void SetUp() override {
getParams();
+
mDisableTest = false;
- ALOGV("Codec2VideoDecHidlTest SetUp");
mClient = android::Codec2Client::CreateFromService(
mInstanceName.c_str(),
!bool(android::Codec2Client::CreateFromService("default", true)));
@@ -135,6 +142,15 @@
mDisableTest = true;
}
+ bool valid = getFileNames(mStreamIndex);
+ if (!valid) {
+ GTEST_SKIP() << "No test file for mime " << mMime << " index: " << mStreamIndex;
+ }
+ ALOGV("mStreamIndex : %zu", mStreamIndex);
+ ALOGV("mInputFile : %s", mInputFile.c_str());
+ ALOGV("mInfoFile : %s", mInfoFile.c_str());
+ ALOGV("mChksumFile : %s", mChksumFile.c_str());
+
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
@@ -149,8 +165,7 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
- void GetURLChksmForComponent(char* mURL, char* info, char* chksum, size_t streamIndex);
- void GetURLForComponent(char* mURL, char* info, size_t streamIndex = 0);
+ bool getFileNames(size_t streamIndex = 0);
/* Calculate the CKSUM for the data in inbuf */
void calc_md5_cksum(uint8_t* pu1_inbuf, uint32_t u4_stride, uint32_t u4_width,
@@ -311,6 +326,11 @@
std::shared_ptr<android::Codec2Client::Listener> mListener;
std::shared_ptr<android::Codec2Client::Component> mComponent;
+ std::string mInputFile;
+ std::string mInfoFile;
+ std::string mChksumFile;
+ size_t mStreamIndex = 0;
+
protected:
static void description(const std::string& description) {
RecordProperty("description", description);
@@ -322,6 +342,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = 0;
}
};
@@ -358,27 +379,54 @@
// number of elementary streams per component
#define STREAM_COUNT 3
-// LookUpTable of clips, metadata and chksum for component testing
-void Codec2VideoDecHidlTestBase::GetURLChksmForComponent(char* mURL, char* info, char* chksum,
- size_t streamIndex) {
+// number of elementary streams required for adaptive testing
+#define ADAPTIVE_STREAM_COUNT 2
+// Lookup table of input clips, metadata and checksum files for component testing
+bool Codec2VideoDecHidlTestBase::getFileNames(size_t streamIndex) {
int streamCount = 0;
- for (size_t i = 0; i < gCompToURL.size(); ++i) {
- if (mMime.find(gCompToURL[i].mime) != std::string::npos) {
+
+ for (size_t i = 0; i < gCompToFiles.size(); ++i) {
+ if (mMime.find(gCompToFiles[i].mime) != std::string::npos) {
if (streamCount == streamIndex) {
- strcat(mURL, gCompToURL[i].mURL.c_str());
- strcat(info, gCompToURL[i].info.c_str());
- strcat(chksum, gCompToURL[i].chksum.c_str());
- return;
+ mInputFile = sResourceDir + gCompToFiles[i].inputFile;
+ mInfoFile = sResourceDir + gCompToFiles[i].infoFile;
+ mChksumFile = sResourceDir + gCompToFiles[i].chksumFile;
+ return true;
}
streamCount++;
}
}
+ return false;
}
-void Codec2VideoDecHidlTestBase::GetURLForComponent(char* mURL, char* info, size_t streamIndex) {
- char chksum[512];
- strcpy(chksum, sResourceDir.c_str());
- GetURLChksmForComponent(mURL, info, chksum, streamIndex);
+void setOutputSurface(const std::shared_ptr<android::Codec2Client::Component>& component,
+ surfaceMode_t surfMode) {
+ using namespace android;
+ sp<IGraphicBufferProducer> producer = nullptr;
+ static std::atomic_uint32_t surfaceGeneration{0};
+ uint32_t generation =
+ (getpid() << 10) |
+ ((surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1) & ((1 << 10) - 1));
+ int32_t maxDequeueBuffers = kSmoothnessFactor + kRenderingDepth;
+ if (surfMode == SURFACE) {
+ sp<IGraphicBufferConsumer> consumer = nullptr;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ ASSERT_NE(producer, nullptr) << "createBufferQueue returned invalid producer";
+ ASSERT_NE(consumer, nullptr) << "createBufferQueue returned invalid consumer";
+
+ sp<GLConsumer> texture =
+ new GLConsumer(consumer, 0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
+ true /* useFenceSync */, false /* isControlledByApp */);
+
+ sp<ANativeWindow> gSurface = new Surface(producer);
+ ASSERT_NE(gSurface, nullptr) << "failed to create Surface";
+
+ producer->setGenerationNumber(generation);
+ }
+
+ c2_status_t err = component->setOutputSurface(C2BlockPool::BASIC_GRAPHIC, producer, generation,
+ maxDequeueBuffers);
+ ASSERT_EQ(err, C2_OK) << "setOutputSurface failed";
}
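For reference, a standalone sketch (not part of the patch) of the generation-number scheme used in setOutputSurface() above: the low 10 bits carry a wrapping per-process counter and the higher bits carry the pid, so repeated configurations in one process, or configurations from different processes, get distinct generation numbers.

```cpp
#include <atomic>
#include <cstdint>
#include <cstdio>
#include <unistd.h>

uint32_t nextSurfaceGeneration() {
    static std::atomic<uint32_t> surfaceGeneration{0};
    // counter wraps every 1024 configurations; pid occupies the upper bits
    uint32_t counter =
            (surfaceGeneration.fetch_add(1, std::memory_order_relaxed) + 1) & ((1 << 10) - 1);
    return (uint32_t(getpid()) << 10) | counter;
}

int main() {
    printf("gen1=%#x gen2=%#x\n", nextSurfaceGeneration(), nextSurfaceGeneration());
    return 0;
}
```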
void decodeNFrames(const std::shared_ptr<android::Codec2Client::Component>& component,
@@ -529,6 +577,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = std::get<2>(GetParam());
}
};
@@ -537,24 +586,14 @@
description("Decodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- uint32_t streamIndex = std::get<2>(GetParam());
bool signalEOS = std::get<3>(GetParam());
+ surfaceMode_t surfMode = std::get<4>(GetParam());
mTimestampDevTest = true;
- char mURL[512], info[512], chksum[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- strcpy(chksum, sResourceDir.c_str());
-
- GetURLChksmForComponent(mURL, info, chksum, streamIndex);
- if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
- ALOGV("Skipping Test, Stream not available");
- return;
- }
mMd5Enable = true;
- if (!strcmp(chksum, sResourceDir.c_str())) mMd5Enable = false;
+ if (!mChksumFile.compare(sResourceDir)) mMd5Enable = false;
uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
if (!configPixelFormat(format)) {
@@ -565,23 +604,22 @@
mFlushedIndices.clear();
mTimestampUslist.clear();
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
ASSERT_EQ(mComponent->start(), C2_OK);
// Reset total no of frames received
mFramesReceived = 0;
mTimestampUs = 0;
- ALOGV("mURL : %s", mURL);
+
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
size_t refChksmSize = 0;
std::ifstream refChksum;
if (mMd5Enable) {
- ALOGV("chksum file name: %s", chksum);
- refChksum.open(chksum, std::ifstream::binary | std::ifstream::ate);
+ refChksum.open(mChksumFile, std::ifstream::binary | std::ifstream::ate);
ASSERT_EQ(refChksum.is_open(), true);
refChksmSize = refChksum.tellg();
refChksum.seekg(0, std::ifstream::beg);
@@ -594,6 +632,10 @@
refChksum.close();
}
+ if (surfMode != NO_SURFACE) {
+ ASSERT_NO_FATAL_FAILURE(setOutputSurface(mComponent, surfMode));
+ }
+
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
(int)Info.size(), signalEOS));
@@ -650,20 +692,17 @@
uint32_t timestampOffset = 0;
uint32_t offset = 0;
android::Vector<FrameInfo> Info;
- for (uint32_t i = 0; i < STREAM_COUNT * 2; i++) {
- char mURL[512], info[512];
+ for (uint32_t i = 0; i < ADAPTIVE_STREAM_COUNT * 2; i++) {
std::ifstream eleStream, eleInfo;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info, i % STREAM_COUNT);
- if (!(strcmp(mURL, sResourceDir.c_str())) || !(strcmp(info, sResourceDir.c_str()))) {
+ bool valid = getFileNames(i % ADAPTIVE_STREAM_COUNT);
+ if (!valid) {
ALOGV("Stream not available, skipping this index");
continue;
}
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+ eleInfo.open(mInfoFile);
+ ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
int bytesCount = 0;
uint32_t flags = 0;
uint32_t timestamp = 0;
@@ -690,8 +729,7 @@
// Reset Total frames before second decode loop
// mFramesReceived = 0;
- ALOGV("mURL : %s", mURL);
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info,
@@ -747,15 +785,9 @@
description("Test Request for thumbnail");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
-
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
uint32_t flags = 0;
for (size_t i = 0; i < MAX_ITERATIONS; i++) {
@@ -772,7 +804,7 @@
} while (!(flags & SYNC_FRAME));
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -834,19 +866,12 @@
ASSERT_EQ(mComponent->start(), C2_OK);
- char mURL[512], info[512];
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
mFlushedIndices.clear();
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
- ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << info;
-
- ALOGV("mURL : %s", mURL);
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
+ ASSERT_GE(numCsds, 0) << "Error in parsing input info file: " << mInfoFile;
// flush
std::list<std::unique_ptr<C2Work>> flushedWork;
@@ -857,7 +882,7 @@
ASSERT_EQ(mWorkQueue.size(), MAX_INPUT_BUFFERS);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
// Decode 30 frames and flush. here 30 is chosen to ensure there is a key
// frame after this so that the below section can be covered for all
@@ -910,15 +935,10 @@
description("Decode with multiple empty input frames");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
std::ifstream eleStream, eleInfo;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- eleInfo.open(info);
- ASSERT_EQ(eleInfo.is_open(), true) << mURL << " - file not found";
+ eleInfo.open(mInfoFile);
+ ASSERT_EQ(eleInfo.is_open(), true) << mInputFile << " - file not found";
android::Vector<FrameInfo> Info;
int bytesCount = 0;
uint32_t frameId = 0;
@@ -946,8 +966,7 @@
eleInfo.close();
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
ASSERT_NO_FATAL_FAILURE(decodeNFrames(mComponent, mQueueLock, mQueueCondition, mWorkQueue,
mFlushedIndices, mLinearPool, eleStream, &Info, 0,
@@ -973,6 +992,7 @@
void getParams() {
mInstanceName = std::get<0>(GetParam());
mComponentName = std::get<1>(GetParam());
+ mStreamIndex = 0;
}
};
@@ -982,22 +1002,15 @@
description("Tests codecs for flush at different states");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512], info[512];
-
android::Vector<FrameInfo> Info;
- strcpy(mURL, sResourceDir.c_str());
- strcpy(info, sResourceDir.c_str());
- GetURLForComponent(mURL, info);
-
- int32_t numCsds = populateInfoVector(info, &Info, mTimestampDevTest, &mTimestampUslist);
+ int32_t numCsds = populateInfoVector(mInfoFile, &Info, mTimestampDevTest, &mTimestampUslist);
ASSERT_GE(numCsds, 0) << "Error in parsing input info file";
ASSERT_EQ(mComponent->start(), C2_OK);
- ALOGV("mURL : %s", mURL);
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
bool flushedDecoder = false;
bool signalEOS = false;
@@ -1090,18 +1103,23 @@
parseArgs(argc, argv);
gTestParameters = getTestParameters(C2Component::DOMAIN_VIDEO, C2Component::KIND_DECODER);
for (auto params : gTestParameters) {
+ // mOutputBufferQueue->configure() crashes when surface is NULL
+ std::initializer_list<surfaceMode_t> surfaceMode = {
+ surfaceMode_t::NO_SURFACE, surfaceMode_t::NULL_SURFACE, surfaceMode_t::SURFACE};
+ for (surfaceMode_t mode : surfaceMode) {
+ gDecodeTestParameters.push_back(
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false, mode));
+ gDecodeTestParameters.push_back(
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true, mode));
+ }
gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 0, false));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false, NO_SURFACE));
gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 0, true));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true, NO_SURFACE));
gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 1, false));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false, NO_SURFACE));
gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 1, true));
- gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 2, false));
- gDecodeTestParameters.push_back(
- std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true));
+ std::make_tuple(std::get<0>(params), std::get<1>(params), 2, true, NO_SURFACE));
gCsdFlushTestParameters.push_back(
std::make_tuple(std::get<0>(params), std::get<1>(params), true));
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
index c557de1..a6507e7 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoEncTest.cpp
@@ -95,9 +95,10 @@
mMinWidth = INT32_MAX;
mMinHeight = INT32_MAX;
- ASSERT_EQ(getMaxMinResolutionSupported(mComponent), C2_OK);
+ ASSERT_EQ(getMaxMinResolutionSupported(), C2_OK);
mWidth = std::max(std::min(mWidth, mMaxWidth), mMinWidth);
mHeight = std::max(std::min(mHeight, mMaxHeight), mMinHeight);
+ ALOGV("mWidth %d mHeight %d", mWidth, mHeight);
C2SecureModeTuning secureModeTuning{};
mComponent->query({&secureModeTuning}, {}, C2_MAY_BLOCK, nullptr);
@@ -106,6 +107,7 @@
mDisableTest = true;
}
+ getFile();
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
@@ -119,10 +121,9 @@
// Get the test parameters from GetParam call.
virtual void getParams() {}
-
+ void getFile();
bool setupConfigParam(int32_t nWidth, int32_t nHeight, int32_t nBFrame = 0);
- c2_status_t getMaxMinResolutionSupported(
- const std::shared_ptr<android::Codec2Client::Component>& component);
+ c2_status_t getMaxMinResolutionSupported();
// callback function to process onWorkDone received by Listener
void handleWorkDone(std::list<std::unique_ptr<C2Work>>& workItems) {
@@ -215,6 +216,8 @@
std::shared_ptr<android::Codec2Client::Listener> mListener;
std::shared_ptr<android::Codec2Client::Component> mComponent;
+ std::string mInputFile;
+
protected:
static void description(const std::string& description) {
RecordProperty("description", description);
@@ -284,9 +287,8 @@
return true;
}
-// LookUpTable of clips for component testing
-void GetURLForComponent(char* URL) {
- strcat(URL, "bbb_352x288_420p_30fps_32frames.yuv");
+void Codec2VideoEncHidlTestBase::getFile() {
+ mInputFile = sResourceDir + "bbb_352x288_420p_30fps_32frames.yuv";
}
void fillByteBuffer(char* inputBuffer, char* mInputData, uint32_t nWidth, int32_t nHeight) {
@@ -332,6 +334,12 @@
int bytesCount = nWidth * nHeight * 3 >> 1;
int32_t timestampIncr = ENCODER_TIMESTAMP_INCREMENT;
c2_status_t err = C2_OK;
+
+ // Query component's memory usage flags
+ std::vector<std::unique_ptr<C2Param>> params;
+ C2StreamUsageTuning::input compUsage(0u, 0u);
+ component->query({&compUsage}, {}, C2_DONT_BLOCK, &params);
+
while (1) {
if (nFrames == 0) break;
uint32_t flags = 0;
@@ -382,7 +390,8 @@
}
std::shared_ptr<C2GraphicBlock> block;
err = graphicPool->fetchGraphicBlock(nWidth, nHeight, HAL_PIXEL_FORMAT_YV12,
- {C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE},
+ {C2MemoryUsage::CPU_READ | compUsage.value,
+ C2MemoryUsage::CPU_WRITE | compUsage.value},
&block);
if (err != C2_OK) {
fprintf(stderr, "fetchGraphicBlock failed : %d\n", err);
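The hunk above queries the component's preferred input-buffer usage (C2StreamUsageTuning::input) and ORs it into the CPU access bits passed to fetchGraphicBlock(). A minimal sketch of that flag merge; the constants here are placeholders, not the real C2MemoryUsage values.

```cpp
#include <cstdint>
#include <cstdio>

int main() {
    const uint64_t kCpuRead  = 1u << 0;  // stands in for C2MemoryUsage::CPU_READ
    const uint64_t kCpuWrite = 1u << 1;  // stands in for C2MemoryUsage::CPU_WRITE
    uint64_t compUsage       = 1u << 4;  // e.g. a device-specific bit reported by the query

    // combine the component-reported bits with the CPU access bits
    uint64_t readUsage  = kCpuRead  | compUsage;
    uint64_t writeUsage = kCpuWrite | compUsage;
    printf("read=%#llx write=%#llx\n",
           (unsigned long long)readUsage, (unsigned long long)writeUsage);
    return 0;
}
```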
@@ -439,8 +448,7 @@
}
};
-c2_status_t Codec2VideoEncHidlTestBase::getMaxMinResolutionSupported(
- const std::shared_ptr<android::Codec2Client::Component>& component) {
+c2_status_t Codec2VideoEncHidlTestBase::getMaxMinResolutionSupported() {
std::unique_ptr<C2StreamPictureSizeInfo::input> param =
std::make_unique<C2StreamPictureSizeInfo::input>();
std::vector<C2FieldSupportedValuesQuery> validValueInfos = {
@@ -448,7 +456,7 @@
C2ParamField(param.get(), &C2StreamPictureSizeInfo::width)),
C2FieldSupportedValuesQuery::Current(
C2ParamField(param.get(), &C2StreamPictureSizeInfo::height))};
- c2_status_t c2err = component->querySupportedValues(validValueInfos, C2_MAY_BLOCK);
+ c2_status_t c2err = mComponent->querySupportedValues(validValueInfos, C2_MAY_BLOCK);
if (c2err != C2_OK || validValueInfos.size() != 2u) {
ALOGE("querySupportedValues_vb failed for pictureSize");
return c2err;
@@ -491,19 +499,14 @@
description("Encodes input file");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
bool signalEOS = std::get<3>(GetParam());
// Send an empty frame to receive CSD data from encoder.
bool sendEmptyFirstFrame = std::get<3>(GetParam());
mConfigBPictures = std::get<4>(GetParam());
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
+ eleStream.open(mInputFile, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
mTimestampUs = 0;
mTimestampDevTest = true;
@@ -640,11 +643,6 @@
description("Test Request for flush");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
-
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
if (!setupConfigParam(mWidth, mHeight)) {
ASSERT_TRUE(false) << "Failed while configuring height and width for " << mComponentName;
}
@@ -655,9 +653,9 @@
std::ifstream eleStream;
uint32_t numFramesFlushed = 10;
uint32_t numFrames = ENC_NUM_FRAMES;
- eleStream.open(mURL, std::ifstream::binary);
+ eleStream.open(mInputFile, std::ifstream::binary);
ASSERT_EQ(eleStream.is_open(), true);
- ALOGV("mURL : %s", mURL);
+
// flush
std::list<std::unique_ptr<C2Work>> flushedWork;
c2_status_t err = mComponent->flush(C2Component::FLUSH_COMPONENT, &flushedWork);
@@ -820,15 +818,9 @@
description("Encodes input file for different bitrates");
if (mDisableTest) GTEST_SKIP() << "Test is disabled";
- char mURL[512];
-
- strcpy(mURL, sResourceDir.c_str());
- GetURLForComponent(mURL);
-
std::ifstream eleStream;
- eleStream.open(mURL, std::ifstream::binary);
- ASSERT_EQ(eleStream.is_open(), true) << mURL << " file not found";
- ALOGV("mURL : %s", mURL);
+ eleStream.open(mInputFile, std::ifstream::binary);
+ ASSERT_EQ(eleStream.is_open(), true) << mInputFile << " file not found";
mFlushedIndices.clear();
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 71857e0..42b3c43 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -21,6 +21,7 @@
#include <codec2/hidl/client.h>
#include <C2Debug.h>
#include <C2BufferPriv.h>
+#include <C2Config.h> // for C2StreamUsageTuning
#include <C2PlatformSupport.h>
#include <android/hardware/media/bufferpool/2.0/IClientManager.h>
@@ -41,7 +42,10 @@
#include <cutils/native_handle.h>
#include <gui/bufferqueue/2.0/B2HGraphicBufferProducer.h>
#include <gui/bufferqueue/2.0/H2BGraphicBufferProducer.h>
+#include <hardware/gralloc.h> // for GRALLOC_USAGE_*
#include <hidl/HidlSupport.h>
+#include <system/window.h> // for NATIVE_WINDOW_QUERY_*
+#include <media/stagefright/foundation/ADebug.h> // for asString(status_t)
#include <deque>
@@ -81,6 +85,10 @@
// c2_status_t value that corresponds to hwbinder transaction failure.
constexpr c2_status_t C2_TRANSACTION_FAILED = C2_CORRUPTED;
+// By default prepare buffer to be displayed on any of the common surfaces
+constexpr uint64_t kDefaultConsumerUsage =
+ (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER);
+
// Searches for a name in GetServiceNames() and returns the index found. If the
// name is not found, the returned index will be equal to
// GetServiceNames().size().
@@ -1507,8 +1515,43 @@
mOutputBufferQueue->configure(surface, generation, bqId, maxDequeueCount, mBase1_2 ?
&syncObj : nullptr);
}
- ALOGD("surface generation remote change %u HAL ver: %s",
- generation, syncObj ? "1.2" : "1.0");
+
+ // set consumer bits
+ // TODO: should this get incorporated into setOutputSurface method so that consumer bits
+ // can be set atomically?
+ uint64_t consumerUsage = kDefaultConsumerUsage;
+ {
+ if (surface) {
+ int usage = 0;
+ status_t err = surface->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
+ if (err != NO_ERROR) {
+ ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
+ err, asString(err));
+ } else {
+ // Note: we are adding the default usage because components must support
+ // producing output frames that can be displayed on all output surfaces.
+
+ // TODO: do not set usage for tunneled scenario. It is unclear if consumer usage
+ // is meaningful in a tunneled scenario; on one hand output buffers exist, but
+ // they do not exist inside of C2 scope. Any buffer usage shall be communicated
+ // through the sideband channel.
+
+ // do an unsigned conversion as bit-31 may be 1
+ consumerUsage = (uint32_t)usage | kDefaultConsumerUsage;
+ }
+ }
+
+ C2StreamUsageTuning::output outputUsage{
+ 0u, C2AndroidMemoryUsage::FromGrallocUsage(consumerUsage).expected};
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ c2_status_t err = config({&outputUsage}, C2_MAY_BLOCK, &failures);
+ if (err != C2_OK) {
+ ALOGD("setOutputSurface -- failed to set consumer usage (%d/%s)",
+ err, asString(err));
+ }
+ }
+ ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
+ generation, (long long)consumerUsage, syncObj ? " sync" : "");
Return<Status> transStatus = syncObj ?
mBase1_2->setOutputSurfaceWithSyncObj(
@@ -1517,6 +1560,7 @@
mBase1_0->setOutputSurface(
static_cast<uint64_t>(blockPoolId),
bqId == 0 ? nullHgbp : igbp);
+
if (!transStatus.isOk()) {
LOG(ERROR) << "setOutputSurface -- transaction failed.";
return C2_TRANSACTION_FAILED;
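The "unsigned conversion" comment above is load-bearing: NATIVE_WINDOW_CONSUMER_USAGE_BITS is queried into a signed int, and widening a negative int straight to 64 bits sign-extends bit-31 into the upper word. A small standalone sketch of the hazard:

```cpp
#include <cstdint>
#include <cstdio>

int main() {
    int usage = (int)0x80000000;                       // bit-31 set by the query
    uint64_t signExtended = (uint64_t)(int64_t)usage;  // 0xffffffff80000000 -- wrong
    uint64_t zeroExtended = (uint32_t)usage;           // 0x0000000080000000 -- intended
    printf("sign-extended=%#llx zero-extended=%#llx\n",
           (unsigned long long)signExtended, (unsigned long long)zeroExtended);
    return 0;
}
```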
@@ -1694,4 +1738,3 @@
}
} // namespace android
-
diff --git a/media/codec2/hidl/client/output.cpp b/media/codec2/hidl/client/output.cpp
index 8cd4934..de34c24 100644
--- a/media/codec2/hidl/client/output.cpp
+++ b/media/codec2/hidl/client/output.cpp
@@ -181,7 +181,7 @@
int maxDequeueBufferCount,
std::shared_ptr<V1_2::SurfaceSyncObj> *syncObj) {
uint64_t consumerUsage = 0;
- if (igbp->getConsumerUsage(&consumerUsage) != OK) {
+ if (igbp && igbp->getConsumerUsage(&consumerUsage) != OK) {
ALOGW("failed to get consumer usage");
}
@@ -254,6 +254,9 @@
mBqId = bqId;
mOwner = std::make_shared<int>(0);
mMaxDequeueBufferCount = maxDequeueBufferCount;
+ if (igbp == nullptr) {
+ return false;
+ }
for (int i = 0; i < BufferQueueDefs::NUM_BUFFER_SLOTS; ++i) {
if (mBqId == 0 || !mBuffers[i]) {
continue;
diff --git a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
index 7de3503..b942be7 100644
--- a/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hidl/plugin/samples/SampleFilterPlugin.cpp
@@ -626,6 +626,14 @@
}
LOG(VERBOSE) << "work #" << workCount << ": flags=" << work->input.flags
<< " timestamp=" << work->input.ordinal.timestamp.peek();;
+
+ std::vector<C2Param *> configUpdate;
+ for (const std::unique_ptr<C2Param> ¶m : work->input.configUpdate) {
+ configUpdate.push_back(param.get());
+ }
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ mIntf->config_vb(configUpdate, C2_MAY_BLOCK, &failures);
+
std::shared_ptr<C2StreamHdrStaticInfo::input> hdrStaticInfo =
mIntf->getHdrStaticMetadata();
uint32_t dataspace = mIntf->getDataSpace();
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index f88408e..9e9bdfc 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -1341,6 +1341,15 @@
outputSurface,
outputGeneration,
maxDequeueCount);
+ } else {
+ // configure CPU read consumer usage
+ C2StreamUsageTuning::output outputUsage{0u, C2MemoryUsage::CPU_READ};
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ err = mComponent->config({ &outputUsage }, C2_MAY_BLOCK, &failures);
+ // do not print error message for now as most components may not yet
+ // support this setting
+ ALOGD_IF(err != C2_BAD_INDEX, "[%s] Configured output usage [%#llx]",
+ mName, (long long)outputUsage.value);
}
if (oStreamFormat.value == C2BufferData::LINEAR) {
diff --git a/media/codec2/sfplugin/CCodecBuffers.cpp b/media/codec2/sfplugin/CCodecBuffers.cpp
index e7207a5..29cc564 100644
--- a/media/codec2/sfplugin/CCodecBuffers.cpp
+++ b/media/codec2/sfplugin/CCodecBuffers.cpp
@@ -1301,17 +1301,7 @@
sp<Codec2Buffer> RawGraphicOutputBuffers::wrap(const std::shared_ptr<C2Buffer> &buffer) {
if (buffer == nullptr) {
- sp<Codec2Buffer> c2buffer = ConstGraphicBlockBuffer::AllocateEmpty(
- mFormat,
- [lbp = mLocalBufferPool](size_t capacity) {
- return lbp->newBuffer(capacity);
- });
- if (c2buffer == nullptr) {
- ALOGD("[%s] ConstGraphicBlockBuffer::AllocateEmpty failed", mName);
- return nullptr;
- }
- c2buffer->setRange(0, 0);
- return c2buffer;
+ return new Codec2Buffer(mFormat, new ABuffer(nullptr, 0));
} else {
return ConstGraphicBlockBuffer::Allocate(
mFormat,
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 2df0ba2..c275187 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -909,6 +909,8 @@
}
}));
+ add(ConfigMapper("android._encoding-quality-level", C2_PARAMKEY_ENCODING_QUALITY_LEVEL, "value")
+ .limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
add(ConfigMapper(KEY_QUALITY, C2_PARAMKEY_QUALITY, "value")
.limitTo(D::ENCODER & (D::CONFIG | D::PARAM)));
add(ConfigMapper(KEY_FLAC_COMPRESSION_LEVEL, C2_PARAMKEY_COMPLEXITY, "value")
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 34e6a88..4070478 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -491,7 +491,7 @@
* align(mHeight, 64) / plane.rowSampling;
}
- if ((maxPtr - minPtr + 1) <= planeSize) {
+ if (minPtr == mView.data()[0] && (maxPtr - minPtr + 1) <= planeSize) {
// FIXME: this is risky as reading/writing data out of bounds results
// in undefined behavior, but gralloc does assume a
// contiguous mapping
@@ -679,17 +679,20 @@
std::shared_ptr<C2Buffer> GraphicMetadataBuffer::asC2Buffer() {
#ifdef __LP64__
static std::once_flag s_checkOnce;
- static bool s_64bitonly {false};
+ static bool s_is64bitOk {true};
std::call_once(s_checkOnce, [&](){
const std::string abi32list =
::android::base::GetProperty("ro.product.cpu.abilist32", "");
- if (abi32list.empty()) {
- s_64bitonly = true;
+ if (!abi32list.empty()) {
+ int32_t inputSurfaceSetting =
+ ::android::base::GetIntProperty("debug.stagefright.c2inputsurface", int32_t(0));
+ s_is64bitOk = inputSurfaceSetting != 0;
}
});
- if (!s_64bitonly) {
- ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object");
+ if (!s_is64bitOk) {
+ ALOGE("GraphicMetadataBuffer does not work in 32+64 system if compiled as 64-bit object"\
+ "when debug.stagefright.c2inputsurface is set to 0");
return nullptr;
}
#endif
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 77a63a7..7c4bfb6 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -67,7 +67,8 @@
s.compare(s.size() - suffixLen, suffixLen, suffix) == 0;
}
-void addSupportedProfileLevels(
+// returns true if component advertised supported profile level(s)
+bool addSupportedProfileLevels(
std::shared_ptr<Codec2Client::Interface> intf,
MediaCodecInfo::CapabilitiesWriter *caps,
const Traits& trait, const std::string &mediaType) {
@@ -87,12 +88,12 @@
c2_status_t err = intf->querySupportedValues(profileQuery, C2_DONT_BLOCK);
ALOGV("query supported profiles -> %s | %s", asString(err), asString(profileQuery[0].status));
if (err != C2_OK || profileQuery[0].status != C2_OK) {
- return;
+ return false;
}
// we only handle enumerated values
if (profileQuery[0].values.type != C2FieldSupportedValues::VALUES) {
- return;
+ return false;
}
// determine if codec supports HDR
@@ -125,6 +126,8 @@
supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
+ bool added = false;
+
for (C2Value::Primitive profile : profileQuery[0].values.values) {
pl.profile = (C2Config::profile_t)profile.ref<uint32_t>();
std::vector<std::unique_ptr<C2SettingResult>> failures;
@@ -165,6 +168,7 @@
} else if (!mapper) {
caps->addProfileLevel(pl.profile, pl.level);
}
+ added = true;
// for H.263 also advertise the second highest level if the
// codec supports level 45, as level 45 only covers level 10
@@ -188,6 +192,7 @@
}
}
}
+ return added;
}
void addSupportedColorFormats(
@@ -604,7 +609,15 @@
}
}
- addSupportedProfileLevels(intf, caps.get(), trait, mediaType);
+ if (!addSupportedProfileLevels(intf, caps.get(), trait, mediaType)) {
+ // TODO(b/193279646) This will get fixed in C2InterfaceHelper
+ // Some components may not advertise supported values if they use a const
+ // param for profile/level (they support only one profile). For now cover
+ // only VP8 here until it is fixed.
+ if (mediaType == MIMETYPE_VIDEO_VP8) {
+ caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
+ }
+ }
addSupportedColorFormats(intf, caps.get(), trait, mediaType);
}
}
diff --git a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
index 66b7622..41e4fff 100644
--- a/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
+++ b/media/codec2/sfplugin/tests/CCodecBuffers_test.cpp
@@ -106,6 +106,19 @@
}
}
+TEST(RawGraphicOutputBuffersTest, WrapNullBuffer) {
+ constexpr int32_t kWidth = 320;
+ constexpr int32_t kHeight = 240;
+
+ std::shared_ptr<RawGraphicOutputBuffers> buffers =
+ GetRawGraphicOutputBuffers(kWidth, kHeight);
+
+ sp<Codec2Buffer> buffer = buffers->wrap(nullptr);
+ ASSERT_EQ(nullptr, buffer->base());
+ ASSERT_EQ(0, buffer->size());
+ ASSERT_EQ(0, buffer->offset());
+}
+
TEST(RawGraphicOutputBuffersTest, FlexYuvColorFormat) {
constexpr int32_t kWidth = 320;
constexpr int32_t kHeight = 240;
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 0966988..5f87c66 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -507,9 +507,21 @@
};
}
+// Matrix coefficients used to convert RGB to planar YUV data.
+// Each sub-array holds the 3x3 coefficients applied to R, G and B.
+static const int16_t bt601Matrix[2][3][3] = {
+ { { 76, 150, 29 }, { -43, -85, 128 }, { 128, -107, -21 } }, /* RANGE_FULL */
+ { { 66, 129, 25 }, { -38, -74, 112 }, { 112, -94, -18 } }, /* RANGE_LIMITED */
+};
+
+static const int16_t bt709Matrix[2][3][3] = {
+ { { 54, 183, 18 }, { -29, -99, 128 }, { 128, -116, -12 } }, /* RANGE_FULL */
+ { { 47, 157, 16 }, { -26, -86, 112 }, { 112, -102, -10 } }, /* RANGE_LIMITED */
+};
+
status_t ConvertRGBToPlanarYUV(
uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
- const C2GraphicView &src) {
+ const C2GraphicView &src, C2Color::matrix_t colorMatrix, C2Color::range_t colorRange) {
CHECK(dstY != nullptr);
CHECK((src.width() & 1) == 0);
CHECK((src.height() & 1) == 0);
@@ -527,28 +539,38 @@
const uint8_t *pGreen = src.data()[C2PlanarLayout::PLANE_G];
const uint8_t *pBlue = src.data()[C2PlanarLayout::PLANE_B];
-#define CLIP3(x,y,z) (((z) < (x)) ? (x) : (((z) > (y)) ? (y) : (z)))
+ // set default range as limited
+ if (colorRange != C2Color::RANGE_FULL && colorRange != C2Color::RANGE_LIMITED) {
+ colorRange = C2Color::RANGE_LIMITED;
+ }
+ const int16_t (*weights)[3] =
+ (colorMatrix == C2Color::MATRIX_BT709) ?
+ bt709Matrix[colorRange - 1] : bt601Matrix[colorRange - 1];
+ uint8_t zeroLvl = colorRange == C2Color::RANGE_FULL ? 0 : 16;
+ uint8_t maxLvlLuma = colorRange == C2Color::RANGE_FULL ? 255 : 235;
+ uint8_t maxLvlChroma = colorRange == C2Color::RANGE_FULL ? 255 : 240;
+
+#define CLIP3(min,v,max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))
for (size_t y = 0; y < src.height(); ++y) {
for (size_t x = 0; x < src.width(); ++x) {
- uint8_t red = *pRed;
- uint8_t green = *pGreen;
- uint8_t blue = *pBlue;
+ uint8_t r = *pRed;
+ uint8_t g = *pGreen;
+ uint8_t b = *pBlue;
- // using ITU-R BT.601 conversion matrix
- unsigned luma =
- CLIP3(0, (((red * 66 + green * 129 + blue * 25) >> 8) + 16), 255);
+ unsigned luma = ((r * weights[0][0] + g * weights[0][1] + b * weights[0][2]) >> 8) +
+ zeroLvl;
- dstY[x] = luma;
+ dstY[x] = CLIP3(zeroLvl, luma, maxLvlLuma);
if ((x & 1) == 0 && (y & 1) == 0) {
- unsigned U =
- CLIP3(0, (((-red * 38 - green * 74 + blue * 112) >> 8) + 128), 255);
+ unsigned U = ((r * weights[1][0] + g * weights[1][1] + b * weights[1][2]) >> 8) +
+ 128;
- unsigned V =
- CLIP3(0, (((red * 112 - green * 94 - blue * 18) >> 8) + 128), 255);
+ unsigned V = ((r * weights[2][0] + g * weights[2][1] + b * weights[2][2]) >> 8) +
+ 128;
- dstU[x >> 1] = U;
- dstV[x >> 1] = V;
+ dstU[x >> 1] = CLIP3(zeroLvl, U, maxLvlChroma);
+ dstV[x >> 1] = CLIP3(zeroLvl, V, maxLvlChroma);
}
pRed += layout.planes[C2PlanarLayout::PLANE_R].colInc;
pGreen += layout.planes[C2PlanarLayout::PLANE_G].colInc;
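A worked example of the fixed-point path above for a limited-range BT.601 white pixel (r = g = b = 255): the luma weights sum to 220, so Y = ((255 * 220) >> 8) + 16 = 219 + 16 = 235, exactly maxLvlLuma, and both chroma rows cancel to 0, giving U = V = 128. As a standalone sketch using the same coefficients and clip macro:

```cpp
#include <cstdint>
#include <cstdio>

#define CLIP3(min, v, max) (((v) < (min)) ? (min) : (((max) > (v)) ? (v) : (max)))

int main() {
    // limited-range BT.601 weights, as in bt601Matrix[RANGE_LIMITED] above
    const int16_t w[3][3] = { {66, 129, 25}, {-38, -74, 112}, {112, -94, -18} };
    const uint8_t zeroLvl = 16, maxLvlLuma = 235, maxLvlChroma = 240;
    uint8_t r = 255, g = 255, b = 255;

    unsigned luma = ((r * w[0][0] + g * w[0][1] + b * w[0][2]) >> 8) + zeroLvl; // 219 + 16
    unsigned U = ((r * w[1][0] + g * w[1][1] + b * w[1][2]) >> 8) + 128;        // 0 + 128
    unsigned V = ((r * w[2][0] + g * w[2][1] + b * w[2][2]) >> 8) + 128;        // 0 + 128

    printf("Y=%u U=%u V=%u\n", CLIP3(zeroLvl, luma, maxLvlLuma),
           CLIP3(zeroLvl, U, maxLvlChroma), CLIP3(zeroLvl, V, maxLvlChroma));
    return 0;   // prints Y=235 U=128 V=128
}
```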
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index af29e81..9fa642d 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -18,6 +18,7 @@
#define CODEC2_BUFFER_UTILS_H_
#include <C2Buffer.h>
+#include <C2Config.h>
#include <C2ParamDef.h>
#include <media/hardware/VideoAPI.h>
@@ -39,7 +40,8 @@
*/
status_t ConvertRGBToPlanarYUV(
uint8_t *dstY, size_t dstStride, size_t dstVStride, size_t bufferSize,
- const C2GraphicView &src);
+ const C2GraphicView &src, C2Color::matrix_t colorMatrix = C2Color::MATRIX_BT601,
+ C2Color::range_t colorRange = C2Color::RANGE_LIMITED);
/**
* Returns a planar YUV 420 8-bit media image descriptor.
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 00bf84f..4d939fa 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -92,6 +92,7 @@
ALookup<C2Config::bitrate_mode_t, int32_t> sBitrateModes = {
{ C2Config::BITRATE_CONST, BITRATE_MODE_CBR },
+ { C2Config::BITRATE_CONST_SKIP_ALLOWED, BITRATE_MODE_CBR_FD },
{ C2Config::BITRATE_VARIABLE, BITRATE_MODE_VBR },
{ C2Config::BITRATE_IGNORE, BITRATE_MODE_CQ },
};
diff --git a/media/codec2/vndk/C2AllocatorBlob.cpp b/media/codec2/vndk/C2AllocatorBlob.cpp
index 6340cba..8cfa1d7 100644
--- a/media/codec2/vndk/C2AllocatorBlob.cpp
+++ b/media/codec2/vndk/C2AllocatorBlob.cpp
@@ -178,6 +178,8 @@
return C2_CORRUPTED;
}
+ // Note: the BLOB allocator does not support padding as this functionality is expected
+ // to be provided by the gralloc implementation.
std::shared_ptr<C2GraphicAllocation> graphicAllocation;
c2_status_t status = mC2AllocatorGralloc->newGraphicAllocation(
capacity, kLinearBufferHeight, kLinearBufferFormat, usage, &graphicAllocation);
diff --git a/media/codec2/vndk/C2AllocatorGralloc.cpp b/media/codec2/vndk/C2AllocatorGralloc.cpp
index 4ffa3f1..6a7f19c 100644
--- a/media/codec2/vndk/C2AllocatorGralloc.cpp
+++ b/media/codec2/vndk/C2AllocatorGralloc.cpp
@@ -750,6 +750,16 @@
// We really don't know what this is; lock the buffer and pass it through ---
// the client may know how to interpret it.
+
+ // unlock previous allocation if it was successful
+ if (err == OK) {
+ err = GraphicBufferMapper::get().unlock(mBuffer);
+ if (err) {
+ ALOGE("failed transaction: unlock");
+ return C2_CORRUPTED;
+ }
+ }
+
void *pointer = nullptr;
err = GraphicBufferMapper::get().lock(
const_cast<native_handle_t *>(mBuffer), grallocUsage, rect, &pointer);
diff --git a/media/codec2/vndk/C2AllocatorIon.cpp b/media/codec2/vndk/C2AllocatorIon.cpp
index a8528df..77b265a 100644
--- a/media/codec2/vndk/C2AllocatorIon.cpp
+++ b/media/codec2/vndk/C2AllocatorIon.cpp
@@ -417,15 +417,16 @@
buffer = -1;
}
}
- return new Impl(ionFd, allocSize, bufferFd, buffer, id, ret);
-
+ // the padding is not usable so deduct it from the advertised capacity
+ return new Impl(ionFd, allocSize - sPadding, bufferFd, buffer, id, ret);
} else {
ret = ion_alloc_fd(ionFd, allocSize, align, heapMask, flags, &bufferFd);
ALOGV("ion_alloc_fd(ionFd = %d, size = %zu, align = %zu, prot = %d, flags = %d) "
"returned (%d) ; bufferFd = %d",
ionFd, allocSize, align, heapMask, flags, ret, bufferFd);
- return new ImplV2(ionFd, allocSize, bufferFd, id, ret);
+ // the padding is not usable so deduct it from the advertised capacity
+ return new ImplV2(ionFd, allocSize - sPadding, bufferFd, id, ret);
}
}
diff --git a/media/codec2/vndk/C2DmaBufAllocator.cpp b/media/codec2/vndk/C2DmaBufAllocator.cpp
index 6d8552a..1aa3d69 100644
--- a/media/codec2/vndk/C2DmaBufAllocator.cpp
+++ b/media/codec2/vndk/C2DmaBufAllocator.cpp
@@ -111,8 +111,27 @@
virtual bool equals(const std::shared_ptr<C2LinearAllocation>& other) const override;
// internal methods
- C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name, unsigned flags,
- C2Allocator::id_t id);
+
+ /**
+ * Constructs an allocation via a new allocation.
+ *
+ * @param alloc allocator
+ * @param allocSize size used for the allocator
+ * @param capacity capacity advertised to the client
+ * @param heap_name name of the dmabuf heap (device)
+ * @param flags flags
+ * @param id allocator id
+ */
+ C2DmaBufAllocation(BufferAllocator& alloc, size_t allocSize, size_t capacity,
+ C2String heap_name, unsigned flags, C2Allocator::id_t id);
+
+ /**
+ * Constructs an allocation by wrapping an existing allocation.
+ *
+ * @param size capacity advertised to the client
+ * @param shareFd dmabuf fd of the wrapped allocation
+ * @param id allocator id
+ */
C2DmaBufAllocation(size_t size, int shareFd, C2Allocator::id_t id);
c2_status_t status() const;
@@ -246,19 +265,19 @@
}
}
-C2DmaBufAllocation::C2DmaBufAllocation(BufferAllocator& alloc, size_t size, C2String heap_name,
- unsigned flags, C2Allocator::id_t id)
- : C2LinearAllocation(size), mHandle(-1, 0) {
+C2DmaBufAllocation::C2DmaBufAllocation(BufferAllocator& alloc, size_t allocSize, size_t capacity,
+ C2String heap_name, unsigned flags, C2Allocator::id_t id)
+ : C2LinearAllocation(capacity), mHandle(-1, 0) {
int bufferFd = -1;
int ret = 0;
- bufferFd = alloc.Alloc(heap_name, size, flags);
+ bufferFd = alloc.Alloc(heap_name, allocSize, flags);
if (bufferFd < 0) {
ret = bufferFd;
}
// this may be a non-working handle if bufferFd is negative
- mHandle = C2HandleBuf(bufferFd, size);
+ mHandle = C2HandleBuf(bufferFd, capacity);
mId = id;
mInit = c2_status_t(c2_map_errno<ENOMEM, EACCES, EINVAL>(ret));
}
@@ -381,7 +400,7 @@
size_t allocSize = (size_t)capacity + sPadding;
// TODO: should we align allocation size to mBlockSize to reflect the true allocation size?
std::shared_ptr<C2DmaBufAllocation> alloc = std::make_shared<C2DmaBufAllocation>(
- mBufferAllocator, allocSize, heap_name, flags, getId());
+ mBufferAllocator, allocSize, allocSize - sPadding, heap_name, flags, getId());
ret = alloc->status();
if (ret == C2_OK) {
*allocation = alloc;
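A small sketch of the size bookkeeping the new two-size constructor enables: the heap allocation is padded, but only the unpadded capacity is advertised to clients. The sPadding value below is illustrative; the real constant lives in the allocator.

```cpp
#include <cstddef>
#include <cstdio>

int main() {
    const size_t sPadding = 256;            // assumed padding size
    size_t capacity = 4096;                 // what the client asked for
    size_t allocSize = capacity + sPadding; // what is requested from the heap

    // advertised capacity excludes the padding, so clients never touch it
    size_t advertised = allocSize - sPadding;
    printf("allocSize=%zu advertised=%zu\n", allocSize, advertised);
    return 0;
}
```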
diff --git a/media/extractors/fuzzers/Android.bp b/media/extractors/fuzzers/Android.bp
index b731292..0e54b58 100644
--- a/media/extractors/fuzzers/Android.bp
+++ b/media/extractors/fuzzers/Android.bp
@@ -39,6 +39,7 @@
static_libs: [
"liblog",
+ "libstagefright_foundation_colorutils_ndk",
"libstagefright_foundation",
"libmediandk_format",
"libmedia_ndkformatpriv",
diff --git a/media/extractors/mkv/Android.bp b/media/extractors/mkv/Android.bp
index 840c9fc..54c5b27 100644
--- a/media/extractors/mkv/Android.bp
+++ b/media/extractors/mkv/Android.bp
@@ -32,6 +32,7 @@
],
static_libs: [
+ "libstagefright_foundation_colorutils_ndk", // for mainline-safe ColorUtils
"libstagefright_foundation",
"libstagefright_metadatautils",
"libwebm",
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index b4e4c5d..fbcd554 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -1126,7 +1126,9 @@
void *data;
size_t size;
- if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2, &data, &size)) {
+ if (AMediaFormat_getBuffer(mLastTrack->meta, AMEDIAFORMAT_KEY_CSD_2,
+ &data, &size)
+ && size >= 5) {
const uint8_t *ptr = (const uint8_t *)data;
const uint8_t profile = ptr[2] >> 1;
const uint8_t bl_compatibility_id = (ptr[4]) >> 4;
@@ -1163,8 +1165,12 @@
mLastTrack->next = track_b;
track_b->next = NULL;
- auto id = track_b->meta->mFormat->findEntryByName(AMEDIAFORMAT_KEY_CSD_2);
- track_b->meta->mFormat->removeEntryAt(id);
+ // we want to remove the csd-2 key from the metadata, but
+ // don't have an AMediaFormat_* function to do so. Settle
+ // for replacing this csd-2 with an empty csd-2.
+ uint8_t emptybuffer[8] = {};
+ AMediaFormat_setBuffer(track_b->meta, AMEDIAFORMAT_KEY_CSD_2,
+ emptybuffer, 0);
if (4 == profile || 7 == profile || 8 == profile ) {
AMediaFormat_setString(track_b->meta,
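A standalone sketch of the csd-2 field extraction that the new size >= 5 guard protects; the payload bytes here are made up for illustration.

```cpp
#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
    const uint8_t csd2[] = {0x01, 0x00, 0x12, 0x00, 0x90};  // illustrative payload
    size_t size = sizeof(csd2);
    if (size >= 5) {  // without this check, ptr[2]/ptr[4] could read out of bounds
        const uint8_t *ptr = csd2;
        const uint8_t profile = ptr[2] >> 1;              // upper 7 bits of byte 2
        const uint8_t bl_compatibility_id = ptr[4] >> 4;  // upper nibble of byte 4
        printf("profile=%u bl_compatibility_id=%u\n", profile, bl_compatibility_id);
    }
    return 0;
}
```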
diff --git a/media/extractors/tests/Android.bp b/media/extractors/tests/Android.bp
index e17893e..5d97d9a 100644
--- a/media/extractors/tests/Android.bp
+++ b/media/extractors/tests/Android.bp
@@ -51,6 +51,7 @@
"libstagefright_esds",
"libstagefright_mpeg2support",
"libstagefright_mpeg2extractor",
+ "libstagefright_foundation_colorutils_ndk",
"libstagefright_foundation",
"libstagefright_metadatautils",
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 6ca5fc8..4b08295 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -82,7 +82,9 @@
/**
* This format uses 24-bit samples packed into 3 bytes.
- * The bytes are in the native endian order.
+ * The bytes are in little-endian order, so the least significant byte
+ * comes first in the byte array.
+ *
* The maximum range of the data is -8388608 (0x800000)
* to 8388607 (0x7FFFFF).
*
diff --git a/media/libaaudio/src/core/AudioStream.cpp b/media/libaaudio/src/core/AudioStream.cpp
index 1ed240a..09d9535 100644
--- a/media/libaaudio/src/core/AudioStream.cpp
+++ b/media/libaaudio/src/core/AudioStream.cpp
@@ -452,8 +452,8 @@
void* threadArg)
{
if (mHasThread) {
- ALOGE("%s() - mHasThread already true", __func__);
- return AAUDIO_ERROR_INVALID_STATE;
+ ALOGD("%s() - previous thread was not joined, join now to be safe", __func__);
+ joinThread_l(nullptr);
}
if (threadProc == nullptr) {
return AAUDIO_ERROR_NULL;
@@ -462,6 +462,7 @@
mThreadProc = threadProc;
mThreadArg = threadArg;
setPeriodNanoseconds(periodNanoseconds);
+ mHasThread = true;
// Prevent this object from getting deleted before the thread has a chance to create
// its strong pointer. Assume the thread will call decStrong().
this->incStrong(nullptr);
@@ -470,6 +471,7 @@
android::status_t status = -errno;
ALOGE("%s() - pthread_create() failed, %d", __func__, status);
this->decStrong(nullptr); // Because the thread won't do it.
+ mHasThread = false;
return AAudioConvert_androidToAAudioResult(status);
} else {
// TODO Use AAudioThread or maybe AndroidThread
@@ -484,7 +486,6 @@
err = pthread_setname_np(mThread, name);
ALOGW_IF((err != 0), "Could not set name of AAudio thread. err = %d", err);
- mHasThread = true;
return AAUDIO_OK;
}
}
@@ -498,7 +499,7 @@
// This must be called under mStreamLock.
aaudio_result_t AudioStream::joinThread_l(void** returnArg) {
if (!mHasThread) {
- ALOGD("joinThread() - but has no thread");
+ ALOGD("joinThread() - but has no thread or already join()ed");
return AAUDIO_ERROR_INVALID_STATE;
}
aaudio_result_t result = AAUDIO_OK;
@@ -515,8 +516,7 @@
result = AAudioConvert_androidToAAudioResult(-err);
} else {
ALOGD("%s() pthread_join succeeded", __func__);
- // This must be set false so that the callback thread can be created
- // when the stream is restarted.
+ // Prevent joining a second time, which has undefined behavior.
mHasThread = false;
}
} else {
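A minimal sketch (locking omitted) of the mHasThread lifecycle these hunks establish: the flag is set before pthread_create() so the failure path can clear it, a stale thread is joined lazily on re-create, and the flag is cleared after a successful join so a second join, which has undefined behavior, cannot happen.

```cpp
#include <pthread.h>
#include <cstdio>

class StreamSketch {
    bool mHasThread = false;
    pthread_t mThread{};
public:
    int createThread(void *(*proc)(void *), void *arg) {
        if (mHasThread) joinThread();   // join the previous thread instead of failing
        mHasThread = true;              // set before create; cleared again on failure
        int err = pthread_create(&mThread, nullptr, proc, arg);
        if (err != 0) mHasThread = false;
        return err;
    }
    int joinThread() {
        if (!mHasThread) return -1;     // no thread, or already joined
        int err = pthread_join(mThread, nullptr);
        mHasThread = false;             // prevent joining a second time
        return err;
    }
};

static void *worker(void *) { puts("worker ran"); return nullptr; }

int main() {
    StreamSketch s;
    s.createThread(worker, nullptr);
    s.createThread(worker, nullptr);    // implicitly joins the first thread
    s.joinThread();
    return 0;
}
```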
diff --git a/media/libaaudio/src/core/AudioStream.h b/media/libaaudio/src/core/AudioStream.h
index 2b45ed3..9835c8c 100644
--- a/media/libaaudio/src/core/AudioStream.h
+++ b/media/libaaudio/src/core/AudioStream.h
@@ -157,9 +157,13 @@
virtual aaudio_result_t setBufferSize(int32_t requestedFrames) = 0;
- virtual aaudio_result_t createThread_l(int64_t periodNanoseconds,
- aaudio_audio_thread_proc_t threadProc,
- void *threadArg);
+ aaudio_result_t createThread(int64_t periodNanoseconds,
+ aaudio_audio_thread_proc_t threadProc,
+ void *threadArg)
+ EXCLUDES(mStreamLock) {
+ std::lock_guard<std::mutex> lock(mStreamLock);
+ return createThread_l(periodNanoseconds, threadProc, threadArg);
+ }
aaudio_result_t joinThread(void **returnArg);
@@ -535,6 +539,11 @@
mSessionId = sessionId;
}
+ aaudio_result_t createThread_l(int64_t periodNanoseconds,
+ aaudio_audio_thread_proc_t threadProc,
+ void *threadArg)
+ REQUIRES(mStreamLock);
+
aaudio_result_t joinThread_l(void **returnArg) REQUIRES(mStreamLock);
std::atomic<bool> mCallbackEnabled{false};
@@ -658,6 +667,7 @@
std::atomic<pid_t> mErrorCallbackThread{CALLBACK_THREAD_NONE};
// background thread ----------------------------------
+ // Use mHasThread to prevent joining twice, which has undefined behavior.
bool mHasThread GUARDED_BY(mStreamLock) = false;
pthread_t mThread GUARDED_BY(mStreamLock) = {};
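A portable sketch of the public-wrapper/_l convention formalized above: the public method takes mStreamLock and delegates to a private _l variant that assumes the lock is held. The EXCLUDES/REQUIRES annotations express this contract to clang's thread-safety analysis; they are omitted here to keep the sketch self-contained.

```cpp
#include <mutex>

class StreamSketch {
    std::mutex mStreamLock;

    int createThread_l() {   // caller must hold mStreamLock (REQUIRES in the real code)
        // ... create the thread ...
        return 0;
    }

public:
    int createThread() {     // must be called without mStreamLock held (EXCLUDES)
        std::lock_guard<std::mutex> lock(mStreamLock);
        return createThread_l();
    }
};
```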
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index 60eb73a..e96e134 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -94,10 +94,15 @@
AudioTrack::Buffer *audioBuffer = static_cast<AudioTrack::Buffer *>(info);
if (getState() == AAUDIO_STREAM_STATE_DISCONNECTED) {
ALOGW("processCallbackCommon() data, stream disconnected");
+ // This will kill the stream and prevent it from being restarted.
+ // That is OK because the stream is disconnected.
audioBuffer->size = SIZE_STOP_CALLBACKS;
} else if (!mCallbackEnabled.load()) {
- ALOGW("processCallbackCommon() no data because callback disabled");
- audioBuffer->size = SIZE_STOP_CALLBACKS;
+ ALOGW("processCallbackCommon() no data because callback disabled, set size=0");
+ // Do NOT use SIZE_STOP_CALLBACKS here because that will kill the stream and
+ // prevent it from being restarted. This can occur because of a race condition
+ // caused by Legacy callbacks running after the track is "stopped".
+ audioBuffer->size = 0;
} else {
if (audioBuffer->frameCount == 0) {
ALOGW("processCallbackCommon() data, frameCount is zero");
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 6765bdb..5f802de 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -472,7 +472,7 @@
status = BAD_VALUE;
goto exit;
}
- mStreamType = streamType;
+ mOriginalStreamType = streamType;
} else {
// stream type shouldn't be looked at, this track has audio attributes
@@ -481,7 +481,7 @@
" usage=%d content=%d flags=0x%x tags=[%s]",
__func__,
mAttributes.usage, mAttributes.content_type, mAttributes.flags, mAttributes.tags);
- mStreamType = AUDIO_STREAM_DEFAULT;
+ mOriginalStreamType = AUDIO_STREAM_DEFAULT;
audio_flags_to_audio_output_flags(mAttributes.flags, &flags);
}
@@ -1605,9 +1605,6 @@
audio_stream_type_t AudioTrack::streamType() const
{
- if (mStreamType == AUDIO_STREAM_DEFAULT) {
- return AudioSystem::attributesToStreamType(mAttributes);
- }
return mStreamType;
}
@@ -1688,8 +1685,9 @@
}
IAudioFlinger::CreateTrackInput input;
- if (mStreamType != AUDIO_STREAM_DEFAULT) {
- input.attr = AudioSystem::streamTypeToAttributes(mStreamType);
+ if (mOriginalStreamType != AUDIO_STREAM_DEFAULT) {
+ // Legacy: This is based on original parameters even if the track is recreated.
+ input.attr = AudioSystem::streamTypeToAttributes(mOriginalStreamType);
} else {
input.attr = mAttributes;
}
@@ -1745,6 +1743,7 @@
mNotificationFramesAct = (uint32_t)output.notificationFrameCount;
mRoutedDeviceId = output.selectedDeviceId;
mSessionId = output.sessionId;
+ mStreamType = output.streamType;
mSampleRate = output.sampleRate;
if (mOriginalSampleRate == 0) {
@@ -3284,8 +3283,6 @@
result.appendFormat(" id(%d) status(%d), state(%d), session Id(%d), flags(%#x)\n",
mPortId, mStatus, mState, mSessionId, mFlags);
result.appendFormat(" stream type(%d), left - right volume(%f, %f)\n",
- (mStreamType == AUDIO_STREAM_DEFAULT) ?
- AudioSystem::attributesToStreamType(mAttributes) :
mStreamType,
mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
result.appendFormat(" format(%#x), channel mask(%#x), channel count(%u)\n",
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index 0564cdf..cae81f0 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -101,6 +101,8 @@
legacy2aidl_audio_port_handle_t_int32_t(selectedDeviceId));
aidl.sessionId = VALUE_OR_RETURN(legacy2aidl_audio_session_t_int32_t(sessionId));
aidl.sampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(sampleRate));
+ aidl.streamType = VALUE_OR_RETURN(
+ legacy2aidl_audio_stream_type_t_AudioStreamType(streamType));
aidl.afFrameCount = VALUE_OR_RETURN(convertIntegral<int64_t>(afFrameCount));
aidl.afSampleRate = VALUE_OR_RETURN(convertIntegral<int32_t>(afSampleRate));
aidl.afLatencyMs = VALUE_OR_RETURN(convertIntegral<int32_t>(afLatencyMs));
@@ -122,6 +124,8 @@
aidl2legacy_int32_t_audio_port_handle_t(aidl.selectedDeviceId));
legacy.sessionId = VALUE_OR_RETURN(aidl2legacy_int32_t_audio_session_t(aidl.sessionId));
legacy.sampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.sampleRate));
+ legacy.streamType = VALUE_OR_RETURN(
+ aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
legacy.afFrameCount = VALUE_OR_RETURN(convertIntegral<size_t>(aidl.afFrameCount));
legacy.afSampleRate = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afSampleRate));
legacy.afLatencyMs = VALUE_OR_RETURN(convertIntegral<uint32_t>(aidl.afLatencyMs));
@@ -661,7 +665,11 @@
status_t AudioFlingerClientAdapter::createAudioPatch(const struct audio_patch* patch,
audio_patch_handle_t* handle) {
media::AudioPatch patchAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_AudioPatch(*patch));
- int32_t aidlRet;
+ int32_t aidlRet = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(
+ AUDIO_PATCH_HANDLE_NONE));
+ if (handle != nullptr) {
+ aidlRet = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_patch_handle_t_int32_t(*handle));
+ }
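+ // the handle is effectively in/out: seed aidlRet with the caller's current patch
+ // handle (or AUDIO_PATCH_HANDLE_NONE) so the server side can update a patch in place.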
RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
mDelegate->createAudioPatch(patchAidl, &aidlRet)));
if (handle != nullptr) {
@@ -1136,7 +1144,8 @@
Status AudioFlingerServerAdapter::createAudioPatch(const media::AudioPatch& patch,
int32_t* _aidl_return) {
audio_patch patchLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioPatch_audio_patch(patch));
- audio_patch_handle_t handleLegacy;
+ audio_patch_handle_t handleLegacy = VALUE_OR_RETURN_BINDER(
+ aidl2legacy_int32_t_audio_patch_handle_t(*_aidl_return));
RETURN_BINDER_IF_ERROR(mDelegate->createAudioPatch(&patchLegacy, &handleLegacy));
*_aidl_return = VALUE_OR_RETURN_BINDER(legacy2aidl_audio_patch_handle_t_int32_t(handleLegacy));
return Status::ok();
diff --git a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
index 6bdd8e4..40473fa 100644
--- a/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/CreateTrackResponse.aidl
@@ -16,6 +16,7 @@
package android.media;
+import android.media.AudioStreamType;
import android.media.IAudioTrack;
/**
@@ -34,6 +35,7 @@
int selectedDeviceId;
int sessionId;
int sampleRate;
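+ /** Stream type resolved by AudioFlinger from the track's attributes. */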
+ AudioStreamType streamType;
long afFrameCount;
int afSampleRate;
int afLatencyMs;
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index f61eef2..cb00990 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1164,8 +1164,9 @@
// constant after constructor or set()
audio_format_t mFormat; // as requested by client, not forced to 16-bit
- audio_stream_type_t mStreamType; // mStreamType == AUDIO_STREAM_DEFAULT implies
- // this AudioTrack has valid attributes
+ // mOriginalStreamType == AUDIO_STREAM_DEFAULT implies this AudioTrack has valid attributes
+ audio_stream_type_t mOriginalStreamType = AUDIO_STREAM_DEFAULT;
+ audio_stream_type_t mStreamType = AUDIO_STREAM_DEFAULT;
uint32_t mChannelCount;
audio_channel_mask_t mChannelMask;
sp<IMemory> mSharedBuffer;
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 327b37e..0e059f7 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -110,6 +110,7 @@
/* output */
uint32_t sampleRate;
+ audio_stream_type_t streamType;
size_t afFrameCount;
uint32_t afSampleRate;
uint32_t afLatencyMs;
diff --git a/media/libmediaformatshaper/VQApply.cpp b/media/libmediaformatshaper/VQApply.cpp
index 585ec6c..26ff446 100644
--- a/media/libmediaformatshaper/VQApply.cpp
+++ b/media/libmediaformatshaper/VQApply.cpp
@@ -63,13 +63,62 @@
return 0;
}
- if (codec->supportedMinimumQuality() > 0) {
- // allow the codec provided minimum quality behavior to work at it
- ALOGD("minquality: codec claims to implement minquality=%d",
- codec->supportedMinimumQuality());
+ // only proceed if we're in the handheld category.
+ // We embed this information within the codec record when we build up features
+ // and pass them in from MediaCodec; it's the easiest place to store it
+ //
+ // TODO: make a #define for '_vq_eligible.device' here and in MediaCodec.cpp
+ //
+ int32_t isVQEligible = 0;
+ (void) codec->getFeatureValue("_vq_eligible.device", &isVQEligible);
+ if (!isVQEligible) {
+ ALOGD("minquality: not an eligible device class");
return 0;
}
+ // look at resolution to determine if we want any shaping/modification at all.
+ //
+ // we currently only shape (or ask the underlying codec to shape) for
+ // resolution range 320x240 < target <= 1920x1088 (1080p, with 16-pixel height alignment)
+ // NB: the mix of < and <= is deliberate.
+ //
+
+ int32_t width = 0;
+ (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
+ int32_t height = 0;
+ (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
+ int64_t pixels = ((int64_t)width) * height;
+
+ bool eligibleSize = true;
+ if (pixels <= 320 * 240) {
+ eligibleSize = false;
+ } else if (pixels > 1920 * 1088) {
+ eligibleSize = false;
+ }
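+ // e.g. 640x480 and 1920x1080 fall inside the shaping range, while 320x240 and
+ // 3840x2160 do not.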
+
+ if (!eligibleSize) {
+ // we won't shape, and ask that the codec not shape
+ ALOGD("minquality: %dx%d outside of shaping range", width, height);
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", 0);
+ return 0;
+ }
+
+ if (codec->supportedMinimumQuality() > 0) {
+ // let the codec-provided minimum quality behavior handle it
+ ALOGD("minquality: codec claims to implement minquality=%d",
+ codec->supportedMinimumQuality());
+
+ // tell the underlying codec to do its thing; we won't try to second-guess it.
+ // default to 1, aka S_HANDHELD.
+ int32_t qualityTarget = 1;
+ (void) codec->getFeatureValue("_quality.target", &qualityTarget);
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", qualityTarget);
+ return 0;
+ }
+
+ // let the codec know that we'll be enforcing the minimum quality standards
+ AMediaFormat_setInt32(inFormat, "android._encoding-quality-level", 0);
+
//
// consider any and all tools available
// -- qp
@@ -84,11 +133,8 @@
bitrateConfigured = bitrateConfiguredTmp;
bitrateChosen = bitrateConfigured;
- int32_t width = 0;
- (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_WIDTH, &width);
- int32_t height = 0;
- (void) AMediaFormat_getInt32(inFormat, AMEDIAFORMAT_KEY_HEIGHT, &height);
- int64_t pixels = ((int64_t)width) * height;
+ // width, height, and pixels are calculated above
+
double minimumBpp = codec->getBpp(width, height);
int64_t bitrateFloor = pixels * minimumBpp;
diff --git a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
index 6dc3e3f..2aabd53 100644
--- a/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/StagefrightMetadataRetriever.cpp
@@ -233,7 +233,7 @@
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
const AString &componentName = matchingCodecs[i];
- sp<ImageDecoder> decoder = new ImageDecoder(componentName, trackMeta, source);
+ sp<MediaImageDecoder> decoder = new MediaImageDecoder(componentName, trackMeta, source);
int64_t frameTimeUs = thumbnail ? -1 : 0;
if (decoder->init(frameTimeUs, 0 /*option*/, colorFormat) == OK) {
sp<IMemory> frame = decoder->extractFrame(rect);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index d94cecf..9ae7ddb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -2856,10 +2856,43 @@
CHECK(msg->findInt32("payload-type", &payloadType));
+ int32_t rtpSeq = 0, rtpTime = 0;
+ int64_t ntpTime = 0, recvTimeUs = 0;
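+ // 64-bit times below are written as two int32s, high word first; the reader is
+ // expected to reassemble them in the same order.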
+
Parcel in;
in.writeInt32(payloadType);
switch (payloadType) {
+ case ARTPSource::RTP_FIRST_PACKET:
+ {
+ CHECK(msg->findInt32("rtp-time", &rtpTime));
+ CHECK(msg->findInt32("rtp-seq-num", &rtpSeq));
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ in.writeInt32(rtpTime);
+ in.writeInt32(rtpSeq);
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ break;
+ }
+ case ARTPSource::RTCP_FIRST_PACKET:
+ {
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ break;
+ }
+ case ARTPSource::RTCP_SR:
+ {
+ CHECK(msg->findInt32("rtp-time", &rtpTime));
+ CHECK(msg->findInt64("ntp-time", &ntpTime));
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ in.writeInt32(rtpTime);
+ in.writeInt32(ntpTime >> 32);
+ in.writeInt32(ntpTime & 0xFFFFFFFF);
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ break;
+ }
case ARTPSource::RTCP_TSFB: // RTCP TSFB
case ARTPSource::RTCP_PSFB: // RTCP PSFB
case ARTPSource::RTP_AUTODOWN:
@@ -2882,6 +2915,8 @@
int32_t feedbackType, bitrate;
int32_t highestSeqNum, baseSeqNum, prevExpected;
int32_t numBufRecv, prevNumBufRecv;
+ int32_t latestRtpTime, jbTimeMs, rtpRtcpSrTimeGapMs;
+ int64_t recvTimeUs;
CHECK(msg->findInt32("feedback-type", &feedbackType));
CHECK(msg->findInt32("bit-rate", &bitrate));
CHECK(msg->findInt32("highest-seq-num", &highestSeqNum));
@@ -2889,6 +2924,10 @@
CHECK(msg->findInt32("prev-expected", &prevExpected));
CHECK(msg->findInt32("num-buf-recv", &numBufRecv));
CHECK(msg->findInt32("prev-num-buf-recv", &prevNumBufRecv));
+ CHECK(msg->findInt32("latest-rtp-time", &latestRtpTime));
+ CHECK(msg->findInt64("recv-time-us", &recvTimeUs));
+ CHECK(msg->findInt32("rtp-jitter-time-ms", &jbTimeMs));
+ CHECK(msg->findInt32("rtp-rtcpsr-time-gap-ms", &rtpRtcpSrTimeGapMs));
in.writeInt32(feedbackType);
in.writeInt32(bitrate);
in.writeInt32(highestSeqNum);
@@ -2896,6 +2935,11 @@
in.writeInt32(prevExpected);
in.writeInt32(numBufRecv);
in.writeInt32(prevNumBufRecv);
+ in.writeInt32(latestRtpTime);
+ in.writeInt32(recvTimeUs >> 32);
+ in.writeInt32(recvTimeUs & 0xFFFFFFFF);
+ in.writeInt32(jbTimeMs);
+ in.writeInt32(rtpRtcpSrTimeGapMs);
break;
}
case ARTPSource::RTP_CVO:
diff --git a/media/libmediaplayerservice/nuplayer/RTPSource.cpp b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
index d2d978a..4d6a483 100644
--- a/media/libmediaplayerservice/nuplayer/RTPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTPSource.cpp
@@ -395,23 +395,13 @@
CHECK(msg->findInt64("ntp-time", (int64_t *)&ntpTime));
onTimeUpdate(trackIndex, rtpTime, ntpTime);
- break;
- }
-
- int32_t firstRTCP;
- if (msg->findInt32("first-rtcp", &firstRTCP)) {
- // There won't be an access unit here, it's just a notification
- // that the data communication worked since we got the first
- // rtcp packet.
- ALOGV("first-rtcp");
- break;
}
int32_t IMSRxNotice;
if (msg->findInt32("rtcp-event", &IMSRxNotice)) {
- int32_t payloadType, feedbackType;
+ int32_t payloadType = 0, feedbackType = 0;
CHECK(msg->findInt32("payload-type", &payloadType));
- CHECK(msg->findInt32("feedback-type", &feedbackType));
+ msg->findInt32("feedback-type", &feedbackType);
sp<AMessage> notify = dupNotify();
notify->setInt32("what", kWhatIMSRxNotice);
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 9533ae5..8e05de8 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -145,15 +145,17 @@
return;
}
- // Close socket before posting message to RTSPSource message handler.
- if (mHandler != NULL) {
- close(mHandler->getARTSPConnection()->getSocket());
- }
-
sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
sp<AMessage> dummy;
msg->postAndAwaitResponse(&dummy);
+
+ // Close socket after posting message to RTSPSource message handler.
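+ // (the handler thread may still be using the socket until the disconnect
+ // round-trip completes)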
+ if (mHandler != NULL && mHandler->getARTSPConnection()->getSocket() >= 0) {
+ ALOGD("closing rtsp socket if not closed yet.");
+ close(mHandler->getARTSPConnection()->getSocket());
+ }
+
}
status_t NuPlayer::RTSPSource::feedMoreTSData() {
diff --git a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
index 7272a74..d21908f 100644
--- a/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
+++ b/media/libmediatranscoding/transcoder/VideoTrackTranscoder.cpp
@@ -50,6 +50,8 @@
static constexpr int32_t kDefaultBitrateMbps = 10 * 1000 * 1000;
// Default frame rate.
static constexpr int32_t kDefaultFrameRate = 30;
+// Default codec complexity
+static constexpr int32_t kDefaultCodecComplexity = 1;
template <typename T>
void VideoTrackTranscoder::BlockingQueue<T>::push(T const& value, bool front) {
@@ -247,6 +249,7 @@
SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_PRIORITY, encoderFormat, kDefaultCodecPriority);
SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_FRAME_RATE, encoderFormat, kDefaultFrameRate);
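+ // assumption: a low default complexity favors transcoding throughput; encoders
+ // that do not read AMEDIAFORMAT_KEY_COMPLEXITY simply ignore the default.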
+ SetDefaultFormatValueInt32(AMEDIAFORMAT_KEY_COMPLEXITY, encoderFormat, kDefaultCodecComplexity);
AMediaFormat_setInt32(encoderFormat, AMEDIAFORMAT_KEY_COLOR_FORMAT, kColorFormatSurface);
// Always encode without rotation. The rotation degree will be transferred directly to
diff --git a/media/libstagefright/CodecBase.cpp b/media/libstagefright/CodecBase.cpp
index 5b724aa..b9fb041 100644
--- a/media/libstagefright/CodecBase.cpp
+++ b/media/libstagefright/CodecBase.cpp
@@ -40,4 +40,31 @@
buf->size = size;
}
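+// Default implementations for the parameter-subscription API: report an empty set of
+// supported parameters and reject any non-empty subscription, so CodecBase subclasses
+// that do not override these keep their previous behavior.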
+status_t CodecBase::querySupportedParameters(std::vector<std::string> *names) {
+ if (names == nullptr) {
+ return BAD_VALUE;
+ }
+ names->clear();
+ return OK;
+}
+
+status_t CodecBase::describeParameter(const std::string &, CodecParameterDescriptor *) {
+ return ERROR_UNSUPPORTED;
+}
+
+status_t CodecBase::subscribeToParameters(const std::vector<std::string> &names) {
+ if (names.empty()) {
+ return OK;
+ }
+ return ERROR_UNSUPPORTED;
+}
+
+status_t CodecBase::unsubscribeFromParameters(const std::vector<std::string> &names) {
+ if (names.empty()) {
+ return OK;
+ }
+ return ERROR_UNSUPPORTED;
+}
+
} // namespace android
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 0fd4ef2..efd4070 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -744,7 +744,7 @@
////////////////////////////////////////////////////////////////////////
-ImageDecoder::ImageDecoder(
+MediaImageDecoder::MediaImageDecoder(
const AString &componentName,
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source)
@@ -760,7 +760,7 @@
mTargetTiles(0) {
}
-sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
+sp<AMessage> MediaImageDecoder::onGetFormatAndSeekOptions(
int64_t frameTimeUs, int /*seekMode*/,
MediaSource::ReadOptions *options, sp<Surface> * /*window*/) {
sp<MetaData> overrideMeta;
@@ -836,7 +836,7 @@
return videoFormat;
}
-status_t ImageDecoder::onExtractRect(FrameRect *rect) {
+status_t MediaImageDecoder::onExtractRect(FrameRect *rect) {
// TODO:
// This callback is for verifying whether we can decode the rect,
// and if so, set up the internal variables for decoding.
@@ -875,7 +875,7 @@
return OK;
}
-status_t ImageDecoder::onOutputReceived(
+status_t MediaImageDecoder::onOutputReceived(
const sp<MediaCodecBuffer> &videoFrameBuffer,
const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
if (outputFormat == NULL) {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index f2bcebb..c03236a 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -31,6 +31,8 @@
#include "include/SoftwareRenderer.h"
#include "PlaybackDurationAccumulator.h"
+#include <android/binder_manager.h>
+#include <android/content/pm/IPackageManagerNative.h>
#include <android/hardware/cas/native/1.0/IDescrambler.h>
#include <android/hardware/media/omx/1.0/IGraphicBufferSource.h>
@@ -40,6 +42,7 @@
#include <android/binder_manager.h>
#include <android/dlext.h>
#include <binder/IMemory.h>
+#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
#include <cutils/properties.h>
#include <gui/BufferQueue.h>
@@ -1697,6 +1700,7 @@
//
static android::mediaformatshaper::FormatShaperOps_t *sShaperOps = NULL;
+static bool sIsHandheld = true;
static bool connectFormatShaper() {
static std::once_flag sCheckOnce;
@@ -1770,6 +1774,64 @@
ALOGV("connectFormatShaper: loaded libraries: %" PRId64 " us",
(loading_finished - loading_started)/1000);
+
+ // we also want to know whether this is a handheld device;
+ // start with the assumption that it is.
+ sIsHandheld = true;
+ sp<IServiceManager> serviceMgr = defaultServiceManager();
+ sp<content::pm::IPackageManagerNative> packageMgr;
+ if (serviceMgr.get() != nullptr) {
+ sp<IBinder> binder = serviceMgr->waitForService(String16("package_native"));
+ packageMgr = interface_cast<content::pm::IPackageManagerNative>(binder);
+ }
+ // if we didn't get serviceMgr, we'll leave packageMgr as default null
+ if (packageMgr != nullptr) {
+
+ // MUST have these
+ static const String16 featuresNeeded[] = {
+ String16("android.hardware.touchscreen")
+ };
+ // these must be present to be a handheld
+ for (::android::String16 required : featuresNeeded) {
+ bool hasFeature = false;
+ binder::Status status = packageMgr->hasSystemFeature(required, 0, &hasFeature);
+ if (!status.isOk()) {
+ ALOGE("%s: hasSystemFeature failed: %s",
+ __func__, status.exceptionMessage().c_str());
+ continue;
+ }
+ ALOGV("feature %s says %d", String8(required).c_str(), hasFeature);
+ if (!hasFeature) {
+ ALOGV("... which means we are not handheld");
+ sIsHandheld = false;
+ break;
+ }
+ }
+
+ // MUST NOT have these
+ static const String16 featuresDisallowed[] = {
+ String16("android.hardware.type.automotive"),
+ String16("android.hardware.type.television"),
+ String16("android.hardware.type.watch")
+ };
+ // any of these present -- we aren't a handheld
+ for (::android::String16 forbidden : featuresDisallowed) {
+ bool hasFeature = false;
+ binder::Status status = packageMgr->hasSystemFeature(forbidden, 0, &hasFeature);
+ if (!status.isOk()) {
+ ALOGE("%s: hasSystemFeature failed: %s",
+ __func__, status.exceptionMessage().c_str());
+ continue;
+ }
+ ALOGV("feature %s says %d", String8(forbidden).c_str(), hasFeature);
+ if (hasFeature) {
+ ALOGV("... which means we are not handheld");
+ sIsHandheld = false;
+ break;
+ }
+ }
+ }
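+ // net effect: sIsHandheld stays true when a touchscreen is present and none of the
+ // automotive/television/watch features are declared (failed feature queries are skipped).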
+
});
return true;
@@ -1848,6 +1910,18 @@
}
}
}
+
+ // we also carry in the codec description whether we are on a handheld device.
+ // This info is eventually used by both the Codec and the C2 machinery to inform
+ // the underlying codec whether to do any shaping.
+ //
+ if (sIsHandheld) {
+ // set only if we are indeed a handheld device (or, in future, any eligible device);
+ // left unset on devices that aren't eligible for minimum quality enforcement.
+ (void)(sShaperOps->setFeature)(shaperHandle, "_vq_eligible.device", 1);
+ // strictly speaking it's a tuning, but tunings are strings and the feature store takes ints
+ (void)(sShaperOps->setFeature)(shaperHandle, "_quality.target", 1 /* S_HANDHELD */);
+ }
}
status_t MediaCodec::setupFormatShaper(AString mediaType) {
@@ -1888,6 +1962,16 @@
// Format Shaping
// Mapping and Manipulation of encoding parameters
//
+// All of these decisions are pushed into the shaper instead of being made here in MediaCodec.
+// This includes decisions based on whether the codec implements minimum quality bars
+// itself or needs to be shaped outside of the codec.
+// This keeps all of those decisions in one place.
+// It also means that we push down some extra decision information (is this a handheld
+// device, or one that is otherwise eligible for minimum quality manipulation; which
+// generational quality target is in force; etc.). This allows those values to be cached
+// in the per-codec structures that are set up once per process rather than once for
+// each codec instantiation.
+//
status_t MediaCodec::shapeMediaFormat(
const sp<AMessage> &format,
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index a5c3ba6..6893324 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -307,8 +307,16 @@
sp<MetaData> meta = mImpl->getMetaData();
+ if (meta == nullptr) {
+ // extractor did not publish file metadata
+ return -EINVAL;
+ }
+
const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
+ if (!meta->findCString(kKeyMIMEType, &mime)) {
+ // a missing mime type is treated as invalid
+ return -EINVAL;
+ }
*format = new AMessage();
(*format)->setString("mime", mime);
@@ -354,6 +362,11 @@
sp<MetaData> meta = mImpl->getMetaData();
+ if (meta == nullptr) {
+ // extractor did not publish file metadata
+ return -EINVAL;
+ }
+
int64_t exifOffset, exifSize;
if (meta->findInt64(kKeyExifOffset, &exifOffset)
&& meta->findInt64(kKeyExifSize, &exifSize)) {
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index a32bc26..a4e3425 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -237,7 +237,7 @@
<Limit name="sample-rate" ranges="8000,12000,16000,24000,48000" />
<Limit name="bitrate" range="500-512000" />
<Limit name="complexity" range="0-10" default="5" />
- <Feature name="bitrate-modes" value="CBR" />
+ <Feature name="bitrate-modes" value="CBR,VBR" />
</MediaCodec>
<MediaCodec name="c2.android.h263.encoder" type="video/3gpp">
<Alias name="OMX.google.h263.encoder" />
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 6bb7b37..c2114b3 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -54,13 +54,11 @@
AMessage::AMessage(void)
: mWhat(0),
- mTarget(0),
- mNumItems(0) {
+ mTarget(0) {
}
AMessage::AMessage(uint32_t what, const sp<const AHandler> &handler)
- : mWhat(what),
- mNumItems(0) {
+ : mWhat(what) {
setTarget(handler);
}
@@ -89,13 +87,13 @@
}
void AMessage::clear() {
- for (size_t i = 0; i < mNumItems; ++i) {
- Item *item = &mItems[i];
- delete[] item->mName;
- item->mName = NULL;
- freeItemValue(item);
+ // each Item owns its name and its value; release both before clearing the vector
+ for (Item &item : mItems) {
+ delete[] item.mName;
+ item.mName = NULL;
+ freeItemValue(&item);
}
- mNumItems = 0;
+ mItems.clear();
}
void AMessage::freeItemValue(Item *item) {
@@ -157,7 +155,7 @@
size_t memchecks = 0;
#endif
size_t i = 0;
- for (; i < mNumItems; i++) {
+ for (; i < mItems.size(); i++) {
if (len != mItems[i].mNameLength) {
continue;
}
@@ -172,7 +170,7 @@
{
Mutex::Autolock _l(gLock);
++gFindItemCalls;
- gAverageNumItems += mNumItems;
+ gAverageNumItems += mItems.size();
gAverageNumMemChecks += memchecks;
gAverageNumChecks += i;
reportStats();
@@ -188,20 +186,26 @@
memcpy((void*)mName, name, len + 1);
}
+AMessage::Item::Item(const char *name, size_t len)
+ : mType(kTypeInt32) {
+ // mName and mNameLength are initialized by setName
+ setName(name, len);
+}
+
AMessage::Item *AMessage::allocateItem(const char *name) {
size_t len = strlen(name);
size_t i = findItemIndex(name, len);
Item *item;
- if (i < mNumItems) {
+ if (i < mItems.size()) {
item = &mItems[i];
freeItemValue(item);
} else {
- CHECK(mNumItems < kMaxNumItems);
- i = mNumItems++;
+ CHECK(mItems.size() < kMaxNumItems);
+ i = mItems.size();
+ // place a 'blank' item at the end - this is of type kTypeInt32
+ mItems.emplace_back(name, len);
item = &mItems[i];
- item->mType = kTypeInt32;
- item->setName(name, len);
}
return item;
@@ -210,7 +214,7 @@
const AMessage::Item *AMessage::findItem(
const char *name, Type type) const {
size_t i = findItemIndex(name, strlen(name));
- if (i < mNumItems) {
+ if (i < mItems.size()) {
const Item *item = &mItems[i];
return item->mType == type ? item : NULL;
@@ -220,7 +224,7 @@
bool AMessage::findAsFloat(const char *name, float *value) const {
size_t i = findItemIndex(name, strlen(name));
- if (i < mNumItems) {
+ if (i < mItems.size()) {
const Item *item = &mItems[i];
switch (item->mType) {
case kTypeFloat:
@@ -247,7 +251,7 @@
bool AMessage::findAsInt64(const char *name, int64_t *value) const {
size_t i = findItemIndex(name, strlen(name));
- if (i < mNumItems) {
+ if (i < mItems.size()) {
const Item *item = &mItems[i];
switch (item->mType) {
case kTypeInt64:
@@ -265,15 +269,16 @@
bool AMessage::contains(const char *name) const {
size_t i = findItemIndex(name, strlen(name));
- return i < mNumItems;
+ return i < mItems.size();
}
#define BASIC_TYPE(NAME,FIELDNAME,TYPENAME) \
void AMessage::set##NAME(const char *name, TYPENAME value) { \
Item *item = allocateItem(name); \
- \
- item->mType = kType##NAME; \
- item->u.FIELDNAME = value; \
+ if (item) { \
+ item->mType = kType##NAME; \
+ item->u.FIELDNAME = value; \
+ } \
} \
\
/* NOLINT added to avoid incorrect warning/fix from clang.tidy */ \
@@ -298,8 +303,10 @@
void AMessage::setString(
const char *name, const char *s, ssize_t len) {
Item *item = allocateItem(name);
- item->mType = kTypeString;
- item->u.stringValue = new AString(s, len < 0 ? strlen(s) : len);
+ if (item) {
+ item->mType = kTypeString;
+ item->u.stringValue = new AString(s, len < 0 ? strlen(s) : len);
+ }
}
void AMessage::setString(
@@ -310,10 +317,12 @@
void AMessage::setObjectInternal(
const char *name, const sp<RefBase> &obj, Type type) {
Item *item = allocateItem(name);
- item->mType = type;
+ if (item) {
+ item->mType = type;
- if (obj != NULL) { obj->incStrong(this); }
- item->u.refValue = obj.get();
+ if (obj != NULL) { obj->incStrong(this); }
+ item->u.refValue = obj.get();
+ }
}
void AMessage::setObject(const char *name, const sp<RefBase> &obj) {
@@ -326,22 +335,26 @@
void AMessage::setMessage(const char *name, const sp<AMessage> &obj) {
Item *item = allocateItem(name);
- item->mType = kTypeMessage;
+ if (item) {
+ item->mType = kTypeMessage;
- if (obj != NULL) { obj->incStrong(this); }
- item->u.refValue = obj.get();
+ if (obj != NULL) { obj->incStrong(this); }
+ item->u.refValue = obj.get();
+ }
}
void AMessage::setRect(
const char *name,
int32_t left, int32_t top, int32_t right, int32_t bottom) {
Item *item = allocateItem(name);
- item->mType = kTypeRect;
+ if (item) {
+ item->mType = kTypeRect;
- item->u.rectValue.mLeft = left;
- item->u.rectValue.mTop = top;
- item->u.rectValue.mRight = right;
- item->u.rectValue.mBottom = bottom;
+ item->u.rectValue.mLeft = left;
+ item->u.rectValue.mTop = top;
+ item->u.rectValue.mRight = right;
+ item->u.rectValue.mBottom = bottom;
+ }
}
bool AMessage::findString(const char *name, AString *value) const {
@@ -466,18 +479,18 @@
sp<AMessage> AMessage::dup() const {
sp<AMessage> msg = new AMessage(mWhat, mHandler.promote());
- msg->mNumItems = mNumItems;
+ msg->mItems = mItems;
#ifdef DUMP_STATS
{
Mutex::Autolock _l(gLock);
++gDupCalls;
- gAverageDupItems += mNumItems;
+ gAverageDupItems += mItems.size();
reportStats();
}
#endif
- for (size_t i = 0; i < mNumItems; ++i) {
+ for (size_t i = 0; i < mItems.size(); ++i) {
const Item *from = &mItems[i];
Item *to = &msg->mItems[i];
@@ -560,7 +573,7 @@
}
s.append(") = {\n");
- for (size_t i = 0; i < mNumItems; ++i) {
+ for (size_t i = 0; i < mItems.size(); ++i) {
const Item &item = mItems[i];
switch (item.mType) {
@@ -653,19 +666,20 @@
sp<AMessage> msg = new AMessage();
msg->setWhat(what);
- msg->mNumItems = static_cast<size_t>(parcel.readInt32());
- if (msg->mNumItems > kMaxNumItems) {
+ size_t numItems = static_cast<size_t>(parcel.readInt32());
+ if (numItems > kMaxNumItems) {
ALOGE("Too large number of items clipped.");
- msg->mNumItems = kMaxNumItems;
+ numItems = kMaxNumItems;
}
+ msg->mItems.resize(numItems);
- for (size_t i = 0; i < msg->mNumItems; ++i) {
+ for (size_t i = 0; i < msg->mItems.size(); ++i) {
Item *item = &msg->mItems[i];
const char *name = parcel.readCString();
if (name == NULL) {
ALOGE("Failed reading name for an item. Parsing aborted.");
- msg->mNumItems = i;
+ msg->mItems.resize(i);
break;
}
@@ -709,7 +723,7 @@
if (stringValue == NULL) {
ALOGE("Failed reading string value from a parcel. "
"Parsing aborted.");
- msg->mNumItems = i;
+ msg->mItems.resize(i);
continue;
// The loop will terminate subsequently.
} else {
@@ -754,11 +768,9 @@
void AMessage::writeToParcel(Parcel *parcel) const {
parcel->writeInt32(static_cast<int32_t>(mWhat));
- parcel->writeInt32(static_cast<int32_t>(mNumItems));
+ parcel->writeInt32(static_cast<int32_t>(mItems.size()));
- for (size_t i = 0; i < mNumItems; ++i) {
- const Item &item = mItems[i];
-
+ for (const Item &item : mItems) {
parcel->writeCString(item.mName);
parcel->writeInt32(static_cast<int32_t>(item.mType));
@@ -828,8 +840,7 @@
diff->setTarget(mHandler.promote());
}
- for (size_t i = 0; i < mNumItems; ++i) {
- const Item &item = mItems[i];
+ for (const Item &item : mItems) {
const Item *oitem = other->findItem(item.mName, item.mType);
switch (item.mType) {
case kTypeInt32:
@@ -936,11 +947,11 @@
}
size_t AMessage::countEntries() const {
- return mNumItems;
+ return mItems.size();
}
const char *AMessage::getEntryNameAt(size_t index, Type *type) const {
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
*type = kTypeInt32;
return NULL;
@@ -953,7 +964,7 @@
AMessage::ItemData AMessage::getEntryAt(size_t index) const {
ItemData it;
- if (index < mNumItems) {
+ if (index < mItems.size()) {
switch (mItems[index].mType) {
case kTypeInt32: it.set(mItems[index].u.int32Value); break;
case kTypeInt64: it.set(mItems[index].u.int64Value); break;
@@ -986,7 +997,7 @@
}
status_t AMessage::setEntryNameAt(size_t index, const char *name) {
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
return BAD_INDEX;
}
if (name == nullptr) {
@@ -996,7 +1007,7 @@
return OK; // name has not changed
}
size_t len = strlen(name);
- if (findItemIndex(name, len) < mNumItems) {
+ if (findItemIndex(name, len) < mItems.size()) {
return ALREADY_EXISTS;
}
delete[] mItems[index].mName;
@@ -1011,7 +1022,7 @@
sp<AMessage> msgValue;
sp<ABuffer> bufValue;
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
return BAD_INDEX;
}
if (!item.used()) {
@@ -1060,21 +1071,22 @@
}
status_t AMessage::removeEntryAt(size_t index) {
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
return BAD_INDEX;
}
// delete entry data and objects
- --mNumItems;
delete[] mItems[index].mName;
mItems[index].mName = nullptr;
freeItemValue(&mItems[index]);
// swap entry with last entry and clear last entry's data
- if (index < mNumItems) {
- mItems[index] = mItems[mNumItems];
- mItems[mNumItems].mName = nullptr;
- mItems[mNumItems].mType = kTypeInt32;
+ size_t lastIndex = mItems.size() - 1;
+ if (index < lastIndex) {
+ mItems[index] = mItems[lastIndex];
+ mItems[lastIndex].mName = nullptr;
+ mItems[lastIndex].mType = kTypeInt32;
}
+ mItems.pop_back();
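+ // note: swapping with the last entry keeps removal O(1) but does not preserve order.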
return OK;
}
@@ -1083,7 +1095,7 @@
return BAD_VALUE;
}
size_t index = findEntryByName(name);
- if (index >= mNumItems) {
+ if (index >= mItems.size()) {
return BAD_INDEX;
}
return removeEntryAt(index);
@@ -1093,7 +1105,7 @@
if (item.used()) {
Item *it = allocateItem(name);
if (it != nullptr) {
- setEntryAt(it - mItems, item);
+ setEntryAt(it - &mItems[0], item);
}
}
}
@@ -1108,11 +1120,11 @@
return;
}
- for (size_t ix = 0; ix < other->mNumItems; ++ix) {
+ for (size_t ix = 0; ix < other->mItems.size(); ++ix) {
Item *it = allocateItem(other->mItems[ix].mName);
if (it != nullptr) {
ItemData data = other->getEntryAt(ix);
- setEntryAt(it - mItems, data);
+ setEntryAt(it - &mItems[0], data);
}
}
}
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index 4bfc673..dd2c66f 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -85,6 +85,7 @@
"AudioPresentationInfo.cpp",
"ByteUtils.cpp",
"ColorUtils.cpp",
+ "ColorUtils_fill.cpp",
"FoundationUtils.cpp",
"MediaBuffer.cpp",
"MediaBufferBase.cpp",
@@ -148,3 +149,66 @@
"-DNO_IMEMORY",
],
}
+
+// This gets linked into extractors in the media mainline module,
+// so it must use NDK API level 29 so that it runs on Android >= Q.
+cc_library_static {
+ name: "libstagefright_foundation_colorutils_ndk",
+ host_supported: true,
+ vendor_available: true,
+
+ target: {
+ darwin: {
+ enabled: false,
+ },
+ },
+
+ shared_libs: [
+ "liblog",
+ "libutils", // for sp<>
+ // the code here actually invokes libmediandk, but every caller already loads it
+ // "libmediandk",
+ ],
+
+ header_libs: [
+ // this is only needed for the vendor variant that removes libbinder, but vendor
+ // target below does not allow adding header_libs.
+ "libbinder_headers",
+ "libstagefright_foundation_headers",
+ "media_ndk_headers",
+ "media_plugin_headers",
+ ],
+
+ local_include_dirs: [
+ "include/media/stagefright/foundation",
+ ],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
+
+ srcs: [
+ "ColorUtils_ndk.cpp",
+ "ColorUtils_fill.cpp",
+ ],
+
+ clang: true,
+
+ sanitize: {
+ misc_undefined: [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ cfi: true,
+ },
+
+ min_sdk_version: "29",
+ apex_available: [
+ "//apex_available:platform",
+ "com.android.media",
+ "com.android.media.swcodec",
+ ],
+
+}
+
diff --git a/media/libstagefright/foundation/ColorUtils.cpp b/media/libstagefright/foundation/ColorUtils.cpp
index 3812afe..fa722b5 100644
--- a/media/libstagefright/foundation/ColorUtils.cpp
+++ b/media/libstagefright/foundation/ColorUtils.cpp
@@ -722,13 +722,6 @@
transfer, asString((ColorTransfer)transfer));
}
-
-// static
-void ColorUtils::setHDRStaticInfoIntoAMediaFormat(
- const HDRStaticInfo &info, AMediaFormat *format) {
- setHDRStaticInfoIntoFormat(info, format->mFormat);
-}
-
// static
void ColorUtils::setHDRStaticInfoIntoFormat(
const HDRStaticInfo &info, sp<AMessage> &format) {
@@ -736,48 +729,7 @@
// Convert the data in infoBuffer to little endian format as defined by CTA-861-3
uint8_t *data = infoBuffer->data();
- // Static_Metadata_Descriptor_ID
- data[0] = info.mID;
-
- // display primary 0
- data[1] = LO_UINT16(info.sType1.mR.x);
- data[2] = HI_UINT16(info.sType1.mR.x);
- data[3] = LO_UINT16(info.sType1.mR.y);
- data[4] = HI_UINT16(info.sType1.mR.y);
-
- // display primary 1
- data[5] = LO_UINT16(info.sType1.mG.x);
- data[6] = HI_UINT16(info.sType1.mG.x);
- data[7] = LO_UINT16(info.sType1.mG.y);
- data[8] = HI_UINT16(info.sType1.mG.y);
-
- // display primary 2
- data[9] = LO_UINT16(info.sType1.mB.x);
- data[10] = HI_UINT16(info.sType1.mB.x);
- data[11] = LO_UINT16(info.sType1.mB.y);
- data[12] = HI_UINT16(info.sType1.mB.y);
-
- // white point
- data[13] = LO_UINT16(info.sType1.mW.x);
- data[14] = HI_UINT16(info.sType1.mW.x);
- data[15] = LO_UINT16(info.sType1.mW.y);
- data[16] = HI_UINT16(info.sType1.mW.y);
-
- // MaxDisplayLuminance
- data[17] = LO_UINT16(info.sType1.mMaxDisplayLuminance);
- data[18] = HI_UINT16(info.sType1.mMaxDisplayLuminance);
-
- // MinDisplayLuminance
- data[19] = LO_UINT16(info.sType1.mMinDisplayLuminance);
- data[20] = HI_UINT16(info.sType1.mMinDisplayLuminance);
-
- // MaxContentLightLevel
- data[21] = LO_UINT16(info.sType1.mMaxContentLightLevel);
- data[22] = HI_UINT16(info.sType1.mMaxContentLightLevel);
-
- // MaxFrameAverageLightLevel
- data[23] = LO_UINT16(info.sType1.mMaxFrameAverageLightLevel);
- data[24] = HI_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+ fillHdrStaticInfoBuffer(info, data);
format->setBuffer("hdr-static-info", infoBuffer);
}
diff --git a/media/libstagefright/foundation/ColorUtils_fill.cpp b/media/libstagefright/foundation/ColorUtils_fill.cpp
new file mode 100644
index 0000000..f07493e
--- /dev/null
+++ b/media/libstagefright/foundation/ColorUtils_fill.cpp
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtils"
+
+#include <inttypes.h>
+#include <arpa/inet.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+
+namespace android {
+
+// shortcut names for brevity (kept consistent with ColorUtils.cpp)
+typedef ColorAspects CA;
+typedef ColorUtils CU;
+
+#define HI_UINT16(a) (((a) >> 8) & 0xFF)
+#define LO_UINT16(a) ((a) & 0xFF)
+
+//
+// static
+void ColorUtils::fillHdrStaticInfoBuffer(const HDRStaticInfo &info, uint8_t *data) {
+ // Static_Metadata_Descriptor_ID
+ data[0] = info.mID;
+
+ // display primary 0
+ data[1] = LO_UINT16(info.sType1.mR.x);
+ data[2] = HI_UINT16(info.sType1.mR.x);
+ data[3] = LO_UINT16(info.sType1.mR.y);
+ data[4] = HI_UINT16(info.sType1.mR.y);
+
+ // display primary 1
+ data[5] = LO_UINT16(info.sType1.mG.x);
+ data[6] = HI_UINT16(info.sType1.mG.x);
+ data[7] = LO_UINT16(info.sType1.mG.y);
+ data[8] = HI_UINT16(info.sType1.mG.y);
+
+ // display primary 2
+ data[9] = LO_UINT16(info.sType1.mB.x);
+ data[10] = HI_UINT16(info.sType1.mB.x);
+ data[11] = LO_UINT16(info.sType1.mB.y);
+ data[12] = HI_UINT16(info.sType1.mB.y);
+
+ // white point
+ data[13] = LO_UINT16(info.sType1.mW.x);
+ data[14] = HI_UINT16(info.sType1.mW.x);
+ data[15] = LO_UINT16(info.sType1.mW.y);
+ data[16] = HI_UINT16(info.sType1.mW.y);
+
+ // MaxDisplayLuminance
+ data[17] = LO_UINT16(info.sType1.mMaxDisplayLuminance);
+ data[18] = HI_UINT16(info.sType1.mMaxDisplayLuminance);
+
+ // MinDisplayLuminance
+ data[19] = LO_UINT16(info.sType1.mMinDisplayLuminance);
+ data[20] = HI_UINT16(info.sType1.mMinDisplayLuminance);
+
+ // MaxContentLightLevel
+ data[21] = LO_UINT16(info.sType1.mMaxContentLightLevel);
+ data[22] = HI_UINT16(info.sType1.mMaxContentLightLevel);
+
+ // MaxFrameAverageLightLevel
+ data[23] = LO_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+ data[24] = HI_UINT16(info.sType1.mMaxFrameAverageLightLevel);
+}
+
+
+} // namespace android
+
diff --git a/media/libstagefright/foundation/ColorUtils_ndk.cpp b/media/libstagefright/foundation/ColorUtils_ndk.cpp
new file mode 100644
index 0000000..3ed2425
--- /dev/null
+++ b/media/libstagefright/foundation/ColorUtils_ndk.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ColorUtils"
+
+#include <inttypes.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/NdkMediaFormat.h>
+#include <utils/Log.h>
+
+namespace android {
+
+// static
+void ColorUtils::setHDRStaticInfoIntoAMediaFormat(
+ const HDRStaticInfo &info, AMediaFormat *format) {
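+ // 25 bytes: the CTA-861-3 Static Metadata Descriptor (type 1) that
+ // fillHdrStaticInfoBuffer populates at indices 0..24.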
+ uint8_t *data = (uint8_t *) malloc(25);
+ if (data != NULL) {
+ fillHdrStaticInfoBuffer(info, data);
+ AMediaFormat_setBuffer(format, "hdr-static-info", data, 25);
+ free(data);
+ }
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
index 98d6147..960212a 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/AMessage.h
@@ -24,6 +24,8 @@
#include <utils/KeyedVector.h>
#include <utils/RefBase.h>
+#include <vector>
+
namespace android {
struct ABuffer;
@@ -95,6 +97,7 @@
void setTarget(const sp<const AHandler> &handler);
+ // removes all items
void clear();
void setInt32(const char *name, int32_t value);
@@ -302,16 +305,39 @@
size_t mNameLength;
Type mType;
void setName(const char *name, size_t len);
+ Item() : mName(nullptr), mNameLength(0), mType(kTypeInt32) { }
+ Item(const char *name, size_t length);
};
enum {
- kMaxNumItems = 64
+ kMaxNumItems = 256
};
- Item mItems[kMaxNumItems];
- size_t mNumItems;
+ std::vector<Item> mItems;
+ /**
+ * Allocates an item with the given key |name|. If the key already exists, the corresponding
+ * item value is freed. Otherwise a new item is added.
+ *
+ * This method currently asserts if the number of elements would exceed the max number of
+ * elements allowed (kMaxNumItems). This is a security precaution to avoid arbitrarily large
+ * AMessage structures.
+ *
+ * @todo(b/192153245) Either revisit this security precaution, or change the behavior to
+ * silently ignore keys added after the max number of elements are reached.
+ *
+ * @note All previously returned Item* pointers are deemed invalid after this call. (E.g. from
+ * allocateItem or findItem)
+ *
+ * @param name the key for the requested item.
+ *
+ * @return Item* a pointer to the item.
+ */
Item *allocateItem(const char *name);
+
+ /** Frees the value for the item. */
void freeItemValue(Item *item);
+
+ /** Finds an item with given key |name| and |type|. Returns nullptr if item is not found. */
const Item *findItem(const char *name, Type type) const;
void setObjectInternal(
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
index 9e3f718..a2b6c4f 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ColorUtils.h
@@ -191,6 +191,8 @@
static void setHDRStaticInfoIntoFormat(const HDRStaticInfo &info, sp<AMessage> &format);
// writes |info| into format.
static void setHDRStaticInfoIntoAMediaFormat(const HDRStaticInfo &info, AMediaFormat *format);
+ // (internal) used by the setHDRStaticInfoInto* routines
+ static void fillHdrStaticInfoBuffer(const HDRStaticInfo &info, uint8_t *data);
};
inline static const char *asString(android::ColorUtils::ColorStandard i, const char *def = "??") {
diff --git a/media/libstagefright/foundation/tests/colorutils/Android.bp b/media/libstagefright/foundation/tests/colorutils/Android.bp
index 5a17e4b..0fea0d5 100644
--- a/media/libstagefright/foundation/tests/colorutils/Android.bp
+++ b/media/libstagefright/foundation/tests/colorutils/Android.bp
@@ -40,6 +40,7 @@
],
static_libs: [
+ "libstagefright_foundation_colorutils_ndk",
"libstagefright_foundation",
],
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index bca7f01..d59e4f5 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -146,8 +146,8 @@
status_t captureSurface();
};
-struct ImageDecoder : public FrameDecoder {
- ImageDecoder(
+struct MediaImageDecoder : public FrameDecoder {
+ MediaImageDecoder(
const AString &componentName,
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source);
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index efb2f86..48721ec 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -252,9 +252,7 @@
* INVALID_OPERATION if already released;
* ERROR_UNSUPPORTED if not supported.
*/
- virtual status_t querySupportedParameters([[maybe_unused]] std::vector<std::string> *names) {
- return ERROR_UNSUPPORTED;
- }
+ virtual status_t querySupportedParameters(std::vector<std::string> *names);
/**
* Fill |desc| with description of the parameter with |name|.
*
@@ -267,10 +265,8 @@
* ERROR_UNSUPPORTED if not supported.
*/
virtual status_t describeParameter(
- [[maybe_unused]] const std::string &name,
- [[maybe_unused]] CodecParameterDescriptor *desc) {
- return ERROR_UNSUPPORTED;
- }
+ const std::string &name,
+ CodecParameterDescriptor *desc);
/**
* Subscribe to parameters in |names| and get output format change event
* when they change.
@@ -281,10 +277,7 @@
* INVALID_OPERATION if already released;
* ERROR_UNSUPPORTED if not supported.
*/
- virtual status_t subscribeToParameters(
- [[maybe_unused]] const std::vector<std::string> &names) {
- return ERROR_UNSUPPORTED;
- }
+ virtual status_t subscribeToParameters(const std::vector<std::string> &names);
/**
* Unsubscribe from parameters in |names| and no longer get
* output format change event when they change.
@@ -295,10 +288,7 @@
* INVALID_OPERATION if already released;
* ERROR_UNSUPPORTED if not supported.
*/
- virtual status_t unsubscribeFromParameters(
- [[maybe_unused]] const std::vector<std::string> &names) {
- return ERROR_UNSUPPORTED;
- }
+ virtual status_t unsubscribeFromParameters(const std::vector<std::string> &names);
typedef CodecBase *(*CreateCodecFunc)(void);
typedef PersistentSurface *(*CreateInputSurfaceFunc)(void);
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 6371769..4237e8c 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -557,12 +557,14 @@
}
constexpr int32_t BITRATE_MODE_CBR = 2;
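+// CBR that may drop frames to hold the target bitrate; mirrors
+// MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR_FD.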
+constexpr int32_t BITRATE_MODE_CBR_FD = 3;
constexpr int32_t BITRATE_MODE_CQ = 0;
constexpr int32_t BITRATE_MODE_VBR = 1;
inline static const char *asString_BitrateMode(int32_t i, const char *def = "??") {
switch (i) {
case BITRATE_MODE_CBR: return "CBR";
+ case BITRATE_MODE_CBR_FD: return "CBR_FD";
case BITRATE_MODE_CQ: return "CQ";
case BITRATE_MODE_VBR: return "VBR";
default: return def;
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index e1cc5ec..3f4d662 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -44,6 +44,7 @@
mNextExpectedSeqNo(0),
mAccessUnitDamaged(false),
mFirstIFrameProvided(false),
+ mLastCvo(-1),
mLastIFrameProvidedAtMs(0),
mLastRtpTimeJitterDataUs(0),
mWidth(0),
@@ -137,7 +138,7 @@
}
source->putInterArrivalJitterData(rtpTime, nowTimeUs);
- const int64_t startTimeMs = source->mFirstSysTime / 1000;
+ const int64_t startTimeMs = source->mSysAnchorTime / 1000;
const int64_t nowTimeMs = nowTimeUs / 1000;
const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
@@ -195,33 +196,38 @@
if (!isExpired) {
ALOGV("buffering in jitter buffer.");
+ // set an alarm for jitter buffer time expiration.
+ // adding 1ms because the jitter buffer time keeps changing.
+ int64_t expTimeUs = (RtpToMs(std::abs(diffTimeRtp), clockRate) + 1) * 1000;
+ source->setJbAlarmTime(nowTimeUs, expTimeUs);
return NOT_ENOUGH_DATA;
}
if (isFirstLineBroken) {
- if (isSecondLineBroken) {
- int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
- ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+ int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+ String8 info;
+ info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
"Seq# %d \t ExpSeq# %d \t"
"JitterMs %d + (%d + %d * %.3f)",
(long long)diffTimeRtp, (long long)totalDiffTimeMs,
buffer->int32Data(), mNextExpectedSeqNo,
jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+ if (isSecondLineBroken) {
+ ALOGE("%s", info.string());
printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
- mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
} else {
- ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
- jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+ ALOGW("%s", info.string());
}
}
if (mNextExpectedSeqNoValid) {
- int32_t size = queue->size();
+ mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
if (cntRemove > 0) {
+ int32_t size = queue->size();
source->noticeAbandonBuffer(cntRemove);
ALOGW("delete %d of %d buffers", cntRemove, size);
}
@@ -441,7 +447,6 @@
uint32_t rtpTimeStartAt;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
uint32_t startSeqNo = buffer->int32Data();
- bool pFrame = nalType == 0x1;
if (data[1] & 0x40) {
// Huh? End bit also set on the first buffer.
@@ -451,8 +456,6 @@
complete = true;
} else {
List<sp<ABuffer> >::iterator it = ++queue->begin();
- int32_t connected = 1;
- bool snapped = false;
while (it != queue->end()) {
ALOGV("sequence length %zu", totalCount);
@@ -463,33 +466,26 @@
if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
ALOGD("sequence not complete, expected seqNo %u, got %u, nalType %u",
- expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
- snapped = true;
-
- if (!pFrame) {
- return WRONG_SEQUENCE_NUMBER;
- }
- }
-
- if (!snapped) {
- connected++;
+ expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
}
uint32_t rtpTime;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
- if (size < 2
- || data[0] != indicator
+ if (size < 2) {
+ ALOGV("Ignoring malformed FU buffer.");
+ it = queue->erase(it);
+ continue;
+ }
+ if (data[0] != indicator
|| (data[1] & 0x1f) != nalType
|| (data[1] & 0x80)
|| rtpTime != rtpTimeStartAt) {
- ALOGV("Ignoring malformed FU buffer.");
-
- // Delete the whole start of the FU.
-
- mNextExpectedSeqNo = expectedSeqNo + 1;
- deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
- return MALFORMED_PACKET;
+ // The assembler has already allowed enough time via the jitter buffer
+ ALOGD("Seems another frame. Incomplete frame [%d ~ %d) \t %d FUs",
+ startSeqNo, expectedSeqNo, (int)queue->distance(queue->begin(), it));
+ expectedSeqNo = (uint32_t)buffer->int32Data();
+ complete = true;
+ break;
}
totalSize += size - 2;
@@ -498,14 +494,6 @@
expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
if (data[1] & 0x40) {
- if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
- connected, totalCount, 0.5f)) {
- mNextExpectedSeqNo = expectedSeqNo;
- deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
- return MALFORMED_PACKET;
- }
-
// This is the last fragment.
complete = true;
break;
@@ -557,6 +545,9 @@
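+ // CVO (orientation) arrives only on packets carrying the RTP header extension;
+ // remember the last value and stamp it onto access units that lack one.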
if (cvo >= 0) {
unit->meta()->setInt32("cvo", cvo);
+ mLastCvo = cvo;
+ } else if (mLastCvo >= 0) {
+ unit->meta()->setInt32("cvo", mLastCvo);
}
if (source != nullptr) {
unit->meta()->setObject("source", source);
@@ -621,35 +612,32 @@
msg->post();
}
-int32_t AAVCAssembler::pickProperSeq(const Queue *queue,
+int32_t AAVCAssembler::pickStartSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
+ // pick the first sequence number that has the start bit set.
sp<ABuffer> buffer = *(queue->begin());
- int32_t nextSeqNo = buffer->int32Data();
+ int32_t firstSeqNo = buffer->int32Data();
- Queue::const_iterator it = queue->begin();
- while (it != queue->end()) {
- int64_t rtpTime = findRTPTime(first, *it);
- // if pkt in time exists, that should be the next pivot
+ // This only applies to FU-A packets (type 28) that do not carry the start bit
+ unsigned nalType = buffer->data()[0] & 0x1f;
+ if (nalType != 28 || buffer->data()[1] & 0x80) {
+ return firstSeqNo;
+ }
+
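+ // keep the current head if an in-time packet shows up first; otherwise snap to
+ // the next fragment that carries the start bit (0x80 in the FU header, data[1]).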
+ for (auto it : *queue) {
+ const uint8_t *data = it->data();
+ int64_t rtpTime = findRTPTime(first, it);
if (rtpTime + jit >= play) {
- nextSeqNo = (*it)->int32Data();
break;
}
- it++;
+ if ((data[1] & 0x80)) {
+ const int32_t seqNo = it->int32Data();
+ ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
+ firstSeqNo = seqNo;
+ break;
+ }
}
- return nextSeqNo;
-}
-
-bool AAVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
- size_t avail, float goodRatio) {
- float total = end - start;
- float valid = connected;
- float exist = avail;
- bool isRecycle = (valid / total) >= goodRatio;
-
- ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
- exist, valid, total, isRecycle);
-
- return isRecycle;
+ return firstSeqNo;
}
int32_t AAVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
diff --git a/media/libstagefright/rtsp/AAVCAssembler.h b/media/libstagefright/rtsp/AAVCAssembler.h
index 8d19773..2f8b8ba 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.h
+++ b/media/libstagefright/rtsp/AAVCAssembler.h
@@ -22,6 +22,7 @@
#include <utils/List.h>
#include <utils/RefBase.h>
+#include <utils/String8.h>
namespace android {
@@ -47,6 +48,7 @@
uint32_t mNextExpectedSeqNo;
bool mAccessUnitDamaged;
bool mFirstIFrameProvided;
+ int32_t mLastCvo;
uint64_t mLastIFrameProvidedAtMs;
int64_t mLastRtpTimeJitterDataUs;
int32_t mWidth;
@@ -64,9 +66,7 @@
void submitAccessUnit();
- int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
- bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
- size_t avail, float goodRatio);
+ int32_t pickStartSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
int32_t deleteUnitUnderSeq(Queue *q, uint32_t seq);
DISALLOW_EVIL_CONSTRUCTORS(AAVCAssembler);
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index d32e85d..b240339 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -51,6 +51,7 @@
mNextExpectedSeqNo(0),
mAccessUnitDamaged(false),
mFirstIFrameProvided(false),
+ mLastCvo(-1),
mLastIFrameProvidedAtMs(0),
mLastRtpTimeJitterDataUs(0),
mWidth(0),
@@ -147,7 +148,7 @@
}
source->putInterArrivalJitterData(rtpTime, nowTimeUs);
- const int64_t startTimeMs = source->mFirstSysTime / 1000;
+ const int64_t startTimeMs = source->mSysAnchorTime / 1000;
const int64_t nowTimeMs = nowTimeUs / 1000;
const int32_t staticJitterTimeMs = source->getStaticJitterTimeMs();
const int32_t baseJitterTimeMs = source->getBaseJitterTimeMs();
@@ -205,33 +206,38 @@
if (!isExpired) {
ALOGV("buffering in jitter buffer.");
+ // set an alarm for jitter buffer time expiration.
+ // adding 1ms because the jitter buffer time keeps changing.
+ int64_t expTimeUs = (RtpToMs(std::abs(diffTimeRtp), clockRate) + 1) * 1000;
+ source->setJbAlarmTime(nowTimeUs, expTimeUs);
return NOT_ENOUGH_DATA;
}
if (isFirstLineBroken) {
- if (isSecondLineBroken) {
- int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
- ALOGE("buffer too late... \t RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
+ int64_t totalDiffTimeMs = RtpToMs(diffTimeRtp + jitterTimeRtp, clockRate);
+ String8 info;
+ info.appendFormat("RTP diff from exp =%lld \t MS diff from stamp = %lld\t\t"
"Seq# %d \t ExpSeq# %d \t"
"JitterMs %d + (%d + %d * %.3f)",
(long long)diffTimeRtp, (long long)totalDiffTimeMs,
buffer->int32Data(), mNextExpectedSeqNo,
jitterTimeMs, tryJbTimeMs, dynamicJbTimeMs, JITTER_MULTIPLE);
+ if (isSecondLineBroken) {
+ ALOGE("%s", info.string());
printNowTimeMs(startTimeMs, nowTimeMs, playedTimeMs);
printRTPTime(rtpTime, playedTimeRtp, expiredTimeRtp, isExpired);
- mNextExpectedSeqNo = pickProperSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
} else {
- ALOGW("=== WARNING === buffer arrived after %d + %d = %d ms === WARNING === ",
- jitterTimeMs, tryJbTimeMs, jitterTimeMs + tryJbTimeMs);
+ ALOGW("%s", info.string());
}
}
if (mNextExpectedSeqNoValid) {
- int32_t size = queue->size();
+ mNextExpectedSeqNo = pickStartSeq(queue, firstRTPTime, playedTimeRtp, jitterTimeRtp);
int32_t cntRemove = deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
if (cntRemove > 0) {
+ int32_t size = queue->size();
source->noticeAbandonBuffer(cntRemove);
ALOGW("delete %d of %d buffers", cntRemove, size);
}
@@ -466,7 +472,6 @@
uint32_t rtpTimeStartAt;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTimeStartAt));
uint32_t startSeqNo = buffer->int32Data();
- bool pFrame = (nalType < 0x10);
if (data[2] & 0x40) {
// Huh? End bit also set on the first buffer.
@@ -476,8 +481,6 @@
complete = true;
} else {
List<sp<ABuffer> >::iterator it = ++queue->begin();
- int32_t connected = 1;
- bool snapped = false;
while (it != queue->end()) {
ALOGV("sequence length %zu", totalCount);
@@ -488,33 +491,26 @@
if ((uint32_t)buffer->int32Data() != expectedSeqNo) {
ALOGV("sequence not complete, expected seqNo %u, got %u, nalType %u",
- expectedSeqNo, (uint32_t)buffer->int32Data(), nalType);
- snapped = true;
-
- if (!pFrame) {
- return WRONG_SEQUENCE_NUMBER;
- }
- }
-
- if (!snapped) {
- connected++;
+ expectedSeqNo, (unsigned)buffer->int32Data(), nalType);
}
uint32_t rtpTime;
CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
- if (size < 3
- || ((data[0] >> 1) & H265_NALU_MASK) != indicator
+ if (size < 3) {
+ ALOGV("Ignoring malformed FU buffer.");
+ it = queue->erase(it);
+ continue;
+ }
+ if (((data[0] >> 1) & H265_NALU_MASK) != indicator
|| (data[2] & H265_NALU_MASK) != nalType
|| (data[2] & 0x80)
|| rtpTime != rtpTimeStartAt) {
- ALOGV("Ignoring malformed FU buffer.");
-
- // Delete the whole start of the FU.
-
- mNextExpectedSeqNo = expectedSeqNo + 1;
- deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
- return MALFORMED_PACKET;
+ // The assembler has already allowed enough time via the jitter buffer
+ ALOGD("Seems another frame. Incomplete frame [%d ~ %d) \t %d FUs",
+ startSeqNo, expectedSeqNo, (int)queue->distance(queue->begin(), it));
+ expectedSeqNo = (uint32_t)buffer->int32Data();
+ complete = true;
+ break;
}
totalSize += size - 3;
@@ -523,13 +519,6 @@
expectedSeqNo = (uint32_t)buffer->int32Data() + 1;
if (data[2] & 0x40) {
- if (pFrame && !recycleUnit(startSeqNo, expectedSeqNo,
- connected, totalCount, 0.5f)) {
- mNextExpectedSeqNo = expectedSeqNo;
- deleteUnitUnderSeq(queue, mNextExpectedSeqNo);
-
- return MALFORMED_PACKET;
- }
// This is the last fragment.
complete = true;
break;
@@ -579,6 +568,9 @@
if (cvo >= 0) {
unit->meta()->setInt32("cvo", cvo);
+ mLastCvo = cvo;
+ } else if (mLastCvo >= 0) {
+ unit->meta()->setInt32("cvo", mLastCvo);
}
addSingleNALUnit(unit);
@@ -635,35 +627,32 @@
msg->post();
}
-int32_t AHEVCAssembler::pickProperSeq(const Queue *queue,
+int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
uint32_t first, int64_t play, int64_t jit) {
+ // Pick the first sequence number that has the start bit.
sp<ABuffer> buffer = *(queue->begin());
- int32_t nextSeqNo = buffer->int32Data();
+ int32_t firstSeqNo = buffer->int32Data();
- Queue::const_iterator it = queue->begin();
- while (it != queue->end()) {
- int64_t rtpTime = findRTPTime(first, *it);
- // if pkt in time exists, that should be the next pivot
+ // Only scan when the first packet is an FU that does not carry a start bit.
+ unsigned nalType = buffer->data()[0] & 0x1f;
+ if (nalType != 28 || buffer->data()[2] & 0x80) {
+ return firstSeqNo;
+ }
+
+ for (auto it : *queue) {
+ const uint8_t *data = it->data();
+ int64_t rtpTime = findRTPTime(first, it);
if (rtpTime + jit >= play) {
- nextSeqNo = (*it)->int32Data();
break;
}
- it++;
+ if ((data[2] & 0x80)) {
+ const int32_t seqNo = it->int32Data();
+ ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
+ firstSeqNo = seqNo;
+ break;
+ }
}
- return nextSeqNo;
-}
-
-bool AHEVCAssembler::recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
- size_t avail, float goodRatio) {
- float total = end - start;
- float valid = connected;
- float exist = avail;
- bool isRecycle = (valid / total) >= goodRatio;
-
- ALOGV("checking p-frame losses.. recvBufs %f valid %f diff %f recycle? %d",
- exist, valid, total, isRecycle);
-
- return isRecycle;
+ return firstSeqNo;
}
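
pickStartSeq() above walks the reassembly queue looking for the first fragment whose start bit (data[2] & 0x80) is set. A simplified standalone sketch of the scan, using stand-in packet and queue types and omitting the in-time early exit (rtpTime + jit >= play) and FU-type precheck of the real code:

#include <cstdint>
#include <list>
#include <vector>

// Stand-in packet type: sequence number plus raw payload bytes.
struct Pkt { int32_t seqNo; std::vector<uint8_t> data; };

int32_t pickStartSeqSketch(const std::list<Pkt> &queue) {
    int32_t firstSeqNo = queue.front().seqNo;
    for (const Pkt &p : queue) {
        // The start bit of an FU payload lives in the third byte.
        if (p.data.size() > 2 && (p.data[2] & 0x80)) {
            return p.seqNo;   // first fragment that starts a NAL unit
        }
    }
    return firstSeqNo;        // no start found; fall back to the queue head
}
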
int32_t AHEVCAssembler::deleteUnitUnderSeq(Queue *queue, uint32_t seq) {
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.h b/media/libstagefright/rtsp/AHEVCAssembler.h
index 68777a7..9575d8c 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.h
+++ b/media/libstagefright/rtsp/AHEVCAssembler.h
@@ -22,6 +22,7 @@
#include <utils/List.h>
#include <utils/RefBase.h>
+#include <utils/String8.h>
namespace android {
@@ -48,6 +49,7 @@
uint32_t mNextExpectedSeqNo;
bool mAccessUnitDamaged;
bool mFirstIFrameProvided;
+ int32_t mLastCvo;
uint64_t mLastIFrameProvidedAtMs;
int64_t mLastRtpTimeJitterDataUs;
int32_t mWidth;
@@ -65,9 +67,7 @@
void submitAccessUnit();
- int32_t pickProperSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
- bool recycleUnit(uint32_t start, uint32_t end, uint32_t connected,
- size_t avail, float goodRatio);
+ int32_t pickStartSeq(const Queue *q, uint32_t first, int64_t play, int64_t jit);
int32_t deleteUnitUnderSeq(Queue *queue, uint32_t seq);
DISALLOW_EVIL_CONSTRUCTORS(AHEVCAssembler);
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 8f4df8e..169df46 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -594,4 +594,15 @@
return mFormat;
}
+bool APacketSource::isVideo() {
+ bool isVideo = false;
+
+ const char *mime;
+ if (mFormat->findCString(kKeyMIMEType, &mime)) {
+ isVideo = !strncasecmp(mime, "video/", 6);
+ }
+
+ return isVideo;
+}
+
} // namespace android
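
The new isVideo() helper classifies a track purely by its MIME prefix. A self-contained sketch of the same check, with a hypothetical free-function name:

#include <strings.h>

bool isVideoMime(const char *mime) {
    return mime != nullptr && strncasecmp(mime, "video/", 6) == 0;
}
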
diff --git a/media/libstagefright/rtsp/APacketSource.h b/media/libstagefright/rtsp/APacketSource.h
index 530e537..2b9b5ba 100644
--- a/media/libstagefright/rtsp/APacketSource.h
+++ b/media/libstagefright/rtsp/APacketSource.h
@@ -33,6 +33,8 @@
virtual sp<MetaData> getFormat();
+ bool isVideo();
+
protected:
virtual ~APacketSource();
diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp
index 33c85a7..ffccbb1 100644
--- a/media/libstagefright/rtsp/ARTPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTPConnection.cpp
@@ -18,9 +18,7 @@
#define LOG_TAG "ARTPConnection"
#include <utils/Log.h>
-#include "ARTPAssembler.h"
#include "ARTPConnection.h"
-
#include "ARTPSource.h"
#include "ASessionDescription.h"
@@ -104,6 +102,11 @@
msg->post();
}
+void ARTPConnection::seekStream() {
+ sp<AMessage> msg = new AMessage(kWhatSeekStream, this);
+ msg->post();
+}
+
void ARTPConnection::removeStream(int rtpSocket, int rtcpSocket) {
sp<AMessage> msg = new AMessage(kWhatRemoveStream, this);
msg->setInt32("rtp-socket", rtpSocket);
@@ -283,6 +286,12 @@
break;
}
+ case kWhatSeekStream:
+ {
+ onSeekStream(msg);
+ break;
+ }
+
case kWhatRemoveStream:
{
onRemoveStream(msg);
@@ -295,6 +304,12 @@
break;
}
+ case kWhatAlarmStream:
+ {
+ onAlarmStream(msg);
+ break;
+ }
+
case kWhatInjectPacket:
{
onInjectPacket(msg);
@@ -353,6 +368,18 @@
}
}
+void ARTPConnection::onSeekStream(const sp<AMessage> &msg) {
+ (void)msg; // unused param as of now.
+ List<StreamInfo>::iterator it = mStreams.begin();
+ while (it != mStreams.end()) {
+ for (size_t i = 0; i < it->mSources.size(); ++i) {
+ sp<ARTPSource> source = it->mSources.valueAt(i);
+ source->timeReset();
+ }
+ ++it;
+ }
+}
+
void ARTPConnection::onRemoveStream(const sp<AMessage> &msg) {
int32_t rtpSocket, rtcpSocket;
CHECK(msg->findInt32("rtp-socket", &rtpSocket));
@@ -440,14 +467,16 @@
if (err == -ECONNRESET) {
// socket failure, this stream is dead, Jim.
- sp<AMessage> notify = it->mNotifyMsg->dup();
- notify->setInt32("rtcp-event", 1);
- notify->setInt32("payload-type", 400);
- notify->setInt32("feedback-type", 1);
- notify->setInt32("sender", it->mSources.valueAt(0)->getSelfID());
- notify->post();
+ for (size_t i = 0; i < it->mSources.size(); ++i) {
+ sp<AMessage> notify = it->mNotifyMsg->dup();
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", 400);
+ notify->setInt32("feedback-type", 1);
+ notify->setInt32("sender", it->mSources.valueAt(i)->getSelfID());
+ notify->post();
- ALOGW("failed to receive RTP/RTCP datagram.");
+ ALOGW("failed to receive RTP/RTCP datagram.");
+ }
it = mStreams.erase(it);
continue;
}
@@ -548,6 +577,13 @@
}
}
+void ARTPConnection::onAlarmStream(const sp<AMessage> msg) {
+ sp<ARTPSource> source = nullptr;
+ if (msg->findObject("source", (sp<android::RefBase>*)&source)) {
+ source->processRTPPacket();
+ }
+}
+
status_t ARTPConnection::receive(StreamInfo *s, bool receiveRTP) {
ALOGV("receiving %s", receiveRTP ? "RTP" : "RTCP");
@@ -633,12 +669,6 @@
}
status_t ARTPConnection::parseRTP(StreamInfo *s, const sp<ABuffer> &buffer) {
- if (s->mNumRTPPacketsReceived++ == 0) {
- sp<AMessage> notify = s->mNotifyMsg->dup();
- notify->setInt32("first-rtp", true);
- notify->post();
- }
-
size_t size = buffer->size();
if (size < 12) {
@@ -720,9 +750,23 @@
meta->setInt32("cvo", cvoDegrees);
}
- buffer->setInt32Data(u16at(&data[2]));
+ int32_t seq = u16at(&data[2]);
+ buffer->setInt32Data(seq);
buffer->setRange(payloadOffset, size - payloadOffset);
+ if (s->mNumRTPPacketsReceived++ == 0) {
+ sp<AMessage> notify = s->mNotifyMsg->dup();
+ notify->setInt32("first-rtp", true);
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", ARTPSource::RTP_FIRST_PACKET);
+ notify->setInt32("rtp-time", (int32_t)rtpTime);
+ notify->setInt32("rtp-seq-num", seq);
+ notify->setInt64("recv-time-us", ALooper::GetNowUs());
+ notify->post();
+
+ ALOGD("send first-rtp event to upper layer");
+ }
+
source->processRTPPacket(buffer);
return OK;
@@ -779,14 +823,12 @@
if (s->mNumRTCPPacketsReceived++ == 0) {
sp<AMessage> notify = s->mNotifyMsg->dup();
notify->setInt32("first-rtcp", true);
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", ARTPSource::RTCP_FIRST_PACKET);
+ notify->setInt64("recv-time-us", ALooper::GetNowUs());
notify->post();
- ALOGI("send first-rtcp event to upper layer as ImsRxNotice");
- sp<AMessage> imsNotify = s->mNotifyMsg->dup();
- imsNotify->setInt32("rtcp-event", 1);
- imsNotify->setInt32("payload-type", 101);
- imsNotify->setInt32("feedback-type", 0);
- imsNotify->post();
+ ALOGD("send first-rtcp event to upper layer");
}
const uint8_t *data = buffer->data();
@@ -883,7 +925,7 @@
int64_t nowUs = ALooper::GetNowUs();
int32_t timeDiff = (nowUs - mLastBitrateReportTimeUs) / 1000000ll;
int32_t bitrate = mCumulativeBytes * 8 / timeDiff;
- source->notifyPktInfo(bitrate, true /* isRegular */);
+ source->notifyPktInfo(bitrate, nowUs, true /* isRegular */);
source->byeReceived();
@@ -1065,11 +1107,14 @@
srcId, info->mSessionDesc, info->mIndex, info->mNotifyMsg);
if (mFlags & kViLTEConnection) {
+ setStaticJitterTimeMs(50);
source->setPeriodicFIR(false);
}
source->setSelfID(mSelfID);
source->setStaticJitterTimeMs(mStaticJitterTimeMs);
+ sp<AMessage> timer = new AMessage(kWhatAlarmStream, this);
+ source->setJbTimer(timer);
info->mSources.add(srcId, source);
} else {
source = info->mSources.valueAt(index);
@@ -1117,7 +1162,7 @@
for (size_t i = 0; i < s->mSources.size(); ++i) {
sp<ARTPSource> source = s->mSources.valueAt(i);
if (source->isNeedToEarlyNotify()) {
- source->notifyPktInfo(bitrate, false /* isRegular */);
+ source->notifyPktInfo(bitrate, nowUs, false /* isRegular */);
mLastEarlyNotifyTimeUs = nowUs + (1000000ll * 3600 * 24); // after 1 day
}
}
@@ -1148,7 +1193,7 @@
buffer->setRange(0, 0);
for (size_t i = 0; i < s->mSources.size(); ++i) {
sp<ARTPSource> source = s->mSources.valueAt(i);
- source->notifyPktInfo(bitrate, true /* isRegular */);
+ source->notifyPktInfo(bitrate, nowUs, true /* isRegular */);
}
++it;
}
diff --git a/media/libstagefright/rtsp/ARTPConnection.h b/media/libstagefright/rtsp/ARTPConnection.h
index ea0a374..36cca31 100644
--- a/media/libstagefright/rtsp/ARTPConnection.h
+++ b/media/libstagefright/rtsp/ARTPConnection.h
@@ -40,7 +40,7 @@
const sp<ASessionDescription> &sessionDesc, size_t index,
const sp<AMessage> ¬ify,
bool injected);
-
+ void seekStream();
void removeStream(int rtpSocket, int rtcpSocket);
void injectPacket(int index, const sp<ABuffer> &buffer);
@@ -69,9 +69,11 @@
private:
enum {
kWhatAddStream,
+ kWhatSeekStream,
kWhatRemoveStream,
kWhatPollStreams,
kWhatInjectPacket,
+ kWhatAlarmStream,
};
static const int64_t kSelectTimeoutUs;
@@ -94,8 +96,10 @@
int32_t mCumulativeBytes;
void onAddStream(const sp<AMessage> &msg);
+ void onSeekStream(const sp<AMessage> &msg);
void onRemoveStream(const sp<AMessage> &msg);
void onPollStreams();
+ void onAlarmStream(const sp<AMessage> msg);
void onInjectPacket(const sp<AMessage> &msg);
void onSendReceiverReports();
void checkRxBitrate(int64_t nowUs);
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 8787d65..38a370b 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -44,10 +44,11 @@
uint32_t id,
const sp<ASessionDescription> &sessionDesc, size_t index,
const sp<AMessage> ¬ify)
- : mFirstSeqNumber(0),
- mFirstRtpTime(0),
+ : mFirstRtpTime(0),
mFirstSysTime(0),
mClockRate(0),
+ mSysAnchorTime(0),
+ mLastSysAnchorTimeUpdatedUs(0),
mFirstSsrc(0),
mHighestNackNumber(0),
mID(id),
@@ -58,9 +59,14 @@
mPrevNumBuffersReceived(0),
mPrevExpectedForRR(0),
mPrevNumBuffersReceivedForRR(0),
+ mLatestRtpTime(0),
mStaticJbTimeMs(kStaticJitterTimeMs),
- mLastNTPTime(0),
- mLastNTPTimeUpdateUs(0),
+ mLastSrRtpTime(0),
+ mLastSrNtpTime(0),
+ mLastSrUpdateTimeUs(0),
+ mIsFirstRtpRtcpGap(true),
+ mAvgRtpRtcpGapMs(0),
+ mAvgUnderlineDelayMs(0),
mIssueFIRRequests(false),
mIssueFIRByAssembler(false),
mLastFIRRequestUs(-1),
@@ -106,6 +112,7 @@
int32_t clockRate, numChannels;
ASessionDescription::ParseFormatDesc(desc.c_str(), &clockRate, &numChannels);
mClockRate = clockRate;
+ mLastJbAlarmTimeUs = 0;
mJitterCalc = new JitterCalc(mClockRate);
}
@@ -119,34 +126,144 @@
}
}
+void ARTPSource::processRTPPacket() {
+ if (mAssembler != NULL && !mQueue.empty()) {
+ mAssembler->onPacketReceived(this);
+ }
+}
+
void ARTPSource::timeUpdate(uint32_t rtpTime, uint64_t ntpTime) {
- mLastNTPTime = ntpTime;
- mLastNTPTimeUpdateUs = ALooper::GetNowUs();
+ mLastSrRtpTime = rtpTime;
+ mLastSrNtpTime = ntpTime;
+ mLastSrUpdateTimeUs = ALooper::GetNowUs();
sp<AMessage> notify = mNotify->dup();
notify->setInt32("time-update", true);
notify->setInt32("rtp-time", rtpTime);
notify->setInt64("ntp-time", ntpTime);
+ notify->setInt32("rtcp-event", 1);
+ notify->setInt32("payload-type", RTCP_SR);
+ notify->setInt64("recv-time-us", mLastSrUpdateTimeUs);
notify->post();
}
-bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
- uint32_t seqNum = (uint32_t)buffer->int32Data();
+void ARTPSource::timeReset() {
+ mFirstRtpTime = 0;
+ mFirstSysTime = 0;
+ mSysAnchorTime = 0;
+ mLastSysAnchorTimeUpdatedUs = 0;
+ mFirstSsrc = 0;
+ mHighestNackNumber = 0;
+ mHighestSeqNumber = 0;
+ mPrevExpected = 0;
+ mBaseSeqNumber = 0;
+ mNumBuffersReceived = 0;
+ mPrevNumBuffersReceived = 0;
+ mPrevExpectedForRR = 0;
+ mPrevNumBuffersReceivedForRR = 0;
+ mLatestRtpTime = 0;
+ mLastSrRtpTime = 0;
+ mLastSrNtpTime = 0;
+ mLastSrUpdateTimeUs = 0;
+ mIsFirstRtpRtcpGap = true;
+ mAvgRtpRtcpGapMs = 0;
+ mAvgUnderlineDelayMs = 0;
+ mIssueFIRByAssembler = false;
+ mLastFIRRequestUs = -1;
+}
- int32_t ssrc = 0;
+void ARTPSource::calcTimeGapRtpRtcp(const sp<ABuffer> &buffer, int64_t nowUs) {
+ if (mLastSrUpdateTimeUs == 0) {
+ return;
+ }
+
+ int64_t elapsedMs = (nowUs - mLastSrUpdateTimeUs) / 1000;
+ int64_t elapsedRtpTime = (elapsedMs * (mClockRate / 1000));
+ uint32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+
+ int64_t anchorRtpTime = mLastSrRtpTime + elapsedRtpTime;
+ int64_t rtpTimeGap = anchorRtpTime - rtpTime;
+ // rtpTime cannot run ahead of its anchor time, because the rtpTime of an
+ // RTP packet represents the frame capture time while the anchorRtpTime of
+ // an RTCP SR packet represents the RTP packetization time.
+ if (rtpTimeGap < 0 || rtpTimeGap > (mClockRate * 60)) {
+ // Ignore invalid gaps such as a negative delay or one longer than 1 minute.
+ return;
+ }
+
+ int64_t rtpTimeGapMs = (rtpTimeGap * 1000 / mClockRate);
+ if (mIsFirstRtpRtcpGap) {
+ mIsFirstRtpRtcpGap = false;
+ mAvgRtpRtcpGapMs = rtpTimeGapMs;
+ } else {
+ // Measure the average RTP timestamp distance between RTP and RTCP SR
+ // packets. The RTP timestamp of an RTP packet represents the raw frame
+ // capture time, while the RTP timestamp of an RTCP SR packet represents
+ // its packetization time, so this average shows how long it takes a raw
+ // frame to become an RTP packet. It can be consulted for a/v sync and for
+ // the sender's own delay on this media stream.
+ mAvgRtpRtcpGapMs = ((mAvgRtpRtcpGapMs * 15) + rtpTimeGapMs) / 16;
+ }
+}
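
The averaging step above is a 15/16 exponential moving average: each new sample contributes one sixteenth of its value, so spikes are smoothed over roughly sixteen packets. A minimal sketch of the filter on its own (the same shape is used for the delay average below):

// avgMs is the running average; sampleMs is the newest gap measurement.
double updateEwma16(double avgMs, double sampleMs, bool isFirstSample) {
    return isFirstSample ? sampleMs : ((avgMs * 15) + sampleMs) / 16;
}
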
+
+void ARTPSource::calcUnderlineDelay(const sp<ABuffer> &buffer, int64_t nowUs) {
+ int64_t elapsedMs = (nowUs - mSysAnchorTime) / 1000;
+ int64_t elapsedRtpTime = (elapsedMs * (mClockRate / 1000));
+ int64_t expectedRtpTime = mFirstRtpTime + elapsedRtpTime;
+
+ int32_t rtpTime;
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ int32_t delayMs = (expectedRtpTime - rtpTime) / (mClockRate / 1000);
+
+ mAvgUnderlineDelayMs = ((mAvgUnderlineDelayMs * 15) + delayMs) / 16;
+}
+
+void ARTPSource::adjustAnchorTimeIfRequired(int64_t nowUs) {
+ if (nowUs - mLastSysAnchorTimeUpdatedUs < 1000000L) {
+ return;
+ }
+
+ if (mAvgUnderlineDelayMs < -30) {
+ // Adjust the anchor by a quarter of the measured delay so it converges step by step.
+ mSysAnchorTime += (int64_t)(mAvgUnderlineDelayMs * 1000 / 4);
+ ALOGD("anchor time updated: original(%lld), anchor(%lld), diffMs(%lld)",
+ (long long)mFirstSysTime, (long long)mSysAnchorTime,
+ (long long)(mFirstSysTime - mSysAnchorTime) / 1000);
+
+ mAvgUnderlineDelayMs = 0;
+ mLastSysAnchorTimeUpdatedUs = nowUs;
+
+ // Reset the jitter statistics since the anchor time was adjusted.
+ mJitterCalc->init(mFirstRtpTime, mSysAnchorTime, 0, mStaticJbTimeMs * 1000);
+ }
+}
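
adjustAnchorTimeIfRequired() nudges the system-time anchor by a quarter of the smoothed delay per adjustment, at most once per second, so playback converges gradually instead of jumping. A standalone sketch of the step, assuming the same -30ms threshold:

#include <cstdint>

// Returns the adjusted anchor; avgDelayMs below -30 means packets are
// arriving earlier than the anchor predicts.
int64_t stepAnchorTimeUs(int64_t anchorUs, double avgDelayMs) {
    if (avgDelayMs < -30) {
        anchorUs += (int64_t)(avgDelayMs * 1000 / 4);  // quarter step, in us
    }
    return anchorUs;
}
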
+
+bool ARTPSource::queuePacket(const sp<ABuffer> &buffer) {
+ int64_t nowUs = ALooper::GetNowUs();
+ uint32_t seqNum = (uint32_t)buffer->int32Data();
+ int32_t ssrc = 0, rtpTime = 0;
+
buffer->meta()->findInt32("ssrc", &ssrc);
+ CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&rtpTime));
+ mLatestRtpTime = rtpTime;
if (mNumBuffersReceived++ == 0 && mFirstSysTime == 0) {
- uint32_t firstRtpTime;
- CHECK(buffer->meta()->findInt32("rtp-time", (int32_t *)&firstRtpTime));
- mFirstSysTime = ALooper::GetNowUs();
+ mFirstSysTime = nowUs;
+ mSysAnchorTime = nowUs;
+ mLastSysAnchorTimeUpdatedUs = nowUs;
mHighestSeqNumber = seqNum;
mBaseSeqNumber = seqNum;
- mFirstRtpTime = firstRtpTime;
+ mFirstRtpTime = rtpTime;
mFirstSsrc = ssrc;
ALOGD("first-rtp arrived: first-rtp-time=%u, sys-time=%lld, seq-num=%u, ssrc=%d",
mFirstRtpTime, (long long)mFirstSysTime, mHighestSeqNumber, mFirstSsrc);
mJitterCalc->init(mFirstRtpTime, mFirstSysTime, 0, mStaticJbTimeMs * 1000);
+ if (mQueue.size() > 0) {
+ ALOGD("clearing buffers which belonged to previous timeline"
+ " since a base timeline has been changed.");
+ mQueue.clear();
+ }
mQueue.push_back(buffer);
return true;
}
@@ -156,6 +273,10 @@
return false;
}
+ calcTimeGapRtpRtcp(buffer, nowUs);
+ calcUnderlineDelay(buffer, nowUs);
+ adjustAnchorTimeIfRequired(nowUs);
+
// Only the lower 16-bit of the sequence numbers are transmitted,
// derive the high-order bits by choosing the candidate closest
// to the highest sequence number (extended to 32 bits) received so far.
@@ -340,11 +461,11 @@
uint32_t LSR = 0;
uint32_t DLSR = 0;
- if (mLastNTPTime != 0) {
- LSR = (mLastNTPTime >> 16) & 0xffffffff;
+ if (mLastSrNtpTime != 0) {
+ LSR = (mLastSrNtpTime >> 16) & 0xffffffff;
DLSR = (uint32_t)
- ((ALooper::GetNowUs() - mLastNTPTimeUpdateUs) * 65536.0 / 1E6);
+ ((ALooper::GetNowUs() - mLastSrUpdateTimeUs) * 65536.0 / 1E6);
}
data[24] = LSR >> 24;
@@ -543,6 +664,35 @@
mJitterCalc->putInterArrivalData(timeStamp, arrivalTime);
}
+void ARTPSource::setJbTimer(const sp<AMessage> timer) {
+ mJbTimer = timer;
+}
+
+void ARTPSource::setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs) {
+ if (mJbTimer == NULL) {
+ return;
+ }
+ int64_t alarmTimeUs = nowTimeUs + alarmAfterUs;
+ bool alarm = false;
+ if (mLastJbAlarmTimeUs <= nowTimeUs) {
+ // no alarm is pending.
+ mLastJbAlarmTimeUs = nowTimeUs + alarmAfterUs;
+ alarm = true;
+ } else if (mLastJbAlarmTimeUs > alarmTimeUs + 5000L) {
+ // bring the alarm forward by more than 5ms.
+ mLastJbAlarmTimeUs = alarmTimeUs;
+ alarm = true;
+ } else {
+ // do not set a new alarm if it is close to the pending one.
+ }
+
+ if (alarm) {
+ sp<AMessage> notify = mJbTimer->dup();
+ notify->setObject("source", this);
+ notify->post(alarmAfterUs);
+ }
+}
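
setJbAlarmTime() coalesces alarms: a new one is posted only when nothing is pending, or when the new alarm would fire more than 5ms earlier than the one already scheduled. A minimal sketch of that decision with the timer state passed in explicitly:

#include <cstdint>

// lastAlarmUs is the fire time of the alarm scheduled so far, if any.
bool shouldPostAlarm(int64_t nowUs, int64_t alarmAfterUs, int64_t &lastAlarmUs) {
    const int64_t alarmTimeUs = nowUs + alarmAfterUs;
    if (lastAlarmUs <= nowUs) {
        lastAlarmUs = alarmTimeUs;          // no alarm pending: schedule one
        return true;
    }
    if (lastAlarmUs > alarmTimeUs + 5000) {
        lastAlarmUs = alarmTimeUs;          // new alarm fires >5ms earlier
        return true;
    }
    return false;                           // too close to the pending alarm
}
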
+
bool ARTPSource::isNeedToEarlyNotify() {
uint32_t expected = mHighestSeqNumber - mBaseSeqNumber + 1;
int32_t intervalExpectedInNow = expected - mPrevExpected;
@@ -553,7 +703,7 @@
return false;
}
-void ARTPSource::notifyPktInfo(int32_t bitrate, bool isRegular) {
+void ARTPSource::notifyPktInfo(int32_t bitrate, int64_t nowUs, bool isRegular) {
int32_t payloadType = isRegular ? RTP_QUALITY : RTP_QUALITY_EMC;
sp<AMessage> notify = mNotify->dup();
@@ -567,6 +717,11 @@
notify->setInt32("prev-expected", mPrevExpected);
notify->setInt32("num-buf-recv", mNumBuffersReceived);
notify->setInt32("prev-num-buf-recv", mPrevNumBuffersReceived);
+ notify->setInt32("latest-rtp-time", mLatestRtpTime);
+ notify->setInt64("recv-time-us", nowUs);
+ notify->setInt32("rtp-jitter-time-ms",
+ std::max(getBaseJitterTimeMs(), getStaticJitterTimeMs()));
+ notify->setInt32("rtp-rtcpsr-time-gap-ms", (int32_t)mAvgRtpRtcpGapMs);
notify->post();
if (isRegular) {
diff --git a/media/libstagefright/rtsp/ARTPSource.h b/media/libstagefright/rtsp/ARTPSource.h
index 0edff23..4984e91 100644
--- a/media/libstagefright/rtsp/ARTPSource.h
+++ b/media/libstagefright/rtsp/ARTPSource.h
@@ -31,7 +31,7 @@
namespace android {
-const uint32_t kStaticJitterTimeMs = 50; // 50ms
+const uint32_t kStaticJitterTimeMs = 100; // 100ms
struct ABuffer;
struct AMessage;
@@ -49,6 +49,8 @@
RTCP_FIRST_PACKET = 101,
RTP_QUALITY = 102,
RTP_QUALITY_EMC = 103,
+ RTCP_SR = 200,
+ RTCP_RR = 201,
RTCP_TSFB = 205,
RTCP_PSFB = 206,
RTP_CVO = 300,
@@ -56,6 +58,8 @@
};
void processRTPPacket(const sp<ABuffer> &buffer);
+ void processRTPPacket();
+ void timeReset();
void timeUpdate(uint32_t rtpTime, uint64_t ntpTime);
void byeReceived();
@@ -76,19 +80,23 @@
void setStaticJitterTimeMs(const uint32_t jbTimeMs);
void putBaseJitterData(uint32_t timeStamp, int64_t arrivalTime);
void putInterArrivalJitterData(uint32_t timeStamp, int64_t arrivalTime);
+ void setJbTimer(const sp<AMessage> timer);
+ void setJbAlarmTime(int64_t nowTimeUs, int64_t alarmAfterUs);
bool isNeedToEarlyNotify();
- void notifyPktInfo(int32_t bitrate, bool isRegular);
+ void notifyPktInfo(int32_t bitrate, int64_t nowUs, bool isRegular);
// FIR needs to be sent by missing packet or broken video image.
void onIssueFIRByAssembler();
void noticeAbandonBuffer(int cnt=1);
- int32_t mFirstSeqNumber;
uint32_t mFirstRtpTime;
int64_t mFirstSysTime;
int32_t mClockRate;
+ int64_t mSysAnchorTime;
+ int64_t mLastSysAnchorTimeUpdatedUs;
+
int32_t mFirstSsrc;
int32_t mHighestNackNumber;
@@ -103,11 +111,14 @@
uint32_t mPrevExpectedForRR;
int32_t mPrevNumBuffersReceivedForRR;
+ uint32_t mLatestRtpTime;
+
List<sp<ABuffer> > mQueue;
sp<ARTPAssembler> mAssembler;
int32_t mStaticJbTimeMs;
sp<JitterCalc> mJitterCalc;
+ sp<AMessage> mJbTimer;
typedef struct infoNACK {
uint16_t seqNum;
@@ -120,8 +131,14 @@
std::map<uint16_t, infoNACK> mNACKMap;
int getSeqNumToNACK(List<int>& list, int size);
- uint64_t mLastNTPTime;
- int64_t mLastNTPTimeUpdateUs;
+ uint32_t mLastSrRtpTime;
+ uint64_t mLastSrNtpTime;
+ int64_t mLastSrUpdateTimeUs;
+
+ bool mIsFirstRtpRtcpGap;
+ double mAvgRtpRtcpGapMs;
+ double mAvgUnderlineDelayMs;
+ int64_t mLastJbAlarmTimeUs;
bool mIssueFIRRequests;
bool mIssueFIRByAssembler;
@@ -130,6 +147,10 @@
sp<AMessage> mNotify;
+ void calcTimeGapRtpRtcp(const sp<ABuffer> &buffer, int64_t nowUs);
+ void calcUnderlineDelay(const sp<ABuffer> &buffer, int64_t nowUs);
+ void adjustAnchorTimeIfRequired(int64_t nowUs);
+
bool queuePacket(const sp<ABuffer> &buffer);
DISALLOW_EVIL_CONSTRUCTORS(ARTPSource);
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index ec70952..11c7aeb 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -204,8 +204,6 @@
mRTPTimeBase = 0;
mNumRTPSent = 0;
mNumRTPOctetsSent = 0;
- mLastRTPTime = 0;
- mLastNTPTime = 0;
mOpponentID = 0;
mBitrate = 192000;
@@ -216,6 +214,7 @@
mRTPSockNetwork = 0;
mMode = INVALID;
+ mClockRate = 16000;
}
status_t ARTPWriter::addSource(const sp<MediaSource> &source) {
@@ -265,15 +264,28 @@
updateSocketNetwork(sockNetwork);
if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+ // rfc6184: RTP Payload Format for H.264 Video
+ // The clock rate in the "a=rtpmap" line MUST be 90000.
mMode = H264;
+ mClockRate = 90000;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ // rfc7798: RTP Payload Format for High Efficiency Video Coding (HEVC)
+ // The clock rate in the "a=rtpmap" line MUST be 90000.
mMode = H265;
+ mClockRate = 90000;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
mMode = H263;
+ // rfc4629: RTP Payload Format for ITU-T Rec. H.263 Video
+ // The clock rate in the "a=rtpmap" line MUST be 90000.
+ mClockRate = 90000;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
mMode = AMR_NB;
+ // rfc4867: RTP Payload Format ... (AMR) and (AMR-WB)
+ // The RTP clock rate in "a=rtpmap" MUST be 8000 for AMR and 16000 for AMR-WB
+ mClockRate = 8000;
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
mMode = AMR_WB;
+ mClockRate = 16000;
} else {
TRESPASS();
}
@@ -327,16 +339,17 @@
while (buffer->range_length() > 0) {
const uint8_t *NALPtr = (const uint8_t *)buffer->data() + buffer->range_offset();
+ uint8_t nalType = (*NALPtr) & H264_NALU_MASK;
MediaBufferBase **targetPtr = NULL;
- if ((*NALPtr & H264_NALU_MASK) == H264_NALU_SPS) {
+ if (nalType == H264_NALU_SPS) {
targetPtr = spsBuffer;
- } else if ((*NALPtr & H264_NALU_MASK) == H264_NALU_PPS) {
+ } else if (nalType == H264_NALU_PPS) {
targetPtr = ppsBuffer;
} else {
return;
}
- ALOGV("SPS(7) or PPS(8) found. Type %d", *NALPtr & H264_NALU_MASK);
+ ALOGV("SPS(7) or PPS(8) found. Type %d", nalType);
uint32_t bufferSize = buffer->range_length();
MediaBufferBase *&target = *targetPtr;
@@ -417,18 +430,18 @@
}
}
+ uint32_t targetSize;
if (target != NULL) {
target->release();
}
- uint32_t targetSize;
// note that targetSize is never 0 as the first byte is never part
// of a start prefix
if (isBoundFound) {
targetSize = i - SPCSize + 1;
- target = MediaBufferBase::Create(j);
+ target = MediaBufferBase::Create(targetSize);
memcpy(target->data(),
(const uint8_t *)buffer->data() + buffer->range_offset(),
- j);
+ targetSize);
buffer->set_range(buffer->range_offset() + targetSize + SPCSize,
buffer->range_length() - targetSize - SPCSize);
} else {
@@ -645,19 +658,27 @@
data[6] = (mSourceID >> 8) & 0xff;
data[7] = mSourceID & 0xff;
- data[8] = mLastNTPTime >> (64 - 8);
- data[9] = (mLastNTPTime >> (64 - 16)) & 0xff;
- data[10] = (mLastNTPTime >> (64 - 24)) & 0xff;
- data[11] = (mLastNTPTime >> 32) & 0xff;
- data[12] = (mLastNTPTime >> 24) & 0xff;
- data[13] = (mLastNTPTime >> 16) & 0xff;
- data[14] = (mLastNTPTime >> 8) & 0xff;
- data[15] = mLastNTPTime & 0xff;
+ uint64_t ntpTime = GetNowNTP();
+ data[8] = ntpTime >> (64 - 8);
+ data[9] = (ntpTime >> (64 - 16)) & 0xff;
+ data[10] = (ntpTime >> (64 - 24)) & 0xff;
+ data[11] = (ntpTime >> 32) & 0xff;
+ data[12] = (ntpTime >> 24) & 0xff;
+ data[13] = (ntpTime >> 16) & 0xff;
+ data[14] = (ntpTime >> 8) & 0xff;
+ data[15] = ntpTime & 0xff;
- data[16] = (mLastRTPTime >> 24) & 0xff;
- data[17] = (mLastRTPTime >> 16) & 0xff;
- data[18] = (mLastRTPTime >> 8) & 0xff;
- data[19] = mLastRTPTime & 0xff;
+ // The current rtpTime can be calculated from ALooper::GetNowUs(). This
+ // assumes that the raw-frame timestamps from a media source share the same
+ // time base across components of the Android media framework, i.e. that
+ // ALooper::GetNowUs() is on the same timeline as the kKeyTime value of a
+ // MediaBufferBase.
+ uint32_t rtpTime = getRtpTime(ALooper::GetNowUs());
+ data[16] = (rtpTime >> 24) & 0xff;
+ data[17] = (rtpTime >> 16) & 0xff;
+ data[18] = (rtpTime >> 8) & 0xff;
+ data[19] = rtpTime & 0xff;
data[20] = mNumRTPSent >> 24;
data[21] = (mNumRTPSent >> 16) & 0xff;
@@ -779,6 +800,13 @@
return (hi << 32) | lo;
}
+uint32_t ARTPWriter::getRtpTime(int64_t timeUs) {
+ int32_t clockPerMs = mClockRate / 1000;
+ int64_t rtpTime = mRTPTimeBase + (timeUs * clockPerMs / 1000LL);
+
+ return (uint32_t)rtpTime;
+}
+
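
getRtpTime() replaces the hard-coded "* 9 / 100" (which silently assumed a 90kHz clock) with a conversion driven by the negotiated clock rate. A standalone sketch of the arithmetic, with hypothetical parameter names:

#include <cstdint>

uint32_t rtpTimeFor(uint32_t rtpTimeBase, uint32_t clockRate, int64_t timeUs) {
    const int64_t clockPerMs = clockRate / 1000;        // e.g. 90 for video
    return rtpTimeBase + (uint32_t)(timeUs * clockPerMs / 1000);
}
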
void ARTPWriter::dumpSessionDesc() {
AString sdp;
sdp = "v=0\r\n";
@@ -980,7 +1008,7 @@
sendVPSSPSPPSIfIFrame(mediaBuf, timeUs);
- uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100ll);
+ uint32_t rtpTime = getRtpTime(timeUs);
CHECK(mediaBuf->range_length() > 0);
const uint8_t *mediaData =
@@ -994,12 +1022,14 @@
}
sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
-
if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+ RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
// The data fits into a single packet
uint8_t *data = buffer->data();
data[0] = 0x80;
+ if (mRTPCVOExtMap > 0) {
+ data[0] |= 0x10;
+ }
if (isNonVCL) {
data[1] = mPayloadType; // Marker bit should not be set in case of Non-VCL
} else {
@@ -1016,144 +1046,6 @@
data[10] = (mSourceID >> 8) & 0xff;
data[11] = mSourceID & 0xff;
- memcpy(&data[12],
- mediaData, mediaBuf->range_length());
-
- buffer->setRange(0, mediaBuf->range_length() + 12);
-
- send(buffer, false /* isRTCP */);
-
- ++mSeqNo;
- ++mNumRTPSent;
- mNumRTPOctetsSent += buffer->size() - 12;
- } else {
- // FU-A
-
- unsigned nalType = (mediaData[0] >> 1) & H265_NALU_MASK;
- ALOGV("H265 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
- size_t offset = 2; //H265 payload header is 16 bit.
-
- bool firstPacket = true;
- while (offset < mediaBuf->range_length()) {
- size_t size = mediaBuf->range_length() - offset;
- bool lastPacket = true;
- if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
- RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
- lastPacket = false;
- size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
- RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
- }
-
- uint8_t *data = buffer->data();
- data[0] = 0x80;
- data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType; // M-bit
- data[2] = (mSeqNo >> 8) & 0xff;
- data[3] = mSeqNo & 0xff;
- data[4] = rtpTime >> 24;
- data[5] = (rtpTime >> 16) & 0xff;
- data[6] = (rtpTime >> 8) & 0xff;
- data[7] = rtpTime & 0xff;
- data[8] = mSourceID >> 24;
- data[9] = (mSourceID >> 16) & 0xff;
- data[10] = (mSourceID >> 8) & 0xff;
- data[11] = mSourceID & 0xff;
-
- /* H265 payload header is 16 bit
- 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- |F| Type | Layer ID | TID |
- +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
- */
- ALOGV("H265 payload header 0x%x %x", mediaData[0], mediaData[1]);
- // excludes Type from 1st byte of H265 payload header.
- data[12] = mediaData[0] & 0x81;
- // fills Type as FU (49 == 0x31)
- data[12] = data[12] | (0x31 << 1);
- data[13] = mediaData[1];
-
- ALOGV("H265 FU header 0x%x %x", data[12], data[13]);
-
- CHECK(!firstPacket || !lastPacket);
- /*
- FU INDICATOR HDR
- 0 1 2 3 4 5 6 7
- +-+-+-+-+-+-+-+
- |S|E| Type |
- +-+-+-+-+-+-+-+
- */
-
- data[14] =
- (firstPacket ? 0x80 : 0x00)
- | (lastPacket ? 0x40 : 0x00)
- | (nalType & H265_NALU_MASK);
- ALOGV("H265 FU indicator 0x%x", data[14]);
-
- memcpy(&data[15], &mediaData[offset], size);
-
- buffer->setRange(0, 15 + size);
-
- send(buffer, false /* isRTCP */);
-
- ++mSeqNo;
- ++mNumRTPSent;
- mNumRTPOctetsSent += buffer->size() - 12;
-
- firstPacket = false;
- offset += size;
- }
- }
-
- mLastRTPTime = rtpTime;
- mLastNTPTime = GetNowNTP();
-
-}
-
-void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
- // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
- CHECK_GE(kMaxPacketSize, 12u + 2u);
-
- int64_t timeUs;
- CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
-
- sendSPSPPSIfIFrame(mediaBuf, timeUs);
-
- uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
-
- CHECK(mediaBuf->range_length() > 0);
- const uint8_t *mediaData =
- (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
-
- int32_t sps, pps;
- bool isSpsPps = false;
- if (mediaBuf->meta_data().findInt32(kKeySps, &sps) ||
- mediaBuf->meta_data().findInt32(kKeyPps, &pps)) {
- isSpsPps = true;
- }
-
- mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
- sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
- if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
- + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
- // The data fits into a single packet
- uint8_t *data = buffer->data();
- data[0] = 0x80;
- if (mRTPCVOExtMap > 0)
- data[0] |= 0x10;
- if (isSpsPps)
- data[1] = mPayloadType; // Marker bit should not be set in case of sps/pps
- else
- data[1] = (1 << 7) | mPayloadType;
- data[2] = (mSeqNo >> 8) & 0xff;
- data[3] = mSeqNo & 0xff;
- data[4] = rtpTime >> 24;
- data[5] = (rtpTime >> 16) & 0xff;
- data[6] = (rtpTime >> 8) & 0xff;
- data[7] = rtpTime & 0xff;
- data[8] = mSourceID >> 24;
- data[9] = (mSourceID >> 16) & 0xff;
- data[10] = (mSourceID >> 8) & 0xff;
- data[11] = mSourceID & 0xff;
-
int rtpExtIndex = 0;
if (mRTPCVOExtMap > 0) {
/*
@@ -1202,8 +1094,9 @@
} else {
// FU-A
- unsigned nalType = mediaData[0];
- size_t offset = 1;
+ unsigned nalType = (mediaData[0] >> 1) & H265_NALU_MASK;
+ ALOGV("H265 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+ size_t offset = 2; // H265 payload header is 16 bits.
bool firstPacket = true;
while (offset < mediaBuf->range_length()) {
@@ -1218,8 +1111,9 @@
uint8_t *data = buffer->data();
data[0] = 0x80;
- if (lastPacket && mRTPCVOExtMap > 0)
+ if (lastPacket && mRTPCVOExtMap > 0) {
data[0] |= 0x10;
+ }
data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType; // M-bit
data[2] = (mSeqNo >> 8) & 0xff;
data[3] = mSeqNo & 0xff;
@@ -1245,14 +1139,219 @@
rtpExtIndex = 8;
}
- data[12 + rtpExtIndex] = 28 | (nalType & 0xe0);
+ /* H265 payload header is 16 bits
+ 0 1 2 3 4 5 6 7 0 1 2 3 4 5 6 7
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ |F| Type | Layer ID | TID |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ */
+ ALOGV("H265 payload header 0x%x %x", mediaData[0], mediaData[1]);
+ // excludes Type from 1st byte of H265 payload header.
+ data[12 + rtpExtIndex] = mediaData[0] & 0x81;
+ // fills Type as FU (49 == 0x31)
+ data[12 + rtpExtIndex] = data[12 + rtpExtIndex] | (0x31 << 1);
+ data[13 + rtpExtIndex] = mediaData[1];
+
+ ALOGV("H265 FU header 0x%x %x", data[12 + rtpExtIndex], data[13 + rtpExtIndex]);
CHECK(!firstPacket || !lastPacket);
+ /*
+ FU header
+ 0 1 2 3 4 5 6 7
+ +-+-+-+-+-+-+-+-+
+ |S|E| Type |
+ +-+-+-+-+-+-+-+-+
+ */
+
+ data[14 + rtpExtIndex] =
+ (firstPacket ? 0x80 : 0x00)
+ | (lastPacket ? 0x40 : 0x00)
+ | (nalType & H265_NALU_MASK);
+ ALOGV("H265 FU indicator 0x%x", data[14]);
+
+ memcpy(&data[15 + rtpExtIndex], &mediaData[offset], size);
+
+ buffer->setRange(0, 15 + rtpExtIndex + size);
+
+ send(buffer, false /* isRTCP */);
+
+ ++mSeqNo;
+ ++mNumRTPSent;
+ mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+
+ firstPacket = false;
+ offset += size;
+ }
+ }
+}
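
The FU packetization above rewrites the two HEVC PayloadHdr bytes and appends an FU header per RFC 7798: the F bit, LayerId, and TID are preserved, Type becomes 49 (FU), and the FU header carries the start/end flags plus the original NAL type. A minimal sketch of the byte packing, not the AOSP routine itself:

#include <cstdint>

void packHevcFuHeaders(uint8_t out[3], const uint8_t nalHdr[2],
                       bool first, bool last) {
    const uint8_t nalType = (nalHdr[0] >> 1) & 0x3f;  // original NAL type
    out[0] = (nalHdr[0] & 0x81) | (49 << 1);          // F bit kept, Type=49
    out[1] = nalHdr[1];                               // LayerId + TID kept
    out[2] = (first ? 0x80 : 0x00)                    // S: first fragment
           | (last  ? 0x40 : 0x00)                    // E: last fragment
           | nalType;                                 // FuType
}
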
+
+void ARTPWriter::sendAVCData(MediaBufferBase *mediaBuf) {
+ // 12 bytes RTP header + 2 bytes for the FU-indicator and FU-header.
+ CHECK_GE(kMaxPacketSize, 12u + 2u);
+
+ int64_t timeUs;
+ CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
+
+ sendSPSPPSIfIFrame(mediaBuf, timeUs);
+
+ uint32_t rtpTime = getRtpTime(timeUs);
+
+ CHECK(mediaBuf->range_length() > 0);
+ const uint8_t *mediaData =
+ (const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
+
+ int32_t sps, pps;
+ bool isSpsPps = false;
+ if (mediaBuf->meta_data().findInt32(kKeySps, &sps) ||
+ mediaBuf->meta_data().findInt32(kKeyPps, &pps)) {
+ isSpsPps = true;
+ }
+
+ mTrafficRec->updateClock(ALooper::GetNowUs() / 1000);
+ sp<ABuffer> buffer = new ABuffer(kMaxPacketSize);
+ if (mediaBuf->range_length() + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE
+ + RTP_PAYLOAD_ROOM_SIZE <= buffer->capacity()) {
+ // The data fits into a single packet
+ uint8_t *data = buffer->data();
+ data[0] = 0x80;
+ if (mRTPCVOExtMap > 0) {
+ data[0] |= 0x10;
+ }
+ if (isSpsPps) {
+ data[1] = mPayloadType; // Marker bit should not be set in case of sps/pps
+ } else {
+ data[1] = (1 << 7) | mPayloadType;
+ }
+ data[2] = (mSeqNo >> 8) & 0xff;
+ data[3] = mSeqNo & 0xff;
+ data[4] = rtpTime >> 24;
+ data[5] = (rtpTime >> 16) & 0xff;
+ data[6] = (rtpTime >> 8) & 0xff;
+ data[7] = rtpTime & 0xff;
+ data[8] = mSourceID >> 24;
+ data[9] = (mSourceID >> 16) & 0xff;
+ data[10] = (mSourceID >> 8) & 0xff;
+ data[11] = mSourceID & 0xff;
+
+ int rtpExtIndex = 0;
+ if (mRTPCVOExtMap > 0) {
+ /*
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | 0xBE | 0xDE | length=3 |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | ID | L=0 | data | ID | L=1 | data...
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ ...data | 0 (pad) | 0 (pad) | ID | L=3 |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | data |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+ In the one-byte header form of extensions, the 16-bit value required
+ by the RTP specification for a header extension, labeled in the RTP
+ specification as "defined by profile", takes the fixed bit pattern
+ 0xBEDE (the first version of this specification was written on the
+ feast day of the Venerable Bede).
+ */
+ data[12] = 0xBE;
+ data[13] = 0xDE;
+ // put a length of RTP Extension.
+ data[14] = 0x00;
+ data[15] = 0x01;
+ // put extmap of RTP assigned for CVO.
+ data[16] = (mRTPCVOExtMap << 4) | 0x0;
+ // put image degrees as per CVO specification.
+ data[17] = mRTPCVODegrees;
+ data[18] = 0x0;
+ data[19] = 0x0;
+ rtpExtIndex = 8;
+ }
+
+ memcpy(&data[12 + rtpExtIndex],
+ mediaData, mediaBuf->range_length());
+
+ buffer->setRange(0, mediaBuf->range_length() + (12 + rtpExtIndex));
+
+ send(buffer, false /* isRTCP */);
+
+ ++mSeqNo;
+ ++mNumRTPSent;
+ mNumRTPOctetsSent += buffer->size() - (12 + rtpExtIndex);
+ } else {
+ // FU-A
+
+ unsigned nalType = mediaData[0] & H264_NALU_MASK;
+ ALOGV("H264 nalType 0x%x, data[0]=0x%x", nalType, mediaData[0]);
+ size_t offset = 1;
+
+ bool firstPacket = true;
+ while (offset < mediaBuf->range_length()) {
+ size_t size = mediaBuf->range_length() - offset;
+ bool lastPacket = true;
+ if (size + TCPIP_HEADER_SIZE + RTP_HEADER_SIZE + RTP_HEADER_EXT_SIZE +
+ RTP_FU_HEADER_SIZE + RTP_PAYLOAD_ROOM_SIZE > buffer->capacity()) {
+ lastPacket = false;
+ size = buffer->capacity() - TCPIP_HEADER_SIZE - RTP_HEADER_SIZE -
+ RTP_HEADER_EXT_SIZE - RTP_FU_HEADER_SIZE - RTP_PAYLOAD_ROOM_SIZE;
+ }
+
+ uint8_t *data = buffer->data();
+ data[0] = 0x80;
+ if (lastPacket && mRTPCVOExtMap > 0) {
+ data[0] |= 0x10;
+ }
+ data[1] = (lastPacket ? (1 << 7) : 0x00) | mPayloadType; // M-bit
+ data[2] = (mSeqNo >> 8) & 0xff;
+ data[3] = mSeqNo & 0xff;
+ data[4] = rtpTime >> 24;
+ data[5] = (rtpTime >> 16) & 0xff;
+ data[6] = (rtpTime >> 8) & 0xff;
+ data[7] = rtpTime & 0xff;
+ data[8] = mSourceID >> 24;
+ data[9] = (mSourceID >> 16) & 0xff;
+ data[10] = (mSourceID >> 8) & 0xff;
+ data[11] = mSourceID & 0xff;
+
+ int rtpExtIndex = 0;
+ if (lastPacket && mRTPCVOExtMap > 0) {
+ data[12] = 0xBE;
+ data[13] = 0xDE;
+ data[14] = 0x00;
+ data[15] = 0x01;
+ data[16] = (mRTPCVOExtMap << 4) | 0x0;
+ data[17] = mRTPCVODegrees;
+ data[18] = 0x0;
+ data[19] = 0x0;
+ rtpExtIndex = 8;
+ }
+
+ /* H264 payload header is 8 bits
+ 0 1 2 3 4 5 6 7
+ +-+-+-+-+-+-+-+-+
+ |F|NRI| Type |
+ +-+-+-+-+-+-+-+-+
+ */
+ ALOGV("H264 payload header 0x%x", mediaData[0]);
+ // excludes Type from 1st byte of H264 payload header.
+ data[12 + rtpExtIndex] = mediaData[0] & 0xe0;
+ // fills Type as FU (28 == 0x1C)
+ data[12 + rtpExtIndex] = data[12 + rtpExtIndex] | 0x1C;
+
+ CHECK(!firstPacket || !lastPacket);
+ /*
+ FU header
+ 0 1 2 3 4 5 6 7
+ +-+-+-+-+-+-+-+-+
+ |S|E|R| Type |
+ +-+-+-+-+-+-+-+-+
+ */
data[13 + rtpExtIndex] =
(firstPacket ? 0x80 : 0x00)
| (lastPacket ? 0x40 : 0x00)
- | (nalType & 0x1f);
+ | (nalType & H264_NALU_MASK);
+ ALOGV("H264 FU header 0x%x", data[13]);
memcpy(&data[14 + rtpExtIndex], &mediaData[offset], size);
@@ -1268,9 +1367,6 @@
offset += size;
}
}
-
- mLastRTPTime = rtpTime;
- mLastNTPTime = GetNowNTP();
}
void ARTPWriter::sendH263Data(MediaBufferBase *mediaBuf) {
@@ -1279,7 +1375,7 @@
int64_t timeUs;
CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
- uint32_t rtpTime = mRTPTimeBase + (timeUs * 9 / 100LL);
+ uint32_t rtpTime = getRtpTime(timeUs);
const uint8_t *mediaData =
(const uint8_t *)mediaBuf->data() + mediaBuf->range_offset();
@@ -1330,9 +1426,6 @@
++mNumRTPSent;
mNumRTPOctetsSent += buffer->size() - 12;
}
-
- mLastRTPTime = rtpTime;
- mLastNTPTime = GetNowNTP();
}
void ARTPWriter::updateCVODegrees(int32_t cvoDegrees) {
@@ -1415,7 +1508,7 @@
int64_t timeUs;
CHECK(mediaBuf->meta_data().findInt64(kKeyTime, &timeUs));
- uint32_t rtpTime = mRTPTimeBase + (timeUs / (isWide ? 250 : 125));
+ uint32_t rtpTime = getRtpTime(timeUs);
// hexdump(mediaData, mediaLength);
@@ -1489,9 +1582,6 @@
++mSeqNo;
++mNumRTPSent;
mNumRTPOctetsSent += buffer->size() - 12;
-
- mLastRTPTime = rtpTime;
- mLastNTPTime = GetNowNTP();
}
void ARTPWriter::makeSocketPairAndBind(String8& localIp, int localPort,
diff --git a/media/libstagefright/rtsp/ARTPWriter.h b/media/libstagefright/rtsp/ARTPWriter.h
index 28d6ec5..2982cf6 100644
--- a/media/libstagefright/rtsp/ARTPWriter.h
+++ b/media/libstagefright/rtsp/ARTPWriter.h
@@ -108,14 +108,13 @@
MediaBufferBase *mSPSBuf;
MediaBufferBase *mPPSBuf;
+ uint32_t mClockRate;
uint32_t mSourceID;
uint32_t mPayloadType;
uint32_t mSeqNo;
uint32_t mRTPTimeBase;
uint32_t mNumRTPSent;
uint32_t mNumRTPOctetsSent;
- uint32_t mLastRTPTime;
- uint64_t mLastNTPTime;
uint32_t mOpponentID;
uint32_t mBitrate;
@@ -136,6 +135,7 @@
} mMode;
static uint64_t GetNowNTP();
+ uint32_t getRtpTime(int64_t timeUs);
void initState();
void onRead(const sp<AMessage> &msg);
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 0fdf431..988cec7 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -74,7 +74,8 @@
// The allowed maximum number of stale access units at the beginning of
// a new sequence.
-static int32_t kMaxAllowedStaleAccessUnits = 20;
+static int32_t kMaxAllowedStaleAudioAccessUnits = 20;
+static int32_t kMaxAllowedStaleVideoAccessUnits = 400;
static int64_t kTearDownTimeoutUs = 3000000ll;
@@ -108,6 +109,10 @@
}
}
+static int32_t GetMaxAllowedStaleCount(bool isVideo) {
+ return isVideo ? kMaxAllowedStaleVideoAccessUnits : kMaxAllowedStaleAudioAccessUnits;
+}
+
struct MyHandler : public AHandler {
enum {
kWhatConnected = 'conn',
@@ -1330,6 +1335,8 @@
ALOGV("rtp-info: %s", response->mHeaders.valueAt(i).c_str());
+ mRTPConn->seekStream();
+
ALOGI("seek completed.");
}
}
@@ -1514,7 +1521,7 @@
TrackInfo *info = &mTracks.editItemAt(trackIndex);
info->mFirstSeqNumInSegment = seq;
info->mNewSegment = true;
- info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+ info->mAllowedStaleAccessUnits = GetMaxAllowedStaleCount(info->mIsVideo);
CHECK(GetAttribute((*it).c_str(), "rtptime", &val));
@@ -1556,6 +1563,7 @@
int mRTPSocket;
int mRTCPSocket;
bool mUsingInterleavedTCP;
+ bool mIsVideo;
uint32_t mFirstSeqNumInSegment;
bool mNewSegment;
int32_t mAllowedStaleAccessUnits;
@@ -1640,9 +1648,10 @@
info->mURL = trackURL;
info->mPacketSource = source;
info->mUsingInterleavedTCP = false;
+ info->mIsVideo = source->isVideo();
info->mFirstSeqNumInSegment = 0;
info->mNewSegment = true;
- info->mAllowedStaleAccessUnits = kMaxAllowedStaleAccessUnits;
+ info->mAllowedStaleAccessUnits = GetMaxAllowedStaleCount(info->mIsVideo);
info->mRTPSocket = -1;
info->mRTCPSocket = -1;
info->mRTPAnchor = 0;
@@ -1838,11 +1847,12 @@
// by ARTPSource. Only the low 16 bits of seq in RTP-Info of reply of
// RTSP "PLAY" command should be used to detect the first RTP packet
// after seeking.
+ int32_t maxAllowedStaleAccessUnits = GetMaxAllowedStaleCount(track->mIsVideo);
if (mSeekable) {
if (track->mAllowedStaleAccessUnits > 0) {
uint32_t seqNum16 = seqNum & 0xffff;
uint32_t firstSeqNumInSegment16 = track->mFirstSeqNumInSegment & 0xffff;
- if (seqNum16 > firstSeqNumInSegment16 + kMaxAllowedStaleAccessUnits
+ if (seqNum16 > firstSeqNumInSegment16 + maxAllowedStaleAccessUnits
|| seqNum16 < firstSeqNumInSegment16) {
// Not the first rtp packet of the stream after seeking, discarding.
track->mAllowedStaleAccessUnits--;
@@ -1857,7 +1867,7 @@
mNumAccessUnitsReceived = 0;
ALOGW_IF(track->mAllowedStaleAccessUnits == 0,
"Still no first rtp packet after %d stale ones",
- kMaxAllowedStaleAccessUnits);
+ maxAllowedStaleAccessUnits);
track->mAllowedStaleAccessUnits = -1;
return UNKNOWN_ERROR;
}
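
The stale-unit test above treats only the low 16 bits of the sequence number as meaningful after a seek, accepting a packet as the segment's first only when it falls inside [first, first + maxStale]. A minimal sketch of just that window test, without the counter bookkeeping of the real code:

#include <cstdint>

bool isFirstPacketAfterSeek(uint32_t seqNum, uint32_t firstSeqInSegment,
                            uint32_t maxStale) {
    const uint32_t seq16 = seqNum & 0xffff;
    const uint32_t first16 = firstSeqInSegment & 0xffff;
    return seq16 >= first16 && seq16 <= first16 + maxStale;
}
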
diff --git a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
index a628c70..c251479 100644
--- a/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/FrameDecoderFuzzer.cpp
@@ -42,7 +42,7 @@
if (isVideoDecoder) {
decoder = new VideoFrameDecoder(componentName, trackMeta, source);
} else {
- decoder = new ImageDecoder(componentName, trackMeta, source);
+ decoder = new MediaImageDecoder(componentName, trackMeta, source);
}
while (fdp.remaining_bytes()) {
@@ -80,4 +80,3 @@
}
} // namespace android
-
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 3e4e4932..e25658f 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -35,7 +35,6 @@
"android.hardware.media.omx@1.0",
"libandroidicu",
"libfmq",
- "libbase",
"libbinder",
"libhidlbase",
"liblog",
@@ -53,9 +52,12 @@
"frameworks/av/services/mediaresourcemanager",
],
- // mediaserver has only been verified on 32-bit, see b/126502613
- // use "prefer32" to *only* enable 64-bit builds on 64-bit-only lunch
- // targets, which allows them to reach 'boot_complete'.
+ // By default mediaserver runs in 32-bit to save memory, except
+ // on 64-bit-only lunch targets.
+ // ****************************************************************
+ // TO ENABLE 64-BIT MEDIASERVER ON MIXED 32/64-BIT DEVICES, COMMENT
+ // OUT THE FOLLOWING LINE:
+ // ****************************************************************
compile_multilib: "prefer32",
init_rc: ["mediaserver.rc"],
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index dc1b9b8..58e2d2a 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -18,7 +18,6 @@
#define LOG_TAG "mediaserver"
//#define LOG_NDEBUG 0
-#include <android-base/properties.h>
#include <binder/IPCThreadState.h>
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
@@ -43,12 +42,6 @@
ResourceManagerService::instantiate();
registerExtensions();
::android::hardware::configureRpcThreadpool(16, false);
-
- if (!android::base::GetBoolProperty("ro.config.low_ram", false)) {
- // Start the media.transcoding service if the device is not low ram
- // device.
- android::base::SetProperty("ctl.start", "media.transcoding");
- }
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
::android::hardware::joinRpcThreadpool();
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 54a6425..65a163f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -947,6 +947,7 @@
output.frameCount = input.frameCount;
output.notificationFrameCount = input.notificationFrameCount;
output.flags = input.flags;
+ output.streamType = streamType;
track = thread->createTrack_l(client, streamType, localAttr, &output.sampleRate,
input.config.format, input.config.channel_mask,
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 88aa7cb..e8552c4 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -21,42 +21,6 @@
#error This header file should only be included from AudioFlinger.h
#endif
-// Checks and monitors app ops for audio record
-class OpRecordAudioMonitor : public RefBase {
-public:
- ~OpRecordAudioMonitor() override;
- bool hasOp() const;
- int32_t getOp() const { return mAppOp; }
-
- static sp<OpRecordAudioMonitor> createIfNeeded(const AttributionSourceState& attributionSource,
- const audio_attributes_t& attr);
-
-private:
- OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp);
-
- void onFirstRef() override;
-
- AppOpsManager mAppOpsManager;
-
- class RecordAudioOpCallback : public BnAppOpsCallback {
- public:
- explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
- void opChanged(int32_t op, const String16& packageName) override;
-
- private:
- const wp<OpRecordAudioMonitor> mMonitor;
- };
-
- sp<RecordAudioOpCallback> mOpCallback;
- // called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor is updated
- // in AppOp callback and in onFirstRef()
- void checkOp();
-
- std::atomic_bool mHasOp;
- const AttributionSourceState mAttributionSource;
- const int32_t mAppOp;
-};
-
// record track
class RecordTrack : public TrackBase {
public:
@@ -107,7 +71,7 @@
{ return (mFlags & AUDIO_INPUT_FLAG_DIRECT) != 0; }
void setSilenced(bool silenced) { if (!isPatchTrack()) mSilenced = silenced; }
- bool isSilenced() const;
+ bool isSilenced() const { return mSilenced; }
status_t getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
@@ -154,8 +118,6 @@
bool mSilenced;
- // used to enforce the audio record app op corresponding to this track's audio source
- sp<OpRecordAudioMonitor> mOpRecordAudioMonitor;
std::string mSharedAudioPackageName = {};
int32_t mStartFrames = -1;
};
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 9e099ce..b9cdab8 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -8220,6 +8220,7 @@
status_t AudioFlinger::RecordThread::shareAudioHistory_l(
const std::string& sharedAudioPackageName, audio_session_t sharedSessionId,
int64_t sharedAudioStartMs) {
+
if ((hasAudioSession_l(sharedSessionId) & ThreadBase::TRACK_SESSION) == 0) {
return BAD_VALUE;
}
@@ -8234,18 +8235,21 @@
// after one wraparound
// We assume recent wraparounds on mRsmpInRear only given it is unlikely that the requesting
// app waits several hours after the start time was computed.
- const int64_t sharedAudioStartFrames = sharedAudioStartMs * mSampleRate / 1000;
+ int64_t sharedAudioStartFrames = sharedAudioStartMs * mSampleRate / 1000;
const int32_t sharedOffset = audio_utils::safe_sub_overflow(mRsmpInRear,
(int32_t)sharedAudioStartFrames);
- if (sharedOffset < 0
- || sharedOffset > mRsmpInFrames) {
- return BAD_VALUE;
+ // Bring the start frame position within the input buffer to match the documented
+ // "best effort" behavior of the API.
+ if (sharedOffset < 0) {
+ sharedAudioStartFrames = mRsmpInRear;
+ } else if (sharedOffset > mRsmpInFrames) {
+ sharedAudioStartFrames =
+ audio_utils::safe_sub_overflow(mRsmpInRear, (int32_t)mRsmpInFrames);
}
mSharedAudioPackageName = sharedAudioPackageName;
if (mSharedAudioPackageName.empty()) {
- mSharedAudioSessionId = AUDIO_SESSION_NONE;
- mSharedAudioStartFrames = -1;
+ resetAudioHistory_l();
} else {
mSharedAudioSessionId = sharedSessionId;
mSharedAudioStartFrames = (int32_t)sharedAudioStartFrames;
@@ -8253,6 +8257,12 @@
return NO_ERROR;
}
+void AudioFlinger::RecordThread::resetAudioHistory_l() {
+ mSharedAudioSessionId = AUDIO_SESSION_NONE;
+ mSharedAudioStartFrames = -1;
+ mSharedAudioPackageName = "";
+}
+
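
The shareAudioHistory_l() change clamps an out-of-range start position into the available history instead of returning BAD_VALUE, matching the documented best-effort behavior of the API. A simplified sketch of the clamp, ignoring the 32-bit wraparound that safe_sub_overflow handles in the real code:

#include <algorithm>
#include <cstdint>

int64_t clampStartFrames(int64_t requested, int64_t rear, int64_t frames) {
    const int64_t oldest = rear - frames;   // oldest frame still buffered
    return std::min(rear, std::max(oldest, requested));
}
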
void AudioFlinger::RecordThread::updateMetadata_l()
{
if (!isStreamInitialized() || !mActiveTracks.readAndClearHasChanged()) {
@@ -8862,23 +8872,22 @@
int32_t AudioFlinger::RecordThread::getOldestFront_l()
{
if (mTracks.size() == 0) {
- return 0;
+ return mRsmpInRear;
}
int32_t oldestFront = mRsmpInRear;
int32_t maxFilled = 0;
for (size_t i = 0; i < mTracks.size(); i++) {
int32_t front = mTracks[i]->mResamplerBufferProvider->getFront();
int32_t filled;
- if (front <= mRsmpInRear) {
- filled = mRsmpInRear - front;
- } else {
- filled = (int32_t)((int64_t)mRsmpInRear + UINT32_MAX + 1 - front);
- }
+ (void)__builtin_sub_overflow(mRsmpInRear, front, &filled);
if (filled > maxFilled) {
oldestFront = front;
maxFilled = filled;
}
}
+ if (maxFilled > mRsmpInFrames) {
+ (void)__builtin_sub_overflow(mRsmpInRear, mRsmpInFrames, &oldestFront);
+ }
return oldestFront;
}
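
getOldestFront_l() now lets the 32-bit subtraction wrap instead of special-casing rollover: two's-complement subtraction yields the correct distance even once mRsmpInRear has wrapped past a track's front pointer. A minimal sketch using the same GCC/Clang builtin:

#include <cstdint>

int32_t filledFrames(int32_t rear, int32_t front) {
    int32_t filled;
    // Wraps like unsigned arithmetic; the overflow flag is ignored.
    (void)__builtin_sub_overflow(rear, front, &filled);
    return filled;
}
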
@@ -8928,7 +8937,7 @@
"resizeInputBuffer_l() called with shared history and unallocated buffer");
size_t rsmpInFrames = (size_t)maxSharedAudioHistoryMs * mSampleRate / 1000;
// never reduce resampler input buffer size
- if (rsmpInFrames < mRsmpInFrames) {
+ if (rsmpInFrames <= mRsmpInFrames) {
return;
}
mRsmpInFrames = rsmpInFrames;
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index eee1f2b..16082a9 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -1789,6 +1789,7 @@
status_t shareAudioHistory_l(const std::string& sharedAudioPackageName,
audio_session_t sharedSessionId = AUDIO_SESSION_NONE,
int64_t sharedAudioStartMs = -1);
+ void resetAudioHistory_l();
virtual bool isStreamInitialized() {
return !(mInput == nullptr || mInput->stream == nullptr);
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 8d98afe..d2a30b1 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -2247,109 +2247,6 @@
// ----------------------------------------------------------------------------
-// ----------------------------------------------------------------------------
-// AppOp for audio recording
-// -------------------------------
-
-#undef LOG_TAG
-#define LOG_TAG "AF::OpRecordAudioMonitor"
-
-// static
-sp<AudioFlinger::RecordThread::OpRecordAudioMonitor>
-AudioFlinger::RecordThread::OpRecordAudioMonitor::createIfNeeded(
- const AttributionSourceState& attributionSource, const audio_attributes_t& attr)
-{
- if (isServiceUid(attributionSource.uid)) {
- ALOGV("not silencing record for service %s",
- attributionSource.toString().c_str());
- return nullptr;
- }
-
- // Capturing from FM TUNER output is not controlled by an app op
- // because it does not affect users privacy as does capturing from an actual microphone.
- if (attr.source == AUDIO_SOURCE_FM_TUNER) {
- ALOGV("not muting FM TUNER capture for uid %d", attributionSource.uid);
- return nullptr;
- }
-
- AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
- attributionSource);
- if (!checkedAttributionSource.packageName.has_value()
- || checkedAttributionSource.packageName.value().size() == 0) {
- return nullptr;
- }
- return new OpRecordAudioMonitor(checkedAttributionSource, getOpForSource(attr.source));
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::OpRecordAudioMonitor(
- const AttributionSourceState& attributionSource, int32_t appOp)
- : mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp)
-{
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::~OpRecordAudioMonitor()
-{
- if (mOpCallback != 0) {
- mAppOpsManager.stopWatchingMode(mOpCallback);
- }
- mOpCallback.clear();
-}
-
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::onFirstRef()
-{
- checkOp();
- mOpCallback = new RecordAudioOpCallback(this);
- ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
- // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
- // since it controls the mic permission for legacy apps.
- mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
- mAttributionSource.packageName.value_or(""))),
- mOpCallback);
-}
-
-bool AudioFlinger::RecordThread::OpRecordAudioMonitor::hasOp() const {
- return mHasOp.load();
-}
-
-// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
-// is updated in AppOp callback and in onFirstRef()
-// Note this method is never called (and never to be) for audio server / root track
-// due to the UID in createIfNeeded(). As a result for those record track, it's:
-// - not called from constructor,
-// - not called from RecordAudioOpCallback because the callback is not installed in this case
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::checkOp()
-{
- // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
- // since it controls the mic permission for legacy apps.
- const int32_t mode = mAppOpsManager.checkOp(mAppOp,
- mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
- mAttributionSource.packageName.value_or(""))));
- const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
- // verbose logging only log when appOp changed
- ALOGI_IF(hasIt != mHasOp.load(),
- "App op %d missing, %ssilencing record %s",
- mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
- mHasOp.store(hasIt);
-}
-
-AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
- const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
-{ }
-
-void AudioFlinger::RecordThread::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
- const String16& packageName) {
- UNUSED(packageName);
- sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
- if (monitor != NULL) {
- if (op != monitor->getOp()) {
- return;
- }
- monitor->checkOp();
- }
-}
-
-
-
#undef LOG_TAG
#define LOG_TAG "AF::RecordHandle"
@@ -2450,7 +2347,6 @@
mRecordBufferConverter(NULL),
mFlags(flags),
mSilenced(false),
- mOpRecordAudioMonitor(OpRecordAudioMonitor::createIfNeeded(attributionSource, attr)),
mStartFrames(startFrames)
{
if (mCblk == NULL) {
@@ -2562,7 +2458,7 @@
RecordThread *recordThread = (RecordThread *) thread.get();
priorState = mState;
if (!mSharedAudioPackageName.empty()) {
- recordThread->shareAudioHistory_l("");
+ recordThread->resetAudioHistory_l();
}
recordThread->destroyTrack_l(this); // move mState to STOPPED, terminate
}
@@ -2709,14 +2605,6 @@
mServerLatencyMs.store(latencyMs);
}
-bool AudioFlinger::RecordThread::RecordTrack::isSilenced() const {
- if (mSilenced) {
- return true;
- }
- // The monitor is only created for record tracks that can be silenced.
- return mOpRecordAudioMonitor ? !mOpRecordAudioMonitor->hasOp() : false;
-}
-
status_t AudioFlinger::RecordThread::RecordTrack::getActiveMicrophones(
std::vector<media::MicrophoneInfo>* activeMicrophones)
{
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index ca8e96c..c73c17d 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -393,12 +393,14 @@
|| outputs.isActiveLocally(
toVolumeSource(AUDIO_STREAM_ACCESSIBILITY),
SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY);
- // - for STRATEGY_SONIFICATION:
+
+ bool ringActiveLocally = outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_RING), 0);
+ // - for STRATEGY_SONIFICATION and ringtone active:
// if SPEAKER was selected, and SPEAKER_SAFE is available, use SPEAKER_SAFE instead
// - for STRATEGY_SONIFICATION_RESPECTFUL:
// if no media is playing on the device, check for mandatory use of "safe" speaker
// when media would have played on speaker, and the safe speaker path is available
- if (strategy == STRATEGY_SONIFICATION
+ if (strategy == STRATEGY_SONIFICATION || ringActiveLocally
|| (strategy == STRATEGY_SONIFICATION_RESPECTFUL && !mediaActiveLocally)) {
devices.replaceDevicesByType(
AUDIO_DEVICE_OUT_SPEAKER,
@@ -506,7 +508,7 @@
switch (commDeviceType) {
case AUDIO_DEVICE_OUT_BLE_HEADSET:
device = availableDevices.getDevice(
- AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
+ AUDIO_DEVICE_IN_BLE_HEADSET, String8(""), AUDIO_FORMAT_DEFAULT);
break;
case AUDIO_DEVICE_OUT_SPEAKER:
device = availableDevices.getFirstExistingDevice({
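A minimal sketch of the SPEAKER -> SPEAKER_SAFE substitution that replaceDevicesByType() performs above (a plain std::vector stands in for DeviceVector; the names and types here are illustrative, not the real audio policy API):

    #include <algorithm>
    #include <cstdio>
    #include <vector>

    enum DeviceType { SPEAKER, SPEAKER_SAFE, WIRED_HEADSET };

    // Replace every occurrence of 'from' with 'to', mirroring what
    // devices.replaceDevicesByType(SPEAKER, SPEAKER_SAFE) achieves when the
    // safe speaker path is available and a ringtone or sonification is active.
    static void replaceDevicesByType(std::vector<DeviceType>& devices,
                                     DeviceType from, DeviceType to) {
        std::replace(devices.begin(), devices.end(), from, to);
    }

    int main() {
        std::vector<DeviceType> devices = {SPEAKER, WIRED_HEADSET};
        replaceDevicesByType(devices, SPEAKER, SPEAKER_SAFE);
        std::printf("first device is SPEAKER_SAFE: %d\n", devices[0] == SPEAKER_SAFE);
        return 0;
    }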
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 2e866ff..cc2d8e8 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2531,12 +2531,14 @@
ALOGW("%s input %d client %d already stopped", __FUNCTION__, input, client->portId());
return INVALID_OPERATION;
}
-
+ auto old_source = inputDesc->source();
inputDesc->setClientActive(client, false);
inputDesc->stop();
if (inputDesc->isActive()) {
- setInputDevice(input, getNewInputDevice(inputDesc), false /* force */);
+ auto current_source = inputDesc->source();
+ setInputDevice(input, getNewInputDevice(inputDesc),
+ old_source != current_source /* force */);
} else {
sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
// if input maps to a dynamic policy with an activity listener, notify of state change
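The stopInput() change above forces a routing update whenever the top active source changed after a client stopped, even if getNewInputDevice() resolves to the same device, since the input path may need reconfiguring for the new source. The guard in isolation (a toy sketch; the enum values are invented):

    #include <cstdio>

    enum Source { SRC_MIC, SRC_VOICE_RECOGNITION };

    int main() {
        Source old_source = SRC_VOICE_RECOGNITION;
        // After one client stops, the remaining active client uses plain MIC.
        Source current_source = SRC_MIC;

        // force == true only when the active source actually changed, so the
        // common unchanged-routing case is still skipped.
        bool force = (old_source != current_source);
        std::printf("force routing update: %d\n", force);
        return 0;
    }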
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 0273d29..454c020 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -38,6 +38,7 @@
"libmedia_helper",
"libmediametrics",
"libmediautils",
+ "libpermission",
"libsensorprivacy",
"libutils",
"audioclient-types-aidl-cpp",
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 3298f6b..9987252 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -674,7 +674,8 @@
sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
selectedDeviceId, adjAttributionSource,
- canCaptureOutput, canCaptureHotword);
+ canCaptureOutput, canCaptureHotword,
+ mOutputCommandThread);
mAudioRecordClients.add(portId, client);
}
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 201273e..4d0e1f1 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -730,7 +730,10 @@
&& !(isTopOrLatestSensitive || current->canCaptureOutput))
&& canCaptureIfInCallOrCommunication(current);
- if (isVirtualSource(source)) {
+ if (!current->hasOp()) {
+ // Never allow capture if app op is denied
+ allowCapture = false;
+ } else if (isVirtualSource(source)) {
// Allow capture for virtual (remote submix, call audio TX or RX...) sources
allowCapture = true;
} else if (mUidPolicy->isAssistantUid(currentUid)) {
@@ -830,6 +833,19 @@
return false;
}
+/* static */
+bool AudioPolicyService::isAppOpSource(audio_source_t source)
+{
+ switch (source) {
+ case AUDIO_SOURCE_FM_TUNER:
+ case AUDIO_SOURCE_ECHO_REFERENCE:
+ return false;
+ default:
+ break;
+ }
+ return true;
+}
+
void AudioPolicyService::setAppState_l(sp<AudioRecordClient> client, app_state_t state)
{
AutoCallerClear acc;
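hasOp() is consulted before every other capture rule, so an app-op denial always wins, including over the virtual-source allowance. A toy sketch of that ordering (simplified booleans; the real updateUidStates() weighs many more inputs):

    #include <cstdio>

    // Simplified decision ladder: app-op denial is checked first and is final.
    static bool allowCapture(bool hasOp, bool isVirtualSource, bool isAssistant) {
        if (!hasOp) {
            return false;          // never capture if the app op is denied
        } else if (isVirtualSource) {
            return true;           // remote submix, call RX/TX, ...
        } else if (isAssistant) {
            return true;           // further conditions elided
        }
        return false;
    }

    int main() {
        // Even a virtual source is silenced when the app op is denied.
        std::printf("%d\n", allowCapture(/*hasOp*/false, /*isVirtualSource*/true, false));
        return 0;
    }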
@@ -1418,6 +1434,109 @@
return binder::Status::ok();
}
+// ----------- AudioPolicyService::OpRecordAudioMonitor implementation ----------
+
+// static
+sp<AudioPolicyService::OpRecordAudioMonitor>
+AudioPolicyService::OpRecordAudioMonitor::createIfNeeded(
+ const AttributionSourceState& attributionSource, const audio_attributes_t& attr,
+ wp<AudioCommandThread> commandThread)
+{
+ if (isAudioServerOrRootUid(attributionSource.uid)) {
+ ALOGV("not silencing record for audio or root source %s",
+ attributionSource.toString().c_str());
+ return nullptr;
+ }
+
+ if (!AudioPolicyService::isAppOpSource(attr.source)) {
+ ALOGD("not monitoring app op for uid %d and source %d",
+ attributionSource.uid, attr.source);
+ return nullptr;
+ }
+
+ if (!attributionSource.packageName.has_value()
+ || attributionSource.packageName.value().size() == 0) {
+ return nullptr;
+ }
+ return new OpRecordAudioMonitor(attributionSource, getOpForSource(attr.source), commandThread);
+}
+
+AudioPolicyService::OpRecordAudioMonitor::OpRecordAudioMonitor(
+ const AttributionSourceState& attributionSource, int32_t appOp,
+ wp<AudioCommandThread> commandThread) :
+ mHasOp(true), mAttributionSource(attributionSource), mAppOp(appOp),
+ mCommandThread(commandThread)
+{
+}
+
+AudioPolicyService::OpRecordAudioMonitor::~OpRecordAudioMonitor()
+{
+ if (mOpCallback != 0) {
+ mAppOpsManager.stopWatchingMode(mOpCallback);
+ }
+ mOpCallback.clear();
+}
+
+void AudioPolicyService::OpRecordAudioMonitor::onFirstRef()
+{
+ checkOp();
+ mOpCallback = new RecordAudioOpCallback(this);
+ ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
+ // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
+ // since it controls the mic permission for legacy apps.
+ mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+ mAttributionSource.packageName.value_or(""))),
+ mOpCallback);
+}
+
+bool AudioPolicyService::OpRecordAudioMonitor::hasOp() const {
+ return mHasOp.load();
+}
+
+// Called by RecordAudioOpCallback when the app op corresponding to this OpRecordAudioMonitor
+// changes, and also directly from onFirstRef().
+// Note this method is never called for audio server / root clients because createIfNeeded()
+// returns nullptr for their UIDs. As a result, for those record clients it is:
+// - not called from the constructor,
+// - not called from RecordAudioOpCallback because the callback is not installed in this case
+void AudioPolicyService::OpRecordAudioMonitor::checkOp(bool updateUidStates)
+{
+ // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
+ // since it controls the mic permission for legacy apps.
+ const int32_t mode = mAppOpsManager.checkOp(mAppOp,
+ mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+ mAttributionSource.packageName.value_or(""))));
+ const bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+ // Verbose logging: only log when the appOp changed.
+ ALOGI_IF(hasIt != mHasOp.load(),
+ "App op %d missing, %ssilencing record %s",
+ mAppOp, hasIt ? "un" : "", mAttributionSource.toString().c_str());
+ mHasOp.store(hasIt);
+
+ if (updateUidStates) {
+ sp<AudioCommandThread> commandThread = mCommandThread.promote();
+ if (commandThread != nullptr) {
+ commandThread->updateUidStatesCommand();
+ }
+ }
+}
+
+AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::RecordAudioOpCallback(
+ const wp<OpRecordAudioMonitor>& monitor) : mMonitor(monitor)
+{ }
+
+void AudioPolicyService::OpRecordAudioMonitor::RecordAudioOpCallback::opChanged(int32_t op,
+ const String16& packageName __unused) {
+ sp<OpRecordAudioMonitor> monitor = mMonitor.promote();
+ if (monitor != NULL) {
+ if (op != monitor->getOp()) {
+ return;
+ }
+ monitor->checkOp(true);
+ }
+}
+
+
// ----------- AudioPolicyService::AudioCommandThread implementation ----------
AudioPolicyService::AudioCommandThread::AudioCommandThread(String8 name,
@@ -1634,6 +1753,17 @@
mLock.lock();
} break;
+ case UPDATE_UID_STATES: {
+ ALOGV("AudioCommandThread() processing updateUID states");
+ svc = mService.promote();
+ if (svc == 0) {
+ break;
+ }
+ mLock.unlock();
+ svc->updateUidStates();
+ mLock.lock();
+ } break;
+
default:
ALOGW("AudioCommandThread() unknown command %d", command->mCommand);
}
@@ -1847,6 +1977,14 @@
sendCommand(command);
}
+void AudioPolicyService::AudioCommandThread::updateUidStatesCommand()
+{
+ sp<AudioCommand> command = new AudioCommand();
+ command->mCommand = UPDATE_UID_STATES;
+ ALOGV("AudioCommandThread() adding update UID states");
+ sendCommand(command);
+}
+
void AudioPolicyService::AudioCommandThread::updateAudioPatchListCommand()
{
sp<AudioCommand>command = new AudioCommand();
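updateUidStatesCommand() exists so the AppOps callback never re-enters the policy engine on the binder thread; it only queues an UPDATE_UID_STATES command for AudioCommandThread to run later. A condensed, self-contained sketch of that hand-off pattern (std::thread and a plain queue stand in for AudioCommandThread):

    #include <condition_variable>
    #include <cstdio>
    #include <mutex>
    #include <queue>
    #include <thread>

    enum Command { UPDATE_UID_STATES, EXIT };

    int main() {
        std::mutex lock;
        std::condition_variable cv;
        std::queue<Command> commands;

        // Worker loop: drains commands, doing the heavy work off the caller's thread.
        std::thread worker([&] {
            for (;;) {
                std::unique_lock<std::mutex> ul(lock);
                cv.wait(ul, [&] { return !commands.empty(); });
                Command c = commands.front();
                commands.pop();
                ul.unlock();
                if (c == EXIT) return;
                std::printf("re-evaluating UID states\n"); // svc->updateUidStates()
            }
        });

        // Callback context: just post the command and return immediately.
        {
            std::lock_guard<std::mutex> lg(lock);
            commands.push(UPDATE_UID_STATES);
            commands.push(EXIT);
        }
        cv.notify_one();
        worker.join();
        return 0;
    }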
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index ac9c20f..3b77ed8 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -27,6 +27,7 @@
#include <utils/Vector.h>
#include <utils/SortedVector.h>
#include <binder/ActivityManager.h>
+#include <binder/AppOpsManager.h>
#include <binder/BinderService.h>
#include <binder/IUidObserver.h>
#include <system/audio.h>
@@ -358,6 +359,13 @@
static bool isVirtualSource(audio_source_t source);
+ /** Returns true if the audio source must be silenced when the corresponding app op is denied.
+ * Returns false if the audio source does not actually capture from the microphone, even
+ * though it is mapped to app op OP_RECORD_AUDIO rather than to a specialized op tracked
+ * separately. See getOpForSource().
+ */
+ static bool isAppOpSource(audio_source_t source);
+
// If recording we need to make sure the UID is allowed to do that. If the UID is idle
// then it cannot record and gets buffers with zeros - silence. As soon as the UID
// transitions to an active state we will start reporting buffers with data. This approach
@@ -467,6 +475,7 @@
SET_EFFECT_SUSPENDED,
AUDIO_MODULES_UPDATE,
ROUTING_UPDATED,
+ UPDATE_UID_STATES
};
AudioCommandThread (String8 name, const wp<AudioPolicyService>& service);
@@ -514,6 +523,7 @@
bool suspended);
void audioModulesUpdateCommand();
void routingChangedCommand();
+ void updateUidStatesCommand();
void insertCommand_l(AudioCommand *command, int delayMs = 0);
private:
class AudioCommandData;
@@ -814,6 +824,47 @@
bool active; // Playback/Capture is active or inactive
};
+ // Checks and monitors app ops for AudioRecordClient
+ class OpRecordAudioMonitor : public RefBase {
+ public:
+ ~OpRecordAudioMonitor() override;
+ bool hasOp() const;
+ int32_t getOp() const { return mAppOp; }
+
+ static sp<OpRecordAudioMonitor> createIfNeeded(
+ const AttributionSourceState& attributionSource,
+ const audio_attributes_t& attr, wp<AudioCommandThread> commandThread);
+
+ private:
+ OpRecordAudioMonitor(const AttributionSourceState& attributionSource, int32_t appOp,
+ wp<AudioCommandThread> commandThread);
+
+ void onFirstRef() override;
+
+ AppOpsManager mAppOpsManager;
+
+ class RecordAudioOpCallback : public BnAppOpsCallback {
+ public:
+ explicit RecordAudioOpCallback(const wp<OpRecordAudioMonitor>& monitor);
+ void opChanged(int32_t op, const String16& packageName) override;
+
+ private:
+ const wp<OpRecordAudioMonitor> mMonitor;
+ };
+
+ sp<RecordAudioOpCallback> mOpCallback;
+ // Called by RecordAudioOpCallback when the app op for this OpRecordAudioMonitor changes,
+ // and also directly from onFirstRef().
+ // updateUidStates is true when the silenced state of active AudioRecordClients must be
+ // re-evaluated
+ void checkOp(bool updateUidStates = false);
+
+ std::atomic_bool mHasOp;
+ const AttributionSourceState mAttributionSource;
+ const int32_t mAppOp;
+ wp<AudioCommandThread> mCommandThread;
+ };
+
// --- AudioRecordClient ---
// Information about each registered AudioRecord client
// (between calls to getInputForAttr() and releaseInput())
@@ -824,20 +875,32 @@
const audio_session_t session, audio_port_handle_t portId,
const audio_port_handle_t deviceId,
const AttributionSourceState& attributionSource,
- bool canCaptureOutput, bool canCaptureHotword) :
+ bool canCaptureOutput, bool canCaptureHotword,
+ wp<AudioCommandThread> commandThread) :
AudioClient(attributes, io, attributionSource,
session, portId, deviceId), attributionSource(attributionSource),
startTimeNs(0), canCaptureOutput(canCaptureOutput),
- canCaptureHotword(canCaptureHotword), silenced(false) {}
+ canCaptureHotword(canCaptureHotword), silenced(false),
+ mOpRecordAudioMonitor(
+ OpRecordAudioMonitor::createIfNeeded(attributionSource,
+ attributes, commandThread)) {}
~AudioRecordClient() override = default;
+ bool hasOp() const {
+ return mOpRecordAudioMonitor ? mOpRecordAudioMonitor->hasOp() : true;
+ }
+
const AttributionSourceState attributionSource; // attribution source of client
nsecs_t startTimeNs;
const bool canCaptureOutput;
const bool canCaptureHotword;
bool silenced;
+
+ private:
+ sp<OpRecordAudioMonitor> mOpRecordAudioMonitor;
};
+
// --- AudioPlaybackClient ---
// Information about each registered AudioTrack client
// (between calls to getOutputForAttr() and releaseOutput())
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 3deea6b..dc101ff 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -237,10 +237,16 @@
}
}
- //Derive primary rear/front cameras, and filter their charactierstics.
- //This needs to be done after all cameras are enumerated and camera ids are sorted.
+ // Derive primary rear/front cameras, and filter their characteristics.
+ // This needs to be done after all cameras are enumerated and camera ids are sorted.
if (SessionConfigurationUtils::IS_PERF_CLASS) {
- filterSPerfClassCharacteristics();
+ // Assume internal cameras are advertised from the same
+ // provider. If multiple providers are registered at different times,
+ // and each provider contains multiple internal color cameras, the current
+ // logic may filter the characteristics of more than one front/rear color
+ // camera.
+ Mutex::Autolock l(mServiceLock);
+ filterSPerfClassCharacteristicsLocked();
}
return OK;
@@ -313,7 +319,7 @@
filterAPI1SystemCameraLocked(mNormalDeviceIds);
}
-void CameraService::filterSPerfClassCharacteristics() {
+void CameraService::filterSPerfClassCharacteristicsLocked() {
// To claim to be S Performance primary cameras, the cameras must be
// backward compatible. So performance class primary camera Ids must be API1
// compatible.
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 1fb7104..9021170 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -945,9 +945,10 @@
void updateCameraNumAndIds();
/**
- * Filter camera characteristics for S Performance class primary cameras
+ * Filter camera characteristics for S Performance class primary cameras.
+ * mServiceLock must be held by the caller.
*/
- void filterSPerfClassCharacteristics();
+ void filterSPerfClassCharacteristicsLocked();
// File descriptor to temp file used for caching previous open
// session dumpsys info.
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index bd2e7dc..80508e4 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -29,6 +29,7 @@
#include "Parameters.h"
#include "system/camera.h"
+#include <android-base/properties.h>
#include <android/hardware/ICamera.h>
#include <media/MediaProfiles.h>
#include <media/mediarecorder.h>
@@ -1247,6 +1248,7 @@
}
}
fastInfo.maxZslSize = maxPrivInputSize;
+ fastInfo.usedZslSize = maxPrivInputSize;
} else {
fastInfo.maxZslSize = {0, 0};
}
@@ -2047,12 +2049,33 @@
slowJpegMode = false;
Size pictureSize = { pictureWidth, pictureHeight };
- int64_t minFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
- if (previewFpsRange[1] > 1e9/minFrameDurationNs + FPS_MARGIN) {
+ bool zslFrameRateSupported = false;
+ int64_t jpegMinFrameDurationNs = getJpegStreamMinFrameDurationNs(pictureSize);
+ if (previewFpsRange[1] > 1e9/jpegMinFrameDurationNs + FPS_MARGIN) {
slowJpegMode = true;
}
- if (isDeviceZslSupported || slowJpegMode ||
- property_get_bool("camera.disable_zsl_mode", false)) {
+ if (isZslReprocessPresent) {
+ unsigned int firstApiLevel =
+ android::base::GetUintProperty<unsigned int>("ro.product.first_api_level", 0);
+ Size chosenSize;
+ if ((firstApiLevel >= __ANDROID_API_S__) &&
+ !android::base::GetBoolProperty("ro.camera.enableCamera1MaxZsl", false)) {
+ chosenSize = pictureSize;
+ } else {
+ // Follow the old behavior of using the max zsl size as the input / output
+ // zsl stream size
+ chosenSize = fastInfo.maxZslSize;
+ }
+ int64_t zslMinFrameDurationNs = getZslStreamMinFrameDurationNs(chosenSize);
+ if (zslMinFrameDurationNs > 0 &&
+ previewFpsRange[1] <= (1e9/zslMinFrameDurationNs + FPS_MARGIN)) {
+ zslFrameRateSupported = true;
+ fastInfo.usedZslSize = chosenSize;
+ }
+ }
+
+ if (isDeviceZslSupported || slowJpegMode || !zslFrameRateSupported ||
+ android::base::GetBoolProperty("camera.disable_zsl_mode", false)) {
allowZslMode = false;
} else {
allowZslMode = isZslReprocessPresent;
@@ -3056,6 +3079,10 @@
return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_BLOB);
}
+int64_t Parameters::getZslStreamMinFrameDurationNs(Parameters::Size size) {
+ return getMinFrameDurationNs(size, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+}
+
int64_t Parameters::getMinFrameDurationNs(Parameters::Size size, int fmt) {
const int STREAM_DURATION_SIZE = 4;
const int STREAM_FORMAT_OFFSET = 0;
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 02ac638..e2f8d011 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
@@ -248,6 +248,7 @@
bool useFlexibleYuv;
Size maxJpegSize;
Size maxZslSize;
+ Size usedZslSize;
bool supportsPreferredConfigs;
} fastInfo;
@@ -426,6 +427,11 @@
// return -1 if input jpeg size cannot be found in supported size list
int64_t getJpegStreamMinFrameDurationNs(Parameters::Size size);
+ // Helper function to get the minimum frame duration for an
+ // IMPLEMENTATION_DEFINED stream of size 'size'
+ // return -1 if input size cannot be found in supported size list
+ int64_t getZslStreamMinFrameDurationNs(Parameters::Size size);
+
// Helper function to get minimum frame duration for a size/format combination
// return -1 if input size/format combination cannot be found.
int64_t getMinFrameDurationNs(Parameters::Size size, int format);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 8e598f1..1321e6b 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -235,8 +235,8 @@
}
if (mInputStreamId == NO_STREAM) {
- res = device->createInputStream(params.fastInfo.maxZslSize.width,
- params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ res = device->createInputStream(params.fastInfo.usedZslSize.width,
+ params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
/*isMultiResolution*/false, &mInputStreamId);
if (res != OK) {
ALOGE("%s: Camera %d: Can't create input stream: "
@@ -258,8 +258,8 @@
mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
sp<Surface> outSurface = new Surface(producer);
- res = device->createStream(outSurface, params.fastInfo.maxZslSize.width,
- params.fastInfo.maxZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
+ params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
HAL_DATASPACE_UNKNOWN, CAMERA_STREAM_ROTATION_0, &mZslStreamId,
String8(), std::unordered_set<int32_t>{ANDROID_SENSOR_PIXEL_MODE_DEFAULT});
if (res != OK) {
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 19b54e0..a66a592 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -42,17 +42,29 @@
mDepthBufferAcquired(false),
mBlobBufferAcquired(false),
mProducerListener(new ProducerListener()),
- mMaxJpegSize(-1),
+ mMaxJpegBufferSize(-1),
+ mUHRMaxJpegBufferSize(-1),
mIsLogicalCamera(false) {
if (device != nullptr) {
CameraMetadata staticInfo = device->info();
auto entry = staticInfo.find(ANDROID_JPEG_MAX_SIZE);
if (entry.count > 0) {
- mMaxJpegSize = entry.data.i32[0];
+ mMaxJpegBufferSize = entry.data.i32[0];
} else {
ALOGW("%s: Maximum jpeg size absent from camera characteristics", __FUNCTION__);
}
+ mUHRMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
+ /*ultraHighResolution*/true);
+ mDefaultMaxJpegSize =
+ SessionConfigurationUtils::getMaxJpegResolution(staticInfo,
+ /*ultraHighResolution*/false);
+
+ mUHRMaxJpegBufferSize =
+ SessionConfigurationUtils::getUHRMaxJpegBufferSize(mUHRMaxJpegSize, mDefaultMaxJpegSize,
+ mMaxJpegBufferSize);
+
entry = staticInfo.find(ANDROID_LENS_INTRINSIC_CALIBRATION);
if (entry.count == 5) {
mIntrinsicCalibration.reserve(5);
@@ -243,13 +255,22 @@
jpegSize = inputFrame.jpegBuffer.width;
}
- size_t maxDepthJpegSize;
- if (mMaxJpegSize > 0) {
- maxDepthJpegSize = mMaxJpegSize;
+ size_t maxDepthJpegBufferSize = 0;
+ if (mMaxJpegBufferSize > 0) {
+ // If this is an ultra high resolution sensor and the input frame's size
+ // is larger than the default-resolution max jpeg size, use the UHR buffer size.
+ if (mUHRMaxJpegSize.width != 0 &&
+ inputFrame.jpegBuffer.width * inputFrame.jpegBuffer.height >
+ mDefaultMaxJpegSize.width * mDefaultMaxJpegSize.height) {
+ maxDepthJpegBufferSize = mUHRMaxJpegBufferSize;
+ } else {
+ maxDepthJpegBufferSize = mMaxJpegBufferSize;
+ }
} else {
- maxDepthJpegSize = std::max<size_t> (jpegSize,
+ maxDepthJpegBufferSize = std::max<size_t> (jpegSize,
inputFrame.depthBuffer.width * inputFrame.depthBuffer.height * 3 / 2);
}
+
uint8_t jpegQuality = 100;
auto entry = inputFrame.result.find(ANDROID_JPEG_QUALITY);
if (entry.count > 0) {
@@ -259,7 +280,7 @@
// The final depth photo will consist of the main jpeg buffer, the depth map buffer (also in
// jpeg format) and confidence map (jpeg as well). Assume worst case that all 3 jpeg need
// max jpeg size.
- size_t finalJpegBufferSize = maxDepthJpegSize * 3;
+ size_t finalJpegBufferSize = maxDepthJpegBufferSize * 3;
if ((res = native_window_set_buffers_dimensions(mOutputSurface.get(), finalJpegBufferSize, 1))
!= OK) {
@@ -302,7 +323,7 @@
depthPhoto.mDepthMapStride = inputFrame.depthBuffer.stride;
depthPhoto.mJpegQuality = jpegQuality;
depthPhoto.mIsLogical = mIsLogicalCamera;
- depthPhoto.mMaxJpegSize = maxDepthJpegSize;
+ depthPhoto.mMaxJpegSize = maxDepthJpegBufferSize;
// The camera intrinsic calibration layout is as follows:
// [focalLengthX, focalLengthY, opticalCenterX, opticalCenterY, skew]
if (mIntrinsicCalibration.size() == 5) {
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index a520bbf..c1c75c1 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -132,7 +132,12 @@
sp<Surface> mDepthSurface, mBlobSurface, mOutputSurface;
sp<ProducerListener> mProducerListener;
- ssize_t mMaxJpegSize;
+ ssize_t mMaxJpegBufferSize;
+ ssize_t mUHRMaxJpegBufferSize;
+
+ camera3::Size mDefaultMaxJpegSize;
+ camera3::Size mUHRMaxJpegSize;
+
std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizes;
std::vector<std::tuple<size_t, size_t>> mSupportedDepthSizesMaximumResolution;
std::vector<float> mIntrinsicCalibration, mLensDistortion;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 582001d..a73ffb9 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -16,6 +16,7 @@
#define LOG_TAG "Camera3-HeicCompositeStream"
#define ATRACE_TAG ATRACE_TAG_CAMERA
+#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
//#define LOG_NDEBUG 0
#include <linux/memfd.h>
@@ -1380,7 +1381,9 @@
mOutputWidth = width;
mOutputHeight = height;
mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
- mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;
+ mMaxHeicBufferSize =
+ ALIGN(mOutputWidth, HeicEncoderInfoManager::kGridWidth) *
+ ALIGN(mOutputHeight, HeicEncoderInfoManager::kGridHeight) * 3 / 2 + mAppSegmentMaxSize;
return OK;
}
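The grid alignment matters because the HEVC encoder consumes kGridWidth x kGridHeight tiles, so the worst-case buffer must cover the rounded-up dimensions. A quick check of the ALIGN macro with illustrative values:

    #include <cstdio>

    #define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )

    int main() {
        // A 4032x3024 output with 512x512 grid tiles rounds up to 4096x3072.
        int w = ALIGN(4032, 512);          // 4096
        int h = ALIGN(3024, 512);          // 3072
        long ycbcr = (long)w * h * 3 / 2;  // worst-case YUV420 payload
        std::printf("%d x %d -> %ld bytes before app segments\n", w, h, ycbcr);
        return 0;
    }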
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
index 58edba2..a65be9c 100644
--- a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -38,6 +38,7 @@
bool isSizeSupported(int32_t width, int32_t height,
bool* useHeic, bool* useGrid, int64_t* stall, AString* hevcName) const;
+ // kGridWidth and kGridHeight should be 2^n
static const auto kGridWidth = 512;
static const auto kGridHeight = 512;
private:
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 7045128..4f2b878 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -476,15 +476,16 @@
const hardware::hidl_string& /*fqName*/,
const hardware::hidl_string& name,
bool preexisting) {
+ status_t res = OK;
std::lock_guard<std::mutex> providerLock(mProviderLifecycleLock);
{
std::lock_guard<std::mutex> lock(mInterfaceMutex);
- addProviderLocked(name, preexisting);
+ res = addProviderLocked(name, preexisting);
}
sp<StatusListener> listener = getStatusListener();
- if (nullptr != listener.get()) {
+ if (nullptr != listener.get() && res == OK) {
listener->onNewProviderRegistered();
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 7542c29..d572d57 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -511,42 +511,6 @@
return gotLock;
}
-camera3::Size Camera3Device::getMaxJpegResolution() const {
- int32_t maxJpegWidth = 0, maxJpegHeight = 0;
- const int STREAM_CONFIGURATION_SIZE = 4;
- const int STREAM_FORMAT_OFFSET = 0;
- const int STREAM_WIDTH_OFFSET = 1;
- const int STREAM_HEIGHT_OFFSET = 2;
- const int STREAM_IS_INPUT_OFFSET = 3;
- bool isHighResolutionSensor =
- camera3::SessionConfigurationUtils::isUltraHighResolutionSensor(mDeviceInfo);
- int32_t scalerSizesTag = isHighResolutionSensor ?
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
- camera_metadata_ro_entry_t availableStreamConfigs =
- mDeviceInfo.find(scalerSizesTag);
- if (availableStreamConfigs.count == 0 ||
- availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
- return camera3::Size(0, 0);
- }
-
- // Get max jpeg size (area-wise).
- for (size_t i=0; i < availableStreamConfigs.count; i+= STREAM_CONFIGURATION_SIZE) {
- int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
- int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
- int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
- int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
- if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
- && format == HAL_PIXEL_FORMAT_BLOB &&
- (width * height > maxJpegWidth * maxJpegHeight)) {
- maxJpegWidth = width;
- maxJpegHeight = height;
- }
- }
-
- return camera3::Size(maxJpegWidth, maxJpegHeight);
-}
-
nsecs_t Camera3Device::getMonoToBoottimeOffset() {
// try three times to get the clock offset, choose the one
// with the minimum gap in measurements.
@@ -637,13 +601,26 @@
}
ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
- // Get max jpeg size (area-wise).
- camera3::Size maxJpegResolution = getMaxJpegResolution();
- if (maxJpegResolution.width == 0) {
+ // Get max jpeg size (area-wise) for default sensor pixel mode
+ camera3::Size maxDefaultJpegResolution =
+ SessionConfigurationUtils::getMaxJpegResolution(mDeviceInfo,
+ /*ultraHighResolution*/false);
+ // Get max jpeg size (area-wise) for the max resolution sensor pixel mode, or
+ // Size(0, 0) if this is not an ultra high resolution sensor
+ camera3::Size uhrMaxJpegResolution =
+ SessionConfigurationUtils::getMaxJpegResolution(mDeviceInfo,
+ /*ultraHighResolution*/true);
+ if (maxDefaultJpegResolution.width == 0) {
ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
__FUNCTION__, mId.string());
return BAD_VALUE;
}
+ bool useMaxSensorPixelModeThreshold = false;
+ if (uhrMaxJpegResolution.width != 0 &&
+ width * height > maxDefaultJpegResolution.width * maxDefaultJpegResolution.height) {
+ // Use the ultra high res max jpeg size and max jpeg buffer size
+ useMaxSensorPixelModeThreshold = true;
+ }
// Get max jpeg buffer size
ssize_t maxJpegBufferSize = 0;
@@ -654,11 +631,19 @@
return BAD_VALUE;
}
maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
+
+ camera3::Size chosenMaxJpegResolution = maxDefaultJpegResolution;
+ if (useMaxSensorPixelModeThreshold) {
+ maxJpegBufferSize =
+ SessionConfigurationUtils::getUHRMaxJpegBufferSize(uhrMaxJpegResolution,
+ maxDefaultJpegResolution, maxJpegBufferSize);
+ chosenMaxJpegResolution = uhrMaxJpegResolution;
+ }
assert(kMinJpegBufferSize < maxJpegBufferSize);
// Calculate final jpeg buffer size for the given resolution.
float scaleFactor = ((float) (width * height)) /
- (maxJpegResolution.width * maxJpegResolution.height);
+ (chosenMaxJpegResolution.width * chosenMaxJpegResolution.height);
ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
kMinJpegBufferSize;
if (jpegBufferSize > maxJpegBufferSize) {
@@ -666,7 +651,6 @@
__FUNCTION__, maxJpegBufferSize);
jpegBufferSize = maxJpegBufferSize;
}
-
return jpegBufferSize;
}
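The final size is a linear interpolation between kMinJpegBufferSize and the (possibly UHR-scaled) max buffer size, proportional to the requested area. A worked example; the constants below are illustrative assumptions, not the real metadata values:

    #include <cstdio>

    int main() {
        // Illustrative values only.
        const float kMinJpegBufferSize = 256 * 1024;       // assumed floor
        const float maxJpegBufferSize = 12 * 1024 * 1024;  // from ANDROID_JPEG_MAX_SIZE
        const long maxW = 4000, maxH = 3000;               // chosen max jpeg resolution
        const long reqW = 2000, reqH = 1500;               // requested stream size

        float scaleFactor = (float)(reqW * reqH) / (maxW * maxH);  // 0.25
        float jpegBufferSize =
                scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) + kMinJpegBufferSize;
        std::printf("buffer size: %.0f bytes\n", jpegBufferSize);
        return 0;
    }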
@@ -791,16 +775,21 @@
}
lines = String8(" In-flight requests:\n");
- if (mInFlightMap.size() == 0) {
- lines.append(" None\n");
- } else {
- for (size_t i = 0; i < mInFlightMap.size(); i++) {
- InFlightRequest r = mInFlightMap.valueAt(i);
- lines.appendFormat(" Frame %d | Timestamp: %" PRId64 ", metadata"
- " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
- r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
- r.numBuffersLeft);
+ if (mInFlightLock.try_lock()) {
+ if (mInFlightMap.size() == 0) {
+ lines.append(" None\n");
+ } else {
+ for (size_t i = 0; i < mInFlightMap.size(); i++) {
+ InFlightRequest r = mInFlightMap.valueAt(i);
+ lines.appendFormat(" Frame %d | Timestamp: %" PRId64 ", metadata"
+ " arrived: %s, buffers left: %d\n", mInFlightMap.keyAt(i),
+ r.shutterTimestamp, r.haveResultMetadata ? "true" : "false",
+ r.numBuffersLeft);
+ }
}
+ mInFlightLock.unlock();
+ } else {
+ lines.append(" Failed to acquire In-flight lock!\n");
}
write(fd, lines.string(), lines.size());
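Taking mInFlightLock with try_lock keeps dumpsys from deadlocking against a wedged capture path: if the lock cannot be acquired, the dump degrades gracefully instead of blocking. The same pattern in miniature (the sleeps only simulate a holder thread):

    #include <chrono>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    int main() {
        std::mutex inFlightLock;

        // Simulate a capture thread that is currently holding the lock.
        std::thread holder([&] {
            std::lock_guard<std::mutex> lg(inFlightLock);
            std::this_thread::sleep_for(std::chrono::milliseconds(200));
        });
        std::this_thread::sleep_for(std::chrono::milliseconds(50));

        // Dump path: never block, degrade gracefully instead.
        if (inFlightLock.try_lock()) {
            std::printf("  In-flight requests: ...\n");
            inFlightLock.unlock();
        } else {
            std::printf("  Failed to acquire In-flight lock!\n");
        }
        holder.join();
        return 0;
    }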
@@ -2473,9 +2462,9 @@
auto testPatternDataEntry =
newRequest->mSettingsList.begin()->metadata.find(ANDROID_SENSOR_TEST_PATTERN_DATA);
- if (testPatternDataEntry.count > 0) {
- memcpy(newRequest->mOriginalTestPatternData, testPatternModeEntry.data.i32,
- sizeof(newRequest->mOriginalTestPatternData));
+ if (testPatternDataEntry.count >= 4) {
+ memcpy(newRequest->mOriginalTestPatternData, testPatternDataEntry.data.i32,
+ sizeof(CaptureRequest::mOriginalTestPatternData));
} else {
newRequest->mOriginalTestPatternData[0] = 0;
newRequest->mOriginalTestPatternData[1] = 0;
@@ -5873,6 +5862,13 @@
const sp<CaptureRequest> &request) {
ATRACE_CALL();
+ {
+ sp<Camera3Device> parent = mParent.promote();
+ if (parent != nullptr) {
+ if (!parent->supportsCameraMute()) return false;
+ }
+ }
+
Mutex::Autolock l(mTriggerMutex);
bool changed = false;
@@ -5908,16 +5904,16 @@
}
auto testPatternColor = metadata.find(ANDROID_SENSOR_TEST_PATTERN_DATA);
- if (testPatternColor.count > 0) {
+ if (testPatternColor.count >= 4) {
for (size_t i = 0; i < 4; i++) {
- if (testPatternColor.data.i32[i] != (int32_t)testPatternData[i]) {
+ if (testPatternColor.data.i32[i] != testPatternData[i]) {
testPatternColor.data.i32[i] = testPatternData[i];
changed = true;
}
}
} else {
metadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA,
- (int32_t*)testPatternData, 4);
+ testPatternData, 4);
changed = true;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index d010d67..733ed15 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -780,12 +780,6 @@
bool tryLockSpinRightRound(Mutex& lock);
/**
- * Helper function to get the largest Jpeg resolution (in area)
- * Return Size(0, 0) if static metatdata is invalid
- */
- camera3::Size getMaxJpegResolution() const;
-
- /**
* Helper function to get the offset between MONOTONIC and BOOTTIME
* timestamp.
*/
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 221bebb..03b77fc 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -654,19 +654,20 @@
* Then there is circular locking dependency.
*/
sp<Surface> consumer = mConsumer;
- size_t remainingBuffers = camera_stream::max_buffers - mHandoutTotalBufferCount;
+ size_t remainingBuffers = (mState == STATE_PREPARING ? mTotalBufferCount :
+ camera_stream::max_buffers) - mHandoutTotalBufferCount;
mLock.unlock();
- std::unique_lock<std::mutex> batchLock(mBatchLock);
nsecs_t dequeueStart = systemTime(SYSTEM_TIME_MONOTONIC);
- if (mBatchSize == 1) {
+ size_t batchSize = mBatchSize.load();
+ if (batchSize == 1) {
sp<ANativeWindow> anw = consumer;
res = anw->dequeueBuffer(anw.get(), anb, fenceFd);
} else {
+ std::unique_lock<std::mutex> batchLock(mBatchLock);
res = OK;
if (mBatchedBuffers.size() == 0) {
- size_t batchSize = mBatchSize;
if (remainingBuffers == 0) {
ALOGE("%s: cannot get buffer while all buffers are handed out", __FUNCTION__);
return INVALID_OPERATION;
@@ -674,13 +675,17 @@
if (batchSize > remainingBuffers) {
batchSize = remainingBuffers;
}
+ batchLock.unlock();
// Refill batched buffers
- mBatchedBuffers.resize(batchSize);
- res = consumer->dequeueBuffers(&mBatchedBuffers);
+ std::vector<Surface::BatchBuffer> batchedBuffers;
+ batchedBuffers.resize(batchSize);
+ res = consumer->dequeueBuffers(&batchedBuffers);
+ batchLock.lock();
if (res != OK) {
ALOGE("%s: batch dequeueBuffers call failed! %s (%d)",
__FUNCTION__, strerror(-res), res);
- mBatchedBuffers.clear();
+ } else {
+ mBatchedBuffers = std::move(batchedBuffers);
}
}
@@ -691,7 +696,6 @@
mBatchedBuffers.pop_back();
}
}
- batchLock.unlock();
nsecs_t dequeueEnd = systemTime(SYSTEM_TIME_MONOTONIC);
mDequeueBufferLatency.add(dequeueStart, dequeueEnd);
@@ -1091,7 +1095,7 @@
time_t now = time(0);
tm *localTime = localtime(&now);
snprintf(imageFileName, sizeof(imageFileName), "IMG_%4d%02d%02d_%02d%02d%02d_%" PRId64 ".%s",
- 1900 + localTime->tm_year, localTime->tm_mon, localTime->tm_mday,
+ 1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday,
localTime->tm_hour, localTime->tm_min, localTime->tm_sec,
timestamp, fileExtension.c_str());
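The filename fix works because struct tm counts months from 0 (January) while tm_mday is already 1-based, so only the month needs the +1. A quick demonstration:

    #include <cstdio>
    #include <ctime>

    int main() {
        time_t now = time(nullptr);
        tm *localTime = localtime(&now);
        // tm_year is years since 1900, tm_mon is 0..11, tm_mday is 1..31.
        std::printf("IMG_%4d%02d%02d\n",
                1900 + localTime->tm_year, localTime->tm_mon + 1, localTime->tm_mday);
        return 0;
    }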
@@ -1128,7 +1132,6 @@
status_t Camera3OutputStream::setBatchSize(size_t batchSize) {
Mutex::Autolock l(mLock);
- std::lock_guard<std::mutex> lock(mBatchLock);
if (batchSize == 0) {
ALOGE("%s: invalid batch size 0", __FUNCTION__);
return BAD_VALUE;
@@ -1144,31 +1147,36 @@
return INVALID_OPERATION;
}
- if (batchSize != mBatchSize) {
- if (mBatchedBuffers.size() != 0) {
- ALOGE("%s: change batch size from %zu to %zu dynamically is not supported",
- __FUNCTION__, mBatchSize, batchSize);
- return INVALID_OPERATION;
- }
-
- if (camera_stream::max_buffers < batchSize) {
- ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
- camera_stream::max_buffers);
- batchSize = camera_stream::max_buffers;
- }
- mBatchSize = batchSize;
+ if (camera_stream::max_buffers < batchSize) {
+ ALOGW("%s: batch size is capped by max_buffers %d", __FUNCTION__,
+ camera_stream::max_buffers);
+ batchSize = camera_stream::max_buffers;
}
+
+ size_t defaultBatchSize = 1;
+ if (!mBatchSize.compare_exchange_strong(defaultBatchSize, batchSize)) {
+ ALOGE("%s: change batch size from %zu to %zu dynamically is not supported",
+ __FUNCTION__, defaultBatchSize, batchSize);
+ return INVALID_OPERATION;
+ }
+
return OK;
}
void Camera3OutputStream::returnPrefetchedBuffersLocked() {
- std::lock_guard<std::mutex> batchLock(mBatchLock);
- if (mBatchedBuffers.size() != 0) {
- ALOGW("%s: %zu extra prefetched buffers detected. Returning",
- __FUNCTION__, mBatchedBuffers.size());
+ std::vector<Surface::BatchBuffer> batchedBuffers;
- mConsumer->cancelBuffers(mBatchedBuffers);
- mBatchedBuffers.clear();
+ {
+ std::lock_guard<std::mutex> batchLock(mBatchLock);
+ if (mBatchedBuffers.size() != 0) {
+ ALOGW("%s: %zu extra prefetched buffers detected. Returning",
+ __FUNCTION__, mBatchedBuffers.size());
+ batchedBuffers = std::move(mBatchedBuffers);
+ }
+ }
+
+ if (batchedBuffers.size() > 0) {
+ mConsumer->cancelBuffers(batchedBuffers);
}
}
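setBatchSize() now uses compare_exchange_strong to permit exactly one transition away from the default batch size of 1, which in turn lets the hot dequeue path read mBatchSize without holding mBatchLock. The one-shot behavior in isolation:

    #include <atomic>
    #include <cstdio>

    int main() {
        std::atomic_size_t batchSize{1};

        // First caller: the expected value 1 matches, so the exchange succeeds.
        size_t expected = 1;
        std::printf("first set ok: %d\n", batchSize.compare_exchange_strong(expected, 8));

        // Second caller: expected is overwritten with the current value (8) and
        // the exchange fails, i.e. dynamic resizing is rejected.
        expected = 1;
        std::printf("second set ok: %d (current %zu)\n",
                batchSize.compare_exchange_strong(expected, 4), batchSize.load());
        return 0;
    }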
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 00e4854..ad03b53 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -312,15 +312,14 @@
bool mDropBuffers;
- // Protecting batch states below, must be acquired after mLock
- std::mutex mBatchLock;
// The batch size for buffer operation
- size_t mBatchSize = 1;
+ std::atomic_size_t mBatchSize = 1;
+ // Protecting batch states below, must be acquired after mLock
+ std::mutex mBatchLock;
// Prefetched buffers (ready to be handed to client)
std::vector<Surface::BatchBuffer> mBatchedBuffers;
-
// ---- End of mBatchLock protected scope ----
/**
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index 5a7166c..c1fcfb8 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -36,6 +36,48 @@
bool SessionConfigurationUtils::IS_PERF_CLASS = (PERF_CLASS_LEVEL == SDK_VERSION_S);
+camera3::Size SessionConfigurationUtils::getMaxJpegResolution(const CameraMetadata &metadata,
+ bool ultraHighResolution) {
+ int32_t maxJpegWidth = 0, maxJpegHeight = 0;
+ const int STREAM_CONFIGURATION_SIZE = 4;
+ const int STREAM_FORMAT_OFFSET = 0;
+ const int STREAM_WIDTH_OFFSET = 1;
+ const int STREAM_HEIGHT_OFFSET = 2;
+ const int STREAM_IS_INPUT_OFFSET = 3;
+
+ int32_t scalerSizesTag = ultraHighResolution ?
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION :
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+ camera_metadata_ro_entry_t availableStreamConfigs =
+ metadata.find(scalerSizesTag);
+ if (availableStreamConfigs.count == 0 ||
+ availableStreamConfigs.count % STREAM_CONFIGURATION_SIZE != 0) {
+ return camera3::Size(0, 0);
+ }
+
+ // Get max jpeg size (area-wise).
+ for (size_t i = 0; i < availableStreamConfigs.count; i += STREAM_CONFIGURATION_SIZE) {
+ int32_t format = availableStreamConfigs.data.i32[i + STREAM_FORMAT_OFFSET];
+ int32_t width = availableStreamConfigs.data.i32[i + STREAM_WIDTH_OFFSET];
+ int32_t height = availableStreamConfigs.data.i32[i + STREAM_HEIGHT_OFFSET];
+ int32_t isInput = availableStreamConfigs.data.i32[i + STREAM_IS_INPUT_OFFSET];
+ if (isInput == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
+ && format == HAL_PIXEL_FORMAT_BLOB &&
+ (width * height > maxJpegWidth * maxJpegHeight)) {
+ maxJpegWidth = width;
+ maxJpegHeight = height;
+ }
+ }
+
+ return camera3::Size(maxJpegWidth, maxJpegHeight);
+}
+
+size_t SessionConfigurationUtils::getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
+ camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize) {
+ return (uhrMaxJpegSize.width * uhrMaxJpegSize.height) /
+ (defaultMaxJpegSize.width * defaultMaxJpegSize.height) * defaultMaxJpegBufferSize;
+}
+
void StreamConfiguration::getStreamConfigurations(
const CameraMetadata &staticInfo, int configuration,
std::unordered_map<int, std::vector<StreamConfiguration>> *scm) {
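getUHRMaxJpegBufferSize() scales the default max buffer size by the area ratio of the two max resolutions; the integer division runs before the multiplication, so the ratio truncates to a whole factor. Worked with illustrative sizes (a 108MP max-resolution mode vs a 12MP default mode):

    #include <cstdio>

    int main() {
        // Illustrative values only.
        const size_t uhrW = 12000, uhrH = 9000;
        const size_t defW = 4000, defH = 3000;
        const size_t defaultMaxJpegBufferSize = 12 * 1024 * 1024;

        // Same expression shape as getUHRMaxJpegBufferSize(): ratio first, then scale.
        size_t uhrBufferSize =
                (uhrW * uhrH) / (defW * defH) * defaultMaxJpegBufferSize;  // 9x default
        std::printf("UHR max jpeg buffer: %zu bytes\n", uhrBufferSize);
        return 0;
    }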
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 4e6eb2b..192e241 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -71,6 +71,12 @@
class SessionConfigurationUtils {
public:
+ static camera3::Size getMaxJpegResolution(const CameraMetadata &metadata,
+ bool ultraHighResolution);
+
+ static size_t getUHRMaxJpegBufferSize(camera3::Size uhrMaxJpegSize,
+ camera3::Size defaultMaxJpegSize, size_t defaultMaxJpegBufferSize);
+
static int64_t euclidDistSquare(int32_t x0, int32_t y0, int32_t x1, int32_t y1);
// Find the closest dimensions for a given format in available stream configurations with
diff --git a/services/mediametrics/statsd_codec.cpp b/services/mediametrics/statsd_codec.cpp
index 065c594..46cbdc8 100644
--- a/services/mediametrics/statsd_codec.cpp
+++ b/services/mediametrics/statsd_codec.cpp
@@ -34,6 +34,7 @@
#include "cleaner.h"
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
#include "iface_statsd.h"
@@ -227,6 +228,7 @@
std::string sessionId;
if (item->getString("android.media.mediacodec.log-session-id", &sessionId)) {
+ sessionId = mediametrics::stringutils::sanitizeLogSessionId(sessionId);
metrics_proto.set_log_session_id(sessionId);
}
AStatsEvent_writeString(event, codec.c_str());
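The exact scrubbing rules live in mediametrics::stringutils::sanitizeLogSessionId(); the hypothetical sanitizer below only conveys the general shape (the charset and length policy are assumptions, not the real implementation):

    #include <cctype>
    #include <cstdio>
    #include <string>

    // Hypothetical: keep only alphanumeric characters and cap the length.
    // The real mediametrics::stringutils::sanitizeLogSessionId may differ.
    static std::string sanitizeLogSessionIdSketch(const std::string& in) {
        std::string out;
        for (char c : in) {
            if (std::isalnum(static_cast<unsigned char>(c))) out += c;
            if (out.size() == 16) break;
        }
        return out;
    }

    int main() {
        std::printf("%s\n", sanitizeLogSessionIdSketch("abc-123/DEF!?").c_str());
        return 0;
    }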
diff --git a/services/mediametrics/statsd_extractor.cpp b/services/mediametrics/statsd_extractor.cpp
index 4ac5621..bcf2e0a 100644
--- a/services/mediametrics/statsd_extractor.cpp
+++ b/services/mediametrics/statsd_extractor.cpp
@@ -32,6 +32,7 @@
#include <statslog.h>
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
#include "iface_statsd.h"
@@ -85,6 +86,7 @@
std::string log_session_id;
if (item->getString("android.media.mediaextractor.logSessionId", &log_session_id)) {
+ log_session_id = mediametrics::stringutils::sanitizeLogSessionId(log_session_id);
metrics_proto.set_log_session_id(log_session_id);
}
diff --git a/services/mediametrics/statsd_mediaparser.cpp b/services/mediametrics/statsd_mediaparser.cpp
index af2946b..921b320 100644
--- a/services/mediametrics/statsd_mediaparser.cpp
+++ b/services/mediametrics/statsd_mediaparser.cpp
@@ -31,6 +31,7 @@
#include <statslog.h>
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include "frameworks/proto_logging/stats/enums/stats/mediametrics/mediametrics.pb.h"
#include "iface_statsd.h"
@@ -80,6 +81,7 @@
std::string logSessionId;
item->getString("android.media.mediaparser.logSessionId", &logSessionId);
+ logSessionId = mediametrics::stringutils::sanitizeLogSessionId(logSessionId);
int result = android::util::stats_write(android::util::MEDIAMETRICS_MEDIAPARSER_REPORTED,
timestamp_nanos,
diff --git a/services/mediametrics/statsd_recorder.cpp b/services/mediametrics/statsd_recorder.cpp
index 1b312b5..b29ad73 100644
--- a/services/mediametrics/statsd_recorder.cpp
+++ b/services/mediametrics/statsd_recorder.cpp
@@ -32,6 +32,7 @@
#include <statslog.h>
#include "MediaMetricsService.h"
+#include "StringUtils.h"
#include "frameworks/proto_logging/stats/message/mediametrics_message.pb.h"
#include "iface_statsd.h"
@@ -58,6 +59,7 @@
// string kRecorderLogSessionId = "android.media.mediarecorder.log-session-id";
std::string log_session_id;
if (item->getString("android.media.mediarecorder.log-session-id", &log_session_id)) {
+ log_session_id = mediametrics::stringutils::sanitizeLogSessionId(log_session_id);
metrics_proto.set_log_session_id(log_session_id);
}
// string kRecorderAudioMime = "android.media.mediarecorder.audio.mime";
diff --git a/services/mediatranscoding/MediaTranscodingService.cpp b/services/mediatranscoding/MediaTranscodingService.cpp
index e387800..2a20981 100644
--- a/services/mediatranscoding/MediaTranscodingService.cpp
+++ b/services/mediatranscoding/MediaTranscodingService.cpp
@@ -132,10 +132,10 @@
void MediaTranscodingService::instantiate() {
std::shared_ptr<MediaTranscodingService> service =
::ndk::SharedRefBase::make<MediaTranscodingService>();
- binder_status_t status =
- AServiceManager_addService(service->asBinder().get(), getServiceName());
- if (status != STATUS_OK) {
- return;
+ if (__builtin_available(android __TRANSCODING_MIN_API__, *)) {
+ // Once the service is started, we want it to stay up even if the client side perished.
+ AServiceManager_forceLazyServicesPersist(true /*persist*/);
+ (void)AServiceManager_registerLazyService(service->asBinder().get(), getServiceName());
}
}
diff --git a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
index 20e4bfb..0cb2fad 100644
--- a/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
+++ b/services/mediatranscoding/tests/MediaTranscodingServiceTestHelper.h
@@ -481,7 +481,7 @@
// Need thread pool to receive callbacks, otherwise oneway callbacks are
// silently ignored.
ABinderProcess_startThreadPool();
- ::ndk::SpAIBinder binder(AServiceManager_getService("media.transcoding"));
+ ::ndk::SpAIBinder binder(AServiceManager_waitForService("media.transcoding"));
mService = IMediaTranscodingService::fromBinder(binder);
if (mService == nullptr) {
ALOGE("Failed to connect to the media.trascoding service.");
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 117218a..a08098c 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -237,6 +237,12 @@
result = AAUDIO_ERROR_INTERNAL;
goto error;
}
+ // Call into the HAL to make sure the transport FD could be closed by binder.
+ // This is a tricky workaround for a problem in Binder.
+ // TODO:[b/192048842] When that problem is fixed we may be able to remove or change this code.
+ struct audio_mmap_position position;
+ mMmapStream->getMmapPosition(&position);
+
mFramesPerBurst = mMmapBufferinfo.burst_size_frames;
setFormat(config.format);
setSampleRate(config.sample_rate);
diff --git a/services/oboeservice/AAudioServiceEndpointShared.cpp b/services/oboeservice/AAudioServiceEndpointShared.cpp
index 0d453cf..5fbcadb 100644
--- a/services/oboeservice/AAudioServiceEndpointShared.cpp
+++ b/services/oboeservice/AAudioServiceEndpointShared.cpp
@@ -126,9 +126,9 @@
// Prevent this object from getting deleted before the thread has a chance to create
// its strong pointer. Assume the thread will call decStrong().
this->incStrong(nullptr);
- aaudio_result_t result = getStreamInternal()->createThread_l(periodNanos,
- aaudio_endpoint_thread_proc,
- this);
+ aaudio_result_t result = getStreamInternal()->createThread(periodNanos,
+ aaudio_endpoint_thread_proc,
+ this);
if (result != AAUDIO_OK) {
this->decStrong(nullptr); // Because the thread won't do it.
}