Merge "Spatializer: track scheduler statistics" into tm-dev
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index 05341bf..8088d06 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -375,6 +375,12 @@
return err;
}
+ String16 userTag;
+ if ((err = parcel->readString16(&userTag)) != OK) {
+ ALOGE("%s: Failed to read user tag!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
mCameraId = id;
mFacing = facing;
mNewCameraState = newCameraState;
@@ -389,6 +395,7 @@
mResultErrorCount = resultErrorCount;
mDeviceError = deviceError;
mStreamStats = std::move(streamStats);
+ mUserTag = userTag;
return OK;
}
@@ -471,6 +478,10 @@
return err;
}
+ if ((err = parcel->writeString16(mUserTag)) != OK) {
+ ALOGE("%s: Failed to write user tag!", __FUNCTION__);
+ return err;
+ }
return OK;
}
diff --git a/camera/VendorTagDescriptor.cpp b/camera/VendorTagDescriptor.cpp
index 24fa912..b37803a 100644
--- a/camera/VendorTagDescriptor.cpp
+++ b/camera/VendorTagDescriptor.cpp
@@ -760,7 +760,7 @@
Mutex::Autolock al(sLock);
if (sGlobalVendorTagDescriptorCache == NULL) {
ALOGE("%s: Vendor tag descriptor cache not initialized.", __FUNCTION__);
- return VENDOR_TAG_NAME_ERR;
+ return VENDOR_TAG_TYPE_ERR;
}
return sGlobalVendorTagDescriptorCache->getTagType(tag, id);
}
diff --git a/camera/camera2/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index ebc09d7..7a8a4ba 100644
--- a/camera/camera2/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -146,6 +146,20 @@
mSurfaceIdxList.push_back(surfaceIdx);
}
+ int32_t hasUserTag;
+ if ((err = parcel->readInt32(&hasUserTag)) != OK) {
+ ALOGE("%s: Failed to read user tag availability flag", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ if (hasUserTag) {
+ String16 userTag;
+ if ((err = parcel->readString16(&userTag)) != OK) {
+ ALOGE("%s: Failed to read user tag!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ mUserTag = String8(userTag).c_str();
+ }
+
return OK;
}
@@ -213,6 +227,14 @@
return err;
}
}
+
+ if (mUserTag.empty()) {
+ parcel->writeInt32(0);
+ } else {
+ parcel->writeInt32(1);
+ parcel->writeString16(String16(mUserTag.c_str()));
+ }
+
return OK;
}
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 15f5622..e1ec6cf 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -136,6 +136,7 @@
// Whether the device runs into an error state
bool mDeviceError;
std::vector<CameraStreamStats> mStreamStats;
+ String16 mUserTag;
// Constructors
CameraSessionStats();
diff --git a/camera/include/camera/camera2/CaptureRequest.h b/camera/include/camera/camera2/CaptureRequest.h
index 506abab..28dbc7c 100644
--- a/camera/include/camera/camera2/CaptureRequest.h
+++ b/camera/include/camera/camera2/CaptureRequest.h
@@ -63,6 +63,8 @@
void* mContext; // arbitrary user context from NDK apps, null for java apps
+ std::string mUserTag; // The string representation of the object passed into setTag.
+
/**
* Keep impl up-to-date with CaptureRequest.java in frameworks/base
*/
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 4891034..3f7ff8b 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -521,6 +521,14 @@
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.</p>
+ * <p>When setting the AE metering regions, the application must consider the additional
+ * crop resulted from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AE regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.</p>
* <p>Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same aeRegions values at different
@@ -722,6 +730,14 @@
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.</p>
+ * <p>When setting the AF metering regions, the application must consider the additional
+ * crop resulted from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AF regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.</p>
* <p>Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same afRegions values at different
@@ -917,6 +933,14 @@
* region and output only the intersection rectangle as the metering region in the result
* metadata. If the region is entirely outside the crop region, it will be ignored and
* not reported in the result metadata.</p>
+ * <p>When setting the AWB metering regions, the application must consider the additional
+ * crop resulted from the aspect ratio differences between the preview stream and
+ * ACAMERA_SCALER_CROP_REGION. For example, if the ACAMERA_SCALER_CROP_REGION is the full
+ * active array size with 4:3 aspect ratio, and the preview stream is 16:9,
+ * the boundary of AWB regions will be [0, y_crop] and
+ * [active_width, active_height - 2 * y_crop] rather than [0, 0] and
+ * [active_width, active_height], where y_crop is the additional crop due to aspect ratio
+ * mismatch.</p>
* <p>Starting from API level 30, the coordinate system of activeArraySize or
* preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not
* pre-zoom field of view. This means that the same awbRegions values at different
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 2b9ec7d..3302dd3 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -1803,9 +1803,16 @@
if (tryAndReportOnError(setRunning) != OK) {
return;
}
+
+ err2 = mChannel->requestInitialInputBuffers();
+
+ if (err2 != OK) {
+ ALOGE("Initial request for Input Buffers failed");
+ mCallback->onError(err2,ACTION_CODE_FATAL);
+ return;
+ }
mCallback->onStartCompleted();
- (void)mChannel->requestInitialInputBuffers();
}
void CCodec::initiateShutdown(bool keepComponentAllocated) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 870660e..674714f 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -16,7 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "CCodecBufferChannel"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <algorithm>
#include <atomic>
@@ -326,6 +328,8 @@
}
c2_status_t err = C2_OK;
if (!items.empty()) {
+ ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+ "CCodecBufferChannel::queue(%s@ts=%lld)", mName, (long long)timeUs).c_str());
{
Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
PipelineWatcher::Clock::time_point now = PipelineWatcher::Clock::now();
@@ -923,6 +927,11 @@
hdr.validTypes |= HdrMetadata::CTA861_3;
hdr.cta8613 = cta861_meta;
}
+
+ // Reset hdrStaticInfo when neither SMPTE2086 nor CTA861_3 carried valid data
+ if (!(hdr.validTypes & (HdrMetadata::SMPTE2086 | HdrMetadata::CTA861_3))) {
+ hdrStaticInfo.reset();
+ }
}
if (hdrDynamicInfo
&& hdrDynamicInfo->m.type_ == C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40) {
@@ -1908,6 +1917,8 @@
// When using input surface we need to restore the original input timestamp.
timestamp = work->input.ordinal.customOrdinal;
}
+ ScopedTrace trace(ATRACE_TAG, android::base::StringPrintf(
+ "CCodecBufferChannel::onWorkDone(%s@ts=%lld)", mName, timestamp.peekll()).c_str());
ALOGV("[%s] onWorkDone: input %lld, codec %lld => output %lld => %lld",
mName,
work->input.ordinal.customOrdinal.peekll(),
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 2d3c70a..c2405e8 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -16,7 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2Buffer"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <aidl/android/hardware/graphics/common/Cta861_3.h>
#include <aidl/android/hardware/graphics/common/Smpte2086.h>
@@ -229,6 +231,7 @@
mAllocatedDepth(0),
mBackBufferSize(0),
mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+ ATRACE_CALL();
if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
mClientColorFormat = COLOR_FormatYUV420Flexible;
}
@@ -581,6 +584,7 @@
* Copy C2GraphicView to MediaImage2.
*/
status_t copyToMediaImage() {
+ ATRACE_CALL();
if (mInitCheck != OK) {
return mInitCheck;
}
@@ -619,7 +623,9 @@
const sp<AMessage> &format,
const std::shared_ptr<C2GraphicBlock> &block,
std::function<sp<ABuffer>(size_t)> alloc) {
+ ATRACE_BEGIN("GraphicBlockBuffer::Allocate block->map()");
C2GraphicView view(block->map().get());
+ ATRACE_END();
if (view.error() != C2_OK) {
ALOGD("C2GraphicBlock::map failed: %d", view.error());
return nullptr;
@@ -664,6 +670,7 @@
}
std::shared_ptr<C2Buffer> GraphicBlockBuffer::asC2Buffer() {
+ ATRACE_CALL();
uint32_t width = mView.width();
uint32_t height = mView.height();
if (!mWrapped) {
@@ -752,8 +759,10 @@
ALOGD("C2Buffer precond fail");
return nullptr;
}
+ ATRACE_BEGIN("ConstGraphicBlockBuffer::Allocate block->map()");
std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
buffer->data().graphicBlocks()[0].map().get()));
+ ATRACE_END();
std::unique_ptr<const C2GraphicView> holder;
GraphicView2MediaImageConverter converter(*view, format, false /* copy */);
@@ -854,11 +863,13 @@
return false;
}
+ ATRACE_BEGIN("ConstGraphicBlockBuffer::canCopy block->map()");
GraphicView2MediaImageConverter converter(
buffer->data().graphicBlocks()[0].map().get(),
// FIXME: format() is not const, but we cannot change it, so do a const cast here
const_cast<ConstGraphicBlockBuffer *>(this)->format(),
true /* copy */);
+ ATRACE_END();
if (converter.initCheck() != OK) {
ALOGD("ConstGraphicBlockBuffer::canCopy: converter init failed: %d", converter.initCheck());
return false;
@@ -973,16 +984,47 @@
return sMapper;
}
-class NativeHandleDeleter {
+class Gralloc4Buffer {
public:
- explicit NativeHandleDeleter(native_handle_t *handle) : mHandle(handle) {}
- ~NativeHandleDeleter() {
- if (mHandle) {
- native_handle_delete(mHandle);
+ Gralloc4Buffer(const C2Handle *const handle) : mBuffer(nullptr) {
+ sp<IMapper4> mapper = GetMapper4();
+ if (!mapper) {
+ return;
+ }
+ // Unwrap raw buffer handle from the C2Handle
+ native_handle_t *nh = UnwrapNativeCodec2GrallocHandle(handle);
+ if (!nh) {
+ return;
+ }
+ // Import the raw handle so IMapper can use the buffer. The imported
+ // handle must be freed when the client is done with the buffer.
+ mapper->importBuffer(
+ hardware::hidl_handle(nh),
+ [&](const Error4 &error, void *buffer) {
+ if (error == Error4::NONE) {
+ mBuffer = buffer;
+ }
+ });
+
+ // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
+ // does not clone the fds. Thus we need to delete the handle
+ // without closing it.
+ native_handle_delete(nh);
+ }
+
+ ~Gralloc4Buffer() {
+ sp<IMapper4> mapper = GetMapper4();
+ if (mapper && mBuffer) {
+ // Free the imported buffer handle. This does not release the
+ // underlying buffer itself.
+ mapper->freeBuffer(mBuffer);
}
}
+
+ void *get() const { return mBuffer; }
+ operator bool() const { return (mBuffer != nullptr); }
private:
- native_handle_t *mHandle;
+ void *mBuffer;
};
} // namspace
@@ -992,24 +1034,15 @@
std::shared_ptr<C2StreamHdrStaticMetadataInfo::input> *staticInfo,
std::shared_ptr<C2StreamHdrDynamicMetadataInfo::input> *dynamicInfo) {
c2_status_t err = C2_OK;
- native_handle_t *nativeHandle = UnwrapNativeCodec2GrallocHandle(handle);
- if (nativeHandle == nullptr) {
- // Nothing to do
- return err;
- }
- // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
- // does not clone the fds. Thus we need to delete the handle
- // without closing it when going out of scope.
- // NativeHandle cannot solve this problem, as it would close and
- // delete the handle, while we need delete only.
- NativeHandleDeleter nhd(nativeHandle);
sp<IMapper4> mapper = GetMapper4();
- if (!mapper) {
+ Gralloc4Buffer buffer(handle);
+ if (!mapper || !buffer) {
// Gralloc4 not supported; nothing to do
return err;
}
Error4 mapperErr = Error4::NONE;
if (staticInfo) {
+ ALOGV("Grabbing static HDR info from gralloc4 metadata");
staticInfo->reset(new C2StreamHdrStaticMetadataInfo::input(0u));
memset(&(*staticInfo)->mastering, 0, sizeof((*staticInfo)->mastering));
(*staticInfo)->maxCll = 0;
@@ -1038,7 +1071,7 @@
mapperErr = Error4::BAD_VALUE;
}
};
- Return<void> ret = mapper->get(nativeHandle, MetadataType_Smpte2086, cb);
+ Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2086, cb);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (mapperErr != Error4::NONE) {
@@ -1059,7 +1092,7 @@
mapperErr = Error4::BAD_VALUE;
}
};
- ret = mapper->get(nativeHandle, MetadataType_Cta861_3, cb);
+ ret = mapper->get(buffer.get(), MetadataType_Cta861_3, cb);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (mapperErr != Error4::NONE) {
@@ -1067,6 +1100,7 @@
}
}
if (dynamicInfo) {
+ ALOGV("Grabbing dynamic HDR info from gralloc4 metadata");
dynamicInfo->reset();
IMapper4::get_cb cb = [&mapperErr, dynamicInfo](Error4 err, const hidl_vec<uint8_t> &vec) {
mapperErr = err;
@@ -1080,7 +1114,7 @@
vec.size(), 0u, C2Config::HDR_DYNAMIC_METADATA_TYPE_SMPTE_2094_40);
memcpy((*dynamicInfo)->m.data, vec.data(), vec.size());
};
- Return<void> ret = mapper->get(nativeHandle, MetadataType_Smpte2094_40, cb);
+ Return<void> ret = mapper->get(buffer.get(), MetadataType_Smpte2094_40, cb);
if (!ret.isOk() || mapperErr != Error4::NONE) {
dynamicInfo->reset();
}
@@ -1094,21 +1128,14 @@
const std::shared_ptr<const C2StreamHdrDynamicMetadataInfo::output> &dynamicInfo,
const C2Handle *const handle) {
c2_status_t err = C2_OK;
- native_handle_t *nativeHandle = UnwrapNativeCodec2GrallocHandle(handle);
- if (nativeHandle == nullptr) {
- // Nothing to do
- return err;
- }
- // TRICKY: UnwrapNativeCodec2GrallocHandle creates a new handle but
- // does not clone the fds. Thus we need to delete the handle
- // without closing it when going out of scope.
- NativeHandleDeleter nhd(nativeHandle);
sp<IMapper4> mapper = GetMapper4();
- if (!mapper) {
+ Gralloc4Buffer buffer(handle);
+ if (!mapper || !buffer) {
// Gralloc4 not supported; nothing to do
return err;
}
if (staticInfo && *staticInfo) {
+ ALOGV("Setting static HDR info as gralloc4 metadata");
std::optional<Smpte2086> smpte2086 = Smpte2086{
{staticInfo->mastering.red.x, staticInfo->mastering.red.y},
{staticInfo->mastering.green.x, staticInfo->mastering.green.y},
@@ -1118,8 +1145,17 @@
staticInfo->mastering.minLuminance,
};
hidl_vec<uint8_t> vec;
- if (gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
- Return<Error4> ret = mapper->set(nativeHandle, MetadataType_Smpte2086, vec);
+ if (0.0 <= smpte2086->primaryRed.x && smpte2086->primaryRed.x <= 1.0
+ && 0.0 <= smpte2086->primaryRed.y && smpte2086->primaryRed.y <= 1.0
+ && 0.0 <= smpte2086->primaryGreen.x && smpte2086->primaryGreen.x <= 1.0
+ && 0.0 <= smpte2086->primaryGreen.y && smpte2086->primaryGreen.y <= 1.0
+ && 0.0 <= smpte2086->primaryBlue.x && smpte2086->primaryBlue.x <= 1.0
+ && 0.0 <= smpte2086->primaryBlue.y && smpte2086->primaryBlue.y <= 1.0
+ && 0.0 <= smpte2086->whitePoint.x && smpte2086->whitePoint.x <= 1.0
+ && 0.0 <= smpte2086->whitePoint.y && smpte2086->whitePoint.y <= 1.0
+ && 0.0 <= smpte2086->maxLuminance && 0.0 <= smpte2086->minLuminance
+ && gralloc4::encodeSmpte2086(smpte2086, &vec) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Smpte2086, vec);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (ret != Error4::NONE) {
@@ -1130,8 +1166,9 @@
staticInfo->maxCll,
staticInfo->maxFall,
};
- if (gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
- Return<Error4> ret = mapper->set(nativeHandle, MetadataType_Cta861_3, vec);
+ if (0.0 <= cta861_3->maxContentLightLevel && 0.0 <= cta861_3->maxFrameAverageLightLevel
+ && gralloc4::encodeCta861_3(cta861_3, &vec) == OK) {
+ Return<Error4> ret = mapper->set(buffer.get(), MetadataType_Cta861_3, vec);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (ret != Error4::NONE) {
@@ -1139,7 +1176,8 @@
}
}
}
- if (dynamicInfo && *dynamicInfo) {
+ if (dynamicInfo && *dynamicInfo && dynamicInfo->flexCount() > 0) {
+ ALOGV("Setting dynamic HDR info as gralloc4 metadata");
hidl_vec<uint8_t> vec;
vec.resize(dynamicInfo->flexCount());
memcpy(vec.data(), dynamicInfo->m.data, dynamicInfo->flexCount());
@@ -1153,7 +1191,7 @@
break;
}
if (metadataType) {
- Return<Error4> ret = mapper->set(nativeHandle, *metadataType, vec);
+ Return<Error4> ret = mapper->set(buffer.get(), *metadataType, vec);
if (!ret.isOk()) {
err = C2_REFUSED;
} else if (ret != Error4::NONE) {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index bff9db5..7fc4c27 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -16,7 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Codec2BufferUtils"
+#define ATRACE_TAG ATRACE_TAG_VIDEO
#include <utils/Log.h>
+#include <utils/Trace.h>
#include <libyuv.h>
@@ -36,8 +38,8 @@
namespace {
/**
- * A flippable, optimizable memcpy. Constructs such as (from ? src : dst) do not work as the results are
- * always const.
+ * A flippable, optimizable memcpy. Constructs such as (from ? src : dst)
+ * do not work as the results are always const.
*/
template<bool ToA, size_t S>
struct MemCopier {
@@ -139,15 +141,18 @@
if (IsNV12(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
return OK;
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -155,15 +160,18 @@
}
} else if (IsNV21(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
return OK;
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -171,22 +179,26 @@
}
} else if (IsI420(view)) {
if (IsNV12(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(img)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
return OK;
}
}
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
return _ImageCopy<true>(view, img, imgBase);
}
@@ -210,15 +222,18 @@
int height = view.crop().height;
if (IsNV12(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV12");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width, height / 2);
return OK;
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->NV21");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_u, src_stride_u,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV12->I420");
if (!libyuv::NV12ToI420(src_y, src_stride_y, src_u, src_stride_u, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -226,15 +241,18 @@
}
} else if (IsNV21(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV12");
if (!libyuv::NV21ToNV12(src_y, src_stride_y, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->NV21");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width, height / 2);
return OK;
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: NV21->I420");
if (!libyuv::NV21ToI420(src_y, src_stride_y, src_v, src_stride_v, dst_y, dst_stride_y,
dst_u, dst_stride_u, dst_v, dst_stride_v, width, height)) {
return OK;
@@ -242,22 +260,26 @@
}
} else if (IsI420(img)) {
if (IsNV12(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV12");
if (!libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_u, dst_stride_u, width, height)) {
return OK;
}
} else if (IsNV21(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->NV21");
if (!libyuv::I420ToNV21(src_y, src_stride_y, src_u, src_stride_u, src_v, src_stride_v,
dst_y, dst_stride_y, dst_v, dst_stride_v, width, height)) {
return OK;
}
} else if (IsI420(view)) {
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: I420->I420");
libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, width, height);
libyuv::CopyPlane(src_u, src_stride_u, dst_u, dst_stride_u, width / 2, height / 2);
libyuv::CopyPlane(src_v, src_stride_v, dst_v, dst_stride_v, width / 2, height / 2);
return OK;
}
}
+ ScopedTrace trace(ATRACE_TAG, "ImageCopy: generic");
return _ImageCopy<false>(view, img, imgBase);
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 50a3f0d..2f3792e 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1460,9 +1460,14 @@
if (mDomain == DOMAIN_VIDEO) {
// video codec needs dedicated looper
if (mCodecLooper == NULL) {
+ status_t err = OK;
mCodecLooper = new ALooper;
mCodecLooper->setName("CodecLooper");
- mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ err = mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+ if (OK != err) {
+ ALOGE("Codec Looper failed to start");
+ return err;
+ }
}
mCodecLooper->registerHandler(mCodec);
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 701206a..1f0e095 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -518,6 +518,10 @@
metadataRequestList.push_back(physicalSettingsList);
surfaceMapList.push_back(surfaceMap);
+
+ if (!request.mUserTag.empty()) {
+ mUserTag = request.mUserTag;
+ }
}
mRequestIdCounter++;
@@ -1964,7 +1968,8 @@
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
- Camera2ClientBase::notifyIdle(requestCount, resultErrorCount, deviceError, streamStats);
+ Camera2ClientBase::notifyIdleWithUserTag(requestCount, resultErrorCount, deviceError,
+ streamStats, mUserTag);
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 9d1deb1..3af0b80 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -349,6 +349,9 @@
// Override the camera characteristics for performance class primary cameras.
bool mOverrideForPerfClass;
+
+ // The string representation of the object passed into CaptureRequest.setTag.
+ std::string mUserTag;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 6ed3c02..0ac047a 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -332,9 +332,10 @@
}
template <typename TClientBase>
-void Camera2ClientBase<TClientBase>::notifyIdle(
+void Camera2ClientBase<TClientBase>::notifyIdleWithUserTag(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
- const std::vector<hardware::CameraStreamStats>& streamStats) {
+ const std::vector<hardware::CameraStreamStats>& streamStats,
+ const std::string& userTag) {
if (mDeviceActive) {
status_t res = TClientBase::finishCameraStreamingOps();
if (res != OK) {
@@ -342,7 +343,7 @@
TClientBase::mCameraIdStr.string(), res);
}
CameraServiceProxyWrapper::logIdle(TClientBase::mCameraIdStr,
- requestCount, resultErrorCount, deviceError, streamStats);
+ requestCount, resultErrorCount, deviceError, userTag, streamStats);
}
mDeviceActive = false;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 6b90f5e..9cba2f1 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -75,9 +75,9 @@
const CaptureResultExtras& resultExtras);
// Returns errors on app ops permission failures
virtual status_t notifyActive(float maxPreviewFps);
- virtual void notifyIdle(int64_t requestCount, int64_t resultErrorCount,
- bool deviceError,
- const std::vector<hardware::CameraStreamStats>& streamStats);
+ virtual void notifyIdle(int64_t /*requestCount*/, int64_t /*resultErrorCount*/,
+ bool /*deviceError*/,
+ const std::vector<hardware::CameraStreamStats>&) {}
virtual void notifyShutter(const CaptureResultExtras& resultExtras,
nsecs_t timestamp);
virtual void notifyAutoFocus(uint8_t newState, int triggerId);
@@ -88,6 +88,11 @@
virtual void notifyRequestQueueEmpty();
virtual void notifyRepeatingRequestError(long lastFrameNumber);
+ void notifyIdleWithUserTag(int64_t requestCount, int64_t resultErrorCount,
+ bool deviceError,
+ const std::vector<hardware::CameraStreamStats>& streamStats,
+ const std::string& userTag);
+
int getCameraId() const;
const sp<CameraDeviceBase>&
getCameraDevice();
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index b822178..37f9227 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -379,8 +379,6 @@
nsecs_t captureTime = (mSyncToDisplay ? readoutTimestamp : timestamp) - mTimestampOffset;
nsecs_t presentTime = mSyncToDisplay ?
syncTimestampToDisplayLocked(captureTime) : captureTime;
- mLastCaptureTime = captureTime;
- mLastPresentTime = presentTime;
setTransform(transform, true/*mayChangeMirror*/);
res = native_window_set_buffers_timestamp(mConsumer.get(), presentTime);
@@ -1267,6 +1265,8 @@
if (res != OK) {
ALOGE("%s: Stream %d: Error getting latest vsync event data: %s (%d)",
__FUNCTION__, mId, strerror(-res), res);
+ mLastCaptureTime = t;
+ mLastPresentTime = t;
return t;
}
@@ -1286,7 +1286,7 @@
}
nsecs_t idealPresentT = t + mCaptureToPresentOffset;
- nsecs_t expectedPresentT = 0;
+ nsecs_t expectedPresentT = mLastPresentTime;
nsecs_t minDiff = INT64_MAX;
// Derive minimum intervals between presentation times based on minimal
// expected duration.
@@ -1306,7 +1306,14 @@
minDiff = std::abs(vsyncTime.expectedPresentationTime - idealPresentT);
}
}
- return expectedPresentT;
+ mLastCaptureTime = t;
+ mLastPresentTime = expectedPresentT;
+
+ // Move the expected presentation time back by 1/3 of the frame interval to
+ // mitigate time drift. Due to drift, if the expected presentation time is
+ // used directly, two consecutive expected presentation times often fall
+ // into the same VSYNC interval.
+ return expectedPresentT - vsyncEventData.frameInterval/3;
}
}; // namespace camera3
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 82d58e0..a00b221 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -80,6 +80,7 @@
void CameraServiceProxyWrapper::CameraSessionStatsWrapper::onIdle(
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::string& userTag,
const std::vector<hardware::CameraStreamStats>& streamStats) {
Mutex::Autolock l(mLock);
@@ -87,6 +88,7 @@
mSessionStats.mRequestCount = requestCount;
mSessionStats.mResultErrorCount = resultErrorCount;
mSessionStats.mDeviceError = deviceError;
+ mSessionStats.mUserTag = String16(userTag.c_str());
mSessionStats.mStreamStats = streamStats;
updateProxyDeviceState(mSessionStats);
@@ -177,6 +179,7 @@
void CameraServiceProxyWrapper::logIdle(const String8& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::string& userTag,
const std::vector<hardware::CameraStreamStats>& streamStats) {
std::shared_ptr<CameraSessionStatsWrapper> sessionStats;
{
@@ -190,8 +193,9 @@
return;
}
- ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d",
- __FUNCTION__, id.c_str(), requestCount, resultErrorCount, deviceError);
+ ALOGV("%s: id %s, requestCount %" PRId64 ", resultErrorCount %" PRId64 ", deviceError %d"
+ ", userTag %s", __FUNCTION__, id.c_str(), requestCount, resultErrorCount,
+ deviceError, userTag.c_str());
for (size_t i = 0; i < streamStats.size(); i++) {
ALOGV("%s: streamStats[%zu]: w %d h %d, requestedCount %" PRId64 ", dropCount %"
PRId64 ", startTimeMs %d" ,
@@ -200,7 +204,7 @@
streamStats[i].mStartLatencyMs);
}
- sessionStats->onIdle(requestCount, resultErrorCount, deviceError, streamStats);
+ sessionStats->onIdle(requestCount, resultErrorCount, deviceError, userTag, streamStats);
}
void CameraServiceProxyWrapper::logOpen(const String8& id, int facing,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index 037316d..6604aa1 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -50,6 +50,7 @@
void onStreamConfigured(int operatingMode, bool internalReconfig, int32_t latencyMs);
void onActive(float maxPreviewFps);
void onIdle(int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::string& userTag,
const std::vector<hardware::CameraStreamStats>& streamStats);
};
@@ -86,6 +87,7 @@
// Session state becomes idle
static void logIdle(const String8& id,
int64_t requestCount, int64_t resultErrorCount, bool deviceError,
+ const std::string& userTag,
const std::vector<hardware::CameraStreamStats>& streamStats);
// Ping camera service proxy for user update
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
index 7a7707c..c3aac72 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.cpp
@@ -60,6 +60,7 @@
mErrorResultCount = 0;
mCounterStopped = false;
mDeviceError = false;
+ mUserTag.clear();
for (auto& streamStats : mStatsMap) {
StreamStats& streamStat = streamStats.second;
streamStat.mRequestedFrameCount = 0;
diff --git a/services/camera/libcameraservice/utils/SessionStatsBuilder.h b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
index c23abb6..2936531 100644
--- a/services/camera/libcameraservice/utils/SessionStatsBuilder.h
+++ b/services/camera/libcameraservice/utils/SessionStatsBuilder.h
@@ -84,6 +84,7 @@
int64_t mErrorResultCount;
bool mCounterStopped;
bool mDeviceError;
+ std::string mUserTag;
// Map from stream id to stream statistics
std::map<int, StreamStats> mStatsMap;
};
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 1c09544..e0584df 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -483,7 +483,7 @@
}
}
if (info.cookie == 0 && client != nullptr) {
- info.cookie = addCookieAndLink_l(client->asBinder(),
+ info.cookie = addCookieAndLink_l(client,
new DeathNotifier(ref<ResourceManagerService>(), pid, clientId));
}
if (mObserverService != nullptr && !resourceAdded.empty()) {
@@ -592,7 +592,7 @@
onLastRemoved(it->second, info);
}
- removeCookieAndUnlink_l(info.client->asBinder(), info.cookie);
+ removeCookieAndUnlink_l(info.client, info.cookie);
if (mObserverService != nullptr && !info.resources.empty()) {
mObserverService->onResourceRemoved(info.uid, pid, info.resources);
@@ -812,7 +812,7 @@
return Status::fromServiceSpecificError(BAD_VALUE);
}
- uintptr_t cookie = addCookieAndLink_l(client->asBinder(),
+ uintptr_t cookie = addCookieAndLink_l(client,
new OverrideProcessInfoDeathNotifier(ref<ResourceManagerService>(), pid));
mProcessInfoOverrideMap.emplace(pid, ProcessInfoOverride{cookie, client});
@@ -820,23 +820,29 @@
return Status::ok();
}
-uintptr_t ResourceManagerService::addCookieAndLink_l(::ndk::SpAIBinder binder,
- const sp<DeathNotifier>& notifier) {
+uintptr_t ResourceManagerService::addCookieAndLink_l(
+ const std::shared_ptr<IResourceManagerClient>& client, const sp<DeathNotifier>& notifier) {
+ if (client == nullptr) {
+ return 0;
+ }
std::scoped_lock lock{sCookieLock};
uintptr_t cookie;
// Need to skip cookie 0 (if it wraps around). ResourceInfo has cookie initialized to 0
// indicating the death notifier is not created yet.
while ((cookie = ++sCookieCounter) == 0);
- AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+ AIBinder_linkToDeath(client->asBinder().get(), mDeathRecipient.get(), (void*)cookie);
sCookieToDeathNotifierMap.emplace(cookie, notifier);
return cookie;
}
-void ResourceManagerService::removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie) {
+void ResourceManagerService::removeCookieAndUnlink_l(
+ const std::shared_ptr<IResourceManagerClient>& client, uintptr_t cookie) {
std::scoped_lock lock{sCookieLock};
- AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), (void*)cookie);
+ if (client != nullptr) {
+ AIBinder_unlinkToDeath(client->asBinder().get(), mDeathRecipient.get(), (void*)cookie);
+ }
sCookieToDeathNotifierMap.erase(cookie);
}
@@ -854,7 +860,7 @@
mProcessInfo->removeProcessInfoOverride(pid);
- removeCookieAndUnlink_l(it->second.client->asBinder(), it->second.cookie);
+ removeCookieAndUnlink_l(it->second.client, it->second.cookie);
mProcessInfoOverrideMap.erase(pid);
}
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 6551371..c636a0f 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -166,8 +166,10 @@
void removeProcessInfoOverride(int pid);
void removeProcessInfoOverride_l(int pid);
- uintptr_t addCookieAndLink_l(::ndk::SpAIBinder binder, const sp<DeathNotifier>& notifier);
- void removeCookieAndUnlink_l(::ndk::SpAIBinder binder, uintptr_t cookie);
+ uintptr_t addCookieAndLink_l(const std::shared_ptr<IResourceManagerClient>& client,
+ const sp<DeathNotifier>& notifier);
+ void removeCookieAndUnlink_l(const std::shared_ptr<IResourceManagerClient>& client,
+ uintptr_t cookie);
mutable Mutex mLock;
sp<ProcessInfoInterface> mProcessInfo;