Merge "aaudio: improve accuracy of timestamps" into oc-mr1-dev
diff --git a/media/libaudiohal/StreamHalHidl.cpp b/media/libaudiohal/StreamHalHidl.cpp
index 176abee..0cafa36 100644
--- a/media/libaudiohal/StreamHalHidl.cpp
+++ b/media/libaudiohal/StreamHalHidl.cpp
@@ -47,7 +47,8 @@
StreamHalHidl::StreamHalHidl(IStream *stream)
: ConversionHelperHidl("Stream"),
mStream(stream),
- mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT) {
+ mHalThreadPriority(HAL_THREAD_PRIORITY_DEFAULT),
+ mCachedBufferSize(0) {
// Instrument audio signal power logging.
// Note: This assumes channel mask, format, and sample rate do not change after creation.
@@ -73,7 +74,11 @@
status_t StreamHalHidl::getBufferSize(size_t *size) {
if (!mStream) return NO_INIT;
- return processReturn("getBufferSize", mStream->getBufferSize(), size);
+ status_t status = processReturn("getBufferSize", mStream->getBufferSize(), size);
+ if (status == OK) {
+ mCachedBufferSize = *size;
+ }
+ return status;
}
status_t StreamHalHidl::getChannelMask(audio_channel_mask_t *mask) {
@@ -202,6 +207,14 @@
return OK;
}
+status_t StreamHalHidl::getCachedBufferSize(size_t *size) {
+ if (mCachedBufferSize != 0) {
+ *size = mCachedBufferSize;
+ return OK;
+ }
+ return getBufferSize(size);
+}
+
bool StreamHalHidl::requestHalThreadPriority(pid_t threadPid, pid_t threadId) {
if (mHalThreadPriority == HAL_THREAD_PRIORITY_DEFAULT) {
return true;
@@ -320,8 +333,19 @@
}
status_t status;
- if (!mDataMQ && (status = prepareForWriting(bytes)) != OK) {
- return status;
+ if (!mDataMQ) {
+ // If playback starts close to the end of a compressed track, the number of bytes
+ // that needs to be written is less than the actual buffer size. The MQ must still be
+ // sized to the full buffer size, otherwise data would be truncated after seeking
+ // back towards the middle of the track.
+ size_t bufferSize;
+ if ((status = getCachedBufferSize(&bufferSize)) != OK) {
+ return status;
+ }
+ if (bytes > bufferSize) bufferSize = bytes;
+ if ((status = prepareForWriting(bufferSize)) != OK) {
+ return status;
+ }
}
status = callWriterThread(
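
For reference, the sizing logic added above boils down to "never create the message queue smaller than the HAL buffer size". A minimal standalone sketch of that policy (the function name and parameters are illustrative, not part of the patch):

#include <algorithm>
#include <cstddef>

// Sketch of the MQ sizing policy: the queue must hold a full HAL buffer even
// when the first write (e.g. playback starting near the end of a compressed
// track) asks for fewer bytes, otherwise later full-buffer writes would be
// truncated after a seek.
static size_t chooseWriteQueueSize(size_t halBufferSize, size_t firstWriteBytes) {
    return std::max(halBufferSize, firstWriteBytes);
}
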
diff --git a/media/libaudiohal/StreamHalHidl.h b/media/libaudiohal/StreamHalHidl.h
index a69cce8..d4ab943 100644
--- a/media/libaudiohal/StreamHalHidl.h
+++ b/media/libaudiohal/StreamHalHidl.h
@@ -102,6 +102,8 @@
// The destructor automatically closes the stream.
virtual ~StreamHalHidl();
+ status_t getCachedBufferSize(size_t *size);
+
bool requestHalThreadPriority(pid_t threadPid, pid_t threadId);
// mStreamPowerLog is used for audio signal power logging.
@@ -111,6 +113,7 @@
const int HAL_THREAD_PRIORITY_DEFAULT = -1;
IStream *mStream;
int mHalThreadPriority;
+ size_t mCachedBufferSize;
};
class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index 03dc9df..57af772 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -422,15 +422,8 @@
&& trackMeta->findInt32(kKeyDisplayHeight, &height)
&& frame->mDisplayWidth > 0 && frame->mDisplayHeight > 0
&& width > 0 && height > 0) {
- if (frame->mDisplayHeight * (int64_t)width / height > (int64_t)frame->mDisplayWidth) {
- frame->mDisplayHeight =
- (int32_t)(height * (int64_t)frame->mDisplayWidth / width);
- } else {
- frame->mDisplayWidth =
- (int32_t)(frame->mDisplayHeight * (int64_t)width / height);
- }
- ALOGV("thumbNail width and height are overridden to %d x %d",
- frame->mDisplayWidth, frame->mDisplayHeight);
+ frame->mDisplayWidth = width;
+ frame->mDisplayHeight = height;
}
}
diff --git a/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp b/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
index d2b2454..fcf1092 100644
--- a/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
+++ b/media/libstagefright/omx/1.0/WGraphicBufferProducer.cpp
@@ -64,10 +64,9 @@
sp<Fence> fence;
::android::FrameEventHistoryDelta outTimestamps;
status_t status = mBase->dequeueBuffer(
- &slot, &fence,
- width, height,
- static_cast<::android::PixelFormat>(format), usage,
- getFrameTimestamps ? &outTimestamps : nullptr);
+ &slot, &fence, width, height,
+ static_cast<::android::PixelFormat>(format), usage, nullptr,
+ getFrameTimestamps ? &outTimestamps : nullptr);
hidl_handle tFence;
FrameEventHistoryDelta tOutTimestamps;
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 5d1a20b..a450dd3 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -44,7 +44,11 @@
const char* AImageReader::kGraphicBufferKey = "GraphicBuffer";
bool
-AImageReader::isSupportedFormat(int32_t format) {
+AImageReader::isSupportedFormatAndUsage(int32_t format, uint64_t usage) {
+ // Check whether usage has either CPU_READ_OFTEN or CPU_READ set. Note that checking
+ // against AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN (0x6) is sufficient, as it implies
+ // AHARDWAREBUFFER_USAGE_CPU_READ (0x2).
+ bool hasCpuUsage = usage & AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN;
switch (format) {
case AIMAGE_FORMAT_RGBA_8888:
case AIMAGE_FORMAT_RGBX_8888:
@@ -60,6 +64,9 @@
case AIMAGE_FORMAT_DEPTH16:
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
return true;
+ case AIMAGE_FORMAT_PRIVATE:
+ // For the private format, CPU read usage is prohibited.
+ return !hasCpuUsage;
default:
return false;
}
@@ -83,6 +90,8 @@
case AIMAGE_FORMAT_DEPTH16:
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
return 1;
+ case AIMAGE_FORMAT_PRIVATE:
+ return 0;
default:
return -1;
}
@@ -606,9 +615,9 @@
return AMEDIA_ERROR_INVALID_PARAMETER;
}
- if (!AImageReader::isSupportedFormat(format)) {
- ALOGE("%s: format %d is not supported by AImageReader",
- __FUNCTION__, format);
+ if (!AImageReader::isSupportedFormatAndUsage(format, usage)) {
+ ALOGE("%s: format %d is not supported with usage 0x%" PRIx64 " by AImageReader",
+ __FUNCTION__, format, usage);
return AMEDIA_ERROR_INVALID_PARAMETER;
}
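
To illustrate the effect of the new check from the client side, a sketch using the public NDK entry point (the dimensions, usage flags, and max image count below are illustrative): a PRIVATE-format reader is accepted as long as the requested usage carries no CPU read bits, while adding a CPU read usage to the same format makes the call return AMEDIA_ERROR_INVALID_PARAMETER.

#include <android/hardware_buffer.h>
#include <media/NdkImageReader.h>

// Sketch: AIMAGE_FORMAT_PRIVATE is only accepted when no CPU read usage is requested.
static media_status_t createPrivateReader(AImageReader **readerOut) {
    // GPU-only consumer usage: passes isSupportedFormatAndUsage() for the private format.
    return AImageReader_newWithUsage(
            /*width=*/640, /*height=*/480, AIMAGE_FORMAT_PRIVATE,
            AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE,
            /*maxImages=*/2, readerOut);
}

// The same call with AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN in the usage mask would be
// rejected with AMEDIA_ERROR_INVALID_PARAMETER, since CPU access to private buffers is
// prohibited.
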
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 989c1fd..989b937 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -49,7 +49,7 @@
struct AImageReader : public RefBase {
public:
- static bool isSupportedFormat(int32_t format);
+ static bool isSupportedFormatAndUsage(int32_t format, uint64_t usage0);
static int getNumPlanesForFormat(int32_t format);
AImageReader(int32_t width,
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index d7443be..1931496 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -495,7 +495,13 @@
/**
* Android private opaque image format.
*
- * <p>This format is not currently supported by {@link AImageReader}.</p>
+ * <p>The choices of the actual format and pixel data layout are entirely up to the
+ * device-specific and framework internal implementations, and may vary depending on use cases
+ * even for the same device. Also note that the contents of these buffers are not directly
+ * accessible to the application.</p>
+ *
+ * <p>When an {@link AImage} of this format is obtained from an {@link AImageReader}, the
+ * {@link AImage_getNumberOfPlanes()} method will return zero.</p>
*/
AIMAGE_FORMAT_PRIVATE = 0x22
};
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 59ae507..7a0c17b 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -70,7 +70,9 @@
* @param height The default height in pixels of the Images that this reader will produce.
* @param format The format of the Image that this reader will produce. This must be one of the
* AIMAGE_FORMAT_* enum value defined in {@link AIMAGE_FORMATS}. Note that not all
- * formats are supported, like {@link AIMAGE_FORMAT_PRIVATE}.
+ * formats are supported. One example is {@link AIMAGE_FORMAT_PRIVATE}, as it is not
+ * intended to be read by applications directly. That format is, however, supported by
+ * {@link AImageReader_newWithUsage}, introduced in API level 26.
* @param maxImages The maximum number of images the user will want to access simultaneously. This
* should be as small as possible to limit memory use. Once maxImages Images are obtained
* by the user, one of them has to be released before a new {@link AImage} will become
@@ -307,6 +309,28 @@
* for the consumer usage. All other parameters and the return values are identical to those passed
* to {@link AImageReader_new}.
*
+ * <p>If the {@code format} is {@link AIMAGE_FORMAT_PRIVATE}, the created {@link AImageReader}
+ * will produce images whose contents are not directly accessible by the application. The application
+ * can still acquire images from this {@link AImageReader} and access the {@link AHardwareBuffer} via
+ * {@link AImage_getHardwareBuffer()}. The {@link AHardwareBuffer} obtained this way can then
+ * be passed back to hardware (such as the GPU or a hardware encoder, if supported) for further
+ * processing. For example, you can obtain an {@link EGLClientBuffer} from the {@link AHardwareBuffer}
+ * by using the {@link eglGetNativeClientBufferANDROID} extension and pass that {@link EGLClientBuffer}
+ * to {@link eglCreateImageKHR} to create an {@link EGLImage} resource, which may then be bound to a
+ * texture via {@link glEGLImageTargetTexture2DOES} on supported devices. This can be useful for
+ * transporting textures that may be shared across processes.</p>
+ * <p>In general, when software access to image data is not necessary, an {@link AImageReader}
+ * created with the {@link AIMAGE_FORMAT_PRIVATE} format is more efficient than {@link AImageReader}s
+ * using other formats such as {@link AIMAGE_FORMAT_YUV_420_888}.</p>
+ *
+ * <p>Note that not all combinations of format and usage flags are supported by the
+ * {@link AImageReader}. In particular, if {@code format} is {@link AIMAGE_FORMAT_PRIVATE},
+ * {@code usage} must not include either {@link AHARDWAREBUFFER_USAGE_CPU_READ} or
+ * {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN}.</p>
+ *
+ * @param width The default width in pixels of the Images that this reader will produce.
+ * @param height The default height in pixels of the Images that this reader will produce.
+ * @param format The format of the Image that this reader will produce. This must be one of the
+ * AIMAGE_FORMAT_* enum value defined in {@link AIMAGE_FORMATS}.
* @param usage specifies how the consumer will access the AImage, using combination of the
* AHARDWAREBUFFER_USAGE flags described in {@link hardware_buffer.h}.
* Passing {@link AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN} is equivalent to calling
@@ -331,6 +355,11 @@
* {@link AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE}, or combined</td>
* </tr>
* </table>
+ * @return <ul>
+ * <li>{@link AMEDIA_OK} if the method call succeeds.</li>
+ * <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL, or one or more of width,
+ * height, format, maxImages, or usage arguments is not supported.</li>
+ * <li>{@link AMEDIA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
*
* @see AImage
* @see AImageReader_new
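
The GPU path described in the documentation above can be sketched roughly as follows. This is only an illustration of the AHardwareBuffer -> EGLClientBuffer -> EGLImage -> texture chain, assuming a current EGL display/context, the EGL_ANDROID_get_native_client_buffer, EGL_KHR_image_base, and GL_OES_EGL_image_external extensions, and extension prototypes enabled via the *_PROTOTYPES macros; the helper name and the error handling are illustrative, not part of the patch.

#define EGL_EGLEXT_PROTOTYPES
#define GL_GLEXT_PROTOTYPES
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <android/hardware_buffer.h>
#include <media/NdkImage.h>

// Sketch: bind the AHardwareBuffer backing a private-format AImage to an external texture.
static GLuint bindImageAsExternalTexture(EGLDisplay display, AImage *image) {
    AHardwareBuffer *hardwareBuffer = nullptr;
    if (AImage_getHardwareBuffer(image, &hardwareBuffer) != AMEDIA_OK || hardwareBuffer == nullptr) {
        return 0;
    }

    // Wrap the AHardwareBuffer as an EGLClientBuffer and create an EGLImage from it.
    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    const EGLint attrs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE };
    EGLImageKHR eglImage = eglCreateImageKHR(
            display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID, clientBuffer, attrs);
    if (eglImage == EGL_NO_IMAGE_KHR) {
        return 0;
    }

    // Bind the EGLImage to a GL_TEXTURE_EXTERNAL_OES texture for sampling in a shader.
    GLuint texture = 0;
    glGenTextures(1, &texture);
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, (GLeglImageOES)eglImage);
    return texture;
}

The AImage must stay acquired while the texture is in use, and the EGLImage should eventually be released with eglDestroyImageKHR().
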