Merge "audioflinger: Add tracing for direct tracks"
diff --git a/camera/ndk/impl/ACameraCaptureSession.cpp b/camera/ndk/impl/ACameraCaptureSession.cpp
index d6f1412..68db233 100644
--- a/camera/ndk/impl/ACameraCaptureSession.cpp
+++ b/camera/ndk/impl/ACameraCaptureSession.cpp
@@ -33,7 +33,9 @@
dev->unlockDevice();
}
// Fire onClosed callback
- (*mUserSessionCallback.onClosed)(mUserSessionCallback.context, this);
+ if (mUserSessionCallback.onClosed != nullptr) {
+ (*mUserSessionCallback.onClosed)(mUserSessionCallback.context, this);
+ }
ALOGV("~ACameraCaptureSession: %p is deleted", this);
}
diff --git a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
index 7ab0124..938b5f5 100644
--- a/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
+++ b/camera/ndk/ndk_vendor/tests/AImageReaderVendorTest.cpp
@@ -253,21 +253,9 @@
return true;
}
- static void onDeviceDisconnected(void* /*obj*/, ACameraDevice* /*device*/) {}
-
- static void onDeviceError(void* /*obj*/, ACameraDevice* /*device*/, int /*errorCode*/) {}
-
- static void onSessionClosed(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
- static void onSessionReady(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
- static void onSessionActive(void* /*obj*/, ACameraCaptureSession* /*session*/) {}
-
private:
- ACameraDevice_StateCallbacks mDeviceCb{this, onDeviceDisconnected,
- onDeviceError};
- ACameraCaptureSession_stateCallbacks mSessionCb{
- this, onSessionClosed, onSessionReady, onSessionActive};
+ ACameraDevice_StateCallbacks mDeviceCb{this, nullptr, nullptr};
+ ACameraCaptureSession_stateCallbacks mSessionCb{this, nullptr, nullptr, nullptr};
native_handle_t* mImgReaderAnw = nullptr; // not owned by us.
diff --git a/include/media/ExtendedAudioBufferProvider.h b/include/media/ExtendedAudioBufferProvider.h
index d653cc3..99d3c13 120000
--- a/include/media/ExtendedAudioBufferProvider.h
+++ b/include/media/ExtendedAudioBufferProvider.h
@@ -1 +1 @@
-../../media/libmedia/include/media/ExtendedAudioBufferProvider.h
\ No newline at end of file
+../../media/libaudioclient/include/media/ExtendedAudioBufferProvider.h
\ No newline at end of file
diff --git a/include/media/SingleStateQueue.h b/include/media/SingleStateQueue.h
deleted file mode 120000
index 619f6ee..0000000
--- a/include/media/SingleStateQueue.h
+++ /dev/null
@@ -1 +0,0 @@
-../../media/libmedia/include/media/SingleStateQueue.h
\ No newline at end of file
diff --git a/include/media/nbaio/SingleStateQueue.h b/include/media/nbaio/SingleStateQueue.h
new file mode 120000
index 0000000..d3e0553
--- /dev/null
+++ b/include/media/nbaio/SingleStateQueue.h
@@ -0,0 +1 @@
+../../../media/libnbaio/include_mono/media/nbaio/SingleStateQueue.h
\ No newline at end of file
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index 5f19f74..1b1f149 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -28,7 +28,7 @@
#include <media/AudioResamplerPublic.h>
#include <media/AudioTimestamp.h>
#include <media/Modulo.h>
-#include <media/SingleStateQueue.h>
+#include <media/nbaio/SingleStateQueue.h>
namespace android {
diff --git a/media/codec2/components/aac/C2SoftAacEnc.cpp b/media/codec2/components/aac/C2SoftAacEnc.cpp
index 1dc676b..a8f39d5 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.cpp
+++ b/media/codec2/components/aac/C2SoftAacEnc.cpp
@@ -159,7 +159,8 @@
mInputSize(0),
mNextFrameTimestampUs(0),
mSignalledError(false),
- mOutIndex(0u) {
+ mOutIndex(0u),
+ mRemainderLen(0u) {
}
C2SoftAacEnc::~C2SoftAacEnc() {
@@ -185,6 +186,7 @@
mInputSize = 0u;
mNextFrameTimestampUs = 0;
mSignalledError = false;
+ mRemainderLen = 0;
return C2_OK;
}
@@ -369,18 +371,21 @@
mInputTimeSet = true;
}
- size_t numFrames = (capacity + mInputSize + (eos ? mNumBytesPerInputFrame - 1 : 0))
- / mNumBytesPerInputFrame;
+ size_t numFrames =
+ (mRemainderLen + capacity + mInputSize + (eos ? mNumBytesPerInputFrame - 1 : 0))
+ / mNumBytesPerInputFrame;
ALOGV("capacity = %zu; mInputSize = %zu; numFrames = %zu "
- "mNumBytesPerInputFrame = %u inputTS = %lld",
+ "mNumBytesPerInputFrame = %u inputTS = %lld remaining = %zu",
capacity, mInputSize, numFrames,
- mNumBytesPerInputFrame, work->input.ordinal.timestamp.peekll());
+ mNumBytesPerInputFrame, work->input.ordinal.timestamp.peekll(),
+ mRemainderLen);
std::shared_ptr<C2LinearBlock> block;
std::unique_ptr<C2WriteView> wView;
uint8_t *outPtr = temp;
size_t outAvailable = 0u;
uint64_t inputIndex = work->input.ordinal.frameIndex.peeku();
+ size_t bytesPerSample = channelCount * sizeof(int16_t);
AACENC_InArgs inargs;
AACENC_OutArgs outargs;
@@ -449,7 +454,25 @@
};
std::list<OutputBuffer> outputBuffers;
- while (encoderErr == AACENC_OK && inargs.numInSamples > 0) {
+ if (mRemainderLen > 0) {
+ size_t offset = 0;
+ for (; mRemainderLen < bytesPerSample && offset < capacity; ++offset) {
+ mRemainder[mRemainderLen++] = data[offset];
+ }
+ data += offset;
+ capacity -= offset;
+ if (mRemainderLen == bytesPerSample) {
+ inBuffer[0] = mRemainder;
+ inBufferSize[0] = bytesPerSample;
+ inargs.numInSamples = channelCount;
+ mRemainderLen = 0;
+ ALOGV("Processing remainder");
+ } else {
+ // We have exhausted the input already
+ inargs.numInSamples = 0;
+ }
+ }
+ while (encoderErr == AACENC_OK && inargs.numInSamples >= channelCount) {
if (numFrames && !block) {
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
// TODO: error handling, proper usage, etc.
@@ -486,7 +509,7 @@
mNextFrameTimestampUs = work->input.ordinal.timestamp
+ (consumed * 1000000ll / channelCount / sampleRate);
std::shared_ptr<C2Buffer> buffer = createLinearBuffer(block, 0, outargs.numOutBytes);
-#if defined(LOG_NDEBUG) && !LOG_NDEBUG
+#if 0
hexdump(outPtr, std::min(outargs.numOutBytes, 256));
#endif
outPtr = temp;
@@ -503,12 +526,17 @@
inBufferSize[0] -= outargs.numInSamples * sizeof(int16_t);
inargs.numInSamples -= outargs.numInSamples;
}
+
+ if (inBuffer[0] == mRemainder) {
+ inBuffer[0] = const_cast<uint8_t *>(data);
+ inBufferSize[0] = capacity;
+ inargs.numInSamples = capacity / sizeof(int16_t);
+ }
}
ALOGV("encoderErr = %d mInputSize = %zu "
"inargs.numInSamples = %d, mNextFrameTimestampUs = %lld",
encoderErr, mInputSize, inargs.numInSamples, mNextFrameTimestampUs.peekll());
}
-
if (eos && inBufferSize[0] > 0) {
if (numFrames && !block) {
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
@@ -539,6 +567,14 @@
&outBufDesc,
&inargs,
&outargs);
+ inBufferSize[0] = 0;
+ }
+
+ if (inBufferSize[0] > 0) {
+ for (size_t i = 0; i < inBufferSize[0]; ++i) {
+ mRemainder[i] = static_cast<uint8_t *>(inBuffer[0])[i];
+ }
+ mRemainderLen = inBufferSize[0];
}
while (outputBuffers.size() > 1) {
diff --git a/media/codec2/components/aac/C2SoftAacEnc.h b/media/codec2/components/aac/C2SoftAacEnc.h
index 2655039..6ecfbdd 100644
--- a/media/codec2/components/aac/C2SoftAacEnc.h
+++ b/media/codec2/components/aac/C2SoftAacEnc.h
@@ -61,6 +61,10 @@
bool mSignalledError;
std::atomic_uint64_t mOutIndex;
+ // We support max 6 channels
+ uint8_t mRemainder[6 * sizeof(int16_t)];
+ size_t mRemainderLen;
+
status_t initEncoder();
status_t setAudioParams();
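The C2SoftAacEnc hunks above make the encoder tolerate input buffers whose size is not a multiple of one PCM sample frame (channelCount * sizeof(int16_t) bytes): the trailing partial frame is stashed in mRemainder/mRemainderLen and prepended to the next work item. A minimal standalone sketch of that carry-over idea, using hypothetical names rather than the component's actual members:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Accumulates input bytes and hands back only whole sample frames; the
    // partial tail is kept for the next call (sketch, not the C2 component code).
    struct SampleFrameAligner {
        explicit SampleFrameAligner(size_t bytesPerFrame) : mBytesPerFrame(bytesPerFrame) {}

        std::vector<uint8_t> push(const uint8_t* data, size_t size) {
            std::vector<uint8_t> out(mRemainder);
            out.insert(out.end(), data, data + size);
            const size_t usable = (out.size() / mBytesPerFrame) * mBytesPerFrame;
            mRemainder.assign(out.begin() + usable, out.end());
            out.resize(usable);
            return out;
        }

        const size_t mBytesPerFrame;
        std::vector<uint8_t> mRemainder;  // always shorter than mBytesPerFrame
    };

For stereo 16-bit input (bytesPerFrame == 4), pushing 7 bytes returns 4 aligned bytes and keeps 3 for the next push, which mirrors what mRemainder does with at most 6 * sizeof(int16_t) carried bytes.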
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 6b75eba..745d701 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -117,8 +117,9 @@
}
}
- // For VP9, the static info is always propagated by framework.
+ // For VP9/AV1, the static info is always propagated by the framework.
supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
+ supportsHdr |= (mediaType == MIMETYPE_VIDEO_AV1);
for (C2Value::Primitive profile : profileQuery[0].values.values) {
pl.profile = (C2Config::profile_t)profile.ref<uint32_t>();
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index 7334834..ef6af48 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -382,10 +382,11 @@
// TODO: will need to disambiguate between Main8 and Main10
{ C2Config::PROFILE_AV1_0, AV1ProfileMain8 },
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
+ { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
+ { C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10Plus },
};
ALookup<C2Config::profile_t, int32_t> sAv1HdrProfiles = {
- { C2Config::PROFILE_AV1_0, AV1ProfileMain10 },
{ C2Config::PROFILE_AV1_0, AV1ProfileMain10HDR10 },
};
@@ -662,6 +663,8 @@
return std::make_shared<HevcProfileLevelMapper>(true, isHdr10Plus);
} else if (mediaType == MIMETYPE_VIDEO_VP9) {
return std::make_shared<Vp9ProfileLevelMapper>(true, isHdr10Plus);
+ } else if (mediaType == MIMETYPE_VIDEO_AV1) {
+ return std::make_shared<Av1ProfileLevelMapper>(true, isHdr10Plus);
}
return nullptr;
}
diff --git a/media/libmedia/include/media/ExtendedAudioBufferProvider.h b/media/libaudioclient/include/media/ExtendedAudioBufferProvider.h
similarity index 100%
rename from media/libmedia/include/media/ExtendedAudioBufferProvider.h
rename to media/libaudioclient/include/media/ExtendedAudioBufferProvider.h
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 9803473..5837fcf 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -52,4 +52,10 @@
name: "libaudiohal_headers",
export_include_dirs: ["include"],
+
+ // This is needed because the stream interface includes media/MicrophoneInfo.h
+ // which is not in any library but has a dependency on headers from libbinder.
+ header_libs: ["libbinder_headers"],
+
+ export_header_lib_headers: ["libbinder_headers"],
}
diff --git a/media/libheif/HeifDecoderImpl.cpp b/media/libheif/HeifDecoderImpl.cpp
index a977300..a7c0f84 100644
--- a/media/libheif/HeifDecoderImpl.cpp
+++ b/media/libheif/HeifDecoderImpl.cpp
@@ -31,6 +31,7 @@
#include <private/media/VideoFrame.h>
#include <utils/Log.h>
#include <utils/RefBase.h>
+#include <vector>
HeifDecoder* createHeifDecoder() {
return new android::HeifDecoderImpl();
@@ -38,6 +39,23 @@
namespace android {
+void initFrameInfo(HeifFrameInfo *info, const VideoFrame *videoFrame) {
+ info->mWidth = videoFrame->mWidth;
+ info->mHeight = videoFrame->mHeight;
+ info->mRotationAngle = videoFrame->mRotationAngle;
+ info->mBytesPerPixel = videoFrame->mBytesPerPixel;
+ // TODO: retrieve per-frame duration from extractor/metadataretriever.
+ info->mDurationUs = 33333;
+ if (videoFrame->mIccSize > 0) {
+ info->mIccData.assign(
+ videoFrame->getFlattenedIccData(),
+ videoFrame->getFlattenedIccData() + videoFrame->mIccSize);
+ } else {
+ // Clear the old ICC data when the frame carries none.
+ info->mIccData.clear();
+ }
+}
+
/*
* HeifDataSource
*
@@ -293,11 +311,11 @@
// it's not, default to HAL_PIXEL_FORMAT_RGB_565.
mOutputColor(HAL_PIXEL_FORMAT_RGB_565),
mCurScanline(0),
- mWidth(0),
- mHeight(0),
+ mTotalScanline(0),
mFrameDecoded(false),
mHasImage(false),
mHasVideo(false),
+ mSequenceLength(0),
mAvailableLines(0),
mNumSlices(1),
mSliceHeight(0),
@@ -336,48 +354,94 @@
mHasImage = hasImage && !strcasecmp(hasImage, "yes");
mHasVideo = hasVideo && !strcasecmp(hasVideo, "yes");
- sp<IMemory> sharedMem;
+
+ HeifFrameInfo* defaultInfo = nullptr;
if (mHasImage) {
// image index < 0 to retrieve primary image
- sharedMem = mRetriever->getImageAtIndex(
+ sp<IMemory> sharedMem = mRetriever->getImageAtIndex(
-1, mOutputColor, true /*metaOnly*/);
- } else if (mHasVideo) {
- sharedMem = mRetriever->getFrameAtTime(0,
- MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
- mOutputColor, true /*metaOnly*/);
+
+ if (sharedMem == nullptr || sharedMem->pointer() == nullptr) {
+ ALOGE("init: videoFrame is a nullptr");
+ return false;
+ }
+
+ VideoFrame* videoFrame = static_cast<VideoFrame*>(sharedMem->pointer());
+
+ ALOGV("Image dimension %dx%d, display %dx%d, angle %d, iccSize %d",
+ videoFrame->mWidth,
+ videoFrame->mHeight,
+ videoFrame->mDisplayWidth,
+ videoFrame->mDisplayHeight,
+ videoFrame->mRotationAngle,
+ videoFrame->mIccSize);
+
+ initFrameInfo(&mImageInfo, videoFrame);
+
+ if (videoFrame->mTileHeight >= 512) {
+ // Remember the tile height; decoding in slices is attempted later
+ // if the image is also big enough.
+ mSliceHeight = videoFrame->mTileHeight;
+ ALOGV("mSliceHeight %u", mSliceHeight);
+ }
+
+ defaultInfo = &mImageInfo;
}
- if (sharedMem == nullptr || sharedMem->pointer() == nullptr) {
- ALOGE("getFrameAtTime: videoFrame is a nullptr");
+ if (mHasVideo) {
+ sp<IMemory> sharedMem = mRetriever->getFrameAtTime(0,
+ MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
+ mOutputColor, true /*metaOnly*/);
+
+ if (sharedMem == nullptr || sharedMem->pointer() == nullptr) {
+ ALOGE("init: videoFrame is a nullptr");
+ return false;
+ }
+
+ VideoFrame* videoFrame = static_cast<VideoFrame*>(sharedMem->pointer());
+
+ ALOGV("Sequence dimension %dx%d, display %dx%d, angle %d, iccSize %d",
+ videoFrame->mWidth,
+ videoFrame->mHeight,
+ videoFrame->mDisplayWidth,
+ videoFrame->mDisplayHeight,
+ videoFrame->mRotationAngle,
+ videoFrame->mIccSize);
+
+ initFrameInfo(&mSequenceInfo, videoFrame);
+
+ mSequenceLength = atoi(mRetriever->extractMetadata(METADATA_KEY_VIDEO_FRAME_COUNT));
+
+ if (defaultInfo == nullptr) {
+ defaultInfo = &mSequenceInfo;
+ }
+ }
+
+ if (defaultInfo == nullptr) {
+ ALOGD("No valid image or sequence available");
return false;
}
- VideoFrame* videoFrame = static_cast<VideoFrame*>(sharedMem->pointer());
-
- ALOGV("Meta dimension %dx%d, display %dx%d, angle %d, iccSize %d",
- videoFrame->mWidth,
- videoFrame->mHeight,
- videoFrame->mDisplayWidth,
- videoFrame->mDisplayHeight,
- videoFrame->mRotationAngle,
- videoFrame->mIccSize);
-
if (frameInfo != nullptr) {
- frameInfo->set(
- videoFrame->mWidth,
- videoFrame->mHeight,
- videoFrame->mRotationAngle,
- videoFrame->mBytesPerPixel,
- videoFrame->mIccSize,
- videoFrame->getFlattenedIccData());
+ *frameInfo = *defaultInfo;
}
- mWidth = videoFrame->mWidth;
- mHeight = videoFrame->mHeight;
- if (mHasImage && videoFrame->mTileHeight >= 512 && mWidth >= 3000 && mHeight >= 2000 ) {
- // Try decoding in slices only if the image has tiles and is big enough.
- mSliceHeight = videoFrame->mTileHeight;
- mNumSlices = (videoFrame->mHeight + mSliceHeight - 1) / mSliceHeight;
- ALOGV("mSliceHeight %u, mNumSlices %zu", mSliceHeight, mNumSlices);
+
+ // Default total scanline count; this may change if decodeSequence() is used.
+ mTotalScanline = defaultInfo->mHeight;
+
+ return true;
+}
+
+bool HeifDecoderImpl::getSequenceInfo(
+ HeifFrameInfo* frameInfo, size_t *frameCount) {
+ ALOGV("%s", __FUNCTION__);
+ if (!mHasVideo) {
+ return false;
+ }
+ if (frameInfo != nullptr) {
+ *frameInfo = mSequenceInfo;
+ }
+ if (frameCount != nullptr) {
+ *frameCount = mSequenceLength;
}
return true;
}
@@ -416,11 +480,11 @@
ALOGV("decodeAsync(): decoding slice %zu", i);
size_t top = i * mSliceHeight;
size_t bottom = (i + 1) * mSliceHeight;
- if (bottom > mHeight) {
- bottom = mHeight;
+ if (bottom > mImageInfo.mHeight) {
+ bottom = mImageInfo.mHeight;
}
sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
- -1, mOutputColor, 0, top, mWidth, bottom);
+ -1, mOutputColor, 0, top, mImageInfo.mWidth, bottom);
{
Mutex::Autolock autolock(mLock);
@@ -452,42 +516,44 @@
// See if we want to decode in slices to allow client to start
// scanline processing in parallel with decode. If this fails
// we fallback to decoding the full frame.
- if (mHasImage && mNumSlices > 1) {
- // get first slice and metadata
- sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
- -1, mOutputColor, 0, 0, mWidth, mSliceHeight);
-
- if (frameMemory == nullptr || frameMemory->pointer() == nullptr) {
- ALOGE("decode: metadata is a nullptr");
- return false;
+ if (mHasImage) {
+ if (mSliceHeight >= 512 &&
+ mImageInfo.mWidth >= 3000 &&
+ mImageInfo.mHeight >= 2000 ) {
+ // Try decoding in slices only if the image has tiles and is big enough.
+ mNumSlices = (mImageInfo.mHeight + mSliceHeight - 1) / mSliceHeight;
+ ALOGV("mSliceHeight %u, mNumSlices %zu", mSliceHeight, mNumSlices);
}
- VideoFrame* videoFrame = static_cast<VideoFrame*>(frameMemory->pointer());
+ if (mNumSlices > 1) {
+ // get first slice and metadata
+ sp<IMemory> frameMemory = mRetriever->getImageRectAtIndex(
+ -1, mOutputColor, 0, 0, mImageInfo.mWidth, mSliceHeight);
- if (frameInfo != nullptr) {
- frameInfo->set(
- videoFrame->mWidth,
- videoFrame->mHeight,
- videoFrame->mRotationAngle,
- videoFrame->mBytesPerPixel,
- videoFrame->mIccSize,
- videoFrame->getFlattenedIccData());
+ if (frameMemory == nullptr || frameMemory->pointer() == nullptr) {
+ ALOGE("decode: metadata is a nullptr");
+ return false;
+ }
+
+ VideoFrame* videoFrame = static_cast<VideoFrame*>(frameMemory->pointer());
+
+ if (frameInfo != nullptr) {
+ initFrameInfo(frameInfo, videoFrame);
+ }
+ mFrameMemory = frameMemory;
+ mAvailableLines = mSliceHeight;
+ mThread = new DecodeThread(this);
+ if (mThread->run("HeifDecode", ANDROID_PRIORITY_FOREGROUND) == OK) {
+ mFrameDecoded = true;
+ return true;
+ }
+ // Fallback to decode without slicing
+ mThread.clear();
+ mNumSlices = 1;
+ mSliceHeight = 0;
+ mAvailableLines = 0;
+ mFrameMemory.clear();
}
-
- mFrameMemory = frameMemory;
- mAvailableLines = mSliceHeight;
- mThread = new DecodeThread(this);
- if (mThread->run("HeifDecode", ANDROID_PRIORITY_FOREGROUND) == OK) {
- mFrameDecoded = true;
- return true;
- }
-
- // Fallback to decode without slicing
- mThread.clear();
- mNumSlices = 1;
- mSliceHeight = 0;
- mAvailableLines = 0;
- mFrameMemory.clear();
}
if (mHasImage) {
@@ -520,13 +586,8 @@
videoFrame->mSize);
if (frameInfo != nullptr) {
- frameInfo->set(
- videoFrame->mWidth,
- videoFrame->mHeight,
- videoFrame->mRotationAngle,
- videoFrame->mBytesPerPixel,
- videoFrame->mIccSize,
- videoFrame->getFlattenedIccData());
+ initFrameInfo(frameInfo, videoFrame);
+
}
mFrameDecoded = true;
@@ -536,6 +597,50 @@
return true;
}
+bool HeifDecoderImpl::decodeSequence(int frameIndex, HeifFrameInfo* frameInfo) {
+ ALOGV("%s: frame index %d", __FUNCTION__, frameIndex);
+ if (!mHasVideo) {
+ return false;
+ }
+
+ if (frameIndex < 0 || frameIndex >= mSequenceLength) {
+ ALOGE("invalid frame index: %d, total frames %zu", frameIndex, mSequenceLength);
+ return false;
+ }
+
+ mCurScanline = 0;
+
+ // set total scanline to sequence height now
+ mTotalScanline = mSequenceInfo.mHeight;
+
+ mFrameMemory = mRetriever->getFrameAtIndex(frameIndex, mOutputColor);
+ if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
+ ALOGE("decode: videoFrame is a nullptr");
+ return false;
+ }
+
+ VideoFrame* videoFrame = static_cast<VideoFrame*>(mFrameMemory->pointer());
+ if (videoFrame->mSize == 0 ||
+ mFrameMemory->size() < videoFrame->getFlattenedSize()) {
+ ALOGE("decode: videoFrame size is invalid");
+ return false;
+ }
+
+ ALOGV("Decoded dimension %dx%d, display %dx%d, angle %d, rowbytes %d, size %d",
+ videoFrame->mWidth,
+ videoFrame->mHeight,
+ videoFrame->mDisplayWidth,
+ videoFrame->mDisplayHeight,
+ videoFrame->mRotationAngle,
+ videoFrame->mRowBytes,
+ videoFrame->mSize);
+
+ if (frameInfo != nullptr) {
+ initFrameInfo(frameInfo, videoFrame);
+ }
+ return true;
+}
+
bool HeifDecoderImpl::getScanlineInner(uint8_t* dst) {
if (mFrameMemory == nullptr || mFrameMemory->pointer() == nullptr) {
return false;
@@ -547,7 +652,7 @@
}
bool HeifDecoderImpl::getScanline(uint8_t* dst) {
- if (mCurScanline >= mHeight) {
+ if (mCurScanline >= mTotalScanline) {
ALOGE("no more scanline available");
return false;
}
@@ -567,8 +672,8 @@
size_t HeifDecoderImpl::skipScanlines(size_t count) {
uint32_t oldScanline = mCurScanline;
mCurScanline += count;
- if (mCurScanline > mHeight) {
- mCurScanline = mHeight;
+ if (mCurScanline > mTotalScanline) {
+ mCurScanline = mTotalScanline;
}
return (mCurScanline > oldScanline) ? (mCurScanline - oldScanline) : 0;
}
diff --git a/media/libheif/HeifDecoderImpl.h b/media/libheif/HeifDecoderImpl.h
index 528ee3b..69c74a7 100644
--- a/media/libheif/HeifDecoderImpl.h
+++ b/media/libheif/HeifDecoderImpl.h
@@ -40,12 +40,16 @@
bool init(HeifStream* stream, HeifFrameInfo* frameInfo) override;
+ bool getSequenceInfo(HeifFrameInfo* frameInfo, size_t *frameCount) override;
+
bool getEncodedColor(HeifEncodedColor* outColor) const override;
bool setOutputColor(HeifColorFormat heifColor) override;
bool decode(HeifFrameInfo* frameInfo) override;
+ bool decodeSequence(int frameIndex, HeifFrameInfo* frameInfo) override;
+
bool getScanline(uint8_t* dst) override;
size_t skipScanlines(size_t count) override;
@@ -56,13 +60,15 @@
sp<IDataSource> mDataSource;
sp<MediaMetadataRetriever> mRetriever;
sp<IMemory> mFrameMemory;
+ HeifFrameInfo mImageInfo;
+ HeifFrameInfo mSequenceInfo;
android_pixel_format_t mOutputColor;
size_t mCurScanline;
- uint32_t mWidth;
- uint32_t mHeight;
+ size_t mTotalScanline;
bool mFrameDecoded;
bool mHasImage;
bool mHasVideo;
+ size_t mSequenceLength;
// Slice decoding only
Mutex mLock;
diff --git a/media/libheif/include/HeifDecoderAPI.h b/media/libheif/include/HeifDecoderAPI.h
index aa10f33..9073672 100644
--- a/media/libheif/include/HeifDecoderAPI.h
+++ b/media/libheif/include/HeifDecoderAPI.h
@@ -17,7 +17,7 @@
#ifndef _HEIF_DECODER_API_
#define _HEIF_DECODER_API_
-#include <memory>
+#include <vector>
/*
* The output color pixel format of heif decoder.
@@ -40,41 +40,13 @@
/*
* Represents a color converted (RGB-based) video frame
*/
-struct HeifFrameInfo
-{
- HeifFrameInfo() :
- mWidth(0), mHeight(0), mRotationAngle(0), mBytesPerPixel(0),
- mIccSize(0), mIccData(nullptr) {}
-
- // update the frame info, will make a copy of |iccData| internally
- void set(uint32_t width, uint32_t height, int32_t rotation, uint32_t bpp,
- uint32_t iccSize, uint8_t* iccData) {
- mWidth = width;
- mHeight = height;
- mRotationAngle = rotation;
- mBytesPerPixel = bpp;
-
- if (mIccData != nullptr) {
- mIccData.reset(nullptr);
- }
- mIccSize = iccSize;
- if (iccSize > 0) {
- mIccData.reset(new uint8_t[iccSize]);
- if (mIccData.get() != nullptr) {
- memcpy(mIccData.get(), iccData, iccSize);
- } else {
- mIccSize = 0;
- }
- }
- }
-
- // Intentional public access modifiers:
+struct HeifFrameInfo {
uint32_t mWidth;
uint32_t mHeight;
int32_t mRotationAngle; // Rotation angle, clockwise, should be multiple of 90
uint32_t mBytesPerPixel; // Number of bytes for one pixel
- uint32_t mIccSize; // Number of bytes in mIccData
- std::unique_ptr<uint8_t[]> mIccData; // Actual ICC data, memory is owned by this structure
+ int64_t mDurationUs; // Duration of the frame in us
+ std::vector<uint8_t> mIccData; // ICC data array
};
/*
@@ -113,8 +85,8 @@
virtual size_t getLength() const = 0;
private:
- HeifStream(const HeifFrameInfo&) = delete;
- HeifStream& operator=(const HeifFrameInfo&) = delete;
+ HeifStream(const HeifStream&) = delete;
+ HeifStream& operator=(const HeifStream&) = delete;
};
/*
@@ -146,6 +118,14 @@
virtual bool init(HeifStream* stream, HeifFrameInfo* frameInfo) = 0;
/*
+ * Returns true if the stream contains an image sequence and false otherwise.
+ * Upon success, |frameInfo| is filled with information about the pictures in
+ * the sequence and |frameCount| with the number of frames in it; both are
+ * left unmodified upon failure.
+ */
+ virtual bool getSequenceInfo(HeifFrameInfo* frameInfo, size_t *frameCount) = 0;
+
+ /*
* Decode the picture internally, returning whether it succeeded. |frameInfo|
* will be filled with information of the primary picture upon success and
* unmodified upon failure.
@@ -156,6 +136,20 @@
virtual bool decode(HeifFrameInfo* frameInfo) = 0;
/*
+ * Decode the picture from the image sequence at index |frameIndex|.
+ * |frameInfo| will be filled with information about the decoded picture upon
+ * success and left unmodified upon failure.
+ *
+ * |frameIndex| is the 0-based index of the video frame to retrieve. It must
+ * refer to a valid frame; the total number of frames available for retrieval
+ * is reported by getSequenceInfo().
+ *
+ * Once this succeeds, getScanline() can be called to read the scanlines
+ * that were decoded.
+ */
+ virtual bool decodeSequence(int frameIndex, HeifFrameInfo* frameInfo) = 0;
+
+ /*
* Read the next scanline (in top-down order), returns true upon success
* and false otherwise.
*/
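Taken together, the new getSequenceInfo()/decodeSequence() entry points and the existing getScanline() suggest the following call order for a sequence-aware client. A hedged usage sketch; the HeifStream implementation, the dumpSequence() helper name, and the row-buffer sizing are assumptions, not part of this header:

    #include <cstddef>
    #include <cstdint>
    #include <vector>
    #include "HeifDecoderAPI.h"  // the header patched above

    // Decodes every frame of an image sequence, scanline by scanline.
    bool dumpSequence(HeifDecoder* decoder, HeifStream* stream) {
        HeifFrameInfo info;
        if (!decoder->init(stream, &info)) {
            return false;
        }
        size_t frameCount = 0;
        if (!decoder->getSequenceInfo(&info, &frameCount)) {
            return false;  // the stream carries no image sequence
        }
        // One scanline, sized from the reported geometry.
        std::vector<uint8_t> row(info.mWidth * info.mBytesPerPixel);
        for (size_t i = 0; i < frameCount; ++i) {
            if (!decoder->decodeSequence(static_cast<int>(i), &info)) {
                return false;
            }
            for (uint32_t y = 0; y < info.mHeight; ++y) {
                if (!decoder->getScanline(row.data())) {
                    return false;
                }
                // consume |row| here
            }
        }
        return true;
    }

HeifDecoderImpl::decodeSequence() above resets the scanline cursor and switches the scanline bound to the sequence height, so getScanline() can be reused unchanged between still-image and sequence decoding.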
diff --git a/media/libmedia/IResourceManagerService.cpp b/media/libmedia/IResourceManagerService.cpp
index 9724fc1..00f9b88 100644
--- a/media/libmedia/IResourceManagerService.cpp
+++ b/media/libmedia/IResourceManagerService.cpp
@@ -72,12 +72,14 @@
virtual void addResource(
int pid,
+ int uid,
int64_t clientId,
const sp<IResourceManagerClient> client,
const Vector<MediaResource> &resources) {
Parcel data, reply;
data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor());
data.writeInt32(pid);
+ data.writeInt32(uid);
data.writeInt64(clientId);
data.writeStrongBinder(IInterface::asBinder(client));
writeToParcel(&data, resources);
@@ -129,6 +131,7 @@
case ADD_RESOURCE: {
CHECK_INTERFACE(IResourceManagerService, data, reply);
int pid = data.readInt32();
+ int uid = data.readInt32();
int64_t clientId = data.readInt64();
sp<IResourceManagerClient> client(
interface_cast<IResourceManagerClient>(data.readStrongBinder()));
@@ -137,7 +140,7 @@
}
Vector<MediaResource> resources;
readFromParcel(data, &resources);
- addResource(pid, clientId, client, resources);
+ addResource(pid, uid, clientId, client, resources);
return NO_ERROR;
} break;
diff --git a/media/libmedia/include/media/IResourceManagerService.h b/media/libmedia/include/media/IResourceManagerService.h
index 1e4f6de..404519b 100644
--- a/media/libmedia/include/media/IResourceManagerService.h
+++ b/media/libmedia/include/media/IResourceManagerService.h
@@ -39,6 +39,7 @@
virtual void addResource(
int pid,
+ int uid,
int64_t clientId,
const sp<IResourceManagerClient> client,
const Vector<MediaResource> &resources) = 0;
diff --git a/media/libmedia/include/media/MediaResource.h b/media/libmedia/include/media/MediaResource.h
index e1fdb9b..10d0e3b 100644
--- a/media/libmedia/include/media/MediaResource.h
+++ b/media/libmedia/include/media/MediaResource.h
@@ -31,12 +31,13 @@
kNonSecureCodec,
kGraphicMemory,
kCpuBoost,
+ kBattery,
};
enum SubType {
kUnspecifiedSubType = 0,
kAudioCodec,
- kVideoCodec
+ kVideoCodec,
};
MediaResource();
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index bf14ec2..83da092 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -144,6 +144,10 @@
if (mLooper == NULL) {
return;
}
+
+ // Close the socket before posting the disconnect message to the RTSPSource message handler.
+ close(mHandler->getARTSPConnection()->getSocket());
+
sp<AMessage> msg = new AMessage(kWhatDisconnect, this);
sp<AMessage> dummy;
diff --git a/media/libnbaio/Android.bp b/media/libnbaio/Android.bp
index 6345742..04ddcff 100644
--- a/media/libnbaio/Android.bp
+++ b/media/libnbaio/Android.bp
@@ -8,15 +8,14 @@
header_libs: [
"libaudioclient_headers",
"libaudio_system_headers",
- "libmedia_headers",
],
export_header_lib_headers: [
"libaudioclient_headers",
- "libmedia_headers",
],
shared_libs: [
"libaudioutils",
+ "libcutils",
"liblog",
"libutils",
],
@@ -25,6 +24,11 @@
],
export_include_dirs: ["include_mono"],
+
+ cflags: [
+ "-Werror",
+ "-Wall",
+ ],
}
// libnbaio_mono is the part of libnbaio that is available for vendors to use. Vendor modules can't
@@ -55,18 +59,7 @@
// ],
// static_libs: ["libsndfile"],
- shared_libs: [
- "libaudioutils",
- "libbinder",
- "libcutils",
- "liblog",
- "libutils",
- ],
-
- cflags: [
- "-Werror",
- "-Wall",
- ],
+ header_libs: ["libaudiohal_headers"],
export_include_dirs: ["include"],
}
diff --git a/media/libnbaio/include_mono/media/nbaio/MonoPipe.h b/media/libnbaio/include_mono/media/nbaio/MonoPipe.h
index c51d0fe..926d84a 100644
--- a/media/libnbaio/include_mono/media/nbaio/MonoPipe.h
+++ b/media/libnbaio/include_mono/media/nbaio/MonoPipe.h
@@ -19,7 +19,7 @@
#include <time.h>
#include <audio_utils/fifo.h>
-#include <media/SingleStateQueue.h>
+#include <media/nbaio/SingleStateQueue.h>
#include <media/nbaio/NBAIO.h>
namespace android {
diff --git a/media/libmedia/include/media/SingleStateQueue.h b/media/libnbaio/include_mono/media/nbaio/SingleStateQueue.h
similarity index 100%
rename from media/libmedia/include/media/SingleStateQueue.h
rename to media/libnbaio/include_mono/media/nbaio/SingleStateQueue.h
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 9170805..7eab230 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -129,7 +129,6 @@
"CameraSource.cpp",
"CameraSourceTimeLapse.cpp",
"DataConverter.cpp",
- "DataSourceBase.cpp",
"DataSourceFactory.cpp",
"DataURISource.cpp",
"ClearFileSource.cpp",
@@ -267,7 +266,6 @@
srcs: [
"ClearFileSource.cpp",
"DataURISource.cpp",
- "DataSourceBase.cpp",
"HTTPBase.cpp",
"HevcUtils.cpp",
"MediaClock.cpp",
diff --git a/media/libstagefright/DataSourceBase.cpp b/media/libstagefright/DataSourceBase.cpp
deleted file mode 100644
index 8f47ee5..0000000
--- a/media/libstagefright/DataSourceBase.cpp
+++ /dev/null
@@ -1,130 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-//#define LOG_NDEBUG 0
-#define LOG_TAG "DataSourceBase"
-
-#include <media/DataSourceBase.h>
-#include <media/stagefright/foundation/ByteUtils.h>
-#include <media/stagefright/MediaErrors.h>
-#include <utils/String8.h>
-
-namespace android {
-
-bool DataSourceBase::getUInt16(off64_t offset, uint16_t *x) {
- *x = 0;
-
- uint8_t byte[2];
- if (readAt(offset, byte, 2) != 2) {
- return false;
- }
-
- *x = (byte[0] << 8) | byte[1];
-
- return true;
-}
-
-bool DataSourceBase::getUInt24(off64_t offset, uint32_t *x) {
- *x = 0;
-
- uint8_t byte[3];
- if (readAt(offset, byte, 3) != 3) {
- return false;
- }
-
- *x = (byte[0] << 16) | (byte[1] << 8) | byte[2];
-
- return true;
-}
-
-bool DataSourceBase::getUInt32(off64_t offset, uint32_t *x) {
- *x = 0;
-
- uint32_t tmp;
- if (readAt(offset, &tmp, 4) != 4) {
- return false;
- }
-
- *x = ntohl(tmp);
-
- return true;
-}
-
-bool DataSourceBase::getUInt64(off64_t offset, uint64_t *x) {
- *x = 0;
-
- uint64_t tmp;
- if (readAt(offset, &tmp, 8) != 8) {
- return false;
- }
-
- *x = ntoh64(tmp);
-
- return true;
-}
-
-bool DataSourceBase::getUInt16Var(off64_t offset, uint16_t *x, size_t size) {
- if (size == 2) {
- return getUInt16(offset, x);
- }
- if (size == 1) {
- uint8_t tmp;
- if (readAt(offset, &tmp, 1) == 1) {
- *x = tmp;
- return true;
- }
- }
- return false;
-}
-
-bool DataSourceBase::getUInt32Var(off64_t offset, uint32_t *x, size_t size) {
- if (size == 4) {
- return getUInt32(offset, x);
- }
- if (size == 2) {
- uint16_t tmp;
- if (getUInt16(offset, &tmp)) {
- *x = tmp;
- return true;
- }
- }
- return false;
-}
-
-bool DataSourceBase::getUInt64Var(off64_t offset, uint64_t *x, size_t size) {
- if (size == 8) {
- return getUInt64(offset, x);
- }
- if (size == 4) {
- uint32_t tmp;
- if (getUInt32(offset, &tmp)) {
- *x = tmp;
- return true;
- }
- }
- return false;
-}
-
-status_t DataSourceBase::getSize(off64_t *size) {
- *size = 0;
-
- return ERROR_UNSUPPORTED;
-}
-
-bool DataSourceBase::getUri(char *uriString __unused, size_t bufferSize __unused) {
- return false;
-}
-
-} // namespace android
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 2f13dc9..f130c9b 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -1635,8 +1635,13 @@
return BAD_VALUE;
}
+ // Increase moovExtraSize only once, irrespective of how many times
+ // setCaptureRate is called.
+ bool containsCaptureFps = mMetaKeys->contains(kMetaKey_CaptureFps);
mMetaKeys->setFloat(kMetaKey_CaptureFps, captureFps);
- mMoovExtraSize += sizeof(kMetaKey_CaptureFps) + 4 + 32;
+ if (!containsCaptureFps) {
+ mMoovExtraSize += sizeof(kMetaKey_CaptureFps) + 4 + 32;
+ }
return OK;
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index f579e9d..a6a856c 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -57,7 +57,6 @@
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
-#include <mediautils/BatteryNotifier.h>
#include <private/android_filesystem_config.h>
#include <utils/Singleton.h>
@@ -166,8 +165,9 @@
DISALLOW_EVIL_CONSTRUCTORS(ResourceManagerClient);
};
-MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(pid_t pid)
- : mPid(pid) {
+MediaCodec::ResourceManagerServiceProxy::ResourceManagerServiceProxy(
+ pid_t pid, uid_t uid)
+ : mPid(pid), mUid(uid) {
if (mPid == MediaCodec::kNoPid) {
mPid = IPCThreadState::self()->getCallingPid();
}
@@ -204,7 +204,7 @@
if (mService == NULL) {
return;
}
- mService->addResource(mPid, clientId, client, resources);
+ mService->addResource(mPid, mUid, clientId, client, resources);
}
void MediaCodec::ResourceManagerServiceProxy::removeResource(int64_t clientId) {
@@ -517,8 +517,6 @@
mStickyError(OK),
mSoftRenderer(NULL),
mAnalyticsItem(NULL),
- mResourceManagerClient(new ResourceManagerClient(this)),
- mResourceManagerService(new ResourceManagerServiceProxy(pid)),
mBatteryStatNotified(false),
mIsVideo(false),
mVideoWidth(0),
@@ -537,6 +535,8 @@
} else {
mUid = uid;
}
+ mResourceManagerClient = new ResourceManagerClient(this);
+ mResourceManagerService = new ResourceManagerServiceProxy(pid, mUid);
initAnalyticsItem();
}
@@ -1977,6 +1977,11 @@
if (mIsVideo) {
// audio codec is currently ignored.
addResource(resourceType, MediaResource::kVideoCodec, 1);
+ // TODO: track battery on/off by actual queueing/dequeueing.
+ // For now, keep the existing behavior and request battery on/off
+ // together with codec init/uninit; per-queue/dequeue tracking can
+ // be added later.
+ addResource(MediaResource::kBattery, MediaResource::kVideoCodec, 1);
}
(new AMessage)->postReply(mReplyID);
@@ -3126,8 +3131,6 @@
mState = newState;
cancelPendingDequeueOperations();
-
- updateBatteryStat();
}
void MediaCodec::returnBuffersToCodec(bool isReclaim) {
@@ -3631,20 +3634,6 @@
return OK;
}
-void MediaCodec::updateBatteryStat() {
- if (!mIsVideo) {
- return;
- }
-
- if (mState == CONFIGURED && !mBatteryStatNotified) {
- BatteryNotifier::getInstance().noteStartVideo(mUid);
- mBatteryStatNotified = true;
- } else if (mState == UNINITIALIZED && mBatteryStatNotified) {
- BatteryNotifier::getInstance().noteStopVideo(mUid);
- mBatteryStatNotified = false;
- }
-}
-
std::string MediaCodec::stateString(State state) {
const char *rval = NULL;
char rawbuffer[16]; // room for "%d"
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index 9ba2add..7ebdb1a 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -96,10 +96,18 @@
sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
status_t result = mWriter->addSource(newTrack);
- if (result == OK) {
- return mTrackList.add(newTrack);
+ if (result != OK) {
+ return -1;
}
- return -1;
+ float captureFps = -1.0;
+ if (format->findAsFloat("time-lapse-fps", &captureFps)) {
+ ALOGV("addTrack() time-lapse-fps: %f", captureFps);
+ result = mWriter->setCaptureRate(captureFps);
+ if (result != OK) {
+ ALOGW("addTrack() setCaptureRate failed :%d", result);
+ }
+ }
+ return mTrackList.add(newTrack);
}
status_t MediaMuxer::setOrientationHint(int degrees) {
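The addTrack() change above forwards an optional "time-lapse-fps" entry from the track format to the writer through the MediaWriter::setCaptureRate() hook (see the MPEG4Writer.cpp hunk above and the MediaWriter.h hunk later in this change). A hedged sketch of how a caller might populate that key; the helper name and the video format values are assumptions, only the "time-lapse-fps" key and addTrack(format) come from the code above:

    #include <media/stagefright/MediaMuxer.h>
    #include <media/stagefright/foundation/AMessage.h>

    using android::AMessage;
    using android::MediaMuxer;
    using android::sp;

    // Adds a time-lapse video track; the muxer reads "time-lapse-fps" in
    // addTrack() and passes it to the writer via setCaptureRate().
    ssize_t addTimeLapseTrack(const sp<MediaMuxer>& muxer, float captureFps) {
        sp<AMessage> format = new AMessage;
        format->setString("mime", "video/avc");
        format->setInt32("width", 1920);
        format->setInt32("height", 1080);
        // Writers that do not support a capture rate return ERROR_UNSUPPORTED
        // from setCaptureRate(); addTrack() only logs that and still adds the track.
        format->setFloat("time-lapse-fps", captureFps);
        return muxer->addTrack(format);
    }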
diff --git a/media/libstagefright/include/media/stagefright/DataSourceBase.h b/media/libstagefright/include/media/stagefright/DataSourceBase.h
index af5b83d..c607c91 100644
--- a/media/libstagefright/include/media/stagefright/DataSourceBase.h
+++ b/media/libstagefright/include/media/stagefright/DataSourceBase.h
@@ -18,6 +18,8 @@
#define DATA_SOURCE_BASE_H_
+#include <media/stagefright/foundation/ByteUtils.h>
+#include <media/stagefright/MediaErrors.h>
#include <sys/types.h>
#include <utils/Errors.h>
@@ -45,20 +47,106 @@
virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
// Convenience methods:
- bool getUInt16(off64_t offset, uint16_t *x);
- bool getUInt24(off64_t offset, uint32_t *x); // 3 byte int, returned as a 32-bit int
- bool getUInt32(off64_t offset, uint32_t *x);
- bool getUInt64(off64_t offset, uint64_t *x);
+ bool getUInt16(off64_t offset, uint16_t *x) {
+ *x = 0;
+
+ uint8_t byte[2];
+ if (readAt(offset, byte, 2) != 2) {
+ return false;
+ }
+
+ *x = (byte[0] << 8) | byte[1];
+
+ return true;
+ }
+ // 3 byte int, returned as a 32-bit int
+ bool getUInt24(off64_t offset, uint32_t *x) {
+ *x = 0;
+
+ uint8_t byte[3];
+ if (readAt(offset, byte, 3) != 3) {
+ return false;
+ }
+
+ *x = (byte[0] << 16) | (byte[1] << 8) | byte[2];
+
+ return true;
+ }
+ bool getUInt32(off64_t offset, uint32_t *x) {
+ *x = 0;
+
+ uint32_t tmp;
+ if (readAt(offset, &tmp, 4) != 4) {
+ return false;
+ }
+
+ *x = ntohl(tmp);
+
+ return true;
+ }
+ bool getUInt64(off64_t offset, uint64_t *x) {
+ *x = 0;
+
+ uint64_t tmp;
+ if (readAt(offset, &tmp, 8) != 8) {
+ return false;
+ }
+
+ *x = ntoh64(tmp);
+
+ return true;
+ }
// read either int<N> or int<2N> into a uint<2N>_t, size is the int size in bytes.
- bool getUInt16Var(off64_t offset, uint16_t *x, size_t size);
- bool getUInt32Var(off64_t offset, uint32_t *x, size_t size);
- bool getUInt64Var(off64_t offset, uint64_t *x, size_t size);
+ bool getUInt16Var(off64_t offset, uint16_t *x, size_t size) {
+ if (size == 2) {
+ return getUInt16(offset, x);
+ }
+ if (size == 1) {
+ uint8_t tmp;
+ if (readAt(offset, &tmp, 1) == 1) {
+ *x = tmp;
+ return true;
+ }
+ }
+ return false;
+ }
+ bool getUInt32Var(off64_t offset, uint32_t *x, size_t size) {
+ if (size == 4) {
+ return getUInt32(offset, x);
+ }
+ if (size == 2) {
+ uint16_t tmp;
+ if (getUInt16(offset, &tmp)) {
+ *x = tmp;
+ return true;
+ }
+ }
+ return false;
+ }
+ bool getUInt64Var(off64_t offset, uint64_t *x, size_t size) {
+ if (size == 8) {
+ return getUInt64(offset, x);
+ }
+ if (size == 4) {
+ uint32_t tmp;
+ if (getUInt32(offset, &tmp)) {
+ *x = tmp;
+ return true;
+ }
+ }
+ return false;
+ }
// May return ERROR_UNSUPPORTED.
- virtual status_t getSize(off64_t *size);
+ virtual status_t getSize(off64_t *size) {
+ *size = 0;
+ return ERROR_UNSUPPORTED;
+ }
- virtual bool getUri(char *uriString, size_t bufferSize);
+ virtual bool getUri(char * /*uriString*/, size_t /*bufferSize*/) {
+ return false;
+ }
virtual uint32_t flags() {
return 0;
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 89cca63..0218a88 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -283,7 +283,7 @@
};
struct ResourceManagerServiceProxy : public IBinder::DeathRecipient {
- ResourceManagerServiceProxy(pid_t pid);
+ ResourceManagerServiceProxy(pid_t pid, uid_t uid);
~ResourceManagerServiceProxy();
void init();
@@ -304,6 +304,7 @@
Mutex mLock;
sp<IResourceManagerService> mService;
pid_t mPid;
+ uid_t mUid;
};
State mState;
@@ -425,7 +426,6 @@
status_t onSetParameters(const sp<AMessage> ¶ms);
status_t amendOutputFormatWithCodecSpecificData(const sp<MediaCodecBuffer> &buffer);
- void updateBatteryStat();
bool isExecuting() const;
uint64_t getGraphicBufferSize();
diff --git a/media/libstagefright/include/media/stagefright/MediaWriter.h b/media/libstagefright/include/media/stagefright/MediaWriter.h
index 2c12a87..972ae1d 100644
--- a/media/libstagefright/include/media/stagefright/MediaWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaWriter.h
@@ -35,6 +35,10 @@
virtual status_t start(MetaData *params = NULL) = 0;
virtual status_t stop() = 0;
virtual status_t pause() = 0;
+ virtual status_t setCaptureRate(float /* captureFps */) {
+ ALOGW("setCaptureRate unsupported");
+ return ERROR_UNSUPPORTED;
+ }
virtual void setMaxFileSize(int64_t bytes) { mMaxFileSizeLimitBytes = bytes; }
virtual void setMaxFileDuration(int64_t durationUs) { mMaxFileDurationLimitUs = durationUs; }
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 8df2676..56b604d 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -46,6 +46,8 @@
const char *url, AString *host, unsigned *port, AString *path,
AString *user, AString *pass);
+ int getSocket() { return mSocket; }
+
protected:
virtual ~ARTSPConnection();
virtual void onMessageReceived(const sp<AMessage> &msg);
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 48bc8ce..85ffba2 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -257,6 +257,10 @@
msg->post();
}
+ sp<ARTSPConnection> getARTSPConnection() {
+ return mConn;
+ }
+
static void addRR(const sp<ABuffer> &buf) {
uint8_t *ptr = buf->data() + buf->size();
ptr[0] = 0x80 | 0;
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 28bfd3f..117a211 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -21,8 +21,11 @@
#include <binder/IMediaResourceMonitor.h>
#include <binder/IServiceManager.h>
+#include <cutils/sched_policy.h>
#include <dirent.h>
#include <media/stagefright/ProcessInfo.h>
+#include <mediautils/BatteryNotifier.h>
+#include <mediautils/SchedulingPolicyService.h>
#include <string.h>
#include <sys/types.h>
#include <sys/stat.h>
@@ -31,8 +34,7 @@
#include "ResourceManagerService.h"
#include "ServiceLog.h"
-#include "mediautils/SchedulingPolicyService.h"
-#include <cutils/sched_policy.h>
+
namespace android {
namespace {
@@ -101,6 +103,7 @@
}
static ResourceInfo& getResourceInfoForEdit(
+ uid_t uid,
int64_t clientId,
const sp<IResourceManagerClient>& client,
ResourceInfos& infos) {
@@ -110,9 +113,11 @@
}
}
ResourceInfo info;
+ info.uid = uid;
info.clientId = clientId;
info.client = client;
info.cpuBoost = false;
+ info.batteryNoted = false;
infos.push_back(info);
return infos.editItemAt(infos.size() - 1);
}
@@ -204,7 +209,9 @@
mServiceLog(new ServiceLog()),
mSupportsMultipleSecureCodecs(true),
mSupportsSecureWithNonSecureCodec(true),
- mCpuBoostCount(0) {}
+ mCpuBoostCount(0) {
+ BatteryNotifier::getInstance().noteResetVideo();
+}
ResourceManagerService::~ResourceManagerService() {}
@@ -226,6 +233,7 @@
void ResourceManagerService::addResource(
int pid,
+ int uid,
int64_t clientId,
const sp<IResourceManagerClient> client,
const Vector<MediaResource> &resources) {
@@ -239,7 +247,7 @@
return;
}
ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
- ResourceInfo& info = getResourceInfoForEdit(clientId, client, infos);
+ ResourceInfo& info = getResourceInfoForEdit(uid, clientId, client, infos);
// TODO: do the merge instead of append.
info.resources.appendVector(resources);
@@ -253,6 +261,11 @@
ALOGW("couldn't request cpuset boost");
}
mCpuBoostCount++;
+ } else if (resources[i].mType == MediaResource::kBattery
+ && resources[i].mSubType == MediaResource::kVideoCodec
+ && !info.batteryNoted) {
+ info.batteryNoted = true;
+ BatteryNotifier::getInstance().noteStartVideo(info.uid);
}
}
if (info.deathNotifier == nullptr) {
@@ -291,6 +304,9 @@
requestCpusetBoost(false, this);
}
}
+ if (infos[j].batteryNoted) {
+ BatteryNotifier::getInstance().noteStopVideo(infos[j].uid);
+ }
IInterface::asBinder(infos[j].client)->unlinkToDeath(infos[j].deathNotifier);
j = infos.removeAt(j);
found = true;
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 82d2a0b..741ef73 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -35,10 +35,12 @@
struct ResourceInfo {
int64_t clientId;
+ uid_t uid;
sp<IResourceManagerClient> client;
sp<IBinder::DeathRecipient> deathNotifier;
Vector<MediaResource> resources;
bool cpuBoost;
+ bool batteryNoted;
};
typedef Vector<ResourceInfo> ResourceInfos;
@@ -61,6 +63,7 @@
virtual void addResource(
int pid,
+ int uid,
int64_t clientId,
const sp<IResourceManagerClient> client,
const Vector<MediaResource> &resources);
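On the service side, a kBattery/kVideoCodec resource now toggles the BatteryNotifier once per client: addResource() notes video battery use for the client's uid the first time such a resource appears, and removal notes the stop. A hedged sketch of how a client process would declare it when registering, assuming the three-argument MediaResource(type, subType, value) constructor; the helper name and ids are placeholders:

    #include <media/IResourceManagerService.h>
    #include <media/MediaResource.h>
    #include <utils/Vector.h>

    using namespace android;

    void declareVideoBattery(const sp<IResourceManagerService>& service,
                             int pid, int uid, int64_t clientId,
                             const sp<IResourceManagerClient>& client) {
        Vector<MediaResource> resources;
        resources.push_back(
                MediaResource(MediaResource::kBattery, MediaResource::kVideoCodec, 1));
        // The service notes video battery usage for |uid| on the first such
        // resource and clears it again when the client's resources are removed.
        service->addResource(pid, uid, clientId, client, resources);
    }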
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index ed0b0ef..8a3987a 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -86,7 +86,10 @@
};
static const int kTestPid1 = 30;
+static const int kTestUid1 = 1010;
+
static const int kTestPid2 = 20;
+static const int kTestUid2 = 1011;
static const int kLowPriorityPid = 40;
static const int kMidPriorityPid = 25;
@@ -115,8 +118,11 @@
return true;
}
- static void expectEqResourceInfo(const ResourceInfo &info, sp<IResourceManagerClient> client,
+ static void expectEqResourceInfo(const ResourceInfo &info,
+ int uid,
+ sp<IResourceManagerClient> client,
const Vector<MediaResource> &resources) {
+ EXPECT_EQ(uid, info.uid);
EXPECT_EQ(client, info.client);
EXPECT_TRUE(isEqualResources(resources, info.resources));
}
@@ -153,24 +159,24 @@
// kTestPid1 mTestClient1
Vector<MediaResource> resources1;
resources1.push_back(MediaResource(MediaResource::kSecureCodec, 1));
- mService->addResource(kTestPid1, getId(mTestClient1), mTestClient1, resources1);
+ mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources1);
resources1.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
Vector<MediaResource> resources11;
resources11.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
- mService->addResource(kTestPid1, getId(mTestClient1), mTestClient1, resources11);
+ mService->addResource(kTestPid1, kTestUid1, getId(mTestClient1), mTestClient1, resources11);
// kTestPid2 mTestClient2
Vector<MediaResource> resources2;
resources2.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
resources2.push_back(MediaResource(MediaResource::kGraphicMemory, 300));
- mService->addResource(kTestPid2, getId(mTestClient2), mTestClient2, resources2);
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient2), mTestClient2, resources2);
// kTestPid2 mTestClient3
Vector<MediaResource> resources3;
- mService->addResource(kTestPid2, getId(mTestClient3), mTestClient3, resources3);
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources3);
resources3.push_back(MediaResource(MediaResource::kSecureCodec, 1));
resources3.push_back(MediaResource(MediaResource::kGraphicMemory, 100));
- mService->addResource(kTestPid2, getId(mTestClient3), mTestClient3, resources3);
+ mService->addResource(kTestPid2, kTestUid2, getId(mTestClient3), mTestClient3, resources3);
const PidResourceInfosMap &map = mService->mMap;
EXPECT_EQ(2u, map.size());
@@ -178,14 +184,14 @@
ASSERT_GE(index1, 0);
const ResourceInfos &infos1 = map[index1];
EXPECT_EQ(1u, infos1.size());
- expectEqResourceInfo(infos1[0], mTestClient1, resources1);
+ expectEqResourceInfo(infos1[0], kTestUid1, mTestClient1, resources1);
ssize_t index2 = map.indexOfKey(kTestPid2);
ASSERT_GE(index2, 0);
const ResourceInfos &infos2 = map[index2];
EXPECT_EQ(2u, infos2.size());
- expectEqResourceInfo(infos2[0], mTestClient2, resources2);
- expectEqResourceInfo(infos2[1], mTestClient3, resources3);
+ expectEqResourceInfo(infos2[0], kTestUid2, mTestClient2, resources2);
+ expectEqResourceInfo(infos2[1], kTestUid2, mTestClient3, resources3);
}
void testConfig() {