Merge "Add Winscope sync metadata to screen recording"
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 38fd34a..0c8d44a 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -153,9 +153,9 @@
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- filters/argbtorgba.rs \
- filters/nightvision.rs \
- filters/saturation.rs \
+ filters/argbtorgba.rscript \
+ filters/nightvision.rscript \
+ filters/saturation.rscript \
mediafilter.cpp \
LOCAL_SHARED_LIBRARIES := \
diff --git a/cmds/stagefright/filters/argbtorgba.rs b/cmds/stagefright/filters/argbtorgba.rscript
similarity index 100%
rename from cmds/stagefright/filters/argbtorgba.rs
rename to cmds/stagefright/filters/argbtorgba.rscript
diff --git a/cmds/stagefright/filters/nightvision.rs b/cmds/stagefright/filters/nightvision.rscript
similarity index 100%
rename from cmds/stagefright/filters/nightvision.rs
rename to cmds/stagefright/filters/nightvision.rscript
diff --git a/cmds/stagefright/filters/saturation.rs b/cmds/stagefright/filters/saturation.rscript
similarity index 100%
rename from cmds/stagefright/filters/saturation.rs
rename to cmds/stagefright/filters/saturation.rscript
diff --git a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index 0e20b47..5e28750 100644
--- a/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -124,6 +124,13 @@
mTimestampUs = 0u;
mTimestampDevTest = false;
if (mCompName == unknown_comp) mDisableTest = true;
+
+ C2SecureModeTuning secureModeTuning{};
+ mComponent->query({ &secureModeTuning }, {}, C2_MAY_BLOCK, nullptr);
+ if (secureModeTuning.value == C2Config::SM_READ_PROTECTED) {
+ mDisableTest = true;
+ }
+
if (mDisableTest) std::cout << "[ WARN ] Test Disabled \n";
}
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 2b417a6..5ed54f1 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -959,9 +959,9 @@
std::shared_ptr<Codec2Client::InputSurface> Codec2Client::CreateInputSurface(
char const* serviceName) {
- uint32_t inputSurfaceSetting = ::android::base::GetUintProperty(
- "debug.stagefright.c2inputsurface", uint32_t(0));
- if (inputSurfaceSetting == 0) {
+ int32_t inputSurfaceSetting = ::android::base::GetIntProperty(
+ "debug.stagefright.c2inputsurface", int32_t(0));
+ if (inputSurfaceSetting <= 0) {
return nullptr;
}
size_t index = GetServiceNames().size();
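The hunk above switches the read from GetUintProperty to GetIntProperty so a negative value survives; CCodec.cpp further down treats -1 as a request for the OMX fallback. A minimal standalone sketch of reading such a tri-state debug property, assuming only the android-base/properties.h API (not part of the patch's sources):

    #include <android-base/properties.h>

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Unset properties fall back to the supplied default (0); unlike
        // GetUintProperty, a negative value such as -1 is preserved.
        int32_t setting = ::android::base::GetIntProperty(
                "debug.stagefright.c2inputsurface", int32_t(0));
        if (setting > 0) {
            std::printf("Codec2 input surface enabled (%d)\n", setting);
        } else if (setting == -1) {
            std::printf("Codec2 input surface off, OMX fallback requested\n");
        } else {
            std::printf("Codec2 input surface off (%d)\n", setting);
        }
        return 0;
    }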
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 8ae80ee..9c84c71 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -9,6 +9,7 @@
"CCodecConfig.cpp",
"Codec2Buffer.cpp",
"Codec2InfoBuilder.cpp",
+ "Omx2IGraphicBufferSource.cpp",
"PipelineWatcher.cpp",
"ReflectedParamUpdater.cpp",
"SkipCutBuffer.cpp",
@@ -41,8 +42,10 @@
"libmedia",
"libmedia_omx",
"libsfplugin_ccodec_utils",
+ "libstagefright_bufferqueue_helper",
"libstagefright_codecbase",
"libstagefright_foundation",
+ "libstagefright_omx",
"libstagefright_omx_utils",
"libstagefright_xmlparser",
"libui",
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index aa7189c..8223273 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -45,6 +45,7 @@
#include "CCodec.h"
#include "CCodecBufferChannel.h"
#include "InputSurfaceWrapper.h"
+#include "Omx2IGraphicBufferSource.h"
extern "C" android::PersistentSurface *CreateInputSurface();
@@ -374,7 +375,11 @@
// consumer usage is queried earlier.
- ALOGD("ISConfig%s", status.str().c_str());
+ if (status.str().empty()) {
+ ALOGD("ISConfig not changed");
+ } else {
+ ALOGD("ISConfig%s", status.str().c_str());
+ }
return err;
}
@@ -1067,6 +1072,7 @@
OmxStatus s;
android::sp<HGraphicBufferProducer> gbp;
android::sp<HGraphicBufferSource> gbs;
+
using ::android::hardware::Return;
Return<void> transStatus = omx->createInputSurface(
[&s, &gbp, &gbs](
@@ -1852,15 +1858,30 @@
// Create Codec 2.0 input surface
extern "C" android::PersistentSurface *CreateInputSurface() {
+ using namespace android;
// Attempt to create a Codec2's input surface.
- std::shared_ptr<android::Codec2Client::InputSurface> inputSurface =
- android::Codec2Client::CreateInputSurface();
+ std::shared_ptr<Codec2Client::InputSurface> inputSurface =
+ Codec2Client::CreateInputSurface();
if (!inputSurface) {
- return nullptr;
+ if (property_get_int32("debug.stagefright.c2inputsurface", 0) == -1) {
+ sp<IGraphicBufferProducer> gbp;
+ sp<OmxGraphicBufferSource> gbs = new OmxGraphicBufferSource();
+ status_t err = gbs->initCheck();
+ if (err != OK) {
+ ALOGE("Failed to create persistent input surface: error %d", err);
+ return nullptr;
+ }
+ return new PersistentSurface(
+ gbs->getIGraphicBufferProducer(),
+ sp<IGraphicBufferSource>(
+ new Omx2IGraphicBufferSource(gbs)));
+ } else {
+ return nullptr;
+ }
}
- return new android::PersistentSurface(
+ return new PersistentSurface(
inputSurface->getGraphicBufferProducer(),
- static_cast<android::sp<android::hidl::base::V1_0::IBase>>(
+ static_cast<sp<android::hidl::base::V1_0::IBase>>(
inputSurface->getHalInterface()));
}
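The extern "C" CreateInputSurface() above is the factory exercised when the framework creates a persistent input surface; with this change it can hand back an OMX-backed PersistentSurface when debug.stagefright.c2inputsurface is -1. A hedged sketch of the public NDK calls that sit on top of this path (error handling trimmed; the AMediaFormat is assumed to already describe a video encoder):

    #include <android/native_window.h>
    #include <media/NdkMediaCodec.h>

    // Create a persistent input surface and attach it to an AVC encoder.
    media_status_t attachPersistentSurface(AMediaFormat *format) {
        ANativeWindow *surface = nullptr;
        media_status_t err = AMediaCodec_createPersistentInputSurface(&surface);
        if (err != AMEDIA_OK) return err;

        AMediaCodec *codec = AMediaCodec_createEncoderByType("video/avc");
        if (codec == nullptr) return AMEDIA_ERROR_UNKNOWN;

        err = AMediaCodec_configure(codec, format, nullptr /*surface*/,
                                    nullptr /*crypto*/, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
        if (err == AMEDIA_OK) {
            // Binds the persistent surface created above (backed by either the
            // Codec2 input surface or, with this patch, the OMX fallback).
            err = AMediaCodec_setInputSurface(codec, surface);
        }
        if (err == AMEDIA_OK) {
            err = AMediaCodec_start(codec);
        }
        // ... feed frames through `surface`, then AMediaCodec_stop/_delete and
        // ANativeWindow_release when done ...
        return err;
    }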
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 0b4c2d7..1548a89 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -896,6 +896,9 @@
input->buffers.reset(new DummyInputBuffers(mName));
} else if (mMetaMode == MODE_ANW) {
input->buffers.reset(new GraphicMetadataInputBuffers(mName));
+ // This is to ensure buffers do not get released prematurely.
+ // TODO: handle this without going into array mode
+ forceArrayMode = true;
} else {
input->buffers.reset(new GraphicInputBuffers(numInputSlots, mName));
}
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index 1cfdc19..5adcd94 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -235,7 +235,10 @@
const std::vector<ConfigMapper> &getConfigMappersForSdkKey(std::string key) const {
auto it = mConfigMappers.find(key);
if (it == mConfigMappers.end()) {
- ALOGD("no c2 equivalents for %s", key.c_str());
+ if (mComplained.count(key) == 0) {
+ ALOGD("no c2 equivalents for %s", key.c_str());
+ mComplained.insert(key);
+ }
return NO_MAPPERS;
}
ALOGV("found %zu eqs for %s", it->second.size(), key.c_str());
@@ -304,6 +307,7 @@
private:
std::map<SdkKey, std::vector<ConfigMapper>> mConfigMappers;
+ mutable std::set<std::string> mComplained;
};
const std::vector<ConfigMapper> StandardParams::NO_MAPPERS;
@@ -508,7 +512,8 @@
.limitTo(D::ENCODER & D::VIDEO));
// convert to timestamp base
add(ConfigMapper(KEY_I_FRAME_INTERVAL, C2_PARAMKEY_SYNC_FRAME_INTERVAL, "value")
- .withMappers([](C2Value v) -> C2Value {
+ .limitTo(D::VIDEO & D::ENCODER & D::CONFIG)
+ .withMapper([](C2Value v) -> C2Value {
// convert from i32 to float
int32_t i32Value;
float fpValue;
@@ -518,12 +523,6 @@
return int64_t(c2_min(1000000 * fpValue + 0.5, (double)INT64_MAX));
}
return C2Value();
- }, [](C2Value v) -> C2Value {
- int64_t i64;
- if (v.get(&i64)) {
- return float(i64) / 1000000;
- }
- return C2Value();
}));
// remove when codecs switch to proper coding.gop (add support for calculating gop)
deprecated(ConfigMapper("i-frame-period", "coding.gop", "intra-period")
@@ -1033,7 +1032,25 @@
}
ReflectedParamUpdater::Dict reflected = mParamUpdater->getParams(paramPointers);
- ALOGD("c2 config is %s", reflected.debugString().c_str());
+ std::string config = reflected.debugString();
+ std::set<std::string> configLines;
+ std::string diff;
+ for (size_t start = 0; start != std::string::npos; ) {
+ size_t end = config.find('\n', start);
+ size_t count = (end == std::string::npos)
+ ? std::string::npos
+ : end - start + 1;
+ std::string line = config.substr(start, count);
+ configLines.insert(line);
+ if (mLastConfig.count(line) == 0) {
+ diff.append(line);
+ }
+ start = (end == std::string::npos) ? std::string::npos : end + 1;
+ }
+ if (!diff.empty()) {
+ ALOGD("c2 config diff is %s", diff.c_str());
+ }
+ mLastConfig.swap(configLines);
bool changed = false;
if (domain & mInputDomain) {
diff --git a/media/codec2/sfplugin/CCodecConfig.h b/media/codec2/sfplugin/CCodecConfig.h
index 3bafe3f..a61c8b7 100644
--- a/media/codec2/sfplugin/CCodecConfig.h
+++ b/media/codec2/sfplugin/CCodecConfig.h
@@ -134,6 +134,8 @@
/// For now support a validation function.
std::map<C2Param::Index, LocalParamValidator> mLocalParams;
+ std::set<std::string> mLastConfig;
+
CCodecConfig();
/// initializes the members required to manage the format: descriptors, reflector,
diff --git a/media/codec2/sfplugin/Omx2IGraphicBufferSource.cpp b/media/codec2/sfplugin/Omx2IGraphicBufferSource.cpp
new file mode 100644
index 0000000..764fa00
--- /dev/null
+++ b/media/codec2/sfplugin/Omx2IGraphicBufferSource.cpp
@@ -0,0 +1,185 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "Omx2IGraphicBufferSource"
+#include <android-base/logging.h>
+
+#include "Omx2IGraphicBufferSource.h"
+
+#include <android/BnOMXBufferSource.h>
+#include <media/OMXBuffer.h>
+#include <media/stagefright/omx/OMXUtils.h>
+
+#include <OMX_Component.h>
+#include <OMX_Index.h>
+#include <OMX_IndexExt.h>
+
+namespace android {
+
+namespace /* unnamed */ {
+
+// OmxGraphicBufferSource -> IOMXBufferSource
+
+struct OmxGbs2IOmxBs : public BnOMXBufferSource {
+ sp<OmxGraphicBufferSource> mBase;
+ OmxGbs2IOmxBs(sp<OmxGraphicBufferSource> const& base) : mBase{base} {}
+ BnStatus onOmxExecuting() override {
+ return mBase->onOmxExecuting();
+ }
+ BnStatus onOmxIdle() override {
+ return mBase->onOmxIdle();
+ }
+ BnStatus onOmxLoaded() override {
+ return mBase->onOmxLoaded();
+ }
+ BnStatus onInputBufferAdded(int32_t bufferId) override {
+ return mBase->onInputBufferAdded(bufferId);
+ }
+ BnStatus onInputBufferEmptied(
+ int32_t bufferId,
+ OMXFenceParcelable const& fenceParcel) override {
+ return mBase->onInputBufferEmptied(bufferId, fenceParcel.get());
+ }
+};
+
+struct OmxNodeWrapper : public IOmxNodeWrapper {
+ sp<IOMXNode> mBase;
+ OmxNodeWrapper(sp<IOMXNode> const& base) : mBase{base} {}
+ status_t emptyBuffer(
+ int32_t bufferId, uint32_t flags,
+ const sp<GraphicBuffer> &buffer,
+ int64_t timestamp, int fenceFd) override {
+ return mBase->emptyBuffer(bufferId, buffer, flags, timestamp, fenceFd);
+ }
+ void dispatchDataSpaceChanged(
+ int32_t dataSpace, int32_t aspects, int32_t pixelFormat) override {
+ omx_message msg{};
+ msg.type = omx_message::EVENT;
+ msg.fenceFd = -1;
+ msg.u.event_data.event = OMX_EventDataSpaceChanged;
+ msg.u.event_data.data1 = dataSpace;
+ msg.u.event_data.data2 = aspects;
+ msg.u.event_data.data3 = pixelFormat;
+ mBase->dispatchMessage(msg);
+ }
+};
+
+} // unnamed namespace
+
+// Omx2IGraphicBufferSource
+Omx2IGraphicBufferSource::Omx2IGraphicBufferSource(
+ sp<OmxGraphicBufferSource> const& base)
+ : mBase{base},
+ mOMXBufferSource{new OmxGbs2IOmxBs(base)} {
+}
+
+BnStatus Omx2IGraphicBufferSource::setSuspend(
+ bool suspend, int64_t timeUs) {
+ return BnStatus::fromStatusT(mBase->setSuspend(suspend, timeUs));
+}
+
+BnStatus Omx2IGraphicBufferSource::setRepeatPreviousFrameDelayUs(
+ int64_t repeatAfterUs) {
+ return BnStatus::fromStatusT(mBase->setRepeatPreviousFrameDelayUs(repeatAfterUs));
+}
+
+BnStatus Omx2IGraphicBufferSource::setMaxFps(float maxFps) {
+ return BnStatus::fromStatusT(mBase->setMaxFps(maxFps));
+}
+
+BnStatus Omx2IGraphicBufferSource::setTimeLapseConfig(
+ double fps, double captureFps) {
+ return BnStatus::fromStatusT(mBase->setTimeLapseConfig(fps, captureFps));
+}
+
+BnStatus Omx2IGraphicBufferSource::setStartTimeUs(
+ int64_t startTimeUs) {
+ return BnStatus::fromStatusT(mBase->setStartTimeUs(startTimeUs));
+}
+
+BnStatus Omx2IGraphicBufferSource::setStopTimeUs(
+ int64_t stopTimeUs) {
+ return BnStatus::fromStatusT(mBase->setStopTimeUs(stopTimeUs));
+}
+
+BnStatus Omx2IGraphicBufferSource::getStopTimeOffsetUs(
+ int64_t *stopTimeOffsetUs) {
+ return BnStatus::fromStatusT(mBase->getStopTimeOffsetUs(stopTimeOffsetUs));
+}
+
+BnStatus Omx2IGraphicBufferSource::setColorAspects(
+ int32_t aspects) {
+ return BnStatus::fromStatusT(mBase->setColorAspects(aspects));
+}
+
+BnStatus Omx2IGraphicBufferSource::setTimeOffsetUs(
+ int64_t timeOffsetsUs) {
+ return BnStatus::fromStatusT(mBase->setTimeOffsetUs(timeOffsetsUs));
+}
+
+BnStatus Omx2IGraphicBufferSource::signalEndOfInputStream() {
+ return BnStatus::fromStatusT(mBase->signalEndOfInputStream());
+}
+
+BnStatus Omx2IGraphicBufferSource::configure(
+ const sp<IOMXNode>& omxNode, int32_t dataSpace) {
+ if (omxNode == NULL) {
+ return BnStatus::fromServiceSpecificError(BAD_VALUE);
+ }
+
+ // Do setInputSurface() first, the node will try to enable metadata
+ // mode on input, and does necessary error checking. If this fails,
+ // we can't use this input surface on the node.
+ status_t err = omxNode->setInputSurface(mOMXBufferSource);
+ if (err != NO_ERROR) {
+ ALOGE("Unable to set input surface: %d", err);
+ return BnStatus::fromServiceSpecificError(err);
+ }
+
+ uint32_t consumerUsage;
+ if (omxNode->getParameter(
+ (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
+ &consumerUsage, sizeof(consumerUsage)) != OK) {
+ consumerUsage = 0;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = 0; // kPortIndexInput
+
+ err = omxNode->getParameter(
+ OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != NO_ERROR) {
+ ALOGE("Failed to get port definition: %d", err);
+ return BnStatus::fromServiceSpecificError(UNKNOWN_ERROR);
+ }
+
+ return BnStatus::fromStatusT(mBase->configure(
+ new OmxNodeWrapper(omxNode),
+ dataSpace,
+ def.nBufferCountActual,
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight,
+ consumerUsage));
+}
+
+} // namespace android
+
diff --git a/media/codec2/sfplugin/Omx2IGraphicBufferSource.h b/media/codec2/sfplugin/Omx2IGraphicBufferSource.h
new file mode 100644
index 0000000..20fd1ec
--- /dev/null
+++ b/media/codec2/sfplugin/Omx2IGraphicBufferSource.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OMX_2_IGRAPHICBUFFERSOURCE_H_
+#define OMX_2_IGRAPHICBUFFERSOURCE_H_
+
+#include <android/BnGraphicBufferSource.h>
+#include <media/stagefright/omx/OmxGraphicBufferSource.h>
+
+namespace android {
+
+using BnStatus = ::android::binder::Status;
+
+struct Omx2IGraphicBufferSource : public BnGraphicBufferSource {
+ sp<OmxGraphicBufferSource> mBase;
+ sp<IOMXBufferSource> mOMXBufferSource;
+ Omx2IGraphicBufferSource(sp<OmxGraphicBufferSource> const& base);
+ BnStatus configure(const sp<IOMXNode>& omxNode, int32_t dataSpace) override;
+ BnStatus setSuspend(bool suspend, int64_t timeUs) override;
+ BnStatus setRepeatPreviousFrameDelayUs(int64_t repeatAfterUs) override;
+ BnStatus setMaxFps(float maxFps) override;
+ BnStatus setTimeLapseConfig(double fps, double captureFps) override;
+ BnStatus setStartTimeUs(int64_t startTimeUs) override;
+ BnStatus setStopTimeUs(int64_t stopTimeUs) override;
+ BnStatus getStopTimeOffsetUs(int64_t *stopTimeOffsetUs) override;
+ BnStatus setColorAspects(int32_t aspects) override;
+ BnStatus setTimeOffsetUs(int64_t timeOffsetsUs) override;
+ BnStatus signalEndOfInputStream() override;
+};
+
+} // namespace android
+
+#endif // OMX_2_IGRAPHICBUFFERSOURCE_H_
+
diff --git a/media/libaaudio/src/Android.bp b/media/libaaudio/src/Android.bp
index 4090286..1eedb12 100644
--- a/media/libaaudio/src/Android.bp
+++ b/media/libaaudio/src/Android.bp
@@ -73,6 +73,5 @@
"libcutils",
"libutils",
"libbinder",
- "libaudiomanager",
],
}
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index 028bea1..d95bc8e 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -213,15 +213,14 @@
return interface_cast<IMemory>(reply.readStrongBinder());
}
- status_t getFrameAtIndex(std::vector<sp<IMemory> > *frames,
- int frameIndex, int numFrames, int colorFormat, bool metaOnly)
+ sp<IMemory> getFrameAtIndex(
+ int index, int colorFormat, bool metaOnly)
{
- ALOGV("getFrameAtIndex: frameIndex(%d), numFrames(%d), colorFormat(%d) metaOnly(%d)",
- frameIndex, numFrames, colorFormat, metaOnly);
+ ALOGV("getFrameAtIndex: index(%d), colorFormat(%d) metaOnly(%d)",
+ index, colorFormat, metaOnly);
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
- data.writeInt32(frameIndex);
- data.writeInt32(numFrames);
+ data.writeInt32(index);
data.writeInt32(colorFormat);
data.writeInt32(metaOnly);
#ifndef DISABLE_GROUP_SCHEDULE_HACK
@@ -230,16 +229,9 @@
remote()->transact(GET_FRAME_AT_INDEX, data, &reply);
status_t ret = reply.readInt32();
if (ret != NO_ERROR) {
- return ret;
+ return NULL;
}
- int retNumFrames = reply.readInt32();
- if (retNumFrames < numFrames) {
- numFrames = retNumFrames;
- }
- for (int i = 0; i < numFrames; i++) {
- frames->push_back(interface_cast<IMemory>(reply.readStrongBinder()));
- }
- return OK;
+ return interface_cast<IMemory>(reply.readStrongBinder());
}
sp<IMemory> extractAlbumArt()
@@ -442,24 +434,20 @@
case GET_FRAME_AT_INDEX: {
CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
- int frameIndex = data.readInt32();
- int numFrames = data.readInt32();
+ int index = data.readInt32();
int colorFormat = data.readInt32();
bool metaOnly = (data.readInt32() != 0);
- ALOGV("getFrameAtIndex: frameIndex(%d), numFrames(%d), colorFormat(%d), metaOnly(%d)",
- frameIndex, numFrames, colorFormat, metaOnly);
+ ALOGV("getFrameAtIndex: index(%d), colorFormat(%d), metaOnly(%d)",
+ index, colorFormat, metaOnly);
#ifndef DISABLE_GROUP_SCHEDULE_HACK
setSchedPolicy(data);
#endif
- std::vector<sp<IMemory> > frames;
- status_t err = getFrameAtIndex(
- &frames, frameIndex, numFrames, colorFormat, metaOnly);
- reply->writeInt32(err);
- if (OK == err) {
- reply->writeInt32(frames.size());
- for (size_t i = 0; i < frames.size(); i++) {
- reply->writeStrongBinder(IInterface::asBinder(frames[i]));
- }
+ sp<IMemory> frame = getFrameAtIndex(index, colorFormat, metaOnly);
+ if (frame != nullptr) { // Don't send NULL across the binder interface
+ reply->writeInt32(NO_ERROR);
+ reply->writeStrongBinder(IInterface::asBinder(frame));
+ } else {
+ reply->writeInt32(UNKNOWN_ERROR);
}
#ifndef DISABLE_GROUP_SCHEDULE_HACK
restoreSchedPolicy();
diff --git a/media/libmedia/include/media/IMediaMetadataRetriever.h b/media/libmedia/include/media/IMediaMetadataRetriever.h
index c6f422d..28d2192 100644
--- a/media/libmedia/include/media/IMediaMetadataRetriever.h
+++ b/media/libmedia/include/media/IMediaMetadataRetriever.h
@@ -48,9 +48,8 @@
int index, int colorFormat, bool metaOnly, bool thumbnail) = 0;
virtual sp<IMemory> getImageRectAtIndex(
int index, int colorFormat, int left, int top, int right, int bottom) = 0;
- virtual status_t getFrameAtIndex(
- std::vector<sp<IMemory> > *frames,
- int frameIndex, int numFrames, int colorFormat, bool metaOnly) = 0;
+ virtual sp<IMemory> getFrameAtIndex(
+ int index, int colorFormat, bool metaOnly) = 0;
virtual sp<IMemory> extractAlbumArt() = 0;
virtual const char* extractMetadata(int keyCode) = 0;
};
diff --git a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
index 98d300f..37dc401 100644
--- a/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
+++ b/media/libmedia/include/media/MediaMetadataRetrieverInterface.h
@@ -49,9 +49,8 @@
int index, int colorFormat, bool metaOnly, bool thumbnail) = 0;
virtual sp<IMemory> getImageRectAtIndex(
int index, int colorFormat, int left, int top, int right, int bottom) = 0;
- virtual status_t getFrameAtIndex(
- std::vector<sp<IMemory> >* frames,
- int frameIndex, int numFrames, int colorFormat, bool metaOnly) = 0;
+ virtual sp<IMemory> getFrameAtIndex(
+ int frameIndex, int colorFormat, bool metaOnly) = 0;
virtual MediaAlbumArt* extractAlbumArt() = 0;
virtual const char* extractMetadata(int keyCode) = 0;
};
diff --git a/media/libmedia/include/media/mediametadataretriever.h b/media/libmedia/include/media/mediametadataretriever.h
index d29e97d..138a014 100644
--- a/media/libmedia/include/media/mediametadataretriever.h
+++ b/media/libmedia/include/media/mediametadataretriever.h
@@ -98,9 +98,8 @@
int colorFormat = HAL_PIXEL_FORMAT_RGB_565, bool metaOnly = false, bool thumbnail = false);
sp<IMemory> getImageRectAtIndex(
int index, int colorFormat, int left, int top, int right, int bottom);
- status_t getFrameAtIndex(
- std::vector<sp<IMemory> > *frames, int frameIndex, int numFrames = 1,
- int colorFormat = HAL_PIXEL_FORMAT_RGB_565, bool metaOnly = false);
+ sp<IMemory> getFrameAtIndex(
+ int index, int colorFormat = HAL_PIXEL_FORMAT_RGB_565, bool metaOnly = false);
sp<IMemory> extractAlbumArt();
const char* extractMetadata(int keyCode);
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index e61b04d..2ae76b3 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -179,18 +179,16 @@
index, colorFormat, left, top, right, bottom);
}
-status_t MediaMetadataRetriever::getFrameAtIndex(
- std::vector<sp<IMemory> > *frames,
- int frameIndex, int numFrames, int colorFormat, bool metaOnly) {
- ALOGV("getFrameAtIndex: frameIndex(%d), numFrames(%d), colorFormat(%d) metaOnly(%d)",
- frameIndex, numFrames, colorFormat, metaOnly);
+sp<IMemory> MediaMetadataRetriever::getFrameAtIndex(
+ int index, int colorFormat, bool metaOnly) {
+ ALOGV("getFrameAtIndex: index(%d), colorFormat(%d) metaOnly(%d)",
+ index, colorFormat, metaOnly);
Mutex::Autolock _l(mLock);
if (mRetriever == 0) {
ALOGE("retriever is not initialized");
- return INVALID_OPERATION;
+ return NULL;
}
- return mRetriever->getFrameAtIndex(
- frames, frameIndex, numFrames, colorFormat, metaOnly);
+ return mRetriever->getFrameAtIndex(index, colorFormat, metaOnly);
}
const char* MediaMetadataRetriever::extractMetadata(int keyCode)
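After this change the client API returns one frame per call instead of filling a vector, and failure is signalled by a null IMemory rather than a status_t. A usage sketch against the new signature, assuming a MediaMetadataRetriever whose data source has already been set:

    #include <binder/IMemory.h>
    #include <media/mediametadataretriever.h>
    #include <utils/StrongPointer.h>

    using namespace android;

    // Decode the first `count` frames one at a time with the single-frame
    // getFrameAtIndex(). Returns how many frames were actually decoded.
    static int dumpFrames(const sp<MediaMetadataRetriever> &retriever, int count) {
        int decoded = 0;
        for (int index = 0; index < count; ++index) {
            // Defaults: HAL_PIXEL_FORMAT_RGB_565, full decode (metaOnly = false).
            sp<IMemory> frame = retriever->getFrameAtIndex(index);
            if (frame == nullptr) {
                break;  // NULL now signals failure instead of a status_t.
            }
            ++decoded;
        }
        return decoded;
    }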
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 40b17bf..4a3c65e 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -242,31 +242,27 @@
sp<IMemory> frame = mRetriever->getImageRectAtIndex(
index, colorFormat, left, top, right, bottom);
if (frame == NULL) {
- ALOGE("failed to extract image");
- return NULL;
+ ALOGE("failed to extract image at index %d", index);
}
return frame;
}
-status_t MetadataRetrieverClient::getFrameAtIndex(
- std::vector<sp<IMemory> > *frames,
- int frameIndex, int numFrames, int colorFormat, bool metaOnly) {
- ALOGV("getFrameAtIndex: frameIndex(%d), numFrames(%d), colorFormat(%d), metaOnly(%d)",
- frameIndex, numFrames, colorFormat, metaOnly);
+sp<IMemory> MetadataRetrieverClient::getFrameAtIndex(
+ int index, int colorFormat, bool metaOnly) {
+ ALOGV("getFrameAtIndex: index(%d), colorFormat(%d), metaOnly(%d)",
+ index, colorFormat, metaOnly);
Mutex::Autolock lock(mLock);
Mutex::Autolock glock(sLock);
if (mRetriever == NULL) {
ALOGE("retriever is not initialized");
- return INVALID_OPERATION;
+ return NULL;
}
- status_t err = mRetriever->getFrameAtIndex(
- frames, frameIndex, numFrames, colorFormat, metaOnly);
- if (err != OK) {
- frames->clear();
- return err;
+ sp<IMemory> frame = mRetriever->getFrameAtIndex(index, colorFormat, metaOnly);
+ if (frame == NULL) {
+ ALOGE("failed to extract frame at index %d", index);
}
- return OK;
+ return frame;
}
sp<IMemory> MetadataRetrieverClient::extractAlbumArt()
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index 272d093..8020441 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -56,9 +56,8 @@
int index, int colorFormat, bool metaOnly, bool thumbnail);
virtual sp<IMemory> getImageRectAtIndex(
int index, int colorFormat, int left, int top, int right, int bottom);
- virtual status_t getFrameAtIndex(
- std::vector<sp<IMemory> > *frames,
- int frameIndex, int numFrames, int colorFormat, bool metaOnly);
+ virtual sp<IMemory> getFrameAtIndex(
+ int index, int colorFormat, bool metaOnly);
virtual sp<IMemory> extractAlbumArt();
virtual const char* extractMetadata(int keyCode);
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 18a6bd8..7c620a0 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -195,14 +195,14 @@
}
status_t FrameDecoder::init(
- int64_t frameTimeUs, size_t numFrames, int option, int colorFormat) {
+ int64_t frameTimeUs, int option, int colorFormat) {
if (!getDstColorFormat(
(android_pixel_format_t)colorFormat, &mDstFormat, &mDstBpp)) {
return ERROR_UNSUPPORTED;
}
sp<AMessage> videoFormat = onGetFormatAndSeekOptions(
- frameTimeUs, numFrames, option, &mReadOptions);
+ frameTimeUs, option, &mReadOptions);
if (videoFormat == NULL) {
ALOGE("video format or seek mode not supported");
return ERROR_UNSUPPORTED;
@@ -253,19 +253,7 @@
return NULL;
}
- return mFrames.size() > 0 ? mFrames[0] : NULL;
-}
-
-status_t FrameDecoder::extractFrames(std::vector<sp<IMemory> >* frames) {
- status_t err = extractInternal();
- if (err != OK) {
- return err;
- }
-
- for (size_t i = 0; i < mFrames.size(); i++) {
- frames->push_back(mFrames[i]);
- }
- return OK;
+ return mFrameMemory;
}
status_t FrameDecoder::extractInternal() {
@@ -404,22 +392,20 @@
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source)
: FrameDecoder(componentName, trackMeta, source),
+ mFrame(NULL),
mIsAvcOrHevc(false),
mSeekMode(MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC),
- mTargetTimeUs(-1LL),
- mNumFrames(0),
- mNumFramesDecoded(0) {
+ mTargetTimeUs(-1LL) {
}
sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
- int64_t frameTimeUs, size_t numFrames, int seekMode, MediaSource::ReadOptions *options) {
+ int64_t frameTimeUs, int seekMode, MediaSource::ReadOptions *options) {
mSeekMode = static_cast<MediaSource::ReadOptions::SeekMode>(seekMode);
if (mSeekMode < MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC ||
mSeekMode > MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
ALOGE("Unknown seek mode: %d", mSeekMode);
return NULL;
}
- mNumFrames = numFrames;
const char *mime;
if (!trackMeta()->findCString(kKeyMIMEType, &mime)) {
@@ -495,7 +481,7 @@
return OK;
}
- *done = (++mNumFramesDecoded >= mNumFrames);
+ *done = true;
if (outputFormat == NULL) {
return ERROR_MALFORMED;
@@ -518,15 +504,18 @@
crop_bottom = height - 1;
}
- sp<IMemory> frameMem = allocVideoFrame(
- trackMeta(),
- (crop_right - crop_left + 1),
- (crop_bottom - crop_top + 1),
- 0,
- 0,
- dstBpp());
- addFrame(frameMem);
- VideoFrame* frame = static_cast<VideoFrame*>(frameMem->pointer());
+ if (mFrame == NULL) {
+ sp<IMemory> frameMem = allocVideoFrame(
+ trackMeta(),
+ (crop_right - crop_left + 1),
+ (crop_bottom - crop_top + 1),
+ 0,
+ 0,
+ dstBpp());
+ mFrame = static_cast<VideoFrame*>(frameMem->pointer());
+
+ setFrame(frameMem);
+ }
ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
@@ -547,8 +536,8 @@
(const uint8_t *)videoFrameBuffer->data(),
width, height, stride,
crop_left, crop_top, crop_right, crop_bottom,
- frame->getFlattenedData(),
- frame->mWidth, frame->mHeight, frame->mRowBytes,
+ mFrame->getFlattenedData(),
+ mFrame->mWidth, mFrame->mHeight, mFrame->mRowBytes,
crop_left, crop_top, crop_right, crop_bottom);
return OK;
}
@@ -577,8 +566,7 @@
}
sp<AMessage> ImageDecoder::onGetFormatAndSeekOptions(
- int64_t frameTimeUs, size_t /*numFrames*/,
- int /*seekMode*/, MediaSource::ReadOptions *options) {
+ int64_t frameTimeUs, int /*seekMode*/, MediaSource::ReadOptions *options) {
sp<MetaData> overrideMeta;
if (frameTimeUs < 0) {
uint32_t type;
@@ -705,7 +693,7 @@
trackMeta(), mWidth, mHeight, mTileWidth, mTileHeight, dstBpp());
mFrame = static_cast<VideoFrame*>(frameMem->pointer());
- addFrame(frameMem);
+ setFrame(frameMem);
}
int32_t srcFormat;
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index fa3d372..6f536a9 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -44,7 +44,7 @@
StagefrightMetadataRetriever::StagefrightMetadataRetriever()
: mParsedMetaData(false),
mAlbumArt(NULL),
- mLastImageIndex(-1) {
+ mLastDecodedIndex(-1) {
ALOGV("StagefrightMetadataRetriever()");
}
@@ -143,8 +143,8 @@
FrameRect rect = {left, top, right, bottom};
- if (mImageDecoder != NULL && index == mLastImageIndex) {
- return mImageDecoder->extractFrame(&rect);
+ if (mDecoder != NULL && index == mLastDecodedIndex) {
+ return mDecoder->extractFrame(&rect);
}
return getImageInternal(
@@ -153,6 +153,8 @@
sp<IMemory> StagefrightMetadataRetriever::getImageInternal(
int index, int colorFormat, bool metaOnly, bool thumbnail, FrameRect* rect) {
+ mDecoder.clear();
+ mLastDecodedIndex = -1;
if (mExtractor.get() == NULL) {
ALOGE("no extractor.");
@@ -227,14 +229,14 @@
const AString &componentName = matchingCodecs[i];
sp<ImageDecoder> decoder = new ImageDecoder(componentName, trackMeta, source);
int64_t frameTimeUs = thumbnail ? -1 : 0;
- if (decoder->init(frameTimeUs, 1 /*numFrames*/, 0 /*option*/, colorFormat) == OK) {
+ if (decoder->init(frameTimeUs, 0 /*option*/, colorFormat) == OK) {
sp<IMemory> frame = decoder->extractFrame(rect);
if (frame != NULL) {
if (rect != NULL) {
// keep the decoder if slice decoding
- mImageDecoder = decoder;
- mLastImageIndex = index;
+ mDecoder = decoder;
+ mLastDecodedIndex = index;
}
return frame;
}
@@ -242,6 +244,7 @@
ALOGV("%s failed to extract thumbnail, trying next decoder.", componentName.c_str());
}
+ ALOGE("all codecs failed to extract frame.");
return NULL;
}
@@ -250,36 +253,40 @@
ALOGV("getFrameAtTime: %" PRId64 " us option: %d colorFormat: %d, metaOnly: %d",
timeUs, option, colorFormat, metaOnly);
- sp<IMemory> frame;
- status_t err = getFrameInternal(
- timeUs, 1, option, colorFormat, metaOnly, &frame, NULL /*outFrames*/);
- return (err == OK) ? frame : NULL;
+ return getFrameInternal(timeUs, option, colorFormat, metaOnly);
}
-status_t StagefrightMetadataRetriever::getFrameAtIndex(
- std::vector<sp<IMemory> >* frames,
- int frameIndex, int numFrames, int colorFormat, bool metaOnly) {
- ALOGV("getFrameAtIndex: frameIndex %d, numFrames %d, colorFormat: %d, metaOnly: %d",
- frameIndex, numFrames, colorFormat, metaOnly);
+sp<IMemory> StagefrightMetadataRetriever::getFrameAtIndex(
+ int frameIndex, int colorFormat, bool metaOnly) {
+ ALOGV("getFrameAtIndex: frameIndex %d, colorFormat: %d, metaOnly: %d",
+ frameIndex, colorFormat, metaOnly);
+ if (mDecoder != NULL && frameIndex == mLastDecodedIndex + 1) {
+ sp<IMemory> frame = mDecoder->extractFrame();
+ if (frame != nullptr) {
+ mLastDecodedIndex = frameIndex;
+ }
+ return frame;
+ }
- return getFrameInternal(
- frameIndex, numFrames, MediaSource::ReadOptions::SEEK_FRAME_INDEX,
- colorFormat, metaOnly, NULL /*outFrame*/, frames);
+ return getFrameInternal(frameIndex,
+ MediaSource::ReadOptions::SEEK_FRAME_INDEX, colorFormat, metaOnly);
}
-status_t StagefrightMetadataRetriever::getFrameInternal(
- int64_t timeUs, int numFrames, int option, int colorFormat, bool metaOnly,
- sp<IMemory>* outFrame, std::vector<sp<IMemory> >* outFrames) {
+sp<IMemory> StagefrightMetadataRetriever::getFrameInternal(
+ int64_t timeUs, int option, int colorFormat, bool metaOnly) {
+ mDecoder.clear();
+ mLastDecodedIndex = -1;
+
if (mExtractor.get() == NULL) {
ALOGE("no extractor.");
- return NO_INIT;
+ return NULL;
}
sp<MetaData> fileMeta = mExtractor->getMetaData();
if (fileMeta == NULL) {
ALOGE("extractor doesn't publish metadata, failed to initialize?");
- return NO_INIT;
+ return NULL;
}
size_t n = mExtractor->countTracks();
@@ -300,30 +307,24 @@
if (i == n) {
ALOGE("no video track found.");
- return INVALID_OPERATION;
+ return NULL;
}
sp<MetaData> trackMeta = mExtractor->getTrackMetaData(
i, MediaExtractor::kIncludeExtensiveMetaData);
if (!trackMeta) {
- return UNKNOWN_ERROR;
+ return NULL;
}
if (metaOnly) {
- if (outFrame != NULL) {
- *outFrame = FrameDecoder::getMetadataOnly(trackMeta, colorFormat);
- if (*outFrame != NULL) {
- return OK;
- }
- }
- return UNKNOWN_ERROR;
+ return FrameDecoder::getMetadataOnly(trackMeta, colorFormat);
}
sp<IMediaSource> source = mExtractor->getTrack(i);
if (source.get() == NULL) {
ALOGV("unable to instantiate video track.");
- return UNKNOWN_ERROR;
+ return NULL;
}
const void *data;
@@ -350,24 +351,22 @@
for (size_t i = 0; i < matchingCodecs.size(); ++i) {
const AString &componentName = matchingCodecs[i];
sp<VideoFrameDecoder> decoder = new VideoFrameDecoder(componentName, trackMeta, source);
- if (decoder->init(timeUs, numFrames, option, colorFormat) == OK) {
- if (outFrame != NULL) {
- *outFrame = decoder->extractFrame();
- if (*outFrame != NULL) {
- return OK;
+ if (decoder->init(timeUs, option, colorFormat) == OK) {
+ sp<IMemory> frame = decoder->extractFrame();
+ if (frame != nullptr) {
+ // keep the decoder if seeking by frame index
+ if (option == MediaSource::ReadOptions::SEEK_FRAME_INDEX) {
+ mDecoder = decoder;
+ mLastDecodedIndex = timeUs;
}
- } else if (outFrames != NULL) {
- status_t err = decoder->extractFrames(outFrames);
- if (err == OK) {
- return OK;
- }
+ return frame;
}
}
ALOGV("%s failed to extract frame, trying next decoder.", componentName.c_str());
}
ALOGE("all codecs failed to extract frame.");
- return UNKNOWN_ERROR;
+ return NULL;
}
MediaAlbumArt *StagefrightMetadataRetriever::extractAlbumArt() {
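The retriever above now keeps the last FrameDecoder alive and reuses it only when getFrameAtIndex() asks for the immediately following index; any other request tears it down and starts a fresh decoder seeked to the new position. A simplified sketch of that caching pattern with hypothetical Decoder and Frame stand-ins (not the real FrameDecoder API):

    #include <memory>

    struct Frame {};

    // Stand-in for FrameDecoder: decodes frames sequentially from its start index.
    struct Decoder {
        explicit Decoder(int startIndex) : mNextIndex(startIndex) {}
        std::shared_ptr<Frame> extractFrame() {
            ++mNextIndex;
            return std::make_shared<Frame>();
        }
        int mNextIndex;
    };

    class CachedFrameSource {
    public:
        std::shared_ptr<Frame> getFrameAtIndex(int index) {
            // Reuse the live decoder only for the immediately following index;
            // anything else needs a new decoder positioned at `index`.
            if (!mDecoder || index != mLastDecodedIndex + 1) {
                mDecoder = std::make_unique<Decoder>(index);
            }
            std::shared_ptr<Frame> frame = mDecoder->extractFrame();
            if (frame) {
                mLastDecodedIndex = index;
            }
            return frame;
        }
    private:
        std::unique_ptr<Decoder> mDecoder;
        int mLastDecodedIndex = -1;
    };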
diff --git a/media/libstagefright/filters/Android.bp b/media/libstagefright/filters/Android.bp
index 7a67e55..b1f62c7 100644
--- a/media/libstagefright/filters/Android.bp
+++ b/media/libstagefright/filters/Android.bp
@@ -8,7 +8,7 @@
"MediaFilter.cpp",
"RSFilter.cpp",
"SaturationFilter.cpp",
- "saturationARGB.rs",
+ "saturationARGB.rscript",
"SimpleFilter.cpp",
"ZeroFilter.cpp",
],
diff --git a/media/libstagefright/filters/saturation.rs b/media/libstagefright/filters/saturation.rscript
similarity index 100%
rename from media/libstagefright/filters/saturation.rs
rename to media/libstagefright/filters/saturation.rscript
diff --git a/media/libstagefright/filters/saturationARGB.rs b/media/libstagefright/filters/saturationARGB.rscript
similarity index 100%
rename from media/libstagefright/filters/saturationARGB.rs
rename to media/libstagefright/filters/saturationARGB.rscript
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index dc58c15..ce37ae8 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -44,13 +44,10 @@
const sp<MetaData> &trackMeta,
const sp<IMediaSource> &source);
- status_t init(
- int64_t frameTimeUs, size_t numFrames, int option, int colorFormat);
+ status_t init(int64_t frameTimeUs, int option, int colorFormat);
sp<IMemory> extractFrame(FrameRect *rect = NULL);
- status_t extractFrames(std::vector<sp<IMemory> >* frames);
-
static sp<IMemory> getMetadataOnly(
const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail = false);
@@ -59,7 +56,6 @@
virtual sp<AMessage> onGetFormatAndSeekOptions(
int64_t frameTimeUs,
- size_t numFrames,
int seekMode,
MediaSource::ReadOptions *options) = 0;
@@ -80,10 +76,7 @@
sp<MetaData> trackMeta() const { return mTrackMeta; }
OMX_COLOR_FORMATTYPE dstFormat() const { return mDstFormat; }
int32_t dstBpp() const { return mDstBpp; }
-
- void addFrame(const sp<IMemory> &frame) {
- mFrames.push_back(frame);
- }
+ void setFrame(const sp<IMemory> &frameMem) { mFrameMemory = frameMem; }
private:
AString mComponentName;
@@ -91,7 +84,7 @@
sp<IMediaSource> mSource;
OMX_COLOR_FORMATTYPE mDstFormat;
int32_t mDstBpp;
- std::vector<sp<IMemory> > mFrames;
+ sp<IMemory> mFrameMemory;
MediaSource::ReadOptions mReadOptions;
sp<MediaCodec> mDecoder;
sp<AMessage> mOutputFormat;
@@ -112,7 +105,6 @@
protected:
virtual sp<AMessage> onGetFormatAndSeekOptions(
int64_t frameTimeUs,
- size_t numFrames,
int seekMode,
MediaSource::ReadOptions *options) override;
@@ -134,11 +126,10 @@
bool *done) override;
private:
+ VideoFrame *mFrame;
bool mIsAvcOrHevc;
MediaSource::ReadOptions::SeekMode mSeekMode;
int64_t mTargetTimeUs;
- size_t mNumFrames;
- size_t mNumFramesDecoded;
};
struct ImageDecoder : public FrameDecoder {
@@ -150,7 +141,6 @@
protected:
virtual sp<AMessage> onGetFormatAndSeekOptions(
int64_t frameTimeUs,
- size_t numFrames,
int seekMode,
MediaSource::ReadOptions *options) override;
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index c50677a..ee51290 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -26,7 +26,7 @@
namespace android {
class DataSource;
-struct ImageDecoder;
+struct FrameDecoder;
struct FrameRect;
struct StagefrightMetadataRetriever : public MediaMetadataRetrieverBase {
@@ -47,9 +47,8 @@
int index, int colorFormat, bool metaOnly, bool thumbnail);
virtual sp<IMemory> getImageRectAtIndex(
int index, int colorFormat, int left, int top, int right, int bottom);
- virtual status_t getFrameAtIndex(
- std::vector<sp<IMemory> >* frames,
- int frameIndex, int numFrames, int colorFormat, bool metaOnly);
+ virtual sp<IMemory> getFrameAtIndex(
+ int index, int colorFormat, bool metaOnly);
virtual MediaAlbumArt *extractAlbumArt();
virtual const char *extractMetadata(int keyCode);
@@ -62,17 +61,17 @@
KeyedVector<int, String8> mMetaData;
MediaAlbumArt *mAlbumArt;
- sp<ImageDecoder> mImageDecoder;
- int mLastImageIndex;
+ sp<FrameDecoder> mDecoder;
+ int mLastDecodedIndex;
void parseMetaData();
void parseColorAspects(const sp<MetaData>& meta);
// Delete album art and clear metadata.
void clearMetadata();
- status_t getFrameInternal(
- int64_t timeUs, int numFrames, int option, int colorFormat, bool metaOnly,
- sp<IMemory>* outFrame, std::vector<sp<IMemory> >* outFrames);
- virtual sp<IMemory> getImageInternal(
+ sp<IMemory> getFrameInternal(
+ int64_t timeUs, int option, int colorFormat, bool metaOnly);
+
+ sp<IMemory> getImageInternal(
int index, int colorFormat, bool metaOnly, bool thumbnail, FrameRect* rect);
StagefrightMetadataRetriever(const StagefrightMetadataRetriever &);
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index e260cae..7d03d98 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -72,7 +72,6 @@
cfi: true,
},
- compile_multilib: "32",
}
cc_library_shared {
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
index 1f8799f..8cc9a9a 100644
--- a/media/mtp/MtpServer.h
+++ b/media/mtp/MtpServer.h
@@ -34,8 +34,11 @@
class IMtpDatabase;
class MtpStorage;
+class MtpMockServer;
class MtpServer {
+ // libFuzzer testing
+ friend class MtpMockServer;
private:
IMtpDatabase* mDatabase;
diff --git a/media/ndk/NdkImage.cpp b/media/ndk/NdkImage.cpp
index 1883f63..1145b7b 100644
--- a/media/ndk/NdkImage.cpp
+++ b/media/ndk/NdkImage.cpp
@@ -35,6 +35,7 @@
int64_t timestamp, int32_t width, int32_t height, int32_t numPlanes) :
mReader(reader), mFormat(format), mUsage(usage), mBuffer(buffer), mLockedBuffer(nullptr),
mTimestamp(timestamp), mWidth(width), mHeight(height), mNumPlanes(numPlanes) {
+ LOG_FATAL_IF(reader == nullptr, "AImageReader shouldn't be null while creating AImage");
}
AImage::~AImage() {
@@ -57,14 +58,9 @@
if (mIsClosed) {
return;
}
- sp<AImageReader> reader = mReader.promote();
- if (reader != nullptr) {
- reader->releaseImageLocked(this, releaseFenceFd);
- } else if (mBuffer != nullptr) {
- LOG_ALWAYS_FATAL("%s: parent AImageReader closed without releasing image %p",
- __FUNCTION__, this);
+ if (!mReader->mIsClosed) {
+ mReader->releaseImageLocked(this, releaseFenceFd);
}
-
// Should have been set to nullptr in releaseImageLocked
// Set to nullptr here for extra safety only
mBuffer = nullptr;
@@ -83,22 +79,12 @@
void
AImage::lockReader() const {
- sp<AImageReader> reader = mReader.promote();
- if (reader == nullptr) {
- // Reader has been closed
- return;
- }
- reader->mLock.lock();
+ mReader->mLock.lock();
}
void
AImage::unlockReader() const {
- sp<AImageReader> reader = mReader.promote();
- if (reader == nullptr) {
- // Reader has been closed
- return;
- }
- reader->mLock.unlock();
+ mReader->mLock.unlock();
}
media_status_t
diff --git a/media/ndk/NdkImagePriv.h b/media/ndk/NdkImagePriv.h
index e0f16da..0e8cbcb 100644
--- a/media/ndk/NdkImagePriv.h
+++ b/media/ndk/NdkImagePriv.h
@@ -72,7 +72,7 @@
uint32_t getJpegSize() const;
// When reader is close, AImage will only accept close API call
- wp<AImageReader> mReader;
+ const sp<AImageReader> mReader;
const int32_t mFormat;
const uint64_t mUsage; // AHARDWAREBUFFER_USAGE_* flags.
BufferItem* mBuffer;
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 830f752..c0ceb3d 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -272,6 +272,11 @@
mFrameListener(new FrameListener(this)),
mBufferRemovedListener(new BufferRemovedListener(this)) {}
+AImageReader::~AImageReader() {
+ Mutex::Autolock _l(mLock);
+ LOG_FATAL_IF(mIsClosed != true, "AImageReader not closed before destruction");
+}
+
media_status_t
AImageReader::init() {
PublicFormat publicFormat = static_cast<PublicFormat>(mFormat);
@@ -347,8 +352,12 @@
return AMEDIA_OK;
}
-AImageReader::~AImageReader() {
+void AImageReader::close() {
Mutex::Autolock _l(mLock);
+ if (mIsClosed) {
+ return;
+ }
+ mIsClosed = true;
AImageReader_ImageListener nullListener = {nullptr, nullptr};
setImageListenerLocked(&nullListener);
@@ -741,6 +750,7 @@
void AImageReader_delete(AImageReader* reader) {
ALOGV("%s", __FUNCTION__);
if (reader != nullptr) {
+ reader->close();
reader->decStrong((void*) AImageReader_delete);
}
return;
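Because AImage now holds a strong sp to its AImageReader, the reader's teardown moves out of the destructor into close(), which AImageReader_delete() invokes; an AImage that outlives the reader sees mIsClosed and skips the release instead of aborting. A minimal NDK lifecycle sketch (error handling trimmed):

    #include <media/NdkImage.h>
    #include <media/NdkImageReader.h>

    void run() {
        AImageReader *reader = nullptr;
        if (AImageReader_new(640, 480, AIMAGE_FORMAT_YUV_420_888,
                             /*maxImages=*/4, &reader) != AMEDIA_OK) {
            return;
        }

        AImage *image = nullptr;
        // A real client would wait for the producer to queue a buffer first.
        if (AImageReader_acquireNextImage(reader, &image) == AMEDIA_OK) {
            // ... use the image ...
            AImage_delete(image);  // releases the buffer back to the reader
        }

        // With this patch, deletion calls AImageReader::close() before dropping
        // the reference, so a still-held AImage can no longer reach a
        // half-destroyed reader.
        AImageReader_delete(reader);
    }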
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 19bd704..0779a71 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -76,6 +76,7 @@
int32_t getHeight() const { return mHeight; };
int32_t getFormat() const { return mFormat; };
int32_t getMaxImages() const { return mMaxImages; };
+ void close();
private:
@@ -165,6 +166,7 @@
native_handle_t* mWindowHandle = nullptr;
List<AImage*> mAcquiredImages;
+ bool mIsClosed = false;
Mutex mLock;
};
diff --git a/services/audiopolicy/config/audio_policy_volumes.xml b/services/audiopolicy/config/audio_policy_volumes.xml
index ec64a7c..27bd3ff 100644
--- a/services/audiopolicy/config/audio_policy_volumes.xml
+++ b/services/audiopolicy/config/audio_policy_volumes.xml
@@ -44,7 +44,7 @@
<volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_EXT_MEDIA"
ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
<volume stream="AUDIO_STREAM_VOICE_CALL" deviceCategory="DEVICE_CATEGORY_HEARING_AID"
- ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+ ref="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE"/>
<volume stream="AUDIO_STREAM_SYSTEM" deviceCategory="DEVICE_CATEGORY_HEADSET">
<point>1,-3000</point>
<point>33,-2600</point>
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index 6e72f2a..885b5fa 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -3,7 +3,6 @@
export_include_dirs: ["include"],
include_dirs: [
"external/libxml2/include",
- "external/icu/icu4c/source/common",
],
srcs: [
"src/EngineConfig.cpp",
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 50016e7..0694584 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -2240,16 +2240,22 @@
return status;
}
- // increment activity count before calling getNewInputDevice() below as only active sessions
+ // increment activity count before calling getNewInputDevice() below as only active sessions
// are considered for device selection
inputDesc->setClientActive(client, true);
// indicate active capture to sound trigger service if starting capture from a mic on
// primary HW module
sp<DeviceDescriptor> device = getNewInputDevice(inputDesc);
- setInputDevice(input, device, true /* force */);
+ if (device != nullptr) {
+ status = setInputDevice(input, device, true /* force */);
+ } else {
+ ALOGW("%s no new input device can be found for descriptor %d",
+ __FUNCTION__, inputDesc->getId());
+ status = BAD_VALUE;
+ }
- if (inputDesc->activeCount() == 1) {
+ if (status == NO_ERROR && inputDesc->activeCount() == 1) {
sp<AudioPolicyMix> policyMix = inputDesc->mPolicyMix.promote();
// if input maps to a dynamic policy with an activity listener, notify of state change
if ((policyMix != NULL)
@@ -2280,11 +2286,16 @@
address, "remote-submix", AUDIO_FORMAT_DEFAULT);
}
}
+ } else if (status != NO_ERROR) {
+ // Restore client activity state.
+ inputDesc->setClientActive(client, false);
+ inputDesc->stop();
}
- ALOGV("%s input %d source = %d exit", __FUNCTION__, input, client->source());
+ ALOGV("%s input %d source = %d status = %d exit",
+ __FUNCTION__, input, client->source(), status);
- return NO_ERROR;
+ return status;
}
status_t AudioPolicyManager::stopInput(audio_port_handle_t portId)
@@ -2397,7 +2408,8 @@
for (size_t i = 0; i < mInputs.size(); i++) {
const sp<AudioInputDescriptor> input = mInputs.valueAt(i);
if (input->clientsList().size() == 0
- || !mAvailableInputDevices.containsAtLeastOne(input->supportedDevices())) {
+ || !mAvailableInputDevices.containsAtLeastOne(input->supportedDevices())
+ || (input->getAudioPort()->getFlags() & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
inputsToClose.push_back(mInputs.keyAt(i));
} else {
bool close = false;
@@ -5683,8 +5695,9 @@
const auto ringVolumeSrc = toVolumeSource(AUDIO_STREAM_RING);
const auto musicVolumeSrc = toVolumeSource(AUDIO_STREAM_MUSIC);
const auto alarmVolumeSrc = toVolumeSource(AUDIO_STREAM_ALARM);
+ const auto a11yVolumeSrc = toVolumeSource(AUDIO_STREAM_ACCESSIBILITY);
- if (volumeSource == toVolumeSource(AUDIO_STREAM_ACCESSIBILITY)
+ if (volumeSource == a11yVolumeSrc
&& (AUDIO_MODE_RINGTONE == mEngine->getPhoneState()) &&
mOutputs.isActive(ringVolumeSrc, 0)) {
auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
@@ -5701,7 +5714,7 @@
volumeSource == toVolumeSource(AUDIO_STREAM_NOTIFICATION) ||
volumeSource == toVolumeSource(AUDIO_STREAM_ENFORCED_AUDIBLE) ||
volumeSource == toVolumeSource(AUDIO_STREAM_DTMF) ||
- volumeSource == toVolumeSource(AUDIO_STREAM_ACCESSIBILITY))) {
+ volumeSource == a11yVolumeSrc)) {
auto &voiceCurves = getVolumeCurves(callVolumeSrc);
int voiceVolumeIndex = voiceCurves.getVolumeIndex(device);
const float maxVoiceVolDb =
@@ -5713,7 +5726,9 @@
// VOICE_CALL stream has minVolumeIndex > 0 : Users cannot set the volume of voice calls to
// 0. We don't want to cap volume when the system has programmatically muted the voice call
// stream. See setVolumeCurveIndex() for more information.
- bool exemptFromCapping = (volumeSource == ringVolumeSrc) && (voiceVolumeIndex == 0);
+ bool exemptFromCapping =
+ ((volumeSource == ringVolumeSrc) || (volumeSource == a11yVolumeSrc))
+ && (voiceVolumeIndex == 0);
ALOGV_IF(exemptFromCapping, "%s volume source %d at vol=%f not capped", __func__,
volumeSource, volumeDb);
if ((volumeDb > maxVoiceVolDb) && !exemptFromCapping) {