Merge "Fix StagefrightMetadataRetriever calling MediaSource::start twice" into nyc-dev
diff --git a/camera/CaptureResult.cpp b/camera/CaptureResult.cpp
index 58d9b43..0a447e7 100644
--- a/camera/CaptureResult.cpp
+++ b/camera/CaptureResult.cpp
@@ -38,6 +38,7 @@
parcel->readInt32(&precaptureTriggerId);
parcel->readInt64(&frameNumber);
parcel->readInt32(&partialResultCount);
+ parcel->readInt32(&errorStreamId);
return OK;
}
@@ -54,6 +55,7 @@
parcel->writeInt32(precaptureTriggerId);
parcel->writeInt64(frameNumber);
parcel->writeInt32(partialResultCount);
+ parcel->writeInt32(errorStreamId);
return OK;
}
diff --git a/include/camera/CaptureResult.h b/include/camera/CaptureResult.h
index ff0e3d3..45e4518 100644
--- a/include/camera/CaptureResult.h
+++ b/include/camera/CaptureResult.h
@@ -64,6 +64,12 @@
int32_t partialResultCount;
/**
+ * For buffer drop errors, the stream ID for the stream that lost a buffer.
+ * Otherwise -1.
+ */
+ int32_t errorStreamId;
+
+ /**
* Constructor initializes object as invalid by setting requestId to be -1.
*/
CaptureResultExtras()
@@ -72,7 +78,8 @@
afTriggerId(0),
precaptureTriggerId(0),
frameNumber(0),
- partialResultCount(0) {
+ partialResultCount(0),
+ errorStreamId(-1) {
}
/**
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 3f211bf..19c7955 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -121,7 +121,7 @@
// This will set *type to resulting metadata buffer type on OMX error (not on binder error) as
// well as on success.
virtual status_t createInputSurface(
- node_id node, OMX_U32 port_index,
+ node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer,
MetadataBufferType *type = NULL) = 0;
@@ -186,6 +186,7 @@
INTERNAL_OPTION_MAX_FPS, // data is float
INTERNAL_OPTION_START_TIME, // data is an int64_t
INTERNAL_OPTION_TIME_LAPSE, // data is an int64_t[2]
+ INTERNAL_OPTION_COLOR_ASPECTS, // data is ColorAspects
};
virtual status_t setInternalOption(
node_id node,
diff --git a/include/media/MediaResource.h b/include/media/MediaResource.h
index 20f2cad..1957a45 100644
--- a/include/media/MediaResource.h
+++ b/include/media/MediaResource.h
@@ -23,17 +23,24 @@
namespace android {
-extern const char kResourceSecureCodec[];
-extern const char kResourceNonSecureCodec[];
-extern const char kResourceAudioCodec[];
-extern const char kResourceVideoCodec[];
-extern const char kResourceGraphicMemory[];
-
class MediaResource {
public:
+ enum Type {
+ kUnspecified = 0,
+ kSecureCodec,
+ kNonSecureCodec,
+ kGraphicMemory
+ };
+
+ enum SubType {
+ kUnspecifiedSubType = 0,
+ kAudioCodec,
+ kVideoCodec
+ };
+
MediaResource();
- MediaResource(String8 type, uint64_t value);
- MediaResource(String8 type, String8 subType, uint64_t value);
+ MediaResource(Type type, uint64_t value);
+ MediaResource(Type type, SubType subType, uint64_t value);
void readFromParcel(const Parcel &parcel);
void writeToParcel(Parcel *parcel) const;
@@ -43,11 +50,30 @@
bool operator==(const MediaResource &other) const;
bool operator!=(const MediaResource &other) const;
- String8 mType;
- String8 mSubType;
+ Type mType;
+ SubType mSubType;
uint64_t mValue;
};
+inline static const char *asString(MediaResource::Type i, const char *def = "??") {
+ switch (i) {
+ case MediaResource::kUnspecified: return "unspecified";
+ case MediaResource::kSecureCodec: return "secure-codec";
+ case MediaResource::kNonSecureCodec: return "non-secure-codec";
+ case MediaResource::kGraphicMemory: return "graphic-memory";
+ default: return def;
+ }
+}
+
+inline static const char *asString(MediaResource::SubType i, const char *def = "??") {
+ switch (i) {
+ case MediaResource::kUnspecifiedSubType: return "unspecified";
+ case MediaResource::kAudioCodec: return "audio-codec";
+ case MediaResource::kVideoCodec: return "video-codec";
+ default: return def;
+ }
+}
+
}; // namespace android
#endif // ANDROID_MEDIA_RESOURCE_H
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index 761f182..bd633f7 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -255,7 +255,7 @@
List<sp<AMessage> > mDeferredQueue;
- bool mSentFormat;
+ sp<AMessage> mLastOutputFormat;
bool mIsVideo;
bool mIsEncoder;
bool mFatalError;
@@ -443,7 +443,10 @@
void notifyOfRenderedFrames(
bool dropIncomplete = false, FrameRenderTracker::Info *until = NULL);
- void sendFormatChange(const sp<AMessage> &reply);
+ void onOutputFormatChanged();
+ void addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify);
+ void sendFormatChange();
+
status_t getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify);
void signalError(
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index 2bb1291..fe579b7 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -394,7 +394,7 @@
bool isExecuting() const;
uint64_t getGraphicBufferSize();
- void addResource(const String8 &type, const String8 &subtype, uint64_t value);
+ void addResource(MediaResource::Type type, MediaResource::SubType subtype, uint64_t value);
bool hasPendingBuffer(int portIndex);
bool hasPendingBuffer();
diff --git a/include/media/stagefright/foundation/ALookup.h b/include/media/stagefright/foundation/ALookup.h
index 571eda2..d8af407 100644
--- a/include/media/stagefright/foundation/ALookup.h
+++ b/include/media/stagefright/foundation/ALookup.h
@@ -29,8 +29,12 @@
bool lookup(const T& from, U *to);
bool rlookup(const U& from, T *to);
- inline bool map(const T& from, U *to) { return lookup(from, to); }
- inline bool map(const U& from, T *to) { return rlookup(from, to); }
+
+ template<typename V, typename = typename std::enable_if<!std::is_same<T, V>::value>::type>
+ inline bool map(const T& from, V *to) { return lookup(from, to); }
+
+ template<typename V, typename = typename std::enable_if<!std::is_same<T, V>::value>::type>
+ inline bool map(const V& from, T *to) { return rlookup(from, to); }
private:
std::vector<std::pair<T, U>> mTable;
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index e6b4347..1ac098c 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -313,13 +313,14 @@
}
virtual status_t createInputSurface(
- node_id node, OMX_U32 port_index,
+ node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
Parcel data, reply;
status_t err;
data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
data.writeInt32((int32_t)node);
data.writeInt32(port_index);
+ data.writeInt32(dataSpace);
err = remote()->transact(CREATE_INPUT_SURFACE, data, &reply);
if (err != OK) {
ALOGW("binder transaction failed: %d", err);
@@ -908,10 +909,11 @@
node_id node = (node_id)data.readInt32();
OMX_U32 port_index = data.readInt32();
+ android_dataspace dataSpace = (android_dataspace)data.readInt32();
sp<IGraphicBufferProducer> bufferProducer;
MetadataBufferType type = kMetadataBufferTypeInvalid;
- status_t err = createInputSurface(node, port_index, &bufferProducer, &type);
+ status_t err = createInputSurface(node, port_index, dataSpace, &bufferProducer, &type);
if ((err != OK) && (type == kMetadataBufferTypeInvalid)) {
android_errorWriteLog(0x534e4554, "26324358");
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
index 40ec0cb..e636a50 100644
--- a/media/libmedia/MediaResource.cpp
+++ b/media/libmedia/MediaResource.cpp
@@ -21,38 +21,36 @@
namespace android {
-const char kResourceSecureCodec[] = "secure-codec";
-const char kResourceNonSecureCodec[] = "non-secure-codec";
-const char kResourceAudioCodec[] = "audio-codec";
-const char kResourceVideoCodec[] = "video-codec";
-const char kResourceGraphicMemory[] = "graphic-memory";
+MediaResource::MediaResource()
+ : mType(kUnspecified),
+ mSubType(kUnspecifiedSubType),
+ mValue(0) {}
-MediaResource::MediaResource() : mValue(0) {}
-
-MediaResource::MediaResource(String8 type, uint64_t value)
+MediaResource::MediaResource(Type type, uint64_t value)
: mType(type),
+ mSubType(kUnspecifiedSubType),
mValue(value) {}
-MediaResource::MediaResource(String8 type, String8 subType, uint64_t value)
+MediaResource::MediaResource(Type type, SubType subType, uint64_t value)
: mType(type),
mSubType(subType),
mValue(value) {}
void MediaResource::readFromParcel(const Parcel &parcel) {
- mType = parcel.readString8();
- mSubType = parcel.readString8();
+ mType = static_cast<Type>(parcel.readInt32());
+ mSubType = static_cast<SubType>(parcel.readInt32());
mValue = parcel.readUint64();
}
void MediaResource::writeToParcel(Parcel *parcel) const {
- parcel->writeString8(mType);
- parcel->writeString8(mSubType);
+ parcel->writeInt32(static_cast<int32_t>(mType));
+ parcel->writeInt32(static_cast<int32_t>(mSubType));
parcel->writeUint64(mValue);
}
String8 MediaResource::toString() const {
String8 str;
- str.appendFormat("%s/%s:%llu", mType.string(), mSubType.string(), (unsigned long long)mValue);
+ str.appendFormat("%s/%s:%llu", asString(mType), asString(mSubType), (unsigned long long)mValue);
return str;
}
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index def9e25..9eab66f 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -489,7 +489,6 @@
: mQuirks(0),
mNode(0),
mNativeWindowUsageBits(0),
- mSentFormat(false),
mIsVideo(false),
mIsEncoder(false),
mFatalError(false),
@@ -1646,8 +1645,8 @@
encoder = false;
}
- sp<AMessage> inputFormat = new AMessage();
- sp<AMessage> outputFormat = mNotify->dup(); // will use this for kWhatOutputFormatChanged
+ sp<AMessage> inputFormat = new AMessage;
+ sp<AMessage> outputFormat = new AMessage;
mIsEncoder = encoder;
@@ -2198,6 +2197,8 @@
}
mBaseOutputFormat = outputFormat;
+ // trigger a kWhatOutputFormatChanged msg on first buffer
+ mLastOutputFormat.clear();
err = getPortFormat(kPortIndexInput, inputFormat);
if (err == OK) {
@@ -4702,29 +4703,41 @@
return OK;
}
-void ACodec::sendFormatChange(const sp<AMessage> &reply) {
- sp<AMessage> notify = mBaseOutputFormat->dup();
- notify->setInt32("what", kWhatOutputFormatChanged);
+void ACodec::onOutputFormatChanged() {
+ // store new output format
+ mOutputFormat = mBaseOutputFormat->dup();
- if (getPortFormat(kPortIndexOutput, notify) != OK) {
+ if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) {
ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str());
return;
}
+ if (mTunneled) {
+ sendFormatChange();
+ }
+}
+
+void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &reply) {
AString mime;
- CHECK(notify->findString("mime", &mime));
+ CHECK(mOutputFormat->findString("mime", &mime));
int32_t left, top, right, bottom;
if (mime == MEDIA_MIMETYPE_VIDEO_RAW &&
mNativeWindow != NULL &&
- notify->findRect("crop", &left, &top, &right, &bottom)) {
+ mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
// notify renderer of the crop change
// NOTE: native window uses extended right-bottom coordinate
reply->setRect("crop", left, top, right + 1, bottom + 1);
- } else if (mime == MEDIA_MIMETYPE_AUDIO_RAW &&
- (mEncoderDelay || mEncoderPadding)) {
+ }
+}
+
+void ACodec::sendFormatChange() {
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
+
+ if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
int32_t channelCount;
- CHECK(notify->findInt32("channel-count", &channelCount));
+ CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
if (mSkipCutBuffer != NULL) {
size_t prevbufsize = mSkipCutBuffer->size();
if (prevbufsize != 0) {
@@ -4734,9 +4747,13 @@
mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
}
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatOutputFormatChanged);
+ notify->setMessage("format", mOutputFormat);
notify->post();
- mSentFormat = true;
+ // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
+ mLastOutputFormat = mOutputFormat;
}
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
@@ -5440,9 +5457,11 @@
sp<AMessage> reply =
new AMessage(kWhatOutputBufferDrained, mCodec);
- if (!mCodec->mSentFormat && rangeLength > 0) {
- mCodec->sendFormatChange(reply);
+ if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
+ mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
+ mCodec->sendFormatChange();
}
+
if (mCodec->usingMetadataOnEncoderOutput()) {
native_handle_t *handle = NULL;
VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data();
@@ -6105,9 +6124,11 @@
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);
+ android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
+
sp<IGraphicBufferProducer> bufferProducer;
status_t err = mCodec->mOMX->createInputSurface(
- mCodec->mNode, kPortIndexInput, &bufferProducer, &mCodec->mInputMetadataType);
+ mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType);
if (err == OK) {
err = setupInputSurface();
@@ -6684,6 +6705,8 @@
{
CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);
+ mCodec->onOutputFormatChanged();
+
if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
mCodec->mMetadataBuffersToSubmit = 0;
CHECK_EQ(mCodec->mOMX->sendCommand(
@@ -6694,15 +6717,8 @@
mCodec->freeOutputBuffersNotOwnedByComponent();
mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
- } else if (data2 == OMX_IndexConfigCommonOutputCrop
- || data2 == OMX_IndexConfigAndroidIntraRefresh) {
- mCodec->mSentFormat = false;
-
- if (mCodec->mTunneled) {
- sp<AMessage> dummy = new AMessage(kWhatOutputBufferDrained, mCodec);
- mCodec->sendFormatChange(dummy);
- }
- } else {
+ } else if (data2 != OMX_IndexConfigCommonOutputCrop
+ && data2 != OMX_IndexConfigAndroidIntraRefresh) {
ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
mCodec->mComponentName.c_str(), data2);
}
@@ -6829,13 +6845,6 @@
return false;
}
- mCodec->mSentFormat = false;
-
- if (mCodec->mTunneled) {
- sp<AMessage> dummy = new AMessage(kWhatOutputBufferDrained, mCodec);
- mCodec->sendFormatChange(dummy);
- }
-
ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());
if (mCodec->mExecutingState->active()) {
@@ -6894,7 +6903,7 @@
ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());
mComponentNowIdle = false;
- mCodec->mSentFormat = false;
+ mCodec->mLastOutputFormat.clear();
}
bool ACodec::ExecutingToIdleState::onOMXEvent(
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index fbdf56f..84ccd2d 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -399,9 +399,11 @@
status_t err;
Vector<MediaResource> resources;
- const char *type = secureCodec ? kResourceSecureCodec : kResourceNonSecureCodec;
- const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
- resources.push_back(MediaResource(String8(type), String8(subtype), 1));
+ MediaResource::Type type =
+ secureCodec ? MediaResource::kSecureCodec : MediaResource::kNonSecureCodec;
+ MediaResource::SubType subtype =
+ mIsVideo ? MediaResource::kVideoCodec : MediaResource::kAudioCodec;
+ resources.push_back(MediaResource(type, subtype, 1));
for (int i = 0; i <= kMaxRetry; ++i) {
if (i > 0) {
// Don't try to reclaim resource for the first time.
@@ -468,13 +470,14 @@
status_t err;
Vector<MediaResource> resources;
- const char *type = (mFlags & kFlagIsSecure) ?
- kResourceSecureCodec : kResourceNonSecureCodec;
- const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
- resources.push_back(MediaResource(String8(type), String8(subtype), 1));
+ MediaResource::Type type = (mFlags & kFlagIsSecure) ?
+ MediaResource::kSecureCodec : MediaResource::kNonSecureCodec;
+ MediaResource::SubType subtype =
+ mIsVideo ? MediaResource::kVideoCodec : MediaResource::kAudioCodec;
+ resources.push_back(MediaResource(type, subtype, 1));
// Don't know the buffer size at this point, but it's fine to use 1 because
// the reclaimResource call doesn't consider the requester's buffer size for now.
- resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
+ resources.push_back(MediaResource(MediaResource::kGraphicMemory, 1));
for (int i = 0; i <= kMaxRetry; ++i) {
if (i > 0) {
// Don't try to reclaim resource for the first time.
@@ -553,7 +556,8 @@
return size;
}
-void MediaCodec::addResource(const String8 &type, const String8 &subtype, uint64_t value) {
+void MediaCodec::addResource(
+ MediaResource::Type type, MediaResource::SubType subtype, uint64_t value) {
Vector<MediaResource> resources;
resources.push_back(MediaResource(type, subtype, value));
mResourceManagerService->addResource(
@@ -565,13 +569,14 @@
status_t err;
Vector<MediaResource> resources;
- const char *type = (mFlags & kFlagIsSecure) ?
- kResourceSecureCodec : kResourceNonSecureCodec;
- const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
- resources.push_back(MediaResource(String8(type), String8(subtype), 1));
+ MediaResource::Type type = (mFlags & kFlagIsSecure) ?
+ MediaResource::kSecureCodec : MediaResource::kNonSecureCodec;
+ MediaResource::SubType subtype =
+ mIsVideo ? MediaResource::kVideoCodec : MediaResource::kAudioCodec;
+ resources.push_back(MediaResource(type, subtype, 1));
// Don't know the buffer size at this point, but it's fine to use 1 because
// the reclaimResource call doesn't consider the requester's buffer size for now.
- resources.push_back(MediaResource(String8(kResourceGraphicMemory), 1));
+ resources.push_back(MediaResource(MediaResource::kGraphicMemory, 1));
for (int i = 0; i <= kMaxRetry; ++i) {
if (i > 0) {
// Don't try to reclaim resource for the first time.
@@ -1228,18 +1233,18 @@
mFlags &= ~kFlagUsesSoftwareRenderer;
}
- String8 resourceType;
+ MediaResource::Type resourceType;
if (mComponentName.endsWith(".secure")) {
mFlags |= kFlagIsSecure;
- resourceType = String8(kResourceSecureCodec);
+ resourceType = MediaResource::kSecureCodec;
} else {
mFlags &= ~kFlagIsSecure;
- resourceType = String8(kResourceNonSecureCodec);
+ resourceType = MediaResource::kNonSecureCodec;
}
if (mIsVideo) {
// audio codec is currently ignored.
- addResource(resourceType, String8(kResourceVideoCodec), 1);
+ addResource(resourceType, MediaResource::kVideoCodec, 1);
}
(new AMessage)->postReply(mReplyID);
@@ -1376,10 +1381,9 @@
// allocating input buffers, so this is a good
// indication that now all buffers are allocated.
if (mIsVideo) {
- String8 subtype;
addResource(
- String8(kResourceGraphicMemory),
- subtype,
+ MediaResource::kGraphicMemory,
+ MediaResource::kUnspecifiedSubType,
getGraphicBufferSize());
}
setState(STARTED);
@@ -1396,19 +1400,19 @@
{
ALOGV("codec output format changed");
+ CHECK(msg->findMessage("format", &mOutputFormat));
+
if (mSoftRenderer == NULL &&
mSurface != NULL &&
(mFlags & kFlagUsesSoftwareRenderer)) {
AString mime;
- CHECK(msg->findString("mime", &mime));
+ CHECK(mOutputFormat->findString("mime", &mime));
if (mime.startsWithIgnoreCase("video/")) {
mSoftRenderer = new SoftwareRenderer(mSurface, mRotationDegrees);
}
}
- mOutputFormat = msg;
-
if (mFlags & kFlagIsEncoder) {
// Before we announce the format change we should
// collect codec specific data and amend the output
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 8e72405..a523d0e 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -112,7 +112,7 @@
const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
virtual status_t createInputSurface(
- node_id node, OMX_U32 port_index,
+ node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type);
virtual status_t createPersistentInputSurface(
@@ -388,10 +388,10 @@
}
status_t MuxOMX::createInputSurface(
- node_id node, OMX_U32 port_index,
+ node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
status_t err = getOMX(node)->createInputSurface(
- node, port_index, bufferProducer, type);
+ node, port_index, dataSpace, bufferProducer, type);
return err;
}
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
index 3fcca07..2008574 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
@@ -208,6 +208,7 @@
mEnableAltRef = DEFAULT_ENABLE_ALT_REF;
mEncSpeed = DEFAULT_ENC_SPEED;
mIntra4x4 = DEFAULT_INTRA4x4;
+ mConstrainedIntraFlag = DEFAULT_CONSTRAINED_INTRA;
mAIRMode = DEFAULT_AIR;
mAIRRefreshPeriod = DEFAULT_AIR_REFRESH_PERIOD;
mPSNREnable = DEFAULT_PSNR_ENABLE;
@@ -305,6 +306,7 @@
s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4;
s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed;
+ s_ipe_params_ip.u4_constrained_intra_pred = mConstrainedIntraFlag;
s_ipe_params_ip.u4_timestamp_high = -1;
s_ipe_params_ip.u4_timestamp_low = -1;
@@ -1062,7 +1064,8 @@
return OMX_ErrorUndefined;
}
- intraRefreshParams->nRefreshPeriod = mAIRRefreshPeriod;
+ intraRefreshParams->nRefreshPeriod =
+ (mAIRMode == IVE_AIR_MODE_NONE) ? 0 : mAIRRefreshPeriod;
return OMX_ErrorNone;
}
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
index 232c6e0..cf6f899 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h
@@ -95,8 +95,7 @@
#define DEFAULT_SOC SOC_GENERIC
#define DEFAULT_INTRA4x4 0
#define STRLENGTH 500
-
-
+#define DEFAULT_CONSTRAINED_INTRA 0
#define MIN(a, b) ((a) < (b))? (a) : (b)
#define MAX(a, b) ((a) > (b))? (a) : (b)
@@ -182,6 +181,7 @@
bool mReconEnable;
bool mPSNREnable;
bool mEntropyMode;
+ bool mConstrainedIntraFlag;
IVE_SPEED_CONFIG mEncSpeed;
uint8_t *mConversionBuffers[MAX_CONVERSION_BUFFERS];
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index c715939..9726741 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -94,7 +94,7 @@
const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
virtual status_t createInputSurface(
- node_id node, OMX_U32 port_index,
+ node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer,
MetadataBufferType *type);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index b1cb91d..4220358 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -80,7 +80,8 @@
OMX::buffer_id buffer);
status_t createInputSurface(
- OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer,
+ OMX_U32 portIndex, android_dataspace dataSpace,
+ sp<IGraphicBufferProducer> *bufferProducer,
MetadataBufferType *type);
static status_t createPersistentInputSurface(
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index acdc4b0..a7c2279 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -117,6 +117,7 @@
mNodeInstance(nodeInstance),
mExecuting(false),
mSuspended(false),
+ mLastDataSpace(HAL_DATASPACE_UNKNOWN),
mIsPersistent(false),
mConsumer(consumer),
mNumFramesAvailable(0),
@@ -189,6 +190,8 @@
return;
}
+ memset(&mColorAspects, 0, sizeof(mColorAspects));
+
CHECK(mInitCheck == NO_ERROR);
}
@@ -925,6 +928,12 @@
ALOG_ASSERT(false, "GraphicBufferSource can't consume sideband streams");
}
+void GraphicBufferSource::setDefaultDataSpace(android_dataspace dataSpace) {
+ ALOGD("setting dataspace: %#x", dataSpace);
+ mConsumer->setDefaultBufferDataSpace(dataSpace);
+ mLastDataSpace = dataSpace;
+}
+
status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs(
int64_t repeatAfterUs) {
Mutex::Autolock autoLock(mMutex);
@@ -974,19 +983,24 @@
(skipFramesBeforeUs > 0) ? (skipFramesBeforeUs * 1000) : -1ll;
}
-status_t GraphicBufferSource::setTimeLapseUs(int64_t* data) {
+status_t GraphicBufferSource::setTimeLapseConfig(const TimeLapseConfig &config) {
Mutex::Autolock autoLock(mMutex);
- if (mExecuting || data[0] <= 0ll || data[1] <= 0ll) {
+ if (mExecuting || config.mTimePerFrameUs <= 0ll || config.mTimePerCaptureUs <= 0ll) {
return INVALID_OPERATION;
}
- mTimePerFrameUs = data[0];
- mTimePerCaptureUs = data[1];
+ mTimePerFrameUs = config.mTimePerFrameUs;
+ mTimePerCaptureUs = config.mTimePerCaptureUs;
return OK;
}
+void GraphicBufferSource::setColorAspects(const ColorAspects &aspects) {
+ Mutex::Autolock autoLock(mMutex);
+ mColorAspects = aspects;
+}
+
void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatRepeatLastFrame:
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 7150684..87f4a60 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -23,6 +23,7 @@
#include <utils/RefBase.h>
#include <OMX_Core.h>
+#include <VideoAPI.h>
#include "../include/OMXNodeInstance.h"
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AHandlerReflector.h>
@@ -73,6 +74,9 @@
return mProducer;
}
+ // Sets the default buffer data space
+ void setDefaultDataSpace(android_dataspace dataSpace);
+
// This is called when OMX transitions to OMX_StateExecuting, which means
// we can start handing it buffers. If we already have buffers of data
// sitting in the BufferQueue, this will send them to the codec.
@@ -130,17 +134,23 @@
// When set, the max frame rate fed to the encoder will be capped at maxFps.
status_t setMaxFps(float maxFps);
+ struct TimeLapseConfig {
+ int64_t mTimePerFrameUs; // the time (us) between two frames for playback
+ int64_t mTimePerCaptureUs; // the time (us) between two frames for capture
+ };
+
// Sets the time lapse (or slow motion) parameters.
- // data[0] is the time (us) between two frames for playback
- // data[1] is the time (us) between two frames for capture
// When set, the sample's timestamp will be modified to playback framerate,
// and capture timestamp will be modified to capture rate.
- status_t setTimeLapseUs(int64_t* data);
+ status_t setTimeLapseConfig(const TimeLapseConfig &config);
// Sets the start time us (in system time), samples before which should
// be dropped and not submitted to encoder
void setSkipFramesBeforeUs(int64_t startTimeUs);
+ // Sets the desired color aspects, e.g. to be used when producer does not specify a dataspace.
+ void setColorAspects(const ColorAspects &aspects);
+
protected:
// BufferQueue::ConsumerListener interface, called when a new frame of
// data is available. If we're executing and a codec buffer is
@@ -252,6 +262,9 @@
bool mSuspended;
+ // Last dataspace seen
+ android_dataspace mLastDataSpace;
+
// Our BufferQueue interfaces. mProducer is passed to the producer through
// getIGraphicBufferProducer, and mConsumer is used internally to retrieve
// the buffers queued by the producer.
@@ -321,6 +334,7 @@
int64_t mPrevFrameUs;
MetadataBufferType mMetadataBufferType;
+ ColorAspects mColorAspects;
void onMessageReceived(const sp<AMessage> &msg);
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 970f6f5..759648b 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -484,7 +484,7 @@
}
status_t OMX::createInputSurface(
- node_id node, OMX_U32 port_index,
+ node_id node, OMX_U32 port_index, android_dataspace dataSpace,
sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
OMXNodeInstance *instance = findInstance(node);
@@ -493,7 +493,7 @@
}
return instance->createInputSurface(
- port_index, bufferProducer, type);
+ port_index, dataSpace, bufferProducer, type);
}
status_t OMX::createPersistentInputSurface(
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 82d5ba3..278d23c 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -933,7 +933,8 @@
}
status_t OMXNodeInstance::createInputSurface(
- OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
+ OMX_U32 portIndex, android_dataspace dataSpace,
+ sp<IGraphicBufferProducer> *bufferProducer, MetadataBufferType *type) {
if (bufferProducer == NULL) {
ALOGE("b/25884056");
return BAD_VALUE;
@@ -946,6 +947,8 @@
return err;
}
+ mGraphicBufferSource->setDefaultDataSpace(dataSpace);
+
*bufferProducer = mGraphicBufferSource->getIGraphicBufferProducer();
return OK;
}
@@ -1359,6 +1362,16 @@
}
}
+template<typename T>
+static bool getInternalOption(
+ const void *data, size_t size, T *out) {
+ if (size != sizeof(T)) {
+ return false;
+ }
+ *out = *(T*)data;
+ return true;
+}
+
status_t OMXNodeInstance::setInternalOption(
OMX_U32 portIndex,
IOMX::InternalOptionType type,
@@ -1373,6 +1386,7 @@
case IOMX::INTERNAL_OPTION_MAX_FPS:
case IOMX::INTERNAL_OPTION_START_TIME:
case IOMX::INTERNAL_OPTION_TIME_LAPSE:
+ case IOMX::INTERNAL_OPTION_COLOR_ASPECTS:
{
const sp<GraphicBufferSource> &bufferSource =
getGraphicBufferSource();
@@ -1383,58 +1397,63 @@
}
if (type == IOMX::INTERNAL_OPTION_SUSPEND) {
- if (size != sizeof(bool)) {
+ bool suspend;
+ if (!getInternalOption(data, size, &suspend)) {
return INVALID_OPERATION;
}
- bool suspend = *(bool *)data;
CLOG_CONFIG(setInternalOption, "suspend=%d", suspend);
bufferSource->suspend(suspend);
- } else if (type ==
- IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY){
- if (size != sizeof(int64_t)) {
+ } else if (type == IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY) {
+ int64_t delayUs;
+ if (!getInternalOption(data, size, &delayUs)) {
return INVALID_OPERATION;
}
- int64_t delayUs = *(int64_t *)data;
CLOG_CONFIG(setInternalOption, "delayUs=%lld", (long long)delayUs);
return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
- } else if (type ==
- IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP){
- if (size != sizeof(int64_t)) {
+ } else if (type == IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP) {
+ int64_t maxGapUs;
+ if (!getInternalOption(data, size, &maxGapUs)) {
return INVALID_OPERATION;
}
- int64_t maxGapUs = *(int64_t *)data;
CLOG_CONFIG(setInternalOption, "gapUs=%lld", (long long)maxGapUs);
return bufferSource->setMaxTimestampGapUs(maxGapUs);
} else if (type == IOMX::INTERNAL_OPTION_MAX_FPS) {
- if (size != sizeof(float)) {
+ float maxFps;
+ if (!getInternalOption(data, size, &maxFps)) {
return INVALID_OPERATION;
}
- float maxFps = *(float *)data;
CLOG_CONFIG(setInternalOption, "maxFps=%f", maxFps);
return bufferSource->setMaxFps(maxFps);
} else if (type == IOMX::INTERNAL_OPTION_START_TIME) {
- if (size != sizeof(int64_t)) {
+ int64_t skipFramesBeforeUs;
+ if (!getInternalOption(data, size, &skipFramesBeforeUs)) {
return INVALID_OPERATION;
}
- int64_t skipFramesBeforeUs = *(int64_t *)data;
CLOG_CONFIG(setInternalOption, "beforeUs=%lld", (long long)skipFramesBeforeUs);
bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs);
- } else { // IOMX::INTERNAL_OPTION_TIME_LAPSE
- if (size != sizeof(int64_t) * 2) {
+ } else if (type == IOMX::INTERNAL_OPTION_TIME_LAPSE) {
+ GraphicBufferSource::TimeLapseConfig config;
+ if (!getInternalOption(data, size, &config)) {
return INVALID_OPERATION;
}
- int64_t timePerFrameUs = ((int64_t *)data)[0];
- int64_t timePerCaptureUs = ((int64_t *)data)[1];
CLOG_CONFIG(setInternalOption, "perFrameUs=%lld perCaptureUs=%lld",
- (long long)timePerFrameUs, (long long)timePerCaptureUs);
+ (long long)config.mTimePerFrameUs, (long long)config.mTimePerCaptureUs);
- bufferSource->setTimeLapseUs((int64_t *)data);
+ return bufferSource->setTimeLapseConfig(config);
+ } else if (type == IOMX::INTERNAL_OPTION_COLOR_ASPECTS) {
+ ColorAspects aspects;
+ if (!getInternalOption(data, size, &aspects)) {
+ return INVALID_OPERATION;
+ }
+
+ CLOG_CONFIG(setInternalOption, "setting color aspects");
+ bufferSource->setColorAspects(aspects);
}
return OK;
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 787f53f..cf7c8fc 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -50,15 +50,11 @@
{
// Devices are considered equal if they:
// - are of the same type (a device type cannot be AUDIO_DEVICE_NONE)
- // - have the same address or one device does not specify the address
- // - have the same channel mask or one device does not specify the channel mask
+ // - have the same address
if (other == 0) {
return false;
}
- return (mDeviceType == other->mDeviceType) &&
- (mAddress == "" || other->mAddress == "" || mAddress == other->mAddress) &&
- (mChannelMask == 0 || other->mChannelMask == 0 ||
- mChannelMask == other->mChannelMask);
+ return (mDeviceType == other->mDeviceType) && (mAddress == other->mAddress);
}
void DeviceVector::refreshTypes()
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index dd2a60a..b7c7879 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -310,12 +310,6 @@
if (!deviceList.isEmpty()) {
return deviceList.itemAt(0);
}
- deviceList = hwModule->getDeclaredDevices().getDevicesFromType(device);
- if (!deviceList.isEmpty()) {
- deviceList.itemAt(0)->setName(String8(device_name));
- deviceList.itemAt(0)->mAddress = address;
- return deviceList.itemAt(0);
- }
}
sp<DeviceDescriptor> devDesc = new DeviceDescriptor(device);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 21107a1..c3b1529 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -459,10 +459,7 @@
// pertaining to sonification strategy see handleIncallSonification()
if (isStateInCall(oldState)) {
ALOGV("setPhoneState() in call state management: new state is %d", state);
- for (int stream = 0; stream < AUDIO_STREAM_CNT; stream++) {
- if (stream == AUDIO_STREAM_PATCH) {
- continue;
- }
+ for (int stream = 0; stream < AUDIO_STREAM_FOR_POLICY_CNT; stream++) {
handleIncallSonification((audio_stream_type_t)stream, false, true);
}
@@ -538,10 +535,7 @@
// pertaining to sonification strategy see handleIncallSonification()
if (isStateInCall(state)) {
ALOGV("setPhoneState() in call state management: new state is %d", state);
- for (int stream = 0; stream < AUDIO_STREAM_CNT; stream++) {
- if (stream == AUDIO_STREAM_PATCH) {
- continue;
- }
+ for (int stream = 0; stream < AUDIO_STREAM_FOR_POLICY_CNT; stream++) {
handleIncallSonification((audio_stream_type_t)stream, true, true);
}
@@ -1796,10 +1790,8 @@
mVolumeCurves->initStreamVolume(stream, indexMin, indexMax);
// initialize other private stream volumes which follow this one
- routing_strategy strategy = getStrategy(stream);
- for (int curStream = 0; curStream < AUDIO_STREAM_CNT; curStream++) {
- routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
- if (!strategiesMatchForvolume(strategy, curStrategy)) {
+ for (int curStream = 0; curStream < AUDIO_STREAM_FOR_POLICY_CNT; curStream++) {
+ if (!streamsMatchForvolume(stream, (audio_stream_type_t)curStream)) {
continue;
}
mVolumeCurves->initStreamVolume((audio_stream_type_t)curStream, indexMin, indexMax);
@@ -1832,10 +1824,8 @@
}
// update other private stream volumes which follow this one
- routing_strategy strategy = getStrategy(stream);
- for (int curStream = 0; curStream < AUDIO_STREAM_CNT; curStream++) {
- routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
- if (!strategiesMatchForvolume(strategy, curStrategy)) {
+ for (int curStream = 0; curStream < AUDIO_STREAM_FOR_POLICY_CNT; curStream++) {
+ if (!streamsMatchForvolume(stream, (audio_stream_type_t)curStream)) {
continue;
}
mVolumeCurves->addCurrentVolumeIndex((audio_stream_type_t)curStream, device, index);
@@ -1847,11 +1837,11 @@
for (size_t i = 0; i < mOutputs.size(); i++) {
sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
audio_devices_t curDevice = Volume::getDeviceForVolume(desc->device());
- for (int curStream = 0; curStream < AUDIO_STREAM_CNT; curStream++) {
- routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
- if (!strategiesMatchForvolume(strategy, curStrategy)) {
+ for (int curStream = 0; curStream < AUDIO_STREAM_FOR_POLICY_CNT; curStream++) {
+ if (!streamsMatchForvolume(stream, (audio_stream_type_t)curStream)) {
continue;
}
+ routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
audio_devices_t curStreamDevice = getDeviceForStrategy(curStrategy, true /*fromCache*/);
// it is possible that the requested device is not selected by the strategy
// (e.g an explicit audio patch is active causing getDevicesForStream()
@@ -1970,15 +1960,12 @@
bool AudioPolicyManager::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const
{
bool active = false;
- routing_strategy strategy = getStrategy(stream);
- for (int curStream = 0; curStream < AUDIO_STREAM_CNT && !active; curStream++) {
- routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
- if (!strategiesMatchForvolume(strategy, curStrategy)) {
+ for (int curStream = 0; curStream < AUDIO_STREAM_FOR_POLICY_CNT && !active; curStream++) {
+ if (!streamsMatchForvolume(stream, (audio_stream_type_t)curStream)) {
continue;
}
active = mOutputs.isStreamActive((audio_stream_type_t)curStream, inPastMs);
}
-
return active;
}
@@ -2734,10 +2721,7 @@
// invalidate all tracks in this strategy to force re connection.
// Otherwise select new device on the output mix.
if (outputs.indexOf(mOutputs.keyAt(j)) < 0) {
- for (int stream = 0; stream < AUDIO_STREAM_CNT; stream++) {
- if (stream == AUDIO_STREAM_PATCH) {
- continue;
- }
+ for (int stream = 0; stream < AUDIO_STREAM_FOR_POLICY_CNT; stream++) {
if (getStrategy((audio_stream_type_t)stream) == strategy) {
mpClientInterface->invalidateStream((audio_stream_type_t)stream);
}
@@ -4097,10 +4081,7 @@
}
}
// Move tracks associated to this strategy from previous output to new output
- for (int i = 0; i < AUDIO_STREAM_CNT; i++) {
- if (i == AUDIO_STREAM_PATCH) {
- continue;
- }
+ for (int i = 0; i < AUDIO_STREAM_FOR_POLICY_CNT; i++) {
if (getStrategy((audio_stream_type_t)i) == strategy) {
mpClientInterface->invalidateStream((audio_stream_type_t)i);
}
@@ -4251,11 +4232,11 @@
return device;
}
-bool AudioPolicyManager::strategiesMatchForvolume(routing_strategy strategy1,
- routing_strategy strategy2) {
- return ((strategy1 == strategy2) ||
- ((strategy1 == STRATEGY_ACCESSIBILITY) && (strategy2 == STRATEGY_MEDIA)) ||
- ((strategy1 == STRATEGY_MEDIA) && (strategy2 == STRATEGY_ACCESSIBILITY)));
+bool AudioPolicyManager::streamsMatchForvolume(audio_stream_type_t stream1,
+ audio_stream_type_t stream2) {
+ return ((stream1 == stream2) ||
+ ((stream1 == AUDIO_STREAM_ACCESSIBILITY) && (stream2 == AUDIO_STREAM_MUSIC)) ||
+ ((stream1 == AUDIO_STREAM_MUSIC) && (stream2 == AUDIO_STREAM_ACCESSIBILITY)));
}
uint32_t AudioPolicyManager::getStrategyForStream(audio_stream_type_t stream) {
@@ -4270,17 +4251,17 @@
return AUDIO_DEVICE_NONE;
}
audio_devices_t devices = AUDIO_DEVICE_NONE;
- routing_strategy strategy = getStrategy(stream);
- for (int curStrategy = 0; curStrategy < NUM_STRATEGIES; curStrategy++) {
- if (!strategiesMatchForvolume(strategy, (routing_strategy)curStrategy)) {
+ for (int curStream = 0; curStream < AUDIO_STREAM_FOR_POLICY_CNT; curStream++) {
+ if (!streamsMatchForvolume(stream, (audio_stream_type_t)curStream)) {
continue;
}
+ routing_strategy curStrategy = getStrategy((audio_stream_type_t)curStream);
audio_devices_t curDevices =
getDeviceForStrategy((routing_strategy)curStrategy, true /*fromCache*/);
SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(curDevices, mOutputs);
for (size_t i = 0; i < outputs.size(); i++) {
sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputs[i]);
- if (isStrategyActive(outputDesc, (routing_strategy)curStrategy)) {
+ if (outputDesc->isStreamActive((audio_stream_type_t)curStream)) {
curDevices |= outputDesc->device();
}
}
@@ -4897,10 +4878,7 @@
{
ALOGVV("applyStreamVolumes() for device %08x", device);
- for (int stream = 0; stream < AUDIO_STREAM_CNT; stream++) {
- if (stream == AUDIO_STREAM_PATCH) {
- continue;
- }
+ for (int stream = 0; stream < AUDIO_STREAM_FOR_POLICY_CNT; stream++) {
checkAndSetVolume((audio_stream_type_t)stream,
mVolumeCurves->getVolumeIndex((audio_stream_type_t)stream, device),
outputDesc,
@@ -4918,10 +4896,7 @@
{
ALOGVV("setStrategyMute() strategy %d, mute %d, output ID %d",
strategy, on, outputDesc->getId());
- for (int stream = 0; stream < AUDIO_STREAM_CNT; stream++) {
- if (stream == AUDIO_STREAM_PATCH) {
- continue;
- }
+ for (int stream = 0; stream < AUDIO_STREAM_FOR_POLICY_CNT; stream++) {
if (getStrategy((audio_stream_type_t)stream) == strategy) {
setStreamMute((audio_stream_type_t)stream, on, outputDesc, delayMs, device);
}
@@ -5101,10 +5076,7 @@
if ((sysTime == 0) && (inPastMs != 0)) {
sysTime = systemTime();
}
- for (int i = 0; i < (int)AUDIO_STREAM_CNT; i++) {
- if (i == AUDIO_STREAM_PATCH) {
- continue;
- }
+ for (int i = 0; i < (int)AUDIO_STREAM_FOR_POLICY_CNT; i++) {
if (((getStrategy((audio_stream_type_t)i) == strategy) ||
(NUM_STRATEGIES == strategy)) &&
outputDesc->isStreamActive((audio_stream_type_t)i, inPastMs, sysTime)) {
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index d6e48ab..1ef896f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -506,8 +506,8 @@
void clearAudioSources(uid_t uid);
- static bool strategiesMatchForvolume(routing_strategy strategy1,
- routing_strategy strategy2);
+ static bool streamsMatchForvolume(audio_stream_type_t stream1,
+ audio_stream_type_t stream2);
uid_t mUidCached;
AudioPolicyClientInterface *mpClientInterface; // audio policy client interface
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 6490682..4d12015 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -53,7 +53,13 @@
// Check if lens is fixed-focus
if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
+ } else {
+ m3aState.afMode = ANDROID_CONTROL_AF_MODE_AUTO;
}
+ m3aState.awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
+ m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
+ m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
+ m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
}
}
@@ -253,80 +259,99 @@
if (frameNumber <= mLast3AFrameNumber) {
ALOGV("%s: Already sent 3A for frame number %d, skipping",
__FUNCTION__, frameNumber);
+
+ // Remove the entry if there is one for this frame number in mPending3AStates.
+ mPending3AStates.removeItem(frameNumber);
return OK;
}
- mLast3AFrameNumber = frameNumber;
+ AlgState pendingState;
- // Get 3A states from result metadata
+ ssize_t index = mPending3AStates.indexOfKey(frameNumber);
+ if (index != NAME_NOT_FOUND) {
+ pendingState = mPending3AStates.valueAt(index);
+ }
+
+ // Update 3A states from the result.
bool gotAllStates = true;
- AlgState new3aState;
-
// TODO: Also use AE mode, AE trigger ID
+ gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
+ &pendingState.afMode, frameNumber, cameraId);
- gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
- &new3aState.afMode, frameNumber, cameraId);
+ gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
+ &pendingState.awbMode, frameNumber, cameraId);
- gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
- &new3aState.awbMode, frameNumber, cameraId);
+ gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
+ &pendingState.aeState, frameNumber, cameraId);
- gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
- &new3aState.aeState, frameNumber, cameraId);
+ gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
+ &pendingState.afState, frameNumber, cameraId);
- gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
- &new3aState.afState, frameNumber, cameraId);
-
- gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
- &new3aState.awbState, frameNumber, cameraId);
+ gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
+ &pendingState.awbState, frameNumber, cameraId);
if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
- new3aState.afTriggerId = frame.mResultExtras.afTriggerId;
- new3aState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
+ pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
+ pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
} else {
- gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AF_TRIGGER_ID,
- &new3aState.afTriggerId, frameNumber, cameraId);
+ gotAllStates &= updatePendingState<int32_t>(metadata,
+ ANDROID_CONTROL_AF_TRIGGER_ID, &pendingState.afTriggerId, frameNumber, cameraId);
- gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
- &new3aState.aeTriggerId, frameNumber, cameraId);
+ gotAllStates &= updatePendingState<int32_t>(metadata,
+ ANDROID_CONTROL_AE_PRECAPTURE_ID, &pendingState.aeTriggerId, frameNumber, cameraId);
}
- if (!gotAllStates) return BAD_VALUE;
+ if (!gotAllStates) {
+ // If not all states are received, put the pending state to mPending3AStates.
+ if (index == NAME_NOT_FOUND) {
+ mPending3AStates.add(frameNumber, pendingState);
+ } else {
+ mPending3AStates.replaceValueAt(index, pendingState);
+ }
+ return NOT_ENOUGH_DATA;
+ }
- if (new3aState.aeState != m3aState.aeState) {
+ // Once all 3A states are received, notify the client about 3A changes.
+ if (pendingState.aeState != m3aState.aeState) {
ALOGV("%s: Camera %d: AE state %d->%d",
__FUNCTION__, cameraId,
- m3aState.aeState, new3aState.aeState);
- client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
+ m3aState.aeState, pendingState.aeState);
+ client->notifyAutoExposure(pendingState.aeState, pendingState.aeTriggerId);
}
- if (new3aState.afState != m3aState.afState ||
- new3aState.afMode != m3aState.afMode ||
- new3aState.afTriggerId != m3aState.afTriggerId) {
+ if (pendingState.afState != m3aState.afState ||
+ pendingState.afMode != m3aState.afMode ||
+ pendingState.afTriggerId != m3aState.afTriggerId) {
ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
__FUNCTION__, cameraId,
- m3aState.afState, new3aState.afState,
- m3aState.afMode, new3aState.afMode,
- m3aState.afTriggerId, new3aState.afTriggerId);
- client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
+ m3aState.afState, pendingState.afState,
+ m3aState.afMode, pendingState.afMode,
+ m3aState.afTriggerId, pendingState.afTriggerId);
+ client->notifyAutoFocus(pendingState.afState, pendingState.afTriggerId);
}
- if (new3aState.awbState != m3aState.awbState ||
- new3aState.awbMode != m3aState.awbMode) {
+ if (pendingState.awbState != m3aState.awbState ||
+ pendingState.awbMode != m3aState.awbMode) {
ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
__FUNCTION__, cameraId,
- m3aState.awbState, new3aState.awbState,
- m3aState.awbMode, new3aState.awbMode);
- client->notifyAutoWhitebalance(new3aState.awbState,
- new3aState.aeTriggerId);
+ m3aState.awbState, pendingState.awbState,
+ m3aState.awbMode, pendingState.awbMode);
+ client->notifyAutoWhitebalance(pendingState.awbState,
+ pendingState.aeTriggerId);
}
- m3aState = new3aState;
+ if (index != NAME_NOT_FOUND) {
+ mPending3AStates.removeItemsAt(index);
+ }
+
+ m3aState = pendingState;
+ mLast3AFrameNumber = frameNumber;
return OK;
}
template<typename Src, typename T>
-bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
+bool FrameProcessor::updatePendingState(const CameraMetadata& result, int32_t tag,
T* value, int32_t frameNumber, int cameraId) {
camera_metadata_ro_entry_t entry;
if (value == NULL) {
@@ -335,9 +360,14 @@
return false;
}
+ // Already got the value for this tag.
+ if (*value != static_cast<T>(NOT_SET)) {
+ return true;
+ }
+
entry = result.find(tag);
if (entry.count == 0) {
- ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
+ ALOGV("%s: Camera %d: No %s provided by HAL for frame %d in this result!",
__FUNCTION__, cameraId,
get_camera_metadata_tag_name(tag), frameNumber);
return false;
diff --git a/services/camera/libcameraservice/api1/client2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index 68cf55b..a5b81a7 100644
--- a/services/camera/libcameraservice/api1/client2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -43,6 +43,8 @@
~FrameProcessor();
private:
+ static const int32_t NOT_SET = -1;
+
wp<Camera2Client> mClient;
bool mSynthesize3ANotify;
@@ -63,7 +65,7 @@
// Helper for process3aState
template<typename Src, typename T>
- bool get3aResult(const CameraMetadata& result, int32_t tag, T* value,
+ bool updatePendingState(const CameraMetadata& result, int32_t tag, T* value,
int32_t frameNumber, int cameraId);
@@ -81,15 +83,20 @@
// These defaults need to match those in Parameters.cpp
AlgState() :
- afMode(ANDROID_CONTROL_AF_MODE_AUTO),
- awbMode(ANDROID_CONTROL_AWB_MODE_AUTO),
- aeState(ANDROID_CONTROL_AE_STATE_INACTIVE),
- afState(ANDROID_CONTROL_AF_STATE_INACTIVE),
- awbState(ANDROID_CONTROL_AWB_STATE_INACTIVE),
- afTriggerId(0),
- aeTriggerId(0) {
+ afMode((camera_metadata_enum_android_control_af_mode)NOT_SET),
+ awbMode((camera_metadata_enum_android_control_awb_mode)NOT_SET),
+ aeState((camera_metadata_enum_android_control_ae_state)NOT_SET),
+ afState((camera_metadata_enum_android_control_af_state)NOT_SET),
+ awbState((camera_metadata_enum_android_control_awb_state)NOT_SET),
+ afTriggerId(NOT_SET),
+ aeTriggerId(NOT_SET) {
}
- } m3aState;
+ };
+
+ AlgState m3aState;
+
+ // frame number -> pending 3A states that not all data are received yet.
+ KeyedVector<int32_t, AlgState> mPending3AStates;
// Whether the partial result is enabled for this device
bool mUsePartialResult;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 9f8eac6..05c5323 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2058,176 +2058,6 @@
return OK;
}
-/**
- * Check if all 3A fields are ready, and send off a partial 3A-only result
- * to the output frame queue
- */
-bool Camera3Device::processPartial3AResult(
- uint32_t frameNumber,
- const CameraMetadata& partial, const CaptureResultExtras& resultExtras) {
-
- // Check if all 3A states are present
- // The full list of fields is
- // android.control.afMode
- // android.control.awbMode
- // android.control.aeState
- // android.control.awbState
- // android.control.afState
- // android.control.afTriggerID
- // android.control.aePrecaptureID
- // TODO: Add android.control.aeMode
-
- bool gotAllStates = true;
-
- uint8_t afMode;
- uint8_t awbMode;
- uint8_t aeState;
- uint8_t afState;
- uint8_t awbState;
-
- gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_MODE,
- &afMode, frameNumber);
-
- gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_MODE,
- &awbMode, frameNumber);
-
- gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AE_STATE,
- &aeState, frameNumber);
-
- gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AF_STATE,
- &afState, frameNumber);
-
- gotAllStates &= get3AResult(partial, ANDROID_CONTROL_AWB_STATE,
- &awbState, frameNumber);
-
- if (!gotAllStates) return false;
-
- ALOGVV("%s: Camera %d: Frame %d, Request ID %d: AF mode %d, AWB mode %d, "
- "AF state %d, AE state %d, AWB state %d, "
- "AF trigger %d, AE precapture trigger %d",
- __FUNCTION__, mId, frameNumber, resultExtras.requestId,
- afMode, awbMode,
- afState, aeState, awbState,
- resultExtras.afTriggerId, resultExtras.precaptureTriggerId);
-
- // Got all states, so construct a minimal result to send
- // In addition to the above fields, this means adding in
- // android.request.frameCount
- // android.request.requestId
- // android.quirks.partialResult (for HAL version below HAL3.2)
-
- const size_t kMinimal3AResultEntries = 10;
-
- Mutex::Autolock l(mOutputLock);
-
- CaptureResult captureResult;
- captureResult.mResultExtras = resultExtras;
- captureResult.mMetadata = CameraMetadata(kMinimal3AResultEntries, /*dataCapacity*/ 0);
- // TODO: change this to sp<CaptureResult>. This will need other changes, including,
- // but not limited to CameraDeviceBase::getNextResult
- CaptureResult& min3AResult =
- *mResultQueue.insert(mResultQueue.end(), captureResult);
-
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_REQUEST_FRAME_COUNT,
- // TODO: This is problematic casting. Need to fix CameraMetadata.
- reinterpret_cast<int32_t*>(&frameNumber), frameNumber)) {
- return false;
- }
-
- int32_t requestId = resultExtras.requestId;
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_REQUEST_ID,
- &requestId, frameNumber)) {
- return false;
- }
-
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
- static const uint8_t partialResult = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_QUIRKS_PARTIAL_RESULT,
- &partialResult, frameNumber)) {
- return false;
- }
- }
-
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_MODE,
- &afMode, frameNumber)) {
- return false;
- }
-
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AWB_MODE,
- &awbMode, frameNumber)) {
- return false;
- }
-
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AE_STATE,
- &aeState, frameNumber)) {
- return false;
- }
-
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_STATE,
- &afState, frameNumber)) {
- return false;
- }
-
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AWB_STATE,
- &awbState, frameNumber)) {
- return false;
- }
-
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AF_TRIGGER_ID,
- &resultExtras.afTriggerId, frameNumber)) {
- return false;
- }
-
- if (!insert3AResult(min3AResult.mMetadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
- &resultExtras.precaptureTriggerId, frameNumber)) {
- return false;
- }
-
- // We only send the aggregated partial when all 3A related metadata are available
- // For both API1 and API2.
- // TODO: we probably should pass through all partials to API2 unconditionally.
- mResultSignal.signal();
-
- return true;
-}
-
-template<typename T>
-bool Camera3Device::get3AResult(const CameraMetadata& result, int32_t tag,
- T* value, uint32_t frameNumber) {
- (void) frameNumber;
-
- camera_metadata_ro_entry_t entry;
-
- entry = result.find(tag);
- if (entry.count == 0) {
- ALOGVV("%s: Camera %d: Frame %d: No %s provided by HAL!", __FUNCTION__,
- mId, frameNumber, get_camera_metadata_tag_name(tag));
- return false;
- }
-
- if (sizeof(T) == sizeof(uint8_t)) {
- *value = entry.data.u8[0];
- } else if (sizeof(T) == sizeof(int32_t)) {
- *value = entry.data.i32[0];
- } else {
- ALOGE("%s: Unexpected type", __FUNCTION__);
- return false;
- }
- return true;
-}
-
-template<typename T>
-bool Camera3Device::insert3AResult(CameraMetadata& result, int32_t tag,
- const T* value, uint32_t frameNumber) {
- if (result.update(tag, value, 1) != NO_ERROR) {
- mResultQueue.erase(--mResultQueue.end(), mResultQueue.end());
- SET_ERR("Frame %d: Failed to set %s in partial metadata",
- frameNumber, get_camera_metadata_tag_name(tag));
- return false;
- }
- return true;
-}
-
void Camera3Device::returnOutputBuffers(
const camera3_stream_buffer_t *outputBuffers, size_t numBuffers,
nsecs_t timestamp) {
@@ -2295,6 +2125,48 @@
}
}
+void Camera3Device::insertResultLocked(CaptureResult *result, uint32_t frameNumber,
+ const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+ if (result == nullptr) return;
+
+ if (result->mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
+ (int32_t*)&frameNumber, 1) != OK) {
+ SET_ERR("Failed to set frame number %d in metadata", frameNumber);
+ return;
+ }
+
+ if (result->mMetadata.update(ANDROID_REQUEST_ID, &result->mResultExtras.requestId, 1) != OK) {
+ SET_ERR("Failed to set request ID in metadata for frame %d", frameNumber);
+ return;
+ }
+
+ overrideResultForPrecaptureCancel(&result->mMetadata, aeTriggerCancelOverride);
+
+ // Valid result, insert into queue
+ List<CaptureResult>::iterator queuedResult =
+ mResultQueue.insert(mResultQueue.end(), CaptureResult(*result));
+ ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32, __FUNCTION__,
+ queuedResult->mResultExtras.requestId,
+ queuedResult->mResultExtras.frameNumber,
+ queuedResult->mResultExtras.burstId);
+
+ mResultSignal.signal();
+}
+
+
+void Camera3Device::sendPartialCaptureResult(const camera_metadata_t * partialResult,
+ const CaptureResultExtras &resultExtras, uint32_t frameNumber,
+ const AeTriggerCancelOverride_t &aeTriggerCancelOverride) {
+ Mutex::Autolock l(mOutputLock);
+
+ CaptureResult captureResult;
+ captureResult.mResultExtras = resultExtras;
+ captureResult.mMetadata = partialResult;
+
+ insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
+}
+
void Camera3Device::sendCaptureResult(CameraMetadata &pendingMetadata,
CaptureResultExtras &resultExtras,
@@ -2330,16 +2202,6 @@
captureResult.mResultExtras = resultExtras;
captureResult.mMetadata = pendingMetadata;
- if (captureResult.mMetadata.update(ANDROID_REQUEST_FRAME_COUNT,
- (int32_t*)&frameNumber, 1) != OK) {
- SET_ERR("Failed to set frame# in metadata (%d)",
- frameNumber);
- return;
- } else {
- ALOGVV("%s: Camera %d: Set frame# in metadata (%d)",
- __FUNCTION__, mId, frameNumber);
- }
-
// Append any previous partials to form a complete result
if (mUsePartialResult && !collectedPartialResult.isEmpty()) {
captureResult.mMetadata.append(collectedPartialResult);
@@ -2348,26 +2210,14 @@
captureResult.mMetadata.sort();
// Check that there's a timestamp in the result metadata
- camera_metadata_entry entry =
- captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
+ camera_metadata_entry entry = captureResult.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
if (entry.count == 0) {
SET_ERR("No timestamp provided by HAL for frame %d!",
frameNumber);
return;
}
- overrideResultForPrecaptureCancel(&captureResult.mMetadata, aeTriggerCancelOverride);
-
- // Valid result, insert into queue
- List<CaptureResult>::iterator queuedResult =
- mResultQueue.insert(mResultQueue.end(), CaptureResult(captureResult));
- ALOGVV("%s: result requestId = %" PRId32 ", frameNumber = %" PRId64
- ", burstId = %" PRId32, __FUNCTION__,
- queuedResult->mResultExtras.requestId,
- queuedResult->mResultExtras.frameNumber,
- queuedResult->mResultExtras.burstId);
-
- mResultSignal.signal();
+ insertResultLocked(&captureResult, frameNumber, aeTriggerCancelOverride);
}
/**
@@ -2444,7 +2294,7 @@
}
isPartialResult = (result->partial_result < mNumPartialResults);
if (isPartialResult) {
- request.partialResult.collectedResult.append(result->result);
+ request.collectedPartialResult.append(result->result);
}
} else {
camera_metadata_ro_entry_t partialResultEntry;
@@ -2457,21 +2307,17 @@
// A partial result. Flag this as such, and collect this
// set of metadata into the in-flight entry.
isPartialResult = true;
- request.partialResult.collectedResult.append(
+ request.collectedPartialResult.append(
result->result);
- request.partialResult.collectedResult.erase(
+ request.collectedPartialResult.erase(
ANDROID_QUIRKS_PARTIAL_RESULT);
}
}
if (isPartialResult) {
- // Fire off a 3A-only result if possible
- if (!request.partialResult.haveSent3A) {
- request.partialResult.haveSent3A =
- processPartial3AResult(frameNumber,
- request.partialResult.collectedResult,
- request.resultExtras);
- }
+ // Send partial capture result
+ sendPartialCaptureResult(result->result, request.resultExtras, frameNumber,
+ request.aeTriggerCancelOverride);
}
}
@@ -2486,9 +2332,9 @@
return;
}
if (mUsePartialResult &&
- !request.partialResult.collectedResult.isEmpty()) {
+ !request.collectedPartialResult.isEmpty()) {
collectedPartialResult.acquire(
- request.partialResult.collectedResult);
+ request.collectedPartialResult);
}
request.haveResultMetadata = true;
}
@@ -2531,7 +2377,7 @@
if (result->result != NULL && !isPartialResult) {
if (shutterTimestamp == 0) {
request.pendingMetadata = result->result;
- request.partialResult.collectedResult = collectedPartialResult;
+ request.collectedPartialResult = collectedPartialResult;
} else {
CameraMetadata metadata;
metadata = result->result;
@@ -2649,6 +2495,7 @@
resultExtras.frameNumber);
}
}
+ resultExtras.errorStreamId = streamId;
if (listener != NULL) {
listener->notifyError(errorCode, resultExtras);
} else {
@@ -2709,7 +2556,7 @@
// send pending result and buffers
sendCaptureResult(r.pendingMetadata, r.resultExtras,
- r.partialResult.collectedResult, msg.frame_number,
+ r.collectedPartialResult, msg.frame_number,
r.hasInputBuffer, r.aeTriggerCancelOverride);
returnOutputBuffers(r.pendingOutputBuffers.array(),
r.pendingOutputBuffers.size(), r.shutterTimestamp);
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index bee69ee..5b1c87e 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -648,6 +648,10 @@
// receives the shutter event.
CameraMetadata pendingMetadata;
+ // The metadata of the partial results that framework receives from HAL so far
+ // and has sent out.
+ CameraMetadata collectedPartialResult;
+
// Buffers are added by process_capture_result when output buffers
// return from HAL but framework has not yet received the shutter
// event. They will be returned to the streams when framework receives
@@ -658,19 +662,6 @@
// CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
AeTriggerCancelOverride_t aeTriggerCancelOverride;
-
- // Fields used by the partial result only
- struct PartialResultInFlight {
- // Set by process_capture_result once 3A has been sent to clients
- bool haveSent3A;
- // Result metadata collected so far, when partial results are in use
- CameraMetadata collectedResult;
-
- PartialResultInFlight():
- haveSent3A(false) {
- }
- } partialResult;
-
// Default constructor needed by KeyedVector
InFlightRequest() :
shutterTimestamp(0),
@@ -706,23 +697,6 @@
const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
/**
- * For the partial result, check if all 3A state fields are available
- * and if so, queue up 3A-only result to the client. Returns true if 3A
- * is sent.
- */
- bool processPartial3AResult(uint32_t frameNumber,
- const CameraMetadata& partial, const CaptureResultExtras& resultExtras);
-
- // Helpers for reading and writing 3A metadata into to/from partial results
- template<typename T>
- bool get3AResult(const CameraMetadata& result, int32_t tag,
- T* value, uint32_t frameNumber);
-
- template<typename T>
- bool insert3AResult(CameraMetadata &result, int32_t tag, const T* value,
- uint32_t frameNumber);
-
- /**
* Override result metadata for cancelling AE precapture trigger applied in
* handleAePrecaptureCancelRequest().
*/
@@ -820,13 +794,24 @@
void returnOutputBuffers(const camera3_stream_buffer_t *outputBuffers,
size_t numBuffers, nsecs_t timestamp);
- // Insert the capture result given the pending metadata, result extras,
+ // Send a partial capture result.
+ void sendPartialCaptureResult(const camera_metadata_t * partialResult,
+ const CaptureResultExtras &resultExtras, uint32_t frameNumber,
+ const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+
+ // Send a total capture result given the pending metadata and result extras,
// partial results, and the frame number to the result queue.
void sendCaptureResult(CameraMetadata &pendingMetadata,
CaptureResultExtras &resultExtras,
CameraMetadata &collectedPartialResult, uint32_t frameNumber,
bool reprocess, const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+ // Insert the result to the result queue after updating frame number and overriding AE
+ // trigger cancel.
+ // mOutputLock must be held when calling this function.
+ void insertResultLocked(CaptureResult *result, uint32_t frameNumber,
+ const AeTriggerCancelOverride_t &aeTriggerCancelOverride);
+
/**** Scope for mInFlightLock ****/
// Remove the in-flight request of the given index from mInFlightMap
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 3d4e0b5..e1235b8 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -43,7 +43,7 @@
return itemsStr;
}
-static bool hasResourceType(String8 type, Vector<MediaResource> resources) {
+static bool hasResourceType(MediaResource::Type type, Vector<MediaResource> resources) {
for (size_t i = 0; i < resources.size(); ++i) {
if (resources[i].mType == type) {
return true;
@@ -52,7 +52,7 @@
return false;
}
-static bool hasResourceType(String8 type, ResourceInfos infos) {
+static bool hasResourceType(MediaResource::Type type, ResourceInfos infos) {
for (size_t i = 0; i < infos.size(); ++i) {
if (hasResourceType(type, infos[i].resources)) {
return true;
@@ -96,8 +96,8 @@
if (binder != NULL) {
sp<IMediaResourceMonitor> service = interface_cast<IMediaResourceMonitor>(binder);
for (size_t i = 0; i < resources.size(); ++i) {
- service->notifyResourceGranted(pid, String16(resources[i].mType),
- String16(resources[i].mSubType), resources[i].mValue);
+ service->notifyResourceGranted(pid, String16(asString(resources[i].mType)),
+ String16(asString(resources[i].mSubType)), resources[i].mValue);
}
}
}
@@ -275,12 +275,12 @@
const MediaResource *nonSecureCodec = NULL;
const MediaResource *graphicMemory = NULL;
for (size_t i = 0; i < resources.size(); ++i) {
- String8 type = resources[i].mType;
- if (resources[i].mType == kResourceSecureCodec) {
+ MediaResource::Type type = resources[i].mType;
+ if (resources[i].mType == MediaResource::kSecureCodec) {
secureCodec = &resources[i];
- } else if (type == kResourceNonSecureCodec) {
+ } else if (type == MediaResource::kNonSecureCodec) {
nonSecureCodec = &resources[i];
- } else if (type == kResourceGraphicMemory) {
+ } else if (type == MediaResource::kGraphicMemory) {
graphicMemory = &resources[i];
}
}
@@ -288,19 +288,19 @@
// first pass to handle secure/non-secure codec conflict
if (secureCodec != NULL) {
if (!mSupportsMultipleSecureCodecs) {
- if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
+ if (!getAllClients_l(callingPid, MediaResource::kSecureCodec, &clients)) {
return false;
}
}
if (!mSupportsSecureWithNonSecureCodec) {
- if (!getAllClients_l(callingPid, String8(kResourceNonSecureCodec), &clients)) {
+ if (!getAllClients_l(callingPid, MediaResource::kNonSecureCodec, &clients)) {
return false;
}
}
}
if (nonSecureCodec != NULL) {
if (!mSupportsSecureWithNonSecureCodec) {
- if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
+ if (!getAllClients_l(callingPid, MediaResource::kSecureCodec, &clients)) {
return false;
}
}
@@ -320,11 +320,11 @@
if (clients.size() == 0) {
// if we are here, run the fourth pass to free one codec with the different type.
if (secureCodec != NULL) {
- MediaResource temp(String8(kResourceNonSecureCodec), 1);
+ MediaResource temp(MediaResource::kNonSecureCodec, 1);
getClientForResource_l(callingPid, &temp, &clients);
}
if (nonSecureCodec != NULL) {
- MediaResource temp(String8(kResourceSecureCodec), 1);
+ MediaResource temp(MediaResource::kSecureCodec, 1);
getClientForResource_l(callingPid, &temp, &clients);
}
}
@@ -374,7 +374,7 @@
}
bool ResourceManagerService::getAllClients_l(
- int callingPid, const String8 &type, Vector<sp<IResourceManagerClient>> *clients) {
+ int callingPid, MediaResource::Type type, Vector<sp<IResourceManagerClient>> *clients) {
Vector<sp<IResourceManagerClient>> temp;
for (size_t i = 0; i < mMap.size(); ++i) {
ResourceInfos &infos = mMap.editValueAt(i);
@@ -384,7 +384,7 @@
// some higher/equal priority process owns the resource,
// this request can't be fulfilled.
ALOGE("getAllClients_l: can't reclaim resource %s from pid %d",
- type.string(), mMap.keyAt(i));
+ asString(type), mMap.keyAt(i));
return false;
}
temp.push_back(infos[j].client);
@@ -392,7 +392,7 @@
}
}
if (temp.size() == 0) {
- ALOGV("getAllClients_l: didn't find any resource %s", type.string());
+ ALOGV("getAllClients_l: didn't find any resource %s", asString(type));
return true;
}
clients->appendVector(temp);
@@ -400,7 +400,7 @@
}
bool ResourceManagerService::getLowestPriorityBiggestClient_l(
- int callingPid, const String8 &type, sp<IResourceManagerClient> *client) {
+ int callingPid, MediaResource::Type type, sp<IResourceManagerClient> *client) {
int lowestPriorityPid;
int lowestPriority;
int callingPriority;
@@ -425,7 +425,7 @@
}
bool ResourceManagerService::getLowestPriorityPid_l(
- const String8 &type, int *lowestPriorityPid, int *lowestPriority) {
+ MediaResource::Type type, int *lowestPriorityPid, int *lowestPriority) {
int pid = -1;
int priority = -1;
for (size_t i = 0; i < mMap.size(); ++i) {
@@ -472,7 +472,7 @@
}
bool ResourceManagerService::getBiggestClient_l(
- int pid, const String8 &type, sp<IResourceManagerClient> *client) {
+ int pid, MediaResource::Type type, sp<IResourceManagerClient> *client) {
ssize_t index = mMap.indexOfKey(pid);
if (index < 0) {
ALOGE("getBiggestClient_l: can't find resource info for pid %d", pid);
@@ -495,7 +495,7 @@
}
if (clientTemp == NULL) {
- ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", type.string(), pid);
+ ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", asString(type), pid);
return false;
}
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
index 4769373..8f6fe9a 100644
--- a/services/mediaresourcemanager/ResourceManagerService.h
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -79,22 +79,22 @@
// Gets the list of all the clients who own the specified resource type.
// Returns false if any client belongs to a process with higher priority than the
// calling process. The clients will remain unchanged if returns false.
- bool getAllClients_l(int callingPid, const String8 &type,
+ bool getAllClients_l(int callingPid, MediaResource::Type type,
Vector<sp<IResourceManagerClient>> *clients);
// Gets the client who owns specified resource type from lowest possible priority process.
// Returns false if the calling process priority is not higher than the lowest process
// priority. The client will remain unchanged if returns false.
- bool getLowestPriorityBiggestClient_l(int callingPid, const String8 &type,
+ bool getLowestPriorityBiggestClient_l(int callingPid, MediaResource::Type type,
sp<IResourceManagerClient> *client);
// Gets lowest priority process that has the specified resource type.
// Returns false if failed. The output parameters will remain unchanged if failed.
- bool getLowestPriorityPid_l(const String8 &type, int *pid, int *priority);
+ bool getLowestPriorityPid_l(MediaResource::Type type, int *pid, int *priority);
// Gets the client who owns biggest piece of specified resource type from pid.
// Returns false if failed. The client will remain unchanged if failed.
- bool getBiggestClient_l(int pid, const String8 &type, sp<IResourceManagerClient> *client);
+ bool getBiggestClient_l(int pid, MediaResource::Type type, sp<IResourceManagerClient> *client);
bool isCallingPriorityHigher_l(int callingPid, int pid);
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
index cffedc6..62b7711 100644
--- a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -152,24 +152,24 @@
void addResource() {
// kTestPid1 mTestClient1
Vector<MediaResource> resources1;
- resources1.push_back(MediaResource(String8(kResourceSecureCodec), 1));
+ resources1.push_back(MediaResource(MediaResource::kSecureCodec, 1));
mService->addResource(kTestPid1, getId(mTestClient1), mTestClient1, resources1);
- resources1.push_back(MediaResource(String8(kResourceGraphicMemory), 200));
+ resources1.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
Vector<MediaResource> resources11;
- resources11.push_back(MediaResource(String8(kResourceGraphicMemory), 200));
+ resources11.push_back(MediaResource(MediaResource::kGraphicMemory, 200));
mService->addResource(kTestPid1, getId(mTestClient1), mTestClient1, resources11);
// kTestPid2 mTestClient2
Vector<MediaResource> resources2;
- resources2.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
- resources2.push_back(MediaResource(String8(kResourceGraphicMemory), 300));
+ resources2.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
+ resources2.push_back(MediaResource(MediaResource::kGraphicMemory, 300));
mService->addResource(kTestPid2, getId(mTestClient2), mTestClient2, resources2);
// kTestPid2 mTestClient3
Vector<MediaResource> resources3;
mService->addResource(kTestPid2, getId(mTestClient3), mTestClient3, resources3);
- resources3.push_back(MediaResource(String8(kResourceSecureCodec), 1));
- resources3.push_back(MediaResource(String8(kResourceGraphicMemory), 100));
+ resources3.push_back(MediaResource(MediaResource::kSecureCodec, 1));
+ resources3.push_back(MediaResource(MediaResource::kGraphicMemory, 100));
mService->addResource(kTestPid2, getId(mTestClient3), mTestClient3, resources3);
const PidResourceInfosMap &map = mService->mMap;
@@ -237,14 +237,12 @@
void testGetAllClients() {
addResource();
- String8 type = String8(kResourceSecureCodec);
- String8 unknowType = String8("unknowType");
+ MediaResource::Type type = MediaResource::kSecureCodec;
Vector<sp<IResourceManagerClient> > clients;
EXPECT_FALSE(mService->getAllClients_l(kLowPriorityPid, type, &clients));
// some higher priority process (e.g. kTestPid2) owns the resource, so getAllClients_l
// will fail.
EXPECT_FALSE(mService->getAllClients_l(kMidPriorityPid, type, &clients));
- EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, unknowType, &clients));
EXPECT_TRUE(mService->getAllClients_l(kHighPriorityPid, type, &clients));
EXPECT_EQ(2u, clients.size());
@@ -254,8 +252,8 @@
void testReclaimResourceSecure() {
Vector<MediaResource> resources;
- resources.push_back(MediaResource(String8(kResourceSecureCodec), 1));
- resources.push_back(MediaResource(String8(kResourceGraphicMemory), 150));
+ resources.push_back(MediaResource(MediaResource::kSecureCodec, 1));
+ resources.push_back(MediaResource(MediaResource::kGraphicMemory, 150));
// ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
{
@@ -356,7 +354,7 @@
mService->mSupportsSecureWithNonSecureCodec = true;
Vector<MediaResource> resources;
- resources.push_back(MediaResource(String8(kResourceSecureCodec), 1));
+ resources.push_back(MediaResource(MediaResource::kSecureCodec, 1));
EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
// secure codec from lowest process got reclaimed
@@ -374,8 +372,8 @@
void testReclaimResourceNonSecure() {
Vector<MediaResource> resources;
- resources.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
- resources.push_back(MediaResource(String8(kResourceGraphicMemory), 150));
+ resources.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
+ resources.push_back(MediaResource(MediaResource::kGraphicMemory, 150));
// ### secure codec can't coexist with non-secure codec ###
{
@@ -429,7 +427,7 @@
mService->mSupportsSecureWithNonSecureCodec = true;
Vector<MediaResource> resources;
- resources.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
+ resources.push_back(MediaResource(MediaResource::kNonSecureCodec, 1));
EXPECT_TRUE(mService->reclaimResource(kHighPriorityPid, resources));
// one non secure codec from lowest process got reclaimed
@@ -445,7 +443,7 @@
}
void testGetLowestPriorityBiggestClient() {
- String8 type = String8(kResourceGraphicMemory);
+ MediaResource::Type type = MediaResource::kGraphicMemory;
sp<IResourceManagerClient> client;
EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
@@ -454,8 +452,8 @@
EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(kLowPriorityPid, type, &client));
EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(kHighPriorityPid, type, &client));
- // kTestPid1 is the lowest priority process with kResourceGraphicMemory.
- // mTestClient1 has the largest kResourceGraphicMemory within kTestPid1.
+ // kTestPid1 is the lowest priority process with MediaResource::kGraphicMemory.
+ // mTestClient1 has the largest MediaResource::kGraphicMemory within kTestPid1.
EXPECT_EQ(mTestClient1, client);
}
@@ -464,7 +462,7 @@
int priority;
TestProcessInfo processInfo;
- String8 type = String8(kResourceGraphicMemory);
+ MediaResource::Type type = MediaResource::kGraphicMemory;
EXPECT_FALSE(mService->getLowestPriorityPid_l(type, &pid, &priority));
addResource();
@@ -475,7 +473,7 @@
processInfo.getPriority(kTestPid1, &priority1);
EXPECT_EQ(priority1, priority);
- type = String8(kResourceNonSecureCodec);
+ type = MediaResource::kNonSecureCodec;
EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority));
EXPECT_EQ(kTestPid2, pid);
int priority2;
@@ -484,7 +482,7 @@
}
void testGetBiggestClient() {
- String8 type = String8(kResourceGraphicMemory);
+ MediaResource::Type type = MediaResource::kGraphicMemory;
sp<IResourceManagerClient> client;
EXPECT_FALSE(mService->getBiggestClient_l(kTestPid2, type, &client));