am f9ecac70: Enable signed-integer-overflow in rtsp
* commit 'f9ecac709222be2eb49f06f18f3df75840b44cef':
Enable signed-integer-overflow in rtsp
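
The subject refers to enabling the clang signed-integer-overflow sanitizer (UBSan) for the rtsp module, so that wrapping arithmetic traps instead of silently producing wrong values. A tiny illustration of what that sanitizer flags and of the overflow-checked alternative, using the compiler builtin; this is a sketch, not the rtsp code:

    #include <cstdint>
    #include <cstdio>

    int main() {
        int32_t a = INT32_MAX, b = 1, sum;
        // a + b here would be undefined behaviour; with
        // -fsanitize=signed-integer-overflow the sanitizer aborts (or logs)
        // instead of silently wrapping.
        if (__builtin_add_overflow(a, b, &sum)) {
            printf("would overflow\n");
        } else {
            printf("sum = %d\n", sum);
        }
        return 0;
    }
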
diff --git a/camera/ICameraServiceProxy.cpp b/camera/ICameraServiceProxy.cpp
index 06a5afb..694e9c3 100644
--- a/camera/ICameraServiceProxy.cpp
+++ b/camera/ICameraServiceProxy.cpp
@@ -29,11 +29,21 @@
BpCameraServiceProxy(const sp<IBinder>& impl) : BpInterface<ICameraServiceProxy>(impl) {}
virtual void pingForUserUpdate() {
- Parcel data, reply;
+ Parcel data;
data.writeInterfaceToken(ICameraServiceProxy::getInterfaceDescriptor());
- remote()->transact(BnCameraServiceProxy::PING_FOR_USER_UPDATE, data, &reply,
+ remote()->transact(BnCameraServiceProxy::PING_FOR_USER_UPDATE, data, nullptr,
IBinder::FLAG_ONEWAY);
}
+
+ virtual void notifyCameraState(String16 cameraId, CameraState newCameraState) {
+ Parcel data;
+ data.writeInterfaceToken(ICameraServiceProxy::getInterfaceDescriptor());
+ data.writeString16(cameraId);
+ data.writeInt32(newCameraState);
+ remote()->transact(BnCameraServiceProxy::NOTIFY_CAMERA_STATE, data, nullptr,
+ IBinder::FLAG_ONEWAY);
+ }
+
};
@@ -47,9 +57,16 @@
pingForUserUpdate();
return NO_ERROR;
} break;
+ case NOTIFY_CAMERA_STATE: {
+ CHECK_INTERFACE(ICameraServiceProxy, data, reply);
+ String16 cameraId = data.readString16();
+ CameraState newCameraState =
+ static_cast<CameraState>(data.readInt32());
+ notifyCameraState(cameraId, newCameraState);
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
}; // namespace android
-
diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp
index d2dc200..2a9fd2b 100644
--- a/camera/camera2/ICameraDeviceUser.cpp
+++ b/camera/camera2/ICameraDeviceUser.cpp
@@ -49,7 +49,8 @@
WAIT_UNTIL_IDLE,
FLUSH,
PREPARE,
- TEAR_DOWN
+ TEAR_DOWN,
+ PREPARE2
};
namespace {
@@ -366,6 +367,21 @@
return reply.readInt32();
}
+ virtual status_t prepare2(int maxCount, int streamId)
+ {
+ ALOGV("prepare2");
+ Parcel data, reply;
+
+ data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+ data.writeInt32(maxCount);
+ data.writeInt32(streamId);
+
+ remote()->transact(PREPARE2, data, &reply);
+
+ reply.readExceptionCode();
+ return reply.readInt32();
+ }
+
virtual status_t tearDown(int streamId)
{
ALOGV("tearDown");
@@ -592,7 +608,14 @@
reply->writeInt32(tearDown(streamId));
return NO_ERROR;
} break;
-
+ case PREPARE2: {
+ CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+ int maxCount = data.readInt32();
+ int streamId = data.readInt32();
+ reply->writeNoException();
+ reply->writeInt32(prepare2(maxCount, streamId));
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/include/camera/ICameraServiceProxy.h b/include/camera/ICameraServiceProxy.h
index 12a555f..2613c01 100644
--- a/include/camera/ICameraServiceProxy.h
+++ b/include/camera/ICameraServiceProxy.h
@@ -23,15 +23,30 @@
namespace android {
+/**
+ * Interface from native camera service to managed-side camera service proxy.
+ *
+ * Keep in sync with frameworks/base/core/java/android/hardware/ICameraServiceProxy.aidl
+ *
+ */
class ICameraServiceProxy : public IInterface {
public:
enum {
PING_FOR_USER_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
+ NOTIFY_CAMERA_STATE
+ };
+
+ enum CameraState {
+ CAMERA_STATE_OPEN,
+ CAMERA_STATE_ACTIVE,
+ CAMERA_STATE_IDLE,
+ CAMERA_STATE_CLOSED
};
DECLARE_META_INTERFACE(CameraServiceProxy);
virtual void pingForUserUpdate() = 0;
+ virtual void notifyCameraState(String16 cameraId, CameraState newCameraState) = 0;
};
class BnCameraServiceProxy: public BnInterface<ICameraServiceProxy>
@@ -48,5 +63,3 @@
}; // namespace android
#endif // ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
-
-
diff --git a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h
index a7bf8ab..4d8eb53 100644
--- a/include/camera/camera2/ICameraDeviceUser.h
+++ b/include/camera/camera2/ICameraDeviceUser.h
@@ -140,6 +140,11 @@
virtual status_t prepare(int streamId) = 0;
/**
+ * Preallocate up to maxCount buffers for a given output stream asynchronously.
+ */
+ virtual status_t prepare2(int maxCount, int streamId) = 0;
+
+ /**
* Free all unused buffers for a given output stream.
*/
virtual status_t tearDown(int streamId) = 0;
diff --git a/media/img_utils/include/img_utils/DngUtils.h b/media/img_utils/include/img_utils/DngUtils.h
index 3dcedc5..1d8df9c 100644
--- a/media/img_utils/include/img_utils/DngUtils.h
+++ b/media/img_utils/include/img_utils/DngUtils.h
@@ -138,6 +138,34 @@
double opticalCenterY,
const double* kCoeffs);
+
+ /**
+ * Add FixBadPixelsList opcode for the given metadata parameters.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addBadPixelListForMetadata(const uint32_t* hotPixels,
+ uint32_t xyPairCount,
+ uint32_t colorFilterArrangement);
+
+ /**
+ * Add FixBadPixelsList opcode.
+ *
+ * bayerPhase - 0=top-left of image is red, 1=top-left of image is green pixel in red row,
+ * 2=top-left of image is green pixel in blue row, 3=top-left of image is
+ * blue.
+ * badPointCount - number of (x,y) pairs of bad pixels are given in badPointRowColPairs.
+ * badRectCount - number of (top, left, bottom, right) tuples are given in
+ * badRectTopLeftBottomRightTuples
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addBadPixelList(uint32_t bayerPhase,
+ uint32_t badPointCount,
+ uint32_t badRectCount,
+ const uint32_t* badPointRowColPairs,
+ const uint32_t* badRectTopLeftBottomRightTuples);
+
// TODO: Add other Opcode methods
protected:
static const uint32_t FLAG_OPTIONAL = 0x1u;
@@ -146,6 +174,7 @@
// Opcode IDs
enum {
WARP_RECTILINEAR_ID = 1,
+ FIX_BAD_PIXELS_LIST = 5,
GAIN_MAP_ID = 9,
};
@@ -161,6 +190,8 @@
ByteArrayOutput mOpList;
EndianOutput mEndianOut;
+ status_t addOpcodePreamble(uint32_t opcodeId);
+
};
} /*namespace img_utils*/
diff --git a/media/img_utils/src/DngUtils.cpp b/media/img_utils/src/DngUtils.cpp
index b213403..9473dce 100644
--- a/media/img_utils/src/DngUtils.cpp
+++ b/media/img_utils/src/DngUtils.cpp
@@ -224,13 +224,7 @@
uint32_t mapPlanes,
const float* mapGains) {
- uint32_t opcodeId = GAIN_MAP_ID;
-
- status_t err = mEndianOut.write(&opcodeId, 0, 1);
- if (err != OK) return err;
-
- uint8_t version[] = {1, 3, 0, 0};
- err = mEndianOut.write(version, 0, NELEMS(version));
+ status_t err = addOpcodePreamble(GAIN_MAP_ID);
if (err != OK) return err;
// Allow this opcode to be skipped if not supported
@@ -334,13 +328,7 @@
double opticalCenterY,
const double* kCoeffs) {
- uint32_t opcodeId = WARP_RECTILINEAR_ID;
-
- status_t err = mEndianOut.write(&opcodeId, 0, 1);
- if (err != OK) return err;
-
- uint8_t version[] = {1, 3, 0, 0};
- err = mEndianOut.write(version, 0, NELEMS(version));
+ status_t err = addOpcodePreamble(WARP_RECTILINEAR_ID);
if (err != OK) return err;
// Allow this opcode to be skipped if not supported
@@ -373,5 +361,74 @@
return OK;
}
+status_t OpcodeListBuilder::addBadPixelListForMetadata(const uint32_t* hotPixels,
+ uint32_t xyPairCount,
+ uint32_t colorFilterArrangement) {
+ if (colorFilterArrangement > 3) {
+ ALOGE("%s: Unknown color filter arrangement %" PRIu32, __FUNCTION__,
+ colorFilterArrangement);
+ return BAD_VALUE;
+ }
+
+ return addBadPixelList(colorFilterArrangement, xyPairCount, 0, hotPixels, nullptr);
+}
+
+status_t OpcodeListBuilder::addBadPixelList(uint32_t bayerPhase,
+ uint32_t badPointCount,
+ uint32_t badRectCount,
+ const uint32_t* badPointRowColPairs,
+ const uint32_t* badRectTopLeftBottomRightTuples) {
+
+ status_t err = addOpcodePreamble(FIX_BAD_PIXELS_LIST);
+ if (err != OK) return err;
+
+ // Allow this opcode to be skipped if not supported
+ uint32_t flags = FLAG_OPTIONAL;
+
+ err = mEndianOut.write(&flags, 0, 1);
+ if (err != OK) return err;
+
+ const uint32_t NUM_NON_VARLEN_FIELDS = 3;
+ const uint32_t SIZE_OF_POINT = 2;
+ const uint32_t SIZE_OF_RECT = 4;
+
+ uint32_t totalSize = (NUM_NON_VARLEN_FIELDS + badPointCount * SIZE_OF_POINT +
+ badRectCount * SIZE_OF_RECT) * sizeof(uint32_t);
+ err = mEndianOut.write(&totalSize, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&bayerPhase, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&badPointCount, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&badRectCount, 0, 1);
+ if (err != OK) return err;
+
+ if (badPointCount > 0) {
+ err = mEndianOut.write(badPointRowColPairs, 0, SIZE_OF_POINT * badPointCount);
+ if (err != OK) return err;
+ }
+
+ if (badRectCount > 0) {
+ err = mEndianOut.write(badRectTopLeftBottomRightTuples, 0, SIZE_OF_RECT * badRectCount);
+ if (err != OK) return err;
+ }
+
+ mCount++;
+ return OK;
+}
+
+status_t OpcodeListBuilder::addOpcodePreamble(uint32_t opcodeId) {
+ status_t err = mEndianOut.write(&opcodeId, 0, 1);
+ if (err != OK) return err;
+
+ uint8_t version[] = {1, 3, 0, 0};
+ err = mEndianOut.write(version, 0, NELEMS(version));
+ if (err != OK) return err;
+ return OK;
+}
+
} /*namespace img_utils*/
} /*namespace android*/
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index a3f014b..47ed359 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -937,7 +937,7 @@
pid_t tid = (pid_t) data.readInt32();
int sessionId = data.readInt32();
int clientUid = data.readInt32();
- status_t status;
+ status_t status = NO_ERROR;
sp<IAudioTrack> track;
if ((haveSharedBuffer && (buffer == 0)) ||
((buffer != 0) && (buffer->pointer() == NULL))) {
@@ -972,7 +972,7 @@
size_t notificationFrames = data.readInt64();
sp<IMemory> cblk;
sp<IMemory> buffers;
- status_t status;
+ status_t status = NO_ERROR;
sp<IAudioRecord> record = openRecord(input,
sampleRate, format, channelMask, opPackageName, &frameCount, &flags, tid,
clientUid, &sessionId, &notificationFrames, cblk, buffers, &status);
@@ -1104,12 +1104,14 @@
case OPEN_OUTPUT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_module_handle_t module = (audio_module_handle_t)data.readInt32();
- audio_config_t config;
- data.read(&config, sizeof(audio_config_t));
+ audio_config_t config = {};
+ if (data.read(&config, sizeof(audio_config_t)) != NO_ERROR) {
+ ALOGE("b/23905951");
+ }
audio_devices_t devices = (audio_devices_t)data.readInt32();
String8 address(data.readString8());
audio_output_flags_t flags = (audio_output_flags_t) data.readInt32();
- uint32_t latencyMs;
+ uint32_t latencyMs = 0;
audio_io_handle_t output;
status_t status = openOutput(module, &output, &config,
&devices, address, &latencyMs, flags);
@@ -1149,8 +1151,10 @@
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_module_handle_t module = (audio_module_handle_t)data.readInt32();
audio_io_handle_t input = (audio_io_handle_t)data.readInt32();
- audio_config_t config;
- data.read(&config, sizeof(audio_config_t));
+ audio_config_t config = {};
+ if (data.read(&config, sizeof(audio_config_t)) != NO_ERROR) {
+ ALOGE("b/23905951");
+ }
audio_devices_t device = (audio_devices_t)data.readInt32();
String8 address(data.readString8());
audio_source_t source = (audio_source_t)data.readInt32();
@@ -1186,8 +1190,8 @@
case GET_RENDER_POSITION: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
- uint32_t halFrames;
- uint32_t dspFrames;
+ uint32_t halFrames = 0;
+ uint32_t dspFrames = 0;
status_t status = getRenderPosition(&halFrames, &dspFrames, output);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1223,7 +1227,7 @@
} break;
case QUERY_NUM_EFFECTS: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- uint32_t numEffects;
+ uint32_t numEffects = 0;
status_t status = queryNumberEffects(&numEffects);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1233,7 +1237,7 @@
}
case QUERY_EFFECT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- effect_descriptor_t desc;
+ effect_descriptor_t desc = {};
status_t status = queryEffect(data.readInt32(), &desc);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1245,7 +1249,7 @@
CHECK_INTERFACE(IAudioFlinger, data, reply);
effect_uuid_t uuid;
data.read(&uuid, sizeof(effect_uuid_t));
- effect_descriptor_t desc;
+ effect_descriptor_t desc = {};
status_t status = getEffectDescriptor(&uuid, &desc);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1255,16 +1259,18 @@
}
case CREATE_EFFECT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- effect_descriptor_t desc;
- data.read(&desc, sizeof(effect_descriptor_t));
+ effect_descriptor_t desc = {};
+ if (data.read(&desc, sizeof(effect_descriptor_t)) != NO_ERROR) {
+ ALOGE("b/23905951");
+ }
sp<IEffectClient> client = interface_cast<IEffectClient>(data.readStrongBinder());
int32_t priority = data.readInt32();
audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
int sessionId = data.readInt32();
const String16 opPackageName = data.readString16();
- status_t status;
- int id;
- int enabled;
+ status_t status = NO_ERROR;
+ int id = 0;
+ int enabled = 0;
sp<IEffect> effect = createEffect(&desc, client, priority, output, sessionId,
opPackageName, &status, &id, &enabled);
@@ -1333,8 +1339,10 @@
} break;
case GET_AUDIO_PORT: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- struct audio_port port;
- data.read(&port, sizeof(struct audio_port));
+ struct audio_port port = {};
+ if (data.read(&port, sizeof(struct audio_port)) != NO_ERROR) {
+ ALOGE("b/23905951");
+ }
status_t status = getAudioPort(&port);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1346,8 +1354,10 @@
CHECK_INTERFACE(IAudioFlinger, data, reply);
struct audio_patch patch;
data.read(&patch, sizeof(struct audio_patch));
- audio_patch_handle_t handle;
- data.read(&handle, sizeof(audio_patch_handle_t));
+ audio_patch_handle_t handle = {};
+ if (data.read(&handle, sizeof(audio_patch_handle_t)) != NO_ERROR) {
+ ALOGE("b/23905951");
+ }
status_t status = createAudioPatch(&patch, &handle);
reply->writeInt32(status);
if (status == NO_ERROR) {
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 3348441..76b5924 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -877,7 +877,7 @@
if (hasOffloadInfo) {
data.read(&offloadInfo, sizeof(audio_offload_info_t));
}
- audio_io_handle_t output;
+ audio_io_handle_t output = 0;
status_t status = getOutputForAttr(hasAttributes ? &attr : NULL,
&output, session, &stream, uid,
samplingRate, format, channelMask,
@@ -932,7 +932,7 @@
audio_channel_mask_t channelMask = data.readInt32();
audio_input_flags_t flags = (audio_input_flags_t) data.readInt32();
audio_port_handle_t selectedDeviceId = (audio_port_handle_t) data.readInt32();
- audio_io_handle_t input;
+ audio_io_handle_t input = {};
status_t status = getInputForAttr(&attr, &input, session, uid,
samplingRate, format, channelMask,
flags, selectedDeviceId);
@@ -994,7 +994,7 @@
audio_stream_type_t stream =
static_cast <audio_stream_type_t>(data.readInt32());
audio_devices_t device = static_cast <audio_devices_t>(data.readInt32());
- int index;
+ int index = 0;
status_t status = getStreamVolumeIndex(stream, &index, device);
reply->writeInt32(index);
reply->writeInt32(static_cast <uint32_t>(status));
@@ -1148,8 +1148,10 @@
case GET_AUDIO_PORT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
- struct audio_port port;
- data.read(&port, sizeof(struct audio_port));
+ struct audio_port port = {};
+ if (data.read(&port, sizeof(struct audio_port)) != NO_ERROR) {
+ ALOGE("b/23912202");
+ }
status_t status = getAudioPort(&port);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1162,8 +1164,10 @@
CHECK_INTERFACE(IAudioPolicyService, data, reply);
struct audio_patch patch;
data.read(&patch, sizeof(struct audio_patch));
- audio_patch_handle_t handle;
- data.read(&handle, sizeof(audio_patch_handle_t));
+ audio_patch_handle_t handle = {};
+ if (data.read(&handle, sizeof(audio_patch_handle_t)) != NO_ERROR) {
+ ALOGE("b/23912202");
+ }
status_t status = createAudioPatch(&patch, &handle);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1238,9 +1242,9 @@
CHECK_INTERFACE(IAudioPolicyService, data, reply);
sp<IAudioPolicyServiceClient> client = interface_cast<IAudioPolicyServiceClient>(
data.readStrongBinder());
- audio_session_t session;
- audio_io_handle_t ioHandle;
- audio_devices_t device;
+ audio_session_t session = {};
+ audio_io_handle_t ioHandle = {};
+ audio_devices_t device = {};
status_t status = acquireSoundTriggerSession(&session, &ioHandle, &device);
reply->writeInt32(status);
if (status == NO_ERROR) {
@@ -1292,7 +1296,7 @@
data.read(&source, sizeof(struct audio_port_config));
audio_attributes_t attributes;
data.read(&attributes, sizeof(audio_attributes_t));
- audio_io_handle_t handle;
+ audio_io_handle_t handle = {};
status_t status = startAudioSource(&source, &attributes, &handle);
reply->writeInt32(status);
reply->writeInt32(handle);
diff --git a/media/libmedia/ICrypto.cpp b/media/libmedia/ICrypto.cpp
index 947294f..9703b0d 100644
--- a/media/libmedia/ICrypto.cpp
+++ b/media/libmedia/ICrypto.cpp
@@ -303,7 +303,25 @@
AString errorDetailMsg;
ssize_t result;
- if (offset + totalSize > sharedBuffer->size()) {
+ size_t sumSubsampleSizes = 0;
+ bool overflow = false;
+ for (int32_t i = 0; i < numSubSamples; ++i) {
+ CryptoPlugin::SubSample &ss = subSamples[i];
+ if (sumSubsampleSizes <= SIZE_MAX - ss.mNumBytesOfEncryptedData) {
+ sumSubsampleSizes += ss.mNumBytesOfEncryptedData;
+ } else {
+ overflow = true;
+ }
+ if (sumSubsampleSizes <= SIZE_MAX - ss.mNumBytesOfClearData) {
+ sumSubsampleSizes += ss.mNumBytesOfClearData;
+ } else {
+ overflow = true;
+ }
+ }
+
+ if (overflow || sumSubsampleSizes != totalSize) {
+ result = -EINVAL;
+ } else if (offset + totalSize > sharedBuffer->size()) {
result = -EINVAL;
} else {
result = decrypt(
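
The subsample accounting added above guards both the running sum and the comparison against totalSize. A standalone sketch of the same check; SubSample and sumSubsampleSizes are illustrative stand-ins for CryptoPlugin::SubSample and the Binder handler, not the real names:

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Hypothetical stand-in for CryptoPlugin::SubSample, with the same two fields.
    struct SubSample {
        uint32_t mNumBytesOfClearData;
        uint32_t mNumBytesOfEncryptedData;
    };

    // Add each term only when it provably fits in size_t; report overflow instead
    // of silently wrapping, mirroring the check in the ICrypto hunk above.
    static bool sumSubsampleSizes(const std::vector<SubSample> &subSamples, size_t *outSum) {
        size_t sum = 0;
        for (const SubSample &ss : subSamples) {
            if (sum > SIZE_MAX - ss.mNumBytesOfEncryptedData) return false;
            sum += ss.mNumBytesOfEncryptedData;
            if (sum > SIZE_MAX - ss.mNumBytesOfClearData) return false;
            sum += ss.mNumBytesOfClearData;
        }
        *outSum = sum;
        return true;
    }

The caller then rejects the request when the sum disagrees with totalSize, or when offset + totalSize would run past the shared buffer.
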
diff --git a/media/libmedia/IEffect.cpp b/media/libmedia/IEffect.cpp
index eb4b098..faf5795 100644
--- a/media/libmedia/IEffect.cpp
+++ b/media/libmedia/IEffect.cpp
@@ -85,13 +85,15 @@
data.writeInt32(size);
status_t status = remote()->transact(COMMAND, data, &reply);
+ if (status == NO_ERROR) {
+ status = reply.readInt32();
+ }
if (status != NO_ERROR) {
if (pReplySize != NULL)
*pReplySize = 0;
return status;
}
- status = reply.readInt32();
size = reply.readInt32();
if (size != 0 && pReplyData != NULL && pReplySize != NULL) {
reply.read(pReplyData, size);
@@ -154,23 +156,34 @@
uint32_t cmdSize = data.readInt32();
char *cmd = NULL;
if (cmdSize) {
- cmd = (char *)malloc(cmdSize);
+ cmd = (char *)calloc(cmdSize, 1);
+ if (cmd == NULL) {
+ reply->writeInt32(NO_MEMORY);
+ return NO_ERROR;
+ }
data.read(cmd, cmdSize);
}
uint32_t replySize = data.readInt32();
uint32_t replySz = replySize;
char *resp = NULL;
if (replySize) {
- resp = (char *)malloc(replySize);
+ resp = (char *)calloc(replySize, 1);
+ if (resp == NULL) {
+ free(cmd);
+ reply->writeInt32(NO_MEMORY);
+ return NO_ERROR;
+ }
}
status_t status = command(cmdCode, cmdSize, cmd, &replySz, resp);
reply->writeInt32(status);
- if (replySz < replySize) {
- replySize = replySz;
- }
- reply->writeInt32(replySize);
- if (replySize) {
- reply->write(resp, replySize);
+ if (status == NO_ERROR) {
+ if (replySz < replySize) {
+ replySize = replySz;
+ }
+ reply->writeInt32(replySize);
+ if (replySize) {
+ reply->write(resp, replySize);
+ }
}
if (cmd) {
free(cmd);
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index bde35f2..942aec3 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -566,7 +566,7 @@
} break;
case GET_CURRENT_POSITION: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
- int msec;
+ int msec = 0;
status_t ret = getCurrentPosition(&msec);
reply->writeInt32(msec);
reply->writeInt32(ret);
@@ -574,7 +574,7 @@
} break;
case GET_DURATION: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
- int msec;
+ int msec = 0;
status_t ret = getDuration(&msec);
reply->writeInt32(msec);
reply->writeInt32(ret);
@@ -653,6 +653,7 @@
CHECK_INTERFACE(IMediaPlayer, data, reply);
struct sockaddr_in endpoint;
+ memset(&endpoint, 0, sizeof(endpoint));
int amt = data.readInt32();
if (amt == sizeof(endpoint)) {
data.read(&endpoint, sizeof(struct sockaddr_in));
@@ -667,6 +668,7 @@
CHECK_INTERFACE(IMediaPlayer, data, reply);
struct sockaddr_in endpoint;
+ memset(&endpoint, 0, sizeof(endpoint));
status_t res = getRetransmitEndpoint(&endpoint);
reply->writeInt32(res);
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 05a65a3..8db07ca 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1730,7 +1730,7 @@
t->setVolume(mLeftVolume, mRightVolume);
mSampleRateHz = sampleRate;
- mFlags = t->getFlags(); // we suggest the flags above, but new AudioTrack() may not grant it.
+ mFlags = flags;
mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
mFrameSize = t->frameSize();
uint32_t pos;
@@ -1742,7 +1742,7 @@
status_t res = NO_ERROR;
// Note some output devices may give us a direct track even though we don't specify it.
// Example: Line application b/17459982.
- if ((mFlags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) {
+ if ((t->getFlags() & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) {
res = t->setPlaybackRate(mPlaybackRate);
if (res == NO_ERROR) {
t->setAuxEffectSendLevel(mSendLevel);
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index 7dc9be7..b3eb5fd 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -467,9 +467,17 @@
void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) {
if (err != OK) {
- mDataSource.clear();
- mCachedSource.clear();
- mHttpSource.clear();
+ {
+ sp<DataSource> dataSource = mDataSource;
+ sp<NuCachedSource2> cachedSource = mCachedSource;
+ sp<DataSource> httpSource = mHttpSource;
+ {
+ Mutex::Autolock _l(mDisconnectLock);
+ mDataSource.clear();
+ mCachedSource.clear();
+ mHttpSource.clear();
+ }
+ }
mBitrate = -1;
cancelPollBuffering();
@@ -522,13 +530,20 @@
}
void NuPlayer::GenericSource::disconnect() {
- if (mDataSource != NULL) {
+ sp<DataSource> dataSource, httpSource;
+ {
+ Mutex::Autolock _l(mDisconnectLock);
+ dataSource = mDataSource;
+ httpSource = mHttpSource;
+ }
+
+ if (dataSource != NULL) {
// disconnect data source
- if (mDataSource->flags() & DataSource::kIsCachingDataSource) {
- static_cast<NuCachedSource2 *>(mDataSource.get())->disconnect();
+ if (dataSource->flags() & DataSource::kIsCachingDataSource) {
+ static_cast<NuCachedSource2 *>(dataSource.get())->disconnect();
}
- } else if (mHttpSource != NULL) {
- static_cast<HTTPBase *>(mHttpSource.get())->disconnect();
+ } else if (httpSource != NULL) {
+ static_cast<HTTPBase *>(httpSource.get())->disconnect();
}
}
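
The disconnect() change above has two threads racing over the same sp<> members; the fix snapshots strong references under a lock and then works on the copies. The general shape, sketched with standard types rather than the Android sp<>/Mutex classes (all names here are illustrative):

    #include <memory>
    #include <mutex>

    struct DataSource { virtual ~DataSource() {} virtual void disconnect() {} };

    class Source {
    public:
        // Writer side: clear the members only while holding the lock.
        void clearSources() {
            std::lock_guard<std::mutex> l(mDisconnectLock);
            mDataSource.reset();
            mHttpSource.reset();
        }

        // Reader side: take local strong references under the lock, then operate
        // on the copies so a concurrent clearSources() cannot free them mid-use.
        void disconnect() {
            std::shared_ptr<DataSource> dataSource, httpSource;
            {
                std::lock_guard<std::mutex> l(mDisconnectLock);
                dataSource = mDataSource;
                httpSource = mHttpSource;
            }
            if (dataSource) {
                dataSource->disconnect();
            } else if (httpSource) {
                httpSource->disconnect();
            }
        }

    private:
        std::mutex mDisconnectLock;
        std::shared_ptr<DataSource> mDataSource, mHttpSource;
    };
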
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index dc85d2d..ac980ef 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -153,6 +153,7 @@
int32_t mPrevBufferPercentage;
mutable Mutex mReadBufferLock;
+ mutable Mutex mDisconnectLock;
sp<ALooper> mLooper;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 77b9799..c0146d5 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1488,7 +1488,9 @@
}
status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) {
- if (*decoder != NULL) {
+ // The audio decoder could be cleared by tear down. If still in shut down
+ // process, no need to create a new audio decoder.
+ if (*decoder != NULL || (audio && mFlushingAudio == SHUT_DOWN)) {
return OK;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 04a46f4..776dba8 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -106,6 +106,7 @@
mNotifyCompleteVideo(false),
mSyncQueues(false),
mPaused(false),
+ mPauseDrainAudioAllowedUs(0),
mVideoSampleReceived(false),
mVideoRenderingStarted(false),
mVideoRenderingStartGeneration(0),
@@ -630,6 +631,14 @@
return;
}
+ // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
+ if (mPaused) {
+ const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
+ if (diffUs > delayUs) {
+ delayUs = diffUs;
+ }
+ }
+
mDrainAudioQueuePending = true;
sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
msg->setInt32("drainGeneration", mAudioDrainGeneration);
@@ -798,6 +807,10 @@
}
bool NuPlayer::Renderer::onDrainAudioQueue() {
+ // do not drain audio during teardown as queued buffers may be invalid.
+ if (mAudioTornDown) {
+ return false;
+ }
// TODO: This call to getPosition checks if AudioTrack has been created
// in AudioSink before draining audio. If AudioTrack doesn't exist, then
// CHECKs on getPosition will fail.
@@ -877,6 +890,8 @@
ALOGV("AudioSink write would block when writing %zu bytes", copy);
} else {
ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ // This can only happen when AudioSink was opened with doNotReconnect flag set to
+ // true, in which case the NuPlayer will handle the reconnect.
notifyAudioTearDown();
}
break;
@@ -943,6 +958,10 @@
int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
int32_t sampleRate = offloadingAudio() ?
mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
+ if (sampleRate == 0) {
+ ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
+ return 0;
+ }
// TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
}
@@ -1338,8 +1357,16 @@
mAudioSink->flush();
// Call stop() to signal to the AudioSink to completely fill the
// internal buffer before resuming playback.
+ // FIXME: this is ignored after flush().
mAudioSink->stop();
- if (!mPaused) {
+ if (mPaused) {
+ // Race condition: if renderer is paused and audio sink is stopped,
+ // we need to make sure that the audio track buffer fully drains
+ // before delivering data.
+ // FIXME: remove this if we can detect if stop() is complete.
+ const int delayUs = 2 * 50 * 1000; // (2 full mixer thread cycles at 50ms)
+ mPauseDrainAudioAllowedUs = ALooper::GetNowUs() + delayUs;
+ } else {
mAudioSink->start();
}
mNumFramesWritten = 0;
@@ -1471,6 +1498,7 @@
cancelAudioOffloadPauseTimeout();
status_t err = mAudioSink->start();
if (err != OK) {
+ ALOGE("cannot start AudioSink err %d", err);
notifyAudioTearDown();
}
}
@@ -1764,6 +1792,12 @@
const uint32_t frameCount =
(unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
+ // The doNotReconnect means AudioSink will signal back and let NuPlayer to re-construct
+ // AudioSink. We don't want this when there's video because it will cause a video seek to
+ // the previous I frame. But we do want this when there's only audio because it will give
+ // NuPlayer a chance to switch from non-offload mode to offload mode.
+ // So we only set doNotReconnect when there's no video.
+ const bool doNotReconnect = !hasVideo;
status_t err = mAudioSink->open(
sampleRate,
numChannels,
@@ -1774,13 +1808,14 @@
mUseAudioCallback ? this : NULL,
(audio_output_flags_t)pcmFlags,
NULL,
- true /* doNotReconnect */,
+ doNotReconnect,
frameCount);
if (err == OK) {
err = mAudioSink->setPlaybackRate(mPlaybackSettings);
}
if (err != OK) {
ALOGW("openAudioSink: non offloaded open failed status: %d", err);
+ mAudioSink->close();
mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
return err;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 3e65649..87bcbf9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -170,6 +170,7 @@
// modified on only renderer's thread.
bool mPaused;
+ int64_t mPauseDrainAudioAllowedUs; // time when we can drain/deliver audio in pause mode.
bool mVideoSampleReceived;
bool mVideoRenderingStarted;
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 58ff113..af0351e 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -134,7 +134,9 @@
return;
}
}
- mHandler->pause();
+ if (mHandler != NULL) {
+ mHandler->pause();
+ }
}
void NuPlayer::RTSPSource::resume() {
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 3505844..6e4a1dd 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -290,6 +290,10 @@
status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
int64_t timeUs = systemTime() / 1000ll;
+ // Estimate the real sampling time of the 1st sample in this buffer
+ // from AudioRecord's latency. (Apply this adjustment first so that
+ // the start time logic is not affected.)
+ timeUs -= mRecord->latency() * 1000LL;
ALOGV("dataCallbackTimestamp: %" PRId64 " us", timeUs);
Mutex::Autolock autoLock(mLock);
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index b451021..ba59e00 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -2291,11 +2291,11 @@
// The widevine extractor does its own caching.
#if 0
- mCachedSource = new NuCachedSource2(
+ mCachedSource = NuCachedSource2::Create(
new ThrottledSource(
mConnectingDataSource, 50 * 1024 /* bytes/sec */));
#else
- mCachedSource = new NuCachedSource2(
+ mCachedSource = NuCachedSource2::Create(
mConnectingDataSource,
cacheConfig.isEmpty() ? NULL : cacheConfig.string(),
disconnectAtHighwatermark);
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 75ef288..5020c6c 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -246,7 +246,7 @@
*contentType = httpSource->getMIMEType();
}
- source = new NuCachedSource2(
+ source = NuCachedSource2::Create(
httpSource,
cacheConfig.isEmpty() ? NULL : cacheConfig.string(),
disconnectAtHighwatermark);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index a76334f..38ae6f3 100755
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1952,15 +1952,14 @@
return ERROR_IO;
}
- if (mLastTrack == NULL)
- return ERROR_MALFORMED;
-
uint32_t type = ntohl(buffer);
// For the 3GPP file format, the handler-type within the 'hdlr' box
// shall be 'text'. We also want to support 'sbtl' handler type
// for a practical reason as various MPEG4 containers use it.
if (type == FOURCC('t', 'e', 'x', 't') || type == FOURCC('s', 'b', 't', 'l')) {
- mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_TEXT_3GPP);
+ if (mLastTrack != NULL) {
+ mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_TEXT_3GPP);
+ }
}
break;
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index cd59709..7019537 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -572,6 +572,7 @@
}
status_t MediaCodec::reclaim() {
+ ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
sp<AMessage> msg = new AMessage(kWhatRelease, this);
msg->setInt32("reclaimed", 1);
@@ -1154,8 +1155,10 @@
resourceType = String8(kResourceNonSecureCodec);
}
- const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
- addResource(resourceType, String8(subtype), 1);
+ if (mIsVideo) {
+ // audio codec is currently ignored.
+ addResource(resourceType, String8(kResourceVideoCodec), 1);
+ }
(new AMessage)->postReply(mReplyID);
break;
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index f82636b..d6255d6 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -224,9 +224,6 @@
// So whenever we call DataSource::readAt it may end up in a call to
// IMediaHTTPConnection::readAt and therefore call back into JAVA.
mLooper->start(false /* runOnCallingThread */, true /* canCallJava */);
-
- Mutex::Autolock autoLock(mLock);
- (new AMessage(kWhatFetchMore, mReflector))->post();
}
NuCachedSource2::~NuCachedSource2() {
@@ -237,6 +234,18 @@
mCache = NULL;
}
+// static
+sp<NuCachedSource2> NuCachedSource2::Create(
+ const sp<DataSource> &source,
+ const char *cacheConfig,
+ bool disconnectAtHighwatermark) {
+ sp<NuCachedSource2> instance = new NuCachedSource2(
+ source, cacheConfig, disconnectAtHighwatermark);
+ Mutex::Autolock autoLock(instance->mLock);
+ (new AMessage(kWhatFetchMore, instance->mReflector))->post();
+ return instance;
+}
+
status_t NuCachedSource2::getEstimatedBandwidthKbps(int32_t *kbps) {
if (mSource->flags() & kIsHTTPBasedSource) {
HTTPBase* source = static_cast<HTTPBase *>(mSource.get());
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 692445a..5b50212 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -23,6 +23,7 @@
#include <cutils/properties.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/base64.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
@@ -1207,83 +1208,18 @@
}
-// The returned buffer should be free()d.
-static uint8_t *DecodeBase64(const char *s, size_t size, size_t *outSize) {
- *outSize = 0;
-
- if ((size % 4) != 0) {
- return NULL;
- }
-
- size_t n = size;
- size_t padding = 0;
- if (n >= 1 && s[n - 1] == '=') {
- padding = 1;
-
- if (n >= 2 && s[n - 2] == '=') {
- padding = 2;
- }
- }
-
- size_t outLen = 3 * size / 4 - padding;
-
- *outSize = outLen;
-
- void *buffer = malloc(outLen);
-
- uint8_t *out = (uint8_t *)buffer;
- size_t j = 0;
- uint32_t accum = 0;
- for (size_t i = 0; i < n; ++i) {
- char c = s[i];
- unsigned value;
- if (c >= 'A' && c <= 'Z') {
- value = c - 'A';
- } else if (c >= 'a' && c <= 'z') {
- value = 26 + c - 'a';
- } else if (c >= '0' && c <= '9') {
- value = 52 + c - '0';
- } else if (c == '+') {
- value = 62;
- } else if (c == '/') {
- value = 63;
- } else if (c != '=') {
- return NULL;
- } else {
- if (i < n - padding) {
- return NULL;
- }
-
- value = 0;
- }
-
- accum = (accum << 6) | value;
-
- if (((i + 1) % 4) == 0) {
- out[j++] = (accum >> 16);
-
- if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
- if (j < outLen) { out[j++] = accum & 0xff; }
-
- accum = 0;
- }
- }
-
- return (uint8_t *)buffer;
-}
-
static void extractAlbumArt(
const sp<MetaData> &fileMeta, const void *data, size_t size) {
ALOGV("extractAlbumArt from '%s'", (const char *)data);
- size_t flacSize;
- uint8_t *flac = DecodeBase64((const char *)data, size, &flacSize);
-
- if (flac == NULL) {
+ sp<ABuffer> flacBuffer = decodeBase64(AString((const char *)data, size));
+ if (flacBuffer == NULL) {
ALOGE("malformed base64 encoded data.");
return;
}
+ size_t flacSize = flacBuffer->size();
+ uint8_t *flac = flacBuffer->data();
ALOGV("got flac of size %zu", flacSize);
uint32_t picType;
@@ -1293,24 +1229,24 @@
char type[128];
if (flacSize < 8) {
- goto exit;
+ return;
}
picType = U32_AT(flac);
if (picType != 3) {
// This is not a front cover.
- goto exit;
+ return;
}
typeLen = U32_AT(&flac[4]);
if (typeLen > sizeof(type) - 1) {
- goto exit;
+ return;
}
// we've already checked above that flacSize >= 8
if (flacSize - 8 < typeLen) {
- goto exit;
+ return;
}
memcpy(type, &flac[8], typeLen);
@@ -1320,7 +1256,7 @@
if (!strcmp(type, "-->")) {
// This is not inline cover art, but an external url instead.
- goto exit;
+ return;
}
descLen = U32_AT(&flac[8 + typeLen]);
@@ -1328,7 +1264,7 @@
if (flacSize < 32 ||
flacSize - 32 < typeLen ||
flacSize - 32 - typeLen < descLen) {
- goto exit;
+ return;
}
dataLen = U32_AT(&flac[8 + typeLen + 4 + descLen + 16]);
@@ -1336,7 +1272,7 @@
// we've already checked above that (flacSize - 32 - typeLen - descLen) >= 0
if (flacSize - 32 - typeLen - descLen < dataLen) {
- goto exit;
+ return;
}
ALOGV("got image data, %zu trailing bytes",
@@ -1346,10 +1282,6 @@
kKeyAlbumArt, 0, &flac[8 + typeLen + 4 + descLen + 20], dataLen);
fileMeta->setCString(kKeyAlbumArtMIME, type);
-
-exit:
- free(flac);
- flac = NULL;
}
////////////////////////////////////////////////////////////////////////////////
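
The goto exit/free() cleanup disappears above because the decoded bytes are now owned by a refcounted sp<ABuffer>, so every early return releases them automatically. The same idea with a standard container, as a sketch with an illustrative function name:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Ownership of the decoded bytes is scoped, so early returns need no cleanup
    // label and no manual free().
    static void inspectAlbumArt(std::vector<uint8_t> flac) {
        if (flac.size() < 8) {
            return;                 // too short; storage released automatically
        }
        uint32_t picType = (uint32_t(flac[0]) << 24) | (uint32_t(flac[1]) << 16) |
                           (uint32_t(flac[2]) << 8) | flac[3];
        if (picType != 3) {
            return;                 // not a front cover, nothing to keep
        }
        printf("front cover candidate, %zu bytes total\n", flac.size());
    }
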
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index f0a7277..7c8d441 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -211,8 +211,10 @@
const uint8_t *ptr = (const uint8_t *)data;
- CHECK(size >= 7);
- CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
+ if (size < 7 || ptr[0] != 1) { // configurationVersion == 1
+ ALOGE("b/23680780");
+ return BAD_VALUE;
+ }
uint8_t profile __unused = ptr[1];
uint8_t level __unused = ptr[3];
@@ -238,7 +240,10 @@
buffer->setRange(0, 0);
for (size_t i = 0; i < numSeqParameterSets; ++i) {
- CHECK(size >= 2);
+ if (size < 2) {
+ ALOGE("b/23680780");
+ return BAD_VALUE;
+ }
size_t length = U16_AT(ptr);
ptr += 2;
@@ -267,13 +272,19 @@
}
buffer->setRange(0, 0);
- CHECK(size >= 1);
+ if (size < 1) {
+ ALOGE("b/23680780");
+ return BAD_VALUE;
+ }
size_t numPictureParameterSets = *ptr;
++ptr;
--size;
for (size_t i = 0; i < numPictureParameterSets; ++i) {
- CHECK(size >= 2);
+ if (size < 2) {
+ ALOGE("b/23680780");
+ return BAD_VALUE;
+ }
size_t length = U16_AT(ptr);
ptr += 2;
@@ -297,8 +308,10 @@
} else if (meta->findData(kKeyHVCC, &type, &data, &size)) {
const uint8_t *ptr = (const uint8_t *)data;
- CHECK(size >= 7);
- CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
+ if (size < 23 || ptr[0] != 1) { // configurationVersion == 1
+ ALOGE("b/23680780");
+ return BAD_VALUE;
+ }
uint8_t profile __unused = ptr[1] & 31;
uint8_t level __unused = ptr[12];
ptr += 22;
@@ -317,6 +330,10 @@
buffer->setRange(0, 0);
for (i = 0; i < numofArrays; i++) {
+ if (size < 3) {
+ ALOGE("b/23680780");
+ return BAD_VALUE;
+ }
ptr += 1;
size -= 1;
@@ -327,7 +344,10 @@
size -= 2;
for (j = 0; j < numofNals; j++) {
- CHECK(size >= 2);
+ if (size < 2) {
+ ALOGE("b/23680780");
+ return BAD_VALUE;
+ }
size_t length = U16_AT(ptr);
ptr += 2;
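
The avcC/hvcC hunks above replace CHECK() aborts with bounds tests that fail soft. The core loop, reduced to a standalone sketch over a byte span; the function name and error constants are illustrative, not the Utils.cpp signatures:

    #include <cstddef>
    #include <cstdint>

    enum { SKETCH_OK = 0, SKETCH_BAD_VALUE = -22 };

    // Walk 'count' parameter sets, each stored as a 16-bit big-endian length
    // followed by that many bytes. Reject malformed input instead of asserting.
    static int walkParameterSets(const uint8_t *ptr, size_t size, size_t count) {
        for (size_t i = 0; i < count; ++i) {
            if (size < 2) {
                return SKETCH_BAD_VALUE;   // no room left for the length field
            }
            size_t length = (size_t(ptr[0]) << 8) | ptr[1];
            ptr += 2;
            size -= 2;
            if (size < length) {
                return SKETCH_BAD_VALUE;   // declared length runs past the buffer
            }
            // ... consume 'length' bytes of the parameter set here ...
            ptr += length;
            size -= length;
        }
        return SKETCH_OK;
    }
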
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk
index 3750e2e..76a7f40 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.mk
+++ b/media/libstagefright/codecs/amrnb/dec/Android.mk
@@ -98,7 +98,7 @@
libstagefright_amrnbdec libsndfile
LOCAL_SHARED_LIBRARIES := \
- libstagefright_amrnb_common libaudioutils
+ libstagefright_amrnb_common libaudioutils liblog
LOCAL_MODULE := libstagefright_amrnbdec_test
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp b/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp
index fb7cff3..696d2da 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp
@@ -59,6 +59,8 @@
/*----------------------------------------------------------------------------
; INCLUDES
----------------------------------------------------------------------------*/
+#include <log/log.h>
+
#include "a_refl.h"
#include "typedef.h"
#include "cnst.h"
@@ -291,7 +293,8 @@
{
refl[i] = 0;
}
- break;
+ ALOGE("b/23609206");
+ return;
}
bState[j] = extract_l(L_temp);
diff --git a/media/libstagefright/foundation/base64.cpp b/media/libstagefright/foundation/base64.cpp
index dcf5bef..7da7db9 100644
--- a/media/libstagefright/foundation/base64.cpp
+++ b/media/libstagefright/foundation/base64.cpp
@@ -22,11 +22,11 @@
namespace android {
sp<ABuffer> decodeBase64(const AString &s) {
- if ((s.size() % 4) != 0) {
+ size_t n = s.size();
+ if ((n % 4) != 0) {
return NULL;
}
- size_t n = s.size();
size_t padding = 0;
if (n >= 1 && s.c_str()[n - 1] == '=') {
padding = 1;
@@ -40,11 +40,16 @@
}
}
- size_t outLen = 3 * s.size() / 4 - padding;
+ // We divide first to avoid overflow. It's OK to do this because we
+ // already made sure that n % 4 == 0.
+ size_t outLen = (n / 4) * 3 - padding;
sp<ABuffer> buffer = new ABuffer(outLen);
uint8_t *out = buffer->data();
+ if (out == NULL || buffer->size() < outLen) {
+ return NULL;
+ }
size_t j = 0;
uint32_t accum = 0;
for (size_t i = 0; i < n; ++i) {
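
The rewritten length computation divides before multiplying. A tiny sketch of why that ordering matters when n can be attacker-controlled (the function name is illustrative):

    #include <cstddef>

    // Precondition (checked by the decoder first): n % 4 == 0 and padding <= 2.
    // 3 * n / 4 can wrap once 3 * n exceeds SIZE_MAX; (n / 4) * 3 never can,
    // because n / 4 <= SIZE_MAX / 4 and three times that still fits in size_t.
    static size_t base64DecodedLength(size_t n, size_t padding) {
        return (n / 4) * 3 - padding;
    }
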
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index 4252706..a29bdf9 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -28,7 +28,7 @@
struct PageCache;
struct NuCachedSource2 : public DataSource {
- NuCachedSource2(
+ static sp<NuCachedSource2> Create(
const sp<DataSource> &source,
const char *cacheConfig = NULL,
bool disconnectAtHighwatermark = false);
@@ -72,6 +72,11 @@
private:
friend struct AHandlerReflector<NuCachedSource2>;
+ NuCachedSource2(
+ const sp<DataSource> &source,
+ const char *cacheConfig,
+ bool disconnectAtHighwatermark);
+
enum {
kPageSize = 65536,
kDefaultHighWaterThreshold = 20 * 1024 * 1024,
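
Moving construction behind a static Create() keeps the kWhatFetchMore post out of the constructor, where no strong reference to the object exists yet. The analogous shape with std::shared_ptr, as a sketch; the real class uses RefBase/sp<> and ALooper, not the standard library, and CachedSource is an illustrative name:

    #include <memory>

    class CachedSource : public std::enable_shared_from_this<CachedSource> {
    public:
        // Factory: only after a shared_ptr owns the object is it safe to start
        // work that hands out references to it.
        static std::shared_ptr<CachedSource> Create() {
            std::shared_ptr<CachedSource> instance(new CachedSource());
            instance->startFetching();   // would be unsafe from the constructor
            return instance;
        }

    private:
        CachedSource() = default;        // does no work that needs 'this' shared

        void startFetching() {
            // e.g. post a message carrying shared_from_this(); valid here,
            // undefined behaviour if attempted inside the constructor.
            std::shared_ptr<CachedSource> self = shared_from_this();
            (void)self;
        }
    };
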
diff --git a/media/libstagefright/timedtext/TextDescriptions.cpp b/media/libstagefright/timedtext/TextDescriptions.cpp
index f9c1fe0..c762a74 100644
--- a/media/libstagefright/timedtext/TextDescriptions.cpp
+++ b/media/libstagefright/timedtext/TextDescriptions.cpp
@@ -30,9 +30,9 @@
if (flags & IN_BAND_TEXT_3GPP) {
if (flags & GLOBAL_DESCRIPTIONS) {
- return extract3GPPGlobalDescriptions(data, size, parcel, 0);
+ return extract3GPPGlobalDescriptions(data, size, parcel);
} else if (flags & LOCAL_DESCRIPTIONS) {
- return extract3GPPLocalDescriptions(data, size, timeMs, parcel, 0);
+ return extract3GPPLocalDescriptions(data, size, timeMs, parcel);
}
} else if (flags & OUT_OF_BAND_TEXT_SRT) {
if (flags & LOCAL_DESCRIPTIONS) {
@@ -69,49 +69,361 @@
// styles, and 'krok' box contains karaoke timing and positions.
status_t TextDescriptions::extract3GPPLocalDescriptions(
const uint8_t *data, ssize_t size,
- int timeMs, Parcel *parcel, int depth) {
- if (depth == 0) {
- parcel->writeInt32(KEY_LOCAL_SETTING);
+ int timeMs, Parcel *parcel) {
- // write start time to display this text sample
- parcel->writeInt32(KEY_START_TIME);
- parcel->writeInt32(timeMs);
+ parcel->writeInt32(KEY_LOCAL_SETTING);
- ssize_t textLen = (*data) << 8 | (*(data + 1));
+ // write start time to display this text sample
+ parcel->writeInt32(KEY_START_TIME);
+ parcel->writeInt32(timeMs);
- // write text sample length and text sample itself
- parcel->writeInt32(KEY_STRUCT_TEXT);
- parcel->writeInt32(textLen);
- parcel->writeInt32(textLen);
- parcel->write(data + 2, textLen);
-
- if (size > textLen) {
- data += (textLen + 2);
- size -= (textLen + 2);
- } else {
- return OK;
- }
+ if (size < 2) {
+ return OK;
}
+ ssize_t textLen = (*data) << 8 | (*(data + 1));
- const uint8_t *tmpData = data;
- ssize_t chunkSize = U32_AT(tmpData);
- uint32_t chunkType = U32_AT(tmpData + 4);
-
- if (chunkSize <= 0) {
+ if (size < textLen + 2) {
return OK;
}
- tmpData += 8;
+ // write text sample length and text sample itself
+ parcel->writeInt32(KEY_STRUCT_TEXT);
+ parcel->writeInt32(textLen);
+ parcel->writeInt32(textLen);
+ parcel->write(data + 2, textLen);
- switch(chunkType) {
- // 'styl' box specifies the style of the text.
- case FOURCC('s', 't', 'y', 'l'):
- {
- uint16_t count = U16_AT(tmpData);
+ if (size > textLen + 2) {
+ data += (textLen + 2);
+ size -= (textLen + 2);
+ } else {
+ return OK;
+ }
- tmpData += 2;
+ while (size >= 8) {
+ const uint8_t *tmpData = data;
+ ssize_t chunkSize = U32_AT(tmpData); // size includes size and type
+ uint32_t chunkType = U32_AT(tmpData + 4);
- for (int i = 0; i < count; i++) {
+ if (chunkSize <= 8 || chunkSize > size) {
+ return OK;
+ }
+
+ size_t remaining = chunkSize - 8;
+
+ tmpData += 8;
+
+ switch(chunkType) {
+ // 'styl' box specifies the style of the text.
+ case FOURCC('s', 't', 'y', 'l'):
+ {
+ if (remaining < 2) {
+ return OK;
+ }
+ size_t dataPos = parcel->dataPosition();
+ uint16_t count = U16_AT(tmpData);
+
+ tmpData += 2;
+ remaining -= 2;
+
+ for (int i = 0; i < count; i++) {
+ if (remaining < 12) {
+ // roll back
+ parcel->setDataPosition(dataPos);
+ return OK;
+ }
+ parcel->writeInt32(KEY_STRUCT_STYLE_LIST);
+ parcel->writeInt32(KEY_START_CHAR);
+ parcel->writeInt32(U16_AT(tmpData));
+
+ parcel->writeInt32(KEY_END_CHAR);
+ parcel->writeInt32(U16_AT(tmpData + 2));
+
+ parcel->writeInt32(KEY_FONT_ID);
+ parcel->writeInt32(U16_AT(tmpData + 4));
+
+ parcel->writeInt32(KEY_STYLE_FLAGS);
+ parcel->writeInt32(*(tmpData + 6));
+
+ parcel->writeInt32(KEY_FONT_SIZE);
+ parcel->writeInt32(*(tmpData + 7));
+
+ parcel->writeInt32(KEY_TEXT_COLOR_RGBA);
+ uint32_t rgba = *(tmpData + 8) << 24 | *(tmpData + 9) << 16
+ | *(tmpData + 10) << 8 | *(tmpData + 11);
+ parcel->writeInt32(rgba);
+
+ tmpData += 12;
+ remaining -= 12;
+ }
+
+ break;
+ }
+ // 'krok' box. The number of highlight events is specified, and each
+ // event is specified by a starting and ending char offset and an end
+ // time for the event.
+ case FOURCC('k', 'r', 'o', 'k'):
+ {
+ if (remaining < 6) {
+ return OK;
+ }
+ size_t dataPos = parcel->dataPosition();
+
+ parcel->writeInt32(KEY_STRUCT_KARAOKE_LIST);
+
+ int startTime = U32_AT(tmpData);
+ uint16_t count = U16_AT(tmpData + 4);
+ parcel->writeInt32(count);
+
+ tmpData += 6;
+ remaining -= 6;
+ int lastEndTime = 0;
+
+ for (int i = 0; i < count; i++) {
+ if (remaining < 8) {
+ // roll back
+ parcel->setDataPosition(dataPos);
+ return OK;
+ }
+ parcel->writeInt32(startTime + lastEndTime);
+
+ lastEndTime = U32_AT(tmpData);
+ parcel->writeInt32(lastEndTime);
+
+ parcel->writeInt32(U16_AT(tmpData + 4));
+ parcel->writeInt32(U16_AT(tmpData + 6));
+
+ tmpData += 8;
+ remaining -= 8;
+ }
+
+ break;
+ }
+ // 'hlit' box specifies highlighted text
+ case FOURCC('h', 'l', 'i', 't'):
+ {
+ if (remaining < 4) {
+ return OK;
+ }
+
+ parcel->writeInt32(KEY_STRUCT_HIGHLIGHT_LIST);
+
+ // the start char offset to highlight
+ parcel->writeInt32(U16_AT(tmpData));
+ // the last char offset to highlight
+ parcel->writeInt32(U16_AT(tmpData + 2));
+
+ tmpData += 4;
+ remaining -= 4;
+ break;
+ }
+ // 'hclr' box specifies the RGBA color: 8 bits each of
+ // red, green, blue, and an alpha(transparency) value
+ case FOURCC('h', 'c', 'l', 'r'):
+ {
+ if (remaining < 4) {
+ return OK;
+ }
+ parcel->writeInt32(KEY_HIGHLIGHT_COLOR_RGBA);
+
+ uint32_t rgba = *(tmpData) << 24 | *(tmpData + 1) << 16
+ | *(tmpData + 2) << 8 | *(tmpData + 3);
+ parcel->writeInt32(rgba);
+
+ tmpData += 4;
+ remaining -= 4;
+ break;
+ }
+ // 'dlay' box specifies a delay after a scroll in and/or
+ // before scroll out.
+ case FOURCC('d', 'l', 'a', 'y'):
+ {
+ if (remaining < 4) {
+ return OK;
+ }
+ parcel->writeInt32(KEY_SCROLL_DELAY);
+
+ uint32_t delay = *(tmpData) << 24 | *(tmpData + 1) << 16
+ | *(tmpData + 2) << 8 | *(tmpData + 3);
+ parcel->writeInt32(delay);
+
+ tmpData += 4;
+ remaining -= 4;
+ break;
+ }
+ // 'href' box for hyper text link
+ case FOURCC('h', 'r', 'e', 'f'):
+ {
+ if (remaining < 5) {
+ return OK;
+ }
+
+ size_t dataPos = parcel->dataPosition();
+
+ parcel->writeInt32(KEY_STRUCT_HYPER_TEXT_LIST);
+
+ // the start offset of the text to be linked
+ parcel->writeInt32(U16_AT(tmpData));
+ // the end offset of the text
+ parcel->writeInt32(U16_AT(tmpData + 2));
+
+ // the number of bytes in the following URL
+ size_t len = *(tmpData + 4);
+ parcel->writeInt32(len);
+
+ remaining -= 5;
+
+ if (remaining < len) {
+ parcel->setDataPosition(dataPos);
+ return OK;
+ }
+ // the linked-to URL
+ parcel->writeInt32(len);
+ parcel->write(tmpData + 5, len);
+
+ tmpData += (5 + len);
+ remaining -= len;
+
+ if (remaining < 1) {
+ parcel->setDataPosition(dataPos);
+ return OK;
+ }
+
+ // the number of bytes in the following "alt" string
+ len = *tmpData;
+ parcel->writeInt32(len);
+
+ tmpData += 1;
+ remaining -= 1;
+ if (remaining < len) {
+ parcel->setDataPosition(dataPos);
+ return OK;
+ }
+
+ // an "alt" string for user display
+ parcel->writeInt32(len);
+ parcel->write(tmpData, len);
+
+ tmpData += 1;
+ remaining -= 1;
+ break;
+ }
+ // 'tbox' box to indicate the position of the text with values
+ // of top, left, bottom and right
+ case FOURCC('t', 'b', 'o', 'x'):
+ {
+ if (remaining < 8) {
+ return OK;
+ }
+ parcel->writeInt32(KEY_STRUCT_TEXT_POS);
+ parcel->writeInt32(U16_AT(tmpData));
+ parcel->writeInt32(U16_AT(tmpData + 2));
+ parcel->writeInt32(U16_AT(tmpData + 4));
+ parcel->writeInt32(U16_AT(tmpData + 6));
+
+ tmpData += 8;
+ remaining -= 8;
+ break;
+ }
+ // 'blnk' to specify the char range to be blinked
+ case FOURCC('b', 'l', 'n', 'k'):
+ {
+ if (remaining < 4) {
+ return OK;
+ }
+
+ parcel->writeInt32(KEY_STRUCT_BLINKING_TEXT_LIST);
+
+ // start char offset
+ parcel->writeInt32(U16_AT(tmpData));
+ // end char offset
+ parcel->writeInt32(U16_AT(tmpData + 2));
+
+ tmpData += 4;
+ remaining -= 4;
+ break;
+ }
+ // 'twrp' box specifies text wrap behavior. If the value if 0x00,
+ // then no wrap. If it's 0x01, then automatic 'soft' wrap is enabled.
+ // 0x02-0xff are reserved.
+ case FOURCC('t', 'w', 'r', 'p'):
+ {
+ if (remaining < 1) {
+ return OK;
+ }
+ parcel->writeInt32(KEY_WRAP_TEXT);
+ parcel->writeInt32(*tmpData);
+
+ tmpData += 1;
+ remaining -= 1;
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ }
+
+ data += chunkSize;
+ size -= chunkSize;
+ }
+
+ return OK;
+}
+
+// To extract box 'tx3g' defined in 3GPP TS 26.245, and store it in a Parcel
+status_t TextDescriptions::extract3GPPGlobalDescriptions(
+ const uint8_t *data, ssize_t size, Parcel *parcel) {
+
+ parcel->writeInt32(KEY_GLOBAL_SETTING);
+
+ while (size >= 8) {
+ ssize_t chunkSize = U32_AT(data);
+ uint32_t chunkType = U32_AT(data + 4);
+ const uint8_t *tmpData = data;
+ tmpData += 8;
+ size_t remaining = size - 8;
+
+ if (size < chunkSize) {
+ return OK;
+ }
+ switch(chunkType) {
+ case FOURCC('t', 'x', '3', 'g'):
+ {
+ if (remaining < 18) { // 8 just below, and another 10 a little further down
+ return OK;
+ }
+ tmpData += 8; // skip the first 8 bytes
+ remaining -=8;
+ parcel->writeInt32(KEY_DISPLAY_FLAGS);
+ parcel->writeInt32(U32_AT(tmpData));
+
+ parcel->writeInt32(KEY_STRUCT_JUSTIFICATION);
+ parcel->writeInt32(tmpData[4]);
+ parcel->writeInt32(tmpData[5]);
+
+ parcel->writeInt32(KEY_BACKGROUND_COLOR_RGBA);
+ uint32_t rgba = *(tmpData + 6) << 24 | *(tmpData + 7) << 16
+ | *(tmpData + 8) << 8 | *(tmpData + 9);
+ parcel->writeInt32(rgba);
+
+ tmpData += 10;
+ remaining -= 10;
+
+ if (remaining < 8) {
+ return OK;
+ }
+ parcel->writeInt32(KEY_STRUCT_TEXT_POS);
+ parcel->writeInt32(U16_AT(tmpData));
+ parcel->writeInt32(U16_AT(tmpData + 2));
+ parcel->writeInt32(U16_AT(tmpData + 4));
+ parcel->writeInt32(U16_AT(tmpData + 6));
+
+ tmpData += 8;
+ remaining -= 8;
+
+ if (remaining < 12) {
+ return OK;
+ }
parcel->writeInt32(KEY_STRUCT_STYLE_LIST);
parcel->writeInt32(KEY_START_CHAR);
parcel->writeInt32(U16_AT(tmpData));
@@ -129,254 +441,65 @@
parcel->writeInt32(*(tmpData + 7));
parcel->writeInt32(KEY_TEXT_COLOR_RGBA);
- uint32_t rgba = *(tmpData + 8) << 24 | *(tmpData + 9) << 16
+ rgba = *(tmpData + 8) << 24 | *(tmpData + 9) << 16
| *(tmpData + 10) << 8 | *(tmpData + 11);
parcel->writeInt32(rgba);
tmpData += 12;
+ remaining -= 12;
+
+ if (remaining < 2) {
+ return OK;
+ }
+
+ size_t dataPos = parcel->dataPosition();
+
+ parcel->writeInt32(KEY_STRUCT_FONT_LIST);
+ uint16_t count = U16_AT(tmpData);
+ parcel->writeInt32(count);
+
+ tmpData += 2;
+ remaining -= 2;
+
+ for (int i = 0; i < count; i++) {
+ if (remaining < 3) {
+ // roll back
+ parcel->setDataPosition(dataPos);
+ return OK;
+ }
+ // font ID
+ parcel->writeInt32(U16_AT(tmpData));
+
+ // font name length
+ parcel->writeInt32(*(tmpData + 2));
+
+ size_t len = *(tmpData + 2);
+
+ tmpData += 3;
+ remaining -= 3;
+
+ if (remaining < len) {
+ // roll back
+ parcel->setDataPosition(dataPos);
+ return OK;
+ }
+
+ parcel->write(tmpData, len);
+ tmpData += len;
+ remaining -= len;
+ }
+
+ // there is a "DisparityBox" after this according to the spec, but we ignore it
+ break;
}
-
- break;
- }
- // 'krok' box. The number of highlight events is specified, and each
- // event is specified by a starting and ending char offset and an end
- // time for the event.
- case FOURCC('k', 'r', 'o', 'k'):
- {
-
- parcel->writeInt32(KEY_STRUCT_KARAOKE_LIST);
-
- int startTime = U32_AT(tmpData);
- uint16_t count = U16_AT(tmpData + 4);
- parcel->writeInt32(count);
-
- tmpData += 6;
- int lastEndTime = 0;
-
- for (int i = 0; i < count; i++) {
- parcel->writeInt32(startTime + lastEndTime);
-
- lastEndTime = U32_AT(tmpData);
- parcel->writeInt32(lastEndTime);
-
- parcel->writeInt32(U16_AT(tmpData + 4));
- parcel->writeInt32(U16_AT(tmpData + 6));
-
- tmpData += 8;
+ default:
+ {
+ break;
}
-
- break;
}
- // 'hlit' box specifies highlighted text
- case FOURCC('h', 'l', 'i', 't'):
- {
- parcel->writeInt32(KEY_STRUCT_HIGHLIGHT_LIST);
- // the start char offset to highlight
- parcel->writeInt32(U16_AT(tmpData));
- // the last char offset to highlight
- parcel->writeInt32(U16_AT(tmpData + 2));
-
- break;
- }
- // 'hclr' box specifies the RGBA color: 8 bits each of
- // red, green, blue, and an alpha(transparency) value
- case FOURCC('h', 'c', 'l', 'r'):
- {
- parcel->writeInt32(KEY_HIGHLIGHT_COLOR_RGBA);
-
- uint32_t rgba = *(tmpData) << 24 | *(tmpData + 1) << 16
- | *(tmpData + 2) << 8 | *(tmpData + 3);
- parcel->writeInt32(rgba);
-
- break;
- }
- // 'dlay' box specifies a delay after a scroll in and/or
- // before scroll out.
- case FOURCC('d', 'l', 'a', 'y'):
- {
- parcel->writeInt32(KEY_SCROLL_DELAY);
-
- uint32_t delay = *(tmpData) << 24 | *(tmpData + 1) << 16
- | *(tmpData + 2) << 8 | *(tmpData + 3);
- parcel->writeInt32(delay);
-
- break;
- }
- // 'href' box for hyper text link
- case FOURCC('h', 'r', 'e', 'f'):
- {
- parcel->writeInt32(KEY_STRUCT_HYPER_TEXT_LIST);
-
- // the start offset of the text to be linked
- parcel->writeInt32(U16_AT(tmpData));
- // the end offset of the text
- parcel->writeInt32(U16_AT(tmpData + 2));
-
- // the number of bytes in the following URL
- int len = *(tmpData + 4);
- parcel->writeInt32(len);
-
- // the linked-to URL
- parcel->writeInt32(len);
- parcel->write(tmpData + 5, len);
-
- tmpData += (5 + len);
-
- // the number of bytes in the following "alt" string
- len = *tmpData;
- parcel->writeInt32(len);
-
- // an "alt" string for user display
- parcel->writeInt32(len);
- parcel->write(tmpData + 1, len);
-
- break;
- }
- // 'tbox' box to indicate the position of the text with values
- // of top, left, bottom and right
- case FOURCC('t', 'b', 'o', 'x'):
- {
- parcel->writeInt32(KEY_STRUCT_TEXT_POS);
- parcel->writeInt32(U16_AT(tmpData));
- parcel->writeInt32(U16_AT(tmpData + 2));
- parcel->writeInt32(U16_AT(tmpData + 4));
- parcel->writeInt32(U16_AT(tmpData + 6));
-
- break;
- }
- // 'blnk' to specify the char range to be blinked
- case FOURCC('b', 'l', 'n', 'k'):
- {
- parcel->writeInt32(KEY_STRUCT_BLINKING_TEXT_LIST);
-
- // start char offset
- parcel->writeInt32(U16_AT(tmpData));
- // end char offset
- parcel->writeInt32(U16_AT(tmpData + 2));
-
- break;
- }
- // 'twrp' box specifies text wrap behavior. If the value if 0x00,
- // then no wrap. If it's 0x01, then automatic 'soft' wrap is enabled.
- // 0x02-0xff are reserved.
- case FOURCC('t', 'w', 'r', 'p'):
- {
- parcel->writeInt32(KEY_WRAP_TEXT);
- parcel->writeInt32(*tmpData);
-
- break;
- }
- default:
- {
- break;
- }
- }
-
- if (size > chunkSize) {
data += chunkSize;
size -= chunkSize;
- // continue to parse next box
- return extract3GPPLocalDescriptions(data, size, 0, parcel, 1);
- }
-
- return OK;
-}
-
-// To extract box 'tx3g' defined in 3GPP TS 26.245, and store it in a Parcel
-status_t TextDescriptions::extract3GPPGlobalDescriptions(
- const uint8_t *data, ssize_t size, Parcel *parcel, int depth) {
-
- ssize_t chunkSize = U32_AT(data);
- uint32_t chunkType = U32_AT(data + 4);
- const uint8_t *tmpData = data;
- tmpData += 8;
-
- if (size < chunkSize) {
- return OK;
- }
-
- if (depth == 0) {
- parcel->writeInt32(KEY_GLOBAL_SETTING);
- }
- switch(chunkType) {
- case FOURCC('t', 'x', '3', 'g'):
- {
- tmpData += 8; // skip the first 8 bytes
- parcel->writeInt32(KEY_DISPLAY_FLAGS);
- parcel->writeInt32(U32_AT(tmpData));
-
- parcel->writeInt32(KEY_STRUCT_JUSTIFICATION);
- parcel->writeInt32(tmpData[4]);
- parcel->writeInt32(tmpData[5]);
-
- parcel->writeInt32(KEY_BACKGROUND_COLOR_RGBA);
- uint32_t rgba = *(tmpData + 6) << 24 | *(tmpData + 7) << 16
- | *(tmpData + 8) << 8 | *(tmpData + 9);
- parcel->writeInt32(rgba);
-
- tmpData += 10;
- parcel->writeInt32(KEY_STRUCT_TEXT_POS);
- parcel->writeInt32(U16_AT(tmpData));
- parcel->writeInt32(U16_AT(tmpData + 2));
- parcel->writeInt32(U16_AT(tmpData + 4));
- parcel->writeInt32(U16_AT(tmpData + 6));
-
- tmpData += 8;
- parcel->writeInt32(KEY_STRUCT_STYLE_LIST);
- parcel->writeInt32(KEY_START_CHAR);
- parcel->writeInt32(U16_AT(tmpData));
-
- parcel->writeInt32(KEY_END_CHAR);
- parcel->writeInt32(U16_AT(tmpData + 2));
-
- parcel->writeInt32(KEY_FONT_ID);
- parcel->writeInt32(U16_AT(tmpData + 4));
-
- parcel->writeInt32(KEY_STYLE_FLAGS);
- parcel->writeInt32(*(tmpData + 6));
-
- parcel->writeInt32(KEY_FONT_SIZE);
- parcel->writeInt32(*(tmpData + 7));
-
- parcel->writeInt32(KEY_TEXT_COLOR_RGBA);
- rgba = *(tmpData + 8) << 24 | *(tmpData + 9) << 16
- | *(tmpData + 10) << 8 | *(tmpData + 11);
- parcel->writeInt32(rgba);
-
- tmpData += 12;
- parcel->writeInt32(KEY_STRUCT_FONT_LIST);
- uint16_t count = U16_AT(tmpData);
- parcel->writeInt32(count);
-
- tmpData += 2;
- for (int i = 0; i < count; i++) {
- // font ID
- parcel->writeInt32(U16_AT(tmpData));
-
- // font name length
- parcel->writeInt32(*(tmpData + 2));
-
- int len = *(tmpData + 2);
-
- parcel->write(tmpData + 3, len);
- tmpData += 3 + len;
- }
-
- break;
- }
- default:
- {
- break;
- }
- }
-
- data += chunkSize;
- size -= chunkSize;
-
- if (size > 0) {
- // continue to extract next 'tx3g'
- return extract3GPPGlobalDescriptions(data, size, parcel, 1);
}
return OK;
diff --git a/media/libstagefright/timedtext/TextDescriptions.h b/media/libstagefright/timedtext/TextDescriptions.h
index 0144917..bf67f3f 100644
--- a/media/libstagefright/timedtext/TextDescriptions.h
+++ b/media/libstagefright/timedtext/TextDescriptions.h
@@ -72,10 +72,10 @@
int timeMs, Parcel *parcel);
static status_t extract3GPPGlobalDescriptions(
const uint8_t *data, ssize_t size,
- Parcel *parcel, int depth);
+ Parcel *parcel);
static status_t extract3GPPLocalDescriptions(
const uint8_t *data, ssize_t size,
- int timeMs, Parcel *parcel, int depth);
+ int timeMs, Parcel *parcel);
DISALLOW_EVIL_CONSTRUCTORS(TextDescriptions);
};
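
The TextDescriptions hunks above drop the depth parameter along with the tail-recursive "continue to parse next box" calls, so the box walk can be written as a plain loop. A standalone sketch of that traversal follows; beU32 mimics the big-endian read done by U32_AT in the original file, and handleBox is a hypothetical stand-in for the per-FOURCC switch that writes keys into the Parcel.

    #include <stddef.h>
    #include <stdint.h>

    // Big-endian 32-bit read, mimicking what U32_AT does in the original source.
    static uint32_t beU32(const uint8_t *p) {
        return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) | ((uint32_t)p[2] << 8) | p[3];
    }

    // Hypothetical stand-in for the per-FOURCC switch that fills the Parcel.
    static void handleBox(uint32_t /*type*/, const uint8_t * /*payload*/, size_t /*payloadSize*/) {
        // 'styl', 'hclr', 'dlay', 'href', 'tbox', 'blnk', 'twrp', ... would be handled here
    }

    // Iterative walk over consecutive boxes: 4-byte size, 4-byte type, then payload.
    static void parseBoxes(const uint8_t *data, size_t size) {
        while (size >= 8) {
            uint32_t chunkSize = beU32(data);
            uint32_t chunkType = beU32(data + 4);
            if (chunkSize < 8 || chunkSize > size) {
                break;                       // malformed or truncated box: stop parsing
            }
            handleBox(chunkType, data + 8, chunkSize - 8);
            data += chunkSize;               // advance to the next box
            size -= chunkSize;
        }
    }

Bounding chunkSize by the remaining size before advancing plays the same role as the size checks the recursive version performed before re-entering itself.
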
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 0a7d4a2..5bd9149 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -3479,6 +3479,12 @@
if (state->mCommand != FastMixerState::MIX_WRITE &&
(kUseFastMixer != FastMixer_Dynamic || state->mTrackMask > 1)) {
if (state->mCommand == FastMixerState::COLD_IDLE) {
+
+ // FIXME workaround for first HAL write being CPU bound on some devices
+ ATRACE_BEGIN("write");
+ mOutput->write((char *)mSinkBuffer, 0);
+ ATRACE_END();
+
int32_t old = android_atomic_inc(&mFastMixerFutex);
if (old == -1) {
(void) syscall(__NR_futex, &mFastMixerFutex, FUTEX_WAKE_PRIVATE, 1);
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index f7da209..b3fac0b 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -432,7 +432,10 @@
}
// only allocate a fast track index if we were able to allocate a normal track name
if (flags & IAudioFlinger::TRACK_FAST) {
- mAudioTrackServerProxy->framesReadyIsCalledByMultipleThreads();
+ // FIXME: Not calling framesReadyIsCalledByMultipleThreads() exposes a potential
+ // race with setSyncEvent(). However, if we call it, we cannot properly start
+ // static fast tracks (SoundPool) immediately after stopping.
+ //mAudioTrackServerProxy->framesReadyIsCalledByMultipleThreads();
ALOG_ASSERT(thread->mFastTrackAvailMask != 0);
int i = __builtin_ctz(thread->mFastTrackAvailMask);
ALOG_ASSERT(0 < i && i < (int)FastMixerState::kMaxFastTracks);
diff --git a/services/audiopolicy/enginedefault/src/Gains.cpp b/services/audiopolicy/enginedefault/src/Gains.cpp
index 78f2909..d06365c 100644
--- a/services/audiopolicy/enginedefault/src/Gains.cpp
+++ b/services/audiopolicy/enginedefault/src/Gains.cpp
@@ -171,10 +171,10 @@
},
{ // AUDIO_STREAM_TTS
// "Transmitted Through Speaker": always silent except on DEVICE_CATEGORY_SPEAKER
- Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_HEADSET
- Gains::sLinearVolumeCurve, // DEVICE_CATEGORY_SPEAKER
- Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_EARPIECE
- Gains::sSilentVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA
+ Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_HEADSET
+ Gains::sFullScaleVolumeCurve, // DEVICE_CATEGORY_SPEAKER
+ Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_EARPIECE
+ Gains::sSilentVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA
},
{ // AUDIO_STREAM_ACCESSIBILITY
Gains::sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_HEADSET
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index e7f6864..dfb477d 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -1066,7 +1066,7 @@
*delayMs = 0;
if (stream == AUDIO_STREAM_TTS) {
ALOGV("\t found BEACON stream");
- if (mOutputs.isAnyOutputActive(AUDIO_STREAM_TTS /*streamToIgnore*/)) {
+ if (!mTtsOutputAvailable && mOutputs.isAnyOutputActive(AUDIO_STREAM_TTS /*streamToIgnore*/)) {
return INVALID_OPERATION;
} else {
beaconMuteLatency = handleEventForBeacon(STARTING_BEACON);
@@ -1718,7 +1718,9 @@
status = volStatus;
}
}
- if ((device == AUDIO_DEVICE_OUT_DEFAULT) || ((curDevice & accessibilityDevice) != 0)) {
+ if ((accessibilityDevice != AUDIO_DEVICE_NONE) &&
+ ((device == AUDIO_DEVICE_OUT_DEFAULT) || ((curDevice & accessibilityDevice) != 0)))
+ {
status_t volStatus = checkAndSetVolume(AUDIO_STREAM_ACCESSIBILITY,
index, desc, curDevice);
}
@@ -2000,6 +2002,9 @@
snprintf(buffer, SIZE, " Force use for hdmi system audio %d\n",
mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO));
result.append(buffer);
+ snprintf(buffer, SIZE, " TTS output %s\n", mTtsOutputAvailable ? "available" : "not available");
+ result.append(buffer);
+
write(fd, result.string(), result.size());
mAvailableOutputDevices.dump(fd, String8("output"));
@@ -2680,7 +2685,8 @@
mAudioPortGeneration(1),
mBeaconMuteRefCount(0),
mBeaconPlayingRefCount(0),
- mBeaconMuted(false)
+ mBeaconMuted(false),
+ mTtsOutputAvailable(false)
{
audio_policy::EngineInstance *engineInstance = audio_policy::EngineInstance::getInstance();
if (!engineInstance) {
@@ -2737,6 +2743,9 @@
ALOGW("Output profile contains no device on module %s", mHwModules[i]->mName);
continue;
}
+ if ((outProfile->mFlags & AUDIO_OUTPUT_FLAG_TTS) != 0) {
+ mTtsOutputAvailable = true;
+ }
if ((outProfile->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
continue;
@@ -4036,6 +4045,12 @@
}
uint32_t AudioPolicyManager::handleEventForBeacon(int event) {
+
+ // skip beacon mute management if a dedicated TTS output is available
+ if (mTtsOutputAvailable) {
+ return 0;
+ }
+
switch(event) {
case STARTING_OUTPUT:
mBeaconMuteRefCount++;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index bf3ae4a..bbdf396 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -548,6 +548,7 @@
uint32_t mBeaconMuteRefCount; // ref count for stream that would mute beacon
uint32_t mBeaconPlayingRefCount;// ref count for the playing beacon streams
bool mBeaconMuted; // has STREAM_TTS been muted
+ bool mTtsOutputAvailable; // true if a dedicated output for TTS stream is available
AudioPolicyMixCollection mPolicyMixes; // list of registered mixes
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 43a8ec4..2aaefe9 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -15,6 +15,7 @@
*/
#define LOG_TAG "CameraService"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
#include <algorithm>
@@ -33,7 +34,6 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <binder/ProcessInfoService.h>
-#include <camera/ICameraServiceProxy.h>
#include <cutils/atomic.h>
#include <cutils/properties.h>
#include <gui/Surface.h>
@@ -157,7 +157,6 @@
}
mModule = new CameraModule(rawModule);
- ALOGI("Loaded \"%s\" camera module", mModule->getModuleName());
err = mModule->init();
if (err != OK) {
ALOGE("Could not initialize camera HAL module: %d (%s)", err,
@@ -169,6 +168,7 @@
mModule = nullptr;
return;
}
+ ALOGI("Loaded \"%s\" camera module", mModule->getModuleName());
mNumberOfCameras = mModule->getNumberOfCameras();
mNumberOfNormalCameras = mNumberOfCameras;
@@ -250,13 +250,19 @@
CameraService::pingCameraServiceProxy();
}
-void CameraService::pingCameraServiceProxy() {
+sp<ICameraServiceProxy> CameraService::getCameraServiceProxy() {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.camera.proxy"));
if (binder == nullptr) {
- return;
+ return nullptr;
}
sp<ICameraServiceProxy> proxyBinder = interface_cast<ICameraServiceProxy>(binder);
+ return proxyBinder;
+}
+
+void CameraService::pingCameraServiceProxy() {
+ sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ if (proxyBinder == nullptr) return;
proxyBinder->pingForUserUpdate();
}
@@ -363,7 +369,7 @@
{
// Update battery life logging for flashlight
- Mutex::Autolock al(mTorchClientMapMutex);
+ Mutex::Autolock al(mTorchUidMapMutex);
auto iter = mTorchUidMap.find(cameraId);
if (iter != mTorchUidMap.end()) {
int oldUid = iter->second.second;
@@ -398,10 +404,12 @@
}
int32_t CameraService::getNumberOfCameras() {
+ ATRACE_CALL();
return getNumberOfCameras(CAMERA_TYPE_BACKWARD_COMPATIBLE);
}
int32_t CameraService::getNumberOfCameras(int type) {
+ ATRACE_CALL();
switch (type) {
case CAMERA_TYPE_BACKWARD_COMPATIBLE:
return mNumberOfNormalCameras;
@@ -416,6 +424,7 @@
status_t CameraService::getCameraInfo(int cameraId,
struct CameraInfo* cameraInfo) {
+ ATRACE_CALL();
if (!mModule) {
return -ENODEV;
}
@@ -443,6 +452,7 @@
}
status_t CameraService::generateShimMetadata(int cameraId, /*out*/CameraMetadata* cameraInfo) {
+ ATRACE_CALL();
status_t ret = OK;
struct CameraInfo info;
if ((ret = getCameraInfo(cameraId, &info)) != OK) {
@@ -529,6 +539,7 @@
status_t CameraService::getCameraCharacteristics(int cameraId,
CameraMetadata* cameraInfo) {
+ ATRACE_CALL();
if (!cameraInfo) {
ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
return BAD_VALUE;
@@ -597,10 +608,16 @@
procState);
return -1;
}
+ // Treat sleeping TOP processes the same as regular TOP processes for
+ // access priority. This is important for lock-screen camera launch scenarios.
+ if (procState == PROCESS_STATE_TOP_SLEEPING) {
+ procState = PROCESS_STATE_TOP;
+ }
return INT_MAX - procState;
}
status_t CameraService::getCameraVendorTagDescriptor(/*out*/sp<VendorTagDescriptor>& desc) {
+ ATRACE_CALL();
if (!mModule) {
ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__);
return -ENODEV;
@@ -611,6 +628,7 @@
}
int CameraService::getDeviceVersion(int cameraId, int* facing) {
+ ATRACE_CALL();
struct camera_info info;
if (mModule->getCameraInfo(cameraId, &info) != OK) {
return -1;
@@ -642,6 +660,7 @@
}
bool CameraService::setUpVendorTags() {
+ ATRACE_CALL();
vendor_tag_ops_t vOps = vendor_tag_ops_t();
// Check if vendor operations have been implemented
@@ -650,9 +669,7 @@
return false;
}
- ATRACE_BEGIN("camera3->get_metadata_vendor_tag_ops");
mModule->getVendorTagOps(&vOps);
- ATRACE_END();
// Ensure all vendor operations are present
if (vOps.get_tag_count == NULL || vOps.get_all_tags == NULL ||
@@ -935,6 +952,16 @@
LOG_ALWAYS_FATAL("%s: Invalid state for CameraService, clients not evicted properly",
__FUNCTION__);
}
+
+ // And register a death notification for the client callback. Do
+ // this last to avoid Binder policy where a nested Binder
+ // transaction might be pre-empted to service the client death
+ // notification if the client process dies before linkToDeath is
+ // invoked.
+ sp<IBinder> remoteCallback = client->getRemote();
+ if (remoteCallback != nullptr) {
+ remoteCallback->linkToDeath(this);
+ }
}
status_t CameraService::handleEvictionsLocked(const String8& cameraId, int clientPid,
@@ -942,7 +969,7 @@
/*out*/
sp<BasicClient>* client,
std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial) {
-
+ ATRACE_CALL();
status_t ret = NO_ERROR;
std::vector<DescriptorPtr> evictedClients;
DescriptorPtr clientDescriptor;
@@ -1131,6 +1158,7 @@
/*out*/
sp<ICamera>& device) {
+ ATRACE_CALL();
status_t ret = NO_ERROR;
String8 id = String8::format("%d", cameraId);
sp<Client> client = nullptr;
@@ -1155,6 +1183,7 @@
/*out*/
sp<ICamera>& device) {
+ ATRACE_CALL();
String8 id = String8::format("%d", cameraId);
int apiVersion = mModule->getModuleApiVersion();
if (halVersion != CAMERA_HAL_API_VERSION_UNSPECIFIED &&
@@ -1195,6 +1224,7 @@
/*out*/
sp<ICameraDeviceUser>& device) {
+ ATRACE_CALL();
status_t ret = NO_ERROR;
String8 id = String8::format("%d", cameraId);
sp<CameraDeviceClient> client = nullptr;
@@ -1214,6 +1244,8 @@
status_t CameraService::setTorchMode(const String16& cameraId, bool enabled,
const sp<IBinder>& clientBinder) {
+
+ ATRACE_CALL();
if (enabled && clientBinder == nullptr) {
ALOGE("%s: torch client binder is NULL", __FUNCTION__);
return -EINVAL;
@@ -1262,7 +1294,7 @@
{
// Update UID map - this is used in the torch status changed callbacks, so must be done
// before setTorchMode
- Mutex::Autolock al(mTorchClientMapMutex);
+ Mutex::Autolock al(mTorchUidMapMutex);
if (mTorchUidMap.find(id) == mTorchUidMap.end()) {
mTorchUidMap[id].first = uid;
mTorchUidMap[id].second = uid;
@@ -1302,6 +1334,8 @@
}
void CameraService::notifySystemEvent(int32_t eventId, const int32_t* args, size_t length) {
+ ATRACE_CALL();
+
switch(eventId) {
case ICameraService::USER_SWITCHED: {
doUserSwitch(/*newUserIds*/args, /*length*/length);
@@ -1317,6 +1351,8 @@
}
status_t CameraService::addListener(const sp<ICameraServiceListener>& listener) {
+ ATRACE_CALL();
+
ALOGV("%s: Add listener %p", __FUNCTION__, listener.get());
if (listener == nullptr) {
@@ -1365,6 +1401,8 @@
}
status_t CameraService::removeListener(const sp<ICameraServiceListener>& listener) {
+ ATRACE_CALL();
+
ALOGV("%s: Remove listener %p", __FUNCTION__, listener.get());
if (listener == 0) {
@@ -1391,6 +1429,8 @@
}
status_t CameraService::getLegacyParameters(int cameraId, /*out*/String16* parameters) {
+
+ ATRACE_CALL();
ALOGV("%s: for camera ID = %d", __FUNCTION__, cameraId);
if (parameters == NULL) {
@@ -1415,6 +1455,8 @@
}
status_t CameraService::supportsCameraApi(int cameraId, int apiVersion) {
+ ATRACE_CALL();
+
ALOGV("%s: for camera ID = %d", __FUNCTION__, cameraId);
switch (apiVersion) {
@@ -1782,6 +1824,8 @@
}
void CameraService::loadSound() {
+ ATRACE_CALL();
+
Mutex::Autolock lock(mSoundLock);
LOG1("CameraService::loadSound ref=%d", mSoundRef);
if (mSoundRef++) return;
@@ -1804,6 +1848,8 @@
}
void CameraService::playSound(sound_kind kind) {
+ ATRACE_CALL();
+
LOG1("playSound(%d)", kind);
Mutex::Autolock lock(mSoundLock);
sp<MediaPlayer> player = mSoundPlayer[kind];
@@ -1874,11 +1920,9 @@
void CameraService::BasicClient::disconnect() {
if (mDisconnected) {
- ALOGE("%s: Disconnect called on already disconnected client for device %d", __FUNCTION__,
- mCameraId);
return;
}
- mDisconnected = true;;
+ mDisconnected = true;
mCameraService->removeByClient(this);
mCameraService->logDisconnected(String8::format("%d", mCameraId), mClientPid,
@@ -1915,6 +1959,8 @@
}
status_t CameraService::BasicClient::startCameraOps() {
+ ATRACE_CALL();
+
int32_t res;
// Notify app ops that the camera is not available
mOpsCallback = new OpsCallback(this);
@@ -1948,10 +1994,16 @@
mCameraService->updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE,
String8::format("%d", mCameraId));
+ // Transition device state to OPEN
+ mCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_OPEN,
+ String8::format("%d", mCameraId));
+
return OK;
}
status_t CameraService::BasicClient::finishCameraOps() {
+ ATRACE_CALL();
+
// Check if startCameraOps succeeded, and if so, finish the camera op
if (mOpsActive) {
// Notify app ops that the camera is available again
@@ -1966,6 +2018,10 @@
mCameraService->updateStatus(ICameraServiceListener::STATUS_PRESENT,
String8::format("%d", mCameraId), rejected);
+ // Transition device state to CLOSED
+ mCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_CLOSED,
+ String8::format("%d", mCameraId));
+
// Notify flashlight that a camera device is closed.
mCameraService->mFlashlight->deviceClosed(
String8::format("%d", mCameraId));
@@ -1980,6 +2036,8 @@
}
void CameraService::BasicClient::opChanged(int32_t op, const String16& packageName) {
+ ATRACE_CALL();
+
String8 name(packageName);
String8 myName(mClientPackageName);
@@ -2203,9 +2261,11 @@
}
status_t CameraService::dump(int fd, const Vector<String16>& args) {
+ ATRACE_CALL();
+
String8 result("Dump of the Camera Service:\n");
if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
- result.appendFormat("Permission Denial: "
+ result = result.format("Permission Denial: "
"can't dump CameraService from pid=%d, uid=%d\n",
getCallingPid(),
getCallingUid());
@@ -2466,6 +2526,14 @@
});
}
+void CameraService::updateProxyDeviceState(ICameraServiceProxy::CameraState newState,
+ const String8& cameraId) {
+ sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ if (proxyBinder == nullptr) return;
+ String16 id(cameraId);
+ proxyBinder->notifyCameraState(id, newState);
+}
+
status_t CameraService::getTorchStatusLocked(
const String8& cameraId,
ICameraServiceListener::TorchStatus *status) const {
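
One easy-to-miss behavioral change above is in the process-state-to-priority mapping: the score is INT_MAX minus the process state, so remapping PROCESS_STATE_TOP_SLEEPING onto PROCESS_STATE_TOP lets a lock-screen (sleeping top) client contend for the camera exactly like a foreground one. A small self-contained sketch, reusing the constant values added to CameraService.h further down:

    #include <climits>
    #include <cstdio>

    // Values mirror the constants added in the CameraService.h hunk below.
    static const int PROCESS_STATE_TOP = 2;
    static const int PROCESS_STATE_TOP_SLEEPING = 5;

    // Higher return value == higher priority in camera arbitration.
    static int cameraPriorityFromProcState(int procState) {
        if (procState == PROCESS_STATE_TOP_SLEEPING) {
            procState = PROCESS_STATE_TOP;   // treat lock-screen launches as foreground
        }
        return INT_MAX - procState;
    }

    int main() {
        // Both calls print the same score (INT_MAX - 2) after the remap.
        printf("%d %d\n", cameraPriorityFromProcState(PROCESS_STATE_TOP),
               cameraPriorityFromProcState(PROCESS_STATE_TOP_SLEEPING));
        return 0;
    }

Without the remap, the sleeping client would score INT_MAX - 5 and lose arbitration to any regular TOP client.
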
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 7f4d43f..cd97b08 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -24,6 +24,7 @@
#include <binder/BinderService.h>
#include <binder/IAppOpsCallback.h>
#include <camera/ICameraService.h>
+#include <camera/ICameraServiceProxy.h>
#include <hardware/camera.h>
#include <camera/ICamera.h>
@@ -74,6 +75,8 @@
// Process state (mirrors frameworks/base/core/java/android/app/ActivityManager.java)
static const int PROCESS_STATE_NONEXISTENT = -1;
+ static const int PROCESS_STATE_TOP = 2;
+ static const int PROCESS_STATE_TOP_SLEEPING = 5;
// 3 second busy timeout when other clients are connecting
static const nsecs_t DEFAULT_CONNECT_TIMEOUT_NS = 3000000000;
@@ -164,6 +167,14 @@
void playSound(sound_kind kind);
void releaseSound();
+ /**
+ * Update the state of a given camera device (open/close/active/idle) with
+ * the camera proxy service in the system service
+ */
+ static void updateProxyDeviceState(
+ ICameraServiceProxy::CameraState newState,
+ const String8& cameraId);
+
/////////////////////////////////////////////////////////////////////
// CameraDeviceFactory functionality
int getDeviceVersion(int cameraId, int* facing = NULL);
@@ -648,8 +659,10 @@
sp<CameraFlashlight> mFlashlight;
// guard mTorchStatusMap
Mutex mTorchStatusMutex;
- // guard mTorchClientMap, mTorchUidMap
+ // guard mTorchClientMap
Mutex mTorchClientMapMutex;
+ // guard mTorchUidMap
+ Mutex mTorchUidMapMutex;
// camera id -> torch status
KeyedVector<String8, ICameraServiceListener::TorchStatus> mTorchStatusMap;
// camera id -> torch client binder
@@ -728,6 +741,7 @@
static String8 toString(std::set<userid_t> intSet);
+ static sp<ICameraServiceProxy> getCameraServiceProxy();
static void pingCameraServiceProxy();
};
@@ -859,11 +873,6 @@
return ret;
}
- sp<IBinder> remoteCallback = client->getRemote();
- if (remoteCallback != nullptr) {
- remoteCallback->linkToDeath(this);
- }
-
// Update shim parameters for legacy clients
if (effectiveApiLevel == API_1) {
// Assume we have always received a Client subclass for API1
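
Proxy lookup is now centralized in getCameraServiceProxy(), and all device-state reporting funnels through updateProxyDeviceState(), which quietly does nothing when media.camera.proxy has not registered. The shape of that lookup-then-maybe-notify path, reduced to a standalone sketch (the types below are simplified stand-ins, not the real Binder classes):

    #include <cstdio>
    #include <memory>
    #include <string>

    enum CameraState { CAMERA_STATE_OPEN, CAMERA_STATE_ACTIVE, CAMERA_STATE_IDLE, CAMERA_STATE_CLOSED };

    struct CameraServiceProxy {
        void notifyCameraState(const std::string &cameraId, CameraState state) {
            printf("camera %s -> state %d\n", cameraId.c_str(), (int)state);
        }
    };

    // Stand-in for the ServiceManager lookup of "media.camera.proxy"; it may return
    // null when the managed-side proxy has not registered yet.
    static std::shared_ptr<CameraServiceProxy> getCameraServiceProxy() {
        static auto sProxy = std::make_shared<CameraServiceProxy>();
        return sProxy;
    }

    static void updateProxyDeviceState(CameraState newState, const std::string &cameraId) {
        auto proxy = getCameraServiceProxy();
        if (proxy == nullptr) return;         // proxy not available: drop the update silently
        proxy->notifyCameraState(cameraId, newState);
    }

    int main() {
        updateProxyDeviceState(CAMERA_STATE_OPEN, "0");
        updateProxyDeviceState(CAMERA_STATE_CLOSED, "0");
        return 0;
    }
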
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 36e99dd..48b5a26 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1912,6 +1912,8 @@
ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
__FUNCTION__, resultExtras.requestId, timestamp);
mCaptureSequencer->notifyShutter(resultExtras, timestamp);
+
+ Camera2ClientBase::notifyShutter(resultExtras, timestamp);
}
camera2::SharedParameters& Camera2Client::getParameters() {
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index e552633..38e35cd 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -251,6 +251,9 @@
// Turn off all messages.
disableMsgType(CAMERA_MSG_ALL_MSGS);
mHardware->stopPreview();
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
mHardware->cancelPicture();
// Release the hardware resources.
mHardware->release();
@@ -409,7 +412,11 @@
}
mHardware->setPreviewWindow(mPreviewWindow);
result = mHardware->startPreview();
-
+ if (result == NO_ERROR) {
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_ACTIVE,
+ String8::format("%d", mCameraId));
+ }
return result;
}
@@ -449,7 +456,9 @@
disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
mHardware->stopPreview();
-
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
mPreviewBuffer.clear();
}
@@ -790,6 +799,12 @@
}
disableMsgType(CAMERA_MSG_SHUTTER);
+ // Shutters only happen in response to takePicture, so mark device as
+ // idle now, until preview is restarted
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
+
mLock.unlock();
}
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 442eb75..44447b4 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -214,8 +214,8 @@
supportedPreviewFormats);
}
- previewFpsRange[0] = availableFpsRanges.data.i32[0];
- previewFpsRange[1] = availableFpsRanges.data.i32[1];
+ previewFpsRange[0] = fastInfo.bestStillCaptureFpsRange[0];
+ previewFpsRange[1] = fastInfo.bestStillCaptureFpsRange[1];
// PREVIEW_FRAME_RATE / SUPPORTED_PREVIEW_FRAME_RATES are deprecated, but
// still have to do something sane for them
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index c717a56..0c531c3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -719,6 +719,43 @@
return res;
}
+status_t CameraDeviceClient::prepare2(int maxCount, int streamId) {
+ ATRACE_CALL();
+ ALOGV("%s", __FUNCTION__);
+
+ status_t res = OK;
+ if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+
+ Mutex::Autolock icl(mBinderSerializationLock);
+
+ // Guard against trying to prepare non-created streams
+ ssize_t index = NAME_NOT_FOUND;
+ for (size_t i = 0; i < mStreamMap.size(); ++i) {
+ if (streamId == mStreamMap.valueAt(i)) {
+ index = i;
+ break;
+ }
+ }
+
+ if (index == NAME_NOT_FOUND) {
+ ALOGW("%s: Camera %d: Invalid stream ID (%d) specified, no stream created yet",
+ __FUNCTION__, mCameraId, streamId);
+ return BAD_VALUE;
+ }
+
+ if (maxCount <= 0) {
+ ALOGE("%s: Camera %d: Invalid maxCount (%d) specified, must be greater than 0.",
+ __FUNCTION__, mCameraId, maxCount);
+ return BAD_VALUE;
+ }
+
+ // Also returns BAD_VALUE if the stream ID was not valid, or the stream has
+ // already been used
+ res = mDevice->prepare(maxCount, streamId);
+
+ return res;
+}
+
status_t CameraDeviceClient::tearDown(int streamId) {
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
@@ -799,6 +836,7 @@
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
+ Camera2ClientBase::notifyIdle();
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -808,6 +846,7 @@
if (remoteCb != 0) {
remoteCb->onCaptureStarted(resultExtras, timestamp);
}
+ Camera2ClientBase::notifyShutter(resultExtras, timestamp);
}
void CameraDeviceClient::notifyPrepared(int streamId) {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 1f8b39d..d1e692c 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -114,6 +114,9 @@
// Tear down stream resources by freeing its unused buffers
virtual status_t tearDown(int streamId);
+ // Prepare stream by preallocating up to maxCount of its buffers
+ virtual status_t prepare2(int maxCount, int streamId);
+
/**
* Interface used by CameraService
*/
@@ -189,6 +192,7 @@
Vector<int> mStreamingRequestList;
int32_t mRequestIdCounter;
+
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index ba0b264..5732f80 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -55,7 +55,8 @@
TClientBase(cameraService, remoteCallback, clientPackageName,
cameraId, cameraFacing, clientPid, clientUid, servicePid),
mSharedCameraCallbacks(remoteCallback),
- mDeviceVersion(cameraService->getDeviceVersion(cameraId))
+ mDeviceVersion(cameraService->getDeviceVersion(cameraId)),
+ mDeviceActive(false)
{
ALOGI("Camera %d: Opened. Client: %s (PID %d, UID %d)", cameraId,
String8(clientPackageName).string(), clientPid, clientUid);
@@ -235,6 +236,13 @@
template <typename TClientBase>
void Camera2ClientBase<TClientBase>::notifyIdle() {
+ if (mDeviceActive) {
+ getCameraService()->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", TClientBase::mCameraId));
+ }
+ mDeviceActive = false;
+
ALOGV("Camera device is now idle");
}
@@ -244,6 +252,13 @@
(void)resultExtras;
(void)timestamp;
+ if (!mDeviceActive) {
+ getCameraService()->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_ACTIVE,
+ String8::format("%d", TClientBase::mCameraId));
+ }
+ mDeviceActive = true;
+
ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
__FUNCTION__, resultExtras.requestId, timestamp);
}
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index f1cacdf..220c5ad 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -136,6 +136,8 @@
status_t checkPid(const char *checkLocation) const;
virtual void detachDevice();
+
+ bool mDeviceActive;
};
}; // namespace android
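
The mDeviceActive flag added in Camera2ClientBase turns the per-frame shutter and idle callbacks into edge-triggered proxy updates: ACTIVE is reported once when the first shutter arrives after an idle period, and IDLE once when activity stops. A standalone sketch of that filter, with printf standing in for updateProxyDeviceState:

    #include <cstdio>

    class DeviceStateFilter {
        bool mDeviceActive = false;
      public:
        // Called for every capture shutter; only the first one after idle is reported.
        void notifyShutter() {
            if (!mDeviceActive) printf("proxy <- CAMERA_STATE_ACTIVE\n");
            mDeviceActive = true;
        }
        // Called whenever the device drains; only reported if we were active.
        void notifyIdle() {
            if (mDeviceActive) printf("proxy <- CAMERA_STATE_IDLE\n");
            mDeviceActive = false;
        }
    };

    int main() {
        DeviceStateFilter f;
        f.notifyShutter();   // ACTIVE reported
        f.notifyShutter();   // suppressed
        f.notifyIdle();      // IDLE reported
        f.notifyIdle();      // suppressed
        return 0;
    }

Running it prints ACTIVE and IDLE exactly once each, even though both callbacks fire twice.
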
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index cd25949..7b083a3 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -294,6 +294,12 @@
virtual status_t tearDown(int streamId) = 0;
/**
+ * Prepare stream by preallocating up to maxCount buffers for it asynchronously.
+ * Calls notifyPrepared() once allocation is complete.
+ */
+ virtual status_t prepare(int maxCount, int streamId) = 0;
+
+ /**
* Get the HAL device version.
*/
virtual uint32_t getDeviceVersion() = 0;
diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp
index 6a4dfe0..16b8aba 100644
--- a/services/camera/libcameraservice/common/CameraModule.cpp
+++ b/services/camera/libcameraservice/common/CameraModule.cpp
@@ -15,14 +15,18 @@
*/
#define LOG_TAG "CameraModule"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <utils/Trace.h>
+
#include "CameraModule.h"
namespace android {
void CameraModule::deriveCameraCharacteristicsKeys(
uint32_t deviceVersion, CameraMetadata &chars) {
+ ATRACE_CALL();
// HAL1 devices should not reach here
if (deviceVersion < CAMERA_DEVICE_API_VERSION_2_0) {
ALOGV("%s: Cannot derive keys for HAL version < 2.0");
@@ -150,9 +154,7 @@
ALOGE("%s: camera hardware module must not be null", __FUNCTION__);
assert(0);
}
-
mModule = module;
- mCameraInfoMap.setCapacity(getNumberOfCameras());
}
CameraModule::~CameraModule()
@@ -168,14 +170,20 @@
}
int CameraModule::init() {
+ ATRACE_CALL();
+ int res = OK;
if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4 &&
mModule->init != NULL) {
- return mModule->init();
+ ATRACE_BEGIN("camera_module->init");
+ res = mModule->init();
+ ATRACE_END();
}
- return OK;
+ mCameraInfoMap.setCapacity(getNumberOfCameras());
+ return res;
}
int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) {
+ ATRACE_CALL();
Mutex::Autolock lock(mCameraInfoLock);
if (cameraId < 0) {
ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId);
@@ -185,14 +193,20 @@
// Only override static_camera_characteristics for API2 devices
int apiVersion = mModule->common.module_api_version;
if (apiVersion < CAMERA_MODULE_API_VERSION_2_0) {
- return mModule->get_camera_info(cameraId, info);
+ int ret;
+ ATRACE_BEGIN("camera_module->get_camera_info");
+ ret = mModule->get_camera_info(cameraId, info);
+ ATRACE_END();
+ return ret;
}
ssize_t index = mCameraInfoMap.indexOfKey(cameraId);
if (index == NAME_NOT_FOUND) {
// Get camera info from raw module and cache it
camera_info rawInfo, cameraInfo;
+ ATRACE_BEGIN("camera_module->get_camera_info");
int ret = mModule->get_camera_info(cameraId, &rawInfo);
+ ATRACE_END();
if (ret != 0) {
return ret;
}
@@ -217,20 +231,36 @@
}
int CameraModule::open(const char* id, struct hw_device_t** device) {
- return filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device));
+ int res;
+ ATRACE_BEGIN("camera_module->open");
+ res = filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device));
+ ATRACE_END();
+ return res;
}
int CameraModule::openLegacy(
const char* id, uint32_t halVersion, struct hw_device_t** device) {
- return mModule->open_legacy(&mModule->common, id, halVersion, device);
+ int res;
+ ATRACE_BEGIN("camera_module->open_legacy");
+ res = mModule->open_legacy(&mModule->common, id, halVersion, device);
+ ATRACE_END();
+ return res;
}
int CameraModule::getNumberOfCameras() {
- return mModule->get_number_of_cameras();
+ int numCameras;
+ ATRACE_BEGIN("camera_module->get_number_of_cameras");
+ numCameras = mModule->get_number_of_cameras();
+ ATRACE_END();
+ return numCameras;
}
int CameraModule::setCallbacks(const camera_module_callbacks_t *callbacks) {
- return mModule->set_callbacks(callbacks);
+ int res;
+ ATRACE_BEGIN("camera_module->set_callbacks");
+ res = mModule->set_callbacks(callbacks);
+ ATRACE_END();
+ return res;
}
bool CameraModule::isVendorTagDefined() {
@@ -239,12 +269,18 @@
void CameraModule::getVendorTagOps(vendor_tag_ops_t* ops) {
if (mModule->get_vendor_tag_ops) {
+ ATRACE_BEGIN("camera_module->get_vendor_tag_ops");
mModule->get_vendor_tag_ops(ops);
+ ATRACE_END();
}
}
int CameraModule::setTorchMode(const char* camera_id, bool enable) {
- return mModule->set_torch_mode(camera_id, enable);
+ int res;
+ ATRACE_BEGIN("camera_module->set_torch_mode");
+ res = mModule->set_torch_mode(camera_id, enable);
+ ATRACE_END();
+ return res;
}
status_t CameraModule::filterOpenErrorCode(status_t err) {
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index c9c990c..d74f976 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -632,6 +632,12 @@
return NO_INIT;
}
+status_t Camera2Device::prepare(int maxCount, int streamId) {
+ ATRACE_CALL();
+ ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
+ return NO_INIT;
+}
+
uint32_t Camera2Device::getDeviceVersion() {
ATRACE_CALL();
return mDeviceVersion;
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index 34c1ded..b4d343c 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -88,6 +88,7 @@
// Prepare and tearDown are no-ops
virtual status_t prepare(int streamId);
virtual status_t tearDown(int streamId);
+ virtual status_t prepare(int maxCount, int streamId);
virtual uint32_t getDeviceVersion();
virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 0c941fb..433a745 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -285,19 +285,27 @@
mStatusTracker->join();
}
+ camera3_device_t *hal3Device;
{
Mutex::Autolock l(mLock);
mRequestThread.clear();
mStatusTracker.clear();
- if (mHal3Device != NULL) {
- ATRACE_BEGIN("camera3->close");
- mHal3Device->common.close(&mHal3Device->common);
- ATRACE_END();
- mHal3Device = NULL;
- }
+ hal3Device = mHal3Device;
+ }
+ // Call close without the internal mutex held, as the HAL close may need to
+ // wait on assorted callbacks, etc., to complete before it can return.
+ if (hal3Device != NULL) {
+ ATRACE_BEGIN("camera3->close");
+ hal3Device->common.close(&hal3Device->common);
+ ATRACE_END();
+ }
+
+ {
+ Mutex::Autolock l(mLock);
+ mHal3Device = NULL;
internalUpdateStatusLocked(STATUS_UNINITIALIZED);
}
@@ -557,6 +565,18 @@
ALOGV("%s: requestId = %" PRId32, __FUNCTION__, newRequest->mResultExtras.requestId);
}
+
+ // Setup batch size if this is a high speed video recording request.
+ if (mIsConstrainedHighSpeedConfiguration && requestList->size() > 0) {
+ auto firstRequest = requestList->begin();
+ for (auto& outputStream : (*firstRequest)->mOutputStreams) {
+ if (outputStream->isVideoStream()) {
+ (*firstRequest)->mBatchSize = requestList->size();
+ break;
+ }
+ }
+ }
+
return OK;
}
@@ -1398,7 +1418,7 @@
status_t res;
if (mHal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_1) {
- res = mHal3Device->ops->flush(mHal3Device);
+ res = mRequestThread->flush();
} else {
Mutex::Autolock l(mLock);
res = waitUntilDrainedLocked();
@@ -1408,6 +1428,10 @@
}
status_t Camera3Device::prepare(int streamId) {
+ return prepare(camera3::Camera3StreamInterface::ALLOCATE_PIPELINE_MAX, streamId);
+}
+
+status_t Camera3Device::prepare(int maxCount, int streamId) {
ATRACE_CALL();
ALOGV("%s: Camera %d: Preparing stream %d", __FUNCTION__, mId, streamId);
Mutex::Autolock il(mInterfaceLock);
@@ -1432,7 +1456,7 @@
return BAD_VALUE;
}
- return mPreparerThread->prepare(stream);
+ return mPreparerThread->prepare(maxCount, stream);
}
status_t Camera3Device::tearDown(int streamId) {
@@ -1583,6 +1607,7 @@
newRequest->mOutputStreams.push(stream);
}
newRequest->mSettings.erase(ANDROID_REQUEST_OUTPUT_STREAMS);
+ newRequest->mBatchSize = 1;
return newRequest;
}
@@ -2754,6 +2779,17 @@
return OK;
}
+status_t Camera3Device::RequestThread::flush() {
+ ATRACE_CALL();
+ Mutex::Autolock l(mFlushLock);
+
+ if (mHal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_1) {
+ return mHal3Device->ops->flush(mHal3Device);
+ }
+
+ return -ENOTSUP;
+}
+
void Camera3Device::RequestThread::setPaused(bool paused) {
Mutex::Autolock l(mPauseLock);
mDoPause = paused;
@@ -2844,7 +2880,7 @@
}
bool Camera3Device::RequestThread::threadLoop() {
-
+ ATRACE_CALL();
status_t res;
// Handle paused state.
@@ -2852,147 +2888,31 @@
return true;
}
- // Get work to do
-
- sp<CaptureRequest> nextRequest = waitForNextRequest();
- if (nextRequest == NULL) {
+ // Wait for the next batch of requests.
+ waitForNextRequestBatch();
+ if (mNextRequests.size() == 0) {
return true;
}
- // Create request to HAL
- camera3_capture_request_t request = camera3_capture_request_t();
- request.frame_number = nextRequest->mResultExtras.frameNumber;
- Vector<camera3_stream_buffer_t> outputBuffers;
-
- // Get the request ID, if any
- int requestId;
- camera_metadata_entry_t requestIdEntry =
- nextRequest->mSettings.find(ANDROID_REQUEST_ID);
+ // Get the latest request ID, if any
+ int latestRequestId;
+ camera_metadata_entry_t requestIdEntry = mNextRequests[mNextRequests.size() - 1].
+ captureRequest->mSettings.find(ANDROID_REQUEST_ID);
if (requestIdEntry.count > 0) {
- requestId = requestIdEntry.data.i32[0];
+ latestRequestId = requestIdEntry.data.i32[0];
} else {
- ALOGW("%s: Did not have android.request.id set in the request",
- __FUNCTION__);
- requestId = NAME_NOT_FOUND;
+ ALOGW("%s: Did not have android.request.id set in the request.", __FUNCTION__);
+ latestRequestId = NAME_NOT_FOUND;
}
- // Insert any queued triggers (before metadata is locked)
- int32_t triggerCount;
- res = insertTriggers(nextRequest);
- if (res < 0) {
- SET_ERR("RequestThread: Unable to insert triggers "
- "(capture request %d, HAL device: %s (%d)",
- request.frame_number, strerror(-res), res);
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return false;
- }
- triggerCount = res;
-
- bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
-
- // If the request is the same as last, or we had triggers last time
- if (mPrevRequest != nextRequest || triggersMixedIn) {
- /**
- * HAL workaround:
- * Insert a dummy trigger ID if a trigger is set but no trigger ID is
- */
- res = addDummyTriggerIds(nextRequest);
- if (res != OK) {
- SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
- "(capture request %d, HAL device: %s (%d)",
- request.frame_number, strerror(-res), res);
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return false;
- }
-
- /**
- * The request should be presorted so accesses in HAL
- * are O(logn). Sidenote, sorting a sorted metadata is nop.
- */
- nextRequest->mSettings.sort();
- request.settings = nextRequest->mSettings.getAndLock();
- mPrevRequest = nextRequest;
- ALOGVV("%s: Request settings are NEW", __FUNCTION__);
-
- IF_ALOGV() {
- camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
- find_camera_metadata_ro_entry(
- request.settings,
- ANDROID_CONTROL_AF_TRIGGER,
- &e
- );
- if (e.count > 0) {
- ALOGV("%s: Request (frame num %d) had AF trigger 0x%x",
- __FUNCTION__,
- request.frame_number,
- e.data.u8[0]);
- }
- }
- } else {
- // leave request.settings NULL to indicate 'reuse latest given'
- ALOGVV("%s: Request settings are REUSED",
- __FUNCTION__);
- }
-
- uint32_t totalNumBuffers = 0;
-
- // Fill in buffers
- if (nextRequest->mInputStream != NULL) {
- request.input_buffer = &nextRequest->mInputBuffer;
- totalNumBuffers += 1;
- } else {
- request.input_buffer = NULL;
- }
-
- outputBuffers.insertAt(camera3_stream_buffer_t(), 0,
- nextRequest->mOutputStreams.size());
- request.output_buffers = outputBuffers.array();
- for (size_t i = 0; i < nextRequest->mOutputStreams.size(); i++) {
- res = nextRequest->mOutputStreams.editItemAt(i)->
- getBuffer(&outputBuffers.editItemAt(i));
- if (res != OK) {
- // Can't get output buffer from gralloc queue - this could be due to
- // abandoned queue or other consumer misbehavior, so not a fatal
- // error
- ALOGE("RequestThread: Can't get output buffer, skipping request:"
- " %s (%d)", strerror(-res), res);
- {
- Mutex::Autolock l(mRequestLock);
- if (mListener != NULL) {
- mListener->notifyError(
- ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
- nextRequest->mResultExtras);
- }
- }
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return true;
- }
- request.num_output_buffers++;
- }
- totalNumBuffers += request.num_output_buffers;
-
- // Log request in the in-flight queue
- sp<Camera3Device> parent = mParent.promote();
- if (parent == NULL) {
- // Should not happen, and nowhere to send errors to, so just log it
- CLOGE("RequestThread: Parent is gone");
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return false;
- }
-
- res = parent->registerInFlight(request.frame_number,
- totalNumBuffers, nextRequest->mResultExtras,
- /*hasInput*/request.input_buffer != NULL,
- nextRequest->mAeTriggerCancelOverride);
- ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
- ", burstId = %" PRId32 ".",
- __FUNCTION__,
- nextRequest->mResultExtras.requestId, nextRequest->mResultExtras.frameNumber,
- nextRequest->mResultExtras.burstId);
- if (res != OK) {
- SET_ERR("RequestThread: Unable to register new in-flight request:"
- " %s (%d)", strerror(-res), res);
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
+ // Prepare a batch of HAL requests and output buffers.
+ res = prepareHalRequests();
+ if (res == TIMED_OUT) {
+ // Not a fatal error if getting output buffers time out.
+ cleanUpFailedRequests(/*sendRequestError*/ true);
+ return true;
+ } else if (res != OK) {
+ cleanUpFailedRequests(/*sendRequestError*/ false);
return false;
}
@@ -3000,57 +2920,210 @@
{
Mutex::Autolock al(mLatestRequestMutex);
- mLatestRequestId = requestId;
+ mLatestRequestId = latestRequestId;
mLatestRequestSignal.signal();
}
- // Submit request and block until ready for next one
- ATRACE_ASYNC_BEGIN("frame capture", request.frame_number);
- ATRACE_BEGIN("camera3->process_capture_request");
- res = mHal3Device->ops->process_capture_request(mHal3Device, &request);
- ATRACE_END();
+ // Submit a batch of requests to HAL.
+ // Use the flush lock only when submitting multiple requests in a batch.
+ // TODO: The problem with the flush lock is that flush() will be blocked by
+ // process_capture_request(), which may take a long time to finish, so synchronizing
+ // flush() and process_capture_request() defeats the purpose of cancelling requests
+ // ASAP with flush(). For now, only synchronize for high speed recording; we should
+ // figure out a way to remove the synchronization.
+ bool useFlushLock = mNextRequests.size() > 1;
- if (res != OK) {
- // Should only get a failure here for malformed requests or device-level
- // errors, so consider all errors fatal. Bad metadata failures should
- // come through notify.
- SET_ERR("RequestThread: Unable to submit capture request %d to HAL"
- " device: %s (%d)", request.frame_number, strerror(-res), res);
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return false;
+ if (useFlushLock) {
+ mFlushLock.lock();
}
- // Update the latest request sent to HAL
- if (request.settings != NULL) { // Don't update them if they were unchanged
- Mutex::Autolock al(mLatestRequestMutex);
+ ALOGVV("%s: %d: submitting %d requests in a batch.", __FUNCTION__, __LINE__,
+ mNextRequests.size());
+ for (auto& nextRequest : mNextRequests) {
+ // Submit request and block until ready for next one
+ ATRACE_ASYNC_BEGIN("frame capture", nextRequest.halRequest.frame_number);
+ ATRACE_BEGIN("camera3->process_capture_request");
+ res = mHal3Device->ops->process_capture_request(mHal3Device, &nextRequest.halRequest);
+ ATRACE_END();
- camera_metadata_t* cloned = clone_camera_metadata(request.settings);
- mLatestRequest.acquire(cloned);
+ if (res != OK) {
+ // Should only get a failure here for malformed requests or device-level
+ // errors, so consider all errors fatal. Bad metadata failures should
+ // come through notify.
+ SET_ERR("RequestThread: Unable to submit capture request %d to HAL"
+ " device: %s (%d)", nextRequest.halRequest.frame_number, strerror(-res),
+ res);
+ cleanUpFailedRequests(/*sendRequestError*/ false);
+ if (useFlushLock) {
+ mFlushLock.unlock();
+ }
+ return false;
+ }
+
+ // Mark that the request has been submitted successfully.
+ nextRequest.submitted = true;
+
+ // Update the latest request sent to HAL
+ if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
+ Mutex::Autolock al(mLatestRequestMutex);
+
+ camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
+ mLatestRequest.acquire(cloned);
+ }
+
+ if (nextRequest.halRequest.settings != NULL) {
+ nextRequest.captureRequest->mSettings.unlock(nextRequest.halRequest.settings);
+ }
+
+ // Remove any previously queued triggers (after unlock)
+ res = removeTriggers(mPrevRequest);
+ if (res != OK) {
+ SET_ERR("RequestThread: Unable to remove triggers "
+ "(capture request %d, HAL device: %s (%d)",
+ nextRequest.halRequest.frame_number, strerror(-res), res);
+ cleanUpFailedRequests(/*sendRequestError*/ false);
+ if (useFlushLock) {
+ mFlushLock.unlock();
+ }
+ return false;
+ }
}
- if (request.settings != NULL) {
- nextRequest->mSettings.unlock(request.settings);
+ if (useFlushLock) {
+ mFlushLock.unlock();
}
// Unset as current request
{
Mutex::Autolock l(mRequestLock);
- mNextRequest.clear();
+ mNextRequests.clear();
}
- // Remove any previously queued triggers (after unlock)
- res = removeTriggers(mPrevRequest);
- if (res != OK) {
- SET_ERR("RequestThread: Unable to remove triggers "
- "(capture request %d, HAL device: %s (%d)",
- request.frame_number, strerror(-res), res);
- return false;
- }
- mPrevTriggers = triggerCount;
-
return true;
}
+status_t Camera3Device::RequestThread::prepareHalRequests() {
+ ATRACE_CALL();
+
+ for (auto& nextRequest : mNextRequests) {
+ sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
+ camera3_capture_request_t* halRequest = &nextRequest.halRequest;
+ Vector<camera3_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;
+
+ // Prepare a request to HAL
+ halRequest->frame_number = captureRequest->mResultExtras.frameNumber;
+
+ // Insert any queued triggers (before metadata is locked)
+ status_t res = insertTriggers(captureRequest);
+
+ if (res < 0) {
+ SET_ERR("RequestThread: Unable to insert triggers "
+ "(capture request %d, HAL device: %s (%d)",
+ halRequest->frame_number, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ int triggerCount = res;
+ bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
+ mPrevTriggers = triggerCount;
+
+ // If the request is the same as last, or we had triggers last time
+ if (mPrevRequest != captureRequest || triggersMixedIn) {
+ /**
+ * HAL workaround:
+ * Insert a dummy trigger ID if a trigger is set but no trigger ID is set.
+ */
+ res = addDummyTriggerIds(captureRequest);
+ if (res != OK) {
+ SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
+ "(capture request %d, HAL device: %s (%d)",
+ halRequest->frame_number, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+
+ /**
+ * The request should be presorted so accesses in HAL
+ * are O(log n). Side note: sorting already-sorted metadata is a no-op.
+ */
+ captureRequest->mSettings.sort();
+ halRequest->settings = captureRequest->mSettings.getAndLock();
+ mPrevRequest = captureRequest;
+ ALOGVV("%s: Request settings are NEW", __FUNCTION__);
+
+ IF_ALOGV() {
+ camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
+ find_camera_metadata_ro_entry(
+ halRequest->settings,
+ ANDROID_CONTROL_AF_TRIGGER,
+ &e
+ );
+ if (e.count > 0) {
+ ALOGV("%s: Request (frame num %d) had AF trigger 0x%x",
+ __FUNCTION__,
+ halRequest->frame_number,
+ e.data.u8[0]);
+ }
+ }
+ } else {
+ // leave request.settings NULL to indicate 'reuse latest given'
+ ALOGVV("%s: Request settings are REUSED",
+ __FUNCTION__);
+ }
+
+ uint32_t totalNumBuffers = 0;
+
+ // Fill in buffers
+ if (captureRequest->mInputStream != NULL) {
+ halRequest->input_buffer = &captureRequest->mInputBuffer;
+ totalNumBuffers += 1;
+ } else {
+ halRequest->input_buffer = NULL;
+ }
+
+ outputBuffers->insertAt(camera3_stream_buffer_t(), 0,
+ captureRequest->mOutputStreams.size());
+ halRequest->output_buffers = outputBuffers->array();
+ for (size_t i = 0; i < captureRequest->mOutputStreams.size(); i++) {
+ res = captureRequest->mOutputStreams.editItemAt(i)->
+ getBuffer(&outputBuffers->editItemAt(i));
+ if (res != OK) {
+ // Can't get output buffer from gralloc queue - this could be due to
+ // abandoned queue or other consumer misbehavior, so not a fatal
+ // error
+ ALOGE("RequestThread: Can't get output buffer, skipping request:"
+ " %s (%d)", strerror(-res), res);
+
+ return TIMED_OUT;
+ }
+ halRequest->num_output_buffers++;
+ }
+ totalNumBuffers += halRequest->num_output_buffers;
+
+ // Log request in the in-flight queue
+ sp<Camera3Device> parent = mParent.promote();
+ if (parent == NULL) {
+ // Should not happen, and nowhere to send errors to, so just log it
+ CLOGE("RequestThread: Parent is gone");
+ return INVALID_OPERATION;
+ }
+ res = parent->registerInFlight(halRequest->frame_number,
+ totalNumBuffers, captureRequest->mResultExtras,
+ /*hasInput*/halRequest->input_buffer != NULL,
+ captureRequest->mAeTriggerCancelOverride);
+ ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32 ".",
+ __FUNCTION__,
+ captureRequest->mResultExtras.requestId, captureRequest->mResultExtras.frameNumber,
+ captureRequest->mResultExtras.burstId);
+ if (res != OK) {
+ SET_ERR("RequestThread: Unable to register new in-flight request:"
+ " %s (%d)", strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ }
+
+ return OK;
+}
+
CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
Mutex::Autolock al(mLatestRequestMutex);
@@ -3063,11 +3136,13 @@
sp<Camera3StreamInterface>& stream) {
Mutex::Autolock l(mRequestLock);
- if (mNextRequest != nullptr) {
- for (const auto& s : mNextRequest->mOutputStreams) {
- if (stream == s) return true;
+ for (const auto& nextRequest : mNextRequests) {
+ if (!nextRequest.submitted) {
+ for (const auto& s : nextRequest.captureRequest->mOutputStreams) {
+ if (stream == s) return true;
+ }
+ if (stream == nextRequest.captureRequest->mInputStream) return true;
}
- if (stream == mNextRequest->mInputStream) return true;
}
for (const auto& request : mRequestQueue) {
@@ -3087,37 +3162,95 @@
return false;
}
-void Camera3Device::RequestThread::cleanUpFailedRequest(
- camera3_capture_request_t &request,
- sp<CaptureRequest> &nextRequest,
- Vector<camera3_stream_buffer_t> &outputBuffers) {
+void Camera3Device::RequestThread::cleanUpFailedRequests(bool sendRequestError) {
+ if (mNextRequests.empty()) {
+ return;
+ }
- if (request.settings != NULL) {
- nextRequest->mSettings.unlock(request.settings);
- }
- if (nextRequest->mInputStream != NULL) {
- nextRequest->mInputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
- nextRequest->mInputStream->returnInputBuffer(nextRequest->mInputBuffer);
- }
- for (size_t i = 0; i < request.num_output_buffers; i++) {
- outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
- nextRequest->mOutputStreams.editItemAt(i)->returnBuffer(
- outputBuffers[i], 0);
+ for (auto& nextRequest : mNextRequests) {
+ // Skip the ones that have been submitted successfully.
+ if (nextRequest.submitted) {
+ continue;
+ }
+
+ sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
+ camera3_capture_request_t* halRequest = &nextRequest.halRequest;
+ Vector<camera3_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;
+
+ if (halRequest->settings != NULL) {
+ captureRequest->mSettings.unlock(halRequest->settings);
+ }
+
+ if (captureRequest->mInputStream != NULL) {
+ captureRequest->mInputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ captureRequest->mInputStream->returnInputBuffer(captureRequest->mInputBuffer);
+ }
+
+ for (size_t i = 0; i < halRequest->num_output_buffers; i++) {
+ outputBuffers->editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
+ captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i], 0);
+ }
+
+ if (sendRequestError) {
+ Mutex::Autolock l(mRequestLock);
+ if (mListener != NULL) {
+ mListener->notifyError(
+ ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
+ captureRequest->mResultExtras);
+ }
+ }
}
Mutex::Autolock l(mRequestLock);
- mNextRequest.clear();
+ mNextRequests.clear();
}
-sp<Camera3Device::CaptureRequest>
- Camera3Device::RequestThread::waitForNextRequest() {
- status_t res;
- sp<CaptureRequest> nextRequest;
-
+void Camera3Device::RequestThread::waitForNextRequestBatch() {
// Optimized a bit for the simple steady-state case (single repeating
// request), to avoid putting that request in the queue temporarily.
Mutex::Autolock l(mRequestLock);
+ assert(mNextRequests.empty());
+
+ NextRequest nextRequest;
+ nextRequest.captureRequest = waitForNextRequestLocked();
+ if (nextRequest.captureRequest == nullptr) {
+ return;
+ }
+
+ nextRequest.halRequest = camera3_capture_request_t();
+ nextRequest.submitted = false;
+ mNextRequests.add(nextRequest);
+
+ // Wait for additional requests
+ const size_t batchSize = nextRequest.captureRequest->mBatchSize;
+
+ for (size_t i = 1; i < batchSize; i++) {
+ NextRequest additionalRequest;
+ additionalRequest.captureRequest = waitForNextRequestLocked();
+ if (additionalRequest.captureRequest == nullptr) {
+ break;
+ }
+
+ additionalRequest.halRequest = camera3_capture_request_t();
+ additionalRequest.submitted = false;
+ mNextRequests.add(additionalRequest);
+ }
+
+ if (mNextRequests.size() < batchSize) {
+ ALOGE("RequestThread: only get %d out of %d requests. Skipping requests.",
+ mNextRequests.size(), batchSize);
+ cleanUpFailedRequests(/*sendRequestError*/true);
+ }
+
+ return;
+}
+
+sp<Camera3Device::CaptureRequest>
+ Camera3Device::RequestThread::waitForNextRequestLocked() {
+ status_t res;
+ sp<CaptureRequest> nextRequest;
+
while (mRequestQueue.empty()) {
if (!mRepeatingRequests.empty()) {
// Always atomically enqueue all requests in a repeating request
@@ -3212,8 +3345,6 @@
handleAePrecaptureCancelRequest(nextRequest);
- mNextRequest = nextRequest;
-
return nextRequest;
}
@@ -3478,12 +3609,12 @@
clear();
}
-status_t Camera3Device::PreparerThread::prepare(sp<Camera3StreamInterface>& stream) {
+status_t Camera3Device::PreparerThread::prepare(int maxCount, sp<Camera3StreamInterface>& stream) {
status_t res;
Mutex::Autolock l(mLock);
- res = stream->startPrepare();
+ res = stream->startPrepare(maxCount);
if (res == OK) {
// No preparation needed, fire listener right off
ALOGV("%s: Stream %d already prepared", __FUNCTION__, stream->getId());
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 5287058..9d3c533 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -62,6 +62,7 @@
public CameraDeviceBase,
private camera3_callback_ops {
public:
+
Camera3Device(int id);
virtual ~Camera3Device();
@@ -143,6 +144,8 @@
virtual status_t tearDown(int streamId);
+ virtual status_t prepare(int maxCount, int streamId);
+
virtual uint32_t getDeviceVersion();
virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
@@ -261,6 +264,11 @@
// Used to cancel AE precapture trigger for devices that don't support
// CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
AeTriggerCancelOverride_t mAeTriggerCancelOverride;
+ // The number of requests that should be submitted to HAL at a time.
+ // For example, if batch size is 8, this request and the following 7
+ // requests will be submitted to HAL at a time. The batch size for
+ // the following 7 requests will be ignored by the request thread.
+ int mBatchSize;
};
typedef List<sp<CaptureRequest> > RequestList;
@@ -438,6 +446,11 @@
int64_t *lastFrameNumber = NULL);
/**
+ * Flush all pending requests in HAL.
+ */
+ status_t flush();
+
+ /**
* Queue a trigger to be dispatched with the next outgoing
* process_capture_request. The settings for that request only
* will be temporarily rewritten to add the trigger tag/value.
@@ -498,16 +511,30 @@
static const nsecs_t kRequestTimeout = 50e6; // 50 ms
- // Waits for a request, or returns NULL if times out.
- sp<CaptureRequest> waitForNextRequest();
+ // Used to prepare a batch of requests.
+ struct NextRequest {
+ sp<CaptureRequest> captureRequest;
+ camera3_capture_request_t halRequest;
+ Vector<camera3_stream_buffer_t> outputBuffers;
+ bool submitted;
+ };
- // Return buffers, etc, for a request that couldn't be fully
- // constructed. The buffers will be returned in the ERROR state
- // to mark them as not having valid data.
- // All arguments will be modified.
- void cleanUpFailedRequest(camera3_capture_request_t &request,
- sp<CaptureRequest> &nextRequest,
- Vector<camera3_stream_buffer_t> &outputBuffers);
+ // Wait for the next batch of requests and put them in mNextRequests. mNextRequests will
+ // be empty if it times out.
+ void waitForNextRequestBatch();
+
+ // Waits for a request, or returns NULL if it times out. Must be called with mRequestLock held.
+ sp<CaptureRequest> waitForNextRequestLocked();
+
+ // Prepare HAL requests and output buffers in mNextRequests. Return TIMED_OUT if getting any
+ // output buffer times out. If an error is returned, the caller should clean up the pending
+ // request batch.
+ status_t prepareHalRequests();
+
+ // Return buffers, etc., for requests in mNextRequests that couldn't be fully constructed and
+ // send request errors if sendRequestError is true. The buffers will be returned in the
+ // ERROR state to mark them as not having valid data. mNextRequests will be cleared.
+ void cleanUpFailedRequests(bool sendRequestError);
// Pause handling
bool waitIfPaused();
@@ -536,10 +563,13 @@
Condition mRequestSignal;
RequestList mRequestQueue;
RequestList mRepeatingRequests;
- // The next request being prepped for submission to the HAL, no longer
+ // The next batch of requests being prepped for submission to the HAL, no longer
// on the request queue. Read-only even with mRequestLock held, outside
// of threadLoop
- sp<const CaptureRequest> mNextRequest;
+ Vector<NextRequest> mNextRequests;
+
+ // Protects flush() and the submission of a request batch to the HAL.
+ Mutex mFlushLock;
bool mReconfigured;
@@ -698,10 +728,11 @@
void setNotificationListener(NotificationListener *listener);
/**
- * Queue up a stream to be prepared. Streams are processed by
- * a background thread in FIFO order
+ * Queue up a stream to be prepared. Streams are processed by a background thread in FIFO
+ * order. Pre-allocate up to maxCount buffers for the stream, or the maximum number needed
+ * for the pipeline if maxCount is ALLOCATE_PIPELINE_MAX.
*/
- status_t prepare(sp<camera3::Camera3StreamInterface>& stream);
+ status_t prepare(int maxCount, sp<camera3::Camera3StreamInterface>& stream);
/**
* Cancel all current and pending stream preparation
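
The new mBatchSize field and the mNextRequests batch described above replace the single mNextRequest pointer: the request thread gathers up to batchSize requests, submits them to the HAL together, and calls cleanUpFailedRequests() when the batch cannot be filled. A minimal standalone sketch of that flow, with hypothetical FakeRequest and submitBatchToHal() stand-ins rather than the real Camera3Device types:

    #include <cstdio>
    #include <deque>
    #include <vector>

    struct FakeRequest { int frameNumber; int batchSize; };  // batchSize only honored on the first request

    static bool submitBatchToHal(const std::vector<FakeRequest>& batch) {
        std::printf("submitting %zu request(s) as one batch\n", batch.size());
        return true;
    }

    static void cleanUpFailedBatch(std::vector<FakeRequest>& batch, bool sendRequestError) {
        // In the real code the output buffers would be returned in the ERROR state here.
        std::printf("dropping %zu request(s), sendRequestError=%d\n", batch.size(), sendRequestError);
        batch.clear();
    }

    void processOneBatch(std::deque<FakeRequest>& queue) {
        if (queue.empty()) return;
        // The batch size of the first request decides how many requests go out together;
        // the batch sizes carried by the rest of the batch are ignored.
        size_t batchSize = static_cast<size_t>(queue.front().batchSize);
        std::vector<FakeRequest> nextRequests;
        while (nextRequests.size() < batchSize && !queue.empty()) {
            nextRequests.push_back(queue.front());
            queue.pop_front();
        }
        if (nextRequests.size() < batchSize) {
            cleanUpFailedBatch(nextRequests, /*sendRequestError*/true);
            return;
        }
        submitBatchToHal(nextRequests);
    }

    int main() {
        std::deque<FakeRequest> queue = {{1, 4}, {2, 1}, {3, 1}, {4, 1}};
        processOneBatch(queue);   // submits frames 1..4 together
        processOneBatch(queue);   // queue is now empty, nothing to do
        return 0;
    }
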
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index ecb8ac8..1d9d04f 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -92,6 +92,10 @@
return OK;
}
+bool Camera3DummyStream::isVideoStream() const {
+ return false;
+}
+
}; // namespace camera3
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 3a3dbf4..97c0c96 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -54,6 +54,11 @@
status_t setTransform(int transform);
+ /**
+ * Return whether this output stream is used for video encoding.
+ */
+ bool isVideoStream() const;
+
protected:
/**
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 8c611d5..3f0a736 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -426,6 +426,17 @@
return res;
}
+bool Camera3OutputStream::isVideoStream() const {
+ uint32_t usage = 0;
+ status_t res = getEndpointUsage(&usage);
+ if (res != OK) {
+ ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
+ return false;
+ }
+
+ return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
+}
+
}; // namespace camera3
}; // namespace android
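
isVideoStream() above keys off the consumer's gralloc usage flags: a stream is treated as a video stream when the HW video encoder usage bit is set, and a failed usage query is treated as "not video". A minimal standalone sketch of the same test, where queryEndpointUsage() is a hypothetical stand-in for getEndpointUsage() and the flag value is assumed from gralloc.h:

    #include <cstdint>
    #include <cstdio>

    // Assumed value of GRALLOC_USAGE_HW_VIDEO_ENCODER from gralloc.h; verify against your tree.
    static const uint32_t kGrallocUsageHwVideoEncoder = 0x00010000u;

    static bool queryEndpointUsage(uint32_t* usage) {
        *usage = kGrallocUsageHwVideoEncoder;  // pretend the consumer is a video encoder
        return true;
    }

    bool isVideoStream() {
        uint32_t usage = 0;
        if (!queryEndpointUsage(&usage)) {
            // Treat a failed query as "not a video stream", as the patch does on error.
            return false;
        }
        return (usage & kGrallocUsageHwVideoEncoder) != 0;
    }

    int main() {
        std::printf("isVideoStream: %s\n", isVideoStream() ? "true" : "false");
        return 0;
    }
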
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 941d693..3c083ec 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -64,6 +64,11 @@
*/
status_t setTransform(int transform);
+ /**
+ * Return whether this output stream is used for video encoding.
+ */
+ bool isVideoStream() const;
+
protected:
Camera3OutputStream(int id, camera3_stream_type_t type,
uint32_t width, uint32_t height, int format,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index aae72cf..df89b34 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -34,6 +34,11 @@
* HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
*/
virtual status_t setTransform(int transform) = 0;
+
+ /**
+ * Return whether this output stream is used for video encoding.
+ */
+ virtual bool isVideoStream() const = 0;
};
} // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 2527fd6..96299b3 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -53,7 +53,8 @@
mName(String8::format("Camera3Stream[%d]", id)),
mMaxSize(maxSize),
mState(STATE_CONSTRUCTED),
- mStatusId(StatusTracker::NO_STATUS_ID) {
+ mStatusId(StatusTracker::NO_STATUS_ID),
+ mLastMaxCount(Camera3StreamInterface::ALLOCATE_PIPELINE_MAX) {
camera3_stream::stream_type = type;
camera3_stream::width = width;
@@ -252,12 +253,18 @@
return mStreamUnpreparable;
}
-status_t Camera3Stream::startPrepare() {
+status_t Camera3Stream::startPrepare(int maxCount) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
status_t res = OK;
+ if (maxCount < 0) {
+ ALOGE("%s: Stream %d: Can't prepare stream if max buffer count (%d) is < 0",
+ __FUNCTION__, mId, maxCount);
+ return BAD_VALUE;
+ }
+
// This function should be only called when the stream is configured already.
if (mState != STATE_CONFIGURED) {
ALOGE("%s: Stream %d: Can't prepare stream if stream is not in CONFIGURED "
@@ -279,9 +286,19 @@
return INVALID_OPERATION;
}
+
+
+ size_t pipelineMax = getBufferCountLocked();
+ size_t clampedCount = (pipelineMax < static_cast<size_t>(maxCount)) ?
+ pipelineMax : static_cast<size_t>(maxCount);
+ size_t bufferCount = (maxCount == Camera3StreamInterface::ALLOCATE_PIPELINE_MAX) ?
+ pipelineMax : clampedCount;
+
+ mPrepared = bufferCount <= mLastMaxCount;
+
if (mPrepared) return OK;
- size_t bufferCount = getBufferCountLocked();
+ mLastMaxCount = bufferCount;
mPreparedBuffers.insertAt(camera3_stream_buffer_t(), /*index*/0, bufferCount);
mPreparedBufferIdx = 0;
@@ -438,8 +455,9 @@
res = mOutputBufferReturnedSignal.waitRelative(mLock, kWaitForBufferDuration);
if (res != OK) {
if (res == TIMED_OUT) {
- ALOGE("%s: wait for output buffer return timed out after %lldms", __FUNCTION__,
- kWaitForBufferDuration / 1000000LL);
+ ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
+ __FUNCTION__, kWaitForBufferDuration / 1000000LL,
+ camera3_stream::max_buffers);
}
return res;
}
@@ -469,9 +487,12 @@
status_t res = returnBufferLocked(buffer, timestamp);
if (res == OK) {
fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true);
- mOutputBufferReturnedSignal.signal();
}
+ // Even if returning the buffer failed, we still want to signal whoever is waiting for the
+ // buffer to be returned.
+ mOutputBufferReturnedSignal.signal();
+
return res;
}
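
The startPrepare(maxCount) change above picks the number of buffers to allocate from maxCount, the pipeline maximum, and the count covered by the previous call (mLastMaxCount), skipping the work when nothing new is needed. A minimal standalone sketch of just that selection logic, with a hypothetical buffersToPrepare() helper instead of the real Camera3Stream code:

    #include <cassert>
    #include <cstddef>

    enum { ALLOCATE_PIPELINE_MAX = 0 };  // same sentinel as Camera3StreamInterface

    struct PrepareState {
        std::size_t pipelineMax;    // buffers the pipeline needs at most (getBufferCountLocked())
        std::size_t lastMaxCount;   // buffers covered by the previous prepare call
    };

    // Returns the number of buffers to prepare, or 0 if no work is needed.
    std::size_t buffersToPrepare(PrepareState& s, int maxCount) {
        if (maxCount < 0) return 0;  // rejected with BAD_VALUE in the real code
        std::size_t clamped = (s.pipelineMax < static_cast<std::size_t>(maxCount)) ?
                s.pipelineMax : static_cast<std::size_t>(maxCount);
        std::size_t bufferCount = (maxCount == ALLOCATE_PIPELINE_MAX) ? s.pipelineMax : clamped;
        if (bufferCount <= s.lastMaxCount) return 0;  // already prepared enough buffers
        s.lastMaxCount = bufferCount;
        return bufferCount;
    }

    int main() {
        PrepareState s{/*pipelineMax*/4, /*lastMaxCount*/0};
        assert(buffersToPrepare(s, 2) == 2);                      // prepare 2 of 4
        assert(buffersToPrepare(s, 2) == 0);                      // repeat call is a no-op
        assert(buffersToPrepare(s, ALLOCATE_PIPELINE_MAX) == 4);  // grow to the pipeline max
        assert(buffersToPrepare(s, 100) == 0);                    // 100 clamps to 4, already covered
        return 0;
    }
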
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index bab2177..753280b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -188,7 +188,9 @@
/**
* Start stream preparation. May only be called in the CONFIGURED state,
- * when no valid buffers have yet been returned to this stream.
+ * when no valid buffers have yet been returned to this stream. Prepares
+ * up to maxCount buffers, or the maximum number of buffers needed by the
+ * pipeline if maxCount is ALLOCATE_PIPELINE_MAX.
*
* If no prepartion is necessary, returns OK and does not transition to
* PREPARING state. Otherwise, returns NOT_ENOUGH_DATA and transitions
@@ -204,7 +206,7 @@
* INVALID_OPERATION if called when not in CONFIGURED state, or a
* valid buffer has already been returned to this stream.
*/
- status_t startPrepare();
+ status_t startPrepare(int maxCount);
/**
* Check if the stream is mid-preparing.
@@ -444,6 +446,9 @@
Vector<camera3_stream_buffer_t> mPreparedBuffers;
size_t mPreparedBufferIdx;
+ // Number of buffers allocated on last prepare call.
+ int mLastMaxCount;
+
}; // class Camera3Stream
}; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index c086eaf..54009ae 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -34,6 +34,11 @@
*/
class Camera3StreamInterface : public virtual RefBase {
public:
+
+ enum {
+ ALLOCATE_PIPELINE_MAX = 0, // Allocate the maximum number of buffers used by a given surface
+ };
+
/**
* Get the stream's ID
*/
@@ -98,7 +103,9 @@
/**
* Start stream preparation. May only be called in the CONFIGURED state,
- * when no valid buffers have yet been returned to this stream.
+ * when no valid buffers have yet been returned to this stream. Prepares
+ * up to maxCount buffers, or the maximum number of buffers needed by the
+ * pipeline if maxCount is ALLOCATE_PIPELINE_MAX.
*
* If no prepartion is necessary, returns OK and does not transition to
* PREPARING state. Otherwise, returns NOT_ENOUGH_DATA and transitions
@@ -112,7 +119,7 @@
* INVALID_OPERATION if called when not in CONFIGURED state, or a
* valid buffer has already been returned to this stream.
*/
- virtual status_t startPrepare() = 0;
+ virtual status_t startPrepare(int maxCount) = 0;
/**
* Check if the stream is mid-preparing.
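
With ALLOCATE_PIPELINE_MAX defined above, a caller of the new prepare(maxCount, streamId) overload can either ask for a specific number of pre-allocated buffers or defer to the pipeline maximum. A minimal hypothetical caller-side sketch (Device is a stand-in, not the real interface):

    #include <cstdio>

    enum { ALLOCATE_PIPELINE_MAX = 0 };

    struct Device {
        int prepare(int maxCount, int streamId) {
            std::printf("prepare stream %d, maxCount=%d\n", streamId, maxCount);
            return 0;  // OK
        }
    };

    int main() {
        Device dev;
        dev.prepare(/*maxCount*/2, /*streamId*/0);          // pre-allocate only 2 buffers
        dev.prepare(ALLOCATE_PIPELINE_MAX, /*streamId*/1);  // allocate the pipeline maximum
        return 0;
    }
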
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index e54cc5a..4790754 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -96,6 +96,15 @@
const size_t SIZE = 256;
char buffer[SIZE];
+ if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
+ result.format("Permission Denial: "
+ "can't dump ResourceManagerService from pid=%d, uid=%d\n",
+ IPCThreadState::self()->getCallingPid(),
+ IPCThreadState::self()->getCallingUid());
+ write(fd, result.string(), result.size());
+ return PERMISSION_DENIED;
+ }
+
snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
result.append(buffer);
result.append(" Policies:\n");