Merge "audio policy: disable silencing of background UID record" into pi-dev
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 5fd4886..2829b90 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -4807,6 +4807,8 @@
* of points can be less than max (that is, the request doesn't have to
* always provide a curve with number of points equivalent to
* ACAMERA_TONEMAP_MAX_CURVE_POINTS).</p>
+ * <p>For devices with MONOCHROME capability, only the red channel is used. The green and blue
+ * channels are ignored.</p>
* <p>A few examples, and their corresponding graphical mappings; these
* only specify the red channel and the precision is limited to 4
* digits, for conciseness.</p>
@@ -7094,6 +7096,12 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA = 11,
+ /**
+ * <p>The camera device is a monochrome camera that doesn't contain a color filter array,
+ * and the pixel values on U and V planes are all 128.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME = 12,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
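
Note: the tonemap curves referenced above are programmed per channel through the NDK capture request API; on a MONOCHROME device only the red curve is consulted. A minimal, illustrative sketch (not part of this change) of setting a linear red curve:

    #include <stdint.h>
    #include <camera/NdkCameraMetadataTags.h>
    #include <camera/NdkCaptureRequest.h>

    // Illustrative only: program a linear tonemap curve on the red channel, which is
    // the only channel a MONOCHROME device reads. A color device would also need
    // ACAMERA_TONEMAP_CURVE_GREEN and ACAMERA_TONEMAP_CURVE_BLUE.
    static camera_status_t setLinearRedCurve(ACaptureRequest* request) {
        const uint8_t mode = ACAMERA_TONEMAP_MODE_CONTRAST_CURVE;
        camera_status_t status =
                ACaptureRequest_setEntry_u8(request, ACAMERA_TONEMAP_MODE, 1, &mode);
        if (status != ACAMERA_OK) return status;
        const float curve[] = {0.0f, 0.0f, 1.0f, 1.0f};  // (Pin, Pout) pairs: identity mapping
        return ACaptureRequest_setEntry_float(request, ACAMERA_TONEMAP_CURVE_RED, 4, curve);
    }
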
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 50c1295..ac2e46e 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -2248,6 +2248,16 @@
staticPosition = mStaticProxy->getPosition().unsignedValue();
}
+ // See b/74409267. Connecting to a BT A2DP device supporting multiple codecs
+ // causes a lot of churn on the service side, and it can reject starting
+ // playback of a previously created track. May also apply to other cases.
+ const int INITIAL_RETRIES = 3;
+ int retries = INITIAL_RETRIES;
+retry:
+ if (retries < INITIAL_RETRIES) {
+ // See the comment for clearAudioConfigCache at the start of the function.
+ AudioSystem::clearAudioConfigCache();
+ }
mFlags = mOrigFlags;
// If a new IAudioTrack is successfully created, createTrack_l() will modify the
@@ -2256,7 +2266,10 @@
// If a new IAudioTrack cannot be created, the previous (dead) instance will be left intact.
status_t result = createTrack_l();
- if (result == NO_ERROR) {
+ if (result != NO_ERROR) {
+ ALOGW("%s(): createTrack_l failed, do not retry", __func__);
+ retries = 0;
+ } else {
// take the frames that will be lost by track recreation into account in saved position
// For streaming tracks, this is the amount we obtained from the user/client
// (not the number actually consumed at the server - those are already lost).
@@ -2301,7 +2314,10 @@
mFramesWrittenAtRestore = mFramesWrittenServerOffset;
}
if (result != NO_ERROR) {
- ALOGW("restoreTrack_l() failed status %d", result);
+ ALOGW("%s() failed status %d, retries %d", __func__, result, retries);
+ if (--retries > 0) {
+ goto retry;
+ }
mState = STATE_STOPPED;
mReleased = 0;
}
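
Note: the goto above implements a small bounded-retry policy. A standalone sketch of the same control flow, with hypothetical tryRecreate()/tryRestart() standing in for createTrack_l() and the subsequent restart of playback on the new track:

    #include <media/AudioSystem.h>
    #include <utils/Errors.h>

    using namespace android;

    status_t tryRecreate();  // hypothetical stand-in for createTrack_l()
    status_t tryRestart();   // hypothetical stand-in for restarting playback on the new track

    status_t restoreWithRetries() {
        const int INITIAL_RETRIES = 3;
        int retries = INITIAL_RETRIES;
        status_t result;
        do {
            if (retries < INITIAL_RETRIES) {
                // A previous attempt failed: the cached audio configuration may be stale
                // (e.g. after an A2DP codec switch), so drop it before trying again.
                AudioSystem::clearAudioConfigCache();
            }
            result = tryRecreate();
            if (result != NO_ERROR) {
                break;  // track creation failures are not retried
            }
            result = tryRestart();
        } while (result != NO_ERROR && --retries > 0);
        return result;
    }
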
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index a20f1f2..77cfe4d 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -87,7 +87,7 @@
GET_AUDIO_HW_SYNC_FOR_SESSION,
SYSTEM_READY,
FRAME_COUNT_HAL,
- LIST_MICROPHONES,
+ GET_MICROPHONES,
};
#define MAX_ITEMS_PER_LIST 1024
@@ -849,7 +849,7 @@
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
- status_t status = remote()->transact(LIST_MICROPHONES, data, &reply);
+ status_t status = remote()->transact(GET_MICROPHONES, data, &reply);
if (status != NO_ERROR ||
(status = (status_t)reply.readInt32()) != NO_ERROR) {
return status;
@@ -1444,7 +1444,7 @@
reply->writeInt64( frameCountHAL((audio_io_handle_t) data.readInt32()) );
return NO_ERROR;
} break;
- case LIST_MICROPHONES: {
+ case GET_MICROPHONES: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
std::vector<media::MicrophoneInfo> microphones;
status_t status = getMicrophones(&microphones);
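
Note: for context, a caller-side sketch of the renamed query. It assumes AudioSystem::getMicrophones() is the client wrapper that ends up issuing the GET_MICROPHONES transaction shown above:

    #define LOG_TAG "MicListSketch"
    #include <utils/Log.h>
    #include <media/AudioSystem.h>
    #include <media/MicrophoneInfo.h>
    #include <vector>

    // Sketch only: enumerate the microphones reported by audioserver.
    void dumpMicrophoneCount() {
        std::vector<android::media::MicrophoneInfo> mics;
        if (android::AudioSystem::getMicrophones(&mics) == android::NO_ERROR) {
            ALOGD("audioserver reports %zu microphone(s)", mics.size());
        }
    }
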
diff --git a/media/libaudiohal/2.0/DeviceHalHidl.cpp b/media/libaudiohal/2.0/DeviceHalHidl.cpp
index 0d9c6c4..5b99d70 100644
--- a/media/libaudiohal/2.0/DeviceHalHidl.cpp
+++ b/media/libaudiohal/2.0/DeviceHalHidl.cpp
@@ -53,7 +53,7 @@
audio_devices_t device, const char* halAddress, DeviceAddress* address) {
address->device = AudioDevice(device);
- if (address == nullptr || strnlen(halAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
+ if (halAddress == nullptr || strnlen(halAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
return OK;
}
const bool isInput = (device & AUDIO_DEVICE_BIT_IN) != 0;
@@ -346,6 +346,12 @@
return processReturn("setAudioPortConfig", mDevice->setAudioPortConfig(hidlConfig));
}
+status_t DeviceHalHidl::getMicrophones(
+ std::vector<media::MicrophoneInfo> *microphonesInfo __unused) {
+ if (mDevice == 0) return NO_INIT;
+ return INVALID_OPERATION;
+}
+
status_t DeviceHalHidl::dump(int fd) {
if (mDevice == 0) return NO_INIT;
native_handle_t* hidlHandle = native_handle_create(1, 0);
diff --git a/media/libaudiohal/2.0/DeviceHalHidl.h b/media/libaudiohal/2.0/DeviceHalHidl.h
index 8651b51..3c1cb59 100644
--- a/media/libaudiohal/2.0/DeviceHalHidl.h
+++ b/media/libaudiohal/2.0/DeviceHalHidl.h
@@ -107,6 +107,9 @@
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
+ // List microphones
+ virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+
virtual status_t dump(int fd);
private:
diff --git a/media/libaudiohal/2.0/DeviceHalLocal.cpp b/media/libaudiohal/2.0/DeviceHalLocal.cpp
index fc098f5..ec3bf78 100644
--- a/media/libaudiohal/2.0/DeviceHalLocal.cpp
+++ b/media/libaudiohal/2.0/DeviceHalLocal.cpp
@@ -184,6 +184,11 @@
return INVALID_OPERATION;
}
+status_t DeviceHalLocal::getMicrophones(
+ std::vector<media::MicrophoneInfo> *microphones __unused) {
+ return INVALID_OPERATION;
+}
+
status_t DeviceHalLocal::dump(int fd) {
return mDev->dump(mDev, fd);
}
diff --git a/media/libaudiohal/2.0/DeviceHalLocal.h b/media/libaudiohal/2.0/DeviceHalLocal.h
index 865f296..aec201a 100644
--- a/media/libaudiohal/2.0/DeviceHalLocal.h
+++ b/media/libaudiohal/2.0/DeviceHalLocal.h
@@ -100,6 +100,9 @@
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
+ // List microphones
+ virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+
virtual status_t dump(int fd);
void closeOutputStream(struct audio_stream_out *stream_out);
diff --git a/media/libaudiohal/2.0/StreamHalHidl.cpp b/media/libaudiohal/2.0/StreamHalHidl.cpp
index 0cafa36..9869cd2 100644
--- a/media/libaudiohal/2.0/StreamHalHidl.cpp
+++ b/media/libaudiohal/2.0/StreamHalHidl.cpp
@@ -555,6 +555,11 @@
}
}
+status_t StreamOutHalHidl::updateSourceMetadata(const SourceMetadata& /* sourceMetadata */) {
+ // Audio HAL V2.0 does not support propagating source metadata
+ return INVALID_OPERATION;
+}
+
void StreamOutHalHidl::onWriteReady() {
sp<StreamOutHalInterfaceCallback> callback = mCallback.promote();
if (callback == 0) return;
@@ -749,4 +754,15 @@
}
}
+status_t StreamInHalHidl::getActiveMicrophones(
+ std::vector<media::MicrophoneInfo> *microphones __unused) {
+ if (mStream == 0) return NO_INIT;
+ return INVALID_OPERATION;
+}
+
+status_t StreamInHalHidl::updateSinkMetadata(const SinkMetadata& /* sinkMetadata */) {
+ // Audio HAL V2.0 does not support propagating sink metadata
+ return INVALID_OPERATION;
+}
+
} // namespace android
diff --git a/media/libaudiohal/2.0/StreamHalHidl.h b/media/libaudiohal/2.0/StreamHalHidl.h
index d4ab943..ebad8ae 100644
--- a/media/libaudiohal/2.0/StreamHalHidl.h
+++ b/media/libaudiohal/2.0/StreamHalHidl.h
@@ -161,6 +161,9 @@
// Return a recent count of the number of audio frames presented to an external observer.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
+ // Called when the metadata of the stream's source has been changed.
+ status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
+
// Methods used by StreamOutCallback (HIDL).
void onWriteReady();
void onDrainReady();
@@ -210,6 +213,12 @@
// the clock time associated with that frame count.
virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
+ // Get active microphones
+ virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+
+ // Called when the metadata of the stream's sink has been changed.
+ status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
+
private:
friend class DeviceHalHidl;
typedef MessageQueue<ReadParameters, hardware::kSynchronizedReadWrite> CommandMQ;
diff --git a/media/libaudiohal/2.0/StreamHalLocal.cpp b/media/libaudiohal/2.0/StreamHalLocal.cpp
index 8d61e24..98107e5 100644
--- a/media/libaudiohal/2.0/StreamHalLocal.cpp
+++ b/media/libaudiohal/2.0/StreamHalLocal.cpp
@@ -231,6 +231,19 @@
return mStream->get_presentation_position(mStream, frames, timestamp);
}
+status_t StreamOutHalLocal::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
+ if (mStream->update_source_metadata == nullptr) {
+ return INVALID_OPERATION;
+ }
+ const source_metadata_t metadata {
+ .track_count = sourceMetadata.tracks.size(),
+ // const cast is fine as it is in a const structure
+ .tracks = const_cast<playback_track_metadata*>(sourceMetadata.tracks.data()),
+ };
+ mStream->update_source_metadata(mStream, &metadata);
+ return OK;
+}
+
status_t StreamOutHalLocal::start() {
if (mStream->start == NULL) return INVALID_OPERATION;
return mStream->start(mStream);
@@ -292,6 +305,19 @@
return mStream->get_capture_position(mStream, frames, time);
}
+status_t StreamInHalLocal::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
+ if (mStream->update_sink_metadata == nullptr) {
+ return INVALID_OPERATION;
+ }
+ const sink_metadata_t metadata {
+ .track_count = sinkMetadata.tracks.size(),
+ // const cast is fine as it is in a const structure
+ .tracks = const_cast<record_track_metadata*>(sinkMetadata.tracks.data()),
+ };
+ mStream->update_sink_metadata(mStream, &metadata);
+ return OK;
+}
+
status_t StreamInHalLocal::start() {
if (mStream->start == NULL) return INVALID_OPERATION;
return mStream->start(mStream);
@@ -313,4 +339,9 @@
return mStream->get_mmap_position(mStream, position);
}
+status_t StreamInHalLocal::getActiveMicrophones(
+ std::vector<media::MicrophoneInfo> *microphones __unused) {
+ return INVALID_OPERATION;
+}
+
} // namespace android
diff --git a/media/libaudiohal/2.0/StreamHalLocal.h b/media/libaudiohal/2.0/StreamHalLocal.h
index c7136df..cda8d0c 100644
--- a/media/libaudiohal/2.0/StreamHalLocal.h
+++ b/media/libaudiohal/2.0/StreamHalLocal.h
@@ -149,6 +149,9 @@
// Get current read/write position in the mmap buffer
virtual status_t getMmapPosition(struct audio_mmap_position *position);
+ // Called when the metadata of the stream's source has been changed.
+ status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
+
private:
audio_stream_out_t *mStream;
wp<StreamOutHalInterfaceCallback> mCallback;
@@ -194,6 +197,12 @@
// Get current read/write position in the mmap buffer
virtual status_t getMmapPosition(struct audio_mmap_position *position);
+ // Get active microphones
+ virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+
+ // Called when the metadata of the stream's sink has been changed.
+ status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
+
private:
audio_stream_in_t *mStream;
diff --git a/media/libaudiohal/4.0/Android.bp b/media/libaudiohal/4.0/Android.bp
index 3d104ab..833defa 100644
--- a/media/libaudiohal/4.0/Android.bp
+++ b/media/libaudiohal/4.0/Android.bp
@@ -26,6 +26,7 @@
shared_libs: [
"libaudiohal_deathhandler",
"libaudioutils",
+ "libbinder",
"libcutils",
"liblog",
"libutils",
diff --git a/media/libaudiohal/4.0/ConversionHelperHidl.cpp b/media/libaudiohal/4.0/ConversionHelperHidl.cpp
index a3cc28f..fe27504 100644
--- a/media/libaudiohal/4.0/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/4.0/ConversionHelperHidl.cpp
@@ -22,6 +22,11 @@
#include "ConversionHelperHidl.h"
+using ::android::hardware::audio::V4_0::AudioMicrophoneChannelMapping;
+using ::android::hardware::audio::V4_0::AudioMicrophoneDirectionality;
+using ::android::hardware::audio::V4_0::AudioMicrophoneLocation;
+using ::android::hardware::audio::V4_0::DeviceAddress;
+using ::android::hardware::audio::V4_0::MicrophoneInfo;
using ::android::hardware::audio::V4_0::Result;
namespace android {
@@ -101,5 +106,132 @@
ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
}
+// TODO: Use the same implementation in the hal when it moves to a util library.
+std::string deviceAddressToHal(const DeviceAddress& address) {
+ // HAL assumes that the address is NUL-terminated.
+ char halAddress[AUDIO_DEVICE_MAX_ADDRESS_LEN];
+ memset(halAddress, 0, sizeof(halAddress));
+ audio_devices_t halDevice = static_cast<audio_devices_t>(address.device);
+ const bool isInput = (halDevice & AUDIO_DEVICE_BIT_IN) != 0;
+ if (isInput) halDevice &= ~AUDIO_DEVICE_BIT_IN;
+ if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_ALL_A2DP) != 0) ||
+ (isInput && (halDevice & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) != 0)) {
+ snprintf(halAddress, sizeof(halAddress), "%02X:%02X:%02X:%02X:%02X:%02X",
+ address.address.mac[0], address.address.mac[1], address.address.mac[2],
+ address.address.mac[3], address.address.mac[4], address.address.mac[5]);
+ } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_IP) != 0) ||
+ (isInput && (halDevice & AUDIO_DEVICE_IN_IP) != 0)) {
+ snprintf(halAddress, sizeof(halAddress), "%d.%d.%d.%d", address.address.ipv4[0],
+ address.address.ipv4[1], address.address.ipv4[2], address.address.ipv4[3]);
+ } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_ALL_USB) != 0) ||
+ (isInput && (halDevice & AUDIO_DEVICE_IN_ALL_USB) != 0)) {
+ snprintf(halAddress, sizeof(halAddress), "card=%d;device=%d", address.address.alsa.card,
+ address.address.alsa.device);
+ } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_BUS) != 0) ||
+ (isInput && (halDevice & AUDIO_DEVICE_IN_BUS) != 0)) {
+ snprintf(halAddress, sizeof(halAddress), "%s", address.busAddress.c_str());
+ } else if ((!isInput && (halDevice & AUDIO_DEVICE_OUT_REMOTE_SUBMIX) != 0) ||
+ (isInput && (halDevice & AUDIO_DEVICE_IN_REMOTE_SUBMIX) != 0)) {
+ snprintf(halAddress, sizeof(halAddress), "%s", address.rSubmixAddress.c_str());
+ } else {
+ snprintf(halAddress, sizeof(halAddress), "%s", address.busAddress.c_str());
+ }
+ return halAddress;
+}
+
+//local conversion helpers
+
+audio_microphone_channel_mapping_t channelMappingToHal(AudioMicrophoneChannelMapping mapping) {
+ switch (mapping) {
+ case AudioMicrophoneChannelMapping::UNUSED:
+ return AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
+ case AudioMicrophoneChannelMapping::DIRECT:
+ return AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT;
+ case AudioMicrophoneChannelMapping::PROCESSED:
+ return AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED;
+ default:
+ LOG_ALWAYS_FATAL("Unknown channelMappingToHal conversion %d", mapping);
+ }
+}
+
+audio_microphone_location_t locationToHal(AudioMicrophoneLocation location) {
+ switch (location) {
+ case AudioMicrophoneLocation::UNKNOWN:
+ return AUDIO_MICROPHONE_LOCATION_UNKNOWN;
+ case AudioMicrophoneLocation::MAINBODY:
+ return AUDIO_MICROPHONE_LOCATION_MAINBODY;
+ case AudioMicrophoneLocation::MAINBODY_MOVABLE:
+ return AUDIO_MICROPHONE_LOCATION_MAINBODY_MOVABLE;
+ case AudioMicrophoneLocation::PERIPHERAL:
+ return AUDIO_MICROPHONE_LOCATION_PERIPHERAL;
+ default:
+ LOG_ALWAYS_FATAL("Unknown locationToHal conversion %d", location);
+ }
+}
+audio_microphone_directionality_t directionalityToHal(AudioMicrophoneDirectionality dir) {
+ switch (dir) {
+ case AudioMicrophoneDirectionality::UNKNOWN:
+ return AUDIO_MICROPHONE_DIRECTIONALITY_UNKNOWN;
+ case AudioMicrophoneDirectionality::OMNI:
+ return AUDIO_MICROPHONE_DIRECTIONALITY_OMNI;
+ case AudioMicrophoneDirectionality::BI_DIRECTIONAL:
+ return AUDIO_MICROPHONE_DIRECTIONALITY_BI_DIRECTIONAL;
+ case AudioMicrophoneDirectionality::CARDIOID:
+ return AUDIO_MICROPHONE_DIRECTIONALITY_CARDIOID;
+ case AudioMicrophoneDirectionality::HYPER_CARDIOID:
+ return AUDIO_MICROPHONE_DIRECTIONALITY_HYPER_CARDIOID;
+ case AudioMicrophoneDirectionality::SUPER_CARDIOID:
+ return AUDIO_MICROPHONE_DIRECTIONALITY_SUPER_CARDIOID;
+ default:
+ LOG_ALWAYS_FATAL("Unknown directionalityToHal conversion %d", dir);
+ }
+}
+
+// static
+void ConversionHelperHidl::microphoneInfoToHal(const MicrophoneInfo& src,
+ audio_microphone_characteristic_t *pDst) {
+ if (pDst != NULL) {
+ snprintf(pDst->device_id, sizeof(pDst->device_id),
+ "%s", src.deviceId.c_str());
+ pDst->device = static_cast<audio_devices_t>(src.deviceAddress.device);
+ snprintf(pDst->address, sizeof(pDst->address),
+ "%s", deviceAddressToHal(src.deviceAddress).c_str());
+ if (src.channelMapping.size() > AUDIO_CHANNEL_COUNT_MAX) {
+ ALOGW("microphoneInfoToStruct found %zu channelMapping elements. Max expected is %d",
+ src.channelMapping.size(), AUDIO_CHANNEL_COUNT_MAX);
+ }
+ size_t ch;
+ for (ch = 0; ch < src.channelMapping.size() && ch < AUDIO_CHANNEL_COUNT_MAX; ch++) {
+ pDst->channel_mapping[ch] = channelMappingToHal(src.channelMapping[ch]);
+ }
+ for (; ch < AUDIO_CHANNEL_COUNT_MAX; ch++) {
+ pDst->channel_mapping[ch] = AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
+ }
+ pDst->location = locationToHal(src.location);
+ pDst->group = (audio_microphone_group_t)src.group;
+ pDst->index_in_the_group = (unsigned int)src.indexInTheGroup;
+ pDst->sensitivity = src.sensitivity;
+ pDst->max_spl = src.maxSpl;
+ pDst->min_spl = src.minSpl;
+ pDst->directionality = directionalityToHal(src.directionality);
+ pDst->num_frequency_responses = (unsigned int)src.frequencyResponse.size();
+ if (pDst->num_frequency_responses > AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES) {
+ ALOGW("microphoneInfoToStruct found %d frequency responses. Max expected is %d",
+ pDst->num_frequency_responses, AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES);
+ pDst->num_frequency_responses = AUDIO_MICROPHONE_MAX_FREQUENCY_RESPONSES;
+ }
+ for (size_t k = 0; k < pDst->num_frequency_responses; k++) {
+ pDst->frequency_responses[0][k] = src.frequencyResponse[k].frequency;
+ pDst->frequency_responses[1][k] = src.frequencyResponse[k].level;
+ }
+ pDst->geometric_location.x = src.position.x;
+ pDst->geometric_location.y = src.position.y;
+ pDst->geometric_location.z = src.position.z;
+ pDst->orientation.x = src.orientation.x;
+ pDst->orientation.y = src.orientation.y;
+ pDst->orientation.z = src.orientation.z;
+ }
+}
+
} // namespace V4_0
} // namespace android
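
Note: to make the formats produced by deviceAddressToHal() concrete, a standalone illustration (made-up values) of the formatted branches; the BUS and REMOTE_SUBMIX branches simply copy the address string through.

    #include <cstdio>

    int main() {
        char buf[32];
        const unsigned char mac[6] = {0x00, 0x11, 0x22, 0x33, 0x44, 0x55};
        snprintf(buf, sizeof(buf), "%02X:%02X:%02X:%02X:%02X:%02X",
                 mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
        printf("A2DP: %s\n", buf);                     // "00:11:22:33:44:55"
        snprintf(buf, sizeof(buf), "%d.%d.%d.%d", 192, 168, 1, 10);
        printf("IP:   %s\n", buf);                     // "192.168.1.10"
        snprintf(buf, sizeof(buf), "card=%d;device=%d", 1, 0);
        printf("USB:  %s\n", buf);                     // "card=1;device=0"
        return 0;
    }
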
diff --git a/media/libaudiohal/4.0/ConversionHelperHidl.h b/media/libaudiohal/4.0/ConversionHelperHidl.h
index ddc8569..8823a8d 100644
--- a/media/libaudiohal/4.0/ConversionHelperHidl.h
+++ b/media/libaudiohal/4.0/ConversionHelperHidl.h
@@ -19,9 +19,11 @@
#include <android/hardware/audio/4.0/types.h>
#include <hidl/HidlSupport.h>
+#include <system/audio.h>
#include <utils/String8.h>
using ::android::hardware::audio::V4_0::ParameterValue;
+using ::android::hardware::audio::V4_0::MicrophoneInfo;
using ::android::hardware::Return;
using ::android::hardware::hidl_string;
using ::android::hardware::hidl_vec;
@@ -34,6 +36,8 @@
static status_t keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys);
static status_t parametersFromHal(const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams);
static void parametersToHal(const hidl_vec<ParameterValue>& parameters, String8 *values);
+ static void microphoneInfoToHal(const MicrophoneInfo& src,
+ audio_microphone_characteristic_t *pDst);
ConversionHelperHidl(const char* className);
diff --git a/media/libaudiohal/4.0/DeviceHalHidl.cpp b/media/libaudiohal/4.0/DeviceHalHidl.cpp
index 8da1051..6facca9 100644
--- a/media/libaudiohal/4.0/DeviceHalHidl.cpp
+++ b/media/libaudiohal/4.0/DeviceHalHidl.cpp
@@ -59,7 +59,7 @@
audio_devices_t device, const char* halAddress, DeviceAddress* address) {
address->device = AudioDevice(device);
- if (address == nullptr || strnlen(halAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
+ if (halAddress == nullptr || strnlen(halAddress, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
return OK;
}
const bool isInput = (device & AUDIO_DEVICE_BIT_IN) != 0;
@@ -359,6 +359,23 @@
return processReturn("setAudioPortConfig", mDevice->setAudioPortConfig(hidlConfig));
}
+status_t DeviceHalHidl::getMicrophones(std::vector<media::MicrophoneInfo> *microphonesInfo) {
+ if (mDevice == 0) return NO_INIT;
+ Result retval;
+ Return<void> ret = mDevice->getMicrophones(
+ [&](Result r, hidl_vec<MicrophoneInfo> micArrayHal) {
+ retval = r;
+ for (size_t k = 0; k < micArrayHal.size(); k++) {
+ audio_microphone_characteristic_t dst;
+ //convert
+ microphoneInfoToHal(micArrayHal[k], &dst);
+ media::MicrophoneInfo microphone = media::MicrophoneInfo(dst);
+ microphonesInfo->push_back(microphone);
+ }
+ });
+ return processReturn("getMicrophones", ret, retval);
+}
+
status_t DeviceHalHidl::dump(int fd) {
if (mDevice == 0) return NO_INIT;
native_handle_t* hidlHandle = native_handle_create(1, 0);
diff --git a/media/libaudiohal/4.0/DeviceHalHidl.h b/media/libaudiohal/4.0/DeviceHalHidl.h
index f460add..0bd2175 100644
--- a/media/libaudiohal/4.0/DeviceHalHidl.h
+++ b/media/libaudiohal/4.0/DeviceHalHidl.h
@@ -108,6 +108,9 @@
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
+ // List microphones
+ virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+
virtual status_t dump(int fd);
private:
diff --git a/media/libaudiohal/4.0/DeviceHalLocal.cpp b/media/libaudiohal/4.0/DeviceHalLocal.cpp
index e64eee1..a245dd9 100644
--- a/media/libaudiohal/4.0/DeviceHalLocal.cpp
+++ b/media/libaudiohal/4.0/DeviceHalLocal.cpp
@@ -185,6 +185,18 @@
return INVALID_OPERATION;
}
+status_t DeviceHalLocal::getMicrophones(std::vector<media::MicrophoneInfo> *microphones) {
+ if (mDev->get_microphones == NULL) return INVALID_OPERATION;
+ size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT;
+ audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT];
+ status_t status = mDev->get_microphones(mDev, &mic_array[0], &actual_mics);
+ for (size_t i = 0; i < actual_mics; i++) {
+ media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(mic_array[i]);
+ microphones->push_back(microphoneInfo);
+ }
+ return status;
+}
+
status_t DeviceHalLocal::dump(int fd) {
return mDev->dump(mDev, fd);
}
diff --git a/media/libaudiohal/4.0/DeviceHalLocal.h b/media/libaudiohal/4.0/DeviceHalLocal.h
index daafdc7..08341a4 100644
--- a/media/libaudiohal/4.0/DeviceHalLocal.h
+++ b/media/libaudiohal/4.0/DeviceHalLocal.h
@@ -101,6 +101,9 @@
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config);
+ // List microphones
+ virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+
virtual status_t dump(int fd);
void closeOutputStream(struct audio_stream_out *stream_out);
diff --git a/media/libaudiohal/4.0/StreamHalHidl.cpp b/media/libaudiohal/4.0/StreamHalHidl.cpp
index de16e98..1c2fdb0 100644
--- a/media/libaudiohal/4.0/StreamHalHidl.cpp
+++ b/media/libaudiohal/4.0/StreamHalHidl.cpp
@@ -28,14 +28,20 @@
#include "VersionUtils.h"
using ::android::hardware::audio::common::V4_0::AudioChannelMask;
+using ::android::hardware::audio::common::V4_0::AudioContentType;
using ::android::hardware::audio::common::V4_0::AudioFormat;
+using ::android::hardware::audio::common::V4_0::AudioSource;
+using ::android::hardware::audio::common::V4_0::AudioUsage;
using ::android::hardware::audio::common::V4_0::ThreadInfo;
using ::android::hardware::audio::V4_0::AudioDrain;
using ::android::hardware::audio::V4_0::IStreamOutCallback;
using ::android::hardware::audio::V4_0::MessageQueueFlagBits;
+using ::android::hardware::audio::V4_0::MicrophoneInfo;
using ::android::hardware::audio::V4_0::MmapBufferInfo;
using ::android::hardware::audio::V4_0::MmapPosition;
using ::android::hardware::audio::V4_0::ParameterValue;
+using ::android::hardware::audio::V4_0::PlaybackTrackMetadata;
+using ::android::hardware::audio::V4_0::RecordTrackMetadata;
using ::android::hardware::audio::V4_0::Result;
using ::android::hardware::audio::V4_0::TimeSpec;
using ::android::hardware::MQDescriptorSync;
@@ -560,6 +566,28 @@
}
}
+/** Transform a standard collection to a HIDL vector. */
+template <class Values, class ElementConverter>
+static auto transformToHidlVec(const Values& values, ElementConverter converter) {
+ hidl_vec<decltype(converter(*values.begin()))> result{values.size()};
+ using namespace std;
+ transform(begin(values), end(values), begin(result), converter);
+ return result;
+}
+
+status_t StreamOutHalHidl::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
+ hardware::audio::V4_0::SourceMetadata halMetadata = {
+ .tracks = transformToHidlVec(sourceMetadata.tracks,
+ [](const playback_track_metadata& metadata) -> PlaybackTrackMetadata {
+ return {
+ .usage=static_cast<AudioUsage>(metadata.usage),
+ .contentType=static_cast<AudioContentType>(metadata.content_type),
+ .gain=metadata.gain,
+ };
+ })};
+ return processReturn("updateSourceMetadata", mStream->updateSourceMetadata(halMetadata));
+}
+
void StreamOutHalHidl::onWriteReady() {
sp<StreamOutHalInterfaceCallback> callback = mCallback.promote();
if (callback == 0) return;
@@ -754,5 +782,36 @@
}
}
+
+status_t StreamInHalHidl::getActiveMicrophones(
+ std::vector<media::MicrophoneInfo> *microphonesInfo) {
+ if (!mStream) return NO_INIT;
+ Result retval;
+ Return<void> ret = mStream->getActiveMicrophones(
+ [&](Result r, hidl_vec<MicrophoneInfo> micArrayHal) {
+ retval = r;
+ for (size_t k = 0; k < micArrayHal.size(); k++) {
+ audio_microphone_characteristic_t dst;
+ // convert
+ microphoneInfoToHal(micArrayHal[k], &dst);
+ media::MicrophoneInfo microphone = media::MicrophoneInfo(dst);
+ microphonesInfo->push_back(microphone);
+ }
+ });
+ return processReturn("getActiveMicrophones", ret, retval);
+}
+
+status_t StreamInHalHidl::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
+ hardware::audio::V4_0::SinkMetadata halMetadata = {
+ .tracks = transformToHidlVec(sinkMetadata.tracks,
+ [](const record_track_metadata& metadata) -> RecordTrackMetadata {
+ return {
+ .source=static_cast<AudioSource>(metadata.source),
+ .gain=metadata.gain,
+ };
+ })};
+ return processReturn("updateSinkMetadata", mStream->updateSinkMetadata(halMetadata));
+}
+
} // namespace V4_0
} // namespace android
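
Note: transformToHidlVec() above is a general-purpose adapter; a minimal self-contained usage outside the audio types, just to show the shape:

    #include <algorithm>
    #include <iterator>
    #include <string>
    #include <vector>
    #include <hidl/HidlSupport.h>

    using ::android::hardware::hidl_string;
    using ::android::hardware::hidl_vec;

    // Same helper as above, repeated so this sketch is self-contained.
    template <class Values, class ElementConverter>
    static auto transformToHidlVec(const Values& values, ElementConverter converter) {
        hidl_vec<decltype(converter(*values.begin()))> result{values.size()};
        std::transform(std::begin(values), std::end(values), std::begin(result), converter);
        return result;
    }

    // Example: std::vector<std::string> -> hidl_vec<hidl_string>.
    hidl_vec<hidl_string> toHidlStrings(const std::vector<std::string>& names) {
        return transformToHidlVec(
                names, [](const std::string& s) -> hidl_string { return hidl_string(s); });
    }
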
diff --git a/media/libaudiohal/4.0/StreamHalHidl.h b/media/libaudiohal/4.0/StreamHalHidl.h
index 8d4dc8c..2dda0f8 100644
--- a/media/libaudiohal/4.0/StreamHalHidl.h
+++ b/media/libaudiohal/4.0/StreamHalHidl.h
@@ -162,6 +162,9 @@
// Return a recent count of the number of audio frames presented to an external observer.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
+ // Called when the metadata of the stream's source has been changed.
+ status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
+
// Methods used by StreamOutCallback (HIDL).
void onWriteReady();
void onDrainReady();
@@ -211,6 +214,12 @@
// the clock time associated with that frame count.
virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
+ // Get active microphones
+ virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+
+ // Called when the metadata of the stream's sink has been changed.
+ status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
+
private:
friend class DeviceHalHidl;
typedef MessageQueue<ReadParameters, hardware::kSynchronizedReadWrite> CommandMQ;
diff --git a/media/libaudiohal/4.0/StreamHalLocal.cpp b/media/libaudiohal/4.0/StreamHalLocal.cpp
index 592a931..e9d96bf 100644
--- a/media/libaudiohal/4.0/StreamHalLocal.cpp
+++ b/media/libaudiohal/4.0/StreamHalLocal.cpp
@@ -233,6 +233,19 @@
return mStream->get_presentation_position(mStream, frames, timestamp);
}
+status_t StreamOutHalLocal::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
+ if (mStream->update_source_metadata == nullptr) {
+ return INVALID_OPERATION;
+ }
+ const source_metadata_t metadata {
+ .track_count = sourceMetadata.tracks.size(),
+ // const cast is fine as it is in a const structure
+ .tracks = const_cast<playback_track_metadata*>(sourceMetadata.tracks.data()),
+ };
+ mStream->update_source_metadata(mStream, &metadata);
+ return OK;
+}
+
status_t StreamOutHalLocal::start() {
if (mStream->start == NULL) return INVALID_OPERATION;
return mStream->start(mStream);
@@ -294,6 +307,19 @@
return mStream->get_capture_position(mStream, frames, time);
}
+status_t StreamInHalLocal::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
+ if (mStream->update_sink_metadata == nullptr) {
+ return INVALID_OPERATION;
+ }
+ const sink_metadata_t metadata {
+ .track_count = sinkMetadata.tracks.size(),
+ // const cast is fine as it is in a const structure
+ .tracks = const_cast<record_track_metadata*>(sinkMetadata.tracks.data()),
+ };
+ mStream->update_sink_metadata(mStream, &metadata);
+ return OK;
+}
+
status_t StreamInHalLocal::start() {
if (mStream->start == NULL) return INVALID_OPERATION;
return mStream->start(mStream);
@@ -315,5 +341,17 @@
return mStream->get_mmap_position(mStream, position);
}
+status_t StreamInHalLocal::getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones) {
+ if (mStream->get_active_microphones == NULL) return INVALID_OPERATION;
+ size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT;
+ audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT];
+ status_t status = mStream->get_active_microphones(mStream, &mic_array[0], &actual_mics);
+ for (size_t i = 0; i < actual_mics; i++) {
+ media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(mic_array[i]);
+ microphones->push_back(microphoneInfo);
+ }
+ return status;
+}
+
} // namespace V4_0
} // namespace android
diff --git a/media/libaudiohal/4.0/StreamHalLocal.h b/media/libaudiohal/4.0/StreamHalLocal.h
index 076bc4c..7237509 100644
--- a/media/libaudiohal/4.0/StreamHalLocal.h
+++ b/media/libaudiohal/4.0/StreamHalLocal.h
@@ -150,6 +150,9 @@
// Get current read/write position in the mmap buffer
virtual status_t getMmapPosition(struct audio_mmap_position *position);
+ // Called when the metadata of the stream's source has been changed.
+ status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
+
private:
audio_stream_out_t *mStream;
wp<StreamOutHalInterfaceCallback> mCallback;
@@ -195,6 +198,12 @@
// Get current read/write position in the mmap buffer
virtual status_t getMmapPosition(struct audio_mmap_position *position);
+ // Get active microphones
+ virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones);
+
+ // Called when the metadata of the stream's sink has been changed.
+ status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
+
private:
audio_stream_in_t *mStream;
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index caf01be..7de8eb3 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
#define ANDROID_HARDWARE_DEVICE_HAL_INTERFACE_H
+#include <media/MicrophoneInfo.h>
#include <system/audio.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -105,6 +106,9 @@
// Set audio port configuration.
virtual status_t setAudioPortConfig(const struct audio_port_config *config) = 0;
+ // List microphones
+ virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones) = 0;
+
virtual status_t dump(int fd) = 0;
protected:
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 7419c34..c969e28 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -17,7 +17,10 @@
#ifndef ANDROID_HARDWARE_STREAM_HAL_INTERFACE_H
#define ANDROID_HARDWARE_STREAM_HAL_INTERFACE_H
+#include <vector>
+
#include <media/audiohal/EffectHalInterface.h>
+#include <media/MicrophoneInfo.h>
#include <system/audio.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -142,6 +145,15 @@
// Return a recent count of the number of audio frames presented to an external observer.
virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
+ struct SourceMetadata {
+ std::vector<playback_track_metadata_t> tracks;
+ };
+ /**
+ * Called when the metadata of the stream's source has been changed.
+ * @param sourceMetadata Description of the audio that is played by the clients.
+ */
+ virtual status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) = 0;
+
protected:
virtual ~StreamOutHalInterface() {}
};
@@ -161,6 +173,18 @@
// the clock time associated with that frame count.
virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
+ // Get active microphones
+ virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones) = 0;
+
+ struct SinkMetadata {
+ std::vector<record_track_metadata_t> tracks;
+ };
+ /**
+ * Called when the metadata of the stream's sink has been changed.
+ * @param sinkMetadata Description of the audio that is recorded by the clients.
+ */
+ virtual status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) = 0;
+
protected:
virtual ~StreamInHalInterface() {}
};
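
Note: the new structs are thin wrappers over the system/audio.h track metadata types. A sketch of how a caller (for example a playback thread) might populate and push them, assuming `stream` is an already-opened StreamOutHalInterface; the values are illustrative only:

    #include <media/audiohal/StreamHalInterface.h>
    #include <system/audio.h>

    using android::sp;
    using android::StreamOutHalInterface;

    // Sketch: report a single active media track to the HAL.
    void reportOneMediaTrack(const sp<StreamOutHalInterface>& stream) {
        playback_track_metadata_t track = {
            .usage = AUDIO_USAGE_MEDIA,
            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
            .gain = 1.0f,  // linear gain, 1.0 == unity
        };
        StreamOutHalInterface::SourceMetadata metadata;
        metadata.tracks.push_back(track);
        // HALs before V4.0 return INVALID_OPERATION here, so treat the result as advisory.
        (void)stream->updateSourceMetadata(metadata);
    }
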
diff --git a/media/libeffects/config/src/EffectsConfig.cpp b/media/libeffects/config/src/EffectsConfig.cpp
index 4ed3ba8..d79501f 100644
--- a/media/libeffects/config/src/EffectsConfig.cpp
+++ b/media/libeffects/config/src/EffectsConfig.cpp
@@ -203,7 +203,7 @@
auto parseProxy = [&xmlEffect, &parseImpl](const char* tag, EffectImpl& proxyLib) {
auto* xmlProxyLib = xmlEffect.FirstChildElement(tag);
if (xmlProxyLib == nullptr) {
- ALOGE("effectProxy must contain a <%s>: %s", tag, dump(*xmlProxyLib));
+ ALOGE("effectProxy must contain a <%s>: %s", tag, dump(xmlEffect));
return false;
}
return parseImpl(*xmlProxyLib, proxyLib);
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 5ad4c01..b874df4 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -19,6 +19,7 @@
#include <utils/Log.h>
#include <inttypes.h>
+#include <stdlib.h>
#include "include/SecureBuffer.h"
#include "include/SharedMemoryBuffer.h"
@@ -88,6 +89,22 @@
static const char *kCodecMaxHeight = "android.media.mediacodec.maxheight"; /* 0..n */
static const char *kCodecError = "android.media.mediacodec.errcode";
static const char *kCodecErrorState = "android.media.mediacodec.errstate";
+static const char *kCodecLatencyMax = "android.media.mediacodec.latency.max"; /* in us */
+static const char *kCodecLatencyMin = "android.media.mediacodec.latency.min"; /* in us */
+static const char *kCodecLatencyAvg = "android.media.mediacodec.latency.avg"; /* in us */
+static const char *kCodecLatencyCount = "android.media.mediacodec.latency.n";
+static const char *kCodecLatencyHist = "android.media.mediacodec.latency.hist"; /* in us */
+static const char *kCodecLatencyUnknown = "android.media.mediacodec.latency.unknown";
+
+// the kCodecRecent* fields appear only in getMetrics() results
+static const char *kCodecRecentLatencyMax = "android.media.mediacodec.recent.max"; /* in us */
+static const char *kCodecRecentLatencyMin = "android.media.mediacodec.recent.min"; /* in us */
+static const char *kCodecRecentLatencyAvg = "android.media.mediacodec.recent.avg"; /* in us */
+static const char *kCodecRecentLatencyCount = "android.media.mediacodec.recent.n";
+static const char *kCodecRecentLatencyHist = "android.media.mediacodec.recent.hist"; /* in us */
+
+// XXX suppress until we get our representation right
+static bool kEmitHistogram = false;
static int64_t getId(const sp<IResourceManagerClient> &client) {
@@ -506,12 +523,14 @@
mDequeueOutputTimeoutGeneration(0),
mDequeueOutputReplyID(0),
mHaveInputSurface(false),
- mHavePendingInputBuffers(false) {
+ mHavePendingInputBuffers(false),
+ mLatencyUnknown(0) {
if (uid == kNoUid) {
mUid = IPCThreadState::self()->getCallingUid();
} else {
mUid = uid;
}
+
initAnalyticsItem();
}
@@ -523,16 +542,90 @@
}
void MediaCodec::initAnalyticsItem() {
- CHECK(mAnalyticsItem == NULL);
- // set up our new record, get a sessionID, put it into the in-progress list
- mAnalyticsItem = new MediaAnalyticsItem(kCodecKeyName);
- if (mAnalyticsItem != NULL) {
- // don't record it yet; only at the end, when we have decided that we have
- // data worth writing (e.g. .count() > 0)
+ if (mAnalyticsItem == NULL) {
+ mAnalyticsItem = new MediaAnalyticsItem(kCodecKeyName);
+ }
+
+ mLatencyHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
+
+ {
+ Mutex::Autolock al(mRecentLock);
+ for (int i = 0; i<kRecentLatencyFrames; i++) {
+ mRecentSamples[i] = kRecentSampleInvalid;
+ }
+ mRecentHead = 0;
+ }
+}
+
+void MediaCodec::updateAnalyticsItem() {
+ ALOGV("MediaCodec::updateAnalyticsItem");
+ if (mAnalyticsItem == NULL) {
+ return;
+ }
+
+ if (mLatencyHist.getCount() != 0) {
+ mAnalyticsItem->setInt64(kCodecLatencyMax, mLatencyHist.getMax());
+ mAnalyticsItem->setInt64(kCodecLatencyMin, mLatencyHist.getMin());
+ mAnalyticsItem->setInt64(kCodecLatencyAvg, mLatencyHist.getAvg());
+ mAnalyticsItem->setInt64(kCodecLatencyCount, mLatencyHist.getCount());
+
+ if (kEmitHistogram) {
+ // and the histogram itself
+ std::string hist = mLatencyHist.emit();
+ mAnalyticsItem->setCString(kCodecLatencyHist, hist.c_str());
+ }
+ }
+ if (mLatencyUnknown > 0) {
+ mAnalyticsItem->setInt64(kCodecLatencyUnknown, mLatencyUnknown);
+ }
+
+#if 0
+ // enable for short term, only while debugging
+ updateEphemeralAnalytics(mAnalyticsItem);
+#endif
+}
+
+void MediaCodec::updateEphemeralAnalytics(MediaAnalyticsItem *item) {
+ ALOGD("MediaCodec::updateEphemeralAnalytics()");
+
+ if (item == NULL) {
+ return;
+ }
+
+ Histogram recentHist;
+
+ // build an empty histogram
+ recentHist.setup(kLatencyHistBuckets, kLatencyHistWidth, kLatencyHistFloor);
+
+ // stuff it with the samples in the ring buffer
+ {
+ Mutex::Autolock al(mRecentLock);
+
+ for (int i = 0; i < kRecentLatencyFrames; i++) {
+ if (mRecentSamples[i] != kRecentSampleInvalid) {
+ recentHist.insert(mRecentSamples[i]);
+ }
+ }
+ }
+
+
+ // spit the data (if any) into the supplied analytics record
+ if (recentHist.getCount() != 0) {
+ item->setInt64(kCodecRecentLatencyMax, recentHist.getMax());
+ item->setInt64(kCodecRecentLatencyMin, recentHist.getMin());
+ item->setInt64(kCodecRecentLatencyAvg, recentHist.getAvg());
+ item->setInt64(kCodecRecentLatencyCount, recentHist.getCount());
+
+ if (kEmitHistogram) {
+ // and the histogram itself
+ std::string hist = recentHist.emit();
+ item->setCString(kCodecRecentLatencyHist, hist.c_str());
+ }
}
}
void MediaCodec::flushAnalyticsItem() {
+ updateAnalyticsItem();
if (mAnalyticsItem != NULL) {
// don't log empty records
if (mAnalyticsItem->count() > 0) {
@@ -543,6 +636,190 @@
}
}
+bool MediaCodec::Histogram::setup(int nbuckets, int64_t width, int64_t floor)
+{
+ if (nbuckets <= 0 || width <= 0) {
+ return false;
+ }
+
+ // get histogram buckets
+ if (nbuckets == mBucketCount && mBuckets != NULL) {
+ // reuse our existing buffer
+ memset(mBuckets, 0, sizeof(*mBuckets) * mBucketCount);
+ } else {
+ // get a new pre-zeroed buffer
+ int64_t *newbuckets = (int64_t *)calloc(nbuckets, sizeof (*mBuckets));
+ if (newbuckets == NULL) {
+ goto bad;
+ }
+ if (mBuckets != NULL)
+ free(mBuckets);
+ mBuckets = newbuckets;
+ }
+
+ mWidth = width;
+ mFloor = floor;
+ mCeiling = floor + nbuckets * width;
+ mBucketCount = nbuckets;
+
+ mMin = INT64_MAX;
+ mMax = INT64_MIN;
+ mSum = 0;
+ mCount = 0;
+ mBelow = mAbove = 0;
+
+ return true;
+
+ bad:
+ if (mBuckets != NULL) {
+ free(mBuckets);
+ mBuckets = NULL;
+ }
+
+ return false;
+}
+
+void MediaCodec::Histogram::insert(int64_t sample)
+{
+ // histogram is not set up
+ if (mBuckets == NULL) {
+ return;
+ }
+
+ mCount++;
+ mSum += sample;
+ if (mMin > sample) mMin = sample;
+ if (mMax < sample) mMax = sample;
+
+ if (sample < mFloor) {
+ mBelow++;
+ } else if (sample >= mCeiling) {
+ mAbove++;
+ } else {
+ int64_t slot = (sample - mFloor) / mWidth;
+ CHECK(slot < mBucketCount);
+ mBuckets[slot]++;
+ }
+ return;
+}
+
+std::string MediaCodec::Histogram::emit()
+{
+ std::string value;
+ char buffer[64];
+
+ // emits: floor,width,below{bucket0,bucket1,...,bucketN}above
+ // an unconfigured histogram will emit: 0,0,0{}0
+ // XXX: is this the best representation?
+ snprintf(buffer, sizeof(buffer), "%" PRId64 ",%" PRId64 ",%" PRId64 "{",
+ mFloor, mWidth, mBelow);
+ value = buffer;
+ for (int i = 0; i < mBucketCount; i++) {
+ if (i != 0) {
+ value = value + ",";
+ }
+ snprintf(buffer, sizeof(buffer), "%" PRId64, mBuckets[i]);
+ value = value + buffer;
+ }
+ snprintf(buffer, sizeof(buffer), "}%" PRId64 , mAbove);
+ value = value + buffer;
+ return value;
+}
+
+// when we send a buffer to the codec
+void MediaCodec::statsBufferSent(int64_t presentationUs) {
+
+ // only enqueue if we have a legitimate time
+ if (presentationUs <= 0) {
+ ALOGV("presentation time: %" PRId64, presentationUs);
+ return;
+ }
+
+ const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+ BufferFlightTiming_t startdata = { presentationUs, nowNs };
+
+ {
+ // mutex access to mBuffersInFlight and other stats
+ Mutex::Autolock al(mLatencyLock);
+
+
+ // XXX: we *could* make sure that the time is later than the end of queue
+ // as part of a consistency check...
+ mBuffersInFlight.push_back(startdata);
+ }
+}
+
+// when we get a buffer back from the codec
+void MediaCodec::statsBufferReceived(int64_t presentationUs) {
+
+ CHECK_NE(mState, UNINITIALIZED);
+
+ // mutex access to mBuffersInFlight and other stats
+ Mutex::Autolock al(mLatencyLock);
+
+ // how long this buffer took for the round trip through the codec
+ // NB: pipelining can/will make these times larger. e.g., if each packet
+ // is always 2 msec and we have 3 in flight at any given time, we're going to
+ // see "6 msec" as an answer.
+
+ // ignore stuff with no presentation time
+ if (presentationUs <= 0) {
+ ALOGD("-- returned buffer has bad timestamp %" PRId64 ", ignore it", presentationUs);
+ mLatencyUnknown++;
+ return;
+ }
+
+ BufferFlightTiming_t startdata;
+ bool valid = false;
+ while (mBuffersInFlight.size() > 0) {
+ startdata = *mBuffersInFlight.begin();
+ ALOGV("-- Looking at startdata. presentation %" PRId64 ", start %" PRId64,
+ startdata.presentationUs, startdata.startedNs);
+ if (startdata.presentationUs == presentationUs) {
+ // a match
+ ALOGV("-- match entry for %" PRId64 ", hits our frame of %" PRId64,
+ startdata.presentationUs, presentationUs);
+ mBuffersInFlight.pop_front();
+ valid = true;
+ break;
+ } else if (startdata.presentationUs < presentationUs) {
+ // we must have missed the match for this, drop it and keep looking
+ ALOGV("-- drop entry for %" PRId64 ", before our frame of %" PRId64,
+ startdata.presentationUs, presentationUs);
+ mBuffersInFlight.pop_front();
+ continue;
+ } else {
+ // head is after, so we don't have a frame for ourselves
+ ALOGV("-- found entry for %" PRId64 ", AFTER our frame of %" PRId64
+ " we have nothing to pair with",
+ startdata.presentationUs, presentationUs);
+ mLatencyUnknown++;
+ return;
+ }
+ }
+ if (!valid) {
+ ALOGV("-- empty queue, so ignore that.");
+ mLatencyUnknown++;
+ return;
+ }
+
+ // use the current time to compute the latency for this buffer
+ const int64_t nowNs = systemTime(SYSTEM_TIME_MONOTONIC);
+ int64_t latencyUs = (nowNs - startdata.startedNs + 500) / 1000;
+
+ mLatencyHist.insert(latencyUs);
+
+ // push into the recent samples
+ {
+ Mutex::Autolock al(mRecentLock);
+
+ if (mRecentHead >= kRecentLatencyFrames) {
+ mRecentHead = 0;
+ }
+ mRecentSamples[mRecentHead++] = latencyUs;
+ }
+}
+
// static
status_t MediaCodec::PostAndAwaitResponse(
const sp<AMessage> &msg, sp<AMessage> *response) {
@@ -778,7 +1055,6 @@
msg->setPointer("descrambler", descrambler.get());
}
if (mAnalyticsItem != NULL) {
- // XXX: save indication that it's crypto in some way...
mAnalyticsItem->setInt32(kCodecCrypto, 1);
}
} else if (mFlags & kFlagIsSecure) {
@@ -1245,11 +1521,14 @@
return UNKNOWN_ERROR;
}
- // XXX: go get current values for whatever in-flight data we want
+ // update any in-flight data that's not carried within the record
+ updateAnalyticsItem();
// send it back to the caller.
reply = mAnalyticsItem->dup();
+ updateEphemeralAnalytics(reply);
+
return OK;
}
@@ -1435,6 +1714,8 @@
int64_t timeUs;
CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+ statsBufferReceived(timeUs);
+
response->setInt64("timeUs", timeUs);
int32_t flags;
@@ -2919,6 +3200,9 @@
Mutex::Autolock al(mBufferLock);
info->mOwnedByClient = false;
info->mData.clear();
+
+ statsBufferSent(timeUs);
+
if (mAnalyticsItem != NULL) {
mAnalyticsItem->addInt64(kCodecBytesIn, size);
}
@@ -3138,6 +3422,8 @@
msg->setInt64("timeUs", timeUs);
+ statsBufferReceived(timeUs);
+
int32_t flags;
CHECK(buffer->meta()->findInt32("flags", &flags));
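
Note: a standalone illustration of the Histogram bucketing and the emit() format ("floor,width,below{buckets}above"), worked with floor=0, width=5, four buckets and samples {1, 7, 12, 30}:

    #include <cstdint>
    #include <cstdio>
    #include <string>
    #include <vector>

    int main() {
        const int64_t kFloor = 0, kWidth = 5, kBuckets = 4;
        const int64_t kCeiling = kFloor + kBuckets * kWidth;        // 20
        std::vector<int64_t> buckets(kBuckets, 0);
        int64_t below = 0, above = 0;
        for (int64_t sample : {1, 7, 12, 30}) {                     // latencies in microseconds
            if (sample < kFloor)          below++;
            else if (sample >= kCeiling)  above++;
            else                          buckets[(sample - kFloor) / kWidth]++;
        }
        std::string out = std::to_string(kFloor) + "," + std::to_string(kWidth) + ","
                        + std::to_string(below) + "{";
        for (size_t i = 0; i < buckets.size(); i++) {
            if (i != 0) out += ",";
            out += std::to_string(buckets[i]);
        }
        out += "}" + std::to_string(above);
        printf("%s\n", out.c_str());                                // prints "0,5,0{1,1,1,0}1"
        return 0;
    }
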
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
index d0b72b7..13b6d05 100644
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
+++ b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
@@ -45,9 +45,11 @@
OMX_COMPONENTTYPE **component)
: SimpleSoftOMXComponent(name, callbacks, appData, component),
mFLACDecoder(NULL),
- mHasStreamInfo(false),
mInputBufferCount(0),
+ mHasStreamInfo(false),
mSignalledError(false),
+ mSawInputEOS(false),
+ mFinishedDecoder(false),
mOutputPortSettingsChange(NONE) {
ALOGV("ctor:");
memset(&mStreamInfo, 0, sizeof(mStreamInfo));
@@ -292,7 +294,6 @@
}
void SoftFlacDecoder::onQueueFilled(OMX_U32 /* portIndex */) {
- ALOGV("onQueueFilled:");
if (mSignalledError || mOutputPortSettingsChange != NONE) {
return;
}
@@ -300,96 +301,101 @@
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
- while (!inQueue.empty() && !outQueue.empty()) {
- BufferInfo *inInfo = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ ALOGV("onQueueFilled %d/%d:", inQueue.empty(), outQueue.empty());
+ while ((!inQueue.empty() || mSawInputEOS) && !outQueue.empty()) {
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- uint8_t* inBuffer = inHeader->pBuffer + inHeader->nOffset;
- uint32_t inBufferLength = inHeader->nFilledLen;
- bool endOfInput = (inHeader->nFlags & OMX_BUFFERFLAG_EOS) != 0;
+ short *outBuffer = reinterpret_cast<short *>(outHeader->pBuffer + outHeader->nOffset);
+ size_t outBufferSize = outHeader->nAllocLen - outHeader->nOffset;
+ int64_t timeStamp = 0;
- if (inHeader->nFilledLen == 0) {
- if (endOfInput) {
- outHeader->nFilledLen = 0;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
- outInfo->mOwnedByUs = false;
- outQueue.erase(outQueue.begin());
- notifyFillBufferDone(outHeader);
- } else {
- ALOGE("onQueueFilled: emptyInputBuffer received");
+ if (!inQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ uint8_t* inBuffer = inHeader->pBuffer + inHeader->nOffset;
+ uint32_t inBufferLength = inHeader->nFilledLen;
+ ALOGV("input: %u bytes", inBufferLength);
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ ALOGV("saw EOS");
+ mSawInputEOS = true;
}
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- notifyEmptyBufferDone(inHeader);
- return;
- }
- if (mInputBufferCount == 0 && !(inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
- ALOGE("onQueueFilled: first buffer should have OMX_BUFFERFLAG_CODECCONFIG set");
- inHeader->nFlags |= OMX_BUFFERFLAG_CODECCONFIG;
- }
- if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
- status_t decoderErr = mFLACDecoder->parseMetadata(inBuffer, inBufferLength);
- mInputBufferCount++;
- if (decoderErr != OK && decoderErr != WOULD_BLOCK) {
- ALOGE("onQueueFilled: FLACDecoder parseMetaData returns error %d", decoderErr);
+ if (mInputBufferCount == 0 && !(inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
+ ALOGE("onQueueFilled: first buffer should have OMX_BUFFERFLAG_CODECCONFIG set");
+ inHeader->nFlags |= OMX_BUFFERFLAG_CODECCONFIG;
+ }
+ if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
+ ALOGV("received config buffer of size %u", inBufferLength);
+ status_t decoderErr = mFLACDecoder->parseMetadata(inBuffer, inBufferLength);
+ mInputBufferCount++;
+
+ if (decoderErr != OK && decoderErr != WOULD_BLOCK) {
+ ALOGE("onQueueFilled: FLACDecoder parseMetaData returns error %d", decoderErr);
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorStreamCorrupt, decoderErr, NULL);
+ return;
+ }
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ notifyEmptyBufferDone(inHeader);
+
+ if (decoderErr == WOULD_BLOCK) {
+ continue;
+ }
+ mStreamInfo = mFLACDecoder->getStreamInfo();
+ mHasStreamInfo = true;
+
+ // Only send out port settings changed event if both sample rate
+ // and numChannels are valid.
+ if (mStreamInfo.sample_rate && mStreamInfo.channels) {
+ ALOGD("onQueueFilled: initially configuring decoder: %d Hz, %d channels",
+ mStreamInfo.sample_rate, mStreamInfo.channels);
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+ return;
+ }
+
+ status_t decoderErr = mFLACDecoder->decodeOneFrame(
+ inBuffer, inBufferLength, outBuffer, &outBufferSize);
+ if (decoderErr != OK) {
+ ALOGE("onQueueFilled: FLACDecoder decodeOneFrame returns error %d", decoderErr);
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorStreamCorrupt, decoderErr, NULL);
return;
}
+ mInputBufferCount++;
+ timeStamp = inHeader->nTimeStamp;
inInfo->mOwnedByUs = false;
inQueue.erase(inQueue.begin());
notifyEmptyBufferDone(inHeader);
- if (decoderErr == WOULD_BLOCK) {
+ if (outBufferSize == 0) {
+ ALOGV("no output, trying again");
continue;
}
- mStreamInfo = mFLACDecoder->getStreamInfo();
- mHasStreamInfo = true;
-
- // Only send out port settings changed event if both sample rate
- // and numChannels are valid.
- if (mStreamInfo.sample_rate && mStreamInfo.channels) {
- ALOGD("onQueueFilled: initially configuring decoder: %d Hz, %d channels",
- mStreamInfo.sample_rate, mStreamInfo.channels);
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
+ } else if (mSawInputEOS && !mFinishedDecoder) {
+ status_t decoderErr = mFLACDecoder->decodeOneFrame(NULL, 0, outBuffer, &outBufferSize);
+ mFinishedDecoder = true;
+ if (decoderErr != OK) {
+ ALOGE("onQueueFilled: FLACDecoder finish returns error %d", decoderErr);
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorStreamCorrupt, decoderErr, NULL);
+ return;
}
- return;
- }
-
- short *outBuffer =
- reinterpret_cast<short *>(outHeader->pBuffer + outHeader->nOffset);
- size_t outBufferSize = outHeader->nAllocLen - outHeader->nOffset;
-
- status_t decoderErr = mFLACDecoder->decodeOneFrame(
- inBuffer, inBufferLength, outBuffer, &outBufferSize);
- if (decoderErr != OK) {
- ALOGE("onQueueFilled: FLACDecoder decodeOneFrame returns error %d", decoderErr);
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorStreamCorrupt, decoderErr, NULL);
- return;
- }
-
- mInputBufferCount++;
- int64_t ts = inHeader->nTimeStamp;
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- notifyEmptyBufferDone(inHeader);
-
- if (endOfInput) {
outHeader->nFlags = OMX_BUFFERFLAG_EOS;
- } else if (outBufferSize == 0) {
- continue;
} else {
- outHeader->nFlags = 0;
+ ALOGE("no input buffer but did not get EOS");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorStreamCorrupt, 0, NULL);
+ return;
}
outHeader->nFilledLen = outBufferSize;
- outHeader->nTimeStamp = ts;
+ outHeader->nTimeStamp = timeStamp;
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
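
Note: the reworked loop above follows the usual drain-at-EOS pattern: decode while input is available, then call the decoder once more with no input so it can flush its final frame. A toy, self-contained model of that control flow (a fake one-frame-delay decoder stands in for FLACDecoder):

    #include <cstdio>
    #include <deque>
    #include <string>

    struct ToyDecoder {                                   // stands in for FLACDecoder
        std::string pending;
        std::string decode(const std::string* input) {    // nullptr == drain
            std::string out = pending;
            pending = input ? *input : std::string();
            return out;                                   // one frame of delay
        }
    };

    int main() {
        std::deque<std::string> inQueue = {"frame1", "frame2"};
        bool sawEOS = false, drained = false;
        ToyDecoder dec;
        while (!inQueue.empty() || (sawEOS && !drained)) {
            if (!inQueue.empty()) {
                std::string in = inQueue.front();
                inQueue.pop_front();
                if (inQueue.empty()) sawEOS = true;       // last buffer carries EOS
                std::string out = dec.decode(&in);
                if (!out.empty()) printf("out: %s\n", out.c_str());
            } else {
                drained = true;
                std::string out = dec.decode(nullptr);    // flush the last frame
                if (!out.empty()) printf("out (EOS): %s\n", out.c_str());
            }
        }
        return 0;
    }
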
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
index 0f17ed8..b63f7ad 100644
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
+++ b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.h
@@ -52,9 +52,11 @@
FLACDecoder *mFLACDecoder;
FLAC__StreamMetadata_StreamInfo mStreamInfo;
- bool mHasStreamInfo;
size_t mInputBufferCount;
+ bool mHasStreamInfo;
bool mSignalledError;
+ bool mSawInputEOS;
+ bool mFinishedDecoder;
enum {
NONE,
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index 56d2d69..a0e46c3 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -56,12 +56,13 @@
mCompressionLevel(FLAC_COMPRESSION_LEVEL_DEFAULT),
mEncoderWriteData(false),
mEncoderReturnedEncodedData(false),
+ mSawInputEOS(false),
+ mSentOutputEOS(false),
mEncoderReturnedNbBytes(0),
- mInputBufferPcm32(NULL)
-#ifdef WRITE_FLAC_HEADER_IN_FIRST_BUFFER
- , mHeaderOffset(0)
- , mWroteHeader(false)
-#endif
+ mInputBufferPcm32(NULL),
+ mHeaderOffset(0),
+ mHeaderComplete(false),
+ mWroteHeader(false)
{
ALOGV("SoftFlacEncoder::SoftFlacEncoder(name=%s)", name);
initPorts();
@@ -354,55 +355,55 @@
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
- while (!inQueue.empty() && !outQueue.empty()) {
- BufferInfo *inInfo = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ FLAC__bool ok = true;
+
+ while ((!inQueue.empty() || mSawInputEOS) && !outQueue.empty()) {
+ if (!inQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ ALOGV("saw EOS on buffer of size %u", inHeader->nFilledLen);
+ mSawInputEOS = true;
+ }
+
+ if (inHeader->nFilledLen > kMaxInputBufferSize) {
+ ALOGE("input buffer too large (%d).", inHeader->nFilledLen);
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ assert(mNumChannels != 0);
+ mEncoderWriteData = true;
+ mEncoderReturnedEncodedData = false;
+ mEncoderReturnedNbBytes = 0;
+ mCurrentInputTimeStamp = inHeader->nTimeStamp;
+
+ const unsigned nbInputFrames = inHeader->nFilledLen / (2 * mNumChannels);
+ const unsigned nbInputSamples = inHeader->nFilledLen / 2;
+ const OMX_S16 * const pcm16 = reinterpret_cast<OMX_S16 *>(inHeader->pBuffer);
+
+ CHECK_LE(nbInputSamples, 2 * kMaxNumSamplesPerFrame);
+ for (unsigned i=0 ; i < nbInputSamples ; i++) {
+ mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
+ }
+ ALOGV(" about to encode %u samples per channel", nbInputFrames);
+ ok = FLAC__stream_encoder_process_interleaved(
+ mFlacStreamEncoder,
+ mInputBufferPcm32,
+ nbInputFrames /*samples per channel*/ );
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
-
- outHeader->nFilledLen = 0;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
-
- return;
- }
-
- if (inHeader->nFilledLen > kMaxInputBufferSize) {
- ALOGE("input buffer too large (%d).", inHeader->nFilledLen);
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
-
- assert(mNumChannels != 0);
- mEncoderWriteData = true;
- mEncoderReturnedEncodedData = false;
- mEncoderReturnedNbBytes = 0;
- mCurrentInputTimeStamp = inHeader->nTimeStamp;
-
- const unsigned nbInputFrames = inHeader->nFilledLen / (2 * mNumChannels);
- const unsigned nbInputSamples = inHeader->nFilledLen / 2;
- const OMX_S16 * const pcm16 = reinterpret_cast<OMX_S16 *>(inHeader->pBuffer);
-
- CHECK_LE(nbInputSamples, 2 * kMaxNumSamplesPerFrame);
- for (unsigned i=0 ; i < nbInputSamples ; i++) {
- mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
- }
- ALOGV(" about to encode %u samples per channel", nbInputFrames);
- FLAC__bool ok = FLAC__stream_encoder_process_interleaved(
- mFlacStreamEncoder,
- mInputBufferPcm32,
- nbInputFrames /*samples per channel*/ );
-
if (ok) {
if (mEncoderReturnedEncodedData && (mEncoderReturnedNbBytes != 0)) {
ALOGV(" dequeueing buffer on output port after writing data");
@@ -414,6 +415,21 @@
mEncoderReturnedEncodedData = false;
} else {
ALOGV(" encoder process_interleaved returned without data to write");
+ if (mSawInputEOS && !mSentOutputEOS) {
+ ALOGV("finishing encoder");
+ mSentOutputEOS = true;
+ FLAC__stream_encoder_finish(mFlacStreamEncoder);
+ if (mEncoderReturnedEncodedData && (mEncoderReturnedNbBytes != 0)) {
+ ALOGV(" dequeueing residual buffer on output port after writing data");
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ mEncoderReturnedEncodedData = false;
+ }
+ }
}
} else {
ALOGE(" error encountered during encoding");
@@ -422,11 +438,6 @@
return;
}
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
}
}
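
A note on the drain path added above: FLAC__stream_encoder_finish() flushes whatever samples libFLAC still holds internally and can invoke the write callback one more time, which is why the residual output buffer and the EOS flag are emitted from the "returned without data to write" branch once input EOS has been seen. A minimal standalone sketch of that teardown sequence (illustrative only, not part of the patch):

    #include "FLAC/stream_encoder.h"

    void finishEncoderSketch(FLAC__StreamEncoder *enc) {
        // finish() flushes pending frames; the registered write callback may
        // fire again here with the last encoded bytes before deletion.
        FLAC__stream_encoder_finish(enc);
        FLAC__stream_encoder_delete(enc);
    }
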
@@ -438,16 +449,22 @@
ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%zu, samples=%u, curr_frame=%u)",
bytes, samples, current_frame);
-#ifdef WRITE_FLAC_HEADER_IN_FIRST_BUFFER
if (samples == 0) {
- ALOGI(" saving %zu bytes of header", bytes);
- memcpy(mHeader + mHeaderOffset, buffer, bytes);
- mHeaderOffset += bytes;// will contain header size when finished receiving header
+ ALOGV("saving %zu bytes of header", bytes);
+ if (mHeaderOffset + bytes > sizeof(mHeader) || mHeaderComplete) {
+ ALOGW("header is too big, or header already received");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ } else {
+ memcpy(mHeader + mHeaderOffset, buffer, bytes);
+ mHeaderOffset += bytes; // will contain header size when finished receiving header
+ if (buffer[0] & 0x80) {
+ mHeaderComplete = true;
+ }
+ }
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
}
-#endif
-
if ((samples == 0) || !mEncoderWriteData) {
// called by the encoder because there's header data to save, but it's not the role
// of this component (unless WRITE_FLAC_HEADER_IN_FIRST_BUFFER is defined)
@@ -460,16 +477,23 @@
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-#ifdef WRITE_FLAC_HEADER_IN_FIRST_BUFFER
- if (!mWroteHeader) {
- ALOGI(" writing %d bytes of header on output port", mHeaderOffset);
+ if (mHeaderComplete && !mWroteHeader) {
+ ALOGV(" writing %d bytes of header on output port", mHeaderOffset);
memcpy(outHeader->pBuffer + outHeader->nOffset + outHeader->nFilledLen,
mHeader, mHeaderOffset);
outHeader->nFilledLen += mHeaderOffset;
- outHeader->nOffset += mHeaderOffset;
mWroteHeader = true;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG;
+ notifyFillBufferDone(outHeader);
+ outInfo = NULL;
+ outHeader = NULL;
+ // get the next buffer for the rest of the data
+ CHECK(!outQueue.empty());
+ outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
}
-#endif
// write encoded data
ALOGV(" writing %zu bytes of encoded data on output port", bytes);
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
index f4f0655..64a6b1e 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
@@ -22,10 +22,6 @@
#include "FLAC/stream_encoder.h"
-// use this symbol to have the first output buffer start with FLAC frame header so a dump of
-// all the output buffers can be opened as a .flac file
-//#define WRITE_FLAC_HEADER_IN_FIRST_BUFFER
-
namespace android {
struct SoftFlacEncoder : public SimpleSoftOMXComponent {
@@ -62,6 +58,8 @@
// should the data received by the callback be written to the output port
bool mEncoderWriteData;
bool mEncoderReturnedEncodedData;
+ bool mSawInputEOS;
+ bool mSentOutputEOS;
size_t mEncoderReturnedNbBytes;
OMX_TICKS mCurrentInputTimeStamp;
@@ -85,11 +83,10 @@
// before passing the input data to the encoder
FLAC__int32* mInputBufferPcm32;
-#ifdef WRITE_FLAC_HEADER_IN_FIRST_BUFFER
unsigned mHeaderOffset;
+ bool mHeaderComplete;
bool mWroteHeader;
char mHeader[128];
-#endif
DISALLOW_EVIL_CONSTRUCTORS(SoftFlacEncoder);
};
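
For context on the new mHeaderComplete flag: in the FLAC container every metadata block begins with a header byte whose most significant bit is the "last-metadata-block" flag, so the `buffer[0] & 0x80` test in onEncodedFlacAvailable() detects the final header block. A tiny illustrative check (not part of the patch):

    #include <stdint.h>
    #include <stddef.h>

    // True when this METADATA_BLOCK_HEADER is flagged as the last metadata
    // block, i.e. the stream header is complete and audio frames follow.
    static bool isLastMetadataBlock(const uint8_t *block, size_t len) {
        return block != NULL && len > 0 && (block[0] & 0x80) != 0;
    }
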
diff --git a/media/libstagefright/flac/dec/FLACDecoder.cpp b/media/libstagefright/flac/dec/FLACDecoder.cpp
index e0e9211..a2b6ab7 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.cpp
+++ b/media/libstagefright/flac/dec/FLACDecoder.cpp
@@ -423,22 +423,16 @@
short *outBuffer, size_t *outBufferLen) {
ALOGV("decodeOneFrame: input size(%zu)", inBufferLen);
- if (inBufferLen == 0) {
- ALOGV("decodeOneFrame: no input data");
- if (outBufferLen) {
- *outBufferLen = 0;
- }
- return OK;
- }
-
if (!mStreamInfoValid) {
ALOGW("decodeOneFrame: no streaminfo metadata block");
}
- status_t err = addDataToBuffer(inBuffer, inBufferLen);
- if (err != OK) {
- ALOGW("decodeOneFrame: addDataToBuffer returns error %d", err);
- return err;
+ if (inBufferLen != 0) {
+ status_t err = addDataToBuffer(inBuffer, inBufferLen);
+ if (err != OK) {
+ ALOGW("decodeOneFrame: addDataToBuffer returns error %d", err);
+ return err;
+ }
}
mWriteRequested = true;
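
The removed early-return means an empty input is now a legal way to ask decodeOneFrame() for output from data previously queued via addDataToBuffer(). Assuming the signature visible in this hunk (input buffer/length plus outBuffer/outBufferLen), a hypothetical drain loop could look like the following; the `decoder` object and the buffer capacity are placeholders, not code from this patch:

    // Hypothetical drain: keep decoding with no new input until nothing comes out.
    short pcmOut[8192];          // placeholder capacity
    size_t outLen = 0;
    do {
        status_t err = decoder->decodeOneFrame(nullptr /*inBuffer*/, 0 /*inBufferLen*/,
                                               pcmOut, &outLen);
        if (err != OK) {
            break;               // decoder reported an error
        }
        // ... consume outLen decoded samples ...
    } while (outLen > 0);
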
diff --git a/media/libstagefright/foundation/Android.bp b/media/libstagefright/foundation/Android.bp
index b343c16..f663542 100644
--- a/media/libstagefright/foundation/Android.bp
+++ b/media/libstagefright/foundation/Android.bp
@@ -24,10 +24,12 @@
header_libs: [
"libhardware_headers",
"libstagefright_foundation_headers",
+ "media_plugin_headers",
],
export_header_lib_headers: [
"libstagefright_foundation_headers",
+ "media_plugin_headers",
],
export_shared_lib_headers: [
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index ef8de1f..48a1224 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -320,7 +320,9 @@
MediaAnalyticsItem *mAnalyticsItem;
void initAnalyticsItem();
+ void updateAnalyticsItem();
void flushAnalyticsItem();
+ void updateEphemeralAnalytics(MediaAnalyticsItem *item);
sp<AMessage> mOutputFormat;
sp<AMessage> mInputFormat;
@@ -441,6 +443,63 @@
void onReleaseCrypto(const sp<AMessage>& msg);
+ // managing time-of-flight aka latency
+ typedef struct {
+ int64_t presentationUs;
+ int64_t startedNs;
+ } BufferFlightTiming_t;
+ std::deque<BufferFlightTiming_t> mBuffersInFlight;
+ Mutex mLatencyLock;
+ int64_t mLatencyUnknown; // buffers for which we couldn't calculate latency
+
+ void statsBufferSent(int64_t presentationUs);
+ void statsBufferReceived(int64_t presentationUs);
+
+ enum {
+ // the default shape of our latency histogram buckets
+ // XXX: should these be configurable in some way?
+ kLatencyHistBuckets = 20,
+ kLatencyHistWidth = 2000,
+ kLatencyHistFloor = 2000,
+
+ // how many samples are in the 'recent latency' histogram
+ // 300 frames = 5 sec @ 60fps or ~12 sec @ 24fps
+ kRecentLatencyFrames = 300,
+
+ // how we initialize mRecentSamples
+ kRecentSampleInvalid = -1,
+ };
+
+ int64_t mRecentSamples[kRecentLatencyFrames];
+ int mRecentHead;
+ Mutex mRecentLock;
+
+ class Histogram {
+ public:
+ Histogram() : mFloor(0), mWidth(0), mBelow(0), mAbove(0),
+ mMin(INT64_MAX), mMax(INT64_MIN), mSum(0), mCount(0),
+ mBucketCount(0), mBuckets(NULL) {};
+ ~Histogram() { clear(); };
+ void clear() { if (mBuckets != NULL) free(mBuckets); mBuckets = NULL; };
+ bool setup(int nbuckets, int64_t width, int64_t floor = 0);
+ void insert(int64_t sample);
+ int64_t getMin() const { return mMin; }
+ int64_t getMax() const { return mMax; }
+ int64_t getCount() const { return mCount; }
+ int64_t getSum() const { return mSum; }
+ int64_t getAvg() const { return mSum / (mCount == 0 ? 1 : mCount); }
+ std::string emit();
+ private:
+ int64_t mFloor, mCeiling, mWidth;
+ int64_t mBelow, mAbove;
+ int64_t mMin, mMax, mSum, mCount;
+
+ int mBucketCount;
+ int64_t *mBuckets;
+ };
+
+ Histogram mLatencyHist;
+
DISALLOW_EVIL_CONSTRUCTORS(MediaCodec);
};
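
MediaCodec.h above only declares the latency Histogram; the implementation presumably lives in MediaCodec.cpp, which is not shown here. As a reading aid, here is a minimal sketch of what setup()/insert() could look like, kept consistent with the declared members (mFloor, mWidth, mCeiling, mBelow, mAbove, the bucket array); it is an illustration, not the actual MediaCodec.cpp code:

    bool Histogram::setup(int nbuckets, int64_t width, int64_t floor) {
        if (nbuckets <= 0 || width <= 0) {
            return false;
        }
        clear();
        mBuckets = (int64_t *)calloc(nbuckets, sizeof(*mBuckets));
        if (mBuckets == NULL) {
            return false;
        }
        mBucketCount = nbuckets;
        mWidth = width;
        mFloor = floor;
        mCeiling = floor + nbuckets * width;
        mMin = INT64_MAX;
        mMax = INT64_MIN;
        mSum = mCount = mBelow = mAbove = 0;
        return true;
    }

    void Histogram::insert(int64_t sample) {
        // out-of-range samples are counted in mBelow/mAbove instead of a bucket
        mCount++;
        mSum += sample;
        if (sample < mMin) mMin = sample;
        if (sample > mMax) mMax = sample;
        if (sample < mFloor) {
            mBelow++;
        } else if (sample >= mCeiling) {
            mAbove++;
        } else {
            mBuckets[(sample - mFloor) / mWidth]++;
        }
    }
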
diff --git a/media/mtp/Android.bp b/media/mtp/Android.bp
index acea373..2cf9b82 100644
--- a/media/mtp/Android.bp
+++ b/media/mtp/Android.bp
@@ -49,7 +49,6 @@
shared_libs: [
"libasyncio",
"libbase",
- "libutils",
"liblog",
"libusbhost",
],
diff --git a/media/mtp/IMtpDatabase.h b/media/mtp/IMtpDatabase.h
index d09a984..1245092 100644
--- a/media/mtp/IMtpDatabase.h
+++ b/media/mtp/IMtpDatabase.h
@@ -24,6 +24,7 @@
class MtpDataPacket;
class MtpProperty;
class MtpObjectInfo;
+class MtpStringBuffer;
class IMtpDatabase {
public:
@@ -86,7 +87,7 @@
virtual void* getThumbnail(MtpObjectHandle handle, size_t& outThumbSize) = 0;
virtual MtpResponseCode getObjectFilePath(MtpObjectHandle handle,
- MtpString& outFilePath,
+ MtpStringBuffer& outFilePath,
int64_t& outFileLength,
MtpObjectFormat& outFormat) = 0;
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
index d1c71d7..992dc9a 100644
--- a/media/mtp/MtpDataPacket.cpp
+++ b/media/mtp/MtpDataPacket.cpp
@@ -19,6 +19,7 @@
#include "MtpDataPacket.h"
#include <algorithm>
+#include <errno.h>
#include <fcntl.h>
#include <stdio.h>
#include <sys/types.h>
@@ -129,7 +130,7 @@
delete result;
return NULL;
}
- result->push(value);
+ result->push_back(value);
}
return result;
}
@@ -145,7 +146,7 @@
delete result;
return NULL;
}
- result->push(value);
+ result->push_back(value);
}
return result;
}
@@ -161,7 +162,7 @@
delete result;
return NULL;
}
- result->push(value);
+ result->push_back(value);
}
return result;
}
@@ -177,7 +178,7 @@
delete result;
return NULL;
}
- result->push(value);
+ result->push_back(value);
}
return result;
}
@@ -193,7 +194,7 @@
delete result;
return NULL;
}
- result->push(value);
+ result->push_back(value);
}
return result;
}
@@ -209,7 +210,7 @@
delete result;
return NULL;
}
- result->push(value);
+ result->push_back(value);
}
return result;
}
@@ -225,7 +226,7 @@
delete result;
return NULL;
}
- result->push(value);
+ result->push_back(value);
}
return result;
}
@@ -241,7 +242,7 @@
delete result;
return NULL;
}
- result->push(value);
+ result->push_back(value);
}
return result;
}
diff --git a/media/mtp/MtpDebug.h b/media/mtp/MtpDebug.h
index 5b53e31..8d48273 100644
--- a/media/mtp/MtpDebug.h
+++ b/media/mtp/MtpDebug.h
@@ -18,10 +18,10 @@
#define _MTP_DEBUG_H
// #define LOG_NDEBUG 0
-#include <utils/Log.h>
-
#include "MtpTypes.h"
+#include <log/log.h>
+
namespace android {
class MtpDebug {
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
index 0bf7854..993797a 100644
--- a/media/mtp/MtpDevice.cpp
+++ b/media/mtp/MtpDevice.cpp
@@ -262,7 +262,7 @@
MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i];
MtpProperty* property = getDevicePropDesc(propCode);
if (property)
- mDeviceProperties.push(property);
+ mDeviceProperties.push_back(property);
}
}
}
@@ -327,7 +327,7 @@
}
bool MtpDevice::openSession() {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mSessionID = 0;
mTransactionID = 0;
@@ -353,7 +353,7 @@
}
MtpDeviceInfo* MtpDevice::getDeviceInfo() {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
if (!sendRequest(MTP_OPERATION_GET_DEVICE_INFO))
@@ -372,7 +372,7 @@
}
MtpStorageIDList* MtpDevice::getStorageIDs() {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
if (!sendRequest(MTP_OPERATION_GET_STORAGE_IDS))
@@ -387,7 +387,7 @@
}
MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, storageID);
@@ -408,7 +408,7 @@
MtpObjectHandleList* MtpDevice::getObjectHandles(MtpStorageID storageID,
MtpObjectFormat format, MtpObjectHandle parent) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, storageID);
@@ -426,7 +426,7 @@
}
MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
// FIXME - we might want to add some caching here
@@ -448,7 +448,7 @@
}
void* MtpDevice::getThumbnail(MtpObjectHandle handle, int& outLength) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, handle);
@@ -463,7 +463,7 @@
}
MtpObjectHandle MtpDevice::sendObjectInfo(MtpObjectInfo* info) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
MtpObjectHandle parent = info->mParent;
@@ -517,7 +517,7 @@
}
bool MtpDevice::sendObject(MtpObjectHandle handle, int size, int srcFD) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
if (mLastSendObjectInfoTransactionID + 1 != mTransactionID ||
mLastSendObjectInfoObjectHandle != handle) {
@@ -537,7 +537,7 @@
}
bool MtpDevice::deleteObject(MtpObjectHandle handle) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, handle);
@@ -572,7 +572,7 @@
}
MtpObjectPropertyList* MtpDevice::getObjectPropsSupported(MtpObjectFormat format) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, format);
@@ -589,7 +589,7 @@
}
MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, code);
@@ -609,7 +609,7 @@
}
MtpProperty* MtpDevice::getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, code);
@@ -633,7 +633,7 @@
if (property == nullptr)
return false;
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, handle);
@@ -684,7 +684,7 @@
ReadObjectCallback callback,
const uint32_t* expectedLength,
void* clientData) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, handle);
@@ -806,7 +806,7 @@
uint32_t *writtenSize,
ReadObjectCallback callback,
void* clientData) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, handle);
@@ -828,7 +828,7 @@
uint32_t *writtenSize,
ReadObjectCallback callback,
void* clientData) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
mRequest.reset();
mRequest.setParameter(1, handle);
@@ -908,7 +908,7 @@
}
int MtpDevice::submitEventRequest() {
- if (mEventMutex.tryLock()) {
+ if (!mEventMutex.try_lock()) {
// An event is being reaped on another thread.
return -1;
}
@@ -916,7 +916,7 @@
// An event request was submitted, but no reapEventRequest called so far.
return -1;
}
- Mutex::Autolock autoLock(mEventMutexForInterrupt);
+ std::lock_guard<std::mutex> lg(mEventMutexForInterrupt);
mEventPacket.sendRequest(mRequestIntr);
const int currentHandle = ++mCurrentEventHandle;
mProcessingEvent = true;
@@ -925,7 +925,7 @@
}
int MtpDevice::reapEventRequest(int handle, uint32_t (*parameters)[3]) {
- Mutex::Autolock autoLock(mEventMutex);
+ std::lock_guard<std::mutex> lg(mEventMutex);
if (!mProcessingEvent || mCurrentEventHandle != handle || !parameters) {
return -1;
}
@@ -940,7 +940,7 @@
}
void MtpDevice::discardEventRequest(int handle) {
- Mutex::Autolock autoLock(mEventMutexForInterrupt);
+ std::lock_guard<std::mutex> lg(mEventMutexForInterrupt);
if (mCurrentEventHandle != handle) {
return;
}
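
One subtlety in the mutex conversion above: android::Mutex::tryLock() returns a status_t that is 0 (NO_ERROR) on success, while std::mutex::try_lock() returns a bool that is true on success, hence the negated condition in submitEventRequest(). A small standalone illustration:

    #include <mutex>

    static std::mutex gEventMutex;

    int submitSketch() {
        // try_lock() is true when the lock was acquired; failure means another
        // thread currently owns it, mirroring the old `if (tryLock())` early-out.
        if (!gEventMutex.try_lock()) {
            return -1;
        }
        // ... submit the event request while holding the lock ...
        gEventMutex.unlock();
        return 0;
    }
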
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
index a9a3e0e..8cf9e5e 100644
--- a/media/mtp/MtpDevice.h
+++ b/media/mtp/MtpDevice.h
@@ -23,7 +23,7 @@
#include "MtpResponsePacket.h"
#include "MtpTypes.h"
-#include <utils/threads.h>
+#include <mutex>
struct usb_device;
struct usb_request;
@@ -67,9 +67,9 @@
MtpObjectHandle mLastSendObjectInfoObjectHandle;
// to ensure only one MTP transaction at a time
- Mutex mMutex;
- Mutex mEventMutex;
- Mutex mEventMutexForInterrupt;
+ std::mutex mMutex;
+ std::mutex mEventMutex;
+ std::mutex mEventMutexForInterrupt;
// Remember the device's packet division mode.
UrbPacketDivisionMode mPacketDivisionMode;
diff --git a/media/mtp/MtpEventPacket.h b/media/mtp/MtpEventPacket.h
index 3f3b6a3..94d6ebf 100644
--- a/media/mtp/MtpEventPacket.h
+++ b/media/mtp/MtpEventPacket.h
@@ -20,6 +20,8 @@
#include "MtpPacket.h"
#include "mtp.h"
+#include <errno.h>
+
class IMtpHandle;
namespace android {
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
index d47c91d..9842b28 100644
--- a/media/mtp/MtpPacket.h
+++ b/media/mtp/MtpPacket.h
@@ -19,6 +19,7 @@
#include <android-base/macros.h>
+#include "MtpDebug.h"
#include "MtpTypes.h"
struct usb_device;
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
index 039e4f5..5c02a0d 100644
--- a/media/mtp/MtpProperty.cpp
+++ b/media/mtp/MtpProperty.cpp
@@ -18,6 +18,10 @@
#include <inttypes.h>
#include <cutils/compiler.h>
+#include <iomanip>
+#include <sstream>
+#include <string>
+
#include "MtpDataPacket.h"
#include "MtpDebug.h"
#include "MtpProperty.h"
@@ -336,7 +340,7 @@
}
void MtpProperty::print() {
- MtpString buffer;
+ std::string buffer;
bool deviceProp = isDeviceProperty();
if (deviceProp)
ALOGI(" %s (%04X)", MtpDebug::getDevicePropCodeName(mCode), mCode);
@@ -346,11 +350,11 @@
ALOGI(" writeable %s", (mWriteable ? "true" : "false"));
buffer = " default value: ";
print(mDefaultValue, buffer);
- ALOGI("%s", (const char *)buffer);
+ ALOGI("%s", buffer.c_str());
if (deviceProp) {
buffer = " current value: ";
print(mCurrentValue, buffer);
- ALOGI("%s", (const char *)buffer);
+ ALOGI("%s", buffer.c_str());
}
switch (mFormFlag) {
case kFormNone:
@@ -363,7 +367,7 @@
buffer += ", ";
print(mStepSize, buffer);
buffer += ")";
- ALOGI("%s", (const char *)buffer);
+ ALOGI("%s", buffer.c_str());
break;
case kFormEnum:
buffer = " Enum { ";
@@ -372,7 +376,7 @@
buffer += " ";
}
buffer += "}";
- ALOGI("%s", (const char *)buffer);
+ ALOGI("%s", buffer.c_str());
break;
case kFormDateTime:
ALOGI(" DateTime\n");
@@ -383,42 +387,47 @@
}
}
-void MtpProperty::print(MtpPropertyValue& value, MtpString& buffer) {
+void MtpProperty::print(MtpPropertyValue& value, std::string& buffer) {
+ std::ostringstream s;
switch (mType) {
case MTP_TYPE_INT8:
- buffer.appendFormat("%d", value.u.i8);
+ buffer += std::to_string(value.u.i8);
break;
case MTP_TYPE_UINT8:
- buffer.appendFormat("%d", value.u.u8);
+ buffer += std::to_string(value.u.u8);
break;
case MTP_TYPE_INT16:
- buffer.appendFormat("%d", value.u.i16);
+ buffer += std::to_string(value.u.i16);
break;
case MTP_TYPE_UINT16:
- buffer.appendFormat("%d", value.u.u16);
+ buffer += std::to_string(value.u.u16);
break;
case MTP_TYPE_INT32:
- buffer.appendFormat("%d", value.u.i32);
+ buffer += std::to_string(value.u.i32);
break;
case MTP_TYPE_UINT32:
- buffer.appendFormat("%d", value.u.u32);
+ buffer += std::to_string(value.u.u32);
break;
case MTP_TYPE_INT64:
- buffer.appendFormat("%" PRId64, value.u.i64);
+ buffer += std::to_string(value.u.i64);
break;
case MTP_TYPE_UINT64:
- buffer.appendFormat("%" PRIu64, value.u.u64);
+ buffer += std::to_string(value.u.u64);
break;
case MTP_TYPE_INT128:
- buffer.appendFormat("%08X%08X%08X%08X", value.u.i128[0], value.u.i128[1],
- value.u.i128[2], value.u.i128[3]);
+ for (auto i : value.u.i128) {
+ s << std::hex << std::setfill('0') << std::uppercase << i;
+ }
+ buffer += s.str();
break;
case MTP_TYPE_UINT128:
- buffer.appendFormat("%08X%08X%08X%08X", value.u.u128[0], value.u.u128[1],
- value.u.u128[2], value.u.u128[3]);
+ for (auto i : value.u.u128) {
+ s << std::hex << std::setfill('0') << std::uppercase << i;
+ }
+ buffer += s.str();
break;
case MTP_TYPE_STR:
- buffer.appendFormat("%s", value.str);
+ buffer += value.str;
break;
default:
ALOGE("unsupported type for MtpProperty::print\n");
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
index 03c08e1..bfd5f7f 100644
--- a/media/mtp/MtpProperty.h
+++ b/media/mtp/MtpProperty.h
@@ -19,6 +19,8 @@
#include "MtpTypes.h"
+#include <string>
+
namespace android {
class MtpDataPacket;
@@ -97,7 +99,6 @@
void setFormDateTime();
void print();
- void print(MtpPropertyValue& value, MtpString& buffer);
inline bool isDeviceProperty() const {
return ( ((mCode & 0xF000) == 0x5000)
@@ -110,6 +111,7 @@
MtpPropertyValue* readArrayValues(MtpDataPacket& packet, uint32_t& length);
void writeArrayValues(MtpDataPacket& packet,
MtpPropertyValue* values, uint32_t length);
+ void print(MtpPropertyValue& value, std::string& buffer);
};
}; // namespace android
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index e4ac8b0..86d59dd 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -102,10 +102,10 @@
};
MtpServer::MtpServer(IMtpDatabase* database, int controlFd, bool ptp,
- const MtpString& deviceInfoManufacturer,
- const MtpString& deviceInfoModel,
- const MtpString& deviceInfoDeviceVersion,
- const MtpString& deviceInfoSerialNumber)
+ const char *deviceInfoManufacturer,
+ const char *deviceInfoModel,
+ const char *deviceInfoDeviceVersion,
+ const char *deviceInfoSerialNumber)
: mDatabase(database),
mPtp(ptp),
mDeviceInfoManufacturer(deviceInfoManufacturer),
@@ -132,14 +132,14 @@
}
void MtpServer::addStorage(MtpStorage* storage) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
- mStorages.push(storage);
+ mStorages.push_back(storage);
sendStoreAdded(storage->getStorageID());
}
void MtpServer::removeStorage(MtpStorage* storage) {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
auto iter = std::find(mStorages.begin(), mStorages.end(), storage);
if (iter != mStorages.end()) {
sendStoreRemoved(storage->getStorageID());
@@ -284,10 +284,10 @@
}
}
-void MtpServer::addEditObject(MtpObjectHandle handle, MtpString& path,
+void MtpServer::addEditObject(MtpObjectHandle handle, MtpStringBuffer& path,
uint64_t size, MtpObjectFormat format, int fd) {
ObjectEdit* edit = new ObjectEdit(handle, path, size, format, fd);
- mObjectEditList.add(edit);
+ mObjectEditList.push_back(edit);
}
MtpServer::ObjectEdit* MtpServer::getEditObject(MtpObjectHandle handle) {
@@ -305,7 +305,7 @@
ObjectEdit* edit = mObjectEditList[i];
if (edit->mHandle == handle) {
delete edit;
- mObjectEditList.removeAt(i);
+ mObjectEditList.erase(mObjectEditList.begin() + i);
return;
}
}
@@ -318,7 +318,7 @@
bool MtpServer::handleRequest() {
- Mutex::Autolock autoLock(mMutex);
+ std::lock_guard<std::mutex> lg(mMutex);
MtpOperationCode operation = mRequest.getOperationCode();
MtpResponseCode response;
@@ -769,7 +769,7 @@
if (mRequest.getParameterCount() < 1)
return MTP_RESPONSE_INVALID_PARAMETER;
MtpObjectHandle handle = mRequest.getParameter(1);
- MtpString pathBuf;
+ MtpStringBuffer pathBuf;
int64_t fileLength;
MtpObjectFormat format;
int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format);
@@ -855,7 +855,7 @@
// standard GetPartialObject
length = mRequest.getParameter(3);
}
- MtpString pathBuf;
+ MtpStringBuffer pathBuf;
int64_t fileLength;
MtpObjectFormat format;
int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format);
@@ -892,7 +892,7 @@
}
MtpResponseCode MtpServer::doSendObjectInfo() {
- MtpString path;
+ MtpStringBuffer path;
uint16_t temp16;
uint32_t temp32;
@@ -906,7 +906,7 @@
// special case the root
if (parent == MTP_PARENT_ROOT) {
- path = storage->getPath();
+ path.set(storage->getPath());
parent = 0;
} else {
int64_t length;
@@ -938,7 +938,7 @@
if (!mData.getUInt32(temp32)) return MTP_RESPONSE_INVALID_PARAMETER; // sequence number
MtpStringBuffer name, created, modified;
if (!mData.getString(name)) return MTP_RESPONSE_INVALID_PARAMETER; // file name
- if (name.getCharCount() == 0) {
+ if (name.isEmpty()) {
ALOGE("empty name");
return MTP_RESPONSE_INVALID_PARAMETER;
}
@@ -952,8 +952,8 @@
modifiedTime = 0;
if (path[path.size() - 1] != '/')
- path += "/";
- path += (const char *)name;
+ path.append("/");
+ path.append(name);
// check space first
if (mSendObjectFileSize > storage->getFreeSpace())
@@ -1006,10 +1006,10 @@
MtpObjectHandle parent = mRequest.getParameter(3);
if (!storage)
return MTP_RESPONSE_INVALID_STORAGE_ID;
- MtpString path;
+ MtpStringBuffer path;
MtpResponseCode result;
- MtpString fromPath;
+ MtpStringBuffer fromPath;
int64_t fileLength;
MtpObjectFormat format;
MtpObjectInfo info(objectHandle);
@@ -1022,7 +1022,7 @@
// special case the root
if (parent == 0) {
- path = storage->getPath();
+ path.set(storage->getPath());
} else {
int64_t parentLength;
MtpObjectFormat parentFormat;
@@ -1034,8 +1034,8 @@
}
if (path[path.size() - 1] != '/')
- path += "/";
- path += info.mName;
+ path.append("/");
+ path.append(info.mName);
result = mDatabase->beginMoveObject(objectHandle, parent, storageID);
if (result != MTP_RESPONSE_OK)
@@ -1085,9 +1085,9 @@
MtpObjectHandle parent = mRequest.getParameter(3);
if (!storage)
return MTP_RESPONSE_INVALID_STORAGE_ID;
- MtpString path;
+ MtpStringBuffer path;
- MtpString fromPath;
+ MtpStringBuffer fromPath;
int64_t fileLength;
MtpObjectFormat format;
MtpObjectInfo info(objectHandle);
@@ -1100,7 +1100,7 @@
// special case the root
if (parent == 0) {
- path = storage->getPath();
+ path.set(storage->getPath());
} else {
int64_t parentLength;
MtpObjectFormat parentFormat;
@@ -1116,8 +1116,8 @@
return MTP_RESPONSE_STORAGE_FULL;
if (path[path.size() - 1] != '/')
- path += "/";
- path += info.mName;
+ path.append("/");
+ path.append(info.mName);
MtpObjectHandle handle = mDatabase->beginCopyObject(objectHandle, parent, storageID);
if (handle == kInvalidObjectHandle) {
@@ -1264,7 +1264,7 @@
// FIXME - support deleting all objects if handle is 0xFFFFFFFF
// FIXME - implement deleting objects by format
- MtpString filePath;
+ MtpStringBuffer filePath;
int64_t fileLength;
int result = mDatabase->getObjectFilePath(handle, filePath, fileLength, format);
if (result != MTP_RESPONSE_OK)
@@ -1414,7 +1414,7 @@
return MTP_RESPONSE_GENERAL_ERROR;
}
- MtpString path;
+ MtpStringBuffer path;
int64_t fileLength;
MtpObjectFormat format;
int result = mDatabase->getObjectFilePath(handle, path, fileLength, format);
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
index e633c52..f6939d7 100644
--- a/media/mtp/MtpServer.h
+++ b/media/mtp/MtpServer.h
@@ -21,14 +21,14 @@
#include "MtpDataPacket.h"
#include "MtpResponsePacket.h"
#include "MtpEventPacket.h"
+#include "MtpStringBuffer.h"
#include "mtp.h"
#include "MtpUtils.h"
#include "IMtpHandle.h"
-#include <utils/threads.h>
-#include <queue>
#include <memory>
#include <mutex>
+#include <queue>
namespace android {
@@ -44,13 +44,13 @@
bool mPtp;
// Manufacturer to report in DeviceInfo
- MtpString mDeviceInfoManufacturer;
+ MtpStringBuffer mDeviceInfoManufacturer;
// Model to report in DeviceInfo
- MtpString mDeviceInfoModel;
+ MtpStringBuffer mDeviceInfoModel;
// Device version to report in DeviceInfo
- MtpString mDeviceInfoDeviceVersion;
+ MtpStringBuffer mDeviceInfoDeviceVersion;
// Serial number to report in DeviceInfo
- MtpString mDeviceInfoSerialNumber;
+ MtpStringBuffer mDeviceInfoSerialNumber;
// current session ID
MtpSessionID mSessionID;
@@ -70,18 +70,18 @@
// handle for new object, set by SendObjectInfo and used by SendObject
MtpObjectHandle mSendObjectHandle;
MtpObjectFormat mSendObjectFormat;
- MtpString mSendObjectFilePath;
+ MtpStringBuffer mSendObjectFilePath;
size_t mSendObjectFileSize;
time_t mSendObjectModifiedTime;
- Mutex mMutex;
+ std::mutex mMutex;
// represents an MTP object that is being edited using the android extensions
// for direct editing (BeginEditObject, SendPartialObject, TruncateObject and EndEditObject)
class ObjectEdit {
public:
MtpObjectHandle mHandle;
- MtpString mPath;
+ MtpStringBuffer mPath;
uint64_t mSize;
MtpObjectFormat mFormat;
int mFD;
@@ -95,14 +95,14 @@
close(mFD);
}
};
- Vector<ObjectEdit*> mObjectEditList;
+ std::vector<ObjectEdit*> mObjectEditList;
public:
MtpServer(IMtpDatabase* database, int controlFd, bool ptp,
- const MtpString& deviceInfoManufacturer,
- const MtpString& deviceInfoModel,
- const MtpString& deviceInfoDeviceVersion,
- const MtpString& deviceInfoSerialNumber);
+ const char *deviceInfoManufacturer,
+ const char *deviceInfoModel,
+ const char *deviceInfoDeviceVersion,
+ const char *deviceInfoSerialNumber);
virtual ~MtpServer();
MtpStorage* getStorage(MtpStorageID id);
@@ -122,7 +122,7 @@
void sendStoreRemoved(MtpStorageID id);
void sendEvent(MtpEventCode code, uint32_t param1);
- void addEditObject(MtpObjectHandle handle, MtpString& path,
+ void addEditObject(MtpObjectHandle handle, MtpStringBuffer& path,
uint64_t size, MtpObjectFormat format, int fd);
ObjectEdit* getEditObject(MtpObjectHandle handle);
void removeEditObject(MtpObjectHandle handle);
diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h
index cb7e333..e9518dd 100644
--- a/media/mtp/MtpStorage.h
+++ b/media/mtp/MtpStorage.h
@@ -17,6 +17,7 @@
#ifndef _MTP_STORAGE_H
#define _MTP_STORAGE_H
+#include "MtpStringBuffer.h"
#include "MtpTypes.h"
#include "mtp.h"
@@ -28,8 +29,8 @@
private:
MtpStorageID mStorageID;
- MtpString mFilePath;
- MtpString mDescription;
+ MtpStringBuffer mFilePath;
+ MtpStringBuffer mDescription;
uint64_t mMaxCapacity;
uint64_t mMaxFileSize;
bool mRemovable;
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
index df04694..cd379bf 100644
--- a/media/mtp/MtpStringBuffer.cpp
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -16,168 +16,97 @@
#define LOG_TAG "MtpStringBuffer"
-#include <string.h>
+#include <codecvt>
+#include <locale>
+#include <string>
+#include <vector>
#include "MtpDataPacket.h"
#include "MtpStringBuffer.h"
-namespace android {
+namespace {
-MtpStringBuffer::MtpStringBuffer()
- : mCharCount(0),
- mByteCount(1)
-{
- mBuffer[0] = 0;
+std::wstring_convert<std::codecvt_utf8_utf16<char16_t>,char16_t> gConvert;
+
+static std::string utf16ToUtf8(std::u16string input_str) {
+ return gConvert.to_bytes(input_str);
}
+static std::u16string utf8ToUtf16(std::string input_str) {
+ return gConvert.from_bytes(input_str);
+}
+
+} // namespace
+
+namespace android {
+
MtpStringBuffer::MtpStringBuffer(const char* src)
- : mCharCount(0),
- mByteCount(1)
{
set(src);
}
MtpStringBuffer::MtpStringBuffer(const uint16_t* src)
- : mCharCount(0),
- mByteCount(1)
{
set(src);
}
MtpStringBuffer::MtpStringBuffer(const MtpStringBuffer& src)
- : mCharCount(src.mCharCount),
- mByteCount(src.mByteCount)
{
- memcpy(mBuffer, src.mBuffer, mByteCount);
-}
-
-
-MtpStringBuffer::~MtpStringBuffer() {
+ mString = src.mString;
}
void MtpStringBuffer::set(const char* src) {
- // count the characters
- int count = 0;
- char ch;
- char* dest = (char*)mBuffer;
-
- while ((ch = *src++) != 0 && count < MTP_STRING_MAX_CHARACTER_NUMBER) {
- if ((ch & 0x80) == 0) {
- // single byte character
- *dest++ = ch;
- } else if ((ch & 0xE0) == 0xC0) {
- // two byte character
- char ch1 = *src++;
- if (! ch1) {
- // last character was truncated, so ignore last byte
- break;
- }
-
- *dest++ = ch;
- *dest++ = ch1;
- } else if ((ch & 0xF0) == 0xE0) {
- // 3 byte char
- char ch1 = *src++;
- if (! ch1) {
- // last character was truncated, so ignore last byte
- break;
- }
- char ch2 = *src++;
- if (! ch2) {
- // last character was truncated, so ignore last byte
- break;
- }
-
- *dest++ = ch;
- *dest++ = ch1;
- *dest++ = ch2;
- }
- count++;
- }
-
- *dest++ = 0;
- mByteCount = dest - (char*)mBuffer;
- mCharCount = count;
+ mString = std::string(src);
}
void MtpStringBuffer::set(const uint16_t* src) {
- int count = 0;
- uint16_t ch;
- uint8_t* dest = mBuffer;
-
- while ((ch = *src++) != 0 && count < MTP_STRING_MAX_CHARACTER_NUMBER) {
- if (ch >= 0x0800) {
- *dest++ = (uint8_t)(0xE0 | (ch >> 12));
- *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
- *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
- } else if (ch >= 0x80) {
- *dest++ = (uint8_t)(0xC0 | (ch >> 6));
- *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
- } else {
- *dest++ = ch;
- }
- count++;
- }
- *dest++ = 0;
- mCharCount = count;
- mByteCount = dest - mBuffer;
+ mString = utf16ToUtf8(std::u16string((const char16_t*)src));
}
bool MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
uint8_t count;
if (!packet->getUInt8(count))
return false;
+ if (count == 0)
+ return true;
- uint8_t* dest = mBuffer;
+ std::vector<char16_t> buffer(count);
for (int i = 0; i < count; i++) {
uint16_t ch;
-
if (!packet->getUInt16(ch))
return false;
- if (ch >= 0x0800) {
- *dest++ = (uint8_t)(0xE0 | (ch >> 12));
- *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
- *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
- } else if (ch >= 0x80) {
- *dest++ = (uint8_t)(0xC0 | (ch >> 6));
- *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
- } else {
- *dest++ = ch;
- }
+ buffer[i] = ch;
}
- *dest++ = 0;
- mCharCount = count;
- mByteCount = dest - mBuffer;
+ if (buffer[count-1] != '\0') {
+ ALOGE("Mtp string not null terminated\n");
+ return false;
+ }
+ mString = utf16ToUtf8(std::u16string(buffer.data()));
return true;
}
void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
- int count = mCharCount;
- const uint8_t* src = mBuffer;
- packet->putUInt8(count > 0 ? count + 1 : 0);
+ std::u16string src16 = utf8ToUtf16(mString);
+ int count = src16.length();
- // expand utf8 to 16 bit chars
- for (int i = 0; i < count; i++) {
- uint16_t ch;
- uint16_t ch1 = *src++;
- if ((ch1 & 0x80) == 0) {
- // single byte character
- ch = ch1;
- } else if ((ch1 & 0xE0) == 0xC0) {
- // two byte character
- uint16_t ch2 = *src++;
- ch = ((ch1 & 0x1F) << 6) | (ch2 & 0x3F);
- } else {
- // three byte character
- uint16_t ch2 = *src++;
- uint16_t ch3 = *src++;
- ch = ((ch1 & 0x0F) << 12) | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F);
+ if (count == 0) {
+ packet->putUInt8(0);
+ return;
+ }
+ packet->putUInt8(std::min(count + 1, MTP_STRING_MAX_CHARACTER_NUMBER));
+
+ int i = 0;
+ for (char16_t &c : src16) {
+ if (i == MTP_STRING_MAX_CHARACTER_NUMBER - 1) {
+ // Leave a slot for null termination.
+ ALOGI("Mtp truncating long string\n");
+ break;
}
- packet->putUInt16(ch);
+ packet->putUInt16(c);
+ i++;
}
// only terminate with zero if string is not empty
- if (count > 0)
- packet->putUInt16(0);
+ packet->putUInt16(0);
}
} // namespace android
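
For reference on the conversion helpers introduced above: std::wstring_convert with std::codecvt_utf8_utf16<char16_t> round-trips between a UTF-8 std::string and a UTF-16 std::u16string (both facilities still exist but are deprecated as of C++17). A standalone usage sketch:

    #include <codecvt>
    #include <locale>
    #include <string>

    int main() {
        std::wstring_convert<std::codecvt_utf8_utf16<char16_t>, char16_t> convert;
        const std::string utf8In = "caf\xc3\xa9";              // "café" in UTF-8 (5 bytes)
        std::u16string utf16 = convert.from_bytes(utf8In);     // UTF-8 -> UTF-16 (4 code units)
        std::string utf8Out = convert.to_bytes(utf16);         // UTF-16 -> UTF-8
        return (utf8Out == utf8In && utf16.size() == 4) ? 0 : 1;
    }
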
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
index bcf2a48..4cec58a 100644
--- a/media/mtp/MtpStringBuffer.h
+++ b/media/mtp/MtpStringBuffer.h
@@ -17,7 +17,9 @@
#ifndef _MTP_STRING_BUFFER_H
#define _MTP_STRING_BUFFER_H
+#include <log/log.h>
#include <stdint.h>
+#include <string>
// Max Character number of a MTP String
#define MTP_STRING_MAX_CHARACTER_NUMBER 255
@@ -30,31 +32,39 @@
class MtpStringBuffer {
private:
- // mBuffer contains string in UTF8 format
- // maximum 3 bytes/character, with 1 extra for zero termination
- uint8_t mBuffer[MTP_STRING_MAX_CHARACTER_NUMBER * 3 + 1];
- int mCharCount;
- int mByteCount;
+ std::string mString;
public:
- MtpStringBuffer();
+ MtpStringBuffer() {};
+ ~MtpStringBuffer() {};
+
explicit MtpStringBuffer(const char* src);
explicit MtpStringBuffer(const uint16_t* src);
MtpStringBuffer(const MtpStringBuffer& src);
- virtual ~MtpStringBuffer();
void set(const char* src);
void set(const uint16_t* src);
+ inline void append(const char* other);
+ inline void append(MtpStringBuffer &other);
+
bool readFromPacket(MtpDataPacket* packet);
void writeToPacket(MtpDataPacket* packet) const;
- inline int getCharCount() const { return mCharCount; }
- inline int getByteCount() const { return mByteCount; }
+ inline bool isEmpty() const { return mString.empty(); }
+ inline int size() const { return mString.length(); }
- inline operator const char*() const { return (const char *)mBuffer; }
+ inline operator const char*() const { return mString.c_str(); }
};
+inline void MtpStringBuffer::append(const char* other) {
+ mString += other;
+}
+
+inline void MtpStringBuffer::append(MtpStringBuffer &other) {
+ mString += other.mString;
+}
+
}; // namespace android
#endif // _MTP_STRING_BUFFER_H
diff --git a/media/mtp/MtpTypes.h b/media/mtp/MtpTypes.h
index c749c66..e6ac23c 100644
--- a/media/mtp/MtpTypes.h
+++ b/media/mtp/MtpTypes.h
@@ -18,8 +18,7 @@
#define _MTP_TYPES_H
#include <stdint.h>
-#include "utils/String8.h"
-#include "utils/Vector.h"
+#include <vector>
namespace android {
@@ -51,18 +50,18 @@
class MtpDevice;
class MtpProperty;
-typedef Vector<MtpStorage *> MtpStorageList;
-typedef Vector<MtpDevice*> MtpDeviceList;
-typedef Vector<MtpProperty*> MtpPropertyList;
+typedef std::vector<MtpStorage *> MtpStorageList;
+typedef std::vector<MtpDevice*> MtpDeviceList;
+typedef std::vector<MtpProperty*> MtpPropertyList;
-typedef Vector<uint8_t> UInt8List;
-typedef Vector<uint16_t> UInt16List;
-typedef Vector<uint32_t> UInt32List;
-typedef Vector<uint64_t> UInt64List;
-typedef Vector<int8_t> Int8List;
-typedef Vector<int16_t> Int16List;
-typedef Vector<int32_t> Int32List;
-typedef Vector<int64_t> Int64List;
+typedef std::vector<uint8_t> UInt8List;
+typedef std::vector<uint16_t> UInt16List;
+typedef std::vector<uint32_t> UInt32List;
+typedef std::vector<uint64_t> UInt64List;
+typedef std::vector<int8_t> Int8List;
+typedef std::vector<int16_t> Int16List;
+typedef std::vector<int32_t> Int32List;
+typedef std::vector<int64_t> Int64List;
typedef UInt16List MtpObjectPropertyList;
typedef UInt16List MtpDevicePropertyList;
@@ -71,8 +70,6 @@
typedef UInt16List MtpObjectPropertyList;
typedef UInt32List MtpStorageIDList;
-typedef String8 MtpString;
-
enum UrbPacketDivisionMode {
// First packet only contains a header.
FIRST_PACKET_ONLY_HEADER,
diff --git a/media/mtp/tests/MtpFfsHandle_test.cpp b/media/mtp/tests/MtpFfsHandle_test.cpp
index 2174893..d11fe07 100644
--- a/media/mtp/tests/MtpFfsHandle_test.cpp
+++ b/media/mtp/tests/MtpFfsHandle_test.cpp
@@ -23,7 +23,7 @@
#include <random>
#include <string>
#include <unistd.h>
-#include <utils/Log.h>
+#include <log/log.h>
#include "MtpDescriptors.h"
#include "MtpFfsHandle.h"
diff --git a/media/mtp/tests/PosixAsyncIO_test.cpp b/media/mtp/tests/PosixAsyncIO_test.cpp
index 63b9a35..9e337aa 100644
--- a/media/mtp/tests/PosixAsyncIO_test.cpp
+++ b/media/mtp/tests/PosixAsyncIO_test.cpp
@@ -20,7 +20,7 @@
#include <gtest/gtest.h>
#include <string>
#include <unistd.h>
-#include <utils/Log.h>
+#include <log/log.h>
#include "PosixAsyncIO.h"
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 99cf5d5..19df760 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -72,14 +72,15 @@
AIMAGE_FORMAT_RGBA_8888 = 0x1,
/**
- * 32 bits RGBX format, 8 bits for each of the four channels.
+ * 32 bits RGBX format, 8 bits for each of the four channels. The values
+ * of the alpha channel bits are ignored (image is assumed to be opaque).
*
* <p>
* Corresponding formats:
* <ul>
* <li>AHardwareBuffer: AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM</li>
* <li>Vulkan: VK_FORMAT_R8G8B8A8_UNORM</li>
- * <li>OpenGL ES: GL_RGBA8</li>
+ * <li>OpenGL ES: GL_RGB8</li>
* </ul>
* </p>
*
@@ -717,7 +718,7 @@
#if __ANDROID_API__ >= 26
-/*
+/**
* Return the image back the the system and delete the AImage object from memory asynchronously.
*
* <p>Similar to {@link AImage_delete}, do NOT use the image pointer after this method returns.
@@ -746,8 +747,9 @@
* AHardwareBuffer_acquire} to acquire an extra reference, and call {@link AHardwareBuffer_release}
* once it has finished using it in order to properly deallocate the underlying memory managed by
* {@link AHardwareBuffer}. If the caller has acquired extra reference on an {@link AHardwareBuffer}
- * returned from this function, it must also listen to {@link onBufferFreed} callback to be
- * notified when the buffer is no longer used by {@link AImageReader}.</p>
+ * returned from this function, it must also register a listener using the function
+ * {@link AImageReader_setBufferRemovedListener} to be notified when the buffer is no longer used
+ * by {@link AImageReader}.</p>
*
* @param image the {@link AImage} of interest.
* @param outBuffer The memory area pointed to by buffer will contain the acquired AHardwareBuffer
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index e3b99d0..571410b 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -256,7 +256,7 @@
/**
- * The definition of {@link AImageReader} new image available callback.
+ * Signature of the callback which is called when a new image is available from {@link AImageReader}.
*
* @param context The optional application context provided by user in
* {@link AImageReader_setImageListener}.
@@ -265,11 +265,11 @@
typedef void (*AImageReader_ImageCallback)(void* context, AImageReader* reader);
typedef struct AImageReader_ImageListener {
- /// optional application context.
+ /// Optional application context passed as the first parameter of the callback.
void* context;
/**
- * This callback is called when there is a new image available for in the image reader's queue.
+ * This callback is called when there is a new image available in the image reader's queue.
*
* <p>The callback happens on one dedicated thread per {@link AImageReader} instance. It is okay
* to use AImageReader_* and AImage_* methods within the callback. Note that it is possible that
@@ -285,11 +285,11 @@
/**
* Set the onImageAvailable listener of this image reader.
*
- * <p>Note that calling this method will replace previously registered listeners.</p>
+ * Calling this method will replace previously registered listeners.
*
* @param reader The image reader of interest.
- * @param listener the {@link AImageReader_ImageListener} to be registered. Set this to NULL if
- * application no longer needs to listen to new images.
+ * @param listener The {@link AImageReader_ImageListener} to be registered. Set this to NULL if
+ * the application no longer needs to listen to new images.
*
* @return <ul>
* <li>{@link AMEDIA_OK} if the method call succeeds.</li>
@@ -406,7 +406,7 @@
media_status_t AImageReader_acquireLatestImageAsync(
AImageReader* reader, /*out*/AImage** image, /*out*/int* acquireFenceFd);
/**
- * The definition of {@link AImageReader} buffer removed callback.
+ * Signature of the callback which is called when {@link AImageReader} is about to remove a buffer.
*
* @param context The optional application context provided by user in
* {@link AImageReader_setBufferRemovedListener}.
@@ -418,7 +418,7 @@
AHardwareBuffer* buffer);
typedef struct AImageReader_BufferRemovedListener {
- /// optional application context.
+ /// Optional application context passed as the first parameter of the callback.
void* context;
/**
diff --git a/packages/MediaComponents/src/com/android/media/MediaSession2Impl.java b/packages/MediaComponents/src/com/android/media/MediaSession2Impl.java
index e099d95..4ec6042 100644
--- a/packages/MediaComponents/src/com/android/media/MediaSession2Impl.java
+++ b/packages/MediaComponents/src/com/android/media/MediaSession2Impl.java
@@ -1242,7 +1242,8 @@
// Ask server whether the controller is trusted.
// App cannot know this because apps cannot query enabled notification listener for
// another package, but system server can do.
- mIsTrusted = manager.isTrusted(packageName, pid, uid);
+ mIsTrusted = manager.isTrustedForMediaControl(
+ new MediaSessionManager.RemoteUserInfo(packageName, pid, uid));
}
@Override
diff --git a/packages/MediaComponents/src/com/android/widget/VideoView2Impl.java b/packages/MediaComponents/src/com/android/widget/VideoView2Impl.java
index b2acc26..97279d6 100644
--- a/packages/MediaComponents/src/com/android/widget/VideoView2Impl.java
+++ b/packages/MediaComponents/src/com/android/widget/VideoView2Impl.java
@@ -284,14 +284,8 @@
mSurfaceView.setLayoutParams(params);
mTextureView.setSurfaceListener(this);
mSurfaceView.setSurfaceListener(this);
-
- // TODO: Choose TextureView when SurfaceView cannot be created.
- // Choose surface view by default
- mTextureView.setVisibility(View.GONE);
- mSurfaceView.setVisibility(View.VISIBLE);
mInstance.addView(mTextureView);
mInstance.addView(mSurfaceView);
- mCurrentView = mSurfaceView;
mSubtitleView = new SubtitleView(mInstance.getContext());
mSubtitleView.setLayoutParams(params);
@@ -309,16 +303,22 @@
"http://schemas.android.com/apk/res/android",
"enableSubtitle", false);
+ // TODO: Choose TextureView when SurfaceView cannot be created.
+ // Choose surface view by default
int viewType = (attrs == null) ? VideoView2.VIEW_TYPE_SURFACEVIEW
: attrs.getAttributeIntValue(
"http://schemas.android.com/apk/res/android",
- "viewType", 0);
- if (viewType == 0) {
+ "viewType", VideoView2.VIEW_TYPE_SURFACEVIEW);
+ if (viewType == VideoView2.VIEW_TYPE_SURFACEVIEW) {
Log.d(TAG, "viewType attribute is surfaceView.");
- // TODO: implement
- } else if (viewType == 1) {
+ mTextureView.setVisibility(View.GONE);
+ mSurfaceView.setVisibility(View.VISIBLE);
+ mCurrentView = mSurfaceView;
+ } else if (viewType == VideoView2.VIEW_TYPE_TEXTUREVIEW) {
Log.d(TAG, "viewType attribute is textureView.");
- // TODO: implement
+ mTextureView.setVisibility(View.VISIBLE);
+ mSurfaceView.setVisibility(View.GONE);
+ mCurrentView = mTextureView;
}
MediaRouteSelector.Builder builder = new MediaRouteSelector.Builder();
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index ea06b6c..b38d37f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -753,8 +753,8 @@
output.notificationFrameCount = input.notificationFrameCount;
output.flags = input.flags;
- track = thread->createTrack_l(client, streamType, &output.sampleRate, input.config.format,
- input.config.channel_mask,
+ track = thread->createTrack_l(client, streamType, input.attr, &output.sampleRate,
+ input.config.format, input.config.channel_mask,
&output.frameCount, &output.notificationFrameCount,
input.notificationsPerBuffer, input.speed,
input.sharedBuffer, sessionId, &output.flags,
@@ -1673,7 +1673,7 @@
output.frameCount = input.frameCount;
output.notificationFrameCount = input.notificationFrameCount;
- recordTrack = thread->createRecordTrack_l(client, &output.sampleRate,
+ recordTrack = thread->createRecordTrack_l(client, input.attr, &output.sampleRate,
input.config.format, input.config.channel_mask,
&output.frameCount, sessionId,
&output.notificationFrameCount,
@@ -1962,39 +1962,10 @@
status_t AudioFlinger::getMicrophones(std::vector<media::MicrophoneInfo> *microphones)
{
- // Fake data
- size_t fakeNum = 2;
- audio_devices_t fakeTypes[] = { AUDIO_DEVICE_IN_BUILTIN_MIC, AUDIO_DEVICE_IN_BACK_MIC };
- for (size_t i = 0; i < fakeNum; i++) {
- struct audio_microphone_characteristic_t characteristics;
- sprintf(characteristics.device_id, "microphone:%zu", i);
- characteristics.device = fakeTypes[i];
- sprintf(characteristics.address, "");
- characteristics.location = AUDIO_MICROPHONE_LOCATION_MAINBODY;
- characteristics.group = 0;
- characteristics.index_in_the_group = i;
- characteristics.sensitivity = 1.0f;
- characteristics.max_spl = 100.0f;
- characteristics.min_spl = 0.0f;
- characteristics.directionality = AUDIO_MICROPHONE_DIRECTIONALITY_OMNI;
- characteristics.num_frequency_responses = 5 - i;
- for (size_t j = 0; j < characteristics.num_frequency_responses; j++) {
- characteristics.frequency_responses[0][j] = 100.0f - j;
- characteristics.frequency_responses[1][j] = 100.0f + j;
- }
- for (size_t j = 0; j < AUDIO_CHANNEL_COUNT_MAX; j++) {
- characteristics.channel_mapping[j] = AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
- }
- characteristics.geometric_location.x = 0.1f;
- characteristics.geometric_location.y = 0.2f;
- characteristics.geometric_location.z = 0.3f;
- characteristics.orientation.x = 0.0f;
- characteristics.orientation.y = 1.0f;
- characteristics.orientation.z = 0.0f;
- media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(characteristics);
- microphones->push_back(microphoneInfo);
- }
- return NO_ERROR;
+ AutoMutex lock(mHardwareLock);
+ sp<DeviceHalInterface> dev = mPrimaryHardwareDev->hwDevice();
+ status_t status = dev->getMicrophones(microphones);
+ return status;
}
// setAudioHwSyncForSession_l() must be called with AudioFlinger::mLock held
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 366a164..a210a1b 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -23,6 +23,7 @@
class MmapTrack : public TrackBase {
public:
MmapTrack(ThreadBase *thread,
+ const audio_attributes_t& attr,
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 6454be5..ea01a25 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -25,6 +25,7 @@
Track( PlaybackThread *thread,
const sp<Client>& client,
audio_stream_type_t streamType,
+ const audio_attributes_t& attr,
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
diff --git a/services/audioflinger/RecordTracks.h b/services/audioflinger/RecordTracks.h
index 1733ef5..2b993ee 100644
--- a/services/audioflinger/RecordTracks.h
+++ b/services/audioflinger/RecordTracks.h
@@ -24,6 +24,7 @@
public:
RecordTrack(RecordThread *thread,
const sp<Client>& client,
+ const audio_attributes_t& attr,
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 62e9fe7..b5b50f8 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -57,6 +57,7 @@
#include <powermanager/PowerManager.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
+#include <media/audiohal/StreamHalInterface.h>
#include "AudioFlinger.h"
#include "FastMixer.h"
@@ -1554,6 +1555,7 @@
mActiveTracksGeneration++;
mLatestActiveTrack = track;
++mBatteryCounter[track->uid()].second;
+ mHasChanged = true;
return mActiveTracks.add(track);
}
@@ -1568,6 +1570,7 @@
mActiveTracksGeneration++;
--mBatteryCounter[track->uid()].second;
// mLatestActiveTrack is not cleared even if it is the same as track.
+ mHasChanged = true;
return index;
}
@@ -1578,6 +1581,7 @@
logTrack("clear", track);
}
mLastActiveTracksGeneration = mActiveTracksGeneration;
+ if (!mActiveTracks.empty()) { mHasChanged = true; }
mActiveTracks.clear();
mLatestActiveTrack.clear();
mBatteryCounter.clear();
@@ -1615,6 +1619,13 @@
}
template <typename T>
+bool AudioFlinger::ThreadBase::ActiveTracks<T>::readAndClearHasChanged() {
+ const bool hasChanged = mHasChanged;
+ mHasChanged = false;
+ return hasChanged;
+}
+
+template <typename T>
void AudioFlinger::ThreadBase::ActiveTracks<T>::logTrack(
const char *funcName, const sp<T> &track) const {
if (mLocalLog != nullptr) {
@@ -1847,6 +1858,7 @@
sp<AudioFlinger::PlaybackThread::Track> AudioFlinger::PlaybackThread::createTrack_l(
const sp<AudioFlinger::Client>& client,
audio_stream_type_t streamType,
+ const audio_attributes_t& attr,
uint32_t *pSampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -2125,7 +2137,7 @@
}
}
- track = new Track(this, client, streamType, sampleRate, format,
+ track = new Track(this, client, streamType, attr, sampleRate, format,
channelMask, frameCount,
nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
sessionId, uid, *flags, TrackBase::TYPE_DEFAULT, portId);
@@ -2609,6 +2621,24 @@
}
}
+void AudioFlinger::PlaybackThread::updateMetadata_l()
+{
+ // TODO: add volume support
+ if (mOutput == nullptr || mOutput->stream == nullptr ||
+ !mActiveTracks.readAndClearHasChanged()) {
+ return;
+ }
+ StreamOutHalInterface::SourceMetadata metadata;
+ for (const sp<Track> &track : mActiveTracks) {
+ // No track is invalid as this is called after prepareTrack_l in the same critical section
+ metadata.tracks.push_back({
+ .usage = track->attributes().usage,
+ .content_type = track->attributes().content_type,
+ .gain = 1,
+ });
+ }
+ mOutput->stream->updateSourceMetadata(metadata);
+}
status_t AudioFlinger::PlaybackThread::getRenderPosition(uint32_t *halFrames, uint32_t *dspFrames)
{
@@ -3306,6 +3336,8 @@
mActiveTracks.updatePowerState(this);
+ updateMetadata_l();
+
// prevent any changes in effect chain list and in each effect chain
// during mixing and effect process as the audio buffers could be deleted
// or modified if an effect is created or deleted
@@ -6117,6 +6149,17 @@
return true;
}
+void AudioFlinger::DuplicatingThread::updateMetadata_l()
+{
+ // TODO: The duplicated track metadata are stored in other threads
+ // (accessible through mActiveTracks::OutputTrack::thread()::mActiveTracks::Track::attributes())
+ // but this information can be mutated at any time by the owning threads.
+ // Taking the lock of any other owning thread is not possible due to timing constraints.
+ // Similarly, the other threads cannot push the metadata into this thread, as a cross
+ // deadlock would be possible.
+ // A lock-free structure needs to be used to share the metadata (maybe an atomic shared_ptr?).
+}
+
uint32_t AudioFlinger::DuplicatingThread::activeSleepTimeUs() const
{
return (mWaitTimeMs * 1000) / 2;
@@ -6444,6 +6487,8 @@
mActiveTracks.updatePowerState(this);
+ updateMetadata_l();
+
if (allStopped) {
standbyIfNotAlreadyInStandby();
}
@@ -6808,6 +6853,7 @@
// RecordThread::createRecordTrack_l() must be called with AudioFlinger::mLock held
sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createRecordTrack_l(
const sp<AudioFlinger::Client>& client,
+ const audio_attributes_t& attr,
uint32_t *pSampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -6941,7 +6987,7 @@
{ // scope for mLock
Mutex::Autolock _l(mLock);
- track = new RecordTrack(this, client, sampleRate,
+ track = new RecordTrack(this, client, attr, sampleRate,
format, channelMask, frameCount,
nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, uid,
*flags, TrackBase::TYPE_DEFAULT, portId);
@@ -7129,42 +7175,25 @@
{
ALOGV("RecordThread::getActiveMicrophones");
AutoMutex _l(mLock);
- // Fake data
- struct audio_microphone_characteristic_t characteristic;
- sprintf(characteristic.device_id, "builtin_mic");
- characteristic.device = AUDIO_DEVICE_IN_BUILTIN_MIC;
- sprintf(characteristic.address, "");
- characteristic.location = AUDIO_MICROPHONE_LOCATION_MAINBODY;
- characteristic.group = 0;
- characteristic.index_in_the_group = 0;
- characteristic.sensitivity = 1.0f;
- characteristic.max_spl = 100.0f;
- characteristic.min_spl = 0.0f;
- characteristic.directionality = AUDIO_MICROPHONE_DIRECTIONALITY_OMNI;
- characteristic.num_frequency_responses = 5;
- for (size_t i = 0; i < characteristic.num_frequency_responses; i++) {
- characteristic.frequency_responses[0][i] = 100.0f - i;
- characteristic.frequency_responses[1][i] = 100.0f + i;
+ status_t status = mInput->stream->getActiveMicrophones(activeMicrophones);
+ return status;
+}
+
+void AudioFlinger::RecordThread::updateMetadata_l()
+{
+ if (mInput == nullptr || mInput->stream == nullptr ||
+ !mActiveTracks.readAndClearHasChanged()) {
+ return;
}
- for (size_t i = 0; i < AUDIO_CHANNEL_COUNT_MAX; i++) {
- characteristic.channel_mapping[i] = AUDIO_MICROPHONE_CHANNEL_MAPPING_UNUSED;
+ StreamInHalInterface::SinkMetadata metadata;
+ for (const sp<RecordTrack> &track : mActiveTracks) {
+ // No track is invalid as this is called after prepareTrack_l in the same critical section
+ metadata.tracks.push_back({
+ .source = track->attributes().source,
+ .gain = 1, // capture tracks do not have volumes
+ });
}
- audio_microphone_channel_mapping_t channel_mappings[] = {
- AUDIO_MICROPHONE_CHANNEL_MAPPING_DIRECT,
- AUDIO_MICROPHONE_CHANNEL_MAPPING_PROCESSED,
- };
- for (size_t i = 0; i < mChannelCount; i++) {
- characteristic.channel_mapping[i] = channel_mappings[i % 2];
- }
- characteristic.geometric_location.x = 0.1f;
- characteristic.geometric_location.y = 0.2f;
- characteristic.geometric_location.z = 0.3f;
- characteristic.orientation.x = 0.0f;
- characteristic.orientation.y = 1.0f;
- characteristic.orientation.z = 0.0f;
- media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(characteristic);
- activeMicrophones->push_back(microphoneInfo);
- return NO_ERROR;
+ mInput->stream->updateSinkMetadata(metadata);
}
// destroyTrack_l() must be called with ThreadBase::mLock held
@@ -7994,7 +8023,8 @@
return PERMISSION_DENIED;
}
- sp<MmapTrack> track = new MmapTrack(this, mSampleRate, mFormat, mChannelMask, mSessionId,
+ // Given that MmapThread::mAttr is mutable, should a MmapTrack have attributes?
+ sp<MmapTrack> track = new MmapTrack(this, mAttr, mSampleRate, mFormat, mChannelMask, mSessionId,
client.clientUid, client.clientPid, portId);
mActiveTracks.add(track);
@@ -8130,6 +8160,8 @@
mActiveTracks.updatePowerState(this);
+ updateMetadata_l();
+
lockEffectChains_l(effectChains);
for (size_t i = 0; i < effectChains.size(); i ++) {
effectChains[i]->process_l();
@@ -8677,6 +8709,24 @@
}
}
+void AudioFlinger::MmapPlaybackThread::updateMetadata_l()
+{
+ if (mOutput == nullptr || mOutput->stream == nullptr ||
+ !mActiveTracks.readAndClearHasChanged()) {
+ return;
+ }
+ StreamOutHalInterface::SourceMetadata metadata;
+ for (const sp<MmapTrack> &track : mActiveTracks) {
+ // No track can be invalid here, as this is called after prepareTrack_l in the same critical section
+ metadata.tracks.push_back({
+ .usage = track->attributes().usage,
+ .content_type = track->attributes().content_type,
+ .gain = mHalVolFloat, // TODO: propagate from aaudio pre-mix volume
+ });
+ }
+ mOutput->stream->updateSourceMetadata(metadata);
+}
+
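MmapPlaybackThread::updateMetadata_l() gives the HAL, for each active track, its usage, content type and the gain applied before the HAL. The sketch below illustrates how a stream implementation might act on that information; FakeStreamOut, FakeTrackMetadata and the voice-profile switch are hypothetical, only audio_usage_t, audio_content_type_t and AUDIO_USAGE_VOICE_COMMUNICATION are standard types and values.

    #include <vector>
    #include <system/audio.h>

    // Hypothetical per-track record mirroring what updateSourceMetadata() carries.
    struct FakeTrackMetadata {
        audio_usage_t usage;
        audio_content_type_t content_type;
        float gain;
    };

    // Hypothetical stream: enable a voice tuning profile whenever at least one
    // active track declares a voice-communication usage.
    class FakeStreamOut {
    public:
        void updateSourceMetadata(const std::vector<FakeTrackMetadata>& tracks) {
            bool voice = false;
            for (const FakeTrackMetadata& t : tracks) {
                voice = voice || (t.usage == AUDIO_USAGE_VOICE_COMMUNICATION);
            }
            mVoiceProfileActive = voice;
        }
        bool voiceProfileActive() const { return mVoiceProfileActive; }
    private:
        bool mVoiceProfileActive = false;
    };

Because the metadata is pushed only when the active set changes (see readAndClearHasChanged() in the Threads.h hunks further down), the HAL does not need to poll AudioFlinger for this information.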
void AudioFlinger::MmapPlaybackThread::checkSilentMode_l()
{
if (!mMasterMute) {
@@ -8721,4 +8771,22 @@
mInput = NULL;
return input;
}
+
+void AudioFlinger::MmapCaptureThread::updateMetadata_l()
+{
+ if (mInput == nullptr || mInput->stream == nullptr ||
+ !mActiveTracks.readAndClearHasChanged()) {
+ return;
+ }
+ StreamInHalInterface::SinkMetadata metadata;
+ for (const sp<MmapTrack> &track : mActiveTracks) {
+ // No track can be invalid here, as this is called after prepareTrack_l in the same critical section
+ metadata.tracks.push_back({
+ .source = track->attributes().source,
+ .gain = 1, // capture tracks do not have volumes
+ });
+ }
+ mInput->stream->updateSinkMetadata(metadata);
+}
+
} // namespace android
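The capture side is symmetric: one record per active input track, carrying the audio source and a fixed gain of 1. Below is a sketch of a possible consumer, again with hypothetical names (FakeStreamIn, the tuning strings); only the audio_source_t values are standard.

    #include <vector>
    #include <system/audio.h>

    // Hypothetical per-track record mirroring what updateSinkMetadata() carries.
    struct FakeRecordMetadata {
        audio_source_t source;
        float gain;
    };

    // Hypothetical input stream: pick a capture tuning based on the most
    // demanding active source.
    class FakeStreamIn {
    public:
        void updateSinkMetadata(const std::vector<FakeRecordMetadata>& tracks) {
            mTuning = "default";
            for (const FakeRecordMetadata& t : tracks) {
                if (t.source == AUDIO_SOURCE_VOICE_COMMUNICATION) {
                    mTuning = "voice_call";   // hypothetical tuning names
                    break;                    // highest priority, stop looking
                }
                if (t.source == AUDIO_SOURCE_CAMCORDER) {
                    mTuning = "camcorder";
                }
            }
        }
        const char* tuning() const { return mTuning; }
    private:
        const char* mTuning = "default";
    };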
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 7cd46a7..bb81224 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -425,6 +425,9 @@
// check if some effects must be suspended when an effect chain is added
void checkSuspendOnAddEffectChain_l(const sp<EffectChain>& chain);
+ // sends the metadata of the active tracks to the HAL
+ virtual void updateMetadata_l() = 0;
+
String16 getWakeLockTag();
virtual void preExit() { }
@@ -563,6 +566,10 @@
// periodically called in the threadLoop() to update power state uids.
void updatePowerState(sp<ThreadBase> thread, bool force = false);
+ /** @return true if the active tracks have changed since the last time
+ * this function was called or the vector was created. */
+ bool readAndClearHasChanged();
+
private:
void logTrack(const char *funcName, const sp<T> &track) const;
@@ -581,6 +588,8 @@
int mLastActiveTracksGeneration;
wp<T> mLatestActiveTrack; // latest track added to ActiveTracks
SimpleLog * const mLocalLog;
+ // Whether the active tracks have changed since the last call to readAndClearHasChanged()
+ bool mHasChanged = false;
};
SimpleLog mLocalLog;
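readAndClearHasChanged() is what keeps the per-loop updateMetadata_l() calls cheap: the HAL is only notified when the set of active tracks has actually changed. The implementation is not part of this excerpt; the sketch below shows the intended dirty-flag pattern, assuming mHasChanged is set wherever tracks are added to or removed from the vector.

    #include <vector>

    // Minimal stand-in for the ActiveTracks bookkeeping declared above.
    template <typename T>
    class ActiveTracksSketch {
    public:
        void add(const T& track) {
            mTracks.push_back(track);
            mHasChanged = true;             // any membership change sets the flag
        }
        void remove(const T& track) {
            for (auto it = mTracks.begin(); it != mTracks.end(); ++it) {
                if (*it == track) {
                    mTracks.erase(it);
                    mHasChanged = true;
                    break;
                }
            }
        }
        // Report whether the set changed since the last call, then clear the
        // flag so callers such as updateMetadata_l() only act on real changes.
        bool readAndClearHasChanged() {
            const bool changed = mHasChanged;
            mHasChanged = false;
            return changed;
        }
    private:
        std::vector<T> mTracks;
        bool mHasChanged = false;
    };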
@@ -706,6 +715,7 @@
sp<Track> createTrack_l(
const sp<AudioFlinger::Client>& client,
audio_stream_type_t streamType,
+ const audio_attributes_t& attr,
uint32_t *sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -917,6 +927,7 @@
void removeTrack_l(const sp<Track>& track);
void readOutputParameters_l();
+ void updateMetadata_l() override;
virtual void dumpInternals(int fd, const Vector<String16>& args);
void dumpTracks(int fd, const Vector<String16>& args);
@@ -1275,6 +1286,8 @@
void addOutputTrack(MixerThread* thread);
void removeOutputTrack(MixerThread* thread);
uint32_t waitTimeMs() const { return mWaitTimeMs; }
+
+ void updateMetadata_l() override;
protected:
virtual uint32_t activeSleepTimeUs() const;
@@ -1387,6 +1400,7 @@
sp<AudioFlinger::RecordThread::RecordTrack> createRecordTrack_l(
const sp<AudioFlinger::Client>& client,
+ const audio_attributes_t& attr,
uint32_t *pSampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -1461,6 +1475,8 @@
status_t getActiveMicrophones(std::vector<media::MicrophoneInfo>* activeMicrophones);
+ void updateMetadata_l() override;
+
private:
// Enter standby if not already in standby, and set mStandby flag
void standbyIfNotAlreadyInStandby();
@@ -1658,6 +1674,8 @@
virtual bool isOutput() const override { return true; }
+ void updateMetadata_l() override;
+
protected:
audio_stream_type_t mStreamType;
@@ -1684,6 +1702,8 @@
virtual bool isOutput() const override { return false; }
+ void updateMetadata_l() override;
+
protected:
AudioStreamIn* mInput;
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index a7e966f..ccfb69f 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -61,6 +61,7 @@
TrackBase(ThreadBase *thread,
const sp<Client>& client,
+ const audio_attributes_t& attr,
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -97,6 +98,7 @@
virtual void invalidate() { mIsInvalid = true; }
bool isInvalid() const { return mIsInvalid; }
+ audio_attributes_t attributes() const { return mAttr; }
protected:
DISALLOW_COPY_AND_ASSIGN(TrackBase);
@@ -188,6 +190,7 @@
size_t mBufferSize; // size of mBuffer in bytes
// we don't really need a lock for these
track_state mState;
+ const audio_attributes_t mAttr;
const uint32_t mSampleRate; // initial sample rate only; for tracks which
// support dynamic rates, the current value is in control block
const audio_format_t mFormat;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 9b93939..44ce3aa 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -63,6 +63,7 @@
AudioFlinger::ThreadBase::TrackBase::TrackBase(
ThreadBase *thread,
const sp<Client>& client,
+ const audio_attributes_t& attr,
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -81,6 +82,7 @@
mCblk(NULL),
// mBuffer, mBufferSize
mState(IDLE),
+ mAttr(attr),
mSampleRate(sampleRate),
mFormat(format),
mChannelMask(channelMask),
@@ -372,6 +374,7 @@
PlaybackThread *thread,
const sp<Client>& client,
audio_stream_type_t streamType,
+ const audio_attributes_t& attr,
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -384,7 +387,7 @@
audio_output_flags_t flags,
track_type type,
audio_port_handle_t portId)
- : TrackBase(thread, client, sampleRate, format, channelMask, frameCount,
+ : TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
(sharedBuffer != 0) ? sharedBuffer->pointer() : buffer,
(sharedBuffer != 0) ? sharedBuffer->size() : bufferSize,
sessionId, uid, true /*isOut*/,
@@ -1259,6 +1262,7 @@
size_t frameCount,
uid_t uid)
: Track(playbackThread, NULL, AUDIO_STREAM_PATCH,
+ audio_attributes_t{} /* currently unused for output track */,
sampleRate, format, channelMask, frameCount,
nullptr /* buffer */, (size_t)0 /* bufferSize */, nullptr /* sharedBuffer */,
AUDIO_SESSION_NONE, uid, AUDIO_OUTPUT_FLAG_NONE,
@@ -1461,6 +1465,7 @@
size_t bufferSize,
audio_output_flags_t flags)
: Track(playbackThread, NULL, streamType,
+ audio_attributes_t{} /* currently unused for patch track */,
sampleRate, format, channelMask, frameCount,
buffer, bufferSize, nullptr /* sharedBuffer */,
AUDIO_SESSION_NONE, getuid(), flags, TYPE_PATCH),
@@ -1595,6 +1600,7 @@
AudioFlinger::RecordThread::RecordTrack::RecordTrack(
RecordThread *thread,
const sp<Client>& client,
+ const audio_attributes_t& attr,
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -1606,7 +1612,7 @@
audio_input_flags_t flags,
track_type type,
audio_port_handle_t portId)
- : TrackBase(thread, client, sampleRate, format,
+ : TrackBase(thread, client, attr, sampleRate, format,
channelMask, frameCount, buffer, bufferSize, sessionId, uid, false /*isOut*/,
(type == TYPE_DEFAULT) ?
((flags & AUDIO_INPUT_FLAG_FAST) ? ALLOC_PIPE : ALLOC_CBLK) :
@@ -1821,7 +1827,9 @@
void *buffer,
size_t bufferSize,
audio_input_flags_t flags)
- : RecordTrack(recordThread, NULL, sampleRate, format, channelMask, frameCount,
+ : RecordTrack(recordThread, NULL,
+ audio_attributes_t{} /* currently unused for patch track */,
+ sampleRate, format, channelMask, frameCount,
buffer, bufferSize, AUDIO_SESSION_NONE, getuid(), flags, TYPE_PATCH),
mProxy(new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, false, true))
{
@@ -1882,6 +1890,7 @@
AudioFlinger::MmapThread::MmapTrack::MmapTrack(ThreadBase *thread,
+ const audio_attributes_t& attr,
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
@@ -1889,7 +1898,7 @@
uid_t uid,
pid_t pid,
audio_port_handle_t portId)
- : TrackBase(thread, NULL, sampleRate, format,
+ : TrackBase(thread, NULL, attr, sampleRate, format,
channelMask, (size_t)0 /* frameCount */,
nullptr /* buffer */, (size_t)0 /* bufferSize */,
sessionId, uid, false /* isOut */,
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 92a2030..8f6db46 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -3777,6 +3777,16 @@
ALOGE("Default device %08x is unreachable", mDefaultOutputDevice->type());
status = NO_INIT;
}
+ // If the microphone's address is empty, set it according to the device type
+ for (size_t i = 0; i < mAvailableInputDevices.size(); i++) {
+ if (mAvailableInputDevices[i]->mAddress.isEmpty()) {
+ if (mAvailableInputDevices[i]->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
+ mAvailableInputDevices[i]->mAddress = String8(AUDIO_BOTTOM_MICROPHONE_ADDRESS);
+ } else if (mAvailableInputDevices[i]->type() == AUDIO_DEVICE_IN_BACK_MIC) {
+ mAvailableInputDevices[i]->mAddress = String8(AUDIO_BACK_MICROPHONE_ADDRESS);
+ }
+ }
+ }
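The loop above back-fills an address for built-in microphone devices that were declared without one (typically in the audio policy configuration), presumably so they can be matched against the per-address microphone information reported by the HAL. A standalone sketch of the same mapping, assuming the two address macros expand to plain C string literals:

    #include <system/audio.h>

    // Same default-address rule as the loop above; returns nullptr when the
    // device type has no well-known built-in microphone address.
    static const char* defaultMicAddress(audio_devices_t type) {
        switch (type) {
        case AUDIO_DEVICE_IN_BUILTIN_MIC:
            return AUDIO_BOTTOM_MICROPHONE_ADDRESS;
        case AUDIO_DEVICE_IN_BACK_MIC:
            return AUDIO_BACK_MICROPHONE_ADDRESS;
        default:
            return nullptr;
        }
    }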
if (mPrimaryOutput == 0) {
ALOGE("Failed to open primary output");