Merge "media: Support recording exceeds filesize limit by splitting files."
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 12d0da8..9cd3a47 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -21,8 +21,9 @@
#include <utils/Log.h>
#include <camera/camera2/OutputConfiguration.h>
-#include <gui/Surface.h>
#include <binder/Parcel.h>
+#include <gui/Surface.h>
+#include <utils/String8.h>
namespace android {
@@ -30,8 +31,9 @@
const int OutputConfiguration::INVALID_ROTATION = -1;
const int OutputConfiguration::INVALID_SET_ID = -1;
-sp<IGraphicBufferProducer> OutputConfiguration::getGraphicBufferProducer() const {
- return mGbp;
+const std::vector<sp<IGraphicBufferProducer>>&
+ OutputConfiguration::getGraphicBufferProducers() const {
+ return mGbps;
}
int OutputConfiguration::getRotation() const {
@@ -103,37 +105,60 @@
return err;
}
- view::Surface surfaceShim;
- if ((err = surfaceShim.readFromParcel(parcel)) != OK) {
- // Read surface failure for deferred surface configuration is expected.
- if (surfaceType == SURFACE_TYPE_SURFACE_VIEW ||
- surfaceType == SURFACE_TYPE_SURFACE_TEXTURE) {
- ALOGV("%s: Get null surface from a deferred surface configuration (%dx%d)",
- __FUNCTION__, width, height);
- err = OK;
- } else {
- ALOGE("%s: Failed to read surface from parcel", __FUNCTION__);
- return err;
- }
+ // numSurfaces is the total number of surfaces for this OutputConfiguration,
+ // regardless of whether each surface is deferred or not.
+ int numSurfaces = 0;
+ if ((err = parcel->readInt32(&numSurfaces)) != OK) {
+ ALOGE("%s: Failed to read maxSurfaces from parcel", __FUNCTION__);
+ return err;
+ }
+ if (numSurfaces < 1) {
+ ALOGE("%s: there has to be at least 1 surface per"
+ " outputConfiguration", __FUNCTION__);
+ return BAD_VALUE;
}
- mGbp = surfaceShim.graphicBufferProducer;
+ // Read all surfaces from the parcel. If a surface is deferred, readFromParcel
+ // returns an error and a null surface is put into mGbps. We assume all
+ // deferred surfaces come after non-deferred surfaces in the parcel.
+ // TODO: Need better way to detect deferred surface than using error
+ // return from readFromParcel.
+ std::vector<sp<IGraphicBufferProducer>> gbps;
+ for (int i = 0; i < numSurfaces; i++) {
+ view::Surface surfaceShim;
+ if ((err = surfaceShim.readFromParcel(parcel)) != OK) {
+ // Read surface failure for deferred surface configuration is expected.
+ if ((surfaceType == SURFACE_TYPE_SURFACE_VIEW ||
+ surfaceType == SURFACE_TYPE_SURFACE_TEXTURE)) {
+ ALOGV("%s: Get null surface from a deferred surface configuration (%dx%d)",
+ __FUNCTION__, width, height);
+ err = OK;
+ } else {
+ ALOGE("%s: Failed to read surface from parcel", __FUNCTION__);
+ return err;
+ }
+ }
+ gbps.push_back(surfaceShim.graphicBufferProducer);
+ ALOGV("%s: OutputConfiguration: gbps[%d] : %p, name %s", __FUNCTION__,
+ i, gbps[i].get(), String8(surfaceShim.name).string());
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
mWidth = width;
mHeight = height;
+ mGbps = std::move(gbps);
- ALOGV("%s: OutputConfiguration: bp = %p, name = %s, rotation = %d, setId = %d,"
- "surfaceType = %d", __FUNCTION__, mGbp.get(), String8(surfaceShim.name).string(),
- mRotation, mSurfaceSetID, mSurfaceType);
+ ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d",
+ __FUNCTION__, mRotation, mSurfaceSetID, mSurfaceType);
return err;
}
OutputConfiguration::OutputConfiguration(sp<IGraphicBufferProducer>& gbp, int rotation,
int surfaceSetID) {
- mGbp = gbp;
+ mGbps.push_back(gbp);
mRotation = rotation;
mSurfaceSetID = surfaceSetID;
}
@@ -158,14 +183,53 @@
err = parcel->writeInt32(mHeight);
if (err != OK) return err;
- view::Surface surfaceShim;
- surfaceShim.name = String16("unknown_name"); // name of surface
- surfaceShim.graphicBufferProducer = mGbp;
-
- err = surfaceShim.writeToParcel(parcel);
+ int numSurfaces = mGbps.size();
+ err = parcel->writeInt32(numSurfaces);
if (err != OK) return err;
+ for (int i = 0; i < numSurfaces; i++) {
+ view::Surface surfaceShim;
+ surfaceShim.name = String16("unknown_name"); // name of surface
+ surfaceShim.graphicBufferProducer = mGbps[i];
+
+ err = surfaceShim.writeToParcel(parcel);
+ if (err != OK) return err;
+ }
+
return OK;
}
+bool OutputConfiguration::gbpsEqual(const OutputConfiguration& other) const {
+ const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
+ other.getGraphicBufferProducers();
+
+ if (mGbps.size() != otherGbps.size()) {
+ return false;
+ }
+
+ for (size_t i = 0; i < mGbps.size(); i++) {
+ if (mGbps[i] != otherGbps[i]) {
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool OutputConfiguration::gbpsLessThan(const OutputConfiguration& other) const {
+ const std::vector<sp<IGraphicBufferProducer> >& otherGbps =
+ other.getGraphicBufferProducers();
+
+ if (mGbps.size() != otherGbps.size()) {
+ return mGbps.size() < otherGbps.size();
+ }
+
+ for (size_t i = 0; i < mGbps.size(); i++) {
+ if (mGbps[i] != otherGbps[i]) {
+ return mGbps[i] < otherGbps[i];
+ }
+ }
+
+ return false;
+}
}; // namespace android
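
The new gbpsEqual()/gbpsLessThan() helpers replace the single-producer comparison in operator==/operator<: vectors are ordered first by size and then by the first differing element, which keeps OutputConfiguration usable as a key in ordered containers. A minimal standalone sketch of the same ordering rule, using plain integers instead of IGraphicBufferProducer pointers (hypothetical illustration, not part of this change):

#include <cassert>
#include <cstddef>
#include <vector>

// Same rule as OutputConfiguration::gbpsLessThan(): compare sizes first,
// then fall back to the first differing element.
static bool vectorLessThan(const std::vector<int>& a, const std::vector<int>& b) {
    if (a.size() != b.size()) {
        return a.size() < b.size();
    }
    for (std::size_t i = 0; i < a.size(); i++) {
        if (a[i] != b[i]) {
            return a[i] < b[i];
        }
    }
    return false;  // equal vectors are not less-than each other
}

int main() {
    assert(vectorLessThan({1, 2}, {1, 2, 3}));    // shorter vector orders first
    assert(vectorLessThan({1, 2, 3}, {1, 2, 4})); // same size: elementwise compare
    assert(!vectorLessThan({1, 2}, {1, 2}));      // irreflexive on equal inputs
    return 0;
}

Because the size check comes first, the comparison stays a strict weak ordering even though it is not plain lexicographic order.
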
diff --git a/camera/cameraserver/Android.mk b/camera/cameraserver/Android.mk
index 8c06833..bdafff1 100644
--- a/camera/cameraserver/Android.mk
+++ b/camera/cameraserver/Android.mk
@@ -24,7 +24,11 @@
liblog \
libutils \
libbinder \
- android.hardware.camera.common@1.0
+ android.hardware.camera.common@1.0 \
+ android.hardware.camera.provider@2.4 \
+ android.hardware.camera.device@1.0 \
+ android.hardware.camera.device@3.2 \
+ android.hidl.manager@1.0
LOCAL_MODULE:= cameraserver
LOCAL_32_BIT_ONLY := true
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 6f2351f..c5fc646 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -20,4 +20,5 @@
name: "libcamera2ndk.ndk",
symbol_file: "libcamera2ndk.map.txt",
first_version: "24",
+ unversioned_until: "current",
}
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index f656008..ba2100c 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -335,7 +335,7 @@
return ACAMERA_ERROR_NOT_ENOUGH_MEMORY;
}
out->numCameras = numCameras;
- out->cameraIds = new const char*[numCameras] {nullptr};
+ out->cameraIds = new const char*[numCameras];
if (!out->cameraIds) {
ALOGE("Allocate memory for ACameraIdList failed!");
deleteCameraIdList(out);
diff --git a/include/camera/camera2/OutputConfiguration.h b/include/camera/camera2/OutputConfiguration.h
index cb04c0e..2961e2a 100644
--- a/include/camera/camera2/OutputConfiguration.h
+++ b/include/camera/camera2/OutputConfiguration.h
@@ -38,7 +38,7 @@
SURFACE_TYPE_SURFACE_VIEW = 0,
SURFACE_TYPE_SURFACE_TEXTURE = 1
};
- sp<IGraphicBufferProducer> getGraphicBufferProducer() const;
+ const std::vector<sp<IGraphicBufferProducer>>& getGraphicBufferProducers() const;
int getRotation() const;
int getSurfaceSetID() const;
int getSurfaceType() const;
@@ -65,19 +65,18 @@
int surfaceSetID = INVALID_SET_ID);
bool operator == (const OutputConfiguration& other) const {
- return (mGbp == other.mGbp &&
- mRotation == other.mRotation &&
+ return ( mRotation == other.mRotation &&
mSurfaceSetID == other.mSurfaceSetID &&
mSurfaceType == other.mSurfaceType &&
mWidth == other.mWidth &&
- mHeight == other.mHeight);
+ mHeight == other.mHeight &&
+ gbpsEqual(other));
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
}
bool operator < (const OutputConfiguration& other) const {
if (*this == other) return false;
- if (mGbp != other.mGbp) return mGbp < other.mGbp;
if (mSurfaceSetID != other.mSurfaceSetID) {
return mSurfaceSetID < other.mSurfaceSetID;
}
@@ -90,15 +89,20 @@
if (mHeight != other.mHeight) {
return mHeight < other.mHeight;
}
+ if (mRotation != other.mRotation) {
+ return mRotation < other.mRotation;
+ }
- return mRotation < other.mRotation;
+ return gbpsLessThan(other);
}
bool operator > (const OutputConfiguration& other) const {
return (*this != other && !(*this < other));
}
+ bool gbpsEqual(const OutputConfiguration& other) const;
+ bool gbpsLessThan(const OutputConfiguration& other) const;
private:
- sp<IGraphicBufferProducer> mGbp;
+ std::vector<sp<IGraphicBufferProducer>> mGbps;
int mRotation;
int mSurfaceSetID;
int mSurfaceType;
diff --git a/services/audioflinger/AudioMixer.h b/include/media/AudioMixer.h
similarity index 99%
rename from services/audioflinger/AudioMixer.h
rename to include/media/AudioMixer.h
index 540caac..87ada76 100644
--- a/services/audioflinger/AudioMixer.h
+++ b/include/media/AudioMixer.h
@@ -22,15 +22,14 @@
#include <sys/types.h>
#include <media/AudioBufferProvider.h>
+#include <media/AudioResampler.h>
#include <media/AudioResamplerPublic.h>
+#include <media/BufferProviders.h>
#include <media/nbaio/NBLog.h>
#include <system/audio.h>
#include <utils/Compat.h>
#include <utils/threads.h>
-#include "AudioResampler.h"
-#include "BufferProviders.h"
-
// FIXME This is actually unity gain, which might not be max in future, expressed in U.12
#define MAX_GAIN_INT AudioMixer::UNITY_GAIN_INT
diff --git a/services/audioflinger/AudioResampler.h b/include/media/AudioResampler.h
similarity index 100%
rename from services/audioflinger/AudioResampler.h
rename to include/media/AudioResampler.h
diff --git a/services/audioflinger/BufferProviders.h b/include/media/BufferProviders.h
similarity index 95%
rename from services/audioflinger/BufferProviders.h
rename to include/media/BufferProviders.h
index 2a857fe..d5899ea 100644
--- a/services/audioflinger/BufferProviders.h
+++ b/include/media/BufferProviders.h
@@ -21,13 +21,18 @@
#include <sys/types.h>
#include <media/AudioBufferProvider.h>
+#include <media/AudioResamplerPublic.h>
#include <system/audio.h>
#include <system/audio_effect.h>
-#include <sonic.h>
#include <utils/StrongPointer.h>
+// external forward declaration from external/sonic/sonic.h
+struct sonicStreamStruct;
+typedef struct sonicStreamStruct *sonicStream;
+
namespace android {
+class EffectBufferHalInterface;
class EffectHalInterface;
class EffectsFactoryHalInterface;
@@ -108,6 +113,8 @@
protected:
sp<EffectsFactoryHalInterface> mEffectsFactory;
sp<EffectHalInterface> mDownmixInterface;
+ sp<EffectBufferHalInterface> mInBuffer;
+ sp<EffectBufferHalInterface> mOutBuffer;
effect_config_t mDownmixConfig;
// effect descriptor for the downmixer used by the mixer
diff --git a/include/media/BufferingSettings.h b/include/media/BufferingSettings.h
index 7dd9d40..e812d2a 100644
--- a/include/media/BufferingSettings.h
+++ b/include/media/BufferingSettings.h
@@ -40,6 +40,8 @@
static const int kNoWatermark = -1;
static bool IsValidBufferingMode(int mode);
+ static bool IsTimeBasedBufferingMode(int mode);
+ static bool IsSizeBasedBufferingMode(int mode);
BufferingMode mInitialBufferingMode; // for prepare
BufferingMode mRebufferingMode; // for playback
@@ -64,6 +66,7 @@
status_t writeToParcel(Parcel* parcel) const override;
status_t readFromParcel(const Parcel* parcel) override;
+ String8 toString() const;
};
} // namespace android
diff --git a/services/audioflinger/LinearMap.h b/include/media/LinearMap.h
similarity index 100%
rename from services/audioflinger/LinearMap.h
rename to include/media/LinearMap.h
diff --git a/include/media/OMXBuffer.h b/include/media/OMXBuffer.h
index aeb1765..697823f 100644
--- a/include/media/OMXBuffer.h
+++ b/include/media/OMXBuffer.h
@@ -58,10 +58,6 @@
// |codecBuffer|'s size (or 0 if |codecBuffer| is NULL).
OMXBuffer(const sp<MediaCodecBuffer> &codecBuffer);
- // Constructs a buffer of type kBufferTypePreset with a specified
- // mRangeLength.
- explicit OMXBuffer(OMX_U32 rangeLength);
-
// Constructs a buffer of type kBufferTypeSharedMem.
OMXBuffer(const sp<IMemory> &mem);
@@ -101,6 +97,7 @@
// kBufferTypePreset
// If the port is operating in byte buffer mode, mRangeLength is the valid
// range length. Otherwise the range info should also be ignored.
+ OMX_U32 mRangeOffset;
OMX_U32 mRangeLength;
// kBufferTypeSharedMem
diff --git a/include/media/PluginLoader.h b/include/media/PluginLoader.h
index 7d54ce4..360af2d 100644
--- a/include/media/PluginLoader.h
+++ b/include/media/PluginLoader.h
@@ -43,7 +43,7 @@
while ((pEntry = readdir(pDir))) {
String8 file(pEntry->d_name);
if (file.getPathExtension() == ".so") {
- String8 path = pluginDir + pEntry->d_name;
+ String8 path = pluginDir + "/" + pEntry->d_name;
T *plugin = loadOne(path, entry);
if (plugin) {
factories.push(plugin);
@@ -77,7 +77,8 @@
libraries.push(library);
return createFactoryFunc();
} else {
- ALOGE("Failed to create plugin factory from %s", path);
+ ALOGE("Failed to create plugin factory from %s at entry %s: %s",
+ path, entry, library->lastError());
}
}
return NULL;
diff --git a/include/media/RecordBufferConverter.h b/include/media/RecordBufferConverter.h
new file mode 100644
index 0000000..2abc45e
--- /dev/null
+++ b/include/media/RecordBufferConverter.h
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_RECORD_BUFFER_CONVERTER_H
+#define ANDROID_RECORD_BUFFER_CONVERTER_H
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <media/AudioBufferProvider.h>
+#include <system/audio.h>
+
+class AudioResampler;
+class PassthruBufferProvider;
+
+namespace android {
+
+/* The RecordBufferConverter is used for format, channel, and sample rate
+ * conversion for a RecordTrack.
+ *
+ * RecordBufferConverter uses the convert() method rather than exposing a
+ * buffer provider interface; this is to save a memory copy.
+ *
+ * There are legacy conversion requirements for this converter, specifically
+ * due to mono handling, so be careful when modifying it.
+ *
+ * Original source audioflinger/Threads.{h,cpp}
+ */
+class RecordBufferConverter
+{
+public:
+ RecordBufferConverter(
+ audio_channel_mask_t srcChannelMask, audio_format_t srcFormat,
+ uint32_t srcSampleRate,
+ audio_channel_mask_t dstChannelMask, audio_format_t dstFormat,
+ uint32_t dstSampleRate);
+
+ ~RecordBufferConverter();
+
+ /* Converts input data from an AudioBufferProvider by format, channelMask,
+ * and sampleRate to a destination buffer.
+ *
+ * Parameters
+ * dst: buffer to place the converted data.
+ * provider: buffer provider to obtain source data.
+ * frames: number of frames to convert
+ *
+ * Returns the number of frames converted.
+ */
+ size_t convert(void *dst, AudioBufferProvider *provider, size_t frames);
+
+ // returns NO_ERROR if constructor was successful
+ status_t initCheck() const {
+ // mSrcChannelMask set on successful updateParameters
+ return mSrcChannelMask != AUDIO_CHANNEL_INVALID ? NO_ERROR : NO_INIT;
+ }
+
+ // allows dynamic reconfigure of all parameters
+ status_t updateParameters(
+ audio_channel_mask_t srcChannelMask, audio_format_t srcFormat,
+ uint32_t srcSampleRate,
+ audio_channel_mask_t dstChannelMask, audio_format_t dstFormat,
+ uint32_t dstSampleRate);
+
+ // called to reset resampler buffers on record track discontinuity
+ void reset();
+
+private:
+ // format conversion when not using resampler
+ void convertNoResampler(void *dst, const void *src, size_t frames);
+
+ // format conversion when using resampler; modifies src in-place
+ void convertResampler(void *dst, /*not-a-const*/ void *src, size_t frames);
+
+ // user provided information
+ audio_channel_mask_t mSrcChannelMask;
+ audio_format_t mSrcFormat;
+ uint32_t mSrcSampleRate;
+ audio_channel_mask_t mDstChannelMask;
+ audio_format_t mDstFormat;
+ uint32_t mDstSampleRate;
+
+ // derived information
+ uint32_t mSrcChannelCount;
+ uint32_t mDstChannelCount;
+ size_t mDstFrameSize;
+
+ // format conversion buffer
+ void *mBuf;
+ size_t mBufFrames;
+ size_t mBufFrameSize;
+
+ // resampler info
+ AudioResampler *mResampler;
+
+ bool mIsLegacyDownmix; // legacy stereo to mono conversion needed
+ bool mIsLegacyUpmix; // legacy mono to stereo conversion needed
+ bool mRequiresFloat; // data processing requires float (e.g. resampler)
+ PassthruBufferProvider *mInputConverterProvider; // converts input to float
+ int8_t mIdxAry[sizeof(uint32_t) * 8]; // used for channel mask conversion
+};
+
+// ----------------------------------------------------------------------------
+} // namespace android
+
+#endif // ANDROID_RECORD_BUFFER_CONVERTER_H
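
The header above notes that RecordBufferConverter exposes a convert() call instead of a buffer-provider interface to save a memory copy. A hedged in-tree usage sketch, assuming an existing AudioBufferProvider* and caller-owned destination buffer (the function name, formats, and sample rates below are illustrative, not taken from this change):

#include <media/AudioBufferProvider.h>
#include <media/RecordBufferConverter.h>
#include <utils/Errors.h>

using namespace android;

// Sketch only: 'provider', 'dstBuffer', and the chosen formats are assumptions.
status_t captureSome(AudioBufferProvider* provider, void* dstBuffer, size_t frames) {
    RecordBufferConverter converter(
            AUDIO_CHANNEL_IN_STEREO, AUDIO_FORMAT_PCM_16_BIT, 48000,   // source side
            AUDIO_CHANNEL_IN_MONO,   AUDIO_FORMAT_PCM_FLOAT,  16000);  // destination side
    if (converter.initCheck() != NO_ERROR) {
        return NO_INIT;
    }
    // convert() pulls up to 'frames' frames from the provider and writes the
    // converted frames into dstBuffer; it returns the number actually converted.
    size_t converted = converter.convert(dstBuffer, provider, frames);
    return converted > 0 ? NO_ERROR : NOT_ENOUGH_DATA;
}
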
diff --git a/include/media/TypeConverter.h b/include/media/TypeConverter.h
index ffe4c1f..e262eef 100644
--- a/include/media/TypeConverter.h
+++ b/include/media/TypeConverter.h
@@ -80,6 +80,16 @@
typedef audio_mode_t Type;
typedef Vector<Type> Collection;
};
+struct UsageTraits
+{
+ typedef audio_usage_t Type;
+ typedef Vector<Type> Collection;
+};
+struct SourceTraits
+{
+ typedef audio_source_t Type;
+ typedef Vector<Type> Collection;
+};
template <typename T>
struct DefaultTraits
{
@@ -215,6 +225,8 @@
typedef TypeConverter<GainModeTraits> GainModeConverter;
typedef TypeConverter<StreamTraits> StreamTypeConverter;
typedef TypeConverter<AudioModeTraits> AudioModeConverter;
+typedef TypeConverter<UsageTraits> UsageTypeConverter;
+typedef TypeConverter<SourceTraits> SourceTypeConverter;
bool deviceFromString(const std::string& literalDevice, audio_devices_t& device);
diff --git a/include/media/audiohal/EffectBufferHalInterface.h b/include/media/audiohal/EffectBufferHalInterface.h
new file mode 100644
index 0000000..102ec56
--- /dev/null
+++ b/include/media/audiohal/EffectBufferHalInterface.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECT_BUFFER_HAL_INTERFACE_H
+#define ANDROID_HARDWARE_EFFECT_BUFFER_HAL_INTERFACE_H
+
+#include <system/audio_effect.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+// Abstraction for an audio buffer. It may be a "mirror" for
+// a buffer that the effect chain doesn't own, or a buffer owned by
+// the effect chain.
+class EffectBufferHalInterface : public RefBase
+{
+ public:
+ virtual audio_buffer_t* audioBuffer() = 0;
+ virtual void* externalData() const = 0;
+ // To be used when interacting with the code that doesn't know about
+ // "mirrored" buffers.
+ virtual void* ptr() {
+ return externalData() != nullptr ? externalData() : audioBuffer()->raw;
+ }
+
+ virtual void setExternalData(void* external) = 0;
+ virtual void setFrameCount(size_t frameCount) = 0;
+
+ virtual void update() = 0; // copies data from the external buffer, noop for allocated buffers
+ virtual void commit() = 0; // copies data to the external buffer, noop for allocated buffers
+
+ static status_t allocate(size_t size, sp<EffectBufferHalInterface>* buffer);
+ static status_t mirror(void* external, size_t size, sp<EffectBufferHalInterface>* buffer);
+
+ protected:
+ // Subclasses can not be constructed directly by clients.
+ EffectBufferHalInterface() {}
+
+ virtual ~EffectBufferHalInterface() {}
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECT_BUFFER_HAL_INTERFACE_H
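
The interface above distinguishes buffers owned by the effect framework (allocate()) from "mirrors" of externally owned memory (mirror()), with update()/commit() shuttling data across that boundary. A hedged sketch of the intended call pattern for a mirrored buffer, using only the methods declared above (variable names are illustrative):

#include <media/audiohal/EffectBufferHalInterface.h>
#include <utils/Errors.h>

using namespace android;

// Sketch only: 'external' points at 'frameCount' frames of audio owned elsewhere.
status_t wrapExternalBuffer(void* external, size_t sizeBytes, size_t frameCount,
                            sp<EffectBufferHalInterface>* outBuffer) {
    sp<EffectBufferHalInterface> buffer;
    status_t status = EffectBufferHalInterface::mirror(external, sizeBytes, &buffer);
    if (status != OK) return status;
    buffer->setFrameCount(frameCount);
    buffer->update();   // copy the external data in (no-op for allocate()d buffers)
    // ... processing happens against buffer->audioBuffer() / buffer->ptr() ...
    buffer->commit();   // copy processed data back out to the external buffer
    *outBuffer = buffer;
    return OK;
}
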
diff --git a/include/media/audiohal/EffectHalInterface.h b/include/media/audiohal/EffectHalInterface.h
index 7bbd3b5..7f9a6fd 100644
--- a/include/media/audiohal/EffectHalInterface.h
+++ b/include/media/audiohal/EffectHalInterface.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_HARDWARE_EFFECT_HAL_INTERFACE_H
#define ANDROID_HARDWARE_EFFECT_HAL_INTERFACE_H
+#include <media/audiohal/EffectBufferHalInterface.h>
#include <system/audio_effect.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
@@ -26,14 +27,20 @@
class EffectHalInterface : public RefBase
{
public:
+ // Set the input buffer.
+ virtual status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
+
+ // Set the output buffer.
+ virtual status_t setOutBuffer(const sp<EffectBufferHalInterface>& buffer) = 0;
+
// Effect process function. Takes input samples as specified
// in input buffer descriptor and output processed samples as specified
// in output buffer descriptor.
- virtual status_t process(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) = 0;
+ virtual status_t process() = 0;
// Process reverse stream function. This function is used to pass
// a reference stream to the effect engine.
- virtual status_t processReverse(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) = 0;
+ virtual status_t processReverse() = 0;
// Send a command and receive a response to/from effect engine.
virtual status_t command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
@@ -42,6 +49,9 @@
// Returns the effect descriptor.
virtual status_t getDescriptor(effect_descriptor_t *pDescriptor) = 0;
+ // Free resources on the remote side.
+ virtual status_t close() = 0;
+
protected:
// Subclasses can not be constructed directly by clients.
EffectHalInterface() {}
diff --git a/include/media/audiohal/StreamHalInterface.h b/include/media/audiohal/StreamHalInterface.h
index 5296829..7419c34 100644
--- a/include/media/audiohal/StreamHalInterface.h
+++ b/include/media/audiohal/StreamHalInterface.h
@@ -75,6 +75,10 @@
// Get current read/write position in the mmap buffer
virtual status_t getMmapPosition(struct audio_mmap_position *position) = 0;
+ // Set the priority of the thread that interacts with the HAL
+ // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
+ virtual status_t setHalThreadPriority(int priority) = 0;
+
protected:
// Subclasses can not be constructed directly by clients.
StreamHalInterface() {}
diff --git a/include/media/audiohal/hidl/HalDeathHandler.h b/include/media/audiohal/hidl/HalDeathHandler.h
new file mode 100644
index 0000000..c9b7084
--- /dev/null
+++ b/include/media/audiohal/hidl/HalDeathHandler.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_HIDL_HAL_DEATH_HANDLER_H
+#define ANDROID_HARDWARE_HIDL_HAL_DEATH_HANDLER_H
+
+#include <functional>
+#include <mutex>
+#include <unordered_map>
+
+#include <hidl/HidlSupport.h>
+#include <utils/Singleton.h>
+
+using android::hardware::hidl_death_recipient;
+using android::hidl::base::V1_0::IBase;
+
+namespace android {
+
+class HalDeathHandler : public hidl_death_recipient, private Singleton<HalDeathHandler> {
+ public:
+ typedef std::function<void()> AtExitHandler;
+
+ // Note that the exit handler gets called using a thread from
+ // the RPC threadpool, thus it needs to be thread-safe.
+ void registerAtExitHandler(void* cookie, AtExitHandler handler);
+ void unregisterAtExitHandler(void* cookie);
+
+ // hidl_death_recipient
+ virtual void serviceDied(uint64_t cookie, const wp<IBase>& who);
+
+ // Used both for (un)registering handlers, and for passing to
+ // '(un)linkToDeath'.
+ static sp<HalDeathHandler> getInstance();
+
+ private:
+ friend class Singleton<HalDeathHandler>;
+ typedef std::unordered_map<void*, AtExitHandler> Handlers;
+
+ HalDeathHandler();
+ virtual ~HalDeathHandler();
+
+ sp<HalDeathHandler> mSelf; // Allows the singleton instance to live forever.
+ std::mutex mHandlersLock;
+ Handlers mHandlers;
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_HIDL_HAL_DEATH_HANDLER_H
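
HalDeathHandler is both a hidl_death_recipient and a registry of per-client exit handlers; DevicesFactoryHalHidl below links the singleton to the audio HAL service. A hedged sketch of the client-side registration calls declared above (the free functions and the lambda body are illustrative, not part of this change):

#include <media/audiohal/hidl/HalDeathHandler.h>

// Sketch only: 'cookie' identifies the client that wants cleanup on HAL death.
void watchHalDeath(void* cookie) {
    // The handler runs on an RPC-threadpool thread, so it must be thread-safe.
    android::HalDeathHandler::getInstance()->registerAtExitHandler(
            cookie, []() { /* e.g. mark the HAL connection as lost */ });
    // The same singleton instance is what gets passed to the HAL proxy's
    // linkToDeath(), as DevicesFactoryHalHidl does below with cookie 0.
}

void stopWatchingHalDeath(void* cookie) {
    android::HalDeathHandler::getInstance()->unregisterAtExitHandler(cookie);
}
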
diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h
index be34d02..9130159 100644
--- a/include/media/mediaplayer.h
+++ b/include/media/mediaplayer.h
@@ -219,6 +219,8 @@
status_t setVideoSurfaceTexture(
const sp<IGraphicBufferProducer>& bufferProducer);
status_t setListener(const sp<MediaPlayerListener>& listener);
+ status_t getDefaultBufferingSettings(BufferingSettings* buffering /* nonnull */);
+ status_t setBufferingSettings(const BufferingSettings& buffering);
status_t prepare();
status_t prepareAsync();
status_t start();
diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h
index 1297b51..acf2d31 100644
--- a/include/media/nbaio/NBLog.h
+++ b/include/media/nbaio/NBLog.h
@@ -21,7 +21,7 @@
#include <binder/IMemory.h>
#include <utils/Mutex.h>
-#include <audio_utils/roundup.h>
+#include <audio_utils/fifo.h>
namespace android {
@@ -55,8 +55,11 @@
private:
friend class Writer;
Event mEvent; // event type
- size_t mLength; // length of additional data, 0 <= mLength <= 255
+ uint8_t mLength; // length of additional data, 0 <= mLength <= kMaxLength
const void *mData; // event type-specific data
+ static const size_t kMaxLength = 255;
+public:
+ static const size_t kOverhead = 3; // mEvent, mLength, mData[...], duplicate mLength
};
// representation of a single log entry in shared memory
@@ -70,13 +73,17 @@
// byte[2+mLength] duplicate copy of mLength to permit reverse scan
// byte[3+mLength] start of next log entry
-// located in shared memory
+public:
+
+// Located in shared memory, must be POD.
+// Exactly one process must explicitly call the constructor or use placement new.
+// Since this is a POD, the destructor is empty and there is no need to call it explicitly.
struct Shared {
- Shared() : mRear(0) { }
+ Shared() /* mRear initialized via default constructor */ { }
/*virtual*/ ~Shared() { }
- volatile int32_t mRear; // index one byte past the end of most recent Entry
- char mBuffer[0]; // circular buffer for entries
+ audio_utils_fifo_index mRear; // index one byte past the end of most recent Entry
+ char mBuffer[0]; // circular buffer for entries
};
public:
@@ -117,10 +124,10 @@
// Input parameter 'size' is the desired size of the timeline in byte units.
// The size of the shared memory must be at least Timeline::sharedSize(size).
- Writer(size_t size, void *shared);
- Writer(size_t size, const sp<IMemory>& iMemory);
+ Writer(void *shared, size_t size);
+ Writer(const sp<IMemory>& iMemory, size_t size);
- virtual ~Writer() { }
+ virtual ~Writer();
virtual void log(const char *string);
virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3)));
@@ -138,13 +145,16 @@
sp<IMemory> getIMemory() const { return mIMemory; }
private:
+ // 0 <= length <= kMaxLength
void log(Event event, const void *data, size_t length);
void log(const Entry *entry, bool trusted = false);
- const size_t mSize; // circular buffer size in bytes, must be a power of 2
Shared* const mShared; // raw pointer to shared memory
- const sp<IMemory> mIMemory; // ref-counted version
- int32_t mRear; // my private copy of mShared->mRear
+ sp<IMemory> mIMemory; // ref-counted version, initialized in constructor and then const
+ audio_utils_fifo * const mFifo; // FIFO itself,
+ // non-NULL unless constructor fails
+ audio_utils_fifo_writer * const mFifoWriter; // used to write to FIFO,
+ // non-NULL unless dummy constructor used
bool mEnabled; // whether to actually log
};
@@ -154,7 +164,7 @@
class LockedWriter : public Writer {
public:
LockedWriter();
- LockedWriter(size_t size, void *shared);
+ LockedWriter(void *shared, size_t size);
virtual void log(const char *string);
virtual void logf(const char *fmt, ...) __attribute__ ((format (printf, 2, 3)));
@@ -176,21 +186,24 @@
// Input parameter 'size' is the desired size of the timeline in byte units.
// The size of the shared memory must be at least Timeline::sharedSize(size).
- Reader(size_t size, const void *shared);
- Reader(size_t size, const sp<IMemory>& iMemory);
+ Reader(const void *shared, size_t size);
+ Reader(const sp<IMemory>& iMemory, size_t size);
- virtual ~Reader() { }
+ virtual ~Reader();
void dump(int fd, size_t indent = 0);
bool isIMemory(const sp<IMemory>& iMemory) const;
private:
- const size_t mSize; // circular buffer size in bytes, must be a power of 2
- const Shared* const mShared; // raw pointer to shared memory
- const sp<IMemory> mIMemory; // ref-counted version
- int32_t mFront; // index of oldest acknowledged Entry
+ /*const*/ Shared* const mShared; // raw pointer to shared memory, actually const but not
+ // declared as const because audio_utils_fifo() constructor
+ sp<IMemory> mIMemory; // ref-counted version, assigned only in constructor
int mFd; // file descriptor
int mIndent; // indentation level
+ audio_utils_fifo * const mFifo; // FIFO itself,
+ // non-NULL unless constructor fails
+ audio_utils_fifo_reader * const mFifoReader; // used to read from FIFO,
+ // non-NULL unless constructor fails
void dumpLine(const String8& timestamp, String8& body);
diff --git a/include/media/stagefright/MediaExtractor.h b/include/media/stagefright/MediaExtractor.h
index fbb4a67..b460ef7 100644
--- a/include/media/stagefright/MediaExtractor.h
+++ b/include/media/stagefright/MediaExtractor.h
@@ -72,7 +72,6 @@
virtual ~MediaExtractor() {}
private:
- bool mIsDrm;
typedef bool (*SnifferFunc)(
const sp<DataSource> &source, String8 *mimeType,
diff --git a/include/media/stagefright/Utils.h b/include/media/stagefright/Utils.h
index 8eff914..88a416a 100644
--- a/include/media/stagefright/Utils.h
+++ b/include/media/stagefright/Utils.h
@@ -23,6 +23,7 @@
#include <utils/Errors.h>
#include <utils/RefBase.h>
#include <system/audio.h>
+#include <media/BufferingSettings.h>
#include <media/MediaPlayerInterface.h>
namespace android {
@@ -90,6 +91,9 @@
void readFromAMessage(
const sp<AMessage> &msg, AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */);
+void writeToAMessage(const sp<AMessage> &msg, const BufferingSettings &buffering);
+void readFromAMessage(const sp<AMessage> &msg, BufferingSettings *buffering /* nonnull */);
+
AString nameForFd(int fd);
} // namespace android
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
new file mode 100644
index 0000000..03dce0c
--- /dev/null
+++ b/media/libaudioclient/Android.bp
@@ -0,0 +1,45 @@
+cc_library_shared {
+ name: "libaudioclient",
+ srcs: [
+ "AudioEffect.cpp",
+ "AudioPolicy.cpp",
+ "AudioRecord.cpp",
+ "AudioSystem.cpp",
+ "AudioTrack.cpp",
+ "AudioTrackShared.cpp",
+ "IAudioFlinger.cpp",
+ "IAudioFlingerClient.cpp",
+ "IAudioPolicyService.cpp",
+ "IAudioPolicyServiceClient.cpp",
+ "IAudioRecord.cpp",
+ "IAudioTrack.cpp",
+ "IEffect.cpp",
+ "IEffectClient.cpp",
+ "ToneGenerator.cpp",
+ ],
+ shared_libs: [
+ "liblog",
+ "libcutils",
+ "libutils",
+ "libbinder",
+ "libdl",
+ "libaudioutils",
+ ],
+ export_shared_lib_headers: ["libbinder"],
+ // for memory heap analysis
+ static_libs: [
+ "libc_malloc_debug_backtrace",
+ "libc_logging",
+ ],
+ cflags: [
+ "-Werror",
+ "-Wno-error=deprecated-declarations",
+ "-Wall",
+ ],
+ sanitize: {
+ misc_undefined : [
+ "unsigned-integer-overflow",
+ "signed-integer-overflow",
+ ],
+ },
+}
diff --git a/media/libaudioclient/Android.mk b/media/libaudioclient/Android.mk
deleted file mode 100644
index 348ab50..0000000
--- a/media/libaudioclient/Android.mk
+++ /dev/null
@@ -1,50 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES += \
- AudioEffect.cpp \
- AudioPolicy.cpp \
- AudioRecord.cpp \
- AudioSystem.cpp \
- AudioTrack.cpp \
- AudioTrackShared.cpp \
- IAudioFlinger.cpp \
- IAudioFlingerClient.cpp \
- IAudioPolicyService.cpp \
- IAudioPolicyServiceClient.cpp \
- IAudioRecord.cpp \
- IAudioTrack.cpp \
- IEffect.cpp \
- IEffectClient.cpp \
- ToneGenerator.cpp \
-
-LOCAL_SHARED_LIBRARIES := \
- liblog libcutils libutils libbinder \
- libdl libaudioutils \
-
-LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := libbinder
-
-# for memory heap analysis
-LOCAL_STATIC_LIBRARIES := libc_malloc_debug_backtrace libc_logging
-
-LOCAL_MODULE:= libaudioclient
-
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
-
-LOCAL_C_INCLUDES := \
- $(TOP)/frameworks/native/include/media/openmax \
- $(TOP)/frameworks/av/include/media/ \
- $(TOP)/frameworks/av/media/libstagefright \
- $(TOP)/frameworks/av/media/libmedia/aidl \
- $(call include-path-for, audio-utils)
-
-LOCAL_EXPORT_C_INCLUDE_DIRS := \
- frameworks/av/include/media \
- frameworks/av/media/libmedia/aidl
-
-LOCAL_CFLAGS += -Werror -Wno-error=deprecated-declarations -Wall
-LOCAL_SANITIZE := unsigned-integer-overflow signed-integer-overflow
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/media/libaudioclient/IAudioPolicyService.cpp b/media/libaudioclient/IAudioPolicyService.cpp
index 776e509..f0f413d 100644
--- a/media/libaudioclient/IAudioPolicyService.cpp
+++ b/media/libaudioclient/IAudioPolicyService.cpp
@@ -834,10 +834,15 @@
static_cast <audio_policy_dev_state_t>(data.readInt32());
const char *device_address = data.readCString();
const char *device_name = data.readCString();
- reply->writeInt32(static_cast<uint32_t> (setDeviceConnectionState(device,
- state,
- device_address,
- device_name)));
+ if (device_address == nullptr || device_name == nullptr) {
+ ALOGE("Bad Binder transaction: SET_DEVICE_CONNECTION_STATE for device %u", device);
+ reply->writeInt32(static_cast<int32_t> (BAD_VALUE));
+ } else {
+ reply->writeInt32(static_cast<uint32_t> (setDeviceConnectionState(device,
+ state,
+ device_address,
+ device_name)));
+ }
return NO_ERROR;
} break;
@@ -846,8 +851,13 @@
audio_devices_t device =
static_cast<audio_devices_t> (data.readInt32());
const char *device_address = data.readCString();
- reply->writeInt32(static_cast<uint32_t> (getDeviceConnectionState(device,
- device_address)));
+ if (device_address == nullptr) {
+ ALOGE("Bad Binder transaction: GET_DEVICE_CONNECTION_STATE for device %u", device);
+ reply->writeInt32(static_cast<int32_t> (AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
+ } else {
+ reply->writeInt32(static_cast<uint32_t> (getDeviceConnectionState(device,
+ device_address)));
+ }
return NO_ERROR;
} break;
@@ -857,9 +867,14 @@
static_cast <audio_devices_t>(data.readInt32());
const char *device_address = data.readCString();
const char *device_name = data.readCString();
- reply->writeInt32(static_cast<uint32_t> (handleDeviceConfigChange(device,
- device_address,
- device_name)));
+ if (device_address == nullptr || device_name == nullptr) {
+ ALOGE("Bad Binder transaction: HANDLE_DEVICE_CONFIG_CHANGE for device %u", device);
+ reply->writeInt32(static_cast<int32_t> (BAD_VALUE));
+ } else {
+ reply->writeInt32(static_cast<uint32_t> (handleDeviceConfigChange(device,
+ device_address,
+ device_name)));
+ }
return NO_ERROR;
} break;
diff --git a/media/libaudiohal/Android.mk b/media/libaudiohal/Android.mk
index e82766f..5e00b77 100644
--- a/media/libaudiohal/Android.mk
+++ b/media/libaudiohal/Android.mk
@@ -3,44 +3,52 @@
include $(CLEAR_VARS)
LOCAL_SHARED_LIBRARIES := \
- libcutils \
+ libcutils \
+ libeffects \
libhardware \
- liblog \
- libutils \
- libeffects
+ liblog \
+ libutils
-ifeq ($(ENABLE_TREBLE), true)
+ifeq ($(USE_LEGACY_LOCAL_AUDIO_HAL), true)
-LOCAL_CFLAGS += -DENABLE_TREBLE
+# Use audiohal directly w/o hwbinder middleware.
+# This is for performance comparison and debugging only.
+
+LOCAL_SRC_FILES := \
+ DeviceHalLocal.cpp \
+ DevicesFactoryHalLocal.cpp \
+ EffectBufferHalLocal.cpp \
+ EffectHalLocal.cpp \
+ EffectsFactoryHalLocal.cpp \
+ StreamHalLocal.cpp
+
+else # if !USE_LEGACY_LOCAL_AUDIO_HAL
LOCAL_SRC_FILES := \
ConversionHelperHidl.cpp \
+ HalDeathHandlerHidl.cpp \
DeviceHalHidl.cpp \
DevicesFactoryHalHidl.cpp \
+ EffectBufferHalHidl.cpp \
EffectHalHidl.cpp \
EffectsFactoryHalHidl.cpp \
StreamHalHidl.cpp
LOCAL_SHARED_LIBRARIES += \
+ libbase \
+ libfmq \
libhwbinder \
libhidlbase \
+ libhidlmemory \
libhidltransport \
- libbase \
android.hardware.audio@2.0 \
android.hardware.audio.common@2.0 \
android.hardware.audio.common@2.0-util \
- android.hardware.audio.effect@2.0 \
+ android.hardware.audio.effect@2.0 \
+ android.hidl.memory@1.0 \
libmedia_helper
-else # if !ENABLE_TREBLE
-
-LOCAL_SRC_FILES := \
- DeviceHalLocal.cpp \
- DevicesFactoryHalLocal.cpp \
- EffectHalLocal.cpp \
- EffectsFactoryHalLocal.cpp \
- StreamHalLocal.cpp
-endif # ENABLE_TREBLE
+endif # USE_LEGACY_LOCAL_AUDIO_HAL
LOCAL_MODULE := libaudiohal
diff --git a/media/libaudiohal/ConversionHelperHidl.cpp b/media/libaudiohal/ConversionHelperHidl.cpp
index 1fabfbe..9f9eb75 100644
--- a/media/libaudiohal/ConversionHelperHidl.cpp
+++ b/media/libaudiohal/ConversionHelperHidl.cpp
@@ -82,26 +82,8 @@
}
}
-// static
-void ConversionHelperHidl::crashIfHalIsDead(const Status& status) {
- LOG_ALWAYS_FATAL_IF(
- status.transactionError() == DEAD_OBJECT, "HAL server crashed, need to restart");
-}
-
-status_t ConversionHelperHidl::processReturn(const char* funcName, const Status& status) {
- const status_t st = status.transactionError();
- ALOGE_IF(st, "%s %p %s: %s (from rpc)", mClassName, this, funcName, strerror(-st));
- crashIfHalIsDead(status);
- return st;
-}
-
-status_t ConversionHelperHidl::processReturn(
- const char* funcName, const Status& status, hardware::audio::V2_0::Result retval) {
- const status_t st = status.isOk() ? analyzeResult(retval) : status.transactionError();
- ALOGE_IF(!status.isOk() && st, "%s %p %s: %s (from rpc)",
- mClassName, this, funcName, strerror(-st));
- crashIfHalIsDead(status);
- return st;
+void ConversionHelperHidl::emitError(const char* funcName, const char* description) {
+ ALOGE("%s %p %s: %s (from rpc)", mClassName, this, funcName, description);
}
} // namespace android
diff --git a/media/libaudiohal/ConversionHelperHidl.h b/media/libaudiohal/ConversionHelperHidl.h
index 428daf2..23fb360 100644
--- a/media/libaudiohal/ConversionHelperHidl.h
+++ b/media/libaudiohal/ConversionHelperHidl.h
@@ -30,9 +30,6 @@
namespace android {
class ConversionHelperHidl {
- public:
- static void crashIfHalIsDead(const Status& status);
-
protected:
static status_t keysFromHal(const String8& keys, hidl_vec<hidl_string> *hidlKeys);
static status_t parametersFromHal(const String8& kvPairs, hidl_vec<ParameterValue> *hidlParams);
@@ -40,18 +37,22 @@
ConversionHelperHidl(const char* className);
- status_t processReturn(const char* funcName, const Return<void>& ret) {
- return processReturn(funcName, ret.getStatus());
- }
-
template<typename R, typename T>
status_t processReturn(const char* funcName, const Return<R>& ret, T *retval) {
- if (ret.getStatus().isOk()) {
+ if (ret.isOk()) {
// This way it also works for enum class to unscoped enum conversion.
*retval = static_cast<T>(static_cast<R>(ret));
return OK;
}
- return processReturn(funcName, ret.getStatus());
+ return processReturn(funcName, ret);
+ }
+
+ template<typename T>
+ status_t processReturn(const char* funcName, const Return<T>& ret) {
+ if (!ret.isOk()) {
+ emitError(funcName, ret.description().c_str());
+ }
+ return ret.isOk() ? OK : FAILED_TRANSACTION;
}
status_t processReturn(const char* funcName, const Return<hardware::audio::V2_0::Result>& ret) {
@@ -61,16 +62,19 @@
template<typename T>
status_t processReturn(
const char* funcName, const Return<T>& ret, hardware::audio::V2_0::Result retval) {
- return processReturn(funcName, ret.getStatus(), retval);
+ const status_t st = ret.isOk() ? analyzeResult(retval) : FAILED_TRANSACTION;
+ if (!ret.isOk()) {
+ emitError(funcName, ret.description().c_str());
+ }
+ return st;
}
private:
const char* mClassName;
static status_t analyzeResult(const hardware::audio::V2_0::Result& result);
- status_t processReturn(const char* funcName, const Status& status);
- status_t processReturn(
- const char* funcName, const Status& status, hardware::audio::V2_0::Result retval);
+
+ void emitError(const char* funcName, const char* description);
};
} // namespace android
diff --git a/media/libaudiohal/DevicesFactoryHalHidl.cpp b/media/libaudiohal/DevicesFactoryHalHidl.cpp
index efcc089..a91f145 100644
--- a/media/libaudiohal/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/DevicesFactoryHalHidl.cpp
@@ -20,6 +20,7 @@
//#define LOG_NDEBUG 0
#include <android/hardware/audio/2.0/IDevice.h>
+#include <media/audiohal/hidl/HalDeathHandler.h>
#include <utils/Log.h>
#include "ConversionHelperHidl.h"
@@ -40,6 +41,11 @@
DevicesFactoryHalHidl::DevicesFactoryHalHidl() {
mDevicesFactory = IDevicesFactory::getService("audio_devices_factory");
+ if (mDevicesFactory != 0) {
+ // It is assumed that DevicesFactory is owned by AudioFlinger
+ // and thus has the same lifespan.
+ mDevicesFactory->linkToDeath(HalDeathHandler::getInstance(), 0 /*cookie*/);
+ }
}
DevicesFactoryHalHidl::~DevicesFactoryHalHidl() {
@@ -78,13 +84,12 @@
*device = new DeviceHalHidl(result);
}
});
- if (ret.getStatus().isOk()) {
+ if (ret.isOk()) {
if (retval == Result::OK) return OK;
else if (retval == Result::INVALID_ARGUMENTS) return BAD_VALUE;
else return NO_INIT;
}
- ConversionHelperHidl::crashIfHalIsDead(ret.getStatus());
- return ret.getStatus().transactionError();
+ return FAILED_TRANSACTION;
}
} // namespace android
diff --git a/media/libaudiohal/EffectBufferHalHidl.cpp b/media/libaudiohal/EffectBufferHalHidl.cpp
new file mode 100644
index 0000000..446d2ef
--- /dev/null
+++ b/media/libaudiohal/EffectBufferHalHidl.cpp
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <atomic>
+
+#define LOG_TAG "EffectBufferHalHidl"
+//#define LOG_NDEBUG 0
+
+#include <android/hidl/memory/1.0/IAllocator.h>
+#include <hidlmemory/mapping.h>
+#include <utils/Log.h>
+
+#include "ConversionHelperHidl.h"
+#include "EffectBufferHalHidl.h"
+
+using ::android::hardware::Return;
+using ::android::hardware::Status;
+using ::android::hidl::memory::V1_0::IAllocator;
+
+namespace android {
+
+// static
+uint64_t EffectBufferHalHidl::makeUniqueId() {
+ static std::atomic<uint64_t> counter{1};
+ return counter++;
+}
+
+// static
+status_t EffectBufferHalInterface::allocate(
+ size_t size, sp<EffectBufferHalInterface>* buffer) {
+ return mirror(nullptr, size, buffer);
+}
+
+// static
+status_t EffectBufferHalInterface::mirror(
+ void* external, size_t size, sp<EffectBufferHalInterface>* buffer) {
+ sp<EffectBufferHalInterface> tempBuffer = new EffectBufferHalHidl(size);
+ status_t result = reinterpret_cast<EffectBufferHalHidl*>(tempBuffer.get())->init();
+ if (result == OK) {
+ tempBuffer->setExternalData(external);
+ *buffer = tempBuffer;
+ }
+ return result;
+}
+
+EffectBufferHalHidl::EffectBufferHalHidl(size_t size)
+ : mBufferSize(size), mExternalData(nullptr), mAudioBuffer{0, {nullptr}} {
+ mHidlBuffer.id = makeUniqueId();
+ mHidlBuffer.frameCount = 0;
+}
+
+EffectBufferHalHidl::~EffectBufferHalHidl() {
+}
+
+status_t EffectBufferHalHidl::init() {
+ sp<IAllocator> ashmem = IAllocator::getService("ashmem");
+ if (ashmem == 0) {
+ ALOGE("Failed to retrieve ashmem allocator service");
+ return NO_INIT;
+ }
+ status_t retval = NO_MEMORY;
+ Return<void> result = ashmem->allocate(
+ mBufferSize,
+ [&](bool success, const hidl_memory& memory) {
+ if (success) {
+ mHidlBuffer.data = memory;
+ retval = OK;
+ }
+ });
+ if (retval == OK) {
+ mMemory = hardware::mapMemory(mHidlBuffer.data);
+ if (mMemory != 0) {
+ mMemory->update();
+ mAudioBuffer.raw = static_cast<void*>(mMemory->getPointer());
+ memset(mAudioBuffer.raw, 0, mMemory->getSize());
+ mMemory->commit();
+ } else {
+ ALOGE("Failed to map allocated ashmem");
+ retval = NO_MEMORY;
+ }
+ }
+ return retval;
+}
+
+audio_buffer_t* EffectBufferHalHidl::audioBuffer() {
+ return &mAudioBuffer;
+}
+
+void* EffectBufferHalHidl::externalData() const {
+ return mExternalData;
+}
+
+void EffectBufferHalHidl::setFrameCount(size_t frameCount) {
+ mHidlBuffer.frameCount = frameCount;
+ mAudioBuffer.frameCount = frameCount;
+}
+
+void EffectBufferHalHidl::setExternalData(void* external) {
+ mExternalData = external;
+}
+
+void EffectBufferHalHidl::update() {
+ if (mExternalData == nullptr) return;
+ mMemory->update();
+ memcpy(mAudioBuffer.raw, mExternalData, mBufferSize);
+ mMemory->commit();
+}
+
+void EffectBufferHalHidl::commit() {
+ if (mExternalData == nullptr) return;
+ memcpy(mExternalData, mAudioBuffer.raw, mBufferSize);
+}
+
+} // namespace android
diff --git a/media/libaudiohal/EffectBufferHalHidl.h b/media/libaudiohal/EffectBufferHalHidl.h
new file mode 100644
index 0000000..4c4ec87
--- /dev/null
+++ b/media/libaudiohal/EffectBufferHalHidl.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECT_BUFFER_HAL_HIDL_H
+#define ANDROID_HARDWARE_EFFECT_BUFFER_HAL_HIDL_H
+
+#include <android/hardware/audio/effect/2.0/types.h>
+#include <android/hidl/memory/1.0/IMemory.h>
+#include <hidl/HidlSupport.h>
+#include <media/audiohal/EffectBufferHalInterface.h>
+#include <system/audio_effect.h>
+
+using android::hardware::audio::effect::V2_0::AudioBuffer;
+using android::hardware::hidl_memory;
+using android::hidl::memory::V1_0::IMemory;
+
+namespace android {
+
+class EffectBufferHalHidl : public EffectBufferHalInterface
+{
+ public:
+ virtual audio_buffer_t* audioBuffer();
+ virtual void* externalData() const;
+
+ virtual void setExternalData(void* external);
+ virtual void setFrameCount(size_t frameCount);
+
+ virtual void update();
+ virtual void commit();
+
+ const AudioBuffer& hidlBuffer() const { return mHidlBuffer; }
+
+ private:
+ friend class EffectBufferHalInterface;
+
+ static uint64_t makeUniqueId();
+
+ const size_t mBufferSize;
+ void* mExternalData;
+ AudioBuffer mHidlBuffer;
+ sp<IMemory> mMemory;
+ audio_buffer_t mAudioBuffer;
+
+ // Can not be constructed directly by clients.
+ explicit EffectBufferHalHidl(size_t size);
+
+ virtual ~EffectBufferHalHidl();
+
+ status_t init();
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECT_BUFFER_HAL_HIDL_H
diff --git a/media/libaudiohal/EffectBufferHalLocal.cpp b/media/libaudiohal/EffectBufferHalLocal.cpp
new file mode 100644
index 0000000..20b1339
--- /dev/null
+++ b/media/libaudiohal/EffectBufferHalLocal.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "EffectBufferHalLocal"
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+
+#include "EffectBufferHalLocal.h"
+
+namespace android {
+
+// static
+status_t EffectBufferHalInterface::allocate(
+ size_t size, sp<EffectBufferHalInterface>* buffer) {
+ *buffer = new EffectBufferHalLocal(size);
+ return OK;
+}
+
+// static
+status_t EffectBufferHalInterface::mirror(
+ void* external, size_t size, sp<EffectBufferHalInterface>* buffer) {
+ *buffer = new EffectBufferHalLocal(external, size);
+ return OK;
+}
+
+EffectBufferHalLocal::EffectBufferHalLocal(size_t size)
+ : mOwnBuffer(new uint8_t[size]),
+ mBufferSize(size),
+ mAudioBuffer{0, {mOwnBuffer.get()}} {
+}
+
+EffectBufferHalLocal::EffectBufferHalLocal(void* external, size_t size)
+ : mOwnBuffer(nullptr),
+ mBufferSize(size),
+ mAudioBuffer{0, {external}} {
+}
+
+EffectBufferHalLocal::~EffectBufferHalLocal() {
+}
+
+audio_buffer_t* EffectBufferHalLocal::audioBuffer() {
+ return &mAudioBuffer;
+}
+
+void* EffectBufferHalLocal::externalData() const {
+ return mAudioBuffer.raw;
+}
+
+void EffectBufferHalLocal::setFrameCount(size_t frameCount) {
+ mAudioBuffer.frameCount = frameCount;
+}
+
+void EffectBufferHalLocal::setExternalData(void* external) {
+ ALOGE_IF(mOwnBuffer != nullptr, "Attempt to set external data for allocated buffer");
+ mAudioBuffer.raw = external;
+}
+
+void EffectBufferHalLocal::update() {
+}
+
+void EffectBufferHalLocal::commit() {
+}
+
+} // namespace android
diff --git a/media/libaudiohal/EffectBufferHalLocal.h b/media/libaudiohal/EffectBufferHalLocal.h
new file mode 100644
index 0000000..df7bd43
--- /dev/null
+++ b/media/libaudiohal/EffectBufferHalLocal.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_HARDWARE_EFFECT_BUFFER_HAL_LOCAL_H
+#define ANDROID_HARDWARE_EFFECT_BUFFER_HAL_LOCAL_H
+
+#include <memory>
+
+#include <media/audiohal/EffectBufferHalInterface.h>
+#include <system/audio_effect.h>
+
+namespace android {
+
+class EffectBufferHalLocal : public EffectBufferHalInterface
+{
+ public:
+ virtual audio_buffer_t* audioBuffer();
+ virtual void* externalData() const;
+
+ virtual void setExternalData(void* external);
+ virtual void setFrameCount(size_t frameCount);
+
+ virtual void update();
+ virtual void commit();
+
+ private:
+ friend class EffectBufferHalInterface;
+
+ std::unique_ptr<uint8_t[]> mOwnBuffer;
+ const size_t mBufferSize;
+ audio_buffer_t mAudioBuffer;
+
+ // Can not be constructed directly by clients.
+ explicit EffectBufferHalLocal(size_t size);
+ EffectBufferHalLocal(void* external, size_t size);
+
+ virtual ~EffectBufferHalLocal();
+
+ status_t init();
+};
+
+} // namespace android
+
+#endif // ANDROID_HARDWARE_EFFECT_BUFFER_HAL_LOCAL_H
diff --git a/media/libaudiohal/EffectHalHidl.cpp b/media/libaudiohal/EffectHalHidl.cpp
index 1cd1997..6cf6412 100644
--- a/media/libaudiohal/EffectHalHidl.cpp
+++ b/media/libaudiohal/EffectHalHidl.cpp
@@ -21,21 +21,26 @@
#include <utils/Log.h>
#include "ConversionHelperHidl.h"
+#include "EffectBufferHalHidl.h"
#include "EffectHalHidl.h"
#include "HidlUtils.h"
+using ::android::hardware::audio::effect::V2_0::AudioBuffer;
+using ::android::hardware::audio::effect::V2_0::MessageQueueFlagBits;
using ::android::hardware::audio::effect::V2_0::Result;
using ::android::hardware::hidl_vec;
+using ::android::hardware::MQDescriptorSync;
using ::android::hardware::Return;
using ::android::hardware::Status;
namespace android {
EffectHalHidl::EffectHalHidl(const sp<IEffect>& effect, uint64_t effectId)
- : mEffect(effect), mEffectId(effectId) {
+ : mEffect(effect), mEffectId(effectId), mBuffersChanged(true) {
}
EffectHalHidl::~EffectHalHidl() {
+ close();
}
// static
@@ -64,33 +69,121 @@
}
}
-status_t EffectHalHidl::process(audio_buffer_t */*inBuffer*/, audio_buffer_t */*outBuffer*/) {
- // Idea -- intercept set buffer config command, capture audio format, use it
- // for determining frame size in bytes on input and output.
+status_t EffectHalHidl::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+ if (mInBuffer == 0 || buffer->audioBuffer() != mInBuffer->audioBuffer()) {
+ mBuffersChanged = true;
+ }
+ mInBuffer = buffer;
return OK;
}
-status_t EffectHalHidl::processReverse(audio_buffer_t */*inBuffer*/, audio_buffer_t */*outBuffer*/) {
+status_t EffectHalHidl::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+ if (mOutBuffer == 0 || buffer->audioBuffer() != mOutBuffer->audioBuffer()) {
+ mBuffersChanged = true;
+ }
+ mOutBuffer = buffer;
return OK;
}
+status_t EffectHalHidl::process() {
+ return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS));
+}
+
+status_t EffectHalHidl::processReverse() {
+ return processImpl(static_cast<uint32_t>(MessageQueueFlagBits::REQUEST_PROCESS_REVERSE));
+}
+
+status_t EffectHalHidl::prepareForProcessing() {
+ std::unique_ptr<StatusMQ> tempStatusMQ;
+ Result retval;
+ Return<void> ret = mEffect->prepareForProcessing(
+ [&](Result r, const MQDescriptorSync<Result>& statusMQ) {
+ retval = r;
+ if (retval == Result::OK) {
+ tempStatusMQ.reset(new StatusMQ(statusMQ));
+ if (tempStatusMQ->isValid() && tempStatusMQ->getEventFlagWord()) {
+ EventFlag::createEventFlag(tempStatusMQ->getEventFlagWord(), &mEfGroup);
+ }
+ }
+ });
+ if (!ret.isOk() || retval != Result::OK) {
+ return ret.isOk() ? analyzeResult(retval) : FAILED_TRANSACTION;
+ }
+ if (!tempStatusMQ || !tempStatusMQ->isValid() || !mEfGroup) {
+ ALOGE_IF(!tempStatusMQ, "Failed to obtain status message queue for effects");
+ ALOGE_IF(tempStatusMQ && !tempStatusMQ->isValid(),
+ "Status message queue for effects is invalid");
+ ALOGE_IF(!mEfGroup, "Event flag creation for effects failed");
+ return NO_INIT;
+ }
+ mStatusMQ = std::move(tempStatusMQ);
+ return OK;
+}
+
+status_t EffectHalHidl::processImpl(uint32_t mqFlag) {
+ if (mEffect == 0 || mInBuffer == 0 || mOutBuffer == 0) return NO_INIT;
+ status_t status;
+ if (!mStatusMQ && (status = prepareForProcessing()) != OK) {
+ return status;
+ }
+ if (mBuffersChanged && (status = setProcessBuffers()) != OK) {
+ return status;
+ }
+ // The data is already in the buffers, just need to flush it and wake up the server side.
+ std::atomic_thread_fence(std::memory_order_release);
+ mEfGroup->wake(mqFlag);
+ uint32_t efState = 0;
+retry:
+ status_t ret = mEfGroup->wait(
+ static_cast<uint32_t>(MessageQueueFlagBits::DONE_PROCESSING), &efState, NS_PER_SEC);
+ if (efState & static_cast<uint32_t>(MessageQueueFlagBits::DONE_PROCESSING)) {
+ Result retval = Result::NOT_INITIALIZED;
+ mStatusMQ->read(&retval);
+ if (retval == Result::OK || retval == Result::INVALID_STATE) {
+ // Sync back the changed contents of the buffer.
+ std::atomic_thread_fence(std::memory_order_acquire);
+ }
+ return analyzeResult(retval);
+ }
+ if (ret == -EAGAIN) {
+ // This normally retries no more than once.
+ goto retry;
+ }
+ return ret;
+}
+
+status_t EffectHalHidl::setProcessBuffers() {
+ Return<Result> ret = mEffect->setProcessBuffers(
+ reinterpret_cast<EffectBufferHalHidl*>(mInBuffer.get())->hidlBuffer(),
+ reinterpret_cast<EffectBufferHalHidl*>(mOutBuffer.get())->hidlBuffer());
+ if (ret.isOk() && ret == Result::OK) {
+ mBuffersChanged = false;
+ return OK;
+ }
+ return ret.isOk() ? analyzeResult(ret) : FAILED_TRANSACTION;
+}
+
status_t EffectHalHidl::command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
uint32_t *replySize, void *pReplyData) {
if (mEffect == 0) return NO_INIT;
hidl_vec<uint8_t> hidlData;
- hidlData.setToExternal(reinterpret_cast<uint8_t*>(pCmdData), cmdSize);
+ if (pCmdData != nullptr && cmdSize > 0) {
+ hidlData.setToExternal(reinterpret_cast<uint8_t*>(pCmdData), cmdSize);
+ }
status_t status;
+ uint32_t replySizeStub = 0;
+ if (replySize == nullptr) replySize = &replySizeStub;
Return<void> ret = mEffect->command(cmdCode, hidlData, *replySize,
[&](int32_t s, const hidl_vec<uint8_t>& result) {
status = s;
if (status == 0) {
if (*replySize > result.size()) *replySize = result.size();
- if (pReplyData && *replySize > 0) {
+ if (pReplyData != nullptr && *replySize > 0) {
memcpy(pReplyData, &result[0], *replySize);
}
}
});
- return status;
+ return ret.isOk() ? status : FAILED_TRANSACTION;
}
status_t EffectHalHidl::getDescriptor(effect_descriptor_t *pDescriptor) {
@@ -103,8 +196,13 @@
effectDescriptorToHal(result, pDescriptor);
}
});
- ConversionHelperHidl::crashIfHalIsDead(ret.getStatus());
- return ret.getStatus().isOk() ? analyzeResult(retval) : ret.getStatus().transactionError();
+ return ret.isOk() ? analyzeResult(retval) : FAILED_TRANSACTION;
+}
+
+status_t EffectHalHidl::close() {
+ if (mEffect == 0) return NO_INIT;
+ Return<Result> ret = mEffect->close();
+ return ret.isOk() ? analyzeResult(ret) : FAILED_TRANSACTION;
}
} // namespace android
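
The effect wrapper no longer takes audio_buffer_t pointers per call; callers register shared buffers once and trigger processing over the FMQ. The following is a minimal caller-side sketch, not part of the patch: it mirrors the EffectBufferHalInterface::mirror()/setExternalData()/update()/commit() usage that DownmixerBufferProvider adopts later in this patch, and 'effect', the PCM pointers, and the sizes are placeholders.

    #include <media/audiohal/EffectBufferHalInterface.h>
    #include <media/audiohal/EffectHalInterface.h>

    // Sketch: run one processing pass through the new buffer-based effect API.
    static android::status_t runEffectOnce(
            const android::sp<android::EffectHalInterface>& effect,
            void* srcPcm, void* dstPcm, size_t frames, size_t frameSizeBytes) {
        using namespace android;
        sp<EffectBufferHalInterface> in, out;
        // Per-frame byte size, as DownmixerBufferProvider passes to mirror().
        status_t status = EffectBufferHalInterface::mirror(nullptr, frameSizeBytes, &in);
        if (status != OK) return status;
        status = EffectBufferHalInterface::mirror(nullptr, frameSizeBytes, &out);
        if (status != OK) return status;
        effect->setInBuffer(in);      // flagged as changed; pushed to the HAL lazily
        effect->setOutBuffer(out);
        in->setExternalData(srcPcm);
        in->setFrameCount(frames);
        in->update();                 // copy the caller's data into the shared buffer
        out->setExternalData(dstPcm);
        out->setFrameCount(frames);
        out->update();
        status = effect->process();   // wakes REQUEST_PROCESS, waits for DONE_PROCESSING
        if (status == OK) {
            out->commit();            // copy the processed data back to dstPcm
        }
        return status;
    }

close(), added above, releases the remote effect instance once the caller is done with the effect as a whole.
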
diff --git a/media/libaudiohal/EffectHalHidl.h b/media/libaudiohal/EffectHalHidl.h
index b79bee0..1ed1153 100644
--- a/media/libaudiohal/EffectHalHidl.h
+++ b/media/libaudiohal/EffectHalHidl.h
@@ -19,24 +19,32 @@
#include <android/hardware/audio/effect/2.0/IEffect.h>
#include <media/audiohal/EffectHalInterface.h>
+#include <fmq/EventFlag.h>
+#include <fmq/MessageQueue.h>
#include <system/audio_effect.h>
using ::android::hardware::audio::effect::V2_0::EffectDescriptor;
using ::android::hardware::audio::effect::V2_0::IEffect;
+using ::android::hardware::EventFlag;
+using ::android::hardware::MessageQueue;
namespace android {
class EffectHalHidl : public EffectHalInterface
{
public:
- // Effect process function. Takes input samples as specified
- // in input buffer descriptor and output processed samples as specified
- // in output buffer descriptor.
- virtual status_t process(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer);
+ // Set the input buffer.
+ virtual status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer);
+
+ // Set the output buffer.
+ virtual status_t setOutBuffer(const sp<EffectBufferHalInterface>& buffer);
+
+ // Effect process function.
+ virtual status_t process();
// Process reverse stream function. This function is used to pass
// a reference stream to the effect engine.
- virtual status_t processReverse(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer);
+ virtual status_t processReverse();
// Send a command and receive a response to/from effect engine.
virtual status_t command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
@@ -45,6 +53,9 @@
// Returns the effect descriptor.
virtual status_t getDescriptor(effect_descriptor_t *pDescriptor);
+ // Free resources on the remote side.
+ virtual status_t close();
+
uint64_t effectId() const { return mEffectId; }
static void effectDescriptorToHal(
@@ -52,8 +63,16 @@
private:
friend class EffectsFactoryHalHidl;
+ typedef MessageQueue<
+ hardware::audio::effect::V2_0::Result, hardware::kSynchronizedReadWrite> StatusMQ;
+
sp<IEffect> mEffect;
const uint64_t mEffectId;
+ sp<EffectBufferHalInterface> mInBuffer;
+ sp<EffectBufferHalInterface> mOutBuffer;
+ bool mBuffersChanged;
+ std::unique_ptr<StatusMQ> mStatusMQ;
+ EventFlag* mEfGroup;
static status_t analyzeResult(const hardware::audio::effect::V2_0::Result& result);
@@ -62,6 +81,10 @@
// The destructor automatically releases the effect.
virtual ~EffectHalHidl();
+
+ status_t prepareForProcessing();
+ status_t processImpl(uint32_t mqFlag);
+ status_t setProcessBuffers();
};
} // namespace android
diff --git a/media/libaudiohal/EffectHalLocal.cpp b/media/libaudiohal/EffectHalLocal.cpp
index 56a365c..dd465c3 100644
--- a/media/libaudiohal/EffectHalLocal.cpp
+++ b/media/libaudiohal/EffectHalLocal.cpp
@@ -34,12 +34,37 @@
mHandle = 0;
}
-status_t EffectHalLocal::process(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
- return (*mHandle)->process(mHandle, inBuffer, outBuffer);
+status_t EffectHalLocal::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+ mInBuffer = buffer;
+ return OK;
}
-status_t EffectHalLocal::processReverse(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer) {
- return (*mHandle)->process_reverse(mHandle, inBuffer, outBuffer);
+status_t EffectHalLocal::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+ mOutBuffer = buffer;
+ return OK;
+}
+
+status_t EffectHalLocal::process() {
+ if (mInBuffer == nullptr || mOutBuffer == nullptr) {
+ ALOGE_IF(mInBuffer == nullptr, "Input buffer not set");
+ ALOGE_IF(mOutBuffer == nullptr, "Output buffer not set");
+ return NO_INIT;
+ }
+ return (*mHandle)->process(mHandle, mInBuffer->audioBuffer(), mOutBuffer->audioBuffer());
+}
+
+status_t EffectHalLocal::processReverse() {
+ if ((*mHandle)->process_reverse != NULL) {
+ if (mInBuffer == nullptr || mOutBuffer == nullptr) {
+ ALOGE_IF(mInBuffer == nullptr, "Input buffer not set");
+ ALOGE_IF(mOutBuffer == nullptr, "Output buffer not set");
+ return NO_INIT;
+ }
+ return (*mHandle)->process_reverse(
+ mHandle, mInBuffer->audioBuffer(), mOutBuffer->audioBuffer());
+ } else {
+ return INVALID_OPERATION;
+ }
}
status_t EffectHalLocal::command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
@@ -51,4 +76,8 @@
return (*mHandle)->get_descriptor(mHandle, pDescriptor);
}
+status_t EffectHalLocal::close() {
+ return OK;
+}
+
} // namespace android
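
With the reverse path now argument-free, callers learn at call time whether the wrapped effect implements it: the local wrapper returns INVALID_OPERATION when the effect has no process_reverse hook. A short hedged sketch of the expected caller-side handling; 'effect' is assumed to be an sp<EffectHalInterface> obtained from the effects factory, with its buffers already set.

    #define LOG_TAG "EffectReverseSketch"
    #include <media/audiohal/EffectHalInterface.h>
    #include <utils/Log.h>

    // Sketch: feed the reference (reverse) stream only if the effect supports it.
    // Assumes setInBuffer()/setOutBuffer() were called beforehand.
    static void maybeProcessReverse(const android::sp<android::EffectHalInterface>& effect) {
        const android::status_t st = effect->processReverse();
        if (st == android::INVALID_OPERATION) {
            // No process_reverse hook in this effect; skip the reference stream for it.
        } else if (st != android::OK) {
            ALOGE("processReverse failed: %d", st);
        }
    }
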
diff --git a/media/libaudiohal/EffectHalLocal.h b/media/libaudiohal/EffectHalLocal.h
index 77f774f..b499462 100644
--- a/media/libaudiohal/EffectHalLocal.h
+++ b/media/libaudiohal/EffectHalLocal.h
@@ -25,14 +25,18 @@
class EffectHalLocal : public EffectHalInterface
{
public:
- // Effect process function. Takes input samples as specified
- // in input buffer descriptor and output processed samples as specified
- // in output buffer descriptor.
- virtual status_t process(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer);
+ // Set the input buffer.
+ virtual status_t setInBuffer(const sp<EffectBufferHalInterface>& buffer);
+
+ // Set the output buffer.
+ virtual status_t setOutBuffer(const sp<EffectBufferHalInterface>& buffer);
+
+ // Effect process function.
+ virtual status_t process();
// Process reverse stream function. This function is used to pass
// a reference stream to the effect engine.
- virtual status_t processReverse(audio_buffer_t *inBuffer, audio_buffer_t *outBuffer);
+ virtual status_t processReverse();
// Send a command and receive a response to/from effect engine.
virtual status_t command(uint32_t cmdCode, uint32_t cmdSize, void *pCmdData,
@@ -41,10 +45,15 @@
// Returns the effect descriptor.
virtual status_t getDescriptor(effect_descriptor_t *pDescriptor);
+ // Free resources on the remote side.
+ virtual status_t close();
+
effect_handle_t handle() const { return mHandle; }
private:
effect_handle_t mHandle;
+ sp<EffectBufferHalInterface> mInBuffer;
+ sp<EffectBufferHalInterface> mOutBuffer;
friend class EffectsFactoryHalLocal;
diff --git a/media/libaudiohal/EffectsFactoryHalHidl.cpp b/media/libaudiohal/EffectsFactoryHalHidl.cpp
index bacbe4e..1ab5dad 100644
--- a/media/libaudiohal/EffectsFactoryHalHidl.cpp
+++ b/media/libaudiohal/EffectsFactoryHalHidl.cpp
@@ -43,7 +43,7 @@
return EffectIsNullUuid(pEffectUuid);
}
-EffectsFactoryHalHidl::EffectsFactoryHalHidl() {
+EffectsFactoryHalHidl::EffectsFactoryHalHidl() : ConversionHelperHidl("EffectsFactory") {

mEffectsFactory = IEffectsFactory::getService("audio_effects_factory");
}
@@ -60,12 +60,11 @@
mLastDescriptors = result;
}
});
- if (ret.getStatus().isOk()) {
+ if (ret.isOk()) {
return retval == Result::OK ? OK : NO_INIT;
}
mLastDescriptors.resize(0);
- ConversionHelperHidl::crashIfHalIsDead(ret.getStatus());
- return ret.getStatus().transactionError();
+ return processReturn(__FUNCTION__, ret);
}
status_t EffectsFactoryHalHidl::queryNumberEffects(uint32_t *pNumEffects) {
@@ -104,13 +103,12 @@
EffectHalHidl::effectDescriptorToHal(result, pDescriptor);
}
});
- if (ret.getStatus().isOk()) {
+ if (ret.isOk()) {
if (retval == Result::OK) return OK;
else if (retval == Result::INVALID_ARGUMENTS) return NAME_NOT_FOUND;
else return NO_INIT;
}
- ConversionHelperHidl::crashIfHalIsDead(ret.getStatus());
- return ret.getStatus().transactionError();
+ return processReturn(__FUNCTION__, ret);
}
status_t EffectsFactoryHalHidl::createEffect(
@@ -128,13 +126,12 @@
*effect = new EffectHalHidl(result, effectId);
}
});
- if (ret.getStatus().isOk()) {
+ if (ret.isOk()) {
if (retval == Result::OK) return OK;
else if (retval == Result::INVALID_ARGUMENTS) return NAME_NOT_FOUND;
else return NO_INIT;
}
- ConversionHelperHidl::crashIfHalIsDead(ret.getStatus());
- return ret.getStatus().transactionError();
+ return processReturn(__FUNCTION__, ret);
}
status_t EffectsFactoryHalHidl::dumpEffects(int fd) {
@@ -143,8 +140,7 @@
hidlHandle->data[0] = fd;
Return<void> ret = mEffectsFactory->debugDump(hidlHandle);
native_handle_delete(hidlHandle);
- ConversionHelperHidl::crashIfHalIsDead(ret.getStatus());
- return ret.getStatus().transactionError();
+ return processReturn(__FUNCTION__, ret);
}
} // namespace android
diff --git a/media/libaudiohal/EffectsFactoryHalHidl.h b/media/libaudiohal/EffectsFactoryHalHidl.h
index f16db17..e89f042 100644
--- a/media/libaudiohal/EffectsFactoryHalHidl.h
+++ b/media/libaudiohal/EffectsFactoryHalHidl.h
@@ -27,7 +27,7 @@
using ::android::hardware::audio::effect::V2_0::IEffectsFactory;
using ::android::hardware::hidl_vec;
-class EffectsFactoryHalHidl : public EffectsFactoryHalInterface
+class EffectsFactoryHalHidl : public EffectsFactoryHalInterface, public ConversionHelperHidl
{
public:
// Returns the number of different effects in all loaded libraries.
diff --git a/media/libaudiohal/HalDeathHandlerHidl.cpp b/media/libaudiohal/HalDeathHandlerHidl.cpp
new file mode 100644
index 0000000..a742671
--- /dev/null
+++ b/media/libaudiohal/HalDeathHandlerHidl.cpp
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "HalDeathHandler"
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+
+#include <media/audiohal/hidl/HalDeathHandler.h>
+
+namespace android {
+
+ANDROID_SINGLETON_STATIC_INSTANCE(HalDeathHandler);
+
+// static
+sp<HalDeathHandler> HalDeathHandler::getInstance() {
+ return &Singleton<HalDeathHandler>::getInstance();
+}
+
+HalDeathHandler::HalDeathHandler() : mSelf(this) {
+}
+
+HalDeathHandler::~HalDeathHandler() {
+}
+
+void HalDeathHandler::registerAtExitHandler(void* cookie, AtExitHandler handler) {
+ std::lock_guard<std::mutex> guard(mHandlersLock);
+ mHandlers.insert({cookie, handler});
+}
+
+void HalDeathHandler::unregisterAtExitHandler(void* cookie) {
+ std::lock_guard<std::mutex> guard(mHandlersLock);
+ mHandlers.erase(cookie);
+}
+
+void HalDeathHandler::serviceDied(uint64_t /*cookie*/, const wp<IBase>& /*who*/) {
+ // No matter which of the service objects has died,
+ // we need to run all the registered handlers and crash our process.
+ std::lock_guard<std::mutex> guard(mHandlersLock);
+ for (const auto& handler : mHandlers) {
+ handler.second();
+ }
+ LOG_ALWAYS_FATAL("HAL server crashed, need to restart");
+}
+
+} // namespace android
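
The death handler is a process-wide singleton: each HAL wrapper registers a cleanup callback keyed by a cookie and removes it on destruction; if any HAL service dies, all callbacks run and the client process is aborted so it restarts in a clean state. A minimal registration sketch; the AtExitHandler type is assumed to be a std::function<void()>-style callable (the loop above only requires handler.second() to be invocable with no arguments), and wiring the handler to a specific HIDL service via linkToDeath() happens elsewhere.

    #include <media/audiohal/hidl/HalDeathHandler.h>

    // Sketch: per-object cleanup to run if the audio HAL process dies.
    static void registerHalCleanup(void* cookie) {
        android::HalDeathHandler::getInstance()->registerAtExitHandler(cookie, [] {
            // e.g. release wake locks or mark streams invalid before the abort above
        });
    }

    static void unregisterHalCleanup(void* cookie) {
        android::HalDeathHandler::getInstance()->unregisterAtExitHandler(cookie);
    }
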
diff --git a/media/libaudiohal/StreamHalHidl.cpp b/media/libaudiohal/StreamHalHidl.cpp
index 2c6e564..cbc8a08 100644
--- a/media/libaudiohal/StreamHalHidl.cpp
+++ b/media/libaudiohal/StreamHalHidl.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <time.h>
+
#define LOG_TAG "StreamHalHidl"
//#define LOG_NDEBUG 0
@@ -28,18 +30,23 @@
using ::android::hardware::audio::common::V2_0::AudioFormat;
using ::android::hardware::audio::V2_0::AudioDrain;
using ::android::hardware::audio::V2_0::IStreamOutCallback;
-using ::android::hardware::audio::V2_0::ParameterValue;
-using ::android::hardware::audio::V2_0::Result;
-using ::android::hardware::audio::V2_0::TimeSpec;
+using ::android::hardware::audio::V2_0::MessageQueueFlagBits;
using ::android::hardware::audio::V2_0::MmapBufferInfo;
using ::android::hardware::audio::V2_0::MmapPosition;
+using ::android::hardware::audio::V2_0::ParameterValue;
+using ::android::hardware::audio::V2_0::Result;
+using ::android::hardware::audio::V2_0::ThreadPriority;
+using ::android::hardware::audio::V2_0::TimeSpec;
+using ::android::hardware::MQDescriptorSync;
using ::android::hardware::Return;
using ::android::hardware::Void;
namespace android {
StreamHalHidl::StreamHalHidl(IStream *stream)
- : ConversionHelperHidl("Stream"), mStream(stream) {
+ : ConversionHelperHidl("Stream"),
+ mHalThreadPriority(static_cast<int>(ThreadPriority::NORMAL)),
+ mStream(stream) {
}
StreamHalHidl::~StreamHalHidl() {
@@ -176,6 +183,11 @@
return processReturn("getMmapPosition", ret, retval);
}
+status_t StreamHalHidl::setHalThreadPriority(int priority) {
+ mHalThreadPriority = priority;
+ return OK;
+}
+
namespace {
/* Notes on callback ownership.
@@ -229,14 +241,21 @@
} // namespace
StreamOutHalHidl::StreamOutHalHidl(const sp<IStreamOut>& stream)
- : StreamHalHidl(stream.get()), mStream(stream) {
+ : StreamHalHidl(stream.get()), mStream(stream), mEfGroup(nullptr),
+ mGetPresentationPositionNotSupported(false), mPPosFromWrite{ 0, OK, 0, { 0, 0 } } {
}
StreamOutHalHidl::~StreamOutHalHidl() {
- if (mCallback.unsafe_get() && mStream != 0) {
- processReturn("clearCallback", mStream->clearCallback());
+ if (mStream != 0) {
+ if (mCallback.unsafe_get()) {
+ processReturn("clearCallback", mStream->clearCallback());
+ }
+ processReturn("close", mStream->close());
}
mCallback.clear();
+ if (mEfGroup) {
+ EventFlag::deleteEventFlag(&mEfGroup);
+ }
}
status_t StreamOutHalHidl::getFrameSize(size_t *size) {
@@ -256,16 +275,98 @@
status_t StreamOutHalHidl::write(const void *buffer, size_t bytes, size_t *written) {
if (mStream == 0) return NO_INIT;
- hidl_vec<uint8_t> hidlData;
- hidlData.setToExternal(static_cast<uint8_t*>(const_cast<void*>(buffer)), bytes);
+ *written = 0;
+
+ if (bytes == 0 && !mDataMQ) {
+ // Can't determine the size for the MQ buffer. Wait for a non-empty write request.
+ ALOGW_IF(mCallback.unsafe_get(), "First call to async write with 0 bytes");
+ return OK;
+ }
+
+ status_t status;
+ if (!mDataMQ && (status = prepareForWriting(bytes)) != OK) {
+ return status;
+ }
+
+ const size_t availBytes = mDataMQ->availableToWrite();
+ if (bytes > availBytes) { bytes = availBytes; }
+ if (!mDataMQ->write(static_cast<const uint8_t*>(buffer), bytes)) {
+ ALOGW("data message queue write failed");
+ }
+ mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
+
+ // TODO: Remove manual event flag handling once blocking MQ is implemented. b/33815422
+ uint32_t efState = 0;
+retry:
+ status_t ret = mEfGroup->wait(
+ static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL), &efState, NS_PER_SEC);
+ if (efState & static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL)) {
+ WriteStatus writeStatus =
+ { Result::NOT_INITIALIZED, 0, Result::NOT_INITIALIZED, 0, { 0, 0 } };
+ mStatusMQ->read(&writeStatus);
+ if (writeStatus.writeRetval == Result::OK) {
+ status = OK;
+ *written = writeStatus.written;
+ mPPosFromWrite.status = processReturn(
+ "get_presentation_position", writeStatus.presentationPositionRetval);
+ if (mPPosFromWrite.status == OK) {
+ mPPosFromWrite.frames = writeStatus.frames;
+ mPPosFromWrite.ts.tv_sec = writeStatus.timeStamp.tvSec;
+ mPPosFromWrite.ts.tv_nsec = writeStatus.timeStamp.tvNSec;
+ }
+            mPPosFromWrite.obtained = getCurrentTimeUs();
+ } else {
+ status = processReturn("write", writeStatus.writeRetval);
+ }
+ return status;
+ }
+ if (ret == -EAGAIN) {
+ // This normally retries no more than once.
+ goto retry;
+ }
+ return ret;
+}
+
+uint64_t StreamOutHalHidl::getCurrentTimeUs() {
+    struct timespec timeNow;
+    clock_gettime(CLOCK_MONOTONIC, &timeNow);
+    return (uint64_t)timeNow.tv_sec * 1000000 + timeNow.tv_nsec / 1000;  // microseconds
+}
+
+status_t StreamOutHalHidl::prepareForWriting(size_t bufferSize) {
+ std::unique_ptr<DataMQ> tempDataMQ;
+ std::unique_ptr<StatusMQ> tempStatusMQ;
Result retval;
- Return<void> ret = mStream->write(
- hidlData,
- [&](Result r, uint64_t w) {
+ Return<void> ret = mStream->prepareForWriting(
+ 1, bufferSize, ThreadPriority(mHalThreadPriority),
+ [&](Result r,
+ const MQDescriptorSync<uint8_t>& dataMQ,
+ const MQDescriptorSync<WriteStatus>& statusMQ) {
retval = r;
- *written = w;
+ if (retval == Result::OK) {
+ tempDataMQ.reset(new DataMQ(dataMQ));
+ tempStatusMQ.reset(new StatusMQ(statusMQ));
+ if (tempDataMQ->isValid() && tempDataMQ->getEventFlagWord()) {
+ EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &mEfGroup);
+ }
+ }
});
- return processReturn("write", ret, retval);
+ if (!ret.isOk() || retval != Result::OK) {
+ return processReturn("prepareForWriting", ret, retval);
+ }
+ if (!tempDataMQ || !tempDataMQ->isValid() || !tempStatusMQ || !tempStatusMQ->isValid()
+ || !mEfGroup) {
+ ALOGE_IF(!tempDataMQ, "Failed to obtain data message queue for writing");
+ ALOGE_IF(tempDataMQ && !tempDataMQ->isValid(), "Data message queue for writing is invalid");
+ ALOGE_IF(!tempStatusMQ, "Failed to obtain status message queue for writing");
+ ALOGE_IF(tempStatusMQ && !tempStatusMQ->isValid(),
+ "Status message queue for writing is invalid");
+ ALOGE_IF(!mEfGroup, "Event flag creation for writing failed");
+ return NO_INIT;
+ }
+ mDataMQ = std::move(tempDataMQ);
+ mStatusMQ = std::move(tempStatusMQ);
+ return OK;
}
status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
@@ -342,6 +443,17 @@
status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
if (mStream == 0) return NO_INIT;
+ if (mGetPresentationPositionNotSupported) return INVALID_OPERATION;
+    if (getCurrentTimeUs() - mPPosFromWrite.obtained <= 1000) {
+        // Less than 1 ms since the last write; use the cached result to avoid an extra HAL call.
+ if (mPPosFromWrite.status == OK) {
+ *frames = mPPosFromWrite.frames;
+ timestamp->tv_sec = mPPosFromWrite.ts.tv_sec;
+ timestamp->tv_nsec = mPPosFromWrite.ts.tv_nsec;
+ }
+ return mPPosFromWrite.status;
+ }
+
Result retval;
Return<void> ret = mStream->getPresentationPosition(
[&](Result r, uint64_t hidlFrames, const TimeSpec& hidlTimeStamp) {
@@ -352,6 +464,9 @@
timestamp->tv_nsec = hidlTimeStamp.tvNSec;
}
});
+ if (ret.isOk() && retval == Result::NOT_SUPPORTED) {
+ mGetPresentationPositionNotSupported = true;
+ }
return processReturn("getPresentationPosition", ret, retval);
}
@@ -378,10 +493,16 @@
StreamInHalHidl::StreamInHalHidl(const sp<IStreamIn>& stream)
- : StreamHalHidl(stream.get()), mStream(stream) {
+ : StreamHalHidl(stream.get()), mStream(stream), mEfGroup(nullptr) {
}
StreamInHalHidl::~StreamInHalHidl() {
+ if (mStream != 0) {
+ processReturn("close", mStream->close());
+ }
+ if (mEfGroup) {
+ EventFlag::deleteEventFlag(&mEfGroup);
+ }
}
status_t StreamInHalHidl::getFrameSize(size_t *size) {
@@ -396,17 +517,83 @@
status_t StreamInHalHidl::read(void *buffer, size_t bytes, size_t *read) {
if (mStream == 0) return NO_INIT;
+ *read = 0;
+
+ if (bytes == 0 && !mDataMQ) {
+ // Can't determine the size for the MQ buffer. Wait for a non-empty read request.
+ return OK;
+ }
+
+    status_t status = OK;
+ if (!mDataMQ) {
+ if ((status = prepareForReading(bytes)) != OK) return status;
+ // Trigger the first read.
+ mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
+ }
+
+ // TODO: Remove manual event flag handling once blocking MQ is implemented. b/33815422
+ uint32_t efState = 0;
+retry:
+ status_t ret = mEfGroup->wait(
+ static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY), &efState, NS_PER_SEC);
+ if (efState & static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY)) {
+ ReadStatus readStatus = { Result::NOT_INITIALIZED, 0 };
+ const size_t availToRead = mDataMQ->availableToRead();
+ if (bytes > availToRead) { bytes = availToRead; }
+ mDataMQ->read(static_cast<uint8_t*>(buffer), bytes);
+ mStatusMQ->read(&readStatus);
+ mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
+ if (readStatus.retval == Result::OK) {
+ ALOGW_IF(availToRead != readStatus.read,
+ "HAL read report inconsistent: mq = %d, status = %d",
+ (int32_t)availToRead, (int32_t)readStatus.read);
+ *read = readStatus.read;
+ } else {
+ status = processReturn("read", readStatus.retval);
+ }
+ return status;
+ }
+ if (ret == -EAGAIN) {
+ // This normally retries no more than once.
+ goto retry;
+ }
+ return ret;
+}
+
+status_t StreamInHalHidl::prepareForReading(size_t bufferSize) {
+ std::unique_ptr<DataMQ> tempDataMQ;
+ std::unique_ptr<StatusMQ> tempStatusMQ;
Result retval;
- Return<void> ret = mStream->read(
- bytes,
- [&](Result r, const hidl_vec<uint8_t>& hidlData) {
+ Return<void> ret = mStream->prepareForReading(
+ 1, bufferSize, ThreadPriority(mHalThreadPriority),
+ [&](Result r,
+ const MQDescriptorSync<uint8_t>& dataMQ,
+ const MQDescriptorSync<ReadStatus>& statusMQ) {
retval = r;
- *read = std::min(hidlData.size(), bytes);
if (retval == Result::OK) {
- memcpy(buffer, &hidlData[0], *read);
+ tempDataMQ.reset(new DataMQ(dataMQ));
+ tempStatusMQ.reset(new StatusMQ(statusMQ));
+ if (tempDataMQ->isValid() && tempDataMQ->getEventFlagWord()) {
+ EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &mEfGroup);
+ }
}
});
- return processReturn("read", ret, retval);
+ if (!ret.isOk() || retval != Result::OK) {
+ return processReturn("prepareForReading", ret, retval);
+ }
+ if (!tempDataMQ || !tempDataMQ->isValid() || !tempStatusMQ || !tempStatusMQ->isValid()
+ || !mEfGroup) {
+ ALOGE_IF(!tempDataMQ, "Failed to obtain data message queue for reading");
+ ALOGE_IF(tempDataMQ && !tempDataMQ->isValid(), "Data message queue for reading is invalid");
+ ALOGE_IF(!tempStatusMQ, "Failed to obtain status message queue for reading");
+ ALOGE_IF(tempStatusMQ && !tempStatusMQ->isValid(),
+ "Status message queue for reading is invalid");
+ ALOGE_IF(!mEfGroup, "Event flag creation for reading failed");
+ return NO_INIT;
+ }
+ mDataMQ = std::move(tempDataMQ);
+ mStatusMQ = std::move(tempStatusMQ);
+ return OK;
}
status_t StreamInHalHidl::getInputFramesLost(uint32_t *framesLost) {
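
Both stream directions now share the same lazy fast-message-queue setup: prepareForWriting()/prepareForReading() run on the first non-empty transfer, size the data queue from that request, and pass the HAL worker thread priority. A caller-side sketch under those assumptions; StreamOutHalInterface is assumed to declare the same write() and setHalThreadPriority() methods implemented by the wrappers above, and the stream, priority value, and buffer are placeholders.

    #include <media/audiohal/StreamHalInterface.h>

    // Sketch: prime an output stream so the FMQ is sized correctly and the HAL
    // worker thread priority is applied before the queues are created.
    static android::status_t primeOutputStream(
            const android::sp<android::StreamOutHalInterface>& stream,
            int halThreadPriority, const void* buffer, size_t bytes) {
        // Must precede the first write(): the priority is only forwarded inside
        // prepareForWriting(), which runs lazily on that first non-empty write.
        stream->setHalThreadPriority(halThreadPriority);
        size_t written = 0;
        // The first non-empty write creates the data/status MQs sized from 'bytes'.
        return stream->write(buffer, bytes, &written);
    }
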
diff --git a/media/libaudiohal/StreamHalHidl.h b/media/libaudiohal/StreamHalHidl.h
index 377acb4..8b5867e 100644
--- a/media/libaudiohal/StreamHalHidl.h
+++ b/media/libaudiohal/StreamHalHidl.h
@@ -20,6 +20,8 @@
#include <android/hardware/audio/2.0/IStream.h>
#include <android/hardware/audio/2.0/IStreamIn.h>
#include <android/hardware/audio/2.0/IStreamOut.h>
+#include <fmq/EventFlag.h>
+#include <fmq/MessageQueue.h>
#include <media/audiohal/StreamHalInterface.h>
#include "ConversionHelperHidl.h"
@@ -27,7 +29,11 @@
using ::android::hardware::audio::V2_0::IStream;
using ::android::hardware::audio::V2_0::IStreamIn;
using ::android::hardware::audio::V2_0::IStreamOut;
+using ::android::hardware::EventFlag;
+using ::android::hardware::MessageQueue;
using ::android::hardware::Return;
+using ReadStatus = ::android::hardware::audio::V2_0::IStreamIn::ReadStatus;
+using WriteStatus = ::android::hardware::audio::V2_0::IStreamOut::WriteStatus;
namespace android {
@@ -80,6 +86,10 @@
// Get current read/write position in the mmap buffer
virtual status_t getMmapPosition(struct audio_mmap_position *position);
+ // Set the priority of the thread that interacts with the HAL
+ // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
+ virtual status_t setHalThreadPriority(int priority);
+
protected:
// Subclasses can not be constructed directly by clients.
explicit StreamHalHidl(IStream *stream);
@@ -87,6 +97,8 @@
// The destructor automatically closes the stream.
virtual ~StreamHalHidl();
+ int mHalThreadPriority;
+
private:
IStream *mStream;
};
@@ -143,14 +155,29 @@
private:
friend class DeviceHalHidl;
+ typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
+ typedef MessageQueue<WriteStatus, hardware::kSynchronizedReadWrite> StatusMQ;
wp<StreamOutHalInterfaceCallback> mCallback;
sp<IStreamOut> mStream;
+ std::unique_ptr<DataMQ> mDataMQ;
+ std::unique_ptr<StatusMQ> mStatusMQ;
+ EventFlag* mEfGroup;
+ bool mGetPresentationPositionNotSupported;
+ struct {
+ uint64_t obtained;
+ status_t status;
+ uint64_t frames;
+ struct timespec ts;
+ } mPPosFromWrite;
// Can not be constructed directly by clients.
StreamOutHalHidl(const sp<IStreamOut>& stream);
virtual ~StreamOutHalHidl();
+
+    uint64_t getCurrentTimeUs();
+ status_t prepareForWriting(size_t bufferSize);
};
class StreamInHalHidl : public StreamInHalInterface, public StreamHalHidl {
@@ -173,13 +200,20 @@
private:
friend class DeviceHalHidl;
+ typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
+ typedef MessageQueue<ReadStatus, hardware::kSynchronizedReadWrite> StatusMQ;
sp<IStreamIn> mStream;
+ std::unique_ptr<DataMQ> mDataMQ;
+ std::unique_ptr<StatusMQ> mStatusMQ;
+ EventFlag* mEfGroup;
// Can not be constructed directly by clients.
StreamInHalHidl(const sp<IStreamIn>& stream);
virtual ~StreamInHalHidl();
+
+ status_t prepareForReading(size_t bufferSize);
};
} // namespace android
diff --git a/media/libaudiohal/StreamHalLocal.cpp b/media/libaudiohal/StreamHalLocal.cpp
index 61c8898..b25e518 100644
--- a/media/libaudiohal/StreamHalLocal.cpp
+++ b/media/libaudiohal/StreamHalLocal.cpp
@@ -96,6 +96,12 @@
return mStream->dump(mStream, fd);
}
+status_t StreamHalLocal::setHalThreadPriority(int) {
+    // Nothing to do: the local HAL is invoked by AudioFlinger directly
+    // on the same thread.
+ return OK;
+}
+
StreamOutHalLocal::StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device)
: StreamHalLocal(&stream->common, device), mStream(stream) {
}
diff --git a/media/libaudiohal/StreamHalLocal.h b/media/libaudiohal/StreamHalLocal.h
index fbb000a..8c96c1f 100644
--- a/media/libaudiohal/StreamHalLocal.h
+++ b/media/libaudiohal/StreamHalLocal.h
@@ -70,6 +70,10 @@
// Get current read/write position in the mmap buffer
virtual status_t getMmapPosition(struct audio_mmap_position *position) = 0;
+ // Set the priority of the thread that interacts with the HAL
+ // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
+ virtual status_t setHalThreadPriority(int priority);
+
protected:
// Subclasses can not be constructed directly by clients.
StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device);
diff --git a/media/libaudioprocessing/Android.mk b/media/libaudioprocessing/Android.mk
new file mode 100644
index 0000000..b7ea99e
--- /dev/null
+++ b/media/libaudioprocessing/Android.mk
@@ -0,0 +1,36 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ AudioMixer.cpp.arm \
+ AudioResampler.cpp.arm \
+ AudioResamplerCubic.cpp.arm \
+ AudioResamplerSinc.cpp.arm \
+ AudioResamplerDyn.cpp.arm \
+ BufferProviders.cpp \
+ RecordBufferConverter.cpp \
+
+LOCAL_C_INCLUDES := \
+ $(TOP) \
+ $(call include-path-for, audio-utils) \
+
+LOCAL_SHARED_LIBRARIES := \
+ libaudiohal \
+ libaudioutils \
+ libcutils \
+ liblog \
+ libnbaio \
+ libsonic \
+ libutils \
+
+LOCAL_MODULE := libaudioprocessing
+
+LOCAL_CFLAGS := -Werror -Wall
+
+# uncomment to disable NEON on architectures that actually do support NEON, for benchmarking
+#LOCAL_CFLAGS += -DUSE_NEON=false
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
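
The mixer, resampler, and buffer-provider sources now live in this standalone shared library; former audioflinger-internal includes become <media/...> headers. A hedged usage sketch for the relocated resampler, using only calls that appear in RecordBufferConverter later in this patch; the buffer provider, channel count, and buffer sizing are placeholders.

    #include <string.h>

    #include <media/AudioBufferProvider.h>
    #include <media/AudioMixer.h>       // for AudioMixer::UNITY_GAIN_FLOAT
    #include <media/AudioResampler.h>

    // Sketch: resample a float track from srcRate to dstRate into outBuf.
    static size_t resampleOnce(android::AudioBufferProvider* provider,
                               uint32_t channelCount, uint32_t srcRate, uint32_t dstRate,
                               void* outBuf, size_t outFrames, size_t outFrameSizeBytes) {
        using namespace android;
        AudioResampler* resampler =
                AudioResampler::create(AUDIO_FORMAT_PCM_FLOAT, channelCount, dstRate);
        resampler->setSampleRate(srcRate);
        resampler->setVolume(AudioMixer::UNITY_GAIN_FLOAT, AudioMixer::UNITY_GAIN_FLOAT);
        memset(outBuf, 0, outFrames * outFrameSizeBytes);  // the resampler accumulates into outBuf
        const size_t produced =
                resampler->resample(static_cast<int32_t*>(outBuf), outFrames, provider);
        delete resampler;
        return produced;
    }
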
diff --git a/services/audioflinger/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
similarity index 99%
rename from services/audioflinger/AudioMixer.cpp
rename to media/libaudioprocessing/AudioMixer.cpp
index 945f4b3..a7d9f0f 100644
--- a/services/audioflinger/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -18,7 +18,6 @@
#define LOG_TAG "AudioMixer"
//#define LOG_NDEBUG 0
-#include "Configuration.h"
#include <stdint.h>
#include <string.h>
#include <stdlib.h>
@@ -36,9 +35,9 @@
#include <audio_utils/primitives.h>
#include <audio_utils/format.h>
+#include <media/AudioMixer.h>
#include "AudioMixerOps.h"
-#include "AudioMixer.h"
// The FCC_2 macro refers to the Fixed Channel Count of 2 for the legacy integer mixer.
#ifndef FCC_2
diff --git a/services/audioflinger/AudioMixerOps.h b/media/libaudioprocessing/AudioMixerOps.h
similarity index 100%
rename from services/audioflinger/AudioMixerOps.h
rename to media/libaudioprocessing/AudioMixerOps.h
diff --git a/services/audioflinger/AudioResampler.cpp b/media/libaudioprocessing/AudioResampler.cpp
similarity index 99%
rename from services/audioflinger/AudioResampler.cpp
rename to media/libaudioprocessing/AudioResampler.cpp
index 43624a0..c761b38 100644
--- a/services/audioflinger/AudioResampler.cpp
+++ b/media/libaudioprocessing/AudioResampler.cpp
@@ -22,11 +22,11 @@
#include <stdlib.h>
#include <sys/types.h>
-#include <android/log.h>
#include <cutils/properties.h>
+#include <log/log.h>
#include <audio_utils/primitives.h>
-#include "AudioResampler.h"
+#include <media/AudioResampler.h>
#include "AudioResamplerSinc.h"
#include "AudioResamplerCubic.h"
#include "AudioResamplerDyn.h"
diff --git a/services/audioflinger/AudioResamplerCubic.cpp b/media/libaudioprocessing/AudioResamplerCubic.cpp
similarity index 98%
rename from services/audioflinger/AudioResamplerCubic.cpp
rename to media/libaudioprocessing/AudioResamplerCubic.cpp
index d27dce7..9bcd8e1 100644
--- a/services/audioflinger/AudioResamplerCubic.cpp
+++ b/media/libaudioprocessing/AudioResamplerCubic.cpp
@@ -19,9 +19,9 @@
#include <stdint.h>
#include <string.h>
#include <sys/types.h>
-#include <android/log.h>
-#include "AudioResampler.h"
+#include <log/log.h>
+
#include "AudioResamplerCubic.h"
namespace android {
diff --git a/services/audioflinger/AudioResamplerCubic.h b/media/libaudioprocessing/AudioResamplerCubic.h
similarity index 98%
rename from services/audioflinger/AudioResamplerCubic.h
rename to media/libaudioprocessing/AudioResamplerCubic.h
index f218fd9..defaf33 100644
--- a/services/audioflinger/AudioResamplerCubic.h
+++ b/media/libaudioprocessing/AudioResamplerCubic.h
@@ -21,7 +21,7 @@
#include <sys/types.h>
#include <android/log.h>
-#include "AudioResampler.h"
+#include <media/AudioResampler.h>
namespace android {
// ----------------------------------------------------------------------------
diff --git a/services/audioflinger/AudioResamplerDyn.cpp b/media/libaudioprocessing/AudioResamplerDyn.cpp
similarity index 98%
rename from services/audioflinger/AudioResamplerDyn.cpp
rename to media/libaudioprocessing/AudioResamplerDyn.cpp
index 21914b9..8f7b982 100644
--- a/services/audioflinger/AudioResamplerDyn.cpp
+++ b/media/libaudioprocessing/AudioResamplerDyn.cpp
@@ -29,9 +29,10 @@
#include <utils/Log.h>
#include <audio_utils/primitives.h>
-#include "AudioResamplerFirOps.h" // USE_NEON and USE_INLINE_ASSEMBLY defined here
+#include "AudioResamplerFirOps.h" // USE_NEON, USE_SSE and USE_INLINE_ASSEMBLY defined here
#include "AudioResamplerFirProcess.h"
#include "AudioResamplerFirProcessNeon.h"
+#include "AudioResamplerFirProcessSSE.h"
#include "AudioResamplerFirGen.h" // requires math.h
#include "AudioResamplerDyn.h"
diff --git a/services/audioflinger/AudioResamplerDyn.h b/media/libaudioprocessing/AudioResamplerDyn.h
similarity index 98%
rename from services/audioflinger/AudioResamplerDyn.h
rename to media/libaudioprocessing/AudioResamplerDyn.h
index 6af9017..1840fc7 100644
--- a/services/audioflinger/AudioResamplerDyn.h
+++ b/media/libaudioprocessing/AudioResamplerDyn.h
@@ -21,7 +21,7 @@
#include <sys/types.h>
#include <android/log.h>
-#include "AudioResampler.h"
+#include <media/AudioResampler.h>
namespace android {
diff --git a/services/audioflinger/AudioResamplerFirGen.h b/media/libaudioprocessing/AudioResamplerFirGen.h
similarity index 100%
rename from services/audioflinger/AudioResamplerFirGen.h
rename to media/libaudioprocessing/AudioResamplerFirGen.h
diff --git a/services/audioflinger/AudioResamplerFirOps.h b/media/libaudioprocessing/AudioResamplerFirOps.h
similarity index 92%
rename from services/audioflinger/AudioResamplerFirOps.h
rename to media/libaudioprocessing/AudioResamplerFirOps.h
index 2a26496..2e4cee3 100644
--- a/services/audioflinger/AudioResamplerFirOps.h
+++ b/media/libaudioprocessing/AudioResamplerFirOps.h
@@ -36,6 +36,13 @@
#include <arm_neon.h>
#endif
+#if defined(__SSSE3__) // Should be supported in x86 ABI for both 32 & 64-bit.
+#define USE_SSE (true)
+#include <tmmintrin.h>
+#else
+#define USE_SSE (false)
+#endif
+
template<typename T, typename U>
struct is_same
{
@@ -119,7 +126,7 @@
static inline
int32_t mulAddRL(int left, uint32_t inRL, int16_t v, int32_t a)
{
-#if USE_INLINE_ASSEMBLY
+#if 0 // was: USE_INLINE_ASSEMBLY; seems to fail with Clang, see b/34110890
int32_t out;
if (left) {
asm( "smlabb %[out], %[v], %[inRL], %[a] \n"
@@ -142,7 +149,7 @@
static inline
int32_t mulAddRL(int left, uint32_t inRL, int32_t v, int32_t a)
{
-#if USE_INLINE_ASSEMBLY
+#if 0 // was: USE_INLINE_ASSEMBLY; seems to fail with Clang, see b/34110890
int32_t out;
if (left) {
asm( "smlawb %[out], %[v], %[inRL], %[a] \n"
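
Both disabled blocks fall back to the portable C expressions further down in this file (not shown in this hunk). For readers unfamiliar with the ARM mnemonics, a hedged reference sketch of what the two forms compute; this is illustrative only and not necessarily identical to the file's actual fallback code.

    // smlabb/smlatb form: multiply the selected 16-bit half of the packed stereo
    // word by a 16-bit coefficient and accumulate.
    static inline int32_t mulAddRL_ref(int left, uint32_t inRL, int16_t v, int32_t a) {
        const int16_t s = left ? static_cast<int16_t>(inRL & 0xFFFF)
                               : static_cast<int16_t>(inRL >> 16);
        return a + static_cast<int32_t>(s) * v;
    }

    // smlawb/smlawt form: 32x16 fractional multiply-accumulate keeping the top
    // 32 bits of the 48-bit product.
    static inline int32_t mulAddRL_ref(int left, uint32_t inRL, int32_t v, int32_t a) {
        const int16_t s = left ? static_cast<int16_t>(inRL & 0xFFFF)
                               : static_cast<int16_t>(inRL >> 16);
        return a + static_cast<int32_t>((static_cast<int64_t>(v) * s) >> 16);
    }
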
diff --git a/services/audioflinger/AudioResamplerFirProcess.h b/media/libaudioprocessing/AudioResamplerFirProcess.h
similarity index 100%
rename from services/audioflinger/AudioResamplerFirProcess.h
rename to media/libaudioprocessing/AudioResamplerFirProcess.h
diff --git a/services/audioflinger/AudioResamplerFirProcessNeon.h b/media/libaudioprocessing/AudioResamplerFirProcessNeon.h
similarity index 99%
rename from services/audioflinger/AudioResamplerFirProcessNeon.h
rename to media/libaudioprocessing/AudioResamplerFirProcessNeon.h
index 3de9edd..1ce76a8 100644
--- a/services/audioflinger/AudioResamplerFirProcessNeon.h
+++ b/media/libaudioprocessing/AudioResamplerFirProcessNeon.h
@@ -155,8 +155,8 @@
accum2 = vmlal_s16(accum2, vget_low_s16(negSamp.val[1]), vget_low_s16(negCoef));
accum2 = vmlal_s16(accum2, vget_high_s16(negSamp.val[1]), vget_high_s16(negCoef));
sP -= 16;
- }
} break;
+ }
} while (count -= 8);
// multiply by volume and save
diff --git a/media/libaudioprocessing/AudioResamplerFirProcessSSE.h b/media/libaudioprocessing/AudioResamplerFirProcessSSE.h
new file mode 100644
index 0000000..63ed052
--- /dev/null
+++ b/media/libaudioprocessing/AudioResamplerFirProcessSSE.h
@@ -0,0 +1,215 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_SSE_H
+#define ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_SSE_H
+
+namespace android {
+
+// depends on AudioResamplerFirOps.h, AudioResamplerFirProcess.h
+
+#if USE_SSE
+
+#define TO_STRING2(x) #x
+#define TO_STRING(x) TO_STRING2(x)
+// uncomment to print GCC version, may be relevant for intrinsic optimizations
+/* #pragma message ("GCC version: " TO_STRING(__GNUC__) \
+ "." TO_STRING(__GNUC_MINOR__) \
+ "." TO_STRING(__GNUC_PATCHLEVEL__)) */
+
+//
+// SSEx specializations are enabled for Process() and ProcessL() in AudioResamplerFirProcess.h
+//
+
+template <int CHANNELS, int STRIDE, bool FIXED>
+static inline void ProcessSSEIntrinsic(float* out,
+ int count,
+ const float* coefsP,
+ const float* coefsN,
+ const float* sP,
+ const float* sN,
+ const float* volumeLR,
+ float lerpP,
+ const float* coefsP1,
+ const float* coefsN1)
+{
+ ALOG_ASSERT(count > 0 && (count & 7) == 0); // multiple of 8
+ COMPILE_TIME_ASSERT_FUNCTION_SCOPE(CHANNELS == 1 || CHANNELS == 2);
+
+ sP -= CHANNELS*(4-1); // adjust sP for a loop iteration of four
+
+ __m128 interp;
+ if (!FIXED) {
+ interp = _mm_set1_ps(lerpP);
+ }
+
+ __m128 accL, accR;
+ accL = _mm_setzero_ps();
+ if (CHANNELS == 2) {
+ accR = _mm_setzero_ps();
+ }
+
+ do {
+ __m128 posCoef = _mm_load_ps(coefsP);
+ __m128 negCoef = _mm_load_ps(coefsN);
+ coefsP += 4;
+ coefsN += 4;
+
+ if (!FIXED) { // interpolate
+ __m128 posCoef1 = _mm_load_ps(coefsP1);
+ __m128 negCoef1 = _mm_load_ps(coefsN1);
+ coefsP1 += 4;
+ coefsN1 += 4;
+
+ // Calculate the final coefficient for interpolation
+ // posCoef = interp * (posCoef1 - posCoef) + posCoef
+ // negCoef = interp * (negCoef - negCoef1) + negCoef1
+ posCoef1 = _mm_sub_ps(posCoef1, posCoef);
+ negCoef = _mm_sub_ps(negCoef, negCoef1);
+
+ posCoef1 = _mm_mul_ps(posCoef1, interp);
+ negCoef = _mm_mul_ps(negCoef, interp);
+
+ posCoef = _mm_add_ps(posCoef1, posCoef);
+ negCoef = _mm_add_ps(negCoef, negCoef1);
+ }
+ switch (CHANNELS) {
+ case 1: {
+ __m128 posSamp = _mm_loadu_ps(sP);
+ __m128 negSamp = _mm_loadu_ps(sN);
+ sP -= 4;
+ sN += 4;
+
+ posSamp = _mm_shuffle_ps(posSamp, posSamp, 0x1B);
+ posSamp = _mm_mul_ps(posSamp, posCoef);
+ negSamp = _mm_mul_ps(negSamp, negCoef);
+
+ accL = _mm_add_ps(accL, posSamp);
+ accL = _mm_add_ps(accL, negSamp);
+ } break;
+ case 2: {
+ __m128 posSamp0 = _mm_loadu_ps(sP);
+ __m128 posSamp1 = _mm_loadu_ps(sP+4);
+ __m128 negSamp0 = _mm_loadu_ps(sN);
+ __m128 negSamp1 = _mm_loadu_ps(sN+4);
+ sP -= 8;
+ sN += 8;
+
+ // deinterleave everything and reverse the positives
+ __m128 posSampL = _mm_shuffle_ps(posSamp1, posSamp0, 0x22);
+ __m128 posSampR = _mm_shuffle_ps(posSamp1, posSamp0, 0x77);
+ __m128 negSampL = _mm_shuffle_ps(negSamp0, negSamp1, 0x88);
+ __m128 negSampR = _mm_shuffle_ps(negSamp0, negSamp1, 0xDD);
+
+ posSampL = _mm_mul_ps(posSampL, posCoef);
+ posSampR = _mm_mul_ps(posSampR, posCoef);
+ negSampL = _mm_mul_ps(negSampL, negCoef);
+ negSampR = _mm_mul_ps(negSampR, negCoef);
+
+ accL = _mm_add_ps(accL, posSampL);
+ accR = _mm_add_ps(accR, posSampR);
+ accL = _mm_add_ps(accL, negSampL);
+ accR = _mm_add_ps(accR, negSampR);
+ } break;
+ }
+ } while (count -= 4);
+
+ // multiply by volume and save
+ __m128 vLR = _mm_setzero_ps();
+ __m128 outSamp;
+ vLR = _mm_loadl_pi(vLR, reinterpret_cast<const __m64*>(volumeLR));
+ outSamp = _mm_loadl_pi(vLR, reinterpret_cast<__m64*>(out));
+
+ // combine and funnel down accumulator
+ __m128 outAccum = _mm_setzero_ps();
+ if (CHANNELS == 1) {
+ // duplicate accL to both L and R
+ outAccum = _mm_add_ps(accL, _mm_movehl_ps(accL, accL));
+ outAccum = _mm_add_ps(outAccum, _mm_shuffle_ps(outAccum, outAccum, 0x11));
+ } else if (CHANNELS == 2) {
+ // accR contains R, fold in
+ outAccum = _mm_hadd_ps(accL, accR);
+ outAccum = _mm_hadd_ps(outAccum, outAccum);
+ }
+
+ outAccum = _mm_mul_ps(outAccum, vLR);
+ outSamp = _mm_add_ps(outSamp, outAccum);
+ _mm_storel_pi(reinterpret_cast<__m64*>(out), outSamp);
+}
+
+template<>
+inline void ProcessL<1, 16>(float* const out,
+ int count,
+ const float* coefsP,
+ const float* coefsN,
+ const float* sP,
+ const float* sN,
+ const float* const volumeLR)
+{
+ ProcessSSEIntrinsic<1, 16, true>(out, count, coefsP, coefsN, sP, sN, volumeLR,
+ 0 /*lerpP*/, NULL /*coefsP1*/, NULL /*coefsN1*/);
+}
+
+template<>
+inline void ProcessL<2, 16>(float* const out,
+ int count,
+ const float* coefsP,
+ const float* coefsN,
+ const float* sP,
+ const float* sN,
+ const float* const volumeLR)
+{
+ ProcessSSEIntrinsic<2, 16, true>(out, count, coefsP, coefsN, sP, sN, volumeLR,
+ 0 /*lerpP*/, NULL /*coefsP1*/, NULL /*coefsN1*/);
+}
+
+template<>
+inline void Process<1, 16>(float* const out,
+ int count,
+ const float* coefsP,
+ const float* coefsN,
+ const float* coefsP1,
+ const float* coefsN1,
+ const float* sP,
+ const float* sN,
+ float lerpP,
+ const float* const volumeLR)
+{
+ ProcessSSEIntrinsic<1, 16, false>(out, count, coefsP, coefsN, sP, sN, volumeLR,
+ lerpP, coefsP1, coefsN1);
+}
+
+template<>
+inline void Process<2, 16>(float* const out,
+ int count,
+ const float* coefsP,
+ const float* coefsN,
+ const float* coefsP1,
+ const float* coefsN1,
+ const float* sP,
+ const float* sN,
+ float lerpP,
+ const float* const volumeLR)
+{
+ ProcessSSEIntrinsic<2, 16, false>(out, count, coefsP, coefsN, sP, sN, volumeLR,
+ lerpP, coefsP1, coefsN1);
+}
+
+#endif //USE_SSE
+
+} // namespace android
+
+#endif /*ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_SSE_H*/
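
For readers checking the intrinsics against the scalar code, a reference sketch of the arithmetic that ProcessSSEIntrinsic vectorizes for the mono, fixed-coefficient case (the stereo and interpolated variants follow the same shape): the positive-side samples are traversed in reverse, both sides are accumulated, and the sum is scaled by the per-channel volume and added into out[0]/out[1]. Illustrative only; not part of the build.

    // Scalar reference for the CHANNELS == 1, FIXED case of ProcessSSEIntrinsic.
    static inline void ProcessScalarRef(float* out, int count,
                                        const float* coefsP, const float* coefsN,
                                        const float* sP, const float* sN,
                                        const float* volumeLR) {
        float acc = 0.f;
        for (int i = 0; i < count; ++i) {
            acc += sP[-i] * coefsP[i];   // positive side walks backwards from sP
            acc += sN[i]  * coefsN[i];   // negative side walks forwards from sN
        }
        out[0] += acc * volumeLR[0];
        out[1] += acc * volumeLR[1];
    }
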
diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/media/libaudioprocessing/AudioResamplerSinc.cpp
similarity index 100%
rename from services/audioflinger/AudioResamplerSinc.cpp
rename to media/libaudioprocessing/AudioResamplerSinc.cpp
diff --git a/services/audioflinger/AudioResamplerSinc.h b/media/libaudioprocessing/AudioResamplerSinc.h
similarity index 98%
rename from services/audioflinger/AudioResamplerSinc.h
rename to media/libaudioprocessing/AudioResamplerSinc.h
index df8b45a..f6dcf91 100644
--- a/services/audioflinger/AudioResamplerSinc.h
+++ b/media/libaudioprocessing/AudioResamplerSinc.h
@@ -21,7 +21,7 @@
#include <sys/types.h>
#include <android/log.h>
-#include "AudioResampler.h"
+#include <media/AudioResampler.h>
namespace android {
diff --git a/services/audioflinger/AudioResamplerSincDown.h b/media/libaudioprocessing/AudioResamplerSincDown.h
similarity index 100%
rename from services/audioflinger/AudioResamplerSincDown.h
rename to media/libaudioprocessing/AudioResamplerSincDown.h
diff --git a/services/audioflinger/AudioResamplerSincUp.h b/media/libaudioprocessing/AudioResamplerSincUp.h
similarity index 100%
rename from services/audioflinger/AudioResamplerSincUp.h
rename to media/libaudioprocessing/AudioResamplerSincUp.h
diff --git a/services/audioflinger/BufferProviders.cpp b/media/libaudioprocessing/BufferProviders.cpp
similarity index 92%
rename from services/audioflinger/BufferProviders.cpp
rename to media/libaudioprocessing/BufferProviders.cpp
index ba5f7b6..8341a1e 100644
--- a/services/audioflinger/BufferProviders.cpp
+++ b/media/libaudioprocessing/BufferProviders.cpp
@@ -19,16 +19,15 @@
#include <audio_utils/primitives.h>
#include <audio_utils/format.h>
+#include <external/sonic/sonic.h>
+#include <media/audiohal/EffectBufferHalInterface.h>
#include <media/audiohal/EffectHalInterface.h>
#include <media/audiohal/EffectsFactoryHalInterface.h>
#include <media/AudioResamplerPublic.h>
+#include <media/BufferProviders.h>
#include <system/audio_effects/effect_downmix.h>
-
#include <utils/Log.h>
-#include "Configuration.h"
-#include "BufferProviders.h"
-
#ifndef ARRAY_SIZE
#define ARRAY_SIZE(x) (sizeof(x)/sizeof((x)[0]))
#endif
@@ -179,17 +178,44 @@
EFFECT_CONFIG_FORMAT | EFFECT_CONFIG_ACC_MODE;
mDownmixConfig.outputCfg.mask = mDownmixConfig.inputCfg.mask;
+ status_t status;
+ status = EffectBufferHalInterface::mirror(
+ nullptr,
+ audio_bytes_per_sample(format) * audio_channel_count_from_out_mask(inputChannelMask),
+ &mInBuffer);
+ if (status != 0) {
+ ALOGE("DownmixerBufferProvider() error %d while creating input buffer", status);
+ mDownmixInterface.clear();
+ mEffectsFactory.clear();
+ return;
+ }
+ status = EffectBufferHalInterface::mirror(
+ nullptr,
+ audio_bytes_per_sample(format) * audio_channel_count_from_out_mask(outputChannelMask),
+ &mOutBuffer);
+ if (status != 0) {
+ ALOGE("DownmixerBufferProvider() error %d while creating output buffer", status);
+ mInBuffer.clear();
+ mDownmixInterface.clear();
+ mEffectsFactory.clear();
+ return;
+ }
+ mDownmixInterface->setInBuffer(mInBuffer);
+ mDownmixInterface->setOutBuffer(mOutBuffer);
+
int cmdStatus;
uint32_t replySize = sizeof(int);
// Configure downmixer
- status_t status = mDownmixInterface->command(
+ status = mDownmixInterface->command(
EFFECT_CMD_SET_CONFIG /*cmdCode*/, sizeof(effect_config_t) /*cmdSize*/,
&mDownmixConfig /*pCmdData*/,
&replySize, &cmdStatus /*pReplyData*/);
if (status != 0 || cmdStatus != 0) {
ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while configuring downmixer",
status, cmdStatus);
+ mOutBuffer.clear();
+ mInBuffer.clear();
mDownmixInterface.clear();
mEffectsFactory.clear();
return;
@@ -203,6 +229,8 @@
if (status != 0 || cmdStatus != 0) {
ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while enabling downmixer",
status, cmdStatus);
+ mOutBuffer.clear();
+ mInBuffer.clear();
mDownmixInterface.clear();
mEffectsFactory.clear();
return;
@@ -228,6 +256,8 @@
if (status != 0 || cmdStatus != 0) {
ALOGE("DownmixerBufferProvider() error %d cmdStatus %d while setting downmix type",
status, cmdStatus);
+ mOutBuffer.clear();
+ mInBuffer.clear();
mDownmixInterface.clear();
mEffectsFactory.clear();
return;
@@ -238,18 +268,26 @@
DownmixerBufferProvider::~DownmixerBufferProvider()
{
ALOGV("~DownmixerBufferProvider (%p)", this);
+ if (mDownmixInterface != 0) {
+ mDownmixInterface->close();
+ }
}
void DownmixerBufferProvider::copyFrames(void *dst, const void *src, size_t frames)
{
- mDownmixConfig.inputCfg.buffer.frameCount = frames;
- mDownmixConfig.inputCfg.buffer.raw = const_cast<void *>(src);
- mDownmixConfig.outputCfg.buffer.frameCount = frames;
- mDownmixConfig.outputCfg.buffer.raw = dst;
+ mInBuffer->setExternalData(const_cast<void*>(src));
+ mInBuffer->setFrameCount(frames);
+ mInBuffer->update();
+ mOutBuffer->setExternalData(dst);
+ mOutBuffer->setFrameCount(frames);
+ mOutBuffer->update();
// may be in-place if src == dst.
- status_t res = mDownmixInterface->process(
- &mDownmixConfig.inputCfg.buffer, &mDownmixConfig.outputCfg.buffer);
- ALOGE_IF(res != OK, "DownmixBufferProvider error %d", res);
+ status_t res = mDownmixInterface->process();
+ if (res == OK) {
+ mOutBuffer->commit();
+ } else {
+ ALOGE("DownmixBufferProvider error %d", res);
+ }
}
/* call once in a pthread_once handler. */
diff --git a/media/libaudioprocessing/RecordBufferConverter.cpp b/media/libaudioprocessing/RecordBufferConverter.cpp
new file mode 100644
index 0000000..54151f5
--- /dev/null
+++ b/media/libaudioprocessing/RecordBufferConverter.cpp
@@ -0,0 +1,294 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "RecordBufferConverter"
+//#define LOG_NDEBUG 0
+
+#include <audio_utils/primitives.h>
+#include <audio_utils/format.h>
+#include <media/AudioMixer.h> // for UNITY_GAIN_FLOAT
+#include <media/AudioResampler.h>
+#include <media/BufferProviders.h>
+#include <media/RecordBufferConverter.h>
+#include <utils/Log.h>
+
+#ifndef ARRAY_SIZE
+#define ARRAY_SIZE(x) (sizeof(x)/sizeof((x)[0]))
+#endif
+
+template <typename T>
+static inline T max(const T& a, const T& b)
+{
+ return a > b ? a : b;
+}
+
+namespace android {
+
+RecordBufferConverter::RecordBufferConverter(
+ audio_channel_mask_t srcChannelMask, audio_format_t srcFormat,
+ uint32_t srcSampleRate,
+ audio_channel_mask_t dstChannelMask, audio_format_t dstFormat,
+ uint32_t dstSampleRate) :
+    mSrcChannelMask(AUDIO_CHANNEL_INVALID), // updateParameters() sets the following fields:
+ // mSrcFormat
+ // mSrcSampleRate
+ // mDstChannelMask
+ // mDstFormat
+ // mDstSampleRate
+ // mSrcChannelCount
+ // mDstChannelCount
+ // mDstFrameSize
+ mBuf(NULL), mBufFrames(0), mBufFrameSize(0),
+ mResampler(NULL),
+ mIsLegacyDownmix(false),
+ mIsLegacyUpmix(false),
+ mRequiresFloat(false),
+ mInputConverterProvider(NULL)
+{
+ (void)updateParameters(srcChannelMask, srcFormat, srcSampleRate,
+ dstChannelMask, dstFormat, dstSampleRate);
+}
+
+RecordBufferConverter::~RecordBufferConverter() {
+ free(mBuf);
+ delete mResampler;
+ delete mInputConverterProvider;
+}
+
+void RecordBufferConverter::reset() {
+ if (mResampler != NULL) {
+ mResampler->reset();
+ }
+}
+
+size_t RecordBufferConverter::convert(void *dst,
+ AudioBufferProvider *provider, size_t frames)
+{
+ if (mInputConverterProvider != NULL) {
+ mInputConverterProvider->setBufferProvider(provider);
+ provider = mInputConverterProvider;
+ }
+
+ if (mResampler == NULL) {
+ ALOGV("NO RESAMPLING sampleRate:%u mSrcFormat:%#x mDstFormat:%#x",
+ mSrcSampleRate, mSrcFormat, mDstFormat);
+
+ AudioBufferProvider::Buffer buffer;
+ for (size_t i = frames; i > 0; ) {
+ buffer.frameCount = i;
+ status_t status = provider->getNextBuffer(&buffer);
+ if (status != OK || buffer.frameCount == 0) {
+ frames -= i; // cannot fill request.
+ break;
+ }
+ // format convert to destination buffer
+ convertNoResampler(dst, buffer.raw, buffer.frameCount);
+
+ dst = (int8_t*)dst + buffer.frameCount * mDstFrameSize;
+ i -= buffer.frameCount;
+ provider->releaseBuffer(&buffer);
+ }
+ } else {
+ ALOGV("RESAMPLING mSrcSampleRate:%u mDstSampleRate:%u mSrcFormat:%#x mDstFormat:%#x",
+ mSrcSampleRate, mDstSampleRate, mSrcFormat, mDstFormat);
+
+ // reallocate buffer if needed
+ if (mBufFrameSize != 0 && mBufFrames < frames) {
+ free(mBuf);
+ mBufFrames = frames;
+ (void)posix_memalign(&mBuf, 32, mBufFrames * mBufFrameSize);
+ }
+ // resampler accumulates, but we only have one source track
+ memset(mBuf, 0, frames * mBufFrameSize);
+ frames = mResampler->resample((int32_t*)mBuf, frames, provider);
+ // format convert to destination buffer
+ convertResampler(dst, mBuf, frames);
+ }
+ return frames;
+}
+
+status_t RecordBufferConverter::updateParameters(
+ audio_channel_mask_t srcChannelMask, audio_format_t srcFormat,
+ uint32_t srcSampleRate,
+ audio_channel_mask_t dstChannelMask, audio_format_t dstFormat,
+ uint32_t dstSampleRate)
+{
+ // quick evaluation if there is any change.
+ if (mSrcFormat == srcFormat
+ && mSrcChannelMask == srcChannelMask
+ && mSrcSampleRate == srcSampleRate
+ && mDstFormat == dstFormat
+ && mDstChannelMask == dstChannelMask
+ && mDstSampleRate == dstSampleRate) {
+ return NO_ERROR;
+ }
+
+ ALOGV("RecordBufferConverter updateParameters srcMask:%#x dstMask:%#x"
+ " srcFormat:%#x dstFormat:%#x srcRate:%u dstRate:%u",
+ srcChannelMask, dstChannelMask, srcFormat, dstFormat, srcSampleRate, dstSampleRate);
+ const bool valid =
+ audio_is_input_channel(srcChannelMask)
+ && audio_is_input_channel(dstChannelMask)
+ && audio_is_valid_format(srcFormat) && audio_is_linear_pcm(srcFormat)
+ && audio_is_valid_format(dstFormat) && audio_is_linear_pcm(dstFormat)
+ && (srcSampleRate <= dstSampleRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX)
+ ; // no upsampling checks for now
+ if (!valid) {
+ return BAD_VALUE;
+ }
+
+ mSrcFormat = srcFormat;
+ mSrcChannelMask = srcChannelMask;
+ mSrcSampleRate = srcSampleRate;
+ mDstFormat = dstFormat;
+ mDstChannelMask = dstChannelMask;
+ mDstSampleRate = dstSampleRate;
+
+ // compute derived parameters
+ mSrcChannelCount = audio_channel_count_from_in_mask(srcChannelMask);
+ mDstChannelCount = audio_channel_count_from_in_mask(dstChannelMask);
+ mDstFrameSize = mDstChannelCount * audio_bytes_per_sample(mDstFormat);
+
+ // do we need to resample?
+ delete mResampler;
+ mResampler = NULL;
+ if (mSrcSampleRate != mDstSampleRate) {
+ mResampler = AudioResampler::create(AUDIO_FORMAT_PCM_FLOAT,
+ mSrcChannelCount, mDstSampleRate);
+ mResampler->setSampleRate(mSrcSampleRate);
+ mResampler->setVolume(AudioMixer::UNITY_GAIN_FLOAT, AudioMixer::UNITY_GAIN_FLOAT);
+ }
+
+ // are we running legacy channel conversion modes?
+ mIsLegacyDownmix = (mSrcChannelMask == AUDIO_CHANNEL_IN_STEREO
+ || mSrcChannelMask == AUDIO_CHANNEL_IN_FRONT_BACK)
+ && mDstChannelMask == AUDIO_CHANNEL_IN_MONO;
+ mIsLegacyUpmix = mSrcChannelMask == AUDIO_CHANNEL_IN_MONO
+ && (mDstChannelMask == AUDIO_CHANNEL_IN_STEREO
+ || mDstChannelMask == AUDIO_CHANNEL_IN_FRONT_BACK);
+
+ // do we need to process in float?
+ mRequiresFloat = mResampler != NULL || mIsLegacyDownmix || mIsLegacyUpmix;
+
+    // do we need a staging buffer to convert to the destination format (could be optimized further)?
+    // mBufFrameSize > 0 encodes both the staging frame size and whether a staging buffer is needed
+ if (mResampler != NULL) {
+ mBufFrameSize = max(mSrcChannelCount, (uint32_t)FCC_2)
+ * audio_bytes_per_sample(AUDIO_FORMAT_PCM_FLOAT);
+ } else if (mIsLegacyUpmix || mIsLegacyDownmix) { // legacy modes always float
+ mBufFrameSize = mDstChannelCount * audio_bytes_per_sample(AUDIO_FORMAT_PCM_FLOAT);
+ } else if (mSrcChannelMask != mDstChannelMask && mDstFormat != mSrcFormat) {
+ mBufFrameSize = mDstChannelCount * audio_bytes_per_sample(mSrcFormat);
+ } else {
+ mBufFrameSize = 0;
+ }
+ mBufFrames = 0; // force the buffer to be resized.
+
+ // do we need an input converter buffer provider to give us float?
+ delete mInputConverterProvider;
+ mInputConverterProvider = NULL;
+ if (mRequiresFloat && mSrcFormat != AUDIO_FORMAT_PCM_FLOAT) {
+ mInputConverterProvider = new ReformatBufferProvider(
+ audio_channel_count_from_in_mask(mSrcChannelMask),
+ mSrcFormat,
+ AUDIO_FORMAT_PCM_FLOAT,
+ 256 /* provider buffer frame count */);
+ }
+
+ // do we need a remixer to do channel mask conversion
+ if (!mIsLegacyDownmix && !mIsLegacyUpmix && mSrcChannelMask != mDstChannelMask) {
+ (void) memcpy_by_index_array_initialization_from_channel_mask(
+ mIdxAry, ARRAY_SIZE(mIdxAry), mDstChannelMask, mSrcChannelMask);
+ }
+ return NO_ERROR;
+}
+
+void RecordBufferConverter::convertNoResampler(
+ void *dst, const void *src, size_t frames)
+{
+ // src is native type unless there is legacy upmix or downmix, whereupon it is float.
+ if (mBufFrameSize != 0 && mBufFrames < frames) {
+ free(mBuf);
+ mBufFrames = frames;
+ (void)posix_memalign(&mBuf, 32, mBufFrames * mBufFrameSize);
+ }
+ // do we need to do legacy upmix and downmix?
+ if (mIsLegacyUpmix || mIsLegacyDownmix) {
+ void *dstBuf = mBuf != NULL ? mBuf : dst;
+ if (mIsLegacyUpmix) {
+ upmix_to_stereo_float_from_mono_float((float *)dstBuf,
+ (const float *)src, frames);
+ } else /*mIsLegacyDownmix */ {
+ downmix_to_mono_float_from_stereo_float((float *)dstBuf,
+ (const float *)src, frames);
+ }
+ if (mBuf != NULL) {
+ memcpy_by_audio_format(dst, mDstFormat, mBuf, AUDIO_FORMAT_PCM_FLOAT,
+ frames * mDstChannelCount);
+ }
+ return;
+ }
+ // do we need to do channel mask conversion?
+ if (mSrcChannelMask != mDstChannelMask) {
+ void *dstBuf = mBuf != NULL ? mBuf : dst;
+ memcpy_by_index_array(dstBuf, mDstChannelCount,
+ src, mSrcChannelCount, mIdxAry, audio_bytes_per_sample(mSrcFormat), frames);
+ if (dstBuf == dst) {
+ return; // format is the same
+ }
+ }
+ // convert to destination buffer
+ const void *convertBuf = mBuf != NULL ? mBuf : src;
+ memcpy_by_audio_format(dst, mDstFormat, convertBuf, mSrcFormat,
+ frames * mDstChannelCount);
+}
+
+void RecordBufferConverter::convertResampler(
+ void *dst, /*not-a-const*/ void *src, size_t frames)
+{
+ // src buffer format is ALWAYS float when entering this routine
+ if (mIsLegacyUpmix) {
+ ; // mono to stereo already handled by resampler
+ } else if (mIsLegacyDownmix
+ || (mSrcChannelMask == mDstChannelMask && mSrcChannelCount == 1)) {
+ // the resampler outputs stereo for mono input channel (a feature?)
+ // must convert to mono
+ downmix_to_mono_float_from_stereo_float((float *)src,
+ (const float *)src, frames);
+ } else if (mSrcChannelMask != mDstChannelMask) {
+ // convert to mono channel again for channel mask conversion (could be skipped
+ // with further optimization).
+ if (mSrcChannelCount == 1) {
+ downmix_to_mono_float_from_stereo_float((float *)src,
+ (const float *)src, frames);
+ }
+ // convert to destination format (in place, OK as float is larger than other types)
+ if (mDstFormat != AUDIO_FORMAT_PCM_FLOAT) {
+ memcpy_by_audio_format(src, mDstFormat, src, AUDIO_FORMAT_PCM_FLOAT,
+ frames * mSrcChannelCount);
+ }
+ // channel convert and save to dst
+ memcpy_by_index_array(dst, mDstChannelCount,
+ src, mSrcChannelCount, mIdxAry, audio_bytes_per_sample(mDstFormat), frames);
+ return;
+ }
+ // convert to destination format and save to dst
+ memcpy_by_audio_format(dst, mDstFormat, src, AUDIO_FORMAT_PCM_FLOAT,
+ frames * mDstChannelCount);
+}
+
+// ----------------------------------------------------------------------------
+} // namespace android
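
A minimal usage sketch of the converter, based only on the constructor and methods defined above; the buffer provider, destination buffer, and frame count are placeholders supplied by the caller.

    #include <media/RecordBufferConverter.h>

    // Sketch: convert captured stereo 48 kHz 16-bit PCM to mono 16 kHz 16-bit PCM.
    static size_t captureConvert(android::AudioBufferProvider* provider,
                                 void* dst, size_t frames) {
        using namespace android;
        RecordBufferConverter converter(
                AUDIO_CHANNEL_IN_STEREO, AUDIO_FORMAT_PCM_16_BIT, 48000,   // source
                AUDIO_CHANNEL_IN_MONO,   AUDIO_FORMAT_PCM_16_BIT, 16000);  // destination
        // updateParameters() handles later reconfiguration; reset() clears
        // resampler history, e.g. when the input goes into standby.
        return converter.convert(dst, provider, frames);
    }
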
diff --git a/services/audioflinger/audio-resampler/Android.mk b/media/libaudioprocessing/audio-resampler/Android.mk
similarity index 100%
rename from services/audioflinger/audio-resampler/Android.mk
rename to media/libaudioprocessing/audio-resampler/Android.mk
diff --git a/services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp b/media/libaudioprocessing/audio-resampler/AudioResamplerCoefficients.cpp
similarity index 100%
rename from services/audioflinger/audio-resampler/AudioResamplerCoefficients.cpp
rename to media/libaudioprocessing/audio-resampler/AudioResamplerCoefficients.cpp
diff --git a/services/audioflinger/audio-resampler/filter_coefficients.h b/media/libaudioprocessing/audio-resampler/filter_coefficients.h
similarity index 100%
rename from services/audioflinger/audio-resampler/filter_coefficients.h
rename to media/libaudioprocessing/audio-resampler/filter_coefficients.h
diff --git a/media/libaudioprocessing/tests/Android.mk b/media/libaudioprocessing/tests/Android.mk
new file mode 100644
index 0000000..23e1c3a
--- /dev/null
+++ b/media/libaudioprocessing/tests/Android.mk
@@ -0,0 +1,87 @@
+# Build the unit tests for libaudioprocessing
+
+LOCAL_PATH := $(call my-dir)
+
+#
+# resampler unit test
+#
+include $(CLEAR_VARS)
+
+LOCAL_SHARED_LIBRARIES := \
+ libaudioutils \
+ libaudioprocessing \
+ libcutils \
+ liblog \
+ libutils \
+
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+
+LOCAL_SRC_FILES := \
+ resampler_tests.cpp
+
+LOCAL_MODULE := resampler_tests
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_CFLAGS := -Werror -Wall
+
+include $(BUILD_NATIVE_TEST)
+
+#
+# audio mixer test tool
+#
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ test-mixer.cpp \
+
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+
+LOCAL_STATIC_LIBRARIES := \
+ libsndfile \
+
+LOCAL_SHARED_LIBRARIES := \
+ libaudioprocessing \
+ libaudioutils \
+ libcutils \
+ liblog \
+ libutils \
+
+LOCAL_MODULE := test-mixer
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -Werror -Wall
+
+include $(BUILD_EXECUTABLE)
+
+#
+# build audio resampler test tool
+#
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ test-resampler.cpp \
+
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+
+LOCAL_STATIC_LIBRARIES := \
+ libsndfile \
+
+LOCAL_SHARED_LIBRARIES := \
+ libaudioprocessing \
+ libaudioutils \
+ libcutils \
+ liblog \
+ libutils \
+
+LOCAL_MODULE := test-resampler
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -Werror -Wall
+
+include $(BUILD_EXECUTABLE)
diff --git a/services/audioflinger/tests/README b/media/libaudioprocessing/tests/README
similarity index 61%
rename from services/audioflinger/tests/README
rename to media/libaudioprocessing/tests/README
index 508e960..ed7e2ed 100644
--- a/services/audioflinger/tests/README
+++ b/media/libaudioprocessing/tests/README
@@ -1,9 +1,9 @@
For libsonic dependency:
-pushd external/sonic
+pushd $ANDROID_BUILD_TOP/external/sonic
mm
popd
-To build resampler library:
+To build audio processing library:
pushd ..
Optionally uncomment USE_NEON=false in Android.mk
mm
diff --git a/media/libaudioprocessing/tests/build_and_run_all_unit_tests.sh b/media/libaudioprocessing/tests/build_and_run_all_unit_tests.sh
new file mode 100755
index 0000000..704d095
--- /dev/null
+++ b/media/libaudioprocessing/tests/build_and_run_all_unit_tests.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+ echo "Android build environment not set"
+ exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+pushd $ANDROID_BUILD_TOP/frameworks/av/media/libaudioprocessing
+pwd
+mm
+
+echo "waiting for device"
+adb root && adb wait-for-device remount
+adb push $OUT/system/lib/libaudioprocessing.so /system/lib
+adb push $OUT/system/lib64/libaudioprocessing.so /system/lib64
+adb push $OUT/data/nativetest/resampler_tests/resampler_tests /data/nativetest/resampler_tests/resampler_tests
+adb push $OUT/data/nativetest64/resampler_tests/resampler_tests /data/nativetest64/resampler_tests/resampler_tests
+
+sh $ANDROID_BUILD_TOP/frameworks/av/media/libaudioprocessing/tests/run_all_unit_tests.sh
+
+popd
diff --git a/services/audioflinger/tests/mixer_to_wav_tests.sh b/media/libaudioprocessing/tests/mixer_to_wav_tests.sh
similarity index 95%
rename from services/audioflinger/tests/mixer_to_wav_tests.sh
rename to media/libaudioprocessing/tests/mixer_to_wav_tests.sh
index d0482a1..72b02fc 100755
--- a/services/audioflinger/tests/mixer_to_wav_tests.sh
+++ b/media/libaudioprocessing/tests/mixer_to_wav_tests.sh
@@ -37,7 +37,7 @@
# ensure we have mm
. $ANDROID_BUILD_TOP/build/envsetup.sh
-pushd $ANDROID_BUILD_TOP/frameworks/av/services/audioflinger/
+pushd $ANDROID_BUILD_TOP/frameworks/av/media/libaudioprocessing
# build
pwd
@@ -46,7 +46,8 @@
# send to device
echo "waiting for device"
adb root && adb wait-for-device remount
-adb push $OUT/system/lib/libaudioresampler.so /system/lib
+adb push $OUT/system/lib/libaudioprocessing.so /system/lib
+adb push $OUT/system/lib64/libaudioprocessing.so /system/lib64
adb push $OUT/system/bin/test-mixer /system/bin
# createwav creates a series of WAV files testing various
diff --git a/services/audioflinger/tests/resampler_tests.cpp b/media/libaudioprocessing/tests/resampler_tests.cpp
similarity index 81%
rename from services/audioflinger/tests/resampler_tests.cpp
rename to media/libaudioprocessing/tests/resampler_tests.cpp
index 406b960..a23c000 100644
--- a/services/audioflinger/tests/resampler_tests.cpp
+++ b/media/libaudioprocessing/tests/resampler_tests.cpp
@@ -32,13 +32,24 @@
#include <utility>
#include <vector>
-#include <android/log.h>
#include <gtest/gtest.h>
+#include <log/log.h>
#include <media/AudioBufferProvider.h>
-#include "AudioResampler.h"
+#include <media/AudioResampler.h>
#include "test_utils.h"
+template <typename T>
+static void printData(T *data, size_t size) {
+ const size_t stride = 8;
+ for (size_t i = 0; i < size; ) {
+ for (size_t j = 0; j < stride && i < size; ++j) {
+ std::cout << data[i++] << ' '; // extra space before newline
+ }
+ std::cout << '\n'; // or endl
+ }
+}
+
void resample(int channels, void *output,
size_t outputFrames, const std::vector<size_t> &outputIncr,
android::AudioBufferProvider *provider, android::AudioResampler *resampler)
@@ -91,7 +102,7 @@
// calculate the output size
size_t outputFrames = ((int64_t) provider.getNumFrames() * outputFreq) / inputFreq;
- size_t outputFrameSize = channels * (useFloat ? sizeof(float) : sizeof(int32_t));
+ size_t outputFrameSize = (channels == 1 ? 2 : channels) * (useFloat ? sizeof(float) : sizeof(int32_t));
size_t outputSize = outputFrameSize * outputFrames;
outputSize &= ~7;
@@ -106,7 +117,7 @@
// set up the reference run
std::vector<size_t> refIncr;
refIncr.push_back(outputFrames);
- void* reference = malloc(outputSize);
+ void* reference = calloc(outputFrames, outputFrameSize);
resample(channels, reference, outputFrames, refIncr, &provider, resampler);
provider.reset();
@@ -127,7 +138,7 @@
outIncr.push_back(1);
outIncr.push_back(2);
outIncr.push_back(3);
- void* test = malloc(outputSize);
+ void* test = calloc(outputFrames, outputFrameSize);
inputIncr.push_back(1);
inputIncr.push_back(3);
provider.setIncr(inputIncr);
@@ -177,7 +188,7 @@
// calculate the output size
size_t outputFrames = ((int64_t) provider.getNumFrames() * outputFreq) / inputFreq;
- size_t outputFrameSize = channels * sizeof(TO);
+ size_t outputFrameSize = (channels == 1 ? 2 : channels) * sizeof(TO);
size_t outputSize = outputFrameSize * outputFrames;
outputSize &= ~7;
@@ -194,7 +205,7 @@
// set up the reference run
std::vector<size_t> refIncr;
refIncr.push_back(outputFrames);
- void* reference = malloc(outputSize);
+ void* reference = calloc(outputFrames, outputFrameSize);
resample(channels, reference, outputFrames, refIncr, &provider, resampler);
TO *out = reinterpret_cast<TO *>(reference);
@@ -204,6 +215,8 @@
const unsigned stopbandFrame = stopband * outputFreq / 1000.;
// check each channel separately
+ if (channels == 1) channels = 2; // workaround (mono duplicates output channel)
+
for (size_t i = 0; i < channels; ++i) {
double passbandEnergy = signalEnergy(out, out + passbandFrame * channels, channels);
double stopbandEnergy = signalEnergy(out + stopbandFrame * channels,
@@ -331,6 +344,34 @@
}
}
+TEST(audioflinger_resampler, stopbandresponse_integer_mono) {
+ // not all of these may work (old resamplers fail on downsampling)
+ static const enum android::AudioResampler::src_quality kQualityArray[] = {
+ //android::AudioResampler::LOW_QUALITY,
+ //android::AudioResampler::MED_QUALITY,
+ //android::AudioResampler::HIGH_QUALITY,
+ //android::AudioResampler::VERY_HIGH_QUALITY,
+ android::AudioResampler::DYN_LOW_QUALITY,
+ android::AudioResampler::DYN_MED_QUALITY,
+ android::AudioResampler::DYN_HIGH_QUALITY,
+ };
+
+ // in this test we assume a maximum transition band between 12kHz and 20kHz.
+ // there must be at least 60dB relative attenuation between stopband and passband.
+ for (size_t i = 0; i < ARRAY_SIZE(kQualityArray); ++i) {
+ testStopbandDownconversion<int16_t, int32_t>(
+ 1, 48000, 32000, 12000, 20000, kQualityArray[i]);
+ }
+
+ // in this test we assume a maximum transition band between 7kHz and 15kHz.
+ // there must be at least 60dB relative attenuation between stopband and passband.
+ // (the weird ratio triggers interpolative resampling)
+ for (size_t i = 0; i < ARRAY_SIZE(kQualityArray); ++i) {
+ testStopbandDownconversion<int16_t, int32_t>(
+ 1, 48000, 22101, 7000, 15000, kQualityArray[i]);
+ }
+}
+
TEST(audioflinger_resampler, stopbandresponse_integer_multichannel) {
// not all of these may work (old resamplers fail on downsampling)
static const enum android::AudioResampler::src_quality kQualityArray[] = {
@@ -387,6 +428,34 @@
}
}
+TEST(audioflinger_resampler, stopbandresponse_float_mono) {
+ // not all of these may work (old resamplers fail on downsampling)
+ static const enum android::AudioResampler::src_quality kQualityArray[] = {
+ //android::AudioResampler::LOW_QUALITY,
+ //android::AudioResampler::MED_QUALITY,
+ //android::AudioResampler::HIGH_QUALITY,
+ //android::AudioResampler::VERY_HIGH_QUALITY,
+ android::AudioResampler::DYN_LOW_QUALITY,
+ android::AudioResampler::DYN_MED_QUALITY,
+ android::AudioResampler::DYN_HIGH_QUALITY,
+ };
+
+ // in this test we assume a maximum transition band between 12kHz and 20kHz.
+ // there must be at least 60dB relative attenuation between stopband and passband.
+ for (size_t i = 0; i < ARRAY_SIZE(kQualityArray); ++i) {
+ testStopbandDownconversion<float, float>(
+ 1, 48000, 32000, 12000, 20000, kQualityArray[i]);
+ }
+
+ // in this test we assume a maximum transition band between 7kHz and 15kHz.
+ // there must be at least 60dB relative attenuation between stopband and passband.
+ // (the weird ratio triggers interpolative resampling)
+ for (size_t i = 0; i < ARRAY_SIZE(kQualityArray); ++i) {
+ testStopbandDownconversion<float, float>(
+ 1, 48000, 22101, 7000, 15000, kQualityArray[i]);
+ }
+}
+
TEST(audioflinger_resampler, stopbandresponse_float_multichannel) {
// not all of these may work (old resamplers fail on downsampling)
static const enum android::AudioResampler::src_quality kQualityArray[] = {
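
The "60dB relative attenuation" requirement stated in the new mono tests is an energy-ratio check; a hedged sketch of the comparison (the helper name is hypothetical, the tests compute the band energies via signalEnergy):

    #include <cmath>

    // Relative attenuation between passband and stopband, in dB (illustrative helper).
    static double relativeAttenuationDb(double passbandEnergy, double stopbandEnergy) {
        return 10. * std::log10(passbandEnergy / stopbandEnergy); // energy ratio -> dB
    }
    // The tests then require roughly: relativeAttenuationDb(passbandEnergy, stopbandEnergy) > 60.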
diff --git a/services/audioflinger/tests/run_all_unit_tests.sh b/media/libaudioprocessing/tests/run_all_unit_tests.sh
similarity index 79%
rename from services/audioflinger/tests/run_all_unit_tests.sh
rename to media/libaudioprocessing/tests/run_all_unit_tests.sh
index 113f39e..15a94c2 100755
--- a/services/audioflinger/tests/run_all_unit_tests.sh
+++ b/media/libaudioprocessing/tests/run_all_unit_tests.sh
@@ -8,5 +8,5 @@
echo "waiting for device"
adb root && adb wait-for-device remount
-#adb shell /system/bin/resampler_tests
adb shell /data/nativetest/resampler_tests/resampler_tests
+adb shell /data/nativetest64/resampler_tests/resampler_tests
diff --git a/services/audioflinger/tests/test-mixer.cpp b/media/libaudioprocessing/tests/test-mixer.cpp
similarity index 99%
rename from services/audioflinger/tests/test-mixer.cpp
rename to media/libaudioprocessing/tests/test-mixer.cpp
index 65e22da..75dbf91 100644
--- a/services/audioflinger/tests/test-mixer.cpp
+++ b/media/libaudioprocessing/tests/test-mixer.cpp
@@ -21,7 +21,7 @@
#include <audio_utils/primitives.h>
#include <audio_utils/sndfile.h>
#include <media/AudioBufferProvider.h>
-#include "AudioMixer.h"
+#include <media/AudioMixer.h>
#include "test_utils.h"
/* Testing is typically through creation of an output WAV file from several
diff --git a/services/audioflinger/test-resample.cpp b/media/libaudioprocessing/tests/test-resampler.cpp
similarity index 98%
rename from services/audioflinger/test-resample.cpp
rename to media/libaudioprocessing/tests/test-resampler.cpp
index bae3c5b..fbc9326 100644
--- a/services/audioflinger/test-resample.cpp
+++ b/media/libaudioprocessing/tests/test-resampler.cpp
@@ -29,7 +29,7 @@
#include <audio_utils/sndfile.h>
#include <utils/Vector.h>
#include <media/AudioBufferProvider.h>
-#include "AudioResampler.h"
+#include <media/AudioResampler.h>
using namespace android;
diff --git a/services/audioflinger/tests/test_utils.h b/media/libaudioprocessing/tests/test_utils.h
similarity index 98%
rename from services/audioflinger/tests/test_utils.h
rename to media/libaudioprocessing/tests/test_utils.h
index 283c768..b61a929 100644
--- a/services/audioflinger/tests/test_utils.h
+++ b/media/libaudioprocessing/tests/test_utils.h
@@ -17,6 +17,12 @@
#ifndef ANDROID_AUDIO_TEST_UTILS_H
#define ANDROID_AUDIO_TEST_UTILS_H
+#ifndef LOG_TAG
+#define LOG_TAG "test_utils"
+#endif
+
+#include <log/log.h>
+
#include <audio_utils/sndfile.h>
#ifndef ARRAY_SIZE
diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c
index 323930a..f27d5ca 100644
--- a/media/libeffects/downmix/EffectDownmix.c
+++ b/media/libeffects/downmix/EffectDownmix.c
@@ -22,7 +22,7 @@
#include <stdlib.h>
#include <string.h>
-#include <android/log.h>
+#include <log/log.h>
#include "EffectDownmix.h"
diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c
index 74d489b..554c14d 100644
--- a/media/libeffects/factory/EffectsFactory.c
+++ b/media/libeffects/factory/EffectsFactory.c
@@ -24,10 +24,10 @@
#include <string.h>
#include <unistd.h>
-#include <android/log.h>
#include <cutils/config_utils.h>
#include <cutils/misc.h>
#include <cutils/properties.h>
+#include <log/log.h>
#include <system/audio_effects/audio_effects_conf.h>
diff --git a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
index 19d408d..9d29cf1 100644
--- a/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/EffectLoudnessEnhancer.cpp
@@ -19,12 +19,13 @@
#include <assert.h>
#include <math.h>
-#include <new>
#include <stdlib.h>
#include <string.h>
#include <time.h>
-#include <android/log.h>
+#include <new>
+
+#include <log/log.h>
#include <audio_effects/effect_loudnessenhancer.h>
#include "dsp/core/dynamic_range_compression.h"
diff --git a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
index 4f9f438..7ea0593 100644
--- a/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
+++ b/media/libeffects/loudness/dsp/core/dynamic_range_compression-inl.h
@@ -21,7 +21,7 @@
#endif
//#define LOG_NDEBUG 0
-#include <android/log.h>
+#include <log/log.h>
namespace le_fx {
diff --git a/media/libeffects/loudness/dsp/core/interpolator_base-inl.h b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
index bdb6818..fb87c79 100644
--- a/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
+++ b/media/libeffects/loudness/dsp/core/interpolator_base-inl.h
@@ -21,7 +21,7 @@
#endif
//#define LOG_NDEBUG 0
-#include <android/log.h>
+#include <log/log.h>
#include "dsp/core/basic.h"
diff --git a/media/libeffects/testlibs/EffectEqualizer.cpp b/media/libeffects/testlibs/EffectEqualizer.cpp
index f5e11a6..db4d009 100644
--- a/media/libeffects/testlibs/EffectEqualizer.cpp
+++ b/media/libeffects/testlibs/EffectEqualizer.cpp
@@ -22,9 +22,10 @@
#include <assert.h>
#include <stdlib.h>
#include <string.h>
+
#include <new>
-#include <android/log.h>
+#include <log/log.h>
#include "AudioEqualizer.h"
#include "AudioBiquadFilter.h"
diff --git a/media/libeffects/testlibs/EffectReverb.c b/media/libeffects/testlibs/EffectReverb.c
index 08bf9ae..fce9bed 100644
--- a/media/libeffects/testlibs/EffectReverb.c
+++ b/media/libeffects/testlibs/EffectReverb.c
@@ -21,7 +21,7 @@
#include <stdlib.h>
#include <string.h>
-#include <android/log.h>
+#include <log/log.h>
#include "EffectReverb.h"
#include "EffectsMath.h"
diff --git a/media/libmedia/BufferingSettings.cpp b/media/libmedia/BufferingSettings.cpp
index 6dc4a53..a69497e 100644
--- a/media/libmedia/BufferingSettings.cpp
+++ b/media/libmedia/BufferingSettings.cpp
@@ -28,6 +28,16 @@
return (mode >= BUFFERING_MODE_NONE && mode < BUFFERING_MODE_COUNT);
}
+// static
+bool BufferingSettings::IsTimeBasedBufferingMode(int mode) {
+ return (mode == BUFFERING_MODE_TIME_ONLY || mode == BUFFERING_MODE_TIME_THEN_SIZE);
+}
+
+// static
+bool BufferingSettings::IsSizeBasedBufferingMode(int mode) {
+ return (mode == BUFFERING_MODE_SIZE_ONLY || mode == BUFFERING_MODE_TIME_THEN_SIZE);
+}
+
BufferingSettings::BufferingSettings()
: mInitialBufferingMode(BUFFERING_MODE_NONE),
mRebufferingMode(BUFFERING_MODE_NONE),
@@ -70,4 +80,15 @@
return OK;
}
+String8 BufferingSettings::toString() const {
+ String8 s;
+ s.appendFormat("initialMode(%d), rebufferingMode(%d), "
+ "initialMarks(%d ms, %d KB), rebufferingMarks(%d, %d)ms, (%d, %d)KB",
+ mInitialBufferingMode, mRebufferingMode,
+ mInitialWatermarkMs, mInitialWatermarkKB,
+ mRebufferingWatermarkLowMs, mRebufferingWatermarkHighMs,
+ mRebufferingWatermarkLowKB, mRebufferingWatermarkHighKB);
+ return s;
+}
+
} // namespace android
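
The two new predicates intentionally overlap for BUFFERING_MODE_TIME_THEN_SIZE; a short caller-side sketch (hypothetical usage, not part of this change):

    BufferingSettings s;
    s.mRebufferingMode = BUFFERING_MODE_TIME_THEN_SIZE;
    // TIME_THEN_SIZE is both time-based and size-based, so both watermark pairs apply.
    const bool usesTimeMarks = BufferingSettings::IsTimeBasedBufferingMode(s.mRebufferingMode);
    const bool usesSizeMarks = BufferingSettings::IsSizeBasedBufferingMode(s.mRebufferingMode);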
diff --git a/media/libmedia/OMXBuffer.cpp b/media/libmedia/OMXBuffer.cpp
index 914cd5b..8ea70e4 100644
--- a/media/libmedia/OMXBuffer.cpp
+++ b/media/libmedia/OMXBuffer.cpp
@@ -35,14 +35,10 @@
OMXBuffer::OMXBuffer(const sp<MediaCodecBuffer>& codecBuffer)
: mBufferType(kBufferTypePreset),
+ mRangeOffset(codecBuffer != NULL ? codecBuffer->offset() : 0),
mRangeLength(codecBuffer != NULL ? codecBuffer->size() : 0) {
}
-OMXBuffer::OMXBuffer(OMX_U32 rangeLength)
- : mBufferType(kBufferTypePreset),
- mRangeLength(rangeLength) {
-}
-
OMXBuffer::OMXBuffer(const sp<IMemory> &mem)
: mBufferType(kBufferTypeSharedMem),
mMem(mem) {
@@ -67,6 +63,10 @@
switch(mBufferType) {
case kBufferTypePreset:
{
+ status_t err = parcel->writeUint32(mRangeOffset);
+ if (err != OK) {
+ return err;
+ }
return parcel->writeUint32(mRangeLength);
}
@@ -97,7 +97,14 @@
switch(bufferType) {
case kBufferTypePreset:
{
- mRangeLength = parcel->readUint32();
+ status_t err = parcel->readUint32(&mRangeOffset);
+ if (err != OK) {
+ return err;
+ }
+ err = parcel->readUint32(&mRangeLength);
+ if (err != OK) {
+ return err;
+ }
break;
}
diff --git a/media/libmedia/TypeConverter.cpp b/media/libmedia/TypeConverter.cpp
index c2b2688..25c29f2 100644
--- a/media/libmedia/TypeConverter.cpp
+++ b/media/libmedia/TypeConverter.cpp
@@ -269,6 +269,48 @@
TERMINATOR
};
+template <>
+const UsageTypeConverter::Table UsageTypeConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_UNKNOWN),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_MEDIA),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ALARM),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_NOTIFICATION_EVENT),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_ASSISTANCE_SONIFICATION),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_GAME),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_VIRTUAL_SOURCE),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_CNT),
+ MAKE_STRING_FROM_ENUM(AUDIO_USAGE_MAX),
+ TERMINATOR
+};
+
+template <>
+const SourceTypeConverter::Table SourceTypeConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_DEFAULT),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_MIC),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_UPLINK),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_DOWNLINK),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_CALL),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_CAMCORDER),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_RECOGNITION),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_VOICE_COMMUNICATION),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_REMOTE_SUBMIX),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_UNPROCESSED),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_CNT),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_MAX),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_FM_TUNER),
+ MAKE_STRING_FROM_ENUM(AUDIO_SOURCE_HOTWORD),
+ TERMINATOR
+};
+
template class TypeConverter<OutputDeviceTraits>;
template class TypeConverter<InputDeviceTraits>;
template class TypeConverter<OutputFlagTraits>;
@@ -280,6 +322,8 @@
template class TypeConverter<GainModeTraits>;
template class TypeConverter<StreamTraits>;
template class TypeConverter<AudioModeTraits>;
+template class TypeConverter<UsageTraits>;
+template class TypeConverter<SourceTraits>;
bool deviceFromString(const std::string& literalDevice, audio_devices_t& device) {
return InputDeviceConverter::fromString(literalDevice, device) ||
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 699172b..6bba1f1 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -244,6 +244,28 @@
return mPlayer->setVideoSurfaceTexture(bufferProducer);
}
+status_t MediaPlayer::getDefaultBufferingSettings(BufferingSettings* buffering /* nonnull */)
+{
+ ALOGV("getDefaultBufferingSettings");
+
+ Mutex::Autolock _l(mLock);
+ if (mPlayer == 0) {
+ return NO_INIT;
+ }
+ return mPlayer->getDefaultBufferingSettings(buffering);
+}
+
+status_t MediaPlayer::setBufferingSettings(const BufferingSettings& buffering)
+{
+ ALOGV("setBufferingSettings");
+
+ Mutex::Autolock _l(mLock);
+ if (mPlayer == 0) {
+ return NO_INIT;
+ }
+ return mPlayer->setBufferingSettings(buffering);
+}
+
// must call with lock held
status_t MediaPlayer::prepareAsync_l()
{
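
A hypothetical client-side use of the two new MediaPlayer calls (sketch only; mediaPlayer is assumed to be an already-initialized sp<MediaPlayer>, otherwise both calls return NO_INIT):

    BufferingSettings settings;
    if (mediaPlayer->getDefaultBufferingSettings(&settings) == NO_ERROR) {
        settings.mRebufferingWatermarkHighMs = 10000;       // example tweak: 10 s high mark
        (void)mediaPlayer->setBufferingSettings(settings);
    }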
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 065738e..3199495 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -975,13 +975,8 @@
status_t MediaPlayerService::Client::setBufferingSettings(
const BufferingSettings& buffering)
{
- ALOGV("[%d] setBufferingSettings(%d, %d, %d, %d, %d, %d, %d, %d)",
- mConnId, buffering.mInitialBufferingMode, buffering.mRebufferingMode,
- buffering.mInitialWatermarkMs, buffering.mInitialWatermarkKB,
- buffering.mRebufferingWatermarkLowMs,
- buffering.mRebufferingWatermarkHighMs,
- buffering.mRebufferingWatermarkLowKB,
- buffering.mRebufferingWatermarkHighKB);
+ ALOGV("[%d] setBufferingSettings{%s}",
+ mConnId, buffering.toString().string());
sp<MediaPlayerBase> p = getPlayer();
if (p == 0) return UNKNOWN_ERROR;
return p->setBufferingSettings(buffering);
@@ -995,13 +990,8 @@
if (p == 0) return UNKNOWN_ERROR;
status_t ret = p->getDefaultBufferingSettings(buffering);
if (ret == NO_ERROR) {
- ALOGV("[%d] getDefaultBufferingSettings(%d, %d, %d, %d, %d, %d, %d, %d)",
- mConnId, buffering->mInitialBufferingMode, buffering->mRebufferingMode,
- buffering->mInitialWatermarkMs, buffering->mInitialWatermarkKB,
- buffering->mRebufferingWatermarkLowMs,
- buffering->mRebufferingWatermarkHighMs,
- buffering->mRebufferingWatermarkLowKB,
- buffering->mRebufferingWatermarkHighKB);
+ ALOGV("[%d] getDefaultBufferingSettings{%s}",
+ mConnId, buffering->toString().string());
} else {
ALOGV("[%d] getDefaultBufferingSettings returned %d", mConnId, ret);
}
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index d1d1077..91a2b7b 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -38,11 +38,12 @@
namespace android {
-static int64_t kLowWaterMarkUs = 2000000ll; // 2secs
-static int64_t kHighWaterMarkUs = 5000000ll; // 5secs
-static int64_t kHighWaterMarkRebufferUs = 15000000ll; // 15secs
-static const ssize_t kLowWaterMarkBytes = 40000;
-static const ssize_t kHighWaterMarkBytes = 200000;
+static const int kLowWaterMarkMs = 2000; // 2secs
+static const int kHighWaterMarkMs = 5000; // 5secs
+static const int kHighWaterMarkRebufferMs = 15000; // 15secs
+
+static const int kLowWaterMarkKB = 40;
+static const int kHighWaterMarkKB = 200;
NuPlayer::GenericSource::GenericSource(
const sp<AMessage> ¬ify,
@@ -237,6 +238,16 @@
return OK;
}
+status_t NuPlayer::GenericSource::getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) {
+ mBufferingMonitor->getDefaultBufferingSettings(buffering);
+ return OK;
+}
+
+status_t NuPlayer::GenericSource::setBufferingSettings(const BufferingSettings& buffering) {
+ return mBufferingMonitor->setBufferingSettings(buffering);
+}
+
status_t NuPlayer::GenericSource::startSources() {
// Start the selected A/V tracks now before we start buffering.
// Widevine sources might re-initialize crypto when starting, if we delay
@@ -618,6 +629,12 @@
break;
}
+ case kWhatGetTrackInfo:
+ {
+ onGetTrackInfo(msg);
+ break;
+ }
+
case kWhatSelectTrack:
{
onSelectTrack(msg);
@@ -868,6 +885,34 @@
}
sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const {
+ sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, this);
+ msg->setSize("trackIndex", trackIndex);
+
+ sp<AMessage> response;
+ sp<RefBase> format;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findObject("format", &format));
+ return static_cast<AMessage*>(format.get());
+ } else {
+ return NULL;
+ }
+}
+
+void NuPlayer::GenericSource::onGetTrackInfo(const sp<AMessage>& msg) const {
+ size_t trackIndex;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+
+ sp<AMessage> response = new AMessage;
+ sp<AMessage> format = doGetTrackInfo(trackIndex);
+ response->setObject("format", format);
+
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+}
+
+sp<AMessage> NuPlayer::GenericSource::doGetTrackInfo(size_t trackIndex) const {
size_t trackCount = mSources.size();
if (trackIndex >= trackCount) {
return NULL;
@@ -1435,11 +1480,54 @@
mFirstDequeuedBufferRealUs(-1ll),
mFirstDequeuedBufferMediaUs(-1ll),
mlastDequeuedBufferMediaUs(-1ll) {
+ getDefaultBufferingSettings(&mSettings);
}
NuPlayer::GenericSource::BufferingMonitor::~BufferingMonitor() {
}
+void NuPlayer::GenericSource::BufferingMonitor::getDefaultBufferingSettings(
+ BufferingSettings *buffering /* nonnull */) {
+ buffering->mInitialBufferingMode = BUFFERING_MODE_TIME_ONLY;
+ buffering->mRebufferingMode = BUFFERING_MODE_TIME_THEN_SIZE;
+ buffering->mInitialWatermarkMs = kHighWaterMarkMs;
+ buffering->mRebufferingWatermarkLowMs = kLowWaterMarkMs;
+ buffering->mRebufferingWatermarkHighMs = kHighWaterMarkRebufferMs;
+ buffering->mRebufferingWatermarkLowKB = kLowWaterMarkKB;
+ buffering->mRebufferingWatermarkHighKB = kHighWaterMarkKB;
+
+ ALOGV("BufferingMonitor::getDefaultBufferingSettings{%s}",
+ buffering->toString().string());
+}
+
+status_t NuPlayer::GenericSource::BufferingMonitor::setBufferingSettings(
+ const BufferingSettings &buffering) {
+ ALOGV("BufferingMonitor::setBufferingSettings{%s}",
+ buffering.toString().string());
+
+ Mutex::Autolock _l(mLock);
+ if (buffering.IsSizeBasedBufferingMode(buffering.mInitialBufferingMode)
+ || (buffering.IsTimeBasedBufferingMode(buffering.mRebufferingMode)
+ && buffering.mRebufferingWatermarkLowMs > buffering.mRebufferingWatermarkHighMs)
+ || (buffering.IsSizeBasedBufferingMode(buffering.mRebufferingMode)
+ && buffering.mRebufferingWatermarkLowKB > buffering.mRebufferingWatermarkHighKB)) {
+ return BAD_VALUE;
+ }
+ mSettings = buffering;
+ if (mSettings.mInitialBufferingMode == BUFFERING_MODE_NONE) {
+ mSettings.mInitialWatermarkMs = BufferingSettings::kNoWatermark;
+ }
+ if (!mSettings.IsTimeBasedBufferingMode(mSettings.mRebufferingMode)) {
+ mSettings.mRebufferingWatermarkLowMs = BufferingSettings::kNoWatermark;
+ mSettings.mRebufferingWatermarkHighMs = INT32_MAX;
+ }
+ if (!mSettings.IsSizeBasedBufferingMode(mSettings.mRebufferingMode)) {
+ mSettings.mRebufferingWatermarkLowKB = BufferingSettings::kNoWatermark;
+ mSettings.mRebufferingWatermarkHighKB = INT32_MAX;
+ }
+ return OK;
+}
+
void NuPlayer::GenericSource::BufferingMonitor::prepare(
const sp<NuCachedSource2> &cachedSource,
int64_t durationUs,
@@ -1668,7 +1756,9 @@
stopBufferingIfNecessary_l();
return;
- } else if (cachedDurationUs >= 0ll) {
+ }
+
+ if (cachedDurationUs >= 0ll) {
if (mDurationUs > 0ll) {
int64_t cachedPosUs = getLastReadPosition_l() + cachedDurationUs;
int percentage = 100.0 * cachedPosUs / mDurationUs;
@@ -1679,36 +1769,40 @@
notifyBufferingUpdate_l(percentage);
}
- ALOGV("onPollBuffering_l: cachedDurationUs %.1f sec",
- cachedDurationUs / 1000000.0f);
+ ALOGV("onPollBuffering_l: cachedDurationUs %.1f sec", cachedDurationUs / 1000000.0f);
- if (cachedDurationUs < kLowWaterMarkUs) {
- // Take into account the data cached in downstream components to try to avoid
- // unnecessary pause.
- if (mOffloadAudio && mFirstDequeuedBufferRealUs >= 0) {
- int64_t downStreamCacheUs = mlastDequeuedBufferMediaUs - mFirstDequeuedBufferMediaUs
- - (ALooper::GetNowUs() - mFirstDequeuedBufferRealUs);
- if (downStreamCacheUs > 0) {
- cachedDurationUs += downStreamCacheUs;
+ if (mPrepareBuffering) {
+ if (cachedDurationUs > mSettings.mInitialWatermarkMs * 1000) {
+ stopBufferingIfNecessary_l();
+ }
+ } else if (mSettings.IsTimeBasedBufferingMode(mSettings.mRebufferingMode)) {
+ if (cachedDurationUs < mSettings.mRebufferingWatermarkLowMs * 1000) {
+ // Take into account the data cached in downstream components to try to avoid
+ // unnecessary pause.
+ if (mOffloadAudio && mFirstDequeuedBufferRealUs >= 0) {
+ int64_t downStreamCacheUs =
+ mlastDequeuedBufferMediaUs - mFirstDequeuedBufferMediaUs
+ - (ALooper::GetNowUs() - mFirstDequeuedBufferRealUs);
+ if (downStreamCacheUs > 0) {
+ cachedDurationUs += downStreamCacheUs;
+ }
}
- }
- if (cachedDurationUs < kLowWaterMarkUs) {
- startBufferingIfNecessary_l();
- }
- } else {
- int64_t highWaterMark = mPrepareBuffering ? kHighWaterMarkUs : kHighWaterMarkRebufferUs;
- if (cachedDurationUs > highWaterMark) {
+ if (cachedDurationUs < mSettings.mRebufferingWatermarkLowMs * 1000) {
+ startBufferingIfNecessary_l();
+ }
+ } else if (cachedDurationUs > mSettings.mRebufferingWatermarkHighMs * 1000) {
stopBufferingIfNecessary_l();
}
}
- } else if (cachedDataRemaining >= 0) {
+ } else if (cachedDataRemaining >= 0
+ && mSettings.IsSizeBasedBufferingMode(mSettings.mRebufferingMode)) {
ALOGV("onPollBuffering_l: cachedDataRemaining %zd bytes",
cachedDataRemaining);
- if (cachedDataRemaining < kLowWaterMarkBytes) {
+ if (cachedDataRemaining < (mSettings.mRebufferingWatermarkLowKB << 10)) {
startBufferingIfNecessary_l();
- } else if (cachedDataRemaining > kHighWaterMarkBytes) {
+ } else if (cachedDataRemaining > (mSettings.mRebufferingWatermarkHighKB << 10)) {
stopBufferingIfNecessary_l();
}
}
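
The watermark comparisons above mix units: time marks are stored in milliseconds but compared against microsecond durations, and size marks are stored in kilobytes but compared against byte counts. For reference (settings stands in for the monitor's mSettings member):

    // milliseconds -> microseconds, and kilobytes -> bytes (KB << 10 == KB * 1024)
    const int64_t lowWatermarkUs    = (int64_t)settings.mRebufferingWatermarkLowMs * 1000;
    const ssize_t lowWatermarkBytes = (ssize_t)settings.mRebufferingWatermarkLowKB << 10;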
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index a14056f..e1949f3 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -50,6 +50,10 @@
status_t setDataSource(const sp<DataSource>& dataSource);
+ virtual status_t getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) override;
+ virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
virtual void prepareAsync();
virtual void start();
@@ -119,6 +123,9 @@
public:
explicit BufferingMonitor(const sp<AMessage> ¬ify);
+ void getDefaultBufferingSettings(BufferingSettings *buffering /* nonnull */);
+ status_t setBufferingSettings(const BufferingSettings &buffering);
+
// Set up state.
void prepare(const sp<NuCachedSource2> &cachedSource,
int64_t durationUs,
@@ -167,6 +174,7 @@
mutable Mutex mLock;
+ BufferingSettings mSettings;
bool mOffloadAudio;
int64_t mFirstDequeuedBufferRealUs;
int64_t mFirstDequeuedBufferMediaUs;
@@ -245,6 +253,9 @@
void onGetFormatMeta(const sp<AMessage>& msg) const;
sp<MetaData> doGetFormatMeta(bool audio) const;
+ void onGetTrackInfo(const sp<AMessage>& msg) const;
+ sp<AMessage> doGetTrackInfo(size_t trackIndex) const;
+
void onGetSelectedTrack(const sp<AMessage>& msg) const;
ssize_t doGetSelectedTrack(media_track_type type) const;
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index 51bfad4..05e6201 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -32,6 +32,11 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/Utils.h>
+// default buffer prepare/ready/underflow marks
+static const int kReadyMarkMs = 5000; // 5 seconds
+static const int kPrepareMarkMs = 1500; // 1.5 seconds
+static const int kUnderflowMarkMs = 1000; // 1 second
+
namespace android {
NuPlayer::HTTPLiveSource::HTTPLiveSource(
@@ -49,6 +54,7 @@
mFetchMetaDataGeneration(0),
mHasMetadata(false),
mMetadataSelected(false) {
+ getDefaultBufferingSettings(&mBufferingSettings);
if (headers) {
mExtraHeaders = *headers;
@@ -76,6 +82,42 @@
}
}
+status_t NuPlayer::HTTPLiveSource::getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) {
+ buffering->mInitialBufferingMode = BUFFERING_MODE_TIME_ONLY;
+ buffering->mRebufferingMode = BUFFERING_MODE_TIME_ONLY;
+ buffering->mInitialWatermarkMs = kPrepareMarkMs;
+ buffering->mRebufferingWatermarkLowMs = kUnderflowMarkMs;
+ buffering->mRebufferingWatermarkHighMs = kReadyMarkMs;
+
+ return OK;
+}
+
+status_t NuPlayer::HTTPLiveSource::setBufferingSettings(const BufferingSettings& buffering) {
+ if (buffering.IsSizeBasedBufferingMode(buffering.mInitialBufferingMode)
+ || buffering.IsSizeBasedBufferingMode(buffering.mRebufferingMode)
+ || (buffering.IsTimeBasedBufferingMode(buffering.mRebufferingMode)
+ && buffering.mRebufferingWatermarkLowMs > buffering.mRebufferingWatermarkHighMs)) {
+ return BAD_VALUE;
+ }
+
+ mBufferingSettings = buffering;
+
+ if (mBufferingSettings.mInitialBufferingMode == BUFFERING_MODE_NONE) {
+ mBufferingSettings.mInitialWatermarkMs = BufferingSettings::kNoWatermark;
+ }
+ if (mBufferingSettings.mRebufferingMode == BUFFERING_MODE_NONE) {
+ mBufferingSettings.mRebufferingWatermarkLowMs = BufferingSettings::kNoWatermark;
+ mBufferingSettings.mRebufferingWatermarkHighMs = INT32_MAX;
+ }
+
+ if (mLiveSession != NULL) {
+ mLiveSession->setBufferingSettings(mBufferingSettings);
+ }
+
+ return OK;
+}
+
void NuPlayer::HTTPLiveSource::prepareAsync() {
if (mLiveLooper == NULL) {
mLiveLooper = new ALooper;
@@ -94,6 +136,7 @@
mLiveLooper->registerHandler(mLiveSession);
+ mLiveSession->setBufferingSettings(mBufferingSettings);
mLiveSession->connectAsync(
mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
}
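
The acceptance rule enforced by HTTPLiveSource::setBufferingSettings can be read as a single predicate; a hedged restatement (the helper name is hypothetical):

    static bool acceptableForHls(const BufferingSettings &b) {
        // HLS buffering is time-based only, and the low rebuffering mark may not exceed the high mark.
        return !BufferingSettings::IsSizeBasedBufferingMode(b.mInitialBufferingMode)
            && !BufferingSettings::IsSizeBasedBufferingMode(b.mRebufferingMode)
            && (!BufferingSettings::IsTimeBasedBufferingMode(b.mRebufferingMode)
                    || b.mRebufferingWatermarkLowMs <= b.mRebufferingWatermarkHighMs);
    }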
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index 45fc8c1..2866a6a 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -34,6 +34,10 @@
const char *url,
const KeyedVector<String8, String8> *headers);
+ virtual status_t getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) override;
+ virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
virtual void prepareAsync();
virtual void start();
@@ -80,6 +84,7 @@
int32_t mFetchMetaDataGeneration;
bool mHasMetadata;
bool mMetadataSelected;
+ BufferingSettings mBufferingSettings;
void onSessionNotify(const sp<AMessage> &msg);
void pollForRawData(
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index f8a6a4e..4c576a5 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
+
+#include <inttypes.h>
+
#include <utils/Log.h>
#include "NuPlayer.h"
@@ -311,6 +314,31 @@
msg->post();
}
+status_t NuPlayer::getDefaultBufferingSettings(
+ BufferingSettings *buffering /* nonnull */) {
+ sp<AMessage> msg = new AMessage(kWhatGetDefaultBufferingSettings, this);
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ if (err == OK) {
+ readFromAMessage(response, buffering);
+ }
+ }
+ return err;
+}
+
+status_t NuPlayer::setBufferingSettings(const BufferingSettings& buffering) {
+ sp<AMessage> msg = new AMessage(kWhatSetBufferingSettings, this);
+ writeToAMessage(msg, buffering);
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+ return err;
+}
+
void NuPlayer::prepareAsync() {
(new AMessage(kWhatPrepare, this))->post();
}
@@ -505,6 +533,48 @@
break;
}
+ case kWhatGetDefaultBufferingSettings:
+ {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ ALOGV("kWhatGetDefaultBufferingSettings");
+ BufferingSettings buffering;
+ status_t err = OK;
+ if (mSource != NULL) {
+ err = mSource->getDefaultBufferingSettings(&buffering);
+ } else {
+ err = INVALID_OPERATION;
+ }
+ sp<AMessage> response = new AMessage;
+ if (err == OK) {
+ writeToAMessage(response, buffering);
+ }
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatSetBufferingSettings:
+ {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ ALOGV("kWhatSetBufferingSettings");
+ BufferingSettings buffering;
+ readFromAMessage(msg, &buffering);
+ status_t err = OK;
+ if (mSource != NULL) {
+ err = mSource->setBufferingSettings(buffering);
+ } else {
+ err = INVALID_OPERATION;
+ }
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
case kWhatPrepare:
{
mSource->prepareAsync();
@@ -1287,6 +1357,8 @@
} else {
ALOGW("resume called when renderer is gone or not set");
}
+
+ mLastStartedPlayingTimeNs = systemTime();
}
status_t NuPlayer::onInstantiateSecureDecoders() {
@@ -1342,26 +1414,26 @@
flags |= Renderer::FLAG_REAL_TIME;
}
- sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
- sp<MetaData> videoMeta = mSource->getFormatMeta(false /* audio */);
- if (audioMeta == NULL && videoMeta == NULL) {
+ bool hasAudio = (mSource->getFormat(true /* audio */) != NULL);
+ bool hasVideo = (mSource->getFormat(false /* audio */) != NULL);
+ if (!hasAudio && !hasVideo) {
ALOGE("no metadata for either audio or video source");
mSource->stop();
mSourceStarted = false;
notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_MALFORMED);
return;
}
- ALOGV_IF(audioMeta == NULL, "no metadata for audio source"); // video only stream
+ ALOGV_IF(!hasAudio, "no metadata for audio source"); // video only stream
+
+ sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
if (mAudioSink != NULL) {
streamType = mAudioSink->getAudioStreamType();
}
- sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);
-
mOffloadAudio =
- canOffloadStream(audioMeta, (videoFormat != NULL), mSource->isStreaming(), streamType)
+ canOffloadStream(audioMeta, hasVideo, mSource->isStreaming(), streamType)
&& (mPlaybackSettings.mSpeed == 1.f && mPlaybackSettings.mPitch == 1.f);
if (mOffloadAudio) {
flags |= Renderer::FLAG_OFFLOAD_AUDIO;
@@ -1396,6 +1468,8 @@
mAudioDecoder->setRenderer(mRenderer);
}
+ mLastStartedPlayingTimeNs = systemTime();
+
postScanSources();
}
@@ -1414,6 +1488,16 @@
} else {
ALOGW("pause called when renderer is gone or not set");
}
+
+ sp<NuPlayerDriver> driver = mDriver.promote();
+ if (driver != NULL) {
+ int64_t now = systemTime();
+ int64_t played = now - mLastStartedPlayingTimeNs;
+ ALOGD("played from %" PRId64 " to %" PRId64 " = %" PRId64 ,
+ mLastStartedPlayingTimeNs, now, played);
+
+ driver->notifyMorePlayingTimeUs((played+500)/1000);
+ }
}
bool NuPlayer::audioDecoderStillNeeded() {
@@ -1697,6 +1781,16 @@
notifyListener(MEDIA_SET_VIDEO_SIZE, 0, 0);
return;
}
+ int32_t err = OK;
+ inputFormat->findInt32("err", &err);
+ if (err == -EWOULDBLOCK) {
+ ALOGW("Video meta is not available yet!");
+ return;
+ }
+ if (err != OK) {
+ ALOGW("Something is wrong with video meta!");
+ return;
+ }
int32_t displayWidth, displayHeight;
if (outputFormat != NULL) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index c8b0102..cc8c97a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -50,6 +50,9 @@
void setDataSourceAsync(const sp<DataSource> &source);
+ status_t getDefaultBufferingSettings(BufferingSettings* buffering /* nonnull */);
+ status_t setBufferingSettings(const BufferingSettings& buffering);
+
void prepareAsync();
void setVideoSurfaceTextureAsync(
@@ -137,6 +140,8 @@
kWhatGetTrackInfo = 'gTrI',
kWhatGetSelectedTrack = 'gSel',
kWhatSelectTrack = 'selT',
+ kWhatGetDefaultBufferingSettings = 'gDBS',
+ kWhatSetBufferingSettings = 'sBuS',
};
wp<NuPlayerDriver> mDriver;
@@ -158,6 +163,8 @@
int32_t mVideoDecoderGeneration;
int32_t mRendererGeneration;
+ int64_t mLastStartedPlayingTimeNs;
+
int64_t mPreviousSeekTimeUs;
List<sp<Action> > mDeferredActions;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 6ec79e6..b8bb8fe 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -31,8 +31,14 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
+#include <media/IMediaAnalyticsService.h>
+
+static const int kDumpLockRetries = 50;
+static const int kDumpLockSleepUs = 20000;
+
namespace android {
+
NuPlayerDriver::NuPlayerDriver(pid_t pid)
: mState(STATE_IDLE),
mIsAsyncPrepare(false),
@@ -41,14 +47,20 @@
mDurationUs(-1),
mPositionUs(-1),
mSeekInProgress(false),
+ mPlayingTimeUs(0),
mLooper(new ALooper),
mPlayerFlags(0),
+ mAnalyticsItem(NULL),
mAtEOS(false),
mLooping(false),
mAutoLoop(false) {
- ALOGV("NuPlayerDriver(%p)", this);
+ ALOGD("NuPlayerDriver(%p) created, clientPid(%d)", this, pid);
mLooper->setName("NuPlayerDriver Looper");
+ // set up an analytics record
+ mAnalyticsItem = new MediaAnalyticsItem("nuplayer");
+ mAnalyticsItem->generateSessionID();
+
mLooper->start(
false, /* runOnCallingThread */
true, /* canCallJava */
@@ -63,6 +75,15 @@
NuPlayerDriver::~NuPlayerDriver() {
ALOGV("~NuPlayerDriver(%p)", this);
mLooper->stop();
+
+ // finalize any pending metrics, usually a no-op.
+ finalizeMetrics("destructor");
+ logMetrics("destructor");
+
+ if (mAnalyticsItem != NULL) {
+ delete mAnalyticsItem;
+ mAnalyticsItem = NULL;
+ }
}
status_t NuPlayerDriver::initCheck() {
@@ -183,6 +204,26 @@
return OK;
}
+status_t NuPlayerDriver::getDefaultBufferingSettings(BufferingSettings* buffering) {
+ ALOGV("getDefaultBufferingSettings(%p)", this);
+ Mutex::Autolock autoLock(mLock);
+ if (mState == STATE_IDLE) {
+ return INVALID_OPERATION;
+ }
+
+ return mPlayer->getDefaultBufferingSettings(buffering);
+}
+
+status_t NuPlayerDriver::setBufferingSettings(const BufferingSettings& buffering) {
+ ALOGV("setBufferingSettings(%p)", this);
+ Mutex::Autolock autoLock(mLock);
+ if (mState == STATE_IDLE) {
+ return INVALID_OPERATION;
+ }
+
+ return mPlayer->setBufferingSettings(buffering);
+}
+
status_t NuPlayerDriver::prepare() {
ALOGV("prepare(%p)", this);
Mutex::Autolock autoLock(mLock);
@@ -450,8 +491,103 @@
return OK;
}
+void NuPlayerDriver::finalizeMetrics(const char *where) {
+ if (where == NULL) {
+ where = "unknown";
+ }
+ ALOGD("finalizeMetrics(%p) from %s at state %d", this, where, mState);
+
+ // gather the final stats for this record
+ Vector<sp<AMessage>> trackStats;
+ mPlayer->getStats(&trackStats);
+
+ if (trackStats.size() > 0) {
+ for (size_t i = 0; i < trackStats.size(); ++i) {
+ const sp<AMessage> &stats = trackStats.itemAt(i);
+
+ AString mime;
+ stats->findString("mime", &mime);
+
+ AString name;
+ stats->findString("component-name", &name);
+
+ if (mime.startsWith("video/")) {
+ int32_t width, height;
+ mAnalyticsItem->setCString("video/mime", mime.c_str());
+ if (!name.empty()) {
+ mAnalyticsItem->setCString("video/codec", name.c_str());
+ }
+
+ if (stats->findInt32("width", &width)
+ && stats->findInt32("height", &height)) {
+ mAnalyticsItem->setInt32("wid", width);
+ mAnalyticsItem->setInt32("ht", height);
+ }
+
+ int64_t numFramesTotal = 0;
+ int64_t numFramesDropped = 0;
+ stats->findInt64("frames-total", &numFramesTotal);
+ stats->findInt64("frames-dropped-output", &numFramesDropped);
+
+ mAnalyticsItem->setInt64("frames", numFramesTotal);
+ mAnalyticsItem->setInt64("dropped", numFramesDropped);
+
+
+ } else if (mime.startsWith("audio/")) {
+ mAnalyticsItem->setCString("audio/mime", mime.c_str());
+ if (!name.empty()) {
+ mAnalyticsItem->setCString("audio/codec", name.c_str());
+ }
+ }
+ }
+
+ // getDuration() uses mLock for mutex -- careful where we use it.
+ int duration_ms = -1;
+ getDuration(&duration_ms);
+ if (duration_ms != -1) {
+ mAnalyticsItem->setInt64("duration", duration_ms);
+ }
+
+ if (mPlayingTimeUs > 0) {
+ mAnalyticsItem->setInt64("playing", (mPlayingTimeUs+500)/1000 );
+ }
+ }
+}
+
+
+void NuPlayerDriver::logMetrics(const char *where) {
+ if (where == NULL) {
+ where = "unknown";
+ }
+ ALOGD("logMetrics(%p) from %s at state %d", this, where, mState);
+
+ if (mAnalyticsItem == NULL || mAnalyticsItem->isEnabled() == false) {
+ return;
+ }
+
+ // only bother to log non-empty records
+ if (mAnalyticsItem->count() > 0) {
+
+ mAnalyticsItem->setFinalized(true);
+ mAnalyticsItem->selfrecord();
+
+ // re-init in case we prepare() and start() again.
+ delete mAnalyticsItem;
+ mAnalyticsItem = new MediaAnalyticsItem("nuplayer");
+ if (mAnalyticsItem) {
+ mAnalyticsItem->generateSessionID();
+ }
+ } else {
+ ALOGV("did not have anything to record");
+ }
+}
+
status_t NuPlayerDriver::reset() {
ALOGD("reset(%p) at state %d", this, mState);
+
+ finalizeMetrics("reset");
+ logMetrics("reset");
+
Mutex::Autolock autoLock(mLock);
switch (mState) {
@@ -493,6 +629,7 @@
mDurationUs = -1;
mPositionUs = -1;
mLooping = false;
+ mPlayingTimeUs = 0;
return OK;
}
@@ -624,6 +761,11 @@
mDurationUs = durationUs;
}
+void NuPlayerDriver::notifyMorePlayingTimeUs(int64_t playingUs) {
+ Mutex::Autolock autoLock(mLock);
+ mPlayingTimeUs += playingUs;
+}
+
void NuPlayerDriver::notifySeekComplete() {
ALOGV("notifySeekComplete(%p)", this);
Mutex::Autolock autoLock(mLock);
@@ -657,6 +799,24 @@
AString logString(" NuPlayer\n");
char buf[256] = {0};
+ bool locked = false;
+ for (int i = 0; i < kDumpLockRetries; ++i) {
+ if (mLock.tryLock() == NO_ERROR) {
+ locked = true;
+ break;
+ }
+ usleep(kDumpLockSleepUs);
+ }
+
+ if (locked) {
+ snprintf(buf, sizeof(buf), " state(%d), atEOS(%d), looping(%d), autoLoop(%d)\n",
+ mState, mAtEOS, mLooping, mAutoLoop);
+ mLock.unlock();
+ } else {
+ snprintf(buf, sizeof(buf), " NPD(%p) lock is taken\n", this);
+ }
+ logString.append(buf);
+
for (size_t i = 0; i < trackStats.size(); ++i) {
const sp<AMessage> &stats = trackStats.itemAt(i);
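
The bounded tryLock loop added to dump() keeps dumpsys from blocking indefinitely on a busy player. The same pattern as a stand-alone sketch (hypothetical helper; kDumpLockRetries and kDumpLockSleepUs as defined above):

    // Returns true with 'lock' held on success; the caller must unlock() it.
    static bool tryLockWithTimeout(Mutex &lock, int retries, int sleepUs) {
        for (int i = 0; i < retries; ++i) {
            if (lock.tryLock() == NO_ERROR) return true;
            usleep(sleepUs);
        }
        return false;
    }
    // e.g. if (tryLockWithTimeout(mLock, kDumpLockRetries, kDumpLockSleepUs)) { ... mLock.unlock(); }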
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 317b34c..5bfc539 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -16,6 +16,7 @@
#include <media/MediaPlayerInterface.h>
+#include <media/MediaAnalyticsItem.h>
#include <media/stagefright/foundation/ABase.h>
namespace android {
@@ -43,6 +44,11 @@
virtual status_t setVideoSurfaceTexture(
const sp<IGraphicBufferProducer> &bufferProducer);
+
+ virtual status_t getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) override;
+ virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
virtual status_t prepare();
virtual status_t prepareAsync();
virtual status_t start();
@@ -75,6 +81,7 @@
void notifyResetComplete();
void notifySetSurfaceComplete();
void notifyDuration(int64_t durationUs);
+ void notifyMorePlayingTimeUs(int64_t timeUs);
void notifySeekComplete();
void notifySeekComplete_l();
void notifyListener(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
@@ -112,6 +119,7 @@
int64_t mDurationUs;
int64_t mPositionUs;
bool mSeekInProgress;
+ int64_t mPlayingTimeUs;
// <<<
sp<ALooper> mLooper;
@@ -119,10 +127,15 @@
sp<AudioSink> mAudioSink;
uint32_t mPlayerFlags;
+ MediaAnalyticsItem *mAnalyticsItem;
+
bool mAtEOS;
bool mLooping;
bool mAutoLoop;
+ void finalizeMetrics(const char *where);
+ void logMetrics(const char *where);
+
status_t prepare_l();
status_t start_l();
void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0, const Parcel *in = NULL);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 5197167..0429ef1 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -63,6 +63,10 @@
: mNotify(notify) {
}
+ virtual status_t getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) = 0;
+ virtual status_t setBufferingSettings(const BufferingSettings& buffering) = 0;
+
virtual void prepareAsync() = 0;
virtual void start() = 0;
@@ -77,7 +81,11 @@
// an error or ERROR_END_OF_STREAM if not.
virtual status_t feedMoreTSData() = 0;
+ // Returns a non-NULL format when the specified track exists.
+ // When the format has "err" set to -EWOULDBLOCK, the source needs more time to get valid metadata.
+ // Returns NULL if the specified track doesn't exist or is invalid.
virtual sp<AMessage> getFormat(bool audio);
+
virtual sp<MetaData> getFormatMeta(bool /* audio */) { return NULL; }
virtual sp<MetaData> getFileFormatMeta() const { return NULL; }
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index fb1f31a..9264e49 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -32,11 +32,11 @@
const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
-// Buffer Underflow/Prepare/StartServer/Overflow Marks
-const int64_t NuPlayer::RTSPSource::kUnderflowMarkUs = 1000000ll;
-const int64_t NuPlayer::RTSPSource::kPrepareMarkUs = 3000000ll;
-const int64_t NuPlayer::RTSPSource::kStartServerMarkUs = 5000000ll;
-const int64_t NuPlayer::RTSPSource::kOverflowMarkUs = 10000000ll;
+// Default Buffer Underflow/Prepare/StartServer/Overflow Marks
+static const int kUnderflowMarkMs = 1000; // 1 second
+static const int kPrepareMarkMs = 3000; // 3 seconds
+//static const int kStartServerMarkMs = 5000;
+static const int kOverflowMarkMs = 10000; // 10 seconds
NuPlayer::RTSPSource::RTSPSource(
const sp<AMessage> ¬ify,
@@ -62,6 +62,7 @@
mSeekGeneration(0),
mEOSTimeoutAudio(0),
mEOSTimeoutVideo(0) {
+ getDefaultBufferingSettings(&mBufferingSettings);
if (headers) {
mExtraHeaders = *headers;
@@ -83,6 +84,34 @@
}
}
+status_t NuPlayer::RTSPSource::getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) {
+ buffering->mInitialBufferingMode = BUFFERING_MODE_TIME_ONLY;
+ buffering->mRebufferingMode = BUFFERING_MODE_TIME_ONLY;
+ buffering->mInitialWatermarkMs = kPrepareMarkMs;
+ buffering->mRebufferingWatermarkLowMs = kUnderflowMarkMs;
+ buffering->mRebufferingWatermarkHighMs = kOverflowMarkMs;
+
+ return OK;
+}
+
+status_t NuPlayer::RTSPSource::setBufferingSettings(const BufferingSettings& buffering) {
+ if (mLooper == NULL) {
+ mBufferingSettings = buffering;
+ return OK;
+ }
+
+ sp<AMessage> msg = new AMessage(kWhatSetBufferingSettings, this);
+ writeToAMessage(msg, buffering);
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+ if (err == OK && response != NULL) {
+ CHECK(response->findInt32("err", &err));
+ }
+
+ return err;
+}
+
void NuPlayer::RTSPSource::prepareAsync() {
if (mIsSDP && mHTTPService == NULL) {
notifyPrepared(BAD_VALUE);
@@ -328,7 +357,8 @@
int64_t bufferedDurationUs = src->getBufferedDurationUs(&finalResult);
// isFinished when duration is 0 checks for EOS result only
- if (bufferedDurationUs > kPrepareMarkUs || src->isFinished(/* duration */ 0)) {
+ if (bufferedDurationUs > mBufferingSettings.mInitialWatermarkMs * 1000
+ || src->isFinished(/* duration */ 0)) {
++preparedCount;
}
@@ -336,13 +366,16 @@
++overflowCount;
++finishedCount;
} else {
- if (bufferedDurationUs < kUnderflowMarkUs) {
+ if (bufferedDurationUs < mBufferingSettings.mRebufferingWatermarkLowMs * 1000) {
++underflowCount;
}
- if (bufferedDurationUs > kOverflowMarkUs) {
+ if (bufferedDurationUs > mBufferingSettings.mRebufferingWatermarkHighMs * 1000) {
++overflowCount;
}
- if (bufferedDurationUs < kStartServerMarkUs) {
+ int64_t startServerMarkUs =
+ (mBufferingSettings.mRebufferingWatermarkLowMs
+ + mBufferingSettings.mRebufferingWatermarkHighMs) / 2 * 1000ll;
+ if (bufferedDurationUs < startServerMarkUs) {
++startCount;
}
}
@@ -479,6 +512,36 @@
} else if (msg->what() == kWhatSignalEOS) {
onSignalEOS(msg);
return;
+ } else if (msg->what() == kWhatSetBufferingSettings) {
+ sp<AReplyToken> replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ BufferingSettings buffering;
+ readFromAMessage(msg, &buffering);
+
+ status_t err = OK;
+ if (buffering.IsSizeBasedBufferingMode(buffering.mInitialBufferingMode)
+ || buffering.IsSizeBasedBufferingMode(buffering.mRebufferingMode)
+ || (buffering.mRebufferingWatermarkLowMs > buffering.mRebufferingWatermarkHighMs
+ && buffering.IsTimeBasedBufferingMode(buffering.mRebufferingMode))) {
+ err = BAD_VALUE;
+ } else {
+ if (buffering.mInitialBufferingMode == BUFFERING_MODE_NONE) {
+ buffering.mInitialWatermarkMs = BufferingSettings::kNoWatermark;
+ }
+ if (buffering.mRebufferingMode == BUFFERING_MODE_NONE) {
+ buffering.mRebufferingWatermarkLowMs = BufferingSettings::kNoWatermark;
+ buffering.mRebufferingWatermarkHighMs = INT32_MAX;
+ }
+
+ mBufferingSettings = buffering;
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+
+ return;
}
CHECK_EQ(msg->what(), (int)kWhatNotify);
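
With the default marks above, the derived start-server threshold comes out close to the removed fixed constant:

    startServerMarkUs = (kUnderflowMarkMs + kOverflowMarkMs) / 2 * 1000
                      = (1000 + 10000) / 2 * 1000
                      = 5,500,000 us   // about 5.5 s, vs. the old 5 s kStartServerMarkUs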
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index 363f8bb..0812991 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -40,6 +40,10 @@
uid_t uid = 0,
bool isSDP = false);
+ virtual status_t getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) override;
+ virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
virtual void prepareAsync();
virtual void start();
virtual void stop();
@@ -67,6 +71,7 @@
kWhatPerformSeek = 'seek',
kWhatPollBuffering = 'poll',
kWhatSignalEOS = 'eos ',
+ kWhatSetBufferingSettings = 'sBuS',
};
enum State {
@@ -81,12 +86,6 @@
kFlagIncognito = 1,
};
- // Buffer Prepare/Underflow/Overflow/Resume Marks
- static const int64_t kPrepareMarkUs;
- static const int64_t kUnderflowMarkUs;
- static const int64_t kOverflowMarkUs;
- static const int64_t kStartServerMarkUs;
-
struct TrackInfo {
sp<AnotherPacketSource> mSource;
@@ -110,6 +109,7 @@
bool mBuffering;
bool mInPreparationPhase;
bool mEOSPending;
+ BufferingSettings mBufferingSettings;
sp<ALooper> mLooper;
sp<MyHandler> mHandler;
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
index 7f9f913..fc0803b 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp
@@ -51,6 +51,22 @@
}
}
+status_t NuPlayer::StreamingSource::getDefaultBufferingSettings(
+ BufferingSettings *buffering /* nonnull */) {
+ *buffering = BufferingSettings();
+ return OK;
+}
+
+status_t NuPlayer::StreamingSource::setBufferingSettings(
+ const BufferingSettings &buffering) {
+ if (buffering.mInitialBufferingMode != BUFFERING_MODE_NONE
+ || buffering.mRebufferingMode != BUFFERING_MODE_NONE) {
+ return BAD_VALUE;
+ }
+
+ return OK;
+}
+
void NuPlayer::StreamingSource::prepareAsync() {
if (mLooper == NULL) {
mLooper = new ALooper;
@@ -234,8 +250,7 @@
}
status_t err = convertMetaDataToMessage(meta, &format);
if (err != OK) { // format may have been cleared on error
- format = new AMessage;
- format->setInt32("err", err);
+ return NULL;
}
return format;
}
diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h
index db88c7f..2e1d2b3 100644
--- a/media/libmediaplayerservice/nuplayer/StreamingSource.h
+++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h
@@ -32,6 +32,10 @@
const sp<AMessage> &notify,
const sp<IStreamSource> &source);
+ virtual status_t getDefaultBufferingSettings(
+ BufferingSettings* buffering /* nonnull */) override;
+ virtual status_t setBufferingSettings(const BufferingSettings& buffering) override;
+
virtual void prepareAsync();
virtual void start();
diff --git a/media/libnbaio/NBLog.cpp b/media/libnbaio/NBLog.cpp
index c728e3e..f019df5 100644
--- a/media/libnbaio/NBLog.cpp
+++ b/media/libnbaio/NBLog.cpp
@@ -23,7 +23,7 @@
#include <string.h>
#include <time.h>
#include <new>
-#include <cutils/atomic.h>
+#include <audio_utils/roundup.h>
#include <media/nbaio/NBLog.h>
#include <utils/Log.h>
#include <utils/String8.h>
@@ -74,19 +74,30 @@
// ---------------------------------------------------------------------------
NBLog::Writer::Writer()
- : mSize(0), mShared(NULL), mRear(0), mEnabled(false)
+ : mShared(NULL), mFifo(NULL), mFifoWriter(NULL), mEnabled(false)
{
}
-NBLog::Writer::Writer(size_t size, void *shared)
- : mSize(roundup(size)), mShared((Shared *) shared), mRear(0), mEnabled(mShared != NULL)
+NBLog::Writer::Writer(void *shared, size_t size)
+ : mShared((Shared *) shared),
+ mFifo(mShared != NULL ?
+ new audio_utils_fifo(size, sizeof(uint8_t),
+ mShared->mBuffer, mShared->mRear, NULL /*throttlesFront*/) : NULL),
+ mFifoWriter(mFifo != NULL ? new audio_utils_fifo_writer(*mFifo) : NULL),
+ mEnabled(mFifoWriter != NULL)
{
}
-NBLog::Writer::Writer(size_t size, const sp<IMemory>& iMemory)
- : mSize(roundup(size)), mShared(iMemory != 0 ? (Shared *) iMemory->pointer() : NULL),
- mIMemory(iMemory), mRear(0), mEnabled(mShared != NULL)
+NBLog::Writer::Writer(const sp<IMemory>& iMemory, size_t size)
+ : Writer(iMemory != 0 ? (Shared *) iMemory->pointer() : NULL, size)
{
+ mIMemory = iMemory;
+}
+
+NBLog::Writer::~Writer()
+{
+ delete mFifoWriter;
+ delete mFifo;
}
void NBLog::Writer::log(const char *string)
@@ -95,8 +106,8 @@
return;
}
size_t length = strlen(string);
- if (length > 255) {
- length = 255;
+ if (length > Entry::kMaxLength) {
+ length = Entry::kMaxLength;
}
log(EVENT_STRING, string, length);
}
@@ -117,7 +128,7 @@
if (!mEnabled) {
return;
}
- char buffer[256];
+ char buffer[Entry::kMaxLength + 1 /*NUL*/];
int length = vsnprintf(buffer, sizeof(buffer), fmt, ap);
if (length >= (int) sizeof(buffer)) {
length = sizeof(buffer) - 1;
@@ -153,7 +164,10 @@
if (!mEnabled) {
return;
}
- if (data == NULL || length > 255) {
+ if (data == NULL || length > Entry::kMaxLength) {
+ // TODO Perhaps it makes sense to display truncated data or at least a
+ // message that the data is too long? The current behavior can
+ // confuse a programmer debugging their code.
return;
}
switch (event) {
@@ -177,26 +191,16 @@
log(entry->mEvent, entry->mData, entry->mLength);
return;
}
- size_t rear = mRear & (mSize - 1);
- size_t written = mSize - rear; // written = number of bytes that have been written so far
- size_t need = entry->mLength + 3; // mEvent, mLength, data[length], mLength
- // need = number of bytes remaining to write
- if (written > need) {
- written = need;
- }
- size_t i;
+ size_t need = entry->mLength + Entry::kOverhead; // mEvent, mLength, data[length], mLength
+ // need = number of bytes remaining to write
+
// FIXME optimize this using memcpy for the data part of the Entry.
// The Entry could have a method copyTo(ptr, offset, size) to optimize the copy.
- for (i = 0; i < written; ++i) {
- mShared->mBuffer[rear + i] = entry->readAt(i);
+ uint8_t temp[Entry::kMaxLength + Entry::kOverhead];
+ for (size_t i = 0; i < need; i++) {
+ temp[i] = entry->readAt(i);
}
- if (rear + written == mSize && (need -= written) > 0) {
- for (i = 0; i < need; ++i) {
- mShared->mBuffer[i] = entry->readAt(written + i);
- }
- written += need;
- }
- android_atomic_release_store(mRear += written, &mShared->mRear);
+ mFifoWriter->write(temp, need);
}
bool NBLog::Writer::isEnabled() const
@@ -218,8 +222,8 @@
{
}
-NBLog::LockedWriter::LockedWriter(size_t size, void *shared)
- : Writer(size, shared)
+NBLog::LockedWriter::LockedWriter(void *shared, size_t size)
+ : Writer(shared, size)
{
}
@@ -273,60 +277,59 @@
// ---------------------------------------------------------------------------
-NBLog::Reader::Reader(size_t size, const void *shared)
- : mSize(roundup(size)), mShared((const Shared *) shared), mFront(0)
+NBLog::Reader::Reader(const void *shared, size_t size)
+ : mShared((/*const*/ Shared *) shared), /*mIMemory*/
+ mFd(-1), mIndent(0),
+ mFifo(mShared != NULL ?
+ new audio_utils_fifo(size, sizeof(uint8_t),
+ mShared->mBuffer, mShared->mRear, NULL /*throttlesFront*/) : NULL),
+ mFifoReader(mFifo != NULL ? new audio_utils_fifo_reader(*mFifo) : NULL)
{
}
-NBLog::Reader::Reader(size_t size, const sp<IMemory>& iMemory)
- : mSize(roundup(size)), mShared(iMemory != 0 ? (const Shared *) iMemory->pointer() : NULL),
- mIMemory(iMemory), mFront(0)
+NBLog::Reader::Reader(const sp<IMemory>& iMemory, size_t size)
+ : Reader(iMemory != 0 ? (Shared *) iMemory->pointer() : NULL, size)
{
+ mIMemory = iMemory;
+}
+
+NBLog::Reader::~Reader()
+{
+ delete mFifoReader;
+ delete mFifo;
}
void NBLog::Reader::dump(int fd, size_t indent)
{
- int32_t rear = android_atomic_acquire_load(&mShared->mRear);
- size_t avail = rear - mFront;
- if (avail == 0) {
+ if (mFifoReader == NULL) {
return;
}
- size_t lost = 0;
- if (avail > mSize) {
- lost = avail - mSize;
- mFront += lost;
- avail = mSize;
- }
- size_t remaining = avail; // remaining = number of bytes left to read
- size_t front = mFront & (mSize - 1);
- size_t read = mSize - front; // read = number of bytes that have been read so far
- if (read > remaining) {
- read = remaining;
- }
// make a copy to avoid race condition with writer
- uint8_t *copy = new uint8_t[avail];
- // copy first part of circular buffer up until the wraparound point
- memcpy(copy, &mShared->mBuffer[front], read);
- if (front + read == mSize) {
- if ((remaining -= read) > 0) {
- // copy second part of circular buffer starting at beginning
- memcpy(&copy[read], mShared->mBuffer, remaining);
- read += remaining;
- // remaining = 0 but not necessary
- }
- }
- mFront += read;
+ size_t capacity = mFifo->capacity();
+
+ // TODO Stack-based allocation of large objects may fail.
+ // Currently the log buffers are a page or two, which should be safe.
+ // But if the log buffers ever get a lot larger,
+ // then change this to allocate from heap when necessary.
+ static size_t kReasonableStackObjectSize = 32768;
+ ALOGW_IF(capacity > kReasonableStackObjectSize, "Stack-based allocation of object may fail");
+ uint8_t copy[capacity];
+
+ size_t lost;
+ ssize_t actual = mFifoReader->read(copy, capacity, NULL /*timeout*/, &lost);
+ ALOG_ASSERT(actual <= capacity);
+ size_t avail = actual > 0 ? (size_t) actual : 0;
size_t i = avail;
Event event;
size_t length;
struct timespec ts;
time_t maxSec = -1;
- while (i >= 3) {
+ while (i >= Entry::kOverhead) {
length = copy[i - 1];
- if (length + 3 > i || copy[i - length - 2] != length) {
+ if (length + Entry::kOverhead > i || copy[i - length - 2] != length) {
break;
}
- event = (Event) copy[i - length - 3];
+ event = (Event) copy[i - length - Entry::kOverhead];
if (event == EVENT_TIMESTAMP) {
if (length != sizeof(struct timespec)) {
// corrupt
@@ -337,7 +340,7 @@
maxSec = ts.tv_sec;
}
}
- i -= length + 3;
+ i -= length + Entry::kOverhead;
}
mFd = fd;
mIndent = indent;
@@ -362,7 +365,7 @@
event = (Event) copy[i];
length = copy[i + 1];
const void *data = &copy[i + 2];
- size_t advance = length + 3;
+ size_t advance = length + Entry::kOverhead;
switch (event) {
case EVENT_STRING:
body.appendFormat("%.*s", (int) length, (const char *) data);
@@ -376,7 +379,7 @@
long deltaTotal = 0;
size_t j = i;
for (;;) {
- j += sizeof(struct timespec) + 3;
+ j += sizeof(struct timespec) + 3 /*Entry::kOverhead?*/;
if (j >= avail || (Event) copy[j] != EVENT_TIMESTAMP) {
break;
}
@@ -398,7 +401,7 @@
deltaTotal += delta;
prevNsec = tsNext.tv_nsec;
}
- size_t n = (j - i) / (sizeof(struct timespec) + 3);
+ size_t n = (j - i) / (sizeof(struct timespec) + 3 /*Entry::kOverhead?*/);
if (deferredTimestamp) {
dumpLine(timestamp, body);
deferredTimestamp = false;
@@ -432,8 +435,6 @@
if (deferredTimestamp) {
dumpLine(timestamp, body);
}
- // FIXME it would be more efficient to put a char mCopy[256] as a member variable of the dumper
- delete[] copy;
}
void NBLog::Reader::dumpLine(const String8& timestamp, String8& body)
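The NBLog rework above keeps the original entry layout while moving the transport to audio_utils_fifo: an entry is an event byte, a length byte, the payload, and a trailing copy of the length (hence Entry::kOverhead == 3); the duplicated trailing length is what lets Reader::dump() walk the copied buffer backwards. A minimal sketch of that layout, with names assumed for illustration (the real Entry type is declared in NBLog.h):

#include <cstdint>
#include <cstdio>
#include <vector>

// Pack one log entry: event, length, payload, then length again so a
// reader can also scan the buffer in reverse.
static std::vector<uint8_t> packEntry(uint8_t event, const void *data, uint8_t length) {
    std::vector<uint8_t> out;
    out.push_back(event);
    out.push_back(length);
    const uint8_t *p = static_cast<const uint8_t *>(data);
    out.insert(out.end(), p, p + length);
    out.push_back(length);
    return out;                       // size == length + 3 (the kOverhead above)
}

int main() {
    const char msg[] = "underrun";    // 8 payload bytes
    std::vector<uint8_t> e = packEntry(1 /* stand-in event code */, msg, sizeof(msg) - 1);
    printf("entry bytes = %zu\n", e.size());   // prints 11
    return 0;
}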
diff --git a/media/liboboe/Android.bp b/media/liboboe/Android.bp
index 0d22e65..bfcc049 100644
--- a/media/liboboe/Android.bp
+++ b/media/liboboe/Android.bp
@@ -24,4 +24,5 @@
name: "liboboe.ndk",
symbol_file: "liboboe.map.txt",
first_version: "26",
+ unversioned_until: "current",
}
diff --git a/media/liboboe/examples/Android.mk b/media/liboboe/examples/Android.mk
new file mode 100644
index 0000000..5053e7d
--- /dev/null
+++ b/media/liboboe/examples/Android.mk
@@ -0,0 +1 @@
+include $(call all-subdir-makefiles)
diff --git a/media/liboboe/examples/write_sine/Android.mk b/media/liboboe/examples/write_sine/Android.mk
new file mode 100644
index 0000000..b56328b
--- /dev/null
+++ b/media/liboboe/examples/write_sine/Android.mk
@@ -0,0 +1,6 @@
+# include $(call all-subdir-makefiles)
+
+# Just include static/ for now.
+LOCAL_PATH := $(call my-dir)
+#include $(LOCAL_PATH)/jni/Android.mk
+include $(LOCAL_PATH)/static/Android.mk
diff --git a/media/liboboe/examples/write_sine/README.md b/media/liboboe/examples/write_sine/README.md
new file mode 100644
index 0000000..9f7ee87
--- /dev/null
+++ b/media/liboboe/examples/write_sine/README.md
@@ -0,0 +1,7 @@
+# cd to this directory
+mkdir -p jni/include/oboe
+ln -s $PLATFORM/frameworks/av/media/liboboe/include/oboe/*.h jni/include/oboe
+ln -s $PLATFORM/out/target/product/$TARGET_PRODUCT/symbols/out/soong/ndk/platforms/android-current/arch-arm64/usr/lib/liboboe.so jni
+$NDK/ndk-build
+adb push libs/arm64-v8a/write_sine_threaded /data
+adb shell /data/write_sine_threaded
diff --git a/media/liboboe/examples/write_sine/jni/Android.mk b/media/liboboe/examples/write_sine/jni/Android.mk
new file mode 100644
index 0000000..51a5a85
--- /dev/null
+++ b/media/liboboe/examples/write_sine/jni/Android.mk
@@ -0,0 +1,35 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE_TAGS := tests
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/liboboe/include
+
+LOCAL_SRC_FILES:= frameworks/av/media/liboboe/src/write_sine.cpp
+LOCAL_SHARED_LIBRARIES := libaudioutils libmedia libtinyalsa \
+ libbinder libcutils libutils
+LOCAL_STATIC_LIBRARIES := libsndfile
+LOCAL_MODULE := write_sine_ndk
+LOCAL_SHARED_LIBRARIES += liboboe_prebuilt
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE_TAGS := tests
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/liboboe/include
+
+LOCAL_SRC_FILES:= frameworks/av/media/liboboe/src/write_sine_threaded.cpp
+LOCAL_SHARED_LIBRARIES := libaudioutils libmedia libtinyalsa \
+ libbinder libcutils libutils
+LOCAL_STATIC_LIBRARIES := libsndfile
+LOCAL_MODULE := write_sine_threaded_ndk
+LOCAL_SHARED_LIBRARIES += liboboe_prebuilt
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := liboboe_prebuilt
+LOCAL_SRC_FILES := liboboe.so
+LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
+include $(PREBUILT_SHARED_LIBRARY)
diff --git a/media/liboboe/examples/write_sine/jni/Application.mk b/media/liboboe/examples/write_sine/jni/Application.mk
new file mode 100644
index 0000000..e74475c
--- /dev/null
+++ b/media/liboboe/examples/write_sine/jni/Application.mk
@@ -0,0 +1,3 @@
+# TODO remove this when we support other architectures
+APP_ABI := arm64-v8a
+APP_CPPFLAGS += -std=c++11
diff --git a/media/liboboe/examples/write_sine/src/SineGenerator.h b/media/liboboe/examples/write_sine/src/SineGenerator.h
new file mode 100644
index 0000000..ade7527
--- /dev/null
+++ b/media/liboboe/examples/write_sine/src/SineGenerator.h
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SINE_GENERATOR_H
+#define SINE_GENERATOR_H
+
+#include <math.h>
+
+class SineGenerator
+{
+public:
+ SineGenerator() {}
+ virtual ~SineGenerator() = default;
+
+ void setup(double frequency, double frameRate) {
+ mFrameRate = frameRate;
+ mPhaseIncrement = frequency * M_PI * 2 / frameRate;
+ }
+
+ void setSweep(double frequencyLow, double frequencyHigh, double seconds) {
+ mPhaseIncrementLow = frequencyLow * M_PI * 2 / mFrameRate;
+ mPhaseIncrementHigh = frequencyHigh * M_PI * 2 / mFrameRate;
+
+ double numFrames = seconds * mFrameRate;
+ mUpScaler = pow((frequencyHigh / frequencyLow), (1.0 / numFrames));
+ mDownScaler = 1.0 / mUpScaler;
+ mGoingUp = true;
+ mSweeping = true;
+ }
+
+ void render(int16_t *buffer, int32_t channelStride, int32_t numFrames) {
+ int sampleIndex = 0;
+ for (int i = 0; i < numFrames; i++) {
+ buffer[sampleIndex] = (int16_t) (32767 * sin(mPhase) * mAmplitude);
+ sampleIndex += channelStride;
+ advancePhase();
+ }
+ }
+ void render(float *buffer, int32_t channelStride, int32_t numFrames) {
+ int sampleIndex = 0;
+ for (int i = 0; i < numFrames; i++) {
+ buffer[sampleIndex] = sin(mPhase) * mAmplitude;
+ sampleIndex += channelStride;
+ advancePhase();
+ }
+ }
+
+private:
+ void advancePhase() {
+ mPhase += mPhaseIncrement;
+ if (mPhase > M_PI * 2) {
+ mPhase -= M_PI * 2;
+ }
+ if (mSweeping) {
+ if (mGoingUp) {
+ mPhaseIncrement *= mUpScaler;
+ if (mPhaseIncrement > mPhaseIncrementHigh) {
+ mGoingUp = false;
+ }
+ } else {
+ mPhaseIncrement *= mDownScaler;
+ if (mPhaseIncrement < mPhaseIncrementLow) {
+ mGoingUp = true;
+ }
+ }
+ }
+ }
+
+ double mAmplitude = 0.01;
+ double mPhase = 0.0;
+ double mPhaseIncrement = 440 * M_PI * 2 / 48000;
+ double mFrameRate = 48000;
+ double mPhaseIncrementLow;
+ double mPhaseIncrementHigh;
+ double mUpScaler = 1.0;
+ double mDownScaler = 1.0;
+ bool mGoingUp = false;
+ bool mSweeping = false;
+};
+
+#endif /* SINE_GENERATOR_H */
+
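The setSweep() math above derives a per-frame geometric scaler, the numFrames-th root of the frequency ratio, so multiplying the phase increment by it once per frame carries the tone from the low frequency to the high one over the requested duration. A small standalone check of that arithmetic, using the 300 Hz to 2000 Hz over 5 s sweep requested by the threaded example later in this change (48 kHz frame rate assumed):

#include <cmath>
#include <cstdio>

int main() {
    const double fLow = 300.0, fHigh = 2000.0;
    const double seconds = 5.0, frameRate = 48000.0;
    const double numFrames = seconds * frameRate;                // 240000 frames
    const double upScaler = pow(fHigh / fLow, 1.0 / numFrames);  // ~1.0000079 per frame
    // After numFrames multiplications the frequency lands on fHigh again.
    printf("upScaler = %.7f, end frequency = %.1f Hz\n",
           upScaler, fLow * pow(upScaler, numFrames));
    return 0;
}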
diff --git a/media/liboboe/examples/write_sine/src/write_sine.cpp b/media/liboboe/examples/write_sine/src/write_sine.cpp
new file mode 100644
index 0000000..084665c
--- /dev/null
+++ b/media/liboboe/examples/write_sine/src/write_sine.cpp
@@ -0,0 +1,207 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Play sine waves using Oboe.
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+#include <oboe/OboeDefinitions.h>
+#include <oboe/OboeAudio.h>
+#include "SineGenerator.h"
+
+#define SAMPLE_RATE 48000
+#define NUM_SECONDS 10
+
+static const char *getSharingModeText(oboe_sharing_mode_t mode) {
+ const char *modeText = "unknown";
+ switch (mode) {
+ case OBOE_SHARING_MODE_EXCLUSIVE:
+ modeText = "EXCLUSIVE";
+ break;
+ case OBOE_SHARING_MODE_LEGACY:
+ modeText = "LEGACY";
+ break;
+ case OBOE_SHARING_MODE_SHARED:
+ modeText = "SHARED";
+ break;
+ case OBOE_SHARING_MODE_PUBLIC_MIX:
+ modeText = "PUBLIC_MIX";
+ break;
+ default:
+ break;
+ }
+ return modeText;
+}
+
+int main(int argc, char **argv)
+{
+ (void)argc; // unused
+
+ oboe_result_t result = OBOE_OK;
+
+ const int requestedSamplesPerFrame = 2;
+ int actualSamplesPerFrame = 0;
+ const int requestedSampleRate = SAMPLE_RATE;
+ int actualSampleRate = 0;
+ const oboe_audio_format_t requestedDataFormat = OBOE_AUDIO_FORMAT_PCM16;
+ oboe_audio_format_t actualDataFormat = OBOE_AUDIO_FORMAT_PCM16;
+
+ const oboe_sharing_mode_t requestedSharingMode = OBOE_SHARING_MODE_EXCLUSIVE;
+ //const oboe_sharing_mode_t requestedSharingMode = OBOE_SHARING_MODE_LEGACY;
+ oboe_sharing_mode_t actualSharingMode = OBOE_SHARING_MODE_LEGACY;
+
+ OboeStreamBuilder oboeBuilder = OBOE_STREAM_BUILDER_NONE;
+ OboeStream oboeStream = OBOE_STREAM_NONE;
+ oboe_stream_state_t state = OBOE_STREAM_STATE_UNINITIALIZED;
+ oboe_size_frames_t framesPerBurst = 0;
+ oboe_size_frames_t framesToPlay = 0;
+ oboe_size_frames_t framesLeft = 0;
+ int32_t xRunCount = 0;
+ int16_t *data = nullptr;
+
+ SineGenerator sineOsc1;
+ SineGenerator sineOsc2;
+
+ // Make printf print immediately so that debug info is not stuck
+ // in a buffer if we hang or crash.
+ setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+
+ printf("%s - Play a sine wave using Oboe\n", argv[0]);
+
+ // Use an OboeStreamBuilder to contain requested parameters.
+ result = Oboe_createStreamBuilder(&oboeBuilder);
+ if (result != OBOE_OK) {
+ goto finish;
+ }
+
+ // Request stream properties.
+ result = OboeStreamBuilder_setSampleRate(oboeBuilder, requestedSampleRate);
+ if (result != OBOE_OK) {
+ goto finish;
+ }
+ result = OboeStreamBuilder_setSamplesPerFrame(oboeBuilder, requestedSamplesPerFrame);
+ if (result != OBOE_OK) {
+ goto finish;
+ }
+ result = OboeStreamBuilder_setFormat(oboeBuilder, requestedDataFormat);
+ if (result != OBOE_OK) {
+ goto finish;
+ }
+ result = OboeStreamBuilder_setSharingMode(oboeBuilder, requestedSharingMode);
+ if (result != OBOE_OK) {
+ goto finish;
+ }
+
+ // Create an OboeStream using the Builder.
+ result = OboeStreamBuilder_openStream(oboeBuilder, &oboeStream);
+ printf("oboeStream 0x%08x\n", oboeStream);
+ if (result != OBOE_OK) {
+ goto finish;
+ }
+
+ result = OboeStream_getState(oboeStream, &state);
+ printf("after open, state = %s\n", Oboe_convertStreamStateToText(state));
+
+ // Check to see what kind of stream we actually got.
+ result = OboeStream_getSampleRate(oboeStream, &actualSampleRate);
+ printf("SampleRate: requested = %d, actual = %d\n", requestedSampleRate, actualSampleRate);
+
+ sineOsc1.setup(440.0, actualSampleRate);
+ sineOsc2.setup(660.0, actualSampleRate);
+
+ result = OboeStream_getSamplesPerFrame(oboeStream, &actualSamplesPerFrame);
+ printf("SamplesPerFrame: requested = %d, actual = %d\n",
+ requestedSamplesPerFrame, actualSamplesPerFrame);
+
+ result = OboeStream_getSharingMode(oboeStream, &actualSharingMode);
+ printf("SharingMode: requested = %s, actual = %s\n",
+ getSharingModeText(requestedSharingMode),
+ getSharingModeText(actualSharingMode));
+
+ // This is the number of frames that are read in one chunk by a DMA controller
+ // or a DSP or a mixer.
+ result = OboeStream_getFramesPerBurst(oboeStream, &framesPerBurst);
+ printf("DataFormat: original framesPerBurst = %d\n",framesPerBurst);
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - OboeStream_getFramesPerBurst() returned %d\n", result);
+ goto finish;
+ }
+ // Some DMA might use very short bursts of 16 frames. We don't need to write such small
+ // buffers. But it helps to use a multiple of the burst size for predictable scheduling.
+ while (framesPerBurst < 48) {
+ framesPerBurst *= 2;
+ }
+ printf("DataFormat: final framesPerBurst = %d\n",framesPerBurst);
+
+ OboeStream_getFormat(oboeStream, &actualDataFormat);
+ printf("DataFormat: requested = %d, actual = %d\n", requestedDataFormat, actualDataFormat);
+ // TODO handle other data formats
+
+ // Allocate a buffer for the audio data.
+ data = new int16_t[framesPerBurst * actualSamplesPerFrame];
+ if (data == nullptr) {
+ fprintf(stderr, "ERROR - could not allocate data buffer\n");
+ result = OBOE_ERROR_NO_MEMORY;
+ goto finish;
+ }
+
+ // Start the stream.
+ printf("call OboeStream_requestStart()\n");
+ result = OboeStream_requestStart(oboeStream);
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - OboeStream_requestStart() returned %d\n", result);
+ goto finish;
+ }
+
+ result = OboeStream_getState(oboeStream, &state);
+ printf("after start, state = %s\n", Oboe_convertStreamStateToText(state));
+
+ // Play for a while.
+ framesToPlay = actualSampleRate * NUM_SECONDS;
+ framesLeft = framesToPlay;
+ while (framesLeft > 0) {
+ // Render sine waves to left and right channels.
+ sineOsc1.render(&data[0], actualSamplesPerFrame, framesPerBurst);
+ if (actualSamplesPerFrame > 1) {
+ sineOsc2.render(&data[1], actualSamplesPerFrame, framesPerBurst);
+ }
+
+ // Write audio data to the stream.
+ oboe_nanoseconds_t timeoutNanos = 100 * OBOE_NANOS_PER_MILLISECOND;
+ int minFrames = (framesToPlay < framesPerBurst) ? framesToPlay : framesPerBurst;
+ int actual = OboeStream_write(oboeStream, data, minFrames, timeoutNanos);
+ if (actual < 0) {
+ fprintf(stderr, "ERROR - OboeStream_write() returned %zd\n", actual);
+ goto finish;
+ } else if (actual == 0) {
+ fprintf(stderr, "WARNING - OboeStream_write() returned %zd\n", actual);
+ goto finish;
+ }
+ framesLeft -= actual;
+ }
+
+ result = OboeStream_getXRunCount(oboeStream, &xRunCount);
+ printf("OboeStream_getXRunCount %d\n", xRunCount);
+
+finish:
+ delete[] data;
+ OboeStream_close(oboeStream);
+ OboeStreamBuilder_delete(oboeBuilder);
+ printf("exiting - Oboe result = %d = %s\n", result, Oboe_convertResultToText(result));
+ return (result != OBOE_OK) ? EXIT_FAILURE : EXIT_SUCCESS;
+}
+
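The burst-size adjustment in the example above doubles framesPerBurst until it reaches at least 48 frames, so each write stays a power-of-two multiple of the hardware burst. The same logic in isolation, as a sketch (the 48-frame floor is the example's own choice, not an Oboe requirement):

#include <cstdio>

static int roundUpBurst(int framesPerBurst) {
    while (framesPerBurst < 48) {
        framesPerBurst *= 2;   // stay a power-of-two multiple of the real burst
    }
    return framesPerBurst;
}

int main() {
    printf("%d %d %d\n", roundUpBurst(16), roundUpBurst(24), roundUpBurst(96));
    // prints: 64 48 96
    return 0;
}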
diff --git a/media/liboboe/examples/write_sine/src/write_sine_threaded.cpp b/media/liboboe/examples/write_sine/src/write_sine_threaded.cpp
new file mode 100644
index 0000000..aedcc6e
--- /dev/null
+++ b/media/liboboe/examples/write_sine/src/write_sine_threaded.cpp
@@ -0,0 +1,315 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Play sine waves using an Oboe background thread.
+
+#include <assert.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <math.h>
+#include <time.h>
+#include <oboe/OboeDefinitions.h>
+#include <oboe/OboeAudio.h>
+#include "SineGenerator.h"
+
+#define NUM_SECONDS 10
+#define SHARING_MODE OBOE_SHARING_MODE_EXCLUSIVE
+//#define SHARING_MODE OBOE_SHARING_MODE_LEGACY
+
+// Prototype for a callback.
+typedef int audio_callback_proc_t(float *outputBuffer,
+ oboe_size_frames_t numFrames,
+ void *userContext);
+
+static void *SimpleOboePlayerThreadProc(void *arg);
+
+/**
+ * Simple wrapper for Oboe that opens a default stream and then calls
+ * a callback function to fill the output buffers.
+ */
+class SimpleOboePlayer {
+public:
+ SimpleOboePlayer() {}
+ virtual ~SimpleOboePlayer() {
+ close();
+ };
+
+ void setSharingMode(oboe_sharing_mode_t requestedSharingMode) {
+ mRequestedSharingMode = requestedSharingMode;
+ }
+
+ /** Also known as "sample rate"
+ */
+ int32_t getFramesPerSecond() {
+ return mFramesPerSecond;
+ }
+
+ int32_t getSamplesPerFrame() {
+ return mSamplesPerFrame;
+ }
+
+ /**
+ * Open a stream
+ */
+ oboe_result_t open(audio_callback_proc_t *proc, void *userContext) {
+ mCallbackProc = proc;
+ mUserContext = userContext;
+ oboe_result_t result = OBOE_OK;
+
+ // Use an OboeStreamBuilder to contain requested parameters.
+ result = Oboe_createStreamBuilder(&mBuilder);
+ if (result != OBOE_OK) return result;
+
+ result = OboeStreamBuilder_setSharingMode(mBuilder, mRequestedSharingMode);
+ if (result != OBOE_OK) goto finish1;
+
+ // Open an OboeStream using the Builder.
+ result = OboeStreamBuilder_openStream(mBuilder, &mStream);
+ if (result != OBOE_OK) goto finish1;
+
+ // Check to see what kind of stream we actually got.
+ result = OboeStream_getSampleRate(mStream, &mFramesPerSecond);
+ printf("open() mFramesPerSecond = %d\n", mFramesPerSecond);
+ if (result != OBOE_OK) goto finish2;
+ result = OboeStream_getSamplesPerFrame(mStream, &mSamplesPerFrame);
+ printf("open() mSamplesPerFrame = %d\n", mSamplesPerFrame);
+ if (result != OBOE_OK) goto finish2;
+
+ // This is the number of frames that are read in one chunk by a DMA controller
+ // or a DSP or a mixer.
+ result = OboeStream_getFramesPerBurst(mStream, &mFramesPerBurst);
+ if (result != OBOE_OK) goto finish2;
+ // Some DMA might use very short bursts. We don't need to write such small
+ // buffers. But it helps to use a multiple of the burst size for predictable scheduling.
+ while (mFramesPerBurst < 48) {
+ mFramesPerBurst *= 2;
+ }
+ printf("DataFormat: final framesPerBurst = %d\n",mFramesPerBurst);
+
+ result = OboeStream_getFormat(mStream, &mDataFormat);
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - OboeStream_getFormat() returned %d\n", result);
+ goto finish2;
+ }
+
+ // Allocate a buffer for the audio data.
+ mOutputBuffer = new float[mFramesPerBurst * mSamplesPerFrame];
+ if (mOutputBuffer == nullptr) {
+ fprintf(stderr, "ERROR - could not allocate data buffer\n");
+ result = OBOE_ERROR_NO_MEMORY;
+ }
+
+ // If needed allocate a buffer for converting float to int16_t.
+ if (mDataFormat == OBOE_AUDIO_FORMAT_PCM16) {
+ mConversionBuffer = new int16_t[mFramesPerBurst * mSamplesPerFrame];
+ if (mConversionBuffer == nullptr) {
+ fprintf(stderr, "ERROR - could not allocate conversion buffer\n");
+ result = OBOE_ERROR_NO_MEMORY;
+ }
+ }
+ return result;
+
+ finish2:
+ OboeStream_close(mStream);
+ mStream = OBOE_HANDLE_INVALID;
+ finish1:
+ OboeStreamBuilder_delete(mBuilder);
+ mBuilder = OBOE_HANDLE_INVALID;
+ return result;
+ }
+
+ oboe_result_t close() {
+ stop();
+ OboeStream_close(mStream);
+ mStream = OBOE_HANDLE_INVALID;
+ OboeStreamBuilder_delete(mBuilder);
+ mBuilder = OBOE_HANDLE_INVALID;
+ delete[] mOutputBuffer;
+ mOutputBuffer = nullptr;
+ delete[] mConversionBuffer;
+ mConversionBuffer = nullptr;
+ return OBOE_OK;
+ }
+
+ // Start a thread that will call the callback proc.
+ oboe_result_t start() {
+ mEnabled = true;
+ oboe_nanoseconds_t nanosPerBurst = mFramesPerBurst * OBOE_NANOS_PER_SECOND
+ / mFramesPerSecond;
+ return OboeStream_createThread(mStream, nanosPerBurst,
+ SimpleOboePlayerThreadProc,
+ this);
+ }
+
+ // Tell the thread to stop.
+ oboe_result_t stop() {
+ mEnabled = false;
+ return OboeStream_joinThread(mStream, nullptr, 2 * OBOE_NANOS_PER_SECOND);
+ }
+
+ oboe_result_t callbackLoop() {
+ int32_t framesWritten = 0;
+ int32_t xRunCount = 0;
+ oboe_result_t result = OBOE_OK;
+
+ result = OboeStream_requestStart(mStream);
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - OboeStream_requestStart() returned %d\n", result);
+ return result;
+ }
+
+ // Give up after several burst periods have passed.
+ const int burstsPerTimeout = 8;
+ oboe_nanoseconds_t nanosPerTimeout =
+ burstsPerTimeout * mFramesPerBurst * OBOE_NANOS_PER_SECOND
+ / mFramesPerSecond;
+
+ while (mEnabled && result >= 0) {
+ // Call application's callback function to fill the buffer.
+ if (mCallbackProc(mOutputBuffer, mFramesPerBurst, mUserContext)) {
+ mEnabled = false;
+ }
+ // if needed, convert from float to int16_t PCM
+ if (mConversionBuffer != nullptr) {
+ int32_t numSamples = mFramesPerBurst * mSamplesPerFrame;
+ for (int i = 0; i < numSamples; i++) {
+ mConversionBuffer[i] = (int16_t)(32767.0 * mOutputBuffer[i]);
+ }
+ // Write the application data to stream.
+ result = OboeStream_write(mStream, mConversionBuffer, mFramesPerBurst, nanosPerTimeout);
+ } else {
+ // Write the application data to stream.
+ result = OboeStream_write(mStream, mOutputBuffer, mFramesPerBurst, nanosPerTimeout);
+ }
+ framesWritten += result;
+ if (result < 0) {
+ fprintf(stderr, "ERROR - OboeStream_write() returned %zd\n", result);
+ }
+ }
+
+ result = OboeStream_getXRunCount(mStream, &xRunCount);
+ printf("OboeStream_getXRunCount %d\n", xRunCount);
+
+ result = OboeStream_requestStop(mStream);
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - OboeStream_requestStart() returned %d\n", result);
+ return result;
+ }
+
+ return result;
+ }
+
+private:
+ OboeStreamBuilder mBuilder = OBOE_HANDLE_INVALID;
+ OboeStream mStream = OBOE_HANDLE_INVALID;
+ float * mOutputBuffer = nullptr;
+ int16_t * mConversionBuffer = nullptr;
+
+ audio_callback_proc_t * mCallbackProc = nullptr;
+ void * mUserContext = nullptr;
+ oboe_sharing_mode_t mRequestedSharingMode = SHARING_MODE;
+ int32_t mSamplesPerFrame = 0;
+ int32_t mFramesPerSecond = 0;
+ oboe_size_frames_t mFramesPerBurst = 0;
+ oboe_audio_format_t mDataFormat = OBOE_AUDIO_FORMAT_PCM16;
+
+ volatile bool mEnabled = false; // used to request that callback exit its loop
+};
+
+static void *SimpleOboePlayerThreadProc(void *arg) {
+ SimpleOboePlayer *player = (SimpleOboePlayer *) arg;
+ player->callbackLoop();
+ return nullptr;
+}
+
+// Application data that gets passed to the callback.
+typedef struct SineThreadedData_s {
+ SineGenerator sineOsc1;
+ SineGenerator sineOsc2;
+ int32_t samplesPerFrame = 0;
+} SineThreadedData_t;
+
+// Callback function that fills the audio output buffer.
+int MyCallbackProc(float *outputBuffer, int32_t numFrames, void *userContext) {
+ SineThreadedData_t *data = (SineThreadedData_t *) userContext;
+ // Render sine waves to left and right channels.
+ data->sineOsc1.render(&outputBuffer[0], data->samplesPerFrame, numFrames);
+ if (data->samplesPerFrame > 1) {
+ data->sineOsc2.render(&outputBuffer[1], data->samplesPerFrame, numFrames);
+ }
+ return 0;
+}
+
+int main(int argc, char **argv)
+{
+ (void)argc; // unused
+ SimpleOboePlayer player;
+ SineThreadedData_t myData;
+ oboe_result_t result;
+
+ // Make printf print immediately so that debug info is not stuck
+ // in a buffer if we hang or crash.
+ setvbuf(stdout, nullptr, _IONBF, (size_t) 0);
+ printf("%s - Play a sine wave using an Oboe Thread\n", argv[0]);
+
+ result = player.open(MyCallbackProc, &myData);
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - player.open() returned %d\n", result);
+ goto error;
+ }
+ printf("player.getFramesPerSecond() = %d\n", player.getFramesPerSecond());
+ printf("player.getSamplesPerFrame() = %d\n", player.getSamplesPerFrame());
+ myData.sineOsc1.setup(440.0, 48000);
+ myData.sineOsc1.setSweep(300.0, 2000.0, 5.0);
+ myData.sineOsc2.setup(660.0, 48000);
+ myData.sineOsc2.setSweep(400.0, 3000.0, 7.0);
+ myData.samplesPerFrame = player.getSamplesPerFrame();
+
+ result = player.start();
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - player.start() returned %d\n", result);
+ goto error;
+ }
+
+ printf("Sleep for %d seconds while audio plays in a background thread.\n", NUM_SECONDS);
+ {
+ // FIXME sleep is not an NDK API
+ // sleep(NUM_SECONDS);
+ const struct timespec request = { .tv_sec = NUM_SECONDS, .tv_nsec = 0 };
+ (void) clock_nanosleep(CLOCK_MONOTONIC, 0 /*flags*/, &request, NULL /*remain*/);
+ }
+ printf("Woke up now.\n");
+
+ result = player.stop();
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - player.stop() returned %d\n", result);
+ goto error;
+ }
+ result = player.close();
+ if (result != OBOE_OK) {
+ fprintf(stderr, "ERROR - player.close() returned %d\n", result);
+ goto error;
+ }
+
+ printf("SUCCESS\n");
+ return EXIT_SUCCESS;
+error:
+ player.close();
+ printf("exiting - Oboe result = %d = %s\n", result, Oboe_convertResultToText(result));
+ return EXIT_FAILURE;
+}
+
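callbackLoop() above converts float samples to PCM16 by scaling by 32767 and casting. The sketch below shows the same conversion with clamping added; the clamping is an illustrative assumption, not part of this change:

#include <cstdint>
#include <cstdio>

static inline int16_t floatToPcm16(float sample) {
    float scaled = sample * 32767.0f;
    if (scaled > 32767.0f) scaled = 32767.0f;     // clamp positive overshoot
    if (scaled < -32768.0f) scaled = -32768.0f;   // clamp negative overshoot
    return (int16_t) scaled;
}

int main() {
    printf("%d %d %d\n", floatToPcm16(0.5f), floatToPcm16(1.2f), floatToPcm16(-2.0f));
    // prints: 16383 32767 -32768
    return 0;
}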
diff --git a/media/liboboe/examples/write_sine/static/Android.mk b/media/liboboe/examples/write_sine/static/Android.mk
new file mode 100644
index 0000000..7c8d17c
--- /dev/null
+++ b/media/liboboe/examples/write_sine/static/Android.mk
@@ -0,0 +1,34 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE_TAGS := examples
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/liboboe/include
+
+# TODO reorganize folders to avoid using ../
+LOCAL_SRC_FILES:= ../src/write_sine.cpp
+
+LOCAL_SHARED_LIBRARIES := libaudioutils libmedia \
+ libbinder libcutils libutils \
+ libaudioclient liblog libtinyalsa
+LOCAL_STATIC_LIBRARIES := liboboe
+
+LOCAL_MODULE := write_sine
+include $(BUILD_EXECUTABLE)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE_TAGS := tests
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/liboboe/include
+
+LOCAL_SRC_FILES:= ../src/write_sine_threaded.cpp
+
+LOCAL_SHARED_LIBRARIES := libaudioutils libmedia \
+ libbinder libcutils libutils \
+ libaudioclient liblog libtinyalsa
+LOCAL_STATIC_LIBRARIES := liboboe
+
+LOCAL_MODULE := write_sine_threaded
+include $(BUILD_EXECUTABLE)
diff --git a/media/liboboe/examples/write_sine/static/README.md b/media/liboboe/examples/write_sine/static/README.md
new file mode 100644
index 0000000..768f4cb
--- /dev/null
+++ b/media/liboboe/examples/write_sine/static/README.md
@@ -0,0 +1,2 @@
+Makefile for building simple command line examples.
+They link with Oboe as a static library.
diff --git a/media/liboboe/include/oboe/OboeAudio.h b/media/liboboe/include/oboe/OboeAudio.h
index 788cf5f..52e3f69 100644
--- a/media/liboboe/include/oboe/OboeAudio.h
+++ b/media/liboboe/include/oboe/OboeAudio.h
@@ -26,7 +26,6 @@
extern "C" {
#endif
-typedef int32_t OboeDeviceId;
typedef oboe_handle_t OboeStream;
typedef oboe_handle_t OboeStreamBuilder;
@@ -92,10 +91,18 @@
*
* By default, the primary device will be used.
*
+ * @param builder handle provided by Oboe_createStreamBuilder()
+ * @param deviceId platform specific identifier or OBOE_DEVICE_UNSPECIFIED
* @return OBOE_OK or a negative error.
*/
OBOE_API oboe_result_t OboeStreamBuilder_setDeviceId(OboeStreamBuilder builder,
- OboeDeviceId deviceId);
+ oboe_device_id_t deviceId);
+/**
+ * Passes back requested device ID.
+ * @return OBOE_OK or a negative error.
+ */
+OBOE_API oboe_result_t OboeStreamBuilder_getDeviceId(OboeStreamBuilder builder,
+ oboe_device_id_t *deviceId);
/**
* Request a sample rate in Hz.
@@ -111,14 +118,14 @@
* @return OBOE_OK or a negative error.
*/
OBOE_API oboe_result_t OboeStreamBuilder_setSampleRate(OboeStreamBuilder builder,
- oboe_sample_rate_t sampleRate);
+ oboe_sample_rate_t sampleRate);
/**
* Returns sample rate in Hertz (samples per second).
* @return OBOE_OK or a negative error.
*/
OBOE_API oboe_result_t OboeStreamBuilder_getSampleRate(OboeStreamBuilder builder,
- oboe_sample_rate_t *sampleRate);
+ oboe_sample_rate_t *sampleRate);
/**
@@ -362,6 +369,8 @@
// High priority audio threads
// ============================================================
+typedef void *(oboe_audio_thread_proc_t)(void *);
+
/**
* Create a thread associated with a stream. The thread has special properties for
* low latency audio performance. This thread can be used to implement a callback API.
@@ -372,13 +381,14 @@
*
* @param stream A stream created using OboeStreamBuilder_openStream().
* @param periodNanoseconds the estimated period at which the audio thread will need to wake up
- * @param start_routine your thread entry point
+ * @param threadProc your thread entry point
* @param arg an argument that will be passed to your thread entry point
* @return OBOE_OK or a negative error.
*/
OBOE_API oboe_result_t OboeStream_createThread(OboeStream stream,
oboe_nanoseconds_t periodNanoseconds,
- void *(*startRoutine)(void *), void *arg);
+ oboe_audio_thread_proc_t *threadProc,
+ void *arg);
/**
* Wait until the thread exits or an error occurs.
@@ -408,7 +418,7 @@
*
* @param stream handle provided by OboeStreamBuilder_openStream()
* @param requestedFrames requested number of frames that can be filled without blocking
- * @return actualFrames receives final number of frames
+ * @param actualFrames receives final number of frames
* @return OBOE_OK or a negative error
*/
OBOE_API oboe_result_t OboeStream_setBufferSize(OboeStream stream,
@@ -475,6 +485,13 @@
/**
* @param stream handle provided by OboeStreamBuilder_openStream()
+ * @param deviceId pointer to variable to receive the actual device ID
+ * @return OBOE_OK or a negative error.
+ */
+OBOE_API oboe_result_t OboeStream_getDeviceId(OboeStream stream, oboe_device_id_t *deviceId);
+
+/**
+ * @param stream handle provided by OboeStreamBuilder_openStream()
* @param format pointer to variable to receive the actual data format
* @return OBOE_OK or a negative error.
*/
@@ -554,4 +571,4 @@
}
#endif
-#endif //NATIVEOBOE_OBOEAUDIO_H
+#endif //OBOE_OBOEAUDIO_H
diff --git a/media/liboboe/include/oboe/OboeDefinitions.h b/media/liboboe/include/oboe/OboeDefinitions.h
index d80c958..9d56a24 100644
--- a/media/liboboe/include/oboe/OboeDefinitions.h
+++ b/media/liboboe/include/oboe/OboeDefinitions.h
@@ -25,6 +25,10 @@
typedef int32_t oboe_handle_t; // negative handles are error codes
typedef int32_t oboe_result_t;
+/**
+ * A platform specific identifier for a device.
+ */
+typedef int32_t oboe_device_id_t;
typedef int32_t oboe_sample_rate_t;
/** This is used for small quantities such as the number of frames in a buffer. */
typedef int32_t oboe_size_frames_t;
@@ -38,7 +42,6 @@
typedef int64_t oboe_position_frames_t;
typedef int64_t oboe_nanoseconds_t;
-typedef uint32_t oboe_audio_format_t;
/**
* This is used to represent a value that has not been specified.
@@ -47,6 +50,7 @@
* and would accept whatever it was given.
*/
#define OBOE_UNSPECIFIED 0
+#define OBOE_DEVICE_UNSPECIFIED ((oboe_device_id_t) -1)
#define OBOE_NANOS_PER_MICROSECOND ((int64_t)1000)
#define OBOE_NANOS_PER_MILLISECOND (OBOE_NANOS_PER_MICROSECOND * 1000)
#define OBOE_MILLIS_PER_SECOND 1000
@@ -60,60 +64,15 @@
OBOE_DIRECTION_COUNT // This should always be last.
};
-enum oboe_datatype_t {
- OBOE_AUDIO_DATATYPE_INT16,
- OBOE_AUDIO_DATATYPE_INT32,
- OBOE_AUDIO_DATATYPE_INT824,
- OBOE_AUDIO_DATATYPE_UINT8,
- OBOE_AUDIO_DATATYPE_FLOAT32, // Add new values below.
- OBOE_AUDIO_DATATYPE_COUNT // This should always be last.
+enum oboe_audio_format_t {
+ OBOE_AUDIO_FORMAT_INVALID = -1,
+ OBOE_AUDIO_FORMAT_UNSPECIFIED = 0,
+ OBOE_AUDIO_FORMAT_PCM16, // TODO rename to _PCM_I16
+ OBOE_AUDIO_FORMAT_PCM_FLOAT,
+ OBOE_AUDIO_FORMAT_PCM824, // TODO rename to _PCM_I8_24
+ OBOE_AUDIO_FORMAT_PCM32 // TODO rename to _PCM_I32
};
-enum oboe_content_t {
- OBOE_AUDIO_CONTENT_PCM,
- OBOE_AUDIO_CONTENT_MP3,
- OBOE_AUDIO_CONTENT_AAC,
- OBOE_AUDIO_CONTENT_AC3,
- OBOE_AUDIO_CONTENT_EAC3,
- OBOE_AUDIO_CONTENT_DTS,
- OBOE_AUDIO_CONTENT_DTSHD, // Add new values below.
- OBOE_AUDIO_CONTENT_COUNT // This should always be last.
-};
-
-enum oboe_wrapper_t {
- OBOE_AUDIO_WRAPPER_NONE,
- OBOE_AUDIO_WRAPPER_IEC61937, // Add new values below.
- OBOE_AUDIO_WRAPPER_COUNT // This should always be last.
-};
-
-/**
- * Fields packed into oboe_audio_format_t, from most to least significant bits.
- * Invalid:1
- * Reserved:7
- * Wrapper:8
- * Content:8
- * Data Type:8
- */
-#define OBOE_AUDIO_FORMAT(dataType, content, wrapper) \
- ((oboe_audio_format_t)((wrapper << 16) | (content << 8) | dataType))
-
-#define OBOE_AUDIO_FORMAT_RAW(dataType, content) \
- OBOE_AUDIO_FORMAT(dataType, content, OBOE_AUDIO_WRAPPER_NONE)
-
-#define OBOE_AUDIO_FORMAT_DATA_TYPE(format) \
- ((oboe_datatype_t)(format & 0x0FF))
-
-// Define some common formats.
-#define OBOE_AUDIO_FORMAT_PCM16 \
- OBOE_AUDIO_FORMAT_RAW(OBOE_AUDIO_DATATYPE_INT16, OBOE_AUDIO_CONTENT_PCM)
-#define OBOE_AUDIO_FORMAT_PCM_FLOAT \
- OBOE_AUDIO_FORMAT_RAW(OBOE_AUDIO_DATATYPE_FLOAT32, OBOE_AUDIO_CONTENT_PCM)
-#define OBOE_AUDIO_FORMAT_PCM824 \
- OBOE_AUDIO_FORMAT_RAW(OBOE_AUDIO_DATATYPE_INT824, OBOE_AUDIO_CONTENT_PCM)
-#define OBOE_AUDIO_FORMAT_PCM32 \
- OBOE_AUDIO_FORMAT_RAW(OBOE_AUDIO_DATATYPE_INT32, OBOE_AUDIO_CONTENT_PCM)
-#define OBOE_AUDIO_FORMAT_INVALID ((oboe_audio_format_t)-1)
-
enum {
OBOE_OK,
OBOE_ERROR_BASE = -900, // TODO review
diff --git a/media/liboboe/src/Android.mk b/media/liboboe/src/Android.mk
index 7b9a906..59edcb2 100644
--- a/media/liboboe/src/Android.mk
+++ b/media/liboboe/src/Android.mk
@@ -8,28 +8,49 @@
LOCAL_MODULE := liboboe
LOCAL_MODULE_TAGS := optional
+LIBOBOE_DIR := $(TOP)/frameworks/av/media/liboboe
+LIBOBOE_SRC_DIR := $(LIBOBOE_DIR)/src
+
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-utils) \
frameworks/native/include \
system/core/base/include \
frameworks/native/media/liboboe/include/include \
frameworks/av/media/liboboe/include \
+ frameworks/native/include \
+ $(LOCAL_PATH) \
+ $(LOCAL_PATH)/binding \
+ $(LOCAL_PATH)/client \
$(LOCAL_PATH)/core \
- $(LOCAL_PATH)/utility \
- $(LOCAL_PATH)/legacy
+ $(LOCAL_PATH)/fifo \
+ $(LOCAL_PATH)/legacy \
+ $(LOCAL_PATH)/utility
-LOCAL_SRC_FILES += core/AudioStream.cpp
-LOCAL_SRC_FILES += core/AudioStreamBuilder.cpp
-LOCAL_SRC_FILES += core/OboeAudio.cpp
-LOCAL_SRC_FILES += legacy/AudioStreamRecord.cpp
-LOCAL_SRC_FILES += legacy/AudioStreamTrack.cpp
-LOCAL_SRC_FILES += utility/HandleTracker.cpp
-LOCAL_SRC_FILES += utility/OboeUtilities.cpp
+LOCAL_SRC_FILES = \
+ core/AudioStream.cpp \
+ core/AudioStreamBuilder.cpp \
+ core/OboeAudio.cpp \
+ legacy/AudioStreamRecord.cpp \
+ legacy/AudioStreamTrack.cpp \
+ utility/HandleTracker.cpp \
+ utility/OboeUtilities.cpp \
+ fifo/FifoBuffer.cpp \
+ fifo/FifoControllerBase.cpp \
+ client/AudioEndpoint.cpp \
+ client/AudioStreamInternal.cpp \
+ client/IsochronousClockModel.cpp \
+ binding/SharedMemoryParcelable.cpp \
+ binding/SharedRegionParcelable.cpp \
+ binding/RingBufferParcelable.cpp \
+ binding/AudioEndpointParcelable.cpp \
+ binding/OboeStreamRequest.cpp \
+ binding/OboeStreamConfiguration.cpp \
+ binding/IOboeAudioService.cpp
-LOCAL_CFLAGS += -Wno-unused-parameter
-LOCAL_CFLAGS += -Wall -Werror
+LOCAL_CFLAGS += -Wno-unused-parameter -Wall -Werror
+
# By default, all symbols are hidden.
-LOCAL_CFLAGS += -fvisibility=hidden
+# LOCAL_CFLAGS += -fvisibility=hidden
# OBOE_API is used to explicitly export a function or a variable as a visible symbol.
LOCAL_CFLAGS += -DOBOE_API='__attribute__((visibility("default")))'
@@ -47,24 +68,41 @@
system/core/base/include \
frameworks/native/media/liboboe/include/include \
frameworks/av/media/liboboe/include \
+ $(LOCAL_PATH) \
+ $(LOCAL_PATH)/binding \
+ $(LOCAL_PATH)/client \
$(LOCAL_PATH)/core \
- $(LOCAL_PATH)/utility \
- $(LOCAL_PATH)/legacy
+ $(LOCAL_PATH)/fifo \
+ $(LOCAL_PATH)/legacy \
+ $(LOCAL_PATH)/utility
-LOCAL_SRC_FILES += core/AudioStream.cpp
-LOCAL_SRC_FILES += core/AudioStreamBuilder.cpp
-LOCAL_SRC_FILES += core/OboeAudio.cpp
-LOCAL_SRC_FILES += legacy/AudioStreamRecord.cpp
-LOCAL_SRC_FILES += legacy/AudioStreamTrack.cpp
-LOCAL_SRC_FILES += utility/HandleTracker.cpp
-LOCAL_SRC_FILES += utility/OboeUtilities.cpp
+LOCAL_SRC_FILES = core/AudioStream.cpp \
+ core/AudioStreamBuilder.cpp \
+ core/OboeAudio.cpp \
+ legacy/AudioStreamRecord.cpp \
+ legacy/AudioStreamTrack.cpp \
+ utility/HandleTracker.cpp \
+ utility/OboeUtilities.cpp \
+ fifo/FifoBuffer.cpp \
+ fifo/FifoControllerBase.cpp \
+ client/AudioEndpoint.cpp \
+ client/AudioStreamInternal.cpp \
+ client/IsochronousClockModel.cpp \
+ binding/SharedMemoryParcelable.cpp \
+ binding/SharedRegionParcelable.cpp \
+ binding/RingBufferParcelable.cpp \
+ binding/AudioEndpointParcelable.cpp \
+ binding/OboeStreamRequest.cpp \
+ binding/OboeStreamConfiguration.cpp \
+ binding/IOboeAudioService.cpp
-LOCAL_CFLAGS += -Wno-unused-parameter
-LOCAL_CFLAGS += -Wall -Werror
+LOCAL_CFLAGS += -Wno-unused-parameter -Wall -Werror
+
# By default, all symbols are hidden.
-LOCAL_CFLAGS += -fvisibility=hidden
+# LOCAL_CFLAGS += -fvisibility=hidden
# OBOE_API is used to explicitly export a function or a variable as a visible symbol.
LOCAL_CFLAGS += -DOBOE_API='__attribute__((visibility("default")))'
-LOCAL_SHARED_LIBRARIES := libaudioclient liblog libutils
+LOCAL_SHARED_LIBRARIES := libaudioclient liblog libcutils libutils libbinder
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/liboboe/src/binding/AudioEndpointParcelable.cpp b/media/liboboe/src/binding/AudioEndpointParcelable.cpp
new file mode 100644
index 0000000..096a819
--- /dev/null
+++ b/media/liboboe/src/binding/AudioEndpointParcelable.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <sys/mman.h>
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+
+#include "binding/OboeServiceDefinitions.h"
+#include "binding/RingBufferParcelable.h"
+#include "binding/AudioEndpointParcelable.h"
+
+using android::NO_ERROR;
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+using namespace oboe;
+
+/**
+ * Container for information about the message queues plus
+ * general stream information needed by Oboe clients.
+ * It contains no addresses, just sizes, offsets and file descriptors for
+ * shared memory that can be passed through Binder.
+ */
+AudioEndpointParcelable::AudioEndpointParcelable() {}
+
+AudioEndpointParcelable::~AudioEndpointParcelable() {}
+
+/**
+ * Add the file descriptor to the table.
+ * @return index in table or negative error
+ */
+int32_t AudioEndpointParcelable::addFileDescriptor(int fd, int32_t sizeInBytes) {
+ if (mNumSharedMemories >= MAX_SHARED_MEMORIES) {
+ return OBOE_ERROR_OUT_OF_RANGE;
+ }
+ int32_t index = mNumSharedMemories++;
+ mSharedMemories[index].setup(fd, sizeInBytes);
+ return index;
+}
+
+/**
+ * The read and write must be symmetric.
+ */
+status_t AudioEndpointParcelable::writeToParcel(Parcel* parcel) const {
+ parcel->writeInt32(mNumSharedMemories);
+ for (int i = 0; i < mNumSharedMemories; i++) {
+ mSharedMemories[i].writeToParcel(parcel);
+ }
+ mUpMessageQueueParcelable.writeToParcel(parcel);
+ mDownMessageQueueParcelable.writeToParcel(parcel);
+ mUpDataQueueParcelable.writeToParcel(parcel);
+ mDownDataQueueParcelable.writeToParcel(parcel);
+ return NO_ERROR; // TODO check for errors above
+}
+
+status_t AudioEndpointParcelable::readFromParcel(const Parcel* parcel) {
+ parcel->readInt32(&mNumSharedMemories);
+ for (int i = 0; i < mNumSharedMemories; i++) {
+ mSharedMemories[i].readFromParcel(parcel);
+ }
+ mUpMessageQueueParcelable.readFromParcel(parcel);
+ mDownMessageQueueParcelable.readFromParcel(parcel);
+ mUpDataQueueParcelable.readFromParcel(parcel);
+ mDownDataQueueParcelable.readFromParcel(parcel);
+ return NO_ERROR; // TODO check for errors above
+}
+
+oboe_result_t AudioEndpointParcelable::resolve(EndpointDescriptor *descriptor) {
+ // TODO error check
+ mUpMessageQueueParcelable.resolve(mSharedMemories, &descriptor->upMessageQueueDescriptor);
+ mDownMessageQueueParcelable.resolve(mSharedMemories,
+ &descriptor->downMessageQueueDescriptor);
+ mUpDataQueueParcelable.resolve(mSharedMemories, &descriptor->upDataQueueDescriptor);
+ mDownDataQueueParcelable.resolve(mSharedMemories, &descriptor->downDataQueueDescriptor);
+ return OBOE_OK;
+}
+
+oboe_result_t AudioEndpointParcelable::validate() {
+ oboe_result_t result;
+ if (mNumSharedMemories < 0 || mNumSharedMemories >= MAX_SHARED_MEMORIES) {
+ ALOGE("AudioEndpointParcelable invalid mNumSharedMemories = %d", mNumSharedMemories);
+ return OBOE_ERROR_INTERNAL;
+ }
+ for (int i = 0; i < mNumSharedMemories; i++) {
+ result = mSharedMemories[i].validate();
+ if (result != OBOE_OK) {
+ return result;
+ }
+ }
+ if ((result = mUpMessageQueueParcelable.validate()) != OBOE_OK) {
+ ALOGE("AudioEndpointParcelable invalid mUpMessageQueueParcelable = %d", result);
+ return result;
+ }
+ if ((result = mDownMessageQueueParcelable.validate()) != OBOE_OK) {
+ ALOGE("AudioEndpointParcelable invalid mDownMessageQueueParcelable = %d", result);
+ return result;
+ }
+ if ((result = mUpDataQueueParcelable.validate()) != OBOE_OK) {
+ ALOGE("AudioEndpointParcelable invalid mUpDataQueueParcelable = %d", result);
+ return result;
+ }
+ if ((result = mDownDataQueueParcelable.validate()) != OBOE_OK) {
+ ALOGE("AudioEndpointParcelable invalid mDownDataQueueParcelable = %d", result);
+ return result;
+ }
+ return OBOE_OK;
+}
+
+void AudioEndpointParcelable::dump() {
+ ALOGD("AudioEndpointParcelable ======================================= BEGIN");
+ ALOGD("AudioEndpointParcelable mNumSharedMemories = %d", mNumSharedMemories);
+ for (int i = 0; i < mNumSharedMemories; i++) {
+ mSharedMemories[i].dump();
+ }
+ ALOGD("AudioEndpointParcelable mUpMessageQueueParcelable =========");
+ mUpMessageQueueParcelable.dump();
+ ALOGD("AudioEndpointParcelable mDownMessageQueueParcelable =======");
+ mDownMessageQueueParcelable.dump();
+ ALOGD("AudioEndpointParcelable mUpDataQueueParcelable ============");
+ mUpDataQueueParcelable.dump();
+ ALOGD("AudioEndpointParcelable mDownDataQueueParcelable ==========");
+ mDownDataQueueParcelable.dump();
+ ALOGD("AudioEndpointParcelable ======================================= END");
+}
+
diff --git a/media/liboboe/src/binding/AudioEndpointParcelable.h b/media/liboboe/src/binding/AudioEndpointParcelable.h
new file mode 100644
index 0000000..6bdd8a4
--- /dev/null
+++ b/media/liboboe/src/binding/AudioEndpointParcelable.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BINDING_AUDIOENDPOINTPARCELABLE_H
+#define BINDING_AUDIOENDPOINTPARCELABLE_H
+
+#include <stdint.h>
+
+//#include <sys/mman.h>
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+
+#include "binding/OboeServiceDefinitions.h"
+#include "binding/RingBufferParcelable.h"
+
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+namespace oboe {
+
+/**
+ * Container for information about the message queues plus
+ * general stream information needed by Oboe clients.
+ * It contains no addresses, just sizes, offsets and file descriptors for
+ * shared memory that can be passed through Binder.
+ */
+class AudioEndpointParcelable : public Parcelable {
+public:
+ AudioEndpointParcelable();
+ virtual ~AudioEndpointParcelable();
+
+ /**
+ * Add the file descriptor to the table.
+ * @return index in table or negative error
+ */
+ int32_t addFileDescriptor(int fd, int32_t sizeInBytes);
+
+ virtual status_t writeToParcel(Parcel* parcel) const override;
+
+ virtual status_t readFromParcel(const Parcel* parcel) override;
+
+ oboe_result_t resolve(EndpointDescriptor *descriptor);
+
+ oboe_result_t validate();
+
+ void dump();
+
+public: // TODO add getters
+ // Set capacityInFrames to zero if Queue is unused.
+ RingBufferParcelable mUpMessageQueueParcelable; // server to client
+ RingBufferParcelable mDownMessageQueueParcelable; // to server
+ RingBufferParcelable mUpDataQueueParcelable; // eg. record, could share same queue
+ RingBufferParcelable mDownDataQueueParcelable; // eg. playback
+
+private:
+ int32_t mNumSharedMemories = 0;
+ SharedMemoryParcelable mSharedMemories[MAX_SHARED_MEMORIES];
+};
+
+} /* namespace oboe */
+
+#endif //BINDING_AUDIOENDPOINTPARCELABLE_H
diff --git a/media/liboboe/src/binding/IOboeAudioService.cpp b/media/liboboe/src/binding/IOboeAudioService.cpp
new file mode 100644
index 0000000..a3437b2
--- /dev/null
+++ b/media/liboboe/src/binding/IOboeAudioService.cpp
@@ -0,0 +1,296 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <oboe/OboeDefinitions.h>
+
+#include "binding/AudioEndpointParcelable.h"
+#include "binding/OboeStreamRequest.h"
+#include "binding/OboeStreamConfiguration.h"
+#include "binding/IOboeAudioService.h"
+
+namespace android {
+
+/**
+ * This is used by the Oboe Client to talk to the Oboe Service.
+ *
+ * The order of parameters in the Parcels must match with code in OboeAudioService.cpp.
+ */
+class BpOboeAudioService : public BpInterface<IOboeAudioService>
+{
+public:
+ explicit BpOboeAudioService(const sp<IBinder>& impl)
+ : BpInterface<IOboeAudioService>(impl)
+ {
+ }
+
+ virtual oboe_handle_t openStream(oboe::OboeStreamRequest &request,
+ oboe::OboeStreamConfiguration &configuration) override {
+ Parcel data, reply;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ request.writeToParcel(&data);
+ status_t err = remote()->transact(OPEN_STREAM, data, &reply);
+ if (err != NO_ERROR) {
+ return OBOE_ERROR_INTERNAL; // TODO consider another error
+ }
+ // parse reply
+ oboe_handle_t stream;
+ reply.readInt32(&stream);
+ configuration.readFromParcel(&reply);
+ return stream;
+ }
+
+ virtual oboe_result_t closeStream(int32_t streamHandle) override {
+ Parcel data, reply;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ data.writeInt32(streamHandle);
+ status_t err = remote()->transact(CLOSE_STREAM, data, &reply);
+ if (err != NO_ERROR) {
+ return OBOE_ERROR_INTERNAL; // TODO consider another error
+ }
+ // parse reply
+ oboe_result_t res;
+ reply.readInt32(&res);
+ return res;
+ }
+
+ virtual oboe_result_t getStreamDescription(oboe_handle_t streamHandle,
+ AudioEndpointParcelable &parcelable) {
+ Parcel data, reply;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ data.writeInt32(streamHandle);
+ status_t err = remote()->transact(GET_STREAM_DESCRIPTION, data, &reply);
+ if (err != NO_ERROR) {
+ return OBOE_ERROR_INTERNAL; // TODO consider another error
+ }
+ // parse reply
+ parcelable.readFromParcel(&reply);
+ parcelable.dump();
+ oboe_result_t result = parcelable.validate();
+ if (result != OBOE_OK) {
+ return result;
+ }
+ reply.readInt32(&result);
+ return result;
+ }
+
+ // TODO should we wait for a reply?
+ virtual oboe_result_t startStream(oboe_handle_t streamHandle) override {
+ Parcel data, reply;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ data.writeInt32(streamHandle);
+ status_t err = remote()->transact(START_STREAM, data, &reply);
+ if (err != NO_ERROR) {
+ return OBOE_ERROR_INTERNAL; // TODO consider another error
+ }
+ // parse reply
+ oboe_result_t res;
+ reply.readInt32(&res);
+ return res;
+ }
+
+ virtual oboe_result_t pauseStream(oboe_handle_t streamHandle) override {
+ Parcel data, reply;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ data.writeInt32(streamHandle);
+ status_t err = remote()->transact(PAUSE_STREAM, data, &reply);
+ if (err != NO_ERROR) {
+ return OBOE_ERROR_INTERNAL; // TODO consider another error
+ }
+ // parse reply
+ oboe_result_t res;
+ reply.readInt32(&res);
+ return res;
+ }
+
+ virtual oboe_result_t flushStream(oboe_handle_t streamHandle) override {
+ Parcel data, reply;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ data.writeInt32(streamHandle);
+ status_t err = remote()->transact(FLUSH_STREAM, data, &reply);
+ if (err != NO_ERROR) {
+ return OBOE_ERROR_INTERNAL; // TODO consider another error
+ }
+ // parse reply
+ oboe_result_t res;
+ reply.readInt32(&res);
+ return res;
+ }
+
+ virtual void tickle() override { // TODO remove after service thread implemented
+ Parcel data;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ remote()->transact(TICKLE, data, nullptr);
+ }
+
+ virtual oboe_result_t registerAudioThread(oboe_handle_t streamHandle, pid_t clientThreadId,
+ oboe_nanoseconds_t periodNanoseconds)
+ override {
+ Parcel data, reply;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ data.writeInt32(streamHandle);
+ data.writeInt32((int32_t) clientThreadId);
+ data.writeInt64(periodNanoseconds);
+ status_t err = remote()->transact(REGISTER_AUDIO_THREAD, data, &reply);
+ if (err != NO_ERROR) {
+ return OBOE_ERROR_INTERNAL; // TODO consider another error
+ }
+ // parse reply
+ oboe_result_t res;
+ reply.readInt32(&res);
+ return res;
+ }
+
+ virtual oboe_result_t unregisterAudioThread(oboe_handle_t streamHandle, pid_t clientThreadId)
+ override {
+ Parcel data, reply;
+ // send command
+ data.writeInterfaceToken(IOboeAudioService::getInterfaceDescriptor());
+ data.writeInt32(streamHandle);
+ data.writeInt32((int32_t) clientThreadId);
+ status_t err = remote()->transact(UNREGISTER_AUDIO_THREAD, data, &reply);
+ if (err != NO_ERROR) {
+ return OBOE_ERROR_INTERNAL; // TODO consider another error
+ }
+ // parse reply
+ oboe_result_t res;
+ reply.readInt32(&res);
+ return res;
+ }
+
+};
+
+// Implement an interface to the service.
+// This is here so that you don't have to link against the liboboe static library.
+IMPLEMENT_META_INTERFACE(OboeAudioService, "IOboeAudioService");
+
+// The order of parameters in the Parcels must match with code in BpOboeAudioService
+
+status_t BnOboeAudioService::onTransact(uint32_t code, const Parcel& data,
+ Parcel* reply, uint32_t flags) {
+ OboeStream stream;
+ OboeStreamRequest request;
+ OboeStreamConfiguration configuration;
+ pid_t pid;
+ oboe_nanoseconds_t nanoseconds;
+ oboe_result_t result;
+ ALOGV("BnOboeAudioService::onTransact(%i) %i", code, flags);
+ data.checkInterface(this);
+
+ switch(code) {
+ case OPEN_STREAM: {
+ request.readFromParcel(&data);
+ stream = openStream(request, configuration);
+ ALOGD("BnOboeAudioService::onTransact OPEN_STREAM 0x%08X", stream);
+ reply->writeInt32(stream);
+ configuration.writeToParcel(reply);
+ return NO_ERROR;
+ } break;
+
+ case CLOSE_STREAM: {
+ data.readInt32(&stream);
+ ALOGD("BnOboeAudioService::onTransact CLOSE_STREAM 0x%08X", stream);
+ result = closeStream(stream);
+ reply->writeInt32(result);
+ return NO_ERROR;
+ } break;
+
+ case GET_STREAM_DESCRIPTION: {
+ data.readInt32(&stream);
+ ALOGD("BnOboeAudioService::onTransact GET_STREAM_DESCRIPTION 0x%08X", stream);
+ oboe::AudioEndpointParcelable parcelable;
+ result = getStreamDescription(stream, parcelable);
+ if (result != OBOE_OK) {
+ return -1; // FIXME
+ }
+ parcelable.dump();
+ result = parcelable.validate();
+ if (result != OBOE_OK) {
+ return -1; // FIXME
+ }
+ parcelable.writeToParcel(reply);
+ reply->writeInt32(result);
+ return NO_ERROR;
+ } break;
+
+ case START_STREAM: {
+ data.readInt32(&stream);
+ result = startStream(stream);
+ ALOGD("BnOboeAudioService::onTransact START_STREAM 0x%08X, result = %d",
+ stream, result);
+ reply->writeInt32(result);
+ return NO_ERROR;
+ } break;
+
+ case PAUSE_STREAM: {
+ data.readInt32(&stream);
+ result = pauseStream(stream);
+ ALOGD("BnOboeAudioService::onTransact PAUSE_STREAM 0x%08X, result = %d",
+ stream, result);
+ reply->writeInt32(result);
+ return NO_ERROR;
+ } break;
+
+ case FLUSH_STREAM: {
+ data.readInt32(&stream);
+ result = flushStream(stream);
+ ALOGD("BnOboeAudioService::onTransact FLUSH_STREAM 0x%08X, result = %d",
+ stream, result);
+ reply->writeInt32(result);
+ return NO_ERROR;
+ } break;
+
+ case REGISTER_AUDIO_THREAD: {
+ data.readInt32(&stream);
+ data.readInt32(&pid);
+ data.readInt64(&nanoseconds);
+ result = registerAudioThread(stream, pid, nanoseconds);
+ ALOGD("BnOboeAudioService::onTransact REGISTER_AUDIO_THREAD 0x%08X, result = %d",
+ stream, result);
+ reply->writeInt32(result);
+ return NO_ERROR;
+ } break;
+
+ case UNREGISTER_AUDIO_THREAD: {
+ data.readInt32(&stream);
+ data.readInt32(&pid);
+ result = unregisterAudioThread(stream, pid);
+ ALOGD("BnOboeAudioService::onTransact UNREGISTER_AUDIO_THREAD 0x%08X, result = %d",
+ stream, result);
+ reply->writeInt32(result);
+ return NO_ERROR;
+ } break;
+
+ case TICKLE: {
+ ALOGV("BnOboeAudioService::onTransact TICKLE");
+ tickle();
+ return NO_ERROR;
+ } break;
+
+ default:
+ // ALOGW("BnOboeAudioService::onTransact not handled %u", code);
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+} /* namespace android */
diff --git a/media/liboboe/src/binding/IOboeAudioService.h b/media/liboboe/src/binding/IOboeAudioService.h
new file mode 100644
index 0000000..4b4c99c
--- /dev/null
+++ b/media/liboboe/src/binding/IOboeAudioService.h
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BINDING_IOBOEAUDIOSERVICE_H
+#define BINDING_IOBOEAUDIOSERVICE_H
+
+#include <stdint.h>
+#include <utils/RefBase.h>
+#include <binder/TextOutput.h>
+#include <binder/IInterface.h>
+
+#include <oboe/OboeAudio.h>
+
+#include "binding/OboeServiceDefinitions.h"
+#include "binding/AudioEndpointParcelable.h"
+#include "binding/OboeStreamRequest.h"
+#include "binding/OboeStreamConfiguration.h"
+
+//using android::status_t;
+//using android::IInterface;
+//using android::BnInterface;
+
+using oboe::AudioEndpointParcelable;
+using oboe::OboeStreamRequest;
+using oboe::OboeStreamConfiguration;
+
+namespace android {
+
+// Interface (our AIDL) - Shared by server and client
+class IOboeAudioService : public IInterface {
+public:
+
+ DECLARE_META_INTERFACE(OboeAudioService);
+
+ virtual oboe_handle_t openStream(OboeStreamRequest &request,
+ OboeStreamConfiguration &configuration) = 0;
+
+ virtual oboe_result_t closeStream(int32_t streamHandle) = 0;
+
+    /**
+     * Get an immutable description of the in-memory queues
+     * used to communicate with the underlying HAL or Service.
+     */
+ virtual oboe_result_t getStreamDescription(oboe_handle_t streamHandle,
+ AudioEndpointParcelable &parcelable) = 0;
+
+ /**
+ * Start the flow of data.
+ */
+ virtual oboe_result_t startStream(oboe_handle_t streamHandle) = 0;
+
+ /**
+ * Stop the flow of data such that start() can resume without loss of data.
+ */
+ virtual oboe_result_t pauseStream(oboe_handle_t streamHandle) = 0;
+
+ /**
+ * Discard any data held by the underlying HAL or Service.
+ */
+ virtual oboe_result_t flushStream(oboe_handle_t streamHandle) = 0;
+
+ /**
+ * Manage the specified thread as a low latency audio thread.
+ */
+ virtual oboe_result_t registerAudioThread(oboe_handle_t streamHandle, pid_t clientThreadId,
+ oboe_nanoseconds_t periodNanoseconds) = 0;
+
+ virtual oboe_result_t unregisterAudioThread(oboe_handle_t streamHandle,
+ pid_t clientThreadId) = 0;
+
+ /**
+ * Poke server instead of running a background thread.
+ * Cooperative multi-tasking for early development only.
+ * TODO remove tickle() when service has its own thread.
+ */
+ virtual void tickle() { };
+
+};
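+
+// Illustrative usage sketch (not part of this change). A client obtains the proxy
+// through the service manager and drives a stream with the calls below; this mirrors
+// AudioStreamInternal.cpp and assumes <binder/IServiceManager.h> is included:
+//
+//     sp<IBinder> binder = defaultServiceManager()->getService(String16("OboeAudioService"));
+//     sp<IOboeAudioService> service = interface_cast<IOboeAudioService>(binder);
+//     OboeStreamRequest request;              // filled in by the caller
+//     OboeStreamConfiguration configuration;  // filled in by the service
+//     oboe_handle_t handle = service->openStream(request, configuration);
+//     AudioEndpointParcelable parcelable;
+//     service->getStreamDescription(handle, parcelable);
+//     service->startStream(handle);
+//     // ... stream data ...
+//     service->pauseStream(handle);
+//     service->closeStream(handle);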
+
+class BnOboeAudioService : public BnInterface<IOboeAudioService> {
+public:
+ virtual status_t onTransact(uint32_t code, const Parcel& data,
+ Parcel* reply, uint32_t flags = 0);
+
+};
+
+} /* namespace android */
+
+#endif //BINDING_IOBOEAUDIOSERVICE_H
diff --git a/media/liboboe/src/binding/OboeServiceDefinitions.h b/media/liboboe/src/binding/OboeServiceDefinitions.h
new file mode 100644
index 0000000..ad00fe2
--- /dev/null
+++ b/media/liboboe/src/binding/OboeServiceDefinitions.h
@@ -0,0 +1,102 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BINDING_OBOESERVICEDEFINITIONS_H
+#define BINDING_OBOESERVICEDEFINITIONS_H
+
+#include <stdint.h>
+#include <utils/RefBase.h>
+#include <binder/TextOutput.h>
+#include <binder/IInterface.h>
+
+#include <oboe/OboeAudio.h>
+
+using android::NO_ERROR;
+using android::IBinder;
+
+namespace android {
+
+enum oboe_commands_t {
+ OPEN_STREAM = IBinder::FIRST_CALL_TRANSACTION,
+ CLOSE_STREAM,
+ GET_STREAM_DESCRIPTION,
+ START_STREAM,
+ PAUSE_STREAM,
+ FLUSH_STREAM,
+ REGISTER_AUDIO_THREAD,
+ UNREGISTER_AUDIO_THREAD,
+ TICKLE
+};
+
+} // namespace android
+
+namespace oboe {
+
+enum oboe_commands_t {
+ OPEN_STREAM = IBinder::FIRST_CALL_TRANSACTION,
+ CLOSE_STREAM,
+ GET_STREAM_DESCRIPTION,
+ START_STREAM,
+ PAUSE_STREAM,
+ FLUSH_STREAM,
+ REGISTER_AUDIO_THREAD,
+ UNREGISTER_AUDIO_THREAD,
+ TICKLE
+};
+
+// TODO Expand this to include all the open parameters.
+typedef struct OboeServiceStreamInfo_s {
+ int32_t deviceId;
+ int32_t samplesPerFrame; // number of channels
+ oboe_sample_rate_t sampleRate;
+ oboe_audio_format_t audioFormat;
+} OboeServiceStreamInfo;
+
+// This must be a fixed width so it can be in shared memory.
+enum RingbufferFlags : uint32_t {
+ NONE = 0,
+ RATE_ISOCHRONOUS = 0x0001,
+ RATE_ASYNCHRONOUS = 0x0002,
+ COHERENCY_DMA = 0x0004,
+ COHERENCY_ACQUIRE_RELEASE = 0x0008,
+ COHERENCY_AUTO = 0x0010,
+};
+
+// This is not passed through Binder.
+// Client side code will convert Binder data and fill this descriptor.
+typedef struct RingBufferDescriptor_s {
+ uint8_t* dataAddress; // offset from read or write block
+ int64_t* writeCounterAddress;
+ int64_t* readCounterAddress;
+ int32_t bytesPerFrame; // index is in frames
+ int32_t framesPerBurst; // for ISOCHRONOUS queues
+ int32_t capacityInFrames; // zero if unused
+ RingbufferFlags flags;
+} RingBufferDescriptor;
+
+// This is not passed through Binder.
+// Client side code will convert Binder data and fill this descriptor.
+typedef struct EndpointDescriptor_s {
+ // Set capacityInFrames to zero if Queue is unused.
+ RingBufferDescriptor upMessageQueueDescriptor; // server to client
+ RingBufferDescriptor downMessageQueueDescriptor; // client to server
+    RingBufferDescriptor upDataQueueDescriptor;      // e.g. record
+    RingBufferDescriptor downDataQueueDescriptor;    // e.g. playback
+} EndpointDescriptor;
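+
+// Illustrative note: client code fills an EndpointDescriptor from the Binder
+// parcelable after the open, e.g. as in AudioStreamInternal::open():
+//
+//     EndpointDescriptor descriptor;
+//     AudioEndpointParcelable parcelable;
+//     // ... parcelable is read from the service reply ...
+//     parcelable.resolve(&descriptor); // maps shared memory and fills the addresses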
+
+} // namespace oboe
+
+#endif //BINDING_OBOESERVICEDEFINITIONS_H
diff --git a/media/liboboe/src/binding/OboeServiceMessage.h b/media/liboboe/src/binding/OboeServiceMessage.h
new file mode 100644
index 0000000..aa13571
--- /dev/null
+++ b/media/liboboe/src/binding/OboeServiceMessage.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_OBOE_SERVICE_MESSAGE_H
+#define OBOE_OBOE_SERVICE_MESSAGE_H
+
+#include <stdint.h>
+
+#include <oboe/OboeDefinitions.h>
+
+namespace oboe {
+
+// TODO move this to an "include" folder for the service.
+
+struct OboeMessageTimestamp {
+ oboe_position_frames_t position;
+ int64_t deviceOffset; // add to client position to get device position
+ oboe_nanoseconds_t timestamp;
+};
+
+typedef enum oboe_service_event_e : uint32_t {
+ OBOE_SERVICE_EVENT_STARTED,
+ OBOE_SERVICE_EVENT_PAUSED,
+ OBOE_SERVICE_EVENT_FLUSHED,
+ OBOE_SERVICE_EVENT_CLOSED,
+ OBOE_SERVICE_EVENT_DISCONNECTED
+} oboe_service_event_t;
+
+struct OboeMessageEvent {
+ oboe_service_event_t event;
+ int32_t data1;
+ int64_t data2;
+};
+
+typedef struct OboeServiceMessage_s {
+ enum class code : uint32_t {
+ NOTHING,
+ TIMESTAMP,
+ EVENT,
+ };
+
+ code what;
+ union {
+ OboeMessageTimestamp timestamp;
+ OboeMessageEvent event;
+ };
+} OboeServiceMessage;
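+
+// Illustrative sketch: the client drains these messages from the "up" command queue
+// (see AudioEndpoint::readUpCommand()) and dispatches on 'what'; 'endpoint' is a
+// hypothetical AudioEndpoint that has already been configured:
+//
+//     OboeServiceMessage message;
+//     while (endpoint.readUpCommand(&message) > 0) {
+//         switch (message.what) {
+//             case OboeServiceMessage::code::TIMESTAMP:
+//                 // use message.timestamp.position and message.timestamp.timestamp
+//                 break;
+//             case OboeServiceMessage::code::EVENT:
+//                 // use message.event.event, e.g. OBOE_SERVICE_EVENT_STARTED
+//                 break;
+//             default:
+//                 break;
+//         }
+//     }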
+
+
+} /* namespace oboe */
+
+#endif //OBOE_OBOE_SERVICE_MESSAGE_H
diff --git a/media/liboboe/src/binding/OboeStreamConfiguration.cpp b/media/liboboe/src/binding/OboeStreamConfiguration.cpp
new file mode 100644
index 0000000..4b8b5b2
--- /dev/null
+++ b/media/liboboe/src/binding/OboeStreamConfiguration.cpp
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <sys/mman.h>
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+
+#include <oboe/OboeDefinitions.h>
+
+#include "binding/OboeStreamConfiguration.h"
+
+using android::NO_ERROR;
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+using namespace oboe;
+
+OboeStreamConfiguration::OboeStreamConfiguration() {}
+OboeStreamConfiguration::~OboeStreamConfiguration() {}
+
+status_t OboeStreamConfiguration::writeToParcel(Parcel* parcel) const {
+ parcel->writeInt32(mDeviceId);
+ parcel->writeInt32(mSampleRate);
+ parcel->writeInt32(mSamplesPerFrame);
+ parcel->writeInt32((int32_t) mAudioFormat);
+ return NO_ERROR; // TODO check for errors above
+}
+
+status_t OboeStreamConfiguration::readFromParcel(const Parcel* parcel) {
+ int32_t temp;
+ parcel->readInt32(&mDeviceId);
+ parcel->readInt32(&mSampleRate);
+ parcel->readInt32(&mSamplesPerFrame);
+ parcel->readInt32(&temp);
+ mAudioFormat = (oboe_audio_format_t) temp;
+ return NO_ERROR; // TODO check for errors above
+}
+
+oboe_result_t OboeStreamConfiguration::validate() {
+ // Validate results of the open.
+ if (mSampleRate < 0 || mSampleRate >= 8 * 48000) { // TODO review limits
+ ALOGE("OboeStreamConfiguration.validate(): invalid sampleRate = %d", mSampleRate);
+ return OBOE_ERROR_INTERNAL;
+ }
+
+ if (mSamplesPerFrame < 1 || mSamplesPerFrame >= 32) { // TODO review limits
+ ALOGE("OboeStreamConfiguration.validate() invalid samplesPerFrame = %d", mSamplesPerFrame);
+ return OBOE_ERROR_INTERNAL;
+ }
+
+ switch (mAudioFormat) {
+ case OBOE_AUDIO_FORMAT_PCM16:
+ case OBOE_AUDIO_FORMAT_PCM_FLOAT:
+ case OBOE_AUDIO_FORMAT_PCM824:
+ case OBOE_AUDIO_FORMAT_PCM32:
+ break;
+ default:
+ ALOGE("OboeStreamConfiguration.validate() invalid audioFormat = %d", mAudioFormat);
+ return OBOE_ERROR_INTERNAL;
+ }
+ return OBOE_OK;
+}
+
+void OboeStreamConfiguration::dump() {
+ ALOGD("OboeStreamConfiguration mSampleRate = %d -----", mSampleRate);
+ ALOGD("OboeStreamConfiguration mSamplesPerFrame = %d", mSamplesPerFrame);
+ ALOGD("OboeStreamConfiguration mAudioFormat = %d", (int)mAudioFormat);
+}
diff --git a/media/liboboe/src/binding/OboeStreamConfiguration.h b/media/liboboe/src/binding/OboeStreamConfiguration.h
new file mode 100644
index 0000000..6bc1924
--- /dev/null
+++ b/media/liboboe/src/binding/OboeStreamConfiguration.h
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BINDING_OBOE_STREAM_CONFIGURATION_H
+#define BINDING_OBOE_STREAM_CONFIGURATION_H
+
+#include <stdint.h>
+
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+#include <oboe/OboeDefinitions.h>
+
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+namespace oboe {
+
+class OboeStreamConfiguration : public Parcelable {
+public:
+ OboeStreamConfiguration();
+ virtual ~OboeStreamConfiguration();
+
+ oboe_device_id_t getDeviceId() const {
+ return mDeviceId;
+ }
+
+ void setDeviceId(oboe_device_id_t deviceId) {
+ mDeviceId = deviceId;
+ }
+
+ oboe_sample_rate_t getSampleRate() const {
+ return mSampleRate;
+ }
+
+ void setSampleRate(oboe_sample_rate_t sampleRate) {
+ mSampleRate = sampleRate;
+ }
+
+ int32_t getSamplesPerFrame() const {
+ return mSamplesPerFrame;
+ }
+
+ void setSamplesPerFrame(int32_t samplesPerFrame) {
+ mSamplesPerFrame = samplesPerFrame;
+ }
+
+ oboe_audio_format_t getAudioFormat() const {
+ return mAudioFormat;
+ }
+
+ void setAudioFormat(oboe_audio_format_t audioFormat) {
+ mAudioFormat = audioFormat;
+ }
+
+ virtual status_t writeToParcel(Parcel* parcel) const override;
+
+ virtual status_t readFromParcel(const Parcel* parcel) override;
+
+ oboe_result_t validate();
+
+ void dump();
+
+protected:
+ oboe_device_id_t mDeviceId = OBOE_DEVICE_UNSPECIFIED;
+ oboe_sample_rate_t mSampleRate = OBOE_UNSPECIFIED;
+ int32_t mSamplesPerFrame = OBOE_UNSPECIFIED;
+ oboe_audio_format_t mAudioFormat = OBOE_AUDIO_FORMAT_UNSPECIFIED;
+};
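+
+// Illustrative sketch (hypothetical values): a configuration is filled with the
+// setters, validated, and round-tripped through a Parcel with the symmetric
+// writeToParcel()/readFromParcel() pair:
+//
+//     OboeStreamConfiguration configuration;
+//     configuration.setSampleRate(48000);
+//     configuration.setSamplesPerFrame(2);
+//     configuration.setAudioFormat(OBOE_AUDIO_FORMAT_PCM16);
+//     if (configuration.validate() == OBOE_OK) {
+//         Parcel parcel;
+//         configuration.writeToParcel(&parcel);
+//         parcel.setDataPosition(0);
+//         OboeStreamConfiguration copy;
+//         copy.readFromParcel(&parcel);
+//     }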
+
+} /* namespace oboe */
+
+#endif //BINDING_OBOE_STREAM_CONFIGURATION_H
diff --git a/media/liboboe/src/binding/OboeStreamRequest.cpp b/media/liboboe/src/binding/OboeStreamRequest.cpp
new file mode 100644
index 0000000..5d521d0
--- /dev/null
+++ b/media/liboboe/src/binding/OboeStreamRequest.cpp
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <sys/mman.h>
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+
+#include <oboe/OboeDefinitions.h>
+
+#include "binding/OboeStreamConfiguration.h"
+#include "binding/OboeStreamRequest.h"
+
+using android::NO_ERROR;
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+using namespace oboe;
+
+OboeStreamRequest::OboeStreamRequest()
+ : mConfiguration()
+ {}
+
+OboeStreamRequest::~OboeStreamRequest() {}
+
+status_t OboeStreamRequest::writeToParcel(Parcel* parcel) const {
+ parcel->writeInt32((int32_t) mUserId);
+ parcel->writeInt32((int32_t) mProcessId);
+ mConfiguration.writeToParcel(parcel);
+ return NO_ERROR; // TODO check for errors above
+}
+
+status_t OboeStreamRequest::readFromParcel(const Parcel* parcel) {
+ int32_t temp;
+ parcel->readInt32(&temp);
+ mUserId = (uid_t) temp;
+ parcel->readInt32(&temp);
+ mProcessId = (pid_t) temp;
+ mConfiguration.readFromParcel(parcel);
+ return NO_ERROR; // TODO check for errors above
+}
+
+oboe_result_t OboeStreamRequest::validate() {
+ return mConfiguration.validate();
+}
+
+void OboeStreamRequest::dump() {
+ ALOGD("OboeStreamRequest mUserId = %d -----", mUserId);
+ ALOGD("OboeStreamRequest mProcessId = %d", mProcessId);
+ mConfiguration.dump();
+}
diff --git a/media/liboboe/src/binding/OboeStreamRequest.h b/media/liboboe/src/binding/OboeStreamRequest.h
new file mode 100644
index 0000000..aab3c97
--- /dev/null
+++ b/media/liboboe/src/binding/OboeStreamRequest.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BINDING_OBOE_STREAM_REQUEST_H
+#define BINDING_OBOE_STREAM_REQUEST_H
+
+#include <stdint.h>
+
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+#include <oboe/OboeDefinitions.h>
+
+#include "binding/OboeStreamConfiguration.h"
+
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+namespace oboe {
+
+class OboeStreamRequest : public Parcelable {
+public:
+ OboeStreamRequest();
+ virtual ~OboeStreamRequest();
+
+ uid_t getUserId() const {
+ return mUserId;
+ }
+
+ void setUserId(uid_t userId) {
+ mUserId = userId;
+ }
+
+ pid_t getProcessId() const {
+ return mProcessId;
+ }
+
+ void setProcessId(pid_t processId) {
+ mProcessId = processId;
+ }
+
+ OboeStreamConfiguration &getConfiguration() {
+ return mConfiguration;
+ }
+
+ virtual status_t writeToParcel(Parcel* parcel) const override;
+
+ virtual status_t readFromParcel(const Parcel* parcel) override;
+
+ oboe_result_t validate();
+
+ void dump();
+
+protected:
+ OboeStreamConfiguration mConfiguration;
+ uid_t mUserId;
+ pid_t mProcessId;
+};
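+
+// Illustrative sketch: the client fills a request before calling
+// IOboeAudioService::openStream(), as AudioStreamInternal::open() does
+// (the literal values below are hypothetical):
+//
+//     OboeStreamRequest request;
+//     request.setUserId(getuid());
+//     request.setProcessId(getpid());
+//     request.getConfiguration().setSampleRate(48000);
+//     request.getConfiguration().setSamplesPerFrame(2);
+//     request.getConfiguration().setAudioFormat(OBOE_AUDIO_FORMAT_PCM16);
+//     request.dump();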
+
+} /* namespace oboe */
+
+#endif //BINDING_OBOE_STREAM_REQUEST_H
diff --git a/media/liboboe/src/binding/RingBufferParcelable.cpp b/media/liboboe/src/binding/RingBufferParcelable.cpp
new file mode 100644
index 0000000..f097655
--- /dev/null
+++ b/media/liboboe/src/binding/RingBufferParcelable.cpp
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <binder/Parcelable.h>
+
+#include "binding/OboeServiceDefinitions.h"
+#include "binding/SharedRegionParcelable.h"
+#include "binding/RingBufferParcelable.h"
+
+using namespace oboe;
+
+RingBufferParcelable::RingBufferParcelable() {}
+RingBufferParcelable::~RingBufferParcelable() {}
+
+// TODO This assumes that all three use the same SharedMemoryParcelable
+void RingBufferParcelable::setupMemory(int32_t sharedMemoryIndex,
+ int32_t dataMemoryOffset,
+ int32_t dataSizeInBytes,
+ int32_t readCounterOffset,
+ int32_t writeCounterOffset,
+ int32_t counterSizeBytes) {
+ mReadCounterParcelable.setup(sharedMemoryIndex, readCounterOffset, counterSizeBytes);
+ mWriteCounterParcelable.setup(sharedMemoryIndex, writeCounterOffset, counterSizeBytes);
+ mDataParcelable.setup(sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes);
+}
+
+void RingBufferParcelable::setupMemory(int32_t sharedMemoryIndex,
+ int32_t dataMemoryOffset,
+ int32_t dataSizeInBytes) {
+ mReadCounterParcelable.setup(sharedMemoryIndex, 0, 0);
+ mWriteCounterParcelable.setup(sharedMemoryIndex, 0, 0);
+ mDataParcelable.setup(sharedMemoryIndex, dataMemoryOffset, dataSizeInBytes);
+}
+
+int32_t RingBufferParcelable::getBytesPerFrame() {
+ return mBytesPerFrame;
+}
+
+void RingBufferParcelable::setBytesPerFrame(int32_t bytesPerFrame) {
+ mBytesPerFrame = bytesPerFrame;
+}
+
+int32_t RingBufferParcelable::getFramesPerBurst() {
+ return mFramesPerBurst;
+}
+
+void RingBufferParcelable::setFramesPerBurst(int32_t framesPerBurst) {
+ mFramesPerBurst = framesPerBurst;
+}
+
+int32_t RingBufferParcelable::getCapacityInFrames() {
+ return mCapacityInFrames;
+}
+
+void RingBufferParcelable::setCapacityInFrames(int32_t capacityInFrames) {
+ mCapacityInFrames = capacityInFrames;
+}
+
+/**
+ * writeToParcel() and readFromParcel() must stay symmetric:
+ * fields are written and read in the same order.
+ */
+status_t RingBufferParcelable::writeToParcel(Parcel* parcel) const {
+ parcel->writeInt32(mCapacityInFrames);
+ if (mCapacityInFrames > 0) {
+ parcel->writeInt32(mBytesPerFrame);
+ parcel->writeInt32(mFramesPerBurst);
+ parcel->writeInt32(mFlags);
+ mReadCounterParcelable.writeToParcel(parcel);
+ mWriteCounterParcelable.writeToParcel(parcel);
+ mDataParcelable.writeToParcel(parcel);
+ }
+ return NO_ERROR; // TODO check for errors above
+}
+
+status_t RingBufferParcelable::readFromParcel(const Parcel* parcel) {
+ parcel->readInt32(&mCapacityInFrames);
+ if (mCapacityInFrames > 0) {
+ parcel->readInt32(&mBytesPerFrame);
+ parcel->readInt32(&mFramesPerBurst);
+ parcel->readInt32((int32_t *)&mFlags);
+ mReadCounterParcelable.readFromParcel(parcel);
+ mWriteCounterParcelable.readFromParcel(parcel);
+ mDataParcelable.readFromParcel(parcel);
+ }
+ return NO_ERROR; // TODO check for errors above
+}
+
+oboe_result_t RingBufferParcelable::resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor) {
+ oboe_result_t result;
+
+ result = mReadCounterParcelable.resolve(memoryParcels,
+ (void **) &descriptor->readCounterAddress);
+ if (result != OBOE_OK) {
+ return result;
+ }
+
+ result = mWriteCounterParcelable.resolve(memoryParcels,
+ (void **) &descriptor->writeCounterAddress);
+ if (result != OBOE_OK) {
+ return result;
+ }
+
+ result = mDataParcelable.resolve(memoryParcels, (void **) &descriptor->dataAddress);
+ if (result != OBOE_OK) {
+ return result;
+ }
+
+ descriptor->bytesPerFrame = mBytesPerFrame;
+ descriptor->framesPerBurst = mFramesPerBurst;
+ descriptor->capacityInFrames = mCapacityInFrames;
+ descriptor->flags = mFlags;
+ return OBOE_OK;
+}
+
+oboe_result_t RingBufferParcelable::validate() {
+ oboe_result_t result;
+ if (mCapacityInFrames < 0 || mCapacityInFrames >= 32 * 1024) {
+ ALOGE("RingBufferParcelable invalid mCapacityInFrames = %d", mCapacityInFrames);
+ return OBOE_ERROR_INTERNAL;
+ }
+ if (mBytesPerFrame < 0 || mBytesPerFrame >= 256) {
+ ALOGE("RingBufferParcelable invalid mBytesPerFrame = %d", mBytesPerFrame);
+ return OBOE_ERROR_INTERNAL;
+ }
+ if (mFramesPerBurst < 0 || mFramesPerBurst >= 1024) {
+ ALOGE("RingBufferParcelable invalid mFramesPerBurst = %d", mFramesPerBurst);
+ return OBOE_ERROR_INTERNAL;
+ }
+ if ((result = mReadCounterParcelable.validate()) != OBOE_OK) {
+ ALOGE("RingBufferParcelable invalid mReadCounterParcelable = %d", result);
+ return result;
+ }
+ if ((result = mWriteCounterParcelable.validate()) != OBOE_OK) {
+ ALOGE("RingBufferParcelable invalid mWriteCounterParcelable = %d", result);
+ return result;
+ }
+ if ((result = mDataParcelable.validate()) != OBOE_OK) {
+ ALOGE("RingBufferParcelable invalid mDataParcelable = %d", result);
+ return result;
+ }
+ return OBOE_OK;
+}
+
+
+void RingBufferParcelable::dump() {
+ ALOGD("RingBufferParcelable mCapacityInFrames = %d ---------", mCapacityInFrames);
+ if (mCapacityInFrames > 0) {
+ ALOGD("RingBufferParcelable mBytesPerFrame = %d", mBytesPerFrame);
+ ALOGD("RingBufferParcelable mFramesPerBurst = %d", mFramesPerBurst);
+ ALOGD("RingBufferParcelable mFlags = %u", mFlags);
+ mReadCounterParcelable.dump();
+ mWriteCounterParcelable.dump();
+ mDataParcelable.dump();
+ }
+}
diff --git a/media/liboboe/src/binding/RingBufferParcelable.h b/media/liboboe/src/binding/RingBufferParcelable.h
new file mode 100644
index 0000000..9bb695a
--- /dev/null
+++ b/media/liboboe/src/binding/RingBufferParcelable.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BINDING_RINGBUFFER_PARCELABLE_H
+#define BINDING_RINGBUFFER_PARCELABLE_H
+
+#include <stdint.h>
+
+#include <binder/Parcelable.h>
+
+#include "binding/OboeServiceDefinitions.h"
+#include "binding/SharedRegionParcelable.h"
+
+namespace oboe {
+
+class RingBufferParcelable : public Parcelable {
+public:
+ RingBufferParcelable();
+ virtual ~RingBufferParcelable();
+
+ // TODO This assumes that all three use the same SharedMemoryParcelable
+ void setupMemory(int32_t sharedMemoryIndex,
+ int32_t dataMemoryOffset,
+ int32_t dataSizeInBytes,
+ int32_t readCounterOffset,
+ int32_t writeCounterOffset,
+ int32_t counterSizeBytes);
+
+ void setupMemory(int32_t sharedMemoryIndex,
+ int32_t dataMemoryOffset,
+ int32_t dataSizeInBytes);
+
+ int32_t getBytesPerFrame();
+
+ void setBytesPerFrame(int32_t bytesPerFrame);
+
+ int32_t getFramesPerBurst();
+
+ void setFramesPerBurst(int32_t framesPerBurst);
+
+ int32_t getCapacityInFrames();
+
+ void setCapacityInFrames(int32_t capacityInFrames);
+
+ /**
+     * writeToParcel() and readFromParcel() must stay symmetric:
+     * fields are written and read in the same order.
+ */
+ virtual status_t writeToParcel(Parcel* parcel) const override;
+
+ virtual status_t readFromParcel(const Parcel* parcel) override;
+
+ oboe_result_t resolve(SharedMemoryParcelable *memoryParcels, RingBufferDescriptor *descriptor);
+
+ oboe_result_t validate();
+
+ void dump();
+
+private:
+ SharedRegionParcelable mReadCounterParcelable;
+ SharedRegionParcelable mWriteCounterParcelable;
+ SharedRegionParcelable mDataParcelable;
+ int32_t mBytesPerFrame = 0; // index is in frames
+ int32_t mFramesPerBurst = 0; // for ISOCHRONOUS queues
+ int32_t mCapacityInFrames = 0; // zero if unused
+ RingbufferFlags mFlags = RingbufferFlags::NONE;
+};
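+
+// Illustrative sketch (hypothetical offsets, one shared memory at index 0): the
+// writer of this parcelable describes a queue before parceling it; the offsets must
+// match the counters and data laid out in that shared memory:
+//
+//     RingBufferParcelable ringBuffer;
+//     ringBuffer.setupMemory(0,                 // sharedMemoryIndex
+//                            128,               // dataMemoryOffset
+//                            2048,              // dataSizeInBytes
+//                            0,                 // readCounterOffset
+//                            64,                // writeCounterOffset
+//                            sizeof(int64_t));  // counterSizeBytes
+//     ringBuffer.setBytesPerFrame(8);
+//     ringBuffer.setFramesPerBurst(64);
+//     ringBuffer.setCapacityInFrames(256);      // 256 * 8 bytes = dataSizeInBytes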
+
+} /* namespace oboe */
+
+#endif //BINDING_RINGBUFFER_PARCELABLE_H
diff --git a/media/liboboe/src/binding/SharedMemoryParcelable.cpp b/media/liboboe/src/binding/SharedMemoryParcelable.cpp
new file mode 100644
index 0000000..5b739c0
--- /dev/null
+++ b/media/liboboe/src/binding/SharedMemoryParcelable.cpp
@@ -0,0 +1,107 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <sys/mman.h>
+#include <oboe/OboeDefinitions.h>
+
+#include <binder/Parcelable.h>
+
+#include "binding/SharedMemoryParcelable.h"
+
+using android::NO_ERROR;
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+using namespace oboe;
+
+SharedMemoryParcelable::SharedMemoryParcelable() {}
+SharedMemoryParcelable::~SharedMemoryParcelable() {}
+
+void SharedMemoryParcelable::setup(int fd, int32_t sizeInBytes) {
+ mFd = fd;
+ mSizeInBytes = sizeInBytes;
+}
+
+status_t SharedMemoryParcelable::writeToParcel(Parcel* parcel) const {
+ parcel->writeInt32(mSizeInBytes);
+ if (mSizeInBytes > 0) {
+ parcel->writeDupFileDescriptor(mFd);
+ }
+ return NO_ERROR; // TODO check for errors above
+}
+
+status_t SharedMemoryParcelable::readFromParcel(const Parcel* parcel) {
+ parcel->readInt32(&mSizeInBytes);
+ if (mSizeInBytes > 0) {
+ mFd = dup(parcel->readFileDescriptor());
+ }
+ return NO_ERROR; // TODO check for errors above
+}
+
+// TODO Add code to call munmap() when the shared memory is no longer needed.
+
+oboe_result_t SharedMemoryParcelable::resolve(int32_t offsetInBytes, int32_t sizeInBytes,
+ void **regionAddressPtr) {
+ if (offsetInBytes < 0) {
+ ALOGE("SharedMemoryParcelable illegal offsetInBytes = %d", offsetInBytes);
+ return OBOE_ERROR_OUT_OF_RANGE;
+ } else if ((offsetInBytes + sizeInBytes) > mSizeInBytes) {
+ ALOGE("SharedMemoryParcelable out of range, offsetInBytes = %d, "
+ "sizeInBytes = %d, mSizeInBytes = %d",
+ offsetInBytes, sizeInBytes, mSizeInBytes);
+ return OBOE_ERROR_OUT_OF_RANGE;
+ }
+ if (mResolvedAddress == nullptr) {
+        mResolvedAddress = (uint8_t *) mmap(nullptr, mSizeInBytes, PROT_READ|PROT_WRITE,
+                                         MAP_SHARED, mFd, 0);
+        // mmap() reports failure by returning MAP_FAILED, not nullptr.
+        if (mResolvedAddress == MAP_FAILED) {
+            ALOGE("SharedMemoryParcelable mmap failed for fd = %d", mFd);
+            mResolvedAddress = nullptr;
+            return OBOE_ERROR_INTERNAL;
+        }
+ }
+ *regionAddressPtr = mResolvedAddress + offsetInBytes;
+ ALOGD("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
+ ALOGD("SharedMemoryParcelable offset by %d, *regionAddressPtr = %p",
+ offsetInBytes, *regionAddressPtr);
+ return OBOE_OK;
+}
+
+int32_t SharedMemoryParcelable::getSizeInBytes() {
+ return mSizeInBytes;
+}
+
+oboe_result_t SharedMemoryParcelable::validate() {
+ if (mSizeInBytes < 0 || mSizeInBytes >= MAX_MMAP_SIZE) {
+ ALOGE("SharedMemoryParcelable invalid mSizeInBytes = %d", mSizeInBytes);
+ return OBOE_ERROR_INTERNAL;
+ }
+ if (mSizeInBytes > 0) {
+ if (mFd == -1) {
+ ALOGE("SharedMemoryParcelable uninitialized mFd = %d", mFd);
+ return OBOE_ERROR_INTERNAL;
+ }
+ }
+ return OBOE_OK;
+}
+
+void SharedMemoryParcelable::dump() {
+ ALOGD("SharedMemoryParcelable mFd = %d", mFd);
+ ALOGD("SharedMemoryParcelable mSizeInBytes = %d", mSizeInBytes);
+ ALOGD("SharedMemoryParcelable mResolvedAddress = %p", mResolvedAddress);
+}
diff --git a/media/liboboe/src/binding/SharedMemoryParcelable.h b/media/liboboe/src/binding/SharedMemoryParcelable.h
new file mode 100644
index 0000000..9585779
--- /dev/null
+++ b/media/liboboe/src/binding/SharedMemoryParcelable.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BINDING_SHAREDMEMORYPARCELABLE_H
+#define BINDING_SHAREDMEMORYPARCELABLE_H
+
+#include <stdint.h>
+
+#include <sys/mman.h>
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+namespace oboe {
+
+// Arbitrary limits for sanity checks. TODO remove after debugging.
+#define MAX_SHARED_MEMORIES (32)
+#define MAX_MMAP_OFFSET (32 * 1024)
+#define MAX_MMAP_SIZE (32 * 1024)
+
+/**
+ * This is a parcelable description of a shared memory referenced by a file descriptor.
+ * It may be divided into several regions.
+ */
+class SharedMemoryParcelable : public Parcelable {
+public:
+ SharedMemoryParcelable();
+ virtual ~SharedMemoryParcelable();
+
+ void setup(int fd, int32_t sizeInBytes);
+
+ virtual status_t writeToParcel(Parcel* parcel) const override;
+
+ virtual status_t readFromParcel(const Parcel* parcel) override;
+
+ oboe_result_t resolve(int32_t offsetInBytes, int32_t sizeInBytes, void **regionAddressPtr);
+
+ int32_t getSizeInBytes();
+
+ oboe_result_t validate();
+
+ void dump();
+
+protected:
+ int mFd = -1;
+ int32_t mSizeInBytes = 0;
+ uint8_t *mResolvedAddress = nullptr;
+};
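+
+// Illustrative sketch: 'fd' stands for a shared memory file descriptor obtained
+// elsewhere (e.g. ashmem); the sizes and offsets are hypothetical:
+//
+//     SharedMemoryParcelable memory;
+//     memory.setup(fd, 4096);
+//     void *address = nullptr;
+//     if (memory.resolve(128, 2048, &address) == OBOE_OK) {
+//         // 'address' now points 128 bytes into the mapped region
+//     }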
+
+} /* namespace oboe */
+
+#endif //BINDING_SHAREDMEMORYPARCELABLE_H
diff --git a/media/liboboe/src/binding/SharedRegionParcelable.cpp b/media/liboboe/src/binding/SharedRegionParcelable.cpp
new file mode 100644
index 0000000..86ce8f3
--- /dev/null
+++ b/media/liboboe/src/binding/SharedRegionParcelable.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+#include <sys/mman.h>
+#include <binder/Parcelable.h>
+
+#include <oboe/OboeDefinitions.h>
+
+#include "binding/SharedMemoryParcelable.h"
+#include "binding/SharedRegionParcelable.h"
+
+using android::NO_ERROR;
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+using namespace oboe;
+
+SharedRegionParcelable::SharedRegionParcelable() {}
+SharedRegionParcelable::~SharedRegionParcelable() {}
+
+void SharedRegionParcelable::setup(int32_t sharedMemoryIndex,
+ int32_t offsetInBytes,
+ int32_t sizeInBytes) {
+ mSharedMemoryIndex = sharedMemoryIndex;
+ mOffsetInBytes = offsetInBytes;
+ mSizeInBytes = sizeInBytes;
+}
+
+status_t SharedRegionParcelable::writeToParcel(Parcel* parcel) const {
+ parcel->writeInt32(mSizeInBytes);
+ if (mSizeInBytes > 0) {
+ parcel->writeInt32(mSharedMemoryIndex);
+ parcel->writeInt32(mOffsetInBytes);
+ }
+ return NO_ERROR; // TODO check for errors above
+}
+
+status_t SharedRegionParcelable::readFromParcel(const Parcel* parcel) {
+ parcel->readInt32(&mSizeInBytes);
+ if (mSizeInBytes > 0) {
+ parcel->readInt32(&mSharedMemoryIndex);
+ parcel->readInt32(&mOffsetInBytes);
+ }
+ return NO_ERROR; // TODO check for errors above
+}
+
+oboe_result_t SharedRegionParcelable::resolve(SharedMemoryParcelable *memoryParcels,
+ void **regionAddressPtr) {
+ if (mSizeInBytes == 0) {
+ *regionAddressPtr = nullptr;
+ return OBOE_OK;
+ }
+ if (mSharedMemoryIndex < 0) {
+ ALOGE("SharedRegionParcelable invalid mSharedMemoryIndex = %d", mSharedMemoryIndex);
+ return OBOE_ERROR_INTERNAL;
+ }
+ SharedMemoryParcelable *memoryParcel = &memoryParcels[mSharedMemoryIndex];
+ return memoryParcel->resolve(mOffsetInBytes, mSizeInBytes, regionAddressPtr);
+}
+
+oboe_result_t SharedRegionParcelable::validate() {
+ if (mSizeInBytes < 0 || mSizeInBytes >= MAX_MMAP_SIZE) {
+ ALOGE("SharedRegionParcelable invalid mSizeInBytes = %d", mSizeInBytes);
+ return OBOE_ERROR_INTERNAL;
+ }
+ if (mSizeInBytes > 0) {
+ if (mOffsetInBytes < 0 || mOffsetInBytes >= MAX_MMAP_OFFSET) {
+ ALOGE("SharedRegionParcelable invalid mOffsetInBytes = %d", mOffsetInBytes);
+ return OBOE_ERROR_INTERNAL;
+ }
+ if (mSharedMemoryIndex < 0 || mSharedMemoryIndex >= MAX_SHARED_MEMORIES) {
+ ALOGE("SharedRegionParcelable invalid mSharedMemoryIndex = %d", mSharedMemoryIndex);
+ return OBOE_ERROR_INTERNAL;
+ }
+ }
+ return OBOE_OK;
+}
+
+void SharedRegionParcelable::dump() {
+ ALOGD("SharedRegionParcelable mSizeInBytes = %d -----", mSizeInBytes);
+ if (mSizeInBytes > 0) {
+ ALOGD("SharedRegionParcelable mSharedMemoryIndex = %d", mSharedMemoryIndex);
+ ALOGD("SharedRegionParcelable mOffsetInBytes = %d", mOffsetInBytes);
+ }
+}
diff --git a/media/liboboe/src/binding/SharedRegionParcelable.h b/media/liboboe/src/binding/SharedRegionParcelable.h
new file mode 100644
index 0000000..bccdaa8
--- /dev/null
+++ b/media/liboboe/src/binding/SharedRegionParcelable.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef BINDING_SHAREDREGIONPARCELABLE_H
+#define BINDING_SHAREDREGIONPARCELABLE_H
+
+#include <stdint.h>
+
+#include <sys/mman.h>
+#include <binder/Parcelable.h>
+
+#include <oboe/OboeDefinitions.h>
+
+#include "binding/SharedMemoryParcelable.h"
+
+using android::status_t;
+using android::Parcel;
+using android::Parcelable;
+
+namespace oboe {
+
+class SharedRegionParcelable : public Parcelable {
+public:
+ SharedRegionParcelable();
+ virtual ~SharedRegionParcelable();
+
+ void setup(int32_t sharedMemoryIndex, int32_t offsetInBytes, int32_t sizeInBytes);
+
+ virtual status_t writeToParcel(Parcel* parcel) const override;
+
+ virtual status_t readFromParcel(const Parcel* parcel) override;
+
+ oboe_result_t resolve(SharedMemoryParcelable *memoryParcels, void **regionAddressPtr);
+
+ oboe_result_t validate();
+
+ void dump();
+
+protected:
+ int32_t mSharedMemoryIndex = -1;
+ int32_t mOffsetInBytes = 0;
+ int32_t mSizeInBytes = 0;
+};
+
+} /* namespace oboe */
+
+#endif //BINDING_SHAREDREGIONPARCELABLE_H
diff --git a/media/liboboe/src/client/AudioEndpoint.cpp b/media/liboboe/src/client/AudioEndpoint.cpp
new file mode 100644
index 0000000..160c37e
--- /dev/null
+++ b/media/liboboe/src/client/AudioEndpoint.cpp
@@ -0,0 +1,194 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "OboeAudio"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <cassert>
+#include <oboe/OboeDefinitions.h>
+
+#include "AudioEndpointParcelable.h"
+#include "AudioEndpoint.h"
+#include "OboeServiceMessage.h"
+
+using namespace android;
+using namespace oboe;
+
+AudioEndpoint::AudioEndpoint()
+ : mOutputFreeRunning(false)
+ , mDataReadCounter(0)
+ , mDataWriteCounter(0)
+{
+}
+
+AudioEndpoint::~AudioEndpoint()
+{
+}
+
+static void AudioEndpoint_validateQueueDescriptor(const char *type,
+ const RingBufferDescriptor *descriptor) {
+ assert(descriptor->capacityInFrames > 0);
+ assert(descriptor->bytesPerFrame > 1);
+ assert(descriptor->dataAddress != nullptr);
+ ALOGD("AudioEndpoint_validateQueueDescriptor %s, dataAddress at %p ====================",
+ type,
+ descriptor->dataAddress);
+ ALOGD("AudioEndpoint_validateQueueDescriptor readCounter at %p, writeCounter at %p",
+ descriptor->readCounterAddress,
+ descriptor->writeCounterAddress);
+
+ // Try to READ from the data area.
+ uint8_t value = descriptor->dataAddress[0];
+ ALOGD("AudioEndpoint_validateQueueDescriptor() dataAddress[0] = %d, then try to write",
+ (int) value);
+ // Try to WRITE to the data area.
+ descriptor->dataAddress[0] = value;
+ ALOGD("AudioEndpoint_validateQueueDescriptor() wrote successfully");
+
+ if (descriptor->readCounterAddress) {
+ fifo_counter_t counter = *descriptor->readCounterAddress;
+ ALOGD("AudioEndpoint_validateQueueDescriptor() *readCounterAddress = %d, now write",
+ (int) counter);
+ *descriptor->readCounterAddress = counter;
+ ALOGD("AudioEndpoint_validateQueueDescriptor() wrote readCounterAddress successfully");
+ }
+ if (descriptor->writeCounterAddress) {
+ fifo_counter_t counter = *descriptor->writeCounterAddress;
+ ALOGD("AudioEndpoint_validateQueueDescriptor() *writeCounterAddress = %d, now write",
+ (int) counter);
+ *descriptor->writeCounterAddress = counter;
+ ALOGD("AudioEndpoint_validateQueueDescriptor() wrote writeCounterAddress successfully");
+ }
+}
+
+void AudioEndpoint_validateDescriptor(const EndpointDescriptor *pEndpointDescriptor) {
+ AudioEndpoint_validateQueueDescriptor("msg", &pEndpointDescriptor->upMessageQueueDescriptor);
+ AudioEndpoint_validateQueueDescriptor("data", &pEndpointDescriptor->downDataQueueDescriptor);
+}
+
+oboe_result_t AudioEndpoint::configure(const EndpointDescriptor *pEndpointDescriptor)
+{
+ oboe_result_t result = OBOE_OK;
+ AudioEndpoint_validateDescriptor(pEndpointDescriptor); // FIXME remove after debugging
+
+ const RingBufferDescriptor *descriptor = &pEndpointDescriptor->upMessageQueueDescriptor;
+ assert(descriptor->bytesPerFrame == sizeof(OboeServiceMessage));
+ assert(descriptor->readCounterAddress != nullptr);
+ assert(descriptor->writeCounterAddress != nullptr);
+ mUpCommandQueue = new FifoBuffer(
+ descriptor->bytesPerFrame,
+ descriptor->capacityInFrames,
+ descriptor->readCounterAddress,
+ descriptor->writeCounterAddress,
+ descriptor->dataAddress
+ );
+ /* TODO mDownCommandQueue
+ if (descriptor->capacityInFrames > 0) {
+ descriptor = &pEndpointDescriptor->downMessageQueueDescriptor;
+ mDownCommandQueue = new FifoBuffer(
+ descriptor->capacityInFrames,
+ descriptor->bytesPerFrame,
+ descriptor->readCounterAddress,
+ descriptor->writeCounterAddress,
+ descriptor->dataAddress
+ );
+ }
+ */
+ descriptor = &pEndpointDescriptor->downDataQueueDescriptor;
+ assert(descriptor->capacityInFrames > 0);
+ assert(descriptor->bytesPerFrame > 1);
+ assert(descriptor->bytesPerFrame < 4 * 16); // FIXME just for initial debugging
+ assert(descriptor->framesPerBurst > 0);
+ assert(descriptor->framesPerBurst < 8 * 1024); // FIXME just for initial debugging
+ assert(descriptor->dataAddress != nullptr);
+ ALOGD("AudioEndpoint::configure() data framesPerBurst = %d", descriptor->framesPerBurst);
+ ALOGD("AudioEndpoint::configure() data readCounterAddress = %p", descriptor->readCounterAddress);
+ mOutputFreeRunning = descriptor->readCounterAddress == nullptr;
+ ALOGD("AudioEndpoint::configure() mOutputFreeRunning = %d", mOutputFreeRunning ? 1 : 0);
+ int64_t *readCounterAddress = (descriptor->readCounterAddress == nullptr)
+ ? &mDataReadCounter
+ : descriptor->readCounterAddress;
+ int64_t *writeCounterAddress = (descriptor->writeCounterAddress == nullptr)
+ ? &mDataWriteCounter
+ : descriptor->writeCounterAddress;
+ mDownDataQueue = new FifoBuffer(
+ descriptor->bytesPerFrame,
+ descriptor->capacityInFrames,
+ readCounterAddress,
+ writeCounterAddress,
+ descriptor->dataAddress
+ );
+ uint32_t threshold = descriptor->capacityInFrames / 2;
+ mDownDataQueue->setThreshold(threshold);
+ return result;
+}
+
+oboe_result_t AudioEndpoint::readUpCommand(OboeServiceMessage *commandPtr)
+{
+ return mUpCommandQueue->read(commandPtr, 1);
+}
+
+oboe_result_t AudioEndpoint::writeDataNow(const void *buffer, int32_t numFrames)
+{
+ return mDownDataQueue->write(buffer, numFrames);
+}
+
+void AudioEndpoint::setDownDataReadCounter(fifo_counter_t framesRead)
+{
+ mDownDataQueue->setReadCounter(framesRead);
+}
+
+fifo_counter_t AudioEndpoint::getDownDataReadCounter()
+{
+ return mDownDataQueue->getReadCounter();
+}
+
+void AudioEndpoint::setDownDataWriteCounter(fifo_counter_t framesWritten)
+{
+    mDownDataQueue->setWriteCounter(framesWritten);
+}
+
+fifo_counter_t AudioEndpoint::getDownDataWriteCounter()
+{
+ return mDownDataQueue->getWriteCounter();
+}
+
+oboe_size_frames_t AudioEndpoint::setBufferSizeInFrames(oboe_size_frames_t requestedFrames,
+ oboe_size_frames_t *actualFrames)
+{
+ if (requestedFrames < ENDPOINT_DATA_QUEUE_SIZE_MIN) {
+ requestedFrames = ENDPOINT_DATA_QUEUE_SIZE_MIN;
+ }
+ mDownDataQueue->setThreshold(requestedFrames);
+ *actualFrames = mDownDataQueue->getThreshold();
+ return OBOE_OK;
+}
+
+int32_t AudioEndpoint::getBufferSizeInFrames() const
+{
+ return mDownDataQueue->getThreshold();
+}
+
+int32_t AudioEndpoint::getBufferCapacityInFrames() const
+{
+ return (int32_t)mDownDataQueue->getBufferCapacityInFrames();
+}
+
+int32_t AudioEndpoint::getFullFramesAvailable()
+{
+ return mDownDataQueue->getFifoControllerBase()->getFullFramesAvailable();
+}
diff --git a/media/liboboe/src/client/AudioEndpoint.h b/media/liboboe/src/client/AudioEndpoint.h
new file mode 100644
index 0000000..6ae8b72
--- /dev/null
+++ b/media/liboboe/src/client/AudioEndpoint.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_AUDIO_ENDPOINT_H
+#define OBOE_AUDIO_ENDPOINT_H
+
+#include <oboe/OboeAudio.h>
+
+#include "OboeServiceMessage.h"
+#include "AudioEndpointParcelable.h"
+#include "fifo/FifoBuffer.h"
+
+namespace oboe {
+
+#define ENDPOINT_DATA_QUEUE_SIZE_MIN 64
+
+/**
+ * A sink for audio.
+ * Used by the client code.
+ */
+class AudioEndpoint {
+
+public:
+ AudioEndpoint();
+ virtual ~AudioEndpoint();
+
+ /**
+ * Configure based on the EndPointDescriptor_t.
+ */
+ oboe_result_t configure(const EndpointDescriptor *pEndpointDescriptor);
+
+ /**
+ * Read from a command passed up from the Server.
+ * @return 1 if command received, 0 for no command, or negative error.
+ */
+ oboe_result_t readUpCommand(OboeServiceMessage *commandPtr);
+
+ /**
+ * Non-blocking write.
+ * @return framesWritten or a negative error code.
+ */
+ oboe_result_t writeDataNow(const void *buffer, int32_t numFrames);
+
+ /**
+ * Set the read index in the downData queue.
+ * This is needed if the reader is not updating the index itself.
+ */
+ void setDownDataReadCounter(fifo_counter_t framesRead);
+ fifo_counter_t getDownDataReadCounter();
+
+ void setDownDataWriteCounter(fifo_counter_t framesWritten);
+ fifo_counter_t getDownDataWriteCounter();
+
+ /**
+ * The result is not valid until after configure() is called.
+ *
+     * @return true if the output buffer read position is not updated, e.g. by DMA
+ */
+ bool isOutputFreeRunning() const { return mOutputFreeRunning; }
+
+ int32_t setBufferSizeInFrames(oboe_size_frames_t requestedFrames,
+ oboe_size_frames_t *actualFrames);
+ oboe_size_frames_t getBufferSizeInFrames() const;
+
+ oboe_size_frames_t getBufferCapacityInFrames() const;
+
+ oboe_size_frames_t getFullFramesAvailable();
+
+private:
+ FifoBuffer * mUpCommandQueue;
+ FifoBuffer * mDownDataQueue;
+ bool mOutputFreeRunning;
+ fifo_counter_t mDataReadCounter; // only used if free-running
+ fifo_counter_t mDataWriteCounter; // only used if free-running
+};
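+
+// Illustrative client-side sketch, assuming 'descriptor' was filled by
+// AudioEndpointParcelable::resolve() and 'buffer'/'framesToWrite' come from the caller:
+//
+//     AudioEndpoint endpoint;
+//     if (endpoint.configure(&descriptor) == OBOE_OK) {
+//         oboe_result_t framesWritten = endpoint.writeDataNow(buffer, framesToWrite);
+//         if (framesWritten < 0) {
+//             // a negative value is an error code
+//         }
+//     }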
+
+} // namespace oboe
+
+#endif //OBOE_AUDIO_ENDPOINT_H
diff --git a/media/liboboe/src/client/AudioStreamInternal.cpp b/media/liboboe/src/client/AudioStreamInternal.cpp
new file mode 100644
index 0000000..0d169e1
--- /dev/null
+++ b/media/liboboe/src/client/AudioStreamInternal.cpp
@@ -0,0 +1,528 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "OboeAudio"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <assert.h>
+
+#include <binder/IServiceManager.h>
+
+#include <oboe/OboeAudio.h>
+
+#include "AudioClock.h"
+#include "AudioEndpointParcelable.h"
+#include "binding/OboeStreamRequest.h"
+#include "binding/OboeStreamConfiguration.h"
+#include "binding/IOboeAudioService.h"
+#include "binding/OboeServiceMessage.h"
+
+#include "AudioStreamInternal.h"
+
+#define LOG_TIMESTAMPS 0
+
+using android::String16;
+using android::IServiceManager;
+using android::defaultServiceManager;
+using android::interface_cast;
+
+using namespace oboe;
+
+// Helper function to get access to the "OboeAudioService" service.
+static sp<IOboeAudioService> getOboeAudioService() {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("OboeAudioService"));
+ // TODO: If the "OboeHack" service is not running, getService times out and binder == 0.
+ sp<IOboeAudioService> service = interface_cast<IOboeAudioService>(binder);
+ return service;
+}
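+
+// Illustrative note: callers should check the returned pointer before use, since
+// getService() can time out and return a null binder (see the TODO above):
+//
+//     sp<IOboeAudioService> service = getOboeAudioService();
+//     if (service.get() == nullptr) {
+//         // report an error, e.g. OBOE_ERROR_INTERNAL, or retry
+//     }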
+
+AudioStreamInternal::AudioStreamInternal()
+ : AudioStream()
+ , mClockModel()
+ , mAudioEndpoint()
+ , mServiceStreamHandle(OBOE_HANDLE_INVALID)
+ , mFramesPerBurst(16)
+{
+ // TODO protect against mService being NULL;
+ // TODO Model access to the service on frameworks/av/media/libaudioclient/AudioSystem.cpp
+ mService = getOboeAudioService();
+}
+
+AudioStreamInternal::~AudioStreamInternal() {
+}
+
+oboe_result_t AudioStreamInternal::open(const AudioStreamBuilder &builder) {
+
+ oboe_result_t result = OBOE_OK;
+ OboeStreamRequest request;
+ OboeStreamConfiguration configuration;
+
+ result = AudioStream::open(builder);
+ if (result < 0) {
+ return result;
+ }
+
+ // Build the request.
+ request.setUserId(getuid());
+ request.setProcessId(getpid());
+ request.getConfiguration().setDeviceId(getDeviceId());
+ request.getConfiguration().setSampleRate(getSampleRate());
+ request.getConfiguration().setSamplesPerFrame(getSamplesPerFrame());
+ request.getConfiguration().setAudioFormat(getFormat());
+ request.dump();
+
+ mServiceStreamHandle = mService->openStream(request, configuration);
+ ALOGD("AudioStreamInternal.open(): openStream returned mServiceStreamHandle = 0x%08X",
+ (unsigned int)mServiceStreamHandle);
+ if (mServiceStreamHandle < 0) {
+ result = mServiceStreamHandle;
+ ALOGE("AudioStreamInternal.open(): acquireRealtimeStream oboe_result_t = 0x%08X", result);
+ } else {
+ result = configuration.validate();
+ if (result != OBOE_OK) {
+ close();
+ return result;
+ }
+ // Save results of the open.
+ setSampleRate(configuration.getSampleRate());
+ setSamplesPerFrame(configuration.getSamplesPerFrame());
+ setFormat(configuration.getAudioFormat());
+
+ oboe::AudioEndpointParcelable parcelable;
+ result = mService->getStreamDescription(mServiceStreamHandle, parcelable);
+ if (result != OBOE_OK) {
+ ALOGE("AudioStreamInternal.open(): getStreamDescriptor returns %d", result);
+ mService->closeStream(mServiceStreamHandle);
+ return result;
+ }
+ // resolve parcelable into a descriptor
+ parcelable.resolve(&mEndpointDescriptor);
+
+ // Configure endpoint based on descriptor.
+ mAudioEndpoint.configure(&mEndpointDescriptor);
+
+
+ mFramesPerBurst = mEndpointDescriptor.downDataQueueDescriptor.framesPerBurst;
+ assert(mFramesPerBurst >= 16);
+ assert(mEndpointDescriptor.downDataQueueDescriptor.capacityInFrames < 10 * 1024);
+
+ mClockModel.setSampleRate(getSampleRate());
+ mClockModel.setFramesPerBurst(mFramesPerBurst);
+
+ setState(OBOE_STREAM_STATE_OPEN);
+ }
+ return result;
+}
+
+oboe_result_t AudioStreamInternal::close() {
+ ALOGD("AudioStreamInternal.close(): mServiceStreamHandle = 0x%08X", mServiceStreamHandle);
+ if (mServiceStreamHandle != OBOE_HANDLE_INVALID) {
+ mService->closeStream(mServiceStreamHandle);
+ mServiceStreamHandle = OBOE_HANDLE_INVALID;
+ return OBOE_OK;
+ } else {
+ return OBOE_ERROR_INVALID_STATE;
+ }
+}
+
+oboe_result_t AudioStreamInternal::requestStart()
+{
+ oboe_nanoseconds_t startTime;
+ ALOGD("AudioStreamInternal(): start()");
+ if (mServiceStreamHandle == OBOE_HANDLE_INVALID) {
+ return OBOE_ERROR_INVALID_STATE;
+ }
+ startTime = Oboe_getNanoseconds(OBOE_CLOCK_MONOTONIC);
+ mClockModel.start(startTime);
+ processTimestamp(0, startTime);
+ setState(OBOE_STREAM_STATE_STARTING);
+ return mService->startStream(mServiceStreamHandle);
+}
+
+oboe_result_t AudioStreamInternal::requestPause()
+{
+ ALOGD("AudioStreamInternal(): pause()");
+ if (mServiceStreamHandle == OBOE_HANDLE_INVALID) {
+ return OBOE_ERROR_INVALID_STATE;
+ }
+ mClockModel.stop(Oboe_getNanoseconds(OBOE_CLOCK_MONOTONIC));
+ setState(OBOE_STREAM_STATE_PAUSING);
+ return mService->pauseStream(mServiceStreamHandle);
+}
+
+oboe_result_t AudioStreamInternal::requestFlush() {
+ ALOGD("AudioStreamInternal(): flush()");
+ if (mServiceStreamHandle == OBOE_HANDLE_INVALID) {
+ return OBOE_ERROR_INVALID_STATE;
+ }
+ setState(OBOE_STREAM_STATE_FLUSHING);
+ return mService->flushStream(mServiceStreamHandle);
+}
+
+void AudioStreamInternal::onFlushFromServer() {
+ ALOGD("AudioStreamInternal(): onFlushFromServer()");
+ oboe_position_frames_t readCounter = mAudioEndpoint.getDownDataReadCounter();
+ oboe_position_frames_t writeCounter = mAudioEndpoint.getDownDataWriteCounter();
+ // Bump offset so caller does not see the retrograde motion in getFramesRead().
+ oboe_position_frames_t framesFlushed = writeCounter - readCounter;
+ mFramesOffsetFromService += framesFlushed;
+ // Flush written frames by forcing writeCounter to readCounter.
+ // This is because we cannot move the read counter in the hardware.
+ mAudioEndpoint.setDownDataWriteCounter(readCounter);
+}
+
+oboe_result_t AudioStreamInternal::requestStop()
+{
+ // TODO better implementation of requestStop()
+ oboe_result_t result = requestPause();
+ if (result == OBOE_OK) {
+ oboe_stream_state_t state;
+ result = waitForStateChange(OBOE_STREAM_STATE_PAUSING,
+ &state,
+ 500 * OBOE_NANOS_PER_MILLISECOND);// TODO temporary code
+ if (result == OBOE_OK) {
+ result = requestFlush();
+ }
+ }
+ return result;
+}
+
+oboe_result_t AudioStreamInternal::registerThread() {
+ ALOGD("AudioStreamInternal(): registerThread()");
+ if (mServiceStreamHandle == OBOE_HANDLE_INVALID) {
+ return OBOE_ERROR_INVALID_STATE;
+ }
+ return mService->registerAudioThread(mServiceStreamHandle,
+ gettid(),
+ getPeriodNanoseconds());
+}
+
+oboe_result_t AudioStreamInternal::unregisterThread() {
+ ALOGD("AudioStreamInternal(): unregisterThread()");
+ if (mServiceStreamHandle == OBOE_HANDLE_INVALID) {
+ return OBOE_ERROR_INVALID_STATE;
+ }
+ return mService->unregisterAudioThread(mServiceStreamHandle, gettid());
+}
+
+// TODO use oboe_clockid_t all the way down to AudioClock
+oboe_result_t AudioStreamInternal::getTimestamp(clockid_t clockId,
+ oboe_position_frames_t *framePosition,
+ oboe_nanoseconds_t *timeNanoseconds) {
+// TODO implement using real HAL
+ oboe_nanoseconds_t time = AudioClock::getNanoseconds();
+ *framePosition = mClockModel.convertTimeToPosition(time);
+ *timeNanoseconds = time + (10 * OBOE_NANOS_PER_MILLISECOND); // Fake hardware delay
+ return OBOE_OK;
+}
+
+oboe_result_t AudioStreamInternal::updateState() {
+ return processCommands();
+}
+
+#if LOG_TIMESTAMPS
+static void AudioStreamInternal_LogTimestamp(OboeServiceMessage &command) {
+ static int64_t oldPosition = 0;
+ static oboe_nanoseconds_t oldTime = 0;
+ int64_t framePosition = command.timestamp.position;
+ oboe_nanoseconds_t nanoTime = command.timestamp.timestamp;
+ ALOGD("AudioStreamInternal() timestamp says framePosition = %08lld at nanoTime %llu",
+ (long long) framePosition,
+ (long long) nanoTime);
+ int64_t nanosDelta = nanoTime - oldTime;
+ if (nanosDelta > 0 && oldTime > 0) {
+ int64_t framesDelta = framePosition - oldPosition;
+ int64_t rate = (framesDelta * OBOE_NANOS_PER_SECOND) / nanosDelta;
+ ALOGD("AudioStreamInternal() - framesDelta = %08lld", (long long) framesDelta);
+ ALOGD("AudioStreamInternal() - nanosDelta = %08lld", (long long) nanosDelta);
+ ALOGD("AudioStreamInternal() - measured rate = %llu", (unsigned long long) rate);
+ }
+ oldPosition = framePosition;
+ oldTime = nanoTime;
+}
+#endif
+
+oboe_result_t AudioStreamInternal::onTimestampFromServer(OboeServiceMessage *message) {
+ oboe_position_frames_t framePosition = 0;
+#if LOG_TIMESTAMPS
+ AudioStreamInternal_LogTimestamp(*message);
+#endif
+ framePosition = message->timestamp.position;
+ processTimestamp(framePosition, message->timestamp.timestamp);
+ return OBOE_OK;
+}
+
+oboe_result_t AudioStreamInternal::onEventFromServer(OboeServiceMessage *message) {
+ oboe_result_t result = OBOE_OK;
+ ALOGD("processCommands() got event %d", message->event.event);
+ switch (message->event.event) {
+ case OBOE_SERVICE_EVENT_STARTED:
+ ALOGD("processCommands() got OBOE_SERVICE_EVENT_STARTED");
+ setState(OBOE_STREAM_STATE_STARTED);
+ break;
+ case OBOE_SERVICE_EVENT_PAUSED:
+ ALOGD("processCommands() got OBOE_SERVICE_EVENT_PAUSED");
+ setState(OBOE_STREAM_STATE_PAUSED);
+ break;
+ case OBOE_SERVICE_EVENT_FLUSHED:
+ ALOGD("processCommands() got OBOE_SERVICE_EVENT_FLUSHED");
+ setState(OBOE_STREAM_STATE_FLUSHED);
+ onFlushFromServer();
+ break;
+ case OBOE_SERVICE_EVENT_CLOSED:
+ ALOGD("processCommands() got OBOE_SERVICE_EVENT_CLOSED");
+ setState(OBOE_STREAM_STATE_CLOSED);
+ break;
+ case OBOE_SERVICE_EVENT_DISCONNECTED:
+ result = OBOE_ERROR_DISCONNECTED;
+ ALOGW("WARNING - processCommands() OBOE_SERVICE_EVENT_DISCONNECTED");
+ break;
+ default:
+ ALOGW("WARNING - processCommands() Unrecognized event = %d",
+ (int) message->event.event);
+ break;
+ }
+ return result;
+}
+
+// Process all the commands coming from the server.
+oboe_result_t AudioStreamInternal::processCommands() {
+ oboe_result_t result = OBOE_OK;
+
+ // Let the service run in case it is a fake service simulator.
+ mService->tickle(); // TODO use real service thread
+
+ while (result == OBOE_OK) {
+ OboeServiceMessage message;
+ if (mAudioEndpoint.readUpCommand(&message) != 1) {
+ break; // no command this time, no problem
+ }
+ switch (message.what) {
+ case OboeServiceMessage::code::TIMESTAMP:
+ result = onTimestampFromServer(&message);
+ break;
+
+ case OboeServiceMessage::code::EVENT:
+ result = onEventFromServer(&message);
+ break;
+
+ default:
+ ALOGW("WARNING - AudioStreamInternal::processCommands() Unrecognized what = %d",
+ (int) message.what);
+ result = OBOE_ERROR_UNEXPECTED_VALUE;
+ break;
+ }
+ }
+ return result;
+}
+
+// Write the data, blocking if needed, as long as timeoutNanoseconds > 0.
+oboe_result_t AudioStreamInternal::write(const void *buffer, int32_t numFrames,
+ oboe_nanoseconds_t timeoutNanoseconds)
+{
+ oboe_result_t result = OBOE_OK;
+ const uint8_t* source = (const uint8_t*) buffer;
+ oboe_nanoseconds_t currentTimeNanos = AudioClock::getNanoseconds();
+ oboe_nanoseconds_t deadlineNanos = currentTimeNanos + timeoutNanoseconds;
+ int32_t framesLeft = numFrames;
+// ALOGD("AudioStreamInternal::write(%p, %d) at time %08llu , mState = %d ------------------",
+// buffer, numFrames, (unsigned long long) currentTimeNanos, mState);
+
+ // Write until all the data has been written or until a timeout occurs.
+ while (framesLeft > 0) {
+ // The call to writeNow() will not block. It will just write as much as it can.
+ oboe_nanoseconds_t wakeTimeNanos = 0;
+ oboe_result_t framesWritten = writeNow(source, framesLeft,
+ currentTimeNanos, &wakeTimeNanos);
+// ALOGD("AudioStreamInternal::write() writeNow() framesLeft = %d --> framesWritten = %d", framesLeft, framesWritten);
+ if (framesWritten < 0) {
+ result = framesWritten;
+ break;
+ }
+ framesLeft -= (int32_t) framesWritten;
+ source += framesWritten * getBytesPerFrame();
+
+ // Should we block?
+ if (timeoutNanoseconds == 0) {
+ break; // don't block
+ } else if (framesLeft > 0) {
+ //ALOGD("AudioStreamInternal:: original wakeTimeNanos %lld", (long long) wakeTimeNanos);
+ // clip the wake time to something reasonable
+ if (wakeTimeNanos < currentTimeNanos) {
+ wakeTimeNanos = currentTimeNanos;
+ }
+ if (wakeTimeNanos > deadlineNanos) {
+ // If we time out, just return the framesWritten so far.
+ ALOGE("AudioStreamInternal::write(): timed out after %lld nanos", (long long) timeoutNanoseconds);
+ break;
+ }
+
+ //ALOGD("AudioStreamInternal:: sleep until %lld, dur = %lld", (long long) wakeTimeNanos,
+ // (long long) (wakeTimeNanos - currentTimeNanos));
+ AudioClock::sleepForNanos(wakeTimeNanos - currentTimeNanos);
+ currentTimeNanos = AudioClock::getNanoseconds();
+ }
+ }
+
+ // return error or framesWritten
+ return (result < 0) ? result : numFrames - framesLeft;
+}
+
+// Write as much data as we can without blocking.
+oboe_result_t AudioStreamInternal::writeNow(const void *buffer, int32_t numFrames,
+ oboe_nanoseconds_t currentNanoTime, oboe_nanoseconds_t *wakeTimePtr) {
+ {
+ oboe_result_t result = processCommands();
+ if (result != OBOE_OK) {
+ return result;
+ }
+ }
+
+ if (mAudioEndpoint.isOutputFreeRunning()) {
+ // Update data queue based on the timing model.
+ int64_t estimatedReadCounter = mClockModel.convertTimeToPosition(currentNanoTime);
+ mAudioEndpoint.setDownDataReadCounter(estimatedReadCounter);
+ // If the read index passed the write index then consider it an underrun.
+ if (mAudioEndpoint.getFullFramesAvailable() < 0) {
+ mXRunCount++;
+ }
+ }
+ // TODO else query the read counter from the endpoint, because it is set by the actual reader
+
+ // Write some data to the buffer.
+ int32_t framesWritten = mAudioEndpoint.writeDataNow(buffer, numFrames);
+ if (framesWritten > 0) {
+ incrementFramesWritten(framesWritten);
+ }
+ //ALOGD("AudioStreamInternal::writeNow() - tried to write %d frames, wrote %d",
+ // numFrames, framesWritten);
+
+ // Calculate an ideal time to wake up.
+ if (wakeTimePtr != nullptr && framesWritten >= 0) {
+ // By default wake up a few milliseconds from now. // TODO review
+ oboe_nanoseconds_t wakeTime = currentNanoTime + (2 * OBOE_NANOS_PER_MILLISECOND);
+ switch (getState()) {
+ case OBOE_STREAM_STATE_OPEN:
+ case OBOE_STREAM_STATE_STARTING:
+ if (framesWritten != 0) {
+ // Don't wait to write more data. Just prime the buffer.
+ wakeTime = currentNanoTime;
+ }
+ break;
+ case OBOE_STREAM_STATE_STARTED: // When do we expect the next read burst to occur?
+ {
+ uint32_t burstSize = mFramesPerBurst;
+ if (burstSize < 32) {
+ burstSize = 32; // TODO review
+ }
+
+ uint64_t nextReadPosition = mAudioEndpoint.getDownDataReadCounter() + burstSize;
+ wakeTime = mClockModel.convertPositionToTime(nextReadPosition);
+ }
+ break;
+ default:
+ break;
+ }
+ *wakeTimePtr = wakeTime;
+
+ }
+// ALOGD("AudioStreamInternal::writeNow finished: now = %llu, read# = %llu, wrote# = %llu",
+// (unsigned long long)currentNanoTime,
+// (unsigned long long)mAudioEndpoint.getDownDataReadCounter(),
+// (unsigned long long)mAudioEndpoint.getDownDataWriteCounter());
+ return framesWritten;
+}
+
+oboe_result_t AudioStreamInternal::waitForStateChange(oboe_stream_state_t currentState,
+ oboe_stream_state_t *nextState,
+ oboe_nanoseconds_t timeoutNanoseconds)
+
+{
+ oboe_result_t result = processCommands();
+// ALOGD("AudioStreamInternal::waitForStateChange() - processCommands() returned %d", result);
+ if (result != OBOE_OK) {
+ return result;
+ }
+ // TODO replace this polling with a timed sleep on a futex on the message queue
+ int32_t durationNanos = 5 * OBOE_NANOS_PER_MILLISECOND;
+ oboe_stream_state_t state = getState();
+// ALOGD("AudioStreamInternal::waitForStateChange() - state = %d", state);
+ while (state == currentState && timeoutNanoseconds > 0) {
+ // TODO use futex from service message queue
+ if (durationNanos > timeoutNanoseconds) {
+ durationNanos = timeoutNanoseconds;
+ }
+ AudioClock::sleepForNanos(durationNanos);
+ timeoutNanoseconds -= durationNanos;
+
+ result = processCommands();
+ if (result != OBOE_OK) {
+ return result;
+ }
+
+ state = getState();
+// ALOGD("AudioStreamInternal::waitForStateChange() - state = %d", state);
+ }
+ if (nextState != nullptr) {
+ *nextState = state;
+ }
+ return (state == currentState) ? OBOE_ERROR_TIMEOUT : OBOE_OK;
+}
+
+
+void AudioStreamInternal::processTimestamp(uint64_t position, oboe_nanoseconds_t time) {
+ mClockModel.processTimestamp(position, time);
+}
+
+oboe_result_t AudioStreamInternal::setBufferSize(oboe_size_frames_t requestedFrames,
+ oboe_size_frames_t *actualFrames) {
+ return mAudioEndpoint.setBufferSizeInFrames(requestedFrames, actualFrames);
+}
+
+oboe_size_frames_t AudioStreamInternal::getBufferSize() const
+{
+ return mAudioEndpoint.getBufferSizeInFrames();
+}
+
+oboe_size_frames_t AudioStreamInternal::getBufferCapacity() const
+{
+ return mAudioEndpoint.getBufferCapacityInFrames();
+}
+
+oboe_size_frames_t AudioStreamInternal::getFramesPerBurst() const
+{
+ return mEndpointDescriptor.downDataQueueDescriptor.framesPerBurst;
+}
+
+oboe_position_frames_t AudioStreamInternal::getFramesRead()
+{
+ oboe_position_frames_t framesRead =
+ mClockModel.convertTimeToPosition(AudioClock::getNanoseconds())
+ + mFramesOffsetFromService;
+ // Prevent retrograde motion.
+ if (framesRead < mLastFramesRead) {
+ framesRead = mLastFramesRead;
+ } else {
+ mLastFramesRead = framesRead;
+ }
+ ALOGD("AudioStreamInternal::getFramesRead() returns %lld", (long long)framesRead);
+ return framesRead;
+}
+
+// TODO implement getTimestamp
diff --git a/media/liboboe/src/client/AudioStreamInternal.h b/media/liboboe/src/client/AudioStreamInternal.h
new file mode 100644
index 0000000..6f37761
--- /dev/null
+++ b/media/liboboe/src/client/AudioStreamInternal.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_AUDIOSTREAMINTERNAL_H
+#define OBOE_AUDIOSTREAMINTERNAL_H
+
+#include <stdint.h>
+#include <oboe/OboeAudio.h>
+
+#include "binding/IOboeAudioService.h"
+#include "binding/AudioEndpointParcelable.h"
+#include "client/IsochronousClockModel.h"
+#include "client/AudioEndpoint.h"
+#include "core/AudioStream.h"
+
+using android::sp;
+using android::IOboeAudioService;
+
+namespace oboe {
+
+// A stream that talks to the OboeService or directly to a HAL.
+class AudioStreamInternal : public AudioStream {
+
+public:
+ AudioStreamInternal();
+ virtual ~AudioStreamInternal();
+
+ // =========== Begin ABSTRACT methods ===========================
+ virtual oboe_result_t requestStart() override;
+
+ virtual oboe_result_t requestPause() override;
+
+ virtual oboe_result_t requestFlush() override;
+
+ virtual oboe_result_t requestStop() override;
+
+ // TODO use oboe_clockid_t all the way down to AudioClock
+ virtual oboe_result_t getTimestamp(clockid_t clockId,
+ oboe_position_frames_t *framePosition,
+ oboe_nanoseconds_t *timeNanoseconds) override;
+
+
+ virtual oboe_result_t updateState() override;
+ // =========== End ABSTRACT methods ===========================
+
+ virtual oboe_result_t open(const AudioStreamBuilder &builder) override;
+
+ virtual oboe_result_t close() override;
+
+ virtual oboe_result_t write(const void *buffer,
+ int32_t numFrames,
+ oboe_nanoseconds_t timeoutNanoseconds) override;
+
+ virtual oboe_result_t waitForStateChange(oboe_stream_state_t currentState,
+ oboe_stream_state_t *nextState,
+ oboe_nanoseconds_t timeoutNanoseconds) override;
+
+ virtual oboe_result_t setBufferSize(oboe_size_frames_t requestedFrames,
+ oboe_size_frames_t *actualFrames) override;
+
+ virtual oboe_size_frames_t getBufferSize() const override;
+
+ virtual oboe_size_frames_t getBufferCapacity() const override;
+
+ virtual oboe_size_frames_t getFramesPerBurst() const override;
+
+ virtual oboe_position_frames_t getFramesRead() override;
+
+ virtual int32_t getXRunCount() const override {
+ return mXRunCount;
+ }
+
+ virtual oboe_result_t registerThread() override;
+
+ virtual oboe_result_t unregisterThread() override;
+
+protected:
+
+ oboe_result_t processCommands();
+
+/**
+ * Low level write that will not block. It will just write as much as it can.
+ *
+ * It passes back a recommended wake-up time if wakeTimePtr is not NULL.
+ *
+ * @return the number of frames written or a negative error code.
+ */
+ virtual oboe_result_t writeNow(const void *buffer,
+ int32_t numFrames,
+ oboe_nanoseconds_t currentTimeNanos,
+ oboe_nanoseconds_t *wakeTimePtr);
+
+ void onFlushFromServer();
+
+ oboe_result_t onEventFromServer(OboeServiceMessage *message);
+
+ oboe_result_t onTimestampFromServer(OboeServiceMessage *message);
+
+private:
+ IsochronousClockModel mClockModel;
+ AudioEndpoint mAudioEndpoint;
+ oboe_handle_t mServiceStreamHandle;
+ EndpointDescriptor mEndpointDescriptor;
+ sp<IOboeAudioService> mService;
+ // Offset from underlying frame position.
+ oboe_position_frames_t mFramesOffsetFromService = 0;
+ oboe_position_frames_t mLastFramesRead = 0;
+ oboe_size_frames_t mFramesPerBurst;
+ int32_t mXRunCount = 0;
+
+ void processTimestamp(uint64_t position, oboe_nanoseconds_t time);
+};
+
+} /* namespace oboe */
+
+#endif //OBOE_AUDIOSTREAMINTERNAL_H
diff --git a/media/liboboe/src/client/IsochronousClockModel.cpp b/media/liboboe/src/client/IsochronousClockModel.cpp
new file mode 100644
index 0000000..b8e5538
--- /dev/null
+++ b/media/liboboe/src/client/IsochronousClockModel.cpp
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "OboeAudio"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <oboe/OboeDefinitions.h>
+
+#include "IsochronousClockModel.h"
+
+#define MIN_LATENESS_NANOS (10 * OBOE_NANOS_PER_MICROSECOND)
+
+using namespace android;
+using namespace oboe;
+
+IsochronousClockModel::IsochronousClockModel()
+ : mSampleRate(48000)
+ , mFramesPerBurst(64)
+ , mMaxLatenessInNanos(0)
+ , mMarkerFramePosition(0)
+ , mMarkerNanoTime(0)
+ , mTimestampCount(0)
+ , mState(STATE_STOPPED)
+{
+}
+
+IsochronousClockModel::~IsochronousClockModel() {
+}
+
+void IsochronousClockModel::start(oboe_nanoseconds_t nanoTime)
+{
+ mMarkerNanoTime = nanoTime;
+ mState = STATE_STARTING;
+}
+
+void IsochronousClockModel::stop(oboe_nanoseconds_t nanoTime)
+{
+ mMarkerNanoTime = nanoTime;
+ mMarkerFramePosition = convertTimeToPosition(nanoTime); // TODO should we do this?
+ mState = STATE_STOPPED;
+}
+
+void IsochronousClockModel::processTimestamp(oboe_position_frames_t framePosition,
+ oboe_nanoseconds_t nanoTime) {
+ int64_t framesDelta = framePosition - mMarkerFramePosition;
+ int64_t nanosDelta = nanoTime - mMarkerNanoTime;
+ if (nanosDelta < 1000) {
+ return;
+ }
+
+// ALOGI("processTimestamp() - mMarkerFramePosition = %lld at mMarkerNanoTime %llu",
+// (long long)mMarkerFramePosition,
+// (long long)mMarkerNanoTime);
+// ALOGI("processTimestamp() - framePosition = %lld at nanoTime %llu",
+// (long long)framePosition,
+// (long long)nanoTime);
+
+ int64_t expectedNanosDelta = convertDeltaPositionToTime(framesDelta);
+// ALOGI("processTimestamp() - expectedNanosDelta = %lld, nanosDelta = %llu",
+// (long long)expectedNanosDelta,
+// (long long)nanosDelta);
+
+// ALOGI("processTimestamp() - mSampleRate = %d", mSampleRate);
+// ALOGI("processTimestamp() - mState = %d", mState);
+ switch (mState) {
+ case STATE_STOPPED:
+ break;
+ case STATE_STARTING:
+ mMarkerFramePosition = framePosition;
+ mMarkerNanoTime = nanoTime;
+ mState = STATE_SYNCING;
+ break;
+ case STATE_SYNCING:
+ // This will handle a burst of rapid consumption in the beginning.
+ if (nanosDelta < expectedNanosDelta) {
+ mMarkerFramePosition = framePosition;
+ mMarkerNanoTime = nanoTime;
+ } else {
+ ALOGI("processTimestamp() - advance to STATE_RUNNING");
+ mState = STATE_RUNNING;
+ }
+ break;
+ case STATE_RUNNING:
+ if (nanosDelta < expectedNanosDelta) {
+ // Earlier than expected timestamp.
+ // This data is probably more accurate so use it.
+ // or we may be drifting due to a slow HW clock.
+ mMarkerFramePosition = framePosition;
+ mMarkerNanoTime = nanoTime;
+ ALOGI("processTimestamp() - STATE_RUNNING - %d < %d micros - EARLY",
+ (int) (nanosDelta / 1000), (int)(expectedNanosDelta / 1000));
+ } else if (nanosDelta > (expectedNanosDelta + mMaxLatenessInNanos)) {
+ // Later than expected timestamp.
+ mMarkerFramePosition = framePosition;
+ mMarkerNanoTime = nanoTime - mMaxLatenessInNanos;
+ ALOGI("processTimestamp() - STATE_RUNNING - %d > %d + %d micros - LATE",
+ (int) (nanosDelta / 1000), (int)(expectedNanosDelta / 1000),
+ (int) (mMaxLatenessInNanos / 1000));
+ }
+ break;
+ default:
+ break;
+ }
+ ++mTimestampCount;
+}
+
+void IsochronousClockModel::setSampleRate(int32_t sampleRate) {
+ mSampleRate = sampleRate;
+ update();
+}
+
+void IsochronousClockModel::setFramesPerBurst(int32_t framesPerBurst) {
+ mFramesPerBurst = framesPerBurst;
+ update();
+}
+
+void IsochronousClockModel::update() {
+ int64_t nanosLate = convertDeltaPositionToTime(mFramesPerBurst); // uses mSampleRate
+ mMaxLatenessInNanos = (nanosLate > MIN_LATENESS_NANOS) ? nanosLate : MIN_LATENESS_NANOS;
+}
+
+oboe_nanoseconds_t IsochronousClockModel::convertDeltaPositionToTime(
+ oboe_position_frames_t framesDelta) const {
+ return (OBOE_NANOS_PER_SECOND * framesDelta) / mSampleRate;
+}
+
+int64_t IsochronousClockModel::convertDeltaTimeToPosition(oboe_nanoseconds_t nanosDelta) const {
+ return (mSampleRate * nanosDelta) / OBOE_NANOS_PER_SECOND;
+}
+
+oboe_nanoseconds_t IsochronousClockModel::convertPositionToTime(
+ oboe_position_frames_t framePosition) const {
+ if (mState == STATE_STOPPED) {
+ return mMarkerNanoTime;
+ }
+ oboe_position_frames_t nextBurstIndex = (framePosition + mFramesPerBurst - 1) / mFramesPerBurst;
+ oboe_position_frames_t nextBurstPosition = mFramesPerBurst * nextBurstIndex;
+ oboe_position_frames_t framesDelta = nextBurstPosition - mMarkerFramePosition;
+ oboe_nanoseconds_t nanosDelta = convertDeltaPositionToTime(framesDelta);
+ oboe_nanoseconds_t time = (oboe_nanoseconds_t) (mMarkerNanoTime + nanosDelta);
+// ALOGI("IsochronousClockModel::convertPositionToTime: pos = %llu --> time = %llu",
+// (unsigned long long)framePosition,
+// (unsigned long long)time);
+ return time;
+}
+
+oboe_position_frames_t IsochronousClockModel::convertTimeToPosition(
+ oboe_nanoseconds_t nanoTime) const {
+ if (mState == STATE_STOPPED) {
+ return mMarkerFramePosition;
+ }
+ oboe_nanoseconds_t nanosDelta = nanoTime - mMarkerNanoTime;
+ oboe_position_frames_t framesDelta = convertDeltaTimeToPosition(nanosDelta);
+ oboe_position_frames_t nextBurstPosition = mMarkerFramePosition + framesDelta;
+ oboe_position_frames_t nextBurstIndex = nextBurstPosition / mFramesPerBurst;
+ oboe_position_frames_t position = nextBurstIndex * mFramesPerBurst;
+// ALOGI("IsochronousClockModel::convertTimeToPosition: time = %llu --> pos = %llu",
+// (unsigned long long)nanoTime,
+// (unsigned long long)position);
+// ALOGI("IsochronousClockModel::convertTimeToPosition: framesDelta = %llu, mFramesPerBurst = %d",
+// (long long) framesDelta, mFramesPerBurst);
+ return position;
+}
diff --git a/media/liboboe/src/client/IsochronousClockModel.h b/media/liboboe/src/client/IsochronousClockModel.h
new file mode 100644
index 0000000..97be325
--- /dev/null
+++ b/media/liboboe/src/client/IsochronousClockModel.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_ISOCHRONOUSCLOCKMODEL_H
+#define OBOE_ISOCHRONOUSCLOCKMODEL_H
+
+#include <stdint.h>
+#include <oboe/OboeAudio.h>
+
+namespace oboe {
+
+/**
+ * Model an isochronous data stream using occasional timestamps as input.
+ * This can be used to predict the position of the stream at a given time.
+ *
+ * This class is not thread safe and should only be called from one thread.
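+ *
+ * Typical use (a sketch, not mandated by this header): configure the model with
+ * setSampleRate() and setFramesPerBurst(), call start() when the stream starts,
+ * feed each incoming timestamp to processTimestamp(), and query
+ * convertTimeToPosition() to estimate the stream position at a given time.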
+ */
+class IsochronousClockModel {
+
+public:
+ IsochronousClockModel();
+ virtual ~IsochronousClockModel();
+
+ void start(oboe_nanoseconds_t nanoTime);
+ void stop(oboe_nanoseconds_t nanoTime);
+
+ void processTimestamp(oboe_position_frames_t framePosition, oboe_nanoseconds_t nanoTime);
+
+ /**
+ * @param sampleRate rate of the stream in frames per second
+ */
+ void setSampleRate(oboe_sample_rate_t sampleRate);
+
+ oboe_sample_rate_t getSampleRate() const {
+ return mSampleRate;
+ }
+
+ /**
+ * This must be set accurately in order to track the isochronous stream.
+ *
+ * @param framesPerBurst number of frames that stream advance at one time.
+ */
+ void setFramesPerBurst(oboe_size_frames_t framesPerBurst);
+
+ oboe_size_frames_t getFramesPerBurst() const {
+ return mFramesPerBurst;
+ }
+
+ /**
+ * Calculate an estimated time when the stream will be at that position.
+ *
+ * @param framePosition position of the stream in frames
+ * @return time in nanoseconds
+ */
+ oboe_nanoseconds_t convertPositionToTime(oboe_position_frames_t framePosition) const;
+
+ /**
+ * Calculate an estimated position where the stream will be at the specified time.
+ *
+ * @param nanoTime time of interest
+ * @return position in frames
+ */
+ oboe_position_frames_t convertTimeToPosition(oboe_nanoseconds_t nanoTime) const;
+
+ /**
+ * @param framesDelta difference in frames
+ * @return duration in nanoseconds
+ */
+ oboe_nanoseconds_t convertDeltaPositionToTime(oboe_position_frames_t framesDelta) const;
+
+ /**
+ * @param nanosDelta duration in nanoseconds
+ * @return frames that stream will advance in that time
+ */
+ oboe_position_frames_t convertDeltaTimeToPosition(oboe_nanoseconds_t nanosDelta) const;
+
+private:
+ enum clock_model_state_t {
+ STATE_STOPPED,
+ STATE_STARTING,
+ STATE_SYNCING,
+ STATE_RUNNING
+ };
+
+ oboe_sample_rate_t mSampleRate;
+ oboe_size_frames_t mFramesPerBurst;
+ int32_t mMaxLatenessInNanos;
+ oboe_position_frames_t mMarkerFramePosition;
+ oboe_nanoseconds_t mMarkerNanoTime;
+ int32_t mTimestampCount;
+ clock_model_state_t mState;
+
+ void update();
+};
+
+} /* namespace oboe */
+
+#endif //OBOE_ISOCHRONOUSCLOCKMODEL_H
diff --git a/media/liboboe/src/core/AudioStream.cpp b/media/liboboe/src/core/AudioStream.cpp
index f154002..cc654c3 100644
--- a/media/liboboe/src/core/AudioStream.cpp
+++ b/media/liboboe/src/core/AudioStream.cpp
@@ -18,6 +18,7 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <atomic>
#include <stdint.h>
#include <oboe/OboeAudio.h>
@@ -27,10 +28,10 @@
using namespace oboe;
-/*
- * AudioStream
- */
AudioStream::AudioStream() {
+ // mThread is a pthread_t of unknown size so we need memset.
+ memset(&mThread, 0, sizeof(mThread));
+ setPeriodNanoseconds(0);
}
oboe_result_t AudioStream::open(const AudioStreamBuilder& builder)
@@ -91,23 +92,51 @@
state = getState();
}
- if (nextState != NULL) {
+ if (nextState != nullptr) {
*nextState = state;
}
return (state == currentState) ? OBOE_ERROR_TIMEOUT : OBOE_OK;
}
+// This registers the app's background audio thread with the server before
+// passing control to the app. This gives the server an opportunity to boost
+// the thread's performance characteristics.
+void* AudioStream::wrapUserThread() {
+ void* procResult = nullptr;
+ mThreadRegistrationResult = registerThread();
+ if (mThreadRegistrationResult == OBOE_OK) {
+ // Call application procedure. This may take a very long time.
+ procResult = mThreadProc(mThreadArg);
+ ALOGD("AudioStream::mThreadProc() returned");
+ mThreadRegistrationResult = unregisterThread();
+ }
+ return procResult;
+}
+
+// This is the entry point for the new thread created by createThread().
+// It converts the 'C' function call to a C++ method call.
+static void* AudioStream_internalThreadProc(void* threadArg) {
+ AudioStream *audioStream = (AudioStream *) threadArg;
+ return audioStream->wrapUserThread();
+}
+
oboe_result_t AudioStream::createThread(oboe_nanoseconds_t periodNanoseconds,
- void *(*startRoutine)(void *), void *arg)
+ oboe_audio_thread_proc_t *threadProc,
+ void* threadArg)
{
if (mHasThread) {
return OBOE_ERROR_INVALID_STATE;
}
- if (startRoutine == NULL) {
+ if (threadProc == nullptr) {
return OBOE_ERROR_NULL;
}
- int err = pthread_create(&mThread, NULL, startRoutine, arg);
+ // Pass input parameters to the background thread.
+ mThreadProc = threadProc;
+ mThreadArg = threadArg;
+ setPeriodNanoseconds(periodNanoseconds);
+ int err = pthread_create(&mThread, nullptr, AudioStream_internalThreadProc, this);
if (err != 0) {
+ // TODO convert errno to oboe_result_t
return OBOE_ERROR_INTERNAL;
} else {
mHasThread = true;
@@ -115,7 +144,7 @@
}
}
-oboe_result_t AudioStream::joinThread(void **returnArg, oboe_nanoseconds_t timeoutNanoseconds)
+oboe_result_t AudioStream::joinThread(void** returnArg, oboe_nanoseconds_t timeoutNanoseconds)
{
if (!mHasThread) {
return OBOE_ERROR_INVALID_STATE;
@@ -128,7 +157,7 @@
int err = pthread_join(mThread, returnArg);
#endif
mHasThread = false;
- // TODO Just leaked a thread?
- return err ? OBOE_ERROR_INTERNAL : OBOE_OK;
+ // TODO convert errno to oboe_result_t
+ return err ? OBOE_ERROR_INTERNAL : mThreadRegistrationResult;
}
diff --git a/media/liboboe/src/core/AudioStream.h b/media/liboboe/src/core/AudioStream.h
index 8cbb091..c13ae9f 100644
--- a/media/liboboe/src/core/AudioStream.h
+++ b/media/liboboe/src/core/AudioStream.h
@@ -17,9 +17,11 @@
#ifndef OBOE_AUDIOSTREAM_H
#define OBOE_AUDIOSTREAM_H
-#include <unistd.h>
-#include <sys/types.h>
+#include <atomic>
+#include <stdint.h>
+#include <oboe/OboeDefinitions.h>
#include <oboe/OboeAudio.h>
+
#include "OboeUtilities.h"
#include "MonotonicCounter.h"
@@ -83,10 +85,25 @@
}
virtual oboe_result_t createThread(oboe_nanoseconds_t periodNanoseconds,
- void *(*start_routine)(void *), void *arg);
+ oboe_audio_thread_proc_t *threadProc,
+ void *threadArg);
virtual oboe_result_t joinThread(void **returnArg, oboe_nanoseconds_t timeoutNanoseconds);
+ virtual oboe_result_t registerThread() {
+ return OBOE_OK;
+ }
+
+ virtual oboe_result_t unregisterThread() {
+ return OBOE_OK;
+ }
+
+ /**
+ * Internal function used to call the audio thread passed by the user.
+ * It is unfortunately public because it needs to be called by a static 'C' function.
+ */
+ void* wrapUserThread();
+
// ============== Queries ===========================
virtual oboe_stream_state_t getState() const {
@@ -125,7 +142,7 @@
return mSamplesPerFrame;
}
- OboeDeviceId getDeviceId() const {
+ oboe_device_id_t getDeviceId() const {
return mDeviceId;
}
@@ -220,21 +237,42 @@
mState = state;
}
+
+
+protected:
MonotonicCounter mFramesWritten;
MonotonicCounter mFramesRead;
+ void setPeriodNanoseconds(oboe_nanoseconds_t periodNanoseconds) {
+ mPeriodNanoseconds.store(periodNanoseconds, std::memory_order_release);
+ }
+
+ oboe_nanoseconds_t getPeriodNanoseconds() {
+ return mPeriodNanoseconds.load(std::memory_order_acquire);
+ }
+
private:
// These do not change after open().
int32_t mSamplesPerFrame = OBOE_UNSPECIFIED;
oboe_sample_rate_t mSampleRate = OBOE_UNSPECIFIED;
oboe_stream_state_t mState = OBOE_STREAM_STATE_UNINITIALIZED;
- OboeDeviceId mDeviceId = OBOE_UNSPECIFIED;
+ oboe_device_id_t mDeviceId = OBOE_UNSPECIFIED;
oboe_sharing_mode_t mSharingMode = OBOE_SHARING_MODE_LEGACY;
- oboe_audio_format_t mFormat = OBOE_UNSPECIFIED;
+ oboe_audio_format_t mFormat = OBOE_AUDIO_FORMAT_UNSPECIFIED;
oboe_direction_t mDirection = OBOE_DIRECTION_OUTPUT;
+ // background thread ----------------------------------
bool mHasThread = false;
- pthread_t mThread;
+ pthread_t mThread; // initialized in constructor
+
+ // These are set by the application thread and then read by the audio pthread.
+ std::atomic<oboe_nanoseconds_t> mPeriodNanoseconds; // for tuning SCHED_FIFO threads
+ // TODO make atomic?
+ oboe_audio_thread_proc_t* mThreadProc = nullptr;
+ void* mThreadArg = nullptr;
+ oboe_result_t mThreadRegistrationResult = OBOE_OK;
+
+
};
} /* namespace oboe */
diff --git a/media/liboboe/src/core/AudioStreamBuilder.cpp b/media/liboboe/src/core/AudioStreamBuilder.cpp
index 56e6706..37e1378 100644
--- a/media/liboboe/src/core/AudioStreamBuilder.cpp
+++ b/media/liboboe/src/core/AudioStreamBuilder.cpp
@@ -18,11 +18,17 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
-#include <sys/types.h>
-#include "AudioStream.h"
-#include "AudioStreamBuilder.h"
-#include "AudioStreamRecord.h"
-#include "AudioStreamTrack.h"
+#include <new>
+#include <stdint.h>
+
+#include <oboe/OboeDefinitions.h>
+#include <oboe/OboeAudio.h>
+
+#include "client/AudioStreamInternal.h"
+#include "core/AudioStream.h"
+#include "core/AudioStreamBuilder.h"
+#include "legacy/AudioStreamRecord.h"
+#include "legacy/AudioStreamTrack.h"
using namespace oboe;
@@ -35,15 +41,15 @@
AudioStreamBuilder::~AudioStreamBuilder() {
}
-oboe_result_t AudioStreamBuilder::build(AudioStream **streamPtr) {
+oboe_result_t AudioStreamBuilder::build(AudioStream** streamPtr) {
// TODO Is there a better place to put the code that decides which class to use?
- AudioStream *audioStream = nullptr;
+ AudioStream* audioStream = nullptr;
const oboe_sharing_mode_t sharingMode = getSharingMode();
switch (getDirection()) {
case OBOE_DIRECTION_INPUT:
switch (sharingMode) {
case OBOE_SHARING_MODE_LEGACY:
- audioStream = new AudioStreamRecord();
+ audioStream = new(std::nothrow) AudioStreamRecord();
break;
default:
ALOGE("AudioStreamBuilder(): bad sharing mode = %d", sharingMode);
@@ -54,7 +60,10 @@
case OBOE_DIRECTION_OUTPUT:
switch (sharingMode) {
case OBOE_SHARING_MODE_LEGACY:
- audioStream = new AudioStreamTrack();
+ audioStream = new(std::nothrow) AudioStreamTrack();
+ break;
+ case OBOE_SHARING_MODE_EXCLUSIVE:
+ audioStream = new(std::nothrow) AudioStreamInternal();
break;
default:
ALOGE("AudioStreamBuilder(): bad sharing mode = %d", sharingMode);
diff --git a/media/liboboe/src/core/AudioStreamBuilder.h b/media/liboboe/src/core/AudioStreamBuilder.h
index 3f98ebb..ec17eb6 100644
--- a/media/liboboe/src/core/AudioStreamBuilder.h
+++ b/media/liboboe/src/core/AudioStreamBuilder.h
@@ -17,7 +17,11 @@
#ifndef OBOE_AUDIOSTREAMBUILDER_H
#define OBOE_AUDIOSTREAMBUILDER_H
+#include <stdint.h>
+
+#include <oboe/OboeDefinitions.h>
#include <oboe/OboeAudio.h>
+
#include "AudioStream.h"
namespace oboe {
@@ -38,7 +42,7 @@
/**
* This is also known as channelCount.
*/
- AudioStreamBuilder *setSamplesPerFrame(int samplesPerFrame) {
+ AudioStreamBuilder* setSamplesPerFrame(int samplesPerFrame) {
mSamplesPerFrame = samplesPerFrame;
return this;
}
@@ -47,7 +51,7 @@
return mDirection;
}
- AudioStreamBuilder *setDirection(oboe_direction_t direction) {
+ AudioStreamBuilder* setDirection(oboe_direction_t direction) {
mDirection = direction;
return this;
}
@@ -56,7 +60,7 @@
return mSampleRate;
}
- AudioStreamBuilder *setSampleRate(oboe_sample_rate_t sampleRate) {
+ AudioStreamBuilder* setSampleRate(oboe_sample_rate_t sampleRate) {
mSampleRate = sampleRate;
return this;
}
@@ -74,16 +78,16 @@
return mSharingMode;
}
- AudioStreamBuilder *setSharingMode(oboe_sharing_mode_t sharingMode) {
+ AudioStreamBuilder* setSharingMode(oboe_sharing_mode_t sharingMode) {
mSharingMode = sharingMode;
return this;
}
- OboeDeviceId getDeviceId() const {
+ oboe_device_id_t getDeviceId() const {
return mDeviceId;
}
- AudioStreamBuilder *setDeviceId(OboeDeviceId deviceId) {
+ AudioStreamBuilder* setDeviceId(oboe_device_id_t deviceId) {
mDeviceId = deviceId;
return this;
}
@@ -93,9 +97,9 @@
private:
int32_t mSamplesPerFrame = OBOE_UNSPECIFIED;
oboe_sample_rate_t mSampleRate = OBOE_UNSPECIFIED;
- OboeDeviceId mDeviceId = OBOE_UNSPECIFIED; // TODO need better default
+ oboe_device_id_t mDeviceId = OBOE_DEVICE_UNSPECIFIED;
oboe_sharing_mode_t mSharingMode = OBOE_SHARING_MODE_LEGACY;
- oboe_audio_format_t mFormat = OBOE_UNSPECIFIED;
+ oboe_audio_format_t mFormat = OBOE_AUDIO_FORMAT_UNSPECIFIED;
oboe_direction_t mDirection = OBOE_DIRECTION_OUTPUT;
};
diff --git a/media/liboboe/src/core/OboeAudio.cpp b/media/liboboe/src/core/OboeAudio.cpp
index a02f226..d98ca36 100644
--- a/media/liboboe/src/core/OboeAudio.cpp
+++ b/media/liboboe/src/core/OboeAudio.cpp
@@ -23,21 +23,13 @@
#include <oboe/OboeDefinitions.h>
#include <oboe/OboeAudio.h>
+
#include "AudioStreamBuilder.h"
#include "AudioStream.h"
#include "AudioClock.h"
+#include "client/AudioStreamInternal.h"
#include "HandleTracker.h"
-// temporary, as I stage in the MMAP/NOIRQ support, do not review
-#ifndef OBOE_SUPPORT_MMAP
-#define OBOE_SUPPORT_MMAP 0
-#endif
-
-#if OBOE_SUPPORT_MMAP
-#include "AudioStreamInternal.h"
-#include "OboeServiceGateway.h"
-#endif
-
using namespace oboe;
// This is not the maximum theoretic possible number of handles that the HandlerTracker
@@ -71,6 +63,8 @@
return OBOE_ERROR_NULL; \
}
+// Static data.
+// TODO static constructors are discouraged, alternatives?
static HandleTracker sHandleTracker(OBOE_MAX_HANDLES);
typedef enum
@@ -81,9 +75,6 @@
} oboe_handle_type_t;
static_assert(OBOE_HANDLE_TYPE_COUNT <= HANDLE_TRACKER_MAX_TYPES, "Too many handle types.");
-#if OBOE_SUPPORT_MMAP
-static OboeServiceGateway sOboeServiceGateway;
-#endif
#define OBOE_CASE_ENUM(name) case name: return #name
@@ -165,13 +156,21 @@
}
OBOE_API oboe_result_t OboeStreamBuilder_setDeviceId(OboeStreamBuilder builder,
- OboeDeviceId deviceId)
+ oboe_device_id_t deviceId)
{
AudioStreamBuilder *streamBuilder = CONVERT_BUILDER_HANDLE_OR_RETURN();
streamBuilder->setDeviceId(deviceId);
return OBOE_OK;
}
+OBOE_API oboe_result_t OboeStreamBuilder_getDeviceId(OboeStreamBuilder builder,
+ oboe_device_id_t *deviceId)
+{
+ AudioStreamBuilder *streamBuilder = COMMON_GET_FROM_BUILDER_OR_RETURN(deviceId);
+ *deviceId = streamBuilder->getDeviceId();
+ return OBOE_OK;
+}
+
OBOE_API oboe_result_t OboeStreamBuilder_setSampleRate(OboeStreamBuilder builder,
oboe_sample_rate_t sampleRate)
{
@@ -399,10 +398,10 @@
OBOE_API oboe_result_t OboeStream_createThread(OboeStream stream,
oboe_nanoseconds_t periodNanoseconds,
- void *(*startRoutine)(void *), void *arg)
+ oboe_audio_thread_proc_t *threadProc, void *arg)
{
AudioStream *audioStream = CONVERT_STREAM_HANDLE_OR_RETURN();
- return audioStream->createThread(periodNanoseconds, startRoutine, arg);
+ return audioStream->createThread(periodNanoseconds, threadProc, arg);
}
OBOE_API oboe_result_t OboeStream_joinThread(OboeStream stream,
@@ -513,6 +512,14 @@
return OBOE_OK;
}
+OBOE_API oboe_result_t OboeStream_getDeviceId(OboeStream stream,
+ oboe_device_id_t *deviceId)
+{
+ AudioStream *audioStream = COMMON_GET_FROM_STREAM_OR_RETURN(deviceId);
+ *deviceId = audioStream->getDeviceId();
+ return OBOE_OK;
+}
+
OBOE_API oboe_result_t OboeStream_getSharingMode(OboeStream stream,
oboe_sharing_mode_t *sharingMode)
{
diff --git a/media/liboboe/src/fifo/FifoBuffer.cpp b/media/liboboe/src/fifo/FifoBuffer.cpp
new file mode 100644
index 0000000..c5489f1
--- /dev/null
+++ b/media/liboboe/src/fifo/FifoBuffer.cpp
@@ -0,0 +1,186 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cstring>
+#include <unistd.h>
+
+#define LOG_TAG "FifoBuffer"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "FifoControllerBase.h"
+#include "FifoController.h"
+#include "FifoControllerIndirect.h"
+#include "FifoBuffer.h"
+
+FifoBuffer::FifoBuffer(int32_t bytesPerFrame, fifo_frames_t capacityInFrames)
+ : mFrameCapacity(capacityInFrames)
+ , mBytesPerFrame(bytesPerFrame)
+ , mStorage(nullptr)
+ , mFramesReadCount(0)
+ , mFramesUnderrunCount(0)
+ , mUnderrunCount(0)
+{
+ // TODO Handle possible failures to allocate. Move out of constructor?
+ mFifo = new FifoController(capacityInFrames, capacityInFrames);
+ // allocate buffer
+ int32_t bytesPerBuffer = bytesPerFrame * capacityInFrames;
+ mStorage = new uint8_t[bytesPerBuffer];
+ mStorageOwned = true;
+ ALOGD("FifoBuffer: capacityInFrames = %d, bytesPerFrame = %d",
+ capacityInFrames, bytesPerFrame);
+}
+
+FifoBuffer::FifoBuffer( int32_t bytesPerFrame,
+ fifo_frames_t capacityInFrames,
+ fifo_counter_t * readIndexAddress,
+ fifo_counter_t * writeIndexAddress,
+ void * dataStorageAddress
+ )
+ : mFrameCapacity(capacityInFrames)
+ , mBytesPerFrame(bytesPerFrame)
+ , mStorage(static_cast<uint8_t *>(dataStorageAddress))
+ , mFramesReadCount(0)
+ , mFramesUnderrunCount(0)
+ , mUnderrunCount(0)
+{
+ // TODO Handle possible failures to allocate. Move out of constructor?
+ mFifo = new FifoControllerIndirect(capacityInFrames,
+ capacityInFrames,
+ readIndexAddress,
+ writeIndexAddress);
+ mStorageOwned = false;
+ ALOGD("FifoProcessor: capacityInFrames = %d, bytesPerFrame = %d",
+ capacityInFrames, bytesPerFrame);
+}
+
+FifoBuffer::~FifoBuffer() {
+ if (mStorageOwned) {
+ delete[] mStorage;
+ }
+ delete mFifo;
+}
+
+
+int32_t FifoBuffer::convertFramesToBytes(fifo_frames_t frames) {
+ return frames * mBytesPerFrame;
+}
+
+fifo_frames_t FifoBuffer::read(void *buffer, fifo_frames_t numFrames) {
+ size_t numBytes;
+ fifo_frames_t framesAvailable = mFifo->getFullFramesAvailable();
+ fifo_frames_t framesToRead = numFrames;
+ // Is there enough data in the FIFO
+ if (framesToRead > framesAvailable) {
+ framesToRead = framesAvailable;
+ }
+ if (framesToRead == 0) {
+ return 0;
+ }
+
+ fifo_frames_t readIndex = mFifo->getReadIndex();
+ uint8_t *destination = (uint8_t *) buffer;
+ uint8_t *source = &mStorage[convertFramesToBytes(readIndex)];
+ if ((readIndex + framesToRead) > mFrameCapacity) {
+ // read in two parts, first part here
+ fifo_frames_t frames1 = mFrameCapacity - readIndex;
+ numBytes = convertFramesToBytes(frames1);
+ memcpy(destination, source, numBytes);
+ destination += numBytes;
+ // read second part
+ source = &mStorage[0];
+ fifo_frames_t frames2 = framesToRead - frames1;
+ numBytes = convertFramesToBytes(frames2);
+ memcpy(destination, source, numBytes);
+ } else {
+ // just read in one shot
+ numBytes = convertFramesToBytes(framesToRead);
+ memcpy(destination, source, numBytes);
+ }
+ mFifo->advanceReadIndex(framesToRead);
+
+ return framesToRead;
+}
+
+fifo_frames_t FifoBuffer::write(const void *buffer, fifo_frames_t framesToWrite) {
+ fifo_frames_t framesAvailable = mFifo->getEmptyFramesAvailable();
+// ALOGD("FifoBuffer::write() framesToWrite = %d, framesAvailable = %d",
+// framesToWrite, framesAvailable);
+ if (framesToWrite > framesAvailable) {
+ framesToWrite = framesAvailable;
+ }
+ if (framesToWrite <= 0) {
+ return 0;
+ }
+
+ size_t numBytes;
+ fifo_frames_t writeIndex = mFifo->getWriteIndex();
+ int byteIndex = convertFramesToBytes(writeIndex);
+ const uint8_t *source = (const uint8_t *) buffer;
+ uint8_t *destination = &mStorage[byteIndex];
+ if ((writeIndex + framesToWrite) > mFrameCapacity) {
+ // write in two parts, first part here
+ fifo_frames_t frames1 = mFrameCapacity - writeIndex;
+ numBytes = convertFramesToBytes(frames1);
+ memcpy(destination, source, numBytes);
+// ALOGD("FifoBuffer::write(%p to %p, numBytes = %d", source, destination, numBytes);
+ // write second part
+ source += convertFramesToBytes(frames1);
+ destination = &mStorage[0];
+ fifo_frames_t framesLeft = framesToWrite - frames1;
+ numBytes = convertFramesToBytes(framesLeft);
+// ALOGD("FifoBuffer::write(%p to %p, numBytes = %d", source, destination, numBytes);
+ memcpy(destination, source, numBytes);
+ } else {
+ // just write in one shot
+ numBytes = convertFramesToBytes(framesToWrite);
+// ALOGD("FifoBuffer::write(%p to %p, numBytes = %d", source, destination, numBytes);
+ memcpy(destination, source, numBytes);
+ }
+ mFifo->advanceWriteIndex(framesToWrite);
+
+ return framesToWrite;
+}
+
+fifo_frames_t FifoBuffer::readNow(void *buffer, fifo_frames_t numFrames) {
+ mLastReadSize = numFrames;
+ fifo_frames_t framesLeft = numFrames;
+ fifo_frames_t framesRead = read(buffer, numFrames);
+ framesLeft -= framesRead;
+ mFramesReadCount += framesRead;
+ mFramesUnderrunCount += framesLeft;
+ // Zero out any samples we could not set.
+ if (framesLeft > 0) {
+ mUnderrunCount++;
+ int32_t bytesToZero = convertFramesToBytes(framesLeft);
+ memset(buffer, 0, bytesToZero);
+ }
+
+ return framesRead;
+}
+
+fifo_frames_t FifoBuffer::getThreshold() {
+ return mFifo->getThreshold();
+}
+
+void FifoBuffer::setThreshold(fifo_frames_t threshold) {
+ mFifo->setThreshold(threshold);
+}
+
+fifo_frames_t FifoBuffer::getBufferCapacityInFrames() {
+ return mFifo->getCapacity();
+}
+
diff --git a/media/liboboe/src/fifo/FifoBuffer.h b/media/liboboe/src/fifo/FifoBuffer.h
new file mode 100644
index 0000000..faa9ae2
--- /dev/null
+++ b/media/liboboe/src/fifo/FifoBuffer.h
@@ -0,0 +1,87 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FIFO_FIFO_BUFFER_H
+#define FIFO_FIFO_BUFFER_H
+
+#include <stdint.h>
+
+#include "FifoControllerBase.h"
+
+class FifoBuffer {
+public:
+ FifoBuffer(int32_t bytesPerFrame, fifo_frames_t capacityInFrames);
+
+ FifoBuffer(int32_t bytesPerFrame,
+ fifo_frames_t capacityInFrames,
+ fifo_counter_t * readCounterAddress,
+ fifo_counter_t * writeCounterAddress,
+ void * dataStorageAddress);
+
+ ~FifoBuffer();
+
+ int32_t convertFramesToBytes(fifo_frames_t frames);
+
+ fifo_frames_t read(void *destination, fifo_frames_t framesToRead);
+
+ fifo_frames_t write(const void *source, fifo_frames_t framesToWrite);
+
+ fifo_frames_t getThreshold();
+ void setThreshold(fifo_frames_t threshold);
+
+ fifo_frames_t getBufferCapacityInFrames();
+
+ fifo_frames_t readNow(void *buffer, fifo_frames_t numFrames);
+
+ int64_t getNextReadTime(int32_t frameRate);
+
+ int32_t getUnderrunCount() const { return mUnderrunCount; }
+
+ FifoControllerBase *getFifoControllerBase() { return mFifo; }
+
+ int32_t getBytesPerFrame() {
+ return mBytesPerFrame;
+ }
+
+ fifo_counter_t getReadCounter() {
+ return mFifo->getReadCounter();
+ }
+
+ void setReadCounter(fifo_counter_t n) {
+ mFifo->setReadCounter(n);
+ }
+
+ fifo_counter_t getWriteCounter() {
+ return mFifo->getWriteCounter();
+ }
+
+ void setWriteCounter(fifo_counter_t n) {
+ mFifo->setWriteCounter(n);
+ }
+
+private:
+ const fifo_frames_t mFrameCapacity;
+ const int32_t mBytesPerFrame;
+ uint8_t * mStorage;
+ bool mStorageOwned; // did this object allocate the storage?
+ FifoControllerBase *mFifo;
+ fifo_counter_t mFramesReadCount;
+ fifo_counter_t mFramesUnderrunCount;
+ int32_t mUnderrunCount; // need? just use frames
+ int32_t mLastReadSize;
+};
+
+#endif //FIFO_FIFO_BUFFER_H
diff --git a/media/liboboe/src/fifo/FifoController.h b/media/liboboe/src/fifo/FifoController.h
new file mode 100644
index 0000000..7434634
--- /dev/null
+++ b/media/liboboe/src/fifo/FifoController.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FIFO_FIFO_CONTROLLER_H
+#define FIFO_FIFO_CONTROLLER_H
+
+#include <stdint.h>
+#include <atomic>
+
+#include "FifoControllerBase.h"
+
+/**
+ * A FIFO with counters contained in the class.
+ */
+class FifoController : public FifoControllerBase
+{
+public:
+ FifoController(fifo_counter_t bufferSize, fifo_counter_t threshold)
+ : FifoControllerBase(bufferSize, threshold)
+ , mReadCounter(0)
+ , mWriteCounter(0)
+ {}
+
+ virtual ~FifoController() {}
+
+ // TODO review use of memory barriers, probably incorrect
+ virtual fifo_counter_t getReadCounter() override {
+ return mReadCounter.load(std::memory_order_acquire);
+ }
+ virtual void setReadCounter(fifo_counter_t n) override {
+ mReadCounter.store(n, std::memory_order_release);
+ }
+ virtual fifo_counter_t getWriteCounter() override {
+ return mWriteCounter.load(std::memory_order_acquire);
+ }
+ virtual void setWriteCounter(fifo_counter_t n) override {
+ mWriteCounter.store(n, std::memory_order_release);
+ }
+
+private:
+ std::atomic<fifo_counter_t> mReadCounter;
+ std::atomic<fifo_counter_t> mWriteCounter;
+};
+
+
+#endif //FIFO_FIFO_CONTROLLER_H
diff --git a/media/liboboe/src/fifo/FifoControllerBase.cpp b/media/liboboe/src/fifo/FifoControllerBase.cpp
new file mode 100644
index 0000000..33a253e
--- /dev/null
+++ b/media/liboboe/src/fifo/FifoControllerBase.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "FifoControllerBase"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include "FifoControllerBase.h"
+
+FifoControllerBase::FifoControllerBase(fifo_frames_t capacity, fifo_frames_t threshold)
+ : mCapacity(capacity)
+ , mThreshold(threshold)
+{
+}
+
+FifoControllerBase::~FifoControllerBase() {
+}
+
+fifo_frames_t FifoControllerBase::getFullFramesAvailable() {
+ return (fifo_frames_t) (getWriteCounter() - getReadCounter());
+}
+
+fifo_frames_t FifoControllerBase::getReadIndex() {
+ // % works with non-power of two sizes
+ return (fifo_frames_t) (getReadCounter() % mCapacity);
+}
+
+void FifoControllerBase::advanceReadIndex(fifo_frames_t numFrames) {
+ setReadCounter(getReadCounter() + numFrames);
+}
+
+fifo_frames_t FifoControllerBase::getEmptyFramesAvailable() {
+ return (int32_t)(mThreshold - getFullFramesAvailable());
+}
+
+fifo_frames_t FifoControllerBase::getWriteIndex() {
+ // % works with non-power of two sizes
+ return (fifo_frames_t) (getWriteCounter() % mCapacity);
+}
+
+void FifoControllerBase::advanceWriteIndex(fifo_frames_t numFrames) {
+ setWriteCounter(getWriteCounter() + numFrames);
+}
+
+void FifoControllerBase::setThreshold(fifo_frames_t threshold) {
+ mThreshold = threshold;
+}
diff --git a/media/liboboe/src/fifo/FifoControllerBase.h b/media/liboboe/src/fifo/FifoControllerBase.h
new file mode 100644
index 0000000..c543519
--- /dev/null
+++ b/media/liboboe/src/fifo/FifoControllerBase.h
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FIFO_FIFO_CONTROLLER_BASE_H
+#define FIFO_FIFO_CONTROLLER_BASE_H
+
+#include <stdint.h>
+
+typedef int64_t fifo_counter_t;
+typedef int32_t fifo_frames_t;
+
+/**
+ * Manage the read/write indices of a circular buffer.
+ *
+ * The caller is responsible for reading and writing the actual data.
+ * Note that the span of available frames may not be contiguous. They
+ * may wrap around from the end to the beginning of the buffer. In that
+ * case the data must be read or written in at least two blocks of frames.
+ *
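+ * For example (hypothetical numbers): with a capacity of 8 frames, a read index of 6
+ * and 4 full frames available, the reader copies frames at indices 6..7 first and then
+ * frames at indices 0..1 before calling advanceReadIndex(4).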
+ */
+class FifoControllerBase {
+
+public:
+ /**
+ * Constructor for FifoControllerBase
+ * @param capacity Total size of the circular buffer in frames.
+ * @param threshold Number of frames to fill. Must be less than capacity.
+ */
+ FifoControllerBase(fifo_frames_t capacity, fifo_frames_t threshold);
+
+ virtual ~FifoControllerBase();
+
+ // Abstract methods to be implemented in subclasses.
+ /**
+ * @return Counter used by the reader of the FIFO.
+ */
+ virtual fifo_counter_t getReadCounter() = 0;
+
+ /**
+ * This is normally only used internally.
+ * @param count Number of frames that have been read.
+ */
+ virtual void setReadCounter(fifo_counter_t count) = 0;
+
+ /**
+ * @return Counter used by the writer of the FIFO.
+ */
+ virtual fifo_counter_t getWriteCounter() = 0;
+
+ /**
+ * This is normally only used internally.
+ * @param count Number of frames that have been written.
+ */
+ virtual void setWriteCounter(fifo_counter_t count) = 0;
+
+ /**
+ * This may be negative if an unthrottled reader has read beyond the available data.
+ * @return number of valid frames available to read. Never read more than this.
+ */
+ fifo_frames_t getFullFramesAvailable();
+
+ /**
+ * The index in a circular buffer of the next frame to read.
+ */
+ fifo_frames_t getReadIndex();
+
+ /**
+ * @param numFrames number of frames to advance the read index
+ */
+ void advanceReadIndex(fifo_frames_t numFrames);
+
+ /**
+ * @return number of frames that can be written. Never write more than this.
+ */
+ fifo_frames_t getEmptyFramesAvailable();
+
+ /**
+ * The index in a circular buffer of the next frame to write.
+ */
+ fifo_frames_t getWriteIndex();
+
+ /**
+ * @param numFrames number of frames to advance the write index
+ */
+ void advanceWriteIndex(fifo_frames_t numFrames);
+
+ /**
+ * You can request that the buffer not be filled above a maximum
+ * number of frames.
+ * @param threshold effective size of the buffer
+ */
+ void setThreshold(fifo_frames_t threshold);
+
+ fifo_frames_t getThreshold() const {
+ return mThreshold;
+ }
+
+ fifo_frames_t getCapacity() const {
+ return mCapacity;
+ }
+
+
+private:
+ fifo_frames_t mCapacity;
+ fifo_frames_t mThreshold;
+};
+
+#endif // FIFO_FIFO_CONTROLLER_BASE_H
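
A minimal sketch of how a caller might drive the index API above. Assumptions: one int16_t sample per frame, the liboboe sources are available to link against, and LocalFifoController is a hypothetical concrete subclass used only for illustration (the real controllers, such as FifoControllerIndirect below, keep their counters in shared memory).

#include <stdint.h>
#include <string.h>

#include "FifoControllerBase.h"

// Hypothetical in-process controller: counters are plain members.
class LocalFifoController : public FifoControllerBase {
public:
    LocalFifoController(fifo_frames_t capacity, fifo_frames_t threshold)
        : FifoControllerBase(capacity, threshold) {}
    virtual fifo_counter_t getReadCounter() override { return mReadCounter; }
    virtual void setReadCounter(fifo_counter_t count) override { mReadCounter = count; }
    virtual fifo_counter_t getWriteCounter() override { return mWriteCounter; }
    virtual void setWriteCounter(fifo_counter_t count) override { mWriteCounter = count; }
private:
    fifo_counter_t mReadCounter = 0;
    fifo_counter_t mWriteCounter = 0;
};

// Write as many frames as fit, copying in two blocks when the free space
// wraps around the end of the buffer, then publish them by advancing the
// write index.
fifo_frames_t writeFrames(FifoControllerBase &fifo, int16_t *buffer,
                          const int16_t *source, fifo_frames_t numFrames) {
    fifo_frames_t available = fifo.getEmptyFramesAvailable();
    if (numFrames > available) numFrames = available;
    fifo_frames_t index = fifo.getWriteIndex();
    fifo_frames_t framesUntilEnd = fifo.getCapacity() - index;
    if (numFrames <= framesUntilEnd) {
        memcpy(&buffer[index], source, numFrames * sizeof(int16_t));
    } else {
        memcpy(&buffer[index], source, framesUntilEnd * sizeof(int16_t));
        memcpy(&buffer[0], &source[framesUntilEnd],
               (numFrames - framesUntilEnd) * sizeof(int16_t));
    }
    fifo.advanceWriteIndex(numFrames);
    return numFrames;
}

The reader side is symmetric: check getFullFramesAvailable(), copy from getReadIndex(), then call advanceReadIndex().
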
diff --git a/media/liboboe/src/fifo/FifoControllerIndirect.h b/media/liboboe/src/fifo/FifoControllerIndirect.h
new file mode 100644
index 0000000..1aaf9ea
--- /dev/null
+++ b/media/liboboe/src/fifo/FifoControllerIndirect.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FIFO_FIFO_CONTROLLER_INDIRECT_H
+#define FIFO_FIFO_CONTROLLER_INDIRECT_H
+
+#include <stdint.h>
+#include <atomic>
+
+#include "FifoControllerBase.h"
+
+/**
+ * A FifoControllerBase with counters external to the class.
+ *
+ * The actual counters may be stored in separate regions of shared memory
+ * with different access rights.
+ */
+class FifoControllerIndirect : public FifoControllerBase {
+
+public:
+ FifoControllerIndirect(fifo_frames_t capacity,
+ fifo_frames_t threshold,
+ fifo_counter_t * readCounterAddress,
+ fifo_counter_t * writeCounterAddress)
+ : FifoControllerBase(capacity, threshold)
+ , mReadCounterAddress((std::atomic<fifo_counter_t> *) readCounterAddress)
+ , mWriteCounterAddress((std::atomic<fifo_counter_t> *) writeCounterAddress)
+ {
+ setReadCounter(0);
+ setWriteCounter(0);
+ }
+ virtual ~FifoControllerIndirect() {};
+
+ // TODO review use of memory barriers, probably incorrect
+ virtual fifo_counter_t getReadCounter() override {
+ return mReadCounterAddress->load(std::memory_order_acquire);
+ }
+
+ virtual void setReadCounter(fifo_counter_t count) override {
+ mReadCounterAddress->store(count, std::memory_order_release);
+ }
+
+ virtual fifo_counter_t getWriteCounter() override {
+ return mWriteCounterAddress->load(std::memory_order_acquire);
+ }
+
+ virtual void setWriteCounter(fifo_counter_t count) override {
+ mWriteCounterAddress->store(count, std::memory_order_release);
+ }
+
+private:
+ std::atomic<fifo_counter_t> * mReadCounterAddress;
+ std::atomic<fifo_counter_t> * mWriteCounterAddress;
+};
+
+#endif //FIFO_FIFO_CONTROLLER_INDIRECT_H
diff --git a/media/liboboe/src/fifo/README.md b/media/liboboe/src/fifo/README.md
new file mode 100644
index 0000000..61ffbae
--- /dev/null
+++ b/media/liboboe/src/fifo/README.md
@@ -0,0 +1,9 @@
+Simple atomic FIFO for passing data between threads or processes.
+This does not require mutexes.
+
+One thread modifies the readCounter and the other thread modifies the writeCounter.
+
+TODO The internal low-level implementation might be merged in some form with audio_utils fifo
+and/or FMQ [after confirming that requirements are met].
+The higher-level parts related to Oboe's use of the FIFO, such as the API, fds, relative
+location of indices and data buffer, mapping, and allocation of memory, will probably be kept as-is.
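
To make the layout concrete, here is a hedged sketch of wiring the two counters up through a shared-memory mapping; the fd would come from ashmem_create_region() as in tests/test_marshalling.cpp, and createSharedFifo() is an illustrative helper, not part of this patch.

#include <stddef.h>
#include <stdint.h>
#include <sys/mman.h>

#include "FifoControllerIndirect.h"

// Read counter at offset 0, write counter right after it, sample data after
// both (the same relative layout used by the marshalling test below).
FifoControllerIndirect *createSharedFifo(int fd, size_t regionSizeBytes,
                                         fifo_frames_t capacityFrames) {
    void *base = mmap(nullptr, regionSizeBytes, PROT_READ | PROT_WRITE,
                      MAP_SHARED, fd, 0);
    if (base == MAP_FAILED) {
        return nullptr;
    }
    fifo_counter_t *readCounter = (fifo_counter_t *) base;
    fifo_counter_t *writeCounter = readCounter + 1;
    // The constructor zeroes both counters, so in a real client/service split
    // only the owning side would construct the controller like this.
    return new FifoControllerIndirect(capacityFrames, capacityFrames,
                                      readCounter, writeCounter);
}
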
diff --git a/media/liboboe/src/legacy/AudioStreamRecord.cpp b/media/liboboe/src/legacy/AudioStreamRecord.cpp
index f130cad..5854974 100644
--- a/media/liboboe/src/legacy/AudioStreamRecord.cpp
+++ b/media/liboboe/src/legacy/AudioStreamRecord.cpp
@@ -57,7 +57,7 @@
? 2 : getSamplesPerFrame();
audio_channel_mask_t channelMask = audio_channel_in_mask_from_count(samplesPerFrame);
- AudioRecord::callback_t callback = NULL;
+ AudioRecord::callback_t callback = nullptr;
audio_input_flags_t flags = (audio_input_flags_t) AUDIO_INPUT_FLAG_NONE;
// TODO implement an unspecified Android format then use that.
@@ -75,14 +75,14 @@
0, // size_t frameCount = 0,
callback,
- NULL, // void* user = NULL,
+ nullptr, // void* user = nullptr,
0, // uint32_t notificationFrames = 0,
AUDIO_SESSION_ALLOCATE,
AudioRecord::TRANSFER_DEFAULT,
flags
// int uid = -1,
// pid_t pid = -1,
- // const audio_attributes_t* pAttributes = NULL
+ // const audio_attributes_t* pAttributes = nullptr
);
// Did we get a valid track?
@@ -115,7 +115,7 @@
oboe_result_t AudioStreamRecord::requestStart()
{
- if (mAudioRecord.get() == NULL) {
+ if (mAudioRecord.get() == nullptr) {
return OBOE_ERROR_INVALID_STATE;
}
// Get current position so we can detect when the track is playing.
@@ -142,7 +142,7 @@
}
oboe_result_t AudioStreamRecord::requestStop() {
- if (mAudioRecord.get() == NULL) {
+ if (mAudioRecord.get() == nullptr) {
return OBOE_ERROR_INVALID_STATE;
}
setState(OBOE_STREAM_STATE_STOPPING);
diff --git a/media/liboboe/src/legacy/AudioStreamTrack.cpp b/media/liboboe/src/legacy/AudioStreamTrack.cpp
index 5205fc5..b2c4ee1 100644
--- a/media/liboboe/src/legacy/AudioStreamTrack.cpp
+++ b/media/liboboe/src/legacy/AudioStreamTrack.cpp
@@ -58,10 +58,10 @@
int32_t samplesPerFrame = (getSamplesPerFrame() == OBOE_UNSPECIFIED)
? 2 : getSamplesPerFrame();
audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(samplesPerFrame);
- ALOGE("AudioStreamTrack::open(), samplesPerFrame = %d, channelMask = 0x%08x",
+ ALOGD("AudioStreamTrack::open(), samplesPerFrame = %d, channelMask = 0x%08x",
samplesPerFrame, channelMask);
- AudioTrack::callback_t callback = NULL;
+ AudioTrack::callback_t callback = nullptr;
// TODO add more performance options
audio_output_flags_t flags = (audio_output_flags_t) AUDIO_OUTPUT_FLAG_FAST;
size_t frameCount = 0;
@@ -78,14 +78,15 @@
frameCount,
flags,
callback,
- NULL, // user callback data
- 0, // notificationFrames
+ nullptr, // user callback data
+ 0, // notificationFrames
AUDIO_SESSION_ALLOCATE,
AudioTrack::transfer_type::TRANSFER_SYNC // TODO - this does not allow FAST
);
// Did we get a valid track?
status_t status = mAudioTrack->initCheck();
+ ALOGD("AudioStreamTrack::open(), initCheck() returned %d", status);
// FIXME - this should work - if (status != NO_ERROR) {
// But initCheck() is returning 1 !
if (status < 0) {
@@ -116,7 +117,7 @@
oboe_result_t AudioStreamTrack::requestStart()
{
- if (mAudioTrack.get() == NULL) {
+ if (mAudioTrack.get() == nullptr) {
return OBOE_ERROR_INVALID_STATE;
}
// Get current position so we can detect when the track is playing.
@@ -135,7 +136,7 @@
oboe_result_t AudioStreamTrack::requestPause()
{
- if (mAudioTrack.get() == NULL) {
+ if (mAudioTrack.get() == nullptr) {
return OBOE_ERROR_INVALID_STATE;
} else if (getState() != OBOE_STREAM_STATE_STARTING
&& getState() != OBOE_STREAM_STATE_STARTED) {
@@ -152,7 +153,7 @@
}
oboe_result_t AudioStreamTrack::requestFlush() {
- if (mAudioTrack.get() == NULL) {
+ if (mAudioTrack.get() == nullptr) {
return OBOE_ERROR_INVALID_STATE;
} else if (getState() != OBOE_STREAM_STATE_PAUSED) {
return OBOE_ERROR_INVALID_STATE;
@@ -165,7 +166,7 @@
}
oboe_result_t AudioStreamTrack::requestStop() {
- if (mAudioTrack.get() == NULL) {
+ if (mAudioTrack.get() == nullptr) {
return OBOE_ERROR_INVALID_STATE;
}
setState(OBOE_STREAM_STATE_STOPPING);
diff --git a/media/liboboe/src/utility/AudioClock.h b/media/liboboe/src/utility/AudioClock.h
index da2f74a..1a5c209 100644
--- a/media/liboboe/src/utility/AudioClock.h
+++ b/media/liboboe/src/utility/AudioClock.h
@@ -17,10 +17,10 @@
#ifndef UTILITY_AUDIOCLOCK_H
#define UTILITY_AUDIOCLOCK_H
-#include <sys/types.h>
+#include <stdint.h>
#include <time.h>
-#include "oboe/OboeDefinitions.h"
-#include "oboe/OboeAudio.h"
+
+#include <oboe/OboeDefinitions.h>
class AudioClock {
public:
diff --git a/media/liboboe/src/utility/HandleTracker.cpp b/media/liboboe/src/utility/HandleTracker.cpp
index be2a64c..bf5fb63 100644
--- a/media/liboboe/src/utility/HandleTracker.cpp
+++ b/media/liboboe/src/utility/HandleTracker.cpp
@@ -19,6 +19,7 @@
//#define LOG_NDEBUG 0
#include <utils/Log.h>
+#include <new>
#include <stdint.h>
#include <assert.h>
@@ -51,25 +52,28 @@
HandleTracker::HandleTracker(uint32_t maxHandles)
: mMaxHandleCount(maxHandles)
- , mHandleAddresses(nullptr)
, mHandleHeaders(nullptr)
{
assert(maxHandles <= HANDLE_TRACKER_MAX_HANDLES);
// Allocate arrays to hold addresses and validation info.
- mHandleAddresses = (handle_tracker_address_t *) new handle_tracker_address_t[maxHandles];
+ mHandleAddresses = (handle_tracker_address_t *)
+ new(std::nothrow) handle_tracker_address_t[maxHandles];
if (mHandleAddresses != nullptr) {
- mHandleHeaders = new handle_tracker_header_t[maxHandles];
+ mHandleHeaders = new(std::nothrow) handle_tracker_header_t[maxHandles];
+
if (mHandleHeaders != nullptr) {
- // Initialize linked list of free nodes. NULL terminated.
+ handle_tracker_header_t initialHeader = buildHeader(0, 1);
+ // Initialize linked list of free nodes. nullptr terminated.
for (uint32_t i = 0; i < (maxHandles - 1); i++) {
mHandleAddresses[i] = &mHandleAddresses[i + 1]; // point to next node
- mHandleHeaders[i] = 0;
+ mHandleHeaders[i] = initialHeader;
}
mNextFreeAddress = &mHandleAddresses[0];
mHandleAddresses[maxHandles - 1] = nullptr;
mHandleHeaders[maxHandles - 1] = 0;
} else {
delete[] mHandleAddresses; // so the class appears uninitialized
+ mHandleAddresses = nullptr;
}
}
}
@@ -131,7 +135,7 @@
// Generate a handle.
oboe_handle_t handle = buildHandle(inputHeader, index);
- //ALOGD("HandleTracker::put(%p) returns 0x%08x", address, handle);
+ ALOGV("HandleTracker::put(%p) returns 0x%08x", address, handle);
return handle;
}
diff --git a/media/liboboe/src/utility/HandleTracker.h b/media/liboboe/src/utility/HandleTracker.h
index da5b654..4c08321 100644
--- a/media/liboboe/src/utility/HandleTracker.h
+++ b/media/liboboe/src/utility/HandleTracker.h
@@ -41,7 +41,7 @@
/**
* @param maxHandles cannot exceed HANDLE_TRACKER_MAX_HANDLES
*/
- HandleTracker(uint32_t maxHandles);
+ HandleTracker(uint32_t maxHandles = 256);
virtual ~HandleTracker();
/**
@@ -53,7 +53,7 @@
/**
* Store a pointer and return a handle that can be used to retrieve the pointer.
*
- * @param type the type of the object to be tracked
+ * @param expectedType the type of the object to be tracked
* @param address pointer to be converted to a handle
* @return a valid handle or a negative error
*/
diff --git a/media/liboboe/src/utility/OboeUtilities.cpp b/media/liboboe/src/utility/OboeUtilities.cpp
index b28f7c7..d9d2e88 100644
--- a/media/liboboe/src/utility/OboeUtilities.cpp
+++ b/media/liboboe/src/utility/OboeUtilities.cpp
@@ -28,24 +28,19 @@
using namespace android;
oboe_size_bytes_t OboeConvert_formatToSizeInBytes(oboe_audio_format_t format) {
- oboe_datatype_t dataType = OBOE_AUDIO_FORMAT_DATA_TYPE(format);
- oboe_size_bytes_t size;
- switch (dataType) {
- case OBOE_AUDIO_DATATYPE_UINT8:
- size = sizeof(uint8_t);
- break;
- case OBOE_AUDIO_DATATYPE_INT16:
+ oboe_size_bytes_t size = OBOE_ERROR_ILLEGAL_ARGUMENT;
+ switch (format) {
+ case OBOE_AUDIO_FORMAT_PCM16:
size = sizeof(int16_t);
break;
- case OBOE_AUDIO_DATATYPE_INT32:
- case OBOE_AUDIO_DATATYPE_INT824:
+ case OBOE_AUDIO_FORMAT_PCM32:
+ case OBOE_AUDIO_FORMAT_PCM824:
size = sizeof(int32_t);
break;
- case OBOE_AUDIO_DATATYPE_FLOAT32:
+ case OBOE_AUDIO_FORMAT_PCM_FLOAT:
size = sizeof(float);
break;
default:
- size = OBOE_ERROR_ILLEGAL_ARGUMENT;
break;
}
return size;
diff --git a/media/liboboe/tests/Android.mk b/media/liboboe/tests/Android.mk
index f2c65d9..165669b 100644
--- a/media/liboboe/tests/Android.mk
+++ b/media/liboboe/tests/Android.mk
@@ -6,10 +6,9 @@
frameworks/av/media/liboboe/include \
frameworks/av/media/liboboe/src/core \
frameworks/av/media/liboboe/src/utility
-LOCAL_SRC_FILES:= test_oboe_api.cpp
-LOCAL_SHARED_LIBRARIES := libaudioutils libmedia \
- libbinder libcutils libutils \
- libaudioclient liblog
+LOCAL_SRC_FILES := test_oboe_api.cpp
+LOCAL_SHARED_LIBRARIES := libaudioclient libaudioutils libbinder \
+ libcutils liblog libmedia libutils
LOCAL_STATIC_LIBRARIES := liboboe
LOCAL_MODULE := test_oboe_api
include $(BUILD_NATIVE_TEST)
@@ -21,7 +20,23 @@
frameworks/av/media/liboboe/src/core \
frameworks/av/media/liboboe/src/utility
LOCAL_SRC_FILES:= test_handle_tracker.cpp
-LOCAL_SHARED_LIBRARIES := libbinder libcutils libutils liblog
+LOCAL_SHARED_LIBRARIES := libaudioclient libaudioutils libbinder \
+ libcutils liblog libmedia libutils
LOCAL_STATIC_LIBRARIES := liboboe
LOCAL_MODULE := test_handle_tracker
include $(BUILD_NATIVE_TEST)
+
+include $(CLEAR_VARS)
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/media/liboboe/include \
+ frameworks/av/media/liboboe/src \
+ frameworks/av/media/liboboe/src/core \
+ frameworks/av/media/liboboe/src/fifo \
+ frameworks/av/media/liboboe/src/utility
+LOCAL_SRC_FILES:= test_marshalling.cpp
+LOCAL_SHARED_LIBRARIES := libaudioclient libaudioutils libbinder \
+ libcutils liblog libmedia libutils
+LOCAL_STATIC_LIBRARIES := liboboe
+LOCAL_MODULE := test_marshalling
+include $(BUILD_NATIVE_TEST)
diff --git a/media/liboboe/tests/test_handle_tracker.cpp b/media/liboboe/tests/test_handle_tracker.cpp
index ae7384e..a146e76 100644
--- a/media/liboboe/tests/test_handle_tracker.cpp
+++ b/media/liboboe/tests/test_handle_tracker.cpp
@@ -56,7 +56,7 @@
EXPECT_EQ(&data, found);
// should fail the second time
found = tracker.remove(type, dataHandle);
- EXPECT_EQ(NULL, found);
+ EXPECT_EQ(nullptr, found);
}
}
diff --git a/media/liboboe/tests/test_marshalling.cpp b/media/liboboe/tests/test_marshalling.cpp
new file mode 100644
index 0000000..8f4cc2c
--- /dev/null
+++ b/media/liboboe/tests/test_marshalling.cpp
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Unit tests for Oboe Marshalling of RingBuffer information.
+
+#include <stdlib.h>
+#include <math.h>
+
+#include <binder/Parcel.h>
+#include <binder/Parcelable.h>
+#include <cutils/ashmem.h>
+#include <gtest/gtest.h>
+#include <sys/mman.h>
+
+#include <oboe/OboeDefinitions.h>
+#include <binding/AudioEndpointParcelable.h>
+
+using namespace android;
+using namespace oboe;
+
+// Test adding one value.
+TEST(test_marshalling, oboe_one_read_write) {
+ Parcel parcel;
+ size_t pos = parcel.dataPosition();
+ const int arbitraryValue = 235;
+ parcel.writeInt32(arbitraryValue);
+ parcel.setDataPosition(pos);
+ int32_t y;
+ parcel.readInt32(&y);
+ EXPECT_EQ(arbitraryValue, y);
+}
+
+// Test SharedMemoryParcel.
+TEST(test_marshalling, oboe_shared_memory) {
+ SharedMemoryParcelable sharedMemoryA;
+ SharedMemoryParcelable sharedMemoryB;
+ const size_t memSizeBytes = 840;
+ int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+ ASSERT_LE(0, fd);
+ sharedMemoryA.setup(fd, memSizeBytes);
+ void *region1;
+ EXPECT_EQ(OBOE_OK, sharedMemoryA.resolve(0, 16, &region1)); // fits in region
+ EXPECT_NE(OBOE_OK, sharedMemoryA.resolve(-2, 16, &region1)); // offset is negative
+ EXPECT_NE(OBOE_OK, sharedMemoryA.resolve(0, memSizeBytes + 8, &region1)); // size too big
+ EXPECT_NE(OBOE_OK, sharedMemoryA.resolve(memSizeBytes - 8, 16, &region1)); // goes past the end
+ int32_t *buffer1 = (int32_t *)region1;
+ buffer1[0] = 98735; // arbitrary value
+
+ Parcel parcel;
+ size_t pos = parcel.dataPosition();
+ sharedMemoryA.writeToParcel(&parcel);
+
+ parcel.setDataPosition(pos);
+ sharedMemoryB.readFromParcel(&parcel);
+ EXPECT_EQ(sharedMemoryA.getSizeInBytes(), sharedMemoryB.getSizeInBytes());
+
+ // should see same value at two different addresses
+ void *region2;
+ EXPECT_EQ(OBOE_OK, sharedMemoryB.resolve(0, 16, &region2));
+ int32_t *buffer2 = (int32_t *)region2;
+ EXPECT_NE(buffer1, buffer2);
+ EXPECT_EQ(buffer1[0], buffer2[0]);
+}
+
+// Test SharedRegionParcel.
+TEST(test_marshalling, oboe_shared_region) {
+ SharedMemoryParcelable sharedMemories[2];
+ SharedRegionParcelable sharedRegionA;
+ SharedRegionParcelable sharedRegionB;
+ const size_t memSizeBytes = 840;
+ int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+ ASSERT_LE(0, fd);
+ sharedMemories[0].setup(fd, memSizeBytes);
+ int32_t regionOffset1 = 32;
+ int32_t regionSize1 = 16;
+ sharedRegionA.setup(0, regionOffset1, regionSize1);
+
+ void *region1;
+ EXPECT_EQ(OBOE_OK, sharedRegionA.resolve(sharedMemories, &region1));
+ int32_t *buffer1 = (int32_t *)region1;
+ buffer1[0] = 336677; // arbitrary value
+
+ Parcel parcel;
+ size_t pos = parcel.dataPosition();
+ sharedRegionA.writeToParcel(&parcel);
+
+ parcel.setDataPosition(pos);
+ sharedRegionB.readFromParcel(&parcel);
+
+ // should see same value
+ void *region2;
+ EXPECT_EQ(OBOE_OK, sharedRegionB.resolve(sharedMemories, &region2));
+ int32_t *buffer2 = (int32_t *)region2;
+ EXPECT_EQ(buffer1[0], buffer2[0]);
+}
+
+// Test RingBufferParcelable.
+TEST(test_marshalling, oboe_ring_buffer_parcelable) {
+ SharedMemoryParcelable sharedMemories[2];
+ RingBufferParcelable ringBufferA;
+ RingBufferParcelable ringBufferB;
+
+ const size_t bytesPerFrame = 8;
+ const size_t framesPerBurst = 32;
+ const size_t dataSizeBytes = 2048;
+ const int32_t counterSizeBytes = sizeof(int64_t);
+ const size_t memSizeBytes = dataSizeBytes + (2 * counterSizeBytes);
+
+ int fd = ashmem_create_region("TestMarshalling", memSizeBytes);
+ ASSERT_LE(0, fd);
+ sharedMemories[0].setup(fd, memSizeBytes);
+
+ int32_t sharedMemoryIndex = 0;
+ // arrange indices and data in the shared memory
+ int32_t readOffset = 0;
+ int32_t writeOffset = readOffset + counterSizeBytes;
+ int32_t dataOffset = writeOffset + counterSizeBytes;
+ ringBufferA.setupMemory(sharedMemoryIndex, dataOffset, dataSizeBytes,
+ readOffset, writeOffset, counterSizeBytes);
+ ringBufferA.setFramesPerBurst(framesPerBurst);
+ ringBufferA.setBytesPerFrame(bytesPerFrame);
+ ringBufferA.setCapacityInFrames(dataSizeBytes / bytesPerFrame);
+
+ // setup A
+ RingBufferDescriptor descriptorA;
+ EXPECT_EQ(OBOE_OK, ringBufferA.resolve(sharedMemories, &descriptorA));
+ descriptorA.dataAddress[0] = 95;
+ descriptorA.dataAddress[1] = 57;
+ descriptorA.readCounterAddress[0] = 17;
+ descriptorA.writeCounterAddress[0] = 39;
+
+ // write A to parcel
+ Parcel parcel;
+ size_t pos = parcel.dataPosition();
+ ringBufferA.writeToParcel(&parcel);
+
+ // read B from parcel
+ parcel.setDataPosition(pos);
+ ringBufferB.readFromParcel(&parcel);
+
+ RingBufferDescriptor descriptorB;
+ EXPECT_EQ(OBOE_OK, ringBufferB.resolve(sharedMemories, &descriptorB));
+
+ // A and B should match
+ EXPECT_EQ(descriptorA.dataAddress[0], descriptorB.dataAddress[0]);
+ EXPECT_EQ(descriptorA.dataAddress[1], descriptorB.dataAddress[1]);
+ EXPECT_EQ(descriptorA.readCounterAddress[0], descriptorB.readCounterAddress[0]);
+ EXPECT_EQ(descriptorA.writeCounterAddress[0], descriptorB.writeCounterAddress[0]);
+
+ EXPECT_EQ(ringBufferA.getFramesPerBurst(), ringBufferB.getFramesPerBurst());
+ EXPECT_EQ(ringBufferA.getBytesPerFrame(), ringBufferB.getBytesPerFrame());
+ EXPECT_EQ(ringBufferA.getCapacityInFrames(), ringBufferB.getCapacityInFrames());
+}
diff --git a/media/liboboe/tests/test_oboe_api.cpp b/media/liboboe/tests/test_oboe_api.cpp
index acf3000..0bc469f 100644
--- a/media/liboboe/tests/test_oboe_api.cpp
+++ b/media/liboboe/tests/test_oboe_api.cpp
@@ -32,7 +32,7 @@
const oboe_sample_rate_t requestedSampleRate1 = 48000;
const oboe_sample_rate_t requestedSampleRate2 = 44100;
const int32_t requestedSamplesPerFrame = 2;
- const oboe_audio_format_t requestedDataFormat = OBOE_AUDIO_DATATYPE_INT16;
+ const oboe_audio_format_t requestedDataFormat = OBOE_AUDIO_FORMAT_PCM16;
oboe_sample_rate_t sampleRate = 0;
int32_t samplesPerFrame = 0;
@@ -94,7 +94,6 @@
EXPECT_EQ(OBOE_ERROR_INVALID_HANDLE, OboeStreamBuilder_getSampleRate(oboeBuilder2, &sampleRate));
}
-
// Test creating a default stream with everything unspecified.
TEST(test_oboe_api, oboe_stream_unspecified) {
OboeStreamBuilder oboeBuilder;
@@ -114,18 +113,17 @@
}
// Test Writing to an OboeStream
-TEST(test_oboe_api, oboe_stream) {
+void runtest_oboe_stream(oboe_sharing_mode_t requestedSharingMode) {
const oboe_sample_rate_t requestedSampleRate = 48000;
const oboe_sample_rate_t requestedSamplesPerFrame = 2;
- const oboe_audio_format_t requestedDataFormat = OBOE_AUDIO_DATATYPE_INT16;
- //const oboe_sharing_mode_t requestedSharingMode = OBOE_SHARING_MODE_EXCLUSIVE; // MMAP NOIRQ
- const oboe_sharing_mode_t requestedSharingMode = OBOE_SHARING_MODE_LEGACY; // AudioTrack
+ const oboe_audio_format_t requestedDataFormat = OBOE_AUDIO_FORMAT_PCM16;
oboe_sample_rate_t actualSampleRate = -1;
int32_t actualSamplesPerFrame = -1;
- oboe_audio_format_t actualDataFormat = OBOE_AUDIO_FORMAT_PCM824;
+ oboe_audio_format_t actualDataFormat = OBOE_AUDIO_FORMAT_INVALID;
oboe_sharing_mode_t actualSharingMode;
oboe_size_frames_t framesPerBurst = -1;
+ int writeLoops = 0;
oboe_size_frames_t framesWritten = 0;
oboe_size_frames_t framesPrimed = 0;
@@ -162,22 +160,30 @@
// Check to see what kind of stream we actually got.
EXPECT_EQ(OBOE_OK, OboeStream_getSampleRate(oboeStream, &actualSampleRate));
- EXPECT_TRUE(actualSampleRate >= 44100 && actualSampleRate <= 96000); // TODO what is range?
+ ASSERT_TRUE(actualSampleRate >= 44100 && actualSampleRate <= 96000); // TODO what is range?
EXPECT_EQ(OBOE_OK, OboeStream_getSamplesPerFrame(oboeStream, &actualSamplesPerFrame));
- EXPECT_TRUE(actualSamplesPerFrame >= 1 && actualSamplesPerFrame <= 16); // TODO what is max?
+ ASSERT_TRUE(actualSamplesPerFrame >= 1 && actualSamplesPerFrame <= 16); // TODO what is max?
EXPECT_EQ(OBOE_OK, OboeStream_getSharingMode(oboeStream, &actualSharingMode));
- EXPECT_TRUE(actualSharingMode == OBOE_SHARING_MODE_EXCLUSIVE
- || actualSharingMode == OBOE_SHARING_MODE_LEGACY);
+ ASSERT_TRUE(actualSharingMode == OBOE_SHARING_MODE_EXCLUSIVE
+ || actualSharingMode == OBOE_SHARING_MODE_LEGACY);
+
+ EXPECT_EQ(OBOE_OK, OboeStream_getFormat(oboeStream, &actualDataFormat));
+ EXPECT_NE(OBOE_AUDIO_FORMAT_INVALID, actualDataFormat);
EXPECT_EQ(OBOE_OK, OboeStream_getFramesPerBurst(oboeStream, &framesPerBurst));
- EXPECT_TRUE(framesPerBurst >= 16 && framesPerBurst <= 1024); // TODO what is min/max?
+ ASSERT_TRUE(framesPerBurst >= 16 && framesPerBurst <= 1024); // TODO what is min/max?
// Allocate a buffer for the audio data.
- int16_t *data = new int16_t[framesPerBurst * actualSamplesPerFrame];
- ASSERT_TRUE(NULL != data);
+ // TODO handle possibility of other data formats
+ ASSERT_TRUE(actualDataFormat == OBOE_AUDIO_FORMAT_PCM16);
+ size_t dataSizeSamples = framesPerBurst * actualSamplesPerFrame;
+ int16_t *data = new int16_t[dataSizeSamples];
+ ASSERT_TRUE(nullptr != data);
+ memset(data, 0, sizeof(int16_t) * dataSizeSamples);
+ // Prime the buffer.
timeoutNanos = 0;
do {
framesWritten = OboeStream_write(oboeStream, data, framesPerBurst, timeoutNanos);
@@ -185,67 +191,71 @@
framesTotal += framesWritten;
ASSERT_GE(framesWritten, 0);
ASSERT_LE(framesWritten, framesPerBurst);
- } while(framesWritten > 0);
+ } while (framesWritten > 0);
ASSERT_TRUE(framesTotal > 0);
- // Start and wait for server to respond.
- ASSERT_EQ(OBOE_OK, OboeStream_requestStart(oboeStream));
- ASSERT_EQ(OBOE_OK, OboeStream_waitForStateChange(oboeStream,
- OBOE_STREAM_STATE_STARTING,
- &state,
- DEFAULT_STATE_TIMEOUT));
- EXPECT_EQ(OBOE_STREAM_STATE_STARTED, state);
+ // Start/write/pause more than once to see if it fails after the first time.
+ // Write some data and measure the rate to see if the timing is OK.
+ for (int numLoops = 0; numLoops < 2; numLoops++) {
+ // Start and wait for server to respond.
+ ASSERT_EQ(OBOE_OK, OboeStream_requestStart(oboeStream));
+ ASSERT_EQ(OBOE_OK, OboeStream_waitForStateChange(oboeStream,
+ OBOE_STREAM_STATE_STARTING,
+ &state,
+ DEFAULT_STATE_TIMEOUT));
+ EXPECT_EQ(OBOE_STREAM_STATE_STARTED, state);
- // Write some data while we are running. Read counter should be advancing.
- int loops = 1 * actualSampleRate / framesPerBurst; // 1 second
- ASSERT_LT(2, loops); // detect absurdly high framesPerBurst
- timeoutNanos = 10 * OBOE_NANOS_PER_SECOND * framesPerBurst / actualSampleRate; // bursts
- framesWritten = 1;
- ASSERT_EQ(OBOE_OK, OboeStream_getFramesRead(oboeStream, &oboeFramesRead));
- oboeFramesRead1 = oboeFramesRead;
- oboe_nanoseconds_t beginTime = Oboe_getNanoseconds(OBOE_CLOCK_MONOTONIC);
- do {
- framesWritten = OboeStream_write(oboeStream, data, framesPerBurst, timeoutNanos);
- ASSERT_GE(framesWritten, 0);
- ASSERT_LE(framesWritten, framesPerBurst);
+ // Write some data while we are running. Read counter should be advancing.
+ writeLoops = 1 * actualSampleRate / framesPerBurst; // 1 second
+ ASSERT_LT(2, writeLoops); // detect absurdly high framesPerBurst
+ timeoutNanos = 10 * OBOE_NANOS_PER_SECOND * framesPerBurst / actualSampleRate; // bursts
+ framesWritten = 1;
+ ASSERT_EQ(OBOE_OK, OboeStream_getFramesRead(oboeStream, &oboeFramesRead));
+ oboeFramesRead1 = oboeFramesRead;
+ oboe_nanoseconds_t beginTime = Oboe_getNanoseconds(OBOE_CLOCK_MONOTONIC);
+ do {
+ framesWritten = OboeStream_write(oboeStream, data, framesPerBurst, timeoutNanos);
+ ASSERT_GE(framesWritten, 0);
+ ASSERT_LE(framesWritten, framesPerBurst);
- framesTotal += framesWritten;
- EXPECT_EQ(OBOE_OK, OboeStream_getFramesWritten(oboeStream, &oboeFramesWritten));
- EXPECT_EQ(framesTotal, oboeFramesWritten);
+ framesTotal += framesWritten;
+ EXPECT_EQ(OBOE_OK, OboeStream_getFramesWritten(oboeStream, &oboeFramesWritten));
+ EXPECT_EQ(framesTotal, oboeFramesWritten);
- // Try to get a more accurate measure of the sample rate.
- if (beginTime == 0) {
- EXPECT_EQ(OBOE_OK, OboeStream_getFramesRead(oboeStream, &oboeFramesRead));
- if (oboeFramesRead > oboeFramesRead1) { // is read pointer advancing
- beginTime = Oboe_getNanoseconds(OBOE_CLOCK_MONOTONIC);
- oboeFramesRead1 = oboeFramesRead;
+ // Try to get a more accurate measure of the sample rate.
+ if (beginTime == 0) {
+ EXPECT_EQ(OBOE_OK, OboeStream_getFramesRead(oboeStream, &oboeFramesRead));
+ if (oboeFramesRead > oboeFramesRead1) { // is read pointer advancing
+ beginTime = Oboe_getNanoseconds(OBOE_CLOCK_MONOTONIC);
+ oboeFramesRead1 = oboeFramesRead;
+ }
}
+ } while (framesWritten > 0 && writeLoops-- > 0);
+
+ EXPECT_EQ(OBOE_OK, OboeStream_getFramesRead(oboeStream, &oboeFramesRead2));
+ oboe_nanoseconds_t endTime = Oboe_getNanoseconds(OBOE_CLOCK_MONOTONIC);
+ ASSERT_GT(oboeFramesRead2, 0);
+ ASSERT_GT(oboeFramesRead2, oboeFramesRead1);
+ ASSERT_LE(oboeFramesRead2, oboeFramesWritten);
+
+ // TODO why is legacy so inaccurate?
+ const double rateTolerance = 200.0; // arbitrary tolerance for sample rate
+ if (requestedSharingMode != OBOE_SHARING_MODE_LEGACY) {
+ // Calculate approximate sample rate and compare with stream rate.
+ double seconds = (endTime - beginTime) / (double) OBOE_NANOS_PER_SECOND;
+ double measuredRate = (oboeFramesRead2 - oboeFramesRead1) / seconds;
+ ASSERT_NEAR(actualSampleRate, measuredRate, rateTolerance);
}
- } while (framesWritten > 0 && loops-- > 0);
- EXPECT_EQ(OBOE_OK, OboeStream_getFramesRead(oboeStream, &oboeFramesRead2));
- oboe_nanoseconds_t endTime = Oboe_getNanoseconds(OBOE_CLOCK_MONOTONIC);
- ASSERT_GT(oboeFramesRead2, 0);
- ASSERT_GT(oboeFramesRead2, oboeFramesRead1);
- ASSERT_LE(oboeFramesRead2, oboeFramesWritten);
-
- // TODO why is legacy so inaccurate?
- const double rateTolerance = 200.0; // arbitrary tolerance for sample rate
- if (requestedSharingMode != OBOE_SHARING_MODE_LEGACY) {
- // Calculate approximate sample rate and compare with stream rate.
- double seconds = (endTime - beginTime) / (double) OBOE_NANOS_PER_SECOND;
- double measuredRate = (oboeFramesRead2 - oboeFramesRead1) / seconds;
- ASSERT_NEAR(actualSampleRate, measuredRate, rateTolerance);
+ // Request async pause and wait for server to say that it has completed the pause.
+ ASSERT_EQ(OBOE_OK, OboeStream_requestPause(oboeStream));
+ EXPECT_EQ(OBOE_OK, OboeStream_waitForStateChange(oboeStream,
+ OBOE_STREAM_STATE_PAUSING,
+ &state,
+ DEFAULT_STATE_TIMEOUT));
+ EXPECT_EQ(OBOE_STREAM_STATE_PAUSED, state);
}
- // Request async pause and wait for server to say that it has completed the pause.
- ASSERT_EQ(OBOE_OK, OboeStream_requestPause(oboeStream));
- EXPECT_EQ(OBOE_OK, OboeStream_waitForStateChange(oboeStream,
- OBOE_STREAM_STATE_PAUSING,
- &state,
- DEFAULT_STATE_TIMEOUT));
- EXPECT_EQ(OBOE_STREAM_STATE_PAUSED, state);
-
// Make sure the read counter is not advancing when we are paused.
ASSERT_EQ(OBOE_OK, OboeStream_getFramesRead(oboeStream, &oboeFramesRead));
ASSERT_GE(oboeFramesRead, oboeFramesRead2); // monotonic increase
@@ -255,13 +265,14 @@
ASSERT_EQ(OBOE_OK, OboeStream_getFramesRead(oboeStream, &oboeFramesRead2));
EXPECT_EQ(oboeFramesRead, oboeFramesRead2);
- // Fill up the buffer.
+ // ------------------- TEST FLUSH -----------------
+ // Prime the buffer.
timeoutNanos = 0;
- loops = 100;
+ writeLoops = 100;
do {
framesWritten = OboeStream_write(oboeStream, data, framesPerBurst, timeoutNanos);
framesTotal += framesWritten;
- } while (framesWritten > 0 && loops-- > 0);
+ } while (framesWritten > 0 && writeLoops-- > 0);
EXPECT_EQ(0, framesWritten);
// Flush and wait for server to respond.
@@ -286,6 +297,16 @@
EXPECT_EQ(OBOE_OK, OboeStream_close(oboeStream));
}
+// Test Writing to an OboeStream using LEGACY sharing mode.
+TEST(test_oboe_api, oboe_stream_legacy) {
+ runtest_oboe_stream(OBOE_SHARING_MODE_LEGACY);
+}
+
+// Test Writing to an OboeStream using EXCLUSIVE sharing mode.
+TEST(test_oboe_api, oboe_stream_exclusive) {
+ runtest_oboe_stream(OBOE_SHARING_MODE_EXCLUSIVE);
+}
+
#define OBOE_THREAD_ANSWER 1826375
#define OBOE_THREAD_DURATION_MSEC 500
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index f247475..c63ab47 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -5543,6 +5543,7 @@
}
size_t size = buffer->size();
+ size_t offset = buffer->offset();
if (buffer->base() != info->mCodecData->base()) {
ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
mCodec->mComponentName.c_str(),
@@ -5560,7 +5561,7 @@
}
size = info->mCodecData->size();
} else {
- info->mCodecData->setRange(0, size);
+ info->mCodecData->setRange(offset, size);
}
if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index f3d622b..25dd6b1 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -18,7 +18,6 @@
DataConverter.cpp \
DataSource.cpp \
DataURISource.cpp \
- DRMExtractor.cpp \
ESDS.cpp \
FileSource.cpp \
FLACExtractor.cpp \
diff --git a/media/libstagefright/DRMExtractor.cpp b/media/libstagefright/DRMExtractor.cpp
deleted file mode 100644
index 8ba36d5..0000000
--- a/media/libstagefright/DRMExtractor.cpp
+++ /dev/null
@@ -1,305 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "include/DRMExtractor.h"
-
-#include <arpa/inet.h>
-#include <utils/String8.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/Utils.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaBuffer.h>
-
-#include <drm/drm_framework_common.h>
-#include <utils/Errors.h>
-
-
-namespace android {
-
-class DRMSource : public MediaSource {
-public:
- DRMSource(const sp<IMediaSource> &mediaSource,
- const sp<DecryptHandle> &decryptHandle,
- DrmManagerClient *managerClient,
- int32_t trackId, DrmBuffer *ipmpBox);
-
- virtual status_t start(MetaData *params = NULL);
- virtual status_t stop();
- virtual sp<MetaData> getFormat();
- virtual status_t read(
- MediaBuffer **buffer, const ReadOptions *options = NULL);
-
-protected:
- virtual ~DRMSource();
-
-private:
- sp<IMediaSource> mOriginalMediaSource;
- sp<DecryptHandle> mDecryptHandle;
- DrmManagerClient* mDrmManagerClient;
- size_t mTrackId;
- mutable Mutex mDRMLock;
- size_t mNALLengthSize;
- bool mWantsNALFragments;
-
- DRMSource(const DRMSource &);
- DRMSource &operator=(const DRMSource &);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-DRMSource::DRMSource(const sp<IMediaSource> &mediaSource,
- const sp<DecryptHandle> &decryptHandle,
- DrmManagerClient *managerClient,
- int32_t trackId, DrmBuffer *ipmpBox)
- : mOriginalMediaSource(mediaSource),
- mDecryptHandle(decryptHandle),
- mDrmManagerClient(managerClient),
- mTrackId(trackId),
- mNALLengthSize(0),
- mWantsNALFragments(false) {
- CHECK(mDrmManagerClient);
- mDrmManagerClient->initializeDecryptUnit(
- mDecryptHandle, trackId, ipmpBox);
-
- const char *mime;
- bool success = getFormat()->findCString(kKeyMIMEType, &mime);
- CHECK(success);
-
- if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
- uint32_t type;
- const void *data;
- size_t size;
- CHECK(getFormat()->findData(kKeyAVCC, &type, &data, &size));
-
- const uint8_t *ptr = (const uint8_t *)data;
-
- CHECK(size >= 7);
- CHECK_EQ(ptr[0], 1); // configurationVersion == 1
-
- // The number of bytes used to encode the length of a NAL unit.
- mNALLengthSize = 1 + (ptr[4] & 3);
- }
-}
-
-DRMSource::~DRMSource() {
- Mutex::Autolock autoLock(mDRMLock);
- mDrmManagerClient->finalizeDecryptUnit(mDecryptHandle, mTrackId);
-}
-
-status_t DRMSource::start(MetaData *params) {
- int32_t val;
- if (params && params->findInt32(kKeyWantsNALFragments, &val)
- && val != 0) {
- mWantsNALFragments = true;
- } else {
- mWantsNALFragments = false;
- }
-
- return mOriginalMediaSource->start(params);
-}
-
-status_t DRMSource::stop() {
- return mOriginalMediaSource->stop();
-}
-
-sp<MetaData> DRMSource::getFormat() {
- return mOriginalMediaSource->getFormat();
-}
-
-status_t DRMSource::read(MediaBuffer **buffer, const ReadOptions *options) {
- Mutex::Autolock autoLock(mDRMLock);
- status_t err;
- if ((err = mOriginalMediaSource->read(buffer, options)) != OK) {
- return err;
- }
-
- size_t len = (*buffer)->range_length();
-
- char *src = (char *)(*buffer)->data() + (*buffer)->range_offset();
-
- DrmBuffer encryptedDrmBuffer(src, len);
- DrmBuffer decryptedDrmBuffer;
- decryptedDrmBuffer.length = len;
- decryptedDrmBuffer.data = new char[len];
- DrmBuffer *pDecryptedDrmBuffer = &decryptedDrmBuffer;
-
- if ((err = mDrmManagerClient->decrypt(mDecryptHandle, mTrackId,
- &encryptedDrmBuffer, &pDecryptedDrmBuffer)) != NO_ERROR) {
-
- if (decryptedDrmBuffer.data) {
- delete [] decryptedDrmBuffer.data;
- decryptedDrmBuffer.data = NULL;
- }
-
- return err;
- }
- CHECK(pDecryptedDrmBuffer == &decryptedDrmBuffer);
-
- const char *mime;
- CHECK(getFormat()->findCString(kKeyMIMEType, &mime));
-
- if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC) && !mWantsNALFragments) {
- uint8_t *dstData = (uint8_t*)src;
- size_t srcOffset = 0;
- size_t dstOffset = 0;
-
- len = decryptedDrmBuffer.length;
- while (srcOffset < len) {
- CHECK(srcOffset + mNALLengthSize <= len);
- size_t nalLength = 0;
- const uint8_t* data = (const uint8_t*)(&decryptedDrmBuffer.data[srcOffset]);
-
- switch (mNALLengthSize) {
- case 1:
- nalLength = *data;
- break;
- case 2:
- nalLength = U16_AT(data);
- break;
- case 3:
- nalLength = ((size_t)data[0] << 16) | U16_AT(&data[1]);
- break;
- case 4:
- nalLength = U32_AT(data);
- break;
- default:
- CHECK(!"Should not be here.");
- break;
- }
-
- srcOffset += mNALLengthSize;
-
- size_t end = srcOffset + nalLength;
- if (end > len || end < srcOffset) {
- if (decryptedDrmBuffer.data) {
- delete [] decryptedDrmBuffer.data;
- decryptedDrmBuffer.data = NULL;
- }
-
- return ERROR_MALFORMED;
- }
-
- if (nalLength == 0) {
- continue;
- }
-
- if (dstOffset > SIZE_MAX - 4 ||
- dstOffset + 4 > SIZE_MAX - nalLength ||
- dstOffset + 4 + nalLength > (*buffer)->size()) {
- (*buffer)->release();
- (*buffer) = NULL;
- if (decryptedDrmBuffer.data) {
- delete [] decryptedDrmBuffer.data;
- decryptedDrmBuffer.data = NULL;
- }
- return ERROR_MALFORMED;
- }
-
- dstData[dstOffset++] = 0;
- dstData[dstOffset++] = 0;
- dstData[dstOffset++] = 0;
- dstData[dstOffset++] = 1;
- memcpy(&dstData[dstOffset], &decryptedDrmBuffer.data[srcOffset], nalLength);
- srcOffset += nalLength;
- dstOffset += nalLength;
- }
-
- CHECK_EQ(srcOffset, len);
- (*buffer)->set_range((*buffer)->range_offset(), dstOffset);
-
- } else {
- memcpy(src, decryptedDrmBuffer.data, decryptedDrmBuffer.length);
- (*buffer)->set_range((*buffer)->range_offset(), decryptedDrmBuffer.length);
- }
-
- if (decryptedDrmBuffer.data) {
- delete [] decryptedDrmBuffer.data;
- decryptedDrmBuffer.data = NULL;
- }
-
- return OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-DRMExtractor::DRMExtractor(const sp<DataSource> &source, const char* mime)
- : mDataSource(source),
- mDecryptHandle(NULL),
- mDrmManagerClient(NULL) {
- mOriginalExtractor = MediaExtractor::Create(source, mime);
- mOriginalExtractor->getMetaData()->setInt32(kKeyIsDRM, 1);
-
- source->getDrmInfo(mDecryptHandle, &mDrmManagerClient);
-}
-
-DRMExtractor::~DRMExtractor() {
-}
-
-size_t DRMExtractor::countTracks() {
- return mOriginalExtractor->countTracks();
-}
-
-sp<IMediaSource> DRMExtractor::getTrack(size_t index) {
- sp<IMediaSource> originalMediaSource = mOriginalExtractor->getTrack(index);
- originalMediaSource->getFormat()->setInt32(kKeyIsDRM, 1);
-
- int32_t trackID;
- CHECK(getTrackMetaData(index, 0)->findInt32(kKeyTrackID, &trackID));
-
- DrmBuffer ipmpBox;
- ipmpBox.data = mOriginalExtractor->getDrmTrackInfo(trackID, &(ipmpBox.length));
- CHECK(ipmpBox.length > 0);
-
- return interface_cast<IMediaSource>(
- new DRMSource(originalMediaSource, mDecryptHandle, mDrmManagerClient,
- trackID, &ipmpBox));
-}
-
-sp<MetaData> DRMExtractor::getTrackMetaData(size_t index, uint32_t flags) {
- return mOriginalExtractor->getTrackMetaData(index, flags);
-}
-
-sp<MetaData> DRMExtractor::getMetaData() {
- return mOriginalExtractor->getMetaData();
-}
-
-bool SniffDRM(
- const sp<DataSource> &source, String8 *mimeType, float *confidence,
- sp<AMessage> *) {
- sp<DecryptHandle> decryptHandle = source->DrmInitialization();
-
- if (decryptHandle != NULL) {
- if (decryptHandle->decryptApiType == DecryptApiType::CONTAINER_BASED) {
- *mimeType = String8("drm+container_based+") + decryptHandle->mimeType;
- *confidence = 10.0f;
- } else if (decryptHandle->decryptApiType == DecryptApiType::ELEMENTARY_STREAM_BASED) {
- *mimeType = String8("drm+es_based+") + decryptHandle->mimeType;
- *confidence = 10.0f;
- } else {
- return false;
- }
-
- return true;
- }
-
- return false;
-}
-} //namespace android
-
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
old mode 100644
new mode 100755
index c45a3fd..ef288da
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -3437,8 +3437,14 @@
mOwner->writeInt32(0);
} else {
int32_t width, height;
- bool success = mMeta->findInt32(kKeyWidth, &width);
- success = success && mMeta->findInt32(kKeyHeight, &height);
+ bool success = mMeta->findInt32(kKeyDisplayWidth, &width);
+ success = success && mMeta->findInt32(kKeyDisplayHeight, &height);
+
+ // Use width/height if display width/height are not present.
+ if (!success) {
+ success = mMeta->findInt32(kKeyWidth, &width);
+ success = success && mMeta->findInt32(kKeyHeight, &height);
+ }
CHECK(success);
mOwner->writeInt32(width << 16); // 32-bit fixed-point value
@@ -3486,13 +3492,22 @@
void MPEG4Writer::Track::writeMdhdBox(uint32_t now) {
int64_t trakDurationUs = getDurationUs();
+ int64_t mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6;
mOwner->beginBox("mdhd");
- mOwner->writeInt32(0); // version=0, flags=0
- mOwner->writeInt32(now); // creation time
- mOwner->writeInt32(now); // modification time
- mOwner->writeInt32(mTimeScale); // media timescale
- int32_t mdhdDuration = (trakDurationUs * mTimeScale + 5E5) / 1E6;
- mOwner->writeInt32(mdhdDuration); // use media timescale
+
+ if (mdhdDuration > UINT32_MAX) {
+ mOwner->writeInt32((1 << 24)); // version=1, flags=0
+ mOwner->writeInt64((int64_t)now); // creation time
+ mOwner->writeInt64((int64_t)now); // modification time
+ mOwner->writeInt32(mTimeScale); // media timescale
+ mOwner->writeInt64(mdhdDuration); // use media timescale
+ } else {
+ mOwner->writeInt32(0); // version=0, flags=0
+ mOwner->writeInt32(now); // creation time
+ mOwner->writeInt32(now); // modification time
+ mOwner->writeInt32(mTimeScale); // media timescale
+ mOwner->writeInt32((int32_t)mdhdDuration); // use media timescale
+ }
// Language follows the three letter standard ISO-639-2/T
// 'e', 'n', 'g' for "English", for instance.
// Each character is packed as the difference between its ASCII value and 0x60.
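
For a rough sense of when the new 64-bit (version 1) mdhd path is taken: with the common 90 kHz video timescale (an assumption for illustration, not something this patch requires), the 32-bit duration field overflows after roughly UINT32_MAX / 90000, or about 47722 seconds, which is about 13.3 hours of recording.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main() {
    int64_t trakDurationUs = 48000LL * 1000000LL;  // a 48000-second track
    int32_t timeScale = 90000;                     // assumed media timescale
    // Same rounding as writeMdhdBox(): convert microseconds to timescale units.
    int64_t mdhdDuration = (trakDurationUs * timeScale + 500000) / 1000000;
    printf("mdhdDuration = %" PRId64 ", needs version 1 box: %d\n",
           mdhdDuration, mdhdDuration > UINT32_MAX);
    return 0;
}
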
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 24f0d20..9eca982 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -2623,14 +2623,7 @@
info->mData->meta()->setInt32("csd", true);
}
- // synchronization boundary for getBufferAndFormat
- sp<MediaCodecBuffer> buffer;
- {
- Mutex::Autolock al(mBufferLock);
- info->mOwnedByClient = false;
- buffer = info->mData;
- info->mData.clear();
- }
+ sp<MediaCodecBuffer> buffer = info->mData;
status_t err = OK;
if (mCrypto != NULL) {
AString *errorDetailMsg;
@@ -2650,6 +2643,13 @@
err = mBufferChannel->queueInputBuffer(buffer);
}
+ if (err == OK) {
+ // synchronization boundary for getBufferAndFormat
+ Mutex::Autolock al(mBufferLock);
+ info->mOwnedByClient = false;
+ info->mData.clear();
+ }
+
return err;
}
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
index de4d06f..5981b35 100644
--- a/media/libstagefright/MediaCodecSource.cpp
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -689,7 +689,9 @@
sp<MediaCodecBuffer> inbuf;
status_t err = mEncoder->getInputBuffer(bufferIndex, &inbuf);
- if (err != OK || inbuf == NULL) {
+
+ if (err != OK || inbuf == NULL || inbuf->data() == NULL
+ || mbuf->data() == NULL || mbuf->size() == 0) {
mbuf->release();
signalEOS();
break;
@@ -851,7 +853,8 @@
sp<MediaCodecBuffer> outbuf;
status_t err = mEncoder->getOutputBuffer(index, &outbuf);
- if (err != OK || outbuf == NULL) {
+ if (err != OK || outbuf == NULL || outbuf->data() == NULL
+ || outbuf->size() == 0) {
signalEOS();
break;
}
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index 49f480d..df4d9bf 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -27,7 +27,6 @@
#include "include/OggExtractor.h"
#include "include/MPEG2PSExtractor.h"
#include "include/MPEG2TSExtractor.h"
-#include "include/DRMExtractor.h"
#include "include/FLACExtractor.h"
#include "include/AACExtractor.h"
#include "include/MidiExtractor.h"
@@ -51,8 +50,7 @@
namespace android {
-MediaExtractor::MediaExtractor():
- mIsDrm(false) {
+MediaExtractor::MediaExtractor() {
if (!LOG_NDEBUG) {
uid_t uid = getuid();
struct passwd *pw = getpwuid(uid);
@@ -148,23 +146,6 @@
ALOGW("creating media extractor in calling process");
return CreateFromService(source, mime);
} else {
- String8 mime8;
- float confidence;
- sp<AMessage> meta;
-
- // Check if it's es-based DRM, since DRMExtractor needs to be created in the media server
- // process, not the extractor process.
- if (SniffDRM(source, &mime8, &confidence, &meta)) {
- const char *drmMime = mime8.string();
- ALOGV("Detected media content as '%s' with confidence %.2f", drmMime, confidence);
- if (!strncmp(drmMime, "drm+es_based+", 13)) {
- // DRMExtractor sets container metadata kKeyIsDRM to 1
- return new DRMExtractor(source, drmMime + 14);
- } else {
- mime = drmMime + 20; // get real mimetype after "drm+container_based+" prefix
- }
- }
-
// remote extractor
ALOGV("get service manager");
sp<IBinder> binder = defaultServiceManager()->getService(String16("media.extractor"));
@@ -187,6 +168,9 @@
ALOGV("MediaExtractor::CreateFromService %s", mime);
RegisterDefaultSniffers();
+ // initialize source decryption if needed
+ source->DrmInitialization();
+
sp<AMessage> meta;
String8 tmp;
@@ -299,9 +283,6 @@
RegisterSniffer_l(SniffMPEG2PS);
RegisterSniffer_l(SniffMidi);
- if (property_get_bool("drm.service.enabled", false)) {
- RegisterSniffer_l(SniffDRM);
- }
gSniffersRegistered = true;
}
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 8061bc6..de5ea9c 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -522,8 +522,6 @@
return ERROR_MALFORMED;
}
- mSyncSampleOffset = data_offset;
-
uint8_t header[8];
if (mDataSource->readAt(
data_offset, header, sizeof(header)) < (ssize_t)sizeof(header)) {
@@ -535,13 +533,13 @@
return ERROR_MALFORMED;
}
- mNumSyncSamples = U32_AT(&header[4]);
+ uint32_t numSyncSamples = U32_AT(&header[4]);
- if (mNumSyncSamples < 2) {
+ if (numSyncSamples < 2) {
ALOGV("Table of sync samples is empty or has only a single entry!");
}
- uint64_t allocSize = (uint64_t)mNumSyncSamples * sizeof(uint32_t);
+ uint64_t allocSize = (uint64_t)numSyncSamples * sizeof(uint32_t);
if (allocSize > kMaxTotalSize) {
ALOGE("Sync sample table size too large.");
return ERROR_OUT_OF_RANGE;
@@ -559,19 +557,21 @@
return ERROR_OUT_OF_RANGE;
}
- mSyncSamples = new (std::nothrow) uint32_t[mNumSyncSamples];
+ mSyncSamples = new (std::nothrow) uint32_t[numSyncSamples];
if (!mSyncSamples) {
ALOGE("Cannot allocate sync sample table with %llu entries.",
- (unsigned long long)mNumSyncSamples);
+ (unsigned long long)numSyncSamples);
return ERROR_OUT_OF_RANGE;
}
- if (mDataSource->readAt(mSyncSampleOffset + 8, mSyncSamples,
+ if (mDataSource->readAt(data_offset + 8, mSyncSamples,
(size_t)allocSize) != (ssize_t)allocSize) {
+ delete[] mSyncSamples;
+ mSyncSamples = NULL;
return ERROR_IO;
}
- for (size_t i = 0; i < mNumSyncSamples; ++i) {
+ for (size_t i = 0; i < numSyncSamples; ++i) {
if (mSyncSamples[i] == 0) {
ALOGE("b/32423862, unexpected zero value in stss");
continue;
@@ -579,6 +579,9 @@
mSyncSamples[i] = ntohl(mSyncSamples[i]) - 1;
}
+ mSyncSampleOffset = data_offset;
+ mNumSyncSamples = numSyncSamples;
+
return OK;
}
@@ -989,4 +992,3 @@
}
} // namespace android
-
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index f2638ed..ec02fb9 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -1770,6 +1770,45 @@
*sync = settings;
}
+void writeToAMessage(const sp<AMessage> &msg, const BufferingSettings &buffering) {
+ msg->setInt32("init-mode", buffering.mInitialBufferingMode);
+ msg->setInt32("rebuffer-mode", buffering.mRebufferingMode);
+ msg->setInt32("init-ms", buffering.mInitialWatermarkMs);
+ msg->setInt32("init-kb", buffering.mInitialWatermarkKB);
+ msg->setInt32("rebuffer-low-ms", buffering.mRebufferingWatermarkLowMs);
+ msg->setInt32("rebuffer-high-ms", buffering.mRebufferingWatermarkHighMs);
+ msg->setInt32("rebuffer-low-kb", buffering.mRebufferingWatermarkLowKB);
+ msg->setInt32("rebuffer-high-kb", buffering.mRebufferingWatermarkHighKB);
+}
+
+void readFromAMessage(const sp<AMessage> &msg, BufferingSettings *buffering /* nonnull */) {
+ int32_t value;
+ if (msg->findInt32("init-mode", &value)) {
+ buffering->mInitialBufferingMode = (BufferingMode)value;
+ }
+ if (msg->findInt32("rebuffer-mode", &value)) {
+ buffering->mRebufferingMode = (BufferingMode)value;
+ }
+ if (msg->findInt32("init-ms", &value)) {
+ buffering->mInitialWatermarkMs = value;
+ }
+ if (msg->findInt32("init-kb", &value)) {
+ buffering->mInitialWatermarkKB = value;
+ }
+ if (msg->findInt32("rebuffer-low-ms", &value)) {
+ buffering->mRebufferingWatermarkLowMs = value;
+ }
+ if (msg->findInt32("rebuffer-high-ms", &value)) {
+ buffering->mRebufferingWatermarkHighMs = value;
+ }
+ if (msg->findInt32("rebuffer-low-kb", &value)) {
+ buffering->mRebufferingWatermarkLowKB = value;
+ }
+ if (msg->findInt32("rebuffer-high-kb", &value)) {
+ buffering->mRebufferingWatermarkHighKB = value;
+ }
+}
+
AString nameForFd(int fd) {
const size_t SIZE = 256;
char buffer[SIZE];
diff --git a/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp b/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp
index 9d9cd3b..5a47510 100644
--- a/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp
+++ b/media/libstagefright/codecs/amrnb/dec/src/a_refl.cpp
@@ -60,7 +60,7 @@
; INCLUDES
----------------------------------------------------------------------------*/
#define LOG_TAG "a_refl"
-#include <android/log.h>
+#include <log/log.h>
#include "a_refl.h"
#include "typedef.h"
diff --git a/media/libstagefright/codecs/m4v_h263/dec/src/conceal.cpp b/media/libstagefright/codecs/m4v_h263/dec/src/conceal.cpp
index 5baa2a2..8393d79 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/src/conceal.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/src/conceal.cpp
@@ -18,7 +18,7 @@
#define LOG_TAG "conceal"
-#include "android/log.h"
+#include "log/log.h"
#include "mp4dec_lib.h" /* video decoder function prototypes */
#include "vlc_decode.h"
diff --git a/media/libstagefright/filters/GraphicBufferListener.cpp b/media/libstagefright/filters/GraphicBufferListener.cpp
index c1aaa17..db061c1 100644
--- a/media/libstagefright/filters/GraphicBufferListener.cpp
+++ b/media/libstagefright/filters/GraphicBufferListener.cpp
@@ -22,6 +22,7 @@
#include <media/stagefright/MediaErrors.h>
#include <gui/BufferItem.h>
+#include <utils/String8.h>
#include "GraphicBufferListener.h"
diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp
index 045e044..1b0db33 100644
--- a/media/libstagefright/foundation/AMessage.cpp
+++ b/media/libstagefright/foundation/AMessage.cpp
@@ -22,8 +22,8 @@
#include "AMessage.h"
-#include <android/log.h>
#include <binder/Parcel.h>
+#include <log/log.h>
#include "AAtomizer.h"
#include "ABuffer.h"
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 477280a..e144942 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -49,11 +49,6 @@
const int64_t LiveSession::kUpSwitchMarginUs = 5000000ll;
const int64_t LiveSession::kResumeThresholdUs = 100000ll;
-// Buffer Prepare/Ready/Underflow Marks
-const int64_t LiveSession::kReadyMarkUs = 5000000ll;
-const int64_t LiveSession::kPrepareMarkUs = 1500000ll;
-const int64_t LiveSession::kUnderflowMarkUs = 1000000ll;
-
struct LiveSession::BandwidthEstimator : public RefBase {
BandwidthEstimator();
@@ -495,6 +490,13 @@
return new HTTPDownloader(mHTTPService, mExtraHeaders);
}
+void LiveSession::setBufferingSettings(
+ const BufferingSettings &buffering) {
+ sp<AMessage> msg = new AMessage(kWhatSetBufferingSettings, this);
+ writeToAMessage(msg, buffering);
+ msg->post();
+}
+
void LiveSession::connectAsync(
const char *url, const KeyedVector<String8, String8> *headers) {
sp<AMessage> msg = new AMessage(kWhatConnect, this);
@@ -620,6 +622,12 @@
void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatSetBufferingSettings:
+ {
+ readFromAMessage(msg, &mBufferingSettings);
+ break;
+ }
+
case kWhatConnect:
{
onConnect(msg);
@@ -830,7 +838,10 @@
// If switching up, require a cushion bigger than kUnderflowMark
// to avoid buffering immediately after the switch.
// (If we don't have that cushion we'd rather cancel and try again.)
- int64_t delayUs = switchUp ? (kUnderflowMarkUs + 1000000ll) : 0;
+ int64_t delayUs =
+ switchUp ?
+ (mBufferingSettings.mRebufferingWatermarkLowMs * 1000ll + 1000000ll)
+ : 0;
bool needResumeUntil = false;
sp<AMessage> stopParams = msg;
if (checkSwitchProgress(stopParams, delayUs, &needResumeUntil)) {
@@ -2189,13 +2200,16 @@
}
++activeCount;
- int64_t readyMark = mInPreparationPhase ? kPrepareMarkUs : kReadyMarkUs;
- if (bufferedDurationUs > readyMark
+ int64_t readyMarkUs =
+ (mInPreparationPhase ?
+ mBufferingSettings.mInitialWatermarkMs :
+ mBufferingSettings.mRebufferingWatermarkHighMs) * 1000ll;
+ if (bufferedDurationUs > readyMarkUs
|| mPacketSources[i]->isFinished(0)) {
++readyCount;
}
if (!mPacketSources[i]->isFinished(0)) {
- if (bufferedDurationUs < kUnderflowMarkUs) {
+ if (bufferedDurationUs < mBufferingSettings.mRebufferingWatermarkLowMs * 1000ll) {
++underflowCount;
}
if (bufferedDurationUs > mUpSwitchMark) {
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index a0138be..abf8cf0 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -18,6 +18,7 @@
#define LIVE_SESSION_H_
+#include <media/BufferingSettings.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/mediaplayer.h>
@@ -72,6 +73,8 @@
uint32_t flags,
const sp<IMediaHTTPService> &httpService);
+ void setBufferingSettings(const BufferingSettings &buffering);
+
int64_t calculateMediaTimeUs(int64_t firstTimeUs, int64_t timeUs, int32_t discontinuitySeq);
status_t dequeueAccessUnit(StreamType stream, sp<ABuffer> *accessUnit);
@@ -129,6 +132,7 @@
kWhatChangeConfiguration2 = 'chC2',
kWhatChangeConfiguration3 = 'chC3',
kWhatPollBuffering = 'poll',
+ kWhatSetBufferingSettings = 'sBuS',
};
// Bandwidth Switch Mark Defaults
@@ -138,9 +142,7 @@
static const int64_t kResumeThresholdUs;
// Buffer Prepare/Ready/Underflow Marks
- static const int64_t kReadyMarkUs;
- static const int64_t kPrepareMarkUs;
- static const int64_t kUnderflowMarkUs;
+ BufferingSettings mBufferingSettings;
struct BandwidthEstimator;
struct BandwidthItem {
diff --git a/media/libstagefright/include/DRMExtractor.h b/media/libstagefright/include/DRMExtractor.h
deleted file mode 100644
index 3dc7df8..0000000
--- a/media/libstagefright/include/DRMExtractor.h
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DRM_EXTRACTOR_H_
-
-#define DRM_EXTRACTOR_H_
-
-#include <media/IMediaSource.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <drm/DrmManagerClient.h>
-
-namespace android {
-
-struct AMessage;
-class DataSource;
-class SampleTable;
-class String8;
-class DecryptHandle;
-
-class DRMExtractor : public MediaExtractor {
-public:
- DRMExtractor(const sp<DataSource> &source, const char *mime);
-
- virtual size_t countTracks();
- virtual sp<IMediaSource> getTrack(size_t index);
- virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
- virtual sp<MetaData> getMetaData();
- virtual const char * name() { return "DRMExtractor"; }
-
-protected:
- virtual ~DRMExtractor();
-
-private:
- sp<DataSource> mDataSource;
-
- sp<IMediaExtractor> mOriginalExtractor;
- sp<DecryptHandle> mDecryptHandle;
- DrmManagerClient* mDrmManagerClient;
-
- DRMExtractor(const DRMExtractor &);
- DRMExtractor &operator=(const DRMExtractor &);
-};
-
-bool SniffDRM(
- const sp<DataSource> &source, String8 *mimeType, float *confidence,
- sp<AMessage> *);
-
-} // namespace android
-
-#endif // DRM_EXTRACTOR_H_
-
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index c20e9fc..ea86a37 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -1559,7 +1559,8 @@
switch (omxBuffer.mBufferType) {
case OMXBuffer::kBufferTypePreset:
return emptyBuffer_l(
- buffer, 0, omxBuffer.mRangeLength, flags, timestamp, fenceFd);
+ buffer, omxBuffer.mRangeOffset, omxBuffer.mRangeLength,
+ flags, timestamp, fenceFd);
case OMXBuffer::kBufferTypeANWBuffer:
return emptyGraphicBuffer_l(
diff --git a/media/libstagefright/omx/hal/1.0/Conversion.h b/media/libstagefright/omx/hal/1.0/Conversion.h
index 44d2c84..d42e5bf 100644
--- a/media/libstagefright/omx/hal/1.0/Conversion.h
+++ b/media/libstagefright/omx/hal/1.0/Conversion.h
@@ -2,7 +2,6 @@
#define ANDROID_HARDWARE_MEDIA_OMX_V1_0__CONVERSION_H
#include <hidl/MQDescriptor.h>
-#include <hidl/Status.h>
#include <unistd.h>
#include <vector>
@@ -147,13 +146,13 @@
*/
/**
- * \brief Convert `binder::Status` to `hardware::Status`.
+ * \brief Convert `binder::Status` to `Return<void>`.
*
* \param[in] l The source `binder::Status`.
- * \return The corresponding `hardware::Status`.
+ * \return The corresponding `Return<void>`.
*/
-// convert: ::android::binder::Status -> ::android::hardware::Status
-inline ::android::hardware::Status toHardwareStatus(
+// convert: ::android::binder::Status -> Return<void>
+inline Return<void> toHardwareStatus(
::android::binder::Status const& l) {
if (l.exceptionCode() == ::android::binder::Status::EX_SERVICE_SPECIFIC) {
return ::android::hardware::Status::fromServiceSpecificError(
@@ -166,36 +165,17 @@
}
/**
- * \brief Convert `hardware::Status` to `binder::Status`.
+ * \brief Convert `Return<void>` to `binder::Status`.
*
- * \param[in] t The source `hardware::Status`.
+ * \param[in] t The source `Return<void>`.
* \return The corresponding `binder::Status`.
*/
-// convert: ::android::hardware::Status -> ::android::binder::Status
+// convert: Return<void> -> ::android::binder::Status
inline ::android::binder::Status toBinderStatus(
- ::android::hardware::Status const& t) {
- if (t.exceptionCode() == ::android::hardware::Status::EX_SERVICE_SPECIFIC) {
- return ::android::binder::Status::fromServiceSpecificError(
- t.serviceSpecificErrorCode(),
- t.exceptionMessage());
- }
+ Return<void> const& t) {
return ::android::binder::Status::fromExceptionCode(
- t.exceptionCode(),
- t.exceptionMessage());
-}
-
-/**
- * \brief Convert `hardware::Return<void>` to `binder::Status`.
- *
- * \param[in] t The source `hardware::Return<void>`.
- * \return The corresponding `binder::Status`.
- *
- * This function simply calls `toBinderStatus(::android::hardware::Status
- * const&)`.
- */
-// convert: ::android::hardware::Return<void> -> ::android::binder::Status
-inline ::android::binder::Status toBinderStatus(Return<void> const& t) {
- return toBinderStatus(t.getStatus());
+ t.isOk() ? OK : UNKNOWN_ERROR,
+ t.description().c_str());
}
/**
@@ -215,8 +195,7 @@
*/
// convert: Status -> status_t
inline status_t toStatusT(Return<Status> const& t) {
- return t.isOk() ? static_cast<status_t>(static_cast<Status>(t)) :
- t.getStatus().transactionError();
+ return t.isOk() ? static_cast<status_t>(static_cast<Status>(t)) : UNKNOWN_ERROR;
}
/**
@@ -227,7 +206,7 @@
*/
// convert: Return<void> -> status_t
inline status_t toStatusT(Return<void> const& t) {
- return t.getStatus().transactionError();
+ return t.isOk() ? OK : UNKNOWN_ERROR;
}
/**
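The converters above now key entirely off Return<T>::isOk(): a failed transport becomes UNKNOWN_ERROR plus the return object's description string, a successful one passes its payload through. A hedged usage sketch; it assumes this Conversion.h is included, and the wrapper function names are invented:

// Hedged sketch: bridging HIDL call results back to binder/status_t callers.
// Only the two helpers defined above are real; the wrappers are illustrative.
inline ::android::binder::Status bridgeVoidCall(Return<void> const& ret) {
    return toBinderStatus(ret);          // OK, or UNKNOWN_ERROR + ret.description()
}

inline status_t bridgeStatusCall(Return<Status> const& ret) {
    return toStatusT(ret);               // wrapped Status on success, UNKNOWN_ERROR otherwise
}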
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
index c66a898..83af393 100644
--- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
@@ -762,10 +762,16 @@
return OK;
}
-status_t RTPSender::parseAPP(const uint8_t *data, size_t size __unused) {
- if (!memcmp("late", &data[8], 4)) {
- int64_t avgLatencyUs = (int64_t)U64_AT(&data[12]);
- int64_t maxLatencyUs = (int64_t)U64_AT(&data[20]);
+status_t RTPSender::parseAPP(const uint8_t *data, size_t size) {
+ static const size_t late_offset = 8;
+ static const char late_string[] = "late";
+ static const size_t avgLatencyUs_offset = late_offset + sizeof(late_string) - 1;
+ static const size_t maxLatencyUs_offset = avgLatencyUs_offset + sizeof(int64_t);
+
+ if ((size >= (maxLatencyUs_offset + sizeof(int64_t)))
+ && !memcmp(late_string, &data[late_offset], sizeof(late_string) - 1)) {
+ int64_t avgLatencyUs = (int64_t)U64_AT(&data[avgLatencyUs_offset]);
+ int64_t maxLatencyUs = (int64_t)U64_AT(&data[maxLatencyUs_offset]);
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatInformSender);
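Worked out, the offsets above are: the "late" tag begins at byte 8, the two 64-bit latency values follow at bytes 12 and 20, so the check requires at least 28 bytes before touching the payload. A standalone compile-time restatement:

// Plain C++ restatement of the bounds arithmetic; no AOSP headers needed.
#include <cstddef>

constexpr std::size_t kLateOffset         = 8;                       // "late" tag
constexpr std::size_t kAvgLatencyUsOffset = kLateOffset + 4;          // 12, after the tag
constexpr std::size_t kMaxLatencyUsOffset = kAvgLatencyUsOffset + 8;  // 20, after avg (int64)
constexpr std::size_t kMinAppPacketSize   = kMaxLatencyUsOffset + 8;  // 28, after max (int64)
static_assert(kMinAppPacketSize == 28,
              "an APP 'late' packet must carry two 64-bit latency fields");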
diff --git a/media/mtp/AsyncIO.cpp b/media/mtp/AsyncIO.cpp
index a1a98ab..e77ad38 100644
--- a/media/mtp/AsyncIO.cpp
+++ b/media/mtp/AsyncIO.cpp
@@ -37,15 +37,17 @@
}
void splice_read_func(struct aiocb *aiocbp) {
+ loff_t long_offset = aiocbp->aio_offset;
aiocbp->ret = TEMP_FAILURE_RETRY(splice(aiocbp->aio_fildes,
- (off64_t*) &aiocbp->aio_offset, aiocbp->aio_sink,
+ &long_offset, aiocbp->aio_sink,
NULL, aiocbp->aio_nbytes, 0));
if (aiocbp->ret == -1) aiocbp->error = errno;
}
void splice_write_func(struct aiocb *aiocbp) {
+ loff_t long_offset = aiocbp->aio_offset;
aiocbp->ret = TEMP_FAILURE_RETRY(splice(aiocbp->aio_fildes, NULL,
- aiocbp->aio_sink, (off64_t*) &aiocbp->aio_offset,
+ aiocbp->aio_sink, &long_offset,
aiocbp->aio_nbytes, 0));
if (aiocbp->ret == -1) aiocbp->error = errno;
}
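The same pattern — copy the offset into a local of the exact type splice(2) expects instead of casting the address of the struct field — shown in isolation, using only standard Linux APIs:

// Standalone sketch; compile as C++ on Linux (glibc declares splice() under
// _GNU_SOURCE, which g++ defines by default for C++).
#include <sys/types.h>
#include <fcntl.h>
#include <unistd.h>

ssize_t spliceFrom(int in_fd, int out_fd, off_t start, size_t count) {
    loff_t off = start;                  // properly typed local, no pointer casting
    return splice(in_fd, &off, out_fd, nullptr, count, 0);
}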
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index 10314e9..d0696a8 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -38,6 +38,8 @@
#define cpu_to_le16(x) htole16(x)
#define cpu_to_le32(x) htole32(x)
+#define FUNCTIONFS_ENDPOINT_ALLOC _IOR('g', 131, __u32)
+
namespace {
constexpr char FFS_MTP_EP_IN[] = "/dev/usb-ffs/mtp/ep1";
@@ -105,6 +107,7 @@
.bInterfaceClass = USB_CLASS_STILL_IMAGE,
.bInterfaceSubClass = 1,
.bInterfaceProtocol = 1,
+ .iInterface = 1,
};
const struct usb_interface_descriptor ptp_interface_desc = {
@@ -257,14 +260,23 @@
.intr_comp = ss_intr_comp,
};
+#define STR_INTERFACE "MTP"
const struct {
struct usb_functionfs_strings_head header;
+ struct {
+ __le16 code;
+ const char str1[sizeof(STR_INTERFACE)];
+ } __attribute__((packed)) lang0;
} __attribute__((packed)) strings = {
.header = {
.magic = cpu_to_le32(FUNCTIONFS_STRINGS_MAGIC),
.length = cpu_to_le32(sizeof(strings)),
- .str_count = cpu_to_le32(0),
- .lang_count = cpu_to_le32(0),
+ .str_count = cpu_to_le32(1),
+ .lang_count = cpu_to_le32(1),
+ },
+ .lang0 = {
+ .code = cpu_to_le16(0x0409),
+ .str1 = STR_INTERFACE,
},
};
@@ -467,6 +479,24 @@
mLock.unlock();
}
+class ScopedEndpointBufferAlloc {
+private:
+ const int mFd;
+ const unsigned int mAllocSize;
+public:
+ ScopedEndpointBufferAlloc(int fd, unsigned alloc_size) :
+ mFd(fd),
+ mAllocSize(alloc_size) {
+ if (ioctl(mFd, FUNCTIONFS_ENDPOINT_ALLOC, static_cast<__u32>(mAllocSize)))
+ PLOG(DEBUG) << "FFS endpoint alloc failed!";
+ }
+
+ ~ScopedEndpointBufferAlloc() {
+ if (ioctl(mFd, FUNCTIONFS_ENDPOINT_ALLOC, static_cast<__u32>(0)))
+ PLOG(DEBUG) << "FFS endpoint alloc reset failed!";
+ }
+};
+
/* Read from USB and write to a local file. */
int MtpFfsHandle::receiveFile(mtp_file_range mfr) {
// When receiving files, the incoming length is given in 32 bits.
@@ -494,6 +524,7 @@
bool write = false;
posix_fadvise(mfr.fd, 0, 0, POSIX_FADV_SEQUENTIAL | POSIX_FADV_NOREUSE);
+ ScopedEndpointBufferAlloc scopedAlloc(mBulkOut, mMaxRead);
// Break down the file into pieces that fit in buffers
while (file_length > 0 || write) {
@@ -609,6 +640,8 @@
if (writeHandle(mBulkIn, data, packet_size) == -1) return -1;
if (file_length == 0) return 0;
+ ScopedEndpointBufferAlloc scopedAlloc(mBulkIn, mMaxWrite);
+
// Break down the file into pieces that fit in buffers
while(file_length > 0) {
if (read) {
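ScopedEndpointBufferAlloc is a scope guard: the sized FUNCTIONFS_ENDPOINT_ALLOC ioctl runs in the constructor and the zero-size reset in the destructor, so the guard must be a named local that lives for the whole transfer loop. A minimal standalone sketch of the same pattern with placeholder request codes:

// Generic RAII sketch: acquire in the constructor, release on scope exit.
// The fd and request code are placeholders, not the FunctionFS ones.
#include <sys/ioctl.h>

class ScopedIoctlAlloc {
public:
    ScopedIoctlAlloc(int fd, unsigned long request, unsigned int size)
        : mFd(fd), mRequest(request) {
        ioctl(mFd, mRequest, size);      // best effort; the real code only logs failures
    }
    ~ScopedIoctlAlloc() {
        ioctl(mFd, mRequest, 0u);        // restore the default allocation
    }
    ScopedIoctlAlloc(const ScopedIoctlAlloc&) = delete;
    ScopedIoctlAlloc& operator=(const ScopedIoctlAlloc&) = delete;
private:
    const int mFd;
    const unsigned long mRequest;
};

// Usage: name the guard so it is not destroyed immediately as a temporary.
//   ScopedIoctlAlloc alloc(fd, SOME_ALLOC_REQUEST, 16384);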
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 1ac1eeb..e4e3d8f 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -20,4 +20,5 @@
name: "libmediandk.ndk",
symbol_file: "libmediandk.map.txt",
first_version: "21",
+ unversioned_until: "current",
}
diff --git a/radio/IRadio.cpp b/radio/IRadio.cpp
index ebf3859..5bbe7cb 100644
--- a/radio/IRadio.cpp
+++ b/radio/IRadio.cpp
@@ -172,16 +172,11 @@
virtual status_t getProgramInformation(struct radio_program_info *info)
{
Parcel data, reply;
- if (info == NULL) {
+ if (info == nullptr || info->metadata == nullptr) {
return BAD_VALUE;
}
radio_metadata_t *metadata = info->metadata;
data.writeInterfaceToken(IRadio::getInterfaceDescriptor());
- if (metadata != NULL) {
- data.writeUint32(1);
- } else {
- data.writeUint32(0);
- }
status_t status = remote()->transact(GET_PROGRAM_INFORMATION, data, &reply);
if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
@@ -190,13 +185,13 @@
// restore local metadata pointer
info->metadata = metadata;
- uint32_t metatataSize = reply.readUint32();
- if ((metadata != NULL) && (metatataSize != 0)) {
- radio_metadata_t *newMetadata = (radio_metadata_t *)malloc(metatataSize);
+ uint32_t metadataSize = reply.readUint32();
+ if (metadataSize != 0) {
+ radio_metadata_t *newMetadata = (radio_metadata_t *)malloc(metadataSize);
if (newMetadata == NULL) {
return NO_MEMORY;
}
- reply.read(newMetadata, metatataSize);
+ reply.read(newMetadata, metadataSize);
status = radio_metadata_add_metadata(&info->metadata, newMetadata);
free(newMetadata);
}
@@ -306,21 +301,17 @@
CHECK_INTERFACE(IRadio, data, reply);
struct radio_program_info info;
status_t status;
- // query metadata only if requested by remote side
- if (data.readUint32() == 1) {
- status = radio_metadata_allocate(&info.metadata, 0, 0);
- if (status != NO_ERROR) {
- return status;
- }
- } else {
- info.metadata = NULL;
+
+ status = radio_metadata_allocate(&info.metadata, 0, 0);
+ if (status != NO_ERROR) {
+ return status;
}
status = getProgramInformation(&info);
reply->writeInt32(status);
if (status == NO_ERROR) {
reply->write(&info, sizeof(struct radio_program_info));
- if ((info.metadata != NULL) && (radio_metadata_get_count(info.metadata) > 0)) {
+ if (radio_metadata_get_count(info.metadata) > 0) {
size_t size = radio_metadata_get_size(info.metadata);
reply->writeUint32((uint32_t)size);
reply->write(info.metadata, size);
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index 51d785a..aa2cd95 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -28,8 +28,6 @@
AudioStreamOut.cpp \
SpdifStreamOut.cpp \
Effects.cpp \
- AudioMixer.cpp.arm \
- BufferProviders.cpp \
PatchPanel.cpp \
StateQueue.cpp \
BufLog.cpp
@@ -37,12 +35,11 @@
LOCAL_C_INCLUDES := \
$(TOPDIR)frameworks/av/services/audiopolicy \
$(TOPDIR)frameworks/av/services/medialog \
- $(TOPDIR)external/sonic \
$(call include-path-for, audio-utils)
LOCAL_SHARED_LIBRARIES := \
libaudiohal \
- libaudioresampler \
+ libaudioprocessing \
libaudiospdif \
libaudioutils \
libcutils \
@@ -55,7 +52,6 @@
libnbaio \
libpowermanager \
libserviceutility \
- libsonic \
libmediautils \
libmemunreachable \
libmedia_helper
@@ -87,59 +83,4 @@
include $(BUILD_SHARED_LIBRARY)
-#
-# build audio resampler test tool
-#
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- test-resample.cpp \
-
-LOCAL_C_INCLUDES := \
- $(call include-path-for, audio-utils)
-
-LOCAL_STATIC_LIBRARIES := \
- libsndfile
-
-LOCAL_SHARED_LIBRARIES := \
- libaudioresampler \
- libaudioutils \
- libdl \
- libcutils \
- libutils \
- liblog
-
-LOCAL_MODULE:= test-resample
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_CFLAGS := -Werror -Wall
-
-include $(BUILD_EXECUTABLE)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- AudioResampler.cpp.arm \
- AudioResamplerCubic.cpp.arm \
- AudioResamplerSinc.cpp.arm \
- AudioResamplerDyn.cpp.arm
-
-LOCAL_C_INCLUDES := \
- $(call include-path-for, audio-utils)
-
-LOCAL_SHARED_LIBRARIES := \
- libcutils \
- libdl \
- liblog
-
-LOCAL_MODULE := libaudioresampler
-
-LOCAL_CFLAGS := -Werror -Wall
-
-# uncomment to disable NEON on architectures that actually do support NEON, for benchmarking
-#LOCAL_CFLAGS += -DUSE_NEON=false
-
-include $(BUILD_SHARED_LIBRARY)
-
include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index d08309b..a248912 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -46,7 +46,6 @@
#include <system/audio.h>
-#include "AudioMixer.h"
#include "AudioFlinger.h"
#include "ServiceUtilities.h"
@@ -512,8 +511,11 @@
return new NBLog::Writer();
}
success:
+ NBLog::Shared *sharedRawPtr = (NBLog::Shared *) shared->pointer();
+ new((void *) sharedRawPtr) NBLog::Shared(); // placement new here; the corresponding
+ // explicit destructor call is not needed since the type is trivially destructible (POD)
mediaLogService->registerWriter(shared, size, name);
- return new NBLog::Writer(size, shared);
+ return new NBLog::Writer(shared, size);
}
void AudioFlinger::unregisterWriter(const sp<NBLog::Writer>& writer)
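The placement new above constructs the NBLog::Shared header directly in the caller-provided shared memory region; because the type is trivially destructible, no matching destructor call is needed before the memory is released. A standalone sketch of the pattern with an illustrative type:

// Standalone sketch of placement new into externally owned memory.
#include <cstddef>
#include <new>
#include <type_traits>

struct SharedHeaderSketch {              // stands in for a trivially destructible shared struct
    unsigned head = 0;
    unsigned tail = 0;
};
static_assert(std::is_trivially_destructible<SharedHeaderSketch>::value,
              "no explicit destructor call is required before the region is freed");

void initSharedHeader(void* raw, std::size_t size) {
    if (raw == nullptr || size < sizeof(SharedHeaderSketch)) return;
    new (raw) SharedHeaderSketch();      // construct in place; the owner of `raw` frees it later
}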
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index d18ca47..e97d1ed 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -48,19 +48,20 @@
#include <system/audio.h>
#include <system/audio_policy.h>
+#include <media/audiohal/EffectBufferHalInterface.h>
#include <media/audiohal/StreamHalInterface.h>
#include <media/AudioBufferProvider.h>
+#include <media/AudioMixer.h>
#include <media/ExtendedAudioBufferProvider.h>
+#include <media/LinearMap.h>
#include "FastCapture.h"
#include "FastMixer.h"
#include <media/nbaio/NBAIO.h>
#include "AudioWatchdog.h"
-#include "AudioMixer.h"
#include "AudioStreamOut.h"
#include "SpdifStreamOut.h"
#include "AudioHwDevice.h"
-#include "LinearMap.h"
#include <powermanager/IPowerManager.h>
@@ -79,6 +80,7 @@
class EffectsFactoryHalInterface;
class FastMixer;
class PassthruBufferProvider;
+class RecordBufferConverter;
class ServerProxy;
// ----------------------------------------------------------------------------
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 6c937a5..343ad25 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -188,6 +188,7 @@
// this object is released which can happen after next process is called.
if (mHandles.size() == 0 && !mPinned) {
mState = DESTROYED;
+ mEffectInterface->close();
}
return mHandles.size();
@@ -275,9 +276,7 @@
{
Mutex::Autolock _l(mLock);
- if (mState == DESTROYED || mEffectInterface == 0 ||
- mConfig.inputCfg.buffer.raw == NULL ||
- mConfig.outputCfg.buffer.raw == NULL) {
+ if (mState == DESTROYED || mEffectInterface == 0 || mInBuffer == 0 || mOutBuffer == 0) {
return;
}
@@ -291,7 +290,7 @@
int ret;
if (isProcessImplemented()) {
// do the actual processing in the effect engine
- ret = mEffectInterface->process(&mConfig.inputCfg.buffer, &mConfig.outputCfg.buffer);
+ ret = mEffectInterface->process();
} else {
if (mConfig.inputCfg.buffer.raw != mConfig.outputCfg.buffer.raw) {
size_t frameCnt = mConfig.inputCfg.buffer.frameCount * FCC_2; //always stereo here
@@ -409,6 +408,12 @@
mConfig.outputCfg.mask = EFFECT_CONFIG_ALL;
mConfig.inputCfg.buffer.frameCount = thread->frameCount();
mConfig.outputCfg.buffer.frameCount = mConfig.inputCfg.buffer.frameCount;
+ if (mInBuffer != 0) {
+ mInBuffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
+ }
+ if (mOutBuffer != 0) {
+ mOutBuffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
+ }
ALOGV("configure() %p thread %p buffer %p framecount %zu",
this, thread.get(), mConfig.inputCfg.buffer.raw, mConfig.inputCfg.buffer.frameCount);
@@ -568,6 +573,7 @@
if (mEffectInterface != 0) {
remove_effect_from_hal_l();
// release effect engine
+ mEffectInterface->close();
mEffectInterface.clear();
}
}
@@ -762,6 +768,28 @@
}
}
+void AudioFlinger::EffectModule::setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
+ if (buffer != 0) {
+ mConfig.inputCfg.buffer.raw = buffer->audioBuffer()->raw;
+ buffer->setFrameCount(mConfig.inputCfg.buffer.frameCount);
+ } else {
+ mConfig.inputCfg.buffer.raw = NULL;
+ }
+ mInBuffer = buffer;
+ mEffectInterface->setInBuffer(buffer);
+}
+
+void AudioFlinger::EffectModule::setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
+ if (buffer != 0) {
+ mConfig.outputCfg.buffer.raw = buffer->audioBuffer()->raw;
+ buffer->setFrameCount(mConfig.outputCfg.buffer.frameCount);
+ } else {
+ mConfig.outputCfg.buffer.raw = NULL;
+ }
+ mOutBuffer = buffer;
+ mEffectInterface->setOutBuffer(buffer);
+}
+
status_t AudioFlinger::EffectModule::setVolume(uint32_t *left, uint32_t *right, bool controller)
{
Mutex::Autolock _l(mLock);
@@ -1482,7 +1510,7 @@
AudioFlinger::EffectChain::EffectChain(ThreadBase *thread,
audio_session_t sessionId)
: mThread(thread), mSessionId(sessionId), mActiveTrackCnt(0), mTrackCnt(0), mTailBufferCount(0),
- mOwnInBuffer(false), mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
+ mVolumeCtrlIdx(-1), mLeftVolume(UINT_MAX), mRightVolume(UINT_MAX),
mNewLeftVolume(UINT_MAX), mNewRightVolume(UINT_MAX)
{
mStrategy = AudioSystem::getStrategyForStream(AUDIO_STREAM_MUSIC);
@@ -1495,9 +1523,6 @@
AudioFlinger::EffectChain::~EffectChain()
{
- if (mOwnInBuffer) {
- delete[] mInBuffer;
- }
}
// getEffectFromDesc_l() must be called with ThreadBase::mLock held
@@ -1562,7 +1587,8 @@
// (4 bytes frame size)
const size_t frameSize =
audio_bytes_per_sample(AUDIO_FORMAT_PCM_16_BIT) * min(FCC_2, thread->channelCount());
- memset(mInBuffer, 0, thread->frameCount() * frameSize);
+ memset(mInBuffer->audioBuffer()->raw, 0, thread->frameCount() * frameSize);
+ mInBuffer->commit();
}
// Must be called with EffectChain::mLock locked
@@ -1600,9 +1626,15 @@
size_t size = mEffects.size();
if (doProcess) {
+ // Only the input and output buffers of the chain can be external,
+ // and 'update' / 'commit' do nothing for allocated buffers, thus
+ // it's not needed to consider any other buffers here.
+ mInBuffer->update();
+ mOutBuffer->update();
for (size_t i = 0; i < size; i++) {
mEffects[i]->process();
}
+ mOutBuffer->commit();
}
bool doResetVolume = false;
for (size_t i = 0; i < size; i++) {
@@ -1662,9 +1694,11 @@
// accumulation stage. Saturation is done in EffectModule::process() before
// calling the process in effect engine
size_t numSamples = thread->frameCount();
- int32_t *buffer = new int32_t[numSamples];
- memset(buffer, 0, numSamples * sizeof(int32_t));
- effect->setInBuffer((int16_t *)buffer);
+ sp<EffectBufferHalInterface> halBuffer;
+ status_t result = EffectBufferHalInterface::allocate(
+ numSamples * sizeof(int32_t), &halBuffer);
+ if (result != OK) return result;
+ effect->setInBuffer(halBuffer);
// auxiliary effects output samples to chain input buffer for further processing
// by insert effects
effect->setOutBuffer(mInBuffer);
@@ -1775,9 +1809,7 @@
mEffects[i]->release_l();
}
- if (type == EFFECT_FLAG_TYPE_AUXILIARY) {
- delete[] effect->inBuffer();
- } else {
+ if (type != EFFECT_FLAG_TYPE_AUXILIARY) {
if (i == size - 1 && i != 0) {
mEffects[i - 1]->setOutBuffer(mOutBuffer);
mEffects[i - 1]->configure();
@@ -1922,8 +1954,8 @@
result.append("\tIn buffer Out buffer Active tracks:\n");
snprintf(buffer, SIZE, "\t%p %p %d\n",
- mInBuffer,
- mOutBuffer,
+ mInBuffer->audioBuffer(),
+ mOutBuffer->audioBuffer(),
mActiveTrackCnt);
result.append(buffer);
write(fd, result.string(), result.size());
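Effect chains now hold sp<EffectBufferHalInterface> buffers instead of raw int16_t pointers. A hedged sketch of allocating and clearing a chain buffer, using only the calls visible in this hunk (allocate(), audioBuffer(), commit()); the helper name is invented and the surrounding AudioFlinger/audiohal headers are assumed to be in scope:

// Hedged sketch: allocate a HAL-side chain buffer, zero it, and publish the
// cleared contents. Mirrors the clearInputBuffer_l()/addEffectChain_l() usage above.
#include <cstring>   // memset; EffectBufferHalInterface comes from the audiohal headers

static status_t makeClearedChainBuffer(size_t numSamples,
                                       sp<EffectBufferHalInterface>* outBuffer) {
    sp<EffectBufferHalInterface> halBuffer;
    status_t result = EffectBufferHalInterface::allocate(
            numSamples * sizeof(int16_t), &halBuffer);
    if (result != OK) return result;
    memset(halBuffer->audioBuffer()->raw, 0, numSamples * sizeof(int16_t));
    halBuffer->commit();        // no-op for allocated buffers, required for mirrored ones
    *outBuffer = halBuffer;
    return OK;
}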
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index dc29ce0..0755c52 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -86,10 +86,14 @@
bool isEnabled() const;
bool isProcessEnabled() const;
- void setInBuffer(int16_t *buffer) { mConfig.inputCfg.buffer.s16 = buffer; }
- int16_t *inBuffer() { return mConfig.inputCfg.buffer.s16; }
- void setOutBuffer(int16_t *buffer) { mConfig.outputCfg.buffer.s16 = buffer; }
- int16_t *outBuffer() { return mConfig.outputCfg.buffer.s16; }
+ void setInBuffer(const sp<EffectBufferHalInterface>& buffer);
+ int16_t *inBuffer() const {
+ return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
+ }
+ void setOutBuffer(const sp<EffectBufferHalInterface>& buffer);
+ int16_t *outBuffer() const {
+ return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
+ }
void setChain(const wp<EffectChain>& chain) { mChain = chain; }
void setThread(const wp<ThreadBase>& thread) { mThread = thread; }
const wp<ThreadBase>& thread() { return mThread; }
@@ -153,6 +157,8 @@
const effect_descriptor_t mDescriptor;// effect descriptor received from effect engine
effect_config_t mConfig; // input and output audio configuration
sp<EffectHalInterface> mEffectInterface; // Effect module HAL
+ sp<EffectBufferHalInterface> mInBuffer; // Buffers for interacting with HAL
+ sp<EffectBufferHalInterface> mOutBuffer;
status_t mStatus; // initialization status
effect_state mState; // current activation state
Vector<EffectHandle *> mHandles; // list of client handles
@@ -301,18 +307,17 @@
void setMode_l(audio_mode_t mode);
void setAudioSource_l(audio_source_t source);
- void setInBuffer(int16_t *buffer, bool ownsBuffer = false) {
+ void setInBuffer(const sp<EffectBufferHalInterface>& buffer) {
mInBuffer = buffer;
- mOwnInBuffer = ownsBuffer;
}
int16_t *inBuffer() const {
- return mInBuffer;
+ return mInBuffer != 0 ? reinterpret_cast<int16_t*>(mInBuffer->ptr()) : NULL;
}
- void setOutBuffer(int16_t *buffer) {
+ void setOutBuffer(const sp<EffectBufferHalInterface>& buffer) {
mOutBuffer = buffer;
}
int16_t *outBuffer() const {
- return mOutBuffer;
+ return mOutBuffer != 0 ? reinterpret_cast<int16_t*>(mOutBuffer->ptr()) : NULL;
}
void incTrackCnt() { android_atomic_inc(&mTrackCnt); }
@@ -394,8 +399,8 @@
mutable Mutex mLock; // mutex protecting effect list
Vector< sp<EffectModule> > mEffects; // list of effect modules
audio_session_t mSessionId; // audio session ID
- int16_t *mInBuffer; // chain input buffer
- int16_t *mOutBuffer; // chain output buffer
+ sp<EffectBufferHalInterface> mInBuffer; // chain input buffer
+ sp<EffectBufferHalInterface> mOutBuffer; // chain output buffer
// 'volatile' here means these are accessed with atomic operations instead of mutex
volatile int32_t mActiveTrackCnt; // number of active tracks connected
@@ -403,7 +408,6 @@
int32_t mTailBufferCount; // current effect tail buffer count
int32_t mMaxTailBuffers; // maximum effect tail buffers
- bool mOwnInBuffer; // true if the chain owns its input buffer
int mVolumeCtrlIdx; // index of insert effect having control over volume
uint32_t mLeftVolume; // previous volume on left channel
uint32_t mRightVolume; // previous volume on right channel
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index 93f7ce5..7182f32 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -39,7 +39,7 @@
#endif
#include <audio_utils/conversion.h>
#include <audio_utils/format.h>
-#include "AudioMixer.h"
+#include <media/AudioMixer.h>
#include "FastMixer.h"
namespace android {
diff --git a/services/audioflinger/FastThreadDumpState.cpp b/services/audioflinger/FastThreadDumpState.cpp
index 9df5c4c..964a725 100644
--- a/services/audioflinger/FastThreadDumpState.cpp
+++ b/services/audioflinger/FastThreadDumpState.cpp
@@ -14,6 +14,7 @@
* limitations under the License.
*/
+#include <audio_utils/roundup.h>
#include "FastThreadDumpState.h"
namespace android {
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 724ce3c..b1ede30 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -29,6 +29,7 @@
#include <cutils/properties.h>
#include <media/AudioParameter.h>
#include <media/AudioResamplerPublic.h>
+#include <media/RecordBufferConverter.h>
#include <media/TypeConverter.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -56,8 +57,6 @@
#include <powermanager/PowerManager.h>
#include "AudioFlinger.h"
-#include "AudioMixer.h"
-#include "BufferProviders.h"
#include "FastMixer.h"
#include "FastCapture.h"
#include "ServiceUtilities.h"
@@ -1265,6 +1264,7 @@
bool chainCreated = false;
bool effectCreated = false;
bool effectRegistered = false;
+ audio_unique_id_t effectId = AUDIO_UNIQUE_ID_USE_UNSPECIFIED;
lStatus = initCheck();
if (lStatus != NO_ERROR) {
@@ -1298,15 +1298,16 @@
ALOGV("createEffect_l() got effect %p on chain %p", effect.get(), chain.get());
if (effect == 0) {
- audio_unique_id_t id = mAudioFlinger->nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
+ effectId = mAudioFlinger->nextUniqueId(AUDIO_UNIQUE_ID_USE_EFFECT);
// Check CPU and memory usage
- lStatus = AudioSystem::registerEffect(desc, mId, chain->strategy(), sessionId, id);
+ lStatus = AudioSystem::registerEffect(
+ desc, mId, chain->strategy(), sessionId, effectId);
if (lStatus != NO_ERROR) {
goto Exit;
}
effectRegistered = true;
// create a new effect module if none present in the chain
- lStatus = chain->createEffect_l(effect, this, desc, id, sessionId, pinned);
+ lStatus = chain->createEffect_l(effect, this, desc, effectId, sessionId, pinned);
if (lStatus != NO_ERROR) {
goto Exit;
}
@@ -1335,7 +1336,7 @@
chain->removeEffect_l(effect);
}
if (effectRegistered) {
- AudioSystem::unregisterEffect(effect->id());
+ AudioSystem::unregisterEffect(effectId);
}
if (chainCreated) {
removeEffectChain_l(chain);
@@ -2759,9 +2760,14 @@
status_t AudioFlinger::PlaybackThread::addEffectChain_l(const sp<EffectChain>& chain)
{
audio_session_t session = chain->sessionId();
- int16_t* buffer = reinterpret_cast<int16_t*>(mEffectBufferEnabled
- ? mEffectBuffer : mSinkBuffer);
- bool ownsBuffer = false;
+ sp<EffectBufferHalInterface> halInBuffer, halOutBuffer;
+ status_t result = EffectBufferHalInterface::mirror(
+ mEffectBufferEnabled ? mEffectBuffer : mSinkBuffer,
+ mEffectBufferEnabled ? mEffectBufferSize : mSinkBufferSize,
+ &halInBuffer);
+ if (result != OK) return result;
+ halOutBuffer = halInBuffer;
+ int16_t *buffer = reinterpret_cast<int16_t*>(halInBuffer->externalData());
ALOGV("addEffectChain_l() %p on thread %p for session %d", chain.get(), this, session);
if (session > AUDIO_SESSION_OUTPUT_MIX) {
@@ -2769,10 +2775,13 @@
// the sink buffer as input
if (mType != DIRECT) {
size_t numSamples = mNormalFrameCount * mChannelCount;
- buffer = new int16_t[numSamples];
- memset(buffer, 0, numSamples * sizeof(int16_t));
- ALOGV("addEffectChain_l() creating new input buffer %p session %d", buffer, session);
- ownsBuffer = true;
+ status_t result = EffectBufferHalInterface::allocate(
+ numSamples * sizeof(int16_t),
+ &halInBuffer);
+ if (result != OK) return result;
+ buffer = halInBuffer->audioBuffer()->s16;
+ ALOGV("addEffectChain_l() creating new input buffer %p session %d",
+ buffer, session);
}
// Attach all tracks with same session ID to this chain.
@@ -2795,9 +2804,8 @@
}
}
chain->setThread(this);
- chain->setInBuffer(buffer, ownsBuffer);
- chain->setOutBuffer(reinterpret_cast<int16_t*>(mEffectBufferEnabled
- ? mEffectBuffer : mSinkBuffer));
+ chain->setInBuffer(halInBuffer);
+ chain->setOutBuffer(halOutBuffer);
// Effect chain for session AUDIO_SESSION_OUTPUT_STAGE is inserted at end of effect
// chains list in order to be processed last as it contains output stage effects.
// Effect chain for session AUDIO_SESSION_OUTPUT_MIX is inserted before
@@ -3649,6 +3657,7 @@
mFastMixer->run("FastMixer", PRIORITY_URGENT_AUDIO);
pid_t tid = mFastMixer->getTid();
sendPrioConfigEvent(getpid_cached, tid, kPriorityFastMixer);
+ stream()->setHalThreadPriority(kPriorityFastMixer);
#ifdef AUDIO_WATCHDOG
// create and start the watchdog
@@ -5911,6 +5920,7 @@
mFastCapture->run("FastCapture", ANDROID_PRIORITY_URGENT_AUDIO);
pid_t tid = mFastCapture->getTid();
sendPrioConfigEvent(getpid_cached, tid, kPriorityFastCapture);
+ stream()->setHalThreadPriority(kPriorityFastCapture);
#ifdef AUDIO_WATCHDOG
// FIXME
#endif
@@ -6929,252 +6939,6 @@
buffer->frameCount = 0;
}
-AudioFlinger::RecordThread::RecordBufferConverter::RecordBufferConverter(
- audio_channel_mask_t srcChannelMask, audio_format_t srcFormat,
- uint32_t srcSampleRate,
- audio_channel_mask_t dstChannelMask, audio_format_t dstFormat,
- uint32_t dstSampleRate) :
- mSrcChannelMask(AUDIO_CHANNEL_INVALID), // updateParameters will set following vars
- // mSrcFormat
- // mSrcSampleRate
- // mDstChannelMask
- // mDstFormat
- // mDstSampleRate
- // mSrcChannelCount
- // mDstChannelCount
- // mDstFrameSize
- mBuf(NULL), mBufFrames(0), mBufFrameSize(0),
- mResampler(NULL),
- mIsLegacyDownmix(false),
- mIsLegacyUpmix(false),
- mRequiresFloat(false),
- mInputConverterProvider(NULL)
-{
- (void)updateParameters(srcChannelMask, srcFormat, srcSampleRate,
- dstChannelMask, dstFormat, dstSampleRate);
-}
-
-AudioFlinger::RecordThread::RecordBufferConverter::~RecordBufferConverter() {
- free(mBuf);
- delete mResampler;
- delete mInputConverterProvider;
-}
-
-size_t AudioFlinger::RecordThread::RecordBufferConverter::convert(void *dst,
- AudioBufferProvider *provider, size_t frames)
-{
- if (mInputConverterProvider != NULL) {
- mInputConverterProvider->setBufferProvider(provider);
- provider = mInputConverterProvider;
- }
-
- if (mResampler == NULL) {
- ALOGVV("NO RESAMPLING sampleRate:%u mSrcFormat:%#x mDstFormat:%#x",
- mSrcSampleRate, mSrcFormat, mDstFormat);
-
- AudioBufferProvider::Buffer buffer;
- for (size_t i = frames; i > 0; ) {
- buffer.frameCount = i;
- status_t status = provider->getNextBuffer(&buffer);
- if (status != OK || buffer.frameCount == 0) {
- frames -= i; // cannot fill request.
- break;
- }
- // format convert to destination buffer
- convertNoResampler(dst, buffer.raw, buffer.frameCount);
-
- dst = (int8_t*)dst + buffer.frameCount * mDstFrameSize;
- i -= buffer.frameCount;
- provider->releaseBuffer(&buffer);
- }
- } else {
- ALOGVV("RESAMPLING mSrcSampleRate:%u mDstSampleRate:%u mSrcFormat:%#x mDstFormat:%#x",
- mSrcSampleRate, mDstSampleRate, mSrcFormat, mDstFormat);
-
- // reallocate buffer if needed
- if (mBufFrameSize != 0 && mBufFrames < frames) {
- free(mBuf);
- mBufFrames = frames;
- (void)posix_memalign(&mBuf, 32, mBufFrames * mBufFrameSize);
- }
- // resampler accumulates, but we only have one source track
- memset(mBuf, 0, frames * mBufFrameSize);
- frames = mResampler->resample((int32_t*)mBuf, frames, provider);
- // format convert to destination buffer
- convertResampler(dst, mBuf, frames);
- }
- return frames;
-}
-
-status_t AudioFlinger::RecordThread::RecordBufferConverter::updateParameters(
- audio_channel_mask_t srcChannelMask, audio_format_t srcFormat,
- uint32_t srcSampleRate,
- audio_channel_mask_t dstChannelMask, audio_format_t dstFormat,
- uint32_t dstSampleRate)
-{
- // quick evaluation if there is any change.
- if (mSrcFormat == srcFormat
- && mSrcChannelMask == srcChannelMask
- && mSrcSampleRate == srcSampleRate
- && mDstFormat == dstFormat
- && mDstChannelMask == dstChannelMask
- && mDstSampleRate == dstSampleRate) {
- return NO_ERROR;
- }
-
- ALOGV("RecordBufferConverter updateParameters srcMask:%#x dstMask:%#x"
- " srcFormat:%#x dstFormat:%#x srcRate:%u dstRate:%u",
- srcChannelMask, dstChannelMask, srcFormat, dstFormat, srcSampleRate, dstSampleRate);
- const bool valid =
- audio_is_input_channel(srcChannelMask)
- && audio_is_input_channel(dstChannelMask)
- && audio_is_valid_format(srcFormat) && audio_is_linear_pcm(srcFormat)
- && audio_is_valid_format(dstFormat) && audio_is_linear_pcm(dstFormat)
- && (srcSampleRate <= dstSampleRate * AUDIO_RESAMPLER_DOWN_RATIO_MAX)
- ; // no upsampling checks for now
- if (!valid) {
- return BAD_VALUE;
- }
-
- mSrcFormat = srcFormat;
- mSrcChannelMask = srcChannelMask;
- mSrcSampleRate = srcSampleRate;
- mDstFormat = dstFormat;
- mDstChannelMask = dstChannelMask;
- mDstSampleRate = dstSampleRate;
-
- // compute derived parameters
- mSrcChannelCount = audio_channel_count_from_in_mask(srcChannelMask);
- mDstChannelCount = audio_channel_count_from_in_mask(dstChannelMask);
- mDstFrameSize = mDstChannelCount * audio_bytes_per_sample(mDstFormat);
-
- // do we need to resample?
- delete mResampler;
- mResampler = NULL;
- if (mSrcSampleRate != mDstSampleRate) {
- mResampler = AudioResampler::create(AUDIO_FORMAT_PCM_FLOAT,
- mSrcChannelCount, mDstSampleRate);
- mResampler->setSampleRate(mSrcSampleRate);
- mResampler->setVolume(AudioMixer::UNITY_GAIN_FLOAT, AudioMixer::UNITY_GAIN_FLOAT);
- }
-
- // are we running legacy channel conversion modes?
- mIsLegacyDownmix = (mSrcChannelMask == AUDIO_CHANNEL_IN_STEREO
- || mSrcChannelMask == AUDIO_CHANNEL_IN_FRONT_BACK)
- && mDstChannelMask == AUDIO_CHANNEL_IN_MONO;
- mIsLegacyUpmix = mSrcChannelMask == AUDIO_CHANNEL_IN_MONO
- && (mDstChannelMask == AUDIO_CHANNEL_IN_STEREO
- || mDstChannelMask == AUDIO_CHANNEL_IN_FRONT_BACK);
-
- // do we need to process in float?
- mRequiresFloat = mResampler != NULL || mIsLegacyDownmix || mIsLegacyUpmix;
-
- // do we need a staging buffer to convert for destination (we can still optimize this)?
- // we use mBufFrameSize > 0 to indicate both frame size as well as buffer necessity
- if (mResampler != NULL) {
- mBufFrameSize = max(mSrcChannelCount, FCC_2)
- * audio_bytes_per_sample(AUDIO_FORMAT_PCM_FLOAT);
- } else if (mIsLegacyUpmix || mIsLegacyDownmix) { // legacy modes always float
- mBufFrameSize = mDstChannelCount * audio_bytes_per_sample(AUDIO_FORMAT_PCM_FLOAT);
- } else if (mSrcChannelMask != mDstChannelMask && mDstFormat != mSrcFormat) {
- mBufFrameSize = mDstChannelCount * audio_bytes_per_sample(mSrcFormat);
- } else {
- mBufFrameSize = 0;
- }
- mBufFrames = 0; // force the buffer to be resized.
-
- // do we need an input converter buffer provider to give us float?
- delete mInputConverterProvider;
- mInputConverterProvider = NULL;
- if (mRequiresFloat && mSrcFormat != AUDIO_FORMAT_PCM_FLOAT) {
- mInputConverterProvider = new ReformatBufferProvider(
- audio_channel_count_from_in_mask(mSrcChannelMask),
- mSrcFormat,
- AUDIO_FORMAT_PCM_FLOAT,
- 256 /* provider buffer frame count */);
- }
-
- // do we need a remixer to do channel mask conversion
- if (!mIsLegacyDownmix && !mIsLegacyUpmix && mSrcChannelMask != mDstChannelMask) {
- (void) memcpy_by_index_array_initialization_from_channel_mask(
- mIdxAry, ARRAY_SIZE(mIdxAry), mDstChannelMask, mSrcChannelMask);
- }
- return NO_ERROR;
-}
-
-void AudioFlinger::RecordThread::RecordBufferConverter::convertNoResampler(
- void *dst, const void *src, size_t frames)
-{
- // src is native type unless there is legacy upmix or downmix, whereupon it is float.
- if (mBufFrameSize != 0 && mBufFrames < frames) {
- free(mBuf);
- mBufFrames = frames;
- (void)posix_memalign(&mBuf, 32, mBufFrames * mBufFrameSize);
- }
- // do we need to do legacy upmix and downmix?
- if (mIsLegacyUpmix || mIsLegacyDownmix) {
- void *dstBuf = mBuf != NULL ? mBuf : dst;
- if (mIsLegacyUpmix) {
- upmix_to_stereo_float_from_mono_float((float *)dstBuf,
- (const float *)src, frames);
- } else /*mIsLegacyDownmix */ {
- downmix_to_mono_float_from_stereo_float((float *)dstBuf,
- (const float *)src, frames);
- }
- if (mBuf != NULL) {
- memcpy_by_audio_format(dst, mDstFormat, mBuf, AUDIO_FORMAT_PCM_FLOAT,
- frames * mDstChannelCount);
- }
- return;
- }
- // do we need to do channel mask conversion?
- if (mSrcChannelMask != mDstChannelMask) {
- void *dstBuf = mBuf != NULL ? mBuf : dst;
- memcpy_by_index_array(dstBuf, mDstChannelCount,
- src, mSrcChannelCount, mIdxAry, audio_bytes_per_sample(mSrcFormat), frames);
- if (dstBuf == dst) {
- return; // format is the same
- }
- }
- // convert to destination buffer
- const void *convertBuf = mBuf != NULL ? mBuf : src;
- memcpy_by_audio_format(dst, mDstFormat, convertBuf, mSrcFormat,
- frames * mDstChannelCount);
-}
-
-void AudioFlinger::RecordThread::RecordBufferConverter::convertResampler(
- void *dst, /*not-a-const*/ void *src, size_t frames)
-{
- // src buffer format is ALWAYS float when entering this routine
- if (mIsLegacyUpmix) {
- ; // mono to stereo already handled by resampler
- } else if (mIsLegacyDownmix
- || (mSrcChannelMask == mDstChannelMask && mSrcChannelCount == 1)) {
- // the resampler outputs stereo for mono input channel (a feature?)
- // must convert to mono
- downmix_to_mono_float_from_stereo_float((float *)src,
- (const float *)src, frames);
- } else if (mSrcChannelMask != mDstChannelMask) {
- // convert to mono channel again for channel mask conversion (could be skipped
- // with further optimization).
- if (mSrcChannelCount == 1) {
- downmix_to_mono_float_from_stereo_float((float *)src,
- (const float *)src, frames);
- }
- // convert to destination format (in place, OK as float is larger than other types)
- if (mDstFormat != AUDIO_FORMAT_PCM_FLOAT) {
- memcpy_by_audio_format(src, mDstFormat, src, AUDIO_FORMAT_PCM_FLOAT,
- frames * mSrcChannelCount);
- }
- // channel convert and save to dst
- memcpy_by_index_array(dst, mDstChannelCount,
- src, mSrcChannelCount, mIdxAry, audio_bytes_per_sample(mDstFormat), frames);
- return;
- }
- // convert to destination format and save to dst
- memcpy_by_audio_format(dst, mDstFormat, src, AUDIO_FORMAT_PCM_FLOAT,
- frames * mDstChannelCount);
-}
bool AudioFlinger::RecordThread::checkForNewParameter_l(const String8& keyValuePair,
status_t& status)
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index e43f001..3fb0b07 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -1323,92 +1323,6 @@
// rolling counter that is never cleared
};
- /* The RecordBufferConverter is used for format, channel, and sample rate
- * conversion for a RecordTrack.
- *
- * TODO: Self contained, so move to a separate file later.
- *
- * RecordBufferConverter uses the convert() method rather than exposing a
- * buffer provider interface; this is to save a memory copy.
- */
- class RecordBufferConverter
- {
- public:
- RecordBufferConverter(
- audio_channel_mask_t srcChannelMask, audio_format_t srcFormat,
- uint32_t srcSampleRate,
- audio_channel_mask_t dstChannelMask, audio_format_t dstFormat,
- uint32_t dstSampleRate);
-
- ~RecordBufferConverter();
-
- /* Converts input data from an AudioBufferProvider by format, channelMask,
- * and sampleRate to a destination buffer.
- *
- * Parameters
- * dst: buffer to place the converted data.
- * provider: buffer provider to obtain source data.
- * frames: number of frames to convert
- *
- * Returns the number of frames converted.
- */
- size_t convert(void *dst, AudioBufferProvider *provider, size_t frames);
-
- // returns NO_ERROR if constructor was successful
- status_t initCheck() const {
- // mSrcChannelMask set on successful updateParameters
- return mSrcChannelMask != AUDIO_CHANNEL_INVALID ? NO_ERROR : NO_INIT;
- }
-
- // allows dynamic reconfigure of all parameters
- status_t updateParameters(
- audio_channel_mask_t srcChannelMask, audio_format_t srcFormat,
- uint32_t srcSampleRate,
- audio_channel_mask_t dstChannelMask, audio_format_t dstFormat,
- uint32_t dstSampleRate);
-
- // called to reset resampler buffers on record track discontinuity
- void reset() {
- if (mResampler != NULL) {
- mResampler->reset();
- }
- }
-
- private:
- // format conversion when not using resampler
- void convertNoResampler(void *dst, const void *src, size_t frames);
-
- // format conversion when using resampler; modifies src in-place
- void convertResampler(void *dst, /*not-a-const*/ void *src, size_t frames);
-
- // user provided information
- audio_channel_mask_t mSrcChannelMask;
- audio_format_t mSrcFormat;
- uint32_t mSrcSampleRate;
- audio_channel_mask_t mDstChannelMask;
- audio_format_t mDstFormat;
- uint32_t mDstSampleRate;
-
- // derived information
- uint32_t mSrcChannelCount;
- uint32_t mDstChannelCount;
- size_t mDstFrameSize;
-
- // format conversion buffer
- void *mBuf;
- size_t mBufFrames;
- size_t mBufFrameSize;
-
- // resampler info
- AudioResampler *mResampler;
-
- bool mIsLegacyDownmix; // legacy stereo to mono conversion needed
- bool mIsLegacyUpmix; // legacy mono to stereo conversion needed
- bool mRequiresFloat; // data processing requires float (e.g. resampler)
- PassthruBufferProvider *mInputConverterProvider; // converts input to float
- int8_t mIdxAry[sizeof(uint32_t) * 8]; // used for channel mask conversion
- };
-
#include "RecordTracks.h"
RecordThread(const sp<AudioFlinger>& audioFlinger,
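RecordBufferConverter keeps the interface shown in the declaration removed above; it now lives in libmedia and is pulled in through <media/RecordBufferConverter.h> (see the new includes in Threads.cpp and Tracks.cpp). A hedged usage sketch built only from that declaration:

// Hedged sketch: one capture conversion pass. The converter would typically be
// constructed once, e.g.
//   RecordBufferConverter converter(
//           AUDIO_CHANNEL_IN_STEREO, AUDIO_FORMAT_PCM_16_BIT, 48000,   // source
//           AUDIO_CHANNEL_IN_MONO,   AUDIO_FORMAT_PCM_FLOAT,  16000);  // destination
// and reused for every buffer of the record track.
static size_t convertCapture(RecordBufferConverter &converter, void *dst,
                             AudioBufferProvider *provider, size_t frames) {
    if (converter.initCheck() != NO_ERROR) {
        return 0;                        // constructor parameters were invalid
    }
    // dst must hold `frames` frames in the destination format and channel count.
    return converter.convert(dst, provider, frames);
}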
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 9746075..f2dd884 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -27,12 +27,12 @@
#include <private/media/AudioTrackShared.h>
-#include "AudioMixer.h"
#include "AudioFlinger.h"
#include "ServiceUtilities.h"
#include <media/nbaio/Pipe.h>
#include <media/nbaio/PipeReader.h>
+#include <media/RecordBufferConverter.h>
#include <audio_utils/minifloat.h>
// ----------------------------------------------------------------------------
diff --git a/services/audioflinger/tests/Android.mk b/services/audioflinger/tests/Android.mk
deleted file mode 100644
index a741079..0000000
--- a/services/audioflinger/tests/Android.mk
+++ /dev/null
@@ -1,68 +0,0 @@
-# Build the unit tests for audioflinger
-
-#
-# resampler unit test
-#
-LOCAL_PATH:= $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SHARED_LIBRARIES := \
- liblog \
- libutils \
- libcutils \
- libaudioutils \
- libaudioresampler
-
-LOCAL_C_INCLUDES := \
- $(call include-path-for, audio-utils) \
- frameworks/av/services/audioflinger
-
-LOCAL_SRC_FILES := \
- resampler_tests.cpp
-
-LOCAL_MODULE := resampler_tests
-LOCAL_MODULE_TAGS := tests
-
-LOCAL_CFLAGS := -Werror -Wall
-
-include $(BUILD_NATIVE_TEST)
-
-#
-# audio mixer test tool
-#
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- test-mixer.cpp \
- ../AudioMixer.cpp.arm \
- ../BufferProviders.cpp
-
-LOCAL_C_INCLUDES := \
- $(call include-path-for, audio-utils) \
- frameworks/av/services/audioflinger \
- external/sonic
-
-LOCAL_STATIC_LIBRARIES := \
- libsndfile
-
-LOCAL_SHARED_LIBRARIES := \
- libaudiohal \
- libeffects \
- libnbaio \
- libaudioresampler \
- libaudioutils \
- libdl \
- libcutils \
- libutils \
- liblog \
- libsonic
-
-LOCAL_MODULE:= test-mixer
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_CXX_STL := libc++
-
-LOCAL_CFLAGS := -Werror -Wall
-
-include $(BUILD_EXECUTABLE)
diff --git a/services/audioflinger/tests/build_and_run_all_unit_tests.sh b/services/audioflinger/tests/build_and_run_all_unit_tests.sh
deleted file mode 100755
index 7f4d456..0000000
--- a/services/audioflinger/tests/build_and_run_all_unit_tests.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/bin/bash
-
-if [ -z "$ANDROID_BUILD_TOP" ]; then
- echo "Android build environment not set"
- exit -1
-fi
-
-# ensure we have mm
-. $ANDROID_BUILD_TOP/build/envsetup.sh
-
-pushd $ANDROID_BUILD_TOP/frameworks/av/services/audioflinger/
-pwd
-mm
-
-echo "waiting for device"
-adb root && adb wait-for-device remount
-adb push $OUT/system/lib/libaudioresampler.so /system/lib
-adb push $OUT/data/nativetest/resampler_tests /system/bin
-
-sh $ANDROID_BUILD_TOP/frameworks/av/services/audioflinger/tests/run_all_unit_tests.sh
-
-popd
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
index 97a9c94..0bacef7 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h
@@ -43,6 +43,8 @@
void setMix(AudioMix &mix);
+ status_t dump(int fd, int spaces, int index) const;
+
private:
AudioMix mMix; // Audio policy mix descriptor
sp<SwAudioOutputDescriptor> mOutput; // Corresponding output stream
@@ -77,6 +79,8 @@
AudioMix **policyMix);
status_t getInputMixForAttr(audio_attributes_t attr, AudioMix **policyMix);
+
+ status_t dump(int fd) const;
};
}; // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h b/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
index 84e3a36..9e705aa 100644
--- a/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
+++ b/services/audiopolicy/common/managerdefinitions/include/TypeConverter.h
@@ -28,7 +28,25 @@
typedef device_category Type;
typedef Vector<Type> Collection;
};
+struct MixTypeTraits
+{
+ typedef int32_t Type;
+ typedef Vector<Type> Collection;
+};
+struct RouteFlagTraits
+{
+ typedef uint32_t Type;
+ typedef Vector<Type> Collection;
+};
+struct RuleTraits
+{
+ typedef uint32_t Type;
+ typedef Vector<Type> Collection;
+};
typedef TypeConverter<DeviceCategoryTraits> DeviceCategoryConverter;
+typedef TypeConverter<MixTypeTraits> MixTypeConverter;
+typedef TypeConverter<RouteFlagTraits> RouteFlagTypeConverter;
+typedef TypeConverter<RuleTraits> RuleTypeConverter;
}; // namespace android
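These typedefs back the new AudioPolicyMix::dump() below: toString() returns false when the value is missing from the table, while maskToString() expands a flag mask entry by entry. A hedged sketch (assumes this header is included and the usual logging macros are available):

// Hedged sketch: render a mix type and its route flags for a dump or log line.
#include <string>

static void describeMix(int32_t mixType, uint32_t routeFlags) {
    std::string mixTypeStr;
    if (!MixTypeConverter::toString(mixType, mixTypeStr)) {
        mixTypeStr = "unknown";          // value not present in the converter table
    }
    std::string routeFlagsStr;
    RouteFlagTypeConverter::maskToString(routeFlags, routeFlagsStr);
    ALOGV("mix type: %s, route flags: %s", mixTypeStr.c_str(), routeFlagsStr.c_str());
}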
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
index 1d6787a..c2981a1 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp
@@ -21,7 +21,7 @@
#include "AudioGain.h"
#include "TypeConverter.h"
-#include <android/log.h>
+#include <log/log.h>
#include <utils/String8.h>
namespace android {
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
index 02833a9..08930f1 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp
@@ -18,6 +18,7 @@
//#define LOG_NDEBUG 0
#include "AudioPolicyMix.h"
+#include "TypeConverter.h"
#include "HwModule.h"
#include "AudioPort.h"
#include "IOProfile.h"
@@ -51,6 +52,66 @@
return &mMix;
}
+status_t AudioPolicyMix::dump(int fd, int spaces, int index) const
+{
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ String8 result;
+
+ snprintf(buffer, SIZE, "%*sAudio Policy Mix %d:\n", spaces, "", index+1);
+ result.append(buffer);
+ std::string mixTypeLiteral;
+ if (!MixTypeConverter::toString(mMix.mMixType, mixTypeLiteral)) {
+ ALOGE("%s: failed to convert mix type %d", __FUNCTION__, mMix.mMixType);
+ return BAD_VALUE;
+ }
+ snprintf(buffer, SIZE, "%*s- mix type: %s\n", spaces, "", mixTypeLiteral.c_str());
+ result.append(buffer);
+ std::string routeFlagLiteral;
+ RouteFlagTypeConverter::maskToString(mMix.mRouteFlags, routeFlagLiteral);
+ snprintf(buffer, SIZE, "%*s- Route Flags: %s\n", spaces, "", routeFlagLiteral.c_str());
+ result.append(buffer);
+ std::string deviceLiteral;
+ deviceToString(mMix.mDeviceType, deviceLiteral);
+ snprintf(buffer, SIZE, "%*s- device type: %s\n", spaces, "", deviceLiteral.c_str());
+ result.append(buffer);
+ snprintf(buffer, SIZE, "%*s- device address: %s\n", spaces, "", mMix.mDeviceAddress.string());
+ result.append(buffer);
+
+ int indexCriterion = 0;
+ for (const auto &criterion : mMix.mCriteria) {
+ snprintf(buffer, SIZE, "%*s- Criterion %d:\n", spaces + 2, "", indexCriterion++);
+ result.append(buffer);
+ std::string usageLiteral;
+ if (!UsageTypeConverter::toString(criterion.mValue.mUsage, usageLiteral)) {
+ ALOGE("%s: failed to convert usage %d", __FUNCTION__, criterion.mValue.mUsage);
+ return BAD_VALUE;
+ }
+ snprintf(buffer, SIZE, "%*s- Usage:%s\n", spaces + 4, "", usageLiteral.c_str());
+ result.append(buffer);
+ if (mMix.mMixType == MIX_TYPE_RECORDERS) {
+ std::string sourceLiteral;
+ if (!SourceTypeConverter::toString(criterion.mValue.mSource, sourceLiteral)) {
+ ALOGE("%s: failed to convert source %d", __FUNCTION__, criterion.mValue.mSource);
+ return BAD_VALUE;
+ }
+ snprintf(buffer, SIZE, "%*s- Source:%s\n", spaces + 4, "", sourceLiteral.c_str());
+ result.append(buffer);
+ }
+ snprintf(buffer, SIZE, "%*s- Uid:%d\n", spaces + 4, "", criterion.mValue.mUid);
+ result.append(buffer);
+ std::string ruleLiteral;
+ if (!RuleTypeConverter::toString(criterion.mRule, ruleLiteral)) {
+ ALOGE("%s: failed to convert rule %d", __FUNCTION__, criterion.mRule);
+ return BAD_VALUE;
+ }
+ snprintf(buffer, SIZE, "%*s- Rule:%s\n", spaces + 4, "", ruleLiteral.c_str());
+ result.append(buffer);
+ }
+ write(fd, result.string(), result.size());
+ return NO_ERROR;
+}
+
status_t AudioPolicyMixCollection::registerMix(const String8& address, AudioMix mix,
sp<SwAudioOutputDescriptor> desc)
{
@@ -288,4 +349,14 @@
return NO_ERROR;
}
+status_t AudioPolicyMixCollection::dump(int fd) const
+{
+ std::string log("\nAudio Policy Mix:\n");
+ write(fd, log.c_str(), log.size());
+ for (size_t i = 0; i < size(); i++) {
+ valueAt(i)->dump(fd, 2, i);
+ }
+ return NO_ERROR;
+}
+
}; //namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
index f19b43c..dbdcca7 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioSession.cpp
@@ -23,7 +23,7 @@
#include "AudioGain.h"
#include "TypeConverter.h"
-#include <android/log.h>
+#include <log/log.h>
#include <utils/String8.h>
namespace android {
diff --git a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
index 4839683..0362037 100644
--- a/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/TypeConverter.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <media/AudioPolicy.h>
+
#include "TypeConverter.h"
namespace android {
@@ -30,6 +32,37 @@
TERMINATOR
};
+template <>
+const MixTypeConverter::Table MixTypeConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(MIX_TYPE_INVALID),
+ MAKE_STRING_FROM_ENUM(MIX_TYPE_PLAYERS),
+ MAKE_STRING_FROM_ENUM(MIX_TYPE_RECORDERS),
+ TERMINATOR
+};
+
+template <>
+const RouteFlagTypeConverter::Table RouteFlagTypeConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(MIX_ROUTE_FLAG_RENDER),
+ MAKE_STRING_FROM_ENUM(MIX_ROUTE_FLAG_LOOP_BACK),
+ MAKE_STRING_FROM_ENUM(MIX_ROUTE_FLAG_ALL),
+ TERMINATOR
+};
+
+template <>
+const RuleTypeConverter::Table RuleTypeConverter::mTable[] = {
+ MAKE_STRING_FROM_ENUM(RULE_EXCLUSION_MASK),
+ MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_USAGE),
+ MAKE_STRING_FROM_ENUM(RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET),
+ MAKE_STRING_FROM_ENUM(RULE_MATCH_UID),
+ MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_USAGE),
+ MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET),
+ MAKE_STRING_FROM_ENUM(RULE_EXCLUDE_UID),
+ TERMINATOR
+};
+
template class TypeConverter<DeviceCategoryTraits>;
+template class TypeConverter<MixTypeTraits>;
+template class TypeConverter<RouteFlagTraits>;
+template class TypeConverter<RuleTraits>;
}; // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 43c1b0a..e71bb01 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -342,6 +342,9 @@
ALOGV("handleDeviceConfigChange(() device: 0x%X, address %s name %s",
device, device_address, device_name);
+ // connect/disconnect only 1 device at a time
+ if (!audio_is_output_device(device) && !audio_is_input_device(device)) return BAD_VALUE;
+
// Check if the device is currently connected
sp<DeviceDescriptor> devDesc =
mHwModules.getDeviceDescriptor(device, device_address, device_name);
@@ -2416,6 +2419,7 @@
mVolumeCurves->dump(fd);
mEffects.dump(fd);
mAudioPatches.dump(fd);
+ mPolicyMixes.dump(fd);
return NO_ERROR;
}
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 52aa143..1e63a05 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -37,9 +37,6 @@
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
- if (!audio_is_output_device(device) && !audio_is_input_device(device)) {
- return BAD_VALUE;
- }
if (state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE &&
state != AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE) {
return BAD_VALUE;
@@ -72,9 +69,6 @@
if (!settingsAllowed()) {
return PERMISSION_DENIED;
}
- if (!audio_is_output_device(device) && !audio_is_input_device(device)) {
- return BAD_VALUE;
- }
ALOGV("handleDeviceConfigChange()");
Mutex::Autolock _l(mLock);
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index ef2e8d9..5b4d10d 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -28,6 +28,7 @@
common/Camera2ClientBase.cpp \
common/CameraDeviceBase.cpp \
common/CameraModule.cpp \
+ common/CameraProviderManager.cpp \
common/FrameProcessorBase.cpp \
api1/CameraClient.cpp \
api1/Camera2Client.cpp \
@@ -47,8 +48,10 @@
device3/Camera3OutputStream.cpp \
device3/Camera3ZslStream.cpp \
device3/Camera3DummyStream.cpp \
+ device3/Camera3SharedOutputStream.cpp \
device3/StatusTracker.cpp \
device3/Camera3BufferManager.cpp \
+ device3/Camera3StreamSplitter.cpp \
gui/RingBufferConsumer.cpp \
utils/CameraTraces.cpp \
utils/AutoConditionLock.cpp \
@@ -63,25 +66,36 @@
libmedia \
libmediautils \
libcamera_client \
+ libcamera_metadata \
libgui \
libhardware \
- libcamera_metadata \
+ libhidlbase \
+ libhidltransport \
libjpeg \
libmemunreachable \
- android.hardware.camera.common@1.0
+ android.hardware.camera.common@1.0 \
+ android.hardware.camera.provider@2.4 \
+ android.hardware.camera.device@1.0 \
+ android.hardware.camera.device@3.2 \
+ android.hidl.manager@1.0
LOCAL_EXPORT_SHARED_LIBRARY_HEADERS := libbinder libcamera_client
LOCAL_C_INCLUDES += \
system/media/private/camera/include \
- frameworks/native/include/media/openmax \
- external/jpeg
+ frameworks/native/include/media/openmax
LOCAL_EXPORT_C_INCLUDE_DIRS := \
frameworks/av/services/camera/libcameraservice
LOCAL_CFLAGS += -Wall -Wextra -Werror
+ifeq ($(ENABLE_TREBLE), true)
+
+ LOCAL_CFLAGS += -DENABLE_TREBLE
+
+endif # ENABLE_TREBLE
+
LOCAL_MODULE:= libcameraservice
include $(BUILD_SHARED_LIBRARY)
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 6314ba5..07d88f6 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -36,10 +36,18 @@
// CameraFlashlight implementation begins
// used by camera service to control flashflight.
/////////////////////////////////////////////////////////////////////
-CameraFlashlight::CameraFlashlight(CameraModule& cameraModule,
- const camera_module_callbacks_t& callbacks) :
- mCameraModule(&cameraModule),
- mCallbacks(&callbacks),
+CameraFlashlight::CameraFlashlight(CameraModule* cameraModule,
+ camera_module_callbacks_t* callbacks) :
+ mCameraModule(cameraModule),
+ mCallbacks(callbacks),
+ mFlashlightMapInitialized(false) {
+}
+
+CameraFlashlight::CameraFlashlight(sp<CameraProviderManager> providerManager,
+ camera_module_callbacks_t* callbacks) :
+ mCameraModule(nullptr),
+ mProviderManager(providerManager),
+ mCallbacks(callbacks),
mFlashlightMapInitialized(false) {
}
@@ -55,8 +63,10 @@
status_t res = OK;
- if (mCameraModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) {
- mFlashControl = new ModuleFlashControl(*mCameraModule, *mCallbacks);
+ if (mCameraModule == nullptr) {
+ mFlashControl = new ProviderFlashControl(mProviderManager);
+ } else if (mCameraModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) {
+ mFlashControl = new ModuleFlashControl(*mCameraModule);
if (mFlashControl == NULL) {
ALOGV("%s: cannot create flash control for module api v2.4+",
__FUNCTION__);
@@ -69,7 +79,7 @@
CAMERA_MODULE_API_VERSION_2_0) {
camera_info info;
res = mCameraModule->getCameraInfo(
- atoi(String8(cameraId).string()), &info);
+ atoi(cameraId.string()), &info);
if (res) {
ALOGE("%s: failed to get camera info for camera %s",
__FUNCTION__, cameraId.string());
@@ -157,15 +167,27 @@
status_t CameraFlashlight::findFlashUnits() {
Mutex::Autolock l(mLock);
status_t res;
- int32_t numCameras = mCameraModule->getNumberOfCameras();
+
+ std::vector<String8> cameraIds;
+ if (mCameraModule) {
+ cameraIds.resize(mCameraModule->getNumberOfCameras());
+ for (size_t i = 0; i < cameraIds.size(); i++) {
+ cameraIds[i] = String8::format("%zu", i);
+ }
+ } else {
+ // No module, must be provider
+ std::vector<std::string> ids = mProviderManager->getCameraDeviceIds();
+ cameraIds.resize(ids.size());
+ for (size_t i = 0; i < cameraIds.size(); i++) {
+ cameraIds[i] = String8(ids[i].c_str());
+ }
+ }
mHasFlashlightMap.clear();
mFlashlightMapInitialized = false;
- for (int32_t i = 0; i < numCameras; i++) {
+ for (auto &id : cameraIds) {
bool hasFlash = false;
- String8 id = String8::format("%d", i);
-
res = createFlashlightControl(id);
if (res) {
ALOGE("%s: failed to create flash control for %s", __FUNCTION__,
@@ -224,7 +246,7 @@
return NO_INIT;
}
- if (mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) {
+ if (mCameraModule && mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) {
// framework is going to open a camera device, all flash light control
// should be closed for backward compatible support.
mFlashControl.clear();
@@ -274,7 +296,7 @@
if (mOpenedCameraIds.size() != 0)
return OK;
- if (mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) {
+ if (mCameraModule && mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) {
// notify torch available for all cameras with a flash
int numCameras = mCameraModule->getNumberOfCameras();
for (int i = 0; i < numCameras; i++) {
@@ -298,10 +320,35 @@
// ModuleFlashControl implementation begins
// Flash control for camera module v2.4 and above.
/////////////////////////////////////////////////////////////////////
-ModuleFlashControl::ModuleFlashControl(CameraModule& cameraModule,
- const camera_module_callbacks_t& callbacks) :
+ProviderFlashControl::ProviderFlashControl(sp<CameraProviderManager> providerManager) :
+ mProviderManager(providerManager) {
+}
+
+ProviderFlashControl::~ProviderFlashControl() {
+}
+
+status_t ProviderFlashControl::hasFlashUnit(const String8& cameraId, bool *hasFlash) {
+ if (!hasFlash) {
+ return BAD_VALUE;
+ }
+ *hasFlash = mProviderManager->hasFlashUnit(cameraId.string());
+ return OK;
+}
+
+status_t ProviderFlashControl::setTorchMode(const String8& cameraId, bool enabled) {
+ ALOGV("%s: set camera %s torch mode to %d", __FUNCTION__,
+ cameraId.string(), enabled);
+
+ return mProviderManager->setTorchMode(cameraId.string(), enabled);
+}
+// ProviderFlashControl implementation ends
+
+/////////////////////////////////////////////////////////////////////
+// ModuleFlashControl implementation begins
+// Flash control for camera module v2.4 and above.
+/////////////////////////////////////////////////////////////////////
+ModuleFlashControl::ModuleFlashControl(CameraModule& cameraModule) :
mCameraModule(&cameraModule) {
- (void) callbacks;
}
ModuleFlashControl::~ModuleFlashControl() {
@@ -477,7 +524,7 @@
}
sp<CameraDeviceBase> device =
- new Camera3Device(atoi(cameraId.string()));
+ new Camera3Device(cameraId);
if (device == NULL) {
return NO_MEMORY;
}
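
For reference, a minimal standalone sketch (not part of this patch) of the dual-path id enumeration that the CameraFlashlight hunks above introduce: camera ids come from the legacy module when one exists, otherwise from the provider manager. The LegacyModule/ProviderManager types below are simplified stand-ins, not the real CameraModule/CameraProviderManager classes.

    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    // Simplified stand-ins for CameraModule and CameraProviderManager
    // (assumed shapes for illustration only).
    struct LegacyModule {
        int numberOfCameras() const { return 2; }
    };
    struct ProviderManager {
        std::vector<std::string> cameraDeviceIds() const { return {"0", "1", "back.0"}; }
    };

    // Mirrors the shape of findFlashUnits() after this patch: integer ids are
    // synthesized for the legacy HAL, string ids are taken from the providers.
    std::vector<std::string> listCameraIds(const LegacyModule* module,
                                           const std::shared_ptr<ProviderManager>& providers) {
        std::vector<std::string> ids;
        if (module != nullptr) {
            for (int i = 0; i < module->numberOfCameras(); i++) {
                ids.push_back(std::to_string(i));   // legacy HAL: integer ids
            }
        } else {
            ids = providers->cameraDeviceIds();     // HIDL providers: string ids
        }
        return ids;
    }

    int main() {
        auto providers = std::make_shared<ProviderManager>();
        for (const auto& id : listCameraIds(nullptr, providers)) {
            std::cout << "camera " << id << "\n";
        }
    }
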
diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h
index 59fc87d..b7c7690 100644
--- a/services/camera/libcameraservice/CameraFlashlight.h
+++ b/services/camera/libcameraservice/CameraFlashlight.h
@@ -17,14 +17,17 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERAFLASHLIGHT_H
#define ANDROID_SERVERS_CAMERA_CAMERAFLASHLIGHT_H
-#include "hardware/camera_common.h"
-#include "utils/KeyedVector.h"
-#include "utils/SortedVector.h"
-#include "gui/GLConsumer.h"
-#include "gui/Surface.h"
+#include <gui/GLConsumer.h>
+#include <gui/Surface.h>
+#include <hardware/camera_common.h>
+#include <utils/KeyedVector.h>
+#include <utils/SortedVector.h>
+#include "common/CameraProviderManager.h"
+#include "common/CameraModule.h"
#include "common/CameraDeviceBase.h"
#include "device1/CameraHardwareInterface.h"
+
namespace android {
/**
@@ -52,8 +55,10 @@
*/
class CameraFlashlight : public virtual VirtualLightRefBase {
public:
- CameraFlashlight(CameraModule& cameraModule,
- const camera_module_callbacks_t& callbacks);
+ CameraFlashlight(CameraModule* cameraModule,
+ camera_module_callbacks_t* callbacks);
+ CameraFlashlight(sp<CameraProviderManager> providerManager,
+ camera_module_callbacks_t* callbacks);
virtual ~CameraFlashlight();
// Find all flash units. This must be called before other methods. All
@@ -88,7 +93,10 @@
bool hasFlashUnitLocked(const String8& cameraId);
sp<FlashControlBase> mFlashControl;
+
CameraModule *mCameraModule;
+ sp<CameraProviderManager> mProviderManager;
+
const camera_module_callbacks_t *mCallbacks;
SortedVector<String8> mOpenedCameraIds;
@@ -100,12 +108,29 @@
};
/**
+ * Flash control for camera provider v2.4 and above.
+ */
+class ProviderFlashControl : public FlashControlBase {
+ public:
+ ProviderFlashControl(sp<CameraProviderManager> providerManager);
+ virtual ~ProviderFlashControl();
+
+ // FlashControlBase
+ status_t hasFlashUnit(const String8& cameraId, bool *hasFlash);
+ status_t setTorchMode(const String8& cameraId, bool enabled);
+
+ private:
+ sp<CameraProviderManager> mProviderManager;
+
+ Mutex mLock;
+};
+
+/**
* Flash control for camera module v2.4 and above.
*/
class ModuleFlashControl : public FlashControlBase {
public:
- ModuleFlashControl(CameraModule& cameraModule,
- const camera_module_callbacks_t& callbacks);
+ ModuleFlashControl(CameraModule& cameraModule);
virtual ~ModuleFlashControl();
// FlashControlBase
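
A hypothetical, simplified analogue of the flash-control hierarchy declared above: callers hold only the base interface, so whether the torch request goes through a provider manager or a legacy module is invisible to them. The types and return codes below are illustrative stand-ins, not the real FlashControlBase API.

    #include <iostream>
    #include <memory>
    #include <string>

    struct FlashControl {
        virtual ~FlashControl() = default;
        virtual bool hasFlashUnit(const std::string& cameraId) const = 0;
        virtual int setTorchMode(const std::string& cameraId, bool enabled) = 0;
    };

    struct ProviderFlash final : FlashControl {
        bool hasFlashUnit(const std::string& id) const override { return id == "0"; }
        int setTorchMode(const std::string& id, bool enabled) override {
            std::cout << "provider torch " << id << " -> " << enabled << "\n";
            return 0;  // OK
        }
    };

    struct ModuleFlash final : FlashControl {
        bool hasFlashUnit(const std::string& id) const override { return id == "0"; }
        int setTorchMode(const std::string& id, bool enabled) override {
            std::cout << "module torch " << id << " -> " << enabled << "\n";
            return 0;  // OK
        }
    };

    int main() {
        // The module/provider choice is made once, at creation time.
        std::unique_ptr<FlashControl> ctrl = std::make_unique<ProviderFlash>();
        if (ctrl->hasFlashUnit("0")) ctrl->setTorchMode("0", true);
    }
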
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 1d9ccb1..f439590 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -18,6 +18,12 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#ifdef ENABLE_TREBLE
+ #define USE_HIDL true
+#else
+ #define USE_HIDL false
+#endif
+
#include <algorithm>
#include <climits>
#include <stdio.h>
@@ -32,6 +38,7 @@
#include <android/hardware/ICameraClient.h>
#include <android-base/macros.h>
+#include <android-base/parseint.h>
#include <binder/AppOpsManager.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
@@ -54,8 +61,12 @@
#include <private/android_filesystem_config.h>
#include <system/camera_vendor_tags.h>
#include <system/camera_metadata.h>
+
#include <system/camera.h>
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <hidl/ServiceManagement.h>
+
#include "CameraService.h"
#include "api1/CameraClient.h"
#include "api1/Camera2Client.h"
@@ -162,16 +173,11 @@
// ----------------------------------------------------------------------------
-// This is ugly and only safe if we never re-create the CameraService, but
-// should be ok for now.
-static CameraService *gCameraService;
-
CameraService::CameraService() :
mEventLog(DEFAULT_EVENT_LOG_LENGTH),
mNumberOfCameras(0), mNumberOfNormalCameras(0),
- mSoundRef(0), mModule(nullptr) {
+ mSoundRef(0), mInitialized(false), mModule(nullptr) {
ALOGI("CameraService started (pid=%d)", getpid());
- gCameraService = this;
this->camera_device_status_change = android::camera_device_status_change;
this->torch_mode_status_change = android::torch_mode_status_change;
@@ -190,13 +196,27 @@
notifier.noteResetCamera();
notifier.noteResetFlashlight();
+ status_t res = INVALID_OPERATION;
+ if (USE_HIDL) {
+ res = enumerateProviders();
+ } else {
+ res = loadLegacyHalModule();
+ }
+ if (res == OK) {
+ mInitialized = true;
+ }
+
+ CameraService::pingCameraServiceProxy();
+}
+
+status_t CameraService::loadLegacyHalModule() {
camera_module_t *rawModule;
int err = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
(const hw_module_t **)&rawModule);
if (err < 0) {
ALOGE("Could not load camera HAL module: %d (%s)", err, strerror(-err));
logServiceError("Could not load camera HAL module", err);
- return;
+ return INVALID_OPERATION;
}
mModule = new CameraModule(rawModule);
@@ -208,7 +228,7 @@
delete mModule;
mModule = nullptr;
- return;
+ return INVALID_OPERATION;
}
ALOGI("Loaded \"%s\" camera module", mModule->getModuleName());
@@ -222,7 +242,7 @@
setUpVendorTags();
}
- mFlashlight = new CameraFlashlight(*mModule, *this);
+ mFlashlight = new CameraFlashlight(mModule, this);
status_t res = mFlashlight->findFlashUnits();
if (res) {
// impossible because we haven't open any camera devices.
@@ -250,7 +270,7 @@
if (checkCameraCapabilities(i, info, &latestStrangeCameraId) != OK) {
delete mModule;
mModule = nullptr;
- return;
+ return INVALID_OPERATION;
}
}
@@ -288,9 +308,62 @@
mModule->setCallbacks(this);
}
- CameraService::pingCameraServiceProxy();
+ return OK;
}
+status_t CameraService::enumerateProviders() {
+ mCameraProviderManager = new CameraProviderManager();
+ status_t res;
+ res = mCameraProviderManager->initialize(this);
+ if (res != OK) {
+ ALOGE("%s: Unable to initialize camera provider manager: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ mNumberOfCameras = mCameraProviderManager->getCameraCount();
+ mNumberOfNormalCameras = mCameraProviderManager->getStandardCameraCount();
+
+ // Setup vendor tags before we call get_camera_info the first time
+ // because HAL might need to setup static vendor keys in get_camera_info
+ // TODO: maybe put this into CameraProviderManager::initialize()?
+ mCameraProviderManager->setUpVendorTags();
+
+ mFlashlight = new CameraFlashlight(mCameraProviderManager, this);
+ res = mFlashlight->findFlashUnits();
+ if (res != OK) {
+ ALOGE("Failed to enumerate flash units: %s (%d)", strerror(-res), res);
+ }
+
+ // TODO: Verify device versions are in support
+
+ for (auto& cameraId : mCameraProviderManager->getCameraDeviceIds()) {
+ hardware::camera::common::V1_0::CameraResourceCost cost;
+ res = mCameraProviderManager->getResourceCost(cameraId, &cost);
+ if (res != OK) {
+ ALOGE("Failed to query device resource cost: %s (%d)", strerror(-res), res);
+ continue;
+ }
+ std::set<String8> conflicting;
+ for (size_t i = 0; i < cost.conflictingDevices.size(); i++) {
+ conflicting.emplace(String8(cost.conflictingDevices[i].c_str()));
+ }
+ String8 id8 = String8(cameraId.c_str());
+
+ Mutex::Autolock lock(mCameraStatesLock);
+ mCameraStates.emplace(id8,
+ std::make_shared<CameraState>(id8, cost.resourceCost, conflicting));
+
+ if (mFlashlight->hasFlashUnit(id8)) {
+ mTorchStatusMap.add(id8,
+ TorchModeStatus::AVAILABLE_OFF);
+ }
+ }
+
+ return OK;
+}
+
+
sp<ICameraServiceProxy> CameraService::getCameraServiceProxy() {
sp<ICameraServiceProxy> proxyBinder = nullptr;
#ifndef __BRILLO__
@@ -318,7 +391,6 @@
mModule = nullptr;
}
VendorTagDescriptor::clearGlobalVendorTagDescriptor();
- gCameraService = nullptr;
}
void CameraService::onDeviceStatusChanged(const String8& id,
@@ -473,7 +545,7 @@
Status CameraService::getCameraInfo(int cameraId,
CameraInfo* cameraInfo) {
ATRACE_CALL();
- if (!mModule) {
+ if (!mInitialized) {
return STATUS_ERROR(ERROR_DISCONNECTED,
"Camera subsystem is not available");
}
@@ -483,34 +555,42 @@
"CameraId is not valid");
}
- struct camera_info info;
- Status rc = filterGetInfoErrorCode(
- mModule->getCameraInfo(cameraId, &info));
+ Status ret = Status::ok();
+ if (mModule != nullptr) {
+ struct camera_info info;
+ ret = filterGetInfoErrorCode(mModule->getCameraInfo(cameraId, &info));
- if (rc.isOk()) {
- cameraInfo->facing = info.facing;
- cameraInfo->orientation = info.orientation;
- // CameraInfo is for android.hardware.Camera which does not
- // support external camera facing. The closest approximation would be
- // front camera.
- if (cameraInfo->facing == CAMERA_FACING_EXTERNAL) {
- cameraInfo->facing = CAMERA_FACING_FRONT;
+ if (ret.isOk()) {
+ cameraInfo->facing = info.facing;
+ cameraInfo->orientation = info.orientation;
+ // CameraInfo is for android.hardware.Camera which does not
+ // support external camera facing. The closest approximation would be
+ // front camera.
+ if (cameraInfo->facing == CAMERA_FACING_EXTERNAL) {
+ cameraInfo->facing = hardware::CAMERA_FACING_FRONT;
+ }
}
- }
- return rc;
-}
-
-int CameraService::cameraIdToInt(const String8& cameraId) {
- errno = 0;
- size_t pos = 0;
- int ret = stoi(std::string{cameraId.string()}, &pos);
- if (errno != 0 || pos != cameraId.size()) {
- return -1;
+ } else {
+ status_t err = mCameraProviderManager->getCameraInfo(std::to_string(cameraId), cameraInfo);
+ if (err != OK) {
+ ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
+ "Error retrieving camera info from device %d: %s (%d)", cameraId,
+ strerror(-err), err);
+ }
}
return ret;
}
-Status CameraService::getCameraCharacteristics(const String16& id,
+int CameraService::cameraIdToInt(const String8& cameraId) {
+ int id;
+ bool success = base::ParseInt(cameraId.string(), &id, 0);
+ if (!success) {
+ return -1;
+ }
+ return id;
+}
+
+Status CameraService::getCameraCharacteristics(const String16& cameraId,
CameraMetadata* cameraInfo) {
ATRACE_CALL();
if (!cameraInfo) {
@@ -518,32 +598,42 @@
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "cameraInfo is NULL");
}
- if (!mModule) {
- ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__);
+ if (!mInitialized) {
+ ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
return STATUS_ERROR(ERROR_DISCONNECTED,
"Camera subsystem is not available");;
}
- int cameraId = cameraIdToInt(String8(id));
+ Status ret{};
- if (cameraId < 0 || cameraId >= mNumberOfCameras) {
- ALOGE("%s: Invalid camera id: %d", __FUNCTION__, cameraId);
- return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
- "Invalid camera id: %d", cameraId);
- }
+ if (mModule != nullptr) {
+ int id = cameraIdToInt(String8(cameraId));
- int facing;
- Status ret;
- if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_0 ||
- getDeviceVersion(cameraId, &facing) < CAMERA_DEVICE_API_VERSION_3_0) {
- return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Can't get camera characteristics"
- " for devices with HAL version < 3.0, %d is version %x", cameraId,
- getDeviceVersion(cameraId, &facing));
- }
- struct camera_info info;
- ret = filterGetInfoErrorCode(mModule->getCameraInfo(cameraId, &info));
- if (ret.isOk()) {
- *cameraInfo = info.static_camera_characteristics;
+ if (id < 0 || id >= mNumberOfCameras) {
+ ALOGE("%s: Invalid camera id: %d", __FUNCTION__, id);
+ return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
+ "Invalid camera id: %d", id);
+ }
+
+ int version = getDeviceVersion(String8(cameraId));
+ if (version < CAMERA_DEVICE_API_VERSION_3_0) {
+ return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Can't get camera characteristics"
+ " for devices with HAL version < 3.0, %d is version %x", id, version);
+ }
+
+ struct camera_info info;
+ ret = filterGetInfoErrorCode(mModule->getCameraInfo(id, &info));
+ if (ret.isOk()) {
+ *cameraInfo = info.static_camera_characteristics;
+ }
+ } else {
+ status_t res = mCameraProviderManager->getCameraCharacteristics(
+ String16::std_string(cameraId), cameraInfo);
+ if (res != OK) {
+ return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION, "Unable to retrieve camera "
+ "characteristics for device %s: %s (%d)", String8(cameraId).string(),
+ strerror(-res), res);
+ }
}
return ret;
@@ -584,8 +674,8 @@
/*out*/
hardware::camera2::params::VendorTagDescriptor* desc) {
ATRACE_CALL();
- if (!mModule) {
- ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__);
+ if (!mInitialized) {
+ ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem not available");
}
sp<VendorTagDescriptor> globalDescriptor = VendorTagDescriptor::getGlobalVendorTagDescriptor();
@@ -595,24 +685,37 @@
return Status::ok();
}
-int CameraService::getDeviceVersion(int cameraId, int* facing) {
+int CameraService::getDeviceVersion(const String8& cameraId, int* facing) {
ATRACE_CALL();
struct camera_info info;
- if (mModule->getCameraInfo(cameraId, &info) != OK) {
- return -1;
- }
- int deviceVersion;
- if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0) {
- deviceVersion = info.device_version;
+ int deviceVersion = 0;
+
+ if (mModule != nullptr) {
+ int id = cameraIdToInt(cameraId);
+ if (id < 0) return -1;
+
+ if (mModule->getCameraInfo(id, &info) != OK) {
+ return -1;
+ }
+
+ if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0) {
+ deviceVersion = info.device_version;
+ } else {
+ deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
+ }
+
+ if (facing) {
+ *facing = info.facing;
+ }
} else {
- deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
+ status_t res;
+ hardware::hidl_version maxVersion{0,0};
+ res = mCameraProviderManager->getHighestSupportedVersion(String8::std_string(cameraId),
+ &maxVersion);
+ if (res == NAME_NOT_FOUND) return -1;
+ deviceVersion = HARDWARE_DEVICE_API_VERSION(maxVersion.get_major(), maxVersion.get_minor());
}
-
- if (facing) {
- *facing = info.facing;
- }
-
return deviceVersion;
}
@@ -635,6 +738,8 @@
bool CameraService::setUpVendorTags() {
ATRACE_CALL();
+ if (mModule == nullptr) return false;
+
vendor_tag_ops_t vOps = vendor_tag_ops_t();
// Check if vendor operations have been implemented
@@ -671,7 +776,7 @@
}
Status CameraService::makeClient(const sp<CameraService>& cameraService,
- const sp<IInterface>& cameraCb, const String16& packageName, int cameraId,
+ const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId,
int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode,
int halVersion, int deviceVersion, apiLevel effectiveApiLevel,
/*out*/sp<BasicClient>* client) {
@@ -683,13 +788,13 @@
case CAMERA_DEVICE_API_VERSION_1_0:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
- *client = new CameraClient(cameraService, tmp, packageName, cameraId, facing,
- clientPid, clientUid, getpid(), legacyMode);
+ *client = new CameraClient(cameraService, tmp, packageName, cameraIdToInt(cameraId),
+ facing, clientPid, clientUid, getpid(), legacyMode);
} else { // Camera2 API route
ALOGW("Camera using old HAL version: %d", deviceVersion);
return STATUS_ERROR_FMT(ERROR_DEPRECATED_HAL,
- "Camera device \"%d\" HAL version %d does not support camera2 API",
- cameraId, deviceVersion);
+ "Camera device \"%s\" HAL version %d does not support camera2 API",
+ cameraId.string(), deviceVersion);
}
break;
case CAMERA_DEVICE_API_VERSION_3_0:
@@ -699,8 +804,8 @@
case CAMERA_DEVICE_API_VERSION_3_4:
if (effectiveApiLevel == API_1) { // Camera1 API route
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
- *client = new Camera2Client(cameraService, tmp, packageName, cameraId, facing,
- clientPid, clientUid, servicePid, legacyMode);
+ *client = new Camera2Client(cameraService, tmp, packageName, cameraIdToInt(cameraId),
+ facing, clientPid, clientUid, servicePid, legacyMode);
} else { // Camera2 API route
sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
@@ -712,8 +817,8 @@
// Should not be reachable
ALOGE("Unknown camera device HAL version: %d", deviceVersion);
return STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
- "Camera device \"%d\" has unknown HAL version %d",
- cameraId, deviceVersion);
+ "Camera device \"%s\" has unknown HAL version %d",
+ cameraId.string(), deviceVersion);
}
} else {
// A particular HAL version is requested by caller. Create CameraClient
@@ -722,16 +827,16 @@
halVersion == CAMERA_DEVICE_API_VERSION_1_0) {
// Only support higher HAL version device opened as HAL1.0 device.
sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get());
- *client = new CameraClient(cameraService, tmp, packageName, cameraId, facing,
- clientPid, clientUid, servicePid, legacyMode);
+ *client = new CameraClient(cameraService, tmp, packageName, cameraIdToInt(cameraId),
+ facing, clientPid, clientUid, servicePid, legacyMode);
} else {
// Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet.
ALOGE("Invalid camera HAL version %x: HAL %x device can only be"
" opened as HAL %x device", halVersion, deviceVersion,
CAMERA_DEVICE_API_VERSION_1_0);
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
- "Camera device \"%d\" (HAL version %d) cannot be opened as HAL version %d",
- cameraId, deviceVersion, halVersion);
+ "Camera device \"%s\" (HAL version %d) cannot be opened as HAL version %d",
+ cameraId.string(), deviceVersion, halVersion);
}
}
return Status::ok();
@@ -922,7 +1027,7 @@
int callingPid = getCallingPid();
- if (!mModule) {
+ if (!mInitialized) {
ALOGE("CameraService::connect X (PID %d) rejected (camera HAL module not loaded)",
callingPid);
return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
@@ -1288,22 +1393,24 @@
ATRACE_CALL();
String8 id = String8::format("%d", cameraId);
- int apiVersion = mModule->getModuleApiVersion();
- if (halVersion != CAMERA_HAL_API_VERSION_UNSPECIFIED &&
- apiVersion < CAMERA_MODULE_API_VERSION_2_3) {
- /*
- * Either the HAL version is unspecified in which case this just creates
- * a camera client selected by the latest device version, or
- * it's a particular version in which case the HAL must supported
- * the open_legacy call
- */
- String8 msg = String8::format("Camera HAL module version %x too old for connectLegacy!",
- apiVersion);
- ALOGE("%s: %s",
- __FUNCTION__, msg.string());
- logRejected(id, getCallingPid(), String8(clientPackageName),
- msg);
- return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+ if (mModule != nullptr) {
+ int apiVersion = mModule->getModuleApiVersion();
+ if (halVersion != CAMERA_HAL_API_VERSION_UNSPECIFIED &&
+ apiVersion < CAMERA_MODULE_API_VERSION_2_3) {
+ /*
+ * Either the HAL version is unspecified in which case this just creates
+ * a camera client selected by the latest device version, or
+ * it's a particular version in which case the HAL must supported
+ * the open_legacy call
+ */
+ String8 msg = String8::format("Camera HAL module version %x too old for connectLegacy!",
+ apiVersion);
+ ALOGE("%s: %s",
+ __FUNCTION__, msg.string());
+ logRejected(id, getCallingPid(), String8(clientPackageName),
+ msg);
+ return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
}
Status ret = Status::ok();
@@ -1438,9 +1545,9 @@
}
int facing = -1;
- int deviceVersion = getDeviceVersion(id, /*out*/&facing);
+ int deviceVersion = getDeviceVersion(cameraId, /*out*/&facing);
sp<BasicClient> tmp = nullptr;
- if(!(ret = makeClient(this, cameraCb, clientPackageName, id, facing, clientPid,
+ if(!(ret = makeClient(this, cameraCb, clientPackageName, cameraId, facing, clientPid,
clientUid, getpid(), legacyMode, halVersion, deviceVersion, effectiveApiLevel,
/*out*/&tmp)).isOk()) {
return ret;
@@ -1450,8 +1557,14 @@
LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state",
__FUNCTION__);
- if ((err = client->initialize(mModule)) != OK) {
- ALOGE("%s: Could not initialize client from HAL module.", __FUNCTION__);
+ if (mModule != nullptr) {
+ err = client->initialize(mModule);
+ } else {
+ err = client->initialize(mCameraProviderManager);
+ }
+
+ if (err != OK) {
+ ALOGE("%s: Could not initialize client from HAL.", __FUNCTION__);
// Errors could be from the HAL module open call or from AppOpsManager
switch(err) {
case BAD_VALUE:
@@ -1513,6 +1626,7 @@
Status CameraService::setTorchMode(const String16& cameraId, bool enabled,
const sp<IBinder>& clientBinder) {
+ Mutex::Autolock lock(mServiceLock);
ATRACE_CALL();
if (enabled && clientBinder == nullptr) {
@@ -1534,8 +1648,8 @@
StatusInternal cameraStatus = state->getStatus();
if (cameraStatus != StatusInternal::PRESENT &&
- cameraStatus != StatusInternal::NOT_PRESENT) {
- ALOGE("%s: camera id is invalid %s", __FUNCTION__, id.string());
+ cameraStatus != StatusInternal::NOT_AVAILABLE) {
+ ALOGE("%s: camera id is invalid %s, status %d", __FUNCTION__, id.string(), (int)cameraStatus);
return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
"Camera ID \"%s\" is a not valid camera ID", id.string());
}
@@ -1557,7 +1671,7 @@
}
if (status == TorchModeStatus::NOT_AVAILABLE) {
- if (cameraStatus == StatusInternal::NOT_PRESENT) {
+ if (cameraStatus == StatusInternal::NOT_AVAILABLE) {
ALOGE("%s: torch mode of camera %s is not available because "
"camera is in use", __FUNCTION__, id.string());
return STATUS_ERROR_FMT(ERROR_CAMERA_IN_USE,
@@ -1754,7 +1868,9 @@
/*out*/ bool *isSupported) {
ATRACE_CALL();
- ALOGV("%s: for camera ID = %s", __FUNCTION__, String8(cameraId).string());
+ const String8 id = String8(cameraId);
+
+ ALOGV("%s: for camera ID = %s", __FUNCTION__, id.string());
switch (apiVersion) {
case API_VERSION_1:
@@ -1768,7 +1884,6 @@
int facing = -1;
- int id = cameraIdToInt(String8(cameraId));
int deviceVersion = getDeviceVersion(id, &facing);
switch(deviceVersion) {
@@ -1776,30 +1891,30 @@
case CAMERA_DEVICE_API_VERSION_3_0:
case CAMERA_DEVICE_API_VERSION_3_1:
if (apiVersion == API_VERSION_2) {
- ALOGV("%s: Camera id %d uses HAL version %d <3.2, doesn't support api2 without shim",
- __FUNCTION__, id, deviceVersion);
+ ALOGV("%s: Camera id %s uses HAL version %d <3.2, doesn't support api2 without shim",
+ __FUNCTION__, id.string(), deviceVersion);
*isSupported = false;
} else { // if (apiVersion == API_VERSION_1) {
- ALOGV("%s: Camera id %d uses older HAL before 3.2, but api1 is always supported",
- __FUNCTION__, id);
+ ALOGV("%s: Camera id %s uses older HAL before 3.2, but api1 is always supported",
+ __FUNCTION__, id.string());
*isSupported = true;
}
break;
case CAMERA_DEVICE_API_VERSION_3_2:
case CAMERA_DEVICE_API_VERSION_3_3:
case CAMERA_DEVICE_API_VERSION_3_4:
- ALOGV("%s: Camera id %d uses HAL3.2 or newer, supports api1/api2 directly",
- __FUNCTION__, id);
+ ALOGV("%s: Camera id %s uses HAL3.2 or newer, supports api1/api2 directly",
+ __FUNCTION__, id.string());
*isSupported = true;
break;
case -1: {
- String8 msg = String8::format("Unknown camera ID %d", id);
+ String8 msg = String8::format("Unknown camera ID %s", id.string());
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.string());
}
default: {
- String8 msg = String8::format("Unknown device version %d for device %d",
- deviceVersion, id);
+ String8 msg = String8::format("Unknown device version %x for device %s",
+ deviceVersion, id.string());
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(ERROR_INVALID_OPERATION, msg.string());
}
@@ -1875,6 +1990,8 @@
* Also check that the device HAL version is still in support
*/
int CameraService::checkCameraCapabilities(int id, camera_info info, int *latestStrangeCameraId) {
+ if (mModule == nullptr) return NO_INIT;
+
// device_version undefined in CAMERA_MODULE_API_VERSION_1_0,
// All CAMERA_MODULE_API_VERSION_1_0 devices are backward-compatible
if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0) {
@@ -2184,24 +2301,25 @@
CameraService::Client::Client(const sp<CameraService>& cameraService,
const sp<ICameraClient>& cameraClient,
const String16& clientPackageName,
- int cameraId, int cameraFacing,
+ const String8& cameraIdStr, int cameraFacing,
int clientPid, uid_t clientUid,
int servicePid) :
CameraService::BasicClient(cameraService,
IInterface::asBinder(cameraClient),
clientPackageName,
- cameraId, cameraFacing,
+ cameraIdStr, cameraFacing,
clientPid, clientUid,
- servicePid)
+ servicePid),
+ mCameraId(CameraService::cameraIdToInt(cameraIdStr))
{
int callingPid = getCallingPid();
- LOG1("Client::Client E (pid %d, id %d)", callingPid, cameraId);
+ LOG1("Client::Client E (pid %d, id %d)", callingPid, mCameraId);
mRemoteCallback = cameraClient;
cameraService->loadSound();
- LOG1("Client::Client X (pid %d, id %d)", callingPid, cameraId);
+ LOG1("Client::Client X (pid %d, id %d)", callingPid, mCameraId);
}
// tear down the client
@@ -2209,26 +2327,28 @@
ALOGV("~Client");
mDestructionStarted = true;
- mCameraService->releaseSound();
+ sCameraService->releaseSound();
// unconditionally disconnect. function is idempotent
Client::disconnect();
}
+sp<CameraService> CameraService::BasicClient::BasicClient::sCameraService;
+
CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService,
const sp<IBinder>& remoteCallback,
const String16& clientPackageName,
- int cameraId, int cameraFacing,
+ const String8& cameraIdStr, int cameraFacing,
int clientPid, uid_t clientUid,
int servicePid):
- mClientPackageName(clientPackageName), mDisconnected(false)
+ mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing),
+ mClientPackageName(clientPackageName), mClientPid(clientPid), mClientUid(clientUid),
+ mServicePid(servicePid),
+ mDisconnected(false),
+ mRemoteBinder(remoteCallback)
{
- mCameraService = cameraService;
- mRemoteBinder = remoteCallback;
- mCameraId = cameraId;
- mCameraFacing = cameraFacing;
- mClientPid = clientPid;
- mClientUid = clientUid;
- mServicePid = servicePid;
+ if (sCameraService == nullptr) {
+ sCameraService = cameraService;
+ }
mOpsActive = false;
mDestructionStarted = false;
@@ -2276,19 +2396,20 @@
}
mDisconnected = true;
- mCameraService->removeByClient(this);
- mCameraService->logDisconnected(String8::format("%d", mCameraId), mClientPid,
+ sCameraService->removeByClient(this);
+ sCameraService->logDisconnected(mCameraIdStr, mClientPid,
String8(mClientPackageName));
sp<IBinder> remote = getRemote();
if (remote != nullptr) {
- remote->unlinkToDeath(mCameraService);
+ remote->unlinkToDeath(sCameraService);
}
finishCameraOps();
// Notify flashlight that a camera device is closed.
- mCameraService->mFlashlight->deviceClosed(String8::format("%d", mCameraId));
- ALOGI("%s: Disconnected client for camera %d for PID %d", __FUNCTION__, mCameraId, mClientPid);
+ sCameraService->mFlashlight->deviceClosed(mCameraIdStr);
+ ALOGI("%s: Disconnected client for camera %s for PID %d", __FUNCTION__, mCameraIdStr.string(),
+ mClientPid);
// client shouldn't be able to call into us anymore
mClientPid = 0;
@@ -2340,14 +2461,14 @@
mClientUid, mClientPackageName);
if (res == AppOpsManager::MODE_ERRORED) {
- ALOGI("Camera %d: Access for \"%s\" has been revoked",
- mCameraId, String8(mClientPackageName).string());
+ ALOGI("Camera %s: Access for \"%s\" has been revoked",
+ mCameraIdStr.string(), String8(mClientPackageName).string());
return PERMISSION_DENIED;
}
if (res == AppOpsManager::MODE_IGNORED) {
- ALOGI("Camera %d: Access for \"%s\" has been restricted",
- mCameraId, String8(mClientPackageName).string());
+ ALOGI("Camera %s: Access for \"%s\" has been restricted",
+ mCameraIdStr.string(), String8(mClientPackageName).string());
// Return the same error as for device policy manager rejection
return -EACCES;
}
@@ -2355,12 +2476,11 @@
mOpsActive = true;
// Transition device availability listeners from PRESENT -> NOT_AVAILABLE
- mCameraService->updateStatus(StatusInternal::NOT_AVAILABLE,
- String8::format("%d", mCameraId));
+ sCameraService->updateStatus(StatusInternal::NOT_AVAILABLE, mCameraIdStr);
// Transition device state to OPEN
- mCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_OPEN,
- String8::format("%d", mCameraId));
+ sCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_OPEN,
+ mCameraIdStr);
return OK;
}
@@ -2379,12 +2499,12 @@
StatusInternal::ENUMERATING};
// Transition to PRESENT if the camera is not in either of the rejected states
- mCameraService->updateStatus(StatusInternal::PRESENT,
- String8::format("%d", mCameraId), rejected);
+ sCameraService->updateStatus(StatusInternal::PRESENT,
+ mCameraIdStr, rejected);
// Transition device state to CLOSED
- mCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_CLOSED,
- String8::format("%d", mCameraId));
+ sCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_CLOSED,
+ mCameraIdStr);
}
// Always stop watching, even if no camera op is active
if (mOpsCallback != NULL) {
@@ -2416,7 +2536,7 @@
"UNKNOWN");
if (res != AppOpsManager::MODE_ALLOWED) {
- ALOGI("Camera %d: Access for \"%s\" revoked", mCameraId,
+ ALOGI("Camera %s: Access for \"%s\" revoked", mCameraIdStr.string(),
myName.string());
// Reset the client PID to allow server-initiated disconnect,
// and to prevent further calls by client.
@@ -2429,17 +2549,6 @@
// ----------------------------------------------------------------------------
-// Provide client strong pointer for callbacks.
-sp<CameraService::Client> CameraService::Client::getClientFromCookie(void* user) {
- String8 cameraId = String8::format("%d", (int)(intptr_t) user);
- auto clientDescriptor = gCameraService->mActiveClientManager.get(cameraId);
- if (clientDescriptor != nullptr) {
- return sp<Client>{
- static_cast<Client*>(clientDescriptor->getValue().get())};
- }
- return sp<Client>{nullptr};
-}
-
void CameraService::Client::notifyError(int32_t errorCode,
const CaptureResultExtras& resultExtras) {
(void) errorCode;
@@ -2645,8 +2754,8 @@
}
bool hasClient = false;
- if (!mModule) {
- result = String8::format("No camera module available!\n");
+ if (!mInitialized) {
+ result = String8::format("No camera HAL available!\n");
write(fd, result.string(), result.size());
// Dump event log for error information
@@ -2655,6 +2764,13 @@
if (locked) mServiceLock.unlock();
return NO_ERROR;
}
+ if (mModule == nullptr) {
+ mCameraProviderManager->dump(fd, args);
+ // TODO - need way more dumping here
+
+ if (locked) mServiceLock.unlock();
+ return NO_ERROR;
+ }
result = String8::format("Camera module HAL API version: 0x%x\n", mModule->getHalApiVersion());
result.appendFormat("Camera module API version: 0x%x\n", mModule->getModuleApiVersion());
@@ -2692,7 +2808,6 @@
result = String8::format("Camera %s information:\n", cameraId.string());
camera_info info;
- // TODO: Change getCameraInfo + HAL to use String cameraIds
status_t rc = mModule->getCameraInfo(cameraIdToInt(cameraId), &info);
if (rc != OK) {
result.appendFormat(" Error reading static information!\n");
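
For reference, a small standalone sketch (not the real CameraService code) of the startup selection the hunks above add: exactly one of the two initialization paths runs, chosen by the Treble build flag, and public entry points then gate on a readiness flag instead of a raw module pointer. Function names below are illustrative stand-ins.

    #include <iostream>

    // Compile-time switch mirroring the USE_HIDL macro added above
    // (a constexpr here so the sketch builds standalone).
    #ifdef ENABLE_TREBLE
    constexpr bool kUseHidl = true;
    #else
    constexpr bool kUseHidl = false;
    #endif

    // Simplified stand-ins for the two initialization paths.
    static int enumerateProviders()  { std::cout << "HIDL provider path\n";   return 0; }
    static int loadLegacyHalModule() { std::cout << "legacy hw_get_module\n"; return 0; }

    struct Service {
        bool initialized = false;

        // Mirrors the reworked onFirstRef() flow: try one path, and only flip
        // the readiness flag when that path reports success.
        void start() {
            int res = kUseHidl ? enumerateProviders() : loadLegacyHalModule();
            if (res == 0) initialized = true;
        }

        // Entry points check the flag, which is why getCameraInfo()/connect()
        // above were changed from "if (!mModule)" to "if (!mInitialized)".
        bool ready() const { return initialized; }
    };

    int main() {
        Service s;
        s.start();
        std::cout << "ready: " << std::boolalpha << s.ready() << "\n";
    }
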
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index a6c2fa8..d463b59 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -38,6 +38,7 @@
#include "CameraFlashlight.h"
#include "common/CameraModule.h"
+#include "common/CameraProviderManager.h"
#include "media/RingBuffer.h"
#include "utils/AutoConditionLock.h"
#include "utils/ClientManager.h"
@@ -57,11 +58,13 @@
class CameraService :
public BinderService<CameraService>,
- public ::android::hardware::BnCameraService,
- public IBinder::DeathRecipient,
- public camera_module_callbacks_t
+ public virtual ::android::hardware::BnCameraService,
+ public virtual IBinder::DeathRecipient,
+ public camera_module_callbacks_t,
+ public virtual CameraProviderManager::StatusListener
{
friend class BinderService<CameraService>;
+ friend class CameraClient;
public:
class Client;
class BasicClient;
@@ -96,11 +99,12 @@
virtual ~CameraService();
/////////////////////////////////////////////////////////////////////
- // HAL Callbacks
+ // HAL Callbacks - implements CameraProviderManager::StatusListener
+
virtual void onDeviceStatusChanged(const String8 &cameraId,
- hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus);
+ hardware::camera::common::V1_0::CameraDeviceStatus newHalStatus) override;
virtual void onTorchStatusChanged(const String8& cameraId,
- hardware::camera::common::V1_0::TorchModeStatus newStatus);
+ hardware::camera::common::V1_0::TorchModeStatus newStatus) override;
/////////////////////////////////////////////////////////////////////
// ICameraService
@@ -108,7 +112,7 @@
virtual binder::Status getCameraInfo(int cameraId,
hardware::CameraInfo* cameraInfo);
- virtual binder::Status getCameraCharacteristics(const String16& id,
+ virtual binder::Status getCameraCharacteristics(const String16& cameraId,
CameraMetadata* cameraInfo);
virtual binder::Status getCameraVendorTagDescriptor(
/*out*/
@@ -185,7 +189,7 @@
/////////////////////////////////////////////////////////////////////
// CameraDeviceFactory functionality
- int getDeviceVersion(int cameraId, int* facing = NULL);
+ int getDeviceVersion(const String8& cameraId, int* facing = NULL);
/////////////////////////////////////////////////////////////////////
// Shared utilities
@@ -197,6 +201,7 @@
class BasicClient : public virtual RefBase {
public:
virtual status_t initialize(CameraModule *module) = 0;
+ virtual status_t initialize(sp<CameraProviderManager> manager) = 0;
virtual binder::Status disconnect();
// because we can't virtually inherit IInterface, which breaks
@@ -233,7 +238,7 @@
BasicClient(const sp<CameraService>& cameraService,
const sp<IBinder>& remoteCallback,
const String16& clientPackageName,
- int cameraId,
+ const String8& cameraIdStr,
int cameraFacing,
int clientPid,
uid_t clientUid,
@@ -248,13 +253,13 @@
bool mDestructionStarted;
// these are initialized in the constructor.
- sp<CameraService> mCameraService; // immutable after constructor
- int mCameraId; // immutable after constructor
- int mCameraFacing; // immutable after constructor
- String16 mClientPackageName; // immutable after constructor
+ static sp<CameraService> sCameraService;
+ const String8 mCameraIdStr;
+ const int mCameraFacing;
+ String16 mClientPackageName;
pid_t mClientPid;
- uid_t mClientUid; // immutable after constructor
- pid_t mServicePid; // immutable after constructor
+ const uid_t mClientUid;
+ const pid_t mServicePid;
bool mDisconnected;
// - The app-side Binder interface to receive callbacks from us
@@ -320,7 +325,7 @@
Client(const sp<CameraService>& cameraService,
const sp<hardware::ICameraClient>& cameraClient,
const String16& clientPackageName,
- int cameraId,
+ const String8& cameraIdStr,
int cameraFacing,
int clientPid,
uid_t clientUid,
@@ -343,14 +348,12 @@
// superclass this can be cast to.
virtual bool canCastToApiClient(apiLevel level) const;
protected:
- // Convert client from cookie.
- static sp<CameraService::Client> getClientFromCookie(void* user);
-
// Initialized in constructor
// - The app-side Binder interface to receive callbacks from us
sp<hardware::ICameraClient> mRemoteCallback;
+ int mCameraId; // All API1 clients use integer camera IDs
}; // class Client
/**
@@ -438,6 +441,7 @@
*/
class CameraState {
public:
+
/**
* Make a new CameraState and set the ID, cost, and conflicting devices using the values
* returned in the HAL's camera_info struct for each device.
@@ -506,6 +510,12 @@
// Delay-load the Camera HAL module
virtual void onFirstRef();
+ // Load the legacy HAL module
+ status_t loadLegacyHalModule();
+
+ // Enumerate all camera providers in the system
+ status_t enumerateProviders();
+
// Check if we can connect, before we acquire the service lock.
// The returned originalClientPid is the PID of the original process that wants to connect to
// camera.
@@ -676,7 +686,11 @@
sp<MediaPlayer> mSoundPlayer[NUM_SOUNDS];
int mSoundRef; // reference count (release all MediaPlayer when 0)
- CameraModule* mModule;
+ // Basic flag on whether the camera subsystem is in a usable state
+ bool mInitialized;
+
+ CameraModule* mModule;
+ sp<CameraProviderManager> mCameraProviderManager;
// Guarded by mStatusListenerMutex
std::vector<sp<hardware::ICameraServiceListener>> mListenerList;
@@ -767,7 +781,7 @@
static int getCameraPriorityFromProcState(int procState);
static binder::Status makeClient(const sp<CameraService>& cameraService,
- const sp<IInterface>& cameraCb, const String16& packageName, int cameraId,
+ const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId,
int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode,
int halVersion, int deviceVersion, apiLevel effectiveApiLevel,
/*out*/sp<BasicClient>* client);
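
A hypothetical miniature of the BasicClient/Client split in the header above: the base class keeps the camera id as a string (which also covers HIDL-style ids such as "back.0") plus one process-wide static service handle, while only the API1 client derives an integer id. The types below are simplified stand-ins, not the real String8/sp<CameraService> members.

    #include <iostream>
    #include <string>

    struct ServiceHandle { /* stands in for the static sCameraService */ };

    class BasicClient {
    public:
        explicit BasicClient(std::string cameraId) : mCameraIdStr(std::move(cameraId)) {}
        virtual ~BasicClient() = default;
        const std::string& cameraIdStr() const { return mCameraIdStr; }

        // One handle shared by every client, matching the move from a
        // per-client service pointer to a static member.
        static ServiceHandle sService;

    private:
        const std::string mCameraIdStr;
    };
    ServiceHandle BasicClient::sService;

    class Api1Client : public BasicClient {
    public:
        explicit Api1Client(const std::string& cameraId)
            : BasicClient(cameraId),
              mCameraId(std::stoi(cameraId)) {}  // API1 clients are only created with numeric ids
        int cameraId() const { return mCameraId; }

    private:
        const int mCameraId;
    };

    int main() {
        Api1Client c("1");
        std::cout << c.cameraIdStr() << " / " << c.cameraId() << "\n";
    }

Keeping the string id in the base class lets the API2 path stop converting ids to integers altogether, while API1 clients pay the conversion cost once in their constructor.
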
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index bfbf640..3aec562 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -56,7 +56,8 @@
int servicePid,
bool legacyMode):
Camera2ClientBase(cameraService, cameraClient, clientPackageName,
- cameraId, cameraFacing, clientPid, clientUid, servicePid),
+ String8::format("%d", cameraId), cameraFacing,
+ clientPid, clientUid, servicePid),
mParameters(cameraId, cameraFacing)
{
ATRACE_CALL();
@@ -67,13 +68,22 @@
mLegacyMode = legacyMode;
}
-status_t Camera2Client::initialize(CameraModule *module)
+status_t Camera2Client::initialize(CameraModule *module) {
+ return initializeImpl(module);
+}
+
+status_t Camera2Client::initialize(sp<CameraProviderManager> manager) {
+ return initializeImpl(manager);
+}
+
+template<typename TProviderPtr>
+status_t Camera2Client::initializeImpl(TProviderPtr providerPtr)
{
ATRACE_CALL();
ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId);
status_t res;
- res = Camera2ClientBase::initialize(module);
+ res = Camera2ClientBase::initialize(providerPtr);
if (res != OK) {
return res;
}
@@ -1034,7 +1044,7 @@
}
if (!restart) {
- mCameraService->playSound(CameraService::SOUND_RECORDING_START);
+ sCameraService->playSound(CameraService::SOUND_RECORDING_START);
mStreamingProcessor->updateRecordingRequest(params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
@@ -1191,7 +1201,7 @@
return;
};
- mCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
+ sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
// Remove recording stream because the video target may be abandoned soon.
res = stopStream();
@@ -1621,7 +1631,7 @@
}
status_t Camera2Client::commandPlayRecordingSoundL() {
- mCameraService->playSound(CameraService::SOUND_RECORDING_START);
+ sCameraService->playSound(CameraService::SOUND_RECORDING_START);
return OK;
}
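
For reference, a minimal sketch of the overload-plus-template pattern used by the initialize()/initializeImpl() split above: two thin public overloads forward to one shared template body, so the common client setup is not duplicated for the module and provider paths. The types below are simplified placeholders.

    #include <iostream>
    #include <memory>

    struct LegacyModule {};
    struct ProviderManager {};

    class Client {
    public:
        // Two public overloads, one shared template body.
        int initialize(LegacyModule* module) { return initializeImpl(module); }
        int initialize(std::shared_ptr<ProviderManager> manager) {
            return initializeImpl(std::move(manager));
        }

    private:
        template <typename TProviderPtr>
        int initializeImpl(TProviderPtr provider) {
            // Common setup would live here (frame processor, callbacks, ...);
            // only the device-open call actually differs by provider type.
            (void)provider;
            std::cout << "common client setup\n";
            return 0;  // OK
        }
    };

    int main() {
        Client c;
        LegacyModule m;
        c.initialize(&m);
        c.initialize(std::make_shared<ProviderManager>());
    }
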
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index e2129f5..87c91a0 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -98,7 +98,8 @@
virtual ~Camera2Client();
- status_t initialize(CameraModule *module);
+ virtual status_t initialize(CameraModule *module) override;
+ virtual status_t initialize(sp<CameraProviderManager> manager) override;
virtual status_t dump(int fd, const Vector<String16>& args);
@@ -219,6 +220,9 @@
// Video snapshot jpeg size overriding helper function
status_t overrideVideoSnapshotSize(Parameters &params);
+
+ template<typename TProviderPtr>
+ status_t initializeImpl(TProviderPtr providerPtr);
};
}; // namespace android
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index 266fb03..b83d425 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -41,7 +41,8 @@
int clientPid, int clientUid,
int servicePid, bool legacyMode):
Client(cameraService, cameraClient, clientPackageName,
- cameraId, cameraFacing, clientPid, clientUid, servicePid)
+ String8::format("%d", cameraId), cameraFacing, clientPid,
+ clientUid, servicePid)
{
int callingPid = getCallingPid();
LOG1("CameraClient::CameraClient E (pid %d, id %d)", callingPid, cameraId);
@@ -61,6 +62,15 @@
}
status_t CameraClient::initialize(CameraModule *module) {
+ return initializeImpl<CameraModule*>(module);
+}
+
+status_t CameraClient::initialize(sp<CameraProviderManager> manager) {
+ return initializeImpl<sp<CameraProviderManager>>(manager);
+}
+
+template<typename TProviderPtr>
+status_t CameraClient::initializeImpl(TProviderPtr providerPtr) {
int callingPid = getCallingPid();
status_t res;
@@ -76,7 +86,7 @@
snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId);
mHardware = new CameraHardwareInterface(camera_device_name);
- res = mHardware->initialize(module);
+ res = mHardware->initialize(providerPtr);
if (res != OK) {
ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
__FUNCTION__, mCameraId, strerror(-res), res);
@@ -252,7 +262,7 @@
// Turn off all messages.
disableMsgType(CAMERA_MSG_ALL_MSGS);
mHardware->stopPreview();
- mCameraService->updateProxyDeviceState(
+ sCameraService->updateProxyDeviceState(
ICameraServiceProxy::CAMERA_STATE_IDLE,
String8::format("%d", mCameraId));
mHardware->cancelPicture();
@@ -414,7 +424,7 @@
mHardware->setPreviewWindow(mPreviewWindow);
result = mHardware->startPreview();
if (result == NO_ERROR) {
- mCameraService->updateProxyDeviceState(
+ sCameraService->updateProxyDeviceState(
ICameraServiceProxy::CAMERA_STATE_ACTIVE,
String8::format("%d", mCameraId));
}
@@ -440,7 +450,7 @@
// start recording mode
enableMsgType(CAMERA_MSG_VIDEO_FRAME);
- mCameraService->playSound(CameraService::SOUND_RECORDING_START);
+ sCameraService->playSound(CameraService::SOUND_RECORDING_START);
result = mHardware->startRecording();
if (result != NO_ERROR) {
ALOGE("mHardware->startRecording() failed with status %d", result);
@@ -457,7 +467,7 @@
disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
mHardware->stopPreview();
- mCameraService->updateProxyDeviceState(
+ sCameraService->updateProxyDeviceState(
ICameraServiceProxy::CAMERA_STATE_IDLE,
String8::format("%d", mCameraId));
mPreviewBuffer.clear();
@@ -471,7 +481,7 @@
disableMsgType(CAMERA_MSG_VIDEO_FRAME);
mHardware->stopRecording();
- mCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
+ sCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
mPreviewBuffer.clear();
}
@@ -697,7 +707,7 @@
}
return OK;
} else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
- mCameraService->playSound(CameraService::SOUND_RECORDING_START);
+ sCameraService->playSound(CameraService::SOUND_RECORDING_START);
} else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) {
// Silently ignore this command
return INVALID_OPERATION;
@@ -748,6 +758,16 @@
return false;
}
+sp<CameraClient> CameraClient::getClientFromCookie(void* user) {
+ String8 cameraId = String8::format("%d", (int)(intptr_t) user);
+ auto clientDescriptor = sCameraService->mActiveClientManager.get(cameraId);
+ if (clientDescriptor != nullptr) {
+ return sp<CameraClient>{
+ static_cast<CameraClient*>(clientDescriptor->getValue().get())};
+ }
+ return sp<CameraClient>{nullptr};
+}
+
// Callback messages can be dispatched to internal handlers or pass to our
// client's callback functions, depending on the message type.
//
@@ -767,7 +787,7 @@
int32_t ext2, void* user) {
LOG2("notifyCallback(%d)", msgType);
- sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get());
+ sp<CameraClient> client = getClientFromCookie(user);
if (client.get() == nullptr) return;
if (!client->lockIfMessageWanted(msgType)) return;
@@ -787,7 +807,7 @@
const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) {
LOG2("dataCallback(%d)", msgType);
- sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get());
+ sp<CameraClient> client = getClientFromCookie(user);
if (client.get() == nullptr) return;
if (!client->lockIfMessageWanted(msgType)) return;
@@ -820,7 +840,7 @@
int32_t msgType, const sp<IMemory>& dataPtr, void* user) {
LOG2("dataCallbackTimestamp(%d)", msgType);
- sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get());
+ sp<CameraClient> client = getClientFromCookie(user);
if (client.get() == nullptr) return;
if (!client->lockIfMessageWanted(msgType)) return;
@@ -837,7 +857,7 @@
// snapshot taken callback
void CameraClient::handleShutter(void) {
if (mPlayShutterSound) {
- mCameraService->playSound(CameraService::SOUND_SHUTTER);
+ sCameraService->playSound(CameraService::SOUND_SHUTTER);
}
sp<hardware::ICameraClient> c = mRemoteCallback;
@@ -850,7 +870,7 @@
// Shutters only happen in response to takePicture, so mark device as
// idle now, until preview is restarted
- mCameraService->updateProxyDeviceState(
+ sCameraService->updateProxyDeviceState(
ICameraServiceProxy::CAMERA_STATE_IDLE,
String8::format("%d", mCameraId));
diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
index 4f46fc4..91f00e3 100644
--- a/services/camera/libcameraservice/api1/CameraClient.h
+++ b/services/camera/libcameraservice/api1/CameraClient.h
@@ -70,7 +70,8 @@
bool legacyMode = false);
~CameraClient();
- status_t initialize(CameraModule *module);
+ virtual status_t initialize(CameraModule *module) override;
+ virtual status_t initialize(sp<CameraProviderManager> manager) override;
virtual status_t dump(int fd, const Vector<String16>& args);
@@ -78,6 +79,9 @@
private:
+ template<typename TProviderPtr>
+ status_t initializeImpl(TProviderPtr providerPtr);
+
// check whether the calling process matches mClientPid.
status_t checkPid() const;
status_t checkPidAndHardware() const; // also check mHardware != 0
@@ -98,6 +102,8 @@
// internal function used by sendCommand to enable/disable shutter sound.
status_t enableShutterSound(bool enable);
+ static sp<CameraClient> getClientFromCookie(void* user);
+
// these are static callback functions
static void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
static void dataCallback(int32_t msgType, const sp<IMemory>& dataPtr,
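
A hypothetical sketch of the cookie-to-client lookup that moves into CameraClient above: the HAL callbacks hand back a void* cookie carrying the integer camera id, and a static helper resolves it through the service's active-client registry instead of a per-client pointer. The registry and client types below are simplified stand-ins.

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <memory>
    #include <string>

    struct Client {
        explicit Client(std::string id) : cameraId(std::move(id)) {}
        std::string cameraId;
    };

    struct Registry {
        std::map<std::string, std::shared_ptr<Client>> active;
        std::shared_ptr<Client> get(const std::string& id) const {
            auto it = active.find(id);
            return it == active.end() ? nullptr : it->second;
        }
    };

    static Registry gRegistry;  // stands in for sCameraService->mActiveClientManager

    // Decode the integer camera id packed into the cookie, then look up the
    // live client by its string id.
    static std::shared_ptr<Client> clientFromCookie(void* user) {
        int id = static_cast<int>(reinterpret_cast<std::intptr_t>(user));
        return gRegistry.get(std::to_string(id));
    }

    int main() {
        gRegistry.active["0"] = std::make_shared<Client>("0");
        void* cookie = reinterpret_cast<void*>(static_cast<std::intptr_t>(0));
        auto c = clientFromCookie(cookie);
        std::cout << (c ? c->cameraId : std::string("<none>")) << "\n";
    }
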
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 32b99ca..6efe4e3 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -917,6 +917,8 @@
} else {
allowZslMode = true;
}
+ // TODO (b/34131351): turn ZSL back on after fixing the issue
+ allowZslMode = false;
ALOGI("%s: allowZslMode: %d slowJpegMode %d", __FUNCTION__, allowZslMode, slowJpegMode);
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 5166eb5..a55c23b 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -46,7 +46,7 @@
const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
const String16& clientPackageName,
- int cameraId,
+ const String8& cameraId,
int cameraFacing,
int clientPid,
uid_t clientUid,
@@ -67,7 +67,7 @@
CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
const String16& clientPackageName,
- int cameraId,
+ const String8& cameraId,
int cameraFacing,
int clientPid,
uid_t clientUid,
@@ -79,22 +79,30 @@
mRequestIdCounter(0) {
ATRACE_CALL();
- ALOGI("CameraDeviceClient %d: Opened", cameraId);
+ ALOGI("CameraDeviceClient %s: Opened", cameraId.string());
}
-status_t CameraDeviceClient::initialize(CameraModule *module)
-{
+status_t CameraDeviceClient::initialize(CameraModule *module) {
+ return initializeImpl(module);
+}
+
+status_t CameraDeviceClient::initialize(sp<CameraProviderManager> manager) {
+ return initializeImpl(manager);
+}
+
+template<typename TProviderPtr>
+status_t CameraDeviceClient::initializeImpl(TProviderPtr providerPtr) {
ATRACE_CALL();
status_t res;
- res = Camera2ClientBase::initialize(module);
+ res = Camera2ClientBase::initialize(providerPtr);
if (res != OK) {
return res;
}
String8 threadName;
mFrameProcessor = new FrameProcessorBase(mDevice);
- threadName = String8::format("CDU-%d-FrameProc", mCameraId);
+ threadName = String8::format("CDU-%s-FrameProc", mCameraIdStr.string());
mFrameProcessor->run(threadName.string());
mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
@@ -138,25 +146,27 @@
}
if (requests.empty()) {
- ALOGE("%s: Camera %d: Sent null request. Rejecting request.",
- __FUNCTION__, mCameraId);
+ ALOGE("%s: Camera %s: Sent null request. Rejecting request.",
+ __FUNCTION__, mCameraIdStr.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Empty request list");
}
List<const CameraMetadata> metadataRequestList;
+ std::list<const SurfaceMap> surfaceMapList;
submitInfo->mRequestId = mRequestIdCounter;
uint32_t loopCounter = 0;
for (auto&& request: requests) {
if (request.mIsReprocess) {
if (!mInputStream.configured) {
- ALOGE("%s: Camera %d: no input stream is configured.", __FUNCTION__, mCameraId);
+ ALOGE("%s: Camera %s: no input stream is configured.", __FUNCTION__,
+ mCameraIdStr.string());
return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
- "No input configured for camera %d but request is for reprocessing",
- mCameraId);
+ "No input configured for camera %s but request is for reprocessing",
+ mCameraIdStr.string());
} else if (streaming) {
- ALOGE("%s: Camera %d: streaming reprocess requests not supported.", __FUNCTION__,
- mCameraId);
+ ALOGE("%s: Camera %s: streaming reprocess requests not supported.", __FUNCTION__,
+ mCameraIdStr.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"Repeating reprocess requests not supported");
}
@@ -164,13 +174,13 @@
CameraMetadata metadata(request.mMetadata);
if (metadata.isEmpty()) {
- ALOGE("%s: Camera %d: Sent empty metadata packet. Rejecting request.",
- __FUNCTION__, mCameraId);
+ ALOGE("%s: Camera %s: Sent empty metadata packet. Rejecting request.",
+ __FUNCTION__, mCameraIdStr.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"Request settings are empty");
} else if (request.mSurfaceList.isEmpty()) {
- ALOGE("%s: Camera %d: Requests must have at least one surface target. "
- "Rejecting request.", __FUNCTION__, mCameraId);
+ ALOGE("%s: Camera %s: Requests must have at least one surface target. "
+ "Rejecting request.", __FUNCTION__, mCameraIdStr.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"Request has no output targets");
}
@@ -182,11 +192,11 @@
}
/**
- * Write in the output stream IDs which we calculate from
- * the capture request's list of surface targets
+ * Write in the output stream IDs and map from stream ID to surface ID
+ * which we calculate from the capture request's list of surface target
*/
+ SurfaceMap surfaceMap;
Vector<int32_t> outputStreamIds;
- outputStreamIds.setCapacity(request.mSurfaceList.size());
for (sp<Surface> surface : request.mSurfaceList) {
if (surface == 0) continue;
@@ -195,17 +205,23 @@
// Trying to submit request with surface that wasn't created
if (idx == NAME_NOT_FOUND) {
- ALOGE("%s: Camera %d: Tried to submit a request with a surface that"
+ ALOGE("%s: Camera %s: Tried to submit a request with a surface that"
" we have not called createStream on",
- __FUNCTION__, mCameraId);
+ __FUNCTION__, mCameraIdStr.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"Request targets Surface that is not part of current capture session");
}
- int streamId = mStreamMap.valueAt(idx);
- outputStreamIds.push_back(streamId);
- ALOGV("%s: Camera %d: Appending output stream %d to request",
- __FUNCTION__, mCameraId, streamId);
+ const StreamSurfaceId& streamSurfaceId = mStreamMap.valueAt(idx);
+ if (surfaceMap.find(streamSurfaceId.streamId()) == surfaceMap.end()) {
+ surfaceMap[streamSurfaceId.streamId()] = std::vector<size_t>();
+ outputStreamIds.push_back(streamSurfaceId.streamId());
+ }
+ surfaceMap[streamSurfaceId.streamId()].push_back(streamSurfaceId.surfaceId());
+
+ ALOGV("%s: Camera %s: Appending output stream %d surface %d to request",
+ __FUNCTION__, mCameraIdStr.string(), streamSurfaceId.streamId(),
+ streamSurfaceId.surfaceId());
}
metadata.update(ANDROID_REQUEST_OUTPUT_STREAMS, &outputStreamIds[0],
@@ -217,19 +233,22 @@
metadata.update(ANDROID_REQUEST_ID, &(submitInfo->mRequestId), /*size*/1);
loopCounter++; // loopCounter starts from 1
- ALOGV("%s: Camera %d: Creating request with ID %d (%d of %zu)",
- __FUNCTION__, mCameraId, submitInfo->mRequestId, loopCounter, requests.size());
+ ALOGV("%s: Camera %s: Creating request with ID %d (%d of %zu)",
+ __FUNCTION__, mCameraIdStr.string(), submitInfo->mRequestId,
+ loopCounter, requests.size());
metadataRequestList.push_back(metadata);
+ surfaceMapList.push_back(surfaceMap);
}
mRequestIdCounter++;
if (streaming) {
- err = mDevice->setStreamingRequestList(metadataRequestList, &(submitInfo->mLastFrameNumber));
+ err = mDevice->setStreamingRequestList(metadataRequestList, surfaceMapList,
+ &(submitInfo->mLastFrameNumber));
if (err != OK) {
String8 msg = String8::format(
- "Camera %d: Got error %s (%d) after trying to set streaming request",
- mCameraId, strerror(-err), err);
+ "Camera %s: Got error %s (%d) after trying to set streaming request",
+ mCameraIdStr.string(), strerror(-err), err);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
msg.string());
@@ -238,11 +257,12 @@
mStreamingRequestId = submitInfo->mRequestId;
}
} else {
- err = mDevice->captureList(metadataRequestList, &(submitInfo->mLastFrameNumber));
+ err = mDevice->captureList(metadataRequestList, surfaceMapList,
+ &(submitInfo->mLastFrameNumber));
if (err != OK) {
String8 msg = String8::format(
- "Camera %d: Got error %s (%d) after trying to submit capture request",
- mCameraId, strerror(-err), err);
+ "Camera %s: Got error %s (%d) after trying to submit capture request",
+ mCameraIdStr.string(), strerror(-err), err);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
msg.string());
@@ -250,7 +270,7 @@
ALOGV("%s: requestId = %d ", __FUNCTION__, submitInfo->mRequestId);
}
- ALOGV("%s: Camera %d: End of function", __FUNCTION__, mCameraId);
+ ALOGV("%s: Camera %s: End of function", __FUNCTION__, mCameraIdStr.string());
return res;
}
@@ -274,8 +294,8 @@
Mutex::Autolock idLock(mStreamingRequestIdLock);
if (mStreamingRequestId != requestId) {
- String8 msg = String8::format("Camera %d: Canceling request ID %d doesn't match "
- "current request ID %d", mCameraId, requestId, mStreamingRequestId);
+ String8 msg = String8::format("Camera %s: Canceling request ID %d doesn't match "
+ "current request ID %d", mCameraIdStr.string(), requestId, mStreamingRequestId);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
@@ -283,13 +303,13 @@
err = mDevice->clearStreamingRequest(lastFrameNumber);
if (err == OK) {
- ALOGV("%s: Camera %d: Successfully cleared streaming request",
- __FUNCTION__, mCameraId);
+ ALOGV("%s: Camera %s: Successfully cleared streaming request",
+ __FUNCTION__, mCameraIdStr.string());
mStreamingRequestId = REQUEST_ID_NONE;
} else {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error clearing streaming request: %s (%d)",
- mCameraId, strerror(-err), err);
+ "Camera %s: Error clearing streaming request: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
}
return res;
@@ -302,8 +322,9 @@
}
binder::Status CameraDeviceClient::endConfigure(bool isConstrainedHighSpeed) {
- ALOGV("%s: ending configure (%d input stream, %zu output streams)",
- __FUNCTION__, mInputStream.configured ? 1 : 0, mStreamMap.size());
+ ALOGV("%s: ending configure (%d input stream, %zu output surfaces)",
+ __FUNCTION__, mInputStream.configured ? 1 : 0,
+ mStreamMap.size());
binder::Status res;
if (!(res = checkPidStatus(__FUNCTION__)).isOk()) return res;
@@ -328,8 +349,8 @@
}
if (!isConstrainedHighSpeedSupported) {
String8 msg = String8::format(
- "Camera %d: Try to create a constrained high speed configuration on a device"
- " that doesn't support it.", mCameraId);
+ "Camera %s: Try to create a constrained high speed configuration on a device"
+ " that doesn't support it.", mCameraIdStr.string());
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
msg.string());
@@ -338,13 +359,13 @@
status_t err = mDevice->configureStreams(isConstrainedHighSpeed);
if (err == BAD_VALUE) {
- String8 msg = String8::format("Camera %d: Unsupported set of inputs/outputs provided",
- mCameraId);
+ String8 msg = String8::format("Camera %s: Unsupported set of inputs/outputs provided",
+ mCameraIdStr.string());
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
} else if (err != OK) {
- String8 msg = String8::format("Camera %d: Error configuring streams: %s (%d)",
- mCameraId, strerror(-err), err);
+ String8 msg = String8::format("Camera %s: Error configuring streams: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
}
@@ -366,7 +387,7 @@
}
bool isInput = false;
- ssize_t index = NAME_NOT_FOUND;
+ std::vector<sp<IBinder>> surfaces;
ssize_t dIndex = NAME_NOT_FOUND;
if (mInputStream.configured && mInputStream.id == streamId) {
@@ -374,26 +395,24 @@
} else {
// Guard against trying to delete non-created streams
for (size_t i = 0; i < mStreamMap.size(); ++i) {
- if (streamId == mStreamMap.valueAt(i)) {
- index = i;
+ if (streamId == mStreamMap.valueAt(i).streamId()) {
+ surfaces.push_back(mStreamMap.keyAt(i));
+ }
+ }
+
+ // See if this stream is one of the deferred streams.
+ for (size_t i = 0; i < mDeferredStreams.size(); ++i) {
+ if (streamId == mDeferredStreams[i]) {
+ dIndex = i;
break;
}
}
- if (index == NAME_NOT_FOUND) {
- // See if this stream is one of the deferred streams.
- for (size_t i = 0; i < mDeferredStreams.size(); ++i) {
- if (streamId == mDeferredStreams[i]) {
- dIndex = i;
- break;
- }
- }
- if (dIndex == NAME_NOT_FOUND) {
- String8 msg = String8::format("Camera %d: Invalid stream ID (%d) specified, no such"
- " stream created yet", mCameraId, streamId);
- ALOGW("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
- }
+ if (surfaces.empty() && dIndex == NAME_NOT_FOUND) {
+ String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no such"
+ " stream created yet", mCameraIdStr.string(), streamId);
+ ALOGW("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
}
@@ -401,17 +420,21 @@
status_t err = mDevice->deleteStream(streamId);
if (err != OK) {
- String8 msg = String8::format("Camera %d: Unexpected error %s (%d) when deleting stream %d",
- mCameraId, strerror(-err), err, streamId);
+ String8 msg = String8::format("Camera %s: Unexpected error %s (%d) when deleting stream %d",
+ mCameraIdStr.string(), strerror(-err), err, streamId);
ALOGE("%s: %s", __FUNCTION__, msg.string());
res = STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
} else {
if (isInput) {
mInputStream.configured = false;
- } else if (index != NAME_NOT_FOUND) {
- mStreamMap.removeItemsAt(index);
} else {
- mDeferredStreams.removeItemsAt(dIndex);
+ for (auto& surface : surfaces) {
+ mStreamMap.removeItem(surface);
+ }
+
+ if (dIndex != NAME_NOT_FOUND) {
+ mDeferredStreams.removeItemsAt(dIndex);
+ }
}
}
@@ -429,14 +452,39 @@
Mutex::Autolock icl(mBinderSerializationLock);
- sp<IGraphicBufferProducer> bufferProducer = outputConfiguration.getGraphicBufferProducer();
- bool deferredConsumer = bufferProducer == NULL;
+ const std::vector<sp<IGraphicBufferProducer>>& bufferProducers =
+ outputConfiguration.getGraphicBufferProducers();
+ size_t numBufferProducers = bufferProducers.size();
+
+ if (numBufferProducers > MAX_SURFACES_PER_STREAM) {
+ ALOGE("%s: GraphicBufferProducer count %zu for stream exceeds limit of %d",
+ __FUNCTION__, bufferProducers.size(), MAX_SURFACES_PER_STREAM);
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Surface count is too high");
+ }
+ if (numBufferProducers == 0) {
+ ALOGE("%s: GraphicBufferProducer count 0 is not valid", __FUNCTION__);
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Malformed surface");
+ }
+ size_t deferredConsumerCnt = 0;
+ for (auto bufferProducer : bufferProducers) {
+ if (bufferProducer == nullptr) {
+ deferredConsumerCnt++;
+ }
+ }
+ if (deferredConsumerCnt > MAX_DEFERRED_SURFACES) {
+ ALOGE("%s: %zu deferred consumer is not supported", __FUNCTION__, deferredConsumerCnt);
+ return STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "More than %d deferred consumer", MAX_DEFERRED_SURFACES);
+ }
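+ // A deferred consumer appears here as a null producer. If the only producer
+ // is deferred, the stream is created via the deferred path below; otherwise
+ // null entries are skipped and their surfaces are attached later.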
+ bool deferredConsumer = deferredConsumerCnt > 0;
+ bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 1;
int surfaceType = outputConfiguration.getSurfaceType();
bool validSurfaceType = ((surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_VIEW) ||
(surfaceType == OutputConfiguration::SURFACE_TYPE_SURFACE_TEXTURE));
+
if (deferredConsumer && !validSurfaceType) {
ALOGE("%s: Target surface is invalid: bufferProducer = %p, surfaceType = %d.",
- __FUNCTION__, bufferProducer.get(), surfaceType);
+ __FUNCTION__, bufferProducers[0].get(), surfaceType);
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
}
@@ -444,116 +492,182 @@
return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
}
+ std::vector<sp<Surface>> surfaces;
+ std::vector<sp<IBinder>> binders;
+ int streamWidth, streamHeight, streamFormat;
int width, height, format;
+ int32_t streamConsumerUsage;
int32_t consumerUsage;
- android_dataspace dataSpace;
+ android_dataspace dataSpace, streamDataSpace;
status_t err;
// Create stream for deferred surface case.
- if (deferredConsumer) {
+ if (deferredConsumerOnly) {
return createDeferredSurfaceStreamLocked(outputConfiguration, newStreamId);
}
- // Don't create multiple streams for the same target surface
- {
- ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
- if (index != NAME_NOT_FOUND) {
- String8 msg = String8::format("Camera %d: Surface already has a stream created for it "
- "(ID %zd)", mCameraId, index);
- ALOGW("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.string());
+ bool isFirstSurface = true;
+ streamWidth = -1;
+ streamHeight = -1;
+ streamFormat = -1;
+ streamDataSpace = HAL_DATASPACE_UNKNOWN;
+ streamConsumerUsage = 0;
+
+ for (auto& bufferProducer : bufferProducers) {
+ if (bufferProducer == nullptr) {
+ continue;
}
- }
- // HACK b/10949105
- // Query consumer usage bits to set async operation mode for
- // GLConsumer using controlledByApp parameter.
- bool useAsync = false;
- if ((err = bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS,
- &consumerUsage)) != OK) {
- String8 msg = String8::format("Camera %d: Failed to query Surface consumer usage: %s (%d)",
- mCameraId, strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
- if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
- ALOGW("%s: Camera %d with consumer usage flag: 0x%x: Forcing asynchronous mode for stream",
- __FUNCTION__, mCameraId, consumerUsage);
- useAsync = true;
- }
+ // Don't create multiple streams for the same target surface
+ {
+ ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
+ if (index != NAME_NOT_FOUND) {
+ String8 msg = String8::format("Camera %s: Surface already has a stream created for it "
+ "(ID %zd)", mCameraIdStr.string(), index);
+ ALOGW("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.string());
+ }
+ }
- int32_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
- GRALLOC_USAGE_RENDERSCRIPT;
- int32_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
- GraphicBuffer::USAGE_HW_TEXTURE |
- GraphicBuffer::USAGE_HW_COMPOSER;
- bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
- (consumerUsage & allowedFlags) != 0;
+ // HACK b/10949105
+ // Query consumer usage bits to set async operation mode for
+ // GLConsumer using controlledByApp parameter.
+ bool useAsync = false;
+ if ((err = bufferProducer->query(NATIVE_WINDOW_CONSUMER_USAGE_BITS,
+ &consumerUsage)) != OK) {
+ String8 msg = String8::format("Camera %s: Failed to query Surface consumer usage: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+ }
+ if (consumerUsage & GraphicBuffer::USAGE_HW_TEXTURE) {
+ ALOGW("%s: Camera %s with consumer usage flag: 0x%x: Forcing asynchronous mode for stream",
+ __FUNCTION__, mCameraIdStr.string(), consumerUsage);
+ useAsync = true;
+ }
- sp<IBinder> binder = IInterface::asBinder(bufferProducer);
- sp<Surface> surface = new Surface(bufferProducer, useAsync);
- ANativeWindow *anw = surface.get();
+ int32_t disallowedFlags = GraphicBuffer::USAGE_HW_VIDEO_ENCODER |
+ GRALLOC_USAGE_RENDERSCRIPT;
+ int32_t allowedFlags = GraphicBuffer::USAGE_SW_READ_MASK |
+ GraphicBuffer::USAGE_HW_TEXTURE |
+ GraphicBuffer::USAGE_HW_COMPOSER;
+ bool flexibleConsumer = (consumerUsage & disallowedFlags) == 0 &&
+ (consumerUsage & allowedFlags) != 0;
- if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
- String8 msg = String8::format("Camera %d: Failed to query Surface width: %s (%d)",
- mCameraId, strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
- if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
- String8 msg = String8::format("Camera %d: Failed to query Surface height: %s (%d)",
- mCameraId, strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
- if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
- String8 msg = String8::format("Camera %d: Failed to query Surface format: %s (%d)",
- mCameraId, strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
- if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
- reinterpret_cast<int*>(&dataSpace))) != OK) {
- String8 msg = String8::format("Camera %d: Failed to query Surface dataspace: %s (%d)",
- mCameraId, strerror(-err), err);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
- }
+ sp<IBinder> binder = IInterface::asBinder(bufferProducer);
+ sp<Surface> surface = new Surface(bufferProducer, useAsync);
+ ANativeWindow *anw = surface.get();
- // FIXME: remove this override since the default format should be
- // IMPLEMENTATION_DEFINED. b/9487482
- if (format >= HAL_PIXEL_FORMAT_RGBA_8888 &&
- format <= HAL_PIXEL_FORMAT_BGRA_8888) {
- ALOGW("%s: Camera %d: Overriding format %#x to IMPLEMENTATION_DEFINED",
- __FUNCTION__, mCameraId, format);
- format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
- }
+ if ((err = anw->query(anw, NATIVE_WINDOW_WIDTH, &width)) != OK) {
+ String8 msg = String8::format("Camera %s: Failed to query Surface width: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+ }
+ if ((err = anw->query(anw, NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+ String8 msg = String8::format("Camera %s: Failed to query Surface height: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+ }
+ if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+ String8 msg = String8::format("Camera %s: Failed to query Surface format: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+ }
+ if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE,
+ reinterpret_cast<int*>(&dataSpace))) != OK) {
+ String8 msg = String8::format("Camera %s: Failed to query Surface dataspace: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
+ }
- // Round dimensions to the nearest dimensions available for this format
- if (flexibleConsumer && isPublicFormat(format) &&
- !CameraDeviceClient::roundBufferDimensionNearest(width, height,
- format, dataSpace, mDevice->info(), /*out*/&width, /*out*/&height)) {
- String8 msg = String8::format("Camera %d: No supported stream configurations with "
- "format %#x defined, failed to create output stream", mCameraId, format);
- ALOGE("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ // FIXME: remove this override since the default format should be
+ // IMPLEMENTATION_DEFINED. b/9487482
+ if (format >= HAL_PIXEL_FORMAT_RGBA_8888 &&
+ format <= HAL_PIXEL_FORMAT_BGRA_8888) {
+ ALOGW("%s: Camera %s: Overriding format %#x to IMPLEMENTATION_DEFINED",
+ __FUNCTION__, mCameraIdStr.string(), format);
+ format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ }
+ // Round dimensions to the nearest dimensions available for this format
+ if (flexibleConsumer && isPublicFormat(format) &&
+ !CameraDeviceClient::roundBufferDimensionNearest(width, height,
+ format, dataSpace, mDevice->info(), /*out*/&width, /*out*/&height)) {
+ String8 msg = String8::format("Camera %s: No supported stream configurations with "
+ "format %#x defined, failed to create output stream",
+ mCameraIdStr.string(), format);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
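+ // The first valid surface's properties become the reference for the shared
+ // stream; every additional surface must match them exactly.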
+ if (isFirstSurface) {
+ streamWidth = width;
+ streamHeight = height;
+ streamFormat = format;
+ streamDataSpace = dataSpace;
+ streamConsumerUsage = consumerUsage;
+ isFirstSurface = false;
+ }
+ if (width != streamWidth) {
+ String8 msg = String8::format("Camera %s:Surface width doesn't match: %d vs %d",
+ mCameraIdStr.string(), width, streamWidth);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+ if (height != streamHeight) {
+ String8 msg = String8::format("Camera %s:Surface height doesn't match: %d vs %d",
+ mCameraIdStr.string(), height, streamHeight);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+ if (format != streamFormat) {
+ String8 msg = String8::format("Camera %s:Surface format doesn't match: %d vs %d",
+ mCameraIdStr.string(), format, streamFormat);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+ if (dataSpace != streamDataSpace) {
+ String8 msg = String8::format("Camera %s:Surface dataSpace doesn't match: %d vs %d",
+ mCameraIdStr.string(), dataSpace, streamDataSpace);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+ // On the native side, there isn't a way to check whether two surfaces come from the
+ // same surface class type, so use the consumer usage flags to approximate the comparison.
+ // TODO: Support surfaces of different surface class types.
+ if (consumerUsage != streamConsumerUsage) {
+ String8 msg = String8::format(
+ "Camera %s:Surface usage flag doesn't match 0x%x vs 0x%x",
+ mCameraIdStr.string(), consumerUsage, streamConsumerUsage);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
+
+ binders.push_back(binder);
+ surfaces.push_back(surface);
}
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
- err = mDevice->createStream(surface, width, height, format, dataSpace,
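+ // All validated surfaces are passed to the device together so that they
+ // share a single output stream.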
+ err = mDevice->createStream(surfaces, deferredConsumer, width, height, format, dataSpace,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, outputConfiguration.getSurfaceSetID());
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
- mCameraId, width, height, format, dataSpace, strerror(-err), err);
+ "Camera %s: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
+ mCameraIdStr.string(), width, height, format, dataSpace, strerror(-err), err);
} else {
- mStreamMap.add(binder, streamId);
-
- ALOGV("%s: Camera %d: Successfully created a new stream ID %d for output surface"
- " (%d x %d) with format 0x%x.",
- __FUNCTION__, mCameraId, streamId, width, height, format);
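+ // Each producer's binder maps to the same stream ID but to a distinct
+ // surface index within that stream.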
+ int i = 0;
+ for (auto& binder : binders) {
+ ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %d",
+ __FUNCTION__, binder.get(), streamId, i);
+ mStreamMap.add(binder, StreamSurfaceId(streamId, i++));
+ }
+ ALOGV("%s: Camera %s: Successfully created a new stream ID %d for output surface"
+ " (%d x %d) with format 0x%x.",
+ __FUNCTION__, mCameraIdStr.string(), streamId, width, height, format);
// Set transform flags to ensure preview to be rotated correctly.
res = setStreamTransformLocked(streamId);
@@ -590,23 +704,25 @@
consumerUsage |= GraphicBuffer::USAGE_HW_COMPOSER;
}
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
- err = mDevice->createStream(/*surface*/nullptr, width, height, format, dataSpace,
+ std::vector<sp<Surface>> noSurface;
+ err = mDevice->createStream(noSurface, /*hasDeferredConsumer*/true, width,
+ height, format, dataSpace,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, outputConfiguration.getSurfaceSetID(), consumerUsage);
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
- mCameraId, width, height, format, dataSpace, strerror(-err), err);
+ "Camera %s: Error creating output stream (%d x %d, fmt %x, dataSpace %x): %s (%d)",
+ mCameraIdStr.string(), width, height, format, dataSpace, strerror(-err), err);
} else {
// Can not add streamId to mStreamMap here, as the surface is deferred. Add it to
// a separate list to track. Once the deferred surface is set, this id will be
// relocated to mStreamMap.
mDeferredStreams.push_back(streamId);
- ALOGV("%s: Camera %d: Successfully created a new stream ID %d for a deferred surface"
+ ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
- __FUNCTION__, mCameraId, streamId, width, height, format);
+ __FUNCTION__, mCameraIdStr.string(), streamId, width, height, format);
// Set transform flags to ensure preview to be rotated correctly.
res = setStreamTransformLocked(streamId);
@@ -661,8 +777,8 @@
}
if (mInputStream.configured) {
- String8 msg = String8::format("Camera %d: Already has an input stream "
- "configured (ID %zd)", mCameraId, mInputStream.id);
+ String8 msg = String8::format("Camera %s: Already has an input stream "
+ "configured (ID %zd)", mCameraIdStr.string(), mInputStream.id);
ALOGE("%s: %s", __FUNCTION__, msg.string() );
return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.string());
}
@@ -676,13 +792,13 @@
mInputStream.format = format;
mInputStream.id = streamId;
- ALOGV("%s: Camera %d: Successfully created a new input stream ID %d",
- __FUNCTION__, mCameraId, streamId);
+ ALOGV("%s: Camera %s: Successfully created a new input stream ID %d",
+ __FUNCTION__, mCameraIdStr.string(), streamId);
*newStreamId = streamId;
} else {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error creating new input stream: %s (%d)", mCameraId,
+ "Camera %s: Error creating new input stream: %s (%d)", mCameraIdStr.string(),
strerror(-err), err);
}
@@ -706,8 +822,8 @@
status_t err = mDevice->getInputBufferProducer(&producer);
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error getting input Surface: %s (%d)",
- mCameraId, strerror(-err), err);
+ "Camera %s: Error getting input Surface: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
} else {
inputSurface->name = String16("CameraInput");
inputSurface->graphicBufferProducer = producer;
@@ -828,13 +944,13 @@
request->swap(metadata);
} else if (err == BAD_VALUE) {
res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
- "Camera %d: Template ID %d is invalid or not supported: %s (%d)",
- mCameraId, templateId, strerror(-err), err);
+ "Camera %s: Template ID %d is invalid or not supported: %s (%d)",
+ mCameraIdStr.string(), templateId, strerror(-err), err);
} else {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error creating default request for template %d: %s (%d)",
- mCameraId, templateId, strerror(-err), err);
+ "Camera %s: Error creating default request for template %d: %s (%d)",
+ mCameraIdStr.string(), templateId, strerror(-err), err);
}
return res;
}
@@ -882,16 +998,16 @@
Mutex::Autolock idLock(mStreamingRequestIdLock);
if (mStreamingRequestId != REQUEST_ID_NONE) {
String8 msg = String8::format(
- "Camera %d: Try to waitUntilIdle when there are active streaming requests",
- mCameraId);
+ "Camera %s: Try to waitUntilIdle when there are active streaming requests",
+ mCameraIdStr.string());
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION, msg.string());
}
status_t err = mDevice->waitUntilDrained();
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error waiting to drain: %s (%d)",
- mCameraId, strerror(-err), err);
+ "Camera %s: Error waiting to drain: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
}
ALOGV("%s Done", __FUNCTION__);
return res;
@@ -917,7 +1033,7 @@
status_t err = mDevice->flush(lastFrameNumber);
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error flushing device: %s (%d)", mCameraId, strerror(-err), err);
+ "Camera %s: Error flushing device: %s (%d)", mCameraIdStr.string(), strerror(-err), err);
}
return res;
}
@@ -934,15 +1050,15 @@
// Guard against trying to prepare non-created streams
ssize_t index = NAME_NOT_FOUND;
for (size_t i = 0; i < mStreamMap.size(); ++i) {
- if (streamId == mStreamMap.valueAt(i)) {
+ if (streamId == mStreamMap.valueAt(i).streamId()) {
index = i;
break;
}
}
if (index == NAME_NOT_FOUND) {
- String8 msg = String8::format("Camera %d: Invalid stream ID (%d) specified, no stream "
- "with that ID exists", mCameraId, streamId);
+ String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no stream "
+ "with that ID exists", mCameraIdStr.string(), streamId);
ALOGW("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
@@ -952,11 +1068,11 @@
status_t err = mDevice->prepare(streamId);
if (err == BAD_VALUE) {
res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
- "Camera %d: Stream %d has already been used, and cannot be prepared",
- mCameraId, streamId);
+ "Camera %s: Stream %d has already been used, and cannot be prepared",
+ mCameraIdStr.string(), streamId);
} else if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error preparing stream %d: %s (%d)", mCameraId, streamId,
+ "Camera %s: Error preparing stream %d: %s (%d)", mCameraIdStr.string(), streamId,
strerror(-err), err);
}
return res;
@@ -974,22 +1090,22 @@
// Guard against trying to prepare non-created streams
ssize_t index = NAME_NOT_FOUND;
for (size_t i = 0; i < mStreamMap.size(); ++i) {
- if (streamId == mStreamMap.valueAt(i)) {
+ if (streamId == mStreamMap.valueAt(i).streamId()) {
index = i;
break;
}
}
if (index == NAME_NOT_FOUND) {
- String8 msg = String8::format("Camera %d: Invalid stream ID (%d) specified, no stream "
- "with that ID exists", mCameraId, streamId);
+ String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no stream "
+ "with that ID exists", mCameraIdStr.string(), streamId);
ALOGW("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
if (maxCount <= 0) {
- String8 msg = String8::format("Camera %d: maxCount (%d) must be greater than 0",
- mCameraId, maxCount);
+ String8 msg = String8::format("Camera %s: maxCount (%d) must be greater than 0",
+ mCameraIdStr.string(), maxCount);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
@@ -999,11 +1115,11 @@
status_t err = mDevice->prepare(maxCount, streamId);
if (err == BAD_VALUE) {
res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
- "Camera %d: Stream %d has already been used, and cannot be prepared",
- mCameraId, streamId);
+ "Camera %s: Stream %d has already been used, and cannot be prepared",
+ mCameraIdStr.string(), streamId);
} else if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error preparing stream %d: %s (%d)", mCameraId, streamId,
+ "Camera %s: Error preparing stream %d: %s (%d)", mCameraIdStr.string(), streamId,
strerror(-err), err);
}
@@ -1022,15 +1138,15 @@
// Guard against trying to prepare non-created streams
ssize_t index = NAME_NOT_FOUND;
for (size_t i = 0; i < mStreamMap.size(); ++i) {
- if (streamId == mStreamMap.valueAt(i)) {
+ if (streamId == mStreamMap.valueAt(i).streamId()) {
index = i;
break;
}
}
if (index == NAME_NOT_FOUND) {
- String8 msg = String8::format("Camera %d: Invalid stream ID (%d) specified, no stream "
- "with that ID exists", mCameraId, streamId);
+ String8 msg = String8::format("Camera %s: Invalid stream ID (%d) specified, no stream "
+ "with that ID exists", mCameraIdStr.string(), streamId);
ALOGW("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
@@ -1040,11 +1156,11 @@
status_t err = mDevice->tearDown(streamId);
if (err == BAD_VALUE) {
res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
- "Camera %d: Stream %d is still in use, cannot be torn down",
- mCameraId, streamId);
+ "Camera %s: Stream %d is still in use, cannot be torn down",
+ mCameraIdStr.string(), streamId);
} else if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error tearing down stream %d: %s (%d)", mCameraId, streamId,
+ "Camera %s: Error tearing down stream %d: %s (%d)", mCameraIdStr.string(), streamId,
strerror(-err), err);
}
@@ -1060,26 +1176,42 @@
Mutex::Autolock icl(mBinderSerializationLock);
- sp<IGraphicBufferProducer> bufferProducer = outputConfiguration.getGraphicBufferProducer();
+ const std::vector<sp<IGraphicBufferProducer> >& bufferProducers =
+ outputConfiguration.getGraphicBufferProducers();
// Client code should guarantee that the surface is from SurfaceView or SurfaceTexture.
- if (bufferProducer == NULL) {
- ALOGE("%s: bufferProducer must not be null", __FUNCTION__);
+ // It is also saved as the last entry of the graphicBufferProducer list.
+ if (bufferProducers.size() == 0) {
+ ALOGE("%s: bufferProducers must not be empty", __FUNCTION__);
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, "Target Surface is invalid");
}
- // Check if this stram id is one of the deferred streams
- ssize_t index = NAME_NOT_FOUND;
- for (size_t i = 0; i < mDeferredStreams.size(); i++) {
- if (streamId == mDeferredStreams[i]) {
- index = i;
- break;
- }
+
+ // Right now, only the first surface in the OutputConfiguration is allowed to
+ // be deferred, and all other surfaces are verified to be non-null on the
+ // Java side.
+ sp<IGraphicBufferProducer> bufferProducer = bufferProducers[0];
+ if (bufferProducer == nullptr) {
+ ALOGE("%s: bufferProducer must not be null", __FUNCTION__);
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
+ "Target Surface is invalid");
}
- if (index == NAME_NOT_FOUND) {
- String8 msg = String8::format("Camera %d: deferred surface is set to a unknown stream"
- "(ID %d)", mCameraId, streamId);
- ALOGW("%s: %s", __FUNCTION__, msg.string());
- return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+
+ // Check if this stream ID is one of the deferred-only streams
+ ssize_t index = NAME_NOT_FOUND;
+ if (bufferProducers.size() == 1) {
+ for (size_t i = 0; i < mDeferredStreams.size(); i++) {
+ if (streamId == mDeferredStreams[i]) {
+ index = i;
+ break;
+ }
+ }
+
+ if (index == NAME_NOT_FOUND) {
+ String8 msg = String8::format("Camera %s: deferred surface is set to a unknown stream"
+ "(ID %d)", mCameraIdStr.string(), streamId);
+ ALOGW("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
}
if (!mDevice.get()) {
@@ -1090,8 +1222,8 @@
{
ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
if (index != NAME_NOT_FOUND) {
- String8 msg = String8::format("Camera %d: Surface already has a stream created "
- " for it (ID %zd)", mCameraId, index);
+ String8 msg = String8::format("Camera %s: Surface already has a stream created "
+ " for it (ID %zd)", mCameraIdStr.string(), index);
ALOGW("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ALREADY_EXISTS, msg.string());
}
@@ -1106,16 +1238,20 @@
err = mDevice->setConsumerSurface(streamId, consumerSurface);
if (err == OK) {
sp<IBinder> binder = IInterface::asBinder(bufferProducer);
- mStreamMap.add(binder, streamId);
- mDeferredStreams.removeItemsAt(index);
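+ // The deferred surface is always the last entry in the producer list, so its
+ // surface ID within the stream is bufferProducers.size() - 1.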
+ ALOGV("%s: mStreamMap add binder %p streamId %d, surfaceId %zu", __FUNCTION__,
+ binder.get(), streamId, bufferProducers.size()-1);
+ mStreamMap.add(binder, StreamSurfaceId(streamId, bufferProducers.size()-1));
+ if (index != NAME_NOT_FOUND) {
+ mDeferredStreams.removeItemsAt(index);
+ }
} else if (err == NO_INIT) {
res = STATUS_ERROR_FMT(CameraService::ERROR_ILLEGAL_ARGUMENT,
- "Camera %d: Deferred surface is invalid: %s (%d)",
- mCameraId, strerror(-err), err);
+ "Camera %s: Deferred surface is invalid: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
} else {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
- "Camera %d: Error setting output stream deferred surface: %s (%d)",
- mCameraId, strerror(-err), err);
+ "Camera %s: Error setting output stream deferred surface: %s (%d)",
+ mCameraIdStr.string(), strerror(-err), err);
}
return res;
@@ -1127,8 +1263,8 @@
status_t CameraDeviceClient::dumpClient(int fd, const Vector<String16>& args) {
String8 result;
- result.appendFormat("CameraDeviceClient[%d] (%p) dump:\n",
- mCameraId,
+ result.appendFormat("CameraDeviceClient[%s] (%p) dump:\n",
+ mCameraIdStr.string(),
(getRemoteCallback() != NULL ?
IInterface::asBinder(getRemoteCallback()).get() : NULL) );
result.appendFormat(" Current client UID %u\n", mClientUid);
@@ -1142,9 +1278,11 @@
result.append(" No input stream configured.\n");
}
if (!mStreamMap.isEmpty()) {
- result.append(" Current output stream IDs:\n");
+ result.append(" Current output stream/surface IDs:\n");
for (size_t i = 0; i < mStreamMap.size(); i++) {
- result.appendFormat(" Stream %d\n", mStreamMap.valueAt(i));
+ result.appendFormat(" Stream %d Surface %d\n",
+ mStreamMap.valueAt(i).streamId(),
+ mStreamMap.valueAt(i).surfaceId());
}
} else if (!mDeferredStreams.isEmpty()) {
result.append(" Current deferred surface output stream IDs:\n");
@@ -1221,15 +1359,15 @@
void CameraDeviceClient::detachDevice() {
if (mDevice == 0) return;
- ALOGV("Camera %d: Stopping processors", mCameraId);
+ ALOGV("Camera %s: Stopping processors", mCameraIdStr.string());
mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
FRAME_PROCESSOR_LISTENER_MAX_ID,
/*listener*/this);
mFrameProcessor->requestExit();
- ALOGV("Camera %d: Waiting for threads", mCameraId);
+ ALOGV("Camera %s: Waiting for threads", mCameraIdStr.string());
mFrameProcessor->join();
- ALOGV("Camera %d: Disconnecting device", mCameraId);
+ ALOGV("Camera %s: Disconnecting device", mCameraIdStr.string());
// WORKAROUND: HAL refuses to disconnect while there's streams in flight
{
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 68e453c..047ccf2 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -42,7 +42,7 @@
CameraDeviceClientBase(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
const String16& clientPackageName,
- int cameraId,
+ const String8& cameraId,
int cameraFacing,
int clientPid,
uid_t clientUid,
@@ -142,14 +142,15 @@
CameraDeviceClient(const sp<CameraService>& cameraService,
const sp<hardware::camera2::ICameraDeviceCallbacks>& remoteCallback,
const String16& clientPackageName,
- int cameraId,
+ const String8& cameraId,
int cameraFacing,
int clientPid,
uid_t clientUid,
int servicePid);
virtual ~CameraDeviceClient();
- virtual status_t initialize(CameraModule *module);
+ virtual status_t initialize(CameraModule *module) override;
+ virtual status_t initialize(sp<CameraProviderManager> manager) override;
virtual status_t dump(int fd, const Vector<String16>& args);
@@ -179,6 +180,34 @@
status_t getRotationTransformLocked(/*out*/int32_t* transform);
private:
+ // StreamSurfaceId encapsulates streamId + surfaceId for a particular surface.
+ // streamId identifies the stream the surface belongs to, and surfaceId is the
+ // index of the surface within that stream (one stream can contain multiple
+ // surfaces).
+ class StreamSurfaceId final {
+ public:
+ StreamSurfaceId() {
+ mStreamId = -1;
+ mSurfaceId = -1;
+ }
+ StreamSurfaceId(int32_t streamId, int32_t surfaceId) {
+ mStreamId = streamId;
+ mSurfaceId = surfaceId;
+ }
+ int32_t streamId() const {
+ return mStreamId;
+ }
+ int32_t surfaceId() const {
+ return mSurfaceId;
+ }
+
+ private:
+ int32_t mStreamId;
+ int32_t mSurfaceId;
+
+ }; // class StreamSurfaceId
+
+private:
/** ICameraDeviceUser interface-related private members */
/** Preview callback related members */
@@ -186,6 +215,9 @@
static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0;
static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL;
+ template<typename TProviderPtr>
+ status_t initializeImpl(TProviderPtr providerPtr);
+
/** Utility members */
binder::Status checkPidStatus(const char* checkLocation);
bool enforceRequestPermissions(CameraMetadata& metadata);
@@ -212,8 +244,8 @@
//check if format is not custom format
static bool isPublicFormat(int32_t format);
- // IGraphicsBufferProducer binder -> Stream ID for output streams
- KeyedVector<sp<IBinder>, int> mStreamMap;
+ // IGraphicsBufferProducer binder -> Stream ID + Surface ID for output streams
+ KeyedVector<sp<IBinder>, StreamSurfaceId> mStreamMap;
struct InputStreamConfiguration {
bool configured;
@@ -234,6 +266,9 @@
// as there are no surfaces available and can not be put into mStreamMap. Once the deferred
// Surface is configured, the stream id will be moved to mStreamMap.
Vector<int32_t> mDeferredStreams;
+
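+ // Surface sharing limits: at most two surfaces may share one output stream,
+ // and at most one of them may be deferred.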
+ static const int32_t MAX_SURFACES_PER_STREAM = 2;
+ static const int32_t MAX_DEFERRED_SURFACES = 1;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 7e26153..93a584b 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -47,7 +47,7 @@
const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
const String16& clientPackageName,
- int cameraId,
+ const String8& cameraId,
int cameraFacing,
int clientPid,
uid_t clientUid,
@@ -55,10 +55,10 @@
TClientBase(cameraService, remoteCallback, clientPackageName,
cameraId, cameraFacing, clientPid, clientUid, servicePid),
mSharedCameraCallbacks(remoteCallback),
- mDeviceVersion(cameraService->getDeviceVersion(cameraId)),
+ mDeviceVersion(cameraService->getDeviceVersion(TClientBase::mCameraIdStr)),
mDeviceActive(false)
{
- ALOGI("Camera %d: Opened. Client: %s (PID %d, UID %d)", cameraId,
+ ALOGI("Camera %s: Opened. Client: %s (PID %d, UID %d)", cameraId.string(),
String8(clientPackageName).string(), clientPid, clientUid);
mInitialClientPid = clientPid;
@@ -80,9 +80,20 @@
template <typename TClientBase>
status_t Camera2ClientBase<TClientBase>::initialize(CameraModule *module) {
+ return initializeImpl(module);
+}
+
+template <typename TClientBase>
+status_t Camera2ClientBase<TClientBase>::initialize(sp<CameraProviderManager> manager) {
+ return initializeImpl(manager);
+}
+
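+// Shared initialization path for both the legacy CameraModule and the
+// CameraProviderManager-based provider pointer.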
+template <typename TClientBase>
+template <typename TProviderPtr>
+status_t Camera2ClientBase<TClientBase>::initializeImpl(TProviderPtr providerPtr) {
ATRACE_CALL();
- ALOGV("%s: Initializing client for camera %d", __FUNCTION__,
- TClientBase::mCameraId);
+ ALOGV("%s: Initializing client for camera %s", __FUNCTION__,
+ TClientBase::mCameraIdStr.string());
status_t res;
// Verify ops permissions
@@ -92,15 +103,15 @@
}
if (mDevice == NULL) {
- ALOGE("%s: Camera %d: No device connected",
- __FUNCTION__, TClientBase::mCameraId);
+ ALOGE("%s: Camera %s: No device connected",
+ __FUNCTION__, TClientBase::mCameraIdStr.string());
return NO_INIT;
}
- res = mDevice->initialize(module);
+ res = mDevice->initialize(providerPtr);
if (res != OK) {
- ALOGE("%s: Camera %d: unable to initialize device: %s (%d)",
- __FUNCTION__, TClientBase::mCameraId, strerror(-res), res);
+ ALOGE("%s: Camera %s: unable to initialize device: %s (%d)",
+ __FUNCTION__, TClientBase::mCameraIdStr.string(), strerror(-res), res);
return res;
}
@@ -118,8 +129,8 @@
disconnect();
- ALOGI("Closed Camera %d. Client was: %s (PID %d, UID %u)",
- TClientBase::mCameraId,
+ ALOGI("Closed Camera %s. Client was: %s (PID %d, UID %u)",
+ TClientBase::mCameraIdStr.string(),
String8(TClientBase::mClientPackageName).string(),
mInitialClientPid, TClientBase::mClientUid);
}
@@ -128,8 +139,8 @@
status_t Camera2ClientBase<TClientBase>::dumpClient(int fd,
const Vector<String16>& args) {
String8 result;
- result.appendFormat("Camera2ClientBase[%d] (%p) PID: %d, dump:\n",
- TClientBase::mCameraId,
+ result.appendFormat("Camera2ClientBase[%s] (%p) PID: %d, dump:\n",
+ TClientBase::mCameraIdStr.string(),
(TClientBase::getRemoteCallback() != NULL ?
IInterface::asBinder(TClientBase::getRemoteCallback()).get() : NULL),
TClientBase::mClientPid);
@@ -180,13 +191,13 @@
if (callingPid != TClientBase::mClientPid &&
callingPid != TClientBase::mServicePid) return res;
- ALOGV("Camera %d: Shutting down", TClientBase::mCameraId);
+ ALOGV("Camera %s: Shutting down", TClientBase::mCameraIdStr.string());
detachDevice();
CameraService::BasicClient::disconnect();
- ALOGV("Camera %d: Shut down complete complete", TClientBase::mCameraId);
+ ALOGV("Camera %s: Shut down complete complete", TClientBase::mCameraIdStr.string());
return res;
}
@@ -198,7 +209,7 @@
mDevice.clear();
- ALOGV("Camera %d: Detach complete", TClientBase::mCameraId);
+ ALOGV("Camera %s: Detach complete", TClientBase::mCameraIdStr.string());
}
template <typename TClientBase>
@@ -211,10 +222,10 @@
if (TClientBase::mClientPid != 0 &&
getCallingPid() != TClientBase::mClientPid) {
- ALOGE("%s: Camera %d: Connection attempt from pid %d; "
+ ALOGE("%s: Camera %s: Connection attempt from pid %d; "
"current locked to pid %d",
__FUNCTION__,
- TClientBase::mCameraId,
+ TClientBase::mCameraIdStr.string(),
getCallingPid(),
TClientBase::mClientPid);
return BAD_VALUE;
@@ -242,8 +253,7 @@
void Camera2ClientBase<TClientBase>::notifyIdle() {
if (mDeviceActive) {
getCameraService()->updateProxyDeviceState(
- ICameraServiceProxy::CAMERA_STATE_IDLE,
- String8::format("%d", TClientBase::mCameraId));
+ ICameraServiceProxy::CAMERA_STATE_IDLE, TClientBase::mCameraIdStr);
}
mDeviceActive = false;
@@ -258,8 +268,7 @@
if (!mDeviceActive) {
getCameraService()->updateProxyDeviceState(
- ICameraServiceProxy::CAMERA_STATE_ACTIVE,
- String8::format("%d", TClientBase::mCameraId));
+ ICameraServiceProxy::CAMERA_STATE_ACTIVE, TClientBase::mCameraIdStr);
}
mDeviceActive = true;
@@ -322,7 +331,7 @@
template <typename TClientBase>
int Camera2ClientBase<TClientBase>::getCameraId() const {
- return TClientBase::mCameraId;
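+ // mCameraIdStr stores the camera ID as a string; parse it back to an int for
+ // callers that still expect a numeric ID.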
+ return std::stoi(TClientBase::mCameraIdStr.string());
}
template <typename TClientBase>
@@ -337,7 +346,7 @@
template <typename TClientBase>
const sp<CameraService>& Camera2ClientBase<TClientBase>::getCameraService() {
- return TClientBase::mCameraService;
+ return TClientBase::sCameraService;
}
template <typename TClientBase>
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index 9fd0a78..a4c08ef 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -49,7 +49,7 @@
Camera2ClientBase(const sp<CameraService>& cameraService,
const sp<TCamCallbacks>& remoteCallback,
const String16& clientPackageName,
- int cameraId,
+ const String8& cameraId,
int cameraFacing,
int clientPid,
uid_t clientUid,
@@ -57,6 +57,7 @@
virtual ~Camera2ClientBase();
virtual status_t initialize(CameraModule *module);
+ virtual status_t initialize(sp<CameraProviderManager> manager);
virtual status_t dumpClient(int fd, const Vector<String16>& args);
/**
@@ -140,6 +141,10 @@
virtual void detachDevice();
bool mDeviceActive;
+
+private:
+ template<typename TProviderPtr>
+ status_t initializeImpl(TProviderPtr providerPtr);
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index f30afe3..a873402 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -17,6 +17,8 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERADEVICEBASE_H
#define ANDROID_SERVERS_CAMERA_CAMERADEVICEBASE_H
+#include <list>
+
#include <utils/RefBase.h>
#include <utils/String8.h>
#include <utils/String16.h>
@@ -35,6 +37,11 @@
namespace android {
+class CameraProviderManager;
+
+// Mapping of output stream index to surface ids
+typedef std::unordered_map<int, std::vector<size_t> > SurfaceMap;
+
/**
* Base interface for version >= 2 camera device classes, which interface to
* camera HAL device versions >= 2.
@@ -46,9 +53,10 @@
/**
* The device's camera ID
*/
- virtual int getId() const = 0;
+ virtual const String8& getId() const = 0;
virtual status_t initialize(CameraModule *module) = 0;
+ virtual status_t initialize(sp<CameraProviderManager> manager) = 0;
virtual status_t disconnect() = 0;
virtual status_t dump(int fd, const Vector<String16> &args) = 0;
@@ -70,6 +78,7 @@
* Output lastFrameNumber is the expected last frame number of the list of requests.
*/
virtual status_t captureList(const List<const CameraMetadata> &requests,
+ const std::list<const SurfaceMap> &surfaceMaps,
int64_t *lastFrameNumber = NULL) = 0;
/**
@@ -85,6 +94,7 @@
* Output lastFrameNumber is the last frame number of the previous streaming request.
*/
virtual status_t setStreamingRequestList(const List<const CameraMetadata> &requests,
+ const std::list<const SurfaceMap> &surfaceMaps,
int64_t *lastFrameNumber = NULL) = 0;
/**
@@ -114,6 +124,19 @@
uint32_t consumerUsage = 0) = 0;
/**
+ * Create an output stream of the requested size, format, rotation and
+ * dataspace with a number of consumers.
+ *
+ * For HAL_PIXEL_FORMAT_BLOB formats, the width and height should be the
+ * logical dimensions of the buffer, not the number of bytes.
+ */
+ virtual status_t createStream(const std::vector<sp<Surface>>& consumers,
+ bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+ android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+ int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ uint32_t consumerUsage = 0) = 0;
+
+ /**
* Create an input stream of width, height, and format.
*
* Return value is the stream ID if non-negative and an error if negative.
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
new file mode 100644
index 0000000..f691dc1
--- /dev/null
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -0,0 +1,1119 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraProviderManager"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include "CameraProviderManager.h"
+
+#include <chrono>
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <hidl/ServiceManagement.h>
+
+namespace android {
+
+using namespace ::android::hardware::camera;
+using namespace ::android::hardware::camera::common::V1_0;
+
+namespace {
+// Hardcoded name for the passthrough HAL implementation, since it can't be discovered via the
+// service manager
+const std::string kLegacyProviderName("legacy/0");
+
+// Slash-separated list of provider types to consider for use via the old camera API
+const std::string kStandardProviderTypes("internal/legacy");
+
+} // anonymous namespace
+
+CameraProviderManager::HardwareServiceInteractionProxy
+CameraProviderManager::sHardwareServiceInteractionProxy{};
+
+CameraProviderManager::~CameraProviderManager() {
+}
+
+status_t CameraProviderManager::initialize(wp<CameraProviderManager::StatusListener> listener,
+ ServiceInteractionProxy* proxy) {
+ int numProviders = 0;
+ {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ if (proxy == nullptr) {
+ ALOGE("%s: No valid service interaction proxy provided", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ mListener = listener;
+ mServiceProxy = proxy;
+
+ // Registering will trigger notifications for all already-known providers
+ bool success = mServiceProxy->registerForNotifications(
+ /* instance name, empty means no filter */ "",
+ this);
+ if (!success) {
+ ALOGE("%s: Unable to register with hardware service manager for notifications "
+ "about camera providers", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ numProviders = mProviders.size();
+ }
+
+ if (numProviders == 0) {
+ // Remote provider might have not been initialized
+ // Wait for a bit and see if we get one registered
+ std::mutex mtx;
+ std::unique_lock<std::mutex> lock(mtx);
+ mProviderRegistered.wait_for(lock, std::chrono::seconds(15));
+ if (mProviders.size() == 0) {
+ ALOGI("%s: Unable to get one registered provider within timeout!",
+ __FUNCTION__);
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ // See if there's a passthrough HAL, but let's not complain if there's not
+ addProvider(kLegacyProviderName, /*expected*/ false);
+ }
+ }
+
+ return OK;
+}
+
+int CameraProviderManager::getCameraCount() const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ int count = 0;
+ for (auto& provider : mProviders) {
+ count += provider->mDevices.size();
+ }
+ return count;
+}
+
+int CameraProviderManager::getStandardCameraCount() const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ int count = 0;
+ for (auto& provider : mProviders) {
+ if (kStandardProviderTypes.find(provider->getType()) != std::string::npos) {
+ count += provider->mDevices.size();
+ }
+ }
+ return count;
+}
+
+std::vector<std::string> CameraProviderManager::getCameraDeviceIds() const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ std::vector<std::string> deviceIds;
+ for (auto& provider : mProviders) {
+ for (auto& deviceInfo : provider->mDevices) {
+ deviceIds.push_back(deviceInfo->mId);
+ }
+ }
+ return deviceIds;
+}
+
+bool CameraProviderManager::isValidDevice(const std::string &id, uint16_t majorVersion) const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+ return isValidDeviceLocked(id, majorVersion);
+}
+
+bool CameraProviderManager::isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const {
+ for (auto& provider : mProviders) {
+ for (auto& deviceInfo : provider->mDevices) {
+ if (deviceInfo->mId == id && deviceInfo->mVersion.get_major() == majorVersion) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+bool CameraProviderManager::hasFlashUnit(const std::string &id) const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ auto deviceInfo = findDeviceInfoLocked(id);
+ if (deviceInfo == nullptr) return false;
+
+ return deviceInfo->hasFlashUnit();
+}
+
+status_t CameraProviderManager::getResourceCost(const std::string &id,
+ CameraResourceCost* cost) const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ auto deviceInfo = findDeviceInfoLocked(id);
+ if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+ *cost = deviceInfo->mResourceCost;
+ return OK;
+}
+
+status_t CameraProviderManager::getCameraInfo(const std::string &id,
+ hardware::CameraInfo* info) const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ auto deviceInfo = findDeviceInfoLocked(id);
+ if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+ return deviceInfo->getCameraInfo(info);
+}
+
+status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
+ CameraMetadata* characteristics) const {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ auto deviceInfo = findDeviceInfoLocked(id, /*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
+ if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+ return deviceInfo->getCameraCharacteristics(characteristics);
+}
+
+status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
+ hardware::hidl_version *v) {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ hardware::hidl_version maxVersion{0,0};
+ bool found = false;
+ for (auto& provider : mProviders) {
+ for (auto& deviceInfo : provider->mDevices) {
+ if (deviceInfo->mId == id) {
+ if (deviceInfo->mVersion > maxVersion) {
+ maxVersion = deviceInfo->mVersion;
+ found = true;
+ }
+ }
+ }
+ }
+ if (!found) {
+ return NAME_NOT_FOUND;
+ }
+ *v = maxVersion;
+ return OK;
+}
+
+status_t CameraProviderManager::setTorchMode(const std::string &id, bool enabled) {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ auto deviceInfo = findDeviceInfoLocked(id);
+ if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+ return deviceInfo->setTorchMode(enabled);
+}
+
+status_t CameraProviderManager::setUpVendorTags() {
+ // TODO (b/34275821): support aggregating vendor tags for more than one provider
+ for (auto& provider : mProviders) {
+ hardware::hidl_vec<VendorTagSection> vts;
+ Status status;
+ provider->mInterface->getVendorTags(
+ [&](auto s, const auto& vendorTagSecs) {
+ status = s;
+ if (s == Status::OK) {
+ vts = vendorTagSecs;
+ }
+ });
+
+ if (status != Status::OK) {
+ return mapToStatusT(status);
+ }
+
+ VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+
+ // Read all vendor tag definitions into a descriptor
+ sp<VendorTagDescriptor> desc;
+ status_t res;
+ if ((res = HidlVendorTagDescriptor::createDescriptorFromHidl(vts, /*out*/desc))
+ != OK) {
+ ALOGE("%s: Could not generate descriptor from vendor tag operations,"
+ "received error %s (%d). Camera clients will not be able to use"
+ "vendor tags", __FUNCTION__, strerror(res), res);
+ return res;
+ }
+
+ // Set the global descriptor to use with camera metadata
+ VendorTagDescriptor::setAsGlobalVendorTagDescriptor(desc);
+ }
+ return OK;
+}
+
+status_t CameraProviderManager::openSession(const std::string &id,
+ const sp<hardware::camera::device::V3_2::ICameraDeviceCallback>& callback,
+ /*out*/
+ sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session) {
+
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ auto deviceInfo = findDeviceInfoLocked(id,
+ /*minVersion*/ {3,0}, /*maxVersion*/ {4,0});
+ if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+ auto *deviceInfo3 = static_cast<ProviderInfo::DeviceInfo3*>(deviceInfo);
+
+ Status status;
+ deviceInfo3->mInterface->open(callback, [&status, &session]
+ (Status s, const sp<device::V3_2::ICameraDeviceSession>& cameraSession) {
+ status = s;
+ if (status == Status::OK) {
+ *session = cameraSession;
+ }
+ });
+ return mapToStatusT(status);
+}
+
+status_t CameraProviderManager::openSession(const std::string &id,
+ const sp<hardware::camera::device::V1_0::ICameraDeviceCallback>& callback,
+ /*out*/
+ sp<hardware::camera::device::V1_0::ICameraDevice> *session) {
+
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ auto deviceInfo = findDeviceInfoLocked(id,
+ /*minVersion*/ {1,0}, /*maxVersion*/ {2,0});
+ if (deviceInfo == nullptr) return NAME_NOT_FOUND;
+
+ auto *deviceInfo1 = static_cast<ProviderInfo::DeviceInfo1*>(deviceInfo);
+
+ Status status = deviceInfo1->mInterface->open(callback);
+ if (status == Status::OK) {
+ *session = deviceInfo1->mInterface;
+ }
+ return mapToStatusT(status);
+}
+
+
+hardware::Return<void> CameraProviderManager::onRegistration(
+ const hardware::hidl_string& /*fqName*/,
+ const hardware::hidl_string& name,
+ bool /*preexisting*/) {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ addProvider(name);
+ mProviderRegistered.notify_one();
+ return hardware::Return<void>();
+}
+
+status_t CameraProviderManager::dump(int fd, const Vector<String16>& args) {
+ std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+ dprintf(fd, "Available camera providers and devices:\n");
+ for (auto& provider : mProviders) {
+ provider->dump(fd, args);
+ }
+ return OK;
+}
+
+CameraProviderManager::ProviderInfo::DeviceInfo* CameraProviderManager::findDeviceInfoLocked(
+ const std::string& id,
+ hardware::hidl_version minVersion, hardware::hidl_version maxVersion) const {
+ for (auto& provider : mProviders) {
+ for (auto& deviceInfo : provider->mDevices) {
+ if (deviceInfo->mId == id &&
+ minVersion <= deviceInfo->mVersion && maxVersion >= deviceInfo->mVersion) {
+ return deviceInfo.get();
+ }
+ }
+ }
+ return nullptr;
+}
+
+
+status_t CameraProviderManager::addProvider(const std::string& newProvider, bool expected) {
+ for (const auto& providerInfo : mProviders) {
+ if (providerInfo->mProviderName == newProvider) {
+ ALOGW("%s: Camera provider HAL with name '%s' already registered", __FUNCTION__,
+ newProvider.c_str());
+ return ALREADY_EXISTS;
+ }
+ }
+ sp<provider::V2_4::ICameraProvider> interface =
+ mServiceProxy->getService(newProvider);
+
+ if (interface == nullptr) {
+ if (expected) {
+ ALOGW("%s: Camera provider HAL '%s' is not actually available", __FUNCTION__,
+ newProvider.c_str());
+ return BAD_VALUE;
+ } else {
+ // Not guaranteed to be found, so not an error if it wasn't
+ return OK;
+ }
+ }
+
+ sp<ProviderInfo> providerInfo =
+ new ProviderInfo(newProvider, interface, this);
+ status_t res = providerInfo->initialize();
+ if (res != OK) {
+ return res;
+ }
+
+ mProviders.push_back(providerInfo);
+
+ return OK;
+}
+
+status_t CameraProviderManager::removeProvider(const std::string& provider) {
+ for (auto it = mProviders.begin(); it != mProviders.end(); it++) {
+ if ((*it)->mProviderName == provider) {
+ mProviders.erase(it);
+ return OK;
+ }
+ }
+ ALOGW("%s: Camera provider HAL with name '%s' is not registered", __FUNCTION__,
+ provider.c_str());
+ return NAME_NOT_FOUND;
+}
+
+/**** Methods for ProviderInfo ****/
+
+
+CameraProviderManager::ProviderInfo::ProviderInfo(
+ const std::string &providerName,
+ sp<provider::V2_4::ICameraProvider>& interface,
+ CameraProviderManager *manager) :
+ mProviderName(providerName),
+ mInterface(interface),
+ mManager(manager) {
+ (void) mManager;
+}
+
+status_t CameraProviderManager::ProviderInfo::initialize() {
+ status_t res = parseProviderName(mProviderName, &mType, &mId);
+ if (res != OK) {
+ ALOGE("%s: Invalid provider name, ignoring", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ ALOGI("Connecting to new camera provider: %s, isRemote? %d",
+ mProviderName.c_str(), mInterface->isRemote());
+ Status status = mInterface->setCallback(this);
+ if (status != Status::OK) {
+ ALOGE("%s: Unable to register callbacks with camera provider '%s'",
+ __FUNCTION__, mProviderName.c_str());
+ return mapToStatusT(status);
+ }
+ // TODO: Register for hw binder death notifications as well
+
+ // Get initial list of camera devices, if any
+ std::vector<std::string> devices;
+ mInterface->getCameraIdList([&status, &devices](
+ Status idStatus,
+ const hardware::hidl_vec<hardware::hidl_string>& cameraDeviceNames) {
+ status = idStatus;
+ if (status == Status::OK) {
+ for (size_t i = 0; i < cameraDeviceNames.size(); i++) {
+ devices.push_back(cameraDeviceNames[i]);
+ }
+ } });
+
+ if (status != Status::OK) {
+ ALOGE("%s: Unable to query for camera devices from provider '%s'",
+ __FUNCTION__, mProviderName.c_str());
+ return mapToStatusT(status);
+ }
+
+ for (auto& device : devices) {
+ status_t res = addDevice(device);
+ if (res != OK) {
+ ALOGE("%s: Unable to enumerate camera device '%s': %s (%d)",
+ __FUNCTION__, device.c_str(), strerror(-res), res);
+ }
+ }
+
+ ALOGI("Camera provider %s ready with %zu camera devices",
+ mProviderName.c_str(), mDevices.size());
+
+ return OK;
+}
+
+const std::string& CameraProviderManager::ProviderInfo::getType() const {
+ return mType;
+}
+
+status_t CameraProviderManager::ProviderInfo::addDevice(const std::string& name,
+ CameraDeviceStatus initialStatus, /*out*/ std::string* parsedId) {
+
+ ALOGI("Enumerating new camera device: %s", name.c_str());
+
+ uint16_t major, minor;
+ std::string type, id;
+
+ status_t res = parseDeviceName(name, &major, &minor, &type, &id);
+ if (res != OK) {
+ return res;
+ }
+ if (type != mType) {
+ ALOGE("%s: Device type %s does not match provider type %s", __FUNCTION__,
+ type.c_str(), mType.c_str());
+ return BAD_VALUE;
+ }
+ if (mManager->isValidDeviceLocked(id, major)) {
+ ALOGE("%s: Device %s: ID %s is already in use for device major version %d", __FUNCTION__,
+ name.c_str(), id.c_str(), major);
+ return BAD_VALUE;
+ }
+
+ std::unique_ptr<DeviceInfo> deviceInfo;
+ switch (major) {
+ case 1:
+ deviceInfo = initializeDeviceInfo<DeviceInfo1>(name, id, minor);
+ break;
+ case 3:
+ deviceInfo = initializeDeviceInfo<DeviceInfo3>(name, id, minor);
+ break;
+ default:
+ ALOGE("%s: Device %s: Unknown HIDL device HAL major version %d:", __FUNCTION__,
+ name.c_str(), major);
+ return BAD_VALUE;
+ }
+ if (deviceInfo == nullptr) return BAD_VALUE;
+ deviceInfo->mStatus = initialStatus;
+
+ mDevices.push_back(std::move(deviceInfo));
+
+ if (parsedId != nullptr) {
+ *parsedId = id;
+ }
+ return OK;
+}
+
+status_t CameraProviderManager::ProviderInfo::dump(int fd, const Vector<String16>&) const {
+ dprintf(fd, " %s: %zu devices:\n", mProviderName.c_str(), mDevices.size());
+
+ for (auto& device : mDevices) {
+ dprintf(fd, " %s: Resource cost: %d\n", device->mName.c_str(),
+ device->mResourceCost.resourceCost);
+ if (device->mResourceCost.conflictingDevices.size() > 0) {
+ dprintf(fd, " Conflicting devices:\n");
+ for (size_t i = 0; i < device->mResourceCost.conflictingDevices.size(); i++) {
+ dprintf(fd, " %s\n",
+ device->mResourceCost.conflictingDevices[i].c_str());
+ }
+ }
+ }
+ return OK;
+}
+
+hardware::Return<void> CameraProviderManager::ProviderInfo::cameraDeviceStatusChange(
+ const hardware::hidl_string& cameraDeviceName,
+ CameraDeviceStatus newStatus) {
+ sp<StatusListener> listener;
+ std::string id;
+ {
+ std::lock_guard<std::mutex> lock(mManager->mStatusListenerMutex);
+ bool known = false;
+ for (auto& deviceInfo : mDevices) {
+ if (deviceInfo->mName == cameraDeviceName) {
+ ALOGI("Camera device %s status is now %s, was %s", cameraDeviceName.c_str(),
+ deviceStatusToString(newStatus), deviceStatusToString(deviceInfo->mStatus));
+ deviceInfo->mStatus = newStatus;
+ // TODO: Handle device removal (NOT_PRESENT)
+ id = deviceInfo->mId;
+ known = true;
+ break;
+ }
+ }
+ // Previously unseen device; status must not be NOT_PRESENT
+ if (!known) {
+ if (newStatus == CameraDeviceStatus::NOT_PRESENT) {
+ ALOGW("Camera provider %s says an unknown camera device %s is not present. Curious.",
+ mProviderName.c_str(), cameraDeviceName.c_str());
+ return hardware::Void();
+ }
+ addDevice(cameraDeviceName, newStatus, &id);
+ }
+ listener = mManager->mListener.promote();
+ }
+ // Call without lock held to allow reentrancy into provider manager
+ if (listener != nullptr) {
+ listener->onDeviceStatusChanged(String8(id.c_str()), newStatus);
+ }
+ return hardware::Void();
+}
+
+hardware::Return<void> CameraProviderManager::ProviderInfo::torchModeStatusChange(
+ const hardware::hidl_string& cameraDeviceName,
+ TorchModeStatus newStatus) {
+ sp<StatusListener> listener;
+ std::string id;
+ {
+ std::lock_guard<std::mutex> lock(mManager->mStatusListenerMutex);
+ bool known = false;
+ for (auto& deviceInfo : mDevices) {
+ if (deviceInfo->mName == cameraDeviceName) {
+ ALOGI("Camera device %s torch status is now %s", cameraDeviceName.c_str(),
+ torchStatusToString(newStatus));
+ id = deviceInfo->mId;
+ known = true;
+ break;
+ }
+ }
+ if (!known) {
+ ALOGW("Camera provider %s says an unknown camera %s now has torch status %d. Curious.",
+ mProviderName.c_str(), cameraDeviceName.c_str(), newStatus);
+ return hardware::Void();
+ }
+ listener = mManager->mListener.promote();
+ }
+ // Call without lock held to allow reentrancy into provider manager
+ if (listener != nullptr) {
+ listener->onTorchStatusChanged(String8(id.c_str()), newStatus);
+ }
+ return hardware::Void();
+}
+
+
+template<class DeviceInfoT>
+std::unique_ptr<CameraProviderManager::ProviderInfo::DeviceInfo>
+ CameraProviderManager::ProviderInfo::initializeDeviceInfo(
+ const std::string &name,
+ const std::string &id, uint16_t minorVersion) const {
+ Status status;
+
+ auto cameraInterface =
+ getDeviceInterface<typename DeviceInfoT::InterfaceT>(name);
+ if (cameraInterface == nullptr) return nullptr;
+
+ CameraResourceCost resourceCost;
+ cameraInterface->getResourceCost([&status, &resourceCost](
+ Status s, CameraResourceCost cost) {
+ status = s;
+ resourceCost = cost;
+ });
+ if (status != Status::OK) {
+ ALOGE("%s: Unable to obtain resource costs for camera device %s: %s", __FUNCTION__,
+ name.c_str(), statusToString(status));
+ return nullptr;
+ }
+ return std::unique_ptr<DeviceInfo>(
+ new DeviceInfoT(name, id, minorVersion, resourceCost, cameraInterface));
+}
+
+template<class InterfaceT>
+sp<InterfaceT>
+CameraProviderManager::ProviderInfo::getDeviceInterface(const std::string &name) const {
+ ALOGE("%s: Device %s: Unknown HIDL device HAL major version %d:", __FUNCTION__,
+ name.c_str(), InterfaceT::version.get_major());
+ return nullptr;
+}
+
+template<>
+sp<device::V1_0::ICameraDevice>
+CameraProviderManager::ProviderInfo::getDeviceInterface
+ <device::V1_0::ICameraDevice>(const std::string &name) const {
+ Status status;
+ sp<device::V1_0::ICameraDevice> cameraInterface;
+ mInterface->getCameraDeviceInterface_V1_x(name, [&status, &cameraInterface](
+ Status s, sp<device::V1_0::ICameraDevice> interface) {
+ status = s;
+ cameraInterface = interface;
+ });
+ if (status != Status::OK) {
+ ALOGE("%s: Unable to obtain interface for camera device %s: %s", __FUNCTION__,
+ name.c_str(), statusToString(status));
+ return nullptr;
+ }
+ return cameraInterface;
+}
+
+template<>
+sp<device::V3_2::ICameraDevice>
+CameraProviderManager::ProviderInfo::getDeviceInterface
+ <device::V3_2::ICameraDevice>(const std::string &name) const {
+ Status status;
+ sp<device::V3_2::ICameraDevice> cameraInterface;
+ mInterface->getCameraDeviceInterface_V3_x(name, [&status, &cameraInterface](
+ Status s, sp<device::V3_2::ICameraDevice> interface) {
+ status = s;
+ cameraInterface = interface;
+ });
+ if (status != Status::OK) {
+ ALOGE("%s: Unable to obtain interface for camera device %s: %s", __FUNCTION__,
+ name.c_str(), statusToString(status));
+ return nullptr;
+ }
+ return cameraInterface;
+}
+
+CameraProviderManager::ProviderInfo::DeviceInfo::~DeviceInfo() {}
+
+template<class InterfaceT>
+status_t CameraProviderManager::ProviderInfo::DeviceInfo::setTorchMode(InterfaceT& interface,
+ bool enabled) {
+ Status s = interface->setTorchMode(enabled ? TorchMode::ON : TorchMode::OFF);
+ return mapToStatusT(s);
+}
+
+CameraProviderManager::ProviderInfo::DeviceInfo1::DeviceInfo1(const std::string& name,
+ const std::string &id,
+ uint16_t minorVersion,
+ const CameraResourceCost& resourceCost,
+ sp<InterfaceT> interface) :
+ DeviceInfo(name, id, hardware::hidl_version{1, minorVersion}, resourceCost),
+ mInterface(interface) {
+ // Get default parameters and initialize flash unit availability
+ // Requires powering on the camera device
+ Status status = mInterface->open(nullptr);
+ if (status != Status::OK) {
+ ALOGE("%s: Unable to open camera device %s to check for a flash unit: %s (%d)", __FUNCTION__,
+ mId.c_str(), CameraProviderManager::statusToString(status), status);
+ return;
+ }
+ mInterface->getParameters([this](const hardware::hidl_string& parms) {
+ mDefaultParameters.unflatten(String8(parms.c_str()));
+ });
+
+ const char *flashMode =
+ mDefaultParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+ if (flashMode && strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) {
+ mHasFlashUnit = true;
+ } else {
+ mHasFlashUnit = false;
+ }
+
+ mInterface->close();
+}
+
+CameraProviderManager::ProviderInfo::DeviceInfo1::~DeviceInfo1() {}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo1::setTorchMode(bool enabled) {
+ return DeviceInfo::setTorchMode(mInterface, enabled);
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo1::getCameraInfo(
+ hardware::CameraInfo *info) const {
+ if (info == nullptr) return BAD_VALUE;
+
+ Status status;
+ device::V1_0::CameraInfo cInfo;
+ mInterface->getCameraInfo([&status, &cInfo](Status s, device::V1_0::CameraInfo camInfo) {
+ status = s;
+ cInfo = camInfo;
+ });
+ if (status != Status::OK) {
+ return mapToStatusT(status);
+ }
+
+ switch(cInfo.facing) {
+ case device::V1_0::CameraFacing::BACK:
+ info->facing = hardware::CAMERA_FACING_BACK;
+ break;
+ case device::V1_0::CameraFacing::EXTERNAL:
+ // Map external to front for legacy API
+ case device::V1_0::CameraFacing::FRONT:
+ info->facing = hardware::CAMERA_FACING_FRONT;
+ break;
+ default:
+ ALOGW("%s: Unknown camera facing: %d", __FUNCTION__, cInfo.facing);
+ info->facing = hardware::CAMERA_FACING_BACK;
+ }
+ info->orientation = cInfo.orientation;
+
+ return OK;
+}
+
+CameraProviderManager::ProviderInfo::DeviceInfo3::DeviceInfo3(const std::string& name,
+ const std::string &id,
+ uint16_t minorVersion,
+ const CameraResourceCost& resourceCost,
+ sp<InterfaceT> interface) :
+ DeviceInfo(name, id, hardware::hidl_version{3, minorVersion}, resourceCost),
+ mInterface(interface) {
+ // Get camera characteristics and initialize flash unit availability
+ Status status;
+ mInterface->getCameraCharacteristics([&status, this](Status s,
+ device::V3_2::CameraMetadata metadata) {
+ status = s;
+ if (s == Status::OK) {
+ camera_metadata_t *buffer =
+ reinterpret_cast<camera_metadata_t*>(metadata.data());
+ mCameraCharacteristics = buffer;
+ }
+ });
+ if (status != Status::OK) {
+ ALOGE("%s: Unable to get camera characteristics for device %s: %s (%d)",
+ __FUNCTION__, mId.c_str(), CameraProviderManager::statusToString(status), status);
+ return;
+ }
+ camera_metadata_entry flashAvailable =
+ mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
+ if (flashAvailable.count == 1 &&
+ flashAvailable.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_TRUE) {
+ mHasFlashUnit = true;
+ } else {
+ mHasFlashUnit = false;
+ }
+}
+
+CameraProviderManager::ProviderInfo::DeviceInfo3::~DeviceInfo3() {}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::setTorchMode(bool enabled) {
+ return DeviceInfo::setTorchMode(mInterface, enabled);
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
+ hardware::CameraInfo *info) const {
+ if (info == nullptr) return BAD_VALUE;
+
+ camera_metadata_ro_entry facing =
+ mCameraCharacteristics.find(ANDROID_LENS_FACING);
+ if (facing.count == 1) {
+ switch (facing.data.u8[0]) {
+ case ANDROID_LENS_FACING_BACK:
+ info->facing = hardware::CAMERA_FACING_BACK;
+ break;
+ case ANDROID_LENS_FACING_EXTERNAL:
+ // Map external to front for legacy API
+ case ANDROID_LENS_FACING_FRONT:
+ info->facing = hardware::CAMERA_FACING_FRONT;
+ break;
+ }
+ } else {
+ ALOGE("%s: Unable to find android.lens.facing static metadata", __FUNCTION__);
+ return NAME_NOT_FOUND;
+ }
+
+ camera_metadata_ro_entry orientation =
+ mCameraCharacteristics.find(ANDROID_SENSOR_ORIENTATION);
+ if (orientation.count == 1) {
+ info->orientation = orientation.data.i32[0];
+ } else {
+ ALOGE("%s: Unable to find android.sensor.orientation static metadata", __FUNCTION__);
+ return NAME_NOT_FOUND;
+ }
+
+ return OK;
+}
+
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
+ CameraMetadata *characteristics) const {
+ if (characteristics == nullptr) return BAD_VALUE;
+
+ *characteristics = mCameraCharacteristics;
+ return OK;
+}
+
+status_t CameraProviderManager::ProviderInfo::parseProviderName(const std::string& name,
+ std::string *type, uint32_t *id) {
+ // Format must be "<type>/<id>"
+#define ERROR_MSG_PREFIX "%s: Invalid provider name '%s'. " \
+ "Should match '<type>/<id>' - "
+
+ if (!type || !id) return INVALID_OPERATION;
+
+ std::string::size_type slashIdx = name.find('/');
+ if (slashIdx == std::string::npos || slashIdx == name.size() - 1) {
+ ALOGE(ERROR_MSG_PREFIX
+ "does not have / separator between type and id",
+ __FUNCTION__, name.c_str());
+ return BAD_VALUE;
+ }
+
+ std::string typeVal = name.substr(0, slashIdx);
+
+ char *endPtr;
+ errno = 0;
+ long idVal = strtol(name.c_str() + slashIdx + 1, &endPtr, 10);
+ if (errno != 0) {
+ ALOGE(ERROR_MSG_PREFIX
+ "cannot parse provider id as an integer: %s (%d)",
+ __FUNCTION__, name.c_str(), strerror(errno), errno);
+ return BAD_VALUE;
+ }
+ if (endPtr != name.c_str() + name.size()) {
+ ALOGE(ERROR_MSG_PREFIX
+ "provider id has unexpected length",
+ __FUNCTION__, name.c_str());
+ return BAD_VALUE;
+ }
+ if (idVal < 0) {
+ ALOGE(ERROR_MSG_PREFIX
+ "id is negative: %ld",
+ __FUNCTION__, name.c_str(), idVal);
+ return BAD_VALUE;
+ }
+
+#undef ERROR_MSG_PREFIX
+
+ *type = typeVal;
+ *id = static_cast<uint32_t>(idVal);
+
+ return OK;
+}
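+
+// Example (illustration only, assuming a provider instance named "legacy/0"):
+// parseProviderName("legacy/0", &type, &id) yields type = "legacy" and id = 0;
+// a name without a '/' separator or with a non-numeric id returns BAD_VALUE.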
+
+status_t CameraProviderManager::ProviderInfo::parseDeviceName(const std::string& name,
+ uint16_t *major, uint16_t *minor, std::string *type, std::string *id) {
+
+ // Format must be "device@<major>.<minor>/<type>/<id>"
+
+#define ERROR_MSG_PREFIX "%s: Invalid device name '%s'. " \
+ "Should match 'device@<major>.<minor>/<type>/<id>' - "
+
+ if (!major || !minor || !type || !id) return INVALID_OPERATION;
+
+ // Verify starting prefix
+ const char expectedPrefix[] = "device@";
+
+ if (name.find(expectedPrefix) != 0) {
+ ALOGE(ERROR_MSG_PREFIX
+ "does not start with '%s'",
+ __FUNCTION__, name.c_str(), expectedPrefix);
+ return BAD_VALUE;
+ }
+
+ // Extract major/minor versions
+ constexpr std::string::size_type atIdx = sizeof(expectedPrefix) - 2;
+ std::string::size_type dotIdx = name.find('.', atIdx);
+ if (dotIdx == std::string::npos) {
+ ALOGE(ERROR_MSG_PREFIX
+ "does not have @<major>. version section",
+ __FUNCTION__, name.c_str());
+ return BAD_VALUE;
+ }
+ std::string::size_type typeSlashIdx = name.find('/', dotIdx);
+ if (typeSlashIdx == std::string::npos) {
+ ALOGE(ERROR_MSG_PREFIX
+ "does not have .<minor>/ version section",
+ __FUNCTION__, name.c_str());
+ return BAD_VALUE;
+ }
+
+ char *endPtr;
+ errno = 0;
+ long majorVal = strtol(name.c_str() + atIdx + 1, &endPtr, 10);
+ if (errno != 0) {
+ ALOGE(ERROR_MSG_PREFIX
+ "cannot parse major version: %s (%d)",
+ __FUNCTION__, name.c_str(), strerror(errno), errno);
+ return BAD_VALUE;
+ }
+ if (endPtr != name.c_str() + dotIdx) {
+ ALOGE(ERROR_MSG_PREFIX
+ "major version has unexpected length",
+ __FUNCTION__, name.c_str());
+ return BAD_VALUE;
+ }
+ long minorVal = strtol(name.c_str() + dotIdx + 1, &endPtr, 10);
+ if (errno != 0) {
+ ALOGE(ERROR_MSG_PREFIX
+ "cannot parse minor version: %s (%d)",
+ __FUNCTION__, name.c_str(), strerror(errno), errno);
+ return BAD_VALUE;
+ }
+ if (endPtr != name.c_str() + typeSlashIdx) {
+ ALOGE(ERROR_MSG_PREFIX
+ "minor version has unexpected length",
+ __FUNCTION__, name.c_str());
+ return BAD_VALUE;
+ }
+ if (majorVal < 0 || majorVal > UINT16_MAX || minorVal < 0 || minorVal > UINT16_MAX) {
+ ALOGE(ERROR_MSG_PREFIX
+ "major/minor version is out of range of uint16_t: %ld.%ld",
+ __FUNCTION__, name.c_str(), majorVal, minorVal);
+ return BAD_VALUE;
+ }
+
+ // Extract type and id
+
+ std::string::size_type instanceSlashIdx = name.find('/', typeSlashIdx + 1);
+ if (instanceSlashIdx == std::string::npos) {
+ ALOGE(ERROR_MSG_PREFIX
+ "does not have /<type>/ component",
+ __FUNCTION__, name.c_str());
+ return BAD_VALUE;
+ }
+ std::string typeVal = name.substr(typeSlashIdx + 1, instanceSlashIdx - typeSlashIdx - 1);
+
+ if (instanceSlashIdx == name.size() - 1) {
+ ALOGE(ERROR_MSG_PREFIX
+ "does not have an /<id> component",
+ __FUNCTION__, name.c_str());
+ return BAD_VALUE;
+ }
+ std::string idVal = name.substr(instanceSlashIdx + 1);
+
+#undef ERROR_MSG_PREFIX
+
+ *major = static_cast<uint16_t>(majorVal);
+ *minor = static_cast<uint16_t>(minorVal);
+ *type = typeVal;
+ *id = idVal;
+
+ return OK;
+}
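+
+// Example (illustration only): parseDeviceName("device@3.2/legacy/0", ...)
+// yields major = 3, minor = 2, type = "legacy", id = "0"; any name missing
+// the '@', '.', or '/' separators returns BAD_VALUE.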
+
+
+
+CameraProviderManager::ProviderInfo::~ProviderInfo() {
+ // Destruction of ProviderInfo is only supposed to happen when the respective
+ // CameraProvider interface dies, so do not unregister callbacks.
+
+}
+
+status_t CameraProviderManager::mapToStatusT(const Status& s) {
+ switch(s) {
+ case Status::OK:
+ return OK;
+ case Status::ILLEGAL_ARGUMENT:
+ return BAD_VALUE;
+ case Status::CAMERA_IN_USE:
+ return -EBUSY;
+ case Status::MAX_CAMERAS_IN_USE:
+ return -EUSERS;
+ case Status::METHOD_NOT_SUPPORTED:
+ return UNKNOWN_TRANSACTION;
+ case Status::OPERATION_NOT_SUPPORTED:
+ return INVALID_OPERATION;
+ case Status::CAMERA_DISCONNECTED:
+ return DEAD_OBJECT;
+ case Status::INTERNAL_ERROR:
+ return INVALID_OPERATION;
+ }
+ ALOGW("Unexpected HAL status code %d", s);
+ return INVALID_OPERATION;
+}
+
+const char* CameraProviderManager::statusToString(const Status& s) {
+ switch(s) {
+ case Status::OK:
+ return "OK";
+ case Status::ILLEGAL_ARGUMENT:
+ return "ILLEGAL_ARGUMENT";
+ case Status::CAMERA_IN_USE:
+ return "CAMERA_IN_USE";
+ case Status::MAX_CAMERAS_IN_USE:
+ return "MAX_CAMERAS_IN_USE";
+ case Status::METHOD_NOT_SUPPORTED:
+ return "METHOD_NOT_SUPPORTED";
+ case Status::OPERATION_NOT_SUPPORTED:
+ return "OPERATION_NOT_SUPPORTED";
+ case Status::CAMERA_DISCONNECTED:
+ return "CAMERA_DISCONNECTED";
+ case Status::INTERNAL_ERROR:
+ return "INTERNAL_ERROR";
+ }
+ ALOGW("Unexpected HAL status code %d", s);
+ return "UNKNOWN_ERROR";
+}
+
+const char* CameraProviderManager::deviceStatusToString(const CameraDeviceStatus& s) {
+ switch(s) {
+ case CameraDeviceStatus::NOT_PRESENT:
+ return "NOT_PRESENT";
+ case CameraDeviceStatus::PRESENT:
+ return "PRESENT";
+ case CameraDeviceStatus::ENUMERATING:
+ return "ENUMERATING";
+ }
+ ALOGW("Unexpected HAL device status code %d", s);
+ return "UNKNOWN_STATUS";
+}
+
+const char* CameraProviderManager::torchStatusToString(const TorchModeStatus& s) {
+ switch(s) {
+ case TorchModeStatus::NOT_AVAILABLE:
+ return "NOT_AVAILABLE";
+ case TorchModeStatus::AVAILABLE_OFF:
+ return "AVAILABLE_OFF";
+ case TorchModeStatus::AVAILABLE_ON:
+ return "AVAILABLE_ON";
+ }
+ ALOGW("Unexpected HAL torch mode status code %d", s);
+ return "UNKNOWN_STATUS";
+}
+
+
+status_t HidlVendorTagDescriptor::createDescriptorFromHidl(
+ const hardware::hidl_vec<hardware::camera::common::V1_0::VendorTagSection>& vts,
+ /*out*/
+ sp<VendorTagDescriptor>& descriptor) {
+
+ int tagCount = 0;
+
+ for (size_t s = 0; s < vts.size(); s++) {
+ tagCount += vts[s].tags.size();
+ }
+
+ if (tagCount < 0 || tagCount > INT32_MAX) {
+ ALOGE("%s: tag count %d from vendor tag sections is invalid.", __FUNCTION__, tagCount);
+ return BAD_VALUE;
+ }
+
+ Vector<uint32_t> tagArray;
+ LOG_ALWAYS_FATAL_IF(tagArray.resize(tagCount) != tagCount,
+ "%s: too many (%u) vendor tags defined.", __FUNCTION__, tagCount);
+
+
+ sp<HidlVendorTagDescriptor> desc = new HidlVendorTagDescriptor();
+ desc->mTagCount = tagCount;
+
+ SortedVector<String8> sections;
+ KeyedVector<uint32_t, String8> tagToSectionMap;
+
+ int idx = 0;
+ for (size_t s = 0; s < vts.size(); s++) {
+ const hardware::camera::common::V1_0::VendorTagSection& section = vts[s];
+ const char *sectionName = section.sectionName.c_str();
+ if (sectionName == NULL) {
+ ALOGE("%s: no section name defined for vendor tag section %zu.", __FUNCTION__, s);
+ return BAD_VALUE;
+ }
+ String8 sectionString(sectionName);
+ sections.add(sectionString);
+
+ for (size_t j = 0; j < section.tags.size(); j++) {
+ uint32_t tag = section.tags[j].tagId;
+ if (tag < CAMERA_METADATA_VENDOR_TAG_BOUNDARY) {
+ ALOGE("%s: vendor tag %d not in vendor tag section.", __FUNCTION__, tag);
+ return BAD_VALUE;
+ }
+
+ tagArray.editItemAt(idx++) = section.tags[j].tagId;
+
+ const char *tagName = section.tags[j].tagName.c_str();
+ if (tagName == NULL) {
+ ALOGE("%s: no tag name defined for vendor tag %d.", __FUNCTION__, tag);
+ return BAD_VALUE;
+ }
+ desc->mTagToNameMap.add(tag, String8(tagName));
+ tagToSectionMap.add(tag, sectionString);
+
+ int tagType = (int) section.tags[j].tagType;
+ if (tagType < 0 || tagType >= NUM_TYPES) {
+ ALOGE("%s: tag type %d from vendor ops does not exist.", __FUNCTION__, tagType);
+ return BAD_VALUE;
+ }
+ desc->mTagToTypeMap.add(tag, tagType);
+ }
+ }
+
+ desc->mSections = sections;
+
+ for (size_t i = 0; i < tagArray.size(); ++i) {
+ uint32_t tag = tagArray[i];
+ String8 sectionString = tagToSectionMap.valueFor(tag);
+
+ // Set up tag to section index map
+ ssize_t index = sections.indexOf(sectionString);
+ LOG_ALWAYS_FATAL_IF(index < 0, "index %zd must be non-negative", index);
+ desc->mTagToSectionMap.add(tag, static_cast<uint32_t>(index));
+
+ // Set up reverse mapping
+ ssize_t reverseIndex = -1;
+ if ((reverseIndex = desc->mReverseMapping.indexOfKey(sectionString)) < 0) {
+ KeyedVector<String8, uint32_t>* nameMapper = new KeyedVector<String8, uint32_t>();
+ reverseIndex = desc->mReverseMapping.add(sectionString, nameMapper);
+ }
+ desc->mReverseMapping[reverseIndex]->add(desc->mTagToNameMap.valueFor(tag), tag);
+ }
+
+ descriptor = desc;
+ return OK;
+}
+
+
+} // namespace android
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
new file mode 100644
index 0000000..f21e07d
--- /dev/null
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -0,0 +1,379 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERAPROVIDER_H
+#define ANDROID_SERVERS_CAMERA_CAMERAPROVIDER_H
+
+#include <vector>
+#include <string>
+#include <mutex>
+#include <condition_variable>
+
+#include <camera/CameraParameters2.h>
+#include <camera/CameraMetadata.h>
+#include <camera/CameraBase.h>
+#include <utils/Errors.h>
+#include <android/hardware/camera/common/1.0/types.h>
+#include <android/hardware/camera/provider/2.4/ICameraProvider.h>
+//#include <android/hardware/camera/provider/2.4/ICameraProviderCallbacks.h>
+#include <android/hidl/manager/1.0/IServiceNotification.h>
+#include <camera/VendorTagDescriptor.h>
+
+namespace android {
+
+/**
+ * The vendor tag descriptor class that takes HIDL vendor tag information as
+ * input. Not part of VendorTagDescriptor class because that class is used
+ * in AIDL generated sources which don't have access to HIDL headers.
+ */
+class HidlVendorTagDescriptor : public VendorTagDescriptor {
+public:
+ /**
+ * Create a VendorTagDescriptor object from the HIDL VendorTagSection
+ * vector.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ static status_t createDescriptorFromHidl(
+ const hardware::hidl_vec<hardware::camera::common::V1_0::VendorTagSection>& vts,
+ /*out*/
+ sp<VendorTagDescriptor>& descriptor);
+};
+
+/**
+ * A manager for all camera providers available on an Android device.
+ *
+ * Responsible for enumerating providers and the individual camera devices
+ * they export, both at startup and as providers and devices are added/removed.
+ *
+ * Provides methods for requesting information about individual devices and for
+ * opening them for active use.
+ *
+ */
+class CameraProviderManager : virtual public hidl::manager::V1_0::IServiceNotification {
+public:
+
+ ~CameraProviderManager();
+
+ // Tiny proxy for the static methods in a HIDL interface that communicate with the hardware
+ // service manager, to be replaceable in unit tests with a fake.
+ struct ServiceInteractionProxy {
+ virtual bool registerForNotifications(
+ const std::string &serviceName,
+ const sp<hidl::manager::V1_0::IServiceNotification>
+ ¬ification) = 0;
+ virtual sp<hardware::camera::provider::V2_4::ICameraProvider> getService(
+ const std::string &serviceName) = 0;
+ virtual ~ServiceInteractionProxy() {}
+ };
+
+ // Standard use case - call into the normal generated static methods which invoke
+ // the real hardware service manager
+ struct HardwareServiceInteractionProxy : public ServiceInteractionProxy {
+ virtual bool registerForNotifications(
+ const std::string &serviceName,
+ const sp<hidl::manager::V1_0::IServiceNotification>
+ ¬ification) override {
+ return hardware::camera::provider::V2_4::ICameraProvider::registerForNotifications(
+ serviceName, notification);
+ }
+ virtual sp<hardware::camera::provider::V2_4::ICameraProvider> getService(
+ const std::string &serviceName) override {
+ return hardware::camera::provider::V2_4::ICameraProvider::getService(serviceName);
+ }
+ };
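+
+ // A unit test can swap in a fake proxy instead of going through the
+ // hardware service manager, e.g. (sketch only; the names below are
+ // hypothetical):
+ //
+ //   struct TestInteractionProxy : public ServiceInteractionProxy {
+ //       bool registerForNotifications(const std::string&,
+ //               const sp<hidl::manager::V1_0::IServiceNotification>&) override {
+ //           return true;
+ //       }
+ //       sp<hardware::camera::provider::V2_4::ICameraProvider> getService(
+ //               const std::string&) override { return mFakeProvider; }
+ //       sp<hardware::camera::provider::V2_4::ICameraProvider> mFakeProvider;
+ //   };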
+
+ /**
+ * Listener interface for device/torch status changes
+ */
+ struct StatusListener : virtual public RefBase {
+ ~StatusListener() {}
+
+ virtual void onDeviceStatusChanged(const String8 &cameraId,
+ hardware::camera::common::V1_0::CameraDeviceStatus newStatus) = 0;
+ virtual void onTorchStatusChanged(const String8 &cameraId,
+ hardware::camera::common::V1_0::TorchModeStatus newStatus) = 0;
+ };
+
+ /**
+ * Initialize the manager and give it a status listener; optionally accepts a service
+ * interaction proxy.
+ *
+ * The default proxy communicates via the hardware service manager; alternate proxies can be
+ * used for testing. The lifetime of the proxy must exceed the lifetime of the manager.
+ */
+ status_t initialize(wp<StatusListener> listener,
+ ServiceInteractionProxy *proxy = &sHardwareServiceInteractionProxy);
+
+ /**
+ * Retrieve the total number of available cameras. This value may change dynamically as cameras
+ * are added or removed.
+ */
+ int getCameraCount() const;
+
+ /**
+ * Retrieve the number of 'standard' cameras; these are internal and
+ * backwards-compatible. This is the set of cameras that will be
+ * accessible via the old camera API, with IDs in the range
+ * [0, getStandardCameraCount()-1]. This value is not expected to change dynamically.
+ */
+ int getStandardCameraCount() const;
+
+ std::vector<std::string> getCameraDeviceIds() const;
+
+ /**
+ * Return true if a device with a given ID and major version exists
+ */
+ bool isValidDevice(const std::string &id, uint16_t majorVersion) const;
+
+ /**
+ * Return true if a device with a given ID has a flash unit. Returns false
+ * for devices that are unknown.
+ */
+ bool hasFlashUnit(const std::string &id) const;
+
+ /**
+ * Return the resource cost of this camera device
+ */
+ status_t getResourceCost(const std::string &id,
+ hardware::camera::common::V1_0::CameraResourceCost* cost) const;
+
+ /**
+ * Return the old camera API camera info
+ */
+ status_t getCameraInfo(const std::string &id,
+ hardware::CameraInfo* info) const;
+
+ /**
+ * Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
+ * not have a v3 or newer HAL version.
+ */
+ status_t getCameraCharacteristics(const std::string &id,
+ CameraMetadata* characteristics) const;
+
+ /**
+ * Return the highest supported device interface version for this ID
+ */
+ status_t getHighestSupportedVersion(const std::string &id,
+ hardware::hidl_version *v);
+
+ /**
+ * Turn on or off the flashlight on a given camera device.
+ * May fail if the device is in active use, or if the device doesn't exist, etc.
+ */
+ status_t setTorchMode(const std::string &id, bool enabled);
+
+ /**
+ * Setup vendor tags for all registered providers
+ */
+ status_t setUpVendorTags();
+
+ /**
+ * Open an active session to a camera device.
+ *
+ * This fully powers on the camera device hardware, and returns a handle to a
+ * session to be used for hardware configuration and operation.
+ */
+ status_t openSession(const std::string &id,
+ const sp<hardware::camera::device::V3_2::ICameraDeviceCallback>& callback,
+ /*out*/
+ sp<hardware::camera::device::V3_2::ICameraDeviceSession> *session);
+
+ status_t openSession(const std::string &id,
+ const sp<hardware::camera::device::V1_0::ICameraDeviceCallback>& callback,
+ /*out*/
+ sp<hardware::camera::device::V1_0::ICameraDevice> *session);
+
+ /**
+ * IServiceNotification::onRegistration
+ * Invoked by the hardware service manager when a new camera provider is registered
+ */
+ virtual hardware::Return<void> onRegistration(const hardware::hidl_string& fqName,
+ const hardware::hidl_string& name,
+ bool preexisting) override;
+
+ /**
+ * Dump out information about available providers and devices
+ */
+ status_t dump(int fd, const Vector<String16>& args);
+
+ /**
+ * Conversion methods between HAL Status and status_t and strings
+ */
+ static status_t mapToStatusT(const hardware::camera::common::V1_0::Status& s);
+ static const char* statusToString(const hardware::camera::common::V1_0::Status& s);
+
+private:
+ // All private members, unless otherwise noted, expect mInterfaceMutex to be locked before use
+ mutable std::mutex mInterfaceMutex;
+
+ std::condition_variable mProviderRegistered;
+
+ // the status listener update callbacks will lock mStatusListenerMutex
+ mutable std::mutex mStatusListenerMutex;
+ wp<StatusListener> mListener;
+ ServiceInteractionProxy* mServiceProxy;
+
+ static HardwareServiceInteractionProxy sHardwareServiceInteractionProxy;
+
+ struct ProviderInfo : virtual public hardware::camera::provider::V2_4::ICameraProviderCallback {
+ const std::string mProviderName;
+ const sp<hardware::camera::provider::V2_4::ICameraProvider> mInterface;
+
+ ProviderInfo(const std::string &providerName,
+ sp<hardware::camera::provider::V2_4::ICameraProvider>& interface,
+ CameraProviderManager *manager);
+ ~ProviderInfo();
+
+ status_t initialize();
+
+ const std::string& getType() const;
+
+ status_t addDevice(const std::string& name,
+ hardware::camera::common::V1_0::CameraDeviceStatus initialStatus =
+ hardware::camera::common::V1_0::CameraDeviceStatus::PRESENT,
+ /*out*/ std::string *parsedId = nullptr);
+
+ status_t dump(int fd, const Vector<String16>& args) const;
+
+ // ICameraProviderCallback interface - these lock the parent's mStatusListenerMutex
+ virtual hardware::Return<void> cameraDeviceStatusChange(
+ const hardware::hidl_string& cameraDeviceName,
+ hardware::camera::common::V1_0::CameraDeviceStatus newStatus) override;
+ virtual hardware::Return<void> torchModeStatusChange(
+ const hardware::hidl_string& cameraDeviceName,
+ hardware::camera::common::V1_0::TorchModeStatus newStatus) override;
+
+ // Basic device information, common to all camera devices
+ struct DeviceInfo {
+ const std::string mName; // Full instance name
+ const std::string mId; // ID section of full name
+ const hardware::hidl_version mVersion;
+
+ const hardware::camera::common::V1_0::CameraResourceCost mResourceCost;
+
+ hardware::camera::common::V1_0::CameraDeviceStatus mStatus;
+
+ bool hasFlashUnit() const { return mHasFlashUnit; }
+ virtual status_t setTorchMode(bool enabled) = 0;
+ virtual status_t getCameraInfo(hardware::CameraInfo *info) const = 0;
+ virtual status_t getCameraCharacteristics(CameraMetadata *characteristics) const {
+ (void) characteristics;
+ return INVALID_OPERATION;
+ }
+
+ DeviceInfo(const std::string& name, const std::string &id,
+ const hardware::hidl_version& version,
+ const hardware::camera::common::V1_0::CameraResourceCost& resourceCost) :
+ mName(name), mId(id), mVersion(version), mResourceCost(resourceCost),
+ mStatus(hardware::camera::common::V1_0::CameraDeviceStatus::PRESENT),
+ mHasFlashUnit(false) {}
+ virtual ~DeviceInfo();
+ protected:
+ bool mHasFlashUnit;
+
+ template<class InterfaceT>
+ static status_t setTorchMode(InterfaceT& interface, bool enabled);
+ };
+ std::vector<std::unique_ptr<DeviceInfo>> mDevices;
+
+ // HALv1-specific camera fields, including the actual device interface
+ struct DeviceInfo1 : public DeviceInfo {
+ typedef hardware::camera::device::V1_0::ICameraDevice InterfaceT;
+ const sp<InterfaceT> mInterface;
+
+ virtual status_t setTorchMode(bool enabled) override;
+ virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
+
+ DeviceInfo1(const std::string& name, const std::string &id,
+ uint16_t minorVersion,
+ const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
+ sp<InterfaceT> interface);
+ virtual ~DeviceInfo1();
+ private:
+ CameraParameters2 mDefaultParameters;
+ };
+
+ // HALv3-specific camera fields, including the actual device interface
+ struct DeviceInfo3 : public DeviceInfo {
+ typedef hardware::camera::device::V3_2::ICameraDevice InterfaceT;
+ const sp<InterfaceT> mInterface;
+
+ virtual status_t setTorchMode(bool enabled) override;
+ virtual status_t getCameraInfo(hardware::CameraInfo *info) const override;
+ virtual status_t getCameraCharacteristics(
+ CameraMetadata *characteristics) const override;
+
+ DeviceInfo3(const std::string& name, const std::string &id,
+ uint16_t minorVersion,
+ const hardware::camera::common::V1_0::CameraResourceCost& resourceCost,
+ sp<InterfaceT> interface);
+ virtual ~DeviceInfo3();
+ private:
+ CameraMetadata mCameraCharacteristics;
+ };
+
+ private:
+ std::string mType;
+ uint32_t mId;
+
+ CameraProviderManager *mManager;
+
+ // Templated method to instantiate the right kind of DeviceInfo and call the
+ // right CameraProvider getCameraDeviceInterface_* method.
+ template<class DeviceInfoT>
+ std::unique_ptr<DeviceInfo> initializeDeviceInfo(const std::string &name,
+ const std::string &id, uint16_t minorVersion) const;
+
+ // Helper for initializeDeviceInfo to use the right CameraProvider get method.
+ template<class InterfaceT>
+ sp<InterfaceT> getDeviceInterface(const std::string &name) const;
+
+ // Parse provider instance name for type and id
+ static status_t parseProviderName(const std::string& name,
+ std::string *type, uint32_t *id);
+
+ // Parse device instance name for device version, type, and id.
+ static status_t parseDeviceName(const std::string& name,
+ uint16_t *major, uint16_t *minor, std::string *type, std::string *id);
+ };
+
+ // Utility to find a DeviceInfo by ID; pointer is only valid while mInterfaceMutex is held
+ // and the calling code doesn't mutate the list of providers or their lists of devices.
+ // Finds the first device of the given ID that falls within the requested version range
+ // minVersion <= deviceVersion < maxVersion
+ // No guarantees on the order of traversal
+ ProviderInfo::DeviceInfo* findDeviceInfoLocked(const std::string& id,
+ hardware::hidl_version minVersion = hardware::hidl_version{0,0},
+ hardware::hidl_version maxVersion = hardware::hidl_version{1000,0}) const;
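+ // For example (illustration): findDeviceInfoLocked("0", {3,0}, {4,0})
+ // matches camera "0" only if it advertises HAL device major version 3,
+ // which is how the HALv3 openSession() overload selects a DeviceInfo3.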
+
+ status_t addProvider(const std::string& newProvider, bool expected = true);
+ status_t removeProvider(const std::string& provider);
+
+ bool isValidDeviceLocked(const std::string &id, uint16_t majorVersion) const;
+
+ std::vector<sp<ProviderInfo>> mProviders;
+
+ static const char* deviceStatusToString(
+ const hardware::camera::common::V1_0::CameraDeviceStatus&);
+ static const char* torchStatusToString(
+ const hardware::camera::common::V1_0::TorchModeStatus&);
+
+};
+
+} // namespace android
+
+#endif
diff --git a/services/camera/libcameraservice/common/FrameProcessorBase.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 2ef3057..9e78f88 100644
--- a/services/camera/libcameraservice/common/FrameProcessorBase.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -123,7 +123,7 @@
ATRACE_CALL();
CaptureResult result;
- ALOGV("%s: Camera %d: Process new frames", __FUNCTION__, device->getId());
+ ALOGV("%s: Camera %s: Process new frames", __FUNCTION__, device->getId().string());
while ( (res = device->getNextResult(&result)) == OK) {
@@ -133,8 +133,8 @@
entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
if (entry.count == 0) {
- ALOGE("%s: Camera %d: Error reading frame number",
- __FUNCTION__, device->getId());
+ ALOGE("%s: Camera %s: Error reading frame number",
+ __FUNCTION__, device->getId().string());
break;
}
ATRACE_INT("cam2_frame", entry.data.i32[0]);
@@ -149,8 +149,8 @@
}
}
if (res != NOT_ENOUGH_DATA) {
- ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
- __FUNCTION__, device->getId(), strerror(-res), res);
+ ALOGE("%s: Camera %s: Error getting next frame: %s (%d)",
+ __FUNCTION__, device->getId().string(), strerror(-res), res);
return;
}
@@ -159,8 +159,8 @@
bool FrameProcessorBase::processSingleFrame(CaptureResult &result,
const sp<CameraDeviceBase> &device) {
- ALOGV("%s: Camera %d: Process single frame (is empty? %d)",
- __FUNCTION__, device->getId(), result.mMetadata.isEmpty());
+ ALOGV("%s: Camera %s: Process single frame (is empty? %d)",
+ __FUNCTION__, device->getId().string(), result.mMetadata.isEmpty());
return processListeners(result, device) == OK;
}
@@ -178,8 +178,8 @@
entry = result.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
if (entry.count != 0 &&
entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
- ALOGV("%s: Camera %d: This is a partial result",
- __FUNCTION__, device->getId());
+ ALOGV("%s: Camera %s: This is a partial result",
+ __FUNCTION__, device->getId().string());
isPartialResult = true;
}
}
@@ -190,7 +190,7 @@
// include CaptureResultExtras.
entry = result.mMetadata.find(ANDROID_REQUEST_ID);
if (entry.count == 0) {
- ALOGE("%s: Camera %d: Error reading frame id", __FUNCTION__, device->getId());
+ ALOGE("%s: Camera %s: Error reading frame id", __FUNCTION__, device->getId().string());
return BAD_VALUE;
}
int32_t requestId = entry.data.i32[0];
@@ -215,8 +215,8 @@
item++;
}
}
- ALOGV("%s: Camera %d: Got %zu range listeners out of %zu", __FUNCTION__,
- device->getId(), listeners.size(), mRangeListeners.size());
+ ALOGV("%s: Camera %s: Got %zu range listeners out of %zu", __FUNCTION__,
+ device->getId().string(), listeners.size(), mRangeListeners.size());
List<sp<FilteredListener> >::iterator item = listeners.begin();
for (; item != listeners.end(); item++) {
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
index 952bae1..c8210b7 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
@@ -27,6 +27,9 @@
#include <system/window.h>
#include <hardware/camera.h>
+#include <common/CameraModule.h>
+#include <common/CameraProviderManager.h>
+
namespace android {
typedef void (*notify_callback)(int32_t msgType,
@@ -124,6 +127,12 @@
return rc;
}
+ status_t initialize(sp<CameraProviderManager> manager) {
+ (void) manager;
+ ALOGE("%s: Not supported yet", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
/** Set the ANativeWindow to which preview frames are sent */
status_t setPreviewWindow(const sp<ANativeWindow>& buf)
{
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 3705e8f..1675584 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -26,7 +26,7 @@
#endif
// Convenience macro for transient errors
-#define CLOGE(fmt, ...) ALOGE("Camera %d: %s: " fmt, mId, __FUNCTION__, \
+#define CLOGE(fmt, ...) ALOGE("Camera %s: %s: " fmt, mId.string(), __FUNCTION__, \
##__VA_ARGS__)
// Convenience macros for transitioning to the error state
@@ -53,16 +53,18 @@
#include "device3/Camera3InputStream.h"
#include "device3/Camera3ZslStream.h"
#include "device3/Camera3DummyStream.h"
+#include "device3/Camera3SharedOutputStream.h"
#include "CameraService.h"
using namespace android::camera3;
+using namespace android::hardware::camera;
+using namespace android::hardware::camera::device::V3_2;
namespace android {
-Camera3Device::Camera3Device(int id):
+Camera3Device::Camera3Device(const String8 &id):
mId(id),
mIsConstrainedHighSpeedConfiguration(false),
- mHal3Device(NULL),
mStatus(STATUS_UNINITIALIZED),
mStatusWaiters(0),
mUsePartialResult(false),
@@ -77,17 +79,17 @@
ATRACE_CALL();
camera3_callback_ops::notify = &sNotify;
camera3_callback_ops::process_capture_result = &sProcessCaptureResult;
- ALOGV("%s: Created device for camera %d", __FUNCTION__, id);
+ ALOGV("%s: Created device for camera %s", __FUNCTION__, mId.string());
}
Camera3Device::~Camera3Device()
{
ATRACE_CALL();
- ALOGV("%s: Tearing down for camera id %d", __FUNCTION__, mId);
+ ALOGV("%s: Tearing down for camera id %s", __FUNCTION__, mId.string());
disconnect();
}
-int Camera3Device::getId() const {
+const String8& Camera3Device::getId() const {
return mId;
}
@@ -101,7 +103,7 @@
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
- ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId);
+ ALOGV("%s: Initializing device for camera %s", __FUNCTION__, mId.string());
if (mStatus != STATUS_UNINITIALIZED) {
CLOGE("Already initialized!");
return INVALID_OPERATION;
@@ -110,12 +112,11 @@
/** Open HAL device */
status_t res;
- String8 deviceName = String8::format("%d", mId);
camera3_device_t *device;
- ATRACE_BEGIN("camera3->open");
- res = module->open(deviceName.string(),
+ ATRACE_BEGIN("CameraHal::open");
+ res = module->open(mId.string(),
reinterpret_cast<hw_device_t**>(&device));
ATRACE_END();
@@ -125,17 +126,17 @@
}
/** Cross-check device version */
- if (device->common.version < CAMERA_DEVICE_API_VERSION_3_0) {
+ if (device->common.version < CAMERA_DEVICE_API_VERSION_3_2) {
SET_ERR_L("Could not open camera: "
"Camera device should be at least %x, reports %x instead",
- CAMERA_DEVICE_API_VERSION_3_0,
+ CAMERA_DEVICE_API_VERSION_3_2,
device->common.version);
device->common.close(&device->common);
return BAD_VALUE;
}
camera_info info;
- res = module->getCameraInfo(mId, &info);
+ res = module->getCameraInfo(atoi(mId), &info);
if (res != OK) return res;
if (info.device_version != device->common.version) {
@@ -148,7 +149,7 @@
/** Initialize device with callback functions */
- ATRACE_BEGIN("camera3->initialize");
+ ATRACE_BEGIN("CameraHal::initialize");
res = device->ops->initialize(device, this);
ATRACE_END();
@@ -156,16 +157,64 @@
SET_ERR_L("Unable to initialize HAL device: %s (%d)",
strerror(-res), res);
device->common.close(&device->common);
- return BAD_VALUE;
+ return res;
}
+ /** Everything is good to go */
+
+ mDeviceVersion = device->common.version;
+ mDeviceInfo = info.static_camera_characteristics;
+ mInterface = std::make_unique<HalInterface>(device);
+
+ return initializeCommonLocked();
+}
+
+status_t Camera3Device::initialize(sp<CameraProviderManager> manager) {
+ ATRACE_CALL();
+ Mutex::Autolock il(mInterfaceLock);
+ Mutex::Autolock l(mLock);
+
+ ALOGV("%s: Initializing HIDL device for camera %s", __FUNCTION__, mId.string());
+ if (mStatus != STATUS_UNINITIALIZED) {
+ CLOGE("Already initialized!");
+ return INVALID_OPERATION;
+ }
+ if (manager == nullptr) return INVALID_OPERATION;
+
+ sp<ICameraDeviceSession> session;
+ ATRACE_BEGIN("CameraHal::openSession");
+ status_t res = manager->openSession(String8::std_string(mId), this,
+ /*out*/ &session);
+ ATRACE_END();
+ if (res != OK) {
+ SET_ERR_L("Could not open camera session: %s (%d)", strerror(-res), res);
+ return res;
+ }
+
+ res = manager->getCameraCharacteristics(String8::std_string(mId), &mDeviceInfo);
+ if (res != OK) {
+ SET_ERR_L("Could not retrive camera characteristics: %s (%d)", strerror(-res), res);
+ session->close();
+ return res;
+ }
+
+ // TODO: camera service will absorb 3_2/3_3/3_4 differences in the future
+ // for now use 3_4 to keep legacy devices working
+ mDeviceVersion = CAMERA_DEVICE_API_VERSION_3_4;
+ mInterface = std::make_unique<HalInterface>(session);
+
+ return initializeCommonLocked();
+}
+
+status_t Camera3Device::initializeCommonLocked() {
+
/** Start up status tracker thread */
mStatusTracker = new StatusTracker(this);
- res = mStatusTracker->run(String8::format("C3Dev-%d-Status", mId).string());
+ status_t res = mStatusTracker->run(String8::format("C3Dev-%s-Status", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start status tracking thread: %s (%d)",
strerror(-res), res);
- device->common.close(&device->common);
+ mInterface->close();
mStatusTracker.clear();
return res;
}
@@ -177,38 +226,31 @@
mBufferManager = new Camera3BufferManager();
bool aeLockAvailable = false;
- camera_metadata_ro_entry aeLockAvailableEntry;
- res = find_camera_metadata_ro_entry(info.static_camera_characteristics,
- ANDROID_CONTROL_AE_LOCK_AVAILABLE, &aeLockAvailableEntry);
- if (res == OK && aeLockAvailableEntry.count > 0) {
+ camera_metadata_entry aeLockAvailableEntry = mDeviceInfo.find(
+ ANDROID_CONTROL_AE_LOCK_AVAILABLE);
+ if (aeLockAvailableEntry.count > 0) {
aeLockAvailable = (aeLockAvailableEntry.data.u8[0] ==
ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE);
}
/** Start up request queue thread */
- mRequestThread = new RequestThread(this, mStatusTracker, device, aeLockAvailable);
- res = mRequestThread->run(String8::format("C3Dev-%d-ReqQueue", mId).string());
+ mRequestThread = new RequestThread(this, mStatusTracker, mInterface.get(), mDeviceVersion,
+ aeLockAvailable);
+ res = mRequestThread->run(String8::format("C3Dev-%s-ReqQueue", mId.string()).string());
if (res != OK) {
SET_ERR_L("Unable to start request queue thread: %s (%d)",
strerror(-res), res);
- device->common.close(&device->common);
+ mInterface->close();
mRequestThread.clear();
return res;
}
mPreparerThread = new PreparerThread();
- /** Everything is good to go */
-
- mDeviceVersion = device->common.version;
- mDeviceInfo = info.static_camera_characteristics;
- mHal3Device = device;
-
// Determine whether we need to derive sensitivity boost values for older devices.
// If post-RAW sensitivity boost range is listed, so should post-raw sensitivity control
// be listed (as the default value 100)
- if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_4 &&
- mDeviceInfo.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE)) {
+ if (mDeviceInfo.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE)) {
mDerivePostRawSensKey = true;
}
@@ -312,7 +354,7 @@
mStatusTracker->join();
}
- camera3_device_t *hal3Device;
+ HalInterface* interface;
{
Mutex::Autolock l(mLock);
@@ -320,20 +362,16 @@
mStatusTracker.clear();
mBufferManager.clear();
- hal3Device = mHal3Device;
+ interface = mInterface.get();
}
// Call close without internal mutex held, as the HAL close may need to
// wait on assorted callbacks,etc, to complete before it can return.
- if (hal3Device != NULL) {
- ATRACE_BEGIN("camera3->close");
- hal3Device->common.close(&hal3Device->common);
- ATRACE_END();
- }
+ interface->close();
{
Mutex::Autolock l(mLock);
- mHal3Device = NULL;
+ mInterface->clear();
internalUpdateStatusLocked(STATUS_UNINITIALIZED);
}
@@ -447,12 +485,80 @@
}
}
+hardware::graphics::common::V1_0::PixelFormat Camera3Device::mapToPixelFormat(
+ int frameworkFormat) {
+ return (hardware::graphics::common::V1_0::PixelFormat) frameworkFormat;
+}
+
+DataspaceFlags Camera3Device::mapToHidlDataspace(
+ android_dataspace dataSpace) {
+ return dataSpace;
+}
+
+ConsumerUsageFlags Camera3Device::mapToConsumerUsage(
+ uint32_t usage) {
+ return usage;
+}
+
+StreamRotation Camera3Device::mapToStreamRotation(camera3_stream_rotation_t rotation) {
+ switch (rotation) {
+ case CAMERA3_STREAM_ROTATION_0:
+ return StreamRotation::ROTATION_0;
+ case CAMERA3_STREAM_ROTATION_90:
+ return StreamRotation::ROTATION_90;
+ case CAMERA3_STREAM_ROTATION_180:
+ return StreamRotation::ROTATION_180;
+ case CAMERA3_STREAM_ROTATION_270:
+ return StreamRotation::ROTATION_270;
+ }
+ ALOGE("%s: Unknown stream rotation %d", __FUNCTION__, rotation);
+ return StreamRotation::ROTATION_0;
+}
+
+StreamConfigurationMode Camera3Device::mapToStreamConfigurationMode(
+ camera3_stream_configuration_mode_t operationMode) {
+ switch(operationMode) {
+ case CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE:
+ return StreamConfigurationMode::NORMAL_MODE;
+ case CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE:
+ return StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE;
+ case CAMERA3_VENDOR_STREAM_CONFIGURATION_MODE_START:
+ // Needs to be mapped by vendor extensions
+ break;
+ }
+ ALOGE("%s: Unknown stream configuration mode %d", __FUNCTION__, operationMode);
+ return StreamConfigurationMode::NORMAL_MODE;
+}
+
+camera3_buffer_status_t Camera3Device::mapHidlBufferStatus(BufferStatus status) {
+ switch (status) {
+ case BufferStatus::OK: return CAMERA3_BUFFER_STATUS_OK;
+ case BufferStatus::ERROR: return CAMERA3_BUFFER_STATUS_ERROR;
+ }
+ return CAMERA3_BUFFER_STATUS_ERROR;
+}
+
+int Camera3Device::mapToFrameworkFormat(
+ hardware::graphics::common::V1_0::PixelFormat pixelFormat) {
+ return static_cast<uint32_t>(pixelFormat);
+}
+
+uint32_t Camera3Device::mapConsumerToFrameworkUsage(
+ ConsumerUsageFlags usage) {
+ return usage;
+}
+
+uint32_t Camera3Device::mapProducerToFrameworkUsage(
+ ProducerUsageFlags usage) {
+ return usage;
+}
+
ssize_t Camera3Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
// Get max jpeg size (area-wise).
Size maxJpegResolution = getMaxJpegResolution();
if (maxJpegResolution.width == 0) {
- ALOGE("%s: Camera %d: Can't find valid available jpeg sizes in static metadata!",
- __FUNCTION__, mId);
+ ALOGE("%s: Camera %s: Can't find valid available jpeg sizes in static metadata!",
+ __FUNCTION__, mId.string());
return BAD_VALUE;
}
@@ -460,7 +566,8 @@
ssize_t maxJpegBufferSize = 0;
camera_metadata_ro_entry jpegBufMaxSize = mDeviceInfo.find(ANDROID_JPEG_MAX_SIZE);
if (jpegBufMaxSize.count == 0) {
- ALOGE("%s: Camera %d: Can't find maximum JPEG size in static metadata!", __FUNCTION__, mId);
+ ALOGE("%s: Camera %s: Can't find maximum JPEG size in static metadata!", __FUNCTION__,
+ mId.string());
return BAD_VALUE;
}
maxJpegBufferSize = jpegBufMaxSize.data.i32[0];
@@ -482,8 +589,8 @@
const int FLOATS_PER_POINT=4;
camera_metadata_ro_entry maxPointCount = mDeviceInfo.find(ANDROID_DEPTH_MAX_DEPTH_SAMPLES);
if (maxPointCount.count == 0) {
- ALOGE("%s: Camera %d: Can't find maximum depth point cloud size in static metadata!",
- __FUNCTION__, mId);
+ ALOGE("%s: Camera %s: Can't find maximum depth point cloud size in static metadata!",
+ __FUNCTION__, mId.string());
return BAD_VALUE;
}
ssize_t maxBytesForPointCloud = sizeof(android_depth_points) +
@@ -500,8 +607,8 @@
mDeviceInfo.find(ANDROID_SENSOR_OPAQUE_RAW_SIZE);
size_t count = rawOpaqueSizes.count;
if (count == 0 || (count % PER_CONFIGURATION_SIZE)) {
- ALOGE("%s: Camera %d: bad opaque RAW size static metadata length(%zu)!",
- __FUNCTION__, mId, count);
+ ALOGE("%s: Camera %s: bad opaque RAW size static metadata length(%zu)!",
+ __FUNCTION__, mId.string(), count);
return BAD_VALUE;
}
@@ -512,8 +619,8 @@
}
}
- ALOGE("%s: Camera %d: cannot find size for %dx%d opaque RAW image!",
- __FUNCTION__, mId, width, height);
+ ALOGE("%s: Camera %s: cannot find size for %dx%d opaque RAW image!",
+ __FUNCTION__, mId.string(), width, height);
return BAD_VALUE;
}
@@ -527,11 +634,11 @@
bool gotLock = tryLockSpinRightRound(mLock);
ALOGW_IF(!gotInterfaceLock,
- "Camera %d: %s: Unable to lock interface lock, proceeding anyway",
- mId, __FUNCTION__);
+ "Camera %s: %s: Unable to lock interface lock, proceeding anyway",
+ mId.string(), __FUNCTION__);
ALOGW_IF(!gotLock,
- "Camera %d: %s: Unable to lock main lock, proceeding anyway",
- mId, __FUNCTION__);
+ "Camera %s: %s: Unable to lock main lock, proceeding anyway",
+ mId.string(), __FUNCTION__);
bool dumpTemplates = false;
@@ -624,12 +731,11 @@
};
for (int i = 1; i < CAMERA3_TEMPLATE_COUNT; i++) {
- const camera_metadata_t *templateRequest;
- templateRequest =
- mHal3Device->ops->construct_default_request_settings(
- mHal3Device, i);
+ camera_metadata_t *templateRequest = nullptr;
+ mInterface->constructDefaultRequestSettings(
+ (camera3_request_template_t) i, &templateRequest);
lines = String8::format(" HAL Request %s:\n", templateNames[i-1]);
- if (templateRequest == NULL) {
+ if (templateRequest == nullptr) {
lines.append(" Not supported\n");
write(fd, lines.string(), lines.size());
} else {
@@ -637,15 +743,16 @@
dump_indented_camera_metadata(templateRequest,
fd, /*verbosity*/2, /*indentation*/8);
}
+ free_camera_metadata(templateRequest);
}
}
mTagMonitor.dumpMonitoredMetadata(fd);
- if (mHal3Device != NULL) {
+ if (mInterface->valid()) {
lines = String8(" HAL device dump:\n");
write(fd, lines.string(), lines.size());
- mHal3Device->ops->dump(mHal3Device, fd);
+ mInterface->dump(fd);
}
if (gotLock) mLock.unlock();
@@ -686,7 +793,9 @@
}
status_t Camera3Device::convertMetadataListToRequestListLocked(
- const List<const CameraMetadata> &metadataList, bool repeating,
+ const List<const CameraMetadata> &metadataList,
+ const std::list<const SurfaceMap> &surfaceMaps,
+ bool repeating,
RequestList *requestList) {
if (requestList == NULL) {
CLOGE("requestList cannot be NULL.");
@@ -694,9 +803,11 @@
}
int32_t burstId = 0;
- for (List<const CameraMetadata>::const_iterator it = metadataList.begin();
- it != metadataList.end(); ++it) {
- sp<CaptureRequest> newRequest = setUpRequestLocked(*it);
+ List<const CameraMetadata>::const_iterator metadataIt = metadataList.begin();
+ std::list<const SurfaceMap>::const_iterator surfaceMapIt = surfaceMaps.begin();
+ for (; metadataIt != metadataList.end() && surfaceMapIt != surfaceMaps.end();
+ ++metadataIt, ++surfaceMapIt) {
+ sp<CaptureRequest> newRequest = setUpRequestLocked(*metadataIt, *surfaceMapIt);
if (newRequest == 0) {
CLOGE("Can't create capture request");
return BAD_VALUE;
@@ -706,12 +817,12 @@
// Setup burst Id and request Id
newRequest->mResultExtras.burstId = burstId++;
- if (it->exists(ANDROID_REQUEST_ID)) {
- if (it->find(ANDROID_REQUEST_ID).count == 0) {
+ if (metadataIt->exists(ANDROID_REQUEST_ID)) {
+ if (metadataIt->find(ANDROID_REQUEST_ID).count == 0) {
CLOGE("RequestID entry exists; but must not be empty in metadata");
return BAD_VALUE;
}
- newRequest->mResultExtras.requestId = it->find(ANDROID_REQUEST_ID).data.i32[0];
+ newRequest->mResultExtras.requestId = metadataIt->find(ANDROID_REQUEST_ID).data.i32[0];
} else {
CLOGE("RequestID does not exist in metadata");
return BAD_VALUE;
@@ -721,6 +832,10 @@
ALOGV("%s: requestId = %" PRId32, __FUNCTION__, newRequest->mResultExtras.requestId);
}
+ if (metadataIt != metadataList.end() || surfaceMapIt != surfaceMaps.end()) {
+ ALOGE("%s: metadataList and surfaceMaps are not the same size!", __FUNCTION__);
+ return BAD_VALUE;
+ }
// Setup batch size if this is a high speed video recording request.
if (mIsConstrainedHighSpeedConfiguration && requestList->size() > 0) {
@@ -740,12 +855,31 @@
ATRACE_CALL();
List<const CameraMetadata> requests;
+ std::list<const SurfaceMap> surfaceMaps;
+ convertToRequestList(requests, surfaceMaps, request);
+
+ return captureList(requests, surfaceMaps, /*lastFrameNumber*/NULL);
+}
+
+void Camera3Device::convertToRequestList(List<const CameraMetadata>& requests,
+ std::list<const SurfaceMap>& surfaceMaps,
+ const CameraMetadata& request) {
requests.push_back(request);
- return captureList(requests, /*lastFrameNumber*/NULL);
+
+ SurfaceMap surfaceMap;
+ camera_metadata_ro_entry streams = request.find(ANDROID_REQUEST_OUTPUT_STREAMS);
+ // With no surface list passed in, streams and surfaces have a 1-to-1
+ // mapping, so the surface index is 0 for each stream in the surfaceMap.
+ for (size_t i = 0; i < streams.count; i++) {
+ surfaceMap[streams.data.i32[i]].push_back(0);
+ }
+ surfaceMaps.push_back(surfaceMap);
}
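
For reference, a minimal standalone sketch of the default mapping built above, assuming SurfaceMap is an unordered_map from stream ID to a list of surface indices (the real typedef lives in CameraDeviceBase.h and is not shown in this diff):

#include <cstdio>
#include <unordered_map>
#include <vector>

// Sketch only: stand-in for the SurfaceMap typedef used above.
using SurfaceMap = std::unordered_map<int, std::vector<size_t>>;

int main() {
    // Pretend ANDROID_REQUEST_OUTPUT_STREAMS listed stream IDs 0 and 2.
    const int streamIds[] = {0, 2};
    SurfaceMap surfaceMap;
    for (int id : streamIds) {
        // No explicit surface list: each stream maps to its first (index 0) surface.
        surfaceMap[id].push_back(0);
    }
    for (const auto& entry : surfaceMap) {
        std::printf("stream %d -> surface index %zu\n", entry.first, entry.second[0]);
    }
    return 0;
}
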
status_t Camera3Device::submitRequestsHelper(
- const List<const CameraMetadata> &requests, bool repeating,
+ const List<const CameraMetadata> &requests,
+ const std::list<const SurfaceMap> &surfaceMaps,
+ bool repeating,
/*out*/
int64_t *lastFrameNumber) {
ATRACE_CALL();
@@ -760,8 +894,8 @@
RequestList requestList;
- res = convertMetadataListToRequestListLocked(requests, repeating,
- /*out*/&requestList);
+ res = convertMetadataListToRequestListLocked(requests, surfaceMaps,
+ repeating, /*out*/&requestList);
if (res != OK) {
// error logged by previous call
return res;
@@ -779,7 +913,7 @@
SET_ERR_L("Can't transition to active in %f seconds!",
kActiveTimeout/1e9);
}
- ALOGV("Camera %d: Capture request %" PRId32 " enqueued", mId,
+ ALOGV("Camera %s: Capture request %" PRId32 " enqueued", mId.string(),
(*(requestList.begin()))->mResultExtras.requestId);
} else {
CLOGE("Cannot queue request. Impossible.");
@@ -789,11 +923,150 @@
return res;
}
+hardware::Return<void> Camera3Device::processCaptureResult(
+ const device::V3_2::CaptureResult& result) {
+ camera3_capture_result r;
+ status_t res;
+ r.frame_number = result.frameNumber;
+ if (result.result.size() != 0) {
+ r.result = reinterpret_cast<const camera_metadata_t*>(result.result.data());
+ size_t expected_metadata_size = result.result.size();
+ if ((res = validate_camera_metadata_structure(r.result, &expected_metadata_size)) != OK) {
+ ALOGE("%s: Frame %d: Invalid camera metadata received by camera service from HAL: %s (%d)",
+ __FUNCTION__, result.frameNumber, strerror(-res), res);
+ return hardware::Void();
+ }
+ } else {
+ r.result = nullptr;
+ }
+
+ std::vector<camera3_stream_buffer_t> outputBuffers(result.outputBuffers.size());
+ std::vector<buffer_handle_t> outputBufferHandles(result.outputBuffers.size());
+ for (size_t i = 0; i < result.outputBuffers.size(); i++) {
+ auto& bDst = outputBuffers[i];
+ const StreamBuffer &bSrc = result.outputBuffers[i];
+
+ ssize_t idx = mOutputStreams.indexOfKey(bSrc.streamId);
+ if (idx == -1) {
+ ALOGE("%s: Frame %d: Buffer %zu: Invalid output stream id %d",
+ __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+ return hardware::Void();
+ }
+ bDst.stream = mOutputStreams.valueAt(idx)->asHalStream();
+
+ buffer_handle_t *buffer;
+ res = mInterface->popInflightBuffer(result.frameNumber, bSrc.streamId, &buffer);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Buffer %zu: No in-flight buffer for stream %d",
+ __FUNCTION__, result.frameNumber, i, bSrc.streamId);
+ return hardware::Void();
+ }
+ bDst.buffer = buffer;
+ bDst.status = mapHidlBufferStatus(bSrc.status);
+ bDst.acquire_fence = -1;
+ if (bSrc.releaseFence == nullptr) {
+ bDst.release_fence = -1;
+ } else if (bSrc.releaseFence->numFds == 1) {
+ bDst.release_fence = dup(bSrc.releaseFence->data[0]);
+ } else {
+ ALOGE("%s: Frame %d: Invalid release fence for buffer %zu, fd count is %d, not 1",
+ __FUNCTION__, result.frameNumber, i, bSrc.releaseFence->numFds);
+ return hardware::Void();
+ }
+ }
+ r.num_output_buffers = outputBuffers.size();
+ r.output_buffers = outputBuffers.data();
+
+ camera3_stream_buffer_t inputBuffer;
+ if (result.inputBuffer.streamId == -1) {
+ r.input_buffer = nullptr;
+ } else {
+ if (mInputStream->getId() != result.inputBuffer.streamId) {
+ ALOGE("%s: Frame %d: Invalid input stream id %d", __FUNCTION__,
+ result.frameNumber, result.inputBuffer.streamId);
+ return hardware::Void();
+ }
+ inputBuffer.stream = mInputStream->asHalStream();
+ buffer_handle_t *buffer;
+ res = mInterface->popInflightBuffer(result.frameNumber, result.inputBuffer.streamId,
+ &buffer);
+ if (res != OK) {
+ ALOGE("%s: Frame %d: Input buffer: No in-flight buffer for stream %d",
+ __FUNCTION__, result.frameNumber, result.inputBuffer.streamId);
+ return hardware::Void();
+ }
+ inputBuffer.buffer = buffer;
+ inputBuffer.status = mapHidlBufferStatus(result.inputBuffer.status);
+ inputBuffer.acquire_fence = -1;
+ if (result.inputBuffer.releaseFence == nullptr) {
+ inputBuffer.release_fence = -1;
+ } else if (result.inputBuffer.releaseFence->numFds == 1) {
+ inputBuffer.release_fence = dup(result.inputBuffer.releaseFence->data[0]);
+ } else {
+ ALOGE("%s: Frame %d: Invalid release fence for input buffer, fd count is %d, not 1",
+ __FUNCTION__, result.frameNumber, result.inputBuffer.releaseFence->numFds);
+ return hardware::Void();
+ }
+ r.input_buffer = &inputBuffer;
+ }
+
+ r.partial_result = result.partialResult;
+
+ processCaptureResult(&r);
+
+ return hardware::Void();
+}
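
The fence handling above follows a simple convention: no handle means no fence, and a one-fd handle is dup()ed so the framework owns its own copy. A minimal sketch of that convention with a stand-in handle type (not the real native_handle_t/hidl_handle):

#include <unistd.h>   // dup()

// Stand-in for a handle carrying fence fds (sketch only).
struct FenceHandle {
    int numFds;
    int data[1];
};

// Returns an owned fd (or -1 for "no fence"), mirroring the convention in
// processCaptureResult above; returns -2 on a malformed handle.
int importReleaseFence(const FenceHandle* h) {
    if (h == nullptr) return -1;      // no fence attached
    if (h->numFds != 1) return -2;    // malformed: exactly one fd expected
    return dup(h->data[0]);           // framework takes its own copy
}
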
+
+hardware::Return<void> Camera3Device::notify(
+ const NotifyMsg& msg) {
+ camera3_notify_msg m;
+ switch (msg.type) {
+ case MsgType::ERROR:
+ m.type = CAMERA3_MSG_ERROR;
+ m.message.error.frame_number = msg.msg.error.frameNumber;
+ if (msg.msg.error.errorStreamId >= 0) {
+ ssize_t idx = mOutputStreams.indexOfKey(msg.msg.error.errorStreamId);
+ if (idx == -1) {
+ ALOGE("%s: Frame %d: Invalid error stream id %d",
+ __FUNCTION__, m.message.error.frame_number, msg.msg.error.errorStreamId);
+ return hardware::Void();
+ }
+ m.message.error.error_stream = mOutputStreams.valueAt(idx)->asHalStream();
+ } else {
+ m.message.error.error_stream = nullptr;
+ }
+ switch (msg.msg.error.errorCode) {
+ case ErrorCode::ERROR_DEVICE:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
+ break;
+ case ErrorCode::ERROR_REQUEST:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+ break;
+ case ErrorCode::ERROR_RESULT:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
+ break;
+ case ErrorCode::ERROR_BUFFER:
+ m.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+ break;
+ }
+ break;
+ case MsgType::SHUTTER:
+ m.type = CAMERA3_MSG_SHUTTER;
+ m.message.shutter.frame_number = msg.msg.shutter.frameNumber;
+ m.message.shutter.timestamp = msg.msg.shutter.timestamp;
+ break;
+ }
+ notify(&m);
+
+ return hardware::Void();
+}
+
status_t Camera3Device::captureList(const List<const CameraMetadata> &requests,
+ const std::list<const SurfaceMap> &surfaceMaps,
int64_t *lastFrameNumber) {
ATRACE_CALL();
- return submitRequestsHelper(requests, /*repeating*/false, lastFrameNumber);
+ return submitRequestsHelper(requests, surfaceMaps, /*repeating*/false, lastFrameNumber);
}
status_t Camera3Device::setStreamingRequest(const CameraMetadata &request,
@@ -801,19 +1074,23 @@
ATRACE_CALL();
List<const CameraMetadata> requests;
- requests.push_back(request);
- return setStreamingRequestList(requests, /*lastFrameNumber*/NULL);
+ std::list<const SurfaceMap> surfaceMaps;
+ convertToRequestList(requests, surfaceMaps, request);
+
+ return setStreamingRequestList(requests, /*surfaceMap*/surfaceMaps,
+ /*lastFrameNumber*/NULL);
}
status_t Camera3Device::setStreamingRequestList(const List<const CameraMetadata> &requests,
+ const std::list<const SurfaceMap> &surfaceMaps,
int64_t *lastFrameNumber) {
ATRACE_CALL();
- return submitRequestsHelper(requests, /*repeating*/true, lastFrameNumber);
+ return submitRequestsHelper(requests, surfaceMaps, /*repeating*/true, lastFrameNumber);
}
sp<Camera3Device::CaptureRequest> Camera3Device::setUpRequestLocked(
- const CameraMetadata &request) {
+ const CameraMetadata &request, const SurfaceMap &surfaceMap) {
status_t res;
if (mStatus == STATUS_UNCONFIGURED || mNeedConfig) {
@@ -829,7 +1106,7 @@
}
}
- sp<CaptureRequest> newRequest = createCaptureRequest(request);
+ sp<CaptureRequest> newRequest = createCaptureRequest(request, surfaceMap);
return newRequest;
}
@@ -854,7 +1131,7 @@
SET_ERR_L("Unexpected status: %d", mStatus);
return INVALID_OPERATION;
}
- ALOGV("Camera %d: Clearing repeating request", mId);
+ ALOGV("Camera %s: Clearing repeating request", mId.string());
return mRequestThread->clearRepeatingRequests(lastFrameNumber);
}
@@ -871,8 +1148,8 @@
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
- ALOGV("Camera %d: Creating new input stream %d: %d x %d, format %d",
- mId, mNextStreamId, width, height, format);
+ ALOGV("Camera %s: Creating new input stream %d: %d x %d, format %d",
+ mId.string(), mNextStreamId, width, height, format);
status_t res;
bool wasActive = false;
@@ -928,7 +1205,7 @@
internalResumeLocked();
}
- ALOGV("Camera %d: Created input stream", mId);
+ ALOGV("Camera %s: Created input stream", mId.string());
return OK;
}
@@ -942,8 +1219,8 @@
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
- ALOGV("Camera %d: Creating ZSL stream %d: %d x %d, depth %d",
- mId, mNextStreamId, width, height, depth);
+ ALOGV("Camera %s: Creating ZSL stream %d: %d x %d, depth %d",
+ mId.string(), mNextStreamId, width, height, depth);
status_t res;
bool wasActive = false;
@@ -1008,18 +1285,37 @@
internalResumeLocked();
}
- ALOGV("Camera %d: Created ZSL stream", mId);
+ ALOGV("Camera %s: Created ZSL stream", mId.string());
return OK;
}
status_t Camera3Device::createStream(sp<Surface> consumer,
- uint32_t width, uint32_t height, int format, android_dataspace dataSpace,
- camera3_stream_rotation_t rotation, int *id, int streamSetId, uint32_t consumerUsage) {
+ uint32_t width, uint32_t height, int format,
+ android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+ int streamSetId, uint32_t consumerUsage) {
+ ATRACE_CALL();
+
+ if (consumer == nullptr) {
+ ALOGE("%s: consumer must not be null", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ std::vector<sp<Surface>> consumers;
+ consumers.push_back(consumer);
+
+ return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
+ format, dataSpace, rotation, id, streamSetId, consumerUsage);
+}
+
+status_t Camera3Device::createStream(const std::vector<sp<Surface>>& consumers,
+ bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+ android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+ int streamSetId, uint32_t consumerUsage) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
- ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
- " consumer usage 0x%x", mId, mNextStreamId, width, height, format, dataSpace, rotation,
+ ALOGV("Camera %s: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d"
+ " consumer usage 0x%x", mId.string(), mNextStreamId, width, height, format, dataSpace, rotation,
consumerUsage);
status_t res;
@@ -1058,18 +1354,24 @@
streamSetId = CAMERA3_STREAM_SET_ID_INVALID;
}
+ if (consumers.size() == 0 && !hasDeferredConsumer) {
+ ALOGE("%s: Number of consumers cannot be smaller than 1", __FUNCTION__);
+ return BAD_VALUE;
+ }
// HAL3.1 doesn't support deferred consumer stream creation as it requires buffer registration
// which requires a consumer surface to be available.
- if (consumer == nullptr && mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
+ if (hasDeferredConsumer && mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
ALOGE("HAL3.1 doesn't support deferred consumer stream creation");
return BAD_VALUE;
}
- if (consumer == nullptr && format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ if (hasDeferredConsumer && format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
ALOGE("Deferred consumer stream creation only support IMPLEMENTATION_DEFINED format");
return BAD_VALUE;
}
+ bool streamSharing = consumers.size() > 1 || (consumers.size() > 0 && hasDeferredConsumer);
+
// Use legacy dataspace values for older HALs
if (mDeviceVersion <= CAMERA_DEVICE_API_VERSION_3_3) {
dataSpace = mapToLegacyDataspace(dataSpace);
@@ -1089,7 +1391,7 @@
return BAD_VALUE;
}
}
- newStream = new Camera3OutputStream(mNextStreamId, consumer,
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, streamSetId);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
@@ -1098,15 +1400,19 @@
SET_ERR_L("Invalid RAW opaque buffer size %zd", rawOpaqueBufferSize);
return BAD_VALUE;
}
- newStream = new Camera3OutputStream(mNextStreamId, consumer,
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, streamSetId);
- } else if (consumer == nullptr) {
+ } else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, streamSetId);
+ } else if (streamSharing) {
+ newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
+ hasDeferredConsumer, width, height, format, consumerUsage,
+ dataSpace, rotation, mTimestampOffset, streamSetId);
} else {
- newStream = new Camera3OutputStream(mNextStreamId, consumer,
+ newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, streamSetId);
}
@@ -1142,7 +1448,7 @@
}
internalResumeLocked();
}
- ALOGV("Camera %d: Created new stream", mId);
+ ALOGV("Camera %s: Created new stream", mId.string());
return OK;
}
@@ -1231,12 +1537,12 @@
Mutex::Autolock l(mLock);
status_t res;
- ALOGV("%s: Camera %d: Deleting stream %d", __FUNCTION__, mId, id);
+ ALOGV("%s: Camera %s: Deleting stream %d", __FUNCTION__, mId.string(), id);
// CameraDevice semantics require device to already be idle before
// deleteStream is called, unlike for createStream.
if (mStatus == STATUS_ACTIVE) {
- ALOGV("%s: Camera %d: Device not idle", __FUNCTION__, mId);
+ ALOGV("%s: Camera %s: Device not idle", __FUNCTION__, mId.string());
return -EBUSY;
}
@@ -1343,18 +1649,20 @@
return OK;
}
- const camera_metadata_t *rawRequest;
- ATRACE_BEGIN("camera3->construct_default_request_settings");
- rawRequest = mHal3Device->ops->construct_default_request_settings(
- mHal3Device, templateId);
- ATRACE_END();
- if (rawRequest == NULL) {
+ camera_metadata_t *rawRequest;
+ status_t res = mInterface->constructDefaultRequestSettings(
+ (camera3_request_template_t) templateId, &rawRequest);
+ if (res == BAD_VALUE) {
ALOGI("%s: template %d is not supported on this camera device",
__FUNCTION__, templateId);
- return BAD_VALUE;
+ return res;
+ } else if (res != OK) {
+ CLOGE("Unable to construct request template %d: %s (%d)",
+ templateId, strerror(-res), res);
+ return res;
}
- mRequestTemplateCache[templateId] = rawRequest;
+ mRequestTemplateCache[templateId].acquire(rawRequest);
// Derive some new keys for backward compatibility
if (mDerivePostRawSensKey && !mRequestTemplateCache[templateId].exists(
@@ -1394,7 +1702,7 @@
return INVALID_OPERATION;
}
- ALOGV("%s: Camera %d: Waiting until idle", __FUNCTION__, mId);
+ ALOGV("%s: Camera %s: Waiting until idle", __FUNCTION__, mId.string());
status_t res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout);
if (res != OK) {
SET_ERR_L("Error waiting for HAL to drain: %s (%d)", strerror(-res),
@@ -1415,7 +1723,7 @@
mRequestThread->setPaused(true);
mPauseStateNotify = true;
- ALOGV("%s: Camera %d: Internal wait until idle", __FUNCTION__, mId);
+ ALOGV("%s: Camera %s: Internal wait until idle", __FUNCTION__, mId.string());
status_t res = waitUntilStateThenRelock(/*active*/ false, kShutdownTimeout);
if (res != OK) {
SET_ERR_L("Can't idle device in %f seconds!",
@@ -1514,8 +1822,8 @@
if (res == TIMED_OUT) {
return res;
} else if (res != OK) {
- ALOGW("%s: Camera %d: No frame in %" PRId64 " ns: %s (%d)",
- __FUNCTION__, mId, timeout, strerror(-res), res);
+ ALOGW("%s: Camera %s: No frame in %" PRId64 " ns: %s (%d)",
+ __FUNCTION__, mId.string(), timeout, strerror(-res), res);
return res;
}
}
@@ -1617,7 +1925,7 @@
status_t Camera3Device::flush(int64_t *frameNumber) {
ATRACE_CALL();
- ALOGV("%s: Camera %d: Flushing all requests", __FUNCTION__, mId);
+ ALOGV("%s: Camera %s: Flushing all requests", __FUNCTION__, mId.string());
Mutex::Autolock il(mInterfaceLock);
{
@@ -1626,7 +1934,7 @@
}
status_t res;
- if (mHal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_1) {
+ if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_1) {
res = mRequestThread->flush();
} else {
Mutex::Autolock l(mLock);
@@ -1642,7 +1950,7 @@
status_t Camera3Device::prepare(int maxCount, int streamId) {
ATRACE_CALL();
- ALOGV("%s: Camera %d: Preparing stream %d", __FUNCTION__, mId, streamId);
+ ALOGV("%s: Camera %s: Preparing stream %d", __FUNCTION__, mId.string(), streamId);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -1670,15 +1978,15 @@
status_t Camera3Device::tearDown(int streamId) {
ATRACE_CALL();
- ALOGV("%s: Camera %d: Tearing down stream %d", __FUNCTION__, mId, streamId);
+ ALOGV("%s: Camera %s: Tearing down stream %d", __FUNCTION__, mId.string(), streamId);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
// Teardown can only be accomplished on devices that don't require register_stream_buffers,
// since we cannot call register_stream_buffers except right after configure_streams.
- if (mHal3Device->common.version < CAMERA_DEVICE_API_VERSION_3_2) {
+ if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
ALOGE("%s: Unable to tear down streams on device HAL v%x",
- __FUNCTION__, mHal3Device->common.version);
+ __FUNCTION__, mDeviceVersion);
return NO_INIT;
}
@@ -1702,7 +2010,7 @@
status_t Camera3Device::addBufferListenerForStream(int streamId,
wp<Camera3StreamBufferListener> listener) {
ATRACE_CALL();
- ALOGV("%s: Camera %d: Adding buffer listener for stream %d", __FUNCTION__, mId, streamId);
+ ALOGV("%s: Camera %s: Adding buffer listener for stream %d", __FUNCTION__, mId.string(), streamId);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -1740,7 +2048,7 @@
if (mStatus != STATUS_ACTIVE && mStatus != STATUS_CONFIGURED) {
return;
}
- ALOGV("%s: Camera %d: Now %s", __FUNCTION__, mId,
+ ALOGV("%s: Camera %s: Now %s", __FUNCTION__, mId.string(),
idle ? "idle" : "active");
internalUpdateStatusLocked(idle ? STATUS_CONFIGURED : STATUS_ACTIVE);
@@ -1761,7 +2069,7 @@
status_t Camera3Device::setConsumerSurface(int streamId, sp<Surface> consumer) {
ATRACE_CALL();
- ALOGV("%s: Camera %d: set consumer surface for stream %d", __FUNCTION__, mId, streamId);
+ ALOGV("%s: Camera %s: set consumer surface for stream %d", __FUNCTION__, mId.string(), streamId);
Mutex::Autolock il(mInterfaceLock);
Mutex::Autolock l(mLock);
@@ -1782,16 +2090,18 @@
return res;
}
- if (!stream->isConfiguring()) {
- CLOGE("Stream %d was already fully configured.", streamId);
- return INVALID_OPERATION;
- }
+ if (stream->isConsumerConfigurationDeferred()) {
+ if (!stream->isConfiguring()) {
+ CLOGE("Stream %d was already fully configured.", streamId);
+ return INVALID_OPERATION;
+ }
- res = stream->finishConfiguration(mHal3Device);
- if (res != OK) {
- SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
- stream->getId(), strerror(-res), res);
- return res;
+ res = stream->finishConfiguration();
+ if (res != OK) {
+ SET_ERR_L("Can't finish configuring output stream %d: %s (%d)",
+ stream->getId(), strerror(-res), res);
+ return res;
+ }
}
return OK;
@@ -1802,7 +2112,7 @@
*/
sp<Camera3Device::CaptureRequest> Camera3Device::createCaptureRequest(
- const CameraMetadata &request) {
+ const CameraMetadata &request, const SurfaceMap &surfaceMap) {
ATRACE_CALL();
status_t res;
@@ -1821,7 +2131,7 @@
// Lazy completion of stream configuration (allocation/registration)
// on first use
if (mInputStream->isConfiguring()) {
- res = mInputStream->finishConfiguration(mHal3Device);
+ res = mInputStream->finishConfiguration();
if (res != OK) {
SET_ERR_L("Unable to finish configuring input stream %d:"
" %s (%d)",
@@ -1857,16 +2167,23 @@
mOutputStreams.editValueAt(idx);
// It is illegal to include a deferred consumer output stream into a request
- if (stream->isConsumerConfigurationDeferred()) {
- CLOGE("Stream %d hasn't finished configuration yet due to deferred consumer",
- stream->getId());
- return NULL;
+ auto iter = surfaceMap.find(streams.data.i32[i]);
+ if (iter != surfaceMap.end()) {
+ const std::vector<size_t>& surfaces = iter->second;
+ for (const auto& surface : surfaces) {
+ if (stream->isConsumerConfigurationDeferred(surface)) {
+ CLOGE("Stream %d surface %zu hasn't finished configuration yet "
+ "due to deferred consumer", stream->getId(), surface);
+ return NULL;
+ }
+ }
+ newRequest->mOutputSurfaces[i] = surfaces;
}
// Lazy completion of stream configuration (allocation/registration)
// on first use
if (stream->isConfiguring()) {
- res = stream->finishConfiguration(mHal3Device);
+ res = stream->finishConfiguration();
if (res != OK) {
SET_ERR_L("Unable to finish configuring stream %d: %s (%d)",
stream->getId(), strerror(-res), res);
@@ -1949,7 +2266,7 @@
}
// Start configuring the streams
- ALOGV("%s: Camera %d: Starting stream configuration", __FUNCTION__, mId);
+ ALOGV("%s: Camera %s: Starting stream configuration", __FUNCTION__, mId.string());
camera3_stream_configuration config;
config.operation_mode = mIsConstrainedHighSpeedConfiguration ?
@@ -1995,9 +2312,8 @@
// Do the HAL configuration; will potentially touch stream
// max_buffers, usage, priv fields.
- ATRACE_BEGIN("camera3->configure_streams");
- res = mHal3Device->ops->configure_streams(mHal3Device, &config);
- ATRACE_END();
+
+ res = mInterface->configureStreams(&config);
if (res == BAD_VALUE) {
// HAL rejected this set of streams as unsupported, clean up config
@@ -2018,7 +2334,7 @@
// faster
if (mInputStream != NULL && mInputStream->isConfiguring()) {
- res = mInputStream->finishConfiguration(mHal3Device);
+ res = mInputStream->finishConfiguration();
if (res != OK) {
CLOGE("Can't finish configuring input stream %d: %s (%d)",
mInputStream->getId(), strerror(-res), res);
@@ -2031,7 +2347,7 @@
sp<Camera3OutputStreamInterface> outputStream =
mOutputStreams.editValueAt(i);
if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
- res = outputStream->finishConfiguration(mHal3Device);
+ res = outputStream->finishConfiguration();
if (res != OK) {
CLOGE("Can't finish configuring output stream %d: %s (%d)",
outputStream->getId(), strerror(-res), res);
@@ -2068,7 +2384,7 @@
internalUpdateStatusLocked((mDummyStreamId == NO_STREAM) ?
STATUS_CONFIGURED : STATUS_UNCONFIGURED);
- ALOGV("%s: Camera %d: Stream configuration complete", __FUNCTION__, mId);
+ ALOGV("%s: Camera %s: Stream configuration complete", __FUNCTION__, mId.string());
// tear down the deleted streams after configure streams.
mDeletedStreams.clear();
@@ -2083,12 +2399,12 @@
if (mDummyStreamId != NO_STREAM) {
// Should never be adding a second dummy stream when one is already
// active
- SET_ERR_L("%s: Camera %d: A dummy stream already exists!",
- __FUNCTION__, mId);
+ SET_ERR_L("%s: Camera %s: A dummy stream already exists!",
+ __FUNCTION__, mId.string());
return INVALID_OPERATION;
}
- ALOGV("%s: Camera %d: Adding a dummy stream", __FUNCTION__, mId);
+ ALOGV("%s: Camera %s: Adding a dummy stream", __FUNCTION__, mId.string());
sp<Camera3OutputStreamInterface> dummyStream =
new Camera3DummyStream(mNextStreamId);
@@ -2112,7 +2428,7 @@
if (mDummyStreamId == NO_STREAM) return OK;
if (mOutputStreams.size() == 1) return OK;
- ALOGV("%s: Camera %d: Removing the dummy stream", __FUNCTION__, mId);
+ ALOGV("%s: Camera %s: Removing the dummy stream", __FUNCTION__, mId.string());
// Ok, have a dummy stream and there's at least one other output stream,
// so remove the dummy
@@ -2166,7 +2482,7 @@
void Camera3Device::setErrorStateLockedV(const char *fmt, va_list args) {
// Print out all error messages to log
String8 errorCause = String8::formatV(fmt, args);
- ALOGE("Camera %d: %s", mId, errorCause.string());
+ ALOGE("Camera %s: %s", mId.string(), errorCause.string());
// But only do error state transition steps for the first error
if (mStatus == STATUS_ERROR || mStatus == STATUS_UNINITIALIZED) return;
@@ -2639,8 +2955,8 @@
Camera3Stream::cast(msg.error_stream);
streamId = stream->getId();
}
- ALOGV("Camera %d: %s: HAL error, frame %d, stream %d: %d",
- mId, __FUNCTION__, msg.frame_number,
+ ALOGV("Camera %s: %s: HAL error, frame %d, stream %d: %d",
+ mId.string(), __FUNCTION__, msg.frame_number,
streamId, msg.error_code);
CaptureResultExtras resultExtras;
@@ -2661,8 +2977,8 @@
resultExtras = r.resultExtras;
} else {
resultExtras.frameNumber = msg.frame_number;
- ALOGE("Camera %d: %s: cannot find in-flight request on "
- "frame %" PRId64 " error", mId, __FUNCTION__,
+ ALOGE("Camera %s: %s: cannot find in-flight request on "
+ "frame %" PRId64 " error", mId.string(), __FUNCTION__,
resultExtras.frameNumber);
}
}
@@ -2670,7 +2986,7 @@
if (listener != NULL) {
listener->notifyError(errorCode, resultExtras);
} else {
- ALOGE("Camera %d: %s: no listener available", mId, __FUNCTION__);
+ ALOGE("Camera %s: %s: no listener available", mId.string(), __FUNCTION__);
}
break;
default:
@@ -2715,8 +3031,8 @@
}
}
- ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %" PRId64,
- mId, __FUNCTION__,
+ ALOGVV("Camera %s: %s: Shutter fired for frame %d (id %d) at %" PRId64,
+ mId.string(), __FUNCTION__,
msg.frame_number, r.resultExtras.requestId, msg.timestamp);
// Call listener, if any
if (listener != NULL) {
@@ -2762,17 +3078,407 @@
}
/**
+ * HalInterface inner class methods
+ */
+
+Camera3Device::HalInterface::HalInterface(camera3_device_t *device) :
+ mHal3Device(device) {}
+
+Camera3Device::HalInterface::HalInterface(sp<ICameraDeviceSession> &session) :
+ mHal3Device(nullptr),
+ mHidlSession(session) {}
+
+Camera3Device::HalInterface::HalInterface() :
+ mHal3Device(nullptr) {}
+
+Camera3Device::HalInterface::HalInterface(const HalInterface& other) :
+ mHal3Device(other.mHal3Device), mHidlSession(other.mHidlSession) {}
+
+bool Camera3Device::HalInterface::valid() {
+ return (mHal3Device != nullptr) || (mHidlSession != nullptr);
+}
+
+void Camera3Device::HalInterface::clear() {
+ mHal3Device = nullptr;
+ mHidlSession.clear();
+}
+
+status_t Camera3Device::HalInterface::constructDefaultRequestSettings(
+ camera3_request_template_t templateId,
+ /*out*/ camera_metadata_t **requestTemplate) {
+ ATRACE_NAME("CameraHal::constructDefaultRequestSettings");
+ if (!valid()) return INVALID_OPERATION;
+ status_t res = OK;
+
+ if (mHal3Device != nullptr) {
+ const camera_metadata *r;
+ r = mHal3Device->ops->construct_default_request_settings(
+ mHal3Device, templateId);
+ if (r == nullptr) return BAD_VALUE;
+ *requestTemplate = clone_camera_metadata(r);
+ if (*requestTemplate == nullptr) {
+ ALOGE("%s: Unable to clone camera metadata received from HAL",
+ __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ } else {
+ common::V1_0::Status status;
+ RequestTemplate id;
+ switch (templateId) {
+ case CAMERA3_TEMPLATE_PREVIEW:
+ id = RequestTemplate::PREVIEW;
+ break;
+ case CAMERA3_TEMPLATE_STILL_CAPTURE:
+ id = RequestTemplate::STILL_CAPTURE;
+ break;
+ case CAMERA3_TEMPLATE_VIDEO_RECORD:
+ id = RequestTemplate::VIDEO_RECORD;
+ break;
+ case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+ id = RequestTemplate::VIDEO_SNAPSHOT;
+ break;
+ case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+ id = RequestTemplate::ZERO_SHUTTER_LAG;
+ break;
+ case CAMERA3_TEMPLATE_MANUAL:
+ id = RequestTemplate::MANUAL;
+ break;
+ default:
+ // Unknown template ID
+ return BAD_VALUE;
+ }
+ mHidlSession->constructDefaultRequestSettings(id,
+ [&status, &requestTemplate]
+ (common::V1_0::Status s, const device::V3_2::CameraMetadata& request) {
+ status = s;
+ if (status == common::V1_0::Status::OK) {
+ const camera_metadata *r =
+ reinterpret_cast<const camera_metadata_t*>(request.data());
+ size_t expectedSize = request.size();
+ int ret = validate_camera_metadata_structure(r, &expectedSize);
+ if (ret == OK) {
+ *requestTemplate = clone_camera_metadata(r);
+ if (*requestTemplate == nullptr) {
+ ALOGE("%s: Unable to clone camera metadata received from HAL",
+ __FUNCTION__);
+ status = common::V1_0::Status::INTERNAL_ERROR;
+ }
+ } else {
+ ALOGE("%s: Malformed camera metadata received from HAL", __FUNCTION__);
+ status = common::V1_0::Status::INTERNAL_ERROR;
+ }
+ }
+ });
+ res = CameraProviderManager::mapToStatusT(status);
+ }
+ return res;
+}
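
As the header comment notes, the caller owns the returned template: the dump path above releases it with free_camera_metadata(), and createDefaultRequest() adopts it via CameraMetadata::acquire(). A hedged, standalone sketch of that ownership contract using simplified stand-in types (not the real camera_metadata API):

#include <cstdio>
#include <memory>

// Stand-in for camera_metadata_t (sketch only).
struct Metadata { int templateId; };

// Sketch of the contract of constructDefaultRequestSettings():
// the interface allocates, the caller must adopt or free.
bool constructDefaults(int templateId, Metadata** out) {
    *out = new Metadata{templateId};   // ownership passes to the caller
    return true;
}

int main() {
    Metadata* raw = nullptr;
    if (constructDefaults(1, &raw)) {
        std::unique_ptr<Metadata> owned(raw);  // adopt, analogous to acquire()
        std::printf("template %d\n", owned->templateId);
    }
    return 0;
}
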
+
+status_t Camera3Device::HalInterface::configureStreams(camera3_stream_configuration *config) {
+ ATRACE_NAME("CameraHal::configureStreams");
+ if (!valid()) return INVALID_OPERATION;
+ status_t res = OK;
+
+ if (mHal3Device != nullptr) {
+ res = mHal3Device->ops->configure_streams(mHal3Device, config);
+ } else {
+ // Convert stream config to HIDL
+ std::set<int> activeStreams;
+ StreamConfiguration requestedConfiguration;
+ requestedConfiguration.streams.resize(config->num_streams);
+ for (size_t i = 0; i < config->num_streams; i++) {
+ Stream &dst = requestedConfiguration.streams[i];
+ camera3_stream_t *src = config->streams[i];
+
+ int streamId = Camera3Stream::cast(src)->getId();
+ StreamType streamType;
+ switch (src->stream_type) {
+ case CAMERA3_STREAM_OUTPUT:
+ streamType = StreamType::OUTPUT;
+ break;
+ case CAMERA3_STREAM_INPUT:
+ streamType = StreamType::INPUT;
+ break;
+ default:
+ ALOGE("%s: Stream %d: Unsupported stream type %d",
+ __FUNCTION__, streamId, config->streams[i]->stream_type);
+ return BAD_VALUE;
+ }
+ dst.id = streamId;
+ dst.streamType = streamType;
+ dst.width = src->width;
+ dst.height = src->height;
+ dst.format = mapToPixelFormat(src->format);
+ dst.usage = mapToConsumerUsage(src->usage);
+ dst.dataSpace = mapToHidlDataspace(src->data_space);
+ dst.rotation = mapToStreamRotation((camera3_stream_rotation_t) src->rotation);
+
+ activeStreams.insert(streamId);
+ // Create Buffer ID map if necessary
+ if (mBufferIdMaps.count(streamId) == 0) {
+ mBufferIdMaps.emplace(streamId, BufferIdMap{});
+ }
+ }
+ // remove BufferIdMap for deleted streams
+ for (auto it = mBufferIdMaps.begin(); it != mBufferIdMaps.end();) {
+ int streamId = it->first;
+ bool active = activeStreams.count(streamId) > 0;
+ if (!active) {
+ it = mBufferIdMaps.erase(it);
+ } else {
+ ++it;
+ }
+ }
+
+ requestedConfiguration.operationMode = mapToStreamConfigurationMode(
+ (camera3_stream_configuration_mode_t) config->operation_mode);
+
+ // Invoke configureStreams
+
+ HalStreamConfiguration finalConfiguration;
+ common::V1_0::Status status;
+ mHidlSession->configureStreams(requestedConfiguration,
+ [&status, &finalConfiguration]
+ (common::V1_0::Status s, const HalStreamConfiguration& halConfiguration) {
+ finalConfiguration = halConfiguration;
+ status = s;
+ });
+ if (status != common::V1_0::Status::OK) {
+ return CameraProviderManager::mapToStatusT(status);
+ }
+
+ // And convert output stream configuration from HIDL
+
+ for (size_t i = 0; i < config->num_streams; i++) {
+ camera3_stream_t *dst = config->streams[i];
+ int streamId = Camera3Stream::cast(dst)->getId();
+
+ // Start scan at i, with the assumption that the stream order matches
+ size_t realIdx = i;
+ bool found = false;
+ for (size_t idx = 0; idx < finalConfiguration.streams.size(); idx++) {
+ if (finalConfiguration.streams[realIdx].id == streamId) {
+ found = true;
+ break;
+ }
+ realIdx = (realIdx >= finalConfiguration.streams.size() - 1) ? 0 : realIdx + 1;
+ }
+ if (!found) {
+ ALOGE("%s: Stream %d not found in stream configuration response from HAL",
+ __FUNCTION__, streamId);
+ return INVALID_OPERATION;
+ }
+ HalStream &src = finalConfiguration.streams[realIdx];
+
+ int overrideFormat = mapToFrameworkFormat(src.overrideFormat);
+ if (dst->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ if (dst->format != overrideFormat) {
+ ALOGE("%s: Stream %d: Format override not allowed for format 0x%x", __FUNCTION__,
+ streamId, dst->format);
+ }
+ } else {
+ // Override allowed with IMPLEMENTATION_DEFINED
+ dst->format = overrideFormat;
+ }
+
+ if (dst->stream_type == CAMERA3_STREAM_INPUT) {
+ if (src.producerUsage != 0) {
+ ALOGE("%s: Stream %d: INPUT streams must have 0 for producer usage",
+ __FUNCTION__, streamId);
+ return INVALID_OPERATION;
+ }
+ dst->usage = mapConsumerToFrameworkUsage(src.consumerUsage);
+ } else {
+ // OUTPUT
+ if (src.consumerUsage != 0) {
+ ALOGE("%s: Stream %d: OUTPUT streams must have 0 for consumer usage",
+ __FUNCTION__, streamId);
+ return INVALID_OPERATION;
+ }
+ dst->usage = mapProducerToFrameworkUsage(src.producerUsage);
+ }
+ dst->max_buffers = src.maxBuffers;
+ }
+ }
+ return res;
+}
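
The ID-matching loop above starts scanning at index i on the assumption that the HAL usually preserves stream order, and wraps around otherwise. A standalone sketch of that circular scan, assuming the wrap-around fix applied above:

#include <cstdio>
#include <vector>

// Returns the index of streamId in halIds, scanning circularly from startIdx,
// or -1 if absent (sketch of the matching loop in configureStreams above).
int findStreamIndex(const std::vector<int>& halIds, int streamId, size_t startIdx) {
    if (halIds.empty()) return -1;
    size_t realIdx = startIdx % halIds.size();
    for (size_t idx = 0; idx < halIds.size(); idx++) {
        if (halIds[realIdx] == streamId) return static_cast<int>(realIdx);
        realIdx = (realIdx >= halIds.size() - 1) ? 0 : realIdx + 1;
    }
    return -1;
}

int main() {
    std::vector<int> halIds = {3, 1, 2};
    std::printf("stream 2 found at index %d\n", findStreamIndex(halIds, 2, /*startIdx*/ 1));
    return 0;
}
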
+
+status_t Camera3Device::HalInterface::processCaptureRequest(
+ camera3_capture_request_t *request) {
+ ATRACE_NAME("CameraHal::processCaptureRequest");
+ if (!valid()) return INVALID_OPERATION;
+ status_t res = OK;
+
+ if (mHal3Device != nullptr) {
+ res = mHal3Device->ops->process_capture_request(mHal3Device, request);
+ } else {
+ device::V3_2::CaptureRequest captureRequest;
+ captureRequest.frameNumber = request->frame_number;
+ std::vector<native_handle_t*> handlesCreated;
+ // A null settings buffer maps to a size-0 CameraMetadata
+ if (request->settings != nullptr) {
+ captureRequest.settings.setToExternal(
+ reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(request->settings)),
+ get_camera_metadata_size(request->settings));
+ }
+
+ {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+ if (request->input_buffer != nullptr) {
+ int32_t streamId = Camera3Stream::cast(request->input_buffer->stream)->getId();
+ buffer_handle_t buf = *(request->input_buffer->buffer);
+ auto pair = getBufferId(buf, streamId);
+ bool isNewBuffer = pair.first;
+ uint64_t bufferId = pair.second;
+ captureRequest.inputBuffer.streamId = streamId;
+ captureRequest.inputBuffer.bufferId = bufferId;
+ captureRequest.inputBuffer.buffer = (isNewBuffer) ? buf : nullptr;
+ captureRequest.inputBuffer.status = BufferStatus::OK;
+ native_handle_t *acquireFence = nullptr;
+ if (request->input_buffer->acquire_fence != -1) {
+ acquireFence = native_handle_create(1, 0);
+ acquireFence->data[0] = request->input_buffer->acquire_fence;
+ handlesCreated.push_back(acquireFence);
+ }
+ captureRequest.inputBuffer.acquireFence = acquireFence;
+ captureRequest.inputBuffer.releaseFence = nullptr;
+
+ pushInflightBufferLocked(captureRequest.frameNumber, streamId,
+ request->input_buffer->buffer,
+ request->input_buffer->acquire_fence);
+ } else {
+ captureRequest.inputBuffer.streamId = -1;
+ captureRequest.inputBuffer.bufferId = BUFFER_ID_NO_BUFFER;
+ }
+
+ captureRequest.outputBuffers.resize(request->num_output_buffers);
+ for (size_t i = 0; i < request->num_output_buffers; i++) {
+ const camera3_stream_buffer_t *src = request->output_buffers + i;
+ StreamBuffer &dst = captureRequest.outputBuffers[i];
+ int32_t streamId = Camera3Stream::cast(src->stream)->getId();
+ buffer_handle_t buf = *(src->buffer);
+ auto pair = getBufferId(buf, streamId);
+ bool isNewBuffer = pair.first;
+ dst.streamId = streamId;
+ dst.bufferId = pair.second;
+ dst.buffer = isNewBuffer ? buf : nullptr;
+ dst.status = BufferStatus::OK;
+ native_handle_t *acquireFence = nullptr;
+ if (src->acquire_fence != -1) {
+ acquireFence = native_handle_create(1, 0);
+ acquireFence->data[0] = src->acquire_fence;
+ handlesCreated.push_back(acquireFence);
+ }
+ dst.acquireFence = acquireFence;
+ dst.releaseFence = nullptr;
+
+ pushInflightBufferLocked(captureRequest.frameNumber, streamId,
+ src->buffer, src->acquire_fence);
+ }
+ }
+ common::V1_0::Status status = mHidlSession->processCaptureRequest(captureRequest);
+
+ for (auto& handle : handlesCreated) {
+ native_handle_delete(handle);
+ }
+ res = CameraProviderManager::mapToStatusT(status);
+ }
+ return res;
+}
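
Acquire fences are shipped to the HAL by wrapping the borrowed fd in a native_handle, and the wrapper is later released with native_handle_delete() rather than native_handle_close() so the fd itself stays owned by the camera3_stream_buffer_t. A short sketch of that pattern, using the cutils native_handle API as in the hunk above:

#include <cutils/native_handle.h>

// Sketch: wrap a borrowed fence fd for HIDL transport.
native_handle_t* wrapFence(int fenceFd) {
    if (fenceFd == -1) return nullptr;                 // no fence to transport
    native_handle_t* h = native_handle_create(/*numFds*/ 1, /*numInts*/ 0);
    if (h != nullptr) h->data[0] = fenceFd;            // borrow, do not dup
    return h;
}

void releaseWrapper(native_handle_t* h) {
    if (h != nullptr) native_handle_delete(h);         // frees the struct only,
                                                       // does not close the fd
}
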
+
+status_t Camera3Device::HalInterface::flush() {
+ ATRACE_NAME("CameraHal::flush");
+ if (!valid()) return INVALID_OPERATION;
+ status_t res = OK;
+
+ if (mHal3Device != nullptr) {
+ res = mHal3Device->ops->flush(mHal3Device);
+ } else {
+ res = CameraProviderManager::mapToStatusT(mHidlSession->flush());
+ }
+ return res;
+}
+
+status_t Camera3Device::HalInterface::dump(int fd) {
+ ATRACE_NAME("CameraHal::dump");
+ if (!valid()) return INVALID_OPERATION;
+ status_t res = OK;
+
+ if (mHal3Device != nullptr) {
+ mHal3Device->ops->dump(mHal3Device, fd);
+ } else {
+ // Handled by CameraProviderManager::dump
+ }
+ return res;
+}
+
+status_t Camera3Device::HalInterface::close() {
+ ATRACE_NAME("CameraHal::close()");
+ if (!valid()) return INVALID_OPERATION;
+ status_t res = OK;
+
+ if (mHal3Device != nullptr) {
+ mHal3Device->common.close(&mHal3Device->common);
+ } else {
+ mHidlSession->close();
+ }
+ return res;
+}
+
+status_t Camera3Device::HalInterface::pushInflightBufferLocked(
+ int32_t frameNumber, int32_t streamId, buffer_handle_t *buffer, int acquireFence) {
+ uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
+ auto pair = std::make_pair(buffer, acquireFence);
+ mInflightBufferMap[key] = pair;
+ return OK;
+}
+
+status_t Camera3Device::HalInterface::popInflightBuffer(
+ int32_t frameNumber, int32_t streamId,
+ /*out*/ buffer_handle_t **buffer) {
+ std::lock_guard<std::mutex> lock(mInflightLock);
+
+ uint64_t key = static_cast<uint64_t>(frameNumber) << 32 | static_cast<uint64_t>(streamId);
+ auto it = mInflightBufferMap.find(key);
+ if (it == mInflightBufferMap.end()) return NAME_NOT_FOUND;
+ auto pair = it->second;
+ *buffer = pair.first;
+ int acquireFence = pair.second;
+ if (acquireFence > 0) {
+ ::close(acquireFence);
+ }
+ mInflightBufferMap.erase(it);
+ return OK;
+}
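
The in-flight buffer map keys pack the frame number into the high 32 bits and the stream ID into the low 32 bits. A small sketch of the packing and unpacking, assuming non-negative stream IDs as used above:

#include <cstdint>
#include <cstdio>

// Sketch of the key layout used by mInflightBufferMap:
// high 32 bits = frame number, low 32 bits = stream ID (assumed >= 0).
uint64_t makeKey(uint32_t frameNumber, int32_t streamId) {
    return (static_cast<uint64_t>(frameNumber) << 32) |
            static_cast<uint32_t>(streamId);
}

int main() {
    uint64_t key = makeKey(/*frameNumber*/ 57, /*streamId*/ 2);
    uint32_t frame  = static_cast<uint32_t>(key >> 32);
    uint32_t stream = static_cast<uint32_t>(key & 0xffffffffu);
    std::printf("frame %u, stream %u\n", frame, stream);
    return 0;
}
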
+
+std::pair<bool, uint64_t> Camera3Device::HalInterface::getBufferId(
+ const buffer_handle_t& buf, int streamId) {
+ std::lock_guard<std::mutex> lock(mBufferIdMapLock);
+
+ BufferIdMap& bIdMap = mBufferIdMaps.at(streamId);
+ auto it = bIdMap.find(buf);
+ if (it == bIdMap.end()) {
+ bIdMap[buf] = mNextBufferId++;
+ return std::make_pair(true, mNextBufferId - 1);
+ } else {
+ return std::make_pair(false, it->second);
+ }
+}
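
getBufferId() lets the HIDL path send each gralloc buffer handle across the HAL boundary only once per stream; later requests carry just the cached 64-bit ID. A simplified, standalone sketch of that first-seen logic, using a plain pointer as the handle stand-in (the real code keys on buffer_handle_t contents via BufferHasher):

#include <cstdint>
#include <cstdio>
#include <unordered_map>
#include <utility>

// Sketch only: pointer-keyed stand-in for the per-stream BufferIdMap.
using BufferIdMap = std::unordered_map<const void*, uint64_t>;

std::pair<bool, uint64_t> getBufferId(BufferIdMap& map, const void* buf,
                                      uint64_t& nextId) {
    auto it = map.find(buf);
    if (it == map.end()) {
        map[buf] = nextId++;
        return {true, nextId - 1};   // newly seen: caller must ship the handle
    }
    return {false, it->second};      // already known: the ID alone is enough
}

int main() {
    BufferIdMap map;
    uint64_t nextId = 1;             // 0 is reserved for "no buffer"
    int dummy = 0;
    auto first  = getBufferId(map, &dummy, nextId);
    auto second = getBufferId(map, &dummy, nextId);
    std::printf("first: new=%d id=%llu, second: new=%d id=%llu\n",
                first.first,  (unsigned long long) first.second,
                second.first, (unsigned long long) second.second);
    return 0;
}
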
+
+/**
* RequestThread inner class methods
*/
Camera3Device::RequestThread::RequestThread(wp<Camera3Device> parent,
sp<StatusTracker> statusTracker,
- camera3_device_t *hal3Device,
+ HalInterface* interface,
+ uint32_t deviceVersion,
bool aeLockAvailable) :
Thread(/*canCallJava*/false),
mParent(parent),
mStatusTracker(statusTracker),
- mHal3Device(hal3Device),
+ mInterface(interface),
+ mDeviceVersion(deviceVersion),
mListener(nullptr),
mId(getId(parent)),
mReconfigured(false),
@@ -2789,6 +3495,8 @@
mStatusId = statusTracker->addComponent();
}
+Camera3Device::RequestThread::~RequestThread() {}
+
void Camera3Device::RequestThread::setNotificationListener(
wp<NotificationListener> listener) {
Mutex::Autolock l(mRequestLock);
@@ -2843,10 +3551,11 @@
return OK;
}
-int Camera3Device::RequestThread::getId(const wp<Camera3Device> &device) {
+const String8& Camera3Device::RequestThread::getId(const wp<Camera3Device> &device) {
+ static String8 deadId("<DeadDevice>");
sp<Camera3Device> d = device.promote();
- if (d != NULL) return d->mId;
- return 0;
+ if (d != nullptr) return d->mId;
+ return deadId;
}
status_t Camera3Device::RequestThread::queueTriggerLocked(
@@ -2976,8 +3685,8 @@
ATRACE_CALL();
Mutex::Autolock l(mFlushLock);
- if (mHal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_1) {
- return mHal3Device->ops->flush(mHal3Device);
+ if (mDeviceVersion >= CAMERA_DEVICE_API_VERSION_3_1) {
+ return mInterface->flush();
}
return -ENOTSUP;
@@ -3025,7 +3734,7 @@
request->mAeTriggerCancelOverride.applyAeLock = false;
request->mAeTriggerCancelOverride.applyAePrecaptureTrigger = false;
- if (mHal3Device->common.version > CAMERA_DEVICE_API_VERSION_3_2) {
+ if (mDeviceVersion > CAMERA_DEVICE_API_VERSION_3_2) {
return;
}
@@ -3165,9 +3874,7 @@
for (auto& nextRequest : mNextRequests) {
// Submit request and block until ready for next one
ATRACE_ASYNC_BEGIN("frame capture", nextRequest.halRequest.frame_number);
- ATRACE_BEGIN("camera3->process_capture_request");
- res = mHal3Device->ops->process_capture_request(mHal3Device, &nextRequest.halRequest);
- ATRACE_END();
+ res = mInterface->processCaptureRequest(&nextRequest.halRequest);
if (res != OK) {
// Should only get a failure here for malformed requests or device-level
@@ -3341,6 +4048,14 @@
return TIMED_OUT;
}
halRequest->num_output_buffers++;
+
+ res = outputStream->notifyRequestedSurfaces(halRequest->frame_number,
+ captureRequest->mOutputSurfaces[i]);
+ if (res != OK) {
+ ALOGE("RequestThread: Cannot register output surfaces: %s (%d)",
+ strerror(-res), res);
+ return INVALID_OPERATION;
+ }
}
totalNumBuffers += halRequest->num_output_buffers;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index ac9dfc2..9b869a9 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -17,6 +17,9 @@
#ifndef ANDROID_SERVERS_CAMERA3DEVICE_H
#define ANDROID_SERVERS_CAMERA3DEVICE_H
+#include <utility>
+#include <unordered_map>
+
#include <utils/Condition.h>
#include <utils/Errors.h>
#include <utils/List.h>
@@ -24,7 +27,12 @@
#include <utils/Thread.h>
#include <utils/KeyedVector.h>
#include <utils/Timers.h>
+
+#include <android/hardware/camera/device/3.2/ICameraDevice.h>
+#include <android/hardware/camera/device/3.2/ICameraDeviceSession.h>
+#include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
#include <hardware/camera3.h>
+
#include <camera/CaptureResult.h>
#include "common/CameraDeviceBase.h"
@@ -55,17 +63,18 @@
class Camera3OutputStreamInterface;
class Camera3StreamInterface;
-}
+} // namespace camera3
/**
* CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_3_0 or higher.
*/
class Camera3Device :
public CameraDeviceBase,
+ virtual public hardware::camera::device::V3_2::ICameraDeviceCallback,
private camera3_callback_ops {
public:
- explicit Camera3Device(int id);
+ explicit Camera3Device(const String8& id);
virtual ~Camera3Device();
@@ -73,91 +82,100 @@
* CameraDeviceBase interface
*/
- virtual int getId() const;
+ const String8& getId() const override;
// Transitions to idle state on success.
- virtual status_t initialize(CameraModule *module);
- virtual status_t disconnect();
- virtual status_t dump(int fd, const Vector<String16> &args);
- virtual const CameraMetadata& info() const;
+ status_t initialize(CameraModule *module) override;
+ status_t initialize(sp<CameraProviderManager> manager) override;
+ status_t disconnect() override;
+ status_t dump(int fd, const Vector<String16> &args) override;
+ const CameraMetadata& info() const override;
// Capture and setStreamingRequest will configure streams if currently in
// idle state
- virtual status_t capture(CameraMetadata &request, int64_t *lastFrameNumber = NULL);
- virtual status_t captureList(const List<const CameraMetadata> &requests,
- int64_t *lastFrameNumber = NULL);
- virtual status_t setStreamingRequest(const CameraMetadata &request,
- int64_t *lastFrameNumber = NULL);
- virtual status_t setStreamingRequestList(const List<const CameraMetadata> &requests,
- int64_t *lastFrameNumber = NULL);
- virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL);
+ status_t capture(CameraMetadata &request, int64_t *lastFrameNumber = NULL) override;
+ status_t captureList(const List<const CameraMetadata> &requests,
+ const std::list<const SurfaceMap> &surfaceMaps,
+ int64_t *lastFrameNumber = NULL) override;
+ status_t setStreamingRequest(const CameraMetadata &request,
+ int64_t *lastFrameNumber = NULL) override;
+ status_t setStreamingRequestList(const List<const CameraMetadata> &requests,
+ const std::list<const SurfaceMap> &surfaceMaps,
+ int64_t *lastFrameNumber = NULL) override;
+ status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL) override;
- virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout);
+ status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) override;
// Actual stream creation/deletion is delayed until first request is submitted
// If adding streams while actively capturing, will pause device before adding
// stream, reconfiguring device, and unpausing. If the client create a stream
// with nullptr consumer surface, the client must then call setConsumer()
// and finish the stream configuration before starting output streaming.
- virtual status_t createStream(sp<Surface> consumer,
+ status_t createStream(sp<Surface> consumer,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
- uint32_t consumerUsage = 0);
- virtual status_t createInputStream(
+ uint32_t consumerUsage = 0) override;
+ status_t createStream(const std::vector<sp<Surface>>& consumers,
+ bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+ android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id,
+ int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
+ uint32_t consumerUsage = 0) override;
+
+ status_t createInputStream(
uint32_t width, uint32_t height, int format,
- int *id);
- virtual status_t createZslStream(
+ int *id) override;
+ status_t createZslStream(
uint32_t width, uint32_t height,
int depth,
/*out*/
int *id,
sp<camera3::Camera3ZslStream>* zslStream);
- virtual status_t createReprocessStreamFromStream(int outputId, int *id);
+ status_t createReprocessStreamFromStream(int outputId, int *id) override;
- virtual status_t getStreamInfo(int id,
+ status_t getStreamInfo(int id,
uint32_t *width, uint32_t *height,
- uint32_t *format, android_dataspace *dataSpace);
- virtual status_t setStreamTransform(int id, int transform);
+ uint32_t *format, android_dataspace *dataSpace) override;
+ status_t setStreamTransform(int id, int transform) override;
- virtual status_t deleteStream(int id);
- virtual status_t deleteReprocessStream(int id);
+ status_t deleteStream(int id) override;
+ status_t deleteReprocessStream(int id) override;
- virtual status_t configureStreams(bool isConstraiedHighSpeed = false);
- virtual status_t getInputBufferProducer(
- sp<IGraphicBufferProducer> *producer);
+ status_t configureStreams(bool isConstraiedHighSpeed = false) override;
+ status_t getInputBufferProducer(
+ sp<IGraphicBufferProducer> *producer) override;
- virtual status_t createDefaultRequest(int templateId, CameraMetadata *request);
+ status_t createDefaultRequest(int templateId, CameraMetadata *request) override;
// Transitions to the idle state on success
- virtual status_t waitUntilDrained();
+ status_t waitUntilDrained() override;
- virtual status_t setNotifyCallback(wp<NotificationListener> listener);
- virtual bool willNotify3A();
- virtual status_t waitForNextFrame(nsecs_t timeout);
- virtual status_t getNextResult(CaptureResult *frame);
+ status_t setNotifyCallback(wp<NotificationListener> listener) override;
+ bool willNotify3A() override;
+ status_t waitForNextFrame(nsecs_t timeout) override;
+ status_t getNextResult(CaptureResult *frame) override;
- virtual status_t triggerAutofocus(uint32_t id);
- virtual status_t triggerCancelAutofocus(uint32_t id);
- virtual status_t triggerPrecaptureMetering(uint32_t id);
+ status_t triggerAutofocus(uint32_t id) override;
+ status_t triggerCancelAutofocus(uint32_t id) override;
+ status_t triggerPrecaptureMetering(uint32_t id) override;
- virtual status_t pushReprocessBuffer(int reprocessStreamId,
- buffer_handle_t *buffer, wp<BufferReleasedListener> listener);
+ status_t pushReprocessBuffer(int reprocessStreamId,
+ buffer_handle_t *buffer, wp<BufferReleasedListener> listener) override;
- virtual status_t flush(int64_t *lastFrameNumber = NULL);
+ status_t flush(int64_t *lastFrameNumber = NULL) override;
- virtual status_t prepare(int streamId);
+ status_t prepare(int streamId) override;
- virtual status_t tearDown(int streamId);
+ status_t tearDown(int streamId) override;
- virtual status_t addBufferListenerForStream(int streamId,
- wp<camera3::Camera3StreamBufferListener> listener);
+ status_t addBufferListenerForStream(int streamId,
+ wp<camera3::Camera3StreamBufferListener> listener) override;
- virtual status_t prepare(int maxCount, int streamId);
+ status_t prepare(int maxCount, int streamId) override;
- virtual uint32_t getDeviceVersion();
+ uint32_t getDeviceVersion() override;
- virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
+ ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const override;
ssize_t getPointCloudBufferSize() const;
ssize_t getRawOpaqueBufferSize(int32_t width, int32_t height) const;
@@ -168,7 +186,7 @@
* Set the deferred consumer surface to the output stream and finish the deferred
* consumer configuration.
*/
- virtual status_t setConsumerSurface(int streamId, sp<Surface> consumer);
+ status_t setConsumerSurface(int streamId, sp<Surface> consumer) override;
private:
static const size_t kDumpLockAttempts = 10;
@@ -198,14 +216,106 @@
Mutex mLock;
// Camera device ID
- const int mId;
+ const String8 mId;
// Flag indicating is the current active stream configuration is constrained high speed.
bool mIsConstrainedHighSpeedConfiguration;
/**** Scope for mLock ****/
- camera3_device_t *mHal3Device;
+ /**
+ * Adapter for legacy HAL / HIDL HAL interface calls; calls either into legacy HALv3 or the
+ * HIDL HALv3 interfaces.
+ */
+ class HalInterface {
+ public:
+ HalInterface(camera3_device_t *device);
+ HalInterface(sp<hardware::camera::device::V3_2::ICameraDeviceSession> &session);
+ HalInterface(const HalInterface &other);
+ HalInterface();
+
+ // Returns true if constructed with a valid device or session, and not yet cleared
+ bool valid();
+
+ // Reset this HalInterface object (does not call close())
+ void clear();
+
+ // Calls into the HAL interface
+
+ // Caller takes ownership of requestTemplate
+ status_t constructDefaultRequestSettings(camera3_request_template_t templateId,
+ /*out*/ camera_metadata_t **requestTemplate);
+ status_t configureStreams(/*inout*/ camera3_stream_configuration *config);
+ status_t processCaptureRequest(camera3_capture_request_t *request);
+ status_t flush();
+ status_t dump(int fd);
+ status_t close();
+
+ // Find a buffer_handle_t based on frame number and stream ID
+ status_t popInflightBuffer(int32_t frameNumber, int32_t streamId,
+ /*out*/ buffer_handle_t **buffer);
+
+ private:
+ camera3_device_t *mHal3Device;
+ sp<hardware::camera::device::V3_2::ICameraDeviceSession> mHidlSession;
+
+ std::mutex mInflightLock;
+
+ status_t pushInflightBufferLocked(int32_t frameNumber, int32_t streamId,
+ buffer_handle_t *buffer, int acquireFence);
+ // Cache of buffer handles keyed off (frameNumber << 32 | streamId)
+ // value is a pair of (buffer_handle_t*, acquire_fence FD)
+ std::unordered_map<uint64_t, std::pair<buffer_handle_t*, int>> mInflightBufferMap;
+
+ struct BufferHasher {
+ size_t operator()(const buffer_handle_t& buf) const {
+ if (buf == nullptr)
+ return 0;
+
+ size_t result = 1;
+ result = 31 * result + buf->numFds;
+ result = 31 * result + buf->numInts;
+ int length = buf->numFds + buf->numInts;
+ for (int i = 0; i < length; i++) {
+ result = 31 * result + buf->data[i];
+ }
+ return result;
+ }
+ };
+
+ struct BufferComparator {
+ bool operator()(const buffer_handle_t& buf1, const buffer_handle_t& buf2) const {
+ if (buf1->numFds == buf2->numFds && buf1->numInts == buf2->numInts) {
+ int length = buf1->numFds + buf1->numInts;
+ for (int i = 0; i < length; i++) {
+ if (buf1->data[i] != buf2->data[i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ }
+ };
+
+ std::mutex mBufferIdMapLock; // protecting mBufferIdMaps and mNextBufferId
+ typedef std::unordered_map<const buffer_handle_t, uint64_t,
+ BufferHasher, BufferComparator> BufferIdMap;
+ // stream ID -> per stream buffer ID map
+ std::unordered_map<int, BufferIdMap> mBufferIdMaps;
+ uint64_t mNextBufferId = 1; // 0 means no buffer
+ static const uint64_t BUFFER_ID_NO_BUFFER = 0;
+
+ // method to extract buffer's unique ID
+ // TODO: we should switch to using the gralloc mapper's getBackingStore API
+ // once we run in binderized gralloc mode; until that is ready,
+ // we rely on the conventional buffer queue behavior where a
+ // buffer_handle_t's FDs won't change.
+ // return pair of (newlySeenBuffer?, bufferId)
+ std::pair<bool, uint64_t> getBufferId(const buffer_handle_t& buf, int streamId);
+ };
+
+ std::unique_ptr<HalInterface> mInterface;
CameraMetadata mDeviceInfo;
@@ -288,6 +398,7 @@
camera3_stream_buffer_t mInputBuffer;
Vector<sp<camera3::Camera3OutputStreamInterface> >
mOutputStreams;
+ SurfaceMap mOutputSurfaces;
CaptureResultExtras mResultExtras;
// Used to cancel AE precapture trigger for devices doesn't support
// CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
@@ -306,17 +417,41 @@
status_t convertMetadataListToRequestListLocked(
const List<const CameraMetadata> &metadataList,
+ const std::list<const SurfaceMap> &surfaceMaps,
bool repeating,
/*out*/
RequestList *requestList);
- status_t submitRequestsHelper(const List<const CameraMetadata> &requests, bool repeating,
+ void convertToRequestList(List<const CameraMetadata>& requests,
+ std::list<const SurfaceMap>& surfaceMaps,
+ const CameraMetadata& request);
+
+ status_t submitRequestsHelper(const List<const CameraMetadata> &requests,
+ const std::list<const SurfaceMap> &surfaceMaps,
+ bool repeating,
int64_t *lastFrameNumber = NULL);
+
+ /**
+ * Implementation of android::hardware::camera::device::V3_2::ICameraDeviceCallback
+ */
+
+ hardware::Return<void> processCaptureResult(
+ const hardware::camera::device::V3_2::CaptureResult& result) override;
+ hardware::Return<void> notify(
+ const hardware::camera::device::V3_2::NotifyMsg& msg) override;
+
+ /**
+ * Common initialization code shared by both HAL paths
+ *
+ * Must be called with mLock and mInterfaceLock held.
+ */
+ status_t initializeCommonLocked();
+
/**
* Get the last request submitted to the hal by the request thread.
*
- * Takes mLock.
+ * Must be called with mLock held.
*/
virtual CameraMetadata getLatestRequestLocked();
@@ -365,13 +500,15 @@
* Do common work for setting up a streaming or single capture request.
* On success, will transition to ACTIVE if in IDLE.
*/
- sp<CaptureRequest> setUpRequestLocked(const CameraMetadata &request);
+ sp<CaptureRequest> setUpRequestLocked(const CameraMetadata &request,
+ const SurfaceMap &surfaceMap);
/**
* Build a CaptureRequest request from the CameraDeviceBase request
* settings.
*/
- sp<CaptureRequest> createCaptureRequest(const CameraMetadata &request);
+ sp<CaptureRequest> createCaptureRequest(const CameraMetadata &request,
+ const SurfaceMap &surfaceMap);
/**
* Take the currently-defined set of streams and configure the HAL to use
@@ -434,6 +571,24 @@
*/
static android_dataspace mapToLegacyDataspace(android_dataspace dataSpace);
+ /**
+ * Helper functions to map between framework and HIDL values
+ */
+ static hardware::graphics::common::V1_0::PixelFormat mapToPixelFormat(int frameworkFormat);
+ static hardware::camera::device::V3_2::DataspaceFlags mapToHidlDataspace(
+ android_dataspace dataSpace);
+ static hardware::camera::device::V3_2::ConsumerUsageFlags mapToConsumerUsage(uint32_t usage);
+ static hardware::camera::device::V3_2::StreamRotation mapToStreamRotation(
+ camera3_stream_rotation_t rotation);
+ static hardware::camera::device::V3_2::StreamConfigurationMode mapToStreamConfigurationMode(
+ camera3_stream_configuration_mode_t operationMode);
+ static camera3_buffer_status_t mapHidlBufferStatus(hardware::camera::device::V3_2::BufferStatus status);
+ static int mapToFrameworkFormat(hardware::graphics::common::V1_0::PixelFormat pixelFormat);
+ static uint32_t mapConsumerToFrameworkUsage(
+ hardware::camera::device::V3_2::ConsumerUsageFlags usage);
+ static uint32_t mapProducerToFrameworkUsage(
+ hardware::camera::device::V3_2::ProducerUsageFlags usage);
+
struct RequestTrigger {
// Metadata tag number, e.g. android.control.aePrecaptureTrigger
uint32_t metadataTag;
@@ -460,8 +615,10 @@
RequestThread(wp<Camera3Device> parent,
sp<camera3::StatusTracker> statusTracker,
- camera3_device_t *hal3Device,
+ HalInterface* interface,
+ uint32_t deviceVersion,
bool aeLockAvailable);
+ ~RequestThread();
void setNotificationListener(wp<NotificationListener> listener);
@@ -541,7 +698,7 @@
virtual bool threadLoop();
private:
- static int getId(const wp<Camera3Device> &device);
+ static const String8& getId(const wp<Camera3Device> &device);
status_t queueTriggerLocked(RequestTrigger trigger);
// Mix-in queued triggers into this request
@@ -602,11 +759,12 @@
wp<Camera3Device> mParent;
wp<camera3::StatusTracker> mStatusTracker;
- camera3_device_t *mHal3Device;
+ HalInterface* mInterface;
+ uint32_t mDeviceVersion;
wp<NotificationListener> mListener;
- const int mId; // The camera ID
+ const String8& mId; // The camera ID
int mStatusId; // The RequestThread's component ID for
// status tracking
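
For context, a minimal sketch of how the SurfaceMap-aware helpers declared above might be fed from inside Camera3Device. This assumes SurfaceMap maps a stream ID to the indices of the requested surfaces within that stream; streamId, requestSettings and lastFrameNumber are illustrative placeholders, not names from this patch.

    List<const CameraMetadata> settingsList;
    std::list<const SurfaceMap> surfaceMapList;

    SurfaceMap surfaceMap;
    surfaceMap[streamId] = {0, 2};            // request only surfaces 0 and 2 of this shared stream

    settingsList.push_back(requestSettings);
    surfaceMapList.push_back(surfaceMap);     // one SurfaceMap entry per request in the list

    // Single (non-repeating) capture; lastFrameNumber is filled on success.
    status_t res = submitRequestsHelper(settingsList, surfaceMapList,
                                        /*repeating*/ false, &lastFrameNumber);
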
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index 5123785..7f61c7a 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -83,6 +83,14 @@
return OK;
}
+status_t Camera3DummyStream::notifyRequestedSurfaces(uint32_t frame_number,
+ const std::vector<size_t>& surface_ids) {
+ (void) frame_number;
+ (void) surface_ids;
+ // Do nothing
+ return OK;
+}
+
status_t Camera3DummyStream::configureQueueLocked() {
// Do nothing
return OK;
@@ -103,7 +111,7 @@
return false;
}
-bool Camera3DummyStream::isConsumerConfigurationDeferred() const {
+bool Camera3DummyStream::isConsumerConfigurationDeferred(size_t /*surface_id*/) const {
return false;
}
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 18e8a23..37efbbb 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -56,6 +56,9 @@
virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
+ virtual status_t notifyRequestedSurfaces(uint32_t frame_number,
+ const std::vector<size_t>& surface_ids);
+
/**
* Return if this output stream is for video encoding.
*/
@@ -64,7 +67,7 @@
/**
* Return if the consumer configuration of this stream is deferred.
*/
- virtual bool isConsumerConfigurationDeferred() const;
+ virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
/**
* Set the consumer surface to the output stream.
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 7229929..1e76a27 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -124,6 +124,7 @@
int format,
android_dataspace dataSpace,
camera3_stream_rotation_t rotation,
+ uint32_t consumerUsage, nsecs_t timestampOffset,
int setId) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
@@ -132,7 +133,8 @@
mTraceFirstBuffer(true),
mUseMonoTimestamp(false),
mUseBufferManager(false),
- mConsumerUsage(0) {
+ mTimestampOffset(timestampOffset),
+ mConsumerUsage(consumerUsage) {
if (setId > CAMERA3_STREAM_SET_ID_INVALID) {
mBufferReleasedListener = new BufferReleasedListener(this);
@@ -373,6 +375,24 @@
return res;
}
+ if ((res = configureConsumerQueueLocked()) != OK) {
+ return res;
+ }
+
+ // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
+ // We need to skip these cases because the timeout would disable the non-blocking (async) mode.
+ if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
+ mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
+ }
+
+ return OK;
+}
+
+status_t Camera3OutputStream::configureConsumerQueueLocked() {
+ status_t res;
+
+ mTraceFirstBuffer = true;
+
ALOG_ASSERT(mConsumer != 0, "mConsumer should never be NULL");
// Configure consumer-side ANativeWindow interface. The listener may be used
@@ -470,12 +490,7 @@
if (res != OK) {
ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
__FUNCTION__, mTransform, strerror(-res), res);
- }
-
- // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
- // We need skip these cases as timeout will disable the non-blocking (async) mode.
- if (!(isConsumedByHWComposer() || isConsumedByHWTexture())) {
- mConsumer->setDequeueTimeout(kDequeueBufferTimeout);
+ return res;
}
/**
@@ -568,14 +583,24 @@
status_t Camera3OutputStream::getEndpointUsage(uint32_t *usage) const {
status_t res;
- int32_t u = 0;
+
if (mConsumer == nullptr) {
// mConsumerUsage was sanitized before the Camera3OutputStream was constructed.
*usage = mConsumerUsage;
return OK;
}
- res = static_cast<ANativeWindow*>(mConsumer.get())->query(mConsumer.get(),
+ res = getEndpointUsageForSurface(usage, mConsumer);
+
+ return res;
+}
+
+status_t Camera3OutputStream::getEndpointUsageForSurface(uint32_t *usage,
+ const sp<Surface>& surface) const {
+ status_t res;
+ int32_t u = 0;
+
+ res = static_cast<ANativeWindow*>(surface.get())->query(surface.get(),
NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
// If an opaque output stream's endpoint is ImageReader, add
@@ -587,8 +612,8 @@
// 3. GRALLOC_USAGE_HW_COMPOSER
// 4. GRALLOC_USAGE_HW_VIDEO_ENCODER
if (camera3_stream::format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
- (u & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_COMPOSER |
- GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
+ (u & (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER |
+ GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_VIDEO_ENCODER)) == 0) {
u |= GRALLOC_USAGE_HW_CAMERA_ZSL;
}
@@ -676,8 +701,17 @@
return OK;
}
-bool Camera3OutputStream::isConsumerConfigurationDeferred() const {
+status_t Camera3OutputStream::notifyRequestedSurfaces(uint32_t /*frame_number*/,
+ const std::vector<size_t>& /*surface_ids*/) {
+ return OK;
+}
+
+bool Camera3OutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
Mutex::Autolock l(mLock);
+
+ if (surface_id != 0) {
+ ALOGE("%s: surface_id for Camera3OutputStream should be 0!", __FUNCTION__);
+ }
return mConsumer == nullptr;
}
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 12d497e..26ea63f 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -135,7 +135,7 @@
/**
* Return if the consumer configuration of this stream is deferred.
*/
- virtual bool isConsumerConfigurationDeferred() const;
+ virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
/**
* Set the consumer surface to the output stream.
@@ -158,6 +158,9 @@
virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);
+ virtual status_t notifyRequestedSurfaces(uint32_t frame_number,
+ const std::vector<size_t>& surface_ids);
+
/**
* Set the graphic buffer manager to get/return the stream buffers.
*
@@ -169,6 +172,7 @@
Camera3OutputStream(int id, camera3_stream_type_t type,
uint32_t width, uint32_t height, int format,
android_dataspace dataSpace, camera3_stream_rotation_t rotation,
+ uint32_t consumerUsage = 0, nsecs_t timestampOffset = 0,
int setId = CAMERA3_STREAM_SET_ID_INVALID);
/**
@@ -183,12 +187,19 @@
virtual status_t disconnectLocked();
+ status_t getEndpointUsageForSurface(uint32_t *usage,
+ const sp<Surface>& surface) const;
+ status_t configureConsumerQueueLocked();
+
+ // Consumer as the output of camera HAL
sp<Surface> mConsumer;
- private:
+ uint32_t getPresetConsumerUsage() const { return mConsumerUsage; }
static const nsecs_t kDequeueBufferTimeout = 1000000000; // 1 sec
+ private:
+
int mTransform;
virtual status_t setTransformLocked(int transform);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index 3f83c89..6a911c6 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -43,7 +43,7 @@
/**
* Return if the consumer configuration of this stream is deferred.
*/
- virtual bool isConsumerConfigurationDeferred() const = 0;
+ virtual bool isConsumerConfigurationDeferred(size_t surface_id = 0) const = 0;
/**
* Set the consumer surface to the output stream.
@@ -59,6 +59,20 @@
*
*/
virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd) = 0;
+
+ /**
+ * Notify which surfaces are requested for a particular frame number.
+ *
+ * Multiple surfaces may share the same output stream, but a request may
+ * target only a subset of those surfaces. In this case, the
+ * Camera3OutputStreamInterface object needs to manage the output surfaces on
+ * a per-request basis.
+ *
+ * If there is only one surface for this output stream, calling this
+ * function is a no-op.
+ */
+ virtual status_t notifyRequestedSurfaces(uint32_t frame_number,
+ const std::vector<size_t>& surface_ids) = 0;
};
} // namespace camera3
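
A hedged sketch of the per-request call this new interface method enables. notifySubsetForFrame is a hypothetical helper (not part of this patch), and the surface indices are placeholders; it assumes the Camera3OutputStreamInterface.h header above is included.

    // Sketch only: deliver this frame to surfaces 0 and 2 of a shared stream.
    // For a single-surface stream the call is a no-op, per the contract above.
    status_t notifySubsetForFrame(
            const sp<camera3::Camera3OutputStreamInterface>& stream,
            uint32_t frameNumber) {
        std::vector<size_t> surfaceIds = {0, 2};
        return stream->notifyRequestedSurfaces(frameNumber, surfaceIds);
    }
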
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
new file mode 100644
index 0000000..b419e06
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Camera3SharedOutputStream.h"
+
+namespace android {
+
+namespace camera3 {
+
+Camera3SharedOutputStream::Camera3SharedOutputStream(int id,
+ const std::vector<sp<Surface>>& surfaces,
+ bool hasDeferredSurface,
+ uint32_t width, uint32_t height, int format,
+ uint32_t consumerUsage, android_dataspace dataSpace,
+ camera3_stream_rotation_t rotation,
+ nsecs_t timestampOffset, int setId) :
+ Camera3OutputStream(id, CAMERA3_STREAM_OUTPUT, width, height,
+ format, dataSpace, rotation, consumerUsage,
+ timestampOffset, setId),
+ mSurfaces(surfaces),
+ mDeferred(hasDeferredSurface) {
+}
+
+Camera3SharedOutputStream::~Camera3SharedOutputStream() {
+ disconnectLocked();
+}
+
+status_t Camera3SharedOutputStream::connectStreamSplitterLocked() {
+ status_t res = OK;
+
+ mStreamSplitter = new Camera3StreamSplitter();
+
+ uint32_t usage;
+ getEndpointUsage(&usage);
+
+ res = mStreamSplitter->connect(mSurfaces, usage, camera3_stream::max_buffers, mConsumer);
+ if (res != OK) {
+ ALOGE("%s: Failed to connect to stream splitter: %s(%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+
+ return res;
+}
+
+status_t Camera3SharedOutputStream::notifyRequestedSurfaces(uint32_t /*frame_number*/,
+ const std::vector<size_t>& surface_ids) {
+ Mutex::Autolock l(mLock);
+ status_t res = OK;
+
+ if (mStreamSplitter != nullptr) {
+ res = mStreamSplitter->notifyRequestedSurfaces(surface_ids);
+ }
+
+ return res;
+}
+
+bool Camera3SharedOutputStream::isConsumerConfigurationDeferred(size_t surface_id) const {
+ Mutex::Autolock l(mLock);
+ return (mDeferred && surface_id >= mSurfaces.size());
+}
+
+status_t Camera3SharedOutputStream::setConsumer(sp<Surface> surface) {
+ if (surface == nullptr) {
+ ALOGE("%s: it's illegal to set a null consumer surface!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ if (!mDeferred) {
+ ALOGE("%s: Current stream isn't deferred!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+
+ mSurfaces.push_back(surface);
+
+ return mStreamSplitter->addOutput(surface, camera3_stream::max_buffers);
+}
+
+status_t Camera3SharedOutputStream::configureQueueLocked() {
+ status_t res;
+
+ if ((res = Camera3IOStreamBase::configureQueueLocked()) != OK) {
+ return res;
+ }
+
+ res = connectStreamSplitterLocked();
+ if (res != OK) {
+ ALOGE("Cannot connect to stream splitter: %s(%d)", strerror(-res), res);
+ return res;
+ }
+
+ res = configureConsumerQueueLocked();
+ if (res != OK) {
+ ALOGE("Failed to configureConsumerQueueLocked: %s(%d)", strerror(-res), res);
+ return res;
+ }
+
+ return OK;
+}
+
+status_t Camera3SharedOutputStream::disconnectLocked() {
+ status_t res;
+ res = Camera3OutputStream::disconnectLocked();
+
+ if (mStreamSplitter != nullptr) {
+ mStreamSplitter->disconnect();
+ }
+
+ return res;
+}
+
+status_t Camera3SharedOutputStream::getEndpointUsage(uint32_t *usage) const {
+
+ status_t res = OK;
+ uint32_t u = 0;
+
+ if (mConsumer == nullptr) {
+ // Called before shared buffer queue is constructed.
+ *usage = getPresetConsumerUsage();
+
+ for (auto surface : mSurfaces) {
+ if (surface != nullptr) {
+ res = getEndpointUsageForSurface(&u, surface);
+ *usage |= u;
+ }
+ }
+ } else {
+ // Called after shared buffer queue is constructed.
+ res = getEndpointUsageForSurface(&u, mConsumer);
+ *usage |= u;
+ }
+
+ return res;
+}
+
+} // namespace camera3
+
+} // namespace android
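
A hedged sketch of how a shared stream with a deferred slot might be created and later completed. previewSurface, recordingSurface and lateSurface are placeholders, and the size/format/usage values are illustrative only.

    // Two surfaces are known up front; a third is deferred.
    std::vector<sp<Surface>> surfaces = { previewSurface, recordingSurface };
    sp<Camera3SharedOutputStream> stream = new Camera3SharedOutputStream(
            /*id*/ 0, surfaces, /*hasDeferredSurface*/ true,
            /*width*/ 1920, /*height*/ 1080,
            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            /*consumerUsage*/ GRALLOC_USAGE_HW_TEXTURE,
            HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0,
            /*timestampOffset*/ 0);

    // Indices beyond the provided surfaces are still deferred at this point.
    bool deferred = stream->isConsumerConfigurationDeferred(/*surface_id*/ 2);

    // After the stream is configured, the late surface is handed to the
    // splitter as an additional output.
    status_t res = stream->setConsumer(lateSurface);

The deferred surface has to arrive after stream configuration, since setConsumer() forwards straight to the stream splitter that configureQueueLocked() creates.
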
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
new file mode 100644
index 0000000..1b37d7c
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA3_SHARED_OUTPUT_STREAM_H
+#define ANDROID_SERVERS_CAMERA3_SHARED_OUTPUT_STREAM_H
+
+#include "Camera3StreamSplitter.h"
+#include "Camera3OutputStream.h"
+
+namespace android {
+
+namespace camera3 {
+
+class Camera3SharedOutputStream :
+ public Camera3OutputStream {
+public:
+ /**
+ * Set up a stream for formats that have 2 dimensions, with multiple
+ * surfaces. A valid stream set id needs to be set to support buffer
+ * sharing between multiple streams.
+ */
+ Camera3SharedOutputStream(int id, const std::vector<sp<Surface>>& surfaces,
+ bool hasDeferredSurface, uint32_t width, uint32_t height, int format,
+ uint32_t consumerUsage, android_dataspace dataSpace,
+ camera3_stream_rotation_t rotation, nsecs_t timestampOffset,
+ int setId = CAMERA3_STREAM_SET_ID_INVALID);
+
+ virtual ~Camera3SharedOutputStream();
+
+ virtual status_t notifyRequestedSurfaces(uint32_t frame_number,
+ const std::vector<size_t>& surface_ids);
+
+ virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;
+
+ virtual status_t setConsumer(sp<Surface> consumer);
+
+private:
+ // Surfaces passed in constructor from app
+ std::vector<sp<Surface> > mSurfaces;
+
+ /**
+ * The Camera3StreamSplitter object this stream uses for stream
+ * sharing.
+ */
+ sp<Camera3StreamSplitter> mStreamSplitter;
+
+ /**
+ * Initialize stream splitter.
+ */
+ status_t connectStreamSplitterLocked();
+
+ virtual status_t configureQueueLocked();
+
+ virtual status_t disconnectLocked();
+
+ virtual status_t getEndpointUsage(uint32_t *usage) const;
+
+ bool mDeferred;
+
+}; // class Camera3SharedOutputStream
+
+} // namespace camera3
+
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA3_SHARED_OUTPUT_STREAM_H
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 3ffd9d1..c3b7565 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -166,7 +166,7 @@
return (mState == STATE_IN_CONFIG) || (mState == STATE_IN_RECONFIG);
}
-status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) {
+status_t Camera3Stream::finishConfiguration() {
ATRACE_CALL();
Mutex::Autolock l(mLock);
switch (mState) {
@@ -216,14 +216,6 @@
return res;
}
- res = registerBuffersLocked(hal3Device);
- if (res != OK) {
- ALOGE("%s: Unable to register stream buffers with HAL: %s (%d)",
- __FUNCTION__, strerror(-res), res);
- mState = STATE_ERROR;
- return res;
- }
-
mState = STATE_CONFIGURED;
return res;
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 1ff215d..471b393 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -144,6 +144,10 @@
int getFormat() const;
android_dataspace getDataSpace() const;
+ camera3_stream* asHalStream() override {
+ return this;
+ }
+
/**
* Start the stream configuration process. Returns a handle to the stream's
* information to be passed into the HAL device's configure_streams call.
@@ -165,11 +169,10 @@
bool isConfiguring() const;
/**
- * Completes the stream configuration process. During this call, the stream
- * may call the device's register_stream_buffers() method. The stream
- * information structure returned by startConfiguration() may no longer be
- * modified after this call, but can still be read until the destruction of
- * the stream.
+ * Completes the stream configuration process. The stream information
+ * structure returned by startConfiguration() may no longer be modified
+ * after this call, but can still be read until the destruction of the
+ * stream.
*
* Returns:
* OK on a successful configuration
@@ -178,7 +181,7 @@
* INVALID_OPERATION in case connecting to the consumer failed or consumer
* doesn't exist yet.
*/
- status_t finishConfiguration(camera3_device *hal3Device);
+ status_t finishConfiguration();
/**
* Cancels the stream configuration process. This returns the stream to the
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 6cb7a54..ceea08a 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -72,6 +72,11 @@
virtual android_dataspace getDataSpace() const = 0;
/**
+ * Get a HAL3 handle for the stream, without starting stream configuration.
+ */
+ virtual camera3_stream* asHalStream() = 0;
+
+ /**
* Start the stream configuration process. Returns a handle to the stream's
* information to be passed into the HAL device's configure_streams call.
*
@@ -104,7 +109,7 @@
* NO_MEMORY in case of an error registering buffers
* INVALID_OPERATION in case connecting to the consumer failed
*/
- virtual status_t finishConfiguration(camera3_device *hal3Device) = 0;
+ virtual status_t finishConfiguration() = 0;
/**
* Cancels the stream configuration process. This returns the stream to the
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
new file mode 100644
index 0000000..b935141
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -0,0 +1,441 @@
+/*
+ * Copyright 2014,2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#define LOG_TAG "Camera3StreamSplitter"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <gui/BufferItem.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/BufferQueue.h>
+#include <gui/Surface.h>
+
+#include <ui/GraphicBuffer.h>
+
+#include <binder/ProcessState.h>
+
+#include <utils/Trace.h>
+
+#include "Camera3StreamSplitter.h"
+
+namespace android {
+
+status_t Camera3StreamSplitter::connect(const std::vector<sp<Surface> >& surfaces,
+ uint32_t consumerUsage, size_t hal_max_buffers,
+ sp<Surface>& consumer) {
+ if (consumer != nullptr) {
+ ALOGE("%s: output Surface is not NULL", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mMutex);
+ status_t res = OK;
+
+ if (mOutputs.size() > 0 || mConsumer != nullptr) {
+ ALOGE("%s: StreamSplitter already connected", __FUNCTION__);
+ return BAD_VALUE;
+ }
+
+ // Add output surfaces. This has to happen before creating the internal buffer
+ // queue, in order to determine the maximum consumer-side buffer count.
+ for (size_t i = 0; i < surfaces.size(); i++) {
+ if (surfaces[i] != nullptr) {
+ res = addOutputLocked(surfaces[i], hal_max_buffers,
+ OutputType::NonDeferred);
+ if (res != OK) {
+ ALOGE("%s: Failed to add output surface: %s(%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ }
+ }
+
+ // Create buffer queue for input
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+
+ mBufferItemConsumer = new BufferItemConsumer(mConsumer, consumerUsage,
+ mMaxConsumerBuffers);
+ if (mBufferItemConsumer == nullptr) {
+ return NO_MEMORY;
+ }
+ mConsumer->setConsumerName(getUniqueConsumerName());
+
+ mSurface = new Surface(mProducer);
+ if (mSurface == nullptr) {
+ return NO_MEMORY;
+ }
+ consumer = mSurface;
+
+ res = mConsumer->consumerConnect(this, /* controlledByApp */ false);
+
+ return res;
+}
+
+void Camera3StreamSplitter::disconnect() {
+ Mutex::Autolock lock(mMutex);
+
+ for (auto& output : mOutputs) {
+ output->disconnect(NATIVE_WINDOW_API_CAMERA);
+ }
+ mOutputs.clear();
+
+ if (mConsumer != nullptr) {
+ mConsumer->consumerDisconnect();
+ mConsumer.clear();
+ }
+
+ if (mBuffers.size() > 0) {
+ ALOGI("%zu buffers still being tracked", mBuffers.size());
+ }
+}
+
+Camera3StreamSplitter::~Camera3StreamSplitter() {
+ disconnect();
+}
+
+status_t Camera3StreamSplitter::addOutput(
+ sp<Surface>& outputQueue, size_t hal_max_buffers) {
+ Mutex::Autolock lock(mMutex);
+ return addOutputLocked(outputQueue, hal_max_buffers, OutputType::Deferred);
+}
+
+status_t Camera3StreamSplitter::addOutputLocked(
+ const sp<Surface>& outputQueue, size_t hal_max_buffers,
+ OutputType outputType) {
+ if (outputQueue == nullptr) {
+ ALOGE("addOutput: outputQueue must not be NULL");
+ return BAD_VALUE;
+ }
+ if (hal_max_buffers < 1) {
+ ALOGE("%s: Camera HAL requested max_buffer count: %zu, requires at least 1",
+ __FUNCTION__, hal_max_buffers);
+ return BAD_VALUE;
+ }
+
+ sp<IGraphicBufferProducer> gbp = outputQueue->getIGraphicBufferProducer();
+ // Connect to the buffer producer
+ IGraphicBufferProducer::QueueBufferOutput queueBufferOutput;
+ sp<OutputListener> listener(new OutputListener(this, gbp));
+ IInterface::asBinder(gbp)->linkToDeath(listener);
+ status_t status = gbp->connect(listener, NATIVE_WINDOW_API_CAMERA,
+ /* producerControlledByApp */ true, &queueBufferOutput);
+ if (status != NO_ERROR) {
+ ALOGE("addOutput: failed to connect (%d)", status);
+ return status;
+ }
+
+ // Query consumer side buffer count, and update overall buffer count
+ int maxConsumerBuffers = 0;
+ status = static_cast<ANativeWindow*>(outputQueue.get())->query(
+ outputQueue.get(),
+ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
+ if (status != OK) {
+ ALOGE("%s: Unable to query consumer undequeued buffer count"
+ " for surface", __FUNCTION__);
+ return status;
+ }
+
+ if (maxConsumerBuffers > mMaxConsumerBuffers) {
+ if (outputType == OutputType::Deferred) {
+ ALOGE("%s: Fatal: Deferred surface has higher consumer buffer count"
+ " %d than what's already configured %d", __FUNCTION__,
+ maxConsumerBuffers, mMaxConsumerBuffers);
+ return BAD_VALUE;
+ }
+ mMaxConsumerBuffers = maxConsumerBuffers;
+ }
+
+ ALOGV("%s: Consumer wants %d buffers, HAL wants %zu", __FUNCTION__,
+ maxConsumerBuffers, hal_max_buffers);
+ size_t totalBufferCount = maxConsumerBuffers + hal_max_buffers;
+ status = native_window_set_buffer_count(outputQueue.get(),
+ totalBufferCount);
+ if (status != OK) {
+ ALOGE("%s: Unable to set buffer count for surface %p",
+ __FUNCTION__, outputQueue.get());
+ return status;
+ }
+
+ // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
+ // We need to skip these cases because the timeout would disable the non-blocking (async) mode.
+ int32_t usage = 0;
+ static_cast<ANativeWindow*>(outputQueue.get())->query(
+ outputQueue.get(),
+ NATIVE_WINDOW_CONSUMER_USAGE_BITS, &usage);
+ if (!(usage & (GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_TEXTURE))) {
+ outputQueue->setDequeueTimeout(kDequeueBufferTimeout);
+ }
+
+ status = gbp->allowAllocation(false);
+ if (status != OK) {
+ ALOGE("%s: Failed to turn off allocation for outputQueue", __FUNCTION__);
+ return status;
+ }
+
+ // Add new entry into mOutputs
+ mOutputs.push_back(gbp);
+ return NO_ERROR;
+}
+
+String8 Camera3StreamSplitter::getUniqueConsumerName() {
+ static volatile int32_t counter = 0;
+ return String8::format("Camera3StreamSplitter-%d", android_atomic_inc(&counter));
+}
+
+status_t Camera3StreamSplitter::notifyRequestedSurfaces(
+ const std::vector<size_t>& surfaces) {
+ ATRACE_CALL();
+ Mutex::Autolock lock(mMutex);
+
+ mRequestedSurfaces.push_back(surfaces);
+ return OK;
+}
+
+
+void Camera3StreamSplitter::onFrameAvailable(const BufferItem& /* item */) {
+ ATRACE_CALL();
+ Mutex::Autolock lock(mMutex);
+
+ // The current policy is that if any one consumer is consuming buffers too
+ // slowly, the splitter will stall the rest of the outputs by not acquiring
+ // any more buffers from the input. This will cause back pressure on the
+ // input queue, slowing down its producer.
+
+ // If there are too many outstanding buffers, we block until a buffer is
+ // released back to the input in onBufferReleased
+ while (mOutstandingBuffers >= mMaxConsumerBuffers) {
+ mReleaseCondition.wait(mMutex);
+
+ // If the splitter is abandoned while we are waiting, the release
+ // condition variable will be broadcast, and we should just return
+ // without attempting to do anything more (since the input queue will
+ // also be abandoned).
+ if (mIsAbandoned) {
+ return;
+ }
+ }
+ // If the splitter is abandoned without reaching mMaxConsumerBuffers, just
+ // return without attempting to do anything more.
+ if (mIsAbandoned) {
+ return;
+ }
+
+ ++mOutstandingBuffers;
+
+ // Acquire and detach the buffer from the input
+ BufferItem bufferItem;
+ status_t status = mConsumer->acquireBuffer(&bufferItem, /* presentWhen */ 0);
+ LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
+ "acquiring buffer from input failed (%d)", status);
+
+ ALOGV("acquired buffer %#" PRIx64 " from input",
+ bufferItem.mGraphicBuffer->getId());
+
+ status = mConsumer->detachBuffer(bufferItem.mSlot);
+ LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
+ "detaching buffer from input failed (%d)", status);
+
+ IGraphicBufferProducer::QueueBufferInput queueInput(
+ bufferItem.mTimestamp, bufferItem.mIsAutoTimestamp,
+ bufferItem.mDataSpace, bufferItem.mCrop,
+ static_cast<int32_t>(bufferItem.mScalingMode),
+ bufferItem.mTransform, bufferItem.mFence);
+
+ // Attach and queue the buffer to each of the outputs
+ std::vector<std::vector<size_t> >::iterator surfaces = mRequestedSurfaces.begin();
+ if (surfaces != mRequestedSurfaces.end()) {
+
+ LOG_ALWAYS_FATAL_IF(surfaces->size() == 0,
+ "requested surface ids shouldn't be empty");
+
+ // Initialize our reference count for this buffer
+ mBuffers[bufferItem.mGraphicBuffer->getId()] =
+ std::unique_ptr<BufferTracker>(
+ new BufferTracker(bufferItem.mGraphicBuffer, surfaces->size()));
+
+ for (auto id : *surfaces) {
+
+ LOG_ALWAYS_FATAL_IF(id >= mOutputs.size(),
+ "requested surface id exceeding max registered ids");
+
+ int slot = BufferItem::INVALID_BUFFER_SLOT;
+ status = mOutputs[id]->attachBuffer(&slot, bufferItem.mGraphicBuffer);
+ if (status == NO_INIT) {
+ // If we just discovered that this output has been abandoned, note
+ // that, decrement the reference count so that we still release this
+ // buffer eventually, and move on to the next output
+ onAbandonedLocked();
+ mBuffers[bufferItem.mGraphicBuffer->getId()]->
+ decrementReferenceCountLocked();
+ continue;
+ } else if (status == WOULD_BLOCK) {
+ // If the output is async, attachBuffer may return WOULD_BLOCK
+ // indicating number of dequeued buffers has reached limit. In
+ // this case, simply decrement the reference count, and move on
+ // to the next output.
+ // TODO: Do we need to report BUFFER_ERROR for this result?
+ mBuffers[bufferItem.mGraphicBuffer->getId()]->
+ decrementReferenceCountLocked();
+ continue;
+ } else if (status == TIMED_OUT) {
+ // If attachBuffer times out due to the value set by
+ // setDequeueTimeout, simply decrement the reference count, and
+ // move on to the next output.
+ // TODO: Do we need to report BUFFER_ERROR for this result?
+ mBuffers[bufferItem.mGraphicBuffer->getId()]->
+ decrementReferenceCountLocked();
+ continue;
+ } else {
+ LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
+ "attaching buffer to output failed (%d)", status);
+ }
+
+ IGraphicBufferProducer::QueueBufferOutput queueOutput;
+ status = mOutputs[id]->queueBuffer(slot, queueInput, &queueOutput);
+ if (status == NO_INIT) {
+ // If we just discovered that this output has been abandoned, note
+ // that, decrement the reference count so that we still release this
+ // buffer eventually, and move on to the next output
+ onAbandonedLocked();
+ mBuffers[bufferItem.mGraphicBuffer->getId()]->
+ decrementReferenceCountLocked();
+ continue;
+ } else {
+ LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
+ "queueing buffer to output failed (%d)", status);
+ }
+
+ ALOGV("queued buffer %#" PRIx64 " to output %p",
+ bufferItem.mGraphicBuffer->getId(), mOutputs[id].get());
+ }
+
+ mRequestedSurfaces.erase(surfaces);
+ }
+}
+
+void Camera3StreamSplitter::onBufferReleasedByOutput(
+ const sp<IGraphicBufferProducer>& from) {
+ ATRACE_CALL();
+ Mutex::Autolock lock(mMutex);
+
+ sp<GraphicBuffer> buffer;
+ sp<Fence> fence;
+ status_t status = from->detachNextBuffer(&buffer, &fence);
+ if (status == NO_INIT) {
+ // If we just discovered that this output has been abandoned, note that,
+ // but we can't do anything else, since buffer is invalid
+ onAbandonedLocked();
+ return;
+ } else {
+ LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
+ "detaching buffer from output failed (%d)", status);
+ }
+
+ ALOGV("detached buffer %#" PRIx64 " from output %p",
+ buffer->getId(), from.get());
+
+ BufferTracker& tracker = *(mBuffers[buffer->getId()]);
+
+ // Merge the release fence of the incoming buffer so that the fence we send
+ // back to the input includes all of the outputs' fences
+ tracker.mergeFence(fence);
+
+ // Check to see if this is the last outstanding reference to this buffer
+ size_t referenceCount = tracker.decrementReferenceCountLocked();
+ ALOGV("buffer %#" PRIx64 " reference count %zu", buffer->getId(),
+ referenceCount);
+ if (referenceCount > 0) {
+ return;
+ }
+
+ // If we've been abandoned, we can't return the buffer to the input, so just
+ // stop tracking it and move on
+ if (mIsAbandoned) {
+ mBuffers.erase(buffer->getId());
+ return;
+ }
+
+ // Attach and release the buffer back to the input
+ int consumerSlot = BufferItem::INVALID_BUFFER_SLOT;
+ status = mConsumer->attachBuffer(&consumerSlot, tracker.getBuffer());
+ LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
+ "attaching buffer to input failed (%d)", status);
+
+ status = mConsumer->releaseBuffer(consumerSlot, /* frameNumber */ 0,
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, tracker.getMergedFence());
+ LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
+ "releasing buffer to input failed (%d)", status);
+
+ ALOGV("released buffer %#" PRIx64 " to input", buffer->getId());
+
+ // We no longer need to track the buffer once it has been returned to the
+ // input
+ mBuffers.erase(buffer->getId());
+
+ // Notify any waiting onFrameAvailable calls
+ --mOutstandingBuffers;
+ mReleaseCondition.signal();
+}
+
+void Camera3StreamSplitter::onAbandonedLocked() {
+ ALOGE("one of my outputs has abandoned me");
+ if (!mIsAbandoned && mConsumer != nullptr) {
+ mConsumer->consumerDisconnect();
+ }
+ mIsAbandoned = true;
+ mReleaseCondition.broadcast();
+}
+
+Camera3StreamSplitter::OutputListener::OutputListener(
+ wp<Camera3StreamSplitter> splitter,
+ wp<IGraphicBufferProducer> output)
+ : mSplitter(splitter), mOutput(output) {}
+
+void Camera3StreamSplitter::OutputListener::onBufferReleased() {
+ sp<Camera3StreamSplitter> splitter = mSplitter.promote();
+ sp<IGraphicBufferProducer> output = mOutput.promote();
+ if (splitter != nullptr && output != nullptr) {
+ splitter->onBufferReleasedByOutput(output);
+ }
+}
+
+void Camera3StreamSplitter::OutputListener::binderDied(const wp<IBinder>& /* who */) {
+ sp<Camera3StreamSplitter> splitter = mSplitter.promote();
+ if (splitter != nullptr) {
+ Mutex::Autolock lock(splitter->mMutex);
+ splitter->onAbandonedLocked();
+ }
+}
+
+Camera3StreamSplitter::BufferTracker::BufferTracker(
+ const sp<GraphicBuffer>& buffer, size_t referenceCount)
+ : mBuffer(buffer), mMergedFence(Fence::NO_FENCE),
+ mReferenceCount(referenceCount) {}
+
+void Camera3StreamSplitter::BufferTracker::mergeFence(const sp<Fence>& with) {
+ mMergedFence = Fence::merge(String8("Camera3StreamSplitter"), mMergedFence, with);
+}
+
+size_t Camera3StreamSplitter::BufferTracker::decrementReferenceCountLocked() {
+ if (mReferenceCount > 0)
+ --mReferenceCount;
+ return mReferenceCount;
+}
+
+} // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
new file mode 100644
index 0000000..5a25712
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -0,0 +1,209 @@
+/*
+ * Copyright 2014,2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_STREAMSPLITTER_H
+#define ANDROID_SERVERS_STREAMSPLITTER_H
+
+#include <gui/IConsumerListener.h>
+#include <gui/IProducerListener.h>
+#include <gui/BufferItemConsumer.h>
+
+#include <utils/Condition.h>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
+#include <utils/Timers.h>
+
+namespace android {
+
+class GraphicBuffer;
+class IGraphicBufferConsumer;
+class IGraphicBufferProducer;
+
+// Camera3StreamSplitter is an autonomous class that manages one input BufferQueue
+// and multiple output BufferQueues. By using the buffer attach and detach logic
+// in BufferQueue, it is able to present the illusion of a single split
+// BufferQueue, where each buffer queued to the input is available to be
+// acquired by each of the outputs, and is able to be dequeued by the input
+// again only once all of the outputs have released it.
+class Camera3StreamSplitter : public BnConsumerListener {
+public:
+
+ // Constructor
+ Camera3StreamSplitter() = default;
+
+ // Connect to the stream splitter by creating buffer queue and connecting it
+ // with output surfaces.
+ status_t connect(const std::vector<sp<Surface> >& surfaces,
+ uint32_t consumerUsage, size_t hal_max_buffers,
+ sp<Surface>& consumer);
+
+ // addOutput adds an output BufferQueue to the splitter. The splitter
+ // connects to outputQueue as a CPU producer, and any buffers queued
+ // to the input will be queued to each output. It is assumed that all of the
+ // outputs are added before any buffers are queued on the input. If any
+ // output is abandoned by its consumer, the splitter will abandon its input
+ // queue (see onAbandoned).
+ //
+ // A return value other than NO_ERROR means that an error has occurred and
+ // outputQueue has not been added to the splitter. BAD_VALUE is returned if
+ // outputQueue is NULL. See IGraphicBufferProducer::connect for explanations
+ // of other error codes.
+ status_t addOutput(sp<Surface>& outputQueue, size_t hal_max_buffers);
+
+ // Request surfaces for a particular frame number. The requested surfaces
+ // are stored in a FIFO queue. When a buffer becomes available from the
+ // input queue, the registered surfaces are used to decide which outputs the
+ // buffer is sent to.
+ status_t notifyRequestedSurfaces(const std::vector<size_t>& surfaces);
+
+ // Disconnect the buffer queue from output surfaces.
+ void disconnect();
+
+private:
+ // From IConsumerListener
+ //
+ // During this callback, we store some tracking information, detach the
+ // buffer from the input, and attach it to each of the outputs. This call
+ // can block if there are too many outstanding buffers. If it blocks, it
+ // will resume when onBufferReleasedByOutput releases a buffer back to the
+ // input.
+ void onFrameAvailable(const BufferItem& item) override;
+
+ // From IConsumerListener
+ // We don't care about released buffers because we detach each buffer as
+ // soon as we acquire it. See the comment for onBufferReleased below for
+ // some clarifying notes about the name.
+ void onBuffersReleased() override {}
+
+ // From IConsumerListener
+ // We don't care about sideband streams, since we won't be splitting them
+ void onSidebandStreamChanged() override {}
+
+ // This is the implementation of the onBufferReleased callback from
+ // IProducerListener. It gets called from an OutputListener (see below), and
+ // 'from' indicates which producer interface the callback was received from.
+ //
+ // During this callback, we detach the buffer from the output queue that
+ // generated the callback, update our state tracking to see if this is the
+ // last output releasing the buffer, and if so, release it to the input.
+ // If we release the buffer to the input, we allow a blocked
+ // onFrameAvailable call to proceed.
+ void onBufferReleasedByOutput(const sp<IGraphicBufferProducer>& from);
+
+ // When this is called, the splitter disconnects from (i.e., abandons) its
+ // input queue and signals any waiting onFrameAvailable calls to wake up.
+ // It still processes callbacks from other outputs, but only detaches their
+ // buffers so they can continue operating until they run out of buffers to
+ // acquire. This must be called with mMutex locked.
+ void onAbandonedLocked();
+
+ // This is a thin wrapper class that lets us determine which BufferQueue
+ // the IProducerListener::onBufferReleased callback is associated with. We
+ // create one of these per output BufferQueue, and then pass the producer
+ // into onBufferReleasedByOutput above.
+ class OutputListener : public BnProducerListener,
+ public IBinder::DeathRecipient {
+ public:
+ OutputListener(wp<Camera3StreamSplitter> splitter,
+ wp<IGraphicBufferProducer> output);
+ virtual ~OutputListener() = default;
+
+ // From IProducerListener
+ void onBufferReleased() override;
+
+ // From IBinder::DeathRecipient
+ void binderDied(const wp<IBinder>& who) override;
+
+ private:
+ wp<Camera3StreamSplitter> mSplitter;
+ wp<IGraphicBufferProducer> mOutput;
+ };
+
+ class BufferTracker {
+ public:
+ BufferTracker(const sp<GraphicBuffer>& buffer, size_t referenceCount);
+ ~BufferTracker() = default;
+
+ const sp<GraphicBuffer>& getBuffer() const { return mBuffer; }
+ const sp<Fence>& getMergedFence() const { return mMergedFence; }
+
+ void mergeFence(const sp<Fence>& with);
+
+ // Returns the new value
+ // Only called while mMutex is held
+ size_t decrementReferenceCountLocked();
+
+ private:
+
+ // Disallow copying
+ BufferTracker(const BufferTracker& other);
+ BufferTracker& operator=(const BufferTracker& other);
+
+ sp<GraphicBuffer> mBuffer; // One instance that holds this native handle
+ sp<Fence> mMergedFence;
+ size_t mReferenceCount;
+ };
+
+ // A deferred output is an output added to the splitter after the connect()
+ // call, whereas a non-deferred output is added within the connect() call.
+ enum class OutputType { NonDeferred, Deferred };
+
+ // Must be accessed through RefBase
+ virtual ~Camera3StreamSplitter();
+
+ status_t addOutputLocked(const sp<Surface>& outputQueue,
+ size_t hal_max_buffers, OutputType outputType);
+
+ // Get unique name for the buffer queue consumer
+ static String8 getUniqueConsumerName();
+
+ // Max consumer-side buffers for a deferred surface. This is used as a
+ // lower bound for the overall consumer-side max buffer count.
+ static const int MAX_BUFFERS_DEFERRED_OUTPUT = 2;
+ int mMaxConsumerBuffers = MAX_BUFFERS_DEFERRED_OUTPUT;
+
+ static const nsecs_t kDequeueBufferTimeout = s2ns(1); // 1 sec
+
+ // mIsAbandoned is set to true when an output dies. Once the Camera3StreamSplitter
+ // has been abandoned, it will continue to detach buffers from other
+ // outputs, but it will disconnect from the input and not attempt to
+ // communicate with it further.
+ bool mIsAbandoned = false;
+
+ Mutex mMutex;
+ Condition mReleaseCondition;
+ int mOutstandingBuffers = 0;
+
+ sp<IGraphicBufferProducer> mProducer;
+ sp<IGraphicBufferConsumer> mConsumer;
+ sp<BufferItemConsumer> mBufferItemConsumer;
+ sp<Surface> mSurface;
+
+ std::vector<sp<IGraphicBufferProducer> > mOutputs;
+ // Tracking which outputs should the buffer be attached and queued
+ // to for each input buffer.
+ std::vector<std::vector<size_t> > mRequestedSurfaces;
+
+ // Map of GraphicBuffer IDs (GraphicBuffer::getId()) to buffer tracking
+ // objects (which are mostly for counting how many outputs have released the
+ // buffer, but also contain merged release fences).
+ std::unordered_map<uint64_t, std::unique_ptr<BufferTracker> > mBuffers;
+};
+
+} // namespace android
+
+#endif
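
Taken together with the implementation above, a minimal sketch of the splitter lifecycle as Camera3SharedOutputStream drives it. surfaces, usage, halMaxBuffers and lateSurface are placeholders supplied by the caller.

    sp<Camera3StreamSplitter> splitter = new Camera3StreamSplitter();

    // connect() wires in the known output surfaces and returns the splitter's
    // input queue as a Surface for the HAL to produce into.
    sp<Surface> halConsumer;
    status_t res = splitter->connect(surfaces, usage, halMaxBuffers, halConsumer);

    // Per request: record which outputs should receive the next buffer queued
    // onto halConsumer (FIFO order, matched up in onFrameAvailable).
    splitter->notifyRequestedSurfaces({0, 1});

    // A deferred output can still be attached after connect().
    res = splitter->addOutput(lateSurface, halMaxBuffers);

    splitter->disconnect();
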
diff --git a/services/camera/libcameraservice/tests/Android.mk b/services/camera/libcameraservice/tests/Android.mk
new file mode 100644
index 0000000..179643b
--- /dev/null
+++ b/services/camera/libcameraservice/tests/Android.mk
@@ -0,0 +1,38 @@
+# Copyright 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= $(call all-cpp-files-under, .)
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils \
+ libcameraservice \
+ libhidlbase \
+ liblog \
+ libutils \
+ android.hardware.camera.common@1.0 \
+ android.hardware.camera.provider@2.4 \
+ android.hardware.camera.device@1.0 \
+ android.hardware.camera.device@3.2
+
+LOCAL_C_INCLUDES += \
+
+LOCAL_CFLAGS += -Wall -Wextra -Werror
+
+LOCAL_MODULE:= cameraservice_test
+LOCAL_MODULE_TAGS := tests
+
+include $(BUILD_NATIVE_TEST)
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
new file mode 100644
index 0000000..eb934ba
--- /dev/null
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -0,0 +1,141 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "CameraProviderManagerTest"
+
+#include "../common/CameraProviderManager.h"
+#include <android/hidl/manager/1.0/IServiceManager.h>
+#include <android/hidl/manager/1.0/IServiceNotification.h>
+
+#include <gtest/gtest.h>
+
+using namespace android;
+using namespace android::hardware::camera;
+using android::hardware::camera::common::V1_0::Status;
+
+/**
+ * Basic test implementation of a camera provider
+ */
+struct TestICameraProvider : virtual public provider::V2_4::ICameraProvider {
+ sp<provider::V2_4::ICameraProviderCallbacks> mCallbacks;
+
+ std::vector<hardware::hidl_string> mDeviceNames;
+
+ TestICameraProvider() {
+ mDeviceNames.push_back("device@3.2/test/0");
+ mDeviceNames.push_back("device@1.0/test/0");
+ mDeviceNames.push_back("device@3.2/test/1");
+ }
+
+ virtual hardware::Return<Status> setCallbacks(
+ const sp<provider::V2_4::ICameraProviderCallbacks>& callbacks) override {
+ mCallbacks = callbacks;
+ return hardware::Return<Status>(Status::OK);
+ }
+
+ using getVendorTags_cb = std::function<void(Status status,
+ const hardware::hidl_vec<common::V1_0::VendorTagSection>& sections)>;
+ virtual hardware::Return<void> getVendorTags(getVendorTags_cb _hidl_cb) override {
+ hardware::hidl_vec<common::V1_0::VendorTagSection> sections;
+ _hidl_cb(Status::OK, sections);
+ return hardware::Void();
+ }
+
+ using getCameraIdList_cb = std::function<void(Status status,
+ const hardware::hidl_vec<hardware::hidl_string>& cameraDeviceNames)>;
+ virtual hardware::Return<void> getCameraIdList(getCameraIdList_cb _hidl_cb) override {
+ _hidl_cb(Status::OK, mDeviceNames);
+ return hardware::Void();
+ }
+
+ using getCameraDeviceInterface_V1_x_cb = std::function<void(Status status,
+ const sp<device::V1_0::ICameraDevice>& device)>;
+ virtual hardware::Return<void> getCameraDeviceInterface_V1_x(
+ const hardware::hidl_string& cameraDeviceName,
+ getCameraDeviceInterface_V1_x_cb _hidl_cb) override {
+ (void) cameraDeviceName;
+ _hidl_cb(Status::OK, nullptr);
+ return hardware::Void();
+ }
+
+ using getCameraDeviceInterface_V3_x_cb = std::function<void(Status status,
+ const sp<device::V3_2::ICameraDevice>& device)>;
+ virtual hardware::Return<void> getCameraDeviceInterface_V3_x(
+ const hardware::hidl_string& cameraDeviceName,
+ getCameraDeviceInterface_V3_x_cb _hidl_cb) override {
+ (void) cameraDeviceName;
+ _hidl_cb(Status::OK, nullptr);
+ return hardware::Void();
+ }
+
+};
+
+/**
+ * Simple test version of the interaction proxy, to use to inject onRegistered calls to the
+ * CameraProviderManager
+ */
+struct TestInteractionProxy : public CameraProviderManager::ServiceInteractionProxy {
+ sp<hidl::manager::V1_0::IServiceNotification> mManagerNotificationInterface;
+ const sp<TestICameraProvider> mTestCameraProvider;
+
+ TestInteractionProxy() :
+ mTestCameraProvider(new TestICameraProvider()) {
+
+ }
+ std::string mLastRequestedServiceName;
+
+ virtual ~TestInteractionProxy() {}
+
+ virtual bool registerForNotifications(
+ const std::string &serviceName,
+ const sp<hidl::manager::V1_0::IServiceNotification> ¬ification) override {
+ (void) serviceName;
+ mManagerNotificationInterface = notification;
+ return true;
+ }
+
+ virtual sp<hardware::camera::provider::V2_4::ICameraProvider> getService(
+ const std::string &serviceName) override {
+ mLastRequestedServiceName = serviceName;
+ return mTestCameraProvider;
+ }
+
+};
+
+TEST(CameraProviderManagerTest, InitializeTest) {
+
+ status_t res;
+ sp<CameraProviderManager> providerManager = new CameraProviderManager();
+ TestInteractionProxy serviceProxy{};
+
+ res = providerManager->initialize(&serviceProxy);
+ ASSERT_EQ(res, OK) << "Unable to initialize provider manager";
+
+ hardware::hidl_string legacyInstanceName = "legacy/0";
+ ASSERT_EQ(serviceProxy.mLastRequestedServiceName, legacyInstanceName) <<
+ "Legacy instance not requested from service manager";
+
+ hardware::hidl_string testProviderFqInterfaceName =
+ "android.hardware.camera.provider@2.4::ICameraProvider";
+ hardware::hidl_string testProviderInstanceName = "test/0";
+ serviceProxy.mManagerNotificationInterface->onRegistration(
+ testProviderFqInterfaceName,
+ testProviderInstanceName, false);
+
+ ASSERT_EQ(serviceProxy.mLastRequestedServiceName, testProviderInstanceName) <<
+ "Incorrect instance requested from service manager";
+}
diff --git a/services/mediacodec/minijail/minijail.cpp b/services/mediacodec/minijail/minijail.cpp
index 7926380..463f161 100644
--- a/services/mediacodec/minijail/minijail.cpp
+++ b/services/mediacodec/minijail/minijail.cpp
@@ -19,7 +19,8 @@
#include <unistd.h>
-#include <android/log.h>
+#include <log/log.h>
+
#include <libminijail.h>
#include "minijail.h"
diff --git a/services/mediaextractor/minijail/minijail.cpp b/services/mediaextractor/minijail/minijail.cpp
index 8291633..c44d00d 100644
--- a/services/mediaextractor/minijail/minijail.cpp
+++ b/services/mediaextractor/minijail/minijail.cpp
@@ -19,7 +19,8 @@
#include <unistd.h>
-#include <android/log.h>
+#include <log/log.h>
+
#include <libminijail.h>
#include "minijail.h"
diff --git a/services/medialog/Android.mk b/services/medialog/Android.mk
index a1da63d..423b186 100644
--- a/services/medialog/Android.mk
+++ b/services/medialog/Android.mk
@@ -4,7 +4,7 @@
LOCAL_SRC_FILES := MediaLogService.cpp IMediaLogService.cpp
-LOCAL_SHARED_LIBRARIES := libbinder libutils liblog libnbaio
+LOCAL_SHARED_LIBRARIES := libbinder libutils liblog libnbaio libaudioutils
LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
diff --git a/services/medialog/MediaLogService.cpp b/services/medialog/MediaLogService.cpp
index f85aa13..ab2f925 100644
--- a/services/medialog/MediaLogService.cpp
+++ b/services/medialog/MediaLogService.cpp
@@ -35,7 +35,7 @@
shared->size() < NBLog::Timeline::sharedSize(size)) {
return;
}
- sp<NBLog::Reader> reader(new NBLog::Reader(size, shared));
+ sp<NBLog::Reader> reader(new NBLog::Reader(shared, size));
NamedReader namedReader(reader, name);
Mutex::Autolock _l(mLock);
mNamedReaders.add(namedReader);
diff --git a/services/oboeservice/Android.mk b/services/oboeservice/Android.mk
new file mode 100644
index 0000000..07b4d76
--- /dev/null
+++ b/services/oboeservice/Android.mk
@@ -0,0 +1,52 @@
+LOCAL_PATH:= $(call my-dir)
+
+# Oboe Service
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := oboeservice
+LOCAL_MODULE_TAGS := optional
+
+LIBOBOE_DIR := ../../media/liboboe
+LIBOBOE_SRC_DIR := $(LIBOBOE_DIR)/src
+
+LOCAL_C_INCLUDES := \
+ $(call include-path-for, audio-utils) \
+ frameworks/native/include \
+ system/core/base/include \
+ $(TOP)/frameworks/native/media/liboboe/include/include \
+ $(TOP)/frameworks/av/media/liboboe/include \
+ $(TOP)/external/tinyalsa/include \
+ $(TOP)/frameworks/av/media/liboboe/src \
+ $(TOP)/frameworks/av/media/liboboe/src/binding \
+ $(TOP)/frameworks/av/media/liboboe/src/client \
+ $(TOP)/frameworks/av/media/liboboe/src/core \
+ $(TOP)/frameworks/av/media/liboboe/src/fifo \
+ $(TOP)/frameworks/av/media/liboboe/src/utility
+
+# TODO These could be in a liboboe_common library
+LOCAL_SRC_FILES += \
+ $(LIBOBOE_SRC_DIR)/utility/HandleTracker.cpp \
+ $(LIBOBOE_SRC_DIR)/utility/OboeUtilities.cpp \
+ $(LIBOBOE_SRC_DIR)/fifo/FifoBuffer.cpp \
+ $(LIBOBOE_SRC_DIR)/fifo/FifoControllerBase.cpp \
+ $(LIBOBOE_SRC_DIR)/binding/SharedMemoryParcelable.cpp \
+ $(LIBOBOE_SRC_DIR)/binding/SharedRegionParcelable.cpp \
+ $(LIBOBOE_SRC_DIR)/binding/RingBufferParcelable.cpp \
+ $(LIBOBOE_SRC_DIR)/binding/AudioEndpointParcelable.cpp \
+ $(LIBOBOE_SRC_DIR)/binding/OboeStreamRequest.cpp \
+ $(LIBOBOE_SRC_DIR)/binding/OboeStreamConfiguration.cpp \
+ $(LIBOBOE_SRC_DIR)/binding/IOboeAudioService.cpp \
+ SharedRingBuffer.cpp \
+ FakeAudioHal.cpp \
+ OboeAudioService.cpp \
+ OboeServiceStreamBase.cpp \
+ OboeServiceStreamFakeHal.cpp \
+ OboeServiceMain.cpp
+
+LOCAL_CFLAGS += -Wno-unused-parameter
+LOCAL_CFLAGS += -Wall -Werror
+
+LOCAL_SHARED_LIBRARIES := libbinder libcutils libutils liblog libtinyalsa
+
+include $(BUILD_EXECUTABLE)
diff --git a/services/oboeservice/FakeAudioHal.cpp b/services/oboeservice/FakeAudioHal.cpp
new file mode 100644
index 0000000..7fa2eef
--- /dev/null
+++ b/services/oboeservice/FakeAudioHal.cpp
@@ -0,0 +1,227 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Simple fake HAL that supports ALSA MMAP/NOIRQ mode.
+ */
+
+#include <iostream>
+#include <math.h>
+#include <limits>
+#include <string.h>
+#include <unistd.h>
+
+#define __force
+#define __bitwise
+#define __user
+#include <sound/asound.h>
+
+#include "tinyalsa/asoundlib.h"
+
+#include "FakeAudioHal.h"
+
+//using namespace oboe;
+
+using sample_t = int16_t;
+using std::cout;
+using std::endl;
+
+#undef SNDRV_PCM_IOCTL_SYNC_PTR
+#define SNDRV_PCM_IOCTL_SYNC_PTR 0xc0884123
+#define PCM_ERROR_MAX 128
+
+const int SAMPLE_RATE = 48000; // Hz
+const int CHANNEL_COUNT = 2;
+
+struct pcm {
+ int fd;
+ unsigned int flags;
+ int running:1;
+ int prepared:1;
+ int underruns;
+ unsigned int buffer_size;
+ unsigned int boundary;
+ char error[PCM_ERROR_MAX];
+ struct pcm_config config;
+ struct snd_pcm_mmap_status *mmap_status;
+ struct snd_pcm_mmap_control *mmap_control;
+ struct snd_pcm_sync_ptr *sync_ptr;
+ void *mmap_buffer;
+ unsigned int noirq_frames_per_msec;
+ int wait_for_avail_min;
+};
+
+static int pcm_sync_ptr(struct pcm *pcm, int flags) {
+ if (pcm->sync_ptr) {
+ pcm->sync_ptr->flags = flags;
+ if (ioctl(pcm->fd, SNDRV_PCM_IOCTL_SYNC_PTR, pcm->sync_ptr) < 0)
+ return -1;
+ }
+ return 0;
+}
+
+int pcm_get_hw_ptr(struct pcm* pcm, unsigned int* hw_ptr) {
+ if (!hw_ptr || !pcm) return -EINVAL;
+
+ int result = pcm_sync_ptr(pcm, SNDRV_PCM_SYNC_PTR_HWSYNC);
+ if (!result) {
+ *hw_ptr = pcm->sync_ptr->s.status.hw_ptr;
+ }
+
+ return result;
+}
+
+typedef struct stream_tracker {
+ struct pcm * pcm;
+ int framesPerBurst;
+ sample_t * hwBuffer;
+ int32_t capacityInFrames;
+ int32_t capacityInBytes;
+} stream_tracker_t;
+
+#define FRAMES_PER_BURST_QUALCOMM 192
+#define FRAMES_PER_BURST_NVIDIA 128
+
+int fake_hal_open(int card_id, int device_id, fake_hal_stream_ptr *streamPP) {
+ int framesPerBurst = FRAMES_PER_BURST_QUALCOMM; // TODO update as needed
+ int periodCount = 32;
+ unsigned int offset1;
+ unsigned int frames1;
+ void *area = nullptr;
+ int mmapAvail = 0;
+
+ // Configuration for an ALSA stream.
+ pcm_config cfg;
+ memset(&cfg, 0, sizeof(cfg));
+ cfg.channels = CHANNEL_COUNT;
+ cfg.format = PCM_FORMAT_S16_LE;
+ cfg.rate = SAMPLE_RATE;
+ cfg.period_count = periodCount;
+ cfg.period_size = framesPerBurst;
+ cfg.start_threshold = 0; // for NOIRQ, should just start, was framesPerBurst;
+ cfg.stop_threshold = INT32_MAX;
+ cfg.silence_size = 0;
+ cfg.silence_threshold = 0;
+ cfg.avail_min = framesPerBurst;
+
+ stream_tracker_t *streamTracker = (stream_tracker_t *) malloc(sizeof(stream_tracker_t));
+ if (streamTracker == nullptr) {
+ return -1;
+ }
+ memset(streamTracker, 0, sizeof(stream_tracker_t));
+
+ streamTracker->pcm = pcm_open(card_id, device_id, PCM_OUT | PCM_MMAP | PCM_NOIRQ, &cfg);
+ if (streamTracker->pcm == nullptr) {
+ cout << "Could not open device." << endl;
+ free(streamTracker);
+ return -1;
+ }
+
+ streamTracker->framesPerBurst = cfg.period_size; // Get from ALSA
+ streamTracker->capacityInFrames = pcm_get_buffer_size(streamTracker->pcm);
+ streamTracker->capacityInBytes = pcm_frames_to_bytes(streamTracker->pcm, streamTracker->capacityInFrames);
+ std::cout << "fake_hal_open() streamTracker->framesPerBurst = " << streamTracker->framesPerBurst << std::endl;
+ std::cout << "fake_hal_open() streamTracker->capacityInFrames = " << streamTracker->capacityInFrames << std::endl;
+
+    if (!pcm_is_ready(streamTracker->pcm)) { // pcm_is_ready() returns a boolean, not a negative error code
+ cout << "Device is not ready." << endl;
+ goto error;
+ }
+
+ if (pcm_prepare(streamTracker->pcm) < 0) {
+ cout << "Device could not be prepared." << endl;
+ cout << "For Marlin, please enter:" << endl;
+ cout << " adb shell" << endl;
+ cout << " tinymix \"QUAT_MI2S_RX Audio Mixer MultiMedia8\" 1" << endl;
+ goto error;
+ }
+ mmapAvail = pcm_mmap_avail(streamTracker->pcm);
+ if (mmapAvail <= 0) {
+ cout << "fake_hal_open() mmap_avail is <=0" << endl;
+ goto error;
+ }
+ cout << "fake_hal_open() mmap_avail = " << mmapAvail << endl;
+
+ // Where is the memory mapped area?
+ if (pcm_mmap_begin(streamTracker->pcm, &area, &offset1, &frames1) < 0) {
+ cout << "fake_hal_open() pcm_mmap_begin failed" << endl;
+ goto error;
+ }
+
+ // Clear the buffer.
+ memset((sample_t*) area, 0, streamTracker->capacityInBytes);
+ streamTracker->hwBuffer = (sample_t*) area;
+ streamTracker->hwBuffer[0] = 32000; // impulse
+
+ // Prime the buffer so it can start.
+ if (pcm_mmap_commit(streamTracker->pcm, 0, framesPerBurst) < 0) {
+ cout << "fake_hal_open() pcm_mmap_commit failed" << endl;
+ goto error;
+ }
+
+ *streamPP = streamTracker;
+ return 1;
+
+error:
+ fake_hal_close(streamTracker);
+ return -1;
+}
+
+int fake_hal_get_mmap_info(fake_hal_stream_ptr stream, mmap_buffer_info *info) {
+ stream_tracker_t *streamTracker = (stream_tracker_t *) stream;
+ info->fd = streamTracker->pcm->fd; // TODO use tinyalsa function
+ info->hw_buffer = streamTracker->hwBuffer;
+ info->burst_size_in_frames = streamTracker->framesPerBurst;
+ info->buffer_capacity_in_frames = streamTracker->capacityInFrames;
+ info->buffer_capacity_in_bytes = streamTracker->capacityInBytes;
+ info->sample_rate = SAMPLE_RATE;
+ info->channel_count = CHANNEL_COUNT;
+ return 0;
+}
+
+int fake_hal_start(fake_hal_stream_ptr stream) {
+ stream_tracker_t *streamTracker = (stream_tracker_t *) stream;
+ if (pcm_start(streamTracker->pcm) < 0) {
+ cout << "fake_hal_start failed" << endl;
+ return -1;
+ }
+ return 0;
+}
+
+int fake_hal_pause(fake_hal_stream_ptr stream) {
+ stream_tracker_t *streamTracker = (stream_tracker_t *) stream;
+ if (pcm_stop(streamTracker->pcm) < 0) {
+ cout << "fake_hal_stop failed" << endl;
+ return -1;
+ }
+ return 0;
+}
+
+int fake_hal_get_frame_counter(fake_hal_stream_ptr stream, int *frame_counter) {
+ stream_tracker_t *streamTracker = (stream_tracker_t *) stream;
+ if (pcm_get_hw_ptr(streamTracker->pcm, (unsigned int *)frame_counter) < 0) {
+ cout << "fake_hal_get_frame_counter failed" << endl;
+ return -1;
+ }
+ return 0;
+}
+
+int fake_hal_close(fake_hal_stream_ptr stream) {
+ stream_tracker_t *streamTracker = (stream_tracker_t *) stream;
+ pcm_close(streamTracker->pcm);
+ free(streamTracker);
+ return 0;
+}
+
diff --git a/services/oboeservice/FakeAudioHal.h b/services/oboeservice/FakeAudioHal.h
new file mode 100644
index 0000000..d6f28b2
--- /dev/null
+++ b/services/oboeservice/FakeAudioHal.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Simple fake HAL that supports ALSA MMAP/NOIRQ mode.
+ */
+
+#ifndef FAKE_AUDIO_HAL_H
+#define FAKE_AUDIO_HAL_H
+
+//namespace oboe {
+
+using sample_t = int16_t;
+struct mmap_buffer_info {
+ int fd;
+ int32_t burst_size_in_frames;
+ int32_t buffer_capacity_in_frames;
+ int32_t buffer_capacity_in_bytes;
+ int32_t sample_rate;
+ int32_t channel_count;
+ sample_t *hw_buffer;
+};
+
+typedef void *fake_hal_stream_ptr;
+
+//extern "C"
+//{
+
+int fake_hal_open(int card_id, int device_id, fake_hal_stream_ptr *stream_pp);
+
+int fake_hal_get_mmap_info(fake_hal_stream_ptr stream, mmap_buffer_info *info);
+
+int fake_hal_start(fake_hal_stream_ptr stream);
+
+int fake_hal_pause(fake_hal_stream_ptr stream);
+
+int fake_hal_get_frame_counter(fake_hal_stream_ptr stream, int *frame_counter);
+
+int fake_hal_close(fake_hal_stream_ptr stream);
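+
+/*
+ * Minimal usage sketch (an assumed call order inferred from FakeAudioHal.cpp,
+ * not a binding API contract; card/device 0/19 are the Marlin values used
+ * elsewhere in this patch):
+ *
+ *   fake_hal_stream_ptr stream = nullptr;
+ *   if (fake_hal_open(0, 19, &stream) < 0) return;   // opens ALSA in MMAP/NOIRQ mode
+ *   mmap_buffer_info info;
+ *   fake_hal_get_mmap_info(stream, &info);           // shared buffer fd, burst size, capacity
+ *   fake_hal_start(stream);
+ *   int frames = 0;
+ *   fake_hal_get_frame_counter(stream, &frames);     // poll the hardware read pointer
+ *   fake_hal_pause(stream);
+ *   fake_hal_close(stream);
+ */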
+
+//} /* "C" */
+
+//} /* namespace oboe */
+
+#endif // FAKE_AUDIO_HAL_H
diff --git a/services/oboeservice/OboeAudioService.cpp b/services/oboeservice/OboeAudioService.cpp
new file mode 100644
index 0000000..caddc1d
--- /dev/null
+++ b/services/oboeservice/OboeAudioService.cpp
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "OboeService"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <time.h>
+#include <pthread.h>
+
+#include <oboe/OboeDefinitions.h>
+
+#include "HandleTracker.h"
+#include "IOboeAudioService.h"
+#include "OboeService.h"
+#include "OboeAudioService.h"
+#include "OboeServiceStreamFakeHal.h"
+
+using namespace android;
+using namespace oboe;
+
+typedef enum
+{
+ OBOE_HANDLE_TYPE_STREAM,
+ OBOE_HANDLE_TYPE_COUNT
+} oboe_service_handle_type_t;
+static_assert(OBOE_HANDLE_TYPE_COUNT <= HANDLE_TRACKER_MAX_TYPES, "Too many handle types.");
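+
+// Streams are handed to clients as opaque oboe_handle_t values; mHandleTracker
+// maps each handle back to its OboeServiceStreamBase* so raw pointers never
+// cross the binder interface.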
+
+oboe_handle_t OboeAudioService::openStream(oboe::OboeStreamRequest &request,
+ oboe::OboeStreamConfiguration &configuration) {
+ OboeServiceStreamBase *serviceStream = new OboeServiceStreamFakeHal();
+ ALOGD("OboeAudioService::openStream(): created serviceStream = %p", serviceStream);
+ oboe_result_t result = serviceStream->open(request, configuration);
+ if (result < 0) {
+ ALOGE("OboeAudioService::openStream(): open returned %d", result);
+ return result;
+ } else {
+ OboeStream handle = mHandleTracker.put(OBOE_HANDLE_TYPE_STREAM, serviceStream);
+ ALOGD("OboeAudioService::openStream(): handle = 0x%08X", handle);
+ if (handle < 0) {
+ delete serviceStream;
+ }
+ return handle;
+ }
+}
+
+oboe_result_t OboeAudioService::closeStream(oboe_handle_t streamHandle) {
+ OboeServiceStreamBase *serviceStream = (OboeServiceStreamBase *)
+ mHandleTracker.remove(OBOE_HANDLE_TYPE_STREAM,
+ streamHandle);
+ ALOGI("OboeAudioService.closeStream(0x%08X)", streamHandle);
+ if (serviceStream != nullptr) {
+ ALOGD("OboeAudioService::closeStream(): deleting serviceStream = %p", serviceStream);
+ delete serviceStream;
+ return OBOE_OK;
+ }
+ return OBOE_ERROR_INVALID_HANDLE;
+}
+
+OboeServiceStreamBase *OboeAudioService::convertHandleToServiceStream(
+ oboe_handle_t streamHandle) const {
+ return (OboeServiceStreamBase *) mHandleTracker.get(OBOE_HANDLE_TYPE_STREAM,
+ (oboe_handle_t)streamHandle);
+}
+
+oboe_result_t OboeAudioService::getStreamDescription(
+ oboe_handle_t streamHandle,
+ oboe::AudioEndpointParcelable &parcelable) {
+ ALOGI("OboeAudioService::getStreamDescriptor(), streamHandle = 0x%08x", streamHandle);
+ OboeServiceStreamBase *serviceStream = convertHandleToServiceStream(streamHandle);
+ ALOGI("OboeAudioService::getStreamDescriptor(), serviceStream = %p", serviceStream);
+ if (serviceStream == nullptr) {
+ return OBOE_ERROR_INVALID_HANDLE;
+ }
+ return serviceStream->getDescription(parcelable);
+}
+
+oboe_result_t OboeAudioService::startStream(oboe_handle_t streamHandle) {
+ OboeServiceStreamBase *serviceStream = convertHandleToServiceStream(streamHandle);
+ ALOGI("OboeAudioService::startStream(), serviceStream = %p", serviceStream);
+ if (serviceStream == nullptr) {
+ return OBOE_ERROR_INVALID_HANDLE;
+ }
+ mLatestHandle = streamHandle;
+ return serviceStream->start();
+}
+
+oboe_result_t OboeAudioService::pauseStream(oboe_handle_t streamHandle) {
+ OboeServiceStreamBase *serviceStream = convertHandleToServiceStream(streamHandle);
+ ALOGI("OboeAudioService::pauseStream(), serviceStream = %p", serviceStream);
+ if (serviceStream == nullptr) {
+ return OBOE_ERROR_INVALID_HANDLE;
+ }
+ return serviceStream->pause();
+}
+
+oboe_result_t OboeAudioService::flushStream(oboe_handle_t streamHandle) {
+ OboeServiceStreamBase *serviceStream = convertHandleToServiceStream(streamHandle);
+ ALOGI("OboeAudioService::flushStream(), serviceStream = %p", serviceStream);
+ if (serviceStream == nullptr) {
+ return OBOE_ERROR_INVALID_HANDLE;
+ }
+ return serviceStream->flush();
+}
+
+void OboeAudioService::tickle() {
+ OboeServiceStreamBase *serviceStream = convertHandleToServiceStream(mLatestHandle);
+ //ALOGI("OboeAudioService::tickle(), serviceStream = %p", serviceStream);
+ if (serviceStream != nullptr) {
+ serviceStream->tickle();
+ }
+}
+
+oboe_result_t OboeAudioService::registerAudioThread(oboe_handle_t streamHandle,
+ pid_t clientThreadId,
+ oboe_nanoseconds_t periodNanoseconds) {
+ OboeServiceStreamBase *serviceStream = convertHandleToServiceStream(streamHandle);
+ ALOGI("OboeAudioService::registerAudioThread(), serviceStream = %p", serviceStream);
+ if (serviceStream == nullptr) {
+ ALOGE("OboeAudioService::registerAudioThread(), serviceStream == nullptr");
+ return OBOE_ERROR_INVALID_HANDLE;
+ }
+ if (serviceStream->getRegisteredThread() != OboeServiceStreamBase::ILLEGAL_THREAD_ID) {
+ ALOGE("OboeAudioService::registerAudioThread(), thread already registered");
+ return OBOE_ERROR_INVALID_ORDER;
+ }
+ serviceStream->setRegisteredThread(clientThreadId);
+ // Boost client thread to SCHED_FIFO
+ struct sched_param sp;
+ memset(&sp, 0, sizeof(sp));
+ sp.sched_priority = 2; // TODO use 'requestPriority' function from frameworks/av/media/utils
+ int err = sched_setscheduler(clientThreadId, SCHED_FIFO, &sp);
+    if (err != 0) {
+ ALOGE("OboeAudioService::sched_setscheduler() failed, errno = %d, priority = %d",
+ errno, sp.sched_priority);
+ return OBOE_ERROR_INTERNAL;
+ } else {
+ return OBOE_OK;
+ }
+}
+
+oboe_result_t OboeAudioService::unregisterAudioThread(oboe_handle_t streamHandle,
+ pid_t clientThreadId) {
+ OboeServiceStreamBase *serviceStream = convertHandleToServiceStream(streamHandle);
+ ALOGI("OboeAudioService::unregisterAudioThread(), serviceStream = %p", serviceStream);
+ if (serviceStream == nullptr) {
+ ALOGE("OboeAudioService::unregisterAudioThread(), serviceStream == nullptr");
+ return OBOE_ERROR_INVALID_HANDLE;
+ }
+ if (serviceStream->getRegisteredThread() != clientThreadId) {
+ ALOGE("OboeAudioService::unregisterAudioThread(), wrong thread");
+ return OBOE_ERROR_ILLEGAL_ARGUMENT;
+ }
+ serviceStream->setRegisteredThread(0);
+ return OBOE_OK;
+}
diff --git a/services/oboeservice/OboeAudioService.h b/services/oboeservice/OboeAudioService.h
new file mode 100644
index 0000000..df3cbf8
--- /dev/null
+++ b/services/oboeservice/OboeAudioService.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_OBOE_AUDIO_SERVICE_H
+#define OBOE_OBOE_AUDIO_SERVICE_H
+
+#include <time.h>
+#include <pthread.h>
+
+#include <binder/BinderService.h>
+
+#include <oboe/OboeDefinitions.h>
+#include <oboe/OboeAudio.h>
+#include "HandleTracker.h"
+#include "IOboeAudioService.h"
+#include "OboeService.h"
+#include "OboeServiceStreamBase.h"
+
+using namespace android;
+namespace oboe {
+
+class OboeAudioService :
+ public BinderService<OboeAudioService>,
+ public BnOboeAudioService
+{
+ friend class BinderService<OboeAudioService>; // for OboeAudioService()
+public:
+// TODO why does this fail? static const char* getServiceName() ANDROID_API { return "media.audio_oboe"; }
+ static const char* getServiceName() { return "media.audio_oboe"; }
+
+ virtual oboe_handle_t openStream(OboeStreamRequest &request,
+ OboeStreamConfiguration &configuration);
+
+ virtual oboe_result_t closeStream(oboe_handle_t streamHandle);
+
+ virtual oboe_result_t getStreamDescription(
+ oboe_handle_t streamHandle,
+ AudioEndpointParcelable &parcelable);
+
+ virtual oboe_result_t startStream(oboe_handle_t streamHandle);
+
+ virtual oboe_result_t pauseStream(oboe_handle_t streamHandle);
+
+ virtual oboe_result_t flushStream(oboe_handle_t streamHandle);
+
+ virtual oboe_result_t registerAudioThread(oboe_handle_t streamHandle,
+ pid_t pid, oboe_nanoseconds_t periodNanoseconds) ;
+
+ virtual oboe_result_t unregisterAudioThread(oboe_handle_t streamHandle, pid_t pid);
+
+ virtual void tickle();
+
+private:
+
+ OboeServiceStreamBase *convertHandleToServiceStream(oboe_handle_t streamHandle) const;
+
+ HandleTracker mHandleTracker;
+ oboe_handle_t mLatestHandle = OBOE_ERROR_INVALID_HANDLE; // TODO until we have service threads
+};
+
+} /* namespace oboe */
+
+#endif //OBOE_OBOE_AUDIO_SERVICE_H
diff --git a/services/oboeservice/OboeService.h b/services/oboeservice/OboeService.h
new file mode 100644
index 0000000..a24f525
--- /dev/null
+++ b/services/oboeservice/OboeService.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_OBOE_SERVICE_H
+#define OBOE_OBOE_SERVICE_H
+
+#include <stdint.h>
+
+#include <oboe/OboeAudio.h>
+
+#include "binding/RingBufferParcelable.h"
+
+namespace oboe {
+
+// TODO move this to an "include" folder for the service.
+
+struct OboeMessageTimestamp {
+ oboe_position_frames_t position;
+ int64_t deviceOffset; // add to client position to get device position
+ oboe_nanoseconds_t timestamp;
+};
+
+typedef enum oboe_service_event_e : uint32_t {
+ OBOE_SERVICE_EVENT_STARTED,
+ OBOE_SERVICE_EVENT_PAUSED,
+ OBOE_SERVICE_EVENT_FLUSHED,
+ OBOE_SERVICE_EVENT_CLOSED,
+ OBOE_SERVICE_EVENT_DISCONNECTED
+} oboe_service_event_t;
+
+struct OboeMessageEvent {
+ oboe_service_event_t event;
+ int32_t data1;
+ int64_t data2;
+};
+
+typedef struct OboeServiceMessage_s {
+ enum class code : uint32_t {
+ NOTHING,
+ TIMESTAMP,
+ EVENT,
+ };
+
+ code what;
+ union {
+ OboeMessageTimestamp timestamp;
+ OboeMessageEvent event;
+ };
+} OboeServiceMessage;
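+
+// These messages flow from the service to the client over the "up" message
+// queue; in this patch OboeServiceStreamBase::sendServiceEvent() and
+// OboeServiceStreamFakeHal::sendCurrentTimestamp() are the producers.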
+
+
+} /* namespace oboe */
+
+#endif //OBOE_OBOE_SERVICE_H
diff --git a/services/oboeservice/OboeServiceMain.cpp b/services/oboeservice/OboeServiceMain.cpp
new file mode 100644
index 0000000..18bcf2b
--- /dev/null
+++ b/services/oboeservice/OboeServiceMain.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "OboeService"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <math.h>
+
+#include <utils/RefBase.h>
+#include <binder/TextOutput.h>
+
+#include <binder/IInterface.h>
+#include <binder/IBinder.h>
+#include <binder/ProcessState.h>
+#include <binder/IServiceManager.h>
+#include <binder/IPCThreadState.h>
+
+#include <cutils/ashmem.h>
+#include <sys/mman.h>
+
+#include "OboeService.h"
+#include "IOboeAudioService.h"
+#include "OboeAudioService.h"
+
+using namespace android;
+using namespace oboe;
+
+/**
+ * This is used to test the OboeService as a standalone application.
+ * It is not used when the OboeService is integrated with AudioFlinger.
+ */
+int main(int argc, char **argv) {
+ printf("Test OboeService %s\n", argv[1]);
+ ALOGD("This is the OboeAudioService");
+
+ defaultServiceManager()->addService(String16("OboeAudioService"), new OboeAudioService());
+ android::ProcessState::self()->startThreadPool();
+ printf("OboeAudioService service is now ready\n");
+ IPCThreadState::self()->joinThreadPool();
+ printf("OboeAudioService service thread joined\n");
+
+ return 0;
+}
diff --git a/services/oboeservice/OboeServiceStreamBase.cpp b/services/oboeservice/OboeServiceStreamBase.cpp
new file mode 100644
index 0000000..6b7e4e5
--- /dev/null
+++ b/services/oboeservice/OboeServiceStreamBase.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "OboeService"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "IOboeAudioService.h"
+#include "OboeService.h"
+#include "OboeServiceStreamBase.h"
+#include "AudioEndpointParcelable.h"
+
+using namespace android;
+using namespace oboe;
+
+/**
+ * Construct the AudioCommandQueues and the AudioDataQueue
+ * and fill in the endpoint parcelable.
+ */
+
+OboeServiceStreamBase::OboeServiceStreamBase()
+ : mUpMessageQueue(nullptr)
+{
+ // TODO could fail so move out of constructor
+ mUpMessageQueue = new SharedRingBuffer();
+ mUpMessageQueue->allocate(sizeof(OboeServiceMessage), QUEUE_UP_CAPACITY_COMMANDS);
+}
+
+OboeServiceStreamBase::~OboeServiceStreamBase() {
+ delete mUpMessageQueue;
+}
+
+void OboeServiceStreamBase::sendServiceEvent(oboe_service_event_t event,
+ int32_t data1,
+ int64_t data2) {
+ OboeServiceMessage command;
+ command.what = OboeServiceMessage::code::EVENT;
+ command.event.event = event;
+ command.event.data1 = data1;
+ command.event.data2 = data2;
+ mUpMessageQueue->getFifoBuffer()->write(&command, 1);
+}
+
+
diff --git a/services/oboeservice/OboeServiceStreamBase.h b/services/oboeservice/OboeServiceStreamBase.h
new file mode 100644
index 0000000..736c754
--- /dev/null
+++ b/services/oboeservice/OboeServiceStreamBase.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_OBOE_SERVICE_STREAM_BASE_H
+#define OBOE_OBOE_SERVICE_STREAM_BASE_H
+
+#include "IOboeAudioService.h"
+#include "OboeService.h"
+#include "AudioStream.h"
+#include "fifo/FifoBuffer.h"
+#include "SharedRingBuffer.h"
+#include "AudioEndpointParcelable.h"
+
+namespace oboe {
+
+// We expect the queue to only have a few commands.
+// This should be way more than we need.
+#define QUEUE_UP_CAPACITY_COMMANDS (128)
+
+class OboeServiceStreamBase {
+
+public:
+ OboeServiceStreamBase();
+ virtual ~OboeServiceStreamBase();
+
+ enum {
+ ILLEGAL_THREAD_ID = 0
+ };
+
+ /**
+     * Fill in a parcelable description of the stream.
+ */
+ virtual oboe_result_t getDescription(oboe::AudioEndpointParcelable &parcelable) = 0;
+
+ /**
+ * Open the device.
+ */
+ virtual oboe_result_t open(oboe::OboeStreamRequest &request,
+ oboe::OboeStreamConfiguration &configuration) = 0;
+
+ /**
+ * Start the flow of data.
+ */
+ virtual oboe_result_t start() = 0;
+
+ /**
+     * Stop the flow of data such that start() can resume without loss of data.
+ */
+ virtual oboe_result_t pause() = 0;
+
+ /**
+ * Discard any data held by the underlying HAL or Service.
+ */
+ virtual oboe_result_t flush() = 0;
+
+ virtual oboe_result_t close() = 0;
+
+ virtual void tickle() = 0;
+
+ virtual void sendServiceEvent(oboe_service_event_t event,
+ int32_t data1 = 0,
+ int64_t data2 = 0);
+
+ virtual void setRegisteredThread(pid_t pid) {
+ mRegisteredClientThread = pid;
+ }
+ virtual pid_t getRegisteredThread() {
+ return mRegisteredClientThread;
+ }
+
+protected:
+
+ pid_t mRegisteredClientThread = ILLEGAL_THREAD_ID;
+
+ SharedRingBuffer * mUpMessageQueue;
+
+ oboe_sample_rate_t mSampleRate = 0;
+ oboe_size_bytes_t mBytesPerFrame = 0;
+ oboe_size_frames_t mFramesPerBurst = 0;
+ oboe_size_frames_t mCapacityInFrames = 0;
+ oboe_size_bytes_t mCapacityInBytes = 0;
+};
+
+} /* namespace oboe */
+
+#endif //OBOE_OBOE_SERVICE_STREAM_BASE_H
diff --git a/services/oboeservice/OboeServiceStreamFakeHal.cpp b/services/oboeservice/OboeServiceStreamFakeHal.cpp
new file mode 100644
index 0000000..dbbc860
--- /dev/null
+++ b/services/oboeservice/OboeServiceStreamFakeHal.cpp
@@ -0,0 +1,191 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "OboeService"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "AudioClock.h"
+#include "AudioEndpointParcelable.h"
+
+#include "OboeServiceStreamBase.h"
+#include "OboeServiceStreamFakeHal.h"
+
+#include "FakeAudioHal.h"
+
+using namespace android;
+using namespace oboe;
+
+// HACK values for Marlin
+#define CARD_ID 0
+#define DEVICE_ID 19
+
+/**
+ * Construct the audio message queues.
+ */
+
+OboeServiceStreamFakeHal::OboeServiceStreamFakeHal()
+ : OboeServiceStreamBase()
+ , mStreamId(nullptr)
+ , mPreviousFrameCounter(0)
+{
+}
+
+OboeServiceStreamFakeHal::~OboeServiceStreamFakeHal() {
+ ALOGD("OboeServiceStreamFakeHal::~OboeServiceStreamFakeHal() call close()");
+ close();
+}
+
+oboe_result_t OboeServiceStreamFakeHal::open(oboe::OboeStreamRequest &request,
+ oboe::OboeStreamConfiguration &configuration) {
+ // Open stream on HAL and pass information about the ring buffer to the client.
+ mmap_buffer_info mmapInfo;
+ oboe_result_t error;
+
+ // Open HAL
+ error = fake_hal_open(CARD_ID, DEVICE_ID, &mStreamId);
+ if(error < 0) {
+ ALOGE("Could not open card %d, device %d", CARD_ID, DEVICE_ID);
+ return error;
+ }
+
+ // Get information about the shared audio buffer.
+ error = fake_hal_get_mmap_info(mStreamId, &mmapInfo);
+ if (error < 0) {
+ ALOGE("fake_hal_get_mmap_info returned %d", error);
+ fake_hal_close(mStreamId);
+ mStreamId = nullptr;
+ return error;
+ }
+ mHalFileDescriptor = mmapInfo.fd;
+ mFramesPerBurst = mmapInfo.burst_size_in_frames;
+ mCapacityInFrames = mmapInfo.buffer_capacity_in_frames;
+ mCapacityInBytes = mmapInfo.buffer_capacity_in_bytes;
+ mSampleRate = mmapInfo.sample_rate;
+ mBytesPerFrame = mmapInfo.channel_count * sizeof(int16_t); // FIXME based on data format
+ ALOGD("OboeServiceStreamFakeHal::open() mmapInfo.burst_size_in_frames = %d",
+ mmapInfo.burst_size_in_frames);
+ ALOGD("OboeServiceStreamFakeHal::open() mmapInfo.buffer_capacity_in_frames = %d",
+ mmapInfo.buffer_capacity_in_frames);
+ ALOGD("OboeServiceStreamFakeHal::open() mmapInfo.buffer_capacity_in_bytes = %d",
+ mmapInfo.buffer_capacity_in_bytes);
+
+ // Fill in OboeStreamConfiguration
+ configuration.setSampleRate(mSampleRate);
+ configuration.setSamplesPerFrame(mmapInfo.channel_count);
+ configuration.setAudioFormat(OBOE_AUDIO_FORMAT_PCM16);
+ return OBOE_OK;
+}
+
+/**
+ * Get an immutable description of the in-memory queues
+ * used to communicate with the underlying HAL or Service.
+ */
+oboe_result_t OboeServiceStreamFakeHal::getDescription(AudioEndpointParcelable &parcelable) {
+ // Gather information on the message queue.
+ mUpMessageQueue->fillParcelable(parcelable,
+ parcelable.mUpMessageQueueParcelable);
+
+ // Gather information on the data queue.
+ // TODO refactor into a SharedRingBuffer?
+ int fdIndex = parcelable.addFileDescriptor(mHalFileDescriptor, mCapacityInBytes);
+ parcelable.mDownDataQueueParcelable.setupMemory(fdIndex, 0, mCapacityInBytes);
+ parcelable.mDownDataQueueParcelable.setBytesPerFrame(mBytesPerFrame);
+ parcelable.mDownDataQueueParcelable.setFramesPerBurst(mFramesPerBurst);
+ parcelable.mDownDataQueueParcelable.setCapacityInFrames(mCapacityInFrames);
+ return OBOE_OK;
+}
+
+/**
+ * Start the flow of data.
+ */
+oboe_result_t OboeServiceStreamFakeHal::start() {
+ if (mStreamId == nullptr) return OBOE_ERROR_NULL;
+ oboe_result_t result = fake_hal_start(mStreamId);
+ sendServiceEvent(OBOE_SERVICE_EVENT_STARTED);
+ mState = OBOE_STREAM_STATE_STARTED;
+ return result;
+}
+
+/**
+ * Stop the flow of data such that start() can resume without loss of data.
+ */
+oboe_result_t OboeServiceStreamFakeHal::pause() {
+ if (mStreamId == nullptr) return OBOE_ERROR_NULL;
+ sendCurrentTimestamp();
+ oboe_result_t result = fake_hal_pause(mStreamId);
+ sendServiceEvent(OBOE_SERVICE_EVENT_PAUSED);
+ mState = OBOE_STREAM_STATE_PAUSED;
+ mFramesRead.reset32();
+ ALOGD("OboeServiceStreamFakeHal::pause() sent OBOE_SERVICE_EVENT_PAUSED");
+ return result;
+}
+
+/**
+ * Discard any data held by the underlying HAL or Service.
+ */
+oboe_result_t OboeServiceStreamFakeHal::flush() {
+ if (mStreamId == nullptr) return OBOE_ERROR_NULL;
+ // TODO how do we flush an MMAP/NOIRQ buffer? sync pointers?
+ ALOGD("OboeServiceStreamFakeHal::pause() send OBOE_SERVICE_EVENT_FLUSHED");
+ sendServiceEvent(OBOE_SERVICE_EVENT_FLUSHED);
+ mState = OBOE_STREAM_STATE_FLUSHED;
+ return OBOE_OK;
+}
+
+oboe_result_t OboeServiceStreamFakeHal::close() {
+ oboe_result_t result = OBOE_OK;
+ if (mStreamId != nullptr) {
+ result = fake_hal_close(mStreamId);
+ mStreamId = nullptr;
+ }
+ return result;
+}
+
+void OboeServiceStreamFakeHal::sendCurrentTimestamp() {
+ int frameCounter = 0;
+ int error = fake_hal_get_frame_counter(mStreamId, &frameCounter);
+ if (error < 0) {
+ ALOGE("OboeServiceStreamFakeHal::sendCurrentTimestamp() error %d",
+ error);
+ } else if (frameCounter != mPreviousFrameCounter) {
+ OboeServiceMessage command;
+ command.what = OboeServiceMessage::code::TIMESTAMP;
+ mFramesRead.update32(frameCounter);
+ command.timestamp.position = mFramesRead.get();
+ ALOGV("OboeServiceStreamFakeHal::sendCurrentTimestamp() HAL frames = %d, pos = %d",
+ frameCounter, (int)mFramesRead.get());
+ command.timestamp.timestamp = AudioClock::getNanoseconds();
+ mUpMessageQueue->getFifoBuffer()->write(&command, 1);
+ mPreviousFrameCounter = frameCounter;
+ }
+}
+
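+// Called via OboeAudioService::tickle() (a stand-in until the service has its
+// own timing thread, per the TODO in OboeAudioService.h); while the stream is
+// running this pushes fresh timestamps up to the client.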
+void OboeServiceStreamFakeHal::tickle() {
+ if (mStreamId != nullptr) {
+ switch (mState) {
+ case OBOE_STREAM_STATE_STARTING:
+ case OBOE_STREAM_STATE_STARTED:
+ case OBOE_STREAM_STATE_PAUSING:
+ case OBOE_STREAM_STATE_STOPPING:
+ sendCurrentTimestamp();
+ break;
+ default:
+ break;
+ }
+ }
+}
+
diff --git a/services/oboeservice/OboeServiceStreamFakeHal.h b/services/oboeservice/OboeServiceStreamFakeHal.h
new file mode 100644
index 0000000..b026d34
--- /dev/null
+++ b/services/oboeservice/OboeServiceStreamFakeHal.h
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_OBOE_SERVICE_STREAM_FAKE_HAL_H
+#define OBOE_OBOE_SERVICE_STREAM_FAKE_HAL_H
+
+#include "OboeService.h"
+#include "OboeServiceStreamBase.h"
+#include "FakeAudioHal.h"
+#include "MonotonicCounter.h"
+#include "AudioEndpointParcelable.h"
+
+namespace oboe {
+
+class OboeServiceStreamFakeHal : public OboeServiceStreamBase {
+
+public:
+ OboeServiceStreamFakeHal();
+ virtual ~OboeServiceStreamFakeHal();
+
+ virtual oboe_result_t getDescription(AudioEndpointParcelable &parcelable) override;
+
+ virtual oboe_result_t open(oboe::OboeStreamRequest &request,
+ oboe::OboeStreamConfiguration &configuration) override;
+
+ /**
+ * Start the flow of data.
+ */
+ virtual oboe_result_t start() override;
+
+ /**
+     * Stop the flow of data such that start() can resume without loss of data.
+ */
+ virtual oboe_result_t pause() override;
+
+ /**
+ * Discard any data held by the underlying HAL or Service.
+ */
+ virtual oboe_result_t flush() override;
+
+ virtual oboe_result_t close() override;
+
+ virtual void tickle() override;
+
+protected:
+
+ void sendCurrentTimestamp();
+
+private:
+ fake_hal_stream_ptr mStreamId; // Move to HAL
+
+ MonotonicCounter mFramesWritten;
+ MonotonicCounter mFramesRead;
+ int mHalFileDescriptor = -1;
+ int mPreviousFrameCounter = 0; // from HAL
+
+ oboe_stream_state_t mState = OBOE_STREAM_STATE_UNINITIALIZED;
+};
+
+} // namespace oboe
+
+#endif //OBOE_OBOE_SERVICE_STREAM_FAKE_HAL_H
diff --git a/services/oboeservice/SharedRingBuffer.cpp b/services/oboeservice/SharedRingBuffer.cpp
new file mode 100644
index 0000000..c3df5ce
--- /dev/null
+++ b/services/oboeservice/SharedRingBuffer.cpp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "OboeService"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "AudioClock.h"
+#include "AudioEndpointParcelable.h"
+
+//#include "OboeServiceStreamBase.h"
+//#include "OboeServiceStreamFakeHal.h"
+
+#include "SharedRingBuffer.h"
+
+using namespace android;
+using namespace oboe;
+
+SharedRingBuffer::~SharedRingBuffer()
+{
+ if (mSharedMemory != nullptr) {
+ delete mFifoBuffer;
+ munmap(mSharedMemory, mSharedMemorySizeInBytes);
+ close(mFileDescriptor);
+ mSharedMemory = nullptr;
+ }
+}
+
+oboe_result_t SharedRingBuffer::allocate(fifo_frames_t bytesPerFrame,
+ fifo_frames_t capacityInFrames) {
+ mCapacityInFrames = capacityInFrames;
+
+ // Create shared memory large enough to hold the data and the read and write counters.
+ mDataMemorySizeInBytes = bytesPerFrame * capacityInFrames;
+ mSharedMemorySizeInBytes = mDataMemorySizeInBytes + (2 * (sizeof(fifo_counter_t)));
+ mFileDescriptor = ashmem_create_region("OboeSharedRingBuffer", mSharedMemorySizeInBytes);
+ if (mFileDescriptor < 0) {
+ ALOGE("SharedRingBuffer::allocate() ashmem_create_region() failed %d", errno);
+ return OBOE_ERROR_INTERNAL;
+ }
+ int err = ashmem_set_prot_region(mFileDescriptor, PROT_READ|PROT_WRITE); // TODO error handling?
+ if (err < 0) {
+ ALOGE("SharedRingBuffer::allocate() ashmem_set_prot_region() failed %d", errno);
+ close(mFileDescriptor);
+ return OBOE_ERROR_INTERNAL; // TODO convert errno to a better OBOE_ERROR;
+ }
+
+ // Map the fd to memory addresses.
+ mSharedMemory = (uint8_t *) mmap(0, mSharedMemorySizeInBytes,
+ PROT_READ|PROT_WRITE,
+ MAP_SHARED,
+ mFileDescriptor, 0);
+ if (mSharedMemory == MAP_FAILED) {
+ ALOGE("SharedRingBuffer::allocate() mmap() failed %d", errno);
+ close(mFileDescriptor);
+ return OBOE_ERROR_INTERNAL; // TODO convert errno to a better OBOE_ERROR;
+ }
+
+ // Get addresses for our counters and data from the shared memory.
+ fifo_counter_t *readCounterAddress =
+ (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_READ_OFFSET];
+ fifo_counter_t *writeCounterAddress =
+ (fifo_counter_t *) &mSharedMemory[SHARED_RINGBUFFER_WRITE_OFFSET];
+ uint8_t *dataAddress = &mSharedMemory[SHARED_RINGBUFFER_DATA_OFFSET];
+
+ mFifoBuffer = new(std::nothrow) FifoBuffer(bytesPerFrame, capacityInFrames,
+ readCounterAddress, writeCounterAddress, dataAddress);
+ return (mFifoBuffer == nullptr) ? OBOE_ERROR_NO_MEMORY : OBOE_OK;
+}
+
+void SharedRingBuffer::fillParcelable(AudioEndpointParcelable &endpointParcelable,
+ RingBufferParcelable &ringBufferParcelable) {
+ int fdIndex = endpointParcelable.addFileDescriptor(mFileDescriptor, mSharedMemorySizeInBytes);
+ ringBufferParcelable.setupMemory(fdIndex,
+ SHARED_RINGBUFFER_DATA_OFFSET,
+ mDataMemorySizeInBytes,
+ SHARED_RINGBUFFER_READ_OFFSET,
+ SHARED_RINGBUFFER_WRITE_OFFSET,
+ sizeof(fifo_counter_t));
+ ringBufferParcelable.setBytesPerFrame(mFifoBuffer->getBytesPerFrame());
+ ringBufferParcelable.setFramesPerBurst(1);
+ ringBufferParcelable.setCapacityInFrames(mCapacityInFrames);
+}
diff --git a/services/oboeservice/SharedRingBuffer.h b/services/oboeservice/SharedRingBuffer.h
new file mode 100644
index 0000000..3cc1c2d
--- /dev/null
+++ b/services/oboeservice/SharedRingBuffer.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OBOE_SHARED_RINGBUFFER_H
+#define OBOE_SHARED_RINGBUFFER_H
+
+#include <stdint.h>
+#include <cutils/ashmem.h>
+#include <sys/mman.h>
+
+#include "fifo/FifoBuffer.h"
+#include "RingBufferParcelable.h"
+#include "AudioEndpointParcelable.h"
+
+namespace oboe {
+
+// Determine the placement of the counters and data in shared memory.
+#define SHARED_RINGBUFFER_READ_OFFSET 0
+#define SHARED_RINGBUFFER_WRITE_OFFSET sizeof(fifo_counter_t)
+#define SHARED_RINGBUFFER_DATA_OFFSET (SHARED_RINGBUFFER_WRITE_OFFSET + sizeof(fifo_counter_t))
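+
+// Resulting layout of the shared memory region:
+//   [ read counter ][ write counter ][ data ... ]
+// Each counter occupies sizeof(fifo_counter_t) bytes; the data region starts
+// at SHARED_RINGBUFFER_DATA_OFFSET and is bytesPerFrame * capacityInFrames
+// bytes long (see SharedRingBuffer::allocate()).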
+
+/**
+ * Atomic FIFO that uses shared memory.
+ */
+class SharedRingBuffer {
+public:
+ SharedRingBuffer() {}
+
+ virtual ~SharedRingBuffer();
+
+ oboe_result_t allocate(fifo_frames_t bytesPerFrame, fifo_frames_t capacityInFrames);
+
+ void fillParcelable(AudioEndpointParcelable &endpointParcelable,
+ RingBufferParcelable &ringBufferParcelable);
+
+ FifoBuffer * getFifoBuffer() {
+ return mFifoBuffer;
+ }
+
+private:
+ int mFileDescriptor = -1;
+ FifoBuffer * mFifoBuffer = nullptr;
+ uint8_t * mSharedMemory = nullptr;
+ int32_t mSharedMemorySizeInBytes = 0;
+ int32_t mDataMemorySizeInBytes = 0;
+ fifo_frames_t mCapacityInFrames = 0;
+};
+
+} /* namespace oboe */
+
+#endif //OBOE_SHARED_RINGBUFFER_H
diff --git a/services/radio/Android.mk b/services/radio/Android.mk
index 4344506..1b50dc3 100644
--- a/services/radio/Android.mk
+++ b/services/radio/Android.mk
@@ -30,9 +30,13 @@
libradio \
libradio_metadata
-ifeq ($(ENABLE_TREBLE),true)
+ifeq ($(USE_LEGACY_LOCAL_AUDIO_HAL),true)
+# libhardware configuration
+LOCAL_SRC_FILES += \
+ RadioHalLegacy.cpp
+else
# Treble configuration
-LOCAL_CFLAGS += -DENABLE_TREBLE
+
LOCAL_SRC_FILES += \
HidlUtils.cpp \
RadioHalHidl.cpp
@@ -42,14 +46,10 @@
libhidlbase \
libhidltransport \
libbase \
+ libaudiohal \
android.hardware.broadcastradio@1.0
-else
-# libhardware configuration
-LOCAL_SRC_FILES += \
- RadioHalLegacy.cpp
endif
-
LOCAL_CFLAGS += -Wall -Wextra -Werror
LOCAL_MULTILIB := $(AUDIOSERVER_MULTILIB)
diff --git a/services/radio/HidlUtils.cpp b/services/radio/HidlUtils.cpp
index bfced7a..3b33386 100644
--- a/services/radio/HidlUtils.cpp
+++ b/services/radio/HidlUtils.cpp
@@ -122,8 +122,7 @@
//static
void HidlUtils::convertProgramInfoFromHal(radio_program_info_t *info,
- const ProgramInfo *halInfo,
- bool withMetadata)
+ const ProgramInfo *halInfo)
{
info->channel = halInfo->channel;
info->sub_channel = halInfo->subChannel;
@@ -131,10 +130,8 @@
info->stereo = halInfo->stereo;
info->digital = halInfo->digital;
info->signal_strength = halInfo->signalStrength;
- if (withMetadata && halInfo->metadata.size() != 0) {
- convertMetaDataFromHal(&info->metadata, halInfo->metadata,
- halInfo->channel, halInfo->subChannel);
- }
+ convertMetaDataFromHal(&info->metadata, halInfo->metadata,
+ halInfo->channel, halInfo->subChannel);
}
//static
diff --git a/services/radio/HidlUtils.h b/services/radio/HidlUtils.h
index 091abb7..c771060 100644
--- a/services/radio/HidlUtils.h
+++ b/services/radio/HidlUtils.h
@@ -38,8 +38,7 @@
static void convertBandConfigToHal(BandConfig *halConfig,
const radio_hal_band_config_t *config);
static void convertProgramInfoFromHal(radio_program_info_t *info,
- const ProgramInfo *halInfo,
- bool withMetadata);
+ const ProgramInfo *halInfo);
static void convertMetaDataFromHal(radio_metadata_t **metadata,
const hidl_vec<MetaData>& halMetadata,
uint32_t channel,
diff --git a/services/radio/RadioHalHidl.cpp b/services/radio/RadioHalHidl.cpp
index c7a899a..032d3fd 100644
--- a/services/radio/RadioHalHidl.cpp
+++ b/services/radio/RadioHalHidl.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "RadioHalHidl"
//#define LOG_NDEBUG 0
+#include <media/audiohal/hidl/HalDeathHandler.h>
#include <utils/Log.h>
#include <utils/misc.h>
#include <system/radio_metadata.h>
@@ -39,11 +40,6 @@
return new RadioHalHidl(classId);
}
-void RadioHalHidl::crashIfHalIsDead(const Status& status) {
- LOG_ALWAYS_FATAL_IF(
- status.transactionError() == DEAD_OBJECT, "HAL server crashed, need to restart");
-}
-
int RadioHalHidl::getProperties(radio_hal_properties_t *properties)
{
ALOGV("%s IN", __FUNCTION__);
@@ -52,7 +48,7 @@
return -ENODEV;
}
Properties halProperties;
- Result halResult;
+ Result halResult = Result::NOT_INITIALIZED;
Return<void> hidlReturn =
module->getProperties([&](Result result, const Properties& properties) {
halResult = result;
@@ -61,7 +57,6 @@
}
});
- crashIfHalIsDead(hidlReturn.getStatus());
if (halResult == Result::OK) {
HidlUtils::convertPropertiesFromHal(properties, &halProperties);
}
@@ -80,7 +75,7 @@
sp<Tuner> tunerImpl = new Tuner(callback, this);
BandConfig halConfig;
- Result halResult;
+ Result halResult = Result::NOT_INITIALIZED;
sp<ITuner> halTuner;
HidlUtils::convertBandConfigToHal(&halConfig, config);
@@ -93,7 +88,6 @@
}
});
- crashIfHalIsDead(hidlReturn.getStatus());
if (halResult == Result::OK) {
tunerImpl->setHalTuner(halTuner);
tuner = tunerImpl;
@@ -154,7 +148,6 @@
HidlUtils::convertBandConfigToHal(&halConfig, config);
Return<Result> hidlResult = mHalTuner->setConfiguration(halConfig);
- checkHidlStatus(hidlResult.getStatus());
return HidlUtils::convertHalResult(hidlResult);
}
@@ -173,8 +166,7 @@
halConfig = config;
}
});
- status_t status = checkHidlStatus(hidlReturn.getStatus());
- if (status == NO_ERROR && halResult == Result::OK) {
+ if (hidlReturn.isOk() && halResult == Result::OK) {
HidlUtils::convertBandConfigFromHal(config, &halConfig);
}
return HidlUtils::convertHalResult(halResult);
@@ -188,7 +180,6 @@
}
Return<Result> hidlResult =
mHalTuner->scan(static_cast<Direction>(direction), skip_sub_channel);
- checkHidlStatus(hidlResult.getStatus());
return HidlUtils::convertHalResult(hidlResult);
}
@@ -200,7 +191,6 @@
}
Return<Result> hidlResult =
mHalTuner->step(static_cast<Direction>(direction), skip_sub_channel);
- checkHidlStatus(hidlResult.getStatus());
return HidlUtils::convertHalResult(hidlResult);
}
@@ -212,7 +202,6 @@
}
Return<Result> hidlResult =
mHalTuner->tune(channel, sub_channel);
- checkHidlStatus(hidlResult.getStatus());
return HidlUtils::convertHalResult(hidlResult);
}
@@ -223,7 +212,6 @@
return -ENODEV;
}
Return<Result> hidlResult = mHalTuner->cancel();
- checkHidlStatus(hidlResult.getStatus());
return HidlUtils::convertHalResult(hidlResult);
}
@@ -233,19 +221,20 @@
if (mHalTuner == 0) {
return -ENODEV;
}
+ if (info == nullptr || info->metadata == nullptr) {
+ return BAD_VALUE;
+ }
ProgramInfo halInfo;
Result halResult;
- bool withMetaData = (info->metadata != NULL);
Return<void> hidlReturn = mHalTuner->getProgramInformation(
- withMetaData, [&](Result result, const ProgramInfo& info) {
- halResult = result;
- if (result == Result::OK) {
- halInfo = info;
- }
- });
- status_t status = checkHidlStatus(hidlReturn.getStatus());
- if (status == NO_ERROR && halResult == Result::OK) {
- HidlUtils::convertProgramInfoFromHal(info, &halInfo, withMetaData);
+ [&](Result result, const ProgramInfo& info) {
+ halResult = result;
+ if (result == Result::OK) {
+ halInfo = info;
+ }
+ });
+ if (hidlReturn.isOk() && halResult == Result::OK) {
+ HidlUtils::convertProgramInfoFromHal(info, &halInfo);
}
return HidlUtils::convertHalResult(halResult);
}
@@ -276,11 +265,9 @@
memset(&event, 0, sizeof(radio_hal_event_t));
event.type = RADIO_EVENT_TUNED;
event.status = HidlUtils::convertHalResult(result);
- HidlUtils::convertProgramInfoFromHal(&event.info, &info, true);
+ HidlUtils::convertProgramInfoFromHal(&event.info, &info);
onCallback(&event);
- if (event.info.metadata != NULL) {
- radio_metadata_deallocate(event.info.metadata);
- }
+ radio_metadata_deallocate(event.info.metadata);
return Return<void>();
}
@@ -290,7 +277,7 @@
radio_hal_event_t event;
memset(&event, 0, sizeof(radio_hal_event_t));
event.type = RADIO_EVENT_AF_SWITCH;
- HidlUtils::convertProgramInfoFromHal(&event.info, &info, true);
+ HidlUtils::convertProgramInfoFromHal(&event.info, &info);
onCallback(&event);
if (event.info.metadata != NULL) {
radio_metadata_deallocate(event.info.metadata);
@@ -347,11 +334,27 @@
RadioHalHidl::Tuner::Tuner(sp<TunerCallbackInterface> callback, sp<RadioHalHidl> module)
: TunerInterface(), mHalTuner(NULL), mCallback(callback), mParentModule(module)
{
+ // Make sure the handler we are passing in only deals with const members,
+ // as it can be called on an arbitrary thread.
+ const auto& self = this;
+ HalDeathHandler::getInstance()->registerAtExitHandler(
+ this, [&self]() { self->sendHwFailureEvent(); });
}
RadioHalHidl::Tuner::~Tuner()
{
+ HalDeathHandler::getInstance()->unregisterAtExitHandler(this);
+}
+
+void RadioHalHidl::Tuner::setHalTuner(sp<ITuner>& halTuner) {
+ if (mHalTuner != 0) {
+ mHalTuner->unlinkToDeath(HalDeathHandler::getInstance());
+ }
+ mHalTuner = halTuner;
+ if (mHalTuner != 0) {
+ mHalTuner->linkToDeath(HalDeathHandler::getInstance(), 0 /*cookie*/);
+ }
}
void RadioHalHidl::Tuner::handleHwFailure()
@@ -361,24 +364,19 @@
if (parentModule != 0) {
parentModule->clearService();
}
+ sendHwFailureEvent();
+ mHalTuner.clear();
+}
+
+void RadioHalHidl::Tuner::sendHwFailureEvent() const
+{
radio_hal_event_t event;
memset(&event, 0, sizeof(radio_hal_event_t));
event.type = RADIO_EVENT_HW_FAILURE;
onCallback(&event);
- mHalTuner.clear();
}
-status_t RadioHalHidl::Tuner::checkHidlStatus(Status hidlStatus)
-{
- status_t status = hidlStatus.transactionError();
- if (status == DEAD_OBJECT) {
- handleHwFailure();
- }
- RadioHalHidl::crashIfHalIsDead(hidlStatus);
- return status;
-}
-
-void RadioHalHidl::Tuner::onCallback(radio_hal_event_t *halEvent)
+void RadioHalHidl::Tuner::onCallback(radio_hal_event_t *halEvent) const
{
if (mCallback != 0) {
mCallback->onEvent(halEvent);
diff --git a/services/radio/RadioHalHidl.h b/services/radio/RadioHalHidl.h
index 1657a40..38e181a 100644
--- a/services/radio/RadioHalHidl.h
+++ b/services/radio/RadioHalHidl.h
@@ -45,8 +45,6 @@
public:
RadioHalHidl(radio_class_t classId);
- static void crashIfHalIsDead(const Status& status);
-
// RadioInterface
virtual int getProperties(radio_hal_properties_t *properties);
virtual int openTuner(const radio_hal_band_config_t *config,
@@ -80,18 +78,18 @@
virtual Return<void> newMetadata(uint32_t channel, uint32_t subChannel,
const ::android::hardware::hidl_vec<MetaData>& metadata);
- void setHalTuner(sp<ITuner>& halTuner) { mHalTuner = halTuner; }
+ void setHalTuner(sp<ITuner>& halTuner);
sp<ITuner> getHalTuner() { return mHalTuner; }
private:
virtual ~Tuner();
- void onCallback(radio_hal_event_t *halEvent);
+ void onCallback(radio_hal_event_t *halEvent) const;
void handleHwFailure();
- status_t checkHidlStatus(Status hidlStatus);
+ void sendHwFailureEvent() const;
sp<ITuner> mHalTuner;
- sp<TunerCallbackInterface> mCallback;
+ const sp<TunerCallbackInterface> mCallback;
wp<RadioHalHidl> mParentModule;
};
diff --git a/services/soundtrigger/Android.mk b/services/soundtrigger/Android.mk
index e1e1fb1..3e7a7ce 100644
--- a/services/soundtrigger/Android.mk
+++ b/services/soundtrigger/Android.mk
@@ -34,23 +34,23 @@
libserviceutility
-ifeq ($(ENABLE_TREBLE),true)
+ifeq ($(USE_LEGACY_LOCAL_AUDIO_HAL),true)
+# libhardware configuration
+LOCAL_SRC_FILES += \
+ SoundTriggerHalLegacy.cpp
+else
# Treble configuration
-LOCAL_CFLAGS += -DENABLE_TREBLE
LOCAL_SRC_FILES += \
SoundTriggerHalHidl.cpp
LOCAL_SHARED_LIBRARIES += \
- libhwbinder \
- libhidlbase \
- libhidltransport \
- libbase \
- android.hardware.soundtrigger@2.0 \
- android.hardware.audio.common@2.0
-else
-# libhardware configuration
-LOCAL_SRC_FILES += \
- SoundTriggerHalLegacy.cpp
+ libhwbinder \
+ libhidlbase \
+ libhidltransport \
+ libbase \
+ libaudiohal \
+ android.hardware.soundtrigger@2.0 \
+ android.hardware.audio.common@2.0
endif
diff --git a/services/soundtrigger/SoundTriggerHalHidl.cpp b/services/soundtrigger/SoundTriggerHalHidl.cpp
index c027799..7cc8a2b 100644
--- a/services/soundtrigger/SoundTriggerHalHidl.cpp
+++ b/services/soundtrigger/SoundTriggerHalHidl.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "SoundTriggerHalHidl"
//#define LOG_NDEBUG 0
+#include <media/audiohal/hidl/HalDeathHandler.h>
#include <utils/Log.h>
#include "SoundTriggerHalHidl.h"
#include <hwbinder/IPCThreadState.h>
@@ -53,13 +54,13 @@
});
}
- if (hidlReturn.getStatus().isOk()) {
+ if (hidlReturn.isOk()) {
if (ret == 0) {
convertPropertiesFromHal(properties, &halProperties);
}
} else {
- ret = (int)hidlReturn.getStatus().transactionError();
- crashIfHalIsDead(ret);
+ ALOGE("getProperties error %s", hidlReturn.description().c_str());
+ return FAILED_TRANSACTION;
}
ALOGI("getProperties ret %d", ret);
return ret;
@@ -123,7 +124,7 @@
delete halSoundModel;
- if (hidlReturn.getStatus().isOk()) {
+ if (hidlReturn.isOk()) {
if (ret == 0) {
AutoMutex lock(mLock);
*handle = (sound_model_handle_t)modelId;
@@ -131,12 +132,10 @@
mSoundModels.add(*handle, model);
}
} else {
- ret = (int)hidlReturn.getStatus().transactionError();
- ALOGE("loadSoundModel error %d", ret);
- crashIfHalIsDead(ret);
+ ALOGE("loadSoundModel error %s", hidlReturn.description().c_str());
+ return FAILED_TRANSACTION;
}
-
return ret;
}
@@ -158,13 +157,13 @@
AutoMutex lock(mHalLock);
hidlReturn = soundtrigger->unloadSoundModel(model->mHalHandle);
}
- int ret = (int)hidlReturn.getStatus().transactionError();
- ALOGE_IF(ret != 0, "unloadSoundModel error %d", ret);
- crashIfHalIsDead(ret);
- if (ret == 0) {
- ret = hidlReturn;
+
+ if (!hidlReturn.isOk()) {
+ ALOGE("unloadSoundModel error %s", hidlReturn.description().c_str());
+ return FAILED_TRANSACTION;
}
- return ret;
+
+ return hidlReturn;
}
int SoundTriggerHalHidl::startRecognition(sound_model_handle_t handle,
@@ -197,13 +196,11 @@
delete halConfig;
- int ret = (int)hidlReturn.getStatus().transactionError();
- ALOGE_IF(ret != 0, "startRecognition error %d", ret);
- crashIfHalIsDead(ret);
- if (ret == 0) {
- ret = hidlReturn;
+ if (!hidlReturn.isOk()) {
+ ALOGE("startRecognition error %s", hidlReturn.description().c_str());
+ return FAILED_TRANSACTION;
}
- return ret;
+ return hidlReturn;
}
int SoundTriggerHalHidl::stopRecognition(sound_model_handle_t handle)
@@ -225,13 +222,11 @@
hidlReturn = soundtrigger->stopRecognition(model->mHalHandle);
}
- int ret = (int)hidlReturn.getStatus().transactionError();
- ALOGE_IF(ret != 0, "stopRecognition error %d", ret);
- crashIfHalIsDead(ret);
- if (ret == 0) {
- ret = hidlReturn;
+ if (!hidlReturn.isOk()) {
+ ALOGE("stopRecognition error %s", hidlReturn.description().c_str());
+ return FAILED_TRANSACTION;
}
- return ret;
+ return hidlReturn;
}
int SoundTriggerHalHidl::stopAllRecognitions()
@@ -247,13 +242,11 @@
hidlReturn = soundtrigger->stopAllRecognitions();
}
- int ret = (int)hidlReturn.getStatus().transactionError();
- ALOGE_IF(ret != 0, "stopAllRecognitions error %d", ret);
- crashIfHalIsDead(ret);
- if (ret == 0) {
- ret = hidlReturn;
+ if (!hidlReturn.isOk()) {
+ ALOGE("stopAllRecognitions error %s", hidlReturn.description().c_str());
+ return FAILED_TRANSACTION;
}
- return ret;
+ return hidlReturn;
}
SoundTriggerHalHidl::SoundTriggerHalHidl(const char *moduleName)
@@ -275,15 +268,13 @@
std::string serviceName = "sound_trigger.";
serviceName.append(mModuleName);
mISoundTrigger = ISoundTriggerHw::getService(serviceName);
+ if (mISoundTrigger != 0) {
+ mISoundTrigger->linkToDeath(HalDeathHandler::getInstance(), 0 /*cookie*/);
+ }
}
return mISoundTrigger;
}
-void SoundTriggerHalHidl::crashIfHalIsDead(int ret)
-{
- LOG_ALWAYS_FATAL_IF(ret == -EPIPE, "HAL server crashed, need to restart");
-}
-
sp<SoundTriggerHalHidl::SoundModel> SoundTriggerHalHidl::getModel(sound_model_handle_t handle)
{
AutoMutex lock(mLock);
diff --git a/services/soundtrigger/SoundTriggerHalHidl.h b/services/soundtrigger/SoundTriggerHalHidl.h
index b235e1c..916fcc4 100644
--- a/services/soundtrigger/SoundTriggerHalHidl.h
+++ b/services/soundtrigger/SoundTriggerHalHidl.h
@@ -25,7 +25,6 @@
#include <android/hardware/soundtrigger/2.0/types.h>
#include <android/hardware/soundtrigger/2.0/ISoundTriggerHw.h>
#include <android/hardware/soundtrigger/2.0/ISoundTriggerHwCallback.h>
-#include <android/hardware/soundtrigger/2.0/BnSoundTriggerHwCallback.h>
namespace android {
@@ -143,7 +142,6 @@
uint32_t nextUniqueId();
sp<ISoundTriggerHw> getService();
- void crashIfHalIsDead(int ret);
sp<SoundModel> getModel(sound_model_handle_t handle);
sp<SoundModel> removeModel(sound_model_handle_t handle);