Merge "Camera: Add automotive metadata section"
diff --git a/camera/CameraSessionStats.cpp b/camera/CameraSessionStats.cpp
index bc83ec1..2a07ffc 100644
--- a/camera/CameraSessionStats.cpp
+++ b/camera/CameraSessionStats.cpp
@@ -118,6 +118,12 @@
return err;
}
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ if ((err = parcel->readInt32(&streamUseCase)) != OK) {
+ ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
+ return err;
+ }
+
mWidth = width;
mHeight = height;
mFormat = format;
@@ -132,6 +138,7 @@
mHistogramBins = std::move(histogramBins);
mHistogramCounts = std::move(histogramCounts);
mDynamicRangeProfile = dynamicRangeProfile;
+ mStreamUseCase = streamUseCase;
return OK;
}
@@ -214,6 +221,11 @@
return err;
}
+ if ((err = parcel->writeInt32(mStreamUseCase)) != OK) {
+ ALOGE("%s: Failed to write stream use case!", __FUNCTION__);
+ return err;
+ }
+
return OK;
}
diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp
index 15c9dc9..0982bba 100644
--- a/camera/camera2/OutputConfiguration.cpp
+++ b/camera/camera2/OutputConfiguration.cpp
@@ -81,6 +81,10 @@
return mDynamicRangeProfile;
}
+int OutputConfiguration::getStreamUseCase() const {
+ return mStreamUseCase;
+}
+
OutputConfiguration::OutputConfiguration() :
mRotation(INVALID_ROTATION),
mSurfaceSetID(INVALID_SET_ID),
@@ -90,7 +94,8 @@
mIsDeferred(false),
mIsShared(false),
mIsMultiResolution(false),
- mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
+ mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
}
OutputConfiguration::OutputConfiguration(const android::Parcel& parcel) :
@@ -177,6 +182,12 @@
return err;
}
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
+ if ((err = parcel->readInt32(&streamUseCase)) != OK) {
+ ALOGE("%s: Failed to read stream use case from parcel", __FUNCTION__);
+ return err;
+ }
+
mRotation = rotation;
mSurfaceSetID = setID;
mSurfaceType = surfaceType;
@@ -185,6 +196,7 @@
mIsDeferred = isDeferred != 0;
mIsShared = isShared != 0;
mIsMultiResolution = isMultiResolution != 0;
+ mStreamUseCase = streamUseCase;
for (auto& surface : surfaceShims) {
ALOGV("%s: OutputConfiguration: %p, name %s", __FUNCTION__,
surface.graphicBufferProducer.get(),
@@ -196,8 +208,9 @@
mDynamicRangeProfile = dynamicProfile;
ALOGV("%s: OutputConfiguration: rotation = %d, setId = %d, surfaceType = %d,"
- " physicalCameraId = %s, isMultiResolution = %d", __FUNCTION__, mRotation,
- mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(), mIsMultiResolution);
+ " physicalCameraId = %s, isMultiResolution = %d, streamUseCase = %d", __FUNCTION__,
+ mRotation, mSurfaceSetID, mSurfaceType, String8(mPhysicalCameraId).string(),
+ mIsMultiResolution, mStreamUseCase);
return err;
}
@@ -213,6 +226,7 @@
mPhysicalCameraId = physicalId;
mIsMultiResolution = false;
mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+ mStreamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
}
OutputConfiguration::OutputConfiguration(
@@ -222,7 +236,8 @@
: mGbps(gbps), mRotation(rotation), mSurfaceSetID(surfaceSetID), mSurfaceType(surfaceType),
mWidth(width), mHeight(height), mIsDeferred(false), mIsShared(isShared),
mPhysicalCameraId(physicalCameraId), mIsMultiResolution(false),
- mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) { }
+ mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) { }
status_t OutputConfiguration::writeToParcel(android::Parcel* parcel) const {
@@ -272,6 +287,9 @@
err = parcel->writeInt32(mDynamicRangeProfile ? 1 : 0);
if (err != OK) return err;
+ err = parcel->writeInt32(mStreamUseCase);
+ if (err != OK) return err;
+
return OK;
}
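
Both parcelable changes here (and in CameraSessionStats.cpp above) follow the same rule: the new stream use case int is written immediately after the dynamic range profile and read back from the same position, so the serialized layouts of writer and reader stay in lockstep. A minimal sketch of that invariant, using a hypothetical trimmed-down parcelable (names are illustrative, not the real OutputConfiguration):

```cpp
#include <binder/Parcel.h>
#include <utils/Errors.h>

using android::OK;
using android::Parcel;
using android::status_t;

// Hypothetical parcelable: fields must be written and read in the same order.
struct StreamInfoLite {
    int32_t dynamicRangeProfile = 0;
    int32_t streamUseCase = 0;  // new field, appended last

    status_t writeToParcel(Parcel* parcel) const {
        status_t err;
        if ((err = parcel->writeInt32(dynamicRangeProfile)) != OK) return err;
        // Appending the new field keeps the layout compatible up to this point.
        return parcel->writeInt32(streamUseCase);
    }

    status_t readFromParcel(const Parcel* parcel) {
        status_t err;
        if ((err = parcel->readInt32(&dynamicRangeProfile)) != OK) return err;
        return parcel->readInt32(&streamUseCase);
    }
};
```
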
diff --git a/camera/include/camera/CameraSessionStats.h b/camera/include/camera/CameraSessionStats.h
index 1209a20..ab2d903 100644
--- a/camera/include/camera/CameraSessionStats.h
+++ b/camera/include/camera/CameraSessionStats.h
@@ -64,19 +64,24 @@
// Dynamic range profile
int mDynamicRangeProfile;
+ // Stream use case
+ int mStreamUseCase;
CameraStreamStats() :
mWidth(0), mHeight(0), mFormat(0), mDataSpace(0), mUsage(0),
mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
mMaxHalBuffers(0), mMaxAppBuffers(0), mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
- mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {}
+ mDynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ mStreamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
CameraStreamStats(int width, int height, int format, int dataSpace, int64_t usage,
- int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile)
+ int maxHalBuffers, int maxAppBuffers, int dynamicRangeProfile,
+ int streamUseCase)
: mWidth(width), mHeight(height), mFormat(format), mDataSpace(dataSpace),
mUsage(usage), mRequestCount(0), mErrorCount(0), mStartLatencyMs(0),
mMaxHalBuffers(maxHalBuffers), mMaxAppBuffers(maxAppBuffers),
mHistogramType(HISTOGRAM_TYPE_UNKNOWN),
- mDynamicRangeProfile(dynamicRangeProfile) {}
+ mDynamicRangeProfile(dynamicRangeProfile),
+ mStreamUseCase(streamUseCase) {}
virtual status_t readFromParcel(const android::Parcel* parcel) override;
virtual status_t writeToParcel(android::Parcel* parcel) const override;
diff --git a/camera/include/camera/camera2/OutputConfiguration.h b/camera/include/camera/camera2/OutputConfiguration.h
index 1631903..f1cb6bd 100644
--- a/camera/include/camera/camera2/OutputConfiguration.h
+++ b/camera/include/camera/camera2/OutputConfiguration.h
@@ -49,6 +49,7 @@
bool isShared() const;
String16 getPhysicalCameraId() const;
bool isMultiResolution() const;
+ int getStreamUseCase() const;
// set of sensor pixel mode resolutions allowed {MAX_RESOLUTION, DEFAULT_MODE};
const std::vector<int32_t>& getSensorPixelModesUsed() const;
@@ -91,7 +92,8 @@
mPhysicalCameraId == other.mPhysicalCameraId &&
mIsMultiResolution == other.mIsMultiResolution &&
sensorPixelModesUsedEqual(other) &&
- mDynamicRangeProfile == other.mDynamicRangeProfile);
+ mDynamicRangeProfile == other.mDynamicRangeProfile &&
+ mStreamUseCase == other.mStreamUseCase);
}
bool operator != (const OutputConfiguration& other) const {
return !(*this == other);
@@ -131,6 +133,9 @@
if (mDynamicRangeProfile != other.mDynamicRangeProfile) {
return mDynamicRangeProfile < other.mDynamicRangeProfile;
}
+ if (mStreamUseCase != other.mStreamUseCase) {
+ return mStreamUseCase < other.mStreamUseCase;
+ }
return gbpsLessThan(other);
}
@@ -156,6 +161,7 @@
bool mIsMultiResolution;
std::vector<int32_t> mSensorPixelModesUsed;
int mDynamicRangeProfile;
+ int mStreamUseCase;
};
} // namespace params
} // namespace camera2
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 75e2ad8..4911429 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -4213,6 +4213,55 @@
*/
ACAMERA_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED = // byte (acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t)
ACAMERA_SCALER_START + 24,
+ /**
+ * <p>The stream use cases supported by this camera device.</p>
+ *
+ * <p>Type: int32[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>The stream use case indicates the purpose of a particular camera stream from
+ * the end-user perspective. Some examples of camera use cases are: preview stream for
+ * live viewfinder shown to the user, still capture for generating high quality photo
+ * capture, video record for encoding the camera output for the purpose of future playback,
+ * and video call for live realtime video conferencing.</p>
+ * <p>With this flag, the camera device can optimize the image processing pipeline
+ * parameters, such as tuning, sensor mode, and ISP settings, independent of
+ * the properties of the immediate camera output surface. For example, if the output
+ * surface is a SurfaceTexture, the stream use case flag can be used to indicate whether
+ * the camera frames eventually go to display, video encoder,
+ * still image capture, or all of them combined.</p>
+ * <p>The application sets the use case of a camera stream by calling
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/params/OutputConfiguration.html#setStreamUseCase">OutputConfiguration#setStreamUseCase</a>.</p>
+ * <p>A camera device with
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
+ * capability must support the following stream use cases:</p>
+ * <ul>
+ * <li>DEFAULT</li>
+ * <li>PREVIEW</li>
+ * <li>STILL_CAPTURE</li>
+ * <li>VIDEO_RECORD</li>
+ * <li>PREVIEW_VIDEO_STILL</li>
+ * <li>VIDEO_CALL</li>
+ * </ul>
+ * <p>The guaranteed stream combinations related to stream use case for a camera device with
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE">CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE</a>
+ * capability is documented in the camera device
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">guideline</a>. The
+ * application is strongly recommended to use one of the guaranteed stream combinations.
+ * If the application creates a session with a stream combination not in the guaranteed
+ * list, or with mixed DEFAULT and non-DEFAULT use cases within the same session,
+ * the camera device may ignore some stream use cases due to hardware constraints
+ * and implementation details.</p>
+ * <p>For stream combinations not covered by the stream use case mandatory lists, such as
+ * reprocessable session, constrained high speed session, or RAW stream combinations, the
+ * application should leave stream use cases within the session as DEFAULT.</p>
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES = // int32[n] (acamera_metadata_enum_android_scaler_available_stream_use_cases_t)
+ ACAMERA_SCALER_START + 25,
ACAMERA_SCALER_END,
/**
@@ -9238,6 +9287,35 @@
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR
= 16,
+ /**
+ * <p>The camera device supports selecting a per-stream use case via
+ * <a href="https://developer.android.com/reference/android/hardware/camera2/params/OutputConfiguration.html#setStreamUseCase">OutputConfiguration#setStreamUseCase</a>
+ * so that the device can optimize camera pipeline parameters such as tuning, sensor
+ * mode, or ISP settings for a specific user scenario.
+ * Some sample usages of this capability are:
+ * * Distinguish high quality YUV captures from a regular YUV stream where
+ * the image quality may not be as good as the JPEG stream, or
+ * * Use one stream to serve multiple purposes: viewfinder, video recording and
+ * still capture. This is common with applications that wish to apply edits equally
+ * to preview, saved images, and saved videos.</p>
+ * <p>This capability requires the camera device to support the following
+ * stream use cases:
+ * * DEFAULT for backward compatibility where the application doesn't set
+ * a stream use case
+ * * PREVIEW for live viewfinder and in-app image analysis
+ * * STILL_CAPTURE for still photo capture
+ * * VIDEO_RECORD for recording video clips
+ * * PREVIEW_VIDEO_STILL for one single stream used for viewfinder, video
+ * recording, and still capture.
+ * * VIDEO_CALL for long running video calls</p>
+ * <p><a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#SCALER_AVAILABLE_STREAM_USE_CASES">CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES</a>
+ * lists all of the supported stream use cases.</p>
+ * <p>Refer to <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#createCaptureSession">CameraDevice#createCaptureSession</a> for the
+ * mandatory stream combinations involving stream use cases, which can also be queried
+ * via <a href="https://developer.android.com/reference/android/hardware/camera2/params/MandatoryStreamCombination.html">MandatoryStreamCombination</a>.</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE = 19,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
// ACAMERA_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP
@@ -9516,6 +9594,76 @@
} acamera_metadata_enum_android_scaler_multi_resolution_stream_supported_t;
+// ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES
+typedef enum acamera_metadata_enum_acamera_scaler_available_stream_use_cases {
+ /**
+ * <p>Default stream use case.</p>
+ * <p>This use case is the same as when the application doesn't set any use case for
+ * the stream. The camera device uses the properties of the output target, such as
+ * format, dataSpace, or surface class type, to optimize the image processing pipeline.</p>
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT = 0x0,
+
+ /**
+ * <p>Live stream shown to the user.</p>
+ * <p>Optimized for performance and usability as a viewfinder, but not necessarily for
+ * image quality. The output is not meant to be persisted as saved images or video.</p>
+ * <p>No stall if android.control.* are set to FAST; may have stall if android.control.*
+ * are set to HIGH_QUALITY. This use case has the same behavior as the default
+ * SurfaceView and SurfaceTexture targets. Additionally, this use case can be used for
+ * in-app image analysis.</p>
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW = 0x1,
+
+ /**
+ * <p>Still photo capture.</p>
+ * <p>Optimized for high-quality high-resolution capture, and not expected to maintain
+ * preview-like frame rates.</p>
+ * <p>The stream may have stalls regardless of whether ACAMERA_CONTROL_* is HIGH_QUALITY.
+ * This use case has the same behavior as the default JPEG and RAW related formats.</p>
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE = 0x2,
+
+ /**
+ * <p>Recording video clips.</p>
+ * <p>Optimized for high-quality video capture, including high-quality image stabilization
+ * if supported by the device and enabled by the application. As a result, may produce
+ * output frames with a substantial lag from real time, to allow for highest-quality
+ * stabilization or other processing. As such, this output is not suitable for drawing
+ * to screen directly, and is expected to be persisted to disk or similar for later
+ * playback or processing. Only streams that set the VIDEO_RECORD use case are guaranteed
+ * to have video stabilization applied when the video stabilization control is set
+ * to ON, as opposed to PREVIEW_STABILIZATION.</p>
+ * <p>This use case has the same behavior as the default MediaRecorder and MediaCodec
+ * targets.</p>
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD = 0x3,
+
+ /**
+ * <p>One single stream used for combined purposes of preview, video, and still capture.</p>
+ * <p>For such multi-purpose streams, the camera device aims to make the best tradeoff
+ * between the individual use cases. For example, the STILL_CAPTURE use case by itself
+ * may have stalls for achieving best image quality. But if combined with PREVIEW and
+ * VIDEO_RECORD, the camera device needs to trade off the additional image processing
+ * for speed so that preview and video recording aren't slowed down.</p>
+ * <p>Similarly, VIDEO_RECORD may produce frames with a substantial lag, but
+ * PREVIEW_VIDEO_STILL must have minimal output delay. This means that to enable video
+ * stabilization with this use case, the device must support and the app must select the
+ * PREVIEW_STABILIZATION mode for video stabilization.</p>
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL = 0x4,
+
+ /**
+ * <p>Long-running video call optimized for both power efficiency and video quality.</p>
+ * <p>The camera sensor may run in a lower-resolution mode to reduce power consumption
+ * at the cost of some image and digital zoom quality. Unlike VIDEO_RECORD, VIDEO_CALL
+ * outputs are expected to work in dark conditions, so are usually accompanied with
+ * variable frame rate settings to allow sufficient exposure time in low light.</p>
+ */
+ ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL = 0x5,
+
+} acamera_metadata_enum_android_scaler_available_stream_use_cases_t;
+
// ACAMERA_SENSOR_REFERENCE_ILLUMINANT1
typedef enum acamera_metadata_enum_acamera_sensor_reference_illuminant1 {
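
As a usage illustration (not part of this patch), an NDK client could check whether a particular use case is advertised by the new characteristics tag before opting into it. The sketch below only assumes the standard ACameraMetadata_getConstEntry API:

```cpp
#include <camera/NdkCameraError.h>
#include <camera/NdkCameraMetadata.h>
#include <camera/NdkCameraMetadataTags.h>

// Returns true if the camera characteristics list the given use case, e.g.
// ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL.
static bool supportsStreamUseCase(const ACameraMetadata* chars, int32_t useCase) {
    ACameraMetadata_const_entry entry = {};
    camera_status_t status = ACameraMetadata_getConstEntry(
            chars, ACAMERA_SCALER_AVAILABLE_STREAM_USE_CASES, &entry);
    if (status != ACAMERA_OK) {
        return false;  // tag absent: device does not advertise stream use cases
    }
    for (uint32_t i = 0; i < entry.count; i++) {
        if (entry.data.i32[i] == useCase) {
            return true;
        }
    }
    return false;
}
```
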
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index ec16bc2..9783855 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -70,6 +70,10 @@
using namespace android;
+namespace {
+ constexpr static int PIXEL_FORMAT_RGBA_1010102_AS_8888 = -HAL_PIXEL_FORMAT_RGBA_1010102;
+}
+
static long gNumRepetitions;
static long gMaxNumFrames; // 0 means decode all available.
static long gReproduceBug; // if not -1.
@@ -626,7 +630,14 @@
fprintf(stderr, " -m max-number-of-frames-to-decode in each pass\n");
fprintf(stderr, " -b bug to reproduce\n");
fprintf(stderr, " -i(nfo) dump codec info (profiles and color formats supported, details)\n");
- fprintf(stderr, " -t(humbnail) extract video thumbnail or album art\n");
+ fprintf(stderr, " -t(humbnail) extract video thumbnail or album art (/sdcard/out.jpg)\n");
+ fprintf(stderr, " -P(ixelFormat) pixel format to use for raw thumbnail "
+ "(/sdcard/out.raw)\n");
+ fprintf(stderr, " %d: RGBA_565\n", HAL_PIXEL_FORMAT_RGB_565);
+ fprintf(stderr, " %d: RGBA_8888\n", HAL_PIXEL_FORMAT_RGBA_8888);
+ fprintf(stderr, " %d: BGRA_8888\n", HAL_PIXEL_FORMAT_BGRA_8888);
+ fprintf(stderr, " %d: RGBA_1010102\n", HAL_PIXEL_FORMAT_RGBA_1010102);
+ fprintf(stderr, " %d: RGBA_1010102 as RGBA_8888\n", PIXEL_FORMAT_RGBA_1010102_AS_8888);
fprintf(stderr, " -s(oftware) prefer software codec\n");
fprintf(stderr, " -r(hardware) force to use hardware codec\n");
fprintf(stderr, " -o playback audio\n");
@@ -784,6 +795,7 @@
bool useSurfaceTexAlloc = false;
bool dumpStream = false;
bool dumpPCMStream = false;
+ int32_t pixelFormat = 0; // thumbnail pixel format
String8 dumpStreamFilename;
gNumRepetitions = 1;
gMaxNumFrames = 0;
@@ -797,7 +809,7 @@
sp<android::ALooper> looper;
int res;
- while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:")) >= 0) {
+ while ((res = getopt(argc, argv, "vhaqn:lm:b:itsrow:kN:xSTd:D:P:")) >= 0) {
switch (res) {
case 'a':
{
@@ -838,6 +850,7 @@
break;
}
+ case 'P':
case 'm':
case 'n':
case 'b':
@@ -853,6 +866,8 @@
gNumRepetitions = x;
} else if (res == 'm') {
gMaxNumFrames = x;
+ } else if (res == 'P') {
+ pixelFormat = x;
} else {
CHECK_EQ(res, 'b');
gReproduceBug = x;
@@ -975,24 +990,71 @@
close(fd);
fd = -1;
+ uint32_t retrieverPixelFormat = HAL_PIXEL_FORMAT_RGB_565;
+ if (pixelFormat == PIXEL_FORMAT_RGBA_1010102_AS_8888) {
+ retrieverPixelFormat = HAL_PIXEL_FORMAT_RGBA_1010102;
+ } else if (pixelFormat) {
+ retrieverPixelFormat = pixelFormat;
+ }
sp<IMemory> mem =
retriever->getFrameAtTime(-1,
MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC,
- HAL_PIXEL_FORMAT_RGB_565,
- false /*metaOnly*/);
+ retrieverPixelFormat, false /*metaOnly*/);
if (mem != NULL) {
failed = false;
- printf("getFrameAtTime(%s) => OK\n", filename);
+ printf("getFrameAtTime(%s) format=%d => OK\n", filename, retrieverPixelFormat);
VideoFrame *frame = (VideoFrame *)mem->unsecurePointer();
- CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
- frame->getFlattenedData(),
- frame->mWidth, frame->mHeight), 0);
+ if (pixelFormat) {
+ int bpp = 0;
+ switch (pixelFormat) {
+ case HAL_PIXEL_FORMAT_RGB_565:
+ bpp = 2;
+ break;
+ case PIXEL_FORMAT_RGBA_1010102_AS_8888:
+ // convert RGBA_1010102 to RGBA_8888
+ {
+ uint32_t *data = (uint32_t *)frame->getFlattenedData();
+ uint32_t *end = data + frame->mWidth * frame->mHeight;
+ for (; data < end; ++data) {
+ *data =
+ // pick out 8-bit R, G, B values and move them to the
+ // correct position
+ ( (*data & 0x3fc) >> 2) | // R
+ ( (*data & 0xff000) >> 4) | // G
+ ( (*data & 0x3fc00000) >> 6) | // B
+ // pick out 2-bit A and expand to 8-bits
+ (((*data & 0xc0000000) >> 6) * 0x55);
+ }
+ }
+
+ FALLTHROUGH_INTENDED;
+
+ case HAL_PIXEL_FORMAT_RGBA_1010102:
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ bpp = 4;
+ break;
+ }
+ if (bpp) {
+ FILE *out = fopen("/sdcard/out.raw", "wb");
+ fwrite(frame->getFlattenedData(), bpp * frame->mWidth, frame->mHeight, out);
+ fclose(out);
+
+ printf("write out %d x %d x %dbpp\n", frame->mWidth, frame->mHeight, bpp);
+ } else {
+ printf("unknown pixel format.\n");
+ }
+ } else {
+ CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
+ frame->getFlattenedData(),
+ frame->mWidth, frame->mHeight), 0);
+ }
}
- {
+ if (!pixelFormat) {
mem = retriever->extractAlbumArt();
if (mem != NULL) {
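
The -P path above converts RGBA_1010102 pixels to RGBA_8888 in place by keeping the top 8 bits of each 10-bit color channel and replicating the 2-bit alpha across 8 bits. A standalone sketch of the same per-pixel transform with worked values (illustrative only, not part of the patch):

```cpp
#include <cassert>
#include <cstdint>

// Convert one RGBA_1010102 pixel (R in bits 0-9, G 10-19, B 20-29, A 30-31)
// to RGBA_8888, mirroring the loop in stagefright.cpp.
static uint32_t rgba1010102ToRgba8888(uint32_t p) {
    uint32_t r = (p & 0x000003fc) >> 2;          // top 8 of 10-bit R -> bits 0-7
    uint32_t g = (p & 0x000ff000) >> 4;          // top 8 of 10-bit G -> bits 8-15
    uint32_t b = (p & 0x3fc00000) >> 6;          // top 8 of 10-bit B -> bits 16-23
    uint32_t a = ((p & 0xc0000000) >> 6) * 0x55; // 2-bit A replicated -> bits 24-31
    return r | g | b | a;
}

int main() {
    // A fully saturated 1010102 pixel stays fully saturated in 8888.
    assert(rgba1010102ToRgba8888(0xffffffffu) == 0xffffffffu);
    // Opaque mid-gray: R = G = B = 0x200 (512/1023), A = 3.
    uint32_t p = 0x200u | (0x200u << 10) | (0x200u << 20) | (0x3u << 30);
    assert(rgba1010102ToRgba8888(p) == 0xff808080u);
    return 0;
}
```
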
diff --git a/drm/libmediadrm/Android.bp b/drm/libmediadrm/Android.bp
index bf0cdd5..408d216 100644
--- a/drm/libmediadrm/Android.bp
+++ b/drm/libmediadrm/Android.bp
@@ -34,6 +34,7 @@
"CryptoHalHidl.cpp",
"CryptoHalAidl.cpp",
"DrmUtils.cpp",
+ "DrmHalListener.cpp",
],
local_include_dirs: [
diff --git a/drm/libmediadrm/CryptoHalAidl.cpp b/drm/libmediadrm/CryptoHalAidl.cpp
index a688728..03782ef 100644
--- a/drm/libmediadrm/CryptoHalAidl.cpp
+++ b/drm/libmediadrm/CryptoHalAidl.cpp
@@ -109,7 +109,12 @@
// skip negative convert check as count of enum elements are 2
aidldb.type = static_cast<BufferType>((int32_t)buffer.type);
aidldb.nonsecureMemory = hidlSharedBufferToAidlSharedBuffer(buffer.nonsecureMemory);
- aidldb.secureMemory = ::android::makeToAidl(buffer.secureMemory.getNativeHandle());
+ auto handle = buffer.secureMemory.getNativeHandle();
+ if (handle) {
+ aidldb.secureMemory = ::android::makeToAidl(handle);
+ } else {
+ aidldb.secureMemory = {.fds = {}, .ints = {}};
+ }
return aidldb;
}
@@ -130,6 +135,13 @@
return String8(string.c_str());
}
+static std::vector<uint8_t> toStdVec(const uint8_t* ptr, size_t n) {
+ if (!ptr) {
+ return std::vector<uint8_t>();
+ }
+ return std::vector<uint8_t>(ptr, ptr + n);
+}
+
// -------Hidl interface related end--------------
CryptoHalAidl::CryptoHalAidl()
@@ -335,8 +347,8 @@
status_t err = UNKNOWN_ERROR;
mLock.unlock();
- std::vector<uint8_t> keyIdAidl = std::vector<uint8_t>(keyId, keyId + 16);
- std::vector<uint8_t> ivAidl = std::vector<uint8_t>(iv, iv + 16);
+ std::vector<uint8_t> keyIdAidl(toStdVec(keyId, 16));
+ std::vector<uint8_t> ivAidl(toStdVec(iv, 16));
DecryptResult result;
err = mPlugin->decrypt(secure, keyIdAidl, ivAidl, aMode, aPattern, stdSubSamples,
hidlSharedBufferToAidlSharedBuffer(hSource), offset,
diff --git a/drm/libmediadrm/DrmHal.cpp b/drm/libmediadrm/DrmHal.cpp
index fe8b9f6..aa40793 100644
--- a/drm/libmediadrm/DrmHal.cpp
+++ b/drm/libmediadrm/DrmHal.cpp
@@ -26,7 +26,7 @@
DrmHal::DrmHal() {
mDrmHalHidl = sp<DrmHalHidl>::make();
- mDrmHalAidl = ndk::SharedRefBase::make<DrmHalAidl>();
+ mDrmHalAidl = sp<DrmHalAidl>::make();
}
DrmHal::~DrmHal() {}
diff --git a/drm/libmediadrm/DrmHalAidl.cpp b/drm/libmediadrm/DrmHalAidl.cpp
index 7df57a3..9f640e0 100644
--- a/drm/libmediadrm/DrmHalAidl.cpp
+++ b/drm/libmediadrm/DrmHalAidl.cpp
@@ -59,7 +59,6 @@
using AttributeHidl = ::android::hardware::drm::V1_1::DrmMetricGroup::Attribute;
using IDrmPluginAidl = ::aidl::android::hardware::drm::IDrmPlugin;
using EventTypeAidl = ::aidl::android::hardware::drm::EventType;
-using KeyStatusAidl = ::aidl::android::hardware::drm::KeyStatus;
using ::android::hardware::hidl_vec;
namespace {
@@ -247,6 +246,12 @@
return keySetIds;
}
+static hidl_vec<uint8_t> toHidlVec(const Vector<uint8_t>& vector) {
+ hidl_vec<uint8_t> vec;
+ vec.setToExternal(const_cast<uint8_t*>(vector.array()), vector.size());
+ return vec;
+}
+
static DrmPlugin::OfflineLicenseState toOfflineLicenseState(OfflineLicenseState licenseState) {
switch (licenseState) {
case OfflineLicenseState::USABLE:
@@ -258,24 +263,17 @@
}
}
-template <typename T = uint8_t>
-static hidl_vec<T> toHidlVec(const Vector<T>& vector) {
- hidl_vec<T> vec;
- vec.setToExternal(const_cast<T*>(vector.array()), vector.size());
- return vec;
-}
-
Mutex DrmHalAidl::mLock;
static hidl_vec<DrmMetricGroupHidl> toDrmMetricGroupHidl(std::vector<DrmMetricGroupAidl> result) {
- Vector<DrmMetricGroupHidl> resultHidl;
+ std::vector<DrmMetricGroupHidl> resultHidl;
for (auto r : result) {
DrmMetricGroupHidl re;
- Vector<DrmMetricHidl> tmpMetric;
+ std::vector<DrmMetricHidl> tmpMetric;
for (auto m : r.metrics) {
DrmMetricHidl me;
me.name = m.name;
- Vector<AttributeHidl> aTmp;
+ std::vector<AttributeHidl> aTmp;
for (auto attr : m.attributes) {
AttributeHidl attrHidl;
attrHidl.name = attr.name;
@@ -300,9 +298,9 @@
aTmp.push_back(attrHidl);
}
- me.attributes = toHidlVec<AttributeHidl>(aTmp);
+ me.attributes = aTmp;
- Vector<ValueHidl> vTmp;
+ std::vector<ValueHidl> vTmp;
for (auto value : m.values) {
ValueHidl valueHidl;
valueHidl.componentName = value.name;
@@ -326,15 +324,15 @@
vTmp.push_back(valueHidl);
}
- me.values = toHidlVec<ValueHidl>(vTmp);
+ me.values = vTmp;
tmpMetric.push_back(me);
}
- re.metrics = toHidlVec<DrmMetricHidl>(tmpMetric);
+ re.metrics = tmpMetric;
resultHidl.push_back(re);
}
- return toHidlVec<DrmMetricGroupHidl>(resultHidl);
+ return resultHidl;
}
// DrmSessionClient Definition
@@ -396,7 +394,8 @@
// DrmHalAidl methods
DrmHalAidl::DrmHalAidl()
- : mFactories(makeDrmFactories()),
+ : mListener(::ndk::SharedRefBase::make<DrmHalListener>(&mMetrics)),
+ mFactories(makeDrmFactories()),
mInitCheck((mFactories.size() == 0) ? ERROR_UNSUPPORTED : NO_INIT) {}
status_t DrmHalAidl::initCheck() const {
@@ -427,8 +426,7 @@
}
status_t DrmHalAidl::setListener(const sp<IDrmClient>& listener) {
- Mutex::Autolock lock(mEventLock);
- mListener = listener;
+ mListener->setListener(listener);
return NO_ERROR;
}
@@ -437,8 +435,9 @@
Mutex::Autolock autoLock(mLock);
*isSupported = false;
Uuid uuidAidl = toAidlUuid(uuid);
- SecurityLevel levelAidl = static_cast<SecurityLevel>((int32_t)level);
+ SecurityLevel levelAidl = toAidlSecurityLevel(level);
std::string mimeTypeStr = mimeType.string();
+
for (ssize_t i = mFactories.size() - 1; i >= 0; i--) {
if (mFactories[i]
->isCryptoSchemeSupported(uuidAidl, mimeTypeStr, levelAidl, isSupported)
@@ -477,9 +476,14 @@
mInitCheck = ERROR_UNSUPPORTED;
} else {
mInitCheck = OK;
-
- if (!mPlugin->setListener(shared_from_this()).isOk()) {
+ // Upcast the stored mListener to its BnDrmPluginListener base before registering it with the plugin
+ ::ndk::ScopedAStatus status = mPlugin
+ ->setListener(std::static_pointer_cast<BnDrmPluginListener>(mListener));
+ if (!status.isOk()) {
mInitCheck = DEAD_OBJECT;
+ ALOGE("setListener failed: ex %d svc err %d",
+ status.getExceptionCode(),
+ status.getServiceSpecificError());
}
if (mInitCheck != OK) {
@@ -555,6 +559,7 @@
mMetrics.mCloseSessionCounter.Increment(response);
return response;
}
+
mMetrics.mCloseSessionCounter.Increment(DEAD_OBJECT);
return DEAD_OBJECT;
}
@@ -1203,124 +1208,22 @@
::ndk::ScopedAStatus DrmHalAidl::onEvent(EventTypeAidl eventTypeAidl,
const std::vector<uint8_t>& sessionId,
const std::vector<uint8_t>& data) {
- ::ndk::ScopedAStatus _aidl_status;
- mMetrics.mEventCounter.Increment((uint32_t)eventTypeAidl);
-
- mEventLock.lock();
- sp<IDrmClient> listener = mListener;
- mEventLock.unlock();
-
- if (listener != NULL) {
- Mutex::Autolock lock(mNotifyLock);
- DrmPlugin::EventType eventType;
- switch (eventTypeAidl) {
- case EventTypeAidl::PROVISION_REQUIRED:
- eventType = DrmPlugin::kDrmPluginEventProvisionRequired;
- break;
- case EventTypeAidl::KEY_NEEDED:
- eventType = DrmPlugin::kDrmPluginEventKeyNeeded;
- break;
- case EventTypeAidl::KEY_EXPIRED:
- eventType = DrmPlugin::kDrmPluginEventKeyExpired;
- break;
- case EventTypeAidl::VENDOR_DEFINED:
- eventType = DrmPlugin::kDrmPluginEventVendorDefined;
- break;
- case EventTypeAidl::SESSION_RECLAIMED:
- eventType = DrmPlugin::kDrmPluginEventSessionReclaimed;
- break;
- default:
- return _aidl_status;
- }
-
- listener->sendEvent(eventType, toHidlVec(toVector(sessionId)), toHidlVec(toVector(data)));
- }
-
- return _aidl_status;
+ return mListener->onEvent(eventTypeAidl, sessionId, data);
}
::ndk::ScopedAStatus DrmHalAidl::onExpirationUpdate(const std::vector<uint8_t>& sessionId,
int64_t expiryTimeInMS) {
- ::ndk::ScopedAStatus _aidl_status;
- mEventLock.lock();
- sp<IDrmClient> listener = mListener;
- mEventLock.unlock();
-
- if (listener != NULL) {
- Mutex::Autolock lock(mNotifyLock);
- listener->sendExpirationUpdate(toHidlVec(toVector(sessionId)), expiryTimeInMS);
- }
-
- return _aidl_status;
+ return mListener->onExpirationUpdate(sessionId, expiryTimeInMS);
}
::ndk::ScopedAStatus DrmHalAidl::onKeysChange(const std::vector<uint8_t>& sessionId,
const std::vector<KeyStatus>& keyStatusListAidl,
bool hasNewUsableKey) {
- ::ndk::ScopedAStatus _aidl_status;
- mEventLock.lock();
- sp<IDrmClient> listener = mListener;
- mEventLock.unlock();
-
- if (listener != NULL) {
- std::vector<DrmKeyStatus> keyStatusList;
- size_t nKeys = keyStatusListAidl.size();
- for (size_t i = 0; i < nKeys; ++i) {
- const KeyStatus& keyStatus = keyStatusListAidl[i];
- uint32_t type;
- switch (keyStatus.type) {
- case KeyStatusType::USABLE:
- type = DrmPlugin::kKeyStatusType_Usable;
- break;
- case KeyStatusType::EXPIRED:
- type = DrmPlugin::kKeyStatusType_Expired;
- break;
- case KeyStatusType::OUTPUTNOTALLOWED:
- type = DrmPlugin::kKeyStatusType_OutputNotAllowed;
- break;
- case KeyStatusType::STATUSPENDING:
- type = DrmPlugin::kKeyStatusType_StatusPending;
- break;
- case KeyStatusType::USABLEINFUTURE:
- type = DrmPlugin::kKeyStatusType_UsableInFuture;
- break;
- case KeyStatusType::INTERNALERROR:
- default:
- type = DrmPlugin::kKeyStatusType_InternalError;
- break;
- }
- keyStatusList.push_back({type, toHidlVec(toVector(keyStatus.keyId))});
- mMetrics.mKeyStatusChangeCounter.Increment((uint32_t)keyStatus.type);
- }
-
- Mutex::Autolock lock(mNotifyLock);
- listener->sendKeysChange(toHidlVec(toVector(sessionId)), keyStatusList, hasNewUsableKey);
- }
- else {
- // There's no listener. But we still want to count the key change
- // events.
- size_t nKeys = keyStatusListAidl.size();
-
- for (size_t i = 0; i < nKeys; i++) {
- mMetrics.mKeyStatusChangeCounter.Increment((uint32_t)keyStatusListAidl[i].type);
- }
- }
-
- return _aidl_status;
+ return mListener->onKeysChange(sessionId, keyStatusListAidl, hasNewUsableKey);
}
::ndk::ScopedAStatus DrmHalAidl::onSessionLostState(const std::vector<uint8_t>& sessionId) {
- ::ndk::ScopedAStatus _aidl_status;
- mEventLock.lock();
- sp<IDrmClient> listener = mListener;
- mEventLock.unlock();
-
- if (listener != NULL) {
- Mutex::Autolock lock(mNotifyLock);
- listener->sendSessionLostState(toHidlVec(toVector(sessionId)));
- }
-
- return _aidl_status;
+ return mListener->onSessionLostState(sessionId);
}
} // namespace android
\ No newline at end of file
diff --git a/drm/libmediadrm/DrmHalListener.cpp b/drm/libmediadrm/DrmHalListener.cpp
new file mode 100644
index 0000000..e39264f
--- /dev/null
+++ b/drm/libmediadrm/DrmHalListener.cpp
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "DrmHalListener"
+
+#include <mediadrm/DrmHalListener.h>
+
+using ::aidl::android::hardware::drm::KeyStatusType;
+using ::android::hardware::hidl_vec;
+
+namespace android {
+
+static const Vector<uint8_t> toVector(const std::vector<uint8_t>& vec) {
+ Vector<uint8_t> vector;
+ vector.appendArray(vec.data(), vec.size());
+ return *const_cast<const Vector<uint8_t>*>(&vector);
+}
+
+template <typename T = uint8_t>
+static hidl_vec<T> toHidlVec(const Vector<T>& vector) {
+ hidl_vec<T> vec;
+ vec.setToExternal(const_cast<T*>(vector.array()), vector.size());
+ return vec;
+}
+
+DrmHalListener::DrmHalListener(MediaDrmMetrics* metrics)
+ : mMetrics(metrics) {}
+
+DrmHalListener::~DrmHalListener() {}
+
+void DrmHalListener::setListener(sp<IDrmClient> listener) {
+ Mutex::Autolock lock(mEventLock);
+ mListener = listener;
+}
+
+::ndk::ScopedAStatus DrmHalListener::onEvent(EventTypeAidl eventTypeAidl,
+ const std::vector<uint8_t>& sessionId,
+ const std::vector<uint8_t>& data) {
+ mMetrics->mEventCounter.Increment((uint32_t)eventTypeAidl);
+
+ mEventLock.lock();
+ sp<IDrmClient> listener = mListener;
+ mEventLock.unlock();
+
+ if (listener != NULL) {
+ Mutex::Autolock lock(mNotifyLock);
+ DrmPlugin::EventType eventType;
+ switch (eventTypeAidl) {
+ case EventTypeAidl::PROVISION_REQUIRED:
+ eventType = DrmPlugin::kDrmPluginEventProvisionRequired;
+ break;
+ case EventTypeAidl::KEY_NEEDED:
+ eventType = DrmPlugin::kDrmPluginEventKeyNeeded;
+ break;
+ case EventTypeAidl::KEY_EXPIRED:
+ eventType = DrmPlugin::kDrmPluginEventKeyExpired;
+ break;
+ case EventTypeAidl::VENDOR_DEFINED:
+ eventType = DrmPlugin::kDrmPluginEventVendorDefined;
+ break;
+ case EventTypeAidl::SESSION_RECLAIMED:
+ eventType = DrmPlugin::kDrmPluginEventSessionReclaimed;
+ break;
+ default:
+ return ::ndk::ScopedAStatus::ok();
+ }
+
+ listener->sendEvent(eventType, toHidlVec(toVector(sessionId)), toHidlVec(toVector(data)));
+ }
+
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmHalListener::onExpirationUpdate(const std::vector<uint8_t>& sessionId,
+ int64_t expiryTimeInMS) {
+ mEventLock.lock();
+ sp<IDrmClient> listener = mListener;
+ mEventLock.unlock();
+
+ if (listener != NULL) {
+ Mutex::Autolock lock(mNotifyLock);
+ listener->sendExpirationUpdate(toHidlVec(toVector(sessionId)), expiryTimeInMS);
+ }
+
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmHalListener::onKeysChange(const std::vector<uint8_t>& sessionId,
+ const std::vector<KeyStatusAidl>& keyStatusListAidl,
+ bool hasNewUsableKey) {
+ mEventLock.lock();
+ sp<IDrmClient> listener = mListener;
+ mEventLock.unlock();
+
+ if (listener != NULL) {
+ std::vector<DrmKeyStatus> keyStatusList;
+ size_t nKeys = keyStatusListAidl.size();
+ for (size_t i = 0; i < nKeys; ++i) {
+ const KeyStatusAidl& keyStatus = keyStatusListAidl[i];
+ uint32_t type;
+ switch (keyStatus.type) {
+ case KeyStatusType::USABLE:
+ type = DrmPlugin::kKeyStatusType_Usable;
+ break;
+ case KeyStatusType::EXPIRED:
+ type = DrmPlugin::kKeyStatusType_Expired;
+ break;
+ case KeyStatusType::OUTPUTNOTALLOWED:
+ type = DrmPlugin::kKeyStatusType_OutputNotAllowed;
+ break;
+ case KeyStatusType::STATUSPENDING:
+ type = DrmPlugin::kKeyStatusType_StatusPending;
+ break;
+ case KeyStatusType::USABLEINFUTURE:
+ type = DrmPlugin::kKeyStatusType_UsableInFuture;
+ break;
+ case KeyStatusType::INTERNALERROR:
+ default:
+ type = DrmPlugin::kKeyStatusType_InternalError;
+ break;
+ }
+ keyStatusList.push_back({type, toHidlVec(toVector(keyStatus.keyId))});
+ mMetrics->mKeyStatusChangeCounter.Increment((uint32_t)keyStatus.type);
+ }
+
+ Mutex::Autolock lock(mNotifyLock);
+ listener->sendKeysChange(toHidlVec(toVector(sessionId)), keyStatusList, hasNewUsableKey);
+ }
+ else {
+ // There's no listener. But we still want to count the key change
+ // events.
+ size_t nKeys = keyStatusListAidl.size();
+
+ for (size_t i = 0; i < nKeys; i++) {
+ mMetrics->mKeyStatusChangeCounter.Increment((uint32_t)keyStatusListAidl[i].type);
+ }
+ }
+
+ return ::ndk::ScopedAStatus::ok();
+}
+
+::ndk::ScopedAStatus DrmHalListener::onSessionLostState(const std::vector<uint8_t>& sessionId) {
+ mEventLock.lock();
+ sp<IDrmClient> listener = mListener;
+ mEventLock.unlock();
+
+ if (listener != NULL) {
+ Mutex::Autolock lock(mNotifyLock);
+ listener->sendSessionLostState(toHidlVec(toVector(sessionId)));
+ }
+
+ return ::ndk::ScopedAStatus::ok();
+}
+
+} // namespace android
\ No newline at end of file
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHal.h b/drm/libmediadrm/include/mediadrm/CryptoHal.h
index 5be59f0..32a6741 100644
--- a/drm/libmediadrm/include/mediadrm/CryptoHal.h
+++ b/drm/libmediadrm/include/mediadrm/CryptoHal.h
@@ -54,6 +54,6 @@
DISALLOW_EVIL_CONSTRUCTORS(CryptoHal);
};
-}
+} // namespace android
-#endif
\ No newline at end of file
+#endif // CRYPTO_HAL_H_
\ No newline at end of file
diff --git a/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h b/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h
index a25b091..fc7f7fb 100644
--- a/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h
+++ b/drm/libmediadrm/include/mediadrm/CryptoHalAidl.h
@@ -89,4 +89,4 @@
} // namespace android
-#endif // CRYPTO_HAL_H_
+#endif // CRYPTO_HAL_AIDL_H_
diff --git a/drm/libmediadrm/include/mediadrm/DrmHal.h b/drm/libmediadrm/include/mediadrm/DrmHal.h
index bb58585..f5e75ac 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHal.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHal.h
@@ -120,10 +120,10 @@
private:
sp<IDrm> mDrmHalHidl;
- std::shared_ptr<IDrm> mDrmHalAidl;
+ sp<IDrm> mDrmHalAidl;
DISALLOW_EVIL_CONSTRUCTORS(DrmHal);
};
} // namespace android
-#endif
\ No newline at end of file
+#endif // DRM_HAL_H_
\ No newline at end of file
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
index 6720734..3feda90 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalAidl.h
@@ -17,16 +17,16 @@
#ifndef DRM_HAL_AIDL_H_
#define DRM_HAL_AIDL_H_
+#include <memory>
#include <aidl/android/hardware/drm/BnDrmPluginListener.h>
#include <aidl/android/hardware/drm/IDrmFactory.h>
#include <aidl/android/hardware/drm/IDrmPlugin.h>
#include <aidl/android/media/BnResourceManagerClient.h>
#include <mediadrm/DrmMetrics.h>
#include <mediadrm/DrmSessionManager.h>
+#include <mediadrm/DrmHalListener.h>
#include <mediadrm/IDrm.h>
-#include <memory>
-using ::aidl::android::hardware::drm::BnDrmPluginListener;
using IDrmPluginAidl = ::aidl::android::hardware::drm::IDrmPlugin;
using IDrmFactoryAidl = ::aidl::android::hardware::drm::IDrmFactory;
using EventTypeAidl = ::aidl::android::hardware::drm::EventType;
@@ -34,9 +34,7 @@
using ::aidl::android::hardware::drm::Uuid;
namespace android {
-struct DrmHalAidl : public IDrm,
- public BnDrmPluginListener,
- std::enable_shared_from_this<BnDrmPluginListener> {
+struct DrmHalAidl : public IDrm {
struct DrmSessionClient;
DrmHalAidl();
virtual ~DrmHalAidl();
@@ -107,7 +105,7 @@
bool* required) const;
virtual status_t setPlaybackId(Vector<uint8_t> const& sessionId, const char* playbackId);
virtual status_t getLogMessages(Vector<drm::V1_4::LogMessage>& logs) const;
- // Methods of IDrmPluginListenerAidl
+
::ndk::ScopedAStatus onEvent(EventTypeAidl in_eventType,
const std::vector<uint8_t>& in_sessionId,
const std::vector<uint8_t>& in_data);
@@ -117,17 +115,14 @@
const std::vector<KeyStatusAidl>& in_keyStatusList,
bool in_hasNewUsableKey);
::ndk::ScopedAStatus onSessionLostState(const std::vector<uint8_t>& in_sessionId);
-
private:
static Mutex mLock;
- sp<IDrmClient> mListener;
- mutable Mutex mEventLock;
- mutable Mutex mNotifyLock;
+ mutable MediaDrmMetrics mMetrics;
+ std::shared_ptr<DrmHalListener> mListener;
const std::vector<std::shared_ptr<IDrmFactoryAidl>> mFactories;
std::shared_ptr<IDrmPluginAidl> mPlugin;
std::vector<std::shared_ptr<IDrmFactoryAidl>> makeDrmFactories();
status_t mInitCheck;
- mutable MediaDrmMetrics mMetrics;
std::vector<std::shared_ptr<DrmSessionClient>> mOpenSessions;
void cleanup();
void closeOpenSessions();
@@ -140,4 +135,4 @@
} // namespace android
-#endif
\ No newline at end of file
+#endif // DRM_HAL_AIDL_H_
\ No newline at end of file
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
index 91dc700..94ef285 100644
--- a/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
+++ b/drm/libmediadrm/include/mediadrm/DrmHalHidl.h
@@ -17,10 +17,6 @@
#ifndef DRM_HAL_HIDL_H_
#define DRM_HAL_HIDL_H_
-#include <android/hardware/drm/1.0/IDrmFactory.h>
-#include <android/hardware/drm/1.0/IDrmPlugin.h>
-#include <android/hardware/drm/1.1/IDrmFactory.h>
-#include <android/hardware/drm/1.1/IDrmPlugin.h>
#include <android/hardware/drm/1.2/IDrmFactory.h>
#include <android/hardware/drm/1.2/IDrmPlugin.h>
#include <android/hardware/drm/1.2/IDrmPluginListener.h>
@@ -255,4 +251,4 @@
} // namespace android
-#endif // DRM_HAL_H_
+#endif // DRM_HAL_HIDL_H_
diff --git a/drm/libmediadrm/include/mediadrm/DrmHalListener.h b/drm/libmediadrm/include/mediadrm/DrmHalListener.h
new file mode 100644
index 0000000..22361ad
--- /dev/null
+++ b/drm/libmediadrm/include/mediadrm/DrmHalListener.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DRM_HAL_LISTENER_H_
+#define DRM_HAL_LISTENER_H_
+
+#include <aidl/android/hardware/drm/BnDrmPluginListener.h>
+#include <mediadrm/DrmMetrics.h>
+#include <mediadrm/IDrmClient.h>
+
+using EventTypeAidl = ::aidl::android::hardware::drm::EventType;
+using KeyStatusAidl = ::aidl::android::hardware::drm::KeyStatus;
+using aidl::android::hardware::drm::BnDrmPluginListener;
+
+namespace android {
+struct DrmHalListener : public BnDrmPluginListener {
+ explicit DrmHalListener(MediaDrmMetrics* metrics);
+ ~DrmHalListener();
+ ::ndk::ScopedAStatus onEvent(EventTypeAidl in_eventType,
+ const std::vector<uint8_t>& in_sessionId,
+ const std::vector<uint8_t>& in_data);
+ ::ndk::ScopedAStatus onExpirationUpdate(const std::vector<uint8_t>& in_sessionId,
+ int64_t in_expiryTimeInMS);
+ ::ndk::ScopedAStatus onKeysChange(const std::vector<uint8_t>& in_sessionId,
+ const std::vector<KeyStatusAidl>& in_keyStatusList,
+ bool in_hasNewUsableKey);
+ ::ndk::ScopedAStatus onSessionLostState(const std::vector<uint8_t>& in_sessionId);
+ void setListener(sp<IDrmClient> listener);
+private:
+ mutable MediaDrmMetrics* mMetrics;
+ sp<IDrmClient> mListener;
+ mutable Mutex mEventLock;
+ mutable Mutex mNotifyLock;
+};
+} // namespace android
+
+#endif // DRM_HAL_LISTENER_H_
\ No newline at end of file
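
For context, the listener declared here is wired up by DrmHalAidl elsewhere in this change: the HAL object owns one DrmHalListener, forwards framework clients to it, and hands it to the AIDL plugin as its callback target. A condensed sketch of that wiring (names other than DrmHalListener are illustrative; see DrmHalAidl.cpp in this patch for the real code):

```cpp
#include <memory>

#include <aidl/android/hardware/drm/IDrmPlugin.h>
#include <mediadrm/DrmHalListener.h>

namespace android {

// Illustrative owner class; error handling and locking omitted.
struct ListenerWiringSketch {
    MediaDrmMetrics metrics;
    std::shared_ptr<DrmHalListener> listener =
            ::ndk::SharedRefBase::make<DrmHalListener>(&metrics);
    std::shared_ptr<::aidl::android::hardware::drm::IDrmPlugin> plugin;

    // Framework clients hand in an IDrmClient; DrmHalListener stores and forwards to it.
    void setClient(const sp<IDrmClient>& client) { listener->setListener(client); }

    // The plugin receives the listener upcast to the generated BnDrmPluginListener base,
    // so every HAL callback flows through DrmHalListener (and into MediaDrmMetrics).
    void attachToPlugin() {
        plugin->setListener(std::static_pointer_cast<BnDrmPluginListener>(listener));
    }
};

}  // namespace android
```
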
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp
index 168a661..35c2382 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmFactory.cpp
@@ -72,7 +72,8 @@
// This should match the in_mimeTypes handed by InitDataParser.
*_aidl_return = in_mimeType == kIsoBmffVideoMimeType || in_mimeType == kIsoBmffAudioMimeType ||
in_mimeType == kCencInitDataFormat || in_mimeType == kWebmVideoMimeType ||
- in_mimeType == kWebmAudioMimeType || in_mimeType == kWebmInitDataFormat;
+ in_mimeType == kWebmAudioMimeType || in_mimeType == kWebmInitDataFormat ||
+ in_mimeType.empty();
return ::ndk::ScopedAStatus::ok();
}
@@ -85,7 +86,9 @@
ALOGD("%s mime type is not supported by crypto scheme", in_mimeType.c_str());
}
*_aidl_return = isClearKeyUUID(in_uuid.uuid.data()) && isSupportedMimeType &&
- in_securityLevel == SecurityLevel::SW_SECURE_CRYPTO;
+ (in_securityLevel == SecurityLevel::SW_SECURE_CRYPTO ||
+ in_securityLevel == SecurityLevel::DEFAULT ||
+ in_securityLevel == SecurityLevel::UNKNOWN);
return ::ndk::ScopedAStatus::ok();
}
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 92bea66..5478bcd 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -21,6 +21,7 @@
#include <inttypes.h>
#include <stdio.h>
#include <chrono>
+#include <set>
#include "AidlUtils.h"
#include "ClearKeyDrmProperties.h"
@@ -188,7 +189,8 @@
const std::vector<uint8_t> scopeId = in_scope;
::android::sp<Session> session;
- if (in_keyType == KeyType::STREAMING || in_keyType == KeyType::OFFLINE) {
+ std::set<KeyType> init_types{KeyType::STREAMING, KeyType::OFFLINE};
+ if (init_types.count(in_keyType)) {
std::vector<uint8_t> sessionId(scopeId.begin(), scopeId.end());
session = mSessionLibrary->findSession(sessionId);
if (!session.get()) {
@@ -402,8 +404,8 @@
auto itr = mSecureStops.find(in_secureStopId.secureStopId);
if (itr != mSecureStops.end()) {
ClearkeySecureStop clearkeyStop = itr->second;
- stop.assign(clearkeyStop.id.begin(), clearkeyStop.id.end());
- stop.assign(clearkeyStop.data.begin(), clearkeyStop.data.end());
+ stop.insert(stop.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
+ stop.insert(stop.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
}
mSecureStopLock.unlock();
@@ -439,9 +441,9 @@
std::vector<::aidl::android::hardware::drm::SecureStop> stops;
for (auto itr = mSecureStops.begin(); itr != mSecureStops.end(); ++itr) {
ClearkeySecureStop clearkeyStop = itr->second;
- std::vector<uint8_t> stop = {};
- stop.assign(clearkeyStop.id.begin(), clearkeyStop.id.end());
- stop.assign(clearkeyStop.data.begin(), clearkeyStop.data.end());
+ std::vector<uint8_t> stop{};
+ stop.insert(stop.end(), clearkeyStop.id.begin(), clearkeyStop.id.end());
+ stop.insert(stop.end(), clearkeyStop.data.begin(), clearkeyStop.data.end());
SecureStop secureStop;
secureStop.opaqueData = stop;
@@ -476,7 +478,7 @@
return toNdkScopedAStatus(Status::ERROR_DRM_INVALID_STATE);
}
- *_aidl_return = itr->second;
+ *_aidl_return = SecurityLevel::SW_SECURE_CRYPTO;
return toNdkScopedAStatus(Status::OK);
}
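
The secure-stop hunks above fix a concatenation bug: std::vector::assign replaces the existing contents, so the second assign call discarded the id copied by the first, while insert at end() appends. A minimal standalone illustration (not part of the patch):

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

int main() {
    std::vector<uint8_t> id = {1, 2};
    std::vector<uint8_t> data = {3, 4};

    // Old behavior: the second assign() wipes out the id, leaving only the data.
    std::vector<uint8_t> wrong;
    wrong.assign(id.begin(), id.end());
    wrong.assign(data.begin(), data.end());
    assert((wrong == std::vector<uint8_t>{3, 4}));

    // Fixed behavior: insert() at end() appends, so the stop carries id + data.
    std::vector<uint8_t> fixed;
    fixed.insert(fixed.end(), id.begin(), id.end());
    fixed.insert(fixed.end(), data.begin(), data.end());
    assert((fixed == std::vector<uint8_t>{1, 2, 3, 4}));
    return 0;
}
```
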
diff --git a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h
index 987e328..17d4a22 100644
--- a/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h
+++ b/drm/mediadrm/plugins/clearkey/common/include/clearkeydrm/SessionLibrary.h
@@ -23,7 +23,7 @@
namespace clearkeydrm {
-class SessionLibrary : public ::android::RefBase {
+class SessionLibrary {
public:
static SessionLibrary* get();
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index ed7e50b..f40aa17 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -10,12 +10,8 @@
filegroup {
name: "audio_core_hal_client_sources",
srcs: [
- "DeviceHalLocal.cpp",
- "DevicesFactoryHalHybrid.cpp",
- "DevicesFactoryHalLocal.cpp",
"DeviceHalHidl.cpp",
"DevicesFactoryHalHidl.cpp",
- "StreamHalLocal.cpp",
"StreamHalHidl.cpp",
],
}
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index 3180b7d..16863e4 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -311,13 +311,10 @@
sinkMetadata.tracks[0].destination.device(std::move(hidlOutputDevice));
}
#endif
-#if MAJOR_VERSION == 7 && MINOR_VERSION == 1
- Return<void> ret = mDevice->openInputStream_7_1(
-#else
Return<void> ret = mDevice->openInputStream(
-#endif
handle, hidlDevice, hidlConfig, hidlFlags, sinkMetadata,
- [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& result,
+ [&](Result r,
+ const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& result,
const AudioConfig& suggestedConfig) {
retval = r;
if (retval == Result::OK) {
@@ -515,6 +512,18 @@
return processReturn("setConnectedState", mDevice->setConnectedState(hidlAddress, connected));
}
+error::Result<audio_hw_sync_t> DeviceHalHidl::getHwAvSync() {
+ if (mDevice == 0) return NO_INIT;
+ audio_hw_sync_t value;
+ Result result;
+ Return<void> ret = mDevice->getHwAvSync([&value, &result](Result r, audio_hw_sync_t v) {
+ value = v;
+ result = r;
+ });
+ RETURN_IF_ERROR(processReturn("getHwAvSync", ret, result));
+ return value;
+}
+
status_t DeviceHalHidl::dump(int fd, const Vector<String16>& args) {
if (mDevice == 0) return NO_INIT;
native_handle_t* hidlHandle = native_handle_create(1, 0);
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index cd9535e..8a97a55 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -134,6 +134,8 @@
status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
+ error::Result<audio_hw_sync_t> getHwAvSync() override;
+
status_t dump(int fd, const Vector<String16>& args) override;
private:
diff --git a/media/libaudiohal/impl/DeviceHalLocal.cpp b/media/libaudiohal/impl/DeviceHalLocal.cpp
deleted file mode 100644
index e473e41..0000000
--- a/media/libaudiohal/impl/DeviceHalLocal.cpp
+++ /dev/null
@@ -1,256 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DeviceHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <media/AudioParameter.h>
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "StreamHalLocal.h"
-
-namespace android {
-
-DeviceHalLocal::DeviceHalLocal(audio_hw_device_t *dev)
- : mDev(dev) {
-}
-
-DeviceHalLocal::~DeviceHalLocal() {
- int status = audio_hw_device_close(mDev);
- ALOGW_IF(status, "Error closing audio hw device %p: %s", mDev, strerror(-status));
- mDev = 0;
-}
-
-status_t DeviceHalLocal::getSupportedDevices(uint32_t *devices) {
- if (mDev->get_supported_devices == NULL) return INVALID_OPERATION;
- *devices = mDev->get_supported_devices(mDev);
- return OK;
-}
-
-status_t DeviceHalLocal::initCheck() {
- return mDev->init_check(mDev);
-}
-
-status_t DeviceHalLocal::setVoiceVolume(float volume) {
- return mDev->set_voice_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::setMasterVolume(float volume) {
- if (mDev->set_master_volume == NULL) return INVALID_OPERATION;
- return mDev->set_master_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::getMasterVolume(float *volume) {
- if (mDev->get_master_volume == NULL) return INVALID_OPERATION;
- return mDev->get_master_volume(mDev, volume);
-}
-
-status_t DeviceHalLocal::setMode(audio_mode_t mode) {
- return mDev->set_mode(mDev, mode);
-}
-
-status_t DeviceHalLocal::setMicMute(bool state) {
- return mDev->set_mic_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::getMicMute(bool *state) {
- return mDev->get_mic_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::setMasterMute(bool state) {
- if (mDev->set_master_mute == NULL) return INVALID_OPERATION;
- return mDev->set_master_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::getMasterMute(bool *state) {
- if (mDev->get_master_mute == NULL) return INVALID_OPERATION;
- return mDev->get_master_mute(mDev, state);
-}
-
-status_t DeviceHalLocal::setParameters(const String8& kvPairs) {
- return mDev->set_parameters(mDev, kvPairs.string());
-}
-
-status_t DeviceHalLocal::getParameters(const String8& keys, String8 *values) {
- char *halValues = mDev->get_parameters(mDev, keys.string());
- if (halValues != NULL) {
- values->setTo(halValues);
- free(halValues);
- } else {
- values->clear();
- }
- return OK;
-}
-
-status_t DeviceHalLocal::getInputBufferSize(
- const struct audio_config *config, size_t *size) {
- *size = mDev->get_input_buffer_size(mDev, config);
- return OK;
-}
-
-status_t DeviceHalLocal::openOutputStream(
- audio_io_handle_t handle,
- audio_devices_t deviceType,
- audio_output_flags_t flags,
- struct audio_config *config,
- const char *address,
- sp<StreamOutHalInterface> *outStream) {
- audio_stream_out_t *halStream;
- ALOGV("open_output_stream handle: %d devices: %x flags: %#x"
- "srate: %d format %#x channels %x address %s",
- handle, deviceType, flags,
- config->sample_rate, config->format, config->channel_mask,
- address);
- int openResut = mDev->open_output_stream(
- mDev, handle, deviceType, flags, config, &halStream, address);
- if (openResut == OK) {
- *outStream = new StreamOutHalLocal(halStream, this);
- }
- ALOGV("open_output_stream status %d stream %p", openResut, halStream);
- return openResut;
-}
-
-status_t DeviceHalLocal::openInputStream(
- audio_io_handle_t handle,
- audio_devices_t devices,
- struct audio_config *config,
- audio_input_flags_t flags,
- const char *address,
- audio_source_t source,
- audio_devices_t /*outputDevice*/,
- const char */*outputDeviceAddress*/,
- sp<StreamInHalInterface> *inStream) {
- audio_stream_in_t *halStream;
- ALOGV("open_input_stream handle: %d devices: %x flags: %#x "
- "srate: %d format %#x channels %x address %s source %d",
- handle, devices, flags,
- config->sample_rate, config->format, config->channel_mask,
- address, source);
- int openResult = mDev->open_input_stream(
- mDev, handle, devices, config, &halStream, flags, address, source);
- if (openResult == OK) {
- *inStream = new StreamInHalLocal(halStream, this);
- }
- ALOGV("open_input_stream status %d stream %p", openResult, inStream);
- return openResult;
-}
-
-status_t DeviceHalLocal::supportsAudioPatches(bool *supportsPatches) {
- *supportsPatches = version() >= AUDIO_DEVICE_API_VERSION_3_0;
- return OK;
-}
-
-status_t DeviceHalLocal::createAudioPatch(
- unsigned int num_sources,
- const struct audio_port_config *sources,
- unsigned int num_sinks,
- const struct audio_port_config *sinks,
- audio_patch_handle_t *patch) {
- if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- return mDev->create_audio_patch(
- mDev, num_sources, sources, num_sinks, sinks, patch);
- } else {
- return INVALID_OPERATION;
- }
-}
-
-status_t DeviceHalLocal::releaseAudioPatch(audio_patch_handle_t patch) {
- if (version() >= AUDIO_DEVICE_API_VERSION_3_0) {
- return mDev->release_audio_patch(mDev, patch);
- } else {
- return INVALID_OPERATION;
- }
-}
-
-status_t DeviceHalLocal::getAudioPort(struct audio_port *port) {
- return mDev->get_audio_port(mDev, port);
-}
-
-status_t DeviceHalLocal::getAudioPort(struct audio_port_v7 *port) {
-#if MAJOR_VERSION >= 7
- if (version() >= AUDIO_DEVICE_API_VERSION_3_2) {
- // get_audio_port_v7 is mandatory if legacy HAL support this API version.
- return mDev->get_audio_port_v7(mDev, port);
- }
-#endif
- struct audio_port audioPort = {};
- audio_populate_audio_port(port, &audioPort);
- status_t status = getAudioPort(&audioPort);
- if (status == NO_ERROR) {
- audio_populate_audio_port_v7(&audioPort, port);
- }
- return status;
-}
-
-status_t DeviceHalLocal::setAudioPortConfig(const struct audio_port_config *config) {
- if (version() >= AUDIO_DEVICE_API_VERSION_3_0)
- return mDev->set_audio_port_config(mDev, config);
- else
- return INVALID_OPERATION;
-}
-
-#if MAJOR_VERSION == 2
-status_t DeviceHalLocal::getMicrophones(
- std::vector<media::MicrophoneInfo> *microphones __unused) {
- return INVALID_OPERATION;
-}
-#elif MAJOR_VERSION >= 4
-status_t DeviceHalLocal::getMicrophones(std::vector<media::MicrophoneInfo> *microphones) {
- if (mDev->get_microphones == NULL) return INVALID_OPERATION;
- size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT;
- audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT];
- status_t status = mDev->get_microphones(mDev, &mic_array[0], &actual_mics);
- for (size_t i = 0; i < actual_mics; i++) {
- media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(mic_array[i]);
- microphones->push_back(microphoneInfo);
- }
- return status;
-}
-#endif
-
-// Local HAL implementation does not support effects
-status_t DeviceHalLocal::addDeviceEffect(
- audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
- return INVALID_OPERATION;
-}
-
-status_t DeviceHalLocal::removeDeviceEffect(
- audio_port_handle_t device __unused, sp<EffectHalInterface> effect __unused) {
- return INVALID_OPERATION;
-}
-
-status_t DeviceHalLocal::setConnectedState(const struct audio_port_v7 *port, bool connected) {
- AudioParameter param(String8(port->ext.device.address));
- const String8 key(connected ?
- AudioParameter::keyDeviceConnect : AudioParameter::keyDeviceDisconnect);
- param.addInt(key, port->ext.device.type);
- return setParameters(param.toString());
-}
-
-status_t DeviceHalLocal::dump(int fd, const Vector<String16>& /* args */) {
- return mDev->dump(mDev, fd);
-}
-
-void DeviceHalLocal::closeOutputStream(struct audio_stream_out *stream_out) {
- mDev->close_output_stream(mDev, stream_out);
-}
-
-void DeviceHalLocal::closeInputStream(struct audio_stream_in *stream_in) {
- mDev->close_input_stream(mDev, stream_in);
-}
-
-} // namespace android
diff --git a/media/libaudiohal/impl/DeviceHalLocal.h b/media/libaudiohal/impl/DeviceHalLocal.h
deleted file mode 100644
index 3e586cf..0000000
--- a/media/libaudiohal/impl/DeviceHalLocal.h
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
-#define ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
-
-#include <hardware/audio.h>
-#include <media/audiohal/DeviceHalInterface.h>
-
-namespace android {
-
-class DeviceHalLocal : public DeviceHalInterface
-{
- public:
- // Sets the value of 'devices' to a bitmask of 1 or more values of audio_devices_t.
- virtual status_t getSupportedDevices(uint32_t *devices);
-
- // Check to see if the audio hardware interface has been initialized.
- virtual status_t initCheck();
-
- // Set the audio volume of a voice call. Range is between 0.0 and 1.0.
- virtual status_t setVoiceVolume(float volume);
-
- // Set the audio volume for all audio activities other than voice call.
- virtual status_t setMasterVolume(float volume);
-
- // Get the current master volume value for the HAL.
- virtual status_t getMasterVolume(float *volume);
-
- // Called when the audio mode changes.
- virtual status_t setMode(audio_mode_t mode);
-
- // Muting control.
- virtual status_t setMicMute(bool state);
- virtual status_t getMicMute(bool *state);
- virtual status_t setMasterMute(bool state);
- virtual status_t getMasterMute(bool *state);
-
- // Set global audio parameters.
- virtual status_t setParameters(const String8& kvPairs);
-
- // Get global audio parameters.
- virtual status_t getParameters(const String8& keys, String8 *values);
-
- // Returns audio input buffer size according to parameters passed.
- virtual status_t getInputBufferSize(const struct audio_config *config,
- size_t *size);
-
- // Creates and opens the audio hardware output stream. The stream is closed
- // by releasing all references to the returned object.
- virtual status_t openOutputStream(
- audio_io_handle_t handle,
- audio_devices_t devices,
- audio_output_flags_t flags,
- struct audio_config *config,
- const char *address,
- sp<StreamOutHalInterface> *outStream);
-
- // Creates and opens the audio hardware input stream. The stream is closed
- // by releasing all references to the returned object.
- virtual status_t openInputStream(
- audio_io_handle_t handle,
- audio_devices_t devices,
- struct audio_config *config,
- audio_input_flags_t flags,
- const char *address,
- audio_source_t source,
- audio_devices_t outputDevice,
- const char *outputDeviceAddress,
- sp<StreamInHalInterface> *inStream);
-
- // Returns whether createAudioPatch and releaseAudioPatch operations are supported.
- virtual status_t supportsAudioPatches(bool *supportsPatches);
-
- // Creates an audio patch between several source and sink ports.
- virtual status_t createAudioPatch(
- unsigned int num_sources,
- const struct audio_port_config *sources,
- unsigned int num_sinks,
- const struct audio_port_config *sinks,
- audio_patch_handle_t *patch);
-
- // Releases an audio patch.
- virtual status_t releaseAudioPatch(audio_patch_handle_t patch);
-
- // Fills the list of supported attributes for a given audio port.
- virtual status_t getAudioPort(struct audio_port *port);
-
- // Fills the list of supported attributes for a given audio port.
- virtual status_t getAudioPort(struct audio_port_v7 *port);
-
- // Set audio port configuration.
- virtual status_t setAudioPortConfig(const struct audio_port_config *config);
-
- // List microphones
- virtual status_t getMicrophones(std::vector<media::MicrophoneInfo> *microphones);
-
- status_t addDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
- status_t removeDeviceEffect(audio_port_handle_t device, sp<EffectHalInterface> effect) override;
-
- status_t getMmapPolicyInfos(
- media::audio::common::AudioMMapPolicyType policyType __unused,
- std::vector<media::audio::common::AudioMMapPolicyInfo> *policyInfos __unused) override {
- // This function will only be available on AIDL HAL.
- return INVALID_OPERATION;
- }
-
- int32_t getAAudioMixerBurstCount() override {
- // This function will only be available on AIDL HAL.
- return INVALID_OPERATION;
- }
-
- int32_t getAAudioHardwareBurstMinUsec() override {
- // This function will only be available on AIDL HAL.
- return INVALID_OPERATION;
- }
-
- status_t setConnectedState(const struct audio_port_v7 *port, bool connected) override;
-
- status_t dump(int fd, const Vector<String16>& args) override;
-
- void closeOutputStream(struct audio_stream_out *stream_out);
- void closeInputStream(struct audio_stream_in *stream_in);
-
- uint32_t version() const { return mDev->common.version; }
-
- private:
- audio_hw_device_t *mDev;
-
- friend class DevicesFactoryHalLocal;
-
- // Can not be constructed directly by clients.
- explicit DeviceHalLocal(audio_hw_device_t *dev);
-
- // The destructor automatically closes the device.
- virtual ~DeviceHalLocal();
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICE_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
index f475729..8f3c907 100644
--- a/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
+++ b/media/libaudiohal/impl/DevicesFactoryHalHidl.cpp
@@ -227,4 +227,10 @@
return mDeviceFactories;
}
+// Main entry-point to the shared library.
+extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
+ auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
+ return service ? new DevicesFactoryHalHidl(service) : nullptr;
+}
+
} // namespace android
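
For context on the exported entry point above: a minimal sketch, assuming the audiohal factory loader resolves createIDevicesFactory with dlopen/dlsym. The library name and error handling are assumptions for illustration; only the symbol name comes from this change.

#include <dlfcn.h>

// Illustrative loader (hypothetical caller, assumed library name).
static void* openHidlDevicesFactory() {
    void* lib = dlopen("libaudiohal@7.1.so", RTLD_NOW);      // assumed library name
    if (lib == nullptr) return nullptr;
    using createFactory_t = void* (*)();
    auto create = reinterpret_cast<createFactory_t>(dlsym(lib, "createIDevicesFactory"));
    return create != nullptr ? create() : nullptr;           // DevicesFactoryHalHidl* or nullptr
}
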
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp b/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
deleted file mode 100644
index d684c27..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.cpp
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DevicesFactoryHalHybrid"
-//#define LOG_NDEBUG 0
-
-#include "DevicesFactoryHalHidl.h"
-#include "DevicesFactoryHalHybrid.h"
-#include "DevicesFactoryHalLocal.h"
-
-namespace android {
-
-DevicesFactoryHalHybrid::DevicesFactoryHalHybrid(sp<IDevicesFactory> hidlFactory)
- : mLocalFactory(new DevicesFactoryHalLocal()),
- mHidlFactory(new DevicesFactoryHalHidl(hidlFactory)) {
-}
-
-status_t DevicesFactoryHalHybrid::openDevice(const char *name, sp<DeviceHalInterface> *device) {
- if (mHidlFactory != 0 && strcmp(AUDIO_HARDWARE_MODULE_ID_A2DP, name) != 0 &&
- strcmp(AUDIO_HARDWARE_MODULE_ID_HEARING_AID, name) != 0) {
- return mHidlFactory->openDevice(name, device);
- }
- return mLocalFactory->openDevice(name, device);
-}
-
-status_t DevicesFactoryHalHybrid::getHalPids(std::vector<pid_t> *pids) {
- if (mHidlFactory != 0) {
- return mHidlFactory->getHalPids(pids);
- }
- return INVALID_OPERATION;
-}
-
-status_t DevicesFactoryHalHybrid::setCallbackOnce(sp<DevicesFactoryHalCallback> callback) {
- if (mHidlFactory) {
- return mHidlFactory->setCallbackOnce(callback);
- }
- return INVALID_OPERATION;
-}
-
-extern "C" __attribute__((visibility("default"))) void* createIDevicesFactory() {
- auto service = hardware::audio::CPP_VERSION::IDevicesFactory::getService();
- return service ? new DevicesFactoryHalHybrid(service) : nullptr;
-}
-
-} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h b/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
deleted file mode 100644
index 221584c..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalHybrid.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
-#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
-
-#include PATH(android/hardware/audio/FILE_VERSION/IDevicesFactory.h)
-#include <media/audiohal/DevicesFactoryHalInterface.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-using ::android::hardware::audio::CPP_VERSION::IDevicesFactory;
-
-namespace android {
-
-class DevicesFactoryHalHybrid : public DevicesFactoryHalInterface
-{
- public:
- DevicesFactoryHalHybrid(sp<IDevicesFactory> hidlFactory);
-
- // Opens a device with the specified name. To close the device, it is
- // necessary to release references to the returned object.
- virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
-
- status_t getHalPids(std::vector<pid_t> *pids) override;
-
- status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback) override;
-
- float getHalVersion() const override {
- return MAJOR_VERSION + (float)MINOR_VERSION / 10;
- }
-
- private:
- sp<DevicesFactoryHalInterface> mLocalFactory;
- sp<DevicesFactoryHalInterface> mHidlFactory;
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_HYBRID_H
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp b/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
deleted file mode 100644
index 13a9acd..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.cpp
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "DevicesFactoryHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <string.h>
-
-#include <hardware/audio.h>
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "DevicesFactoryHalLocal.h"
-
-namespace android {
-
-static status_t load_audio_interface(const char *if_name, audio_hw_device_t **dev)
-{
- const hw_module_t *mod;
- int rc;
-
- rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod);
- if (rc) {
- ALOGE("%s couldn't load audio hw module %s.%s (%s)", __func__,
- AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
- goto out;
- }
- rc = audio_hw_device_open(mod, dev);
- if (rc) {
- ALOGE("%s couldn't open audio hw device in %s.%s (%s)", __func__,
- AUDIO_HARDWARE_MODULE_ID, if_name, strerror(-rc));
- goto out;
- }
- if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) {
- ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version);
- rc = BAD_VALUE;
- audio_hw_device_close(*dev);
- goto out;
- }
- return OK;
-
-out:
- *dev = NULL;
- return rc;
-}
-
-status_t DevicesFactoryHalLocal::openDevice(const char *name, sp<DeviceHalInterface> *device) {
- audio_hw_device_t *dev;
- status_t rc = load_audio_interface(name, &dev);
- if (rc == OK) {
- *device = new DeviceHalLocal(dev);
- }
- return rc;
-}
-
-} // namespace android
diff --git a/media/libaudiohal/impl/DevicesFactoryHalLocal.h b/media/libaudiohal/impl/DevicesFactoryHalLocal.h
deleted file mode 100644
index a0da125..0000000
--- a/media/libaudiohal/impl/DevicesFactoryHalLocal.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
-#define ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
-
-#include <media/audiohal/DevicesFactoryHalInterface.h>
-#include <utils/Errors.h>
-#include <utils/RefBase.h>
-
-#include "DeviceHalLocal.h"
-
-namespace android {
-
-class DevicesFactoryHalLocal : public DevicesFactoryHalInterface
-{
- public:
- // Opens a device with the specified name. To close the device, it is
- // necessary to release references to the returned object.
- virtual status_t openDevice(const char *name, sp<DeviceHalInterface> *device);
-
- status_t getHalPids(std::vector<pid_t> *pids __unused) override {
- return INVALID_OPERATION;
- }
-
- status_t setCallbackOnce(sp<DevicesFactoryHalCallback> callback __unused) override {
- return INVALID_OPERATION;
- }
-
- float getHalVersion() const override {
- return MAJOR_VERSION + (float)MINOR_VERSION / 10;
- }
-
- private:
- friend class DevicesFactoryHalHybrid;
-
- // Can not be constructed directly by clients.
- DevicesFactoryHalLocal() {}
-
- virtual ~DevicesFactoryHalLocal() {}
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_DEVICES_FACTORY_HAL_LOCAL_H
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index b2f1cf3..263ee67 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -39,7 +39,7 @@
namespace android {
-using ReadCommand = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadCommand;
+using ReadCommand = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadCommand;
using namespace ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION;
using namespace ::android::hardware::audio::CORE_TYPES_CPP_VERSION;
@@ -271,6 +271,32 @@
return err == 0;
}
+status_t StreamHalHidl::legacyCreateAudioPatch(const struct audio_port_config& port,
+ std::optional<audio_source_t> source,
+ audio_devices_t type) {
+ LOG_ALWAYS_FATAL_IF(port.type != AUDIO_PORT_TYPE_DEVICE, "port type must be device");
+ char* address;
+ if (strcmp(port.ext.device.address, "") != 0) {
+ // FIXME: we only support address on first sink with HAL version < 3.0
+ address = audio_device_address_to_parameter(port.ext.device.type, port.ext.device.address);
+ } else {
+ address = (char*)calloc(1, 1);
+ }
+ AudioParameter param = AudioParameter(String8(address));
+ free(address);
+ param.addInt(String8(AudioParameter::keyRouting), (int)type);
+ if (source.has_value()) {
+ param.addInt(String8(AudioParameter::keyInputSource), (int)source.value());
+ }
+ return setParameters(param.toString());
+}
+
+status_t StreamHalHidl::legacyReleaseAudioPatch() {
+ AudioParameter param;
+ param.addInt(String8(AudioParameter::keyRouting), 0);
+ return setParameters(param.toString());
+}
+
namespace {
/* Notes on callback ownership.
@@ -905,9 +931,14 @@
callback->onRecommendedLatencyModeChanged(modes);
}
+status_t StreamOutHalHidl::exit() {
+ // FIXME this is using hard-coded strings but in the future, this functionality will be
+ // converted to use audio HAL extensions required to support tunneling
+ return setParameters(String8("exiting=1"));
+}
StreamInHalHidl::StreamInHalHidl(
- const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& stream)
+ const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream)
: StreamHalHidl(stream.get()), mStream(stream), mReaderClient(0), mEfGroup(nullptr) {
}
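
A minimal usage sketch of the two methods added above, for an output stream on a pre-3.0 HAL; only the method signatures come from this change, while the device constants and the streamOut variable are illustrative assumptions.

// Route playback to the speaker through legacy key/value routing, then disconnect.
audio_port_config sink = {};
sink.type = AUDIO_PORT_TYPE_DEVICE;
sink.ext.device.type = AUDIO_DEVICE_OUT_SPEAKER;
sink.ext.device.address[0] = '\0';                           // no address for this device
streamOut->legacyCreateAudioPatch(sink, std::nullopt, AUDIO_DEVICE_OUT_SPEAKER);
// ... playback ...
streamOut->legacyReleaseAudioPatch();                        // equivalent to sending "routing=0"
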
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 03342ef..4e80e88 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -20,7 +20,7 @@
#include <atomic>
#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
-#include PATH(android/hardware/audio/FILE_VERSION/IStreamIn.h)
+#include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
#include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
#include <fmq/EventFlag.h>
#include <fmq/MessageQueue.h>
@@ -35,8 +35,9 @@
using ::android::hardware::EventFlag;
using ::android::hardware::MessageQueue;
using ::android::hardware::Return;
-using ReadParameters = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadParameters;
-using ReadStatus = ::android::hardware::audio::CPP_VERSION::IStreamIn::ReadStatus;
+using ReadParameters =
+ ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadParameters;
+using ReadStatus = ::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn::ReadStatus;
using WriteCommand = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteCommand;
using WriteStatus = ::android::hardware::audio::CPP_VERSION::IStreamOut::WriteStatus;
@@ -88,6 +89,12 @@
// (must match the priority of the audioflinger's thread that calls 'read' / 'write')
virtual status_t setHalThreadPriority(int priority);
+ status_t legacyCreateAudioPatch(const struct audio_port_config& port,
+ std::optional<audio_source_t> source,
+ audio_devices_t type) override;
+
+ status_t legacyReleaseAudioPatch() override;
+
protected:
// Subclasses can not be constructed directly by clients.
explicit StreamHalHidl(IStream *stream);
@@ -196,6 +203,8 @@
void onRecommendedLatencyModeChanged(const std::vector<audio_latency_mode_t>& modes);
+ status_t exit() override;
+
private:
friend class DeviceHalHidl;
typedef MessageQueue<WriteCommand, hardware::kSynchronizedReadWrite> CommandMQ;
@@ -262,7 +271,7 @@
typedef MessageQueue<uint8_t, hardware::kSynchronizedReadWrite> DataMQ;
typedef MessageQueue<ReadStatus, hardware::kSynchronizedReadWrite> StatusMQ;
- const sp<::android::hardware::audio::CPP_VERSION::IStreamIn> mStream;
+ const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn> mStream;
std::unique_ptr<CommandMQ> mCommandMQ;
std::unique_ptr<DataMQ> mDataMQ;
std::unique_ptr<StatusMQ> mStatusMQ;
@@ -270,7 +279,8 @@
EventFlag* mEfGroup;
// Can not be constructed directly by clients.
- StreamInHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamIn>& stream);
+ StreamInHalHidl(
+ const sp<::android::hardware::audio::CORE_TYPES_CPP_VERSION::IStreamIn>& stream);
virtual ~StreamInHalHidl();
diff --git a/media/libaudiohal/impl/StreamHalLocal.cpp b/media/libaudiohal/impl/StreamHalLocal.cpp
deleted file mode 100644
index 477f510..0000000
--- a/media/libaudiohal/impl/StreamHalLocal.cpp
+++ /dev/null
@@ -1,520 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "StreamHalLocal"
-//#define LOG_NDEBUG 0
-
-#include <audio_utils/Metadata.h>
-#include <hardware/audio.h>
-#include <media/AudioParameter.h>
-#include <utils/Log.h>
-
-#include "DeviceHalLocal.h"
-#include "ParameterUtils.h"
-#include "StreamHalLocal.h"
-
-namespace android {
-
-StreamHalLocal::StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device)
- : mDevice(device),
- mStream(stream) {
- // Instrument audio signal power logging.
- // Note: This assumes channel mask, format, and sample rate do not change after creation.
- if (mStream != nullptr /* && mStreamPowerLog.isUserDebugOrEngBuild() */) {
- mStreamPowerLog.init(mStream->get_sample_rate(mStream),
- mStream->get_channels(mStream),
- mStream->get_format(mStream));
- }
-}
-
-StreamHalLocal::~StreamHalLocal() {
- mStream = 0;
- mDevice.clear();
-}
-
-status_t StreamHalLocal::getBufferSize(size_t *size) {
- *size = mStream->get_buffer_size(mStream);
- return OK;
-}
-
-status_t StreamHalLocal::getAudioProperties(audio_config_base_t *configBase) {
- configBase->sample_rate = mStream->get_sample_rate(mStream);
- configBase->channel_mask = mStream->get_channels(mStream);
- configBase->format = mStream->get_format(mStream);
- return OK;
-}
-
-status_t StreamHalLocal::setParameters(const String8& kvPairs) {
- return mStream->set_parameters(mStream, kvPairs.string());
-}
-
-status_t StreamHalLocal::getParameters(const String8& keys, String8 *values) {
- char *halValues = mStream->get_parameters(mStream, keys.string());
- if (halValues != NULL) {
- values->setTo(halValues);
- free(halValues);
- } else {
- values->clear();
- }
- return OK;
-}
-
-status_t StreamHalLocal::addEffect(sp<EffectHalInterface>) {
- LOG_ALWAYS_FATAL("Local streams can not have effects");
- return INVALID_OPERATION;
-}
-
-status_t StreamHalLocal::removeEffect(sp<EffectHalInterface>) {
- LOG_ALWAYS_FATAL("Local streams can not have effects");
- return INVALID_OPERATION;
-}
-
-status_t StreamHalLocal::standby() {
- return mStream->standby(mStream);
-}
-
-status_t StreamHalLocal::dump(int fd, const Vector<String16>& args) {
- (void) args;
- status_t status = mStream->dump(mStream, fd);
- mStreamPowerLog.dump(fd);
- return status;
-}
-
-status_t StreamHalLocal::setHalThreadPriority(int) {
- // Don't need to do anything as local hal is executed by audioflinger directly
- // on the same thread.
- return OK;
-}
-
-StreamOutHalLocal::StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device)
- : StreamHalLocal(&stream->common, device), mStream(stream) {
-}
-
-StreamOutHalLocal::~StreamOutHalLocal() {
- mCallback.clear();
- mDevice->closeOutputStream(mStream);
- mStream = 0;
-}
-
-status_t StreamOutHalLocal::getFrameSize(size_t *size) {
- *size = audio_stream_out_frame_size(mStream);
- return OK;
-}
-
-status_t StreamOutHalLocal::getLatency(uint32_t *latency) {
- *latency = mStream->get_latency(mStream);
- return OK;
-}
-
-status_t StreamOutHalLocal::setVolume(float left, float right) {
- if (mStream->set_volume == NULL) return INVALID_OPERATION;
- return mStream->set_volume(mStream, left, right);
-}
-
-status_t StreamOutHalLocal::selectPresentation(int presentationId, int programId) {
- AudioParameter param;
- param.addInt(String8(AudioParameter::keyPresentationId), presentationId);
- param.addInt(String8(AudioParameter::keyProgramId), programId);
- return setParameters(param.toString());
-}
-
-status_t StreamOutHalLocal::write(const void *buffer, size_t bytes, size_t *written) {
- ssize_t writeResult = mStream->write(mStream, buffer, bytes);
- if (writeResult > 0) {
- *written = writeResult;
- mStreamPowerLog.log(buffer, *written);
- return OK;
- } else {
- *written = 0;
- return writeResult;
- }
-}
-
-status_t StreamOutHalLocal::getRenderPosition(uint32_t *dspFrames) {
- return mStream->get_render_position(mStream, dspFrames);
-}
-
-status_t StreamOutHalLocal::getNextWriteTimestamp(int64_t *timestamp) {
- if (mStream->get_next_write_timestamp == NULL) return INVALID_OPERATION;
- return mStream->get_next_write_timestamp(mStream, timestamp);
-}
-
-status_t StreamOutHalLocal::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
- if (mStream->set_callback == NULL) return INVALID_OPERATION;
- status_t result = mStream->set_callback(mStream, StreamOutHalLocal::asyncCallback, this);
- if (result == OK) {
- mCallback = callback;
- }
- return result;
-}
-
-// static
-int StreamOutHalLocal::asyncCallback(stream_callback_event_t event, void*, void *cookie) {
- // We act as if we gave a wp<StreamOutHalLocal> to HAL. This way we should handle
- // correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
- // already running, because the destructor is invoked after the refcount has been atomically
- // decremented.
- wp<StreamOutHalLocal> weakSelf(static_cast<StreamOutHalLocal*>(cookie));
- sp<StreamOutHalLocal> self = weakSelf.promote();
- if (self == 0) return 0;
- sp<StreamOutHalInterfaceCallback> callback = self->mCallback.promote();
- if (callback == 0) return 0;
- ALOGV("asyncCallback() event %d", event);
- switch (event) {
- case STREAM_CBK_EVENT_WRITE_READY:
- callback->onWriteReady();
- break;
- case STREAM_CBK_EVENT_DRAIN_READY:
- callback->onDrainReady();
- break;
- case STREAM_CBK_EVENT_ERROR:
- callback->onError();
- break;
- default:
- ALOGW("asyncCallback() unknown event %d", event);
- break;
- }
- return 0;
-}
-
-status_t StreamOutHalLocal::supportsPauseAndResume(bool *supportsPause, bool *supportsResume) {
- *supportsPause = mStream->pause != NULL;
- *supportsResume = mStream->resume != NULL;
- return OK;
-}
-
-status_t StreamOutHalLocal::pause() {
- if (mStream->pause == NULL) return INVALID_OPERATION;
- return mStream->pause(mStream);
-}
-
-status_t StreamOutHalLocal::resume() {
- if (mStream->resume == NULL) return INVALID_OPERATION;
- return mStream->resume(mStream);
-}
-
-status_t StreamOutHalLocal::supportsDrain(bool *supportsDrain) {
- *supportsDrain = mStream->drain != NULL;
- return OK;
-}
-
-status_t StreamOutHalLocal::drain(bool earlyNotify) {
- if (mStream->drain == NULL) return INVALID_OPERATION;
- return mStream->drain(mStream, earlyNotify ? AUDIO_DRAIN_EARLY_NOTIFY : AUDIO_DRAIN_ALL);
-}
-
-status_t StreamOutHalLocal::flush() {
- if (mStream->flush == NULL) return INVALID_OPERATION;
- return mStream->flush(mStream);
-}
-
-status_t StreamOutHalLocal::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
- if (mStream->get_presentation_position == NULL) return INVALID_OPERATION;
- return mStream->get_presentation_position(mStream, frames, timestamp);
-}
-
-void StreamOutHalLocal::doUpdateSourceMetadata(const SourceMetadata& sourceMetadata) {
- std::vector<playback_track_metadata> halTracks;
- halTracks.reserve(sourceMetadata.tracks.size());
- for (auto& metadata : sourceMetadata.tracks) {
- playback_track_metadata halTrackMetadata;
- playback_track_metadata_from_v7(&halTrackMetadata, &metadata);
- halTracks.push_back(halTrackMetadata);
- }
- const source_metadata_t halMetadata = {
- .track_count = halTracks.size(),
- .tracks = halTracks.data(),
- };
- mStream->update_source_metadata(mStream, &halMetadata);
-}
-
-#if MAJOR_VERSION >= 7
-void StreamOutHalLocal::doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata) {
- const source_metadata_v7_t metadata {
- .track_count = sourceMetadata.tracks.size(),
- // const cast is fine as it is in a const structure
- .tracks = const_cast<playback_track_metadata_v7*>(sourceMetadata.tracks.data()),
- };
- mStream->update_source_metadata_v7(mStream, &metadata);
-}
-#endif
-
-status_t StreamOutHalLocal::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
-#if MAJOR_VERSION < 7
- if (mStream->update_source_metadata == nullptr) {
- return INVALID_OPERATION;
- }
- doUpdateSourceMetadata(sourceMetadata);
-#else
- if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
- if (mStream->update_source_metadata == nullptr) {
- return INVALID_OPERATION;
- }
- doUpdateSourceMetadata(sourceMetadata);
- } else {
- if (mStream->update_source_metadata_v7 == nullptr) {
- return INVALID_OPERATION;
- }
- doUpdateSourceMetadataV7(sourceMetadata);
- }
-#endif
- return OK;
-}
-
-
-status_t StreamOutHalLocal::start() {
- if (mStream->start == NULL) return INVALID_OPERATION;
- return mStream->start(mStream);
-}
-
-status_t StreamOutHalLocal::stop() {
- if (mStream->stop == NULL) return INVALID_OPERATION;
- return mStream->stop(mStream);
-}
-
-status_t StreamOutHalLocal::createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info) {
- if (mStream->create_mmap_buffer == NULL) return INVALID_OPERATION;
- return mStream->create_mmap_buffer(mStream, minSizeFrames, info);
-}
-
-status_t StreamOutHalLocal::getMmapPosition(struct audio_mmap_position *position) {
- if (mStream->get_mmap_position == NULL) return INVALID_OPERATION;
- return mStream->get_mmap_position(mStream, position);
-}
-
-status_t StreamOutHalLocal::getDualMonoMode(audio_dual_mono_mode_t* mode) {
- if (mStream->get_dual_mono_mode == nullptr) return INVALID_OPERATION;
- return mStream->get_dual_mono_mode(mStream, mode);
-}
-
-status_t StreamOutHalLocal::setDualMonoMode(audio_dual_mono_mode_t mode) {
- if (mStream->set_dual_mono_mode == nullptr) return INVALID_OPERATION;
- return mStream->set_dual_mono_mode(mStream, mode);
-}
-
-status_t StreamOutHalLocal::getAudioDescriptionMixLevel(float* leveldB) {
- if (mStream->get_audio_description_mix_level == nullptr) return INVALID_OPERATION;
- return mStream->get_audio_description_mix_level(mStream, leveldB);
-}
-
-status_t StreamOutHalLocal::setAudioDescriptionMixLevel(float leveldB) {
- if (mStream->set_audio_description_mix_level == nullptr) return INVALID_OPERATION;
- return mStream->set_audio_description_mix_level(mStream, leveldB);
-}
-
-status_t StreamOutHalLocal::getPlaybackRateParameters(audio_playback_rate_t* playbackRate) {
- if (mStream->get_playback_rate_parameters == nullptr) return INVALID_OPERATION;
- return mStream->get_playback_rate_parameters(mStream, playbackRate);
-}
-
-status_t StreamOutHalLocal::setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) {
- if (mStream->set_playback_rate_parameters == nullptr) return INVALID_OPERATION;
- return mStream->set_playback_rate_parameters(mStream, &playbackRate);
-}
-
-status_t StreamOutHalLocal::setEventCallback(
- const sp<StreamOutHalInterfaceEventCallback>& callback) {
- if (mStream->set_event_callback == nullptr) {
- return INVALID_OPERATION;
- }
- stream_event_callback_t asyncCallback =
- callback == nullptr ? nullptr : StreamOutHalLocal::asyncEventCallback;
- status_t result = mStream->set_event_callback(mStream, asyncCallback, this);
- if (result == OK) {
- mEventCallback = callback;
- }
- return result;
-}
-
-// static
-int StreamOutHalLocal::asyncEventCallback(
- stream_event_callback_type_t event, void *param, void *cookie) {
- // We act as if we gave a wp<StreamOutHalLocal> to HAL. This way we should handle
- // correctly the case when the callback is invoked while StreamOutHalLocal's destructor is
- // already running, because the destructor is invoked after the refcount has been atomically
- // decremented.
- wp<StreamOutHalLocal> weakSelf(static_cast<StreamOutHalLocal*>(cookie));
- sp<StreamOutHalLocal> self = weakSelf.promote();
- if (self == nullptr) return 0;
- sp<StreamOutHalInterfaceEventCallback> callback = self->mEventCallback.promote();
- if (callback.get() == nullptr) return 0;
- switch (event) {
- case STREAM_EVENT_CBK_TYPE_CODEC_FORMAT_CHANGED:
- // void* param is the byte string buffer from byte_string_from_audio_metadata().
- // As the byte string buffer may have embedded zeroes, we cannot use strlen()
- callback->onCodecFormatChanged(std::basic_string<uint8_t>(
- (const uint8_t*)param,
- audio_utils::metadata::dataByteStringLen((const uint8_t*)param)));
- break;
- default:
- ALOGW("%s unknown event %d", __func__, event);
- break;
- }
- return 0;
-}
-
-StreamInHalLocal::StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device)
- : StreamHalLocal(&stream->common, device), mStream(stream) {
-}
-
-StreamInHalLocal::~StreamInHalLocal() {
- mDevice->closeInputStream(mStream);
- mStream = 0;
-}
-
-status_t StreamInHalLocal::getFrameSize(size_t *size) {
- *size = audio_stream_in_frame_size(mStream);
- return OK;
-}
-
-status_t StreamInHalLocal::setGain(float gain) {
- return mStream->set_gain(mStream, gain);
-}
-
-status_t StreamInHalLocal::read(void *buffer, size_t bytes, size_t *read) {
- ssize_t readResult = mStream->read(mStream, buffer, bytes);
- if (readResult > 0) {
- *read = readResult;
- mStreamPowerLog.log(buffer, *read);
- return OK;
- } else {
- *read = 0;
- return readResult;
- }
-}
-
-status_t StreamInHalLocal::getInputFramesLost(uint32_t *framesLost) {
- *framesLost = mStream->get_input_frames_lost(mStream);
- return OK;
-}
-
-status_t StreamInHalLocal::getCapturePosition(int64_t *frames, int64_t *time) {
- if (mStream->get_capture_position == NULL) return INVALID_OPERATION;
- return mStream->get_capture_position(mStream, frames, time);
-}
-
-void StreamInHalLocal::doUpdateSinkMetadata(const SinkMetadata& sinkMetadata) {
- std::vector<record_track_metadata> halTracks;
- halTracks.reserve(sinkMetadata.tracks.size());
- for (auto& metadata : sinkMetadata.tracks) {
- record_track_metadata halTrackMetadata;
- record_track_metadata_from_v7(&halTrackMetadata, &metadata);
- halTracks.push_back(halTrackMetadata);
- }
- const sink_metadata_t halMetadata = {
- .track_count = halTracks.size(),
- .tracks = halTracks.data(),
- };
- mStream->update_sink_metadata(mStream, &halMetadata);
-}
-
-#if MAJOR_VERSION >= 7
-void StreamInHalLocal::doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata) {
- const sink_metadata_v7_t halMetadata {
- .track_count = sinkMetadata.tracks.size(),
- // const cast is fine as it is in a const structure
- .tracks = const_cast<record_track_metadata_v7*>(sinkMetadata.tracks.data()),
- };
- mStream->update_sink_metadata_v7(mStream, &halMetadata);
-}
-#endif
-
-status_t StreamInHalLocal::updateSinkMetadata(const SinkMetadata& sinkMetadata) {
-#if MAJOR_VERSION < 7
- if (mStream->update_sink_metadata == nullptr) {
- return INVALID_OPERATION; // not supported by the HAL
- }
- doUpdateSinkMetadata(sinkMetadata);
-#else
- if (mDevice->version() < AUDIO_DEVICE_API_VERSION_3_2) {
- if (mStream->update_sink_metadata == nullptr) {
- return INVALID_OPERATION; // not supported by the HAL
- }
- doUpdateSinkMetadata(sinkMetadata);
- } else {
- if (mStream->update_sink_metadata_v7 == nullptr) {
- return INVALID_OPERATION; // not supported by the HAL
- }
- doUpdateSinkMetadataV7(sinkMetadata);
- }
-#endif
- return OK;
-}
-
-status_t StreamInHalLocal::start() {
- if (mStream->start == NULL) return INVALID_OPERATION;
- return mStream->start(mStream);
-}
-
-status_t StreamInHalLocal::stop() {
- if (mStream->stop == NULL) return INVALID_OPERATION;
- return mStream->stop(mStream);
-}
-
-status_t StreamInHalLocal::createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info) {
- if (mStream->create_mmap_buffer == NULL) return INVALID_OPERATION;
- return mStream->create_mmap_buffer(mStream, minSizeFrames, info);
-}
-
-status_t StreamInHalLocal::getMmapPosition(struct audio_mmap_position *position) {
- if (mStream->get_mmap_position == NULL) return INVALID_OPERATION;
- return mStream->get_mmap_position(mStream, position);
-}
-
-#if MAJOR_VERSION == 2
-status_t StreamInHalLocal::getActiveMicrophones(
- std::vector<media::MicrophoneInfo> *microphones __unused) {
- return INVALID_OPERATION;
-}
-#elif MAJOR_VERSION >= 4
-status_t StreamInHalLocal::getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones) {
- if (mStream->get_active_microphones == NULL) return INVALID_OPERATION;
- size_t actual_mics = AUDIO_MICROPHONE_MAX_COUNT;
- audio_microphone_characteristic_t mic_array[AUDIO_MICROPHONE_MAX_COUNT];
- status_t status = mStream->get_active_microphones(mStream, &mic_array[0], &actual_mics);
- for (size_t i = 0; i < actual_mics; i++) {
- media::MicrophoneInfo microphoneInfo = media::MicrophoneInfo(mic_array[i]);
- microphones->push_back(microphoneInfo);
- }
- return status;
-}
-#endif
-
-#if MAJOR_VERSION < 5
-status_t StreamInHalLocal::setPreferredMicrophoneDirection(
- audio_microphone_direction_t direction __unused) {
- return INVALID_OPERATION;
-}
-
-status_t StreamInHalLocal::setPreferredMicrophoneFieldDimension(float zoom __unused) {
- return INVALID_OPERATION;
-}
-#else
-status_t StreamInHalLocal::setPreferredMicrophoneDirection(audio_microphone_direction_t direction) {
- if (mStream->set_microphone_direction == NULL) return INVALID_OPERATION;
- return mStream->set_microphone_direction(mStream, direction);
-}
-
-status_t StreamInHalLocal::setPreferredMicrophoneFieldDimension(float zoom) {
- if (mStream->set_microphone_field_dimension == NULL) return INVALID_OPERATION;
- return mStream->set_microphone_field_dimension(mStream, zoom);
-
-}
-#endif
-
-} // namespace android
diff --git a/media/libaudiohal/impl/StreamHalLocal.h b/media/libaudiohal/impl/StreamHalLocal.h
deleted file mode 100644
index 770137f..0000000
--- a/media/libaudiohal/impl/StreamHalLocal.h
+++ /dev/null
@@ -1,262 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
-#define ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
-
-#include <media/audiohal/StreamHalInterface.h>
-#include "StreamPowerLog.h"
-
-namespace android {
-
-class DeviceHalLocal;
-
-class StreamHalLocal : public virtual StreamHalInterface
-{
- public:
- // Return size of input/output buffer in bytes for this stream - eg. 4800.
- virtual status_t getBufferSize(size_t *size);
-
- // Return the base configuration of the stream:
- // - channel mask;
- // - format - e.g. AUDIO_FORMAT_PCM_16_BIT;
- // - sampling rate in Hz - eg. 44100.
- virtual status_t getAudioProperties(audio_config_base_t *configBase);
-
- // Set audio stream parameters.
- virtual status_t setParameters(const String8& kvPairs);
-
- // Get audio stream parameters.
- virtual status_t getParameters(const String8& keys, String8 *values);
-
- // Add or remove the effect on the stream.
- virtual status_t addEffect(sp<EffectHalInterface> effect);
- virtual status_t removeEffect(sp<EffectHalInterface> effect);
-
- // Put the audio hardware input/output into standby mode.
- virtual status_t standby();
-
- virtual status_t dump(int fd, const Vector<String16>& args) override;
-
- // Start a stream operating in mmap mode.
- virtual status_t start() = 0;
-
- // Stop a stream operating in mmap mode.
- virtual status_t stop() = 0;
-
- // Retrieve information on the data buffer in mmap mode.
- virtual status_t createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info) = 0;
-
- // Get current read/write position in the mmap buffer
- virtual status_t getMmapPosition(struct audio_mmap_position *position) = 0;
-
- // Set the priority of the thread that interacts with the HAL
- // (must match the priority of the audioflinger's thread that calls 'read' / 'write')
- virtual status_t setHalThreadPriority(int priority);
-
- protected:
- // Subclasses can not be constructed directly by clients.
- StreamHalLocal(audio_stream_t *stream, sp<DeviceHalLocal> device);
-
- // The destructor automatically closes the stream.
- virtual ~StreamHalLocal();
-
- sp<DeviceHalLocal> mDevice;
-
- // mStreamPowerLog is used for audio signal power logging.
- StreamPowerLog mStreamPowerLog;
-
- private:
- audio_stream_t *mStream;
-};
-
-class StreamOutHalLocal : public StreamOutHalInterface, public StreamHalLocal {
- public:
- // Return the frame size (number of bytes per audio frame) of a stream.
- virtual status_t getFrameSize(size_t *size);
-
- // Return the audio hardware driver estimated latency in milliseconds.
- virtual status_t getLatency(uint32_t *latency);
-
- // Use this method in situations where audio mixing is done in the hardware.
- virtual status_t setVolume(float left, float right);
-
- // Selects the audio presentation (if available).
- virtual status_t selectPresentation(int presentationId, int programId);
-
- // Write audio buffer to driver.
- virtual status_t write(const void *buffer, size_t bytes, size_t *written);
-
- // Return the number of audio frames written by the audio dsp to DAC since
- // the output has exited standby.
- virtual status_t getRenderPosition(uint32_t *dspFrames);
-
- // Get the local time at which the next write to the audio driver will be presented.
- virtual status_t getNextWriteTimestamp(int64_t *timestamp);
-
- // Set the callback for notifying completion of non-blocking write and drain.
- virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
-
- // Returns whether pause and resume operations are supported.
- virtual status_t supportsPauseAndResume(bool *supportsPause, bool *supportsResume);
-
- // Notifies the audio driver to pause playback.
- virtual status_t pause();
-
- // Notifies the audio driver to resume playback following a pause.
- virtual status_t resume();
-
- // Returns whether drain operation is supported.
- virtual status_t supportsDrain(bool *supportsDrain);
-
- // Requests notification when data buffered by the driver/hardware has been played.
- virtual status_t drain(bool earlyNotify);
-
- // Notifies to the audio driver to flush the queued data.
- virtual status_t flush();
-
- // Return a recent count of the number of audio frames presented to an external observer.
- virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
-
- // Start a stream operating in mmap mode.
- virtual status_t start();
-
- // Stop a stream operating in mmap mode.
- virtual status_t stop();
-
- // Retrieve information on the data buffer in mmap mode.
- virtual status_t createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info);
-
- // Get current read/write position in the mmap buffer
- virtual status_t getMmapPosition(struct audio_mmap_position *position);
-
- // Called when the metadata of the stream's source has been changed.
- status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
-
- // Returns the Dual Mono mode presentation setting.
- status_t getDualMonoMode(audio_dual_mono_mode_t* mode) override;
-
- // Sets the Dual Mono mode presentation on the output device.
- status_t setDualMonoMode(audio_dual_mono_mode_t mode) override;
-
- // Returns the Audio Description Mix level in dB.
- status_t getAudioDescriptionMixLevel(float* leveldB) override;
-
- // Sets the Audio Description Mix level in dB.
- status_t setAudioDescriptionMixLevel(float leveldB) override;
-
- // Retrieves current playback rate parameters.
- status_t getPlaybackRateParameters(audio_playback_rate_t* playbackRate) override;
-
- // Sets the playback rate parameters that control playback behavior.
- status_t setPlaybackRateParameters(const audio_playback_rate_t& playbackRate) override;
-
- status_t setEventCallback(const sp<StreamOutHalInterfaceEventCallback>& callback) override;
-
- status_t setLatencyMode(audio_latency_mode_t mode __unused) override {
- return INVALID_OPERATION;
- }
- status_t getRecommendedLatencyModes(
- std::vector<audio_latency_mode_t> *modes __unused) override {
- return INVALID_OPERATION;
- }
- status_t setLatencyModeCallback(
- const sp<StreamOutHalInterfaceLatencyModeCallback>& callback __unused) override {
- return INVALID_OPERATION;
- }
-
- private:
- audio_stream_out_t *mStream;
- wp<StreamOutHalInterfaceCallback> mCallback;
- wp<StreamOutHalInterfaceEventCallback> mEventCallback;
-
- friend class DeviceHalLocal;
-
- // Can not be constructed directly by clients.
- StreamOutHalLocal(audio_stream_out_t *stream, sp<DeviceHalLocal> device);
-
- virtual ~StreamOutHalLocal();
-
- static int asyncCallback(stream_callback_event_t event, void *param, void *cookie);
-
- static int asyncEventCallback(stream_event_callback_type_t event, void *param, void *cookie);
-
- void doUpdateSourceMetadataV7(const SourceMetadata& sourceMetadata);
- void doUpdateSourceMetadata(const SourceMetadata& sourceMetadata);
-};
-
-class StreamInHalLocal : public StreamInHalInterface, public StreamHalLocal {
- public:
- // Return the frame size (number of bytes per audio frame) of a stream.
- virtual status_t getFrameSize(size_t *size);
-
- // Set the input gain for the audio driver.
- virtual status_t setGain(float gain);
-
- // Read audio buffer in from driver.
- virtual status_t read(void *buffer, size_t bytes, size_t *read);
-
- // Return the amount of input frames lost in the audio driver.
- virtual status_t getInputFramesLost(uint32_t *framesLost);
-
- // Return a recent count of the number of audio frames received and
- // the clock time associated with that frame count.
- virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
-
- // Start a stream operating in mmap mode.
- virtual status_t start();
-
- // Stop a stream operating in mmap mode.
- virtual status_t stop();
-
- // Retrieve information on the data buffer in mmap mode.
- virtual status_t createMmapBuffer(int32_t minSizeFrames,
- struct audio_mmap_buffer_info *info);
-
- // Get current read/write position in the mmap buffer
- virtual status_t getMmapPosition(struct audio_mmap_position *position);
-
- // Get active microphones
- virtual status_t getActiveMicrophones(std::vector<media::MicrophoneInfo> *microphones);
-
- // Sets microphone direction (for processing)
- virtual status_t setPreferredMicrophoneDirection(audio_microphone_direction_t direction);
-
- // Sets microphone zoom (for processing)
- virtual status_t setPreferredMicrophoneFieldDimension(float zoom);
-
- // Called when the metadata of the stream's sink has been changed.
- status_t updateSinkMetadata(const SinkMetadata& sinkMetadata) override;
-
- private:
- audio_stream_in_t *mStream;
-
- friend class DeviceHalLocal;
-
- // Can not be constructed directly by clients.
- StreamInHalLocal(audio_stream_in_t *stream, sp<DeviceHalLocal> device);
-
- virtual ~StreamInHalLocal();
-
- void doUpdateSinkMetadata(const SinkMetadata& sinkMetadata);
- void doUpdateSinkMetadataV7(const SinkMetadata& sinkMetadata);
-};
-
-} // namespace android
-
-#endif // ANDROID_HARDWARE_STREAM_HAL_LOCAL_H
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 9ca7118..d27ad4c 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -19,6 +19,7 @@
#include <android/media/audio/common/AudioMMapPolicyInfo.h>
#include <android/media/audio/common/AudioMMapPolicyType.h>
+#include <error/Result.h>
#include <media/audiohal/EffectHalInterface.h>
#include <media/MicrophoneInfo.h>
#include <system/audio.h>
@@ -131,6 +132,8 @@
// Update the connection status of an external device.
virtual status_t setConnectedState(const struct audio_port_v7 *port, bool connected) = 0;
+ virtual error::Result<audio_hw_sync_t> getHwAvSync() = 0;
+
virtual status_t dump(int fd, const Vector<String16>& args) = 0;
protected:
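
A hedged sketch of what an implementation of getHwAvSync() can look like when expressed through the legacy key/value query that AudioFlinger previously did inline (see the AudioFlinger.cpp hunk below). It assumes error::Result behaves like base::expected<audio_hw_sync_t, status_t>, as the ok()/VALUE_OR_FATAL usage suggests; the helper name is hypothetical.

error::Result<audio_hw_sync_t> getHwAvSyncViaParameters(DeviceHalInterface* dev) {
    String8 reply;
    if (dev->getParameters(String8(AudioParameter::keyHwAvSync), &reply) != OK) {
        return base::unexpected(INVALID_OPERATION);
    }
    AudioParameter param(reply);
    int value;
    if (param.getInt(String8(AudioParameter::keyHwAvSync), value) != NO_ERROR) {
        return base::unexpected(BAD_VALUE);                  // key not reported by the HAL
    }
    return static_cast<audio_hw_sync_t>(value);
}
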
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index e12fe77..1d52b7d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -89,6 +89,12 @@
// (must match the priority of the audioflinger's thread that calls 'read' / 'write')
virtual status_t setHalThreadPriority(int priority) = 0;
+ virtual status_t legacyCreateAudioPatch(const struct audio_port_config& port,
+ std::optional<audio_source_t> source,
+ audio_devices_t type) = 0;
+
+ virtual status_t legacyReleaseAudioPatch() = 0;
+
protected:
// Subclasses can not be constructed directly by clients.
StreamHalInterface() {}
@@ -242,6 +248,11 @@
virtual status_t setLatencyModeCallback(
const sp<StreamOutHalInterfaceLatencyModeCallback>& callback) = 0;
+ /**
+ * Signal the end of audio output, interrupting an ongoing 'write' operation.
+ */
+ virtual status_t exit() = 0;
+
protected:
virtual ~StreamOutHalInterface() {}
};
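
Since legacyCreateAudioPatch, legacyReleaseAudioPatch, and exit are pure virtual, stream HAL implementations outside this change must now provide them. A minimal stub for an implementation without legacy routing support might look like this (illustrative only; MyStreamOut is a hypothetical class):

status_t MyStreamOut::legacyCreateAudioPatch(const struct audio_port_config& /*port*/,
                                             std::optional<audio_source_t> /*source*/,
                                             audio_devices_t /*type*/) {
    return INVALID_OPERATION;   // routing goes through createAudioPatch on the device instead
}

status_t MyStreamOut::legacyReleaseAudioPatch() {
    return INVALID_OPERATION;
}

status_t MyStreamOut::exit() {
    return INVALID_OPERATION;   // no blocking write to interrupt
}
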
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 5b53331..479906f 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -2493,21 +2493,17 @@
if (dev == nullptr) {
return AUDIO_HW_SYNC_INVALID;
}
- String8 reply;
- AudioParameter param;
- if (dev->getParameters(String8(AudioParameter::keyHwAvSync), &reply) == OK) {
- param = AudioParameter(reply);
- }
- int value;
- if (param.getInt(String8(AudioParameter::keyHwAvSync), value) != NO_ERROR) {
+ error::Result<audio_hw_sync_t> result = dev->getHwAvSync();
+ if (!result.ok()) {
ALOGW("getAudioHwSyncForSession error getting sync for session %d", sessionId);
return AUDIO_HW_SYNC_INVALID;
}
+ audio_hw_sync_t value = VALUE_OR_FATAL(result);
// allow only one session for a given HW A/V sync ID.
for (size_t i = 0; i < mHwAvSyncIds.size(); i++) {
- if (mHwAvSyncIds.valueAt(i) == (audio_hw_sync_t)value) {
+ if (mHwAvSyncIds.valueAt(i) == value) {
ALOGV("getAudioHwSyncForSession removing ID %d for session %d",
value, mHwAvSyncIds.keyAt(i));
mHwAvSyncIds.removeItemsAt(i);
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index dd278f0..ae5772d 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2104,10 +2104,8 @@
void AudioFlinger::PlaybackThread::preExit()
{
ALOGV(" preExit()");
- // FIXME this is using hard-coded strings but in the future, this functionality will be
- // converted to use audio HAL extensions required to support tunneling
- status_t result = mOutput->stream->setParameters(String8("exiting=1"));
- ALOGE_IF(result != OK, "Error when setting parameters on exit: %d", result);
+ status_t result = mOutput->stream->exit();
+ ALOGE_IF(result != OK, "Error when calling exit(): %d", result);
}
void AudioFlinger::PlaybackThread::dumpTracks_l(int fd, const Vector<String16>& args __unused)
@@ -4585,19 +4583,7 @@
patch->sinks,
handle);
} else {
- char *address;
- if (strcmp(patch->sinks[0].ext.device.address, "") != 0) {
- //FIXME: we only support address on first sink with HAL version < 3.0
- address = audio_device_address_to_parameter(
- patch->sinks[0].ext.device.type,
- patch->sinks[0].ext.device.address);
- } else {
- address = (char *)calloc(1, 1);
- }
- AudioParameter param = AudioParameter(String8(address));
- free(address);
- param.addInt(String8(AudioParameter::keyRouting), (int)type);
- status = mOutput->stream->setParameters(param.toString());
+ status = mOutput->stream->legacyCreateAudioPatch(patch->sinks[0], std::nullopt, type);
*handle = AUDIO_PATCH_HANDLE_NONE;
}
const std::string patchSinksAsString = patchSinksToString(patch);
@@ -4642,9 +4628,7 @@
sp<DeviceHalInterface> hwDevice = mOutput->audioHwDev->hwDevice();
status = hwDevice->releaseAudioPatch(handle);
} else {
- AudioParameter param;
- param.addInt(String8(AudioParameter::keyRouting), 0);
- status = mOutput->stream->setParameters(param.toString());
+ status = mOutput->stream->legacyReleaseAudioPatch();
}
return status;
}
@@ -9130,21 +9114,9 @@
patch->sinks,
handle);
} else {
- char *address;
- if (strcmp(patch->sources[0].ext.device.address, "") != 0) {
- address = audio_device_address_to_parameter(
- patch->sources[0].ext.device.type,
- patch->sources[0].ext.device.address);
- } else {
- address = (char *)calloc(1, 1);
- }
- AudioParameter param = AudioParameter(String8(address));
- free(address);
- param.addInt(String8(AudioParameter::keyRouting),
- (int)patch->sources[0].ext.device.type);
- param.addInt(String8(AudioParameter::keyInputSource),
- (int)patch->sinks[0].ext.mix.usecase.source);
- status = mInput->stream->setParameters(param.toString());
+ status = mInput->stream->legacyCreateAudioPatch(patch->sources[0],
+ patch->sinks[0].ext.mix.usecase.source,
+ patch->sources[0].ext.device.type);
*handle = AUDIO_PATCH_HANDLE_NONE;
}
@@ -9176,9 +9148,7 @@
sp<DeviceHalInterface> hwDevice = mInput->audioHwDev->hwDevice();
status = hwDevice->releaseAudioPatch(handle);
} else {
- AudioParameter param;
- param.addInt(String8(AudioParameter::keyRouting), 0);
- status = mInput->stream->setParameters(param.toString());
+ status = mInput->stream->legacyReleaseAudioPatch();
}
return status;
}
@@ -9897,29 +9867,18 @@
}
if (mAudioHwDev->supportsAudioPatches()) {
- status = mHalDevice->createAudioPatch(patch->num_sources,
- patch->sources,
- patch->num_sinks,
- patch->sinks,
- handle);
+ status = mHalDevice->createAudioPatch(patch->num_sources, patch->sources, patch->num_sinks,
+ patch->sinks, handle);
} else {
- char *address;
- if (strcmp(patch->sinks[0].ext.device.address, "") != 0) {
- //FIXME: we only support address on first sink with HAL version < 3.0
- address = audio_device_address_to_parameter(
- patch->sinks[0].ext.device.type,
- patch->sinks[0].ext.device.address);
+ audio_port_config port;
+ std::optional<audio_source_t> source;
+ if (isOutput()) {
+ port = patch->sinks[0];
} else {
- address = (char *)calloc(1, 1);
+ port = patch->sources[0];
+ source = patch->sinks[0].ext.mix.usecase.source;
}
- AudioParameter param = AudioParameter(String8(address));
- free(address);
- param.addInt(String8(AudioParameter::keyRouting), (int)type);
- if (!isOutput()) {
- param.addInt(String8(AudioParameter::keyInputSource),
- (int)patch->sinks[0].ext.mix.usecase.source);
- }
- status = mHalStream->setParameters(param.toString());
+ status = mHalStream->legacyCreateAudioPatch(port, source, type);
*handle = AUDIO_PATCH_HANDLE_NONE;
}
@@ -9958,9 +9917,7 @@
if (supportsAudioPatches) {
status = mHalDevice->releaseAudioPatch(handle);
} else {
- AudioParameter param;
- param.addInt(String8(AudioParameter::keyRouting), 0);
- status = mHalStream->setParameters(param.toString());
+ status = mHalStream->legacyReleaseAudioPatch();
}
return status;
}
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index efbae47..11fa991 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -145,9 +145,9 @@
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
if (entry.count > 0) {
- const auto it = std::find(entry.data.i32, entry.data.i32 + entry.count,
+ const auto it = std::find(entry.data.u8, entry.data.u8 + entry.count,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT);
- if (it != entry.data.i32 + entry.count) {
+ if (it != entry.data.u8 + entry.count) {
entry = deviceInfo.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP);
if (entry.count > 0 || ((entry.count % 2) != 0)) {
int standardBitmap = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
@@ -860,6 +860,7 @@
bool deferredConsumerOnly = deferredConsumer && numBufferProducers == 0;
bool isMultiResolution = outputConfiguration.isMultiResolution();
int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+ int streamUseCase = outputConfiguration.getStreamUseCase();
res = SessionConfigurationUtils::checkSurfaceType(numBufferProducers, deferredConsumer,
outputConfiguration.getSurfaceType());
@@ -903,7 +904,8 @@
sp<Surface> surface;
res = SessionConfigurationUtils::createSurfaceFromGbp(streamInfo,
isStreamInfoValid, surface, bufferProducer, mCameraIdStr,
- mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile);
+ mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
+ streamUseCase);
if (!res.isOk())
return res;
@@ -949,7 +951,7 @@
static_cast<camera_stream_rotation_t>(outputConfiguration.getRotation()),
&streamId, physicalCameraId, streamInfo.sensorPixelModesUsed, &surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared, isMultiResolution,
- streamInfo.dynamicRangeProfile);
+ /*consumerUsage*/0, streamInfo.dynamicRangeProfile, streamInfo.streamUseCase);
}
if (err != OK) {
@@ -1044,7 +1046,8 @@
&surfaceIds,
outputConfiguration.getSurfaceSetID(), isShared,
outputConfiguration.isMultiResolution(), consumerUsage,
- outputConfiguration.getDynamicRangeProfile());
+ outputConfiguration.getDynamicRangeProfile(),
+ outputConfiguration.getStreamUseCase());
if (err != OK) {
res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
@@ -1058,7 +1061,8 @@
mStreamInfoMap.emplace(std::piecewise_construct, std::forward_as_tuple(streamId),
std::forward_as_tuple(width, height, format, dataSpace, consumerUsage,
overriddenSensorPixelModesUsed,
- outputConfiguration.getDynamicRangeProfile()));
+ outputConfiguration.getDynamicRangeProfile(),
+ outputConfiguration.getStreamUseCase()));
ALOGV("%s: Camera %s: Successfully created a new stream ID %d for a deferred surface"
" (%d x %d) stream with format 0x%x.",
@@ -1246,6 +1250,7 @@
}
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
+ int streamUseCase = outputConfiguration.getStreamUseCase();
int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
@@ -1254,7 +1259,8 @@
sp<Surface> surface;
res = SessionConfigurationUtils::createSurfaceFromGbp(outInfo,
/*isStreamInfoValid*/ false, surface, newOutputsMap.valueAt(i), mCameraIdStr,
- mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile);
+ mDevice->infoPhysical(physicalCameraId), sensorPixelModesUsed, dynamicRangeProfile,
+ streamUseCase);
if (!res.isOk())
return res;
@@ -1612,6 +1618,7 @@
const std::vector<int32_t> &sensorPixelModesUsed =
outputConfiguration.getSensorPixelModesUsed();
int dynamicRangeProfile = outputConfiguration.getDynamicRangeProfile();
+ int streamUseCase = outputConfiguration.getStreamUseCase();
for (auto& bufferProducer : bufferProducers) {
// Don't create multiple streams for the same target surface
ssize_t index = mStreamMap.indexOfKey(IInterface::asBinder(bufferProducer));
@@ -1624,7 +1631,8 @@
sp<Surface> surface;
res = SessionConfigurationUtils::createSurfaceFromGbp(mStreamInfoMap[streamId],
true /*isStreamInfoValid*/, surface, bufferProducer, mCameraIdStr,
- mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile);
+ mDevice->infoPhysical(physicalId), sensorPixelModesUsed, dynamicRangeProfile,
+ streamUseCase);
if (!res.isOk())
return res;
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index e936cb6..6ddf500 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -183,7 +183,8 @@
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
bool isShared = false, bool isMultiResolution = false,
uint64_t consumerUsage = 0,
- int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) = 0;
+ int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) = 0;
/**
* Create an output stream of the requested size, format, rotation and
@@ -201,7 +202,8 @@
int streamSetId = camera3::CAMERA3_STREAM_SET_ID_INVALID,
bool isShared = false, bool isMultiResolution = false,
uint64_t consumerUsage = 0,
- int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) = 0;
+ int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) = 0;
/**
* Create an input stream of width, height, and format.
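Note: because the new trailing parameter defaults to
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT, existing createStream() callers keep
compiling unchanged. A hypothetical call site for illustration (not part of this change;
the leading arguments follow the existing declaration):

// Omitting the trailing arguments picks up the declared defaults, including the
// DEFAULT stream use case.
status_t res = device->createStream(consumer, width, height, format, dataSpace,
        rotation, &streamId, physicalCameraId, sensorPixelModesUsed, &surfaceIds);

// Equivalent explicit form, spelling out every defaulted argument:
status_t res2 = device->createStream(consumer, width, height, format, dataSpace,
        rotation, &streamId, physicalCameraId, sensorPixelModesUsed, &surfaceIds,
        camera3::CAMERA3_STREAM_SET_ID_INVALID, /*isShared*/ false,
        /*isMultiResolution*/ false, /*consumerUsage*/ 0,
        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);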
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 64f5abf..4568209 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -714,6 +714,10 @@
bool isLogicalCameraLocked(const std::string& id, std::vector<std::string>* physicalCameraIds);
+ // No method that operates on this provider, and no member belonging to this
+ // provider, should be used after this method is called, since doing so leads
+ // to invalid memory access (especially because this method is called by
+ // ProviderInfo methods on HAL service death).
status_t removeProvider(const std::string& provider);
sp<StatusListener> getStatusListener() const;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 992027a..c8f6310 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -978,7 +978,7 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
- uint64_t consumerUsage, int dynamicRangeProfile) {
+ uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase) {
ATRACE_CALL();
if (consumer == nullptr) {
@@ -991,7 +991,8 @@
return createStream(consumers, /*hasDeferredConsumer*/ false, width, height,
format, dataSpace, rotation, id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
- streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile);
+ streamSetId, isShared, isMultiResolution, consumerUsage, dynamicRangeProfile,
+ streamUseCase);
}
static bool isRawFormat(int format) {
@@ -1011,7 +1012,7 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation, int *id,
const String8& physicalCameraId, const std::unordered_set<int32_t> &sensorPixelModesUsed,
std::vector<int> *surfaceIds, int streamSetId, bool isShared, bool isMultiResolution,
- uint64_t consumerUsage, int dynamicRangeProfile) {
+ uint64_t consumerUsage, int dynamicRangeProfile, int streamUseCase) {
ATRACE_CALL();
Mutex::Autolock il(mInterfaceLock);
@@ -1089,7 +1090,7 @@
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, blobBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- isMultiResolution, dynamicRangeProfile);
+ isMultiResolution, dynamicRangeProfile, streamUseCase);
} else if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
bool maxResolution =
sensorPixelModesUsed.find(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION) !=
@@ -1103,22 +1104,22 @@
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, rawOpaqueBufferSize, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- isMultiResolution, dynamicRangeProfile);
+ isMultiResolution, dynamicRangeProfile, streamUseCase);
} else if (isShared) {
newStream = new Camera3SharedOutputStream(mNextStreamId, consumers,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- mUseHalBufManager, dynamicRangeProfile);
+ mUseHalBufManager, dynamicRangeProfile, streamUseCase);
} else if (consumers.size() == 0 && hasDeferredConsumer) {
newStream = new Camera3OutputStream(mNextStreamId,
width, height, format, consumerUsage, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- isMultiResolution, dynamicRangeProfile);
+ isMultiResolution, dynamicRangeProfile, streamUseCase);
} else {
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, format, dataSpace, rotation,
mTimestampOffset, physicalCameraId, sensorPixelModesUsed, streamSetId,
- isMultiResolution, dynamicRangeProfile);
+ isMultiResolution, dynamicRangeProfile, streamUseCase);
}
size_t consumerCount = consumers.size();
@@ -1812,14 +1813,16 @@
streamIds.push_back(stream->getId());
Camera3Stream* camera3Stream = Camera3Stream::cast(stream->asHalStream());
int64_t usage = 0LL;
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT;
if (camera3Stream != nullptr) {
usage = camera3Stream->getUsage();
+ streamUseCase = camera3Stream->getStreamUseCase();
}
streamStats.emplace_back(stream->getWidth(), stream->getHeight(),
stream->getFormat(), stream->getDataSpace(), usage,
stream->getMaxHalBuffers(),
stream->getMaxTotalBuffers() - stream->getMaxHalBuffers(),
- stream->getDynamicRangeProfile());
+ stream->getDynamicRangeProfile(), streamUseCase);
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 6c4ba49..d466ae4 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -134,7 +134,8 @@
bool isShared = false, bool isMultiResolution = false,
uint64_t consumerUsage = 0,
int dynamicRangeProfile =
- ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) override;
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) override;
status_t createStream(const std::vector<sp<Surface>>& consumers,
bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -146,7 +147,8 @@
bool isShared = false, bool isMultiResolution = false,
uint64_t consumerUsage = 0,
int dynamicRangeProfile =
- ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) override;
+ ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) override;
status_t createInputStream(
uint32_t width, uint32_t height, int format, bool isMultiResolution,
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index ba97367..dc3a6f3 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -34,11 +34,11 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation,
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool isMultiResolution, int dynamicRangeProfile) :
+ int setId, bool isMultiResolution, int dynamicRangeProfile, int streamUseCase) :
Camera3Stream(id, type,
width, height, maxSize, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile),
+ dynamicRangeProfile, streamUseCase),
mTotalBufferCount(0),
mHandoutTotalBufferCount(0),
mHandoutOutputBufferCount(0),
@@ -89,6 +89,7 @@
lines.appendFormat(" Physical camera id: %s\n", camera_stream::physical_camera_id);
}
lines.appendFormat(" Dynamic Range Profile: 0x%x", camera_stream::dynamic_range_profile);
+ lines.appendFormat(" Stream use case: %d\n", camera_stream::use_case);
lines.appendFormat(" Frames produced: %d, last timestamp: %" PRId64 " ns\n",
mFrameCount, mLastTimestamp);
lines.appendFormat(" Total buffers: %zu, currently dequeued: %zu\n",
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 518ee42..f2b1536 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -38,7 +38,8 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
- int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+ int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
public:
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 98cbbcf..95d19ec 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -46,11 +46,12 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation,
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool isMultiResolution, int dynamicRangeProfile) :
+ int setId, bool isMultiResolution, int dynamicRangeProfile,
+ int streamUseCase) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile),
+ dynamicRangeProfile, streamUseCase),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -75,10 +76,11 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation,
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool isMultiResolution, int dynamicRangeProfile) :
+ int setId, bool isMultiResolution, int dynamicRangeProfile,
+ int streamUseCase) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height, maxSize,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
- setId, isMultiResolution, dynamicRangeProfile),
+ setId, isMultiResolution, dynamicRangeProfile, streamUseCase),
mConsumer(consumer),
mTransform(0),
mTraceFirstBuffer(true),
@@ -110,11 +112,12 @@
camera_stream_rotation_t rotation, nsecs_t timestampOffset,
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool isMultiResolution, int dynamicRangeProfile) :
+ int setId, bool isMultiResolution, int dynamicRangeProfile,
+ int streamUseCase) :
Camera3IOStreamBase(id, CAMERA_STREAM_OUTPUT, width, height,
/*maxSize*/0, format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile),
+ dynamicRangeProfile, streamUseCase),
mConsumer(nullptr),
mTransform(0),
mTraceFirstBuffer(true),
@@ -152,12 +155,12 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
uint64_t consumerUsage, nsecs_t timestampOffset,
int setId, bool isMultiResolution,
- int dynamicRangeProfile) :
+ int dynamicRangeProfile, int streamUseCase) :
Camera3IOStreamBase(id, type, width, height,
/*maxSize*/0,
format, dataSpace, rotation,
physicalCameraId, sensorPixelModesUsed, setId, isMultiResolution,
- dynamicRangeProfile),
+ dynamicRangeProfile, streamUseCase),
mTransform(0),
mTraceFirstBuffer(true),
mUseMonoTimestamp(false),
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index d9bf62a..7d2d32e 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -90,7 +90,8 @@
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
- int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+ int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
/**
* Set up a stream for formats that have a variable buffer size for the same
* dimensions, such as compressed JPEG.
@@ -103,7 +104,8 @@
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
- int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+ int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
/**
* Set up a stream with deferred consumer for formats that have 2 dimensions, such as
* RAW and YUV. The consumer must be set before using this stream for output. A valid
@@ -115,7 +117,8 @@
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
- int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+ int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
virtual ~Camera3OutputStream();
@@ -243,7 +246,8 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
- int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+ int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
/**
* Note that we release the lock briefly in this function
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 0e2671a..fbd1e56 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -33,10 +33,12 @@
camera_stream_rotation_t rotation,
nsecs_t timestampOffset, const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool useHalBufManager, int dynamicProfile) :
+ int setId, bool useHalBufManager, int dynamicProfile,
+ int streamUseCase) :
Camera3OutputStream(id, CAMERA_STREAM_OUTPUT, width, height,
format, dataSpace, rotation, physicalCameraId, sensorPixelModesUsed,
- consumerUsage, timestampOffset, setId, dynamicProfile),
+ consumerUsage, timestampOffset, setId, /*isMultiResolution*/false,
+ dynamicProfile, streamUseCase),
mUseHalBufManager(useHalBufManager) {
size_t consumerCount = std::min(surfaces.size(), kMaxOutputs);
if (surfaces.size() > consumerCount) {
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index fafa26f..223d52b 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -41,7 +41,8 @@
const std::unordered_set<int32_t> &sensorPixelModesUsed,
int setId = CAMERA3_STREAM_SET_ID_INVALID,
bool useHalBufManager = false,
- int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
+ int dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
+ int streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);
virtual ~Camera3SharedOutputStream();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 83f9a98..6b093b3 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -54,7 +54,8 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation,
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool isMultiResolution, int dynamicRangeProfile) :
+ int setId, bool isMultiResolution, int dynamicRangeProfile,
+ int streamUseCase) :
camera_stream(),
mId(id),
mSetId(setId),
@@ -91,6 +92,7 @@
camera_stream::physical_camera_id = mPhysicalCameraId.string();
camera_stream::sensor_pixel_modes_used = sensorPixelModesUsed;
camera_stream::dynamic_range_profile = dynamicRangeProfile;
+ camera_stream::use_case = streamUseCase;
if ((format == HAL_PIXEL_FORMAT_BLOB || format == HAL_PIXEL_FORMAT_RAW_OPAQUE) &&
maxSize == 0) {
@@ -175,6 +177,10 @@
return camera_stream::max_buffers;
}
+int Camera3Stream::getStreamUseCase() const {
+ return camera_stream::use_case;
+}
+
void Camera3Stream::setOfflineProcessingSupport(bool support) {
mSupportOfflineProcessing = support;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index bbbea8d..ada570b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -178,6 +178,7 @@
android_dataspace getOriginalDataSpace() const;
int getMaxHalBuffers() const;
const String8& physicalCameraId() const;
+ int getStreamUseCase() const;
void setOfflineProcessingSupport(bool) override;
bool getOfflineProcessingSupport() const override;
@@ -505,7 +506,8 @@
android_dataspace dataSpace, camera_stream_rotation_t rotation,
const String8& physicalCameraId,
const std::unordered_set<int32_t> &sensorPixelModesUsed,
- int setId, bool isMultiResolution, int dynamicRangeProfile);
+ int setId, bool isMultiResolution, int dynamicRangeProfile,
+ int streamUseCase);
wp<Camera3StreamBufferFreedListener> mBufferFreedListener;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index ef10f0d..7b1597b 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -65,6 +65,7 @@
std::unordered_set<int32_t> sensor_pixel_modes_used;
int dynamic_range_profile;
+ int use_case;
} camera_stream_t;
typedef struct camera_stream_buffer {
@@ -109,16 +110,19 @@
bool supportsOffline = false;
std::unordered_set<int32_t> sensorPixelModesUsed;
int dynamicRangeProfile;
+ int streamUseCase;
OutputStreamInfo() :
width(-1), height(-1), format(-1), dataSpace(HAL_DATASPACE_UNKNOWN),
consumerUsage(0),
- dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {}
+ dynamicRangeProfile(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD),
+ streamUseCase(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {}
OutputStreamInfo(int _width, int _height, int _format, android_dataspace _dataSpace,
uint64_t _consumerUsage, const std::unordered_set<int32_t>& _sensorPixelModesUsed,
- int _dynamicRangeProfile) :
+ int _dynamicRangeProfile, int _streamUseCase) :
width(_width), height(_height), format(_format),
dataSpace(_dataSpace), consumerUsage(_consumerUsage),
- sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile){}
+ sensorPixelModesUsed(_sensorPixelModesUsed), dynamicRangeProfile(_dynamicRangeProfile),
+ streamUseCase(_streamUseCase) {}
};
/**
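Note: with the extra streamUseCase member, a fully specified OutputStreamInfo now takes
eight constructor arguments. A hypothetical construction for illustration (all values
are placeholders):

// Builds an OutputStreamInfo that carries the default use case explicitly.
camera3::OutputStreamInfo info(/*width*/ 1920, /*height*/ 1080,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, HAL_DATASPACE_UNKNOWN,
        /*consumerUsage*/ 0, /*sensorPixelModesUsed*/ {},
        ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
        ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT);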
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 7b7a2a2..87cf99a 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -65,6 +65,7 @@
using namespace android::hardware::camera;
using namespace android::hardware::camera::device::V3_2;
using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
+using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidScalerAvailableStreamUseCases;
namespace android {
@@ -969,8 +970,16 @@
__FUNCTION__, src->dynamic_range_profile);
return BAD_VALUE;
}
+ if (src->use_case != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT &&
+ mHidlSession_3_8 == nullptr) {
+ ALOGE("%s: Camera device doesn't support non-default stream use case %d!",
+ __FUNCTION__, src->use_case);
+ return BAD_VALUE;
+ }
dst3_8.v3_7 = dst3_7;
dst3_8.dynamicRangeProfile = mapToHidlDynamicProfile(src->dynamic_range_profile);
+ dst3_8.useCase =
+ static_cast<CameraMetadataEnumAndroidScalerAvailableStreamUseCases>(src->use_case);
activeStreams.insert(streamId);
// Create Buffer ID map if necessary
mBufferRecords.tryCreateBufferCache(streamId);
@@ -992,7 +1001,6 @@
requestedConfiguration3_4.sessionParams.setToExternal(
reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
sessionParamSize);
- requestedConfiguration3_7.operationMode = operationMode;
requestedConfiguration3_7.sessionParams.setToExternal(
reinterpret_cast<uint8_t*>(const_cast<camera_metadata_t*>(sessionParams)),
sessionParamSize);
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index f826d83..548fb0b 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -31,6 +31,7 @@
using android::hardware::camera2::ICameraDeviceUser;
using android::hardware::camera::metadata::V3_6::CameraMetadataEnumAndroidSensorPixelMode;
using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap;
+using android::hardware::camera::metadata::V3_8::CameraMetadataEnumAndroidScalerAvailableStreamUseCases;
namespace android {
namespace camera3 {
@@ -310,11 +311,30 @@
}
}
+bool isStreamUseCaseSupported(int streamUseCase,
+ const CameraMetadata &deviceInfo) {
+ camera_metadata_ro_entry_t availableStreamUseCases =
+ deviceInfo.find(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES);
+
+ if (availableStreamUseCases.count == 0 &&
+ streamUseCase == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+ return true;
+ }
+
+ for (size_t i = 0; i < availableStreamUseCases.count; i++) {
+ if (availableStreamUseCases.data.i32[i] == streamUseCase) {
+ return true;
+ }
+ }
+ return false;
+}
+
binder::Status createSurfaceFromGbp(
OutputStreamInfo& streamInfo, bool isStreamInfoValid,
sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
- const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile){
+ const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile,
+ int streamUseCase) {
// bufferProducer must be non-null
if (gbp == nullptr) {
String8 msg = String8::format("Camera %s: Surface is NULL", logicalCameraId.string());
@@ -427,6 +447,13 @@
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
+ if (!SessionConfigurationUtils::isStreamUseCaseSupported(streamUseCase,
+ physicalCameraMetadata)) {
+ String8 msg = String8::format("Camera %s: stream use case %d not supported,"
+ " failed to create output stream", logicalCameraId.string(), streamUseCase);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
+ }
if (!isStreamInfoValid) {
streamInfo.width = width;
@@ -436,6 +463,7 @@
streamInfo.consumerUsage = consumerUsage;
streamInfo.sensorPixelModesUsed = overriddenSensorPixelModes;
streamInfo.dynamicRangeProfile = dynamicRangeProfile;
+ streamInfo.streamUseCase = streamUseCase;
return binder::Status::ok();
}
if (width != streamInfo.width) {
@@ -506,6 +534,8 @@
stream->dynamicRangeProfile =
static_cast<CameraMetadataEnumAndroidRequestAvailableDynamicRangeProfilesMap> (
streamInfo.dynamicRangeProfile);
+ stream->useCase = static_cast<CameraMetadataEnumAndroidScalerAvailableStreamUseCases>(
+ streamInfo.streamUseCase);
}
binder::Status checkPhysicalCameraId(
@@ -666,6 +696,7 @@
return res;
}
+ int streamUseCase = it.getStreamUseCase();
if (deferredConsumer) {
streamInfo.width = it.getWidth();
streamInfo.height = it.getHeight();
@@ -686,6 +717,7 @@
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT,
"Deferred surface sensor pixel modes not valid");
}
+ streamInfo.streamUseCase = streamUseCase;
mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
&streamConfiguration.streams[streamIdx++]);
isStreamInfoValid = true;
@@ -698,7 +730,8 @@
for (auto& bufferProducer : bufferProducers) {
sp<Surface> surface;
res = createSurfaceFromGbp(streamInfo, isStreamInfoValid, surface, bufferProducer,
- logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile);
+ logicalCameraId, metadataChosen, sensorPixelModesUsed, dynamicRangeProfile,
+ streamUseCase);
if (!res.isOk())
return res;
@@ -851,6 +884,11 @@
// image
return false;
}
+ if (static_cast<int32_t>(streamConfigV38.streams[i].useCase) !=
+ ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+ // ICameraDevice older than 3.8 doesn't support stream use case
+ return false;
+ }
streamConfigV37.streams[i] = streamConfigV38.streams[i].v3_7;
}
streamConfigV37.operationMode = streamConfigV38.operationMode;
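Note: a usage sketch of the new isStreamUseCaseSupported() helper (hypothetical caller,
for illustration only). Per the implementation above, a device that does not publish
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES only accepts the DEFAULT use case; any other
value must appear in that static-metadata list.

// Hypothetical validation step before building a stream configuration.
bool acceptRequestedUseCase(const CameraMetadata& staticInfo, int requestedUseCase) {
    if (!SessionConfigurationUtils::isStreamUseCaseSupported(requestedUseCase,
            staticInfo)) {
        ALOGE("Stream use case %d is not advertised in"
                " ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES", requestedUseCase);
        return false;
    }
    return true;
}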
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
index 9a5dc2c..8dfc11d 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.h
@@ -94,13 +94,15 @@
// Create a Surface from an IGraphicBufferProducer. Returns error if
// IGraphicBufferProducer's property doesn't match with streamInfo
binder::Status createSurfaceFromGbp(
-camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
-sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
-const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
-const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile);
+ camera3::OutputStreamInfo& streamInfo, bool isStreamInfoValid,
+ sp<Surface>& surface, const sp<IGraphicBufferProducer>& gbp,
+ const String8 &logicalCameraId, const CameraMetadata &physicalCameraMetadata,
+ const std::vector<int32_t> &sensorPixelModesUsed, int dynamicRangeProfile,
+ int streamUseCase);
+
void mapStreamInfo(const camera3::OutputStreamInfo &streamInfo,
camera3::camera_stream_rotation_t rotation, String8 physicalId, int32_t groupId,
- hardware::camera::device::V3_7::Stream *stream /*out*/);
+ hardware::camera::device::V3_8::Stream *stream /*out*/);
//check if format is 10-bit output compatible
bool is10bitCompatibleFormat(int32_t format);
@@ -111,6 +113,8 @@
// Check if the device supports a given dynamicRangeProfile
bool isDynamicRangeProfileSupported(int dynamicRangeProfile, const CameraMetadata& staticMeta);
+bool isStreamUseCaseSupported(int streamUseCase, const CameraMetadata &deviceInfo);
+
// Check that the physicalCameraId passed in is supported by the camera
// device.
binder::Status checkPhysicalCameraId(